commit 5f763679fa33de1608d07f7651c6f16012b953ea Author: lujiale Date: Fri Mar 27 13:35:20 2020 +0800 initial version diff --git a/.clang-format b/.clang-format new file mode 100644 index 00000000..c931e8f0 --- /dev/null +++ b/.clang-format @@ -0,0 +1,152 @@ +--- +Language: Cpp +# BasedOnStyle: Google +AccessModifierOffset: -1 +AlignAfterOpenBracket: Align +AlignConsecutiveAssignments: false +AlignConsecutiveDeclarations: false +AlignEscapedNewlines: Left +AlignOperands: true +AlignTrailingComments: true +AllowAllParametersOfDeclarationOnNextLine: true +AllowShortBlocksOnASingleLine: false +AllowShortCaseLabelsOnASingleLine: false +AllowShortFunctionsOnASingleLine: All +AllowShortIfStatementsOnASingleLine: true +AllowShortLoopsOnASingleLine: true +AlwaysBreakAfterDefinitionReturnType: None +AlwaysBreakAfterReturnType: None +AlwaysBreakBeforeMultilineStrings: true +AlwaysBreakTemplateDeclarations: Yes +BinPackArguments: true +BinPackParameters: true +BraceWrapping: + AfterClass: false + AfterControlStatement: false + AfterEnum: false + AfterFunction: false + AfterNamespace: false + AfterObjCDeclaration: false + AfterStruct: false + AfterUnion: false + AfterExternBlock: false + BeforeCatch: false + BeforeElse: false + IndentBraces: false + SplitEmptyFunction: true + SplitEmptyRecord: true + SplitEmptyNamespace: true +BreakBeforeBinaryOperators: None +BreakBeforeBraces: Attach +BreakBeforeInheritanceComma: false +BreakInheritanceList: BeforeColon +BreakBeforeTernaryOperators: true +BreakConstructorInitializersBeforeComma: false +BreakConstructorInitializers: BeforeColon +BreakAfterJavaFieldAnnotations: false +BreakStringLiterals: true +ColumnLimit: 120 +CommentPragmas: '^ IWYU pragma:' +CompactNamespaces: false +ConstructorInitializerAllOnOneLineOrOnePerLine: true +ConstructorInitializerIndentWidth: 4 +ContinuationIndentWidth: 2 +Cpp11BracedListStyle: true +DerivePointerAlignment: true +DisableFormat: false +ExperimentalAutoDetectBinPacking: false 
+FixNamespaceComments: true +ForEachMacros: +# - foreach + - Q_FOREACH + - BOOST_FOREACH +IncludeBlocks: Preserve +IncludeCategories: + - Regex: '^' + Priority: 2 + - Regex: '^<.*\.h>' + Priority: 1 + - Regex: '^<.*' + Priority: 2 + - Regex: '.*' + Priority: 3 +IncludeIsMainRegex: '([-_](test|unittest))?$' +IndentCaseLabels: true +IndentPPDirectives: None +IndentWidth: 2 +IndentWrappedFunctionNames: false +JavaScriptQuotes: Leave +JavaScriptWrapImports: true +KeepEmptyLinesAtTheStartOfBlocks: false +MacroBlockBegin: '' +MacroBlockEnd: '' +MaxEmptyLinesToKeep: 1 +NamespaceIndentation: None +ObjCBinPackProtocolList: Never +ObjCBlockIndentWidth: 2 +ObjCSpaceAfterProperty: false +ObjCSpaceBeforeProtocolList: true +PenaltyBreakAssignment: 2 +PenaltyBreakBeforeFirstCallParameter: 1 +PenaltyBreakComment: 300 +PenaltyBreakFirstLessLess: 120 +PenaltyBreakString: 1000 +PenaltyBreakTemplateDeclaration: 10 +PenaltyExcessCharacter: 1000000 +PenaltyReturnTypeOnItsOwnLine: 200 +PointerAlignment: Left +RawStringFormats: + - Language: Cpp + Delimiters: + - cc + - CC + - cpp + - Cpp + - CPP + - 'c++' + - 'C++' + CanonicalDelimiter: '' + BasedOnStyle: google + - Language: TextProto + Delimiters: + - pb + - PB + - proto + - PROTO + EnclosingFunctions: + - EqualsProto + - EquivToProto + - PARSE_PARTIAL_TEXT_PROTO + - PARSE_TEST_PROTO + - PARSE_TEXT_PROTO + - ParseTextOrDie + - ParseTextProtoOrDie + CanonicalDelimiter: '' + BasedOnStyle: google +ReflowComments: true +SortIncludes: true +SortUsingDeclarations: true +SpaceAfterCStyleCast: false +SpaceAfterTemplateKeyword: true +SpaceBeforeAssignmentOperators: true +SpaceBeforeCpp11BracedList: false +SpaceBeforeCtorInitializerColon: true +SpaceBeforeInheritanceColon: true +SpaceBeforeParens: ControlStatements +SpaceBeforeRangeBasedForLoopColon: true +SpaceInEmptyParentheses: false +SpacesBeforeTrailingComments: 2 +SpacesInAngles: false +SpacesInContainerLiterals: true +SpacesInCStyleCastParentheses: false +SpacesInParentheses: false 
+SpacesInSquareBrackets: false +Standard: Auto +StatementMacros: + - Q_UNUSED + - QT_REQUIRE_VERSION +TabWidth: 2 +UseTab: Never +SortIncludes: false +... + diff --git a/.gitignore b/.gitignore new file mode 100644 index 00000000..b2b3fd49 --- /dev/null +++ b/.gitignore @@ -0,0 +1,29 @@ +# GraphEngine +/build +/output +/prebuilts +*.ir +*.out + +# Dynamic libraries +# *.so +*.dylib + +# Static libraries +*.la +*.lai +*.a +*.lib + +# Protocol buffers +*_pb2.py +*.pb.h +*.pb.cc + +# Object files +*.o + +# Editor +.vscode +.idea/ +cmake-build-* diff --git a/CMakeLists.txt b/CMakeLists.txt new file mode 100755 index 00000000..a75912fa --- /dev/null +++ b/CMakeLists.txt @@ -0,0 +1,134 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +cmake_minimum_required(VERSION 3.14) +project(GraphEngine CXX) +set(CMAKE_CXX_STANDARD 14) + +set(LIBRARY_OUTPUT_PATH ${PROJECT_BINARY_DIR}) +set(GE_SOURCE_DIR ${CMAKE_CURRENT_LIST_DIR}) +set(GE_PROTO_DIR ${GE_SOURCE_DIR}/src) + +if (NOT BUILD_PATH) + set(BUILD_PATH "${CMAKE_SOURCE_DIR}/build") +endif() +# architecture: aarch64 or x86_64 +message(STATUS "System architecture: ${CMAKE_HOST_SYSTEM_PROCESSOR}") +# system: euleros or ubuntu +if (CMAKE_CXX_COMPILER_ID STREQUAL "GNU") + execute_process( + COMMAND bash "-c" "cat /etc/os-release | grep ^ID= | awk -F '=' '{print $2}'" + OUTPUT_VARIABLE SYSTEM_TYPE + ) + MESSAGE(STATUS "System type: ${SYSTEM_TYPE}.") +endif() + +# download json headers, rather than whole repository +include(${GE_SOURCE_DIR}/cmake/ge_utils.cmake) +include(${GE_SOURCE_DIR}/cmake/external_libs/json.cmake) +include(${GE_SOURCE_DIR}/cmake/external_libs/eigen.cmake) +include(${GE_SOURCE_DIR}/cmake/external_libs/gtest.cmake) +include(${GE_SOURCE_DIR}/cmake/external_libs/protobuf.cmake) +include(${GE_SOURCE_DIR}/cmake/external_libs/onnx.cmake) +set(CMAKE_SKIP_RPATH TRUE) + +# for CPU/GPU mode, find c_sec and slog from local prebuild +if(NOT ENABLE_D AND NOT GE_ONLY) + set(GE_PREBUILD_PATH ${GE_SOURCE_DIR}/third_party/prebuild/${CMAKE_HOST_SYSTEM_PROCESSOR}) + find_library(c_sec libc_sec.so ${GE_PREBUILD_PATH}) + find_library(slog libslog.so ${GE_PREBUILD_PATH}) +# if D_LINK_PATH is set in environment variables, search libraries in given path +elseif(DEFINED ENV{D_LINK_PATH}) + # D_LINK_PATH is set + set(GE_LIB_PATH $ENV{D_LINK_PATH}) + set(GE_SYS_ARCH "") + if(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "x86_64") + # x86 ubuntu + set(GE_SYS_ARCH "x86_64") + elseif(CMAKE_HOST_SYSTEM_PROCESSOR MATCHES "aarch64") + # arm euleros + set(GE_SYS_ARCH "aarch64") + else() + message(FATAL_ERROR "Running on an unsupported architecture: ${SYSTEM_TYPE}, build terminated") + endif() + 
set(GE_LIB_PATH ${GE_LIB_PATH}/${GE_SYS_ARCH}) + find_library(c_sec libc_sec.so ${GE_LIB_PATH}) + find_library(slog libslog.so ${GE_LIB_PATH}) + find_library(mmpa libmmpa.so ${GE_LIB_PATH}) + find_library(runtime libruntime.so ${GE_LIB_PATH}) + find_library(msprof libmsprof.so ${GE_LIB_PATH}) + find_library(register libregister.so ${GE_LIB_PATH}) + find_library(hccl libhccl.so ${GE_LIB_PATH}) + find_library(cce libcce.so ${GE_LIB_PATH}) + find_library(resource libresource.so ${GE_LIB_PATH}) +else() + # Ascend mode + set(HIAI_INSTALLED_DIR /usr/local/HiAI) + set(HIAI_DRIVER_DIR ${HIAI_INSTALLED_DIR}/driver/lib64) + set(HIAI_RUNTIME_DIR ${HIAI_INSTALLED_DIR}/runtime/lib64) + find_library(c_sec libc_sec.so ${HIAI_DRIVER_DIR}) + find_library(slog libslog.so ${HIAI_DRIVER_DIR}) + find_library(mmpa libmmpa.so ${HIAI_DRIVER_DIR}) + + find_library(cce libcce.so ${HIAI_RUNTIME_DIR}) + find_library(hccl libhccl.so ${HIAI_RUNTIME_DIR}) + find_library(runtime libruntime.so ${HIAI_RUNTIME_DIR}) + find_library(msprof libmsprof.so ${HIAI_RUNTIME_DIR}) + find_library(register libregister.so ${HIAI_RUNTIME_DIR}) + find_library(resource libresource.so ${HIAI_RUNTIME_DIR}) +endif() + +# add compile flags +include(CheckCXXCompilerFlag) +check_cxx_compiler_flag("-std=c++11" SUPPORT_CXX11) +if ("${CMAKE_BUILD_TYPE}" STREQUAL "Debug") + message("Build in Debug mode") + set(CMAKE_C_FLAGS "-O0 -g -Wall -fstack-protector-all -Wl,-z,relro,-z,now,-z,noexecstack -pipe -fPIC ${CMAKE_C_FLAGS}") + set(CMAKE_CXX_FLAGS "-O0 -g -Wall -fstack-protector-all -Wl,-z,relro,-z,now,-z,noexecstack -pipe -fPIC ${CMAKE_CXX_FLAGS}") + if ("${CMAKE_CXX_COMPILER_ID}" STREQUAL "GNU") + set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -rdynamic") + set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -rdynamic") + endif() +else() + set(CMAKE_C_FLAGS "-O2 -Wall -fPIC -fstack-protector-all -Wl,-z,relro,-z,now,-z,noexecstack -pipe ${CMAKE_C_FLAGS}") + set(CMAKE_CXX_FLAGS "-O2 -Wall -fPIC -fstack-protector-all 
-Wl,-z,relro,-z,now,-z,noexecstack -pipe ${CMAKE_CXX_FLAGS}") +endif () + +# force __FILE__ to show relative path of file, from source directory, as cmake project makes __FILE__ absolute directory +set(CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS} -D__FILE__='\"$(subst ${CMAKE_SOURCE_DIR}/,,$(abspath $<))\"' -Wno-builtin-macro-redefined") + +# compile libraries from following directories +# libgraph is compiled in any situation +add_subdirectory(${GE_SOURCE_DIR}/src/common/graph) +if(ENABLE_D) + # if MindSpore compiles in D mode, compile the following libraries + add_subdirectory(${GE_SOURCE_DIR}/src/ge/common) + add_subdirectory(${GE_SOURCE_DIR}/src/ge/ge_runtime) +elseif(GE_ONLY) + # standalone GraphEngine compiles all following libraries + add_subdirectory(${GE_SOURCE_DIR}/src/ge/common) + add_subdirectory(${GE_SOURCE_DIR}/src/ge/ge_runtime) + add_subdirectory(${GE_SOURCE_DIR}/src/ge/ge_local_engine) + add_subdirectory(${GE_SOURCE_DIR}/src/ge/graph/build/memory) + add_subdirectory(${GE_SOURCE_DIR}/src/ge/) + add_subdirectory(${GE_SOURCE_DIR}/src/ge/executor) + add_subdirectory(${GE_SOURCE_DIR}/src/ge/client) + add_subdirectory(${GE_SOURCE_DIR}/src/ge/plugin/engine) +endif() + +if (ENABLE_GE_COV OR ENABLE_GE_UT OR ENABLE_GE_ST) + add_subdirectory(tests) +endif() + diff --git a/LICENSE b/LICENSE new file mode 100644 index 00000000..261eeb9e --- /dev/null +++ b/LICENSE @@ -0,0 +1,201 @@ + Apache License + Version 2.0, January 2004 + http://www.apache.org/licenses/ + + TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION + + 1. Definitions. + + "License" shall mean the terms and conditions for use, reproduction, + and distribution as defined by Sections 1 through 9 of this document. + + "Licensor" shall mean the copyright owner or entity authorized by + the copyright owner that is granting the License. + + "Legal Entity" shall mean the union of the acting entity and all + other entities that control, are controlled by, or are under common + control with that entity. 
For the purposes of this definition, + "control" means (i) the power, direct or indirect, to cause the + direction or management of such entity, whether by contract or + otherwise, or (ii) ownership of fifty percent (50%) or more of the + outstanding shares, or (iii) beneficial ownership of such entity. + + "You" (or "Your") shall mean an individual or Legal Entity + exercising permissions granted by this License. + + "Source" form shall mean the preferred form for making modifications, + including but not limited to software source code, documentation + source, and configuration files. + + "Object" form shall mean any form resulting from mechanical + transformation or translation of a Source form, including but + not limited to compiled object code, generated documentation, + and conversions to other media types. + + "Work" shall mean the work of authorship, whether in Source or + Object form, made available under the License, as indicated by a + copyright notice that is included in or attached to the work + (an example is provided in the Appendix below). + + "Derivative Works" shall mean any work, whether in Source or Object + form, that is based on (or derived from) the Work and for which the + editorial revisions, annotations, elaborations, or other modifications + represent, as a whole, an original work of authorship. For the purposes + of this License, Derivative Works shall not include works that remain + separable from, or merely link (or bind by name) to the interfaces of, + the Work and Derivative Works thereof. + + "Contribution" shall mean any work of authorship, including + the original version of the Work and any modifications or additions + to that Work or Derivative Works thereof, that is intentionally + submitted to Licensor for inclusion in the Work by the copyright owner + or by an individual or Legal Entity authorized to submit on behalf of + the copyright owner. 
For the purposes of this definition, "submitted" + means any form of electronic, verbal, or written communication sent + to the Licensor or its representatives, including but not limited to + communication on electronic mailing lists, source code control systems, + and issue tracking systems that are managed by, or on behalf of, the + Licensor for the purpose of discussing and improving the Work, but + excluding communication that is conspicuously marked or otherwise + designated in writing by the copyright owner as "Not a Contribution." + + "Contributor" shall mean Licensor and any individual or Legal Entity + on behalf of whom a Contribution has been received by Licensor and + subsequently incorporated within the Work. + + 2. Grant of Copyright License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + copyright license to reproduce, prepare Derivative Works of, + publicly display, publicly perform, sublicense, and distribute the + Work and such Derivative Works in Source or Object form. + + 3. Grant of Patent License. Subject to the terms and conditions of + this License, each Contributor hereby grants to You a perpetual, + worldwide, non-exclusive, no-charge, royalty-free, irrevocable + (except as stated in this section) patent license to make, have made, + use, offer to sell, sell, import, and otherwise transfer the Work, + where such license applies only to those patent claims licensable + by such Contributor that are necessarily infringed by their + Contribution(s) alone or by combination of their Contribution(s) + with the Work to which such Contribution(s) was submitted. 
If You + institute patent litigation against any entity (including a + cross-claim or counterclaim in a lawsuit) alleging that the Work + or a Contribution incorporated within the Work constitutes direct + or contributory patent infringement, then any patent licenses + granted to You under this License for that Work shall terminate + as of the date such litigation is filed. + + 4. Redistribution. You may reproduce and distribute copies of the + Work or Derivative Works thereof in any medium, with or without + modifications, and in Source or Object form, provided that You + meet the following conditions: + + (a) You must give any other recipients of the Work or + Derivative Works a copy of this License; and + + (b) You must cause any modified files to carry prominent notices + stating that You changed the files; and + + (c) You must retain, in the Source form of any Derivative Works + that You distribute, all copyright, patent, trademark, and + attribution notices from the Source form of the Work, + excluding those notices that do not pertain to any part of + the Derivative Works; and + + (d) If the Work includes a "NOTICE" text file as part of its + distribution, then any Derivative Works that You distribute must + include a readable copy of the attribution notices contained + within such NOTICE file, excluding those notices that do not + pertain to any part of the Derivative Works, in at least one + of the following places: within a NOTICE text file distributed + as part of the Derivative Works; within the Source form or + documentation, if provided along with the Derivative Works; or, + within a display generated by the Derivative Works, if and + wherever such third-party notices normally appear. The contents + of the NOTICE file are for informational purposes only and + do not modify the License. 
You may add Your own attribution + notices within Derivative Works that You distribute, alongside + or as an addendum to the NOTICE text from the Work, provided + that such additional attribution notices cannot be construed + as modifying the License. + + You may add Your own copyright statement to Your modifications and + may provide additional or different license terms and conditions + for use, reproduction, or distribution of Your modifications, or + for any such Derivative Works as a whole, provided Your use, + reproduction, and distribution of the Work otherwise complies with + the conditions stated in this License. + + 5. Submission of Contributions. Unless You explicitly state otherwise, + any Contribution intentionally submitted for inclusion in the Work + by You to the Licensor shall be under the terms and conditions of + this License, without any additional terms or conditions. + Notwithstanding the above, nothing herein shall supersede or modify + the terms of any separate license agreement you may have executed + with Licensor regarding such Contributions. + + 6. Trademarks. This License does not grant permission to use the trade + names, trademarks, service marks, or product names of the Licensor, + except as required for reasonable and customary use in describing the + origin of the Work and reproducing the content of the NOTICE file. + + 7. Disclaimer of Warranty. Unless required by applicable law or + agreed to in writing, Licensor provides the Work (and each + Contributor provides its Contributions) on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or + implied, including, without limitation, any warranties or conditions + of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A + PARTICULAR PURPOSE. You are solely responsible for determining the + appropriateness of using or redistributing the Work and assume any + risks associated with Your exercise of permissions under this License. + + 8. 
Limitation of Liability. In no event and under no legal theory, + whether in tort (including negligence), contract, or otherwise, + unless required by applicable law (such as deliberate and grossly + negligent acts) or agreed to in writing, shall any Contributor be + liable to You for damages, including any direct, indirect, special, + incidental, or consequential damages of any character arising as a + result of this License or out of the use or inability to use the + Work (including but not limited to damages for loss of goodwill, + work stoppage, computer failure or malfunction, or any and all + other commercial damages or losses), even if such Contributor + has been advised of the possibility of such damages. + + 9. Accepting Warranty or Additional Liability. While redistributing + the Work or Derivative Works thereof, You may choose to offer, + and charge a fee for, acceptance of support, warranty, indemnity, + or other liability obligations and/or rights consistent with this + License. However, in accepting such obligations, You may act only + on Your own behalf and on Your sole responsibility, not on behalf + of any other Contributor, and only if You agree to indemnify, + defend, and hold each Contributor harmless for any liability + incurred by, or claims asserted against, such Contributor by reason + of your accepting any such warranty or additional liability. + + END OF TERMS AND CONDITIONS + + APPENDIX: How to apply the Apache License to your work. + + To apply the Apache License to your work, attach the following + boilerplate notice, with the fields enclosed by brackets "[]" + replaced with your own identifying information. (Don't include + the brackets!) The text should be enclosed in the appropriate + comment syntax for the file format. We also recommend that a + file or class name and description of purpose be included on the + same "printed page" as the copyright notice for easier + identification within third-party archives. 
+ + Copyright [yyyy] [name of copyright owner] + + Licensed under the Apache License, Version 2.0 (the "License"); + you may not use this file except in compliance with the License. + You may obtain a copy of the License at + + http://www.apache.org/licenses/LICENSE-2.0 + + Unless required by applicable law or agreed to in writing, software + distributed under the License is distributed on an "AS IS" BASIS, + WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + See the License for the specific language governing permissions and + limitations under the License. diff --git a/NOTICE b/NOTICE new file mode 100644 index 00000000..7f0c3740 --- /dev/null +++ b/NOTICE @@ -0,0 +1,2 @@ +MindSpore GraphEngine +Copyright 2019-2020 Huawei Technologies Co., Ltd diff --git a/README.md b/README.md new file mode 100644 index 00000000..db223e2c --- /dev/null +++ b/README.md @@ -0,0 +1,107 @@ + GraphEngine(GE) is a sub-module of MindSpore connecting the front end and devices which was designed by the researchers and engineers within Huawei Technologies Co.,Ltd. GE is implemented via C++. It takes the graph of front end as its input and a series of graph operations are carried out to adapt the graph to a certain form which can be effectively operated on devices. GE is specifically designed for an efficient operation on Ascend Chips. GE is automatically called without any exposure to the users. GE mainly consists of two parts, i.e. GE API and GE Core. The architecture diagram of GE is illustrated as follows + +![GE_schema](docs/GE_Architecture.png) + +- GE API + + GE API is the interface between GE Core and front end which controls the initialization and finalization of GE Core and Sessions. It also provides the interfaces for graph adding and running. + +- GE Core + + GE Core acts as the core module of GE and is responsible for graph processing operations. It consists of six parts, i.e. 
graph preparation, graph partition, graph optimization, graph compilation, graph loading and graph execution. These six parts are performed in series and all together complete the complicated graph processing operations. + + - Graph preparation + + All the shapes of feature maps and variables in the graph are inferred in this stage for memory allocation later. Some aggregations of operators like allreduce are performed as well. Ascend Chips are heterogeneous chips including CPUs and vector calculation units, i.e. AICORE. Each operator in the graph is assigned to a certain operating cores according to the costs and supports. These two cores correspond to two different abstract engines in software. + + - Graph partition + + The whole graph is split into several sub-graphs based on the assigned engine in previous stage. Certain operators are added to the sub-graphs as the marks for graph edges. Such a partition enables an efficient optimization, compilation in next stages. + + - Graph optimization + + Different optimizer interfaces are called due to different engines that each sub-graph belongs to. To thoroughly utilize the calculation ability of the CUBE module in AICORE, A novel data layout format for faster hardware fetch is applied and the transition between normal 4D to this special format is performed in this stage. Such an operation guarantees less data handling between RAMs and CUBEs. Certain combination of operators is fused into a single big operator to further reduce the computation costs. This fusion is carried out in this stage as well. + + - Graph compilation + + This stage can be divided into two parts, i.e. resources allocation and graph compilation. Memory allocation is completed considering memory reuse strategy in resources allocation stage. According to the graph information, the queue, event, stream resources are allocated. Each operator is compiled to a task bound to a certain stream. 
Tasks on the same stream are performed in series and task on different streams can be executed in parallel. This stream partition is completed in this stage. + + - Graph loading + + According to the engine information, the operators of graph are assigned to different engines and in this stage, the graph is loaded on the devices for running. + + - Graph execution + + The graph is executed on devices efficiently in this stage and the corresponding outputs are returned to the hosts. For efficiency consideration, a sink mode is provided where the graph is executed several times with the last output returned. Such a mode effectively reduces the data handling between devices and hosts. + + In training or evaluating process, the aforementioned graph processing operations are carried out automatically. All in all, GE is a linked up module between MindSpore front end and Ascend Chips aiming to adapt the graph designed by users to a more efficient form that can be directly executed on Ascend Chips. + +- [Installation](#installation) +- [Community](#community) +- [Contributing](#contributing) +- [Release Notes](#release-notes) +- [License](#license) + +# Installation + +## Installing GraphEngine + +GE is automatically installed and compiled once you finish installing MindSpore. There are three dynamic link libraries corresponding to GE. + +## Installing Using the Source Code + +You may also build GraphEngine from source. +To build GraphEngine, please make sure that you have access to an [Ascend 910](https://e.huawei.com/se/products/cloud-computing-dc/atlas/ascend-910) environment as compiling environment, and make sure that following software requirements are fulfilled. + > - GCC >= 7.3.0 + > - CMake >= 3.14.0 + > - Autoconf >= 2.64 + > - Libtool >= 2.4.6 + > - Automake >= 1.15.1 + + as your compiling environment and have GCC version >= 7.3.0 and CMake version >= 3.14.0 installed. It is necessary to be using an Ascend 910 environment to build GraphEngine. 
+The output of building GraphEngine is a set of shared libraries which can be linked with MindSpore, they are not meant to be used independently. + +1. Download GraphEngine source code. + +GraphEngine source code is available on [Gitee](https://gitee.com/mindspore/graphengine): +```shell +git clone https://gitee.com/mindspore/graphengine.git +cd graphengine +``` +2. Run the following command in the root directory of the source code to compile GraphEngine: +To build with default options, simply: +```shell +bash build.sh +``` + > - Before running the preceding command, ensure that the relevant paths have been added to the environment variable PATH. + > - In the build.sh script, the git clone command will be executed to obtain code from Gitee.com. Ensure that the network settings of Git are correct. + > - In the build.sh script, the default number of compilation threads is 8. If the compiler performance is poor, compilation errors may occur. You can add -j{Number of threads} in to bash command to reduce the number of threads. For example, `bash build.sh -j4`. + +3. Access the output directory of the source code, obtain the generated GraphEngine libraries which can be linked with MindSpore for further installation/testing. + +For more information on other options of building GraphEngine: +```shell +bash build.sh -h +``` + +If you wish to clean all outputs from last build and try again: +```shell +rm -rf build/ output/ +bash build.sh +``` + +## Community + +- [MindSpore Slack](https://join.slack.com/t/mindspore/shared_invite/enQtOTcwMTIxMDI3NjM0LTNkMWM2MzI5NjIyZWU5ZWQ5M2EwMTQ5MWNiYzMxOGM4OWFhZjI4M2E5OGI2YTg3ODU1ODE2Njg1MThiNWI3YmQ) - Ask questions and find answers. + +## Contributing + +Welcome contributions. See our [Contributor Wiki](https://gitee.com/mindspore/mindspore/blob/master/CONTRIBUTING.md) for more details. + +## Release Notes + +The release notes, see our [RELEASE](RELEASE.md). 
+ + +## License + +[Apache License 2.0](LICENSE) diff --git a/RELEASE.md b/RELEASE.md new file mode 100644 index 00000000..da8e10c1 --- /dev/null +++ b/RELEASE.md @@ -0,0 +1,17 @@ +# Release 0.1.0-alpha + +This is the initial release of GraphEngine(GE) which was designed by the researchers and engineers in Huawei Technologies Co.,Ltd. GE is implemented via C++ and acts as a powerful backing force for MindSpore. GE is a linked up module between MindSpore front end and Ascend Chips. + +## Main features + +- GE API + - GE provides a unified interface with the front end including graph management interfaces i.e., graph loading and graph execution, and GE core initialization and finalization interfaces. + +- Graph Processing + - Six parts of graph processing operations are executed within GE, i.e. graph preparation, graph partition, graph optimization, graph compilation, graph loading and graph execution. + - GE highly optimizes the graph defined by the front end to a form which can be effectively executed on Ascend Chips. + - GE supports several optimizations to reduce computation costs, e.g. operator fusion, parallel operation, data format transition. + +- Debugging + - GE supports data dump and graph dump for debugging. + - GE provides profiling tools to thoroughly analyze the speed performance. \ No newline at end of file diff --git a/Third_Party_Open_Source_Software_Notice b/Third_Party_Open_Source_Software_Notice new file mode 100644 index 00000000..0d79cfa5 --- /dev/null +++ b/Third_Party_Open_Source_Software_Notice @@ -0,0 +1,460 @@ +OPEN SOURCE SOFTWARE NOTICE + +Please note we provide an open source software notice along with this product and/or this product firmware (in the following just “this product”). The open source software licenses are granted by the respective right holders. 
And the open source licenses prevail all other license information with regard to the respective open source software contained in the product, including but not limited to End User Software Licensing Agreement. This notice is provided on behalf of Huawei Technologies Co. Ltd. and any of its local subsidiaries which may have provided this product to you in your local country. + +Warranty Disclaimer +THE OPEN SOURCE SOFTWARE IN THIS PRODUCT IS DISTRIBUTED IN THE HOPE THAT IT WILL BE USEFUL, BUT WITHOUT ANY WARRANTY, WITHOUT EVEN THE IMPLIED WARRANTY OF MERCHANTABILITY OR FITNESS FOR A PARTICULAR PURPOSE. SEE THE APPLICABLE LICENSES FOR MORE DETAILS. + +Copyright Notice and License Texts + +Software: Eigen 3.3.7 +Copyright notice: +Copyright (C) 2014 Benoit Steiner +Copyright (C) 2013 Christian Seiler +Copyright (C) 2015 Eugene Brevdo +Copyright (C) 2014-2015 Benoit Steiner +Copyright (C) 2015 Navdeep Jaitly +Copyright (C) 2014 Eric Martin +Copyright (C) 2015 Benoit Steiner +Copyright (C) 2016 Rasmus Munk Larsen +Copyright (C) 2016 Benoit Steiner +Copyright (C) 2015 Jianwei Cui +Copyright (C) 2016 Eugene Brevdo +Copyright (C) 2015 Ke Yang +Copyright (C) 2016 Mehdi Goli, Codeplay Software Ltd +Copyright (C) 2014 Navdeep Jaitly +Copyright (C) 2016 Igor Babuschkin +Copyright (C) 2016 Dmitry Vyukov +Copyright (C) EDF R&D, lun sep 30 14:23:30 CEST 2002 +Copyright (C) 2008 Gael Guennebaud +Copyright (C) EDF R&D, lun sep 30 14:23:31 CEST 2002 +Copyright (C) 2008-2010 Gael Guennebaud +Copyright (C) 2008-2016 Gael Guennebaud +Copyright (C) 2009 Mark Borgerding mark a borgerding net +Copyright (C) 2008-2009 Gael Guennebaud +Copyright (C) 2013 Desire Nuentsa +Copyright (C) 2013 Gael Guennebaud +Copyright (C) 2011 Gael Guennebaud +Copyright (C) 2012 Desire NUENTSA WAKAM +Copyright (C) 2009 Benoit Jacob +Copyright (C) 2009 Gael Guennebaud +Copyright (C) 2006-2010 Benoit Jacob +Copyright (C) 2006-2008 Benoit Jacob +Copyright (C) EDF R&D, lun sep 30 14:23:28 CEST 2002 +Copyright 
(C) 2010 Manuel Yguel +Copyright (C) 2009 Claire Maurice +Copyright (C) 2010,2012 Jitse Niesen +Copyright (c) 2011, Intel Corporation. All rights reserved. +Copyright (C) 2012-2016 Gael Guennebaud +Copyright (C) 2016 Tobias Wood +Copyright (C) 2010 Jitse Niesen +Copyright (C) 2012 Alexey Korepanov +Copyright (C) 2010 Vincent Lejeune +Copyright (C) 2010 Gael Guennebaud +Copyright (C) 2010 Benoit Jacob +Copyright (C) 2017 Gael Guennebaud +Copyright (C) 2009-2010 Gael Guennebaud +Copyright (C) 2008 Benoit Jacob +Copyright (C) 2009 Mathieu Gautier +Copyright (C) 2010 Hauke Heibel +Copyright (C) 2009 Hauke Heibel +Copyright (C) 2008-2015 Gael Guennebaud +Copyright (C) EDF R&D, mar déc 3 18:59:36 CET 2002 +Copyright (C) EDF R&D, lun sep 30 14:23:17 CEST 2002 +Copyright (C) EDF R&D, mar déc 3 18:59:35 CET 2002 +Copyright (C) 2016 Konstantinos Margaritis +Copyright (C) 2007 Julien Pommier +Copyright (C) 2008-2011 Gael Guennebaud +Copyright (C) 2009 Keir Mierle +Copyright (C) 2011 Timothy E. Holy +Copyright (C) 2009 Hauke Heibel +Copyright (C) 2012 Desire Nuentsa +Copyright (C) 2014 Gael Guennebaud +Copyright (C) 2015 Tal Hadad +@copyright (c) 2009-2014 The University of Tennessee and The University of Tennessee Research Foundation. +@copyright (c) 2012-2016 Inria. All rights reserved. +@copyright (c) 2012-2014 Bordeaux INP, CNRS (LaBRI UMR 5800), Inria, Univ. Bordeaux. All rights reserved. +Copyright 2007-2009 Kitware, Inc. +Copyright 2012-2013 Inria +Copyright 2012-2013 Emmanuel Agullo +Copyright 2012-2013 Mathieu Faverge +Copyright 2012 Cedric Castagnede +Copyright 2013-2016 Florent Pruvost +Copyright 2016 Codeplay Software Ltd. +Copyright (c) 2006, 2007 Montel Laurent, +Copyright (c) 2008, 2009 Gael Guennebaud, +Copyright (c) 2009 Boudewijn Rempt +@copyright (c) 2012-2014 Inria. All rights reserved. 
+Copyright 2013 Florent Pruvost +Copyright (c) 2010 Jitse Niesen, +Copyright (C) 2009 Benjamin Schindler +Copyright (C) 2016 Pedro Gonnet (pedro.gonnet@gmail.com) +Copyright (C) 2016 Benoit Steiner (benoit.steiner.goog@gmail.com) +Copyright (C) 2009 Thomas Capricelli +Copyright (C) 2012-2013 Desire Nuentsa +Copyright (C) 2012-2014 Gael Guennebaud +Copyright Jorge More - Argonne National Laboratory +Copyright Burt Garbow - Argonne National Laboratory +Copyright Ken Hillstrom - Argonne National Laboratory +Copyright (C) 2009 Ilya Baran +Copyright (c) 2010, Intel Corp. +Copyright (C) 2009-2010 Benoit Jacob +Copyright (C) 2013-2016 Gael Guennebaud +Copyright (C) 2013 Gauthier Brun +Copyright (C) 2013 Nicolas Carre +Copyright (C) 2013 Jean Ceccato +Copyright (C) 2013 Pierre Zoppitelli +Copyright (C) 2013 Jitse Niesen +Copyright (C) 2014-2017 Gael Guennebaud +Copyright (C) 2013-2014 Gael Guennebaud +Copyright (C) 2011-2014 Gael Guennebaud +Copyright (C) 2012 Désiré Nuentsa-Wakam +Copyright (C) 2015 Gael Guennebaud +Copyright (C) 2012 Gael Guennebaud +Copyright (c) 1994 by Xerox Corporation. All rights reserved. +Copyright (C) 2001 Intel Corporation +Copyright (c) 2001 Intel Corporation. +Copyright (C) 2009 Gael Guennebaud +Copyright (C) 2013 Christoph Hertzberg +Copyright (C) 2015 Eugene Brevdo +Copyright (C) 2016 +Mehdi Goli Codeplay Software Ltd. +Ralph Potter Codeplay Software Ltd. +Luke Iwanski Codeplay Software Ltd. +Copyright (C) 2014 Jianwei Cui +Copyright (C) 2015 Vijay Vasudevan +Copyright (C) 2015 +Mehdi Goli Codeplay Software Ltd. +Ralph Potter Codeplay Software Ltd. +Luke Iwanski Codeplay Software Ltd. 
+Copyright (C) 2014 Navdeep Jaitly +Copyright (C) 2011 Gael Guennebaud +Copyright (C) 2012 desire Nuentsa +Copyright (C) 2012 Kolja Brix +Copyright (C) 2011 Kolja Brix +Copyright (C) 2011 Andreas Platen +Copyright (C) 2012 Chen-Pang He +Copyright (C) 2009 Jitse Niesen +Copyright (C) 2009-2011 Jitse Niesen +Copyright (C) 2012, 2013 Chen-Pang He +Copyright (C) 2011 Jitse Niesen +Copyright (C) 2012 Giacomo Po +Copyright (C) 2008-2010 Gael Guennebaud +Copyright (C) 2016 Gael Guennebaud +Copyright (C) 2010-2011 Hauke Heibel +Copyright (C) 2012 David Harmon +Copyright (C) 2007-2009 Benoit Jacob +Copyright (C) 2007-2010 Benoit Jacob +Copyright (C) 2008-2009 Benoit Jacob +Copyright (C) 2009 Kenneth Riddile +Copyright (C) 2010 Thomas Capricelli +Copyright (C) 2013 Pavel Holoborodko +Copyright (C) EDF R&D, lun sep 30 14:23:16 CEST 2002 +Copyright (C) EDF R&D, mar déc 3 18:59:37 CET 2002 +Copyright (C) 2006-2009 Benoit Jacob +Copyright (C) 2008-2010 Benoit Jacob +Copyright (c) 2008-2015 Pavel Holoborodko +Copyright (C) 20010-2011 Hauke Heibel +Copyright (c) 2006, Montel Laurent, +Copyright (c) 2007, Allen Winter, +Copyright (c) 2007, Alexander Neundorf, +Copyright (C) 2008 Guillaume Saupin +Copyright (C) 2008-2009 Guillaume Saupin +Copyright (C) 2009 Guillaume Saupin +Copyright (C) 2010-2016 Konstantinos Margaritis +Copyright (C) 2008-2016 Konstantinos Margaritis +Copyright (C) 2014 Benoit Steiner (benoit.steiner.goog@gmail.com) +Copyright (C) 2014 Pedro Gonnet (pedro.gonnet@gmail.com) +Copyright (c) Fabian Giesen, 2016 +Copyright (C) 2010 Konstantinos Margaritis +Copyright (C) 2007 Michael Olbrich +Copyright (C) 2011 Benoit Jacob +Copyright (C) 2011-2012 Jitse Niesen +Copyright (C) 2016 Rasmus Munk Larsen (rmlarsen@google.com) +Copyright (C) 2008-2014 Gael Guennebaud +Copyright (C) 2010-2013 Hauke Heibel +Copyright (C) 2006-2008, 2010 Benoit Jacob +Copyright (C) 2010-2016 Gael Guennebaud +Copyright (C) 2009-2015 Gael Guennebaud +Copyright (C) 2009 Ricard Marxer +Copyright 
(C) 2009-2014 Gael Guennebaud +Copyright (C) 2010-2011 Gael Guennebaud +Copyright (C) 2009 Rohit Garg +Copyright (c) 2006, Timothy A. Davis. +Copyright (c) 1998-2003 by the University of Florida. +Copyright (C) 2012 Désiré Nuentsa-Wakam +Copyright (C) 2008-2012 Gael Guennebaud +LDL Copyright (c) 2005 by Timothy A. Davis. All Rights Reserved. +Copyright (C) 2010 Daniel Lowengrub +Copyright (C) EDF R&D, lun sep 30 14:23:20 CEST 2002 +Copyright (C) EDF R&D, lun sep 30 14:23:19 CEST 2002 +Copyright (C) 2009, 2010, 2013 Jitse Niesen +Copyright (C) 2011, 2013 Chen-Pang He +Copyright (C) 2009-2011, 2013 Jitse Niesen +Copyright (C) 2011, 2013 Jitse Niesen +Copyright (C) 2011 Chen-Pang He +Copyright (C) 2010, 2013 Jitse Niesen +Copyright (C) 2010-2014 Gael Guennebaud +Copyright (C) 2012 The Android Open Source Project +(C) Desire NUENTSA WAKAM, INRIA +Copyright (C) EDF R&D, lun sep 30 14:23:18 CEST 2002 +Copyright (C) 2012 Keir Mierle +Copyright (C) 1989, 1991 Free Software Foundation, Inc. +Copyright (C) EDF R&D, lun sep 30 14:23:23 CEST 2002 +Copyright (C) EDF R&D, lun sep 30 14:23:24 CEST 2002 +Copyright (C) EDF R&D, lun sep 30 14:23:27 CEST 2002 +Copyright (C) 2007 Free Software Foundation, Inc. +Copyright (C) 1991, 1999 Free Software Foundation, Inc. +Copyright (C) 2015 Benoit Jacob +Geometric Tools, LLC Copyright (c) 1998-2010 +Copyright (C) EDF R&D, lun sep 30 14:23:15 CEST 2002 +Copyright (C) 2002-2007 Yves Renard +Copyright (C) 2012, 2014 Kolja Brix +Copyright (C) 1997-2001 Andrew Lumsdaine Lie-Quan Lee +Copyright (C) 2012 Desire NUENTSA WAKAM +Copyright (C) 2013 Hauke Heibel +Copyright (C) 2010-2011 Jitse Niesen +Intel Copyright (C) .... 
+Copyright (C) 2010-2017 Gael Guennebaud +Copyright (C) 20013 Gael Guennebaud +Copyright (C) 2008 Daniel Gomez Ferro +Copyright (C) 2013 Désiré Nuentsa-Wakam +Copyright (C) 2011-2015 Gael Guennebaud +Copyright (C) 20015 Gael Guennebaud +Copyright (C) 2014-2015 Gael Guennebaud + + +License: Mozilla Public License (MPL) V2.0 + +Mozilla Public License +Version 2.0 +1. Definitions +1.1. “Contributor” +means each individual or legal entity that creates, contributes to the creation of, or owns Covered Software. +1.2. “Contributor Version” +means the combination of the Contributions of others (if any) used by a Contributor and that particular Contributor’s Contribution. +1.3. “Contribution” +means Covered Software of a particular Contributor. +1.4. “Covered Software” +means Source Code Form to which the initial Contributor has attached the notice in Exhibit A, the Executable Form of such Source Code Form, and Modifications of such Source Code Form, in each case including portions thereof. +1.5. “Incompatible With Secondary Licenses” +means +that the initial Contributor has attached the notice described in Exhibit B to the Covered Software; or +that the Covered Software was made available under the terms of version 1.1 or earlier of the License, but not also under the terms of a Secondary License. +1.6. “Executable Form” +means any form of the work other than Source Code Form. +1.7. “Larger Work” +means a work that combines Covered Software with other material, in a separate file or files, that is not Covered Software. +1.8. “License” +means this document. +1.9. “Licensable” +means having the right to grant, to the maximum extent possible, whether at the time of the initial grant or subsequently, any and all of the rights conveyed by this License. +1.10. 
“Modifications” +means any of the following: +any file in Source Code Form that results from an addition to, deletion from, or modification of the contents of Covered Software; or +any new file in Source Code Form that contains any Covered Software. +1.11. “Patent Claims” of a Contributor +means any patent claim(s), including without limitation, method, process, and apparatus claims, in any patent Licensable by such Contributor that would be infringed, but for the grant of the License, by the making, using, selling, offering for sale, having made, import, or transfer of either its Contributions or its Contributor Version. +1.12. “Secondary License” +means either the GNU General Public License, Version 2.0, the GNU Lesser General Public License, Version 2.1, the GNU Affero General Public License, Version 3.0, or any later versions of those licenses. +1.13. “Source Code Form” +means the form of the work preferred for making modifications. +1.14. “You” (or “Your”) +means an individual or a legal entity exercising rights under this License. For legal entities, “You” includes any entity that controls, is controlled by, or is under common control with You. For purposes of this definition, “control” means (a) the power, direct or indirect, to cause the direction or management of such entity, whether by contract or otherwise, or (b) ownership of more than fifty percent (50%) of the outstanding shares or beneficial ownership of such entity. +2. License Grants and Conditions +2.1. 
Grants +Each Contributor hereby grants You a world-wide, royalty-free, non-exclusive license: +under intellectual property rights (other than patent or trademark) Licensable by such Contributor to use, reproduce, make available, modify, display, perform, distribute, and otherwise exploit its Contributions, either on an unmodified basis, with Modifications, or as part of a Larger Work; and +under Patent Claims of such Contributor to make, use, sell, offer for sale, have made, import, and otherwise transfer either its Contributions or its Contributor Version. +2.2. Effective Date +The licenses granted in Section 2.1 with respect to any Contribution become effective for each Contribution on the date the Contributor first distributes such Contribution. +2.3. Limitations on Grant Scope +The licenses granted in this Section 2 are the only rights granted under this License. No additional rights or licenses will be implied from the distribution or licensing of Covered Software under this License. Notwithstanding Section 2.1(b) above, no patent license is granted by a Contributor: +for any code that a Contributor has removed from Covered Software; or +for infringements caused by: (i) Your and any other third party’s modifications of Covered Software, or (ii) the combination of its Contributions with other software (except as part of its Contributor Version); or +under Patent Claims infringed by Covered Software in the absence of its Contributions. +This License does not grant any rights in the trademarks, service marks, or logos of any Contributor (except as may be necessary to comply with the notice requirements in Section 3.4). +2.4. Subsequent Licenses +No Contributor makes additional grants as a result of Your choice to distribute the Covered Software under a subsequent version of this License (see Section 10.2) or under the terms of a Secondary License (if permitted under the terms of Section 3.3). +2.5. 
Representation +Each Contributor represents that the Contributor believes its Contributions are its original creation(s) or it has sufficient rights to grant the rights to its Contributions conveyed by this License. +2.6. Fair Use +This License is not intended to limit any rights You have under applicable copyright doctrines of fair use, fair dealing, or other equivalents. +2.7. Conditions +Sections 3.1, 3.2, 3.3, and 3.4 are conditions of the licenses granted in Section 2.1. +3. Responsibilities +3.1. Distribution of Source Form +All distribution of Covered Software in Source Code Form, including any Modifications that You create or to which You contribute, must be under the terms of this License. You must inform recipients that the Source Code Form of the Covered Software is governed by the terms of this License, and how they can obtain a copy of this License. You may not attempt to alter or restrict the recipients’ rights in the Source Code Form. +3.2. Distribution of Executable Form +If You distribute Covered Software in Executable Form then: +such Covered Software must also be made available in Source Code Form, as described in Section 3.1, and You must inform recipients of the Executable Form how they can obtain a copy of such Source Code Form by reasonable means in a timely manner, at a charge no more than the cost of distribution to the recipient; and +You may distribute such Executable Form under the terms of this License, or sublicense it under different terms, provided that the license for the Executable Form does not attempt to limit or alter the recipients’ rights in the Source Code Form under this License. +3.3. Distribution of a Larger Work +You may create and distribute a Larger Work under terms of Your choice, provided that You also comply with the requirements of this License for the Covered Software. 
If the Larger Work is a combination of Covered Software with a work governed by one or more Secondary Licenses, and the Covered Software is not Incompatible With Secondary Licenses, this License permits You to additionally distribute such Covered Software under the terms of such Secondary License(s), so that the recipient of the Larger Work may, at their option, further distribute the Covered Software under the terms of either this License or such Secondary License(s). +3.4. Notices +You may not remove or alter the substance of any license notices (including copyright notices, patent notices, disclaimers of warranty, or limitations of liability) contained within the Source Code Form of the Covered Software, except that You may alter any license notices to the extent required to remedy known factual inaccuracies. +3.5. Application of Additional Terms +You may choose to offer, and to charge a fee for, warranty, support, indemnity or liability obligations to one or more recipients of Covered Software. However, You may do so only on Your own behalf, and not on behalf of any Contributor. You must make it absolutely clear that any such warranty, support, indemnity, or liability obligation is offered by You alone, and You hereby agree to indemnify every Contributor for any liability incurred by such Contributor as a result of warranty, support, indemnity or liability terms You offer. You may include additional disclaimers of warranty and limitations of liability specific to any jurisdiction. +4. Inability to Comply Due to Statute or Regulation +If it is impossible for You to comply with any of the terms of this License with respect to some or all of the Covered Software due to statute, judicial order, or regulation then You must: (a) comply with the terms of this License to the maximum extent possible; and (b) describe the limitations and the code they affect. 
Such description must be placed in a text file included with all distributions of the Covered Software under this License. Except to the extent prohibited by statute or regulation, such description must be sufficiently detailed for a recipient of ordinary skill to be able to understand it. +5. Termination +5.1. The rights granted under this License will terminate automatically if You fail to comply with any of its terms. However, if You become compliant, then the rights granted under this License from a particular Contributor are reinstated (a) provisionally, unless and until such Contributor explicitly and finally terminates Your grants, and (b) on an ongoing basis, if such Contributor fails to notify You of the non-compliance by some reasonable means prior to 60 days after You have come back into compliance. Moreover, Your grants from a particular Contributor are reinstated on an ongoing basis if such Contributor notifies You of the non-compliance by some reasonable means, this is the first time You have received notice of non-compliance with this License from such Contributor, and You become compliant prior to 30 days after Your receipt of the notice. +5.2. If You initiate litigation against any entity by asserting a patent infringement claim (excluding declaratory judgment actions, counter-claims, and cross-claims) alleging that a Contributor Version directly or indirectly infringes any patent, then the rights granted to You by any and all Contributors for the Covered Software under Section 2.1 of this License shall terminate. +5.3. In the event of termination under Sections 5.1 or 5.2 above, all end user license agreements (excluding distributors and resellers) which have been validly granted by You or Your distributors under this License prior to termination shall survive termination. +6. 
Disclaimer of Warranty +Covered Software is provided under this License on an “as is” basis, without warranty of any kind, either expressed, implied, or statutory, including, without limitation, warranties that the Covered Software is free of defects, merchantable, fit for a particular purpose or non-infringing. The entire risk as to the quality and performance of the Covered Software is with You. Should any Covered Software prove defective in any respect, You (not any Contributor) assume the cost of any necessary servicing, repair, or correction. This disclaimer of warranty constitutes an essential part of this License. No use of any Covered Software is authorized under this License except under this disclaimer. +7. Limitation of Liability +Under no circumstances and under no legal theory, whether tort (including negligence), contract, or otherwise, shall any Contributor, or anyone who distributes Covered Software as permitted above, be liable to You for any direct, indirect, special, incidental, or consequential damages of any character including, without limitation, damages for lost profits, loss of goodwill, work stoppage, computer failure or malfunction, or any and all other commercial damages or losses, even if such party shall have been informed of the possibility of such damages. This limitation of liability shall not apply to liability for death or personal injury resulting from such party’s negligence to the extent applicable law prohibits such limitation. Some jurisdictions do not allow the exclusion or limitation of incidental or consequential damages, so this exclusion and limitation may not apply to You. +8. Litigation +Any litigation relating to this License may be brought only in the courts of a jurisdiction where the defendant maintains its principal place of business and such litigation shall be governed by laws of that jurisdiction, without reference to its conflict-of-law provisions. 
Nothing in this Section shall prevent a party’s ability to bring cross-claims or counter-claims. +9. Miscellaneous +This License represents the complete agreement concerning the subject matter hereof. If any provision of this License is held to be unenforceable, such provision shall be reformed only to the extent necessary to make it enforceable. Any law or regulation which provides that the language of a contract shall be construed against the drafter shall not be used to construe this License against a Contributor. +10. Versions of the License +10.1. New Versions +Mozilla Foundation is the license steward. Except as provided in Section 10.3, no one other than the license steward has the right to modify or publish new versions of this License. Each version will be given a distinguishing version number. +10.2. Effect of New Versions +You may distribute the Covered Software under the terms of the version of the License under which You originally received the Covered Software, or under the terms of any subsequent version published by the license steward. +10.3. Modified Versions +If you create software not governed by this License, and you want to create a new license for such software, you may create and use a modified version of this License if you rename the license and remove any references to the name of the license steward (except to note that such modified license differs from this License). +10.4. Distributing Source Code Form that is Incompatible With Secondary Licenses +If You choose to distribute Source Code Form that is Incompatible With Secondary Licenses under the terms of this version of the License, the notice described in Exhibit B of this License must be attached. +Exhibit A - Source Code Form License Notice +This Source Code Form is subject to the terms of the Mozilla Public License, v. 2.0. If a copy of the MPL was not distributed with this file, You can obtain one at https://mozilla.org/MPL/2.0/. 
+If it is not possible or desirable to put the notice in a particular file, then You may include the notice in a location (such as a LICENSE file in a relevant directory) where a recipient would be likely to look for such a notice. +You may add additional accurate notices of copyright ownership. +Exhibit B - “Incompatible With Secondary Licenses” Notice +This Source Code Form is “Incompatible With Secondary Licenses”, as defined by the Mozilla Public License, v. 2.0. + + +Software: JSON for Modern C++ 3.6.1 +Copyright notice: +Copyright 2015 Google Inc. All rights reserved. +Copyright 2018 Google Inc. All rights reserved. +Copyright 2016 Ismael Jimenez Martinez. All rights reserved. +Copyright 2017 Roman Lebedev. All rights reserved. +Copyright (c) 2012 Two Blue Cubes Ltd. All rights reserved. +Copyright (c) 2015 Max Woolf +Copyright 2014 The Authors +Copyright (c) 2016 Nicolas Seriot +Copyright (c) 2015-2017 Niels Lohmann. +Copyright (c) 2015-2017 Niels Lohmann +Copyright (c) 2013-2019 Niels Lohmann . +Copyright (c) 2018 Vitaliy Manushkin . +Copyright (c) 2012, Erik Edlund +Copyright (c) 2013-2019 Niels Lohmann +Copyright 2013-2019 [Niels Lohmann](http:nlohmann.me) +Copyright (c) 2009 Google Inc. All rights reserved. +Copyright (C) 2009 Google Inc. + + +License: MIT License +The MIT License +Copyright (c) +Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: +The above copyright notice and this permission notice shall be included in +all copies or substantial portions of the Software. 
+THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN +THE SOFTWARE. + + +Software: google/protobuf 3.8.0 +Copyright notice: +Copyright 2008, Google Inc. +Copyright 2008 Google Inc. All Rights Reserved. +Copyright [2007] Neal Norwitz +Portions Copyright [2007] Google Inc. +Copyright 2008 Google Inc. All rights reserved. +Copyright 2007 Google Inc. All Rights Reserved. +Copyright 2007, Google Inc. +Copyright 2013, Google Inc. +Copyright 2009, Google Inc. +Copyright 2006, Google Inc. +Copyright 2009 Google Inc. All rights reserved. +Copyright 2005, Google Inc. +Copyright 2008 Google Inc. +Copyright 2015, Google Inc. +Copyright (C) 1996-2015 Free Software Foundation, Inc. +Copyright (c) 2007-2010 Baptiste Lepilleur +Copyright 2007 Neal Norwitz +Copyright 2007 Google Inc. +Copyright 2008 Google Inc. All Rights Reserved. +Copyright 2014 Google Inc. All rights reserved. +Copyright 2015 Google Inc. All rights reserved. +Copyright (c) 2006, Google Inc. +Copyright 2012 Google Inc. All rights reserved. +Copyright 2005 Google Inc. +Copyright 2010 Google Inc. All Rights Reserved. +Copyright 2010, Google Inc. +Copyright 2005 Google Inc. All Rights Reserved. +Copyright 2009 Google Inc. All rights reserved. +Copyright 2009 Google Inc. All Rights Reserved. + + +License: BSD 3-Clause License +Copyright (c) , +All rights reserved. +Redistribution and use in source and binary forms, with or without modification, are permitted provided that the following conditions are met: +1. 
Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. +2. Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. +3. Neither the name of the copyright holder nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission. +THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. + + +Software: googletest 1.8.1 +Copyright notice: +Copyright 2009, Google Inc. +Copyright 2008, Google Inc. +Copyright 2007 Google Inc. +Copyright 2007, Google Inc. +Copyright 2013, Google Inc. +Copyright 2015, Google Inc. +Copyright 2005, Google Inc. +Copyright 2008 Google Inc. +Copyright 2006, Google Inc. +Copyright 2009 Google Inc. All Rights Reserved. +Copyright 2013 Google Inc. All Rights Reserved. +Copyright 2017 Google Inc. +Copyright 2007 Neal Norwitz +Copyright 2008 Google Inc. All Rights Reserved. +Copyright 2009 Neal Norwitz All Rights Reserved. +Copyright 2003 Google Inc. +Copyright 2009 Google Inc. +Copyright 2008 Google Inc. All Rights Reserved. 
+Copyright [2007] Neal Norwitz +Portions Copyright [2007] Google Inc. +Copyright 2010 Google Inc. All Rights Reserved. +Copyright 2010, Google Inc. +Copyright 2005 Google Inc. All Rights Reserved. +Copyright 2018, Google Inc. +Copyright 2003, Google Inc. +Copyright 2009 Google Inc. All rights reserved. +Copyright 2015 Google Inc. All rights reserved. +Copyright 2009 Google Inc. All rights reserved. +Copyright 2018 Google LLC. All rights reserved. +Copyright 2018, Google LLC. + + +License: BSD 3-Clause License +Please see above. + + +Software: onnx 1.6.0 +Copyright notice: +Copyright (c) ONNX Project Contributors. +Copyright (c) Facebook Inc. and Microsoft Corporation. + + +License: MIT License +Please see above. diff --git a/build.sh b/build.sh new file mode 100644 index 00000000..3cad0cad --- /dev/null +++ b/build.sh @@ -0,0 +1,172 @@ +#!/bin/bash +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +set -e +BASEPATH=$(cd "$(dirname $0)"; pwd) +OUTPUT_PATH="${BASEPATH}/output" +export BUILD_PATH="${BASEPATH}/build/" + +# print usage message +usage() +{ + echo "Usage:" + echo "sh build.sh [-j[n]] [-A] [-h] [-v] [-s] [-t] [-u] [-c]" + echo "" + echo "Options:" + echo " -h Print usage" + echo " -u Only compile ut, not execute" + echo " -s Build st" + echo " -j[n] Set the number of threads used for building GraphEngine, default is 8" + echo " -t Build and execute ut" + echo " -c Build ut with coverage tag" + echo " -v Display build command" + echo "to be continued ..." +} + +# parse and set options +checkopts() +{ + VERBOSE="" + THREAD_NUM=8 + ENABLE_GE_UT_ONLY_COMPILE="off" + ENABLE_GE_UT="off" + ENABLE_GE_ST="off" + ENABLE_GE_COV="off" + GE_ONLY="on" + # Process the options + while getopts 'ustchj:vA' opt + do + OPTARG=$(echo ${OPTARG} | tr '[A-Z]' '[a-z]') + case "${opt}" in + u) + ENABLE_GE_UT_ONLY_COMPILE="on" + ENABLE_GE_UT="on" + ;; + s) + ENABLE_GE_ST="on" + ;; + t) + ENABLE_GE_UT="on" + ;; + c) + ENABLE_GE_COV="on" + ;; + h) + usage + exit 0 + ;; + j) + THREAD_NUM=$OPTARG + ;; + v) + VERBOSE="VERBOSE=1" + ;; + A) + usage + ;; + *) + echo "Undefined option: ${opt}" + usage + exit 1 + esac + done +} +checkopts "$@" + +mk_dir() { + local create_dir="$1" # the target to make + + mkdir -pv "${create_dir}" + echo "created ${create_dir}" +} + +# GraphEngine build start +echo "---------------- GraphEngine build start ----------------" + +# create build path +build_graphengine() +{ + echo "create build directory and build GraphEngine"; + mk_dir "${BUILD_PATH}/graphengine" + cd "${BUILD_PATH}/graphengine" + CMAKE_ARGS="-DBUILD_PATH=$BUILD_PATH -DGE_ONLY=$GE_ONLY" + + if [[ "X$ENABLE_GE_COV" = "Xon" ]]; then + CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_GE_COV=ON" + fi + + if [[ "X$ENABLE_GE_UT" = "Xon" ]]; then + CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_GE_UT=ON" + fi + + + if [[ "X$ENABLE_GE_ST" = 
"Xon" ]]; then + CMAKE_ARGS="${CMAKE_ARGS} -DENABLE_GE_ST=ON" + fi + + echo "${CMAKE_ARGS}" + cmake ${CMAKE_ARGS} ../.. + make ${VERBOSE} -j${THREAD_NUM} + echo "GraphEngine build success!" +} +g++ -v +build_graphengine +echo "---------------- GraphEngine build finished ----------------" +mk_dir ${OUTPUT_PATH} +cp -rf "${BUILD_PATH}/graphengine/"*.so "${OUTPUT_PATH}" +rm -rf "${OUTPUT_PATH}/"libproto* +rm -f ${OUTPUT_PATH}/libgmock*.so +rm -f ${OUTPUT_PATH}/libgtest*.so +rm -f ${OUTPUT_PATH}/lib*_stub.so + +chmod -R 750 ${OUTPUT_PATH} +find ${OUTPUT_PATH} -name "*.so*" -print0 | xargs -0 chmod 500 + +echo "---------------- GraphEngine output package generated ----------------" + +if [[ "X$ENABLE_GE_ST" = "Xon" ]]; then + cp ${BUILD_PATH}/graphengine/tests/st/st_resnet50_train ${OUTPUT_PATH} +fi + +if [[ "X$ENABLE_GE_UT" = "Xon" || "X$ENABLE_GE_COV" = "Xon" ]]; then + cp ${BUILD_PATH}/graphengine/tests/ut/common/graph/ut_libgraph ${OUTPUT_PATH} + cp ${BUILD_PATH}/graphengine/tests/ut/ge/ut_libge_multiparts_utest ${OUTPUT_PATH} + cp ${BUILD_PATH}/graphengine/tests/ut/ge/ut_libge_distinct_load_utest ${OUTPUT_PATH} + cp ${BUILD_PATH}/graphengine/tests/ut/ge/ut_libge_others_utest ${OUTPUT_PATH} + cp ${BUILD_PATH}/graphengine/tests/ut/ge/ut_libge_kernel_utest ${OUTPUT_PATH} + + if [[ "X${ENABLE_GE_UT_ONLY_COMPILE}" != "Xon" ]]; then + export LD_LIBRARY_PATH=${D_LINK_PATH}/x86_64/:${BUILD_PATH}/graphengine/:/usr/local/HiAI/driver/lib64:/usr/local/HiAI/runtime/lib64:${LD_LIBRARY_PATH} + echo ${LD_LIBRARY_PATH} + ${OUTPUT_PATH}/ut_libgraph && + ${OUTPUT_PATH}/ut_libge_multiparts_utest && + ${OUTPUT_PATH}/ut_libge_distinct_load_utest && + ${OUTPUT_PATH}/ut_libge_others_utest && + ${OUTPUT_PATH}/ut_libge_kernel_utest + if [[ "$?" -ne 0 ]]; then + echo "!!! UT FAILED, PLEASE CHECK YOUR CHANGES !!!" + exit 1; + fi + fi + + if [[ "X$ENABLE_GE_COV" = "Xon" ]]; then + echo "Generating coverage statistics, please wait..." 
+ cd ${BASEPATH} + rm -rf ${BASEPATH}/cov + mkdir ${BASEPATH}/cov + gcovr -r ./ --exclude 'third_party' --exclude 'build' --exclude 'tests' --exclude 'prebuild' --exclude 'inc' --print-summary --html --html-details -d -o cov/index.html + fi +fi diff --git a/cmake/external_libs/eigen.cmake b/cmake/external_libs/eigen.cmake new file mode 100644 index 00000000..b43e70b4 --- /dev/null +++ b/cmake/external_libs/eigen.cmake @@ -0,0 +1,13 @@ +set(Eigen3_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2") +set(Eigen3_CFLAGS "-D_FORTIFY_SOURCE=2 -O2") +set(Eigen3_NS "ge_") +graphengine_add_pkg(Eigen3 + VER 3.3.7 + URL https://gitlab.com/libeigen/eigen/-/archive/3.3.7/eigen-3.3.7.tar.gz + MD5 9e30f67e8531477de4117506fe44669b + CMAKE_OPTION -DBUILD_TESTING=OFF) + +find_package(Eigen3 3.3.7 REQUIRED ${GE_FIND_NO_DEFAULT_PATH}) +set_property(TARGET Eigen3::Eigen PROPERTY IMPORTED_GLOBAL TRUE) +add_library(graphengine::eigen ALIAS Eigen3::Eigen) +include_directories(${EIGEN3_INCLUDE_DIRS}) diff --git a/cmake/external_libs/gtest.cmake b/cmake/external_libs/gtest.cmake new file mode 100644 index 00000000..05f07984 --- /dev/null +++ b/cmake/external_libs/gtest.cmake @@ -0,0 +1,16 @@ +set(ge_gtest_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2") +set(ge_gtest_CFLAGS "-D_FORTIFY_SOURCE=2 -O2") + +graphengine_add_pkg(ge_gtest + VER 1.8.0 + LIBS gtest gtest_main + URL https://github.com/google/googletest/archive/release-1.8.0.tar.gz + MD5 16877098823401d1bf2ed7891d7dce36 + CMAKE_OPTION -DBUILD_TESTING=OFF -DCMAKE_POSITION_INDEPENDENT_CODE=ON -DBUILD_SHARED_LIBS=ON + -DCMAKE_MACOSX_RPATH=TRUE -Dgtest_disable_pthreads=ON) + +add_library(graphengine::gtest ALIAS ge_gtest::gtest) +add_library(graphengine::gtest_main ALIAS ge_gtest::gtest_main) +include_directories(${ge_gtest_INC}) +file(COPY ${ge_gtest_INC}/../lib/libgtest.so DESTINATION ${CMAKE_SOURCE_DIR}/build/graphengine) +file(COPY ${ge_gtest_INC}/../lib/libgtest_main.so DESTINATION ${CMAKE_SOURCE_DIR}/build/graphengine) diff --git 
a/cmake/external_libs/json.cmake b/cmake/external_libs/json.cmake new file mode 100644 index 00000000..4b9fa4e3 --- /dev/null +++ b/cmake/external_libs/json.cmake @@ -0,0 +1,9 @@ +set(nlohmann_json_CXXFLAGS "-D_FORTIFY_SOURCE=2 -O2") +set(nlohmann_json_CFLAGS "-D_FORTIFY_SOURCE=2 -O2") +graphengine_add_pkg(ge_nlohmann_json + VER 3.6.1 + HEAD_ONLY ./ + URL https://github.com/nlohmann/json/releases/download/v3.6.1/include.zip + MD5 0dc903888211db3a0f170304cd9f3a89) +include_directories(${ge_nlohmann_json_INC}) +add_library(graphengine::json ALIAS ge_nlohmann_json) \ No newline at end of file diff --git a/cmake/external_libs/onnx.cmake b/cmake/external_libs/onnx.cmake new file mode 100644 index 00000000..621f67c6 --- /dev/null +++ b/cmake/external_libs/onnx.cmake @@ -0,0 +1,5 @@ +graphengine_add_pkg(onnx + VER 1.6.0 + HEAD_ONLY ./ + URL https://github.com/onnx/onnx/releases/download/v1.6.0/onnx-1.6.0.tar.gz + MD5 512f2779d6215d4a36f366b6b9acdf1e) \ No newline at end of file diff --git a/cmake/external_libs/protobuf.cmake b/cmake/external_libs/protobuf.cmake new file mode 100644 index 00000000..e963fae5 --- /dev/null +++ b/cmake/external_libs/protobuf.cmake @@ -0,0 +1,57 @@ +if (NOT TARGET protobuf::libprotobuf) +graphengine_add_pkg(protobuf + VER 3.8.0 + HEAD_ONLY ./ + URL https://github.com/protocolbuffers/protobuf/archive/v3.8.0.tar.gz + MD5 3d9e32700639618a4d2d342c99d4507a) +set(protobuf_BUILD_TESTS OFF CACHE BOOL "Disahble protobuf test") +set(protobuf_BUILD_SHARED_LIBS ON CACHE BOOL "Gen shared library") +set(_ms_tmp_CMAKE_CXX_FLAGS ${CMAKE_CXX_FLAGS}) +string(REPLACE " -Wall" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") +string(REPLACE " -Werror" "" CMAKE_CXX_FLAGS "${CMAKE_CXX_FLAGS}") + +set(PROTOBUF_CMAKE_FILE "${protobuf_DIRPATH}/cmake/libprotobuf.cmake" ) +FILE(READ ${PROTOBUF_CMAKE_FILE} GE_MR_PROTOBUF_CMAKE) +STRING(REPLACE "VERSION \${protobuf_VERSION}" "VERSION 19" GE_MR_PROTOBUF_CMAKE_V19 "${GE_MR_PROTOBUF_CMAKE}" ) +FILE(WRITE ${PROTOBUF_CMAKE_FILE} 
"${GE_MR_PROTOBUF_CMAKE_V19}") + +add_subdirectory(${protobuf_DIRPATH}/cmake ${protobuf_DIRPATH}/build) +set(CMAKE_CXX_FLAGS ${_ms_tmp_CMAKE_CXX_FLAGS}) +endif() + +set(PROTOBUF_LIBRARY protobuf::libprotobuf) +include_directories(${protobuf_DIRPATH}/src) +add_library(ge_protobuf::protobuf ALIAS libprotobuf) + +function(ge_protobuf_generate comp c_var h_var) + if(NOT ARGN) + message(SEND_ERROR "Error: ge_protobuf_generate() called without any proto files") + return() + endif() + + set(${c_var}) + set(${h_var}) + + foreach(file ${ARGN}) + get_filename_component(abs_file ${file} ABSOLUTE) + get_filename_component(file_name ${file} NAME_WE) + get_filename_component(file_dir ${abs_file} PATH) + + list(APPEND ${c_var} "${CMAKE_BINARY_DIR}/proto/${comp}/proto/${file_name}.pb.cc") + list(APPEND ${h_var} "${CMAKE_BINARY_DIR}/proto/${comp}/proto/${file_name}.pb.h") + + add_custom_command( + OUTPUT "${CMAKE_BINARY_DIR}/proto/${comp}/proto/${file_name}.pb.cc" + "${CMAKE_BINARY_DIR}/proto/${comp}/proto/${file_name}.pb.h" + WORKING_DIRECTORY ${PROJECT_SOURCE_DIR} + COMMAND ${CMAKE_COMMAND} -E make_directory "${CMAKE_BINARY_DIR}/proto/${comp}/proto" + COMMAND protobuf::protoc -I${file_dir} --cpp_out=${CMAKE_BINARY_DIR}/proto/${comp}/proto ${abs_file} + DEPENDS protobuf::protoc ${abs_file} + COMMENT "Running C++ protocol buffer compiler on ${file}" VERBATIM ) + endforeach() + + set_source_files_properties(${${c_var}} ${${h_var}} PROPERTIES GENERATED TRUE) + set(${c_var} ${${c_var}} PARENT_SCOPE) + set(${h_var} ${${h_var}} PARENT_SCOPE) + +endfunction() diff --git a/cmake/ge_utils.cmake b/cmake/ge_utils.cmake new file mode 100644 index 00000000..8d09977a --- /dev/null +++ b/cmake/ge_utils.cmake @@ -0,0 +1,349 @@ +include(FetchContent) +set(FETCHCONTENT_QUIET OFF) + +function(graphengine_add_submodule_obj des_submodule_objs sub_dir submodule_name_obj) + + add_subdirectory(${sub_dir}) + + if(NOT TARGET ${submodule_name_obj}) + message(FATAL_ERROR "Can not find submodule 
'${submodule_name_obj}'. in ${CMAKE_CURRENT_LIST_FILE}") + endif() + if("$" IN_LIST ${des_submodule_objs}) + message(FATAL_ERROR "submodule '${submodule_name_obj}' added more than once. in ${CMAKE_CURRENT_LIST_FILE}") + endif() + + set(${des_submodule_objs} ${${des_submodule_objs}} $ PARENT_SCOPE) + +endfunction() + +get_filename_component(_MS_LIB_CACHE ~/.mslib REALPATH) +if (NOT EXISTS ${_MS_LIB_CACHE}) + file(MAKE_DIRECTORY ${_MS_LIB_CACHE}) +endif () +# set(FETCHCONTENT_BASE_DIR ${_MS_LIB_CACHE}) +# set(CMAKE_PREFIX_PATH ${_MS_LIB_CACHE}) +if (DEFINED ENV{MSLIBS_SERVER}) + set(LOCAL_LIBS_SERVER $ENV{MSLIBS_SERVER}) + message("LOCAL_LIBS_SERVER: ${LOCAL_LIBS_SERVER}") +endif () +if(LOCAL_LIBS_SERVER) + if (NOT ENV{no_proxy}) + set(ENV{no_proxy} "${LOCAL_LIBS_SERVER}") + else() + string(FIND $ENV{no_proxy} ${LOCAL_LIBS_SERVER} IP_POS) + if (${IP_POS} EQUAL -1) + set(ENV{no_proxy} "$ENV{no_proxy},${LOCAL_LIBS_SERVER}") + endif () + endif () +endif() + +function(__download_pkg pkg_name pkg_url pkg_md5) + + if(LOCAL_LIBS_SERVER) + get_filename_component(_URL_FILE_NAME ${pkg_url} NAME) + set(pkg_url "http://${LOCAL_LIBS_SERVER}:8081/libs/${pkg_name}/${_URL_FILE_NAME}" ${pkg_url}) + endif() + + FetchContent_Declare( + ${pkg_name} + URL ${pkg_url} + URL_HASH MD5=${pkg_md5} + ) + FetchContent_GetProperties(${pkg_name}) + message("download: ${${pkg_name}_SOURCE_DIR} , ${pkg_name} , ${pkg_url}") + if(NOT ${pkg_name}_POPULATED) + FetchContent_Populate(${pkg_name}) + set(${pkg_name}_SOURCE_DIR ${${pkg_name}_SOURCE_DIR} PARENT_SCOPE) + endif() + +endfunction() + +function(__download_pkg_with_git pkg_name pkg_url pkg_git_commit pkg_md5) + + if(LOCAL_LIBS_SERVER) + set(pkg_url "http://${LOCAL_LIBS_SERVER}:8081/libs/${pkg_name}/${pkg_git_commit}") + FetchContent_Declare( + ${pkg_name} + URL ${pkg_url} + URL_HASH MD5=${pkg_md5} + ) + else() + FetchContent_Declare( + ${pkg_name} + GIT_REPOSITORY ${pkg_url} + GIT_TAG ${pkg_git_commit}) + endif() + 
FetchContent_GetProperties(${pkg_name}) + message("download: ${${pkg_name}_SOURCE_DIR} , ${pkg_name} , ${pkg_url}") + if(NOT ${pkg_name}_POPULATED) + FetchContent_Populate(${pkg_name}) + set(${pkg_name}_SOURCE_DIR ${${pkg_name}_SOURCE_DIR} PARENT_SCOPE) + endif() + +endfunction() + + +function(__find_pkg_then_add_target pkg_name pkg_exe) + + unset(${pkg_name}_LIBS) + + message("_FIND:${${pkg_name}_BASE_DIR}") + + if(pkg_exe) + find_program(${pkg_exe}_EXE ${pkg_exe} PATHS ${${pkg_name}_BASE_DIR}/bin NO_DEFAULT_PATH) + if(NOT ${pkg_exe}_EXE) + return() + endif() + add_executable(${pkg_name}::${pkg_exe} IMPORTED GLOBAL) + set_target_properties(${pkg_name}::${pkg_exe} PROPERTIES + IMPORTED_LOCATION ${${pkg_exe}_EXE} + ) + message("found ${${pkg_exe}_EXE}") + endif() + + foreach(_LIB_NAME ${ARGN}) + set(_LIB_SEARCH_NAME ${_LIB_NAME}) + set(_LIB_TYPE SHARED) + if (${pkg_name}_USE_STATIC_LIBS) + set(_LIB_SEARCH_NAME "${CMAKE_STATIC_LIBRARY_PREFIX}${_LIB_NAME}${CMAKE_STATIC_LIBRARY_SUFFIX}") + set(_LIB_TYPE STATIC) + endif () + set(${_LIB_NAME}_LIB ${_LIB_NAME}_LIB-NOTFOUND) + find_library(${_LIB_NAME}_LIB ${_LIB_SEARCH_NAME} PATHS ${${pkg_name}_BASE_DIR}/lib NO_DEFAULT_PATH) + if(NOT ${_LIB_NAME}_LIB) + return() + endif() + add_library(${pkg_name}::${_LIB_NAME} ${_LIB_TYPE} IMPORTED GLOBAL) + set_target_properties(${pkg_name}::${_LIB_NAME} PROPERTIES + INTERFACE_INCLUDE_DIRECTORIES "${${pkg_name}_BASE_DIR}/include" + IMPORTED_LOCATION ${${_LIB_NAME}_LIB} + ) + list(APPEND ${pkg_name}_LIBS ${pkg_name}::${_LIB_NAME}) + message("found ${${_LIB_NAME}_LIB}") + STRING( REGEX REPLACE "(.+)/(.+)" "\\1" LIBPATH ${${_LIB_NAME}_LIB}) + set(${pkg_name}_LIBPATH ${LIBPATH} CACHE STRING INTERNAL) + endforeach(_LIB_NAME) + + set(${pkg_name}_LIBS ${${pkg_name}_LIBS} PARENT_SCOPE) +endfunction() + +function(__exec_cmd) + set(options ) + set(oneValueArgs WORKING_DIRECTORY) + set(multiValueArgs COMMAND) + + cmake_parse_arguments(EXEC "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN} 
) + + execute_process(COMMAND ${EXEC_COMMAND} + WORKING_DIRECTORY ${EXEC_WORKING_DIRECTORY} + RESULT_VARIABLE RESULT) + if(NOT RESULT EQUAL "0") + message(FATAL_ERROR "error! when ${EXEC_COMMAND} in ${EXEC_WORKING_DIRECTORY}") + endif() +endfunction() + +function(__check_patches pkg_patches) + # check patches + if (PKG_PATCHES) + file(TOUCH ${_MS_LIB_CACHE}/${pkg_name}_patch.md5) + file(READ ${_MS_LIB_CACHE}/${pkg_name}_patch.md5 ${pkg_name}_PATCHES_MD5) + + message("patches md5:${${pkg_name}_PATCHES_MD5}") + + set(${pkg_name}_PATCHES_NEW_MD5 ) + foreach(_PATCH ${PKG_PATCHES}) + file(MD5 ${_PATCH} _PF_MD5) + set(${pkg_name}_PATCHES_NEW_MD5 "${${pkg_name}_PATCHES_NEW_MD5},${_PF_MD5}") + endforeach(_PATCH) + + if (NOT ${pkg_name}_PATCHES_MD5 STREQUAL ${pkg_name}_PATCHES_NEW_MD5) + set(${pkg_name}_PATCHES ${PKG_PATCHES}) + file(REMOVE_RECURSE "${_MS_LIB_CACHE}/${pkg_name}-subbuild") + file(WRITE ${_MS_LIB_CACHE}/${pkg_name}_patch.md5 ${${pkg_name}_PATCHES_NEW_MD5}) + message("patches changed : ${${pkg_name}_PATCHES_NEW_MD5}") + endif () + endif () +endfunction() + +set(GE_FIND_NO_DEFAULT_PATH NO_CMAKE_PATH NO_CMAKE_ENVIRONMENT_PATH NO_SYSTEM_ENVIRONMENT_PATH + NO_CMAKE_BUILDS_PATH NO_CMAKE_PACKAGE_REGISTRY NO_CMAKE_SYSTEM_PATH + NO_CMAKE_SYSTEM_PACKAGE_REGISTRY) +set(GE_FIND_NO_DEFAULT_PATH ${GE_FIND_NO_DEFAULT_PATH} PARENT_SCOPE) + +function(graphengine_add_pkg pkg_name ) + set(options ) + set(oneValueArgs URL MD5 GIT_REPOSITORY GIT_TAG VER EXE DIR HEAD_ONLY) + set(multiValueArgs CMAKE_OPTION LIBS PRE_CONFIGURE_COMMAND CONFIGURE_COMMAND BUILD_OPTION INSTALL_INCS INSTALL_LIBS PATCHES) + cmake_parse_arguments(PKG "${options}" "${oneValueArgs}" "${multiValueArgs}" ${ARGN} ) + + set(__FIND_PKG_NAME ${pkg_name}) + string(TOLOWER ${pkg_name} pkg_name) + message("pkg name:${__FIND_PKG_NAME},${pkg_name}") + + set(${pkg_name}_PATCHES_HASH ) + foreach(_PATCH ${PKG_PATCHES}) + file(MD5 ${_PATCH} _PF_MD5) + set(${pkg_name}_PATCHES_HASH "${${pkg_name}_PATCHES_HASH},${_PF_MD5}") + 
endforeach(_PATCH) + + # check options + set(${pkg_name}_CONFIG_TXT + "${CMAKE_CXX_COMPILER_VERSION}-${CMAKE_C_COMPILER_VERSION} + ${ARGN} - ${${pkg_name}_USE_STATIC_LIBS}- ${${pkg_name}_PATCHES_HASH} + ${${pkg_name}_CXXFLAGS}--${${pkg_name}_CFLAGS}--${${pkg_name}_LDFLAGS}") + string(REPLACE ";" "-" ${pkg_name}_CONFIG_TXT ${${pkg_name}_CONFIG_TXT}) + string(MD5 ${pkg_name}_CONFIG_HASH ${${pkg_name}_CONFIG_TXT}) + + message("${pkg_name} config hash: ${${pkg_name}_CONFIG_HASH}") + + set(${pkg_name}_BASE_DIR ${_MS_LIB_CACHE}/${pkg_name}_${${pkg_name}_CONFIG_HASH}) + set(${pkg_name}_DIRPATH ${${pkg_name}_BASE_DIR} CACHE STRING INTERNAL) + + if(EXISTS ${${pkg_name}_BASE_DIR}/options.txt AND PKG_HEAD_ONLY) + set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/${PKG_HEAD_ONLY} PARENT_SCOPE) + add_library(${pkg_name} INTERFACE) + target_include_directories(${pkg_name} INTERFACE ${${pkg_name}_INC}) + return() + endif () + + if(NOT PKG_EXE) + set(PKG_EXE 0) + endif() + + set(${__FIND_PKG_NAME}_ROOT ${${pkg_name}_BASE_DIR}) + set(${__FIND_PKG_NAME}_ROOT ${${pkg_name}_BASE_DIR} PARENT_SCOPE) + + if (PKG_LIBS) + __find_pkg_then_add_target(${pkg_name} ${PKG_EXE} ${PKG_LIBS}) + if(${pkg_name}_LIBS) + set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/include PARENT_SCOPE) + message("Found libs: ${${pkg_name}_LIBS}") + return() + endif() + elseif(NOT PKG_HEAD_ONLY) + find_package(${__FIND_PKG_NAME} ${PKG_VER} NO_CMAKE_SYSTEM_PATH NO_SYSTEM_ENVIRONMENT_PATH) + if (${__FIND_PKG_NAME}_FOUND) + set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/include PARENT_SCOPE) + message("Found pkg: ${__FIND_PKG_NAME}") + return() + endif () + endif () + + if (NOT PKG_DIR) + if (PKG_GIT_REPOSITORY) + __download_pkg_with_git(${pkg_name} ${PKG_GIT_REPOSITORY} ${PKG_GIT_TAG} ${PKG_MD5}) + else() + __download_pkg(${pkg_name} ${PKG_URL} ${PKG_MD5}) + endif() + else() + set(${pkg_name}_SOURCE_DIR ${PKG_DIR}) + endif () + file(WRITE ${${pkg_name}_BASE_DIR}/options.txt ${${pkg_name}_CONFIG_TXT}) + 
message("${pkg_name}_SOURCE_DIR : ${${pkg_name}_SOURCE_DIR}") + + foreach(_PATCH_FILE ${PKG_PATCHES}) + message("patching ${${pkg_name}_SOURCE_DIR} -p1 < ${_PATCH_FILE}") + execute_process(COMMAND patch -p1 INPUT_FILE ${_PATCH_FILE} + WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR} + RESULT_VARIABLE Result) + if(NOT Result EQUAL "0") + message(FATAL_ERROR "Failed patch: ${_PATCH_FILE}") + endif() + endforeach(_PATCH_FILE) + + file(LOCK ${${pkg_name}_BASE_DIR} DIRECTORY GUARD FUNCTION RESULT_VARIABLE ${pkg_name}_LOCK_RET TIMEOUT 600) + if(NOT ${pkg_name}_LOCK_RET EQUAL "0") + message(FATAL_ERROR "error! when try lock ${${pkg_name}_BASE_DIR} : ${${pkg_name}_LOCK_RET}") + endif() + + if(${pkg_name}_SOURCE_DIR) + if (PKG_HEAD_ONLY) + file(GLOB ${pkg_name}_SOURCE_SUBDIRS ${${pkg_name}_SOURCE_DIR}/*) + file(COPY ${${pkg_name}_SOURCE_SUBDIRS} DESTINATION ${${pkg_name}_BASE_DIR}) + set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/${PKG_HEAD_ONLY} PARENT_SCOPE) + add_library(${pkg_name} INTERFACE) + target_include_directories(${pkg_name} INTERFACE ${${pkg_name}_INC}) + + elseif (PKG_CMAKE_OPTION) + # in cmake + file(MAKE_DIRECTORY ${${pkg_name}_SOURCE_DIR}/_build) + if (${pkg_name}_CFLAGS) + set(${pkg_name}_CMAKE_CFLAGS "-DCMAKE_C_FLAGS=${${pkg_name}_CFLAGS}") + endif () + if (${pkg_name}_CXXFLAGS) + set(${pkg_name}_CMAKE_CXXFLAGS "-DCMAKE_CXX_FLAGS=${${pkg_name}_CXXFLAGS}") + endif () + + if (${pkg_name}_LDFLAGS) + if (${pkg_name}_USE_STATIC_LIBS) + #set(${pkg_name}_CMAKE_LDFLAGS "-DCMAKE_STATIC_LINKER_FLAGS=${${pkg_name}_LDFLAGS}") + else() + set(${pkg_name}_CMAKE_LDFLAGS "-DCMAKE_SHARED_LINKER_FLAGS=${${pkg_name}_LDFLAGS}") + endif () + endif () + + __exec_cmd(COMMAND ${CMAKE_COMMAND} ${PKG_CMAKE_OPTION} -G ${CMAKE_GENERATOR} + ${${pkg_name}_CMAKE_CFLAGS} ${${pkg_name}_CMAKE_CXXFLAGS} ${${pkg_name}_CMAKE_LDFLAGS} + -DCMAKE_INSTALL_PREFIX=${${pkg_name}_BASE_DIR} .. + WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}/_build) + + __exec_cmd(COMMAND ${CMAKE_COMMAND} --build . 
--target install -- -j8 + WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}/_build) + + else() + if (${pkg_name}_CFLAGS) + set(${pkg_name}_MAKE_CFLAGS "CFLAGS=${${pkg_name}_CFLAGS}") + endif () + if (${pkg_name}_CXXFLAGS) + set(${pkg_name}_MAKE_CXXFLAGS "CXXFLAGS=${${pkg_name}_CXXFLAGS}") + endif () + if (${pkg_name}_LDFLAGS) + set(${pkg_name}_MAKE_LDFLAGS "LDFLAGS=${${pkg_name}_LDFLAGS}") + endif () + # in configure && make + if (PKG_PRE_CONFIGURE_COMMAND) + __exec_cmd(COMMAND ${PKG_PRE_CONFIGURE_COMMAND} + WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}) + endif () + + if (PKG_CONFIGURE_COMMAND) + __exec_cmd(COMMAND ${PKG_CONFIGURE_COMMAND} + ${${pkg_name}_MAKE_CFLAGS} ${${pkg_name}_MAKE_CXXFLAGS} ${${pkg_name}_MAKE_LDFLAGS} + --prefix=${${pkg_name}_BASE_DIR} + WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}) + endif () + set(${pkg_name}_BUILD_OPTION ${PKG_BUILD_OPTION}) + if (NOT PKG_CONFIGURE_COMMAND) + set(${pkg_name}_BUILD_OPTION ${${pkg_name}_BUILD_OPTION} + ${${pkg_name}_MAKE_CFLAGS} ${${pkg_name}_MAKE_CXXFLAGS} ${${pkg_name}_MAKE_LDFLAGS}) + endif () + # build + __exec_cmd(COMMAND ${CMAKE_MAKE_PROGRAM} ${${pkg_name}_BUILD_OPTION} -j8 + WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}) + + if (PKG_INSTALL_INCS OR PKG_INSTALL_LIBS) + file(GLOB ${pkg_name}_INSTALL_INCS ${${pkg_name}_SOURCE_DIR}/${PKG_INSTALL_INCS}) + file(GLOB ${pkg_name}_INSTALL_LIBS ${${pkg_name}_SOURCE_DIR}/${PKG_INSTALL_LIBS}) + file(COPY ${${pkg_name}_INSTALL_INCS} DESTINATION ${${pkg_name}_BASE_DIR}/include) + file(COPY ${${pkg_name}_INSTALL_LIBS} DESTINATION ${${pkg_name}_BASE_DIR}/lib) + else() + __exec_cmd(COMMAND ${CMAKE_MAKE_PROGRAM} install WORKING_DIRECTORY ${${pkg_name}_SOURCE_DIR}) + endif () + endif () + endif() + + if (PKG_LIBS) + __find_pkg_then_add_target(${pkg_name} ${PKG_EXE} ${PKG_LIBS}) + set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/include PARENT_SCOPE) + if(NOT ${pkg_name}_LIBS) + message(FATAL_ERROR "Can not find pkg: ${pkg_name}") + endif() + else() + find_package(${__FIND_PKG_NAME} 
${PKG_VER} QUIET) + if (${__FIND_PKG_NAME}_FOUND) + set(${pkg_name}_INC ${${pkg_name}_BASE_DIR}/include PARENT_SCOPE) + message("Found pkg: ${${__FIND_PKG_NAME}_LIBRARIES}") + return() + endif () + endif () +endfunction() diff --git a/docs/GE_Architecture.png b/docs/GE_Architecture.png new file mode 100644 index 00000000..5c194474 Binary files /dev/null and b/docs/GE_Architecture.png differ diff --git a/docs/README.md b/docs/README.md new file mode 100644 index 00000000..f27ae3f8 --- /dev/null +++ b/docs/README.md @@ -0,0 +1,3 @@ +# GraphEngine Documentation + +The GraphEngine documentation is in the [MindSpore Docs](https://gitee.com/mindspore/docs) repository. diff --git a/inc/common/blocking_queue.h b/inc/common/blocking_queue.h new file mode 100644 index 00000000..d91abd27 --- /dev/null +++ b/inc/common/blocking_queue.h @@ -0,0 +1,121 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_COMMON_BLOCKING_QUEUE_H_ +#define INC_COMMON_BLOCKING_QUEUE_H_ + +#include + +#include +#include +#include + +static const int kDefaultMaxQueueSize = 2048; + +template +class BlockingQueue { + public: + explicit BlockingQueue(uint32_t max_size = kDefaultMaxQueueSize) : max_size_(max_size), is_stoped_(false) {} + + ~BlockingQueue() {} + + bool Pop(T &item) { + std::unique_lock lock(mutex_); + + while (queue_.empty() && !is_stoped_) { + empty_cond_.wait(lock); + } + + if (is_stoped_) { + return false; + } + + item = queue_.front(); + queue_.pop_front(); + + full_cond_.notify_one(); + + return true; + } + + bool Push(const T &item, bool is_wait = true) { + std::unique_lock lock(mutex_); + + while (queue_.size() >= max_size_ && !is_stoped_) { + if (!is_wait) { + return false; + } + full_cond_.wait(lock); + } + + if (is_stoped_) { + return false; + } + + queue_.push_back(item); + + empty_cond_.notify_one(); + + return true; + } + + void Stop() { + { + std::unique_lock lock(mutex_); + is_stoped_ = true; + } + + full_cond_.notify_all(); + empty_cond_.notify_all(); + } + + void Restart() { + std::unique_lock lock(mutex_); + is_stoped_ = false; + } + + // if the queue stop , the function to release the unprocessed items will be call + std::list GetRemainItems() { + std::unique_lock lock(mutex_); + + if (!is_stoped_) { + return std::list(); + } + + return queue_; + } + + bool IsFull() { + std::unique_lock lock(mutex_); + return queue_.size() >= max_size_; + } + + void Clear() { + std::unique_lock lock(mutex_); + queue_.clear(); + } + + private: + std::list queue_; + std::mutex mutex_; + std::condition_variable empty_cond_; + std::condition_variable full_cond_; + uint32_t max_size_; + + bool is_stoped_; +}; + +#endif // INC_COMMON_BLOCKING_QUEUE_H_ diff --git a/inc/common/dynamic_aipp.h b/inc/common/dynamic_aipp.h new file mode 100755 index 00000000..4873dbec --- /dev/null +++ b/inc/common/dynamic_aipp.h @@ -0,0 +1,104 @@ +/** + * Copyright 2019-2020 
Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_COMMON_DYNAMIC_AIPP_H_ +#define INC_COMMON_DYNAMIC_AIPP_H_ + +#include + +/// +/// @ingroup dnn +/// @brief struct define of dynamic aipp batch parameter. +/// +typedef struct tagAippDynamicBatchPara { + int8_t cropSwitch; // crop switch + int8_t scfSwitch; // resize switch + int8_t paddingSwitch; // 0: unable padding + // 1: padding config value,sfr_filling_hblank_ch0 ~ sfr_filling_hblank_ch2 + // 2: padding source picture data, single row/collumn copy + // 3: padding source picture data, block copy + // 4: padding source picture data, mirror copy + int8_t rotateSwitch; // rotate switch,0: non-ratate, + // 1: ratate 90° clockwise,2: ratate 180° clockwise,3: ratate 270° clockwise + int8_t reserve[4]; + int32_t cropStartPosW; // the start horizontal position of cropping + int32_t cropStartPosH; // the start vertical position of cropping + int32_t cropSizeW; // crop width + int32_t cropSizeH; // crop height + + int32_t scfInputSizeW; // input width of scf + int32_t scfInputSizeH; // input height of scf + int32_t scfOutputSizeW; // output width of scf + int32_t scfOutputSizeH; // output height of scf + + int32_t paddingSizeTop; // top padding size + int32_t paddingSizeBottom; // bottom padding size + int32_t paddingSizeLeft; // left padding size + int32_t paddingSizeRight; // right padding size + + int16_t dtcPixelMeanChn0; // mean value of channel 0 + int16_t 
dtcPixelMeanChn1; // mean value of channel 1 + int16_t dtcPixelMeanChn2; // mean value of channel 2 + int16_t dtcPixelMeanChn3; // mean value of channel 3 + + uint16_t dtcPixelMinChn0; // min value of channel 0 + uint16_t dtcPixelMinChn1; // min value of channel 1 + uint16_t dtcPixelMinChn2; // min value of channel 2 + uint16_t dtcPixelMinChn3; // min value of channel 3 + uint16_t dtcPixelVarReciChn0; // sfr_dtc_pixel_variance_reci_ch0 + uint16_t dtcPixelVarReciChn1; // sfr_dtc_pixel_variance_reci_ch1 + uint16_t dtcPixelVarReciChn2; // sfr_dtc_pixel_variance_reci_ch2 + uint16_t dtcPixelVarReciChn3; // sfr_dtc_pixel_variance_reci_ch3 + + int8_t reserve1[16]; // 32B assign, for ub copy +} kAippDynamicBatchPara; + +/// +/// @ingroup dnn +/// @brief struct definition of dynamic aipp parameter. lite:64+96*batchNum byte ; tiny:64+64*batchNum byte +/// +typedef struct tagAippDynamicPara { + uint8_t inputFormat; // input format:YUV420SP_U8/XRGB8888_U8/RGB888_U8 + int8_t cscSwitch; // csc switch + int8_t rbuvSwapSwitch; // rb/ub swap switch + int8_t axSwapSwitch; // RGBA->ARGB, YUVA->AYUV swap switch + int8_t batchNum; // batch parameter number + int8_t reserve1[3]; + int32_t srcImageSizeW; // source image width + int32_t srcImageSizeH; // source image height + int16_t cscMatrixR0C0; // csc_matrix_r0_c0 + int16_t cscMatrixR0C1; // csc_matrix_r0_c1 + int16_t cscMatrixR0C2; // csc_matrix_r0_c2 + int16_t cscMatrixR1C0; // csc_matrix_r1_c0 + int16_t cscMatrixR1C1; // csc_matrix_r1_c1 + int16_t cscMatrixR1C2; // csc_matrix_r1_c2 + int16_t cscMatrixR2C0; // csc_matrix_r2_c0 + int16_t cscMatrixR2C1; // csc_matrix_r2_c1 + int16_t cscMatrixR2C2; // csc_matrix_r2_c2 + int16_t reserve2[3]; + uint8_t cscOutputBiasR0; // output Bias for RGB to YUV, element of row 0, unsigned number + uint8_t cscOutputBiasR1; // output Bias for RGB to YUV, element of row 1, unsigned number + uint8_t cscOutputBiasR2; // output Bias for RGB to YUV, element of row 2, unsigned number + uint8_t 
cscInputBiasR0; // input Bias for YUV to RGB, element of row 0, unsigned number + uint8_t cscInputBiasR1; // input Bias for YUV to RGB, element of row 1, unsigned number + uint8_t cscInputBiasR2; // input Bias for YUV to RGB, element of row 2, unsigned number + uint8_t reserve3[2]; + int8_t reserve4[16]; // 32B assign, for ub copy + + kAippDynamicBatchPara aippBatchPara; // allow transfer several batch para. +} kAippDynamicPara; + +#endif // INC_COMMON_DYNAMIC_AIPP_H_ diff --git a/inc/common/npu_error_define.h b/inc/common/npu_error_define.h new file mode 100755 index 00000000..249ea673 --- /dev/null +++ b/inc/common/npu_error_define.h @@ -0,0 +1,94 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_COMMON_NPU_ERROR_DEFINE_H_ +#define INC_COMMON_NPU_ERROR_DEFINE_H_ + +typedef enum tagHiAiNpuLocal { + HIAI_HOST = 1, + HIAI_DEVICE = 2, +} HiAiNpuLocal; + +typedef enum tagHiAiNpuCodeType { + ERROR_CODE = 1, + EXCEPTION_CODE = 2, +} HiAiNpuCodeType; + +typedef enum tagHiAiNpuErrLevel { + NONE_LEVEL = 0, + SUGGESTION_LEVEL = 1, + NORMAL_LEVEL = 2, + SERIOUS_LEVEL = 3, + CRITICAL_ERROR = 4, +} HiAiNpuErrLevel; + +typedef enum tagHiAiNpuModuleId { + HIAI_DRIVER = 1, + HIAI_CTRLCPU = 2, + HIAI_TS = 3, + HIAI_RUNTIME = 4, + HIAI_AICPU = 5, + HIAI_CCE = 6, + HIAI_TVM = 7, + HIAI_FRAMEWORK = 8, + HiAI_ENGINE = 9, + HIAI_DVPP = 10, + HIAI_AIPP = 11, + HIAI_LOWPOWER = 12, + HIAI_MDC = 13, + HIAI_COMPILE = 14, + HIAI_TOOLCHIAN = 15, + HIAI_ALG = 16, + HIAI_PROFILING = 17, + HIAI_HCCL = 18, + HIAI_SIMULATION = 19, + HIAI_BIOS = 20, + HIAI_SEC = 21, + HIAI_TINY = 22, + HIAI_DP = 23, +} HiAiNpuModuleId; + +// bit 31-bit30 to be hiai local +#define HIAI_NPULOCAL_MASK 0xC0000000 +#define SHIFT_LOCAL_MASK 30 +#define HIAI_NPULOCAL_VAL_MASK 0x3 +// bit 29 -bit28 to be hiai aicpu code type +#define HIAI_CODE_TYPE_MASK 0x30000000 +#define SHIFT_CODE_MASK 28 +#define HIAI_CODE_TYPE_VAL_MASK 0x3 +// bit 27 -bit25 to be hiai error level +#define HIAI_ERROR_LEVEL_MASK 0x0E000000 +#define SHIFT_ERROR_LVL_MASK 25 +#define HIAI_ERROR_LEVEL_VAL_MASK 0x7 +// bit 24 -bit17 to be hiai mod +#define HIAI_MODE_ID_MASK 0x01FE0000 +#define SHIFT_MODE_MASK 17 +#define HIAI_MODE_ID_VAL_MASK 0xFF + +#define HIAI_NPU_LOC_BIT(a) \ + (HIAI_NPULOCAL_MASK & ((unsigned int)((HiAiNpuLocal)(a)) & HIAI_NPULOCAL_VAL_MASK) << SHIFT_LOCAL_MASK) +#define HIAI_NPU_CODE_TYPE_BIT(a) \ + (HIAI_CODE_TYPE_MASK & ((unsigned int)((HiAiNpuCodeType)(a)) & HIAI_CODE_TYPE_VAL_MASK) << SHIFT_CODE_MASK) +#define HIAI_NPU_ERR_LEV_BIT(a) \ + (HIAI_ERROR_LEVEL_MASK & ((unsigned int)((HiAiNpuErrLevel)(a)) & HIAI_ERROR_LEVEL_VAL_MASK) << SHIFT_ERROR_LVL_MASK) +#define HIAI_NPU_MOD_ID_BIT(a) \ + (HIAI_MODE_ID_MASK 
& ((unsigned int)((HiAiNpuModuleId)(a)) & HIAI_MODE_ID_VAL_MASK) << SHIFT_MODE_MASK) + +#define HIAI_NPU_ERR_CODE_HEAD(npuLocal, codeType, errLevel, moduleId) \ + (HIAI_NPU_LOC_BIT(npuLocal) + HIAI_NPU_CODE_TYPE_BIT(codeType) + HIAI_NPU_ERR_LEV_BIT(errLevel) + \ + HIAI_NPU_MOD_ID_BIT(moduleId)) + +#endif // INC_COMMON_NPU_ERROR_DEFINE_H_ diff --git a/inc/common/opskernel/ge_task_info.h b/inc/common/opskernel/ge_task_info.h new file mode 100755 index 00000000..74eee458 --- /dev/null +++ b/inc/common/opskernel/ge_task_info.h @@ -0,0 +1,55 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_COMMON_OPSKERNEL_GE_TASK_INFO_H_ +#define INC_COMMON_OPSKERNEL_GE_TASK_INFO_H_ + +#include +#include + +#include +#include + +using std::string; +namespace ge { +// DAVINCI_TRAIN/DAVINCI_CLOUD is not needed when GETaskKernelHcclInfo needed +struct GETaskKernelHcclInfo { + string hccl_type; + void *inputDataAddr; + void *outputDataAddr; + void *workSpaceAddr; + int32_t count; + int32_t dataType; + int32_t opType; + int64_t rootId; + uint64_t workSpaceMemSize; + std::vector hcclStreamList; +}; + +struct GETaskInfo { + uint32_t id; + uint16_t type; + uint32_t streamID; + void *stream; // rtKernelLaunch input argument + void *event; + void *privateDef; + uint32_t privateDefLen; + void *opsKernelStorePtr; + + GETaskKernelHcclInfo kernelHcclInfo; +}; +} // namespace ge +#endif // INC_COMMON_OPSKERNEL_GE_TASK_INFO_H_ diff --git a/inc/common/opskernel/ops_kernel_info_store.h b/inc/common/opskernel/ops_kernel_info_store.h new file mode 100644 index 00000000..df159998 --- /dev/null +++ b/inc/common/opskernel/ops_kernel_info_store.h @@ -0,0 +1,87 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_COMMON_OPSKERNEL_OPS_KERNEL_INFO_STORE_H_ +#define INC_COMMON_OPSKERNEL_OPS_KERNEL_INFO_STORE_H_ + +#include +#include +#include +#include + +#include "./ge_task_info.h" +#include "./ops_kernel_info_types.h" +#include "cce/aicpu_engine_struct.h" +#include "cce/fwk_adpt_struct.h" +#include "common/ge_inner_error_codes.h" +#include "graph/node.h" +#include "proto/task.pb.h" + +using std::map; +using std::string; +using std::to_string; +using std::vector; + +namespace ge { +class OpDesc; + +class OpsKernelInfoStore { + public: + OpsKernelInfoStore() {} + + virtual ~OpsKernelInfoStore() {} + + // initialize opsKernelInfoStore + virtual Status Initialize(const map &options) = 0; + + // finalize opsKernelInfoStore + virtual Status Finalize() = 0; + + virtual Status CreateSession(const std::map &session_options) { return SUCCESS; } + + virtual Status DestroySession(const std::map &session_options) { return SUCCESS; } + + // get all opsKernelInfo + virtual void GetAllOpsKernelInfo(map &infos) const = 0; + + // check whether opsKernelInfoStore is supported based on the operator attribute + virtual bool CheckSupported(const OpDescPtr &opDescPtr, std::string &un_supported_reason) const = 0; + + virtual bool CheckAccuracySupported(const OpDescPtr &opDescPtr, std::string &un_supported_reason, + bool realQuery = false) const { + return CheckSupported(opDescPtr, un_supported_reason); + } + + // requirement of memory allocation + virtual Status CalcOpRunningParam(Node &node) = 0; + + // generate task for op + virtual Status GenerateTask(const Node &node, RunContext &context, std::vector &tasks) = 0; + + // only call fe engine interface to compile single op + virtual Status CompileOp(vector &node_vec) { return SUCCESS; } + + // load task for op + virtual Status LoadTask(GETaskInfo &task) { return SUCCESS; } + + // only to call aicpu interface for generating task struct + virtual Status GenSingleOpRunTask(const NodePtr &node, STR_FWK_OP_KERNEL &task, string 
+    &task_info) { return SUCCESS; }
+
+  // only to call aicpu interface for generating task struct
+  virtual Status GenMemCopyTask(uint64_t count, STR_FWK_OP_KERNEL &task, string &task_info) { return SUCCESS; }
+};
+}  // namespace ge
+#endif  // INC_COMMON_OPSKERNEL_OPS_KERNEL_INFO_STORE_H_
diff --git a/inc/common/opskernel/ops_kernel_info_types.h b/inc/common/opskernel/ops_kernel_info_types.h
new file mode 100644
index 00000000..75e437af
--- /dev/null
+++ b/inc/common/opskernel/ops_kernel_info_types.h
@@ -0,0 +1,63 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef INC_COMMON_OPSKERNEL_OPS_KERNEL_INFO_TYPES_H_
+#define INC_COMMON_OPSKERNEL_OPS_KERNEL_INFO_TYPES_H_
+
+#include <stdint.h>
+
+#include <string>
+#include <vector>
+
+#include "graph/buffer.h"
+#include "runtime/rt_model.h"
+
+using std::string;
+
+namespace ge {
+struct RunContext {
+  rtModel_t model;
+  rtStream_t stream;
+  uint64_t sessionId;
+  uint64_t dataMemSize;
+  uint8_t *dataMemBase;
+  uint64_t weightMemSize;
+  uint8_t *weightMemBase;
+  ge::Buffer weightsBuffer;
+  std::vector<rtStream_t> graphStreamList;  // all streams of graph which are sort by ge stream id(0,1,...)
+  std::vector<rtEvent_t> graphEventList;    // all events of graph which are sort by ge event id(0,1,...)
+}; + +struct Task { + uint32_t id; + uint16_t type; + void *stream; + void *event; +}; + +struct OpInfo { + string engine; // engine name + string opKernelLib; // opsKernelStore name + int computeCost; // compute cost + bool flagPartial; // whether to support related shape + bool flagAsync; // Whether to support asynchronous + bool isAtomic; // whether to support atomic addr clean + string opFileName; // op file name + string opFuncName; // op function name +}; +} // namespace ge + +#endif // INC_COMMON_OPSKERNEL_OPS_KERNEL_INFO_TYPES_H_ diff --git a/inc/common/optimizer/graph_optimizer.h b/inc/common/optimizer/graph_optimizer.h new file mode 100644 index 00000000..cc972425 --- /dev/null +++ b/inc/common/optimizer/graph_optimizer.h @@ -0,0 +1,61 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef INC_COMMON_OPTIMIZER_GRAPH_OPTIMIZER_H_
+#define INC_COMMON_OPTIMIZER_GRAPH_OPTIMIZER_H_
+
+#include <map>
+#include <string>
+
+#include "./graph_optimizer_types.h"
+#include "common/ge_inner_error_codes.h"
+#include "common/opskernel/ops_kernel_info_types.h"
+#include "graph/compute_graph.h"
+
+using std::map;
+using std::string;
+
+namespace ge {
+class GraphOptimizer {
+ public:
+  virtual ~GraphOptimizer() {}
+
+  // initialize graphOptimizer
+  virtual Status Initialize(const map<string, string> &options) = 0;
+
+  // close graphOptimizer
+  virtual Status Finalize() = 0;
+
+  // optimize original graph for FE quant optimization
+  virtual Status OptimizeGraphPrepare(ComputeGraph &graph) { return SUCCESS; }
+
+  // optimize original graph used in the graph preparation stage
+  virtual Status OptimizeOriginalGraph(ComputeGraph &graph) = 0;
+
+  // optimize fused graph
+  virtual Status OptimizeFusedGraph(ComputeGraph &graph) = 0;
+
+  // optimize the whole graph which will be used after graph merged
+  virtual Status OptimizeWholeGraph(ComputeGraph &graph) = 0;
+
+  // get attributes of graph optimizer
+  virtual Status GetAttributes(GraphOptimizerAttribute &attrs) const = 0;
+
+  // optimize streamed Graph
+  virtual Status OptimizeStreamGraph(ComputeGraph &graph, const RunContext &context) { return SUCCESS; }
+};
+}  // namespace ge
+#endif  // INC_COMMON_OPTIMIZER_GRAPH_OPTIMIZER_H_
diff --git a/inc/common/optimizer/graph_optimizer_types.h b/inc/common/optimizer/graph_optimizer_types.h
new file mode 100755
index 00000000..5c760c0b
--- /dev/null
+++ b/inc/common/optimizer/graph_optimizer_types.h
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_COMMON_OPTIMIZER_GRAPH_OPTIMIZER_TYPES_H_ +#define INC_COMMON_OPTIMIZER_GRAPH_OPTIMIZER_TYPES_H_ + +#include +#include + +using std::string; +namespace ge { +enum OPTIMIZER_SCOPE { + UNIT = 0, + ENGINE, +}; + +struct GraphOptimizerAttribute { + string engineName; + OPTIMIZER_SCOPE scope; +}; +} // namespace ge + +#endif // INC_COMMON_OPTIMIZER_GRAPH_OPTIMIZER_TYPES_H_ diff --git a/inc/external/ge/ge_api.h b/inc/external/ge/ge_api.h new file mode 100644 index 00000000..6e49df97 --- /dev/null +++ b/inc/external/ge/ge_api.h @@ -0,0 +1,102 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef INC_EXTERNAL_GE_GE_API_H_
+#define INC_EXTERNAL_GE_GE_API_H_
+
+#include <map>
+#include <string>
+#include <vector>
+
+#include "ge/ge_api_error_codes.h"
+#include "ge/ge_api_types.h"
+#include "graph/graph.h"
+#include "graph/tensor.h"
+
+namespace ge {
+typedef uint32_t (*pCallBackFunc)(uint32_t graph_id, const std::map<std::string, ge::Tensor> &params_list);
+
+// Initialize GE
+Status GEInitialize(const std::map<std::string, std::string> &options);
+
+// Finalize GE, release all resources
+Status GEFinalize();
+
+class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Session {
+ public:
+  explicit Session(const std::map<std::string, std::string> &options);
+
+  ~Session();
+
+  ///
+  /// @ingroup client
+  /// @brief add a graph with a specific graphId
+  /// @param [in] graphId graph id
+  /// @return Status result of function
+  ///
+  Status AddGraph(uint32_t graphId, const Graph &graph);
+
+  ///
+  /// @ingroup ge_graph
+  /// @brief remove a graph of the session with specific session id
+  /// @param [in] graphId graph id
+  /// @return Status result of function
+  ///
+  Status RemoveGraph(uint32_t graphId);
+
+  ///
+  /// @ingroup ge_graph
+  /// @brief run a graph of the session with specific session id
+  /// @param [in] graphId graph id
+  /// @param [in] inputs input data
+  /// @param [out] outputs output data
+  /// @return Status result of function
+  ///
+  Status RunGraph(uint32_t graphId, const std::vector<Tensor> &inputs, std::vector<Tensor> &outputs);
+
+  ///
+  /// @ingroup ge_graph
+  /// @brief run graph in the session with specific session id asynchronously
+  /// @param [in] graphId: graph id
+  /// @param [in] inputs: input data
+  /// @param [out] outputs: output data
+  /// @param [out] callback: callback while running graph has been finished.
+  ///              The callback function will not be checked.
+  ///              Please ensure that the implementation of the function is trusted.
+ /// @return Status result of function + /// + Status RunGraphAsync(uint32_t graphId, const std::vector &inputs, + std::vector &outputs, std::function callback); + + /// + /// @ingroup ge_graph + /// @brief register callback func with specific summary or checkpoint by users + /// @param [in] key: func key + /// @param [in] callback: callback specific summary or checkpoint. + /// The callback function will not be checked. + /// Please ensure that the implementation of the function is trusted. + /// @return Status result of function + /// + Status RegisterCallBackFunc(const std::string &key, const pCallBackFunc &callback); + + bool IsGraphNeedRebuild(uint32_t graphId); + + private: + uint64_t sessionId_; +}; +} // namespace ge + +#endif // INC_EXTERNAL_GE_GE_API_H_ diff --git a/inc/external/ge/ge_api_error_codes.h b/inc/external/ge/ge_api_error_codes.h new file mode 100644 index 00000000..e7f52724 --- /dev/null +++ b/inc/external/ge/ge_api_error_codes.h @@ -0,0 +1,76 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_EXTERNAL_GE_GE_API_ERROR_CODES_H_ +#define INC_EXTERNAL_GE_GE_API_ERROR_CODES_H_ + +#include +#include + +namespace ge { +class StatusFactory { + public: + static StatusFactory *Instance() { + static StatusFactory instance; + return &instance; + } + + void RegisterErrorNo(uint32_t err, const std::string &desc) { + // Avoid repeated addition + if (err_desc_.find(err) != err_desc_.end()) { + return; + } + err_desc_[err] = desc; + } + + std::string GetErrDesc(uint32_t err) { + auto iter_find = err_desc_.find(err); + if (iter_find == err_desc_.end()) { + return ""; + } + return iter_find->second; + } + + protected: + StatusFactory() {} + ~StatusFactory() {} + + private: + std::map err_desc_; +}; + +class ErrorNoRegisterar { + public: + ErrorNoRegisterar(uint32_t err, const std::string &desc) { StatusFactory::Instance()->RegisterErrorNo(err, desc); } + ~ErrorNoRegisterar() {} +}; + +// Code compose(4 byte), runtime: 2 bit, type: 2 bit, level: 3 bit, sysid: 8 bit, modid: 5 bit, value: 12 bit +#define GE_ERRORNO(runtime, type, level, sysid, modid, name, value, desc) \ + constexpr ge::Status name = \ + ((0xFF & (static_cast(runtime))) << 30) | ((0xFF & (static_cast(type))) << 28) | \ + ((0xFF & (static_cast(level))) << 25) | ((0xFF & (static_cast(sysid))) << 17) | \ + ((0xFF & (static_cast(modid))) << 12) | (0x0FFF & (static_cast(value))); \ + const ErrorNoRegisterar g_##name##_errorno(name, desc); + +using Status = uint32_t; + +// General error code +GE_ERRORNO(0, 0, 0, 0, 0, SUCCESS, 0, "success"); +GE_ERRORNO(0b11, 0b11, 0b111, 0xFF, 0b11111, FAILED, 0xFFF, "failed"); +} // namespace ge + +#endif // INC_EXTERNAL_GE_GE_API_ERROR_CODES_H_ diff --git a/inc/external/ge/ge_api_types.h b/inc/external/ge/ge_api_types.h new file mode 100644 index 00000000..cdecd987 --- /dev/null +++ b/inc/external/ge/ge_api_types.h @@ -0,0 +1,195 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GE_GE_API_TYPES_H_ +#define INC_EXTERNAL_GE_GE_API_TYPES_H_ + +#include +#include +#include + +namespace ge { +// Option key: graph run mode +const char *const OPTION_GRAPH_RUN_MODE = "ge.graphRunMode"; + +// Option key: ome init +const char *const OPTION_EXEC_SESSION_ID = "ge.exec.sessionId"; +const char *const OPTION_EXEC_DEVICE_ID = "ge.exec.deviceId"; +const char *const OPTION_EXEC_JOB_ID = "ge.exec.jobId"; +const char *const OPTION_EXEC_IS_USEHCOM = "ge.exec.isUseHcom"; +const char *const OPTION_EXEC_RANK_ID = "ge.exec.rankId"; +const char *const OPTION_EXEC_POD_NAME = "ge.exec.podName"; +const char *const OPTION_EXEC_DEPLOY_MODE = "ge.exec.deployMode"; +const char *const OPTION_EXEC_RANK_TABLE_FILE = "ge.exec.rankTableFile"; +const char *const GE_AICPU_FLAG = "ge.aicpuFlag"; +const char *const OPTION_EXEC_EXTERN_PLUGIN_PATH = "ge.soLoadPath"; +const char *const OPTION_EXEC_ENABLE_DUMP = "ge.exec.enableDump"; +const char *const OPTION_EXEC_DUMP_PATH = "ge.exec.dumpPath"; +// Hccl flag, if ge.exec.hcclFlag =1, it means load plugin for opskernel, else:ge.exec.hcclFlag =0 +const char *const OPTION_EXEC_HCCL_FLAG = "ge.exec.hcclFlag"; +const char *const OPTION_EXEC_ATOMIC_FLAG = "ge.exec.enable_atomic"; + +// Option key: memory init +const char *const GRAPH_MEMORY_MAX_SIZE = "ge.graphMemoryMaxSize"; +const char *const VARIABLE_MEMORY_MAX_SIZE = "ge.variableMemoryMaxSize"; + +// Configure stream num by Session constructor options param, +// 
its value should be int32_t type, default value is "1" +const std::string STREAM_NUM = "ge.streamNum"; + +// Configure add head stream to model, +// its value should be "0" or "1", default value is "0" +const std::string HEAD_STREAM = "ge.headStream"; + +// Configure perf level by Session constructor options param, +// its value please see enum PerfLevel, default value is "4" +const std::string PERF_LEVEL = "ge.perfLevel"; + +// Configure encrypt mode by Session constructor options param, +// its value should be int32_t type, default value is "-1" +const std::string ENCRYPT_MODE = "ge.encryptMode"; + +// configure ek file by Session constructor options param, +// its value should be file path, default value is "" +const std::string EK_FILE = "ge.ekFile"; + +// Configure cert file by Session constructor options param, +// its value should be file path, default value is "" +const std::string CERT_FILE = "ge.certFile"; + +// Configure hw key file by Session constructor options param, +// its value should be file path, default value is "" +const std::string HW_KEY_FILE = "ge.hwKeyFile"; + +// Configure private file by Session constructor options param, +// its value should be file path, default value is "" +const std::string PRIVATE_KEY_FILE = "ge.privateKeyFile"; + +// Configure framework type by Session constructor options param, +// its value please see enum FrameworkType, default value is "3" +const std::string FRAMEWORK_TYPE = "ge.frameworkType"; + +// Configure calibration info file by Session constructor options param, +// its value should be file path, default value is "" +const std::string CALIBRATION_CONF_FILE = "ge.calibrationConfFile"; + +// Configure insert op info file by Session constructor options param, +// its value should be file path, default value is "" +const std::string INSERT_OP_FILE = "ge.insertOpFile"; + +// Configure output node name by Session constructor options param, +// its value should be std::string type, default value is "" +const 
std::string OUTPUT_NODE_NAME = "ge.outputNodeName"; + +// Configure weight compress flag by Session constructor options param, +// its value should be "0" or "1", default value is "0" +const std::string COMPRESS_FLAG = "ge.compressFlag"; + +const std::string ATUO_PRECISION_FLAG = "ge.exec.auto_mix_precision"; + +// Configure single op flag for FE +// its value should be "0" or "1", default value is "0" +const std::string SINGLE_OP_FLAG = "ge.exec.single_op"; + +// Configure train flag by Session constructor options param, +// its value should be "0" or "1", default value is "0" +const std::string TRAIN_FLAG = "ge.trainFlag"; + +// Configure run flag by Session constructor options param, +// its value should be "0" or "1", default value is "0" +const std::string RUN_FLAG = "ge.runFlag"; + +// Configure run flag by Session constructor options param, +// its value should be "0" or "1", default value is "0" +// this option is to enable local framework op feature +const std::string LOCAL_FMKOP_FLAG = "ge.enabledLocalFmkop"; + +// Configure run flag by Session constructor options param, +// its value should be a path +// this option is to obtain the TBE op plugin path +const std::string TBE_PLUGIN_PATH_FLAG = "ge.TBE_plugin_path"; + +// Configure run flag by Session constructor options param, +// its value should be a path +// this option is to obtain the DDK Version info +const std::string DDK_VERSION_FLAG = "ge.DDK_version"; + +// Configure run flag by Session constructor options param, +// its value should be a path +// this option is to obtain fe flag +const std::string GE_FE_FLAG = "ge.feFlag"; + +// Configure stream max parallel num only by Session constructor options param, +// its value should be stream:int, such as "DNN_V100:2,DNN_HCCL:3", +// default value is "1", such as "DNN_V100:1,DNN_HCCL:1" +// this option is to obtain stream max parallel num +const std::string STREAM_MAX_PARALLEL_NUM = "ge.streamMaxParallelNum"; + +// configure outputDatatype to setting 
net output type +const std::string OUTPUT_DATATYPE = "ge.outputDatatype"; + +// configure whether to enable hcom parallel by session constructor options param, +// its value should be "0" or "1", default value is "0" +const std::string HCOM_PARALLEL = "ge.hcomParallel"; + +// Configure auto tune mode, this option only take effect while AUTO_TUNE_FLAG is Y, +// example: GA|RL, support configure multiple, split by | +const std::string AUTO_TUNE_MODE = "ge.autoTuneMode"; + +// Configure core type "VectorEngine", default value is "AICoreEngine" +const std::string CORE_TYPE = "ge.engineType"; + +// Configure soc version , example: "Ascend310" +const std::string SOC_VERSION = "ge.socVersion"; + +// Save original model +const std::string SAVE_ORIGINAL_MODEL = "ge.saveOriginalModel"; + +// Save original model file name +const std::string ORIGINAL_MODEL_FILE = "ge.originalModelFile"; + +const char *const OPTION_GE_MAX_DUMP_FILE_NUM = "ge.maxDumpFileNum"; +const char *const OPTION_GE_MAX_DUMP_FILE_SIZE = "ge.maxDumpFileSize"; +const char *const OPTION_GE_MAX_DUMP_OP_NUM = "ge.maxDumpOpNum"; + +// Graph run mode +enum GraphRunMode { PREDICTION = 0, TRAIN }; + +// Data description +struct DataDesc { + void *data = nullptr; // data address + uint32_t length = 0; // data size + bool isDataSupportMemShare = false; +}; + +// Input/Output shape description +struct ShapeDesc { + int64_t num = 0; + int64_t channel = 0; + int64_t height = 0; + int64_t width = 0; + std::vector dims; +}; + +// Input/Output tensor info +struct TensorInfo { + uint32_t dataType; // data type + DataDesc data; // tensor data + ShapeDesc shapeInfo; // tensor shape +}; +} // namespace ge + +#endif // INC_EXTERNAL_GE_GE_API_TYPES_H_ diff --git a/inc/external/graph/attr_value.h b/inc/external/graph/attr_value.h new file mode 100644 index 00000000..f9635e6a --- /dev/null +++ b/inc/external/graph/attr_value.h @@ -0,0 +1,73 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GRAPH_ATTR_VALUE_H_ +#define INC_EXTERNAL_GRAPH_ATTR_VALUE_H_ + +#include +#include +#include +#include + +#include "external/graph/ge_error_codes.h" + +using std::make_shared; +using std::map; +using std::pair; +using std::string; +using std::to_string; +using std::unique_ptr; +using std::vector; + +namespace ge { +class AttrValueImpl; +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY AttrValue { + public: + using INT = int64_t; + using FLOAT = float; + using STR = std::string; + + AttrValue(); + ~AttrValue() = default; + + // GetValue, not list type + template + graphStatus GetValue(DT &val) const { + T valGet; + auto status = GetValue(valGet); + if (status != GRAPH_SUCCESS) { + return status; + } + val = DT(valGet); + return GRAPH_SUCCESS; + } + + template + static T CreateFrom(DT &&val) { + return val; + } + + std::shared_ptr impl; + + private: +#define VALUE_SET_GET_DEC(DT) graphStatus GetValue(DT &val) const; + VALUE_SET_GET_DEC(AttrValue::STR) + VALUE_SET_GET_DEC(AttrValue::INT) + VALUE_SET_GET_DEC(AttrValue::FLOAT) +#undef VALUE_SET_GET_DEC +}; +} // namespace ge +#endif // INC_EXTERNAL_GRAPH_ATTR_VALUE_H_ diff --git a/inc/external/graph/ge_error_codes.h b/inc/external/graph/ge_error_codes.h new file mode 100644 index 00000000..d815a22d --- /dev/null +++ b/inc/external/graph/ge_error_codes.h @@ -0,0 +1,38 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GRAPH_GE_ERROR_CODES_H_ +#define INC_EXTERNAL_GRAPH_GE_ERROR_CODES_H_ + +namespace ge { +#ifdef HOST_VISIBILITY +#define GE_FUNC_HOST_VISIBILITY __attribute__((visibility("default"))) +#else +#define GE_FUNC_HOST_VISIBILITY +#endif +#ifdef DEV_VISIBILITY +#define GE_FUNC_DEV_VISIBILITY __attribute__((visibility("default"))) +#else +#define GE_FUNC_DEV_VISIBILITY +#endif + +using graphStatus = uint32_t; +const graphStatus GRAPH_FAILED = 0xFFFFFFFF; +const graphStatus GRAPH_SUCCESS = 0; +const graphStatus GRAPH_PARAM_INVALID = 50331649; +} // namespace ge + +#endif // INC_EXTERNAL_GRAPH_GE_ERROR_CODES_H_ diff --git a/inc/external/graph/graph.h b/inc/external/graph/graph.h new file mode 100644 index 00000000..6e074239 --- /dev/null +++ b/inc/external/graph/graph.h @@ -0,0 +1,77 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_EXTERNAL_GRAPH_GRAPH_H_ +#define INC_EXTERNAL_GRAPH_GRAPH_H_ + +#include +#include +#include +#include + +#include "external/graph/operator.h" + +namespace ge { +class GraphImpl; + +using GraphImplPtr = std::shared_ptr; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Graph { + friend class GraphUtils; + + public: + explicit Graph(const std::string &name); + + Graph() = default; + + ~Graph() = default; + + Graph &SetInputs(const std::vector &inputs); + + Graph &SetOutputs(const std::vector &outputs); + + Graph &SetOutputs(const std::vector>> &output_indexs); + + Graph &SetOutputs(const std::vector> &outputs); + + Graph &SetTargets(const std::vector &targets); + + bool IsValid() const; + + graphStatus AddOp(const ge::Operator &op); + + graphStatus FindOpByName(const string &name, ge::Operator &op) const; + + graphStatus GetAllOpName(std::vector &op_name) const; + + graphStatus SaveToFile(const string &file_name) const; + + graphStatus LoadFromFile(const string &file_name); + + /// + /// Set is need train iteration. + /// If set true, it means this graph need to be run iteration some + /// times(according variant "npu_runconfig/iterations_per_loop"). + /// @param need_iteration need_iteration:whether to set iteration or not + /// + void SetNeedIteration(bool need_iteration); + + private: + GraphImplPtr impl_{nullptr}; +}; +} // namespace ge + +#endif // INC_EXTERNAL_GRAPH_GRAPH_H_ diff --git a/inc/external/graph/inference_context.h b/inc/external/graph/inference_context.h new file mode 100644 index 00000000..993a4bf4 --- /dev/null +++ b/inc/external/graph/inference_context.h @@ -0,0 +1,79 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GRAPH_INFERENCE_CONTEXT_H_ +#define INC_EXTERNAL_GRAPH_INFERENCE_CONTEXT_H_ + +#include +#include +#include + +#include "external/graph/tensor.h" +#include "external/graph/types.h" + +namespace ge { +class InferenceContext; +using InferenceContextPtr = std::shared_ptr; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ShapeAndType { + public: + ShapeAndType() = default; + ~ShapeAndType() = default; + + ShapeAndType(const Shape &shape, DataType data_type); + + void SetShape(const Shape &shape); + + void SetType(DataType data_type); + + const Shape &GetShape() const; + + DataType GetDataType() const; + + private: + Shape shape_; + DataType data_type_ = DT_UNDEFINED; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY InferenceContext { + public: + InferenceContext() = default; + ~InferenceContext() = default; + InferenceContext(const InferenceContext &context) = delete; + InferenceContext(const InferenceContext &&context) = delete; + InferenceContext &operator=(const InferenceContext &context) = delete; + InferenceContext &operator=(const InferenceContext &&context) = delete; + + void SetInputHandleShapesAndTypes(std::vector> &&shapes_and_types); + + const std::vector> &GetInputHandleShapesAndTypes() const; + + const std::vector> &GetOutputHandleShapesAndTypes() const; + + void SetOutputHandleShapesAndTypes(const std::vector> &shapes_and_types); + void SetOutputHandleShapesAndTypes(std::vector> &&shapes_and_types); + + void SetMarks(const std::vector &marks); + const std::vector &GetMarks() const; + + 
private: + // For deliver to op in pair, help to support dynamic shape + std::vector marks_; + std::vector> input_handle_shapes_and_types_; + std::vector> output_handle_shapes_and_types_; +}; +} // namespace ge +#endif // INC_EXTERNAL_GRAPH_INFERENCE_CONTEXT_H_ diff --git a/inc/external/graph/operator.h b/inc/external/graph/operator.h new file mode 100644 index 00000000..4b84f074 --- /dev/null +++ b/inc/external/graph/operator.h @@ -0,0 +1,257 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GRAPH_OPERATOR_H_ +#define INC_EXTERNAL_GRAPH_OPERATOR_H_ + +#include +#include +#include +#include +#include + +#include "external/graph/ge_error_codes.h" +#include "external/graph//inference_context.h" +#include "external/graph//tensor.h" +#include "external/graph//usr_types.h" + +#ifndef USER_GE_LOGI +#define USER_GE_LOGI(...) +#endif // USER_GE_LOGI + +#ifndef USER_GE_LOGW +#define USER_GE_LOGW(...) +#endif // USER_GE_LOGW + +#ifndef USER_GE_LOGE +#define USER_GE_LOGE(...) 
+#endif // USER_GE_LOGE + +#define DYNAMIC_OUTPUT_TD_NUM(name) ("__dynamic_output_" + name + "_cnt") +#define DYNAMIC_INPUT_TD_NUM(name) ("__dynamic_input_" + name + "_cnt") + +namespace ge { +class OperatorImpl; + +class AttrValue; + +using OperatorImplPtr = std::shared_ptr; + +class OpIO; +using OutHandler = std::shared_ptr; +using InHandler = std::shared_ptr; + +using std::function; +using std::shared_ptr; +using std::string; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Operator { + public: + friend class OperatorImpl; + + friend class GraphBuilderImpl; + + using OpInt = int64_t; + using OpFloat = float; + using OpString = string; + using OpBool = bool; + using OpTensor = Tensor; + using OpType = ge::DataType; + using OpListInt = std::vector; + using OpListFloat = std::vector; + using OpListString = std::vector; + using OpListBool = std::vector; + using OpListTensor = std::vector; + using OpBytes = std::vector; + using OpListListInt = std::vector>; + using OpListType = std::vector; + + Operator() {} + + explicit Operator(const string &type); + + Operator(const string &name, const string &type); + + virtual ~Operator() = default; + + bool IsEmpty() const; + + string GetName() const; + + string GetOpType() const; + + // Only has one output index = 0 + Operator &SetInput(const string &dst_name, const Operator &src_oprt); + + Operator &SetInput(const string &dst_name, const Operator &src_oprt, const string &name); + + Operator &AddControlInput(const Operator &src_oprt); + + graphStatus GetInputConstData(const string &dst_name, Tensor &data) const; + + TensorDesc GetInputDesc(const string &name) const; + + TensorDesc GetInputDesc(uint32_t index) const; + + int GetDynamicOutputNum(const string &name) const; + + int GetDynamicInputNum(const string &name) const; + + graphStatus TryGetInputDesc(const string &name, TensorDesc &tensor_desc) const; + + graphStatus UpdateInputDesc(const string &name, const TensorDesc &tensor_desc); + + TensorDesc 
GetOutputDesc(const string &name) const; + + TensorDesc GetOutputDesc(uint32_t index) const; + + graphStatus UpdateOutputDesc(const string &name, const TensorDesc &tensor_desc); + + TensorDesc GetDynamicInputDesc(const string &name, uint32_t index) const; + + graphStatus UpdateDynamicInputDesc(const string &name, uint32_t index, const TensorDesc &tensor_desc); + + TensorDesc GetDynamicOutputDesc(const string &name, uint32_t index) const; + + graphStatus UpdateDynamicOutputDesc(const string &name, uint32_t index, const TensorDesc &tensor_desc); + + graphStatus InferShapeAndType(); + + void SetInferenceContext(const InferenceContextPtr &inference_context); + InferenceContextPtr GetInferenceContext() const; + + graphStatus VerifyAllAttr(bool disable_common_verifier = false); + + size_t GetInputsSize() const; + + size_t GetOutputsSize() const; + + const std::map GetAllAttrNamesAndTypes() const; + + Operator &SetAttr(const string &name, int64_t attr_value); + Operator &SetAttr(const string &name, int32_t attr_value); + Operator &SetAttr(const string &name, uint32_t attr_value); + graphStatus GetAttr(const string &name, int64_t &attr_value) const; + graphStatus GetAttr(const string &name, int32_t &attr_value) const; + graphStatus GetAttr(const string &name, uint32_t &attr_value) const; + Operator &SetAttr(const string &name, const std::vector &attr_value); + Operator &SetAttr(const string &name, const std::vector &attr_value); + Operator &SetAttr(const string &name, const std::vector &attr_value); + Operator &SetAttr(const string &name, std::initializer_list &&attr_value); + graphStatus GetAttr(const string &name, std::vector &attr_value) const; + graphStatus GetAttr(const string &name, std::vector &attr_value) const; + graphStatus GetAttr(const string &name, std::vector &attr_value) const; + + Operator &SetAttr(const string &name, float attr_value); + graphStatus GetAttr(const string &name, float &attr_value) const; + Operator &SetAttr(const string &name, const 
std::vector &attr_value); + graphStatus GetAttr(const string &name, std::vector &attr_value) const; + Operator &SetAttr(const string &name, AttrValue &&attr_value); + graphStatus GetAttr(const string &name, AttrValue &attr_value) const; + + Operator &SetAttr(const string &name, const string &attr_value); + graphStatus GetAttr(const string &name, string &attr_value) const; + Operator &SetAttr(const string &name, const std::vector &attr_value); + graphStatus GetAttr(const string &name, std::vector &attr_value) const; + + Operator &SetAttr(const string &name, bool attr_value); + graphStatus GetAttr(const string &name, bool &attr_value) const; + Operator &SetAttr(const string &name, const std::vector &attr_value); + graphStatus GetAttr(const string &name, std::vector &attr_value) const; + + Operator &SetAttr(const string &name, const Tensor &attr_value); + graphStatus GetAttr(const string &name, Tensor &attr_value) const; + Operator &SetAttr(const string &name, const std::vector &attr_value); + graphStatus GetAttr(const string &name, std::vector &attr_value) const; + + // Bytes type + Operator &SetAttr(const string &name, const OpBytes &attr_value); + // Bytes type + graphStatus GetAttr(const string &name, OpBytes &attr_value) const; + + Operator &SetAttr(const string &name, const UsrQuantizeFactorParams &attr_value); + graphStatus GetAttr(const string &name, UsrQuantizeFactorParams &attr_value) const; + + Operator &SetAttr(const string &name, const std::vector> &attr_value); + graphStatus GetAttr(const string &name, std::vector> &attr_value) const; + + Operator &SetAttr(const string &name, const std::vector &attr_value); + graphStatus GetAttr(const string &name, std::vector &attr_value) const; + + Operator &SetAttr(const string &name, const ge::DataType &attr_value); + graphStatus GetAttr(const string &name, ge::DataType &attr_value) const; + + void BreakConnect() const; + + protected: + void AttrRegister(const string &name, float attr_value); + void 
AttrRegister(const string &name, const std::vector &attr_value); + void AttrRegister(const string &name, int64_t attr_value); + void AttrRegister(const string &name, const std::vector &attr_value); + void AttrRegister(const string &name, const string &attr_value); + void AttrRegister(const string &name, const std::vector &attr_value); + void AttrRegister(const string &name, bool attr_value); + void AttrRegister(const string &name, const std::vector &attr_value); + void AttrRegister(const string &name, const Tensor &attr_value); + void AttrRegister(const string &name, const std::vector &attr_value); + void AttrRegister(const string &name, const OpBytes &attr_value); + void AttrRegister(const string &name, const std::vector> &attr_value); + void AttrRegister(const string &name, const std::vector &attr_value); + void AttrRegister(const string &name, const ge::DataType &attr_value); + + explicit Operator(OperatorImplPtr &&op_impl); + + void InputRegister(const string &name); + + void OptionalInputRegister(const string &name); + + void InferFuncRegister(const std::function &func); + + void VerifierFuncRegister(const std::function &func); + + void InferFormatFuncRegister(const std::function &func); + + void OutputRegister(const string &name); + + void DynamicInputRegister(const string &name, const unsigned int num, bool is_push_back = true); + + void DynamicOutputRegister(const string &name, const unsigned int num, bool is_push_back = true); + + void RequiredAttrRegister(const string &name); + + graphStatus VerifyAll(); + + // Only has one output index = 0 + Operator &SetInput(const string &dst_name, uint32_t dst_index, + const Operator &src_oprt); + + Operator &SetInput(const string &dst_name, uint32_t dst_index, const Operator &src_oprt, + const string &name); + + private: + Operator &SetInput(const string &dst_name, const OutHandler &out_handler); + + OutHandler GetOutput(const string &name) const; + + OperatorImplPtr GetOperatorImplPtr() const; + + OperatorImplPtr 
operator_impl_{nullptr}; + + graphStatus GetInputConstDataOut(const string &dst_name, Tensor &data) const; +}; +} // namespace ge + +#endif // INC_EXTERNAL_GRAPH_OPERATOR_H_ diff --git a/inc/external/graph/operator_factory.h b/inc/external/graph/operator_factory.h new file mode 100644 index 00000000..0078b904 --- /dev/null +++ b/inc/external/graph/operator_factory.h @@ -0,0 +1,68 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_EXTERNAL_GRAPH_OPERATOR_FACTORY_H_ +#define INC_EXTERNAL_GRAPH_OPERATOR_FACTORY_H_ + +#include +#include +#include +#include + +#include "external/graph//operator.h" +#include "external/graph/ge_error_codes.h" + +namespace ge { +using OpCreator = std::function; +using InferShapeFunc = std::function; +using InferFormatFunc = std::function; +using VerifyFunc = std::function; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OperatorFactory { + public: + static Operator CreateOperator(const std::string &operator_name, const std::string &operator_type); + + static graphStatus GetOpsTypeList(std::vector &all_ops); + + static bool IsExistOp(const string &operator_type); +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OperatorCreatorRegister { + public: + OperatorCreatorRegister(const string &operator_type, OpCreator const &op_creator); + ~OperatorCreatorRegister() = default; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY InferShapeFuncRegister { + public: + InferShapeFuncRegister(const std::string &operator_type, const InferShapeFunc &infer_shape_func); + ~InferShapeFuncRegister() = default; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY InferFormatFuncRegister { + public: + InferFormatFuncRegister(const std::string &operator_type, const InferFormatFunc &infer_format_func); + ~InferFormatFuncRegister() = default; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY VerifyFuncRegister { + public: + VerifyFuncRegister(const std::string &operator_type, const VerifyFunc &verify_func); + ~VerifyFuncRegister() = default; +}; +} // namespace ge + +#endif // INC_EXTERNAL_GRAPH_OPERATOR_FACTORY_H_ diff --git a/inc/external/graph/operator_reg.h b/inc/external/graph/operator_reg.h new file mode 100644 index 00000000..85f8db03 --- /dev/null +++ b/inc/external/graph/operator_reg.h @@ -0,0 +1,313 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GRAPH_OPERATOR_REG_H_ +#define INC_EXTERNAL_GRAPH_OPERATOR_REG_H_ + +#include +#include +#include +#include + +#include "external/graph/operator.h" +#include "external/graph/operator_factory.h" +#include "external/graph/tensor.h" +#include "external/graph/types.h" + +namespace ge { +using std::function; +using std::string; +using std::vector; + +class OpReg { + public: + OpReg &N() { return *this; } + + OpReg &ATTR() { return *this; } + + OpReg &REQUIRED_ATTR() { return *this; } + + OpReg &INPUT() { return *this; } + + OpReg &OPTIONAL_INPUT() { return *this; } + + OpReg &OUTPUT() { return *this; } + + OpReg &INFER_SHAPE_AND_TYPE() { return *this; } +}; + +#define REG_OP(x) \ + namespace op { \ + class x : public Operator { \ + typedef x _THIS_TYPE; \ + \ + public: \ + explicit x(const string &name) : Operator(name, #x) { __##x(); } \ + x() : Operator(#x) { __##x(); } \ + \ + private: \ + void __##x() { \ + OpReg() + +#define ATTR(x, Type, ...) 
\ + N(); \ + __attr_##x(); \ + } \ + \ + public: \ + static const string name_attr_##x() { return #x; } \ + Op##Type get_attr_##x() const { \ + Op##Type ret = __VA_ARGS__; \ + if (Operator::GetAttr(#x, ret) == GRAPH_FAILED) { \ + return ret; \ + } \ + return ret; \ + } \ + _THIS_TYPE &set_attr_##x(const Op##Type &v) { \ + Operator::SetAttr(#x, v); \ + return *this; \ + } \ + _THIS_TYPE &set_attr_##x(const function &v) { return *this; } \ + \ + private: \ + void __attr_##x() { \ + Operator::AttrRegister(#x, Op##Type(__VA_ARGS__)); \ + string attr_name(#x); \ + (void)OpReg() + +#define REQUIRED_ATTR(x, Type) \ + N(); \ + __required_attr_##x(); \ + } \ + \ + public: \ + static const string name_attr_##x() { return #x; } \ + Op##Type get_attr_##x() const { \ + Op##Type ret; \ + if (Operator::GetAttr(#x, ret) == GRAPH_FAILED) { \ + return ret; \ + } \ + return ret; \ + } \ + _THIS_TYPE &set_attr_##x(const Op##Type &v) { \ + Operator::SetAttr(#x, v); \ + return *this; \ + } \ + _THIS_TYPE &set_attr_##x(const function &v) { return *this; } \ + \ + private: \ + void __required_attr_##x() { \ + Operator::RequiredAttrRegister(#x); \ + string attr_name(#x); \ + (void)OpReg() + +#define INPUT(x, t) \ + N(); \ + __input_##x(); \ + } \ + \ + public: \ + static const string name_in_##x() { return #x; } \ + _THIS_TYPE &set_input_##x(Operator &v, const string &srcName) { \ + Operator::SetInput(#x, v, srcName); \ + return *this; \ + } \ + _THIS_TYPE &set_input_##x(Operator &v) { \ + Operator::SetInput(#x, v); \ + return *this; \ + } \ + TensorDesc get_input_desc_##x() const { return Operator::GetInputDesc(#x); } \ + graphStatus update_input_desc_##x(const TensorDesc &tensorDesc) { \ + return Operator::UpdateInputDesc(#x, tensorDesc); \ + } \ + \ + private: \ + void __input_##x() { \ + Operator::InputRegister(#x); \ + (void)OpReg() + +#define OPTIONAL_INPUT(x, t) \ + N(); \ + __optional_input_##x(); \ + } \ + \ + public: \ + static const string name_in_##x() { return #x; } \ + 
_THIS_TYPE &set_input_##x(Operator &v) { \ + Operator::SetInput(#x, v); \ + return *this; \ + } \ + _THIS_TYPE &set_input_##x(Operator &v, const string &srcName) { \ + Operator::SetInput(#x, v, srcName); \ + return *this; \ + } \ + TensorDesc get_input_desc_##x() const { return Operator::GetInputDesc(#x); } \ + graphStatus update_input_desc_##x(const TensorDesc &tensorDesc) { \ + return Operator::UpdateInputDesc(#x, tensorDesc); \ + } \ + \ + private: \ + void __optional_input_##x() { \ + Operator::OptionalInputRegister(#x); \ + (void)OpReg() + +#define OUTPUT(x, t) \ + N(); \ + __out_##x(); \ + } \ + \ + public: \ + static const string name_out_##x() { return #x; } \ + TensorDesc get_output_desc_##x() const { return Operator::GetOutputDesc(#x); } \ + graphStatus update_output_desc_##x(const TensorDesc &tensorDesc) { \ + return Operator::UpdateOutputDesc(#x, tensorDesc); \ + } \ + \ + private: \ + void __out_##x() { \ + Operator::OutputRegister(#x); \ + (void)OpReg() + +#define DYNAMIC_INPUT(x, t) \ + N(); \ + __dy_input_##x(); \ + } \ + \ + public: \ + _THIS_TYPE &create_dynamic_input_##x(unsigned int num, bool isPushBack = true) { \ + Operator::DynamicInputRegister(#x, num, isPushBack); \ + return *this; \ + } \ + TensorDesc get_dynamic_input_desc_##x(unsigned int index) const { return Operator::GetDynamicInputDesc(#x, index); } \ + graphStatus update_dynamic_input_desc_##x(unsigned int index, const TensorDesc &tensorDesc) { \ + return Operator::UpdateDynamicInputDesc(#x, index, tensorDesc); \ + } \ + _THIS_TYPE &set_dynamic_input_##x(unsigned int dstIndex, Operator &v) { \ + Operator::SetInput(#x, dstIndex, v); \ + return *this; \ + } \ + _THIS_TYPE &set_dynamic_input_##x(unsigned int dstIndex, Operator &v, const string &srcName) { \ + Operator::SetInput(#x, dstIndex, v, srcName); \ + return *this; \ + } \ + \ + private: \ + void __dy_input_##x() { \ + (void)OpReg() + +#define DYNAMIC_OUTPUT(x, t) \ + N(); \ + __dy_output_##x(); \ + } \ + \ + public: \ + 
_THIS_TYPE &create_dynamic_output_##x(unsigned int num, bool isPushBack = true) { \ + Operator::DynamicOutputRegister(#x, num, isPushBack); \ + return *this; \ + } \ + TensorDesc get_dynamic_output_desc_##x(unsigned int index) const { \ + return Operator::GetDynamicOutputDesc(#x, index); \ + } \ + graphStatus update_dynamic_output_desc_##x(unsigned int index, const TensorDesc &tensorDesc) { \ + return Operator::UpdateDynamicOutputDesc(#x, index, tensorDesc); \ + } \ + \ + private: \ + void __dy_output_##x() { \ + (void)OpReg() + +#define PASTE(g_register, y) g_register##y +#define __OP_END_IMPL__(x, y) \ + N(); \ + } \ + static_assert( \ + std::is_same::value, \ + "The class name entered into the OP_END_FACTORY_REG needs to be the same as the operator name you define."); \ + } \ + ; \ + static const OperatorCreatorRegister PASTE(g_register, y)(#x, [](const std::string &name) { return x(name); }); \ + } +#define OP_END_FACTORY_REG(x) __OP_END_IMPL__(x, __COUNTER__) + +// Specialized shape inferencer macro + +#define IMPLEMT_INFERFUNC(op_name, func_name) \ + GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY static graphStatus func_name(op::op_name &op) + +#define IMPLEMT_COMMON_INFERFUNC(func_name) \ + GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY static graphStatus func_name(Operator &op) + +#define IMPLEMT_INFERFORMAT_FUNC(op_name, func_name) \ + GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY static graphStatus func_name(op::op_name &op) + +// Specialized verifier macro + +#define IMPLEMT_VERIFIER(op_name, func_name) \ + GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY static graphStatus func_name(op::op_name op) + +#define INFER_VERIFY_FUNC(op_name, x) [&](Operator &v) { return x((op::op_name &)v); } + +#define COMMON_INFER_VERIFY_FUNC(x) [&](Operator &v) { return x(v); } + +#define INFER_FORMAT_FUNC(op_name, x) [&](Operator &v) { return x((op::op_name &)v); } + +#define __INFER_FUNC_REG_IMPL__(op_name, x, n) static const InferShapeFuncRegister PASTE(if_register, 
n)(#op_name, x) + +#define __VERIFY_FUNC_REG_IMPL__(op_name, x, n) static const VerifyFuncRegister PASTE(vf_register, n)(#op_name, x) +// Infer format func register +#define __INFER_FORMAT_FUNC_REG_IMPL__(op_name, x, n) \ + static const InferFormatFuncRegister PASTE(ff_register, n)(#op_name, x) + +// Shape inferencer & verifier register macro + +#define INFER_FUNC_REG(op_name, x) __INFER_FUNC_REG_IMPL__(op_name, INFER_VERIFY_FUNC(op_name, x), __COUNTER__) + +#define COMMON_INFER_FUNC_REG(op_name, x) __INFER_FUNC_REG_IMPL__(op_name, COMMON_INFER_VERIFY_FUNC(x), __COUNTER__) + +#define VERIFY_FUNC_REG(op_name, x) __VERIFY_FUNC_REG_IMPL__(op_name, INFER_VERIFY_FUNC(op_name, x), __COUNTER__) + +// Infer format func reg +#define INFER_FORMAT_FUNC_REG(op_name, x) \ + __INFER_FORMAT_FUNC_REG_IMPL__(op_name, INFER_FORMAT_FUNC(op_name, x), __COUNTER__) + +// Common shape inferencer + +#define ELMTWISE_INFER_SHAPEANDTYPE(in_name, out_name) \ + [](Operator op)->graphStatus { \ + auto x_shape = op.GetInputDesc(in_name).GetShape().GetDims(); \ + auto x_type = op.GetInputDesc(in_name).GetDataType(); \ + TensorDesc op_output_desc = op.GetOutputDesc(out_name); \ + op_output_desc.SetShape(ge::Shape(x_shape)); \ + op_output_desc.SetDataType(x_type); \ + return op.UpdateOutputDesc(out_name, op_output_desc); \ + } + +graphStatus BroadCastInfer(const function()> &get_in1_shape, + const function()> &get_in2_shape, + const function &y_shape)> &set_out_shape); + +#define BROADCAST_INFER(in1_name, in2_name, out_name) \ + [](Operator op)->graphStatus { \ + return BroadCastInfer([&]() { return op.GetInputDesc(in1_name).GetShape().GetDims(); }, \ + [&]() { return op.GetInputDesc(in2_name).GetShape().GetDims(); }, \ + [&](const vector &y_shape) { \ + TensorDesc op_output_desc = op.GetOutputDesc(out_name); \ + op_output_desc.SetShape(ge::Shape(y_shape)); \ + (void)op.UpdateOutputDesc(out_name, op_output_desc); \ + }); \ + } +} // namespace ge +#endif // INC_EXTERNAL_GRAPH_OPERATOR_REG_H_ diff 
--git a/inc/external/graph/tensor.h b/inc/external/graph/tensor.h new file mode 100644 index 00000000..a7f4a877 --- /dev/null +++ b/inc/external/graph/tensor.h @@ -0,0 +1,123 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GRAPH_TENSOR_H_ +#define INC_EXTERNAL_GRAPH_TENSOR_H_ + +#include +#include +#include +#include + +#include "external/graph/ge_error_codes.h" +#include "external/graph/types.h" + +namespace ge { +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Shape { + public: + Shape() = default; + ~Shape() = default; + explicit Shape(const std::vector &dims); + + size_t GetDimNum() const; + // If the idx is invalid, return 0 + int64_t GetDim(size_t idx) const; + graphStatus SetDim(size_t idx, int64_t value); + std::vector GetDims() const; + int64_t GetShapeSize() const; + + private: + std::vector dims_; +}; + +class TensorDescImpl; +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY TensorDesc { + public: + TensorDesc(); + ~TensorDesc() = default; + explicit TensorDesc(Shape shape, Format format = FORMAT_ND, DataType dt = DT_FLOAT); + // Copy + TensorDesc(const TensorDesc &desc); + // Move + TensorDesc(TensorDesc &&desc); + // Copy + TensorDesc &operator=(const TensorDesc &desc); + // Move + TensorDesc &operator=(TensorDesc &&desc); + + void Update(const Shape &shape, Format format = FORMAT_ND, DataType dt = DT_FLOAT); + Shape GetShape() const; + void 
SetShape(const Shape &shape); + + Format GetFormat() const; + void SetFormat(Format format); + + Shape GetOriginShape() const; + void SetOriginShape(const Shape &origin_shape); + + Format GetOriginFormat() const; + void SetOriginFormat(Format origin_format); + + DataType GetDataType() const; + void SetDataType(DataType dt); + + std::string GetName() const; + void SetName(const std::string &name); + + // Attr acess + void SetSize(int64_t size); + int64_t GetSize() const; + + int64_t GetRealDimCnt() const; + void SetRealDimCnt(const int64_t real_dim_cnt); + + private: + std::shared_ptr impl; +}; + +class TensorImpl; +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Tensor { + public: + Tensor(); + ~Tensor() = default; + explicit Tensor(const TensorDesc &tensorDesc); + Tensor(const TensorDesc &tensorDesc, const std::vector &data); + Tensor(const TensorDesc &tensorDesc, const uint8_t *data, size_t size); + Tensor(TensorDesc &&tensorDesc, std::vector &&data); + + TensorDesc GetTensorDesc() const; + graphStatus SetTensorDesc(const TensorDesc &tensorDesc); + + const uint8_t *GetData() const; + uint8_t *GetData(); + size_t GetSize() const; + + graphStatus SetData(std::vector &&data); + graphStatus SetData(const std::vector &data); + graphStatus SetData(const uint8_t *data, size_t size); + graphStatus SetData(const std::string &data); + graphStatus SetData(const std::vector &data); + graphStatus IsValid(); + + Tensor Clone() const; + + private: + std::shared_ptr impl; + friend class TensorAdapter; +}; +} // namespace ge + +#endif // INC_EXTERNAL_GRAPH_TENSOR_H_ diff --git a/inc/external/graph/types.h b/inc/external/graph/types.h new file mode 100755 index 00000000..d8c60ae0 --- /dev/null +++ b/inc/external/graph/types.h @@ -0,0 +1,224 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GRAPH_TYPES_H_ +#define INC_EXTERNAL_GRAPH_TYPES_H_ + +#include +#include +#include + +namespace ge { +static const int64_t UNKNOWN_DIM = -1; +static const std::vector UNKNOWN_SHAPE = {0}; + +#ifdef HOST_VISIBILITY +#define GE_FUNC_HOST_VISIBILITY __attribute__((visibility("default"))) +#else +#define GE_FUNC_HOST_VISIBILITY +#endif +#ifdef DEV_VISIBILITY +#define GE_FUNC_DEV_VISIBILITY __attribute__((visibility("default"))) +#else +#define GE_FUNC_DEV_VISIBILITY +#endif + +enum DataType { + DT_FLOAT = 0, // float type + DT_FLOAT16 = 1, // fp16 type + DT_INT8 = 2, // int8 type + DT_INT16 = 6, // int16 type + DT_UINT16 = 7, // uint16 type + DT_UINT8 = 4, // uint8 type + DT_INT32 = 3, // + DT_INT64 = 9, // int64 type + DT_UINT32 = 8, // unsigned int32 + DT_UINT64 = 10, // unsigned int64 + DT_BOOL = 12, // bool type + DT_DOUBLE = 11, // double type + DT_STRING = 13, // string type + DT_DUAL_SUB_INT8 = 14, // dual output int8 type + DT_DUAL_SUB_UINT8 = 15, // dual output uint8 type + DT_COMPLEX64 = 16, // complex64 type + DT_COMPLEX128 = 17, // complex128 type + DT_QINT8 = 18, // qint8 type + DT_QINT16 = 19, // qint16 type + DT_QINT32 = 20, // qint32 type + DT_QUINT8 = 21, // quint8 type + DT_QUINT16 = 22, // quint16 type + DT_RESOURCE = 23, // resource type + DT_STRING_REF = 24, // string ref type + DT_DUAL = 25, // dual output type + DT_UNDEFINED // Used to indicate a DataType field has not been set. 
+}; + +inline int GetSizeByDataType(DataType data_type) { + static int data_type_size[DT_UNDEFINED] = { + 4, // DT_FLOAT = 0, float type + 2, // DT_FLOAT16 = 1, fp16 type + 1, // DT_INT8 = 2, int8 type + 4, // DT_INT32 = 3, + 1, // DT_UINT8 = 4, uint8 type + -1, + 2, // DT_INT16 = 6, int16 type + 2, // DT_UINT16 = 7, uint16 type + 4, // DT_UINT32 = 8, unsigned int32 + 8, // DT_INT64 = 9, int64 type + 8, // DT_UINT64 = 10, unsigned int64 + 8, // DT_DOUBLE = 11, double type + 1, // DT_BOOL = 12, bool type + -1, // DT_STRING = 13, string type + 1, // DT_DUAL_SUB_INT8 = 14, dual output int8 type + 1, // DT_DUAL_SUB_UINT8 = 15, dual output uint8 type + 8, // DT_COMPLEX64 = 16, complex64 type + 16, // DT_COMPLEX128 = 17, complex128 type + 1, // DT_QINT8 = 18, qint8 type + 2, // DT_QINT16 = 19, qint16 type + 4, // DT_QINT32 = 20, qint32 type + 1, // DT_QUINT8 = 21, quint8 type + 2, // DT_QUINT16 = 22, quint16 type + -1, // DT_RESOURCE = 23, resource type + -1, // DT_STRING_REF = 24, string ref type + 5, // DT_DUAL = 25, dual output type (float + int8) + // DT_UNDEFINED Used to indicate a DataType field has not been set. 
+ }; + if (data_type >= DT_UNDEFINED) { + return -1; + } + return data_type_size[data_type]; +} + +enum Format { + FORMAT_NCHW = 0, // NCHW + FORMAT_NHWC, // NHWC + FORMAT_ND, // Nd Tensor + FORMAT_NC1HWC0, // NC1HWC0 + FORMAT_FRACTAL_Z, // FRACTAL_Z + FORMAT_NC1C0HWPAD, + FORMAT_NHWC1C0, + FORMAT_FSR_NCHW, + FORMAT_FRACTAL_DECONV, + FORMAT_C1HWNC0, + FORMAT_FRACTAL_DECONV_TRANSPOSE, + FORMAT_FRACTAL_DECONV_SP_STRIDE_TRANS, + FORMAT_NC1HWC0_C04, // NC1HWC0, C0 =4 + FORMAT_FRACTAL_Z_C04, // FRACZ, C0 =4 + FORMAT_CHWN, + FORMAT_FRACTAL_DECONV_SP_STRIDE8_TRANS, + FORMAT_HWCN, + FORMAT_NC1KHKWHWC0, // KH,KW kernel h& kernel w maxpooling max output format + FORMAT_BN_WEIGHT, + FORMAT_FILTER_HWCK, // filter input tensor format + FORMAT_HASHTABLE_LOOKUP_LOOKUPS = 20, + FORMAT_HASHTABLE_LOOKUP_KEYS, + FORMAT_HASHTABLE_LOOKUP_VALUE, + FORMAT_HASHTABLE_LOOKUP_OUTPUT, + FORMAT_HASHTABLE_LOOKUP_HITS = 24, + FORMAT_C1HWNCoC0, + FORMAT_MD, + FORMAT_NDHWC, + FORMAT_FRACTAL_ZZ, + FORMAT_FRACTAL_NZ, + FORMAT_NCDHW, + FORMAT_DHWCK, // 3D filter input tensor format + FORMAT_NDC1HWC0, + FORMAT_FRACTAL_Z_3D, + FORMAT_CN, + FORMAT_NC, + FORMAT_RESERVED, + FORMAT_ALL +}; + +struct TensorDescInfo { + Format format_ = FORMAT_RESERVED; // tbe op register support format + DataType dataType_ = DT_UNDEFINED; // tbe op register support datatype +}; + +enum DeviceType { + NPU = 0, + CPU = 1, +}; + +struct TensorType { + explicit TensorType(DataType dt) { dt_vec_.push_back(dt); } + + TensorType(const std::initializer_list &types) { dt_vec_ = types; } + + static TensorType ALL() { + return TensorType{DT_BOOL, DT_COMPLEX128, DT_COMPLEX64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16, DT_INT16, + DT_INT32, DT_INT64, DT_INT8, DT_QINT16, DT_QINT32, DT_QINT8, DT_QUINT16, + DT_QUINT8, DT_RESOURCE, DT_STRING, DT_UINT16, DT_UINT32, DT_UINT64, DT_UINT8}; + } + + static TensorType QuantifiedType() { return TensorType{DT_QINT16, DT_QINT32, DT_QINT8, DT_QUINT16, DT_QUINT8}; } + + static TensorType OrdinaryType() { + 
return TensorType{DT_BOOL, DT_COMPLEX128, DT_COMPLEX64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16, DT_INT16, + DT_INT32, DT_INT64, DT_INT8, DT_UINT16, DT_UINT32, DT_UINT64, DT_UINT8}; + } + + static TensorType BasicType() { + return TensorType{DT_COMPLEX128, DT_COMPLEX64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16, DT_INT16, + DT_INT32, DT_INT64, DT_INT8, DT_QINT16, DT_QINT32, DT_QINT8, + DT_QUINT16, DT_QUINT8, DT_UINT16, DT_UINT32, DT_UINT64, DT_UINT8}; + } + + static TensorType NumberType() { + return TensorType{DT_COMPLEX128, DT_COMPLEX64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16, DT_INT16, DT_INT32, DT_INT64, + DT_INT8, DT_QINT32, DT_QINT8, DT_QUINT8, DT_UINT16, DT_UINT32, DT_UINT64, DT_UINT8}; + } + + static TensorType RealNumberType() { + return TensorType{DT_DOUBLE, DT_FLOAT, DT_FLOAT16, DT_INT16, DT_INT32, DT_INT64, + DT_INT8, DT_UINT16, DT_UINT32, DT_UINT64, DT_UINT8}; + } + + static TensorType ComplexDataType() { return TensorType{DT_COMPLEX128, DT_COMPLEX64}; } + + static TensorType IntegerDataType() { + return TensorType{DT_INT16, DT_INT32, DT_INT64, DT_INT8, DT_UINT16, DT_UINT32, DT_UINT64, DT_UINT8}; + } + + static TensorType SignedDataType() { return TensorType{DT_INT16, DT_INT32, DT_INT64, DT_INT8}; } + + static TensorType UnsignedDataType() { return TensorType{DT_UINT16, DT_UINT32, DT_UINT64, DT_UINT8}; } + + static TensorType FloatingDataType() { return TensorType{DT_DOUBLE, DT_FLOAT, DT_FLOAT16}; } + + static TensorType IndexNumberType() { return TensorType{DT_INT32, DT_INT64}; } + + static TensorType UnaryDataType() { return TensorType{DT_COMPLEX128, DT_COMPLEX64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16}; } + + static TensorType FLOAT() { return TensorType{DT_FLOAT, DT_FLOAT16}; } + + std::vector dt_vec_; +}; +} // namespace ge + +namespace domi { +enum class ImplyType : unsigned int { + BUILDIN = 0, // Built in operator, normally executed by OME + TVM, // Compile to TVM bin file for execution + CUSTOM, // User defined calculation logic, executed by CPU + AI_CPU, // AICPU + CCE, // 
Cce + GELOCAL, // GE local, do node need execute by device + HCCL, // Hccl + INVALID = 0xFFFFFFFF, +}; +} // namespace domi + +#endif // INC_EXTERNAL_GRAPH_TYPES_H_ diff --git a/inc/external/graph/usr_types.h b/inc/external/graph/usr_types.h new file mode 100644 index 00000000..64c036bd --- /dev/null +++ b/inc/external/graph/usr_types.h @@ -0,0 +1,134 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_GRAPH_USR_TYPES_H_ +#define INC_EXTERNAL_GRAPH_USR_TYPES_H_ + +#include +#include +#include + +namespace ge { +#define USR_TYPE_DEC(type, name) \ + inline void set_##name(const type &value) { name = value; } \ + type *mutable_##name() { return &name; } + +#define USR_TYPE_HAS_DEC(type, name) \ + inline void set_##name(const type &value) { name = value; } \ + \ + private: \ + bool has_mutable_##name{false}; \ + \ + public: \ + bool has_##name() const { return (has_mutable_##name) || QuantizeFactorHasData(name); } \ + type *mutable_##name() { \ + has_mutable_##name = true; \ + return &name; \ + } + +#define USR_TYPE_BYTES_DEC(name) \ + inline void clear_##name() { name.clear(); } \ + inline void set_##name(const void *value, size_t size) { \ + name.assign(reinterpret_cast(const_cast(value)), \ + reinterpret_cast(const_cast(value)) + size); \ + } + +enum UsrQuantizeScaleType { USR_VECTOR_SCALE = 0, USR_SCALAR_SCALE = 1 }; +enum UsrQuantizeScaleMode { USR_NORMAL_MODE = 0, 
USR_SQRT_MODE = 1 }; +enum UsrQuantizeAlgorithm { + USR_NON_OFFSET_ALGO = 0, + USR_HALF_OFFSET_ALGO = 1, + USR_ALL_OFFSET_ALGO = 2, +}; + +struct UsrQuantizeFactor { + public: + // QuantizeScaleMode scale_mode; + UsrQuantizeScaleMode scale_mode{USR_NORMAL_MODE}; + std::vector scale_value; + int64_t scale_offset{0}; + std::vector offset_data_value; + int64_t offset_data_offset{0}; + std::vector offset_weight_value; + int64_t offset_weight_offset{0}; + std::vector offset_pad_value; + int64_t offset_pad_offset{0}; + + USR_TYPE_DEC(UsrQuantizeScaleMode, scale_mode); + USR_TYPE_BYTES_DEC(scale_value); + + USR_TYPE_DEC(int64_t, scale_offset); + USR_TYPE_BYTES_DEC(offset_data_value); + USR_TYPE_DEC(int64_t, offset_data_offset); + + USR_TYPE_BYTES_DEC(offset_weight_value); + USR_TYPE_DEC(int64_t, offset_weight_offset); + USR_TYPE_BYTES_DEC(offset_pad_value); + USR_TYPE_DEC(int64_t, offset_pad_offset); +}; + +static inline bool QuantizeFactorHasData(const UsrQuantizeFactor &factor) { + return factor.scale_value.size() > 0 || factor.offset_data_value.size() > 0 || + factor.offset_weight_value.size() > 0 || factor.offset_pad_value.size() > 0; +} + +struct UsrQuantizeCalcFactor { + public: + std::vector offsetw; + int64_t offsetw_offset{0}; + std::vector offsetd; + int64_t offsetd_offset{0}; + std::vector scalereq; + int64_t scaledreq_offset{0}; + std::vector offsetdnext; + int64_t offsetdnext_offset{0}; + + USR_TYPE_BYTES_DEC(offsetw); + USR_TYPE_DEC(int64_t, offsetw_offset); + USR_TYPE_BYTES_DEC(offsetd); + USR_TYPE_DEC(int64_t, offsetd_offset); + USR_TYPE_BYTES_DEC(scalereq); + USR_TYPE_DEC(int64_t, scaledreq_offset); + USR_TYPE_BYTES_DEC(offsetdnext); + USR_TYPE_DEC(int64_t, offsetdnext_offset); +}; + +static inline bool QuantizeFactorHasData(const UsrQuantizeCalcFactor &factor) { + return factor.offsetw.size() > 0 || factor.offsetd.size() > 0 || factor.scalereq.size() > 0 || + factor.offsetdnext.size() > 0; +} + +struct UsrQuantizeFactorParams { + UsrQuantizeAlgorithm 
quantize_algo{USR_NON_OFFSET_ALGO}; + UsrQuantizeScaleType scale_type{USR_VECTOR_SCALE}; + UsrQuantizeFactor quantize_param; + UsrQuantizeFactor dequantize_param; + UsrQuantizeFactor requantize_param; + UsrQuantizeCalcFactor quantizecalc_param; + USR_TYPE_DEC(UsrQuantizeAlgorithm, quantize_algo); + USR_TYPE_DEC(UsrQuantizeScaleType, scale_type); + USR_TYPE_HAS_DEC(UsrQuantizeFactor, quantize_param); + USR_TYPE_HAS_DEC(UsrQuantizeFactor, dequantize_param); + USR_TYPE_HAS_DEC(UsrQuantizeFactor, requantize_param); + USR_TYPE_HAS_DEC(UsrQuantizeCalcFactor, quantizecalc_param); +}; + +#undef USR_TYPE_DEC +#undef USR_TYPE_HAS_DEC +#undef USR_TYPE_BYTES_DEC +} // namespace ge + +#endif // INC_EXTERNAL_GRAPH_USR_TYPES_H_ diff --git a/inc/external/register/register.h b/inc/external/register/register.h new file mode 100644 index 00000000..d8a2211b --- /dev/null +++ b/inc/external/register/register.h @@ -0,0 +1,199 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_EXTERNAL_REGISTER_REGISTER_H_ +#define INC_EXTERNAL_REGISTER_REGISTER_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "graph/operator.h" +#include "register/register_error_codes.h" +#include "register/register_fmk_types.h" +#include "register/register_types.h" + +using std::unique_ptr; +using std::map; +using std::make_shared; +using std::to_string; +using std::string; +using std::pair; +using std::vector; + +namespace ge { +class Operator; +class TensorDesc; +class Tensor; +class TBEPluginManager; +} + +namespace domi { +struct OpOutput { + ge::Operator op; + // The output name of op + std::string outputName; +}; + +struct InferShapeContext { + ge::Operator op; + // Input name, input + std::map inputs; +}; + +struct InferShapeOutput { + std::vector outputDescs; + std::vector realDimCnt; +}; + +enum OmgMoveTypeToAttr { + OMG_MOVE_TYPE_DTYPE = 0, + OMG_MOVE_TYPE_VALUE, + OMG_MOVE_TYPE_SHAPE, + OMG_MOVE_TYPE_FORMAT, + OMG_MOVE_TYPE_AXIS, + OMG_MOVE_TYPE_SCALAR_VALUE, + OMG_REMOVE_TYPE_WITH_COND = 1000, +}; + +struct MoveInputToAttrStu { + int inputIdx; + std::string attrName; + OmgMoveTypeToAttr moveType; + bool attrValue; +}; + +Status AutoMappingFn(const google::protobuf::Message *op_src, ge::Operator &op); +Status AutoMappingFnDynamic(const google::protobuf::Message *op_src, ge::Operator &op, + std::map> dynamic_name_attr_value, + int in_pos = -1, int out_pos = -1); +using google::protobuf::Message; + +using ParseParamFunc = std::function; +using InferShapeFunc = std::function &)>; +using InferShapeFuncV2 = std::function; +using GetWorkspaceSizeFunc = std::function &)>; +using UpdateOpDescFunc = std::function; +using BuildTeBinFunc = std::function; + +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY OpRegistrationData { + public: + OpRegistrationData(const std::string &om_optype); + + ~OpRegistrationData(); + + OpRegistrationData &FrameworkType(const domi::FrameworkType 
&fmk_type); + + OpRegistrationData &OriginOpType(const std::initializer_list &ori_optype_list); + + OpRegistrationData &OriginOpType(const std::string &ori_optype); + + OpRegistrationData &ParseParamsFn(const ParseParamFunc &parseParamFn); + + OpRegistrationData &InferShapeAndTypeFn(const InferShapeFunc &inferShapeFn); + + OpRegistrationData &InferShapeAndTypeFn(const InferShapeFuncV2 &inferShapeFn); + + OpRegistrationData &UpdateOpDescFn(const UpdateOpDescFunc &updateOpDescFn); + + OpRegistrationData &GetWorkspaceSizeFn(const GetWorkspaceSizeFunc &getWorkspaceSizeFn); + + OpRegistrationData &TEBinBuildFn(const BuildTeBinFunc &buildTeBinFn); + + OpRegistrationData &ImplyType(const domi::ImplyType &imply_type); + + OpRegistrationData &Formats(const std::initializer_list &input_formats, + const std::initializer_list &output_formats); + + OpRegistrationData &WeightFormats(const std::initializer_list &weight_formats); + + OpRegistrationData &InputFormat(const std::initializer_list> &inputFormats); + OpRegistrationData &OutputFormat(const std::initializer_list> &outputFormats); + OpRegistrationData &InputDataType(const std::initializer_list> &inputDataTypes); + OpRegistrationData &OutputDataType(const std::initializer_list> &outputDataTypes); + OpRegistrationData &InputLimitedTensorDescInfo( + const std::initializer_list> &limitedTensorDescs); + OpRegistrationData &OutputLimitedTensorDescInfo( + const std::initializer_list> &limitedTensorDescs); + + OpRegistrationData &MoveInputToAttr(int inputIdx, const std::string &attrName, OmgMoveTypeToAttr moveType); + OpRegistrationData &DelInputWithCond(int inputIdx, const std::string &attrName, bool attrValue); + + private: + domi::FrameworkType fmk_type_; // Framework type + std::set ori_optype_set_; // OP type in the original model, there may be multiple + std::string om_optype_; // OP type in OM model + domi::ImplyType imply_type_; // Execution type + std::vector input_formats_; // Data formats supported by operator input + 
std::vector output_formats_; // Data formats supported by operator output + std::vector weight_formats_; // Data format supported by operator weight + + ParseParamFunc parseParamFn_; // ParseParam function + InferShapeFunc inferShapeFn_; // InferShape function + InferShapeFuncV2 inferShapeFnV2_; // InferShape function + GetWorkspaceSizeFunc getWorkspaceSizeFn_; // GetWorkspaceSizeFunc function + UpdateOpDescFunc updateOpDescFn_; + BuildTeBinFunc buildTeBinFn_; + // Input formats list supported by tbe operators + std::vector> supportedInputFormats_; + // Output formats list supported by tbe operators + std::vector> supportedOutputFormats_; + // Input datatypes list supported by tbe operators + std::vector> supportedInputDataTypes_; + // Output datatypes list supported by tbe operators + std::vector> supportedOutputDataTypes_; + // Input tensordesinfo list supported by tbe operator + std::vector> inputLimitedTensorDescs_; + // Output tensordesinfo list supported by tbe operator + std::vector> outputLimitedTensorDescs_; + + std::vector moveInputToAttrVec_; + friend class OpRegistry; + friend class OpRegistrationTbe; + friend class ge::TBEPluginManager; +}; + +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY OpReceiver { + public: + OpReceiver(OpRegistrationData ®_data); + ~OpReceiver() {} +}; + +#define REGISTER_CUSTOM_OP(name) REGISTER_CUSTOM_OP_UNIQ_HELPER(__COUNTER__, name) +#define REGISTER_CUSTOM_OP_UNIQ_HELPER(ctr, name) REGISTER_CUSTOM_OP_UNIQ(ctr, name) +#define REGISTER_CUSTOM_OP_UNIQ(ctr, name) \ + static OpReceiver register_op##ctr \ + __attribute__((unused)) = \ + OpRegistrationData(name) +} // namespace domi + +namespace ge { +using OpOutput = domi::OpOutput; +using InferShapeContext = domi::InferShapeContext; +using InferShapeOutput = domi::InferShapeOutput; +using OmgMoveTypeToAttr = domi::OmgMoveTypeToAttr; +using MoveInputToAttrStu = domi::MoveInputToAttrStu; +using OpRegistrationData = domi::OpRegistrationData; +using OpReceiver = 
domi::OpReceiver; +} +#endif // INC_EXTERNAL_REGISTER_REGISTER_H_ diff --git a/inc/external/register/register_error_codes.h b/inc/external/register/register_error_codes.h new file mode 100644 index 00000000..5bfee8a2 --- /dev/null +++ b/inc/external/register/register_error_codes.h @@ -0,0 +1,38 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_REGISTER_REGISTER_ERROR_CODES_H_ +#define INC_EXTERNAL_REGISTER_REGISTER_ERROR_CODES_H_ + +#define SYSID_FWK 3 // Subsystem ID +#define MODID_COMMON 0 // Common module ID + +#define DECLARE_ERRORNO(sysid, modid, name, value) \ + const domi::Status name = \ + ((0xFF & ((uint8_t)sysid)) << 24) | ((0xFF & ((uint8_t)modid)) << 16) | (0xFFFF & ((uint16_t)value)); + +#define DECLARE_ERRORNO_COMMON(name, value) DECLARE_ERRORNO(SYSID_FWK, MODID_COMMON, name, value) + +namespace domi { +using Status = uint32_t; + +// General error code +DECLARE_ERRORNO(0, 0, SUCCESS, 0); +DECLARE_ERRORNO(0xFF, 0xFF, FAILED, 0xFFFFFFFF); +DECLARE_ERRORNO_COMMON(PARAM_INVALID, 1); // 50331649 +} // namespace domi + +#endif // INC_EXTERNAL_REGISTER_REGISTER_ERROR_CODES_H_ diff --git a/inc/external/register/register_fmk_types.h b/inc/external/register/register_fmk_types.h new file mode 100644 index 00000000..e24957cc --- /dev/null +++ b/inc/external/register/register_fmk_types.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_EXTERNAL_REGISTER_REGISTER_FMK_TYPES_H_ +#define INC_EXTERNAL_REGISTER_REGISTER_FMK_TYPES_H_ + +#include + +namespace domi { +/// +/// @ingroup domi_omg +/// @brief AI framework types +/// +enum FrameworkType { + FMK_TYPE_C = 0, + FMK_TYPE_MINDSPORE = 1, + FMK_TYPE_T = 3, + FMK_TYPE_A_NN, + FMK_TYPE_RESERVED, +}; + +struct TEBinInfo { + std::string bin_file_path; + std::string json_file_path; + std::string ddk_version; +}; +} // namespace domi + +#endif // INC_EXTERNAL_REGISTER_REGISTER_FMK_TYPES_H_ diff --git a/inc/external/register/register_types.h b/inc/external/register/register_types.h new file mode 100644 index 00000000..be090281 --- /dev/null +++ b/inc/external/register/register_types.h @@ -0,0 +1,55 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_EXTERNAL_REGISTER_REGISTER_TYPES_H_ +#define INC_EXTERNAL_REGISTER_REGISTER_TYPES_H_ + +namespace domi { +#ifdef HOST_VISIBILITY +#define FMK_FUNC_HOST_VISIBILITY __attribute__((visibility("default"))) +#else +#define FMK_FUNC_HOST_VISIBILITY +#endif +#ifdef DEV_VISIBILITY +#define FMK_FUNC_DEV_VISIBILITY __attribute__((visibility("default"))) +#else +#define FMK_FUNC_DEV_VISIBILITY +#endif + +/// CCE defined constant + +/// +/// @ingroup domi +/// @brief original tensor type +/// +typedef enum tagDomiTensorFormat { + DOMI_TENSOR_NCHW = 0, // < NCHW + DOMI_TENSOR_NHWC, // < NHWC + DOMI_TENSOR_ND, // < Nd Tensor + DOMI_TENSOR_NC1HWC0, // < NC1HWC0 + DOMI_TENSOR_FRACTAL_Z, // < FRACTAL_Z + DOMI_TENSOR_NC1C0HWPAD, + DOMI_TENSOR_NHWC1C0, + DOMI_TENSOR_FSR_NCHW, + DOMI_TENSOR_FRACTAL_DECONV, + DOMI_TENSOR_BN_WEIGHT, + DOMI_TENSOR_CHWN, // Android NN Depth CONV + DOMI_TENSOR_FILTER_HWCK, // filter input tensor format + DOMI_TENSOR_RESERVED +} domiTensorFormat_t; +} // namespace domi + +#endif // INC_EXTERNAL_REGISTER_REGISTER_TYPES_H_ diff --git a/inc/framework/common/aicpu_op.h b/inc/framework/common/aicpu_op.h new file mode 100644 index 00000000..850ceca3 --- /dev/null +++ b/inc/framework/common/aicpu_op.h @@ -0,0 +1,22 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_COMMON_AICPU_OP_H_ +#define INC_FRAMEWORK_COMMON_AICPU_OP_H_ + +#include "cce/customize.h" + +#endif // INC_FRAMEWORK_COMMON_AICPU_OP_H_ diff --git a/inc/framework/common/debug/ge_log.h b/inc/framework/common/debug/ge_log.h new file mode 100644 index 00000000..1556fd07 --- /dev/null +++ b/inc/framework/common/debug/ge_log.h @@ -0,0 +1,96 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_DEBUG_GE_LOG_H_ +#define INC_FRAMEWORK_COMMON_DEBUG_GE_LOG_H_ + +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "toolchain/slog.h" + +#define GE_MODULE_NAME GE + +// trace status of log +enum TraceStatus { TRACE_INIT = 0, TRACE_RUNNING, TRACE_WAITING, TRACE_STOP }; + +#define GELOGE(ERROR_CODE, ...) GE_LOG_ERROR(GE_MODULE_NAME, ERROR_CODE, __VA_ARGS__) +#define GELOGW(...) GE_LOG_WARN(GE_MODULE_NAME, __VA_ARGS__) +#define GELOGI(...) GE_LOG_INFO(GE_MODULE_NAME, __VA_ARGS__) +#define GELOGD(...) GE_LOG_DEBUG(GE_MODULE_NAME, __VA_ARGS__) +#define GEEVENT(...) GE_LOG_EVENT(GE_MODULE_NAME, __VA_ARGS__) +#define GELOGO(...) GE_LOG_OPLOG(GE_MODULE_NAME, __VA_ARGS__) +#define GELOGT(VALUE, ...) 
GE_LOG_TRACE(GE_MODULE_NAME, VALUE, __VA_ARGS__) + +inline bool IsLogEnable(int module_name, int log_level) noexcept { + int32_t enable_event = 0; + int32_t dlog_level = dlog_getlevel(module_name, &enable_event); + if (dlog_level <= log_level) { + return true; + } + return false; +} + +#define GE_TIMESTAMP_START(stage) uint64_t startUsec_##stage = ge::GetCurrentTimestap() + +#define GE_TIMESTAMP_END(stage, stage_name) \ + do { \ + uint64_t endUsec_##stage = ge::GetCurrentTimestap(); \ + GEEVENT("[GEPERFTRACE] The time cost of %s is [%lu] micro second.", (stage_name), \ + (endUsec_##stage - startUsec_##stage)); \ + } while (0); + +#define GE_TIMESTAMP_CALLNUM_START(stage) \ + uint64_t startUsec_##stage = ge::GetCurrentTimestap(); \ + uint64_t call_num_of##stage = 0; \ + uint64_t time_of##stage = 0 + +#define GE_TIMESTAMP_RESTART(stage) (startUsec_##stage = ge::GetCurrentTimestap()) + +#define GE_TIMESTAMP_ADD(stage) \ + time_of##stage += ge::GetCurrentTimestap() - startUsec_##stage; \ + call_num_of##stage++ + +#define GE_TIMESTAMP_CALLNUM_END(stage, stage_name) \ + GEEVENT("[GEPERFTRACE] The time cost of %s is [%lu] micro second, call num is %lu", (stage_name), time_of##stage, \ + call_num_of##stage) + +#define GE_LOG_ERROR(MOD_NAME, ERROR_CODE, fmt, ...) \ + dlog_error(static_cast(MOD_NAME), "%s: ErrorNo: %d(%s) " fmt, __FUNCTION__, ERROR_CODE, \ + ((GE_GET_ERRORNO_STR(ERROR_CODE)).c_str()), ##__VA_ARGS__) +#define GE_LOG_WARN(MOD_NAME, fmt, ...) \ + if (IsLogEnable(static_cast(MOD_NAME), DLOG_WARN)) \ + dlog_warn(static_cast(MOD_NAME), "%s:" fmt, __FUNCTION__, ##__VA_ARGS__) +#define GE_LOG_INFO(MOD_NAME, fmt, ...) \ + if (IsLogEnable(static_cast(MOD_NAME), DLOG_INFO)) \ + dlog_info(static_cast(MOD_NAME), "%s:" fmt, __FUNCTION__, ##__VA_ARGS__) +#define GE_LOG_DEBUG(MOD_NAME, fmt, ...) \ + if (IsLogEnable(static_cast(MOD_NAME), DLOG_DEBUG)) \ + dlog_debug(static_cast(MOD_NAME), "%s:" fmt, __FUNCTION__, ##__VA_ARGS__) +#define GE_LOG_EVENT(MOD_NAME, fmt, ...) 
dlog_event(static_cast(MOD_NAME), "%s:" fmt, __FUNCTION__, ##__VA_ARGS__) +#define GE_LOG_OPLOG(MOD_NAME, fmt, ...) \ + Dlog(static_cast(MOD_NAME), DLOG_OPLOG, "%s:" fmt, __FUNCTION__, ##__VA_ARGS__) +#define GE_LOG_TRACE(MOD_NAME, value, fmt, ...) \ + do { \ + TraceStatus stat = value; \ + const char *const TraceStatStr[] = {"INIT", "RUNNING", "WAITING", "STOP"}; \ + int idx = static_cast(stat); \ + char *k = const_cast("status"); \ + char *v = const_cast(TraceStatStr[idx]); \ + KeyValue kv = {k, v}; \ + DlogWithKV(static_cast(MOD_NAME), DLOG_TRACE, &kv, 1, "%s:" fmt, __FUNCTION__, ##__VA_ARGS__); \ + } while (0) +#endif // INC_FRAMEWORK_COMMON_DEBUG_GE_LOG_H_ diff --git a/inc/framework/common/debug/log.h b/inc/framework/common/debug/log.h new file mode 100644 index 00000000..1aa4111c --- /dev/null +++ b/inc/framework/common/debug/log.h @@ -0,0 +1,254 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_DEBUG_LOG_H_ +#define INC_FRAMEWORK_COMMON_DEBUG_LOG_H_ + +#include + +#include "cce/cce_def.hpp" +#include "common/string_util.h" +#include "common/util.h" +#include "dlog/log.h" +#include "framework/common/debug/ge_log.h" +#include "ge/ge_api_error_codes.h" + +using cce::CC_STATUS_SUCCESS; +using cce::ccStatus_t; + +#if !defined(__ANDROID__) && !defined(ANDROID) +#define DOMI_LOGE(...) 
DAV_LOGE("DOMI", __VA_ARGS__) +#else +#include +#if defined(BUILD_VERSION_PERF) +#define DOMI_LOGE(fmt, ...) +#else +// The Android system has strict log control. Do not modify the log. +#define DOMI_LOGE(fmt, ...) \ + __android_log_print(ANDROID_LOG_ERROR, "NPU_FMK", "%s %s(%d)::" #fmt, __FILE__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#endif +#endif + +// ge marco +#define GE_LOGI_IF(condition, ...) \ + if ((condition)) { \ + GELOGI(__VA_ARGS__); \ + } + +#define GE_LOGW_IF(condition, ...) \ + if ((condition)) { \ + GELOGW(__VA_ARGS__); \ + } + +#define GE_LOGE_IF(condition, ...) \ + if ((condition)) { \ + GELOGE(ge::FAILED, __VA_ARGS__); \ + } + +// If expr is not SUCCESS, print the log and return the same value +#define GE_CHK_STATUS_RET(expr, ...) \ + do { \ + const ge::Status _status = (expr); \ + if (_status != ge::SUCCESS) { \ + GELOGE(ge::FAILED, __VA_ARGS__); \ + return _status; \ + } \ + } while (0); + +// If expr is not SUCCESS, print the log and do not execute return +#define GE_CHK_STATUS(expr, ...) \ + do { \ + const ge::Status _status = (expr); \ + if (_status != ge::SUCCESS) { \ + GELOGE(ge::FAILED, __VA_ARGS__); \ + } \ + } while (0); + +// If expr is not SUCCESS, return the same value +#define GE_CHK_STATUS_RET_NOLOG(expr) \ + do { \ + const ge::Status _status = (expr); \ + if (_status != ge::SUCCESS) { \ + return _status; \ + } \ + } while (0); + +// If expr is not SUCCESS, print the log and execute a custom statement +#define GE_CHK_STATUS_EXEC(expr, exec_expr, ...) \ + do { \ + const ge::Status _status = (expr); \ + GE_CHK_BOOL_EXEC(_status == SUCCESS, exec_expr, __VA_ARGS__); \ + } while (0); + +// If expr is not true, print the log and return the specified status +#define GE_CHK_BOOL_RET_STATUS(expr, _status, ...) 
\ + do { \ + bool b = (expr); \ + if (!b) { \ + std::string msg; \ + (void)msg.append(ge::StringUtils::FormatString(__VA_ARGS__)); \ + (void)msg.append( \ + ge::StringUtils::FormatString(" Error Code:0x%X(%s)", _status, GET_ERRORNO_STR(_status).c_str())); \ + GELOGE(ge::FAILED, "%s", msg.c_str()); \ + return _status; \ + } \ + } while (0); + +// If expr is not true, print the log and return the specified status +#define GE_CHK_BOOL_RET_STATUS_NOLOG(expr, _status, ...) \ + do { \ + bool b = (expr); \ + if (!b) { \ + return _status; \ + } \ + } while (0); + +// If expr is not true, print the log and execute a custom statement +#define GE_CHK_BOOL_EXEC(expr, exec_expr, ...) \ + { \ + bool b = (expr); \ + if (!b) { \ + GELOGE(ge::FAILED, __VA_ARGS__); \ + exec_expr; \ + } \ + }; + +// If expr is not true, print the log and execute a custom statement +#define GE_CHK_BOOL_EXEC_WARN(expr, exec_expr, ...) \ + { \ + bool b = (expr); \ + if (!b) { \ + GELOGW(__VA_ARGS__); \ + exec_expr; \ + } \ + }; +// If expr is not true, print the log and execute a custom statement +#define GE_CHK_BOOL_EXEC_INFO(expr, exec_expr, ...) \ + { \ + bool b = (expr); \ + if (!b) { \ + GELOGI(__VA_ARGS__); \ + exec_expr; \ + } \ + }; + +// If expr is true, print logs and execute custom statements +#define GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(expr, exec_expr, ...) \ + { \ + bool b = (expr); \ + if (b) { \ + GELOGE(ge::FAILED, __VA_ARGS__); \ + exec_expr; \ + } \ + }; +// If expr is true, print the Information log and execute a custom statement +#define GE_CHK_TRUE_EXEC_INFO(expr, exec_expr, ...) \ + { \ + bool b = (expr); \ + if (b) { \ + GELOGI(__VA_ARGS__); \ + exec_expr; \ + } \ + }; + +// If expr is not SUCCESS, print the log and execute the expression + return _status +#define GE_CHK_BOOL_TRUE_EXEC_RET_STATUS(expr, _status, exec_expr, ...) 
\ + { \ + bool b = (expr); \ + if (b) { \ + GELOGE(ge::FAILED, __VA_ARGS__); \ + exec_expr; \ + return _status; \ + } \ + }; + +// If expr is not true, execute a custom statement +#define GE_CHK_BOOL_EXEC_NOLOG(expr, exec_expr) \ + { \ + bool b = (expr); \ + if (!b) { \ + exec_expr; \ + } \ + }; + +// -----------------runtime related macro definitions------------------------------- +// If expr is not RT_ERROR_NONE, print the log +#define GE_CHK_RT(expr) \ + do { \ + rtError_t _rt_ret = (expr); \ + if (_rt_ret != RT_ERROR_NONE) { \ + GELOGE(ge::RT_FAILED, "Call rt api failed, ret: 0x%X", _rt_ret); \ + } \ + } while (0); + +// If expr is not RT_ERROR_NONE, print the log and execute the exec_expr expression +#define GE_CHK_RT_EXEC(expr, exec_expr) \ + { \ + rtError_t _rt_ret = (expr); \ + if (_rt_ret != RT_ERROR_NONE) { \ + GELOGE(ge::RT_FAILED, "Call rt api failed, ret: 0x%X", _rt_ret); \ + exec_expr; \ + } \ + } + +// If expr is not RT_ERROR_NONE, print the log and return +#define GE_CHK_RT_RET(expr) \ + do { \ + rtError_t _rt_ret = (expr); \ + if (_rt_ret != RT_ERROR_NONE) { \ + GELOGE(ge::RT_FAILED, "Call rt api failed, ret: 0x%X", _rt_ret); \ + return ge::RT_FAILED; \ + } \ + } while (0); + +// ------------------------cce related macro definitions---------------------------- +// If expr is not CC_STATUS_SUCCESS, print the log +#define GE_CHK_CCE(expr) \ + do { \ + ccStatus_t _cc_ret = (expr); \ + if (_cc_ret != CC_STATUS_SUCCESS) { \ + GELOGE(ge::CCE_FAILED, "Call cce api failed, ret: 0x%X", _cc_ret); \ + } \ + } while (0); + +// If expr is not CC_STATUS_SUCCESS, print the log and return +#define GE_CHK_CCE_RET(expr) \ + do { \ + ccStatus_t _cc_ret = (expr); \ + if (_cc_ret != CC_STATUS_SUCCESS) { \ + GELOGE(ge::CCE_FAILED, "Call cce api failed, ret: 0x%X", _cc_ret); \ + return ge::CCE_FAILED; \ + } \ + } while (0); + +// If expr is true, execute exec_expr without printing logs +#define GE_IF_BOOL_EXEC(expr, exec_expr) \ + { \ + if (expr) { \ + exec_expr; \ + } \ 
+ } + +// If make_shared is abnormal, print the log and execute the statement +#define GE_MAKE_SHARED(exec_expr0, exec_expr1) \ + try { \ + exec_expr0; \ + } catch (const std::bad_alloc &) { \ + GELOGE(ge::FAILED, "Make shared failed"); \ + exec_expr1; \ + } + +#endif // INC_FRAMEWORK_COMMON_DEBUG_LOG_H_ diff --git a/inc/framework/common/fmk_error_codes.h b/inc/framework/common/fmk_error_codes.h new file mode 100644 index 00000000..289cbd68 --- /dev/null +++ b/inc/framework/common/fmk_error_codes.h @@ -0,0 +1,85 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_COMMON_FMK_ERROR_CODES_H_ +#define INC_FRAMEWORK_COMMON_FMK_ERROR_CODES_H_ + +#include +#include + +#include "framework/common/fmk_types.h" +#include "register/register_error_codes.h" + +#define MODID_OMG 1 // OMG module ID +#define MODID_OME 2 // OME module ID +#define MODID_CALIBRATION 3 // Calibration module ID + +// Each module uses the following four macros to define error codes: +#define DECLARE_ERRORNO_OMG(name, value) DECLARE_ERRORNO(SYSID_FWK, MODID_OMG, name, value) +#define DECLARE_ERRORNO_OME(name, value) DECLARE_ERRORNO(SYSID_FWK, MODID_OME, name, value) +#define DECLARE_ERRORNO_CALIBRATION(name, value) DECLARE_ERRORNO(SYSID_FWK, MODID_CALIBRATION, name, value) + +#define DEF_ERRORNO(name, desc) const ErrorNoRegisterar g_##name##_errorno(name, desc); + +// Interface for Obtaining Error Code Description +#define GET_ERRORNO_STR(value) domi::StatusFactory::Instance()->GetErrDesc(value) + +namespace domi { +class StatusFactory { + public: + static StatusFactory *Instance(); + + void RegisterErrorNo(uint32_t err, const std::string &desc); + + std::string GetErrDesc(uint32_t err); + + protected: + StatusFactory() {} + ~StatusFactory() {} + + private: + std::map err_desc_; +}; + +class ErrorNoRegisterar { + public: + ErrorNoRegisterar(uint32_t err, const std::string &desc) { StatusFactory::Instance()->RegisterErrorNo(err, desc); } + ~ErrorNoRegisterar() {} +}; + +// Common errocode +DECLARE_ERRORNO_COMMON(MEMALLOC_FAILED, 0); // 50331648 +DECLARE_ERRORNO_COMMON(CCE_FAILED, 2); // 50331650 +DECLARE_ERRORNO_COMMON(RT_FAILED, 3); // 50331651 +DECLARE_ERRORNO_COMMON(INTERNAL_ERROR, 4); // 50331652 +DECLARE_ERRORNO_COMMON(CSEC_ERROR, 5); // 50331653 +DECLARE_ERRORNO_COMMON(TEE_ERROR, 6); // 50331653 +DECLARE_ERRORNO_COMMON(UNSUPPORTED, 100); +DECLARE_ERRORNO_COMMON(OUT_OF_MEMORY, 101); + +// Omg errorcode +DECLARE_ERRORNO_OMG(PARSE_MODEL_FAILED, 0); +DECLARE_ERRORNO_OMG(PARSE_WEIGHTS_FAILED, 1); +DECLARE_ERRORNO_OMG(NOT_INITIALIZED, 
2); +DECLARE_ERRORNO_OMG(TIMEOUT, 3); + +// Ome errorcode +DECLARE_ERRORNO_OME(MODEL_NOT_READY, 0); +DECLARE_ERRORNO_OME(PUSH_DATA_FAILED, 1); +DECLARE_ERRORNO_OME(DATA_QUEUE_ISFULL, 2); +} // namespace domi + +#endif // INC_FRAMEWORK_COMMON_FMK_ERROR_CODES_H_ diff --git a/inc/framework/common/fmk_types.h b/inc/framework/common/fmk_types.h new file mode 100644 index 00000000..e7ab6d6e --- /dev/null +++ b/inc/framework/common/fmk_types.h @@ -0,0 +1,23 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_FMK_TYPES_H_ +#define INC_FRAMEWORK_COMMON_FMK_TYPES_H_ + +#include "graph/types.h" +#include "register/register_types.h" + +#endif // INC_FRAMEWORK_COMMON_FMK_TYPES_H_ diff --git a/inc/framework/common/ge_format_util.h b/inc/framework/common/ge_format_util.h new file mode 100644 index 00000000..9b1d7786 --- /dev/null +++ b/inc/framework/common/ge_format_util.h @@ -0,0 +1,40 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_GE_FORMAT_UTIL_H_ +#define INC_FRAMEWORK_COMMON_GE_FORMAT_UTIL_H_ + +#include + +#include "common/ge_inner_error_codes.h" +#include "graph/tensor.h" + +namespace ge { +class GeFormatUtil { + public: + /// + /// @name TransShape + /// @brief transform the shape of tensor according to destination format + /// @param [in] src_desc source tensor desc + /// @param [in] dst_format destination format + /// @param [out] dst_shape destination shape + /// @return Status + /// + static Status TransShape(const TensorDesc &src_desc, Format dst_format, std::vector &dst_shape); +}; +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_GE_FORMAT_UTIL_H_ diff --git a/inc/framework/common/ge_inner_error_codes.h b/inc/framework/common/ge_inner_error_codes.h new file mode 100644 index 00000000..123aafdf --- /dev/null +++ b/inc/framework/common/ge_inner_error_codes.h @@ -0,0 +1,295 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_COMMON_GE_INNER_ERROR_CODES_H_ +#define INC_FRAMEWORK_COMMON_GE_INNER_ERROR_CODES_H_ + +#include +#include + +#include "ge/ge_api_error_codes.h" + +namespace ge { +// System ID +enum SystemIdType { kSysidGE = 8 }; + +// Runtime location +enum LogRuntime { + KRtHost = 0b01, + kRtDevice = 0b10, +}; + +// Sub model +enum SubModuleId { + kCommonModule = 0, + kClientModule = 1, + kInitModule = 2, + kSessionModule = 3, + kGraphModule = 4, + kEngineMOdule = 5, + kOpsModule = 6, + kPluginModule = 7, + kRuntimeModule = 8, + kExecutorModule = 9, + kGeneratorModule = 10, +}; + +// Error code type +enum ErrorCodeType { + kErrorCode = 0b01, + kExceptionCode = 0b10, +}; + +// Error level +enum ErrorLevel { + kCommonLevel = 0b000, + kSuggestionLevel = 0b001, + kMinorLevel = 0b010, + kMajorLevel = 0b011, + kCriticalLevel = 0b100, +}; + +// The error code is defined by the following macros +#define GE_ERRORNO_COMMON(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kCommonModule, name, value, desc) +#define GE_ERRORNO_CLIENT(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kClientModule, name, value, desc) +#define GE_ERRORNO_INIT(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kInitModule, name, value, desc) +#define GE_ERRORNO_SESSION(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kSessionModule, name, value, desc) +#define GE_ERRORNO_GRAPH(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kGraphModule, name, value, desc) +#define GE_ERRORNO_ENGINE(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kEngineMOdule, name, value, desc) +#define GE_ERRORNO_OPS(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kOpsModule, name, value, desc) +#define GE_ERRORNO_PLUGIN(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, 
kPluginModule, name, value, desc) +#define GE_ERRORNO_RUNTIME(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kRuntimeModule, name, value, desc) +#define GE_ERRORNO_EXECUTOR(name, value, desc) \ + GE_ERRORNO(kRtDevice, kErrorCode, kCommonLevel, kSysidGE, kExecutorModule, name, value, desc) +#define GE_ERRORNO_GENERATOR(name, value, desc) \ + GE_ERRORNO(KRtHost, kErrorCode, kCommonLevel, kSysidGE, kGeneratorModule, name, value, desc) + +// Get the description of the error code +#define GE_GET_ERRORNO_STR(value) ge::StatusFactory::Instance()->GetErrDesc(value) + +// Common module error code definition +GE_ERRORNO_COMMON(MEMALLOC_FAILED, 0, "Failed to allocate memory!"); // 1343225856 +GE_ERRORNO_COMMON(PARAM_INVALID, 1, "Parameter's invalid!"); // 1343225857 +GE_ERRORNO_COMMON(CCE_FAILED, 2, "Failed to call CCE API!"); // 1343225858 +GE_ERRORNO_COMMON(RT_FAILED, 3, "Failed to call runtime API!"); // 1343225859 +GE_ERRORNO_COMMON(INTERNAL_ERROR, 4, "Internal errors"); // 1343225860 +GE_ERRORNO_COMMON(CSEC_ERROR, 5, "Failed to call libc_sec API!"); // 1343225861 +GE_ERRORNO_COMMON(TEE_ERROR, 6, "Failed to call tee API!"); // 1343225862 +GE_ERRORNO_COMMON(END_OF_SEQUENCE, 7, "End of sequence!"); // 1343225863 + +// Error code for plugin manager +GE_ERRORNO_COMMON(GE_PLGMGR_PATH_INVALID, 30, "Path is invalid!"); // 1343225886 +GE_ERRORNO_COMMON(GE_PLGMGR_SO_NOT_EXIST, 31, "Failed to find any valid so file!"); // 1343225887 +GE_ERRORNO_COMMON(GE_PLGMGR_FUNC_NOT_EXIST, 32, "Failed to find any function!"); // 1343225888 +GE_ERRORNO_COMMON(GE_PLGMGR_INVOKE_FAILED, 33, "Failed to invoke any function!"); // 1343225889 + +GE_ERRORNO_COMMON(UNSUPPORTED, 100, "Parameter's unsupported!"); + +GE_ERRORNO_COMMON(OUT_OF_MEMORY, 101, "Out of memory!"); + +// Client module error code definition +GE_ERRORNO_CLIENT(GE_CLI_INIT_FAILED, 1, "GEInitialize Failed."); // 1343229953 +GE_ERRORNO_CLIENT(GE_CLI_FINAL_FAILED, 2, "GEFinalize Failed."); // 1343229954 
+GE_ERRORNO_CLIENT(GE_CLI_SESS_CONSTRUCT_FAILED, 3, "Session constructor Failed."); // 1343229955 +GE_ERRORNO_CLIENT(GE_CLI_SESS_DESTROY_FAILED, 4, "Session destructor Failed."); // 1343229956 +GE_ERRORNO_CLIENT(GE_CLI_SESS_ADD_FAILED, 5, "Session AddGraph Failed."); // 1343229957 +GE_ERRORNO_CLIENT(GE_CLI_SESS_ADD_GRAPH_FAILED, 6, + "Session AddGraph Failed converting protobuf GraphProto."); // 1343229958 +GE_ERRORNO_CLIENT(GE_CLI_SESS_REMOVE_FAILED, 7, "Session RemoveGraph Failed."); // 1343229959 +GE_ERRORNO_CLIENT(GE_CLI_SESS_RUN_FAILED, 8, "Session RunGraph Failed."); // 1343229960 +GE_ERRORNO_CLIENT(GE_CLI_SESS_RUN_TENSOR_FAILED, 9, + "Session RunGraph Failed converting protobuf TensorProto."); // 1343229961 +GE_ERRORNO_CLIENT(GE_CLI_GE_ALREADY_INITIALIZED, 10, "GE is already initialized."); // 1343229962 +GE_ERRORNO_CLIENT(GE_CLI_GE_NOT_INITIALIZED, 11, "GE is not yet initialized or is finalized."); // 1343229963 + +// Init module error code definition +GE_ERRORNO_INIT(GE_MULTI_INIT, 0, "Multiple initializations are not supported."); // 1343234048 +GE_ERRORNO_INIT(GE_FINALIZE_NOT_INIT, 1, "Finalize is not allowed before initialization."); // 1343234049 +GE_ERRORNO_INIT(GE_MULTI_FINALIZE, 2, "Multiple finalizations are not supported."); // 1343234050 + +// Session module error code definition +GE_ERRORNO_SESSION(GE_SESS_INIT_FAILED, 0, "Failed to initialize session."); // 1343238144 +GE_ERRORNO_SESSION(GE_SESS_ALREADY_RUNNING, 1, "Session already running,not support parallel run."); // 1343238145 +GE_ERRORNO_SESSION(GE_SESS_GRAPH_NOT_EXIST, 2, "Graph ID not exist."); // 1343238146 +GE_ERRORNO_SESSION(GE_SESS_GRAPH_ALREADY_EXIST, 3, "Graph ID already exist."); // 1343238147 +GE_ERRORNO_SESSION(GE_SESS_GRAPH_IS_RUNNING, 4, "Graph is running."); // 1343238148 +GE_ERRORNO_SESSION(GE_SESSION_NOT_EXIST, 5, "Can not find session with specific session id."); // 1343238149 +GE_ERRORNO_SESSION(GE_SESSION_MANAGER_NOT_INIT, 6, "Session manager has not been 
initialized."); // 1343238150 + +// Graph module error code definition +GE_ERRORNO_GRAPH(GE_GRAPH_INIT_FAILED, 0, "Failed to initialize graph."); // 1343242240 +GE_ERRORNO_GRAPH(GE_GRAPH_ALREADY_RUNNING, 1, "graph already running,not support parallel run."); // 1343242241 +GE_ERRORNO_GRAPH(GE_GRAPH_GRAPH_NOT_EXIST, 2, "graph ID not exist."); // 1343242242 +GE_ERRORNO_GRAPH(GE_GRAPH_GRAPH_ALREADY_EXIST, 3, "Graph ID already exist."); // 1343242243 +GE_ERRORNO_GRAPH(GE_GRAPH_GRAPH_IS_RUNNING, 4, "Graph is running."); // 1343242244 +GE_ERRORNO_GRAPH(GE_GRAPH_MALLOC_FAILED, 5, "Graph malloc failed."); // 1343242245 +GE_ERRORNO_GRAPH(GE_GRAPH_FREE_FAILED, 6, "Graph FREE failed."); // 1343242246 +GE_ERRORNO_GRAPH(GE_GRAPH_NOT_MALLOC_BUFFER, 7, "Graph FREE failed, not malloc buffer."); // 1343242247 +GE_ERRORNO_GRAPH(GE_GRAPH_PARAM_NULLPTR, 8, "Graph param is NULL."); // 1343242248 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL, 9, "Get computeGraph by graphNode failed."); // 1343242249 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_RUN_GRAPH_NODE_NULL, 10, "Run graph node is null."); // 1343242250 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_RUN_GRAPH_INVALID, 11, "Get computeGraph by graphNode failed."); // 1343242251 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_INSERT_DYN_OP_FAILED, 12, "Graph which insert dynamic op failed."); // 1343242252 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_PREPROCESS_FAILED, 13, "Graph preprocess failed."); // 1343242253 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_GRAPH_FUSION_FAILED, 14, "Graph fusion failed."); // 1343242254 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_TINY_CAL_CHECK_FAILED, 15, "Check tiny calibration failed."); // 1343242255 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_CALIBRATION_FAILED, 16, "Calibration failed."); // 1343242256 +GE_ERRORNO_GRAPH(GE_GRAPH_SUBGRAPH_NUM_ZERO, 17, "Graph partition success, but subGraph num is 0."); // 1343242257 +GE_ERRORNO_GRAPH(GE_GRAPH_SUBGRAPH_ENGINENAME_REPEATED, 18, "Graph subGraph engine name is repeated."); // 1343242258 
+GE_ERRORNO_GRAPH(GE_GRAPH_GET_IN_OUT_FAILED, 19, "OME GetInputOutputDescInfo failed."); // 1343242259 +GE_ERRORNO_GRAPH(GE_GRAPH_DATA_INPUT_FAILED, 20, "OME DataInput failed."); // 1343242260 +GE_ERRORNO_GRAPH(GE_GRAPH_EXECUTE_FAILED, 21, "Execute graph failed."); // 1343242261 +GE_ERRORNO_GRAPH(GE_GRAPH_DUPLICATE_ENGINE, 22, "Duplicate engine."); // 1343242262 +GE_ERRORNO_GRAPH(GE_GRAPH_EMPTY_SUBGRAPH, 23, "Empty sub graph info."); // 1343242263 +GE_ERRORNO_GRAPH(GE_GRAPH_EXECUTE_NOT_INIT, 24, "Call SetCondition first."); // 1343242264 +GE_ERRORNO_GRAPH(GE_GRAPH_PREPARE_FAILED, 25, "Prepare failed."); // 1343242265 +GE_ERRORNO_GRAPH(GE_GRAPH_SERIALIZE_FAILED, 26, "OMG SerializeModelDef failed."); // 1343242266 +GE_ERRORNO_GRAPH(GE_GRAPH_SAVE_FAILED, 27, "OMG SaveModel failed."); // 1343242267 +GE_ERRORNO_GRAPH(GE_GRAPH_PRERUN_FAILED, 28, "PreRun failed."); // 1343242268 +GE_ERRORNO_GRAPH(GE_GRAPH_SUBGRAPH_ID_INVALID, 29, "Graph subGraph id is invalid."); // 1343242269 +GE_ERRORNO_GRAPH(GE_GRAPH_INFERSHAPE_FAILED, 30, "Prepare Graph infershape failed"); // 1343242270 +GE_ERRORNO_GRAPH(GE_GRAPH_ISNULL, 31, "RunGraph input compute graph is NULL."); // 1343242271 +GE_ERRORNO_GRAPH(GE_GRAPH_SYNC_MODEL_FAILED, 32, "Graph SyncExecuteModel failed."); // 1343242272 +GE_ERRORNO_GRAPH(GE_GRAPH_RUNGRAPH_FAILED, 33, "Graph RunGraph failed."); // 1343242273 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_PARSE_DYN_OP_FAILED, 34, "Parse dynamic node config file failed"); // 1343242274 +GE_ERRORNO_GRAPH(GE_GRAPH_MULTI_SUBGRAPH_BUILD, 35, "Save model with multiple sub graph"); // 1343242275 +GE_ERRORNO_GRAPH(GE_GRAPH_GRAPH_NODE_NULL, 36, "Graph get graph node failed."); // 1343242276 +GE_ERRORNO_GRAPH(GE_GRAPH_NOT_INIT, 37, "Graph do not init."); // 1343242277 +GE_ERRORNO_GRAPH(GE_GRAPH_NULL_INPUT, 38, "input graph is null"); // 1343242278 +GE_ERRORNO_GRAPH(GE_GRAPH_TOPO_SORT_FAILED, 39, "topological sorting an partition failed"); // 1343242279 +GE_ERRORNO_GRAPH(GE_GRAPH_EMPTY_PARTITION, 40, 
"accessing an empty partition"); // 1343242280 +GE_ERRORNO_GRAPH(GE_GRAPH_UNSUPPORTED, 41, "unsupported feature in partition"); // 1343242281 +GE_ERRORNO_GRAPH(GE_GRAPH_ASSIGN_ENGINE_FAILED, 42, "assign engine failed"); // 1343242282 +GE_ERRORNO_GRAPH(GE_GRAPH_ADD_PLC_END_FAILED, 43, "add placeholder end node failed"); // 1343242283 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_PARSE_OUT_NODE_FAILED, 44, "Parse out node failed."); // 1343242284 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIMIZE_INSERT_OP_PARSE_FAILED, 45, + "OMG parse dynamic node config file failed."); // 1343242285 +GE_ERRORNO_GRAPH(GE_GRAPH_SAVE_WEIGHTS_FAILED, 46, "OMG Save Weights to Model failed."); // 1343242286 +GE_ERRORNO_GRAPH(GE_GRAPH_EMPTY_STRING_NAME, 47, "Empty string name."); // 1343242287 +GE_ERRORNO_GRAPH(GE_GRAPH_EMPTY_VARIABLE_TENSOR_TABLE, 48, "Empty variable-tensor table."); // 1343242288 +GE_ERRORNO_GRAPH(GE_GRAPH_VARIABLE_ALREADY_EXIST, 49, "Variable already exist."); // 1343242289 +GE_ERRORNO_GRAPH(GE_GRAPH_VARIABLE_DOES_NOT_EXIST, 50, "Variable does not exist."); // 1343242290 +GE_ERRORNO_GRAPH(GE_GRAPH_OPTIONS_INVALID, 51, "Client session options is invalid."); // 1343242291 +GE_ERRORNO_GRAPH(GE_GRAPH_NO_OUTPUT_DESC_INFO, 52, "No output desc info."); // 1343242292 +GE_ERRORNO_GRAPH(GE_GRAPH_OUTPUT_DESCINFO_TENSOR_NUM_MISMATCH, 53, + "Number of output descinfo and tensor mismatch."); // 1343242293 +GE_ERRORNO_GRAPH(GE_GRAPH_FILENAMEPREFIX_INVALID, 54, "Graph Save Model fileNamePrefix is invalid."); // 1343242294 +GE_ERRORNO_GRAPH(GE_GRAPH_NOT_BUILT, 55, "Graph is not built before SaveModel."); // 1343242295 +GE_ERRORNO_GRAPH(GE_GRAPH_SAVEMODEL_FAILED, 56, "Graph SaveModel failed."); // 1343242296 +GE_ERRORNO_GRAPH(GE_GRAPH_MEMORY_ALLOC_FAILED, 57, "Failed allocating memory for model file header."); // 1343242297 +GE_ERRORNO_GRAPH(GE_GRAPH_NODE_SEARCHER_REMOVE_GRAPH_FAILED, 58, "Failed remove graph in node seacher."); // 1343242298 +GE_ERRORNO_GRAPH(GE_GRAPH_NODE_SEARCHER_ADD_GRAPH_FAILED, 59, 
"Failed add graph in node seacher."); // 1343242299 +GE_ERRORNO_GRAPH(GE_GRAPH_NODE_SEARCHER_GET_GRAPH_REBUILD_FAILED, 60, + "Failed add graph in node seacher."); // 1343242300 +GE_ERRORNO_GRAPH(GE_GRAPH_NODE_SEARCHER_SET_GRAPH_FINISH_REBUILD_GRAPH_FAILED, 61, + "Failed set graph finish rebuild in node searcher."); // 1343242301 +GE_ERRORNO_GRAPH(GE_GRAPH_VARIABLE_OP_PASS_FAILED, 62, "Failed to run variable pass."); // 1343242302 + +// Optimize errocode +GE_ERRORNO_GRAPH(TO_BE_DELETED, 63, "The node of the graph to be deleted."); // 1343242303 +GE_ERRORNO_GRAPH(NOT_CHANGED, 64, "The node of the graph no changed."); // 1343242304 + +// Engine_manager module error code definition +GE_ERRORNO_ENGINE(GE_ENG_INIT_FAILED, 0, "Failed to initialize engine."); // 1343246336 +GE_ERRORNO_ENGINE(GE_ENG_FINALIZE_FAILED, 1, "Engine finalize failed."); // 1343246337 +GE_ERRORNO_ENGINE(GE_ENG_MEMTYPE_ERROR, 2, "Memory type HBM is necessary when engine is in device"); // 1343246338 + +// Ops module error code definition +GE_ERRORNO_OPS(GE_OPS_KERNEL_STORE_INIT_FAILED, 0, "Failed to initialize OpsKernelInfoStore."); // 1343250432 +GE_ERRORNO_OPS(GE_OPS_GRAPH_OPTIMIZER_INIT_FAILED, 1, "Failed to initialize GraphOptimizer."); // 1343250433 +GE_ERRORNO_OPS(GE_OPS_KERNEL_INFO_NOT_EXIST, 2, "OpsKernelInfo not exist."); // 1343250434 +GE_ERRORNO_OPS(GE_OPS_KERNEL_STORE_NOT_EXIST, 3, "OpsKernelInfoStore not exist."); // 1343250435 +GE_ERRORNO_OPS(GE_OPS_CALC_RUNNING_PARAM_FAILED, 4, "Failed to CalcOpRunningParam."); // 1343250436 +GE_ERRORNO_OPS(GE_OPS_GENERATE_TASK_FAILED, 5, "Failed to GenerateTask."); // 1343250437 +GE_ERRORNO_OPS(GE_OPS_OPTIMIZE_ORIGINAL_GRAPH_FAILED, 6, "Failed to OptimizeOriginalGraph."); // 1343250438 +GE_ERRORNO_OPS(GE_OPS_OPTIMIZE_FUSED_GRAPH_FAILED, 7, "Failed to OptimizeFusedGraph."); // 1343250439 +GE_ERRORNO_OPS(GE_OPS_ENGINE_IS_NOT_REGISTERED, 8, "Engine is not registered."); // 1343250440 +GE_ERRORNO_OPS(GE_OPS_GET_NO_VALID_SO, 9, + "There is no valid so 
about OpsKernelInfoStore or GraphOptimizer."); // 1343250441 +GE_ERRORNO_OPS(GE_OPS_GET_OPTIMIZE_BY_ENGINE_FAILED, 10, "Failed to get graphOptimizer by name."); // 1343250442 +GE_ERRORNO_OPS(GE_OPS_GET_OPTIMIZE_BY_PRIORITY_FAILED, 11, "Failed to get graphOptimizer by priority."); // 1343250443 +GE_ERRORNO_OPS(GE_OPS_LOAD_GE_OPTIMIZER_FAILED, 12, "Failed to load ge graphOptimizer."); // 1343250444 + +// Runtime module error code definition +GE_ERRORNO_RUNTIME(GE_RTI_DEVICE_ID_INVALID, 1, "device id is invalid"); +GE_ERRORNO_RUNTIME(GE_RTI_DEVICE_NOT_READY, 2, "set device failed, device not ready"); +GE_ERRORNO_RUNTIME(GE_RTI_MEMALLOC_FAILED, 3, "malloc memory failed"); +GE_ERRORNO_RUNTIME(GE_RTI_MODEL_NOT_LOADED, 4, "model has not been loaded"); +GE_ERRORNO_RUNTIME(GE_RTI_THREAD_POOL_IS_NULL, 5, "model excute failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_CCE_CREATE_HANDLE_FAILED, 6, "cce create handle failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_CCE_SET_STREAM_FAILED, 7, "cce set stream failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_CREATE_RTMODEL_FAILED, 8, "call runtime create rtModel failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_CREATE_STREAM_FAILED, 9, "call runtime create stream failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_BIND_STREAM_FAILED, 10, "call runtime bind stream to model failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_CREATE_LABLE_FAILED, 11, "call runtime create lable failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MODEL_LOAD_COMPLETE_FAILED, 12, "call runtime model load complete failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MODEL_GET_TASK_ID_FAILED, 14, "call runtime get task id failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_KERNEL_LAUNCH_FAILED, 13, "call runtime kernel launch failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_KERNEL_LAUNCHEX_FAILED, 15, "call runtime kernel launchex failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_KERNEL_FUSION_START_FAILED, 16, "call runtime kernel fusion start failed"); 
+GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_KERNEL_FUSION_END_FAILED, 17, "call runtime kernel fusion end failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_LABEL_SET_FAILED, 18, "call runtime lable set failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_LABLE_GOTO_FAILED, 19, "call runtime lable goto failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_LABLE_SWITCH_FAILED, 20, "call runtime lable switch failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MEM_ALLOC_MANAGED_FAILED, 21, "call runtime mem alloc managed failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MEM_FREE_MANAGED_FAILED, 22, "call runtime mem free managed failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_FREE_FAILED, 23, "call runtime free failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_STREAM_SYNC_FAILED, 24, "call runtime sync stream failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MODEL_EXCUTE_FAILED, 25, "call runtime model excute failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MEM_ASYNC_FAILED, 26, "call runtime mem async failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MEM_ALLOC_HOST_FAILED, 27, "call runtime alloc host memory failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MEM_FREE_HOST_FAILED, 28, "call runtime free host memory failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MEM_ALLOC_DEVICE_FAILED, 29, "call runtime alloc device memory failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_MEM_FREE_DEVICE_FAILED, 30, "call runtime free device memory failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_FLUSH_CACHE_FAILED, 31, "call runtime flush cache failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_UNBIND_STREAM_FAILED, 32, "unbind rtstream from rtmodel failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_DESTORY_STREAM_FAILED, 33, "destory stream failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_DESTORY_LABEL_FAILED, 34, "destory label failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_DESTORY_MODEL_FAILED, 35, "destory model failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_CCE_TRANS_TENSOR_FAILED, 36, 
"call cce transfer tensor descriptor failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_CCE_TRANS_FILTER_FAILED, 37, "call cce transfer filter descriptor failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_CCE_UPDATE_KERNEL_ARGS_FAILED, 38, "call cce update kernel args failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_CCE_DESTORY_HANDLE_FAILED, 39, "destory handle failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_CREATE_EVENT_FAILED, 40, "call rutime create event failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_EVENT_RECORD_FAILED, 41, "call rutime event record failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_STREAM_WAIT_EVENT_FAILED, 42, "call rutime stream wait event failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_HCCL_BROADCAST_FAILED, 43, "call hccl hcom broadcast failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_HCCL_ALL_GATHER_FAILED, 44, "call hccl hcom all gather failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_HCCL_ALL_REDUCE_FAILED, 45, "call hccl hcom all reduce failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_RUNTIME_DESTORY_EVENT_FAILED, 46, "destory rt event failed"); +GE_ERRORNO_RUNTIME(GE_RTI_CALL_HCCL_REDUCE_SCATTER_FAILED, 47, "call hccl hcom reduce scatter failed"); + +// Executor module error code definition +GE_ERRORNO_EXECUTOR(GE_EXEC_NOT_INIT, 1, "GE Executor is not yet initialized."); + +// Generator module error code definition +GE_ERRORNO_GENERATOR(GE_GENERATOR_GRAPH_MANAGER_INIT_FAILED, 1, "Graph manager initialize failed."); +GE_ERRORNO_GENERATOR(GE_GENERATOR_GRAPH_MANAGER_ADD_GRAPH_FAILED, 2, "Graph manager add graph failed."); +GE_ERRORNO_GENERATOR(GE_GENERATOR_GRAPH_MANAGER_BUILD_GRAPH_FAILED, 3, "Graph manager build graph failed."); +GE_ERRORNO_GENERATOR(GE_GENERATOR_GRAPH_MANAGER_FINALIZE_FAILED, 4, "Graph manager finalize failed."); +GE_ERRORNO_GENERATOR(GE_GENERATOR_GRAPH_MANAGER_SAVE_MODEL_FAILED, 5, "Graph manager save model failed."); +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_GE_INNER_ERROR_CODES_H_ diff --git a/inc/framework/common/ge_types.h 
b/inc/framework/common/ge_types.h new file mode 100755 index 00000000..28c7a811 --- /dev/null +++ b/inc/framework/common/ge_types.h @@ -0,0 +1,153 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_GE_TYPES_H_ +#define INC_FRAMEWORK_COMMON_GE_TYPES_H_ + +#include + +#include +#include + +#include "common/fmk_error_codes.h" +#include "ge/ge_api_error_codes.h" + +using std::string; + +namespace ge { +enum RuntimeType { HOST = 0, DEVICE = 1 }; + +enum PerfLevel { GEN_TASK_WITH_FUSION = -1, GEN_TASK_WITHOUT_L2FUSION = 3, GEN_TASK_WITHOUT_FUSION = 4 }; + +enum FrameworkType { + FMK_TYPE_C = 0, + FMK_TYPE_MINDSPORE = 1, + FMK_TYPE_T = 3, + FMK_TYPE_A_NN, + FMK_TYPE_RESERVED, +}; + +const char *const GE_ENGINE_ATTR_MEM_TYPE_HBM = "HBM"; + +// Data cache, including data address and length +struct DataBuffer { + public: + void *data; // Data address + uint32_t length; // Data length + bool isDataSupportMemShare = false; + DataBuffer(void *dataIn, uint32_t len, bool isSupportMemShare) + : data(dataIn), length(len), isDataSupportMemShare(isSupportMemShare) {} + + DataBuffer() : data(nullptr), length(0), isDataSupportMemShare(false) {} +}; + +/// +/// @ingroup domi_ome +/// @brief External inputdata +/// +struct InputData { + uint32_t index; // Index of input data + uint32_t timestamp; // Data creation time + uint32_t timeout; // Processing timeout + uint32_t model_id; // 
Model ID required for data processing + uint64_t request_id = 0; // Request ID + std::vector blobs; // Actual input data, currently only supports one input +}; + +// The definition of output result structure +struct OutputData { + uint32_t index; // Index of input data + uint32_t model_id; // The model ID corresponding to the processing result + + /// Output data cache, arranged in sequence of output operators. + /// If the operator has multiple outputs, + /// the data buffer order of the operator is the same as that defined in the + /// offline model + std::vector blobs; +}; + +// The definition of command data structure +struct Command { + std::string cmd_type; // Command type + std::vector cmd_params; // Command params +}; + +// The definition of I/O shape description +struct ShapeDescription { + int64_t num = 0; + int64_t channel = 0; + int64_t height = 0; + int64_t width = 0; + std::vector dims; +}; + +// Definition of input and output description information +struct InputOutputDescInfo { + std::string name; + uint32_t size; + uint32_t data_type; + ShapeDescription shape_info; +}; + +// The structure of offline Modeldata +struct ModelData { + void *model_data = nullptr; // Model binary data start addr + uint32_t model_len = 0; // Model binary data length + int32_t priority = 0; // Model priority + std::string key; // Key path for encrypt model, Empty for unencrypt +}; + +// The definition of Model information +struct ModelInfo { + uint32_t version = 0; + std::string name; + bool is_encrypt = 0; // 0:unencrypt, 1:encrypt + std::vector input_desc; + std::vector output_desc; + uint8_t reserved[3] = {0}; // 3-byte reserved field +}; + +// Asynchronous callback interface, implemented by the caller +class ModelListener { + public: + virtual ~ModelListener() {} + /// + /// @brief Asynchronous callback interface + /// @param [in] model_id Model ID of the callback + /// @param [in] data_index Index of the input_data + /// @param [in] resultCode Execution results + /// 
+ virtual Status OnComputeDone(uint32_t model_id, uint32_t data_index, uint32_t result_code) = 0; +}; + +// OMM configuration item +struct Options { + int64_t session_id; + int32_t device_id; + int64_t job_id; + bool isUseHcom; + bool deployMode; + bool isAICPUMode; + bool enable_atomic; + string podName; + int64_t rankId; + string rankTableFile; + int32_t ge_hccl_flag = 0; + int32_t physical_device_id; +}; +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_GE_TYPES_H_ diff --git a/inc/framework/common/gflags_util.h b/inc/framework/common/gflags_util.h new file mode 100644 index 00000000..33a807bc --- /dev/null +++ b/inc/framework/common/gflags_util.h @@ -0,0 +1,76 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_COMMON_GFLAGS_UTIL_H_ +#define INC_FRAMEWORK_COMMON_GFLAGS_UTIL_H_ + +#include +#include + +namespace ge { +class GflagsUtils { + public: + /// + /// @brief Determines whether the parameter is true + /// @param name name parameter name + /// @return true or false + /// + static bool IsSetCommandTrue(const char *name) { + std::string out; + return gflags::GetCommandLineOption(name, &out) && out == "true"; + } + + /// + /// @brief Determines whether the parameter is empty + /// @param name name parameter name + /// @return true if empty otherwise false + /// + static bool IsSetCommandNotEmpty(const char *name) { + std::string out; + return gflags::GetCommandLineOption(name, &out) && !out.empty(); + } + + /// + /// @brief Determines whether the parameter is not default + /// @param flag_name name parameter name + /// @return true if not default otherwise false + /// + static bool IsCommandLineNotDefault(const char *flag_name) { + google::CommandLineFlagInfo info; + return GetCommandLineFlagInfo(flag_name, &info) && !info.is_default; + } + + /// + /// @brief Modify gflags to print help information + /// @param flags_h Pass in the self-defined help parameter, it is recommended to be FLAGS_h + /// @return void + /// + static void ChangeHelpFlags(bool flags_h) { + if (flags_h || IsSetCommandTrue("help") || IsSetCommandTrue("helpfull") || IsSetCommandNotEmpty("helpon") || + IsSetCommandNotEmpty("helpmatch") || IsSetCommandTrue("helppackage") || IsSetCommandTrue("helpxml")) { + gflags::SetCommandLineOption("help", "false"); + gflags::SetCommandLineOption("helpfull", "false"); + gflags::SetCommandLineOption("helpon", ""); + gflags::SetCommandLineOption("helpmatch", ""); + gflags::SetCommandLineOption("helppackage", "false"); + gflags::SetCommandLineOption("helpxml", "false"); + gflags::SetCommandLineOption("helpshort", "true"); + } + } +}; +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_GFLAGS_UTIL_H_ diff --git 
a/inc/framework/common/helper/model_helper.h b/inc/framework/common/helper/model_helper.h new file mode 100755 index 00000000..2d17779f --- /dev/null +++ b/inc/framework/common/helper/model_helper.h @@ -0,0 +1,67 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_HELPER_MODEL_HELPER_H_ +#define INC_FRAMEWORK_COMMON_HELPER_MODEL_HELPER_H_ + +#include +#include + +#include "common/fmk_types.h" +#include "common/helper/om_file_helper.h" +#include "common/types.h" +#include "graph/model.h" +#include "model/ge_model.h" + +namespace ge { +class ModelHelper { + public: + ModelHelper() = default; + ~ModelHelper(); + + Status SaveToOmModel(const GeModelPtr &ge_model, const SaveParam &save_param, const std::string &output_file); + Status SaveOriginalGraphToOmModel(const ge::Graph &graph, const std::string &output_file); + Status LoadModel(const ge::ModelData &model_data); + + ModelFileHeader *GetFileHeader() { return file_header_; } + + GeModelPtr GetGeModel(); + + static Status TransModelToGeModel(const ModelPtr &model, GeModelPtr &ge_model); + static Status TransGeModelToModel(const GeModelPtr &geModelPtr, ModelPtr &modelPtr); + + private: + bool is_assign_model_ = false; + ModelFileHeader *file_header_ = nullptr; + // Encrypted model need delete temp model and unencrypted model need not delete model + uint8_t *model_addr_tmp_ = nullptr; + uint32_t model_len_tmp_ = 0; + 
GeModelPtr model_; + + ModelHelper(const ModelHelper &); + ModelHelper &operator=(const ModelHelper &); + Status GenerateGeModel(OmFileLoadHelper &om_load_helper); + Status LoadModelData(OmFileLoadHelper &om_load_helper); + void SetModelToGeModel(ge::Model &model); + Status LoadWeights(OmFileLoadHelper &om_load_helper); + Status LoadTask(OmFileLoadHelper &om_load_helper); + Status LoadTBEKernelStore(OmFileLoadHelper &om_load_helper); + Status ReleaseLocalModelData() noexcept; + Status SaveModelPartition(std::shared_ptr &om_file_save_helper, ModelPartitionType type, + const uint8_t *data, size_t size); +}; +} // namespace ge +#endif // INC_FRAMEWORK_COMMON_HELPER_MODEL_HELPER_H_ diff --git a/inc/framework/common/helper/om_file_helper.h b/inc/framework/common/helper/om_file_helper.h new file mode 100755 index 00000000..2fa06967 --- /dev/null +++ b/inc/framework/common/helper/om_file_helper.h @@ -0,0 +1,91 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_COMMON_HELPER_OM_FILE_HELPER_H_ +#define INC_FRAMEWORK_COMMON_HELPER_OM_FILE_HELPER_H_ + +#include +#include + +#include "framework/common/fmk_types.h" +#include "framework/common/ge_types.h" +#include "framework/common/types.h" + +using std::string; +using std::vector; + +namespace ge { +struct ModelPartition { + ModelPartitionType type; + uint8_t *data = 0; + uint32_t size = 0; +}; + +struct OmFileContext { + vector partition_datas_; + vector partition_table_; + uint32_t model_data_len_; +}; + +struct SaveParam { + int32_t encode_mode; + std::string ek_file; + std::string cert_file; + std::string hw_key_file; + std::string pri_key_file; + std::string model_name; +}; + +class OmFileLoadHelper { + public: + Status Init(const ge::ModelData &model); + + Status Init(uint8_t *model_data, const uint32_t model_data_size); + + Status GetModelPartition(ModelPartitionType type, ModelPartition &partition); + + private: + Status CheckModelValid(const ge::ModelData &model) const; + + Status LoadModelPartitionTable(uint8_t *model_data, const uint32_t model_data_size); + + bool is_inited_{false}; + + public: + OmFileContext context_; +}; + +class OmFileSaveHelper { + public: + ModelFileHeader &GetModelFileHeader() { return model_header_; } + + uint32_t GetModelDataSize() const { return context_.model_data_len_; } + + ModelPartitionTable *GetPartitionTable(); + + Status AddPartition(ModelPartition &partition); + + const vector &GetModelPartitions() const; + + Status SaveModel(const SaveParam &save_param, const char *target_file); + + Status SaveModelToFile(const char *output_file); + + ModelFileHeader model_header_; + OmFileContext context_; +}; +} // namespace ge +#endif // INC_FRAMEWORK_COMMON_HELPER_OM_FILE_HELPER_H_ diff --git a/inc/framework/common/l2_cache_optimize.h b/inc/framework/common/l2_cache_optimize.h new file mode 100644 index 00000000..2fe70179 --- /dev/null +++ b/inc/framework/common/l2_cache_optimize.h @@ -0,0 +1,120 @@ +/** + * 
Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_L2_CACHE_OPTIMIZE_H_ +#define INC_FRAMEWORK_COMMON_L2_CACHE_OPTIMIZE_H_ + +#include + +#include +#include +#include +#include + +#include "common/types.h" +#include "common/util.h" +#include "graph/compute_graph.h" + +namespace ge { +// Size of RC memory alignment, 2M +const size_t ALIGN_SIZE = 2097152; +const uint32_t RC_VALUE_DEFAULT = 1; +const uint32_t RC_VALUE_MAC = 32; + +// RC data type classification +enum RCType { + RC_DEFAULT, // Such as temporary workspace memory of operator, variable (including global and local variable) + RC_HCOM, // Output of gradient aggregation, RC value should be set to 0 + RC_L2LOSS, // Parameter of L2 loss operator, RC value should be set to 0 + RC_INPUTOUTPUT, // Input and output tensor of operator, RC value is returned by FE calculation + RC_WEIGHTS, // The weight, fp16, RC value used by FP/BP operator should be set to 1 or the actual access numbers + RC_DW, // The gradient data DW and RC value output by BP operator + // should be set to 1 or the actual access numbers + RC_ARGS // Args of FlowTable, actual access numbers +}; + +enum MemType { INPUT_TENSOR, OUTPUT_TENSOR, WEIGHT, WORKSPACE }; + +// Memory usage information < node, type, number > +struct NodeInfo { + string nodeName; + MemType memType; + size_t index; +}; + +// Memory block RC value +struct RCMemoryBlock { + RCType type; // 
RC type + size_t blockSize; // memory block size + size_t headOffset; // Start offset from base address + size_t tailOffset; // End offset from base address + uint32_t rcCount; // RC value + NodeInfo nodeInfo; // Input and output indexes of node objects to which RC belongs +}; + +// L2Cache optimizer +class L2CacheOptimize { + public: + explicit L2CacheOptimize(ge::ComputeGraphPtr &graph); + ~L2CacheOptimize(); + + // Collect the information L2Cache Memory optimization + Status Gath(); + + private: + ge::ComputeGraphPtr graph_; + + // Save RC block information list + vector weightRCs; + vector opRCs; + + // Extract RC information generated by FE from compiled graph + void RetirveRCinfo(); + + // Take the maximum common divisor of RC values for the duplicate address + void Merge(vector &blocks); + + // The RC information is aligned with the 2m address + void Align(vector &blocks); + + // Weight of l2loss operator, output of gradient aggregation output, RC value set to 0 + void HandleOutputZeroRC(RCType type, ge::NodePtr node, vector &outputList, vector &blocks); + + // Processing operator input Tensor's RC + void HandOPInput(ge::NodePtr node, vector &inputList, vector &blocks); + + // Processing operator output Tensor's RC + void HandOPoutput(ge::NodePtr node, vector &outputList, vector &blocks); + + // maximum common divisor + uint32_t Measure(uint32_t x, uint32_t y) const { + if (x == 0 || y == 0) return RC_VALUE_DEFAULT; + uint32_t z = y; + while (x % y != 0) { + z = x % y; + x = y; + y = z; + } + return z; + } + + bool Contain(const RCMemoryBlock &l_block, const RCMemoryBlock &r_block); + bool Cross(const RCMemoryBlock &l_block, const RCMemoryBlock &r_block); + bool Connect(const RCMemoryBlock &l_block, const RCMemoryBlock &r_block); +}; +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_L2_CACHE_OPTIMIZE_H_ diff --git a/inc/framework/common/op/attr_value_util.h b/inc/framework/common/op/attr_value_util.h new file mode 100644 index 00000000..204dd648 --- 
/dev/null +++ b/inc/framework/common/op/attr_value_util.h @@ -0,0 +1,161 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_OP_ATTR_VALUE_UTIL_H_ +#define INC_FRAMEWORK_COMMON_OP_ATTR_VALUE_UTIL_H_ + +#include +#include +#include + +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "proto/om.pb.h" + +using domi::AttrDef; +using domi::OpDef; +using domi::AttrDef_ListValue; +using domi::ModelDef; +using domi::NamedAttrs; + +namespace ge { +using AttrDefMap = ::google::protobuf::Map<::std::string, ::domi::AttrDef>; +using AttrDefPair = ::google::protobuf::MapPair; + +void AddOpAttr(const std::string &key, AttrDef &attr, OpDef *opdef); +// DEFINE_ADD_ATTR_VALUE +void AddOpAttr(const std::string &key, const std::string &value, AttrDefMap *attrs); +void AddOpAttr(const std::string &key, const char *value, AttrDefMap *attrs); +void AddOpAttr(const char *key, const char *value, AttrDefMap *attrs); +void AddOpAttr(const std::string &key, const uint32_t value, AttrDefMap *attrs); +void AddOpAttr(const std::string &key, const int32_t value, AttrDefMap *attrs); +void AddOpAttr(const std::string &key, const int64_t value, AttrDefMap *attrs); +void AddOpAttr(const std::string &key, const float value, AttrDefMap *attrs); +void AddOpAttr(const std::string &key, const double value, AttrDefMap *attrs); +void AddOpAttr(const std::string &key, const bool 
value, AttrDefMap *attrs); + +void AddOpAttr(const std::string &key, const AttrDef_ListValue &value, AttrDefMap *attrs); + +// DEFINE_ADD_ATTR_VALUE +void AddOpAttr(const std::string &key, const std::string &value, OpDef *opdef); +void AddOpAttr(const std::string &key, const char *value, OpDef *opdef); +void AddOpAttr(const char *key, const char *value, OpDef *opdef); +void AddOpAttr(const std::string &key, const uint32_t value, OpDef *opdef); +void AddOpAttr(const std::string &key, const int32_t value, OpDef *opdef); +void AddOpAttr(const std::string &key, const int64_t value, OpDef *opdef); +void AddOpAttr(const std::string &key, const float value, OpDef *opdef); +void AddOpAttr(const std::string &key, const double value, OpDef *opdef); +void AddOpAttr(const std::string &key, const bool value, OpDef *opdef); + +void AddOpAttr(const std::string &key, const AttrDef_ListValue &value, OpDef *opdef); + +void AddOpBytesAttr(const std::string &key, const void *value, size_t size, OpDef *opdef); + +// DEFINE_ADD_ATTR_VALUE_LIST +void AddOpAttrList(const std::string &key, const double value, AttrDefMap *attrs); +void AddOpAttrList(const std::string &key, const float value, AttrDefMap *attrs); +void AddOpAttrList(const std::string &key, const uint32_t value, AttrDefMap *attrs); +void AddOpAttrList(const std::string &key, const int32_t value, AttrDefMap *attrs); +void AddOpAttrList(const std::string &key, const std::string value, AttrDefMap *attrs); +void AddOpAttrList(const std::string &key, const double value, OpDef *opdef); +void AddOpAttrList(const std::string &key, const float value, OpDef *opdef); +void AddOpAttrList(const std::string &key, const uint32_t value, OpDef *opdef); +void AddOpAttrList(const std::string &key, const int32_t value, OpDef *opdef); +void AddOpAttrList(const std::string &key, const bool value, OpDef *opdef); +void AddOpAttrList(const std::string &key, const int64_t value, OpDef *opdef); + +void AddOpAttrList(const std::string &key, const 
std::string &value, OpDef *opdef); + +bool GetOpAttr(const std::string &key, std::string *value, const OpDef *opdef); +bool GetOpAttr(const std::string &key, int32_t *value, const OpDef *opdef); +bool GetOpAttr(const std::string &key, int64_t *value, const OpDef *opdef); +bool GetOpAttr(const std::string &key, uint32_t *value, const OpDef *opdef); +bool GetOpAttr(const std::string &key, float *value, const OpDef *opdef); +bool GetOpAttr(const std::string &key, double *value, const OpDef *opdef); +bool GetOpAttr(const std::string &key, bool *value, const OpDef *opdef); +bool GetOpAttr(const std::string &key, AttrDef_ListValue *value, const OpDef *opdef); + +uint32_t GetOpAttrListSize(const std::string &key, std::string value, const OpDef *opdef); +uint32_t GetOpAttrListSize(const std::string &key, int32_t value, const OpDef *opdef); +uint32_t GetOpAttrListSize(const std::string &key, int64_t value, const OpDef *opdef); +uint32_t GetOpAttrListSize(const std::string &key, uint32_t value, const OpDef *opdef); +uint32_t GetOpAttrListSize(const std::string &key, float value, const OpDef *opdef); +uint32_t GetOpAttrListSize(const std::string &key, double value, const OpDef *opdef); +uint32_t GetOpAttrListSize(const std::string &key, bool value, const OpDef *opdef); + +bool GetBytesAttr(const std::string &key, std::string *value, const OpDef *opdef); +bool GetBytesAttr(const std::string &key, std::string *value, const ModelDef *model_def); + +void AddModelAttr(const std::string &key, const std::string &value, ModelDef *model_def); +void AddModelAttr(const std::string &key, const char *value, ModelDef *model_def); +void AddModelAttr(const char *key, const char *value, ModelDef *model_def); +void AddModelAttr(const std::string &key, const uint32_t value, ModelDef *model_def); +void AddModelAttr(const std::string &key, const int32_t value, ModelDef *model_def); +void AddModelAttr(const std::string &key, const int64_t value, ModelDef *model_def); +void AddModelAttr(const 
std::string &key, const float value, ModelDef *model_def); +void AddModelAttr(const std::string &key, const double value, ModelDef *model_def); +void AddModelAttr(const std::string &key, const bool value, ModelDef *model_def); +void AddModelAttr(const std::string &key, const void *value, size_t size, ModelDef *model_def); +void AddModelAttr(const std::string &key, const AttrDef_ListValue &value, ModelDef *model_def); + +void AddModelAttrList(const std::string &key, const double value, ModelDef *model_def); +void AddModelAttrList(const std::string &key, const float value, ModelDef *model_def); +void AddModelAttrList(const std::string &key, const uint32_t value, ModelDef *model_def); +void AddModelAttrList(const std::string &key, const int32_t value, ModelDef *model_def); +void AddModelAttrList(const std::string &key, const std::string &value, ModelDef *model_def); + +bool GetModelAttr(const std::string &key, std::string *value, const ModelDef *model_def); +bool GetModelAttr(const std::string &key, int32_t *value, const ModelDef *model_def); +bool GetModelAttr(const std::string &key, int64_t *value, const ModelDef *model_def); +bool GetModelAttr(const std::string &key, uint32_t *value, const ModelDef *model_def); +bool GetModelAttr(const std::string &key, float *value, const ModelDef *model_def); +bool GetModelAttr(const std::string &key, double *value, const ModelDef *model_def); +bool GetModelAttr(const std::string &key, bool *value, const ModelDef *model_def); +bool GetModelAttr(const std::string &key, AttrDef_ListValue *value, const ModelDef *model_def); + +bool HasOpAttr(const OpDef *opdef, const std::string &attr_name); + +void SetAttrDef(const std::string &value, AttrDef *out); +void SetAttrDef(const char *value, AttrDef *out); +void SetAttrDef(const uint32_t value, AttrDef *out); +void SetAttrDef(const int32_t value, AttrDef *out); +void SetAttrDef(const float value, AttrDef *out); +void SetAttrDef(const double value, AttrDef *out); +void SetAttrDef(const 
bool value, AttrDef *out); +void SetAttrList(const std::string &value, AttrDef *out); +void SetAttrList(const bool value, AttrDef *out); +void SetAttrList(const float value, AttrDef *out); +void SetAttrList(const double value, AttrDef *out); +void SetAttrList(const uint32_t value, AttrDef *out); + +bool GetAttrDefValue(const std::string &key, std::string *value, const AttrDefMap &attr); +bool GetAttrDefValue(const std::string &key, int32_t *value, const AttrDefMap &attr); +bool GetAttrDefValue(const std::string &key, int64_t *value, const AttrDefMap &attr); +bool GetAttrDefValue(const std::string &key, uint32_t *value, const AttrDefMap &attr); +bool GetAttrDefValue(const std::string &key, float *value, const AttrDefMap &attr); +bool GetAttrDefValue(const std::string &key, double *value, const AttrDefMap &attr); +bool GetAttrDefValue(const std::string &key, bool *value, const AttrDefMap &attr); +bool GetAttrDefValue(const std::string &key, AttrDef_ListValue *value, const AttrDefMap &attr); +bool GetAttrDefValue(const std::string &key, NamedAttrs *&value, AttrDefMap *attr); +bool GetAttrDefValue(const std::string &key, const NamedAttrs *&value, const AttrDefMap &attr); + +bool GetAttrDefListValue(const std::string &key, int idx, int32_t *value, const AttrDefMap &attr); +bool GetAttrDefListValue(const std::string &key, int idx, uint32_t *value, const AttrDefMap &attr); +bool GetAttrDefListValue(const std::string &key, int idx, float *value, const AttrDefMap &attr); +bool GetAttrDefListValue(const std::string &key, int idx, double *value, const AttrDefMap &attr); +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_OP_ATTR_VALUE_UTIL_H_ diff --git a/inc/framework/common/op/ge_op_utils.h b/inc/framework/common/op/ge_op_utils.h new file mode 100644 index 00000000..9c5e1667 --- /dev/null +++ b/inc/framework/common/op/ge_op_utils.h @@ -0,0 +1,280 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_OP_GE_OP_UTILS_H_ +#define INC_FRAMEWORK_COMMON_OP_GE_OP_UTILS_H_ + +#include + +#include +#include + +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/node.h" +#include "graph/op_desc.h" +#include "proto/insert_op.pb.h" + +namespace ge { +using namespace cce; +using domi::Status; + +// Add Sub Mul +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t ADD_INPUT_NUM; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t SUB_INPUT_NUM; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t MUL_INPUT_NUM; + +// Permute +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int32_t PERMUTE_ORDER_NUM; + +// Ssd PriroBox +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const double SSD_PRIORBOX_ASPECT_RATIO_VALUE; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t STRIDEDSLICE_INPUT_NUM; + +// Switch +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t SWITCH_INPUT_NUM; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t SWITCH_OUTPUT_NUM; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t SWITCH_FALSE_OUTPUT; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t SWITCH_TRUE_OUTPUT; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t 
SWITCH_DATA_INPUT; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t SWITCH_PRED_INPUT; + +class OpUtils { + public: + /// + /// @ingroup domi_ome + /// @brief Check whether check_value is in [min_enum_value, max_enum_value] + /// @return true Within + /// @return false out of range + // + static inline bool CheckEnumValid(int32_t check_value, int32_t min_enum_value, int32_t max_enum_value) { + return check_value < min_enum_value ? false : (check_value >= max_enum_value ? false : true); + } + /// + /// @ingroup domi_omg + /// @brief Convert the dimension of array according to different format + /// @param [in] src_format src_shape format + /// @param [in] src Dimension array to be converted + /// @param [in] dst_format Target format after conversion + /// @param [out] dst Dimension array after conversion + /// @return SUCCESS success + /// @return FAILED fail + /// + static bool ConvertDim(ccTensorFormat_t src_format, const std::vector &src, ccTensorFormat_t dst_format, + std::vector &dst); + /// + /// @ingroup domi_omg + /// @brief Determine whether to manually calculate the tensor size based on the values of format and dim + /// @param [in] format, Format information of the tensor + /// @param [in] real_dim_cnt, Tensor dim + /// @return true Manually calculate the size based on dim and datatype + /// @return false skip + /// + static bool IsComputDimsSize(const int32_t format, const uint32_t real_dim_cnt); + /// + /// @ingroup domi_ome + /// @brief Initialize the tensor description, which is used for input and output. + /// @param [in] model_tensor Tensor information defined by the offline model + /// @param [out] cc_tensor Tensor definition used by CC + /// @return SUCCESS success + /// @return FAILED fail + /// + static Status InitTensorDescriptor(const ge::GeTensorDesc &model_tensor, ccTensorDescriptor_t &cc_tensor); + /// + /// @ingroup domi_ome + /// @brief Initialize the tensor description, which is used for input and output. 
+ /// @param [in] model_tensor Tensor information defined by the offline model + /// @param [in] dst_data_type data_type of the target cc_tensor + /// @param [out] cc_tensor Tensor definition used by CC + /// @return SUCCESS success + /// @return FAILED fail + /// + static Status InitTensorDescriptor(const ge::GeTensorDesc &model_tensor, int32_t dst_data_type, + ccTensorDescriptor_t &cc_tensor); + /// + /// @ingroup domi_ome + /// @brief Initialize the tensor description for bias. + /// @param [in] model_tensor Tensor information defined by the offline model + /// @param [out] cc_tensor Tensor definition used by CC + /// @return SUCCESS success + /// @return FAILED fail + /// + /// + static Status InitTensorDescriptor(const ge::GeTensor &model_tensor, ccTensorDescriptor_t &cc_tensor); + /// + /// @ingroup domi_ome + /// @brief Initialize the tensor description for bias. + /// @param [in] model_tensor Tensor information defined by the offline model + /// @param [in] dst_data_type data_type of the target cc_tensor + /// @param [out] cc_tensor Tensor definition used by CC + /// @return SUCCESS success + /// @return FAILED fail + /// + static Status InitTensorDescriptor(const ge::GeTensor &model_tensor, int32_t dst_data_type, + ccTensorDescriptor_t &cc_tensor); + + static Status InitTensorDescriptor(int32_t format, int32_t data_type, const std::vector &dim, + ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt = 4); + /// + /// @ingroup domi_ome + /// @brief Destroys a tensor + /// @param [inout] cc_tensor Tensor definition used by CC + /// + static void DestroyTensorDescriptor(ccTensorDescriptor_t &cc_tensor) noexcept; + + /// + /// @ingroup domi_ome + /// @brief Destroys a tensor + /// @param [inout] cc_filter cc_filter Definition of the filter used by CC + /// + static void DestroyFilterDescriptor(ccFilterDescriptor_t &cc_filter); + + /// + /// @ingroup domi_ome + /// @brief Initializing Filter Description + /// @param [in] model_filter Filter information defined 
in the offline model + /// @param [out] cc_filter Definition of the filter used by CC + /// @return SUCCESS success + /// @return FAILED fail + /// + static Status InitFilterDescriptor(const ge::GeTensor &model_filter, ccFilterDescriptor_t &cc_filter); + + /// + /// @brief Extract AIPP parameters from AttrDefMap and splice them + /// @param [in] aipp_attr attr of operator + /// @param [out] aipp_params aipp parameters + /// @return enum of tagCCAippInputFormat + /// + static Status ConvertAippParams(const GeAttrValue::NamedAttrs &aipp_attr, domi::AippOpParams *aipp_params); + static Status TransferDim(const std::vector &dim, std::vector &dim_vector); + static void SliceData(std::vector &input, int64_t chunk_size, std::vector &output, int64_t begin, + int64_t out_dim, int64_t stride); + static Status SetOutputSliceData(void *data, int64_t data_size, int32_t data_type, std::vector &input_dims, + std::vector &begin, std::vector &output_dims, ge::GeTensor *output, + std::vector &stride); + + /// + /// @ingroup domi_omg + /// @brief Convert the convolution‘s weight data from [h, w, c, k] to [k, c, h, w] + /// @param [in] input Weight data in HWCK format + /// @param [in] H value of H dimension + /// @param [in] W value of W dimension + /// @param [in] C value of C dimension + /// @param [in] K value of K dimension + /// @param [out] output Data pointer after conversion. The format is KCHW. + /// + static void TransDataHWCK2KCHW(const void *input, int64_t H, int64_t W, int64_t C, int64_t K, void **output); + /// + /// @ingroup domi_omg + /// @brief Converts the convolution‘s weight data from [k, c, h, w] to [h, w, c, k]. + /// @param [in] input Weight data in HWCK format + /// @param [in] K value of K dimension + /// @param [in] C value of C dimension + /// @param [in] H value of H dimension + /// @param [in] W value of W dimension + /// @param [out] output Data pointer after conversion. 
The format is HWCK + /// + static void TransDataKCHW2HWCK(const void *input, int64_t K, int64_t C, int64_t H, int64_t W, void *output); + /// + /// @ingroup domi_omg + /// @brief Initialize the input and output description of the data node which is applied to filter weight in the + /// training network + /// @param [in] model_tensor input and output tensor information + /// @param [out] cc_tensor Tensor in CCE format after conversion + //// + static Status InitFilterTensorDescriptor(const ge::GeTensorDesc &model_tensor, ccFilterDescriptor_t &cc_tensor); + + static void SetTensorDescriptorAllOffsetQuantizeInfo(const GeTensorDesc &tensor, ccTensorDescriptor_t cc_tensor); + static vector GetWeights(const ge::Node &node); + static vector GetWeights(ge::ConstNodePtr node); + static vector MutableWeights(const ge::Node &node); + static vector MutableWeights(const ge::NodePtr node); + static Status SetWeights(ge::Node &node, const vector &weights); + static Status SetWeights(ge::NodePtr node, const vector &weights); + static Status GetShapeDataFromConstTensor(const ConstGeTensorPtr &tensor, DataType type, std::vector &dims); + + private: + friend class CceTensorDescriptor; + static uint32_t GetRealDimCnt(const GeTensorDesc &tensor_desc); +}; + +class CceTensorDescriptor; + +using CceTensorDescriptorPtr = std::shared_ptr; + +class CceTensorDescriptor { + public: + explicit CceTensorDescriptor(ccTensorDescriptor_t cc_tensor); + + CceTensorDescriptor(const CceTensorDescriptor &) = delete; + CceTensorDescriptor &operator=(const CceTensorDescriptor &) = delete; + + ~CceTensorDescriptor(); + + ccTensorDescriptor_t GetPtr() { return cc_tensor_; } + + /// + /// @brief Initializes the tensor based on shape information. 
+ /// @param[in] format data permutation format + /// @param[in] data_type Data Type + /// @param[in] dim dim information + /// @return return code + /// + Status InitTensor(int32_t format, int32_t data_type, const std::vector &dims); + + Status InitTensor(int32_t format, int32_t data_type, const ge::GeShape &shape); + + /// + /// @brief get format of tensor + /// @param[out] format format of the tensor + /// @return return code + /// + Status GetFormat(ccTensorFormat_t *format); + + /// + /// @brief Obtains the size of the tensor. + /// @param[out] size size of Tensor + /// @return return code + /// + Status GetTensorSizeInBytes(uint32_t *size); + + /// + /// @brief transform tensor between 4d(NCHW) and 5d(NC1HWC0) + /// @param [in] xDesc descriptor of input tensor + /// @param [in] x point to input data in host memory + /// @param [in] dataTypeTransmode mode of data type transform + /// @param [in] yDesc descriptor of output tensor + /// @param [in|out] y point to output data in host memory + /// @param [in] ySizeInBytes size of outputData + /// @return return code + /// + static Status TransTensor(const ccTensorDescriptor_t xDesc, const void *x, const CceTensorDescriptorPtr &yDesc, + void *y, uint32_t ySizeInBytes); + + /// + /// @brief CceTensorDescriptor Static Constructor + /// @return CceTensorDescriptor smart pointer + /// + static CceTensorDescriptorPtr Create(); + + ccTensorDescriptor_t cc_tensor_ = nullptr; +}; +} // namespace ge +#endif // INC_FRAMEWORK_COMMON_OP_GE_OP_UTILS_H_ diff --git a/inc/framework/common/op/op_parser_util.h b/inc/framework/common/op/op_parser_util.h new file mode 100644 index 00000000..714491ee --- /dev/null +++ b/inc/framework/common/op/op_parser_util.h @@ -0,0 +1,425 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_OP_OP_PARSER_UTIL_H_ +#define INC_FRAMEWORK_COMMON_OP_OP_PARSER_UTIL_H_ + +#include +#include +#include +#include + +namespace domi { +// General +const float DEFAULT_ALPHA_VALUE = 1.0; +const float DEFAULT_BETA_VALUE = 0.0; +const uint32_t NORMAL_INPUT_NUM = 1; +const uint32_t NORMAL_OUTPUT_NUM = 1; +const uint32_t NORMAL_WORKSPACE_NUM = 0; +const int32_t NORMAL_1D_DIM_NUM = 1; +const int32_t NORMAL_SCALE_DIM_NUM = 0; +const int NORMAL_TENSOR_FORMAT = static_cast(cce::CC_TENSOR_NC1HWC0); +const int NORMAL_TENSOR_SIZE = 4; +const int NORMAL_DEVICE_DATA_TYPE = static_cast(cce::CC_DATA_HALF); +const int DEFAULT_POOLING_MODE = static_cast(cce::CC_POOLING_MAX); +const uint32_t DEFAULT_REAL_DIM_CNT = 4; + +// Const +const uint32_t CONST_OP_INPUT_NUM = 0; +const uint32_t CONST_OP_NORMAL_WEIGHT_SIZE = 1; + +// MatMul +const uint32_t MATMUL_INPUT_NUM = 2; + +// ActivationGrad +const int32_t ACTIVATIONGRAD_INPUT_NUM = 2; + +// FusedBatchNorm +const int32_t FUSED_BATCH_NORM_WORKSPACE_NUM = 1; +const int32_t FUSED_BATCH_NORM_INPUT_NUM = 5; +const int32_t FUSED_BATCH_NORM_OUTPUT_NUM = 5; +// FusedBatchNormGrad +const int32_t FUSEDBATCHNORMGRAD_WORKSPACE_NUM = 1; +const int32_t FUSEDBATCHNORMGRAD_INPUT_NUM = 5; +const int32_t FUSEDBATCHNORMGRAD_OUTPUT_NUM = 3; + +// Conv +const uint32_t CONVOLUTION_WORKSPACE_NUM = 1; +const uint32_t CONVOLUTION_PAD_SIZE = 4; +const uint32_t CONVOLUTION_STRIDE_SIZE = 2; +const uint32_t CONVOLUTION_DILATION_SIZE = 2; +const int32_t CONVOLUTION_ADJ_SIZE = 2; +const int32_t 
CONVOLUTION_TARGET_SHAPE_SIZE = 2; + +// ConvGradFilter +const uint32_t CONVGRADFILTER_WORKSPACE_NUM = 1; +const uint32_t CONVGRADFILTER_INPUT_NUM = 3; + +// Pooling +const uint32_t POOLING_WINDOW_SIZE = 2; +const uint32_t POOLING_STRIDE_SIZE = 2; +const uint32_t POOLING_PAD_SIZE = 4; + +// Add Sub Mul +const uint32_t ADD_INPUT_NUM = 2; +const uint32_t SUB_INPUT_NUM = 2; +const uint32_t MUL_INPUT_NUM = 2; +const uint32_t DIV_INPUT_NUM = 2; +const uint32_t ADD_WORKSPACE_NUM = 1; +const uint32_t SUB_WORKSPACE_NUM = 1; +const uint32_t MUL_WORKSPACE_NUM = 1; +const uint32_t DIV_WORKSPACE_NUM = 1; + +const int32_t DEFAULT_AXIS_VALUE = -1; + +const int32_t RESHAPE_AXIS_DEFAULT_VALUE = 0; +const int32_t RESHAPE_NUM_AXES_DEFAULT_VALUE = -1; +const uint32_t RESHAPE_WORKSPACE_NUM = 1; + +const uint32_t FLATTEN_WORKSPACE_NUM = 1; + +const int32_t CONCAT_MIN_INPUT_SIZE = 1; +const int32_t CONCAT_DEFAULT_AXIS = 1; +const uint32_t CONCAT_WORKSPACE_NUM = 1; + +// The value for LRN parameters +const uint32_t LRN_DEFAULT_NORM_REGION = 0; +const float LRN_DEFAULT_K = 1.0; +const uint32_t LRN_DEFAULT_LOCAL_SIZE = 5; +const float LRN_DEFAULT_ALPHA = 1.0; +const float LRN_DEFAULT_BETA = 0.75; + +/// +/// @ingroup domi_common +/// @brief default value of roipooling +/// +const uint32_t ROIPOOLING_DEFAULT_POOLED_H = 0; +const uint32_t ROIPOOLING_DEFAULT_POOLED_W = 0; +const float ROIPOOLING_DEFAULT_SPATIAL_SCALE = 1; +const int32_t ROIPOOLING_DEFAULT_SAMPLING_RATIO = -1; + +// DetectionOutput +const int32_t DETECTIONOUTPUT_INPUT_SIZE = 3; +const int32_t DETECTIONOUTPUT_OUTPUT_SIZE = 2; +const int32_t DETECTIONOUTPUT_WORKSPACE_NUM = 1; +const int DETECTIONOUTPUT_CLASS_NUM = 20; +const int DETECTIONOUTPUT_NUM_CLASSES_DEFAULT_VALUE = 21; +const float DETECTIONOUTPUT_NMS_THRESHOLD_DEFAULT_VALUE = 0.3; +const float DETECTIONOUTPUT_CONFIDENCE_THRESHOLD_DEFAULT_VALUE = 0.8; + +// Proposal +const int32_t PROPOSAL_INPUT_SIZE = 3; +const int32_t PROPOSAL_OUTPUT_MAX_SIZE = 2; +const int32_t 
PROPOSAL_WORKSPACE_NUM = 1; +const float PROPOSAL_BASE_SIZE_DEFAULT_VALUE = 16; +const float PROPOSAL_RATIO_DIM_0_DEFAULT_VALUE = 0.5; +const float PROPOSAL_RATIO_DIM_1_DEFAULT_VALUE = 1; +const float PROPOSAL_RATIO_DIM_2_DEFAULT_VALUE = 2; +const float PROPOSAL_SCALE_DIM_0_DEFAULT_VALUE = 8; +const float PROPOSAL_SCALE_DIM_1_DEFAULT_VALUE = 16; +const float PROPOSAL_SCALE_DIM_2_DEFAULT_VALUE = 32; +const float PROPOSAL_MIN_SIZE_DEFAULT_VALUE = 16; +const int PROPOSAL_PRE_NMS_TOPN_DEFAULT_VALUE = 6000; +const int PROPOSAL_POST_NMS_TOPN_DEFAULT_VALUE = 304; +const float PROPOSAL_NMS_THRESH_DEFAULT_VALUE = 0.7; +const float PROPOSAL_FILTER_THRESH_DEFAULT_VALUE = 0; + +// TVM OP +const uint32_t DEFAULT_KERNEL_BLOCK_DIM = 1; + +// Softmax +const int32_t SOFTMAX_WORKSPACE_NUM = 1; + +// SoftmaxCrossEntropy +const int32_t SOFTMAXCROSSENTROPY_INPUT_NUM = 2; +const int32_t SOFTMAXCROSSENTROPY_OUTPUT_NUM = 2; + +// Permute +const int32_t PERMUTE_INPUT_NUM = 1; +const int32_t PERMUTE_OUTPUT_NUM = 1; +const int32_t PERMUTE_WORKSPACE_NUM = 1; +const int32_t PERMUTE_ORDER_NUM = 4; + +// Ssd normalize +const int SSD_NORMALIZE_INPUT_SIZE = 1; +const float SSD_NORMALIZE_EPS_DEFAULT_VALUE = 2e-7; + +// SsdPriroBox +const int32_t SSD_PRIOR_BOX_WORKSPACE_NUM = 1; +const int32_t SSD_PRIOR_BOX_INPUT_NUM = 2; +const bool SSD_PRIOR_BOX_FLIP_VALUE = true; +const bool SSD_PRIOR_BOX_CLIP_VALUE = false; +const double SSD_PRIOR_BOX_ASPECT_OFFSET_VALUE = 0.5; +const double SSD_PRIORBOX_VARIANCE_VALUE = 0.1; +const double SSD_PRIORBOX_VARIANCE_SIZE_ONE = 1; +const double SSD_PRIORBOX_VARIANCE_SIZE_FOUR = 4; +const double SSD_PRIORBOX_ASPECT_RATIO_VALUE = 1.0; +const int SSD_PRIOR_BOX_CODETYPE_CORNER_VALUE = 1; +const int SSD_PRIOR_BOX_CODETYPE_CENTER_SIZE_VALUE = 2; +const int SSD_PRIOR_BOX_CODETYPE_CORNER_SIZE_VALUE = 3; + +// Ssd DetectionOutput +const int32_t SSD_DETECTIONOUTPUT_INPUT_SIZE = 3; +const int32_t SSD_DETECTIONOUTPUT_INPUT_SIZE_AFTER_FUSION = 2; +const int32_t 
SSD_DETECTIONOUTPUT_OUTPUT_SIZE = 2; +const int32_t SSD_DETECTIONOUTPUT_OUTPUT_SIZE_AFTER_FUSION = 3; +const int32_t SSD_DETECTIONOUTPUT_WORKSPACE_NUM = 1; +const int32_t SSD_DETECTIONOUTPUT_WORKSPACE_NUM_AFTER_FUSION = 0; +const bool SSD_DETECTIONOUTPUT_SHARED_LOCATION_DEFAULT_VALUE = true; +const int32_t SSD_DETECTIONOUTPUT_BACKGROUND_LABEL_ID_DEFAULT_VALUE = 0; +const float SSD_DETECTIONOUTPUT_NMS_THRESHOLD_DEFAULT_VALUE = 0.3; +const int32_t SSD_DETECTIONOUTPUT_TOP_K_DEFAULT_VALUE = 200; +const float SSD_DETECTIONOUTPUT_ETA_DEFAULT_VALUE = 1.0; +const int SSD_DETECTIONOUTPUT_CODE_TYPE_DEFAULT_VALUE = static_cast(cce::CC_BOX_CENTER_SIZE); +const int32_t SSD_DETECTIONOUTPUT_KEEP_TOP_K_DEFAULT_VALUE = 200; +const bool SSD_DETECTIONOUTPUT_VARIANCE_ENCODED_IN_TARGET_DEFAULT_VALUE = false; +const float SSD_DETECTIONOUTPUT_CONFIDENCE_THRESHOLD_DEFAULT_VALUE = 0.1; + +// Refinedet DetectionOutput +const int32_t REFINEDET_DETECTIONOUTPUT_INPUT_SIZE = 5; +const int32_t REFINEDET_DETECTIONOUTPUT_INPUT_SIZE_AFTER_FUSION = 2; +const int32_t REFINEDET_DETECTIONOUTPUT_OUTPUT_SIZE = 2; +const int32_t REFINEDET_DETECTIONOUTPUT_OUTPUT_SIZE_AFTER_FUSION = 3; +const int32_t REFINEDET_DETECTIONOUTPUT_WORKSPACE_NUM = 1; +const bool REFINEDET_DETECTIONOUTPUT_SHARED_LOCATION_DEFAULT_VALUE = true; +const int32_t REFINEDET_DETECTIONOUTPUT_BACKGROUND_LABEL_ID_DEFAULT_VALUE = 0; +const float REFINEDET_DETECTIONOUTPUT_NMS_THRESHOLD_DEFAULT_VALUE = 0.3; +const int32_t REFINEDET_DETECTIONOUTPUT_TOP_K_DEFAULT_VALUE = 200; +const float REFINEDET_DETECTIONOUTPUT_ETA_DEFAULT_VALUE = 1.0; +const bool REFINEDET_DETECTIONOUTPUT_VARIANCE_ENCODED_IN_TARGET_DEFAULT_VALUE = false; +const int REFINEDET_DETECTIONOUTPUT_CODE_TYPE_DEFAULT_VALUE = static_cast(cce::CC_BOX_CENTER_SIZE); +const int32_t REFINEDET_DETECTIONOUTPUT_KEEP_TOP_K_DEFAULT_VALUE = 200; +const float REFINEDET_DETECTIONOUTPUT_CONFIDENCE_THRESHOLD_DEFAULT_VALUE = 0.1; +const float REFINEDET_DETECTIONOUTPUT_OBJECTNESS_SCORE_DEFAULT_VALUE = 
0; + +// Channel axpy +const int32_t CHANNEL_AXPY_INPUT_NUM = 3; +const int32_t CHANNEL_AXPY_INPUT_DIM_SIZE = 4; +const int32_t CHANNEL_AXPY_WORKSPACE_NUM = 1; + +// Psroi pooling +const int PSROI_POOLING_INPUT_COUNT = 2; +const int PSROI_POOLING_WORKSPACE_NUM = 1; + +// MaxPoolWithArgmax +const uint32_t MAX_POOL_WITH_ARGMAX_OUTPUT_NUM = 2; +const uint32_t MAX_POOL_GRAD_WITH_ARGMAX_INPUT_NUM = 3; + +// AvgPoolGrad +const uint32_t AVG_POOL_GRAD_INPUT_NUM = 2; + +// ROIAlign +const int32_t ROIALIGN_INPUT_SIZE = 2; +const int32_t ROIALIGN_WORKSPACE_NUM = 1; +const int32_t ROIALIGN_DEFAULT_POOLED_H = 1; +const int32_t ROIALIGN_DEFAULT_POOLED_W = 1; + +// Correlation +const uint32_t CORRELATION_INPUT_NUM = 2; +const int CORRELATION_WORKSPACE_NUM = 1; + +// Detectionpostprocess +const int32_t POSTPROCESS_INPUT_SIZE = 4; +const int32_t POSTPROCESS_OUTPUT_SIZE = 2; +const int32_t POSTPROCESS_WORKSPACE_NUM = 1; +const uint32_t POSTPROCESS_CLS_NUM_DEFAULT_VALUE = 12; +const uint32_t POSTPROCESS_POST_NMS_TOPN_DEFAULT_VALUE = 100; +const float POSTPROCESS_NMS_THRESH_DEFAULT_VALUE = 0.3; +const float POSTPROCESS_CONF_THRESH_DEFAULT_VALUE = 0.5; +const float POSTPROCESS_BBOX_REG_WEIGHT_DIM_DEFAULT_VALUE = 1.0; +const int32_t POSTPROCESS_BBOX_REG_WEIGHT_SIZE_DEFAULT_VALUE = 4; + +// Split +const int32_t SPLIT_INPUT_NUM = 2; +const int32_t SPLIT_DEFAULT_AXIS_VALUE = 1; +const int32_t SPLIT_MIN_OUTPUT_SIZE = 1; + +const uint32_t STRIDEDSLICE_INPUT_NUM = 4; +// Slice +const int32_t SLICE_INPUT_NUM = 3; +const int32_t SLICE_WEIGHT_NUM = 2; + +// GatherNd +const int32_t GATHERND_INPUT_NUM = 2; +// ArgMax +const int32_t ARGMAX_INPUT_NUM = 2; +const int32_t ARGMAX_REAL_INPUT_NUM = 1; + +// HighWay +const int32_t HIGHWAY_INPUT_NUM = 4; +const int32_t HIGHWAY_WORKSPACE_NUM = 1; +// RealDiv +const int32_t REALDIV_INPUT_NUM = 2; + +// Range +const int32_t RANGE_INPUT_NUM = 3; +const int32_t RANGE_OUTPUT_NUM = 1; +const int32_t RANGE_INPUT_DIM_SIZE = 0; + +// Pad +const int32_t 
PAD_WEIGHT_NUM = 1; +const int32_t PAD_DIM_SIZE = 2; +const int32_t PAD_DIM0 = 4; +const int32_t PAD_DIM1 = 2; +const int32_t PAD_WEIGHT_WITH_CONSTANT_NUM = 2; +const int32_t PAD_CONSTATNT_DEFAULT_VALUE = 0; +const int32_t PAD_PADDINGS_SIZE = 8; + +// Tile +const int32_t TILE_WEIGHT_NUM = 1; +const int32_t TILE_MULTIPLES_DIM_SIZE = 1; + +// DecodeBbox +const int32_t DECODE_BBOX_INPUT_NUM = 2; + +// GenerateRpnProposals +const int32_t GENERATE_RPN_PROPOSAL_INPUT_SIZE = 2; +const int32_t GENERATE_RPN_PROPOSAL_OUTPUT_SIZE = 3; + +// Decode_BBox +const int32_t DECODE_BBOX_INPUT_SIZE = 2; +const int32_t DEFAULT_DECODE_CLIP_VALUE = 0; + +// FastRcnnPredictions +const int32_t FASTRCNN_PREDICTIONS_INPUT_SIZE = 2; +const int32_t FASTRCNN_PREDICTIONS_OUTPUT_SIZE = 4; + +const int32_t CLIP_BOXES_INPUT_NUM = 1; +const int32_t CLIP_BOXES_WEIGHT_SIZE = 1; +const int32_t CLIP_BOXES_WEIGHT_ITEM_SIZE = 2; +const int32_t CLIP_BOXES_OUTPUT_NUM = 1; + +const int32_t FLOORDIV_INPUT_NUM = 2; +// Mean +const int32_t MEAN_WEIGHT_SIZE = 1; +const int32_t MEAN_WEIGHT_DIM_SIZE = 1; +const int32_t MEAN_WEIGHT_DIM = 2; +const int32_t MEAN_FIRST_AXIS = 2; +const int32_t MEAN_SECOND_AXIS = 3; +const int32_t MEAN_STRIDE_PLACE_HOLD = 1; +// Switch +const uint32_t SWITCH_INPUT_NUM = 2; +const uint32_t SWITCH_OUTPUT_NUM = 2; +// Merge +const uint32_t MERGE_INPUT_NUM = 2; +// Greater +const uint32_t GREATER_OUTPUT_NUM = 1; +const uint32_t GREATER_INPUT_NUM = 0; +const uint32_t GREATER_WEIGHT_NUM = 2; + +// Yolo region +const uint32_t YOLO_REGION_OUTPUT_NUM = 3; +const uint32_t YOLO_REGION_WORKSPACE_NUM = 1; +const uint32_t YOLO_REGION_COORDS = 4; +const uint32_t YOLO_REGION_CLASSES = 20; +const uint32_t YOLO_REGION_BOXES = 1; +const bool YOLO_REGION_BACKGROUND = false; +const bool YOLO_REGION_SOFTMAX = false; +const bool YOLO_REGION_SOFTMAX_TREE = false; + +// Yolo detectionoutput +const uint32_t YOLO_DETECTIONOUTPUT_INPUT_SIZE = 4; +const uint32_t YOLO_DETECTIONOUTPUT_OUTPUT_SIZE = 2; +const 
uint32_t YOLO_DETECTION_OUTPUT_WORKSPACE_NUM = 1; +const uint32_t YOLO_DETECTION_OUTPUT_CLASSES = 20; +const uint32_t YOLO_DETECTION_OUTPUT_BOXES_V2 = 5; +const uint32_t YOLO_DETECTION_OUTPUT_BOXES_V3 = 3; +const bool YOLO_DETECTION_OUTPUT_RELATIVE = true; +const float YOLO_DETECTION_OUTPUT_OBJECTNESS_THRESHOLD = 0.5; +const float YOLO_DETECTION_OUTPUT_CLASS_THRESHOLD = 0.5; +const uint32_t YOLO_DETECTION_OUTPUT_POST_TOP_K = UINT_MAX; +const float YOLO_DETECTION_OUTPUT_NMS_THRESHOLD = 0; +const float YOLO_DETECTION_OUTPUT_IOU_THRESHOLD_DECAY = 1.0; +const float YOLO_DETECTION_OUTPUT_COOR_SCALE_FACTOR = 1.0; + +// Reorg +const int32_t REORG_DEFAULT_STRIDE = 2; +const uint32_t REORG_INPUT_COUNT = 1; +// Reshape +const int32_t RESHAPE_INPUT_NUM = 2; +// Maximum +const int32_t MAXIMUM_INPUT_NUM = 2; + +// Spatialtf +const int32_t SPATIALTF_WORKSPACE_NUM = 1; + +const int32_t REVERSE_DEFAULT_AXIS = 1; +// Crop +const int32_t CROP_AXIS = 2; +const int32_t CROP_INPUT_NUM = 2; + +// ConvGradInput +const uint32_t CONVGRADINPUT_WORKSPACE_NUM = 1; +const uint32_t CONVGRADINPUT_INPUT_NUM = 3; + +// RNN +const uint32_t RNN_WORKSPACE_NUM = 1; + +// Cropandresize +const int32_t CROPANDRESIZE_WEIGHT_NUM = 1; +const int32_t CROPANDRESIZE_CROP_DIM_SIZE = 1; +const int32_t CROP_DIM0 = 2; + +// Attention decoder weight index +const uint32_t ATTENTION_DECODER_WEIGHT_ATTENW0 = 0; +const uint32_t ATTENTION_DECODER_WEIGHT_ATTENTION0_KERNEL = 1; +const uint32_t ATTENTION_DECODER_WEIGHT_ATTNOUTPUTPROJECTION_KERNEL = 2; +const uint32_t ATTENTION_DECODER_WEIGHT_ATTENTION_DECODER_KERNEL = 3; +const uint32_t ATTENTION_DECODER_WEIGHT_CELL0_GATES_KERNEL = 4; +const uint32_t ATTENTION_DECODER_WEIGHT_CELL0_CANDIDATE_KERNEL = 5; +const uint32_t ATTENTION_DECODER_WEIGHT_CELL1_GATES_KERNEL = 6; +const uint32_t ATTENTION_DECODER_WEIGHT_CELL1_CANDIDATE_KERNEL = 7; +const uint32_t ATTENTION_DECODER_WEIGHT_ATTENTION0_BIAS = 8; +const uint32_t ATTENTION_DECODER_WEIGHT_ATTNOUTPUTPROJECTION_BIAS = 9; +const 
uint32_t ATTENTION_DECODER_WEIGHT_ATTENTION_DECODER_BIAS = 10; +const uint32_t ATTENTION_DECODER_WEIGHT_CELL0_GATES_BIAS = 11; +const uint32_t ATTENTION_DECODER_WEIGHT_CELL0_CANDIDATE_BIAS = 12; +const uint32_t ATTENTION_DECODER_WEIGHT_CELL1_GATES_BIAS = 13; +const uint32_t ATTENTION_DECODER_WEIGHT_CELL1_CANDIDATE_BIAS = 14; +const uint32_t ATTENTION_DECODER_WEIGHT_EMBEDDING = 15; +const uint32_t ATTENTION_DECODER_WEIGHT_ATTENVA = 16; +const uint32_t ATTENTION_DECODER_WEIGHT_DECODER_INITIAL = 17; + +// Attention decoder weight size +const uint32_t ATTENTION_DECODER_WEIGHT_SIZE = 18; +const uint32_t ATTENTION_DECODER_INPUT_SIZE = 2; +const uint32_t ATTENTION_DECODER_WORKSPACE_NUM = 1; +const uint32_t ATTENTION_DECODER_INPUT_DECODER_INPUTS = 0; +const uint32_t ATTENTION_DECODER_INPUT_DECODER_INITIAL_HIDDEN = 1; + +const int ATTENTION_DECODER_ALGO_NORMAL = 0; +const int ATTENTION_DECODER_SYMBOLS = 10000; +const int ATTENTION_DECODER_EMBEDDING_SIZE = 128; +const int ATTENTION_DECODER_ATTENTION_NUM_HIDDEN = 256; +const int ATTENTION_DECODER_DECODER_NUM_HIDDEN = 128; +const int ATTENTION_DECODER_DECODER_NUM_LAYERS = 2; +const int ATTENTION_DECODER_RNN_UNBIDIRECTIONAL = 0; +const int ATTENTION_DECODER_SEQLEN_VALUE = 57; +const int ATTENTION_DECODER_GRU = 3; + +// Logicaland +const int32_t LOGICAL_AND_INPUT_NUM = 2; +const int32_t EQUAL_INPUT_NUM = 2; + +static const int32_t OP_WEIGHT_MEM_BASE_OFFSET = 512; + +// MultiShape +const uint32_t MULTI_SHAPE_INPUT_NUM = 2; + +// Shufflechannel +const uint32_t SHUFFLECHANNEL_DEFAULT_GROUP = 1; +} // namespace domi +#endif // INC_FRAMEWORK_COMMON_OP_OP_PARSER_UTIL_H_ diff --git a/inc/framework/common/op_types.h b/inc/framework/common/op_types.h new file mode 100644 index 00000000..4555d5c3 --- /dev/null +++ b/inc/framework/common/op_types.h @@ -0,0 +1,62 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_OP_TYPES_H_ +#define INC_FRAMEWORK_COMMON_OP_TYPES_H_ + +#include +#include + +namespace ge { +class OpTypeContainer { + public: + static OpTypeContainer *Instance() { + static OpTypeContainer instance; + return &instance; + } + ~OpTypeContainer() = default; + + void Register(const std::string &op_type) { op_type_list_.insert(op_type); } + + bool IsExisting(const std::string &op_type) { + auto iter_find = op_type_list_.find(op_type); + return iter_find != op_type_list_.end(); + } + + protected: + OpTypeContainer() {} + + private: + std::set op_type_list_; +}; + +class OpTypeRegistrar { + public: + explicit OpTypeRegistrar(const std::string &op_type) { OpTypeContainer::Instance()->Register(op_type); } + ~OpTypeRegistrar() {} +}; + +#define REGISTER_OPTYPE_DECLARE(var_name, str_name) \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const char *var_name; + +#define REGISTER_OPTYPE_DEFINE(var_name, str_name) \ + const char *var_name = str_name; \ + const OpTypeRegistrar g_##var_name##_reg(str_name); + +#define IS_OPTYPE_EXISTING(str_name) (OpTypeContainer::Instance()->IsExisting(str_name)) +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_OP_TYPES_H_ diff --git a/inc/framework/common/scope_guard.h b/inc/framework/common/scope_guard.h new file mode 100644 index 00000000..39214a43 --- /dev/null +++ b/inc/framework/common/scope_guard.h @@ -0,0 +1,60 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_SCOPE_GUARD_H_ +#define INC_FRAMEWORK_COMMON_SCOPE_GUARD_H_ + +#include +#include + +/// Usage: +/// Acquire Resource 1 +/// MAKE_GUARD([&] { Release Resource 1 }) +/// Acquire Resource 2 +/// MAKE_GUARD([&] { Release Resource 2 }) +#define GE_MAKE_GUARD(var, callback) ge::ScopeGuard make_guard_##var(callback) +#define GE_DISMISS_GUARD(var) make_guard_##var.Dismiss() + +namespace ge { +class ScopeGuard { + public: + // Noncopyable + ScopeGuard(ScopeGuard const &) = delete; + ScopeGuard &operator=(ScopeGuard const &) = delete; + + explicit ScopeGuard(const std::function &on_exit_scope) : on_exit_scope_(on_exit_scope), dismissed_(false) {} + + ~ScopeGuard() { + if (!dismissed_) { + if (on_exit_scope_ != nullptr) { + try { + on_exit_scope_(); + } catch (std::bad_function_call &e) { + } catch (...) { + } + } + } + } + + void Dismiss() { dismissed_ = true; } + + private: + std::function on_exit_scope_; + bool dismissed_; +}; +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_SCOPE_GUARD_H_ diff --git a/inc/framework/common/string_util.h b/inc/framework/common/string_util.h new file mode 100644 index 00000000..b74eddcf --- /dev/null +++ b/inc/framework/common/string_util.h @@ -0,0 +1,156 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
#ifndef INC_FRAMEWORK_COMMON_STRING_UTIL_H_
#define INC_FRAMEWORK_COMMON_STRING_UTIL_H_

#include <algorithm>
#include <cctype>
#include <cstdarg>
#include <cstdint>
#include <cstdio>
#include <sstream>
#include <string>
#include <vector>

namespace ge {
/// Header-only string helpers. The pre-C++11 (#else) fallbacks were removed:
/// surrounding headers already use C++11 unconditionally (lambdas, = delete),
/// and std::ptr_fun no longer exists in C++17.
class StringUtils {
 public:
  /// Remove leading whitespace in place.
  /// @param [in,out] s string to trim
  /// @return reference to @p s for chaining
  static std::string &Ltrim(std::string &s) {
    // cast to unsigned char: std::isspace on a negative char is UB
    (void)s.erase(s.begin(), std::find_if(s.begin(), s.end(),
                                          [](int c) { return std::isspace(static_cast<unsigned char>(c)) == 0; }));
    return s;
  }

  /// Remove trailing whitespace in place.
  /// @param [in,out] s string to trim
  /// @return reference to @p s for chaining
  /// Fixed: the previous C++11 branch erased from the FRONT (an exact
  /// duplicate of Ltrim); trailing blanks must be found via reverse iterators.
  static std::string &Rtrim(std::string &s) {
    (void)s.erase(std::find_if(s.rbegin(), s.rend(),
                               [](int c) { return std::isspace(static_cast<unsigned char>(c)) == 0; })
                      .base(),
                  s.end());
    return s;
  }

  ///
  /// @ingroup domi_common
  /// @brief delete spaces at the beginning and end of a string
  /// @param [in] string to be trimmed
  /// @return string after trim
  ///
  static std::string &Trim(std::string &s) { return Ltrim(Rtrim(s)); }

  ///
  /// @ingroup domi_common
  /// @brief string splitting
  /// @param [in] str string to be split
  /// @param [in] delim separator
  /// @return string array after segmentation; an empty input yields {""},
  ///         and a trailing delimiter yields a trailing "" element
  ///
  static std::vector<std::string> Split(const std::string &str, char delim) {
    std::vector<std::string> elems;

    if (str.empty()) {
      elems.emplace_back("");
      return elems;
    }

    std::stringstream ss(str);
    std::string item;

    while (getline(ss, item, delim)) {
      elems.push_back(item);
    }

    // getline drops a final empty field; restore it so "a," -> {"a", ""}
    auto str_size = str.size();
    if (str_size > 0 && str[str_size - 1] == delim) {
      elems.emplace_back("");
    }

    return elems;
  }

  ///
  /// @ingroup domi_common
  /// @brief obtain the file name (component after the last '/')
  /// @param [in] s path name
  /// @return file name, or "" for an empty path
  ///
  static std::string GetFileName(std::string &s) {
    if (s.empty()) {
      return "";
    }
    std::vector<std::string> files = StringUtils::Split(s, '/');

    return files.empty() ? "" : files[files.size() - 1];
  }

  ///
  /// @ingroup domi_common
  /// @brief full replacement of every occurrence of old_value
  /// @param [in] str string to be processed (taken by copy)
  /// @param [in] old_value characters before replacement
  /// @param [in] new_value characters after replacement
  /// @return string after replacement
  ///
  static std::string ReplaceAll(std::string str, const std::string &old_value, const std::string &new_value) {
    // guard: find("") matches at every position; with an empty replacement the
    // cursor would never advance and the loop below would hang
    if (old_value.empty()) {
      return str;
    }
    std::string::size_type cur_pos = 0;
    std::string::size_type old_length = old_value.length();
    std::string::size_type new_length = new_value.length();
    // advance past each replacement so overlapping matches are not re-expanded
    for (; cur_pos != std::string::npos; cur_pos += new_length) {
      if ((cur_pos = str.find(old_value, cur_pos)) != std::string::npos) {
        (void)str.replace(cur_pos, old_length, new_value);
      } else {
        break;
      }
    }
    return str;
  }

  ///
  /// @ingroup domi_common
  /// @brief checks whether a character string starts with a prefix
  /// @param [in] str string to be compared
  /// @param [in] str_x prefix (now passed by const reference, not by value)
  /// @return true when str_x is a prefix of str, false otherwise
  ///
  static bool StartWith(const std::string &str, const std::string &str_x) {
    return ((str.size() >= str_x.size()) && (str.compare(0, str_x.size(), str_x) == 0));
  }

  ///
  /// @ingroup domi_common
  /// @brief printf-style formatting into a std::string
  /// @param [in] format format string
  /// @param [in] ... values filling the format
  /// @return formatted string; "" on error or when the result would exceed
  ///         the internal 1023-character buffer (matching the old behavior)
  ///
  static std::string FormatString(const char *format, ...) {
    if (format == nullptr) {
      return "";
    }
    const uint32_t MAX_BUFFER_LEN = 1024;  // stack buffer; plint check requires < 1024 chars of output
    va_list args;
    va_start(args, format);
    char buffer[MAX_BUFFER_LEN] = {0};
    // NOTE(review): replaced non-standard vsnprintf_s (securec/MSVC) with
    // std::vsnprintf for portability; both null-terminate. The bounds check
    // below preserves the old contract of returning "" on truncation.
    int32_t ret = std::vsnprintf(buffer, MAX_BUFFER_LEN, format, args);
    va_end(args);
    return (ret > 0 && ret < static_cast<int32_t>(MAX_BUFFER_LEN)) ? buffer : "";
  }
};
}  // namespace ge

#endif  // INC_FRAMEWORK_COMMON_STRING_UTIL_H_
+ */ + +#ifndef INC_FRAMEWORK_COMMON_TYPES_H_ +#define INC_FRAMEWORK_COMMON_TYPES_H_ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "framework/common/fmk_error_codes.h" +#include "framework/common/fmk_types.h" +#include "framework/common/op_types.h" +#include "register/register_types.h" + +#if !defined(__ANDROID__) && !defined(ANDROID) +#define DOMI_DYNAMIC_CAST static_cast +#define DOMI_DYNAMIC_POINTER_CAST std::static_pointer_cast +#else +#define DOMI_DYNAMIC_CAST static_cast +#define DOMI_DYNAMIC_POINTER_CAST std::static_pointer_cast +#endif + +namespace ge { +// dump +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string DUMP_MODEL; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string DUMP_ALL_MODEL; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string DUMP_STATUS; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string DUMP_LAYER; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string DUMP_FILE_PATH; + +// public property names which are supported +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string PROP_OME_START_TIME; // Start time +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string PROP_OME_DUMP_PATH; // Dump path +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string PROP_OME_LOG_PATH; // Log path + +// Profile-related constants +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CCE_PROFILE_ON; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CCE_PROFILE_OFF; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string OME_PROFILE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string CCE_PROFILE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string RTS_PROFILE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const 
std::string PROFILER_JOBCTX; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string PROFILER_TARGET_PATH; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string RTS_PROFILE_PATH; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string PROFILE_STOP_KEY; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string PROFILE_STOP_VALUE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::map PROFILE_COMPONENT_MAP; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string PROFILE_CONFIG; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string MODEL_ATTR_TASKS; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string MODEL_ATTR_TASK_GEN_BASE_ADDR; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string MODEL_ATTR_TASK_GEN_WEIGHT_ADDR; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string MODEL_ATTR_FUSION_MODEL_DEF; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int MODEL_MAX_SIZE; // Max size of 2 GB minus 1 byte. +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint64_t FILE_HEADER_MAX_SIZE; // Max size of 3 GB. + +#if !defined(__ANDROID__) && !defined(ANDROID) +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint64_t ALLOC_MEMORY_MAX_SIZE; // Max size of 8 GB. +#else +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint64_t ALLOC_MEMORY_MAX_SIZE; // Max size of 512M. 
+#endif + +template +static std::pair flip_pair(const std::pair &p) { + return std::pair(p.second, p.first); +} + +template +static std::map flip_map(std::map src) { + std::map dst; + std::transform(src.begin(), src.end(), std::inserter(dst, dst.begin()), flip_pair); + return dst; +} + +REGISTER_OPTYPE_DECLARE(DATA, "Data"); +REGISTER_OPTYPE_DECLARE(AIPPDATA, "AippData"); +REGISTER_OPTYPE_DECLARE(CONVOLUTION, "Convolution"); +REGISTER_OPTYPE_DECLARE(CORRELATION, "Correlation"); +REGISTER_OPTYPE_DECLARE(CORRELATIONV2, "Correlation_V2"); +REGISTER_OPTYPE_DECLARE(DECONVOLUTION, "Deconvolution"); +REGISTER_OPTYPE_DECLARE(POOLING, "Pooling"); +REGISTER_OPTYPE_DECLARE(ELTWISE, "Eltwise"); +REGISTER_OPTYPE_DECLARE(RELU, "ReLU"); +REGISTER_OPTYPE_DECLARE(RELU6, "ReLU6"); +REGISTER_OPTYPE_DECLARE(SIGMOID, "Sigmoid"); +REGISTER_OPTYPE_DECLARE(ABSVAL, "AbsVal"); +REGISTER_OPTYPE_DECLARE(TANH, "TanH"); +REGISTER_OPTYPE_DECLARE(PRELU, "PReLU"); +REGISTER_OPTYPE_DECLARE(BATCHNORM, "BatchNorm"); +REGISTER_OPTYPE_DECLARE(FUSIONBATCHNORM, "FusionBatchNorm"); +REGISTER_OPTYPE_DECLARE(SCALE, "Scale"); +REGISTER_OPTYPE_DECLARE(FULL_CONNECTION, "FullConnection"); +REGISTER_OPTYPE_DECLARE(SOFTMAX, "Softmax"); +REGISTER_OPTYPE_DECLARE(PLUS, "Plus"); +REGISTER_OPTYPE_DECLARE(ACTIVATION, "Activation"); +REGISTER_OPTYPE_DECLARE(FLATTEN, "Flatten"); +REGISTER_OPTYPE_DECLARE(ADD, "Add"); +REGISTER_OPTYPE_DECLARE(SUB, "Sub"); +REGISTER_OPTYPE_DECLARE(MUL, "Mul"); +REGISTER_OPTYPE_DECLARE(MATMUL, "MatMul"); +REGISTER_OPTYPE_DECLARE(RSQRT, "Rsqrt"); +REGISTER_OPTYPE_DECLARE(BIASADD, "BiasAdd"); +REGISTER_OPTYPE_DECLARE(RESHAPE, "Reshape"); +REGISTER_OPTYPE_DECLARE(REFORMAT, "ReFormat"); +REGISTER_OPTYPE_DECLARE(DEPCONVOLUTION, "ConvolutionDepthwise"); +REGISTER_OPTYPE_DECLARE(DROPOUT, "Dropout"); +REGISTER_OPTYPE_DECLARE(DROPOUTDOMASK, "DropOutDoMask"); +REGISTER_OPTYPE_DECLARE(DROPOUTGENMASK, "DropOutGenMask"); +REGISTER_OPTYPE_DECLARE(CONCAT, "Concat"); +REGISTER_OPTYPE_DECLARE(ROIPOOLING, 
"ROIPooling"); +REGISTER_OPTYPE_DECLARE(PROPOSAL, "Proposal"); +REGISTER_OPTYPE_DECLARE(FSRDETECTIONOUTPUT, "FSRDetectionOutput"); +REGISTER_OPTYPE_DECLARE(DETECTIONPOSTPROCESS, "Detectpostprocess"); +REGISTER_OPTYPE_DECLARE(LRN, "LRN"); +REGISTER_OPTYPE_DECLARE(TRANSDATA, "TransData"); +REGISTER_OPTYPE_DECLARE(PERMUTE, "Permute"); +REGISTER_OPTYPE_DECLARE(SSDNORMALIZE, "SSDNormalize"); +REGISTER_OPTYPE_DECLARE(SSDPRIORBOX, "SSDPriorBox"); +REGISTER_OPTYPE_DECLARE(NETOUTPUT, "NetOutput"); +REGISTER_OPTYPE_DECLARE(SSDDETECTIONOUTPUT, "SSDDetectionOutput"); +REGISTER_OPTYPE_DECLARE(REFINEDETDETECTIONOUTPUT, "RefinedetDetectionOutput"); +REGISTER_OPTYPE_DECLARE(CHANNELAXPY, "ChannelAxpy"); +REGISTER_OPTYPE_DECLARE(PSROIPOOLING, "PSROIPooling"); +REGISTER_OPTYPE_DECLARE(POWER, "Power"); +REGISTER_OPTYPE_DECLARE(POW, "Pow"); +REGISTER_OPTYPE_DECLARE(ROIALIGN, "ROIAlign"); +REGISTER_OPTYPE_DECLARE(PYTHON, "Python"); +REGISTER_OPTYPE_DECLARE(FREESPACEEXTRACT, "FreespaceExtract"); +REGISTER_OPTYPE_DECLARE(SPATIALTF, "SpatialTransform"); +REGISTER_OPTYPE_DECLARE(SHAPE, "Shape"); +REGISTER_OPTYPE_DECLARE(SHAPEN, "ShapeN"); +REGISTER_OPTYPE_DECLARE(ARGMAX, "ArgMax"); +REGISTER_OPTYPE_DECLARE(GATHERND, "GatherNd"); +REGISTER_OPTYPE_DECLARE(GATHER, "Gather"); +REGISTER_OPTYPE_DECLARE(REALDIV, "RealDiv"); +REGISTER_OPTYPE_DECLARE(PACK, "Pack"); +REGISTER_OPTYPE_DECLARE(SLICE, "Slice"); +REGISTER_OPTYPE_DECLARE(FLOORDIV, "FloorDiv"); +REGISTER_OPTYPE_DECLARE(SQUEEZE, "Squeeze"); +REGISTER_OPTYPE_DECLARE(STRIDEDSLICE, "StridedSlice"); +REGISTER_OPTYPE_DECLARE(RANGE, "Range"); +REGISTER_OPTYPE_DECLARE(RPNPROPOSALS, "GenerateRpnProposals"); +REGISTER_OPTYPE_DECLARE(DECODEBBOX, "DecodeBBox"); +REGISTER_OPTYPE_DECLARE(PAD, "Pad"); +REGISTER_OPTYPE_DECLARE(PADV2, "PadV2"); +REGISTER_OPTYPE_DECLARE(MIRRORPAD, "MirrorPad"); +REGISTER_OPTYPE_DECLARE(TILE, "Tile"); +REGISTER_OPTYPE_DECLARE(SIZE, "Size"); +REGISTER_OPTYPE_DECLARE(CLIPBOXES, "Clipboxes"); 
+REGISTER_OPTYPE_DECLARE(FASTRCNNPREDICTIONS, "FastrcnnPredictions"); +REGISTER_OPTYPE_DECLARE(SPLIT, "Split"); +REGISTER_OPTYPE_DECLARE(SPLITV, "SplitV"); +REGISTER_OPTYPE_DECLARE(EXPANDDIMS, "ExpandDims"); +REGISTER_OPTYPE_DECLARE(EMPTY, "Empty"); +REGISTER_OPTYPE_DECLARE(MEAN, "Mean"); +REGISTER_OPTYPE_DECLARE(GREATER, "Greater"); +REGISTER_OPTYPE_DECLARE(SWITCH, "Switch"); +REGISTER_OPTYPE_DECLARE(SWITCHN, "SwitchN"); +REGISTER_OPTYPE_DECLARE(REFSWITCH, "RefSwitch"); +REGISTER_OPTYPE_DECLARE(MERGE, "Merge"); +REGISTER_OPTYPE_DECLARE(REFMERGE, "RefMerge"); +REGISTER_OPTYPE_DECLARE(ENTER, "Enter"); +REGISTER_OPTYPE_DECLARE(REFENTER, "RefEnter"); +REGISTER_OPTYPE_DECLARE(LOOPCOND, "LoopCond"); +REGISTER_OPTYPE_DECLARE(NEXTITERATION, "NextIteration"); +REGISTER_OPTYPE_DECLARE(REFNEXTITERATION, "RefNextIteration"); +REGISTER_OPTYPE_DECLARE(EXIT, "Exit"); +REGISTER_OPTYPE_DECLARE(REFEXIT, "RefExit"); +REGISTER_OPTYPE_DECLARE(CONTROLTRIGGER, "ControlTrigger"); +REGISTER_OPTYPE_DECLARE(TRANSPOSE, "Transpose"); +REGISTER_OPTYPE_DECLARE(TRANSPOSED, "TransposeD"); +REGISTER_OPTYPE_DECLARE(CAST, "Cast"); +REGISTER_OPTYPE_DECLARE(REGION, "Region"); +REGISTER_OPTYPE_DECLARE(YOLO, "Yolo"); +REGISTER_OPTYPE_DECLARE(YOLODETECTIONOUTPUT, "YoloDetectionOutput"); +REGISTER_OPTYPE_DECLARE(FILL, "Fill"); +REGISTER_OPTYPE_DECLARE(RANK, "Rank"); +REGISTER_OPTYPE_DECLARE(REVERSE, "Reverse"); +REGISTER_OPTYPE_DECLARE(UNPACK, "Unpack"); +REGISTER_OPTYPE_DECLARE(YOLO2REORG, "Yolo2Reorg"); +REGISTER_OPTYPE_DECLARE(REDUCESUM, "ReduceSum"); +REGISTER_OPTYPE_DECLARE(SUM, "Sum"); +REGISTER_OPTYPE_DECLARE(CONSTANT, "Const"); +REGISTER_OPTYPE_DECLARE(RESIZEBILINEAR, "ResizeBilinear"); +REGISTER_OPTYPE_DECLARE(RESIZEBILINEARGRAD, "ResizeBilinearGrad"); +REGISTER_OPTYPE_DECLARE(MAXIMUM, "Maximum"); +REGISTER_OPTYPE_DECLARE(FRAMEWORKOP, "FrameworkOp"); +REGISTER_OPTYPE_DECLARE(ARG, "_Arg"); +REGISTER_OPTYPE_DECLARE(FUSEDBATCHNORMGRAD, "FusedBatchNormGrad"); +REGISTER_OPTYPE_DECLARE(LSTM, "LSTM"); 
+REGISTER_OPTYPE_DECLARE(HIGHWAY, "HighWay"); +REGISTER_OPTYPE_DECLARE(RNN, "RNN"); +REGISTER_OPTYPE_DECLARE(ATTENTIONDECODER, "AttentionDecoder"); +REGISTER_OPTYPE_DECLARE(LOGICAL_NOT, "LogicalNot"); +REGISTER_OPTYPE_DECLARE(LOGICAL_AND, "LogicalAnd"); +REGISTER_OPTYPE_DECLARE(LOGICAL_OR, "LogicalOr"); +REGISTER_OPTYPE_DECLARE(EQUAL, "Equal"); +REGISTER_OPTYPE_DECLARE(NOTEQUAL, "NotEqual"); +REGISTER_OPTYPE_DECLARE(INTERP, "Interp"); +REGISTER_OPTYPE_DECLARE(SHUFFLECHANNEL, "ShuffleChannel"); +REGISTER_OPTYPE_DECLARE(AIPP, "Aipp"); +REGISTER_OPTYPE_DECLARE(MULTISHAPE, "MultiShape"); +REGISTER_OPTYPE_DECLARE(RECIPROCAL, "Reciprocal"); +REGISTER_OPTYPE_DECLARE(SELU, "Selu"); +REGISTER_OPTYPE_DECLARE(ELU, "Elu"); +REGISTER_OPTYPE_DECLARE(ACOSH, "Acosh"); +REGISTER_OPTYPE_DECLARE(ASINH, "Asinh"); +REGISTER_OPTYPE_DECLARE(MINIMUM, "Minimum"); +REGISTER_OPTYPE_DECLARE(CLIP, "Clip"); +REGISTER_OPTYPE_DECLARE(L2NORMALIZE, "L2Normalize"); +REGISTER_OPTYPE_DECLARE(CROPANDRESIZE, "CropAndResize"); +REGISTER_OPTYPE_DECLARE(UNUSEDCONST, "UnusedConst"); +REGISTER_OPTYPE_DECLARE(SPARSETODENSE, "SparseToDense"); +REGISTER_OPTYPE_DECLARE(NONMAXSUPPRESSION, "NonMaxSuppression"); +REGISTER_OPTYPE_DECLARE(TOPKV2, "TopKV2"); +REGISTER_OPTYPE_DECLARE(INVERTPERMUTATION, "InvertPermutation"); +REGISTER_OPTYPE_DECLARE(MULTINOMIAL, "Multinomial"); +REGISTER_OPTYPE_DECLARE(REVERSESEQUENCE, "ReverseSequence"); +REGISTER_OPTYPE_DECLARE(REDUCEPROD, "ReduceProd"); +REGISTER_OPTYPE_DECLARE(REDUCEMAX, "ReduceMax"); +REGISTER_OPTYPE_DECLARE(REDUCEMIN, "ReduceMin"); +REGISTER_OPTYPE_DECLARE(EXTRACTIMAGEPATCHES, "ExtractImagePatches"); +REGISTER_OPTYPE_DECLARE(SQRT, "Sqrt"); +REGISTER_OPTYPE_DECLARE(REDUCEALL, "ReduceAll"); +REGISTER_OPTYPE_DECLARE(RESIZENEARESTNEIGHBOR, "ResizeNearestNeighbor"); +REGISTER_OPTYPE_DECLARE(SPACETOBATCHND, "SpaceToBatchND"); +REGISTER_OPTYPE_DECLARE(BATCHTOSPACEND, "BatchToSpaceND"); +REGISTER_OPTYPE_DECLARE(ASSERT, "Assert"); +REGISTER_OPTYPE_DECLARE(GREATEREQUAL, 
"GreaterEqual"); +REGISTER_OPTYPE_DECLARE(FLOOR, "Floor"); +REGISTER_OPTYPE_DECLARE(RANDOMUNIFORM, "RandomUniform"); +REGISTER_OPTYPE_DECLARE(BATCHMATMUL, "BatchMatMul"); +REGISTER_OPTYPE_DECLARE(LESSEQUAL, "LessEqual"); +REGISTER_OPTYPE_DECLARE(ONEHOT, "OneHot"); +REGISTER_OPTYPE_DECLARE(LAYERNORM, "LayerNorm"); +REGISTER_OPTYPE_DECLARE(SPACETODEPTH, "SpaceToDepth"); +REGISTER_OPTYPE_DECLARE(DEPTHTOSPACE, "DepthToSpace"); +REGISTER_OPTYPE_DECLARE(RINT, "Rint"); +REGISTER_OPTYPE_DECLARE(ATAN, "Atan"); +REGISTER_OPTYPE_DECLARE(ATAN2, "Atan2"); +REGISTER_OPTYPE_DECLARE(ATANH, "Atanh"); +REGISTER_OPTYPE_DECLARE(ACOS, "Acos"); +REGISTER_OPTYPE_DECLARE(ASIN, "Asin"); +REGISTER_OPTYPE_DECLARE(NEG, "Neg"); +REGISTER_OPTYPE_DECLARE(LOG, "Log"); +REGISTER_OPTYPE_DECLARE(TAN, "Tan"); +REGISTER_OPTYPE_DECLARE(ROUND, "Round"); +REGISTER_OPTYPE_DECLARE(UPSAMPLE, "Upsample"); +REGISTER_OPTYPE_DECLARE(FLOORMOD, "FloorMod"); +REGISTER_OPTYPE_DECLARE(LESS, "Less"); +REGISTER_OPTYPE_DECLARE(ZEROSLIKE, "ZerosLike"); +REGISTER_OPTYPE_DECLARE(EXP, "Exp"); +REGISTER_OPTYPE_DECLARE(WHERE, "Where"); +REGISTER_OPTYPE_DECLARE(FAKEQUANTWITHMINMAXVARS, "FakeQuantWithMinMaxVars"); +REGISTER_OPTYPE_DECLARE(SOFTPLUS, "Softplus"); +REGISTER_OPTYPE_DECLARE(SOFTSIGN, "Softsign"); +REGISTER_OPTYPE_DECLARE(COSH, "Cosh"); +REGISTER_OPTYPE_DECLARE(SINH, "Sinh"); +REGISTER_OPTYPE_DECLARE(RETINAMULTIANCHORS, "RetinaMultiAnchor"); +REGISTER_OPTYPE_DECLARE(SQUAREDDIFFERENCE, "SquaredDifference"); +REGISTER_OPTYPE_DECLARE(REQUIREDSPACETOBATCHPADDINGS, "RequiredSpaceToBatchPaddings"); // for retinanet scope fusion +REGISTER_OPTYPE_DECLARE(SSDPOSTPROCESSOR, "SSDPostProcessor"); +REGISTER_OPTYPE_DECLARE(SSDANCHORGENERATOR, "SSDAnchorGenerator"); +REGISTER_OPTYPE_DECLARE(RETINANETBOXES, "RetinanetBoxes"); +REGISTER_OPTYPE_DECLARE(RETINANETCLIPPEDBOXES, "RetinanetClippedBoxes"); +REGISTER_OPTYPE_DECLARE(RETINANETFILTEREDDETECTIONS, "RetinanetFilteredDetections"); +REGISTER_OPTYPE_DECLARE(RETINANETPOSTPROCESSOR, 
"RetinanetPostProcessor"); +REGISTER_OPTYPE_DECLARE(RETINANETANCHORS, "RetinanetAnchors"); +REGISTER_OPTYPE_DECLARE(FASTERRCNNMAP, "FasterRCNNMap"); +REGISTER_OPTYPE_DECLARE(FASTERRCNNMAP1, "FasterRCNNMap1"); +REGISTER_OPTYPE_DECLARE(FASTERRCNNSECONDSTAGEPOSTPROCESSOR, "FasterRCNNSecondStagePostprocessor"); +REGISTER_OPTYPE_DECLARE(FASTERRCNNROIINTERPOOLING, "FasterRCNNROIInterPooling"); +REGISTER_OPTYPE_DECLARE(FASTERRCNNFIRSTSTAGEPOSTPROCESSOR, "FasterRCNNFirstStagePostprocessor"); +REGISTER_OPTYPE_DECLARE(FASTERRCNNGRIDANCHORGENERATOR, "FasterRCNNGridAnchorGenerator"); +REGISTER_OPTYPE_DECLARE(ROIINTERPOOLING, "ROIInterPooling"); +REGISTER_OPTYPE_DECLARE(FASTERRCNNCLIPTOWINDOW, "FasterRCNNClipToWindow"); +REGISTER_OPTYPE_DECLARE(EMBEDLOOKUP, "EmbedLookup"); +REGISTER_OPTYPE_DECLARE(HASHLOOKUP, "HashLookup"); +REGISTER_OPTYPE_DECLARE(LSH_PROJ, "LshProject"); +REGISTER_OPTYPE_DECLARE(SVDF, "SVDF"); +REGISTER_OPTYPE_DECLARE(IDENTITY, "Identity"); +REGISTER_OPTYPE_DECLARE(PLACEHOLDERWITHDEFAULT, "PlaceholderWithDefault"); +REGISTER_OPTYPE_DECLARE(IDENTITYN, "IdentityN"); +REGISTER_OPTYPE_DECLARE(GETSPAN, "GetSpan"); +REGISTER_OPTYPE_DECLARE(STOPGRADIENT, "StopGradient"); +REGISTER_OPTYPE_DECLARE(PREVENTGRADIENT, "PreventGradient"); +REGISTER_OPTYPE_DECLARE(GUARANTEECONST, "GuaranteeConst"); +REGISTER_OPTYPE_DECLARE(BROADCASTGRADIENTARGS, "BroadcastGradientArgs"); +REGISTER_OPTYPE_DECLARE(BROADCASTARGS, "BroadcastArgs"); +REGISTER_OPTYPE_DECLARE(CONCATV2, "ConcatV2"); +REGISTER_OPTYPE_DECLARE(CONCATOFFSET, "ConcatOffset"); +REGISTER_OPTYPE_DECLARE(LESSEQUAL, "LessEqual"); +REGISTER_OPTYPE_DECLARE(SELECT, "Select"); +REGISTER_OPTYPE_DECLARE(CONFUSIONMATRIX, "ConfusionMatrix"); +REGISTER_OPTYPE_DECLARE(PLACEHOLDER, "PlaceHolder"); +REGISTER_OPTYPE_DECLARE(END, "End"); +REGISTER_OPTYPE_DECLARE(BASICLSTMCELL, "BasicLSTMCell"); +REGISTER_OPTYPE_DECLARE(GETNEXT, "GetNext"); +REGISTER_OPTYPE_DECLARE(INITDATA, "InitData"); + +// ANN dedicated operator 
+REGISTER_OPTYPE_DECLARE(ANN_MEAN, "AnnMean"); +REGISTER_OPTYPE_DECLARE(ANN_CONVOLUTION, "AnnConvolution"); +REGISTER_OPTYPE_DECLARE(ANN_DEPCONVOLUTION, "AnnDepthConv"); +REGISTER_OPTYPE_DECLARE(ANN_FULLCONNECTION, "AnnFullConnection"); +REGISTER_OPTYPE_DECLARE(ANN_NETOUTPUT, "AnnNetOutput"); +REGISTER_OPTYPE_DECLARE(ANN_DATA, "AnnData"); +REGISTER_OPTYPE_DECLARE(ANN_RESHAPE, "AnnReshape"); +REGISTER_OPTYPE_DECLARE(ANN_ADD, "AnnAdd"); +REGISTER_OPTYPE_DECLARE(ANN_MUL, "AnnMul"); +REGISTER_OPTYPE_DECLARE(ANN_SUB, "AnnSub"); +REGISTER_OPTYPE_DECLARE(ANN_DIV, "AnnDiv"); +REGISTER_OPTYPE_DECLARE(ANN_DEQUANTIZE, "AnnDequant"); +REGISTER_OPTYPE_DECLARE(ANN_QUANTIZE, "AnnQuant"); +REGISTER_OPTYPE_DECLARE(ANN_PAD, "AnnPad"); +REGISTER_OPTYPE_DECLARE(ANN_RESIZE_BILINEAR, "AnnResizeBilinear"); + +// Training operator +REGISTER_OPTYPE_DECLARE(GATHERV2, "GatherV2"); +REGISTER_OPTYPE_DECLARE(CONVGRADFILTER, "Conv2DBackpropFilter"); +REGISTER_OPTYPE_DECLARE(CONV2D, "Conv2D"); +REGISTER_OPTYPE_DECLARE(CONV2DBACKPROPINPUT, "Conv2DBackpropInput"); +REGISTER_OPTYPE_DECLARE(FUSEDBATCHNORM, "FusedBatchNorm"); +REGISTER_OPTYPE_DECLARE(BIASADDGRAD, "BiasAddGrad"); +REGISTER_OPTYPE_DECLARE(ACTIVATIONGRAD, "ReluGrad"); +REGISTER_OPTYPE_DECLARE(MAXPOOLWITHARGMAX, "MaxPoolWithArgmax"); +REGISTER_OPTYPE_DECLARE(MAXPOOLGRADWITHARGMAX, "MaxPoolGradWithArgmax"); +REGISTER_OPTYPE_DECLARE(SPARSESOFTMAXCROSSENTROPYWITHLOGITS, "SparseSoftmaxCrossEntropyWithLogits"); +REGISTER_OPTYPE_DECLARE(SNAPSHOT, "Snapshot"); +REGISTER_OPTYPE_DECLARE(LAYERNORM, "LayerNorm"); +REGISTER_OPTYPE_DECLARE(HUBERLOSSGRAD, "HuberLossGrad"); +REGISTER_OPTYPE_DECLARE(HUBERLOSS, "HuberLoss"); +REGISTER_OPTYPE_DECLARE(NEGATIVE, "Negative"); +REGISTER_OPTYPE_DECLARE(SSDCAST, "SSDCast"); +REGISTER_OPTYPE_DECLARE(SSDSQUEEZEFUSION, "SsdSqueezeFusion"); +REGISTER_OPTYPE_DECLARE(SPARSESOFTMAXCROSSENTROPY, "SsdSparseSoftmaxCrossEntropy"); +REGISTER_OPTYPE_DECLARE(SPARSESOFTMAXCROSSENTROPYGRAD, "SsdSparseSoftmaxCrossEntropyGrad"); 
+REGISTER_OPTYPE_DECLARE(CONCATFIVE2FOUR, "ConcatFive2Four"); +REGISTER_OPTYPE_DECLARE(CONCATFOUR2FIVE, "ConcatFour2Five"); +REGISTER_OPTYPE_DECLARE(SSDREALDIVTILEMUL, "SSDRealdivTileMul"); +REGISTER_OPTYPE_DECLARE(SSDSUMMULREALDIVMEAN, "SSDSumMulRealdivMean"); + +REGISTER_OPTYPE_DECLARE(MEANGRAD, "MeanGrad"); +REGISTER_OPTYPE_DECLARE(TRANSLATE, "Translate"); +REGISTER_OPTYPE_DECLARE(ADDN, "AddN"); +REGISTER_OPTYPE_DECLARE(L2LOSS, "L2Loss"); +REGISTER_OPTYPE_DECLARE(MULTIPLY, "Multiply"); +REGISTER_OPTYPE_DECLARE(RELU6GRAD, "Relu6Grad"); +REGISTER_OPTYPE_DECLARE(AVGPOOLGRAD, "AvgPoolGrad"); +REGISTER_OPTYPE_DECLARE(DEPTHWISECONV2DBACKPROPFILTER, "DepthwiseConv2dNativeBackpropFilter"); +REGISTER_OPTYPE_DECLARE(DEPTHWISECONV2DBACKPORPINPUT, "DepthwiseConv2dNativeBackpropInput"); +REGISTER_OPTYPE_DECLARE(DEPTHWISECONV2DFORWARDNATIVE, "DepthwiseConv2dNative"); +REGISTER_OPTYPE_DECLARE(DROPOUTGRAD, "DropOutGrad"); +REGISTER_OPTYPE_DECLARE(APPLYRMSPROPMIXEDPRECISION, "apply_rms_prop_mixed_precision"); +REGISTER_OPTYPE_DECLARE(APPLYRMSPROP, "ApplyRMSProp"); +REGISTER_OPTYPE_DECLARE(LARS, "Lars"); +REGISTER_OPTYPE_DECLARE(DYNAMICSTITCH, "DynamicStitch"); + +// Variable sink related +REGISTER_OPTYPE_DECLARE(VARIABLEV2, "VariableV2"); +REGISTER_OPTYPE_DECLARE(VARHANDLEOP, "VarHandleOp"); +REGISTER_OPTYPE_DECLARE(TEMPORARYVARIABLE, "TemporaryVariable"); +REGISTER_OPTYPE_DECLARE(DESTROYTEMPORARYVARIABLE, "DestroyTemporaryVariable"); +REGISTER_OPTYPE_DECLARE(VARIABLE, "Variable"); + +REGISTER_OPTYPE_DECLARE(READVARIABLEOP, "ReadVariableOp"); + +REGISTER_OPTYPE_DECLARE(VARISINITIALIZEDOP, "VarIsInitializedOp"); +REGISTER_OPTYPE_DECLARE(ISVARIABLEINITIALIZED, "IsVariableInitialized"); + +REGISTER_OPTYPE_DECLARE(ASSIGN, "Assign"); +REGISTER_OPTYPE_DECLARE(ASSIGNVARIABLEOP, "AssignVariableOp"); + +REGISTER_OPTYPE_DECLARE(ASSIGNADD, "AssignAdd"); +REGISTER_OPTYPE_DECLARE(ASSIGNADDVARIABLEOP, "AssignAddVariableOp"); + +REGISTER_OPTYPE_DECLARE(ASSIGNSUB, "AssignSub"); 
+REGISTER_OPTYPE_DECLARE(ASSIGNSUBVARIABLEOP, "AssignSubVariableOp"); + +REGISTER_OPTYPE_DECLARE(APPLYMOMENTUM, "ApplyMomentum"); +REGISTER_OPTYPE_DECLARE(RESOURCEAPPLYMOMENTUM, "ResourceApplyMomentum"); +REGISTER_OPTYPE_DECLARE(SGD, "SGD"); +REGISTER_OPTYPE_DECLARE(NOOP, "NoOp"); +REGISTER_OPTYPE_DECLARE(LAYERNORMGRAD, "LayerNormGrad"); + +REGISTER_OPTYPE_DECLARE(SQUARE, "Square"); +REGISTER_OPTYPE_DECLARE(HCOMBROADCAST, "HcomBroadcast"); +REGISTER_OPTYPE_DECLARE(HCOMALLGATHER, "HcomAllGather"); +REGISTER_OPTYPE_DECLARE(HCOMALLREDUCE, "HcomAllReduce"); +REGISTER_OPTYPE_DECLARE(HCOMREDUCESCATTER, "HcomReduceScatter"); +REGISTER_OPTYPE_DECLARE(HCOMSEND, "HcomSend"); +REGISTER_OPTYPE_DECLARE(HCOMRECEIVE, "HcomReceive"); + +REGISTER_OPTYPE_DECLARE(VARASSIGN, "VarAssign"); +REGISTER_OPTYPE_DECLARE(VARISINITIALIZEDOP, "VarIsInitializedOp"); +REGISTER_OPTYPE_DECLARE(LogTimeStamp, "LogTimeStamp"); +REGISTER_OPTYPE_DECLARE(CONSTANTOP, "Constant"); +REGISTER_OPTYPE_DECLARE(STREAMSWITCH, "StreamSwitch"); +REGISTER_OPTYPE_DECLARE(STREAMSWITCHN, "StreamSwitchN"); +REGISTER_OPTYPE_DECLARE(STREAMACTIVE, "StreamActive"); +REGISTER_OPTYPE_DECLARE(MEMCPYASYNC, "MemcpyAsync"); +REGISTER_OPTYPE_DECLARE(STREAMMERGE, "StreamMerge"); +REGISTER_OPTYPE_DECLARE(ENDGRAPH, "EndGraph"); +REGISTER_OPTYPE_DECLARE(SEND, "Send"); +REGISTER_OPTYPE_DECLARE(RECV, "Recv"); +REGISTER_OPTYPE_DECLARE(ATOMICADDRCLEAN, "AtomicAddrClean"); + +REGISTER_OPTYPE_DECLARE(ABS_GRAD, "AbsGrad"); +REGISTER_OPTYPE_DECLARE(ACCUMULATE_N_V2, "AccumulateNV2"); +REGISTER_OPTYPE_DECLARE(ACOS_GRAD, "AcosGrad"); +REGISTER_OPTYPE_DECLARE(ACOSH_GRAD, "AcoshGrad"); +REGISTER_OPTYPE_DECLARE(ANY, "Any"); +REGISTER_OPTYPE_DECLARE(APPROXIMATE_EQUAL, "ApproximateEqual"); +REGISTER_OPTYPE_DECLARE(ASIN_GRAD, "AsinGrad"); +REGISTER_OPTYPE_DECLARE(ASINH_GRAD, "AsinhGrad"); +REGISTER_OPTYPE_DECLARE(ATAN_GRAD, "AtanGrad"); +REGISTER_OPTYPE_DECLARE(BROADCAST_TO, "BroadcastTo"); +REGISTER_OPTYPE_DECLARE(ELU_GRAD, "EluGrad"); 
+REGISTER_OPTYPE_DECLARE(ADD_V2, "AddV2"); +REGISTER_OPTYPE_DECLARE(DATAFORMATDIMMAP, "DataFormatDimMap"); +REGISTER_OPTYPE_DECLARE(DATAFORMATVECPERMUTE, "DataFormatVecPermute"); +REGISTER_OPTYPE_DECLARE(BESSELI0e, "BesselI0e"); +REGISTER_OPTYPE_DECLARE(BESSELI1e, "BesselI1e"); +REGISTER_OPTYPE_DECLARE(DEQUANTIZE, "Dequantize"); +REGISTER_OPTYPE_DECLARE(APPLYADADELTA, "ApplyAdadelta"); +REGISTER_OPTYPE_DECLARE(APPLYADAGRAD, "ApplyAdagrad"); +REGISTER_OPTYPE_DECLARE(APPLYADAGRADDA, "ApplyAdagradDA"); +REGISTER_OPTYPE_DECLARE(APPLYADAM, "ApplyAdam"); +REGISTER_OPTYPE_DECLARE(APPLYADAMAX, "ApplyAdaMax"); +REGISTER_OPTYPE_DECLARE(APPLYADDSIGN, "ApplyAddSign"); +REGISTER_OPTYPE_DECLARE(APPLYCENTEREDRMSPROP, "ApplyCenteredRMSProp"); +REGISTER_OPTYPE_DECLARE(APPLYFTRL, "ApplyFtrl"); +REGISTER_OPTYPE_DECLARE(APPLYFTRLV2, "ApplyFtrlv2"); +REGISTER_OPTYPE_DECLARE(APPLYGRADIENTDESCENT, "ApplyGradientDescent"); +REGISTER_OPTYPE_DECLARE(APPLYPOWERSIGN, "ApplyPowerSign"); +REGISTER_OPTYPE_DECLARE(APPLYPROXIMALADAGRAD, "ApplyProximalAdagrad"); +REGISTER_OPTYPE_DECLARE(APPLYPROXIMALGRADIENTDESCENT, "ApplyProximalGradientDescent"); + +REGISTER_OPTYPE_DECLARE(FOCAL_LOSS, "FocalLoss"); +REGISTER_OPTYPE_DECLARE(FOCAL_LOSS_GRAD, "FocalLossGrad"); +REGISTER_OPTYPE_DECLARE(SMOOTHL1_LOSS, "SmoothL1Loss"); +REGISTER_OPTYPE_DECLARE(SMOOTHL1_LOSS_grad, "SmoothL1LossGrad"); +REGISTER_OPTYPE_DECLARE(REDUCEMEAN, "ReduceMean"); +REGISTER_OPTYPE_DECLARE(CONCAT_V2, "ConcatV2"); +REGISTER_OPTYPE_DECLARE(ONEHOT_V2, "OneHotV2"); +REGISTER_OPTYPE_DECLARE(SLICE_V2, "SliceV2"); +REGISTER_OPTYPE_DECLARE(TILE_V2, "TileV2"); +REGISTER_OPTYPE_DECLARE(SUM_V2, "SumV2"); +// Common operator type when operators have the same name +REGISTER_OPTYPE_DECLARE(DETECTIONOUTPUT, "DetectionOutput"); + +// custom operator +REGISTER_OPTYPE_DECLARE(CUSTOMOP, "CustomOp"); +REGISTER_OPTYPE_DECLARE(CUSTOMOP_NCHW, "CustomOpNchw"); +REGISTER_OPTYPE_DECLARE(CUSTOMOP_NHWC, "CustomOpNhwc"); 
+REGISTER_OPTYPE_DECLARE(CUSTOMOP_NC1HWC0, "CustomOpNc1hwc0"); + +// Depthwise 4d_2_6d,6d_2_4d +REGISTER_OPTYPE_DECLARE(DEPTHWISEWEIGHT4D26D, "depthwise_weight_4d_2_6d"); +REGISTER_OPTYPE_DECLARE(DEPTHWISEWEIGHT6D24D, "depthwise_weight_6d_2_4d"); + +REGISTER_OPTYPE_DECLARE(SQRTGRAD, "SqrtGrad"); +REGISTER_OPTYPE_DECLARE(SIGMOIDGRAD, "SigmoidGrad"); + +enum InputMode { INPUT = 0, CONST }; + +// Definition of the processing status enum of the process module +enum ModelProcessState { + INIT_STATE = 0, // init status + WAIT_EVENT_STATE, // Wait for the event status + IND_RSLT_STATE, // The model execution result is being output to the high level + STOPPED_STATE, // Model execution completed. The model enters this state after Model Manager::Stop + RESERVED_STATE, // reserved +}; + +// Indicates the enun definition of the execution mode of the access module +enum SysMode { + INFERENCE = 0, // Normal, that is, Inference mode + DEBUG, // Debug mode + TIME, // Model execution time mode, including the execution time of each OP + STOP, // STOP mode + RESET, // RESET mode + PERFORMANCE, // Impact of enabling the performance model: 1. 
The input data of the model is considered ready and does + // not need to be converted + ANDROID_DEBUG, // Exports Android platform computing data + RESERVED, // reserved +}; + +// @brief encryption type of the model file +enum ModelEncryptType { + UNENCRYPTED, // not encrypted + ENCRYPTED // encrypted +}; + +/// +/// @brief signature verification +/// +enum ModelCheckType { + CHECK, // signature verification + UNCHECK // no verification +}; + +/// +/// @brief magic number of the model file +/// +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t MODEL_FILE_MAGIC_NUM; + +/// +/// @brief model header length +/// +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t MODEL_FILE_HEAD_LEN; + +/// +/// @brief model name length +/// +static constexpr uint32_t MODEL_NAME_LENGTH = 32; + +/// +/// @brief length of user-defined information +/// +static constexpr uint32_t USER_DEFINE_INFO_LENGTH = 32; + +/// +/// @brief length of the model file signature +/// +static constexpr uint32_t MODEL_FILE_CHECKSUM_LENGTH = 64; + +/// +/// @brief length of the reserved field in the model file header +/// +static constexpr uint32_t MODEL_FILE_RESERVED_LENGTH = 79; + +/// +/// @ingroup domi_omg +/// @brief INPUT node type +/// +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string INPUT_TYPE; + +/// +/// @ingroup domi_omg +/// @brief AIPP flag, indicating the aipp conv operator +/// +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string AIPP_CONV_FLAG; + +/// +/// @ingroup domi_omg +/// @brief AIPP flag, indicating the aipp data operator +/// +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string AIPP_DATA_FLAG; + +// flag of the Data operator, indicating that the input will be input to the dynamic AIPP operator +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string INPUT_TO_DYNAMIC_AIPP; + +// records the W dimension of the model input corresponding to the dynamic AIPP 
+FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string AIPP_RELATED_DATA_DIM_W; + +// H dimension of the model input corresponding to the dynamic AIPP +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string AIPP_RELATED_DATA_DIM_H; + +// DATA node type +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string DATA_TYPE; + +// DATA Operator Type +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string AIPP_DATA_TYPE; + +// framework Operator Type +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string FRAMEWORK_OP_TYPE; + +// DATA node type +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string ANN_DATA_TYPE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string ANN_NETOUTPUT_TYPE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string ANN_DEPTHCONV_TYPE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string ANN_CONV_TYPE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string ANN_FC_TYPE; +// convolution node type +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_NET_OUTPUT; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_END_GRAPH; + +// convolution node type +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string OP_TYPE_CONVOLUTION; +// adds a convolutional node name for the hard AIPP +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string AIPP_CONV_OP_NAME; +// delimiter of operator configuration items +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string OP_CONF_DELIMITER; + +// op attr name +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string ATTR_NAME_VALUE1; + +// op attr name, used to 6d_2_4d C channel +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string ATTR_NAME_INPUT_CVALUE; + +// op attr 
name +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string ATTR_NAME_VALUE1; + +// alpha default value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const float ALPHA_DEFAULT_VALUE; + +// beta default value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const float BETA_DEFAULT_VALUE; + +// coef default value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const float COEF_DEFAULT_VALUE; + +// coef value of Relu6 +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const float RELU6_COEF; + +// stride default value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t STRIDE_DEFAULT_VALUE; + +// pad default value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t PAD_DEFAULT_VALUE; + +// dilation default value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int DILATION_DEFAULT_VALUE; + +// kernel default value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t KERNEL_DEFAULT_VALUE; + +// default conv Group Size +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t DEFAULT_CONV_GROUP; + +// default deconv adj +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t DEFAULT_DECONV_ADJ; + +// indicate num 1 +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NUM_ONE; + +// dim default size value +static const int32_t DIM_DEFAULT_SIZE = 4; + +// the shape of c must be the mutiply of 16 for depthwise +static const uint32_t DEPTHWISE_DIM_C_BASE_NUM = 16; + +// C1HWNCoC0 dim size +static const int32_t DIM_C1HWNCoC0_SIZE = 6; +// C1HWNCoC0 C0 value +static const int C1HWCOC_C0_VALUE = 16; +// spatial default dim size +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int32_t SPATIAL_DIM_DEFAULT_SIZE; + +// dim extension default value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int32_t DIM_DEFAULT_VALUE; + +// the first item in the weight list of opdef is filter 
+FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int32_t WEIGHT_FILTER_INDEX; + +// the second item in the weight list of opdef is bias. +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int32_t WEIGHT_BIAS_INDEX; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int32_t TENSOR_ND_SUPPORT_SIZE; + +// default NCHW index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NCHW_DIM_N; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NCHW_DIM_C; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NCHW_DIM_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NCHW_DIM_W; + +// default C1HWNCoC0 index +static const uint32_t C1HWNCoC0_DIM_C1 = 0; +static const uint32_t C1HWNCoC0_DIM_H = 1; +static const uint32_t C1HWNCoC0_DIM_W = 2; +static const uint32_t C1HWNCoC0_DIM_N = 3; +static const uint32_t C1HWNCoC0_DIM_Co = 4; +static const uint32_t C1HWNCoC0_DIM_C0 = 5; + +// default KCHW index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t KCHW_DIM_K; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t KCHW_DIM_C; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t KCHW_DIM_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t KCHW_DIM_W; + +// default HWCK index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t HWCK_DIM_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t HWCK_DIM_W; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t HWCK_DIM_C; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t HWCK_DIM_K; + +// default NHWC index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NHWC_DIM_N; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NHWC_DIM_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NHWC_DIM_W; 
+FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NHWC_DIM_C; + +// default CHWN index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CHWN_DIM_N; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CHWN_DIM_C; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CHWN_DIM_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CHWN_DIM_W; + +// default CHW index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CHW_DIM_C; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CHW_DIM_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t CHW_DIM_W; + +// default HWC index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t HWC_DIM_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t HWC_DIM_W; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t HWC_DIM_C; +// default Pad index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t PAD_H_HEAD; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t PAD_H_TAIL; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t PAD_W_HEAD; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t PAD_W_TAIL; + +// default window index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t WINDOW_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t WINDOW_W; + +// default stride index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t STRIDE_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t STRIDE_W; + +// default dilation index +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t DILATION_H; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t DILATION_W; + +// the num of XRBG channel +FMK_FUNC_HOST_VISIBILITY 
FMK_FUNC_DEV_VISIBILITY extern const uint32_t XRGB_CHN_NUM; + +// default tensor format +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int DEFAULT_FORMAT; + +// default global pooling +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const bool DEFAULT_GLOBAL_POOLING; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t MODEL_VERSION; // model version 1.0 + +// Number of inputs of the Eltwise operator +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const int ELTWISE_MIN_INPUT_SIZE; + +// flowctrl +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_STREAM_SWITCH; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_STREAM_ACTIVE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_FLOWCTRL_LOOP_PER_ITER; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_FLOWCTRL_LOOP_COND; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_FLOWCTRL_LOOP_INCREMENT; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_FLOWCTRL_LOOP_RESETVALUE; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_FLOWCTRL_LOOP_ASSIGNADD; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_FLOWCTRL_LOOP_ASSIGN; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_ATOMIC_ADDR_CLEAN; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t TRUE_STREAM_ID; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t STREAM_SWITCH_INPUT_NUM; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_GLOBAL_STEP; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const std::string NODE_NAME_GLOBAL_STEP_ASSIGNADD; + +static const int PLATFORM_VERSION_LEN = 20; + +// Definition of the file header of the model 
file +struct ModelFileHeader { + uint32_t magic = MODEL_FILE_MAGIC_NUM; // magic number of DOMI + uint32_t headsize = MODEL_FILE_HEAD_LEN; // length of the model header. The value is fixed at 256 + uint32_t version = MODEL_VERSION; // version 1.0 + uint8_t checksum[MODEL_FILE_CHECKSUM_LENGTH] = {0}; // signature + uint32_t length = 0; // Ciphertext length. In the non-encryption model, the length is the plaintext length. + uint8_t is_encrypt = ModelEncryptType::UNENCRYPTED; // whether encrypted 0:not encrypt, 1:encrypt + uint8_t is_checksum = ModelCheckType::CHECK; // whether to check the checksum + uint8_t modeltype = 0; // 0:IR model 1:standard model 2: OM Tiny model + uint8_t genmode = 0; // 0:offline generate 1:online generate + uint8_t name[MODEL_NAME_LENGTH] = {0}; // Model name, which contains 32 characters + uint32_t ops = 0; // Computing power (Kops) + uint8_t userdefineinfo[USER_DEFINE_INFO_LENGTH] = {0}; // User-defined information. The value contains 32 characters + uint32_t om_ir_version = 0; + uint8_t platform_version[PLATFORM_VERSION_LEN] = {0}; + uint8_t platform_type = {0}; + uint8_t reserved[MODEL_FILE_RESERVED_LENGTH] = {0}; // Reserved field 79 +}; + +static constexpr uint8_t TARGET_TYPE_LTTE_8BIT = 0; +static constexpr uint8_t TARGET_TYPE_MINI_8BIT = 1; +static constexpr uint8_t TARGET_TYPE_TINY_8BIT = 2; + +static constexpr int32_t PARTITION_TYPE_MODEL_DEF = 0; +static constexpr int32_t PARTITION_TYPE_WEIGHTS = 1; +static constexpr int32_t PARTITION_TYPE_TASK_INFO = 2; + +// number of partitions in the current model +static constexpr uint32_t PARTITION_SIZE = 4; + +enum ModelPartitionType { MODEL_DEF = 0, WEIGHTS_DATA, TASK_INFO, TBE_KERNELS }; + +struct ModelPartitionMemInfo { + ModelPartitionType type; + uint32_t mem_offset; + uint32_t mem_size; +}; + +struct ModelPartitionTable { + uint32_t num; + ModelPartitionMemInfo partition[0]; +}; + +#define SIZE_OF_MODEL_PARTITION_TABLE(table) (sizeof(ModelPartitionTable) + 
sizeof(ModelPartitionMemInfo) * (table).num) + +static constexpr int32_t PTHREAD_CREAT_SUCCESS = 0; // pthread_creat success + +// Filter format +typedef enum tagDomiFilterFormat { + DOMI_FILTER_KCHW, // KCHW + DOMI_FILTER_HWCK, // HWCK + DOMI_FILTER_RESERVED +} domiFilterFormat_t; + +// Const data trans type +typedef enum tagDomiConstDataTransType { + DOMI_CONST_DATA_NOT_CHANGE = 0, // No action is required + DOMI_CONST_DATA_TRANS_MATMUL, // The const input to MatMul and needs to be transposed + DOMI_CONST_DATA_RESERVED +} domiConstDataTransType_t; + +// mode of activation +typedef enum tagDomiActivationMode { + DOMI_ACTIVATION_SIGMOID = 0, // sigmoid + DOMI_ACTIVATION_RELU, // ReLU + DOMI_ACTIVATION_TANH, // tanh + DOMI_ACTIVATION_CLIPPED_RELU, // clipped ReLU + DOMI_ACTIVATION_ELU, // ELU + DOMI_ACTIVATION_LEAKY_RELU, + DOMI_ACTIVATION_ABS, // Abs + DOMI_ACTIVATION_RELU1, // relu1 + DOMI_ACTIVATION_SOFTSIGN, // softsign + DOMI_ACTIVATION_SOFTPLUS, // softplus + DOMI_ACTIVATION_HARDSIGMOID, // hardsigmoid + DOMI_ACTIVATION_THRESHOLD_RELU, // threshold + DOMI_ACTIVATION_SELU, // selu + DOMI_ACTIVATION_LINEAR, // linear + DOMI_ACTIVATION_RESERVED +} domiActivationMode_t; + +// mode of batchnorm +typedef enum tagDomiBatchNormMode { + DOMI_BATCHNORM_PER_ACTIVATION = 0, // bnScale, bnBias tensor dims are 1xCxHxW + DOMI_BATCHNORM_SPATIAL, // bnScale, bnBias tensor dims are 1xCx1x1 + DOMI_BATCHNORM_RESERVED +} domiBatchNormMode_t; + +// eltwise mode +typedef enum tagDomiEltwiseMode { + DOMI_ELTWISE_PROD = 0, // prod + DOMI_ELTWISE_SUM, // sum + DOMI_ELTWISE_MAX, // max + DOMI_ELTWISE_RESERVED +} domiEltwiseMode_t; + +// mode of padding +typedef enum tagDomiPaddingMode { + DOMI_PADDING_CEIL = 0, // Default padding mode + DOMI_PADDING_DIRECTASSIGN, // Default padding mode: NOTSET + DOMI_PADDING_VALID, // VALID padding mode + DOMI_PADDING_SAME, // Padding values of 0 are always used + DOMI_PADDING_CEIL_NEW, // Padding values of 0 are always used + DOMI_PADDING_VALID_NEW, 
// Padding values of 0 are always used + DOMI_PADDING_SAME_NEW, // Padding values of 0 are always used + DOMI_PADDING_RESERVED +} domiPaddingMode_t; + +// algorithm of convolution forward +typedef enum tagDomiConvolutionFwdAlgo { + DOMI_CONVOLUTION_FWD_ALGO_GEMM = 0, // matrix gemm algo + DOMI_CONVOLUTION_FWD_ALGO_WINOGRAD, // Winograd Transform algo + DOMI_CONVOLUTION_FWD_ALGO_GEMM_ACCU_FLOAT32, // accumulate in L0c with FP32 + DOMI_CONVOLUTION_FWD_ALGO_RESERVED +} domiConvolutionFwdAlgo_t; + +typedef enum tagDomiFullConnectFwdAlgo { + DOMI_FULLCONNECT_FWD_ALGO_HALF = 0, // accumulate in L0c with FP16 + DOMI_FULLCONNECT_FWD_ALGO_FLOAT32 // accumulate in L0c with FP32 +} domiFullConnectFwdAlgo_t; + +typedef enum tagDomiPooingFwdAlgo { + DOMI_POOLING_FWD_ALGO_HALF = 0, // accumulate in L0c with FP16 + DOMI_POOLING_FWD_ALGO_FLOAT32 // accumulate in L0c with FP32 +} domiPooingFwdAlgo_t; + +// mode of convolution +typedef enum tagDomiConvolutionMode { + DOMI_CONV_CONVOLUTION = 0, // math convolution + DOMI_CONV_CROSS_CORRELATION, // cross-correlation convolution + DOMI_CONV_DECONVOLUTION, // deconvolution, also named transposed convolution + DOMI_CONV_MODE_DEPTHWISE, // depthwise convolution + DOMI_CONV_MODE_RESERVED +} domiConvolutionMode_t; + +// softmax mode +typedef enum tagDomiSoftmaxMode { + DOMI_SOFTMAX_MODE_INSTANCE = 0, // compute the softmax over all C, H, W for each N + DOMI_SOFTMAX_MODE_CHANNEL, // compute the softmax over all C for each H, W, N + DOMI_SOFTMAX_MODE_HEIGHT, // compute the softmax over all H for each N, C, W + DOMI_SOFTMAX_MODE_WIDTH, // compute the softmax over all W for each N, C, H + DOMI_SOFTMAX_MODE_RESERVED +} domiSoftmaxMode_t; + +// softmax algorithm +typedef enum tagDomiSoftmaxAlgo { + DOMI_SOFTMAX_FAST = 0, // straightforward implementation + DOMI_SOFTMAX_ACCURATE, // subtract max from every point to avoid overflow + DOMI_SOFTMAX_LOG, // perform the Log softmax operation to avoid overflow + DOMI_SOFTMAX_ACCURATE_FP32, + 
DOMI_SOFTMAX_RESERVED +} domiSoftmaxAlgo_t; + +// algorithm of convolution backward +typedef enum tagDomiConvolutionBwdAlgo { + DOMI_CONVOLUTION_BWD_ALGO_GEMM = 0, // matrix gemm algo + DOMI_CONVOLUTION_BWD_ALGO_WINOGRAD, // Winograd Transform algo + DOMI_CONVOLUTION_BWD_ALGO_RESERVED +} domiConvolutionBwdAlgo_t; + +// mode of pooling +typedef enum tagDomiPoolingMode { + DOMI_POOLING_MAX = 0, // max pooling + DOMI_POOLING_AVG, // average pooling + DOMI_POOLING_L2, // L2 pooling + DOMI_POOLING_RESERVED +} domiPoolingMode_t; + +// propagate Nan +typedef enum tagDomiNanPropagation { + DOMI_NAN_NOT_PROPAGATE = 0, // Nan numbers are not propagated + DOMI_NAN_PROPAGATE, // Nan numbers are propagated + DOMI_NAN_PROPAGATE_RESERVED +} domiNanPropagation_t; + +// mode of cropandresize +typedef enum tagDomiCropAndResizeMode { + DOMI_RESIZE_METHOD_BILINEAR = 0, // resize bilinear + DOMI_RESIZE_METHOD_NEAREST, // resize nearest + DOMI_RESIZE_RESERVED +} domiCropAndResizeMode_t; + +// yolo version +typedef enum tagDomiYoloVersion { DOMI_YOLO_V2 = 1, DOMI_YOLO_V3, DOMI_YOLO_TRSERVED } domiYoloVersion_t; + +typedef enum tagDomiRNNScopePassType { + DOMI_STATIC_BIDIRECTIONAL_RNN_GENERAL_PASS = 0, + DOMI_DYNAMIC_BIDIRECTIONAL_RNN_GENERAL_PASS, + DOMI_DYNAMIC_BIDIRECTIONAL_RNN_BIDAF_PASS +} domiRNNScopePassType; + +// RNNDataLayout +typedef enum tagDomiRNNDataLayout { + DOMI_RNN_ND_TBX = 0, // data[max_time,batch_size,Xt] + DOMI_RNN_ND_BTX, // data[batch_size,max_time,Xt] + DOMI_RNN_5D_TX1BX, // data[max_time,Xt,1,batch_size,Xt] + DOMI_RNN_5D_BX1TX, // dataa[batch_size,Xt,1,max_time,Xt] + DOMI_RNN_4DTBX1, + DOMI_ENN_DL_RESERVED +} domiRNNDataLayout_t; + +// RNNInputMode +typedef enum tagDomiRNNInputMode { DOMI_RNN_LINEAR_INPUT = 0, DOMI_RNN_SKIP_INPUT } domiRNNInputMode_t; + +// RNNDirectionMode +typedef enum tagDomiRNNDirectionMode { DOMI_RNN_UNIDIRECTIONAL = 0, DOMI_RNN_BIDIRECTIONAL } domiDirectionMode_t; + +typedef enum tagDomiPoolingCeilMode { DOMI_POOLING_FLOOR = 0, 
DOMI_POOLING_CEIL } domiPoolingCeilMode_t; + +// RNNMode +typedef enum tagDomiRNNActivationMode { + DOMI_RNN_ACTIVATION_SIGMOID = 0, // sigmoid + DOMI_RNN_ACTIVATION_TANH, // tanh + DOMI_RNN_ACTIVATION_RELU, // ReLU + DOMI_RNN_ACTIVATION_RELU1, // ReLU1 + DOMI_RNN_ACTIVATION_RELU6, // ReLU6 + DOMI_RNN_ACTIVATION_RESERVED +} domiRNNActivationMode_t; + +typedef enum tagDomiRNNLSTMOutMode { + DOMI_RNN_LSTM_OUT_SEPARATE = 0, + DOMI_RNN_LSTM_OUT_CONCAT, + DOMI_RNN_LSTM_OUT_RESERVED +} domiRNNLSTMOutPutMode_t; +typedef enum tagDomiRNNLSTMStateOutMode { + DOMI_RNN_LSTM_STATE_OUT_SEPARATE = 0, + DOMI_RNN_LSTM_STATE_OUT_CONCAT_ALL, + DOMI_RNN_LSTM_STATE_OUT_RESERVED +} domiRNNLSTMStateOutMode_t; + +typedef enum tagDomiRNNMode { + DOMI_RNN_RELU = 0, + DOMI_RNN_TANH, + DOMI_LSTM, + DOMI_GRU, + DOMI_RNN_MODE_RESERVED +} domiRNNMode_t; + +typedef enum tagDomiResizeBilinearMode { + DOMI_RESIZE_OUTPUT_DIM_BY_ZOOM_FACTOR = 0, // Output dimension specified by zoom factor + DOMI_RESIZE_OUTPUT_DIM_BY_SHRINK_FACTOR, // specified by shrink factor + DOMI_RESIZE_OUTPUT_DIM_EXPLICIT, // specified explicitly + DOMI_RESIZE_OUTPUT_DIM_RESERVED +} domiResizeOutputDimMode_t; + +#pragma pack(1) // single-byte alignment +// DUMP file struct +struct FileHeader { + int32_t Version; // version + int32_t Output_Offset; // output offset address + char Reserved[24] = {0}; // 24 bytes reserved +}; + +struct BasicInfo { + struct FileHeader header; // file header + int32_t stream_id; // stread id + uint64_t start_time; // start time + uint64_t end_time; // end time + uint32_t input_size; // input memory size + uint32_t output_size; // output memory size + uint32_t weight_size; // weight Memory Size + uint32_t workspace_size; // workspace + uint32_t total_size; // total memory size +}; +#pragma pack() // Cancels single-byte alignment +} // namespace ge + +namespace domi { +/// @brief Data structure definition related to task sinking +enum BuildMode { + GEN_TASK_WITHOUT_L2FUSION = 3, // Carrying task data 
(L2 convergence function disabled) + GEN_TASK_WITHOUT_FUSION = 4, // Carrying task data (all convergence functions disabled) + GEN_TASK_WITH_FUSION = 5 // Carrying task data (with UB/L1/L2 enabled for all convergence functions) +}; +} // namespace domi + +#endif // INC_FRAMEWORK_COMMON_TYPES_H_ diff --git a/inc/framework/common/util.h b/inc/framework/common/util.h new file mode 100644 index 00000000..c77872ed --- /dev/null +++ b/inc/framework/common/util.h @@ -0,0 +1,378 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_COMMON_UTIL_H_ +#define INC_FRAMEWORK_COMMON_UTIL_H_ + +#include +#include +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/scope_guard.h" +#include "framework/common/ge_inner_error_codes.h" +#include "mmpa/mmpa_api.h" + +#define CHECK_FALSE_EXEC(expr, exec_expr, ...) 
\ + { \ + bool b = (expr); \ + if (!b) { \ + exec_expr; \ + } \ + }; + +// new ge marco +// Encapsulate common resource releases +#define GE_MAKE_GUARD_RTMEM(var) \ + GE_MAKE_GUARD(var, [&] { \ + if (var) GE_CHK_RT(rtFreeHost(var)); \ + }); + +#define GE_MAKE_GUARD_RTSTREAM(var) \ + GE_MAKE_GUARD(var, [&] { \ + if (var) GE_CHK_RT(rtStreamDestroy(var)); \ + }); + +#define GE_MAKE_GUARD_RTEVENT(var) \ + GE_MAKE_GUARD(var, [&] { \ + if (var) GE_CHK_RT(rtEventDestroy(var)); \ + }); + +#define GE_MAKE_GUARD_TENSOR(var) \ + GE_MAKE_GUARD(var, [&] { \ + if (var) GE_CHK_CCE(ccDestroyTensorDescriptor(&var)); \ + }); + +#define GE_MAKE_GUARD_FILTER_DESC(var) \ + GE_MAKE_GUARD(var, [&] { \ + if (var) GE_CHK_CCE(ccDestroyFilterDescriptor(&var)); \ + }); + +#define GE_RETURN_WITH_LOG_IF_ERROR(expr, ...) \ + do { \ + const ::ge::Status _status = (expr); \ + if (_status) { \ + GELOGE(ge::FAILED, __VA_ARGS__); \ + return _status; \ + } \ + } while (0) + +// Check if the parameter is false. If yes, return FAILED and record the error log +#define GE_RETURN_WITH_LOG_IF_FALSE(condition, ...) \ + do { \ + bool _condition = (condition); \ + if (!_condition) { \ + GELOGE(ge::FAILED, __VA_ARGS__); \ + return ge::FAILED; \ + } \ + } while (0) + +// Check if the parameter is null. If yes, return PARAM_INVALID and record the error +#define GE_CHECK_NOTNULL(val) \ + do { \ + if (val == nullptr) { \ + GELOGE(ge::PARAM_INVALID, "param[#val] must not be null."); \ + return ge::PARAM_INVALID; \ + } \ + } while (0) + +// Check if the parameter is null. If yes, return PARAM_INVALID and record the error +#define GE_CHECK_NOTNULL_JUST_RETURN(val) \ + do { \ + if (val == nullptr) { \ + GELOGE(ge::PARAM_INVALID, "param[#val] must not be null."); \ + return; \ + } \ + } while (0) + +// Check whether the parameter is null. 
If so, execute the exec_expr expression and record the error log +#define GE_CHECK_NOTNULL_EXEC(val, exec_expr) \ + do { \ + if (val == nullptr) { \ + GELOGE(ge::PARAM_INVALID, "param[#val] must not be null."); \ + exec_expr; \ + } \ + } while (0) + +// Check whether the parameter is null. If yes, return directly and record the error log +#define GE_RT_VOID_CHECK_NOTNULL(val) \ + do { \ + if (val == nullptr) { \ + GELOGE(ge::PARAM_INVALID, "param[#val] must not be null."); \ + return; \ + } \ + } while (0) + +// Check if the parameter is null. If yes, return false and record the error log +#define GE_RT_FALSE_CHECK_NOTNULL(val) \ + do { \ + if (val == nullptr) { \ + GELOGE(ge::FAILED, "param[#val] must not be null."); \ + return false; \ + } \ + } while (0) + +// Check if the parameter is out of bounds +#define GE_CHECK_SIZE(size) \ + do { \ + if (size == 0) { \ + GELOGE(ge::PARAM_INVALID, "param[#size] is out of range"); \ + return ge::PARAM_INVALID; \ + } \ + } while (0) + +// Macros that define the size variable +#define GE_DEFINE_BYTE_SIZE(_var_name, _expr, _sizeof) \ + uint32_t _var_name; \ + do { \ + uint32_t _expr_size = (_expr); \ + uint32_t _sizeof_size = (_sizeof); \ + if (_expr_size > (0xffffffff) / _sizeof_size) { \ + GELOGE(ge::PARAM_INVALID, "byte_size: [#_var_name] is out of range"); \ + return ge::PARAM_INVALID; \ + } \ + _var_name = _sizeof_size * _expr_size; \ + } while (0); + +// Check if the container is empty +#define GE_CHECK_VECTOR_NOT_EMPTY(vector) \ + do { \ + if (vector.empty()) { \ + GELOGE(ge::FAILED, "param[#vector] is empty !"); \ + return ge::FAILED; \ + } \ + } while (0) + +// Check if the value on the left is greater than or equal to the value on the right +#define GE_CHECK_GE(lhs, rhs) \ + do { \ + if (lhs < rhs) { \ + GELOGE(ge::PARAM_INVALID, "param[#lhs] is less than[#rhs]"); \ + return ge::PARAM_INVALID; \ + } \ + } while (0) + +// Check if the value on the left is less than or equal to the value on the right +#define 
GE_CHECK_LE(lhs, rhs) \ + do { \ + if (lhs > rhs) { \ + GELOGE(ge::PARAM_INVALID, "param[#lhs] is greater than[#rhs]"); \ + return ge::PARAM_INVALID; \ + } \ + } while (0) + +#define GE_DELETE_NEW_SINGLE(var) \ + { \ + if (var != nullptr) { \ + delete var; \ + var = nullptr; \ + } \ + }; + +#define GE_DELETE_NEW_ARRAY(var) \ + { \ + if (var != nullptr) { \ + delete[] var; \ + var = nullptr; \ + } \ + }; + +/// +/// @ingroup domi_common +/// @brief version of om.proto file +/// +static constexpr int32_t OM_PROTO_VERSION = 2; + +// Finding an Integer Ceiling Value Without Precision Loss +#define CEIL(N, n) (((N) + (n)-1) / (n)) + +namespace ge { +using google::protobuf::Message; + +/// +/// @ingroup domi_common +/// @brief Maximum file path length +/// +const int32_t DOMI_MAX_PATH_LEN = 256; + +/// +/// @ingroup domi_common +/// @brief proto file in bianary format +/// @param [in] file path of proto file +/// @param [out] proto memory for storing the proto file +/// @return true success +/// @return false fail +/// +bool ReadProtoFromBinaryFile(const char *file, Message *proto); + +/// +/// @ingroup domi_common +/// @brief Reads the proto structure from an array. +/// @param [in] data proto data to be read +/// @param [in] size proto data size +/// @param [out] proto Memory for storing the proto file +/// @return true success +/// @return false fail +/// +bool ReadProtoFromArray(const void *data, int size, Message *proto); + +/// +/// @ingroup domi_proto +/// @brief Reads the proto file in the text format. +/// @param [in] file path of proto file +/// @param [out] message Memory for storing the proto file +/// @return true success +/// @return false fail +/// +bool ReadProtoFromText(const char *file, google::protobuf::Message *message); + +bool ReadProtoFromMem(const char *data, int size, google::protobuf::Message *message); + +/// +/// @ingroup: domi_common +/// @brief: get length of file +/// @param [in] input_file: path of file +/// @return long: File length. 
If the file length fails to be obtained, the value -1 is returned. +/// +extern long GetFileLength(const std::string &input_file); + +/// +/// @ingroup domi_common +/// @brief Reads all data from a binary file. +/// @param [in] file_name path of file +/// @param [out] buffer Output memory address, which needs to be released by the caller. +/// @param [out] length Output memory size +/// @return false fail +/// @return true success +/// +bool ReadBytesFromBinaryFile(const char *file_name, char **buffer, int &length); + +bool ReadBytesFromBinaryFile(const char *file_name, std::vector &buffer); + +/// +/// @ingroup domi_common +/// @brief Recursively Creating a Directory +/// @param [in] directory_path Path, which can be a multi-level directory. +/// @return 0 success +/// @return -1 fail +/// +extern int CreateDirectory(const std::string &directory_path); + +/// +/// @ingroup domi_common +/// @brief Obtains the current time string. +/// @return Time character string in the format : %Y%m%d%H%M%S, eg: 20171011083555 +/// +std::string CurrentTimeInStr(); + +/// +/// @ingroup domi_common +/// @brief onverts Vector of a number to a string. +/// @param [in] v Vector of a number +/// @return string +/// +template +std::string ToString(std::vector &v) { + std::stringstream ss; + ss << "["; + for (T x : v) { + ss << x; + ss << ", "; + } + std::string strRet = + ss.str().substr(0, ss.str().length() - 2); // Delete the two extra characters at the end of the line. + strRet += "]"; + return strRet; +} + +/// +/// @ingroup domi_common +/// @brief Converts RepeatedField to String. +/// @param [in] rpd_field RepeatedField +/// @return string +/// +template +std::string ToString(const google::protobuf::RepeatedField &rpd_field) { + std::stringstream ss; + ss << "["; + for (T x : rpd_field) { + ss << x; + ss << ", "; + } + std::string strRet = + ss.str().substr(0, ss.str().length() - 2); // Delete the two extra characters at the end of the line. 
+ strRet += "]"; + return strRet; +} + +/// +/// @ingroup domi_common +/// @brief Obtains the absolute time (timestamp) of the current system. +/// @return Timestamp, in microseconds (US) +/// +/// +uint64_t GetCurrentTimestap(); + +/// +/// @ingroup domi_common +/// @brief Check whether the product of two int64 numbers exceeds the int64 range. +/// @param [in] a +/// @param [in] b +/// @return false: true: The result is within the normal int64 range. +/// +bool CheckInt64MulOverflow(int64_t a, int64_t b); + +/// +/// @ingroup domi_common +/// @brief Absolute path for obtaining files. +/// @param [in] path of input file +/// @param [out] Absolute path of a file. If the absolute path cannot be obtained, an empty string is returned +/// +std::string RealPath(const char *path); + +/// +/// @ingroup domi_common +/// @brief Check whether the specified input file path is valid. +/// 1. The specified path cannot be empty. +/// 2. The path can be converted to an absolute path. +/// 3. The file path exists and is readable. +/// @param [in] file_path path of input file +/// @param [out] result +/// +bool CheckInputPathValid(const std::string &file_path); + +/// +/// @ingroup domi_common +/// @brief Checks whether the specified output file path is valid. +/// @param [in] file_path path of output file +/// @param [out] result +/// +bool CheckOutputPathValid(const std::string &file_path); + +/// +/// @ingroup domi_common +/// @brief Check whether the file path meets the whitelist verification requirements. 
+/// @param [in] filePath file path +/// @param [out] result +/// +bool ValidateStr(const std::string &filePath, const std::string &mode); +} // namespace ge + +#endif // INC_FRAMEWORK_COMMON_UTIL_H_ diff --git a/inc/framework/dlog/log.h b/inc/framework/dlog/log.h new file mode 100644 index 00000000..ab040560 --- /dev/null +++ b/inc/framework/dlog/log.h @@ -0,0 +1,66 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_DLOG_LOG_H_ +#define INC_FRAMEWORK_DLOG_LOG_H_ + +#include +#if !defined(__ANDROID__) && !defined(ANDROID) +#include "toolchain/slog.h" +#else +#include +#endif + +#ifdef _MSC_VER +#define FUNC_NAME __FUNCTION__ +#else +#define FUNC_NAME __PRETTY_FUNCTION__ +#endif + +#if !defined(__ANDROID__) && !defined(ANDROID) +#define DAV_LOGI(MOD_NAME, fmt, ...) \ + dlog_info(static_cast(GE), "%s:" #fmt, __FUNCTION__, ##__VA_ARGS__) +#define DAV_LOGW(MOD_NAME, fmt, ...) \ + dlog_warn(static_cast(GE), "%s:" #fmt, __FUNCTION__, ##__VA_ARGS__) +#define DAV_LOGE(MOD_NAME, fmt, ...) \ + dlog_error(static_cast(GE), "%s:" #fmt, __FUNCTION__, ##__VA_ARGS__) +#define DAV_LOGD(MOD_NAME, fmt, ...) \ + dlog_debug(static_cast(GE), "%s:" #fmt, __FUNCTION__, ##__VA_ARGS__) +#define DAV_EVENT(MOD_NAME, fmt, ...) \ + dlog_event(static_cast(GE), "%s:" #fmt, __FUNCTION__, ##__VA_ARGS__) +#else +#define DAV_LOGI(MOD_NAME, fmt, ...) 
\ + __android_log_print(ANDROID_LOG_INFO, MOD_NAME, "%s %s(%d)::" #fmt, __FILE__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define DAV_LOGW(MOD_NAME, fmt, ...) \ + __android_log_print(ANDROID_LOG_WARN, MOD_NAME, "%s %s(%d)::" #fmt, __FILE__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define DAV_LOGE(MOD_NAME, fmt, ...) \ + __android_log_print(ANDROID_LOG_ERROR, MOD_NAME, "%s %s(%d)::" #fmt, __FILE__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define DAV_LOGD(MOD_NAME, fmt, ...) \ + __android_log_print(ANDROID_LOG_DEBUG, MOD_NAME, "%s %s(%d)::" #fmt, __FILE__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#define DAV_EVENT(MOD_NAME, fmt, ...) \ + __android_log_print(ANDROID_LOG_DEBUG, MOD_NAME, "%s %s(%d)::" #fmt, __FILE__, __FUNCTION__, __LINE__, ##__VA_ARGS__) +#endif + +#define DLOG_DECLARE(level) \ + void Log_##level(const char *mod_name, const char *func, const char *file, int line, const char *format, ...) + +namespace ge { +DLOG_DECLARE(INFO); +DLOG_DECLARE(WARNING); +DLOG_DECLARE(ERROR); +} // namespace ge + +#endif // INC_FRAMEWORK_DLOG_LOG_H_ diff --git a/inc/framework/engine/dnnengine.h b/inc/framework/engine/dnnengine.h new file mode 100644 index 00000000..34cb2569 --- /dev/null +++ b/inc/framework/engine/dnnengine.h @@ -0,0 +1,55 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_ENGINE_DNNENGINE_H_ +#define INC_FRAMEWORK_ENGINE_DNNENGINE_H_ + +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/ge_types.h" +#include "graph/types.h" + +namespace ge { +enum PriorityEnum { + COST_0 = 0, + COST_1, + COST_2, + COST_9 = 9, +}; + +struct DNNEngineAttribute { + std::string engine_name; + std::vector mem_type; + uint32_t compute_cost; + enum RuntimeType runtime_type; // HOST, DEVICE + // set this attribute if the inputformat of engine must be specific, otherwise set FORMAT_RESERVED + Format engine_input_format; + Format engine_output_format; +}; + +class DNNEngine { + public: + virtual ~DNNEngine() = default; + virtual Status Initialize(const std::map &options) = 0; + virtual Status Finalize() = 0; + virtual void GetAttributes(DNNEngineAttribute &attr) const = 0; +}; +} // namespace ge + +#endif // INC_FRAMEWORK_ENGINE_DNNENGINE_H_ diff --git a/inc/framework/executor/ge_executor.h b/inc/framework/executor/ge_executor.h new file mode 100644 index 00000000..45398397 --- /dev/null +++ b/inc/framework/executor/ge_executor.h @@ -0,0 +1,166 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_EXECUTOR_GE_EXECUTOR_H_ +#define INC_FRAMEWORK_EXECUTOR_GE_EXECUTOR_H_ + +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/ge_types.h" +#include "common/types.h" +#include "graph/tensor.h" +#include "runtime/base.h" + +namespace ge { +class ModelListenerAdapter; + +class SingleOp; + +struct RunModelData { + uint32_t index; // Data index + uint32_t model_id; // Model id + std::vector blobs; // All input/output data buffer + uint32_t timestamp; // Data creation time + uint32_t timeout; // Processing timeout + uint64_t request_id = 0; // Request ID +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeExecutor { + public: + GeExecutor(); + ~GeExecutor() = default; + ge::Status Initialize(); + + // Load model + ge::Status LoadModelOffline(uint32_t &model_id, const std::string &path, const std::string &key, int32_t priority, + std::shared_ptr listener); + + ge::Status UnloadModel(uint32_t model_id); + + ge::Status RunModel(const ge::RunModelData &input_data, ge::RunModelData &output_data); + + // Get input and output descriptor + ge::Status GetModelDescInfo(uint32_t model_id, std::vector &input_desc, + std::vector &output_desc); + + ge::Status GetModelDescInfoForZeroCopy(uint32_t model_id, std::vector &input_desc, + std::vector &output_desc); + + ge::Status LoadModel(uint32_t &model_id, const ge::ModelData &model_data, + std::shared_ptr listener); + + ge::Status CommandHandle(const ge::Command &command); + + /// + /// @ingroup ge + /// @brief Query model memory consuming interface + /// @param [in] model_id Offline model ID + /// @param [out] max_size Memory size + /// @return SUCCESS + /// @return FAILED + /// + ge::Status GetMaxUsedMemory(uint32_t model_id, uint32_t &max_size); + + /// + /// @ingroup ge + /// @brief Load data from model file to memory + /// @param [in] const std::string &path: Offline model file path + /// @param [out] ModelData &model_data: Offline model memory data + /// 
@return SUCCESS handle successfully / others handle failed + /// + ge::Status LoadDataFromFile(const std::string &path, ge::ModelData &model_data); + + /// + /// @ingroup ge + /// @brief Load model from offline model memory data + /// @param [in] ModelData &model_data: Offline model data + /// @param [in] void *dev_ptr: Input/Output memory address + /// @param [in] size_t mem_size: Input/Output memory length + /// @param [in] void *weight_ptr: Weight memory address + /// @param [in] size_t weight_size: Weight memory length + /// @param [out] uint32_t &model_id: Corresponding identification after model loading + /// @return SUCCESS handle successfully / others handle failed + /// + ge::Status LoadModelFromData(uint32_t &model_id, const ge::ModelData &model_data, void *dev_ptr, size_t mem_size, + void *weight_ptr, size_t weight_size); + + /// + /// @ingroup ge + /// @brief Load task list from ModelData with queue. + /// @param [out] model_id: model id allocate from manager. + /// @param [in] model_data: Model data load from offline model. + /// @param [in] input_queue_ids: input queue ids create from user. + /// @param [in] output_queue_ids: input queue ids create from user. + /// @return: 0 for success / others for fail + /// + ge::Status LoadModelWithQ(uint32_t &model_id, const ge::ModelData &model_data, + const std::vector &input_queue_ids, + const std::vector &output_queue_ids); + + /// + /// @ingroup ge + /// @brief Synchronous execution of offline model(Do not create thread) + /// @param [in] uint32_t model_id: Model ID to execute + /// @param [in] void* stream: stream to execute + /// @param [in] bool async_mode: is asynchronize mode. 
+ /// @param [in] const domi::InputData *input_data: Model input data + /// @param [out] domi::OutputData *output_data: Model output data + /// @return SUCCESS handle successfully / others handle failed + /// + ge::Status ExecModel(uint32_t model_id, void *stream, const ge::RunModelData &input_data, + ge::RunModelData &output_data, bool async_mode = false); + + /// + /// @ingroup ge + /// @brief Get weight memory size from model file + /// @param [in] const std::string &path: Offline model file path + /// @param [out] size_t &mem_size Execution memory size + /// @param [out] size_t &weight_size Weight memory space size + /// @return SUCCESS handle successfully / others handle failed + /// + ge::Status GetMemAndWeightSize(const std::string &path, size_t &mem_size, size_t &weight_size); + + /// + /// @ingroup ge + /// @brief Get weight memory size from model file + /// @param [in] const void *model_data Offline model buffer + /// @param [in] size_t model_size Offline model buffer length + /// @param [out] size_t &mem_size Execution memory size + /// @param [out] size_t &weight_size Weight memory space size + /// @return SUCCESS handle successfully / others handle failed + /// + ge::Status GetMemAndWeightSize(const void *model_data, size_t model_size, size_t &mem_size, size_t &weight_size); + + static ge::Status LoadSingleOp(const std::string &model_name, const ge::ModelData &model_data, void *stream, + SingleOp **single_op); + + static ge::Status ExecuteAsync(SingleOp *executor, const std::vector &inputs, + std::vector &outputs); + + static ge::Status ReleaseSingleOpResource(void *stream); + + private: + static bool is_init_; + std::vector> listener_adapters_; +}; + +ge::Status ModelInfoParser(const ge::ModelData &model, ge::ModelInfo &model_info); +} // namespace ge + +#endif // INC_FRAMEWORK_EXECUTOR_GE_EXECUTOR_H_ diff --git a/inc/framework/ge_runtime/davinci_model.h b/inc/framework/ge_runtime/davinci_model.h new file mode 100644 index 00000000..91e70159 --- 
/dev/null +++ b/inc/framework/ge_runtime/davinci_model.h @@ -0,0 +1,113 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_GE_RUNTIME_DAVINCI_MODEL_H_ +#define INC_FRAMEWORK_GE_RUNTIME_DAVINCI_MODEL_H_ + +#include +#include + +#include "ge_runtime/op_info.h" +#include "ge_runtime/task_info.h" + +namespace ge { +namespace model_runner { +class DavinciModel { + public: + DavinciModel(const std::vector> &task_info_list, + const std::vector> &data_info_list, + const std::vector> &output_info_list, + const std::vector> &constant_info_list, + const std::vector &variable_info_list, + const std::vector &wait_active_stream_list, + const std::vector &force_copy_stream_list, uint64_t mem_size = 0, uint64_t weight_size = 0, + uint64_t var_size = 0, uintptr_t logic_mem_base = 0, uintptr_t logic_weight_base = 0, + uintptr_t logic_var_base = 0, uint32_t stream_num = 0, uint32_t batch_num = 0, uint32_t event_num = 0, + int32_t priority = 0) + : task_info_list_(task_info_list), + data_info_list_(data_info_list), + output_info_list_(output_info_list), + constant_info_list_(constant_info_list), + variable_info_list_(variable_info_list), + wait_active_stream_list_(wait_active_stream_list), + force_copy_stream_list_(force_copy_stream_list), + mem_size_(mem_size), + weight_size_(weight_size), + var_size_(var_size), + logic_mem_base_(logic_mem_base), + logic_weight_base_(logic_weight_base), + 
logic_var_base_(logic_var_base), + stream_num_(stream_num), + batch_num_(batch_num), + event_num_(event_num), + priority_(priority) {} + ~DavinciModel() {} + + uint64_t GetMemSize() const { return mem_size_; } + uint64_t GetWeightSize() const { return weight_size_; } + uint64_t GetVarSize() const { return var_size_; } + + uintptr_t GetLogicMemBase() const { return logic_mem_base_; } + uintptr_t GetLogicWeightBase() const { return logic_weight_base_; } + uintptr_t GetLogicVarBase() const { return logic_var_base_; } + + uint32_t GetStreamNum() const { return stream_num_; } + uint32_t GetBatchNum() const { return batch_num_; } + uint32_t GetEventNum() const { return event_num_; } + + const std::vector &GetWaitActiveStreams() const { return wait_active_stream_list_; } + const std::vector &GetForceCopyStreams() const { return force_copy_stream_list_; } + + int32_t GetPriority() const { return priority_; } + + const std::vector> &GetTaskInfoList() const { return task_info_list_; } + const std::vector> &GetDataInfoList() const { return data_info_list_; } + const std::vector> &GetOutputInfoList() const { return output_info_list_; } + const std::vector> &GetConstantInfoList() const { return output_info_list_; } + const std::vector &GetVariableInfoList() const { return variable_info_list_; } + + private: + std::vector> task_info_list_; + std::vector> data_info_list_; + std::vector> output_info_list_; + std::vector> constant_info_list_; + std::vector variable_info_list_; + + std::vector wait_active_stream_list_; + std::vector force_copy_stream_list_; + + uint64_t mem_size_; + uint64_t weight_size_; + uint64_t var_size_; + + uintptr_t logic_mem_base_; + uintptr_t logic_weight_base_; + uintptr_t logic_var_base_; + + uint32_t stream_num_; + uint32_t batch_num_; + uint32_t event_num_; + + int32_t priority_; + + // Disable to copy constructor and assignment operator + DavinciModel &operator=(const DavinciModel &) = delete; + DavinciModel(const DavinciModel &) = delete; +}; +} // 
namespace model_runner +} // namespace ge + +#endif // INC_FRAMEWORK_GE_RUNTIME_DAVINCI_MODEL_H_ diff --git a/inc/framework/ge_runtime/model_runner.h b/inc/framework/ge_runtime/model_runner.h new file mode 100644 index 00000000..6e7abcb9 --- /dev/null +++ b/inc/framework/ge_runtime/model_runner.h @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_GE_RUNTIME_MODEL_RUNNER_H_ +#define INC_FRAMEWORK_GE_RUNTIME_MODEL_RUNNER_H_ + +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/ge_types.h" +#include "ge_runtime/davinci_model.h" + +namespace ge { +namespace model_runner { +class RuntimeModel; + +class ModelRunner { + public: + static ModelRunner &Instance(); + + bool LoadDavinciModel(uint32_t device_id, uint64_t session_id, uint32_t model_id, + std::shared_ptr davinci_model, std::shared_ptr listener); + + const std::vector &GetTaskIdList(uint32_t model_id) const; + + bool UnloadModel(uint32_t model_id); + + bool RunModel(uint32_t model_id, const InputData &input_data, OutputData *output_data); + + bool GetInputOutputDescInfo(uint32_t model_id, bool zero_copy, std::vector *input_desc, + std::vector *output_desc, std::vector *input_format, + std::vector *output_format); + + private: + ModelRunner() = default; + ~ModelRunner() = default; + + std::unordered_map> runtime_models_; +}; +} // namespace model_runner +} // 
namespace ge + +#endif // INC_FRAMEWORK_GE_RUNTIME_MODEL_RUNNER_H_ diff --git a/inc/framework/ge_runtime/op_info.h b/inc/framework/ge_runtime/op_info.h new file mode 100644 index 00000000..22c16ed6 --- /dev/null +++ b/inc/framework/ge_runtime/op_info.h @@ -0,0 +1,72 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_GE_RUNTIME_OP_INFO_H_ +#define INC_FRAMEWORK_GE_RUNTIME_OP_INFO_H_ + +#include +#include +#include + +namespace ge { +namespace model_runner { +struct TensorInfo { + int64_t GetShapeSize() const { + int64_t res = 1; + if (dims.empty()) { + return 0; + } + for (auto dim : dims) { + res *= dim; + } + return res; + } + + int64_t GetDim(uint32_t index) { + if (index >= dims.size()) { + return 0; + } + return dims[index]; + } + + std::vector dims; + uint32_t datatype; + uint32_t format; + uint32_t real_dim_cnt; + uint32_t size; + bool is_output; +}; + +struct OpInfo { + uint32_t index; + std::string name; + std::string type; + bool var_is_broadcast; + std::vector input_addrs; + std::vector output_addrs; + std::vector input_tensors; + std::vector output_tensors; + std::vector weight_tensors; + std::vector src_name; + std::vector src_index; + std::string weight_data; +}; + +using TensorInfoPtr = std::shared_ptr; +using OpInfoPtr = std::shared_ptr; +} // namespace model_runner +} // namespace ge +#endif // INC_FRAMEWORK_GE_RUNTIME_OP_INFO_H_ diff --git 
a/inc/framework/ge_runtime/task_info.h b/inc/framework/ge_runtime/task_info.h new file mode 100644 index 00000000..271df4a0 --- /dev/null +++ b/inc/framework/ge_runtime/task_info.h @@ -0,0 +1,394 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_GE_RUNTIME_TASK_INFO_H_ +#define INC_FRAMEWORK_GE_RUNTIME_TASK_INFO_H_ + +#include +#include +#include +#include +#include + +#include "cce/taskdown_api.h" + +namespace ge { +namespace model_runner { +enum TaskInfoType { + kCce = 0, + kTbe, + kAiCpu, + kLabelSet, + kLabelSwitch, + kLabelGoto, + kEventRecord, + kEventWait, + kFusionStart, + kFusionEnd, + kHccl, + kProfilerTrace, + kMemcpyAsync, + kStreamSwitch, + kStreamActive, + // Insert new task type here + kReserved = 23 +}; + +class TaskInfo { + public: + virtual ~TaskInfo() {} + uint32_t stream_id() const { return stream_id_; } + TaskInfoType type() const { return type_; } + + protected: + TaskInfo(uint32_t stream_id, TaskInfoType type) : stream_id_(stream_id), type_(type) {} + + private: + uint32_t stream_id_; + TaskInfoType type_; +}; + +class CceTaskInfo : public TaskInfo { + public: + CceTaskInfo(uint32_t stream_id, const cce::ccOpContext &ctx, const std::string &stub_func, uint32_t block_dim, + const std::vector &args, uint32_t args_size, const std::vector &sm_desc, + const std::vector &flow_table, const std::vector &args_offset, bool is_flowtable) + : 
TaskInfo(stream_id, TaskInfoType::kCce), + ctx_(ctx), + stub_func_(stub_func), + block_dim_(block_dim), + args_(args), + args_size_(args_size), + sm_desc_(sm_desc), + flow_table_(flow_table), + args_offset_(args_offset), + is_flowtable_(is_flowtable) {} + ~CceTaskInfo() override {} + + cce::ccOpContext cc_context() const { return ctx_; } + std::string stub_func() const { return stub_func_; } + uint32_t block_dim() const { return block_dim_; } + const std::vector &args() const { return args_; } + uint32_t args_size() const { return args_size_; } + const std::vector &sm_desc() const { return sm_desc_; } + const std::vector &flow_table() const { return flow_table_; } + const std::vector &args_offset() const { return args_offset_; } + bool is_flowtable() const { return is_flowtable_; } + + private: + cce::ccOpContext ctx_; + std::string stub_func_; + uint32_t block_dim_; + std::vector args_; + uint32_t args_size_; + std::vector sm_desc_; + std::vector flow_table_; + std::vector args_offset_; + bool is_flowtable_; +}; + +class TbeTaskInfo : public TaskInfo { + public: + TbeTaskInfo(uint32_t stream_id, const std::string &stub_func, uint32_t block_dim, const std::vector &args, + uint32_t args_size, const std::vector &sm_desc, void *binary, uint32_t binary_size, + const std::vector &meta_data, const std::vector &input_data_addrs, + const std::vector &output_data_addrs, const std::vector &workspace_addrs) + : TaskInfo(stream_id, TaskInfoType::kTbe), + stub_func_(stub_func), + block_dim_(block_dim), + args_(args), + args_size_(args_size), + sm_desc_(sm_desc), + binary_(binary), + binary_size_(binary_size), + meta_data_(meta_data), + input_data_addrs_(input_data_addrs), + output_data_addrs_(output_data_addrs), + workspace_addrs_(workspace_addrs) {} + ~TbeTaskInfo() override {} + + const std::string &stub_func() const { return stub_func_; } + uint32_t block_dim() const { return block_dim_; } + const std::vector &args() const { return args_; } + uint32_t args_size() const { 
return args_size_; } + const std::vector &sm_desc() const { return sm_desc_; } + void *binary() const { return binary_; } + uint32_t binary_size() const { return binary_size_; } + const std::vector &meta_data() const { return meta_data_; } + const std::vector &input_data_addrs() const { return input_data_addrs_; } + const std::vector &output_data_addrs() const { return output_data_addrs_; } + const std::vector &workspace_addrs() const { return workspace_addrs_; } + + void SetBinary(void *binary, uint32_t binary_size) { + binary_ = binary; + binary_size_ = binary_size; + } + + private: + std::string stub_func_; + uint32_t block_dim_; + std::vector args_; + uint32_t args_size_; + std::vector sm_desc_; + void *binary_; + uint32_t binary_size_; + std::vector meta_data_; + std::vector input_data_addrs_; + std::vector output_data_addrs_; + std::vector workspace_addrs_; +}; + +class AicpuTaskInfo : public TaskInfo { + public: + AicpuTaskInfo(uint32_t stream_id, const string &so_name, const std::string &kernel_name, const std::string &node_def, + const std::vector &input_data_addrs, const std::vector &output_data_addrs) + : TaskInfo(stream_id, TaskInfoType::kAiCpu), + so_name_(so_name), + kernel_name_(kernel_name), + node_def_(node_def), + input_data_addrs_(input_data_addrs), + output_data_addrs_(output_data_addrs) {} + ~AicpuTaskInfo() override {} + + const std::string &so_name() const { return so_name_; } + const std::string &kernel_name() const { return kernel_name_; } + const std::string &node_def() const { return node_def_; } + const std::vector &input_data_addrs() const { return input_data_addrs_; } + const std::vector &output_data_addrs() const { return output_data_addrs_; } + + private: + std::string so_name_; + std::string kernel_name_; + std::string node_def_; + std::vector input_data_addrs_; + std::vector output_data_addrs_; +}; + +class LabelTaskInfo : public TaskInfo { + public: + uint32_t label_id() const { return label_id_; } + + protected: + 
LabelTaskInfo(uint32_t stream_id, TaskInfoType type, uint32_t label_id) + : TaskInfo(stream_id, type), label_id_(label_id) {} + virtual ~LabelTaskInfo() override {} + + uint32_t label_id_; +}; + +class LabelSetTaskInfo : public LabelTaskInfo { + public: + LabelSetTaskInfo(uint32_t stream_id, uint32_t label_id) + : LabelTaskInfo(stream_id, TaskInfoType::kLabelSet, label_id) {} + ~LabelSetTaskInfo() override {} +}; + +class LabelSwitchTaskInfo : public LabelTaskInfo { + public: + LabelSwitchTaskInfo(uint32_t stream_id, uint32_t label_id) + : LabelTaskInfo(stream_id, TaskInfoType::kLabelSwitch, label_id) {} + ~LabelSwitchTaskInfo() override {} +}; + +class LabelGotoTaskInfo : public LabelTaskInfo { + public: + LabelGotoTaskInfo(uint32_t stream_id, uint32_t label_id) + : LabelTaskInfo(stream_id, TaskInfoType::kLabelGoto, label_id) {} + ~LabelGotoTaskInfo() override {} +}; + +class EventTaskInfo : public TaskInfo { + public: + uint32_t event_id() const { return event_id_; } + + protected: + EventTaskInfo(uint32_t stream_id, TaskInfoType type, uint32_t event_id) + : TaskInfo(stream_id, type), event_id_(event_id) {} + virtual ~EventTaskInfo() override {} + + uint32_t event_id_; +}; + +class EventRecordTaskInfo : public EventTaskInfo { + public: + EventRecordTaskInfo(uint32_t stream_id, uint32_t event_id) + : EventTaskInfo(stream_id, TaskInfoType::kEventRecord, event_id) {} + ~EventRecordTaskInfo() override {} +}; + +class EventWaitTaskInfo : public EventTaskInfo { + public: + EventWaitTaskInfo(uint32_t stream_id, uint32_t event_id) + : EventTaskInfo(stream_id, TaskInfoType::kEventWait, event_id) {} + ~EventWaitTaskInfo() override {} +}; + +class FusionStartTaskInfo : public TaskInfo { + public: + explicit FusionStartTaskInfo(uint32_t stream_id) : TaskInfo(stream_id, TaskInfoType::kFusionStart) {} + ~FusionStartTaskInfo() override {} +}; + +class FusionEndTaskInfo : public TaskInfo { + public: + explicit FusionEndTaskInfo(uint32_t stream_id) : TaskInfo(stream_id, 
TaskInfoType::kFusionEnd) {} + ~FusionEndTaskInfo() override {} +}; + +class HcclTaskInfo : public TaskInfo { + public: + HcclTaskInfo(uint32_t stream_id, const std::string hccl_type, void *input_data_addr, void *output_data_addr, + void *workspace_addr, int64_t workspace_size, int64_t hccl_stream_num, + const std::vector &private_def, void *ops_kernel_store, int32_t count, int64_t root_id, + int64_t op_type, int64_t data_type, std::function hcom_bind_model, + std::function hcom_unbind_model, + std::function, void *)> hcom_distribute_task) + : TaskInfo(stream_id, TaskInfoType::kHccl), + hccl_type_(hccl_type), + input_data_addr_(input_data_addr), + output_data_addr_(output_data_addr), + workspace_addr_(workspace_addr), + workspace_size_(workspace_size), + hccl_stream_num_(hccl_stream_num), + private_def_(private_def), + ops_kernel_store_(ops_kernel_store), + count_(count), + root_id_(root_id), + op_type_(op_type), + data_type_(data_type), + hcom_bind_model_(hcom_bind_model), + hcom_unbind_model_(hcom_unbind_model), + hcom_distribute_task_(hcom_distribute_task) {} + ~HcclTaskInfo() override {} + + const std::string &hccl_type() const { return hccl_type_; } + void *input_data_addr() const { return input_data_addr_; } + void *output_data_addr() const { return output_data_addr_; } + void *workspace_addr() const { return workspace_addr_; } + int64_t workspace_size() const { return workspace_size_; } + int64_t hccl_stream_num() const { return hccl_stream_num_; } + const std::vector &private_def() const { return private_def_; } + void *ops_kernel_store() const { return ops_kernel_store_; } + int32_t count() const { return count_; } + int64_t root_id() const { return root_id_; } + int64_t op_type() const { return op_type_; } + int64_t data_type() const { return data_type_; } + std::function hcom_bind_model() const { return hcom_bind_model_; } + std::function hcom_unbind_model() const { return hcom_unbind_model_; } + std::function, void *)> hcom_distribute_task() const { + 
return hcom_distribute_task_; + } + + private: + std::string hccl_type_; + void *input_data_addr_; + void *output_data_addr_; + void *workspace_addr_; + int64_t workspace_size_; + int64_t hccl_stream_num_; + std::vector private_def_; + void *ops_kernel_store_; + int32_t count_; + int64_t root_id_; + int64_t op_type_; + int64_t data_type_; + std::function hcom_bind_model_; + std::function hcom_unbind_model_; + std::function, void *)> hcom_distribute_task_; +}; + +class ProfilerTraceTaskInfo : public TaskInfo { + public: + ProfilerTraceTaskInfo(uint32_t stream_id, uint64_t log_id, bool notify, uint32_t flat) + : TaskInfo(stream_id, TaskInfoType::kProfilerTrace), log_id_(log_id), notify_(notify), flat_(flat) {} + ~ProfilerTraceTaskInfo() override {} + + uint64_t log_id() const { return log_id_; } + bool notify() const { return notify_; } + uint32_t flat() const { return flat_; } + + private: + uint64_t log_id_; + bool notify_; + uint32_t flat_; +}; + +class MemcpyAsyncTaskInfo : public TaskInfo { + public: + MemcpyAsyncTaskInfo(uint32_t stream_id, void *dst, uint64_t dst_max, void *src, uint64_t count, uint32_t kind) + : TaskInfo(stream_id, TaskInfoType::kMemcpyAsync), + dst_(dst), + dst_max_(dst_max), + src_(src), + count_(count), + kind_(kind) {} + ~MemcpyAsyncTaskInfo() override {} + + void *dst() const { return dst_; } + uint64_t dst_max() const { return dst_max_; } + void *src() const { return src_; } + uint64_t count() const { return count_; } + uint32_t kind() const { return kind_; } + + private: + void *dst_; + uint64_t dst_max_; + void *src_; + uint64_t count_; + int32_t kind_; +}; + +class StreamSwitchTaskInfo : public TaskInfo { + public: + StreamSwitchTaskInfo(uint32_t stream_id, int64_t true_stream_id, void *input_addr, void *value_addr, int64_t cond, + int64_t data_type) + : TaskInfo(stream_id, TaskInfoType::kStreamSwitch), + true_stream_id_(true_stream_id), + input_addr_(input_addr), + value_addr_(value_addr), + cond_(cond), + data_type_(data_type) {} + 
~StreamSwitchTaskInfo() override {} + + int64_t true_stream_id() const { return true_stream_id_; } + void *input_addr() const { return input_addr_; } + void *value_addr() const { return value_addr_; } + int64_t cond() const { return cond_; } + int64_t data_type() const { return data_type_; } + + private: + int64_t true_stream_id_; + void *input_addr_; + void *value_addr_; + int64_t cond_; + int64_t data_type_; +}; + +class StreamActiveTaskInfo : public TaskInfo { + public: + StreamActiveTaskInfo(uint32_t stream_id, uint32_t active_stream_id) + : TaskInfo(stream_id, TaskInfoType::kStreamActive), active_stream_id_(active_stream_id) {} + ~StreamActiveTaskInfo() override {} + + uint32_t active_stream_id() const { return active_stream_id_; } + + private: + uint32_t active_stream_id_; +}; +} // namespace model_runner +} // namespace ge + +#endif // INC_FRAMEWORK_GE_RUNTIME_TASK_INFO_H_ diff --git a/inc/framework/generator/ge_generator.h b/inc/framework/generator/ge_generator.h new file mode 100644 index 00000000..1013a581 --- /dev/null +++ b/inc/framework/generator/ge_generator.h @@ -0,0 +1,67 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_GENERATOR_GE_GENERATOR_H_ +#define INC_FRAMEWORK_GENERATOR_GE_GENERATOR_H_ + +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "graph/ge_tensor.h" +#include "graph/graph.h" +#include "graph/op_desc.h" + +namespace ge { +class GeGenerator { + public: + GeGenerator() = default; + + ~GeGenerator() = default; + + GeGenerator(const GeGenerator &) = delete; + + GeGenerator &operator=(const GeGenerator &) = delete; + + Status Initialize(const std::map &options); + + Status Finalize(); + + Status GenerateOfflineModel(const Graph &graph, const std::string &file_name_prefix, + const std::vector &inputs = std::vector()); + + /// + /// @ingroup ge + /// @brief: Build single OP in Model. + /// @param [in] op_desc: the OP description. + /// @param [in] inputs: input tensors. + /// @param [in] outputs: output tensors. + /// @param [in] model_file_name: name of model file. + /// @return SUCCESS or FAILED + /// + Status BuildSingleOpModel(OpDescPtr &op_desc, const std::vector &inputs, + const std::vector &outputs, const std::string &model_file_name); + + private: + class Impl; + + std::shared_ptr impl_; +}; +} // namespace ge + +#endif // INC_FRAMEWORK_GENERATOR_GE_GENERATOR_H_ diff --git a/inc/framework/generator/generator_api.h b/inc/framework/generator/generator_api.h new file mode 100644 index 00000000..71c6832e --- /dev/null +++ b/inc/framework/generator/generator_api.h @@ -0,0 +1,172 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_GENERATOR_GENERATOR_API_H_ +#define INC_FRAMEWORK_GENERATOR_GENERATOR_API_H_ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +typedef uint32_t Status_t; + +typedef void *OpAttr_t; +typedef void *OpTensor_t; + +/// +/// @ingroup ge +/// @brief Generate offline model for the op. +/// @param [in] op_type: type name of the op. +/// @param [in] in_tensor: input description array (created by OpTensorCreate). +/// @param [in] in_num: number of in_tensor. +/// @param [in] out_tensor: output description array (created by OpTensorCreate). +/// @param [in] out_num: number of out_tensor. +/// @param [in] attr: the attributes of the op (created by OpAttrCreate). +/// @param [in] om_file: file name for the om to save. +/// @return 0 for success / others for fail +/// +extern Status_t OpTaskGernerator(const char *op_type, const OpTensor_t *in_tensor, int in_num, + const OpTensor_t *out_tensor, int out_num, const OpAttr_t attr, const char *om_file); + +/// +/// @ingroup ge +/// @brief Create Tensor Description. +/// @param [in] format: tensor format of the data. +/// @param [in] datatype: tensor type of the data. +/// @param [in] shape: tensor shape array. +/// @param [in] num: number of shape. +/// @return OpTensor_t for success / nullptr for failure +/// +extern OpTensor_t OpTensorCreate(int format, int datatype, const int64_t *shape, int num); + +/// +/// @ingroup ge +/// @brief Destroy Tensor Description. +/// @param [in] OpTensor_t tensor: created by OpTensorCreate. +/// @param [out] none +/// @return 0 for success / others for failure. +/// +extern Status_t OpTensorDestroy(OpTensor_t tensor); + +/// +/// @ingroup ge +/// @brief Create an attribute holder. +/// @param [in] none +/// @param [out] none +/// @return OpAttr_t for success / nullptr for failure. 
+/// +extern OpAttr_t OpAttrCreate(); + +/// +/// @ingroup ge +/// @brief Destroy Attribute holder. +/// @param [in] OpAttr_t attr: created by OpAttrCreate. +/// @param [out] none +/// @return 0 for success / others for failure. +/// +extern Status_t OpAttrDestroy(OpAttr_t attr); + +/// +/// @ingroup ge +/// @brief Set a boolean attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attributed value. +/// @return 0 for success / others for failure. +/// +extern Status_t SetAttrBool(OpAttr_t attr, const char *name, bool value); + +/// +/// @ingroup ge +/// @brief Set an integer attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value. +/// @return 0 for success / others for failure. +/// +extern Status_t SetAttrInt(OpAttr_t attr, const char *name, int64_t value); + +/// +/// @ingroup ge +/// @brief Set a float attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value. +/// @return 0 for success / others for failure. +/// +extern Status_t SetAttrFloat(OpAttr_t attr, const char *name, float value); + +/// +/// @ingroup ge +/// @brief Set a string attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value (can`t be nullptr, end with '\0'). +/// @return 0 for success / others for failure. +/// +extern Status_t SetAttrString(OpAttr_t attr, const char *name, const char *value); + +/// +/// @ingroup ge +/// @brief Set a boolean array attribute to the attribute holder. 
+/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value array. +/// @param [in] num: number of value array. +/// @return 0 for success / others for failure. +/// +extern Status_t SetAttrBoolList(OpAttr_t attr, const char *name, const bool *value, int num); + +/// +/// @ingroup ge +/// @brief Set an integer array attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value array. +/// @param [in] num: number of value array. +/// @return 0 for success / others for failure. +/// +extern Status_t SetAttrIntList(OpAttr_t attr, const char *name, const int64_t *value, int num); + +/// +/// @ingroup ge +/// @brief Set a float array attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value array. +/// @param [in] num: number of value array. +/// @return 0 for success / others for failure. +/// +extern Status_t SetAttrFloatList(OpAttr_t attr, const char *name, const float *value, int num); + +/// +/// @ingroup ge +/// @brief Set a string array attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value array (each value can`t be nullptr, end with '\0'). +/// @param [in] num: number of value array. +/// @return 0 for success / others for failure. 
+/// +extern Status_t SetAttrStringList(OpAttr_t attr, const char *name, const char **value, int num); + +#ifdef __cplusplus +} +#endif + +#endif // INC_FRAMEWORK_GENERATOR_GENERATOR_API_H_ diff --git a/inc/framework/memory/memory_assigner.h b/inc/framework/memory/memory_assigner.h new file mode 100644 index 00000000..eb12367f --- /dev/null +++ b/inc/framework/memory/memory_assigner.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_MEMORY_MEMORY_ASSIGNER_H_ +#define INC_FRAMEWORK_MEMORY_MEMORY_ASSIGNER_H_ + +#include + +#include "common/ge_inner_error_codes.h" +#include "graph/node.h" + +namespace ge { +const int64_t kMemAlignSize = 512; +class MemoryAssigner { + public: + explicit MemoryAssigner(ge::ComputeGraphPtr compute_graph) : compute_graph_(std::move(compute_graph)) {} + virtual ~MemoryAssigner() = default; + + MemoryAssigner(const MemoryAssigner &) = delete; + + MemoryAssigner &operator=(const MemoryAssigner &) = delete; + + Status AssignMemory(bool is_loop_graph, size_t &mem_offset); + + private: + ge::ComputeGraphPtr compute_graph_; +}; +} // namespace ge +#endif // INC_FRAMEWORK_MEMORY_MEMORY_ASSIGNER_H_ diff --git a/inc/framework/omg/omg_inner_types.h b/inc/framework/omg/omg_inner_types.h new file mode 100644 index 00000000..70b1f04f --- /dev/null +++ b/inc/framework/omg/omg_inner_types.h @@ -0,0 +1,130 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_OMG_OMG_INNER_TYPES_H_ +#define INC_FRAMEWORK_OMG_OMG_INNER_TYPES_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "framework/common/fmk_error_codes.h" +#include "framework/common/types.h" +#include "register/register_fmk_types.h" + +using domi::DOMI_TENSOR_ND; +using domi::DOMI_TENSOR_RESERVED; +using domi::domiTensorFormat_t; +using domi::FMK_TYPE_RESERVED; +using domi::FrameworkType; +using std::map; +using std::string; +using std::unordered_map; +using std::vector; + +namespace ge { +/// +/// @ingroup domi_omg +/// @brief run model +/// +enum RunMode { + kGeOmModel = 0, // generate offline model file + kModelToJson = 1, // convert to JSON file + kOnlyPreCheck = 3, // only for pre-check + kPbtxtToJson = 5 // pbtxt to json +}; + +/// +/// @ingroup domi_omg +/// @brief high-precision mode +/// +enum HighPrecisionMode { + // the FP16 high-precision function is disabled in common mode + kHighPrecisonDefault = 0, + + // high-precision mode, in which FP16 high-precision mode (Convolution/FullConnect/AvgPooling are involved) is enable + kHighPrecisionFP16 = 1 +}; + +/// +/// @ingroup domi_omg +/// @brief description buffer data +/// +struct OMGBufferData { + void *data; + uint32_t length; +}; + +struct OmgContext { + OmgContext() { format = DOMI_TENSOR_ND; } + domiTensorFormat_t format; + + // format of the input specified by the command line + std::unordered_map input_nodes_format_map; + std::vector output_formats; + + // user-designate input dims + std::vector>> user_input_dims; + // global input dims + std::unordered_map> input_dims; + + // resolve the mapping between operators with the same name and corresponding network. format e.g. + // Detectionoutput:SsdDetectiontOutput + std::map op_conf_map; + // save the output node of the network. 
key = operator name, value = index, index indicates the output index of the + // operator + std::map> out_nodes_map; + // user-designate out nodes (this is used for determing the orders) + std::vector> user_out_nodes; + // path for the aicpu custom operator so_file + std::vector aicpu_op_run_paths; + // ddk version + std::string ddk_version; + // preferential format used by the entire network + domiTensorFormat_t net_format = DOMI_TENSOR_RESERVED; + domi::FrameworkType type = domi::FMK_TYPE_RESERVED; + RunMode run_mode = kOnlyPreCheck; + bool train_flag = false; + // whether to use FP16 high precision + int32_t fp16_high_precision = kHighPrecisonDefault; + + std::string output_type; + + // Save the name of the entire network: Some special operators are used to determine a network. Some operators in the + // network require special processing based on the specific network. + // e.g:faster-rcnn, the FirstStageProcessor module is determined as the Faster-R-CNN network based on the scope + // fusion. Then, the conv+reshape operators in the FirstStageBoxPredictor/BoxEncodingPredictor scope are combined. The + // convolution kernel rearrangement reshape operator needs to be deleted for the convolution kernel. + std::string net_name; + // whether to enable dynamic batch + bool enable_l2dynamic = false; +}; +} // namespace ge + +namespace domi { +/// +/// @ingroup domi_omg +/// @brief get OMG context +/// @return OmgContext context +/// +ge::OmgContext &GetContext(); +} // namespace domi + +#endif // INC_FRAMEWORK_OMG_OMG_INNER_TYPES_H_ diff --git a/inc/framework/omg/omg_types.h b/inc/framework/omg/omg_types.h new file mode 100644 index 00000000..771a53a4 --- /dev/null +++ b/inc/framework/omg/omg_types.h @@ -0,0 +1,22 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_FRAMEWORK_OMG_OMG_TYPES_H_ +#define INC_FRAMEWORK_OMG_OMG_TYPES_H_ + +#include "register/register_fmk_types.h" + +#endif // INC_FRAMEWORK_OMG_OMG_TYPES_H_ diff --git a/inc/framework/omg/version.h b/inc/framework/omg/version.h new file mode 100755 index 00000000..993f0a8f --- /dev/null +++ b/inc/framework/omg/version.h @@ -0,0 +1,53 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_FRAMEWORK_OMG_VERSION_H_ +#define INC_FRAMEWORK_OMG_VERSION_H_ + +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/string_util.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +class PlatformVersionManager { + public: + PlatformVersionManager() = delete; + ~PlatformVersionManager() = delete; + static Status GetPlatformVersion(std::string &ver) { +#if defined PLATFORM_PHOENIX + ver = "3.51.z"; +#elif defined PLATFORM_ORLANDO + ver = "3.31.z"; +#elif defined PLATFORM_MINI + ver = "1.11.z"; +#elif defined PLATFORM_CLOUD + ver = "1.61.z"; +#endif + std::vector version_splits = StringUtils::Split(ver, '.'); + GE_IF_BOOL_EXEC(version_splits.size() < 3, GELOGW("Read platform version error!"); return FAILED;); + + GELOGI("Read current platform version: %s.", ver.c_str()); + return SUCCESS; + } +}; // class PlatformManager +} // namespace ge + +#endif // INC_FRAMEWORK_OMG_VERSION_H_ diff --git a/inc/graph/anchor.h b/inc/graph/anchor.h new file mode 100644 index 00000000..5321fe47 --- /dev/null +++ b/inc/graph/anchor.h @@ -0,0 +1,278 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_ANCHOR_H_ +#define INC_GRAPH_ANCHOR_H_ + +#include +#include +#include + +#include "graph/ge_error_codes.h" +#include "graph/range_vistor.h" +#include "graph/types.h" + +namespace ge { +enum AnchorStatus { ANCHOR_SUSPEND = 0, ANCHOR_CONST = 1, ANCHOR_DATA = 2, ANCHOR_RESERVED = 3 }; +using std::string; +using std::vector; + +class Node; + +using NodePtr = std::shared_ptr; + +class Edge; + +using EdgePtr = std::shared_ptr; + +class Anchor; + +using AnchorPtr = std::shared_ptr; + +class DataAnchor; + +using DataAnchorPtr = std::shared_ptr; + +class InDataAnchor; + +using InDataAnchorPtr = std::shared_ptr; + +class OutDataAnchor; + +using OutDataAnchorPtr = std::shared_ptr; + +class ControlAnchor; + +using ControlAnchorPtr = std::shared_ptr; + +class InControlAnchor; + +using InControlAnchorPtr = std::shared_ptr; + +class OutControlAnchor; + +using OutControlAnchorPtr = std::shared_ptr; + +using ConstAnchor = const Anchor; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Anchor : public std::enable_shared_from_this { + friend class AnchorUtils; + + public: + using TYPE = const char *; + template + using Vistor = RangeVistor>; + + Anchor(const NodePtr &ownerNode, int idx); + + virtual ~Anchor() = default; + + protected: + // Whether the two anchors are equal + virtual bool Equal(AnchorPtr anchor) const = 0; + virtual bool IsTypeOf(TYPE type) const; + + public: + // Get all peer anchors connected to current anchor + Vistor GetPeerAnchors() const; + // Get the first peer anchor + AnchorPtr GetFirstPeerAnchor() const; + + // Get the node which is the owner of the anchor + NodePtr GetOwnerNode() const; + + // Remove all links with the anchor + void UnlinkAll() noexcept; + + // Remove link with the given anchor + graphStatus Unlink(const AnchorPtr &peer); + + // Replace the peeranchor with the new peeranchor + graphStatus ReplacePeer(const AnchorPtr &oldPeer, const AnchorPtr &firstPeer, const AnchorPtr &secondPeer); + + // Judge if the 
anchor is linked with the given anchor + bool IsLinkedWith(const AnchorPtr &peer); + + // Get the anchor index of the node + int GetIdx() const; + + // Set the anchor index of the node + void SetIdx(int index); + + protected: + // All peer anchors connected to current anchor + vector> peer_anchors_; + // The owner nodes of the anchor + std::weak_ptr owner_node_; + // The index of current anchor + int idx_; + template + static Anchor::TYPE TypeOf() { + static_assert(std::is_base_of::value, "T must be a Anchor!"); + return __PRETTY_FUNCTION__; + } + + public: + template + static std::shared_ptr DynamicAnchorCast(AnchorPtr anchorPtr) { + static_assert(std::is_base_of::value, "T must be a Anchor!"); + if (anchorPtr == nullptr || !anchorPtr->IsTypeOf()) { + return nullptr; + } + return std::static_pointer_cast(anchorPtr); + } + + template + bool IsTypeOf() { + return IsTypeOf(TypeOf()); + } +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY DataAnchor : public Anchor { + friend class AnchorUtils; + + public: + explicit DataAnchor(const NodePtr &ownerNode, int idx); + + virtual ~DataAnchor() = default; + + protected: + bool IsTypeOf(TYPE type) const override; + + private: + Format format_{FORMAT_ND}; + AnchorStatus status_{ANCHOR_SUSPEND}; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY InDataAnchor : public DataAnchor { + friend class OutDataAnchor; + + friend class OutControlAnchor; + + public: + explicit InDataAnchor(const NodePtr &ownerNode, int idx); + + virtual ~InDataAnchor() = default; + + // Get source out data anchor + OutDataAnchorPtr GetPeerOutAnchor() const; + + // Build connection from OutDataAnchor to InDataAnchor + graphStatus LinkFrom(const OutDataAnchorPtr &src); + + protected: + bool Equal(AnchorPtr anchor) const override; + bool IsTypeOf(TYPE type) const override; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OutDataAnchor : public DataAnchor { + friend class InDataAnchor; + + friend class AnchorUtils; + + public: + 
template + using Vistor = RangeVistor>; + + explicit OutDataAnchor(const NodePtr &ownerNode, int idx); + + virtual ~OutDataAnchor() = default; + // Get dst in data anchor(one or more) + Vistor GetPeerInDataAnchors() const; + uint32_t GetPeerInDataNodesSize() const; + + // Get dst in control anchor(one or more) + Vistor GetPeerInControlAnchors() const; + + // Build connection from OutDataAnchor to InDataAnchor + graphStatus LinkTo(const InDataAnchorPtr &dest); + + // Build connection from OutDataAnchor to InControlAnchor + graphStatus LinkTo(const InControlAnchorPtr &dest); + + protected: + bool Equal(AnchorPtr anchor) const override; + bool IsTypeOf(TYPE type) const override; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ControlAnchor : public Anchor { + public: + explicit ControlAnchor(const NodePtr &ownerNode); + + explicit ControlAnchor(const NodePtr &ownerNode, int idx); + + virtual ~ControlAnchor() = default; + + protected: + bool IsTypeOf(TYPE type) const override; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY InControlAnchor : public ControlAnchor { + friend class OutControlAnchor; + + friend class OutDataAnchor; + + public: + explicit InControlAnchor(const NodePtr &ownerNode); + + explicit InControlAnchor(const NodePtr &ownerNode, int idx); + + virtual ~InControlAnchor() = default; + + // Get source out control anchors + Vistor GetPeerOutControlAnchors() const; + bool IsPeerOutAnchorsEmpty() const { return peer_anchors_.empty(); } + + // Get source out data anchors + Vistor GetPeerOutDataAnchors() const; + + // Build connection from OutControlAnchor to InControlAnchor + graphStatus LinkFrom(const OutControlAnchorPtr &src); + + protected: + bool Equal(AnchorPtr anchor) const override; + bool IsTypeOf(TYPE type) const override; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OutControlAnchor : public ControlAnchor { + friend class InControlAnchor; + + public: + template + using Vistor = RangeVistor>; + + explicit 
OutControlAnchor(const NodePtr &ownerNode); + + explicit OutControlAnchor(const NodePtr &ownerNode, int idx); + + virtual ~OutControlAnchor() = default; + + // Get dst in control anchor(one or more) + Vistor GetPeerInControlAnchors() const; + // Get dst data anchor in control anchor(one or more) + Vistor GetPeerInDataAnchors() const; + + // Build connection from OutControlAnchor to InControlAnchor + graphStatus LinkTo(const InControlAnchorPtr &dest); + // Build connection from OutDataAnchor to InDataAnchor + graphStatus LinkTo(const InDataAnchorPtr &dest); + + protected: + bool Equal(AnchorPtr anchor) const override; + bool IsTypeOf(TYPE type) const override; +}; +} // namespace ge +#endif // INC_GRAPH_ANCHOR_H_ diff --git a/inc/graph/attr_value_serializable.h b/inc/graph/attr_value_serializable.h new file mode 100644 index 00000000..7570c73d --- /dev/null +++ b/inc/graph/attr_value_serializable.h @@ -0,0 +1,190 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_ATTR_VALUE_SERIALIZABLE_H_ +#define INC_GRAPH_ATTR_VALUE_SERIALIZABLE_H_ + +#include +#include + +#include "graph/ge_attr_value.h" + +namespace ge { +class GeAttrValue; +class _GeSerializable { + public: + template + struct ge_serializable_int64_t_support_type { + using DT = typename std::remove_cv::type; + static const bool value = std::is_same::value // by cast + || std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value; + }; + + template + static GeAttrValue SaveItemAsAttrValue(const T &t) { + return GeAttrValue::CreateFrom(t); + } + + template + static GeAttrValue SaveItemAsAttrValue(const vector &t) { + return GeAttrValue::CreateFrom(t); + } + + template = 0, typename DT = typename std::remove_cv::type> + static GeAttrValue SaveItemAsAttrValue(const T &t) { + return GeAttrValue::CreateFrom
(t); + } + // int64_t support type + template ::value, int>::type = 0> + static GeAttrValue SaveItemAsAttrValue(const T &t) { + return GeAttrValue::CreateFrom(t); + } + // vector int64_t support type + template ::value, int>::type = 0> + static GeAttrValue SaveItemAsAttrValue(const vector &t) { + return GeAttrValue::CreateFrom(t); + } + + template + static graphStatus LoadItemFromAttrValue(T &t, GeAttrValue &attrVal) { + return attrVal.GetValue(t); + } + + template + static graphStatus LoadItemFromAttrValue(vector &t, GeAttrValue &attrVal) { + return attrVal.GetValue(t); + } + + template = 0, typename DT = typename std::remove_cv::type> + static graphStatus LoadItemFromAttrValue(T &t, GeAttrValue &attrVal) { + return attrVal.GetValue
(t); + } + + template ::value, int>::type = 0> + static graphStatus LoadItemFromAttrValue(T &t, GeAttrValue &attrVal) { + return attrVal.GetValue(t); + } + + template ::value, int>::type = 0> + static graphStatus LoadItemFromAttrValue(vector &t, GeAttrValue &attrVal) { + return attrVal.GetValue(t); + } + + template + static void SaveItem(GeAttrValue::NamedAttrs &namedAttrs, string itemName, T &item, Args &... args) { + GeAttrValue itemVal = SaveItemAsAttrValue(item); + (void)namedAttrs.SetAttr(itemName, itemVal); + SaveItem(namedAttrs, args...); + } + + static void SaveItem(GeAttrValue::NamedAttrs &namedAttrs __attribute__((__unused__))) {} + + template + static graphStatus LoadItem(GeAttrValue::NamedAttrs &namedAttrs, string itemName, T &item, Args &... args) { + auto itemVal = namedAttrs.GetItem(itemName); + auto status = LoadItemFromAttrValue(item, itemVal); + if (status != GRAPH_SUCCESS) { + return status; + } + return LoadItem(namedAttrs, args...); + } + + static graphStatus LoadItem(GeAttrValue::NamedAttrs &namedAttrs __attribute__((__unused__))) { return GRAPH_SUCCESS; } +}; + + +#define _GE_FI(a) #a, a +#define _GE_MAP_FIELDS1(a1) _GE_FI(a1) +#define _GE_MAP_FIELDS2(a1, a2) _GE_FI(a1), _GE_FI(a2) +#define _GE_MAP_FIELDS3(a1, a2, a3) _GE_FI(a1), _GE_FI(a2), _GE_FI(a3) +#define _GE_MAP_FIELDS4(a1, a2, a3, a4) _GE_FI(a1), _GE_FI(a2), _GE_FI(a3), _GE_FI(a4) +#define _GE_MAP_FIELDS5(a1, a2, a3, a4, a5) _GE_FI(a1), _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5) +#define _GE_MAP_FIELDS6(a1, a2, a3, a4, a5, a6) _GE_FI(a1), _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6) +#define _GE_MAP_FIELDS7(a1, a2, a3, a4, a5, a6, a7) \ + _GE_FI(a1) \ + , _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7) +#define _GE_MAP_FIELDS8(a1, a2, a3, a4, a5, a6, a7, a8) \ + _GE_FI(a1) \ + , _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7), _GE_FI(a8) +#define _GE_MAP_FIELDS9(a1, a2, a3, a4, a5, a6, a7, a8, a9) \ + _GE_FI(a1) \ + , 
_GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7), _GE_FI(a8), _GE_FI(a9) +#define _GE_MAP_FIELDS10(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10) \ + _GE_FI(a1) \ + , _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7), _GE_FI(a8), _GE_FI(a9), _GE_FI(a10) +#define _GE_MAP_FIELDS11(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11) \ + _GE_FI(a1) \ + , _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7), _GE_FI(a8), _GE_FI(a9), _GE_FI(a10), \ + _GE_FI(a11) +#define _GE_MAP_FIELDS12(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12) \ + _GE_FI(a1) \ + , _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7), _GE_FI(a8), _GE_FI(a9), _GE_FI(a10), \ + _GE_FI(a11), _GE_FI(a12) +#define _GE_MAP_FIELDS13(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13) \ + _GE_FI(a1) \ + , _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7), _GE_FI(a8), _GE_FI(a9), _GE_FI(a10), \ + _GE_FI(a11), _GE_FI(a12), _GE_FI(a13) +#define _GE_MAP_FIELDS14(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14) \ + _GE_FI(a1) \ + , _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7), _GE_FI(a8), _GE_FI(a9), _GE_FI(a10), \ + _GE_FI(a11), _GE_FI(a12), _GE_FI(a13), _GE_FI(a14) +#define _GE_MAP_FIELDS15(a1, a2, a3, a4, a5, a6, a7, a8, a9, a10, a11, a12, a13, a14, a15) \ + _GE_FI(a1) \ + , _GE_FI(a2), _GE_FI(a3), _GE_FI(a4), _GE_FI(a5), _GE_FI(a6), _GE_FI(a7), _GE_FI(a8), _GE_FI(a9), _GE_FI(a10), \ + _GE_FI(a11), _GE_FI(a12), _GE_FI(a13), _GE_FI(a14), _GE_FI(a15) + +#define _GE_PRIVATE_ARGS_GLUE(x, y) x y + +#define _GE_PRIVATE_MACRO_VAR_ARGS_IMPL_COUNT(_1, _2, _3, _4, _5, _6, _7, _8, _9, _10, _11, _12, _13, _14, _15, N, \ + ...) \ + N +#define _GE_PRIVATE_MACRO_VAR_ARGS_IMPL(args) _GE_PRIVATE_MACRO_VAR_ARGS_IMPL_COUNT args +#define _GE_COUNT_MACRO_VAR_ARGS(...) 
\ + _GE_PRIVATE_MACRO_VAR_ARGS_IMPL((__VA_ARGS__, 15, 14, 13, 12, 11, 10, 9, 8, 7, 6, 5, 4, 3, 2, 1, 0)) + +#define _GE_PRIVATE_MACRO_CHOOSE_HELPER2(M, count) M##count +#define _GE_PRIVATE_MACRO_CHOOSE_HELPER1(M, count) _GE_PRIVATE_MACRO_CHOOSE_HELPER2(M, count) +#define _GE_PRIVATE_MACRO_CHOOSE_HELPER(M, count) _GE_PRIVATE_MACRO_CHOOSE_HELPER1(M, count) + +#define _GE_INVOKE_VAR_MACRO(...) \ + _GE_PRIVATE_ARGS_GLUE(_GE_PRIVATE_MACRO_CHOOSE_HELPER(_GE_MAP_FIELDS, _GE_COUNT_MACRO_VAR_ARGS(__VA_ARGS__)), \ + (__VA_ARGS__)) + +#define GE_SERIALIZABLE(...) \ + public: \ + friend class ge::GeAttrValue; \ + using __ge_serializable = int; \ + \ + private: \ + ge::graphStatus Save(GeAttrValue &ar) const { \ + GeAttrValue::NamedAttrs named_attrs; \ + _GeSerializable::SaveItem(named_attrs, _GE_INVOKE_VAR_MACRO(__VA_ARGS__)); \ + return ar.SetValue(named_attrs); \ + } \ + ge::graphStatus Load(const GeAttrValue &ar) { \ + GeAttrValue::NamedAttrs named_attrs; \ + ge::graphStatus status = ar.GetValue(named_attrs); \ + if (status != GRAPH_SUCCESS) { \ + return status; \ + } \ + return _GeSerializable::LoadItem(named_attrs, _GE_INVOKE_VAR_MACRO(__VA_ARGS__)); \ + } + +// end NamedAttrs Helper: GE_SERIALIZABLE +} // namespace ge +#endif // INC_GRAPH_ATTR_VALUE_SERIALIZABLE_H_ diff --git a/inc/graph/buffer.h b/inc/graph/buffer.h new file mode 100644 index 00000000..d781fe0b --- /dev/null +++ b/inc/graph/buffer.h @@ -0,0 +1,83 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_BUFFER_H_ +#define INC_GRAPH_BUFFER_H_ + +#include +#include +#include + +#include "detail/attributes_holder.h" +#include "graph/types.h" + +namespace ge { +#ifdef HOST_VISIBILITY +#define GE_FUNC_HOST_VISIBILITY __attribute__((visibility("default"))) +#else +#define GE_FUNC_HOST_VISIBILITY +#endif +#ifdef DEV_VISIBILITY +#define GE_FUNC_DEV_VISIBILITY __attribute__((visibility("default"))) +#else +#define GE_FUNC_DEV_VISIBILITY +#endif + +using std::shared_ptr; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Buffer { + public: + Buffer(); + Buffer(const Buffer &other); + + explicit Buffer(std::size_t bufferSize, std::uint8_t defualtVal = 0); + + ~Buffer() = default; + + Buffer &operator=(const Buffer &other); + + static Buffer CopyFrom(const std::uint8_t *data, std::size_t bufferSize); + + const std::uint8_t *GetData() const; + std::uint8_t *GetData(); + std::size_t GetSize() const; + void ClearBuffer(); + + // For compatibility + inline const std::uint8_t *data() const { return GetData(); } + inline std::uint8_t *data() { return GetData(); } + inline std::size_t size() const { return GetSize(); } + inline void clear() { return ClearBuffer(); } + uint8_t operator[](size_t index) const { + if (buffer_ != nullptr && index < buffer_->size()) { + return (uint8_t)(*buffer_)[index]; + } + return 0xff; + } + + private: + GeIrProtoHelper data_; + std::string *buffer_ = nullptr; + + // Create buffer from protobuf obj + Buffer(const ProtoMsgOwner &protoOnwer, proto::AttrDef *buffer); + Buffer(const ProtoMsgOwner &protoOnwer, std::string *buffer); + + friend class GeAttrValueImp; + friend class GeTensor; +}; +} // namespace ge +#endif // INC_GRAPH_BUFFER_H_ diff --git a/inc/graph/compute_graph.h b/inc/graph/compute_graph.h new file mode 100755 index 00000000..68980d56 --- /dev/null +++ b/inc/graph/compute_graph.h @@ -0,0 +1,242 @@ +/** 
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_COMPUTE_GRAPH_H_ +#define INC_GRAPH_COMPUTE_GRAPH_H_ + +#include +#include +#include +#include +#include +#include + +#include "detail/attributes_holder.h" +#include "graph/anchor.h" +#include "graph/node.h" +#include "graph/op_desc.h" +#include "graph/range_vistor.h" + +namespace ge { +class Node; +using NodePtr = std::shared_ptr; +class Edge; +using EdgePtr = std::shared_ptr; + +class InDataAnchor; +using InDataAnchorPtr = std::shared_ptr; + +class OutDataAnchor; +using OutDataAnchorPtr = std::shared_ptr; + +class ControlAnchor; +using ControlAnchorPtr = std::shared_ptr; +class InControlAnchor; +using InControlAnchorPtr = std::shared_ptr; +class OutControlAnchor; +using OutControlAnchorPtr = std::shared_ptr; +class GeAttrValue; +using AttrValuePtr = std::shared_ptr; +using ConstComputeGraph = const ComputeGraph; + +class OperatorImpl; +using OperatorImplPtr = std::shared_ptr; + +class ComputeGraph : public std::enable_shared_from_this, public AttrHolder { + friend class GraphUtils; + + public: + template + using Vistor = RangeVistor>; + + explicit ComputeGraph(const std::string &name); + virtual ~ComputeGraph(); + + std::string GetName() const; + void SetName(const std::string &name); + + using AttrHolder::DelAttr; + using AttrHolder::GetAttr; + using AttrHolder::HasAttr; + using AttrHolder::SetAttr; + + size_t GetAllNodesSize() 
const; + Vistor GetAllNodes() const; + size_t GetDirectNodesSize() const; + Vistor GetDirectNode() const; + Vistor GetInputNodes() const; + Vistor GetOutputNodes() const; + + NodePtr FindNode(const std::string &name) const; + // Add node + NodePtr AddNode(NodePtr node); + NodePtr AddNode(OpDescPtr op); + NodePtr AddNodeFront(NodePtr node); + NodePtr AddNodeFront(const OpDescPtr &op); + NodePtr AddInputNode(NodePtr node); + NodePtr AddOutputNode(NodePtr node); + + graphStatus RemoveNode(const NodePtr &node); + graphStatus RemoveInputNode(const NodePtr &node); + graphStatus RemoveOutputNode(const NodePtr &node); + graphStatus RemoveConstInput(const NodePtr &node); + + std::shared_ptr AddSubGraph(std::shared_ptr sub_graph); + graphStatus RemoveSubGraph(const std::shared_ptr &sub_graph); + + graphStatus TopologicalSorting(); + bool IsValid() const; + void Dump() const; + + graphStatus IsolateNode(const NodePtr &node); + graphStatus Verify(); + graphStatus InferShape(); + graphStatus InferOriginFormat(); + graphStatus InferShapeInNeed(); + graphStatus InsertEventNodes(); + bool operator==(const ComputeGraph &r_compute_graph) const; + + const std::map, std::vector> &GetShareParamLayer() const { + return params_share_map_; + } + + void SetShareParamLayer(const std::map, std::vector> params_share_map) { + params_share_map_ = params_share_map; + } + + void SetInputsOrder(const std::vector &inputs_order) { inputs_order_ = inputs_order; } + + void SetGraphOutNodes(std::map> out_nodes_map) { out_nodes_map_ = out_nodes_map; } + + void AppendGraphOutNodes(std::map> out_nodes_map) { + for (auto &item : out_nodes_map) { + (void)out_nodes_map_.emplace(item.first, item.second); + } + } + + const std::map> &GetGraphOutNodes() const { return out_nodes_map_; } + + void SetOrigGraph(ComputeGraphPtr orig_graph) { origGraph_ = orig_graph; } + + ComputeGraphPtr GetOrigGraph(void) { return origGraph_; } + void SetOutputSize(uint32_t size) { output_size_ = size; } + uint32_t GetOutputSize() 
const { return output_size_; } + void SetInputSize(uint32_t size) { input_size_ = size; } + uint32_t GetInputSize() const { return input_size_; } + + /// + /// Set iteration needed. + /// If set is true, it means this graph need run iteration some + /// times(according variant "npu_runconfig/iterations_per_loop"). + /// @param need_iteration is need iteration + /// + void SetNeedIteration(bool need_iteration) { need_iteration_ = need_iteration; } + + void SetUserDefOutput(const std::string &output_name); + + const std::string GetOutput(); + + /// + /// Get need_iteration. + /// @return is need iteration + /// + bool GetNeedIteration() const { return need_iteration_; } + + void SetGraphOpName(const std::map &op_name_map) { op_name_map_ = op_name_map; } + const std::map &GetGraphOpName() const { return op_name_map_; } + + const std::map &GetAllNodesInfo() const; + + void SetAllNodesInfo(const std::map &nodes) { all_nodes_infos_ = nodes; } + + void SetGraphOutNodesInfo(std::vector> &out_nodes_info) { + output_nodes_info_ = out_nodes_info; + } + + void AppendGraphOutNodesInfo(std::vector> &out_nodes_info) { + output_nodes_info_.insert(output_nodes_info_.end(), out_nodes_info.begin(), out_nodes_info.end()); + } + + const std::vector> &GetGraphOutNodesInfo() const { return output_nodes_info_; } + + void SetGraphTargetNodesInfo(const std::vector &target_nodes_info) { + target_nodes_info_ = target_nodes_info; + } + const std::vector &GetGraphTargetNodesInfo() const { return target_nodes_info_; } + + void SetSessionID(uint64_t session_id) { session_id_ = session_id; } + uint64_t GetSessionID() const { return session_id_; } + + void SetGraphID(uint32_t graph_id) { graph_id_ = graph_id; } + uint32_t GetGraphID() const { return graph_id_; } + + void SaveDataFormat(ge::Format data_format) { data_format_ = data_format; } + ge::Format GetDataFormat() const { return data_format_; } + bool IsSummaryGraph() const { return is_summary_graph_; } + void SetSummaryFlag(bool 
is_summary_graph) { is_summary_graph_ = is_summary_graph; } + // Graph Before BFE + ComputeGraphPtr origGraph_; + + protected: + ProtoAttrMapHelper MutableAttrMap() override; + ConstProtoAttrMapHelper GetAttrMap() const override; + + private: + graphStatus DFSTopologicalSorting(std::vector &node_vec, std::map &map_in_edge_num, + std::vector &stack); + graphStatus BFSTopologicalSorting(std::vector &node_vec, std::map &map_in_edge_num, + std::deque &stack); + graphStatus CollectBreadthOutNode(const NodePtr &node, std::map &map_in_edge_num, + std::map &breadth_node_map); + graphStatus SortNodes(std::vector &stack, std::map &mapInEdgeNum); + size_t GetInEdgeSize(const NodePtr &node); + size_t GetOutEdgeSize(const NodePtr &node); + graphStatus RemoveExtraOutEdge(const NodePtr &node); + bool GraphMembersAreEqual(const ComputeGraph &r_graph) const; + bool GraphAttrsAreEqual(const ComputeGraph &r_graph) const; + bool VectorInputNodePtrIsEqual(const std::vector &r_node_ptr_vector, + const std::vector &l_node_ptr_vector) const; + + ProtoAttrMapHelper attrs_; + + friend class ModelSerializeImp; + friend class GraphDebugImp; + friend class OnnxUtils; + std::vector nodes_; + std::vector input_nodes_; + std::vector> sub_graph_; + std::string name_; + bool is_valid_flag_; + bool is_summary_graph_ = false; + // Indicates whether it is need iteration + bool need_iteration_ = false; + std::map, std::vector> params_share_map_; + std::map> out_nodes_map_; + // TaskIdx -> op_name Map + std::map op_name_map_; + std::vector inputs_order_; + uint32_t output_size_ = 1; + uint32_t input_size_ = 1; + std::map all_nodes_infos_; + std::vector> output_nodes_info_; + std::vector target_nodes_info_; + uint64_t session_id_ = 0; + uint32_t graph_id_ = 0; + ge::Format data_format_ = ge::FORMAT_ND; +}; +} // namespace ge + +#endif // INC_GRAPH_COMPUTE_GRAPH_H_ diff --git a/inc/graph/debug/ge_attr_define.h b/inc/graph/debug/ge_attr_define.h new file mode 100644 index 00000000..d3d0a122 --- /dev/null 
+++ b/inc/graph/debug/ge_attr_define.h @@ -0,0 +1,778 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_DEBUG_GE_ATTR_DEFINE_H_ +#define INC_GRAPH_DEBUG_GE_ATTR_DEFINE_H_ + +#include + +#include "graph/types.h" + +namespace ge { +#ifdef HOST_VISIBILITY +#define GE_FUNC_HOST_VISIBILITY __attribute__((visibility("default"))) +#else +#define GE_FUNC_HOST_VISIBILITY +#endif +#ifdef DEV_VISIBILITY +#define GE_FUNC_DEV_VISIBILITY __attribute__((visibility("default"))) +#else +#define GE_FUNC_DEV_VISIBILITY +#endif +// Public attribute +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_NAME; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_TYPE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_WORKSPACE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_WEIGHT_NAME; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_IS_QUANTIZE_FACTOR; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_ALPHA; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BETA; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_PADMODE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_PADMODES; + 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_MODE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FILTER; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BIAS; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BIAS_TERM; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_PAD; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_PADS; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_PAD_SIZE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_PAD_MODE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_SCALE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_WINDOWS; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_GLOBAL_POOLING; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_CEIL_MODE; + +// GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string +// ATTR_NAME_WEIGHTS; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_RELUMODE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_STRIDE_SIZE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_RELU_FLAG; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_ALGO; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FORMAT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FILTER_FORMAT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_LRN_K; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_LRN_NORM_REGION; + 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_LRN_LOCAL_SIZE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_LRN_ALPHA; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_LRN_BETA; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_AXIS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BROADCAST; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OUTPUT_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_TIDX; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_TPADDINGS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_IMG_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_IMG_W; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NET_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NET_W; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_TMULTIPLES; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_MULTIPLES; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_T; + +extern GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY const std::string ATTR_NAME_N; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_TSHAPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_NAN_OPT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_AIPP; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_INPUT_FORMAT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OUTPUT_FORMAT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern 
const std::string ATTR_NAME_FRAMEWORK_NODE_DEF; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FRAMEWORK_OP_DEF; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FRAMEWORK_FWK_TYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FRAMEWORK_FUNC_DEF; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_INPUT_TENSOR_DESC; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OUTPUT_TENSOR_DESC; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_INFERRED_FORMAT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_PRED_PERMUTE_DELETED; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_IGNORE_PRED_FORMAT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_WEIGHTS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_WEIGHTS_DATA; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BROACAST_REAL_DIM_CNT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DIM_ALIGN; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_SESSION_GRAPH_ID; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_BATCH_NUM; + + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_AUTOMIC_ADD_START; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_AUTOMIC_ADD_MEM_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_STREAM_LABEL; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_STREAM_CYCLE_EVENT_FLAG; + +// to be 
deleted +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_TO_BE_DELETED; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PERMUTE_RESHAPE_FUSION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PERMUTE_RESHAPE_FUSION_CONV_PROPOSAL; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PERMUTE_RESHAPE_FUSION_CONV_DECODEBBOX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PERMUTE_RESHAPE_FUSION_BOX_TYPE_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_MBOX_LOC_FUSION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_MBOX_CONF_FUSION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_MBOX_OCR_FUSION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_MBOX_FUSION_BOX_TYPE_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_RESHAPE_SLICE_CONCAT_FUSION; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIORBOX_CONCAT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REFINEDET_MBOX_LOC_FUSION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REFINEDET_RESHAPE_SLICE_CONCAT_FUSION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REFINEDET_MBOX_CONF_FUSION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REFINEDET_MBOX_FUSION_BOX_TYPE_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REFINEDET_PRIOR_BOX_ATTR_VARIANCE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REFINEDET_PRIOR_BOX_ATTR_VARIANCE_NUM; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string NEED_INFER; + +// _Arg +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_INDEX; +// _RetVal +GE_FUNC_DEV_VISIBILITY 
GE_FUNC_HOST_VISIBILITY extern const std::string RETVAL_ATTR_NAME_INDEX; +// Data +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DATA_ATTR_NAME_DATA_TYPE; + +// Send +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SEND_ATTR_EVENT_ID; + +// Recv +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RECV_ATTR_EVENT_ID; + +// Convolution +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_COEF; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_STRIDE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_STRIDES; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DILATION; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DILATIONS; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_MODE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_ALGO; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_GROUP; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_PAD_MODE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_PAD; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_STRIDE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_DILATION; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_NUM_OUTPUT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_KERNEL; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_FILTER; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_BIAS; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY 
extern const std::string CONV_ATTR_NAME_RELU_FLAG; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_ADJ; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_TARGET_SHAPE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_BEFORE_PAD; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_ATTR_NAME_HAS_BIAS; + +// Pooling +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_NAN_OPT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_PAD_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_GLOBAL_POOLING; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_WINDOW; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_PAD; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_STRIDE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_CEIL_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_DATA_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_BEFORE_PAD; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POOLING_ATTR_NAME_ALGO; + +// Eltwise +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ELTWISE_ATTR_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ELTWISE_ATTR_COEFF; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ELTWISE_ATTR_WEIGHT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ELTWISE_ATTR_RELU_FLAG; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string 
ELTWISE_ATTR_ALPHA; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ELTWISE_ATTR_BETA; + +// BatchNorm +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string BATCHNORM_ATTR_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string BATCHNORM_ATTR_EPSILON; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string BATCHNORM_ATTR_USE_GLOBAL_STATS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string BATCHNORM_ATTR_MOVING_AVERAGE_FRACTION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string BATCHNORM_ATTR_ESTIMATED_MEAN; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string BATCHNORM_ATTR_ESTIMATED_VARIANCE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string BATCHNORM_ATTR_SCALE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string BATCHNORM_ATTR_BIAS; + +// Scale +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SCALE_ATTR_SCALE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SCALE_ATTR_BIAS; + +// FullConnection +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FULL_CONNECTION_ATTR_FILTER; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FULL_CONNECTION_ATTR_BIAS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FULL_CONNECTION_ATTR_NUM_OUTPUT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FULL_CONNECTION_ATTR_RELU_FLAG; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FULL_ATTR_NAME_ALGO; + +// SoftmaxOpParams +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SOFTMAX_ATTR_ALGO; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SOFTMAX_ATTR_MODE; + +// SparseSoftmaxCrossEntropy +extern GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY const std::string 
SPARSE_SOFTMAX_CROSS_ENTROPY_ATTR_MODE; +extern GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY const std::string SPARSE_SOFTMAX_CROSS_ENTROPY_IS_GRAD; +// Attr labelSmoothing +extern GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY const std::string SOFTMAX_CROSS_ENTROPY_LABELSMOOTHING; + +// ApplyMomentum +extern GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY const std::string APPLYMENTUM_ATTR_IS_GRAPH_FUSION; + +// Activation +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ACTIVATION_ATTR_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ACTIVATION_ATTR_COEF; + +// Concat +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONCAT_ATTR_NAME_AXIS; + +// Const +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONST_ATTR_NAME_DATA_TRANSTYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONST_ATTR_NAME_OUTPUT_FORMAT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONST_ATTR_NAME_OUTPUT_TYPE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEPTH_SPACE_ATTR_BLOCK_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string L2_NORMALIZE_ATTR_EPS; + +// Roipooling +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIPOOLING_ATTR_NAME_POOLED_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIPOOLING_ATTR_NAME_POOLED_W; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIPOOLING_ATTR_NAME_SPATIAL_SCALE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIPOOLING_ATTR_NAME_RIO_POOLING_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIPOOLING_ATTR_NAME_POOLING_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIPOOLING_ATTR_NAME_SAMPLING_RATIO; + +// DetectionOutput +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const 
std::string DETECTIONOUTPUT_ATTR_NUM_CLASSES; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_NMS_THRESHOLD; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_TOP_K; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_CONFIDENCE_THRESHOLD; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_IMG_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_IMG_W; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_BATCH_SIZE; +// Ssd DetectionOutput +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_ETA; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_SHARED_LOCATION; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_BACKGROUND_LABEL_ID; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_CODE_TYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_VARIANCE_ENCODED_IN_TARGET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_KEEP_TOP_K; + +// Refinedet DetectionOutput +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_OBJECTNESS_SCORE; + +// Yolo DetectionOutput +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_ClASSES; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_BIASES; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_RELATIVE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_OBJECTNESS_THRESHOLD; +GE_FUNC_DEV_VISIBILITY 
GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_CLASS_THRESHOLD; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_POST_TOP_K; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_IOU_THRESHOLD_DECAY; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_COOR_SCALE_FACTOR; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DETECTIONOUTPUT_ATTR_YOLO_VERSION; + +// DetectionPostprocess +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POSTPROCESS_ATTR_NAME_CLS_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POSTPROCESS_ATTR_NAME_CONF_THRESH; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POSTPROCESS_ATTR_NAME_NMS_THRESH; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POSTPROCESS_ATTR_POST_NMS_TOPN; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POSTPROCESS_ATTR_NAME_BBOX_REG_WEIGHT; + +// Spatialtransfrom +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SPTIALTF_ATTR_NAME_OUTPUT_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SPTIALTF_ATTR_NAME_OUTPUT_W; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SPTIALTF_ATTR_NAME_BORDER_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SPTIALTF_ATTR_NAME_AFFINE_TRANSFORM; + +// Proposal +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_FEAT_STRIDE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_BASE_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_MIN_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_RATIO; +GE_FUNC_DEV_VISIBILITY 
GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_SCALE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_PRE_NMS_TOPN; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_POST_NMS_TOPN; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_NMS_THRESH; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_NAME_TOP_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_IMG_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PROPOSAL_ATTR_IMG_W; +// Softmax +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SOFTMAX_ATTR_AXIS; + +// Permute +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PERMUTE_ATTR_ORDER; + +// SSD Normalize +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSDNORMALIZE_ATTR_ACCROSS_SPATIAL; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSDNORMALIZE_ATTR_CHANNEL_SHARED; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSDNORMALIZE_ATTR_EPS; + +// Flatten +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FLATTEN_ATTR_AXIS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FLATTEN_ATTR_END_AXIS; + +// SsdPRIORBOX +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_FLIP; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_CLIP; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_IMG_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_IMG_W; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_STEP_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const 
std::string SSD_PRIOR_BOX_ATTR_STEP_W; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_MIN_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_MAX_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_MIN_SIZE_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_MAX_SIZE_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_ASPECT_RATIO; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_ASPECT_RATIO_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_VARIANCE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SSD_PRIOR_BOX_ATTR_VARIANCE_NUM; + +// PRelu +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PRELU_ATTR_CHANNEL_SHARED; + +// Psroi pooling +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PSROIPOOLING_ATTR_SPATIAL_SCALE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PSROIPOOLING_ATTR_OUTPUT_DIM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PSROIPOOLING_ATTR_GROUP_SIZE; + +// Power +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POWER_ATTR_NAME_POWER; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POWER_ATTR_NAME_SCALE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string POWER_ATTR_NAME_SHIFT; + +// Pack +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PACK_ATTR_NAME_NUM; + +// Unpack +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string UNPACK_ATTR_NAME_NUM; +// Gathernd +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY 
extern const std::string GATHERND_ATTR_NAME_TINDICES; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string GATHERND_ATTR_NAME_TPARAMS; + +// Argmax +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ARGMAX_ATTR_NAME_TOPK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ARGMAX_ATTR_NAME_OUTMAX; + +// Relu +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_NEGATIVE_SLOPE; + +// FreeSpaceExtract +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FREESPACEEXTRACT_ATTR_NAME_ORG_HEIGHT; + +// Split +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SPLIT_ATTR_NAME_SLICE_POINT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SPLIT_ATTR_NAME_SIZE_SPLIT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SPLIT_ATTR_NAME_NUM_SPLIT; + +// Tvm +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string TVM_ATTR_NAME_MAGIC; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string TVM_ATTR_NAME_BLOCKDIM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string TVM_ATTR_NAME_METADATA; + +// Squeeze +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SQUEEZE_ATTR_AXIS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SQUEEZE_ATTR_DIMS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SQUEEZE_OP_NAME; + +// Stride slice +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string STRIDE_SLICE_ATTR_BEGIN_MASK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string STRIDE_SLICE_ATTR_END_MASK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string STRIDE_SLICE_ATTR_ELLIPSIS_MASK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string STRIDE_SLICE_ATTR_NEW_AXIS_MASK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern 
const std::string STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK; + +// Slice +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SLICE_ATTR_NAME_BEGINS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SLICE_ATTR_NAME_SIZES; + +// Roialign +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIALIGN_ATTR_SPATIAL_SCALE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIALIGN_ATTR_SAMPLING_RATIO; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIALIGN_ATTR_NAME_POOLED_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ROIALIGN_ATTR_NAME_POOLED_W; + +// Generate_rpn_proposal +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string GENERATE_RPN_PROPOSAL_ATTR_PRE_NMS_TOPK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string GENERATE_RPN_PROPOSAL_ATTR_POST_NMS_TOPK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string GENERATE_RPN_PROPOSAL_ATTR_RPN_MINI_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string + GENERATE_RPN_PROPOSAL_ATTR_RPN_PROPOSAL_NMS_THRESH; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string + GENERATE_RPN_PROPOSAL_ATTR_RPN_PROPOSAL_FILTER_THRESH; +// Decode_bbox +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DECODE_BBOX_ATTR_DECODECLIP; + +// Cast +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CAST_ATTR_DSTT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CAST_ATTR_SRCT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CAST_ATTR_DST_TYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CAST_ATTR_TRUNCATE; + +// Fastrcnnn predications +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FASTRCNN_PREDICTIONS_ATTR_TOPK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const 
std::string FASTRCNN_PREDICTIONS_ATTR_SCORE_THRESHOLD; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FASTRCNN_PREDICTIONS_ATTR_NMS_THRESHOLD; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FASTRCNN_PREDICTIONS_ATTR_NUM_CLASSES; + +// REORG +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REORG_ATTR_STRIDE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REORG_ATTR_REVERSE; + +// MERGE +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string MERGE_DEAD_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string MERGE_PRENODE_FLAG; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string TO_BE_OUTPUT; + +// ENTER +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ENTER_ATTR_FRAME_NAME; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ENTER_ATTR_CONSTANT_FLAG; + +// Concatv2 +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONCAT_V2_ATTR_TIDX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONCAT_V2_ATTR_N; +// SUM +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SUM_ATTR_TIDX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SUM_ATTR_AXIS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SUM_ATTR_KEEP_DIMS; + +// ResizeBilinear +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_ALIGN_CORNERS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_HEIGHT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_WIDTH; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_ZOOM_FACTOR; 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_SHRINK_FACTOR; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_PAD_BEGIN; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_PAD_END; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_ALPHA; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESIZE_BILINEAR_ATTR_BETA; + +// MatMul +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string MATMUL_TRANSPOSE_X; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string MATMUL_TRANSPOSE_W; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string MATMUL_HAS_BIAS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string MATMUL_ATTR_IS_TRAINING; + +// Flatten +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FLATTEN_START_AXIS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FLATTEN_END_AXIS; + +// Reshape +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESHAPE_ATTR_AXIS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESHAPE_ATTR_NUM_AXES; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESHAPE_ATTR_FORMAT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESHAPE_ATTR_SHAPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESHAPE_ATTR_ALPHA; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RESHAPE_ATTR_BETA; + +// Frameoworkop +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string T_IN_DATATYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string T_OUT_DATATYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OUT_N; +GE_FUNC_DEV_VISIBILITY 
GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OUT_C; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OUT_H; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OUT_W; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_PAD_DEPTH_CONV; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_PAD_CONV; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BEFORE_PAD; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ANN_MEAN_KEEPDIMS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PAD_ATTR_PADDINGDS; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PAD_ATTR_CONSTANT_VALUE; + +// ConvGradFilter +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_GRAD_FILTER_OUTPUT_SHAPE; +// ConvGradInput +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CONV_GRAD_INPUT_OUTPUT_SHAPE; + +// Rnn +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RNN_MODE_STATIC; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string MUTI_RNN; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CELL_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string CNN_RNN; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string LSTM_CELL; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string GRU_CELL; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RNN_HT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RNN_XT_HT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string RNN_BATCH_SIZE; + +// Upsample +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string UPSAMPLE_ATTR_NAME_SCALE; + +// Filler +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY 
extern const std::string FILLER_TYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FILLER_VALUE; + +// Shufflechannel +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SHUFFLE_CHANNEL_GROUP; + +// TopKV2 +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string TOPKV2_ATTR_K; + +// Calibaration +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string STRIDE_H_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string STRIDE_W_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PAD_TOP_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PAD_BOTTOM_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PAD_RIGHT_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string PAD_LEFT_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_ALGO_ATTR; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string SCALE_TYPE_ATTR; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_SCALE_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_SCALE_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_SCALE_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_OFFSET_DATA_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_OFFSET_DATA_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_OFFSET_WEIGHT_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_OFFSET_WEIGHT_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_OFFSET_PAD_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string QUANTIZE_OFFSET_PAD_OFFSET; + 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_SCALE_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_SCALE_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_SCALE_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_OFFSET_DATA_TYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_OFFSET_DATA_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_OFFSET_WEIGHT_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_OFFSET_WEIGHT_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_OFFSET_PAD_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DEQUANTIZE_OFFSET_PAD_OFFSET; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_SCALE_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_SCALE_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_SCALE_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_OFFSET_DATA_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_OFFSET_DATA_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_OFFSET_WEIGHT_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_OFFSET_WEIGHT_OFFSET; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_OFFSET_PAD_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REQUANTIZE_OFFSET_PAD_OFFSET; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_IS_CONST; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string 
ATTR_NAME_GROUP; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DILATION_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_EPSILON; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_POOLING_MODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_CLASS_NUM; +// Model +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_TARGET_TYPE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_STREAM_NUM; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_EVENT_NUM; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_MEMORY_SIZE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_WEIGHT_SIZE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_TASK_GEN_BASE_ADDR; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_TASK_GEN_WEIGHT_ADDR; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_TASK_GEN_VAR_ADDR; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_VAR_SIZE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_MODEL_TASK_INDEX_OP_NAME; + +// Public attribute +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_IMPLY_TYPE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BYTE_SIZE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FUSION_INFERENCE_ID; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_FUSION_OPDEF; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_IO_OP; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const 
std::string ATTR_NAME_FUSION_SCOPE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OPATTR; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_RELUFLAG; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_SEQLEN_INDEX; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_X_INDEX; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_CONT_INDEX; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_XSTATIC_INDEX; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string TARGET_TYPE_MINI; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string TARGET_TYPE_TINY; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string TARGET_TYPE_LITE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_CONTINUOUS_INPUT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_CONTINUOUS_OUTPUT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_REFERENCE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_ATOMIC_INDEX; + +// Used for mark the active label list to find stream of activated node +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_ACTIVE_LABEL_LIST; + +// Multi batch +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_PRED_VALUE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BATCH_NUM; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_BATCH_LABEL; + +// Control flow +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_STREAM_SWITCH_COND; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_TRUE_BRANCH_STREAM; 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_ACTIVE_STREAM_LIST; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_SWITCHN_PRED_VALUE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_SWITCH_BRANCH_NODE_LABEL; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_SWITCH_TRUE_BRANCH_FLAG; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_SWITCH_DATA_TYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_ORIG_NODE_NAME; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_CYCLIC_DEPENDENCE_FLAG; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_NEXT_ITERATION; + +// Used for mark the active node is for loop, type:bool +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_IS_LOOP_ACTIVE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_MEMORY_TYPE_INPUT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_MEMORY_TYPE_OUTPUT; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_MEMORY_TYPE_WORKSPACE; + +// Atomic addr clean attrs +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATOMIC_ATTR_INPUT_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATOMIC_ATTR_OUTPUT_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATOMIC_ATTR_IS_FUSION_NODE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATOMIC_ATTR_IS_ATOMIC_NODE; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string EXT_ATTR_ATOMIC_WORKSPACE_INFO; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string EXT_ATTR_ATOMIC_WORKSPACE_OFFSET; +// Used for find variable session_id 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string MODEL_ATTR_SESSION_ID; + +// Source/dst format for Op FormatTransfer +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FORMAT_TRANSFER_SRC_FORMAT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string FORMAT_TRANSFER_DST_FORMAT; + +// For compile op by ge call +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NEED_COMPILE; + +// For mutil-batch +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_INSERT_BY_MBATCH; + +// For inserted op +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_INSERTED_BY_GE; + +// For data dump +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DATA_DUMP_IS_MULTIOP; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DATA_DUMP_SUB_SPLITER_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DATA_DUMP_GROUP_OP_NAME; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DATA_DUMP_ORIGIN_NAME; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DATA_DUMP_ORIGIN_OUTPUT_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DATA_DUMP_ORIGIN_FORMAT; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_DATA_DUMP_ORIGIN_DATA_TYPE; + +// Varible +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REF_VAR_SRC_VAR_NAME; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string VAR_ATTR_SRC_VAR_NAME; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string REF_VAR_PRE_PEER_OUT_INDEX; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string 
VAR_ATTR_VAR_IS_BROADCAST; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string VAR_ATTR_VAR_IS_RESTORE; + +// HCOM +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string HCOM_ATTR_ROOT_RANK; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string HCOM_ATTR_REDUCE_TYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string HCOM_ATTR_RANK_SIZE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string HCOM_ATTR_SHAPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string HCOM_ATTR_DATA_TYPE; + + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_INPUT_DATATYPE; +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string ATTR_NAME_OUTPUT_DATATYPE; + +// Dynamic stitch +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY extern const std::string DYNAMIC_STITCH_ATTR_NAME_NUM; +} // namespace ge + +#endif // INC_GRAPH_DEBUG_GE_ATTR_DEFINE_H_ diff --git a/inc/graph/def_types.h b/inc/graph/def_types.h new file mode 100644 index 00000000..706e7f51 --- /dev/null +++ b/inc/graph/def_types.h @@ -0,0 +1,198 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_DEF_TYPES_H_ +#define INC_GRAPH_DEF_TYPES_H_ + +#include +#include +#include + +#include "graph/attr_value_serializable.h" +#include "graph/buffer.h" + +namespace ge { +#define DEF_TYPE_DEC(type, name) \ + inline void set_##name(const type &value) { name = value; } \ + type *mutable_##name() { return &name; } + +#define DEF_TYPE_HAS_DEC(type, name) \ + inline void set_##name(const type &value) { name = value; } \ + \ + private: \ + bool has_mutable_##name{false}; \ + \ + public: \ + bool has_##name() const { return (has_mutable_##name) || QuantizeFactorHasData(name); } \ + type *mutable_##name() { \ + has_mutable_##name = true; \ + return &name; \ + } + +#define DEF_TYPE_VEC_DEC(type, name) \ + inline int name##_size() const { return name.size(); } \ + inline void clear_##name() { name.clear(); } \ + inline void set_##name(int index, type value) { name[index] = value; } \ + inline void add_##name(type value) { name.push_back(value); } \ + inline std::vector *mutable_##name() { return &name; } + +#define DEF_TYPE_BYTES_DEC(name) \ + inline void clear_##name() { name.ClearBuffer(); } \ + inline void set_##name(const void *value, size_t size) { \ + name = Buffer::CopyFrom((const uint8_t *)(value), size); } \ + inline Buffer *mutable_##name() { return &name; } + +struct CompressInfo { + public: + CompressInfo() {} + CompressInfo(int32_t blockRow, int32_t blockCol, int32_t fractalK, int32_t fractalN, int32_t lastFractalK, + int32_t lastFractalN, int32_t cubeSize, int32_t loadDir) { + blockrow = blockRow; + blockcol = blockCol; + fractalk = fractalK; + fractaln = fractalN; + lastfractalk = lastFractalK; + lastfractaln = lastFractalN; + cubesize = cubeSize; + loaddir = loadDir; + } + + int32_t blockrow{0}; // Block row + int32_t blockcol{0}; // Block col + int32_t fractalk{0}; // Fractal K + int32_t fractaln{0}; // Fractal N + int32_t lastfractalk{0}; // K of last fractal + int32_t lastfractaln{0}; // N of last fractal + int32_t cubesize{0}; // 
Cube's length + int32_t loaddir{0}; // Data load directtiono 0:col load 1:row load + DEF_TYPE_DEC(int32_t, blockrow); + DEF_TYPE_DEC(int32_t, blockcol); + DEF_TYPE_DEC(int32_t, fractalk); + DEF_TYPE_DEC(int32_t, fractaln); + DEF_TYPE_DEC(int32_t, lastfractalk); + DEF_TYPE_DEC(int32_t, lastfractaln); + DEF_TYPE_DEC(int32_t, cubesize); + DEF_TYPE_DEC(int32_t, loaddir); + + GE_SERIALIZABLE(blockrow, blockcol, fractalk, fractaln, lastfractalk, lastfractaln, cubesize, loaddir); +}; + +enum QuantizeScaleType { VECTOR_SCALE = 0, SCALAR_SCALE = 1 }; +enum QuantizeScaleMode { NORMAL_MODE = 0, SQRT_MODE = 1 }; +enum QuantizeAlgorithm { + NON_OFFSET_ALGO = 0, + HALF_OFFSET_ALGO = 1, + ALL_OFFSET_ALGO = 2, +}; +struct QuantizeFactor { + public: + // QuantizeScaleMode scale_mode; + uint32_t scale_mode{0}; + Buffer scale_value; + int64_t scale_offset{0}; + Buffer offset_data_value; + int64_t offset_data_offset{0}; + Buffer offset_weight_value; + int64_t offset_weight_offset{0}; + Buffer offset_pad_value; + int64_t offset_pad_offset{0}; + + DEF_TYPE_DEC(uint32_t, scale_mode); + DEF_TYPE_BYTES_DEC(scale_value); + + DEF_TYPE_DEC(int64_t, scale_offset); + DEF_TYPE_BYTES_DEC(offset_data_value); + DEF_TYPE_DEC(int64_t, offset_data_offset); + + DEF_TYPE_BYTES_DEC(offset_weight_value); + DEF_TYPE_DEC(int64_t, offset_weight_offset); + DEF_TYPE_BYTES_DEC(offset_pad_value); + DEF_TYPE_DEC(int64_t, offset_pad_offset); + + GE_SERIALIZABLE(scale_mode, scale_value, scale_offset, offset_data_value, offset_data_offset, offset_weight_value, + offset_weight_offset, offset_pad_value, offset_pad_offset) +}; + +static inline bool QuantizeFactorHasData(const QuantizeFactor &factor) { + return factor.scale_value.GetSize() > 0 || factor.offset_data_value.GetSize() > 0 || + factor.offset_weight_value.GetSize() > 0 || factor.offset_pad_value.GetSize() > 0; +} + +struct AllOffsetQuantizeInfo { + public: + AllOffsetQuantizeInfo() {} + AllOffsetQuantizeInfo(float s, int32_t o) : scale(s), offset(o) {} + 
float scale{0}; + int32_t offset{0}; + + DEF_TYPE_DEC(float, scale); + DEF_TYPE_DEC(int32_t, offset); + + GE_SERIALIZABLE(scale, offset) +}; + +struct QuantizeCalcFactor { + public: + Buffer offsetw; + int64_t offsetw_offset{0}; + Buffer offsetd; + int64_t offsetd_offset{0}; + Buffer scalereq; + int64_t scaledreq_offset{0}; + Buffer offsetdnext; + int64_t offsetdnext_offset{0}; + + DEF_TYPE_BYTES_DEC(offsetw); + DEF_TYPE_DEC(int64_t, offsetw_offset); + DEF_TYPE_BYTES_DEC(offsetd); + DEF_TYPE_DEC(int64_t, offsetd_offset); + DEF_TYPE_BYTES_DEC(scalereq); + DEF_TYPE_DEC(int64_t, scaledreq_offset); + DEF_TYPE_BYTES_DEC(offsetdnext); + DEF_TYPE_DEC(int64_t, offsetdnext_offset); + + GE_SERIALIZABLE(offsetw, offsetw_offset, offsetd, offsetd_offset, scalereq, scaledreq_offset, offsetdnext, + offsetdnext_offset); +}; + +static inline bool QuantizeFactorHasData(const QuantizeCalcFactor &factor) { + return factor.offsetw.GetSize() > 0 || factor.offsetd.GetSize() > 0 || factor.scalereq.GetSize() > 0 || + factor.offsetdnext.GetSize() > 0; +} + +struct QuantizeFactorParams { + uint32_t quantize_algo{0}; + uint32_t scale_type{0}; + QuantizeFactor quantize_param; + QuantizeFactor dequantize_param; + QuantizeFactor requantize_param; + QuantizeCalcFactor quantizecalc_param; + DEF_TYPE_DEC(uint32_t, quantize_algo); + DEF_TYPE_DEC(uint32_t, scale_type); + DEF_TYPE_HAS_DEC(QuantizeFactor, quantize_param); + DEF_TYPE_HAS_DEC(QuantizeFactor, dequantize_param); + DEF_TYPE_HAS_DEC(QuantizeFactor, requantize_param); + DEF_TYPE_HAS_DEC(QuantizeCalcFactor, quantizecalc_param); + + GE_SERIALIZABLE(quantize_algo, scale_type, quantize_param, dequantize_param, requantize_param, quantizecalc_param, + has_mutable_quantize_param, has_mutable_dequantize_param, has_mutable_requantize_param, + has_mutable_quantizecalc_param); +}; + +#undef DEF_TYPE_DEC +} // namespace ge + +#endif // INC_GRAPH_DEF_TYPES_H_ diff --git a/inc/graph/detail/any_map.h b/inc/graph/detail/any_map.h new file mode 100644 index 
00000000..c417c6a9 --- /dev/null +++ b/inc/graph/detail/any_map.h @@ -0,0 +1,118 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_DETAIL_ANY_MAP_H_ +#define INC_GRAPH_DETAIL_ANY_MAP_H_ + +#include +#include +#include +#include + +namespace ge { +using std::shared_ptr; +using std::string; + +class TypeID { + public: + template + static TypeID Of() { + return TypeID(__PRETTY_FUNCTION__); + } + + ~TypeID() = default; + + bool operator==(const TypeID &__arg) const { return type_ == __arg.type_; } + + private: + explicit TypeID(string type) : type_(std::move(type)) {} + + string type_; +}; + +class AnyMap { + public: + template + bool Set(const string &name, const DT &val); + + template + bool Get(const string &name, T &retValue) const; + + bool Has(const string &name) const { return anyValues_.find(name) != anyValues_.end(); } + + private: + class Placeholder { + public: + virtual ~Placeholder() = default; + + virtual const TypeID &GetTypeInfo() const = 0; + }; + + template + class Holder : public Placeholder { + public: + explicit Holder(const VT &value) : value_(value) {} + + ~Holder() override = default; + + const TypeID &GetTypeInfo() const override { + static const TypeID typeId = TypeID::Of(); + return typeId; + } + + const VT value_; + }; + + std::map> anyValues_; +}; + +template +bool AnyMap::Set(const string &name, const DT &val) { + auto it = anyValues_.find(name); + 
+ std::shared_ptr> tmp; + try { + tmp = std::make_shared>(val); + } catch (std::bad_alloc &e) { + tmp = nullptr; + } catch (...) { + tmp = nullptr; + } + + if (it == anyValues_.end()) { + (void)anyValues_.emplace(name, tmp); + } else { + if (it->second && it->second->GetTypeInfo() == TypeID::Of
()) { + it->second = tmp; + } else { + return false; + } + } + return true; +} + +template +bool AnyMap::Get(const string &name, T &retValue) const { + auto it = anyValues_.find(name); + if (it != anyValues_.end() && it->second && it->second->GetTypeInfo() == TypeID::Of()) { + auto retPtr = std::static_pointer_cast>(it->second); + retValue = retPtr->value_; + return true; + } + return false; +} +} // namespace ge +#endif // INC_GRAPH_DETAIL_ANY_MAP_H_ diff --git a/inc/graph/detail/attributes_holder.h b/inc/graph/detail/attributes_holder.h new file mode 100644 index 00000000..77903b30 --- /dev/null +++ b/inc/graph/detail/attributes_holder.h @@ -0,0 +1,154 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_DETAIL_ATTRIBUTES_HOLDER_H_ +#define INC_GRAPH_DETAIL_ATTRIBUTES_HOLDER_H_ + +#include +#include +#include +#include +#include +#include + +#include "graph/detail/any_map.h" +#include "graph/ge_error_codes.h" +#include "graph/types.h" + +namespace google { +namespace protobuf { +class Message; +template +class Map; +} // namespace protobuf +} // namespace google + +namespace ge { +using std::string; +class GeAttrValue; + +namespace proto { +class AttrDef; +class TensorDef; +class TensorDescriptor; +class ShapeDef; +class NamedAttrs; +class ModelDef; +class OpDef; +class GraphDef; +} // namespace proto + +using ProtoAttrMap = ::google::protobuf::Map<::std::string, ::ge::proto::AttrDef>; +using ProtoMsgOwner = std::shared_ptr<::google::protobuf::Message>; + +template +class GeIrProtoHelper { + public: + GeIrProtoHelper(const ProtoMsgOwner &protoOwner, ProtoType *protoMsg) + : protoOwner_(protoOwner), protoMsg_(protoMsg) {} + + GeIrProtoHelper() { + protoOwner_ = std::shared_ptr<::google::protobuf::Message>(nullptr); + protoMsg_ = nullptr; + } + virtual ~GeIrProtoHelper() = default; + + template + GeIrProtoHelper(const GeIrProtoHelper &other) { + protoOwner_ = other.protoOwner_; + protoMsg_ = other.protoMsg_; + } + template + GeIrProtoHelper &operator=(const GeIrProtoHelper &other) { + protoOwner_ = other.protoOnwer_; + protoMsg_ = other.protoMsg_; + return *this; + } + void InitDefault(); + template + bool operator==(const GeIrProtoHelper &other) const { + return protoOwner_ == other.protoOwner_ && protoMsg_ == other.protoMsg_; + } + + inline const ProtoMsgOwner &GetProtoOwner() const { return protoOwner_; } + inline ProtoType *GetProtoMsg() const { return protoMsg_; } + void CopyValueFrom(const GeIrProtoHelper &other) { + if (other.protoMsg_ != nullptr && protoMsg_ != nullptr) { + *protoMsg_ = *other.protoMsg_; + } + } + void MoveValueFrom(GeIrProtoHelper &&other) { + if (other.protoMsg_ != nullptr && protoMsg_ != nullptr) { + *protoMsg_ = 
std::move(*other.protoMsg_); + } + } + + // protoMsg_ is part of protoOwner_ and they have the same runtime + ProtoMsgOwner protoOwner_ = nullptr; + ProtoType *protoMsg_ = nullptr; + friend class GeIrProtoHelper::value, typename std::remove_const::type, const ProtoType>::type>; +}; + +using ProtoAttrMapHelper = GeIrProtoHelper; +using ConstProtoAttrMapHelper = GeIrProtoHelper; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY AttrHolder { + public: + AttrHolder() = default; + virtual ~AttrHolder() = default; + + graphStatus SetAttr(const string &name, const GeAttrValue &value); + + graphStatus GetAttr(const string &name, GeAttrValue &value) const; + + bool HasAttr(const string &name) const; + + graphStatus DelAttr(const string &name); + + void CopyAttrsFrom(const AttrHolder &holder); + + template + bool SetExtAttr(const string &name, const T &value) { + return extAttrs_.Set(name, value); + } + template + T TryGetExtAttr(const string &name, T defaultValue) const { + T ret(defaultValue); + (void)extAttrs_.Get(name, ret); + return ret; + } + + protected: + graphStatus AddRequiredAttr(const std::string &name); + const std::unordered_set GetAllAttrNames() const; + const std::map GetAllAttrs() const; + + virtual ProtoAttrMapHelper MutableAttrMap() = 0; + virtual ConstProtoAttrMapHelper GetAttrMap() const = 0; + + friend class ModelSerializeImp; + friend class AttrUtils; + friend class AttrUtilsHelper; + + std::vector requiredAttrs_; + + private: + AnyMap extAttrs_; +}; +} // namespace ge + +#endif // INC_GRAPH_DETAIL_ATTRIBUTES_HOLDER_H_ diff --git a/inc/graph/detail/model_serialize_imp.h b/inc/graph/detail/model_serialize_imp.h new file mode 100644 index 00000000..c5ed49e4 --- /dev/null +++ b/inc/graph/detail/model_serialize_imp.h @@ -0,0 +1,90 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_DETAIL_MODEL_SERIALIZE_IMP_H_ +#define INC_GRAPH_DETAIL_MODEL_SERIALIZE_IMP_H_ + +#include +#include +#include +#include + +#include "graph/anchor.h" +#include "graph/model.h" +#include "detail/attributes_holder.h" +#include "graph/ge_tensor.h" +#include "graph/graph.h" +#include "graph/node.h" + +namespace ge { +using ComputeGraphPtr = std::shared_ptr; + +struct NodeNameGraphReq { + string node_name; + int32_t index; + ComputeGraphPtr graph; +}; + +struct NodeNameNodeReq { + string src_node_name; + int32_t src_out_index; + NodePtr dst_node; + int32_t dst_in_index; + string dst_node_name; +}; + +class ModelSerializeImp { + public: + bool SerializeModel(const Model &model, proto::ModelDef *modeProto); + + bool SerializeGraph(const ConstComputeGraphPtr &graph, proto::GraphDef *graphProto); + + bool SerializeEdge(const NodePtr &node, proto::OpDef *opDefProto); + + bool SerializeOpDesc(const ConstOpDescPtr &node, proto::OpDef *opDefProto); + + bool SerializeNode(const NodePtr &node, proto::OpDef *opDefProto); + + bool SerializeTensor(const ConstGeTensorPtr &tensor, proto::TensorDef *tensorProto); + + bool UnserializeModel(Model &model, proto::ModelDef &modeProto); + + bool UnserializeGraphWithoutEdge(ComputeGraphPtr &graph, proto::GraphDef &graphProto); + + bool UnserializeGraph(ComputeGraphPtr &graph, proto::GraphDef &graphProto); + + bool HandleNodeNameRef(); + + bool UnserializeOpDesc(OpDescPtr &opDesc, proto::OpDef &opDefProto); + + bool UnserializeNode(ComputeGraphPtr &graph, proto::OpDef &opDefProto); + + bool 
UnserializeTensor(GeTensorPtr &tensor, proto::TensorDef &tensorProto); + + bool ParseNodeIndex(const string &node_index, string &nodeName, int32_t &index); + + void SetProtobufOwner(const ProtoMsgOwner &bufferProtobufOnwer) { protobuf_owner_ = bufferProtobufOnwer; } + + private: + std::vector graph_input_node_names_; + std::vector graph_output_node_names_; + std::vector node_input_node_names_; + std::map node_map_; + ProtoMsgOwner protobuf_owner_; +}; +} // namespace ge + +#endif // INC_GRAPH_DETAIL_MODEL_SERIALIZE_IMP_H_ diff --git a/inc/graph/ge_attr_value.h b/inc/graph/ge_attr_value.h new file mode 100644 index 00000000..11da6fae --- /dev/null +++ b/inc/graph/ge_attr_value.h @@ -0,0 +1,342 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_GE_ATTR_VALUE_H_ +#define INC_GRAPH_GE_ATTR_VALUE_H_ + +#include +#include +#include +#include +#include +#include + +#include "graph/buffer.h" +#include "detail/attributes_holder.h" +#include "graph/ge_error_codes.h" +#include "graph/ge_tensor.h" + +using std::map; +using std::string; +using std::vector; + +namespace ge { +class GeTensor; + +using GeTensorPtr = std::shared_ptr; +using ConstGeTensorPtr = std::shared_ptr; + +class ComputeGraph; +using ComputeGraphPtr = std::shared_ptr; +using ConstComputeGraphPtr = std::shared_ptr; + +class GeTensorDesc; + +class GeAttrValueImp; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeAttrValue { + public: + class NamedAttrs : public AttrHolder { + public: + NamedAttrs(); + virtual ~NamedAttrs() = default; + void SetName(const std::string &name); + string GetName() const; + GeAttrValue GetItem(const string &key) const; + + protected: + ProtoAttrMapHelper MutableAttrMap() override; + ConstProtoAttrMapHelper GetAttrMap() const override; + + private: + // Create namedAttrs from protobuf obj + NamedAttrs(const ProtoMsgOwner &owner, proto::NamedAttrs *protoMsg); + GeIrProtoHelper named_attrs_; + friend class GeAttrValueImp; + }; + + using INT = int64_t; + using FLOAT = float; + using BOOL = bool; + using STR = std::string; + using TENSOR = GeTensorPtr; + using TENSOR_DESC = GeTensorDesc; + using GRAPH = ComputeGraphPtr; + using BYTES = Buffer; + using NAMED_ATTRS = NamedAttrs; + using DATA_TYPE = ge::DataType; + + using LIST_INT = vector; + using LIST_FLOAT = vector; + using LIST_BOOL = vector; + using LIST_STR = vector; + using LIST_TENSOR = vector; + using LIST_TENSOR_DESC = vector; + using LIST_GRAPH = vector; + using LIST_BYTES = vector; + using LIST_NAMED_ATTRS = vector; + using LIST_LIST_INT = vector>; + using LIST_DATA_TYPE = vector; + + enum ValueType { + VT_NONE = 0, + VT_STRING, + VT_FLOAT, + VT_BOOL, + VT_INT, + VT_TENSOR_DESC, + VT_TENSOR, + VT_BYTES, + VT_GRAPH, + VT_NAMED_ATTRS, + 
VT_LIST_LIST_INT, + VT_DATA_TYPE, + + VT_LIST_BASE = 1000, + VT_LIST_STRING = VT_LIST_BASE + VT_STRING, + VT_LIST_FLOAT = VT_LIST_BASE + VT_FLOAT, + VT_LIST_BOOL = VT_LIST_BASE + VT_BOOL, + VT_LIST_INT = VT_LIST_BASE + VT_INT, + VT_LIST_TENSOR_DESC = VT_LIST_BASE + VT_TENSOR_DESC, + VT_LIST_TENSOR = VT_LIST_BASE + VT_TENSOR, + VT_LIST_BYTES = VT_LIST_BASE + VT_BYTES, + VT_LIST_GRAPH = VT_LIST_BASE + VT_GRAPH, + VT_LIST_NAMED_ATTRS = VT_LIST_BASE + VT_NAMED_ATTRS, + VT_LIST_DATA_TYPE = VT_LIST_BASE + VT_DATA_TYPE, + }; + + template + struct IsAttrTypeEnable { + using DT = typename std::remove_cv::type; + + static bool const VALUE = std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value; + + // Not has list type of NamedAttrs + static bool const LIST_VALUE = std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value || + std::is_same::value || std::is_same::value || + std::is_same::value || + std::is_same::value || std::is_same::value; + }; + + template + // To cols + using enable_if_vector_type_valid_t = typename std::enable_if::LIST_VALUE, + int>::type; + + template + using enable_if_one_type_valid_t = typename std::enable_if::VALUE, int>::type; + + template + using enable_if_type_valid_t = + typename std::enable_if::VALUE || IsAttrTypeEnable::LIST_VALUE, int>::type; + + template + using enable_if_seriliable_type_valid_t = typename seriliable_type::__ge_serializable; + + GeAttrValue(); + ~GeAttrValue() = default; + // SetValue, Set initializer_list + template = 0> + graphStatus SetValue(std::initializer_list
&&val) { + T vectorVal; + for (auto &item : val) { + vectorVal.push_back(item); + } + return SetValue(vectorVal); + } + + // SetValue, Set vector + template = 0> + graphStatus SetValue(const std::vector
&val) { + T vectorVal; + for (auto item : val) { + vectorVal.push_back(item); + } + return SetValue(vectorVal); + } + + // SetValue, not list type + template = 0> + graphStatus SetValue(DT &&val) { + return SetValue(T(std::forward
(val))); + } + + // GE_SERIALIZABLE + template = 0> + graphStatus SetValue(const T &t) { + return t.Save(*this); + } + + template = 0> + graphStatus SetValue(const vector &t) { + vector attrs; + for (auto &item : t) { + GeAttrValue val; + item.Save(val); + NamedAttrs attrsItem; + (void)val.GetValue(attrsItem); + attrs.push_back(attrsItem); + } + return SetValue(attrs); + } + + // GetValue, list value + template = 0, + typename std::enable_if::value, int>::type = 0> + graphStatus GetValue(std::vector
&val) const { + T valGet; + val.clear(); + auto status = GetValue(valGet); + if (status != GRAPH_SUCCESS) { + return status; + } + for (auto item : valGet) { + val.push_back(item); + } + return GRAPH_SUCCESS; + } + + // GetValue, not list type + template = 0, + typename std::enable_if::value, int>::type = 0> + graphStatus GetValue(DT &val) const { + T valGet; + auto status = GetValue(valGet); + if (status != GRAPH_SUCCESS) { + return status; + } + val = DT(valGet); + return GRAPH_SUCCESS; + } + + // GE_SERIALIZABLE + template = 0> + graphStatus GetValue(T &t) { + return t.Load(*this); + } + + template = 0> + graphStatus GetValue(vector &t) { + graphStatus status; + t.clear(); + vector attrs; + status = this->GetValue(attrs); + if (status != GRAPH_SUCCESS) { + return status; + } + for (auto &attr : attrs) { + T item; + GeAttrValue val; + (void)val.SetValue(attr); + status = item.Load(val); + if (status != GRAPH_SUCCESS) { + return status; + } + t.push_back(item); + } + return GRAPH_SUCCESS; + } + + template = 0> + static GeAttrValue CreateFrom(DT &&val) { + GeAttrValue valRet; + (void)valRet.SetValue(std::forward
(val)); + return valRet; + } + + template = 0> + static GeAttrValue CreateFrom(std::initializer_list
&&val) { + GeAttrValue valRet; + (void)valRet.SetValue(std::move(val)); + return valRet; + } + + template = 0> + static GeAttrValue CreateFrom(const T &val) { + GeAttrValue valRet; + (void)valRet.SetValue(val); + return valRet; + } + + template = 0> + static GeAttrValue CreateFrom(const vector &val) { + GeAttrValue valRet; + (void)valRet.SetValue(val); + return valRet; + } + + ValueType GetValueType() const; + + bool IsEmpty() const; + + GeAttrValue Copy() const; + + // For map key + bool operator==(const GeAttrValue &other) const { return value_ == other.value_; } + + graphStatus MutableTensor(GeTensorPtr &tensor); + graphStatus MutableListTensor(vector &list_tensor); + + private: +#define VALUE_SET_GET_DEC(DT) \ + graphStatus SetValue(const DT &val); \ + graphStatus GetValue(DT &val) const; + VALUE_SET_GET_DEC(GeAttrValue::STR) + VALUE_SET_GET_DEC(GeAttrValue::INT) + VALUE_SET_GET_DEC(GeAttrValue::FLOAT) + VALUE_SET_GET_DEC(GeAttrValue::BOOL) + VALUE_SET_GET_DEC(GeTensorDesc) + VALUE_SET_GET_DEC(GeAttrValue::TENSOR) + VALUE_SET_GET_DEC(GeAttrValue::GRAPH) + VALUE_SET_GET_DEC(BYTES) + VALUE_SET_GET_DEC(NamedAttrs) + VALUE_SET_GET_DEC(ge::DataType) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector) + VALUE_SET_GET_DEC(vector>) + VALUE_SET_GET_DEC(vector) +#undef VALUE_SET_GET_DEC + + GeIrProtoHelper value_; + GeAttrValue(const ProtoMsgOwner &proto_owner, ge::proto::AttrDef *val); + + friend class AttrHolder; + friend class ModelSerializeImp; + friend class OnnxUtils; +}; + +class AttrValueImpl { + public: + AttrValueImpl() = default; + ~AttrValueImpl() = default; + + GeAttrValue geAttrValue_; +}; +} // namespace ge +#endif // INC_GRAPH_GE_ATTR_VALUE_H_ diff --git a/inc/graph/ge_context.h b/inc/graph/ge_context.h new file mode 100644 index 00000000..9948705b --- 
/dev/null +++ b/inc/graph/ge_context.h @@ -0,0 +1,45 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_GE_CONTEXT_H_ +#define INC_GRAPH_GE_CONTEXT_H_ + +#include + +#include "graph/ge_error_codes.h" + +namespace ge { +class GEContext { + public: + graphStatus GetOption(const std::string &key, std::string &option); + uint64_t SessionId(); + uint32_t DeviceId(); + uint64_t JobId(); + void Init(); + void SetCtxDeviceId(uint32_t device_id); + + private: + uint64_t session_id_ = 0; + uint32_t device_id_ = 0; + uint64_t job_id_ = 0; +}; + +/// Get context +/// @return +GEContext &GetContext(); +} // namespace ge + +#endif // INC_GRAPH_GE_CONTEXT_H_ diff --git a/inc/graph/ge_global_options.h b/inc/graph/ge_global_options.h new file mode 100755 index 00000000..b55192e2 --- /dev/null +++ b/inc/graph/ge_global_options.h @@ -0,0 +1,26 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_GE_GLOBAL_OPTIONS_H_ +#define INC_GRAPH_GE_GLOBAL_OPTIONS_H_ + +#include +#include + +namespace ge { +std::map &GetMutableGlobalOptions(); +} +#endif // INC_GRAPH_GE_GLOBAL_OPTIONS_H_ diff --git a/inc/graph/ge_local_context.h b/inc/graph/ge_local_context.h new file mode 100644 index 00000000..692c96e7 --- /dev/null +++ b/inc/graph/ge_local_context.h @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_GE_LOCAL_CONTEXT_H_ +#define INC_GRAPH_GE_LOCAL_CONTEXT_H_ + +#include +#include +#include + +#include "graph/ge_error_codes.h" + +using std::string; +using std::map; + +namespace ge { +class GEThreadLocalContext { + public: + graphStatus GetOption(const string &key, string &option); + void SetGlobalOption(map options_map); + void SetSessionOption(map options_map); + + private: + map session_options_; + map global_options_; +}; + +GEThreadLocalContext &GetThreadLocalContext(); +} // namespace ge + +#endif // INC_GRAPH_GE_LOCAL_CONTEXT_H_ diff --git a/inc/graph/ge_tensor.h b/inc/graph/ge_tensor.h new file mode 100644 index 00000000..78534438 --- /dev/null +++ b/inc/graph/ge_tensor.h @@ -0,0 +1,171 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_GE_TENSOR_H_ +#define INC_GRAPH_GE_TENSOR_H_ + +#include +#include +#include +#include + +#include "detail/attributes_holder.h" +#include "graph/buffer.h" +#include "graph/ge_error_codes.h" +#include "graph/types.h" + +namespace ge { +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeShape { + public: + GeShape(); + ~GeShape() = default; + explicit GeShape(std::vector s); + + size_t GetDimNum() const; + // If the idx is invalid, return 0 + int64_t GetDim(size_t idx) const; + graphStatus SetDim(size_t idx, int64_t value); + std::vector GetDims() const; + + int64_t GetShapeSize() const; + std::string ToString() const; + + GeShape(const GeShape &other); + GeShape(GeShape &&other); + GeShape &operator=(const GeShape &other); + GeShape &operator=(GeShape &&other); + + private: + GeIrProtoHelper shape_def_; + friend class GeTensorDesc; + // Create geshape from proto obj + GeShape(const ProtoMsgOwner &protoOnwer, proto::ShapeDef *protoMsg); + + void RefTo(const GeShape &shape) { shape_def_ = shape.shape_def_; } +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeTensorDesc : public AttrHolder { + friend class TensorUtils; + friend class GeAttrValue; + friend class ModelSerialize; + + public: + GeTensorDesc(); + explicit GeTensorDesc(GeShape shape, Format format = FORMAT_ND, DataType dt = DT_FLOAT); + GeTensorDesc(const GeTensorDesc &desc); + GeTensorDesc(GeTensorDesc &&desc); + + ~GeTensorDesc() = default; + bool operator==(const GeTensorDesc &r_ge_tensor_desc) const; + + void Update(GeShape shape, Format format = FORMAT_ND, DataType dt = DT_FLOAT); + + GeShape GetShape() const; + GeShape &MutableShape(); + void SetShape(GeShape shape); + + GeShape GetOriginShape() const; + void SetOriginShape(const GeShape &originShape); + + Format GetFormat() const; + void SetFormat(Format format); + + Format GetOriginFormat() const; + void SetOriginFormat(Format originFormat); + + DataType GetDataType() const; + void SetDataType(DataType dt); + 
+ void SetOriginDataType(DataType originDataType); + DataType GetOriginDataType() const; + + GeTensorDesc Clone() const; + GeTensorDesc &operator=(const GeTensorDesc &desc); + GeTensorDesc &operator=(GeTensorDesc &&desc); + + graphStatus IsValid() const; + + protected: + ProtoAttrMapHelper MutableAttrMap() override; + ConstProtoAttrMapHelper GetAttrMap() const override; + + private: + bool GeTensorDescAttrsAreEqual(const GeTensorDesc &r_ge_tensor_desc) const; + using AttrHolder::DelAttr; + using AttrHolder::GetAllAttrs; + using AttrHolder::GetAttr; + using AttrHolder::HasAttr; + using AttrHolder::SetAttr; + + void Init(); + + // Create getensordesc from proto obj + GeTensorDesc(const ProtoMsgOwner &protoOnwer, proto::TensorDescriptor *protoMsg); + friend class GeTensor; + friend class GeAttrValueImp; + friend class ModelSerializeImp; + friend class OnnxUtils; + + GeIrProtoHelper tensor_descriptor_; + // Reference from tensorDescriptor_, do not direct use + mutable GeShape __shape_; + + void RefTo(const GeTensorDesc &tensorDesc) { tensor_descriptor_ = tensorDesc.tensor_descriptor_; } + GeShape &ShapeReference() const; +}; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeTensor { + public: + GeTensor(); + explicit GeTensor(const GeTensorDesc &tensorDesc); + explicit GeTensor(const GeTensorDesc &tensorDesc, const std::vector &data); + explicit GeTensor(const GeTensorDesc &tensorDesc, const Buffer &data); + explicit GeTensor(const GeTensorDesc &tensorDesc, const uint8_t *data, size_t size); + explicit GeTensor(GeTensorDesc &&tensorDesc, std::vector &&data); + ~GeTensor() = default; + + GeTensorDesc GetTensorDesc() const; + GeTensorDesc &MutableTensorDesc(); + void SetTensorDesc(const GeTensorDesc &tensorDesc); + + const Buffer GetData() const; + Buffer MutableData(); + graphStatus SetData(std::vector &&data); + graphStatus SetData(const std::vector &data); + graphStatus SetData(const Buffer &data); + graphStatus SetData(const uint8_t *data, size_t size); + + 
GeTensor Clone() const; + + // Share value + GeTensor(const GeTensor &other); + // Share value + GeTensor &operator=(const GeTensor &other); + + private: + friend class GeAttrValueImp; + friend class ModelSerializeImp; + friend class OnnxUtils; + // Create getensor from proto obj + GeTensor(const ProtoMsgOwner &protoOnwer, proto::TensorDef *protoMsg); + GeIrProtoHelper tensor_def_; + // Reference from tensorDef_, cab not use it directly + mutable GeTensorDesc __desc_; + GeTensorDesc &DescReference() const; +}; +} // namespace ge + +#endif // INC_GRAPH_GE_TENSOR_H_ diff --git a/inc/graph/graph_util.h b/inc/graph/graph_util.h new file mode 100644 index 00000000..c39ecbc1 --- /dev/null +++ b/inc/graph/graph_util.h @@ -0,0 +1,134 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_GRAPH_UTIL_H_ +#define INC_GRAPH_GRAPH_UTIL_H_ + +#include + +#include "proto/om.pb.h" + +namespace ge { +using AttrDefMap = ::google::protobuf::Map<::std::string, ::domi::AttrDef>; +bool HasOpAttr(const OpDef *opdef, std::string attr_name); +bool GetOpAttr(const std::string &key, int32_t *value, const OpDef *opdef); + +static const char OP_TYPE_DATA[] = "Data"; +static const char OP_TYPE_INPUT[] = "Input"; +static const char ATTR_KEY_INPUT_FORMAT[] = "input_format"; +static const char ATTR_KEY_OUTPUT_FORMAT[] = "output_format"; +static const char OP_TYPE_ANN_DATA[] = "AnnData"; +} // namespace ge + +#if !defined(__ANDROID__) && !defined(ANDROID) +#include "toolchain/slog.h" +const char levelStr[4][8] = {"ERROR", "WARN", "INFO", "DEBUG"}; +#else +#include +#include +const char levelStr[8][8] = {"EMERG", "ALERT", "CRIT", "ERROR", "WARNING", "NOTICE", "INFO", "DEBUG"}; +#endif + +#ifdef _MSC_VER +#define FUNC_NAME __FUNCTION__ +#else +#define FUNC_NAME __PRETTY_FUNCTION__ +#endif + +#if !defined(__ANDROID__) && !defined(ANDROID) +#define D_GRAPH_LOGI(MOD_NAME, fmt, ...) \ + dlog_info(FMK, "%s:%s:%d:" #fmt, __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__) +#define D_GRAPH_LOGW(MOD_NAME, fmt, ...) \ + dlog_warn(FMK, "%s:%s:%d:" #fmt, __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__) +#define D_GRAPH_LOGE(MOD_NAME, fmt, ...) \ + dlog_error(FMK, "%s:%s:%d:" #fmt, __FUNCTION__, __FILE__, __LINE__, ##__VA_ARGS__) +#else +#define D_GRAPH_LOG(level, format, ...) \ + do { \ + { \ + fprintf(stdout, "[%s] [%s] [%s] [%s] [%s:%d] " format "\n", "", "GRAPH", levelStr[level], __FUNCTION__, \ + __FILE__, __LINE__, ##__VA_ARGS__); \ + syslog(level, "%s %s:%d] [%s] %s " format "\n", "", __FILE__, __LINE__, "OPTIMIZER", __FUNCTION__, \ + ##__VA_ARGS__); \ + } \ + } while (0) +#define D_GRAPH_LOGI(MOD_NAME, fmt, ...) D_GRAPH_LOG(ANDROID_LOG_INFO, #fmt, ##__VA_ARGS__) +#define D_GRAPH_LOGW(MOD_NAME, fmt, ...) 
D_GRAPH_LOG(ANDROID_LOG_INFO, #fmt, ##__VA_ARGS__) +#define D_GRAPH_LOGE(MOD_NAME, fmt, ...) D_GRAPH_LOG(ANDROID_LOG_INFO, #fmt, ##__VA_ARGS__) +#endif + +#if !defined(__ANDROID__) && !defined(ANDROID) +#define GRAPH_LOGI(...) D_GRAPH_LOGI(GRAPH_MOD_NAME, __VA_ARGS__) +#define GRAPH_LOGW(...) D_GRAPH_LOGW(GRAPH_MOD_NAME, __VA_ARGS__) +#define GRAPH_LOGE(...) D_GRAPH_LOGE(GRAPH_MOD_NAME, __VA_ARGS__) +#else + +#define GRAPH_LOG(level, format, ...) \ + do { \ + { \ + fprintf(stdout, "[%s] [%s] [%s] [%s] [%s:%d] " format "\n", "", "GRAPH", levelStr[level], __FUNCTION__, \ + __FILE__, __LINE__, ##__VA_ARGS__); \ + syslog(level, "%s %s:%d] [%s] %s " format "\n", "", __FILE__, __LINE__, "OPTIMIZER", __FUNCTION__, \ + ##__VA_ARGS__); \ + } \ + } while (0) +#define GRAPH_LOGI(fmt, ...) GRAPH_LOG(ANDROID_LOG_INFO, #fmt, ##__VA_ARGS__) +#define GRAPH_LOGW(fmt, ...) GRAPH_LOG(ANDROID_LOG_INFO, #fmt, ##__VA_ARGS__) +#define GRAPH_LOGE(fmt, ...) GRAPH_LOG(ANDROID_LOG_INFO, #fmt, ##__VA_ARGS__) +#endif + +#define GRAPH_CHK_STATUS_RET_NOLOG(expr) \ + do { \ + const domi::graphStatus _status = (expr); \ + if (_status != domi::GRAPH_SUCCESS) { \ + return _status; \ + } \ + } while (0) + +#define GRAPH_CHK_BOOL_RET_STATUS(expr, _status, ...) \ + do { \ + bool b = (expr); \ + if (!b) { \ + GRAPH_LOGE(__VA_ARGS__); \ + return _status; \ + } \ + } while (0) + +#define GRAPH_CHK_BOOL_EXEC_NOLOG(expr, exec_expr) \ + { \ + bool b = (expr); \ + if (!b) { \ + exec_expr; \ + } \ + }; + +#define GRAPH_IF_BOOL_EXEC(expr, exec_expr) \ + { \ + if (expr) { \ + exec_expr; \ + } \ + } + +#define GRAPH_RETURN_WITH_LOG_IF_ERROR(expr, ...) 
\ + do { \ + const ::domi::graphStatus _status = (expr); \ + if (_status) { \ + GRAPH_LOGE(__VA_ARGS__); \ + return _status; \ + } \ + } while (0) + +#endif // INC_GRAPH_GRAPH_UTIL_H_ diff --git a/inc/graph/model.h b/inc/graph/model.h new file mode 100755 index 00000000..f29410ea --- /dev/null +++ b/inc/graph/model.h @@ -0,0 +1,95 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_MODEL_H_ +#define INC_GRAPH_MODEL_H_ + +#include +#include +#include +#include + +#include "detail/attributes_holder.h" +#include "graph/ge_attr_value.h" +#include "graph/graph.h" + +namespace ge { +using std::map; +using std::string; +using std::vector; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Model : public AttrHolder { + public: + Model(); + + ~Model() = default; + + Model(const string &name, const string &custom_version); + + string GetName() const; + void SetName(const string &name); + + uint32_t GetVersion() const; + + void SetVersion(uint32_t version) { version_ = version; } + + std::string GetPlatformVersion() const; + + void SetPlatformVersion(string version) { platform_version_ = version; } + + Graph GetGraph() const; + + void SetGraph(const Graph &graph); + + void SetAttr(const ProtoAttrMapHelper &attrs); + + using AttrHolder::GetAllAttrNames; + using AttrHolder::GetAllAttrs; + using AttrHolder::GetAttr; + using AttrHolder::HasAttr; + using AttrHolder::SetAttr; + + 
graphStatus Save(Buffer &buffer) const; + + graphStatus SaveToFile(const string &file_name) const; + // Model will be rewritten + static graphStatus Load(const uint8_t *data, size_t len, Model &model); + graphStatus Load(ge::proto::ModelDef &model_def); + graphStatus LoadFromFile(const string &file_name); + + bool IsValid() const; + + protected: + ConstProtoAttrMapHelper GetAttrMap() const override; + ProtoAttrMapHelper MutableAttrMap() override; + + private: + void Init(); + ProtoAttrMapHelper attrs_; + friend class ModelSerializeImp; + friend class GraphDebugImp; + friend class OnnxUtils; + friend class ModelHelper; + friend class ModelBuilder; + string name_; + uint32_t version_; + std::string platform_version_{""}; + Graph graph_; +}; +} // namespace ge +using ModelPtr = std::shared_ptr; + +#endif // INC_GRAPH_MODEL_H_ diff --git a/inc/graph/model_serialize.h b/inc/graph/model_serialize.h new file mode 100644 index 00000000..7f354388 --- /dev/null +++ b/inc/graph/model_serialize.h @@ -0,0 +1,54 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_MODEL_SERIALIZE_H_ +#define INC_GRAPH_MODEL_SERIALIZE_H_ + +#include +#include + +#include "graph/buffer.h" +#include "graph/compute_graph.h" +#include "graph/model.h" + +namespace ge { +class ModelSerialize { + public: + Buffer SerializeModel(const Model &model); + + Model UnserializeModel(const uint8_t *data, size_t len); + Model UnserializeModel(ge::proto::ModelDef &model_def); + + Buffer SerializeGraph(const ComputeGraphPtr &graph); + + ComputeGraphPtr UnserializeGraph(const uint8_t *data, size_t len); + + Buffer SerializeOpDesc(const ConstOpDescPtr &opDesc); + OpDescPtr UnserializeOpDesc(const uint8_t *data, size_t len); + + size_t GetSerializeModelSize(const Model &model); + + private: + static std::map &MutableTensorDescAttrMap(GeTensorDesc &tensorDesc); + + static const std::map &GetTensorDescAttrMap(const GeTensorDesc &tensorDesc); + + friend class ModelSerializeImp; + friend class GraphDebugImp; +}; +} // namespace ge + +#endif // INC_GRAPH_MODEL_SERIALIZE_H_ diff --git a/inc/graph/node.h b/inc/graph/node.h new file mode 100644 index 00000000..66e38a43 --- /dev/null +++ b/inc/graph/node.h @@ -0,0 +1,208 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_NODE_H_ +#define INC_GRAPH_NODE_H_ + +#include +#include +#include +#include +#include +#include + +#include "graph/ge_attr_value.h" +#include "graph/op_desc.h" +#include "graph/range_vistor.h" +#include "utils/attr_utils.h" + +namespace ge { +class ComputeGraph; + +using ComputeGraphPtr = std::shared_ptr; + +class Node; + +using NodePtr = std::shared_ptr; +using ConstNodePtr = std::shared_ptr; +using NodeRef = std::weak_ptr; + +class Anchor; + +using AnchorPtr = std::shared_ptr; + +class InDataAnchor; + +using InDataAnchorPtr = std::shared_ptr; + +class OutDataAnchor; + +using OutDataAnchorPtr = std::shared_ptr; + +class ControlAnchor; + +using ControlAnchorPtr = std::shared_ptr; + +class InControlAnchor; + +using InControlAnchorPtr = std::shared_ptr; + +class OutControlAnchor; + +using OutControlAnchorPtr = std::shared_ptr; + +using OpDescPtr = std::shared_ptr; + +using ConstNode = const Node; + +typedef std::vector> kFusionDataFlowVec_t; + +// Node is a component of ComputeGraph +class Node : public std::enable_shared_from_this { + friend class ComputeGraph; + friend class ModelSerializeImp; + + public: + template + using Vistor = RangeVistor>; + ~Node(); + Node(const Node &) = delete; + Node &operator=(const Node &) = delete; + bool operator==(const Node &r_node) const; + + protected: + Node() = default; + Node(const OpDescPtr &op, const ComputeGraphPtr &ownerGraph); + + public: + graphStatus Init(); + + std::string GetName() const; + std::string GetType() const; + + ComputeGraphPtr GetOwnerComputeGraph() const; + graphStatus SetOwnerComputeGraph(const ComputeGraphPtr &graph); + + Vistor GetAllInDataAnchors() const; + Vistor GetAllOutDataAnchors() const; + uint32_t GetAllInDataAnchorsSize() const; + uint32_t GetAllOutDataAnchorsSize() const; + Vistor GetAllOutAnchors() const; + Vistor GetAllInAnchors() const; + InDataAnchorPtr GetInDataAnchor(int idx) const; + OutDataAnchorPtr GetOutDataAnchor(int idx) const; + InControlAnchorPtr 
GetInControlAnchor() const; + OutControlAnchorPtr GetOutControlAnchor() const; + Vistor GetInNodes() const; + Vistor GetOutNodes() const; + AnchorPtr GetInAnchor(int idx) const; + AnchorPtr GetOutAnchor(int idx) const; + + bool IsAllInNodesSeen(std::unordered_set &nodes_seen) const; + + // All inData nodes + Vistor GetInDataNodes() const; + // All inControl nodes + Vistor GetInControlNodes() const; + // GetInAllNodes = InDataNodes + InControlNodes + Vistor GetInAllNodes() const; + + // All outData nodes + Vistor GetOutDataNodes() const; + uint32_t GetOutDataNodesSize() const; + // All outControl nodes + Vistor GetOutControlNodes() const; + // GetOutAllNodes = OutDataNodes + InControlNodes + Vistor GetOutAllNodes() const; + + // Get all indata nodes and its outanchor + Vistor> GetInDataNodesAndAnchors() const; + + // Get all outdata nodes and its inanchor + Vistor> GetOutDataNodesAndAnchors() const; + + graphStatus InferShapeAndType() const; + graphStatus Verify() const; + + graphStatus InferOriginFormat() const; + + OpDescPtr GetOpDesc() const; + + graphStatus UpdateOpDesc(const OpDescPtr &op); + + graphStatus AddLinkFrom(const NodePtr &input_node); + + graphStatus AddLinkFrom(const uint32_t &index, NodePtr input_node); + + graphStatus AddLinkFrom(const string &name, NodePtr input_node); + + graphStatus AddLinkFromForParse(const NodePtr &input_node); + + void AddSendEventId(uint32_t event_id) { send_event_id_list_.push_back(event_id); } + + void AddRecvEventId(uint32_t event_id) { recv_event_id_list_.push_back(event_id); } + + const std::vector &GetSendEventIdList() const { return send_event_id_list_; } + + const std::vector &GetRecvEventIdList() const { return recv_event_id_list_; } + void GetFusionInputFlowList(kFusionDataFlowVec_t &fusion_input_list) { + fusion_input_list = fusion_input_dataflow_list_; + } + + void GetFusionOutputFlowList(kFusionDataFlowVec_t &fusion_output_list) { + fusion_output_list = fusion_output_dataflow_list_; + } + + void 
SetFusionInputFlowList(kFusionDataFlowVec_t &fusion_input_list) { + fusion_input_dataflow_list_ = fusion_input_list; + } + + void SetFusionOutputFlowList(kFusionDataFlowVec_t &fusion_output_list) { + fusion_output_dataflow_list_ = fusion_output_list; + } + + void SetOrigNode(const NodePtr &orignode) { orig_node_ = orignode; } + + NodePtr GetOrigNode(void) { return orig_node_; } + + private: + bool NodeMembersAreEqual(const Node &r_node) const; + bool NodeAttrsAreEqual(const Node &r_node) const; + bool NodeInConnectsAreEqual(const Node &r_node) const; + bool NodeOutConnectsAreEqual(const Node &r_node) const; + bool NodeAnchorIsEqual(const AnchorPtr &l_anchor, const AnchorPtr &r_anchor, size_t i) const; + OpDescPtr op_; + std::weak_ptr owner_graph_; + vector in_data_anchors_; + vector out_data_anchors_; + InControlAnchorPtr in_control_anchor_; + OutControlAnchorPtr out_control_anchor_; + map attrs_; + bool has_init_{false}; + bool anchor_status_updated_{false}; + std::vector send_event_id_list_; + std::vector recv_event_id_list_; + + kFusionDataFlowVec_t fusion_input_dataflow_list_; + kFusionDataFlowVec_t fusion_output_dataflow_list_; + + NodePtr orig_node_; + friend class NodeUtils; + friend class OnnxUtils; +}; +} // namespace ge + +#endif // INC_GRAPH_NODE_H_ diff --git a/inc/graph/op_desc.h b/inc/graph/op_desc.h new file mode 100644 index 00000000..9a07641b --- /dev/null +++ b/inc/graph/op_desc.h @@ -0,0 +1,256 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_OP_DESC_H_ +#define INC_GRAPH_OP_DESC_H_ + +#include +#include +#include +#include +#include +#include + +#include "detail/attributes_holder.h" +#include "graph/range_vistor.h" + +#define DYNAMIN_INPUT_NAME(name, index) (((name)) + std::to_string((index))) +#define DYNAMIN_OUTPUT_NAME(name, index) (((name)) + std::to_string((index))) +namespace ge { +using std::map; +using std::pair; +using std::shared_ptr; +using std::string; +using std::vector; + +class Operator; +class GeTensorDesc; + +using GeTensorDescPtr = shared_ptr; +using ConstGeTensorDescPtr = shared_ptr; + +class OpDesc; + +using OpDescPtr = shared_ptr; +using ConstOpDescPtr = shared_ptr; + +class GeAttrValue; + +using ConstOpDesc = const OpDesc; + +class OpDesc : public std::enable_shared_from_this, public AttrHolder { + public: + template + using Vistor = RangeVistor>; + + friend class GraphBuilderImpl; + + friend class OperatorImpl; + + OpDesc(const string &name, const string &type); + + OpDesc(); + + ~OpDesc(); + + bool operator==(const OpDesc &r_op_desc) const; + + string GetName() const; + + void SetName(const string &name); + + string GetType() const; + + void SetType(const string &type); + + graphStatus AddInputDesc(const GeTensorDesc &input_desc); + + graphStatus AddInputDesc(const string &name, const GeTensorDesc &input_desc); + + graphStatus AddInputDesc(uint32_t index, const ge::GeTensorDesc &input_desc); + + graphStatus AddInputDescForward(const string &name, const unsigned int num); + + graphStatus AddOutputDescForward(const string &name, const unsigned int num); + + graphStatus AddOptionalInputDesc(const string &name, const GeTensorDesc &input_desc); + + graphStatus UpdateInputDesc(uint32_t index, const GeTensorDesc &tensor_desc); + + graphStatus UpdateInputDesc(const string &name, const GeTensorDesc &tensor_desc); + + bool InputIsSet(const string &name) 
const; + + GeTensorDesc GetInputDesc(uint32_t index) const; + + GeTensorDesc GetInputDesc(const string &name) const; + + Vistor GetAllInputNames() const; + + GeTensorDescPtr MutableInputDesc(uint32_t index) const; + + Vistor GetAllInputsDesc() const; + + Vistor GetAllInputsDescPtr() const; + + size_t GetInputsSize() const; + + graphStatus AddOutputDesc(const GeTensorDesc &output_desc); + + graphStatus AddOutputDesc(const string &name, const GeTensorDesc &output_desc); + + graphStatus UpdateOutputDesc(uint32_t index, const GeTensorDesc &tensor_desc); + + graphStatus UpdateOutputDesc(const string &name, const GeTensorDesc &tensor_desc); + + GeTensorDesc GetOutputDesc(uint32_t index) const; + + GeTensorDesc GetOutputDesc(const string &name) const; + + GeTensorDescPtr MutableOutputDesc(uint32_t index) const; + + Vistor GetAllOutputsDesc() const; + + Vistor GetAllOutputsDescPtr() const; + + size_t GetOutputsSize() const; + + ConstGeTensorDescPtr GetOutputDescPtr(uint32_t index) const; + + ConstGeTensorDescPtr GetInputDescPtr(uint32_t index) const; + + graphStatus AddDynamicInputDesc(const string &name, const unsigned int num, bool isPushBack = true); + + graphStatus AddDynamicOutputDesc(const string &name, const unsigned int num, bool isPushBack = true); + + bool IsOptionalInput(const string &name) const; + + bool IsOptionalInput(uint32_t index) const; + + std::map GetAllInputName(); + + std::map GetAllOutputName(); + + bool UpdateInputName(std::map inputNameIdx); + + bool UpdateOutputName(std::map outputNameIdx); + + void AddInferFunc(const std::function &func); + + std::function GetInferFunc() const; + + graphStatus InferShapeAndType(); + + void AddInferFormatFunc(const std::function &func); + + std::function GetInferFormatFunc() const; + + graphStatus DefaultInferFormat(); + + std::function GetVerifyFunc() const; + + void AddVerifierFunc(const std::function &func); + + graphStatus CallInferFormatFunc(Operator &op); + + graphStatus OpVerify(); + + graphStatus 
CommonVerify() const; + + using AttrHolder::AddRequiredAttr; + using AttrHolder::DelAttr; + using AttrHolder::GetAllAttrNames; + using AttrHolder::GetAllAttrs; + using AttrHolder::GetAttr; + using AttrHolder::HasAttr; + using AttrHolder::SetAttr; + + void SetId(int64_t id); + int64_t GetId() const; + void SetStreamId(int64_t stream_id); + int64_t GetStreamId() const; + void SetInputName(const vector &input_name); + vector GetInputName() const; + void SetSrcName(const vector &src_name); + vector GetSrcName() const; + void SetSrcIndex(const vector &src_index); + vector GetSrcIndex() const; + void SetInputOffset(const vector &input); + vector GetInputOffset() const; + void SetOutputOffset(const vector &input); + vector GetOutputOffset() const; + void SetDstName(const vector &dst_name); + vector GetDstName() const; + void SetDstIndex(const vector &dst_index); + vector GetDstIndex() const; + void SetWorkspace(const vector &workspace); + vector GetWorkspace() const; + void SetWorkspaceBytes(const vector &workspace_bytes); + vector GetWorkspaceBytes() const; + void SetIsInputConst(const vector &is_input_const); + vector GetIsInputConst() const; + + string GetInputNameByIndex(uint32_t index) const; + + int GetInputIndexByName(const string &name) const; + + string GetOutputNameByIndex(uint32_t index) const; + + int GetOutputIndexByName(const string &name) const; + + graphStatus RestoreInputNameIdx(const string &name, const int &index); + + graphStatus RestoreOutputNameIdx(const string &name, const int &index); + + graphStatus CallInferFunc(Operator &op); + + void SetOpKernelLibName(const std::string &name); + + std::string GetOpKernelLibName() const; + + void SetOpEngineName(const std::string &name); + + std::string GetOpEngineName() const; + + protected: + ProtoAttrMapHelper MutableAttrMap() override; + ConstProtoAttrMapHelper GetAttrMap() const override; + + private: + OpDesc(const ProtoMsgOwner &proto_msg_owner, ge::proto::OpDef *op_def); + bool 
OpDescMembersAreEqual(const OpDesc &r_op_desc) const; + bool OpDescAttrsAreEqual(const OpDesc &r_op_desc) const; + bool OpDescGenTensorDescsAreEqual(const OpDesc &r_op_desc) const; + + GeIrProtoHelper op_def_; + vector inputs_desc_{}; + map input_name_idx_{}; + std::unordered_set optional_input_names_{}; + vector outputs_desc_{}; + map output_name_idx_{}; + std::function infer_func_ = nullptr; + std::function infer_format_func_ = nullptr; + std::function verifier_func_ = nullptr; + string op_kernel_lib_name_; + string engine_name_; + friend class OpDescUtils; + friend class ModelSerializeImp; + friend class AttrUtils; + friend class GeAttrValueImp; + friend class OnnxUtils; +}; +} // namespace ge +#endif // INC_GRAPH_OP_DESC_H_ diff --git a/inc/graph/op_kernel_bin.h b/inc/graph/op_kernel_bin.h new file mode 100644 index 00000000..e81d79d0 --- /dev/null +++ b/inc/graph/op_kernel_bin.h @@ -0,0 +1,47 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_OP_KERNEL_BIN_H_ +#define INC_GRAPH_OP_KERNEL_BIN_H_ + +#include +#include +#include +#include + +namespace ge { +class OpKernelBin { + public: + OpKernelBin(std::string name, std::vector &&data) : name_(std::move(name)), data_(std::move(data)) {} + + ~OpKernelBin() = default; + + const std::string &GetName() const { return name_; } + const uint8_t *GetBinData() const { return (const uint8_t *)data_.data(); } + size_t GetBinDataSize() const { return data_.size(); } + OpKernelBin(const OpKernelBin &) = delete; + const OpKernelBin &operator=(const OpKernelBin &) = delete; + + private: + std::string name_; + std::vector data_; +}; + +using OpKernelBinPtr = std::shared_ptr; +const char *const OP_EXTATTR_NAME_TBE_KERNEL = "tbeKernel"; +} // namespace ge + +#endif // INC_GRAPH_OP_KERNEL_BIN_H_ diff --git a/inc/graph/operator_factory_impl.h b/inc/graph/operator_factory_impl.h new file mode 100644 index 00000000..92d38583 --- /dev/null +++ b/inc/graph/operator_factory_impl.h @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_OPERATOR_FACTORY_IMPL_H_ +#define INC_GRAPH_OPERATOR_FACTORY_IMPL_H_ + +#include +#include +#include +#include + +#include "graph/operator_factory.h" + +namespace ge { +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OperatorFactoryImpl { + public: + static Operator CreateOperator(const std::string &operator_name, const std::string &operator_type); + + static graphStatus GetOpsTypeList(std::vector &all_ops); + + static bool IsExistOp(const string &operator_type); + + static InferShapeFunc GetInferShapeFunc(const std::string &operator_type); + + static InferFormatFunc GetInferFormatFunc(const std::string &operator_type); + + static VerifyFunc GetVerifyFunc(const std::string &operator_type); + + static graphStatus RegisterOperatorCreator(const std::string &operator_type, OpCreator const &op_creator); + + static graphStatus RegisterInferShapeFunc(const std::string &operator_type, InferShapeFunc const infer_shape_func); + + static graphStatus RegisterInferFormatFunc(const std::string &operator_type, InferFormatFunc const infer_format_func); + + static graphStatus RegisterVerifyFunc(const std::string &operator_type, VerifyFunc const verify_func); + + private: + static shared_ptr> operator_creators_; + static shared_ptr> operator_infershape_funcs_; + static shared_ptr> operator_inferformat_funcs_; + static shared_ptr> operator_verify_funcs_; +}; +} // namespace ge + +#endif // INC_GRAPH_OPERATOR_FACTORY_IMPL_H_ diff --git a/inc/graph/opsproto_manager.h b/inc/graph/opsproto_manager.h new file mode 100644 index 00000000..46b722ec --- /dev/null +++ b/inc/graph/opsproto_manager.h @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_OPSPROTO_MANAGER_H_ +#define INC_GRAPH_OPSPROTO_MANAGER_H_ + +#include +#include +#include +#include +#include +#include + +namespace ge { +class OpsProtoManager { + public: + static OpsProtoManager *Instance(); + + bool Initialize(const std::map &options); + + void Finalize(); + + void LoadOpsProtoPluginSo(std::string &path); + + private: + std::string pluginPath_; + std::vector handles_; +}; +} // namespace ge + +#endif // INC_GRAPH_OPSPROTO_MANAGER_H_ diff --git a/inc/graph/range_vistor.h b/inc/graph/range_vistor.h new file mode 100644 index 00000000..20905bd9 --- /dev/null +++ b/inc/graph/range_vistor.h @@ -0,0 +1,53 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_RANGE_VISTOR_H_ +#define INC_GRAPH_RANGE_VISTOR_H_ + +#include + +template +class RangeVistor { + public: + using Iterator = typename std::vector::iterator; + using ConstIterator = typename std::vector::const_iterator; + + RangeVistor(O owner, const std::vector &vs) : owner_(owner), elements_(vs) {} + + ~RangeVistor() {} + + Iterator begin() { return elements_.begin(); } + + Iterator end() { return elements_.end(); } + + ConstIterator begin() const { return elements_.begin(); } + + ConstIterator end() const { return elements_.end(); } + + std::size_t size() const { return elements_.size(); } + + bool empty() const { return elements_.empty(); } + + E &at(std::size_t index) { return elements_.at(index); } + + const E &at(std::size_t index) const { return elements_.at(index); } + + private: + O owner_; + std::vector elements_; +}; + +#endif // INC_GRAPH_RANGE_VISTOR_H_ diff --git a/inc/graph/shape_refiner.h b/inc/graph/shape_refiner.h new file mode 100644 index 00000000..ef5b8aab --- /dev/null +++ b/inc/graph/shape_refiner.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_SHAPE_REFINER_H_ +#define INC_GRAPH_SHAPE_REFINER_H_ + +#include + +#include "external/graph/inference_context.h" +#include "external/graph/ge_error_codes.h" +#include "graph/node.h" + +namespace ge { +// ShapeRefiner performs shape inference for compute graphs +class ShapeRefiner { + public: + static graphStatus InferShapeAndType(const ConstNodePtr &node, Operator &op); + static graphStatus InferShapeAndType(const NodePtr &node); + + private: + static void PrintInOutTensorShape(const ge::NodePtr &node, const std::string &phase); +}; +} // namespace ge +#endif // INC_GRAPH_SHAPE_REFINER_H_ diff --git a/inc/graph/utils/anchor_utils.h b/inc/graph/utils/anchor_utils.h new file mode 100644 index 00000000..35b3b035 --- /dev/null +++ b/inc/graph/utils/anchor_utils.h @@ -0,0 +1,45 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_UTILS_ANCHOR_UTILS_H_ +#define INC_GRAPH_UTILS_ANCHOR_UTILS_H_ + +#include "graph/anchor.h" +#include "graph/node.h" + +namespace ge { +class AnchorUtils { + public: + // Get anchor format + static Format GetFormat(const DataAnchorPtr &dataAnchor); + + // Set anchor format + static graphStatus SetFormat(const DataAnchorPtr &dataAnchor, Format dataFormat); + + // Get anchor status + static AnchorStatus GetStatus(const DataAnchorPtr &dataAnchor); + + // Set anchor status + static graphStatus SetStatus(const DataAnchorPtr &dataAnchor, AnchorStatus anchorStatus); + + static bool HasControlEdge(const AnchorPtr &anchor); + + static bool IsControlEdge(const AnchorPtr &src, const AnchorPtr &dst); + + static int GetIdx(const AnchorPtr &anchor); +}; +} // namespace ge +#endif // INC_GRAPH_UTILS_ANCHOR_UTILS_H_ diff --git a/inc/graph/utils/attr_utils.h b/inc/graph/utils/attr_utils.h new file mode 100644 index 00000000..37dc79e9 --- /dev/null +++ b/inc/graph/utils/attr_utils.h @@ -0,0 +1,149 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_UTILS_ATTR_UTILS_H_ +#define INC_GRAPH_UTILS_ATTR_UTILS_H_ + +#include +#include +#include +#include "graph/detail/attributes_holder.h" +#include "graph/ge_attr_value.h" +#include "graph/types.h" + +namespace ge { +class OpDesc; +using OpDescPtr = std::shared_ptr; +using ConstOpDescPtr = std::shared_ptr; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY AttrUtils { + public: + class ConstAttrHolderAdapter; + class AttrHolderAdapter; + // Set + static bool HasAttr(ConstAttrHolderAdapter &&obj, const string &name); + + static bool SetInt(AttrHolderAdapter &&obj, const string &name, const int64_t &value); + static bool SetListInt(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetListInt(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetListInt(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetListInt(AttrHolderAdapter &&obj, const string &name, std::initializer_list &&value); + + static bool SetFloat(AttrHolderAdapter &&obj, const string &name, const float &value); + static bool SetListFloat(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetBool(AttrHolderAdapter &&obj, const string &name, const bool &value); + static bool SetListBool(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetStr(AttrHolderAdapter &&obj, const string &name, const string &value); + static bool SetListStr(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetTensorDesc(AttrHolderAdapter &&obj, const string &name, const GeTensorDesc &value); + static bool SetListTensorDesc(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetTensor(AttrHolderAdapter &&obj, const string &name, const GeTensorPtr &value); + static bool SetTensor(AttrHolderAdapter &&obj, const string &name, const ConstGeTensorPtr &value); + static bool 
SetTensor(AttrHolderAdapter &&obj, const string &name, const GeTensor &value); + static bool SetListTensor(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetListTensor(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetListTensor(AttrHolderAdapter &&obj, const string &name, + std::initializer_list &&value); + static bool SetListTensor(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetGraph(AttrHolderAdapter &&obj, const string &name, const ComputeGraphPtr &value); + static bool SetListGraph(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetBytes(AttrHolderAdapter &&obj, const string &name, const GeAttrValue::BYTES &value); + static bool SetListBytes(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetNamedAttrs(AttrHolderAdapter &&obj, const string &name, const GeAttrValue::NamedAttrs &value); + static bool SetListNamedAttrs(AttrHolderAdapter &&obj, const string &name, + const vector &value); + static bool SetListOpDesc(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool SetListOpDesc(AttrHolderAdapter &&obj, const string &name, const vector &value); + + // Get + static bool GetInt(ConstAttrHolderAdapter &&obj, const string &name, int64_t &value); + static bool GetInt(ConstAttrHolderAdapter &&obj, const string &name, int32_t &value); + static bool GetInt(ConstAttrHolderAdapter &&obj, const string &name, uint32_t &value); + static bool GetListInt(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetListInt(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetListInt(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetFloat(ConstAttrHolderAdapter &&obj, const string &name, float &value); + static bool GetListFloat(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + 
static bool GetBool(ConstAttrHolderAdapter &&obj, const string &name, bool &value); + static bool GetListBool(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetStr(ConstAttrHolderAdapter &&obj, const string &name, string &value); + static bool GetListStr(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetTensorDesc(ConstAttrHolderAdapter &&obj, const string &name, GeTensorDesc &value); + static bool GetListTensorDesc(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetTensor(ConstAttrHolderAdapter &&obj, const string &name, ConstGeTensorPtr &value); + static bool MutableTensor(AttrHolderAdapter &&obj, const string &name, GeTensorPtr &value); + static bool GetListTensor(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool MutableListTensor(AttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetGraph(ConstAttrHolderAdapter &&obj, const string &name, ComputeGraphPtr &value); + static bool GetListGraph(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetBytes(ConstAttrHolderAdapter &&obj, const string &name, GeAttrValue::BYTES &value); + static bool GetListBytes(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + static bool GetNamedAttrs(ConstAttrHolderAdapter &&obj, const string &name, GeAttrValue::NamedAttrs &value); + static bool GetListNamedAttrs(ConstAttrHolderAdapter &&obj, const string &name, + vector &value); + static bool GetListOpDesc(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + // Value will be moved + static bool SetZeroCopyBytes(AttrHolderAdapter &&obj, const string &name, Buffer &&buffer); + static bool GetZeroCopyBytes(ConstAttrHolderAdapter &&obj, const string &name, Buffer &buffer); + // Value will be moved + static bool SetZeroCopyListBytes(AttrHolderAdapter &&obj, const string &name, + vector &listBuffer); + static bool 
GetZeroCopyListBytes(ConstAttrHolderAdapter &&obj, const string &name, vector &listBuffer); + + static bool SetListListInt(AttrHolderAdapter &&obj, const string &name, const vector> &value); + static bool GetListListInt(ConstAttrHolderAdapter &&obj, const string &name, vector> &value); + + static bool SetListDataType(AttrHolderAdapter &&obj, const string &name, const vector &value); + static bool GetListDataType(ConstAttrHolderAdapter &&obj, const string &name, vector &value); + + static bool SetDataType(AttrHolderAdapter &&obj, const string &name, const ge::DataType &value); + static bool GetDataType(ConstAttrHolderAdapter &&obj, const string &name, ge::DataType &value); + + static OpDescPtr CloneOpDesc(const ConstOpDescPtr &orgOpDesc); + + static OpDescPtr CopyOpDesc(const ConstOpDescPtr &orgOpDesc); + + class AttrHolderAdapter { + public: + AttrHolderAdapter(AttrHolder *obj) : obj_(obj) {} + ~AttrHolderAdapter() {} + template + AttrHolderAdapter(const std::shared_ptr &obj) : obj_(obj.get()) {} + AttrHolderAdapter(AttrHolder &obj) : obj_(&obj) {} + operator bool() const { return obj_ != nullptr; } + AttrHolder *operator->() { return obj_; } + AttrHolder *get() { return obj_; } + + AttrHolder *obj_; + }; + + class ConstAttrHolderAdapter { + public: + ConstAttrHolderAdapter(const AttrHolder *obj) : obj_(obj) {} + ~ConstAttrHolderAdapter() {} + template + ConstAttrHolderAdapter(const std::shared_ptr obj) : obj_(obj.get()) {} + ConstAttrHolderAdapter(const AttrHolder &obj) : obj_(&obj) {} + operator bool() const { return obj_ != nullptr; } + const AttrHolder *operator->() const { return obj_; } + const AttrHolder *get() const { return obj_; } + + private: + const AttrHolder *obj_; + }; +}; +} // namespace ge +#endif // INC_GRAPH_UTILS_ATTR_UTILS_H_ diff --git a/inc/graph/utils/graph_utils.h b/inc/graph/utils/graph_utils.h new file mode 100644 index 00000000..2d8f0fb9 --- /dev/null +++ b/inc/graph/utils/graph_utils.h @@ -0,0 +1,258 @@ +/** + * Copyright 2019-2020 
Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_UTILS_GRAPH_UTILS_H_ +#define INC_GRAPH_UTILS_GRAPH_UTILS_H_ + +#include +#include +#include +#include +#include +#include "graph/anchor.h" +#include "graph/node.h" +#include "graph/compute_graph.h" +#include "graph/utils/anchor_utils.h" +#include "graph/graph.h" +#include "graph/model.h" + +#define REFER_ATTR_VALUE(VT_ENUM, DataType, attr, ret) \ + do { \ + DataType ret; \ + attr.GetValue(ret); \ + } while (0) + +#define PRINT_ATTR_VALUE_IF(value_type, VT_ENUM, DataType, attr, stream) \ + do { \ + if (value_type == VT_ENUM) { \ + REFER_ATTR_VALUE(VT_ENUM, DataType, attr, ret) \ + stream << ret; \ + } \ + } while (0) + +#define PRINT_LIST_ATTR_VALUE_IF(value_type, VT_ENUM, DataType, attr, stream) \ + do { \ + if (value_type == VT_ENUM) { \ + REFER_ATTR_VALUE(VT_ENUM, DataType, attr, ret) \ + stream << "["; \ + for (int i = 0; i < ret.size(); i++) { \ + stream << ret[i]; \ + if (i + 1 != ret.size()) stream << ", "; \ + } \ + stream << "]"; \ + } \ + } while (0) + +#define PRINT_ATTR_VALUE_ELIF(value_type, VT_ENUM, DataType, attr, stream) \ + else PRINT_ATTR_VALUE_IF(value_type, VT_ENUM, DataType, attr, stream) + +#define PRINT_LIST_ATTR_VALUE_ELIF(value_type, VT_ENUM, DataType, attr, stream) \ + else PRINT_LIST_ATTR_VALUE_IF(value_type, VT_ENUM, DataType, attr, stream) + +#define PRINT_SHAPE(i_o, n, idx, stream) \ + do { \ + auto op = n->GetOpDesc(); \ + 
GeTensorDesc td = i_o == "input" ? op->GetInputDesc(idx) : op->GetOutputDesc(idx); \ + auto shape = td.GetShape().GetDims(); \ + stream << "["; \ + for (int i = 0; i < shape.size(); i++) { \ + stream << shape[i]; \ + if (i + 1 < shape.size()) stream << ", "; \ + } \ + stream << "]"; \ + } while (0) + +#define PRINT_ATTR_FUNC(stream) \ + [&](GeAttrValue attr) { \ + auto type = attr.GetValueType(); \ + PRINT_ATTR_VALUE_IF(type, GeAttrValue::ValueType::VT_STRING, GeAttrValue::STR, attr, stream) \ + PRINT_ATTR_VALUE_ELIF(type, GeAttrValue::ValueType::VT_FLOAT, GeAttrValue::FLOAT, attr, stream) \ + PRINT_ATTR_VALUE_ELIF(type, GeAttrValue::ValueType::VT_BOOL, GeAttrValue::BOOL, attr, stream) \ + PRINT_ATTR_VALUE_ELIF(type, GeAttrValue::ValueType::VT_INT, GeAttrValue::INT, attr, stream) \ + PRINT_LIST_ATTR_VALUE_ELIF(type, GeAttrValue::ValueType::VT_LIST_STRING, GeAttrValue::LIST_STR, attr, stream) \ + PRINT_LIST_ATTR_VALUE_ELIF(type, GeAttrValue::ValueType::VT_LIST_FLOAT, GeAttrValue::LIST_FLOAT, attr, stream) \ + PRINT_LIST_ATTR_VALUE_ELIF(type, GeAttrValue::ValueType::VT_LIST_BOOL, GeAttrValue::LIST_BOOL, attr, stream) \ + PRINT_LIST_ATTR_VALUE_ELIF(type, GeAttrValue::ValueType::VT_LIST_INT, GeAttrValue::LIST_INT, attr, stream) \ + else if (type == GeAttrValue::ValueType::VT_TENSOR_DESC) stream << "TENSOR_DESC"; \ + else if (type == GeAttrValue::ValueType::VT_TENSOR) stream << "TENSOR"; \ + else if (type == GeAttrValue::ValueType::VT_BYTES) stream << "BYTES"; \ + else if (type == GeAttrValue::ValueType::VT_LIST_TENSOR_DESC) stream << "LIST_TENSOR_DESC"; \ + else if (type == GeAttrValue::ValueType::VT_LIST_TENSOR) stream << "LIST_TENSOR"; \ + else if (type == GeAttrValue::ValueType::VT_LIST_BYTES) stream << "LIST_BYTES"; \ + }; + +namespace ge { +class GraphUtils { + public: + static ComputeGraphPtr GetComputeGraph(const Graph &graph); + + static Graph CreateGraphFromComputeGraph(const ComputeGraphPtr compute_graph); + + static ComputeGraphPtr 
CreateGraphFromOperator(const string &name, const std::vector &inputs); + + static graphStatus AddEdge(const OutDataAnchorPtr &src, const InDataAnchorPtr &dst); + + static graphStatus AddEdge(const OutDataAnchorPtr &src, const Format &src_format, const InDataAnchorPtr &dst, + const Format &dst_format); + + static graphStatus AddEdge(const AnchorPtr &src, const AnchorPtr &dst); + + static graphStatus AddEdge(const OutControlAnchorPtr &src, const InControlAnchorPtr &dst); + + static graphStatus AddEdge(const OutDataAnchorPtr &src, const InControlAnchorPtr &dst); + + // check whether src is link to dst and then remove + static graphStatus RemoveEdge(const OutDataAnchorPtr &src, const InDataAnchorPtr &dst); + + static graphStatus RemoveEdge(const AnchorPtr &src, const AnchorPtr &dst); + + static graphStatus RemoveEdge(const OutControlAnchorPtr &src, const InControlAnchorPtr &dst); + + static graphStatus RemoveEdge(const OutDataAnchorPtr &src, const InControlAnchorPtr &dst); + + static graphStatus ReplaceEdgeDst(const OutDataAnchorPtr &src, const InDataAnchorPtr &dst, + const InDataAnchorPtr &new_dst); + + static graphStatus ReplaceEdgeDst(const OutControlAnchorPtr &src, const InControlAnchorPtr &dst, + const InControlAnchorPtr &new_dst); + + static graphStatus InsertNodeBetweenDataAnchors(const OutDataAnchorPtr &src, const InDataAnchorPtr &dst, + const NodePtr &new_node); + + static graphStatus RemoveNodeWithoutRelink(const ComputeGraphPtr &compute_graph, const NodePtr &node); + + static graphStatus InsertTransNode(ComputeGraphPtr compute_graph, const InDataAnchorPtr &in_data_anchor, + const std::vector &vec_op_desc); + + static graphStatus RemoveJustNode(ComputeGraphPtr compute_graph, const NodePtr &node); + + static graphStatus RemoveJustNode(ComputeGraph &compute_graph, const NodePtr &node); + + static void RecordOriginalNames(std::vector original_nodes, const ge::NodePtr &node); + + static void RecordOriginalNames(std::vector names_tmp, const ge::NodePtr &node); + 
+ static bool CheckIsTrainGraph(const ge::ComputeGraphPtr &compute_graph); + + static bool MatchDumpStr(const std::string &suffix); + + static void DumpGEGraph(const ge::ComputeGraphPtr &graph, const std::string &suffix, bool is_always_dump = false); + + static bool LoadGEGraph(const char *file, ge::ComputeGraph &compute_graph); + + static bool CheckGlobalStepNode(const ge::NodePtr &node); + + static void BreakConnect(const std::map &all_nodes_infos); + + static void DumpGEGraphToOnnx(const ge::ComputeGraph &compute_graph, const std::string &suffix); + + static bool LoadGEGraphFromOnnx(const char *file, ge::ComputeGraph &compute_graph); + + static bool ReadProtoFromTextFile(const char *file, google::protobuf::Message *message); + + static void WriteProtoToTextFile(const google::protobuf::Message &proto, const char *real_path); + + static graphStatus AppendInputNode(const ComputeGraphPtr &graph, const NodePtr &node); + + /// + /// Isolating `node`, relinking data links from the in-anchor peer nodes to + /// the out-anchor peer nodes according to `io_map`, relinking control links + /// to ensure that input nodes of `node` are before out nodes + /// + /// Link the `io_map[i]` input anchor peer node to `i` output anchor peer + /// nodes, then unlink all links connecting with `node`. If `io_map[i]` < 0, + /// unlink all links from `i` output anchor without any relinking. + /// + /// @param node + /// @param io_map + /// @return + /// + static graphStatus IsolateNode(const NodePtr &node, const std::initializer_list &io_map); + static graphStatus IsolateNode(const NodePtr &node, const std::vector &io_map); + + /// + /// Isolate `node` which must be one input one output, equivalent to + /// `IsolateNode(node, {0})` + /// @param node + /// @return + /// + static graphStatus IsolateNodeOneIO(const NodePtr &node); + + /// + /// The data anchors replacing behavior is the same with + /// `ReplaceNodeDataAnchors`. 
In addition, replace all `old_node` control + /// anchors with `new_node`'s. + /// @param new_node + /// @param old_node + /// @param inputs_map + /// @param outputs_map + /// @return + /// + static graphStatus ReplaceNodeAnchors(const NodePtr &new_node, const NodePtr &old_node, + std::initializer_list inputs_map, std::initializer_list outputs_map); + + static graphStatus ReplaceNodeAnchors(const NodePtr &new_node, const NodePtr &old_node, + const std::vector &inputs_map, const std::vector &outputs_map); + + /// + /// Replace `old_node` data anchors with `new_node`'s according to `inputs_map` and `outputs_map`. + /// Replace the `i` in/out data anchor on `old_node` with + /// `inputs_map[i]`/`outputs_map[i]` data anchor on `new_node`. + /// If `inputs_map[i]`/`outputs_map[i]` < 0 or the index not contained in + /// `inputs_map[i]`/`outputs_map[i]`, the `i` data anchor will remain + /// on `old_node`. + /// @param new_node + /// @param old_node + /// @param inputs_map + /// @param outputs_map + /// @return + /// + static graphStatus ReplaceNodeDataAnchors(const NodePtr &new_node, const NodePtr &old_node, + std::initializer_list inputs_map, + std::initializer_list outputs_map); + + static graphStatus ReplaceNodeDataAnchors(const NodePtr &new_node, const NodePtr &old_node, + const std::vector &inputs_map, const std::vector &outputs_map); + + /// + /// Copy all in-control edges from `src_node` to `dst_node` + /// @param src_node + /// @param dst_node + /// @return + /// + static graphStatus CopyInCtrlEdges(const NodePtr &src_node, NodePtr &dst_node); + + static graphStatus MoveInCtrlEdges(const NodePtr &src_node, NodePtr &dst_node); + + /// + /// Copy all out-control edges from `src_node` to `dst_node` + /// @param src_node + /// @param dst_node + /// @return success: GRAPH_SUCESS + /// + static graphStatus CopyOutCtrlEdges(const NodePtr &src_node, NodePtr &dst_node); + + /// + /// Move all out-control edges from `src_node` to `dst_node` + /// @param src_node + /// 
@param dst_node + /// @return success: GRAPH_SUCESS + /// + static graphStatus MoveOutCtrlEdges(NodePtr &src_node, NodePtr &dst_node); +}; +} // namespace ge + +#endif // INC_GRAPH_UTILS_GRAPH_UTILS_H_ diff --git a/inc/graph/utils/node_utils.h b/inc/graph/utils/node_utils.h new file mode 100644 index 00000000..3902ed08 --- /dev/null +++ b/inc/graph/utils/node_utils.h @@ -0,0 +1,64 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_UTILS_NODE_UTILS_H_ +#define INC_GRAPH_UTILS_NODE_UTILS_H_ + +#include +#include +#include "graph/node.h" +namespace ge { +class NodeUtils { + public: + static graphStatus AddSendEventId(const NodePtr &node, const uint32_t &event_id); + static graphStatus AddRecvEventId(const NodePtr &node, const uint32_t &event_id); + static graphStatus GetSendEventIdList(const NodePtr &node, std::vector &vec_send); + static graphStatus GetRecvEventIdList(const NodePtr &node, std::vector &vec_recv); + + static graphStatus ClearSendInfo(); + static graphStatus ClearRecvInfo(); + + static graphStatus GetSingleOutputNodeOfNthLayer(const NodePtr &src, int depth, NodePtr &dst); + + static graphStatus GetDataOutAnchorAndControlInAnchor(const NodePtr &node_ptr, OutDataAnchorPtr &out_data, + InControlAnchorPtr &in_control); + + static graphStatus ClearInDataAnchor(const NodePtr &node_ptr, const InDataAnchorPtr &in_data_anchor); + static graphStatus SetAllAnchorStatus(const NodePtr &nodePtr); + static graphStatus SetAllAnchorStatus(Node &node); + static bool IsAnchorStatusSet(const NodePtr &nodePtr); + static bool IsAnchorStatusSet(const Node &node); + + static graphStatus MoveOutputEdges(const NodePtr &origin_node, const NodePtr &new_node); + + static void UpdateIsInputConst(const NodePtr &nodePtr); + static void UpdateIsInputConst(Node &node); + static bool IsConst(const Node &node); + static void UnlinkAll(const Node &node); + static graphStatus UpdatePeerNodeInputDesc(const NodePtr &node_ptr); + + static bool IsInNodesEmpty(const Node &node); + static GeTensorDesc GetOutputDesc(const Node &node, uint32_t index); + static GeTensorDesc GetInputDesc(const Node &node, uint32_t index); + static graphStatus UpdateOutputShape(const Node &node, uint32_t index, const GeShape &shape); + static graphStatus UpdateInputShape(const Node &node, uint32_t index, const GeShape &shape); + + private: + static std::map> map_send_info_; + static std::map> map_recv_info_; +}; +} // 
namespace ge +#endif // INC_GRAPH_UTILS_NODE_UTILS_H_ diff --git a/inc/graph/utils/op_desc_utils.h b/inc/graph/utils/op_desc_utils.h new file mode 100644 index 00000000..363e0ed5 --- /dev/null +++ b/inc/graph/utils/op_desc_utils.h @@ -0,0 +1,93 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_GRAPH_UTILS_OP_DESC_UTILS_H_ +#define INC_GRAPH_UTILS_OP_DESC_UTILS_H_ + +#include +#include +#include + +#include "graph/def_types.h" +#include "graph/node.h" +#include "graph/op_desc.h" +#include "graph/operator.h" +#include "graph/range_vistor.h" + +namespace ge { +class OpDesc; + +using OpDescPtr = std::shared_ptr; + +class OpDescUtils { + public: + template + using Vistor = RangeVistor>; + + OpDescUtils() = default; + ~OpDescUtils() = default; + static bool HasQuantizeFactorParams(const OpDescPtr &op_desc); + static bool HasQuantizeFactorParams(const OpDesc &op_desc); + static graphStatus GetQuantizeFactorParams(const OpDescPtr &op_desc, QuantizeFactorParams &quant); + static graphStatus GetQuantizeFactorParams(const OpDesc &op_desc, QuantizeFactorParams &quant); + static graphStatus SetQuantizeFactorParams(const OpDescPtr &op_desc, const QuantizeFactorParams &quant); + static graphStatus SetQuantizeFactorParams(OpDesc &op_desc, const QuantizeFactorParams &quant); + + static vector GetConstInputNode(const ge::Node &node); + static vector GetInputData(const vector &input_nodes); + + 
static vector GetWeights(const ge::Node &node); + static vector GetWeights(const ge::ConstNodePtr &node); + static vector MutableWeights(const ge::Node &node); + static vector MutableWeights(const ge::NodePtr node); + static graphStatus SetWeights(ge::Node &node, const vector &weights); + static graphStatus SetWeights(ge::NodePtr node, const vector &weights); + static graphStatus ClearWeights(ge::NodePtr node); + + static bool ClearInputDesc(ge::OpDescPtr op_desc, uint32_t index); + static bool ClearInputDesc(const ge::NodePtr &node); + static bool ClearOutputDesc(const ge::OpDescPtr &op_desc, uint32_t index); + static bool ClearOutputDesc(const ge::NodePtr &node); + static vector GetConstInputs(const ge::Node &node); + static vector GetConstInputs(const ge::ConstNodePtr &node); + static size_t GetNonConstInputsSize(const ge::Node &node); + static size_t GetNonConstInputsSize(ge::ConstNodePtr node); + // Index: Indicate the index of all non const inputs + static GeTensorDesc GetNonConstInputTensorDesc(const ge::Node &node, size_t index_non_const = 0); + static GeTensorDesc GetNonConstInputTensorDesc(const ge::ConstNodePtr &node, size_t index_non_const = 0); + static bool GetNonConstInputIndex(const ge::Node &node, size_t index_non_const, size_t &index); + static bool GetNonConstInputIndex(const ge::ConstNodePtr &node, size_t index_non_const, size_t &index); + // Index: Indicate the index of all inputs + static bool IsNonConstInput(const ge::Node &node, size_t index = 0); + static bool IsNonConstInput(const ge::ConstNodePtr &node, size_t index = 0); + + static vector GetNonConstTensorDesc(const ge::ConstNodePtr &node); + static graphStatus AddConstOpToAnchor(InDataAnchorPtr in_anchor, const GeTensorPtr &tensor_ptr); + + static Operator CreateOperatorFromOpDesc(OpDescPtr op_desc); + static Operator CreateOperatorFromNode(ge::ConstNodePtr node_ptr); + static OpDescPtr GetOpDescFromOperator(const Operator &oprt); + + static OpDescPtr CreateConstOp(const GeTensorPtr 
&tensor_ptr); + + private: + static GeTensorPtr MutableWeights(ge::OpDesc &op_desc); + static GeTensorPtr MutableWeights(ge::OpDescPtr op_desc); + static graphStatus SetWeights(ge::OpDesc &op_desc, const GeTensorPtr weight); + static graphStatus SetWeights(ge::OpDescPtr op_desc, const GeTensorPtr weight); +}; +} // namespace ge +#endif // INC_GRAPH_UTILS_OP_DESC_UTILS_H_ diff --git a/inc/graph/utils/tensor_adapter.h b/inc/graph/utils/tensor_adapter.h new file mode 100644 index 00000000..f9993606 --- /dev/null +++ b/inc/graph/utils/tensor_adapter.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_UTILS_TENSOR_ADAPTER_H_ +#define INC_GRAPH_UTILS_TENSOR_ADAPTER_H_ + +#include +#include "graph/ge_tensor.h" +#include "graph/tensor.h" +namespace ge { +using GeTensorPtr = std::shared_ptr; +using ConstGeTensorPtr = std::shared_ptr; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY TensorAdapter { + public: + static GeTensorDesc TensorDesc2GeTensorDesc(const TensorDesc &tensorDesc); + static TensorDesc GeTensorDesc2TensorDesc(const GeTensorDesc &geTensorDesc); + static GeTensorPtr Tensor2GeTensor(const Tensor &tensor); + static Tensor GeTensor2Tensor(const ConstGeTensorPtr &geTensor); + + static ConstGeTensorPtr AsGeTensorPtr(const Tensor &tensor); // Share value + static GeTensorPtr AsGeTensorPtr(Tensor &tensor); // Share value + static const GeTensor AsGeTensor(const Tensor &tensor); // Share value + static GeTensor AsGeTensor(Tensor &tensor); // Share value + static const Tensor AsTensor(const GeTensor &tensor); // Share value + static Tensor AsTensor(GeTensor &tensor); // Share value +}; +} // namespace ge +#endif // INC_GRAPH_UTILS_TENSOR_ADAPTER_H_ diff --git a/inc/graph/utils/tensor_utils.h b/inc/graph/utils/tensor_utils.h new file mode 100644 index 00000000..934ad12f --- /dev/null +++ b/inc/graph/utils/tensor_utils.h @@ -0,0 +1,77 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_UTILS_TENSOR_UTILS_H_ +#define INC_GRAPH_UTILS_TENSOR_UTILS_H_ + +#include + +#include "graph/def_types.h" +#include "graph/ge_error_codes.h" +#include "graph/ge_tensor.h" +namespace ge { +class TensorUtils { + public: + static ge::graphStatus GetSize(const GeTensorDesc &tensorDesc, uint32_t &size); + static void SetSize(GeTensorDesc &tensorDesc, uint32_t size); + static uint32_t GetWeightSize(const ConstGeTensorPtr &tensorPtr); + static uint32_t GetWeightSize(const GeTensor &tensor); + static uint32_t GetWeightSize(const GeTensorDesc &tensorDesc); + static uint8_t *GetWeightAddr(const ConstGeTensorPtr &tensorPtr, uint8_t *base); + static uint8_t *GetWeightAddr(const GeTensor &tensor, uint8_t *base); + static void SetWeightSize(GeTensorDesc &tensorDesc, uint32_t size); + static ge::graphStatus GetReuseInput(const GeTensorDesc &tensorDesc, bool &flag); + static void SetReuseInput(GeTensorDesc &tensorDesc, bool flag); + static ge::graphStatus GetOutputTensor(const GeTensorDesc &tensorDesc, bool &flag); + static void SetOutputTensor(GeTensorDesc &tensorDesc, bool flag); + static graphStatus GetDeviceType(const GeTensorDesc &tensorDesc, DeviceType &type); + static void SetDeviceType(GeTensorDesc &tensorDesc, DeviceType type); + static ge::graphStatus GetInputTensor(const GeTensorDesc &tensorDesc, bool &flag); + static void SetInputTensor(GeTensorDesc &tensorDesc, bool flag); + static ge::graphStatus GetRealDimCnt(const GeTensorDesc &tensorDesc, uint32_t &cnt); + static void SetRealDimCnt(GeTensorDesc &tensorDesc, uint32_t cnt); + static ge::graphStatus GetReuseInputIndex(const GeTensorDesc &tensorDesc, uint32_t &idx); + static void SetReuseInputIndex(GeTensorDesc &tensorDesc, uint32_t idx); + static ge::graphStatus GetDataOffset(const GeTensorDesc &tensorDesc, int64_t &offset); + static void SetDataOffset(GeTensorDesc &tensorDesc, int64_t offset); + static ge::graphStatus GetCmpsSize(const GeTensorDesc &tensorDesc, uint32_t &cmp_size); + 
static void SetCmpsSize(GeTensorDesc &tensorDesc, uint32_t cmp_size); + static ge::graphStatus GetCmpsTab(const GeTensorDesc &tensorDesc, vector &vec); + static void SetCmpsTab(GeTensorDesc &tensorDesc, const uint8_t *data, size_t size); + static ge::graphStatus GetCmpsTabOffset(const GeTensorDesc &tensorDesc, int64_t &tab_offset); + static void SetCmpsTabOffset(GeTensorDesc &tensorDesc, int64_t tab_offset); + static ge::graphStatus GetCmpsInfo(const GeTensorDesc &tensorDesc, CompressInfo &info); + static void SetCmpsInfo(GeTensorDesc &tensorDesc, const CompressInfo &info); + static bool HasAlloffsetQuantizeInfo(const GeTensorDesc &tensorDesc); + static ge::graphStatus GetAlloffsetQuantizeInfo(const GeTensorDesc &tensorDesc, AllOffsetQuantizeInfo &info); + static void SetAlloffsetQuantizeInfo(GeTensorDesc &tensorDesc, const AllOffsetQuantizeInfo &info); + static ge::graphStatus GetRC(const GeTensorDesc &tensorDesc, uint32_t &rc); + static void SetRC(GeTensorDesc &tensorDesc, uint32_t rc); + + /// + /// calculate mem size of the tensor. 
+ /// @param shape tensor shape + /// @param format tensor format + /// @param data_type tensor data type + /// @param mem_size -1 means unknown shape,others means mem size + /// @return GRAPH_SUCCESS:success, others:failed + /// + static ge::graphStatus CalcTensorMemSize(const GeShape &shape, Format format, DataType data_type, int64_t &mem_size); + static ge::graphStatus GetTensorMemorySizeInBytes(const GeTensorDesc &desc_temp, uint32_t &size_temp); + static ge::graphStatus GetTensorSizeInBytes(const GeTensorDesc &desc_temp, uint32_t &size_temp); +}; +} // namespace ge +#endif // INC_GRAPH_UTILS_TENSOR_UTILS_H_ diff --git a/inc/graph/utils/type_utils.h b/inc/graph/utils/type_utils.h new file mode 100644 index 00000000..f5f8234d --- /dev/null +++ b/inc/graph/utils/type_utils.h @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef INC_GRAPH_UTILS_TYPE_UTILS_H_ +#define INC_GRAPH_UTILS_TYPE_UTILS_H_ + +#include +#include +#include +#include "graph/def_types.h" +#include "graph/ge_error_codes.h" +#include "graph/types.h" +#include "graph/usr_types.h" + +namespace ge { +class TypeUtils { + public: + static bool IsDataTypeValid(DataType dt); + static bool IsFormatValid(Format format); + static bool IsInternalFormat(Format format); + + static std::string DataTypeToSerialString(DataType data_type); + static DataType SerialStringToDataType(const std::string &str); + static std::string FormatToSerialString(Format format); + static Format SerialStringToFormat(const std::string &str); + static Format DataFormatToFormat(const std::string &str); + + static graphStatus Usr2DefQuantizeFactorParams(const UsrQuantizeFactorParams &usr, QuantizeFactorParams &def); + static graphStatus Def2UsrQuantizeFactorParams(const QuantizeFactorParams &def, UsrQuantizeFactorParams &usr); + + static bool GetDataTypeLength(ge::DataType data_type, uint32_t &length); + static bool CheckUint64MulOverflow(uint64_t a, uint32_t b); +}; +} // namespace ge +#endif // INC_GRAPH_UTILS_TYPE_UTILS_H_ diff --git a/scripts/check_clang_format.sh b/scripts/check_clang_format.sh new file mode 100644 index 00000000..836ce7c7 --- /dev/null +++ b/scripts/check_clang_format.sh @@ -0,0 +1,128 @@ +#!/bin/bash +# Copyright 2019 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +set -e + +CLANG_FORMAT=$(which clang-format) || (echo "Please install 'clang-format' tool first"; exit 1) + +version=$("${CLANG_FORMAT}" --version | sed -n "s/.*\ \([0-9]*\)\.[0-9]*\.[0-9]*.*/\1/p") +if [[ "${version}" -lt "8" ]]; then + echo "clang-format's version must be at least 8.0.0" + exit 1 +fi + +CURRENT_PATH=$(pwd) +SCRIPTS_PATH=$(dirname "$0") + +echo "CURRENT_PATH=$CURRENT_PATH" +echo "SCRIPTS_PATH=$SCRIPTS_PATH" + +# print usage message +function usage() +{ + echo "Check whether the specified source files were well formated" + echo "Usage:" + echo "bash $0 [-a] [-c] [-l] [-h]" + echo "e.g. $0 -a" + echo "" + echo "Options:" + echo " -a Check code format of all files, default case" + echo " -c Check code format of the files changed compared to last commit" + echo " -l Check code format of the files changed in last commit" + echo " -h Print usage" +} + +# check and set options +function checkopts() +{ + # init variable + mode="all" # default check all files + + # Process the options + while getopts 'aclh' opt + do + case "${opt}" in + a) + mode="all" + ;; + c) + mode="changed" + ;; + l) + mode="lastcommit" + ;; + h) + usage + exit 0 + ;; + *) + echo "Unknown option ${opt}!" + usage + exit 1 + esac + done +} + +# init variable +# check options +checkopts "$@" + +# switch to project root path, which contains clang-format config file '.clang-format' +cd "${SCRIPTS_PATH}/.." 
|| exit 1 + +CHECK_LIST_FILE='__checked_files_list__' + +if [ "X${mode}" == "Xall" ]; then + find src -type f -name "*" | grep "\.h$\|\.cc$" > "${CHECK_LIST_FILE}" || true + find inc -type f -name "*" | grep "\.h$\|\.cc$" >> "${CHECK_LIST_FILE}" || true +elif [ "X${mode}" == "Xchanged" ]; then + # --diff-filter=ACMRTUXB will ignore deleted files in commit + git diff --diff-filter=ACMRTUXB --name-only | grep "src" | grep "\.h$\|\.cc$" > "${CHECK_LIST_FILE}" || true + git diff --diff-filter=ACMRTUXB --name-only | grep "inc" | grep "\.h$\|\.cc$" >> "${CHECK_LIST_FILE}" || true +else # "X${mode}" == "Xlastcommit" + git diff --diff-filter=ACMRTUXB --name-only HEAD~ HEAD | grep "src" | grep "\.h$\|\.cc$" > "${CHECK_LIST_FILE}" || true + git diff --diff-filter=ACMRTUXB --name-only HEAD~ HEAD | grep "inc" | grep "\.h$\|\.cc$" >> "${CHECK_LIST_FILE}" || true +fi + +CHECK_RESULT_FILE=__code_format_check_result__ +echo "0" > "$CHECK_RESULT_FILE" + +# check format of files modified in the lastest commit +while read line; do + BASE_NAME=$(basename "${line}") + TEMP_FILE="__TEMP__${BASE_NAME}" + cp "${line}" "${TEMP_FILE}" + ${CLANG_FORMAT} -i "${TEMP_FILE}" + set +e + diff "${TEMP_FILE}" "${line}" + ret=$? + set -e + rm "${TEMP_FILE}" + if [[ "${ret}" -ne 0 ]]; then + echo "File ${line} is not formated, please format it." + echo "1" > "${CHECK_RESULT_FILE}" + break + fi +done < "${CHECK_LIST_FILE}" + +result=$(cat "${CHECK_RESULT_FILE}") +rm "${CHECK_RESULT_FILE}" +rm "${CHECK_LIST_FILE}" +cd "${CURRENT_PATH}" || exit 1 +if [[ "X${result}" == "X0" ]]; then + echo "Check PASS: specified files are well formated!" 
+fi +exit "${result}" diff --git a/scripts/format_source_code.sh b/scripts/format_source_code.sh new file mode 100644 index 00000000..81d19f4b --- /dev/null +++ b/scripts/format_source_code.sh @@ -0,0 +1,108 @@ +#!/bin/bash +# Copyright 2019 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +set -e + +CLANG_FORMAT=$(which clang-format) || (echo "Please install 'clang-format' tool first"; exit 1) + +version=$("${CLANG_FORMAT}" --version | sed -n "s/.*\ \([0-9]*\)\.[0-9]*\.[0-9]*.*/\1/p") +if [[ "${version}" -lt "8" ]]; then + echo "clang-format's version must be at least 8.0.0" + exit 1 +fi + +CURRENT_PATH=$(pwd) +SCRIPTS_PATH=$(dirname "$0") + +echo "CURRENT_PATH=${CURRENT_PATH}" +echo "SCRIPTS_PATH=${SCRIPTS_PATH}" + +# print usage message +function usage() +{ + echo "Format the specified source files to conform the code style." + echo "Usage:" + echo "bash $0 [-a] [-c] [-l] [-h]" + echo "e.g. 
$0 -c" + echo "" + echo "Options:" + echo " -a format of all files" + echo " -c format of the files changed compared to last commit, default case" + echo " -l format of the files changed in last commit" + echo " -h Print usage" +} + +# check and set options +function checkopts() +{ + # init variable + mode="changed" # default format changed files + + # Process the options + while getopts 'aclh' opt + do + case "${opt}" in + a) + mode="all" + ;; + c) + mode="changed" + ;; + l) + mode="lastcommit" + ;; + h) + usage + exit 0 + ;; + *) + echo "Unknown option ${opt}!" + usage + exit 1 + esac + done +} + +# init variable +# check options +checkopts "$@" + +# switch to project root path, which contains clang-format config file '.clang-format' +cd "${SCRIPTS_PATH}/.." || exit 1 + +FMT_FILE_LIST='__format_files_list__' + +if [[ "X${mode}" == "Xall" ]]; then + find src -type f -name "*" | grep "\.h$\|\.cc$" > "${FMT_FILE_LIST}" || true + find inc -type f -name "*" | grep "\.h$\|\.cc$" >> "${FMT_FILE_LIST}" || true +elif [[ "X${mode}" == "Xchanged" ]]; then + git diff --name-only | grep "src" | grep "\.h$\|\.cc$" > "${FMT_FILE_LIST}" || true + git diff --name-only | grep "inc" | grep "\.h$\|\.cc$" >> "${FMT_FILE_LIST}" || true +else # "X${mode}" == "Xlastcommit" + git diff --name-only HEAD~ HEAD | grep "src" | grep "\.h$\|\.cc$" > "${FMT_FILE_LIST}" || true + git diff --name-only HEAD~ HEAD | grep "inc" | grep "\.h$\|\.cc$" >> "${FMT_FILE_LIST}" || true +fi + +while read line; do + if [ -f "${line}" ]; then + ${CLANG_FORMAT} -i "${line}" + fi +done < "${FMT_FILE_LIST}" + +rm "${FMT_FILE_LIST}" +cd "${CURRENT_PATH}" || exit 1 + +echo "Specified cpp source files have been format successfully." 
diff --git a/src/common/graph/CMakeLists.txt b/src/common/graph/CMakeLists.txt new file mode 100755 index 00000000..c6bd0c3d --- /dev/null +++ b/src/common/graph/CMakeLists.txt @@ -0,0 +1,78 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +# libgraph.so +# compiling proto files generates some warnings, use no-unused-variable to suppress them +set(CMAKE_CXX_FLAGS "-Wno-unused-variable ${CMAKE_CXX_FLAGS}") +# add all proto files, generate corresponding .h and .cc files +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../../proto/om.proto" + "../../proto/ge_ir.proto" + "../../proto/insert_op.proto" + "../../proto/task.proto" + "../../proto/fwk_adaper.proto" + "../../proto/op_mapping_info.proto" + ) + +file(GLOB_RECURSE ONNX_PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "${onnx_INC}/onnx/onnx.proto" + ) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) +ge_protobuf_generate(ge PROTO_ONNX_SRCS PROTO_ONNX_HDRS ${ONNX_PROTO_LIST}) + +# need to remove dependencies on pb files later +file(GLOB_RECURSE SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "*.cc" + "utils/*.cc" + "opsproto/*.cc" + "detail/*.cc" + "debug/*.cc" + "op_imp.cc" + "option/*.cc" + ) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}) +include_directories(${GE_SOURCE_DIR}/src) 
+include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${GE_SOURCE_DIR}/src/common) +include_directories(${GE_SOURCE_DIR}/src/common/graph) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/inc/common) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) +include_directories(${GE_SOURCE_DIR}/build) + +######### libgraph.so ############# +add_library(graph SHARED ${SRC_LIST} ${PROTO_SRCS} ${PROTO_ONNX_SRCS}) +target_compile_definitions(graph PRIVATE + DAVINCI_CLOUD + Werror) +target_link_libraries(graph PRIVATE + ${PROTOBUF_LIBRARY} + ${c_sec} + ${slog} + rt + dl) diff --git a/src/common/graph/anchor.cc b/src/common/graph/anchor.cc new file mode 100644 index 00000000..d16c96e0 --- /dev/null +++ b/src/common/graph/anchor.cc @@ -0,0 +1,371 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/anchor.h" + +#include +#include + +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/node.h" + +namespace ge { +Anchor::Anchor(const NodePtr &owner_node, int idx) : owner_node_(owner_node), idx_(idx) {} + +bool Anchor::IsTypeOf(TYPE type) const { return strcmp(Anchor::TypeOf(), type) == 0; } + +Anchor::Vistor Anchor::GetPeerAnchors() const { + vector ret; + for (const auto &anchor : peer_anchors_) { + ret.push_back(anchor.lock()); + } + return Anchor::Vistor(shared_from_this(), ret); +} + +AnchorPtr Anchor::GetFirstPeerAnchor() const { + if (peer_anchors_.empty()) { + return nullptr; + } else { + return Anchor::DynamicAnchorCast(peer_anchors_.begin()->lock()); + } +} + +NodePtr Anchor::GetOwnerNode() const { return owner_node_.lock(); } + +void Anchor::UnlinkAll() noexcept { + if (!peer_anchors_.empty()) { + do { + auto peer_anchor_ptr = peer_anchors_.begin()->lock(); + if (Unlink(peer_anchor_ptr) != GRAPH_SUCCESS) { + GELOGW("unlink peer_anchor_ptr failed."); + } + } while (!peer_anchors_.empty()); + } +} + +graphStatus Anchor::Unlink(const AnchorPtr &peer) { + if (peer == nullptr) { + GELOGE(GRAPH_FAILED, "peer anchor is invalid."); + return GRAPH_FAILED; + } + auto it = std::find_if(peer_anchors_.begin(), peer_anchors_.end(), [peer](const std::weak_ptr &an) { + auto anchor = an.lock(); + return peer->Equal(anchor); + }); + + GE_IF_BOOL_EXEC(it == peer_anchors_.end(), GELOGW("this anchor is not connected to peer"); return GRAPH_FAILED); + + auto it_peer = + std::find_if(peer->peer_anchors_.begin(), peer->peer_anchors_.end(), [this](const std::weak_ptr &an) { + auto anchor = an.lock(); + return Equal(anchor); + }); + + GE_CHK_BOOL_RET_STATUS(it_peer != peer->peer_anchors_.end(), GRAPH_FAILED, "peer is not connected to this anchor"); + + (void)peer_anchors_.erase(it); + (void)peer->peer_anchors_.erase(it_peer); + return GRAPH_SUCCESS; +} + +graphStatus Anchor::ReplacePeer(const AnchorPtr &old_peer, const 
AnchorPtr &first_peer, const AnchorPtr &second_peer) { + GE_CHK_BOOL_RET_STATUS(old_peer != nullptr, GRAPH_FAILED, "this old peer anchor is nullptr"); + GE_CHK_BOOL_RET_STATUS(first_peer != nullptr, GRAPH_FAILED, "this first peer anchor is nullptr"); + GE_CHK_BOOL_RET_STATUS(second_peer != nullptr, GRAPH_FAILED, "this second peer anchor is nullptr"); + auto this_it = std::find_if(peer_anchors_.begin(), peer_anchors_.end(), [old_peer](const std::weak_ptr &an) { + auto anchor = an.lock(); + return old_peer->Equal(anchor); + }); + + GE_CHK_BOOL_RET_STATUS(this_it != peer_anchors_.end(), GRAPH_FAILED, "this anchor is not connected to old_peer"); + + auto old_it = std::find_if(old_peer->peer_anchors_.begin(), old_peer->peer_anchors_.end(), + [this](const std::weak_ptr &an) { + auto anchor = an.lock(); + return Equal(anchor); + }); + + GE_CHK_BOOL_RET_STATUS(old_it != old_peer->peer_anchors_.end(), GRAPH_FAILED, + "old_peer is not connected to this anchor"); + *this_it = first_peer; + first_peer->peer_anchors_.push_back(shared_from_this()); + *old_it = second_peer; + second_peer->peer_anchors_.push_back(old_peer); + return GRAPH_SUCCESS; +} + +bool Anchor::IsLinkedWith(const AnchorPtr &peer) { + auto it = std::find_if(peer_anchors_.begin(), peer_anchors_.end(), [peer](const std::weak_ptr &an) { + auto anchor = an.lock(); + GE_CHK_BOOL_RET_STATUS(peer != nullptr, false, "this old peer anchor is nullptr"); + return peer->Equal(anchor); + }); + return (it != peer_anchors_.end()); +} + +int Anchor::GetIdx() const { return idx_; } + +void Anchor::SetIdx(int index) { idx_ = index; } + +DataAnchor::DataAnchor(const NodePtr &owner_node, int idx) : Anchor(owner_node, idx) {} + +bool DataAnchor::IsTypeOf(TYPE type) const { + if (strcmp(Anchor::TypeOf(), type) == 0) { + return true; + } + return Anchor::IsTypeOf(type); +} + +InDataAnchor::InDataAnchor(const NodePtr &owner_node, int idx) : DataAnchor(owner_node, idx) {} + +OutDataAnchorPtr InDataAnchor::GetPeerOutAnchor() const { + 
if (peer_anchors_.empty()) { + return nullptr; + } else { + return Anchor::DynamicAnchorCast(peer_anchors_.begin()->lock()); + } +} + +graphStatus InDataAnchor::LinkFrom(const OutDataAnchorPtr &src) { + // InDataAnchor must be only linkfrom once + if (src == nullptr || !peer_anchors_.empty()) { + GELOGE(GRAPH_FAILED, "src anchor is invalid or the peerAnchors is not empty."); + return GRAPH_FAILED; + } + peer_anchors_.push_back(src); + src->peer_anchors_.push_back(shared_from_this()); + return GRAPH_SUCCESS; +} + +bool InDataAnchor::Equal(AnchorPtr anchor) const { + auto in_data_anchor = Anchor::DynamicAnchorCast(anchor); + if (in_data_anchor != nullptr) { + if (GetOwnerNode() == in_data_anchor->GetOwnerNode() && GetIdx() == in_data_anchor->GetIdx()) { + return true; + } + } + return false; +} + +bool InDataAnchor::IsTypeOf(TYPE type) const { + if (strcmp(Anchor::TypeOf(), type) == 0) { + return true; + } + return DataAnchor::IsTypeOf(type); +} + +OutDataAnchor::OutDataAnchor(const NodePtr &owner_node, int idx) : DataAnchor(owner_node, idx) {} + +OutDataAnchor::Vistor OutDataAnchor::GetPeerInDataAnchors() const { + vector ret; + for (const auto &anchor : peer_anchors_) { + auto in_data_anchor = Anchor::DynamicAnchorCast(anchor.lock()); + if (in_data_anchor != nullptr) { + ret.push_back(in_data_anchor); + } + } + return OutDataAnchor::Vistor(shared_from_this(), ret); +} + +uint32_t OutDataAnchor::GetPeerInDataNodesSize() const { + uint32_t out_nums = 0; + for (const auto &anchor : peer_anchors_) { + auto in_data_anchor = Anchor::DynamicAnchorCast(anchor.lock()); + if (in_data_anchor != nullptr && in_data_anchor->GetOwnerNode() != nullptr) { + out_nums++; + } + } + return out_nums; +} + +OutDataAnchor::Vistor OutDataAnchor::GetPeerInControlAnchors() const { + vector ret; + for (const auto &anchor : peer_anchors_) { + auto in_control_anchor = Anchor::DynamicAnchorCast(anchor.lock()); + if (in_control_anchor != nullptr) { + ret.push_back(in_control_anchor); + } + } + 
return OutDataAnchor::Vistor(shared_from_this(), ret); +} + +graphStatus OutDataAnchor::LinkTo(const InDataAnchorPtr &dest) { + if (dest == nullptr || !dest->peer_anchors_.empty()) { + GELOGE(GRAPH_FAILED, "dest anchor is invalid or the peerAnchors is not empty."); + return GRAPH_FAILED; + } + peer_anchors_.push_back(dest); + dest->peer_anchors_.push_back(shared_from_this()); + return GRAPH_SUCCESS; +} + +graphStatus OutDataAnchor::LinkTo(const InControlAnchorPtr &dest) { + if (dest == nullptr) { + GELOGE(GRAPH_FAILED, "dest anchor is invalid."); + return GRAPH_FAILED; + } + peer_anchors_.push_back(dest); + dest->peer_anchors_.push_back(shared_from_this()); + return GRAPH_SUCCESS; +} + +graphStatus OutControlAnchor::LinkTo(const InDataAnchorPtr &dest) { + if (dest == nullptr) { + GELOGE(GRAPH_FAILED, "dest anchor is invalid."); + return GRAPH_FAILED; + } + peer_anchors_.push_back(dest); + dest->peer_anchors_.push_back(shared_from_this()); + return GRAPH_SUCCESS; +} + +bool OutDataAnchor::Equal(AnchorPtr anchor) const { + CHECK_FALSE_EXEC(anchor != nullptr, return false); + auto out_data_anchor = Anchor::DynamicAnchorCast(anchor); + if (out_data_anchor != nullptr) { + if (GetOwnerNode() == out_data_anchor->GetOwnerNode() && GetIdx() == out_data_anchor->GetIdx()) { + return true; + } + } + return false; +} + +bool OutDataAnchor::IsTypeOf(TYPE type) const { + if (strcmp(Anchor::TypeOf(), type) == 0) { + return true; + } + return DataAnchor::IsTypeOf(type); +} + +ControlAnchor::ControlAnchor(const NodePtr &owner_node) : Anchor(owner_node, -1) {} + +ControlAnchor::ControlAnchor(const NodePtr &owner_node, int idx) : Anchor(owner_node, idx) {} + +bool ControlAnchor::IsTypeOf(TYPE type) const { + if (strcmp(Anchor::TypeOf(), type) == 0) { + return true; + } + return Anchor::IsTypeOf(type); +} + +InControlAnchor::InControlAnchor(const NodePtr &owner_node) : ControlAnchor(owner_node) {} + +InControlAnchor::InControlAnchor(const NodePtr &owner_node, int idx) : 
ControlAnchor(owner_node, idx) {} + +InControlAnchor::Vistor InControlAnchor::GetPeerOutControlAnchors() const { + vector ret; + for (const auto &anchor : peer_anchors_) { + auto out_control_anchor = Anchor::DynamicAnchorCast(anchor.lock()); + if (out_control_anchor != nullptr) { + ret.push_back(out_control_anchor); + } + } + return InControlAnchor::Vistor(shared_from_this(), ret); +} + +InControlAnchor::Vistor InControlAnchor::GetPeerOutDataAnchors() const { + vector ret; + for (const auto &anchor : peer_anchors_) { + auto out_data_anchor = Anchor::DynamicAnchorCast(anchor.lock()); + if (out_data_anchor != nullptr) { + ret.push_back(out_data_anchor); + } + } + return InControlAnchor::Vistor(shared_from_this(), ret); +} + +graphStatus InControlAnchor::LinkFrom(const OutControlAnchorPtr &src) { + if (src == nullptr) { + GELOGE(GRAPH_FAILED, "src anchor is invalid."); + return GRAPH_FAILED; + } + peer_anchors_.push_back(src); + src->peer_anchors_.push_back(shared_from_this()); + return GRAPH_SUCCESS; +} + +bool InControlAnchor::Equal(AnchorPtr anchor) const { + CHECK_FALSE_EXEC(anchor != nullptr, return false); + auto in_control_anchor = Anchor::DynamicAnchorCast(anchor); + if (in_control_anchor != nullptr) { + if (GetOwnerNode() == in_control_anchor->GetOwnerNode()) { + return true; + } + } + return false; +} + +bool InControlAnchor::IsTypeOf(TYPE type) const { + if (strcmp(Anchor::TypeOf(), type) == 0) { + return true; + } + return ControlAnchor::IsTypeOf(type); +} + +OutControlAnchor::OutControlAnchor(const NodePtr &owner_node) : ControlAnchor(owner_node) {} + +OutControlAnchor::OutControlAnchor(const NodePtr &owner_node, int idx) : ControlAnchor(owner_node, idx) {} + +OutControlAnchor::Vistor OutControlAnchor::GetPeerInControlAnchors() const { + vector ret; + for (const auto &anchor : peer_anchors_) { + auto in_control_anchor = Anchor::DynamicAnchorCast(anchor.lock()); + if (in_control_anchor != nullptr) { + ret.push_back(in_control_anchor); + } + } + return 
OutControlAnchor::Vistor(shared_from_this(), ret); +} + +OutControlAnchor::Vistor OutControlAnchor::GetPeerInDataAnchors() const { + vector ret; + for (const auto &anchor : peer_anchors_) { + auto in_data_anchor = Anchor::DynamicAnchorCast(anchor.lock()); + if (in_data_anchor != nullptr) { + ret.push_back(in_data_anchor); + } + } + return OutControlAnchor::Vistor(shared_from_this(), ret); +} + +graphStatus OutControlAnchor::LinkTo(const InControlAnchorPtr &dest) { + if (dest == nullptr) { + GELOGE(GRAPH_FAILED, "dest anchor is invalid."); + return GRAPH_FAILED; + } + peer_anchors_.push_back(dest); + dest->peer_anchors_.push_back(shared_from_this()); + return GRAPH_SUCCESS; +} + +bool OutControlAnchor::Equal(AnchorPtr anchor) const { + auto out_control_anchor = Anchor::DynamicAnchorCast(anchor); + if (out_control_anchor != nullptr) { + if (GetOwnerNode() == out_control_anchor->GetOwnerNode()) { + return true; + } + } + return false; +} + +bool OutControlAnchor::IsTypeOf(TYPE type) const { + if (strcmp(Anchor::TypeOf(), type) == 0) { + return true; + } + return ControlAnchor::IsTypeOf(type); +} +} // namespace ge diff --git a/src/common/graph/attr_value.cc b/src/common/graph/attr_value.cc new file mode 100644 index 00000000..a5457ecc --- /dev/null +++ b/src/common/graph/attr_value.cc @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "external/graph/attr_value.h" + +#include "debug/ge_log.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_attr_value.h" + +namespace ge { +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY AttrValue::AttrValue() { impl = ComGraphMakeShared(); } + +#define ATTR_VALUE_SET_GET_IMP(type) \ + graphStatus AttrValue::GetValue(type &val) const { \ + if (impl != nullptr) { \ + GELOGW("GetValue failed."); \ + return impl->geAttrValue_.GetValue(val); \ + } \ + return GRAPH_FAILED; \ + } + +ATTR_VALUE_SET_GET_IMP(AttrValue::STR) +ATTR_VALUE_SET_GET_IMP(AttrValue::INT) +ATTR_VALUE_SET_GET_IMP(AttrValue::FLOAT) +} // namespace ge diff --git a/src/common/graph/buffer.cc b/src/common/graph/buffer.cc new file mode 100644 index 00000000..f6dc7a83 --- /dev/null +++ b/src/common/graph/buffer.cc @@ -0,0 +1,114 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/buffer.h" + +#include "proto/ge_ir.pb.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +Buffer::Buffer() { + data_.InitDefault(); + if (data_.GetProtoMsg()) { + buffer_ = data_.GetProtoMsg()->mutable_bt(); + } +} + +Buffer::Buffer(const Buffer &other) { + // Share data + data_ = other.data_; + buffer_ = other.buffer_; +} + +// default +Buffer::Buffer(std::size_t buffer_size, std::uint8_t default_val) : Buffer() { + auto proto_msg = data_.GetProtoMsg(); + if (proto_msg != nullptr) { + try { + proto_msg->set_bt(std::string(buffer_size, default_val)); + buffer_ = proto_msg->mutable_bt(); + } catch (std::bad_alloc &e) { + GELOGE(MEMALLOC_FAILED, "Failed to alloc buffer memory, buffer size %zu", buffer_size); + buffer_ = nullptr; + } + } +} + +Buffer Buffer::CopyFrom(const std::uint8_t *data, std::size_t buffer_size) { + Buffer buffer; + auto proto_msg = buffer.data_.GetProtoMsg(); + if (proto_msg != nullptr && data != nullptr) { + try { + proto_msg->set_bt(data, buffer_size); + buffer.buffer_ = proto_msg->mutable_bt(); + } catch (std::bad_alloc &e) { + GELOGE(MEMALLOC_FAILED, "Failed to alloc buffer memory, buffer size %zu", buffer_size); + buffer.buffer_ = nullptr; + } + } + return buffer; +} + +Buffer::Buffer(const std::shared_ptr &proto_owner, proto::AttrDef *buffer) + : data_(proto_owner, buffer) { + if (data_.GetProtoMsg() != nullptr) { + buffer_ = data_.GetProtoMsg()->mutable_bt(); + } +} + +Buffer::Buffer(const std::shared_ptr &proto_owner, std::string *buffer) + : data_(proto_owner, nullptr) { + buffer_ = buffer; +} + +Buffer &Buffer::operator=(const Buffer &other) { + if (&other != this) { + // Share data + data_ = other.data_; + buffer_ = other.buffer_; + } + return *this; +} + +const std::uint8_t *Buffer::GetData() const { + if (buffer_ != nullptr) { + return (const std::uint8_t *)buffer_->data(); + } + return nullptr; +} + +std::uint8_t *Buffer::GetData() { + if (buffer_ != nullptr && !buffer_->empty()) { + // Avoid 
copy on write + (void)(*buffer_)[0]; + return reinterpret_cast(const_cast(buffer_->data())); + } + return nullptr; +} + +std::size_t Buffer::GetSize() const { + if (buffer_ != nullptr) { + return buffer_->size(); + } + return 0; +} + +void Buffer::ClearBuffer() { + if (buffer_ != nullptr) { + buffer_->clear(); + } +} +} // namespace ge diff --git a/src/common/graph/compute_graph.cc b/src/common/graph/compute_graph.cc new file mode 100755 index 00000000..d82e619f --- /dev/null +++ b/src/common/graph/compute_graph.cc @@ -0,0 +1,945 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/compute_graph.h" +#include +#include "./format_refiner.h" +#include "./ge_context.h" +#include "debug/ge_attr_define.h" +#include "debug/ge_log.h" +#include "debug/ge_op_types.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "ge/ge_api_types.h" +#include "graph/shape_refiner.h" +#include "proto/ge_ir.pb.h" +#include "utils/ge_ir_utils.h" +#include "utils/graph_utils.h" +#include "utils/node_utils.h" +#include "utils/op_desc_utils.h" +#include "utils/string_utils.h" +#include "utils/tensor_utils.h" + +namespace ge { +namespace { +const size_t OUTPUT_PARAM_SIZE = 2; +} // namespace + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ComputeGraph::ComputeGraph(const std::string &name) + : nodes_(), input_nodes_(), sub_graph_(), name_(name), is_valid_flag_(false), need_iteration_(false) { + attrs_.InitDefault(); +} +ComputeGraph::~ComputeGraph() {} +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY string ComputeGraph::GetName() const { return name_; } +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void ComputeGraph::SetName(const string &name) { name_ = name; } + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY size_t ComputeGraph::GetAllNodesSize() const { + size_t s = nodes_.size(); + for (const auto &sub_graph : sub_graph_) { + s += sub_graph->GetAllNodesSize(); + } + return s; +} +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ComputeGraph::Vistor ComputeGraph::GetAllNodes() const { + vector all_nodes(nodes_.size()); + (void)std::copy(nodes_.begin(), nodes_.end(), all_nodes.begin()); + for (const auto &sub_graph : sub_graph_) { + if (sub_graph == nullptr) { + GELOGW("sub graph is nullptr"); + continue; + } + for (const auto &node : sub_graph->GetAllNodes()) { + all_nodes.push_back(node); + } + } + return Vistor(shared_from_this(), all_nodes); +} +size_t ComputeGraph::GetDirectNodesSize() const { return nodes_.size(); } +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ComputeGraph::Vistor 
ComputeGraph::GetDirectNode() const { + return Vistor(shared_from_this(), nodes_); +} +ComputeGraph::Vistor ComputeGraph::GetInputNodes() const { + return Vistor(shared_from_this(), input_nodes_); +} + +ComputeGraph::Vistor ComputeGraph::GetOutputNodes() const { + std::vector result; + for (auto iter = output_nodes_info_.begin(); iter != output_nodes_info_.end(); ++iter) { + result.push_back(iter->first); + } + return Vistor(shared_from_this(), result); +} +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY NodePtr ComputeGraph::FindNode(const std::string &name) const { + for (const auto &node : nodes_) { + if (node == nullptr) { + continue; + } + if (node->GetName() == name) { + return node; + } + } + return nullptr; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool ComputeGraph::GraphAttrsAreEqual( + const ComputeGraph &r_graph) const { + // ProtoMsgOwner <::google::protobuf::Message> is temporarily ignored + if ((this->attrs_.protoMsg_ != nullptr) && (r_graph.attrs_.protoMsg_ != nullptr)) { + const auto &proto_attr_map = *(this->attrs_.protoMsg_); + const auto &r_proto_attr_map = *(r_graph.attrs_.protoMsg_); + // 1.Verify graph's ProtoAttrMap size + if (proto_attr_map.size() != r_proto_attr_map.size()) { + GELOGE(GRAPH_FAILED, "Size of compute graph's ProtoAttrMap verify failed, graph name: %s.", + this->GetName().c_str()); + return false; + } + // 2.Verify graph's ProtoAttrMap key, verify values is temporarily not implemented + for (const auto &it : proto_attr_map) { + if (r_proto_attr_map.count(it.first) == 0) { + GELOGE(GRAPH_FAILED, "Key of compute graph's ProtoAttrMap verify failed, graph name: %s key name: %s.", + this->GetName().c_str(), it.first.c_str()); + return false; + } + } + return true; + } + return ((this->attrs_.protoMsg_ == nullptr) && (r_graph.attrs_.protoMsg_ == nullptr)); +} + +/// Since there may be different input nodes +/// chosen by user in the same graph, special judgment is needed +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY 
bool ComputeGraph::VectorInputNodePtrIsEqual( + const std::vector &left_nodes, const std::vector &right_nodes) const { + const auto left_nodes_size = left_nodes.size(); + const auto right_nodes_size = right_nodes.size(); + if (left_nodes_size != right_nodes_size) { + GELOGE(GRAPH_FAILED, + "Check failed with graph input_nodes_: " + "left inputNodes size %zu is different with right inputNodes size %zu .", + left_nodes_size, right_nodes_size); + return false; + } + for (size_t j = 0; j < left_nodes_size; j++) { + if (left_nodes.at(j) == nullptr || right_nodes.at(j) == nullptr) { + GELOGE(GRAPH_FAILED, "left_nodes.at(%zu) or right_nodes.at(%zu) is nullptr", j, j); + return false; + } + const auto &left_input_name = left_nodes.at(j)->GetName(); + const auto &right_input_name = right_nodes.at(j)->GetName(); + if (left_input_name != right_input_name) { + GELOGE(GRAPH_FAILED, + "Check failed with graph input_nodes_: " + "left inputNode name %s is different with right inputNode name %s at inputNodes index %zu.", + left_input_name.c_str(), right_input_name.c_str(), j); + return false; + } + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool ComputeGraph::GraphMembersAreEqual( + const ComputeGraph &r_graph) const { + return (IsEqual(this->sub_graph_.size(), r_graph.sub_graph_.size(), "graph.sub_graph_.size()") && + IsEqual(this->nodes_.size(), r_graph.nodes_.size(), "graph.nodes_.size()") && + VectorInputNodePtrIsEqual(this->input_nodes_, r_graph.input_nodes_) && + IsEqual(this->name_, r_graph.name_, "graph.name_") && + IsEqual(this->is_valid_flag_, r_graph.is_valid_flag_, "graph.is_valid_flag_") && + IsEqual(this->need_iteration_, r_graph.need_iteration_, "graph.need_iteration_") && + IsEqual(this->params_share_map_, r_graph.params_share_map_, "graph.params_share_map_") && + IsEqual(this->out_nodes_map_, r_graph.out_nodes_map_, "graph.out_nodes_map_") && + IsEqual(this->inputs_order_, r_graph.inputs_order_, "graph.inputs_order_") && + 
IsEqual(this->output_size_, r_graph.output_size_, "graph.output_size_") && + IsEqual(this->input_size_, r_graph.input_size_, "graph.input_size_") && + IsEqual(this->output_nodes_info_, r_graph.output_nodes_info_, "graph.output_nodes_info_")); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool ComputeGraph::operator==(const ComputeGraph &r_graph) const { + // Firstly: Graph's members equal + if ((!GraphMembersAreEqual(r_graph)) || (!GraphAttrsAreEqual(r_graph))) { + return false; + } + + // Secondly: Node equal means the link relationship between node and node itself equal + for (const auto &left_node : nodes_) { + if (left_node == nullptr) { + GELOGE(GRAPH_FAILED, "left_node is nullptr"); + return false; + } + const auto &node_name = left_node->GetName(); + // After TopologicalSorting, node order can change, so find node by name + const auto &right_node = r_graph.FindNode(node_name); + GE_IF_BOOL_EXEC(right_node == nullptr, GELOGE(GRAPH_FAILED, "right_node is NULL!!!"); return false); + if (!(*right_node == *left_node)) { + GELOGE(GRAPH_FAILED, "Compare graph failed, node name: %s.", node_name.c_str()); + return false; + } + } + + // Thirdly: Recursively determine whether the sub graphs are equal + for (size_t i = 0; i < this->sub_graph_.size(); i++) { + if (!(*((this->sub_graph_)[i]) == *((r_graph.sub_graph_)[i]))) { + return false; + } + } + return true; +} + +NodePtr ComputeGraph::AddNodeFront(NodePtr node) { + if (node == nullptr || node->GetOpDesc() == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr or op desc should not be null."); + return nullptr; + } + node->GetOpDesc()->SetId(nodes_.size()); + if (nodes_[0] == nullptr) { + GELOGE(GRAPH_FAILED, "nodes_ size or nodes_[0] is nullptr"); + return nullptr; + } + if (nodes_.size() > 0 && nodes_[0]->GetType() == DATA) { + (void)nodes_.insert(nodes_.begin() + 1, node); + } else { + (void)nodes_.insert(nodes_.begin(), node); + } + return node; +} + +NodePtr ComputeGraph::AddNodeFront(const OpDescPtr &op) { + 
if (op == nullptr) { + GELOGE(GRAPH_FAILED, "The OpDesc ptr should be not null."); + return nullptr; + } + op->SetId(nodes_.size()); + NodePtr node_ptr = shared_ptr(new (std::nothrow) Node(op, shared_from_this())); + GE_IF_BOOL_EXEC(node_ptr == nullptr, GELOGE(GRAPH_FAILED, "node_ptr is NULL!!!"); return nullptr); + GE_IF_BOOL_EXEC(node_ptr->Init() != GRAPH_SUCCESS, GELOGE(GRAPH_FAILED, "node init fail."); return nullptr); + return AddNodeFront(node_ptr); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY NodePtr ComputeGraph::AddNode(NodePtr node) { + if (node == nullptr || node->GetOpDesc() == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr should be not null."); + return nullptr; + } + node->GetOpDesc()->SetId((int64_t)GetDirectNodesSize()); + nodes_.push_back(node); + return node; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY NodePtr ComputeGraph::AddNode(OpDescPtr op) { + if (op == nullptr) { + GELOGE(GRAPH_FAILED, "The OpDesc ptr should be not null."); + return nullptr; + } + op->SetId(GetDirectNodesSize()); + NodePtr node_ptr = shared_ptr(new (std::nothrow) Node(op, shared_from_this())); + GE_IF_BOOL_EXEC(node_ptr == nullptr, GELOGE(GRAPH_FAILED, "node_ptr is NULL!!!"); return nullptr); + GE_IF_BOOL_EXEC(node_ptr->Init() != GRAPH_SUCCESS, GELOGE(GRAPH_FAILED, "node init fail."); return nullptr); + return AddNode(node_ptr); +} +NodePtr ComputeGraph::AddInputNode(NodePtr node) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr should be not null."); + return nullptr; + } + input_nodes_.push_back(node); + if (std::find(nodes_.begin(), nodes_.end(), node) == nodes_.end()) { + GE_CHK_BOOL_EXEC(AddNode(node) != nullptr, return nullptr, "add node failed"); + } + return node; +} +NodePtr ComputeGraph::AddOutputNode(NodePtr node) { + if (node == nullptr || node->GetOpDesc() == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr or opdesc should be not null."); + return nullptr; + } + + bool already_have = false; + NodePtr result = node; + // 
[output_nodes_info_ : should not be null] + for (const auto &item : output_nodes_info_) { + if (item.first->GetName() == node->GetName()) { + already_have = true; + result = item.first; + break; + } + } + + if (!already_have) { + output_nodes_info_.emplace_back(std::make_pair(node, 0)); + } + + if (std::find(nodes_.begin(), nodes_.end(), node) == nodes_.end()) { + GE_CHK_BOOL_EXEC(AddNode(node) != nullptr, return nullptr, "add node failed"); + } + return result; +} + +graphStatus ComputeGraph::RemoveConstInput(const NodePtr &node) { + GE_CHECK_NOTNULL(node); + + for (const auto &in_anchor : node->GetAllInDataAnchors()) { + auto out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor == nullptr || out_anchor->GetOwnerNode() == nullptr) { + continue; + } + if (out_anchor->GetOwnerNode()->GetType() == CONSTANT || out_anchor->GetOwnerNode()->GetType() == CONSTANTOP) { + GE_CHK_BOOL_RET_STATUS(GraphUtils::RemoveEdge(out_anchor, in_anchor) == GRAPH_SUCCESS, GRAPH_FAILED, + "Remove edge from const op failed."); + if (out_anchor->GetOwnerNode()->GetOutDataNodes().size() == 0) { + GELOGI("Remove const op %s.", out_anchor->GetOwnerNode()->GetName().c_str()); + auto iter = find(nodes_.begin(), nodes_.end(), out_anchor->GetOwnerNode()); + if (iter != nodes_.end()) { + (void)nodes_.erase(iter); + } + } + } + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus ComputeGraph::RemoveNode(const NodePtr &node) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr should be not null."); + return GRAPH_FAILED; + } + + // delete const op for this node + (void)RemoveConstInput(node); + + // if the node save as input node, delete it + (void)RemoveInputNode(node); + + // if the node save as input node, delete it + (void)RemoveOutputNode(node); + + if (GRAPH_SUCCESS != IsolateNode(node)) { + GELOGE(GRAPH_FAILED, "Isolate node failed, node name: %s.", node->GetName().c_str()); + return GRAPH_FAILED; + } + + auto iter = 
find(nodes_.begin(), nodes_.end(), node); + if (iter != nodes_.end()) { + (void)nodes_.erase(iter); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} +// Used in sub_graph scenes +graphStatus ComputeGraph::RemoveInputNode(const NodePtr &node) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr should be not null."); + return GRAPH_FAILED; + } + + auto iter = find(input_nodes_.begin(), input_nodes_.end(), node); + if (iter != input_nodes_.end()) { + (void)input_nodes_.erase(iter); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +// Used in sub_graph scenes +graphStatus ComputeGraph::RemoveOutputNode(const NodePtr &node) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr should be not null."); + return GRAPH_FAILED; + } + + auto iter = output_nodes_info_.begin(); + bool find_node = false; + // [output_nodes_info_ : should not be null] + while (iter != output_nodes_info_.end()) { + if (node->GetName() == iter->first->GetName()) { + iter = output_nodes_info_.erase(iter); + find_node = true; + } else { + ++iter; + } + } + GE_IF_BOOL_EXEC(find_node == false, return GRAPH_FAILED); + return GRAPH_SUCCESS; +} +std::shared_ptr ComputeGraph::AddSubGraph(std::shared_ptr sub_graph) { + if (sub_graph == nullptr) { + GELOGE(GRAPH_FAILED, "The graph ptr should be not null."); + return nullptr; + } + sub_graph_.push_back(sub_graph); + return sub_graph; +} +graphStatus ComputeGraph::RemoveSubGraph(const std::shared_ptr &sub_graph) { + if (sub_graph == nullptr) { + GELOGE(GRAPH_FAILED, "The graph ptr should be not null."); + return GRAPH_FAILED; + } + + auto iter = find(sub_graph_.begin(), sub_graph_.end(), sub_graph); + if (iter != sub_graph_.end()) { + (void)sub_graph_.erase(iter); + return GRAPH_SUCCESS; + } else { + GELOGW("find sub_graph failed"); + return GRAPH_SUCCESS; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus ComputeGraph::InsertEventNodes() { + std::vector node_vec = nodes_; + for (const auto &node : 
GetAllNodes()) { + if (node == nullptr || node->GetOpDesc() == nullptr) { + GELOGW("node or OpDescPtr is nullptr."); + continue; + } + GE_IF_BOOL_EXEC(node == nullptr, GELOGE(GRAPH_FAILED, "The node should be not null."); return GRAPH_FAILED); + if (node->GetOpDesc()->GetType() == kRecvType) { + auto iter = find(node_vec.begin(), node_vec.end(), node); + if (iter == node_vec.end()) { + GELOGW("no node found."); + } else { + (void)node_vec.erase(iter); + } + + auto dst_iter = find(node_vec.begin(), node_vec.end(), node->GetOutControlNodes().at(0)); + (void)node_vec.insert(dst_iter, node); + } + if (node->GetOpDesc()->GetType() == kSendType) { + auto iter = find(node_vec.begin(), node_vec.end(), node); + if (iter == node_vec.end()) { + GELOGW("no node found."); + } else { + (void)node_vec.erase(iter); + } + + auto src_iter = find(node_vec.begin(), node_vec.end(), node->GetInControlNodes().at(0)); + (void)node_vec.insert(src_iter + 1, node); + } + } + nodes_.clear(); + for (size_t i = 0; i < node_vec.size(); ++i) { + NodePtr node = node_vec[i]; + if (node == nullptr || node->GetOpDesc() == nullptr) { + GELOGW("node or OpDescPtr is nullptr."); + } else { + node->GetOpDesc()->SetId((int64_t)i); + nodes_.push_back(node); + } + } + return GRAPH_SUCCESS; +} + +graphStatus ComputeGraph::DFSTopologicalSorting(std::vector &node_vec, + std::map &map_in_edge_num, + std::vector &stack) { + GELOGI("Runing_Dfs_Sort"); + // Record the number of non data nodes but no input nodes + GE_CHK_BOOL_EXEC(SortNodes(stack, map_in_edge_num) == GRAPH_SUCCESS, return GRAPH_FAILED, "sort nodes failed"); + + // Only data nodes here + while (!stack.empty()) { + NodePtr node = stack.back(); + stack.pop_back(); + node_vec.push_back(node); + GE_CHECK_NOTNULL(node->GetOpDesc()); + GELOGD("node_vec.push_back %s", node->GetOpDesc()->GetName().c_str()); + for (const auto &anchor : node->GetAllOutDataAnchors()) { + GE_CHECK_NOTNULL(anchor); + for (const auto &peer_in_anchor : 
anchor->GetPeerInDataAnchors()) { + GE_CHECK_NOTNULL(peer_in_anchor); + auto iter = map_in_edge_num.find(peer_in_anchor->GetOwnerNode()); + if (iter != map_in_edge_num.end() && --iter->second == 0) { + stack.push_back(peer_in_anchor->GetOwnerNode()); + } + } + for (const auto &peer_in_anchor : anchor->GetPeerInControlAnchors()) { + GE_CHECK_NOTNULL(peer_in_anchor); + auto iter = map_in_edge_num.find(peer_in_anchor->GetOwnerNode()); + if (iter != map_in_edge_num.end() && --iter->second == 0) { + stack.push_back(peer_in_anchor->GetOwnerNode()); + } + } + } + GE_IF_BOOL_EXEC( + node->GetOutControlAnchor() != nullptr, for (AnchorPtr peer_in_anchor + : node->GetOutControlAnchor()->GetPeerAnchors()) { + GE_CHECK_NOTNULL(peer_in_anchor); + auto iter = map_in_edge_num.find(peer_in_anchor->GetOwnerNode()); + if (iter != map_in_edge_num.end() && --iter->second == 0) { + stack.push_back(peer_in_anchor->GetOwnerNode()); + } + }) + } + + return GRAPH_SUCCESS; +} + +graphStatus ComputeGraph::BFSTopologicalSorting(std::vector &node_vec, + std::map &map_in_edge_num, + std::deque &stack) { + GELOGI("Runing_Bfs_Sort"); + std::vector stack_input; + std::map breadth_node_map; + // Record the number of non data nodes but no input nodes + GE_CHK_BOOL_EXEC(SortNodes(stack_input, map_in_edge_num) == GRAPH_SUCCESS, return GRAPH_FAILED, "sort nodes failed"); + + // Only data nodes here + while (!stack_input.empty() || !stack.empty()) { + NodePtr node = nullptr; + if (!stack.empty()) { + node = stack.back(); + stack.pop_back(); + } else { + node = stack_input.back(); + stack_input.pop_back(); + } + node_vec.push_back(node); + GE_CHECK_NOTNULL(node->GetOpDesc()); + GELOGD("node_vec.push_back %s", node->GetOpDesc()->GetName().c_str()); + + CollectBreadthOutNode(node, map_in_edge_num, breadth_node_map); + + for (const auto &name_node : breadth_node_map) { + (void)stack.push_front(name_node.second); + } + breadth_node_map.clear(); + } + + return GRAPH_SUCCESS; +} + +graphStatus 
ComputeGraph::CollectBreadthOutNode(const NodePtr &node, std::map &map_in_edge_num, + std::map &breadth_node_map) { + for (const auto &anchor : node->GetAllOutDataAnchors()) { + for (const auto &peer_in_anchor : anchor->GetPeerInDataAnchors()) { + auto iter = map_in_edge_num.find(peer_in_anchor->GetOwnerNode()); + if (iter != map_in_edge_num.end() && --iter->second == 0) { + (void)breadth_node_map.emplace(peer_in_anchor->GetOwnerNode()->GetName(), peer_in_anchor->GetOwnerNode()); + } + } + for (const auto &peer_in_anchor : anchor->GetPeerInControlAnchors()) { + auto iter = map_in_edge_num.find(peer_in_anchor->GetOwnerNode()); + if (iter != map_in_edge_num.end() && --iter->second == 0) { + (void)breadth_node_map.emplace(peer_in_anchor->GetOwnerNode()->GetName(), peer_in_anchor->GetOwnerNode()); + } + } + } + GE_IF_BOOL_EXEC( + node->GetOutControlAnchor() != nullptr, for (AnchorPtr peer_in_anchor + : node->GetOutControlAnchor()->GetPeerAnchors()) { + auto iter = map_in_edge_num.find(peer_in_anchor->GetOwnerNode()); + if (iter != map_in_edge_num.end() && --iter->second == 0) { + (void)breadth_node_map.emplace(peer_in_anchor->GetOwnerNode()->GetName(), peer_in_anchor->GetOwnerNode()); + } + }) + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus ComputeGraph::TopologicalSorting() { + std::vector node_vec; + std::map map_in_edge_num; + bool use_BFS = false; + string run_mode; + const int base = 10; + if (ge::GetContext().GetOption(ge::OPTION_GRAPH_RUN_MODE, run_mode) == GRAPH_SUCCESS && !run_mode.empty()) { + if (GraphRunMode(std::strtol(run_mode.c_str(), nullptr, base)) >= TRAIN) { + use_BFS = true; + } + } else { + GELOGW("Get OPTION_GRAPH_RUN_MODE failed, use BFSTopologicalSorting by default."); + } + + if (use_BFS) { + std::deque stack; + if (BFSTopologicalSorting(node_vec, map_in_edge_num, stack) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } else { + std::vector stack; + if (DFSTopologicalSorting(node_vec, map_in_edge_num, 
stack) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + // If they are not equal, there is a closed loop + if (node_vec.size() != nodes_.size()) { + std::set itered_nodes_set; + for (auto &node : node_vec) { + itered_nodes_set.insert(node.get()); + } + GE_LOGE("Failed to do topo sorting total %zu, itered %zu, exist closed loop in graph.", nodes_.size(), + node_vec.size()); + for (auto &node : nodes_) { + if (itered_nodes_set.count(node.get()) == 0) { + GE_LOGE("The node %s does not itered when topological sorting", node->GetName().c_str()); + } + } + return GRAPH_FAILED; + } + + nodes_.clear(); + for (size_t i = 0; i < node_vec.size(); i++) { + NodePtr node = node_vec[i]; // [node: should not be null] + node->GetOpDesc()->SetId(i); // [node->GetOpDesc(): should not be null] + nodes_.push_back(node); + } + is_valid_flag_ = true; + return GRAPH_SUCCESS; +} + +graphStatus ComputeGraph::SortNodes(std::vector &stack, std::map &map_in_edge_num) { + // Record the number of non data nodes but no input nodes + uint32_t spec_node_size = 0; + bool verify_isolated = false; + string run_mode; + const int base = 10; + // Need verify isolated point in PREDICTION mode. + if (ge::GetContext().GetOption(ge::OPTION_GRAPH_RUN_MODE, run_mode) == GRAPH_SUCCESS && !run_mode.empty()) { + if (GraphRunMode(std::strtol(run_mode.c_str(), nullptr, base)) < TRAIN) { + verify_isolated = true; + } + } + for (const auto &node : GetAllNodes()) { + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, continue); + map_in_edge_num[node] = static_cast(GetInEdgeSize(node)); + if (map_in_edge_num[node] == 0) { + if ((node->GetOpDesc()->GetType() != kDataType) && (node->GetOpDesc()->GetType() != kAippDataType) && + (node->GetOpDesc()->GetType() != kInputType) && (node->GetOpDesc()->GetType() != kAnnDataType)) { + // At present, can only judge the isolated point without input and output. + // It is impossible to judge the situation with multiple output nodes. 
+ if (verify_isolated && GetOutEdgeSize(node) == 0) { + GELOGE(GRAPH_FAILED, "May has isolated nodes in graph, node name: %s.", node->GetName().c_str()); + return GRAPH_FAILED; + } + (void)stack.insert(stack.begin(), node); + spec_node_size++; + continue; + } + // Need to insert the data nodes in reverse order + (void)stack.insert(stack.begin() + spec_node_size, node); + } + } + + /// Make sure the inputs order matches with user-designated + /// 1. Get the index of two input nodes in the user-inputs-order(inputs_order_) + /// 2. Compare two indices, if not match, swap the positions of two inputs + /// *: Remind: stack is reverse-order + for (size_t i = 0; i < stack.size(); ++i) { + // [stack: should not be null] + for (size_t j = i + 1; j < stack.size(); ++j) { + // If not found in 'inputs_order_', skip it + auto it_i = std::find(inputs_order_.begin(), inputs_order_.end(), stack[i]->GetName()); + GE_IF_BOOL_EXEC(it_i == inputs_order_.end(), continue); + auto it_j = std::find(inputs_order_.begin(), inputs_order_.end(), stack[j]->GetName()); + GE_IF_BOOL_EXEC(it_j == inputs_order_.end(), continue); + + // Compare index, swap them if it should be + auto inx_i = it_i - inputs_order_.begin(); + auto inx_j = it_j - inputs_order_.begin(); + GE_IF_BOOL_EXEC(inx_i < inx_j, std::swap(stack[i], stack[j])); + } + } + + return GRAPH_SUCCESS; +} +size_t ComputeGraph::GetInEdgeSize(const NodePtr &node) { + size_t in_edge_size = 0; + if (node == nullptr) { + return in_edge_size; + } + for (const auto &anchor : node->GetAllInDataAnchors()) { + in_edge_size = in_edge_size + anchor->GetPeerAnchors().size(); + // Break flow control data loop. 
+ OutDataAnchorPtr out_anchor = anchor->GetPeerOutAnchor(); + if ((out_anchor != nullptr) && (out_anchor->GetOwnerNode() != nullptr)) { + NodePtr out_node = out_anchor->GetOwnerNode(); + if (out_node == nullptr) { + GELOGW("out node is nullptr"); + continue; + } + if ((out_node->GetType() == NEXTITERATION) || (out_node->GetType() == REFNEXTITERATION)) { + GE_IF_BOOL_EXEC(in_edge_size == 0, GELOGE(GRAPH_FAILED, "If [in_edge_size = 0], the result will be reversed"); + return in_edge_size); + in_edge_size -= 1; + } + } + } + if (node->GetInControlAnchor() != nullptr) { + in_edge_size = in_edge_size + node->GetInControlAnchor()->GetPeerAnchors().size(); + } + return in_edge_size; +} +size_t ComputeGraph::GetOutEdgeSize(const NodePtr &node) { + size_t out_edge_size = 0; + if (node == nullptr) { + return out_edge_size; + } + + // Break flow control data loop. + if ((node->GetType() != NEXTITERATION) && (node->GetType() != REFNEXTITERATION)) { + for (const auto &anchor : node->GetAllOutDataAnchors()) { + if (anchor != nullptr) { + out_edge_size = out_edge_size + anchor->GetPeerAnchors().size(); + } + } + } + if (node->GetOutControlAnchor() != nullptr) { + if (out_edge_size > (UINT32_MAX - node->GetOutControlAnchor()->GetPeerAnchors().size())) { + return 0; + } + out_edge_size = out_edge_size + node->GetOutControlAnchor()->GetPeerAnchors().size(); + } + return out_edge_size; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool ComputeGraph::IsValid() const { return is_valid_flag_; } +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void ComputeGraph::Dump() const { + GELOGI("graph name = %s.", GetName().c_str()); + for (const auto &node : GetAllNodes()) { + GELOGI("node name = %s.", node->GetName().c_str()); + for (const auto &anchor : node->GetAllOutDataAnchors()) { + for (const auto &peer_in_anchor : anchor->GetPeerInDataAnchors()) { + GE_IF_BOOL_EXEC(peer_in_anchor != nullptr && peer_in_anchor->GetOwnerNode() != nullptr, + GELOGI("node name = %s, out data node name = 
%s.", node->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str())); + } + for (const auto &peer_in_anchor : anchor->GetPeerInControlAnchors()) { + GE_IF_BOOL_EXEC(peer_in_anchor != nullptr && peer_in_anchor->GetOwnerNode() != nullptr, + GELOGI("node name = %s, out control node name = %s.", node->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str())); + } + } + GE_IF_BOOL_EXEC(node->GetOutControlAnchor() == nullptr, GELOGE(GRAPH_FAILED, "Out control anchor is null"); + return); + for (const auto &peer_in_anchor : node->GetOutControlAnchor()->GetPeerInControlAnchors()) { + GE_IF_BOOL_EXEC(peer_in_anchor != nullptr && peer_in_anchor->GetOwnerNode() != nullptr, + GELOGI("node name = %s, out control node name = %s.", node->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str())); + } + for (const auto &peer_in_anchor : node->GetOutControlAnchor()->GetPeerInDataAnchors()) { + GE_IF_BOOL_EXEC(peer_in_anchor != nullptr && peer_in_anchor->GetOwnerNode() != nullptr, + GELOGI("node name = %s, out control node name = %s.", node->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str())); + } + } +} +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus ComputeGraph::IsolateNode(const NodePtr &node) { + GE_CHECK_NOTNULL(node); + auto next_nodes = node->GetOutAllNodes(); + // If there is input data side + for (size_t i = 0; i < node->GetAllInDataAnchors().size(); i++) { + auto in_data_anchor = node->GetInDataAnchor(static_cast(i)); + auto pre_out_data_anchor = in_data_anchor->GetPeerOutAnchor(); + if (pre_out_data_anchor != nullptr) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(pre_out_data_anchor, in_data_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge failed"); + GE_IF_BOOL_EXEC(pre_out_data_anchor->GetOwnerNode()->GetType() == CONSTANT || + pre_out_data_anchor->GetOwnerNode()->GetType() == CONSTANTOP, + continue); + for (const auto &out_data_anchor : node->GetAllOutDataAnchors()) { + for (const 
auto &next_in_data_anchor : out_data_anchor->GetPeerInDataAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_data_anchor, next_in_data_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge failed"); + GE_CHK_BOOL_EXEC(GraphUtils::AddEdge(pre_out_data_anchor, next_in_data_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "add edge failed"); + } + for (const auto &next_in_ctrl_anchor : out_data_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_data_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge failed"); + GE_CHK_BOOL_EXEC(GraphUtils::AddEdge(pre_out_data_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "add edge failed"); + } + } + auto out_ctrl_anchor = node->GetOutControlAnchor(); + GE_CHECK_NOTNULL(out_ctrl_anchor); + auto pre_out_ctrl_anchor = pre_out_data_anchor->GetOwnerNode()->GetOutControlAnchor(); + GE_CHECK_NOTNULL(pre_out_ctrl_anchor); + for (const auto &next_in_ctrl_anchor : out_ctrl_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_ctrl_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge failed"); + GE_CHK_BOOL_EXEC(GraphUtils::AddEdge(pre_out_ctrl_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "add edge failed"); + } + } + } + // If there is an input control side + auto in_ctrl_anchor = node->GetInControlAnchor(); + GE_CHECK_NOTNULL(in_ctrl_anchor); + for (const auto &pre_out_ctrl_anchor : in_ctrl_anchor->GetPeerOutControlAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(pre_out_ctrl_anchor, in_ctrl_anchor) == GRAPH_SUCCESS, return GRAPH_FAILED, + "remove edge failed"); + for (const auto &out_data_anchor : node->GetAllOutDataAnchors()) { + for (const auto &next_in_ctrl_anchor : out_data_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_data_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge 
failed"); + GE_CHK_BOOL_EXEC(GraphUtils::AddEdge(pre_out_ctrl_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "add edge failed"); + } + } + auto out_ctrl_anchor = node->GetOutControlAnchor(); + if (out_ctrl_anchor != nullptr) { + for (const auto &next_in_ctrl_anchor : out_ctrl_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_ctrl_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge failed"); + GE_CHK_BOOL_EXEC(GraphUtils::AddEdge(pre_out_ctrl_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "add edge failed"); + } + } + } + + for (const auto &out_peer_data_anchor : in_ctrl_anchor->GetPeerOutDataAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_peer_data_anchor, in_ctrl_anchor) == GRAPH_SUCCESS, return GRAPH_FAILED, + "remove edge failed"); + for (const auto &next_node : next_nodes) { + auto next_in_control_anchor = next_node->GetInControlAnchor(); + GE_CHK_BOOL_EXEC(GraphUtils::AddEdge(out_peer_data_anchor, next_in_control_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "add edge failed"); + } + } + + return RemoveExtraOutEdge(node); +} +graphStatus ComputeGraph::RemoveExtraOutEdge(const NodePtr &node) { + GE_CHECK_NOTNULL(node); + // Remove redundant output edges + for (const auto &out_data_anchor : node->GetAllOutDataAnchors()) { + for (const auto &next_in_data_anchor : out_data_anchor->GetPeerInDataAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_data_anchor, next_in_data_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge failed"); + } + + for (const auto &next_in_ctrl_anchor : out_data_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_data_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge failed"); + } + } + auto out_ctrl_anchor = node->GetOutControlAnchor(); + if (out_ctrl_anchor != nullptr) { + for (const auto &next_in_ctrl_anchor : 
out_ctrl_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(GraphUtils::RemoveEdge(out_ctrl_anchor, next_in_ctrl_anchor) == GRAPH_SUCCESS, + return GRAPH_FAILED, "remove edge failed"); + } + } + return GRAPH_SUCCESS; +} + +graphStatus ComputeGraph::Verify() { + for (const auto &node_ptr : GetAllNodes()) { + GE_CHECK_NOTNULL(node_ptr); + GE_CHECK_NOTNULL(node_ptr->GetOpDesc()); + GE_CHK_BOOL_EXEC(node_ptr->GetOpDesc()->CommonVerify() == GRAPH_SUCCESS, return GRAPH_FAILED, + "Verifying %s failed.", node_ptr->GetName().c_str()); + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus ComputeGraph::InferOriginFormat() { + return ge::FormatRefiner::InferOrigineFormat(shared_from_this()); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus ComputeGraph::InferShapeInNeed() { + GE_CHK_BOOL_ONLY_LOG(TopologicalSorting() == GRAPH_SUCCESS, "Verifying failed."); + for (const auto &node_ptr : GetAllNodes()) { + GE_CHECK_NOTNULL(node_ptr); + auto op_desc = node_ptr->GetOpDesc(); + bool is_need_infer = false; + (void)ge::AttrUtils::GetBool(op_desc, NEED_INFER, is_need_infer); + if (is_need_infer) { + GE_CHK_BOOL_EXEC(node_ptr->Verify() == GRAPH_SUCCESS, return GRAPH_FAILED, "Verifying %s failed.", + node_ptr->GetName().c_str()); + + graphStatus status = node_ptr->InferShapeAndType(); + GE_CHK_BOOL_EXEC_INFO(node_ptr->GetType() == kDataType || GRAPH_PARAM_INVALID != status, break, + "Op %s does not have the IMPLEMT_INFERFUNC definition," + " and subsequent operators no longer perform shape inference.", + node_ptr->GetName().c_str()); + GE_CHK_BOOL_EXEC(status == GRAPH_SUCCESS, return GRAPH_FAILED, "Inferring %s failed.", + node_ptr->GetName().c_str()); + + for (const auto &out_anchor : node_ptr->GetAllOutDataAnchors()) { + GE_CHECK_NOTNULL(out_anchor->GetOwnerNode()->GetOpDesc()); + auto output_tensor = out_anchor->GetOwnerNode()->GetOpDesc()->GetOutputDesc(out_anchor->GetIdx()); + ge::TensorUtils::SetRealDimCnt(output_tensor, 
output_tensor.GetShape().GetDims().size()); + (void)out_anchor->GetOwnerNode()->GetOpDesc()->UpdateOutputDesc(out_anchor->GetIdx(), output_tensor); + for (const auto &peer_anchor : out_anchor->GetPeerInDataAnchors()) { + (void)peer_anchor->GetOwnerNode()->GetOpDesc()->UpdateInputDesc(peer_anchor->GetIdx(), output_tensor); + } + } + } + } + return GRAPH_SUCCESS; +} + +ProtoAttrMapHelper ComputeGraph::MutableAttrMap() { return attrs_; } + +ConstProtoAttrMapHelper ComputeGraph::GetAttrMap() const { + return ConstProtoAttrMapHelper(attrs_.GetProtoOwner(), attrs_.GetProtoMsg()); +} + +const std::map &ComputeGraph::GetAllNodesInfo() const { return all_nodes_infos_; } + +void ComputeGraph::SetUserDefOutput(const std::string &output_name) { + if (output_name.empty()) { + return; + } + + vector nodes = StringUtils::Split(output_name, ';'); + for (string node : nodes) { + vector item = StringUtils::Split(node, ':'); + if (item.size() != OUTPUT_PARAM_SIZE) { + GELOGW("invalid output param!input:%s", output_name.c_str()); + continue; + } + + int32_t index; + try { + index = stoi(StringUtils::Trim(item[1])); + } catch (const std::out_of_range &) { + GELOGW("outputname cause out of range execption!output_name:%s", output_name.c_str()); + continue; + } catch (const std::invalid_argument &) { + GELOGW("outputname cause invalid argument!output_name:%s", output_name.c_str()); + continue; + } catch (...) { + GELOGW("stoi fail! 
output_name:%s", output_name.c_str()); + continue; + } + auto iter = out_nodes_map_.find(item[0]); + if (iter == out_nodes_map_.end()) { + out_nodes_map_[item[0]] = std::vector(1, index); + } else { + auto idx_iter = std::find(iter->second.begin(), iter->second.end(), index); + if (idx_iter == iter->second.end()) { + iter->second.push_back(index); + } + } + } +} + +const std::string ComputeGraph::GetOutput() { + static const int resultDefaultSize = 2048; + string result; + result.reserve(resultDefaultSize); + auto iter = out_nodes_map_.begin(); + while (iter != out_nodes_map_.end()) { + auto idxes = iter->second; + for (auto idx : idxes) { + (void)result.append(iter->first).append(":").append(std::to_string(idx)).append(";"); + } + ++iter; + } + + return result.substr(0, result.length() - 1); +} +} // namespace ge diff --git a/src/common/graph/debug/ge_log.h b/src/common/graph/debug/ge_log.h new file mode 100644 index 00000000..a72b5886 --- /dev/null +++ b/src/common/graph/debug/ge_log.h @@ -0,0 +1,181 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COMMON_GRAPH_DEBUG_GE_LOG_H_ +#define COMMON_GRAPH_DEBUG_GE_LOG_H_ + +#include "graph/ge_error_codes.h" +#include "toolchain/slog.h" +#include "framework/common/debug/ge_log.h" + +#define GE_MOD_ID GE + +#ifdef _MSC_VER +#define FUNC_NAME __FUNCTION__ +#else +#define FUNC_NAME __PRETTY_FUNCTION__ +#endif + +#define D_GE_LOGE(fmt, ...) 
\ + dlog_error(static_cast(GE_MOD_ID), "%s:" fmt, __FUNCTION__, ##__VA_ARGS__) + +#define GE_LOGE(...) D_GE_LOGE(__VA_ARGS__) + +#define GE_LOGI_IF(condition, ...) \ + if ((condition)) { \ + GELOGI(__VA_ARGS__); \ + } + +#define GE_LOGW_IF(condition, ...) \ + if ((condition)) { \ + GELOGW(__VA_ARGS__); \ + } + +#define GE_LOGE_IF(condition, ...) \ + if ((condition)) { \ + GELOGE(ge::GRAPH_FAILED, __VA_ARGS__); \ + } + +#define GE_CHK_STATUS_RET_NOLOG(expr) \ + do { \ + const ge::graphStatus _status = (expr); \ + if (_status != ge::GRAPH_SUCCESS) { \ + return _status; \ + } \ + } while (0) + +#define GE_CHK_BOOL_RET_STATUS(expr, _status, ...) \ + do { \ + bool b = (expr); \ + if (!b) { \ + GELOGE(ge::GRAPH_FAILED, __VA_ARGS__); \ + return _status; \ + } \ + } while (0) + +#define GE_CHK_BOOL_EXEC_NOLOG(expr, exec_expr) \ + { \ + bool b = (expr); \ + if (!b) { \ + exec_expr; \ + } \ + } + +#define GE_IF_BOOL_EXEC(expr, exec_expr) \ + { \ + if (expr) { \ + exec_expr; \ + } \ + } + +#define GE_RETURN_WITH_LOG_IF_ERROR(expr, ...) \ + do { \ + const ge::graphStatus _status = (expr); \ + if (_status) { \ + GELOGE(ge::GRAPH_FAILED, __VA_ARGS__); \ + return _status; \ + } \ + } while (0) + +// If expr is true, the log is printed and a custom statement is executed +#define GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(expr, exec_expr, ...) \ + { \ + bool b = (expr); \ + if (b) { \ + GELOGE(ge::GRAPH_FAILED, __VA_ARGS__); \ + exec_expr; \ + } \ + } + +// Only check error log +#define GE_CHK_BOOL_ONLY_LOG(expr, ...) \ + do { \ + bool b = (expr); \ + if (!b) { \ + GELOGI(__VA_ARGS__); \ + } \ + } while (0) + +// If expr is not true, do not print the log and return the specified status +#define GE_CHK_BOOL_RET_STATUS_NOLOG(expr, _status, ...) \ + do { \ + bool b = (expr); \ + if (!b) { \ + return _status; \ + } \ + } while (0) + +// If expr is not true, the log is printed and a custom statement is executed +#define GE_CHK_BOOL_EXEC(expr, exec_expr, ...) 
\ + { \ + bool b = (expr); \ + if (!b) { \ + GELOGE(ge::GRAPH_FAILED, __VA_ARGS__); \ + exec_expr; \ + } \ + } + +// If expr is not true, the log is printed and a custom statement is executed +#define GE_CHK_BOOL_EXEC_INFO(expr, exec_expr, ...) \ + { \ + bool b = (expr); \ + if (!b) { \ + GELOGI(__VA_ARGS__); \ + exec_expr; \ + } \ + } + +// If expr is not true, the log is printed and a custom statement is executed +#define GE_CHK_BOOL_EXEC_DEBUG(expr, exec_expr, ...) \ + { \ + bool b = (expr); \ + if (!b) { \ + GELOGD(__VA_ARGS__); \ + exec_expr; \ + } \ + } + +// If expr is not GRAPH_SUCCESS, print the log and return the same value +#define GE_CHK_STATUS_RET(expr, ...) \ + do { \ + const ge::graphStatus _status = (expr); \ + if (_status != ge::GRAPH_SUCCESS) { \ + GELOGE(ge::GRAPH_FAILED, __VA_ARGS__); \ + return _status; \ + } \ + } while (0) + +#define GE_MAKE_SHARED(exec_expr0, exec_expr1) \ + try { \ + exec_expr0; \ + } catch (...) { \ + GELOGE(ge::GRAPH_FAILED, "Make shared failed"); \ + exec_expr1; \ + } + +/// CCE related macro definition +/// If expr is not CC_STATUS_GRAPH_SUCCESS, print the log and return +#define GE_CHK_CCE_RET(expr) \ + do { \ + ccgraphStatus_t _cc_ret = (expr); \ + if (_cc_ret != CC_STATUS_GRAPH_SUCCESS) { \ + GELOGE(ge::GRAPH_FAILED, "Call cce api failed, ret: 0x%X", _cc_ret); \ + return ge::GRAPH_FAILED; \ + } \ + } while (0) + +#endif // COMMON_GRAPH_DEBUG_GE_LOG_H_ + diff --git a/src/common/graph/debug/ge_op_types.h b/src/common/graph/debug/ge_op_types.h new file mode 100644 index 00000000..3905ed0e --- /dev/null +++ b/src/common/graph/debug/ge_op_types.h @@ -0,0 +1,253 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COMMON_GRAPH_DEBUG_GE_OP_TYPES_H_ +#define COMMON_GRAPH_DEBUG_GE_OP_TYPES_H_ + +#include +#include +#include +#include +#include +#include +#include + +namespace ge { +#define GE_REGISTER_OPTYPE(var_name, str_name) static const char *var_name __attribute__((unused)) = str_name + +GE_REGISTER_OPTYPE(DATA, "Data"); +GE_REGISTER_OPTYPE(AIPPDATA, "AippData"); +GE_REGISTER_OPTYPE(CONVOLUTION, "Convolution"); +GE_REGISTER_OPTYPE(CORRELATION, "Correlation"); +GE_REGISTER_OPTYPE(CORRELATIONV2, "Correlation_V2"); +GE_REGISTER_OPTYPE(DECONVOLUTION, "Deconvolution"); +GE_REGISTER_OPTYPE(POOLING, "Pooling"); +GE_REGISTER_OPTYPE(ELTWISE, "Eltwise"); +GE_REGISTER_OPTYPE(RELU, "ReLU"); +GE_REGISTER_OPTYPE(RELU6, "ReLU6"); +GE_REGISTER_OPTYPE(SIGMOID, "Sigmoid"); +GE_REGISTER_OPTYPE(ABSVAL, "AbsVal"); +GE_REGISTER_OPTYPE(TANH, "TanH"); +GE_REGISTER_OPTYPE(PRELU, "PReLU"); +GE_REGISTER_OPTYPE(BATCHNORM, "BatchNorm"); +GE_REGISTER_OPTYPE(FUSIONBATCHNORM, "FusionBatchNorm"); +GE_REGISTER_OPTYPE(SCALE, "Scale"); +GE_REGISTER_OPTYPE(FULL_CONNECTION, "FullConnection"); +GE_REGISTER_OPTYPE(SOFTMAX, "Softmax"); +GE_REGISTER_OPTYPE(PLUS, "Plus"); +GE_REGISTER_OPTYPE(ACTIVATION, "Activation"); +GE_REGISTER_OPTYPE(FLATTEN, "Flatten"); +GE_REGISTER_OPTYPE(ADD, "Add"); +GE_REGISTER_OPTYPE(SUB, "Sub"); +GE_REGISTER_OPTYPE(MUL, "Mul"); +GE_REGISTER_OPTYPE(MATMUL, "MatMul"); +GE_REGISTER_OPTYPE(RSQRT, "Rsqrt"); +GE_REGISTER_OPTYPE(BIASADD, "BiasAdd"); +GE_REGISTER_OPTYPE(RESHAPE, "Reshape"); +GE_REGISTER_OPTYPE(DEPCONVOLUTION, "ConvolutionDepthwise"); 
// Operator-type registrations: each GE_REGISTER_OPTYPE(VAR, STR) defines a
// file-local `static const char *VAR = STR` constant (see macro definition
// above) mapping the symbolic name to its canonical operator-type string.
GE_REGISTER_OPTYPE(DROPOUT, "Dropout");
GE_REGISTER_OPTYPE(CONCAT, "Concat");
GE_REGISTER_OPTYPE(ROIPOOLING, "ROIPooling");
GE_REGISTER_OPTYPE(PROPOSAL, "Proposal");
GE_REGISTER_OPTYPE(FSRDETECTIONOUTPUT, "FSRDetectionOutput");
// NOTE(review): type string "Detectpostprocess" looks inconsistent with the
// other CamelCase names — confirm it matches what producers emit before changing.
GE_REGISTER_OPTYPE(DETECTIONPOSTPROCESS, "Detectpostprocess");
GE_REGISTER_OPTYPE(LRN, "LRN");
GE_REGISTER_OPTYPE(TRANSDATA, "TransData");
GE_REGISTER_OPTYPE(PERMUTE, "Permute");
GE_REGISTER_OPTYPE(SSDNORMALIZE, "SSDNormalize");
GE_REGISTER_OPTYPE(SSDPRIORBOX, "SSDPriorBox");
GE_REGISTER_OPTYPE(NETOUTPUT, "NetOutput");
GE_REGISTER_OPTYPE(SSDDETECTIONOUTPUT, "SSDDetectionOutput");
GE_REGISTER_OPTYPE(CHANNELAXPY, "ChannelAxpy");
GE_REGISTER_OPTYPE(PSROIPOOLING, "PSROIPooling");
GE_REGISTER_OPTYPE(POWER, "Power");
GE_REGISTER_OPTYPE(ROIALIGN, "ROIAlign");
GE_REGISTER_OPTYPE(PYTHON, "Python");
GE_REGISTER_OPTYPE(FREESPACEEXTRACT, "FreespaceExtract");
GE_REGISTER_OPTYPE(SPATIALTF, "SpatialTransform");
GE_REGISTER_OPTYPE(SHAPE, "Shape");
GE_REGISTER_OPTYPE(ARGMAX, "ArgMax");
GE_REGISTER_OPTYPE(GATHERND, "GatherNd");
GE_REGISTER_OPTYPE(GATHER, "Gather");
GE_REGISTER_OPTYPE(REALDIV, "RealDiv");
GE_REGISTER_OPTYPE(PACK, "Pack");
GE_REGISTER_OPTYPE(SLICE, "Slice");
GE_REGISTER_OPTYPE(FLOORDIV, "FloorDiv");
GE_REGISTER_OPTYPE(SQUEEZE, "Squeeze");
GE_REGISTER_OPTYPE(STRIDEDSLICE, "StridedSlice");
GE_REGISTER_OPTYPE(RANGE, "Range");
GE_REGISTER_OPTYPE(RPNPROPOSALS, "GenerateRpnProposals");
GE_REGISTER_OPTYPE(DECODEBBOX, "DecodeBBox");
GE_REGISTER_OPTYPE(PAD, "Pad");
GE_REGISTER_OPTYPE(TILE, "Tile");
GE_REGISTER_OPTYPE(SIZE, "Size");
GE_REGISTER_OPTYPE(CLIPBOXES, "Clipboxes");
GE_REGISTER_OPTYPE(FASTRCNNPREDICTIONS, "FastrcnnPredictions");
GE_REGISTER_OPTYPE(SPLIT, "Split");
GE_REGISTER_OPTYPE(EXPANDDIMS, "ExpandDims");
GE_REGISTER_OPTYPE(MEAN, "Mean");
GE_REGISTER_OPTYPE(GREATER, "Greater");
GE_REGISTER_OPTYPE(SWITCH, "Switch");
GE_REGISTER_OPTYPE(REFSWITCH, "RefSwitch");
GE_REGISTER_OPTYPE(MERGE, "Merge");
// Continuation of the operator-type registrations: each entry defines a
// file-local string constant for one operator type (see GE_REGISTER_OPTYPE above).
GE_REGISTER_OPTYPE(REFMERGE, "RefMerge");
GE_REGISTER_OPTYPE(ENTER, "Enter");
GE_REGISTER_OPTYPE(REFENTER, "RefEnter");
GE_REGISTER_OPTYPE(LOOPCOND, "LoopCond");
GE_REGISTER_OPTYPE(NEXTITERATION, "NextIteration");
GE_REGISTER_OPTYPE(REFNEXTITERATION, "RefNextIteration");
GE_REGISTER_OPTYPE(EXIT, "Exit");
GE_REGISTER_OPTYPE(REFEXIT, "RefExit");
GE_REGISTER_OPTYPE(CONTROLTRIGGER, "ControlTrigger");
GE_REGISTER_OPTYPE(TRANSPOSE, "Transpose");
GE_REGISTER_OPTYPE(CAST, "Cast");
GE_REGISTER_OPTYPE(REGION, "Region");
GE_REGISTER_OPTYPE(YOLO, "Yolo");
GE_REGISTER_OPTYPE(YOLODETECTIONOUTPUT, "YoloDetectionOutput");
GE_REGISTER_OPTYPE(FILL, "Fill");
GE_REGISTER_OPTYPE(REVERSE, "Reverse");
GE_REGISTER_OPTYPE(UNPACK, "Unpack");
GE_REGISTER_OPTYPE(YOLO2REORG, "Yolo2Reorg");
GE_REGISTER_OPTYPE(REDUCESUM, "ReduceSum");
GE_REGISTER_OPTYPE(CONSTANT, "Const");
GE_REGISTER_OPTYPE(RESIZEBILINEAR, "ResizeBilinear");
GE_REGISTER_OPTYPE(MAXIMUM, "Maximum");
GE_REGISTER_OPTYPE(FRAMEWORKOP, "FrameworkOp");
GE_REGISTER_OPTYPE(ARG, "_Arg");
GE_REGISTER_OPTYPE(FUSEDBATCHNORMGRAD, "FusedBatchNormGrad");
GE_REGISTER_OPTYPE(LSTM, "LSTM");
GE_REGISTER_OPTYPE(HIGHWAY, "HighWay");
GE_REGISTER_OPTYPE(RNN, "RNN");
GE_REGISTER_OPTYPE(ATTENTIONDECODER, "AttentionDecoder");
GE_REGISTER_OPTYPE(LOGICAL_NOT, "LogicalNot");
GE_REGISTER_OPTYPE(LOGICAL_AND, "LogicalAnd");
GE_REGISTER_OPTYPE(EQUAL, "Equal");
GE_REGISTER_OPTYPE(INTERP, "Interp");
GE_REGISTER_OPTYPE(SHUFFLECHANNEL, "ShuffleChannel");
GE_REGISTER_OPTYPE(AIPP, "Aipp");

GE_REGISTER_OPTYPE(CROPANDRESIZE, "CropAndResize");
GE_REGISTER_OPTYPE(UNUSEDCONST, "UnusedConst");
GE_REGISTER_OPTYPE(BROADCASTGRADIENTARGS, "BroadcastGradientArgs");
GE_REGISTER_OPTYPE(BROADCASTARGS, "BroadcastArgs");
GE_REGISTER_OPTYPE(STOPGRADIENT, "StopGradient");
// NOTE(review): constant name PPREVENTGRADIENT has a doubled leading 'P'
// (string is "PreventGradient") — likely a typo, but renaming would break any
// out-of-view users of the constant; confirm before changing.
GE_REGISTER_OPTYPE(PPREVENTGRADIENT, "PreventGradient");
GE_REGISTER_OPTYPE(GUARANTEECONST, "GuaranteeConst");
GE_REGISTER_OPTYPE(SPARSETODENSE, "SparseToDense");
+GE_REGISTER_OPTYPE(NONMAXSUPPRESSION, "NonMaxSuppression"); +GE_REGISTER_OPTYPE(TOPKV2, "TopKV2"); +GE_REGISTER_OPTYPE(INVERTPERMUTATION, "InvertPermutation"); +GE_REGISTER_OPTYPE(MULTINOMIAL, "Multinomial"); +GE_REGISTER_OPTYPE(REVERSESEQUENCE, "ReverseSequence"); +GE_REGISTER_OPTYPE(GETNEXT, "GetNext"); +GE_REGISTER_OPTYPE(INITDATA, "InitData"); + +// ANN specific operator +GE_REGISTER_OPTYPE(ANN_MEAN, "AnnMean"); +GE_REGISTER_OPTYPE(ANN_CONVOLUTION, "AnnConvolution"); +GE_REGISTER_OPTYPE(ANN_DEPCONVOLUTION, "AnnDepthConv"); +GE_REGISTER_OPTYPE(DIV, "Div"); +GE_REGISTER_OPTYPE(ANN_FULLCONNECTION, "AnnFullConnection"); +GE_REGISTER_OPTYPE(ANN_NETOUTPUT, "AnnNetOutput"); +GE_REGISTER_OPTYPE(ANN_DATA, "AnnData"); + +// Training operator +GE_REGISTER_OPTYPE(CONVGRADFILTER, "Conv2DBackpropFilter"); +GE_REGISTER_OPTYPE(CONV2D, "Conv2D"); +GE_REGISTER_OPTYPE(CONV2DBACKPROPINPUT, "Conv2DBackpropInput"); +GE_REGISTER_OPTYPE(ACTIVATIONGRAD, "ReluGrad"); +GE_REGISTER_OPTYPE(CONSTANTOP, "Constant"); +GE_REGISTER_OPTYPE(AVGPOOLGRAD, "AvgPoolGrad"); +GE_REGISTER_OPTYPE(SQUARE, "Square"); +GE_REGISTER_OPTYPE(PLACEHOLDER, "PlaceHolder"); +GE_REGISTER_OPTYPE(END, "End"); +GE_REGISTER_OPTYPE(VARIABLE, "Variable"); + +/// @ingroup domi_omg +/// @brief INPUT node type +static const char* const kInputType = "Input"; + +/// +/// @ingroup domi_omg +/// @brief AIPP tag, tag for aipp conv operator +/// +static const char* const kAippConvFlag = "Aipp_Conv_Flag"; + +/// +/// @ingroup domi_omg +/// @brief AIPP tag, tag for aipp data operator +/// +static const char* const kAippDataFlag = "Aipp_Data_Flag"; + +/// +/// @ingroup domi_omg +/// @brief AIPP tag, tag for aipp data operator +/// +static const char* const kAippDataType = "AippData"; + +/// +/// @ingroup domi_omg +/// @brief DATA node type +/// +static const char* const kDataType = "Data"; + +/// +/// @ingroup domi_omg +/// @brief Frame operator type +/// +static const char* const kFrameworkOpType = "FrameworkOp"; + +/// +/// 
@ingroup domi_omg +/// @brief Data node type +/// +static const char* const kAnnDataType = "AnnData"; +static const char* const kAnnNetoutputType = "AnnNetOutput"; +/// +/// @ingroup domi_omg +/// @brief Convolution node type +/// +static const char* const kNodeNameNetOutput = "Node_Output"; + +/// +/// @ingroup domi_omg +/// @brief RECV node type +/// +static const char* const kRecvType = "Recv"; + +/// +/// @ingroup domi_omg +/// @brief SEND node type +/// +static const char* const kSendType = "Send"; + +/// +/// @ingroup domi_omg +/// @brief Convolution node type +/// +static const char* const kOpTypeConvolution = "Convolution"; +/// +/// @ingroup domi_omg +/// @brief Add convolution node name to hard AIPP +/// +static const char* const kAippConvOpNmae = "aipp_conv_op"; +/// +/// @ingroup domi_omg +/// @brief Operator configuration item separator +/// +static const char* const kOpConfDelimiter = ":"; +}; // namespace ge +#endif // COMMON_GRAPH_DEBUG_GE_OP_TYPES_H_ diff --git a/src/common/graph/debug/ge_util.h b/src/common/graph/debug/ge_util.h new file mode 100644 index 00000000..8a64014e --- /dev/null +++ b/src/common/graph/debug/ge_util.h @@ -0,0 +1,274 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef COMMON_GRAPH_DEBUG_GE_UTIL_H_ +#define COMMON_GRAPH_DEBUG_GE_UTIL_H_ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_log.h" +#include "graph/ge_error_codes.h" + +#if !defined(__ANDROID__) && !defined(ANDROID) +#define GE_DYNAMIC_CAST dynamic_cast +#define GE_DYNAMIC_POINTER_CAST std::dynamic_pointer_cast +#else +#define GE_DYNAMIC_CAST static_cast +#define GE_DYNAMIC_POINTER_CAST std::static_pointer_cast +#endif + +#define GE_RETURN_IF_ERROR(expr) \ + do { \ + const ::ge::optStatus _status = (expr); \ + if (_status) return _status; \ + } while (0) + +#define GE_RETURN_WITH_LOG_IF_INFO(expr, ...) \ + do { \ + const ::ge::optStatus _status = (expr); \ + if (_status) { \ + GELOGI(__VA_ARGS__); \ + return _status; \ + } \ + } while (0) + +// Verify whether the parameter is true. If yes, return graph failed and record the error log +#define GE_RETURN_WITH_LOG_IF_TRUE(condition, ...) \ + do { \ + if (condition) { \ + GELOGE(ge::GRAPH_FAILED, __VA_ARGS__); \ + return ge::GRAPH_FAILED; \ + } \ + } while (0) + +// Verify whether the parameter is false. If yes, return graph failed and record the error log +#define GE_RETURN_WITH_LOG_IF_FALSE(condition, ...) \ + do { \ + bool _condition = (condition); \ + if (!_condition) { \ + GELOGE(ge::GRAPH_FAILED, __VA_ARGS__); \ + return ge::GRAPH_FAILED; \ + } \ + } while (0) + +// Verify whether the parameter is true. If yes, return GRAPH_PARAM_INVALID and record the error log +#define GE_RT_PARAM_INVALID_WITH_LOG_IF_TRUE(condition, ...) \ + do { \ + if (condition) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, __VA_ARGS__); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +// Verify whether the parameter is false. If yes, return GRAPH_PARAM_INVALID and record the error log +#define GE_RT_PARAM_INVALID_WITH_LOG_IF_FALSE(condition, ...) 
\ + do { \ + bool _condition = (condition); \ + if (!_condition) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, __VA_ARGS__); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +// Verify whether the parameter is null. If yes, return GRAPH_PARAM_INVALID and record the error log +#define GE_CHECK_NOTNULL(val) \ + do { \ + if (val == nullptr) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] must not be null.", #val); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +// Verify whether the parameter is null. If yes, return GRAPH_PARAM_INVALID and record the error log +#define GE_CHECK_NOTNULL_EXEC(val, expr) \ + do { \ + if (val == nullptr) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] must not be null.", #val); \ + expr; \ + } \ + } while (0) + +// Verify whether the parameter is null. If yes, return false and record the error log +#define GE_RT_FALSE_CHECK_NOTNULL(val) \ + do { \ + if (val == nullptr) { \ + GELOGE(ge::GRAPH_FAILED, "param[%s] must not be null.", #val); \ + return false; \ + } \ + } while (0) + +// Check whether the parameter is out of range +#define GE_CHECK_SIZE(size) \ + do { \ + if (size == 0) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] is out of range", #size); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +/// +/// @ingroup GE_common +/// +#define GE_DEFINE_BYTE_SIZE(_var_name, _expr, _sizeof) \ + uint32_t _var_name; \ + do { \ + uint32_t _expr_size = (_expr); \ + uint32_t _sizeof_size = (_sizeof); \ + if (_expr_size > (0xffffffff) / _sizeof_size) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "byte size : %s is out of range", #_var_name); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + _var_name = _sizeof_size * _expr_size; \ + } while (0); + +// Check whether the container is empty +#define GE_CHECK_VECTOR_NOT_EMPTY(vector) \ + do { \ + if (vector.empty()) { \ + GELOGE(ge::GRAPH_FAILED, "param[#vector] is empty", #vector); \ + return ge::GRAPH_FAILED; \ + } \ + } while (0) + +// Check whether the container is empty and 
return the specified status code +#define GE_CHECK_VECTOR_NOT_EMPTY_RET_STATUS(vector, _status) \ + do { \ + if (vector.empty()) { \ + GELOGE(_status, "param[%s] is empty", #vector); \ + return _status; \ + } \ + } while (0) + +/// +/// @ingroup GE_common +/// @brief This macro provides the ability to disable copying constructors and assignment operators. +/// It is usually placed under private +/// +#define GE_DISALLOW_COPY_AND_ASSIGN(TypeName) \ + TypeName(const TypeName &) = delete; \ + void operator=(const TypeName &) = delete + +/// Check whether the size is 0 or out of range +/// @param:size:Size to be verified +#define GE_CHECK_SIZE_RANGE(size) \ + do { \ + if (size == 0 || size >= UINT_MAX / 4) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] is out of range", #size); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +#define GE_CHECK_SHORT_SIZE_RANGE(size) \ + do { \ + if (size == 0 || size >= UINT_MAX / 2) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] is out of range", #size); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +#define GE_CHECK_POSITIVE_SIZE_RANGE(size) \ + do { \ + if (size <= 0) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] is not a positive number", #size); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +#define GE_CHECK_POSITIVE_SHORT_SIZE_RANGE(size) \ + do { \ + if (size <= 0 || size == 0 || size >= UINT_MAX / 4) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] is out of range", #size); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +// Verify that the value on the left is greater than or equal to the value on the right +#define GE_CHECK_GE(lhs, rhs) \ + do { \ + if (lhs < rhs) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] is less than[%s]", #lhs, #rhs); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +// Check whether the parameters are equal +#define GE_CHECK_EQ(val1, val2) \ + do { \ + if (val1 != val2) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] is not equals 
to[%s]", #val1, #val2); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +// Verify that the value on the left is less than or equal to the value on the right +#define GE_CHECK_LE(lhs, rhs) \ + do { \ + if (lhs > rhs) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, "param[%s] is greater than[%s]", #lhs, #rhs); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +// Check whether the parameters are equal +#define GE_CHECK_EQ_WITH_LOG(val1, val2, ...) \ + do { \ + if (val1 != val2) { \ + GELOGE(ge::GRAPH_PARAM_INVALID, __VA_ARGS__); \ + return ge::GRAPH_PARAM_INVALID; \ + } \ + } while (0) + +// If expr is false, the custom statement is executed +#define CHECK_FALSE_EXEC(expr, exec_expr, ...) \ + do { \ + bool b = (expr); \ + if (!b) { \ + exec_expr; \ + } \ + } while (0) + +#define GE_DELETE_NEW_SINGLE(var) \ + do { \ + if (var != nullptr) { \ + delete var; \ + var = nullptr; \ + } \ + } while (0) + +#define GE_DELETE_NEW_ARRAY(var) \ + do { \ + if (var != nullptr) { \ + delete[] var; \ + var = nullptr; \ + } \ + } while (0) + +template +static inline std::shared_ptr ComGraphMakeShared(Args &&... args) { + using T_nc = typename std::remove_const::type; + std::shared_ptr ret(new (std::nothrow) T_nc(std::forward(args)...)); + return ret; +} + +#endif // COMMON_GRAPH_DEBUG_GE_UTIL_H_ diff --git a/src/common/graph/debug/graph_debug.cc b/src/common/graph/debug/graph_debug.cc new file mode 100644 index 00000000..6cc5e0ca --- /dev/null +++ b/src/common/graph/debug/graph_debug.cc @@ -0,0 +1,248 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/debug/graph_debug.h" + +#include +#include +#include +#include "debug/ge_util.h" + +#include "framework/common/debug/ge_log.h" + +#define TAB " " +#define STR_FMT(str) (" \"" + std::string(str) + "\" ") +#define INPUT_ANCHOR_PORT(name) ("__input__" + (name)) +#define OUTPUT_ANCHOR_PORT(name) ("__output__" + (name)) + +namespace ge { +std::unordered_set control_anchor; +std::vector types = { + "DT_FLOAT", "DT_FLOAT16", "DT_INT8", "DT_INT32", "DT_UINT8", "", + "DT_INT16", "DT_UINT16", "DT_UINT32", "DT_INT64", "DT_UINT64", "DT_DOUBLE", + "DT_BOOL", "DT_DUAL", "DT_DUAL_SUB_INT8", "DT_DUAL_SUB_UINT8", "DT_UNDEFINED"}; + +std::vector formats = {"FORMAT_NCHW", + "FORMAT_NHWC", + "FORMAT_ND", + "FORMAT_NC1HWC0", + "FORMAT_FRACTAL_Z", + "FORMAT_NC1C0HWPAD", + "FORMAT_NHWC1C0", + "FORMAT_FSR_NCHW", + "FORMAT_FRACTAL_DECONV", + "FORMAT_C1HWNC0", + "FORMAT_FRACTAL_DECONV_TRANSPOSE", + "FORMAT_FRACTAL_DECONV_SP_STRIDE_TRANS", + "FORMAT_NC1HWC0_C04", + "FORMAT_FRACTAL_Z_C04", + "FORMAT_CHWN", + "FORMAT_FRACTAL_DECONV_SP_STRIDE8_TRANS", + "FORMAT_HWCN", + "FORMAT_NC1KHKWHWC0", + "FORMAT_BN_WEIGHT", + "FORMAT_FILTER_HWCK", + "FORMAT_HASHTABLE_LOOKUP_LOOKUPS", + "FORMAT_HASHTABLE_LOOKUP_KEYS", + "FORMAT_HASHTABLE_LOOKUP_VALUE", + "FORMAT_HASHTABLE_LOOKUP_OUTPUT", + "FORMAT_HASHTABLE_LOOKUP_HITS", + "FORMAT_RESERVED"}; + +std::vector data_nodes = {"Const", "Data"}; + +void GraphDebugPrinter::DumpNodeToDot(const NodePtr node, std::ostringstream &out_) { + if (node == nullptr) { + GELOGI("Some nodes are null."); + return; + } + + bool 
in_control = false; + auto name = node->GetName(); + out_ << TAB << STR_FMT(name); + auto input_cnt = std::max(static_cast(1), node->GetAllInDataAnchors().size()); + auto output_cnt = std::max(static_cast(1), node->GetAllOutDataAnchors().size()); + if (control_anchor.find(node->GetName()) != control_anchor.end()) { + input_cnt++; + in_control = true; + } + auto max_col = input_cnt * output_cnt; + out_ << "[\n"; + if (find(data_nodes.begin(), data_nodes.end(), node->GetType()) != data_nodes.end()) { + out_ << TAB << TAB << "shape=plaintext, color=goldenrod\n"; + } else { + out_ << TAB << TAB << "shape=plaintext, color=deepskyblue\n"; + } + out_ << TAB << TAB << "label=<\n"; + out_ << TAB << TAB << R"(" << std::endl; + + auto input_anchors = node->GetAllInDataAnchors(); + auto op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL_EXEC(op_desc, return); + if (!input_anchors.empty()) { + out_ << TAB << TAB << ""; + } + for (const auto &anchor : input_anchors) { + string anchor_text = op_desc->GetInputNameByIndex(anchor->GetIdx()); + + out_ << ""; + } + if (in_control) { + string anchor_text = "ctrl"; + out_ << ""; + } + if (!input_anchors.empty()) { + out_ << "\n"; + } + // Node type + out_ << TAB << TAB << "\n"; + // Output + auto output_anchors = node->GetAllOutDataAnchors(); + if (!output_anchors.empty()) { + out_ << TAB << TAB << ""; + } + for (const auto &anchor : output_anchors) { + string anchor_text = op_desc->GetOutputNameByIndex(anchor->GetIdx()); + + out_ << ""; + } + + if (!output_anchors.empty()) { + out_ << "\n"; + } + out_ << TAB << TAB << "
" + << anchor_text << "" + << anchor_text << "
" + << "" << node->GetType() << "
" + << anchor_text << "
\n" << TAB << ">];\n"; +} + +void GraphDebugPrinter::DumpEdgeToDot(const NodePtr node, std::ostringstream &out_, uint32_t flag) { + if (node == nullptr) { + GELOGI("Some nodes are null."); + return; + } + auto all_out_anchor = node->GetAllOutDataAnchors(); + auto op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL_EXEC(op_desc, return); + for (const auto &anchor : all_out_anchor) { + auto src_anchor = anchor; + auto src_node_name = node->GetName(); + auto src_anchor_index = op_desc->GetOutputNameByIndex(static_cast(src_anchor->GetIdx())); + auto des_anchors = anchor->GetPeerAnchors(); + for (const auto &peer_in_anchor : des_anchors) { + auto in_data_anchor = Anchor::DynamicAnchorCast(peer_in_anchor); + std::string dst_node_name; + out_ << TAB << STR_FMT(src_node_name); + out_ << ":" << OUTPUT_ANCHOR_PORT(src_anchor_index); + auto op = peer_in_anchor->GetOwnerNode()->GetOpDesc(); + GE_CHECK_NOTNULL_EXEC(op, continue); + if (in_data_anchor != nullptr) { + dst_node_name = in_data_anchor->GetOwnerNode()->GetName(); + string des_anchor_index = op->GetInputNameByIndex(static_cast(in_data_anchor->GetIdx())); + out_ << " -> " << STR_FMT(dst_node_name); + out_ << ":" << INPUT_ANCHOR_PORT(des_anchor_index); + out_ << "["; + } + auto in_control_anchor = Anchor::DynamicAnchorCast(peer_in_anchor); + if (in_control_anchor != nullptr) { + dst_node_name = in_control_anchor->GetOwnerNode()->GetName(); + string des_anchor_index = "ctrl"; + out_ << " -> " << STR_FMT(dst_node_name); + out_ << ":" << INPUT_ANCHOR_PORT(des_anchor_index); + out_ << "["; + out_ << " style=dashed "; + } + if (flag != DOT_NOT_SHOW_EDGE_LABEL && in_data_anchor) { + string label; + auto src_ops = src_anchor->GetOwnerNode()->GetOpDesc(); + GE_CHECK_NOTNULL_EXEC(src_ops, return); + auto src_shape = src_ops->GetOutputDesc(src_anchor->GetIdx()).GetShape(); + auto dim = src_shape.GetDims(); + std::ostringstream tensor_info; + if (dim.size() > 0) { + for (unsigned int i = 0; i < dim.size(); i++) { + if (i != dim.size() 
- 1) { + tensor_info << dim[i] << "x"; + } else { + tensor_info << dim[i]; + } + } + } else { + tensor_info << "?"; + } + auto src_tensor_desc = src_ops->GetOutputDescPtr(src_anchor->GetIdx()); + GE_CHECK_NOTNULL_EXEC(src_tensor_desc, return); + auto format = src_tensor_desc->GetFormat(); + auto datatype = src_tensor_desc->GetDataType(); + tensor_info << " : " << formats[format] << " : " << types[datatype]; + label = tensor_info.str(); + out_ << "label=" << STR_FMT(label); + } + out_ << "]" << std::endl; + } + } +} + +graphStatus GraphDebugPrinter::DumpGraphDotFile(const Graph &graph, const std::string &output_dot_file_name, + uint32_t flag) { + auto compute_graph = GraphUtils::GetComputeGraph(graph); + if (compute_graph == nullptr) { + GELOGI("Compute graph is NULL ."); + return GRAPH_SUCCESS; + } + return DumpGraphDotFile(compute_graph, output_dot_file_name, flag); +} + +graphStatus GraphDebugPrinter::DumpGraphDotFile(const ComputeGraphPtr graph, const std::string &output_dot_file_name, + uint32_t flag) { + if (graph == nullptr) { + GELOGI("graph is null."); + return GRAPH_SUCCESS; + } + std::ostringstream out_; + out_ << "digraph G{\n"; + out_ << TAB << R"(ratio=compress;size="8, 100")" << std::endl; + out_ << TAB << R"(node[fontname="Consolas"])" << std::endl; + out_ << TAB << R"(edge[fontsize = "8" fontname = "Consolas" color="dimgray" ])" << std::endl; + auto all_nodes = graph->GetAllNodes(); + for (const auto &node : all_nodes) { + for (const auto &temp : node->GetAllOutDataAnchors()) { + for (const auto &peer : temp->GetPeerAnchors()) { + auto temp_control_anchor = Anchor::DynamicAnchorCast(peer); + if (temp_control_anchor) { + (void)control_anchor.insert(peer->GetOwnerNode()->GetName()); + } + } + } + } + for (const auto &node : all_nodes) { + DumpNodeToDot(node, out_); + } + for (const auto &node : all_nodes) { + DumpEdgeToDot(node, out_, flag); + } + out_ << "}"; + std::ofstream output_file(output_dot_file_name); + if (output_file.is_open()) { + 
output_file << out_.str(); + } else { + GELOGW("%s open error.", output_dot_file_name.c_str()); + } + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/common/graph/debug/graph_debug.h b/src/common/graph/debug/graph_debug.h new file mode 100644 index 00000000..90548869 --- /dev/null +++ b/src/common/graph/debug/graph_debug.h @@ -0,0 +1,50 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COMMON_GRAPH_DEBUG_GRAPH_DEBUG_H_ +#define COMMON_GRAPH_DEBUG_GRAPH_DEBUG_H_ + +#include +#include +#include +#include +#include + +#include "external/graph/graph.h" +#include "./ge_error_codes.h" +#include "graph/compute_graph.h" +#include "graph/debug/ge_log.h" +#include "graph/node.h" +#include "utils/graph_utils.h" + +namespace ge { +enum DotFileFlag { + // Show nodes, edges, size, type and format + DOT_FLAG_DEFAULT = 0, + DOT_NOT_SHOW_EDGE_LABEL = 1, +}; +class GraphDebugPrinter { + public: + static graphStatus DumpGraphDotFile(const Graph &graph, const std::string &output_dot_file_name, + uint32_t flag = DOT_FLAG_DEFAULT); + static graphStatus DumpGraphDotFile(const ComputeGraphPtr graph, const std::string &output_dot_file_name, + uint32_t flag = DOT_FLAG_DEFAULT); + static void DumpNodeToDot(const NodePtr node, std::ostringstream &out_); + static void DumpEdgeToDot(const NodePtr node, std::ostringstream &out_, uint32_t flag = DOT_FLAG_DEFAULT); +}; +} // namespace ge + +#endif // 
COMMON_GRAPH_DEBUG_GRAPH_DEBUG_H_ diff --git a/src/common/graph/detail/attributes_holder.cc b/src/common/graph/detail/attributes_holder.cc new file mode 100644 index 00000000..e75d5d1a --- /dev/null +++ b/src/common/graph/detail/attributes_holder.cc @@ -0,0 +1,243 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "detail/attributes_holder.h" + +#include + +#include "debug/ge_log.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_attr_value.h" +#include "proto/ge_ir.pb.h" + +namespace ge { +using std::map; +using std::unordered_set; +void AttrHolder::CopyAttrsFrom(const AttrHolder &holder) { MutableAttrMap().CopyValueFrom(holder.GetAttrMap()); } +graphStatus AttrHolder::SetAttr(const std::string &name, const GeAttrValue &value) { + if (value.IsEmpty()) { + GELOGE(GRAPH_FAILED, "value is empty, key %s", name.c_str()); + return GRAPH_FAILED; + } + auto proto_map = MutableAttrMap().GetProtoMsg(); + auto proto_val = value.value_.GetProtoMsg(); + if (proto_map == nullptr || proto_val == nullptr) { + return GRAPH_FAILED; + } + auto it = proto_map->find(name); + if (it != proto_map->end()) { + if (it->second.value_case() != proto::AttrDef::VALUE_NOT_SET && + it->second.value_case() != proto_val->value_case()) { + return GRAPH_FAILED; + } + } + (*proto_map)[name] = *proto_val; + return GRAPH_SUCCESS; +} + +graphStatus 
AttrHolder::AddRequiredAttr(const std::string &name) { + if (HasAttr(name)) { + return GRAPH_FAILED; + } + requiredAttrs_.push_back(name); + return GRAPH_SUCCESS; +} + +graphStatus AttrHolder::GetAttr(const std::string &name, GeAttrValue &value) const { + auto proto_map = GetAttrMap().GetProtoMsg(); + auto proto_val = value.value_.GetProtoMsg(); + if (proto_map == nullptr || proto_val == nullptr) { + return GRAPH_FAILED; + } + auto it = proto_map->find(name); + if (it != proto_map->end()) { + *proto_val = it->second; + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +bool AttrHolder::HasAttr(const std::string &name) const { + auto proto_map = GetAttrMap().GetProtoMsg(); + if (proto_map != nullptr) { + if (proto_map->find(name) != proto_map->end()) { + return true; + } + } + return std::find(requiredAttrs_.begin(), requiredAttrs_.end(), name) != requiredAttrs_.end(); +} + +graphStatus AttrHolder::DelAttr(const std::string &name) { + auto proto_map = MutableAttrMap().GetProtoMsg(); + if (proto_map == nullptr) { + return GRAPH_FAILED; + } + auto it = proto_map->find(name); + if (it != proto_map->end()) { + (void)proto_map->erase(it); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +const std::map AttrHolder::GetAllAttrs() const { + std::map attr_value_map; + auto proto_map = GetAttrMap().GetProtoMsg(); + if (proto_map != nullptr) { + auto proto_owner = GetAttrMap().GetProtoOwner(); + GE_CHK_BOOL_EXEC(proto_owner != nullptr, return attr_value_map, "proto_owner is nullptr"); + for (const auto &it : *proto_map) { + attr_value_map[it.first] = GeAttrValue(proto_owner, const_cast(&it.second)); + } + } + return attr_value_map; +} + +const std::unordered_set AttrHolder::GetAllAttrNames() const { + std::unordered_set names; + auto proto_map = GetAttrMap().GetProtoMsg(); + if (proto_map != nullptr) { + for (const auto &it : *proto_map) { + (void)names.insert(it.first); + } + } + for (const string &it : requiredAttrs_) { + (void)names.insert(it); + } + return 
names; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::AttrDef make shared failed"); + return; + } + protoMsg_ = proto_owner.get(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::TensorDef make shared failed"); + return; + } + protoMsg_ = proto_owner.get(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::TensorDescriptor make shared failed"); + return; + } + protoMsg_ = proto_owner.get(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::ShapeDef make shared failed"); + return; + } + protoMsg_ = proto_owner.get(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::NamedAttrs make shared failed"); + return; + } + protoMsg_ = proto_owner.get(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::ModelDef make shared failed"); + return; + } + protoMsg_ = proto_owner.get(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::OpDef make shared 
failed"); + return; + } + protoMsg_ = proto_owner.get(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::GraphDef make shared failed"); + return; + } + protoMsg_ = proto_owner.get(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::TensorDescriptor make shared failed"); + return; + } + protoMsg_ = proto_owner->mutable_attr(); + protoOwner_ = proto_owner; +} + +template <> +void GeIrProtoHelper::InitDefault() { + std::shared_ptr proto_owner; + proto_owner = ComGraphMakeShared(); + if (proto_owner == nullptr) { + GELOGE(GRAPH_FAILED, "proto::TensorDescriptor make shared failed"); + return; + } + protoMsg_ = &proto_owner->attr(); + protoOwner_ = proto_owner; +} +} // namespace ge diff --git a/src/common/graph/format_refiner.cc b/src/common/graph/format_refiner.cc new file mode 100644 index 00000000..1b95b500 --- /dev/null +++ b/src/common/graph/format_refiner.cc @@ -0,0 +1,386 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/format_refiner.h" + +#include +#include +#include +#include +#include + +#include "./compute_graph.h" +#include "./ge_error_codes.h" +#include "./graph/ge_tensor.h" +#include "./operator.h" +#include "./operator_factory.h" +#include "debug/ge_log.h" +#include "debug/ge_op_types.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "utils/node_utils.h" +#include "utils/op_desc_utils.h" +#include "utils/tensor_utils.h" +#include "utils/type_utils.h" + +namespace ge { +namespace { +static const std::unordered_set kChangeDimNodes = {RESHAPE, PERMUTE, EXPANDDIMS, SQUEEZE}; +static bool net_format_is_nd = true; +static Format g_user_set_format = FORMAT_ND; +static bool is_first_infer = true; +} // namespace + +graphStatus FormatRefiner::RefreshConstantOutProcess(const OpDescPtr &op_desc) { + GE_CHECK_NOTNULL(op_desc); + if (op_desc->GetType() == CONSTANTOP && is_first_infer == true) { + ConstGeTensorPtr tensor_value; + if (!AttrUtils::GetTensor(op_desc, "value", tensor_value)) { + GELOGE(GRAPH_FAILED, "Get value failed, node name:%s.", op_desc->GetName().c_str()); + return GRAPH_FAILED; + } + GE_CHECK_NOTNULL(tensor_value); + (void)op_desc->UpdateOutputDesc(0, tensor_value->GetTensorDesc()); + } + return GRAPH_SUCCESS; +} +graphStatus FormatRefiner::GetAnchorPoints(const ge::ComputeGraphPtr &graph, std::vector &anchor_points, + std::vector &data_nodes, + std::unordered_map &node_status) { + if (graph == nullptr) { + GELOGE(GRAPH_FAILED, "input graph is null"); + return GRAPH_FAILED; + } + anchor_points.clear(); + // Get all anchor point nodes and switch nodes + for (const auto &node_ptr : graph->GetAllNodes()) { + if (node_ptr == nullptr) { + return GRAPH_FAILED; + } + auto op_desc = node_ptr->GetOpDesc(); + if (op_desc == nullptr) { + return GRAPH_FAILED; + } + graphStatus status = RefreshConstantOutProcess(op_desc); + if (status != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "refresh constant out process failed!"); + 
return GRAPH_FAILED; + } + // consider special node save process + // get all input desc format + bool node_is_all_nd = false; + for (uint32_t i = 0; i < static_cast(op_desc->GetInputsSize()); i++) { + auto input_desc = op_desc->GetInputDesc(i); + // Operator pre-set format but not origin format + auto input_format = input_desc.GetFormat(); + // Pre-save data node and default infer fail + if (node_ptr->GetType() == DATA) { + data_nodes.push_back(node_ptr); + } + if (input_format != FORMAT_ND && input_format != FORMAT_RESERVED) { + node_is_all_nd = true; + } + } + // Get all output desc format + for (uint32_t i = 0; i < static_cast(op_desc->GetOutputsSize()); i++) { + GeTensorDesc output_desc = op_desc->GetOutputDesc(i); + auto output_format = output_desc.GetFormat(); + if (output_format != FORMAT_ND && output_format != FORMAT_RESERVED) { + node_is_all_nd = true; + } + } + // check anchor point valid + if (!node_is_all_nd) { + continue; + } + GELOGD("Node[%s] is anchor point!", node_ptr->GetName().c_str()); + anchor_points.push_back(node_ptr); + } + GELOGI("anchor_points number is %zu", anchor_points.size()); + return GRAPH_SUCCESS; +} +graphStatus FormatRefiner::AnchorProcess(const ge::NodePtr &anchor_node, + std::unordered_map &node_status) { + if (anchor_node == nullptr) { + GELOGE(GRAPH_FAILED, "anchor node is null!"); + return GRAPH_FAILED; + } + std::deque nodes; + nodes.push_back(anchor_node); + while (!nodes.empty()) { + ge::NodePtr node = nodes.front(); + nodes.pop_front(); + graphStatus status = BackInferProcess(nodes, node, node_status); + if (status != GRAPH_SUCCESS && node != nullptr) { + GELOGE(status, "BackInferProcess failed!node name [%s]", node->GetName().c_str()); + return status; + } + status = ForwardInferProcess(nodes, node, node_status); + if (status != GRAPH_SUCCESS && node != nullptr) { + GELOGE(status, "ForwardInferProcess failed!node name [%s]", node->GetName().c_str()); + return status; + } + } + return GRAPH_SUCCESS; +} +graphStatus 
FormatRefiner::BackInferProcess(std::deque &nodes, ge::NodePtr &node, + std::unordered_map &node_status) { + GE_CHECK_NOTNULL(node); + GE_CHECK_NOTNULL(node->GetOpDesc()); + + GELOGD("Enter back infer process!Node is [%s]", (node->GetName()).c_str()); + for (const auto &in_anchor : node->GetAllInDataAnchors()) { + GELOGD("Node is [%s] [B]", (node->GetName()).c_str()); + auto in_data_anchor_idx = in_anchor->GetIdx(); + auto to_be_set_format = (node->GetOpDesc()->GetInputDesc(in_data_anchor_idx)).GetOriginFormat(); + if (to_be_set_format == FORMAT_ND) { + GELOGD("Node [%s] [B], format is ND", (node->GetName()).c_str()); + continue; + } + auto peer_out_data_anchor = in_anchor->GetPeerOutAnchor(); + if (peer_out_data_anchor == nullptr) { + GELOGW("Node[%s] %dth in data anchor's peer_out_anchor is null", (node->GetName()).c_str(), in_data_anchor_idx); + continue; + } + auto peer_out_data_node = peer_out_data_anchor->GetOwnerNode(); + if (peer_out_data_node == nullptr || peer_out_data_node->GetOpDesc() == nullptr) { + GELOGW("Node[%s]\'s peer_out_data_node or peer_out_data_node desc is null", (node->GetName()).c_str()); + continue; + } + // Check format whether have been set + int idx = peer_out_data_anchor->GetIdx(); + auto ge_tensor_desc = peer_out_data_node->GetOpDesc()->GetOutputDesc(idx); + if (ge_tensor_desc.GetOriginFormat() == FORMAT_ND) { + auto dim_num = ge_tensor_desc.GetShape().GetDimNum(); + if (dim_num == 0) { + GELOGI("node name:%s idx:%d out is scalar. stop back infer!", peer_out_data_node->GetName().c_str(), idx); + continue; + } + /// Check whether node to change dims () + /// Because some node will calculate with 5D, C dim maybe multi meaning + auto peer_out_data_node_type = peer_out_data_node->GetType(); + auto iter1 = kChangeDimNodes.find(peer_out_data_node_type); + // 4 means dims num + if ((iter1 != kChangeDimNodes.end()) && (dim_num < 4)) { + GELOGI("Node[%s] is change dim node and shape is smaller than 4. 
do not modify format", + (peer_out_data_node->GetName()).c_str()); + continue; + } + + ge_tensor_desc.SetOriginFormat(to_be_set_format); + ge_tensor_desc.SetFormat(to_be_set_format); + (void)peer_out_data_node->GetOpDesc()->UpdateOutputDesc(idx, ge_tensor_desc); + + // Call operator infer format api (forward) to get out format + GELOGD("call infer format func[Back]!Node is [%s] ", (peer_out_data_node->GetName()).c_str()); + graphStatus status = peer_out_data_node->InferOriginFormat(); + if (status != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Node[%s] infer format failed", (peer_out_data_node->GetName()).c_str()); + return GRAPH_FAILED; + } + nodes.push_back(peer_out_data_node); + } + } + return GRAPH_SUCCESS; +} +graphStatus FormatRefiner::ForwardInferProcess(std::deque &nodes, ge::NodePtr &node, + std::unordered_map &node_status) { + GE_CHECK_NOTNULL(node); + GE_CHECK_NOTNULL(node->GetOpDesc()); + GELOGD("Enter forward infer process!Node is [%s]", (node->GetName()).c_str()); + for (const auto &out_data_anchor : node->GetAllOutDataAnchors()) { + GELOGD("Node is [%s] [F]", (node->GetName()).c_str()); + GE_IF_BOOL_EXEC(out_data_anchor == nullptr, continue); + auto out_data_anchor_idx = out_data_anchor->GetIdx(); + auto to_be_set_format = (node->GetOpDesc()->GetOutputDesc(out_data_anchor_idx)).GetOriginFormat(); + if (to_be_set_format == FORMAT_ND) { + GELOGD("Node [%s] format is ND.[F]", (node->GetName()).c_str()); + continue; + } + for (const auto &peer_in_data_anchor : out_data_anchor->GetPeerInDataAnchors()) { + if (peer_in_data_anchor == nullptr) { + GELOGW("Node[%s] some peer_in_anchor is null", (node->GetName()).c_str()); + continue; + } + auto peer_in_data_node = peer_in_data_anchor->GetOwnerNode(); + if (peer_in_data_node == nullptr || peer_in_data_node->GetOpDesc() == nullptr) { + GELOGW("Node[%s] peer_in_data_node or peer_in_data_node desc is null", node->GetName().c_str()); + continue; + } + // Check format whether have been set + int idx = 
peer_in_data_anchor->GetIdx(); + auto ge_tensor_desc = peer_in_data_node->GetOpDesc()->GetInputDesc(idx); + if (ge_tensor_desc.GetOriginFormat() == FORMAT_ND) { + auto dim_num = ge_tensor_desc.GetShape().GetDimNum(); + if (dim_num == 0) { + GELOGI("node name:%s idx:%d in is scalar. stop forward infer!", peer_in_data_node->GetName().c_str(), idx); + continue; + } + /// Check whether node to change dims () + /// Because some node will calculate with 5D, C dim maybe multi meaning + auto peer_in_data_node_type = peer_in_data_node->GetType(); + auto iter1 = kChangeDimNodes.find(peer_in_data_node_type); + // 4 means dims num + if ((iter1 != kChangeDimNodes.end()) && (dim_num < 4)) { + GELOGI("Node[%s] is change dim node. do not infer origin format", (peer_in_data_node->GetName()).c_str()); + continue; + } + ge_tensor_desc.SetOriginFormat(to_be_set_format); + ge_tensor_desc.SetFormat(to_be_set_format); + (void)peer_in_data_node->GetOpDesc()->UpdateInputDesc(idx, ge_tensor_desc); + + /// Because netoutput node added before infer format ,so netoutput is end condition + /// must set netoutput format , because saved result depend on format + if (peer_in_data_node_type == NETOUTPUT) { + continue; + } + + // Call operator infer format api (forward) to get out format + GELOGD("call infer format func[Forward]!Node is [%s] ", (peer_in_data_node->GetName()).c_str()); + graphStatus status = peer_in_data_node->InferOriginFormat(); + if (status != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Node[%s] infer format failed", (peer_in_data_node->GetName()).c_str()); + return GRAPH_FAILED; + } + nodes.push_back(peer_in_data_node); + } + } + } + return GRAPH_SUCCESS; +} + +void FormatRefiner::RefreshOriginFormatOfAnchor(std::vector &anchor_points) { + for (const auto &node : anchor_points) { + if (node == nullptr || node->GetOpDesc() == nullptr) { + continue; + } + for (const auto &input_desc : node->GetOpDesc()->GetAllInputsDescPtr()) { + if (input_desc != nullptr) { + 
input_desc->SetOriginFormat(input_desc->GetFormat()); + } + } + for (const auto &output_desc : node->GetOpDesc()->GetAllOutputsDescPtr()) { + if (output_desc != nullptr) { + output_desc->SetOriginFormat(output_desc->GetFormat()); + } + } + } +} + +void FormatRefiner::SetInferOrigineFormatFlag(bool is_first) { is_first_infer = is_first; } + +graphStatus FormatRefiner::DataNodeFormatProcess(std::vector &data_nodes, ge::Format data_format, + std::unordered_map &node_status) { + bool is_internal_format = TypeUtils::IsInternalFormat(data_format); + bool need_process = ((!is_first_infer) && (is_internal_format == false) && (data_format != FORMAT_ND)); + if (!need_process) { + GELOGI("no necessary to do DataNodeFormatProcess.IsFirstInfer: %d, data_format:%s", is_first_infer, + TypeUtils::FormatToSerialString(data_format).c_str()); + return GRAPH_SUCCESS; + } + GELOGD("Enter DataNodeFormatProcess"); + std::vector uninferred_data_nodes; + // Check and renew data nodes format + for (const auto &data_node : data_nodes) { + GE_CHECK_NOTNULL(data_node); + auto op_desc = data_node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + GE_CHECK_NOTNULL(op_desc->GetOutputDescPtr(0)); + auto curr_format = op_desc->GetOutputDescPtr(0)->GetOriginFormat(); + if (curr_format != FORMAT_ND) { + // Data format has been inferred , continue + continue; + } + // Set format for un-inferred data node + auto input_descs = op_desc->GetAllInputsDescPtr(); + auto output_descs = op_desc->GetAllOutputsDescPtr(); + + for (const auto &input_desc : input_descs) { + if (input_desc != nullptr) { + input_desc->SetOriginFormat(data_format); + input_desc->SetFormat(data_format); + } + } + for (const auto &output_desc : output_descs) { + if (output_desc != nullptr) { + output_desc->SetOriginFormat(data_format); + output_desc->SetFormat(data_format); + } + } + uninferred_data_nodes.push_back(data_node); + } + // Reinfer format from uninfered data nodes + for (const auto &node : uninferred_data_nodes) { + if (node == 
nullptr) { + continue; + } + GELOGD("data node [%s] start infer format process", node->GetName().c_str()); + auto status = AnchorProcess(node, node_status); + if (status != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "data node [%s] infer format process failed!", node->GetName().c_str()); + return GRAPH_FAILED; + } + } + GELOGD("DataNodeFormatProcess success"); + return GRAPH_SUCCESS; +} + +graphStatus FormatRefiner::InferOrigineFormat(const ge::ComputeGraphPtr &graph) { + GELOGI("Enter InferOrigineFormat process!"); + + // True: inferred false:no-inferred + std::unordered_map node_status; + std::vector anchor_points; + std::vector data_nodes; + // global net format + net_format_is_nd = true; + g_user_set_format = FORMAT_ND; + + if (graph == nullptr) { + GELOGE(GRAPH_FAILED, "input graph is null"); + return GRAPH_FAILED; + } + // User set global net format + graphStatus status = GetAnchorPoints(graph, anchor_points, data_nodes, node_status); + if (status != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "GetAnchorPoints Process Faild!"); + return GRAPH_FAILED; + } + // Refresh origin format of anchor point + RefreshOriginFormatOfAnchor(anchor_points); + // Infer format process + for (const auto &anchor_node : anchor_points) { + if (anchor_node == nullptr) { + continue; + } + status = AnchorProcess(anchor_node, node_status); + if (status != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Anchor node [%s] process failed!", anchor_node->GetName().c_str()); + return GRAPH_FAILED; + } + } + /// According to discuss with sys-enginer, data node default format is ND.Its format + /// should be set by inferred.But if some data-node can not be got by infer, set context's + /// format for these data nodes. 
+ /// Notice: ignore 5D formats + auto data_format = graph->GetDataFormat(); + status = DataNodeFormatProcess(data_nodes, data_format, node_status); + + // Set infer flag to false + SetInferOrigineFormatFlag(false); + return status; +} +} // namespace ge diff --git a/src/common/graph/format_refiner.h b/src/common/graph/format_refiner.h new file mode 100644 index 00000000..3b732d2f --- /dev/null +++ b/src/common/graph/format_refiner.h @@ -0,0 +1,51 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef COMMON_GRAPH_FORMAT_REFINER_H_ +#define COMMON_GRAPH_FORMAT_REFINER_H_ + +#include +#include +#include +#include + +#include "./compute_graph.h" +#include "./external/graph/types.h" +#include "./ge_error_codes.h" + +namespace ge { +// ShapeRefiner performs shape inference for compute graphs +class FormatRefiner { + public: + static graphStatus InferOrigineFormat(const ge::ComputeGraphPtr &graph); + static void SetInferOrigineFormatFlag(bool is_first = true); + + private: + static graphStatus RefreshConstantOutProcess(const OpDescPtr &op_desc); + static graphStatus GetAnchorPoints(const ge::ComputeGraphPtr &graph, std::vector &anchor_points, + std::vector &data_nodes, + std::unordered_map &node_status); + static graphStatus AnchorProcess(const ge::NodePtr &anchor_node, std::unordered_map &node_status); + static void RefreshOriginFormatOfAnchor(std::vector &anchor_points); + static graphStatus BackInferProcess(std::deque &nodes, ge::NodePtr &node, + std::unordered_map &node_status); + static graphStatus ForwardInferProcess(std::deque &nodes, ge::NodePtr &node, + std::unordered_map &node_status); + static graphStatus DataNodeFormatProcess(std::vector &data_nodes, ge::Format data_format, + std::unordered_map &node_status); +}; +} // namespace ge +#endif // COMMON_GRAPH_FORMAT_REFINER_H_ diff --git a/src/common/graph/ge_attr_define.cc b/src/common/graph/ge_attr_define.cc new file mode 100644 index 00000000..d63b7761 --- /dev/null +++ b/src/common/graph/ge_attr_define.cc @@ -0,0 +1,755 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +namespace ge { +// Public attribute +const std::string ATTR_NAME_NAME = "name"; + +const std::string ATTR_NAME_TYPE = "type"; + +const std::string ATTR_NAME_WEIGHT_NAME = "weight_name"; + +const std::string ATTR_NAME_IS_QUANTIZE_FACTOR = "quantize_factor"; + +const std::string ATTR_NAME_ALPHA = "alpha"; + +const std::string ATTR_NAME_BETA = "beta"; + +const std::string ATTR_NAME_PADMODE = "pad_mode"; + +const std::string ATTR_NAME_PADMODES = "padding"; + +const std::string ATTR_NAME_MODE = "mode"; + +const std::string ATTR_NAME_FILTER = "filter"; + +const std::string ATTR_NAME_BIAS = "bias"; + +const std::string ATTR_NAME_BIAS_TERM = "bias_term"; + +const std::string ATTR_NAME_PAD = "pad"; + +const std::string ATTR_NAME_PADS = "pad"; + +const std::string ATTR_NAME_PAD_SIZE = "pad size"; + +const std::string ATTR_NAME_PAD_MODE = "pad mode"; + +const std::string ATTR_NAME_SCALE = "scale"; + +const std::string ATTR_NAME_WINDOWS = "windows"; + +const std::string ATTR_NAME_GLOBAL_POOLING = "global_pooling"; + +const std::string ATTR_NAME_CEIL_MODE = "ceil_mode"; + +const std::string ATTR_NAME_RELUMODE = "relu_mode"; + +const std::string ATTR_NAME_STRIDE_SIZE = "stride size"; + +const std::string ATTR_NAME_RELU_FLAG = "relu_flag"; + +const std::string ATTR_NAME_ALGO = "algo"; + +const std::string ATTR_NAME_FORMAT = "format"; + +const std::string ATTR_NAME_FILTER_FORMAT = "filter_format"; + +const std::string ATTR_NAME_LRN_K = "lrn_k"; + +const std::string ATTR_NAME_LRN_NORM_REGION = "lrn_normregion"; + +const std::string 
ATTR_NAME_LRN_LOCAL_SIZE = "lrn_localsize"; + +const std::string ATTR_NAME_LRN_ALPHA = "lrn_alpha"; + +const std::string ATTR_NAME_LRN_BETA = "lrn_beta"; + +const std::string ATTR_NAME_AXIS = "axis"; +const std::string ATTR_NAME_BROADCAST = "broadcast"; + +const std::string ATTR_NAME_OUTPUT_NUM = "output_num"; +const std::string ATTR_NAME_TIDX = "t_idx"; + +const std::string ATTR_NAME_TPADDINGS = "t_paddings"; +const std::string ATTR_IMG_H = "img_h"; +const std::string ATTR_IMG_W = "img_w"; +const std::string ATTR_NET_H = "net_h"; +const std::string ATTR_NET_W = "net_w"; + +const std::string ATTR_NAME_TMULTIPLES = "t_multiples"; + +const std::string ATTR_NAME_MULTIPLES = "multiples"; + +const std::string ATTR_NAME_T = "T"; +const std::string ATTR_NAME_N = "N"; + +const std::string ATTR_NAME_TSHAPE = "Tshape"; +const std::string ATTR_NAME_NAN_OPT = "nan_opt"; + +const std::string ATTR_NAME_AIPP = "aipp"; + +const std::string ATTR_NAME_INPUT_FORMAT = "input_format"; +const std::string ATTR_NAME_OUTPUT_FORMAT = "output_format"; + +const std::string ATTR_NAME_FRAMEWORK_NODE_DEF = "node_def"; +const std::string ATTR_NAME_FRAMEWORK_OP_DEF = "op_def"; +const std::string ATTR_NAME_FRAMEWORK_FWK_TYPE = "framework_type"; +const std::string ATTR_NAME_FRAMEWORK_FUNC_DEF = "func_def"; + +const std::string ATTR_NAME_INPUT_TENSOR_DESC = "input_tensor_desc"; +const std::string ATTR_NAME_OUTPUT_TENSOR_DESC = "output_tensor_desc"; + +const std::string ATTR_NAME_INFERRED_FORMAT = "inferred_format"; +const std::string ATTR_NAME_PRED_PERMUTE_DELETED = "pred_permute_deleted"; +const std::string ATTR_NAME_IGNORE_PRED_FORMAT = "ignore_pred_format"; +const std::string ATTR_NAME_WEIGHTS = "value"; +const std::string ATTR_NAME_WEIGHTS_DATA = "weights_data"; +const std::string ATTR_NAME_BROACAST_REAL_DIM_CNT = "broacast_real_dim_cnt"; +const std::string ATTR_NAME_DIM_ALIGN = "dim_align"; +const std::string ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE = "original_type"; + +const std::string 
ATTR_NAME_SESSION_GRAPH_ID = "session_graph_id"; + +const std::string ATTR_NAME_AUTOMIC_ADD_START = "automic_add_addr_start"; +const std::string ATTR_NAME_AUTOMIC_ADD_MEM_SIZE = "automic_add_mem_size"; +const std::string ATTR_MODEL_BATCH_NUM = "batch_num"; +const std::string ATTR_NAME_STREAM_LABEL = "_stream_label"; +const std::string ATTR_NAME_STREAM_CYCLE_EVENT_FLAG = "need_stream_cycle_event"; + +// To be deleted +const std::string ATTR_TO_BE_DELETED = "to_be_deleted"; +const std::string PERMUTE_RESHAPE_FUSION = "permute_reshape_fusion"; +const std::string PERMUTE_RESHAPE_FUSION_CONV_PROPOSAL = "fusion_conv_proposal"; +const std::string PERMUTE_RESHAPE_FUSION_CONV_DECODEBBOX = "fusion_conv_decodebbox"; +const std::string PERMUTE_RESHAPE_FUSION_BOX_TYPE_NUM = "box_type_num"; +const std::string SSD_MBOX_LOC_FUSION = "permute_flatten_fusion"; +const std::string SSD_MBOX_CONF_FUSION = "permute_flatten_reshape_flatten_fusion"; +const std::string SSD_MBOX_OCR_FUSION = "permute_flatten_ocr_fusion"; +const std::string SSD_MBOX_FUSION_BOX_TYPE_NUM = "ssd_mbox_fusion_box_type_num"; +const std::string SSD_RESHAPE_SLICE_CONCAT_FUSION = "reshape_slice_concat_fusion"; + +const std::string SSD_PRIORBOX_CONCAT = "ssd_mbox_conf_priorbox_concat_flag"; + +// Refinedet +const std::string REFINEDET_MBOX_LOC_FUSION = "permute_flatten_fusion"; +const std::string REFINEDET_RESHAPE_SLICE_CONCAT_FUSION = "reshape_slice_concat_fusion"; +const std::string REFINEDET_MBOX_CONF_FUSION = "permute_flatten_reshape_flatten_fusion"; +const std::string REFINEDET_MBOX_FUSION_BOX_TYPE_NUM = "ssd_mbox_fusion_box_type_num"; +const std::string REFINEDET_PRIOR_BOX_ATTR_VARIANCE = "variance"; +const std::string REFINEDET_PRIOR_BOX_ATTR_VARIANCE_NUM = "variance_num"; + +// _Arg +const std::string ATTR_NAME_INDEX = "index"; +// _RetVal +const std::string RETVAL_ATTR_NAME_INDEX = "retval_index"; +// Data +const std::string DATA_ATTR_NAME_DATA_TYPE = "data_type"; + +// Send +const std::string 
SEND_ATTR_EVENT_ID = "event_id"; + +// Recv +const std::string RECV_ATTR_EVENT_ID = "event_id"; + +// convolution +const std::string ATTR_NAME_COEF = "coef"; + +const std::string ATTR_NAME_STRIDE = "stride"; + +const std::string ATTR_NAME_STRIDES = "stride"; + +const std::string ATTR_NAME_DILATION = "dilation"; + +const std::string ATTR_NAME_DILATIONS = "dilation"; + +const std::string CONV_ATTR_NAME_MODE = "mode"; + +const std::string CONV_ATTR_NAME_ALGO = "algo"; + +const std::string CONV_ATTR_NAME_GROUP = "group"; + +const std::string CONV_ATTR_NAME_PAD_MODE = "pad_mode"; + +const std::string CONV_ATTR_NAME_PAD = "pad"; + +const std::string CONV_ATTR_NAME_STRIDE = "stride"; + +const std::string CONV_ATTR_NAME_DILATION = "dilation"; + +const std::string CONV_ATTR_NAME_NUM_OUTPUT = "num_output"; + +const std::string CONV_ATTR_NAME_KERNEL = "kernel"; + +const std::string CONV_ATTR_NAME_FILTER = "filter"; + +const std::string CONV_ATTR_NAME_BIAS = "bias"; + +const std::string CONV_ATTR_NAME_RELU_FLAG = "relu_flag"; + +const std::string CONV_ATTR_NAME_ADJ = "adj"; + +const std::string CONV_ATTR_NAME_TARGET_SHAPE = "target_shape"; + +const std::string CONV_ATTR_NAME_BEFORE_PAD = "before_pad"; + +const std::string CONV_ATTR_NAME_HAS_BIAS = "has_bias"; + +const std::string NEED_INFER = "isNeedInfer"; + +// Pooling +const std::string POOLING_ATTR_MODE = "mode"; +const std::string POOLING_ATTR_NAN_OPT = "nan_opt"; +const std::string POOLING_ATTR_PAD_MODE = "pad_mode"; +const std::string POOLING_ATTR_GLOBAL_POOLING = "global_pooling"; +const std::string POOLING_ATTR_WINDOW = "window"; +const std::string POOLING_ATTR_PAD = "pad"; +const std::string POOLING_ATTR_STRIDE = "stride"; +const std::string POOLING_ATTR_CEIL_MODE = "ceil_mode"; +const std::string POOLING_ATTR_DATA_MODE = "data_mode"; +const std::string POOLING_ATTR_BEFORE_PAD = "before_pad"; +const std::string POOLING_ATTR_NAME_ALGO = "algo"; + +// Eltwise +const std::string ELTWISE_ATTR_MODE = "mode"; +const 
std::string ELTWISE_ATTR_COEFF = "coeff"; +const std::string ELTWISE_ATTR_WEIGHT = "weight"; +const std::string ELTWISE_ATTR_RELU_FLAG = "relu_flag"; +const std::string ELTWISE_ATTR_ALPHA = "alpha"; +const std::string ELTWISE_ATTR_BETA = "beta"; + +// BatchNorm +const std::string BATCHNORM_ATTR_MODE = "mode"; +const std::string BATCHNORM_ATTR_EPSILON = "epsilon"; +const std::string BATCHNORM_ATTR_USE_GLOBAL_STATS = "use_global_stats"; +const std::string BATCHNORM_ATTR_MOVING_AVERAGE_FRACTION = "moving_average_fraction"; +const std::string BATCHNORM_ATTR_ESTIMATED_MEAN = "estimated_mean"; +const std::string BATCHNORM_ATTR_ESTIMATED_VARIANCE = "estimated_variance"; +const std::string BATCHNORM_ATTR_SCALE = "scale"; +const std::string BATCHNORM_ATTR_BIAS = "bias"; + +// Scale +const std::string SCALE_ATTR_SCALE = "scale"; +const std::string SCALE_ATTR_BIAS = "bias"; + +// FullConnection +const std::string FULL_CONNECTION_ATTR_FILTER = "filter"; +const std::string FULL_CONNECTION_ATTR_BIAS = "bias"; +const std::string FULL_CONNECTION_ATTR_NUM_OUTPUT = "num_output"; +const std::string FULL_CONNECTION_ATTR_RELU_FLAG = "relu_flag"; +const std::string FULL_ATTR_NAME_ALGO = "algo"; + +// SoftmaxOpParams +const std::string SOFTMAX_ATTR_ALGO = "algo"; +const std::string SOFTMAX_ATTR_MODE = "mode"; + +// SparseSoftmaxCrossEntropy +const std::string SPARSE_SOFTMAX_CROSS_ENTROPY_ATTR_MODE = "cross_entropy_mode"; +const std::string SPARSE_SOFTMAX_CROSS_ENTROPY_IS_GRAD = "cross_entropy_is_grad"; +// Attr labelSmoothing +const std::string SOFTMAX_CROSS_ENTROPY_LABELSMOOTHING = "labelSmoothing"; + +// ApplyMomentum +const std::string APPLYMENTUM_ATTR_IS_GRAPH_FUSION = "applymomentum_is_graph_fusion"; + +// Activation +const std::string ACTIVATION_ATTR_MODE = "mode"; +const std::string ACTIVATION_ATTR_COEF = "coef"; + +// Concat +const std::string CONCAT_ATTR_NAME_AXIS = "axis"; + +// Const +const std::string CONST_ATTR_NAME_DATA_TRANSTYPE = "data_transtype"; +const std::string 
CONST_ATTR_NAME_OUTPUT_FORMAT = "output_format"; +const std::string CONST_ATTR_NAME_OUTPUT_TYPE = "output_type"; + +// Roipooling +const std::string ROIPOOLING_ATTR_NAME_POOLED_H = "pooled_h"; +const std::string ROIPOOLING_ATTR_NAME_POOLED_W = "pooled_w"; +const std::string ROIPOOLING_ATTR_NAME_SPATIAL_SCALE = "spatial_scale"; +const std::string ROIPOOLING_ATTR_NAME_RIO_POOLING_MODE = "rio_pooling_mode"; +const std::string ROIPOOLING_ATTR_NAME_POOLING_MODE = "pooling_mode"; +const std::string ROIPOOLING_ATTR_NAME_SAMPLING_RATIO = "sampling_ratio"; + +// DetectionOutput +const std::string DETECTIONOUTPUT_ATTR_NUM_CLASSES = "num_classes"; +const std::string DETECTIONOUTPUT_ATTR_OCR_NUM_CLASSES = "ocr_num_classes"; +const std::string DETECTIONOUTPUT_ATTR_NMS_THRESHOLD = "nms_threshold"; +const std::string DETECTIONOUTPUT_ATTR_TOP_K = "top_k"; +const std::string DETECTIONOUTPUT_ATTR_CONFIDENCE_THRESHOLD = "confidence_threshold"; +const std::string DETECTIONOUTPUT_ATTR_IMG_H = "img_h"; +const std::string DETECTIONOUTPUT_ATTR_IMG_W = "img_w"; +const std::string DETECTIONOUTPUT_ATTR_BATCH_SIZE = "batch_size"; +// Ssd DetectionOutput +const std::string DETECTIONOUTPUT_ATTR_ETA = "eta"; +const std::string DETECTIONOUTPUT_ATTR_SHARED_LOCATION = "shared_location"; +const std::string DETECTIONOUTPUT_ATTR_BACKGROUND_LABEL_ID = "background_label_id"; +const std::string DETECTIONOUTPUT_ATTR_CODE_TYPE = "code_type"; +const std::string DETECTIONOUTPUT_ATTR_VARIANCE_ENCODED_IN_TARGET = "variance_encoded_in_target"; +const std::string DETECTIONOUTPUT_ATTR_KEEP_TOP_K = "keep_top_k"; +// Refinedet DetectionOutput +const std::string DETECTIONOUTPUT_ATTR_OBJECTNESS_SCORE = "objectness_score"; +// yolo DetectionOutput +const std::string DETECTIONOUTPUT_ATTR_ClASSES = "classes"; +const std::string DETECTIONOUTPUT_ATTR_BIASES = "biases"; +const std::string DETECTIONOUTPUT_ATTR_RELATIVE = "relative"; +const std::string DETECTIONOUTPUT_ATTR_OBJECTNESS_THRESHOLD = "objectness_threshold"; 
+const std::string DETECTIONOUTPUT_ATTR_CLASS_THRESHOLD = "class_threshold"; +const std::string DETECTIONOUTPUT_ATTR_POST_TOP_K = "post_top_k"; +const std::string DETECTIONOUTPUT_ATTR_IOU_THRESHOLD_DECAY = "iou_threshold_decay"; +const std::string DETECTIONOUTPUT_ATTR_COOR_SCALE_FACTOR = "coor_scale_factor"; +const std::string DETECTIONOUTPUT_ATTR_YOLO_VERSION = "yolo_version"; + +// DetectionPostprocess +const std::string POSTPROCESS_ATTR_NAME_CLS_NUM = "cls_num"; +const std::string POSTPROCESS_ATTR_NAME_CONF_THRESH = "conf_thresh"; +const std::string POSTPROCESS_ATTR_NAME_NMS_THRESH = "nms_thresh"; +const std::string POSTPROCESS_ATTR_POST_NMS_TOPN = "post_nms_topn"; +const std::string POSTPROCESS_ATTR_NAME_BBOX_REG_WEIGHT = "bbox_reg_weights"; + +// Spatialtransfrom +const std::string SPTIALTF_ATTR_NAME_OUTPUT_H = "output_h"; +const std::string SPTIALTF_ATTR_NAME_OUTPUT_W = "output_w"; +const std::string SPTIALTF_ATTR_NAME_BORDER_VALUE = "border_value"; +const std::string SPTIALTF_ATTR_NAME_AFFINE_TRANSFORM = "affine_transform"; + +// Proposa +const std::string PROPOSAL_ATTR_NAME_FEAT_STRIDE = "feat_stride"; +const std::string PROPOSAL_ATTR_NAME_BASE_SIZE = "base_size"; +const std::string PROPOSAL_ATTR_NAME_MIN_SIZE = "min_size"; +const std::string PROPOSAL_ATTR_NAME_RATIO = "ratio"; +const std::string PROPOSAL_ATTR_NAME_SCALE = "scale"; +const std::string PROPOSAL_ATTR_NAME_PRE_NMS_TOPN = "pre_nms_topn"; +const std::string PROPOSAL_ATTR_NAME_POST_NMS_TOPN = "post_nms_topn"; +const std::string PROPOSAL_ATTR_NAME_NMS_THRESH = "nms_thresh"; +const std::string PROPOSAL_ATTR_NAME_TOP_SIZE = "top_size"; +const std::string PROPOSAL_ATTR_IMG_H = "img_h"; +const std::string PROPOSAL_ATTR_IMG_W = "img_w"; +// Softmax +const std::string SOFTMAX_ATTR_AXIS = "axis"; + +// Permute +const std::string PERMUTE_ATTR_ORDER = "order"; + +// SSD Normalize +const std::string SSDNORMALIZE_ATTR_ACCROSS_SPATIAL = "across_spatial"; +const std::string SSDNORMALIZE_ATTR_CHANNEL_SHARED = 
"channel_shared"; +const std::string SSDNORMALIZE_ATTR_EPS = "eps"; + +// Flatten +const std::string FLATTEN_ATTR_AXIS = "axis"; +const std::string FLATTEN_ATTR_END_AXIS = "end_axis"; + +// SsdPRIORBOX +const std::string SSD_PRIOR_BOX_ATTR_FLIP = "flip"; +const std::string SSD_PRIOR_BOX_ATTR_CLIP = "clip"; +const std::string SSD_PRIOR_BOX_ATTR_IMG_H = "img_h"; +const std::string SSD_PRIOR_BOX_ATTR_IMG_W = "img_w"; +const std::string SSD_PRIOR_BOX_ATTR_STEP_H = "step_h"; +const std::string SSD_PRIOR_BOX_ATTR_STEP_W = "step_w"; +const std::string SSD_PRIOR_BOX_ATTR_OFFSET = "offset"; +const std::string SSD_PRIOR_BOX_ATTR_MIN_SIZE = "min_size"; +const std::string SSD_PRIOR_BOX_ATTR_MAX_SIZE = "max_size"; +const std::string SSD_PRIOR_BOX_ATTR_MIN_SIZE_NUM = "min_size_num"; +const std::string SSD_PRIOR_BOX_ATTR_MAX_SIZE_NUM = "max_size_num"; +const std::string SSD_PRIOR_BOX_ATTR_ASPECT_RATIO = "aspect_ratio"; +const std::string SSD_PRIOR_BOX_ATTR_ASPECT_RATIO_NUM = "aspect_ratio_num"; +const std::string SSD_PRIOR_BOX_ATTR_VARIANCE = "variance"; +const std::string SSD_PRIOR_BOX_ATTR_VARIANCE_NUM = "variance_num"; + +// PRelu +const std::string PRELU_ATTR_CHANNEL_SHARED = "channel_shared"; + +// Psroi pooling +const std::string PSROIPOOLING_ATTR_SPATIAL_SCALE = "spatial_scale"; +const std::string PSROIPOOLING_ATTR_OUTPUT_DIM = "output_dim"; +const std::string PSROIPOOLING_ATTR_GROUP_SIZE = "group_size"; + +// Power +const std::string POWER_ATTR_NAME_POWER = "power"; +const std::string POWER_ATTR_NAME_SCALE = "scale"; +const std::string POWER_ATTR_NAME_SHIFT = "shift"; + +// Pack +const std::string PACK_ATTR_NAME_NUM = "N"; + +// Unpack +const std::string UNPACK_ATTR_NAME_NUM = "num"; +// Gathernd +const std::string GATHERND_ATTR_NAME_TINDICES = "Tindices"; +const std::string GATHERND_ATTR_NAME_TPARAMS = "Tparams"; + +// Argmax +const std::string ARGMAX_ATTR_NAME_TOPK = "topk"; +const std::string ARGMAX_ATTR_NAME_REDUCESIZE = "reduce_size"; +const std::string 
ARGMAX_ATTR_NAME_REDUCESTRIDE = "reduce_stride"; +const std::string ARGMAX_ATTR_NAME_OUTMAX = "outmaxval"; + +// Relu +const std::string ATTR_NAME_NEGATIVE_SLOPE = "negative_slope"; + +// FreeSpaceExtract +const std::string FREESPACEEXTRACT_ATTR_NAME_ORG_HEIGHT = "org_height"; + +// Split +const std::string SPLIT_ATTR_NAME_SLICE_POINT = "slice_point"; +const std::string SPLIT_ATTR_NAME_SIZE_SPLIT = "size_split"; +const std::string SPLIT_ATTR_NAME_NUM_SPLIT = "num_split"; + +// Tvm +const std::string TVM_ATTR_NAME_MAGIC = "tvm_magic"; +const std::string TVM_ATTR_NAME_BLOCKDIM = "tvm_blockdim"; +const std::string TVM_ATTR_NAME_METADATA = "tvm_metadata"; + +// Squeeze +const std::string SQUEEZE_ATTR_AXIS = "axis"; +const std::string SQUEEZE_ATTR_DIMS = "squeeze_dims"; +const std::string SQUEEZE_OP_NAME = "Squeeze"; + +// Stride slice +const std::string STRIDE_SLICE_ATTR_BEGIN_MASK = "begin_mask"; +const std::string STRIDE_SLICE_ATTR_END_MASK = "end_mask"; +const std::string STRIDE_SLICE_ATTR_ELLIPSIS_MASK = "ellipsis_mask"; +const std::string STRIDE_SLICE_ATTR_NEW_AXIS_MASK = "new_axis_mask"; +const std::string STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK = "shrink_axis_mask"; + +// Slice +const std::string SLICE_ATTR_NAME_BEGINS = "begins"; +const std::string SLICE_ATTR_NAME_SIZES = "sizes"; + +// Roialign +const std::string ROIALIGN_ATTR_SPATIAL_SCALE = "spatial_scale"; +const std::string ROIALIGN_ATTR_SAMPLING_RATIO = "sampling_ratio"; +const std::string ROIALIGN_ATTR_NAME_POOLED_H = "pooled_h"; +const std::string ROIALIGN_ATTR_NAME_POOLED_W = "pooled_w"; + +// Generate_rpn_proposal +const std::string GENERATE_RPN_PROPOSAL_ATTR_PRE_NMS_TOPK = "pre_nms_topk"; +const std::string GENERATE_RPN_PROPOSAL_ATTR_POST_NMS_TOPK = "post_nms_topk"; +const std::string GENERATE_RPN_PROPOSAL_ATTR_RPN_MINI_SIZE = "rpn_mini_size"; +const std::string GENERATE_RPN_PROPOSAL_ATTR_RPN_PROPOSAL_NMS_THRESH = "rpn_proposal_nms_thresh"; +const std::string 
GENERATE_RPN_PROPOSAL_ATTR_RPN_PROPOSAL_FILTER_THRESH = "rpn_proposal_filter_thresh"; +// Decode_bbox +const std::string DECODE_BBOX_ATTR_DECODECLIP = "decodeClip"; + +// Cast +const std::string CAST_ATTR_DSTT = "DstT"; +const std::string CAST_ATTR_SRCT = "SrcT"; +const std::string CAST_ATTR_DST_TYPE = "dst_type"; +const std::string CAST_ATTR_TRUNCATE = "truncate"; + +// Fastrcnnn predications +const std::string FASTRCNN_PREDICTIONS_ATTR_TOPK = "fsr_topk"; +const std::string FASTRCNN_PREDICTIONS_ATTR_SCORE_THRESHOLD = "fsr_score_thres"; +const std::string FASTRCNN_PREDICTIONS_ATTR_NMS_THRESHOLD = "fsr_nms_thres"; +const std::string FASTRCNN_PREDICTIONS_ATTR_NUM_CLASSES = "fsr_num_classes"; + +// REORG +const std::string REORG_ATTR_STRIDE = "stride"; +const std::string REORG_ATTR_REVERSE = "reverse"; + +// MERGE +const std::string MERGE_DEAD_INDEX = "merge_dead_index"; +const std::string MERGE_PRENODE_FLAG = "merge_prenode_flag"; +const std::string TO_BE_OUTPUT = "to_be_output"; + +// ENTER +const std::string ENTER_ATTR_FRAME_NAME = "frame_name"; +const std::string ENTER_ATTR_CONSTANT_FLAG = "is_constant"; + +// Concatv2 +const std::string CONCAT_V2_ATTR_TIDX = "Tidx"; +const std::string CONCAT_V2_ATTR_N = "N"; +// SUM +const std::string SUM_ATTR_TIDX = "Tidx"; +const std::string SUM_ATTR_AXIS = "axis"; +const std::string SUM_ATTR_KEEP_DIMS = "keep_dims"; + +// ResizeBilinear +const std::string RESIZE_BILINEAR_ATTR_MODE = "mode"; +const std::string RESIZE_BILINEAR_ATTR_ALIGN_CORNERS = "align_corners"; +const std::string RESIZE_BILINEAR_ATTR_HEIGHT = "height"; +const std::string RESIZE_BILINEAR_ATTR_WIDTH = "width"; +const std::string RESIZE_BILINEAR_ATTR_ZOOM_FACTOR = "zoom_factor"; +const std::string RESIZE_BILINEAR_ATTR_SHRINK_FACTOR = "shrink_factor"; +const std::string RESIZE_BILINEAR_ATTR_PAD_BEGIN = "pad_begin"; +const std::string RESIZE_BILINEAR_ATTR_PAD_END = "pad_end"; +const std::string RESIZE_BILINEAR_ATTR_ALPHA = "alpha"; +const std::string 
RESIZE_BILINEAR_ATTR_BETA = "beta"; + +// RetinaNet +const std::string RETINANET_FILTER_BACKGROUND_TRUE = "retina_conv_filter_background"; +const std::string RETINANET_ANCHOR_FUSION = "retina_anchor_fusion"; + +// MatMul +const std::string MATMUL_TRANSPOSE_X = "transposeX"; +const std::string MATMUL_TRANSPOSE_W = "transposeW"; +const std::string MATMUL_HAS_BIAS = "has_bias"; +const std::string MATMUL_ATTR_IS_TRAINING = "matmul_is_training"; + +// Flatten +const std::string FLATTEN_START_AXIS = "start_axis"; +const std::string FLATTEN_END_AXIS = "end_axis"; + +// Reshape +const std::string RESHAPE_ATTR_AXIS = "axis"; +const std::string RESHAPE_ATTR_NUM_AXES = "num_axes"; +const std::string RESHAPE_ATTR_FORMAT = "format"; +const std::string RESHAPE_ATTR_SHAPE = "shape"; +const std::string RESHAPE_ATTR_ALPHA = "alpha"; +const std::string RESHAPE_ATTR_BETA = "beta"; + +// Frameoworkop +const std::string T_IN_DATATYPE = "t_in_datatype"; +const std::string T_OUT_DATATYPE = "t_out_datatype"; +const std::string ATTR_NAME_OUT_N = "out_n"; +const std::string ATTR_NAME_OUT_C = "out_c"; +const std::string ATTR_NAME_OUT_H = "out_h"; +const std::string ATTR_NAME_OUT_W = "out_w"; +const std::string ATTR_PAD_DEPTH_CONV = "pad_depth_conv"; +const std::string ATTR_PAD_CONV = "pad_conv"; + +const std::string ATTR_NAME_BEFORE_PAD = "before_pad"; +const std::string ANN_MEAN_KEEPDIMS = "AnnMeanKeepDims"; +const std::string PAD_ATTR_PADDINGDS = "paddings"; +const std::string PAD_ATTR_CONSTANT_VALUE = "padvalue"; + +// ConvGradFilter +const std::string CONV_GRAD_FILTER_OUTPUT_SHAPE = "conv_grad_filter_output_shape"; +// ConvGradInput +const std::string CONV_GRAD_INPUT_OUTPUT_SHAPE = "conv_grad_input_output_shape"; + +// Rnn +const std::string RNN_MODE_ = "rnn_"; +const std::string CNN_RNN = "cnn_rnn"; +const std::string MUTI_RNN = "multi_rnn"; +const std::string CELL_MODE = "mode"; +const std::string LSTM_CELL = "lstm_cell"; +const std::string GRU_CELL = "gru_cell"; +const std::string 
RNN_HT = "ht"; +const std::string RNN_XT_HT = "xt_ht"; +const std::string RNN_BATCH_SIZE = "batch_size"; + +// Upsample +const std::string UPSAMPLE_ATTR_NAME_SCALE = "scale"; + +// Filler +const std::string FILLER_TYPE = "filler_type"; +const std::string FILLER_VALUE = "filler_value"; + +// Shufflechannel +const std::string SHUFFLE_CHANNEL_GROUP = "group"; + +// TopKV2 +const std::string TOPKV2_ATTR_K = "k"; + +const std::string DEPTH_SPACE_ATTR_BLOCK_SIZE = "block_size"; +const std::string L2_NORMALIZE_ATTR_EPS = "eps"; + +// Calibaration +const std::string STRIDE_H_INDEX = "STRIDE_H_INDEX"; +const std::string STRIDE_W_INDEX = "STRIDE_W_INDEX"; +const std::string PAD_TOP_INDEX = "PAD_TOP_INDEX"; +const std::string PAD_BOTTOM_INDEX = "PAD_BOTTOM_INDEX"; +const std::string PAD_RIGHT_INDEX = "PAD_RIGHT_INDEX"; +const std::string PAD_LEFT_INDEX = "PAD_LEFT_INDEX"; +const std::string QUANTIZE_ALGO_ATTR = "quantize_algo"; +const std::string SCALE_TYPE_ATTR = "scale_type"; + +const std::string QUANTIZE_SCALE_MODE = "quantize_scale_mode"; +const std::string QUANTIZE_SCALE_VALUE = "quantize_scale_value"; +const std::string QUANTIZE_SCALE_OFFSET = "quantize_scale_offset"; +const std::string QUANTIZE_OFFSET_DATA_VALUE = "quantize_offset_data_value"; +const std::string QUANTIZE_OFFSET_DATA_OFFSET = "quantize_offset_data_offset"; +const std::string QUANTIZE_OFFSET_WEIGHT_VALUE = "quantize_offset_weight_value"; +const std::string QUANTIZE_OFFSET_WEIGHT_OFFSET = "quantize_offset_weight_offset"; +const std::string QUANTIZE_OFFSET_PAD_VALUE = "quantize_offset_pad_value"; +const std::string QUANTIZE_OFFSET_PAD_OFFSET = "quantize_offset_pad_offset"; + +const std::string DEQUANTIZE_SCALE_MODE = "dequantize_scale_mode"; +const std::string DEQUANTIZE_SCALE_VALUE = "dequantize_scale_value"; +const std::string DEQUANTIZE_SCALE_OFFSET = "dequantize_scale_offset"; +const std::string DEQUANTIZE_OFFSET_DATA_TYPE = "dequantize_offset_data_value"; +const std::string 
DEQUANTIZE_OFFSET_DATA_OFFSET = "dequantize_offset_data_offset"; +const std::string DEQUANTIZE_OFFSET_WEIGHT_VALUE = "dequantize_offset_weight_value"; +const std::string DEQUANTIZE_OFFSET_WEIGHT_OFFSET = "dequantize_offset_weight_offset"; +const std::string DEQUANTIZE_OFFSET_PAD_VALUE = "dequantize_offset_pad_value"; +const std::string DEQUANTIZE_OFFSET_PAD_OFFSET = "dequantize_offset_pad_offset"; + +const std::string REQUANTIZE_SCALE_MODE = "requantize_scale_mode"; +const std::string REQUANTIZE_SCALE_VALUE = "requantize_scale_value"; +const std::string REQUANTIZE_SCALE_OFFSET = "requantize_scale_offset"; +const std::string REQUANTIZE_OFFSET_DATA_VALUE = "requantize_offset_data_value"; +const std::string REQUANTIZE_OFFSET_DATA_OFFSET = "requantize_offset_data_offset"; +const std::string REQUANTIZE_OFFSET_WEIGHT_VALUE = "requantize_offset_weight_value"; +const std::string REQUANTIZE_OFFSET_WEIGHT_OFFSET = "requantize_offset_weight_offset"; +const std::string REQUANTIZE_OFFSET_PAD_VALUE = "requantize_offset_pad_value"; +const std::string REQUANTIZE_OFFSET_PAD_OFFSET = "requantize_offset_pad_offset"; + +const std::string ATTR_NAME_IS_CONST = "attr_name_is_const"; + +const std::string ATTR_NAME_GROUP = "group"; +const std::string ATTR_NAME_DILATION_SIZE = "dilation_size"; +const std::string ATTR_NAME_EPSILON = "epsilon"; +const std::string ATTR_NAME_POOLING_MODE = "mode"; +const std::string ATTR_NAME_CLASS_NUM = "class_num"; +// model +const std::string ATTR_MODEL_TARGET_TYPE = "target_type"; + +const std::string ATTR_MODEL_STREAM_NUM = "stream_num"; + +const std::string ATTR_MODEL_EVENT_NUM = "event_num"; + +const std::string ATTR_MODEL_MEMORY_SIZE = "memory_size"; + +const std::string ATTR_MODEL_WEIGHT_SIZE = "weight_size"; + +const std::string ATTR_MODEL_TASK_GEN_BASE_ADDR = "task_gen_base_addr"; + +const std::string ATTR_MODEL_TASK_GEN_WEIGHT_ADDR = "task_gen_weight_addr"; + +const std::string ATTR_MODEL_TASK_GEN_VAR_ADDR = "task_gen_variable_addr"; + +const 
std::string ATTR_MODEL_VAR_SIZE = "variable_size"; + +const std::string ATTR_MODEL_TASK_INDEX_OP_NAME = "task_index_op_name"; + +// Public attribute +const std::string ATTR_NAME_IMPLY_TYPE = "imply_type"; + +const std::string ATTR_NAME_BYTE_SIZE = "op_byte_size"; + +const std::string ATTR_NAME_FUSION_INFERENCE_ID = "fusion_inference_id"; + +const std::string ATTR_NAME_FUSION_OPDEF = "fusion_opdef"; + +const std::string ATTR_NAME_IO_OP = "io_op"; + +const std::string ATTR_NAME_FUSION_SCOPE = "fusion_scope"; + +const std::string ATTR_NAME_OPATTR = "opattr"; + +const std::string ATTR_NAME_RELUFLAG = "relu_flag"; + +const std::string ATTR_NAME_SEQLEN_INDEX = "seqlen_index"; + +const std::string ATTR_NAME_X_INDEX = "x_index"; + +const std::string ATTR_NAME_CONT_INDEX = "cont_index"; + +const std::string ATTR_NAME_XSTATIC_INDEX = "xstatic_index"; + +const std::string TARGET_TYPE_MINI = "MINI"; + +const std::string TARGET_TYPE_TINY = "TINY"; + +const std::string TARGET_TYPE_LITE = "LITE"; + +const std::string ATTR_NAME_CONTINUOUS_INPUT = "continuous_input"; + +const std::string ATTR_NAME_CONTINUOUS_OUTPUT = "continuous_output"; + +const std::string ATTR_NAME_REFERENCE = "reference"; + +const std::string ATTR_NAME_ATOMIC_INDEX = "atomic_index"; + +// Used for mark the active label list stream of activated node +const std::string ATTR_NAME_ACTIVE_LABEL_LIST = "_active_label_list"; + +// Multi batch +const std::string ATTR_NAME_PRED_VALUE = "_pred_value"; +const std::string ATTR_NAME_BATCH_NUM = "_batch_num"; +const std::string ATTR_NAME_BATCH_LABEL = "_batch_label"; + +// Control flow +const std::string ATTR_NAME_STREAM_SWITCH_COND = "switch_condition"; +const std::string ATTR_NAME_TRUE_BRANCH_STREAM = "true_branch_stream"; +const std::string ATTR_NAME_ACTIVE_STREAM_LIST = "active_stream_list"; +const std::string ATTR_NAME_SWITCHN_PRED_VALUE = "switch_pred_value"; + +const std::string ATTR_NAME_SWITCH_BRANCH_NODE_LABEL = "_switch_branch_node_label"; +const std::string 
ATTR_NAME_SWITCH_TRUE_BRANCH_FLAG = "_switch_true_branch_flag"; +const std::string ATTR_NAME_SWITCH_DATA_TYPE = "_switch_data_type"; +const std::string ATTR_NAME_ORIG_NODE_NAME = "_original_node_name"; +const std::string ATTR_NAME_CYCLIC_DEPENDENCE_FLAG = "_cyclic_dependence_flag"; + +const std::string ATTR_NAME_NEXT_ITERATION = "_next_iteration_node"; + +// Used for mark the active node is for loop, type:bool +const std::string ATTR_NAME_IS_LOOP_ACTIVE = "is_loop_active"; + +const std::string ATTR_NAME_MEMORY_TYPE_INPUT = "memory_type_input"; + +const std::string ATTR_NAME_MEMORY_TYPE_OUTPUT = "memory_type_output"; + +const std::string ATTR_NAME_MEMORY_TYPE_WORKSPACE = "memory_type_workspace"; + +const std::string MODEL_ATTR_SESSION_ID = "session_id"; + +// Atomic addr clean attrs +const std::string ATOMIC_ATTR_INPUT_INDEX = "atomic_input_index"; +const std::string ATOMIC_ATTR_OUTPUT_INDEX = "atomic_output_index"; +const std::string ATOMIC_ATTR_IS_FUSION_NODE = "is_fusion_node"; +const std::string EXT_ATTR_ATOMIC_WORKSPACE_INFO = "sub_node_workspace_info"; +const std::string EXT_ATTR_ATOMIC_WORKSPACE_OFFSET = "sub_node_workspace_offset"; +const std::string ATOMIC_ATTR_IS_ATOMIC_NODE = "is_atomic_node"; + +// Source/dst format for Op FormatTransfer +const std::string FORMAT_TRANSFER_SRC_FORMAT = "src_format"; +const std::string FORMAT_TRANSFER_DST_FORMAT = "dst_format"; + +// For compile op by ge call +const std::string ATTR_NEED_COMPILE = "_node_need_compile"; + +const std::string ATTR_INSERT_BY_MBATCH = "mbatch-inserted-node"; + +// For inserted op +const std::string ATTR_INSERTED_BY_GE = "_inserted_by_ge"; + +// For data dump +const std::string ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES = "_datadump_original_op_names"; +const std::string ATTR_NAME_DATA_DUMP_IS_MULTIOP = "_datadump_is_multiop"; +const std::string ATTR_NAME_DATA_DUMP_SUB_SPLITER_INDEX = "_datadump_sub_spliter_index"; +const std::string ATTR_NAME_DATA_DUMP_GROUP_OP_NAME = "_datadump_group_op_name"; +const 
std::string ATTR_NAME_DATA_DUMP_ORIGIN_NAME = "_datadump_origin_name"; +const std::string ATTR_NAME_DATA_DUMP_ORIGIN_OUTPUT_INDEX = "_datadump_origin_output_index"; +const std::string ATTR_NAME_DATA_DUMP_ORIGIN_FORMAT = "_datadump_origin_format"; +const std::string ATTR_NAME_DATA_DUMP_ORIGIN_DATA_TYPE = "_datadump_origin_data_type"; + +// Variable +const std::string REF_VAR_SRC_VAR_NAME = "ref_var_src_var_name"; +const std::string VAR_ATTR_SRC_VAR_NAME = "_src_var_name"; +const std::string REF_VAR_PRE_PEER_OUT_INDEX = "ref_var_pre_peer_out_index"; +const std::string VAR_ATTR_VAR_IS_BROADCAST = "_var_is_broadcast"; +const std::string VAR_ATTR_VAR_IS_RESTORE = "_var_is_restore"; + +// HCOM +const std::string HCOM_ATTR_ROOT_RANK = "root_rank"; +const std::string HCOM_ATTR_RANK_SIZE = "rank_size"; +const std::string HCOM_ATTR_SHAPE = "shape"; +const std::string HCOM_ATTR_DATA_TYPE = "dtype"; + +const std::string HCOM_ATTR_REDUCE_TYPE = "reduction"; + +const std::string ATTR_NAME_INPUT_DATATYPE = "input_datatype"; +const std::string ATTR_NAME_OUTPUT_DATATYPE = "output_datatype"; + +// Dynamic stitch +const std::string DYNAMIC_STITCH_ATTR_NAME_NUM = "DynamicStitchN_"; +} // namespace ge diff --git a/src/common/graph/ge_attr_value.cc b/src/common/graph/ge_attr_value.cc new file mode 100644 index 00000000..fe9cbfec --- /dev/null +++ b/src/common/graph/ge_attr_value.cc @@ -0,0 +1,1255 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/ge_attr_value.h" + +#include "graph/ge_tensor.h" +#include "external/graph/graph.h" +#include "utils/attr_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/model_serialize.h" +#include "proto/ge_ir.pb.h" +#include "detail/model_serialize_imp.h" +#include "graph/debug/ge_attr_define.h" +#include "debug/ge_log.h" +#include "debug/ge_util.h" + +using std::map; +using std::string; +using std::vector; + +namespace ge { +GeAttrValue::NamedAttrs::NamedAttrs() { named_attrs_.InitDefault(); } + +GeAttrValue::NamedAttrs::NamedAttrs(const ProtoMsgOwner &owner, proto::NamedAttrs *proto_msg) + : named_attrs_(owner, proto_msg) {} + +void GeAttrValue::NamedAttrs::SetName(const std::string &name) { + auto proto_msg = named_attrs_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->set_name(name); + } +} + +string GeAttrValue::NamedAttrs::GetName() const { + auto proto_msg = named_attrs_.GetProtoMsg(); + if (proto_msg != nullptr) { + return proto_msg->name(); + } + return string(); +} + +GeAttrValue GeAttrValue::NamedAttrs::GetItem(const string &key) const { + GeAttrValue value; + (void)GetAttr(key, value); + return value; +} + +ProtoAttrMapHelper GeAttrValue::NamedAttrs::MutableAttrMap() { + auto proto_msg = named_attrs_.GetProtoMsg(); + if (proto_msg != nullptr) { + return ProtoAttrMapHelper(named_attrs_.GetProtoOwner(), proto_msg->mutable_attr()); + } + return ProtoAttrMapHelper(named_attrs_.GetProtoOwner(), nullptr); +} + +ConstProtoAttrMapHelper GeAttrValue::NamedAttrs::GetAttrMap() const { + auto proto_msg = named_attrs_.GetProtoMsg(); + if (proto_msg != nullptr) { + return ConstProtoAttrMapHelper(named_attrs_.GetProtoOwner(), &proto_msg->attr()); + } + return ConstProtoAttrMapHelper(named_attrs_.GetProtoOwner(), nullptr); +} + +class GeAttrValueImp { + public: + static map attr_val_one_type_map_; + static map 
attr_val_list_type_map_; + + static bool SetValue(proto::AttrDef &attr_def, GeAttrValue::INT val); + static bool SetValue(proto::AttrDef &attr_def, GeAttrValue::FLOAT val); + static bool SetValue(proto::AttrDef &attr_def, GeAttrValue::BOOL val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::STR &val); + static bool SetValue(proto::AttrDef &attr_def, const ConstGeTensorPtr &val); + static bool SetValue(proto::AttrDef &attr_def, const GeTensor &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::TENSOR_DESC &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::BYTES &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::NAMED_ATTRS &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::GRAPH &val); + static bool SetValue(proto::AttrDef &attr_def, const vector &val); + static bool SetValue(proto::AttrDef &attr_def, const vector &val); + static bool SetValue(proto::AttrDef &attr_def, const vector &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::LIST_FLOAT &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::LIST_BOOL &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::LIST_STR &val); + static bool SetValue(proto::AttrDef &proto_attr_val, const vector &value); + static bool SetValue(proto::AttrDef &proto_attr_val, const vector &value); + static bool SetValue(proto::AttrDef &attr_def, const vector &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::LIST_TENSOR_DESC &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::LIST_BYTES &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::LIST_NAMED_ATTRS &val); + static bool SetValue(proto::AttrDef &attr_def, const GeAttrValue::LIST_GRAPH &val); + + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, GeAttrValue::INT &val); + static bool GetValue(const 
proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, GeAttrValue::FLOAT &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, GeAttrValue::BOOL &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, GeAttrValue::STR &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, GeAttrValue::TENSOR &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, GeTensor &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::TENSOR_DESC &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, GeAttrValue::BYTES &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::NAMED_ATTRS &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, GeAttrValue::GRAPH &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::LIST_INT &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::LIST_FLOAT &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::LIST_BOOL &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::LIST_STR &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::LIST_TENSOR &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, vector &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::LIST_TENSOR_DESC &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner 
&proto_msg_owner, + GeAttrValue::LIST_BYTES &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::LIST_NAMED_ATTRS &val); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + GeAttrValue::LIST_GRAPH &val); + // Value will be moved + static bool SetZeroCopyBytes(proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, Buffer &&buffer); + static bool GetZeroCopyBytes(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, Buffer &buffer); + // Value will be moved + static bool SetZeroCopyListBytes(proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + vector &list_buffer); + static bool GetZeroCopyListBytes(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + vector &list_buffer); + + static bool SetValue(proto::AttrDef &attr_def, const vector> &value); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + vector> &value); + static bool SetValue(proto::AttrDef &attr_def, const vector &value); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, + vector &value); + + static bool SetValue(proto::AttrDef &attr_def, const ge::DataType &value); + static bool GetValue(const proto::AttrDef &attr_def, const ProtoMsgOwner &proto_msg_owner, ge::DataType &value); +}; + +map GeAttrValueImp::attr_val_one_type_map_ = { + {proto::AttrDef::kI, GeAttrValue::VT_INT}, + {proto::AttrDef::kF, GeAttrValue::VT_FLOAT}, + {proto::AttrDef::kB, GeAttrValue::VT_BOOL}, + {proto::AttrDef::kS, GeAttrValue::VT_STRING}, + {proto::AttrDef::kT, GeAttrValue::VT_TENSOR}, + {proto::AttrDef::kTd, GeAttrValue::VT_TENSOR_DESC}, + {proto::AttrDef::kG, GeAttrValue::VT_GRAPH}, + {proto::AttrDef::kBt, GeAttrValue::VT_BYTES}, + {proto::AttrDef::kFunc, GeAttrValue::VT_NAMED_ATTRS}, + {proto::AttrDef::kListListInt, GeAttrValue::VT_LIST_LIST_INT}, + {proto::AttrDef::kDt, 
GeAttrValue::VT_DATA_TYPE}, +}; +map GeAttrValueImp::attr_val_list_type_map_ = { + {proto::AttrDef_ListValue_ListValueType_VT_LIST_INT, GeAttrValue::VT_LIST_INT}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_FLOAT, GeAttrValue::VT_LIST_FLOAT}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_BOOL, GeAttrValue::VT_LIST_BOOL}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_STRING, GeAttrValue::VT_LIST_STRING}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR, GeAttrValue::VT_LIST_TENSOR}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR_DESC, GeAttrValue::VT_LIST_TENSOR_DESC}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_GRAPH, GeAttrValue::VT_LIST_GRAPH}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_BYTES, GeAttrValue::VT_LIST_BYTES}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_NAMED_ATTRS, GeAttrValue::VT_LIST_NAMED_ATTRS}, + {proto::AttrDef_ListValue_ListValueType_VT_LIST_DATA_TYPE, GeAttrValue::VT_LIST_DATA_TYPE}, +}; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeAttrValue::GeAttrValue() { value_.InitDefault(); } + +GeAttrValue::GeAttrValue(const ProtoMsgOwner &proto_owner, ge::proto::AttrDef *val) : value_(proto_owner, val) {} + +GeAttrValue::ValueType GeAttrValue::GetValueType() const { + auto proto_msg = value_.GetProtoMsg(); + if (proto_msg != nullptr) { + auto val_case = proto_msg->value_case(); + if (val_case != proto::AttrDef::kList) { + auto it = GeAttrValueImp::attr_val_one_type_map_.find(val_case); + if (it != GeAttrValueImp::attr_val_one_type_map_.end()) { + return it->second; + } + } else { + auto it = GeAttrValueImp::attr_val_list_type_map_.find(proto_msg->list().val_type()); + if (it != GeAttrValueImp::attr_val_list_type_map_.end()) { + return it->second; + } + } + } + return GeAttrValue::VT_NONE; +} + +bool GeAttrValue::IsEmpty() const { return GetValueType() == VT_NONE; } + +GeAttrValue GeAttrValue::Copy() const { + GeAttrValue valueRet; + auto proto_msg = value_.GetProtoMsg(); + auto proto_msg_ret = 
valueRet.value_.GetProtoMsg(); + if (proto_msg != nullptr && proto_msg_ret != nullptr) { + *proto_msg_ret = *proto_msg; + } + return valueRet; +} + +#define ATTR_VALUE_SET_GET_IMP(type) \ + graphStatus GeAttrValue::SetValue(const type &val) { \ + auto proto_msg = value_.GetProtoMsg(); \ + if (proto_msg) { \ + if (GeAttrValueImp::SetValue(*proto_msg, val)) { \ + return GRAPH_SUCCESS; \ + } \ + } \ + return GRAPH_FAILED; \ + } \ + \ + graphStatus GeAttrValue::GetValue(type &val) const { \ + auto proto_msg = value_.GetProtoMsg(); \ + if (proto_msg) { \ + if (GeAttrValueImp::GetValue(*proto_msg, value_.GetProtoOwner(), val)) { \ + return GRAPH_SUCCESS; \ + } \ + } \ + return GRAPH_FAILED; \ + } + +ATTR_VALUE_SET_GET_IMP(GeAttrValue::STR) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::INT) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::FLOAT) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::BOOL) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::TENSOR_DESC) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::TENSOR) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::GRAPH) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::BYTES) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::NAMED_ATTRS) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(vector>) +ATTR_VALUE_SET_GET_IMP(vector) +ATTR_VALUE_SET_GET_IMP(GeAttrValue::DATA_TYPE) + +#undef ATTR_VALUE_SET_GET_IMP + +graphStatus GeAttrValue::MutableTensor(GeTensorPtr &tensor) { return GetValue(tensor); } + +graphStatus GeAttrValue::MutableListTensor(vector &list_tensor) { return GetValue(list_tensor); } + +class AttrUtilsHelper { + public: + inline static bool GetValueCheckType(const proto::AttrDef &attr_def, proto::AttrDef::ValueCase proto_case) { + if (attr_def.value_case() != proto_case) { + GELOGW("Check Type Failed, proto case type %u, expected %u", 
attr_def.value_case(), proto_case); + return false; + } + return true; + } + + inline static bool GetValueCheckListType( + const proto::AttrDef &attr_def, proto::AttrDef_ListValue_ListValueType proto_list_case, + const std::function item_check_fun) { + if (attr_def.value_case() != proto::AttrDef::kList) { + GELOGW("Check ListType Failed, value_case %u", attr_def.value_case()); + return false; + } + auto &list = attr_def.list(); + if (list.val_type() == proto::AttrDef_ListValue_ListValueType_VT_LIST_NONE) { + return item_check_fun(attr_def); + } + if (list.val_type() != proto_list_case) { + GELOGW("Check ListType Failed, val_type %u, expected %u", list.val_type(), proto_list_case); + return false; + } + return true; + } + + inline static bool SetValueCheckType(proto::AttrDef &attr_def, proto::AttrDef::ValueCase proto_case) { + if (attr_def.value_case() != proto::AttrDef::VALUE_NOT_SET && attr_def.value_case() != proto_case) { + GELOGW("Check Type Failed, proto case type %u, expected %u", attr_def.value_case(), proto_case); + return false; + } + return true; + } + + inline static bool SetValueCheckAndSetListType(proto::AttrDef &attr_def, + proto::AttrDef_ListValue_ListValueType proto_list_case) { + if (attr_def.value_case() != proto::AttrDef::VALUE_NOT_SET && attr_def.value_case() != proto::AttrDef::kList) { + GELOGW("AttrUtils::Check Type Failed, value_case %u", attr_def.value_case()); + return false; + } + auto list = attr_def.mutable_list(); + if (list == nullptr) { + GELOGE(GRAPH_FAILED, "list is nullptr"); + return false; + } + if (list->val_type() != proto::AttrDef_ListValue_ListValueType_VT_LIST_NONE && + list->val_type() != proto_list_case) { + GELOGW("AttrUtils::Check ListType Type Failed, val_type %d, expected %d", static_cast(list->val_type()), + static_cast(proto_list_case)); + return false; + } + list->set_val_type(proto_list_case); + return true; + } + + static bool GetAttrMapItem(const AttrHolder *obj, const string &name, const proto::AttrDef 
*&attr_def) { + if (obj == nullptr) { + GELOGE(FAILED, "%s obj is nullptr", name.c_str()); + return false; + } + auto attr_map = obj->GetAttrMap().GetProtoMsg(); + if (attr_map == nullptr) { + GELOGE(FAILED, "%s attr map is nullptr", name.c_str()); + return false; + } + auto it = attr_map->find(name); + if (it == attr_map->end()) { + return false; + } + attr_def = &it->second; + return true; + } + + inline static bool MutableAttrMapItem(AttrHolder *obj, const string &name, proto::AttrDef *&attr_def) { + if (obj == nullptr) { + GELOGE(FAILED, " %s obj is nullptr", name.c_str()); + return false; + } + auto attr_map = obj->MutableAttrMap().GetProtoMsg(); + if (attr_map == nullptr) { + GELOGE(FAILED, "%s attr map is nullptr", name.c_str()); + return false; + } + // Get or add + attr_def = &((*attr_map)[name]); + return true; + } +}; + +#define ATTR_VALUE_IMP_SET_ONE(ValType, proto_case, protoItem) \ + bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, ValType value) { \ + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::proto_case)) { \ + return false; \ + } \ + proto_attr_val.set_##protoItem(value); \ + return true; \ + } + +#define ATTR_VALUE_IMP_SET_LIST(ValType, proto_list_case, protoItem) \ + bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, ValType value) { \ + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, \ + proto::AttrDef_ListValue_ListValueType_##proto_list_case)) { \ + return false; \ + } \ + auto list = proto_attr_val.mutable_list(); \ + list->clear_##protoItem(); \ + for (const auto &item : value) { \ + list->add_##protoItem(item); \ + } \ + return true; \ + } + +ATTR_VALUE_IMP_SET_ONE(int64_t, kI, i) +ATTR_VALUE_IMP_SET_ONE(float, kF, f) +ATTR_VALUE_IMP_SET_ONE(const string &, kS, s) +ATTR_VALUE_IMP_SET_ONE(bool, kB, b) + +ATTR_VALUE_IMP_SET_LIST(const vector &, VT_LIST_INT, i) +ATTR_VALUE_IMP_SET_LIST(const vector &, VT_LIST_INT, i) +ATTR_VALUE_IMP_SET_LIST(const vector &, VT_LIST_INT, i) 
+ATTR_VALUE_IMP_SET_LIST(const vector &, VT_LIST_FLOAT, f) +ATTR_VALUE_IMP_SET_LIST(const vector &, VT_LIST_STRING, s) +ATTR_VALUE_IMP_SET_LIST(const vector &, VT_LIST_BOOL, b) + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const GeTensorDesc &value) { + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::kTd)) { + return false; + } + auto proto_msg = value.tensor_descriptor_.GetProtoMsg(); + if (proto_msg == nullptr) { + return false; + } + *proto_attr_val.mutable_td() = *proto_msg; + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector &value) { + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, + proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR_DESC)) { + return false; + } + auto list = proto_attr_val.mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + list->clear_td(); + for (const auto &item : value) { + auto proto_msg = item.tensor_descriptor_.GetProtoMsg(); + if (proto_msg == nullptr) { + proto_attr_val.clear_list(); + return false; + } + *list->add_td() = *proto_msg; + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const ConstGeTensorPtr &value) { + if (value) { + return SetValue(proto_attr_val, *value); + } else { + return SetValue(proto_attr_val, GeTensor()); + } +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const GeTensor &val) { + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::kT)) { + return false; + } + auto proto_msg = val.tensor_def_.GetProtoMsg(); + if (proto_msg == nullptr) { + GELOGE(FAILED, "Proto msg is nullptr"); + return false; + } + *proto_attr_val.mutable_t() = *proto_msg; + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector &value) { + vector constList(value.size()); + std::copy(value.begin(), value.end(), constList.begin()); + return SetValue(proto_attr_val, constList); +} + +bool 
GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector &value) { + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, + proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR)) { + return false; + } + auto list = proto_attr_val.mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + list->clear_t(); + for (const auto &item : value) { + if (item == nullptr) { + GELOGE(GRAPH_FAILED, "AttrUtils::SetListTensor item is nullptr"); + proto_attr_val.clear_list(); + return false; + } + auto proto_msg = item->tensor_def_.GetProtoMsg(); + if (proto_msg == nullptr) { + GELOGE(FAILED, "Proto msg is nullptr"); + proto_attr_val.clear_list(); + return false; + } + *list->add_t() = *proto_msg; + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector &value) { + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, + proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR)) { + return false; + } + auto list = proto_attr_val.mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + list->clear_t(); + for (const auto &item : value) { + auto proto_msg = item.tensor_def_.GetProtoMsg(); + if (proto_msg == nullptr) { + GELOGE(FAILED, "Proto msg is nullptr"); + proto_attr_val.clear_list(); + return false; + } + *list->add_t() = *proto_msg; + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const GeAttrValue::BYTES &value) { + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::kBt)) { + return false; + } + size_t val_size = value.GetSize(); + proto_attr_val.set_bt(value.GetData(), val_size); + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector &value) { + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, + proto::AttrDef_ListValue_ListValueType_VT_LIST_BYTES)) { + return false; + } + auto list = proto_attr_val.mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + 
list->clear_bt(); + for (const auto &item : value) { + list->add_bt(item.GetData(), item.GetSize()); + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const GeAttrValue::NamedAttrs &value) { + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::kFunc)) { + return false; + } + auto proto_msg = value.named_attrs_.GetProtoMsg(); + if (proto_msg == nullptr) { + GELOGE(FAILED, "Proto msg is nullptr"); + return false; + } + *proto_attr_val.mutable_func() = *proto_msg; + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector &value) { + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, + proto::AttrDef_ListValue_ListValueType_VT_LIST_NAMED_ATTRS)) { + return false; + } + auto list = proto_attr_val.mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + list->clear_na(); + for (const auto &item : value) { + auto proto_msg = item.named_attrs_.GetProtoMsg(); + if (proto_msg == nullptr) { + proto_attr_val.clear_list(); + return false; + } + *list->add_na() = *proto_msg; + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const ge::ComputeGraphPtr &value) { + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::kG)) { + return false; + } + ModelSerializeImp imp; + if (!imp.SerializeGraph(value, proto_attr_val.mutable_g())) { + GELOGE(GRAPH_FAILED, "AttrUtils::SetGraph SerializeGraph Failed"); + proto_attr_val.clear_g(); + return false; + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector &value) { + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, + proto::AttrDef_ListValue_ListValueType_VT_LIST_GRAPH)) { + return false; + } + auto list = proto_attr_val.mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + list->clear_g(); + + ModelSerializeImp imp; + for (const auto &item : value) { + if (!imp.SerializeGraph(item, list->add_g())) { + 
GELOGE(GRAPH_FAILED, "AttrUtils::SetListGraph SerializeGraph"); + proto_attr_val.clear_list(); + return false; + } + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector> &value) { + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::kListListInt)) { + return false; + } + proto_attr_val.clear_list_list_int(); + auto list_list_int = proto_attr_val.mutable_list_list_int(); + GE_CHECK_NOTNULL_EXEC(list_list_int, return false); + for (auto &list_int : value) { + auto list_item = list_list_int->add_list_list_i(); + GE_CHECK_NOTNULL_EXEC(list_item, return false); + for (auto &int_item : list_int) { + list_item->add_list_i(int_item); + } + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const vector &value) { + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, + proto::AttrDef_ListValue_ListValueType_VT_LIST_DATA_TYPE)) { + return false; + } + auto list = proto_attr_val.mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + list->clear_dt(); + for (const auto &item : value) { + list->add_dt(static_cast(item)); + } + return true; +} + +bool GeAttrValueImp::SetValue(proto::AttrDef &proto_attr_val, const ge::DataType &value) { + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::kDt)) { + return false; + } + proto_attr_val.set_dt(static_cast(value)); + + return true; +} + +#define ATTR_VALUE_IMP_GET_ONE(ValType, proto_case, protoItem) \ + bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, ValType value) { \ + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::proto_case)) { \ + return false; \ + } \ + value = proto_attr_val.protoItem(); \ + return true; \ + } + +#define ListValueItemCheck(protoItem) \ + [](const proto::AttrDef &proto_attr_val) { return proto_attr_val.list().protoItem##_size() > 0; } + +#define ATTR_VALUE_IMP_GET_LIST(ValType, proto_list_case, protoItem) 
\ + bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, vector &value) { \ + value.clear(); \ + if (!AttrUtilsHelper::GetValueCheckListType(proto_attr_val, \ + proto::AttrDef_ListValue_ListValueType_##proto_list_case, \ + ListValueItemCheck(protoItem))) { \ + return false; \ + } \ + auto &list = proto_attr_val.list(); \ + for (const auto &item : list.protoItem()) { \ + value.push_back(item); \ + } \ + return true; \ + } + +ATTR_VALUE_IMP_GET_ONE(int64_t &, kI, i) +ATTR_VALUE_IMP_GET_ONE(float &, kF, f) +ATTR_VALUE_IMP_GET_ONE(string &, kS, s) +ATTR_VALUE_IMP_GET_ONE(bool &, kB, b) + +ATTR_VALUE_IMP_GET_LIST(int64_t, VT_LIST_INT, i) +ATTR_VALUE_IMP_GET_LIST(float, VT_LIST_FLOAT, f) +ATTR_VALUE_IMP_GET_LIST(string, VT_LIST_STRING, s) +ATTR_VALUE_IMP_GET_LIST(bool, VT_LIST_BOOL, b) + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, GeTensorDesc &value) { + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::kTd)) { + return false; + } + auto proto_msg = value.tensor_descriptor_.GetProtoMsg(); + if (proto_msg == nullptr) { + return false; + } + *proto_msg = proto_attr_val.td(); + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + vector &value) { + if (!AttrUtilsHelper::GetValueCheckListType( + proto_attr_val, proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR_DESC, ListValueItemCheck(td))) { + return false; + } + auto &list = proto_attr_val.list(); + for (const auto &item : list.td()) { + value.emplace_back(GeTensorDesc()); + auto proto_msg = value.back().tensor_descriptor_.GetProtoMsg(); + if (proto_msg == nullptr) { + return false; + } + *proto_msg = item; + } + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &proto_owner, + GeTensorPtr &value) { + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::kT)) { + return false; + } 
+ value = std::shared_ptr( + new (std::nothrow) GeTensor(proto_owner, const_cast(proto_attr_val).mutable_t())); + GE_CHK_BOOL_RET_STATUS(value != nullptr, false, "value is nullptr"); + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &proto_owner, + vector &value) { + value.clear(); + if (!AttrUtilsHelper::GetValueCheckListType(proto_attr_val, proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR, + ListValueItemCheck(t))) { + return false; + } + auto list = const_cast(proto_attr_val).mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + for (auto &item : *(list->mutable_t())) { + std::shared_ptr temp_value = std::shared_ptr(new (std::nothrow) GeTensor(proto_owner, &item)); + GE_CHK_BOOL_RET_STATUS(temp_value != nullptr, false, "temp_value is nullptr"); + value.push_back(temp_value); + } + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, GeAttrValue::BYTES &value) { + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::kBt)) { + return false; + } + auto &proto_val = proto_attr_val.bt(); + GE_LOGI_IF(proto_val.size() == 0, "size res is 0."); + value = Buffer::CopyFrom(reinterpret_cast(proto_val.data()), proto_val.size()); + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + vector &value) { + value.clear(); + if (!AttrUtilsHelper::GetValueCheckListType(proto_attr_val, proto::AttrDef_ListValue_ListValueType_VT_LIST_BYTES, + ListValueItemCheck(bt))) { + return false; + } + auto &list = proto_attr_val.list(); + for (const auto &item : list.bt()) { + value.push_back(Buffer::CopyFrom((const uint8_t *)item.data(), item.size())); + } + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + GeAttrValue::NamedAttrs &value) { + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::kFunc)) { + 
return false; + } + auto proto_msg = value.named_attrs_.GetProtoMsg(); + if (proto_msg == nullptr) { + return false; + } + *proto_msg = proto_attr_val.func(); + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + vector &value) { + value.clear(); + if (!AttrUtilsHelper::GetValueCheckListType( + proto_attr_val, proto::AttrDef_ListValue_ListValueType_VT_LIST_NAMED_ATTRS, ListValueItemCheck(na))) { + return false; + } + auto &list = proto_attr_val.list(); + for (const auto &item : list.na()) { + value.emplace_back(GeAttrValue::NamedAttrs()); + if (value.empty()) { + return false; + } + auto proto_msg = value.back().named_attrs_.GetProtoMsg(); + if (proto_msg == nullptr) { + return false; + } + *proto_msg = item; + } + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, ComputeGraphPtr &value) { + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::kG)) { + return false; + } + ComputeGraphPtr graph = nullptr; + std::shared_ptr graph_def; + graph_def = ComGraphMakeShared(proto_attr_val.g()); + if (graph_def == nullptr) { + GELOGE(GRAPH_FAILED, "proto::GraphDef make shared failed"); + graph_def = nullptr; + return false; + } else { + ModelSerializeImp imp; + imp.SetProtobufOwner(graph_def); + if (!imp.UnserializeGraph(graph, *graph_def)) { + GELOGE(GRAPH_FAILED, "UnserializeGraph Failed"); + return false; + } + value = graph; + } + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + vector &value) { + value.clear(); + if (!AttrUtilsHelper::GetValueCheckListType(proto_attr_val, proto::AttrDef_ListValue_ListValueType_VT_LIST_GRAPH, + ListValueItemCheck(g))) { + return false; + } + auto &list = proto_attr_val.list(); + for (const auto &item : list.g()) { + std::shared_ptr graph_def; + graph_def = ComGraphMakeShared(item); + if (graph_def == nullptr) { + GELOGE(GRAPH_FAILED, 
"proto::GraphDef make shared failed"); + graph_def = nullptr; + return false; + } else { + ComputeGraphPtr graph = nullptr; + ModelSerializeImp imp; + imp.SetProtobufOwner(graph_def); + if (!imp.UnserializeGraph(graph, *graph_def)) { + GELOGE(GRAPH_FAILED, "UnserializeGraph Failed"); + return false; + } + value.push_back(graph); + } + } + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + vector> &value) { + value.clear(); + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::kListListInt)) { + return false; + } + + auto &list_listint = proto_attr_val.list_list_int().list_list_i(); + for (auto &list_int : list_listint) { + vector list_item(list_int.list_i().size()); + if (!list_int.list_i().empty()) { + (void)std::copy(list_int.list_i().begin(), list_int.list_i().end(), list_item.begin()); + } + value.push_back(list_item); + } + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + vector &value) { + if (!AttrUtilsHelper::GetValueCheckListType(proto_attr_val, proto::AttrDef_ListValue_ListValueType_VT_LIST_DATA_TYPE, + ListValueItemCheck(dt))) { + return false; + } + auto &list = proto_attr_val.list(); + for (const auto &item : list.dt()) { + value.emplace_back(static_cast(item)); + } + return true; +} + +bool GeAttrValueImp::GetValue(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, ge::DataType &value) { + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::kDt)) { + return false; + } + value = static_cast(proto_attr_val.dt()); + return true; +} + +GE_FUNC_HOST_VISIBILITY bool GeAttrValueImp::SetZeroCopyBytes(proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + Buffer &&buffer) { + if (!AttrUtilsHelper::SetValueCheckType(proto_attr_val, proto::AttrDef::kBt)) { + return false; + } + auto proto_msg = buffer.data_.GetProtoMsg(); + if (proto_msg == nullptr) { + return false; + } + 
proto_attr_val.set_bt(std::move(*proto_msg->mutable_bt())); + return true; +} + +bool GeAttrValueImp::GetZeroCopyBytes(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &proto_owner, + Buffer &buffer) { + if (!AttrUtilsHelper::GetValueCheckType(proto_attr_val, proto::AttrDef::kBt)) { + return false; + } + buffer = Buffer(proto_owner, &const_cast(proto_attr_val)); + return true; +} + +bool GeAttrValueImp::SetZeroCopyListBytes(proto::AttrDef &proto_attr_val, const ProtoMsgOwner &, + vector &list_buffer) { + if (!AttrUtilsHelper::SetValueCheckAndSetListType(proto_attr_val, + proto::AttrDef_ListValue_ListValueType_VT_LIST_BYTES)) { + return false; + } + auto list = proto_attr_val.mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + list->clear_bt(); + for (auto &item : list_buffer) { + auto proto_msg = item.data_.GetProtoMsg(); + if (proto_msg == nullptr) { + return false; + } + list->add_bt(std::move(*proto_msg->mutable_bt())); + } + return true; +} + +bool GeAttrValueImp::GetZeroCopyListBytes(const proto::AttrDef &proto_attr_val, const ProtoMsgOwner &proto_owner, + vector &list_buffer) { + list_buffer.clear(); + if (!AttrUtilsHelper::GetValueCheckListType(proto_attr_val, proto::AttrDef_ListValue_ListValueType_VT_LIST_BYTES, + ListValueItemCheck(bt))) { + return false; + } + auto list = const_cast(proto_attr_val).mutable_list(); + GE_CHECK_NOTNULL_EXEC(list, return false); + for (auto &item : *(list->mutable_bt())) { + list_buffer.emplace_back(Buffer(proto_owner, &item)); + } + return true; +} + +bool AttrUtils::HasAttr(ConstAttrHolderAdapter &&obj, const string &name) { + if (!obj) { + return false; + } + return obj->HasAttr(name); +} + +#define ATTR_UTILS_SET_IMP(FuncName, Type) \ + GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::Set##FuncName( \ + AttrHolderAdapter &&obj, const string &name, const Type &value) { \ + proto::AttrDef *proto_attr_val = nullptr; \ + if (!AttrUtilsHelper::MutableAttrMapItem(obj.get(), name, 
proto_attr_val) || proto_attr_val == nullptr) { \ + return false; \ + } \ + if (!GeAttrValueImp::SetValue(*proto_attr_val, value)) { \ + GELOGW("Set" #FuncName " failed key %s", name.c_str()); \ + return false; \ + } \ + return true; \ + } + +#define ATTR_UTILS_GET_IMP(FuncName, Type) \ + GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::Get##FuncName(ConstAttrHolderAdapter &&obj, \ + const string &name, Type &value) { \ + const proto::AttrDef *proto_attr_val = nullptr; \ + if (!AttrUtilsHelper::GetAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { \ + return false; \ + } \ + if (!GeAttrValueImp::GetValue(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), value)) { \ + GELOGW("Get" #FuncName " failed key %s", name.c_str()); \ + return false; \ + } \ + return true; \ + } + +#define ATTR_UTILS_SET_GET_IMP(FuncName, Type) \ + ATTR_UTILS_SET_IMP(FuncName, Type) \ + ATTR_UTILS_GET_IMP(FuncName, Type) + +ATTR_UTILS_SET_GET_IMP(Int, int64_t) +ATTR_UTILS_SET_GET_IMP(Float, float) +ATTR_UTILS_SET_GET_IMP(Bool, bool) +ATTR_UTILS_SET_GET_IMP(Str, string) +ATTR_UTILS_SET_GET_IMP(TensorDesc, GeTensorDesc) +ATTR_UTILS_SET_IMP(Tensor, GeTensorPtr) +ATTR_UTILS_SET_IMP(Tensor, ConstGeTensorPtr) +ATTR_UTILS_SET_IMP(Tensor, GeTensor) +ATTR_UTILS_SET_GET_IMP(NamedAttrs, GeAttrValue::NamedAttrs) +ATTR_UTILS_SET_GET_IMP(Bytes, Buffer) +ATTR_UTILS_SET_GET_IMP(Graph, ComputeGraphPtr) +ATTR_UTILS_SET_GET_IMP(ListListInt, vector>) + +ATTR_UTILS_SET_GET_IMP(ListInt, vector) +ATTR_UTILS_SET_IMP(ListInt, vector) +ATTR_UTILS_SET_IMP(ListInt, vector) +ATTR_UTILS_SET_GET_IMP(ListFloat, vector) +ATTR_UTILS_SET_GET_IMP(ListBool, vector) +ATTR_UTILS_SET_GET_IMP(ListStr, vector) +ATTR_UTILS_SET_GET_IMP(ListTensorDesc, vector) +ATTR_UTILS_SET_IMP(ListTensor, vector) +ATTR_UTILS_SET_IMP(ListTensor, vector) +ATTR_UTILS_SET_IMP(ListTensor, vector) +ATTR_UTILS_SET_GET_IMP(ListNamedAttrs, vector) +ATTR_UTILS_SET_GET_IMP(ListBytes, vector) 
+ATTR_UTILS_SET_GET_IMP(ListGraph, vector) +ATTR_UTILS_SET_GET_IMP(ListDataType, vector) +ATTR_UTILS_SET_GET_IMP(DataType, ge::DataType) + +bool AttrUtils::SetListTensor(AttrHolderAdapter &&obj, const string &name, + std::initializer_list &&value) { + return SetListTensor(std::move(obj), name, vector(value)); +} + +bool AttrUtils::GetTensor(ConstAttrHolderAdapter &&obj, const string &name, ConstGeTensorPtr &value) { + const proto::AttrDef *proto_attr_val = nullptr; + if (!AttrUtilsHelper::GetAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + GeTensorPtr tensor; + if (!GeAttrValueImp::GetValue(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), tensor)) { + return false; + } + value = tensor; + return true; +} + +bool AttrUtils::GetListTensor(ConstAttrHolderAdapter &&obj, const string &name, vector &value) { + value.clear(); + const proto::AttrDef *proto_attr_val = nullptr; + if (!AttrUtilsHelper::GetAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + vector tensor; + if (!GeAttrValueImp::GetValue(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), tensor)) { + return false; + } + value.insert(value.begin(), tensor.begin(), tensor.end()); + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::MutableTensor(AttrHolderAdapter &&obj, + const string &name, GeTensorPtr &value) { + const proto::AttrDef *proto_attr_val = nullptr; + if (!AttrUtilsHelper::GetAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + return GeAttrValueImp::GetValue(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), value); +} + +bool AttrUtils::MutableListTensor(AttrHolderAdapter &&obj, const string &name, vector &value) { + value.clear(); + const proto::AttrDef *proto_attr_val = nullptr; + if (!AttrUtilsHelper::GetAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + return 
GeAttrValueImp::GetValue(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), value); +} + +bool AttrUtils::SetListInt(AttrHolderAdapter &&obj, const string &name, std::initializer_list &&value) { + proto::AttrDef *proto_attr_val = nullptr; + if (!AttrUtilsHelper::MutableAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + return GeAttrValueImp::SetValue(*proto_attr_val, value); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::GetInt(ConstAttrHolderAdapter &&obj, const string &name, + int32_t &value) { + int64_t int64_val = 0; + if (!AttrUtils::GetInt(std::move(obj), name, int64_val)) { + return false; + } + if (int64_val > INT32_MAX) { + GELOGE(GRAPH_FAILED, "%ld int64_t value cannot cast to int32_t", int64_val); + return false; + } + value = static_cast(int64_val); + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::GetInt(ConstAttrHolderAdapter &&obj, const string &name, + uint32_t &value) { + int64_t int64_val = 0; + if (!AttrUtils::GetInt(std::move(obj), name, int64_val)) { + return false; + } + if (int64_val > UINT32_MAX) { + GELOGE(GRAPH_FAILED, "%ld int64_t value cannot cast to uint32_t", int64_val); + return false; + } + value = static_cast(int64_val); + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::GetListInt(ConstAttrHolderAdapter &&obj, + const string &name, vector &value) { + value.clear(); + vector int64_list; + if (!GetListInt(std::move(obj), name, int64_list)) { + return false; + } + for (size_t i = 0; i < int64_list.size(); ++i) { + if (int64_list[i] > INT32_MAX) { + GELOGE(GRAPH_FAILED, "index %zu %ld int64_t value cannot cast to int32_t", i, int64_list[i]); + return false; + } + } + value.insert(value.begin(), int64_list.begin(), int64_list.end()); + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::GetListInt(ConstAttrHolderAdapter &&obj, + const string &name, vector &value) { + 
value.clear(); + vector int64_list; + if (!GetListInt(std::move(obj), name, int64_list)) { + return false; + } + for (size_t i = 0; i < int64_list.size(); ++i) { + if (int64_list[i] > UINT32_MAX) { + GELOGE(GRAPH_FAILED, "index %zu %ld int64_t value cannot cast to uint32_t", i, int64_list[i]); + return false; + } + } + value.insert(value.begin(), int64_list.begin(), int64_list.end()); + return true; +} + +bool AttrUtils::SetListOpDesc(AttrHolderAdapter &&obj, const string &name, const vector &value) { + if (obj) { + vector bytes_vals; + for (auto &item : value) { + ModelSerialize serialize; + auto buffer = serialize.SerializeOpDesc(item); + if (buffer.GetSize() == 0) { + return false; + } + bytes_vals.push_back(buffer); + } + return SetZeroCopyListBytes(std::move(obj), name, bytes_vals); + } + return false; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::SetListOpDesc(AttrHolderAdapter &&obj, + const string &name, + const vector &value) { + if (obj) { + vector bytes_vals; + for (auto &item : value) { + ModelSerialize serialize; + auto buffer = serialize.SerializeOpDesc(item); + if (buffer.GetSize() == 0) { + return false; + } + bytes_vals.push_back(buffer); + } + return SetZeroCopyListBytes(std::move(obj), name, bytes_vals); + } + return false; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::GetListOpDesc(ConstAttrHolderAdapter &&obj, + const string &name, + vector &value) { + value.clear(); + + vector bytes_vals; + if (!GetZeroCopyListBytes(std::move(obj), name, bytes_vals)) { + return false; + } + for (const auto &item : bytes_vals) { + ModelSerialize serialize; + auto op_desc = serialize.UnserializeOpDesc(item.GetData(), item.GetSize()); + value.push_back(op_desc); + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::SetZeroCopyBytes(AttrHolderAdapter &&obj, + const string &name, Buffer &&buffer) { + // Value will be moved + proto::AttrDef *proto_attr_val = nullptr; + if 
(!AttrUtilsHelper::MutableAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + return GeAttrValueImp::SetZeroCopyBytes(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), std::move(buffer)); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool AttrUtils::GetZeroCopyBytes(ConstAttrHolderAdapter &&obj, + const string &name, Buffer &buffer) { + const proto::AttrDef *proto_attr_val = nullptr; + if (!AttrUtilsHelper::GetAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + return GeAttrValueImp::GetZeroCopyBytes(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), buffer); +} + +bool AttrUtils::SetZeroCopyListBytes(AttrHolderAdapter &&obj, const string &name, vector &list_buffer) { + // Value will be moved + proto::AttrDef *proto_attr_val = nullptr; + if (!AttrUtilsHelper::MutableAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + return GeAttrValueImp::SetZeroCopyListBytes(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), list_buffer); +} + +bool AttrUtils::GetZeroCopyListBytes(ConstAttrHolderAdapter &&obj, const string &name, vector &list_buffer) { + list_buffer.clear(); + const proto::AttrDef *proto_attr_val = nullptr; + if (!AttrUtilsHelper::GetAttrMapItem(obj.get(), name, proto_attr_val) || proto_attr_val == nullptr) { + return false; + } + return GeAttrValueImp::GetZeroCopyListBytes(*proto_attr_val, obj->GetAttrMap().GetProtoOwner(), list_buffer); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDescPtr AttrUtils::CloneOpDesc(const ConstOpDescPtr &org_op_desc) { + if (org_op_desc == nullptr) { + GELOGE(GRAPH_FAILED, "org_op_desc is null"); + return nullptr; + } + std::shared_ptr op_def; + op_def = ComGraphMakeShared(); + if (op_def == nullptr) { + GELOGE(GRAPH_FAILED, "proto::OpDef make shared failed"); + return nullptr; + } + ModelSerializeImp imp; + (void)imp.SerializeOpDesc(org_op_desc, op_def.get()); + + 
imp.SetProtobufOwner(op_def); + OpDescPtr op_desc = nullptr; + GE_CHK_BOOL_EXEC(imp.UnserializeOpDesc(op_desc, *op_def), return op_desc, "op_desc unserialize failed"); + op_desc->extAttrs_ = org_op_desc->extAttrs_; + + return op_desc; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDescPtr AttrUtils::CopyOpDesc(const ConstOpDescPtr &org_op_desc) { + if (org_op_desc == nullptr) { + GELOGE(GRAPH_FAILED, "org_op_desc is null"); + return nullptr; + } + std::shared_ptr op_def = ComGraphMakeShared(); + if (op_def == nullptr) { + GELOGE(GRAPH_FAILED, "proto::OpDef make shared failed"); + return nullptr; + } + ModelSerializeImp imp; + (void)imp.SerializeOpDesc(org_op_desc, op_def.get()); + + imp.SetProtobufOwner(op_def); + OpDescPtr op_desc = nullptr; + GE_CHK_BOOL_EXEC(imp.UnserializeOpDesc(op_desc, *op_def), return op_desc, "op_desc unserialize failed"); + + op_desc->extAttrs_ = org_op_desc->extAttrs_; + + op_desc->input_name_idx_.insert(org_op_desc->input_name_idx_.begin(), org_op_desc->input_name_idx_.end()); + op_desc->optional_input_names_.insert(org_op_desc->optional_input_names_.begin(), + org_op_desc->optional_input_names_.end()); + op_desc->output_name_idx_.insert(org_op_desc->output_name_idx_.begin(), org_op_desc->output_name_idx_.end()); + + op_desc->output_name_idx_.insert(org_op_desc->output_name_idx_.begin(), org_op_desc->output_name_idx_.end()); + + op_desc->infer_func_ = org_op_desc->infer_func_; + op_desc->infer_format_func_ = org_op_desc->infer_format_func_; + op_desc->verifier_func_ = org_op_desc->verifier_func_; + + return op_desc; +} +} // namespace ge diff --git a/src/common/graph/ge_tensor.cc b/src/common/graph/ge_tensor.cc new file mode 100644 index 00000000..7c7e0433 --- /dev/null +++ b/src/common/graph/ge_tensor.cc @@ -0,0 +1,929 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/ge_tensor.h" + +#include +#include +#include +#include + +#include "debug/ge_attr_define.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_attr_value.h" +#include "graph/model_serialize.h" +#include "proto/ge_ir.pb.h" +#include "utils/attr_utils.h" +#include "utils/ge_ir_utils.h" +#include "utils/tensor_utils.h" +#include "utils/type_utils.h" + +namespace ge { +static const char *const kKeyDataTypeSelfDefined = "__tensor_desc_data_type__"; + +static const std::map kDataTypeMap = { + {DT_UNDEFINED, proto::DT_UNDEFINED}, + {DT_FLOAT, proto::DT_FLOAT}, + {DT_FLOAT16, proto::DT_FLOAT16}, + {DT_INT8, proto::DT_INT8}, + {DT_UINT8, proto::DT_UINT8}, + {DT_INT16, proto::DT_INT16}, + {DT_UINT16, proto::DT_UINT16}, + {DT_INT32, proto::DT_INT32}, + {DT_INT64, proto::DT_INT64}, + {DT_UINT32, proto::DT_UINT32}, + {DT_UINT64, proto::DT_UINT64}, + {DT_BOOL, proto::DT_BOOL}, + {DT_DOUBLE, proto::DT_DOUBLE}, + {DT_DUAL, proto::DT_DUAL}, + {DT_DUAL_SUB_INT8, proto::DT_DUAL_SUB_INT8}, + {DT_DUAL_SUB_UINT8, proto::DT_DUAL_SUB_UINT8}, + {DT_COMPLEX64, proto::DT_COMPLEX64}, + {DT_COMPLEX128, proto::DT_COMPLEX128}, + {DT_QINT8, proto::DT_QINT8}, + {DT_QINT16, proto::DT_QINT16}, + {DT_QINT32, proto::DT_QINT32}, + {DT_QUINT8, proto::DT_QUINT8}, + {DT_QUINT16, proto::DT_QUINT16}, + {DT_RESOURCE, proto::DT_RESOURCE}, + {DT_STRING_REF, proto::DT_STRING_REF}, + {DT_STRING, proto::DT_STRING}, +}; + +static const std::map kDataTypeSelfDefinedMap = { + {DT_DUAL, 13}, {DT_DUAL_SUB_INT8, 14}, {DT_DUAL_SUB_UINT8, 
15}, {DT_COMPLEX64, 16}, {DT_COMPLEX128, 17}, + {DT_QINT8, 18}, {DT_QINT16, 19}, {DT_QINT32, 20}, {DT_QUINT8, 21}, {DT_QUINT16, 22}, +}; + +GeShape::GeShape() { shape_def_.InitDefault(); } + +// Default +GeShape::GeShape(std::vector s) : GeShape() { + auto proto_msg = shape_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto i : s) { + proto_msg->add_dim(i); + } + } +} + +size_t GeShape::GetDimNum() const { + auto proto_msg = shape_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + if (proto_msg->dim_size() >= 0) { + return proto_msg->dim_size(); + } else { + return 0; + } + } + return 0; +} + +int64_t GeShape::GetDim(size_t idx) const { + auto proto_msg = shape_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + if (proto_msg->dim_size() > static_cast(idx)) { + return proto_msg->dim(static_cast(idx)); + } + } + return 0; +} + +graphStatus GeShape::SetDim(size_t idx, int64_t value) { + auto proto_msg = shape_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + auto dims = proto_msg->mutable_dim(); + GE_CHECK_NOTNULL(dims); + if (dims->empty()) { + GELOGE(GRAPH_FAILED, "shape is empty"); + return GRAPH_FAILED; + } + if (static_cast(idx) >= dims->size()) { + GELOGE(GRAPH_FAILED, "idx is out of range"); + return GRAPH_FAILED; + } + proto_msg->set_dim(static_cast(idx), value); + } + return GRAPH_SUCCESS; +} + +std::vector GeShape::GetDims() const { + vector dims; + auto proto_msg = shape_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto i : proto_msg->dim()) { + dims.push_back(i); + } + } + return dims; +} + +std::string GeShape::ToString() const { + auto proto_msg = shape_def_.GetProtoMsg(); + if (proto_msg == nullptr) { + return ""; + } + + std::stringstream ss; + bool first = true; + for (auto i : proto_msg->dim()) { + if (first) { + first = false; + } else { + ss << ","; + } + ss << i; + } + return ss.str(); +} + +int64_t GeShape::GetShapeSize() const { + int64_t res = 1; + auto proto_msg = shape_def_.GetProtoMsg(); + if (proto_msg != nullptr) 
{ + if (proto_msg->dim().empty()) { + return 0; + } + for (auto i : proto_msg->dim()) { + res *= i; + } + } + return res; +} + +const string TENSOR_UTILS_SIZE = "size"; +const string TENSOR_UTILS_WEIGHT_SIZE = "weight_size"; +const string TENSOR_UTILS_REUSE_INPUT = "reuse_input"; +const string TENSOR_UTILS_OUTPUT_TENSOR = "output_tensor"; +const string TENSOR_UTILS_DEVICE_TYPE = "device_type"; +const string TENSOR_UTILS_INPUT_TENSOR = "input_tensor"; +const string TENSOR_UTILS_REAL_DIM_CNT = "real_dim_cnt"; +const string TENSOR_UTILS_REUSE_INPUT_INDEX = "reuse_input_index"; +const string TENSOR_UTILS_DATA_OFFSET = "data_offset"; +const string TENSOR_UTILS_CMPS_SIZE = "cmps_size"; +const string TENSOR_UTILS_CMPS_TAB = "cmps_tab"; +const string TENSOR_UTILS_CMPS_TAB_OFFSET = "cmps_tab_offset"; +const string TENSOR_UTILS_CMPSINFO = "cmps_info"; +const string TENSOR_UTILS_ALLOFFSET_QUANTIZE_INFO = "alloffset_quantize_info"; +const string TENSOR_UTILS_RC = "rc"; +const string TENSOR_UTILS_ORIGIN_SHAPE = "origin_shape"; +const string TENSOR_UTILS_ORIGIN_FORMAT = "origin_format"; +const string TENSOR_UTILS_ORIGIN_DATA_TYPE = "origin_data_type"; + +GeShape::GeShape(const ProtoMsgOwner &proto_owner, proto::ShapeDef *proto_msg) : shape_def_(proto_owner, proto_msg) {} + +GeShape::GeShape(const GeShape &other) : GeShape() { shape_def_.CopyValueFrom(other.shape_def_); } + +GeShape::GeShape(GeShape &&other) : GeShape() { shape_def_.MoveValueFrom(std::move(other.shape_def_)); } + +GeShape &GeShape::operator=(const GeShape &other) { + if (&other != this) { + shape_def_.CopyValueFrom(other.shape_def_); + } + return *this; +} + +GeShape &GeShape::operator=(GeShape &&other) { + if (&other != this) { + shape_def_.CopyValueFrom(std::move(other.shape_def_)); + } + return *this; +} + +GeTensorDesc::GeTensorDesc() { + tensor_descriptor_.InitDefault(); + SetDataType(DT_FLOAT); + Init(); +} + +// Default +GeTensorDesc::GeTensorDesc(GeShape shape, Format format, DataType dt) : GeTensorDesc() 
{ + SetFormat(format); + SetDataType(dt); + ShapeReference() = std::move(shape); +} + +// Default +GeTensorDesc::GeTensorDesc(const GeTensorDesc &desc) : GeTensorDesc() { + tensor_descriptor_.CopyValueFrom(desc.tensor_descriptor_); +} + +// Default +GeTensorDesc::GeTensorDesc(GeTensorDesc &&desc) : GeTensorDesc() { + tensor_descriptor_.MoveValueFrom(std::move(desc.tensor_descriptor_)); +} + +GeTensorDesc::GeTensorDesc(const ProtoMsgOwner &proto_owner, proto::TensorDescriptor *proto_msg) + : tensor_descriptor_(proto_owner, proto_msg) { + if (proto_msg != nullptr && !proto_msg->has_out_attr()) { + proto_msg->set_has_out_attr(true); + + int64_t size = 0; + (void)AttrUtils::GetInt(this, TENSOR_UTILS_SIZE, size); + proto_msg->set_size(size); + + int64_t weight_size = 0; + (void)AttrUtils::GetInt(this, TENSOR_UTILS_WEIGHT_SIZE, weight_size); + proto_msg->set_weight_size(weight_size); + + bool reuse_input = false; + (void)AttrUtils::GetBool(this, TENSOR_UTILS_REUSE_INPUT, reuse_input); + proto_msg->set_reuse_input(reuse_input); + + bool output_tensor = false; + (void)AttrUtils::GetBool(this, TENSOR_UTILS_OUTPUT_TENSOR, output_tensor); + proto_msg->set_output_tensor(output_tensor); + + string device_type = "NPU"; + (void)AttrUtils::GetStr(this, TENSOR_UTILS_DEVICE_TYPE, device_type); + proto_msg->set_device_type(device_type); + + bool input_tensor = false; + (void)AttrUtils::GetBool(this, TENSOR_UTILS_INPUT_TENSOR, input_tensor); + proto_msg->set_input_tensor(input_tensor); + + int64_t real_dim_cnt = 0; + (void)AttrUtils::GetInt(this, TENSOR_UTILS_REAL_DIM_CNT, real_dim_cnt); + proto_msg->set_real_dim_cnt(real_dim_cnt); + + int64_t reuse_input_index = 0; + (void)AttrUtils::GetInt(this, TENSOR_UTILS_REUSE_INPUT_INDEX, reuse_input_index); + proto_msg->set_reuse_input_index(reuse_input_index); + + int64_t data_offset = 0; + (void)AttrUtils::GetInt(this, TENSOR_UTILS_DATA_OFFSET, data_offset); + proto_msg->set_data_offset(data_offset); + + int64_t cmps_size = 0; + 
(void)AttrUtils::GetInt(this, TENSOR_UTILS_CMPS_SIZE, cmps_size); + proto_msg->set_cmps_size(cmps_size); + + string cmps_tab; + (void)AttrUtils::GetStr(this, TENSOR_UTILS_CMPS_TAB, cmps_tab); + proto_msg->set_cmps_tab(cmps_tab); + + int64_t cmps_tab_offset = 0; + (void)AttrUtils::GetInt(this, TENSOR_UTILS_CMPS_TAB_OFFSET, cmps_tab_offset); + proto_msg->set_cmps_tab_offset(cmps_tab_offset); + } +} + +bool GeTensorDesc::GeTensorDescAttrsAreEqual(const GeTensorDesc &r_ge_tensor_desc) const { + const auto &tensor_descriptor = this->tensor_descriptor_.GetProtoMsg(); + const auto &r_tensor_descriptor = r_ge_tensor_desc.tensor_descriptor_.GetProtoMsg(); + if ((tensor_descriptor != nullptr) && (r_tensor_descriptor != nullptr)) { + // Message TensorDescriptor in ge_ir.proto + return (IsEqual(tensor_descriptor->name(), r_tensor_descriptor->name(), "TensorDescriptor.name()") && + IsEqual(tensor_descriptor->dtype(), r_tensor_descriptor->dtype(), "TensorDescriptor.dtype()") && + // Message ShapeDef in ge_ir.proto + IsEqual(ToString(tensor_descriptor->shape().dim()), ToString(r_tensor_descriptor->shape().dim()), + "TensorDescriptor.shape().dim()") && + IsEqual(tensor_descriptor->layout(), r_tensor_descriptor->layout(), "TensorDescriptor.layout()") && + IsEqual(tensor_descriptor->has_out_attr(), r_tensor_descriptor->has_out_attr(), + "TensorDescriptor.has_out_attr()") && + IsEqual(tensor_descriptor->size(), r_tensor_descriptor->size(), "TensorDescriptor.size()") && + IsEqual(tensor_descriptor->weight_size(), r_tensor_descriptor->weight_size(), + "TensorDescriptor.weight_size()") && + IsEqual(tensor_descriptor->reuse_input(), r_tensor_descriptor->reuse_input(), + "TensorDescriptor.reuse_input()") && + IsEqual(tensor_descriptor->output_tensor(), r_tensor_descriptor->output_tensor(), + "TensorDescriptor.output_tensor()") && + IsEqual(tensor_descriptor->device_type(), r_tensor_descriptor->device_type(), + "TensorDescriptor.device_type()") && + 
IsEqual(tensor_descriptor->input_tensor(), r_tensor_descriptor->input_tensor(), + "TensorDescriptor.input_tensor()") && + IsEqual(tensor_descriptor->real_dim_cnt(), r_tensor_descriptor->real_dim_cnt(), + "TensorDescriptor.real_dim_cnt()") && + IsEqual(tensor_descriptor->reuse_input_index(), r_tensor_descriptor->reuse_input_index(), + "TensorDescriptor.reuse_input_index()") && + IsEqual(tensor_descriptor->data_offset(), r_tensor_descriptor->data_offset(), + "TensorDescriptor.data_offset()") && + IsEqual(tensor_descriptor->cmps_size(), r_tensor_descriptor->cmps_size(), "TensorDescriptor.cmps_size()") && + IsEqual(tensor_descriptor->cmps_tab(), r_tensor_descriptor->cmps_tab(), "TensorDescriptor.cmps_tab()") && + IsEqual(tensor_descriptor->cmps_tab_offset(), r_tensor_descriptor->cmps_tab_offset(), + "TensorDescriptor.cmps_tab_offset()")); + } else { + return ((tensor_descriptor == nullptr) && (r_tensor_descriptor == nullptr)); + } +} + +bool GeTensorDesc::operator==(const GeTensorDesc &r_ge_tensor_desc) const { + return GeTensorDescAttrsAreEqual(r_ge_tensor_desc); +} + +GeShape &GeTensorDesc::ShapeReference() const { + if (tensor_descriptor_.GetProtoMsg() != nullptr) { + GeShape refShape(tensor_descriptor_.GetProtoOwner(), tensor_descriptor_.GetProtoMsg()->mutable_shape()); + __shape_.RefTo(refShape); + } else { + GeShape refShape(tensor_descriptor_.GetProtoOwner(), nullptr); + __shape_.RefTo(refShape); + } + return __shape_; +} + +void GeTensorDesc::Init() { + SetFormat(FORMAT_ND); + SetOriginFormat(FORMAT_ND); + TensorUtils::SetDeviceType(*this, DeviceType::NPU); + if (tensor_descriptor_.GetProtoMsg() == nullptr) { + GELOGE(GRAPH_FAILED, "ProtoType nullptr."); + return; + } + tensor_descriptor_.GetProtoMsg()->set_has_out_attr(true); +} + +ProtoAttrMapHelper GeTensorDesc::MutableAttrMap() { + if (tensor_descriptor_.GetProtoMsg() != nullptr) { + return ProtoAttrMapHelper(tensor_descriptor_.GetProtoOwner(), tensor_descriptor_.GetProtoMsg()->mutable_attr()); + } + return 
ProtoAttrMapHelper(tensor_descriptor_.GetProtoOwner(), nullptr); +} + +ConstProtoAttrMapHelper GeTensorDesc::GetAttrMap() const { + if (tensor_descriptor_.GetProtoMsg() != nullptr) { + return ConstProtoAttrMapHelper(tensor_descriptor_.GetProtoOwner(), + tensor_descriptor_.GetProtoMsg()->mutable_attr()); + } + return ConstProtoAttrMapHelper(tensor_descriptor_.GetProtoOwner(), nullptr); +} + +void GeTensorDesc::Update(GeShape shape, Format format, DataType dt) { + ShapeReference() = std::move(shape); + SetFormat(format); + SetDataType(dt); +} +GeShape GeTensorDesc::GetShape() const { return ShapeReference(); } + +GeShape &GeTensorDesc::MutableShape() { return ShapeReference(); } + +void GeTensorDesc::SetShape(GeShape shape) { ShapeReference() = std::move(shape); } + +GeShape GeTensorDesc::GetOriginShape() const { + vector origin_shape; + if (!AttrUtils::GetListInt(this, TENSOR_UTILS_ORIGIN_SHAPE, origin_shape)) { + return GeShape(); + } + return GeShape(origin_shape); +} + +void GeTensorDesc::SetOriginShape(const GeShape &origin_shape) { + std::vector origin_shape_tmp = origin_shape.GetDims(); + (void)AttrUtils::SetListInt(this, TENSOR_UTILS_ORIGIN_SHAPE, origin_shape_tmp); +} + +Format GeTensorDesc::GetFormat() const { + auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + return TypeUtils::SerialStringToFormat(tensor_descriptor_msg->layout()); + } + return FORMAT_RESERVED; +} + +void GeTensorDesc::SetFormat(Format format) { + auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_layout(TypeUtils::FormatToSerialString(format)); + } +} + +Format GeTensorDesc::GetOriginFormat() const { + std::string origin_format_str; + if (!AttrUtils::GetStr(this, TENSOR_UTILS_ORIGIN_FORMAT, origin_format_str)) { + // Can not get the certificate and it's not set, return directly + return FORMAT_RESERVED; + } + if (origin_format_str == "RESERVED") { + 
return FORMAT_RESERVED; + } + return TypeUtils::SerialStringToFormat(origin_format_str); +} + +void GeTensorDesc::SetOriginFormat(Format origin_format) { + std::string origin_format_str = "RESERVED"; + if (origin_format != FORMAT_RESERVED) { + origin_format_str = TypeUtils::FormatToSerialString(origin_format); + } + (void)AttrUtils::SetStr(this, TENSOR_UTILS_ORIGIN_FORMAT, origin_format_str); +} + +DataType GeTensorDesc::GetDataType() const { + auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg == nullptr) { + return DT_UNDEFINED; + } + auto &attr_map = *(tensor_descriptor_msg->mutable_attr()); + // Data type + auto it_data_type = attr_map.find(kKeyDataTypeSelfDefined); + if (it_data_type != attr_map.end()) { + int64_t data_type_proto = it_data_type->second.i(); + for (auto it : kDataTypeSelfDefinedMap) { + if (it.second == data_type_proto) { + return it.first; + } + } + } else { + auto data_type_proto = tensor_descriptor_msg->dtype(); + for (auto it : kDataTypeMap) { + if (it.second == data_type_proto) { + return it.first; + } + } + } + return DT_UNDEFINED; +} + +void GeTensorDesc::SetDataType(DataType dataType) { + auto tensor_descriptor_msg = tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg == nullptr) { + return; + } + auto &attr_maps = *(tensor_descriptor_msg->mutable_attr()); + (void)attr_maps.erase(kKeyDataTypeSelfDefined); + + // Data type + auto it = kDataTypeMap.find(dataType); + if (it != kDataTypeMap.end()) { + tensor_descriptor_msg->set_dtype(it->second); + return; + } + auto it2 = kDataTypeSelfDefinedMap.find(dataType); + if (it2 != kDataTypeSelfDefinedMap.end()) { + attr_maps[kKeyDataTypeSelfDefined].set_i(it2->second); + } +} + +void GeTensorDesc::SetOriginDataType(DataType origin_data_type) { + std::string origin_data_type_str = "RESERVED"; + if (origin_data_type != DT_UNDEFINED) { + origin_data_type_str = TypeUtils::DataTypeToSerialString(origin_data_type); + } + (void)AttrUtils::SetStr(this, 
TENSOR_UTILS_ORIGIN_DATA_TYPE, origin_data_type_str); +} + +DataType GeTensorDesc::GetOriginDataType() const { + std::string origin_data_type_str; + if (!AttrUtils::GetStr(this, TENSOR_UTILS_ORIGIN_DATA_TYPE, origin_data_type_str)) { + return DT_UNDEFINED; + } + if (origin_data_type_str == "RESERVED") { + return DT_UNDEFINED; + } + return TypeUtils::SerialStringToDataType(origin_data_type_str); +} + +graphStatus GeTensorDesc::IsValid() const { + auto dtype = this->GetDataType(); + auto format = this->GetFormat(); + if (dtype == DT_UNDEFINED && format == FORMAT_RESERVED) { + return GRAPH_PARAM_INVALID; + } + return GRAPH_SUCCESS; +} + +GeTensorDesc GeTensorDesc::Clone() const { return *this; } + +GeTensorDesc &GeTensorDesc::operator=(const GeTensorDesc &desc) { + if (&desc != this) { + tensor_descriptor_.CopyValueFrom(desc.tensor_descriptor_); + } + return *this; +} + +GeTensorDesc &GeTensorDesc::operator=(GeTensorDesc &&desc) { + if (&desc != this) { + tensor_descriptor_.CopyValueFrom(std::move(desc.tensor_descriptor_)); + } + return *this; +} + +GeTensor::GeTensor::GeTensor() { + tensor_def_.InitDefault(); + // Default init desc + DescReference() = GeTensorDesc(); +} + +GeTensor::GeTensor(const GeTensorDesc &tensor_desc) : GeTensor() { DescReference() = tensor_desc; } + +GeTensor::GeTensor(const GeTensorDesc &tensor_desc, const vector &data) : GeTensor() { + DescReference() = tensor_desc; + auto proto_msg = tensor_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->set_data(data.data(), data.size()); + } +} + +GeTensor::GeTensor(const GeTensorDesc &tensor_desc, const uint8_t *data, size_t size) : GeTensor() { + DescReference() = tensor_desc; + auto proto_msg = tensor_def_.GetProtoMsg(); + if (proto_msg != nullptr && data != nullptr) { + proto_msg->set_data(data, size); + } +} + +GeTensor::GeTensor(GeTensorDesc &&tensor_desc, vector &&data) : GeTensor() { + DescReference() = std::move(tensor_desc); + auto proto_msg = tensor_def_.GetProtoMsg(); + if 
(proto_msg != nullptr) { + proto_msg->set_data(data.data(), data.size()); + } +} + +GeTensor::GeTensor(const GeTensorDesc &tensor_desc, const Buffer &data) : GeTensor() { + DescReference() = tensor_desc; + auto proto_msg = tensor_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + if (data.size() == 0) { + GELOGI("GetSize res is 0."); + } + if (data.data() == nullptr) { + GELOGI("data addr is null."); + } + proto_msg->set_data(data.GetData(), data.GetSize()); + } +} + +GeTensor::GeTensor(const ProtoMsgOwner &proto_owner, proto::TensorDef *proto_msg) + : tensor_def_(proto_owner, proto_msg) {} + +GeTensorDesc GeTensor::GetTensorDesc() const { return DescReference(); } + +GeTensorDesc &GeTensor::MutableTensorDesc() { return DescReference(); } + +GeTensorDesc &GeTensor::DescReference() const { + if (tensor_def_.GetProtoMsg() != nullptr) { + GeTensorDesc tensor_desc(tensor_def_.GetProtoOwner(), tensor_def_.GetProtoMsg()->mutable_desc()); + __desc_.RefTo(tensor_desc); + } else { + GeTensorDesc tensor_desc(tensor_def_.GetProtoOwner(), nullptr); + __desc_.RefTo(tensor_desc); + } + return __desc_; +} + +void GeTensor::SetTensorDesc(const GeTensorDesc &tensor_desc) { + DescReference() = tensor_desc; +} + +const Buffer GeTensor::GetData() const { + auto proto_msg = tensor_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + return Buffer(tensor_def_.GetProtoOwner(), proto_msg->mutable_data()); + } + return Buffer(); +} + +Buffer GeTensor::MutableData() { + auto proto_msg = tensor_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + return Buffer(tensor_def_.GetProtoOwner(), proto_msg->mutable_data()); + } + return Buffer(); +} + +graphStatus GeTensor::SetData(vector &&data) { + auto proto_msg = tensor_def_.GetProtoMsg(); + GE_CHECK_NOTNULL(proto_msg); + proto_msg->set_data(data.data(), data.size()); + return GRAPH_SUCCESS; +} + +graphStatus GeTensor::SetData(const vector &data) { + auto proto_msg = tensor_def_.GetProtoMsg(); + GE_CHECK_NOTNULL(proto_msg); + 
proto_msg->set_data(data.data(), data.size()); + return GRAPH_SUCCESS; +} + +graphStatus GeTensor::SetData(const uint8_t *data, size_t size) { + GE_CHECK_NOTNULL(data); + auto proto_msg = tensor_def_.GetProtoMsg(); + GE_CHECK_NOTNULL(proto_msg); + proto_msg->set_data(data, size); + return GRAPH_SUCCESS; +} + +graphStatus GeTensor::SetData(const Buffer &data) { + auto proto_msg = tensor_def_.GetProtoMsg(); + GE_CHECK_NOTNULL(proto_msg); + if (data.size() == 0) { + GELOGI("GetSize res is 0."); + } + if (data.data() == nullptr) { + GELOGI("data addr is null."); + } + proto_msg->set_data(data.data(), data.size()); + return GRAPH_SUCCESS; +} + +GeTensor GeTensor::Clone() const { + GeTensor tensor; + tensor.tensor_def_.CopyValueFrom(tensor_def_); + return tensor; +} + +GeTensor::GeTensor(const GeTensor &other) { tensor_def_ = other.tensor_def_; } + +GeTensor &GeTensor::operator=(const GeTensor &other) { + if (&other != this) { + tensor_def_ = other.tensor_def_; + } + return *this; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetSize(const GeTensorDesc &tensor_desc, + uint32_t &size) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + GE_CHECK_NOTNULL(tensor_descriptor_msg); + size = static_cast(tensor_descriptor_msg->size()); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetSize(GeTensorDesc &tensor_desc, uint32_t size) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_size(size); + } +} + +uint32_t TensorUtils::GetWeightSize(const GeTensorDesc &tensor_desc) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + return static_cast(tensor_descriptor_msg->weight_size()); + } + return 0; +} + +uint32_t TensorUtils::GetWeightSize(const GeTensor &tensor) { return GetWeightSize(tensor.GetTensorDesc()); 
} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY uint32_t TensorUtils::GetWeightSize(const ConstGeTensorPtr &tensor_ptr) { + if (tensor_ptr == nullptr) { + return 0; + } + return GetWeightSize(*tensor_ptr); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY uint8_t *TensorUtils::GetWeightAddr(const ConstGeTensorPtr &tensor_ptr, + uint8_t *base) { + if (tensor_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "tensor_ptr is null."); + return nullptr; + } + return GetWeightAddr(*tensor_ptr, base); +} + +uint8_t *TensorUtils::GetWeightAddr(const GeTensor &tensor, uint8_t *base) { + if (base == nullptr) { + GELOGE(GRAPH_FAILED, "base is null."); + return nullptr; + } + int64_t weight_data_offset = 0; + if (GetDataOffset(tensor.GetTensorDesc(), weight_data_offset) != GRAPH_SUCCESS) return nullptr; + + if (weight_data_offset == 0) { + // The weight of offset 0 is still in const op, still get from ATTR_NAME_WEIGHTS. + return const_cast(tensor.GetData().data()); + } + + return base + weight_data_offset; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetWeightSize(GeTensorDesc &tensor_desc, + uint32_t size) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_weight_size(size); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetReuseInput(const GeTensorDesc &tensor_desc, + bool &flag) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + GE_CHECK_NOTNULL(tensor_descriptor_msg); + flag = tensor_descriptor_msg->reuse_input(); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetReuseInput(GeTensorDesc &tensor_desc, bool flag) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_reuse_input(flag); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY 
graphStatus TensorUtils::GetOutputTensor(const GeTensorDesc &tensor_desc, + bool &flag) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + GE_CHECK_NOTNULL(tensor_descriptor_msg); + flag = tensor_descriptor_msg->output_tensor(); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetOutputTensor(GeTensorDesc &tensor_desc, bool flag) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_output_tensor(flag); + } +} + +static map device_to_str_map{ + {0, "NPU"}, {1, "CPU"}, +}; +static map str_to_device_map{ + {"NPU", 0}, {"CPU", 1}, +}; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetDeviceType(const GeTensorDesc &tensor_desc, + DeviceType &type) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + GE_CHECK_NOTNULL(tensor_descriptor_msg); + string type_str = tensor_descriptor_msg->device_type(); + type = DeviceType(str_to_device_map[type_str]); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetDeviceType(GeTensorDesc &tensor_desc, + DeviceType type) { + auto type_str = device_to_str_map[type]; + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_device_type(type_str); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetInputTensor(const GeTensorDesc &tensor_desc, + bool &flag) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + GE_CHECK_NOTNULL(tensor_descriptor_msg); + flag = tensor_descriptor_msg->input_tensor(); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetInputTensor(GeTensorDesc &tensor_desc, bool flag) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if 
(tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_input_tensor(flag); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetRealDimCnt(const GeTensorDesc &tensor_desc, + uint32_t &cnt) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + GE_CHECK_NOTNULL(tensor_descriptor_msg); + cnt = static_cast(tensor_descriptor_msg->real_dim_cnt()); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetRealDimCnt(GeTensorDesc &tensor_desc, + uint32_t cnt) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_real_dim_cnt(cnt); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +TensorUtils::GetReuseInputIndex(const GeTensorDesc &tensor_desc, uint32_t &idx) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + GE_CHECK_NOTNULL(tensor_descriptor_msg); + + idx = static_cast(tensor_descriptor_msg->reuse_input_index()); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetReuseInputIndex(GeTensorDesc &tensor_desc, + uint32_t idx) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_reuse_input_index(idx); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetDataOffset(const GeTensorDesc &tensor_desc, + int64_t &offset) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + offset = tensor_descriptor_msg->data_offset(); + return GRAPH_SUCCESS; + } else { + GELOGW("tensor_descriptor_msg is nullptr."); + return GRAPH_FAILED; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetDataOffset(GeTensorDesc &tensor_desc, + int64_t offset) { + auto tensor_descriptor_msg = 
tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_data_offset(offset); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetCmpsSize(const GeTensorDesc &tensor_desc, + uint32_t &cmp_size) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + cmp_size = static_cast(tensor_descriptor_msg->cmps_size()); + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetCmpsSize(GeTensorDesc &tensor_desc, + uint32_t cmp_size) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_cmps_size(cmp_size); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetCmpsTab(const GeTensorDesc &tensor_desc, + vector &vec) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + string str = tensor_descriptor_msg->cmps_tab(); + vec.assign(str.begin(), str.end()); + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetCmpsTab(GeTensorDesc &tensor_desc, + const uint8_t *data, size_t size) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + GE_CHK_BOOL_EXEC(data != nullptr, return, "data is null."); + string str((const char *)data, size); + tensor_descriptor_msg->set_cmps_tab(str); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +TensorUtils::GetCmpsTabOffset(const GeTensorDesc &tensor_desc, int64_t &tab_offset) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tab_offset = tensor_descriptor_msg->cmps_tab_offset(); + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY 
GE_FUNC_HOST_VISIBILITY void TensorUtils::SetCmpsTabOffset(GeTensorDesc &tensor_desc, + int64_t tab_offset) { + auto tensor_descriptor_msg = tensor_desc.tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor_msg != nullptr) { + tensor_descriptor_msg->set_cmps_tab_offset(tab_offset); + } +} + +graphStatus TensorUtils::GetCmpsInfo(const GeTensorDesc &tensor_desc, CompressInfo &info) { + GeAttrValue attr_value; + if (tensor_desc.GetAttr(TENSOR_UTILS_CMPSINFO, attr_value) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + return attr_value.GetValue(info); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetCmpsInfo(GeTensorDesc &tensor_desc, + const CompressInfo &info) { + (void)tensor_desc.SetAttr(TENSOR_UTILS_CMPSINFO, GeAttrValue::CreateFrom(info)); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool TensorUtils::HasAlloffsetQuantizeInfo( + const GeTensorDesc &tensor_desc) { + return tensor_desc.HasAttr(TENSOR_UTILS_ALLOFFSET_QUANTIZE_INFO); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +TensorUtils::GetAlloffsetQuantizeInfo(const GeTensorDesc &tensor_desc, AllOffsetQuantizeInfo &info) { + GeAttrValue attr_value; + if (tensor_desc.GetAttr(TENSOR_UTILS_ALLOFFSET_QUANTIZE_INFO, attr_value) != GRAPH_SUCCESS) { + GELOGW("get attr alloffset_quantize_info fail."); + } + return attr_value.GetValue(info); +} + +void TensorUtils::SetAlloffsetQuantizeInfo(GeTensorDesc &tensor_desc, const AllOffsetQuantizeInfo &info) { + (void)tensor_desc.SetAttr(TENSOR_UTILS_ALLOFFSET_QUANTIZE_INFO, GeAttrValue::CreateFrom(info)); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::GetRC(const GeTensorDesc &tensor_desc, + uint32_t &rc) { + return AttrUtils::GetInt(&tensor_desc, TENSOR_UTILS_RC, rc) ? 
GRAPH_SUCCESS : GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void TensorUtils::SetRC(GeTensorDesc &tensor_desc, uint32_t rc) { + (void)AttrUtils::SetInt(&tensor_desc, TENSOR_UTILS_RC, rc); +} +} // namespace ge diff --git a/src/common/graph/graph.cc b/src/common/graph/graph.cc new file mode 100644 index 00000000..5462e8c5 --- /dev/null +++ b/src/common/graph/graph.cc @@ -0,0 +1,347 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "external/graph/graph.h" + +#include "debug/ge_util.h" +#include "external/graph/operator.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_attr_value.h" +#include "graph/model.h" +#include "graph/utils/graph_utils.h" + +using std::map; +using std::pair; +using std::string; +using std::vector; + +namespace ge { +class GraphImpl { + public: + friend class GraphUtils; + GraphImpl(const GraphImpl &) = delete; + GraphImpl &operator=(const GraphImpl &) = delete; + + explicit GraphImpl(const std::string &name) : name_(name) {} + + ~GraphImpl() { + if (IsValid()) { + if (compute_graph_ != nullptr) { + GraphUtils::BreakConnect(compute_graph_->GetAllNodesInfo()); + } + } + for (const auto &it : op_list_) { + Operator op = it.second; + op.BreakConnect(); + } + } + + graphStatus SetInputs(const std::vector &inputs) { + compute_graph_ = GraphUtils::CreateGraphFromOperator(name_, inputs); + GE_CHK_BOOL_RET_STATUS(compute_graph_ != nullptr, GRAPH_FAILED, "Build Graph failed."); + GE_CHK_BOOL_RET_STATUS(inputs.size() != 0, GRAPH_FAILED, "set input NULL."); + compute_graph_->SetInputSize(static_cast(inputs.size())); + return GRAPH_SUCCESS; + } + + graphStatus SetOutputs(const std::vector &outputs) { + if (compute_graph_ == nullptr) { + GELOGE(GRAPH_FAILED, "set ComputeGraph failed."); + return GRAPH_FAILED; + } + if (outputs.empty()) { + GELOGW("set outputs size is 0."); + return GRAPH_SUCCESS; + } + + // Construct special output node + std::vector>> output_indexs; + for (size_t i = 0; i < outputs.size(); ++i) { + output_indexs.emplace_back(outputs[i], std::vector{}); + } + + graphStatus ret = SetOutputs(output_indexs); + return ret; + } + + graphStatus SetOutputs(const std::vector>> &output_indexs) { + if (compute_graph_ == nullptr) { + GELOGE(GRAPH_FAILED, "set ComputeGraph failed."); + return GRAPH_FAILED; + } + if (output_indexs.empty()) { + GELOGW("set outputs size is 0."); + return GRAPH_SUCCESS; + } + + // Construct special output node + 
std::vector> output_nodes; + for (const auto &item : output_indexs) { + const Operator &output = item.first; + const vector &indexs = item.second; + ge::NodePtr node = compute_graph_->FindNode(output.GetName()); + if (node == nullptr) { + GELOGW("user designated out_node [%s] not exist in graph, will ignored!", output.GetName().c_str()); + continue; + } + + ge::OpDescPtr tmp_op_ptr = node->GetOpDesc(); + GE_CHECK_NOTNULL_EXEC(tmp_op_ptr, continue); + size_t out_size = tmp_op_ptr->GetOutputsSize(); + if (indexs.empty()) { + for (size_t i = 0; i < out_size; ++i) { + output_name_ += output.GetName() + ":" + std::to_string(i) + ";"; + output_nodes.emplace_back(node, i); + } + } else { + for (size_t i = 0; i < indexs.size(); ++i) { + if (indexs[i] >= out_size) { + GELOGW("index[%zu] is not belong to out_node[%s]", indexs[i], output.GetName().c_str()); + } else { + output_name_ += output.GetName() + ":" + std::to_string(i) + ";"; + output_nodes.emplace_back(node, indexs[i]); + } + } + } + } + + // Del last ";" + if (!output_name_.empty()) { + output_name_ = output_name_.substr(0, output_name_.length() - 1); + } + compute_graph_->SetUserDefOutput(output_name_); + compute_graph_->SetOutputSize(static_cast(output_indexs.size())); + compute_graph_->SetGraphOutNodesInfo(output_nodes); + return GRAPH_SUCCESS; + } + + graphStatus SetOutputs(const std::vector> &outputs) { + GE_CHK_BOOL_RET_STATUS(compute_graph_ != nullptr, GRAPH_FAILED, "set ComputeGraph faild."); + GE_CHK_BOOL_EXEC_INFO(outputs.size() != 0, return GRAPH_SUCCESS, "set outputs size is 0."); + + // Construct specified output + std::vector> output_nodes; + for (auto item : outputs) { + ge::NodePtr node = compute_graph_->FindNode(item.first.GetName()); + if (node == nullptr) { + GELOGE(GRAPH_FAILED, " Warning, user designated out_node (%s) not exist in graph, this out_node ignored!", + item.first.GetName().c_str()); + return GRAPH_FAILED; + } + ge::OpDescPtr tmp_op_ptr = node->GetOpDesc(); + 
GE_CHECK_NOTNULL_EXEC(tmp_op_ptr, continue); + size_t out_size = tmp_op_ptr->GetOutputsSize(); + + if (item.second.empty()) { + for (size_t i = 0; i < out_size; ++i) { + output_name_ += item.first.GetName() + ":" + std::to_string(i) + ";"; + output_nodes.push_back(std::make_pair(node, i)); + } + } else { + int32_t index = tmp_op_ptr->GetOutputIndexByName(item.second); + if (index < 0) { + GELOGE(GRAPH_FAILED, + " Warning, user designated out_node (%s):(%s) not exist in graph, this out_node ignored!", + item.first.GetName().c_str(), item.second.c_str()); + return GRAPH_FAILED; + } + output_name_ += item.first.GetName() + ":" + std::to_string(index) + ";"; + output_nodes.push_back(std::make_pair(node, index)); + } + } + // Del last ";" + if (!output_name_.empty()) { + output_name_ = output_name_.substr(0, output_name_.length() - 1); + } + compute_graph_->SetOutputSize(static_cast(outputs.size())); + compute_graph_->SetGraphOutNodesInfo(output_nodes); + GELOGI("********************SetOutputs Success***********************"); + GE_IF_BOOL_EXEC(!output_name_.empty(), GELOGI(" NetOutputs: (%s)", output_name_.c_str())); + + return GRAPH_SUCCESS; + } + + graphStatus SetTargets(const std::vector &targets) { + GE_CHK_BOOL_RET_STATUS(compute_graph_ != nullptr, GRAPH_FAILED, "set ComputeGraph faild."); + GE_CHK_BOOL_EXEC_INFO(targets.size() != 0, return GRAPH_SUCCESS, "set targets size is 0."); + + std::vector target_nodes; + for (auto item : targets) { + ge::NodePtr node = compute_graph_->FindNode(item.GetName()); + if (node == nullptr) { + GELOGW(" Warning, user designated target_node (%s) not exist in graph, this target_node ignored!", + item.GetName().c_str()); + continue; + } + target_nodes.push_back(node); + } + compute_graph_->SetGraphTargetNodesInfo(target_nodes); + return GRAPH_SUCCESS; + } + bool IsValid() const { return (compute_graph_ != nullptr); } + + graphStatus AddOp(const ge::Operator &op) { + std::pair::iterator, bool> ret; + ret = 
op_list_.emplace(std::pair(op.GetName(), op)); + GE_CHK_BOOL_RET_STATUS(ret.second != false, GRAPH_FAILED, "the op have added before, op name:%s.", + op.GetName().c_str()); + return GRAPH_SUCCESS; + } + + graphStatus GetAllOpName(std::vector &op_name) const { + for (const auto &it : op_list_) { + op_name.push_back(it.second.GetName()); + } + return GRAPH_SUCCESS; + } + + graphStatus FindOpByName(const string &name, ge::Operator &op) const { + auto it = op_list_.find(name); + GE_CHK_BOOL_EXEC(it != op_list_.end(), return GRAPH_FAILED, "Error: there is no op: %s.", name.c_str()); + op = it->second; + return GRAPH_SUCCESS; + } + + void SetNeedIteration(bool need_iteration) { + if (compute_graph_ == nullptr) { + GELOGE(GRAPH_FAILED, "Set need iteration failed, as compute graph is null."); + return; + } + compute_graph_->SetNeedIteration(need_iteration); + } + + private: + std::string name_; + std::string output_name_; + std::map op_list_; + ComputeGraphPtr compute_graph_{nullptr}; +}; + +Graph::Graph(const std::string &name) { + impl_ = ComGraphMakeShared(name); + if (impl_ == nullptr) { + GELOGW("GraphImpl make shared failed, impl_ is nullptr"); + } +} + +graphStatus Graph::AddOp(const ge::Operator &op) { + GE_CHK_BOOL_EXEC(impl_ != nullptr, return GRAPH_FAILED, "AddOp failed: graph can not be used, impl is nullptr."); + return impl_->AddOp(op); +} + +graphStatus Graph::GetAllOpName(std::vector &op_name) const { + GE_CHK_BOOL_EXEC(impl_ != nullptr, return GRAPH_FAILED, + "GetAllOpName failed: graph can not be used, impl is nullptr."); + return impl_->GetAllOpName(op_name); +} + +graphStatus Graph::FindOpByName(const std::string &name, Operator &op) const { + Operator op_find_op_def("NULL"); + op = op_find_op_def; + GE_CHK_BOOL_EXEC(impl_ != nullptr, return GRAPH_FAILED, + "FindOpByName failed: graph can not be used, impl is nullptr."); + return impl_->FindOpByName(name, op); +} + +Graph &Graph::SetInputs(const vector &inputs) { + GE_CHK_BOOL_EXEC(impl_ != nullptr, 
return *this, "SetInputs failed: graph can not be used, impl is nullptr.") + GE_CHK_BOOL_EXEC(inputs.size() > 0, return *this, "SetInputs failed: input operator size can not be 0."); + (void)impl_->SetInputs(inputs); + return *this; +} + +Graph &Graph::SetOutputs(const vector &outputs) { + if (impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "SetOutputs failed: graph can not be used, impl is nullptr."); + return *this; + } + (void)impl_->SetOutputs(outputs); + return *this; +} + +Graph &Graph::SetOutputs(const std::vector>> &output_indexs) { + if (impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "SetOutputs failed: graph can not be used, impl is nullptr."); + return *this; + } + (void)impl_->SetOutputs(output_indexs); + return *this; +} + +Graph &Graph::SetOutputs(const std::vector> &outputs) { + GE_CHK_BOOL_EXEC(impl_ != nullptr, return *this, "SetOutputs failed: graph can not be used, impl is nullptr.") + (void)impl_->SetOutputs(outputs); + return *this; +} + +Graph &Graph::SetTargets(const vector &targets) { + if (impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "SetTargets failed: graph can not be used, impl is nullptr."); + return *this; + } + (void)impl_->SetTargets(targets); + return *this; +} + +bool Graph::IsValid() const { + if (impl_ == nullptr) { + return false; + } + return impl_->IsValid(); +} + +void Graph::SetNeedIteration(bool need_iteration) { + if (impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "Set need iteration failed, as impl is null."); + return; + } + impl_->SetNeedIteration(need_iteration); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ComputeGraphPtr GraphUtils::GetComputeGraph(const ge::Graph &graph) { + GE_CHK_BOOL_EXEC_NOLOG(graph.IsValid(), return nullptr); + return graph.impl_->compute_graph_; +} + +graphStatus Graph::SaveToFile(const string &file_name) const { + Model model = Model(); + model.SetGraph(*this); + return model.SaveToFile(file_name); +} + +graphStatus Graph::LoadFromFile(const string &file_name) { + Model model = Model(); + 
graphStatus ret = model.LoadFromFile(file_name); + if (ret != GRAPH_SUCCESS) { + return ret; + } + *this = model.GetGraph(); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Graph +GraphUtils::CreateGraphFromComputeGraph(const ge::ComputeGraphPtr compute_graph) { + GE_CHK_BOOL_EXEC_NOLOG(compute_graph != nullptr, return Graph("")); + + auto name = compute_graph->GetName(); + auto graph = Graph(name); + + GE_CHK_BOOL_EXEC_NOLOG(graph.impl_ != nullptr, return graph); + graph.impl_->compute_graph_ = compute_graph; + + return graph; +} +} // namespace ge diff --git a/src/common/graph/inference_context.cc b/src/common/graph/inference_context.cc new file mode 100644 index 00000000..9e2d96ab --- /dev/null +++ b/src/common/graph/inference_context.cc @@ -0,0 +1,53 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "external/graph/inference_context.h" + +namespace ge { +ShapeAndType::ShapeAndType(const Shape &shape, DataType data_type) : shape_(shape), data_type_(data_type) {} + +void ShapeAndType::SetShape(const Shape &shape) { shape_ = shape; } + +void ShapeAndType::SetType(DataType data_type) { data_type_ = data_type; } + +const Shape &ShapeAndType::GetShape() const { return shape_; } + +DataType ShapeAndType::GetDataType() const { return data_type_; } + +void InferenceContext::SetInputHandleShapesAndTypes(std::vector> &&shapes_and_types) { + input_handle_shapes_and_types_.swap(shapes_and_types); +} + +const std::vector> &InferenceContext::GetInputHandleShapesAndTypes() const { + return input_handle_shapes_and_types_; +} + +const std::vector> &InferenceContext::GetOutputHandleShapesAndTypes() const { + return output_handle_shapes_and_types_; +} + +void InferenceContext::SetOutputHandleShapesAndTypes(const std::vector> &shapes_and_types) { + output_handle_shapes_and_types_ = shapes_and_types; +} + +void InferenceContext::SetOutputHandleShapesAndTypes(std::vector> &&shapes_and_types) { + output_handle_shapes_and_types_.swap(shapes_and_types); +} + +void InferenceContext::SetMarks(const std::vector &marks) { marks_ = marks; } + +const std::vector &InferenceContext::GetMarks() const { return marks_; } +} // namespace ge diff --git a/src/common/graph/model.cc b/src/common/graph/model.cc new file mode 100644 index 00000000..fef2af38 --- /dev/null +++ b/src/common/graph/model.cc @@ -0,0 +1,183 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/model.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "debug/ge_attr_define.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/model_serialize.h" +#include "proto/ge_ir.pb.h" +#include "utils/attr_utils.h" +#include "utils/ge_ir_utils.h" + +using google::protobuf::io::FileInputStream; +using google::protobuf::io::FileOutputStream; +using google::protobuf::io::ZeroCopyInputStream; + +namespace { +const int DEFAULT_VERSION = 1; +const int ACCESS_PERMISSION_BITS = 0400; +} // namespace + +namespace ge { +void Model::Init() { + (void)AttrUtils::SetInt(this, ATTR_MODEL_MEMORY_SIZE, 0); + (void)AttrUtils::SetInt(this, ATTR_MODEL_STREAM_NUM, 0); + (void)AttrUtils::SetInt(this, ATTR_MODEL_EVENT_NUM, 0); + (void)AttrUtils::SetInt(this, ATTR_MODEL_WEIGHT_SIZE, 0); + (void)AttrUtils::SetStr(this, ATTR_MODEL_TARGET_TYPE, TARGET_TYPE_MINI); + version_ = 0; +} + +Model::Model() { + attrs_.InitDefault(); + Init(); +} + +Model::Model(const string &name, const string &custom_version) + : name_(name), version_(DEFAULT_VERSION), platform_version_(custom_version) { + attrs_.InitDefault(); + Init(); +} + +string Model::GetName() const { return name_; } + +void Model::SetName(const string &name) { name_ = name; } + +uint32_t Model::GetVersion() const { return version_; } + +string Model::GetPlatformVersion() const { return platform_version_; } + +void Model::SetGraph(const ge::Graph &graph) { graph_ = graph; } + +Graph 
Model::GetGraph() const { return graph_; } + +graphStatus Model::Save(Buffer &buffer) const { + ModelSerialize serialize; + buffer = serialize.SerializeModel(*this); + return buffer.GetSize() > 0 ? GRAPH_SUCCESS : GRAPH_FAILED; +} + +void Model::SetAttr(const ProtoAttrMapHelper &attrs) { attrs_ = attrs; } + +graphStatus Model::Load(const uint8_t *data, size_t len, Model &model) { + ModelSerialize serialize; + model = serialize.UnserializeModel(data, len); + return model.IsValid() ? GRAPH_SUCCESS : GRAPH_FAILED; +} + +graphStatus Model::SaveToFile(const string &file_name) const { + Buffer buffer; + if ((*this).Save(buffer) != GRAPH_SUCCESS) { + GE_LOGE("save to file fail."); + return GRAPH_FAILED; + } + // Write file + ge::proto::ModelDef ge_proto; + if (buffer.GetData() != nullptr) { + std::string str((const char *)buffer.GetData(), buffer.GetSize()); + if (!ge_proto.ParseFromString(str)) { + return GRAPH_FAILED; + } + char real_path[PATH_MAX] = {0x00}; + if (strlen(file_name.c_str()) >= PATH_MAX) { + return GRAPH_FAILED; + } + if (realpath(file_name.c_str(), real_path) == nullptr) { + GELOGI("file %s does not exit, it will be created.", file_name.c_str()); + } + int fd = open(real_path, O_WRONLY | O_CREAT | O_TRUNC, ACCESS_PERMISSION_BITS); + if (fd < 0) { + GELOGE(GRAPH_FAILED, "open file failed, file path [%s] ", real_path); + return GRAPH_FAILED; + } + bool ret = ge_proto.SerializeToFileDescriptor(fd); + if (!ret) { + GELOGE(GRAPH_FAILED, "SerializeToFileDescriptor failed"); + if (close(fd) != 0) { + GELOGE(GRAPH_FAILED, "close file descriptor fail."); + return GRAPH_FAILED; + } + return GRAPH_FAILED; + } + if (close(fd) != 0) { + GELOGE(GRAPH_FAILED, "close file descriptor fail."); + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +graphStatus Model::Load(ge::proto::ModelDef &model_def) { + ModelSerialize serialize; + *this = serialize.UnserializeModel(model_def); + return this->IsValid() ? 
GRAPH_SUCCESS : GRAPH_FAILED; +} + +bool Model::IsValid() const { return graph_.IsValid(); } + +graphStatus Model::LoadFromFile(const string &file_name) { + char real_path[PATH_MAX] = {0x00}; + if (strlen(file_name.c_str()) >= PATH_MAX) { + return GRAPH_FAILED; + } + if (realpath(file_name.c_str(), real_path) == nullptr) { + GELOGE(GRAPH_FAILED, "file %s does not exit, can not load.", file_name.c_str()); + return GRAPH_FAILED; + } + int fd = open(real_path, O_RDONLY); + if (fd < 0) { + GELOGE(GRAPH_FAILED, "open file failed"); + return GRAPH_FAILED; + } + + ge::proto::ModelDef model_def; + bool ret = model_def.ParseFromFileDescriptor(fd); + if (!ret) { + GELOGE(GRAPH_FAILED, "ParseFromFileDescriptor failed"); + if (close(fd) != 0) { + GELOGE(GRAPH_FAILED, "close file descriptor fail."); + return GRAPH_FAILED; + } + return GRAPH_FAILED; + } + if (close(fd) != 0) { + GELOGE(GRAPH_FAILED, "close file descriptor fail."); + return GRAPH_FAILED; + } + return Load(model_def); +} + +ProtoAttrMapHelper Model::MutableAttrMap() { return attrs_; } + +ConstProtoAttrMapHelper Model::GetAttrMap() const { + return ConstProtoAttrMapHelper(attrs_.GetProtoOwner(), attrs_.GetProtoMsg()); +} +} // namespace ge diff --git a/src/common/graph/model_serialize.cc b/src/common/graph/model_serialize.cc new file mode 100644 index 00000000..52397530 --- /dev/null +++ b/src/common/graph/model_serialize.cc @@ -0,0 +1,565 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/model_serialize.h" + +#include +#include + +#include "debug/ge_attr_define.h" +#include "debug/ge_log.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/detail/model_serialize_imp.h" +#include "proto/ge_ir.pb.h" +#include "utils/graph_utils.h" + +using std::string; + +namespace ge { +bool ModelSerializeImp::ParseNodeIndex(const string &node_index, string &node_name, int32_t &index) { + auto sep = node_index.rfind(":"); + if (sep == string::npos) { + GELOGW("separator is not found in node_index."); + return false; + } + node_name = node_index.substr(0, sep); + auto index_str = node_index.substr(sep + 1); + index = static_cast(std::strtol(index_str.c_str(), nullptr, 10)); + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool ModelSerializeImp::SerializeTensor(const ConstGeTensorPtr &tensor, + proto::TensorDef *tensor_proto) { + GE_CHK_BOOL_EXEC(tensor != nullptr, return false, "tensor is null."); + GE_CHK_BOOL_EXEC(tensor_proto != nullptr, return false, "tensor_proto is null."); + + if (tensor->tensor_def_.GetProtoMsg() != nullptr) { + *tensor_proto = *tensor->tensor_def_.GetProtoMsg(); + return true; + } + return false; +} + +bool ModelSerializeImp::SerializeEdge(const NodePtr &node, proto::OpDef *op_def_proto) { + GE_CHK_BOOL_EXEC(node != nullptr, return false, "node is null."); + GE_CHK_BOOL_EXEC(op_def_proto != nullptr, return false, "op_def_proto is null."); + + op_def_proto->clear_input(); + // Inputs + for (const auto &in_data_anchor : node->GetAllInDataAnchors()) { + if (in_data_anchor != nullptr) { + auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + if (peer_out_anchor != nullptr && peer_out_anchor->GetOwnerNode()) { + op_def_proto->add_input(peer_out_anchor->GetOwnerNode()->GetName() + ":" + + std::to_string(peer_out_anchor->GetIdx())); + } else { + 
op_def_proto->add_input(""); + } + } + } + // Control edge + auto control_anchor = node->GetInControlAnchor(); + if (control_anchor != nullptr) { + auto peer_out_anchors = control_anchor->GetPeerOutControlAnchors(); + for (const auto &peer_out_anchor : peer_out_anchors) { + if (peer_out_anchor != nullptr && peer_out_anchor->GetOwnerNode()) { + op_def_proto->add_input(peer_out_anchor->GetOwnerNode()->GetName() + ":-1"); + } + } + } + return true; +} + +bool ModelSerializeImp::SerializeOpDesc(const ConstOpDescPtr &op_desc, proto::OpDef *op_def_proto) { + if (op_desc == nullptr || op_def_proto == nullptr) { + GELOGE(GRAPH_FAILED, "Input Para Invalid"); + return false; + } + if (op_desc->op_def_.GetProtoMsg() != nullptr) { + *op_def_proto = *op_desc->op_def_.GetProtoMsg(); + op_def_proto->clear_input_desc(); + op_def_proto->clear_output_desc(); + // Input descs + if (op_desc->GetInputsSize() > 0) { + auto size = static_cast(op_desc->GetInputsSize()); + for (uint32_t i = 0; i < size; i++) { + auto tensor_desc = op_desc->GetInputDescPtr(i); + if (tensor_desc != nullptr && tensor_desc->tensor_descriptor_.GetProtoMsg() != nullptr) { + *op_def_proto->add_input_desc() = *(tensor_desc->tensor_descriptor_.GetProtoMsg()); + } + } + } + // Output descs + if (op_desc->GetOutputsSize() > 0) { + auto size = static_cast(op_desc->GetOutputsSize()); + for (uint32_t i = 0; i < size; i++) { + auto tensor_desc = op_desc->GetOutputDescPtr(i); + if (tensor_desc != nullptr && tensor_desc->tensor_descriptor_.GetProtoMsg() != nullptr) { + *op_def_proto->add_output_desc() = *(tensor_desc->tensor_descriptor_.GetProtoMsg()); + } + } + } + } + return true; +} + +bool ModelSerializeImp::SerializeNode(const NodePtr &node, proto::OpDef *op_def_proto) { + if (node == nullptr || op_def_proto == nullptr) { + GELOGE(GRAPH_FAILED, "Input Para Node Invalid"); + return false; + } + if (!SerializeOpDesc(node->GetOpDesc(), op_def_proto)) { + GELOGE(GRAPH_FAILED, "Serialize OpDesc failed"); + return false; + 
} + if (SerializeEdge(node, op_def_proto)) { + return true; + } else { + return false; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool ModelSerializeImp::SerializeGraph(const ConstComputeGraphPtr &graph, + proto::GraphDef *graph_proto) { + if (graph == nullptr || graph_proto == nullptr) { + GELOGE(GRAPH_FAILED, "Input para Invalid"); + return false; + } + graph_proto->set_name(graph->GetName()); + // Inputs + for (const auto &input : graph->GetInputNodes()) { + if (input != nullptr) { + graph_proto->add_input(input->GetName() + ":0"); + } + } + // Outputs + for (const auto &output : graph->GetOutputNodes()) { + if (output != nullptr) { + graph_proto->add_output(output->GetName() + ":0"); + } + } + if (graph->attrs_.GetProtoMsg() != nullptr) { + *graph_proto->mutable_attr() = *graph->attrs_.GetProtoMsg(); + } + for (const auto &node : graph->GetDirectNode()) { + if (!SerializeNode(node, graph_proto->add_op())) { + if (node->GetOpDesc() != nullptr) { + GELOGE(GRAPH_FAILED, "Serialize Node %s failed", node->GetName().c_str()); + } + return false; + } + } + return true; +} + +bool ModelSerializeImp::SerializeModel(const Model &model, proto::ModelDef *model_proto) { + if (model_proto == nullptr) { + GELOGE(GRAPH_FAILED, "model_proto para Invalid"); + return false; + } + model_proto->set_name(model.GetName()); + model_proto->set_custom_version(model.GetPlatformVersion()); + model_proto->set_version(model.GetVersion()); + if (model.attrs_.GetProtoMsg()) { + *model_proto->mutable_attr() = *model.attrs_.GetProtoMsg(); + } + auto &graph = model.graph_; + auto compute_graph = GraphUtils::GetComputeGraph(graph); + if (compute_graph == nullptr) { + GELOGE(GRAPH_FAILED, "GetComputeGraph return nullptr"); + return false; + } + if (!SerializeGraph(compute_graph, model_proto->add_graph())) { + GELOGE(GRAPH_FAILED, "SerializeGraph fail"); + return false; + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool ModelSerializeImp::UnserializeTensor( + 
GeTensorPtr &tensor, proto::TensorDef &tensor_proto) { + tensor = std::shared_ptr(new (std::nothrow) GeTensor(protobuf_owner_, &tensor_proto)); + if (tensor == nullptr) { + GELOGE(GRAPH_FAILED, "tensor is nullptr"); + return false; + } else { + return true; + } +} + +bool ModelSerializeImp::UnserializeOpDesc(OpDescPtr &op_desc, proto::OpDef &op_def_proto) { + op_desc = std::shared_ptr(new (std::nothrow) OpDesc(protobuf_owner_, &op_def_proto)); + GE_CHK_BOOL_EXEC(op_desc != nullptr, return false, "op_desc is nullptr."); + + // Input tensor + for (auto &input_desc : *op_def_proto.mutable_input_desc()) { + std::shared_ptr temp_value = + std::shared_ptr(new (std::nothrow) GeTensorDesc(protobuf_owner_, &input_desc)); + GE_CHK_BOOL_RET_STATUS(temp_value != nullptr, false, "temp_value is nullptr"); + op_desc->inputs_desc_.push_back(temp_value); + } + // Output tensor + for (auto &output_desc : *op_def_proto.mutable_output_desc()) { + std::shared_ptr temp_value = + std::shared_ptr(new (std::nothrow) GeTensorDesc(protobuf_owner_, &output_desc)); + GE_CHK_BOOL_RET_STATUS(temp_value != nullptr, false, "temp_value is nullptr"); + op_desc->outputs_desc_.push_back(temp_value); + } + return true; +} + +bool ModelSerializeImp::UnserializeNode(ComputeGraphPtr &graph, proto::OpDef &op_def_proto) { + GE_RT_FALSE_CHECK_NOTNULL(graph); + OpDescPtr op_desc = nullptr; + if (!UnserializeOpDesc(op_desc, op_def_proto)) { + GELOGW("UnserializeOpDesc error."); + } + + NodePtr node = graph->AddNode(op_desc); + GE_CHK_BOOL_EXEC(node != nullptr, return false, "node is nullptr."); + + // Inputs + int dst_index = 0; + for (const auto &input : op_def_proto.input()) { + string node_name; + int32_t index = 0; + if (ParseNodeIndex(input, node_name, index)) { + node_input_node_names_.push_back(NodeNameNodeReq{node_name, index, node, dst_index, op_def_proto.name()}); + } + if (index >= 0) { + dst_index++; + } + } + node_map_[op_def_proto.name()] = node; + return true; +} + +bool 
ModelSerializeImp::HandleNodeNameRef() { + // Edges + for (auto &item : node_input_node_names_) { + auto src_node_it = node_map_.find(item.src_node_name); + if (src_node_it == node_map_.end()) { + GELOGE(GRAPH_FAILED, "cannot find node %s", item.src_node_name.c_str()); + return false; + } + GE_IF_BOOL_EXEC(src_node_it->second == nullptr || item.dst_node == nullptr, continue); + if (item.src_out_index >= 0) { + auto src_anchor = src_node_it->second->GetOutDataAnchor(item.src_out_index); + auto dst_anchor = item.dst_node->GetInDataAnchor(item.dst_in_index); + if (src_anchor == nullptr || dst_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "get anchor failed %s:%d, %s:%d ", item.src_node_name.c_str(), item.src_out_index, + item.dst_node_name.c_str(), item.dst_in_index); + return false; + } + GE_CHK_BOOL_ONLY_LOG((src_anchor->LinkTo(dst_anchor) == GRAPH_SUCCESS), " linkTo failed."); + } else { + // Control edge + auto src_anchor = src_node_it->second->GetOutControlAnchor(); + auto dst_anchor = item.dst_node->GetInControlAnchor(); + if (src_anchor != nullptr && dst_anchor != nullptr) { + GE_CHK_BOOL_ONLY_LOG((src_anchor->LinkTo(dst_anchor) == GRAPH_SUCCESS), " linkTo failed."); + } + } + } + // Graph input + for (auto &item : graph_input_node_names_) { + auto node_it = node_map_.find(item.node_name); + if (node_it == node_map_.end()) { + GELOGE(GRAPH_FAILED, "cannot find node %s", item.node_name.c_str()); + return false; + } + GE_IF_BOOL_EXEC(item.graph == nullptr, continue); + auto ret = item.graph->AddInputNode(node_it->second); + if (ret == nullptr) { + return false; + } + } + // Graph output + for (auto &item : graph_output_node_names_) { + auto node_it = node_map_.find(item.node_name); + if (node_it == node_map_.end()) { + GELOGE(GRAPH_FAILED, "cannot find node %s", item.node_name.c_str()); + return false; + } + + GE_IF_BOOL_EXEC(item.graph == nullptr, continue); + auto ret = item.graph->AddOutputNode(node_it->second); + if (ret == nullptr) { + GELOGE(GRAPH_FAILED, 
"AddOutputNode failed."); + return false; + } + } + node_input_node_names_.clear(); + graph_input_node_names_.clear(); + graph_output_node_names_.clear(); + node_map_.clear(); + return true; +} + +bool ModelSerializeImp::UnserializeModel(Model &model, proto::ModelDef &model_proto) { + model.name_ = model_proto.name(); + model.version_ = model_proto.version(); + model.platform_version_ = model_proto.custom_version(); + model.attrs_ = ProtoAttrMapHelper(protobuf_owner_, model_proto.mutable_attr()); + + auto &graphs_proto = *model_proto.mutable_graph(); + if (!graphs_proto.empty()) { + auto &graph_proto = graphs_proto[0]; + ComputeGraphPtr compute_graph_ptr; + if (UnserializeGraphWithoutEdge(compute_graph_ptr, graph_proto)) { + model.graph_ = GraphUtils::CreateGraphFromComputeGraph(compute_graph_ptr); + } + } + if (!HandleNodeNameRef()) { + GELOGE(GRAPH_FAILED, "HandleNodeNameRef failed"); + return false; + } + return true; +} + +bool ModelSerializeImp::UnserializeGraphWithoutEdge(ComputeGraphPtr &graph, proto::GraphDef &graph_proto) { + graph = ComGraphMakeShared(graph_proto.name()); + if (graph == nullptr) { + GELOGE(GRAPH_FAILED, "ComputeGraph make shared failed"); + return false; + } + + // Inputs + for (auto input : graph_proto.input()) { + string node_name; + int32_t index; + if (ParseNodeIndex(input, node_name, index)) { + graph_input_node_names_.push_back(NodeNameGraphReq{node_name, index, graph}); + } + } + // Outputs + for (auto output : graph_proto.output()) { + string node_name; + int32_t index; + if (ParseNodeIndex(output, node_name, index)) { + graph_output_node_names_.push_back(NodeNameGraphReq{node_name, index, graph}); + } + } + graph->attrs_ = ProtoAttrMapHelper(protobuf_owner_, graph_proto.mutable_attr()); + for (auto &op_def_proto : *graph_proto.mutable_op()) { + if (!UnserializeNode(graph, op_def_proto)) { + GELOGE(GRAPH_FAILED, "UnserializeNode fail"); + return false; + } + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY 
bool ModelSerializeImp::UnserializeGraph(ComputeGraphPtr &graph, + proto::GraphDef &graph_proto) { + if (!UnserializeGraphWithoutEdge(graph, graph_proto)) { + GELOGW("UnserializeGraphWithoutEdge fail"); + } + if (!HandleNodeNameRef()) { + GELOGE(GRAPH_FAILED, "Link Anchor or set graph input or output fail"); + return false; + } + return true; +} + +bool ReadProtoFromBinaryFile(const uint8_t *data, size_t len, google::protobuf::Message *proto) { + GE_CHK_BOOL_EXEC(data != nullptr, return false, "data is null."); + GE_CHK_BOOL_EXEC(proto != nullptr, return false, "proto is null."); + + google::protobuf::io::CodedInputStream coded_stream(data, len); + // 2048M -1 + coded_stream.SetTotalBytesLimit(INT32_MAX, -1); + if (!proto->ParseFromCodedStream(&coded_stream)) { + GELOGE(GRAPH_FAILED, "ReadProtoFromBinaryFile failed len %zu", len); + return false; + } + return true; +} + +Buffer ModelSerialize::SerializeModel(const Model &model) { + proto::ModelDef model_def; + ModelSerializeImp imp; + if (!imp.SerializeModel(model, &model_def)) { + return Buffer(); + } +#if !defined(__ANDROID__) && !defined(ANDROID) + Buffer buffer(model_def.ByteSizeLong()); +#else + Buffer buffer(model_def.ByteSize()); +#endif + GE_CHK_BOOL_ONLY_LOG(buffer.GetSize() != 0, "get size failed"); + GE_CHK_BOOL_ONLY_LOG((buffer.GetData() != nullptr), "get size failed"); + auto ret = model_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + if (ret != true) { + GELOGW("serialize to array fail."); + } + return buffer; +} + +size_t ModelSerialize::GetSerializeModelSize(const Model &model) { + proto::ModelDef model_def; + ModelSerializeImp imp; + if (!imp.SerializeModel(model, &model_def)) { + return 0; + } +#if !defined(__ANDROID__) && !defined(ANDROID) + return model_def.ByteSizeLong(); +#else + return model_def.ByteSize(); +#endif +} + +Model ModelSerialize::UnserializeModel(const uint8_t *data, size_t len) { + if (data == nullptr) { + GELOGE(GRAPH_FAILED, "data is nullptr"); + 
return Model(); + } + + std::shared_ptr model_proto_ptr; + model_proto_ptr = ComGraphMakeShared(); + if (model_proto_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "proto::ModelDef make shared failed"); + return Model(); + } + + auto &model_proto = *model_proto_ptr; + if (!ReadProtoFromBinaryFile(data, len, &model_proto)) { + GELOGE(GRAPH_FAILED, "ParseFromArray fail"); + return Model(); + } + + Model model; + ModelSerializeImp imp; + imp.SetProtobufOwner(model_proto_ptr); + if (!imp.UnserializeModel(model, model_proto)) { + GELOGE(GRAPH_FAILED, "Unserialize Model fail"); + return Model(); + } + return model; +} + +Model ModelSerialize::UnserializeModel(ge::proto::ModelDef &model_def) { + std::shared_ptr model_def_ptr = ComGraphMakeShared(model_def); + GE_CHK_BOOL_EXEC(model_def_ptr != nullptr, return Model(), "mode_def make shared failed"); + + ModelSerializeImp imp; + imp.SetProtobufOwner(model_def_ptr); + Model model; + if (!imp.UnserializeModel(model, *model_def_ptr)) { + GELOGE(GRAPH_FAILED, "Unserialize Model fail"); + return Model(); + } + return model; +} + +Buffer ModelSerialize::SerializeGraph(const ComputeGraphPtr &graph) { + proto::GraphDef graph_def; + ModelSerializeImp imp; + if (!imp.SerializeGraph(graph, &graph_def)) { + return Buffer(); + } +#if !defined(__ANDROID__) && !defined(ANDROID) + Buffer buffer(graph_def.ByteSizeLong()); +#else + Buffer buffer(graph_def.ByteSize()); +#endif + GE_CHK_BOOL_ONLY_LOG((buffer.GetSize() != 0), "get size failed"); + GE_CHK_BOOL_ONLY_LOG((buffer.GetData() != nullptr), "get size failed"); + auto ret = graph_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + if (ret != true) { + GE_LOGE("serialize to array fail."); + } + + return buffer; +} + +ComputeGraphPtr ModelSerialize::UnserializeGraph(const uint8_t *data, size_t len) { + if (data == nullptr) { + GELOGE(GRAPH_FAILED, "data is nullptr"); + return nullptr; + } + + std::shared_ptr graph_proto_ptr; + graph_proto_ptr = ComGraphMakeShared(); + if 
(graph_proto_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "proto::GraphDef make shared failed"); + return nullptr; + } + proto::GraphDef &graph_proto = *graph_proto_ptr; + if (!ReadProtoFromBinaryFile(data, len, &graph_proto)) { + GELOGE(GRAPH_FAILED, "ParseFromArray fail"); + return nullptr; + } + + ComputeGraphPtr graph; + ModelSerializeImp imp; + imp.SetProtobufOwner(graph_proto_ptr); + if (!imp.UnserializeGraph(graph, graph_proto)) { + return nullptr; + } + return graph; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Buffer ModelSerialize::SerializeOpDesc(const ConstOpDescPtr &op_desc) { + proto::OpDef op_def; + ModelSerializeImp imp; + if (!imp.SerializeOpDesc(op_desc, &op_def)) { + return Buffer(); + } +#if !defined(__ANDROID__) && !defined(ANDROID) + Buffer buffer(op_def.ByteSizeLong()); +#else + Buffer buffer(op_def.ByteSize()); +#endif + GE_CHK_BOOL_ONLY_LOG((buffer.GetSize() != 0), "get size failed"); + GE_CHK_BOOL_ONLY_LOG((buffer.GetData() != nullptr), "get size failed"); + auto ret = op_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + if (ret != true) { + GE_LOGE("serialize to array fail."); + } + + return buffer; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDescPtr ModelSerialize::UnserializeOpDesc(const uint8_t *data, + size_t len) { + if (data == nullptr) { + GELOGE(GRAPH_FAILED, "data is nullptr"); + return nullptr; + } + + std::shared_ptr op_def_ptr; + op_def_ptr = ComGraphMakeShared(); + if (op_def_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "proto::OpDef make shared failed"); + return nullptr; + } + proto::OpDef &op_def = *op_def_ptr; + if (!ReadProtoFromBinaryFile(data, len, &op_def)) { + GELOGE(GRAPH_FAILED, "ParseFromArray fail"); + return nullptr; + } + + OpDescPtr op_desc; + ModelSerializeImp imp; + imp.SetProtobufOwner(op_def_ptr); + if (!imp.UnserializeOpDesc(op_desc, op_def)) { + GELOGW("UnserializeOpDesc error."); + } + return op_desc; +} +} // namespace ge diff --git a/src/common/graph/node.cc 
b/src/common/graph/node.cc new file mode 100644 index 00000000..01866be4 --- /dev/null +++ b/src/common/graph/node.cc @@ -0,0 +1,842 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/node.h" + +#include + +#include "debug/ge_op_types.h" +#include "debug/ge_util.h" +#include "external/graph/operator_factory.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_tensor.h" +#include "graph/operator_factory_impl.h" +#include "graph/shape_refiner.h" +#include "utils/ge_ir_utils.h" +#include "utils/node_utils.h" +#include "utils/op_desc_utils.h" + +using std::string; +using std::vector; + +namespace ge { +Node::Node(const OpDescPtr &op, const ComputeGraphPtr &owner_graph) + : op_(op), + owner_graph_(owner_graph), + in_data_anchors_(), + out_data_anchors_(), + in_control_anchor_(nullptr), + out_control_anchor_(nullptr), + attrs_(), + has_init_(false) { + anchor_status_updated_ = false; +} + +Node::~Node() { + for (const auto &in_data_anchor : in_data_anchors_) { + if (in_data_anchor != nullptr) { + in_data_anchor->UnlinkAll(); + } + } + for (const auto &out_data_anchor : out_data_anchors_) { + if (out_data_anchor != nullptr) { + out_data_anchor->UnlinkAll(); + } + } + if (in_control_anchor_ != nullptr) { + in_control_anchor_->UnlinkAll(); + } + if (out_control_anchor_ != nullptr) { + out_control_anchor_->UnlinkAll(); + } +} + +graphStatus Node::Init() { + if (has_init_) 
{ + return GRAPH_SUCCESS; + } + GE_CHK_BOOL_EXEC(op_ != nullptr, return GRAPH_FAILED, "original OpDesc is nullptr"); + size_t size = op_->GetInputsSize(); + for (size_t i = 0; i < size; i++) { + std::shared_ptr anchor = ComGraphMakeShared(shared_from_this(), i); + if (anchor == nullptr) { + GELOGE(GRAPH_FAILED, "Current in_data_anchor is null, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + in_data_anchors_.push_back(anchor); + } + size = op_->GetOutputsSize(); + for (size_t i = 0; i < size; i++) { + std::shared_ptr anchor = ComGraphMakeShared(shared_from_this(), i); + if (anchor == nullptr) { + GELOGE(GRAPH_FAILED, "Current out_data_anchor is null, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + out_data_anchors_.push_back(anchor); + } + in_control_anchor_ = ComGraphMakeShared(shared_from_this(), -1); + out_control_anchor_ = ComGraphMakeShared(shared_from_this(), -1); + if (in_control_anchor_ == nullptr || out_control_anchor_ == nullptr) { + GELOGE(GRAPH_FAILED, "Current in_control_anchor or out_control_anchor is null, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + has_init_ = true; + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY std::string Node::GetName() const { + GE_CHK_BOOL_EXEC(op_ != nullptr, return string(), "original OpDesc is nullptr"); + return op_->GetName(); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY std::string Node::GetType() const { + GE_CHK_BOOL_EXEC(op_ != nullptr, return string(), "original OpDesc is nullptr"); + return op_->GetType(); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool Node::NodeAttrsAreEqual(const Node &r_node) const { + const auto &attr_map = this->attrs_; + const auto &r_attr_map = r_node.attrs_; + // 1.Verify node's map size + if (attr_map.size() != r_attr_map.size()) { + GELOGE(GRAPH_FAILED, "Size of node's attr map verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + // 2.Verify node's map key, verify values is 
temporarily not implemented + for (const auto &it : attr_map) { + if (r_attr_map.count(it.first) == 0) { + GELOGE(GRAPH_FAILED, "Key of node's attr map verify failed, node name: %s key name: %s.", this->GetName().c_str(), + it.first.c_str()); + return false; + } + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool Node::NodeMembersAreEqual(const Node &r_node) const { + return ((((this->op_ != nullptr) && (r_node.op_ != nullptr) && (IsEqual(*(this->op_), *(r_node.op_), "node.op_"))) || + ((this->op_ == nullptr) && (r_node.op_ == nullptr))) && + IsEqual(this->has_init_, r_node.has_init_, "node.has_init_") && + IsEqual(this->anchor_status_updated_, r_node.anchor_status_updated_, "node.anchor_status_updated_") && + IsEqual(this->send_event_id_list_, r_node.send_event_id_list_, "node.send_event_id_list_") && + IsEqual(this->recv_event_id_list_, r_node.recv_event_id_list_, "node.recv_event_id_list_")); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool Node::NodeAnchorIsEqual(const AnchorPtr &left_anchor, + const AnchorPtr &right_anchor, + size_t i) const { + GE_IF_BOOL_EXEC(left_anchor == nullptr, GELOGE(GRAPH_FAILED, "left_anchor is null."); return false); + GE_IF_BOOL_EXEC(right_anchor == nullptr, GELOGE(GRAPH_FAILED, "right_anchor is null."); return false); + + const auto anchor_peer_size = left_anchor->GetPeerAnchors().size(); + const auto right_anchor_peer_size = right_anchor->GetPeerAnchors().size(); + // Firstly, verify anchor's peer anchors size equal or not + if (anchor_peer_size != right_anchor_peer_size) { + GELOGE(GRAPH_FAILED, + "Size of anchor's peer anchors verify failed, node name: %s " + "anchor_peer_size [%zu] is different form [%zu] at index [%zu].", + this->GetName().c_str(), anchor_peer_size, right_anchor_peer_size, i); + return false; + } + // Secondly, verify anchor's peer anchor owner node equal or not + for (size_t j = 0; j < anchor_peer_size; j++) { + const auto &peer_node = 
left_anchor->GetPeerAnchors().at(j)->GetOwnerNode(); + const auto &r_peer_node = right_anchor->GetPeerAnchors().at(j)->GetOwnerNode(); + if (peer_node == nullptr || r_peer_node == nullptr) { + GELOGE(GRAPH_FAILED, "Error: anchor's peer node is null, node name: %s index[%zu] peer node index[%zu]. ", + this->GetName().c_str(), i, j); + return false; + } + // Determine the connection relationship by linking the node's name + if (peer_node->GetName() != r_peer_node->GetName()) { + GELOGE(GRAPH_FAILED, + "anchor's peer node name verify failed, node name: %s index[%zu]" + "peer node name %s is different from %s at index [%zu].", + this->GetName().c_str(), i, peer_node->GetName().c_str(), r_peer_node->GetName().c_str(), j); + return false; + } + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool Node::NodeInConnectsAreEqual(const Node &r_node) const { + // 1.Verify all in data and control anchors size + const auto in_data_anchor_size = this->GetAllInDataAnchors().size(); + const auto r_in_data_anchor_size = r_node.GetAllInDataAnchors().size(); + if (in_data_anchor_size != r_in_data_anchor_size) { + GELOGE(GRAPH_FAILED, "Size of node's in data anchors verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + const auto l_in_anchors = this->GetAllInAnchors(); + const auto r_in_anchors = r_node.GetAllInAnchors(); + // Data anchors size equal, all anchors size not equal, means control anchor size not equal + const auto in_control_anchor_size = l_in_anchors.size() - in_data_anchor_size; + const auto r_in_control_anchor_size = r_in_anchors.size() - r_in_data_anchor_size; + if (in_control_anchor_size != r_in_control_anchor_size) { + GELOGE(GRAPH_FAILED, "Size of node's in control anchors verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + // 2.Verify all in data and control anchors connect info + for (size_t i = 0; i < this->GetAllInAnchors().size(); i++) { + // Verify data anchors + if (i < 
in_data_anchor_size) { + const auto &in_anchor = l_in_anchors.at(i); + const auto &r_in_anchor = r_in_anchors.at(i); + if (!(NodeAnchorIsEqual(in_anchor, r_in_anchor, i))) { + GELOGE(GRAPH_FAILED, "Node's in data control anchor verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + } else { + // Verify control anchors + const auto &in_control_anchor = l_in_anchors.at(i); + const auto &r_in_control_anchor = r_in_anchors.at(i); + if (!(NodeAnchorIsEqual(in_control_anchor, r_in_control_anchor, i - in_data_anchor_size))) { + GELOGE(GRAPH_FAILED, "Node's in control anchor verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + } + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool Node::NodeOutConnectsAreEqual(const Node &r_node) const { + // 1.Verify all out data and control anchors size + const auto l_out_data_anchors = this->GetAllOutDataAnchors(); + const auto r_out_data_anchors = r_node.GetAllOutDataAnchors(); + const auto out_data_anchor_size = l_out_data_anchors.size(); + const auto r_out_data_anchor_size = r_out_data_anchors.size(); + if (out_data_anchor_size != r_out_data_anchor_size) { + GELOGE(GRAPH_FAILED, "Size of node's out data anchors verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + const auto l_out_anchors = this->GetAllOutAnchors(); + const auto r_out_anchors = r_node.GetAllOutAnchors(); + // Data anchors size equal, all anchors size not equal, means control anchor size not equal + const auto out_control_anchor_size = l_out_anchors.size() - out_data_anchor_size; + const auto r_out_control_anchor_size = r_out_anchors.size() - r_out_data_anchor_size; + if (out_control_anchor_size != r_out_control_anchor_size) { + GELOGE(GRAPH_FAILED, "Size of node's out control anchors verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + + // 2.Verify all out data and control anchors connect info + for (size_t i = 0; i < 
this->GetAllOutAnchors().size(); i++) { + // Verify data anchors + if (i < out_data_anchor_size) { + const auto &out_anchor = l_out_data_anchors.at(i); + const auto &r_out_anchor = r_out_data_anchors.at(i); + if (!(NodeAnchorIsEqual(out_anchor, r_out_anchor, i))) { + GELOGE(GRAPH_FAILED, "Node's out data control anchor verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + } else { + // Verify control anchors + const auto &out_control_anchor = l_out_anchors.at(i); + const auto &r_out_control_anchor = r_out_anchors.at(i); + if (!(NodeAnchorIsEqual(out_control_anchor, r_out_control_anchor, i - out_data_anchor_size))) { + GELOGE(GRAPH_FAILED, "Node's out control anchor verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + } + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool Node::operator==(const Node &r_node) const { + return (NodeMembersAreEqual(r_node) && NodeAttrsAreEqual(r_node) && NodeInConnectsAreEqual(r_node) && + NodeOutConnectsAreEqual(r_node)); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus Node::AddLinkFrom(const NodePtr &input_node) { + // This function is deprecated, please use other two overloaded functions + GE_CHECK_NOTNULL(input_node); + // Input_node ---> this + auto out_anchors = input_node->GetAllOutDataAnchors(); + if (out_anchors.size() != 1) { + GELOGE(GRAPH_FAILED, "out_anchor size is:%zu, only support 1", out_anchors.size()); + return GRAPH_PARAM_INVALID; + } + GE_CHK_BOOL_EXEC(op_ != nullptr, return GRAPH_FAILED, "original OpDesc is nullptr"); + auto op_desc = input_node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + + if (op_->AddInputDesc(op_desc->GetOutputDesc(0)) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add input desc failed."); + return GRAPH_FAILED; + } + std::shared_ptr anchor = ComGraphMakeShared(shared_from_this(), in_data_anchors_.size()); + if (anchor == nullptr) { + GELOGE(GRAPH_FAILED, "out_anchor size is:%zu, malloc shared_ptr 
failed.", out_anchors.size()); + return GRAPH_FAILED; + } + in_data_anchors_.push_back(anchor); + (void)out_anchors.at(0)->LinkTo(in_data_anchors_.back()); + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus Node::AddLinkFrom(const uint32_t &index, + NodePtr input_node) { + GE_CHECK_NOTNULL(input_node); + // Input_node ---> this + auto out_anchors = input_node->GetAllOutDataAnchors(); + if (out_anchors.size() != 1) { + GELOGE(GRAPH_FAILED, "out_anchor size is:%zu, only support 1", out_anchors.size()); + return GRAPH_PARAM_INVALID; + } + + GE_CHECK_NOTNULL(op_); + auto op_desc = input_node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + + if (op_->AddInputDesc(index, op_desc->GetOutputDesc(0)) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add input desc failed."); + return GRAPH_FAILED; + } + std::shared_ptr anchor = ComGraphMakeShared(shared_from_this(), in_data_anchors_.size()); + if (anchor == nullptr) { + GELOGE(GRAPH_FAILED, "out_anchor size is:%zu, malloc shared_ptr failed.", out_anchors.size()); + return GRAPH_FAILED; + } + in_data_anchors_.push_back(anchor); + (void)out_anchors.at(0)->LinkTo(in_data_anchors_.back()); + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus Node::AddLinkFromForParse(const NodePtr &input_node) { + // This function is used for ParseWeights. 
+ GE_CHECK_NOTNULL(input_node); + // Input_node ---> this + auto out_anchors = input_node->GetAllOutDataAnchors(); + if (out_anchors.size() != 1) { + GELOGE(GRAPH_PARAM_INVALID, "out_anchor size is:%zu, only support 1", out_anchors.size()); + return GRAPH_PARAM_INVALID; + } + + std::shared_ptr anchor = ComGraphMakeShared(shared_from_this(), in_data_anchors_.size()); + if (anchor == nullptr) { + GELOGE(GRAPH_FAILED, "out_anchor size is:%zu, make anchor failed", out_anchors.size()); + return GRAPH_FAILED; + } + in_data_anchors_.push_back(anchor); + (void)out_anchors.at(0)->LinkTo(in_data_anchors_.back()); + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus Node::AddLinkFrom(const string &name, NodePtr input_node) { + GE_CHECK_NOTNULL(input_node); + // Input_node ---> this + auto out_anchors = input_node->GetAllOutDataAnchors(); + if (out_anchors.size() != 1) { + GELOGE(GRAPH_PARAM_INVALID, "out_anchor size is:%zu, only support 1", out_anchors.size()); + return GRAPH_PARAM_INVALID; + } + + GE_CHECK_NOTNULL(op_); + auto op_desc = input_node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + + if (op_->AddInputDesc(name, op_desc->GetOutputDesc(0)) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add input desc failed."); + return GRAPH_FAILED; + } + std::shared_ptr anchor = ComGraphMakeShared(shared_from_this(), in_data_anchors_.size()); + if (anchor == nullptr) { + GELOGE(GRAPH_FAILED, "out_anchor size is:%zu, malloc shared_ptr failed.", out_anchors.size()); + return GRAPH_FAILED; + } + in_data_anchors_.push_back(anchor); + (void)out_anchors.at(0)->LinkTo(in_data_anchors_.back()); + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ComputeGraphPtr Node::GetOwnerComputeGraph() const { + return owner_graph_.lock(); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus Node::SetOwnerComputeGraph(const ComputeGraphPtr &graph) { + if (graph == nullptr) { + return GRAPH_PARAM_INVALID; + } + owner_graph_ = 
graph; + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetAllInDataAnchors() const { + return Vistor(shared_from_this(), in_data_anchors_); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetAllOutDataAnchors() const { + return Vistor(shared_from_this(), out_data_anchors_); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY uint32_t Node::GetAllInDataAnchorsSize() const { + return in_data_anchors_.size(); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY uint32_t Node::GetAllOutDataAnchorsSize() const { + return out_data_anchors_.size(); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetAllInAnchors() const { + std::vector vec; + // Push back in_data_anchors_ + for (const auto &in_anchor_iter : Vistor(shared_from_this(), in_data_anchors_)) { + auto in_anchor = Anchor::DynamicAnchorCast(in_anchor_iter); + if (in_anchor != nullptr) { + vec.push_back(in_anchor); + } + } + // Push back in_control_anchor_ + if ((in_control_anchor_->GetPeerOutControlAnchors().size() > 0) || + (in_control_anchor_->GetPeerOutDataAnchors().size() > 0)) { + auto in_anchor = Anchor::DynamicAnchorCast(in_control_anchor_); + if (in_anchor != nullptr) { + vec.push_back(in_anchor); + } + } + return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetAllOutAnchors() const { + std::vector vec; + // Push back out_data_anchors_ + for (const auto &out_anchor_iter : Vistor(shared_from_this(), out_data_anchors_)) { + auto out_anchor = Anchor::DynamicAnchorCast(out_anchor_iter); + if (out_anchor != nullptr) { + vec.push_back(out_anchor); + } + } + // Push back out_control_anchor_ + if (out_control_anchor_->GetPeerInControlAnchors().size() > 0 || + out_control_anchor_->GetPeerInDataAnchors().size() > 0) { + auto out_anchor = Anchor::DynamicAnchorCast(out_control_anchor_); + if (out_anchor != nullptr) { + vec.push_back(out_anchor); + } + } + 
return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY InDataAnchorPtr Node::GetInDataAnchor(int idx) const { + if (idx < 0 || idx >= static_cast(in_data_anchors_.size())) { + GELOGE(GRAPH_FAILED, "the node doesn't have %d th in_data_anchor, node %s:%s", idx, GetType().c_str(), + GetName().c_str()); + return nullptr; + } else { + return in_data_anchors_[idx]; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY AnchorPtr Node::GetInAnchor(int idx) const { + // Idx can't be less than -1 or >= in_data_anchors_.size(), -1 means index of control anchor_ + if (idx < -1 || idx >= static_cast(in_data_anchors_.size())) { + GELOGW("the node doesn't have %d th in_anchor, node %s:%s", idx, GetType().c_str(), GetName().c_str()); + return nullptr; + } else { + // Return control anchor + if (idx == -1) { + auto in_anchor = Anchor::DynamicAnchorCast(in_control_anchor_); + return in_anchor; + } + // Return data anchor + return in_data_anchors_[idx]; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY AnchorPtr Node::GetOutAnchor(int idx) const { + // Idx can't be less than -1 or >= out_data_anchors_.size(), -1 means index of control anchor_ + if (idx < -1 || idx >= static_cast(out_data_anchors_.size())) { + GELOGE(GRAPH_FAILED, "the node doesn't have %d th out_anchor, node %s:%s", idx, GetType().c_str(), + GetName().c_str()); + return nullptr; + } else { + // Return control anchor + if (idx == -1) { + auto out_anchor = Anchor::DynamicAnchorCast(out_control_anchor_); + return out_anchor; + } + // Return data anchor + return out_data_anchors_[idx]; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OutDataAnchorPtr Node::GetOutDataAnchor(int idx) const { + if (idx < 0 || idx >= static_cast(out_data_anchors_.size())) { + GELOGE(GRAPH_FAILED, "the node doesn't have %d th out_data_anchor, node %s:%s", idx, GetType().c_str(), + GetName().c_str()); + return nullptr; + } else { + return out_data_anchors_[idx]; + } +} + 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY InControlAnchorPtr Node::GetInControlAnchor() const { + return in_control_anchor_; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OutControlAnchorPtr Node::GetOutControlAnchor() const { + return out_control_anchor_; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetInNodes() const { + std::vector vec; + for (const auto &in_anchor : in_data_anchors_) { + GE_CHK_BOOL_EXEC((in_anchor != nullptr), continue, "in_data_anchor is nullptr"); + auto out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor == nullptr) { + continue; + } + auto node = out_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + if (in_control_anchor_ != nullptr) { + if (in_control_anchor_->IsPeerOutAnchorsEmpty()) { + return Node::Vistor(shared_from_this(), vec); + } + + auto peer_out_anchors = in_control_anchor_->GetPeerOutDataAnchors(); + for (const auto &out_anchor : peer_out_anchors) { + GE_CHK_BOOL_EXEC(out_anchor != nullptr, continue, " in_control_anchor_ peer out data anchors is nullptr"); + auto node = out_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + + auto peer_out_control_anchors = in_control_anchor_->GetPeerOutControlAnchors(); + for (const auto &out_control_anchor : peer_out_control_anchors) { + GE_CHK_BOOL_EXEC(out_control_anchor != nullptr, continue, + " in_control_anchor_ peer out control anchors is nullptr"); + auto node = out_control_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool Node::IsAllInNodesSeen( + std::unordered_set &nodes_seen) const { + for (const auto &in_anchor : in_data_anchors_) { + GE_CHK_BOOL_EXEC((in_anchor != nullptr), continue, "in_data_anchor 
is nullptr"); + auto out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor == nullptr) { + continue; + } + auto node = out_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + if ((node->GetType() == NEXTITERATION) || (node->GetType() == REFNEXTITERATION)) { + continue; + } + if (nodes_seen.count(node.get()) == 0) { + return false; + } + } + + if (in_control_anchor_ != nullptr) { + if (in_control_anchor_->IsPeerOutAnchorsEmpty()) { + return true; + } + auto peer_out_control_anchors = in_control_anchor_->GetPeerOutControlAnchors(); + for (const auto &out_control_anchor : peer_out_control_anchors) { + GE_CHK_BOOL_EXEC(out_control_anchor != nullptr, continue, "out_control_anchor is nullptr"); + auto node = out_control_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + if ((node->GetType() == NEXTITERATION) || (node->GetType() == REFNEXTITERATION)) { + continue; + } + if (nodes_seen.count(node.get()) == 0) { + return false; + } + } + } + + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetInDataNodes() const { + std::vector vec; + for (const auto &in_anchor : in_data_anchors_) { + GE_CHK_BOOL_EXEC((in_anchor != nullptr), continue, "in_data_anchor is nullptr"); + auto anchor_ptr = in_anchor->GetPeerOutAnchor(); + if (anchor_ptr == nullptr) { + continue; + } + auto node = anchor_ptr->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetInControlNodes() const { + std::vector vec; + if (in_control_anchor_ != nullptr) { + for (const auto &in_anchor : in_control_anchor_->GetPeerOutControlAnchors()) { + GE_CHK_BOOL_EXEC(in_anchor != nullptr, continue, "GetPeerOutControlAnchors is nullptr"); + auto node = in_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node 
!= nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetOutNodes() const { + std::vector vec; + for (const auto &out_anchor : out_data_anchors_) { + GE_CHK_BOOL_EXEC((out_anchor != nullptr), continue, "out_data_anchors_ is nullptr"); + for (const auto &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) { + GE_CHK_BOOL_EXEC((peer_in_anchor != nullptr), continue, "GetPeerInDataAnchors is nullptr"); + auto node = peer_in_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + if (out_control_anchor_ != nullptr) { + auto peer_in_control_anchors = out_control_anchor_->GetPeerInControlAnchors(); + for (const auto &in_control_anchor : peer_in_control_anchors) { + GE_CHK_BOOL_EXEC(in_control_anchor != nullptr, continue, + "out_control_anchor_ peer in control anchors is nullptr"); + auto node = in_control_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetInAllNodes() const { + std::vector vec; + for (const auto &in_node : GetInDataNodes()) { + vec.push_back(in_node); + } + for (const auto &in_control_node : GetInControlNodes()) { + vec.push_back(in_control_node); + } + return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetOutDataNodes() const { + std::vector vec; + for (const auto &out_anchor : out_data_anchors_) { + GE_CHK_BOOL_EXEC((out_anchor != nullptr), continue, "out_data_anchors_ is nullptr"); + for (const auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + GE_CHK_BOOL_EXEC((in_anchor != nullptr), continue, "GetPeerInDataAnchors is nullptr"); + auto node = 
in_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY uint32_t Node::GetOutDataNodesSize() const { + uint32_t out_nums = 0; + for (const auto &out_anchor : out_data_anchors_) { + GE_CHK_BOOL_EXEC((out_anchor != nullptr), continue, "out_data_anchors_ is nullptr"); + out_nums += out_anchor->GetPeerInDataNodesSize(); + } + return out_nums; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetOutControlNodes() const { + std::vector vec; + + for (const auto &out_anchor : out_data_anchors_) { + GE_CHK_BOOL_EXEC((out_anchor != nullptr), continue, "out_data_anchors_ is nullptr"); + for (const auto &in_anchor : out_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC((in_anchor != nullptr), continue, "GetPeerInControlAnchors is nullptr"); + auto node = in_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + + if (out_control_anchor_ != nullptr) { + for (const auto &in_anchor : out_control_anchor_->GetPeerAnchors()) { + GE_CHK_BOOL_EXEC(in_anchor != nullptr, continue, "GetPeerInControlAnchors is nullptr"); + auto node = in_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + + return Node::Vistor(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor Node::GetOutAllNodes() const { + std::vector vec; + for (const auto &out_anchor : out_data_anchors_) { + GE_CHK_BOOL_EXEC((out_anchor != nullptr), { continue; }, "out_data_anchors_ is nullptr"); + for (const auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + GE_CHK_BOOL_EXEC((in_anchor != nullptr), { continue; }, "GetPeerInDataAnchors is nullptr"); + auto node = in_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, 
continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + for (const auto &in_anchor : out_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(in_anchor != nullptr, continue, "GetPeerInControlAnchors is nullptr"); + auto node = in_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + + if (out_control_anchor_ != nullptr) { + for (const auto &in_anchor : out_control_anchor_->GetPeerAnchors()) { + GE_CHK_BOOL_EXEC(in_anchor != nullptr, continue, "GetPeerInControlAnchors is nullptr"); + auto node = in_anchor->GetOwnerNode(); + GE_CHK_BOOL_EXEC(node != nullptr, continue, "GetOwnerNode is nullptr"); + vec.push_back(node); + } + } + return Node::Vistor(shared_from_this(), vec); +} + +graphStatus Node::InferShapeAndType() const { + Operator op = ge::OpDescUtils::CreateOperatorFromNode(shared_from_this()); + graphStatus ret = ShapeRefiner::InferShapeAndType(shared_from_this(), op); + return ret; +} + +graphStatus Node::InferOriginFormat() const { + Operator op = ge::OpDescUtils::CreateOperatorFromNode(shared_from_this()); + // Get infer func and execute + GE_CHK_BOOL_EXEC(op_ != nullptr, return GRAPH_FAILED, "original OpDesc is nullptr"); + return op_->CallInferFormatFunc(op); +} +graphStatus Node::Verify() const { + const string data_type = "Data"; + const string aipp_data_type = "AippData"; + const string const_type = "Const"; + const string variable_type = "Variable"; + GE_CHK_BOOL_EXEC(op_ != nullptr, return GRAPH_FAILED, "original OpDesc is nullptr"); + + for (const auto &in_anchor_ptr : GetAllInDataAnchors()) { + if (in_anchor_ptr == nullptr) { + GELOGW("in anchor ptr is null"); + continue; + } + GE_CHK_BOOL_RET_STATUS( + op_->GetType() == data_type || op_->GetType() == aipp_data_type || op_->GetType() == const_type || + op_->GetType() == variable_type || op_->IsOptionalInput(in_anchor_ptr->GetIdx()) || + in_anchor_ptr->GetPeerAnchors().size() > 0, + GRAPH_FAILED, "operator %s's 
input %d is not linked.", GetName().c_str(), in_anchor_ptr->GetIdx()); + } + + string frameworkop_type = "FrameworkOp"; + if (op_->GetType() != frameworkop_type) { + auto node_op = ge::OperatorFactoryImpl::CreateOperator("node_op", op_->GetType()); + if (node_op.IsEmpty()) { + GELOGW("get op from OperatorFactory fail. opType: %s", op_->GetType().c_str()); + } else { + GELOGD("get op from OperatorFactory success. opType: %s", op_->GetType().c_str()); + auto temp_op_desc = ge::OpDescUtils::GetOpDescFromOperator(node_op); + if (temp_op_desc == nullptr) { + GELOGE(GRAPH_FAILED, "temp op desc is null"); + return GRAPH_FAILED; + } + if (!op_->UpdateInputName(temp_op_desc->GetAllInputName())) { + GELOGW("Verify UpdateInputName failed"); + } + if (!op_->UpdateOutputName(temp_op_desc->GetAllOutputName())) { + GELOGW("Verify UpdateOutputName failed"); + } + } + } + + if (op_->CommonVerify() == GRAPH_SUCCESS) { + Operator op_proxy = ge::OpDescUtils::CreateOperatorFromNode(shared_from_this()); + auto verify_func = op_->GetVerifyFunc(); + if (verify_func == nullptr) { + verify_func = OperatorFactoryImpl::GetVerifyFunc(GetType()); + } + if (verify_func != nullptr) { + return (graphStatus)verify_func(op_proxy); + } + return GRAPH_SUCCESS; + } else { + GELOGE(GRAPH_FAILED, "%s Verify failed.", op_->GetType().c_str()); + return GRAPH_FAILED; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDescPtr Node::GetOpDesc() const { return op_; } + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus Node::UpdateOpDesc(const OpDescPtr &op_desc) { + GE_CHK_BOOL_EXEC(op_ != nullptr, return GRAPH_FAILED, "original OpDesc is nullptr"); + GE_CHK_BOOL_EXEC(op_desc != nullptr, return GRAPH_PARAM_INVALID, "Param OpDesc is nullptr"); + GE_CHK_BOOL_EXEC(op_->GetInputsSize() == op_desc->GetInputsSize(), return GRAPH_PARAM_INVALID, + "Inputs count expected to be same, orginial OpDesc %zu, Param OpDesc %zu", op_->GetInputsSize(), + op_desc->GetInputsSize()); + 
GE_CHK_BOOL_EXEC(op_->GetOutputsSize() == op_desc->GetOutputsSize(), return GRAPH_PARAM_INVALID, + "Outputs count expected to be same, orginial OpDesc %zu, Param OpDesc %zu", op_->GetOutputsSize(), + op_desc->GetOutputsSize()); + op_ = op_desc; + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor> +Node::GetInDataNodesAndAnchors() const { + std::vector> vec; + for (const auto &p : in_data_anchors_) { + if (p == nullptr) { + GELOGW("indata anchor is nullptr, node %s:%s", GetType().c_str(), GetName().c_str()); + continue; + } + auto anchor_ptr = p->GetPeerOutAnchor(); + if (anchor_ptr == nullptr) { + continue; + } + auto node = anchor_ptr->GetOwnerNode(); + if (node == nullptr) { + GELOGW("src node is nullptr, node %s:%s", GetType().c_str(), GetName().c_str()); + continue; + } + vec.push_back(std::make_pair(node, anchor_ptr)); + } + return Node::Vistor>(shared_from_this(), vec); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Node::Vistor> +Node::GetOutDataNodesAndAnchors() const { + std::vector> vec; + for (const auto &p : out_data_anchors_) { + if (p == nullptr) { + GELOGW("out data anchor is nullptr, node %s:%s", GetType().c_str(), GetName().c_str()); + continue; + } + for (const auto &in_anchor : p->GetPeerInDataAnchors()) { + if (in_anchor == nullptr) { + GELOGW("dst in data anchor is nullptr, node %s:%s", GetType().c_str(), GetName().c_str()); + continue; + } + auto node = in_anchor->GetOwnerNode(); + if (node == nullptr) { + GELOGW("dst node is nullptr, node %s:%s", GetType().c_str(), GetName().c_str()); + continue; + } + vec.push_back(std::make_pair(node, in_anchor)); + } + } + return Node::Vistor>(shared_from_this(), vec); +} +} // namespace ge diff --git a/src/common/graph/op_desc.cc b/src/common/graph/op_desc.cc new file mode 100644 index 00000000..30bbb8fc --- /dev/null +++ b/src/common/graph/op_desc.cc @@ -0,0 +1,1109 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/op_desc.h" + +#include "debug/ge_attr_define.h" +#include "debug/ge_util.h" +#include "external/graph/operator.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/operator_factory_impl.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/ge_ir_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "proto/ge_ir.pb.h" + +using std::make_pair; +using std::shared_ptr; +using std::string; +using std::vector; + +namespace ge { +const std::string ATTR_NAME_ID = "id"; + +const std::string ATTR_NAME_STREAM_ID = "stream_id"; + +const std::string ATTR_NAME_INPUT_NAME = "input_name"; + +const std::string ATTR_NAME_SRC_NAME = "src_name"; + +const std::string ATTR_NAME_SRC_INDEX = "src_index"; + +const std::string ATTR_NAME_INPUT = "input"; + +const std::string ATTR_NAME_OUTPUT = "output"; + +const std::string ATTR_NAME_INPUT_DESC = "input_desc"; + +const std::string ATTR_NAME_OUTPUT_DESC = "output_desc"; + +const std::string ATTR_NAME_DST_NAME = "dst_name"; + +const std::string ATTR_NAME_DST_INDEX = "dst_index"; + +const std::string ATTR_NAME_WORKSPACE = "workspace"; + +const std::string ATTR_NAME_WORKSPACE_BYTES = "workspace_bytes"; + +const std::string ATTR_NAME_IS_INPUT_CONST = "is_input_const"; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDesc::OpDesc() { + op_def_.InitDefault(); + if (op_def_.GetProtoMsg() != nullptr) { + 
op_def_.GetProtoMsg()->set_has_out_attr(true); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDesc::~OpDesc() {} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDesc::OpDesc(const std::string &name, const std::string &type) { + op_def_.InitDefault(); + if (op_def_.GetProtoMsg() != nullptr) { + op_def_.GetProtoMsg()->set_has_out_attr(true); + } + SetName(name); + SetType(type); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDesc::OpDesc(const ProtoMsgOwner &proto_msg_owner, + ge::proto::OpDef *op_def) + : op_def_(proto_msg_owner, op_def) { + if (op_def != nullptr && !op_def->has_out_attr()) { + op_def->set_has_out_attr(true); + + int64_t id = 0; + (void)AttrUtils::GetInt(this, ATTR_NAME_ID, id); + op_def->set_id(id); + + int64_t stream_id = 0; + (void)AttrUtils::GetInt(this, ATTR_NAME_STREAM_ID, stream_id); + op_def->set_stream_id(stream_id); + + vector input_name; + (void)AttrUtils::GetListStr(this, ATTR_NAME_INPUT_NAME, input_name); + for (auto &item : input_name) { + op_def->add_input_name(item); + } + vector src_name; + (void)AttrUtils::GetListStr(this, ATTR_NAME_SRC_NAME, src_name); + for (auto &item : src_name) { + op_def->add_src_name(item); + } + vector src_index; + (void)AttrUtils::GetListInt(this, ATTR_NAME_SRC_INDEX, src_index); + for (auto &item : src_index) { + op_def->add_src_index(item); + } + vector input; + (void)AttrUtils::GetListInt(this, ATTR_NAME_INPUT, input); + for (auto &item : input) { + op_def->add_input_i(item); + } + vector output; + (void)AttrUtils::GetListInt(this, ATTR_NAME_OUTPUT, output); + for (auto &item : output) { + op_def->add_output_i(item); + } + vector dst_name; + (void)AttrUtils::GetListStr(this, ATTR_NAME_DST_NAME, dst_name); + for (auto &item : dst_name) { + op_def->add_dst_name(item); + } + vector dst_index; + (void)AttrUtils::GetListInt(this, ATTR_NAME_DST_INDEX, dst_index); + for (auto &item : dst_index) { + op_def->add_dst_index(item); + } + vector workspace; + (void)AttrUtils::GetListInt(this, 
ATTR_NAME_WORKSPACE, workspace); + for (auto &item : workspace) { + op_def->add_workspace(item); + } + vector workspace_bytes; + (void)AttrUtils::GetListInt(this, ATTR_NAME_WORKSPACE_BYTES, workspace_bytes); + for (auto &item : workspace_bytes) { + op_def->add_workspace_bytes(item); + } + vector is_input_const; + (void)AttrUtils::GetListBool(this, ATTR_NAME_IS_INPUT_CONST, is_input_const); + for (auto item : is_input_const) { + op_def->add_is_input_const(item); + } + auto input_desc_mutable_list = (*op_def->mutable_attr())[ATTR_NAME_INPUT_DESC].mutable_list(); + if (input_desc_mutable_list != nullptr) { + *op_def->mutable_input_desc() = *(input_desc_mutable_list->mutable_td()); + } + auto output_desc_mutable_list = (*op_def->mutable_attr())[ATTR_NAME_OUTPUT_DESC].mutable_list(); + if (output_desc_mutable_list != nullptr) { + *op_def->mutable_output_desc() = *(output_desc_mutable_list->mutable_td()); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY string OpDesc::GetName() const { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + return proto_msg->name(); + } + return ""; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetName(const std::string &name) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->set_name(name); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY string OpDesc::GetType() const { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + return proto_msg->type(); + } + return ""; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetType(const string &type) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->set_type(type); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus OpDesc::AddInputDesc(const ge::GeTensorDesc &input_desc) { + int index = static_cast(inputs_desc_.size()); + return AddInputDesc("__input" + std::to_string(index), input_desc); +} + +graphStatus 
OpDesc::AddInputDesc(uint32_t index, const ge::GeTensorDesc &input_desc) { + graphStatus ret = GRAPH_SUCCESS; + if (index < inputs_desc_.size()) { + // InputsDesc[index] is exist, then update it + ret = UpdateInputDesc(index, input_desc); + } else { + // InputDesc[index] is not exist, then add it + ret = AddInputDesc(input_desc); + } + return ret; +} + +graphStatus OpDesc::AddInputDesc(const string &name, const ge::GeTensorDesc &input_desc) { + if (input_name_idx_.find(name) != input_name_idx_.end()) { + GELOGI("input %s is exist, update it", name.c_str()); + graphStatus ret = UpdateInputDesc(name, input_desc); + return ret; + } else { + int index = static_cast(inputs_desc_.size()); + std::shared_ptr in_desc = ComGraphMakeShared(input_desc); + if (in_desc == nullptr) { + GELOGE(GRAPH_FAILED, "AddInputDesc failed, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + inputs_desc_.push_back(in_desc); + (void)input_name_idx_.insert(make_pair(name, index)); + return GRAPH_SUCCESS; + } +} + +graphStatus OpDesc::AddInputDescForward(const string &name, const unsigned int num) { + for (unsigned int i = 0; i < num; i++) { + string input_name = name + std::to_string(i); + GE_CHK_BOOL_RET_STATUS((input_name_idx_.find(input_name) == input_name_idx_.end()), GRAPH_FAILED, + "Add input tensor_desc is existed. 
name[%s]", input_name.c_str()); + + std::shared_ptr in_desc = ComGraphMakeShared(GeTensorDesc()); + if (in_desc == nullptr) { + GELOGE(GRAPH_FAILED, "AddInputDescForward failed, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + + (void)inputs_desc_.insert(inputs_desc_.begin(), in_desc); + + // Update index in input_name_idx + for (auto it = input_name_idx_.begin(); it != input_name_idx_.end(); ++it) { + it->second += 1; + } + + (void)input_name_idx_.insert(make_pair(input_name, 0)); + } + + return GRAPH_SUCCESS; +} + +graphStatus OpDesc::AddOutputDescForward(const string &name, const unsigned int num) { + for (unsigned int i = 0; i < num; i++) { + string output_name = name + std::to_string(i); + GE_CHK_BOOL_RET_STATUS((output_name_idx_.find(output_name) == output_name_idx_.end()), GRAPH_FAILED, + "Add output tensor_desc is existed. name[%s]", output_name.c_str()); + + std::shared_ptr in_desc = ComGraphMakeShared(GeTensorDesc()); + if (in_desc == nullptr) { + GELOGE(GRAPH_FAILED, "AddOutputDescForward failed, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + + (void)outputs_desc_.insert(outputs_desc_.begin(), in_desc); + + // Update index in output_name_idx + for (auto it = output_name_idx_.begin(); it != output_name_idx_.end(); ++it) { + it->second += 1; + } + (void)output_name_idx_.insert(make_pair(output_name, 0)); + } + + return GRAPH_SUCCESS; +} + +graphStatus OpDesc::AddOptionalInputDesc(const string &name, const ge::GeTensorDesc &input_desc) { + if (OpDesc::AddInputDesc(name, input_desc) == GRAPH_FAILED) return GRAPH_FAILED; + (void)optional_input_names_.insert(name); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +OpDesc::UpdateInputDesc(uint32_t index, const ge::GeTensorDesc &tensor_Desc) { + GE_CHK_BOOL_RET_STATUS((index < inputs_desc_.size()), GRAPH_FAILED, "The index is invalid. 
index[%u]", index); + + inputs_desc_[index] = ComGraphMakeShared(tensor_Desc); + if (inputs_desc_[index] == nullptr) { + GELOGE(GRAPH_FAILED, "UpdateInputDesc failed, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDesc::OpDescMembersAreEqual(const OpDesc &r_op_desc) const { + return (IsEqual(this->input_name_idx_, r_op_desc.input_name_idx_, "OpDesc.input_name_idx_") && + IsEqual(this->output_name_idx_, r_op_desc.output_name_idx_, "OpDesc.output_name_idx_") && + IsEqual(this->optional_input_names_, r_op_desc.optional_input_names_, "OpDesc.optional_input_names_") && + IsEqual(this->engine_name_, r_op_desc.engine_name_, "OpDesc.engine_name_") && + IsEqual(this->op_kernel_lib_name_, r_op_desc.op_kernel_lib_name_, "OpDesc.op_kernel_lib_name_")); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDesc::OpDescAttrsAreEqual(const OpDesc &r_op_desc) const { + const auto &op_def = this->op_def_.GetProtoMsg(); + const auto &r_op_def = r_op_desc.op_def_.GetProtoMsg(); + if ((op_def != nullptr) && (r_op_def != nullptr)) { + // Message OpDef in ge_ir.proto + return ( + IsEqual(op_def->name(), r_op_def->name(), "OpDef_.name()") && + IsEqual(op_def->type(), r_op_def->type(), "OpDef_.type()") && + IsEqual(ToString(op_def->input()), ToString(r_op_def->input()), "OpDef_.input()") && + IsEqual(op_def->has_out_attr(), r_op_def->has_out_attr(), "OpDef_.has_out_attr()") && + IsEqual(op_def->stream_id(), r_op_def->stream_id(), "OpDef_.stream_id()") && + IsEqual(ToString(op_def->input_name()), ToString(r_op_def->input_name()), "OpDef_.input_name()") && + IsEqual(ToString(op_def->src_name()), ToString(r_op_def->src_name()), "OpDef_.src_name()") && + IsEqual(ToString(op_def->dst_name()), ToString(r_op_def->dst_name()), "OpDef_.dst_name()") && + IsEqual(ToString(op_def->src_index()), ToString(r_op_def->src_index()), "OpDef_.src_index()") && + IsEqual(ToString(op_def->dst_index()), 
ToString(r_op_def->dst_index()), "OpDef_.dst_index()") && + IsEqual(ToString(op_def->input_i()), ToString(r_op_def->input_i()), "OpDef_.input_i()") && + IsEqual(ToString(op_def->output_i()), ToString(r_op_def->output_i()), "OpDef_.output_i()") && + IsEqual(ToString(op_def->workspace()), ToString(r_op_def->workspace()), "OpDef_.workspace()") && + IsEqual(ToString(op_def->workspace_bytes()), ToString(r_op_def->workspace_bytes()), + "OpDef_.workspace_bytes()") && + IsEqual(ToString(op_def->is_input_const()), ToString(r_op_def->is_input_const()), "OpDef_.is_input_const()")); + } else { + return ((op_def == nullptr) && (r_op_def == nullptr)); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDesc::OpDescGenTensorDescsAreEqual( + const OpDesc &r_op_desc) const { + // 1.Verify inputs and outputs desc size + const auto inputs_desc_size = this->inputs_desc_.size(); + const auto r_inputs_desc_size = r_op_desc.inputs_desc_.size(); + if (inputs_desc_size != r_inputs_desc_size) { + GELOGE(GRAPH_FAILED, "Size of OpDesc's inputs desc verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + const auto outputs_desc_size = this->outputs_desc_.size(); + const auto r_outputs_desc_size = r_op_desc.outputs_desc_.size(); + if (outputs_desc_size != r_outputs_desc_size) { + GELOGE(GRAPH_FAILED, "Size of OpDesc's outputs desc verify failed, node name: %s.", this->GetName().c_str()); + return false; + } + // 2.Verify all inputs desc equal + for (uint32_t i = 0; i < inputs_desc_size; i++) { + const auto &in_ge_tensor_desc = this->GetInputDesc(i); + const auto &r_in_ge_tensor_desc = r_op_desc.GetInputDesc(i); + // Determine the connection relationship by GeTensorDesc + if (!(in_ge_tensor_desc == r_in_ge_tensor_desc)) { + GELOGE(GRAPH_FAILED, "Link info of OpDesc's inputs desc verify failed, OpDesc name: %s.", + this->GetName().c_str()); + return false; + } + } + // 3.Verify all outputs desc equal + for (uint32_t i = 0; i < outputs_desc_size; i++) { + const 
auto &out_ge_tensor_desc = this->GetOutputDesc(i); + const auto &r_out_ge_tensor_desc = r_op_desc.GetOutputDesc(i); + if (!(out_ge_tensor_desc == r_out_ge_tensor_desc)) { + GELOGE(GRAPH_FAILED, "Link info of OpDesc's outputs desc verify failed, OpDesc name: %s.", + this->GetName().c_str()); + return false; + } + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDesc::operator==(const OpDesc &r_op_desc) const { + return (OpDescAttrsAreEqual(r_op_desc) && OpDescMembersAreEqual(r_op_desc) && + OpDescGenTensorDescsAreEqual(r_op_desc)); +} + +graphStatus OpDesc::UpdateInputDesc(const string &name, const ge::GeTensorDesc &tensor_Desc) { + auto it = input_name_idx_.find(name); + if (it == input_name_idx_.end()) { + GELOGW("Cann't find the input desc. name[%s]", name.c_str()); + return GRAPH_FAILED; + } + if (it->second >= inputs_desc_.size()) { + GELOGE(GRAPH_FAILED, "[%d] more than size of inputs_desc_", it->second); + return GRAPH_FAILED; + } + GE_IF_BOOL_EXEC(it->second >= inputs_desc_.size(), GELOGE(GRAPH_FAILED, "it->second is invalid."); + return GRAPH_FAILED); + inputs_desc_[it->second] = ComGraphMakeShared(tensor_Desc); + if (inputs_desc_[it->second] == nullptr) { + GELOGE(GRAPH_FAILED, "UpdateInputDesc failed, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + + return GRAPH_SUCCESS; +} + +bool OpDesc::InputIsSet(const string &name) const { + auto it = input_name_idx_.find(name); + if (it != input_name_idx_.end()) { + GE_IF_BOOL_EXEC(it->second >= inputs_desc_.size(), GELOGE(GRAPH_FAILED, "it->second is invalid."); return false); + auto tensor_desc = inputs_desc_[it->second]; + GE_IF_BOOL_EXEC(tensor_desc == nullptr, GELOGE(GRAPH_FAILED, "tensor_desc is null."); return false); + auto dims = tensor_desc->GetShape().GetDims(); + if (dims.size() > 0) { + return true; + } + } + return false; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeTensorDesc OpDesc::GetInputDesc(uint32_t index) const { + 
GE_CHK_BOOL_RET_STATUS_NOLOG(index < inputs_desc_.size(), GeTensorDesc()); + return *(inputs_desc_[index].get()); +} + +GeTensorDesc OpDesc::GetInputDesc(const string &name) const { + auto it = input_name_idx_.find(name); + GE_CHK_BOOL_RET_STATUS_NOLOG(it != input_name_idx_.end(), GeTensorDesc()); + GE_CHK_BOOL_RET_STATUS_NOLOG(it->second < inputs_desc_.size(), GeTensorDesc()); + return *(inputs_desc_[it->second].get()); +} + +GE_FUNC_HOST_VISIBILITY OpDesc::Vistor OpDesc::GetAllInputNames() const { + vector names; + if (input_name_idx_.empty()) { + return OpDesc::Vistor(shared_from_this(), names); + } + for (std::pair input : input_name_idx_) { + names.push_back(input.first); + } + return OpDesc::Vistor(shared_from_this(), names); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetOpKernelLibName(const std::string &name) { + op_kernel_lib_name_ = name; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY std::string OpDesc::GetOpKernelLibName() const { + return op_kernel_lib_name_; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetOpEngineName(const std::string &name) { + engine_name_ = name; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY std::string OpDesc::GetOpEngineName() const { return engine_name_; } + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeTensorDescPtr OpDesc::MutableInputDesc(uint32_t index) const { + GE_CHK_BOOL_RET_STATUS(index < inputs_desc_.size(), nullptr, "Can't find the input desc %u", index); + if (inputs_desc_[index] == nullptr) { + return nullptr; + } + GE_CHK_BOOL_RET_STATUS(inputs_desc_[index]->IsValid() == GRAPH_SUCCESS, nullptr, "input desc is invalid"); + return inputs_desc_[index]; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDesc::Vistor OpDesc::GetAllInputsDesc() const { + vector temp{}; + for (const auto &it : inputs_desc_) { + if (it->IsValid() == GRAPH_SUCCESS) { + temp.push_back(*it); + } else { + GELOGW("this inputDesc is InValid, it won't be return"); + continue; + } + 
} + return OpDesc::Vistor(shared_from_this(), temp); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDesc::Vistor OpDesc::GetAllInputsDescPtr() const { + vector temp{}; + for (const auto &it : inputs_desc_) { + if (it->IsValid() == GRAPH_SUCCESS) { + temp.push_back(it); + } else { + GELOGW("this inputDesc is InValid, it won't be return"); + continue; + } + } + return OpDesc::Vistor(shared_from_this(), temp); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY size_t OpDesc::GetInputsSize() const { + // Just return valid inputs size.InValid desc is set in default OPTION_INPUT register. + size_t size = 0; + for (const auto &it : inputs_desc_) { + if (it->IsValid() == GRAPH_SUCCESS) { + size++; + } + } + return size; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus OpDesc::AddOutputDesc(const ge::GeTensorDesc &output_desc) { + int index = static_cast(outputs_desc_.size()); + return AddOutputDesc("__output" + std::to_string(index), output_desc); +} + +graphStatus OpDesc::AddOutputDesc(const string &name, const ge::GeTensorDesc &output_desc) { + GE_CHK_BOOL_RET_STATUS((output_name_idx_.find(name) == output_name_idx_.end()), GRAPH_FAILED, + "Add output tensor_Desc is existed. name[%s]", name.c_str()); + int index = static_cast(outputs_desc_.size()); + + std::shared_ptr tensor = ComGraphMakeShared(output_desc); + if (tensor == nullptr) { + GELOGE(GRAPH_FAILED, "AddOutputDesc failed, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + outputs_desc_.push_back(tensor); + (void)output_name_idx_.insert(make_pair(name, index)); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +OpDesc::UpdateOutputDesc(uint32_t index, const ge::GeTensorDesc &tensor_Desc) { + GE_CHK_BOOL_RET_STATUS((index < outputs_desc_.size()), GRAPH_FAILED, "The index is invalid. 
index[%u]", index); + + outputs_desc_[index] = ComGraphMakeShared(tensor_Desc); + if (outputs_desc_[index] == nullptr) { + GELOGE(GRAPH_FAILED, "UpdateOutputDesc failed, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +graphStatus OpDesc::UpdateOutputDesc(const string &name, const ge::GeTensorDesc &tensor_Desc) { + auto it = output_name_idx_.find(name); + if (it == output_name_idx_.end()) { + GELOGW("Cann't find the output desc. name[%s]", name.c_str()); + return GRAPH_FAILED; + } + GE_IF_BOOL_EXEC(it->second >= outputs_desc_.size(), GELOGE(GRAPH_FAILED, "it->second is invalid."); + return GRAPH_FAILED); + outputs_desc_[it->second] = ComGraphMakeShared(tensor_Desc); + if (outputs_desc_[it->second] == nullptr) { + GELOGE(GRAPH_FAILED, "UpdateOutputDesc failed, malloc shared_ptr failed."); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeTensorDesc OpDesc::GetOutputDesc(uint32_t index) const { + GE_CHK_BOOL_RET_STATUS_NOLOG(index < outputs_desc_.size(), GeTensorDesc()); + return *(outputs_desc_[index].get()); +} + +GeTensorDesc OpDesc::GetOutputDesc(const string &name) const { + auto it = output_name_idx_.find(name); + GE_CHK_BOOL_RET_STATUS_NOLOG(it != output_name_idx_.end(), GeTensorDesc()); + GE_CHK_BOOL_RET_STATUS_NOLOG(it->second < outputs_desc_.size(), GeTensorDesc()); + return *(outputs_desc_[it->second].get()); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeTensorDescPtr OpDesc::MutableOutputDesc(uint32_t index) const { + GE_CHK_BOOL_RET_STATUS(index < outputs_desc_.size(), nullptr, "Cann't find the output desc %u", index); + return outputs_desc_[index]; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDesc::Vistor OpDesc::GetAllOutputsDesc() const { + vector temp{}; + for (const auto &it : outputs_desc_) { + temp.push_back(*it); + } + return OpDesc::Vistor(shared_from_this(), temp); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDesc::Vistor 
OpDesc::GetAllOutputsDescPtr() const { + return OpDesc::Vistor(shared_from_this(), outputs_desc_); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY size_t OpDesc::GetOutputsSize() const { return outputs_desc_.size(); } + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ConstGeTensorDescPtr OpDesc::GetOutputDescPtr(uint32_t index) const { + GE_CHK_BOOL_RET_STATUS_NOLOG((index) < static_cast(outputs_desc_.size()), nullptr); + return outputs_desc_[index]; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ConstGeTensorDescPtr OpDesc::GetInputDescPtr(uint32_t index) const { + GE_CHK_BOOL_RET_STATUS_NOLOG((index) < static_cast(inputs_desc_.size()), nullptr); + if (inputs_desc_[index] == nullptr) { + return nullptr; + } + if (inputs_desc_[index]->IsValid() != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "inputsDesc[%u] is InValid", index); + return nullptr; + } else { + return inputs_desc_[static_cast(index)]; + } +} + +graphStatus OpDesc::AddDynamicInputDesc(const string &name, const unsigned int num, bool is_push_back) { + if (is_push_back) { + for (unsigned int i = 0; i < num; i++) { + if (AddInputDesc(name + std::to_string(i), GeTensorDesc()) != GRAPH_SUCCESS) return GRAPH_FAILED; + } + } else { + if (AddInputDescForward(name, num) != GRAPH_SUCCESS) return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +graphStatus OpDesc::AddDynamicOutputDesc(const string &name, const unsigned int num, bool is_push_back) { + if (is_push_back) { + for (unsigned int i = 0; i < num; i++) { + if (AddOutputDesc(name + std::to_string(i), GeTensorDesc()) != GRAPH_SUCCESS) return GRAPH_FAILED; + } + } else { + if (AddOutputDescForward(name, num) != GRAPH_SUCCESS) return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +bool OpDesc::IsOptionalInput(const string &name) const { + return optional_input_names_.find(name) != optional_input_names_.end(); +} + +bool OpDesc::IsOptionalInput(uint32_t index) const { return IsOptionalInput(GetInputNameByIndex(index)); } + +std::map 
OpDesc::GetAllInputName() { return input_name_idx_; } + +std::map OpDesc::GetAllOutputName() { return output_name_idx_; } + +bool OpDesc::UpdateInputName(std::map input_name_idx) { + bool ret = true; + // Use inputDesc_.size() to contain the InValid OptionInput.GetInputsSize() will remove default OptionInput name. + auto input_map_size = inputs_desc_.size(); + auto factory_map_size = input_name_idx.size(); + // It indicates that some inputs have no optionalname. + // The redundant optionalname of factory needs to be deleted and then assigned + if (input_map_size < factory_map_size) { + GELOGI("UpdateInputName org inputname map size: %zu, factory inputname map size: %zu", input_map_size, + factory_map_size); + for (auto it = input_name_idx.begin(); it != input_name_idx.end();) { + if (it->second >= input_map_size) { + it = input_name_idx.erase(it); + } else { + ++it; + } + } + if (input_name_idx.size() == input_map_size) { + GELOGI("UpdateInputName"); + input_name_idx_ = input_name_idx; + } else { + ret = false; + GELOGW("after UpdateInputName factoryName map size : %zu", input_name_idx.size()); + } + } else if (input_map_size == factory_map_size) { + input_name_idx_ = input_name_idx; + } else { + ret = false; + GELOGW("org inputname map size: %zu, factory inputname map size: %zu", input_map_size, factory_map_size); + } + return ret; +} + +bool OpDesc::UpdateOutputName(std::map output_name_idx) { + size_t output_map_size = GetAllOutputsDesc().size(); + size_t factory_map_size = output_name_idx.size(); + if (output_map_size < factory_map_size) { + GELOGI("UpdateOutputName org outputname map size: %zu, factory outputname map size: %zu", output_map_size, + factory_map_size); + for (auto it = output_name_idx.begin(); it != output_name_idx.end();) { + if (it->second >= output_map_size) { + it = output_name_idx.erase(it); + } else { + ++it; + } + } + if (output_name_idx.size() == output_map_size) { + GELOGI("UpdateoutputName"); + output_name_idx_ = output_name_idx; + 
return true; + } + } else if (output_map_size == factory_map_size) { + output_name_idx_ = output_name_idx; + return true; + } else { + GELOGW("UpdateOutputName org name map size: %zu, factory map size: %zu", output_map_size, factory_map_size); + return false; + } + GELOGW("UpdateOutputName org name map size: %zu, factory map size: %zu", output_map_size, factory_map_size); + return false; +} + +std::function OpDesc::GetInferFunc() const { return infer_func_; } + +std::function OpDesc::GetVerifyFunc() const { return verifier_func_; } + +void OpDesc::AddInferFunc(const std::function &func) { infer_func_ = func; } + +std::function OpDesc::GetInferFormatFunc() const { return infer_format_func_; } + +void OpDesc::AddInferFormatFunc(const std::function &func) { infer_format_func_ = func; } + +void OpDesc::AddVerifierFunc(const std::function &func) { verifier_func_ = func; } + +graphStatus OpDesc::InferShapeAndType() { + if (infer_func_ == nullptr) { + infer_func_ = OperatorFactoryImpl::GetInferShapeFunc(GetType()); + if (infer_func_ == nullptr) { + GELOGW("%s does not have inferfunc_.", GetName().c_str()); + /// The infoshape function has not been added for each operator in the current operator information library. 
+ /// No infoshape added operator skips the call + /// and directly uses the shape information passed down by the upper framework + return GRAPH_SUCCESS; + } + } + Operator op_proxy = ge::OpDescUtils::CreateOperatorFromOpDesc(shared_from_this()); + return (graphStatus)infer_func_(op_proxy); +} + +graphStatus OpDesc::DefaultInferFormat() { + ge::Format first_none_nd_format = FORMAT_ND; + auto input_descs = GetAllInputsDescPtr(); + auto output_descs = GetAllOutputsDescPtr(); + // Overall input and output,get the first non-nd format + for (const auto &input_desc : input_descs) { + Format origin_format = input_desc->GetOriginFormat(); + if (origin_format != FORMAT_ND) { + first_none_nd_format = origin_format; + break; + } + } + for (const auto &output_desc : output_descs) { + Format origin_format = output_desc->GetOriginFormat(); + if (origin_format != FORMAT_ND) { + first_none_nd_format = origin_format; + break; + } + } + // Refresh all input output format + GELOGD("Default infer format.node[%s], first none nod format is:%d", GetName().c_str(), first_none_nd_format); + + for (const auto &input_desc : input_descs) { + Format origin_format = input_desc->GetOriginFormat(); + GELOGD("Default infer format[in].node[%s].origin format is:%d", GetName().c_str(), origin_format); + if (origin_format == FORMAT_ND) { + input_desc->SetOriginFormat(first_none_nd_format); + input_desc->SetFormat(first_none_nd_format); + } + } + for (const auto &output_desc : output_descs) { + Format origin_format = output_desc->GetOriginFormat(); + GELOGD("Default infer format[out].node[%s].origin format is:%d", GetName().c_str(), origin_format); + if (origin_format == FORMAT_ND) { + output_desc->SetOriginFormat(first_none_nd_format); + output_desc->SetFormat(first_none_nd_format); + } + } + return GRAPH_SUCCESS; +} + +graphStatus OpDesc::OpVerify() { + Operator op_proxy = ge::OpDescUtils::CreateOperatorFromOpDesc(shared_from_this()); + if (verifier_func_ == nullptr) { + verifier_func_ = 
OperatorFactoryImpl::GetVerifyFunc(GetType()); + } + if (verifier_func_ != nullptr) { + return (graphStatus)verifier_func_(op_proxy); + } + return GRAPH_SUCCESS; +} + +graphStatus OpDesc::CommonVerify() const { + for (string iname : GetAllInputNames()) { + // Checking shape of all inputs + vector ishape = GetInputDesc(iname).GetShape().GetDims(); + for (int64_t dim : ishape) { + GE_CHK_BOOL_RET_STATUS(dim >= -1, GRAPH_FAILED, "operator input %s shape contains negative or zero dimension.", + iname.c_str()); + } + } + // Check all attributes defined + const auto all_attributes = GetAllAttrs(); + for (const auto name : GetAllAttrNames()) { + GE_CHK_BOOL_RET_STATUS(all_attributes.find(name) != all_attributes.end(), GRAPH_FAILED, + "operator attribute %s is empty.", name.c_str()); + } + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY string OpDesc::GetInputNameByIndex(uint32_t index) const { + auto it = input_name_idx_.begin(); + for (; it != input_name_idx_.end(); ++it) { + if (it->second == index) { + break; + } + } + GE_CHK_BOOL_RET_STATUS_NOLOG(it != input_name_idx_.end(), ""); + return it->first; +} + +int OpDesc::GetInputIndexByName(const string &name) const { + auto it_find = input_name_idx_.find(name); + GE_CHK_BOOL_RET_STATUS_NOLOG(it_find != input_name_idx_.end(), -1); + return static_cast(it_find->second); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY string OpDesc::GetOutputNameByIndex(uint32_t index) const { + auto it = output_name_idx_.begin(); + for (; it != output_name_idx_.end(); ++it) { + if (it->second == index) { + break; + } + } + GE_CHK_BOOL_RET_STATUS_NOLOG(it != output_name_idx_.end(), ""); + return it->first; +} + +int OpDesc::GetOutputIndexByName(const string &name) const { + auto it_find = output_name_idx_.find(name); + GE_CHK_BOOL_RET_STATUS_NOLOG(it_find != output_name_idx_.end(), -1); + return static_cast(it_find->second); +} + +ProtoAttrMapHelper OpDesc::MutableAttrMap() { + if (op_def_.GetProtoMsg() == 
nullptr) { + GELOGE(GRAPH_FAILED, "op def get proto msg failed"); + return GeIrProtoHelper(); + } + return ProtoAttrMapHelper(op_def_.GetProtoOwner(), op_def_.GetProtoMsg()->mutable_attr()); +} + +ConstProtoAttrMapHelper OpDesc::GetAttrMap() const { + return ConstProtoAttrMapHelper(op_def_.GetProtoOwner(), &op_def_.GetProtoMsg()->attr()); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetId(int64_t id) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->set_id(id); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY int64_t OpDesc::GetId() const { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + return proto_msg->id(); + } + return 0; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetStreamId(int64_t stream_id) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->set_stream_id(stream_id); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY int64_t OpDesc::GetStreamId() const { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + return proto_msg->stream_id(); + } + return 0; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetInputName(const vector &input_name) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_input_name(); + for (auto &item : input_name) { + proto_msg->add_input_name(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetInputName() const { + vector input_name; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->input_name()) { + input_name.push_back(item); + } + } + return input_name; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetSrcName(const vector &src_name) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_src_name(); + for (auto &item : src_name) { + 
proto_msg->add_src_name(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetSrcName() const { + vector src_name; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->src_name()) { + src_name.push_back(item); + } + } + return src_name; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetSrcIndex(const vector &src_index) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_src_index(); + for (auto &item : src_index) { + proto_msg->add_src_index(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetSrcIndex() const { + vector src_index; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->src_index()) { + src_index.push_back(item); + } + } + return src_index; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetInputOffset(const vector &input) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_input_i(); + for (auto &item : input) { + proto_msg->add_input_i(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetInputOffset() const { + vector input; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->input_i()) { + input.push_back(item); + } + } + return input; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetOutputOffset(const vector &output) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_output_i(); + for (auto &item : output) { + proto_msg->add_output_i(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetOutputOffset() const { + vector output; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->output_i()) { + output.push_back(item); + } + } + return output; +} 
+ +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetDstName(const vector &dst_name) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_dst_name(); + for (auto &item : dst_name) { + proto_msg->add_dst_name(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetDstName() const { + vector dst_name; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->dst_name()) { + dst_name.push_back(item); + } + } + return dst_name; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetDstIndex(const vector &dst_index) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_dst_index(); + for (auto &item : dst_index) { + proto_msg->add_dst_index(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetDstIndex() const { + vector dst_index; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->dst_index()) { + dst_index.push_back(item); + } + } + return dst_index; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetWorkspace(const vector &workspace) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_workspace(); + for (auto &item : workspace) { + proto_msg->add_workspace(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetWorkspace() const { + vector workspace; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->workspace()) { + workspace.push_back(item); + } + } + return workspace; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetWorkspaceBytes(const vector &workspace_bytes) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_workspace_bytes(); + for (auto &item : workspace_bytes) { + 
proto_msg->add_workspace_bytes(item); + } + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetWorkspaceBytes() const { + vector workspace_bytes; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto &item : proto_msg->workspace_bytes()) { + workspace_bytes.push_back(item); + } + } + return workspace_bytes; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void OpDesc::SetIsInputConst(const vector &is_input_const) { + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + proto_msg->clear_is_input_const(); + for (auto item : is_input_const) { + proto_msg->add_is_input_const(item); + } + } + // If comes from ME,which is_input_const exist as attrs, outside no need to check GE_TRAIN flag + auto ret = AttrUtils::SetListBool(this, ATTR_NAME_IS_INPUT_CONST, is_input_const); + if (ret != true) { + GELOGE(GRAPH_FAILED, "set is_input_const fail."); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDesc::GetIsInputConst() const { + vector is_input_const; + auto proto_msg = op_def_.GetProtoMsg(); + if (proto_msg != nullptr) { + for (auto item : proto_msg->is_input_const()) { + is_input_const.push_back(item); + } + } + return is_input_const; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus OpDesc::RestoreInputNameIdx(const string &name, + const int &index) { + if (input_name_idx_.find(name) != input_name_idx_.end()) { + GELOGI("Restore input name index is existed. name[%s]", name.c_str()); + } + (void)input_name_idx_.insert(make_pair(name, index)); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus OpDesc::RestoreOutputNameIdx(const string &name, + const int &index) { + if (output_name_idx_.find(name) != output_name_idx_.end()) { + GELOGI("Restore output name index is existed. 
name[%s]", name.c_str()); + } + (void)output_name_idx_.insert(make_pair(name, index)); + return GRAPH_SUCCESS; +} +graphStatus OpDesc::CallInferFunc(Operator &op) { + if (infer_func_ == nullptr) { + infer_func_ = OperatorFactoryImpl::GetInferShapeFunc(GetType()); + if (infer_func_ == nullptr) { + GELOGW("%s does not have infer func.", GetName().c_str()); + return GRAPH_PARAM_INVALID; + } + } + graphStatus graph_status = (graphStatus)infer_func_(op); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "%s call infer func. ret: %u", GetName().c_str(), graph_status); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} +graphStatus OpDesc::CallInferFormatFunc(Operator &op) { + if (infer_format_func_ == nullptr) { + infer_format_func_ = OperatorFactoryImpl::GetInferFormatFunc(GetType()); + if (infer_format_func_ == nullptr) { + return DefaultInferFormat(); + } + } + return (graphStatus)infer_format_func_(op); +} +} // namespace ge diff --git a/src/common/graph/op_imp.cc b/src/common/graph/op_imp.cc new file mode 100644 index 00000000..5b21c15f --- /dev/null +++ b/src/common/graph/op_imp.cc @@ -0,0 +1,76 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include "debug/ge_log.h" +#include "debug/ge_util.h" + +namespace ge { +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +BroadCastInfer(const std::function()>& get_in1_shape, + const std::function()>& get_in2_shape, + const std::function& outShape)>& set_out_shape) { + auto x1_shape = get_in1_shape(); + auto x2_shape = get_in2_shape(); + std::vector y_shape; + + if (x1_shape.empty()) { + y_shape = x2_shape; + set_out_shape(y_shape); + return GRAPH_SUCCESS; + } + if (x2_shape.empty()) { + y_shape = x1_shape; + set_out_shape(y_shape); + return GRAPH_SUCCESS; + } + + int len_diff = static_cast(x1_shape.size() - x2_shape.size()); + if (len_diff >= 0) { + for (int i = 0; i < len_diff; i++) { + y_shape.push_back(x1_shape[i]); + } + int x2_shape_size = static_cast(x2_shape.size()); + for (int i = 0; i < x2_shape_size; i++) { + bool shapeFlag = + ((x1_shape[i + len_diff] != x2_shape[i]) && (std::min(x1_shape[i + len_diff], x2_shape[i]) != 1)); + if (shapeFlag) { + GE_LOGE("operands could not be broadcast together"); + return GRAPH_FAILED; + } + y_shape.push_back(std::max(x1_shape[i + len_diff], x2_shape[i])); + } + } else { + for (int i = 0; i < -len_diff; i++) { + y_shape.push_back(x2_shape[i]); + } + int x1_shape_size = static_cast(x1_shape.size()); + for (int i = 0; i < x1_shape_size; i++) { + bool shapeFlag = + ((x1_shape[i] != x2_shape[i - len_diff]) && (std::min(x1_shape[i], x2_shape[i - len_diff]) != 1)); + if (shapeFlag) { + GE_LOGE("operands could not be broadcast together"); + return GRAPH_FAILED; + } + y_shape.push_back(std::max(x1_shape[i], x2_shape[i - len_diff])); + } + } + set_out_shape(y_shape); + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/common/graph/operator.cc b/src/common/graph/operator.cc new file mode 100644 index 00000000..0b98bf1e --- /dev/null +++ b/src/common/graph/operator.cc @@ -0,0 +1,1268 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "external/graph/operator.h" + +#include +#include +#include +#include +#include + +#include "debug/ge_log.h" +#include "debug/ge_op_types.h" +#include "debug/ge_util.h" +#include "external/graph/attr_value.h" +#include "framework/common/debug/ge_log.h" +#include "graph/compute_graph.h" +#include "graph/ge_attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/node.h" +#include "graph/op_desc.h" +#include "graph/operator_factory.h" +#include "utils/graph_utils.h" +#include "utils/op_desc_utils.h" +#include "utils/tensor_adapter.h" +#include "utils/tensor_utils.h" +#include "utils/type_utils.h" + +using std::enable_shared_from_this; +using std::make_pair; +using std::shared_ptr; +using std::string; +using std::to_string; +using std::vector; + +namespace { +const char *const kValue = "value"; +} // namespace + +namespace ge { +class OpIO { + public: + explicit OpIO(const string &name, int index, const OperatorImplPtr &owner) + : name_(name), index_(index), owner_(owner) {} + + ~OpIO() = default; + + string GetName() const { return name_; } + + int GetIndex() const { return index_; } + + OperatorImplPtr GetOwner() const { return owner_; } + + bool operator==(const OpIO &r_value) const { + return (this->name_ == r_value.GetName()) && (this->index_ == r_value.GetIndex()) && + (this->GetOwner() == r_value.GetOwner()); + } + + private: + string name_; + int index_; + std::shared_ptr owner_; +}; +class OperatorImpl : 
public std::enable_shared_from_this { + friend class GraphBuilderImpl; + friend class OpDescUtils; + + public: + explicit OperatorImpl(const string &name, const string &type) : op_desc_(ComGraphMakeShared(name, type)) { + if (op_desc_ == nullptr) { + GELOGW("OpDesc make shared failed"); + } + } + explicit OperatorImpl(const OpDescPtr &op_desc) : op_desc_(op_desc) {} + explicit OperatorImpl(ge::ConstNodePtr node) : node_(std::move(node)) { + if (node_ != nullptr && node_->GetOpDesc() != nullptr) { + op_desc_ = node_->GetOpDesc(); + } + } + ~OperatorImpl() {} + void SetInputImpl(const string &dst_name, const ge::Operator &src_oprt) { + GE_CHK_BOOL_EXEC(!dst_name.empty(), return, "dst name is empty"); + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return, "op_desc_ is nullptr."); + GE_CHK_BOOL_EXEC(src_oprt.operator_impl_ != nullptr, return, "operator_impl_ is nullptr."); + GE_CHK_BOOL_EXEC(src_oprt.operator_impl_->op_desc_ != nullptr, return, "op_desc_ is nullptr."); + + auto src_op_impl = src_oprt.GetOperatorImplPtr(); + GE_CHK_BOOL_EXEC(src_op_impl != nullptr, return, "Src impl is null."); + GE_CHK_BOOL_EXEC(src_op_impl->op_desc_ != nullptr, return, "Src impl's opdesc is null."); + GE_CHK_BOOL_EXEC(src_oprt.operator_impl_->op_desc_->GetOutputsSize() == 1, return, + "The source operator[%s] must has one output", + src_oprt.operator_impl_->op_desc_->GetName().c_str()) + + uint32_t src_index = 0; + string src_name = src_op_impl->op_desc_->GetOutputNameByIndex(src_index); + GE_CHK_BOOL_EXEC(!src_name.empty(), return, "Src output's name is empty."); + + OpIO out_handler(src_name, src_index, src_op_impl); + input_link_.insert(std::make_pair(dst_name, out_handler)); + + int dst_index = op_desc_->GetInputIndexByName(dst_name); + GE_CHK_BOOL_EXEC(dst_index >= 0, return, "Find input index by name failed. 
name[%s], op name:%s", dst_name.c_str(), + op_desc_->GetName().c_str()); + + bool is_const = false; + if (src_oprt.GetOpType() == CONSTANT) { + is_const = true; + } + auto is_input_const = op_desc_->GetIsInputConst(); + for (int i = static_cast(is_input_const.size()); i <= dst_index; ++i) { + is_input_const.push_back(false); + } + is_input_const[dst_index] = is_const; + op_desc_->SetIsInputConst(is_input_const); + + OpIO op_dst(dst_name, dst_index, shared_from_this()); + src_op_impl->UpdateLinkMapImpl(src_name, op_dst); + // Fix for linking opdesc + if (op_desc_->UpdateInputDesc(dst_name, src_op_impl->GetOutputDesc(src_name)) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Update inputdesc failed,dst name is %s, src name is %s", dst_name.c_str(), + src_name.c_str()); + return; + } + } + + void SetInputImpl(const string &dst_name, const ge::OutHandler &out_handler) { + GE_CHK_BOOL_EXEC(!dst_name.empty(), return, "dst name is empty"); + GE_CHK_BOOL_EXEC(out_handler != nullptr, return, "SetInputImpl faild, out_handler is nullptr."); + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return, "op_desc_ is nullptr."); + input_link_.insert(std::make_pair(dst_name, *out_handler)); + + string src_name = out_handler->GetName(); + int dst_index = op_desc_->GetInputIndexByName(dst_name); + GE_CHK_BOOL_EXEC(dst_index >= 0, return, "Find input index by name failed. name[%s], op name:%s", dst_name.c_str(), + op_desc_->GetName().c_str()); + GE_CHK_BOOL_EXEC(out_handler->GetOwner() != nullptr && out_handler->GetOwner()->GetOpDescImpl() != nullptr, return, + "out_handler invalid. 
name[%s]", dst_name.c_str()); + bool is_const = false; + if (out_handler->GetOwner()->GetOpDescImpl()->GetType() == CONSTANT) { + is_const = true; + } + auto is_input_const = op_desc_->GetIsInputConst(); + for (int i = static_cast(is_input_const.size()); i <= dst_index; ++i) { + is_input_const.push_back(false); + } + is_input_const[dst_index] = is_const; + op_desc_->SetIsInputConst(is_input_const); + + OpIO in_handler(dst_name, dst_index, shared_from_this()); + auto out_op_impl = out_handler->GetOwner(); + GE_CHK_BOOL_EXEC(out_op_impl != nullptr, return, "Get out_handler's impl failed."); + + out_op_impl->UpdateLinkMapImpl(src_name, in_handler); + GE_CHK_BOOL_EXEC( + op_desc_->UpdateInputDesc(dst_name, out_handler->GetOwner()->GetOutputDesc(src_name)) == GRAPH_SUCCESS, return, + "Update input desc failed,dst name is %s,src name is %s", dst_name.c_str(), + src_name.c_str()); // fix for linking opdesc + } + + void AddControlInputImp(const ge::Operator &src_oprt) { + if (src_oprt.operator_impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "Src operator impl is nullptr"); + return; + } + for (auto &input : control_input_link_) { + if (input.lock() == src_oprt.operator_impl_) { + return; + } + } + control_input_link_.push_back(src_oprt.operator_impl_); + src_oprt.operator_impl_->control_output_link_.push_back(shared_from_this()); + } + + graphStatus GetInputImpl(const string &dst_name, ge::OpIO &out_handler) { + auto out = input_link_.find(dst_name); + if (out == input_link_.end()) { + return GRAPH_FAILED; + } + out_handler = out->second; + return GRAPH_SUCCESS; + } + + bool InputIsSet(const string &name) { + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return false, "op_desc_ is nullptr."); + return op_desc_->InputIsSet(name); + } + + string GetName() const { + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return string(), "op_desc_ is nullptr."); + return op_desc_->GetName(); + } + + GeTensorDesc GetInputDesc(const string &name) const { + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return 
GeTensorDesc(), "op_desc_ is nullptr."); + return op_desc_->GetInputDesc(name); + } + + GeTensorDesc GetInputDesc(uint32_t index) const { + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return GeTensorDesc(), "op_desc_ is nullptr."); + return op_desc_->GetInputDesc(index); + } + + graphStatus UpdateInputDesc(const string &name, const GeTensorDesc &tensor_desc) { + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return GRAPH_FAILED, "op_desc_ is nullptr."); + + return op_desc_->UpdateInputDesc(name, tensor_desc); + } + + OutHandler GetOutput(const string &name) { + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return nullptr, "op_desc_ is nullptr."); + + int src_index = op_desc_->GetOutputIndexByName(name); + GE_CHK_BOOL_EXEC(src_index >= 0, return nullptr, "Find src index by name failed. name[%s]", name.c_str()); + shared_ptr output_ptr = ComGraphMakeShared(name, src_index, shared_from_this()); + if (output_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "OpIO make shared failed"); + return nullptr; + } + return output_ptr; + } + + GeTensorDesc GetOutputDesc(const string &name) const { + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return GeTensorDesc(), "op_desc_ is nullptr."); + + return op_desc_->GetOutputDesc(name); + } + + GeTensorDesc GetOutputDesc(uint32_t index) const { + GE_CHK_BOOL_EXEC(op_desc_ != nullptr, return GeTensorDesc(), "op_desc_ is nullptr."); + + return op_desc_->GetOutputDesc(index); + } + + graphStatus UpdateOutputDesc(const string &name, const GeTensorDesc &tensor_desc) { + GE_CHK_BOOL_RET_STATUS(op_desc_ != nullptr, GRAPH_FAILED, "op_desc is nullptr."); + + auto res = op_desc_->UpdateOutputDesc(name, tensor_desc); + if (res == GRAPH_SUCCESS) { + for (auto ol : output_links_[name]) { + if (ol.GetOwner() == nullptr) { + GELOGW("%s get owner is nullptr", ol.GetName().c_str()); + continue; + } + GE_CHK_BOOL_RET_STATUS(ol.GetOwner()->UpdateInputDesc(ol.GetName(), tensor_desc) == GRAPH_SUCCESS, GRAPH_FAILED, + "Could not update next operator's input %s.", ol.GetName().c_str()); + } + 
} + return res; + } + + size_t GetInputsSize() const { + GE_IF_BOOL_EXEC(op_desc_ == nullptr, return 0); + return op_desc_->GetInputsSize(); + } + + size_t GetOutputsSize() const { + GE_IF_BOOL_EXEC(op_desc_ == nullptr, return 0); + return op_desc_->GetOutputsSize(); + } + + graphStatus SetAttr(const string &name, GeAttrValue &&attr_value) { + GE_CHK_BOOL_RET_STATUS(op_desc_ != nullptr, GRAPH_FAILED, "op_desc is nullptr."); + return op_desc_->SetAttr(name, std::move(attr_value)); + } + + graphStatus GetAttr(const string &name, GeAttrValue &attr_value) const { + GE_CHK_BOOL_RET_STATUS(op_desc_ != nullptr, GRAPH_FAILED, "op_desc is nullptr."); + return op_desc_->GetAttr(name, attr_value); + } + + OpDescPtr GetOpDescImpl() const { return op_desc_; } + + void UpdateLinkMapImpl(const string &src_name, OpIO &op_dst) { + auto it_find = output_links_.find(src_name); + if (it_find == output_links_.end()) { + std::vector dsts{op_dst}; + output_links_.insert(std::make_pair(src_name, dsts)); + } else { + it_find->second.push_back(op_dst); + } + } + + Operator ToOperator() { return Operator(shared_from_this()); } + + static OpDescPtr GetOpDesc(const Operator &oprt) { + GE_IF_BOOL_EXEC(oprt.operator_impl_ == nullptr, return nullptr); + return oprt.operator_impl_->op_desc_; + } + + void ClearOutputLinks() noexcept { output_links_.clear(); } + + void ClearInputLinks() noexcept { input_link_.clear(); } + + ge::ConstNodePtr GetNode() { return node_; } + + void SetInferenceContext(const InferenceContextPtr &inference_context) { inference_context_ = inference_context; } + + InferenceContextPtr GetInferenceContext() const { return inference_context_; } + + OpDescPtr op_desc_ = nullptr; + + private: + ge::ConstNodePtr node_{nullptr}; + ge::InferenceContextPtr inference_context_; + std::map> output_links_{}; + std::map input_link_{}; + std::vector> control_input_link_{}; + std::vector> control_output_link_{}; +}; + +// Used to manage OperatorImpl instances created by ge api. 
+class OperatorKeeper { + private: + OperatorKeeper() = default; + ~OperatorKeeper() { + for (const auto &iter : operators_) { + if (iter) { + iter->ClearInputLinks(); + iter->ClearOutputLinks(); + } + } + } + std::set operators_; + std::mutex mutex_; + + public: + static OperatorKeeper &GetInstance() { + static OperatorKeeper instance; + return instance; + } + void CheckInOperator(const OperatorImplPtr &op_impl) { + if (op_impl) { + std::lock_guard lock(mutex_); + operators_.insert(op_impl); + } + } + void CheckOutOperator(const OperatorImplPtr &op_impl) { + if (op_impl) { + std::lock_guard lock(mutex_); + operators_.erase(op_impl); + } + } +}; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Operator OpDescUtils::CreateOperatorFromNode(ge::ConstNodePtr node_ptr) { + ge::OperatorImplPtr operator_impl_ptr = ComGraphMakeShared(node_ptr); + if (operator_impl_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "OperatorImpl make shared failed"); + return Operator("default"); + } + return operator_impl_ptr->ToOperator(); +} + +Operator::Operator(const std::string &type) { + static uint32_t index = 0; + string name = type + "_" + std::to_string(index++); + operator_impl_ = ComGraphMakeShared(name, type); + if (operator_impl_ == nullptr) { + GELOGW("OperatorImpl make shared failed"); + } + OperatorKeeper::GetInstance().CheckInOperator(operator_impl_); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Operator OpDescUtils::CreateOperatorFromOpDesc(OpDescPtr op_desc) { + shared_ptr operator_impl_ptr; + operator_impl_ptr = ComGraphMakeShared(op_desc); + if (operator_impl_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "OperatorImpl make shared failed"); + return Operator("default"); + } + OperatorKeeper::GetInstance().CheckInOperator(operator_impl_ptr); + return operator_impl_ptr->ToOperator(); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY OpDescPtr OpDescUtils::GetOpDescFromOperator(const Operator &oprt) { + return OperatorImpl::GetOpDesc(oprt); +} + +GE_FUNC_HOST_VISIBILITY 
Operator::Operator(const string &name, const string &type) { + operator_impl_ = ComGraphMakeShared(name, type); + if (operator_impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "OperatorImpl make shared failed"); + return; + } + OperatorKeeper::GetInstance().CheckInOperator(operator_impl_); +} + +Operator::Operator(ge::OperatorImplPtr &&op_impl) { operator_impl_ = std::move(op_impl); } + +bool Operator::IsEmpty() const { + if (operator_impl_ == nullptr) { + return true; + } + return false; +} + +string Operator::GetName() const { + if (operator_impl_ != nullptr) { + return operator_impl_->GetName(); + } + return ""; +} + +GE_FUNC_HOST_VISIBILITY Operator &Operator::SetInput(const string &dst_name, const ge::Operator &src_oprt) { + // Describe the connection relationship between operators, no create action + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return *this, "operator impl is nullptr."); + operator_impl_->SetInputImpl(dst_name, src_oprt); + return *this; +} + +Operator &Operator::SetInput(const string &dst_name, const ge::OutHandler &out_handler) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return *this, "operator impl is nullptr."); + operator_impl_->SetInputImpl(dst_name, out_handler); + return *this; +} + +Operator &Operator::SetInput(const std::string &dst_name, const ge::Operator &src_oprt, const std::string &name) { + auto out_handler = src_oprt.GetOutput(name); + GE_CHK_BOOL_EXEC(out_handler != nullptr, return *this, "out_handler is nullptr."); + (void)SetInput(dst_name, out_handler); + return *this; +} + +Operator &Operator::AddControlInput(const Operator &src_oprt) { + if (operator_impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr."); + return *this; + } + operator_impl_->AddControlInputImp(src_oprt); + return *this; +} + +graphStatus Operator::GetInputConstData(const string &dst_name, Tensor &data) const { + if (operator_impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr."); + return GRAPH_FAILED; + } + 
ge::ConstNodePtr node_ptr = operator_impl_->GetNode(); + if (node_ptr) { + // For inner compute graph + auto op_desc = node_ptr->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(GRAPH_FAILED, "op_desc is nullptr."); + return GRAPH_FAILED; + } + auto index = op_desc->GetInputIndexByName(dst_name); + auto in_data_anchor = node_ptr->GetInDataAnchor(index); + if (in_data_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "in_data_anchor is nullptr."); + return GRAPH_FAILED; + } + auto out_data_anchor = in_data_anchor->GetPeerOutAnchor(); + if (out_data_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "out_data_anchor is nullptr."); + return GRAPH_FAILED; + } + std::shared_ptr peer_node_ptr = out_data_anchor->GetOwnerNode(); + if (peer_node_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "peer_node_ptr is nullptr."); + return GRAPH_FAILED; + } + ge::OperatorImplPtr operator_impl_ptr = nullptr; + operator_impl_ptr = ComGraphMakeShared(peer_node_ptr); + if (operator_impl_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "OperatorImpl make shared failed"); + return GRAPH_FAILED; + } + Operator const_op(std::move(operator_impl_ptr)); + if (peer_node_ptr->GetOpDesc() != nullptr) { + const auto &op_descType = peer_node_ptr->GetOpDesc()->GetType(); + if (op_descType == CONSTANTOP) { + return const_op.GetAttr(kValue, data); + } else if (op_descType == CONSTANT) { + return const_op.GetAttr(kValue, data); + } + } + } else { + // For outer graph + return GetInputConstDataOut(dst_name, data); + } + return GRAPH_FAILED; +} +graphStatus Operator::GetInputConstDataOut(const string &dst_name, Tensor &data) const { + ge::OpIO out_handle("", 0, nullptr); + GE_CHECK_NOTNULL(operator_impl_); + if (operator_impl_->GetInputImpl(dst_name, out_handle) != GRAPH_SUCCESS) { + GELOGE(FAILED, "%s get input impl failed", dst_name.c_str()); + return GRAPH_FAILED; + } + if (out_handle.GetOwner() != nullptr && out_handle.GetOwner()->GetOpDescImpl() != nullptr) { + Operator const_op(out_handle.GetOwner()); + const auto 
&op_desc_impl_type = out_handle.GetOwner()->GetOpDescImpl()->GetType(); + if (op_desc_impl_type == CONSTANTOP) { + return const_op.GetAttr(kValue, data); + } else if (op_desc_impl_type == CONSTANT) { + return const_op.GetAttr(kValue, data); + } + } + return GRAPH_FAILED; +} + +TensorDesc Operator::GetInputDesc(const std::string &name) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return TensorDesc(), "operator impl is nullptr."); + return TensorAdapter::GeTensorDesc2TensorDesc(operator_impl_->GetInputDesc(name)); +} + +void Operator::SetInferenceContext(const InferenceContextPtr &inference_context) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + operator_impl_->SetInferenceContext(inference_context); +} + +InferenceContextPtr Operator::GetInferenceContext() const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return nullptr, "operator impl is nullptr."); + return operator_impl_->GetInferenceContext(); +} + +TensorDesc Operator::GetInputDesc(uint32_t index) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return TensorDesc(), "operator impl is nullptr."); + return TensorAdapter::GeTensorDesc2TensorDesc(operator_impl_->GetInputDesc(index)); +} + +graphStatus Operator::TryGetInputDesc(const string &name, TensorDesc &tensor_desc) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + auto check = operator_impl_->InputIsSet(name); + if (check) tensor_desc = TensorAdapter::GeTensorDesc2TensorDesc(operator_impl_->GetInputDesc(name)); + return check ? 
GRAPH_SUCCESS : GRAPH_FAILED; +} + +graphStatus Operator::UpdateInputDesc(const std::string &name, const ge::TensorDesc &tensor_desc) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + return operator_impl_->UpdateInputDesc(name, TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc)); +} + +OutHandler Operator::GetOutput(const string &name) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return nullptr, "operator impl is nullptr."); + return operator_impl_->GetOutput(name); +} + +TensorDesc Operator::GetOutputDesc(const std::string &name) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return TensorDesc(), "operator impl is nullptr."); + return TensorAdapter::GeTensorDesc2TensorDesc(operator_impl_->GetOutputDesc(name)); +} + +TensorDesc Operator::GetOutputDesc(uint32_t index) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return TensorDesc(), "operator impl is nullptr."); + return TensorAdapter::GeTensorDesc2TensorDesc(operator_impl_->GetOutputDesc(index)); +} + +graphStatus Operator::UpdateOutputDesc(const std::string &name, const ge::TensorDesc &tensor_desc) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + return operator_impl_->UpdateOutputDesc(name, TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc)); +} + +TensorDesc Operator::GetDynamicInputDesc(const string &name, uint32_t index) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return TensorDesc(), "operator impl is nullptr."); + return TensorAdapter::GeTensorDesc2TensorDesc(operator_impl_->GetInputDesc(name + std::to_string(index))); +} + +graphStatus Operator::UpdateDynamicInputDesc(const string &name, uint32_t index, const TensorDesc &tensor_desc) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + return operator_impl_->UpdateInputDesc(name + std::to_string(index), + TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc)); +} + 
+TensorDesc Operator::GetDynamicOutputDesc(const string &name, uint32_t index) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return TensorDesc(), "operator impl is nullptr."); + return TensorAdapter::GeTensorDesc2TensorDesc(operator_impl_->GetOutputDesc(name + std::to_string(index))); +} + +graphStatus Operator::UpdateDynamicOutputDesc(const string &name, uint32_t index, const TensorDesc &tensor_desc) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + return operator_impl_->UpdateOutputDesc(name + std::to_string(index), + TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc)); +} + +graphStatus Operator::InferShapeAndType() { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return GRAPH_FAILED, "GetOpDescImpl is nullptr."); + + return operator_impl_->GetOpDescImpl()->CallInferFunc(*this); +} + +graphStatus Operator::VerifyAllAttr(bool disable_common_verifier) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return GRAPH_FAILED, "GetOpDescImpl is nullptr."); + + if (!disable_common_verifier && (graphStatus)Operator::VerifyAll() == GRAPH_FAILED) { + return GRAPH_FAILED; + } else { + return (graphStatus)operator_impl_->GetOpDescImpl()->OpVerify(); + } +} + +GE_FUNC_HOST_VISIBILITY size_t Operator::GetInputsSize() const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return 0, "OperatorImpl_ is nullptr"); + return operator_impl_->GetInputsSize(); +} + +GE_FUNC_HOST_VISIBILITY size_t Operator::GetOutputsSize() const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return 0, "OperatorImpl_ is nullptr"); + return operator_impl_->GetOutputsSize(); +} + +// According to op get the attrs name and type +namespace { +const std::map kAttrTypesMap = { + {GeAttrValue::VT_NONE, "VT_STRING"}, + 
{GeAttrValue::VT_STRING, "VT_STRING"}, + {GeAttrValue::VT_FLOAT, "VT_FLOAT"}, + {GeAttrValue::VT_BOOL, "VT_BOOL"}, + {GeAttrValue::VT_INT, "VT_INT"}, + {GeAttrValue::VT_TENSOR_DESC, "VT_TENSOR_DESC"}, + {GeAttrValue::VT_TENSOR, "VT_TENSOR"}, + {GeAttrValue::VT_BYTES, "VT_BYTES"}, + {GeAttrValue::VT_GRAPH, "VT_GRAPH"}, + {GeAttrValue::VT_NAMED_ATTRS, "VT_NAMED_ATTRS"}, + {GeAttrValue::VT_LIST_BASE, "VT_LIST_BASE"}, + {GeAttrValue::VT_LIST_STRING, "VT_LIST_STRING"}, + {GeAttrValue::VT_LIST_FLOAT, "VT_LIST_FLOAT"}, + {GeAttrValue::VT_LIST_BOOL, "VT_LIST_BOOL"}, + {GeAttrValue::VT_LIST_INT, "VT_LIST_INT"}, + {GeAttrValue::VT_LIST_TENSOR_DESC, "VT_LIST_TENSOR_DESC"}, + {GeAttrValue::VT_LIST_TENSOR, "VT_LIST_TENSOR"}, + {GeAttrValue::VT_LIST_BYTES, "VT_LIST_BYTES"}, + {GeAttrValue::VT_GRAPH, "VT_GRAPH"}, + {GeAttrValue::VT_LIST_NAMED_ATTRS, "VT_LIST_NAMED_ATTRS"}, +}; +} // namespace +const std::map Operator::GetAllAttrNamesAndTypes() const { + std::map attr_types; + + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return attr_types, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return attr_types, "GetOpDescImpl is nullptr."); + std::map attr_map = operator_impl_->GetOpDescImpl()->GetAllAttrs(); + + map::iterator iter; + for (iter = attr_map.begin(); iter != attr_map.end(); ++iter) { + string name = iter->first; + GeAttrValue attr_value = iter->second; + + GeAttrValue::ValueType type = attr_value.GetValueType(); + + auto iter2 = kAttrTypesMap.find(type); + if (iter2 != kAttrTypesMap.end()) { + attr_types[name] = iter2->second; + } + } + + return attr_types; +} + +void Operator::InputRegister(const string &name) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + (void)operator_impl_->GetOpDescImpl()->AddInputDesc(name, GeTensorDesc()); +} + +void Operator::OptionalInputRegister(const string 
&name) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + (void)operator_impl_->GetOpDescImpl()->AddOptionalInputDesc(name, + GeTensorDesc(GeShape(), FORMAT_RESERVED, DT_UNDEFINED)); +} + +void Operator::InferFuncRegister(const std::function &func) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + operator_impl_->GetOpDescImpl()->AddInferFunc(func); +} + +void Operator::InferFormatFuncRegister(const std::function &func) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + operator_impl_->GetOpDescImpl()->AddInferFormatFunc(func); +} + +void Operator::VerifierFuncRegister(const std::function &func) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + operator_impl_->GetOpDescImpl()->AddVerifierFunc(func); +} + +void Operator::OutputRegister(const string &name) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + (void)operator_impl_->GetOpDescImpl()->AddOutputDesc(name, GeTensorDesc()); +} + +void Operator::DynamicInputRegister(const string &name, const unsigned int num, bool is_push_back) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + GE_CHK_BOOL_EXEC(AttrUtils::SetInt(operator_impl_->GetOpDescImpl(), DYNAMIC_INPUT_TD_NUM(name), num), return, + "set int 
failed"); + (void)operator_impl_->GetOpDescImpl()->AddDynamicInputDesc(name, num, is_push_back); +} + +int Operator::GetDynamicInputNum(const string &name) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return 0, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return 0, "GetOpDescImpl is nullptr."); + int num = 0; + GE_CHK_BOOL_EXEC(AttrUtils::GetInt(operator_impl_->GetOpDescImpl(), DYNAMIC_INPUT_TD_NUM(name), num), return num, + "Get %s int failed", name.c_str()); + return num; +} + +void Operator::DynamicOutputRegister(const string &name, const unsigned int num, bool is_push_back) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + (void)AttrUtils::SetInt(operator_impl_->GetOpDescImpl(), DYNAMIC_OUTPUT_TD_NUM(name), num); + (void)operator_impl_->GetOpDescImpl()->AddDynamicOutputDesc(name, num, is_push_back); +} + +int Operator::GetDynamicOutputNum(const string &name) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return 0, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return 0, "GetOpDescImpl is nullptr."); + int num = 0; + (void)AttrUtils::GetInt(operator_impl_->GetOpDescImpl(), DYNAMIC_INPUT_TD_NUM(name), num); + return num; +} + +void Operator::RequiredAttrRegister(const string &name) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return, "GetOpDescImpl is nullptr."); + (void)operator_impl_->GetOpDescImpl()->AddRequiredAttr(name); +} + +graphStatus Operator::VerifyAll() { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + GE_CHK_BOOL_EXEC(operator_impl_->GetOpDescImpl() != nullptr, return GRAPH_FAILED, "GetOpDescImpl is nullptr."); + + // Check all inputs defined + for 
(const string &iname : operator_impl_->GetOpDescImpl()->GetAllInputNames()) { + GE_CHK_BOOL_RET_STATUS(operator_impl_->GetOpDescImpl()->IsOptionalInput(iname) || operator_impl_->InputIsSet(iname), + GRAPH_FAILED, "operator input %s is not linked.", iname.c_str()); + vector ishape = operator_impl_->GetOpDescImpl()->GetInputDesc(iname).GetShape().GetDims(); + for (int64_t dim : ishape) { + GE_CHK_BOOL_RET_STATUS(dim > 0, GRAPH_FAILED, "operator input %s shape contains negative or zero dimension.", + iname.c_str()); + } + } + // Check all attributes defined + const auto all_attributes = operator_impl_->GetOpDescImpl()->GetAllAttrs(); + for (const auto &name : operator_impl_->GetOpDescImpl()->GetAllAttrNames()) { + GE_CHK_BOOL_RET_STATUS(all_attributes.find(name) != all_attributes.end(), GRAPH_FAILED, + "operator attribute %s is empty.", name.c_str()); + } + + return GRAPH_SUCCESS; +} + +string Operator::GetOpType() const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return "Data", "operator impl is nullptr."); + return OperatorImpl::GetOpDesc(*this)->GetType(); +} + +Operator &Operator::SetInput(const std::string &dst_name, uint32_t dst_index, const ge::Operator &src_oprt) { + string dynamic_dst_name = DYNAMIN_INPUT_NAME(dst_name, dst_index); + return SetInput(dynamic_dst_name, src_oprt); +} + +Operator &Operator::SetInput(const std::string &dst_name, uint32_t dst_index, const ge::Operator &src_oprt, + const std::string &name) { + string dynamic_dst_name = DYNAMIN_INPUT_NAME(dst_name, dst_index); + return SetInput(dynamic_dst_name, src_oprt, name); +} + +OperatorImplPtr Operator::GetOperatorImplPtr() const { return operator_impl_; } + +#define OP_ATTR_SET_IMP(ArgType, AttrUtilsFun) \ + Operator &Operator::SetAttr(const string &name, ArgType attr_value) { \ + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { \ + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); \ + return *this; \ + } \ + if 
(!AttrUtils::Set##AttrUtilsFun(operator_impl_->GetOpDescImpl(), name, attr_value)) { \ + GELOGW("set attr name %s failed.", name.c_str()); \ + } \ + return *this; \ + } + +#define OP_ATTR_GET_IMP(ArgType, AttrUtilsFun) \ + graphStatus Operator::GetAttr(const string &name, ArgType attr_value) const { \ + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { \ + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); \ + return GRAPH_FAILED; \ + } \ + if (!AttrUtils::Get##AttrUtilsFun(operator_impl_->GetOpDescImpl(), name, attr_value)) { \ + GELOGW("get attr name %s failed.", name.c_str()); \ + return GRAPH_FAILED; \ + } \ + return GRAPH_SUCCESS; \ + } + +void Operator::BreakConnect() const { + if (operator_impl_ == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr."); + return; + } + operator_impl_->ClearInputLinks(); + operator_impl_->ClearOutputLinks(); + OperatorKeeper::GetInstance().CheckOutOperator(operator_impl_); +} + +#define OP_ATTR_REG_IMP(ArgType, AttrUtilsFun) \ + void Operator::AttrRegister(const string &name, ArgType attr_value) { \ + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { \ + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); \ + return; \ + } \ + if (!AttrUtils::Set##AttrUtilsFun(operator_impl_->GetOpDescImpl(), name, attr_value)) { \ + GELOGW("reg attr name %s failed.", name.c_str()); \ + } \ + } + +OP_ATTR_SET_IMP(int64_t, Int) +OP_ATTR_SET_IMP(int32_t, Int) +OP_ATTR_SET_IMP(uint32_t, Int) +OP_ATTR_GET_IMP(int64_t &, Int) +OP_ATTR_GET_IMP(int32_t &, Int) +OP_ATTR_GET_IMP(uint32_t &, Int) +OP_ATTR_SET_IMP(const vector &, ListInt) +OP_ATTR_SET_IMP(const vector &, ListInt) +OP_ATTR_SET_IMP(const vector &, ListInt) +OP_ATTR_SET_IMP(std::initializer_list &&, ListInt) +OP_ATTR_GET_IMP(vector &, ListInt) +OP_ATTR_GET_IMP(vector &, ListInt) +OP_ATTR_GET_IMP(vector &, ListInt) +OP_ATTR_GET_IMP(vector> &, ListListInt) +OP_ATTR_SET_IMP(const 
vector> &, ListListInt) + +OP_ATTR_SET_IMP(float, Float) +OP_ATTR_GET_IMP(float &, Float) +OP_ATTR_SET_IMP(const vector &, ListFloat) +OP_ATTR_GET_IMP(vector &, ListFloat) + +OP_ATTR_SET_IMP(bool, Bool) +OP_ATTR_GET_IMP(bool &, Bool) +OP_ATTR_SET_IMP(const vector &, ListBool) +OP_ATTR_GET_IMP(vector &, ListBool) + +OP_ATTR_SET_IMP(const string &, Str) +OP_ATTR_GET_IMP(string &, Str) +OP_ATTR_SET_IMP(const vector &, ListStr) +OP_ATTR_GET_IMP(vector &, ListStr) + +OP_ATTR_REG_IMP(int64_t, Int) +OP_ATTR_REG_IMP(const vector &, ListInt) +OP_ATTR_REG_IMP(float, Float) +OP_ATTR_REG_IMP(const vector &, ListFloat) +OP_ATTR_REG_IMP(const string &, Str) +OP_ATTR_REG_IMP(const vector &, ListStr) +OP_ATTR_REG_IMP(bool, Bool) +OP_ATTR_REG_IMP(const vector &, ListBool) +OP_ATTR_REG_IMP(const vector> &, ListListInt) + +#undef OP_ATTR_SET_IMP +#undef OP_ATTR_GET_IMP +#undef OP_ATTR_REG_IMP + +Operator &Operator::SetAttr(const string &name, const Tensor &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return *this; + } + GeTensor tensor = TensorAdapter::AsGeTensor(attr_value); + if (!AttrUtils::SetTensor(operator_impl_->GetOpDescImpl(), name, tensor)) { + GELOGW("set attr name %s failed.", name.c_str()); + } + return *this; +} + +Operator &Operator::SetAttr(const string &name, const vector &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return *this; + } + vector val_list; + for (const auto &item : attr_value) { + auto tensor = TensorAdapter::AsGeTensor(item); + val_list.push_back(tensor); + } + if (!AttrUtils::SetListTensor(operator_impl_->GetOpDescImpl(), name, val_list)) { + GELOGW("set attr name %s failed.", name.c_str()); + } + return *this; +} + +graphStatus Operator::GetAttr(const string &name, Tensor &attr_value) const { + if 
(operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return GRAPH_FAILED; + } + ConstGeTensorPtr tensor; + if (!AttrUtils::GetTensor(operator_impl_->GetOpDescImpl(), name, tensor)) { + GELOGW("get attr name %s failed.", name.c_str()); + return GRAPH_FAILED; + } + attr_value = TensorAdapter::GeTensor2Tensor(tensor); + return GRAPH_SUCCESS; +} + +graphStatus Operator::GetAttr(const string &name, vector &attr_value) const { + attr_value.clear(); + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return GRAPH_FAILED; + } + vector val_list; + if (!AttrUtils::GetListTensor(operator_impl_->GetOpDescImpl(), name, val_list)) { + GELOGW("get attr name %s failed.", name.c_str()); + return GRAPH_FAILED; + } + for (auto &tensor : val_list) { + attr_value.push_back(TensorAdapter::GeTensor2Tensor(tensor)); + } + return GRAPH_SUCCESS; +} + +Operator &Operator::SetAttr(const string &name, const OpBytes &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return *this; + } + if (!AttrUtils::SetZeroCopyBytes(operator_impl_->GetOpDescImpl(), name, + Buffer::CopyFrom(attr_value.data(), attr_value.size()))) { + GELOGW("set attr name %s failed.", name.c_str()); + } + return *this; +} + +graphStatus Operator::GetAttr(const string &name, OpBytes &attr_value) const { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return GRAPH_FAILED; + } + Buffer buffer; + if (!AttrUtils::GetZeroCopyBytes(operator_impl_->GetOpDescImpl(), name, buffer)) { + GELOGW("get attr name %s failed.", name.c_str()); + return GRAPH_FAILED; + } + attr_value.clear(); + if (buffer.data() 
== nullptr) { + GELOGE(GRAPH_FAILED, "buffer data is null."); + return GRAPH_FAILED; + } + attr_value.assign(buffer.data(), buffer.data() + buffer.size()); + return GRAPH_SUCCESS; +} + +Operator &Operator::SetAttr(const string &name, const UsrQuantizeFactorParams &attr_value) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return *this, "operator impl is nullptr, name %s.", name.c_str()); + QuantizeFactorParams def_quant; + GE_CHK_BOOL_EXEC(TypeUtils::Usr2DefQuantizeFactorParams(attr_value, def_quant) == GRAPH_SUCCESS, return *this, + "trans para fail"); + GE_CHK_BOOL_EXEC(OpDescUtils::SetQuantizeFactorParams(operator_impl_->GetOpDescImpl(), def_quant) == GRAPH_SUCCESS, + return *this, "operator set QuantizeFactorParams fail"); + return *this; +} + +graphStatus Operator::GetAttr(const string &name, UsrQuantizeFactorParams &attr_value) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + QuantizeFactorParams def_quant; + GE_CHK_BOOL_EXEC(OpDescUtils::GetQuantizeFactorParams(operator_impl_->GetOpDescImpl(), def_quant) == GRAPH_SUCCESS, + return GRAPH_FAILED, "operator get QuantizeFactorParams fail"); + GE_CHK_BOOL_EXEC(TypeUtils::Def2UsrQuantizeFactorParams(def_quant, attr_value) == GRAPH_SUCCESS, return GRAPH_FAILED, + "trans para fail"); + return GRAPH_SUCCESS; +} + +Operator &Operator::SetAttr(const string &name, ge::AttrValue &&attrValue) { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return *this, "operator impl is nullptr."); + (void)operator_impl_->SetAttr(name, std::move(attrValue.impl->geAttrValue_)); + return *this; +} + +graphStatus Operator::GetAttr(const string &name, ge::AttrValue &attrValue) const { + GE_CHK_BOOL_EXEC(operator_impl_ != nullptr, return GRAPH_FAILED, "operator impl is nullptr."); + return operator_impl_->GetAttr(name, attrValue.impl->geAttrValue_); +} + +Operator &Operator::SetAttr(const string &name, const std::vector &attr_value) { + if (operator_impl_ == 
nullptr || !operator_impl_->GetOpDescImpl()) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return *this; + } + if (!AttrUtils::SetListDataType(operator_impl_->GetOpDescImpl(), name, attr_value)) { + GELOGW("set attr name %s failed.", name.c_str()); + } + return *this; +} + +graphStatus Operator::GetAttr(const string &name, std::vector &attr_value) const { + attr_value.clear(); + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return GRAPH_FAILED; + } + if (!AttrUtils::GetListDataType(operator_impl_->GetOpDescImpl(), name, attr_value)) { + GELOGW("get attr name %s failed.", name.c_str()); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +Operator &Operator::SetAttr(const string &name, const ge::DataType &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return *this; + } + if (!AttrUtils::SetDataType(operator_impl_->GetOpDescImpl(), name, attr_value)) { + GELOGW("set attr name %s failed.", name.c_str()); + } + return *this; +} + +graphStatus Operator::GetAttr(const string &name, ge::DataType &attr_value) const { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return GRAPH_FAILED; + } + if (!AttrUtils::GetDataType(operator_impl_->GetOpDescImpl(), name, attr_value)) { + GELOGW("get attr name %s failed.", name.c_str()); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +void Operator::AttrRegister(const string &name, const std::vector &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return; + } + if (!AttrUtils::SetListDataType(operator_impl_->GetOpDescImpl(), 
name, attr_value)) { + GELOGW("set attr name %s failed.", name.c_str()); + } +} + +void Operator::AttrRegister(const string &name, const ge::DataType &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return; + } + if (!AttrUtils::SetDataType(operator_impl_->GetOpDescImpl(), name, attr_value)) { + GELOGW("set attr name %s failed.", name.c_str()); + } +} + +void Operator::AttrRegister(const string &name, const Tensor &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return; + } + auto tensor = TensorAdapter::AsGeTensor(attr_value); + if (!AttrUtils::SetTensor(operator_impl_->GetOpDescImpl(), name, tensor)) { + GELOGW("reg attr name %s failed.", name.c_str()); + } +} + +void Operator::AttrRegister(const string &name, const vector &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return; + } + vector val_list; + for (const auto &item : attr_value) { + val_list.push_back(TensorAdapter::AsGeTensor(item)); + } + if (!AttrUtils::SetListTensor(operator_impl_->GetOpDescImpl(), name, val_list)) { + GELOGW("reg attr name %s failed.", name.c_str()); + } +} + +void Operator::AttrRegister(const string &name, const OpBytes &attr_value) { + if (operator_impl_ == nullptr || operator_impl_->GetOpDescImpl() == nullptr) { + GELOGE(GRAPH_FAILED, "operator impl is nullptr, name %s.", name.c_str()); + return; + } + if (!AttrUtils::SetZeroCopyBytes(operator_impl_->GetOpDescImpl(), name, + Buffer::CopyFrom(attr_value.data(), attr_value.size()))) { + GELOGW("reg attr name %s failed.", name.c_str()); + } +} + +class GraphBuilderImpl { + public: + explicit GraphBuilderImpl(const string &name) : graph_(ComGraphMakeShared(name)) { 
+ if (graph_ == nullptr) { + GELOGE(GRAPH_FAILED, "ComputeGraph make shared failed"); + graph_ = nullptr; + return; + } + } + + ~GraphBuilderImpl() {} + + ComputeGraphPtr BuildGraph(const std::vector &inputs) { + std::vector vec_inputs; + for (auto &it : inputs) { + auto src_op_impl = it.operator_impl_; + GE_CHK_BOOL_EXEC(src_op_impl != nullptr, return nullptr, "Operator Impl is null."); + GE_CHK_BOOL_EXEC(src_op_impl->op_desc_ != nullptr, return nullptr, "Operator impl's opdesc is null."); + + string type = src_op_impl->op_desc_->GetType(); + auto node_op = ge::OperatorFactory::CreateOperator("node_op", type); + auto tensor_desc = ge::OpDescUtils::GetOpDescFromOperator(node_op); + GE_CHK_BOOL_EXEC(tensor_desc != nullptr, continue, "tensor_desc is null."); + if ((tensor_desc->GetInputsSize() == 0 && tensor_desc->GetOutputsSize() > 0) || type == DATA || + type == VARIABLE || type == INITDATA || type == GETNEXT) { + vec_inputs.push_back(it.operator_impl_); + } else { + GELOGW("Input operator should be Data, Variable operator or operator that has output but no input."); + } + } + + auto ret = WalkAllOperators(vec_inputs); + GE_CHK_BOOL_EXEC(ret == GRAPH_SUCCESS, return nullptr, "WalkAllOperators failed."); + + ret = AddEdge(); + GE_CHK_BOOL_EXEC(ret == GRAPH_SUCCESS, return nullptr, "AddEdge failed."); + + return graph_; + } + + const std::map &GetAllNodesInfo() const { return all_nodes_info_; } + + private: + graphStatus WalkAllOperators(const std::vector &vec_ops) { + GE_CHK_BOOL_EXEC(graph_ != nullptr, return GRAPH_FAILED, "graph_ is null.") + std::queue> que; + que.push(vec_ops); + while (!que.empty()) { + auto vec_tem = que.front(); + que.pop(); + for (const auto &op_impl : vec_tem) { + GE_CHK_BOOL_EXEC(op_impl != nullptr, return GRAPH_FAILED, "Operator Impl is null.") + GE_CHK_BOOL_EXEC_INFO(all_nodes_info_.find(op_impl) == all_nodes_info_.end(), continue) + auto node_ptr = graph_->AddNode(op_impl->op_desc_); + GE_CHK_BOOL_EXEC(node_ptr != nullptr, return 
GRAPH_FAILED, "Add node failed."); + all_nodes_info_.insert(std::make_pair(op_impl, node_ptr)); + + auto &out_links = op_impl->output_links_; + std::vector vec_op_forward{}; + for (const auto &out_link : out_links) { + for (const auto &op_forward : out_link.second) { + vec_op_forward.push_back(op_forward.GetOwner()); + } + } + + auto &out_control_links = op_impl->control_output_link_; + for (const auto &out_link : out_control_links) { + vec_op_forward.push_back(out_link.lock()); + } + que.push(vec_op_forward); + + auto &in_links = op_impl->input_link_; + std::vector vec_op_back_forward{}; + for (const auto &in_link : in_links) { + vec_op_back_forward.push_back(in_link.second.GetOwner()); + } + + auto &in_control_links = op_impl->control_input_link_; + for (const auto &in_link : in_control_links) { + vec_op_back_forward.push_back(in_link.lock()); + } + que.push(vec_op_back_forward); + } + } + return GRAPH_SUCCESS; + } + + graphStatus AddEdge() { + for (const auto &node_info : all_nodes_info_) { + auto src_op_impl_ptr = node_info.first; + auto src_node_ptr = node_info.second; + GE_IF_BOOL_EXEC(src_op_impl_ptr == nullptr || src_node_ptr == nullptr, continue); + auto out_links = src_op_impl_ptr->output_links_; + auto &op_desc = src_op_impl_ptr->op_desc_; + + for (const auto &out : out_links) { + auto src_idx = op_desc->GetOutputIndexByName(out.first); + GE_CHK_BOOL_EXEC(src_idx >= 0, return GRAPH_FAILED, "Find output index by name failed"); + + auto src_anchor = src_node_ptr->GetOutDataAnchor(src_idx); + GE_CHK_BOOL_EXEC(src_anchor != nullptr, return GRAPH_FAILED, "GetOutDataAnchor failed."); + + for (const auto &dst_opio : out.second) { + auto dst_node_info = all_nodes_info_.find(dst_opio.GetOwner()); + GE_CHK_BOOL_EXEC(dst_node_info != all_nodes_info_.end(), return GRAPH_FAILED, "Find Dst node failed."); + GE_IF_BOOL_EXEC(dst_node_info->second == nullptr, continue); + auto dst_anchor = dst_node_info->second->GetInDataAnchor(dst_opio.GetIndex()); + 
GE_CHK_BOOL_EXEC(dst_anchor != nullptr, return GRAPH_FAILED, "GetInDataAnchor failed."); + + auto ret = GraphUtils::AddEdge(src_anchor, dst_anchor); + GE_CHK_BOOL_EXEC(ret == GRAPH_SUCCESS, return GRAPH_FAILED, "AddEdge failed."); + } + } + auto out_control_anchor = src_node_ptr->GetOutControlAnchor(); + for (const auto &control_out : src_op_impl_ptr->control_output_link_) { + auto dst_node_info = all_nodes_info_.find(control_out.lock()); + if (dst_node_info == all_nodes_info_.end()) { + GELOGE(GRAPH_FAILED, "Find Dst node failed."); + return GRAPH_FAILED; + } + GE_IF_BOOL_EXEC(dst_node_info->second == nullptr, continue); + auto in_control_anchor = dst_node_info->second->GetInControlAnchor(); + auto ret = GraphUtils::AddEdge(out_control_anchor, in_control_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(ret, "AddEdge failed. srcNode %s:%s, dstNode %s:%s", op_desc->GetName().c_str(), + op_desc->GetType().c_str(), dst_node_info->second->GetName().c_str(), + dst_node_info->second->GetType().c_str()); + return ret; + } + } + } + return GRAPH_SUCCESS; + } + + ComputeGraphPtr graph_ = nullptr; + std::map all_nodes_info_{}; +}; + +inline bool HasSameNameNode(const ComputeGraphPtr &compute_graph) { + std::set node_names; + for (auto const &node : compute_graph->GetAllNodes()) { + node_names.insert(node->GetName()); + } + return node_names.size() != compute_graph->GetAllNodes().size(); +} + +ComputeGraphPtr GraphUtils::CreateGraphFromOperator(const string &name, const vector &inputs) { + auto graph_builder_impl = GraphBuilderImpl(name); + ComputeGraphPtr compute_graph = graph_builder_impl.BuildGraph(inputs); + GE_IF_BOOL_EXEC(compute_graph == nullptr, return compute_graph); + + compute_graph->SetAllNodesInfo(graph_builder_impl.GetAllNodesInfo()); + if (HasSameNameNode(compute_graph)) { + GELOGW("Compute do not allow has same name nodes."); + compute_graph = nullptr; + } + + return compute_graph; +} + +void GraphUtils::BreakConnect(const std::map &all_nodes_infos) { + for 
(const auto &it : all_nodes_infos) { + OperatorImplPtr op_impl = it.first; + if (op_impl == nullptr) { + GELOGW("operator impl is nullptr."); + continue; + } + op_impl->ClearOutputLinks(); + op_impl->ClearInputLinks(); + } +} +} // namespace ge diff --git a/src/common/graph/operator_factory.cc b/src/common/graph/operator_factory.cc new file mode 100644 index 00000000..43d61a7c --- /dev/null +++ b/src/common/graph/operator_factory.cc @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/operator_factory_impl.h" +#include "debug/ge_log.h" + +namespace ge { +Operator OperatorFactory::CreateOperator(const std::string &operator_name, const std::string &operator_type) { + return OperatorFactoryImpl::CreateOperator(operator_name, operator_type); +} + +graphStatus OperatorFactory::GetOpsTypeList(std::vector &all_ops) { + return OperatorFactoryImpl::GetOpsTypeList(all_ops); +} + +bool OperatorFactory::IsExistOp(const string &operator_type) { return OperatorFactoryImpl::IsExistOp(operator_type); } + +OperatorCreatorRegister::OperatorCreatorRegister(const string &operator_type, OpCreator const &op_creator) { + (void)OperatorFactoryImpl::RegisterOperatorCreator(operator_type, op_creator); +} + +InferShapeFuncRegister::InferShapeFuncRegister(const std::string &operator_type, + const InferShapeFunc &infer_shape_func) { + (void)OperatorFactoryImpl::RegisterInferShapeFunc(operator_type, infer_shape_func); +} + +InferFormatFuncRegister::InferFormatFuncRegister(const std::string &operator_type, + const InferFormatFunc &infer_format_func) { + (void)OperatorFactoryImpl::RegisterInferFormatFunc(operator_type, infer_format_func); +} + +VerifyFuncRegister::VerifyFuncRegister(const std::string &operator_type, const VerifyFunc &verify_func) { + (void)OperatorFactoryImpl::RegisterVerifyFunc(operator_type, verify_func); +} +} // namespace ge diff --git a/src/common/graph/operator_factory_impl.cc b/src/common/graph/operator_factory_impl.cc new file mode 100644 index 00000000..f9815968 --- /dev/null +++ b/src/common/graph/operator_factory_impl.cc @@ -0,0 +1,150 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/operator_factory_impl.h" + +#include "debug/ge_log.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +shared_ptr> OperatorFactoryImpl::operator_creators_; +shared_ptr> OperatorFactoryImpl::operator_infershape_funcs_; +shared_ptr> OperatorFactoryImpl::operator_inferformat_funcs_; +shared_ptr> OperatorFactoryImpl::operator_verify_funcs_; + +Operator OperatorFactoryImpl::CreateOperator(const std::string &operator_name, const std::string &operator_type) { + if (operator_creators_ == nullptr) { + return Operator(); + } + auto it = operator_creators_->find(operator_type); + if (it == operator_creators_->end()) { + GELOGW("no OpProto of [%s] registered", operator_type.c_str()); + return Operator(); + } + return it->second(operator_name); +} + +graphStatus OperatorFactoryImpl::GetOpsTypeList(std::vector &all_ops) { + all_ops.clear(); + if (operator_creators_ != nullptr) { + for (auto it = operator_creators_->begin(); it != operator_creators_->end(); ++it) { + all_ops.emplace_back(it->first); + } + } else { + GELOGE(GRAPH_FAILED, "no operator creators found"); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +bool OperatorFactoryImpl::IsExistOp(const string &operator_type) { + if (operator_creators_ == nullptr) { + return false; + } + auto it = operator_creators_->find(operator_type); + if (it == operator_creators_->end()) { + return false; + } + return true; +} + +InferShapeFunc OperatorFactoryImpl::GetInferShapeFunc(const std::string &operator_type) { + if (operator_infershape_funcs_ == nullptr) { + return 
nullptr; + } + auto it = operator_infershape_funcs_->find(operator_type); + if (it == operator_infershape_funcs_->end()) { + return nullptr; + } + return it->second; +} + +InferFormatFunc OperatorFactoryImpl::GetInferFormatFunc(const std::string &operator_type) { + if (operator_inferformat_funcs_ == nullptr) { + GELOGI("operator_inferformat_funcs_ is null"); + return nullptr; + } + auto it = operator_inferformat_funcs_->find(operator_type); + if (it == operator_inferformat_funcs_->end()) { + return nullptr; + } + return it->second; +} + +VerifyFunc OperatorFactoryImpl::GetVerifyFunc(const std::string &operator_type) { + if (operator_verify_funcs_ == nullptr) { + return nullptr; + } + auto it = operator_verify_funcs_->find(operator_type); + if (it == operator_verify_funcs_->end()) { + return nullptr; + } + return it->second; +} + +graphStatus OperatorFactoryImpl::RegisterOperatorCreator(const string &operator_type, OpCreator const &op_creator) { + if (operator_creators_ == nullptr) { + operator_creators_.reset(new (std::nothrow) std::map()); + } + auto it = operator_creators_->find(operator_type); + if (it != operator_creators_->end()) { + return GRAPH_FAILED; + } + (void)operator_creators_->emplace(operator_type, op_creator); + return GRAPH_SUCCESS; +} + +graphStatus OperatorFactoryImpl::RegisterInferShapeFunc(const std::string &operator_type, + InferShapeFunc const infer_shape_func) { + if (operator_infershape_funcs_ == nullptr) { + GELOGI("operator_infershape_funcs_ init"); + operator_infershape_funcs_.reset(new (std::nothrow) std::map()); + } + auto it = operator_infershape_funcs_->find(operator_type); + if (it != operator_infershape_funcs_->end()) { + return GRAPH_FAILED; + } + (void)operator_infershape_funcs_->emplace(operator_type, infer_shape_func); + return GRAPH_SUCCESS; +} + +graphStatus OperatorFactoryImpl::RegisterInferFormatFunc(const std::string &operator_type, + InferFormatFunc const infer_format_func) { + if (operator_inferformat_funcs_ == nullptr) 
{ + GELOGI("operator_inferformat_funcs_ init"); + operator_inferformat_funcs_.reset(new (std::nothrow) std::map()); + } + auto it = operator_inferformat_funcs_->find(operator_type); + if (it != operator_inferformat_funcs_->end()) { + return GRAPH_FAILED; + } + (void)operator_inferformat_funcs_->emplace(operator_type, infer_format_func); + return GRAPH_SUCCESS; +} + +graphStatus OperatorFactoryImpl::RegisterVerifyFunc(const std::string &operator_type, VerifyFunc const verify_func) { + if (operator_verify_funcs_ == nullptr) { + GELOGI("operator_verify_funcs_ init"); + operator_verify_funcs_.reset(new (std::nothrow) std::map()); + } + auto it = operator_verify_funcs_->find(operator_type); + if (it != operator_verify_funcs_->end()) { + return GRAPH_FAILED; + } + (void)operator_verify_funcs_->emplace(operator_type, verify_func); + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/common/graph/opsproto/opsproto_manager.cc b/src/common/graph/opsproto/opsproto_manager.cc new file mode 100644 index 00000000..a5bdb4c5 --- /dev/null +++ b/src/common/graph/opsproto/opsproto_manager.cc @@ -0,0 +1,169 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/opsproto_manager.h" + +#include +#include +#include +#include +#include + +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_log.h" + +namespace ge { +OpsProtoManager *OpsProtoManager::Instance() { + static OpsProtoManager instance; + return &instance; +} + +bool OpsProtoManager::Initialize(const std::map &options) { + auto proto_iter = options.find("ge.opsProtoLibPath"); + if (proto_iter == options.end()) { + GELOGW("ge.opsProtoLibPath option not set, return."); + return false; + } + + pluginPath_ = proto_iter->second; + LoadOpsProtoPluginSo(pluginPath_); + + return true; +} + +void OpsProtoManager::Finalize() { + for (auto handle : handles_) { + if (handle != nullptr) { + if (dlclose(handle) != 0) { + GELOGW("failed to close handle, message: %s", dlerror()); + continue; + } + GELOGI("close opsprotomanager handler success"); + } else { + GELOGW("close opsprotomanager handler failure, handler is nullptr"); + } + } +} + +static std::vector Split(const std::string &str, char delim) { + std::vector elems; + if (str.empty()) { + elems.emplace_back(""); + return elems; + } + + std::stringstream ss(str); + std::string item; + + while (getline(ss, item, delim)) { + elems.push_back(item); + } + + auto str_size = str.size(); + if (str_size > 0 && str[str_size - 1] == delim) { + elems.emplace_back(""); + } + + return elems; +} + +static void FindParserSo(const std::string &path, std::vector &file_list) { + // Lib plugin path not exist + if (path.empty()) { + GELOGI("realPath is empty"); + return; + } + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(path.size() >= PATH_MAX, return, "path is invalid"); + + char resolved_path[PATH_MAX] = {0}; + + // Nullptr is returned when the path does not exist or there is no permission + // Return absolute path when path is accessible + if (realpath(path.c_str(), resolved_path) == nullptr) { + GELOGW("the path [%s] not exsit.", path.c_str()); + return; + } + + struct dirent *dent = 
nullptr; + DIR *dir = opendir(resolved_path); + // Lib plugin path not exist + if (dir == nullptr) { + GELOGW("Open directory %s failed,maybe it is not exit or not a dir", resolved_path); + return; + } + + while ((dent = readdir(dir)) != nullptr) { + if (strcmp(dent->d_name, ".") == 0 || strcmp(dent->d_name, "..") == 0) { + continue; + } + std::string name = dent->d_name; + std::string full_name = path + "/" + name; + const std::string so_suff = ".so"; + + if (dent->d_type != DT_DIR && name.size() >= so_suff.size() && + name.compare(name.size() - so_suff.size(), so_suff.size(), so_suff) == 0) { + file_list.push_back(full_name); + GELOGI("OpsProtoManager Parse full name = %s \n", full_name.c_str()); + } + } + if (closedir(dir) != 0) { + GELOGW("close dir fail."); + } +} + +static void GetPluginSoFileList(const std::string &path, std::vector &file_list) { + // Support multi lib directory with ":" as delimiter + std::vector v_path = Split(path, ':'); + + for (size_t i = 0; i < v_path.size(); ++i) { + FindParserSo(v_path[i], file_list); + GELOGI("OpsProtoManager full name = %s", v_path[i].c_str()); + } +} + +void OpsProtoManager::LoadOpsProtoPluginSo(std::string &path) { + if (path.empty()) { + GELOGE(GRAPH_FAILED, "filePath is invalid. please check your text file %s.", path.c_str()); + return; + } + std::vector file_list; + + // If there is .so file in the lib path + GetPluginSoFileList(path, file_list); + + // Not found any .so file in the lib path + if (file_list.empty()) { + GELOGE(GRAPH_FAILED, "OpsProtoManager can not find any plugin file in pluginPath: %s \n", path.c_str()); + return; + } + // Warning message + GELOGW("The shared library will not be checked. Please ensure that the source of the shared library is trusted."); + + // Load .so file + for (auto elem : file_list) { + void *handle = dlopen(elem.c_str(), RTLD_NOW | RTLD_GLOBAL | RTLD_NODELETE); + if (handle == nullptr) { + GELOGW("OpsProtoManager dlopen failed, plugin name:%s. 
Message(%s).", elem.c_str(), dlerror()); + continue; + } else { + // Close dl when the program exist, not close here + GELOGI("OpsProtoManager plugin load %s success.", elem.c_str()); + handles_.push_back(handle); + } + } +} +} // namespace ge diff --git a/src/common/graph/option/ge_context.cc b/src/common/graph/option/ge_context.cc new file mode 100644 index 00000000..ff9b0365 --- /dev/null +++ b/src/common/graph/option/ge_context.cc @@ -0,0 +1,77 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "./ge_context.h" + +#include "./ge_global_options.h" +#include "./ge_local_context.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +GEContext &GetContext() { + static GEContext ge_context{}; + return ge_context; +} + +graphStatus GEContext::GetOption(const std::string &key, std::string &option) { + return GetThreadLocalContext().GetOption(key, option); +} + +std::map &GetMutableGlobalOptions() { + static std::map global_options{}; + return global_options; +} + +void GEContext::Init() { + string session_id; + (void)GetOption("ge.exec.sessionId", session_id); + try { + session_id_ = static_cast(std::stoi(session_id.c_str())); + } catch (std::invalid_argument &) { + GELOGW("%s transform to int failed.", session_id.c_str()); + } catch (std::out_of_range &) { + GELOGW("%s transform to int failed.", session_id.c_str()); + } + + string device_id; + (void)GetOption("ge.exec.deviceId", device_id); + try { + device_id_ = static_cast(std::stoi(device_id.c_str())); + } catch (std::invalid_argument &) { + GELOGW("%s transform to int failed.", device_id.c_str()); + } catch (std::out_of_range &) { + GELOGW("%s transform to int failed.", device_id.c_str()); + } + + string job_id; + (void)GetOption("ge.exec.jobId", job_id); + try { + job_id_ = static_cast(std::stoi(job_id.c_str())); + } catch (std::invalid_argument &) { + GELOGW("%s transform to int failed.", job_id.c_str()); + } catch (std::out_of_range &) { + GELOGW("%s transform to int failed.", job_id.c_str()); + } +} + +uint64_t GEContext::SessionId() { return session_id_; } + +uint32_t GEContext::DeviceId() { return device_id_; } + +uint64_t GEContext::JobId() { return job_id_; } + +void GEContext::SetCtxDeviceId(uint32_t device_id) { device_id_ = device_id; } +} // namespace ge diff --git a/src/common/graph/option/ge_local_context.cc b/src/common/graph/option/ge_local_context.cc new file mode 100644 index 00000000..2a5b7a34 --- /dev/null +++ b/src/common/graph/option/ge_local_context.cc @@ 
-0,0 +1,51 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "./ge_local_context.h" + +#include + +namespace ge { +namespace { +thread_local GEThreadLocalContext thread_context; +} + +GEThreadLocalContext &GetThreadLocalContext() { return thread_context; } + +graphStatus GEThreadLocalContext::GetOption(const string &key, string &option) { + auto iter = session_options_.find(key); + if (iter != session_options_.end()) { + option = iter->second; + return GRAPH_SUCCESS; + } + auto global_iter = global_options_.find(key); + if (global_iter != global_options_.end()) { + option = global_iter->second; + return GRAPH_SUCCESS; + } + return GRAPH_PARAM_INVALID; +} + +void GEThreadLocalContext::SetGlobalOption(map options_map) { + global_options_.clear(); + global_options_ = std::move(options_map); +} + +void GEThreadLocalContext::SetSessionOption(map options_map) { + session_options_.clear(); + session_options_ = std::move(options_map); +} +} // namespace ge diff --git a/src/common/graph/shape_refiner.cc b/src/common/graph/shape_refiner.cc new file mode 100644 index 00000000..5c976dce --- /dev/null +++ b/src/common/graph/shape_refiner.cc @@ -0,0 +1,223 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/shape_refiner.h" + +#include +#include +#include +#include +#include + +#include "debug/ge_log.h" +#include "debug/ge_op_types.h" +#include "external/graph/operator.h" +#include "external/graph/operator_factory.h" +#include "framework/common/debug/ge_log.h" +#include "graph/compute_graph.h" +#include "utils/node_utils.h" +#include "utils/op_desc_utils.h" +#include "utils/tensor_utils.h" +#include "utils/type_utils.h" + +namespace ge { +void ShapeRefiner::PrintInOutTensorShape(const ge::NodePtr &node, const std::string &phase) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "node is null"); + return; + } + ge::OpDescPtr op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, GELOGE(GRAPH_FAILED, "op_desc is null."); return); + std::string str; + if (!op_desc->GetAllInputsDescPtr().empty()) { + std::string input_desc_str = "input shape: "; + for (const auto &input_desc : op_desc->GetAllInputsDescPtr()) { + input_desc_str += "["; + for (int64_t dim : input_desc->GetShape().GetDims()) { + input_desc_str += std::to_string(dim) + " "; + } + input_desc_str += "]"; + input_desc_str += ":" + TypeUtils::DataTypeToSerialString(input_desc->GetDataType()) + ":" + + TypeUtils::FormatToSerialString(input_desc->GetFormat()) + " "; + } + str += input_desc_str; + } + + if (!op_desc->GetAllOutputsDescPtr().empty()) { + std::string output_desc_str = "output shape: "; + for (const auto &output_desc : op_desc->GetAllOutputsDescPtr()) { + if (output_desc == nullptr) { + continue; + } + output_desc_str += "["; + for (int64_t dim : 
output_desc->GetShape().GetDims()) { + output_desc_str += std::to_string(dim) + " "; + } + output_desc_str += "]"; + output_desc_str += ":" + TypeUtils::DataTypeToSerialString(output_desc->GetDataType()) + ":" + + TypeUtils::FormatToSerialString(output_desc->GetFormat()) + " "; + } + str += output_desc_str; + } + GELOGD("Shape dump [%s], Node name: [%s]. %s", phase.c_str(), node->GetName().c_str(), str.c_str()); +} + +graphStatus ShapeRefiner::InferShapeAndType(const ConstNodePtr &node, Operator &op) { + GE_IF_BOOL_EXEC(node == nullptr, GELOGE(GRAPH_FAILED, "node is null."); return GRAPH_FAILED); + auto op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, GELOGE(GRAPH_FAILED, "op_desc is null."); return GRAPH_FAILED); + const auto &op_type = op_desc->GetType(); + + // Get infer func and execute + graphStatus ret = op_desc->CallInferFunc(op); + if (ret == GRAPH_PARAM_INVALID) { + // Op ir no infer func, try to get infer func from operator factory + auto node_op = ge::OperatorFactory::CreateOperator("node_op", op_desc->GetType()); + if (node_op.IsEmpty()) { + GELOGW("get op from OperatorFactory fail. opType: %s", op_type.c_str()); + return ret; + } + + GELOGD("get op from OperatorFactory success. opType: %s", op_type.c_str()); + auto temp_op_desc = ge::OpDescUtils::GetOpDescFromOperator(node_op); + if (temp_op_desc == nullptr) { + GELOGE(GRAPH_FAILED, "temp op desc is null"); + return GRAPH_FAILED; + } + if (!op_desc->UpdateInputName(temp_op_desc->GetAllInputName())) { + GELOGW("InferShapeAndType UpdateInputName failed"); + for (const auto &out_desc : op_desc->GetAllOutputsDescPtr()) { + if (out_desc != nullptr && out_desc->GetShape().GetDims().empty()) { + break; + } + return GRAPH_SUCCESS; + } + } + if (!op_desc->UpdateOutputName(temp_op_desc->GetAllOutputName())) { + GELOGW("InferShapeAndType UpdateOutputName failed"); + } + op_desc->AddInferFunc(temp_op_desc->GetInferFunc()); + ret = op_desc->CallInferFunc(op); + GELOGI("op CallInferFunc second. 
ret: %u", ret); + } + return ret; +} + +InferenceContextPtr CreateInferenceContext(const std::unordered_map &context_map, + const NodePtr &node) { + auto ctx = std::shared_ptr(new (std::nothrow) InferenceContext()); + if (ctx == nullptr) { + GELOGE(GRAPH_FAILED, "Failed to alloc InferenceContext"); + return nullptr; + } + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "node is null"); + return nullptr; + } + InferenceContextPtr inference_context = std::shared_ptr(ctx); + auto all_in_data_anchors = node->GetAllInDataAnchors(); + std::vector> input_shapes_and_types(all_in_data_anchors.size()); + std::vector marks; + + bool has_input_shapes_and_types = false; + for (const auto &in_anchor : all_in_data_anchors) { + const auto &out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor == nullptr) { + continue; + } + + auto input_node = out_anchor->GetOwnerNode(); + if (input_node == nullptr) { + continue; + } + + auto iter = context_map.find(input_node); + if (iter != context_map.end()) { + const auto &src_context = iter->second; + GE_IF_BOOL_EXEC(src_context == nullptr, GELOGE(GRAPH_FAILED, "src_context is null."); return nullptr); + GELOGD("node:%s get %ld marks from node:%s", node->GetName().c_str(), src_context->GetMarks().size(), + input_node->GetName().c_str()); + for (auto mark : src_context->GetMarks()) { + marks.push_back(mark); + } + auto output_idx = out_anchor->GetIdx(); + auto input_idx = in_anchor->GetIdx(); + auto output_shape_and_type = src_context->GetOutputHandleShapesAndTypes(); + if (output_idx < static_cast(output_shape_and_type.size())) { + GELOGI("Add shape and type from %s:%d to %s:%d", input_node->GetName().c_str(), output_idx, + node->GetName().c_str(), input_idx); + input_shapes_and_types[input_idx] = output_shape_and_type[output_idx]; + has_input_shapes_and_types = true; + } else { + GELOGI("[%s] Output out of range. 
index = %d, size = %zu", node->GetName().c_str(), output_idx, + output_shape_and_type.size()); + } + } + } + + if (has_input_shapes_and_types) { + ctx->SetInputHandleShapesAndTypes(std::move(input_shapes_and_types)); + } + ctx->SetMarks(marks); + + return inference_context; +} + +namespace { +std::unordered_map context_map; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus ShapeRefiner::InferShapeAndType(const NodePtr &node) { + GE_IF_BOOL_EXEC(node == nullptr, GELOGE(GRAPH_FAILED, "node is null."); return GRAPH_FAILED); + if (node->Verify() != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Verifying %s failed.", node->GetName().c_str()); + return GRAPH_FAILED; + } + + auto inference_context = CreateInferenceContext(context_map, node); + if (inference_context == nullptr) { + GELOGE(GRAPH_FAILED, "inference context is null"); + return GRAPH_FAILED; + } + + GELOGD("create context for node:%s, marks %zu", node->GetName().c_str(), inference_context->GetMarks().size()); + + PrintInOutTensorShape(node, "before_infershape"); + + Operator op = OpDescUtils::CreateOperatorFromNode(node); + op.SetInferenceContext(inference_context); + graphStatus status = InferShapeAndType(node, op); + if (status == GRAPH_PARAM_INVALID || status == GRAPH_SUCCESS) { + (void)ge::NodeUtils::UpdatePeerNodeInputDesc(node); + } else { + GELOGE(GRAPH_FAILED, "%s call infer function failed.", node->GetName().c_str()); + return GRAPH_FAILED; + } + + auto ctx_after_infer = op.GetInferenceContext(); + if (ctx_after_infer != nullptr) { + GELOGD("[%s] after infershape. mark:%zu", node->GetName().c_str(), ctx_after_infer->GetMarks().size()); + if (!ctx_after_infer->GetOutputHandleShapesAndTypes().empty() || !ctx_after_infer->GetMarks().empty()) { + GELOGD("[%s] set inference context after. 
mark:%zu", node->GetName().c_str(), ctx_after_infer->GetMarks().size()); + (void)context_map.emplace(node, ctx_after_infer); + } + } + + PrintInOutTensorShape(node, "after_infershape"); + + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/common/graph/tensor.cc b/src/common/graph/tensor.cc new file mode 100644 index 00000000..93e8b4ce --- /dev/null +++ b/src/common/graph/tensor.cc @@ -0,0 +1,595 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "external/graph/tensor.h" + +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_tensor.h" +#include "securec.h" +#include "utils/attr_utils.h" +#include "utils/tensor_adapter.h" +#include "utils/tensor_utils.h" +#include "utils/type_utils.h" + +namespace { +/// Extra 8 bytes store pointer of string +/// Extra 1 byte store '\0' +const int EXTRA_STORE_POINTER_FOR_STRING = 8; +const int EXTRA_STORE_POINTER_FOR_STRING_AND_END_SYMBOL = 9; +} // namespace + +namespace ge { +// If not overflow return true +static bool Int64MulNotOverflow(int64_t a, int64_t b) { + if (a > 0) { + if (b > 0) { + if (a > (INT64_MAX / b)) { + return false; + } + } else { + if (b < (INT64_MIN / a)) { + return false; + } + } + } else { + if (b > 0) { + if (a < (INT64_MIN / b)) { + return false; + } + } else { + if ((a != 0) && (b < (INT64_MAX / a))) { + return false; + } + } + } + return true; +} + +class TensorDescImpl { + public: + TensorDescImpl() = default; + ~TensorDescImpl() = default; + TensorDescImpl(const Shape &shape, Format format, DataType dt) : shape_(shape), format_(format), data_type_(dt) {} + + Shape shape_; + Format format_ = FORMAT_ND; + Format origin_format_ = FORMAT_ND; + DataType data_type_ = DT_FLOAT; + Shape origin_shape_; + int64_t size_ = 0; + int64_t real_dim_cnt_ = 0; + std::string name_; +}; + +class TensorImpl { + public: + TensorImpl() = default; + ~TensorImpl() = default; + + explicit TensorImpl(const TensorDesc &tensor_desc) : ge_tensor(TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc)) {} + TensorImpl(const TensorDesc &tensor_desc, const std::vector &data) + : ge_tensor(TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc), data) {} + TensorImpl(const TensorDesc &tensor_desc, const uint8_t *data, size_t size) + : ge_tensor(TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc), data, size) {} + TensorImpl(TensorDesc &&tensor_desc, std::vector &&data) + : 
ge_tensor(TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc), std::move(data)) {} + + GeTensor ge_tensor; +}; + +Shape::Shape(const std::vector &dims) : dims_(dims) {} + +size_t Shape::GetDimNum() const { return dims_.size(); } + +int64_t Shape::GetDim(size_t idx) const { + if (idx >= dims_.size()) { + return 0; + } + return dims_[idx]; +} + +graphStatus Shape::SetDim(size_t idx, int64_t value) { + if (idx >= dims_.size()) { + return GRAPH_FAILED; + } + dims_[idx] = value; + return GRAPH_SUCCESS; +} + +std::vector Shape::GetDims() const { return dims_; } + +int64_t Shape::GetShapeSize() const { + if (dims_.empty()) { + return 0; + } + int64_t size = 1; + for (auto i : dims_) { + if (!Int64MulNotOverflow(size, i)) { + GELOGE(GRAPH_FAILED, "mul overflow: %ld, %ld", size, i); + size = 0; + return size; + } + size *= i; + } + return size; +} + +TensorDesc::TensorDesc() { impl = ComGraphMakeShared(); } + +TensorDesc::TensorDesc(Shape shape, Format format, DataType dt) { + impl = ComGraphMakeShared(shape, format, dt); + SetRealDimCnt(shape.GetDimNum()); +} + +TensorDesc::TensorDesc(const TensorDesc &desc) { + // Copy + impl = ComGraphMakeShared(); + if (desc.impl != nullptr && impl != nullptr) { + *impl = *desc.impl; + } +} + +TensorDesc::TensorDesc(TensorDesc &&desc) { + // Move + impl = std::move(desc.impl); +} + +TensorDesc &TensorDesc::operator=(const TensorDesc &desc) { + // Copy + if (&desc != this) { + impl = ComGraphMakeShared(); + if (desc.impl != nullptr && impl != nullptr) { + *impl = *desc.impl; + } + } + return *this; +} + +TensorDesc &TensorDesc::operator=(TensorDesc &&desc) { + if (&desc != this) { + impl = std::move(desc.impl); + } + return *this; +} + +void TensorDesc::Update(const Shape &shape, Format format, DataType dt) { + if (impl != nullptr) { + impl->shape_ = shape; + impl->format_ = format; + impl->data_type_ = dt; + } +} + +Shape TensorDesc::GetShape() const { + if (impl != nullptr) { + return impl->shape_; + } + return Shape(); +} + +void 
TensorDesc::SetShape(const Shape &shape) { + if (impl != nullptr) { + impl->shape_ = shape; + } +} + +Shape TensorDesc::GetOriginShape() const { + if (impl != nullptr) { + return impl->origin_shape_; + } + return Shape(); +} + +void TensorDesc::SetOriginShape(const Shape &origin_shape) { + if (impl != nullptr) { + impl->origin_shape_ = origin_shape; + } +} + +Format TensorDesc::GetFormat() const { + if (impl != nullptr) { + return impl->format_; + } + return FORMAT_RESERVED; +} + +void TensorDesc::SetFormat(Format format) { + if (impl != nullptr) { + impl->format_ = format; + } +} + +Format TensorDesc::GetOriginFormat() const { + if (impl != nullptr) { + return impl->origin_format_; + } + return FORMAT_RESERVED; +} + +void TensorDesc::SetOriginFormat(Format origin_format) { + if (impl != nullptr) { + impl->origin_format_ = origin_format; + } +} + +DataType TensorDesc::GetDataType() const { + if (impl != nullptr) { + return impl->data_type_; + } + return DT_UNDEFINED; +} + +void TensorDesc::SetDataType(DataType dt) { + if (impl != nullptr) { + impl->data_type_ = dt; + } +} + +void TensorDesc::SetSize(int64_t size) { + if (impl != nullptr) { + impl->size_ = size; + } +} + +int64_t TensorDesc::GetSize() const { + if (impl != nullptr) { + return impl->size_; + } + return 0; +} + +void TensorDesc::SetRealDimCnt(const int64_t real_dim_cnt) { + if (impl != nullptr) { + impl->real_dim_cnt_ = real_dim_cnt; + } +} + +int64_t TensorDesc::GetRealDimCnt() const { + if (impl != nullptr) { + return impl->real_dim_cnt_; + } + return 0; +} + +std::string TensorDesc::GetName() const { + if (impl != nullptr) { + return impl->name_; + } + return ""; +} + +void TensorDesc::SetName(const std::string &name) { + if (impl != nullptr) { + impl->name_ = name; + } +} + +Tensor::Tensor() { impl = ComGraphMakeShared(); } + +Tensor::Tensor(const TensorDesc &tensor_desc) { impl = ComGraphMakeShared(tensor_desc); } + +Tensor::Tensor(const TensorDesc &tensor_desc, const std::vector &data) { + 
uint64_t shape_size = tensor_desc.GetShape().GetShapeSize(); + DataType data_type = tensor_desc.GetDataType(); + uint32_t type_length; + bool ret = TypeUtils::GetDataTypeLength(data_type, type_length); + if (!ret) { + GELOGW("datatype %d is not found.", data_type); + } + + auto data_size = data.size(); + if (ret && (shape_size || (data_size != type_length))) { + if (type_length != 0 && UINT64_MAX / type_length < shape_size) { + GELOGW("mul overflow: %lu, %u", shape_size, type_length); + } else { + if (shape_size * type_length != data_size) { + GELOGW("tensor length not equal: shape_byte_size=%lu, data_size=%zu, dt_type=%s.", shape_size * type_length, + data_size, TypeUtils::DataTypeToSerialString(data_type).c_str()); + } + } + } + impl = ComGraphMakeShared(tensor_desc, data); +} + +Tensor::Tensor(const TensorDesc &tensor_desc, const uint8_t *data, size_t size) { + uint64_t shape_size = tensor_desc.GetShape().GetShapeSize(); + DataType data_type = tensor_desc.GetDataType(); + uint32_t type_length; + bool ret = TypeUtils::GetDataTypeLength(data_type, type_length); + if (!ret) { + GELOGW("datatype %d is not found.", data_type); + } + if (ret && (shape_size || (size != type_length))) { + if (type_length != 0 && UINT64_MAX / type_length < shape_size) { + GELOGW("mul overflow: %lu, %u", shape_size, type_length); + } else { + if (shape_size * type_length != size) { + GELOGW("tensor length not equal: shape_byte_size=%lu, data_size=%zu, dt_type=%s.", shape_size * type_length, + size, TypeUtils::DataTypeToSerialString(data_type).c_str()); + } + } + } + impl = ComGraphMakeShared(tensor_desc, data, size); +} + +Tensor::Tensor(TensorDesc &&tensor_desc, std::vector &&data) { + uint64_t shape_size = tensor_desc.GetShape().GetShapeSize(); + DataType data_type = tensor_desc.GetDataType(); + uint32_t type_length; + bool ret = TypeUtils::GetDataTypeLength(data_type, type_length); + if (!ret) { + GELOGW("datatype %d is not found.", data_type); + } + + auto data_size = data.size(); + 
if (ret && (shape_size || (data_size != type_length))) { + if (type_length != 0 && UINT64_MAX / type_length < shape_size) { + GELOGW("mul overflow: %lu, %u", shape_size, type_length); + } else { + if (shape_size * type_length != data_size) { + GELOGW("tensor length not equal: shape_byte_size=%lu, data_size=%zu, dt_type=%s.", shape_size * type_length, + data_size, TypeUtils::DataTypeToSerialString(data_type).c_str()); + } + } + } + impl = ComGraphMakeShared(std::move(tensor_desc), std::move(data)); +} + +TensorDesc Tensor::GetTensorDesc() const { + if (impl != nullptr) { + return TensorAdapter::GeTensorDesc2TensorDesc(impl->ge_tensor.MutableTensorDesc()); + } + return TensorDesc(); +} + +graphStatus Tensor::SetTensorDesc(const TensorDesc &tensor_desc) { + if (impl != nullptr) { + impl->ge_tensor.SetTensorDesc(TensorAdapter::TensorDesc2GeTensorDesc(tensor_desc)); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +const uint8_t *Tensor::GetData() const { + if (impl != nullptr) { + return impl->ge_tensor.GetData().data(); + } + return nullptr; +} + +uint8_t *Tensor::GetData() { + if (impl != nullptr) { + return impl->ge_tensor.MutableData().data(); + } + return nullptr; +} + +size_t Tensor::GetSize() const { + if (impl != nullptr) { + return impl->ge_tensor.GetData().size(); + } + return 0; +} + +graphStatus Tensor::SetData(std::vector &&data) { + if (impl != nullptr) { + (void)impl->ge_tensor.SetData(data); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +graphStatus Tensor::SetData(const std::vector &data) { + if (impl != nullptr) { + (void)impl->ge_tensor.SetData(data); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +graphStatus Tensor::SetData(const uint8_t *data, size_t size) { + if (impl != nullptr) { + (void)impl->ge_tensor.SetData(data, size); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +graphStatus Tensor::SetData(const std::string &data) { + if (impl != nullptr && (!data.empty())) { + /// Extra 8 bytes store pointer of 
string + /// Extra 1 byte store '\0' + size_t total_size = data.size() + EXTRA_STORE_POINTER_FOR_STRING_AND_END_SYMBOL; + std::unique_ptr buff(new (std::nothrow) char[total_size]()); + if (buff == nullptr) { + GELOGE(GRAPH_FAILED, "allocate string raw data buff failed"); + return GRAPH_FAILED; + } + uint64_t *p = reinterpret_cast(buff.get()); + // Front 8 bytes store pointer of string + char *raw_data = buff.get() + EXTRA_STORE_POINTER_FOR_STRING; + p[0] = reinterpret_cast(raw_data); + int32_t memcpy_ret = memcpy_s(raw_data, total_size - EXTRA_STORE_POINTER_FOR_STRING, data.c_str(), data.size() + 1); + GE_CHK_BOOL_RET_STATUS(memcpy_ret == EOK, GRAPH_FAILED, "copy data failed"); + (void)impl->ge_tensor.SetData(reinterpret_cast(buff.get()), total_size); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} +graphStatus Tensor::SetData(const std::vector &data) { + if (impl != nullptr) { + if (data.empty()) { + GELOGE(GRAPH_FAILED, "there is no data, please check the input variable"); + return GRAPH_FAILED; + } + size_t total_size = 0; + for (auto str : data) { + /// Extra 8 bytes store pointer of each string + /// Extra 1 byte store '\0' + total_size += (str.size() + EXTRA_STORE_POINTER_FOR_STRING_AND_END_SYMBOL); + } + std::unique_ptr buff(new (std::nothrow) char[total_size]); + if (buff == nullptr) { + GELOGE(GRAPH_FAILED, "allocate string raw data buff failed"); + return GRAPH_FAILED; + } + uint64_t *p = reinterpret_cast(buff.get()); + // Front some bytes store pointer of each string + char *raw_data = buff.get() + data.size() * sizeof(uint64_t); + uint64_t ptr_size = data.size() * sizeof(uint64_t); + for (size_t i = 0; i < data.size(); ++i) { + p[i] = reinterpret_cast(raw_data); + if (total_size < ptr_size) { + GELOGE(GRAPH_FAILED, "Subtraction invalid, total_size: %zu, ptr_size: %lu", total_size, ptr_size); + return GRAPH_FAILED; + } + int32_t memcpy_ret = memcpy_s(raw_data, total_size - ptr_size, data[i].c_str(), data[i].size() + 1); + 
GE_CHK_BOOL_RET_STATUS(memcpy_ret == EOK, GRAPH_FAILED, "copy data failed"); + raw_data += (data[i].size() + 1); + ptr_size += (data[i].size() + 1); + } + + (void)impl->ge_tensor.SetData(reinterpret_cast(buff.get()), total_size); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +graphStatus Tensor::IsValid() { + uint64_t shape_size = GetTensorDesc().GetShape().GetShapeSize(); + DataType data_type = GetTensorDesc().GetDataType(); + uint32_t type_length; + bool ret = TypeUtils::GetDataTypeLength(data_type, type_length); + if (!ret) { + GELOGW("datatype %d is not found.", data_type); + return GRAPH_SUCCESS; + } + + size_t data_size = GetSize(); + if (data_type != DT_STRING) { + if (shape_size || (data_size != type_length)) { + if (type_length != 0 && UINT64_MAX / type_length < shape_size) { + GELOGW("mul overflow: %lu, %u", shape_size, type_length); + } else { + if (shape_size * type_length != data_size) { + GELOGW("tensor length not equal: shape_byte_size=%lu, data_size=%zu, dt_type=%s.", shape_size * type_length, + data_size, TypeUtils::DataTypeToSerialString(data_type).c_str()); + } + } + } + } + return GRAPH_SUCCESS; +} + +Tensor Tensor::Clone() const { + Tensor tensor; + if (impl != nullptr && tensor.impl != nullptr) { + tensor.impl->ge_tensor = impl->ge_tensor.Clone(); + } + return tensor; +} + +GeTensorDesc TensorAdapter::TensorDesc2GeTensorDesc(const TensorDesc &tensor_desc) { + GeTensorDesc ge_tensor_desc(GeShape(tensor_desc.GetShape().GetDims()), tensor_desc.GetFormat(), + tensor_desc.GetDataType()); + ge_tensor_desc.SetOriginShape(GeShape(tensor_desc.GetOriginShape().GetDims())); + ge_tensor_desc.SetOriginFormat(tensor_desc.GetOriginFormat()); + auto size = static_cast(tensor_desc.GetSize()); + TensorUtils::SetSize(ge_tensor_desc, size); + + auto real_dim_cnt = static_cast(tensor_desc.GetRealDimCnt()); + TensorUtils::SetRealDimCnt(ge_tensor_desc, real_dim_cnt); + return ge_tensor_desc; +} + +TensorDesc TensorAdapter::GeTensorDesc2TensorDesc(const 
GeTensorDesc &ge_tensor_desc) { + TensorDesc tensor_desc(Shape(ge_tensor_desc.GetShape().GetDims()), ge_tensor_desc.GetFormat(), + ge_tensor_desc.GetDataType()); + tensor_desc.SetOriginShape(Shape(ge_tensor_desc.GetOriginShape().GetDims())); + tensor_desc.SetOriginFormat(ge_tensor_desc.GetOriginFormat()); + uint32_t size = 0; + (void)TensorUtils::GetSize(ge_tensor_desc, size); + tensor_desc.SetSize(size); + + uint32_t real_dim_cnt = 0; + (void)TensorUtils::GetRealDimCnt(ge_tensor_desc, real_dim_cnt); + tensor_desc.SetRealDimCnt(real_dim_cnt); + return tensor_desc; +} + +GeTensorPtr TensorAdapter::Tensor2GeTensor(const Tensor &tensor) { + GeTensorPtr ge_tensor; + if (tensor.impl != nullptr) { + ge_tensor = ComGraphMakeShared(tensor.impl->ge_tensor.Clone()); + } + return ge_tensor; +} + +Tensor TensorAdapter::GeTensor2Tensor(const ConstGeTensorPtr &ge_tensor) { + Tensor tensor; + if (ge_tensor != nullptr && tensor.impl != nullptr) { + tensor.impl->ge_tensor = ge_tensor->Clone(); + } + return tensor; +} + +ConstGeTensorPtr TensorAdapter::AsGeTensorPtr(const Tensor &tensor) { + GeTensorPtr ge_tensor; + if (tensor.impl != nullptr) { + ge_tensor = ComGraphMakeShared(tensor.impl->ge_tensor); + } + return ge_tensor; +} + +GeTensorPtr TensorAdapter::AsGeTensorPtr(Tensor &tensor) { + GeTensorPtr ge_tensor; + if (tensor.impl != nullptr) { + ge_tensor = ComGraphMakeShared(tensor.impl->ge_tensor); + } + return ge_tensor; +} + +const GeTensor TensorAdapter::AsGeTensor(const Tensor &tensor) { + if (tensor.impl != nullptr) { + return tensor.impl->ge_tensor; + } + return GeTensor(); +} + +GeTensor TensorAdapter::AsGeTensor(Tensor &tensor) { + if (tensor.impl != nullptr) { + return tensor.impl->ge_tensor; + } + return GeTensor(); +} + +const Tensor TensorAdapter::AsTensor(const GeTensor &ge_tensor) { + Tensor tensor; + if (tensor.impl != nullptr) { + tensor.impl->ge_tensor = ge_tensor; + } + return tensor; +} + +Tensor TensorAdapter::AsTensor(GeTensor &ge_tensor) { + Tensor tensor; 
+ if (tensor.impl != nullptr) { + tensor.impl->ge_tensor = ge_tensor; + } + return tensor; +} +} // namespace ge diff --git a/src/common/graph/utils/anchor_utils.cc b/src/common/graph/utils/anchor_utils.cc new file mode 100644 index 00000000..3a284328 --- /dev/null +++ b/src/common/graph/utils/anchor_utils.cc @@ -0,0 +1,104 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "utils/anchor_utils.h" + +#include + +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Format AnchorUtils::GetFormat(const DataAnchorPtr &data_anchor) { + if (data_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "The input data anchor is invalid."); + return FORMAT_RESERVED; + } + return data_anchor->format_; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus AnchorUtils::SetFormat(const DataAnchorPtr &data_anchor, + Format data_format) { + if ((data_anchor == nullptr) || (data_format == FORMAT_RESERVED)) { + GELOGE(GRAPH_FAILED, "The input data anchor or input data format is invalid ."); + return GRAPH_FAILED; + } + data_anchor->format_ = data_format; + return GRAPH_SUCCESS; +} + +// Get anchor status +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY AnchorStatus AnchorUtils::GetStatus(const DataAnchorPtr &data_anchor) { + if (data_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "The input data anchor is invalid."); + 
return ANCHOR_RESERVED; + } + return data_anchor->status_; +} + +// Set anchor status +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus AnchorUtils::SetStatus(const DataAnchorPtr &data_anchor, + AnchorStatus anchor_status) { + if ((data_anchor == nullptr) || (anchor_status == ANCHOR_RESERVED)) { + GELOGE(GRAPH_FAILED, "The input data anchor or input data format is invalid ."); + return GRAPH_FAILED; + } + data_anchor->status_ = anchor_status; + return GRAPH_SUCCESS; +} + +bool AnchorUtils::HasControlEdge(const AnchorPtr &anchor) { + auto control_anchor = Anchor::DynamicAnchorCast(anchor); + if (control_anchor != nullptr) { + return (control_anchor->GetPeerAnchors().size() != 0); + } + + auto data_anchor = Anchor::DynamicAnchorCast(anchor); + if (data_anchor) { + for (const auto &peer : data_anchor->GetPeerAnchors()) { + auto peer_cast = Anchor::DynamicAnchorCast(peer); + if (peer_cast) { + return true; + } + } + return false; + } + GELOGE(GRAPH_FAILED, "the anchor is neither control anchor nor data anchor"); + return false; +} + +bool AnchorUtils::IsControlEdge(const AnchorPtr &src, const AnchorPtr &dst) { + GE_CHK_BOOL_EXEC(src != nullptr, return false, "src is null."); + GE_CHK_BOOL_RET_STATUS_NOLOG(src->IsLinkedWith(dst), false); + auto src_control_anchor = Anchor::DynamicAnchorCast(src); + auto dst_control_anchor = Anchor::DynamicAnchorCast(dst); + return (src_control_anchor || dst_control_anchor); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY int AnchorUtils::GetIdx(const AnchorPtr &anchor) { + // Check if it can add edge between DataAnchor + auto data_anchor = Anchor::DynamicAnchorCast(anchor); + if (data_anchor != nullptr) { + return data_anchor->GetIdx(); + } + // Check if it can add edge between ControlAnchor + auto control_anchor = Anchor::DynamicAnchorCast(anchor); + if (control_anchor != nullptr) { + return control_anchor->GetIdx(); + } + return -1; +} +} // namespace ge diff --git a/src/common/graph/utils/ge_ir_utils.cc 
b/src/common/graph/utils/ge_ir_utils.cc new file mode 100644 index 00000000..f796a906 --- /dev/null +++ b/src/common/graph/utils/ge_ir_utils.cc @@ -0,0 +1,1057 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/utils/ge_ir_utils.h" + +#include + +#include "framework/common/debug/ge_log.h" + +namespace { +const char *const kControlAnchorIndex = ":-1"; +const char *const kNodeTypeForSubgraph = "subgraph"; +const char *const kDumpGEGraph = "DUMP_GE_GRAPH"; +const int8_t kMaxRecursionDepth = 10; +const char *const kDumpGeGraph = std::getenv(kDumpGEGraph); +const int64_t kDumpLevel = (kDumpGeGraph != nullptr) ? 
std::strtol(kDumpGeGraph, nullptr, 10) : ge::OnnxUtils::NO_DUMP; +} // namespace + +namespace ge { +// Part 1: from IR convert to ONNX Protobuf +static const std::map kGeDataTypeToOnnxMap = { + {DT_INT64, onnx::TensorProto_DataType_INT64}, {DT_UINT64, onnx::TensorProto_DataType_UINT64}, + {DT_FLOAT, onnx::TensorProto_DataType_FLOAT}, {DT_INT32, onnx::TensorProto_DataType_INT32}, + {DT_UINT32, onnx::TensorProto_DataType_UINT32}, {DT_INT8, onnx::TensorProto_DataType_INT8}, + {DT_UINT8, onnx::TensorProto_DataType_UINT8}, {DT_INT16, onnx::TensorProto_DataType_INT16}, + {DT_UINT16, onnx::TensorProto_DataType_UINT16}, {DT_FLOAT16, onnx::TensorProto_DataType_FLOAT16}, + {DT_DOUBLE, onnx::TensorProto_DataType_DOUBLE}, {DT_BOOL, onnx::TensorProto_DataType_BOOL}, +}; + +onnx::TensorProto_DataType OnnxUtils::EncodeDataType(DataType data_type) { + auto it = kGeDataTypeToOnnxMap.find(data_type); + if (it != kGeDataTypeToOnnxMap.end()) { + return it->second; + } else { + GELOGW("EncodeDataType: datatype not support %u", data_type); + return onnx::TensorProto_DataType_UNDEFINED; + } +} + +void OnnxUtils::AddAttrProtoFromAttribute(const std::pair &string_attr_value, + onnx::NodeProto *node_proto) { + if (node_proto == nullptr) { + GELOGE(FAILED, "Node proto is nullptr."); + return; + } + auto attr = node_proto->add_attribute(); + if (attr == nullptr) { + GELOGE(GRAPH_FAILED, "attr is nullptr."); + return; + } + auto attr_name = string_attr_value.first; + attr->set_name(attr_name); + auto attr_value = string_attr_value.second; + auto value_type = attr_value.GetValueType(); + switch (value_type) { + case GeAttrValue::VT_FLOAT: { + GeAttrValue::FLOAT data_f = 0; + (void)attr_value.GetValue(data_f); + attr->set_f(data_f); + attr->set_type(onnx::AttributeProto_AttributeType_FLOAT); + break; + } + case GeAttrValue::VT_LIST_FLOAT: { + GeAttrValue::LIST_FLOAT data_fs = {}; + (void)attr_value.GetValue(data_fs); + attr->set_type(onnx::AttributeProto_AttributeType_FLOATS); + for (auto &v : 
data_fs) { + attr->add_floats(v); + } + break; + } + case GeAttrValue::VT_INT: { + GeAttrValue::INT data_i = 0; + (void)attr_value.GetValue(data_i); + attr->set_type(onnx::AttributeProto_AttributeType_INT); + attr->set_i(data_i); + break; + } + case GeAttrValue::VT_LIST_INT: { + GeAttrValue::LIST_INT data_is = {}; + (void)attr_value.GetValue(data_is); + attr->set_type(onnx::AttributeProto_AttributeType_INTS); + for (auto &v : data_is) { + attr->add_ints(v); + } + break; + } + case GeAttrValue::VT_STRING: { + GeAttrValue::STR data_s; + (void)attr_value.GetValue(data_s); + attr->set_type(onnx::AttributeProto_AttributeType_STRING); + attr->set_s(data_s); + break; + } + case GeAttrValue::VT_LIST_STRING: { + GeAttrValue::LIST_STR data_ss = {}; + (void)attr_value.GetValue(data_ss); + attr->set_type(onnx::AttributeProto_AttributeType_STRINGS); + for (auto &v : data_ss) { + attr->add_strings(v); + } + break; + } + default: + GELOGW("GeAttrValue ValueType: %u is not supported for now", value_type); + break; + } +} + +void OnnxUtils::AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, const string &name, + void *data) { + if (node_proto == nullptr) { + GELOGE(FAILED, "Node_proto %s is nullptr.", name.c_str()); + return; + } + auto attr = node_proto->add_attribute(); + if (attr == nullptr) { + GELOGE(GRAPH_FAILED, "attr is nullptr."); + return; + } + attr->set_name(name); + switch (type) { + case onnx::AttributeProto_AttributeType_FLOAT: + attr->set_f((*(static_cast(data)))); + attr->set_type(onnx::AttributeProto_AttributeType_FLOAT); + break; + + case onnx::AttributeProto_AttributeType_FLOATS: + attr->set_type(onnx::AttributeProto_AttributeType_FLOATS); + for (auto &v : (*(static_cast *>(data)))) { + attr->add_floats(v); + } + break; + + case onnx::AttributeProto_AttributeType_INT: + attr->set_type(onnx::AttributeProto_AttributeType_INT); + attr->set_i((*(static_cast(data)))); + break; + + case onnx::AttributeProto_AttributeType_INTS: + 
attr->set_type(onnx::AttributeProto_AttributeType_INTS); + for (auto &v : *(static_cast *>(data))) { + attr->add_ints(v); + } + break; + + case onnx::AttributeProto_AttributeType_STRING: + attr->set_type(onnx::AttributeProto_AttributeType_STRING); + attr->set_s((*(static_cast(data)))); + break; + + case onnx::AttributeProto_AttributeType_STRINGS: + attr->set_type(onnx::AttributeProto_AttributeType_STRINGS); + for (auto &v : *(static_cast *>(data))) { + attr->add_strings(v); + } + break; + + default: + GELOGW("AttributeProto AttributeType: %u is not supported for now", type); + break; + } +} + +void OnnxUtils::AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, const string &name, + ::google::protobuf::RepeatedField<::google::protobuf::int64> data) { + if (node_proto == nullptr) { + GELOGE(FAILED, "Node_proto %s is nullptr.", name.c_str()); + return; + } + if (!data.empty()) { + auto attr = node_proto->add_attribute(); + if (attr == nullptr) { + GELOGE(GRAPH_FAILED, "attr is nullptr."); + return; + } + attr->set_name(name); + for (auto &v : data) { + attr->add_ints(v); + } + attr->set_type(type); + } +} + +void OnnxUtils::AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, const string &name, + ::google::protobuf::RepeatedField data) { + if (node_proto == nullptr) { + GELOGE(FAILED, "Node_proto %s is nullptr.", name.c_str()); + return; + } + if (!data.empty()) { + auto attr = node_proto->add_attribute(); + if (attr == nullptr) { + GELOGE(GRAPH_FAILED, "attr is nullptr."); + return; + } + attr->set_name(name); + for (auto &v : data) { + attr->add_ints(static_cast(v)); + } + attr->set_type(type); + } +} + +void OnnxUtils::AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, const string &name, + ::google::protobuf::RepeatedField data) { + if (node_proto == nullptr) { + GELOGE(FAILED, "Node_proto %s is nullptr.", name.c_str()); + return; + } + if (!data.empty()) { + auto attr = 
node_proto->add_attribute(); + if (attr == nullptr) { + GELOGE(GRAPH_FAILED, "attr is nullptr."); + return; + } + attr->set_name(name); + for (auto &v : data) { + attr->add_floats(v); + } + attr->set_type(type); + } +} + +void OnnxUtils::AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, const string &name, + ::google::protobuf::RepeatedPtrField<::std::string> data) { + if (node_proto == nullptr) { + GELOGE(FAILED, "Node proto %s is nullptr.", name.c_str()); + return; + } + if (!data.empty()) { + auto attr = node_proto->add_attribute(); + if (attr == nullptr) { + GELOGE(GRAPH_FAILED, "attr is nullptr."); + return; + } + attr->set_name(name); + for (auto &v : data) { + attr->add_strings(v); + } + attr->set_type(type); + } +} + +void OnnxUtils::AddAttrProtoForOpInAndOutDesc(onnx::NodeProto *node_proto, const OpDescPtr &op_desc) { + if (node_proto == nullptr || op_desc == nullptr) { + GELOGE(GRAPH_FAILED, "node_proto or op_desc is nullptr"); + return; + } + // Input describes + auto size_in = op_desc->GetInputsSize(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, "input_desc_nums", &size_in); + if (size_in > 0) { + for (uint32_t i = 0; i < size_in; i++) { + auto input_desc = op_desc->GetInputDescPtr(i); + if (input_desc != nullptr) { + auto data_type = TypeUtils::DataTypeToSerialString(input_desc->GetDataType()); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, "input_desc_dtype:" + std::to_string(i), + &data_type); + auto dims = input_desc->GetShape().GetDims(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "input_desc_shape:" + std::to_string(i), + &dims); + auto dims_origin = input_desc->GetOriginShape().GetDims(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, + "input_desc_origin_shape:" + std::to_string(i), &dims_origin); + auto layout = TypeUtils::FormatToSerialString(input_desc->GetFormat()); + AddAttrProto(node_proto, 
onnx::AttributeProto_AttributeType_STRING, "input_desc_layout:" + std::to_string(i), + &layout); + auto layout_origin = TypeUtils::FormatToSerialString(input_desc->GetOriginFormat()); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, + "input_desc_origin_layout:" + std::to_string(i), &layout_origin); + auto tensor_descriptor = input_desc->tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor != nullptr) { + auto size = tensor_descriptor->size(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, "input_desc_size:" + std::to_string(i), + &size); + auto weight_size = tensor_descriptor->weight_size(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, + "input_desc_weight_size:" + std::to_string(i), &weight_size); + auto reuse_input = tensor_descriptor->reuse_input(); + auto reuse_input_int = static_cast(reuse_input); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, + "input_desc_reuse_input:" + std::to_string(i), &reuse_input_int); + auto output_tensor = tensor_descriptor->output_tensor(); + auto output_tensor_int = static_cast(output_tensor); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, + "input_desc_output_tensor:" + std::to_string(i), &output_tensor_int); + auto device_type = tensor_descriptor->device_type(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, + "input_desc_device_type:" + std::to_string(i), &device_type); + auto input_tensor = tensor_descriptor->input_tensor(); + auto input_tensor_int = static_cast(input_tensor); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, + "input_desc_input_tensor:" + std::to_string(i), &input_tensor_int); + auto real_dim_cnt = tensor_descriptor->real_dim_cnt(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, + "input_desc_real_dim_cnt:" + std::to_string(i), &real_dim_cnt); + auto data_offset = tensor_descriptor->data_offset(); + AddAttrProto(node_proto, 
onnx::AttributeProto_AttributeType_INT, + "input_desc_data_offset:" + std::to_string(i), &data_offset); + auto cmps_size = tensor_descriptor->cmps_size(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, "input_desc_cmps_size:" + std::to_string(i), + &cmps_size); + auto cmps_tab = tensor_descriptor->cmps_tab(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, + "input_desc_cmps_tab:" + std::to_string(i), &cmps_tab); + auto cmps_tab_offset = tensor_descriptor->cmps_tab_offset(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, + "input_desc_cmps_tab_offset:" + std::to_string(i), &cmps_tab_offset); + } + } + } + } + // Output describes + auto size_out = op_desc->GetOutputsSize(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, "output_desc_nums", &size_out); + if (size_out > 0) { + for (uint32_t i = 0; i < size_out; i++) { + auto output_desc = op_desc->GetOutputDescPtr(i); + if (output_desc != nullptr) { + auto data_type = TypeUtils::DataTypeToSerialString(output_desc->GetDataType()); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, "output_desc_dtype:" + std::to_string(i), + &data_type); + auto dims = output_desc->GetShape().GetDims(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "output_desc_shape:" + std::to_string(i), + &dims); + auto dims_origin = output_desc->GetOriginShape().GetDims(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, + "output_desc_origin_shape:" + std::to_string(i), &dims_origin); + auto layout = TypeUtils::FormatToSerialString(output_desc->GetFormat()); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, "output_desc_layout:" + std::to_string(i), + &layout); + auto layout_origin = TypeUtils::FormatToSerialString(output_desc->GetOriginFormat()); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, + "output_desc_origin_layout:" + std::to_string(i), &layout_origin); + auto 
tensor_descriptor = output_desc->tensor_descriptor_.GetProtoMsg(); + if (tensor_descriptor != nullptr) { + auto size = tensor_descriptor->size(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, "output_desc_size:" + std::to_string(i), + &size); + auto weight_size = tensor_descriptor->weight_size(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, + "output_desc_weight_size:" + std::to_string(i), &weight_size); + auto device_type = tensor_descriptor->device_type(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, + "output_desc_device_type:" + std::to_string(i), &device_type); + auto real_dim_cnt = tensor_descriptor->real_dim_cnt(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, + "output_desc_real_dim_cnt:" + std::to_string(i), &real_dim_cnt); + } + } + } + } +} + +void OnnxUtils::AddAttrProtoForAttrsFromOpDef(const ge::proto::OpDef *op_def, onnx::NodeProto *node_proto) { + GE_CHK_BOOL_EXEC(op_def != nullptr, return, "Opdef is nullptr"); + const auto &op_def_attr_map = op_def->attr(); + for (const auto &item : op_def_attr_map) { + auto attr_name = item.first; + auto attr_def = item.second; + auto attr_type = attr_def.value_case(); + if (attr_type == ge::proto::AttrDef::kT) { + const auto &tensor_def = attr_def.t(); + const auto &tensor_desc = tensor_def.desc(); + auto data_type = ge::proto::DataType_Name(tensor_desc.dtype()); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, attr_name + "_desc_dtype:", &data_type); + auto dims = tensor_desc.shape().dim(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, attr_name + "_desc_shape:", dims); + auto layout = tensor_desc.layout(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, attr_name + "_desc_layout:", &layout); + auto device_type = tensor_desc.device_type(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, + attr_name + "_desc_device_type:", &device_type); + 
if (kDumpLevel == DUMP_ALL) { + auto data = tensor_def.data(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, attr_name + "_data", &data); + } + } + if (attr_type == ge::proto::AttrDef::kS) { + if (kDumpLevel == DUMP_ALL) { + auto str_value = attr_def.s(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRING, attr_name, &str_value); + } + } + if (attr_type == ge::proto::AttrDef::kI) { + auto int_value = attr_def.i(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, attr_name, &int_value); + } + if (attr_type == ge::proto::AttrDef::kF) { + auto float_value = attr_def.f(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_FLOAT, attr_name, &float_value); + } + if (attr_type == ge::proto::AttrDef::kB) { + auto int_value = static_cast(attr_def.b()); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, attr_name, &int_value); + } + if (attr_type == ge::proto::AttrDef::kList) { + const auto &list_value = attr_def.list(); + auto list_value_type = list_value.val_type(); + if (list_value_type == + ge::proto::AttrDef_ListValue_ListValueType::AttrDef_ListValue_ListValueType_VT_LIST_STRING) { + if (kDumpLevel == DUMP_ALL) { + const auto &strings = list_value.s(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRINGS, attr_name, strings); + } + } + if (list_value_type == + ge::proto::AttrDef_ListValue_ListValueType::AttrDef_ListValue_ListValueType_VT_LIST_FLOAT) { + const auto &floats = list_value.f(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_FLOATS, attr_name, floats); + } + if (list_value_type == ge::proto::AttrDef_ListValue_ListValueType::AttrDef_ListValue_ListValueType_VT_LIST_INT) { + const auto &ints = list_value.i(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, attr_name, ints); + } + if (list_value_type == ge::proto::AttrDef_ListValue_ListValueType::AttrDef_ListValue_ListValueType_VT_LIST_BOOL) { + const auto &bools = 
list_value.b(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, attr_name, bools); + } + } + } +} + +void OnnxUtils::AddAttrProtoFromNodeMembers(const NodePtr &node, onnx::NodeProto *node_proto) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "node is nullptr"); + return; + } + // 1.Attributes added from node's methods + auto send_list = node->send_event_id_list_; + if (!send_list.empty()) { + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "send_event_id_list", &send_list); + } + auto recv_list = node->recv_event_id_list_; + if (!recv_list.empty()) { + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "recv_event_id_list", &recv_list); + } + // 2.Attributes added from node's op_(message OpDef) + auto op_desc = node->op_; + if (op_desc != nullptr) { + // Input and out describes + AddAttrProtoForOpInAndOutDesc(node_proto, op_desc); + // Others + auto op_def = op_desc->op_def_.GetProtoMsg(); + if (op_def != nullptr) { + auto id = op_def->id(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, "id", &id); + auto stream_id = op_def->stream_id(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INT, "stream_id", &stream_id); + const auto &input_name = op_def->input_name(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRINGS, "input_name", input_name); + const auto &src_name = op_def->src_name(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRINGS, "src_name", src_name); + const auto &src_index = op_def->src_index(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "src_index", src_index); + const auto &dst_name = op_def->dst_name(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_STRINGS, "dst_name", dst_name); + const auto &dst_index = op_def->dst_index(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "dst_index", dst_index); + const auto &input_i = op_def->input_i(); + AddAttrProto(node_proto, 
onnx::AttributeProto_AttributeType_INTS, "input_i", input_i); + const auto &output_i = op_def->output_i(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "output_i", output_i); + const auto &workspace = op_def->workspace(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "workspace", workspace); + const auto &workspace_bytes = op_def->workspace_bytes(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "workspace_bytes", workspace_bytes); + const auto &is_input_const = op_def->is_input_const(); + AddAttrProto(node_proto, onnx::AttributeProto_AttributeType_INTS, "is_input_const", is_input_const); + AddAttrProtoForAttrsFromOpDef(op_def, node_proto); + } + } +} + +bool OnnxUtils::EncodeNodeDesc(const NodePtr &node, onnx::NodeProto *node_proto) { + if ((node == nullptr) || (node_proto == nullptr)) { + GELOGE(GRAPH_FAILED, "EncodeOpDesc: Input Para Node Invalid"); + return false; + } + + // 2.Encode map attrs_ to AttributeProto + for (auto &node_attr : node->attrs_) { + AddAttrProtoFromAttribute(node_attr, node_proto); + } + // 3.Encode ge::Node members to AttributeProto + AddAttrProtoFromNodeMembers(node, node_proto); + return true; +} + +void OnnxUtils::EncodeNodeLinkForNetronVisual(const NodePtr &node, onnx::NodeProto *node_proto) { + if ((node == nullptr) || (node_proto == nullptr)) { + GELOGE(GRAPH_FAILED, "EncodeNodeLinkForNetronVisual: Input Para Node Invalid"); + return; + } + const auto &node_name = node->GetName(); + for (const auto &out_data_anchor : node->GetAllOutDataAnchors()) { + if ((out_data_anchor != nullptr) && (!out_data_anchor->GetPeerInDataAnchors().empty())) { + node_proto->add_output(node_name + ":" + std::to_string(out_data_anchor->GetIdx())); + } + } + auto out_control_anchor = node->GetOutControlAnchor(); + if ((out_control_anchor != nullptr) && (!out_control_anchor->GetPeerInControlAnchors().empty())) { + node_proto->add_output(node_name + kControlAnchorIndex); + } +} + +bool 
OnnxUtils::EncodeNodeLink(const NodePtr &node, onnx::NodeProto *node_proto) {
  if ((node == nullptr) || (node_proto == nullptr)) {
    GELOGE(GRAPH_FAILED, "EncodeNodeLink: Input Para Node Invalid");
    return false;
  }
  node_proto->clear_input();
  // 1. Add input by in data edge; unconnected anchors get an "" placeholder so
  // the input positions stay aligned with the anchor indices.
  for (const auto &in_data_anchor : node->GetAllInDataAnchors()) {
    if (in_data_anchor != nullptr) {
      auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor();
      if ((peer_out_anchor != nullptr) && (peer_out_anchor->GetOwnerNode() != nullptr)) {
        node_proto->add_input(peer_out_anchor->GetOwnerNode()->GetName() + ":" +
                              std::to_string(peer_out_anchor->GetIdx()));
      } else {
        // Add "" input
        node_proto->add_input("");
      }
    }
  }

  // 2. Add input by in control edge
  auto in_control_anchor = node->GetInControlAnchor();
  if (in_control_anchor != nullptr) {
    auto peer_out_anchors = in_control_anchor->GetPeerOutControlAnchors();
    for (const auto &peer_out_anchor : peer_out_anchors) {
      if (peer_out_anchor->GetOwnerNode()) {
        node_proto->add_input(peer_out_anchor->GetOwnerNode()->GetName() + kControlAnchorIndex);
      }
    }
  }

  // 3. Add output for Netron visual support
  EncodeNodeLinkForNetronVisual(node, node_proto);
  return true;
}

// Encodes one ge::Node (name, type, attributes, link info) into an ONNX
// NodeProto. Returns false on null arguments or when attribute encoding fails.
bool OnnxUtils::EncodeNode(const NodePtr &node, onnx::NodeProto *node_proto) {
  if ((node == nullptr) || (node_proto == nullptr)) {
    GELOGE(GRAPH_FAILED, "EncodeNode: Input Para Node Invalid");
    return false;
  }
  // 1. Encode name and type
  node_proto->set_name(node->GetName());
  /// Netron believes that some operators, such as the activation operator of softplus, only have one input,
  /// while the link relation of control anchor may exist in ge, resulting in two inputs. Therefore, "ge:" prefix
  /// is added to correctly display the link relation at the expense of some color features
  node_proto->set_op_type("ge:" + node->GetType());

  if (kDumpLevel != DUMP_WITH_OUT_DESC) {
    // 2.for attr
    if (!EncodeNodeDesc(node, node_proto)) {
      GELOGE(GRAPH_FAILED, "Encode NodeDesc: %s failed", node->GetName().c_str());
      return false;
    }
  }
  // 3.for link info
  return EncodeNodeLink(node, node_proto);
}

// Fills an ONNX tensor type (element type + shape dims) from the node's output
// tensor descriptors. With several outputs the fields are overwritten in turn,
// so the last output desc wins — presumably intended for single-output value
// info; confirm against callers.
void OnnxUtils::EncodeTypeProtoTensorType(const NodePtr &node, onnx::TypeProto_Tensor *tensor_type) {
  if ((node == nullptr) || (tensor_type == nullptr)) {
    GELOGE(GRAPH_FAILED, "EncodeTypeProtoTensorType: Input Para Node or tensor_type Invalid");
    return;
  }
  const auto &op_desc = node->GetOpDesc();
  if (op_desc != nullptr) {
    auto size_out = op_desc->GetOutputsSize();
    if (size_out > 0) {
      for (uint32_t i = 0; i < size_out; i++) {
        const ConstGeTensorDescPtr &ge_tensor = op_desc->GetOutputDescPtr(i);
        if (ge_tensor != nullptr) {
          auto ge_data_type = ge_tensor->GetDataType();
          auto onnx_data_type = EncodeDataType(ge_data_type);
          tensor_type->set_elem_type(onnx_data_type);
          onnx::TensorShapeProto *shape = tensor_type->mutable_shape();
          if (shape != nullptr) {
            for (auto d : ge_tensor->GetShape().GetDims()) {
              auto dim = shape->add_dim();
              dim->set_dim_value(d);
            }
          }
        }
      }
    }
  } else {
    GELOGW("OpDesc Is Empty, nodeName %s nodeType %s", node->GetName().c_str(), node->GetType().c_str());
    return;
  }
}

// Encodes a graph input/output node as an ONNX ValueInfoProto (name + type).
void OnnxUtils::EncodeValueInfo(const NodePtr &node, onnx::ValueInfoProto *value_info_proto) {
  if ((node == nullptr) || (value_info_proto == nullptr)) {
    GELOGE(GRAPH_FAILED, "EncodeValueInfo: Input Para Node or value_info_proto Invalid");
    return;
  }
  value_info_proto->set_name(node->GetName());
  onnx::TypeProto *t = value_info_proto->mutable_type();
  onnx::TypeProto_Tensor *tensor_type = t->mutable_tensor_type();
  EncodeTypeProtoTensorType(node, tensor_type);
}

bool
OnnxUtils::EncodeGraph(const ConstComputeGraphPtr &graph, onnx::GraphProto *graph_proto) { + if ((graph == nullptr) || (graph_proto == nullptr)) { + GELOGE(GRAPH_FAILED, "EncodeGraph: Input para Invalid"); + return false; + } + graph_proto->set_name(graph->GetName()); + // 1. Add graph inputs + for (const auto &input : graph->GetInputNodes()) { + auto value_info_proto = graph_proto->add_input(); + EncodeValueInfo(input, value_info_proto); + } + // 2. Add graph outputs + for (const auto &output : graph->GetOutputNodes()) { + auto value_info_proto = graph_proto->add_output(); + EncodeValueInfo(output, value_info_proto); + } + // 3. Add nodes + for (const auto &node : graph->GetDirectNode()) { + if (!EncodeNode(node, graph_proto->add_node())) { + GELOGW("EncodeNode failed"); + continue; + } + } + return true; +} + +bool OnnxUtils::ConvertGeModelToModelProto(const ge::Model &model, onnx::ModelProto &model_proto) { + model_proto.set_model_version(model.GetVersion()); + model_proto.set_ir_version(onnx::IR_VERSION); + model_proto.set_producer_name(model.GetName()); + auto &graph = model.graph_; + auto compute_graph = GraphUtils::GetComputeGraph(graph); + if (compute_graph == nullptr) { + GELOGE(GRAPH_FAILED, "GetComputeGraph: return nullptr"); + return false; + } + auto graph_proto = model_proto.mutable_graph(); + if (graph_proto == nullptr) { + GELOGE(GRAPH_FAILED, "mutable_graph: %s return nullptr", compute_graph->GetName().c_str()); + return false; + } + if (!EncodeGraph(compute_graph, graph_proto)) { + GELOGE(GRAPH_FAILED, "EncodeGraph: %s fail", compute_graph->GetName().c_str()); + return false; + } + + // For subgraphs: a subgraph is represented by a node + for (const auto &sub_compute_graph : compute_graph->sub_graph_) { + if (sub_compute_graph != nullptr) { + auto node_proto = graph_proto->add_node(); + if (node_proto == nullptr) { + GELOGW("Node proto is nullptr"); + continue; + } + node_proto->set_name(sub_compute_graph->GetName()); + 
node_proto->set_op_type(kNodeTypeForSubgraph); + auto attr = node_proto->add_attribute(); + attr->set_name("graph"); + attr->set_type(onnx::AttributeProto_AttributeType_GRAPH); + auto sub_graph_proto = attr->mutable_g(); + if (!EncodeGraph(sub_compute_graph, sub_graph_proto)) { + GELOGW("Encode sub graph: %s fail", sub_compute_graph->GetName().c_str()); + continue; + } + } else { + GELOGW("Graph: %s subgraph is nullptr, skip EncodeGraph", compute_graph->GetName().c_str()); + continue; + } + } + return true; +} + +// Part 2: from ONNX Protobuf convert to IR +static std::map onnxDataTypeToGeMap = { + {onnx::TensorProto_DataType_INT64, DT_INT64}, {onnx::TensorProto_DataType_UINT64, DT_UINT64}, + {onnx::TensorProto_DataType_FLOAT, DT_FLOAT}, {onnx::TensorProto_DataType_INT32, DT_INT32}, + {onnx::TensorProto_DataType_UINT32, DT_UINT32}, {onnx::TensorProto_DataType_INT8, DT_INT8}, + {onnx::TensorProto_DataType_UINT8, DT_UINT8}, {onnx::TensorProto_DataType_INT16, DT_INT16}, + {onnx::TensorProto_DataType_UINT16, DT_UINT16}, {onnx::TensorProto_DataType_FLOAT16, DT_FLOAT16}, + {onnx::TensorProto_DataType_DOUBLE, DT_DOUBLE}, {onnx::TensorProto_DataType_BOOL, DT_BOOL}, +}; + +ge::DataType OnnxUtils::DecodeDataType(onnx::TensorProto_DataType data_type) { + auto it = onnxDataTypeToGeMap.find(data_type); + if (it != onnxDataTypeToGeMap.end()) { + return it->second; + } else { + GELOGW("DecodeDataType: datatype not support %u", data_type); + return ge::DT_UNDEFINED; + } +} + +bool OnnxUtils::ParseNameIndex(const std::string &node_name_index, std::string &node_name, int32_t &index) { + auto sep = node_name_index.rfind(':'); + if (sep == std::string::npos) { + return false; + } + node_name = node_name_index.substr(0, sep); + auto index_str = node_name_index.substr(sep + 1); + index = static_cast(std::strtol(index_str.c_str(), nullptr, 10)); + return true; +} + +bool OnnxUtils::DecodeNodeLinkImp(const NodeLinkInfo &item, NodePtr &node_ptr) { + if (node_ptr == nullptr) { + 
GELOGE(GRAPH_FAILED, "DecodeNodeLinkImp: node_ptr is nullptr"); + return false; + } + // Data edge + if (item.src_out_index >= 0) { + auto src_anchor = node_ptr->GetOutDataAnchor(item.src_out_index); + auto dst_anchor = item.dst_node->GetInDataAnchor(item.dst_in_index); + if ((src_anchor == nullptr) || (dst_anchor == nullptr)) { + GELOGE(GRAPH_FAILED, "Get data anchor failed %s:%d, %s:%d ", item.src_node_name.c_str(), item.src_out_index, + item.dst_node_name.c_str(), item.dst_in_index); + return false; + } + if (src_anchor->LinkTo(dst_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Data Anchor: src_anchor->LinkTo(dst_anchor) failed"); + return false; + } + // Control edge + } else { + auto src_anchor = node_ptr->GetOutControlAnchor(); + auto dst_anchor = item.dst_node->GetInControlAnchor(); + if ((src_anchor == nullptr) || (dst_anchor == nullptr)) { + GELOGE(GRAPH_FAILED, "Get control anchor failed %s:%d, %s:%d ", item.src_node_name.c_str(), item.src_out_index, + item.dst_node_name.c_str(), item.dst_in_index); + return false; + } + if (src_anchor->LinkTo(dst_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Control Anchor: src_anchor->LinkTo(dst_anchor) failed"); + return false; + } + } + return true; +} + +bool OnnxUtils::DecodeNodeLink(const std::vector &node_proto_vector, + const std::map &node_map) { + for (const auto &node_proto : node_proto_vector) { + const auto &node_name = node_proto.name(); + auto dst_node = node_map.find(node_name); + if ((dst_node == node_map.end()) || (dst_node->second == nullptr)) { + GELOGE(GRAPH_FAILED, "destination node: %s find failed or is nullptr", node_name.c_str()); + return false; + } + int32_t dst_index = 0; + for (const auto &input : node_proto.input()) { + std::string input_node_name; + int32_t index = 0; + if (ParseNameIndex(input, input_node_name, index)) { + auto item = NodeLinkInfo{input_node_name, index, dst_node->second, dst_index, node_proto.name()}; + auto src_node = node_map.find(input_node_name); + if 
(src_node == node_map.end()) { + GELOGE(GRAPH_FAILED, "find src node: %s failed", input_node_name.c_str()); + return false; + } + auto node_ptr = src_node->second; + if (node_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "src node: %s is nullptr", input_node_name.c_str()); + return false; + } + if (!DecodeNodeLinkImp(item, node_ptr)) { + GELOGE(GRAPH_FAILED, "DecodeNodeLinkImp node: %s failed", input_node_name.c_str()); + return false; + } + } + if (index >= 0) { + dst_index++; + } + } + } + return true; +} + +void OnnxUtils::DecodeAttribute(const onnx::AttributeProto &attr_proto, std::vector &strings) { + if (attr_proto.type() != onnx::AttributeProto_AttributeType_STRINGS) { + GELOGE(GRAPH_FAILED, "Attribute %s call wrong decode attribute function", attr_proto.name().c_str()); + return; + } + for (int i = 0; i < attr_proto.strings_size(); i++) { + strings.push_back(attr_proto.strings(i)); + } +} + +void OnnxUtils::DecodeAttribute(const onnx::AttributeProto &attr_proto, std::string &value) { + if (attr_proto.type() != onnx::AttributeProto_AttributeType_STRING) { + GELOGE(GRAPH_FAILED, "Attribute %s call wrong decode attribute function", attr_proto.name().c_str()); + return; + } + value = attr_proto.s(); +} + +void OnnxUtils::DecodeAttribute(const onnx::AttributeProto &attr_proto, std::vector &ints) { + if (attr_proto.type() != onnx::AttributeProto_AttributeType_INTS) { + GELOGE(GRAPH_FAILED, "Attribute %s call wrong decode attribute function", attr_proto.name().c_str()); + return; + } + for (int i = 0; i < attr_proto.ints_size(); i++) { + ints.push_back(attr_proto.ints(i)); + } +} + +void OnnxUtils::DecodeAttribute(const onnx::AttributeProto &attr_proto, int64_t &value) { + if (attr_proto.type() != onnx::AttributeProto_AttributeType_INT) { + GELOGE(GRAPH_FAILED, "Attribute %s call wrong decode attribute function", attr_proto.name().c_str()); + return; + } + value = attr_proto.i(); +} + +void OnnxUtils::DecodeNodeAttributeForOpInAndOutDesc(const onnx::AttributeProto 
&attr_proto, + const std::string &attr_name_for_input_output_desc, int32_t index, + OpDescPtr &op_desc) { + if (op_desc == nullptr || op_desc->MutableInputDesc(static_cast(index)) == nullptr) { + GELOGE(GRAPH_FAILED, "op_desc or op_desc->MutableInputDesc(index) is nullptr"); + return; + } + if (attr_name_for_input_output_desc == "input_desc_dtype") { + auto data_type = TypeUtils::SerialStringToDataType(attr_proto.s()); + op_desc->MutableInputDesc(static_cast(index))->SetDataType(data_type); + } else if (attr_name_for_input_output_desc == "input_desc_shape") { + std::vector ints; + DecodeAttribute(attr_proto, ints); + GeShape ge_shape(ints); + op_desc->MutableInputDesc(static_cast(index))->SetShape(ge_shape); + } else if (attr_name_for_input_output_desc == "input_desc_layout") { + auto data_format = TypeUtils::SerialStringToFormat(attr_proto.s()); + op_desc->MutableInputDesc(static_cast(index))->SetFormat(data_format); + } else if (attr_name_for_input_output_desc == "input_desc_origin_shape") { + std::vector ints; + DecodeAttribute(attr_proto, ints); + GeShape ge_shape(ints); + op_desc->MutableInputDesc(static_cast(index))->SetOriginShape(ge_shape); + } else if (attr_name_for_input_output_desc == "input_desc_origin_layout") { + auto data_format = TypeUtils::SerialStringToFormat(attr_proto.s()); + op_desc->MutableInputDesc(static_cast(index))->SetOriginFormat(data_format); + } else if (attr_name_for_input_output_desc == "output_desc_dtype") { + auto data_type = TypeUtils::SerialStringToDataType(attr_proto.s()); + op_desc->MutableOutputDesc(static_cast(index))->SetDataType(data_type); + } else if (attr_name_for_input_output_desc == "output_desc_shape") { + std::vector ints; + DecodeAttribute(attr_proto, ints); + GeShape ge_shape(ints); + op_desc->MutableOutputDesc(static_cast(index))->SetShape(ge_shape); + } else if (attr_name_for_input_output_desc == "output_desc_layout") { + auto data_format = TypeUtils::SerialStringToFormat(attr_proto.s()); + 
op_desc->MutableOutputDesc(static_cast(index))->SetFormat(data_format); + } else if (attr_name_for_input_output_desc == "output_desc_origin_shape") { + std::vector ints; + DecodeAttribute(attr_proto, ints); + GeShape ge_shape(ints); + op_desc->MutableOutputDesc(static_cast(index))->SetOriginShape(ge_shape); + } else if (attr_name_for_input_output_desc == "output_desc_origin_layout") { + auto data_format = TypeUtils::SerialStringToFormat(attr_proto.s()); + op_desc->MutableOutputDesc(static_cast(index))->SetOriginFormat(data_format); + } else { + return; + } +} + +void OnnxUtils::DecodeNodeAttributeForOpDesc(const onnx::AttributeProto &attr_proto, OpDescPtr &op_desc) { + if (op_desc == nullptr) { + GELOGE(GRAPH_FAILED, "DecodeNodeAttributeForOpDesc: op_desc is nullptr"); + return; + } + const auto &attr_name = attr_proto.name(); + std::string attr_name_for_input_output_desc; + int32_t index = 0; + if (!ParseNameIndex(attr_name, attr_name_for_input_output_desc, index)) { + if (attr_name == "id") { + op_desc->SetId(attr_proto.i()); + } else if (attr_name == "stream_id") { + op_desc->SetStreamId(attr_proto.i()); + } else if (attr_name == "src_name") { + std::vector strings; + DecodeAttribute(attr_proto, strings); + op_desc->SetSrcName(strings); + } else if (attr_name == "dst_name") { + std::vector strings; + DecodeAttribute(attr_proto, strings); + op_desc->SetDstName(strings); + } else if (attr_name == "src_index") { + std::vector ints; + DecodeAttribute(attr_proto, ints); + op_desc->SetSrcIndex(ints); + } else if (attr_name == "dst_index") { + std::vector ints; + DecodeAttribute(attr_proto, ints); + op_desc->SetDstIndex(ints); + } else { + return; + } + // Update input and output desc + } else { + DecodeNodeAttributeForOpInAndOutDesc(attr_proto, attr_name_for_input_output_desc, index, op_desc); + } +} + +bool OnnxUtils::DecodeNodeDesc(const onnx::NodeProto *node_proto, OpDescPtr &op_desc) { + if (op_desc == nullptr || node_proto == nullptr) { + GELOGE(GRAPH_FAILED, " 
Op_desc is nullptr or node_proto is nullptr"); + return false; + } + // 1. Decode node_proto name and type + op_desc->SetName(node_proto->name()); + const auto &node_type_with_ge_prefix = node_proto->op_type(); + auto sep = node_type_with_ge_prefix.find(':'); + if (sep == std::string::npos) { + return false; + } + auto node_type = node_type_with_ge_prefix.substr(sep + 1); + op_desc->SetType(node_type); + // 2. Add empty input and output desc + for (const auto &attr : node_proto->attribute()) { + if (attr.name() == "input_desc_nums") { + auto size_in = attr.i(); + for (int64_t i = 0; i < size_in; i++) { + GeTensorDesc ge_tensor_desc; + if (op_desc->AddInputDesc(ge_tensor_desc) != GRAPH_SUCCESS) { + GELOGW("Add inputdesc failed"); + continue; + } + } + } + if (attr.name() == "output_desc_nums") { + auto size_out = attr.i(); + for (int64_t i = 0; i < size_out; i++) { + GeTensorDesc ge_tensor_desc; + if (op_desc->AddOutputDesc(ge_tensor_desc) != GRAPH_SUCCESS) { + GELOGW("add inputdesc failed"); + continue; + } + } + } + } + // 3.Decode node_proto attributes + for (int i = 0; i < node_proto->attribute_size(); i++) { + DecodeNodeAttributeForOpDesc(node_proto->attribute(i), op_desc); + } + return true; +} + +bool OnnxUtils::DecodeGraph(int recursion_depth, const onnx::GraphProto &graph_proto, ComputeGraphPtr &graph) { + if (recursion_depth > kMaxRecursionDepth) { + GELOGE(GRAPH_FAILED, "DecodeGraph: recursion depth is too large, abort"); + return false; + } + + graph = ComGraphMakeShared(graph_proto.name()); + if (graph == nullptr) { + GELOGE(GRAPH_FAILED, "ComputeGraph make shared failed"); + return false; + } + /// 1. Decode all nodes first, node should include input + /// and output nodes and nodes which represent sub graphs + std::map node_map; + std::vector node_proto_vector; + for (const auto &node_proto : graph_proto.node()) { + // a. 
nodes represent sub graphs + if (node_proto.op_type() == kNodeTypeForSubgraph) { + ComputeGraphPtr compute_graph; + // in this case, node only have one attr, whose type is AttributeProto_AttributeType_GRAPH + const auto &node_attr = node_proto.attribute(0); + if ((node_attr.type() == onnx::AttributeProto_AttributeType_GRAPH) && + DecodeGraph(recursion_depth + 1, node_attr.g(), compute_graph)) { + (void)graph->AddSubGraph(compute_graph); + } else { + GELOGE(GRAPH_FAILED, "Decode sub graph %s failed with node type:%d", node_proto.name().c_str(), + node_attr.type()); + return false; + } + // b. direct nodes in graph + } else { + node_proto_vector.push_back(node_proto); + OpDescPtr op_desc = ComGraphMakeShared(); + // b.1 For node desc + if (!DecodeNodeDesc(&node_proto, op_desc)) { + GELOGE(GRAPH_FAILED, "Decode node desc %s failed ", node_proto.name().c_str()); + return false; + } + auto node = graph->AddNode(op_desc); + node_map.insert(std::make_pair(node_proto.name(), node)); + } + } + /// We get all nodes in graph here + /// b.2 For node link + if (!DecodeNodeLink(node_proto_vector, node_map)) { + GELOGE(GRAPH_FAILED, "Decode node link failed"); + return false; + } + + // 2. Add inputs nodes for graph + for (const auto &input : graph_proto.input()) { + const auto &input_node_name = input.name(); + auto input_node_item = node_map.find(input_node_name); + if (input_node_item == node_map.end()) { + GELOGE(GRAPH_FAILED, "cannot find graph's input node %s in node_", input_node_name.c_str()); + return false; + } + auto ret = graph->AddInputNode(input_node_item->second); + GE_CHK_BOOL_EXEC(ret != nullptr, continue, "Add inputnode failed"); + } + // 3. 
Add outputs nodes for graph + for (const auto &output : graph_proto.output()) { + const auto &output_node_name = output.name(); + auto output_node_item = node_map.find(output_node_name); + if (output_node_item == node_map.end()) { + GELOGE(GRAPH_FAILED, "cannot find graph's output node %s in node_", output_node_name.c_str()); + return false; + } + auto ret = graph->AddOutputNode(output_node_item->second); + if (ret == nullptr) { + GELOGW("Add outputnode failed,out put node is %s", output_node_name.c_str()); + continue; + } + } + return true; +} + +bool OnnxUtils::ConvertModelProtoToGeModel(const onnx::ModelProto &model_proto, ge::Model &model) { + model.name_ = model_proto.producer_name(); + model.version_ = static_cast(model_proto.model_version()); + + auto &graph_proto = model_proto.graph(); + ComputeGraphPtr compute_graph; + // 0 means recursion depth, father call + if (!DecodeGraph(0, graph_proto, compute_graph)) { + GELOGE(GRAPH_FAILED, "Decode compute graph from graph_proto failed"); + return false; + } + model.graph_ = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + return true; +} +} // namespace ge diff --git a/src/common/graph/utils/ge_ir_utils.h b/src/common/graph/utils/ge_ir_utils.h new file mode 100644 index 00000000..d18500a0 --- /dev/null +++ b/src/common/graph/utils/ge_ir_utils.h @@ -0,0 +1,192 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef COMMON_GRAPH_UTILS_GE_IR_UTILS_H_ +#define COMMON_GRAPH_UTILS_GE_IR_UTILS_H_ + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include + +#include "proto/ge_ir.pb.h" +#include "proto/onnx.pb.h" + +namespace ge { +const int kOffsetToString = 2; + +/// +/// @ingroup ge_ir_utils +/// @brief RepeatedField->String +/// @param [in] const rpd_field RepeatedField +/// @return String +/// +template +const std::string ToString(const google::protobuf::RepeatedField &rpd_field) { + std::stringstream ss; + ss << "["; + for (const T &x : rpd_field) { + ss << x; + ss << ", "; + } + std::string str_ret = ss.str().substr(0, ss.str().length() - kOffsetToString); + str_ret += "]"; + return str_ret; +} + +/// +/// @ingroup ge_ir_utils +/// @brief RepeatedPtrField->String +/// @param [in] const rpd_field RepeatedPtrField +/// @return String +/// +template +const std::string ToString(const google::protobuf::RepeatedPtrField &rpd_ptr_field) { + std::stringstream ss; + ss << "["; + for (const T &x : rpd_ptr_field) { + ss << x; + ss << ", "; + } + std::string str_ret = ss.str().substr(0, ss.str().length() - kOffsetToString); + str_ret += "]"; + return str_ret; +} + +/// +/// @ingroup ge_ir_utils +/// @brief check, if not equal, log with tag +/// @param [in] const left_value, right_value reference, log_info_tag +/// @return bool +/// +template +bool IsEqual(const T &l_value, const T &r_value, const std::string &log_info_tag) { + if (l_value == r_value) { + return true; + } else { + GELOGE(GRAPH_FAILED, "Check failed with %s", log_info_tag.c_str()); + return false; + } +} + +class OnnxUtils { + public: + enum DumpLevel { NO_DUMP = 0, DUMP_ALL = 1, DUMP_WITH_OUT_DATA = 2, DUMP_WITH_OUT_DESC = 3, DUMP_LEVEL_END }; + + static bool ConvertGeModelToModelProto(const ge::Model &model, onnx::ModelProto &model_proto); + + static bool 
ConvertModelProtoToGeModel(const onnx::ModelProto &model_proto, ge::Model &model); + + private: + // Part 1: from IR convert to ONNX Protobuf + static void AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, + const std::string &name, void *data); + + static void AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, + const std::string &name, ::google::protobuf::RepeatedField<::google::protobuf::int64> data); + + static void AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, + const std::string &name, ::google::protobuf::RepeatedField data); + + static void AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, + const std::string &name, ::google::protobuf::RepeatedField data); + + static void AddAttrProto(onnx::NodeProto *node_proto, onnx::AttributeProto_AttributeType type, + const std::string &name, ::google::protobuf::RepeatedPtrField<::std::string> data); + + static void AddAttrProtoFromNodeMembers(const NodePtr &node, onnx::NodeProto *node_proto); + + static void AddAttrProtoFromAttribute(const std::pair &string_attr_value, + onnx::NodeProto *node_proto); + + static void AddAttrProtoForOpInAndOutDesc(onnx::NodeProto *node_proto, const OpDescPtr &op_desc); + + static void AddAttrProtoForAttrsFromOpDef(const ge::proto::OpDef *op_def, onnx::NodeProto *node_proto); + + static onnx::TensorProto_DataType EncodeDataType(ge::DataType data_type); + + static void EncodeNodeLinkForNetronVisual(const NodePtr &node, onnx::NodeProto *node_proto); + + static bool EncodeNodeLink(const NodePtr &node, onnx::NodeProto *node_proto); + + static bool EncodeNodeDesc(const NodePtr &node, onnx::NodeProto *node_proto); + + static bool EncodeNode(const NodePtr &node, onnx::NodeProto *node_proto); + + static void EncodeTypeProtoTensorType(const NodePtr &node, onnx::TypeProto_Tensor *tensor_type); + + static void EncodeValueInfo(const NodePtr &n, onnx::ValueInfoProto 
*v); + + static bool EncodeGraph(const ConstComputeGraphPtr &graph, onnx::GraphProto *graph_proto); + + /// Part 2: from ONNX Protobuf convert to IR + /// Describes node's link relationships + struct NodeLinkInfo { + std::string src_node_name; + int32_t src_out_index; + NodePtr dst_node; + int32_t dst_in_index; + std::string dst_node_name; + }; + + // Parse node name and index + static bool ParseNameIndex(const std::string &node_name_index, std::string &node_name, int32_t &index); + + static ge::DataType DecodeDataType(onnx::TensorProto_DataType data_type); + + static void DecodeAttribute(const onnx::AttributeProto &attr_proto, std::vector &strings); + + static void DecodeAttribute(const onnx::AttributeProto &attr_proto, std::vector &ints); + + static void DecodeAttribute(const onnx::AttributeProto &attr_proto, int64_t &value); + + static void DecodeAttribute(const onnx::AttributeProto &attr_proto, std::string &value); + + static void DecodeNodeAttributeForOpInAndOutDesc(const onnx::AttributeProto &attr_proto, + const std::string &attr_name_for_input_output_desc, int32_t index, + OpDescPtr &op_desc); + + static void DecodeNodeAttributeForOpDesc(const onnx::AttributeProto &attr_proto, OpDescPtr &op_desc); + + static bool DecodeNodeLinkImp(const NodeLinkInfo &item, NodePtr &node_ptr); + + static bool DecodeNodeLink(const std::vector &node_proto_vector, + const std::map &node_map); + + static bool DecodeNodeDesc(const onnx::NodeProto *node_proto, OpDescPtr &node); + + static bool DecodeGraph(int recursion_depth, const onnx::GraphProto &graph_proto, ComputeGraphPtr &graph); +}; +} // namespace ge + +#endif // COMMON_GRAPH_UTILS_GE_IR_UTILS_H_ diff --git a/src/common/graph/utils/graph_utils.cc b/src/common/graph/utils/graph_utils.cc new file mode 100644 index 00000000..4852ba2e --- /dev/null +++ b/src/common/graph/utils/graph_utils.cc @@ -0,0 +1,1178 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "utils/graph_utils.h" + +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "./ge_context.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "proto/ge_ir.pb.h" +#include "utils/attr_utils.h" +#include "utils/ge_ir_utils.h" +#include "utils/node_utils.h" + +using google::protobuf::io::FileOutputStream; + +namespace ge { +enum DumpGraphLevel { + kDumpLevel1 = 1, + kDumpLevel2 = 2, + kDumpLevel3 = 3, + kDumpLevelOther, +}; + +namespace { +const int32_t kBaseOfIntegerValue = 10; +#ifdef FMK_SUPPORT_DUMP +const char *const kDumpGeGraph = "DUMP_GE_GRAPH"; +#endif +const char *const kDumpGraphLevel = "DUMP_GRAPH_LEVEL"; +const char *const kDumpStrBuild = "Build"; +const char *const kDumpStrPartition = "partition"; +const char *const kDumpStrOptimizeSubgraph = "OptimizeSubGraph"; +const char *const kDumpStrAicpu = "Aicpu"; +}; // namespace + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::AddEdge(const OutDataAnchorPtr &src, + const InDataAnchorPtr &dst) { + if ((src != nullptr) && (src->LinkTo(dst) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Add edge Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::AddEdge(const AnchorPtr &src, + const AnchorPtr &dst) { + OutDataAnchorPtr src_data = Anchor::DynamicAnchorCast(src); + InDataAnchorPtr dst_data = 
Anchor::DynamicAnchorCast(dst); + OutControlAnchorPtr src_control = Anchor::DynamicAnchorCast(src); + InControlAnchorPtr dst_control = Anchor::DynamicAnchorCast(dst); + if ((src_data != nullptr) && (dst_data != nullptr) && (src_data->LinkTo(dst_data) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + if ((src_data != nullptr) && (dst_control != nullptr) && (src_data->LinkTo(dst_control) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + if ((src_control != nullptr) && (dst_control != nullptr) && (src_control->LinkTo(dst_control) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + if ((src_control != nullptr) && (dst_data != nullptr) && (src_control->LinkTo(dst_data) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Add edge Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::AddEdge(const OutDataAnchorPtr &src, + const Format &src_format, + const InDataAnchorPtr &dst, + const Format &dst_format) { + if ((src != nullptr) && (src->LinkTo(dst) == GRAPH_SUCCESS)) { + (void)AnchorUtils::SetFormat(src, src_format); + (void)AnchorUtils::SetFormat(dst, dst_format); + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Add edge Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::AddEdge(const OutControlAnchorPtr &src, + const InControlAnchorPtr &dst) { + if ((src != nullptr) && (src->LinkTo(dst) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Add edge Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::AddEdge(const OutDataAnchorPtr &src, + const InControlAnchorPtr &dst) { + if ((src != nullptr) && (src->LinkTo(dst) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Add edge Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::RemoveEdge(const OutDataAnchorPtr &src, + const 
InDataAnchorPtr &dst) { + if ((src != nullptr) && (src->Unlink(dst) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Remove edge Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::RemoveEdge(const AnchorPtr &src, + const AnchorPtr &dst) { + if ((src != nullptr) && (src->Unlink(dst) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Remove edge Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::RemoveEdge(const OutControlAnchorPtr &src, + const InControlAnchorPtr &dst) { + if ((src != nullptr) && (src->Unlink(dst) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Remove edge Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::RemoveEdge(const OutDataAnchorPtr &src, + const InControlAnchorPtr &dst) { + if ((src != nullptr) && (src->Unlink(dst) == GRAPH_SUCCESS)) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Remove edge Failed."); + return GRAPH_FAILED; +} + +graphStatus GraphUtils::ReplaceEdgeDst(const OutDataAnchorPtr &src, const InDataAnchorPtr &dst, + const InDataAnchorPtr &new_dst) { + if (RemoveEdge(src, dst) == GRAPH_SUCCESS && AddEdge(src, new_dst) == GRAPH_SUCCESS) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Replace edge dst Failed."); + return GRAPH_FAILED; +} + +graphStatus GraphUtils::ReplaceEdgeDst(const OutControlAnchorPtr &src, const InControlAnchorPtr &dst, + const InControlAnchorPtr &new_dst) { + if (RemoveEdge(src, dst) == GRAPH_SUCCESS && AddEdge(src, new_dst) == GRAPH_SUCCESS) { + return GRAPH_SUCCESS; + } + GELOGE(GRAPH_FAILED, "Replace edge dst Failed."); + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::InsertNodeBetweenDataAnchors( + const OutDataAnchorPtr &src, const InDataAnchorPtr &dst, const NodePtr &new_node) { + 
GE_CHECK_NOTNULL(src); + GE_CHECK_NOTNULL(dst); + GE_CHECK_NOTNULL(new_node); + + InDataAnchorPtr node_in_anchor = new_node->GetInDataAnchor(0); + GE_CHK_BOOL_RET_STATUS(node_in_anchor != nullptr, GRAPH_FAILED, "this node has not inDataAnchor"); + OutDataAnchorPtr node_out_anchor = new_node->GetOutDataAnchor(0); + GE_CHK_BOOL_RET_STATUS(node_out_anchor != nullptr, GRAPH_FAILED, "this node has not outDataAnchor"); + GE_CHK_STATUS_RET(src->ReplacePeer(dst, node_in_anchor, node_out_anchor), "ReplacePeer Failed"); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +GraphUtils::RemoveNodeWithoutRelink(const ComputeGraphPtr &compute_graph, const NodePtr &node) { + GE_CHECK_NOTNULL(compute_graph); + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr should not be null."); + return GRAPH_FAILED; + } + + // If the node save as input node, delete it + (void)compute_graph->RemoveInputNode(node); + + // If the node save as output node, delete it + (void)compute_graph->RemoveOutputNode(node); + + auto iter = find(compute_graph->nodes_.begin(), compute_graph->nodes_.end(), node); + if (iter != compute_graph->nodes_.end()) { + compute_graph->nodes_.erase(iter); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +/// Add two edges to the new node, respectively connecting the SRC and DST +/// associated with the original edge +/// A ---> B transferred to A ---> N ---> B +graphStatus InsertTransNode(ComputeGraph &compute_graph, const InDataAnchorPtr &in_data_anchor, + const std::vector &vec_op_desc) { + for (auto &op_desc : vec_op_desc) { + GE_CHECK_NOTNULL(op_desc); + + auto ret = op_desc->AddInputDesc(GeTensorDesc()); + GE_CHK_BOOL_EXEC(ret == GRAPH_SUCCESS, return GRAPH_FAILED, "Add input desc failed"); + ret = op_desc->AddOutputDesc(GeTensorDesc()); + GE_CHK_BOOL_EXEC(ret == GRAPH_SUCCESS, return GRAPH_FAILED, "Add input desc failed"); + auto node_to_insert = compute_graph.AddNode(op_desc); + + GE_CHECK_NOTNULL(node_to_insert); 
+ GE_CHECK_NOTNULL(in_data_anchor->GetPeerOutAnchor()); + + auto src = in_data_anchor->GetPeerOutAnchor()->GetOwnerNode(); + if (!src) { + GELOGE(GRAPH_FAILED, "src nullptr error."); + return GRAPH_FAILED; + } + + auto src_out_index = in_data_anchor->GetPeerOutAnchor()->GetIdx(); + + auto dst = in_data_anchor->GetOwnerNode(); + if (!dst) { + GELOGE(GRAPH_FAILED, "dst nullptr error."); + return GRAPH_FAILED; + } + + auto dst_in_index = in_data_anchor->GetIdx(); + + auto in_data_anchor_src_format = AnchorUtils::GetFormat(in_data_anchor->GetPeerOutAnchor()); + auto in_data_anchor_dst_format = AnchorUtils::GetFormat(in_data_anchor); + + GE_CHECK_NOTNULL(src->GetOutDataAnchor(src_out_index)); + GE_CHECK_NOTNULL(dst->GetInDataAnchor(dst_in_index)); + + ret = GraphUtils::RemoveEdge(src->GetOutDataAnchor(src_out_index), dst->GetInDataAnchor(dst_in_index)); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Remove edge failed"); + return GRAPH_FAILED; + } + + GE_CHECK_NOTNULL(node_to_insert->GetInDataAnchor(0)); + GE_CHECK_NOTNULL(node_to_insert->GetOutDataAnchor(0)); + + ret = GraphUtils::AddEdge(src->GetOutDataAnchor(src_out_index), node_to_insert->GetInDataAnchor(0)); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Add edge failed"); + return ret; + } + ret = GraphUtils::AddEdge(node_to_insert->GetOutDataAnchor(0), dst->GetInDataAnchor(dst_in_index)); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Add edge failed"); + return ret; + } + + if (op_desc->HasAttr("input_format")) { + int64_t input_format = 0; + int64_t output_format = 0; + if (!AttrUtils::GetInt(op_desc, "input_format", input_format)) { + GELOGW("get attr input_format failed"); + } + if (!AttrUtils::GetInt(op_desc, "output_format", output_format)) { + GELOGW("get attr output_format failed"); + } + + GE_CHECK_NOTNULL(node_to_insert->GetInDataAnchor(0)->GetPeerOutAnchor()); + GE_CHK_BOOL_RET_STATUS(node_to_insert->GetOutDataAnchor(0)->GetPeerInDataAnchors().empty(), GRAPH_FAILED, + "Vistor is 
empty"); + GE_CHECK_NOTNULL(node_to_insert->GetOutDataAnchor(0)->GetPeerInDataAnchors().at(0)); + + (void)AnchorUtils::SetFormat(node_to_insert->GetInDataAnchor(0)->GetPeerOutAnchor(), in_data_anchor_src_format); + (void)AnchorUtils::SetFormat(node_to_insert->GetInDataAnchor(0), (Format)input_format); + (void)AnchorUtils::SetFormat(node_to_insert->GetOutDataAnchor(0), (Format)output_format); + (void)AnchorUtils::SetFormat(node_to_insert->GetOutDataAnchor(0)->GetPeerInDataAnchors().at(0), + in_data_anchor_dst_format); + } + std::vector original_nodes; + GraphUtils::RecordOriginalNames(original_nodes, node_to_insert); + } + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::InsertTransNode( + ComputeGraphPtr compute_graph, const InDataAnchorPtr &in_data_anchor, const std::vector &vec_op_desc) { + GE_CHECK_NOTNULL(compute_graph); + GE_CHECK_NOTNULL(in_data_anchor); + graphStatus ret = + ge::InsertTransNode(*compute_graph, in_data_anchor, vec_op_desc) == GRAPH_SUCCESS ? GRAPH_SUCCESS : GRAPH_FAILED; + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::RemoveJustNode(ComputeGraph &compute_graph, + const NodePtr &node) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "The node ptr should be not null."); + return GRAPH_FAILED; + } + auto iter = find(compute_graph.nodes_.begin(), compute_graph.nodes_.end(), node); + if (iter != compute_graph.nodes_.end()) { + compute_graph.nodes_.erase(iter); + return GRAPH_SUCCESS; + } + return GRAPH_FAILED; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::RemoveJustNode(ComputeGraphPtr compute_graph, + const NodePtr &node) { + GE_CHECK_NOTNULL(compute_graph); + GE_CHECK_NOTNULL(node); + graphStatus ret = (RemoveJustNode(*compute_graph, node) == GRAPH_SUCCESS ? 
GRAPH_SUCCESS : GRAPH_FAILED); + return ret; +} + +void GraphUtils::RecordOriginalNames(std::vector original_nodes, const ge::NodePtr &node) { + GE_CHK_BOOL_EXEC(node != nullptr, return, "node is null."); + std::vector original_names; + for (const auto &node_tmp : original_nodes) { + std::vector names_tmp; + ge::OpDescPtr opdesc_tmp = node_tmp->GetOpDesc(); + (void)ge::AttrUtils::GetListStr(opdesc_tmp, "original_op_names", names_tmp); + if (names_tmp.size() != 0) { + original_names.insert(original_names.end(), names_tmp.begin(), names_tmp.end()); + } else { + original_names.push_back(opdesc_tmp->GetName()); + } + } + if (original_names.size() == 0) { + std::string tmp; + original_names.push_back(tmp); + } + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListStr(node->GetOpDesc(), "original_op_names", original_names), return, + "Set original_op_names fail."); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void GraphUtils::RecordOriginalNames(std::vector names_tmp, + const ge::NodePtr &node) { + GE_CHK_BOOL_EXEC(node != nullptr, return, "node is null."); + std::vector original_names; + if (names_tmp.size() != 0) { + original_names.insert(original_names.end(), names_tmp.begin(), names_tmp.end()); + } else { + std::string tmp; + original_names.push_back(tmp); + } + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListStr(node->GetOpDesc(), "original_op_names", original_names), return, + "Set original_op_names fail."); +} + +// Check global_step Node has IsVariable and Read. 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool GraphUtils::CheckGlobalStepNode(const ge::NodePtr &node) { + GE_CHK_BOOL_EXEC( + node != nullptr, { return false; }, "node is null."); + bool has_variable = false; + bool has_cond_read = false; + for (const auto &out : node->GetOutDataNodes()) { + if ((out->GetType() == "VarIsInitializedOp") && (out->GetName() == "global_step/IsVariableInitialized")) { + has_variable = true; + } else if ((out->GetType() == "FrameworkOp") && (out->GetName() == "global_step/cond/read/Switch")) { + has_cond_read = true; + } + } + return (has_variable && has_cond_read); +} + +// Check origin ComputeGraph is TrainGraph. +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool GraphUtils::CheckIsTrainGraph( + const ge::ComputeGraphPtr &compute_graph) { + GE_CHK_BOOL_EXEC( + compute_graph != nullptr, { return false; }, "compute_graph is nullptr"); + + bool is_iterator_v2 = false; + bool is_train_graph = false; + for (const auto &node : compute_graph->GetDirectNode()) { + if (node->GetType() == "ApplyMomentum") { + return true; + } + // Check global_step has IsVariable and Read. + if ((node->GetType() == "Variable") && (node->GetName() == "global_step")) { + is_train_graph = CheckGlobalStepNode(node); + } else if ((node->GetType() == "FrameworkOp") && (node->GetName() == "IteratorGetNext")) { + // Train Graph must has GetNext. + is_iterator_v2 = true; + } + if (is_iterator_v2 && is_train_graph) { + break; + } + } + GELOGI("Generate: compute_graph is_iterator_v2[%d], is_train_graph[%d].", is_iterator_v2, is_train_graph); + return (is_iterator_v2 && is_train_graph); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool GraphUtils::MatchDumpStr(const std::string &suffix) { + char *dump_level = std::getenv(kDumpGraphLevel); + int64_t dump_graph_level = + (dump_level != nullptr) ? 
std::strtol(dump_level, nullptr, kBaseOfIntegerValue) : kDumpLevel2; + if (dump_graph_level == kDumpLevel1) { + return false; + } + + if (dump_graph_level == kDumpLevel2 && ((suffix.find(kDumpStrPartition) != std::string::npos) || + (suffix.find(kDumpStrOptimizeSubgraph) != std::string::npos) || + (suffix.find(kDumpStrAicpu) != std::string::npos))) { + return true; + } + + if (dump_graph_level == kDumpLevel3 && suffix.compare(kDumpStrBuild) != 0) { + return true; + } + + return false; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void GraphUtils::DumpGEGraph(const ge::ComputeGraphPtr &graph, + const std::string &suffix, + bool is_always_dump) { +#ifdef FMK_SUPPORT_DUMP + char *dump_ge_graph = std::getenv(kDumpGeGraph); + GE_IF_BOOL_EXEC(dump_ge_graph == nullptr && !is_always_dump, return;); + + // dump the graph according to different graph level + if (GraphUtils::MatchDumpStr(suffix)) { + return; + } + + // file name + static int file_idx = 0; + const int dump_graph_index_width = 5; + file_idx++; + GELOGD("Start to dump om txt: %d", file_idx); + + static int max_dumpfile_num = 0; + if (max_dumpfile_num == 0) { + string opt = "0"; + (void)GetContext().GetOption("ge.maxDumpFileNum", opt); + max_dumpfile_num = std::strtol(opt.c_str(), nullptr, kBaseOfIntegerValue); + } + if (max_dumpfile_num != 0 && file_idx > max_dumpfile_num) { + GELOGW("dump graph file cnt > maxDumpFileNum, maxDumpFileCnt=%d.", max_dumpfile_num); + return; + } + + std::stringstream stream_file_name; + stream_file_name << "ge_proto_" << std::setw(dump_graph_index_width) << std::setfill('0') << file_idx; + stream_file_name << "_" << suffix << ".txt"; + std::string proto_file = stream_file_name.str(); + + // Create buffer + ge::Model model("", ""); + model.SetGraph(GraphUtils::CreateGraphFromComputeGraph(std::const_pointer_cast(graph))); + Buffer buffer; + model.Save(buffer); + + // Write file + ge::proto::ModelDef ge_proto; + if (buffer.GetData() != nullptr) { + std::string 
str(reinterpret_cast(buffer.GetData()), buffer.GetSize()); + if (!ge_proto.ParseFromString(str)) { + GELOGE(GRAPH_FAILED, "parse from string failed."); + return; + } + char real_path[PATH_MAX] = {0x00}; + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(strlen(proto_file.c_str()) >= PATH_MAX, return, "file path is too longer!"); + GE_IF_BOOL_EXEC(realpath(proto_file.c_str(), real_path) == nullptr, + GELOGI("file %s does not exist, it will be created.", proto_file.c_str())); + + GraphUtils::WriteProtoToTextFile(ge_proto, real_path); + } +#else + GELOGW("need to define FMK_SUPPORT_DUMP for dump graph."); +#endif +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool GraphUtils::LoadGEGraph(const char *file, + ge::ComputeGraph &compute_graph) { + ge::proto::ModelDef model_def; + // Get ModelDef object from file generated by DumpGEGraph() + if (!ReadProtoFromTextFile(file, &model_def)) { + GELOGE(GRAPH_FAILED, "Get ModelDef failed from file"); + return false; + } + ge::Model model; + // Get Model object from ModelDef by deserialize ModelDef + if (model.Load(model_def) == GRAPH_SUCCESS) { + compute_graph = *(GraphUtils::GetComputeGraph(model.GetGraph())); + return true; + } else { + GELOGE(GRAPH_FAILED, "Get Model failed from ModelDef"); + return false; + } +} + +// Printing protocol messages in text format is useful for debugging and human editing of messages. 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void GraphUtils::WriteProtoToTextFile( + const google::protobuf::Message &proto, const char *real_path) { +#ifdef FMK_SUPPORT_DUMP + const int FILE_AUTHORITY = 0600; + int fd = open(real_path, O_WRONLY | O_CREAT | O_TRUNC, FILE_AUTHORITY); + if (fd < 0) { + GELOGE(GRAPH_FAILED, "fail to open the file: %s", real_path); + return; + } + google::protobuf::io::FileOutputStream *output = new (std::nothrow) FileOutputStream(fd); + if (output == nullptr) { + GELOGE(GRAPH_FAILED, "Output is nullptr"); + if (close(fd) != 0) { + GELOGE(GRAPH_FAILED, "Close fileoutputstream failed"); + } + return; + } + bool ret = google::protobuf::TextFormat::Print(proto, output); + if (!ret) { + GELOGE(GRAPH_FAILED, "Fail to write the file: %s", real_path); + delete output; + output = nullptr; + GE_CHK_BOOL_EXEC(close(fd) == 0, return, "Close fileoutputstream failed"); + return; + } + delete output; + output = nullptr; + GE_CHK_BOOL_EXEC(close(fd) == 0, return, "Close fileoutputstream failed"); + + FILE *file = fopen(real_path, "rb"); + if (file == nullptr) { + return; + } + if (fseek(file, 0L, SEEK_END) == 0) { + int64_t fileSize = ftell(file); + static int64_t maxDumpFileSize = 0; + if (maxDumpFileSize == 0) { + string opt = "0"; + (void)GetContext().GetOption("ge.maxDumpFileSize", opt); + maxDumpFileSize = atol(opt.c_str()); + } + if (maxDumpFileSize != 0 && fileSize != -1 && fileSize > maxDumpFileSize) { + GELOGW("dump graph file size > maxDumpFileSize, maxDumpFileSize=%ld.", maxDumpFileSize); + GE_IF_BOOL_EXEC(std::remove(real_path) != 0, GELOGW("remove %s failed", real_path)); + GE_CHK_BOOL_EXEC(fclose(file) == 0, return, "Fclose %s failed", real_path); + return; + } + } + GE_CHK_BOOL_EXEC(fclose(file) == 0, return, "Fclose fileoutputstream failed"); +#else + GELOGW("need to define FMK_SUPPORT_DUMP for dump graph."); +#endif +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool GraphUtils::ReadProtoFromTextFile( + const char *file, 
google::protobuf::Message *proto) { + if (file == nullptr || proto == nullptr) { + GELOGE(GRAPH_FAILED, "incorrect parameter. file path or message is invalid"); + return false; + } + std::ifstream fs(file, std::ifstream::in); + if (!fs.is_open()) { + GELOGE(GRAPH_FAILED, "proto file '%s' open fail.", file); + return false; + } + google::protobuf::io::IstreamInputStream input(&fs); + bool ret = google::protobuf::TextFormat::Parse(&input, proto); + if (!ret) { + GELOGE(GRAPH_FAILED, "parse proto from text ret fail, please check your text file '%s'.", file); + } + fs.close(); + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY void GraphUtils::DumpGEGraphToOnnx(const ge::ComputeGraph &compute_graph, + const std::string &suffix) { +#ifdef FMK_SUPPORT_DUMP + char *dump_ge_graph = std::getenv(kDumpGeGraph); + int64_t dump_ge_graph_level = + (dump_ge_graph != nullptr) ? std::strtol(dump_ge_graph, nullptr, kBaseOfIntegerValue) : OnnxUtils::NO_DUMP; + if ((dump_ge_graph_level == OnnxUtils::NO_DUMP) || (dump_ge_graph_level >= OnnxUtils::DUMP_LEVEL_END)) { + GELOGD("Skip DumpGEGraphToOnnx with dump_ge_graph_level %ld.", dump_ge_graph_level); + return; + } + + // dump the graph according to different graph level + if (GraphUtils::MatchDumpStr(suffix)) { + return; + } + + // 1.Get onnx::ModelProto from ge::Model + ge::Model model("GE", ""); + std::shared_ptr compute_graph_ptr = ComGraphMakeShared(compute_graph); + model.SetGraph(GraphUtils::CreateGraphFromComputeGraph(std::const_pointer_cast(compute_graph_ptr))); + onnx::ModelProto model_proto; + if (!OnnxUtils::ConvertGeModelToModelProto(model, model_proto)) { + GELOGE(GRAPH_FAILED, "DumpGEGraphToOnnx failed."); + return; + } + + // 2.Set file name + static int file_index = 0; + file_index++; + GELOGD("Start to dump ge onnx file: %d", file_index); + + static int max_dumpfile_num = 0; + if (max_dumpfile_num == 0) { + string opt = "0"; + (void)GetContext().GetOption("ge.maxDumpFileNum", opt); + max_dumpfile_num = 
std::strtol(opt.c_str(), nullptr, kBaseOfIntegerValue); + } + if (max_dumpfile_num != 0 && file_index > max_dumpfile_num) { + GELOGW("dump graph file cnt > maxDumpFileNum, maxDumpFileNum=%d.", max_dumpfile_num); + return; + } + + /// 99999 graphs can be dumped at most at one time + /// setw(5) is for formatted sort + std::stringstream stream_file_name; + stream_file_name << "ge_onnx_" << std::setw(5) << std::setfill('0') << file_index; + stream_file_name << "_" << suffix << ".pbtxt"; + std::string proto_file = stream_file_name.str(); + if ((proto_file.length()) >= NAME_MAX) { + GELOGE(GRAPH_FAILED, "File name is too longer!"); + return; + } + std::unique_ptr real_path(new (std::nothrow) char[PATH_MAX]{0}); + if (real_path == nullptr) { + GELOGE(GRAPH_FAILED, "New real_path failed."); + return; + } + /// Returning nullptr means 3 case as follows: + /// a.path is PATH_MAX chars or more + /// b.the file does not exist + /// c.the path has no permissions + /// Distinguish between last the two cases in the function WriteProtoToTextFile call open() + if (realpath(proto_file.c_str(), real_path.get()) == nullptr) { + // For case a + if (errno == ENAMETOOLONG) { + GELOGE(GRAPH_FAILED, "Call realpath failed: path is PATH_MAX chars or more."); + return; + } + } + + // 3. Serialize to file in current path + GraphUtils::WriteProtoToTextFile(model_proto, real_path.get()); +#else + GELOGW("need to define FMK_SUPPORT_DUMP for dump graph."); +#endif +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool GraphUtils::LoadGEGraphFromOnnx(const char *file, + ge::ComputeGraph &compute_graph) { + if (file == nullptr) { + GELOGE(GRAPH_FAILED, "incorrect parameter. file path is invalid"); + return false; + } + onnx::ModelProto model_proto; + // 1. 
Get ModelDef object from file generated by DumpGEGraphToOnnx() + if (!ReadProtoFromTextFile(file, &model_proto)) { + GELOGE(GRAPH_FAILED, "Get ModelDef from file failed"); + return false; + } + // 2.Convert onnx::ModelProto To ge::Model + ge::Model model; + if (!OnnxUtils::ConvertModelProtoToGeModel(model_proto, model)) { + GELOGE(GRAPH_FAILED, "Convert ModelDef to Model failed"); + return false; + } + auto compute_graph_ptr = GraphUtils::GetComputeGraph(model.GetGraph()); + if (compute_graph_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "Get compute graph from Model failed"); + return false; + } + compute_graph = *(compute_graph_ptr); + return true; +} + +namespace { +using InNodesToOut = std::unordered_map>; + +inline std::string GetNodeNameByAnchor(const Anchor *anchor) { + if (anchor == nullptr) { + GELOGE(GRAPH_FAILED, "Anchor is nullptr"); + return "Null"; + } + auto node = anchor->GetOwnerNode(); + return node == nullptr ? "Null" : node->GetName(); +} + +graphStatus ReplaceOutDataAnchor(const OutDataAnchorPtr &new_anchor, const OutDataAnchorPtr &old_anchor, + InNodesToOut *in_nodes_to_out = nullptr) { + if (new_anchor == nullptr || old_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "new_anchor or old_anchor is nullptr"); + return GRAPH_PARAM_INVALID; + } + auto new_node = new_anchor->GetOwnerNode(); + for (const auto &peer_in_anchor : old_anchor->GetPeerInDataAnchors()) { + auto ret = peer_in_anchor->Unlink(old_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Failed to unlink old anchor link from %s(%d) to %s(%d)", + GetNodeNameByAnchor(old_anchor.get()).c_str(), old_anchor->GetIdx(), + GetNodeNameByAnchor(peer_in_anchor.get()).c_str(), peer_in_anchor->GetIdx()); + return GRAPH_FAILED; + } + ret = peer_in_anchor->LinkFrom(new_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Failed to relink new anchors from %s(%d) to %s(%d)", + GetNodeNameByAnchor(new_anchor.get()).c_str(), new_anchor->GetIdx(), + 
GetNodeNameByAnchor(peer_in_anchor.get()).c_str(), peer_in_anchor->GetIdx()); + return GRAPH_FAILED; + } + + if (in_nodes_to_out != nullptr) { + (*in_nodes_to_out)[new_node].insert(peer_in_anchor->GetOwnerNode()); + } + } + return GRAPH_SUCCESS; +} + +graphStatus RelinkDataIO(const NodePtr &node, const std::vector &io_map, InNodesToOut &in_nodes_to_out) { + GE_CHECK_NOTNULL(node); + auto in_data_anchors = node->GetAllInDataAnchors(); + auto out_data_anchors = node->GetAllOutDataAnchors(); + if (out_data_anchors.size() < io_map.size()) { + GELOGE(GRAPH_FAILED, "The io_map specified for node %s type %s is larger %zu than the actual size %zu", + node->GetName().c_str(), node->GetType().c_str(), io_map.size(), out_data_anchors.size()); + return GRAPH_PARAM_INVALID; + } + + for (size_t i = 0; i < out_data_anchors.size(); ++i) { + auto out_data_anchor = out_data_anchors.at(i); + if (out_data_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "Failed to relink for node %s type %s, the out data anchor at index %zu is null", + node->GetName().c_str(), node->GetType().c_str(), i); + return GRAPH_FAILED; + } + + int in_index = -1; + if (i < io_map.size()) { + in_index = io_map.at(i); + } + if (in_index < 0) { + out_data_anchor->UnlinkAll(); + continue; + } + + if (in_index >= static_cast(in_data_anchors.size())) { + GELOGE(GRAPH_PARAM_INVALID, "Failed to relink for node %s type %s, invalid index %d specified for input(%zu)", + node->GetName().c_str(), node->GetType().c_str(), in_index, in_data_anchors.size()); + return GRAPH_PARAM_INVALID; + } + auto in_anchor = in_data_anchors.at(in_index); + if (in_anchor == nullptr) { + GELOGW("Invalid in data anchors(null) found at node %s type %s index %d, ignore it.", node->GetName().c_str(), + node->GetType().c_str(), in_index); + continue; + } + auto peer_out_anchor = in_anchor->GetPeerOutAnchor(); + if (peer_out_anchor == nullptr) { + continue; + } + if (peer_out_anchor->Unlink(in_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, + "Failed 
relink node %s type %s, failed to unlink the data link" + " from %s(%d) to it at input-index %d", + node->GetName().c_str(), node->GetType().c_str(), GetNodeNameByAnchor(peer_out_anchor.get()).c_str(), + peer_out_anchor->GetIdx(), in_index); + return GRAPH_FAILED; + } + auto ret = ReplaceOutDataAnchor(peer_out_anchor, out_data_anchor, &in_nodes_to_out); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Failed to relink node %s type %s for relinking data anchors", node->GetName().c_str(), + node->GetType().c_str()); + return GRAPH_FAILED; + } + } + + for (const auto &in_anchor : node->GetAllInDataAnchors()) { + in_anchor->UnlinkAll(); + } + return GRAPH_SUCCESS; +} + +InNodesToOut GetFullConnectIONodes(const NodePtr &node) { + InNodesToOut in_nodes_to_out; + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "Node is nullptr,node is %s", node->GetName().c_str()); + return in_nodes_to_out; + } + auto in_nodes_list = node->GetInNodes(); + auto out_nodes_list = node->GetOutNodes(); + auto out_nodes = std::unordered_set(out_nodes_list.begin(), out_nodes_list.end()); + + for (const auto &in_node : in_nodes_list) { + in_nodes_to_out.insert(std::make_pair(in_node, out_nodes)); + } + return in_nodes_to_out; +} + +graphStatus RelinkControlNodeIfNeed(const NodePtr &node, InNodesToOut &in_nodes_to_out, + InNodesToOut &connected_data_in_to_out) { + GE_CHECK_NOTNULL(node); + for (const auto &in_node_to_out : in_nodes_to_out) { + auto &in_node = in_node_to_out.first; + GE_CHECK_NOTNULL(in_node); + auto &connected_data_out = connected_data_in_to_out[in_node]; + for (const auto &out_node : in_node_to_out.second) { + GE_CHECK_NOTNULL(out_node); + if (connected_data_out.count(out_node) == 0) { + GE_CHECK_NOTNULL(in_node->GetOutControlAnchor()); + if (in_node->GetOutControlAnchor()->IsLinkedWith(out_node->GetInControlAnchor())) { + continue; + } + auto ret = GraphUtils::AddEdge(in_node->GetOutControlAnchor(), out_node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + 
GELOGE(GRAPH_FAILED, "Failed to add control edge from %s to %s when isolating node %s type %s", + in_node->GetName().c_str(), out_node->GetName().c_str(), node->GetName().c_str(), + node->GetType().c_str()); + return GRAPH_FAILED; + } + } + } + } + return GRAPH_SUCCESS; +} + +graphStatus ReplaceOutDataAnchors(const Node::Vistor &new_outs, + const Node::Vistor &old_outs, const std::vector &outputs_map) { + auto new_out_size = new_outs.size(); + if (new_out_size < outputs_map.size()) { + GELOGE(GRAPH_PARAM_INVALID, + "Failed to replace out data anchors, the actual size %zu is less than the mapping size %zu", new_out_size, + outputs_map.size()); + return GRAPH_PARAM_INVALID; + } + for (size_t i = 0; i < new_out_size; ++i) { + auto &new_out_anchor = new_outs.at(i); + if (new_out_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "Failed to replace out data anchors, the out data anchor on new node is null, index %zu", i); + return GRAPH_FAILED; + } + if (i >= outputs_map.size()) { + continue; + } + auto old_index = outputs_map.at(i); + if (old_index < 0) { + continue; + } + + const OutDataAnchorPtr &old_out_anchor = old_outs.at(old_index); + if (old_out_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "Failed to replace out data anchors, the out data anchor on old node is null, index %d", + old_index); + return GRAPH_FAILED; + } + auto ret = ReplaceOutDataAnchor(new_out_anchor, old_out_anchor); + if (ret != GRAPH_SUCCESS) { + return ret; + } + } + + return GRAPH_SUCCESS; +} + +graphStatus ReplaceInDataAnchors(const Node::Vistor &new_ins, + const Node::Vistor &old_ins, const std::vector &inputs_map) { + auto new_in_size = new_ins.size(); + if (new_in_size < inputs_map.size()) { + GELOGE(GRAPH_FAILED, "Failed to replace in data anchors, the actual size %zu is less than the mapping size %zu", + new_in_size, inputs_map.size()); + return GRAPH_PARAM_INVALID; + } + + for (size_t i = 0; i < new_in_size; ++i) { + auto &new_in_anchor = new_ins.at(i); + if (new_in_anchor == nullptr) { + 
GELOGE(GRAPH_FAILED, "Failed to replace in data anchors, the out data anchor on new node is null, index %zu", i); + return GRAPH_FAILED; + } + if (i >= inputs_map.size()) { + continue; + } + auto old_index = inputs_map.at(i); + if (old_index < 0) { + continue; + } + const InDataAnchorPtr &old_in_anchor = old_ins.at(old_index); + if (old_in_anchor == nullptr) { + GELOGE(GRAPH_FAILED, "Failed to replace in data anchors, the out data anchor on old node is null, index %d", + old_index); + return GRAPH_FAILED; + } + + auto peer_out_anchor = old_in_anchor->GetPeerOutAnchor(); + if (peer_out_anchor == nullptr) { + GELOGW("Peer out anchor is nullptr"); + continue; + } + auto ret = peer_out_anchor->Unlink(old_in_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Failed to unlink old anchors, unlink from %s(%d) to %s(%d)", + GetNodeNameByAnchor(peer_out_anchor.get()).c_str(), peer_out_anchor->GetIdx(), + GetNodeNameByAnchor(old_in_anchor.get()).c_str(), old_in_anchor->GetIdx()); + return GRAPH_FAILED; + } + ret = peer_out_anchor->LinkTo(new_in_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Failed to link new anchors, link from %s(%d) to %s(%d)", + GetNodeNameByAnchor(peer_out_anchor.get()).c_str(), peer_out_anchor->GetIdx(), + GetNodeNameByAnchor(old_in_anchor.get()).c_str(), old_in_anchor->GetIdx()); + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +graphStatus ReplaceControlAnchors(const NodePtr &new_node, const NodePtr &old_node) { + GE_CHECK_NOTNULL(new_node); + GE_CHECK_NOTNULL(old_node); + GE_CHECK_NOTNULL(new_node->GetInControlAnchor()); + GE_CHECK_NOTNULL(old_node->GetInControlAnchor()); + auto peer_out_anchors = old_node->GetInControlAnchor()->GetPeerAnchors(); + auto new_in_control_anchor = new_node->GetInControlAnchor(); + for (const auto &peer_out_anchor : peer_out_anchors) { + if (peer_out_anchor != nullptr) { + auto ret = GraphUtils::AddEdge(peer_out_anchor, new_in_control_anchor); + if (ret != GRAPH_SUCCESS) { + 
GELOGE(GRAPH_FAILED, "Add edge failed"); + return GRAPH_FAILED; + } + } + } + auto old_out_control_anchor = old_node->GetOutControlAnchor(); + GE_CHECK_NOTNULL(old_out_control_anchor); + auto peer_in_anchors = old_out_control_anchor->GetPeerAnchors(); + auto new_out_control_anchor = new_node->GetOutControlAnchor(); + GE_CHECK_NOTNULL(new_out_control_anchor); + for (const auto &peer_in_anchor : peer_in_anchors) { + if (peer_in_anchor != nullptr) { + auto ret = GraphUtils::AddEdge(new_out_control_anchor, peer_in_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Add edge failed"); + return GRAPH_FAILED; + } + } + } + + return GRAPH_SUCCESS; +} +} // namespace + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::IsolateNode(const NodePtr &node, + const std::vector &io_map) { + if (node == nullptr) { + GELOGE(GRAPH_PARAM_INVALID, "Failed to isolate node(null)"); + return GRAPH_PARAM_INVALID; + } + /// We must get full connections info before re-link data io, because the data + /// edges may be unlinked when relink data io + auto in_nodes_to_out = GetFullConnectIONodes(node); + InNodesToOut data_in_to_out; + auto ret = RelinkDataIO(node, io_map, data_in_to_out); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Failed to isolate node %s type %s when relink data IO", node->GetName().c_str(), + node->GetType().c_str()); + return ret; + } + + ret = RelinkControlNodeIfNeed(node, in_nodes_to_out, data_in_to_out); + if (ret != GRAPH_SUCCESS) { + return ret; + } + NodeUtils::UnlinkAll(*node); + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +GraphUtils::IsolateNode(const NodePtr &node, const std::initializer_list &io_map) { + return IsolateNode(node, std::vector(io_map)); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::IsolateNodeOneIO(const NodePtr &node) { + if (node == nullptr) { + GELOGE(GRAPH_PARAM_INVALID, "incorrect parameter. 
node is invalid"); + return GRAPH_PARAM_INVALID; + } + if (node->GetAllInDataAnchorsSize() != 1) { + return GRAPH_PARAM_INVALID; + } + if (node->GetAllOutDataAnchorsSize() != 1) { + return GRAPH_PARAM_INVALID; + } + return IsolateNode(node, {0}); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +GraphUtils::ReplaceNodeAnchors(const NodePtr &new_node, const NodePtr &old_node, const std::vector &inputs_map, + const std::vector &outputs_map) { + if ((new_node == nullptr) || (old_node == nullptr)) { + GELOGE(GRAPH_FAILED, "Parameter is nullptr"); + return GRAPH_PARAM_INVALID; + } + auto ret = ReplaceNodeDataAnchors(new_node, old_node, inputs_map, outputs_map); + if (ret != GRAPH_SUCCESS) { + // The error log was printed in `ReplaceNodeDataAnchors` + return GRAPH_FAILED; + } + ret = ReplaceControlAnchors(new_node, old_node); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, + "Failed to replace control anchors when replace node from old node %s type %s to new node %s type %s", + old_node->GetName().c_str(), old_node->GetType().c_str(), new_node->GetName().c_str(), + new_node->GetType().c_str()); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::ReplaceNodeAnchors( + const NodePtr &new_node, const NodePtr &old_node, const std::initializer_list inputs_map, + const std::initializer_list outputs_map) { + return ReplaceNodeAnchors(new_node, old_node, std::vector(inputs_map), std::vector(outputs_map)); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +GraphUtils::ReplaceNodeDataAnchors(const NodePtr &new_node, const NodePtr &old_node, + std::initializer_list inputs_map, std::initializer_list outputs_map) { + return ReplaceNodeDataAnchors(new_node, old_node, std::vector(inputs_map), std::vector(outputs_map)); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +GraphUtils::ReplaceNodeDataAnchors(const NodePtr &new_node, const NodePtr &old_node, const 
std::vector &inputs_map, + const std::vector &outputs_map) { + if (new_node == nullptr || old_node == nullptr) { + GELOGE(GRAPH_FAILED, "Parameter is nullptr"); + return GRAPH_PARAM_INVALID; + } + + auto ret = ReplaceOutDataAnchors(new_node->GetAllOutDataAnchors(), old_node->GetAllOutDataAnchors(), outputs_map); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, + "Failed to replace out data anchors when replace node from old node %s type %s to new node %s type %s", + old_node->GetName().c_str(), old_node->GetType().c_str(), new_node->GetName().c_str(), + new_node->GetType().c_str()); + return GRAPH_FAILED; + } + ret = ReplaceInDataAnchors(new_node->GetAllInDataAnchors(), old_node->GetAllInDataAnchors(), inputs_map); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, + "Failed to replace in data anchors when replace node from old node %s type %s to new node %s type %s", + old_node->GetName().c_str(), old_node->GetType().c_str(), new_node->GetName().c_str(), + new_node->GetType().c_str()); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::CopyInCtrlEdges(const NodePtr &src_node, + NodePtr &dst_node) { + if ((src_node == nullptr) || (dst_node == nullptr)) { + GELOGE(GRAPH_FAILED, "Parameter is nullptr"); + return GRAPH_PARAM_INVALID; + } + auto src_ctrl_in_nodes = src_node->GetInControlNodes(); + if (src_ctrl_in_nodes.empty()) { + return GRAPH_SUCCESS; + } + + std::unordered_set exist_in_ctrl_nodes_set; + auto exist_in_ctrl_nodes = dst_node->GetInControlNodes(); + if (!exist_in_ctrl_nodes.empty()) { + exist_in_ctrl_nodes_set.insert(exist_in_ctrl_nodes.begin(), exist_in_ctrl_nodes.end()); + } + + auto dst_ctrl = dst_node->GetInControlAnchor(); + for (const auto &in_node : src_ctrl_in_nodes) { + if (exist_in_ctrl_nodes_set.count(in_node) > 0) { + continue; + } + auto ret = GraphUtils::AddEdge(in_node->GetOutControlAnchor(), dst_ctrl); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, 
"Failed to add control edge from %s to %s when copy control dependencies from %s to %s", + in_node->GetName().c_str(), dst_node->GetName().c_str(), src_node->GetName().c_str(), + dst_node->GetName().c_str()); + return ret; + } + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::MoveInCtrlEdges(const NodePtr &src_node, + NodePtr &dst_node) { + if (src_node == nullptr || dst_node == nullptr) { + GELOGE(GRAPH_FAILED, "Parameter is nullptr"); + return GRAPH_FAILED; + } + auto ret = CopyInCtrlEdges(src_node, dst_node); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Copy in ctrl edges failed"); + return ret; + } + GE_CHECK_NOTNULL(src_node->GetInControlAnchor()); + src_node->GetInControlAnchor()->UnlinkAll(); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::CopyOutCtrlEdges(const NodePtr &src_node, + NodePtr &dst_node) { + if (src_node == nullptr || dst_node == nullptr) { + GELOGE(GRAPH_FAILED, "Parameter is nullptr"); + return GRAPH_FAILED; + } + auto out_ctrl_nodes = src_node->GetOutControlNodes(); + if (out_ctrl_nodes.empty()) { + return GRAPH_SUCCESS; + } + + std::unordered_set exists_out_ctrl_nodes_set; + for (const auto &node : dst_node->GetOutControlNodes()) { + exists_out_ctrl_nodes_set.insert(node.get()); + } + + auto dst_out_ctrl = dst_node->GetOutControlAnchor(); + for (const auto &node : out_ctrl_nodes) { + if (exists_out_ctrl_nodes_set.count(node.get()) > 0) { + continue; + } + auto ret = GraphUtils::AddEdge(dst_out_ctrl, node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Failed to add control edge from %s to %s when copy control dependencies from %s to %s", + dst_node->GetName().c_str(), node->GetName().c_str(), src_node->GetName().c_str(), + dst_node->GetName().c_str()); + return ret; + } + } + + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus 
GraphUtils::MoveOutCtrlEdges(NodePtr &src_node, + NodePtr &dst_node) { + if (src_node == nullptr || dst_node == nullptr) { + GELOGE(GRAPH_FAILED, "Parameter is nullptr"); + return GRAPH_FAILED; + } + auto ret = CopyOutCtrlEdges(src_node, dst_node); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "Copyout ctrl edges failed"); + return ret; + } + GE_CHECK_NOTNULL(src_node->GetOutControlAnchor()); + src_node->GetOutControlAnchor()->UnlinkAll(); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus GraphUtils::AppendInputNode(const ComputeGraphPtr &graph, + const NodePtr &node) { + if (graph->AddInputNode(node) == nullptr) { + GELOGE(GRAPH_FAILED, "Copyout ctrl edges failed"); + return GRAPH_FAILED; + } + graph->SetInputSize(graph->GetInputSize() + 1); + graph->inputs_order_.emplace_back(node->GetName()); + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/common/graph/utils/mem_utils.h b/src/common/graph/utils/mem_utils.h new file mode 100644 index 00000000..7e8dd9fd --- /dev/null +++ b/src/common/graph/utils/mem_utils.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COMMON_GRAPH_UTILS_MEM_UTILS_H_ +#define COMMON_GRAPH_UTILS_MEM_UTILS_H_ + +#include +#include + +namespace ge { +template +static inline std::shared_ptr<_Tp> MakeShared(_Args &&... 
__args) { + typedef typename std::remove_const<_Tp>::type _Tp_nc; + std::shared_ptr<_Tp> ret(new (std::nothrow) _Tp_nc(std::forward<_Args>(__args)...)); + return ret; +} +} + +#endif // COMMON_GRAPH_UTILS_MEM_UTILS_H_ diff --git a/src/common/graph/utils/node_utils.cc b/src/common/graph/utils/node_utils.cc new file mode 100644 index 00000000..d3ec3db8 --- /dev/null +++ b/src/common/graph/utils/node_utils.cc @@ -0,0 +1,361 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "utils/node_utils.h" + +#include "debug/ge_op_types.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/anchor.h" +#include "utils/tensor_utils.h" +#include "utils/type_utils.h" + +namespace ge { +std::map> NodeUtils::map_send_info_{}; +std::map> NodeUtils::map_recv_info_{}; + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus NodeUtils::AddSendEventId(const NodePtr &node, + const uint32_t &event_id) { + GE_CHECK_NOTNULL(node); + map_send_info_[node].push_back(event_id); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus NodeUtils::AddRecvEventId(const NodePtr &node, + const uint32_t &event_id) { + GE_CHECK_NOTNULL(node); + map_recv_info_[node].push_back(event_id); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +NodeUtils::GetSendEventIdList(const NodePtr &node, std::vector &vec_send) { + GE_CHECK_NOTNULL(node); + auto find = map_send_info_.find(node); + if (find == map_send_info_.end()) { + return GRAPH_FAILED; + } else { + vec_send = find->second; + return GRAPH_SUCCESS; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +NodeUtils::GetRecvEventIdList(const NodePtr &node, std::vector &vec_recv) { + GE_CHECK_NOTNULL(node); + auto find = map_recv_info_.find(node); + if (find == map_recv_info_.end()) { + return GRAPH_FAILED; + } else { + vec_recv = find->second; + return GRAPH_SUCCESS; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus NodeUtils::ClearSendInfo() { + map_send_info_.clear(); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus NodeUtils::ClearRecvInfo() { + map_recv_info_.clear(); + return GRAPH_SUCCESS; +} + +graphStatus NodeUtils::GetSingleOutputNodeOfNthLayer(const NodePtr &src, int depth, NodePtr &dst) { + GE_CHECK_NOTNULL(src); + NodePtr cur_ptr; + if (depth < 1) { + return GRAPH_FAILED; + } + for (int i = 0; i < depth; i++) { + if 
(src->GetOutDataNodes().size() != 1) { + return GRAPH_FAILED; + } + cur_ptr = src->GetOutDataNodes().at(0); + } + dst = cur_ptr; + return GRAPH_SUCCESS; +} + +graphStatus NodeUtils::GetDataOutAnchorAndControlInAnchor(const NodePtr &node_ptr, OutDataAnchorPtr &out_data, + InControlAnchorPtr &in_control) { + GE_CHECK_NOTNULL(node_ptr); + for (const auto &p : node_ptr->GetAllOutDataAnchors()) { + GE_CHK_BOOL_EXEC((p != nullptr), continue, "GetAllOutDataAnchors is nullptr"); + for (const auto &p_in : p->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC((p_in != nullptr), continue, "GetPeerInDataAnchors is nullptr"); + out_data = p; + in_control = p_in; + return GRAPH_SUCCESS; + } + } + return GRAPH_FAILED; +} + +graphStatus NodeUtils::ClearInDataAnchor(const NodePtr &node_ptr, const InDataAnchorPtr &in_data_anchor) { + GE_CHK_BOOL_EXEC(node_ptr != nullptr && in_data_anchor != nullptr, return GRAPH_FAILED, + "node or in_data_anchor is nullptr"); + bool find_flag = false; + uint32_t index = 0; + vector::iterator it = node_ptr->in_data_anchors_.end(); + for (const auto &tmp : node_ptr->in_data_anchors_) { + if (tmp == in_data_anchor) { + find_flag = true; + auto iter = node_ptr->in_data_anchors_.begin() + index; + if (iter != node_ptr->in_data_anchors_.end()) { + it = node_ptr->in_data_anchors_.erase(iter); + } + break; + } + index++; + } + for (; it != node_ptr->in_data_anchors_.end(); ++it) { + (*it)->SetIdx(index); + index++; + } + + if (!find_flag) { + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus NodeUtils::SetAllAnchorStatus(const NodePtr &node_ptr) { + GE_CHK_BOOL_EXEC(node_ptr != nullptr, return GRAPH_FAILED, "node is nullptr"); + GE_CHK_BOOL_EXEC(SetAllAnchorStatus(*node_ptr) == GRAPH_SUCCESS, return GRAPH_FAILED, "set all anchor status failed"); + return GRAPH_SUCCESS; +} + +graphStatus NodeUtils::SetAllAnchorStatus(Node &node) { + node.anchor_status_updated_ = true; + return GRAPH_SUCCESS; +} + 
+GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool NodeUtils::IsAnchorStatusSet(const NodePtr &node_ptr) { + GE_CHK_BOOL_EXEC(node_ptr != nullptr, return false, "node is nullptr"); + return IsAnchorStatusSet(*node_ptr); +} + +bool NodeUtils::IsAnchorStatusSet(const Node &node) { return node.anchor_status_updated_; } + +graphStatus NodeUtils::MoveOutputEdges(const NodePtr &origin_node, const NodePtr &new_node) { + if ((origin_node == nullptr) || (new_node == nullptr)) { + return GRAPH_FAILED; + } + auto origin_out_data_anchors = origin_node->GetAllOutDataAnchors(); + auto new_out_data_anchors = new_node->GetAllOutDataAnchors(); + if (origin_out_data_anchors.size() != new_out_data_anchors.size()) { + return GRAPH_FAILED; + } + + for (size_t i = 0; i < origin_out_data_anchors.size(); ++i) { + for (const auto &peer_anchor : origin_out_data_anchors.at(i)->GetPeerInDataAnchors()) { + GE_CHK_BOOL_EXEC(origin_out_data_anchors.at(i)->Unlink(peer_anchor) == GRAPH_SUCCESS, continue, + "unlink peer_anchor failed"); + GE_CHK_BOOL_EXEC(new_out_data_anchors.at(i)->LinkTo(peer_anchor) == GRAPH_SUCCESS, continue, + "linkto peer_anchor failed"); + } + + for (const auto &peer_anchor : origin_out_data_anchors.at(i)->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(origin_out_data_anchors.at(i)->Unlink(peer_anchor) == GRAPH_SUCCESS, continue, + "unlink peer_anchor failed"); + GE_CHK_BOOL_EXEC(new_out_data_anchors.at(i)->LinkTo(peer_anchor) == GRAPH_SUCCESS, continue, + "linkto peer_anchor failed"); + } + } + + auto origin_out_control_anchor = origin_node->GetOutControlAnchor(); + GE_CHECK_NOTNULL(origin_out_control_anchor); + auto new_out_control_anchor = new_node->GetOutControlAnchor(); + GE_CHECK_NOTNULL(new_out_control_anchor); + for (const auto &peer_anchor : origin_out_control_anchor->GetPeerInControlAnchors()) { + GE_CHK_BOOL_EXEC(new_out_control_anchor->LinkTo(peer_anchor) == GRAPH_SUCCESS, continue, + "linkto peer_anchor failed"); + } + for (const auto &peer_anchor : 
origin_out_control_anchor->GetPeerInDataAnchors()) { + GE_CHK_BOOL_EXEC(new_out_control_anchor->LinkTo(peer_anchor) == GRAPH_SUCCESS, continue, + "linkto peer_anchor failed"); + } + origin_out_control_anchor->UnlinkAll(); + + return GRAPH_SUCCESS; +} + +bool NodeUtils::IsConst(const Node &node) { + auto src_node_type = node.GetType(); + bool is_const = ((src_node_type == CONSTANT) || (src_node_type == CONSTANTOP)); + return is_const; +} + +void NodeUtils::UpdateIsInputConst(const NodePtr &node_ptr) { + if (node_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "node is null"); + return; + } + UpdateIsInputConst(*node_ptr); +} + +/// +/// update is_input_const +/// @param node +/// @return void +/// +void NodeUtils::UpdateIsInputConst(Node &node) { + std::vector is_input_const; + size_t anchor_num = node.GetAllInDataAnchors().size(); + for (size_t i = 0; i < anchor_num; i++) { + auto in_anchor = node.GetInDataAnchor(static_cast(i)); + if (in_anchor == nullptr) { + is_input_const.push_back(false); + continue; + } + auto peer_out_anchor = in_anchor->GetPeerOutAnchor(); + if (peer_out_anchor == nullptr) { + is_input_const.push_back(false); + continue; + } + auto src_node = peer_out_anchor->GetOwnerNode(); + if (src_node == nullptr) { + is_input_const.push_back(false); + continue; + } + if (IsConst(*(src_node))) { + is_input_const.push_back(true); + } else { + is_input_const.push_back(false); + } + } + if (node.GetOpDesc() == nullptr) { + GELOGE(GRAPH_FAILED, "Node get opdesc is nullptr"); + return; + } + node.GetOpDesc()->SetIsInputConst(is_input_const); +} + +void NodeUtils::UnlinkAll(const Node &node) { + for (const auto &anchor : node.GetAllOutAnchors()) { + anchor->UnlinkAll(); + } + for (const auto &anchor : node.GetAllInAnchors()) { + anchor->UnlinkAll(); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus NodeUtils::UpdatePeerNodeInputDesc(const NodePtr &node_ptr) { + if (node_ptr == nullptr) { + GELOGE(GRAPH_FAILED, "Nodeptr is nullptr"); + return 
GRAPH_FAILED; + } + auto op_desc = node_ptr->GetOpDesc(); + if (op_desc == nullptr) { + return GRAPH_FAILED; + } + for (const auto &out_anchor : node_ptr->GetAllOutDataAnchors()) { + GeTensorDesc output_tensor = op_desc->GetOutputDesc(out_anchor->GetIdx()); + ge::TensorUtils::SetRealDimCnt(output_tensor, static_cast(output_tensor.GetShape().GetDims().size())); + output_tensor.SetOriginShape(output_tensor.GetShape()); + output_tensor.SetOriginDataType(output_tensor.GetDataType()); + GELOGD("node name is %s, origin shape is %ld, origin format is %s, origin data type is %s", + node_ptr->GetName().c_str(), output_tensor.GetOriginShape().GetShapeSize(), + TypeUtils::FormatToSerialString(output_tensor.GetOriginFormat()).c_str(), + TypeUtils::DataTypeToSerialString(output_tensor.GetOriginDataType()).c_str()); + (void)op_desc->UpdateOutputDesc(out_anchor->GetIdx(), output_tensor); + for (const auto &peer_anchor : out_anchor->GetPeerInDataAnchors()) { + if (peer_anchor->GetOwnerNode()->GetOpDesc() == nullptr) { + GELOGE(GRAPH_FAILED, "peer_anchor opdesc is null"); + continue; + } + auto peer_input_desc = peer_anchor->GetOwnerNode()->GetOpDesc()->GetInputDescPtr(peer_anchor->GetIdx()); + if (peer_input_desc == nullptr) { + GELOGE(GRAPH_FAILED, "peer_input_desc is nullptr"); + continue; + } + output_tensor.SetOriginFormat(peer_input_desc->GetOriginFormat()); + output_tensor.SetFormat(peer_input_desc->GetFormat()); + auto peer_op_desc = peer_anchor->GetOwnerNode()->GetOpDesc(); + GE_IF_BOOL_EXEC(peer_op_desc == nullptr, GELOGE(GRAPH_FAILED, "peer opdesc is null"); continue); + GE_IF_BOOL_EXEC(peer_op_desc->UpdateInputDesc(peer_anchor->GetIdx(), output_tensor) != GRAPH_SUCCESS, + GELOGE(GRAPH_FAILED, "peer opdesc is null"); + continue); + } + } + return GRAPH_SUCCESS; +} +bool NodeUtils::IsInNodesEmpty(const Node &node) { + for (const auto &in_anchor : node.in_data_anchors_) { + if (in_anchor != nullptr) { + auto out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor != 
nullptr) { + if (out_anchor->GetOwnerNode() != nullptr) { + return false; + } + } + } + } + + if ((node.in_control_anchor_ != nullptr) && (!node.in_control_anchor_->IsPeerOutAnchorsEmpty())) { + auto peer_out_control_anchors = node.in_control_anchor_->GetPeerOutControlAnchors(); + for (auto &out_control_anchor : peer_out_control_anchors) { + if (out_control_anchor != nullptr) { + if (out_control_anchor->GetOwnerNode() != nullptr) { + return false; + } + } + } + } + + return true; +} +GeTensorDesc NodeUtils::GetOutputDesc(const Node &node, uint32_t index) { + auto desc = node.GetOpDesc(); + if (desc == nullptr) { + return GeTensorDesc(); + } + return desc->GetOutputDesc(index); +} +GeTensorDesc NodeUtils::GetInputDesc(const Node &node, uint32_t index) { + auto desc = node.GetOpDesc(); + if (desc == nullptr) { + return GeTensorDesc(); + } + return desc->GetInputDesc(index); +} +graphStatus NodeUtils::UpdateOutputShape(const Node &node, uint32_t index, const GeShape &shape) { + auto desc = node.GetOpDesc(); + if (desc == nullptr) { + return GRAPH_PARAM_INVALID; + } + auto output_desc = desc->MutableOutputDesc(index); + if (output_desc == nullptr) { + return GRAPH_PARAM_INVALID; + } + output_desc->SetShape(shape); + return GRAPH_SUCCESS; +} +graphStatus NodeUtils::UpdateInputShape(const Node &node, uint32_t index, const GeShape &shape) { + auto desc = node.GetOpDesc(); + if (desc == nullptr) { + return GRAPH_PARAM_INVALID; + } + auto input_desc = desc->MutableInputDesc(index); + if (input_desc == nullptr) { + return GRAPH_PARAM_INVALID; + } + input_desc->SetShape(shape); + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/common/graph/utils/op_desc_utils.cc b/src/common/graph/utils/op_desc_utils.cc new file mode 100644 index 00000000..5cf9353f --- /dev/null +++ b/src/common/graph/utils/op_desc_utils.cc @@ -0,0 +1,573 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you 
may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "utils/op_desc_utils.h" + +#include + +#include "debug/ge_attr_define.h" +#include "debug/ge_op_types.h" +#include "debug/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/anchor.h" +#include "graph/compute_graph.h" +#include "graph/ge_attr_value.h" +#include "utils/graph_utils.h" +#include "utils/node_utils.h" + +using std::vector; + +namespace ge { +const char OP_DESC_QUANT_PARAMS[] = "quantize_factor"; +static const int CONST_OP_NORMAL_WEIGHT_SIZE = 1; + +bool OpDescUtils::ClearInputDesc(const NodePtr &node) { + GE_CHK_BOOL_EXEC(node != nullptr, return false, "node is nullptr"); + GE_CHK_BOOL_EXEC(node->GetOpDesc() != nullptr, return false, "opdesc is nullptr"); + vector index_list; + for (const auto &in_anchor : node->GetAllInDataAnchors()) { + if (in_anchor->GetPeerOutAnchor() == nullptr) { + index_list.push_back(in_anchor->GetIdx()); + } + } + std::sort(index_list.begin(), index_list.end()); + // Node's in anchor index need shrink + for (size_t i = 0; i < index_list.size(); ++i) { + auto iter = node->GetOpDesc()->inputs_desc_.begin() + index_list[i]; + if (iter < node->GetOpDesc()->inputs_desc_.end()) { + (void)node->GetOpDesc()->inputs_desc_.erase(iter); + } else { + GELOGW("inputs_desc_ iterator out of range."); + } + } + + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDescUtils::ClearInputDesc(OpDescPtr op_desc, + const uint32_t index) { + GE_CHK_BOOL_EXEC(op_desc != nullptr, return false, "op_desc is nullptr"); + 
GE_CHK_BOOL_EXEC(index < op_desc->inputs_desc_.size(), return false, "index %u is invalid.", index); + + auto iter = op_desc->inputs_desc_.begin() + index; + if (iter < op_desc->inputs_desc_.end()) { + (void)op_desc->inputs_desc_.erase(iter); + } else { + GELOGW("inputs_desc_ iterator out of range."); + } + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDescUtils::HasQuantizeFactorParams(const OpDescPtr &op_desc) { + GE_CHK_BOOL_EXEC_INFO(op_desc != nullptr, return false, "op_desc is nullptr"); + return op_desc->HasAttr(OP_DESC_QUANT_PARAMS); +} + +bool OpDescUtils::ClearOutputDesc(const NodePtr &node) { + GE_CHK_BOOL_EXEC(node != nullptr, return false, "node is nullptr"); + GE_CHK_BOOL_EXEC(node->GetOpDesc() != nullptr, return false, "opdesc is nullptr"); + vector index_list; + for (const auto &out_anchor : node->GetAllOutDataAnchors()) { + if (out_anchor->GetPeerInDataAnchors().empty()) { + index_list.push_back(out_anchor->GetIdx()); + } + } + std::sort(index_list.begin(), index_list.end()); + // Node's out anchor index need shrink + for (size_t i = 0; i < index_list.size(); ++i) { + auto iter = node->GetOpDesc()->outputs_desc_.begin() + index_list[i]; + if (iter < node->GetOpDesc()->outputs_desc_.end()) { + (void)node->GetOpDesc()->outputs_desc_.erase(iter); + } else { + GELOGW("outputs_desc_ iterator out of range."); + } + } + + return true; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDescUtils::ClearOutputDesc(const OpDescPtr &op_desc, + uint32_t index) { + GE_CHK_BOOL_EXEC(op_desc != nullptr, return false, "op_desc is nullptr"); + GE_CHK_BOOL_EXEC(index < op_desc->outputs_desc_.size(), return false, "index %u is invalid.", index); + + auto iter = op_desc->outputs_desc_.begin() + index; + if (iter < op_desc->outputs_desc_.end()) { + (void)op_desc->outputs_desc_.erase(iter); + } else { + GELOGW("outputs_desc_ iterator out of range."); + } + return true; +} + +bool OpDescUtils::HasQuantizeFactorParams(const OpDesc 
&op_desc) { return op_desc.HasAttr(OP_DESC_QUANT_PARAMS); } + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +OpDescUtils::GetQuantizeFactorParams(const OpDescPtr &op_desc, QuantizeFactorParams &quant) { + GE_CHK_BOOL_EXEC_INFO(op_desc != nullptr, return GRAPH_FAILED, "op_desc is nullptr"); + GeAttrValue attr_value; + GE_CHK_BOOL_EXEC_INFO(op_desc->GetAttr(OP_DESC_QUANT_PARAMS, attr_value) == GRAPH_SUCCESS, return GRAPH_FAILED, + "GetQuantizeFactorParams failed"); + return attr_value.GetValue(quant); +} + +graphStatus OpDescUtils::GetQuantizeFactorParams(const OpDesc &op_desc, QuantizeFactorParams &quant) { + GeAttrValue attr_value; + GE_CHK_BOOL_EXEC_INFO(op_desc.GetAttr(OP_DESC_QUANT_PARAMS, attr_value) == GRAPH_SUCCESS, return GRAPH_FAILED, + "GetQuantizeFactorParams failed"); + return attr_value.GetValue(quant); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +OpDescUtils::SetQuantizeFactorParams(const OpDescPtr &op_desc, const QuantizeFactorParams &quant) { + GE_CHK_BOOL_EXEC_INFO(op_desc != nullptr, return GRAPH_FAILED, "op_desc is nullptr"); + return op_desc->SetAttr(OP_DESC_QUANT_PARAMS, GeAttrValue::CreateFrom(quant)); +} + +graphStatus OpDescUtils::SetQuantizeFactorParams(OpDesc &op_desc, const QuantizeFactorParams &quant) { + return op_desc.SetAttr(OP_DESC_QUANT_PARAMS, GeAttrValue::CreateFrom(quant)); +} + +GeTensorPtr OpDescUtils::MutableWeights(OpDesc &op_desc) { + GeTensorPtr weight = nullptr; + if (!AttrUtils::MutableTensor(&op_desc, ATTR_NAME_WEIGHTS, weight)) { + GELOGW("MutableTensor error"); + } + + return weight; +} + +GE_FUNC_HOST_VISIBILITY GeTensorPtr OpDescUtils::MutableWeights(OpDescPtr op_desc) { + if (op_desc == nullptr) { + GELOGE(GRAPH_FAILED, "op_desc is null"); + return nullptr; + } + return MutableWeights(*op_desc); +} + +graphStatus OpDescUtils::SetWeights(OpDesc &op_desc, const GeTensorPtr weight) { + if (weight == nullptr) { + GELOGE(GRAPH_FAILED, "weight is null"); + return GRAPH_FAILED; + } + 
return AttrUtils::SetTensor(&op_desc, ATTR_NAME_WEIGHTS, weight) ? GRAPH_SUCCESS : GRAPH_FAILED; +} + +graphStatus OpDescUtils::SetWeights(OpDescPtr op_desc, const GeTensorPtr weight) { + GE_CHECK_NOTNULL(op_desc); + GE_CHECK_NOTNULL(weight); + return SetWeights(*op_desc, weight); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDescUtils::GetWeights(const ge::Node &node) { + auto weights = MutableWeights(node); + vector ret(weights.size()); + std::copy(weights.begin(), weights.end(), ret.begin()); + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDescUtils::GetWeights( + const ge::ConstNodePtr &node) { + if (node == nullptr) { + return vector(); + } + return GetWeights(*node); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDescUtils::GetConstInputNode( + const ge::Node &node) { + vector ret; + auto in_anchors = node.GetAllInDataAnchors(); + for (const auto &in_anchor : in_anchors) { + auto out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor == nullptr) { + // normally out_anchor could be null, this is ok + GELOGD("node %s' peer_out_anchor is null", node.GetName().c_str()); + continue; + } + auto in_node = out_anchor->GetOwnerNode(); + if ((in_node->GetType() == CONSTANT) || (in_node->GetType() == CONSTANTOP)) { + ret.push_back(in_node); + } + } + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDescUtils::GetInputData( + const vector &input_nodes) { + vector ret; + for (const auto &input_node : input_nodes) { + auto temp_weight = MutableWeights(input_node->GetOpDesc()); + if (temp_weight == nullptr) { + GELOGE(GRAPH_FAILED, "const op's weight is null, name: %s", input_node->GetName().c_str()); + return vector(); + } + ret.push_back(temp_weight); + } + + return ret; +} +size_t OpDescUtils::GetNonConstInputsSize(const ge::Node &node) { + if (NodeUtils::IsAnchorStatusSet(node)) { + size_t input_num = 0; + for (const auto &anchor : node.GetAllInDataAnchors()) { + if 
(ge::AnchorUtils::GetStatus(anchor) == ANCHOR_DATA) { + input_num++; + continue; + } + } + return input_num; + } else { + GE_IF_BOOL_EXEC( + node.GetInDataNodes().size() < GetConstInputs(node).size(), + GELOGE(GRAPH_FAILED, "%zu is smaller than %zu", node.GetInDataNodes().size(), GetConstInputs(node).size()); + return 0); + return node.GetInDataNodes().size() - GetConstInputs(node).size(); + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY size_t OpDescUtils::GetNonConstInputsSize(const ge::ConstNodePtr node) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "Node is nullptr"); + return 0; + } + return GetNonConstInputsSize(*node); +} + +GeTensorDesc OpDescUtils::GetNonConstInputTensorDesc(const ge::Node &node, size_t index_non_const) { + GE_CHK_BOOL_EXEC(node.GetOpDesc() != nullptr, return GeTensorDesc(), "node.GetOpDesc() is nullptr!"); + size_t i = 0; + if (NodeUtils::IsAnchorStatusSet(node)) { + for (const auto &anchor : node.GetAllInDataAnchors()) { + if (ge::AnchorUtils::GetStatus(anchor) == ANCHOR_DATA) { + if (index_non_const == i) { + return node.GetOpDesc()->GetInputDesc(static_cast(anchor->GetIdx())); + } + ++i; + } + } + } else { + for (const auto &anchor : node.GetAllInDataAnchors()) { + auto peer_anchor = anchor->GetPeerOutAnchor(); + if (peer_anchor == nullptr) { + continue; + } + auto owner_node = peer_anchor->GetOwnerNode(); + if (owner_node == nullptr) { + continue; + } + if (owner_node->GetType() == CONSTANT) { + continue; + } + if (index_non_const == i) { + return node.GetOpDesc()->GetInputDesc(anchor->GetIdx()); + } + ++i; + } + } + return GeTensorDesc(); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeTensorDesc +OpDescUtils::GetNonConstInputTensorDesc(const ge::ConstNodePtr &node, size_t index_non_const) { + CHECK_FALSE_EXEC(node != nullptr, return GeTensorDesc()); + return GetNonConstInputTensorDesc(*node, index_non_const); +} + +bool OpDescUtils::GetNonConstInputIndex(const ge::Node &node, const size_t index_non_const, size_t 
&index) { + bool ret = false; + size_t i = 0; + if (NodeUtils::IsAnchorStatusSet(node)) { + for (const auto &anchor : node.GetAllInDataAnchors()) { + if (ge::AnchorUtils::GetStatus(anchor) == ANCHOR_DATA) { + if (index_non_const == i) { + index = static_cast(anchor->GetIdx()); + ret = true; + } + ++i; + } + } + } else { + for (const auto &anchor : node.GetAllInDataAnchors()) { + auto peer_anchor = anchor->GetPeerOutAnchor(); + if (peer_anchor == nullptr) { + continue; + } + auto owner_node = peer_anchor->GetOwnerNode(); + if (owner_node == nullptr) { + continue; + } + if (owner_node->GetType() == CONSTANT) { + continue; + } + if (index_non_const == i) { + index = static_cast(anchor->GetIdx()); + ret = true; + } + ++i; + } + } + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDescUtils::GetNonConstInputIndex(const ge::ConstNodePtr &node, + size_t index_non_const, + size_t &index) { + CHECK_FALSE_EXEC(node != nullptr, return false); + return GetNonConstInputIndex(*node, index_non_const, index); +} + +bool OpDescUtils::IsNonConstInput(const ge::Node &node, const size_t index) { + bool ret = false; + if (index < node.GetAllInDataAnchors().size()) { + if (NodeUtils::IsAnchorStatusSet(node)) { + ret = (ge::AnchorUtils::GetStatus(node.GetInDataAnchor(static_cast(index))) == ANCHOR_DATA); + } else { + for (const auto &anchor : node.GetAllInDataAnchors()) { + if (anchor->GetIdx() != static_cast(index)) { + continue; + } + auto peer_anchor = anchor->GetPeerOutAnchor(); + if (peer_anchor == nullptr) { + break; + } + auto owner_node = peer_anchor->GetOwnerNode(); + if (owner_node == nullptr) { + break; + } + ret = (owner_node->GetType() != CONSTANT); + } + } + } + + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool OpDescUtils::IsNonConstInput(const ge::ConstNodePtr &node, + size_t index) { + CHECK_FALSE_EXEC(node != nullptr, return false); + return IsNonConstInput(*node, index); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY 
vector OpDescUtils::GetConstInputs( + const ge::ConstNodePtr &node) { + if (node == nullptr) { return vector(); } + return GetConstInputs(*node); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDescUtils::GetNonConstTensorDesc( + const ge::ConstNodePtr &node) { + if (node == nullptr || node->GetOpDesc() == nullptr) { + return vector(); + } + vector ret; + if (NodeUtils::IsAnchorStatusSet(*node)) { + for (const auto &in_anchor : node->GetAllInDataAnchors()) { + if (ge::AnchorUtils::GetStatus(in_anchor) == ANCHOR_DATA) { + ret.push_back(node->GetOpDesc()->GetInputDesc(in_anchor->GetIdx())); + } + } + } else { + for (const auto &in_anchor : node->GetAllInDataAnchors()) { + auto out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor == nullptr || out_anchor->GetOwnerNode()->GetOpDesc() == nullptr) { + continue; + } + if (out_anchor->GetOwnerNode()->GetOpDesc()->GetType() != CONSTANT) { + ret.push_back(node->GetOpDesc()->GetInputDesc(in_anchor->GetIdx())); + } + } + } + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDescUtils::GetConstInputs(const ge::Node &node) { + vector ret; + auto in_anchors = node.GetAllInDataAnchors(); + for (const auto &in_anchor : in_anchors) { + auto out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor == nullptr) continue; + + auto in_node = out_anchor->GetOwnerNode(); + if (in_node->GetType() == CONSTANT) { + ret.push_back(in_node); + } else if (in_node->GetType() == SWITCH && node.GetType() == MATMUL) { + // const --> switch --> matmul + auto switch_input = GetConstInputs(*in_node); + if (switch_input.size() > 0) { + ret.insert(ret.end(), switch_input.begin(), switch_input.end()); + } + } + } + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDescUtils::MutableWeights(const ge::Node &node) { + vector ret; + GE_CHK_BOOL_EXEC(node.GetOpDesc() != nullptr, return ret, "node.GetOpDesc is nullptr!"); + // Const operator, take the weight directly + if 
(node.GetOpDesc()->GetType() == CONSTANT || (node.GetOpDesc()->GetType() == CONSTANTOP)) { + auto weight = MutableWeights(node.GetOpDesc()); + if (weight == nullptr) { + GELOGI("const op has no weight, op name:%s", node.GetName().c_str()); + return ret; + } + ret.push_back(weight); + return ret; + } + + // Other operators, get weights from connected constop + auto input_nodes = GetConstInputs(node); + for (const auto &input_node : input_nodes) { + auto temp_weight = MutableWeights(input_node->GetOpDesc()); + if (temp_weight == nullptr) { + GELOGE(GRAPH_FAILED, "const op's weight is null, name: %s", input_node->GetName().c_str()); + return vector(); + } + ret.push_back(temp_weight); + } + + return ret; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY vector OpDescUtils::MutableWeights(const ge::NodePtr node) { + if (node == nullptr) { + GELOGE(GRAPH_FAILED, "Node is nullptr"); + return vector(); + } + return MutableWeights(*node); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +OpDescUtils::SetWeights(ge::Node &node, const vector &weights) { + GE_CHK_BOOL_EXEC(node.GetOpDesc() != nullptr, return GRAPH_PARAM_INVALID, "node.GetOpDesc is nullptr!"); + if (node.GetOpDesc()->GetType() == CONSTANT) { + if (weights.size() == CONST_OP_NORMAL_WEIGHT_SIZE) { + return SetWeights(node.GetOpDesc(), weights[0]); + } + GELOGI("const op weight size %zu should be 1", weights.size()); + return GRAPH_PARAM_INVALID; + } + + auto input_nodes = GetConstInputs(node); + if (weights.size() < input_nodes.size()) { + GELOGE(GRAPH_FAILED, "weights count can't be less than const input count"); + return GRAPH_PARAM_INVALID; + } + + ge::GeAttrValue::NamedAttrs named_attrs; + (void)ge::AttrUtils::SetListTensor(named_attrs, "key", weights); + vector copy_weights; + (void)ge::AttrUtils::MutableListTensor(named_attrs, "key", copy_weights); + + for (size_t i = 0; i < input_nodes.size(); ++i) { + if (input_nodes[i]->GetOpDesc() != nullptr) { + 
SetWeights(input_nodes[i]->GetOpDesc(), copy_weights[i]); + } + } + + // If set more weights than constop, need to add constop + for (size_t i = input_nodes.size(); i < copy_weights.size(); ++i) { + // Use org weight before SetWeights Overwrite + auto const_opdesc = CreateConstOp(copy_weights[i]); + GE_CHECK_NOTNULL(const_opdesc); + + auto owner_graph = node.GetOwnerComputeGraph(); + if (owner_graph == nullptr) { + GELOGE(GRAPH_FAILED, "node's graph is empty, name: %s", node.GetName().c_str()); + return GRAPH_PARAM_INVALID; + } + auto const_node = owner_graph->AddNodeFront(const_opdesc); + GE_CHK_BOOL_EXEC(node.AddLinkFrom(const_node) == GRAPH_SUCCESS, return GRAPH_FAILED, "graph add link failed!"); + std::vector original_nodes; + ge::GraphUtils::RecordOriginalNames(original_nodes, const_node); + } + return GRAPH_SUCCESS; +} + +OpDescPtr OpDescUtils::CreateConstOp(const GeTensorPtr &tensor_ptr) { + GE_CHK_BOOL_EXEC(tensor_ptr != nullptr, return nullptr, "tensor_ptr is nullptr!"); + shared_ptr const_opdesc = ComGraphMakeShared(); + if (const_opdesc == nullptr) { + GELOGE(GRAPH_FAILED, "failed to make_shared "); + return nullptr; + } + + GE_CHK_BOOL_EXEC(const_opdesc != nullptr, return nullptr, "const_opdesc is nullptr!"); + CHECK_FALSE_EXEC(SetWeights(const_opdesc, tensor_ptr) == ge::GRAPH_SUCCESS, return nullptr); + + const_opdesc->SetType(CONSTANT); + + static int const_count = 0; + const_opdesc->SetName("dynamic_const_" + std::to_string(const_count)); + + GELOGI("add const op: %s", const_opdesc->GetName().c_str()); + + ++const_count; + + (void)const_opdesc->AddOutputDesc(tensor_ptr->GetTensorDesc()); + + GELOGI("after add const op: %s", const_opdesc->GetName().c_str()); + + return const_opdesc; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +OpDescUtils::AddConstOpToAnchor(InDataAnchorPtr in_anchor, const GeTensorPtr &tensor_ptr) { + GE_CHECK_NOTNULL(in_anchor); + GE_CHECK_NOTNULL(tensor_ptr); + auto const_opdesc = CreateConstOp(tensor_ptr); + 
GE_CHECK_NOTNULL(const_opdesc); + auto in_node = in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(in_node); + auto owner_graph = in_node->GetOwnerComputeGraph(); + if (owner_graph == nullptr) { + GELOGE(GRAPH_PARAM_INVALID, "node's graph is empty, name: %s", in_node->GetName().c_str()); + return GRAPH_PARAM_INVALID; + } + auto const_node = in_node->GetOwnerComputeGraph()->AddNodeFront(const_opdesc); + GE_CHECK_NOTNULL(const_node); + if (GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), in_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_PARAM_INVALID, "Addedge const to node failed."); + return GRAPH_PARAM_INVALID; + } + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +OpDescUtils::SetWeights(ge::NodePtr node, const vector &weights) { + GE_CHECK_NOTNULL(node); + return SetWeights(*node, weights); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus OpDescUtils::ClearWeights(const ge::NodePtr node) { + GE_CHECK_NOTNULL(node); + auto const_ops = GetConstInputs(node); + auto graph = node->GetOwnerComputeGraph(); + if (graph == nullptr) { + GELOGE(GRAPH_FAILED, "Graph is nullptr"); + return GRAPH_PARAM_INVALID; + } + for (const auto &const_op : const_ops) { + GE_CHK_STATUS_RET(GraphUtils::IsolateNode(const_op, {}), "Isolate removed node: %s, type: %s failed", + const_op->GetName().c_str(), const_op->GetType().c_str()); + GE_CHK_STATUS_RET(GraphUtils::RemoveNodeWithoutRelink(graph, const_op), + "Remove node: %s, type: %s without relink failed", const_op->GetName().c_str(), + const_op->GetType().c_str()); + } + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/common/graph/utils/string_utils.h b/src/common/graph/utils/string_utils.h new file mode 100644 index 00000000..79973d33 --- /dev/null +++ b/src/common/graph/utils/string_utils.h @@ -0,0 +1,69 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except 
in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef COMMON_GRAPH_UTILS_STRING_UTILS_H_ +#define COMMON_GRAPH_UTILS_STRING_UTILS_H_ + +#include +#include +#include +#include +#include + +#include "securec.h" + +namespace ge { +class StringUtils { + public: + static std::string &Ltrim(std::string &s) { + (void)s.erase(s.begin(), std::find_if(s.begin(), s.end(), [](int c) { return !std::isspace(c); })); + return s; + } + + static std::string &Rtrim(std::string &s) { + (void)s.erase(std::find_if(s.rbegin(), s.rend(), [](int c) { return !std::isspace(c); }).base(), s.end()); + return s; + } + + /// @ingroup domi_common + /// @brief trim space + static std::string &Trim(std::string &s) { return Ltrim(Rtrim(s)); } + + // split string + static std::vector Split(const std::string &str, char delim) { + std::vector elems; + + if (str.empty()) { + elems.emplace_back(""); + return elems; + } + + std::stringstream ss(str); + std::string item; + + while (getline(ss, item, delim)) { + elems.push_back(item); + } + auto str_size = str.size(); + if (str_size > 0 && str[str_size - 1] == delim) { + elems.emplace_back(""); + } + + return elems; + } +}; +} // namespace ge +#endif // COMMON_GRAPH_UTILS_STRING_UTILS_H_ diff --git a/src/common/graph/utils/tensor_utils.cc b/src/common/graph/utils/tensor_utils.cc new file mode 100644 index 00000000..390fed46 --- /dev/null +++ b/src/common/graph/utils/tensor_utils.cc @@ -0,0 +1,388 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not 
use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/utils/tensor_utils.h" + +#include + +#include "debug/ge_log.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_tensor.h" +#include "graph/types.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace { +// When nc1hwc0 dim size = 5, calc element count directly. +const uint32_t kNc1hwc0CalcByDimsSize = 5; + +// Unknown shape element num +const int64_t kElementCntUnknownShape = -1; + +// Unknown shape mem size +const int64_t kMemSizeUnknownShape = -1; + +// Nchw and nhwc dim size must be 4 +const uint32_t kDimSize4d = 4; + +// C1HWNCoC0 dim size must be 6 +const uint32_t kDimSizeC1hwncoc0 = 6; + +// Cube size is 16 +const uint32_t kTheCubeSize = 16; + +// Default c0 size equals cube size. +const uint32_t kC0SizeDefault = kTheCubeSize; + +// Size equals int8 cube size is 32 +const uint32_t kC0SizeInt8 = 32; + +// NCHW dim N index +const int32_t kNchwDimIdxN = 0; +// NCHW dim C index +const int32_t kNchwDimIdxC = 1; +// NCHW dim H index +const int32_t kNchwDimIdxH = 2; +// NCHW dim W index +const int32_t kNchwDimIdxW = 3; + +const int kDataMemAlignSize = 32; +const int kNum2 = 2; +} // namespace + +/// +/// Check if a * b overflow. 
+/// @param a multiplier +/// @param b Multiplicand +/// @return true: overflow +/// false: not overflow +/// +static bool CheckMultiplyOverflowInt64(const int64_t &a, const int64_t &b) { + if (a > 0) { + if (b > 0) { + if (a > (INT64_MAX / b)) { + return true; + } + } else { + if (b < (INT64_MIN / a)) { + return true; + } + } + } else { + if (b > 0) { + if (a < (INT64_MIN / b)) { + return true; + } + } else { + if ((a != 0) && (b < (INT64_MAX / a))) { + return true; + } + } + } + return false; +} + +/// +/// Calculate element num by dims directly. +/// @param dims dim info +/// @param element_cnt element count +/// @return GRAPH_SUCCESS:success +/// other:failed +/// +static graphStatus CalcElementCntByDims(const std::vector &dims, int64_t &element_cnt) { + element_cnt = 1; + for (int64_t dim : dims) { + if (CheckMultiplyOverflowInt64(element_cnt, dim)) { + GELOGE(GRAPH_FAILED, "CalcElementCntByDims failed, as when multiplying %ld and %ld.", element_cnt, dim); + return GRAPH_FAILED; + } + element_cnt *= dim; + } + return GRAPH_SUCCESS; +} + +/// +/// Calculate fixed dims element num. 
+/// @param dims dim info +/// @param fixed_dim_size fixed dim size +/// @param element_cnt element count +/// @return GRAPH_SUCCESS:success +/// other:failed +/// +static graphStatus CalcElementCntOfFixedDims(const std::vector &dims, Format format, uint32_t fixed_dim_size, + int64_t &element_cnt) { + if (dims.size() != fixed_dim_size) { + GELOGW("Format %d(%s) need dim size=%u but %zu, calc as ND.", format, + TypeUtils::FormatToSerialString(format).c_str(), fixed_dim_size, dims.size()); + } + return CalcElementCntByDims(dims, element_cnt); +} + +/// +/// Get dim c0 size by type +/// @param data_type data type +/// @return c0 size +/// +static uint32_t GetDimC0(DataType &data_type) { + bool is_int8_size = (data_type == DT_INT8) || (data_type == DT_UINT8) || (data_type == DT_DUAL_SUB_UINT8) || + (data_type == DT_DUAL_SUB_INT8) || (data_type == DT_BOOL) || (data_type == DT_QINT8); + return is_int8_size ? kC0SizeInt8 : kC0SizeDefault; +} + +/// +/// Calculate nc1hwc0 element num. +/// @param dims dim info +/// @param data_type data type +/// @param element_cnt element count +/// @return GRAPH_SUCCESS:success +/// other:failed +/// +static graphStatus CalcElementCntOfNc1hwc0(const std::vector &dims, DataType data_type, int64_t &element_cnt) { + // When nc1hwc0 dims size = 5, no need split dim c + if (dims.size() == kNc1hwc0CalcByDimsSize) { + return CalcElementCntByDims(dims, element_cnt); + } else if (dims.size() != kDimSize4d) { + GELOGE(GRAPH_FAILED, "CalcElementCntOfNc1hwc0 failed as dims.size=%zu is not %u or %u.", dims.size(), kDimSize4d, + kNc1hwc0CalcByDimsSize); + return GRAPH_FAILED; + } + + auto c0 = static_cast(GetDimC0(data_type)); + // Nc1hwc0 dims is according to nchw, dim c index is 1. + auto c1 = static_cast(std::ceil(dims[kNchwDimIdxC] * 1.0 / c0)); + // Store dims is split c to c1 and c0. 
+ std::vector store_dims = {dims[kNchwDimIdxN], c1, dims[kNchwDimIdxH], dims[kNchwDimIdxW], c0}; + return CalcElementCntByDims(store_dims, element_cnt); +} + +/// +/// Calculate FractalZ element num. +/// @param dims dim info +/// @param data_type data type +/// @param element_cnt element count +/// @return GRAPH_SUCCESS:success +/// other:failed +/// +static graphStatus CalcElementCntOfFractalZ(const std::vector &dims, DataType data_type, + int64_t &element_cnt) { + static char *parser_priority = std::getenv("PARSER_PRIORITY"); + if (parser_priority != nullptr && string(parser_priority) == "cce") { + if (dims.size() != kDimSize4d) { + GELOGE(GRAPH_FAILED, "CalcElementCntOfFractalZ failed as dims.size=%zu is not %u.", dims.size(), kDimSize4d); + return GRAPH_FAILED; + } + auto c0 = static_cast(GetDimC0(data_type)); + // FractalZ dims is according to nchw, dim c index is 1. + auto c1 = static_cast(std::ceil(dims[kNchwDimIdxC] * 1.0 / c0)); + + // Spread NC1HWC0 as a two dimension array, n as column dimension, + // C1HWC0 as row dimension + std::vector r_count_vec = {c1, dims[kNchwDimIdxH], dims[kNchwDimIdxW], c0}; + + int64_t r_count = 1; + graphStatus graph_status = CalcElementCntByDims(r_count_vec, r_count); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(graph_status, "Calc [%ld, %ld, %ld, %ld] element count failed.", c1, dims[kNchwDimIdxH], + dims[kNchwDimIdxW], c0); + return graph_status; + } + + // Cube count in n + auto nc_cnt = static_cast(std::ceil(dims[kNchwDimIdxN] * 1.0 / kTheCubeSize)); + + // Cube count in vertical direction(C1HWC0) + int64_t vc_cnt = r_count / c0; + // Element count in each cube + int64_t cube_elem_cnt = c0 * kTheCubeSize; + + if (CheckMultiplyOverflowInt64(nc_cnt, vc_cnt)) { + GELOGE(GRAPH_FAILED, "The multiplication of %ld and %ld is overflow.", nc_cnt, vc_cnt); + return GRAPH_FAILED; + } + // Read data times needed by cube + int64_t c_cnt = nc_cnt * vc_cnt; + + if (CheckMultiplyOverflowInt64(c_cnt, cube_elem_cnt)) { + 
GELOGE(GRAPH_FAILED, "The multiplication of %ld and %ld is overflow.", c_cnt, cube_elem_cnt); + return GRAPH_FAILED; + } + // Element count after fractal arrangement + element_cnt = c_cnt * cube_elem_cnt; + return GRAPH_SUCCESS; + } else { + return CalcElementCntByDims(dims, element_cnt); + } +} + +/// +/// Calculate tensor element num. +/// @param dims dim info +/// @param format tensor format +/// @param data_type data type +/// @param element_cnt element count +/// @return GRAPH_SUCCESS:success +/// other:failed +/// +static graphStatus CalcTensorElementCnt(const std::vector &dims, Format format, DataType data_type, + int64_t &element_cnt) { + const string format_str = TypeUtils::FormatToSerialString(format); + // Check dims + for (size_t i = 0; i < dims.size(); ++i) { + int64_t dim = dims[i]; + if (dim < 0) { + GELOGI("It's unknown shape, as dims[%zu]=%ld negative, format=%d(%s).", i, dim, format, format_str.c_str()); + element_cnt = kElementCntUnknownShape; + return GRAPH_SUCCESS; + } else if (dim == 0) { + GELOGI("No need calc element count, as dims[%zu]=%ld, format=%d(%s).", i, dim, format, format_str.c_str()); + element_cnt = 0; + return GRAPH_SUCCESS; + } + } + + graphStatus graph_status; + switch (format) { + case FORMAT_ND: + case FORMAT_MD: + graph_status = CalcElementCntByDims(dims, element_cnt); + break; + case FORMAT_NCHW: + case FORMAT_HWCN: + case FORMAT_NHWC: + case FORMAT_CHWN: + graph_status = CalcElementCntOfFixedDims(dims, format, kDimSize4d, element_cnt); + break; + case FORMAT_C1HWNCoC0: + graph_status = CalcElementCntOfFixedDims(dims, format, kDimSizeC1hwncoc0, element_cnt); + break; + case FORMAT_NC1HWC0: + graph_status = CalcElementCntOfNc1hwc0(dims, data_type, element_cnt); + break; + case FORMAT_FRACTAL_Z: + graph_status = CalcElementCntOfFractalZ(dims, data_type, element_cnt); + break; + case FORMAT_FRACTAL_NZ: + case FORMAT_FRACTAL_ZZ: + graph_status = CalcElementCntByDims(dims, element_cnt); + break; + default: + GELOGE(GRAPH_FAILED, 
"unsupported format, format=%d(%s).", format, format_str.c_str()); + graph_status = GRAPH_FAILED; + break; + } + + const string type_str = TypeUtils::DataTypeToSerialString(data_type); + if (graph_status == GRAPH_SUCCESS) { + GELOGI( + "CalcTensorElementCnt end, format=%d(%s)," + " data_type=%d(%s), element_cnt=%ld.", + format, format_str.c_str(), data_type, type_str.c_str(), element_cnt); + } else { + GELOGE(GRAPH_FAILED, "CalcTensorElementCnt failed, format=%d(%s), data_type=%d(%s).", format, format_str.c_str(), + data_type, type_str.c_str()); + } + return graph_status; +} + +/// +/// Calculate tensor mem size. +/// @param shape tensor shape +/// @param format tensor format +/// @param data_type tensor data type +/// @param mem_size -1 means unknown shape,other means mem size +/// @return GRAPH_SUCCESS:success, other:failed +/// +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus TensorUtils::CalcTensorMemSize(const GeShape &shape, + Format format, + DataType data_type, + int64_t &mem_size) { + const string format_str = TypeUtils::FormatToSerialString(format); + const string type_str = TypeUtils::DataTypeToSerialString(data_type); + uint32_t type_size = 0; + bool result = TypeUtils::GetDataTypeLength(data_type, type_size); + if (!result) { + GELOGE(GRAPH_FAILED, "GetDataTypeLength failed, data_type=%d(%s).", data_type, type_str.c_str()); + return GRAPH_FAILED; + } + + std::vector dims = shape.GetDims(); + int64_t element_cnt = 0; + graphStatus status = CalcTensorElementCnt(dims, format, data_type, element_cnt); + if (status != GRAPH_SUCCESS) { + GELOGE(status, "CalcTensorElementCnt failed, status=%u format=%d(%s) data_type=%d(%s).", status, format, + format_str.c_str(), data_type, type_str.c_str()); + return status; + } + // Support unknown shape + if (element_cnt < 0) { + mem_size = kMemSizeUnknownShape; + GELOGI( + "element_cnt is unknown. 
" + "format=%d(%s), data_type=%d(%s), mem_size=%ld", + format, format_str.c_str(), data_type, type_str.c_str(), mem_size); + return GRAPH_SUCCESS; + } + auto type_size_int64 = static_cast(type_size); + if (CheckMultiplyOverflowInt64(element_cnt, type_size_int64)) { + GELOGE(GRAPH_FAILED, "CalcTensorMemSize overflow, when multiplying %ld and %ld, format=%d(%s), data_type=%d(%s).", + element_cnt, type_size_int64, format, format_str.c_str(), data_type, type_str.c_str()); + return GRAPH_FAILED; + } + mem_size = element_cnt * type_size_int64; + + GELOGI( + "CalcTensorMemSize end, " + "format=%d(%s), data_type=%d(%s), mem_size=%ld", + format, format_str.c_str(), data_type, type_str.c_str(), mem_size); + return GRAPH_SUCCESS; +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +TensorUtils::GetTensorMemorySizeInBytes(const GeTensorDesc &desc_temp, uint32_t &size_temp) { + graphStatus graph_status = GetTensorSizeInBytes(desc_temp, size_temp); + if (graph_status != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + // 64-byte alignment, if size is 0, align to 32 bytes + if (size_temp > (UINT32_MAX - kNum2 * kDataMemAlignSize)) { + GELOGW("The updated mem size %u is bigger than UINT32_MAX", size_temp); + } else { + size_temp = ((size_temp + kNum2 * kDataMemAlignSize - 1) / kDataMemAlignSize) * kDataMemAlignSize; + } + return GRAPH_SUCCESS; +} +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY graphStatus +TensorUtils::GetTensorSizeInBytes(const GeTensorDesc &desc_temp, uint32_t &size_temp) { + GeShape output_shape = desc_temp.GetShape(); + Format format = desc_temp.GetFormat(); + DataType data_type = desc_temp.GetDataType(); + int64_t output_mem_size = 0; + graphStatus graph_status = CalcTensorMemSize(output_shape, format, data_type, output_mem_size); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "CalcTensorMemSize failed!"); + return GRAPH_FAILED; + } + + if ((output_mem_size > UINT32_MAX) || (output_mem_size < 0)) { + GELOGE(GRAPH_FAILED, "After calc 
concat tensor memory size, output_mem_size = %ld, out of data range [0, %u]", + output_mem_size, UINT32_MAX); + return GRAPH_FAILED; + } + + size_temp = static_cast(output_mem_size); + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/common/graph/utils/type_utils.cc b/src/common/graph/utils/type_utils.cc new file mode 100644 index 00000000..61b57d80 --- /dev/null +++ b/src/common/graph/utils/type_utils.cc @@ -0,0 +1,376 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/utils/type_utils.h" +#include "debug/ge_util.h" + +namespace ge { +static const std::map kFormatToStringMap = { + {FORMAT_NCHW, "NCHW"}, + {FORMAT_NHWC, "NHWC"}, + {FORMAT_ND, "ND"}, + {FORMAT_NC1HWC0, "NC1HWC0"}, + {FORMAT_FRACTAL_Z, "FRACTAL_Z"}, + {FORMAT_NC1C0HWPAD, "NC1C0HWPAD"}, + {FORMAT_NHWC1C0, "NHWC1C0"}, + {FORMAT_FSR_NCHW, "FSR_NCHW"}, + {FORMAT_FRACTAL_DECONV, "FRACTAL_DECONV"}, + {FORMAT_C1HWNC0, "C1HWNC0"}, + {FORMAT_FRACTAL_DECONV_TRANSPOSE, "FRACTAL_DECONV_TRANSPOSE"}, + {FORMAT_FRACTAL_DECONV_SP_STRIDE_TRANS, "FRACTAL_DECONV_SP_STRIDE_TRANS"}, + {FORMAT_NC1HWC0_C04, "NC1HWC0_C04"}, + {FORMAT_FRACTAL_Z_C04, "FRACTAL_Z_C04"}, + {FORMAT_CHWN, "CHWN"}, + {FORMAT_FRACTAL_DECONV_SP_STRIDE8_TRANS, "DECONV_SP_STRIDE8_TRANS"}, + {FORMAT_NC1KHKWHWC0, "NC1KHKWHWC0"}, + {FORMAT_BN_WEIGHT, "BN_WEIGHT"}, + {FORMAT_FILTER_HWCK, "FILTER_HWCK"}, + {FORMAT_HWCN, "HWCN"}, + {FORMAT_HASHTABLE_LOOKUP_LOOKUPS, "LOOKUP_LOOKUPS"}, + {FORMAT_HASHTABLE_LOOKUP_KEYS, "LOOKUP_KEYS"}, + {FORMAT_HASHTABLE_LOOKUP_VALUE, "LOOKUP_VALUE"}, + {FORMAT_HASHTABLE_LOOKUP_OUTPUT, "LOOKUP_OUTPUT"}, + {FORMAT_HASHTABLE_LOOKUP_HITS, "LOOKUP_HITS"}, + {FORMAT_MD, "MD"}, + {FORMAT_NDHWC, "NDHWC"}, + {FORMAT_NCDHW, "NCDHW"}, + {FORMAT_DHWCK, "DHWCK"}, + {FORMAT_NDC1HWC0, "NDC1HWC0"}, + {FORMAT_FRACTAL_Z_3D, "FRACTAL_Z_3D"}, + {FORMAT_C1HWNCoC0, "C1HWNCoC0"}, + {FORMAT_FRACTAL_NZ, "FRACTAL_NZ"}, + {FORMAT_CN, "CN"}, + {FORMAT_NC, "NC"}, + {FORMAT_RESERVED, "FORMAT_RESERVED"}, + {FORMAT_ALL, "ALL"}}; + +static const std::unordered_set kInternalFormat = {"NC1HWC0", + "FRACTAL_Z", + "NC1C0HWPAD", + "NHWC1C0", + "FRACTAL_DECONV", + "C1HWNC0", + "FRACTAL_DECONV_TRANSPOSE", + "FRACTAL_DECONV_SP_STRIDE_TRANS", + "NC1HWC0_C04", + "FRACTAL_Z_C04", + "FRACTAL_DECONV_SP_STRIDE8_TRANS", + "NC1KHKWHWC0", + "C1HWNCoC0", + "FRACTAL_ZZ", + "FRACTAL_NZ", + "NDC1HWC0", + "FORMAT_FRACTAL_Z_3D"}; + +static const std::map kDataFormatMap = { + {"NCHW", FORMAT_NCHW}, {"NHWC", FORMAT_NHWC}, 
{"ND", FORMAT_ND}}; + +static const std::map kStringToFormatMap = { + {"NCHW", FORMAT_NCHW}, + {"NHWC", FORMAT_NHWC}, + {"ND", FORMAT_ND}, + {"NC1HWC0", FORMAT_NC1HWC0}, + {"FRACTAL_Z", FORMAT_FRACTAL_Z}, + {"NC1C0HWPAD", FORMAT_NC1C0HWPAD}, + {"NHWC1C0", FORMAT_NHWC1C0}, + {"FSR_NCHW", FORMAT_FSR_NCHW}, + {"FRACTAL_DECONV", FORMAT_FRACTAL_DECONV}, + {"C1HWNC0", FORMAT_C1HWNC0}, + {"FRACTAL_DECONV_TRANSPOSE", FORMAT_FRACTAL_DECONV_TRANSPOSE}, + {"FRACTAL_DECONV_SP_STRIDE_TRANS", FORMAT_FRACTAL_DECONV_SP_STRIDE_TRANS}, + {"NC1HWC0_C04", FORMAT_NC1HWC0_C04}, + {"FRACTAL_Z_C04", FORMAT_FRACTAL_Z_C04}, + {"CHWN", FORMAT_CHWN}, + {"DECONV_SP_STRIDE8_TRANS", FORMAT_FRACTAL_DECONV_SP_STRIDE8_TRANS}, + {"NC1KHKWHWC0", FORMAT_NC1KHKWHWC0}, + {"BN_WEIGHT", FORMAT_BN_WEIGHT}, + {"FILTER_HWCK", FORMAT_FILTER_HWCK}, + {"HWCN", FORMAT_HWCN}, + {"LOOKUP_LOOKUPS", FORMAT_HASHTABLE_LOOKUP_LOOKUPS}, + {"LOOKUP_KEYS", FORMAT_HASHTABLE_LOOKUP_KEYS}, + {"LOOKUP_VALUE", FORMAT_HASHTABLE_LOOKUP_VALUE}, + {"LOOKUP_OUTPUT", FORMAT_HASHTABLE_LOOKUP_OUTPUT}, + {"LOOKUP_HITS", FORMAT_HASHTABLE_LOOKUP_HITS}, + {"MD", FORMAT_MD}, + {"C1HWNCoC0", FORMAT_C1HWNCoC0}, + {"FRACTAL_NZ", FORMAT_FRACTAL_NZ}, + {"NDHWC", FORMAT_NDHWC}, + {"NCDHW", FORMAT_NCDHW}, + {"DHWCK", FORMAT_DHWCK}, + {"NDC1HWC0", FORMAT_NDC1HWC0}, + {"FRACTAL_Z_3D", FORMAT_FRACTAL_Z_3D}, + {"CN", FORMAT_CN}, + {"NC", FORMAT_NC}, + {"FORMAT_RESERVED", FORMAT_RESERVED}, + {"ALL", FORMAT_ALL}}; + +static const std::map kDataTypeToStringMap = { + {DT_UNDEFINED, "DT_UNDEFINED"}, // Used to indicate a DataType field has not been set. 
+ {DT_FLOAT, "DT_FLOAT"}, // float type + {DT_FLOAT16, "DT_FLOAT16"}, // fp16 type + {DT_INT8, "DT_INT8"}, // int8 type + {DT_INT16, "DT_INT16"}, // int16 type + {DT_UINT16, "DT_UINT16"}, // uint16 type + {DT_UINT8, "DT_UINT8"}, // uint8 type + {DT_INT32, "DT_INT32"}, // uint32 type + {DT_INT64, "DT_INT64"}, // int64 type + {DT_UINT32, "DT_UINT32"}, // unsigned int32 + {DT_UINT64, "DT_UINT64"}, // unsigned int64 + {DT_BOOL, "DT_BOOL"}, // bool type + {DT_DOUBLE, "DT_DOUBLE"}, // double type + {DT_DUAL, "DT_DUAL"}, // dual output type + {DT_DUAL_SUB_INT8, "DT_DUAL_SUB_INT8"}, // dual output int8 type + {DT_DUAL_SUB_UINT8, "DT_DUAL_SUB_UINT8"}, // dual output uint8 type + {DT_COMPLEX64, "DT_COMPLEX64"}, // complex64 type + {DT_COMPLEX128, "DT_COMPLEX128"}, // complex128 type + {DT_QINT8, "DT_QINT8"}, // qint8 type + {DT_QINT16, "DT_QINT16"}, // qint16 type + {DT_QINT32, "DT_QINT32"}, // qint32 type + {DT_QUINT8, "DT_QUINT8"}, // quint8 type + {DT_QUINT16, "DT_QUINT16"}, // quint16 type + {DT_RESOURCE, "DT_RESOURCE"}, // resource type + {DT_STRING_REF, "DT_STRING_REF"}, // string ref type + {DT_STRING, "DT_STRING"}, // string type +}; + +static const std::map kStringTodataTypeMap = { + {"DT_UNDEFINED", DT_UNDEFINED}, // Used to indicate a DataType field has not been set. 
+ {"DT_FLOAT", DT_FLOAT}, // float type + { + "DT_FLOAT16", + DT_FLOAT16, + }, // fp16 type + {"DT_INT8", DT_INT8}, // int8 type + {"DT_INT16", DT_INT16}, // int16 type + {"DT_UINT16", DT_UINT16}, // uint16 type + {"DT_UINT8", DT_UINT8}, // uint8 type + {"DT_INT32", DT_INT32}, // uint32 type + {"DT_INT64", DT_INT64}, // int64 type + {"DT_UINT32", DT_UINT32}, // unsigned int32 + {"DT_UINT64", DT_UINT64}, // unsigned int64 + {"DT_BOOL", DT_BOOL}, // bool type + {"DT_DOUBLE", DT_DOUBLE}, // double type + {"DT_DUAL", DT_DUAL}, // dual output type + {"DT_DUAL_SUB_INT8", DT_DUAL_SUB_INT8}, // dual output int8 type + {"DT_DUAL_SUB_UINT8", DT_DUAL_SUB_UINT8}, // dual output uint8 type + {"DT_COMPLEX64", DT_COMPLEX64}, // complex64 type + {"DT_COMPLEX128", DT_COMPLEX128}, // complex128 type + {"DT_QINT8", DT_QINT8}, // qint8 type + {"DT_QINT16", DT_QINT16}, // qint16 type + {"DT_QINT32", DT_QINT32}, // qint32 type + {"DT_QUINT8", DT_QUINT8}, // quint8 type + {"DT_QUINT16", DT_QUINT16}, // quint16 type + {"DT_RESOURCE", DT_RESOURCE}, // resource type + {"DT_STRING_REF", DT_STRING_REF}, // string ref type + {"DT_STRING", DT_STRING}, // string type +}; + +static const std::map kDataTypeToLength = { + {DT_BOOL, sizeof(bool)}, + {DT_INT64, sizeof(int64_t)}, + {DT_UINT64, sizeof(int64_t)}, + {DT_FLOAT, sizeof(float)}, + {DT_INT32, sizeof(int32_t)}, + {DT_UINT32, sizeof(int32_t)}, + {DT_INT8, sizeof(char)}, + {DT_UINT8, sizeof(char)}, + {DT_INT16, sizeof(int16_t)}, + {DT_UINT16, sizeof(int16_t)}, + {DT_FLOAT16, sizeof(int16_t)}, + {DT_DOUBLE, sizeof(double)}, + {DT_DUAL, sizeof(float) + sizeof(int8_t)}, + {DT_DUAL_SUB_INT8, sizeof(int8_t)}, + {DT_DUAL_SUB_UINT8, sizeof(uint8_t)}, + {DT_COMPLEX64, sizeof(int64_t)}, + {DT_COMPLEX128, sizeof(int64_t) * 2}, + {DT_QINT8, sizeof(int8_t)}, + {DT_QINT16, sizeof(int16_t)}, + {DT_QINT32, sizeof(int32_t)}, + {DT_QUINT8, sizeof(uint8_t)}, + {DT_QUINT16, sizeof(uint16_t)}, + {DT_STRING_REF, sizeof(uint64_t) * 2}, + {DT_STRING, 
sizeof(uint64_t)}, + {DT_RESOURCE, sizeof(uint64_t)}, +}; + +bool TypeUtils::IsDataTypeValid(DataType dt) { + uint32_t num = static_cast(dt); + GE_CHK_BOOL_EXEC((num <= DT_UNDEFINED), return false, "The DataType is invalid"); + return true; +} + +std::string TypeUtils::DataTypeToSerialString(DataType data_type) { + auto it = kDataTypeToStringMap.find(data_type); + if (it != kDataTypeToStringMap.end()) { + return it->second; + } else { + GELOGE(GRAPH_FAILED, "DataTypeToSerialString: datatype not support %u", data_type); + return "UNDEFINED"; + } +} + +DataType TypeUtils::SerialStringToDataType(const std::string &str) { + auto it = kStringTodataTypeMap.find(str); + if (it != kStringTodataTypeMap.end()) { + return it->second; + } else { + GELOGE(GRAPH_FAILED, "SerialStringToDataType: datatype not support %s", str.c_str()); + return DT_UNDEFINED; + } +} + +bool TypeUtils::IsFormatValid(Format format) { + uint32_t num = static_cast(format); + GE_CHK_BOOL_EXEC((num <= FORMAT_RESERVED), return false, "The Format is invalid"); + return true; +} + +bool TypeUtils::IsInternalFormat(Format format) { + std::string serial_format = FormatToSerialString(format); + auto iter = kInternalFormat.find(serial_format); + bool result = (iter == kInternalFormat.end()) ? 
false : true; + return result; +} + +std::string TypeUtils::FormatToSerialString(Format format) { + auto it = kFormatToStringMap.find(format); + if (it != kFormatToStringMap.end()) { + return it->second; + } else { + GELOGE(GRAPH_FAILED, "Format not support %u", format); + return "RESERVED"; + } +} +Format TypeUtils::SerialStringToFormat(const std::string &str) { + auto it = kStringToFormatMap.find(str); + if (it != kStringToFormatMap.end()) { + return it->second; + } else { + GELOGE(GRAPH_FAILED, "Format not support %s", str.c_str()); + return FORMAT_RESERVED; + } +} + +Format TypeUtils::DataFormatToFormat(const std::string &str) { + auto it = kDataFormatMap.find(str); + if (it != kDataFormatMap.end()) { + return it->second; + } else { + GELOGE(GRAPH_FAILED, "Format not support %s", str.c_str()); + return FORMAT_RESERVED; + } +} + +static inline void CopyDataFromBuffer(vector &data, const Buffer &buffer) { + data.clear(); + if (buffer.GetData() != nullptr && buffer.GetSize() != 0) { + data.assign(buffer.GetData(), buffer.GetData() + buffer.GetSize()); + } +} + +graphStatus Usr2DefQuantizeFactor(const UsrQuantizeFactor &usr, QuantizeFactor &def) { + def.scale_mode = uint32_t(usr.scale_mode); + def.set_scale_value(usr.scale_value.data(), usr.scale_value.size()); + def.scale_offset = usr.scale_offset; + def.set_offset_data_value(usr.offset_data_value.data(), usr.offset_data_value.size()); + def.offset_data_offset = usr.offset_data_offset; + def.set_offset_weight_value(usr.offset_weight_value.data(), usr.offset_weight_value.size()); + def.offset_weight_offset = usr.offset_weight_offset; + def.set_offset_pad_value(usr.offset_pad_value.data(), usr.offset_pad_value.size()); + def.offset_pad_offset = usr.offset_pad_offset; + return GRAPH_SUCCESS; +} +graphStatus Def2UsrQuantizeFactor(const QuantizeFactor &def, UsrQuantizeFactor &usr) { + usr.scale_mode = UsrQuantizeScaleMode(def.scale_mode); + CopyDataFromBuffer(usr.scale_value, def.scale_value); + usr.scale_offset = 
def.scale_offset; + CopyDataFromBuffer(usr.offset_data_value, def.offset_data_value); + usr.offset_data_offset = def.offset_data_offset; + CopyDataFromBuffer(usr.offset_weight_value, def.offset_weight_value); + usr.offset_weight_offset = def.offset_weight_offset; + CopyDataFromBuffer(usr.offset_pad_value, def.offset_pad_value); + usr.offset_pad_offset = def.offset_pad_offset; + return GRAPH_SUCCESS; +} +graphStatus Usr2DefUsrQuantizeCalcFactor(const UsrQuantizeCalcFactor &usr, QuantizeCalcFactor &def) { + def.set_offsetw(usr.offsetw.data(), usr.offsetw.size()); + def.offsetw_offset = usr.offsetw_offset; + def.set_offsetd(usr.offsetd.data(), usr.offsetd.size()); + def.offsetd_offset = usr.offsetd_offset; + def.set_scalereq(usr.scalereq.data(), usr.scalereq.size()); + def.scaledreq_offset = usr.scaledreq_offset; + def.set_offsetdnext(usr.offsetdnext.data(), usr.offsetdnext.size()); + def.offsetdnext_offset = usr.offsetdnext_offset; + return GRAPH_SUCCESS; +} +graphStatus Def2UsrQuantizeCalcFactor(const QuantizeCalcFactor &def, UsrQuantizeCalcFactor &usr) { + CopyDataFromBuffer(usr.offsetw, def.offsetw); + usr.offsetw_offset = def.offsetw_offset; + CopyDataFromBuffer(usr.offsetd, def.offsetd); + usr.offsetd_offset = def.offsetd_offset; + CopyDataFromBuffer(usr.scalereq, def.scalereq); + usr.scaledreq_offset = def.scaledreq_offset; + CopyDataFromBuffer(usr.offsetdnext, def.offsetdnext); + usr.offsetdnext_offset = def.offsetdnext_offset; + return GRAPH_SUCCESS; +} +graphStatus TypeUtils::Usr2DefQuantizeFactorParams(const UsrQuantizeFactorParams &usr, QuantizeFactorParams &def) { + def.quantize_algo = uint32_t(usr.quantize_algo); + def.scale_type = uint32_t(usr.scale_type); + GE_RETURN_WITH_LOG_IF_ERROR(Usr2DefQuantizeFactor(usr.quantize_param, def.quantize_param), + "Usr2DefQuantizeFactor quantize_param failed"); + GE_RETURN_WITH_LOG_IF_ERROR(Usr2DefQuantizeFactor(usr.dequantize_param, def.dequantize_param), + "Usr2DefQuantizeFactor dequantize_param failed"); + 
GE_RETURN_WITH_LOG_IF_ERROR(Usr2DefQuantizeFactor(usr.requantize_param, def.requantize_param), + "Usr2DefQuantizeFactor requantize_param failed"); + GE_RETURN_WITH_LOG_IF_ERROR(Usr2DefUsrQuantizeCalcFactor(usr.quantizecalc_param, def.quantizecalc_param), + "Usr2DefQuantizeFactor quantizecalc_param failed"); + return GRAPH_SUCCESS; +} +graphStatus TypeUtils::Def2UsrQuantizeFactorParams(const QuantizeFactorParams &def, UsrQuantizeFactorParams &usr) { + usr.quantize_algo = UsrQuantizeAlgorithm(def.quantize_algo); + usr.scale_type = UsrQuantizeScaleType(def.scale_type); + GE_RETURN_WITH_LOG_IF_ERROR(Def2UsrQuantizeFactor(def.quantize_param, usr.quantize_param), + "Def2UsrQuantizeFactor quantize_param failed"); + GE_RETURN_WITH_LOG_IF_ERROR(Def2UsrQuantizeFactor(def.dequantize_param, usr.dequantize_param), + "Def2UsrQuantizeFactor dequantize_param failed"); + GE_RETURN_WITH_LOG_IF_ERROR(Def2UsrQuantizeFactor(def.requantize_param, usr.requantize_param), + "Def2UsrQuantizeFactor requantize_param failed"); + GE_RETURN_WITH_LOG_IF_ERROR(Def2UsrQuantizeCalcFactor(def.quantizecalc_param, usr.quantizecalc_param), + "Def2UsrQuantizeCalcFactor quantizecalc_param failed"); + return GRAPH_SUCCESS; +} +bool TypeUtils::GetDataTypeLength(ge::DataType data_type, uint32_t &length) { + auto it = kDataTypeToLength.find(data_type); + if (it != kDataTypeToLength.end()) { + length = it->second; + return true; + } else { + GELOGE(GRAPH_FAILED, "data_type not support %d", data_type); + return false; + } +} +bool TypeUtils::CheckUint64MulOverflow(uint64_t a, uint32_t b) { + // Not overflow + if (a == 0) { + return false; + } + if ((ULLONG_MAX / a) >= b) { + return false; + } + return true; +} +} // namespace ge diff --git a/src/ge/CMakeLists.txt b/src/ge/CMakeLists.txt new file mode 100755 index 00000000..0b0c688c --- /dev/null +++ b/src/ge/CMakeLists.txt @@ -0,0 +1,474 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); 
+# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +# libge.so & libge_train.so +# will later be integrated into libgraph_runner.so, works for both training and inference +# compiling proto files generates some warnings, use no-unused-variable to suppress them +set(CMAKE_CXX_FLAGS "-Wno-unused-variable ${CMAKE_CXX_FLAGS}") +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../proto/fusion_model.proto" + ) + +file(GLOB_RECURSE PROTO_HEADER_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../proto/om.proto" + "../proto/task.proto" + "../proto/insert_op.proto" + "../proto/ge_ir.proto" + "../proto/fwk_adapter.proto" + "../proto/op_mapping_info.proto" + ) +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) +ge_protobuf_generate(ge PROTO_HEADER_SRCS PROTO_HEADER_HDRS ${PROTO_HEADER_LIST}) +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}) +include_directories(${GE_SOURCE_DIR}/src) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/framework/common) +include_directories(${GE_SOURCE_DIR}/inc/runtime) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) 
+include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) +include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/eigen) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +######### libge_train.so ############# +# need to remove dependencies on pb files later +file(GLOB_RECURSE TRAIN_SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "common/formats/format_transfers/*.cc" + "common/formats/formats.cc" + "common/formats/utils/formats_trans_utils.cc" + "common/fp16_t.cc" + "common/ge/plugin_manager.cc" + "common/profiling/profiling_manager.cc" + "engine_manager/dnnengine_manager.cc" + "generator/ge_generator.cc" + "generator/generator_api.cc" + "graph/build/graph_build.cc" + "graph/build/logical_stream_allocator.cc" + "graph/build/model_builder.cc" + "graph/build/optimize_stream_graph.cc" + "graph/build/run_context.cc" + "graph/build/stream_allocator.cc" + "graph/build/task_generator.cc" + "graph/common/bcast.cc" + "graph/common/omg_util.cc" + "graph/common/transop_util.cc" + "graph/execute/graph_execute.cc" + "graph/load/graph_loader.cc" + "graph/load/new_model_manager/data_dumper.cc" + "graph/load/new_model_manager/data_inputer.cc" + "graph/load/new_model_manager/davinci_model.cc" + "graph/load/new_model_manager/davinci_model_parser.cc" + "graph/load/new_model_manager/model_manager.cc" + "graph/load/new_model_manager/model_output.cc" + "graph/load/new_model_manager/model_utils.cc" + "graph/load/new_model_manager/task_info/end_graph_task_info.cc" + "graph/load/new_model_manager/task_info/event_record_task_info.cc" + "graph/load/new_model_manager/task_info/event_wait_task_info.cc" + "graph/load/new_model_manager/task_info/fusion_start_task_info.cc" + "graph/load/new_model_manager/task_info/fusion_stop_task_info.cc" + "graph/load/new_model_manager/task_info/hccl_task_info.cc" + 
"graph/load/new_model_manager/task_info/kernel_ex_task_info.cc" + "graph/load/new_model_manager/task_info/kernel_task_info.cc" + "graph/load/new_model_manager/task_info/label_goto_task_info.cc" + "graph/load/new_model_manager/task_info/label_set_task_info.cc" + "graph/load/new_model_manager/task_info/memcpy_async_task_info.cc" + "graph/load/new_model_manager/task_info/profiler_trace_task_info.cc" + "graph/load/new_model_manager/task_info/stream_active_task_info.cc" + "graph/load/new_model_manager/task_info/stream_switch_task_info.cc" + "graph/load/new_model_manager/task_info/task_info.cc" + "graph/load/new_model_manager/tbe_handle_store.cc" + "graph/load/output/output.cc" + "graph/manager/custom/custom_op.cc" + "graph/manager/graph_context.cc" + "graph/manager/graph_manager.cc" + "graph/manager/graph_manager_utils.cc" + "graph/manager/graph_mem_allocator.cc" + "graph/manager/graph_var_manager.cc" + "graph/manager/model_manager/event_manager.cc" + "graph/manager/trans_var_data_utils.cc" + "graph/manager/util/debug.cc" + "graph/manager/util/hcom_util.cc" + "graph/manager/util/node_searcher/need_rebuild_node_searcher.cc" + "graph/manager/util/rt_context_util.cc" + "graph/manager/util/variable_accelerate_ctrl.cc" + "graph/optimize/graph_functiondef.cc" + "graph/optimize/graph_optimize.cc" + "graph/optimize/graph_optimizer.cc" + "graph/optimize/optimizer/allreduce_fusion_pass.cc" + "graph/optimize/summary_optimize.cc" + "graph/partition/engine_place.cc" + "graph/partition/graph_partition.cc" + "graph/passes/addn_pass.cc" + "graph/passes/aicpu_constant_folding_pass.cc" + "graph/passes/assert_pass.cc" + "graph/passes/atomic_addr_clean_pass.cc" + "graph/passes/base_pass.cc" + "graph/passes/cast_translate_pass.cc" + "graph/passes/compile_nodes_pass.cc" + "graph/passes/constant_folding_pass.cc" + "graph/passes/constant_fuse_same_pass.cc" + "graph/passes/control_op_attr_pass.cc" + "graph/passes/control_trigger_pass.cc" + "graph/passes/dimension_adjust_pass.cc" + 
"graph/passes/dimension_compute_pass.cc" + "graph/passes/dropout_pass.cc" + "graph/passes/end_graph_pass.cc" + "graph/passes/enter_pass.cc" + "graph/passes/flow_ctrl_pass.cc" + "graph/passes/folding_kernel/add_kernel.cc" + "graph/passes/folding_kernel/broadcast_args_kernel.cc" + "graph/passes/folding_kernel/broadcast_gradient_args_kernel.cc" + "graph/passes/folding_kernel/cast_kernel.cc" + "graph/passes/folding_kernel/concat_offset_kernel.cc" + "graph/passes/folding_kernel/concat_v2_kernel.cc" + "graph/passes/folding_kernel/dynamic_stitch_kernel.cc" + "graph/passes/folding_kernel/empty_kernel.cc" + "graph/passes/folding_kernel/expanddims_kernel.cc" + "graph/passes/folding_kernel/fill_kernel.cc" + "graph/passes/folding_kernel/floordiv_kernel.cc" + "graph/passes/folding_kernel/floormod_kernel.cc" + "graph/passes/folding_kernel/gather_v2_kernel.cc" + "graph/passes/folding_kernel/greater_kernel.cc" + "graph/passes/folding_kernel/kernel_utils.cc" + "graph/passes/folding_kernel/maximum_kernel.cc" + "graph/passes/folding_kernel/mul_kernel.cc" + "graph/passes/folding_kernel/pack_kernel.cc" + "graph/passes/folding_kernel/permute_kernel.cc" + "graph/passes/folding_kernel/range_kernel.cc" + "graph/passes/folding_kernel/rank_kernel.cc" + "graph/passes/folding_kernel/reduce_prod_kernel.cc" + "graph/passes/folding_kernel/reshape_kernel.cc" + "graph/passes/folding_kernel/rsqrt_kernel.cc" + "graph/passes/folding_kernel/shape_kernel.cc" + "graph/passes/folding_kernel/shape_n_kernel.cc" + "graph/passes/folding_kernel/size_kernel.cc" + "graph/passes/folding_kernel/slice_kernel.cc" + "graph/passes/folding_kernel/squeeze_kernel.cc" + "graph/passes/folding_kernel/ssd_prior_box_kernel.cc" + "graph/passes/folding_kernel/strided_slice_kernel.cc" + "graph/passes/folding_kernel/sub_kernel.cc" + "graph/passes/folding_kernel/transdata_kernel.cc" + "graph/passes/folding_pass.cc" + "graph/passes/get_original_format_pass.cc" + "graph/passes/guarantee_const_pass.cc" + 
"graph/passes/hccl_memcpy_pass.cc" + "graph/passes/identify_reference_pass.cc" + "graph/passes/identity_pass.cc" + "graph/passes/infershape_pass.cc" + "graph/passes/isolated_op_remove_pass.cc" + "graph/passes/iterator_op_pass.cc" + "graph/passes/link_gen_mask_nodes_pass.cc" + "graph/passes/merge_pass.cc" + "graph/passes/multi_batch_pass.cc" + "graph/passes/net_output_pass.cc" + "graph/passes/next_iteration_pass.cc" + "graph/passes/no_reshape_op_remove_pass.cc" + "graph/passes/no_use_reshape_remove_pass.cc" + "graph/passes/pass_manager.cc" + "graph/passes/pass_utils.cc" + "graph/passes/permute_pass.cc" + "graph/passes/placeholder_with_default_pass.cc" + "graph/passes/prevent_gradient_pass.cc" + "graph/passes/print_op_pass.cc" + "graph/passes/prune_pass.cc" + "graph/passes/reshape_remove_pass.cc" + "graph/passes/resource_pair_add_control_pass.cc" + "graph/passes/resource_pair_remove_control_pass.cc" + "graph/passes/same_transdata_breadth_fusion_pass.cc" + "graph/passes/save_pass.cc" + "graph/passes/shape_operate_op_remove_pass.cc" + "graph/passes/snapshot_pass.cc" + "graph/passes/stop_gradient_pass.cc" + "graph/passes/switch_logic_remove_pass.cc" + "graph/passes/switch_op_pass.cc" + "graph/passes/switch_pass.cc" + "graph/passes/transop_breadth_fusion_pass.cc" + "graph/passes/transop_depth_fusion_pass.cc" + "graph/passes/transop_nearby_allreduce_fusion_pass.cc" + "graph/passes/transop_without_reshape_fusion_pass.cc" + "graph/passes/transpose_transdata_pass.cc" + "graph/passes/unused_const_pass.cc" + "graph/passes/unused_op_remove_pass.cc" + "graph/passes/update_net_output_pass.cc" + "graph/passes/var_is_initialized_op_pass.cc" + "graph/passes/variable_format_pass.cc" + "graph/passes/variable_op_pass.cc" + "graph/passes/variable_prepare_op_pass.cc" + "graph/passes/variable_ref_delete_op_pass.cc" + "graph/preprocess/graph_preprocess.cc" + "graph/preprocess/insert_op/base_insert_op.cc" + "graph/preprocess/insert_op/ge_aipp_op.cc" + 
"graph/preprocess/insert_op/util_insert_aipp_op.cc" + "graph/preprocess/multi_batch_copy_graph.cc" + "init/gelib.cc" + "model/ge_model.cc" + "omm/csa_interact.cc" + "opskernel_manager/ops_kernel_manager.cc" + "session/inner_session.cc" + "session/session_manager.cc" + "single_op/single_op.cc" + "single_op/single_op_manager.cc" + "single_op/single_op_model.cc" + "single_op/stream_resource.cc" + "single_op/task/build_task_utils.cc" + "single_op/task/op_task.cc" + "single_op/task/tbe_task_builder.cc" + ) + + +######### libge_train.so ############# +add_library(ge_train SHARED ${TRAIN_SRC_LIST} ${PROTO_SRCS} ${PROTO_HEADER_HDRS}) +target_compile_definitions(ge_train PRIVATE + PROTOBUF_INLINE_NOT_IN_HEADERS=0 + DAVINCI_SUPPORT_PROFILING + REUSE_MEMORY=1 + DAVINCI_TRAIN + DAVINCI_CLOUD + FMK_SUPPORT_DEBUG + PLATFORM_CLOUD) +target_link_libraries(ge_train + graph + ge_common + "-Wl,--whole-archive" + ge_memory + "-Wl,--no-whole-archive" + ${PROTOBUF_LIBRARY} + ${register} + ${c_sec} + ${slog} + ${mmpa} + ${hccl} + ${msprof} + ${runtime} + ${cce} + ${resouce} + rt + dl) + +######### libge.so ############# +# need to remove dependencies on pb files later +file(GLOB_RECURSE INFER_SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "common/formats/format_transfers/*.cc" + "common/formats/formats.cc" + "common/formats/utils/formats_trans_utils.cc" + "common/fp16_t.cc" + "common/ge/plugin_manager.cc" + "common/profiling/profiling_manager.cc" + "engine_manager/dnnengine_manager.cc" + "generator/ge_generator.cc" + "generator/generator_api.cc" + "graph/build/graph_build.cc" + "graph/build/logical_stream_allocator.cc" + "graph/build/model_builder.cc" + "graph/build/optimize_stream_graph.cc" + "graph/build/run_context.cc" + "graph/build/stream_allocator.cc" + "graph/build/task_generator.cc" + "graph/common/bcast.cc" + "graph/common/omg_util.cc" + "graph/common/transop_util.cc" + "graph/execute/graph_execute.cc" + "graph/load/graph_loader.cc" + "graph/load/new_model_manager/data_dumper.cc" 
+ "graph/load/new_model_manager/data_inputer.cc" + "graph/load/new_model_manager/davinci_model.cc" + "graph/load/new_model_manager/davinci_model_parser.cc" + "graph/load/new_model_manager/model_manager.cc" + "graph/load/new_model_manager/model_output.cc" + "graph/load/new_model_manager/model_utils.cc" + "graph/load/new_model_manager/task_info/end_graph_task_info.cc" + "graph/load/new_model_manager/task_info/event_record_task_info.cc" + "graph/load/new_model_manager/task_info/event_wait_task_info.cc" + "graph/load/new_model_manager/task_info/fusion_start_task_info.cc" + "graph/load/new_model_manager/task_info/fusion_stop_task_info.cc" + "graph/load/new_model_manager/task_info/kernel_ex_task_info.cc" + "graph/load/new_model_manager/task_info/kernel_task_info.cc" + "graph/load/new_model_manager/task_info/label_goto_task_info.cc" + "graph/load/new_model_manager/task_info/label_set_task_info.cc" + "graph/load/new_model_manager/task_info/memcpy_async_task_info.cc" + "graph/load/new_model_manager/task_info/profiler_trace_task_info.cc" + "graph/load/new_model_manager/task_info/stream_active_task_info.cc" + "graph/load/new_model_manager/task_info/stream_switch_task_info.cc" + "graph/load/new_model_manager/task_info/task_info.cc" + "graph/load/new_model_manager/tbe_handle_store.cc" + "graph/load/output/output.cc" + "graph/manager/custom/custom_op.cc" + "graph/manager/graph_context.cc" + "graph/manager/graph_manager.cc" + "graph/manager/graph_manager_utils.cc" + "graph/manager/graph_mem_allocator.cc" + "graph/manager/graph_var_manager.cc" + "graph/manager/model_manager/event_manager.cc" + "graph/manager/trans_var_data_utils.cc" + "graph/manager/util/debug.cc" + "graph/manager/util/node_searcher/need_rebuild_node_searcher.cc" + "graph/manager/util/rt_context_util.cc" + "graph/manager/util/variable_accelerate_ctrl.cc" + "graph/optimize/graph_functiondef.cc" + "graph/optimize/graph_optimize.cc" + "graph/optimize/graph_optimizer.cc" + 
"graph/optimize/optimizer/allreduce_fusion_inference_pass.cc" + "graph/optimize/summary_optimize.cc" + "graph/partition/engine_place.cc" + "graph/partition/graph_partition.cc" + "graph/passes/addn_pass.cc" + "graph/passes/aicpu_constant_folding_pass.cc" + "graph/passes/assert_pass.cc" + "graph/passes/atomic_addr_clean_pass.cc" + "graph/passes/base_pass.cc" + "graph/passes/cast_translate_pass.cc" + "graph/passes/compile_nodes_pass.cc" + "graph/passes/constant_folding_pass.cc" + "graph/passes/constant_fuse_same_pass.cc" + "graph/passes/control_op_attr_pass.cc" + "graph/passes/control_trigger_pass.cc" + "graph/passes/dimension_adjust_pass.cc" + "graph/passes/dimension_compute_pass.cc" + "graph/passes/dropout_pass.cc" + "graph/passes/end_graph_pass.cc" + "graph/passes/enter_pass.cc" + "graph/passes/flow_ctrl_pass.cc" + "graph/passes/folding_kernel/add_kernel.cc" + "graph/passes/folding_kernel/broadcast_args_kernel.cc" + "graph/passes/folding_kernel/broadcast_gradient_args_kernel.cc" + "graph/passes/folding_kernel/cast_kernel.cc" + "graph/passes/folding_kernel/concat_offset_kernel.cc" + "graph/passes/folding_kernel/concat_v2_kernel.cc" + "graph/passes/folding_kernel/dynamic_stitch_kernel.cc" + "graph/passes/folding_kernel/empty_kernel.cc" + "graph/passes/folding_kernel/expanddims_kernel.cc" + "graph/passes/folding_kernel/fill_kernel.cc" + "graph/passes/folding_kernel/floordiv_kernel.cc" + "graph/passes/folding_kernel/floormod_kernel.cc" + "graph/passes/folding_kernel/gather_v2_kernel.cc" + "graph/passes/folding_kernel/greater_kernel.cc" + "graph/passes/folding_kernel/kernel_utils.cc" + "graph/passes/folding_kernel/maximum_kernel.cc" + "graph/passes/folding_kernel/mul_kernel.cc" + "graph/passes/folding_kernel/pack_kernel.cc" + "graph/passes/folding_kernel/permute_kernel.cc" + "graph/passes/folding_kernel/range_kernel.cc" + "graph/passes/folding_kernel/rank_kernel.cc" + "graph/passes/folding_kernel/reduce_prod_kernel.cc" + "graph/passes/folding_kernel/reshape_kernel.cc" + 
"graph/passes/folding_kernel/rsqrt_kernel.cc" + "graph/passes/folding_kernel/shape_kernel.cc" + "graph/passes/folding_kernel/shape_n_kernel.cc" + "graph/passes/folding_kernel/size_kernel.cc" + "graph/passes/folding_kernel/slice_kernel.cc" + "graph/passes/folding_kernel/squeeze_kernel.cc" + "graph/passes/folding_kernel/ssd_prior_box_kernel.cc" + "graph/passes/folding_kernel/strided_slice_kernel.cc" + "graph/passes/folding_kernel/sub_kernel.cc" + "graph/passes/folding_kernel/transdata_kernel.cc" + "graph/passes/folding_pass.cc" + "graph/passes/get_original_format_pass.cc" + "graph/passes/guarantee_const_pass.cc" + "graph/passes/hccl_memcpy_pass.cc" + "graph/passes/identify_reference_pass.cc" + "graph/passes/identity_pass.cc" + "graph/passes/infershape_pass.cc" + "graph/passes/isolated_op_remove_pass.cc" + "graph/passes/iterator_op_pass.cc" + "graph/passes/link_gen_mask_nodes_pass.cc" + "graph/passes/merge_pass.cc" + "graph/passes/multi_batch_pass.cc" + "graph/passes/net_output_pass.cc" + "graph/passes/next_iteration_pass.cc" + "graph/passes/no_reshape_op_remove_pass.cc" + "graph/passes/no_use_reshape_remove_pass.cc" + "graph/passes/pass_manager.cc" + "graph/passes/pass_utils.cc" + "graph/passes/permute_pass.cc" + "graph/passes/placeholder_with_default_pass.cc" + "graph/passes/prevent_gradient_pass.cc" + "graph/passes/print_op_pass.cc" + "graph/passes/prune_pass.cc" + "graph/passes/reshape_remove_pass.cc" + "graph/passes/resource_pair_add_control_pass.cc" + "graph/passes/resource_pair_remove_control_pass.cc" + "graph/passes/same_transdata_breadth_fusion_pass.cc" + "graph/passes/save_pass.cc" + "graph/passes/shape_operate_op_remove_pass.cc" + "graph/passes/snapshot_pass.cc" + "graph/passes/stop_gradient_pass.cc" + "graph/passes/switch_logic_remove_pass.cc" + "graph/passes/switch_op_pass.cc" + "graph/passes/switch_pass.cc" + "graph/passes/transop_breadth_fusion_pass.cc" + "graph/passes/transop_depth_fusion_pass.cc" + 
"graph/passes/transop_nearby_allreduce_fusion_pass.cc" + "graph/passes/transop_without_reshape_fusion_pass.cc" + "graph/passes/transpose_transdata_pass.cc" + "graph/passes/unused_const_pass.cc" + "graph/passes/unused_op_remove_pass.cc" + "graph/passes/update_net_output_pass.cc" + "graph/passes/var_is_initialized_op_pass.cc" + "graph/passes/variable_format_pass.cc" + "graph/passes/variable_op_pass.cc" + "graph/passes/variable_prepare_op_pass.cc" + "graph/passes/variable_ref_delete_op_pass.cc" + "graph/preprocess/graph_preprocess.cc" + "graph/preprocess/insert_op/base_insert_op.cc" + "graph/preprocess/insert_op/ge_aipp_op.cc" + "graph/preprocess/insert_op/util_insert_aipp_op.cc" + "graph/preprocess/multi_batch_copy_graph.cc" + "init/gelib.cc" + "model/ge_model.cc" + "omm/csa_interact.cc" + "opskernel_manager/ops_kernel_manager.cc" + "session/inner_session.cc" + "session/session_manager.cc" + "single_op/single_op.cc" + "single_op/single_op_manager.cc" + "single_op/single_op_model.cc" + "single_op/stream_resource.cc" + "single_op/task/build_task_utils.cc" + "single_op/task/op_task.cc" + "single_op/task/tbe_task_builder.cc" + ) + +add_library(ge SHARED ${INFER_SRC_LIST} ${PROTO_SRCS} ${PROTO_HEADER_HDRS}) +target_compile_definitions(ge PRIVATE + PROTOBUF_INLINE_NOT_IN_HEADERS=0 + DAVINCI_SUPPORT_PROFILING + REUSE_MEMORY=1 + FMK_HOST_INFER + PLATFORM_CLOUD) +target_link_libraries(ge + graph + ge_common + "-Wl,--whole-archive" + ge_memory + "-Wl,--no-whole-archive" + ${PROTOBUF_LIBRARY} + ${register} + ${c_sec} + ${slog} + ${mmpa} + ${msprof} + ${runtime} + ${cce} + ${resouce} + rt + dl) diff --git a/src/ge/client/CMakeLists.txt b/src/ge/client/CMakeLists.txt new file mode 100755 index 00000000..3f034c9c --- /dev/null +++ b/src/ge/client/CMakeLists.txt @@ -0,0 +1,96 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +# libge_client.so & libge_client_train.so +# add all proto files, generate corresponding .h and .cc files +set(CMAKE_CXX_FLAGS "-Wno-unused-variable ${CMAKE_CXX_FLAGS}") +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../../proto/ge_api.proto" + ) + +file(GLOB_RECURSE PROTO_HEADER_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../../proto/ge_ir.proto" + "../../proto/task.proto" + "../../proto/om.proto" + "../../proto/insert_op.proto" + ) + +file(GLOB_RECURSE SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "ge_api.cc" + ) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) +ge_protobuf_generate(ge PROTO_HEADER_SRCS PROTO_HEADER_HDRS ${PROTO_HEADER_LIST}) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${GE_SOURCE_DIR}/src) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/common) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) 
+include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +######### libge_client_train.so ############# +add_library(ge_client_train SHARED ${SRC_LIST} ${PROTO_SRCS} ${PROTO_HEADER_HDRS}) +target_compile_definitions(ge_client_train PRIVATE + Werror + PROTOBUF_INLINE_NOT_IN_HEADERS=0 + REUSE_MEMORY=1 + PLATFORM_CLOUD + DAVINCI_CLOUD) +target_link_libraries(ge_client_train + graph + ge_train + ge_common + ${PROTOBUF_LIBRARY} + ${register} + ${c_sec} + ${slog} + ${mmpa} + ${runtime} + ${cce} + rt + dl) + +############ libge_client.so ################ +add_library(ge_client SHARED ${SRC_LIST} ${PROTO_SRCS} ${PROTO_HEADER_HDRS}) +target_compile_definitions(ge_client PRIVATE + Werror + PROTOBUF_INLINE_NOT_IN_HEADERS=0 + REUSE_MEMORY=1 + PLATFORM_CLOUD) +target_link_libraries(ge_client + graph + ge + ge_common + ${PROTOBUF_LIBRARY} + ${register} + ${c_sec} + ${slog} + ${mmpa} + ${runtime} + ${cce} + rt + dl) diff --git a/src/ge/client/ge_api.cc b/src/ge/client/ge_api.cc new file mode 100644 index 00000000..a2d81695 --- /dev/null +++ b/src/ge/client/ge_api.cc @@ -0,0 +1,390 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge/ge_api.h" + +#include + +#include "common/debug/log.h" +#include "common/ge/datatype_util.h" +#include "common/ge/tbe_plugin_manager.h" +#include "framework/common/debug/ge_log.h" +#include "graph/detail/model_serialize_imp.h" +#include "graph/model_serialize.h" +#include "graph/opsproto_manager.h" +#include "graph/utils/tensor_adapter.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" +#include "proto/ge_api.pb.h" +#include "register/op_registry.h" +#include "session/session_manager.h" + +using domi::GetContext; +using domi::OpRegistry; +using std::map; +using std::string; +using std::vector; + +namespace ge { +static const int32_t kMaxStrLen = 128; +static bool kGeInitialized = false; +static std::mutex kGeReleaseMutex; // GEFinalize and ~Session use + +void GetOpsProtoPath(std::string &opsproto_path) { + GELOGI("Enter get ops proto path schedule"); + const char *path_env = std::getenv("ASCEND_OPP_PATH"); + if (path_env != nullptr) { + std::string path = path_env; + opsproto_path = (path + "/op_proto/built-in/" + ":") + (path + "/op_proto/custom/"); + GELOGI("Get opsproto so path from env: %s", path.c_str()); + return; + } + std::string path_base = PluginManager::GetPath(); + GELOGI("path_base is %s", path_base.c_str()); + path_base = path_base.substr(0, path_base.rfind('/')); + path_base = path_base.substr(0, path_base.rfind('/') + 1); + opsproto_path = (path_base + "ops/op_proto/built-in/" + ":") + (path_base + "ops/op_proto/custom/"); +} + +Status CheckDumpAndReuseMemory(const std::map &options) { + const int kDecimal = 10; + auto dump_op_env = std::getenv("DUMP_OP"); + int dump_op_flag = (dump_op_env != nullptr) ? 
std::strtol(dump_op_env, nullptr, kDecimal) : 0; + auto disable_reuse_memory_iter = options.find("ge.exec.disableReuseMemory"); + if (disable_reuse_memory_iter != options.end()) { + if (disable_reuse_memory_iter->second == "0") { + GELOGD("ge.exec.disableReuseMemory=0, reuse memory is open"); + if (dump_op_flag) { + GELOGW("Will dump incorrect op data with GE Option ge.exec.disableReuseMemory=0"); + } + } else if (disable_reuse_memory_iter->second == "1") { + GELOGD("ge.exec.disableReuseMemory=1, reuse memory is close"); + } else { + GELOGE(PARAM_INVALID, "CheckDumpAndReuseMemory ge.exec.disableReuseMemory is invalid"); + return FAILED; + } + } else { + if (dump_op_flag) { + GELOGW("Will dump incorrect op data with default reuse memory"); + } + } + return SUCCESS; +} + +Status CheckOptionsValid(const std::map &options) { + // check job_id is valid + auto job_id_iter = options.find(OPTION_EXEC_JOB_ID); + if (job_id_iter != options.end()) { + if (job_id_iter->second.length() > kMaxStrLen) { + GELOGE(PARAM_INVALID, "CheckOptionsValid job_id failed, string len > %d", kMaxStrLen); + return FAILED; + } + } + + // Check ge.exec.disableReuseMemory and env DUMP_OP + if (CheckDumpAndReuseMemory(options) != SUCCESS) { + return FAILED; + } + + return SUCCESS; +} + +// Initialize GE, prepare for execution, call GELib::Initialize +Status GEInitialize(const std::map &options) { + GELOGT(TRACE_INIT, "GEInitialize start"); + // 0.check init status + if (kGeInitialized) { + GELOGW("GEInitialize is called more than once"); + return SUCCESS; + } + // Load OpsProto lib plugin + std::string opsproto_path; + GetOpsProtoPath(opsproto_path); + OpsProtoManager *manager = OpsProtoManager::Instance(); + std::map option_tmp; + option_tmp.emplace(std::pair(string("ge.opsProtoLibPath"), opsproto_path)); + bool is_proto_init = manager->Initialize(option_tmp); + if (!is_proto_init) { + GELOGE(GE_CLI_INIT_FAILED, "geInitialize failed, ops proto path is invalid."); + return FAILED; + } + + // check 
options is valid + if (CheckOptionsValid(options) != SUCCESS) { + return FAILED; + } + + TBEPluginManager::Instance().InitPreparation(options); + // call Initialize + GELOGT(TRACE_RUNNING, "Initializing environment"); + Status ret = ge::GELib::Initialize(options); + if (ret != SUCCESS) { + GELOGE(GE_CLI_INIT_FAILED, "geInitialize failed, error code = %u", ret); + return FAILED; + } + + // 7.check return status, return + if (!kGeInitialized) { + // Initialize success, first time calling initialize + kGeInitialized = true; + } + + GELOGT(TRACE_STOP, "GEInitialize finished"); + return ret; +} + +// GE finalize, releasing all resources +Status GEFinalize() { + GELOGT(TRACE_INIT, "GEFinalize start"); + // check init status + if (!kGeInitialized) { + GELOGW("GEFinalize is called before GEInitialize"); + return SUCCESS; + } + + std::lock_guard lock(kGeReleaseMutex); + // call Finalize + GELOGT(TRACE_RUNNING, "Finalizing environment"); + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GEFinalize Failed: GE not initialized"); + return GE_CLI_GE_NOT_INITIALIZED; + } + Status ret = instance_ptr->Finalize(); + GELOGI("GEFinalize finalize gelib ret=%u", ret); + if (ret != SUCCESS) { + GELOGE(ret, "GEFinalize Failed"); + return FAILED; + } + TBEPluginManager::Instance().Finalize(); + if (kGeInitialized && ret == SUCCESS) { + kGeInitialized = false; + } + + GELOGT(TRACE_STOP, "GEFinalize finished"); + return ret; +} + +// Initialize session,which calls innerSession +Session::Session(const std::map &options) { + GELOGT(TRACE_INIT, "Session Constructor start"); + // check init status + sessionId_ = 0; + if (!kGeInitialized) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED); + return; + } + // call Initialize + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "Session Constructor 
failed"); + return; + } + + GELOGT(TRACE_RUNNING, "Creating session"); + uint64_t session_id = 0; + Status ret = instance_ptr->SessionManagerObj().CreateSession(options, session_id); + GELOGT(TRACE_RUNNING, "Session id is %lu", session_id); + + // check return status, return, update session id if success + if (ret == SUCCESS) { + sessionId_ = session_id; + } else { + GELOGE(ret, "Session constructor failed, session Id not initialized"); + return; + } + GELOGT(TRACE_STOP, "Session Constructor finished"); +} + +// session destructor +Session::~Session() { + GELOGT(TRACE_INIT, "Session Destructor start"); + // 0.check init status + if (!kGeInitialized) { + GELOGW("GE is not yet initialized or is finalized."); + return; + } + + Status ret = FAILED; + std::lock_guard lock(kGeReleaseMutex); + try { + uint64_t session_id = sessionId_; + // call DestroySession + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGW("GE is not yet initialized or is finalized."); + return; + } + GELOGT(TRACE_RUNNING, "Session id is %lu", session_id); + + GELOGT(TRACE_RUNNING, "Destroying session"); + + ret = instance_ptr->SessionManagerObj().DestroySession(session_id); + } catch (google::protobuf::FatalException &e) { + GELOGE(GE_CLI_SESS_DESTROY_FAILED, "SessionDestructor throws FatalException"); + } + + // check return status, return, update session id if success + if (ret != SUCCESS) { + GELOGE(ret, "Session Destructor failed"); + } + + GELOGT(TRACE_STOP, "Session Destructor finished"); +} + +Status Session::AddGraph(uint32_t graph_id, const Graph &graph) { + GELOGT(TRACE_INIT, "Session AddGraph start"); + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (!instance_ptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "Session AddGraph failed"); + return FAILED; + } + + GELOGT(TRACE_RUNNING, "Adding Graph to session"); + Status ret = 
instance_ptr->SessionManagerObj().AddGraph(sessionId_, graph_id, graph); + if (ret != SUCCESS) { + GELOGE(ret, "Session AddGraph failed"); + return FAILED; + } + GELOGT(TRACE_STOP, "Session AddGraph finished"); + return ret; +} + +Status Session::RemoveGraph(uint32_t graph_id) { + GELOGT(TRACE_INIT, "Session RemoveGraph start"); + + // call RemoveGraph + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (!instance_ptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "Session RemoveGraph failed"); + return FAILED; + } + + GELOGT(TRACE_RUNNING, "Removing Graph from session"); + Status ret = instance_ptr->SessionManagerObj().RemoveGraph(sessionId_, graph_id); + // check return status, return + if (ret != SUCCESS) { + GELOGE(ret, "session RemoveGraph failed"); + return FAILED; + } + GELOGT(TRACE_STOP, "Session RemoveGraph finished"); + return ret; +} + +void PrintOutputResult(std::vector &outputs) { + if (outputs.empty() || outputs[0].GetData() == nullptr) { + GELOGW("outputs is empty or data is nullptr."); + return; + } + + size_t out_buf_size = outputs[0].GetSize(); + TensorDesc desc(outputs[0].GetTensorDesc()); + DataType data_type = desc.GetDataType(); + auto iter = CONST_OPDATA_TYPE_SIZE_MAP.find(data_type); + if (iter == CONST_OPDATA_TYPE_SIZE_MAP.end()) { + GELOGI("DataType %s has not defined size", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return; + } + size_t length = CONST_OPDATA_TYPE_SIZE_MAP[data_type]; + for (size_t i = 0; i < 10 && i < (out_buf_size / length); ++i) { // take first 10 at most + switch (data_type) { + case DT_BOOL: + case DT_INT8: + case DT_UINT8: + GELOGI("output data[%zu]=%d", i, *(reinterpret_cast(outputs[0].GetData()) + i)); + break; + case DT_INT16: + case DT_UINT16: + GELOGI("output data[%zu]=%d", i, *(reinterpret_cast(outputs[0].GetData()) + i)); + break; + case DT_INT32: + case DT_UINT32: + GELOGI("output data[%zu]=%d", i, *(reinterpret_cast(outputs[0].GetData()) + i)); + break; + case 
DT_INT64: + case DT_UINT64: + GELOGI("output data[%zu]=%ld", i, *(reinterpret_cast(outputs[0].GetData()) + i)); + break; + case DT_FLOAT: + GELOGI("output data[%zu]=%f", i, *(reinterpret_cast(outputs[0].GetData()) + i)); + break; + case DT_DOUBLE: + GELOGI("output data[%zu]=%lf", i, *(reinterpret_cast(outputs[0].GetData()) + i)); + break; + default: + GELOGI("Output datatype %s is not support print.", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return; + } + } +} + +Status Session::RunGraph(uint32_t graph_id, const std::vector &inputs, std::vector &outputs) { + GELOGT(TRACE_INIT, "Session RunGraph start"); + + std::vector graph_inputs = inputs; + // call RunGraph + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "Session RunGraph failed"); + return FAILED; + } + GELOGT(TRACE_RUNNING, "Running Graph"); + Status ret = instance_ptr->SessionManagerObj().RunGraph(sessionId_, graph_id, graph_inputs, outputs); + // check return status + if (ret != SUCCESS) { + GELOGE(ret, "Session RunGraph failed"); + return FAILED; + } + + // print output + if (outputs.size() > 0) { + PrintOutputResult(outputs); + } + + // return + GELOGT(TRACE_STOP, "Session RunGraph finished"); + return ret; +} + +Status Session::RegisterCallBackFunc(const std::string &key, const pCallBackFunc &callback) { + GELOGW( + "The callback function will not be checked. 
Please ensure that the implementation of the function is trusted."); + return ge::GELib::GetInstance()->SessionManagerObj().RegisterCallBackFunc(sessionId_, key, callback); +} + +Status Session::RunGraphAsync(uint32_t graph_id, const std::vector &inputs, + std::vector &outputs, std::function callback) { + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "SessionConstructor failed"); + return FAILED; + } + GELOGT(TRACE_RUNNING, "Run Graph Asynchronously"); + GELOGW( + "The callback function will not be checked. Please ensure that the implementation of the function is trusted."); + + Status ret = + ge::GELib::GetInstance()->SessionManagerObj().RunGraphAsync(sessionId_, graph_id, inputs, outputs, callback); + if (ret != SUCCESS) { + GELOGE(ret, "SessionManager RunGraphAsync failed"); + return FAILED; + } + return SUCCESS; +} +bool Session::IsGraphNeedRebuild(uint32_t graph_id) { + return ge::GELib::GetInstance()->SessionManagerObj().IsGraphNeedRebuild(sessionId_, graph_id); +} +} // namespace ge diff --git a/src/ge/common/CMakeLists.txt b/src/ge/common/CMakeLists.txt new file mode 100755 index 00000000..b0eb4ffc --- /dev/null +++ b/src/ge/common/CMakeLists.txt @@ -0,0 +1,100 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +# libge_common.so +file(GLOB PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../../proto/om.proto" + "../../proto/ge_ir.proto" + "../../proto/task.proto" + "../../proto/insert_op.proto" + ) + +file(GLOB SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../model/ge_model.cc" + "auth/file_saver.cc" + "context/ctx.cc" + "debug/memory_dumper.cc" + "fmk_error_codes.cc" + "formats/format_transfers/datatype_transfer.cc" + "formats/format_transfers/format_transfer.cc" + "formats/format_transfers/format_transfer_c1hwncoc0_hwcn.cc" + "formats/format_transfers/format_transfer_fractal_nz.cc" + "formats/format_transfers/format_transfer_fractal_z.cc" + "formats/format_transfers/format_transfer_fractal_zz.cc" + "formats/format_transfers/format_transfer_fracz_hwcn.cc" + "formats/format_transfers/format_transfer_fracz_nchw.cc" + "formats/format_transfers/format_transfer_fracz_nhwc.cc" + "formats/format_transfers/format_transfer_hwcn_c1hwncoc0.cc" + "formats/format_transfers/format_transfer_nc1hwc0_nchw.cc" + "formats/format_transfers/format_transfer_nc1hwc0_nhwc.cc" + "formats/format_transfers/format_transfer_nchw_nc1hwc0.cc" + "formats/format_transfers/format_transfer_nhwc_nc1hwc0.cc" + "formats/format_transfers/format_transfer_transpose.cc" + "formats/formats.cc" + "formats/utils/formats_trans_utils.cc" + "fp16_t.cc" + "ge/datatype_util.cc" + "ge/tbe_plugin_manager.cc" + "ge_format_util.cc" + "helper/model_helper.cc" + "helper/om_file_helper.cc" + "model_parser/base.cc" + "op/attr_value_util.cc" + "op/ge_op_utils.cc" + "properties_manager.cc" + "tbe_kernel_store.cc" + "thread_pool.cc" + "types.cc" + "util.cc" + "model_saver.cc" + ) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${CMAKE_CURRENT_LIST_DIR}/op) +include_directories(${GE_SOURCE_DIR}/src/ge) 
+include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/eigen) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +############ libge_common.so ################ +add_library(ge_common SHARED ${SRC_LIST} ${PROTO_HDRS}) +target_compile_definitions(ge_common PUBLIC + PROTOBUF_INLINE_NOT_IN_HEADERS=0 + HOST_VISIBILITY + OS_CENTOS) +target_link_libraries(ge_common + graph + ${PROTOBUF_LIBRARY} + ${register} + ${c_sec} + ${slog} + ${mmpa} + ${cce} + ${resource} + rt + dl) diff --git a/src/ge/common/auth/file_saver.cc b/src/ge/common/auth/file_saver.cc new file mode 100644 index 00000000..cd28d917 --- /dev/null +++ b/src/ge/common/auth/file_saver.cc @@ -0,0 +1,199 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/auth/file_saver.h" + +#include +#include +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/util.h" + +namespace { +const int kFileOpSuccess = 0; +const char TEE_PASSCODE_FILE_SUFFIX[] = ".PASSCODE"; +const char TEE_DAVINCI_FILE_SUFFIX[] = ".om"; +const size_t TEE_DAVINCI_FILE_SUFFIX_SIZE = 3; +} // namespace + +namespace ge { +Status FileSaver::OpenFile(int32_t &fd, const std::string &file_path) { + if (CheckPath(file_path) != SUCCESS) { + GELOGE(FAILED, "Check output file failed."); + return FAILED; + } + + char real_path[PATH_MAX] = {0}; + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(file_path.length() >= PATH_MAX, return FAILED, "File path is too long!"); + GE_IF_BOOL_EXEC(realpath(file_path.c_str(), real_path) == nullptr, + GELOGI("File %s does not exit, it will be created.", file_path.c_str())); + // Open file + mode_t mode = S_IRUSR | S_IWUSR; + fd = mmOpen2(real_path, O_RDWR | O_CREAT | O_TRUNC, mode); + if (fd == EN_INVALID_PARAM || fd == EN_ERROR) { + // -1: Failed to open file; - 2: Illegal parameter + GELOGE(FAILED, "Open file failed. mmpa_errno = %d", fd); + return FAILED; + } + return SUCCESS; +} + +Status FileSaver::WriteData(const void *data, uint32_t size, int32_t fd) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(size == 0 || data == nullptr, return PARAM_INVALID); + + // Write data + mmSsize_t write_count = mmWrite(fd, const_cast(data), size); + // -1: Failed to write to file; - 2: Illegal parameter + if (write_count == EN_INVALID_PARAM || write_count == EN_ERROR) { + GELOGE(FAILED, "Write data failed. 
mmpa_errorno = %ld", write_count); + return FAILED; + } + + return SUCCESS; +} + +Status FileSaver::SaveWithFileHeader(const std::string &file_path, const ModelFileHeader &file_header, const void *data, + int len) { + if (data == nullptr || len <= 0) { + GELOGE(FAILED, "Model_data is null or the length[%d] less than 1.", len); + return FAILED; + } + + // Open file + int32_t fd = 0; + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(OpenFile(fd, file_path) != SUCCESS, return FAILED, "OpenFile FAILED"); + + Status ret = SUCCESS; + do { + // Write file header + GE_CHK_BOOL_EXEC(WriteData(static_cast(&file_header), sizeof(ModelFileHeader), fd) == SUCCESS, + ret = FAILED; + break, "WriteData FAILED"); + // write data + GE_CHK_BOOL_EXEC(WriteData(data, static_cast(len), fd) == SUCCESS, ret = FAILED, "WriteData FAILED"); + } while (0); + // Close file + if (mmClose(fd) != 0) { // mmClose 0: success + GELOGE(FAILED, "Close file failed."); + ret = FAILED; + } + return ret; +} + +Status FileSaver::SaveWithFileHeader(const std::string &file_path, const ModelFileHeader &file_header, + ModelPartitionTable &model_partition_table, + const std::vector &partition_datas) { + GE_CHK_BOOL_RET_STATUS( + !partition_datas.empty() && model_partition_table.num != 0 && model_partition_table.num == partition_datas.size(), + FAILED, "Invalid param:partition data size(%u), model_partition_table.num(%zu).", model_partition_table.num, + partition_datas.size()); + // Open file + int32_t fd = 0; + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(OpenFile(fd, file_path) != SUCCESS, return FAILED); + Status ret = SUCCESS; + do { + // Write file header + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG( + WriteData(static_cast(&file_header), sizeof(ModelFileHeader), fd) != SUCCESS, ret = FAILED; + break); + // Write model partition table + uint32_t table_size = static_cast(SIZE_OF_MODEL_PARTITION_TABLE(model_partition_table)); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG( + WriteData(static_cast(&model_partition_table), table_size, fd) != SUCCESS, ret = FAILED; 
break); + // Write partition data + for (const auto &partition_data : partition_datas) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG( + WriteData(static_cast(partition_data.data), partition_data.size, fd) != SUCCESS, ret = FAILED; + break); + } + } while (0); + // Close file + GE_CHK_BOOL_RET_STATUS(mmClose(fd) == EN_OK, FAILED, "Close file failed."); + return ret; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status FileSaver::CheckPath(const std::string &file_path) { + // Determine file path length + if (file_path.size() >= PATH_MAX) { + GELOGE(FAILED, "Path is too long:%zu", file_path.size()); + return FAILED; + } + + // Find the last separator + int path_split_pos = static_cast(file_path.size() - 1); + for (; path_split_pos >= 0; path_split_pos--) { + if (file_path[path_split_pos] == '\\' || file_path[path_split_pos] == '/') { + break; + } + } + + if (path_split_pos == 0) { + return SUCCESS; + } + + // If there is a path before the file name, create the path + if (path_split_pos != -1) { + if (CreateDirectory(std::string(file_path).substr(0, static_cast(path_split_pos))) != kFileOpSuccess) { + GELOGE(FAILED, "CreateDirectory failed, file path:%s.", file_path.c_str()); + return FAILED; + } + } + + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +FileSaver::SaveToFile(const string &file_path, const ge::ModelData &model, const ModelFileHeader *model_file_header) { + if (file_path.empty() || model.model_data == nullptr || model.model_len == 0) { + GELOGE(FAILED, "Incorrected input param. file_path.empty() || model.model_data == nullptr || model.model_len == 0"); + return FAILED; + } + + ModelFileHeader file_header; + + int32_t copy_header_ret = 0; + GE_IF_BOOL_EXEC(model_file_header != nullptr, copy_header_ret = memcpy_s(&file_header, sizeof(ModelFileHeader), + model_file_header, sizeof(ModelFileHeader))); + GE_CHK_BOOL_RET_STATUS(copy_header_ret == 0, FAILED, "Copy ModelFileHeader failed! 
memcpy_s return: %d", + copy_header_ret); + + file_header.length = model.model_len; + file_header.is_encrypt = ModelEncryptType::UNENCRYPTED; + + const Status ret = SaveWithFileHeader(file_path, file_header, model.model_data, file_header.length); + if (ret != SUCCESS) { + GELOGE(FAILED, "Save file failed, file_path:%s, file header len:%u.", file_path.c_str(), file_header.length); + return FAILED; + } + + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +FileSaver::SaveToFile(const string &file_path, ModelFileHeader &file_header, ModelPartitionTable &model_partition_table, + const std::vector &partition_datas) { + file_header.is_encrypt = ModelEncryptType::UNENCRYPTED; + const Status ret = SaveWithFileHeader(file_path, file_header, model_partition_table, partition_datas); + GE_CHK_BOOL_RET_STATUS(ret == SUCCESS, FAILED, "Save file failed, file_path:%s, file header len:%u.", + file_path.c_str(), file_header.length); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/common/auth/file_saver.h b/src/ge/common/auth/file_saver.h new file mode 100644 index 00000000..e382970a --- /dev/null +++ b/src/ge/common/auth/file_saver.h @@ -0,0 +1,105 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_AUTH_FILE_SAVER_H_ +#define GE_COMMON_AUTH_FILE_SAVER_H_ + +#include +#include + +#include "framework/common/helper/om_file_helper.h" +#include "framework/common/types.h" +#include "graph/buffer.h" +#include "mmpa/mmpa_api.h" + +struct PROC_PARAM { + uint8_t *model_name; + + // ISV Ek buffer + uint8_t *model_key; + uint32_t model_key_len; + + // ISV root certificate buffer + uint8_t *root_cert; + uint32_t root_cert_len; + + // ISV private key buffer + uint8_t *pri_key; + uint32_t pri_key_len; + + // Raw AI Module Image buffer + uint8_t *ai_image; + uint32_t ai_image_len; + + // ISV HW key buffer + uint8_t *hw_key; + uint32_t hw_key_len; +}; + +struct ProcOut { + uint8_t *passcode; + uint32_t passcode_len; + uint8_t *encrypted_img; + uint32_t encrypted_img_len; +}; + +namespace ge { +using std::string; + +class FileSaver { + public: + /// + /// @ingroup domi_common + /// @brief save model, no encryption + /// @return Status result + /// + static Status SaveToFile(const string &file_path, const ge::ModelData &model, + const ModelFileHeader *model_file_header = nullptr); + + static Status SaveToFile(const string &file_path, ModelFileHeader &model_file_header, + ModelPartitionTable &model_partition_table, + const std::vector &partition_datas); + + protected: + /// + /// @ingroup domi_common + /// @brief Check validity of the file path + /// @return Status result + /// + static Status CheckPath(const string &file_path); + + static Status WriteData(const void *data, uint32_t size, int32_t fd); + + static Status OpenFile(int32_t &fd, const std::string &file_path); + + /// + /// @ingroup domi_common + /// @brief save model to file + /// @param [in] file_path file output path + /// @param [in] file_header file header info + /// @param [in] data model data + /// @param [in] len model length + /// @return Status result + /// + static Status SaveWithFileHeader(const string &file_path, const ModelFileHeader &file_header, const void *data, + int len); + 
+ static Status SaveWithFileHeader(const std::string &file_path, const ModelFileHeader &file_header, + ModelPartitionTable &model_partition_table, + const std::vector &partition_datas); +}; +} // namespace ge +#endif // GE_COMMON_AUTH_FILE_SAVER_H_ diff --git a/src/ge/common/context/ctx.cc b/src/ge/common/context/ctx.cc new file mode 100644 index 00000000..f6ae364d --- /dev/null +++ b/src/ge/common/context/ctx.cc @@ -0,0 +1,25 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "framework/omg/omg_inner_types.h" + +using ge::OmgContext; +namespace domi { +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY OmgContext &GetContext() { + static OmgContext context; + return context; +} +} // namespace domi diff --git a/src/ge/common/convert/pb2json.cc b/src/ge/common/convert/pb2json.cc new file mode 100644 index 00000000..f47621b8 --- /dev/null +++ b/src/ge/common/convert/pb2json.cc @@ -0,0 +1,218 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +// File: pb2json.h +// Description: This imply file for protobuf message and json interconversion + +#include "common/convert/pb2json.h" + +#include +#include + +#include "framework/common/fmk_types.h" + +using std::set; +using std::string; + +namespace ge { +// JSON parses non utf8 character throwing exceptions, so some fields need to be shielded through black fields +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void Pb2Json::Message2Json(const ProtobufMsg &message, + const set &black_fields, Json &json, + bool enum2str) { + auto descriptor = message.GetDescriptor(); + auto reflection = message.GetReflection(); + if (descriptor == nullptr || reflection == nullptr) { + return; + } + + auto count = descriptor->field_count(); + + for (auto i = 0; i < count; ++i) { + const auto field = descriptor->field(i); + if (field == nullptr) { + return; + } + + // Do not display weight data + if (black_fields.find(field->name()) != black_fields.end()) { + continue; + } + + if (field->is_repeated()) { + if (reflection->FieldSize(message, field) > 0) { + RepeatedMessage2Json(message, field, reflection, black_fields, json[field->name()], enum2str); + } + continue; + } + + if (!reflection->HasField(message, field)) { + continue; + } + + OneField2Json(message, field, reflection, black_fields, json, enum2str); + } +} + +void Pb2Json::OneField2Json(const ProtobufMsg &message, const ProtobufFieldDescriptor *field, + const ProtobufReflection *reflection, const set &black_fields, Json &json, + bool enum2str) { + if (field == nullptr || reflection == nullptr) { 
+ return; + } + switch (field->type()) { + case ProtobufFieldDescriptor::TYPE_MESSAGE: { + const ProtobufMsg &tmp_message = reflection->GetMessage(message, field); + if (0 != tmp_message.ByteSize()) { + Message2Json(tmp_message, black_fields, json[field->name()]); + } + break; + } + + case ProtobufFieldDescriptor::TYPE_BOOL: + json[field->name()] = reflection->GetBool(message, field); + break; + + case ProtobufFieldDescriptor::TYPE_ENUM: { + auto *enum_value_desc = reflection->GetEnum(message, field); + Enum2Json(enum_value_desc, field, enum2str, json); + break; + } + + case ProtobufFieldDescriptor::TYPE_INT32: + case ProtobufFieldDescriptor::TYPE_SINT32: + case ProtobufFieldDescriptor::TYPE_SFIXED32: + json[field->name()] = reflection->GetInt32(message, field); + break; + + case ProtobufFieldDescriptor::TYPE_UINT32: + case ProtobufFieldDescriptor::TYPE_FIXED32: + json[field->name()] = reflection->GetUInt32(message, field); + break; + + case ProtobufFieldDescriptor::TYPE_INT64: + case ProtobufFieldDescriptor::TYPE_SINT64: + case ProtobufFieldDescriptor::TYPE_SFIXED64: + json[field->name()] = reflection->GetInt64(message, field); + break; + + case ProtobufFieldDescriptor::TYPE_UINT64: + case ProtobufFieldDescriptor::TYPE_FIXED64: + json[field->name()] = reflection->GetUInt64(message, field); + break; + + case ProtobufFieldDescriptor::TYPE_FLOAT: + json[field->name()] = reflection->GetFloat(message, field); + break; + + case ProtobufFieldDescriptor::TYPE_STRING: + case ProtobufFieldDescriptor::TYPE_BYTES: + json[field->name()] = reflection->GetString(message, field); + break; + + default: + break; + } +} + +void Pb2Json::RepeatedMessage2Json(const ProtobufMsg &message, const ProtobufFieldDescriptor *field, + const ProtobufReflection *reflection, const set &black_fields, Json &json, + bool enum2str) { + if (field == nullptr || reflection == nullptr) { + Message2Json(message, black_fields, json); + return; + } + + for (auto i = 0; i < reflection->FieldSize(message, 
field); ++i) { + Json tmp_json; + switch (field->type()) { + case ProtobufFieldDescriptor::TYPE_MESSAGE: { + const ProtobufMsg &tmp_message = reflection->GetRepeatedMessage(message, field, i); + if (0 != tmp_message.ByteSize()) { + Message2Json(tmp_message, black_fields, tmp_json); + } + } break; + + case ProtobufFieldDescriptor::TYPE_BOOL: + tmp_json = reflection->GetRepeatedBool(message, field, i); + break; + + case ProtobufFieldDescriptor::TYPE_ENUM: { + auto *enum_value_desc = reflection->GetRepeatedEnum(message, field, i); + RepeatedEnum2Json(enum_value_desc, enum2str, tmp_json); + } break; + + case ProtobufFieldDescriptor::TYPE_INT32: + case ProtobufFieldDescriptor::TYPE_SINT32: + case ProtobufFieldDescriptor::TYPE_SFIXED32: + tmp_json = reflection->GetRepeatedInt32(message, field, i); + break; + + case ProtobufFieldDescriptor::TYPE_UINT32: + case ProtobufFieldDescriptor::TYPE_FIXED32: + tmp_json = reflection->GetRepeatedUInt32(message, field, i); + break; + + case ProtobufFieldDescriptor::TYPE_INT64: + case ProtobufFieldDescriptor::TYPE_SINT64: + case ProtobufFieldDescriptor::TYPE_SFIXED64: + tmp_json = reflection->GetRepeatedInt64(message, field, i); + break; + + case ProtobufFieldDescriptor::TYPE_UINT64: + case ProtobufFieldDescriptor::TYPE_FIXED64: + tmp_json = reflection->GetRepeatedUInt64(message, field, i); + break; + + case ProtobufFieldDescriptor::TYPE_FLOAT: + tmp_json = reflection->GetRepeatedFloat(message, field, i); + break; + + case ProtobufFieldDescriptor::TYPE_STRING: + case ProtobufFieldDescriptor::TYPE_BYTES: + tmp_json = reflection->GetRepeatedString(message, field, i); + break; + + default: + break; + } + json += tmp_json; + } +} + +void Pb2Json::Enum2Json(const ProtobufEnumValueDescriptor *enum_value_desc, const ProtobufFieldDescriptor *field, + bool enum2str, Json &json) { + if (enum_value_desc != nullptr) { + if (field == nullptr) { + return; + } + if (enum2str) { + json[field->name()] = enum_value_desc->name(); + } else { + 
json[field->name()] = enum_value_desc->number(); + } + } +} + +void Pb2Json::RepeatedEnum2Json(const ProtobufEnumValueDescriptor *enum_value_desc, bool enum2str, Json &json) { + if (enum_value_desc != nullptr) { + if (enum2str) { + json = enum_value_desc->name(); + } else { + json = enum_value_desc->number(); + } + } +} +} // namespace ge diff --git a/src/ge/common/convert/pb2json.h b/src/ge/common/convert/pb2json.h new file mode 100644 index 00000000..4048708d --- /dev/null +++ b/src/ge/common/convert/pb2json.h @@ -0,0 +1,68 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +// File: pb2json.h +// Description: This header file for protobuf message and json interconversion + +#ifndef GE_COMMON_CONVERT_PB2JSON_H_ +#define GE_COMMON_CONVERT_PB2JSON_H_ + +#include +#include +#include +#include + +#include "google/protobuf/descriptor.h" +#include "google/protobuf/message.h" +#include "nlohmann/json.hpp" + +namespace ge { +using Json = nlohmann::json; +using ProtobufMsg = ::google::protobuf::Message; +using ProtobufReflection = ::google::protobuf::Reflection; +using ProtobufFieldDescriptor = ::google::protobuf::FieldDescriptor; +using ProtobufDescriptor = ::google::protobuf::Descriptor; +using ProtobufEnumValueDescriptor = ::google::protobuf::EnumValueDescriptor; + +class Pb2Json { + public: + /** + * @ingroup domi_omg + * @brief Transfer protobuf object to JSON object + * @param [out] json Converted JSON object + * @return void success + * @author + */ + static void Message2Json(const ProtobufMsg &message, const std::set &black_fields, Json &json, + bool enum2str = false); + + protected: + static void RepeatedMessage2Json(const ProtobufMsg &message, const ProtobufFieldDescriptor *field, + const ProtobufReflection *reflection, const std::set &black_fields, + Json &json, bool enum2str); + + static void Enum2Json(const ProtobufEnumValueDescriptor *enum_value_desc, const ProtobufFieldDescriptor *field, + bool enum2str, Json &json); + + static void RepeatedEnum2Json(const ProtobufEnumValueDescriptor *enum_value_desc, bool enum2str, Json &json); + + static void OneField2Json(const ProtobufMsg &message, const ProtobufFieldDescriptor *field, + const ProtobufReflection *reflection, const std::set &black_fields, Json &json, + bool enum2str); +}; +} // namespace ge + +#endif // GE_COMMON_CONVERT_PB2JSON_H_ diff --git a/src/ge/common/debug/memory_dumper.cc b/src/ge/common/debug/memory_dumper.cc new file mode 100755 index 00000000..fcda5366 --- /dev/null +++ b/src/ge/common/debug/memory_dumper.cc @@ -0,0 +1,163 @@ +/** + * Copyright 2019-2020 
Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/debug/memory_dumper.h" + +#include + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/util.h" + +using std::string; + +static const int kInvalidFd = (-1); + +namespace ge { +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY MemoryDumper::MemoryDumper() : fd_(kInvalidFd) {} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY MemoryDumper::~MemoryDumper() { Close(); } + +// Dump the data to the file +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status MemoryDumper::DumpToFile(const char *filename, void *data, + uint32_t len) { + GE_CHK_BOOL_RET_STATUS(!(filename == nullptr || data == nullptr || len == 0), FAILED, + "Incorrect parameter. filename is nullptr || data is nullptr || len is 0"); + +#ifdef FMK_SUPPORT_DUMP + // Open the file + int fd = OpenFile(filename); + if (kInvalidFd == fd) { + GELOGE(FAILED, "Open file failed."); + return FAILED; + } + + // Write the data to the file + Status ret = SUCCESS; + mmSsize_t mmpa_ret = mmWrite(fd, data, len); + // mmWrite return -1:Failed to write data to file;return -2:Invalid parameter + if (mmpa_ret == EN_ERROR || mmpa_ret == EN_INVALID_PARAM) { + GELOGE(FAILED, "Write to file failed. 
errno = %ld", mmpa_ret); + ret = FAILED; + } + + // Close the file + if (mmClose(fd) != EN_OK) { // mmClose return 0: success + GELOGE(FAILED, "Close file failed."); + ret = FAILED; + } + + return ret; + +#else + GELOGW("need to define FMK_SUPPORT_DUMP for dump op input and output."); + return SUCCESS; +#endif +} + +// Open file +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status MemoryDumper::Open(const char *filename) { + GE_CHK_BOOL_RET_STATUS(filename != nullptr, FAILED, "Incorrect parameter. filename is nullptr"); + + // Try to remove file first for reduce the close time by overwriting way + // (The process of file closing will be about 100~200ms slower per file when written by overwriting way) + // If remove file failed, then try to open it with overwriting way + int ret = remove(filename); + // If remove file failed, print the warning log + if (ret != 0) { + GELOGW("Remove file failed."); + } + + fd_ = OpenFile(filename); + if (fd_ == kInvalidFd) { + GELOGE(FAILED, "Open %s failed.", filename); + return FAILED; + } + + return SUCCESS; +} + +// Dump the data to file +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status MemoryDumper::Dump(void *data, uint32_t len) const { + GE_CHK_BOOL_RET_STATUS(data != nullptr, FAILED, "Incorrect parameter. data is nullptr"); + +#ifdef FMK_SUPPORT_DUMP + mmSsize_t mmpa_ret = mmWrite(fd_, data, len); + // mmWrite return -1:failed to write data to file;return -2:invalid parameter + if (mmpa_ret == EN_ERROR || mmpa_ret == EN_INVALID_PARAM) { + GELOGE(FAILED, "Write to file failed. 
errno = %ld", mmpa_ret); + return FAILED; + } + + return SUCCESS; + +#else + GELOGW("need to define FMK_SUPPORT_DUMP for dump op input and output."); + return SUCCESS; +#endif +} + +// Close file +void MemoryDumper::Close() noexcept { + // Close file + if (fd_ != kInvalidFd && mmClose(fd_) != EN_OK) { + GELOGW("Close file failed."); + } + fd_ = kInvalidFd; +} + +// Open file +int MemoryDumper::OpenFile(const char *filename) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(filename == nullptr, return kInvalidFd, "Incorrect parameter. filename is nullptr"); + + // Find the last separator + int path_split_pos = static_cast(strlen(filename) - 1); + for (; path_split_pos >= 0; path_split_pos--) { + GE_IF_BOOL_EXEC(filename[path_split_pos] == '\\' || filename[path_split_pos] == '/', break;) + } + // Get the absolute path + string real_path; + char tmp_path[PATH_MAX] = {0}; + GE_IF_BOOL_EXEC( + -1 != path_split_pos, string prefix_path = std::string(filename).substr(0, path_split_pos); + string last_path = std::string(filename).substr(path_split_pos, strlen(filename) - 1); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(prefix_path.length() >= PATH_MAX, return kInvalidFd, "Prefix path is too long!"); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(realpath(prefix_path.c_str(), tmp_path) == nullptr, return kInvalidFd, + "Dir %s does not exit.", prefix_path.c_str()); + real_path = std::string(tmp_path) + last_path;) + GE_IF_BOOL_EXEC( + path_split_pos == -1 || path_split_pos == 0, + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(strlen(filename) >= PATH_MAX, return kInvalidFd, "Prefix path is too long!"); + GE_IF_BOOL_EXEC(realpath(filename, tmp_path) == nullptr, + GELOGI("File %s does not exit, it will be created.", filename)); + real_path = std::string(tmp_path);) + + // Open file, only the current user can read and write, to avoid malicious application access + // Using the O_EXCL, if the file already exists,return failed to avoid privilege escalation vulnerability. 
+ mode_t mode = S_IRUSR | S_IWUSR; + + int32_t fd = mmOpen2(real_path.c_str(), O_WRONLY | O_CREAT | O_TRUNC, mode); + if (fd == EN_ERROR || fd == EN_INVALID_PARAM) { + GELOGE(kInvalidFd, "Open file failed. errno = %d", fd); + return kInvalidFd; + } + return fd; +} +} // namespace ge diff --git a/src/ge/common/debug/memory_dumper.h b/src/ge/common/debug/memory_dumper.h new file mode 100644 index 00000000..3cb87c99 --- /dev/null +++ b/src/ge/common/debug/memory_dumper.h @@ -0,0 +1,90 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_DEBUG_MEMORY_DUMPER_H_ +#define GE_COMMON_DEBUG_MEMORY_DUMPER_H_ + +#include + +#include "framework/common/types.h" +#include "mmpa/mmpa_api.h" +#include "framework/common/ge_inner_error_codes.h" + +namespace ge { +// MemoryDumper:dump memory data for internal test +// Output in one time: using DumpToFile +// Open file at one time and output multiple times: create MemoryDumper object first, and using Open/Dump/Close +class MemoryDumper { + public: + MemoryDumper(); + ~MemoryDumper(); + + // Assignment/copy is not allowed to avoid repeated release + MemoryDumper &operator=(const MemoryDumper &dumper) = delete; + MemoryDumper(const MemoryDumper &dumper) = delete; + + /** @ingroup domi_common + * @brief write memory data to file, if the filename is not exist, create it first + * @param [in] filename the output file path, specific to filename + * @param [in] data the memory data + * @param [in] len length of data + * @return SUCCESS output success + * @return FAILED output failed + * @author + */ + static Status DumpToFile(const char *filename, void *data, uint32_t len); + + /** @ingroup domi_common + * @brief open the dump file + * @param [in] filename the output file path, specific to filename + * @return SUCCESS open file success + * @return FAILED open file failed + * @author + */ + Status Open(const char *filename); + + /** @ingroup domi_common + * @brief write the Memory data to file + * @param [in] data the memory data + * @param [in] len length of data + * @return SUCCESS success + * @return FAILED failed + * @author + */ + Status Dump(void *data, uint32_t len) const; + + /** @ingroup domi_common + * @brief close the Dump file + * @return SUCCESS success + * @return FAILED failed + * @author + */ + void Close() noexcept; + + private: + /** @ingroup domi_common + * @brief open the dump file + * @param [in] filename the output file path, specific to filename + * @return int the file handle after file open, -1 means open file failed + * 
@author + */ + static int OpenFile(const char *filename); + + int fd_; +}; +} // namespace ge + +#endif // GE_COMMON_DEBUG_MEMORY_DUMPER_H_ diff --git a/src/ge/common/fmk_error_codes.cc b/src/ge/common/fmk_error_codes.cc new file mode 100644 index 00000000..3ad8503a --- /dev/null +++ b/src/ge/common/fmk_error_codes.cc @@ -0,0 +1,64 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "framework/common/fmk_error_codes.h" + +namespace domi { +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY StatusFactory *StatusFactory::Instance() { + static StatusFactory instance; + return &instance; +} +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void StatusFactory::RegisterErrorNo(uint32_t err, + const std::string &desc) { + if (err_desc_.find(err) != err_desc_.end()) { + return; + } + err_desc_[err] = desc; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY std::string StatusFactory::GetErrDesc(uint32_t err) { + auto iter_find = err_desc_.find(err); + if (iter_find == err_desc_.end()) { + return ""; + } + return iter_find->second; +} +// General error code +DEF_ERRORNO(SUCCESS, "Success"); +DEF_ERRORNO(FAILED, "Failed"); + +// Common errocode +DEF_ERRORNO(MEMALLOC_FAILED, "Failed to allocate memory!"); // 50331648 +DEF_ERRORNO(PARAM_INVALID, "Parameter's invalid!"); // 50331649 +DEF_ERRORNO(CCE_FAILED, "Failed to call CCE API!"); // 50331650 +DEF_ERRORNO(RT_FAILED, "Failed to call runtime 
API!"); // 50331651 +DEF_ERRORNO(INTERNAL_ERROR, "Internal errors"); // 50331652 +DEF_ERRORNO(CSEC_ERROR, "Failed to call libc_sec API!"); // 50331653 +DEF_ERRORNO(TEE_ERROR, "Failed to call tee API!"); // 50331653 +DEF_ERRORNO(UNSUPPORTED, "Parameter's unsupported!"); +DEF_ERRORNO(OUT_OF_MEMORY, "Out of memory!"); + +// errorcode +DEF_ERRORNO(PARSE_MODEL_FAILED, "Failed to parse the model!"); +DEF_ERRORNO(PARSE_WEIGHTS_FAILED, "Failed to parse the weights!"); +DEF_ERRORNO(NOT_INITIALIZED, "It hasn't been initialized!"); +DEF_ERRORNO(TIMEOUT, "Running time out!"); + +// errorcode +DEF_ERRORNO(MODEL_NOT_READY, "The model is not ready yet!"); +DEF_ERRORNO(PUSH_DATA_FAILED, "Failed to push data!"); +DEF_ERRORNO(DATA_QUEUE_ISFULL, "Data queue is full!"); +} // namespace domi diff --git a/src/ge/common/formats/format_transfers/datatype_transfer.cc b/src/ge/common/formats/format_transfers/datatype_transfer.cc new file mode 100644 index 00000000..957c101d --- /dev/null +++ b/src/ge/common/formats/format_transfers/datatype_transfer.cc @@ -0,0 +1,180 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/datatype_transfer.h" + +#include +#include +#include + +#include "common/formats/utils/formats_trans_utils.h" +#include "common/fp16_t.h" +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" +#include "securec.h" + +namespace ge { +namespace formats { + +namespace { +enum DataTypeTransMode { + kTransferWithDatatypeFloatToFloat16, + kTransferWithDatatypeFloatToInt32, + kTransferWithDatatypeFloat16ToFloat, + kTransferWithDatatypeFloat16ToInt32, + kTransferWithDatatypeInt32ToFloat, + kTransferWithDatatypeInt32ToFloat16, + kTransferWithDatatypeInt32ToUint8, + kTransferWithDatatypeInt32ToInt8, + kTransferWithDatatypeUint8ToFloat, + kTransferWithDatatypeUint8ToInt32, + kTransferWithDatatypeInt8ToFloat, + kTransferWithDatatypeInt8ToInt32, + kTransferWithDatatypeInt64ToInt32, +}; + +std::map, DataTypeTransMode> trans_mode_map{ + {std::pair(DT_FLOAT, DT_FLOAT16), kTransferWithDatatypeFloatToFloat16}, + {std::pair(DT_FLOAT, DT_INT32), kTransferWithDatatypeFloatToInt32}, + {std::pair(DT_FLOAT16, DT_FLOAT), kTransferWithDatatypeFloat16ToFloat}, + {std::pair(DT_FLOAT16, DT_INT32), kTransferWithDatatypeFloat16ToInt32}, + {std::pair(DT_INT32, DT_FLOAT), kTransferWithDatatypeInt32ToFloat}, + {std::pair(DT_INT32, DT_FLOAT16), kTransferWithDatatypeInt32ToFloat16}, + {std::pair(DT_INT32, DT_UINT8), kTransferWithDatatypeInt32ToUint8}, + {std::pair(DT_INT32, DT_INT8), kTransferWithDatatypeInt32ToInt8}, + {std::pair(DT_UINT8, DT_FLOAT), kTransferWithDatatypeUint8ToFloat}, + {std::pair(DT_UINT8, DT_INT32), kTransferWithDatatypeUint8ToInt32}, + {std::pair(DT_INT8, DT_FLOAT), kTransferWithDatatypeInt8ToFloat}, + {std::pair(DT_INT8, DT_INT32), kTransferWithDatatypeInt8ToInt32}, + {std::pair(DT_INT64, DT_INT32), kTransferWithDatatypeInt64ToInt32}}; + +template +Status TransDataSrc2Dst(const CastArgs &args, uint8_t *dst, const size_t data_size) { + SrcT src_data; + for (size_t idx = 
0; idx != data_size; idx++) { + src_data = reinterpret_cast(args.data)[idx]; + reinterpret_cast(dst)[idx] = static_cast(src_data); + } + return SUCCESS; +} + +template +Status TransDataSrc2Fp16(const CastArgs &args, uint8_t *dst, const size_t data_size) { + fp16_t src_data; + for (size_t idx = 0; idx != data_size; idx++) { + src_data = reinterpret_cast(args.data)[idx]; + reinterpret_cast(dst)[idx] = src_data.val; + } + return SUCCESS; +} + +Status CastKernel(const CastArgs &args, uint8_t *dst, const size_t data_size, const DataTypeTransMode trans_mode) { + switch (trans_mode) { + case kTransferWithDatatypeFloatToFloat16: + return TransDataSrc2Fp16(args, dst, data_size); + case kTransferWithDatatypeFloatToInt32: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeFloat16ToFloat: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeFloat16ToInt32: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeInt32ToFloat: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeInt32ToFloat16: + return TransDataSrc2Fp16(args, dst, data_size); + case kTransferWithDatatypeInt32ToUint8: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeInt32ToInt8: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeUint8ToFloat: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeUint8ToInt32: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeInt8ToFloat: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeInt8ToInt32: + return TransDataSrc2Dst(args, dst, data_size); + case kTransferWithDatatypeInt64ToInt32: + return TransDataSrc2Dst(args, dst, data_size); + default: + GELOGE(PARAM_INVALID, "Trans data type from %s to %s is not supported.", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(args.dst_data_type).c_str()); + 
return UNSUPPORTED; + } +} +} // namespace + +Status DataTypeTransfer::TransDataType(const CastArgs &args, TransResult &result) { + GELOGD("Begin trans data from %s to %s, data size %zu", TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(args.dst_data_type).c_str(), args.src_data_size); + std::pair trans_info(args.src_data_type, args.dst_data_type); + auto iter = trans_mode_map.find(trans_info); + if (iter == trans_mode_map.end()) { + GELOGE(PARAM_INVALID, "Trans data type from %s to %s is not supported.", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(args.dst_data_type).c_str()); + return UNSUPPORTED; + } + auto trans_mode = iter->second; + + if (args.src_data_size == 0) { + GELOGE(PARAM_INVALID, "Invalid src data size %zu", args.src_data_size); + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.dst_data_type); + if (size <= 0) { + GELOGE(PARAM_INVALID, "Failed to calc size from data type %s", + TypeUtils::DataTypeToSerialString(args.dst_data_type).c_str()); + return PARAM_INVALID; + } + if (args.src_data_size > static_cast(SIZE_MAX / size)) { + GELOGE(PARAM_INVALID, "args.src_data_size %zu or data type size %d too big.", args.src_data_size, size); + return PARAM_INVALID; + } + size_t total_size = static_cast(args.src_data_size * size); + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to alloc the memory for dst buf %zu, data size %zu", total_size, args.src_data_size); + return OUT_OF_MEMORY; + } + + if (CastKernel(args, dst.get(), args.src_data_size, trans_mode) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to cast data from %s to %s, data size %zu", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(args.dst_data_type).c_str(), args.src_data_size); + return INTERNAL_ERROR; + } + result.data = dst; + 
result.length = total_size; + return SUCCESS; +} + +std::shared_ptr BuildDataTypeTransfer(const CastArgs &args) { + if (!DataTypeTransferExists(args)) { + return nullptr; + } + return ge::MakeShared(); +} + +bool DataTypeTransferExists(const CastArgs &args) { + std::pair trans_info(args.src_data_type, args.dst_data_type); + auto iter = trans_mode_map.find(trans_info); + return iter != trans_mode_map.end(); +} +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/datatype_transfer.h b/src/ge/common/formats/format_transfers/datatype_transfer.h new file mode 100644 index 00000000..fe13a9b8 --- /dev/null +++ b/src/ge/common/formats/format_transfers/datatype_transfer.h @@ -0,0 +1,49 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_DATATYPE_TRANSFER_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_DATATYPE_TRANSFER_H_ + +#include +#include +#include + +#include "common/formats/format_transfers/format_transfer.h" +#include "external/graph/types.h" +#include "framework/common/ge_inner_error_codes.h" + +namespace ge { +namespace formats { + +struct CastArgs { + const uint8_t *data; + size_t src_data_size; + DataType src_data_type; + DataType dst_data_type; +}; + +class DataTypeTransfer { + public: + Status TransDataType(const CastArgs &args, TransResult &result); +}; + +std::shared_ptr BuildDataTypeTransfer(const CastArgs &args); + +bool DataTypeTransferExists(const CastArgs &args); +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_DATATYPE_TRANSFER_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer.cc b/src/ge/common/formats/format_transfers/format_transfer.cc new file mode 100644 index 00000000..76ba8192 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer.cc @@ -0,0 +1,69 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer.h" + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +struct FormatTransferRegistry { + Status RegisterBuilder(Format src, Format dst, FormatTransferBuilder builder) { + src_dst_builder[src][dst] = std::move(builder); + return SUCCESS; + } + std::map> src_dst_builder; +}; + +FormatTransferRegistry &GetFormatTransferRegistry() { + static FormatTransferRegistry registry; + return registry; +} +} // namespace + +std::shared_ptr BuildFormatTransfer(const TransArgs &args) { + auto registry = GetFormatTransferRegistry(); + auto dst_builder = registry.src_dst_builder.find(args.src_format); + if (dst_builder == registry.src_dst_builder.end()) { + return nullptr; + } + auto builder_iter = dst_builder->second.find(args.dst_format); + if (builder_iter == dst_builder->second.end()) { + return nullptr; + } + return builder_iter->second(); +} + +bool FormatTransferExists(const TransArgs &args) { + auto registry = GetFormatTransferRegistry(); + auto dst_builder = registry.src_dst_builder.find(args.src_format); + if (dst_builder == registry.src_dst_builder.end()) { + return false; + } + return dst_builder->second.count(args.dst_format) > 0; +} + +FormatTransferRegister::FormatTransferRegister(FormatTransferBuilder builder, Format src, Format dst) { + (void)GetFormatTransferRegistry().RegisterBuilder(src, dst, std::move(builder)); + // RegisterBuilder() always return success, no need to check value +} +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer.h b/src/ge/common/formats/format_transfers/format_transfer.h new file mode 100644 index 00000000..3d03ebbe --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer.h @@ -0,0 +1,83 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_H_ + +#include +#include +#include + +#include "external/graph/types.h" +#include "framework/common/ge_inner_error_codes.h" +#include "common/ge/ge_util.h" + +namespace ge { +namespace formats { +struct TransArgs { + const uint8_t *data; + Format src_format; + Format dst_format; + // For scenes that need to supplement the shape, for example, 5D to 4D + // It is not possible to convert the format normally if you only get the src_shape, + // and must get the shape before you mend the shape. 
+ // So the parameters here need to be passed in both src_shape and dst_shape + std::vector src_shape; + std::vector dst_shape; + DataType src_data_type; +}; + +struct TransResult { + std::shared_ptr data; + // data length in bytes + size_t length; +}; + +class FormatTransfer { + public: + virtual ~FormatTransfer() = default; + virtual Status TransFormat(const TransArgs &args, TransResult &result) = 0; + virtual Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, + Format dst_format, std::vector &dst_shape) = 0; +}; + +using FormatTransferBuilder = std::function()>; + +class FormatTransferRegister { + public: + FormatTransferRegister(FormatTransferBuilder builder, Format src, Format dst); + ~FormatTransferRegister() = default; +}; + +#define REGISTER_FORMAT_TRANSFER(TransferClass, format1, format2) \ + namespace { \ + FormatTransferRegister format_transfer_register_##TransferClass##format1##format2( \ + []() { return ge::MakeShared(); }, format1, format2); \ + } + +/** + * Build a formattransfer according to 'args' + * @param args + * @param result + * @return + */ +std::shared_ptr BuildFormatTransfer(const TransArgs &args); + +bool FormatTransferExists(const TransArgs &args); + +} // namespace formats +} // namespace ge +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.cc b/src/ge/common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.cc new file mode 100644 index 00000000..79194962 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.cc @@ -0,0 +1,157 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +bool CheckDataTypeSupported(const DataType &data_type) { return (data_type == DT_FLOAT || data_type == DT_FLOAT16); } + +Status CheckArgsForC1hwncoc0ToHwcn(const TransArgs &args) { + auto src_shape = args.src_shape; + auto dst_shape = args.dst_shape; + if (args.src_format != FORMAT_C1HWNCoC0 || args.dst_format != FORMAT_HWCN) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (!CheckDataTypeSupported(args.src_data_type)) { + GELOGE(UNSUPPORTED, "Failed to trans shape from NC1HWNCoC0 to HWCN, invalid data type %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return UNSUPPORTED; + } + if (!CheckShapeValid(src_shape, kC1hwncoc0DimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(src_shape).c_str()); + return PARAM_INVALID; + } + if (!CheckShapeValid(dst_shape, kHwcnDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + if (src_shape.at(kC1hwncoc0C1) != (dst_shape.at(kHwcnC) - 1) / kCubeSize + 1 || + 
src_shape.at(kC1hwncoc0H) != dst_shape.at(kHwcnH) || src_shape.at(kC1hwncoc0W) != dst_shape.at(kHwcnW) || + src_shape.at(kC1hwncoc0N) != dst_shape.at(kHwcnN) || src_shape.at(kC1hwncoc0Co) != kCubeSize || + src_shape.at(kC1hwncoc0C0) != kCubeSize) { + GELOGE(PARAM_INVALID, "Failed to check relationship between src and dst shape, src shape %s, dst shape %s", + ShapeToString(src_shape).c_str(), ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status GetDstDataAfterTrans(const TransArgs &args, TransResult &result, int size, int64_t total_size) { + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto h = args.src_shape.at(kC1hwncoc0H); + auto w = args.src_shape.at(kC1hwncoc0W); + auto n = args.src_shape.at(kC1hwncoc0N); + auto c0 = args.src_shape.at(kC1hwncoc0C0); + auto co = args.src_shape.at(kC1hwncoc0Co); + auto c = args.dst_shape.at(kHwcnC); + int64_t cn = c * n; + int64_t wcn = w * cn; + int64_t coc0 = co * c0; + int64_t ncoc0 = n * coc0; + int64_t wncoc0 = w * ncoc0; + int64_t hwncoc0 = h * wncoc0; + + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = h_idx * wcn; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t w_head_addr = h_head_addr + w_idx * cn; + for (int64_t c_idx = 0; c_idx < c; c_idx++) { + int64_t c_head_addr = w_head_addr + c_idx * n; + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t dst_idx = c_head_addr + n_idx; + int64_t c1_idx = c_idx / kCubeSize; + int64_t c0_idx = c_idx % kCubeSize; + int64_t co_idx = c0_idx; + int64_t src_idx = c1_idx * hwncoc0 + h_idx * wncoc0 + w_idx * ncoc0 + n_idx * coc0 + 
co_idx * c0 + c0_idx; + auto src_offset = src_idx * size; + auto dst_offset = dst_idx * size; + // The memcpy_s/memset_s argument `dstMax` must be less than 2G + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from C1HWNCoC0[%ld, %ld, %ld, %ld, %ld, %ld] offset %ld to " + "HWCN[%ld, %ld, %ld, %ld] offset %ld, err-code %d", + c1_idx, h_idx, w_idx, n_idx, co_idx, c0_idx, src_offset, h_idx, w_idx, c_idx, n_idx, dst_offset, + ret); + return INTERNAL_ERROR; + } + } + } + } + } + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferC1hwncoc0Hwcn::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForC1hwncoc0ToHwcn(args) != SUCCESS) { + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.src_data_type); + int64_t total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from C1HWNCoC0 to HWCN, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + if (GetDstDataAfterTrans(args, result, size, total_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to get data after trans, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), 
total_size); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status FormatTransferC1hwncoc0Hwcn::TransShape(Format src_format, const std::vector &src_shape, + DataType data_type, Format dst_format, std::vector &dst_shape) { + GELOGD("The shape derivation from C1HWNCoC0 to HWCN is not unique. Trans shape in this direction is not supported"); + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferC1hwncoc0Hwcn, FORMAT_C1HWNCoC0, FORMAT_HWCN) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.h b/src/ge/common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.h new file mode 100644 index 00000000..fccc4524 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.h @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_C1HWNCOC0_HWCN_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_C1HWNCOC0_HWCN_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferC1hwncoc0Hwcn : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_C1HWNCOC0_HWCN_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_fractal_nz.cc b/src/ge/common/formats/format_transfers/format_transfer_fractal_nz.cc new file mode 100644 index 00000000..d92cd134 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fractal_nz.cc @@ -0,0 +1,329 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_fractal_nz.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +const int kDimSize4D = 4; +bool IsDataTypeSupport(DataType data_type) { return GetSizeByDataType(data_type) > 0; } + +using ShapeVector = std::vector; +bool CheckShape(Format format, const ShapeVector &shape) { + switch (format) { + case FORMAT_ND: + return IsShapeValid(shape); + case FORMAT_NCHW: + case FORMAT_NHWC: + return CheckShapeValid(shape, kDimSize4D); + default: + GELOGE(PARAM_INVALID, "Trans format between %s and FORMAT_FRACTAL_NZ is not supported.", + TypeUtils::FormatToSerialString(format).c_str()); + return false; + } +} + +/// +/// After the conversion to two-dimensional matrix, the memory arrangement is small z and large N. +/// @src_shape: N*H*W +/// @dst_shape: N*W1*H1*H0*w0 +/// @return +/// +Status TransShapeToFracNz(const ShapeVector &src_shape, DataType data_type, ShapeVector &dst_shape, + ShapeVector &hw_shape) { + dst_shape.clear(); + hw_shape.clear(); + auto w0 = GetCubeSizeByDataType(data_type); + auto h0 = GetCubeSizeByDataType(data_type); + switch (src_shape.size()) { + case 1: + dst_shape.push_back(Ceil(src_shape[0], w0)); + dst_shape.push_back(1); + dst_shape.push_back(h0); + dst_shape.push_back(w0); + hw_shape.push_back(1); + hw_shape.push_back(1); + hw_shape.push_back(src_shape[0]); + if (!IsShapeValid(dst_shape)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; + default: + auto size = src_shape.size(); + int64_t times = 1; + for (size_t i = 0; i != size - 2; i++) { + dst_shape.push_back(src_shape[i]); + times *= src_shape[i]; + } + dst_shape.push_back(Ceil(src_shape[size - 1], w0)); + 
dst_shape.push_back(Ceil(src_shape[size - 2], h0)); + dst_shape.push_back(h0); + dst_shape.push_back(w0); + hw_shape.push_back(times); + hw_shape.push_back(src_shape[size - 2]); + hw_shape.push_back(src_shape[size - 1]); + if (!IsShapeValid(dst_shape)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; + } +} + +Status CheckShapeRelation(const TransArgs &args, ShapeVector &hw_shape) { + ShapeVector expect_src_shape; + auto ret = TransShapeToFracNz(args.dst_shape, args.src_data_type, expect_src_shape, hw_shape); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Trans shape from %s to %s, shape %s to %s, data type %s failed", + TypeUtils::FormatToSerialString(args.dst_format).c_str(), + TypeUtils::FormatToSerialString(args.src_format).c_str(), ShapeToString(args.dst_shape).c_str(), + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return INTERNAL_ERROR; + } + if (args.src_shape != expect_src_shape) { + GELOGE(PARAM_INVALID, "Failed to trans format from %s to %s, invalid relationship between src shape %s and dst %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status TransFormatFromNdToFracNz(const TransArgs &args, TransResult &result, const ShapeVector &hw_shape) { + int size = GetSizeByDataType(args.src_data_type); + int64_t dst_size = GetItemNumByShape(args.dst_shape) * size; + std::shared_ptr dst(new (std::nothrow) uint8_t[dst_size](), std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), dst_size); + 
return OUT_OF_MEMORY; + } + + // src&dst_shape can be written as times*H*W & times*W1*H1*H0*W0, respectively. dst_shape_size >= kDimNum4D + auto times = hw_shape.at(0); + auto h = hw_shape.at(1); + auto w = hw_shape.at(2); + auto hw = h * w; + + auto shape_size = args.dst_shape.size(); + auto w1 = args.dst_shape[shape_size - 4]; + auto h1 = args.dst_shape[shape_size - 3]; + auto h0 = args.dst_shape[shape_size - 2]; + auto w0 = args.dst_shape[shape_size - 1]; + auto h1h0 = h1 * h0; + auto h1h0w0 = h1h0 * w0; + auto w1h1h0w0 = w1 * h1h0w0; + auto num_w1 = w / w0; + + for (int64_t times_idx = 0; times_idx < times; times_idx++) { + auto times_head = times_idx * w1h1h0w0; + auto src_times_head = times_idx * hw; + for (int64_t h1h0_idx = 0; h1h0_idx < h; h1h0_idx++) { + auto h1h0_head = times_head + h1h0_idx * w0; + auto src_h_head = src_times_head + h1h0_idx * w; + for (int64_t w1_idx = 0; w1_idx < num_w1; w1_idx++) { + auto dst_offset = (h1h0_head + w1_idx * h1h0w0) * size; + auto src_offset = (src_h_head + w1_idx * w0) * size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size * w0)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d", dst_offset, ret); + return INTERNAL_ERROR; + } + } + auto w1_head = num_w1 * w0; + for (int64_t w0_idx = 0; w1_head + w0_idx < w; w0_idx++) { + auto src_w_idx = w1_head + w0_idx; + auto dst_offset = (h1h0_head + num_w1 * h1h0w0 + w0_idx) * size; + auto src_offset = (src_h_head + src_w_idx) * size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? 
dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d", dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + result.data = dst; + result.length = static_cast(dst_size); + return SUCCESS; +} + +Status TransFormatFromFracNzToNd(const TransArgs &args, TransResult &result, const ShapeVector &dst_hw_shape) { + int size = GetSizeByDataType(args.src_data_type); + int64_t dst_size = GetItemNumByShape(args.dst_shape) * size; + std::shared_ptr dst(new (std::nothrow) uint8_t[dst_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), dst_size); + return OUT_OF_MEMORY; + } + + auto times = dst_hw_shape.at(0); + auto h = dst_hw_shape.at(1); + auto w = dst_hw_shape.at(2); + auto hw = h * w; + + auto shape_size = args.src_shape.size(); + auto w1 = args.src_shape[shape_size - 4]; + auto h1 = args.src_shape[shape_size - 3]; + auto h0 = args.src_shape[shape_size - 2]; + auto w0 = args.src_shape[shape_size - 1]; + auto h1h0 = h1 * h0; + auto h1h0w0 = h1h0 * w0; + auto w1h1h0w0 = w1 * h1h0w0; + auto num_w1 = w / w0; + errno_t ret; + + for (int64_t times_idx = 0; times_idx < times; times_idx++) { + auto times_head = times_idx * w1h1h0w0; + auto dst_times_head = times_idx * hw; + for (int64_t h1h0_idx = 0; h1h0_idx < h; h1h0_idx++) { + auto h1h0_head = times_head + h1h0_idx * w0; + auto dst_h_head = dst_times_head + h1h0_idx * w; + for (int64_t w1_idx = 0; w1_idx < num_w1; w1_idx++) { + auto src_offset = (h1h0_head + w1_idx * h1h0w0) * size; + auto dst_offset = (dst_h_head + w1_idx * w0) * size; + auto protected_size = dst_size 
- dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size * w0)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d", dst_offset, ret); + return INTERNAL_ERROR; + } + } + auto w1_head = num_w1 * w0; + for (int64_t w0_idx = 0; w1_head + w0_idx < w; w0_idx++) { + auto dst_w_idx = w1_head + w0_idx; + auto src_offset = (h1h0_head + num_w1 * h1h0w0 + w0_idx) * size; + auto dst_offset = (dst_h_head + dst_w_idx) * size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d", dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + result.data = dst; + result.length = static_cast(dst_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferFractalNz::TransFormat(const TransArgs &args, TransResult &result) { + if (!IsDataTypeSupport(args.src_data_type) || !CheckShape(args.src_format, args.src_shape) || + !IsShapeValid(args.dst_shape)) { + GELOGE(PARAM_INVALID, "Trans format from %s to %s, src shape %s, dst shape %s, data type %s is not supported", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from %s to %s, src shape %s, dst shape %s, data type %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + 
TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + ShapeVector expect_shape; + ShapeVector hw_shape; + auto ret = TransShapeToFracNz(args.src_shape, args.src_data_type, expect_shape, hw_shape); + if (ret != SUCCESS) { + return ret; + } + if (args.dst_shape != expect_shape) { + GELOGE(PARAM_INVALID, "Failed to trans format from %s to %s, the dst shape %s is invalid, expect %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.dst_shape).c_str(), + ShapeToString(expect_shape).c_str()); + return PARAM_INVALID; + } + return TransFormatFromNdToFracNz(args, result, hw_shape); +} + +Status FormatTransferFractalNz::TransShape(Format src_format, const ShapeVector &src_shape, DataType data_type, + Format dst_format, ShapeVector &dst_shape) { + if (!IsDataTypeSupport(data_type) || !CheckShape(src_format, src_shape)) { + GELOGE(PARAM_INVALID, "Trans format from %s to %s, src shape %s, data type %s is not supported", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str(), + ShapeToString(src_shape).c_str(), TypeUtils::DataTypeToSerialString(data_type).c_str()); + return PARAM_INVALID; + } + ShapeVector hw_shape; + return TransShapeToFracNz(src_shape, data_type, dst_shape, hw_shape); +} + +Status FormatTransferFractalNzND::TransFormat(const TransArgs &args, TransResult &result) { + if (!IsDataTypeSupport(args.src_data_type) || !IsShapeValid(args.src_shape) || + !CheckShape(args.dst_format, args.dst_shape)) { + GELOGE(PARAM_INVALID, "Trans format from %s to %s, src shape %s, dst shape %s, data type %s is not supported", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + 
ShapeToString(args.dst_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from %s to %s, src shape %s, dst shape %s, data type %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + + ShapeVector hw_shape; + if (CheckShapeRelation(args, hw_shape) != SUCCESS) { + return PARAM_INVALID; + } + return TransFormatFromFracNzToNd(args, result, hw_shape); +} + +Status FormatTransferFractalNzND::TransShape(Format src_format, const ShapeVector &src_shape, DataType data_type, + Format dst_format, ShapeVector &dst_shape) { + GELOGD("The shape derivation from %s to %s is not unique. Trans shape is not supported", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str()); + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferFractalNz, FORMAT_ND, FORMAT_FRACTAL_NZ) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalNz, FORMAT_NCHW, FORMAT_FRACTAL_NZ) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalNz, FORMAT_NHWC, FORMAT_FRACTAL_NZ) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalNzND, FORMAT_FRACTAL_NZ, FORMAT_ND) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalNzND, FORMAT_FRACTAL_NZ, FORMAT_NCHW) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalNzND, FORMAT_FRACTAL_NZ, FORMAT_NHWC) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_fractal_nz.h b/src/ge/common/formats/format_transfers/format_transfer_fractal_nz.h new file mode 100644 index 00000000..c593aa7c --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fractal_nz.h @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_NZ_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_NZ_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +// transfer from nd to nz +class FormatTransferFractalNz : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; + +// transfer nz to nd +class FormatTransferFractalNzND : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_NZ_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_fractal_z.cc b/src/ge/common/formats/format_transfers/format_transfer_fractal_z.cc new file mode 100644 index 00000000..eafb1fa1 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fractal_z.cc @@ -0,0 +1,363 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use 
this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/formats/format_transfers/format_transfer_fractal_z.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +Status CheckDataTypeSupport(DataType data_type) { return GetSizeByDataType(data_type) > 0 ? SUCCESS : UNSUPPORTED; } + +/// +/// FZ represents the weight of convolution,. +/// After the conversion to two-dimensional matrix, the memory arrangement is small n and large Z. +/// If 4D(eg.NCHW) is used to represent convolution kernel, N is width, HWC is height. 
+/// +/// frac_z axises: (C1*H*W, No, Ni, C0), which Ni = 16, C0 = 16/32, No = Ceil(N/Ni), C1 = Ceil(C/C0) +/// +Status TransShapeToFz(int64_t n, int64_t c, int64_t h, int64_t w, DataType data_type, std::vector &dst_shape) { + auto c0 = GetCubeSizeByDataType(data_type); + if (c0 < 0) { + return UNSUPPORTED; + } + + auto c1 = Ceil(c, c0); + auto no = Ceil(n, static_cast(kNiSize)); + + dst_shape.clear(); + dst_shape.push_back(h * w * c1); + dst_shape.push_back(no); + dst_shape.push_back(kNiSize); + dst_shape.push_back(c0); + if (!IsShapeValid(dst_shape)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status TransShapeNchwToFz(const std::vector &src_shape, DataType data_type, std::vector &dst_shape) { + if (!CheckShapeValid(src_shape, kNchwDimsNum)) { + return PARAM_INVALID; + } + + auto n = src_shape.at(kNchwN); + auto c = src_shape.at(kNchwC); + auto h = src_shape.at(kNchwH); + auto w = src_shape.at(kNchwW); + return TransShapeToFz(n, c, h, w, data_type, dst_shape); +} + +Status TransShapeHwcnToFz(const std::vector &src_shape, DataType data_type, std::vector &dst_shape) { + if (!CheckShapeValid(src_shape, kHwcnDimsNum)) { + return PARAM_INVALID; + } + + auto h = src_shape.at(kHwcnH); + auto w = src_shape.at(kHwcnW); + auto c = src_shape.at(kHwcnC); + auto n = src_shape.at(kHwcnN); + + return TransShapeToFz(n, c, h, w, data_type, dst_shape); +} + +Status TransShapeNhwcToFz(const std::vector &src_shape, DataType data_type, std::vector &dst_shape) { + if (!CheckShapeValid(src_shape, kNhwcDimsNum)) { + return PARAM_INVALID; + } + + auto n = src_shape.at(kNhwcN); + auto h = src_shape.at(kNhwcH); + auto w = src_shape.at(kNhwcW); + auto c = src_shape.at(kNhwcC); + + return TransShapeToFz(n, c, h, w, data_type, dst_shape); +} + +Status TransFormatFromNchwToFz(const TransArgs &args, TransResult &result) { + int64_t n = args.src_shape.at(kNchwN); + int64_t c = 
args.src_shape.at(kNchwC); + int64_t h = args.src_shape.at(kNchwH); + int64_t w = args.src_shape.at(kNchwW); + + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + int64_t c1 = Ceil(c, c0); + + int64_t hw = h * w; + int64_t chw = c * hw; + int64_t hwc0 = hw * c0; + int64_t nchw = n * chw; + + // horizontal fractal matrix count (N) + int64_t hf_cnt = Ceil(n, static_cast(kNiSize)); + // vertical fractal matrix count (C1HWC0) + int64_t vf_cnt = c1 * hw; + // elements count in one fractal + int64_t fractal_ele_cnt = c0 * kNiSize; + int64_t total_ele_cnt = hf_cnt * vf_cnt * fractal_ele_cnt; + int size = GetSizeByDataType(args.src_data_type); + int64_t dst_size = total_ele_cnt * size; + std::shared_ptr dst(new (std::nothrow) uint8_t[dst_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), dst_size); + return OUT_OF_MEMORY; + } + + for (int64_t vfi = 0; vfi < vf_cnt; vfi++) { + // vertical fractal matrix base index + auto vf_base_i = vfi * hf_cnt; + for (int64_t hfi = 0; hfi < hf_cnt; hfi++) { + // global fractal matrix index + auto gfi = vf_base_i + hfi; + auto src_n_offset = hfi * chw * kNiSize; + auto src_f_offset = src_n_offset + vfi % hw + vfi / hw * hwc0; + for (int64_t row = 0; row < c0; row++) { + auto src_ci = vfi / hw * c0 + row; + auto src_row_offset = src_f_offset + row * hw; + for (int col = 0; col < kNiSize; col++) { + auto src_ni = hfi * kNiSize + col; + auto src_offset = src_row_offset + chw * col; + // pad 0 + // 1. src_ni grater than n + // 2. src_ci grater than c + // 3. 
source address grater than original array size + auto need_pad_zero = src_ni >= n || src_offset >= nchw || src_ci >= c; + auto idx = gfi * fractal_ele_cnt + col * c0 + row; + auto offset = idx * size; + auto protected_size = dst_size - offset < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - offset + : static_cast(SECUREC_MEM_MAX_LEN); + errno_t ret; + if (need_pad_zero) { + ret = memset_s(dst.get() + offset, static_cast(protected_size), 0, static_cast(size)); + } else { + ret = memcpy_s(dst.get() + offset, static_cast(protected_size), args.data + src_offset * size, + static_cast(size)); + } + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d pad mode %d", offset, + ret, need_pad_zero); + return INTERNAL_ERROR; + } + } + } + } + } + + result.data = dst; + result.length = static_cast(dst_size); + return SUCCESS; +} + +Status TransFormatHwcnToFz(const TransArgs &args, TransResult &result) { + int64_t h = args.src_shape[kHwcnH]; + int64_t w = args.src_shape[kHwcnW]; + int64_t c = args.src_shape[kHwcnC]; + int64_t n = args.src_shape[kHwcnN]; + int64_t n1n0 = Ceil(n, static_cast(kNiSize)) * kNiSize; + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + int64_t c1 = Ceil(c, c0); + + auto cn = c * n; + auto wcn = w * cn; + auto n1n0c0 = n1n0 * c0; + auto wn1n0c0 = w * n1n0c0; + auto hwn1n0c0 = h * wn1n0c0; + + int64_t data_size = GetSizeByDataType(args.src_data_type); + int64_t dst_size = 1; + for (auto dim : args.dst_shape) { + dst_size *= dim; + } + dst_size *= data_size; + std::shared_ptr dst(new (std::nothrow) uint8_t[dst_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), dst_size); + return OUT_OF_MEMORY; + } + + for (int64_t c1i = 0; c1i < c1; c1i++) { + for (int64_t hi = 0; hi < 
h; hi++) { + for (int64_t wi = 0; wi < w; wi++) { + for (int64_t n1n0i = 0; n1n0i < n1n0; n1n0i++) { + for (int64_t c0i = 0; c0i < c0; c0i++) { + int64_t dst_idx = c1i * hwn1n0c0 + hi * wn1n0c0 + wi * n1n0c0 + n1n0i * c0 + c0i; + int64_t dst_offset = dst_idx * data_size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto pad_zero = ((c1i * c0 + c0i) >= c) || (n1n0i >= n); + errno_t ret; + if (pad_zero) { + ret = memset_s(dst.get() + dst_offset, static_cast(protected_size), 0, + static_cast(data_size)); + } else { + int64_t src_idx = hi * wcn + wi * cn + (c1i * c0 + c0i) * n + n1n0i; + ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), + args.data + src_idx * data_size, static_cast(data_size)); + } + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d, pad mode %d", + dst_offset, ret, pad_zero); + return INTERNAL_ERROR; + } + } + } + } + } + } + + result.data = dst; + result.length = static_cast(dst_size); + return SUCCESS; +} + +Status TransFormatNhwcToFz(const TransArgs &args, TransResult &result) { + int64_t n = args.src_shape[kNhwcN]; + int64_t h = args.src_shape[kNhwcH]; + int64_t w = args.src_shape[kNhwcW]; + int64_t c = args.src_shape[kNhwcC]; + auto wc = w * c; + auto hwc = h * w * c; + + int64_t n1n0 = Ceil(n, static_cast(kNiSize)) * kNiSize; + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + int64_t c1 = Ceil(c, c0); + auto n1n0c0 = n1n0 * c0; + auto wn1n0c0 = w * n1n0c0; + auto hwn1n0c0 = h * wn1n0c0; + + int64_t data_size = GetSizeByDataType(args.src_data_type); + int64_t dst_size = 1; + for (auto dim : args.dst_shape) { + dst_size *= dim; + } + dst_size *= data_size; + std::shared_ptr dst(new (std::nothrow) uint8_t[dst_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf 
%ld", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), dst_size); + return OUT_OF_MEMORY; + } + + for (int64_t c1i = 0; c1i < c1; c1i++) { + for (int64_t hi = 0; hi < h; hi++) { + for (int64_t wi = 0; wi < w; wi++) { + for (int64_t n1n0i = 0; n1n0i < n1n0; n1n0i++) { + for (int64_t c0i = 0; c0i < c0; c0i++) { + int64_t dst_idx = c1i * hwn1n0c0 + hi * wn1n0c0 + wi * n1n0c0 + n1n0i * c0 + c0i; + int64_t dst_offset = dst_idx * data_size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto pad_zero = ((c1i * c0 + c0i) >= c) || (n1n0i >= n); + errno_t ret; + if (pad_zero) { + ret = memset_s(dst.get() + dst_offset, static_cast(protected_size), 0, + static_cast(data_size)); + } else { + int64_t src_idx = n1n0i * hwc + hi * wc + wi * c + (c1i * c0 + c0i); + ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), + args.data + src_idx * data_size, static_cast(data_size)); + } + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d, pad mode %d", + dst_offset, ret, pad_zero); + return INTERNAL_ERROR; + } + } + } + } + } + } + + result.data = dst; + result.length = static_cast(dst_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferFractalZ::TransFormat(const TransArgs &args, TransResult &result) { + GELOGD("Begin to trans format from %s to %s, src shape %s, data type %s, dst shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), ShapeToString(args.dst_shape).c_str()); + std::vector expect_shape; + auto ret = TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, expect_shape); + if (ret != SUCCESS) { + return ret; + } + if 
(!args.dst_shape.empty() && args.dst_shape != expect_shape) { + GELOGE(PARAM_INVALID, "Failed to trans format from %s to %s, the dst shape %s is invalid, expect %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.dst_shape).c_str(), + ShapeToString(expect_shape).c_str()); + return PARAM_INVALID; + } + + if (args.src_format == FORMAT_NCHW && args.dst_format == FORMAT_FRACTAL_Z) { + return TransFormatFromNchwToFz(args, result); + } + + if (args.src_format == FORMAT_HWCN && args.dst_format == FORMAT_FRACTAL_Z) { + return TransFormatHwcnToFz(args, result); + } + + if (args.src_format == FORMAT_NHWC && args.dst_format == FORMAT_FRACTAL_Z) { + return TransFormatNhwcToFz(args, result); + } + + return UNSUPPORTED; +} + +Status FormatTransferFractalZ::TransShape(Format src_format, const std::vector &src_shape, DataType data_type, + Format dst_format, std::vector &dst_shape) { + if (CheckDataTypeSupport(data_type) != SUCCESS) { + return UNSUPPORTED; + } + + if (src_format == FORMAT_NCHW && dst_format == FORMAT_FRACTAL_Z) { + return TransShapeNchwToFz(src_shape, data_type, dst_shape); + } + if (src_format == FORMAT_HWCN && dst_format == FORMAT_FRACTAL_Z) { + return TransShapeHwcnToFz(src_shape, data_type, dst_shape); + } + if (src_format == FORMAT_NHWC && dst_format == FORMAT_FRACTAL_Z) { + return TransShapeNhwcToFz(src_shape, data_type, dst_shape); + } + + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZ, FORMAT_NCHW, FORMAT_FRACTAL_Z) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZ, FORMAT_HWCN, FORMAT_FRACTAL_Z) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZ, FORMAT_NHWC, FORMAT_FRACTAL_Z) + +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_fractal_z.h b/src/ge/common/formats/format_transfers/format_transfer_fractal_z.h new file mode 100644 index 00000000..9653f3e7 --- /dev/null +++ 
b/src/ge/common/formats/format_transfers/format_transfer_fractal_z.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_Z_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_Z_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferFractalZ : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_Z_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_fractal_zz.cc b/src/ge/common/formats/format_transfers/format_transfer_fractal_zz.cc new file mode 100644 index 00000000..59baccff --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fractal_zz.cc @@ -0,0 +1,339 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/formats/format_transfers/format_transfer_fractal_zz.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +const int kDimSize4D = 4; +bool IsDataTypeSupport(DataType d_type) { return GetSizeByDataType(d_type) > 0; } + +using ShapeVector = std::vector; +bool CheckShape(Format format, const ShapeVector &shape) { + switch (format) { + case FORMAT_ND: + return IsShapeValid(shape); + case FORMAT_NCHW: + case FORMAT_NHWC: + return CheckShapeValid(shape, kDimSize4D); + default: + GELOGE(PARAM_INVALID, "Not support trans format between %s and FORMAT_FRACTAL_ZZ.", + TypeUtils::FormatToSerialString(format).c_str()); + return false; + } +} + +/** + * After the conversion to two-dimensional matrix, the memory arrangement is small z and large Z. 
+ * @src_shape: N*H*W + * @dst_shape: N*H1*W1*H0*w0 + * @return + */ +Status TransShapeToFracZz(const ShapeVector &src_shape, DataType data_type, ShapeVector &dst_shape, + ShapeVector &hw_shape) { + dst_shape.clear(); + hw_shape.clear(); + auto w0 = GetCubeSizeByDataType(data_type); + auto h0 = GetCubeSizeByDataType(data_type); + switch (src_shape.size()) { + case 1: + dst_shape.push_back(1); + dst_shape.push_back(Ceil(src_shape[0], w0)); + dst_shape.push_back(h0); + dst_shape.push_back(w0); + hw_shape.push_back(1); + hw_shape.push_back(1); + hw_shape.push_back(src_shape[0]); + if (!IsShapeValid(dst_shape)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; + default: + auto size = src_shape.size(); + int64_t times = 1; + for (size_t i = 0; i != size - 2; i++) { + dst_shape.push_back(src_shape[i]); + times *= src_shape[i]; + } + dst_shape.push_back(Ceil(src_shape[size - 2], h0)); + dst_shape.push_back(Ceil(src_shape[size - 1], w0)); + dst_shape.push_back(h0); + dst_shape.push_back(w0); + hw_shape.push_back(times); + hw_shape.push_back(src_shape[size - 2]); + hw_shape.push_back(src_shape[size - 1]); + if (!IsShapeValid(dst_shape)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; + } +} + +Status CheckShapeRelation(const TransArgs &args, ShapeVector &hw_shape) { + ShapeVector expect_src_shape; + auto ret = TransShapeToFracZz(args.dst_shape, args.src_data_type, expect_src_shape, hw_shape); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Trans shape from %s to %s, shape %s to %s, data type %s failed", + TypeUtils::FormatToSerialString(args.dst_format).c_str(), + TypeUtils::FormatToSerialString(args.src_format).c_str(), ShapeToString(args.dst_shape).c_str(), + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return INTERNAL_ERROR; + } + 
if (args.src_shape != expect_src_shape) { + GELOGE(PARAM_INVALID, + "Failed to trans format from %s to %s, invalid relationship between src shape %s and dst shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status TransFormatFromNdToFracZz(const TransArgs &args, TransResult &result, const ShapeVector &hw_shape) { + int size = GetSizeByDataType(args.src_data_type); + int64_t dst_size = GetItemNumByShape(args.dst_shape) * size; + std::shared_ptr dst(new (std::nothrow) uint8_t[dst_size](), std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), dst_size); + return OUT_OF_MEMORY; + } + // The src&dst_shape can be written as times*H*W & times*H1*W1*H0*W0, respectively. 
dst_shape_size >= kDimNum4D + auto times = hw_shape.at(0); + auto h = hw_shape.at(1); + auto w = hw_shape.at(2); + auto hw = h * w; + + auto shape_size = args.dst_shape.size(); + auto h1 = args.dst_shape[shape_size - 4]; + auto w1 = args.dst_shape[shape_size - 3]; + auto h0 = args.dst_shape[shape_size - 2]; + auto w0 = args.dst_shape[shape_size - 1]; + auto h0w0 = h0 * w0; + auto w1h0w0 = w1 * h0w0; + auto h1w1h0w0 = h1 * w1h0w0; + auto num_w1 = w / w0; + + for (int64_t times_idx = 0; times_idx < times; times_idx++) { + auto times_head = times_idx * h1w1h0w0; + auto src_times_head = times_idx * hw; + for (int64_t h1_idx = 0; h1_idx < h1; h1_idx++) { + auto h1_head = times_head + h1_idx * w1h0w0; + auto src_h1_head = h1_idx * h0; + for (int64_t h0_idx = 0; h0_idx < h0 && h0_idx + src_h1_head < h; h0_idx++) { + auto h0_head = h1_head + h0_idx * w0; + auto src_h_head = src_times_head + (src_h1_head + h0_idx) * w; + for (int64_t w1_idx = 0; w1_idx < num_w1; w1_idx++) { + auto src_offset = (src_h_head + w1_idx * w0) * size; + auto dst_offset = (h0_head + w1_idx * h0w0) * size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size * w0)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d", dst_offset, ret); + return INTERNAL_ERROR; + } + } + auto w1_head = num_w1 * w0; + auto w0_head = h0_head + num_w1 * h0w0; + for (int64_t w0_idx = 0; w0_idx + w1_head < w; w0_idx++) { + auto src_w_idx = w1_head + w0_idx; + auto src_offset = (src_h_head + src_w_idx) * size; + auto dst_offset = (w0_head + w0_idx) * size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? 
dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d", dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + result.data = dst; + result.length = static_cast(dst_size); + return SUCCESS; +} + +Status TransFormatFromFracZzToNd(const TransArgs &args, TransResult &result, const ShapeVector &dst_hw_shape) { + int size = GetSizeByDataType(args.src_data_type); + int64_t dst_size = GetItemNumByShape(args.dst_shape) * size; + std::shared_ptr dst(new (std::nothrow) uint8_t[dst_size](), std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), dst_size); + return OUT_OF_MEMORY; + } + + // The src&dst_shape can be written as times*H*W & times*H1*W1*H0*W0, respectively. 
dst_shape_size >= kDimNum4D + auto times = dst_hw_shape.at(0); + auto h = dst_hw_shape.at(1); + auto w = dst_hw_shape.at(2); + auto hw = h * w; + + auto shape_size = args.src_shape.size(); + auto h1 = args.src_shape[shape_size - 4]; + auto w1 = args.src_shape[shape_size - 3]; + auto h0 = args.src_shape[shape_size - 2]; + auto w0 = args.src_shape[shape_size - 1]; + auto h0w0 = h0 * w0; + auto w1h0w0 = w1 * h0w0; + auto h1w1h0w0 = h1 * w1h0w0; + auto num_w1 = w / w0; + + for (int64_t times_idx = 0; times_idx < times; times_idx++) { + auto times_head = times_idx * h1w1h0w0; + auto dst_times_head = times_idx * hw; + for (int64_t h1_idx = 0; h1_idx < h1; h1_idx++) { + auto h1_head = times_head + h1_idx * w1h0w0; + auto dst_h1_head = h1_idx * h0; + for (int64_t h0_idx = 0; h0_idx < h0 && h0_idx + dst_h1_head < h; h0_idx++) { + auto h0_head = h1_head + h0_idx * w0; + auto dst_h_head = dst_times_head + (dst_h1_head + h0_idx) * w; + for (int64_t w1_idx = 0; w1_idx < num_w1; w1_idx++) { + auto src_offset = (h0_head + w1_idx * h0w0) * size; + auto dst_offset = (dst_h_head + w1_idx * w0) * size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size * w0)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d", dst_offset, ret); + return INTERNAL_ERROR; + } + } + auto w1_head = num_w1 * w0; + auto w0_head = h0_head + num_w1 * h0w0; + for (int64_t w0_idx = 0; w0_idx + w1_head < w; w0_idx++) { + auto src_offset = (w0_head + w0_idx) * size; + auto dst_w_idx = w1_head + w0_idx; + auto dst_offset = (dst_h_head + dst_w_idx) * size; + auto protected_size = dst_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? 
dst_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, "Failed to operate the dst memory at offset %ld, error-code %d", dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + result.data = dst; + result.length = static_cast(dst_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferFractalZz::TransFormat(const TransArgs &args, TransResult &result) { + if (!IsDataTypeSupport(args.src_data_type) || !CheckShape(args.src_format, args.src_shape) || + !IsShapeValid(args.dst_shape)) { + GELOGE(PARAM_INVALID, "Not support trans format from %s to %s, src shape %s, dst shape %s, data type %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from %s to %s, src shape %s, dst shape %s, data type %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + ShapeVector expect_shape; + ShapeVector hw_shape; + auto ret = TransShapeToFracZz(args.src_shape, args.src_data_type, expect_shape, hw_shape); + if (ret != SUCCESS) { + return ret; + } + if (args.dst_shape != expect_shape) { + GELOGE(PARAM_INVALID, "Failed to trans format from %s to %s, the dst shape %s is invalid, expect %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.dst_shape).c_str(), + ShapeToString(expect_shape).c_str()); + return PARAM_INVALID; + } + return 
TransFormatFromNdToFracZz(args, result, hw_shape); +} + +Status FormatTransferFractalZz::TransShape(Format src_format, const ShapeVector &src_shape, DataType data_type, + Format dst_format, ShapeVector &dst_shape) { + if (!IsDataTypeSupport(data_type) || !CheckShape(src_format, src_shape)) { + GELOGE(PARAM_INVALID, "Not support trans format from %s to %s, src shape %s, data type %s", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str(), + ShapeToString(src_shape).c_str(), TypeUtils::DataTypeToSerialString(data_type).c_str()); + return PARAM_INVALID; + } + ShapeVector hw_shape; + return TransShapeToFracZz(src_shape, data_type, dst_shape, hw_shape); +} + +Status FormatTransferFractalZzND::TransFormat(const TransArgs &args, TransResult &result) { + if (!IsDataTypeSupport(args.src_data_type) || !IsShapeValid(args.src_shape) || + !CheckShape(args.dst_format, args.dst_shape)) { + GELOGE(PARAM_INVALID, "Not support trans format from %s to %s, src shape %s, dst shape %s, data type %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from %s to %s, src shape %s, dst shape %s, data type %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + + ShapeVector hw_shape; + if (CheckShapeRelation(args, hw_shape) != SUCCESS) { + return PARAM_INVALID; + } + return TransFormatFromFracZzToNd(args, result, hw_shape); +} + +Status FormatTransferFractalZzND::TransShape(Format src_format, const ShapeVector &src_shape, DataType data_type, + Format 
dst_format, ShapeVector &dst_shape) { + GELOGD("The shape derivation from %s to %s is not unique. Trans shape is not supported", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str()); + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZz, FORMAT_ND, FORMAT_FRACTAL_ZZ) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZz, FORMAT_NCHW, FORMAT_FRACTAL_ZZ) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZz, FORMAT_NHWC, FORMAT_FRACTAL_ZZ) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZzND, FORMAT_FRACTAL_ZZ, FORMAT_ND) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZzND, FORMAT_FRACTAL_ZZ, FORMAT_NCHW) +REGISTER_FORMAT_TRANSFER(FormatTransferFractalZzND, FORMAT_FRACTAL_ZZ, FORMAT_NHWC) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_fractal_zz.h b/src/ge/common/formats/format_transfers/format_transfer_fractal_zz.h new file mode 100644 index 00000000..4250ce93 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fractal_zz.h @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_ZZ_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_ZZ_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +// Transfer from nd to zz +class FormatTransferFractalZz : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; + +// Transfer zz to nd +class FormatTransferFractalZzND : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACTAL_ZZ_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_fracz_hwcn.cc b/src/ge/common/formats/format_transfers/format_transfer_fracz_hwcn.cc new file mode 100644 index 00000000..4058d349 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fracz_hwcn.cc @@ -0,0 +1,160 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_fracz_hwcn.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +bool CheckDataTypeSupported(const DataType &data_type) { return GetSizeByDataType(data_type) > 0; } + +Status CheckArgsForFracZToHwcn(const TransArgs &args) { + auto src_shape = args.src_shape; + auto dst_shape = args.dst_shape; + if (args.src_format != FORMAT_FRACTAL_Z || args.dst_format != FORMAT_HWCN) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (!CheckDataTypeSupported(args.src_data_type)) { + GELOGE(UNSUPPORTED, "Failed to trans shape from FORMAT_FRACTAL_Z to HWCN, invalid data type %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return UNSUPPORTED; + } + if (!CheckShapeValid(src_shape, kFracZDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(src_shape).c_str()); + return PARAM_INVALID; + } + if (!CheckShapeValid(dst_shape, kHwcnDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + if (c0 < 0) { + return PARAM_INVALID; + } + int64_t c1 = Ceil(dst_shape.at(kHwcnC), c0); + int64_t n0 = Ceil(dst_shape.at(kHwcnN), static_cast(kNiSize)); + if (src_shape.at(kFracZHWC1) != dst_shape.at(kHwcnH) * dst_shape.at(kHwcnW) * c1 || src_shape.at(kFracZC0) != c0 || + src_shape.at(kFracZNi) != kNiSize || src_shape.at(kFracZN0) != n0) { + GELOGE(PARAM_INVALID, "Failed to check relationship between src and dst shape, src shape %s, dst shape %s", + 
ShapeToString(src_shape).c_str(), ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status GetDstDataAfterTrans(const TransArgs &args, TransResult &result, const int size, const int64_t total_size) { + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto n0 = args.src_shape.at(kFracZN0); + auto ni = args.src_shape.at(kFracZNi); + auto c0 = args.src_shape.at(kFracZC0); + auto h = args.dst_shape.at(kHwcnH); + auto w = args.dst_shape.at(kHwcnW); + auto c = args.dst_shape.at(kHwcnC); + auto n = args.dst_shape.at(kHwcnN); + int64_t nc = ni * n0; + int64_t ncc0 = nc * c0; + int64_t wncc0 = w * ncc0; + int64_t hwncc0 = h * wncc0; + int64_t cn = c * n; + int64_t wcn = w * cn; + + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = h_idx * wcn; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t w_head_addr = h_head_addr + w_idx * cn; + for (int64_t c_idx = 0; c_idx < c; c_idx++) { + int64_t c_head_addr = w_head_addr + c_idx * n; + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t dst_idx = c_head_addr + n_idx; + int64_t c1_idx = c_idx / c0; + int64_t c0_idx = c_idx % c0; + int64_t nc_idx = n_idx; + int64_t src_idx = c1_idx * hwncc0 + h_idx * wncc0 + w_idx * ncc0 + nc_idx * c0 + c0_idx; + auto src_offset = src_idx * size; + auto dst_offset = dst_idx * size; + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? 
total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from FracZ offset %ld to HWCN[%ld, %ld, %ld, %ld] " + "offset %ld, err-code %d", + src_offset, h_idx, w_idx, c_idx, n_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferFracZHwcn::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForFracZToHwcn(args) != SUCCESS) { + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.src_data_type); + auto total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from FracZ to HWCN, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + if (GetDstDataAfterTrans(args, result, size, total_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to get data after trans, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status FormatTransferFracZHwcn::TransShape(Format src_format, const std::vector &src_shape, DataType data_type, + Format dst_format, std::vector &dst_shape) { + GELOGD("The shape derivation from FracZ to HWCN is not unique. 
Trans shape in this direction is not supported"); + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferFracZHwcn, FORMAT_FRACTAL_Z, FORMAT_HWCN) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_fracz_hwcn.h b/src/ge/common/formats/format_transfers/format_transfer_fracz_hwcn.h new file mode 100644 index 00000000..49d8d336 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fracz_hwcn.h @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_HWCN_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_HWCN_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferFracZHwcn : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_HWCN_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_fracz_nchw.cc b/src/ge/common/formats/format_transfers/format_transfer_fracz_nchw.cc new file mode 100644 index 00000000..83d33cb2 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fracz_nchw.cc @@ -0,0 +1,160 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_fracz_nchw.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +bool CheckDataTypeSupported(const DataType &data_type) { return GetSizeByDataType(data_type) > 0; } + +Status CheckArgsForFracZToNchw(const TransArgs &args) { + auto src_shape = args.src_shape; + auto dst_shape = args.dst_shape; + if (args.src_format != FORMAT_FRACTAL_Z || args.dst_format != FORMAT_NCHW) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (!CheckDataTypeSupported(args.src_data_type)) { + GELOGE(UNSUPPORTED, "Failed to trans shape from FORMAT_FRACTAL_Z to NCHW, invalid data type %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return UNSUPPORTED; + } + if (!CheckShapeValid(src_shape, kFracZDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(src_shape).c_str()); + return PARAM_INVALID; + } + if (!CheckShapeValid(dst_shape, kNchwDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + if (c0 < 0) { + return PARAM_INVALID; + } + int64_t c1 = Ceil(dst_shape.at(kNchwC), c0); + int64_t n0 = Ceil(dst_shape.at(kNchwN), static_cast(kNiSize)); + if (src_shape.at(kFracZHWC1) != dst_shape.at(kNchwH) * dst_shape.at(kNchwW) * c1 || src_shape.at(kFracZC0) != c0 || + src_shape.at(kFracZNi) != kNiSize || src_shape.at(kFracZN0) != n0) { + GELOGE(PARAM_INVALID, "Failed to check relationship between src and dst shape, src shape %s, dst shape %s", + 
ShapeToString(src_shape).c_str(), ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status GetDstDataAfterTrans(const TransArgs &args, TransResult &result, const int size, const int64_t total_size) { + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto n0 = args.src_shape.at(kFracZN0); + auto ni = args.src_shape.at(kFracZNi); + auto c0 = args.src_shape.at(kFracZC0); + auto h = args.dst_shape.at(kNchwH); + auto w = args.dst_shape.at(kNchwW); + auto c = args.dst_shape.at(kNchwC); + auto n = args.dst_shape.at(kNchwN); + int64_t nc = ni * n0; + int64_t ncc0 = nc * c0; + int64_t wncc0 = w * ncc0; + int64_t hwncc0 = h * wncc0; + int64_t hw = h * w; + int64_t chw = c * hw; + + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t n_head_addr = n_idx * chw; + for (int64_t c_idx = 0; c_idx < c; c_idx++) { + int64_t c_head_addr = n_head_addr + c_idx * hw; + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = c_head_addr + h_idx * w; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t dst_idx = h_head_addr + w_idx; + int64_t c1_idx = c_idx / c0; + int64_t c0_idx = c_idx % c0; + int64_t nc_idx = n_idx; + int64_t src_idx = c1_idx * hwncc0 + h_idx * wncc0 + w_idx * ncc0 + nc_idx * c0 + c0_idx; + auto src_offset = src_idx * size; + auto dst_offset = dst_idx * size; + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? 
total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from FracZ offset %ld to NCHW[%ld, %ld, %ld, %ld] offset %ld, " + "err-code %d", + src_offset, n_idx, c_idx, h_idx, w_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferFracZNchw::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForFracZToNchw(args) != SUCCESS) { + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.src_data_type); + auto total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from FracZ to NCHW, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + if (GetDstDataAfterTrans(args, result, size, total_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to get data after trans, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status FormatTransferFracZNchw::TransShape(Format src_format, const std::vector &src_shape, DataType data_type, + Format dst_format, std::vector &dst_shape) { + GELOGD("The shape derivation from FracZ to NCHW is not unique. 
Trans shape in this direction is not supported"); + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferFracZNchw, FORMAT_FRACTAL_Z, FORMAT_NCHW) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_fracz_nchw.h b/src/ge/common/formats/format_transfers/format_transfer_fracz_nchw.h new file mode 100644 index 00000000..312a10f2 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fracz_nchw.h @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_NCHW_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_NCHW_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferFracZNchw : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_NCHW_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_fracz_nhwc.cc b/src/ge/common/formats/format_transfers/format_transfer_fracz_nhwc.cc new file mode 100644 index 00000000..9f6f4890 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fracz_nhwc.cc @@ -0,0 +1,159 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_fracz_nhwc.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +bool CheckDataTypeSupported(const DataType &data_type) { return GetSizeByDataType(data_type) > 0; } + +Status CheckArgsForFracZToNhwc(const TransArgs &args) { + auto src_shape = args.src_shape; + auto dst_shape = args.dst_shape; + if (args.src_format != FORMAT_FRACTAL_Z || args.dst_format != FORMAT_NHWC) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (!CheckDataTypeSupported(args.src_data_type)) { + GELOGE(UNSUPPORTED, "Failed to trans shape from FORMAT_FRACTAL_Z to NHWC, invalid data type %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return UNSUPPORTED; + } + if (!CheckShapeValid(src_shape, kFracZDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(src_shape).c_str()); + return PARAM_INVALID; + } + if (!CheckShapeValid(dst_shape, kNhwcDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + if (c0 < 0) { + return PARAM_INVALID; + } + int64_t c1 = Ceil(dst_shape.at(kNhwcC), c0); + int64_t n0 = Ceil(dst_shape.at(kNhwcN), static_cast(kNiSize)); + if (src_shape.at(kFracZHWC1) != dst_shape.at(kNhwcH) * dst_shape.at(kNhwcW) * c1 || src_shape.at(kFracZC0) != c0 || + src_shape.at(kFracZNi) != kNiSize || src_shape.at(kFracZN0) != n0) { + GELOGE(PARAM_INVALID, "Failed to check relationship between src and dst shape, src shape %s, dst shape %s", + 
ShapeToString(src_shape).c_str(), ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status GetDstDataAfterTrans(const TransArgs &args, TransResult &result, int size, int64_t total_size) { + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto n0 = args.src_shape.at(kFracZN0); + auto ni = args.src_shape.at(kFracZNi); + auto c0 = args.src_shape.at(kFracZC0); + auto h = args.dst_shape.at(kNhwcH); + auto w = args.dst_shape.at(kNhwcW); + auto c = args.dst_shape.at(kNhwcC); + auto n = args.dst_shape.at(kNhwcN); + int64_t nc = ni * n0; + int64_t ncc0 = nc * c0; + int64_t wncc0 = w * ncc0; + int64_t hwncc0 = h * wncc0; + int64_t wc = w * c; + int64_t hwc = h * wc; + + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t n_head_addr = n_idx * hwc; + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = n_head_addr + h_idx * wc; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t w_head_addr = h_head_addr + w_idx * c; + for (int64_t c_idx = 0; c_idx < c; c_idx++) { + int64_t dst_idx = w_head_addr + c_idx; + int64_t c1_idx = c_idx / c0; + int64_t c0_idx = c_idx % c0; + int64_t nc_idx = n_idx; + int64_t src_idx = c1_idx * hwncc0 + h_idx * wncc0 + w_idx * ncc0 + nc_idx * c0 + c0_idx; + auto src_offset = src_idx * size; + auto dst_offset = dst_idx * size; + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? 
total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from FracZ offset %ld to HHWC[%ld, %ld, %ld, %ld] offset %ld, err-code %d", + src_offset, n_idx, h_idx, w_idx, c_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferFracZNhwc::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForFracZToNhwc(args) != SUCCESS) { + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.src_data_type); + auto total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from FracZ to NHWC, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + if (GetDstDataAfterTrans(args, result, size, total_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to get data after trans, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status FormatTransferFracZNhwc::TransShape(Format src_format, const std::vector &src_shape, DataType data_type, + Format dst_format, std::vector &dst_shape) { + GELOGD("The shape derivation from FracZ to NHWC is not unique. 
Trans shape in this direction is not supported"); + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferFracZNhwc, FORMAT_FRACTAL_Z, FORMAT_NHWC) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_fracz_nhwc.h b/src/ge/common/formats/format_transfers/format_transfer_fracz_nhwc.h new file mode 100644 index 00000000..5a908dbb --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_fracz_nhwc.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_NHWC_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_NHWC_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferFracZNhwc : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_FRACZ_NHWC_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.cc b/src/ge/common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.cc new file mode 100644 index 00000000..2a223563 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.cc @@ -0,0 +1,199 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +bool CheckDataTypeSupported(const DataType &data_type) { return (data_type == DT_FLOAT || data_type == DT_FLOAT16); } + +Status TransShapeHwcnToC1hwncoc0(const std::vector &src_shape, std::vector &dst_shape) { + dst_shape.clear(); + dst_shape.push_back((src_shape.at(kHwcnC) - 1) / kCubeSize + 1); + dst_shape.push_back(src_shape.at(kHwcnH)); + dst_shape.push_back(src_shape.at(kHwcnW)); + dst_shape.push_back(src_shape.at(kHwcnN)); + dst_shape.push_back(kCubeSize); + dst_shape.push_back(kCubeSize); + if (!CheckShapeValid(dst_shape, kC1hwncoc0DimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status CheckArgsForHwcnToC1hwncoc0(const TransArgs &args) { + if (args.src_format != FORMAT_HWCN || args.dst_format != FORMAT_C1HWNCoC0) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (!CheckDataTypeSupported(args.src_data_type)) { + GELOGE(UNSUPPORTED, "Failed to trans shape from HWCN to C1HWNCoC0, invalid data type %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return UNSUPPORTED; + } + if (!CheckShapeValid(args.src_shape, kHwcnDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + if (!CheckShapeValid(args.dst_shape, kC1hwncoc0DimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(args.dst_shape).c_str()); + return 
PARAM_INVALID; + } + std::vector expect_dst_shape; + auto ret = TransShapeHwcnToC1hwncoc0(args.src_shape, expect_dst_shape); + if (ret != SUCCESS) { + return ret; + } + if (args.dst_shape != expect_dst_shape) { + GELOGE(PARAM_INVALID, + "Failed to trans format, src and dst shape are not compatible. src shape %s, dst shape %s, " + "expect dst shape %s", + ShapeToString(args.src_shape).c_str(), ShapeToString(args.dst_shape).c_str(), + ShapeToString(expect_dst_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status GetDstDataAfterTrans(const TransArgs &args, TransResult &result, const int size, const int64_t total_size) { + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto h = args.src_shape.at(kHwcnH); + auto w = args.src_shape.at(kHwcnW); + auto c = args.src_shape.at(kHwcnC); + auto n = args.src_shape.at(kHwcnN); + auto c1 = args.dst_shape.at(kC1hwncoc0C1); + auto c0 = args.dst_shape.at(kC1hwncoc0C0); + auto co = args.dst_shape.at(kC1hwncoc0Co); + int64_t coc0 = co * c0; + int64_t ncoc0 = n * coc0; + int64_t wncoc0 = w * ncoc0; + int64_t hwncoc0 = h * wncoc0; + int64_t cn = c * n; + int64_t wcn = w * cn; + + for (int64_t c1_idx = 0; c1_idx < c1; c1_idx++) { + int64_t c1_head_addr = c1_idx * hwncoc0; + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = c1_head_addr + h_idx * wncoc0; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t w_head_addr = h_head_addr + w_idx * ncoc0; + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t n_head_addr = w_head_addr + n_idx * coc0; + for (int64_t co_idx = 0; co_idx < co; co_idx++) { + int64_t 
co_head_addr = n_head_addr + co_idx * c0; + for (int64_t c0_idx = 0; c0_idx < c0; c0_idx++) { + int64_t dst_idx = c0_idx + co_head_addr; + auto dst_offset = dst_idx * size; + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + int64_t c_idx = c0_idx + c1_idx * c0; + int64_t src_idx = h_idx * wcn + w_idx * cn + c_idx * n + n_idx; + auto src_offset = src_idx * size; + + if (c_idx < c && c0_idx == co_idx) { + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from HWCN[%ld, %ld, %ld, %ld] offset %ld to " + "C1HWNCoC0[%ld, %ld, %ld, %ld, %ld, %ld] offset %ld, err-code %d", + h_idx, w_idx, c_idx, n_idx, src_offset, c1_idx, h_idx, w_idx, n_idx, co_idx, c0_idx, + dst_offset, ret); + return INTERNAL_ERROR; + } + } else { + auto ret = + memset_s(dst.get() + dst_offset, static_cast(protected_size), 0, static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to set to 0 to C1HWNCoC0[%ld, %ld, %ld, %ld, %ld, %ld] offset %ld, " + "err-code %d", + c1_idx, h_idx, w_idx, n_idx, co_idx, c0_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + } + } + } + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferHwcnC1hwncoc0::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForHwcnToC1hwncoc0(args) != SUCCESS) { + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.src_data_type); + auto total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from HWCN to C1HWNCoC0, src 
shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + if (GetDstDataAfterTrans(args, result, size, total_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to get data after trans, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status FormatTransferHwcnC1hwncoc0::TransShape(Format src_format, const std::vector &src_shape, + DataType data_type, Format dst_format, std::vector &dst_shape) { + if (src_format == FORMAT_HWCN && CheckDataTypeSupported(data_type)) { + if (!CheckShapeValid(src_shape, kHwcnDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(src_shape).c_str()); + return PARAM_INVALID; + } + return TransShapeHwcnToC1hwncoc0(src_shape, dst_shape); + } else { + return UNSUPPORTED; + } +} + +REGISTER_FORMAT_TRANSFER(FormatTransferHwcnC1hwncoc0, FORMAT_HWCN, FORMAT_C1HWNCoC0) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.h b/src/ge/common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.h new file mode 100644 index 00000000..56270cd1 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_HWCN_C1HWNCOC0_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_HWCN_C1HWNCOC0_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferHwcnC1hwncoc0 : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_HWCN_C1HWNCOC0_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nchw.cc b/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nchw.cc new file mode 100644 index 00000000..eab3ba96 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nchw.cc @@ -0,0 +1,157 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/formats/format_transfers/format_transfer_nc1hwc0_nchw.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +bool CheckDataTypeSupported(const DataType &data_type) { return GetSizeByDataType(data_type) > 0; } + +Status CheckArgsForNc1hwc0ToNchw(const TransArgs &args) { + auto src_shape = args.src_shape; + auto dst_shape = args.dst_shape; + if (args.src_format != FORMAT_NC1HWC0 || args.dst_format != FORMAT_NCHW) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (!CheckDataTypeSupported(args.src_data_type)) { + GELOGE(UNSUPPORTED, "Failed to trans shape from NC1HWC0 to NCHW, invalid data type %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return UNSUPPORTED; + } + if (!CheckShapeValid(args.src_shape, kNc1hwc0DimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + if (!CheckShapeValid(args.dst_shape, kNchwDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(args.dst_shape).c_str()); + return PARAM_INVALID; + } + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + if (c0 <= 0) { + GELOGE(PARAM_INVALID, "Failed to get cube size, the data type is invalid"); + return PARAM_INVALID; + } + if (src_shape.at(kNc1hwc0H) != dst_shape.at(kNchwH) || src_shape.at(kNc1hwc0W) != dst_shape.at(kNchwW) || + src_shape.at(kNc1hwc0N) != dst_shape.at(kNchwN) || src_shape.at(kNc1hwc0C0) != c0 || + src_shape.at(kNc1hwc0C1) != 
(dst_shape.at(kNchwC) - 1) / c0 + 1) { + GELOGE(PARAM_INVALID, "Failed to check relationship between src and dst shape, src shape %s, dst shape %s", + ShapeToString(src_shape).c_str(), ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status GetDstDataAfterTrans(const TransArgs &args, TransResult &result, const int size, const int64_t total_size) { + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto h = args.src_shape.at(kNc1hwc0H); + auto w = args.src_shape.at(kNc1hwc0W); + auto n = args.src_shape.at(kNc1hwc0N); + auto c1 = args.src_shape.at(kNc1hwc0C1); + auto c0 = args.src_shape.at(kNc1hwc0C0); + auto c = args.dst_shape.at(kNchwC); + int64_t hw = h * w; + int64_t chw = c * hw; + int64_t wc0 = w * c0; + int64_t hwc0 = h * wc0; + int64_t c1hwc0 = c1 * hwc0; + + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t n_head_addr = n_idx * chw; + for (int64_t c_idx = 0; c_idx < c; c_idx++) { + int64_t c_head_addr = n_head_addr + c_idx * hw; + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = c_head_addr + h_idx * w; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t dst_idx = h_head_addr + w_idx; + int64_t c1_idx = c_idx / c0; + int64_t c0_idx = c_idx % c0; + int64_t src_idx = n_idx * c1hwc0 + c1_idx * hwc0 + h_idx * wc0 + w_idx * c0 + c0_idx; + auto src_offset = src_idx * size; + auto dst_offset = dst_idx * size; + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? 
total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from NC1HWC0[%ld, %ld, %ld, %ld, %ld] offset %ld to NCHW[%ld, %ld, %ld, %ld]" + " offset %ld, err-code %d", + n_idx, c1_idx, h_idx, w_idx, c0_idx, src_offset, n_idx, c_idx, h_idx, w_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferNc1hwc0Nchw::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForNc1hwc0ToNchw(args) != SUCCESS) { + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.src_data_type); + auto total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from NC1HWC0 to NCHW, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + if (GetDstDataAfterTrans(args, result, size, total_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to get data after trans, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status FormatTransferNc1hwc0Nchw::TransShape(Format src_format, const std::vector &src_shape, + DataType data_type, Format dst_format, std::vector &dst_shape) { + GELOGD("The shape derivation from NC1HWC0 to 
NCHW is not unique. Trans shape in this direction is not supported"); + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferNc1hwc0Nchw, FORMAT_NC1HWC0, FORMAT_NCHW) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nchw.h b/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nchw.h new file mode 100644 index 00000000..b3fe65f8 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nchw.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NC1HWC0_NCHW_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NC1HWC0_NCHW_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferNc1hwc0Nchw : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NC1HWC0_NCHW_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.cc b/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.cc new file mode 100644 index 00000000..e9e8b19f --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.cc @@ -0,0 +1,157 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +bool CheckDataTypeSupported(const DataType &data_type) { return GetSizeByDataType(data_type) > 0; } + +Status CheckArgsForNc1hwc0ToNhwc(const TransArgs &args) { + auto src_shape = args.src_shape; + auto dst_shape = args.dst_shape; + if (args.src_format != FORMAT_NC1HWC0 || args.dst_format != FORMAT_NHWC) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (!CheckDataTypeSupported(args.src_data_type)) { + GELOGE(UNSUPPORTED, "Failed to trans shape from NC1HWC0 to NHWC, invalid data type %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return UNSUPPORTED; + } + if (!CheckShapeValid(args.src_shape, kNc1hwc0DimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + if (!CheckShapeValid(args.dst_shape, kNhwcDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(args.dst_shape).c_str()); + return PARAM_INVALID; + } + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + if (c0 <= 0) { + GELOGE(PARAM_INVALID, "Failed to get cube size, the data type is invalid"); + return PARAM_INVALID; + } + if (src_shape.at(kNc1hwc0H) != dst_shape.at(kNhwcH) || src_shape.at(kNc1hwc0W) != dst_shape.at(kNhwcW) || + src_shape.at(kNc1hwc0N) != dst_shape.at(kNhwcN) || src_shape.at(kNc1hwc0C0) != c0 || + src_shape.at(kNc1hwc0C1) != (dst_shape.at(kNhwcC) - 1) / c0 + 1) { + GELOGE(PARAM_INVALID, "Failed to check relationship between src and dst shape, 
src shape %s, dst shape %s", + ShapeToString(src_shape).c_str(), ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status GetDstDataAfterTrans(const TransArgs &args, TransResult &result, const int size, const int64_t total_size) { + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto h = args.src_shape.at(kNc1hwc0H); + auto w = args.src_shape.at(kNc1hwc0W); + auto n = args.src_shape.at(kNc1hwc0N); + auto c1 = args.src_shape.at(kNc1hwc0C1); + auto c0 = args.src_shape.at(kNc1hwc0C0); + auto c = args.dst_shape.at(kNhwcC); + int64_t wc = w * c; + int64_t hwc = h * wc; + int64_t wc0 = w * c0; + int64_t hwc0 = h * wc0; + int64_t c1hwc0 = c1 * hwc0; + + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t n_head_addr = n_idx * hwc; + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = n_head_addr + h_idx * wc; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t w_head_addr = h_head_addr + w_idx * c; + for (int64_t c_idx = 0; c_idx < c; c_idx++) { + int64_t dst_idx = w_head_addr + c_idx; + int64_t c1_idx = c_idx / c0; + int64_t c0_idx = c_idx % c0; + int64_t src_idx = n_idx * c1hwc0 + c1_idx * hwc0 + h_idx * wc0 + w_idx * c0 + c0_idx; + auto src_offset = src_idx * size; + auto dst_offset = dst_idx * size; + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? 
total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from NC1HWC0[%ld, %ld, %ld, %ld, %ld] offset %ld to NHWC[%ld, %ld, %ld, %ld]" + " offset %ld, err-code %d", + n_idx, c1_idx, h_idx, w_idx, c0_idx, src_offset, n_idx, c_idx, h_idx, w_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferNc1hwc0Nhwc::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForNc1hwc0ToNhwc(args) != SUCCESS) { + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.src_data_type); + auto total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from NC1HWC0 to NCHW, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + if (GetDstDataAfterTrans(args, result, size, total_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to get data after trans, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status FormatTransferNc1hwc0Nhwc::TransShape(Format src_format, const std::vector &src_shape, + DataType data_type, Format dst_format, std::vector &dst_shape) { + GELOGD("The shape derivation from NC1HWC0 to 
NHWC is not unique. Trans shape in this direction is not supported"); + return UNSUPPORTED; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferNc1hwc0Nhwc, FORMAT_NC1HWC0, FORMAT_NHWC) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.h b/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.h new file mode 100644 index 00000000..22bc170b --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NC1HWC0_NHWC_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NC1HWC0_NHWC_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferNc1hwc0Nhwc : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NC1HWC0_NHWC_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_nchw_nc1hwc0.cc b/src/ge/common/formats/format_transfers/format_transfer_nchw_nc1hwc0.cc new file mode 100644 index 00000000..13e48f8c --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_nchw_nc1hwc0.cc @@ -0,0 +1,184 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +Status TransShapeNchwToNc1hwc0(const std::vector &src_shape, DataType data_type, + std::vector &dst_shape) { + int64_t c0 = GetCubeSizeByDataType(data_type); + if (c0 <= 0) { + GELOGE(PARAM_INVALID, "Failed to get cube size, the data type is invalid"); + return PARAM_INVALID; + } + if (!CheckShapeValid(src_shape, kNchwDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(src_shape).c_str()); + return PARAM_INVALID; + } + dst_shape.clear(); + dst_shape.push_back(src_shape.at(kNchwN)); + dst_shape.push_back((src_shape.at(kNchwC) - 1) / c0 + 1); + dst_shape.push_back(src_shape.at(kNchwH)); + dst_shape.push_back(src_shape.at(kNchwW)); + dst_shape.push_back(c0); + if (!CheckShapeValid(dst_shape, kNc1hwc0DimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status CheckArgsForNchwToNc1hwc0(const TransArgs &args) { + if (args.src_format != FORMAT_NCHW || args.dst_format != FORMAT_NC1HWC0) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + std::vector expect_5d_shape; + auto ret = TransShapeNchwToNc1hwc0(args.src_shape, args.src_data_type, expect_5d_shape); + if (ret != SUCCESS) { + return ret; + } + if (expect_5d_shape != args.dst_shape) { + GELOGE(PARAM_INVALID, + "Failed to trans format, the src and dst shape are not compatible. 
data" + " type %s, src shape %s, dst shape %s, expect dst shape %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), ShapeToString(args.src_shape).c_str(), + ShapeToString(args.dst_shape).c_str(), ShapeToString(expect_5d_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} +} // namespace + +Status FormatTransferNchwNc1hwc0::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForNchwToNc1hwc0(args) != SUCCESS) { + return PARAM_INVALID; + } + // Guarantee the validity of parameters in check function + int size = GetSizeByDataType(args.src_data_type); + auto total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD( + "Begin to trans format from NCHW to NC1HWC0, src shape %s, data type " + "%s, dst shape %s memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, + "Failed to trans format from %s to %s, can not alloc the memory for" + " dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto n = args.src_shape.at(kNchwN); + auto c = args.src_shape.at(kNchwC); + auto h = args.src_shape.at(kNchwH); + auto w = args.src_shape.at(kNchwW); + + int64_t c0 = GetCubeSizeByDataType(args.src_data_type); + if (c0 <= 0) { + GELOGE(INTERNAL_ERROR, "The c0 is invalid %ld", c0); + return PARAM_INVALID; + } + int64_t c1 = (c - 1) / c0 + 1; + int64_t hw = h * w; + int64_t chw = c * 
hw; + int64_t hwc0 = hw * c0; + int64_t c1hwc0 = c1 * hwc0; + int64_t wc0 = w * c0; + + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t n_head_addr = n_idx * c1hwc0; + for (int64_t c1_idx = 0; c1_idx < c1; c1_idx++) { + int64_t c1_head_addr = n_head_addr + c1_idx * hwc0; + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = c1_head_addr + h_idx * wc0; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t w_head_addr = h_head_addr + w_idx * c0; + for (int64_t c0_idx = 0; c0_idx < c0; c0_idx++) { + int64_t dst_index = c0_idx + w_head_addr; + int64_t dst_offset = dst_index * size; + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + int64_t cIdx = c0_idx + c1_idx * c0; + int64_t srcIdx = n_idx * chw + cIdx * hw + h_idx * w + w_idx; + auto src_offset = srcIdx * size; + + if (cIdx < c) { + auto ret = memcpy_s(dst.get() + dst_offset, static_cast(protected_size), args.data + src_offset, + static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from NCHW[%ld] offset %ld to " + "NC1HWC0[%ld, %ld, %ld, %ld, %ld] offset %ld, err-code %d", + srcIdx, src_offset, n_idx, c1_idx, h_idx, w_idx, c0_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } else { + auto ret = + memset_s(dst.get() + dst_offset, static_cast(protected_size), 0, static_cast(size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to set to 0 to " + "NC1HWC0[%ld, %ld, %ld, %ld, %ld] offset %ld, err-code %d", + n_idx, c1_idx, h_idx, w_idx, c0_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + } + } + + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} + +Status FormatTransferNchwNc1hwc0::TransShape(Format src_format, const std::vector &src_shape, + DataType data_type, Format dst_format, std::vector &dst_shape) { + if (src_format == FORMAT_NCHW) { + return TransShapeNchwToNc1hwc0(src_shape, 
data_type, dst_shape); + } else { + return UNSUPPORTED; + } +} + +REGISTER_FORMAT_TRANSFER(FormatTransferNchwNc1hwc0, FORMAT_NCHW, FORMAT_NC1HWC0) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h b/src/ge/common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h new file mode 100644 index 00000000..272b6a50 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NCHW_NC1HWC0_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NCHW_NC1HWC0_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferNchwNc1hwc0 : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NCHW_NC1HWC0_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.cc b/src/ge/common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.cc new file mode 100644 index 00000000..b461e270 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.cc @@ -0,0 +1,195 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h" + +#include +#include + +#include "common/formats/utils/formats_definitions.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +bool CheckDataTypeSupported(const DataType &data_type) { return GetSizeByDataType(data_type) > 0; } + +Status TransShapeNhwcToNc1hwc0(const std::vector &src_shape, DataType data_type, + std::vector &dst_shape) { + int64_t c0 = GetCubeSizeByDataType(data_type); + if (c0 <= 0) { + GELOGE(PARAM_INVALID, "Failed to get cube size, the data type is invalid"); + return PARAM_INVALID; + } + dst_shape.clear(); + dst_shape.push_back(src_shape.at(kNhwcN)); + dst_shape.push_back((src_shape.at(kNhwcC) - 1) / c0 + 1); + dst_shape.push_back(src_shape.at(kNhwcH)); + dst_shape.push_back(src_shape.at(kNhwcW)); + dst_shape.push_back(c0); + if (!CheckShapeValid(dst_shape, kNc1hwc0DimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(dst_shape).c_str()); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status CheckArgsForNhwcToNc1hwc0(const TransArgs &args) { + if (args.src_format != FORMAT_NHWC || args.dst_format != FORMAT_NC1HWC0) { + GELOGE(UNSUPPORTED, "Does not support trans format from %s to %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (!CheckDataTypeSupported(args.src_data_type)) { + GELOGE(UNSUPPORTED, "Failed to trans shape from NHWC to NC1HWC0, invalid data type %s", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str()); + return UNSUPPORTED; + } + if (!CheckShapeValid(args.src_shape, kNhwcDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + if (!CheckShapeValid(args.dst_shape, kNc1hwc0DimsNum)) { 
+ GELOGE(PARAM_INVALID, "Failed to check dst shape %s", ShapeToString(args.dst_shape).c_str()); + return PARAM_INVALID; + } + std::vector expect_dst_shape; + auto ret = TransShapeNhwcToNc1hwc0(args.src_shape, args.src_data_type, expect_dst_shape); + if (ret != SUCCESS) { + return ret; + } + if (args.dst_shape != expect_dst_shape) { + GELOGE(PARAM_INVALID, + "Failed to trans format, the src and dst shape are not compatible. src shape %s, dst shape %s, " + "expect dst shape %s", + ShapeToString(args.src_shape).c_str(), ShapeToString(args.dst_shape).c_str(), + ShapeToString(expect_dst_shape).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status GetDstDataAfterTrans(const TransArgs &args, TransResult &result, const int size, const int64_t total_size) { + std::shared_ptr dst(new (std::nothrow) uint8_t[total_size], std::default_delete()); + if (dst == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to trans format from %s to %s, can not alloc the memory for dst buf %ld, shape %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), total_size, ShapeToString(args.dst_shape).c_str()); + return OUT_OF_MEMORY; + } + + auto n = args.src_shape.at(kNhwcN); + auto h = args.src_shape.at(kNhwcH); + auto w = args.src_shape.at(kNhwcW); + auto c = args.src_shape.at(kNhwcC); + auto c1 = args.dst_shape.at(kNc1hwc0C1); + auto c0 = args.dst_shape.at(kNc1hwc0C0); + int64_t wc = w * c; + int64_t hwc = h * wc; + int64_t wc0 = w * c0; + int64_t hwc0 = h * wc0; + int64_t c1hwc0 = c1 * hwc0; + + for (int64_t n_idx = 0; n_idx < n; n_idx++) { + int64_t n_head_addr = n_idx * c1hwc0; + for (int64_t c1_idx = 0; c1_idx < c1; c1_idx++) { + int64_t c1_head_addr = n_head_addr + c1_idx * hwc0; + for (int64_t h_idx = 0; h_idx < h; h_idx++) { + int64_t h_head_addr = c1_head_addr + h_idx * wc0; + for (int64_t w_idx = 0; w_idx < w; w_idx++) { + int64_t w_head_addr = h_head_addr + w_idx * c0; + for (int64_t c0_idx = 0; c0_idx < 
c0; c0_idx++) { + int64_t dst_idx = c0_idx + w_head_addr; + int64_t dst_offset = dst_idx * size; + auto protected_size = total_size - dst_offset < static_cast(SECUREC_MEM_MAX_LEN) + ? total_size - dst_offset + : static_cast(SECUREC_MEM_MAX_LEN); + int64_t c_idx = c0_idx + c1_idx * c0; + int64_t src_idx = n_idx * hwc + h_idx * wc + w_idx * c + c_idx; + auto src_offset = src_idx * size; + + if (c_idx < c) { + auto ret = memcpy_s(dst.get() + dst_offset, protected_size, args.data + src_offset, size); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to copy data from NHWC[%ld, %ld, %ld, %ld] offset %ld to " + "NC1HWC0[%ld, %ld, %ld, %ld, %ld] offset %ld err-code %d", + n_idx, h_idx, w_idx, c_idx, src_offset, n_idx, c1_idx, h_idx, w_idx, c0_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } else { + auto ret = memset_s(dst.get() + dst_offset, protected_size, 0, size); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to set 0 to NC1HWC0[%ld, %ld, %ld, %ld, %ld] offset %ld base err-code %d", n_idx, c1_idx, + h_idx, w_idx, c0_idx, dst_offset, ret); + return INTERNAL_ERROR; + } + } + } + } + } + } + } + result.data = dst; + result.length = static_cast(total_size); + return SUCCESS; +} +} // namespace + +Status FormatTransferNhwcNc1hwc0::TransFormat(const TransArgs &args, TransResult &result) { + if (CheckArgsForNhwcToNc1hwc0(args) != SUCCESS) { + return PARAM_INVALID; + } + int size = GetSizeByDataType(args.src_data_type); + auto total_size = GetItemNumByShape(args.dst_shape) * size; + if (total_size <= 0) { + GELOGE(INTERNAL_ERROR, "Get %ld total size from dst shape %s, src shape %s", total_size, + ShapeToString(args.dst_shape).c_str(), ShapeToString(args.src_shape).c_str()); + return PARAM_INVALID; + } + GELOGD("Begin to trans format from NHWC to NC1HWC0, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), 
total_size); + if (GetDstDataAfterTrans(args, result, size, total_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to get data after trans, src shape %s, data type %s, dst shape %s, memory size %ld", + ShapeToString(args.src_shape).c_str(), TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + ShapeToString(args.dst_shape).c_str(), total_size); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status FormatTransferNhwcNc1hwc0::TransShape(Format src_format, const std::vector &src_shape, + DataType data_type, Format dst_format, std::vector &dst_shape) { + if (src_format == FORMAT_NHWC && CheckDataTypeSupported(data_type)) { + if (!CheckShapeValid(src_shape, kNhwcDimsNum)) { + GELOGE(PARAM_INVALID, "Failed to check src shape %s", ShapeToString(src_shape).c_str()); + return PARAM_INVALID; + } + return TransShapeNhwcToNc1hwc0(src_shape, data_type, dst_shape); + } else { + return UNSUPPORTED; + } +} + +REGISTER_FORMAT_TRANSFER(FormatTransferNhwcNc1hwc0, FORMAT_NHWC, FORMAT_NC1HWC0) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h b/src/ge/common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h new file mode 100644 index 00000000..401f7e07 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NHWC_NC1HWC0_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NHWC_NC1HWC0_H_ + +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class FormatTransferNhwcNc1hwc0 : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_NHWC_NC1HWC0_H_ diff --git a/src/ge/common/formats/format_transfers/format_transfer_transpose.cc b/src/ge/common/formats/format_transfers/format_transfer_transpose.cc new file mode 100644 index 00000000..a523a326 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_transpose.cc @@ -0,0 +1,250 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/format_transfers/format_transfer_transpose.h" + +#include +#include + +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +namespace { +std::map>> perm_args{ + {FORMAT_NCHW, + {{FORMAT_NHWC, std::vector({0, 2, 3, 1})}, + {FORMAT_HWCN, std::vector({2, 3, 1, 0})}, + {FORMAT_CHWN, std::vector({1, 2, 3, 0})}}}, + {FORMAT_NHWC, + {{FORMAT_NCHW, std::vector({0, 3, 1, 2})}, + {FORMAT_CHWN, std::vector({3, 1, 2, 0})}, + {FORMAT_HWCN, std::vector({1, 2, 3, 0})}}}, + {FORMAT_HWCN, + {{FORMAT_NCHW, std::vector({3, 2, 0, 1})}, + {FORMAT_NHWC, std::vector({3, 0, 1, 2})}, + {FORMAT_CHWN, std::vector({2, 0, 1, 3})}}}, + {FORMAT_CHWN, + {{FORMAT_NCHW, std::vector({3, 0, 1, 2})}, + {FORMAT_NHWC, std::vector({3, 1, 2, 0})}, + {FORMAT_HWCN, std::vector({1, 2, 0, 3})}}}, +}; + +bool IsShapeArgValid(const std::vector &src_shape, const std::vector &perm_arg) { + if (src_shape.empty()) { + GELOGE(PARAM_INVALID, "Failed to transpose, empty src shape"); + return false; + } + for (auto dim : src_shape) { + if (dim <= 0) { + GELOGE(PARAM_INVALID, "Failed to transpose, zero dim in src shape %s", ShapeToString(src_shape).c_str()); + return false; + } + } + if (perm_arg.size() != src_shape.size()) { + GELOGE(PARAM_INVALID, + "Failed to transpose, the size of src shape(%zu) and" + " perm arg(%zu) are different", + src_shape.size(), perm_arg.size()); + return false; + } + + std::vector exists(perm_arg.size()); + for (auto perm : perm_arg) { + if (perm < 0 || static_cast(perm) >= perm_arg.size() || ++exists[perm] > 1) { + GELOGE(PARAM_INVALID, "Failed to transpose, duplicated perm arg %ld, perm arg %s", perm, + JoinToString(perm_arg).c_str()); + return false; + } + } + return true; +} +bool IsTransposeArgValid(const uint8_t *src, const std::vector &src_shape, DataType src_data_type, + const std::vector &perm_arg) { + if (src == nullptr) { + 
GELOGE(PARAM_INVALID, "Failed to transpose, the src is null"); + return false; + } + if (GetSizeByDataType(src_data_type) < 0) { + GELOGE(UNSUPPORTED, "Failed to transpose, the data type %s is not support", + TypeUtils::DataTypeToSerialString(src_data_type).c_str()); + return false; + } + return IsShapeArgValid(src_shape, perm_arg); +} + +std::vector GenHeads(const std::vector &shape) { + std::vector heads(shape.size()); + bool first = true; + for (auto i = static_cast(shape.size() - 1); i >= 0; --i) { + if (first) { + heads[i] = 1; + first = false; + } else { + heads[i] = shape[i + 1] * heads[i + 1]; + } + } + return heads; +} + +int64_t GenOffset(const std::vector &offsets, const std::vector &indexes) { + int64_t offset = 0; + for (size_t i = 0; i < indexes.size(); ++i) { + offset += offsets[i] * indexes[i]; + } + return offset; +} + +void AddOne(const std::vector &shape, std::vector &indexes) { + size_t i = indexes.size() - 1; + indexes[i]++; + while (i > 0) { + if (indexes[i] >= shape[i]) { + indexes[i] = 0; + indexes[i - 1]++; + --i; + } else { + break; + } + } +} + +std::vector TransShapeByPerm(const std::vector &src_shape, const std::vector &perm_arg) { + std::vector dst_shape(src_shape.size()); + for (size_t i = 0; i < perm_arg.size(); ++i) { + dst_shape[i] = src_shape[perm_arg[i]]; + } + return dst_shape; +} +} // namespace + +Status Transpose(const uint8_t *src, const std::vector &src_shape, DataType src_data_type, + const std::vector &perm_arg, TransResult &result) { + if (!IsTransposeArgValid(src, src_shape, src_data_type, perm_arg)) { + return PARAM_INVALID; + } + + auto dst_shape = TransShapeByPerm(src_shape, perm_arg); + auto src_origin_ordered_heads = GenHeads(src_shape); + auto src_heads = TransShapeByPerm(src_origin_ordered_heads, perm_arg); + + int64_t dst_ele_num = GetItemNumByShape(dst_shape); + int64_t data_size = GetSizeByDataType(src_data_type); + int64_t dst_size = data_size * dst_ele_num; + std::shared_ptr dst(new (std::nothrow) 
uint8_t[dst_size], std::default_delete()); + + GELOGD("Begin to transpose, src shape %s, perm arg %s, dst shape %s, data type %s", JoinToString(src_shape).c_str(), + JoinToString(perm_arg).c_str(), JoinToString(dst_shape).c_str(), + TypeUtils::DataTypeToSerialString(src_data_type).c_str()); + + int64_t dst_index = 0; + std::vector dst_indexes(dst_shape.size()); + while (dst_index < dst_ele_num) { + auto src_offset = GenOffset(src_heads, dst_indexes) * data_size; + auto dst_offset_bytes = dst_index * data_size; + auto protected_size = dst_size - dst_offset_bytes < static_cast(SECUREC_MEM_MAX_LEN) + ? dst_size - dst_offset_bytes + : static_cast(SECUREC_MEM_MAX_LEN); + auto ret = memcpy_s(dst.get() + dst_offset_bytes, static_cast(protected_size), src + src_offset, + static_cast(data_size)); + if (ret != EOK) { + GELOGE(INTERNAL_ERROR, + "Failed to transpose, src shape %s, perm arg %s, dst shape %s, " + "failed to write to dst offset %ld, current dim offset %s", + ShapeToString(src_shape).c_str(), ShapeToString(perm_arg).c_str(), ShapeToString(dst_shape).c_str(), + dst_offset_bytes, ShapeToString(dst_indexes).c_str()); + return INTERNAL_ERROR; + } + AddOne(dst_shape, dst_indexes); + ++dst_index; + } + + result.data = dst; + result.length = static_cast(dst_size); + return SUCCESS; +} + +Status TransposeWithShapeCheck(const uint8_t *data, const std::vector &src_shape, + const std::vector &dst_shape, DataType src_data_type, + const std::vector &perm_arg, TransResult &result) { + if (!IsTransposeArgValid(data, src_shape, src_data_type, perm_arg)) { + return PARAM_INVALID; + } + auto expected_shape = TransShapeByPerm(src_shape, perm_arg); + if (dst_shape != expected_shape) { + GELOGE(PARAM_INVALID, "Failed to trans axis for perm_arg %s, invalid dst shape %s, expect %s", + ShapeToString(perm_arg).c_str(), ShapeToString(dst_shape).c_str(), ShapeToString(expected_shape).c_str()); + return PARAM_INVALID; + } + + return Transpose(data, src_shape, src_data_type, perm_arg, 
result); +} + +Status FormatTransferTranspose::TransFormat(const TransArgs &args, TransResult &result) { + std::vector expected_shape; + auto ret = TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, expected_shape); + if (ret != SUCCESS) { + return ret; + } + if (args.dst_shape != expected_shape) { + GELOGE(PARAM_INVALID, "Failed to trans format from %s to %s, invalid dst shape %s, expect %s", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str(), ShapeToString(args.dst_shape).c_str(), + ShapeToString(expected_shape).c_str()); + return PARAM_INVALID; + } + + return Transpose(args.data, args.src_shape, args.src_data_type, perm_args[args.src_format][args.dst_format], result); +} + +Status FormatTransferTranspose::TransShape(Format src_format, const std::vector &src_shape, DataType data_type, + Format dst_format, std::vector &dst_shape) { + auto dst_iter = perm_args.find(src_format); + if (dst_iter == perm_args.end()) { + GELOGE(UNSUPPORTED, "Failed to trans shape, do not support transpose from format %s to %s", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str()); + return UNSUPPORTED; + } + auto iter = dst_iter->second.find(dst_format); + if (iter == dst_iter->second.end()) { + GELOGE(UNSUPPORTED, "Failed to trans shape, do not support transpose from format %s to %s", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str()); + return UNSUPPORTED; + } + + if (!IsShapeArgValid(src_shape, iter->second)) { + return PARAM_INVALID; + } + dst_shape = TransShapeByPerm(src_shape, iter->second); + return SUCCESS; +} + +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_NCHW, FORMAT_NHWC) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_NCHW, FORMAT_HWCN) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_NCHW, FORMAT_CHWN) 
+REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_NHWC, FORMAT_NCHW) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_NHWC, FORMAT_CHWN) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_NHWC, FORMAT_HWCN) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_HWCN, FORMAT_NCHW) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_HWCN, FORMAT_NHWC) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_HWCN, FORMAT_CHWN) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_CHWN, FORMAT_NCHW) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_CHWN, FORMAT_NHWC) +REGISTER_FORMAT_TRANSFER(FormatTransferTranspose, FORMAT_CHWN, FORMAT_HWCN) +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/format_transfers/format_transfer_transpose.h b/src/ge/common/formats/format_transfers/format_transfer_transpose.h new file mode 100644 index 00000000..6866b2e7 --- /dev/null +++ b/src/ge/common/formats/format_transfers/format_transfer_transpose.h @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_TRANSPOSE_H_ +#define GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_TRANSPOSE_H_ + +#include +#include + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +Status Transpose(const uint8_t *src, const std::vector &src_shape, DataType src_data_type, + const std::vector &perm_arg, TransResult &result); + +Status TransposeWithShapeCheck(const uint8_t *src, const std::vector &src_shape, + const std::vector &dst_shape, DataType src_data_type, + const std::vector &perm_arg, TransResult &result); + +class FormatTransferTranspose : public FormatTransfer { + public: + Status TransFormat(const TransArgs &args, TransResult &result) override; + Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, Format dst_format, + std::vector &dst_shape) override; +}; +} // namespace formats +} // namespace ge + +#endif // GE_COMMON_FORMATS_FORMAT_TRANSFERS_FORMAT_TRANSFER_TRANSPOSE_H_ diff --git a/src/ge/common/formats/formats.cc b/src/ge/common/formats/formats.cc new file mode 100644 index 00000000..fcc02eb0 --- /dev/null +++ b/src/ge/common/formats/formats.cc @@ -0,0 +1,86 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/formats/formats.h" + +#include +#include +#include + +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Status TransFormat(const TransArgs &args, TransResult &result) { + auto transfer = BuildFormatTransfer(args); + if (transfer == nullptr) { + GELOGE(UNSUPPORTED, "Failed to trans data from format %s to %s, unsupport now", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + if (args.data == nullptr) { + GELOGE(PARAM_INVALID, "Invalid input null data"); + return PARAM_INVALID; + } + return transfer->TransFormat(args, result); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Status TransShape(Format src_format, + const std::vector &src_shape, + DataType data_type, Format dst_format, + std::vector &dst_shape) { + formats::TransArgs args; + args.src_format = src_format; + args.dst_format = dst_format; + auto transfer = BuildFormatTransfer(args); + if (transfer == nullptr) { + GELOGE(UNSUPPORTED, "Failed to trans data from format %s to %s, unsupport now", + TypeUtils::FormatToSerialString(args.src_format).c_str(), + TypeUtils::FormatToSerialString(args.dst_format).c_str()); + return UNSUPPORTED; + } + + return transfer->TransShape(src_format, src_shape, data_type, dst_format, dst_shape); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Status TransDataType(const CastArgs &args, TransResult &result) { + auto transfer = BuildDataTypeTransfer(args); + if (transfer == nullptr) { + GELOGE(UNSUPPORTED, "Failed to trans data from datatype %s to %s, unsupport now", + TypeUtils::DataTypeToSerialString(args.src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(args.dst_data_type).c_str()); + return UNSUPPORTED; + } + return 
transfer->TransDataType(args, result); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool IsTransFormatSupport(const TransArgs &args) { + return FormatTransferExists(args); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY bool IsTransDataTypeSupport(const CastArgs &args) { + return DataTypeTransferExists(args); +} +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/formats.h b/src/ge/common/formats/formats.h new file mode 100644 index 00000000..09566904 --- /dev/null +++ b/src/ge/common/formats/formats.h @@ -0,0 +1,49 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_FORMATS_FORMATS_H_ +#define GE_COMMON_FORMATS_FORMATS_H_ + +#include +#include + +#include "common/formats/format_transfers/datatype_transfer.h" +#include "common/formats/format_transfers/format_transfer.h" +#include "external/graph/types.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/ge_tensor.h" + +namespace ge { +namespace formats { +/** + * Convert the data format, and put the converted format and length in the result + * @param args + * @param result + * @return + */ +Status TransFormat(const TransArgs &args, TransResult &result); + +Status TransShape(Format src_format, const std::vector &src_shape, DataType data_type, + Format dst_format, std::vector &dst_shape); + +Status TransDataType(const CastArgs &args, TransResult &result); + +bool IsTransFormatSupport(const TransArgs &args); + +bool IsTransDataTypeSupport(const CastArgs &args); +} // namespace formats +} // namespace ge +#endif // GE_COMMON_FORMATS_FORMATS_H_ diff --git a/src/ge/common/formats/utils/formats_definitions.h b/src/ge/common/formats/utils/formats_definitions.h new file mode 100644 index 00000000..3bc394ce --- /dev/null +++ b/src/ge/common/formats/utils/formats_definitions.h @@ -0,0 +1,80 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
#ifndef GE_COMMON_FORMATS_UTILS_FORMATS_DEFINITIONS_H_
#define GE_COMMON_FORMATS_UTILS_FORMATS_DEFINITIONS_H_

#include <cstdint>

namespace ge {
namespace formats {

// Cube edge length used by the fractal/5D format transfers.
static const int kCubeSize = 16;
// Ni dimension size of the FRACTAL_Z format.
static const int kNiSize = 16;
// Upper bound (2^40) for the total element count of a shape, used to guard
// multiplication overflow. Written with 64-bit literals: the previous
// 1024UL * 1024UL * 1024UL * 1024UL overflows on ILP32 targets where
// unsigned long is 32 bits wide.
static const int64_t kShapeItemNumMAX = 1024LL * 1024LL * 1024LL * 1024LL;

// Dimension indices of the NCHW format; the last enumerator is the dim count.
enum NchwDimIndex { kNchwN, kNchwC, kNchwH, kNchwW, kNchwDimsNum };

// Dimension indices of the NHWC format.
enum NhwcDimIndex { kNhwcN, kNhwcH, kNhwcW, kNhwcC, kNhwcDimsNum };

// Dimension indices of the HWCN format.
enum HwcnDimIndex { kHwcnH, kHwcnW, kHwcnC, kHwcnN, kHwcnDimsNum };

// Dimension indices of the NC1HWC0 format.
enum Nc1hwc0DimIndex { kNc1hwc0N, kNc1hwc0C1, kNc1hwc0H, kNc1hwc0W, kNc1hwc0C0, kNc1hwc0DimsNum };

// Dimension indices of the C1HWNCoC0 format.
enum C1hwncoc0DimIndex {
  kC1hwncoc0C1,
  kC1hwncoc0H,
  kC1hwncoc0W,
  kC1hwncoc0N,
  kC1hwncoc0Co,
  kC1hwncoc0C0,
  kC1hwncoc0DimsNum
};

// Dimension indices of the FRACTAL_Z format.
enum FracZDimIndex { kFracZHWC1, kFracZN0, kFracZNi, kFracZC0, kFracZDimsNum };

}  // namespace formats
}  // namespace ge
#endif  // GE_COMMON_FORMATS_UTILS_FORMATS_DEFINITIONS_H_
+ */ + +#include "common/formats/utils/formats_trans_utils.h" + +#include + +#include "common/formats/utils/formats_definitions.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace formats { +int64_t GetCubeSizeByDataType(DataType data_type) { + // Current cube does not support 4 bytes and longer data + auto size = GetSizeByDataType(data_type); + if (size <= 0) { + GELOGE(PARAM_INVALID, "Failed to get cube size, the data type %s is invalid", + TypeUtils::DataTypeToSerialString(data_type).c_str()); + return -1; + } else if (size == 1) { + return kCubeSize * 2; // 32 bytes cube size + } else { + return kCubeSize; + } +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY std::string ShapeToString(const GeShape &shape) { + return ShapeToString(shape.GetDims()); +} + +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY std::string ShapeToString(const std::vector &shape) { + return JoinToString(shape); +} + +int64_t GetItemNumByShape(const std::vector &shape) { + int64_t num = 1; + for (auto dim : shape) { + num *= dim; + } + return num; +} + +bool CheckShapeValid(const std::vector &shape, const int64_t expect_dims) { + if (expect_dims <= 0 || shape.size() != static_cast(expect_dims)) { + GELOGE(PARAM_INVALID, "Invalid shape, dims num %zu, expect %ld", shape.size(), expect_dims); + return false; + } + return IsShapeValid(shape); +} + +bool IsShapeValid(const std::vector &shape) { + if (shape.empty()) { + return false; + } + int64_t num = 1; + for (auto dim : shape) { + if (dim < 1) { + GELOGE(PARAM_INVALID, "Invalid zero dim in the shape %s", ShapeToString(shape).c_str()); + return false; + } + if (kShapeItemNumMAX / dim < num) { + GELOGE(PARAM_INVALID, "Shape overflow, the total count should be less than %ld!", kShapeItemNumMAX); + return false; + } + num *= dim; + } + return true; +} + +bool IsShapeEqual(const GeShape &src, const GeShape &dst) { + if 
(src.GetDims().size() != dst.GetDims().size()) { + return false; + } + + for (size_t i = 0; i < src.GetDims().size(); ++i) { + if (src.GetDim(i) != dst.GetDim(i)) { + return false; + } + } + return true; +} +} // namespace formats +} // namespace ge diff --git a/src/ge/common/formats/utils/formats_trans_utils.h b/src/ge/common/formats/utils/formats_trans_utils.h new file mode 100644 index 00000000..310aaf38 --- /dev/null +++ b/src/ge/common/formats/utils/formats_trans_utils.h @@ -0,0 +1,72 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * Convert a vector to a string using ',' as the separator.
 * @tparam T element type; must be streamable to std::ostream
 * @param vec values to join
 * @return joined string, empty when vec is empty
 */
template <typename T>
std::string JoinToString(const std::vector<T> &vec) {
  std::stringstream ss;
  bool first = true;
  for (const auto &ele : vec) {
    if (!first) {
      ss << ",";
    }
    first = false;
    ss << ele;
  }
  return ss.str();
}

/**
 * Integer ceiling division: the smallest integer not less than n1 / n2.
 * @param n1 dividend
 * @param n2 divisor; expected positive in practice
 * @return ceil(n1 / n2); 0 when n2 is 0 (divide-by-zero guard)
 */
template <typename T>
T Ceil(T n1, T n2) {
  if (n2 == 0) {
    return 0;
  }
  // (n1 - 1) / n2 + 1 is only valid for n1 > 0: for n1 == 0 it yields 1
  // instead of 0, because -1 / n2 truncates toward zero. For n1 <= 0 plain
  // truncating division already rounds toward zero, i.e. upward.
  if (n1 <= 0) {
    return n1 / n2;
  }
  return (n1 - 1) / n2 + 1;
}
+ */ + +#include "common/fp16_t.h" + +#include "external/register/register_types.h" + +namespace { +const int32_t kInt32SymbolShift = 31; +const int32_t kBitShift_32 = 32; +const int32_t kDim_2 = 2; +const int32_t kDim_11 = 11; +} // namespace + +namespace ge { +union Fp16ToFloatData { + uint32_t uint_data; + float float_data; +}; + +/// +/// @ingroup fp16_t global filed +/// @brief round mode of last valid digital +/// +const fp16RoundMode_t g_round_mode = ROUND_TO_NEAREST; + +void ExtractFP16(const uint16_t &val, uint16_t *s, int16_t *e, uint16_t *m) { + // 1.Extract + *s = static_cast(FP16_EXTRAC_SIGN(val)); + *e = static_cast(FP16_EXTRAC_EXP(val)); + *m = static_cast(FP16_EXTRAC_MAN(val)); + + // Denormal + if ((*e) == 0) { + *e = 1; + } +} + +/// +/// @ingroup fp16_t static method +/// @param [in] man truncated mantissa +/// @param [in] shift_out left shift bits based on ten bits +/// @brief judge whether to add one to the result while converting fp16_t to other datatype +/// @return Return true if add one, otherwise false +/// +static bool IsRoundOne(uint64_t man, uint16_t trunc_len) { + uint64_t mask0 = 0x4; + uint64_t mask1 = 0x2; + uint64_t mask2; + uint16_t shift_out = static_cast(trunc_len - kDim_2); + mask0 = mask0 << shift_out; + mask1 = mask1 << shift_out; + mask2 = mask1 - 1; + + bool last_bit = ((man & mask0) > 0); + bool trunc_high = false; + bool trunc_left = false; + if (g_round_mode == ROUND_TO_NEAREST) { + trunc_high = ((man & mask1) > 0); + trunc_left = ((man & mask2) > 0); + } + return (trunc_high && (trunc_left || last_bit)); +} + +/// +/// @ingroup fp16_t public method +/// @param [in] exp exponent of fp16_t value +/// @param [in] man exponent of fp16_t value +/// @brief normalize fp16_t value +/// @return +/// +static void Fp16Normalize(int16_t &exp, uint16_t &man) { + if (exp >= FP16_MAX_EXP) { + exp = FP16_MAX_EXP - 1; + man = FP16_MAX_MAN; + } else if (exp == 0 && man == FP16_MAN_HIDE_BIT) { + exp++; + man = 0; + } +} + +// Evaluation 
+fp16_t &fp16_t::operator=(const fp16_t &fp) { + if (&fp == this) { + return *this; + } + val = fp.val; + return *this; +} + +fp16_t &fp16_t::operator=(const float &f_val) { + uint16_t s_ret, m_ret; + int16_t e_ret; + uint32_t e_f, m_f; + uint32_t ui32_v = *(reinterpret_cast(&f_val)); // 1:8:23bit sign:exp:man + uint32_t m_len_delta; + + s_ret = static_cast((ui32_v & FP32_SIGN_MASK) >> FP32_SIGN_INDEX); // 4Byte->2Byte + e_f = (ui32_v & FP32_EXP_MASK) >> FP32_MAN_LEN; // 8 bit exponent + m_f = (ui32_v & FP32_MAN_MASK); // 23 bit mantissa dont't need to care about denormal + m_len_delta = FP32_MAN_LEN - FP16_MAN_LEN; + + // Exponent overflow/NaN converts to signed inf/NaN + if (e_f > 0x8Fu) { // 0x8Fu:142=127+15 + e_ret = FP16_MAX_EXP - 1; + m_ret = FP16_MAX_MAN; + } else if (e_f <= 0x70u) { // 0x70u:112=127-15 Exponent underflow converts to denormalized half or signed zero + e_ret = 0; + if (e_f >= 0x67) { // 0x67:103=127-24 Denormal + m_f = (m_f | FP32_MAN_HIDE_BIT); + uint16_t shift_out = FP32_MAN_LEN; + uint64_t m_tmp = (static_cast(m_f)) << (e_f - 0x67); + + bool need_round = IsRoundOne(m_tmp, shift_out); + m_ret = static_cast(m_tmp >> shift_out); + if (need_round) { + m_ret++; + } + } else if (e_f == 0x66 && m_f > 0) { // 0x66:102 Denormal 0(e_f - 0x70u); + + bool need_round = IsRoundOne(m_f, static_cast(m_len_delta)); + m_ret = static_cast(m_f >> m_len_delta); + if (need_round) { + m_ret++; + } + if (m_ret & FP16_MAN_HIDE_BIT) { + e_ret++; + } + } + + Fp16Normalize(e_ret, m_ret); + val = FP16_CONSTRUCTOR(s_ret, static_cast(e_ret), m_ret); + return *this; +} + +fp16_t &fp16_t::operator=(const int32_t &i_val) { + if (i_val == 0) { + val = 0; + } else { + uint32_t ui_val = *(reinterpret_cast(&i_val)); + uint16_t s_ret = static_cast(ui_val >> kInt32SymbolShift); + if (s_ret) { + int32_t i_val_m = -i_val; + ui_val = *(reinterpret_cast(&i_val_m)); + } + int16_t e_ret; + uint32_t m_tmp = (ui_val & FP32_ABS_MAX); + uint32_t m_min = FP16_MAN_HIDE_BIT; + uint32_t m_max 
= m_min << 1; + int32_t len = static_cast(GetManBitLength(m_tmp)); + if (len > kDim_11) { + e_ret = FP16_EXP_BIAS + FP16_MAN_LEN; + uint32_t m_trunc = 0; + uint32_t trunc_mask = 1; + int32_t e_tmp = len - kDim_11; + for (int i = 1; i < e_tmp; i++) { + trunc_mask = (trunc_mask << 1) + 1; + } + m_trunc = (m_tmp & trunc_mask) << static_cast(kBitShift_32 - e_tmp); + for (int i = 0; i < e_tmp; i++) { + m_tmp = (m_tmp >> 1); + e_ret = e_ret + 1; + } + bool b_last_bit = ((m_tmp & 1) > 0); + bool b_trunc_high = false; + bool b_trunc_left = false; + if (g_round_mode == ROUND_TO_NEAREST) { // trunc + b_trunc_high = ((m_trunc & FP32_SIGN_MASK) > 0); + b_trunc_left = ((m_trunc & FP32_ABS_MAX) > 0); + } + m_tmp = ManRoundToNearest(b_last_bit, b_trunc_high, b_trunc_left, m_tmp); + while (m_tmp >= m_max || e_ret < 0) { + m_tmp = m_tmp >> 1; + e_ret = e_ret + 1; + } + if (e_ret >= FP16_MAX_EXP) { + e_ret = FP16_MAX_EXP - 1; + m_tmp = FP16_MAX_MAN; + } + } else { + e_ret = FP16_EXP_BIAS; + m_tmp = m_tmp << static_cast(kDim_11 - len); + e_ret = e_ret + (len - 1); + } + uint16_t m_ret = static_cast(m_tmp); + val = FP16_CONSTRUCTOR(s_ret, static_cast(e_ret), m_ret); + } + return *this; +} + +/// +/// @ingroup fp16_t math conversion static method +/// @param [in] fp_val uint16_t value of fp16_t object +/// @brief Convert fp16_t to float/fp32 +/// @return Return float/fp32 value of fp_val which is the value of fp16_t object +/// +float Fp16ToFloat(const uint16_t &fp_val) { + float ret; + + uint16_t hf_sign, hf_man; + int16_t hf_exp; + ExtractFP16(fp_val, &hf_sign, &hf_exp, &hf_man); + + while (hf_man && !(hf_man & FP16_MAN_HIDE_BIT)) { + hf_man <<= 1; + hf_exp--; + } + + uint32_t s_ret, e_ret, m_ret, f_val; + + s_ret = hf_sign; + if (!hf_man) { + e_ret = 0; + m_ret = 0; + } else { + e_ret = static_cast(hf_exp - FP16_EXP_BIAS + FP32_EXP_BIAS); + m_ret = hf_man & FP16_MAN_MASK; + m_ret = m_ret << (FP32_MAN_LEN - FP16_MAN_LEN); + } + f_val = FP32_CONSTRUCTOR(s_ret, e_ret, m_ret); + 
Fp16ToFloatData data; + data.uint_data = f_val; + ret = data.float_data; + + return ret; +} + +/// +/// @ingroup fp16_t math convertion static method +/// @param [in] fp_val uint16_t value of fp16_t object +/// @brief Convert fp16_t to int32_t +/// @return Return int32_t value of fp_val which is the value of fp16_t object +/// +int32_t Fp16ToInt32(const uint16_t &fp_val) { + int32_t ret; + uint32_t ret_v; + uint32_t s_ret; + uint16_t hf_e, hf_m; + + // 1.Get s_ret and shift it to bit0. + s_ret = FP16_EXTRAC_SIGN(fp_val); + // 2.Get hf_e and hf_m + hf_e = FP16_EXTRAC_EXP(fp_val); + hf_m = FP16_EXTRAC_MAN(fp_val); + + if (FP16_IS_INVALID(fp_val)) { // Inf or NaN + ret_v = INT32_T_MAX + s_ret; + } else { + uint64_t long_int_m = hf_m; + uint16_t shift_out = 0; + + while (hf_e != FP16_EXP_BIAS) { + if (hf_e > FP16_EXP_BIAS) { + hf_e--; + long_int_m = long_int_m << 1; + } else { + hf_e++; + shift_out++; + } + } + uint32_t m_ret; + bool need_round = IsRoundOne(long_int_m, shift_out + FP16_MAN_LEN); + m_ret = static_cast((long_int_m >> (FP16_MAN_LEN + shift_out)) & BIT_LEN32_MAX); + if (need_round && m_ret < INT32_T_MAX) { + m_ret++; + } + + if (s_ret == 1) { + m_ret = (~m_ret) + 1; + } + if (m_ret == 0) { + s_ret = 0; + } + // Generate final result + ret_v = (s_ret << kInt32SymbolShift) | (m_ret); + } + + ret = *(reinterpret_cast(&ret_v)); + return ret; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY float fp16_t::toFloat() const { return Fp16ToFloat(val); } + +int32_t fp16_t::toInt32() const { return Fp16ToInt32(val); } + +// Convert +fp16_t::operator float() const { return Fp16ToFloat(val); } + +fp16_t::operator int32_t() const { return Fp16ToInt32(val); } +} // namespace ge diff --git a/src/ge/common/fp16_t.h b/src/ge/common/fp16_t.h new file mode 100644 index 00000000..a9bdc073 --- /dev/null +++ b/src/ge/common/fp16_t.h @@ -0,0 +1,329 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_FP16_T_H_ +#define GE_COMMON_FP16_T_H_ + +#include +#include + +#include + +namespace ge { +/** + *@ingroup fp16 basic parameter + *@brief fp16 exponent bias + */ +#define FP16_EXP_BIAS (15) +/** + *@ingroup fp16 basic parameter + *@brief the mantissa bit length of fp16 is 10 + */ +#define FP16_MAN_LEN (10) +/** + *@ingroup fp16 basic parameter + *@brief bit index of sign in fp16 + */ +#define FP16_SIGN_INDEX (15) +/** + *@ingroup fp16 basic parameter + *@brief exponent mask of fp16 ( 11111 00000 00000) + */ +#define FP16_EXP_MASK (0x7C00) +/** + *@ingroup fp16 basic parameter + *@brief mantissa mask of fp16 ( 11111 11111) + */ +#define FP16_MAN_MASK (0x03FF) +/** + *@ingroup fp16 basic parameter + *@brief clear bit of mantissa of fp16( 1 00000 00000) + */ +#define FP16_MAN_HIDE_BIT (0x0400) +/** + *@ingroup fp16 basic parameter + *@brief maximum value (0111 1011 1111 1111) + */ +#define FP16_MAX (0x7BFF) +/** + *@ingroup fp16 basic parameter + *@brief maximum value (0111 1011 1111 1111) + */ +/** + *@ingroup fp16 basic parameter + *@brief maximum exponent value of fp16 is 15(11111) + */ +#define FP16_MAX_EXP (0x001F) +/** + *@ingroup fp16 basic parameter + *@brief maximum mantissa value of fp16(11111 11111) + */ +#define FP16_MAX_MAN (0x03FF) +/** + *@ingroup fp16 basic operator + *@brief get sign of fp16 + */ +#define FP16_EXTRAC_SIGN(x) (((x) >> 15) & 1) +/** + *@ingroup fp16 basic operator + *@brief get exponent of fp16 + */ +#define 
FP16_EXTRAC_EXP(x) (((x) >> 10) & FP16_MAX_EXP) +/** + *@ingroup fp16 basic operator + *@brief get mantissa of fp16 + */ +#define FP16_EXTRAC_MAN(x) ((x & 0x3FF) | (((((x) >> 10) & 0x1F) > 0 ? 1 : 0) * 0x400)) +/** + *@ingroup fp16 basic operator + *@brief constructor of fp16 from sign exponent and mantissa + */ +#define FP16_CONSTRUCTOR(s, e, m) (((s) << FP16_SIGN_INDEX) | ((e) << FP16_MAN_LEN) | ((m)&FP16_MAX_MAN)) +/** + *@ingroup fp16 special value judgment + *@brief whether a fp16 is invalid + */ +#define FP16_IS_INVALID(x) ((x & FP16_EXP_MASK) == FP16_EXP_MASK) + +/** + *@ingroup fp32 basic parameter + *@brief fp32 exponent bias + */ +#define FP32_EXP_BIAS (127) +/** + *@ingroup fp32 basic parameter + *@brief the mantissa bit length of float/fp32 is 23 + */ +#define FP32_MAN_LEN (23) +/** + *@ingroup fp32 basic parameter + *@brief bit index of sign in float/fp32 + */ +#define FP32_SIGN_INDEX (31) +/** + *@ingroup fp32 basic parameter + *@brief sign mask of fp32 (1 0000 0000 0000 0000 0000 0000 000) + */ +#define FP32_SIGN_MASK (0x80000000u) +/** + *@ingroup fp32 basic parameter + *@brief exponent mask of fp32 ( 1111 1111 0000 0000 0000 0000 000) + */ +#define FP32_EXP_MASK (0x7F800000u) +/** + *@ingroup fp32 basic parameter + *@brief mantissa mask of fp32 ( 1111 1111 1111 1111 111) + */ +#define FP32_MAN_MASK (0x007FFFFFu) +/** + *@ingroup fp32 basic parameter + *@brief hide bit of mantissa of fp32 ( 1 0000 0000 0000 0000 000) + */ +#define FP32_MAN_HIDE_BIT (0x00800000u) +/** + *@ingroup fp32 basic parameter + *@brief absolute maximum value (0 1111 1111 1111 1111 1111 1111 111) + */ +#define FP32_ABS_MAX (0x7FFFFFFFu) +/** + *@ingroup fp32 basic parameter + *@brief maximum mantissa value of fp32 (1111 1111 1111 1111 1111 111) + */ +#define FP32_MAX_MAN (0x7FFFFF) +/** + *@ingroup fp32 basic operator + *@brief constructor of fp32 from sign exponent and mantissa + */ +#define FP32_CONSTRUCTOR(s, e, m) (((s) << FP32_SIGN_INDEX) | ((e) << FP32_MAN_LEN) | 
((m)&FP32_MAX_MAN)) +/** + *@ingroup fp64 basic parameter + *@brief the mantissa bit length of double/fp64 is 52 + */ +#define FP64_MAN_LEN (52) +/** + *@ingroup fp64 basic parameter + *@brief bit index of sign in double/fp64 is 63 + */ +#define FP64_SIGN_INDEX (63) +/** + *@ingroup fp64 basic parameter + *@brief sign mask of fp64 (1 000 (total 63bits 0)) + */ +#define FP64_SIGN_MASK (0x8000000000000000LLu) +/** + *@ingroup fp64 basic parameter + *@brief exponent mask of fp64 (0 1 11111 11111 0000?-?-(total 52bits 0)) + */ +#define FP64_EXP_MASK (0x7FF0000000000000LLu) +/** + *@ingroup fp64 basic parameter + *@brief mantissa mask of fp64 ( 1111?-?-(total 52bits 1)) + */ +#define FP64_MAN_MASK (0x000FFFFFFFFFFFFFLLu) +/** + *@ingroup fp64 basic parameter + *@brief hide bit of mantissa of fp64 ( 1 0000?-?-(total 52bits 0)) + */ +#define FP64_MAN_HIDE_BIT (0x0010000000000000LLu) +/** + *@ingroup integer special value judgment + *@brief maximum positive value of int8_t (0111 1111) + */ +#define INT8_T_MAX (0x7F) +/** + *@ingroup integer special value judgment + *@brief maximum positive value of int32_t (0111 1111 1111 1111 1111 1111 1111 1111) + */ +#define INT32_T_MAX (0x7FFFFFFFu) +/** + *@ingroup integer special value judgment + *@brief maximum value of a data with 32 bits length (1111 1111 1111 1111 1111 1111 1111 1111) + */ +#define BIT_LEN32_MAX (0xFFFFFFFFu) +/** + *@ingroup fp16_t enum + *@brief round mode of last valid digital + */ +typedef enum TagFp16RoundMode { + ROUND_TO_NEAREST = 0, /**< round to nearest even */ + ROUND_BY_TRUNCATED, /**< round by truncated */ + ROUND_MODE_RESERVED, +} fp16RoundMode_t; + +/** + *@ingroup fp16_t + *@brief Half precision float + * bit15: 1 bit SIGN +---+-----+------------+ + * bit14-10: 5 bit EXP | S |EEEEE|MM MMMM MMMM| + * bit0-9: 10bit MAN +---+-----+------------+ + * + */ +using fp16_t = struct TagFp16 { + uint16_t val; + + public: + /** + *@ingroup fp16_t constructor + *@brief Constructor without any param(default 
constructor) + */ + TagFp16(void) { val = 0x0u; } + /** + *@ingroup fp16_t constructor + *@brief Constructor with an uint16_t value + */ + TagFp16(const uint16_t &ui_val) : val(ui_val) {} + /** + *@ingroup fp16_t constructor + *@brief Constructor with a fp16_t object(copy constructor) + */ + TagFp16(const TagFp16 &fp) : val(fp.val) {} + + /** + *@ingroup fp16_t copy assign + *@brief copy assign + */ + TagFp16 &operator=(const TagFp16 &fp); + /** + *@ingroup fp16_t math evaluation operator + *@param [in] fVal float object to be converted to fp16_t + *@brief Override basic evaluation operator to convert float to fp16_t + *@return Return fp16_t result from fVal + */ + TagFp16 &operator=(const float &fVal); + /** + *@ingroup fp16_t math evaluation operator + *@param [in] iVal int32_t object to be converted to fp16_t + *@brief Override basic evaluation operator to convert int32_t to fp16_t + *@return Return fp16_t result from iVal + */ + TagFp16 &operator=(const int32_t &iVal); + /** + *@ingroup fp16_t math conversion + *@brief Override convert operator to convert fp16_t to float/fp32 + *@return Return float/fp32 value of fp16_t + */ + operator float() const; + + /** + *@ingroup fp16_t math conversion + *@brief Override convert operator to convert fp16_t to int32_t + *@return Return int32_t value of fp16_t + */ + operator int32_t() const; + + /** + *@ingroup fp16_t math conversion + *@brief Convert fp16_t to float/fp32 + *@return Return float/fp32 value of fp16_t + */ + float toFloat() const; + + /** + *@ingroup fp16_t math conversion + *@brief Convert fp16_t to int32_t + *@return Return int32_t value of fp16_t + */ + int32_t toInt32() const; +}; +inline bool operator>(const TagFp16 &lhs, const TagFp16 &rhs) { return lhs.toFloat() > rhs.toFloat(); } +inline bool operator<(const TagFp16 &lhs, const TagFp16 &rhs) { return lhs.toFloat() < rhs.toFloat(); } +inline bool operator==(const TagFp16 &lhs, const TagFp16 &rhs) { return lhs.toFloat() == rhs.toFloat(); } +inline bool 
/**
 *@ingroup fp16_t public method
 *@param [in] bit0 whether the last preserved bit is 1 before rounding
 *@param [in] bit1 whether the highest truncated bit is 1
 *@param [in] bit_left whether any truncated bit below the highest is 1
 *@param [in] man mantissa value; supports uint16_t/uint32_t/uint64_t
 *@param [in] shift number of low bits to drop before rounding
 *@brief Drop `shift` low bits of man and round to nearest (ties to even)
 *@return the shifted mantissa, incremented by one when rounding is required
 */
template <typename T>
T ManRoundToNearest(bool bit0, bool bit1, bool bit_left, T man, uint16_t shift = 0) {
  const T increment = (bit1 && (bit_left || bit0)) ? static_cast<T>(1) : static_cast<T>(0);
  return static_cast<T>((man >> shift) + increment);
}

/**
 *@ingroup fp16_t public method
 *@param [in] man mantissa value; supports uint16_t/uint32_t/uint64_t
 *@brief Count the number of significant bits of man
 *@return bit length of man (0 when man is 0)
 */
template <typename T>
int16_t GetManBitLength(T man) {
  int16_t length = 0;
  for (; man != 0; man >>= 1) {
    ++length;
  }
  return length;
}
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/ge/datatype_util.h" + +#include + +namespace { +const std::vector kEmptyDatatypeVector; +std::map> g_translatable_data_type = { + // key:src datatype, value:dst datatype + {ge::DT_FLOAT, {ge::DT_FLOAT16, ge::DT_FLOAT}}, + {ge::DT_BOOL, {ge::DT_INT32}}, + {ge::DT_FLOAT16, {ge::DT_FLOAT, ge::DT_FLOAT16}}, + {ge::DT_INT64, {ge::DT_INT32}}}; + +std::map> g_reverse_translatable_data_type = { + // key:dst datatype,value:src datatype + {ge::DT_FLOAT16, {ge::DT_FLOAT, ge::DT_FLOAT16}}, + {ge::DT_INT32, {ge::DT_BOOL, ge::DT_INT64}}, + {ge::DT_FLOAT, {ge::DT_FLOAT16, ge::DT_FLOAT}}}; +} // namespace + +namespace ge { +bool DataTypeUtil::DataTypeTranslatable(const ge::DataType &src_out_data_type, const ge::DataType &dst_in_data_type) { + auto search = g_translatable_data_type.find(src_out_data_type); + if (search == g_translatable_data_type.end()) { + return false; + } + + for (auto data_type : search->second) { + if (data_type == dst_in_data_type) { + return true; + } + } + + return false; +} + +const std::vector &DataTypeUtil::GetTranslatableDataTypesBySrc(const ge::DataType &src_out_data_type) { + auto search = g_translatable_data_type.find(src_out_data_type); + if (search == g_translatable_data_type.end()) { + return kEmptyDatatypeVector; + } + + return search->second; +} + +const std::vector &DataTypeUtil::GetTranslatableDataTypesByDst(const ge::DataType &dst_in_data_type) { + auto search = g_reverse_translatable_data_type.find(dst_in_data_type); + if (search == g_reverse_translatable_data_type.end()) { + return 
kEmptyDatatypeVector; + } + + return search->second; +} +} // namespace ge diff --git a/src/ge/common/ge/datatype_util.h b/src/ge/common/ge/datatype_util.h new file mode 100644 index 00000000..ee3fb74d --- /dev/null +++ b/src/ge/common/ge/datatype_util.h @@ -0,0 +1,52 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_GE_DATATYPE_UTIL_H_ +#define GE_COMMON_GE_DATATYPE_UTIL_H_ + +#include +#include + +#include "graph/types.h" + +namespace ge { +static const int32_t kGeSizeFloat = sizeof(float); +static const int32_t kGeSizeHalfFloat = sizeof(float) / 2; +static const int32_t kGeSizeInt8 = sizeof(int8_t); +static const int32_t kGeSizeInt16 = sizeof(int16_t); +static const int32_t kGeSizeInt32 = sizeof(int32_t); +static const int32_t kGeSizeInt64 = sizeof(int64_t); +static const int32_t kGeSizeUint8 = sizeof(uint8_t); +static const int32_t kGeSizeBool = sizeof(bool); +static const int32_t kGeSizeDouble = sizeof(double); +static const int32_t kGeSizeUint64 = sizeof(uint64_t); +static const int32_t kGeSizeUint16 = sizeof(uint16_t); +static const int32_t kGeSizeUint32 = sizeof(uint32_t); + +static std::map CONST_OPDATA_TYPE_SIZE_MAP = { + {ge::DT_FLOAT, kGeSizeFloat}, {ge::DT_FLOAT16, kGeSizeHalfFloat}, {ge::DT_INT8, kGeSizeInt8}, + {ge::DT_INT16, kGeSizeInt16}, {ge::DT_INT32, kGeSizeInt32}, {ge::DT_INT64, kGeSizeInt64}, + {ge::DT_UINT8, kGeSizeUint8}, {ge::DT_UINT16, 
// Exception-free factory for shared_ptr<T>: uses nothrow new so an allocation
// failure yields a null shared_ptr instead of throwing std::bad_alloc.
// const is stripped from T for construction, mirroring std::make_shared.
template <typename T, typename... Args>
static inline std::shared_ptr<T> MakeShared(Args &&... args) {
  using NonConstT = typename std::remove_const<T>::type;
  return std::shared_ptr<T>(new (std::nothrow) NonConstT(std::forward<Args>(args)...));
}
+ */ + +#include "common/ge/plugin_manager.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "framework/common/debug/log.h" + +namespace ge { +static const int kMaxNumOfSo = 64; +static const int kMaxSizeOfSo = 209100800; // = 200M(unit is Byte) +static const int kMaxSizeOfLoadedSo = 522752000; // = 500M(unit is Byte) +static const char *const kExt = ".so"; // supported extension of shared object + +void PluginManager::ClearHandles_() noexcept { + for (const auto &handle : handles_) { + if (dlclose(handle.second) != 0) { + GELOGW("Failed to close handle of %s: %s", handle.first.c_str(), dlerror()); + } + } + handles_.clear(); +} + +PluginManager::~PluginManager() { ClearHandles_(); } + +string PluginManager::GetPath() { + Dl_info dl_info; + if (dladdr(reinterpret_cast(&PluginManager::GetPath), &dl_info) == 0) { + GELOGW("Failed to read so_path!"); + return string(); + } else { + std::string so_path = dl_info.dli_fname; + char path[PATH_MAX] = {0}; + if (so_path.length() >= PATH_MAX) { + GELOGW("File path is too long!"); + return string(); + } + if (realpath(so_path.c_str(), path) == nullptr) { + GELOGW("Failed to get realpath of %s", so_path.c_str()); + return string(); + } + + so_path = path; + so_path = so_path.substr(0, so_path.rfind('/') + 1); + return so_path; + } +} + +void PluginManager::SplitPath(const string &mutil_path, vector &path_vec) { + std::string tmp_string = mutil_path + ":"; + std::string::size_type start_pos = 0; + std::string::size_type cur_pos = tmp_string.find(':', 0); + while (cur_pos != std::string::npos) { + std::string path = tmp_string.substr(start_pos, cur_pos - start_pos); + if (!path.empty()) { + path_vec.push_back(path); + } + start_pos = cur_pos + 1; + cur_pos = tmp_string.find(':', start_pos); + } +} + +Status PluginManager::LoadSo(const string &path, const vector &func_check_list) { + uint32_t num_of_loaded_so = 0; + int64_t size_of_loaded_so = 0; + so_list_.clear(); + 
ClearHandles_(); + + std::vector path_vec; + SplitPath(path, path_vec); + for (const auto &single_path : path_vec) { + GE_IF_BOOL_EXEC(single_path.length() >= PATH_MAX, GELOGE(GE_PLGMGR_PATH_INVALID, "File path is too long!"); + continue); + // load break when number of loaded so reach maximum + if (num_of_loaded_so >= kMaxNumOfSo) { + GELOGW("Number of loaded so reaches maximum, only the first %d are loaded!", kMaxNumOfSo); + break; + } + + std::string file_name = single_path.substr(single_path.rfind('/') + 1, string::npos); + string file_path_dlopen = RealPath(single_path.c_str()); + if (file_path_dlopen.empty()) { + GELOGW("Failed to get realpath of %s!", single_path.c_str()); + continue; + } + + int64_t file_size = 0; + if (ValidateSo(file_path_dlopen, size_of_loaded_so, file_size) != SUCCESS) { + GELOGW("Failed to validate so %s", file_path_dlopen.c_str()); + continue; + } + + GELOGI("dlopen so path name: %s. ", file_path_dlopen.c_str()); + + // load continue when dlopen is failed + auto handle = dlopen(file_path_dlopen.c_str(), RTLD_NOW | RTLD_GLOBAL); + if (handle == nullptr) { + GELOGE(GE_PLGMGR_PATH_INVALID, "Failed to dlopen %s!", dlerror()); + continue; + } + + GELOGW("The shared library will not be checked. 
Please ensure the source of the shared library is trusted."); + + // load continue when so is invalid + bool is_valid = true; + for (const auto &func_name : func_check_list) { + auto real_fn = (void (*)())dlsym(handle, func_name.c_str()); + if (real_fn == nullptr) { + GELOGE(GE_PLGMGR_PATH_INVALID, "%s is skipped since function %s is not exist!", func_name.c_str(), + func_name.c_str()); + is_valid = false; + break; + } + } + if (!is_valid) { + GE_LOGE_IF(dlclose(handle), "Failed to dlclose ret"); + continue; + } + + // add file to list + size_of_loaded_so += file_size; + so_list_.emplace_back(file_name); + handles_[string(file_name)] = handle; + num_of_loaded_so++; + } + if (num_of_loaded_so == 0) { + GELOGW("Failed to find any valid so in path %s!", path.c_str()); + return SUCCESS; + } + return SUCCESS; +} + +Status PluginManager::ValidateSo(const string &file_path, int64_t size_of_loaded_so, int64_t &file_size) const { + // read file size + struct stat stat_buf; + if (stat(file_path.c_str(), &stat_buf) != 0) { + GELOGW("%s check fail.", file_path.c_str()); + return FAILED; + } + + // load continue when the size itself reaches maximum + file_size = stat_buf.st_size; + if (stat_buf.st_size > kMaxSizeOfSo) { + GELOGW("The %s is skipped since its size exceeds maximum! (size: %ldB, maximum: %dB)", file_path.c_str(), file_size, + kMaxSizeOfSo); + return FAILED; + } + + // load continue if the total size of so reaches maximum when it is loaded + if (size_of_loaded_so + file_size > kMaxSizeOfLoadedSo) { + GELOGW( + "%s is skipped because the size of loaded so reaches maximum if it is load! 
" + "(size: %ldB, size of loaded so: %ldB, maximum: %dB)", + file_path.c_str(), file_size, size_of_loaded_so, kMaxSizeOfLoadedSo); + return FAILED; + } + + return SUCCESS; +} + +Status PluginManager::Load(const string &path, const vector &func_check_list) { + uint32_t num_of_loaded_so = 0; + int64_t size_of_loaded_so = 0; + const unsigned char is_folder = 0x4; + const std::string ext = kExt; + so_list_.clear(); + ClearHandles_(); + + char canonical_path[PATH_MAX] = {0}; + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(path.length() >= PATH_MAX, GELOGW("File path is too long!"); + return FAILED, "File path is too long!"); + if (realpath(path.c_str(), canonical_path) == nullptr) { + GELOGW("Failed to get realpath of %s", path.c_str()); + return SUCCESS; + } + + DIR *dir = opendir(canonical_path); + if (dir == nullptr) { + GELOGW("Invalid path for load: %s", path.c_str()); + return SUCCESS; + } + + struct dirent *entry = nullptr; + while ((entry = readdir(dir)) != nullptr) { + // read fileName and fileType + std::string file_name = entry->d_name; + unsigned char file_type = entry->d_type; + + // ignore folder + bool invalid_file = (file_type == is_folder || + // ignore file whose name length is less than 3 + file_name.size() <= ext.size() || + // ignore file whose extension is not so + file_name.compare(file_name.size() - ext.size(), ext.size(), ext) != 0); + if (invalid_file) { + continue; + } + + // load break when number of loaded so reach maximum + if (num_of_loaded_so >= kMaxNumOfSo) { + GELOGW("Number of loaded so reaches maximum, only the first %d are loaded!", kMaxNumOfSo); + break; + } + + std::string canonical_path_str = (std::string(canonical_path) + "/" + file_name); + string file_path_dlopen = RealPath(canonical_path_str.c_str()); + if (file_path_dlopen.empty()) { + GELOGW("failed to get realpath of %s", canonical_path_str.c_str()); + continue; + } + + int64_t file_size = 0; + if (ValidateSo(file_path_dlopen, size_of_loaded_so, file_size) != SUCCESS) { + GELOGW("Failed 
to validate so %s", canonical_path_str.c_str()); + continue; + } + + GELOGI("Dlopen so path name: %s. ", file_path_dlopen.c_str()); + + // load continue when dlopen is failed + auto handle = dlopen(file_path_dlopen.c_str(), RTLD_NOW | RTLD_GLOBAL); + if (handle == nullptr) { + GELOGW("Failed in dlopen %s!", dlerror()); + continue; + } + + GELOGW("The shared library will not be checked. Please ensure that the source of the shared library is trusted."); + + // load continue when so is invalid + bool is_valid = true; + for (const auto &func_name : func_check_list) { + auto real_fn = (void (*)())dlsym(handle, func_name.c_str()); + if (real_fn == nullptr) { + GELOGW("The %s is skipped since function %s is not existed!", file_name.c_str(), func_name.c_str()); + is_valid = false; + break; + } + } + if (!is_valid) { + GE_LOGE_IF(dlclose(handle), "Dlclose ret fail"); + GELOGW("Dlclose ret fail!"); + continue; + } + + // add file to list + size_of_loaded_so += file_size; + so_list_.emplace_back(file_name); + handles_[string(file_name)] = handle; + num_of_loaded_so++; + } + closedir(dir); + if (num_of_loaded_so == 0) { + GELOGW("Failed to find any valid so under %s!", path.c_str()); + return SUCCESS; + } + + return SUCCESS; +} + +const vector &PluginManager::GetSoList() const { return so_list_; } +} // namespace ge diff --git a/src/ge/common/ge/plugin_manager.h b/src/ge/common/ge/plugin_manager.h new file mode 100644 index 00000000..b35a631a --- /dev/null +++ b/src/ge/common/ge/plugin_manager.h @@ -0,0 +1,156 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_GE_PLUGIN_MANAGER_H_ +#define GE_COMMON_GE_PLUGIN_MANAGER_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "engine/dnnengine.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +using SoToHandleMap = std::map; +using std::function; +using std::map; +using std::string; +using std::vector; + +class PluginManager { + public: + PluginManager() = default; + + ~PluginManager(); + + static string GetPath(); + + void SplitPath(const string &mutil_path, vector &path_vec); + + Status LoadSo(const string &path, const vector &func_check_list = vector()); + + Status Load(const string &path, const vector &func_check_list = vector()); + + const vector &GetSoList() const; + + template + Status GetAllFunctions(const string &func_name, map> &funcs) { + for (const auto &handle : handles_) { + auto real_fn = (R(*)(Types...))dlsym(handle.second, func_name.c_str()); + if (real_fn == nullptr) { + GELOGW("Failed to get function %s in %s!", func_name.c_str(), handle.first.c_str()); + return GE_PLGMGR_FUNC_NOT_EXIST; + } else { + funcs[handle.first] = real_fn; + } + } + return SUCCESS; + } + + template + Status InvokeAll(const string &func_name, Types... 
args) { + for (const auto &handle : handles_) { + // If the funcName is existed, signature of realFn can be casted to any type + auto real_fn = (void (*)(Types...))dlsym(handle.second, func_name.c_str()); + if (real_fn == nullptr) { + GELOGW("Failed to invoke function %s in %s!", func_name.c_str(), handle.first.c_str()); + return GE_PLGMGR_INVOKE_FAILED; + } else { + real_fn(args...); + } + } + return SUCCESS; + } + + template + Status InvokeAll(const string &func_name, T arg) { + for (const auto &handle : handles_) { + // If the funcName is existed, signature of realFn can be casted to any type + auto real_fn = (void (*)(T))dlsym(handle.second, func_name.c_str()); + if (real_fn == nullptr) { + GELOGW("Failed to invoke function %s in %s!", func_name.c_str(), handle.first.c_str()); + return GE_PLGMGR_INVOKE_FAILED; + } + typename std::remove_reference::type arg_temp; + real_fn(arg_temp); + + if (std::is_same::type, map>>::value) { + for (const auto &val : arg_temp) { + if (arg.find(val.first) != arg.end()) { + GELOGW("FuncName %s in so %s find the same key: %s, will replace it", func_name.c_str(), + handle.first.c_str(), val.first.c_str()); + arg[val.first] = val.second; + } + } + } + arg.insert(arg_temp.begin(), arg_temp.end()); + } + return SUCCESS; + } + template + Status InvokeAll(const string &func_name, T1 arg) { + for (const auto &handle : handles_) { + // If the funcName is existed, signature of realFn can be casted to any type + auto real_fn = (T2(*)(T1))dlsym(handle.second, func_name.c_str()); + if (real_fn == nullptr) { + GELOGW("Failed to invoke function %s in %s!", func_name.c_str(), handle.first.c_str()); + return GE_PLGMGR_INVOKE_FAILED; + } else { + T2 res = real_fn(arg); + if (res != SUCCESS) { + return FAILED; + } + } + } + return SUCCESS; + } + + template + Status InvokeAll(const string &func_name) { + for (const auto &handle : handles_) { + // If the funcName is existed, signature of realFn can be casted to any type + auto real_fn = 
(T(*)())dlsym(handle.second, func_name.c_str()); + if (real_fn == nullptr) { + GELOGW("Failed to invoke function %s in %s!", func_name.c_str(), handle.first.c_str()); + return GE_PLGMGR_INVOKE_FAILED; + } else { + T res = real_fn(); + if (res != SUCCESS) { + return FAILED; + } + } + } + return SUCCESS; + } + + private: + void ClearHandles_() noexcept; + Status ValidateSo(const string &file_path, int64_t size_of_loaded_so, int64_t &file_size) const; + + vector so_list_; + SoToHandleMap handles_; +}; +} // namespace ge + +#endif // GE_COMMON_GE_PLUGIN_MANAGER_H_ diff --git a/src/ge/common/ge/tbe_plugin_manager.cc b/src/ge/common/ge/tbe_plugin_manager.cc new file mode 100644 index 00000000..a053b687 --- /dev/null +++ b/src/ge/common/ge/tbe_plugin_manager.cc @@ -0,0 +1,131 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/ge/tbe_plugin_manager.h" + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/log.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/util.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/engine/dnnengine.h" +#include "framework/omg/omg_inner_types.h" +#include "external/ge/ge_api_types.h" +#include "register/op_registry.h" +#include "graph/opsproto_manager.h" + +namespace ge { +// Get Singleton Instance +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY TBEPluginManager &TBEPluginManager::Instance() { + static TBEPluginManager instance_ptr_; + return instance_ptr_; +} + +void TBEPluginManager::ClearHandles_() { + for (const auto &handle : handles_vec_) { + if (dlclose(handle) != 0) { + GELOGW("Failed to close handle: %s", dlerror()); + } + } + handles_vec_.clear(); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void TBEPluginManager::Finalize() { ClearHandles_(); } + +string TBEPluginManager::GetPath() { + Dl_info dl_info; + if (dladdr(reinterpret_cast(&TBEPluginManager::GetPath), &dl_info) == 0) { + GELOGW("Failed to read so path!"); + return string(); + } else { + string so_path = dl_info.dli_fname; + char path[PATH_MAX] = {0}; + if (so_path.length() >= PATH_MAX) { + GELOGW("File path is too long!"); + return string(); + } + if (realpath(so_path.c_str(), path) == nullptr) { + GELOGW("Failed to get realpath of %s", so_path.c_str()); + return string(); + } + + so_path = path; + so_path = so_path.substr(0, so_path.rfind('/') + 1); + return so_path; + } +} + +Status TBEPluginManager::CheckCustomAiCpuOpLib() { + std::vector vec_op_type; + + domi::OpRegistry::Instance()->GetOpTypeByImplyType(vec_op_type, domi::ImplyType::CUSTOM); + for (size_t i = 0; i < vec_op_type.size(); i++) { + bool aicpu_so_exist = false; + std::string ai_cpu_so_name = "lib" + vec_op_type[i] + "_aicpu.so"; + 
for (size_t j = 0; j < domi::GetContext().aicpu_op_run_paths.size(); j++) { + string bin_file_path = domi::GetContext().aicpu_op_run_paths[j]; + if (bin_file_path.size() >= ai_cpu_so_name.size() && + bin_file_path.compare(bin_file_path.size() - ai_cpu_so_name.size(), ai_cpu_so_name.size(), ai_cpu_so_name) == + 0) { + aicpu_so_exist = true; + break; + } + } + if (!aicpu_so_exist) { + GELOGE(FAILED, "Can't find aicpu run so(%s), please check the plugin path!", ai_cpu_so_name.c_str()); + return FAILED; + } + } + return SUCCESS; +} + +void TBEPluginManager::SaveDdkVersion(const std::string &ddk_version) { + if (ddk_version.empty()) { + return; + } + GELOGI("Input ddk version : %s.", ddk_version.c_str()); + + // Save DDK version number to omgcontext + domi::GetContext().ddk_version = ddk_version; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void TBEPluginManager::InitPreparation( + const std::map &options) { + Status ret = CheckCustomAiCpuOpLib(); + if (ret != SUCCESS) { + GELOGE(ret, "Check custom aicpu run so failed!"); + return; + } else { + auto ddk_version = options.find("ge.DDK_version"); + if (ddk_version != options.end()) { + SaveDdkVersion(ddk_version->second); + } else { + GELOGW("No ddkVersion!"); + return; + } + } +} +} // namespace ge diff --git a/src/ge/common/ge/tbe_plugin_manager.h b/src/ge/common/ge/tbe_plugin_manager.h new file mode 100644 index 00000000..9b1e2662 --- /dev/null +++ b/src/ge/common/ge/tbe_plugin_manager.h @@ -0,0 +1,62 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_GE_TBE_PLUGIN_MANAGER_H_ +#define GE_COMMON_GE_TBE_PLUGIN_MANAGER_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "external/ge/ge_api_error_codes.h" +#include "external/register/register.h" + +namespace ge { +using SoHandlesVec = std::vector; +using std::vector; +using std::string; +using std::map; +using std::function; + +class TBEPluginManager { + public: + void Finalize(); + + // Get TBEPluginManager singleton instance + static TBEPluginManager& Instance(); + + static string GetPath(); + + static void InitPreparation(const std::map &options); + + private: + TBEPluginManager() = default; + ~TBEPluginManager() = default; + void ClearHandles_(); + static Status CheckCustomAiCpuOpLib(); + static void SaveDdkVersion(const std::string &ddk_version); + + SoHandlesVec handles_vec_; +}; +} // namespace ge + +#endif // GE_COMMON_GE_TBE_PLUGIN_MANAGER_H_ diff --git a/src/ge/common/ge_format_util.cc b/src/ge/common/ge_format_util.cc new file mode 100644 index 00000000..8b917db0 --- /dev/null +++ b/src/ge/common/ge_format_util.cc @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "framework/common/ge_format_util.h" + +#include "formats/formats.h" + +namespace ge { +GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY Status GeFormatUtil::TransShape(const TensorDesc &src_desc, + Format dst_format, + std::vector &dst_shape) { + return formats::TransShape(src_desc.GetFormat(), src_desc.GetShape().GetDims(), src_desc.GetDataType(), dst_format, + dst_shape); +} +} // namespace ge diff --git a/src/ge/common/helper/model_helper.cc b/src/ge/common/helper/model_helper.cc new file mode 100644 index 00000000..e5270b12 --- /dev/null +++ b/src/ge/common/helper/model_helper.cc @@ -0,0 +1,498 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "framework/common/helper/model_helper.h" + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/util.h" +#include "framework/omg/version.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/load/new_model_manager/davinci_model_parser.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" + +using std::string; +using ge::TBEKernelStore; +using ge::TBEKernelPtr; +using domi::ModelTaskDef; + +namespace ge { +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ModelHelper::~ModelHelper() { (void)ReleaseLocalModelData(); } + +Status ModelHelper::SaveModelPartition(std::shared_ptr &om_file_save_helper, ModelPartitionType type, + const uint8_t *data, size_t size) { + if (size < 1 || size > UINT32_MAX) { + GELOGE(PARAM_INVALID, "Add model partition failed, partition size %zu invalid", size); + return PARAM_INVALID; + } + if (data == nullptr) { + GELOGE(PARAM_INVALID, "Add model partition failed, data is null"); + return PARAM_INVALID; + } + ModelPartition partition_model; + partition_model.data = const_cast(data); + partition_model.size = static_cast(size); + partition_model.type = type; + if (om_file_save_helper->AddPartition(partition_model) != SUCCESS) { + GELOGE(PARAM_INVALID, "Add model partition failed, partition size %zu", size); + return PARAM_INVALID; + } + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelHelper::SaveToOmModel(const GeModelPtr &ge_model, + const SaveParam &save_param, + const std::string &output_file) { + if (output_file.empty()) { + GELOGE(FAILED, "GraphBuilder SaveModel received invalid file name prefix"); + return FAILED; + } + + GE_IF_BOOL_EXEC(ge_model == nullptr, GELOGE(FAILED, "Ge_model is nullptr"); return FAILED); + std::shared_ptr om_file_save_helper = ge::MakeShared(); + GE_CHECK_NOTNULL(om_file_save_helper); + ModelPtr model_tmp = ge::MakeShared(ge_model->GetName(), 
ge_model->GetPlatformVersion()); + if (model_tmp == nullptr) { + GELOGE(FAILED, "Create Model %s Ptr failed", ge_model->GetName().c_str()); + return FAILED; + } + model_tmp->SetGraph(ge_model->GetGraph()); + model_tmp->SetVersion(ge_model->GetVersion()); + model_tmp->SetAttr(ge_model->MutableAttrMap()); + + ge::Buffer model_buffer; + model_tmp->Save(model_buffer); + GELOGI("MODEL_DEF size is %zu", model_buffer.GetSize()); + if (model_buffer.GetSize() > 0) { + if (SaveModelPartition(om_file_save_helper, ModelPartitionType::MODEL_DEF, model_buffer.GetData(), + model_buffer.GetSize()) != SUCCESS) { + GELOGE(PARAM_INVALID, "Add model graph partition failed"); + return PARAM_INVALID; + } + } + + auto ge_model_weight = ge_model->GetWeight(); + GELOGI("WEIGHTS_DATA size is %zu", ge_model_weight.GetSize()); + if (SaveModelPartition(om_file_save_helper, ModelPartitionType::WEIGHTS_DATA, ge_model_weight.GetData(), + ge_model_weight.GetSize()) != SUCCESS) { + GELOGE(PARAM_INVALID, "Add weight partition failed"); + return PARAM_INVALID; + } + + TBEKernelStore tbe_kernel_store = ge_model->GetTBEKernelStore(); + GELOGI("TBE_KERNELS size is %zu", tbe_kernel_store.DataSize()); + if (tbe_kernel_store.DataSize() > 0) { + if (SaveModelPartition(om_file_save_helper, ModelPartitionType::TBE_KERNELS, tbe_kernel_store.Data(), + tbe_kernel_store.DataSize()) != SUCCESS) { + GELOGE(PARAM_INVALID, "Add tbe kernel partition failed"); + return PARAM_INVALID; + } + } + + // no need to check value, DATA->NetOutput + (void)tbe_kernel_store.Load(tbe_kernel_store.Data(), tbe_kernel_store.DataSize()); + + std::shared_ptr model_task_def = ge_model->GetModelTaskDefPtr(); + if (model_task_def == nullptr) { + GELOGE(MEMALLOC_FAILED, "Create model task def ptr failed"); + return FAILED; + } + size_t partition_task_size = model_task_def->ByteSizeLong(); + GE_IF_BOOL_EXEC(partition_task_size == 0 || partition_task_size > INT_MAX, + GELOGE(FAILED, "Model_def's byte size (%zu) is invalid!", 
partition_task_size); + return FAILED); + + ge::Buffer task_buffer(partition_task_size); + if (task_buffer.GetSize() == 0) { + GELOGE(MEMALLOC_FAILED, "Alloc model task def buffer failed"); + return MEMALLOC_FAILED; + } + (void)model_task_def->SerializePartialToArray(task_buffer.GetData(), static_cast(partition_task_size)); + + GELOGI("TASK_INFO op_size:%d, stream_num:%u", model_task_def->op().size(), model_task_def->stream_num()); + GELOGI("TASK_INFO size is %zu", partition_task_size); + + if (SaveModelPartition(om_file_save_helper, ModelPartitionType::TASK_INFO, task_buffer.GetData(), + partition_task_size) != SUCCESS) { + GELOGE(PARAM_INVALID, "Add model task def partition failed"); + return PARAM_INVALID; + } + // Save target/version to model_header + ModelFileHeader &model_header = om_file_save_helper->GetModelFileHeader(); + model_header.platform_type = ge_model->GetPlatformType(); + model_header.om_ir_version = ge_model->GetVersion(); + std::string platform_version = ge_model->GetPlatformVersion(); + GELOGI("Platform version save: %s", platform_version.c_str()); + + errno_t err; + err = memcpy_s(model_header.platform_version, PLATFORM_VERSION_LEN, platform_version.c_str(), + platform_version.size() + 1); + if (err != EOK) { + GELOGE(MEMALLOC_FAILED, "ModelHelper SaveModel failed while while allocating memory for platform_version"); + return MEMALLOC_FAILED; + } + string version = reinterpret_cast(model_header.platform_version); + GELOGI("Platform version save: %s", version.c_str()); + + size_t name_size = ge_model->GetName().size(); + name_size = name_size > (MODEL_NAME_LENGTH - 1) ? 
(MODEL_NAME_LENGTH - 1) : name_size; + err = memcpy_s(model_header.name, MODEL_NAME_LENGTH, ge_model->GetName().c_str(), name_size); + if (err != EOK) { + GELOGE(MEMALLOC_FAILED, "ModelHelper SaveModel failed while allocating memory for name"); + return MEMALLOC_FAILED; + } + string model_name = reinterpret_cast(model_header.name); + GELOGI("Model name save:%s", model_name.c_str()); + + Status ret = om_file_save_helper->SaveModel(save_param, output_file.c_str()); + if (ret != SUCCESS) { + GELOGE(FAILED, "OmFileSaveHelper SaveModel return fail."); + return FAILED; + } + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +ModelHelper::SaveOriginalGraphToOmModel(const ge::Graph &graph, const std::string &output_file) { + if (output_file.empty()) { + GELOGE(FAILED, "SaveModel received invalid file name prefix"); + return FAILED; + } + // Get computegraph from graph + auto compute_graph = ge::GraphUtils::GetComputeGraph(graph); + if (compute_graph == nullptr) { + GELOGE(FAILED, "SaveModel fail for compute_graph null"); + return FAILED; + } + ge::GraphUtils::DumpGEGraph(compute_graph, "OriginalGraph"); + ge::GraphUtils::DumpGEGraphToOnnx(*compute_graph, "OriginalGraph"); + // Model + ModelPtr model_ptr = ge::MakeShared(); + GE_CHECK_NOTNULL_EXEC(model_ptr, return MEMALLOC_FAILED); + model_ptr->SetName(compute_graph->GetName()); + model_ptr->SetGraph(graph); + model_ptr->SetVersion(static_cast(OM_PROTO_VERSION)); + string framework_version; + Status frame_rt = PlatformVersionManager::GetPlatformVersion(framework_version); + if (frame_rt == SUCCESS) { + uint32_t counter = 0; + string model_framework_version = framework_version + "." 
+ std::to_string(counter); + model_ptr->SetPlatformVersion(model_framework_version); + } + // Model def + ge::Buffer model_buffer; + ge::graphStatus status = model_ptr->Save(model_buffer); + if (status != ge::GRAPH_SUCCESS) { + GELOGE(FAILED, "SaveModel fail for save buffer fail"); + return FAILED; + } + shared_ptr om_file_save_helper = ge::MakeShared(); + GE_CHECK_NOTNULL_EXEC(om_file_save_helper, return MEMALLOC_FAILED); + ModelPartition partition_model; + partition_model.data = model_buffer.GetData(); + partition_model.size = static_cast(model_buffer.GetSize()); + partition_model.type = ModelPartitionType::MODEL_DEF; + GELOGI("Original Model type[%u],size[%u]", partition_model.type, partition_model.size); + if (partition_model.data != nullptr && partition_model.size > 0) { + (void)om_file_save_helper->AddPartition(partition_model); + // Condition of AddPartition is established, no need to check value + } + // Save target/version to model_header + ModelFileHeader &model_header = om_file_save_helper->GetModelFileHeader(); + model_header.om_ir_version = model_ptr->GetVersion(); + model_header.headsize = MODEL_FILE_HEAD_LEN; + std::string platform_version = model_ptr->GetPlatformVersion(); + errno_t err = memcpy_s(model_header.platform_version, PLATFORM_VERSION_LEN, platform_version.c_str(), + platform_version.size() + 1); + if (err != EOK) { + GELOGE(FAILED, "ModelHelper SaveModel failed for platform_version"); + return FAILED; + } + err = memcpy_s(model_header.name, MODEL_NAME_LENGTH, model_ptr->GetName().c_str(), model_ptr->GetName().size() + 1); + if (err != EOK) { + GELOGE(FAILED, "ModelHelper SaveModel memory copy failed"); + return FAILED; + } + Status ret = om_file_save_helper->SaveModelToFile(output_file.c_str()); + return (ret == SUCCESS ? 
SUCCESS : FAILED); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelHelper::LoadModel(const ge::ModelData &model_data) { + if (model_data.model_data == nullptr || model_data.model_len == 0) { + GELOGE(FAILED, "Model_data is nullptr, or model_data_size is 0"); + return FAILED; + } + + if (is_assign_model_) { + GELOGE(FAILED, "Model helper has already loaded!"); + return FAILED; + } + if (ReleaseLocalModelData() != SUCCESS) { + GELOGE(FAILED, "ReleaseLocalModelData failed."); + return FAILED; + } + + if (ge::DavinciModelParser::ParseModelContent(model_data, model_addr_tmp_, model_len_tmp_) != SUCCESS) { + GELOGE(FAILED, "Parse model content failed!"); + return FAILED; + } + + file_header_ = reinterpret_cast(model_data.model_data); + + OmFileLoadHelper om_load_helper; + if (om_load_helper.Init(model_addr_tmp_, model_len_tmp_) != SUCCESS) { + GELOGE(FAILED, "Om_load_helper init failed"); + model_addr_tmp_ = nullptr; + return FAILED; + } + + // Encrypt model need to del temp model/no encrypt model don't need to del model + model_addr_tmp_ = nullptr; + + if (GenerateGeModel(om_load_helper) != SUCCESS) { + GELOGE(FAILED, "GenerateGeModel failed"); + return FAILED; + } + + is_assign_model_ = true; + return SUCCESS; +} + +Status ModelHelper::GenerateGeModel(OmFileLoadHelper &om_load_helper) { + model_ = ge::MakeShared(); + GE_CHECK_NOTNULL(model_); + Status ret = LoadModelData(om_load_helper); + if (ret != SUCCESS) { + return ret; + } + ret = LoadWeights(om_load_helper); + if (ret != SUCCESS) { + return ret; + } + ret = LoadTask(om_load_helper); + if (ret != SUCCESS) { + return ret; + } + ret = LoadTBEKernelStore(om_load_helper); + if (ret != SUCCESS) { + return ret; + } + return SUCCESS; +} + +Status ModelHelper::LoadModelData(OmFileLoadHelper &om_load_helper) { + ModelPartition partition_model_def; + // no need to check value, DATA->NetOutput + om_load_helper.GetModelPartition(ModelPartitionType::MODEL_DEF, partition_model_def); + GELOGI("Model_def 
partition size:%u", partition_model_def.size); + + ge::Model model; + if (ge::Model::Load(partition_model_def.data, partition_model_def.size, model) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Load model failed."); + return INTERNAL_ERROR; + } + + SetModelToGeModel(model); + + return SUCCESS; +} + +void ModelHelper::SetModelToGeModel(ge::Model &model) { + model_->SetGraph(model.GetGraph()); + model_->SetName(model.GetName()); + model_->SetVersion(model.GetVersion()); + model_->SetPlatformVersion(model.GetPlatformVersion()); + model_->SetAttr(model.MutableAttrMap()); +} + +Status ModelHelper::LoadWeights(OmFileLoadHelper &om_load_helper) { + ModelPartition partition; + if (om_load_helper.GetModelPartition(ModelPartitionType::WEIGHTS_DATA, partition) != SUCCESS) { + GELOGE(FAILED, "Get weight model partition failed."); + return FAILED; + } + ge::Buffer weight = ge::Buffer::CopyFrom(partition.data, partition.size); + model_->SetWeight(weight); + + GELOGI("GetWeight size:%u", partition.size); + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelHelper::LoadTask(OmFileLoadHelper &om_load_helper) { + ModelPartition task_partition; + if (om_load_helper.GetModelPartition(ModelPartitionType::TASK_INFO, task_partition) != SUCCESS) { + GELOGE(FAILED, "Get task model partition failed."); + return FAILED; + } + std::shared_ptr task = ge::MakeShared(); + GE_CHECK_NOTNULL(task); + if (task_partition.size != 0) { + if (!ReadProtoFromArray(task_partition.data, task_partition.size, task.get())) { + GELOGE(INTERNAL_ERROR, "ReadProtoFromArray failed."); + return INTERNAL_ERROR; + } + GELOGI("TASK_INFO op_size:%d, stream_num:%u", task->op().size(), task->stream_num()); + } + model_->SetModelTaskDef(task); + return SUCCESS; +} + +Status ModelHelper::LoadTBEKernelStore(OmFileLoadHelper &om_load_helper) { + // Load tbe kernels + ModelPartition partition_kernel_def; + TBEKernelStore kernel_store; + if 
(om_load_helper.GetModelPartition(ModelPartitionType::TBE_KERNELS, partition_kernel_def) == SUCCESS) { + GELOGI("Kernels partition size:%u", partition_kernel_def.size); + if (kernel_store.Load(partition_kernel_def.data, partition_kernel_def.size)) { + GELOGI("Load tbe kernels success"); + } else { + GELOGW("Load tbe kernels failed"); + } + } + model_->SetTBEKernelStore(kernel_store); + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY GeModelPtr ModelHelper::GetGeModel() { + if (model_ != nullptr) { + return model_; + } + + GELOGI("Model has not been loaded!"); + std::shared_ptr out_model = ge::MakeShared(); + if (out_model == nullptr) { + return nullptr; + } + return out_model; +} + +// Transit func for model to ge_model. It will be removed when load and build support ge_model in future +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelHelper::TransModelToGeModel(const ModelPtr &model, + GeModelPtr &ge_model) { + if (model == nullptr) { + GELOGE(FAILED, "Model is null"); + return FAILED; + } + ge_model = ge::MakeShared(); + GE_CHECK_NOTNULL(ge_model); + ge_model->SetGraph(model->GetGraph()); + ge_model->SetName(model->GetName()); + ge_model->SetVersion(model->GetVersion()); + ge_model->SetPlatformVersion(model->GetPlatformVersion()); + ge_model->SetAttr(model->MutableAttrMap()); + + // Copy weight info + auto compute_graph = ge::GraphUtils::GetComputeGraph(model->GetGraph()); + // ge::Buffer weight; + ge::Buffer weight; + (void)ge::AttrUtils::GetZeroCopyBytes(compute_graph, ge::ATTR_NAME_WEIGHTS_DATA, weight); + ge_model->SetWeight(weight); + // Copy task info + if (model->HasAttr(MODEL_ATTR_TASKS)) { + ge::Buffer task_buffer; + GE_CHK_BOOL_RET_STATUS(ge::AttrUtils::GetZeroCopyBytes(model, MODEL_ATTR_TASKS, task_buffer), FAILED, + "Get bytes failed."); + + std::shared_ptr task = ge::MakeShared(); + GE_CHECK_NOTNULL(task); + GE_IF_BOOL_EXEC(task_buffer.GetData() == nullptr, GELOGE(FAILED, "Get data fail"); return FAILED); + 
GE_IF_BOOL_EXEC(task_buffer.GetSize() == 0, GELOGE(FAILED, "Get size fail"); return FAILED); + + GE_CHK_BOOL_EXEC(ReadProtoFromArray(task_buffer.GetData(), static_cast(task_buffer.GetSize()), task.get()), + return INTERNAL_ERROR, "ReadProtoFromArray failed."); + + ge_model->SetModelTaskDef(task); + } + // Copy tbe kernel info + // TBEKernelStore kernel_store; + TBEKernelStore kernel_store; + if (compute_graph != nullptr && compute_graph->GetDirectNodesSize() != 0) { + for (const ge::NodePtr &n : compute_graph->GetDirectNode()) { + auto node_op_desc = n->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue); + TBEKernelPtr tbe_kernel = node_op_desc->TryGetExtAttr(ge::OP_EXTATTR_NAME_TBE_KERNEL, TBEKernelPtr()); + GE_IF_BOOL_EXEC(tbe_kernel == nullptr, continue); + kernel_store.AddTBEKernel(tbe_kernel); + GELOGI("Add tbe kernel bin %s", tbe_kernel->GetName().c_str()); + } + } + if (!kernel_store.Build()) { + GELOGE(FAILED, "TBE Kernels store build failed!"); + return FAILED; + } + ge_model->SetTBEKernelStore(kernel_store); + + return SUCCESS; +} + +// trasit func for ge_model to Model. 
will be removed when load and build support ge_model in future +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelHelper::TransGeModelToModel(const GeModelPtr &ge_model, + ModelPtr &model) { + if (ge_model == nullptr) { + GELOGE(FAILED, "Ge_model is null"); + return FAILED; + } + model = ge::MakeShared(); + GE_CHECK_NOTNULL(model); + model->SetGraph(ge_model->GetGraph()); + model->SetName(ge_model->GetName()); + model->SetVersion(ge_model->GetVersion()); + model->SetPlatformVersion(ge_model->GetPlatformVersion()); + model->SetAttr(ge_model->MutableAttrMap()); + // Copy weight info + auto compute_graph = ge::GraphUtils::GetComputeGraph(model->GetGraph()); + bool ret = ge::AttrUtils::SetZeroCopyBytes(compute_graph, ge::ATTR_NAME_WEIGHTS_DATA, ge_model->GetWeight()); + if (!ret) { + GELOGE(FAILED, "Copy weight buffer failed!"); + return FAILED; + } + // Copy task info + std::shared_ptr model_task = ge_model->GetModelTaskDefPtr(); + + if (model_task != nullptr) { + int size = model_task->ByteSize(); + ge::Buffer buffer(static_cast(size)); + if (buffer.GetSize() == 0) { + GELOGE(MEMALLOC_FAILED, "alloc model attr task buffer failed!"); + return MEMALLOC_FAILED; + } + (void)model_task->SerializePartialToArray(buffer.GetData(), size); + ret = ge::AttrUtils::SetZeroCopyBytes(model, MODEL_ATTR_TASKS, std::move(buffer)); + if (!ret) { + GELOGE(FAILED, "Copy task buffer failed!"); + return FAILED; + } + } + return SUCCESS; +} + +Status ModelHelper::ReleaseLocalModelData() noexcept { + Status result = SUCCESS; + if (model_addr_tmp_ != nullptr) { + errno_t ret = memset_s(static_cast(model_addr_tmp_), model_len_tmp_, 0, model_len_tmp_); + if (ret != EOK) { + GELOGE(FAILED, "Failed to memset memory, error-code %d", ret); + result = FAILED; + } + delete[] model_addr_tmp_; + model_addr_tmp_ = nullptr; + model_len_tmp_ = 0; + } + return result; +} +} // namespace ge diff --git a/src/ge/common/helper/om_file_helper.cc b/src/ge/common/helper/om_file_helper.cc new file mode 
100644 index 00000000..dfe5c1d6 --- /dev/null +++ b/src/ge/common/helper/om_file_helper.cc @@ -0,0 +1,217 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "framework/common/helper/om_file_helper.h" + +#include +#include + +#include "common/math/math_util.h" +#include "common/auth/file_saver.h" +#include "framework/common/debug/log.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/util.h" + +namespace ge { +// For Load +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OmFileLoadHelper::Init(const ge::ModelData &model) { + if (CheckModelValid(model) != SUCCESS) { + return FAILED; + } + uint32_t model_data_size = model.model_len - sizeof(ModelFileHeader); + uint8_t *model_data = static_cast(model.model_data) + sizeof(ModelFileHeader); + Status ret = Init(model_data, model_data_size); + return ret; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OmFileLoadHelper::Init(uint8_t *model_data, + const uint32_t model_data_size) { + if (LoadModelPartitionTable(model_data, model_data_size) != SUCCESS) { + return FAILED; + } + is_inited_ = true; + return SUCCESS; +} + +// Use both +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OmFileLoadHelper::GetModelPartition(ModelPartitionType type, + ModelPartition &partition) { + if (!is_inited_) { + GELOGE(PARAM_INVALID, "OmFileLoadHelper not 
Inited!"); + return PARAM_INVALID; + } + + bool found = false; + for (ModelPartition &part : context_.partition_datas_) { + if (part.type == type) { + partition = part; + found = true; + break; + } + } + + if (!found) { + if (type != ModelPartitionType::TBE_KERNELS) { + GELOGE(FAILED, "GetModelPartition:type:%d is not in partition_datas", static_cast(type)); + return FAILED; + } + } + return SUCCESS; +} + +Status OmFileLoadHelper::CheckModelValid(const ge::ModelData &model) const { + // Parameter validity check + if (model.model_data == nullptr) { + GELOGE(PARAM_INVALID, "Model_data must not be null"); + return PARAM_INVALID; + } + + // Model length too small + if (model.model_len < (sizeof(ModelFileHeader) + sizeof(ModelPartitionTable))) { + GELOGE(PARAM_INVALID, "Invalid model. length < sizeof(ModelFileHeader) + sizeof(ModelPartitionTable)."); + return PARAM_INVALID; + } + + // Get file header + auto model_header = reinterpret_cast(model.model_data); + // Determine whether the file length and magic number match + if ((model_header->length != model.model_len - sizeof(ModelFileHeader)) || + (MODEL_FILE_MAGIC_NUM != model_header->magic)) { + GELOGE(PARAM_INVALID, + "Invalid model. 
file_header->length(%u) + sizeof(ModelFileHeader)(%zu) != model->model_len(%u) || " + "MODEL_FILE_MAGIC_NUM != file_header->magic", + model_header->length, sizeof(ModelFileHeader), model.model_len); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status OmFileLoadHelper::LoadModelPartitionTable(uint8_t *model_data, const uint32_t model_data_size) { + if (model_data == nullptr) { + GELOGE(PARAM_INVALID, "Param model_data must not be null"); + return PARAM_INVALID; + } + // Init partition table + auto partition_table = reinterpret_cast(model_data); + if ((partition_table->num != PARTITION_SIZE) && (partition_table->num != PARTITION_SIZE - 1)) { + GELOGE(PARAM_INVALID, "Invalid partition_table->num:%u", partition_table->num); + return PARAM_INVALID; + } + + size_t mem_offset = SIZE_OF_MODEL_PARTITION_TABLE(*partition_table); + GELOGI("sizeof(ModelFileHeader)=%zu, sizeof(ModelPartitionTable)=%zu", sizeof(ModelFileHeader), mem_offset); + if (model_data_size <= mem_offset) { + GELOGE(PARAM_INVALID, "invalid model data, partition_table->num:%u, model data size %u", partition_table->num, + model_data_size); + return PARAM_INVALID; + } + for (uint32_t i = 0; i < partition_table->num; i++) { + ModelPartition partition; + partition.size = partition_table->partition[i].mem_size; + partition.data = model_data + mem_offset; + partition.type = partition_table->partition[i].type; + context_.partition_datas_.push_back(partition); + + if (partition.size > model_data_size || mem_offset > model_data_size - partition.size) { + GELOGE(PARAM_INVALID, "the current need partition sizes %zu greater than the model data size %u ", + partition.size + mem_offset, model_data_size); + return PARAM_INVALID; + } + mem_offset += partition.size; + GELOGI("Partition, type:%d, size:%u", static_cast(partition.type), partition.size); + } + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY const std::vector + &OmFileSaveHelper::GetModelPartitions() const { + return 
context_.partition_datas_; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ModelPartitionTable *OmFileSaveHelper::GetPartitionTable() { + auto partition_size = static_cast(context_.partition_datas_.size()); + // Build ModelPartitionTable, flex array + context_.partition_table_.clear(); + context_.partition_table_.resize(sizeof(ModelPartitionTable) + sizeof(ModelPartitionMemInfo) * partition_size, 0); + + auto partition_table = reinterpret_cast(context_.partition_table_.data()); + partition_table->num = partition_size; + + uint32_t mem_offset = 0; + for (uint32_t i = 0; i < partition_size; i++) { + ModelPartition partition = context_.partition_datas_[i]; + partition_table->partition[i] = {partition.type, mem_offset, partition.size}; + mem_offset += partition.size; + GELOGI("Partition, type:%d, size:%u", static_cast(partition.type), partition.size); + } + return partition_table; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OmFileSaveHelper::AddPartition(ModelPartition &partition) { + if (CheckUint32AddOverflow(context_.model_data_len_, partition.size) != SUCCESS) { + GELOGE(FAILED, "UINT32 %u and %u addition can result in overflow!", context_.model_data_len_, partition.size); + return FAILED; + } + context_.partition_datas_.push_back(partition); + context_.model_data_len_ += partition.size; + return SUCCESS; +} + +Status OmFileSaveHelper::SaveModel(const SaveParam &save_param, const char *output_file) { + (void)save_param.cert_file; + (void)save_param.ek_file; + (void)save_param.encode_mode; + (void)save_param.hw_key_file; + (void)save_param.pri_key_file; + Status ret = SaveModelToFile(output_file); + if (ret == SUCCESS) { + GELOGI("Generate model with encrypt."); + } + return ret; +} + +Status OmFileSaveHelper::SaveModelToFile(const char *output_file) { +#if !defined(NONSUPPORT_SAVE_TO_FILE) + uint32_t model_data_len = context_.model_data_len_; + if (model_data_len == 0) { + GELOGE(domi::PARAM_INVALID, "Model data len error! 
should not be 0"); + return domi::PARAM_INVALID; + } + + ModelPartitionTable *partition_table = GetPartitionTable(); + if (partition_table == nullptr) { + GELOGE(ge::GE_GRAPH_SAVE_FAILED, "SaveModelToFile exe failed: partition_table is NULL"); + return ge::GE_GRAPH_SAVE_FAILED; + } + uint32_t size_of_table = SIZE_OF_MODEL_PARTITION_TABLE(*partition_table); + FMK_UINT32_ADDCHECK(size_of_table, model_data_len) + model_header_.length = size_of_table + model_data_len; + + GELOGI("Sizeof(ModelFileHeader):%zu,sizeof(ModelPartitionTable):%u, model_data_len:%u, model_total_len:%zu", + sizeof(ModelFileHeader), size_of_table, model_data_len, model_header_.length + sizeof(ModelFileHeader)); + + std::vector partition_datas = context_.partition_datas_; + Status ret = FileSaver::SaveToFile(output_file, model_header_, *partition_table, partition_datas); + if (ret == SUCCESS) { + GELOGI("Save model success without encrypt."); + } + return ret; +#else + return SUCCESS; +#endif +} +} // namespace ge diff --git a/src/ge/common/math/math_util.h b/src/ge/common/math/math_util.h new file mode 100644 index 00000000..8a78317e --- /dev/null +++ b/src/ge/common/math/math_util.h @@ -0,0 +1,348 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_MATH_MATH_UTIL_H_ +#define GE_COMMON_MATH_MATH_UTIL_H_ + +#include +#include +#include +#include + +#include "framework/common/debug/log.h" +#include "framework/common/fmk_error_codes.h" + +namespace ge { +/// +/// @ingroup math_util +/// @brief check whether int32 addition can result in overflow +/// @param [in] a addend +/// @param [in] b addend +/// @return Status +/// +inline Status CheckIntAddOverflow(int a, int b) { + if (((b > 0) && (a > (INT_MAX - b))) || ((b < 0) && (a < (INT_MIN - b)))) { + return FAILED; + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int64 addition can result in overflow +/// @param [in] a addend +/// @param [in] b addend +/// @return Status +/// +inline Status CheckInt64AddOverflow(int64_t a, int64_t b) { + if (((b > 0) && (a > (INT64_MAX - b))) || ((b < 0) && (a < (INT64_MIN - b)))) { + return FAILED; + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int32 addition can result in overflow +/// @param [in] a addend +/// @param [in] b addend +/// @return Status +/// +inline Status CheckInt32AddOverflow(int32_t a, int32_t b) { + if (((b > 0) && (a > (INT32_MAX - b))) || ((b < 0) && (a < (INT32_MIN - b)))) { + return FAILED; + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether uint32 addition can result in overflow +/// @param [in] a addend +/// @param [in] b addend +/// @return Status +/// +inline Status CheckUint32AddOverflow(uint32_t a, uint32_t b) { + if (a > (UINT32_MAX - b)) { + return FAILED; + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int subtraction can result in overflow +/// @param [in] a subtrahend +/// @param [in] b minuend +/// @return Status +/// +inline Status CheckIntSubOverflow(int a, int b) { + if (((b > 0) && (a < (INT_MIN + b))) || ((b < 0) && (a > (INT_MAX + b)))) { + return FAILED; + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check 
whether int32 subtraction can result in overflow +/// @param [in] a subtrahend +/// @param [in] b minuend +/// @return Status +/// +inline Status CheckInt32SubOverflow(int32_t a, int32_t b) { + if (((b > 0) && (a < (INT32_MIN + b))) || ((b < 0) && (a > (INT32_MAX + b)))) { + return FAILED; + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int multiplication can result in overflow +/// @param [in] a multiplicator +/// @param [in] b multiplicator +/// @return Status +/// +inline Status CheckIntMulOverflow(int a, int b) { + if (a > 0) { + if (b > 0) { + if (a > (INT_MAX / b)) { + return FAILED; + } + } else { + if (b < (INT_MIN / a)) { + return FAILED; + } + } + } else { + if (b > 0) { + if (a < (INT_MIN / b)) { + return FAILED; + } + } else { + if ((a != 0) && (b < (INT_MAX / a))) { + return FAILED; + } + } + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int32 multiplication can result in overflow +/// @param [in] a multiplicator +/// @param [in] b multiplicator +/// @return Status +/// +inline Status CheckInt32MulOverflow(int32_t a, int32_t b) { + if (a > 0) { + if (b > 0) { + if (a > (INT32_MAX / b)) { + return FAILED; + } + } else { + if (b < (INT32_MIN / a)) { + return FAILED; + } + } + } else { + if (b > 0) { + if (a < (INT32_MIN / b)) { + return FAILED; + } + } else { + if ((a != 0) && (b < (INT32_MAX / a))) { + return FAILED; + } + } + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int64 int32 multiplication can result in overflow +/// @param [in] a multiplicator +/// @param [in] b multiplicator +/// @return Status +/// +inline Status CheckInt64Int32MulOverflow(int64_t a, int32_t b) { + if (a > 0) { + if (b > 0) { + if (a > (INT64_MAX / b)) { + return FAILED; + } + } else { + if (b < (INT64_MIN / a)) { + return FAILED; + } + } + } else { + if (b > 0) { + if (a < (INT64_MIN / b)) { + return FAILED; + } + } else { + if ((a != 0) && (b < (INT64_MAX / a))) { + return 
FAILED; + } + } + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int64 multiplication can result in overflow +/// @param [in] a multiplicator +/// @param [in] b multiplicator +/// @return Status +/// +inline Status CheckInt64Uint32MulOverflow(int64_t a, uint32_t b) { + if (a == 0 || b == 0) { + return SUCCESS; + } + if (a > 0) { + if (a > (INT64_MAX / b)) { + return FAILED; + } + } else { + if (a < (INT64_MIN / b)) { + return FAILED; + } + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether uint32 multiplication can result in overflow +/// @param [in] a multiplicator +/// @param [in] b multiplicator +/// @return Status +/// +inline Status CheckUint32MulOverflow(uint32_t a, uint32_t b) { + if (a == 0 || b == 0) { + return SUCCESS; + } + + if (a > (UINT32_MAX / b)) { + return FAILED; + } + + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int division can result in overflow +/// @param [in] a dividend +/// @param [in] b divisor +/// @return Status +/// +inline Status CheckIntDivOverflow(int a, int b) { + if ((b == 0) || ((a == INT_MIN) && (b == -1))) { + return FAILED; + } + return SUCCESS; +} + +/// +/// @ingroup math_util +/// @brief check whether int32 division can result in overflow +/// @param [in] a dividend +/// @param [in] b divisor +/// @return Status +/// +inline Status CheckInt32DivOverflow(int32_t a, int32_t b) { + if ((b == 0) || ((a == INT32_MIN) && (b == -1))) { + return FAILED; + } + return SUCCESS; +} + +#define FMK_INT_ADDCHECK(a, b) \ + if (CheckIntAddOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "Int %d and %d addition can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_INT32_ADDCHECK(a, b) \ + if (CheckInt32AddOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "Int32 %d and %d addition can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_UINT32_ADDCHECK(a, b) \ + if 
(CheckUint32AddOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "UINT32 %u and %u addition can result in overflow!", static_cast(a), \ + static_cast(b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_INT_SUBCHECK(a, b) \ + if (CheckIntSubOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "INT %d and %d subtraction can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_INT32_SUBCHECK(a, b) \ + if (CheckInt32SubOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "INT32 %d and %d subtraction can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_INT_MULCHECK(a, b) \ + if (CheckIntMulOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "INT %d and %d multiplication can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_INT32_MULCHECK(a, b) \ + if (CheckInt32MulOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "INT32 %d and %d multiplication can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_UINT32_MULCHECK(a, b) \ + if (CheckUint32MulOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "UINT32 %u and %u multiplication can result in overflow!", static_cast(a), \ + static_cast(b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_INT_DIVCHECK(a, b) \ + if (CheckIntDivOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "INT %d and %d division can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_INT32_DIVCHECK(a, b) \ + if (CheckInt32DivOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "INT32 %d and %d division can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } + +#define FMK_INT64_UINT32_MULCHECK(a, b) \ + if (CheckInt64Uint32MulOverflow((a), (b)) != SUCCESS) { \ + GELOGE(INTERNAL_ERROR, "INT64 %ld and UINT32 %u multiplication can result in overflow!", (a), (b)); \ + return INTERNAL_ERROR; \ + } +} // namespace ge + +#endif // 
GE_COMMON_MATH_MATH_UTIL_H_ diff --git a/src/ge/common/math_util.h b/src/ge/common/math_util.h new file mode 100644 index 00000000..2ff99bc1 --- /dev/null +++ b/src/ge/common/math_util.h @@ -0,0 +1,76 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_MATH_UTIL_H_ +#define GE_COMMON_MATH_UTIL_H_ + +#include +#include + +#include + +#include "Eigen/Eigen" +#include "framework/common/debug/log.h" +#include "framework/common/types.h" +#include "framework/common/util.h" +#include "mmpa/mmpa_api.h" +#include "unsupported/Eigen/CXX11/Tensor" + +namespace ge { + +/** +* @ingroup domi_calibration +* @brief Initializes an input array to a specified value +* @param [in] n array initialization length +* @param [in] alpha initialization value +* @param [out] output array to be initialized +* @return Status +*/ +template +Status NnSet(const int32_t n, const Dtype alpha, Dtype *output) { + GE_CHECK_NOTNULL(output); + + if (alpha == 0) { + if (sizeof(Dtype) * n < SECUREC_MEM_MAX_LEN) { + errno_t err = memset_s(output, sizeof(Dtype) * n, 0, sizeof(Dtype) * n); + GE_CHK_BOOL_RET_STATUS(err == EOK, PARAM_INVALID, "memset_s err"); + } else { + uint64_t size = static_cast(sizeof(Dtype) * n); + uint64_t step = SECUREC_MEM_MAX_LEN - (SECUREC_MEM_MAX_LEN % sizeof(Dtype)); + uint64_t times = size / step; + uint64_t remainder = size % step; + uint64_t i = 0; + while (i < times) { + errno_t err = 
memset_s(output + i * (step / sizeof(Dtype)), step, 0, step); + GE_CHK_BOOL_RET_STATUS(err == EOK, PARAM_INVALID, "memset_s err"); + i++; + } + if (remainder != 0) { + errno_t err = memset_s(output + i * (step / sizeof(Dtype)), remainder, 0, remainder); + GE_CHK_BOOL_RET_STATUS(err == EOK, PARAM_INVALID, "memset_s err"); + } + } + } + + for (int32_t i = 0; i < n; ++i) { + output[i] = alpha; + } + return SUCCESS; +} + +} // namespace ge + +#endif // GE_COMMON_MATH_UTIL_H_ diff --git a/src/ge/common/model_parser/base.cc b/src/ge/common/model_parser/base.cc new file mode 100644 index 00000000..79b885ed --- /dev/null +++ b/src/ge/common/model_parser/base.cc @@ -0,0 +1,110 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/model_parser/base.h" + +#include +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/util.h" + +namespace ge { +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ModelParserBase::ModelParserBase() {} +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ModelParserBase::~ModelParserBase() {} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelParserBase::LoadFromFile(const char *model_path, + const char *key, int32_t priority, + ge::ModelData &model_data) { + std::string real_path = RealPath(model_path); + if (real_path.empty()) { + GELOGE(PARAM_INVALID, "Model file path '%s' is invalid", model_path); + return PARAM_INVALID; + } + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(GetFileLength(model_path) == -1, return FAILED, "File size not valid."); + + std::ifstream fs(real_path.c_str(), std::ifstream::binary); + + GE_CHK_BOOL_RET_STATUS(fs.is_open(), FAILED, "Open file failed! path:%s", model_path); + + // get length of file: + (void)fs.seekg(0, std::ifstream::end); + int64_t len = fs.tellg(); + + GE_CHECK_GE(len, 1); + + (void)fs.seekg(0, std::ifstream::beg); + + char *data = new (std::nothrow) char[len]; + if (data == nullptr) { + GELOGE(MEMALLOC_FAILED, "Load model From file failed, bad memory allocation occur. (need:%ld)", len); + return MEMALLOC_FAILED; + } + + // read data as a block: + (void)fs.read(data, len); + + // Set the model data parameter + model_data.model_data = data; + model_data.model_len = len; + model_data.priority = priority; + model_data.key = (key == nullptr) ? 
"" : key; + + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelParserBase::ParseModelContent(const ge::ModelData &model, + uint8_t *&model_data, + uint32_t &model_len) { + // Parameter validity check + GE_CHECK_NOTNULL(model.model_data); + + // Model length too small + GE_CHK_BOOL_RET_STATUS(model.model_len >= sizeof(ModelFileHeader), PARAM_INVALID, + "Invalid model. length < sizeof(ModelFileHeader)."); + // Get file header + auto file_header = reinterpret_cast(model.model_data); + // Determine whether the file length and magic number match + GE_CHK_BOOL_RET_STATUS( + file_header->length == model.model_len - sizeof(ModelFileHeader) && file_header->magic == MODEL_FILE_MAGIC_NUM, + PARAM_INVALID, + "Invalid model. file_header->length + sizeof(ModelFileHeader) != model->model_len || MODEL_FILE_MAGIC_NUM != " + "file_header->magic"); + + Status res = SUCCESS; + + // Get data address + uint8_t *data = reinterpret_cast(model.model_data) + sizeof(ModelFileHeader); + if (file_header->is_encrypt == ModelEncryptType::UNENCRYPTED) { // Unencrypted model + GE_CHK_BOOL_RET_STATUS(model.key.empty(), PARAM_INVALID, + "Invalid param. model is unencrypted, but key is not empty."); + + model_data = data; + model_len = file_header->length; + GELOGI("model_len is %u, model_file_head_len is %zu.", model_len, sizeof(ModelFileHeader)); + } else { + GELOGE(PARAM_INVALID, "Invalid model. ModelEncryptType not supported."); + res = PARAM_INVALID; + } + + return res; +} +} // namespace ge diff --git a/src/ge/common/model_parser/base.h b/src/ge/common/model_parser/base.h new file mode 100644 index 00000000..ffc430e8 --- /dev/null +++ b/src/ge/common/model_parser/base.h @@ -0,0 +1,67 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_MODEL_PARSER_BASE_H_ +#define GE_COMMON_MODEL_PARSER_BASE_H_ + +#include + +#include "framework/common/debug/log.h" +#include "framework/common/ge_types.h" +#include "framework/common/types.h" +#include "framework/common/util.h" + +namespace ge { +class ModelParserBase { + public: + /// + /// @ingroup hiai + /// @brief constructor + /// + ModelParserBase(); + /// + /// @ingroup hiai + /// @brief destructor + /// + virtual ~ModelParserBase(); + + /// + /// @ingroup hiai + /// @brief Parsing a model file + /// @param [in] model_file model path + /// @param [in] model_key model secret key + /// @param [in] priority modle priority + /// @param [out] model_data model data + /// @return Status result + /// + static Status LoadFromFile(const char *model_file, const char *model_key, int32_t priority, + ge::ModelData &model_data); + + /// + /// @ingroup domi_ome + /// @brief Parse model contents from the ModelData + /// @param [in] model model data read from file + /// @param [out] model_data address of the model data + /// @param [out] model_len model actual length + /// If the input is an encrypted model, it needs to be deleted + /// @return SUCCESS success + /// @return others failure + /// @author + /// + static Status ParseModelContent(const ge::ModelData &model, uint8_t *&model_data, uint32_t &model_len); +}; +} // namespace ge +#endif // GE_COMMON_MODEL_PARSER_BASE_H_ diff --git a/src/ge/common/model_saver.cc b/src/ge/common/model_saver.cc new file mode 100755 index 00000000..c5328578 --- /dev/null +++ 
b/src/ge/common/model_saver.cc @@ -0,0 +1,79 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/model_saver.h" + +#include +#include +#include +#include +#include +#include + +#include "framework/common/debug/log.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/util.h" + +namespace ge { +const uint32_t kInteval = 2; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelSaver::SaveJsonToFile(const char *file_path, + const Json &model) { + Status ret = SUCCESS; + if (file_path == nullptr || SUCCESS != CheckPath(file_path)) { + GELOGE(FAILED, "Check output file failed."); + return FAILED; + } + std::string model_str; + try { + model_str = model.dump(kInteval, ' ', false, Json::error_handler_t::ignore); + } catch (std::exception &e) { + GELOGE(FAILED, "Transfer json to string failed, reason: %s.", e.what()); + return FAILED; + } catch (...) 
{ + GELOGE(FAILED, "Transfer json to string failed."); + return FAILED; + } + + char real_path[PATH_MAX] = {0}; + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(strlen(file_path) >= PATH_MAX, return FAILED, "file path is too long!"); + GE_IF_BOOL_EXEC(realpath(file_path, real_path) == nullptr, + GELOGI("File %s does not exit, it will be created.", file_path)); + + // Open file + mode_t mode = S_IRUSR | S_IWUSR; + int32_t fd = mmOpen2(real_path, O_RDWR | O_CREAT | O_TRUNC, mode); + if (fd == EN_ERROR || fd == EN_INVALID_PARAM) { + GELOGE(FAILED, "Open file failed. file path : %s", file_path); + return FAILED; + } + const char *model_char = model_str.c_str(); + uint32_t len = static_cast(model_str.length()); + // Write data to file + mmSsize_t mmpa_ret = mmWrite(fd, const_cast((const void *)model_char), len); + if (mmpa_ret == EN_ERROR || mmpa_ret == EN_INVALID_PARAM) { + // Need to both print the error info of mmWrite and mmClose, so return ret after mmClose + GELOGE(FAILED, "Write to file failed. errno = %ld", mmpa_ret); + ret = FAILED; + } + // Close file + if (mmClose(fd) != EN_OK) { + GELOGE(FAILED, "Close file failed."); + ret = FAILED; + } + return ret; +} +} // namespace ge diff --git a/src/ge/common/model_saver.h b/src/ge/common/model_saver.h new file mode 100644 index 00000000..411d5e35 --- /dev/null +++ b/src/ge/common/model_saver.h @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

#ifndef GE_COMMON_MODEL_SAVER_H_
#define GE_COMMON_MODEL_SAVER_H_

#include "common/auth/file_saver.h"
#include "nlohmann/json.hpp"
#include "framework/common/types.h"

/**
* Provide read and write operations for offline model files
*/
namespace ge {
using Json = nlohmann::json;

// Persists model description data as JSON; inherits generic file-writing
// helpers (e.g. output-path validation) from FileSaver.
class ModelSaver : public FileSaver {
 public:
  /**
   * @ingroup domi_common
   * @brief Save JSON object to file
   * @param [in] file_path File output path (must be a valid, writable path)
   * @param [in] model json object to serialize
   * @return Status result — SUCCESS, or FAILED on any path/serialize/IO error
   */
  static Status SaveJsonToFile(const char *file_path, const Json &model);
};
}  // namespace ge

#endif  // GE_COMMON_MODEL_SAVER_H_

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ + +#include "framework/common/op/attr_value_util.h" + +#include "framework/common/debug/log.h" +#include "framework/common/util.h" + +namespace ge { +#define DEFINE_SET_ATTR_VALUE_ONE(ARG_TYPE, FIELD) \ + FMK_FUNC_DEV_VISIBILITY void SetAttrDef(ARG_TYPE value, AttrDef *out) { \ + GE_CHECK_NOTNULL_JUST_RETURN(out); \ + out->set_##FIELD(value); \ + } + +#define DEFINE_SET_ATTR_VALUE_LIST(ARG_TYPE, FIELD) \ + FMK_FUNC_DEV_VISIBILITY void SetAttrList(ARG_TYPE value, AttrDef *out) { \ + GE_CHECK_NOTNULL_JUST_RETURN(out); \ + GE_CHECK_NOTNULL_JUST_RETURN(out->mutable_list()); \ + out->mutable_list()->add_##FIELD(value); \ + } + +DEFINE_SET_ATTR_VALUE_ONE(const std::string &, s); +DEFINE_SET_ATTR_VALUE_ONE(const char *, s); +DEFINE_SET_ATTR_VALUE_ONE(const uint32_t, u); +DEFINE_SET_ATTR_VALUE_ONE(const int32_t, i); +DEFINE_SET_ATTR_VALUE_ONE(const int64_t, i); +DEFINE_SET_ATTR_VALUE_ONE(const float, f); +DEFINE_SET_ATTR_VALUE_ONE(const double, f); +DEFINE_SET_ATTR_VALUE_ONE(const bool, b); +DEFINE_SET_ATTR_VALUE_LIST(float, f); +DEFINE_SET_ATTR_VALUE_LIST(double, f); +DEFINE_SET_ATTR_VALUE_LIST(uint32_t, u); +DEFINE_SET_ATTR_VALUE_LIST(int32_t, i); +DEFINE_SET_ATTR_VALUE_LIST(bool, b); +DEFINE_SET_ATTR_VALUE_LIST(int64_t, i); +DEFINE_SET_ATTR_VALUE_LIST(const std::string &, s); + +#define ADD_TO_ATTR_MAP(KEY, VALUE, ATTR_MAP) \ + do { \ + GE_CHECK_NOTNULL_JUST_RETURN(ATTR_MAP); \ + AttrDef out; \ + auto it = ATTR_MAP->find(KEY); \ + if (it != ATTR_MAP->end()) { \ + auto &attr_value = it->second; \ + SetAttrDef(VALUE, &attr_value); \ + } else { \ + SetAttrDef(VALUE, &out); \ + ATTR_MAP->insert(AttrDefPair(KEY, out)); \ + } \ + } while (0); + +#define ADD_TO_ATTR_MAP_LIST(KEY, VALUE, ATTR_MAP) \ + do { \ + GE_CHECK_NOTNULL_JUST_RETURN(ATTR_MAP); \ + AttrDef out; \ + auto it = ATTR_MAP->find(KEY); \ + if (it != ATTR_MAP->end()) { \ + auto &attr_value = it->second; \ + SetAttrList(VALUE, &attr_value); \ + } else { \ + SetAttrList(VALUE, &out); \ + 
ATTR_MAP->insert(AttrDefPair(KEY, out)); \ + } \ + } while (0); + +#define DEFINE_ADD_ATTR_VALUE(KEY_TYPE, VALUE_TYPE) \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void AddOpAttr(KEY_TYPE map_key, VALUE_TYPE value, OpDef *op_def) { \ + GE_CHECK_NOTNULL_JUST_RETURN(op_def); \ + auto attr = op_def->mutable_attr(); \ + ADD_TO_ATTR_MAP(map_key, value, attr) \ + } \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void AddOpAttr(KEY_TYPE map_key, VALUE_TYPE value, \ + AttrDefMap *attr_map) { \ + ADD_TO_ATTR_MAP(map_key, value, attr_map) \ + } \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void AddModelAttr(KEY_TYPE map_key, VALUE_TYPE value, \ + ModelDef *model_def) { \ + GE_CHECK_NOTNULL_JUST_RETURN(model_def); \ + auto attr = model_def->mutable_attr(); \ + ADD_TO_ATTR_MAP(map_key, value, attr) \ + } + +#define DEFINE_ADD_ATTR_VALUE_LIST(KEY_TYPE, VALUE_TYPE) \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void AddOpAttrList(KEY_TYPE map_key, VALUE_TYPE value, \ + OpDef *op_def) { \ + GE_CHECK_NOTNULL_JUST_RETURN(op_def); \ + auto attr = op_def->mutable_attr(); \ + ADD_TO_ATTR_MAP_LIST(map_key, value, attr) \ + } \ + FMK_FUNC_DEV_VISIBILITY void AddOpAttrList(KEY_TYPE map_key, VALUE_TYPE value, AttrDefMap *attr_map) { \ + ADD_TO_ATTR_MAP_LIST(map_key, value, attr_map) \ + } \ + FMK_FUNC_DEV_VISIBILITY void AddModelAttrList(KEY_TYPE map_key, VALUE_TYPE value, ModelDef *model_def) { \ + GE_CHECK_NOTNULL_JUST_RETURN(model_def); \ + auto attr = model_def->mutable_attr(); \ + ADD_TO_ATTR_MAP_LIST(map_key, value, attr) \ + } + +DEFINE_ADD_ATTR_VALUE(const std::string &, const std::string &); +DEFINE_ADD_ATTR_VALUE(const char *, const char *); +DEFINE_ADD_ATTR_VALUE(const std::string &, const char *); +DEFINE_ADD_ATTR_VALUE(const std::string &, const uint32_t); +DEFINE_ADD_ATTR_VALUE(const std::string &, const int32_t); +DEFINE_ADD_ATTR_VALUE(const std::string &, const int64_t); +DEFINE_ADD_ATTR_VALUE(const std::string &, const float); 
+DEFINE_ADD_ATTR_VALUE(const std::string &, const double); +DEFINE_ADD_ATTR_VALUE(const std::string &, const bool); +DEFINE_ADD_ATTR_VALUE_LIST(const std::string &, const uint32_t); +DEFINE_ADD_ATTR_VALUE_LIST(const std::string &, const float); +DEFINE_ADD_ATTR_VALUE_LIST(const std::string &, const double); +DEFINE_ADD_ATTR_VALUE_LIST(const std::string &, const int32_t); +DEFINE_ADD_ATTR_VALUE_LIST(const std::string &, const bool); +DEFINE_ADD_ATTR_VALUE_LIST(const std::string &, const int64_t); +DEFINE_ADD_ATTR_VALUE_LIST(const std::string &, const std::string &); + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void AddOpAttr(const std::string &map_key, AttrDef &attr, + OpDef *op_def) { + GE_CHECK_NOTNULL_JUST_RETURN(op_def); + GE_CHECK_NOTNULL_JUST_RETURN(op_def->mutable_attr()); + (void)op_def->mutable_attr()->insert(AttrDefPair(map_key, attr)); +} + +#define DEFINE_GET_ATTR_VALUE(ARG_TYPE_KEY, ARG_TYPE_VALUE, FIELD) \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool GetAttrDefValue(ARG_TYPE_KEY map_key, ARG_TYPE_VALUE value, \ + const AttrDefMap &attr) { \ + auto it = attr.find(map_key); \ + if (it != attr.end()) { \ + *value = it->second.FIELD(); \ + return true; \ + } \ + return false; \ + } + +#define DEFINE_GET_ATTR_POINT_REF(ARG_TYPE_KEY, ARG_TYPE_VALUE, FIELD) \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool GetAttrDefValue(ARG_TYPE_KEY map_key, ARG_TYPE_VALUE *&value, \ + AttrDefMap *attr) { \ + GE_RT_FALSE_CHECK_NOTNULL(attr); \ + auto it = attr->find(map_key); \ + if (it != attr->end()) { \ + value = it->second.mutable_##FIELD(); \ + return true; \ + } \ + return false; \ + } + +#define DEFINE_GET_ATTR_CONST_POINT_REF(ARG_TYPE_KEY, ARG_TYPE_VALUE, FIELD) \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool GetAttrDefValue( \ + ARG_TYPE_KEY map_key, const ARG_TYPE_VALUE *&value, const AttrDefMap &attr) { \ + auto it = attr.find(map_key); \ + if (it == attr.end()) { \ + return false; \ + } \ + \ + value = &(it->second.FIELD()); \ 
+ return true; \ + } + +#define DEFINE_GET_BYTES_ATTR_VALUE(ARG_TYPE_KEY, ARG_TYPE_VALUE) \ + bool GetBytesValue(ARG_TYPE_KEY key, ARG_TYPE_VALUE value, const AttrDefMap &attr) { \ + GE_RT_FALSE_CHECK_NOTNULL(value); \ + auto it = attr.find(key); \ + if (it != attr.end()) { \ + *value = it->second.bt(); \ + return true; \ + } \ + return false; \ + } + +#define DEFINE_GET_ATTR_LIST_VALUE(ARG_TYPE_KEY, ARG_TYPE_VALUE, FIELD) \ + FMK_FUNC_DEV_VISIBILITY bool GetAttrDefListValue(ARG_TYPE_KEY map_key, int idx, ARG_TYPE_VALUE value, \ + const AttrDefMap &attr) { \ + auto it = attr.find(map_key); \ + if (it == attr.end()) { \ + return false; \ + } \ + \ + const auto &list = it->second.list(); \ + if (idx < 0 || idx > list.FIELD##_size() - 1) { \ + return false; \ + } \ + \ + *value = list.FIELD(idx); \ + return true; \ + } + +DEFINE_GET_ATTR_VALUE(const std::string &, std::string *, s); +DEFINE_GET_ATTR_VALUE(const std::string &, int32_t *, i); +DEFINE_GET_ATTR_VALUE(const std::string &, int64_t *, i); +DEFINE_GET_ATTR_VALUE(const std::string &, uint32_t *, u); +DEFINE_GET_ATTR_VALUE(const std::string &, float *, f); +DEFINE_GET_ATTR_VALUE(const std::string &, double *, f); +DEFINE_GET_ATTR_VALUE(const std::string &, bool *, b); +DEFINE_GET_ATTR_VALUE(const std::string &, AttrDef_ListValue *, list); + +DEFINE_GET_ATTR_LIST_VALUE(const std::string &, int32_t *, i); +DEFINE_GET_ATTR_LIST_VALUE(const std::string &, uint32_t *, u); +DEFINE_GET_ATTR_LIST_VALUE(const std::string &, float *, f); +DEFINE_GET_ATTR_LIST_VALUE(const std::string &, double *, f); + +DEFINE_GET_ATTR_POINT_REF(const std::string &, NamedAttrs, func); +DEFINE_GET_ATTR_CONST_POINT_REF(const std::string &, NamedAttrs, func); + +DEFINE_GET_BYTES_ATTR_VALUE(const std::string &, std::string *); + +#define DEFINE_GET_OP_ATTR(ARG_TYPE_KEY, ARG_TYPE_VALUE) \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool GetOpAttr(ARG_TYPE_KEY map_key, ARG_TYPE_VALUE value, \ + const OpDef *op_def) { \ + 
GE_RT_FALSE_CHECK_NOTNULL(op_def); \ + return GetAttrDefValue(map_key, value, op_def->attr()); \ + } \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool GetModelAttr(ARG_TYPE_KEY map_key, ARG_TYPE_VALUE value, \ + const ModelDef *model_def) { \ + GE_RT_FALSE_CHECK_NOTNULL(model_def); \ + return GetAttrDefValue(map_key, value, model_def->attr()); \ + } + +DEFINE_GET_OP_ATTR(const std::string &, std::string *); +DEFINE_GET_OP_ATTR(const std::string &, int32_t *); +DEFINE_GET_OP_ATTR(const std::string &, int64_t *); +DEFINE_GET_OP_ATTR(const std::string &, uint32_t *); +DEFINE_GET_OP_ATTR(const std::string &, float *); +DEFINE_GET_OP_ATTR(const std::string &, double *); +DEFINE_GET_OP_ATTR(const std::string &, bool *); +DEFINE_GET_OP_ATTR(const std::string &, AttrDef_ListValue *); + +#define DEFINE_GET_BT_ATTR(ARG_TYPE_KEY, ARG_TYPE_VALUE) \ + FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool GetBytesAttr(ARG_TYPE_KEY key, ARG_TYPE_VALUE value, \ + const OpDef *op_def) { \ + GE_RT_FALSE_CHECK_NOTNULL(op_def); \ + return GetBytesValue(key, value, op_def->attr()); \ + } \ + FMK_FUNC_DEV_VISIBILITY bool GetBytesAttr(ARG_TYPE_KEY key, ARG_TYPE_VALUE value, const ModelDef *model_def) { \ + GE_RT_FALSE_CHECK_NOTNULL(model_def); \ + return GetBytesValue(key, value, model_def->attr()); \ + } + +DEFINE_GET_BT_ATTR(const std::string &, std::string *); + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool HasOpAttr(const OpDef *op_def, const std::string &attr_name) { + if (op_def == nullptr) { + return false; + } + const AttrDefMap &attr = op_def->attr(); + + const AttrDefMap::const_iterator it = attr.find(attr_name); + if (it != attr.end()) { + return true; + } + return false; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void AddModelAttr(const std::string &map_key, const void *value, + size_t size, ModelDef *model_def) { + if (model_def == nullptr) { + return; + } + AttrDef out; + auto attr = model_def->mutable_attr(); + auto it = attr->find(map_key); + 
if (it != attr->end()) { + auto &attr_value = it->second; + attr_value.set_bt(value, size); + } else { + out.set_bt(value, size); + attr->insert(AttrDefPair(map_key, out)); + } +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void AddOpBytesAttr(const std::string &key, const void *value, + size_t size, OpDef *op_def) { + if (op_def == nullptr) { + return; + } + AttrDef out; + auto attr = op_def->mutable_attr(); + auto it = attr->find(key); + if (it != attr->end()) { + auto &attr_value = it->second; + attr_value.set_bt(value, size); + } else { + out.set_bt(value, size); + attr->insert(AttrDefPair(key, out)); + } +} + +#define DEFINE_GET_ATTR_LIST_SIZE(ARG_TYPE_KEY, ARG_TYPE_VALUE, FIELD) \ + FMK_FUNC_DEV_VISIBILITY uint32_t GetOpAttrListSize(ARG_TYPE_KEY key, ARG_TYPE_VALUE value, const OpDef *op_def) { \ + GE_CHK_BOOL_RET_STATUS_NOLOG(op_def != nullptr, 0); \ + const AttrDefMap &attr_map = op_def->attr(); \ + auto it = attr_map.find(key); \ + if (it == attr_map.end()) { \ + return 0; \ + } \ + const auto &list = it->second.list(); \ + return list.FIELD##_size(); \ + } + +DEFINE_GET_ATTR_LIST_SIZE(const std::string &, const std::string &, s); +DEFINE_GET_ATTR_LIST_SIZE(const std::string &, int32_t, i); +DEFINE_GET_ATTR_LIST_SIZE(const std::string &, int64_t, i); +DEFINE_GET_ATTR_LIST_SIZE(const std::string &, uint32_t, u); +DEFINE_GET_ATTR_LIST_SIZE(const std::string &, float, f); +DEFINE_GET_ATTR_LIST_SIZE(const std::string &, double, f); +DEFINE_GET_ATTR_LIST_SIZE(const std::string &, bool, b); +} // namespace ge diff --git a/src/ge/common/op/ge_op_utils.cc b/src/ge/common/op/ge_op_utils.cc new file mode 100644 index 00000000..cce9b91d --- /dev/null +++ b/src/ge/common/op/ge_op_utils.cc @@ -0,0 +1,898 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "framework/common/op/ge_op_utils.h" + +#include + +#include "cce/dnn.h" +#include "cce/dnn_struct.hpp" +#include "common/ge/ge_util.h" +#include "external/graph/types.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/fmk_error_codes.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/op/attr_value_util.h" +#include "framework/common/util.h" +#include "graph/anchor.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" +#include "mmpa/mmpa_api.h" + +#define RETURN_IF_TRUE(cond, errcode, ...) 
\ + do { \ + if (cond) { \ + GELOGE(errcode, __VA_ARGS__); \ + return errcode; \ + } \ + } while (0); + +using domi::DOMI_TENSOR_NCHW; +using std::vector; + +namespace ge { +// General constant +const int32_t kDimMaxSize = 8; +const float DEFAULT_ALPHA_VALUE = 1.0; +const float DEFAULT_BETA_VALUE = 0.0; +const int NORMAL_TENSOR_SIZE = 4; +const int32_t kDimSizeZero = 0; +const int32_t kDimSizeOne = 1; +const int32_t kDimSizeTwo = 2; +const int32_t kDimSizeThree = 3; +const uint32_t kSliceDataNum = 2; + +// Add Sub Mul +const uint32_t ADD_INPUT_NUM = 2; +const uint32_t SUB_INPUT_NUM = 2; +const uint32_t MUL_INPUT_NUM = 2; + +// Permute +const int32_t PERMUTE_ORDER_NUM = 4; +// Ssd PriroBox +const double SSD_PRIORBOX_ASPECT_RATIO_VALUE = 1.0; +// Switch +const uint32_t SWITCH_INPUT_NUM = 2; +const uint32_t SWITCH_OUTPUT_NUM = 2; +const uint32_t SWITCH_FALSE_OUTPUT = 0; +const uint32_t SWITCH_TRUE_OUTPUT = 1; +const uint32_t SWITCH_DATA_INPUT = 0; +const uint32_t SWITCH_PRED_INPUT = 1; +// Internal constant +const uint32_t kPoolMaskDescWinH = 4; +const uint32_t kPoolMaskDescWinW = 5; +const uint32_t kPoolMaskDescDimSize = 6; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool OpUtils::IsComputDimsSize(const int32_t format, + const uint32_t real_dim_cnt) { + return ((format == cce::CC_TENSOR_ND) || + ((format != cce::CC_TENSOR_NC1KHKWHWC0) && (format != cce::CC_TENSOR_C1HWNCoC0) && + (real_dim_cnt > DIM_DEFAULT_SIZE))); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +OpUtils::InitTensorDescriptor(const GeTensorDesc &tensor, cce::ccTensorDescriptor_t &cc_tensor) { + return InitTensorDescriptor(tensor, static_cast(tensor.GetDataType()), cc_tensor); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OpUtils::InitTensorDescriptor( + const GeTensorDesc &model_tensor, int32_t dst_data_type, cce::ccTensorDescriptor_t &cc_tensor) { + uint32_t real_dim_cnt = OpUtils::GetRealDimCnt(model_tensor); + return 
InitTensorDescriptor(static_cast(model_tensor.GetFormat()), dst_data_type, + model_tensor.GetShape().GetDims(), cc_tensor, real_dim_cnt); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +OpUtils::InitTensorDescriptor(const GeTensor &model_tensor, cce::ccTensorDescriptor_t &cc_tensor) { + return InitTensorDescriptor(model_tensor, static_cast(model_tensor.GetTensorDesc().GetDataType()), + cc_tensor); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OpUtils::InitTensorDescriptor( + const GeTensor &model_tensor, int32_t dst_data_type, cce::ccTensorDescriptor_t &cc_tensor) { + const GeTensorDesc &tensor_desc = model_tensor.GetTensorDesc(); + const GeShape &shape = tensor_desc.GetShape(); + return InitTensorDescriptor(static_cast(tensor_desc.GetFormat()), dst_data_type, shape.GetDims(), cc_tensor, + static_cast(shape.GetDimNum())); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +OpUtils::InitTensorDescriptor(int32_t format, int32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt) { + Status ret = SUCCESS; + ccDataType_t data_type_ = cce::tagCcDataType(data_type); + real_dim_cnt = + static_cast(((real_dim_cnt == 0) && (dim.size() > DIM_DEFAULT_SIZE)) ? dim.size() : real_dim_cnt); + if (IsComputDimsSize(format, real_dim_cnt)) { + GE_CHK_CCE_RET(cce::ccCreateTensorDescriptor(&cc_tensor)); +#if (defined(__GNUC__) && !(defined(__ICC) || defined(__INTEL_COMPILER))) && \ + (__GNUC__ * 10000 + __GNUC_MINOR__ * 100 + __GNUC_PATCHLEVEL__) < 50000 + // Variable length array initialization is not supported in gcc4. 
X compilation environment + GE_CHK_BOOL_RET_STATUS(real_dim_cnt <= CC_DIM_MAX, domi::CCE_FAILED, "real_dim_cnt support <= 8."); + int32_t real_dim[CC_DIM_MAX] = {0}; +#else + int32_t real_dim[real_dim_cnt] = {}; +#endif + uint32_t i = 0; + for (auto dim_temp : dim) { + GE_CHK_BOOL_EXEC_NOLOG(i < real_dim_cnt && i < kDimMaxSize, break); + real_dim[i] = static_cast(dim_temp); + i++; + } + + auto cc_ret = cce::ccSetTensorNdDescriptor(cc_tensor, data_type_, real_dim_cnt, real_dim); + GE_IF_BOOL_EXEC(cc_ret != cce::CC_STATUS_SUCCESS, + GELOGE(domi::CCE_FAILED, "Call cce failed. cc_ret = %d", cc_ret); + GE_CHK_CCE(cce::ccDestroyTensorDescriptor(&cc_tensor)); return domi::CCE_FAILED); + + return ret; + } else if (format == cce::CC_TENSOR_NC1KHKWHWC0) { + GE_CHK_CCE_RET(cce::ccCreatePoolingMaskDescriptor(&cc_tensor)); + cce::ccTensorFormat_t format_new = cce::tagCcTensorFormat(format); + GE_IF_BOOL_EXEC( + dim.size() != kPoolMaskDescDimSize, + GELOGE(PARAM_INVALID, "format CC_TENSOR_NC1KHKWHWC0 dim size must be 6,dim size id %lu.", dim.size()); + GE_CHK_CCE(cce::ccDestroyTensorDescriptor(&cc_tensor)); return PARAM_INVALID); + auto cc_ret = ccSetPoolingMaskTensorDescriptor( + cc_tensor, format_new, data_type_, static_cast(dim[NCHW_DIM_N]), + static_cast(dim[NCHW_DIM_C]), static_cast(dim[NCHW_DIM_H]), + static_cast(dim[NCHW_DIM_W]), static_cast(dim[kPoolMaskDescWinH]), + static_cast(dim[kPoolMaskDescWinW])); + + GE_IF_BOOL_EXEC(cc_ret != cce::CC_STATUS_SUCCESS, + GELOGE(domi::CCE_FAILED, "Call cce failed. 
cc_ret = %d", cc_ret); + GE_CHK_CCE(cce::ccDestroyTensorDescriptor(&cc_tensor)); return domi::CCE_FAILED); + return ret; + } else if (format == cce::CC_TENSOR_C1HWNCoC0) { + GE_CHK_CCE_RET(cce::ccCreateTensorDescriptor(&cc_tensor)); + cce::ccTensorFormat_t format_new = cce::tagCcTensorFormat(format); + GE_IF_BOOL_EXEC( + dim.size() != DIM_C1HWNCoC0_SIZE, + GELOGE(PARAM_INVALID, "format C1HWNCoC0_DIM_SIZE dim size must be 5,dim size id %lu.", dim.size()); + GE_CHK_CCE(cce::ccDestroyTensorDescriptor(&cc_tensor)); return PARAM_INVALID); + + auto cc_ret = cce::ccSetFilter6dDescriptor( + cc_tensor, format_new, data_type_, static_cast(dim[C1HWNCoC0_DIM_C1]), + static_cast(dim[C1HWNCoC0_DIM_H]), static_cast(dim[C1HWNCoC0_DIM_W]), + static_cast(dim[C1HWNCoC0_DIM_N]), static_cast(dim[C1HWNCoC0_DIM_Co]), + static_cast(dim[C1HWNCoC0_DIM_C0])); + + GE_IF_BOOL_EXEC(cc_ret != cce::CC_STATUS_SUCCESS, GELOGE(CCE_FAILED, "Call cce failed. cc_ret = %d", cc_ret); + GE_CHK_CCE(cce::ccDestroyTensorDescriptor(&cc_tensor)); return CCE_FAILED); + + return ret; + } + std::vector dim_vector; + (void)TransferDim(dim, dim_vector); // TransferDim always return success, no need to check value + // format + if (!CheckEnumValid(format, cce::CC_TENSOR_NCHW, cce::CC_TENSOR_RESERVED)) { + GELOGE(PARAM_INVALID, "not supported format, format = %d", format); + return PARAM_INVALID; + } + cce::ccTensorFormat_t format_new = cce::tagCcTensorFormat(format); + + // data type + if (!CheckEnumValid(data_type, cce::CC_DATA_FLOAT, cce::CC_DATA_RESERVED)) { + GELOGE(PARAM_INVALID, "not supported data type, type = %d", data_type); + return PARAM_INVALID; + } + + // create tensor descriptor + GE_CHK_CCE_RET(cce::ccCreateTensorDescriptor(&cc_tensor)); + + // input shape + size_t input_shape_size = dim_vector.size(); + GE_IF_BOOL_EXEC(input_shape_size != DIM_DEFAULT_SIZE, GELOGI("input_shape_size is %zu", input_shape_size)); + + // The last two outputs of fusedbatchnormgrad are 0. 
Need special processing for fusedbatchnormgrad. + GE_IF_BOOL_EXEC(dim.size() == 1 && dim[0] == 0, + GE_IF_BOOL_EXEC(cce::ccSetTensorRealDimCnt(cc_tensor, real_dim_cnt) != cce::CC_STATUS_SUCCESS, + GELOGE(domi::CCE_FAILED, "Call cce failed."); + GE_CHK_CCE(cce::ccDestroyTensorDescriptor(&cc_tensor)); return domi::CCE_FAILED); + return ret); + + if (format == cce::CC_TENSOR_NHWC) { + auto cc_ret = cce::ccSetTensor4dDescriptor( + cc_tensor, format_new, data_type_, static_cast(dim_vector.at(NHWC_DIM_N)), + static_cast(dim_vector.at(NHWC_DIM_C)), static_cast(dim_vector.at(NHWC_DIM_H)), + static_cast(dim_vector.at(NHWC_DIM_W))); + + GE_IF_BOOL_EXEC(cc_ret != cce::CC_STATUS_SUCCESS, + GELOGE(domi::CCE_FAILED, "Call cce failed. cc_ret = %d", cc_ret); + ret = domi::CCE_FAILED); + } else if (format == cce::CC_TENSOR_HWCN) { + auto cc_ret = cce::ccSetTensor4dDescriptor( + cc_tensor, format_new, data_type_, static_cast(dim_vector.at(NHWC_DIM_C)), + static_cast(dim_vector.at(NHWC_DIM_W)), static_cast(dim_vector.at(NHWC_DIM_N)), + static_cast(dim_vector.at(NHWC_DIM_H))); + + GE_IF_BOOL_EXEC(cc_ret != cce::CC_STATUS_SUCCESS, + GELOGE(domi::CCE_FAILED, "Call cce failed. cc_ret = %d", cc_ret); + ret = domi::CCE_FAILED); + } else if (format >= cce::CC_TENSOR_HASHTABLE_LOOKUP_LOOKUPS && format <= cce::CC_TENSOR_HASHTABLE_LOOKUP_HITS) { + int32_t dims[dim.size()]; + for (size_t i = 0; i < dim.size(); i++) { + dims[i] = static_cast(dim[i]); + } + + auto cc_ret = cce::ccSetTensorNdDescriptor(cc_tensor, data_type_, static_cast(dim.size()), dims); + cce::ccSetTensorFormat(cc_tensor, format_new); + GE_IF_BOOL_EXEC(cc_ret != cce::CC_STATUS_SUCCESS, GELOGE(CCE_FAILED, "Call cce failed. 
cc_ret = %d", cc_ret); + ret = CCE_FAILED); + } else { + auto cc_ret = cce::ccSetTensor4dDescriptor( + cc_tensor, format_new, data_type_, static_cast(dim_vector.at(NHWC_DIM_N)), + static_cast(dim_vector.at(NHWC_DIM_H)), static_cast(dim_vector.at(NHWC_DIM_W)), + static_cast(dim_vector.at(NHWC_DIM_C))); + + GE_IF_BOOL_EXEC(cc_ret != cce::CC_STATUS_SUCCESS, + GELOGE(domi::CCE_FAILED, "Call cce failed. cc_ret = %d", cc_ret); + ret = domi::CCE_FAILED); + } + auto cc_ret = cce::ccSetTensorRealDimCnt(cc_tensor, real_dim_cnt); + GE_IF_BOOL_EXEC(cc_ret != cce::CC_STATUS_SUCCESS, GELOGE(domi::CCE_FAILED, "Call cce failed. cc_ret = %d", cc_ret); + ret = domi::CCE_FAILED); + + if (ret != SUCCESS) { + GE_CHK_CCE(cce::ccDestroyTensorDescriptor(&cc_tensor)); + cc_tensor = nullptr; + } + + return ret; +} + +// Initialize filter description +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +OpUtils::InitFilterDescriptor(const GeTensor &model_filter, cce::ccFilterDescriptor_t &cc_filter) { + const GeTensorDesc &tensor_desc = model_filter.GetTensorDesc(); + const GeShape &shape = tensor_desc.GetShape(); + const std::vector dims = shape.GetDims(); + + // format + RETURN_IF_TRUE(!CheckEnumValid(tensor_desc.GetFormat(), cce::CC_TENSOR_NCHW, cce::CC_TENSOR_RESERVED), PARAM_INVALID, + "not supported format, format = %d", tensor_desc.GetFormat()); + + uint32_t tmp_int = static_cast(tensor_desc.GetFormat()); + cce::ccTensorFormat_t format = cce::tagCcTensorFormat(tmp_int); + + // data type + RETURN_IF_TRUE(!CheckEnumValid(tensor_desc.GetDataType(), cce::CC_DATA_FLOAT, cce::CC_DATA_RESERVED), PARAM_INVALID, + "not supported data type, type = %s", + TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str()); + + uint32_t dt_tmp = static_cast(tensor_desc.GetDataType()); + + ccDataType_t dataType = cce::tagCcDataType(dt_tmp); + + // create filter descriptor + GE_CHK_CCE_RET(cce::ccCreateFilterDescriptor(&cc_filter)); + + Status ret = SUCCESS; + // input filter + size_t 
filter_shape_size = shape.GetDimNum(); + if (filter_shape_size == DIM_DEFAULT_SIZE) { + cce::ccStatus_t cc_ret = cce::CC_STATUS_SUCCESS; + + GE_IF_BOOL_EXEC(dims.size() < 4, GELOGE(domi::CCE_FAILED, "dims is invalid!"); return domi::CCE_FAILED); + + if (dataType == CC_DATA_INT8) { + cc_ret = ccSetInt8Filter4dDescriptor(cc_filter, format, dataType, static_cast(dims[KCHW_DIM_K]), + static_cast(dims[KCHW_DIM_C]), + static_cast(dims[KCHW_DIM_H]), + static_cast(dims[KCHW_DIM_W]), cce::CC_DATA_HALF); + } else if (format == cce::CC_TENSOR_FRACTAL_Z_C04 || format == cce::CC_TENSOR_FRACTAL_DECONV_SP_STRIDE_TRANS || + format == cce::CC_TENSOR_FRACTAL_Z || format == cce::CC_TENSOR_FRACTAL_DECONV) { + cc_ret = cce::ccSetFilterFractalDescriptor( + cc_filter, format, dataType, static_cast(dims[KCHW_DIM_K]), + static_cast(dims[KCHW_DIM_C]), static_cast(dims[KCHW_DIM_H]), + static_cast(dims[KCHW_DIM_W])); + } else if (format == cce::CC_TENSOR_NHWC) { + cc_ret = cce::ccSetFilter4dDescriptor(cc_filter, format, dataType, static_cast(dims[NHWC_DIM_N]), + static_cast(dims[NHWC_DIM_C]), + static_cast(dims[NHWC_DIM_H]), + static_cast(dims[NHWC_DIM_W])); + } else if (format == cce::CC_TENSOR_CHWN) { + cc_ret = cce::ccSetFilter4dDescriptor(cc_filter, format, dataType, static_cast(dims[CHWN_DIM_N]), + static_cast(dims[CHWN_DIM_C]), + static_cast(dims[CHWN_DIM_H]), + static_cast(dims[CHWN_DIM_W])); + } else if (format == cce::CC_TENSOR_HWCN) { + cc_ret = cce::ccSetFilter4dDescriptor(cc_filter, format, dataType, static_cast(dims[NHWC_DIM_C]), + static_cast(dims[NHWC_DIM_W]), + static_cast(dims[NHWC_DIM_N]), + static_cast(dims[NHWC_DIM_H])); + } else { + cc_ret = cce::ccSetFilter4dDescriptor(cc_filter, format, dataType, static_cast(dims[KCHW_DIM_K]), + static_cast(dims[KCHW_DIM_C]), + static_cast(dims[KCHW_DIM_H]), + static_cast(dims[KCHW_DIM_W])); + } + + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(domi::CCE_FAILED, "ccSetFilterDescriptor failed. 
cc_ret = %d, format1 = %d", cc_ret, format); + ret = domi::CCE_FAILED; + } + } else { + GELOGE(UNSUPPORTED, "not supported shape size. size = %d", filter_shape_size); + ret = UNSUPPORTED; + } + + if (ret != SUCCESS) { + GE_CHK_CCE(cce::ccDestroyFilterDescriptor(&cc_filter)); + cc_filter = nullptr; + } + + return ret; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool OpUtils::ConvertDim(cce::ccTensorFormat_t src_format, + const std::vector &src, + cce::ccTensorFormat_t dst_format, + std::vector &dst) { + // The input of 3-dimension and 4-dimension is considered as picture dimension, + // which needs to be converted according to specific format + if ((src.size() != DIM_DEFAULT_SIZE && src.size() != 3) || src_format == dst_format) { + GELOGI("Convert format , src size %zu <3 ,not need convert", src.size()); + dst = src; + return true; + } + + std::vector nchw_dim; + + switch (src_format) { + case cce::CC_TENSOR_NCHW: + nchw_dim = src; + break; + case cce::CC_TENSOR_NHWC: + if (src.size() == DIM_DEFAULT_SIZE) { + nchw_dim.push_back(src[NHWC_DIM_N]); + nchw_dim.push_back(src[NHWC_DIM_C]); + nchw_dim.push_back(src[NHWC_DIM_H]); + nchw_dim.push_back(src[NHWC_DIM_W]); + } else { + nchw_dim.push_back(src[HWC_DIM_C]); + nchw_dim.push_back(src[HWC_DIM_H]); + nchw_dim.push_back(src[HWC_DIM_W]); + } + break; + default: + GELOGW("Not support src format is %d", src_format); + return false; + } + + if (nchw_dim.empty()) { + GELOGW("Vector is empty!"); + return false; + } + + switch (dst_format) { + case cce::CC_TENSOR_NCHW: + dst = nchw_dim; + break; + case cce::CC_TENSOR_NHWC: + if (src.size() == DIM_DEFAULT_SIZE) { + dst.push_back(nchw_dim[NCHW_DIM_N]); + dst.push_back(nchw_dim[NCHW_DIM_H]); + dst.push_back(nchw_dim[NCHW_DIM_W]); + dst.push_back(nchw_dim[NCHW_DIM_C]); + } else { + dst.push_back(nchw_dim[CHW_DIM_H]); + dst.push_back(nchw_dim[CHW_DIM_W]); + dst.push_back(nchw_dim[CHW_DIM_C]); + } + break; + default: + GELOGW("Not support dst format of %d", dst_format); + 
return false; + } + + return true; +} +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void OpUtils::DestroyTensorDescriptor( + cce::ccTensorDescriptor_t &cc_tensor) noexcept { + if (cc_tensor != nullptr) { + cce::ccStatus_t ret = cce::ccDestroyTensorDescriptor(&cc_tensor); + GE_LOGE_IF(ret != cce::CC_STATUS_SUCCESS, "cce::ccDestroyTensorDescriptor failed. ret = %d", ret); + cc_tensor = nullptr; + } +} +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void OpUtils::DestroyFilterDescriptor( + cce::ccFilterDescriptor_t &cc_filter) { + if (cc_filter != nullptr) { + cce::ccStatus_t ret = ccDestroyFilterDescriptor(&cc_filter); + GE_LOGE_IF(ret != cce::CC_STATUS_SUCCESS, "ccDestroyFilterDescriptor failed. ret = %d", ret); + cc_filter = nullptr; + } +} + +// Get the value of key from attr +#define AIPP_GET_ATTR_VALUE(KEY, ATTR_TYPE) \ + if (aipp_attr.GetItem(#KEY).GetValue(KEY) != SUCCESS) { \ + GELOGI("Attr %s will take default value.", #KEY); \ + break; \ + } + +// Converting aippparams and attrdefmap +#define AIPP_CONVERT_FORMAT_EX(KEY, ORG_TYPE, SAVE_TYPE, ATTR_TYPE) \ + do { \ + SAVE_TYPE KEY = static_cast(0); \ + AIPP_GET_ATTR_VALUE(KEY, ATTR_TYPE) \ + aipp_params->set_##KEY(ORG_TYPE(KEY)); \ + } while (0) + +// Converting aippparams and attrdefmap +#define AIPP_CONVERT_FORMAT(KEY, KEY_TYPE, ATTR_TYPE) AIPP_CONVERT_FORMAT_EX(KEY, KEY_TYPE, KEY_TYPE, ATTR_TYPE) + +#define AIPP_CONVERT_INT(KEY) AIPP_CONVERT_FORMAT(KEY, int64_t, GeAttrValue::INT) + +#define AIPP_CONVERT_BOOL(KEY) AIPP_CONVERT_FORMAT(KEY, bool, GeAttrValue::BOOL) + +#define AIPP_CONVERT_FLOAT(KEY) AIPP_CONVERT_FORMAT(KEY, float, GeAttrValue::FLOAT) + +// Transform aippparams (with repeated decoration) and attrdefmap +#define AIPP_CONVERT_LIST_FORMAT(KEY, KEY_TYPE, REQUIRED, ATTR_TYPE) \ + do { \ + if (REQUIRED) { \ + KEY_TYPE KEY; \ + AIPP_GET_ATTR_VALUE(KEY, ATTR_TYPE) \ + aipp_params->add_##KEY(KEY); \ + } \ + } while (0) + +#define AIPP_CONVERT_LIST_INT(KEY, REQUIRED) AIPP_CONVERT_LIST_FORMAT(KEY, 
int64_t, REQUIRED, GeAttrValue::INT) + +#define AIPP_CONVERT_LIST_BOOL(KEY, REQUIRED) AIPP_CONVERT_LIST_FORMAT(KEY, bool, REQUIRED, GeAttrValue::BOOL) + +#define AIPP_CONVERT_LIST_FLOAT(KEY, REQUIRED) AIPP_CONVERT_LIST_FORMAT(KEY, float, REQUIRED, GeAttrValue::FLOAT) + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +OpUtils::ConvertAippParams(const GeAttrValue::NamedAttrs &aipp_attr, domi::AippOpParams *aipp_params) { + GE_CHECK_NOTNULL(aipp_params); + AIPP_CONVERT_FORMAT_EX(aipp_mode, domi::AippOpParams::AippMode, int32_t, GeAttrValue::INT); + + if (aipp_params->aipp_mode() == domi::AippOpParams::dynamic) { + AIPP_CONVERT_INT(max_src_image_size); + AIPP_CONVERT_BOOL(support_rotation); + } else { + AIPP_CONVERT_FORMAT_EX(input_format, domi::AippOpParams::InputFormat, int32_t, GeAttrValue::INT); + AIPP_CONVERT_BOOL(csc_switch); + AIPP_CONVERT_BOOL(crop); + AIPP_CONVERT_INT(load_start_pos_w); + AIPP_CONVERT_INT(load_start_pos_h); + AIPP_CONVERT_INT(crop_size_w); + AIPP_CONVERT_INT(crop_size_h); + AIPP_CONVERT_BOOL(resize); + AIPP_CONVERT_INT(resize_output_w); + AIPP_CONVERT_INT(resize_output_h); + AIPP_CONVERT_BOOL(padding); + AIPP_CONVERT_INT(left_padding_size); + AIPP_CONVERT_INT(right_padding_size); + AIPP_CONVERT_INT(top_padding_size); + AIPP_CONVERT_INT(bottom_padding_size); + AIPP_CONVERT_INT(src_image_size_w); + AIPP_CONVERT_INT(src_image_size_h); + AIPP_CONVERT_FLOAT(cpadding_value); + AIPP_CONVERT_BOOL(rbuv_swap_switch); + AIPP_CONVERT_BOOL(ax_swap_switch); + AIPP_CONVERT_BOOL(single_line_mode); + AIPP_CONVERT_INT(mean_chn_0); + AIPP_CONVERT_INT(mean_chn_1); + AIPP_CONVERT_INT(mean_chn_2); + AIPP_CONVERT_FLOAT(min_chn_0); + AIPP_CONVERT_FLOAT(min_chn_1); + AIPP_CONVERT_FLOAT(min_chn_2); + AIPP_CONVERT_LIST_FLOAT(var_reci_chn_0, true); + AIPP_CONVERT_LIST_FLOAT(var_reci_chn_1, true); + AIPP_CONVERT_LIST_FLOAT(var_reci_chn_2, true); + + const bool csc_switch = aipp_params->csc_switch(); + AIPP_CONVERT_LIST_INT(matrix_r0c0, csc_switch); + 
AIPP_CONVERT_LIST_INT(matrix_r0c1, csc_switch); + AIPP_CONVERT_LIST_INT(matrix_r0c2, csc_switch); + AIPP_CONVERT_LIST_INT(matrix_r1c0, csc_switch); + AIPP_CONVERT_LIST_INT(matrix_r1c1, csc_switch); + AIPP_CONVERT_LIST_INT(matrix_r1c2, csc_switch); + AIPP_CONVERT_LIST_INT(matrix_r2c0, csc_switch); + AIPP_CONVERT_LIST_INT(matrix_r2c1, csc_switch); + AIPP_CONVERT_LIST_INT(matrix_r2c2, csc_switch); + AIPP_CONVERT_LIST_INT(output_bias_0, csc_switch); + AIPP_CONVERT_LIST_INT(output_bias_1, csc_switch); + AIPP_CONVERT_LIST_INT(output_bias_2, csc_switch); + AIPP_CONVERT_LIST_INT(input_bias_0, csc_switch); + AIPP_CONVERT_LIST_INT(input_bias_1, csc_switch); + AIPP_CONVERT_LIST_INT(input_bias_2, csc_switch); + } + + return SUCCESS; +} + +CceTensorDescriptor::CceTensorDescriptor(cce::ccTensorDescriptor_t cc_tensor) : cc_tensor_(cc_tensor) {} + +CceTensorDescriptor::~CceTensorDescriptor() { + if (cc_tensor_ != nullptr) { + OpUtils::DestroyTensorDescriptor(cc_tensor_); + cc_tensor_ = nullptr; + } +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status CceTensorDescriptor::InitTensor(int32_t format, + int32_t data_type, + const vector &dims) { + if (cc_tensor_ != nullptr) { + GELOGE(PARAM_INVALID, "Cannot init cce tensor descriptor twice!"); + return PARAM_INVALID; + } + cce::ccTensorDescriptor_t cc_tensor = nullptr; + + Status ret = OpUtils::InitTensorDescriptor(format, data_type, dims, cc_tensor); + + GE_CHK_STATUS_EXEC(ret, OpUtils::DestroyTensorDescriptor(cc_tensor); return FAILED, "init cc_tensor failed."); + + cc_tensor_ = cc_tensor; + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status CceTensorDescriptor::InitTensor(int32_t format, + int32_t data_type, + const ge::GeShape &shape) { + return InitTensor(format, data_type, shape.GetDims()); +} + +Status CceTensorDescriptor::GetFormat(cce::ccTensorFormat_t *format) { + GE_CHECK_NOTNULL(format); + GE_CHK_CCE_RET(cce::ccGetTensorFormat(cc_tensor_, format)); + return SUCCESS; +} + 
+FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status CceTensorDescriptor::GetTensorSizeInBytes(uint32_t *size) { + GE_CHECK_NOTNULL(size); + GE_CHK_CCE_RET(cce::ccGetTensorSizeInBytes(cc_tensor_, size)); + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +CceTensorDescriptor::TransTensor(const cce::ccTensorDescriptor_t x_desc, const void *x, + const CceTensorDescriptorPtr &y_desc, void *y, uint32_t y_size_in_bytes) { + GE_CHECK_NOTNULL(y_desc); + GE_CHK_CCE_RET(cce::ccTransTensor(x_desc, x, y_desc->cc_tensor_, y, y_size_in_bytes)); + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY CceTensorDescriptorPtr CceTensorDescriptor::Create() { + shared_ptr desc = nullptr; + desc = ge::MakeShared(nullptr); + if (desc == nullptr) { + GELOGE(FAILED, "Make CceTensorDescriptor failed."); + return nullptr; + } + return desc; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OpUtils::TransferDim(const std::vector &dim, + std::vector &dim_vector) { + size_t input_shape_size = dim.size(); + std::list new_dim_list; + for (auto dim_temp : dim) { + new_dim_list.push_back(dim_temp); + } + if (input_shape_size > DIM_DEFAULT_SIZE) { + dim_vector = dim; + GELOGI("Dim_vector size is %zu, do not to transfer dim", input_shape_size); + return SUCCESS; + } + switch (input_shape_size) { + case kDimSizeZero: { + new_dim_list.push_back(1); + new_dim_list.push_back(1); + new_dim_list.push_back(1); + new_dim_list.push_back(1); + break; + } + case kDimSizeOne: { + new_dim_list.push_front(1); + new_dim_list.push_back(1); + new_dim_list.push_back(1); + break; + } + case kDimSizeTwo: { + new_dim_list.push_front(1); + new_dim_list.push_back(1); + break; + } + case kDimSizeThree: { + new_dim_list.push_front(1); + break; + } + default: + GELOGI("Invalid input_shape_size."); + break; + } + + dim_vector.clear(); + for (auto dims : new_dim_list) { + dim_vector.push_back(dims); + } + return SUCCESS; +} + +void OpUtils::SliceData(std::vector 
&input, int64_t chunk_size, std::vector &output, int64_t begin, + int64_t out_dim, int64_t stride) { + char *slice = nullptr; + for (size_t j = 0; j < input.size(); j++) { + slice = input[j] + sizeof(int32_t) * begin * chunk_size; + for (int64_t i = 0; i < out_dim; i++) { + output.push_back(slice + sizeof(int32_t) * i * chunk_size * stride); + } + } +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OpUtils::SetOutputSliceData( + void *data, int64_t data_size, int32_t data_type, std::vector &input_dims, std::vector &begin, + std::vector &output_dims, GeTensor *output, std::vector &stride) { + GE_CHECK_NOTNULL(data); + GE_CHECK_NOTNULL(output); + std::vector chunk_input; + std::vector chunk_output; + chunk_input.push_back(reinterpret_cast(data)); + int64_t chunk_size = data_size; + int dim_size = static_cast(input_dims.size()); + for (int i = 0; i < dim_size; i++) { + int64_t begin_i = begin[i]; + int64_t size_i = output_dims[i]; + int64_t dim_i = input_dims[i]; + int64_t stride_i = stride[i]; + GE_CHK_BOOL_EXEC((dim_i != 0), return PARAM_INVALID, "Dim_i can't be 0."); + chunk_size = chunk_size / dim_i; + + if (i % kSliceDataNum == 0) { + SliceData(chunk_input, chunk_size, chunk_output, begin_i, size_i, stride_i); + chunk_input.clear(); + } else { + SliceData(chunk_output, chunk_size, chunk_input, begin_i, size_i, stride_i); + chunk_output.clear(); + } + } + + size_t out_size = chunk_input.size() + chunk_output.size(); + GE_CHK_BOOL_RET_STATUS(out_size > 0, FAILED, "Out_size <= 0"); + + if (data_type == DT_FLOAT) { + float *output_data = new (std::nothrow) float[out_size](); + GE_CHECK_NOTNULL(output_data); + if (!chunk_input.empty()) { + for (size_t j = 0; j < out_size; j++) { + float *value = reinterpret_cast(chunk_input[j]); + output_data[j] = *value; + } + } else { + for (size_t j = 0; j < out_size; j++) { + float *value = reinterpret_cast(chunk_output[j]); + output_data[j] = *value; + } + } + (void)output->SetData(reinterpret_cast(output_data), 
out_size * sizeof(float)); + // output_data != nullptr and out_size > 0, SetData always return success, no need to check value + GE_DELETE_NEW_ARRAY(output_data); + } else if (data_type == DT_INT32) { + int *output_data = new (std::nothrow) int[out_size](); + GE_CHECK_NOTNULL(output_data); + + if (!chunk_input.empty()) { + for (size_t j = 0; j < out_size; j++) { + int *value = reinterpret_cast(chunk_input[j]); + output_data[j] = *value; + } + } else { + for (size_t j = 0; j < out_size; j++) { + int *value = reinterpret_cast(chunk_output[j]); + output_data[j] = *value; + } + } + (void)output->SetData(reinterpret_cast(output_data), out_size * sizeof(int)); + // output_data != nullptr and out_size > 0, SetData always return success, no need to check value + GE_DELETE_NEW_ARRAY(output_data); + } else { + GELOGE(FAILED, "Data type of Slice OP must be float or int32."); + return FAILED; + } + + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void OpUtils::TransDataHWCK2KCHW(const void *input, int64_t h, + int64_t w, int64_t c, int64_t k, + void **output) { + if (input == nullptr) { + return; + } + if (output == nullptr) { + return; + } + const char *w_data = (const char *)input; + + int64_t count = h * w * c * k; + GE_IF_BOOL_EXEC(count <= 0, GELOGW("Count value must be greater than 0, but count = %ld", count); return); + float *buf = new (std::nothrow) float[count](); + GE_RT_VOID_CHECK_NOTNULL(buf); + float *src_buff = nullptr; + float *dst_buff = nullptr; + for (int h_i = 0; h_i < h; ++h_i) { + for (int w_i = 0; w_i < w; ++w_i) { + for (int c_i = 0; c_i < c; ++c_i) { + for (int k_i = 0; k_i < k; ++k_i) { + src_buff = reinterpret_cast(const_cast(w_data)) + + ((h_i * w * c * k) + (w_i * c * k) + (c_i * k) + (k_i)); + + dst_buff = buf + ((k_i * c * h * w) + (c_i * h * w) + (h_i * w) + (w_i)); + + *dst_buff = *src_buff; + } + } + } + } + *output = buf; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void OpUtils::TransDataKCHW2HWCK(const void 
*input, int64_t k, + int64_t c, int64_t h, int64_t w, + void *output) { + if ((input == nullptr) || (output == nullptr)) { + GELOGD("%s[%d]: input param is nullptr.", __FILE__, __LINE__); + return; + } + + const char *w_data = (const char *)input; + + float *buf = reinterpret_cast(output); + float *src_buff = nullptr; + float *dst_buff = nullptr; + for (int k_i = 0; k_i < k; ++k_i) { + for (int c_i = 0; c_i < c; ++c_i) { + for (int h_i = 0; h_i < h; ++h_i) { + for (int w_i = 0; w_i < w; ++w_i) { + src_buff = reinterpret_cast(const_cast(w_data)) + + ((k_i * c * h * w) + (c_i * h * w) + (h_i * w) + (w_i)); + + dst_buff = buf + ((h_i * w * c * k) + (w_i * c * k) + (c_i * k) + (k_i)); + + *dst_buff = *src_buff; + } + } + } + } +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +OpUtils::InitFilterTensorDescriptor(const GeTensorDesc &model_tensor, cce::ccFilterDescriptor_t &cc_tensor) { + auto dims = model_tensor.GetShape().GetDims(); + auto dim_size = dims.size(); + if (dim_size == 0) { + GELOGE(FAILED, "Invalid dim size"); + return FAILED; + } + uint32_t cc_format_tmp = static_cast(model_tensor.GetFormat()); + cce::ccTensorFormat_t format = cce::tagCcTensorFormat(cc_format_tmp); + uint32_t model_tensor_dt = static_cast(model_tensor.GetDataType()); + ccDataType_t data_type = cce::tagCcDataType(model_tensor_dt); + GE_CHK_BOOL_EXEC( + ((format == cce::CC_TENSOR_NCHW) || (format == cce::CC_TENSOR_FRACTAL_Z) || (format == cce::CC_TENSOR_HWCN)), + return PARAM_INVALID, "Filter tensor format:%d not correct.", format); + GE_IF_BOOL_EXEC(static_cast(dims.size()) <= NCHW_DIM_W, + GELOGE(PARAM_INVALID, "Array index is invalid!"); + return PARAM_INVALID); + // create tensor descriptor + GE_CHK_CCE_RET(cce::ccCreateFilterDescriptor(&cc_tensor)); + if (format == cce::CC_TENSOR_FRACTAL_Z) { + GE_CHK_CCE_RET(cce::ccSetFilterFractalDescriptor( + cc_tensor, format, data_type, static_cast(dims[NCHW_DIM_N]), + static_cast(dims[NCHW_DIM_C]), static_cast(dims[NCHW_DIM_H]), + 
static_cast(dims[NCHW_DIM_W]))); + } else if (format == cce::CC_TENSOR_HWCN) { + GE_CHK_CCE_RET(cce::ccSetFilter4dDescriptor( + cc_tensor, format, data_type, static_cast(dims[NCHW_DIM_W]), + static_cast(dims[NCHW_DIM_H]), static_cast(dims[NCHW_DIM_N]), + static_cast(dims[NCHW_DIM_C]))); + } else { + GE_CHK_CCE_RET(cce::ccSetFilter4dDescriptor( + cc_tensor, format, data_type, static_cast(dims[NCHW_DIM_N]), + static_cast(dims[NCHW_DIM_C]), static_cast(dims[NCHW_DIM_H]), + static_cast(dims[NCHW_DIM_W]))); + } + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void OpUtils::SetTensorDescriptorAllOffsetQuantizeInfo( + const GeTensorDesc &tensor, cce::ccTensorDescriptor_t cc_tensor) { + GE_IF_BOOL_EXEC(!TensorUtils::HasAlloffsetQuantizeInfo(tensor), return;); + ccVecQuantizePara_t temp; + AllOffsetQuantizeInfo temp_quantInfo; + GE_CHK_BOOL_EXEC(TensorUtils::GetAlloffsetQuantizeInfo(tensor, temp_quantInfo) == GRAPH_SUCCESS, return, + "Execute GetAlloffsetQuantizeInfo failed."); + temp.scale = temp_quantInfo.scale; + temp.offset = static_cast(temp_quantInfo.offset); + temp.rrv = 0; + cce::ccSetTensorDescriptorQuantizeParam(cc_tensor, &temp); +} + +vector OpUtils::GetWeights(const ge::Node &node) { return OpDescUtils::GetWeights(node); } + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY vector OpUtils::GetWeights(ge::ConstNodePtr node) { + return OpDescUtils::GetWeights(node); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY vector OpUtils::MutableWeights(const ge::Node &node) { + return OpDescUtils::MutableWeights(node); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY vector OpUtils::MutableWeights(const ge::NodePtr node) { + return OpDescUtils::MutableWeights(node); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OpUtils::SetWeights(ge::Node &node, + const vector &weights) { + return OpDescUtils::SetWeights(node, weights); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status OpUtils::SetWeights(ge::NodePtr node, 
+ const vector &weights) { + return OpDescUtils::SetWeights(node, weights); +} + +// The caller guarantees that the input sensor is constant +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status +OpUtils::GetShapeDataFromConstTensor(const ConstGeTensorPtr &tensor, DataType type, std::vector &dims) { + if (tensor == nullptr) { + GELOGE(PARAM_INVALID, "Input tensor is nullptr"); + return PARAM_INVALID; + } + + // If the tensor data is a vector, the shape dimension must be 1 + if (tensor->GetTensorDesc().GetShape().GetDims().size() > 1) { + GELOGE(PARAM_INVALID, "The dimension of the input tensor shape cannot be more than 1, it is %zu", + tensor->GetTensorDesc().GetShape().GetDims().size()); + return PARAM_INVALID; + } + + if (type == DT_INT32) { + int32_t *shape_data = const_cast(reinterpret_cast(tensor->GetData().GetData())); + size_t dims_num = tensor->GetData().size() / sizeof(int32_t); + for (size_t i = 0; i < dims_num; i++) { + dims.push_back(static_cast(shape_data[i])); + } + } else if (type == DT_INT64) { + int64_t *shape_data = const_cast(reinterpret_cast(tensor->GetData().GetData())); + size_t dims_num = tensor->GetData().size() / sizeof(int64_t); + for (size_t i = 0; i < dims_num; i++) { + dims.push_back(shape_data[i]); + } + } else { + GELOGE(PARAM_INVALID, "Data type only can be DT_INT32 or DT_INT64. type is %s", + TypeUtils::DataTypeToSerialString(type).c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +uint32_t OpUtils::GetRealDimCnt(const GeTensorDesc &tensor_desc) { + uint32_t real_dim_cnt = 0; + domi::Status ret = TensorUtils::GetRealDimCnt(tensor_desc, real_dim_cnt); + return (ret == domi::SUCCESS) ? 
real_dim_cnt : 0; +} +} // namespace ge diff --git a/src/ge/common/profiling/profiling_manager.cc b/src/ge/common/profiling/profiling_manager.cc new file mode 100644 index 00000000..4c38e22c --- /dev/null +++ b/src/ge/common/profiling/profiling_manager.cc @@ -0,0 +1,338 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/profiling/profiling_manager.h" + +#include "nlohmann/json.hpp" + +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/string_util.h" +#include "runtime/base.h" + +using Json = nlohmann::json; + +namespace { +const char *const kJobID = "jobID"; +const char *const kDeviceID = "deviceID"; +const char *const kStartCfg = "startCfg"; +const char *const kFeatures = "features"; +const char *const kConf = "conf"; +const char *const kEvents = "events"; +const char *const kAiCoreEvents = "ai_core_events"; +const char *const kName = "name"; +} // namespace + +namespace ge { +ProfilingManager::ProfilingManager() {} + +ProfilingManager::~ProfilingManager() {} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ProfilingManager &ProfilingManager::Instance() { + static ProfilingManager profiling_manager; + return profiling_manager; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ge::Status ProfilingManager::Init(const Options &options) { +#ifdef DAVINCI_SUPPORT_PROFILING + device_id_ = options.device_id; + 
job_id_ = std::to_string(options.job_id); + + Status ret; + if (!recv_profiling_config_.empty()) { + GELOGI("Profiling json config from acl:%s", recv_profiling_config_.c_str()); + ret = InitFromAclCfg(recv_profiling_config_); + } else { + ret = InitFromEnv(options); + } + if (ret != SUCCESS) { + GELOGE(ret, "Failed to init profiling."); + return ret; + } + + if (is_profiling_) { + // register Framework to profiling + const ProfilingEngineImpl engine_0; + int result = Msprof::Engine::RegisterEngine("Framework", &engine_0); + if (result != 0) { + GELOGE(FAILED, "Register profiling engine failed."); + return FAILED; + } + // profiling startup first time + ret = StartProfiling(0); + if (ret != SUCCESS) { + GELOGE(ret, "Profiling start failed."); + return FAILED; + } + GELOGI("Profiling init succ."); + } +#endif + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ge::Status ProfilingManager::InitFromAclCfg( + const std::string &config) { +#ifdef DAVINCI_SUPPORT_PROFILING + try { + is_profiling_ = false; + profiling_opts_.clear(); + op_trace_conf_.clear(); + Json start_prof_conf = Json::parse(config); + Json &prof_conf = start_prof_conf[kStartCfg][0]; + job_id_ = prof_conf[kJobID]; + + GELOGI("Profiling json config from acl:%s", config.c_str()); + Json &features = prof_conf[kFeatures]; + for (size_t i = 0; i < features.size(); ++i) { + Json &feature = features[i]; + if ((feature.find(kName) == feature.end()) || feature[kName].is_null()) { + continue; + } + + const std::string &name = feature[kName]; + if (name == "op_trace") { + GELOGI("Op trace config from acl"); + Json &conf = feature[kConf]; + Json &events = conf[kEvents]; + const std::string &ai_core_events = events[kAiCoreEvents]; + GELOGI("Op trace config from acl ai_core_events:%s", ai_core_events.c_str()); + is_op_trace_ = true; + // op trace get conf + ProfMgrConf prof_mgr_conf; + int result = ProfMgrGetConf(ai_core_events, &prof_mgr_conf); + if (result != 0) { + GELOGE(FAILED, 
"ProfMgrGetConf failed."); + return FAILED; + } + op_trace_conf_ = prof_mgr_conf.conf; + op_trace_iter_num_ = static_cast(op_trace_conf_.size()); + GELOGI("Op trace profiling iter num %d,", op_trace_iter_num_); + } else if (name == "task_trace") { + is_op_trace_ = false; + GELOGI("Task trace config from acl"); + } + profiling_opts_.push_back(name); + } + + is_profiling_ = true; + } catch (Json::parse_error &e) { + GELOGE(FAILED, "Json conf is not invalid !"); + return ge::PARAM_INVALID; + } +#endif + return ge::SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ge::Status ProfilingManager::InitFromEnv(const Options &options) { +#ifdef DAVINCI_SUPPORT_PROFILING + const char *is_profiling = std::getenv("PROFILING_MODE"); + const char *prof_options = std::getenv("PROFILING_OPTIONS"); + GELOGI("The profiling in options is %s, %s", is_profiling, prof_options); + if ((is_profiling == nullptr) || (strcmp("true", is_profiling) != 0) || (prof_options == nullptr)) { + // default training trace on + is_profiling_ = false; + return SUCCESS; + } else { + std::string prof_options_str = std::string(prof_options); + profiling_opts_ = StringUtils::Split(prof_options_str, ':'); + is_profiling_ = true; + } + + // features:'training_trace', 'task_trace' or 'op_trace' etc + if (!profiling_opts_.empty()) { + if (profiling_opts_[0] == "op_trace") { + is_op_trace_ = true; + // op trace get conf + ProfMgrConf prof_mgr_conf; + int result = ProfMgrGetConf("", &prof_mgr_conf); + if (result != 0) { + GELOGE(FAILED, "ProfMgrGetConf failed."); + return FAILED; + } + op_trace_conf_ = prof_mgr_conf.conf; + op_trace_iter_num_ = static_cast(op_trace_conf_.size()); + GELOGI("op trace profiling iter num %d,", op_trace_iter_num_); + } else { + is_op_trace_ = false; + op_trace_iter_num_ = 1; + } + } +#endif + return ge::SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ge::Status ProfilingManager::StartProfiling(int32_t iter_num) { +#ifdef DAVINCI_SUPPORT_PROFILING + if 
(!profiling_opts_.empty()) { + GELOGI("Start profiling index is %d", iter_num); + // current one docker only use one device + Json p_device; + + try { + // profiling need physical_device_id + p_device[kDeviceID] = std::to_string(device_id_); + p_device[kJobID] = job_id_; + + Json features; + if (is_op_trace_) { + Json f; + f[kName] = "op_trace"; + Json conf; + if (op_trace_conf_.size() <= static_cast(iter_num)) { + GELOGE(FAILED, "Op trace iter num is invalid!"); + return FAILED; + } + conf = nlohmann::json::parse(op_trace_conf_[iter_num]); + f[kConf] = conf; + features[0] = f; + if (iter_num == 0) { + is_load_ = true; + } + } else { + for (std::vector::size_type i = 0; i < profiling_opts_.size(); i++) { + Json f; + f[kName] = profiling_opts_[i]; + features[i] = f; + } + is_load_ = true; + } + p_device[kFeatures] = features; + // only one device, but sProfMgrStartUp API require for device list + Json devices; + devices[0] = p_device; + + Json start_cfg; + start_cfg[kStartCfg] = devices; + + // convert json to string + std::stringstream ss; + ss << start_cfg; + send_profiling_config_ = ss.str(); + GELOGI("Profiling config %s\n", send_profiling_config_.c_str()); + } catch (Json::parse_error &e) { + GELOGE(FAILED, "Op trace json conf is not invalid !"); + return FAILED; + } + + // runtime startup for profiling + GE_CHK_RT_RET(rtProfilerStart()); + + // call profiling startup API + ProfMgrCfg prof_cfg = {send_profiling_config_}; + prof_handle = ProfMgrStartUp(&prof_cfg); + if (prof_handle == nullptr) { + GELOGW("ProfMgrStartUp failed."); + return FAILED; + } + } +#endif + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void ProfilingManager::StopProfiling() { +#ifdef DAVINCI_SUPPORT_PROFILING + Msprof::Engine::Reporter *reporter = PluginImpl::GetPluginReporter(); + if (reporter != nullptr) { + int ret = reporter->Flush(); + GELOGI("Report data end, ret is %d", ret); + } + + rtError_t rt_ret = rtProfilerStop(); + if (rt_ret != RT_ERROR_NONE) { + 
GELOGI("Call rtProfilerStop ret:%d", rt_ret); + } + + if (prof_handle != nullptr) { + int result = ProfMgrStop(prof_handle); + if (result != 0) { + GELOGW("ProfMgr stop return fail:%d.", result); + return; + } + } + is_load_ = false; + recv_profiling_config_ = ""; + GELOGI("Stop Profiling success."); +#endif +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void ProfilingManager::ReportProfilingData( + const std::map &op_task_id_map) { +#ifdef DAVINCI_SUPPORT_PROFILING + Msprof::Engine::Reporter *reporter = PluginImpl::GetPluginReporter(); + if (reporter == nullptr) { + GELOGI("Profiling report is nullptr!"); + return; + } + std::string data; + for (const auto &iter : op_task_id_map) { + data = iter.second + ' ' + std::to_string(iter.first) + ';'; + Msprof::Engine::ReporterData reporter_data{}; + reporter_data.deviceId = device_id_; + reporter_data.data = (unsigned char *)data.c_str(); + reporter_data.dataLen = data.size(); + int ret = memcpy_s(reporter_data.tag, MSPROF_ENGINE_MAX_TAG_LEN + 1, "framework", sizeof("framework")); + if (ret != EOK) { + GELOGE(ret, "Report data tag memcpy error!"); + return; + } + ret = reporter->Report(&reporter_data); + if (ret != SUCCESS) { + GELOGE(ret, "Reporter data fail!"); + return; + } + } + GELOGI("Report profiling data for GE end."); +#endif +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void ProfilingManager::SetProfilingConfig( + const std::string &profiling_cfg) { + recv_profiling_config_ = profiling_cfg; +} + +/** + * @brief Profiling PluginImpl +*/ +// PluginImpl static variable init +Msprof::Engine::Reporter *PluginImpl::reporter_ = nullptr; + +PluginImpl::PluginImpl(const std::string &module) : module_(module) { GELOGI("Create PluginImpl\n"); } + +int PluginImpl::Init(const Msprof::Engine::Reporter *reporter) { + GELOGI("PluginImpl init"); + reporter_ = const_cast(reporter); + return 0; +} + +int PluginImpl::UnInit() { + GELOGI("PluginImpl Uninit"); + reporter_ = nullptr; + return 0; +} + 
+Msprof::Engine::PluginIntf *ProfilingEngineImpl::CreatePlugin() { + GELOGI(" Create Plugin"); + return new (std::nothrow) PluginImpl("Framework"); +} + +int ProfilingEngineImpl::ReleasePlugin(Msprof::Engine::PluginIntf *plugin) { + if (plugin != nullptr) { + delete plugin; + plugin = nullptr; + } + return 0; +} +} // namespace ge diff --git a/src/ge/common/profiling/profiling_manager.h b/src/ge/common/profiling/profiling_manager.h new file mode 100644 index 00000000..6b1645de --- /dev/null +++ b/src/ge/common/profiling/profiling_manager.h @@ -0,0 +1,95 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_PROFILING_PROFILING_MANAGER_H_ +#define GE_COMMON_PROFILING_PROFILING_MANAGER_H_ + +#include +#include +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/ge_types.h" +#include "external/register/register_types.h" +#include "toolchain/prof_engine.h" +#include "toolchain/prof_mgr_core.h" + +using std::map; +using std::string; +using std::vector; + +namespace ge { +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ProfilingManager { + public: + ProfilingManager(); + virtual ~ProfilingManager(); + static ProfilingManager &Instance(); + ge::Status Init(const Options &options); + ge::Status InitFromEnv(const Options &options); + ge::Status InitFromAclCfg(const std::string &config); + ge::Status StartProfiling(int32_t iter); + void StopProfiling(); + bool ProfilingOpTraceOn() const { return is_op_trace_; } + bool ProfilingLoadFlag() const { return is_load_; } + bool ProfilingOn() const { return is_profiling_; } + int32_t GetOpTraceIterNum() const { return op_trace_iter_num_; } + void ReportProfilingData(const std::map &op_task_id_map); + void SetProfilingConfig(const string &profiling_cfg); + + private: + bool is_profiling_ = false; + bool is_op_trace_ = false; + bool is_load_ = false; + int32_t op_trace_iter_num_ = 0; + string job_id_; + int32_t device_id_ = 0; + vector op_trace_conf_; + vector profiling_opts_; + void *prof_handle = nullptr; + string recv_profiling_config_; + string send_profiling_config_; +}; + +/// +/// @brief register Plugin +/// +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY PluginImpl : public Msprof::Engine::PluginIntf { + public: + explicit PluginImpl(const std::string &module); + ~PluginImpl() {} + + int Init(const Msprof::Engine::Reporter *reporter); + int UnInit(); + static Msprof::Engine::Reporter *GetPluginReporter() { return reporter_; } + + private: + static Msprof::Engine::Reporter *reporter_; + std::string module_; +}; + +/// +/// @brief register Engine +/// 
+class ProfilingEngineImpl : public Msprof::Engine::EngineIntf { + public: + ProfilingEngineImpl() {} + ~ProfilingEngineImpl() {} + + Msprof::Engine::PluginIntf *CreatePlugin(); + int ReleasePlugin(Msprof::Engine::PluginIntf *plugin); +}; +} // namespace ge +#endif // GE_COMMON_PROFILING_PROFILING_MANAGER_H_ diff --git a/src/ge/common/properties_manager.cc b/src/ge/common/properties_manager.cc new file mode 100644 index 00000000..16952883 --- /dev/null +++ b/src/ge/common/properties_manager.cc @@ -0,0 +1,255 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "common/properties_manager.h" + +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/ge_types.h" +#include "framework/common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/attr_utils.h" +#include "common/util.h" + +namespace ge { +PropertiesManager::PropertiesManager() : is_inited_(false), delimiter("=") {} +PropertiesManager::~PropertiesManager() {} + +// singleton +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY PropertiesManager &PropertiesManager::Instance() { + static PropertiesManager instance; + return instance; +} + +// Initialize property configuration +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool PropertiesManager::Init(const std::string &file_path) { + std::lock_guard lock(mutex_); + if (is_inited_) { + GELOGW("Already inited, will be initialized again"); + properties_map_.clear(); + is_inited_ = false; + return is_inited_; + } + + if (!LoadFileContent(file_path)) { + return false; + } + + is_inited_ = true; + return is_inited_; +} + +// Load file contents +bool PropertiesManager::LoadFileContent(const std::string &file_path) { + // Normalize the path + string resolved_file_path = RealPath(file_path.c_str()); + if (resolved_file_path.empty()) { + GELOGE(false, "Invalid input file path [%s], make sure that the file path is correct.", file_path.c_str()); + return false; + } + std::ifstream fs(resolved_file_path, std::ifstream::in); + + if (!fs.is_open()) { + GELOGW("Open %s failed.", file_path.c_str()); + return false; + } + + std::string line; + + while (getline(fs, line)) { // line not with \n + if (!ParseLine(line)) { + GELOGW("Parse line failed. 
content is [%s].", line.c_str()); + fs.close(); + return false; + } + } + + fs.close(); // close the file + + GELOGI("LoadFileContent success."); + return true; +} + +// Parsing the command line +bool PropertiesManager::ParseLine(const std::string &line) { + std::string temp = Trim(line); + // Comment or newline returns true directly + if (temp.find_first_of('#') == 0 || *(temp.c_str()) == '\n') { + return true; + } + + if (!temp.empty()) { + std::string::size_type pos = temp.find_first_of(delimiter); // Must be divided by "=" + if (pos == std::string::npos) { + GELOGW("Incorrect line [%s]", line.c_str()); + return false; + } + + std::string map_key = Trim(temp.substr(0, pos)); + std::string value = Trim(temp.substr(pos + 1)); + if (map_key.empty() || value.empty()) { + GELOGW("Map_key or value empty. %s", line.c_str()); + return false; + } + + properties_map_[map_key] = value; + } + + return true; +} + +// Remove the space and tab before and after the string +std::string PropertiesManager::Trim(const std::string &str) { + if (str.empty()) { + return str; + } + + std::string::size_type start = str.find_first_not_of(" \t\r\n"); + if (start == std::string::npos) { + return str; + } + + std::string::size_type end = str.find_last_not_of(" \t\r\n") + 1; + return str.substr(start, end); +} + +// Get property value, if not found, return "" +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY std::string PropertiesManager::GetPropertyValue( + const std::string &map_key) { + std::lock_guard lock(mutex_); + auto iter = properties_map_.find(map_key); + if (properties_map_.end() != iter) { + return iter->second; + } + + return ""; +} + +// Set property value +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void PropertiesManager::SetPropertyValue(const std::string &map_key, + const std::string &value) { + std::lock_guard lock(mutex_); + properties_map_[map_key] = value; +} + +// return properties_map_ +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY std::map 
+PropertiesManager::GetPropertyMap() { + std::lock_guard lock(mutex_); + return properties_map_; +} + +// Set separator +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void PropertiesManager::SetPropertyDelimiter(const std::string &de) { + delimiter = de; +} + +// The following is the new dump scenario of the fusion operator +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void PropertiesManager::AddDumpPropertyValue( + const std::string &model, const std::set &layers) { + for (const std::string &layer : layers) { + GELOGI("This model %s config to dump layer %s", model.c_str(), layer.c_str()); + } + + std::lock_guard lock(dump_mutex_); + model_dump_properties_map_[model] = layers; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void PropertiesManager::DeleteDumpPropertyValue( + const std::string &model) { + std::lock_guard lock(dump_mutex_); + auto iter = model_dump_properties_map_.find(model); + if (iter != model_dump_properties_map_.end()) { + model_dump_properties_map_.erase(iter); + } +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void PropertiesManager::ClearDumpPropertyValue() { + std::lock_guard lock(dump_mutex_); + model_dump_properties_map_.clear(); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY std::set PropertiesManager::GetDumpPropertyValue( + const std::string &model) { + std::lock_guard lock(dump_mutex_); + auto iter = model_dump_properties_map_.find(model); + if (iter != model_dump_properties_map_.end()) { + return iter->second; + } + return {}; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool PropertiesManager::IsLayerNeedDump(const std::string &model, + const std::string &op_name) { + std::lock_guard lock(dump_mutex_); + // if dump all + if (model_dump_properties_map_.find(ge::DUMP_ALL_MODEL) != model_dump_properties_map_.end()) { + return true; + } + + // if this model need dump + auto model_iter = model_dump_properties_map_.find(model); + if (model_iter != model_dump_properties_map_.end()) { + // if no dump 
layer info, dump all layer in this model + if (model_iter->second.empty()) { + return true; + } + + return model_iter->second.find(op_name) != model_iter->second.end(); + } + + GELOGD("Model %s is not seated to be dump.", model.c_str()); + return false; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool PropertiesManager::QueryModelDumpStatus( + const std::string &model) { + std::lock_guard lock(dump_mutex_); + auto iter = model_dump_properties_map_.find(model); + if (iter != model_dump_properties_map_.end()) { + return true; + } else if (model_dump_properties_map_.find(ge::DUMP_ALL_MODEL) != model_dump_properties_map_.end()) { + return true; + } + return false; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void PropertiesManager::SetDumpOutputModel( + const std::string &output_mode) { + std::lock_guard lock(dump_mutex_); + this->output_mode_ = output_mode; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY std::string PropertiesManager::GetDumpOutputModel() { + std::lock_guard lock(dump_mutex_); + return this->output_mode_; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void PropertiesManager::SetDumpOutputPath( + const std::string &output_path) { + std::lock_guard lock(dump_mutex_); + this->output_path_ = output_path; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY std::string PropertiesManager::GetDumpOutputPath() { + std::lock_guard lock(dump_mutex_); + return this->output_path_; +} +} // namespace ge diff --git a/src/ge/common/properties_manager.h b/src/ge/common/properties_manager.h new file mode 100644 index 00000000..6c4b2072 --- /dev/null +++ b/src/ge/common/properties_manager.h @@ -0,0 +1,124 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_PROPERTIES_MANAGER_H_ +#define GE_COMMON_PROPERTIES_MANAGER_H_ + +#include +#include +#include +#include +#include + +#include "graph/op_desc.h" + +namespace ge { +// Configuration property management +static const char *SYSMODE __attribute__((unused)) = "FMK_SYSMODE"; +static const char *USE_FUSION __attribute__((unused)) = "FMK_USE_FUSION"; +static const char *TIMESTAT_ENABLE __attribute__((unused)) = "DAVINCI_TIMESTAT_ENABLE"; +static const char *ANNDROID_DEBUG __attribute__((unused)) = "ANNDROID_DEBUG"; + +class PropertiesManager { + public: + // Singleton + static PropertiesManager &Instance(); + + /** + * @ingroup domi_ome + * @brief Initialize configuration parameters, which must be invoked in main. + * @param [in] file_path Property profile path + * @return true success + * @return false fail + * @author + */ + bool Init(const std::string &file_path); + + /** + * @ingroup domi_ome + * @brief Get configuration parameter value + * @param [in] key Configuration parameter name + * @return Configuration parameter value. 
If the parameter name does not exist, return null + * @author + */ + std::string GetPropertyValue(const std::string &key); + + /** + * @ingroup domi_ome + * @brief Set configuration parameters + * @param [in] key Configuration parameter name + * @param [out] key Configuration parameter value + * @author + */ + void SetPropertyValue(const std::string &key, const std::string &value); + + /** + * @ingroup domi_ome + * @brief Return configuration parameters + * @return properties_map_ + * @author + */ + std::map GetPropertyMap(); + + /** + * @ingroup domi_ome + * @brief Adapt key value pair form, set different separators + * @param [in] delimiter + * @author + */ + void SetPropertyDelimiter(const std::string &de); + + void AddDumpPropertyValue(const std::string &model, const std::set &layers); + std::set GetDumpPropertyValue(const std::string &model); + bool IsLayerNeedDump(const std::string &model, const std::string &op_name); + void DeleteDumpPropertyValue(const std::string &model); + void ClearDumpPropertyValue(); + bool QueryModelDumpStatus(const std::string &model); + void SetDumpOutputModel(const std::string &output_model); + std::string GetDumpOutputModel(); + void SetDumpOutputPath(const std::string &output_path); + std::string GetDumpOutputPath(); + + private: + // Private construct, destructor + PropertiesManager(); + ~PropertiesManager(); + + // Get file content + bool LoadFileContent(const std::string &file_path); + + // Parsing a single line file + bool ParseLine(const std::string &line); + + // Remove space before and after string + std::string Trim(const std::string &str); + + bool is_inited_; + + // Configuration item separator, default is "=" + std::string delimiter; + + std::map properties_map_; + std::mutex mutex_; + + std::string output_mode_; + std::string output_path_; + std::map> model_dump_properties_map_; // model_dump_layers_map_ + std::mutex dump_mutex_; +}; +} // namespace ge + +#endif // GE_COMMON_PROPERTIES_MANAGER_H_ diff --git 
#ifndef GE_COMMON_SINGLETON_H_
#define GE_COMMON_SINGLETON_H_

#include <memory>
#include <mutex>

// Grant Singleton<T> access to T's (typically private) constructor.
#define DECLARE_SINGLETON_CLASS(T) friend class Singleton<T>

namespace ge {
// Lazily-constructed singleton holder, one instance per type T.
template <typename T>
class Singleton {
 public:
  Singleton(Singleton const &) = delete;
  Singleton &operator=(Singleton const &) = delete;

  // Returns the process-wide instance, creating it with `args` on first call.
  // Arguments of later calls are ignored. Returns nullptr when allocation fails.
  template <typename... Args>
  static T *Instance(Args... args) {
    // FIX: the original used a namespace-scope `static std::mutex` declared in
    // this header; internal linkage gave every translation unit its own mutex,
    // so the lock did not actually serialize cross-TU access. A function-local
    // static of a template has a single definition program-wide.
    static std::mutex mutex;
    std::lock_guard<std::mutex> lock(mutex);
    if (instance_ == nullptr) {
      // std::nothrow: nullptr is returned when the memory request fails.
      instance_.reset(new (std::nothrow) T(args...));
    }
    return instance_.get();
  }

  // Destroys the instance; a subsequent Instance() call re-creates it.
  static void Destroy(void) { instance_.reset(); }

  Singleton() = default;
  virtual ~Singleton() = default;

 private:
  static std::unique_ptr<T> instance_;
};

template <typename T>
std::unique_ptr<T> Singleton<T>::instance_;
}  // namespace ge
#endif  // GE_COMMON_SINGLETON_H_
+ */ + +#include "common/tbe_kernel_store.h" + +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" + +namespace ge { +const uint32_t kKernelItemMagic = 0x5d776efd; + +struct KernelStoreItemHead { + uint32_t magic; + uint32_t name_len; + uint32_t bin_len; +}; + +TBEKernelStore::TBEKernelStore() {} + +void TBEKernelStore::AddTBEKernel(const TBEKernelPtr &kernel) { + if (kernel != nullptr) { + kernels_[kernel->GetName()] = kernel; + } +} + +bool TBEKernelStore::Build() { + buffer_.clear(); + size_t total_len = 0; + for (const auto &item : kernels_) { + auto kernel = item.second; + total_len += sizeof(KernelStoreItemHead); + total_len += kernel->GetName().length(); + total_len += kernel->GetBinDataSize(); + } + + try { + buffer_.resize(total_len); + } catch (std::bad_alloc &e) { + GELOGE(ge::MEMALLOC_FAILED, "All build memory failed, memory size %zu", total_len); + return false; + } + + uint8_t *next_buffer = buffer_.data(); + size_t remain_len = total_len; + errno_t mem_ret; + for (const auto &item : kernels_) { + auto kernel = item.second; + KernelStoreItemHead kernel_head{}; + kernel_head.magic = kKernelItemMagic; + kernel_head.name_len = static_cast(kernel->GetName().length()); + kernel_head.bin_len = static_cast(kernel->GetBinDataSize()); + + mem_ret = memcpy_s(next_buffer, remain_len, &kernel_head, sizeof(kernel_head)); + GE_CHK_BOOL_EXEC_NOLOG(mem_ret == EOK, return false); + next_buffer += sizeof(kernel_head); + + mem_ret = memcpy_s(next_buffer, remain_len - sizeof(kernel_head), kernel->GetName().data(), kernel_head.name_len); + GE_CHK_BOOL_EXEC_NOLOG(mem_ret == EOK, return false); + next_buffer += kernel_head.name_len; + + mem_ret = memcpy_s(next_buffer, remain_len - sizeof(kernel_head) - kernel_head.name_len, kernel->GetBinData(), + kernel_head.bin_len); + GE_CHK_BOOL_EXEC_NOLOG(mem_ret == EOK, return false); + + next_buffer += kernel_head.bin_len; + remain_len = remain_len 
- sizeof(kernel_head) - kernel_head.name_len - kernel_head.bin_len; + } + kernels_.clear(); + return true; +} + +const uint8_t *TBEKernelStore::Data() const { return buffer_.data(); } + +size_t TBEKernelStore::DataSize() const { return buffer_.size(); } + +bool TBEKernelStore::Load(const uint8_t *data, const size_t &len) { + if (data == nullptr || len == 0) { + return false; + } + size_t buffer_len = len; + while (buffer_len > sizeof(KernelStoreItemHead)) { + const char *next_buffer = reinterpret_cast(data) + (len - buffer_len); + + const auto *kernel_head = reinterpret_cast(next_buffer); + if (buffer_len < kernel_head->name_len + kernel_head->bin_len + sizeof(KernelStoreItemHead)) { + GELOGW("Invalid kernel block remain buffer len %zu, name len %u, bin len %u", buffer_len, kernel_head->name_len, + kernel_head->bin_len); + break; + } + + next_buffer += sizeof(KernelStoreItemHead); + std::string name(next_buffer, kernel_head->name_len); + + next_buffer += kernel_head->name_len; + GELOGI("Load kernel from om:%s,%u,%u", name.c_str(), kernel_head->name_len, kernel_head->bin_len); + std::vector kernel_bin(next_buffer, next_buffer + kernel_head->bin_len); + TBEKernelPtr teb_kernel_ptr = ge::MakeShared(name, std::move(kernel_bin)); + if (teb_kernel_ptr != nullptr) { + kernels_.emplace(name, teb_kernel_ptr); + } + buffer_len -= sizeof(KernelStoreItemHead) + kernel_head->name_len + kernel_head->bin_len; + } + + return true; +} + +TBEKernelPtr TBEKernelStore::FindTBEKernel(const std::string &name) const { + auto it = kernels_.find(name); + if (it != kernels_.end()) { + return it->second; + } + return nullptr; +} + +void TBEKernelStore::LoadTBEKernelBinToOpDesc(const std::shared_ptr &op_desc) const { + if (op_desc != nullptr) { + auto tbe_kernel = FindTBEKernel(op_desc->GetName()); + if (tbe_kernel != nullptr) { + GE_IF_BOOL_EXEC(!op_desc->SetExtAttr(ge::OP_EXTATTR_NAME_TBE_KERNEL, tbe_kernel), + GELOGW("LoadTBEKernelBinToOpDesc: SetExtAttr for tbe_kernel failed");) + 
GELOGI("Load tbe kernel:%s, %zu", tbe_kernel->GetName().c_str(), tbe_kernel->GetBinDataSize()); + } + } +} +} // namespace ge diff --git a/src/ge/common/tbe_kernel_store.h b/src/ge/common/tbe_kernel_store.h new file mode 100644 index 00000000..da231358 --- /dev/null +++ b/src/ge/common/tbe_kernel_store.h @@ -0,0 +1,56 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_COMMON_TBE_KERNEL_STORE_H_ +#define GE_COMMON_TBE_KERNEL_STORE_H_ + +#include +#include +#include +#include +#include + +#include "framework/common/fmk_types.h" +#include "graph/op_desc.h" +#include "graph/op_kernel_bin.h" + +namespace ge { + +using TBEKernel = ge::OpKernelBin; +using TBEKernelPtr = std::shared_ptr; + +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY TBEKernelStore { + public: + TBEKernelStore(); + ~TBEKernelStore() = default; + void AddTBEKernel(const TBEKernelPtr &kernel); + bool Build(); + + bool Load(const uint8_t *data, const size_t &len); + TBEKernelPtr FindTBEKernel(const std::string &name) const; + + void LoadTBEKernelBinToOpDesc(const std::shared_ptr &op_desc) const; + + const uint8_t *Data() const; + size_t DataSize() const; + + private: + std::unordered_map kernels_; + std::vector buffer_; +}; +} // namespace ge + +#endif // GE_COMMON_TBE_KERNEL_STORE_H_ diff --git a/src/ge/common/thread_pool.cc b/src/ge/common/thread_pool.cc new file mode 100644 index 00000000..a52d4938 --- /dev/null 
+++ b/src/ge/common/thread_pool.cc @@ -0,0 +1,79 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "common/thread_pool.h" + +#include +#include +#include +#include +#include +#include + +#include "register/register_types.h" + +namespace ge { +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ThreadPool::ThreadPool(uint32_t size) : is_stoped_(false) { + idle_thrd_num_ = size < 1 ? 1 : size; + + for (uint32_t i = 0; i < idle_thrd_num_; ++i) { + pool_.emplace_back(ThreadFunc, this); + } +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ThreadPool::~ThreadPool() { + is_stoped_.store(true); + { + std::unique_lock lock{m_lock_}; + cond_var_.notify_all(); + } + + for (std::thread &thd : pool_) { + if (thd.joinable()) { + try { + thd.join(); + } catch (const std::system_error &) { + GELOGW("system_error"); + } catch (...) 
{ + GELOGW("exception"); + } + } + } +} + +void ThreadPool::ThreadFunc(ThreadPool *thread_pool) { + if (thread_pool == nullptr) { + return; + } + while (!thread_pool->is_stoped_) { + std::function task; + { + std::unique_lock lock{thread_pool->m_lock_}; + thread_pool->cond_var_.wait(lock, [thread_pool] { + return thread_pool->is_stoped_.load() || !thread_pool->tasks_.empty(); + }); + if (thread_pool->is_stoped_ && thread_pool->tasks_.empty()) { + return; + } + task = std::move(thread_pool->tasks_.front()); + thread_pool->tasks_.pop(); + } + --thread_pool->idle_thrd_num_; + task(); + ++thread_pool->idle_thrd_num_; + } +} +} // namespace ge diff --git a/src/ge/common/thread_pool.h b/src/ge/common/thread_pool.h new file mode 100644 index 00000000..6a07d61d --- /dev/null +++ b/src/ge/common/thread_pool.h @@ -0,0 +1,80 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_COMMON_THREAD_POOL_H_ +#define GE_COMMON_THREAD_POOL_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "external/ge/ge_api_error_codes.h" +#include "graph/types.h" +#include "common/ge/ge_util.h" + +namespace ge { +using ThreadTask = std::function; + +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY ThreadPool { + public: + explicit ThreadPool(uint32_t size = 4); + ~ThreadPool(); + + template + auto commit(Func &&func, Args &&... args) -> std::future { + GELOGD("commit run task enter."); + if (is_stoped_.load()) { + GELOGE(ge::FAILED, "thread pool has been stopped."); + } + + using RetType = decltype(func(args...)); + auto bind_func = std::bind(std::forward(func), std::forward(args)...); + auto task = ge::MakeShared>(bind_func); + if (task == nullptr) { + GELOGW("Make shared failed."); + } + std::future future = task->get_future(); + { + std::lock_guard lock{m_lock_}; + tasks_.emplace([task]() { (*task)(); }); + } + cond_var_.notify_one(); + GELOGD("commit run task end"); + return future; + } + + static void ThreadFunc(ThreadPool *thread_pool); + + private: + std::vector pool_; + std::queue tasks_; + std::mutex m_lock_; + std::condition_variable cond_var_; + std::atomic is_stoped_; + std::atomic idle_thrd_num_; +}; +} // namespace ge + +#endif // GE_COMMON_THREAD_POOL_H_ diff --git a/src/ge/common/types.cc b/src/ge/common/types.cc new file mode 100644 index 00000000..36091e76 --- /dev/null +++ b/src/ge/common/types.cc @@ -0,0 +1,636 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "framework/common/types.h" + +#include "cce/dnn.h" + +namespace ge { +// dump +const std::string DUMP_MODEL = "model_name"; +const std::string DUMP_ALL_MODEL = "ALL_MODEL_NEED_DUMP_AND_IT_IS_NOT_A_MODEL_NAME"; +const std::string DUMP_STATUS = "status"; +const std::string DUMP_LAYER = "layer"; +const std::string DUMP_FILE_PATH = "path"; +} // namespace ge + +using ge::OpTypeRegistrar; +namespace ge { +const int DEFAULT_FORMAT = static_cast(cce::CC_TENSOR_NCHW); +// Supported public property names +const std::string PROP_OME_START_TIME = "ome_start_time"; // start time +const std::string PROP_OME_DUMP_PATH = "ome_dump_path"; // dump path +const std::string PROP_OME_LOG_PATH = "ome_log_path"; // log path + +// Profile related constant +const uint32_t CCE_PROFILE_ON = 0; +const uint32_t CCE_PROFILE_OFF = 1; +const std::string OME_PROFILE = "OME_PROFILE"; +const std::string CCE_PROFILE = "CCE_PROFILE"; +const std::string RTS_PROFILE = "RTS_PROFILE"; +const std::string PROFILER_JOBCTX = "profiler_jobctx"; +const std::string PROFILER_TARGET_PATH = "profiler_target_path"; +const std::string RTS_PROFILE_PATH = "RTS_PATH"; +const std::string PROFILE_STOP_KEY = "stop"; +const std::string PROFILE_STOP_VALUE = "enable"; +const std::map PROFILE_COMPONENT_MAP{ + {"ome", OME_PROFILE}, {"cce", CCE_PROFILE}, {"runtime", RTS_PROFILE}, +}; +const std::string PROFILE_CONFIG = "config"; + +REGISTER_OPTYPE_DEFINE(DATA, "Data"); +REGISTER_OPTYPE_DEFINE(AIPPDATA, "AippData"); +REGISTER_OPTYPE_DEFINE(CONVOLUTION, "Convolution"); 
+REGISTER_OPTYPE_DEFINE(CORRELATION, "Correlation"); +REGISTER_OPTYPE_DEFINE(CORRELATIONV2, "Correlation_V2"); +REGISTER_OPTYPE_DEFINE(DECONVOLUTION, "Deconvolution"); +REGISTER_OPTYPE_DEFINE(POOLING, "Pooling"); +REGISTER_OPTYPE_DEFINE(ELTWISE, "Eltwise"); +REGISTER_OPTYPE_DEFINE(RELU, "ReLU"); +REGISTER_OPTYPE_DEFINE(RELU6, "ReLU6"); +REGISTER_OPTYPE_DEFINE(SIGMOID, "Sigmoid"); +REGISTER_OPTYPE_DEFINE(ABSVAL, "AbsVal"); +REGISTER_OPTYPE_DEFINE(TANH, "TanH"); +REGISTER_OPTYPE_DEFINE(PRELU, "PReLU"); +REGISTER_OPTYPE_DEFINE(BATCHNORM, "BatchNorm"); +REGISTER_OPTYPE_DEFINE(FUSIONBATCHNORM, "FusionBatchNorm"); +REGISTER_OPTYPE_DEFINE(SCALE, "Scale"); +REGISTER_OPTYPE_DEFINE(FULL_CONNECTION, "FullConnection"); +REGISTER_OPTYPE_DEFINE(SOFTMAX, "Softmax"); +REGISTER_OPTYPE_DEFINE(PLUS, "Plus"); +REGISTER_OPTYPE_DEFINE(ACTIVATION, "Activation"); +REGISTER_OPTYPE_DEFINE(FLATTEN, "Flatten"); +REGISTER_OPTYPE_DEFINE(ADD, "Add"); +REGISTER_OPTYPE_DEFINE(SUB, "Sub"); +REGISTER_OPTYPE_DEFINE(MUL, "Mul"); +REGISTER_OPTYPE_DEFINE(MATMUL, "MatMul"); +REGISTER_OPTYPE_DEFINE(RSQRT, "Rsqrt"); +REGISTER_OPTYPE_DEFINE(BIASADD, "BiasAdd"); +REGISTER_OPTYPE_DEFINE(RESHAPE, "Reshape"); +REGISTER_OPTYPE_DEFINE(REFORMAT, "ReFormat"); +REGISTER_OPTYPE_DEFINE(DEPCONVOLUTION, "ConvolutionDepthwise"); +REGISTER_OPTYPE_DEFINE(DROPOUT, "Dropout"); +REGISTER_OPTYPE_DEFINE(DROPOUTGENMASK, "DropOutGenMask"); +REGISTER_OPTYPE_DEFINE(DROPOUTDOMASK, "DropOutDoMask"); +REGISTER_OPTYPE_DEFINE(CONCAT, "Concat"); +REGISTER_OPTYPE_DEFINE(ROIPOOLING, "ROIPooling"); +REGISTER_OPTYPE_DEFINE(PROPOSAL, "Proposal"); +REGISTER_OPTYPE_DEFINE(FSRDETECTIONOUTPUT, "FSRDetectionOutput"); +REGISTER_OPTYPE_DEFINE(DETECTIONPOSTPROCESS, "Detectpostprocess"); +REGISTER_OPTYPE_DEFINE(LRN, "LRN"); +REGISTER_OPTYPE_DEFINE(TRANSDATA, "TransData"); +REGISTER_OPTYPE_DEFINE(PERMUTE, "Permute"); +REGISTER_OPTYPE_DEFINE(SSDNORMALIZE, "SSDNormalize"); +REGISTER_OPTYPE_DEFINE(SSDPRIORBOX, "SSDPriorBox"); 
+REGISTER_OPTYPE_DEFINE(NETOUTPUT, "NetOutput"); +REGISTER_OPTYPE_DEFINE(SSDDETECTIONOUTPUT, "SSDDetectionOutput"); +REGISTER_OPTYPE_DEFINE(REFINEDETDETECTIONOUTPUT, "RefinedetDetectionOutput"); +REGISTER_OPTYPE_DEFINE(CHANNELAXPY, "ChannelAxpy"); +REGISTER_OPTYPE_DEFINE(PSROIPOOLING, "PSROIPooling"); +REGISTER_OPTYPE_DEFINE(POWER, "Power"); +REGISTER_OPTYPE_DEFINE(POW, "Pow"); +REGISTER_OPTYPE_DEFINE(ROIALIGN, "ROIAlign"); +REGISTER_OPTYPE_DEFINE(PYTHON, "Python"); +REGISTER_OPTYPE_DEFINE(FREESPACEEXTRACT, "FreespaceExtract"); +REGISTER_OPTYPE_DEFINE(SPATIALTF, "SpatialTransform"); +REGISTER_OPTYPE_DEFINE(SHAPE, "Shape"); +REGISTER_OPTYPE_DEFINE(SHAPEN, "ShapeN"); +REGISTER_OPTYPE_DEFINE(ARGMAX, "ArgMax"); +REGISTER_OPTYPE_DEFINE(GATHERND, "GatherNd"); +REGISTER_OPTYPE_DEFINE(GATHER, "Gather"); +REGISTER_OPTYPE_DEFINE(REALDIV, "RealDiv"); +REGISTER_OPTYPE_DEFINE(PACK, "Pack"); +REGISTER_OPTYPE_DEFINE(SLICE, "Slice"); +REGISTER_OPTYPE_DEFINE(FLOORDIV, "FloorDiv"); +REGISTER_OPTYPE_DEFINE(SQUEEZE, "Squeeze"); +REGISTER_OPTYPE_DEFINE(STRIDEDSLICE, "StridedSlice"); +REGISTER_OPTYPE_DEFINE(RANGE, "Range"); +REGISTER_OPTYPE_DEFINE(RPNPROPOSALS, "GenerateRpnProposals"); +REGISTER_OPTYPE_DEFINE(DECODEBBOX, "DecodeBBox"); +REGISTER_OPTYPE_DEFINE(PAD, "Pad"); +REGISTER_OPTYPE_DEFINE(PADV2, "PadV2"); +REGISTER_OPTYPE_DEFINE(MIRRORPAD, "MirrorPad"); +REGISTER_OPTYPE_DEFINE(TILE, "Tile"); +REGISTER_OPTYPE_DEFINE(SIZE, "Size"); +REGISTER_OPTYPE_DEFINE(CLIPBOXES, "Clipboxes"); +REGISTER_OPTYPE_DEFINE(FASTRCNNPREDICTIONS, "FastrcnnPredictions"); +REGISTER_OPTYPE_DEFINE(SPLIT, "Split"); +REGISTER_OPTYPE_DEFINE(SPLITV, "SplitV"); +REGISTER_OPTYPE_DEFINE(EXPANDDIMS, "ExpandDims"); +REGISTER_OPTYPE_DEFINE(EMPTY, "Empty"); +REGISTER_OPTYPE_DEFINE(MEAN, "Mean"); +REGISTER_OPTYPE_DEFINE(GREATER, "Greater"); +REGISTER_OPTYPE_DEFINE(SWITCH, "Switch"); +REGISTER_OPTYPE_DEFINE(SWITCHN, "SwitchN"); +REGISTER_OPTYPE_DEFINE(MERGE, "Merge"); +REGISTER_OPTYPE_DEFINE(TRANSPOSE, "Transpose"); 
+REGISTER_OPTYPE_DEFINE(TRANSPOSED, "TransposeD"); +REGISTER_OPTYPE_DEFINE(CAST, "Cast"); +REGISTER_OPTYPE_DEFINE(REGION, "Region"); +REGISTER_OPTYPE_DEFINE(YOLO, "Yolo"); +REGISTER_OPTYPE_DEFINE(YOLODETECTIONOUTPUT, "YoloDetectionOutput"); +REGISTER_OPTYPE_DEFINE(FILL, "Fill"); +REGISTER_OPTYPE_DEFINE(REVERSE, "Reverse"); +REGISTER_OPTYPE_DEFINE(UNPACK, "Unpack"); +REGISTER_OPTYPE_DEFINE(YOLO2REORG, "Yolo2Reorg"); +REGISTER_OPTYPE_DEFINE(REDUCESUM, "ReduceSum"); +REGISTER_OPTYPE_DEFINE(SUM, "Sum"); +REGISTER_OPTYPE_DEFINE(CONSTANT, "Const"); +REGISTER_OPTYPE_DEFINE(RESIZEBILINEAR, "ResizeBilinear"); +REGISTER_OPTYPE_DEFINE(RESIZEBILINEARGRAD, "ResizeBilinearGrad"); +REGISTER_OPTYPE_DEFINE(MAXIMUM, "Maximum"); +REGISTER_OPTYPE_DEFINE(FRAMEWORKOP, "FrameworkOp"); +REGISTER_OPTYPE_DEFINE(ARG, "_Arg"); +REGISTER_OPTYPE_DEFINE(FUSEDBATCHNORMGRAD, "FusedBatchNormGrad"); +REGISTER_OPTYPE_DEFINE(LSTM, "LSTM"); +REGISTER_OPTYPE_DEFINE(HIGHWAY, "HighWay"); +REGISTER_OPTYPE_DEFINE(RNN, "RNN"); +REGISTER_OPTYPE_DEFINE(ATTENTIONDECODER, "AttentionDecoder"); +REGISTER_OPTYPE_DEFINE(LOGICAL_NOT, "LogicalNot"); +REGISTER_OPTYPE_DEFINE(LOGICAL_AND, "LogicalAnd"); +REGISTER_OPTYPE_DEFINE(LOGICAL_OR, "LogicalOr"); +REGISTER_OPTYPE_DEFINE(EQUAL, "Equal"); +REGISTER_OPTYPE_DEFINE(NOTEQUAL, "NotEqual"); +REGISTER_OPTYPE_DEFINE(INTERP, "Interp"); +REGISTER_OPTYPE_DEFINE(SHUFFLECHANNEL, "ShuffleChannel"); +REGISTER_OPTYPE_DEFINE(AIPP, "Aipp"); +REGISTER_OPTYPE_DEFINE(MULTISHAPE, "MultiShape"); +REGISTER_OPTYPE_DEFINE(RECIPROCAL, "Reciprocal"); +REGISTER_OPTYPE_DEFINE(SELU, "Selu"); +REGISTER_OPTYPE_DEFINE(ELU, "Elu"); +REGISTER_OPTYPE_DEFINE(ACOSH, "Acosh"); +REGISTER_OPTYPE_DEFINE(ASINH, "Asinh"); +REGISTER_OPTYPE_DEFINE(MINIMUM, "Minimum"); +REGISTER_OPTYPE_DEFINE(CLIP, "Clip"); +REGISTER_OPTYPE_DEFINE(L2NORMALIZE, "L2Normalize"); +REGISTER_OPTYPE_DEFINE(CROPANDRESIZE, "CropAndResize"); +REGISTER_OPTYPE_DEFINE(UNUSEDCONST, "UnusedConst"); +REGISTER_OPTYPE_DEFINE(SPARSETODENSE, 
"SparseToDense"); +REGISTER_OPTYPE_DEFINE(NONMAXSUPPRESSION, "NonMaxSuppression"); +REGISTER_OPTYPE_DEFINE(TOPKV2, "TopKV2"); +REGISTER_OPTYPE_DEFINE(INVERTPERMUTATION, "InvertPermutation"); +REGISTER_OPTYPE_DEFINE(MULTINOMIAL, "Multinomial"); +REGISTER_OPTYPE_DEFINE(REVERSESEQUENCE, "ReverseSequence"); +REGISTER_OPTYPE_DEFINE(REDUCEPROD, "ReduceProd"); +REGISTER_OPTYPE_DEFINE(REDUCEMAX, "ReduceMax"); +REGISTER_OPTYPE_DEFINE(REDUCEMIN, "ReduceMin"); +REGISTER_OPTYPE_DEFINE(EXTRACTIMAGEPATCHES, "ExtractImagePatches"); +REGISTER_OPTYPE_DEFINE(SQRT, "Sqrt"); +REGISTER_OPTYPE_DEFINE(REDUCEALL, "ReduceAll"); +REGISTER_OPTYPE_DEFINE(RESIZENEARESTNEIGHBOR, "ResizeNearestNeighbor"); +REGISTER_OPTYPE_DEFINE(SPACETOBATCHND, "SpaceToBatchND"); +REGISTER_OPTYPE_DEFINE(BATCHTOSPACEND, "BatchToSpaceND"); +REGISTER_OPTYPE_DEFINE(ASSERT, "Assert"); +REGISTER_OPTYPE_DEFINE(GREATEREQUAL, "GreaterEqual"); +REGISTER_OPTYPE_DEFINE(FLOOR, "Floor"); +REGISTER_OPTYPE_DEFINE(RANDOMUNIFORM, "RandomUniform"); +REGISTER_OPTYPE_DEFINE(BATCHMATMUL, "BatchMatMul"); +REGISTER_OPTYPE_DEFINE(SPACETODEPTH, "SpaceToDepth"); +REGISTER_OPTYPE_DEFINE(DEPTHTOSPACE, "DepthToSpace"); +REGISTER_OPTYPE_DEFINE(RINT, "Rint"); +REGISTER_OPTYPE_DEFINE(ATAN, "Atan"); +REGISTER_OPTYPE_DEFINE(ATAN2, "Atan2"); +REGISTER_OPTYPE_DEFINE(ATANH, "Atanh"); +REGISTER_OPTYPE_DEFINE(ACOS, "Acos"); +REGISTER_OPTYPE_DEFINE(ASIN, "Asin"); +REGISTER_OPTYPE_DEFINE(NEG, "Neg"); +REGISTER_OPTYPE_DEFINE(LOG, "Log"); +REGISTER_OPTYPE_DEFINE(TAN, "Tan"); +REGISTER_OPTYPE_DEFINE(ROUND, "Round"); +REGISTER_OPTYPE_DEFINE(UPSAMPLE, "Upsample"); +REGISTER_OPTYPE_DEFINE(FLOORMOD, "FloorMod"); +REGISTER_OPTYPE_DEFINE(LESS, "Less"); +REGISTER_OPTYPE_DEFINE(LESSEQUAL, "LessEqual"); +REGISTER_OPTYPE_DEFINE(ONEHOT, "OneHot"); +REGISTER_OPTYPE_DEFINE(REFSWITCH, "RefSwitch"); +REGISTER_OPTYPE_DEFINE(REFMERGE, "RefMerge"); +REGISTER_OPTYPE_DEFINE(ENTER, "Enter"); +REGISTER_OPTYPE_DEFINE(REFENTER, "RefEnter"); +REGISTER_OPTYPE_DEFINE(LOOPCOND, 
"LoopCond"); +REGISTER_OPTYPE_DEFINE(NEXTITERATION, "NextIteration"); +REGISTER_OPTYPE_DEFINE(REFNEXTITERATION, "RefNextIteration"); +REGISTER_OPTYPE_DEFINE(EXIT, "Exit"); +REGISTER_OPTYPE_DEFINE(REFEXIT, "RefExit"); +REGISTER_OPTYPE_DEFINE(CONTROLTRIGGER, "ControlTrigger"); +REGISTER_OPTYPE_DEFINE(ZEROSLIKE, "ZerosLike"); +REGISTER_OPTYPE_DEFINE(EXP, "Exp"); +REGISTER_OPTYPE_DEFINE(WHERE, "Where"); +REGISTER_OPTYPE_DEFINE(FAKEQUANTWITHMINMAXVARS, "FakeQuantWithMinMaxVars"); +REGISTER_OPTYPE_DEFINE(SOFTPLUS, "Softplus"); +REGISTER_OPTYPE_DEFINE(SOFTSIGN, "Softsign"); +REGISTER_OPTYPE_DEFINE(COSH, "Cosh"); +REGISTER_OPTYPE_DEFINE(SINH, "Sinh"); +REGISTER_OPTYPE_DEFINE(SQUAREDDIFFERENCE, "SquaredDifference"); +REGISTER_OPTYPE_DEFINE(REQUIREDSPACETOBATCHPADDINGS, "RequiredSpaceToBatchPaddings"); // for retinanet scope fusion +REGISTER_OPTYPE_DEFINE(SSDPOSTPROCESSOR, "SSDPostProcessor"); +REGISTER_OPTYPE_DEFINE(RETINANETBOXES, "RetinanetBoxes"); +REGISTER_OPTYPE_DEFINE(RETINAMULTIANCHORS, "RetinaMultiAnchor"); +REGISTER_OPTYPE_DEFINE(RETINANETCLIPPEDBOXES, "RetinanetClippedBoxes"); +REGISTER_OPTYPE_DEFINE(RETINANETFILTEREDDETECTIONS, "RetinanetFilteredDetections"); +REGISTER_OPTYPE_DEFINE(RETINANETPOSTPROCESSOR, "RetinanetPostProcessor"); +REGISTER_OPTYPE_DEFINE(RETINANETANCHORS, "RetinanetAnchors"); +REGISTER_OPTYPE_DEFINE(FASTERRCNNMAP, "FasterRCNNMap"); +REGISTER_OPTYPE_DEFINE(FASTERRCNNMAP1, "FasterRCNNMap1"); +REGISTER_OPTYPE_DEFINE(FASTERRCNNSECONDSTAGEPOSTPROCESSOR, "FasterRCNNSecondStagePostprocessor"); +REGISTER_OPTYPE_DEFINE(FASTERRCNNROIINTERPOOLING, "FasterRCNNROIInterPooling"); +REGISTER_OPTYPE_DEFINE(FASTERRCNNFIRSTSTAGEPOSTPROCESSOR, "FasterRCNNFirstStagePostprocessor"); +REGISTER_OPTYPE_DEFINE(FASTERRCNNGRIDANCHORGENERATOR, "FasterRCNNGridAnchorGenerator"); +REGISTER_OPTYPE_DEFINE(ROIINTERPOOLING, "ROIInterPooling"); +REGISTER_OPTYPE_DEFINE(FASTERRCNNCLIPTOWINDOW, "FasterRCNNClipToWindow"); +REGISTER_OPTYPE_DEFINE(EMBEDLOOKUP, "EmbedLookup"); 
+REGISTER_OPTYPE_DEFINE(HASHLOOKUP, "HashLookup"); +REGISTER_OPTYPE_DEFINE(LSH_PROJ, "LshProject"); +REGISTER_OPTYPE_DEFINE(SVDF, "SVDF"); +REGISTER_OPTYPE_DEFINE(SSDANCHORGENERATOR, "SSDAnchorGenerator"); +REGISTER_OPTYPE_DEFINE(IDENTITY, "Identity"); +REGISTER_OPTYPE_DEFINE(IDENTITYN, "IdentityN"); +REGISTER_OPTYPE_DEFINE(PLACEHOLDERWITHDEFAULT, "PlaceholderWithDefault"); +REGISTER_OPTYPE_DEFINE(SELECT, "Select"); +REGISTER_OPTYPE_DEFINE(GETSPAN, "GetSpan"); +REGISTER_OPTYPE_DEFINE(STOPGRADIENT, "StopGradient"); +REGISTER_OPTYPE_DEFINE(PREVENTGRADIENT, "PreventGradient"); +REGISTER_OPTYPE_DEFINE(GUARANTEECONST, "GuaranteeConst"); +REGISTER_OPTYPE_DEFINE(BROADCASTGRADIENTARGS, "BroadcastGradientArgs") +REGISTER_OPTYPE_DEFINE(BROADCASTARGS, "BroadcastArgs") +REGISTER_OPTYPE_DEFINE(CONFUSIONMATRIX, "ConfusionMatrix"); +REGISTER_OPTYPE_DEFINE(RANK, "Rank"); +REGISTER_OPTYPE_DEFINE(PLACEHOLDER, "PlaceHolder"); +REGISTER_OPTYPE_DEFINE(END, "End"); +REGISTER_OPTYPE_DEFINE(BASICLSTMCELL, "BasicLSTMCell"); +REGISTER_OPTYPE_DEFINE(GETNEXT, "GetNext"); +REGISTER_OPTYPE_DEFINE(INITDATA, "InitData"); + +// Ann special operator +REGISTER_OPTYPE_DEFINE(ANN_MEAN, "AnnMean"); +REGISTER_OPTYPE_DEFINE(ANN_CONVOLUTION, "AnnConvolution"); +REGISTER_OPTYPE_DEFINE(ANN_DEPCONVOLUTION, "AnnDepthConv"); +REGISTER_OPTYPE_DEFINE(ANN_FULLCONNECTION, "AnnFullConnection"); +REGISTER_OPTYPE_DEFINE(ANN_NETOUTPUT, "AnnNetOutput"); +REGISTER_OPTYPE_DEFINE(ANN_DATA, "AnnData"); +REGISTER_OPTYPE_DEFINE(ANN_RESHAPE, "AnnReshape"); +REGISTER_OPTYPE_DEFINE(ANN_ADD, "AnnAdd"); +REGISTER_OPTYPE_DEFINE(ANN_MUL, "AnnMul"); +REGISTER_OPTYPE_DEFINE(ANN_SUB, "AnnSub"); +REGISTER_OPTYPE_DEFINE(ANN_DIV, "AnnDiv"); +REGISTER_OPTYPE_DEFINE(ANN_DEQUANTIZE, "AnnDequant"); +REGISTER_OPTYPE_DEFINE(ANN_QUANTIZE, "AnnQuant"); +REGISTER_OPTYPE_DEFINE(ANN_PAD, "AnnPad"); +REGISTER_OPTYPE_DEFINE(ANN_RESIZE_BILINEAR, "AnnResizeBilinear"); + +// Training operator +REGISTER_OPTYPE_DEFINE(GATHERV2, "GatherV2"); 
+REGISTER_OPTYPE_DEFINE(CONVGRADFILTER, "Conv2DBackpropFilter"); +REGISTER_OPTYPE_DEFINE(CONV2D, "Conv2D"); +REGISTER_OPTYPE_DEFINE(CONV2DBACKPROPINPUT, "Conv2DBackpropInput"); +REGISTER_OPTYPE_DEFINE(FUSEDBATCHNORM, "FusedBatchNorm"); +REGISTER_OPTYPE_DEFINE(BIASADDGRAD, "BiasAddGrad"); +REGISTER_OPTYPE_DEFINE(ACTIVATIONGRAD, "ReluGrad"); +REGISTER_OPTYPE_DEFINE(MAXPOOLWITHARGMAX, "MaxPoolWithArgmax"); +REGISTER_OPTYPE_DEFINE(MAXPOOLGRADWITHARGMAX, "MaxPoolGradWithArgmax"); +REGISTER_OPTYPE_DEFINE(SPARSESOFTMAXCROSSENTROPYWITHLOGITS, "SparseSoftmaxCrossEntropyWithLogits"); +REGISTER_OPTYPE_DEFINE(SNAPSHOT, "Snapshot"); +REGISTER_OPTYPE_DEFINE(VAR, "Var"); +REGISTER_OPTYPE_DEFINE(MEANGRAD, "MeanGrad"); +REGISTER_OPTYPE_DEFINE(TRANSLATE, "Translate"); +REGISTER_OPTYPE_DEFINE(ADDN, "AddN"); +REGISTER_OPTYPE_DEFINE(L2LOSS, "L2Loss"); +REGISTER_OPTYPE_DEFINE(MULTIPLY, "Multiply"); +REGISTER_OPTYPE_DEFINE(HUBERLOSSGRAD, "HuberLossGrad"); +REGISTER_OPTYPE_DEFINE(HUBERLOSS, "HuberLoss"); +REGISTER_OPTYPE_DEFINE(NEGATIVE, "Negative"); +REGISTER_OPTYPE_DEFINE(SSDCAST, "SSDCast"); +REGISTER_OPTYPE_DEFINE(SPARSESOFTMAXCROSSENTROPY, "SsdSparseSoftmaxCrossEntropy"); +REGISTER_OPTYPE_DEFINE(SPARSESOFTMAXCROSSENTROPYGRAD, "SsdSparseSoftmaxCrossEntropyGrad"); +REGISTER_OPTYPE_DEFINE(SSDSQUEEZEFUSION, "SsdSqueezeFusion"); +REGISTER_OPTYPE_DEFINE(CONCATFOUR2FIVE, "ConcatFour2Five"); +REGISTER_OPTYPE_DEFINE(CONCATFIVE2FOUR, "ConcatFive2Four"); +REGISTER_OPTYPE_DEFINE(SSDREALDIVTILEMUL, "SSDRealdivTileMul"); +REGISTER_OPTYPE_DEFINE(SSDSUMMULREALDIVMEAN, "SSDSumMulRealdivMean"); + +REGISTER_OPTYPE_DEFINE(VARIABLEV2, "VariableV2"); +REGISTER_OPTYPE_DEFINE(VARHANDLEOP, "VarHandleOp"); +REGISTER_OPTYPE_DEFINE(TEMPORARYVARIABLE, "TemporaryVariable"); +REGISTER_OPTYPE_DEFINE(DESTROYTEMPORARYVARIABLE, "DestroyTemporaryVariable"); +REGISTER_OPTYPE_DEFINE(VARIABLE, "Variable"); +REGISTER_OPTYPE_DEFINE(ASSIGN, "Assign"); +REGISTER_OPTYPE_DEFINE(ASSIGNVARIABLEOP, "AssignVariableOp"); 
+REGISTER_OPTYPE_DEFINE(ASSIGNADD, "AssignAdd"); +REGISTER_OPTYPE_DEFINE(ASSIGNADDVARIABLEOP, "AssignAddVariableOp"); +REGISTER_OPTYPE_DEFINE(ASSIGNSUB, "AssignSub"); +REGISTER_OPTYPE_DEFINE(ASSIGNSUBVARIABLEOP, "AssignSubVariableOp"); +REGISTER_OPTYPE_DEFINE(APPLYMOMENTUM, "ApplyMomentum"); +REGISTER_OPTYPE_DEFINE(RESOURCEAPPLYMOMENTUM, "ResourceApplyMomentum"); +REGISTER_OPTYPE_DEFINE(SGD, "SGD"); +REGISTER_OPTYPE_DEFINE(NOOP, "NoOp"); +REGISTER_OPTYPE_DEFINE(READVARIABLEOP, "ReadVariableOp"); +REGISTER_OPTYPE_DEFINE(CONSTANTOP, "Constant"); +REGISTER_OPTYPE_DEFINE(DEPTHWISECONV2DBACKPROPFILTER, "DepthwiseConv2dNativeBackpropFilter"); +REGISTER_OPTYPE_DEFINE(DEPTHWISECONV2DBACKPORPINPUT, "DepthwiseConv2dNativeBackpropInput"); +REGISTER_OPTYPE_DEFINE(DEPTHWISECONV2DFORWARDNATIVE, "DepthwiseConv2dNative"); +REGISTER_OPTYPE_DEFINE(DROPOUTGRAD, "DropOutGrad"); +REGISTER_OPTYPE_DEFINE(APPLYRMSPROPMIXEDPRECISION, "apply_rms_prop_mixed_precision"); +REGISTER_OPTYPE_DEFINE(APPLYRMSPROP, "ApplyRMSProp"); +REGISTER_OPTYPE_DEFINE(RELU6GRAD, "Relu6Grad"); +REGISTER_OPTYPE_DEFINE(AVGPOOLGRAD, "AvgPoolGrad"); +REGISTER_OPTYPE_DEFINE(CONCATV2, "ConcatV2"); +REGISTER_OPTYPE_DEFINE(CONCATOFFSET, "ConcatOffset"); +REGISTER_OPTYPE_DEFINE(LAYERNORMGRAD, "LayerNormGrad"); +REGISTER_OPTYPE_DEFINE(LAYERNORM, "LayerNorm"); +REGISTER_OPTYPE_DEFINE(LARS, "Lars"); +REGISTER_OPTYPE_DEFINE(DYNAMICSTITCH, "DynamicStitch"); + +/***************************************************/ +REGISTER_OPTYPE_DEFINE(SQUARE, "Square"); +REGISTER_OPTYPE_DEFINE(HCOMBROADCAST, "HcomBroadcast"); +REGISTER_OPTYPE_DEFINE(HCOMALLGATHER, "HcomAllGather"); +REGISTER_OPTYPE_DEFINE(HCOMALLREDUCE, "HcomAllReduce"); +REGISTER_OPTYPE_DEFINE(HCOMREDUCESCATTER, "HcomReduceScatter"); +REGISTER_OPTYPE_DEFINE(HCOMSEND, "HcomSend"); +REGISTER_OPTYPE_DEFINE(HCOMRECEIVE, "HcomReceive"); + +REGISTER_OPTYPE_DEFINE(VARASSIGN, "VarAssign"); +REGISTER_OPTYPE_DEFINE(VARISINITIALIZEDOP, "VarIsInitializedOp"); 
+REGISTER_OPTYPE_DEFINE(LogTimeStamp, "LogTimeStamp"); +REGISTER_OPTYPE_DEFINE(ISVARIABLEINITIALIZED, "IsVariableInitialized"); +REGISTER_OPTYPE_DEFINE(STREAMSWITCH, "StreamSwitch"); +REGISTER_OPTYPE_DEFINE(STREAMSWITCHN, "StreamSwitchN"); +REGISTER_OPTYPE_DEFINE(STREAMACTIVE, "StreamActive"); +REGISTER_OPTYPE_DEFINE(MEMCPYASYNC, "MemcpyAsync"); +REGISTER_OPTYPE_DEFINE(STREAMMERGE, "StreamMerge"); +REGISTER_OPTYPE_DEFINE(ENDGRAPH, "EndGraph"); +REGISTER_OPTYPE_DEFINE(SEND, "Send"); +REGISTER_OPTYPE_DEFINE(RECV, "Recv"); + +REGISTER_OPTYPE_DEFINE(ATOMICADDRCLEAN, "AtomicAddrClean"); + +REGISTER_OPTYPE_DEFINE(ABS_GRAD, "AbsGrad"); +REGISTER_OPTYPE_DEFINE(ACCUMULATE_N_V2, "AccumulateNV2"); +REGISTER_OPTYPE_DEFINE(ACOS_GRAD, "AcosGrad"); +REGISTER_OPTYPE_DEFINE(ACOSH_GRAD, "AcoshGrad"); +REGISTER_OPTYPE_DEFINE(ANY, "Any"); +REGISTER_OPTYPE_DEFINE(APPROXIMATE_EQUAL, "ApproximateEqual"); +REGISTER_OPTYPE_DEFINE(ASIN_GRAD, "AsinGrad"); +REGISTER_OPTYPE_DEFINE(ASINH_GRAD, "AsinhGrad"); +REGISTER_OPTYPE_DEFINE(ATAN_GRAD, "AtanGrad"); +REGISTER_OPTYPE_DEFINE(BROADCAST_TO, "BroadcastTo"); +REGISTER_OPTYPE_DEFINE(ELU_GRAD, "EluGrad"); +REGISTER_OPTYPE_DEFINE(ADD_V2, "AddV2"); +REGISTER_OPTYPE_DEFINE(DATAFORMATDIMMAP, "DataFormatDimMap"); +REGISTER_OPTYPE_DEFINE(DATAFORMATVECPERMUTE, "DataFormatVecPermute"); +REGISTER_OPTYPE_DEFINE(BESSELI0E, "BesselI0e"); +REGISTER_OPTYPE_DEFINE(BESSELI1E, "BesselI1e"); +REGISTER_OPTYPE_DEFINE(APPLYADADELTA, "ApplyAdadelta"); +REGISTER_OPTYPE_DEFINE(APPLYADAGRAD, "ApplyAdagrad"); +REGISTER_OPTYPE_DEFINE(APPLYADAGRADDA, "ApplyAdagradDA"); +REGISTER_OPTYPE_DEFINE(APPLYADAM, "ApplyAdam"); +REGISTER_OPTYPE_DEFINE(APPLYADAMAX, "ApplyAdaMax"); +REGISTER_OPTYPE_DEFINE(APPLYADDSIGN, "ApplyAddSign"); +REGISTER_OPTYPE_DEFINE(APPLYCENTEREDRMSPROP, "ApplyCenteredRMSProp"); +REGISTER_OPTYPE_DEFINE(APPLYFTRL, "ApplyFtrl"); +REGISTER_OPTYPE_DEFINE(APPLYFTRLV2, "ApplyFtrlV2"); +REGISTER_OPTYPE_DEFINE(APPLYGRADIENTDESCENT, "ApplyGradientDescent"); 
+REGISTER_OPTYPE_DEFINE(APPLYPOWERSIGN, "ApplyPowerSign"); +REGISTER_OPTYPE_DEFINE(APPLYPROXIMALADAGRAD, "ApplyProximalAdagrad"); +REGISTER_OPTYPE_DEFINE(APPLYPROXIMALGRADIENTDESCENT, "ApplyProximalGradientDescent"); +REGISTER_OPTYPE_DEFINE(DEQUANTIZE, "Dequantize"); + +REGISTER_OPTYPE_DEFINE(FOCAL_LOSS, "FocalLoss"); +REGISTER_OPTYPE_DEFINE(FOCAL_LOSS_GRAD, "FocalLossGrad"); +REGISTER_OPTYPE_DEFINE(SMOOTHL1_LOSS, "SmoothL1Loss"); +REGISTER_OPTYPE_DEFINE(SMOOTHL1_LOSS_grad, "SmoothL1LossGrad"); +REGISTER_OPTYPE_DEFINE(REDUCEMEAN, "ReduceMean"); +REGISTER_OPTYPE_DEFINE(CONCAT_V2, "ConcatV2"); +REGISTER_OPTYPE_DEFINE(ONEHOT_V2, "OneHotV2"); +REGISTER_OPTYPE_DEFINE(SLICE_V2, "SliceV2"); +REGISTER_OPTYPE_DEFINE(TILE_V2, "TileV2"); +REGISTER_OPTYPE_DEFINE(SUM_V2, "SumV2"); +// Common type when the operator has the same name +REGISTER_OPTYPE_DEFINE(DETECTIONOUTPUT, "DetectionOutput"); +// Custom operator +REGISTER_OPTYPE_DEFINE(CUSTOMOP, "CustomOp"); +REGISTER_OPTYPE_DEFINE(CUSTOMOP_NCHW, "CustomOpNchw"); +REGISTER_OPTYPE_DEFINE(CUSTOMOP_NHWC, "CustomOpNhwc"); +REGISTER_OPTYPE_DEFINE(CUSTOMOP_NC1HWC0, "CustomOpNc1hwc0"); + +// Depthwise 4d_2_6d,6d_2_4d +REGISTER_OPTYPE_DEFINE(DEPTHWISEWEIGHT4D26D, "depthwise_weight_4d_2_6d"); +REGISTER_OPTYPE_DEFINE(DEPTHWISEWEIGHT6D24D, "depthwise_weight_6d_2_4d"); + +REGISTER_OPTYPE_DEFINE(SQRTGRAD, "SqrtGrad"); +REGISTER_OPTYPE_DEFINE(SIGMOIDGRAD, "SigmoidGrad"); + +const std::string MODEL_ATTR_TASKS = "tasks"; +const std::string MODEL_ATTR_TASK_GEN_BASE_ADDR = "task_gen_base_addr"; +const std::string MODEL_ATTR_TASK_GEN_WEIGHT_ADDR = "task_gen_weight_addr"; +const std::string MODEL_ATTR_FUSION_MODEL_DEF = "fm"; + +const int MODEL_MAX_SIZE = INT32_MAX; // Max size of 2 GB minus 1 byte. +const uint64_t FILE_HEADER_MAX_SIZE = 3221225472; // Max size of 3 GB. + +#if !defined(__ANDROID__) && !defined(ANDROID) +const uint64_t ALLOC_MEMORY_MAX_SIZE = 8589934592; // Max size of 8 GB. 
+#else +const uint64_t ALLOC_MEMORY_MAX_SIZE = 536870912; // Max size of 512M. +#endif + +// Magic number of model file +const uint32_t MODEL_FILE_MAGIC_NUM = 0x444F4D49; // magic number + +// Model head length +const uint32_t MODEL_FILE_HEAD_LEN = 256; + +// Input node type +const std::string INPUT_TYPE = "Input"; + +// AIPP label, label AIPP conv operator +const std::string AIPP_CONV_FLAG = "Aipp_Conv_Flag"; + +// AIPP label, label aipp data operator +const std::string AIPP_DATA_FLAG = "Aipp_Data_Flag"; + +// Record the w dimension of model input corresponding to dynamic AIPP +const std::string AIPP_RELATED_DATA_DIM_W = "aipp_related_data_dim_w"; + +// Record the H dimension of model input corresponding to dynamic AIPP +const std::string AIPP_RELATED_DATA_DIM_H = "aipp_related_data_dim_h"; + +// The tag of the data operator. Mark this input to the dynamic AIPP operator +const std::string INPUT_TO_DYNAMIC_AIPP = "input_to_dynamic_aipp"; + +// DATA node type +const std::string DATA_TYPE = "Data"; + +// DATA node type +const std::string AIPP_DATA_TYPE = "AippData"; + +// Frame operator type +const std::string FRAMEWORK_OP_TYPE = "FrameworkOp"; + +// Data node type +const std::string ANN_DATA_TYPE = "AnnData"; +const std::string ANN_NETOUTPUT_TYPE = "AnnNetOutput"; +const std::string ANN_DEPTHCONV_TYPE = "AnnDepthConv"; +const std::string ANN_CONV_TYPE = "AnnConvolution"; +const std::string ANN_FC_TYPE = "AnnFullConnection"; +// Convolution node type +const std::string NODE_NAME_NET_OUTPUT = "Node_Output"; + +const std::string NODE_NAME_END_GRAPH = "Node_EndGraph"; + +// Convolution node type +const std::string OP_TYPE_CONVOLUTION = "Convolution"; +// Add convolution node name to AIPP +const std::string AIPP_CONV_OP_NAME = "aipp_conv_op"; +// Operator configuration item separator +const std::string OP_CONF_DELIMITER = ":"; + +// attr value name +const std::string ATTR_NAME_VALUE1 = "value1"; + +// attr value name, 6d_2_4d C +const std::string ATTR_NAME_INPUT_CVALUE = 
"input_cvalue"; + +// alpha default value +const float ALPHA_DEFAULT_VALUE = 1.0; + +// beta default value +const float BETA_DEFAULT_VALUE = 0.0; + +// coef default value +const float COEF_DEFAULT_VALUE = 0.0; + +// Relu6 coef value +const float RELU6_COEF = 6.0; + +// stride default value +const uint32_t STRIDE_DEFAULT_VALUE = 1; + +// pad default value +const uint32_t PAD_DEFAULT_VALUE = 0; + +// dilation default value +const int DILATION_DEFAULT_VALUE = 1; + +// kernel default value +const uint32_t KERNEL_DEFAULT_VALUE = 0; + +// defaule convolution group size +const uint32_t DEFAULT_CONV_GROUP = 1; + +// Default deconvolution adj +const uint32_t DEFAULT_DECONV_ADJ = 0; + +// Represents value 1 +const uint32_t NUM_ONE = 1; + +// spatial dim size default value +const int32_t SPATIAL_DIM_DEFAULT_SIZE = 2; + +// dim extended default value +const int32_t DIM_DEFAULT_VALUE = 1; + +// The first weight list in opdef is filter +const int32_t WEIGHT_FILTER_INDEX = 0; + +// The second weight list in opdef is bias +const int32_t WEIGHT_BIAS_INDEX = 1; + +const int32_t TENSOR_ND_SUPPORT_SIZE = 8; + +// NCHW index default value +const uint32_t NCHW_DIM_N = 0; +const uint32_t NCHW_DIM_C = 1; +const uint32_t NCHW_DIM_H = 2; +const uint32_t NCHW_DIM_W = 3; + +// KCHW index default value +const uint32_t KCHW_DIM_K = 0; +const uint32_t KCHW_DIM_C = 1; +const uint32_t KCHW_DIM_H = 2; +const uint32_t KCHW_DIM_W = 3; + +// HWCK index default value +const uint32_t HWCK_DIM_H = 0; +const uint32_t HWCK_DIM_W = 1; +const uint32_t HWCK_DIM_C = 2; +const uint32_t HWCK_DIM_K = 3; + +// NHWC index default value +const uint32_t NHWC_DIM_N = 0; +const uint32_t NHWC_DIM_H = 1; +const uint32_t NHWC_DIM_W = 2; +const uint32_t NHWC_DIM_C = 3; + +// CHWN index default value +const uint32_t CHWN_DIM_N = 3; +const uint32_t CHWN_DIM_C = 0; +const uint32_t CHWN_DIM_H = 1; +const uint32_t CHWN_DIM_W = 2; + +// CHW index default value +const uint32_t CHW_DIM_C = 0; +const uint32_t CHW_DIM_H = 1; +const 
uint32_t CHW_DIM_W = 2; + +// HWC index default value +const uint32_t HWC_DIM_H = 0; +const uint32_t HWC_DIM_W = 1; +const uint32_t HWC_DIM_C = 2; +// Pad index default value +const uint32_t PAD_H_HEAD = 0; +const uint32_t PAD_H_TAIL = 1; +const uint32_t PAD_W_HEAD = 2; +const uint32_t PAD_W_TAIL = 3; + +// window index default value +const uint32_t WINDOW_H = 0; +const uint32_t WINDOW_W = 1; + +// stride index default value +const uint32_t STRIDE_H = 0; +const uint32_t STRIDE_W = 1; + +// dilation index default value +const uint32_t DILATION_H = 0; +const uint32_t DILATION_W = 1; + +// the num of XRBG channel +const uint32_t XRGB_CHN_NUM = 4; + +// global pooling default value +const bool DEFAULT_GLOBAL_POOLING = false; + +const uint32_t MODEL_VERSION = 0x10000000; /**< Model version 1.0 */ + +// Eltwise's input size +const int ELTWISE_MIN_INPUT_SIZE = 2; + +// flowctrl +const std::string NODE_NAME_STREAM_SWITCH = "IteratorCtrl_StreamSwitch"; +const std::string NODE_NAME_STREAM_ACTIVE = "IteratorCtrl_StreamActive"; +const std::string NODE_NAME_FLOWCTRL_LOOP_PER_ITER = "npu_runconfig/iterations_per_loop"; +const std::string NODE_NAME_FLOWCTRL_LOOP_COND = "npu_runconfig/loop_cond"; +const std::string NODE_NAME_FLOWCTRL_LOOP_INCREMENT = "npu_runconfig/one"; +const std::string NODE_NAME_FLOWCTRL_LOOP_RESETVALUE = "npu_runconfig/zero"; +const std::string NODE_NAME_FLOWCTRL_LOOP_ASSIGNADD = "FlowCtrl_LoopCond_ASSIGNADD"; +const std::string NODE_NAME_FLOWCTRL_LOOP_ASSIGN = "FlowCtrl_LoopCond_ASSIGN"; +const std::string NODE_NAME_ATOMIC_ADDR_CLEAN = "atomic_addr_clean"; +const uint32_t TRUE_STREAM_ID = 0; +const uint32_t STREAM_SWITCH_INPUT_NUM = 2; + +const std::string NODE_NAME_GLOBAL_STEP = "global_step"; +const std::string NODE_NAME_GLOBAL_STEP_ASSIGNADD = "global_step_assignadd"; +}; // namespace ge diff --git a/src/ge/common/util.cc b/src/ge/common/util.cc new file mode 100644 index 00000000..b5a730bc --- /dev/null +++ b/src/ge/common/util.cc @@ -0,0 +1,441 @@ +/** 
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "framework/common/util.h" + +#include +#include + +#include +#include +#include +#include +#include +#include +#include + +#include "google/protobuf/io/coded_stream.h" +#include "google/protobuf/io/zero_copy_stream_impl.h" +#include "framework/common/fmk_types.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "external/ge/ge_api_error_codes.h" +#include "mmpa/mmpa_api.h" + +using google::protobuf::io::CodedInputStream; +using google::protobuf::io::FileInputStream; +using google::protobuf::io::ZeroCopyInputStream; + +namespace { +/* + * kProtoReadBytesLimit and kWarningThreshold are real arguments of CodedInputStream::SetTotalBytesLimit. + * In order to prevent integer overflow and excessive memory allocation during protobuf processing, + * it is necessary to limit the length of proto message (call SetTotalBytesLimit function). + * In theory, the minimum message length that causes an integer overflow is 512MB, and the default is 64MB. + * If the limit of warning_threshold is exceeded, the exception information will be printed in stderr. + * If such an exception is encountered during operation, + * the proto file can be divided into several small files or the limit value can be increased. + */ +const int kProtoReadBytesLimit = INT_MAX; // Max size of 2 GB minus 1 byte. 
+const int kWarningThreshold = 536870912 * 2; // 536870912 represent 512M + +/// The maximum length of the file. +/// Based on the security coding specification and the current actual (protobuf) model size, it is determined as 2G-1 +const int kMaxFileSizeLimit = INT_MAX; +} // namespace + +namespace ge { +static bool ReadProtoFromCodedInputStream(CodedInputStream &coded_stream, Message *proto) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(proto == nullptr, return false, "incorrect parameter. nullptr == proto"); + + coded_stream.SetTotalBytesLimit(kProtoReadBytesLimit, kWarningThreshold); + return proto->ParseFromCodedStream(&coded_stream); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool ReadProtoFromBinaryFile(const char *file, Message *proto) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((file == nullptr || proto == nullptr), return false, + "incorrect parameter. nullptr == file || nullptr == proto"); + + std::string real_path = RealPath(file); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(), return false, "pb file path '%s' not valid", file); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(GetFileLength(real_path) == -1, return false, "file size not valid."); + + std::ifstream fs(real_path, std::ifstream::in | std::ifstream::binary); + if (!fs.is_open()) { + GELOGE(ge::FAILED, "Open %s failed.", file); + return false; + } + + google::protobuf::io::IstreamInputStream istream(&fs); + google::protobuf::io::CodedInputStream coded_stream(&istream); + + bool ret = ReadProtoFromCodedInputStream(coded_stream, proto); + + fs.close(); + + if (!ret) { + GELOGE(ge::FAILED, "Parse %s failed.", file); + return ret; + } + + return ret; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool ReadProtoFromArray(const void *data, int size, Message *proto) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((proto == nullptr|| data == nullptr || size == 0), return false, + "incorrect parameter. 
proto is nullptr || data is nullptr || size is 0"); + + google::protobuf::io::CodedInputStream coded_stream(reinterpret_cast(const_cast(data)), size); + return ReadProtoFromCodedInputStream(coded_stream, proto); +} + +// Get file length +long GetFileLength(const std::string &input_file) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(input_file.empty(), return -1, "input_file path is null."); + + std::string real_path = RealPath(input_file.c_str()); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(), return -1, "input_file path '%s' not valid", input_file.c_str()); + unsigned long long file_length = 0; + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(mmGetFileSize(input_file.c_str(), &file_length) != EN_OK, return -1, + "open file failed."); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((file_length == 0), return -1, "file length == 0, not valid."); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(file_length > kMaxFileSizeLimit, return -1, "file size %lld is out of limit: %d.", + file_length, kMaxFileSizeLimit); + return static_cast(file_length); +} + +/** @ingroup domi_common + * @brief Read all data from binary file + * @param [in] file_name File path + * @param [out] buffer The address of the output memory, which needs to be released by the caller + * @param [out] length Output memory size + * @return false fail + * @return true success + */ +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool ReadBytesFromBinaryFile(const char *file_name, char **buffer, + int &length) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((file_name == nullptr), return false, "incorrect parameter. file is nullptr"); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((buffer == nullptr), return false, "incorrect parameter. 
buffer is nullptr"); + + std::string real_path = RealPath(file_name); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(), return false, "file path '%s' not valid", file_name); + + std::ifstream file(real_path.c_str(), std::ios::binary | std::ios::ate); + if (!file.is_open()) { + GELOGE(ge::FAILED, "Read file %s failed.", file_name); + return false; + } + + length = static_cast(file.tellg()); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((length <= 0), file.close(); return false, "file length <= 0"); + + file.seekg(0, std::ios::beg); + + *buffer = new (std::nothrow) char[length](); + GE_CHK_BOOL_TRUE_EXEC_RET_STATUS(*buffer == nullptr, false, file.close(), "new an object failed."); + + file.read(*buffer, length); + file.close(); + return true; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool ReadBytesFromBinaryFile(const char *file_name, + std::vector &buffer) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((file_name == nullptr), return false, "incorrect parameter. file path is null"); + + std::string real_path = RealPath(file_name); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(), return false, "file path '%s' not valid", file_name); + + std::ifstream file(real_path.c_str(), std::ios::binary | std::ios::ate); + if (!file.is_open()) { + GELOGE(ge::FAILED, "Read file %s failed.", file_name); + return false; + } + + std::streamsize size = file.tellg(); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((size <= 0), file.close(); return false, "file length <= 0, not valid."); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(size > kMaxFileSizeLimit, file.close(); + return false, "file size %ld is out of limit: %d.", size, kMaxFileSizeLimit); + + file.seekg(0, std::ios::beg); + + buffer.resize(static_cast(size)); + file.read(&buffer[0], size); + file.close(); + GELOGI("Read size:%ld", size); + return true; +} + +/** + * @ingroup domi_common + * @brief Create directory, support to create multi-level directory + * @param [in] directory_path Path, can be multi-level directory + * @return -1 fail + * @return 0 
success + */ +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY int CreateDirectory(const std::string &directory_path) { + GE_CHK_BOOL_EXEC(!directory_path.empty(), return -1, "directory path is empty."); + auto dir_path_len = directory_path.length(); + if (dir_path_len >= PATH_MAX) { + GELOGE(ge::FAILED, "Directory path is too long."); + return -1; + } + char tmp_dir_path[PATH_MAX] = {0}; + for (size_t i = 0; i < dir_path_len; i++) { + tmp_dir_path[i] = directory_path[i]; + if ((tmp_dir_path[i] == '\\') || (tmp_dir_path[i] == '/')) { + if (access(tmp_dir_path, F_OK) != 0) { + int32_t ret = mmMkdir(tmp_dir_path, S_IRUSR | S_IWUSR | S_IXUSR); // 700 + if (ret != 0) { + if (errno != EEXIST) { + GELOGE(ge::FAILED, "Cannot create directory %s. Make sure that the directory exists and writable.", + directory_path.c_str()); + return ret; + } + } + } + } + } + int32_t ret = mmMkdir(const_cast(directory_path.c_str()), S_IRUSR | S_IWUSR | S_IXUSR); // 700 + if (ret != 0) { + if (errno != EEXIST) { + GELOGE(ge::FAILED, "Cannot create directory %s. Make sure that the directory exists and writable.", + directory_path.c_str()); + return ret; + } + } + return 0; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY std::string CurrentTimeInStr() { + std::time_t now = std::time(nullptr); + std::tm *ptm = std::localtime(&now); + if (ptm == nullptr) { + GELOGE(ge::FAILED, "Localtime failed."); + return ""; + } + + const int kTimeBufferLen = 32; + char buffer[kTimeBufferLen + 1] = {0}; + // format: 20171122042550 + std::strftime(buffer, kTimeBufferLen, "%Y%m%d%H%M%S", ptm); + return std::string(buffer); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool ReadProtoFromText(const char *file, + google::protobuf::Message *message) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((file == nullptr || message == nullptr), return false, + "incorrect parameter. 
nullptr == file || nullptr == message"); + + std::string real_path = RealPath(file); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(), return false, "proto file path '%s' not valid", file); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(GetFileLength(real_path) == -1, return false, "file size not valid."); + + std::ifstream fs(real_path.c_str(), std::ifstream::in); + + if (!fs.is_open()) { + GELOGE(ge::FAILED, "Fail to open proto file '%s'.", file); + return false; + } + + google::protobuf::io::IstreamInputStream input(&fs); + bool ret = google::protobuf::TextFormat::Parse(&input, message); + GE_IF_BOOL_EXEC( + !ret, GELOGE(ret, "Call [google::protobuf::TextFormat::Parse] func ret fail, please check your text file.")); + fs.close(); + + return ret; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool ReadProtoFromMem(const char *data, int size, + google::protobuf::Message *message) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((data == nullptr || message == nullptr), return false, + "incorrect parameter. 
data is nullptr || message is nullptr"); + std::string str(data, static_cast(size)); + std::istringstream fs(str); + + google::protobuf::io::IstreamInputStream input(&fs); + bool ret = google::protobuf::TextFormat::Parse(&input, message); + GE_IF_BOOL_EXEC( + !ret, GELOGE(ret, "Call [google::protobuf::TextFormat::Parse] func ret fail, please check your text file.")); + + return ret; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY uint64_t GetCurrentTimestap() { + struct timeval tv {}; + int ret = gettimeofday(&tv, nullptr); + GE_LOGE_IF(ret != 0, "Func gettimeofday may failed: ret=%d", ret); + auto total_use_time = tv.tv_usec + tv.tv_sec * 1000000; // 1000000: seconds to microseconds + return static_cast(total_use_time); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool CheckInt64MulOverflow(int64_t a, int64_t b) { + if (a > 0) { + if (b > 0) { + if (a > (INT64_MAX / b)) { + return false; + } + } else { + if (b < (INT64_MIN / a)) { + return false; + } + } + } else { + if (b > 0) { + if (a < (INT64_MIN / b)) { + return false; + } + } else { + if ((a != 0) && (b < (INT64_MAX / a))) { + return false; + } + } + } + return true; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY std::string RealPath(const char *path) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(path == nullptr, return "", "path pointer is NULL."); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(strlen(path) >= PATH_MAX, return "", "path is invalid"); + // PATH_MAX is the system's own macro, indicating the maximum file path length supported + std::shared_ptr resolved_path(new (std::nothrow) char[PATH_MAX](), std::default_delete()); + if (resolved_path == nullptr) { + GELOGW("new an PATH_MAX string object failed."); + return ""; + } + + std::string res; + + // Nullptr is returned when the path does not exist or there is no permission + // Return absolute path when path is accessible + if (realpath(path, resolved_path.get()) != nullptr) { + res = resolved_path.get(); + } + + return res; +} + 
+FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY bool CheckInputPathValid(const std::string &file_path) { + // The specified path is empty + if (file_path.empty()) { + GELOGE(ge::FAILED, "Path is empty."); + return false; + } + + // A regular matching expression to verify the validity of the input file path + // ^(/|./|(../)+|)([.]?[A-Za-z0-9_-]+/)*[A-Za-z0-9_+.-]+$ + // Path section:Support upper and lower case letters, numbers and underscores + // File name section:Support upper and lower case letters, numbers, underscores and dots(.) + std::string mode = "^(/+|./+|(../+)+|)(../|([.]?[A-Za-z0-9_-]+)/+)*[A-Za-z0-9_+.-]+$"; + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(!ValidateStr(file_path, mode), return false, + "input path [%s] is with illegal character. path can only be composed of upper and " + "lower case letters, numbers, minus sign(-) and underscores; filename can only be " + "composed of upper and lower case letters, numbers, underscores, dot(.), plus " + "sign(+) and minus sign(-).", + file_path.c_str()); + + std::string real_path = RealPath(file_path.c_str()); + // Unable to get absolute path (does not exist or does not have permission to access) + if (real_path.empty()) { + GELOGE(ge::FAILED, "Can not get real path for %s.", file_path.c_str()); + return false; + } + + // The absolute path points to a file that is not readable + if (access(real_path.c_str(), R_OK) != 0) { + GELOGE(ge::FAILED, "Can not read file in %s.", file_path.c_str()); + return false; + } + + return true; +} + +FMK_FUNC_HOST_VISIBILITY bool CheckOutputPathValid(const std::string &file_path) { + // The specified path is empty + if (file_path.empty()) { + GELOGE(ge::FAILED, "Path is empty."); + return false; + } + + // A regular matching expression to verify the validity of the input file path + // ^(/|./|(../)+|)([.]?[A-Za-z0-9_-]+/)*[A-Za-z0-9_+.-]+$ + // Path section:Support upper and lower case letters, numbers and underscores + // File name section:Support upper and lower case letters, 
numbers, underscores and dots(.) + std::string mode = "^(/+|./+|(../+)+|)(../|([.]?[A-Za-z0-9_-]+)/+)*[A-Za-z0-9_+.-]+$"; + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(!ValidateStr(file_path, mode), return false, + "input path [%s] is with illegal character. path can only be composed of upper and " + "lower case letters, numbers, minus sign(-) and underscores; filename can only be " + "composed of upper and lower case letters, numbers, underscores, dot(.), plus " + "sign(+) and minus sign(-).", + file_path.c_str()); + + std::string real_path = RealPath(file_path.c_str()); + // Can get absolute path (file exists) + if (!real_path.empty()) { + // File is not readable or writable + if (access(real_path.c_str(), R_OK | W_OK | F_OK) != 0) { + GELOGE(ge::FAILED, "Path[ %s ] exists, but can not be write.", file_path.c_str()); + return false; + } + } else { + // Find the last separator + int path_split_pos = static_cast(file_path.size() - 1); + for (; path_split_pos >= 0; path_split_pos--) { + if (file_path[path_split_pos] == '\\' || file_path[path_split_pos] == '/') { + break; + } + } + if (path_split_pos == 0) { + return true; + } + if (path_split_pos != -1) { + std::string prefix_path = std::string(file_path).substr(0, static_cast(path_split_pos)); + // Determine whether the specified path is valid by creating the path + if (CreateDirectory(prefix_path) != 0) { + GELOGE(ge::FAILED, "Can not create prefix path for path[ %s ].", file_path.c_str()); + return false; + } + } + } + + return true; +} + +FMK_FUNC_HOST_VISIBILITY bool ValidateStr(const std::string &str, const std::string &mode) { +#ifndef OS_CENTOS + std::regex reg(mode); + + // Matching string part + std::smatch match; + + bool res = regex_match(str, match, reg); + res = regex_search(str, std::regex("[`!@#$%^&*()|{}':;',\\[\\]<>?]")); + return !(res) && (str.size() == match.str().size()); +#else + return true; +#endif +} +} // namespace ge diff --git a/src/ge/engine_manager/dnnengine_manager.cc 
b/src/ge/engine_manager/dnnengine_manager.cc new file mode 100644 index 00000000..7c08e4d3 --- /dev/null +++ b/src/ge/engine_manager/dnnengine_manager.cc @@ -0,0 +1,418 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "engine_manager/dnnengine_manager.h" + +#include +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_context.h" +#include "init/gelib.h" + +namespace { +const char *const kSchedulerUnits = "schedule_units"; +const char *const kId = "id"; +const char *const kName = "name"; +const char *const kExAttrs = "ex_attrs"; +const char *const kIndependent = "independent"; +const char *const kSkipAssignStream = "skip_assign_stream"; +const char *const kCalEngines = "cal_engines"; +const char *const kAttch = "attach"; +const char *const kVectorEngine = "VectorEngine"; +const char *const kAIcoreEngine = "AIcoreEngine"; +const char *const kCustomOpFlag = "_custom_op_flag"; +} // namespace + +namespace ge { +DNNEngineManager::DNNEngineManager() : init_flag_(false) {} +DNNEngineManager::~DNNEngineManager() { + engines_attrs_map_.clear(); + schedulers_.clear(); +} + +Status DNNEngineManager::Initialize(const std::map &options) { + // Multiple initializations are not supported + if (init_flag_) { + GELOGW("DNNEngineManager has been initialized."); + return SUCCESS; + } + 
+ // Load engine so + std::string so_path = "plugin/nnengine/"; + std::string path = PluginManager::GetPath(); + path.append(so_path); + std::string so_api_func = "GetDNNEngineObjs"; + std::vector so_func{so_api_func}; + Status status = plugin_mgr_.Load(path, so_func); + if (status != SUCCESS) { + GELOGE(status, "Load engine's so failed. LibPath is %s", path.c_str()); + return status; + } + + status = plugin_mgr_.InvokeAll &>(so_api_func, engines_map_); + if (status != SUCCESS) { + GELOGE(status, "Get DNNEngineObjs failed."); + return status; + } + + GELOGI("The number of DNNEngineObjs are %zu.", engines_map_.size()); + + // Engines initialize + for (auto iter = engines_map_.begin(); iter != engines_map_.end(); ++iter) { + if (iter->second == nullptr) { + GELOGI("Engine: %s point to nullptr", (iter->first).c_str()); + continue; + } + + GELOGI("DNNEngine name: %s.", (iter->first).c_str()); + + status = iter->second->Initialize(options); + if (status != SUCCESS) { + GELOGE(status, "Engine: %s initialize failed.", (iter->first).c_str()); + return status; + } + + // Check engines' attribute + DNNEngineAttribute attrs; + iter->second->GetAttributes(attrs); + if (attrs.runtime_type == RuntimeType::DEVICE) { + if ((attrs.mem_type.size()) != 1 || (attrs.mem_type[0] != GE_ENGINE_ATTR_MEM_TYPE_HBM)) { + GELOGE(GE_ENG_MEMTYPE_ERROR, "Engine: %s in aicore, but the memory type is not HBM", (iter->first).c_str()); + return GE_ENG_MEMTYPE_ERROR; + } + } + } + + status = ParserJsonFile(); + if (status != SUCCESS) { + GELOGE(status, "parse json file failed"); + return status; + } + + status = CheckJsonFile(); + if (status != SUCCESS) { + GELOGE(status, "check json file failed"); + return status; + } + + init_flag_ = true; + + return SUCCESS; +} + +Status DNNEngineManager::Finalize() { + // Finalize is not allowed, initialize first is necessary + if (!init_flag_) { + GELOGW("DNNEngineManager has been finalized."); + return SUCCESS; + } + + for (auto iter = engines_map_.begin(); iter 
!= engines_map_.end(); ++iter) { + if (iter->second != nullptr) { + GELOGI("DNNEngine name: %s.", (iter->first).c_str()); + Status status = iter->second->Finalize(); + if (status != SUCCESS) { + GELOGE(status, "Engine finalize failed."); + return status; + } + } + } + init_flag_ = false; + engines_map_.clear(); + return SUCCESS; +} + +std::shared_ptr DNNEngineManager::GetEngine(const std::string &name) const { + auto iter = engines_map_.find(name); + if (iter != engines_map_.end()) { + return iter->second; + } + + GELOGW("Failed to get engine object by engine name. %s.", name.c_str()); + return nullptr; +} + +bool DNNEngineManager::IsEngineRegistered(const std::string &name) const { + auto iter = engines_map_.find(name); + if (iter != engines_map_.end()) { + return true; + } + GELOGW("Engine: %s is not Registered", name.c_str()); + return false; +} + +std::string DNNEngineManager::GetDNNEngineName(const OpDescPtr &op_desc) const { + if (op_desc == nullptr) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "DNNEngineManager: op_desc is nullptr"); + return ""; + } + // Use the OpsKernelManager in GELib to get the opInfos for this opCode + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if ((instance_ptr == nullptr) || (!instance_ptr->InitFlag())) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GetDNNEngineName failed."); + return ""; + } + OpsKernelManager &ops_kernel_manager = instance_ptr->OpsKernelManagerObj(); + std::vector op_infos = ops_kernel_manager.GetOpsKernelInfo(op_desc->GetType()); + if (op_infos.empty()) { + GELOGI("DNNEngineManager: Can not get op info by op type %s", op_desc->GetType().c_str()); + return ""; + } + string ge_core_type; + Status ret = ge::GetContext().GetOption(ge::CORE_TYPE, ge_core_type); + if (ret != SUCCESS) { + GELOGD("get the option CORE_TYPE fail, set it to default value VECTOR_ENGINE"); + } + string exclude_core_Type = (ge_core_type == kVectorEngine) ? 
kAIcoreEngine : kVectorEngine; + GELOGD("engine type will exclude: %s", exclude_core_Type.c_str()); + std::map unsupported_reasons; + for (const auto &it : op_infos) { + if (it.engine == exclude_core_Type) { + continue; + } + auto &kernel_map = ops_kernel_manager.GetAllOpsKernelInfoStores(); + auto &kernel_name = it.opKernelLib; + auto kernel_info_store = kernel_map.find(kernel_name); + if (kernel_info_store != kernel_map.end()) { + std::string unsupported_reason; + // It will be replaced by engine' checksupport + if (kernel_info_store->second->CheckSupported(op_desc, unsupported_reason)) { + op_desc->SetOpEngineName(it.engine); + op_desc->SetOpKernelLibName(kernel_name); + GELOGD("DNNEngineManager:Set OpKernelLibName %s and engine name %s into op_desc %s", kernel_name.c_str(), + it.engine.c_str(), op_desc->GetName().c_str()); + return it.engine; + } else { + bool is_custom_op = false; + if ((ge::AttrUtils::GetBool(op_desc, kCustomOpFlag, is_custom_op)) && is_custom_op) { + GELOGE(FAILED, + "The custom operator registered by the user does not support the logic function delivered by this " + "network. 
Check support failed, kernel_name is %s, op type is %s, op name is %s", + kernel_name.c_str(), op_desc->GetType().c_str(), op_desc->GetName().c_str()); + return ""; + } + unsupported_reasons.emplace(kernel_name, unsupported_reason); + GELOGI("DNNEngineManager:Check support failed, kernel_name is %s, op type is %s, op name is %s", + kernel_name.c_str(), op_desc->GetType().c_str(), op_desc->GetName().c_str()); + } + } else { + GELOGW( + "DNNEngineManager:Can not find any supported ops kernel info store by kernel_name %s," + "op type is %s, op name is %s", + kernel_name.c_str(), op_desc->GetType().c_str(), op_desc->GetName().c_str()); + } + } + for (const auto &it : unsupported_reasons) { + GELOGE(GE_GRAPH_ASSIGN_ENGINE_FAILED, "GetDNNEngineName:Op type %s of ops kernel %s is unsupported, reason:%s", + op_desc->GetType().c_str(), it.first.c_str(), it.second.c_str()); + } + GELOGE(GE_GRAPH_ASSIGN_ENGINE_FAILED, "Can't find any supported ops kernel and engine of %s, type is %s", + op_desc->GetName().c_str(), op_desc->GetType().c_str()); + return ""; +} + +const std::map &DNNEngineManager::GetSchedulers() const { return schedulers_; } + +Status DNNEngineManager::ParserJsonFile() { + GELOGI("Begin to parser json file"); + std::string json_file_path = "plugin/nnengine/ge_config/engine_conf.json"; + std::string path = PluginManager::GetPath(); + path.append(json_file_path); + nlohmann::json scheduler_json_file; + Status status = ReadJsonFile(path, &scheduler_json_file); + if (status != SUCCESS) { + GELOGE(FAILED, "Read scheduler json file failed and the file path is %s", path.c_str()); + return FAILED; + } + if (scheduler_json_file.is_null()) { + // when engine_conf.json is not exist, just return success + GELOGW("Json file is null"); + return SUCCESS; + } + + try { + nlohmann::json scheduler_utils_json = scheduler_json_file[kSchedulerUnits]; + if (scheduler_utils_json.is_null()) { + GELOGE(FAILED, "The message of scheduler units is not found"); + return FAILED; + } + if 
(!scheduler_utils_json.is_array()) { + GELOGE(FAILED, "The message of kSchedulerUnits is not array and the file path is %s", json_file_path.c_str()); + return FAILED; + } + auto size = scheduler_json_file[kSchedulerUnits].size(); + for (size_t i = 0; i < size; i++) { + SchedulerConf scheduler_conf; + std::map engine_conf_map; + nlohmann::json engines_json_map = scheduler_utils_json[i][kCalEngines]; + if (engines_json_map.is_null()) { + GELOGE(FAILED, "The message of cal_engines is not found"); + return FAILED; + } + std::string scheduler_id_temp = scheduler_utils_json[i][kId]; + if (!scheduler_id_temp.empty()) { + scheduler_conf.id = scheduler_id_temp; + } else { + GELOGE(FAILED, "Scheduler ID is null"); + return FAILED; + } + status = ParserEngineMessage(engines_json_map, scheduler_id_temp, engine_conf_map); + if (status != SUCCESS) { + GELOGE(FAILED, "Parser engines messages failed"); + return FAILED; + } + scheduler_conf.name = scheduler_utils_json[i][kName]; + scheduler_conf.ex_attrs = scheduler_utils_json[i][kExAttrs]; + scheduler_conf.cal_engines = engine_conf_map; + auto it = schedulers_.find(scheduler_id_temp); + if (it != schedulers_.end()) { + GELOGE(FAILED, "There are the same scheduler ts %s in the json file", scheduler_id_temp.c_str()); + return FAILED; + } + schedulers_.emplace(scheduler_id_temp, scheduler_conf); + } + } catch (const nlohmann::detail::type_error &e) { + GELOGE(FAILED, "Parser json file failed"); + return FAILED; + } + + GELOGI("Parser json file SUCCESS"); + return SUCCESS; +} + +Status DNNEngineManager::ParserEngineMessage(const json engines_json, const std::string &scheduler_mark, + std::map &engines) { + GELOGI("Begin to parser engine massage"); + if (engines_json.is_null()) { + GELOGE(FAILED, "The message of cal_engines is null"); + return FAILED; + } + try { + if (engines_json.is_array()) { + for (size_t i = 0; i < engines_json.size(); i++) { + nlohmann::json engines_elems = engines_json[i]; + EngineConfPtr engine_conf_ptr = 
MakeShared(); + if (engine_conf_ptr == nullptr) { + return FAILED; + } + std::string engine_id = engines_elems[kId]; + if (!engine_id.empty()) { + engine_conf_ptr->id = engine_id; + } else { + GELOGE(FAILED, "engineID is null"); + return FAILED; + } + if (engines_elems.find(kName) != engines_elems.end()) { + engine_conf_ptr->name = engines_elems[kName]; + } else { + GELOGW("The engine %s name is null", engine_id.c_str()); + } + if (engines_elems.find(kIndependent) != engines_elems.end()) { + engine_conf_ptr->independent = engines_elems[kIndependent]; + } + + if (engines_elems.find(kAttch) != engines_elems.end()) { + engine_conf_ptr->attach = engines_elems[kAttch]; + } + + if (engines_elems.find(kSkipAssignStream) != engines_elems.end()) { + engine_conf_ptr->skip_assign_stream = engines_elems[kSkipAssignStream]; + } + engine_conf_ptr->scheduler_id = scheduler_mark; + auto it = engines.find(engine_id); + if (it != engines.end()) { + GELOGE(FAILED, "There are the same engine %s message in the json file", engine_id.c_str()); + return FAILED; + } + engines.emplace(engine_id, engine_conf_ptr); + } + } else { + GELOGE(FAILED, "The message of cal_engines is not array in the json file"); + return FAILED; + } + } catch (const json::exception &e) { + GELOGE(FAILED, "construct json content failed"); + return FAILED; + } + GELOGI("Parser engine massage success"); + return SUCCESS; +} + +Status DNNEngineManager::ReadJsonFile(const std::string &file_path, JsonHandle handle) { + GELOGI("Begin to read json file"); + if (file_path.empty()) { + GELOGE(FAILED, "Json path %s is not valid", file_path.c_str()); + return FAILED; + } + nlohmann::json *json_file = reinterpret_cast(handle); + if (json_file == nullptr) { + GELOGE(FAILED, "JsonFile is nullptr"); + return FAILED; + } + const char *file = file_path.data(); + if ((access(file, F_OK)) == -1) { + if (engines_map_.size() != 0) { + GELOGE(FAILED, "The json file %s is not exist", file_path.c_str()); + return FAILED; + } else { + 
GELOGW("The json file %s is not need", file_path.c_str()); + return SUCCESS; + } + } + + std::ifstream ifs(file_path); + if (!ifs.is_open()) { + GELOGE(FAILED, "Open json file %s failed", file_path.c_str()); + return FAILED; + } + + ifs >> *json_file; + ifs.close(); + GELOGI("Read json file success"); + return SUCCESS; +} + +Status DNNEngineManager::CheckJsonFile() { + GELOGI("Begin to check json file"); + for (auto &it : engines_map_) { + std::string engine_name = it.first; + int count = 0; + for (auto &iter : schedulers_) { + auto engine_map = iter.second.cal_engines; + auto iter_engine_name = engine_map.find(engine_name); + if (iter_engine_name != engine_map.end()) { + count++; + } + } + if (count == 0) { + GELOGE(FAILED, "The engine message %s is not found in the json file", engine_name.c_str()); + return FAILED; + } + if (count > 1) { + GELOGE(FAILED, "The same engine message %s is existed in the json file", engine_name.c_str()); + return FAILED; + } + } + GELOGI("Check json file success"); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/engine_manager/dnnengine_manager.h b/src/ge/engine_manager/dnnengine_manager.h new file mode 100644 index 00000000..f4b1b551 --- /dev/null +++ b/src/ge/engine_manager/dnnengine_manager.h @@ -0,0 +1,85 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_ENGINE_MANAGER_DNNENGINE_MANAGER_H_ +#define GE_ENGINE_MANAGER_DNNENGINE_MANAGER_H_ + +#include +#include +#include +#include + +#include "nlohmann/json.hpp" + +#include "common/ge/plugin_manager.h" +#include "common/ge_inner_error_codes.h" +#include "common/opskernel/ops_kernel_info_types.h" +#include "engine/dnnengine.h" +#include "graph/op_desc.h" + +using JsonHandle = void *; +namespace ge { +using nlohmann::json; + +// Engine configuration +struct EngineConf { + string id; // engine ID + string name; // engine name + bool independent{false}; // independent stream + bool attach{false}; // attach stream + bool skip_assign_stream{false}; // not assign stream + string scheduler_id; // scheduler ID +}; +using EngineConfPtr = std::shared_ptr; + +// Configuration information of schedule unit +struct SchedulerConf { + string id; // scheduler ID + string name; // scheduler name + string ex_attrs; // extra information + map cal_engines; // engine information +}; + +using DNNEnginePtr = std::shared_ptr; + +class DNNEngineManager { + public: + friend class GELib; + std::shared_ptr GetEngine(const std::string &name) const; + bool IsEngineRegistered(const std::string &name) const; + // If can't find appropriate engine name, return "", report error + string GetDNNEngineName(const OpDescPtr &op_desc) const; + const map &GetSchedulers() const; + + private: + DNNEngineManager(); + ~DNNEngineManager(); + Status Initialize(const std::map &options); + Status Finalize(); + Status ReadJsonFile(const std::string &file_path, JsonHandle handle); + Status ParserJsonFile(); + Status ParserEngineMessage(const json engines_json, const string &scheduler_mark, + map &engines); + Status CheckJsonFile(); + PluginManager plugin_mgr_; + std::map engines_map_; + std::map engines_attrs_map_; + std::map schedulers_; + bool init_flag_; +}; +} // namespace ge + +#endif // GE_ENGINE_MANAGER_DNNENGINE_MANAGER_H_ diff --git a/src/ge/engine_manager/engine_conf.json 
b/src/ge/engine_manager/engine_conf.json new file mode 100755 index 00000000..8c8990ee --- /dev/null +++ b/src/ge/engine_manager/engine_conf.json @@ -0,0 +1,54 @@ +{ + "schedule_units": [ + { + "id": "TS_1", + "name": "1980_hwts", + "ex_attrs": "", + "cal_engines": [ + { + "id": "DNN_VM_GE_LOCAL", + "name": "GE_LOCAL", + "independent": false, + "skip_assign_stream": true, + "attach": true + }, + { + "id": "AIcoreEngine", + "name": "AICORE", + "independent": false, + "skip_assign_stream": false, + "attach":false + }, + + { + "id": "VectorEngine", + "name": "VECTORCORE", + "independent": false, + "skip_assign_stream": false, + "attach":false + }, + { + "id": "DNN_VM_AICPU", + "name": "AICPU", + "independent": false, + "skip_assign_stream": false, + "attach": true + }, + { + "id": "DNN_HCCL", + "name": "HCCL", + "independent": true, + "skip_assign_stream": false, + "attach": false + }, + { + "id": "DNN_VM_RTS", + "name": "RTS", + "independent": false, + "skip_assign_stream": false, + "attach": true + } + ] + } + ] +} diff --git a/src/ge/executor/CMakeLists.txt b/src/ge/executor/CMakeLists.txt new file mode 100755 index 00000000..191b2400 --- /dev/null +++ b/src/ge/executor/CMakeLists.txt @@ -0,0 +1,109 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +# libge_executor.so +# add all proto files, generate corresponding .h and .cc files +# add src files +file(GLOB PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../../proto/task.proto" + "../../proto/om.proto" + "../../proto/insert_op.proto" + "../../proto/op_mapping_info.proto" + "../../proto/ge_ir.proto" + ) + +file(GLOB SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "ge_executor.cc" + "../common/profiling/profiling_manager.cc" + "../graph/execute/graph_execute.cc" + "../graph/load/graph_loader.cc" + "../graph/load/new_model_manager/data_dumper.cc" + "../graph/load/new_model_manager/data_inputer.cc" + "../graph/load/new_model_manager/davinci_model.cc" + "../graph/load/new_model_manager/davinci_model_parser.cc" + "../graph/load/new_model_manager/model_manager.cc" + "../graph/load/new_model_manager/model_output.cc" + "../graph/load/new_model_manager/model_utils.cc" + "../graph/load/new_model_manager/task_info/end_graph_task_info.cc" + "../graph/load/new_model_manager/task_info/event_record_task_info.cc" + "../graph/load/new_model_manager/task_info/event_wait_task_info.cc" + "../graph/load/new_model_manager/task_info/fusion_start_task_info.cc" + "../graph/load/new_model_manager/task_info/fusion_stop_task_info.cc" + "../graph/load/new_model_manager/task_info/kernel_ex_task_info.cc" + "../graph/load/new_model_manager/task_info/kernel_task_info.cc" + "../graph/load/new_model_manager/task_info/label_goto_task_info.cc" + "../graph/load/new_model_manager/task_info/label_set_task_info.cc" + "../graph/load/new_model_manager/task_info/memcpy_async_task_info.cc" + "../graph/load/new_model_manager/task_info/profiler_trace_task_info.cc" + "../graph/load/new_model_manager/task_info/stream_active_task_info.cc" + "../graph/load/new_model_manager/task_info/stream_switch_task_info.cc" + "../graph/load/new_model_manager/task_info/task_info.cc" + "../graph/load/new_model_manager/tbe_handle_store.cc" + 
"../graph/load/output/output.cc" + "../graph/manager/graph_manager_utils.cc" + "../graph/manager/graph_mem_allocator.cc" + "../graph/manager/graph_var_manager.cc" + "../graph/manager/trans_var_data_utils.cc" + "../graph/manager/util/debug.cc" + "../model/ge_model.cc" + "../omm/csa_interact.cc" + "../single_op/single_op.cc" + "../single_op/single_op_manager.cc" + "../single_op/single_op_model.cc" + "../single_op/stream_resource.cc" + "../single_op/task/build_task_utils.cc" + "../single_op/task/op_task.cc" + "../single_op/task/tbe_task_builder.cc" + ) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +######## libge_executor.so ######## +add_library(ge_executor SHARED ${SRC_LIST} ${PROTO_HDRS}) +target_compile_definitions(ge_executor PRIVATE + Werror + PROTOBUF_INLINE_NOT_IN_HEADERS=0 + DAVINCI_SUPPORT_PROFILING + FMK_HOST_INFER) +target_link_libraries(ge_executor + ge_common + graph + ${PROTOBUF_LIBRARY} + ${register} + ${c_sec} + ${cce} + ${runtime} + ${slog} + ${mmpa} + ${msprof} + rt + dl) + diff --git a/src/ge/executor/ge_executor.cc b/src/ge/executor/ge_executor.cc new file mode 100644 index 00000000..8be9f6c3 --- /dev/null +++ b/src/ge/executor/ge_executor.cc @@ 
-0,0 +1,486 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "executor/ge_executor.h" + +#include + +#include + +#include "cce/cce.h" +#include "cce/compiler_stub.h" +#include "cce/aicpu_engine.h" +#include "cce/fwk_adpt_struct.h" +#include "common/debug/log.h" +#include "framework/common/debug/ge_log.h" +#include "common/ge/ge_util.h" +#include "common/helper/model_helper.h" +#include "common/util.h" +#include "graph/execute/graph_execute.h" +#include "graph/load/graph_loader.h" +#include "graph/load/new_model_manager/davinci_model_parser.h" +#include "graph/load/new_model_manager/model_manager.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/model.h" +#include "graph/utils/graph_utils.h" +#include "mmpa/mmpa_api.h" +#include "single_op/single_op_manager.h" +#include "framework/common/util.h" +#include "common/profiling/profiling_manager.h" + +namespace { +const uint64_t kDynamicImageSizeParamNum = 2; +} // namespace + +namespace ge { +bool GeExecutor::is_init_ = false; + +class ModelListenerAdapter : public ModelListener { + public: + domi::Status OnComputeDone(uint32_t model_id, uint32_t data_index, uint32_t result_code) { + if (listener == nullptr) { + GELOGE(ge::FAILED, "listener is null."); + return FAILED; + } + return listener->OnComputeDone(model_id, data_index, result_code); + } + + std::shared_ptr listener; +}; + +ge::Status TransferDomiErrorCode(const 
uint32_t error_code) { + switch (error_code) { + case ge::PARAM_INVALID: + case domi::PARAM_INVALID: + return ge::PARAM_INVALID; + case ge::INTERNAL_ERROR: + case domi::INTERNAL_ERROR: + return ge::INTERNAL_ERROR; + default: + return ge::FAILED; + } +} + +void GetGeTensorDescFromDomiInfo(std::vector &ge_descs, + const std::vector &domi_descs, + const std::vector &formats) { + uint32_t idx = 0; + for (auto desc_item : domi_descs) { + ge::TensorDesc ge_desc; + ge_desc.SetName(desc_item.name); + ge_desc.SetDataType(static_cast(desc_item.data_type)); + ge_desc.SetFormat(static_cast(formats[idx])); + std::vector shape_dims; + for (auto dim : desc_item.shape_info.dims) { + shape_dims.push_back(dim); + } + Shape ge_shape(shape_dims); + ge_desc.SetShape(ge_shape); + ge_desc.SetSize(desc_item.size); + ge_descs.emplace_back(ge_desc); + ++idx; + } +} + +void GetDomiInputData(const ge::RunModelData &input_data, InputData &inputs) { + inputs.index = input_data.index; + inputs.model_id = input_data.model_id; + inputs.timestamp = input_data.timestamp; + inputs.timeout = input_data.timeout; + inputs.request_id = input_data.request_id; + for (const auto &data_item : input_data.blobs) { + DataBuffer data_buf{data_item.data, data_item.length, data_item.isDataSupportMemShare}; + inputs.blobs.emplace_back(data_buf); + } +} + +void GetDomiOutputData(const ge::RunModelData &output_data, OutputData &outputs) { + outputs.index = output_data.index; + outputs.model_id = output_data.model_id; + for (const auto &data_item : output_data.blobs) { + DataBuffer data_buf(data_item.data, data_item.length, data_item.isDataSupportMemShare); + outputs.blobs.emplace_back(data_buf); + } +} + +GeExecutor::GeExecutor() {} + +Status GeExecutor::Initialize() { + GELOGI("Init ge_executor begin."); + if (is_init_) { + GELOGW("Already inited, don't need to init again."); + return ge::SUCCESS; + } + + std::vector mem_type(1, RT_MEMORY_HBM); + auto ret = MemManager::Instance().Initialize(mem_type); + if (ret != 
SUCCESS) { + GELOGE(ret, "Memory Manager init fail."); + return ret; + } + + // Start profiling + int32_t device_id = 0; + rtError_t rt_ret = rtGetDevice(&device_id); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "runtime get device_id failed, current device_id:%d", device_id); + return FAILED; + } + GELOGI("current device_id:%d", device_id); + Options profiling_options; + profiling_options.device_id = device_id; + profiling_options.job_id = 0; + ProfilingManager::Instance().Init(profiling_options); + if (ProfilingManager::Instance().Init(profiling_options) != SUCCESS) { + GELOGE(FAILED, "Failed to init profiling."); + return FAILED; + } + + is_init_ = true; + GELOGI("Init ge_executor over."); + return ge::SUCCESS; +} + +// Load model +Status GeExecutor::LoadModelOffline(uint32_t &model_id, const std::string &path, const std::string &key, + int32_t priority, std::shared_ptr listener) { + GELOGI("load model offline begin."); + if (!is_init_) { + GELOGE(GE_EXEC_NOT_INIT, "not inited yet!"); + return GE_EXEC_NOT_INIT; + } + + string file_path = RealPath(path.c_str()); + if (file_path.empty()) { + GELOGE(ge::FAILED, "fileath is invalid. 
please check your text file '%s'.", path.c_str()); + return ge::FAILED; + } + + std::shared_ptr listener_adapter = MakeShared(); + if (listener_adapter == nullptr) { + GELOGE(MEMALLOC_FAILED, "ModelListenerAdapter make shared failed!"); + return ge::FAILED; + } + listener_adapter->listener = listener; + + Status ret = GraphLoader::LoadModelFromFile(path, key, priority, listener_adapter, model_id); + if (ret != SUCCESS) { + GELOGE(ret, "[GeExecutor] LoadModelFromFile failed"); + return TransferDomiErrorCode(ret); + } + return SUCCESS; +} + +Status GeExecutor::LoadModel(uint32_t &model_id, const ModelData &model_data, + std::shared_ptr listener) { + GELOGI("Load model begin, model_id:%u.", model_id); + if (!is_init_) { + GELOGE(GE_EXEC_NOT_INIT, "not inited yet!"); + return GE_EXEC_NOT_INIT; + } + + Status ret; + std::shared_ptr listener_adapter = MakeShared(); + if (listener_adapter == nullptr) { + GELOGE(MEMALLOC_FAILED, "ModelListenerAdapter make shared failed!"); + return ge::FAILED; + } + listener_adapter->listener = listener; + + ret = GraphLoader::LoadModel(model_data, listener_adapter, model_id); + if (ret != SUCCESS) { + GELOGE(ret, "[GeExecutor] LoadModel failed."); + return TransferDomiErrorCode(ret); + } + return ret; +} + +Status GeExecutor::UnloadModel(uint32_t model_id) { + GELOGI("unload model %u begin.", model_id); + if (!is_init_) { + GELOGE(GE_EXEC_NOT_INIT, "not inited yet!"); + return GE_EXEC_NOT_INIT; + } + + // Stop profiling + if (!ProfilingManager::Instance().ProfilingOpTraceOn() && ProfilingManager::Instance().ProfilingOn()) { + ProfilingManager::Instance().StopProfiling(); + } + return GraphLoader::UnloadModel(model_id); +} + +Status GeExecutor::RunModel(const ge::RunModelData &input_data, ge::RunModelData &output_data) { + GELOGI("run model begin."); + if (!is_init_) { + GELOGE(GE_EXEC_NOT_INIT, "not inited yet!"); + return GE_EXEC_NOT_INIT; + } + + InputData inputs; + GetDomiInputData(input_data, inputs); + OutputData outputs; + 
GetDomiOutputData(output_data, outputs); + + return GraphExecutor::DataInput(inputs, outputs); +} + +// Get input and output descriptor +Status GeExecutor::GetModelDescInfo(uint32_t model_id, std::vector &input_desc, + std::vector &output_desc) { + GELOGI("get model desc info begin."); + if (!is_init_) { + GELOGE(GE_EXEC_NOT_INIT, "not inited yet!"); + return GE_EXEC_NOT_INIT; + } + + std::vector input_desc_infos; + std::vector output_desc_infos; + std::vector input_formats; + std::vector output_formats; + GELOGI("GetInputOutputDescInfo via new ome."); + + Status ret = GraphExecutor::GetInputOutputDescInfo(model_id, input_desc_infos, output_desc_infos, + input_formats, output_formats); + if (ret != domi::SUCCESS) { + GELOGE(ret, "GetInputOutputDescInfo failed. ret = %u", ret); + return TransferDomiErrorCode(ret); + } + + if (input_formats.size() != input_desc_infos.size()) { + GELOGE(ge::FAILED, "input_formats.size() != input_desc_infos.size()."); + return ge::FAILED; + } + + if (output_formats.size() != output_desc_infos.size()) { + GELOGE(ge::FAILED, "output_formats.size() != output_desc_infos.size()."); + return ge::FAILED; + } + + // Transfer data to TensorDesc + GetGeTensorDescFromDomiInfo(input_desc, input_desc_infos, input_formats); + GetGeTensorDescFromDomiInfo(output_desc, output_desc_infos, output_formats); + + GELOGI("get model desc info end."); + return ge::SUCCESS; +} + +Status GeExecutor::GetModelDescInfoForZeroCopy(uint32_t model_id, std::vector &input_desc, + std::vector &output_desc) { + GELOGI("get model desc info for zero copy begin."); + if (!is_init_) { + GELOGE(GE_EXEC_NOT_INIT, "not inited yet!"); + return GE_EXEC_NOT_INIT; + } + + std::vector input_desc_infos; + std::vector output_desc_infos; + std::vector input_formats; + std::vector output_formats; + GELOGI("GetInputOutputDescInfoForZeroCopy via new ome."); + + Status ret = GraphExecutor::GetInputOutputDescInfoForZeroCopy(model_id, input_desc_infos, output_desc_infos, + input_formats, 
output_formats); + if (ret != domi::SUCCESS) { + GELOGE(ret, "Get DescInfo For ZeroCopy failed. ret = %u", ret); + return TransferDomiErrorCode(ret); + } + + if (input_formats.size() != input_desc_infos.size()) { + GELOGE(ge::FAILED, "input_formats.size() != input_desc_infos.size()."); + return ge::FAILED; + } + + if (output_formats.size() != output_desc_infos.size()) { + GELOGE(ge::FAILED, "output_formats.size() != output_desc_infos.size()."); + return ge::FAILED; + } + + GetGeTensorDescFromDomiInfo(input_desc, input_desc_infos, input_formats); + GetGeTensorDescFromDomiInfo(output_desc, output_desc_infos, output_formats); + + GELOGI("get model desc info for zero copy end."); + return ge::SUCCESS; +} + +Status GeExecutor::CommandHandle(const Command &command) { + GELOGI("command handle begin."); + Status ret = GraphLoader::CommandHandle(command); + if (ret != SUCCESS) { + GELOGE(ret, "CommandHandle: Command Handle failed."); + return TransferDomiErrorCode(ret); + } + return SUCCESS; +} + +Status GeExecutor::GetMaxUsedMemory(uint32_t model_id, uint32_t &max_size) { + uint64_t max_mem_size = 0; + Status ret = GraphLoader::GetMaxUsedMemory(model_id, max_mem_size); + max_size = static_cast(max_mem_size); + return ret; +} + +/// +/// @ingroup ge +/// @brief Load data from model file to memory +/// @param [in] const std::string &path: Offline model file path +/// @param [out] domi::ModelData &model_data: Offline model memory data +/// @return SUCCESS handle successfully / others handle failed +/// +Status GeExecutor::LoadDataFromFile(const std::string &path, ModelData &model_data) { + string file_path = RealPath(path.c_str()); + if (file_path.empty()) { + GELOGE(ge::FAILED, "file_path is invalid. 
please check your text file '%s'.", path.c_str()); + return ge::FAILED; + } + GELOGI("load model_data from file: %s.", path.c_str()); + std::string key_path; + int32_t priority = 0; + Status ret = GraphLoader::LoadDataFromFile(path, key_path, priority, model_data); + if (ret != SUCCESS) { + if (model_data.model_data != nullptr) { + delete[] static_cast(model_data.model_data); + model_data.model_data = nullptr; + } + } + + return ret; +} + +/// +/// @ingroup ge +/// @brief Load model from offline model memory data +/// @param [in] domi::ModelData &model_data: Offline model data +/// void *dev_ptr: Input/Output memory start address +/// size_t memsize: Input/Output memory length +/// void *weight_ptr: Weight memory start address +/// size_t weightsize: Weight memory length +/// @param [out] uint32_t &model_id: identification after model loading +/// @return SUCCESS handle successfully / others handle failed +/// +Status GeExecutor::LoadModelFromData(uint32_t &model_id, const ModelData &model_data, void *dev_ptr, + size_t mem_size, void *weight_ptr, size_t weight_size) { + return GraphLoader::LoadModelFromData(model_id, model_data, dev_ptr, mem_size, weight_ptr, weight_size); +} + +/// +/// @ingroup ge +/// @brief Load task list from ModelData with queue. +/// @param [out] model_id: model id allocate from manager. +/// @param [in] ge_model_data: Model data load from offline model. +/// @param [in] input_queue_ids: input queue ids create from user. +/// @param [in] output_queue_ids: input queue ids create from user. 
+/// @return: 0 for success / others for fail +/// +Status GeExecutor::LoadModelWithQ(uint32_t &model_id, const ModelData &model_data, + const std::vector &input_queue_ids, + const std::vector &output_queue_ids) { + return GraphLoader::LoadModelWithQ(model_id, model_data, input_queue_ids, output_queue_ids); +} + +/// +/// @ingroup ge +/// @brief Synchronous execution of offline model(Do not create thread) +/// @param [in] uint32_t modelId: Model ID to execute +/// void* stream: stream to execute +/// const domi::InputData *input_data: Model input data +/// bool async_mode: is asynchronize mode. +/// @param [out] domi::OutputData *output_data: Model output data +/// @return SUCCESS handle successfully / others handle failed +/// +Status GeExecutor::ExecModel(uint32_t model_id, void *stream, const ge::RunModelData &input_data, + ge::RunModelData &output_data, bool async_mode) { + if (!is_init_) { + GELOGE(GE_EXEC_NOT_INIT, "not inited yet!"); + return GE_EXEC_NOT_INIT; + } + + InputData input_data_tmp; + OutputData output_data_tmp; + GetDomiInputData(input_data, input_data_tmp); + GetDomiOutputData(output_data, output_data_tmp); + + return GraphLoader::ExecuteModel(model_id, stream, async_mode, input_data_tmp, output_data_tmp); +} + +/// +/// @ingroup ge +/// @brief Get weight memory size from model file +/// @param [in] const std::string &path: Offline model file path +/// @param [out] size_t &mem_size Execution memory size +/// size_t &weight_size Weight memory space size +/// @return SUCCESS handle successfully / others handle failed +/// +Status GeExecutor::GetMemAndWeightSize(const std::string &path, size_t &mem_size, size_t &weight_size) { + ModelData model; + std::string key; + Status ret = ge::GraphLoader::LoadDataFromFile(path, key, 0, model); + if ((ret != SUCCESS) || (model.model_data == nullptr)) { + GELOGE(ret, "Load data from file failed. 
ret = %d", ret); + return ret; + } + + ret = ge::ModelManager::GetModelMemAndWeightSize(model, mem_size, weight_size); + + delete[] static_cast(model.model_data); + model.model_data = nullptr; + + return ret; +} + +/// +/// @ingroup ge +/// @brief Get weight memory size from model file +/// @param [in] const void *model_data Offline model buffer +/// size_t model_size Offline model buffer length +/// @param [out] size_t &mem_size Execution memory size +/// size_t &weight_size Weight memory space size +/// @return SUCCESS handle successfully / others handle failed +/// +Status GeExecutor::GetMemAndWeightSize(const void *model_data, size_t model_size, size_t &mem_size, + size_t &weight_size) { + if (model_data == nullptr) { + GELOGE(PARAM_INVALID, "invalid model data!"); + return PARAM_INVALID; + } + + ModelData model; + model.model_data = const_cast(model_data); + model.model_len = static_cast(model_size); + + return ge::ModelManager::GetModelMemAndWeightSize(model, mem_size, weight_size); +} + +Status GeExecutor::LoadSingleOp(const std::string &model_name, + const ge::ModelData &model_data, + void *stream, + SingleOp **single_op) { + return SingleOpManager::GetInstance().GetOpFromModel(model_name, model_data, stream, single_op); +} + +Status GeExecutor::ExecuteAsync(SingleOp *executor, const std::vector &inputs, + std::vector &outputs) { + if (executor == nullptr) { + GELOGE(PARAM_INVALID, "param is NULL"); + return PARAM_INVALID; + } + + return executor->ExecuteAsync(inputs, outputs); +} + +Status GeExecutor::ReleaseSingleOpResource(void *stream) { + return SingleOpManager::GetInstance().ReleaseResource(stream); +} +} // namespace ge diff --git a/src/ge/ge_local_engine/CMakeLists.txt b/src/ge/ge_local_engine/CMakeLists.txt new file mode 100755 index 00000000..f753b307 --- /dev/null +++ b/src/ge/ge_local_engine/CMakeLists.txt @@ -0,0 +1,55 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# 
you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +# libge_local_engine.so +# add all proto files, generate corresponding .h and .cc files +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "../../proto/task.proto" + ) + +file(GLOB_RECURSE SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "engine/*.cc" + "ops_kernel_store/*.cc" + ) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/tee) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +######### libge_local_engine.so ############# +add_library(ge_local_engine SHARED ${SRC_LIST} ${PROTO_HDRS}) +target_compile_definitions(ge_local_engine PRIVATE Werror) +target_link_libraries(ge_local_engine 
+ graph + ${PROTOBUF_LIBRARY} + ${register} + ${c_sec} + ${slog} + ${runtime}) diff --git a/src/ge/ge_local_engine/common/constant/constant.h b/src/ge/ge_local_engine/common/constant/constant.h new file mode 100644 index 00000000..f7f1e2cb --- /dev/null +++ b/src/ge/ge_local_engine/common/constant/constant.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_LOCAL_ENGINE_COMMON_CONSTANT_CONSTANT_H_ +#define GE_GE_LOCAL_ENGINE_COMMON_CONSTANT_CONSTANT_H_ + +namespace ge { +namespace ge_local { +// engine name +const char *const kGeLocalEngineName = "DNN_VM_GE_LOCAL"; +const char *const kGeLocalOpKernelLibName = "DNN_VM_GE_LOCAL_OP_STORE"; +} // namespace ge_local +} // namespace ge + +#endif // GE_GE_LOCAL_ENGINE_COMMON_CONSTANT_CONSTANT_H_ diff --git a/src/ge/ge_local_engine/engine/ge_local_engine.cc b/src/ge/ge_local_engine/engine/ge_local_engine.cc new file mode 100644 index 00000000..eb3bc8ba --- /dev/null +++ b/src/ge/ge_local_engine/engine/ge_local_engine.cc @@ -0,0 +1,77 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ge_local_engine/engine/ge_local_engine.h" + +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/constant/constant.h" +#include "common/ge/ge_util.h" +#include "ops_kernel_store/ge_local_ops_kernel_info.h" + +namespace ge { +namespace ge_local { +GeLocalEngine &GeLocalEngine::Instance() { + static GeLocalEngine instance; + return instance; +} + +Status GeLocalEngine::Initialize(const std::map &options) { + if (ops_kernel_store_ == nullptr) { + ops_kernel_store_ = MakeShared(); + if (ops_kernel_store_ == nullptr) { + GELOGE(FAILED, "Make GeLocalOpsKernelInfoStore failed."); + return FAILED; + } + } + return SUCCESS; +} + +void GeLocalEngine::GetOpsKernelInfoStores(std::map &ops_kernel_map) { + if (ops_kernel_store_ != nullptr) { + // add buildin opsKernel to opsKernelInfoMap + ops_kernel_map[kGeLocalOpKernelLibName] = ops_kernel_store_; + } +} + +void GeLocalEngine::GetGraphOptimizerObjs(std::map &) { + // no optimizer for ge local engine +} + +Status GeLocalEngine::Finalize() { + ops_kernel_store_ = nullptr; + return SUCCESS; +} +} // namespace ge_local +} // namespace ge + +ge::Status Initialize(const std::map &options) { + return ge::ge_local::GeLocalEngine::Instance().Initialize(options); +} + +void GetOpsKernelInfoStores(std::map &ops_kernel_map) { + ge::ge_local::GeLocalEngine::Instance().GetOpsKernelInfoStores(ops_kernel_map); +} + +void GetGraphOptimizerObjs(std::map &graph_optimizers) { + 
ge::ge_local::GeLocalEngine::Instance().GetGraphOptimizerObjs(graph_optimizers); +} + +ge::Status Finalize() { return ge::ge_local::GeLocalEngine::Instance().Finalize(); } diff --git a/src/ge/ge_local_engine/engine/ge_local_engine.h b/src/ge/ge_local_engine/engine/ge_local_engine.h new file mode 100644 index 00000000..350b3e5f --- /dev/null +++ b/src/ge/ge_local_engine/engine/ge_local_engine.h @@ -0,0 +1,119 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_LOCAL_ENGINE_ENGINE_GE_LOCAL_ENGINE_H_ +#define GE_GE_LOCAL_ENGINE_ENGINE_GE_LOCAL_ENGINE_H_ + +#include +#include +#include + +#include "common/opskernel/ops_kernel_info_store.h" +#include "common/optimizer/graph_optimizer.h" + +using OpsKernelInfoStorePtr = std::shared_ptr; +using GraphOptimizerPtr = std::shared_ptr; + +namespace ge { +namespace ge_local { +/// +/// ge local engine. +/// Used for the ops not belong to any engine. eg:netoutput +/// +class GeLocalEngine { + public: + /// + /// get GeLocalEngine instance. + /// @return GeLocalEngine instance. + /// + static GeLocalEngine &Instance(); + + virtual ~GeLocalEngine() = default; + + /// + /// When Ge start, GE will invoke this interface + /// @return The status whether initialize successfully + /// + Status Initialize(const std::map &options); + + /// + /// After the initialize, GE will invoke this interface + /// to get the Ops kernel Store. 
+ /// @param ops_kernel_map The ge local's ops kernel info + /// + void GetOpsKernelInfoStores(std::map &ops_kernel_map); + + /// + /// After the initialize, GE will invoke this interface + /// to get the Graph Optimizer. + /// @param graph_optimizers The ge local's Graph Optimizer objs + /// + void GetGraphOptimizerObjs(std::map &graph_optimizers); + + /// + /// When the graph finished, GE will invoke this interface + /// @return The status whether initialize successfully + /// + Status Finalize(); + + // Copy prohibited + GeLocalEngine(const GeLocalEngine &ge_local_engine) = delete; + + // Move prohibited + GeLocalEngine(const GeLocalEngine &&ge_local_engine) = delete; + + // Copy prohibited + GeLocalEngine &operator=(const GeLocalEngine &ge_local_engine) = delete; + + // Move prohibited + GeLocalEngine &operator=(GeLocalEngine &&ge_local_engine) = delete; + + private: + GeLocalEngine() = default; + + OpsKernelInfoStorePtr ops_kernel_store_ = nullptr; +}; +} // namespace ge_local +} // namespace ge + +extern "C" { + +/// +/// When Ge start, GE will invoke this interface +/// @return The status whether initialize successfully +/// +ge::Status Initialize(const map &options); + +/// +/// After the initialize, GE will invoke this interface to get the Ops kernel Store +/// @param ops_kernel_map The ge local's ops kernel info +/// +void GetOpsKernelInfoStores(std::map &ops_kernel_map); + +/// +/// After the initialize, GE will invoke this interface to get the Graph Optimizer +/// @param graph_optimizers The ge local's Graph Optimizer objs +/// +void GetGraphOptimizerObjs(std::map &graph_optimizers); + +/// +/// When the graph finished, GE will invoke this interface +/// @return The status whether initialize successfully +/// +ge::Status Finalize(); +} + +#endif // GE_GE_LOCAL_ENGINE_ENGINE_GE_LOCAL_ENGINE_H_ diff --git a/src/ge/ge_local_engine/ops_kernel_store/ge_local_ops_kernel_info.cc b/src/ge/ge_local_engine/ops_kernel_store/ge_local_ops_kernel_info.cc new file 
mode 100644 index 00000000..7b354c86 --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/ge_local_ops_kernel_info.cc @@ -0,0 +1,200 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ge_local_engine/ops_kernel_store/ge_local_ops_kernel_info.h" + +#include + +#include "common/constant/constant.h" +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/ge/ge_util.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" +#include "op/op_factory.h" +#include "proto/task.pb.h" + +namespace { +const char *const kConstantOpType = "Constant"; +const char *const kConstantOpAttrName = "value"; +} // namespace +namespace ge { +namespace ge_local { +using domi::TaskDef; +using std::map; +using std::string; +using std::vector; + +Status GeLocalOpsKernelInfoStore::Initialize(const map &options) { + GELOGI("GeLocalOpsKernelInfoStore init start."); + + OpInfo default_op_info = {.engine = kGeLocalEngineName, + .opKernelLib = kGeLocalOpKernelLibName, + .computeCost = 0, + .flagPartial = false, + .flagAsync = false, + .isAtomic = false}; + // Init op_info_map_ + auto all_ops = OpFactory::Instance().GetAllOps(); + for (auto &op : all_ops) { + op_info_map_[op] = default_op_info; + } + + GELOGI("GeLocalOpsKernelInfoStore inited success. 
op num=%zu", op_info_map_.size()); + + return SUCCESS; +} + +Status GeLocalOpsKernelInfoStore::Finalize() { + op_info_map_.clear(); + return SUCCESS; +} + +Status GeLocalOpsKernelInfoStore::CalcOpRunningParam(Node &ge_node) { + OpDescPtr op_desc = ge_node.GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(FAILED, "CalcOpRunningParam failed, as op desc is null"); + return FAILED; + } + const string node_name = ge_node.GetName(); + const string node_type = ge_node.GetType(); + size_t output_size = op_desc->GetOutputsSize(); + GELOGD("Calc op[%s:%s] op running param, output size=%zu.", node_name.c_str(), node_type.c_str(), output_size); + + for (size_t i = 0; i < output_size; ++i) { + GeTensorDesc output_tensor = op_desc->GetOutputDesc(static_cast(i)); + Format format = output_tensor.GetFormat(); + DataType data_type = output_tensor.GetDataType(); + + uint32_t mem_size = 0; + graphStatus graph_status = TensorUtils::GetSize(output_tensor, mem_size); + // If mem size has been set, no need reset. 
+ if ((graph_status == GRAPH_SUCCESS) && (mem_size > 0) && (data_type != DT_STRING)) { + GELOGD("Op[%s:%s] out[%zu] mem size has been set, no need calc again, format=%s, data_type=%s, mem_size=%u.", + node_name.c_str(), node_type.c_str(), i, TypeUtils::FormatToSerialString(format).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str(), mem_size); + continue; + } + + int64_t output_mem_size = 0; + GeShape output_shape = output_tensor.GetShape(); + if ((node_type == kConstantOpType) && (data_type == DT_STRING)) { + graph_status = CalcConstantStrMemSize(op_desc, output_mem_size); + } else { + graph_status = TensorUtils::CalcTensorMemSize(output_shape, format, data_type, output_mem_size); + } + + if (graph_status != GRAPH_SUCCESS) { + GELOGE(FAILED, "Calc op[%s:%s] out[%zu] mem size failed, format=%s, data_type=%s, error=%u.", node_name.c_str(), + node_type.c_str(), i, TypeUtils::FormatToSerialString(format).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str(), graph_status); + return FAILED; + } + + if (output_mem_size < 0) { + GELOGE(FAILED, + "Calc op[%s:%s] out[%zu] mem size is negative(not support)," + " format=%s, data_type=%s, mem_size=%ld.", + node_name.c_str(), node_type.c_str(), i, TypeUtils::FormatToSerialString(format).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str(), output_mem_size); + return FAILED; + } + GELOGI( + "Calc op[%s:%s] out[%zu] mem size is %ld," + " format=%s, data_type=%s.", + node_name.c_str(), node_type.c_str(), i, output_mem_size, TypeUtils::FormatToSerialString(format).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str()); + + if (output_mem_size > static_cast(UINT_MAX)) { + GELOGE(FAILED, + "Calc op[%s:%s] out[%zu] mem size failed, as GE need data, " + "type is uint32, but output_mem_size[%ld] is overflow.", + node_name.c_str(), node_type.c_str(), i, output_mem_size); + return FAILED; + } + TensorUtils::SetSize(output_tensor, static_cast(output_mem_size)); + + graph_status = 
op_desc->UpdateOutputDesc(static_cast(i), output_tensor); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(FAILED, "Update op[%s:%s] out[%zu] desc failed, format=%s, data_type=%s, error=%u.", node_name.c_str(), + node_type.c_str(), i, TypeUtils::FormatToSerialString(format).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str(), graph_status); + return FAILED; + } + } + GELOGD("Calc op[%s:%s] running param success.", node_name.c_str(), node_type.c_str()); + return SUCCESS; +} + +Status GeLocalOpsKernelInfoStore::CalcConstantStrMemSize(const OpDescPtr &op_desc, int64_t &mem_size) { + if (op_desc == nullptr) { + GELOGE(FAILED, "CalcConstantStrMemSize failed, as op desc is null"); + return FAILED; + } + ConstGeTensorPtr value = MakeShared(); + if (value == nullptr) { + GELOGE(FAILED, "make shared ConstGeTensor exception."); + return FAILED; + } + // Constant op attr name is "value" + if (!AttrUtils::GetTensor(op_desc, kConstantOpAttrName, value)) { + GELOGE(FAILED, "Get Constant op attr value failed"); + return FAILED; + } + mem_size = static_cast(value->GetData().size()); + return GRAPH_SUCCESS; +} + +void GeLocalOpsKernelInfoStore::GetAllOpsKernelInfo(map &infos) const { infos = op_info_map_; } + +Status GeLocalOpsKernelInfoStore::GenerateTask(const Node &node, RunContext &context, vector &tasks) { + string name = node.GetName(); + string type = node.GetType(); + GELOGD("Ge local generate task for node:%s(%s) begin, tasks.size()=%zu.", name.c_str(), type.c_str(), tasks.size()); + + auto op = OpFactory::Instance().CreateOp(node, context); + if (op == nullptr) { + GELOGE(FAILED, "CreateOp for node:%s(%s) failed.", name.c_str(), type.c_str()); + return FAILED; + } + + Status ret = op->Run(); + if (ret != SUCCESS) { + GELOGE(ret, "Node:%s(%s) op run failed.", name.c_str(), type.c_str()); + return ret; + } + GELOGI("Ge local generate task for node:%s(%s) end, tasks.size()=%zu.", name.c_str(), type.c_str(), tasks.size()); + return ret; +} + +bool 
GeLocalOpsKernelInfoStore::CheckSupported(const OpDescPtr &op_desc, std::string &) const { + if (op_desc == nullptr) { + return false; + } + return op_info_map_.count(op_desc->GetType()) > 0; +} + +Status GeLocalOpsKernelInfoStore::CreateSession(const map &session_options) { + // Do nothing + return SUCCESS; +} + +Status GeLocalOpsKernelInfoStore::DestroySession(const map &session_options) { + // Do nothing + return SUCCESS; +} +} // namespace ge_local +} // namespace ge diff --git a/src/ge/ge_local_engine/ops_kernel_store/ge_local_ops_kernel_info.h b/src/ge/ge_local_engine/ops_kernel_store/ge_local_ops_kernel_info.h new file mode 100644 index 00000000..a04367ff --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/ge_local_ops_kernel_info.h @@ -0,0 +1,118 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_GE_LOCAL_OPS_KERNEL_INFO_H_ +#define GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_GE_LOCAL_OPS_KERNEL_INFO_H_ + +#include +#include +#include + +#include "common/opskernel/ops_kernel_info_store.h" + +namespace ge { +namespace ge_local { +class GeLocalOpsKernelInfoStore : public OpsKernelInfoStore { + public: + GeLocalOpsKernelInfoStore() = default; + + ~GeLocalOpsKernelInfoStore() override = default; + + /// + /// Initialize related resources of the ge local kernelinfo store + /// @return status whether this operation success + /// + Status Initialize(const std::map &options) override; + + /// + /// Release related resources of the ge local kernel info store + /// @return status whether this operation success + /// + Status Finalize() override; + + /// + /// Check to see if an operator is fully supported or partially supported. + /// @param op_desc OpDesc information + /// @param reason unsupported reason + /// @return bool value indicate whether the operator is fully supported + /// + bool CheckSupported(const OpDescPtr &op_desc, std::string &reason) const override; + + /// + /// Returns the full operator information. 
+ /// @param infos reference of a map, + /// contain operator's name and detailed information + /// + void GetAllOpsKernelInfo(std::map &infos) const override; + + /// + /// Calc the running size of Operator, + /// then GE will alloc the mem size from runtime + /// @param ge_node Node information + /// @return status whether this operation success + /// + Status CalcOpRunningParam(ge::Node &ge_node) override; + + /// + /// call the runtime's interface to generate the task + /// @param node Node information + /// @param context run context info + /// @return status whether this operation success + /// + Status GenerateTask(const ge::Node &ge_node, ge::RunContext &context, std::vector &tasks) override; + + /// + /// Create session + /// @param session_options Session Options + /// @return status whether this operation success + /// + Status CreateSession(const std::map &session_options) override; + + /// + /// Destroy session + /// @param session_options Session Options + /// @return status whether this operation success + /// + Status DestroySession(const std::map &session_options) override; + + // Copy prohibited + GeLocalOpsKernelInfoStore(const GeLocalOpsKernelInfoStore &ops_kernel_store) = delete; + + // Move prohibited + GeLocalOpsKernelInfoStore(const GeLocalOpsKernelInfoStore &&ops_kernel_store) = delete; + + // Copy prohibited + GeLocalOpsKernelInfoStore &operator=(const GeLocalOpsKernelInfoStore &ops_kernel_store) = delete; + + // Move prohibited + GeLocalOpsKernelInfoStore &operator=(GeLocalOpsKernelInfoStore &&ops_kernel_store) = delete; + + private: + /// + /// Calc memSize for constant which type is DT_STRING. 
+ /// @param op_desc OpDesc information + /// @param mem_size output size + /// @return whether this operation success + /// + Status CalcConstantStrMemSize(const OpDescPtr &op_desc, int64_t &mem_size); + + // store op name and OpInfo key-value pair + std::map op_info_map_; +}; +} // namespace ge_local +} // namespace ge + +#endif // GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_GE_LOCAL_OPS_KERNEL_INFO_H_ diff --git a/src/ge/ge_local_engine/ops_kernel_store/op/ge_deleted_op.cc b/src/ge/ge_local_engine/ops_kernel_store/op/ge_deleted_op.cc new file mode 100644 index 00000000..f0535331 --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/op/ge_deleted_op.cc @@ -0,0 +1,65 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_local_engine/ops_kernel_store/op/ge_deleted_op.h" + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "ge_local_engine/ops_kernel_store/op/op_factory.h" + +namespace ge { +namespace ge_local { +GeDeletedOp::GeDeletedOp(const Node &node, RunContext &run_context) : Op(node, run_context) {} + +Status GeDeletedOp::Run() { + GELOGE(FAILED, "Node:%s type is %s, should be deleted by ge.", name_.c_str(), type_.c_str()); + // Do nothing + return FAILED; +} + +REGISTER_OP_CREATOR(TemporaryVariable, GeDeletedOp); +REGISTER_OP_CREATOR(DestroyTemporaryVariable, GeDeletedOp); +REGISTER_OP_CREATOR(GuaranteeConst, GeDeletedOp); +REGISTER_OP_CREATOR(PreventGradient, GeDeletedOp); +REGISTER_OP_CREATOR(StopGradient, GeDeletedOp); +REGISTER_OP_CREATOR(ExpandDims, GeDeletedOp); +REGISTER_OP_CREATOR(Reshape, GeDeletedOp); +REGISTER_OP_CREATOR(ReFormat, GeDeletedOp); +REGISTER_OP_CREATOR(Squeeze, GeDeletedOp); +REGISTER_OP_CREATOR(Size, GeDeletedOp); +REGISTER_OP_CREATOR(Shape, GeDeletedOp); +REGISTER_OP_CREATOR(ShapeN, GeDeletedOp); +REGISTER_OP_CREATOR(Rank, GeDeletedOp); +REGISTER_OP_CREATOR(_Retval, GeDeletedOp); +REGISTER_OP_CREATOR(ReadVariableOp, GeDeletedOp); +REGISTER_OP_CREATOR(VarHandleOp, GeDeletedOp); +REGISTER_OP_CREATOR(VarIsInitializedOp, GeDeletedOp); +REGISTER_OP_CREATOR(Snapshot, GeDeletedOp); +REGISTER_OP_CREATOR(Identity, GeDeletedOp); +REGISTER_OP_CREATOR(IdentityN, GeDeletedOp); +REGISTER_OP_CREATOR(VariableV2, GeDeletedOp); +REGISTER_OP_CREATOR(Empty, GeDeletedOp); +REGISTER_OP_CREATOR(PlaceholderWithDefault, GeDeletedOp); +REGISTER_OP_CREATOR(IsVariableInitialized, GeDeletedOp); +REGISTER_OP_CREATOR(PlaceholderV2, GeDeletedOp); +REGISTER_OP_CREATOR(Placeholder, GeDeletedOp); +REGISTER_OP_CREATOR(End, GeDeletedOp); +REGISTER_OP_CREATOR(Merge, GeDeletedOp); +REGISTER_OP_CREATOR(Switch, GeDeletedOp); +REGISTER_OP_CREATOR(RefMerge, GeDeletedOp); +REGISTER_OP_CREATOR(RefSwitch, GeDeletedOp); +} // 
namespace ge_local +} // namespace ge diff --git a/src/ge/ge_local_engine/ops_kernel_store/op/ge_deleted_op.h b/src/ge/ge_local_engine/ops_kernel_store/op/ge_deleted_op.h new file mode 100644 index 00000000..c551779d --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/op/ge_deleted_op.h @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_GE_DELETED_OP_H_ +#define GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_GE_DELETED_OP_H_ + +#include "ge_local_engine/ops_kernel_store/op/op.h" + +namespace ge { +namespace ge_local { +class GeDeletedOp : public Op { + public: + GeDeletedOp(const Node &node, RunContext &run_context); + + ~GeDeletedOp() override = default; + + GeDeletedOp &operator=(const GeDeletedOp &op) = delete; + + GeDeletedOp(const GeDeletedOp &op) = delete; + + /// + /// @brief generate task. 
+ /// @return result + /// + ge::Status Run() override; +}; +} // namespace ge_local +} // namespace ge + +#endif // GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_GE_DELETED_OP_H_ diff --git a/src/ge/ge_local_engine/ops_kernel_store/op/no_op.cc b/src/ge/ge_local_engine/ops_kernel_store/op/no_op.cc new file mode 100644 index 00000000..8b44aea7 --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/op/no_op.cc @@ -0,0 +1,49 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_local_engine/ops_kernel_store/op/no_op.h" + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "ge_local_engine/ops_kernel_store/op/op_factory.h" + +namespace ge { +namespace ge_local { +NoOp::NoOp(const Node &node, RunContext &run_context) : Op(node, run_context) {} + +Status NoOp::Run() { + GELOGI("Node:%s type is %s, no need gen task.", name_.c_str(), type_.c_str()); + // Do nothing + return SUCCESS; +} + +REGISTER_OP_CREATOR(Data, NoOp); + +REGISTER_OP_CREATOR(AippData, NoOp); + +REGISTER_OP_CREATOR(NoOp, NoOp); + +REGISTER_OP_CREATOR(Variable, NoOp); + +REGISTER_OP_CREATOR(Constant, NoOp); + +REGISTER_OP_CREATOR(Const, NoOp); + +REGISTER_OP_CREATOR(NetOutput, NoOp); + +REGISTER_OP_CREATOR(ControlTrigger, NoOp); +} // namespace ge_local +} // namespace ge diff --git a/src/ge/ge_local_engine/ops_kernel_store/op/no_op.h b/src/ge/ge_local_engine/ops_kernel_store/op/no_op.h new file mode 100644 index 00000000..80f235dc --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/op/no_op.h @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_NO_OP_H_ +#define GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_NO_OP_H_ + +#include "ge_local_engine/ops_kernel_store/op/op.h" + +namespace ge { +namespace ge_local { +class NoOp : public Op { + public: + NoOp(const Node &node, RunContext &run_context); + + ~NoOp() override = default; + + NoOp &operator=(const NoOp &op) = delete; + + NoOp(const NoOp &op) = delete; + + /// + /// @brief generate task. + /// @return result + /// + ge::Status Run() override; +}; +} // namespace ge_local +} // namespace ge + +#endif // GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_NO_OP_H_ diff --git a/src/ge/ge_local_engine/ops_kernel_store/op/op.cc b/src/ge/ge_local_engine/ops_kernel_store/op/op.cc new file mode 100644 index 00000000..2405391a --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/op/op.cc @@ -0,0 +1,27 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_local_engine/ops_kernel_store/op/op.h" + +#include "graph/utils/anchor_utils.h" +#include "graph/utils/tensor_utils.h" + +namespace ge { +namespace ge_local { +Op::Op(const Node &node, RunContext &run_context) + : run_context_(run_context), node_(node), name_(node.GetName()), type_(node.GetType()) {} +} // namespace ge_local +} // namespace ge diff --git a/src/ge/ge_local_engine/ops_kernel_store/op/op.h b/src/ge/ge_local_engine/ops_kernel_store/op/op.h new file mode 100644 index 00000000..71fda167 --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/op/op.h @@ -0,0 +1,51 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_OP_H_ +#define GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_OP_H_ + +#include + +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/opskernel/ops_kernel_info_types.h" +#include "graph/node.h" + +namespace ge { +namespace ge_local { +/// +/// The base class for all op. 
+/// +class Op { + public: + Op(const Node &node, RunContext &run_context); + + virtual ~Op() = default; + + virtual Status Run() = 0; + + protected: + const RunContext &run_context_; + const Node &node_; + std::string name_; + std::string type_; +}; +} // namespace ge_local +} // namespace ge + +#endif // GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_OP_H_ diff --git a/src/ge/ge_local_engine/ops_kernel_store/op/op_factory.cc b/src/ge/ge_local_engine/ops_kernel_store/op/op_factory.cc new file mode 100644 index 00000000..d9d803da --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/op/op_factory.cc @@ -0,0 +1,56 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_local_engine/ops_kernel_store/op/op_factory.h" + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "graph/op_desc.h" + +namespace ge { +namespace ge_local { +OpFactory &OpFactory::Instance() { + static OpFactory instance; + return instance; +} + +std::shared_ptr OpFactory::CreateOp(const Node &node, RunContext &run_context) { + auto iter = op_creator_map_.find(node.GetType()); + if (iter != op_creator_map_.end()) { + return iter->second(node, run_context); + } + + GELOGE(FAILED, "Not supported OP, type = %s, name = %s", node.GetType().c_str(), node.GetName().c_str()); + return nullptr; +} + +void OpFactory::RegisterCreator(const std::string &type, const OP_CREATOR_FUNC &func) { + if (func == nullptr) { + GELOGW("Func is NULL."); + return; + } + + auto iter = op_creator_map_.find(type); + if (iter != op_creator_map_.end()) { + GELOGW("%s creator already exist", type.c_str()); + return; + } + + op_creator_map_[type] = func; + all_ops_.emplace_back(type); +} +} // namespace ge_local +} // namespace ge diff --git a/src/ge/ge_local_engine/ops_kernel_store/op/op_factory.h b/src/ge/ge_local_engine/ops_kernel_store/op/op_factory.h new file mode 100644 index 00000000..d7e9a2a1 --- /dev/null +++ b/src/ge/ge_local_engine/ops_kernel_store/op/op_factory.h @@ -0,0 +1,99 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_OP_FACTORY_H_ +#define GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_OP_FACTORY_H_ + +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "ge_local_engine/ops_kernel_store/op/op.h" + +namespace ge { +namespace ge_local { +using OP_CREATOR_FUNC = std::function(const Node &, RunContext &)>; + +// manage all the op, support create op. +class OpFactory { + public: + static OpFactory &Instance(); + + /// + /// @brief create Op. + /// @param [in] node share ptr of node + /// @param [in] run_context run context + /// @return not nullptr success + /// @return nullptr fail + /// + std::shared_ptr CreateOp(const Node &node, RunContext &run_context); + + /// + /// @brief Register Op create function. + /// @param [in] type Op type + /// @param [in] func Op create func + /// + void RegisterCreator(const std::string &type, const OP_CREATOR_FUNC &func); + + const std::vector &GetAllOps() const { return all_ops_; } + + OpFactory(const OpFactory &) = delete; + + OpFactory &operator=(const OpFactory &) = delete; + + OpFactory(OpFactory &&) = delete; + + OpFactory &operator=(OpFactory &&) = delete; + + private: + OpFactory() = default; + + ~OpFactory() = default; + + // the op creator function map + std::map op_creator_map_; + std::vector all_ops_; +}; + +class OpRegistrar { + public: + OpRegistrar(const std::string &type, const OP_CREATOR_FUNC &func) { + OpFactory::Instance().RegisterCreator(type, func); + } + + ~OpRegistrar() = default; + + OpRegistrar(const OpRegistrar &) = delete; + + OpRegistrar &operator=(const OpRegistrar &) = delete; + + OpRegistrar(OpRegistrar &&) = delete; + + OpRegistrar &operator=(OpRegistrar &&) = delete; +}; + +#define REGISTER_OP_CREATOR(type, clazz) \ + std::shared_ptr Creator_##type##Op(const Node &node, RunContext &run_context) { \ + return MakeShared(node, run_context); \ + } \ + OpRegistrar g_##type##Op_creator(#type, Creator_##type##Op) +} // namespace 
ge_local +} // namespace ge + +#endif // GE_GE_LOCAL_ENGINE_OPS_KERNEL_STORE_OP_OP_FACTORY_H_ diff --git a/src/ge/ge_runtime/CMakeLists.txt b/src/ge/ge_runtime/CMakeLists.txt new file mode 100755 index 00000000..9e9892af --- /dev/null +++ b/src/ge/ge_runtime/CMakeLists.txt @@ -0,0 +1,55 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +# libge_runtime.so +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${GE_SOURCE_DIR}/src) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/framework/common) +include_directories(${GE_SOURCE_DIR}/inc/framework/ge_runtime) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +######### libge_runtime.so ############# +file(GLOB_RECURSE GE_SRC_LIST RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "model_runner.cc" + "runtime_model.cc" + 
"op_info_utils.cc" + "output.cc" + "task/*.cc" + ) + +add_library(ge_runtime SHARED ${GE_SRC_LIST}) +target_compile_definitions(ge_runtime PUBLIC + PROTOBUF_INLINE_NOT_IN_HEADERS=0 + Werror) +target_link_libraries(ge_runtime + graph + ${cce} + ${slog} + ${runtime} + ${c_sec} + rt + dl + ) diff --git a/src/ge/ge_runtime/model_context.h b/src/ge/ge_runtime/model_context.h new file mode 100644 index 00000000..4f580b61 --- /dev/null +++ b/src/ge/ge_runtime/model_context.h @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GE_RUNTIME_MODEL_CONTEXT_H_ +#define GE_GE_RUNTIME_MODEL_CONTEXT_H_ + +#include + +#include "runtime/rt_model.h" + +namespace ge { +namespace model_runner { +class ModelContext { + public: + ModelContext(uint32_t device_id, uint64_t session_id, int32_t priority, rtModel_t rt_model_handle, + rtStream_t rt_model_stream, const std::vector &stream_list, + const std::vector &label_list, const std::vector &event_list) + : device_id_(device_id), session_id_(session_id), priority_(priority), rt_model_handle_(rt_model_handle), + rt_model_stream_(rt_model_stream), stream_list_(stream_list), label_list_(label_list), + event_list_(event_list) {} + ~ModelContext() {} + + uint64_t device_id() const { return device_id_; } + uint64_t session_id() const { return session_id_; } + int32_t priority() const { return priority_; } + const rtModel_t &rt_model_handle() const { return rt_model_handle_; } + const rtStream_t &rt_model_stream() const { return rt_model_stream_; } + const std::vector &stream_list() const { return stream_list_; } + const std::vector &label_list() const { return label_list_; } + const std::vector &event_list() const { return event_list_; } + + private: + uint32_t device_id_; + uint64_t session_id_; + int32_t priority_; + rtModel_t rt_model_handle_; + rtStream_t rt_model_stream_; + std::vector stream_list_; + std::vector label_list_; + std::vector event_list_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_MODEL_CONTEXT_H_ diff --git a/src/ge/ge_runtime/model_runner.cc b/src/ge/ge_runtime/model_runner.cc new file mode 100644 index 00000000..e36901c8 --- /dev/null +++ b/src/ge/ge_runtime/model_runner.cc @@ -0,0 +1,129 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ge_runtime/model_runner.h" + +#include "./op_info_utils.h" +#include "./runtime_model.h" +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/ge/ge_util.h" +#include "ge_runtime/davinci_model.h" +#include "graph/op_desc.h" + +namespace ge { +namespace model_runner { + +using RuntimeModelPtr = std::shared_ptr; +using DavinciModelPtr = std::shared_ptr; + +ModelRunner &ModelRunner::Instance() { + static ModelRunner instance; // Guaranteed to be destroyed. + return instance; +} + +bool ModelRunner::LoadDavinciModel(uint32_t device_id, + uint64_t session_id, + uint32_t model_id, + std::shared_ptr davinci_model, + std::shared_ptr listener) { + std::shared_ptr model = MakeShared(); + if (model == nullptr) { + return false; + } + bool status = model->Load(device_id, session_id, davinci_model); + if (!status) { + return false; + } + + runtime_models_[model_id] = model; + return true; +} + +const std::vector &ModelRunner::GetTaskIdList(uint32_t model_id) const { + auto model_iter = runtime_models_.find(model_id); + if (model_iter == runtime_models_.end()) { + GELOGE(PARAM_INVALID, "Model id %u not found.", model_id); + static const std::vector empty_ret; + return empty_ret; + } + + return model_iter->second->GetTaskIdList(); +} + +bool ModelRunner::UnloadModel(uint32_t model_id) { + auto iter = runtime_models_.find(model_id); + if (iter != runtime_models_.end()) { + (void)runtime_models_.erase(iter); + return true; + } + + return false; +} + +bool ModelRunner::RunModel(uint32_t model_id, 
const InputData &input_data, OutputData *output_data) { + if (output_data == nullptr) { + GELOGW("Output data point is null."); + } + + auto model_iter = runtime_models_.find(model_id); + if (model_iter == runtime_models_.end()) { + GELOGE(PARAM_INVALID, "Model id %u not found.", model_id); + return false; + } + + bool status = model_iter->second->CopyInputData(input_data); + if (!status) { + GELOGE(FAILED, "Copy input data fail."); + return false; + } + + status = model_iter->second->Run(); + if (!status) { + GELOGE(FAILED, "Run model fail."); + return false; + } + + return true; +} + +bool ModelRunner::GetInputOutputDescInfo(uint32_t model_id, + bool zero_copy, + std::vector *input_desc, + std::vector *output_desc, + std::vector *input_format, + std::vector *output_format) { + if (runtime_models_.find(model_id) == runtime_models_.end()) { + GELOGE(PARAM_INVALID, "Model id %u not found.", model_id); + return false; + } + + auto model = runtime_models_[model_id]; + if (input_desc == nullptr || output_desc == nullptr) { + GELOGE(PARAM_INVALID, "input_desc or output_desc is null."); + return false; + } + + bool status = model->GetInputOutputDescInfo(zero_copy, input_desc, output_desc, input_format, output_format); + if (!status) { + GELOGE(FAILED, "Get input output desc info fail."); + return false; + } + + return true; +} +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/op_info_utils.cc b/src/ge/ge_runtime/op_info_utils.cc new file mode 100644 index 00000000..2decc0a2 --- /dev/null +++ b/src/ge/ge_runtime/op_info_utils.cc @@ -0,0 +1,632 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ge_runtime/op_info_utils.h" + +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "runtime/rt_model.h" + +namespace ge { +namespace model_runner { +static const uint32_t kDimMaxSize = 8; +static const uint32_t kPoolMaskDescDimSize = 6; +static const uint32_t kPoolMaskDescWinH = 4; +static const uint32_t kPoolMaskDescWinW = 5; + +bool OpInfoUtils::NeedTransFilter(const std::shared_ptr &data_info) { + if (data_info == nullptr) { + GELOGE(PARAM_INVALID, "data info is null."); + return false; + } + + if (data_info->input_tensors.empty()) { + GELOGE(PARAM_INVALID, "data info input tensors is empty."); + return false; + } + + return static_cast(data_info->input_tensors[0].format) == FORMAT_FILTER_HWCK || + static_cast(data_info->input_tensors[0].format) == FORMAT_HWCN; +} + +bool OpInfoUtils::TransFilterData(const std::shared_ptr &data_info, const void *in_data, uint32_t length) { + GELOGI("Start trans filter data."); + if (data_info == nullptr) { + GELOGE(PARAM_INVALID, "data info ptr is null."); + return false; + } + + if (data_info->input_tensors.empty() || data_info->output_tensors.empty()) { + GELOGE(PARAM_INVALID, "data info input tensors size %zu, output tensor size %zu.", data_info->input_tensors.size(), + data_info->output_tensors.size()); + return false; + } + + if (in_data == nullptr) { + GELOGE(PARAM_INVALID, "In data ptr is null."); + return false; + } + + // Transform to KCHW + GELOGI("copy filter data op: 
%s, need transfer.", data_info->name.c_str()); + data_info->input_tensors[0].format = static_cast(FORMAT_NCHW); + data_info->input_tensors[0].datatype = static_cast(DT_FLOAT); + data_info->input_tensors[0].dims = std::vector( + {data_info->input_tensors[0].GetDim(kHwckDimK), data_info->input_tensors[0].GetDim(kHwckDimC), + data_info->input_tensors[0].GetDim(kHwckDimH), data_info->input_tensors[0].GetDim(kHwckDimW)}); + + void *out_data = nullptr; + auto total_size = static_cast(data_info->input_tensors[0].GetShapeSize() * sizeof(float)); + if (total_size != length) { + GELOGE(FAILED, "Input filter data length(%u) not correct,need:%u!", length, total_size); + return false; + } + TransDataHWCK2KCHW(in_data, data_info->input_tensors[0].GetDim(kHwckDimH), + data_info->input_tensors[0].GetDim(kHwckDimW), data_info->input_tensors[0].GetDim(kHwckDimC), + data_info->input_tensors[0].GetDim(kHwckDimK), &out_data); + + // Transform to FracZ + // using namespace cce; + cce::ccFilterDescriptor_t input_desc = nullptr; + GE_MAKE_GUARD(input_desc, [&] { + if (input_desc) GE_CHK_CCE(cce::ccDestroyFilterDescriptor(&input_desc)); + }); + cce::ccFilterDescriptor_t output_desc = nullptr; + GE_MAKE_GUARD_FILTER_DESC(output_desc); + bool ret = InitFilterTensorDescriptor(data_info->input_tensors[0].dims, data_info->input_tensors[0].format, + data_info->input_tensors[0].datatype, input_desc); + if (!ret) { + delete[] reinterpret_cast(out_data); + out_data = nullptr; + DestroyFilterDescriptor(input_desc); + GELOGE(INTERNAL_ERROR, "InitTensorDescriptor input_desc failed."); + return false; + } + + ret = InitFilterTensorDescriptor(data_info->output_tensors[0].dims, data_info->input_tensors[0].format, + data_info->input_tensors[0].datatype, output_desc); + if (!ret) { + delete[] reinterpret_cast(out_data); + out_data = nullptr; + DestroyFilterDescriptor(output_desc); + DestroyFilterDescriptor(input_desc); + GELOGE(INTERNAL_ERROR, "InitTensorDescriptor output_desc failed."); + return false; + 
} + + void *fp16_data_addr = nullptr; + uint32_t output_size = data_info->output_tensors[0].size; + + rtError_t rt_ret = rtMallocHost(&fp16_data_addr, output_size); + if (rt_ret != RT_ERROR_NONE) { + delete[] reinterpret_cast(out_data); + out_data = nullptr; + DestroyFilterDescriptor(output_desc); + DestroyFilterDescriptor(input_desc); + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + GE_MAKE_GUARD_RTMEM(fp16_data_addr); + + cce::ccStatus_t cc_ret = cce::ccTransFilter(input_desc, out_data, output_desc, fp16_data_addr, output_size); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + delete[] reinterpret_cast(out_data); + out_data = nullptr; + DestroyFilterDescriptor(output_desc); + DestroyFilterDescriptor(input_desc); + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + + delete[] reinterpret_cast(out_data); + out_data = nullptr; + + // Copy input data to data node + const std::vector &outputs = data_info->output_addrs; + if (outputs.empty()) { + GELOGE(PARAM_INVALID, "data_info %s output_addrs is empty.", data_info->name.c_str()); + return false; + } + + rt_ret = rtMemcpy(reinterpret_cast(outputs[0]), output_size, fp16_data_addr, output_size, + RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + GELOGI("Filter data op transdata end."); + return true; +} + +bool OpInfoUtils::InitFilterTensorDescriptor(const std::vector &dims, uint32_t format, uint32_t dtype, + cce::ccFilterDescriptor_t &cc_tensor) { + if (dims.empty()) { + GELOGE(FAILED, "Invalid dim size"); + return false; + } + cce::ccTensorFormat_t cc_format = cce::tagCcTensorFormat(format); + cce::ccDataType_t data_type = cce::tagCcDataType(dtype); + if (cc_format != cce::CC_TENSOR_NCHW && cc_format != cce::CC_TENSOR_FRACTAL_Z && cc_format != cce::CC_TENSOR_HWCN) { + GELOGE(PARAM_INVALID, "Filter tensor cc_format:%u not correct.", format); + return false; + } + 
if (dims.size() <= static_cast(kNchwDimW)) { + GELOGE(PARAM_INVALID, "Array index is invalid!"); + return false; + } + + // Create tensor descriptor + cce::ccStatus_t cc_ret = cce::ccCreateFilterDescriptor(&cc_tensor); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + + if (cc_format == cce::CC_TENSOR_FRACTAL_Z) { + cc_ret = cce::ccSetFilterFractalDescriptor( + cc_tensor, cc_format, data_type, static_cast(dims[kNchwDimN]), static_cast(dims[kNchwDimC]), + static_cast(dims[kNchwDimH]), static_cast(dims[kNchwDimW])); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + } else if (cc_format == cce::CC_TENSOR_HWCN) { + cc_ret = cce::ccSetFilterFractalDescriptor( + cc_tensor, cc_format, data_type, static_cast(dims[kNchwDimW]), static_cast(dims[kNchwDimH]), + static_cast(dims[kNchwDimN]), static_cast(dims[kNchwDimC])); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + } else { + cc_ret = cce::ccSetFilterFractalDescriptor( + cc_tensor, cc_format, data_type, static_cast(dims[kNchwDimN]), static_cast(dims[kNchwDimC]), + static_cast(dims[kNchwDimH]), static_cast(dims[kNchwDimW])); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + } + return true; +} + +void OpInfoUtils::DestroyFilterDescriptor(cce::ccFilterDescriptor_t &cc_filter) { + if (cc_filter != nullptr) { + cce::ccStatus_t cc_ret = ccDestroyFilterDescriptor(&cc_filter); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "ccDestroyFilterDescriptor failed. 
ret = %d", static_cast(cc_ret)); + } + + cc_filter = nullptr; + } +} + +void OpInfoUtils::DestroyTensorDescriptor(cce::ccTensorDescriptor_t &cc_tensor) { + if (cc_tensor != nullptr) { + cce::ccStatus_t cc_ret = cce::ccDestroyTensorDescriptor(&cc_tensor); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return; + } + cc_tensor = nullptr; + } +} + +bool OpInfoUtils::IsInputTensorNeedTrans(const std::shared_ptr &data_info) { + if (data_info == nullptr) { + GELOGE(PARAM_INVALID, "data info is null."); + return false; + } + + if (data_info->input_tensors.empty() || data_info->output_tensors.empty()) { + GELOGE(PARAM_INVALID, "data info input tensors size %zu, output tensor size %zu.", data_info->input_tensors.size(), + data_info->output_tensors.size()); + return false; + } + + if (static_cast(data_info->output_tensors[0].format) == FORMAT_NC1HWC0 && + static_cast(data_info->output_tensors[0].datatype) == DT_INT8) { + // AIPP input,Consider compatibility and judge according to this condition. 
+ // Add attribute in data node to mark whether it is AIPP + return false; + } + + return data_info->input_tensors[0].format != data_info->output_tensors[0].format || + data_info->input_tensors[0].datatype != data_info->output_tensors[0].datatype; +} + +void OpInfoUtils::TransDataHWCK2KCHW(const void *input, int64_t H, int64_t W, int64_t C, int64_t K, void **output) { + if (input == nullptr) { + return; + } + if (output == nullptr) { + return; + } + const char *w_data = reinterpret_cast(input); + + int64_t count = H * W * C * K; + if (count <= 0) { + GELOGE(PARAM_INVALID, "Count value must be greater than 0, but count = %ld", count); + return; + } + + float *buf = new (std::nothrow) float[count](); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "Buf must not be null."); + return; + } + + const float *src_buff = nullptr; + float *dst_buff = nullptr; + for (int64_t h = 0; h < H; ++h) { + for (int64_t w = 0; w < W; ++w) { + for (int64_t c = 0; c < C; ++c) { + for (int64_t k = 0; k < K; ++k) { + src_buff = reinterpret_cast(w_data) + ((h * W * C * K) + (w * C * K) + (c * K) + (k)); + dst_buff = buf + ((k * C * H * W) + (c * H * W) + (h * W) + (w)); + *dst_buff = *src_buff; + } + } + } + } + *output = buf; +} + +bool OpInfoUtils::IsComputDimsSize(const uint32_t format, const uint32_t real_dim_cnt) { + return ((format == static_cast(cce::CC_TENSOR_ND)) || + ((format != static_cast(cce::CC_TENSOR_NC1KHKWHWC0)) && + (format != static_cast(cce::CC_TENSOR_C1HWNCoC0)) && + (real_dim_cnt > static_cast(DIM_DEFAULT_SIZE)))); +} + +static const auto set_real_dim_cnt = [](uint32_t real_dim_cnt, const std::vector &dim) { + return static_cast(((real_dim_cnt == 0) && (dim.size() > DIM_DEFAULT_SIZE)) ? 
dim.size() + : real_dim_cnt); +}; + +bool OpInfoUtils::InitTensorDescriptor(uint32_t format, uint32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt) { + cce::ccDataType_t data_type_ = cce::tagCcDataType(data_type); + + real_dim_cnt = set_real_dim_cnt(real_dim_cnt, dim); + + if (IsComputDimsSize(format, real_dim_cnt)) { + // (Format is ND) or (Dimension is greater than 4 and format is not NC1KHKWHWC0 or C1HWNCoC0) + return InitTensorNdDescriptor(data_type, dim, cc_tensor, real_dim_cnt); + } else if (format == static_cast(cce::CC_TENSOR_NC1KHKWHWC0)) { + return InitTensorPoolingMaskDescriptor(format, data_type, dim, cc_tensor, real_dim_cnt); + } else if (format == static_cast(cce::CC_TENSOR_C1HWNCoC0)) { + return InitTensor6dDescriptor(format, data_type, dim, cc_tensor, real_dim_cnt); + } + std::vector dim_vector; + TransferDim(dim, dim_vector); + + if (!CheckParam(format, data_type, dim_vector)) { + GELOGE(PARAM_INVALID, "Check param fail."); + return false; + } + + // Create tensor descriptor + cce::ccStatus_t cc_ret = cce::ccCreateTensorDescriptor(&cc_tensor); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + + // The last two outputs of fusedbatchnormgrad are 0. 
The special processing of fusedbatchnormgrad + if (dim.size() == 1 && dim[0] == 0) { + (void)cce::ccSetTensorRealDimCnt(cc_tensor, real_dim_cnt); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + if (format >= static_cast(cce::CC_TENSOR_HASHTABLE_LOOKUP_LOOKUPS) && + format <= static_cast(cce::CC_TENSOR_HASHTABLE_LOOKUP_HITS)) { + int32_t dims[dim.size()]; + for (size_t i = 0; i < dim.size(); ++i) { + dims[i] = static_cast(dim[i]); + } + + cc_ret = cce::ccSetTensorNdDescriptor(cc_tensor, data_type_, dim.size(), dims); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: %d", static_cast(cc_ret)); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + cce::ccTensorFormat_t tensor_format = cce::tagCcTensorFormat(format); + cc_ret = cce::ccSetTensorFormat(cc_tensor, tensor_format); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: %d", static_cast(cc_ret)); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + cc_ret = cce::ccSetTensorRealDimCnt(cc_tensor, real_dim_cnt); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + return true; + } else if (format == static_cast(cce::CC_TENSOR_NHWC)) { + return InitTensor4dDescriptor(format, data_type, cc_tensor, static_cast(dim_vector.at(0)), + static_cast(dim_vector.at(3)), static_cast(dim_vector.at(1)), + static_cast(dim_vector.at(2)), real_dim_cnt); + } else if (format == static_cast(cce::CC_TENSOR_HWCN)) { + return InitTensor4dDescriptor(format, data_type, cc_tensor, static_cast(dim_vector.at(3)), + static_cast(dim_vector.at(2)), static_cast(dim_vector.at(0)), + static_cast(dim_vector.at(1)), real_dim_cnt); + } + + // else default + return 
InitTensor4dDescriptor(format, data_type, cc_tensor, static_cast(dim_vector.at(0)), + static_cast(dim_vector.at(1)), static_cast(dim_vector.at(2)), + static_cast(dim_vector.at(3)), real_dim_cnt); +} + +void OpInfoUtils::TransferDim(const std::vector &dim, std::vector &dim_vector) { + uint32_t input_shape_size = static_cast(dim.size()); + std::list new_dim_list; + + for (auto dim_temp : dim) { + new_dim_list.push_back(dim_temp); + } + if (input_shape_size > static_cast(DIM_DEFAULT_SIZE)) { + dim_vector = dim; + GELOGI("The size of dim_vector is %u, do not to transfer dim", input_shape_size); + return; + } + switch (input_shape_size) { + case 0: { + new_dim_list.push_back(1); + new_dim_list.push_back(1); + new_dim_list.push_back(1); + new_dim_list.push_back(1); + break; + } + case 1: { + new_dim_list.push_front(1); + new_dim_list.push_back(1); + new_dim_list.push_back(1); + break; + } + case 2: { + new_dim_list.push_front(1); + new_dim_list.push_back(1); + break; + } + case 3: { + new_dim_list.push_front(1); + break; + } + default: {} + } + + dim_vector.clear(); + for (auto new_dim : new_dim_list) { + dim_vector.push_back(new_dim); + } +} + +bool OpInfoUtils::InitTensorNdDescriptor(uint32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt) { + cce::ccDataType_t data_type_ = cce::tagCcDataType(data_type); + cce::ccStatus_t cc_ret = cce::ccCreateTensorDescriptor(&cc_tensor); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + + int32_t *real_dim = nullptr; + if (real_dim_cnt > 0) { + real_dim = new (std::nothrow) int32_t[real_dim_cnt]; + if (real_dim == nullptr) { + GELOGE(FAILED, "Failed to malloc memory"); + return false; + } + } + + for (size_t i = 0; i < dim.size(); ++i) { + if (i >= real_dim_cnt || i >= kDimMaxSize) { + break; + } + real_dim[i] = static_cast(dim[i]); + } + + cc_ret = cce::ccSetTensorNdDescriptor(cc_tensor, data_type_, 
real_dim_cnt, real_dim); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + delete[] real_dim; + return false; + } + + delete[] real_dim; + return true; +} + +bool OpInfoUtils::InitTensorPoolingMaskDescriptor(uint32_t format, uint32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, uint32_t) { + cce::ccStatus_t cc_ret = cce::ccCreatePoolingMaskDescriptor(&cc_tensor); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + cce::ccTensorFormat_t format_ = cce::tagCcTensorFormat(format); + cce::ccDataType_t data_type_ = cce::tagCcDataType(data_type); + + if (dim.size() != kPoolMaskDescDimSize) { + GELOGE(PARAM_INVALID, "The dim size of format CC_TENSOR_NC1KHKWHWC0 must be 6,dim size id %zu.", dim.size()); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + cc_ret = cce::ccSetPoolingMaskTensorDescriptor( + cc_tensor, format_, data_type_, static_cast(dim[kNchwDimN]), static_cast(dim[kNchwDimC]), + static_cast(dim[kNchwDimH]), static_cast(dim[kNchwDimW]), + static_cast(dim[kPoolMaskDescWinH]), static_cast(dim[kPoolMaskDescWinW])); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + return true; +} + +bool OpInfoUtils::InitTensor6dDescriptor(uint32_t format, uint32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, uint32_t) { + cce::ccDataType_t data_type_ = cce::tagCcDataType(data_type); + cce::ccStatus_t cc_ret = cce::ccCreateTensorDescriptor(&cc_tensor); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + + cce::ccTensorFormat_t format_ = 
cce::tagCcTensorFormat(format); + if (dim.size() != static_cast(DIM_C1HWNCoC0_SIZE)) { + GELOGE(PARAM_INVALID, "The dim size of format C1HWNCoC0_DIM_SIZE must be 5,dim size id %zu.", dim.size()); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + cc_ret = cce::ccSetFilter6dDescriptor( + cc_tensor, format_, data_type_, static_cast(dim[C1HWNCoC0_DIM_C1]), + static_cast(dim[C1HWNCoC0_DIM_H]), static_cast(dim[C1HWNCoC0_DIM_W]), + static_cast(dim[C1HWNCoC0_DIM_N]), static_cast(dim[C1HWNCoC0_DIM_Co]), + static_cast(dim[C1HWNCoC0_DIM_C0])); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + return true; +} + +bool OpInfoUtils::InitTensor4dDescriptor(uint32_t format, uint32_t data_type, cce::ccTensorDescriptor_t &cc_tensor, + int32_t n, int32_t c, int32_t h, int32_t w, uint32_t real_dim_cnt) { + cce::ccDataType_t data_type_ = cce::tagCcDataType(data_type); + cce::ccTensorFormat_t format_ = cce::tagCcTensorFormat(format); + auto cc_ret = cce::ccSetTensor4dDescriptor(cc_tensor, format_, data_type_, n, c, h, w); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + cc_ret = cce::ccSetTensorRealDimCnt(cc_tensor, real_dim_cnt); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + (void)cce::ccDestroyTensorDescriptor(&cc_tensor); + cc_tensor = nullptr; + return false; + } + + return true; +} + +bool OpInfoUtils::CheckParam(uint32_t format, uint32_t data_type, const std::vector &dim_vector) { + // format + if (format >= static_cast(cce::CC_TENSOR_RESERVED)) { + GELOGE(PARAM_INVALID, "Not supported format, format = %u", format); + return false; + } + + // data type + if (data_type 
>= static_cast(cce::CC_DATA_RESERVED)) { + GELOGE(PARAM_INVALID, "Not supported data type, type = %u", data_type); + return false; + } + + // input shape + auto input_shape_size = dim_vector.size(); + if (input_shape_size != static_cast(DIM_DEFAULT_SIZE)) { + GELOGW("input_shape_size is %u", input_shape_size); + } + + return true; +} + +bool OpInfoUtils::GetOutputSize(const std::shared_ptr &op_info, std::vector &output_size_list, + std::vector &output_memory_size_list) { + if (op_info == nullptr) { + GELOGE(PARAM_INVALID, "op info is null."); + return false; + } + + for (size_t i = 0; i < op_info->output_tensors.size(); ++i) { + auto output_desc = op_info->output_tensors[i]; + bool output_tensor = op_info->output_tensors[i].is_output; + + if (output_tensor) { + // Recalculate the size directly using desc of net output op. + cce::ccTensorDescriptor_t cctensor = nullptr; + bool status = InitTensorDescriptor(output_desc.format, output_desc.datatype, output_desc.dims, cctensor, + output_desc.real_dim_cnt); + if (!status) { + GELOGE(FAILED, "InitTensorDescriptor fail."); + return false; + } + // Call the API of CCE to obtain the converted size and other parameters. 
+ uint32_t size = 0; + uint32_t memory_size = 0; + auto cc_ret0 = cce::ccGetTensorSizeInBytes(cctensor, &size); + auto cc_ret1 = cce::ccGetTensorMemorySizeInBytes(cctensor, &memory_size); + DestroyTensorDescriptor(cctensor); + if (cc_ret0 != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "ccGetTensorSizeInBytes fail, ret = 0x%X.", cc_ret0); + return false; + } + if (cc_ret1 != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "ccGetTensorMemorySizeInBytes fail, ret = 0x%X.", cc_ret1); + return false; + } + + output_size_list.push_back(size); + output_memory_size_list.push_back(memory_size); + } + } + + if (output_size_list.size() != output_memory_size_list.size()) { + GELOGE(INTERNAL_ERROR, "Output size list length %zu not equal output memory size list length %zu.", + output_size_list.size(), output_memory_size_list.size()); + return false; + } + + return true; +} + +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/op_info_utils.h b/src/ge/ge_runtime/op_info_utils.h new file mode 100644 index 00000000..8938f76d --- /dev/null +++ b/src/ge/ge_runtime/op_info_utils.h @@ -0,0 +1,81 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GE_RUNTIME_OP_INFO_UTILS_H_ +#define GE_GE_RUNTIME_OP_INFO_UTILS_H_ + +#include +#include +#include + +#include "cce/dnn.h" +#include "ge_runtime/op_info.h" +#include "graph/op_desc.h" +#include "common/ge_types.h" +#include "runtime/rt_model.h" + +namespace ge { +namespace model_runner { + +const uint32_t kNchwDimN = 0; +const uint32_t kNchwDimC = 1; +const uint32_t kNchwDimH = 2; +const uint32_t kNchwDimW = 3; + +const uint32_t kNhwcDimN = 0; +const uint32_t kNhwcDimH = 1; +const uint32_t kNhwcDimW = 2; +const uint32_t kNhwcDimC = 3; + +const uint32_t kHwckDimH = 0; +const uint32_t kHwckDimW = 1; +const uint32_t kHwckDimC = 2; +const uint32_t kHwckDimK = 3; + +const string kNetOutPut = "NetOutput"; + +class OpInfoUtils { + public: + static bool InitTensorDescriptor(uint32_t format, uint32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt = 0); + static void DestroyTensorDescriptor(cce::ccTensorDescriptor_t &cc_tensor); + static bool NeedTransFilter(const std::shared_ptr &data_info); + static bool TransFilterData(const std::shared_ptr &data_info, const void *in_data, uint32_t length); + static bool IsInputTensorNeedTrans(const std::shared_ptr &data_info); + static bool GetOutputSize(const std::shared_ptr &op_info, std::vector &output_size_list, + std::vector &output_memory_size_list); + + private: + static bool InitFilterTensorDescriptor(const std::vector &dims, uint32_t format, uint32_t dtype, + cce::ccFilterDescriptor_t &cc_tensor); + static void TransDataHWCK2KCHW(const void *input, int64_t H, int64_t W, int64_t C, int64_t K, void **output); + static void DestroyFilterDescriptor(cce::ccFilterDescriptor_t &cc_filter); + static bool IsComputDimsSize(const uint32_t format, const uint32_t real_dim_cnt); + static void TransferDim(const std::vector &dim, std::vector &dim_vector); + static bool InitTensorNdDescriptor(uint32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, 
uint32_t real_dim_cnt); + static bool InitTensorPoolingMaskDescriptor(uint32_t format, uint32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt); + static bool InitTensor6dDescriptor(uint32_t format, uint32_t data_type, const std::vector &dim, + cce::ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt); + static bool InitTensor4dDescriptor(uint32_t format, uint32_t data_type, cce::ccTensorDescriptor_t &cc_tensor, + int32_t n, int32_t c, int32_t h, int32_t w, uint32_t real_dim_cnt); + static bool CheckParam(uint32_t format, uint32_t data_type, const std::vector &dim_vector); +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_OP_INFO_UTILS_H_ diff --git a/src/ge/ge_runtime/output.cc b/src/ge/ge_runtime/output.cc new file mode 100644 index 00000000..a3922dd9 --- /dev/null +++ b/src/ge/ge_runtime/output.cc @@ -0,0 +1,148 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_runtime/output.h" + +#include "./op_info_utils.h" +#include "cce/dnn_base.h" +#include "cce/dnn_base_def.hpp" +#include "common/ge_inner_error_codes.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" + +using cce::ccTensorDescriptor_t; +using cce::ccDestroyTensorDescriptor; + +namespace ge { +namespace model_runner { +Output::Output(const OpInfoPtr &op_info, const std::shared_ptr &model) + : model_(model), op_info_(op_info), input_num_(0) {} + +Output::~Output() {} + +bool Output::Init() { + if (op_info_ == nullptr || model_ == nullptr) { + GELOGE(INTERNAL_ERROR, "The op_desc_ or model_ is nullptr."); + return false; + } + + input_num_ = op_info_->input_tensors.size(); + v_input_size_.clear(); + v_input_data_addr_.clear(); + + auto input_vector = op_info_->input_addrs; + if (input_num_ != input_vector.size()) { + GELOGE(INTERNAL_ERROR, "The input desc size: %zu != input addr size: %zu.", input_num_, input_vector.size()); + return false; + } + + for (size_t i = 0; i < input_num_; i++) { + uint32_t tensorSize = 0; + const auto &input_info = op_info_->input_tensors.at(i); + tensorSize = input_info.size; + v_input_size_.push_back(tensorSize); + v_input_data_addr_.push_back(reinterpret_cast(input_vector.at(i))); + } + + GELOGI("Init output:%zu, %zu, %zu", input_num_, v_input_size_.size(), v_input_data_addr_.size()); + + return true; +} + +/// +/// @ingroup domi_ome +/// @brief Copy Op Output to user space. +/// @brief when model running, Add one DataOp as input node, Add one Output Op as output node. 
+/// @return Status +/// +bool Output::CopyRslt(OutputData *rslt, uint32_t data_begin, uint32_t &data_index, bool support_mem_share) { + if (rslt == nullptr) { + GELOGE(FAILED, "OutputData is null."); + return false; + } + uint32_t data_count = 0; + if (v_input_size_.empty() || v_input_data_addr_.empty()) { + GELOGE(INTERNAL_ERROR, "v_output_size_ or v_output_data_addr_ is empty!"); + return false; + } + + for (size_t i = 0; i < input_num_; i++) { + DataBuffer data_buf = rslt->blobs[data_begin + data_count]; + bool ret = SetDataBuf(data_buf, data_count, i, support_mem_share); + if (!ret) { + GELOGE(FAILED, "Copy data to host error. index: %lu", i); + return ret; + } + data_index = data_begin + data_count; + } + + return true; +} + +bool Output::SetDataBuf(DataBuffer &data_buf, uint32_t &data_count, size_t i, bool support_mem_share) { + if (op_info_ == nullptr) { + GELOGE(FAILED, "op_info_ is null"); + return false; + } + if (data_buf.length == 0) { + ++data_count; + GELOGD("data_buf.length = 0,do not need copy, output op : %s, output tensor index : %zu!", + op_info_->name.c_str(), i); + return true; + } + + ccTensorDescriptor_t cc_tensor_desc = nullptr; + GE_MAKE_GUARD_TENSOR(cc_tensor_desc); + + if (i >= op_info_->input_tensors.size()) { + GELOGE(FAILED, "tensor_info is null"); + return false; + } + + auto tensor_info = op_info_->input_tensors.at(i); + + if (data_buf.isDataSupportMemShare && support_mem_share) { + GELOGI("No need to copy input data, user's output data buffer can be shared."); + } else { + // copy result to Databuf + uint32_t size = v_input_size_[i]; + GELOGI("Tensor data size before: %u", size); + if (!OpInfoUtils::InitTensorDescriptor(tensor_info.format, tensor_info.datatype, tensor_info.dims, + cc_tensor_desc)) { + GELOGE(FAILED, "OpUtils::InitTensorDescriptor tensorDesc failed."); + return false; + } + if (ccGetTensorSizeInBytes(cc_tensor_desc, &size) != CC_STATUS_SUCCESS) { + return false; + } + rtError_t rt_ret = rtMemcpy(data_buf.data, size, 
v_input_data_addr_[i], size, RT_MEMCPY_DEVICE_TO_HOST); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtmemcpy error"); + return false; + } + GELOGI("Tensor data size: %u data_buflength: %u", size, data_buf.length); + OpInfoUtils::DestroyTensorDescriptor(cc_tensor_desc); + } + + ++data_count; + GELOGD("Successfully copy the output tensor memory to buffer, output op : %s, output tensor index : %lu!", + op_info_->name.c_str(), i); + + return false; +} + +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/output.h b/src/ge/ge_runtime/output.h new file mode 100644 index 00000000..24f86923 --- /dev/null +++ b/src/ge/ge_runtime/output.h @@ -0,0 +1,54 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GE_RUNTIME_OUTPUT_H_ +#define GE_GE_RUNTIME_OUTPUT_H_ + +#include +#include + +#include "ge_runtime/davinci_model.h" +#include "common/ge_types.h" + +namespace ge { +namespace model_runner { + +class Output { + public: + Output(const OpInfoPtr &op_info, const std::shared_ptr &model); + virtual ~Output(); + bool Init(); + + bool CopyRslt(OutputData *rslt, uint32_t data_begin, uint32_t &data_index, bool support_mem_share); + + bool SetDataBuf(DataBuffer &data_buf, uint32_t &data_count, size_t i, bool support_mem_share); + + // Copy assignment operator and copy constructor are deleted + Output &operator=(const Output &output) = delete; + Output(const Output &output) = delete; + + protected: + std::shared_ptr model_; + OpInfoPtr op_info_; + + // Input descriptions + size_t input_num_; + vector v_input_data_addr_; // Init as:buf_base + op_def_->input(i)); + vector v_input_size_; +}; +} // namespace model_runner +} // namespace ge +#endif // GE_GE_RUNTIME_OUTPUT_H_ diff --git a/src/ge/ge_runtime/runtime_model.cc b/src/ge/ge_runtime/runtime_model.cc new file mode 100644 index 00000000..14b478f3 --- /dev/null +++ b/src/ge/ge_runtime/runtime_model.cc @@ -0,0 +1,835 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_runtime/runtime_model.h" + +#include + +#include "./model_context.h" +#include "./task/task.h" +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/op/op_parser_util.h" +#include "graph/types.h" +#include "ge_runtime/op_info_utils.h" +#include "task/task_factory.h" + +namespace ge { +namespace model_runner { +RuntimeModel::~RuntimeModel() { + GELOGI("RuntimeModel destructor start"); + + // Release task first, hccl task hold stream + task_list_.clear(); + + // Unbind rtModel from all task related streams + RtModelUnbindStream(); + + // Release all task related streams + RtStreamDestroy(); + + // Release rtlabel resource + RtLabelDestroy(); + + // Release rtEvent resourece + RtEventDestroy(); + + GELOGI("Do RtModelDestroy"); + // Release all rt_model + RtModelDestroy(); +} + +bool RuntimeModel::InitStream(std::shared_ptr &davinci_model) { + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "Davinci model is null."); + return false; + } + + std::set wait_active_streams; + std::set force_copy_streams; + + for (const auto &stream_id : davinci_model->GetWaitActiveStreams()) { + GELOGI("stream id %u is wait active stream.", stream_id); + (void)wait_active_streams.insert(stream_id); + } + + for (const auto &stream_id : davinci_model->GetForceCopyStreams()) { + GELOGI("stream id %u is force copy stream.", stream_id); + (void)force_copy_streams.insert(stream_id); + } + + GELOGI("stream number:%u", davinci_model->GetStreamNum()); + for (uint32_t i = 0; i < davinci_model->GetStreamNum(); ++i) { + rtStream_t stream = nullptr; + uint32_t flag = (force_copy_streams.find(i) != force_copy_streams.end()) + ? 
(RT_STREAM_PERSISTENT | RT_STREAM_FORCE_COPY) + : (RT_STREAM_PERSISTENT); + + rtError_t rt_ret = rtStreamCreateWithFlags(&stream, davinci_model->GetPriority(), flag); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtStreamCreate failed, ret: 0x%X", rt_ret); + return false; + } + + GELOGI("rtStreamCreateWithFlags end."); + + stream_list_.emplace_back(stream); + + // Bind rt_model_handle_ to all task related streams + flag = (wait_active_streams.find(i) != wait_active_streams.end()) ? (static_cast(RT_INVALID_FLAG)) + : (static_cast(RT_HEAD_STREAM)); + rt_ret = rtModelBindStream(rt_model_handle_, stream, flag); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtModelBindStream failed, ret: 0x%X", rt_ret); + return false; + } + } + + return true; +} + +bool RuntimeModel::InitEvent(uint32_t event_num) { + GELOGI("event number:%u.", event_num); + for (uint32_t i = 0; i < event_num; ++i) { + rtEvent_t rt_event; + rtError_t rt_ret = rtEventCreate(&rt_event); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtEventCreate failed, i; %u; ret: 0x%X", i, rt_ret); + return false; + } + event_list_.push_back(rt_event); + } + return true; +} + +bool RuntimeModel::InitLabel(uint32_t batch_num) { + GELOGI("batch number:%u.", batch_num); + for (uint32_t i = 0; (batch_num != 0 && i <= batch_num); ++i) { + rtLabel_t rt_lLabel = nullptr; + rtError_t rt_ret = rtLabelCreate(&rt_lLabel); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtLabelCreate failed, i; %u; ret: 0x%X", i, rt_ret); + return false; + } + + if (rt_lLabel == nullptr) { + GELOGE(RT_FAILED, "rtLabel is nullptr!"); + return false; + } + + label_list_.emplace_back(rt_lLabel); + } + return true; +} + +bool RuntimeModel::InitResource(std::shared_ptr &davinci_model) { + GELOGI("InitResource start"); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci model is null"); + return false; + } + rtError_t rt_ret = rtModelCreate(&rt_model_handle_, 0); 
+ if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtModelCreate failed, ret: 0x%X", rt_ret); + return false; + } + + // Create rtStream for rt_model_handle_ + rt_ret = rtStreamCreate(&rt_model_stream_, davinci_model->GetPriority()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtStreamCreate failed, ret: 0x%X", rt_ret); + return false; + } + GELOGI("rtStreamCreate end"); + + if (!InitStream(davinci_model)) { + return false; + } + + if (!InitEvent(davinci_model->GetEventNum())) { + return false; + } + + if (!InitLabel(davinci_model->GetBatchNum())) { + return false; + } + + GELOGI("InitResource succ"); + return true; +} + +void RuntimeModel::GenerateTask(uint32_t device_id, uint64_t session_id, std::shared_ptr &davinci_model) { + GELOGI("GenerateTask start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci model is null"); + return; + } + auto task_infos = davinci_model->GetTaskInfoList(); + ModelContext model_context(device_id, session_id, davinci_model->GetPriority(), rt_model_handle_, rt_model_stream_, + stream_list_, label_list_, event_list_); + for (auto &task_info : task_infos) { + auto task = TaskFactory::GetInstance().Create(model_context, task_info); + task_list_.push_back(task); + } + GELOGI("GenerateTask succ."); +} + +bool RuntimeModel::LoadTask() { + GELOGI("LoadTask start."); + for (auto &task : task_list_) { + if (task == nullptr) { + GELOGE(PARAM_INVALID, "task is null."); + continue; + } + bool ret = task->Distribute(); + if (!ret) { + GELOGE(FAILED, "task distribute fail."); + return false; + } + + uint32_t task_id = 0; + rtError_t rt_ret = rtModelGetTaskId(rt_model_handle_, &task_id); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X.", rt_ret); + return false; + } + task_id_list_.push_back(task_id); + } + GELOGI("Distribute task succ."); + + auto rt_ret = rtModelLoadComplete(rt_model_handle_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt 
api rtModelLoadComplete failed, ret: 0x%X.", rt_ret); + return false; + } + + GELOGI("LoadTask succ."); + return true; +} + +bool RuntimeModel::Load(uint32_t device_id, uint64_t session_id, std::shared_ptr &davinci_model) { + bool status = InitResource(davinci_model); + if (!status) { + GELOGE(FAILED, "InitResource failed."); + return status; + } + + status = InitDataInfo(davinci_model); + if (!status) { + GELOGE(FAILED, "InitDataInfo failed."); + return status; + } + + status = InitOutputInfo(davinci_model); + if (!status) { + GELOGE(FAILED, "InitOutputInfo failed."); + return status; + } + + status = InitConstantInfo(davinci_model); + if (!status) { + GELOGE(FAILED, "InitConstantInfo failed."); + return status; + } + + GenerateTask(device_id, session_id, davinci_model); + + status = LoadTask(); + if (!status) { + GELOGE(FAILED, "DistributeTask failed"); + return status; + } + + return status; +} + +bool RuntimeModel::Run() { + GELOGI("Davinci task run start"); + rtError_t ret = rtModelExecute(rt_model_handle_, rt_model_stream_, 0); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Model execute failed, ret = 0x%X", ret); + return false; + } + + GELOGI("Run rtModelExecute success"); + + ret = rtStreamSynchronize(rt_model_stream_); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Model stream sync failed, ret = 0x%X", ret); + return false; + } + + GELOGI("Davinci task run succ."); + return true; +} + +void RuntimeModel::RtModelUnbindStream() noexcept { + for (size_t i = 0; i < stream_list_.size(); i++) { + if (rtModelUnbindStream(rt_model_handle_, stream_list_[i]) != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Unbind stream from model failed! 
Index: %zu", i); + return; + } + } +} + +void RuntimeModel::RtStreamDestroy() noexcept { + if (rtStreamDestroy(rt_model_stream_) != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Destroy stream for rt_model failed!"); + return; + } + + for (size_t i = 0; i < stream_list_.size(); i++) { + if (rtStreamDestroy(stream_list_[i]) != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Destroy stream failed! Index: %zu", i); + return; + } + } +} + +void RuntimeModel::RtLabelDestroy() noexcept { + for (size_t i = 0; i < label_list_.size(); i++) { + if (rtLabelDestroy(label_list_[i]) != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Destroy label failed! Index: %zu.", i); + return; + } + } +} + +void RuntimeModel::RtModelDestroy() noexcept { + rtError_t ret = rtModelDestroy(rt_model_handle_); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", ret); + return; + } +} + +void RuntimeModel::RtEventDestroy() noexcept { + for (size_t i = 0; i < event_list_.size(); i++) { + if (rtEventDestroy(event_list_[i]) != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Destroy event failed! 
Index: %zu", i); + return; + } + } +} + +bool RuntimeModel::InitDataInfo(std::shared_ptr &davinci_model) { + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci model is null"); + return false; + } + data_info_list_ = davinci_model->GetDataInfoList(); + for (auto &data_info : data_info_list_) { + cce::ccTensorDescriptor_t input_desc = nullptr; + cce::ccTensorDescriptor_t output_desc = nullptr; + if (data_info == nullptr) { + GELOGE(PARAM_INVALID, "data info ptr is null."); + return false; + } + + if (data_info->input_tensors.empty() || data_info->output_tensors.empty()) { + GELOGE(PARAM_INVALID, "data info input tensors size %zu, output tensor size %zu.", + data_info->input_tensors.size(), data_info->output_tensors.size()); + return false; + } + + if (static_cast(data_info->input_tensors[0].format) != FORMAT_FILTER_HWCK) { + bool ret = OpInfoUtils::InitTensorDescriptor(data_info->input_tensors[0].format, + data_info->input_tensors[0].datatype, + data_info->input_tensors[0].dims, input_desc, + data_info->input_tensors[0].real_dim_cnt); + if (!ret) { + GELOGE(FAILED, "InitTensorDescriptor Fail."); + OpInfoUtils::DestroyTensorDescriptor(input_desc); + return false; + } + + input_tensor_desc_list_[data_info->name] = input_desc; + } + + if (static_cast(data_info->output_tensors[0].format) != FORMAT_FRACTAL_Z) { + bool ret = OpInfoUtils::InitTensorDescriptor(data_info->output_tensors[0].format, + data_info->output_tensors[0].datatype, + data_info->output_tensors[0].dims, output_desc, + data_info->output_tensors[0].real_dim_cnt); + if (!ret) { + GELOGE(FAILED, "InitTensorDescriptor Fail."); + OpInfoUtils::DestroyTensorDescriptor(output_desc); + return false; + } + + output_tensor_desc_list_[data_info->name] = output_desc; + } + } + + return true; +} + +bool RuntimeModel::InitOutputInfo(std::shared_ptr &davinci_model) { + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci model is null"); + return false; + } + output_info_list_ = 
davinci_model->GetOutputInfoList(); + return true; +} + +bool RuntimeModel::CopyInputData(const InputData &input_data) { + if (input_data.blobs.size() != data_info_list_.size()) { + GELOGE(PARAM_INVALID, "The input data list size (%zu) does not match the model input list size (%zu)", + input_data.blobs.size(), data_info_list_.size()); + return false; + } + + for (const auto &data_info : data_info_list_) { + if (data_info == nullptr) { + GELOGE(PARAM_INVALID, "data info is null."); + return false; + } + + bool ret = CopyInputDataToModel(input_data.blobs, data_info); + if (!ret) { + GELOGE(FAILED, "Copy input data to model ret fail, data_info: %s, model id: %u", data_info->name.c_str(), + input_data.model_id); + return false; + } + } + + return true; +} + +bool RuntimeModel::CopyInputDataToModel(const std::vector &data, const std::shared_ptr &data_info) { + if (data_info == nullptr) { + GELOGE(PARAM_INVALID, "data info is empty."); + return false; + } + GELOGI("Start copy input data to model, data info: %s.", data_info->name.c_str()); + if (data.empty()) { + GELOGE(PARAM_INVALID, "data buffer is empty."); + return false; + } + + // Check size + if (data_info->input_tensors.size() != 1 || data_info->output_tensors.size() != 1) { + GELOGE(PARAM_INVALID, "Data Op has invalid input_desc_size(%zu) or output_desc_size(%zu)", + data_info->input_tensors.size(), data_info->output_tensors.size()); + return false; + } + + // Process filter weight input while online + if (OpInfoUtils::NeedTransFilter(data_info)) { + bool ret = OpInfoUtils::TransFilterData(data_info, data[data_info->index].data, data[data_info->index].length); + if (!ret) { + GELOGE(FAILED, "TransFilterData fail."); + return false; + } + return true; + } + + if (data_info->input_tensors[0].size >= data[data_info->index].length) { + GELOGE(PARAM_INVALID, "The input data size(%u) does not match model required size(%u), ret fail.", + data[data_info->index].length, data_info->input_tensors[0].size); + return false; + 
} + + // float to float16 + bool need_trans_flag = OpInfoUtils::IsInputTensorNeedTrans(data_info); + if (need_trans_flag) { + return CopyTransData(data, data_info); + } else { + return CopyHostData(data, data_info); + } +} + +bool RuntimeModel::CopyHostData(const std::vector &data, const std::shared_ptr &data_info) const { + GELOGI("Start CopyHostData."); + if (data.empty()) { + GELOGE(PARAM_INVALID, "data buffer is empty."); + return false; + } + + if (data_info == nullptr) { + GELOGE(PARAM_INVALID, "data info is null."); + return false; + } + + void *host_data_addr = data[data_info->index].data; + uint32_t copy_size = data[data_info->index].length; + GELOGD("data output tensor is aipp tensor,copy data only."); + + const std::vector &outputs = data_info->output_addrs; + if (outputs.empty()) { + GELOGE(PARAM_INVALID, "Output addrs is empty."); + return false; + } + + // Copy input data to data nodes + void *data_out_addr = reinterpret_cast(outputs[0]); + + rtError_t rt_ret = rtMemcpy(data_out_addr, copy_size, host_data_addr, copy_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + return true; +} + +bool RuntimeModel::CopyTransData(const std::vector &data, const std::shared_ptr &data_info) { + GELOGI("Start CopyTransData."); + if (data.empty()) { + GELOGE(PARAM_INVALID, "data buffer is empty."); + return false; + } + + if (data_info == nullptr) { + GELOGE(PARAM_INVALID, "data info is null."); + return false; + } + + if (data_info->output_tensors.empty()) { + GELOGE(PARAM_INVALID, "data info output tensors is empty."); + return false; + } + + const std::vector &outputs = data_info->output_addrs; + if (outputs.empty()) { + GELOGE(PARAM_INVALID, "output addrs is empty."); + return false; + } + + void *fp16_data_addr = nullptr; + uint32_t copy_size = data_info->output_tensors[0].size; + GE_MAKE_GUARD_RTMEM(fp16_data_addr); + + rtError_t rt_ret = 
rtMallocHost(&fp16_data_addr, copy_size); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + cce::ccStatus_t cc_ret = cce::ccTransTensor(input_tensor_desc_list_[data_info->name], data[data_info->index].data, + output_tensor_desc_list_[data_info->name], fp16_data_addr, copy_size); + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return false; + } + void *host_data_addr = fp16_data_addr; + + GELOGI("data output tensor is not aipp tensor,call cce trans tensor."); + GELOGI("output[0]=%ld, copy_size=%u", outputs[0], copy_size); + + rt_ret = rtMemcpy(reinterpret_cast(outputs[0]), copy_size, host_data_addr, copy_size, + RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + return true; +} + +bool RuntimeModel::InitConstantInfo(std::shared_ptr &davinci_model) { + // Const no input, only 1 output, and this output has no data + // weight data copy to output mem + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "Davinci model is null."); + return false; + } + constant_info_list_ = davinci_model->GetConstantInfoList(); + + for (const auto &constant : constant_info_list_) { + if (constant == nullptr) { + GELOGE(PARAM_INVALID, "constant is null"); + continue; + } + if (constant->output_tensors.empty()) { + GELOGE(PARAM_INVALID, "Output tensors is empty"); + return false; + } + + if (constant->weight_tensors.empty()) { + GELOGE(PARAM_INVALID, "Weight tensors is empty"); + return false; + } + + if (constant->output_tensors[0].size < constant->weight_data.size()) { + GELOGE(PARAM_INVALID, "Output size:%u less than weight data size:%zu", + constant->output_tensors[0].size, constant->weight_data.size()); + return false; + } + + if (constant->weight_data.empty()) { + GELOGW("Const op:%s has no weight data.", constant->name.c_str()); + continue; + } + + if 
(constant->weight_tensors[0].datatype == DT_STRING) { + /// If tensor is a scaler, it's shape size if zero, according ge_tensor.cc. + /// The logic of GetShapeSize is wrong, the scaler tensor's GetShapeSize is zero + /// and that of unknown shape is zero too. + /// Unknown shape will not appear here, so we can use zero judge a tensor is scaler or not. + int64_t elem_num = (constant->weight_tensors[0].GetShapeSize() == 0) ? + 1 : constant->weight_tensors[0].GetShapeSize(); + if (constant->weight_data.size() < sizeof(uint64_t)) { + GELOGE(FAILED, "weight_data size is smaller than sizeof(uint64_t)"); + return false; + } + uint64_t *buff = reinterpret_cast(const_cast(constant->weight_data.data())); + int64_t offset = elem_num * 8; + uintptr_t hbm_raw_data_base_addr = reinterpret_cast(constant->output_addrs[0]) + offset; + for (int64_t i = elem_num - 1; i >= 0; --i) { + buff[i] = hbm_raw_data_base_addr + (buff[i] - buff[0]); + } + } + + rtError_t rt_ret = rtMemcpy(reinterpret_cast(constant->output_addrs[0]), constant->output_tensors[0].size, + constant->weight_data.data(), constant->weight_data.size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtGetFunctionByName failed, ret: 0x%X", rt_ret); + return false; + } + } + + return true; +} + +bool RuntimeModel::GetInputOutputDescInfo(bool zero_copy, + std::vector *input_desc, + std::vector *output_desc, + std::vector *input_format, + std::vector *output_format) { + if ((data_info_list_.empty()) || (data_info_list_[0]->input_tensors.size() != 1)) { + // Maybe there is no datainput info while online + if (!zero_copy && input_format == nullptr) { + GELOGW("Data op List is null or input_desc size is not 1!"); + } else { + GELOGE(FAILED, "Data op List is null or input_desc size is not 1!"); + return false; + } + } + + bool ret = GetInputDescInfo(input_desc, input_format); + if (!ret) { + GELOGE(FAILED, "Get input desc info failed."); + return false; + } + + ret = 
GetOutputDescInfo(output_desc, output_format); + if (!ret) { + GELOGE(FAILED, "Get output desc info failed."); + return false; + } + + std::vector output_size_list; + std::vector output_memory_size_list; + for (const auto &output_op : output_info_list_) { + if (!OpInfoUtils::GetOutputSize(output_op, output_size_list, output_memory_size_list)) { + GELOGE(FAILED, "GetOutputSize fail."); + return false; + } + } + + if (output_desc->size() != output_size_list.size()) { + GELOGE(INTERNAL_ERROR, "output_desc size[%zu] not equal output_size_list_[%zu] size!", output_desc->size(), + output_size_list.size()); + return false; + } + + const std::vector &size_list = (zero_copy) ? (output_memory_size_list) : (output_size_list); + for (size_t i = 0; i < output_size_list.size(); ++i) { + output_desc->at(i).size = size_list[i]; + } + + return true; +} + +bool RuntimeModel::GetInputDescInfo(std::vector *input_desc, + std::vector *formats) { + if (input_desc == nullptr) { + GELOGE(PARAM_INVALID, "Input desc is null."); + return false; + } + + // Analyze input dimension information + for (size_t index = 0; index < data_info_list_.size(); ++index) { + if (data_info_list_[index]->input_tensors.empty()) { + GELOGE(INTERNAL_ERROR, "data info list index %zu input tensors is empty.", index); + return false; + } + InputOutputDescInfo input; + uint32_t n, c, h, w; + Format format = static_cast(data_info_list_[index]->input_tensors[0].format); + if (format == FORMAT_NHWC) { + n = kNhwcDimN; + c = kNhwcDimC; + h = kNhwcDimH; + w = kNhwcDimW; + } else { + n = kNchwDimN; + c = kNchwDimC; + h = kNchwDimH; + w = kNchwDimW; + } + + if (data_info_list_[index]->input_tensors[0].dims.size() == static_cast(domi::NORMAL_TENSOR_SIZE)) { + input.shape_info.num = data_info_list_[index]->input_tensors[0].GetDim(n); + input.shape_info.height = data_info_list_[index]->input_tensors[0].GetDim(h); + input.shape_info.width = data_info_list_[index]->input_tensors[0].GetDim(w); + input.shape_info.channel = 
data_info_list_[index]->input_tensors[0].GetDim(c); + } + // Original network dimension + for (size_t k = 0; k < data_info_list_[index]->input_tensors[0].dims.size(); ++k) { + input.shape_info.dims.push_back(data_info_list_[index]->input_tensors[0].GetDim(k)); + } + + input.data_type = data_info_list_[index]->input_tensors[0].datatype; + input.name = data_info_list_[index]->name; + input.size = data_info_list_[index]->input_tensors[0].size; + + input_desc->push_back(input); + if (formats != nullptr) { + formats->push_back(format); + } + } + + return true; +} + +bool RuntimeModel::GetOutputDescInfo(std::vector *output_desc, + std::vector *formats) { + if (output_desc == nullptr) { + GELOGE(PARAM_INVALID, "Output desc is null."); + return false; + } + + // Analyze output dimension information + for (size_t i = 0; i < output_info_list_.size(); ++i) { + const auto &op_info = output_info_list_[i]; + if (op_info == nullptr) { + GELOGE(PARAM_INVALID, "Op info at %zu is null.", i); + return false; + } + auto out_size = static_cast(op_info->output_tensors.size()); + for (uint32_t index = 0; index < out_size; ++index) { + bool is_output = op_info->output_tensors[index].is_output; + if (!is_output) { + continue; + } + + std::string output_name; + InputOutputDescInfo output; + uint32_t format_result; + CreateOutput(index, *op_info, &output, &format_result); + + std::vector src_name = op_info->src_name; + std::vector src_index = op_info->src_index; + if (op_info->type == kNetOutPut) { + GELOGI("Op info %s index %zu is NETOUTPUT.", op_info->name.c_str(), i); + if (index >= src_name.size() || index >= src_index.size()) { + GELOGE(INTERNAL_ERROR, "Construct output_name failed."); + return false; + } + output_name = std::string("output_") + std::to_string(index) + "_" + src_name[index] + "_" + + std::to_string(src_index[index]); + } else { + GELOGI("Op info %s index %zu is not NETOUTPUT, type: %s.", op_info->name.c_str(), i, op_info->type.c_str()); + output_name = 
std::string("output_") + std::to_string(i) + "_" + op_info->name + "_" + std::to_string(index); + } + output.name = output_name; + + output_desc->push_back(output); + if (formats != nullptr) { + formats->push_back(format_result); + } + } + } + return true; +} + +void RuntimeModel::CreateOutput(uint32_t index, const OpInfo &op_info, InputOutputDescInfo *output, + uint32_t *format_result) { + if (output == nullptr) { + GELOGE(PARAM_INVALID, "Output desc is null."); + return; + } + + int64_t dims[] = {1, 1, 1, 1}; + if (index >= op_info.output_tensors.size()) { + GELOGE(PARAM_INVALID, "op_info %s output_tensors size %zu, but index %u.", op_info.name.c_str(), + op_info.output_tensors.size(), index); + return; + } + + TensorInfo output_tensor = op_info.output_tensors[index]; + Format format = static_cast(output_tensor.format); + if (format_result != nullptr) { + *format_result = format; + } + + if (format == FORMAT_ND) { // For ND tensor + for (size_t i = 0; i < output_tensor.dims.size() && i < (sizeof(dims) / sizeof(dims[0])); ++i) { + dims[i] = static_cast(output_tensor.GetDim(i)); + } + } else if (format == FORMAT_NHWC) { // For FORMAT_NHWC + dims[0] = output_tensor.GetDim(kNhwcDimN); + dims[1] = output_tensor.GetDim(kNhwcDimC); + dims[2] = output_tensor.GetDim(kNhwcDimH); + dims[3] = output_tensor.GetDim(kNhwcDimW); + } else { // For FORMAT_NCHW + dims[0] = output_tensor.GetDim(kNchwDimN); + dims[1] = output_tensor.GetDim(kNchwDimC); + dims[2] = output_tensor.GetDim(kNchwDimH); + dims[3] = output_tensor.GetDim(kNchwDimW); + } + + output->shape_info.num = dims[0]; // 0: First dim + output->shape_info.channel = dims[1]; // 1: Second dim + output->shape_info.height = dims[2]; // 2: Third dim + output->shape_info.width = dims[3]; // 3: Forth dim + + if (index >= op_info.input_tensors.size()) { + GELOGE(PARAM_INVALID, "input tensors size %zu less than index %u.", op_info.input_tensors.size(), index); + return; + } + + if (op_info.input_tensors[index].format == 
FORMAT_FRACTAL_Z) { // FraczToHWCK + int64_t k = output_tensor.GetDim(0); // 0: First dim + int64_t c = output_tensor.GetDim(1); // 1: Second dim + int64_t h = output_tensor.GetDim(2); // 2: Third dim + int64_t w = output_tensor.GetDim(3); // 3: Forth dim + output->shape_info.dims.push_back(h); + output->shape_info.dims.push_back(w); + output->shape_info.dims.push_back(c); + output->shape_info.dims.push_back(k); + + if (format_result != nullptr) { + *format_result = FORMAT_HWCN; + } + } else { + for (size_t j = 0; j < output_tensor.dims.size(); ++j) { + output->shape_info.dims.push_back(output_tensor.GetDim(j)); + } + } + + output->data_type = output_tensor.datatype; +} + +const std::vector &RuntimeModel::GetTaskIdList() const { return task_id_list_; } + +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/runtime_model.h b/src/ge/ge_runtime/runtime_model.h new file mode 100644 index 00000000..e304cafb --- /dev/null +++ b/src/ge/ge_runtime/runtime_model.h @@ -0,0 +1,92 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GE_RUNTIME_RUNTIME_MODEL_H_ +#define GE_GE_RUNTIME_RUNTIME_MODEL_H_ + +#include +#include +#include +#include + +#include "cce/dnn_base_def.hpp" +#include "ge_runtime/davinci_model.h" +#include "common/ge_types.h" +#include "runtime/base.h" +#include "runtime/rt_model.h" + +namespace ge { +namespace model_runner { + +class Task; +class RuntimeModel { + public: + RuntimeModel() = default; + ~RuntimeModel(); + + bool Load(uint32_t device_id, uint64_t session_id, std::shared_ptr &davinci_model); + const std::vector &GetTaskIdList() const; + bool Run(); + bool CopyInputData(const InputData &input_data); + bool GetInputOutputDescInfo(bool zero_copy, + std::vector *input_desc, + std::vector *output_desc, + std::vector *input_format, + std::vector *output_format); + + private: + bool InitResource(std::shared_ptr &davinci_model); + void GenerateTask(uint32_t device_id, uint64_t session_id, std::shared_ptr &davinci_model); + bool LoadTask(); + bool InitStream(std::shared_ptr &davinci_model); + bool InitEvent(uint32_t event_num); + bool InitLabel(uint32_t batch_num); + bool InitDataInfo(std::shared_ptr &davinci_model); + bool InitOutputInfo(std::shared_ptr &davinci_model); + bool InitConstantInfo(std::shared_ptr &davinci_model); + void RtModelUnbindStream() noexcept; + void RtStreamDestroy() noexcept; + void RtModelDestroy() noexcept; + void RtLabelDestroy() noexcept; + void RtEventDestroy() noexcept; + bool CopyInputDataToModel(const std::vector &data, const std::shared_ptr &data_info); + bool CopyHostData(const std::vector &data, const std::shared_ptr &data_info) const; + bool CopyTransData(const std::vector &data, const std::shared_ptr &data_info); + bool GetInputDescInfo(std::vector *input_desc, std::vector *formats); + bool GetOutputDescInfo(std::vector *output_desc, std::vector *formats); + void CreateOutput(uint32_t index, const OpInfo &op_info, InputOutputDescInfo *output, uint32_t *format); + + rtModel_t rt_model_handle_{}; + rtStream_t 
rt_model_stream_{}; + + std::vector stream_list_{}; + std::vector label_list_{}; + std::vector event_list_{}; + + std::vector> task_list_{}; + std::vector> data_info_list_{}; + std::vector> output_info_list_{}; + std::vector> constant_info_list_{}; + std::map input_tensor_desc_list_{}; + std::map output_tensor_desc_list_{}; + + std::vector task_id_list_{}; +}; + +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_RUNTIME_MODEL_H_ diff --git a/src/ge/ge_runtime/task/aicpu_task.cc b/src/ge/ge_runtime/task/aicpu_task.cc new file mode 100644 index 00000000..cfb1cc77 --- /dev/null +++ b/src/ge/ge_runtime/task/aicpu_task.cc @@ -0,0 +1,118 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_runtime/task/aicpu_task.h" + +#include + +#include "aicpu/common/aicpu_task_struct.h" +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +AicpuTask::AicpuTask(const ModelContext &model_context, const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), task_info_(task_info), stream_(nullptr), args_(nullptr) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + + auto stream_list = model_context.stream_list(); + if (stream_list.size() == 1) { + stream_ = stream_list[0]; + } else if (stream_list.size() > task_info->stream_id()) { + stream_ = stream_list[task_info->stream_id()]; + } else { + GELOGW("index: %u >= stream_list.size(): %zu.", task_info->stream_id(), stream_list.size()); + } +} + +AicpuTask::~AicpuTask() { ReleaseRtMem(&args_); } + +bool AicpuTask::Distribute() { + GELOGI("InitAicpuTask start."); + vector io_addrs; + io_addrs.insert(io_addrs.end(), task_info_->input_data_addrs().begin(), task_info_->input_data_addrs().end()); + io_addrs.insert(io_addrs.end(), task_info_->output_data_addrs().begin(), task_info_->output_data_addrs().end()); + auto io_addrs_num = static_cast(io_addrs.size()); + auto io_addrs_size = static_cast(io_addrs_num * sizeof(void *)); + constexpr uint32_t io_addr_offset = sizeof(aicpu::AicpuParamHead); + uint32_t node_def_addr_offset = io_addr_offset + io_addrs_size; + uint32_t args_size = + sizeof(aicpu::AicpuParamHead) + io_addrs_size + static_cast(task_info_->node_def().size()); + aicpu::AicpuParamHead aicpu_param_head = {args_size, io_addrs_num}; + + // Malloc device memory for args + rtError_t rt_ret = rtMalloc(&args_, args_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api(rtMalloc) failed, ret: 0x%X.", rt_ret); + return false; + } + // Memcpy AicpuParamHead + rt_ret = rtMemcpy(args_, sizeof(aicpu::AicpuParamHead), reinterpret_cast(&aicpu_param_head), + sizeof(aicpu::AicpuParamHead), 
RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api(rtMemcpy) failed, ret: 0x%X.", rt_ret); + return false; + } + + // Memcpy io addrs + if (io_addrs_num != 0) { + rt_ret = rtMemcpy(reinterpret_cast(reinterpret_cast(args_) + io_addr_offset), io_addrs_size, + reinterpret_cast(io_addrs.data()), io_addrs_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api(rtMemcpy) failed, ret: 0x%X.", rt_ret); + return false; + } + } + // Memcpy node def + rt_ret = rtMemcpy(reinterpret_cast(reinterpret_cast(args_) + node_def_addr_offset), + task_info_->node_def().size(), reinterpret_cast(task_info_->node_def().data()), + task_info_->node_def().size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api(rtMemcpy) failed, ret: 0x%X.", rt_ret); + return false; + } + + GELOGI("Distribute AicpuTask start, args_size = %u, io_addrs_num = %u, so_name = %s, kernel_name = %s.", args_size, + io_addrs_num, task_info_->so_name().data(), task_info_->kernel_name().data()); + rt_ret = rtCpuKernelLaunch(reinterpret_cast(task_info_->so_name().data()), + reinterpret_cast(task_info_->kernel_name().data()), 1, args_, args_size, + nullptr, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + GELOGI("Distribute AicpuTask end."); + return true; +} + +void AicpuTask::ReleaseRtMem(void **ptr) noexcept { + if (ptr == nullptr || *ptr == nullptr) { + return; + } + + rtError_t rt_ret = rtFree(*ptr); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "ReleaseRtMem failed, ret: 0x%X", rt_ret); + return; + } + *ptr = nullptr; +} + +REGISTER_TASK(TaskInfoType::kAiCpu, AicpuTask, AicpuTaskInfo); +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/aicpu_task.h b/src/ge/ge_runtime/task/aicpu_task.h new file mode 100644 index 00000000..ddb961b1 --- /dev/null +++ 
b/src/ge/ge_runtime/task/aicpu_task.h @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_AICPU_TASK_H_ +#define GE_GE_RUNTIME_TASK_AICPU_TASK_H_ + +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class AicpuTask : public TaskRepeater { + public: + AicpuTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~AicpuTask() override; + + bool Distribute() override; + + private: + static void ReleaseRtMem(void **ptr) noexcept; + + std::shared_ptr task_info_; + void *stream_; + void *args_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_AICPU_TASK_H_ diff --git a/src/ge/ge_runtime/task/cce_task.cc b/src/ge/ge_runtime/task/cce_task.cc new file mode 100644 index 00000000..acfc83b8 --- /dev/null +++ b/src/ge/ge_runtime/task/cce_task.cc @@ -0,0 +1,167 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ge_runtime/task/cce_task.h" + +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +CceTask::CceTask(const ModelContext &model_context, const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), + task_info_(task_info), + stream_(nullptr), + stub_func_(nullptr), + args_(nullptr), + sm_desc_(nullptr), + flowtable_(nullptr), + is_flowtable_(false) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + + auto stream_list = model_context.stream_list(); + if (stream_list.size() == 1) { + stream_ = stream_list[0]; + } else if (stream_list.size() > task_info->stream_id()) { + stream_ = stream_list[task_info->stream_id()]; + } else { + GELOGW("index: %u >= stream_list.size(): %zu.", task_info->stream_id(), stream_list.size()); + } +} + +CceTask::~CceTask() { + FreeRtMem(&args_); + FreeRtMem(&flowtable_); + rtError_t ret = (sm_desc_ != nullptr) ? 
rtMemFreeManaged(sm_desc_) : RT_ERROR_NONE; + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", ret); + } + sm_desc_ = nullptr; +} + +void CceTask::FreeRtMem(void **ptr) noexcept { + if (ptr == nullptr || *ptr == nullptr) { + return; + } + rtError_t ret = rtFree(*ptr); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", ret); + } + + *ptr = nullptr; +} + +bool CceTask::Distribute() { + GELOGI("Distribute CCETask start."); + if (stream_ == nullptr) { + GELOGE(PARAM_INVALID, "stream_ is null!"); + return false; + } + // Get stub_func + if (task_info_->stub_func().empty()) { + GELOGE(PARAM_INVALID, "kernel_info->stub_func is empty!"); + return false; + } + + rtError_t rt_ret = rtGetFunctionByName(const_cast(task_info_->stub_func().c_str()), &stub_func_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtGetFunctionByName failed, ret: 0x%X", rt_ret); + stub_func_ = nullptr; + return false; + } + + // Flowtable + if (is_flowtable_) { + rt_ret = rtMalloc(&flowtable_, task_info_->flow_table().size(), RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + rt_ret = rtMemcpy(flowtable_, task_info_->flow_table().size(), task_info_->flow_table().data(), + task_info_->flow_table().size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + // Modify flowtable addr in args + auto args = const_cast(task_info_->args().data()); + if (task_info_->args_offset().size() < sizeof(uint16_t)) { + GELOGE(FAILED, "size of args_offset is smaller than sizeof(uint16_t)."); + return false; + } + auto task_offset = reinterpret_cast(const_cast(task_info_->args_offset().data())); + + if (task_info_->args().size() < (task_offset[0] + sizeof(uint64_t))) { + GELOGE(FAILED, + "(context.args_offset().data()))[0]:%u + sizeof(uint64_t):%zu > 
kernelDef.args().size():%zu", + static_cast(task_offset[0]), sizeof(uint64_t), task_info_->args().size()); + return false; + } + + *(reinterpret_cast(args + task_offset[0])) = reinterpret_cast(flowtable_); + } + + // Args + rt_ret = rtMalloc(&args_, task_info_->args_size(), RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + rt_ret = rtMemcpy(args_, task_info_->args_size(), task_info_->args().data(), task_info_->args_size(), + RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + // L2 sm_desc + if (!task_info_->sm_desc().empty()) { + rt_ret = rtMemAllocManaged(&sm_desc_, task_info_->sm_desc().size(), RT_MEMORY_SPM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + rt_ret = rtMemcpy(sm_desc_, task_info_->sm_desc().size(), + task_info_->sm_desc().data(), + task_info_->sm_desc().size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + } + + // Kernel launch + rt_ret = rtKernelLaunch(stub_func_, + task_info_->block_dim(), + args_, + task_info_->args_size(), + static_cast(sm_desc_), + stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + return true; +} + +REGISTER_TASK(TaskInfoType::kCce, CceTask, CceTaskInfo); +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/cce_task.h b/src/ge/ge_runtime/task/cce_task.h new file mode 100644 index 00000000..e5c936fc --- /dev/null +++ b/src/ge/ge_runtime/task/cce_task.h @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_CCE_TASK_H_ +#define GE_GE_RUNTIME_TASK_CCE_TASK_H_ + +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class CceTask : public TaskRepeater { + public: + CceTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~CceTask() override; + + bool Distribute() override; + + static void FreeRtMem(void **ptr) noexcept; + + private: + std::shared_ptr task_info_; + void *stream_; + void *stub_func_; + void *args_; + void *sm_desc_; + void *flowtable_; + bool is_flowtable_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_CCE_TASK_H_ diff --git a/src/ge/ge_runtime/task/event_record_task.cc b/src/ge/ge_runtime/task/event_record_task.cc new file mode 100644 index 00000000..f5a980c0 --- /dev/null +++ b/src/ge/ge_runtime/task/event_record_task.cc @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_runtime/task/event_record_task.h" + +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +EventRecordTask::EventRecordTask(const ModelContext &model_context, + const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), + task_info_(task_info), + stream_(nullptr), + event_(nullptr) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + auto stream_list = model_context.stream_list(); + auto event_list = model_context.event_list(); + uint32_t stream_id = task_info->stream_id(); + uint32_t event_id = task_info->event_id(); + if (stream_id >= stream_list.size() || event_id >= event_list.size()) { + GELOGW("stream_list size:%zu, stream_id:%u, event_list size:%zu, event_id:%u", stream_list.size(), + stream_id, event_list.size(), event_id); + } + stream_ = stream_list[stream_id]; + event_ = event_list[event_id]; +} + +EventRecordTask::~EventRecordTask() {} + +bool EventRecordTask::Distribute() { + rtError_t rt_ret = rtEventRecord(event_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + GELOGI("Distribute end."); + return true; +} + +REGISTER_TASK(TaskInfoType::kEventRecord, EventRecordTask, EventRecordTaskInfo); +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/event_record_task.h b/src/ge/ge_runtime/task/event_record_task.h new file mode 100644 index 00000000..7c0c1042 --- /dev/null +++ b/src/ge/ge_runtime/task/event_record_task.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_EVENT_RECORD_TASK_H_ +#define GE_GE_RUNTIME_TASK_EVENT_RECORD_TASK_H_ + +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class EventRecordTask : public TaskRepeater { + public: + EventRecordTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~EventRecordTask() override; + + bool Distribute() override; + + private: + std::shared_ptr task_info_; + rtStream_t stream_; + rtEvent_t event_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_EVENT_RECORD_TASK_H_ diff --git a/src/ge/ge_runtime/task/event_wait_task.cc b/src/ge/ge_runtime/task/event_wait_task.cc new file mode 100644 index 00000000..db00bc4c --- /dev/null +++ b/src/ge/ge_runtime/task/event_wait_task.cc @@ -0,0 +1,63 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_runtime/task/event_wait_task.h" + +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +EventWaitTask::EventWaitTask(const ModelContext &model_context, const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), + task_info_(task_info), + stream_(nullptr), + event_(nullptr) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + auto stream_list = model_context.stream_list(); + auto event_list = model_context.event_list(); + uint32_t stream_id = task_info->stream_id(); + uint32_t event_id = task_info->event_id(); + if (stream_id >= stream_list.size() || event_id >= event_list.size()) { + GELOGW("stream_list size:%zu, stream_id:%u, event_list size:%zu, event_id:%u", stream_list.size(), stream_id, + event_list.size(), event_id); + } + stream_ = stream_list[stream_id]; + event_ = event_list[event_id]; +} + +EventWaitTask::~EventWaitTask() {} + +bool EventWaitTask::Distribute() { + rtError_t rt_ret = rtStreamWaitEvent(stream_, event_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtStreamWaitEvent failed, ret: 0x%X", rt_ret); + return false; + } + + rt_ret = rtEventReset(event_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtEventReset failed, ret: 0x%X", rt_ret); + return false; + } + GELOGI("Distribute end."); + return true; +} + +REGISTER_TASK(TaskInfoType::kEventWait, EventWaitTask, EventWaitTaskInfo); +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/event_wait_task.h b/src/ge/ge_runtime/task/event_wait_task.h new file mode 100644 index 00000000..886b733a --- /dev/null +++ b/src/ge/ge_runtime/task/event_wait_task.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_EVENT_WAIT_TASK_H_ +#define GE_GE_RUNTIME_TASK_EVENT_WAIT_TASK_H_ + +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class EventWaitTask : public TaskRepeater { + public: + EventWaitTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~EventWaitTask() override; + + bool Distribute() override; + + private: + std::shared_ptr task_info_; + rtStream_t stream_; + rtEvent_t event_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_EVENT_WAIT_TASK_H_ diff --git a/src/ge/ge_runtime/task/hccl_task.cc b/src/ge/ge_runtime/task/hccl_task.cc new file mode 100755 index 00000000..1a5d0a20 --- /dev/null +++ b/src/ge/ge_runtime/task/hccl_task.cc @@ -0,0 +1,153 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_runtime/task/hccl_task.h" + +#include "ge_runtime/task/task_factory.h" +#include "common/opskernel/ops_kernel_info_store.h" +#include "common/opskernel/ge_task_info.h" + +namespace ge { +namespace model_runner { +HcclTask::HcclTask(const ModelContext &model_context, const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), task_info_(task_info), stream_(nullptr), + rt_model_handle_(nullptr), priority_(0), slave_stream_list_(), hcom_bind_model_(nullptr), + hcom_unbind_model_(nullptr), hcom_distribute_task_(nullptr) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + + hcom_bind_model_ = task_info->hcom_bind_model(); + hcom_unbind_model_ = task_info->hcom_unbind_model(); + + priority_ = model_context.priority(); + rt_model_handle_ = model_context.rt_model_handle(); + auto stream_list = model_context.stream_list(); + + if (hcom_bind_model_ != nullptr) { + if (rt_model_handle_list_.insert(rt_model_handle_).second) { + for (auto stream : stream_list) { + (void) hcom_bind_model_(rt_model_handle_, stream); + } + } + } + + if (stream_list.size() == 1) { + stream_ = stream_list[0]; + } else if (stream_list.size() > task_info->stream_id()) { + stream_ = stream_list[task_info->stream_id()]; + } else { + GELOGW("index: %u >= stream_list.size(): %zu.", task_info->stream_id(), stream_list.size()); + } +} + +HcclTask::~HcclTask() { + for (size_t i = 0; i < slave_stream_list_.size(); ++i) { + rtError_t rt_ret = rtModelUnbindStream(rt_model_handle_, slave_stream_list_[i]); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Unbind stream from model failed! Index: %zu", i); + } + } + + for (size_t i = 0; i < slave_stream_list_.size(); ++i) { + rtError_t rt_ret = rtStreamDestroy(slave_stream_list_[i]); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Destroy stream failed! 
Index: %zu", i); + } + } + + if (hcom_unbind_model_ != nullptr) { + if (rt_model_handle_list_.find(rt_model_handle_) != rt_model_handle_list_.end()) { + (void) hcom_unbind_model_(rt_model_handle_); + (void)rt_model_handle_list_.erase(rt_model_handle_); + } + } +} + +bool HcclTask::Distribute() { + // No ops kernel info store + hcom_distribute_task_ = task_info_->hcom_distribute_task(); + if (hcom_distribute_task_ != nullptr) { + return hcom_distribute_task_(task_info_, stream_); + } + + // Ops kernel info store + // Get private_def and ops_kernel_store_ptr + GELOGI("get custom info in modelTaskDef"); + void *ops_kernel_store = task_info_->ops_kernel_store(); + OpsKernelInfoStore* ops_kernel_info_store = reinterpret_cast (ops_kernel_store); + if (ops_kernel_store == nullptr) { + GELOGE(PARAM_INVALID, "No hcom distribute function ptr and no ops kernel store."); + return false; + } + + char *private_def = reinterpret_cast(const_cast(task_info_->private_def().data())); + auto private_def_len = static_cast(task_info_->private_def().size()); + + GELOGI("hcclStreamNum =%ld", task_info_->hccl_stream_num()); + for (int64_t i = 0; i < task_info_->hccl_stream_num(); ++i) { + rtStream_t stream = nullptr; + rtError_t rt_ret = rtStreamCreateWithFlags(&stream, priority_, RT_STREAM_PERSISTENT | RT_STREAM_FORCE_COPY); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + rt_ret = rtModelBindStream(rt_model_handle_, stream, RT_HEAD_STREAM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + slave_stream_list_.push_back(stream); + } + + GELOGI("HcclTaskInfo Distribute Start. 
begin to call function LoadTask in hccl."); + GETaskInfo ge_task; + ge_task.id = 0; + ge_task.type = static_cast(RT_MODEL_TASK_HCCL); + ge_task.stream = stream_; + + ge_task.kernelHcclInfo.hccl_type = task_info_->hccl_type(); + ge_task.kernelHcclInfo.inputDataAddr = task_info_->input_data_addr(); + ge_task.kernelHcclInfo.outputDataAddr = task_info_->output_data_addr(); + ge_task.kernelHcclInfo.workSpaceAddr = task_info_->workspace_addr(); + ge_task.kernelHcclInfo.workSpaceMemSize = task_info_->workspace_size(); + ge_task.kernelHcclInfo.count = task_info_->count(); + ge_task.kernelHcclInfo.dataType = static_cast(task_info_->data_type()); + ge_task.kernelHcclInfo.opType = static_cast(task_info_->op_type()); + ge_task.kernelHcclInfo.rootId = task_info_->root_id(); + + ge_task.kernelHcclInfo.hcclStreamList = slave_stream_list_; + + ge_task.privateDef = private_def; + ge_task.privateDefLen = private_def_len; + ge_task.opsKernelStorePtr = ops_kernel_store; + + auto result = ops_kernel_info_store->LoadTask(ge_task); + // tagHcclResult::HCCL_SUCCESS is 0 + if (result != 0) { + GELOGE(INTERNAL_ERROR, "davinci_model : load task fail, return ret: %u", result); + return false; + } + + GELOGI("call function LoadTask end."); + return true; +} + +REGISTER_TASK(TaskInfoType::kHccl, HcclTask, HcclTaskInfo); +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/hccl_task.h b/src/ge/ge_runtime/task/hccl_task.h new file mode 100644 index 00000000..67788f36 --- /dev/null +++ b/src/ge/ge_runtime/task/hccl_task.h @@ -0,0 +1,52 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_HCCL_TASK_H_ +#define GE_GE_RUNTIME_TASK_HCCL_TASK_H_ + +#include +#include +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class HcclTask : public TaskRepeater { + public: + HcclTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~HcclTask() override; + + bool Distribute() override; + + private: + std::shared_ptr task_info_; + void *stream_; + rtModel_t rt_model_handle_; + int32_t priority_; + std::vector slave_stream_list_; + std::function hcom_bind_model_; + std::function hcom_unbind_model_; + std::function, void *)> hcom_distribute_task_; + static std::set rt_model_handle_list_; +}; + +std::set HcclTask::rt_model_handle_list_{}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_HCCL_TASK_H_ diff --git a/src/ge/ge_runtime/task/memcpy_async_task.cc b/src/ge/ge_runtime/task/memcpy_async_task.cc new file mode 100644 index 00000000..e9610978 --- /dev/null +++ b/src/ge/ge_runtime/task/memcpy_async_task.cc @@ -0,0 +1,57 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ge_runtime/task/memcpy_async_task.h" + +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +MemcpyAsyncTask::MemcpyAsyncTask(const ModelContext &model_context, + const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), task_info_(task_info), stream_(nullptr) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + auto stream_list = model_context.stream_list(); + uint32_t stream_id = task_info->stream_id(); + + GELOGI("Stream list size:%zu, stream id:%u.", stream_list.size(), stream_id); + if (stream_id >= stream_list.size()) { + GELOGW("Stream id invalid"); + } + stream_ = stream_list[stream_id]; +} + +MemcpyAsyncTask::~MemcpyAsyncTask() {} + +bool MemcpyAsyncTask::Distribute() { + GELOGI("MemcpyAsyncTask Distribute start."); + GELOGI("dst_max:%lu, count:%lu, kind:%u.", task_info_->dst_max(), task_info_->count(), task_info_->kind()); + rtError_t rt_ret = rtMemcpyAsync(task_info_->dst(), task_info_->dst_max(), + task_info_->src(), task_info_->count(), + static_cast(task_info_->kind()), stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + GELOGI("DistributeTask end"); + return true; +} + +REGISTER_TASK(TaskInfoType::kMemcpyAsync, MemcpyAsyncTask, MemcpyAsyncTaskInfo); +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/memcpy_async_task.h b/src/ge/ge_runtime/task/memcpy_async_task.h new file mode 100644 index 00000000..fd2e42ae --- /dev/null +++ 
b/src/ge/ge_runtime/task/memcpy_async_task.h @@ -0,0 +1,41 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_MEMCPY_ASYNC_TASK_H_ +#define GE_GE_RUNTIME_TASK_MEMCPY_ASYNC_TASK_H_ + +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class MemcpyAsyncTask : public TaskRepeater { + public: + MemcpyAsyncTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~MemcpyAsyncTask() override; + + bool Distribute() override; + + private: + std::shared_ptr task_info_; + rtStream_t stream_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_MEMCPY_ASYNC_TASK_H_ diff --git a/src/ge/ge_runtime/task/profiler_task.cc b/src/ge/ge_runtime/task/profiler_task.cc new file mode 100644 index 00000000..95fddd67 --- /dev/null +++ b/src/ge/ge_runtime/task/profiler_task.cc @@ -0,0 +1,53 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ge_runtime/task/profiler_task.h" + +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +ProfilerTask::ProfilerTask(const ModelContext &model_context, const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), task_info_(task_info), stream_(nullptr) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + auto stream_list = model_context.stream_list(); + uint32_t stream_id = task_info->stream_id(); + GELOGI("Stream list size:%zu, stream id:%u.", stream_list.size(), stream_id); + if (stream_id >= stream_list.size()) { + GELOGW("Stream id invalid"); + } + stream_ = stream_list[stream_id]; +} + +ProfilerTask::~ProfilerTask() {} + +bool ProfilerTask::Distribute() { + GELOGI("ProfilerTask Distribute start."); + GELOGI("logid = %lu, notify = %d, flat = %u.", task_info_->log_id(), task_info_->notify(), task_info_->flat()); + rtError_t rt_ret = rtProfilerTrace(task_info_->log_id(), task_info_->notify(), task_info_->flat(), stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + GELOGI("DistributeTask end"); + return true; +} + +REGISTER_TASK(TaskInfoType::kProfilerTrace, ProfilerTask, ProfilerTraceTaskInfo); +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/profiler_task.h b/src/ge/ge_runtime/task/profiler_task.h new file mode 100644 index 00000000..ce814a6d --- /dev/null +++ b/src/ge/ge_runtime/task/profiler_task.h @@ -0,0 +1,41 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_PROFILER_TASK_H_ +#define GE_GE_RUNTIME_TASK_PROFILER_TASK_H_ + +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class ProfilerTask : public TaskRepeater { + public: + ProfilerTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~ProfilerTask() override; + + bool Distribute() override; + + private: + std::shared_ptr task_info_; + rtStream_t stream_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_PROFILER_TASK_H_ diff --git a/src/ge/ge_runtime/task/stream_active_task.cc b/src/ge/ge_runtime/task/stream_active_task.cc new file mode 100644 index 00000000..2e91988d --- /dev/null +++ b/src/ge/ge_runtime/task/stream_active_task.cc @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_runtime/task/stream_active_task.h" +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +StreamActiveTask::StreamActiveTask(const ModelContext &model_context, + const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), + task_info_(task_info), + stream_(nullptr), + active_stream_(nullptr) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + auto stream_list = model_context.stream_list(); + uint32_t stream_id = task_info->stream_id(); + uint32_t active_stream_id = task_info->active_stream_id(); + GELOGI("Stream list size:%zu, stream id:%u, active stream id:%u", stream_list.size(), stream_id, active_stream_id); + if (stream_id >= stream_list.size() || active_stream_id >= stream_list.size()) { + GELOGW("Stream id invalid"); + } + stream_ = stream_list[stream_id]; + active_stream_ = stream_list[active_stream_id]; +} + +StreamActiveTask::~StreamActiveTask() {} + +bool StreamActiveTask::Distribute() { + GELOGI("Distribute start"); + GELOGI("Stream %u active %u.", task_info_->stream_id(), task_info_->active_stream_id()); + rtError_t rt_ret = rtStreamActive(active_stream_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + GELOGI("DistributeTask end"); + return true; +} + +REGISTER_TASK(TaskInfoType::kStreamActive, StreamActiveTask, StreamActiveTaskInfo); +} // namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/stream_active_task.h b/src/ge/ge_runtime/task/stream_active_task.h new file mode 100644 index 00000000..342fa78f --- /dev/null +++ b/src/ge/ge_runtime/task/stream_active_task.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_STREAM_ACTIVE_TASK_H_ +#define GE_GE_RUNTIME_TASK_STREAM_ACTIVE_TASK_H_ + +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class StreamActiveTask : public TaskRepeater { + public: + StreamActiveTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~StreamActiveTask() override; + + bool Distribute() override; + + private: + std::shared_ptr task_info_; + rtStream_t stream_; + rtStream_t active_stream_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_STREAM_ACTIVE_TASK_H_ diff --git a/src/ge/ge_runtime/task/stream_switch_task.cc b/src/ge/ge_runtime/task/stream_switch_task.cc new file mode 100644 index 00000000..1df3834a --- /dev/null +++ b/src/ge/ge_runtime/task/stream_switch_task.cc @@ -0,0 +1,78 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "ge_runtime/task/stream_switch_task.h" + +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +StreamSwitchTask::StreamSwitchTask(const ModelContext &model_context, + const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), + task_info_(task_info), + stream_(nullptr), + stream_list_() { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + + stream_list_ = model_context.stream_list(); + if (stream_list_.size() == 1) { + stream_ = stream_list_[0]; + } else if (stream_list_.size() > task_info->stream_id()) { + stream_ = stream_list_[task_info->stream_id()]; + } else { + GELOGW("Index: %u >= stream_list.size(): %zu.", task_info->stream_id(), stream_list_.size()); + } +} + +StreamSwitchTask::~StreamSwitchTask() {} + +bool StreamSwitchTask::Distribute() { + GELOGI("Init StreamSwitchTask start."); + GELOGI("Stream %u active %ld.", task_info_->stream_id(), task_info_->true_stream_id()); + + if (stream_ == nullptr) { + GELOGE(PARAM_INVALID, "stream_ is null!"); + return false; + } + + if (static_cast(task_info_->true_stream_id()) >= stream_list_.size()) { + GELOGE(PARAM_INVALID, "true_stream_id %ld must less than stream_list_ size %zu!", task_info_->true_stream_id(), + stream_list_.size()); + return false; + } + + void *input = reinterpret_cast(task_info_->input_addr()); + rtCondition_t cond = static_cast(task_info_->cond()); + void *value = reinterpret_cast(task_info_->value_addr()); + rtStream_t true_stream = stream_list_[task_info_->true_stream_id()]; + rtSwitchDataType_t data_type = static_cast(task_info_->data_type()); + + GELOGI("StreamSwitchTask Distribute Start."); + rtError_t rt_ret = rtStreamSwitchEx(input, cond, value, true_stream, stream_, data_type); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return false; + } + + return true; +} + +REGISTER_TASK(TaskInfoType::kStreamSwitch, StreamSwitchTask, StreamSwitchTaskInfo); +} // 
namespace model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/stream_switch_task.h b/src/ge/ge_runtime/task/stream_switch_task.h new file mode 100644 index 00000000..fce56e47 --- /dev/null +++ b/src/ge/ge_runtime/task/stream_switch_task.h @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_STREAM_SWITCH_TASK_H_ +#define GE_GE_RUNTIME_TASK_STREAM_SWITCH_TASK_H_ + +#include +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class StreamSwitchTask : public TaskRepeater { + public: + StreamSwitchTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~StreamSwitchTask() override; + + bool Distribute() override; + + private: + std::shared_ptr task_info_; + + void *stream_; + std::vector stream_list_; +}; + +} // namespace model_runner +} // namespace ge +#endif // GE_GE_RUNTIME_TASK_STREAM_SWITCH_TASK_H_ diff --git a/src/ge/ge_runtime/task/task.h b/src/ge/ge_runtime/task/task.h new file mode 100644 index 00000000..431706d1 --- /dev/null +++ b/src/ge/ge_runtime/task/task.h @@ -0,0 +1,52 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_TASK_H_ +#define GE_GE_RUNTIME_TASK_TASK_H_ + +#include +#include + +#include "runtime/rt_model.h" +#include "ge_runtime/model_context.h" +#include "ge_runtime/task_info.h" + +namespace ge { +namespace model_runner { +class Task { + public: + Task() {} + + virtual ~Task() {} + + virtual bool Distribute() = 0; +}; + +template +class TaskRepeater : public Task { + static_assert(std::is_base_of(), "Wrong TaskInfo Type!"); + + public: + TaskRepeater(const ModelContext &model_context, std::shared_ptr task_info) {} + + virtual ~TaskRepeater() {} + + virtual bool Distribute() = 0; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_TASK_H_ diff --git a/src/ge/ge_runtime/task/task_factory.h b/src/ge/ge_runtime/task/task_factory.h new file mode 100644 index 00000000..82743257 --- /dev/null +++ b/src/ge/ge_runtime/task/task_factory.h @@ -0,0 +1,89 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GE_RUNTIME_TASK_TASK_FACTORY_H_ +#define GE_GE_RUNTIME_TASK_TASK_FACTORY_H_ + +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "framework/common/debug/ge_log.h" +#include "ge_runtime/task_info.h" + +namespace ge { +namespace model_runner { +class Task; +class ModelContext; +using TASK_CREATOR_FUN = std::function(const ModelContext &, std::shared_ptr)>; + +class TaskFactory { + private: + TaskFactory() {} + ~TaskFactory() {} + void RegisterCreator(const TaskInfoType &type, const TASK_CREATOR_FUN &func) { + if (creator_map_.find(type) != creator_map_.end()) { + GELOGW("Creator type %d already exist", static_cast(type)); + } + creator_map_[type] = func; + } + + std::map creator_map_; + + public: + static TaskFactory &GetInstance() { + static TaskFactory instance; + return instance; + } + + std::shared_ptr Create(const ModelContext &model_context, std::shared_ptr &task_info) const { + if (task_info == nullptr) { + GELOGE(FAILED, "task_info is null."); + return nullptr; + } + + auto iter = creator_map_.find(task_info->type()); + if (iter == creator_map_.end()) { + GELOGE(FAILED, "Unknow task type %d", static_cast(task_info->type())); + return nullptr; + } + return iter->second(model_context, task_info); + } + + class Register { + public: + Register(const TaskInfoType &type, const TASK_CREATOR_FUN &func) { + GELOGI("regist type %d", static_cast(type)); + TaskFactory::GetInstance().RegisterCreator(type, func); + } + + ~Register() {} + }; +}; + +#define REGISTER_TASK(type, task_clazz, task_info_clazz) \ + TaskFactory::Register g_##task_clazz##_register( \ + type, \ + [](const ModelContext &model_context, const std::shared_ptr &task_info) -> std::shared_ptr { \ + std::shared_ptr concrete_task_info = std::static_pointer_cast(task_info); \ + return std::make_shared(model_context, concrete_task_info); \ + }); + +} // namespace model_runner +} // namespace ge +#endif // GE_GE_RUNTIME_TASK_TASK_FACTORY_H_ diff --git 
a/src/ge/ge_runtime/task/tbe_task.cc b/src/ge/ge_runtime/task/tbe_task.cc new file mode 100644 index 00000000..36be96b3 --- /dev/null +++ b/src/ge/ge_runtime/task/tbe_task.cc @@ -0,0 +1,112 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "ge_runtime/task/tbe_task.h" + +#include + +#include "ge_runtime/task/task_factory.h" + +namespace ge { +namespace model_runner { +TbeTask::TbeTask(const ModelContext &model_context, const std::shared_ptr &task_info) + : TaskRepeater(model_context, task_info), + task_info_(task_info), + stream_(nullptr), + stub_func_(nullptr), + args_(nullptr) { + if (task_info_ == nullptr) { + GELOGW("task_info_ is null!"); + } + + auto stream_list = model_context.stream_list(); + if (stream_list.size() == 1) { + stream_ = stream_list[0]; + } else if (stream_list.size() > task_info->stream_id()) { + stream_ = stream_list[task_info->stream_id()]; + } else { + GELOGE(PARAM_INVALID, "Index: %u >= stream_list.size(): %zu.", task_info->stream_id(), stream_list.size()); + return; + } +} + +TbeTask::~TbeTask() { + if (args_ != nullptr) { + rtError_t rt_ret = rtFree(args_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtFree fwkOpBuf failed! 
ret: 0x%X.", rt_ret); + } + args_ = nullptr; + } +} + +bool TbeTask::Distribute() { + GELOGI("InitTbeTask start."); + if (stream_ == nullptr) { + GELOGE(PARAM_INVALID, "stream_ is null!"); + return false; + } + // Get stub_func + if (task_info_->stub_func().empty()) { + GELOGE(PARAM_INVALID, "kernel_info->stub_func is empty!"); + return false; + } + + rtError_t rt_ret = rtGetFunctionByName(const_cast(task_info_->stub_func().c_str()), &stub_func_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtGetFunctionByName failed, ret: %d", static_cast(rt_ret)); + stub_func_ = nullptr; + return false; + } + + // Get args + std::vector tensor_device_addrs; + tensor_device_addrs.insert(tensor_device_addrs.end(), task_info_->input_data_addrs().begin(), + task_info_->input_data_addrs().end()); + tensor_device_addrs.insert(tensor_device_addrs.end(), task_info_->output_data_addrs().begin(), + task_info_->output_data_addrs().end()); + tensor_device_addrs.insert(tensor_device_addrs.end(), task_info_->workspace_addrs().begin(), + task_info_->workspace_addrs().end()); + auto args_size = static_cast(tensor_device_addrs.size() * sizeof(void *)); + + rt_ret = rtMalloc(&args_, args_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtMalloc failed, ret: %d", static_cast(rt_ret)); + return false; + } + + rt_ret = rtMemcpy(args_, args_size, reinterpret_cast(tensor_device_addrs.data()), args_size, + RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtMemcpy fail, ret 0x%X.", rt_ret); + return false; + } + + GELOGI("InitTbeTask end."); + GELOGI("DistributeTbeTask start."); + rt_ret = rtKernelLaunch(stub_func_, task_info_->block_dim(), args_, args_size, nullptr, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api rtKernelLaunch failed, ret: 0x%X", rt_ret); + return false; + } + + GELOGI("DistributeTbeTask end."); + return true; +} + +REGISTER_TASK(TaskInfoType::kTbe, TbeTask, TbeTaskInfo); +} // namespace 
model_runner +} // namespace ge diff --git a/src/ge/ge_runtime/task/tbe_task.h b/src/ge/ge_runtime/task/tbe_task.h new file mode 100644 index 00000000..f9ca56bb --- /dev/null +++ b/src/ge/ge_runtime/task/tbe_task.h @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GE_RUNTIME_TASK_TBE_TASK_H_ +#define GE_GE_RUNTIME_TASK_TBE_TASK_H_ + +#include + +#include "ge_runtime/task/task.h" + +namespace ge { +namespace model_runner { +class TbeTask : public TaskRepeater { + public: + TbeTask(const ModelContext &model_context, const std::shared_ptr &task_info); + + ~TbeTask() override; + + bool Distribute() override; + + private: + std::shared_ptr task_info_; + void *stream_; + void *stub_func_; + void *args_; +}; +} // namespace model_runner +} // namespace ge + +#endif // GE_GE_RUNTIME_TASK_TBE_TASK_H_ diff --git a/src/ge/generator/ge_generator.cc b/src/ge/generator/ge_generator.cc new file mode 100644 index 00000000..2d3814d5 --- /dev/null +++ b/src/ge/generator/ge_generator.cc @@ -0,0 +1,353 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "generator/ge_generator.h" + +#include "common/ge/ge_util.h" +#include "common/ge/plugin_manager.h" +#include "common/helper/model_helper.h" +#include "common/helper/om_file_helper.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "ge/ge_api.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/manager/graph_manager.h" +#include "graph/opsproto_manager.h" +#include "graph/utils/graph_utils.h" +#include "model/ge_model.h" + +using std::string; +using std::vector; +using std::map; + +namespace { +const char *const kAttrOpType = "op_type"; +} + +namespace ge { +static Status AddInputs(const ComputeGraphPtr &graph, const NodePtr &node, const GeTensorDesc &tensor, int32_t index, + bool attr) { + GE_CHECK_NOTNULL_EXEC(graph, return PARAM_INVALID); + GE_CHECK_NOTNULL_EXEC(node, return PARAM_INVALID); + string op_type; + if (!AttrUtils::GetStr(tensor, kAttrOpType, op_type) || op_type.empty()) { + op_type = DATA; + } + + string op_name = node->GetName() + "_in_" + std::to_string(index); + OpDescPtr data_op = MakeShared(op_name, op_type); + if (data_op == nullptr) { + return FAILED; + } + + GE_CHK_BOOL_EXEC(data_op->AddInputDesc(tensor) == GRAPH_SUCCESS, return FAILED, "Add input desc fail."); + GE_CHK_BOOL_EXEC(data_op->AddOutputDesc(tensor) == GRAPH_SUCCESS, return FAILED, "Add output desc fail."); + if (attr) { + GE_CHK_BOOL_EXEC(AttrUtils::SetInt(data_op, ATTR_NAME_INDEX, index), return FAILED, "Set index fail."); + } + + ge::NodePtr arg_node = graph->AddNode(data_op); + GE_CHK_BOOL_EXEC(arg_node 
!= nullptr, return FAILED, "Insert Data node fail."); + + GE_CHK_STATUS(GraphUtils::AddEdge(arg_node->GetOutDataAnchor(0), node->GetInDataAnchor(index)), + "Add edge[%s->%s] fail.", data_op->GetName().c_str(), node->GetName().c_str()); + + return SUCCESS; +} + +static Status AddOutputs(const ComputeGraphPtr &graph, const NodePtr &node, const vector &outputs) { + OpDescPtr op_desc = MakeShared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + if (op_desc == nullptr) { + return FAILED; + } + int32_t count = 0; + for (const auto &out_desc : outputs) { + GeTensorDesc tensor = out_desc.GetTensorDesc(); + TensorUtils::SetInputTensor(tensor, true); + GE_CHK_BOOL_EXEC(op_desc->AddInputDesc(tensor) == GRAPH_SUCCESS, return FAILED, "Add input desc fail"); + + TensorUtils::SetInputTensor(tensor, false); + TensorUtils::SetOutputTensor(tensor, true); + GE_CHK_BOOL_EXEC(op_desc->AddOutputDesc(tensor) == GRAPH_SUCCESS, return FAILED, "Add output desc fail"); + count++; + } + GE_CHECK_NOTNULL_EXEC(graph, return PARAM_INVALID); + ge::NodePtr out_node = graph->AddNode(op_desc); + GE_CHK_BOOL_EXEC(out_node != nullptr, return FAILED, "Insert Output node fail."); + GE_CHECK_NOTNULL_EXEC(node, return PARAM_INVALID); + for (int32_t i = 0; i < count; ++i) { + GE_CHK_STATUS(GraphUtils::AddEdge(node->GetOutDataAnchor(i), out_node->GetInDataAnchor(i)), + "Add edge[%s->%s] fail.", node->GetName().c_str(), out_node->GetName().c_str()); + } + + return SUCCESS; +} + +static void GetOpsProtoPath(string &opsproto_path) { + GELOGI("Start to get ops proto path schedule"); + const char *path_env = std::getenv("ASCEND_OPP_PATH"); + if (path_env != nullptr) { + string path = path_env; + string file_path = RealPath(path.c_str()); + if (file_path.empty()) { + GELOGE(FAILED, "File path %s is invalid.", path.c_str()); + return; + } + opsproto_path = (path + "/op_proto/built-in/" + ":") + (path + "/op_proto/custom/"); + GELOGI("Get opsproto so path from env : %s", path.c_str()); + return; + } + string path_base = 
PluginManager::GetPath(); + GELOGI("path_base is %s", path_base.c_str()); + path_base = path_base.substr(0, path_base.rfind('/')); + path_base = path_base.substr(0, path_base.rfind('/') + 1); + opsproto_path = (path_base + "ops/op_proto/built-in/" + ":") + (path_base + "ops/op_proto/custom/"); +} + +class GeGenerator::Impl { + public: + Status BuildModel(const Graph &graph, const vector &inputs, GraphId &graph_id, + vector &ge_models); + + Status SaveModel(const string &file_name_prefix, vector models); + + Status SaveParams(GeModelPtr &ge_model, const string &type, const map &attrs, + const vector &inputs, const vector &outputs); + + GraphManager graph_manager_; + SaveParam save_param_; +}; + +Status GeGenerator::Initialize(const map &options) { + impl_ = ge::MakeShared(); + if (impl_ == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed"); + return MEMALLOC_FAILED; + } + string opsproto_path; + GetOpsProtoPath(opsproto_path); + GELOGI("opsproto_path is %s", opsproto_path.c_str()); + OpsProtoManager *manager = OpsProtoManager::Instance(); + map option_tmp; + option_tmp.emplace(std::pair(string("ge.opsProtoLibPath"), opsproto_path)); + (void)manager->Initialize(option_tmp); + + Status ret = impl_->graph_manager_.Initialize(options); + if (ret != SUCCESS) { + GELOGE(GE_GENERATOR_GRAPH_MANAGER_INIT_FAILED, "Graph manager initialize failed"); + return GE_GENERATOR_GRAPH_MANAGER_INIT_FAILED; + } + // get ek file + auto iter = options.find(EK_FILE); + if (iter != options.end()) { + impl_->save_param_.ek_file = iter->second; + } + // get cert file + iter = options.find(CERT_FILE); + if (iter != options.end()) { + impl_->save_param_.cert_file = iter->second; + } + // get hw key file + iter = options.find(HW_KEY_FILE); + if (iter != options.end()) { + impl_->save_param_.hw_key_file = iter->second; + } + // get private file + iter = options.find(PRIVATE_KEY_FILE); + if (iter != options.end()) { + impl_->save_param_.pri_key_file = iter->second; + } + return SUCCESS; +} 
+ +Status GeGenerator::Finalize() { + GE_CHECK_NOTNULL_EXEC(impl_, return PARAM_INVALID); + Status ret = impl_->graph_manager_.Finalize(); + if (ret != SUCCESS) { + GELOGE(GE_GENERATOR_GRAPH_MANAGER_FINALIZE_FAILED, "Graph manager finalize failed"); + return GE_GENERATOR_GRAPH_MANAGER_FINALIZE_FAILED; + } + return SUCCESS; +} + +Status GeGenerator::GenerateOfflineModel(const Graph &graph, const string &file_name_prefix, + const vector &inputs) { + GELOGI("Start to GenerateOfflineModel."); + GraphId graph_id; + vector ge_models; + GE_CHECK_NOTNULL_EXEC(impl_, return PARAM_INVALID); + + string model_name; + auto compute_graph = GraphUtils::GetComputeGraph(graph); + if (compute_graph == nullptr) { + GELOGW("Get compute graph fail."); + } else { + model_name = compute_graph->GetName(); + } + + Status ret = impl_->BuildModel(graph, inputs, graph_id, ge_models); + if (ret != SUCCESS) { + GELOGE(ret, "Build model failed"); + if (impl_->graph_manager_.Finalize() != SUCCESS) { + GELOGE(FAILED, "graph_manager finalize fail."); + } + return ret; + } + + if (!model_name.empty() && !ge_models.empty()) { + ge_models[0]->SetName(model_name); + } + + ret = impl_->SaveModel(file_name_prefix, ge_models); + if (ret != SUCCESS) { + GELOGE(ret, "Save model failed"); + if (impl_->graph_manager_.Finalize() != SUCCESS) { + GELOGE(FAILED, "graph_manager finalize fail."); + } + return ret; + } + GELOGI("GenerateOfflineModel success."); + return SUCCESS; +} + +/// +/// @ingroup ge +/// @brief Compiling a single operator into an offline model +/// @param [in] OpDescPtr &op_desc: Operator description info that needs to be compiled into an offline model file +/// @param [in] vector &inputs: Operator input data description information. +/// @param [in] vector &outputs: Operator output data description information. +/// @param [in] const string &model_file_name: Offline model filename. 
+/// @return SUCCESS handle successfully / others handle failed +/// +Status GeGenerator::BuildSingleOpModel(OpDescPtr &op_desc, const vector &inputs, + const vector &outputs, const string &model_file_name) { + GE_CHECK_NOTNULL_EXEC(op_desc, return PARAM_INVALID); + if (!inputs.empty() && (inputs.size() != op_desc->GetInputsSize())) { + GELOGE(PARAM_INVALID, "Tensor size: %zu, Inputs size:%zu", inputs.size(), op_desc->GetInputsSize()); + return PARAM_INVALID; + } + if (!outputs.empty() && (outputs.size() != op_desc->GetOutputsSize())) { + GELOGE(PARAM_INVALID, "Tensor size: %zu, Outputs size:%zu", outputs.size(), op_desc->GetOutputsSize()); + return PARAM_INVALID; + } + + // 0. Save original attributes. + map op_attrs = op_desc->GetAllAttrs(); + + // 1. Create ComputeGraph. + string name = ge::CurrentTimeInStr() + "_" + model_file_name; + ge::ComputeGraphPtr compute_graph = MakeShared(name); + if (compute_graph == nullptr) { + return INTERNAL_ERROR; + } + GE_CHECK_NOTNULL_EXEC(compute_graph, return INTERNAL_ERROR); + + // 2. Add Node to ComputeGraph. + NodePtr op_node = compute_graph->AddNode(op_desc); + GE_CHECK_NOTNULL_EXEC(op_node, return INTERNAL_ERROR); + + // 3. Create InputData node. + int64_t in_size = static_cast(op_desc->GetInputsSize()); + GE_CHK_BOOL_EXEC(AttrUtils::SetInt(op_desc, ATTR_NAME_N, in_size), return FAILED, "Op[%s] Set N fail", + op_desc->GetName().c_str()); + int32_t arg_index = 0; + if (inputs.empty()) { + for (const auto &input_desc : op_desc->GetAllInputsDescPtr()) { + GE_CHECK_NOTNULL_EXEC(input_desc, return INTERNAL_ERROR); + GE_CHK_STATUS_RET_NOLOG(AddInputs(compute_graph, op_node, *input_desc, arg_index, false)); + arg_index++; + } + } else { + for (const auto &in_desc : inputs) { + const GeTensorDesc input_desc = in_desc.GetTensorDesc(); + GE_CHK_STATUS_RET_NOLOG(AddInputs(compute_graph, op_node, input_desc, arg_index, true)); + arg_index++; + } + } + + // 4. Create Output node. 
+ if (!outputs.empty()) { + GE_CHK_STATUS_RET_NOLOG(AddOutputs(compute_graph, op_node, outputs)); + } + + // dump ComputeGraph. + compute_graph->Dump(); + Graph graph = ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph); + GELOGI("ATC parser success."); + + GraphId graph_id; + vector ge_models; + GE_CHECK_NOTNULL_EXEC(impl_, return PARAM_INVALID); + GE_CHK_STATUS_RET_NOLOG(impl_->BuildModel(graph, inputs, graph_id, ge_models)); + + if (!ge_models.empty()) { + GE_CHK_STATUS_RET_NOLOG(impl_->SaveParams(ge_models[0], op_desc->GetType(), op_attrs, inputs, outputs)); + } + + GE_CHK_STATUS_RET_NOLOG(impl_->SaveModel(model_file_name, ge_models)); + return SUCCESS; +} + +Status GeGenerator::Impl::SaveParams(GeModelPtr &ge_model, const string &type, const map &attrs, + const vector &inputs, const vector &outputs) { + GE_CHECK_NOTNULL_EXEC(ge_model, return PARAM_INVALID); + GE_CHK_BOOL_EXEC_NOLOG(graph_manager_.SaveParams(*ge_model, type, attrs, inputs, outputs) == SUCCESS, + graph_manager_.Finalize(); + return FAILED); + + return SUCCESS; +} + +Status GeGenerator::Impl::SaveModel(const string &file_name_prefix, vector models) { + // to be change to ModelHelper interface + if (models.empty()) { + GELOGE(FAILED, "models are empty."); + return FAILED; + } + + ModelHelper model_helper; + Status ret = model_helper.SaveToOmModel(models[0], save_param_, file_name_prefix); + if (ret != SUCCESS) { + GELOGE(ret, "Save to Om model failed"); + return ret; + } + return SUCCESS; +} + +Status GeGenerator::Impl::BuildModel(const Graph &graph, const vector &inputs, GraphId &graph_id, + vector &ge_models) { + static GraphId id = 0; + + Status ret = graph_manager_.AddGraph(id, graph); + if (ret != SUCCESS) { + GELOGE(GE_GENERATOR_GRAPH_MANAGER_ADD_GRAPH_FAILED, "graphManager AddGraph failed, id: %u", id); + graph_manager_.Finalize(); + return GE_GENERATOR_GRAPH_MANAGER_ADD_GRAPH_FAILED; + } + + GELOGI("models' inputs.size()=%zu", inputs.size()); + ret = graph_manager_.BuildGraph(id, 
inputs, ge_models); + if (ret != SUCCESS) { + GELOGE(GE_GENERATOR_GRAPH_MANAGER_BUILD_GRAPH_FAILED, "graphManager BuildGraph failed, id: %u", id); + return GE_GENERATOR_GRAPH_MANAGER_BUILD_GRAPH_FAILED; + } + + graph_id = id; + id += 1; + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/generator/generator_api.cc b/src/ge/generator/generator_api.cc new file mode 100644 index 00000000..44c85535 --- /dev/null +++ b/src/ge/generator/generator_api.cc @@ -0,0 +1,357 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "generator/generator_api.h" + +#include "common/ge/ge_util.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "generator/ge_generator.h" +#include "graph/ge_attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/op_desc.h" +#include "graph/utils/tensor_utils.h" + +#define CHECK_PARAM_NOT_NULL(param) \ + do { \ + if (param == nullptr) { \ + GELOGE(ge::PARAM_INVALID, "Param: %s is null.", #param); \ + return ge::PARAM_INVALID; \ + } \ + } while (0) + +#define CHECK_PARAM_OBJECT(object, param) \ + ({ \ + object *obj_value = reinterpret_cast(param); \ + if (obj_value == nullptr) { \ + GELOGE(ge::PARAM_INVALID, "Param: %s is null.", #param); \ + return ge::PARAM_INVALID; \ + } \ + obj_value; \ + }) + +class OpAttr { + public: + OpAttr() = default; + ~OpAttr() = default; + + const std::map &Attrs() const { return attrs_; } + + template + Status_t SetAttr(const char *name, T value) { + CHECK_PARAM_NOT_NULL(name); + auto attr_value = ge::GeAttrValue::CreateFrom(value); + attrs_[std::string(name)] = attr_value; + return ge::SUCCESS; + } + + template + Status_t SetAttr(const char *name, const T *value, int num) { + CHECK_PARAM_NOT_NULL(name); + CHECK_PARAM_NOT_NULL(value); + + std::vector values; + for (int i = 0; i < num; ++i) { + values.push_back(value[i]); + } + + auto attr_value = ge::GeAttrValue::CreateFrom>(values); + attrs_[std::string(name)] = attr_value; + return ge::SUCCESS; + } + + Status_t SetAttr(const char *name, const char *value) { + CHECK_PARAM_NOT_NULL(name); + CHECK_PARAM_NOT_NULL(value); + auto attr_value = ge::GeAttrValue::CreateFrom(string(value)); + attrs_[std::string(name)] = attr_value; + return ge::SUCCESS; + } + + Status_t SetAttr(const char *name, const char **value, int num) { + CHECK_PARAM_NOT_NULL(name); + CHECK_PARAM_NOT_NULL(value); + + std::vector values; + for (int i = 0; i < num; ++i) { + values.push_back(string(value[i])); + } + + auto attr_value = 
ge::GeAttrValue::CreateFrom>(values); + attrs_[std::string(name)] = attr_value; + return ge::SUCCESS; + } + + private: + std::map attrs_; +}; + +/// +/// @ingroup ge +/// @brief Generate offline model for the op. +/// @param [in] op_type: type name of the op. +/// @param [in] in_tensor: input description array (created by OpTensorCreate). +/// @param [in] in_num: number of in_tensor. +/// @param [in] out_tensor: output description array (created by OpTensorCreate). +/// @param [in] out_num: number of out_tensor. +/// @param [in] attr: the attributes of the op (created by OpAttrCreate). +/// @param [in] om_file: file name for the om to save. +/// @return 0 for success / others for fail +/// +Status_t OpTaskGernerator(const char *op_type, const OpTensor_t *in_tensor, int in_num, const OpTensor_t *out_tensor, + int out_num, const OpAttr_t attr, const char *om_file) { + CHECK_PARAM_NOT_NULL(op_type); + CHECK_PARAM_NOT_NULL(om_file); + const std::string om_file_name(om_file); + + std::string op_name = std::string(op_type) + "_" + std::to_string(ge::GetCurrentTimestap()); + ge::OpDescPtr op_desc = ge::MakeShared(op_name, op_type); + if (op_desc == nullptr) { + return ge::FAILED; + } + std::vector inputs; + for (int i = 0; i < in_num && in_tensor != nullptr; ++i) { + const ge::TensorDesc *in_desc = CHECK_PARAM_OBJECT(ge::TensorDesc, in_tensor[i]); + ge::GeTensorDesc tensor_desc(ge::GeShape(in_desc->GetShape().GetDims()), in_desc->GetFormat(), + in_desc->GetDataType()); + + tensor_desc.SetOriginFormat(in_desc->GetFormat()); + ge::TensorUtils::SetRealDimCnt(tensor_desc, static_cast(in_desc->GetShape().GetDims().size())); + ge::TensorUtils::SetInputTensor(tensor_desc, true); + ge::TensorUtils::SetOutputTensor(tensor_desc, false); + + if (op_desc->AddInputDesc(tensor_desc) != ge::GRAPH_SUCCESS) { + GELOGE(ge::FAILED, "AddInputDesc fail."); + return ge::FAILED; + } + inputs.emplace_back(tensor_desc); + } + + std::vector outputs; + for (int i = 0; i < out_num && out_tensor != 
nullptr; ++i) { + const ge::TensorDesc *out_desc = CHECK_PARAM_OBJECT(ge::TensorDesc, out_tensor[i]); + ge::GeTensorDesc tensor_desc(ge::GeShape(out_desc->GetShape().GetDims()), out_desc->GetFormat(), + out_desc->GetDataType()); + + tensor_desc.SetOriginFormat(out_desc->GetFormat()); + ge::TensorUtils::SetRealDimCnt(tensor_desc, static_cast(out_desc->GetShape().GetDims().size())); + ge::TensorUtils::SetInputTensor(tensor_desc, false); + ge::TensorUtils::SetOutputTensor(tensor_desc, true); + + (void)op_desc->AddOutputDesc(tensor_desc); + outputs.emplace_back(tensor_desc); + } + + if (attr != nullptr) { + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + for (const auto &it : op_attr->Attrs()) { + GE_IF_BOOL_EXEC(op_desc->SetAttr(it.first, it.second) != ge::SUCCESS, GELOGE(ge::FAILED, "SetAttr failed."); + return ge::FAILED); + } + } + + ge::GeGenerator generator; + return generator.BuildSingleOpModel(op_desc, inputs, outputs, om_file_name); +} + +/// +/// @ingroup ge +/// @brief Create Tensor Description. +/// @param [in] format: tensor format of the data. +/// @param [in] datatype: tensor type of the data. +/// @param [in] shape: tensor shape array. +/// @param [in] num: number of shape. +/// @return OpTensor_t for success / nullptr for fail +/// +OpTensor_t OpTensorCreate(int format, int datatype, const int64_t *shape, int num) { + std::vector dims; + if (shape != nullptr) { + for (int i = 0; i < num; ++i) { + dims.push_back(shape[i]); + } + } + + ge::Format fmt = static_cast(format); + ge::DataType dt = static_cast(datatype); + + return new (std::nothrow) ge::TensorDesc(ge::Shape(dims), fmt, dt); +} + +/// +/// @ingroup ge +/// @brief Destroy Tensor Description. +/// @param [in] OpTensor_t tensor: created by OpTensorCreate. +/// @param [out] none +/// @return 0 for success / others for fail. 
+/// +Status_t OpTensorDestroy(OpTensor_t tensor) { + ge::TensorDesc *op_tensor = CHECK_PARAM_OBJECT(ge::TensorDesc, tensor); + delete op_tensor; + op_tensor = nullptr; + + return ge::SUCCESS; +} + +/// +/// @ingroup ge +/// @brief Create an attribute holder. +/// @param [in] none +/// @param [out] none +/// @return OpAttr_t for success / nullptr for fail. +/// +OpAttr_t OpAttrCreate() { return new (std::nothrow) OpAttr; } + +/// +/// @ingroup ge +/// @brief Destroy Attribute holder. +/// @param [in] OpAttr_t attr: created by OpAttrCreate. +/// @param [out] none +/// @return 0 for success / others for fail. +/// +Status_t OpAttrDestroy(OpAttr_t attr) { + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + delete op_attr; + + return ge::SUCCESS; +} + +/// +/// @ingroup ge +/// @brief Set a boolean attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value. +/// @return 0 for success / others for fail. +/// +Status_t SetAttrBool(OpAttr_t attr, const char *name, bool value) { + CHECK_PARAM_NOT_NULL(name); + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + + return op_attr->SetAttr(name, value); +} + +/// +/// @ingroup ge +/// @brief Set an integer attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value. +/// @return 0 for success / others for fail. +/// +Status_t SetAttrInt(OpAttr_t attr, const char *name, int64_t value) { + CHECK_PARAM_NOT_NULL(name); + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + + return op_attr->SetAttr(name, value); +} + +/// +/// @ingroup ge +/// @brief Set a float attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). 
+/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value. +/// @return 0 for success / others for fail. +/// +Status_t SetAttrFloat(OpAttr_t attr, const char *name, float value) { + CHECK_PARAM_NOT_NULL(name); + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + + return op_attr->SetAttr(name, value); +} + +/// +/// @ingroup ge +/// @brief Set a string attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value (can`t be nullptr, end with '\0'). +/// @return 0 for success / others for fail. +/// +Status_t SetAttrString(OpAttr_t attr, const char *name, const char *value) { + CHECK_PARAM_NOT_NULL(name); + CHECK_PARAM_NOT_NULL(value); + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + + return op_attr->SetAttr(name, string(value)); +} + +/// +/// @ingroup ge +/// @brief Set a boolean array attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value array. +/// @param [in] num: number of value array. +/// @return 0 for success / others for fail. +/// +Status_t SetAttrBoolList(OpAttr_t attr, const char *name, const bool *value, int num) { + CHECK_PARAM_NOT_NULL(name); + CHECK_PARAM_NOT_NULL(value); + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + + return op_attr->SetAttr(name, value, num); +} +/// +/// @ingroup ge +/// @brief Set an integer array attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value array. +/// @param [in] num: number of value array. +/// @return 0 for success / others for fail. 
+/// +Status_t SetAttrIntList(OpAttr_t attr, const char *name, const int64_t *value, int num) { + CHECK_PARAM_NOT_NULL(name); + CHECK_PARAM_NOT_NULL(value); + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + + return op_attr->SetAttr(name, value, num); +} + +/// +/// @ingroup ge +/// @brief Set a float array attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value array. +/// @param [in] num: number of value array. +/// @return 0 for success / others for fail. +/// +Status_t SetAttrFloatList(OpAttr_t attr, const char *name, const float *value, int num) { + CHECK_PARAM_NOT_NULL(name); + CHECK_PARAM_NOT_NULL(value); + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + + return op_attr->SetAttr(name, value, num); +} + +/// +/// @ingroup ge +/// @brief Set a string array attribute to the attribute holder. +/// @param [in] attr: attribute holder (created by OpAttrCreate). +/// @param [in] name: attribute name (can`t be nullptr, end with '\0'). +/// @param [in] value: attribute value array (each value can`t be nullptr, end with '\0'). +/// @param [in] num: number of value array. +/// @return 0 for success / others for fail. +/// +Status_t SetAttrStringList(OpAttr_t attr, const char *name, const char **value, int num) { + CHECK_PARAM_NOT_NULL(name); + CHECK_PARAM_NOT_NULL(value); + OpAttr *op_attr = CHECK_PARAM_OBJECT(OpAttr, attr); + + return op_attr->SetAttr(name, value, num); +} diff --git a/src/ge/graph/build/graph_build.cc b/src/ge/graph/build/graph_build.cc new file mode 100644 index 00000000..25fc45cc --- /dev/null +++ b/src/ge/graph/build/graph_build.cc @@ -0,0 +1,255 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/graph_build.h" + +#include "common/ge/ge_util.h" +#include "common/helper/model_helper.h" +#include "common/opskernel/ops_kernel_info_types.h" +#include "graph/build/optimize_stream_graph.h" +#include "graph/build/run_context.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" +#include "model/ge_model.h" + +using domi::BuildMode; + +namespace { +const int32_t kInvalidPerfLevel = -1; +} // namespace +namespace ge { +GraphBuilder::GraphBuilder() : build_mode_(BuildMode::GEN_TASK_WITH_FUSION), hcom_parallel_(false) {} + +void GraphBuilder::SetOptions(const ge::GraphManagerOptions &options) { + stream_max_parallel_num_ = options.stream_max_parallel_num; + hcom_parallel_ = options.hcom_parallel; + + if (options.perf_level == kInvalidPerfLevel) { + build_mode_ = static_cast(BuildMode::GEN_TASK_WITH_FUSION); + } else { + build_mode_ = options.perf_level; + } +} + +Status GraphBuilder::CalcOpParam(const ge::ComputeGraphPtr &graph) { + GELOGI("Begin to calculate op running param."); + GE_CHECK_NOTNULL(graph); + auto instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GraphBuilder: GE is not initialized"); + return GE_CLI_GE_NOT_INITIALIZED; + } + for (const auto &node_ptr : graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node_ptr->GetOpDesc()); + std::string kernel_lib_name = node_ptr->GetOpDesc()->GetOpKernelLibName(); + if 
(kernel_lib_name.empty()) { + // reset op kernel lib + (void)instance_ptr->DNNEngineManagerObj().GetDNNEngineName(node_ptr->GetOpDesc()); + kernel_lib_name = node_ptr->GetOpDesc()->GetOpKernelLibName(); + if (kernel_lib_name.empty()) { + GELOGE(INTERNAL_ERROR, "Get node:%s(%s) kernel lib failed.", node_ptr->GetName().c_str(), + node_ptr->GetType().c_str()); + return INTERNAL_ERROR; + } + } + + OpsKernelInfoStorePtr kernel_info = instance_ptr->OpsKernelManagerObj().GetOpsKernelInfoStore(kernel_lib_name); + if (kernel_info != nullptr) { + auto ret = SetInputSize(node_ptr); + if (ret != SUCCESS) { + GELOGE(ret, "Set node inputDesc size failed, node name is %s", node_ptr->GetName().c_str()); + return ret; + } + ret = kernel_info->CalcOpRunningParam(*node_ptr); + if (ret != SUCCESS) { + GELOGE(ret, "Calculate op running param failed, node name is %s", node_ptr->GetName().c_str()); + return ret; + } + } else { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "Get op %s ops kernel info store failed", node_ptr->GetName().c_str()); + return INTERNAL_ERROR; + } + } + GELOGI("Success to calculate op running param."); + return SUCCESS; +} + +Status GraphBuilder::Build(ComputeGraphPtr &comp_graph, std::vector &subgraph_ptr_list, + GeModelPtr &ge_model_ptr, uint64_t session_id) { + GELOGI("Start to build model."); + if (comp_graph == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "Graph build comp_graph is null."); + return GE_GRAPH_PARAM_NULLPTR; + } + + Status ret = SecondPartition(comp_graph, subgraph_ptr_list); + GE_CHK_STATUS_RET(ret, "Graph second partition Failed."); + + GE_TIMESTAMP_START(BuildSubgraph); + ge::ModelBuilder builder(comp_graph, subgraph_ptr_list, stream_max_parallel_num_, hcom_parallel_, build_mode_); + + GELOGI("[Build] invoke the other opskernel to generate task."); + + GraphUtils::DumpGEGraph(comp_graph, "BeforePreBuildModel"); + GraphUtils::DumpGEGraphToOnnx(*comp_graph, "BeforePreBuildModel"); + + GE_TIMESTAMP_START(PreBuildModel); + 
GE_CHK_STATUS_RET(builder.PreBuildModel(), "Builder PreBuildModel() return fail."); + GE_TIMESTAMP_END(PreBuildModel, "GraphBuilder::PreBuildModel"); + + GraphUtils::DumpGEGraph(comp_graph, "AfterPrebuildmodel"); + GraphUtils::DumpGEGraphToOnnx(*comp_graph, "AfterPrebuildmodel"); + + GE_TIMESTAMP_START(CalcOpParam); + GE_CHK_STATUS_RET(CalcOpParam(comp_graph), "Builder CalcOpParam() return fail."); + GE_TIMESTAMP_END(CalcOpParam, "GraphBuilder::CalcOpParam"); + GraphUtils::DumpGEGraph(comp_graph, "AfterCalcOpParam"); + GraphUtils::DumpGEGraphToOnnx(*comp_graph, "AfterCalcOpParam"); + + ModelPtr model_ptr = MakeShared(); + if (model_ptr == nullptr) { + return MEMALLOC_FAILED; + } + GE_TIMESTAMP_START(BuildModelForGetTask); + GE_CHK_STATUS_RET(builder.BuildModelForGetTask(*model_ptr), "Builder BuildModelForGetTask() return fail."); + GE_TIMESTAMP_END(BuildModelForGetTask, "GraphBuilder::BuildModelForGetTask"); + + GraphUtils::DumpGEGraph(comp_graph, "AfterBuildModel"); + GraphUtils::DumpGEGraphToOnnx(*comp_graph, "AfterBuildModel"); + + GE_TIMESTAMP_START(GetTaskInfo); + ret = GetTaskInfo(builder, model_ptr, comp_graph, subgraph_ptr_list, session_id); + GE_TIMESTAMP_END(GetTaskInfo, "GraphBuilder::GetTaskInfo"); + + GraphUtils::DumpGEGraph(comp_graph, "AfterGetTask"); + GraphUtils::DumpGEGraphToOnnx(*comp_graph, "AfterGetTask"); + if (ret != SUCCESS) { + GELOGE(ret, "Builder GetTaskInfo() return fail."); + return ret; + } + ge_model_ptr = MakeShared(); + if (ge_model_ptr == nullptr) { + return MEMALLOC_FAILED; + } + GE_CHK_STATUS_RET(builder.SaveDataToModel(*model_ptr, *ge_model_ptr), "model builder SaveDataToModel() return fail."); + GELOGI("Success to build model."); + GE_TIMESTAMP_END(BuildSubgraph, "GraphBuilder::Build"); + return SUCCESS; +} + +Status GraphBuilder::GetTaskInfo(const ge::ModelBuilder &builder, const ModelPtr &model_ptr, + ComputeGraphPtr &comp_graph, std::vector &subgraph_ptr_list, + uint64_t session_id) { + GE_CHECK_NOTNULL(model_ptr); + 
GE_CHECK_NOTNULL(comp_graph); + + int64_t memory_size = 0; + if (!AttrUtils::GetInt(model_ptr, ATTR_MODEL_MEMORY_SIZE, memory_size)) { + GELOGE(INTERNAL_ERROR, "Get memory size fail."); + return INTERNAL_ERROR; + } + int64_t weight_size = 0; + if (!AttrUtils::GetInt(model_ptr, ATTR_MODEL_WEIGHT_SIZE, weight_size)) { + GELOGE(INTERNAL_ERROR, "Get weight memory size fail."); + return INTERNAL_ERROR; + } + auto *get_mem_base = reinterpret_cast(ge::VarManager::Instance(0)->GetVarMemMaxSize()); + uint8_t *get_weight_mem_base = get_mem_base; + if (weight_size > 0) { + get_weight_mem_base = get_mem_base + memory_size; + } + + RunContextUtil run_context; + Status ret = run_context.InitMemInfo(get_mem_base, memory_size, get_weight_mem_base, weight_size); + if (ret != SUCCESS) { + GELOGE(ret, "task_generator init mem info fail."); + return ret; + } + auto weight_buffer = builder.GetWeightBuffer(); + ret = run_context.CreateRunContext(*model_ptr, comp_graph, weight_buffer, session_id); + if (ret != SUCCESS) { + GELOGE(ret, "runContext create run context fail."); + return ret; + } + + OptimizeStreamGraph optimize_stream; + ret = optimize_stream.OptimizeStreamedSubGraph(comp_graph, subgraph_ptr_list, run_context.GetRunContext()); + if (ret != SUCCESS) { + GELOGE(ret, "Optimize streamed subGraph fail."); + return ret; + } + + GraphUtils::DumpGEGraph(comp_graph, "AfterOptimizeStreamedSubGraph"); + GraphUtils::DumpGEGraphToOnnx(*comp_graph, "AfterOptimizeStreamedSubGraph"); + + auto *get_var_mem_base = reinterpret_cast(ge::VarManager::Instance(0)->GetVarMemLogicBase()); + uint64_t var_size = (ge::VarManager::Instance(session_id)->GetVarMemSize(RT_MEMORY_HBM) > 0) + ? 
ge::VarManager::Instance(0)->GetVarMemMaxSize() + : 0; + TaskGenerator task_generator(get_var_mem_base, var_size); + ret = task_generator.GetTaskInfo(*model_ptr, comp_graph, session_id, run_context.GetRunContext()); + + return ret; +} + +Status GraphBuilder::SetInputSize(const ge::NodePtr &node_ptr) { + // set input_desc.size = src_node.output_desc.size + GELOGI("Start to set input desc size."); + for (const auto &in_data_anchor : node_ptr->GetAllInDataAnchors()) { + const auto &peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, continue); + const auto &src_node = peer_out_anchor->GetOwnerNode(); + const auto &src_op = src_node->GetOpDesc(); + GE_IF_BOOL_EXEC(src_op == nullptr, continue); + auto node_op_desc = node_ptr->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue); + // set dst_node.input_desc = src_node.output_desc + ge::GeTensorDesc desc_temp(src_op->GetOutputDesc(peer_out_anchor->GetIdx())); + + uint32_t size = 0; + GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(desc_temp, size) != SUCCESS, GELOGI("Get size failed!")); + GELOGI("src node %s output desc, dim_size: %zu, mem_size: %u, format: %s, type: %s.", src_node->GetName().c_str(), + desc_temp.GetShape().GetDimNum(), size, TypeUtils::FormatToSerialString(desc_temp.GetFormat()).c_str(), + TypeUtils::DataTypeToSerialString(desc_temp.GetDataType()).c_str()); + for (size_t i = 0; i < desc_temp.GetShape().GetDimNum(); ++i) { + GELOGI("dims[%zu]: %ld", i, desc_temp.GetShape().GetDim(i)); + } + + auto input_desc = node_op_desc->GetInputDescPtr(in_data_anchor->GetIdx()); + GE_CHECK_NOTNULL(input_desc); + ge::TensorUtils::SetSize(const_cast(*input_desc), size); + GE_CHK_STATUS_RET(node_op_desc->UpdateInputDesc(in_data_anchor->GetIdx(), *input_desc)); + GELOGI("%s input desc, dim_size: %zu, mem_size: %u, format: %s, type: %s.", node_ptr->GetName().c_str(), + input_desc->GetShape().GetDimNum(), size, 
TypeUtils::FormatToSerialString(input_desc->GetFormat()).c_str(), + TypeUtils::DataTypeToSerialString(input_desc->GetDataType()).c_str()); + } + + return SUCCESS; +} + +Status GraphBuilder::SecondPartition(ge::ComputeGraphPtr &comp_graph, + std::vector &subgraph_ptr_list) { + GELOGI("[SecondPartition] second partition."); + subgraph_ptr_list.clear(); + GE_TIMESTAMP_START(GraphPartition2); + Status ret = graph_partitioner_.Partition(comp_graph, subgraph_ptr_list, GraphPartitioner::kSecondPartitioning); + GE_CHK_STATUS_RET(ret, "Graph partition Failed."); + GE_TIMESTAMP_END(GraphPartition2, "GraphPartitioner::Partition2"); + return ret; +} +} // namespace ge diff --git a/src/ge/graph/build/graph_build.h b/src/ge/graph/build/graph_build.h new file mode 100644 index 00000000..b6f521e5 --- /dev/null +++ b/src/ge/graph/build/graph_build.h @@ -0,0 +1,71 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_BUILD_GRAPH_BUILD_H_ +#define GE_GRAPH_BUILD_GRAPH_BUILD_H_ + +#include +#include +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/properties_manager.h" +#include "common/string_util.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/build/model_builder.h" +#include "graph/build/task_generator.h" +#include "graph/compute_graph.h" +#include "graph/graph.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/model.h" +#include "graph/node.h" +#include "graph/partition/graph_partition.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/tensor_utils.h" + +namespace ge { +class GraphBuilder { + public: + GraphBuilder(); + GraphBuilder(const GraphBuilder &in) = delete; + GraphBuilder &operator=(const GraphBuilder &in) = delete; + virtual ~GraphBuilder() = default; + Status Build(ComputeGraphPtr &comp_graph, std::vector &subgraph_ptr_list, + GeModelPtr &ge_model_ptr, uint64_t session_id = INVALID_SESSION_ID); + void SetOptions(const GraphManagerOptions &options); + + private: + Status CalcOpParam(const ge::ComputeGraphPtr &graph); + Status GetTaskInfo(const ge::ModelBuilder &builder, const ModelPtr &model_ptr, + ComputeGraphPtr &comp_graph, std::vector &subgraph_ptr_list, + uint64_t session_id = INVALID_SESSION_ID); + Status SetInputSize(const ge::NodePtr &node_ptr); + Status SecondPartition(ge::ComputeGraphPtr &comp_graph, vector &subgraph_ptr_list); + + int build_mode_; + + std::map stream_max_parallel_num_; + bool hcom_parallel_; + + GraphPartitioner graph_partitioner_; +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_GRAPH_BUILD_H_ diff --git a/src/ge/graph/build/logical_stream_allocator.cc b/src/ge/graph/build/logical_stream_allocator.cc new file mode 100644 index 00000000..c9c9c008 --- /dev/null +++ b/src/ge/graph/build/logical_stream_allocator.cc @@ -0,0 +1,561 @@ +/** + * Copyright 2019-2020 Huawei Technologies 
Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/logical_stream_allocator.h" +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/fmk_error_codes.h" +#include "framework/common/types.h" +#include "graph/debug/ge_attr_define.h" + +using std::string; +using std::vector; +using std::map; +using std::set; + +namespace ge { +LogicalStreamPass::LogicalStreamPass(const string &name) : name_(name) {} + +const string &LogicalStreamPass::GetName() const { return name_; } + +bool LogicalStreamPass::IsEngineSkip(const Subgraph &subgraph) const { return subgraph.engine_conf.skip_assign_stream; } + +bool LogicalStreamPass::IsEngineAttach(const Subgraph &subgraph) const { return subgraph.engine_conf.attach; } + +bool LogicalStreamPass::IsEngineIndependent(const Subgraph &subgraph) const { return subgraph.engine_conf.independent; } + +bool LogicalStreamPass::HasStreamLabel(const Subgraph &subgraph) const { + return !subgraph.subgraph_info.GetStreamLabel().empty(); +} + +bool LogicalStreamPass::HasAssignedStream(const Subgraph &subgraph) const { + return subgraph.stream_id != kInvalidStream; +} + +Status AssignByLabelPass::Run(ComputeGraphPtr whole_graph, const vector &subgraphs, Context &context) { + bool changed = false; + int64_t &next_stream = context.next_stream; + map label_streams; + + for (const SubgraphPtr &subgraph : subgraphs) { + const string &stream_label = 
subgraph->subgraph_info.GetStreamLabel(); + if (!stream_label.empty()) { + // Subgraphs of the same stream_label are assigned to the same stream, + // and different stream_labels are assigned new streams. + auto iter = label_streams.find(stream_label); + if (iter != label_streams.end()) { + subgraph->stream_id = iter->second; + } else { + subgraph->stream_id = next_stream; + GELOGI("Assign new stream %ld for label %s.", next_stream, stream_label.c_str()); + + label_streams.emplace(stream_label, next_stream); + ++next_stream; + } + changed = true; + } + } + + return changed ? SUCCESS : NOT_CHANGED; +} + +Status IndependentStreamPass::Run(ComputeGraphPtr whole_graph, const vector &subgraphs, Context &context) { + bool changed = false; + int64_t &next_stream = context.next_stream; + + // > + map> engine_streams; + + for (const SubgraphPtr &subgraph : subgraphs) { + if (!IsEngineIndependent(*subgraph)) { + continue; + } + + const string &engine = subgraph->engine_conf.id; + const string &stream_label = subgraph->subgraph_info.GetStreamLabel(); + auto &label_streams = engine_streams[engine]; + auto iter = label_streams.find(stream_label); + if (iter != label_streams.end()) { + subgraph->stream_id = iter->second; + } else { + subgraph->stream_id = next_stream; + GELOGI("Assign new independent stream %ld for engine %s (label: %s).", next_stream, engine.c_str(), + stream_label.c_str()); + + label_streams.emplace(stream_label, next_stream); + ++next_stream; + } + changed = true; + } + + return changed ? 
SUCCESS : NOT_CHANGED; +} + +Status AssignByDependencyPass::Run(ComputeGraphPtr whole_graph, const vector &subgraphs, + Context &context) { + bool changed = false; + map end_subgraph_map; + map pld_subgraph_map; + InitEndSubgraphMap(subgraphs, end_subgraph_map); + InitPldSubgraphMap(subgraphs, pld_subgraph_map); + + for (const SubgraphPtr &subgraph : subgraphs) { + if (HasAssignedStream(*subgraph)) { + continue; + } + + SubgraphPtr reusable_subgraph = GetReusableSubgraph(subgraph, end_subgraph_map, pld_subgraph_map); + if (reusable_subgraph != nullptr) { + if (HasAssignedStream(*reusable_subgraph)) { + subgraph->stream_id = reusable_subgraph->stream_id; + } else { + int64_t stream_id = AssignNewStream(reusable_subgraph); + subgraph->stream_id = stream_id; + GELOGI("Reusable subgraph %s has not been assigned a stream, now assign new stream %ld.", + reusable_subgraph->name.c_str(), stream_id); + } + + if (reusable_subgraph->reused_subgraph != nullptr) { + reusable_subgraph = reusable_subgraph->reused_subgraph; + } + + subgraph->reused_subgraph = reusable_subgraph; + reused_subgraphs_.emplace_back(subgraph, reusable_subgraph); + GELOGI("Subgraph %s of engine %s reuses stream of subgraph %s of engine %s.", subgraph->name.c_str(), + subgraph->engine_conf.id.c_str(), reusable_subgraph->name.c_str(), + reusable_subgraph->engine_conf.id.c_str()); + } else { + (void)AssignNewStream(subgraph); + } + changed = true; + } + + UpdateAssignedSubgraphs(context); + UpdateReusedSubgraphs(); + + return changed ? 
SUCCESS : NOT_CHANGED; +} + +void AssignByDependencyPass::InitEndSubgraphMap(const vector &subgraphs, + map &end_subgraph_map) { + for (const auto &subgraph : subgraphs) { + const SubGraphInfo &subgraph_info = subgraph->subgraph_info; + for (const auto &item : subgraph_info.GetEnd2PldMap()) { + end_subgraph_map.emplace(item.first, subgraph); + } + } +} + +void AssignByDependencyPass::InitPldSubgraphMap(const vector &subgraphs, + map &pld_subgraph_map) { + for (const auto &subgraph : subgraphs) { + const SubGraphInfo &subgraph_info = subgraph->subgraph_info; + for (const auto &item : subgraph_info.GetPld2EndMap()) { + pld_subgraph_map.emplace(item.first, subgraph); + } + } +} + +bool AssignByDependencyPass::CouldReuse(const SubgraphPtr &subgraph, const SubgraphPtr &pred_subgraph, + const map &pld_subgraph_map) { + if ((subgraph == nullptr) || (pred_subgraph == nullptr)) { + return false; + } + + if (subgraph->engine_conf.scheduler_id != pred_subgraph->engine_conf.scheduler_id) { + return false; + } + + if (IsEngineIndependent(*pred_subgraph) || HasStreamLabel(*pred_subgraph)) { + return false; + } + + // If the engine of the predecessor subgraph is the same as the other successor subgraphs, the stream is not reused. 
+ for (const auto &end_pld_pair : pred_subgraph->subgraph_info.GetEnd2PldMap()) { + auto iter = pld_subgraph_map.find(end_pld_pair.second); + if (iter != pld_subgraph_map.end()) { + const SubgraphPtr &pred_subgraph_succ = iter->second; + if (pred_subgraph_succ != subgraph && pred_subgraph_succ->engine_conf.id == pred_subgraph->engine_conf.id) { + return false; + } + } + } + + if ((subgraph->engine_conf.id == pred_subgraph->engine_conf.id) || IsEngineAttach(*subgraph)) { + return true; + } + + if ((pred_subgraph->reused_subgraph != nullptr) && + (pred_subgraph->reused_subgraph->engine_conf.id == subgraph->engine_conf.id)) { + return true; + } + + return false; +} + +LogicalStreamPass::SubgraphPtr AssignByDependencyPass::GetReusableSubgraph( + const SubgraphPtr &subgraph, const map &end_subgraph_map, + const map &pld_subgraph_map) { + const SubGraphInfo &subgraph_info = subgraph->subgraph_info; + for (const auto &pld_2_end : subgraph_info.GetPld2EndMap()) { + const NodePtr &peer_end = pld_2_end.second; + auto iter = end_subgraph_map.find(peer_end); + if (iter != end_subgraph_map.end()) { + const SubgraphPtr &pred_subgraph = iter->second; + if (CouldReuse(subgraph, pred_subgraph, pld_subgraph_map)) { + return pred_subgraph; + } + } + } + + return nullptr; +} + +int64_t AssignByDependencyPass::AssignNewStream(SubgraphPtr subgraph) { + const string &engine_name = subgraph->engine_conf.id; + int64_t max_parallel_num = subgraph->max_parallel_num; + + int64_t stream_id = 0; + auto next_iter = engine_next_streams_.find(engine_name); + if (next_iter != engine_next_streams_.end()) { + stream_id = next_iter->second; + } + + if (stream_id >= max_parallel_num) { + stream_id = 0; + } + + subgraph->stream_id = stream_id; + engine_next_streams_[engine_name] = stream_id + 1; + assigned_subgraphs_.emplace(subgraph); + + if ((stream_id + 1) > engine_stream_num_[engine_name]) { + engine_stream_num_[engine_name] = stream_id + 1; + } + + GELOGI("Subgraph %s assigns new temp stream %ld 
(engine: %s).", subgraph->name.c_str(), stream_id, + engine_name.c_str()); + + return stream_id; +} + +void AssignByDependencyPass::UpdateAssignedSubgraphs(Context &context) { + // Update the starting stream id for each engine. + int64_t &next_stream = context.next_stream; + map engine_start_streams; + for (const auto &item : engine_stream_num_) { + int64_t stream_count = item.second; + engine_start_streams[item.first] = next_stream; + next_stream += stream_count; + } + + // Update the subgraphs assigned by the engine. + for (auto &subgraph : assigned_subgraphs_) { + subgraph->stream_id += engine_start_streams[subgraph->engine_conf.id]; + GELOGI("Stream of subgraph %s has been updated to %ld.", subgraph->name.c_str(), subgraph->stream_id); + } +} + +void AssignByDependencyPass::UpdateReusedSubgraphs() { + // Update streams for the subgraphs of reusing stream. + for (const auto &item : reused_subgraphs_) { + auto &cur_subgraph = item.first; + auto &reused_graph = item.second; + cur_subgraph->stream_id = reused_graph->stream_id; + GELOGI("Stream of subgraph %s has been updated to %ld.", cur_subgraph->name.c_str(), cur_subgraph->stream_id); + } +} + +Status NodeStreamUpdatePass::Run(ComputeGraphPtr whole_graph, const vector &subgraphs, Context &context) { + // Check if all subgraphs have been assigned a stream. + for (const SubgraphPtr &subgraph : subgraphs) { + const string &engine_name = subgraph->engine_conf.id; + + if (!IsEngineSkip(*subgraph) && !HasAssignedStream(*subgraph)) { + GELOGE(INTERNAL_ERROR, "Subgraph %s has not yet been assigned a stream (engine: %s).", subgraph->name.c_str(), + engine_name.c_str()); + return INTERNAL_ERROR; + } else { + GELOGI("Subgraph %s is assigned stream %ld (engine: %s).", subgraph->name.c_str(), subgraph->stream_id, + engine_name.c_str()); + } + } + + // Init the stream id of node. 
+ for (NodePtr &node : whole_graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + node->GetOpDesc()->SetStreamId(kInvalidStream); + } + + // Set the stream id of the subgraph to the node. + for (const SubgraphPtr &subgraph : subgraphs) { + int64_t stream_id = subgraph->stream_id; + const string &engine_name = subgraph->engine_conf.id; + auto compute_graph = subgraph->subgraph_info.GetSubGraph(); + for (NodePtr &node : compute_graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + if (IsEngineSkip(*subgraph) && node->GetInNodes().empty()) { + GELOGI("Node %s of type %s in subgraph %s doesn't need to assign a stream (engine: %s).", + node->GetName().c_str(), node->GetType().c_str(), subgraph->name.c_str(), engine_name.c_str()); + } else { + node->GetOpDesc()->SetStreamId(stream_id); + } + } + } + + // Update stream id for nodes belong to skipped engine subgraph + GE_CHK_STATUS_RET(UpdateForSkippedEngine(whole_graph, subgraphs)); + + return SUCCESS; +} + +Status AllReduceParallelPass::Run(ComputeGraphPtr whole_graph, const vector &subgraphs, Context &context) { + if (!context.hcom_parallel) { + return NOT_CHANGED; + } + + GELOGI("AllReduceParallelPass is enabled."); + + // All successors of HcomAllReduce. + set all_reduce_succs; + + for (const NodePtr &node : whole_graph->GetDirectNode()) { + if (node->GetType() != HCOMALLREDUCE) { + continue; + } + + string reduce_stream_label; + GE_CHECK_NOTNULL(node->GetOpDesc()); + // ATTR_NAME_STREAM_LABEL is optional. + (void)AttrUtils::GetStr(node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, reduce_stream_label); + + set cur_nodes = {node}; + while (!cur_nodes.empty()) { + set all_out_data_nodes; + for (auto &curr_node : cur_nodes) { + for (const NodePtr &out_node : curr_node->GetOutDataNodes()) { + string out_stream_label; + GE_CHECK_NOTNULL(out_node->GetOpDesc()); + // ATTR_NAME_STREAM_LABEL is optional. 
+ (void)AttrUtils::GetStr(out_node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, out_stream_label); + if (out_stream_label == reduce_stream_label) { + all_reduce_succs.emplace(out_node); + all_out_data_nodes.emplace(out_node); + } + } + } + cur_nodes = all_out_data_nodes; + } + } + + map old_stream_to_new; + for (const NodePtr &node : all_reduce_succs) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + auto old_stream = node->GetOpDesc()->GetStreamId(); + if (old_stream != kInvalidStream) { + int64_t new_stream = kInvalidStream; + auto iter = old_stream_to_new.find(old_stream); + if (iter != old_stream_to_new.end()) { + new_stream = iter->second; + } else { + new_stream = context.next_stream; + context.next_stream++; + old_stream_to_new.emplace(old_stream, new_stream); + } + + GELOGI("Stream of node %s has been updated from %ld to %ld.", node->GetName().c_str(), old_stream, new_stream); + node->GetOpDesc()->SetStreamId(new_stream); + } + } + + return !all_reduce_succs.empty() ? SUCCESS : NOT_CHANGED; +} + +int64_t NodeStreamUpdatePass::GetSingleInoutStream(const NodePtr &node) const { + set stream_ids; + + for (const auto &in_node : node->GetInAllNodes()) { + GE_CHECK_NOTNULL_EXEC(in_node->GetOpDesc(), return kInvalidStream); + int64_t stream_id = in_node->GetOpDesc()->GetStreamId(); + if (stream_id != kInvalidStream) { + stream_ids.insert(stream_id); + } + } + for (const auto &out_node : node->GetOutAllNodes()) { + GE_CHECK_NOTNULL_EXEC(out_node->GetOpDesc(), return kInvalidStream); + int64_t stream_id = out_node->GetOpDesc()->GetStreamId(); + if (stream_id != kInvalidStream) { + stream_ids.insert(stream_id); + } + } + if (stream_ids.size() == 1) { + int64_t stream_id = *(stream_ids.begin()); + GELOGI("Node %s of type %s: its all input and output nodes are in same stream id[%ld].", node->GetName().c_str(), + node->GetType().c_str(), stream_id); + return stream_id; + } + + return kInvalidStream; +} + +Status NodeStreamUpdatePass::UpdateForSkippedEngine(const ComputeGraphPtr 
&whole_graph, + const vector &subgraphs) { + set nodes_to_be_updated; + + // Check if sub graph is engine skipped and without stream label or not + for (const SubgraphPtr &subgraph : subgraphs) { + if (IsEngineSkip(*subgraph) && !HasStreamLabel(*subgraph)) { + auto compute_graph = subgraph->subgraph_info.GetSubGraph(); + for (NodePtr &node : compute_graph->GetDirectNode()) { + auto op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + auto stream_id = op_desc->GetStreamId(); + if (stream_id != kInvalidStream) { + nodes_to_be_updated.insert(op_desc); + } + } + } + } + + // Try reassign the stream id + for (ge::NodePtr &node : whole_graph->GetDirectNode()) { + auto op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + int64_t stream_id = op_desc->GetStreamId(); + if (nodes_to_be_updated.find(op_desc) != nodes_to_be_updated.end()) { + if (AreAllPredStreamsInvalid(node)) { + op_desc->SetStreamId(kInvalidStream); + } else { + int64_t inout_stream = GetSingleInoutStream(node); + if (inout_stream != kInvalidStream) { + op_desc->SetStreamId(inout_stream); + GELOGI("Node %s of type %s reassign to stream id[%ld] from stream id[%ld].", node->GetName().c_str(), + node->GetType().c_str(), inout_stream, stream_id); + } + } + } + } + return SUCCESS; +} + +bool NodeStreamUpdatePass::AreAllPredStreamsInvalid(const NodePtr &node) const { + for (const auto &pre_node : node->GetInAllNodes()) { + auto pre_node_desc = pre_node->GetOpDesc(); + if (pre_node_desc != nullptr) { + int64_t stream_id = pre_node_desc->GetStreamId(); + if (stream_id != kInvalidStream) { + return false; + } + } + } + return true; +} + +LogicalStreamAllocator::LogicalStreamAllocator(const map &scheduler_confs, + const map &max_parallel_num, bool hcom_parallel) + : scheduler_confs_(scheduler_confs), max_parallel_num_(max_parallel_num) { + context_.hcom_parallel = hcom_parallel; +} + +Status LogicalStreamAllocator::Assign(const ComputeGraphPtr &whole_graph, const vector &subgraph_infos, + int64_t 
&stream_num) { + GE_CHECK_NOTNULL(whole_graph); + map engine_confs; + for (const auto &item : scheduler_confs_) { + const SchedulerConf &scheduler = item.second; + for (const auto &engine_pair : scheduler.cal_engines) { + EngineConfPtr engine_conf = engine_pair.second; + if (engine_conf != nullptr) { + engine_confs[engine_pair.first] = engine_conf; + } + } + } + + vector subgraphs; + Status status = ConvertSubgraphs(subgraph_infos, engine_confs, subgraphs); + if (status != SUCCESS) { + GELOGE(status, "Create subgraphs failed."); + return status; + } + + return RunPasses(whole_graph, subgraphs, stream_num); +} + +Status LogicalStreamAllocator::ConvertSubgraphs(const vector &subgraph_infos, + const map &engine_confs, + vector &subgraphs) { + for (auto &subgraph_info : subgraph_infos) { + GE_CHECK_NOTNULL(subgraph_info); + + string subgraph_name; + ComputeGraphPtr computer_graph = subgraph_info->GetSubGraph(); + if (computer_graph != nullptr) { + subgraph_name = computer_graph->GetName(); + } + + const string &engine_name = subgraph_info->GetEngineName(); + auto engine_conf_iter = engine_confs.find(engine_name); + if ((engine_conf_iter == engine_confs.end()) || (engine_conf_iter->second == nullptr)) { + GELOGE(INTERNAL_ERROR, "Engine conf of subgraph %s not found (engine name: %s).", subgraph_name.c_str(), + engine_name.c_str()); + + return INTERNAL_ERROR; + } + + SubgraphPtr subgraph = MakeShared(*subgraph_info, *engine_conf_iter->second); + GE_CHECK_NOTNULL(subgraph); + subgraph->name = subgraph_name; + + auto parallel_iter = max_parallel_num_.find(engine_name); + if (parallel_iter != max_parallel_num_.end()) { + subgraph->max_parallel_num = parallel_iter->second; + } + + subgraphs.emplace_back(subgraph); + } + + return SUCCESS; +} + +Status LogicalStreamAllocator::RunPasses(const ComputeGraphPtr &whole_graph, const vector &subgraphs, + int64_t &stream_num) { + vector passes; + passes.emplace_back(MakeShared()); + passes.emplace_back(MakeShared()); + 
passes.emplace_back(MakeShared()); + passes.emplace_back(MakeShared()); + passes.emplace_back(MakeShared()); + + for (auto &pass : passes) { + GE_CHECK_NOTNULL(pass); + + Status status = pass->Run(whole_graph, subgraphs, context_); + if (status == SUCCESS) { + GELOGI("Stream pass %s return SUCCESS.", pass->GetName().c_str()); + } else if (status == NOT_CHANGED) { + GELOGI("Stream pass %s return NOT_CHANGED.", pass->GetName().c_str()); + } else { + GELOGE(status, "Stream pass %s failed.", pass->GetName().c_str()); + return status; + } + } + + stream_num = context_.next_stream; + GELOGI("Assigned logical stream num: %ld.", stream_num); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/logical_stream_allocator.h b/src/ge/graph/build/logical_stream_allocator.h new file mode 100644 index 00000000..83c5f668 --- /dev/null +++ b/src/ge/graph/build/logical_stream_allocator.h @@ -0,0 +1,190 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_BUILD_LOGICAL_STREAM_ALLOCATOR_H_ +#define GE_GRAPH_BUILD_LOGICAL_STREAM_ALLOCATOR_H_ + +#include +#include +#include +#include +#include +#include + +#include "engine_manager/dnnengine_manager.h" +#include "graph/manager/graph_manager_utils.h" + +namespace ge { +// Define default fuctions for stream passes. 
+#define STREAM_PASS_DEFAULT_FUNC(CLASS) \ + CLASS() : LogicalStreamPass(#CLASS) {} \ + ~CLASS() override = default; \ + CLASS(const CLASS &) = delete; \ + CLASS &operator=(const CLASS &) = delete + +static const int64_t kInvalidStream = -1; + +// Base stream class. +class LogicalStreamPass { + public: + static const int64_t kDefaultMaxParalleNum = 1; + + struct Subgraph; + using SubgraphPtr = std::shared_ptr; + + struct Subgraph { + string name; + int64_t stream_id = kInvalidStream; + + const SubGraphInfo &subgraph_info; + const EngineConf &engine_conf; + int64_t max_parallel_num = kDefaultMaxParalleNum; + + SubgraphPtr reused_subgraph = nullptr; + + Subgraph(const SubGraphInfo &subgraph_info, const EngineConf &engine_conf) + : subgraph_info(subgraph_info), engine_conf(engine_conf) {} + }; + + struct Context { + // Next stream id. + int64_t next_stream = 0; + bool hcom_parallel = false; + }; + + explicit LogicalStreamPass(const std::string &name); + LogicalStreamPass(const LogicalStreamPass &) = delete; + LogicalStreamPass &operator=(const LogicalStreamPass &) = delete; + virtual ~LogicalStreamPass() = default; + + const std::string &GetName() const; + virtual Status Run(ComputeGraphPtr whole_graph, const std::vector &subgraphs, Context &context) = 0; + + protected: + bool IsEngineSkip(const Subgraph &subgraph) const; + bool IsEngineAttach(const Subgraph &subgraph) const; + bool IsEngineIndependent(const Subgraph &subgraph) const; + bool HasStreamLabel(const Subgraph &subgraph) const; + bool HasAssignedStream(const Subgraph &subgraph) const; + + private: + std::string name_; +}; + +using LogicalStreamPassPtr = std::shared_ptr; + +// Allocate streams by label. +class AssignByLabelPass : public LogicalStreamPass { + public: + STREAM_PASS_DEFAULT_FUNC(AssignByLabelPass); + Status Run(ComputeGraphPtr whole_graph, const std::vector &subgraphs, Context &context) override; +}; + +// Engines such as hccl require independent Stream. 
+class IndependentStreamPass : public LogicalStreamPass { + public: + STREAM_PASS_DEFAULT_FUNC(IndependentStreamPass); + Status Run(ComputeGraphPtr whole_graph, const std::vector &subgraphs, Context &context) override; +}; + +// Reuse streams or assign new streams based on dependencies. +class AssignByDependencyPass : public LogicalStreamPass { + public: + STREAM_PASS_DEFAULT_FUNC(AssignByDependencyPass); + Status Run(ComputeGraphPtr whole_graph, const std::vector &subgraphs, Context &context) override; + + private: + void InitEndSubgraphMap(const std::vector &subgraphs, std::map &end_subgraph_map); + void InitPldSubgraphMap(const std::vector &subgraphs, std::map &pld_subgraph_map); + + SubgraphPtr GetReusableSubgraph(const SubgraphPtr &subgraph, const std::map &end_subgraph_map, + const std::map &pld_subgraph_map); + + int64_t AssignNewStream(SubgraphPtr subgraph); + + void UpdateAssignedSubgraphs(Context &context); + void UpdateReusedSubgraphs(); + + bool CouldReuse(const SubgraphPtr &subgraph, const SubgraphPtr &pred_subgraph, + const std::map &pld_subgraph_map); + + // + std::map engine_next_streams_; + + // + std::map engine_stream_num_; + + // Subgraphs of assign stream by engine + std::set assigned_subgraphs_; + + // + std::vector> reused_subgraphs_; +}; + +// Update the stream of subgraphs to nodes. 
+class NodeStreamUpdatePass : public LogicalStreamPass { + public: + STREAM_PASS_DEFAULT_FUNC(NodeStreamUpdatePass); + Status Run(ComputeGraphPtr whole_graph, const std::vector &subgraphs, Context &context) override; + + private: + /// Optimize for case like: + /// NodeA(stream1) -> Const(stream2) -> NodeB(stream1) + /// To case: + /// NodeA(stream1) -> Const(stream1) -> NodeB(stream1) + /// Which could reduce event number (Const could be other type which belong to skipped engine subgraph) + Status UpdateForSkippedEngine(const ComputeGraphPtr &whole_graph, const std::vector &subgraphs); + + int64_t GetSingleInoutStream(const NodePtr &node) const; + // Judge if all predecessors' streams of node are INVALID_STREAM + bool AreAllPredStreamsInvalid(const NodePtr &node) const; +}; + +// AllReduce and backward operators execute in parallel. +class AllReduceParallelPass : public LogicalStreamPass { + public: + STREAM_PASS_DEFAULT_FUNC(AllReduceParallelPass); + Status Run(ComputeGraphPtr whole_graph, const std::vector &subgraphs, Context &context) override; +}; + +// Assign logical streams which is not limited by the number of tasks. 
+class LogicalStreamAllocator { + using Subgraph = LogicalStreamPass::Subgraph; + using SubgraphPtr = LogicalStreamPass::SubgraphPtr; + using Context = LogicalStreamPass::Context; + + public: + LogicalStreamAllocator(const std::map &scheduler_confs, + const std::map &max_parallel_num, bool hcom_parallel = false); + LogicalStreamAllocator(const LogicalStreamAllocator &) = delete; + LogicalStreamAllocator &operator=(const LogicalStreamAllocator &) = delete; + ~LogicalStreamAllocator() = default; + + Status Assign(const ComputeGraphPtr &whole_graph, const std::vector &subgraphs, int64_t &stream_num); + + private: + Status ConvertSubgraphs(const std::vector &subgraph_infos, + const std::map &engine_confs, + std::vector &subgraphs); + Status RunPasses(const ComputeGraphPtr &whole_graph, const std::vector &subgraphs, int64_t &stream_num); + + const std::map &scheduler_confs_; + const std::map &max_parallel_num_; + Context context_; +}; +} // namespace ge + +#endif // GE_GRAPH_BUILD_LOGICAL_STREAM_ALLOCATOR_H_ diff --git a/src/ge/graph/build/memory/CMakeLists.txt b/src/ge/graph/build/memory/CMakeLists.txt new file mode 100644 index 00000000..90bf510c --- /dev/null +++ b/src/ge/graph/build/memory/CMakeLists.txt @@ -0,0 +1,53 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +# libge_memosy.a +file(GLOB_RECURSE SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "memory_assigner.cc" + "graph_mem_assigner.cc" + "binary_block_mem_assigner.cc" + "block_mem_assigner.cc" + "hybrid_mem_assigner.cc" + "max_block_mem_assigner.cc" + "var_mem_assign_util.cc" + ) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}/src) +include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +######### libge_memory.a ############# +add_library(ge_memory STATIC ${SRC_LIST}) +target_compile_definitions(ge_memory PRIVATE + Werror + DAVINCI_CLOUD) +target_link_libraries(ge_memory + graph + ge_common + ${PROTOBUF_LIBRARY} + ${c_sec} + ${slog} + rt + dl) diff --git a/src/ge/graph/build/memory/binary_block_mem_assigner.cc b/src/ge/graph/build/memory/binary_block_mem_assigner.cc new file mode 100644 index 00000000..f4312976 --- /dev/null +++ b/src/ge/graph/build/memory/binary_block_mem_assigner.cc @@ -0,0 +1,124 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/memory/binary_block_mem_assigner.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" + +namespace { +const uint32_t kRangeCeilInterval = 2; +const uint32_t kLogBase = 2; +const int64_t kLargeBlockSize = 8 * 1024 * 1024; +const int64_t kLargeBlockRangeSize = 10; +} // namespace + +namespace ge { +using std::vector; + +void BinaryBlockMemAssigner::PlanRanges(size_t range_number_limit, vector> &ranges) { + /// range delete and merge + /// result after delete and merge is: [[6,12],[16,24,24],[30,32,48],[60,256]] + bool changed = false; + vector temp; + do { + changed = false; + for (auto iter = ranges.begin(); iter != ranges.end();) { + if (!temp.empty()) { + iter->insert(iter->end(), temp.begin(), temp.end()); + temp.clear(); + } + if (iter->empty()) { + iter = ranges.erase(iter); + changed = true; + } else if ((iter->size() < range_number_limit) && (ranges.end() - iter > 1) && + !(iter->at(0) >= kLargeBlockSize && iter->size() >= kLargeBlockRangeSize)) { + temp.insert(temp.end(), iter->begin(), iter->end()); + iter = ranges.erase(iter); + changed = true; + } else { + ++iter; + } + } + } while (changed); +} + +/// +/// @ingroup domi_omg +/// @brief memory size fixed for reuse. 
this function determines memory types and sizes +/// @param [out] range_ceils return memory sizes +/// @return Status result +/// @author +/// +Status BinaryBlockMemAssigner::GetMemoryRanges(vector &range_ceils) { + vector all_memory_size; + GetOutAndWorkSpaceMem(all_memory_size); + if (all_memory_size.empty()) { + GELOGW("Vector all_memory_size is empty!"); + return SUCCESS; + } + if ((all_memory_size.front() == 0) || (log(kLogBase) == 0)) { + GELOGE(FAILED, "dividend is 0!"); + return FAILED; + } + auto range_number = static_cast( + ceil(log(all_memory_size.back() / static_cast(all_memory_size.front())) / log(kLogBase))); + range_number = (range_number == 0) ? 1 : range_number; + GELOGI("Range number: %zu", range_number); + + vector> ranges(range_number); + GE_CHK_BOOL_EXEC((range_number != 0), return PARAM_INVALID, "range_number can't be 0."); + size_t range_number_limit = all_memory_size.size() / range_number; + int64_t range_ceil = all_memory_size[0]; + for (size_t i = 1; i <= range_number; i++) { + GE_IF_BOOL_EXEC(TypeUtils::CheckUint64MulOverflow(static_cast(range_ceil), kRangeCeilInterval), + GELOGE(FAILED, "Multiply result is out of range."); + return FAILED); + range_ceil *= kRangeCeilInterval; // The block size of each interval is doubled every time. 
+ for (auto iter = all_memory_size.begin(); iter != all_memory_size.end();) { + if (*iter <= range_ceil) { + ranges[i - 1].push_back(*iter); + iter = all_memory_size.erase(iter); + } else { + break; + } + } + } + + GELOGD("Origin ranges:"); + for (auto &v : ranges) { + GELOGD("__%s", ToString(v).c_str()); + } + + PlanRanges(range_number_limit, ranges); + GELOGD("Origin ranges:"); + for (auto &v : ranges) { + GELOGD("__%s", ToString(v).c_str()); + } + + for (auto &range : ranges) { + std::sort(range.begin(), range.end()); + if (!range.empty()) { + range_ceils.push_back(range.back()); + } + } + GELOGI("Range ceils: %s", ToString(range_ceils).c_str()); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/memory/binary_block_mem_assigner.h b/src/ge/graph/build/memory/binary_block_mem_assigner.h new file mode 100644 index 00000000..3c94cdc7 --- /dev/null +++ b/src/ge/graph/build/memory/binary_block_mem_assigner.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_BUILD_MEMORY_BINARY_BLOCK_MEM_ASSIGNER_H_ +#define GE_GRAPH_BUILD_MEMORY_BINARY_BLOCK_MEM_ASSIGNER_H_ + +#include +#include + +#include "graph/build/memory/block_mem_assigner.h" + +namespace ge { +class BinaryBlockMemAssigner : public BlockMemAssigner { + public: + explicit BinaryBlockMemAssigner(ge::ComputeGraphPtr compute_graph) : BlockMemAssigner(std::move(compute_graph)) {} + + BinaryBlockMemAssigner(const BinaryBlockMemAssigner &) = delete; + + BinaryBlockMemAssigner &operator=(const BinaryBlockMemAssigner &) = delete; + + ~BinaryBlockMemAssigner() override = default; + + Status GetMemoryRanges(std::vector &ranges) override; + + private: + void PlanRanges(size_t range_number_limit, std::vector> &ranges); +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_MEMORY_BINARY_BLOCK_MEM_ASSIGNER_H_ diff --git a/src/ge/graph/build/memory/block_mem_assigner.cc b/src/ge/graph/build/memory/block_mem_assigner.cc new file mode 100644 index 00000000..759fe0c1 --- /dev/null +++ b/src/ge/graph/build/memory/block_mem_assigner.cc @@ -0,0 +1,789 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/build/memory/block_mem_assigner.h" +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/anchor.h" +#include "graph/buffer.h" +#include "graph/ge_attr_value.h" +#include "graph/ge_context.h" +#include "graph/node.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" + +#include "graph/debug/ge_attr_define.h" + +#include "graph/optimize/common/params.h" +#include "omg/omg_inner_types.h" + +namespace { +const char *const kAttrNameWorkspaceReuseFlag = "workspace_reuse_flag"; +const char *const kL2FusionDynamicConvergeOp = "l2fusion_dynamic_converge_op"; +const char *const kDisableReuseMemory = "ge.exec.disableReuseMemory"; +const int kReuseMaxCount = 10; +} // namespace + +namespace ge { +using std::map; +using std::pair; +using std::string; +using std::stringstream; +using std::unordered_map; +using std::unordered_set; +using std::vector; + +void MemoryBlock::Resize() { + auto iter = std::max_element(real_size_list_.begin(), real_size_list_.end()); + if (iter == real_size_list_.end()) { + GELOGW("real_size_list_ is empty"); + return; + } else { + size_t block_size = *iter; + if ((block_size > 0) && (block_size % kMemAlignSize != 0)) { + block_size = (block_size + kMemAlignSize - 1) / kMemAlignSize * kMemAlignSize; + } + block_size_ = block_size; + } +} + +bool MemoryBlock::IsSameLabel(std::string &first_batch_label) { + if (node_type_index_list_.empty()) { + return false; + } + + auto node_op_desc = node_type_index_list_[0].node_->GetOpDesc(); + if (node_op_desc == nullptr) { + return false; + } + // not all op has ATTR_NAME_BATCH_LABEL, no need check return value, only check out parameter + (void)ge::AttrUtils::GetStr(node_op_desc, ATTR_NAME_BATCH_LABEL, first_batch_label); + if (first_batch_label.empty()) { + return false; + } + bool all_same_label = true; + for (size_t index = 1; index < 
node_type_index_list_.size(); ++index) { + if (node_type_index_list_[index].node_ == nullptr) { + continue; + } + std::string batch_label; + auto index_op_desc = node_type_index_list_[index].node_->GetOpDesc(); + GE_IF_BOOL_EXEC(index_op_desc == nullptr, continue); + (void)ge::AttrUtils::GetStr(index_op_desc, ATTR_NAME_BATCH_LABEL, batch_label); + if (first_batch_label != batch_label) { + all_same_label = false; + break; + } + } + return all_same_label; +} + +string ToString(ge::NodeTypeIndex &x) { + stringstream ss; + ss << "[" << x.node_->GetName() << "(" << x.node_->GetType() << "), "; + if (x.mem_type_ == kOutput) { + ss << "Output, "; + } else { + ss << "Workspace, "; + } + ss << x.index_ << "]"; + return ss.str(); +} + +string MemoryBlock::String() { + stringstream ss; + ss << "Block size: " << Size() << " from " << HeadOffset() << " to " << TailOffset() << ""; + ss << "real_size_list: " << ToString(real_size_list_) << ""; + ss << "ref_count: " << ref_count_ << ""; + ss << "members: "; + for (auto x : NodeTypeIndexList()) { + ss << "__node: " << ToString(x) << ""; + } + return ss.str(); +} + +BlockMemAssigner::BlockMemAssigner(ge::ComputeGraphPtr compute_graph) + : mem_offset_(0), compute_graph_(std::move(compute_graph)) {} + +BlockMemAssigner::~BlockMemAssigner() { + for (MemoryBlock *memory_block : memory_blocks_) { + GE_DELETE_NEW_SINGLE(memory_block); + } +} + +void BlockMemAssigner::GetOutAndWorkSpaceMem(vector &all_memory_size) { + vector temp; + + for (const NodePtr &n : compute_graph_->GetDirectNode()) { + auto node_op_desc = n->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue); + for (const auto &output_desc : node_op_desc->GetAllOutputsDescPtr()) { + bool reuse_input = false; + GE_IF_BOOL_EXEC(ge::TensorUtils::GetReuseInput(*output_desc, reuse_input) != SUCCESS, + GELOGI("Get reuse_input failed")); + + if (!reuse_input) { + uint32_t size = 0; + GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(*output_desc, size) != SUCCESS, GELOGI("Get size 
failed")); + all_memory_size.emplace_back(size); + } + } + + temp.clear(); + GetNodeWorkSpaceSize(n, temp); + all_memory_size.insert(all_memory_size.end(), temp.begin(), temp.end()); + } + sort(all_memory_size.begin(), all_memory_size.end()); + GELOGI("All memory size: %s", ToString(all_memory_size).c_str()); + + for (auto iter = all_memory_size.begin(); iter != all_memory_size.end();) { + if (*iter == 0) { + iter = all_memory_size.erase(iter); + } else { + ++iter; + } + } +} +/// +/// @ingroup domi +/// @brief decide memory size based on actual input memory size +/// @param [in] size actual memory size in need +/// @param [in] ranges memory size provided +/// @return size_t memory size to apply +/// +size_t GetBlockSize(size_t size, const vector &ranges) { + for (int64_t x : ranges) { + auto x_temp = static_cast(x); + if (size <= x_temp) { + return x_temp; + } + } + + GELOGW("Memory needed size:%zu is beyond the biggest block in memory ranges.", size); + return 0; +} + +bool IsDirectOutputNode(const NodePtr &node, int idx) { + if ((node != nullptr) && (node->GetOpDesc() != nullptr) && (node->GetOpDesc()->GetType() == NETOUTPUT)) { + auto op_desc = node->GetOpDesc(); + auto input_desc = op_desc->MutableInputDesc(idx); + auto output_desc = op_desc->MutableOutputDesc(idx); + if ((input_desc != nullptr) && (output_desc != nullptr) && (input_desc->GetFormat() == output_desc->GetFormat()) && + (input_desc->GetDataType() == output_desc->GetDataType())) { + return true; + } + } + return false; +} + +void AddReusableBlockCount(const MemoryBlock &mem_block, map &reusable_block_counts) { + string key = std::to_string(mem_block.Size()); + key += "_" + std::to_string(mem_block.stream_id_); + auto it = reusable_block_counts.find(key); + if (it != reusable_block_counts.end()) { + it->second++; + } else { + reusable_block_counts[key] = 1; + } +} + +void ReduceReusableBlockCount(const MemoryBlock &mem_block, map &reusable_block_counts) { + string key = 
std::to_string(mem_block.Size()); + key += "_" + std::to_string(mem_block.stream_id_); + auto it = reusable_block_counts.find(key); + if (it != reusable_block_counts.end()) { + if (it->second > 0) { + it->second--; + } + } +} + +bool CanReuseBySize(const map &reusable_block_counts, const MemoryBlock &reusable_block, + size_t block_size) { + bool can_reuse = false; + if (reusable_block.Size() == block_size) { + can_reuse = true; + } else { + string key = std::to_string(reusable_block.Size()); + key += "_" + std::to_string(reusable_block.stream_id_); + auto it = reusable_block_counts.find(key); + if ((it != reusable_block_counts.end() && (it->second > kReuseMaxCount)) && (reusable_block.Size() > block_size)) { + can_reuse = true; + GELOGD("Less size mem reuse, reuse block size:%zu, current block size:%zu", reusable_block.Size(), block_size); + } + } + return can_reuse; +} + +bool CanReuseByStream(const std::unordered_set &reuse_stream, MemoryBlock &reusable_block) { + bool can_reuse = false; + if (reuse_stream.find(reusable_block.stream_id_) != reuse_stream.cend()) { + can_reuse = true; + } + return can_reuse; +} + +MemoryBlock *BlockMemAssigner::ApplyMemory(size_t block_size, size_t real_size, MemoryType mem_type, const NodePtr &n, + uint32_t out_index, const vector &workspace_reuse_flag) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(n == nullptr, return nullptr, "Input parameter n is null."); + auto node_op_desc = n->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, return nullptr); + + string ge_disable_reuse_mem_env = "0"; + (void)ge::GetContext().GetOption(kDisableReuseMemory, ge_disable_reuse_mem_env); + if (ge_disable_reuse_mem_env != "1") { + int64_t convergence_label; + bool reuse_mem_flag = true; + if ((workspace_reuse_flag.size() > out_index) && (workspace_reuse_flag[out_index] == false)) { + reuse_mem_flag = false; + } + if (!ge::AttrUtils::GetInt(node_op_desc, kL2FusionDynamicConvergeOp, convergence_label)) { + bool out_flg = false; + 
GE_IF_BOOL_EXEC(n->GetOutDataNodes().empty(), out_flg = true); + if (static_cast(out_index) < n->GetAllOutDataAnchors().size()) { + for (auto in_anchor : n->GetOutDataAnchor(out_index)->GetPeerInDataAnchors()) { + if (IsDirectOutputNode(in_anchor->GetOwnerNode(), in_anchor->GetIdx())) { + out_flg = true; + break; + } else { + break; + } + } + auto op_type = node_op_desc->GetType(); + bool is_reuse_memory = !out_flg && reuse_mem_flag && (op_type != DATA_TYPE) && (op_type != AIPP_DATA_TYPE) && + (op_type != CONSTANT) && (op_type != NETOUTPUT) && (op_type != PROPOSAL) && + (op_type != ANN_DATA_TYPE) && (op_type != ZEROSLIKE) && (op_type != CONSTANTOP); + auto stream_id = node_op_desc->GetStreamId(); + auto map_iter = reusable_streams_map_.find(stream_id); + if (is_reuse_memory && map_iter != reusable_streams_map_.end()) { + for (auto it = reusable_blocks_.begin(); it != reusable_blocks_.end(); ++it) { + MemoryBlock *reusable_block = *it; + bool is_data = false; + for (auto node_type : reusable_block->NodeTypeIndexList()) { + GE_IF_BOOL_EXEC(node_type.node_ != nullptr, string type = node_type.node_->GetType(); + bool flag = (type == DATA_TYPE) || (type == ENTER) || (type == REFENTER) || + (type == AIPP_DATA_TYPE) || (type == NEXTITERATION) || + (type == REFNEXTITERATION); + GE_IF_BOOL_EXEC(flag, is_data = true; break;);); + } + GE_IF_BOOL_EXEC(is_data == true, continue); + + // A node can reuse blocks of the same stream and preorder streams + if (CanReuseBySize(reusable_block_counts_, *reusable_block, block_size) && + CanReuseByStream(map_iter->second, *reusable_block)) { + GELOGD("Cross stream mem reuse, target stream:%ld, current stream:%ld", reusable_block->stream_id_, + stream_id); + reusable_block->AddNodeTypeIndex({n, mem_type, out_index}, real_size); + reusable_block->ref_count_++; + ReduceReusableBlockCount(*reusable_block, reusable_block_counts_); + reusable_blocks_.erase(it); + return reusable_block; + } + } + } + } + } + } + + auto block = new (std::nothrow) 
MemoryBlock(block_size); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(block == nullptr, return nullptr, "new an object failed."); + + block->Init(real_size, mem_type, n, out_index); + block->stream_id_ = node_op_desc->GetStreamId(); + block->ref_count_++; + memory_blocks_.emplace_back(block); + return block; +} + +MemoryBlock *BlockMemAssigner::ApplyOutMemory(const NodePtr &n, uint32_t index, const vector &ranges) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(n == nullptr, return nullptr, "input node is null."); + auto node_op_desc = n->GetOpDesc(); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(node_op_desc == nullptr, return nullptr, "node_op_desc is null."); + MemoryBlock *block = nullptr; + bool reuse_input = false; + uint32_t reuse_input_index = 0; + uint32_t size = 0; + auto output_op_desc = node_op_desc->GetOutputDescPtr(index); + if (output_op_desc != nullptr) { + GE_IF_BOOL_EXEC(ge::TensorUtils::GetReuseInput(*output_op_desc, reuse_input) != SUCCESS, + GELOGI("Get reuse_input failed")); + GE_IF_BOOL_EXEC(ge::TensorUtils::GetReuseInputIndex(*output_op_desc, reuse_input_index) != SUCCESS, + GELOGI("Get reuse_input_index failed")); + GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(*output_op_desc, size) != SUCCESS, GELOGI("Get size failed")); + } + + if (reuse_input) { + auto in_data_anchor = n->GetInDataAnchor(reuse_input_index); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(in_data_anchor == nullptr, return nullptr, "In data anchor is null."); + auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(peer_out_anchor == nullptr, return nullptr, "Peer out data anchor is null."); + auto reuse_src_node = peer_out_anchor->GetOwnerNode(); + auto reuse_src_node_output_index = static_cast(peer_out_anchor->GetIdx()); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG( + (node_out_blocks_.empty() || + (node_out_blocks_[reuse_src_node->GetName()].size() <= reuse_src_node_output_index)), + return nullptr, "node_out_block of node_out_block[reuse_src_node->Name()] is empty!"); + block = 
node_out_blocks_[reuse_src_node->GetName()][reuse_src_node_output_index]; + } else { + auto block_size = GetBlockSize(size, ranges); + vector workspace_reuse_flag; + block = ApplyMemory(block_size, size, kOutput, n, index, workspace_reuse_flag); + } + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(block == nullptr, return nullptr, "Block is nullptr."); + GE_IF_BOOL_EXEC(index >= n->GetAllOutDataAnchors().size(), GELOGE(FAILED, "index is out of range."); return nullptr); + auto out_data_anchor = n->GetOutDataAnchor(index); + GE_IF_BOOL_EXEC(out_data_anchor == nullptr, GELOGE(FAILED, "Out data anchor is nullptr."); return nullptr); + int out_count = 0; + for (const auto &in_anchor : out_data_anchor->GetPeerInDataAnchors()) { + auto owner_node = in_anchor->GetOwnerNode(); + auto op_desc = owner_node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, continue); + Params *instance = Params::Instance(); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(instance == nullptr, return nullptr, "Params instance is nullptr."); + if (!((instance->GetTarget() == TARGET_TYPE_TINY) && (op_desc->GetType() == NETOUTPUT))) { + out_count++; + } + } + int out_count_reuse_input = block->ref_count_; + for (const auto &in_anchor : out_data_anchor->GetPeerInDataAnchors()) { + auto owner_node = in_anchor->GetOwnerNode(); + GE_IF_BOOL_EXEC(owner_node == nullptr, continue); + auto op_desc = owner_node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, continue); + for (uint32_t i = 0; i < static_cast(op_desc->GetOutputsSize()); i++) { + bool dst_reuse_input = false; + uint32_t dst_reuse_input_index = 0; + auto owner_node_op_desc = op_desc->GetOutputDescPtr(i); + GE_IF_BOOL_EXEC(owner_node_op_desc == nullptr, continue); + GE_IF_BOOL_EXEC(ge::TensorUtils::GetReuseInput(*owner_node_op_desc, dst_reuse_input) != SUCCESS, + GELOGI("Get dst_reuse_input failed")); + GE_IF_BOOL_EXEC(ge::TensorUtils::GetReuseInputIndex(*owner_node_op_desc, dst_reuse_input_index) != SUCCESS, + GELOGI("Get dst_reuse_input_index failed")); + if 
(dst_reuse_input && (dst_reuse_input_index == static_cast(in_anchor->GetIdx()))) { + block->AddNodeTypeIndex({owner_node, kOutput, i}, block->Size()); + out_count_reuse_input += 1; + reuse_input = true; + } + } + } + block->ref_count_ = reuse_input ? out_count_reuse_input + out_count - 1 : out_count; + return block; +} + +bool IsOutputBlock(const ge::InDataAnchorPtr &in_data_anchor) { + auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, GELOGE(FAILED, "Peer out anchor is nullptr."); return false); + auto src = peer_out_anchor->GetOwnerNode(); + int32_t index = peer_out_anchor->GetIdx(); + auto iter = domi::GetContext().out_nodes_map.find(src->GetName()); + if (iter != domi::GetContext().out_nodes_map.end()) { + for (auto id : iter->second) { + if (index == id) { + return true; + } + } + } + return false; +} + +void BlockMemAssigner::ReleaseMemory(MemoryBlock *to_release, vector &reusable_memory) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(to_release == nullptr, return, "Input parameter to_release is null."); + GE_CHK_TRUE_EXEC_INFO(to_release->ref_count_ <= 0, return, "Release memory"); + --to_release->ref_count_; + if (to_release->ref_count_ == 0) { + reusable_memory.emplace_back(to_release); + AddReusableBlockCount(*to_release, reusable_block_counts_); + } +} + +void BlockMemAssigner::ReleaseMemorys(const vector &to_releases, + vector &reusable_memory) { + for (auto mem_block : to_releases) { + ReleaseMemory(mem_block, reusable_memory); + } +} + +void BlockMemAssigner::ReleaseInputNodeOutMemory(const NodePtr &n, + const unordered_map> &node_out_blocks, + vector &reusable_memory) { + for (const auto &in_anchor : n->GetAllInDataAnchors()) { + if ((in_anchor->GetPeerOutAnchor() == nullptr) || + (in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc() == nullptr) || + (n->GetOpDesc() == nullptr)) { + return; + } + GE_IF_BOOL_EXEC(IsOutputBlock(in_anchor), continue); + + auto node_name = 
in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetName(); + + GE_IF_BOOL_EXEC((in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetType() == CONSTANT) || + (in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetType() == FASTRCNNPREDICTIONS) || + (in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetType() == CONSTANTOP), + continue); + + auto it = node_out_blocks.find(node_name); + if (it == node_out_blocks.end()) { + continue; + } + for (auto block : it->second) { + const vector &node_type_indexs = block->NodeTypeIndexList(); + if (node_type_indexs.empty()) { + continue; + } + GELOGD("node_type_indexs: %d, %s", node_type_indexs.back().index_, + node_type_indexs.back().node_->GetName().c_str()); + + if ((node_type_indexs.back().node_ == in_anchor->GetPeerOutAnchor()->GetOwnerNode()) && + (node_type_indexs.back().index_ == static_cast(in_anchor->GetPeerOutAnchor()->GetIdx())) && + n->GetOpDesc()->GetStreamId() == block->stream_id_) { + ReleaseMemory(block, reusable_memory); + } + } + } +} + +/// +/// @ingroup domi +/// @brief traverse all nodes outputs and workspace in need, apply memory block considering memory reuse +/// @param [in/out] ranges memory size provided +/// @return Status result +/// +void BlockMemAssigner::AssignMemoryWithReuse(vector &ranges) { + // Init reusable streams map + InitReusableStreamMap(); + string ge_disable_reuse_mem_env = "0"; + (void)ge::GetContext().GetOption("ge.exec.disableReuseMemory", ge_disable_reuse_mem_env); + + if (ge_disable_reuse_mem_env == "1") { + GEEVENT("Reuse memory close"); + } else { + GEEVENT("Reuse memory open"); + } + + for (const NodePtr &n : compute_graph_->GetDirectNode()) { + auto node_op_desc = n->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue); + int64_t stream_id = node_op_desc->GetStreamId(); + + // Allocate memory for the current node and release node memory of the same size in the workspace + GE_IF_BOOL_EXEC(ge_disable_reuse_mem_env != "1", + ReleaseMemorys(stream_workspace_blocks_[stream_id], 
reusable_blocks_);) + for (uint32_t i = 0; i < static_cast(node_op_desc->GetOutputsSize()); i++) { + uint32_t size = 0; + auto output_op_desc = node_op_desc->GetOutputDescPtr(i); + if (output_op_desc != nullptr) { + GE_IF_BOOL_EXEC(ge::TensorUtils::GetSize(*output_op_desc, size) != SUCCESS, GELOGI("Get size failed")); + } + if ((size == 0) || CheckIsZeroMemNodeType(n->GetType())) { + zero_memory_list_.emplace_back(n, kOutput, i); + continue; + } + MemoryBlock *mem_block = ApplyOutMemory(n, i, ranges); + if (mem_block != nullptr) { + node_out_blocks_[n->GetName()].emplace_back(mem_block); + } + } + + stream_workspace_blocks_[stream_id].clear(); + vector temp; + GetNodeWorkSpaceSize(n, temp); + vector workspace_reuse_flag; + GE_IF_BOOL_EXEC(!ge::AttrUtils::GetListBool(node_op_desc, kAttrNameWorkspaceReuseFlag, workspace_reuse_flag), + GELOGI("OP %s get workspace_reuse_flag attr failed", node_op_desc->GetName().c_str())); + for (size_t i = 0; i < temp.size(); i++) { + if (temp[i] == 0) { + zero_memory_list_.emplace_back(n, kWorkspace, static_cast(i)); + continue; + } + MemoryBlock *mem_block = + ApplyMemory(GetBlockSize(static_cast(temp[i]), ranges), static_cast(temp[i]), kWorkspace, n, + static_cast(i), workspace_reuse_flag); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(mem_block == nullptr, continue, "failed to apply memory block."); + stream_workspace_blocks_[stream_id].emplace_back(mem_block); + } + ReleaseInputNodeOutMemory(n, node_out_blocks_, reusable_blocks_); + } + + GELOGD("Assigned memory blocks:"); + for (auto mem_block : memory_blocks_) { + GELOGD("%s", mem_block->String().c_str()); + (void)mem_block; // Fix warning + } + + GE_IF_BOOL_EXEC(!(ge_disable_reuse_mem_env == "1"), MergeDynamicBatchBlocks();) + ResizeMemoryBlocks(); + + GELOGD("Memory blocks after resize:"); + for (auto mem_block : memory_blocks_) { + GELOGD("%s", mem_block->String().c_str()); + (void)mem_block; // Fix warning + } +} + +void BlockMemAssigner::GetNodeWorkSpaceSize(const NodePtr &node, vector 
&workspace_memory) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(node->GetOpDesc() == nullptr, return, "Op desc is null."); + vector workspace_byte_nums = node->GetOpDesc()->GetWorkspaceBytes(); + + GELOGD("GetNodeWorkSpaceSize: node[%s] size:%zu", node->GetOpDesc()->GetName().c_str(), workspace_byte_nums.size()); + for (int64_t byte_size : workspace_byte_nums) { + workspace_memory.emplace_back(byte_size); + GELOGD("GetNodeWorkSpaceSize: push back size:%ld", byte_size); + } +} + +// descending order +static bool CompareBlockMaxSize(MemoryBlock *left, MemoryBlock *right) { + if (left == nullptr || right == nullptr) { + return false; + } + auto left_max_size = std::max_element(left->RealSizeList().begin(), left->RealSizeList().end()); + if (left_max_size != left->RealSizeList().end()) { + auto right_max_size = std::max_element(right->RealSizeList().begin(), right->RealSizeList().end()); + if (right_max_size == right->RealSizeList().end() || (*left_max_size > *right_max_size)) { + return true; + } + } + return false; +} + +void MergeBlocks(std::vector &dest, std::vector &src) { + for (size_t i = 0; i < dest.size(); ++i) { + if (i >= src.size()) { + return; + } + if (dest[i] != nullptr && src[i] != nullptr) { + for (size_t j = 0; j < src[i]->NodeTypeIndexList().size(); ++j) { + dest[i]->AddNodeTypeIndex(src[i]->NodeTypeIndexList()[j], src[i]->RealSizeList()[j]); + src[i]->deleted_block_ = true; + } + } + } +} + +void BlockMemAssigner::MergeDynamicBatchBlocks() { + std::map> dynamic_batch_blocks; + for (auto block : memory_blocks_) { + if (block == nullptr) { + continue; + } + std::string batch_label; + if (block->IsSameLabel(batch_label)) { + dynamic_batch_blocks[batch_label].emplace_back(block); + } + } + + auto it = dynamic_batch_blocks.begin(); + auto it_max = it; + + // find max block counts + for (; it != dynamic_batch_blocks.end(); ++it) { + if (it->second.size() > it_max->second.size()) { + it_max = it; + } + std::sort(it->second.begin(), it->second.end(), 
CompareBlockMaxSize); + } + if (it_max != dynamic_batch_blocks.end()) { + GELOGI("MergeDynamicBatch %s block counts %zu", it_max->first.c_str(), it_max->second.size()); + } + for (it = dynamic_batch_blocks.begin(); it != dynamic_batch_blocks.end(); ++it) { + if (it != it_max) { + GELOGI("MergeDynamicBatch from %s to %s", it->first.c_str(), it_max->first.c_str()); + MergeBlocks(it_max->second, it->second); + } + } +} + +/// +/// @ingroup domi_omg +/// @brief traverse memory size, resize, calculate offset +/// @param [in&out] memory_blocks_ memory block, after calculating offset +/// +void BlockMemAssigner::ResizeMemoryBlocks() { + for (auto &memory_block : memory_blocks_) { + if (memory_block == nullptr || memory_block->deleted_block_) { + continue; + } + memory_block->Resize(); + memory_block->SetHeadOffset(mem_offset_); + mem_offset_ += memory_block->Size(); + memory_block->SetTailOffset(mem_offset_ - 1); + } +} + +/// +/// @ingroup domi +/// @brief given NodeTypeIndex, set offset in Op's OpDef +/// @param [in&out] node_type_index +/// @param [in] offset offset to be set +/// @return Status result +/// +void SetOffsetSize(const NodeTypeIndex &node_type_index, int64_t offset) { + ge::OpDescPtr op_desc = node_type_index.node_->GetOpDesc(); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(op_desc == nullptr, return, "op_desc is null."); + if (node_type_index.mem_type_ == kOutput) { + vector output_list = op_desc->GetOutputOffset(); + for (auto i = static_cast(output_list.size()); i < node_type_index.index_ + 1; i++) { + output_list.emplace_back(kInvalidOffset); + } + if (output_list.empty()) { + GELOGW("Empty output"); + return; + } + + if ((op_desc->GetType() == DATA) || (op_desc->GetType() == AIPP_DATA_TYPE) || (op_desc->GetType() == MULTISHAPE) || + (op_desc->GetType() == NETOUTPUT)) { + if ((output_list[node_type_index.index_] == kInvalidOffset) || (output_list[node_type_index.index_] < offset)) { + output_list.at(node_type_index.index_) = offset; + } + } else { + 
output_list.at(node_type_index.index_) = offset; + } + + op_desc->SetOutputOffset(output_list); + } else if (node_type_index.mem_type_ == kWorkspace) { + vector workspace_list; + workspace_list = op_desc->GetWorkspace(); + for (auto i = static_cast(workspace_list.size()); i < node_type_index.index_ + 1; i++) { + workspace_list.emplace_back(kInvalidOffset); + } + workspace_list.at(node_type_index.index_) = offset; + op_desc->SetWorkspace(workspace_list); + } +} + +void BlockMemAssigner::SetOpMemOffset() { + for (MemoryBlock *memory_block : memory_blocks_) { + if (memory_block == nullptr || memory_block->deleted_block_) { + continue; + } + for (const NodeTypeIndex &node_type_index : memory_block->NodeTypeIndexList()) { + SetOffsetSize(node_type_index, memory_block->HeadOffset()); + } + } + for (const NodeTypeIndex &node_type_index : zero_memory_list_) { + SetOffsetSize(node_type_index, 0); + } +} + +Status BlockMemAssigner::Assign() { + vector ranges; + if (GetMemoryRanges(ranges) != SUCCESS) { + GELOGE(FAILED, "GetMemoryRanges Fail!"); + return FAILED; + } + GE_IF_BOOL_EXEC(ranges.empty(), return SUCCESS); + AssignMemoryWithReuse(ranges); + + SetOpMemOffset(); + + return SUCCESS; +} + +void BlockMemAssigner::InitReusableStreamMap() { + // save a stream's id and its first Node and last node. + map> stream_head_tail_node_map; + // save a stream's id and its directly child stream. + map> stream_dependency_map; + // save a stream's id and its occupied memory. + unordered_map stream_mem_map; + + // Find streams's first and last node. + FindHeadAndTailNodesForStream(stream_head_tail_node_map, stream_mem_map); + + // If streamB's first node is the output of streamA's last node, then B depends on A. 
+ FindDependentStream(stream_head_tail_node_map, stream_dependency_map); + + // If a stream has more than one child stream, select the one that occupies the closest memory + for (const auto &iter : stream_dependency_map) { + if (iter.second.empty()) { + continue; + } + int64_t target_size = stream_mem_map[iter.first]; + int64_t min_size_gap = LONG_MAX; + int64_t target_reuse_stream_id = 0; + for (auto id : iter.second) { + if (labs(stream_mem_map[id] - target_size) < min_size_gap) { + target_reuse_stream_id = id; + min_size_gap = labs(stream_mem_map[id] - target_size); + } + } + // If b can reuse a, then b should also be able to reuse all blocks that a can reuse. + reusable_streams_map_[target_reuse_stream_id].insert(reusable_streams_map_[iter.first].begin(), + reusable_streams_map_[iter.first].end()); + } +} + +void BlockMemAssigner::FindHeadAndTailNodesForStream(map> &stream_head_tail_node_map, + unordered_map &stream_mem_map) { + for (const auto &n : compute_graph_->GetDirectNode()) { + GE_IF_BOOL_EXEC(n->GetOpDesc() == nullptr, GELOGW("Op desc is nullptr"); continue); + auto stream_id = n->GetOpDesc()->GetStreamId(); + // traverse to find streams's first and last node. + if (stream_head_tail_node_map.find(stream_id) == stream_head_tail_node_map.end()) { + stream_head_tail_node_map[stream_id] = std::make_pair(n, n); + reusable_streams_map_[stream_id].insert(stream_id); // a node can reuse blocks from same stream. + } else { + stream_head_tail_node_map[stream_id].second = n; + } + + // Accumulate the output size of the node in the stream. + for (size_t i = 0; i < n->GetOpDesc()->GetOutputsSize(); i++) { + uint32_t size = 0; + if (ge::TensorUtils::GetSize(*n->GetOpDesc()->GetOutputDescPtr(static_cast(i)), size) != SUCCESS) { + GELOGW("Get output size failed!"); + continue; + } + stream_mem_map[stream_id] += size; + } + // Accumulate the workspace size of the node in the stream. 
+ for (auto size : n->GetOpDesc()->GetWorkspaceBytes()) { + stream_mem_map[stream_id] += size; + } + } +} + +void BlockMemAssigner::FindDependentStream(map> &stream_head_tail_node_map, + map> &stream_dependency_map) { + for (const auto &it1 : stream_head_tail_node_map) { + for (const auto &it2 : stream_head_tail_node_map) { + if (it1 == it2) { + continue; + } + NodePtr pre_node = it1.second.second; + NodePtr post_node = it2.second.first; + for (const auto &out_node : pre_node->GetOutNodes()) { + if ((out_node->GetOpDesc() == nullptr) || (post_node->GetOpDesc() == nullptr) || + (pre_node->GetOpDesc() == nullptr)) { + continue; + } + if (out_node->GetOpDesc()->GetId() == post_node->GetOpDesc()->GetId()) { + stream_dependency_map[pre_node->GetOpDesc()->GetStreamId()].insert(post_node->GetOpDesc()->GetStreamId()); + } + } + } + } +} + +bool BlockMemAssigner::CheckIsZeroMemNodeType(const string &node_type) const { + return (node_type == VARIABLE) || (node_type == CONSTANT) || (node_type == MULTISHAPE) || + (node_type == HCOMBROADCAST) || (node_type == HCOMALLREDUCE) || (node_type == CONSTANTOP) || + (node_type == ASSIGNADD) || (node_type == ASSIGNSUB) || (node_type == ASSIGN); +} +} // namespace ge diff --git a/src/ge/graph/build/memory/block_mem_assigner.h b/src/ge/graph/build/memory/block_mem_assigner.h new file mode 100644 index 00000000..4a019137 --- /dev/null +++ b/src/ge/graph/build/memory/block_mem_assigner.h @@ -0,0 +1,272 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_BUILD_MEMORY_BLOCK_MEM_ASSIGNER_H_ +#define GE_GRAPH_BUILD_MEMORY_BLOCK_MEM_ASSIGNER_H_ + +#include +#include +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/build/memory/mem_assigner.h" +#include "graph/compute_graph.h" + +namespace ge { +enum MemoryType { kOutput, kWorkspace }; + +struct NodeTypeIndex { + NodeTypeIndex(ge::NodePtr node, MemoryType mem_type, uint32_t index) + : node_(std::move(node)), mem_type_(mem_type), index_(index) {} + + ge::NodePtr node_ = nullptr; + MemoryType mem_type_ = kOutput; + uint32_t index_ = 0; +}; + +class MemoryBlock { + public: + explicit MemoryBlock(size_t block_size) + : ref_count_(0), + stream_id_(0), + deleted_block_(false), + block_size_(block_size), + head_offset_(0), + tail_offset_(0) {} + + MemoryBlock(const MemoryBlock &) = delete; + + MemoryBlock &operator=(const MemoryBlock &) = delete; + + ~MemoryBlock() { node_type_index_list_.clear(); } + + void Init(size_t real_size, MemoryType type, const ge::NodePtr &node, uint32_t out_index) { + real_size_list_.emplace_back(real_size); + node_type_index_list_.emplace_back(node, type, out_index); + } + size_t Size() const { return block_size_; } + + void SetHeadOffset(size_t offset) { head_offset_ = offset; } + + void SetTailOffset(size_t offset) { tail_offset_ = offset; } + + size_t HeadOffset() const { return head_offset_; } + + size_t TailOffset() const { return tail_offset_; } + + void AddNodeTypeIndex(const NodeTypeIndex &node_type_index, size_t real_size) { + node_type_index_list_.emplace_back(node_type_index); + real_size_list_.emplace_back(real_size); + } + + const std::vector &NodeTypeIndexList() const { return node_type_index_list_; } + const std::vector &RealSizeList() const { return real_size_list_; } + + void Resize(); + + 
std::string String(); + + bool IsSameLabel(std::string &first_batch_label); + + int ref_count_; + int64_t stream_id_; + bool deleted_block_; + + private: + size_t block_size_; + std::vector real_size_list_; + size_t head_offset_; + size_t tail_offset_; + std::vector node_type_index_list_; +}; + +class BlockMemAssigner : public MemAssigner { + public: + explicit BlockMemAssigner(ge::ComputeGraphPtr compute_graph); + + BlockMemAssigner(const BlockMemAssigner &) = delete; + + BlockMemAssigner &operator=(const BlockMemAssigner &) = delete; + + ~BlockMemAssigner() override; + + Status Assign() override; + + size_t GetMemOffset() const { return mem_offset_; } + + /// + /// @ingroup domi + /// @brief memory size fixed for reuse. get memory range + /// @param [out] ranges return memory range + /// @return Status result + /// + virtual Status GetMemoryRanges(std::vector &ranges) = 0; + /// + /// @ingroup domi + /// @brief traverse all nodes' outputs and needed workspace mem, apply memory, consider reuse memory + /// @param [in] ranges memory range provided + /// @author + /// + void AssignMemoryWithReuse(std::vector &ranges); + + void SetOpMemOffset(); + + protected: + /// + /// @ingroup domi + /// @brief traverse all memory size, resize, and calculate offset + /// @param [in&out] memory_blocks memory size, resize and calculate memory address after offset + /// + void ResizeMemoryBlocks(); + + void GetOutAndWorkSpaceMem(std::vector &all_memory_size); + + void GetNodeWorkSpaceSize(const ge::NodePtr &node, std::vector &workspace_memory); + + /// + /// @ingroup GE + /// @brief Traversing the compute_graph_ to find the reuse relationship between streams + /// @param [in] reusable_stream_map map to save stream_id and its reusable stream_ids + /// @return void + /// @author + /// + void InitReusableStreamMap(); + + /// + /// @ingroup GE + /// @brief Traversing the compute_graph_ to find the first and last nodeptr of a stream. 
+ /// @param [in] stream_head_tail_node_map map to save stream_id and its first and last nodeptr. + /// @param [in] stream_mem_map map to save stream_id and its memory capacity. + /// @return void + /// @author + /// + void FindHeadAndTailNodesForStream(std::map> &stream_head_tail_node_map, + std::unordered_map &stream_mem_map); + + /// + /// @ingroup GE + /// @brief Traversing the compute_graph_ to find the reuse relationship between streams. + /// @param [in] stream_head_tail_node_map map to save stream_id and its first and last nodeptr. + /// @param [in] stream_dependency_map map to save stream_id and stream_ids depends on it. + /// @return void + /// @author + /// + void FindDependentStream(std::map> &stream_head_tail_node_map, + std::map> &stream_dependency_map); + + /// + /// @ingroup GE + /// @brief Determine whether it is the type of zero memory node. + /// @param [in] node type. + /// @return bool true: is zero memory node; false: is not zero memory node + /// @author + /// + bool CheckIsZeroMemNodeType(const std::string &node_type) const; + + size_t mem_offset_; + + ge::ComputeGraphPtr compute_graph_; + + std::vector memory_blocks_; + + std::vector zero_memory_list_; + + private: + /// + /// @ingroup GE + /// @brief Traversing the compute_graph_ to apply for output memory while considering reuse + /// @param [in] n node in compute_graph_ + /// @param [in] index output node index + /// @param [in] ranges available memory specifications + /// @return MemoryBlock* + /// @author + /// + MemoryBlock *ApplyOutMemory(const ge::NodePtr &n, uint32_t index, const std::vector &ranges); + + /// + /// @ingroup GE + /// @brief Traversing the compute_graph_ to apply for memory while considering reuse + /// @param [in] block_size applied memory block size + /// @param [in] real_size actual memory size required + /// @param [in] type output or workspace + /// @param [in] n node in compute_graph_ + /// @param [in] out_index output node index + /// @param [in] 
workspace_reuse_flag reuse flag for workspace + /// @return MemoryBlock* + /// @author + /// + MemoryBlock *ApplyMemory(size_t block_size, size_t real_size, MemoryType mem_type, const ge::NodePtr &n, + uint32_t out_index, const std::vector &workspace_reuse_flag); + + /// + /// @ingroup GE + /// @brief Release memory block to reusable list + /// @param [in] to_release memory block to be released + /// @param [in] reusable_memory reusable list + /// @return void + /// @author + /// + void ReleaseMemory(MemoryBlock *to_release, vector &reusable_memory); + + /// + /// @ingroup GE + /// @brief Release memory blocks to reusable list + /// @param [in] to_releases memory blocks to be released + /// @param [in] reusable_memory reusable list + /// @return void + /// @author + /// + void ReleaseMemorys(const vector &to_releases, vector &reusable_memory); + + /// + /// @ingroup GE + /// @brief Release memory block to reusable list + /// @param [in] n node in compute_graph_ + /// @param [in] node_out_blocks output memory blocks for ops + /// @param [in] reusable_memory reusable list + /// @return void + /// @author + /// + void ReleaseInputNodeOutMemory(const ge::NodePtr &n, + const std::unordered_map> &node_out_blocks, + vector &reusable_memory); + + /// + /// @ingroup GE + /// @brief Merge memory blocks between different batchs + /// @return void + /// @author + /// + void MergeDynamicBatchBlocks(); + + std::vector reusable_blocks_; + + std::map reusable_block_counts_; + + std::unordered_map> stream_workspace_blocks_; + + std::unordered_map> node_out_blocks_; + + // save stream_id and reusable stream_ids + std::unordered_map> reusable_streams_map_; +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_MEMORY_BLOCK_MEM_ASSIGNER_H_ diff --git a/src/ge/graph/build/memory/graph_mem_assigner.cc b/src/ge/graph/build/memory/graph_mem_assigner.cc new file mode 100644 index 00000000..9da8b006 --- /dev/null +++ b/src/ge/graph/build/memory/graph_mem_assigner.cc @@ -0,0 +1,893 @@ +/** + * 
Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/memory/graph_mem_assigner.h" + +#include + +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/build/memory/hybrid_mem_assigner.h" +#include "graph/build/memory/var_mem_assign_util.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_attr_value.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" + +namespace { +const int kAllInputAddrIsAtomic = -1; +} // namespace +namespace ge { +Status VariableMemoryAssigner::Assign() { + Status result = ge::VarMemAssignUtil::AssignConstantOpMemory(compute_graph_); + if (result != ge::SUCCESS) { + return result; + } + + result = ge::VarMemAssignUtil::AssignVarMemory(compute_graph_); + if (result != ge::SUCCESS) { + return result; + } + return ge::SUCCESS; +} + +Status VariableMemoryAssigner::AssignVarAttr2Nodes() { + Status result = ge::VarMemAssignUtil::AssignVarAttr2Nodes(compute_graph_); + if (result != ge::SUCCESS) { + return result; + } + return ge::SUCCESS; +} + +Status GraphMemoryAssigner::AssignMemory() { + auto mem_assigner = std::unique_ptr(new (std::nothrow) ge::HybridMemAssigner(compute_graph_)); + if (mem_assigner == nullptr) { + GELOGE(ge::FAILED, "Alloc HybridMemAssigner failed."); + return ge::FAILED; + } + if (mem_assigner->Assign() != 
ge::SUCCESS) { + GELOGE(ge::FAILED, "Memory assigner failed"); + return ge::FAILED; + } + MemoryOffset memory_offset(RT_MEMORY_HBM, mem_assigner->GetMemOffset()); + memory_offset_.push_back(memory_offset); + + auto session_id = compute_graph_->GetSessionID(); + int64_t var_size_before_assign = ge::VarManager::Instance(session_id)->GetVarMemSize(RT_MEMORY_HBM); + auto variable_assigner = + std::unique_ptr(new (std::nothrow) ge::VariableMemoryAssigner(compute_graph_)); + if (variable_assigner == nullptr) { + GELOGE(ge::FAILED, "Alloc VariableMemoryAssigner failed."); + return ge::FAILED; + } + + if (variable_assigner->Assign() != ge::SUCCESS) { + return ge::FAILED; + } + int64_t var_size_assign = ge::VarManager::Instance(session_id)->GetVarMemSize(RT_MEMORY_HBM) - var_size_before_assign; + GELOGI("GraphMemoryAssigner::AssignMemory variable size = %ld", var_size_assign); + return ge::SUCCESS; +} + +ge::Status GraphMemoryAssigner::AssignVarAttr2Nodes() { + auto variable_assigner = + std::unique_ptr(new (std::nothrow) ge::VariableMemoryAssigner(compute_graph_)); + if (variable_assigner == nullptr) { + GELOGE(ge::FAILED, "Alloc VariableMemoryAssigner failed."); + return ge::FAILED; + } + if (variable_assigner->AssignVarAttr2Nodes() != ge::SUCCESS) { + return ge::FAILED; + } + return ge::SUCCESS; +} + +Status GraphMemoryAssigner::ReAssignMemory(bool is_loop_graph, size_t &mem_offset) { + if (memory_offset_.empty()) { + GELOGE(FAILED, "memory_offset_ is empty."); + return ge::FAILED; + } + + GE_CHK_STATUS_RET(ReAssignContinuousMemory(is_loop_graph), "ReAssignContinuousMemory Failed!"); + + GE_CHK_STATUS_RET(ReAssignVirtualConcatMemory(), "ReAssignVirtualConcatMemory Failed!"); + + GE_CHK_STATUS_RET(ReAssignMergeMemory(), "ReAssignMergeMemory Failed!"); + + GE_CHK_STATUS_RET(ReAssignAtomicMemory(is_loop_graph), "ReAssignAtomicMemory Failed!"); + + mem_offset = memory_offset_[0].mem_offset_; + + if (mem_offset > VarManager::Instance(0)->GetGraphMemoryMaxSize()) { + 
GELOGE(ge::FAILED, "Current memoffset %zu is greater than memory manager malloc max size %zu", mem_offset, + VarManager::Instance(0)->GetGraphMemoryMaxSize()); + return ge::FAILED; + } + return SUCCESS; +} + +Status GraphMemoryAssigner::ReAssignContinuousMemory(bool is_loop_graph) { + GELOGI("Begin to reassign continuous memory"); + Status ret; + for (auto &node : compute_graph_->GetDirectNode()) { + // Get the continuous input type of the node, default is false + bool is_input_continuous = false; + GE_CHECK_NOTNULL(node->GetOpDesc()); + // If GetBool fail, is_input_continuous is false. + (void)ge::AttrUtils::GetBool(node->GetOpDesc(), ATTR_NAME_CONTINUOUS_INPUT, is_input_continuous); + int64_t mem_clean_start = memory_offset_[0].mem_offset_; + // Assign continuous input memory + if (is_input_continuous) { + ret = AssignContinuousInputMemory(node); + if (ret != ge::SUCCESS) { + GELOGE(ret, "Assign continuous input memory failed!"); + return ret; + } + + memory_offset_[0].mem_offset_ += kMemAlignSize; + + // Clean up atomic address, eg, hcom node + vector input_indexes; + // If GetListInt fail, input_indexes is empty. 
+ (void)ge::AttrUtils::GetListInt(node->GetOpDesc(), ATOMIC_ATTR_INPUT_INDEX, input_indexes); + + if (!input_indexes.empty() && input_indexes[0] == kAllInputAddrIsAtomic) { + // check whether there is an atomic conflict between the current node and the peer out node + if (!CheckInputIsSupportAtomic(node)) { + GELOGE(ge::FAILED, + "There is an atomic conflict between the current node and the peer out node, not supported!"); + return ge::FAILED; + } else if (is_loop_graph) { + GE_CHK_STATUS_RET(SetLoopGraphAtomicAttr(node, mem_clean_start)); + } else { + int64_t mem_clean_size = memory_offset_[0].mem_offset_ - mem_clean_start; + GE_CHK_STATUS_RET(SetAtomicCleanAttr(nullptr, mem_clean_start, mem_clean_size), "SetAtomicCleanAttr failed."); + } + } + } + + // Get the reference type of the node, default is false + bool is_ref = false; + // If GetBool fail, is_ref is false. + (void)ge::AttrUtils::GetBool(node->GetOpDesc(), ATTR_NAME_REFERENCE, is_ref); + + // Get the continuous output type of the node, default is false + bool is_output_continuous = false; + // If GetBool fail, is_output_continuous is false. + (void)ge::AttrUtils::GetBool(node->GetOpDesc(), ATTR_NAME_CONTINUOUS_OUTPUT, is_output_continuous); + + // If the output is ref type and refers to the ref of an input, the name of the output + // and the input are the same. 
Ge encounters ref type, finds matching relationship according + // to the names of input and output, and allocates the same memory address, eg: HCOMBroadcast + if (is_ref) { + ret = AssignReferenceMemory(node); + if (ret != ge::SUCCESS) { + GELOGE(ret, "Assign reference memory failed!"); + return ret; + } + } else if (is_output_continuous) { // Assign continuous output memory + ret = AssignContinuousOutputMemory(node); + if (ret != ge::SUCCESS) { + GELOGE(ret, "Assign reference memory failed!"); + return ret; + } + } + } + + GELOGI("After reassign continuous memory, memoffset = %zu.", memory_offset_[0].mem_offset_); + return ge::SUCCESS; +} + +Status GraphMemoryAssigner::AssignContinuousInputMemory(const ge::NodePtr &node) { + GELOGI("Current node %s needs continuous input.", node->GetName().c_str()); + for (auto &in_data_anchor : node->GetAllInDataAnchors()) { + auto peer_out_data_anchor = in_data_anchor->GetPeerOutAnchor(); + if (peer_out_data_anchor == nullptr) { + continue; + } + auto peer_op_desc = peer_out_data_anchor->GetOwnerNode()->GetOpDesc(); + GE_IF_BOOL_EXEC(peer_op_desc == nullptr, continue); + bool is_peer_output_continuous = false; + // If GetBool fail, is_peer_output_continuous is false. + (void)ge::AttrUtils::GetBool(peer_op_desc, ATTR_NAME_CONTINUOUS_OUTPUT, is_peer_output_continuous); + + // Get peer node output size, if size == 1(peer node has only one output), continuous input of the node and + // continuous output of the previous node is the same, we can support it. If size != 1, there may be + // conflict between the two, we can not support it. + auto peer_output_size = peer_op_desc->GetOutputsSize(); + if (is_peer_output_continuous && (peer_output_size != 1)) { + GELOGE(ge::PARAM_INVALID, + "Current node %s requires continuous input, while the previous node %s requires " + "continuous output. There may be conflict between the two. 
This node is not supported now.", + node->GetOpDesc()->GetName().c_str(), peer_op_desc->GetName().c_str()); + return ge::PARAM_INVALID; + } + + bool is_peer_reference = false; + // If GetBool fail, is_peer_reference is false. + (void)ge::AttrUtils::GetBool(peer_op_desc, ATTR_NAME_REFERENCE, is_peer_reference); + + if (is_peer_reference) { + GELOGE(ge::PARAM_INVALID, + "Current node %s requires continuous input, while the previous node %s requires " + "reference. There may be conflict between the two. This node is not supported now.", + node->GetOpDesc()->GetName().c_str(), peer_op_desc->GetName().c_str()); + return ge::PARAM_INVALID; + } + + vector output_list = peer_op_desc->GetOutputOffset(); + if (peer_out_data_anchor->GetIdx() < static_cast(output_list.size())) { + output_list.at(peer_out_data_anchor->GetIdx()) = memory_offset_[0].mem_offset_; + } else { + GELOGE(ge::FAILED, "index : %d is out of range.", peer_out_data_anchor->GetIdx()); + return ge::FAILED; + } + peer_op_desc->SetOutputOffset(output_list); + + uint32_t tensor_desc_size = 0; + if (ge::TensorUtils::GetSize(*(peer_op_desc->GetOutputDescPtr(peer_out_data_anchor->GetIdx())), tensor_desc_size) != + ge::SUCCESS) { + GELOGE(FAILED, "GetSize failed."); + return FAILED; + } + + memory_offset_[0].mem_offset_ += tensor_desc_size; + AlignMemOffset(kMemAlignSize); + } + + return ge::SUCCESS; +} + +Status GraphMemoryAssigner::AssignContinuousOutputMemory(const ge::NodePtr &node) { + GELOGI("Current node %s needs continuous output.", node->GetName().c_str()); + auto out_op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(out_op_desc == nullptr, GELOGE(ge::FAILED, "out_op_desc is null."); return ge::FAILED); + vector output_list = out_op_desc->GetOutputOffset(); + + if (out_op_desc->GetOutputsSize() > output_list.size()) { + GELOGE(ge::FAILED, "The size %zu of node output desc is more than output_list's size %zu.", + out_op_desc->GetOutputsSize(), output_list.size()); + return ge::FAILED; + } + + for (auto 
&out_data_anchor : node->GetAllOutDataAnchors()) { + output_list[out_data_anchor->GetIdx()] = memory_offset_[0].mem_offset_; + + uint32_t tensor_desc_size = 0; + if (ge::TensorUtils::GetSize(*(out_op_desc->GetOutputDescPtr(out_data_anchor->GetIdx())), tensor_desc_size) != + ge::SUCCESS) { + GELOGE(FAILED, "GetSize failed."); + return FAILED; + } + memory_offset_[0].mem_offset_ += tensor_desc_size; + + AlignMemOffset(kMemAlignSize); + } + + out_op_desc->SetOutputOffset(output_list); + memory_offset_[0].mem_offset_ += kMemAlignSize; + return ge::SUCCESS; +} + +Status GraphMemoryAssigner::ReAssignVirtualConcatMemory() { + for (const auto &n : compute_graph_->GetAllNodes()) { + GE_CHECK_NOTNULL(n->GetOpDesc()); + if (n->GetOpDesc()->GetType() == CONCAT) { + int64_t is_node_virtual; + GE_IF_BOOL_EXEC( + !(ge::AttrUtils::GetInt(n->GetOpDesc(), "fusion_virtual_op", is_node_virtual)), // Need to change + continue;); + vector output_list = n->GetOpDesc()->GetOutputOffset(); + if (output_list.empty()) { + GELOGE(FAILED, "Outputoffset is empty node name:%s", n->GetName().c_str()); + return FAILED; + } + output_list.at(0) = memory_offset_[0].mem_offset_; + n->GetOpDesc()->SetOutputOffset(output_list); + GELOGI("Set Concat %s output offset to %zu.", n->GetOpDesc()->GetName().c_str(), memory_offset_[0].mem_offset_); + + size_t extra_memory_size = 0; + for (const auto &in_data_anchor : n->GetAllInDataAnchors()) { + auto peer_out_data_anchor = in_data_anchor->GetPeerOutAnchor(); + if (peer_out_data_anchor != nullptr) { + for (const auto &next_in_data_anchor : peer_out_data_anchor->GetPeerInDataAnchors()) { + if (in_data_anchor->GetOwnerNode()->GetName() == next_in_data_anchor->GetOwnerNode()->GetName()) { + auto peer_op_desc = peer_out_data_anchor->GetOwnerNode()->GetOpDesc(); + GE_CHECK_NOTNULL(peer_op_desc); + vector output_offsets = peer_op_desc->GetOutputOffset(); + if (peer_out_data_anchor->GetIdx() < static_cast(output_offsets.size())) { + 
output_offsets.at(peer_out_data_anchor->GetIdx()) = memory_offset_[0].mem_offset_; + } else { + GELOGE(ge::FAILED, "index : %d is out of range.", peer_out_data_anchor->GetIdx()); + return ge::FAILED; + } + peer_op_desc->SetOutputOffset(output_offsets); + + ge::ConstGeTensorDescPtr output_desc = peer_op_desc->GetOutputDescPtr(peer_out_data_anchor->GetIdx()); + GE_CHECK_NOTNULL(output_desc); + int64_t output_mem_size = 0; + + // calculate tensor real size + GeShape output_shape = output_desc->GetShape(); + Format format = output_desc->GetFormat(); + DataType data_type = output_desc->GetDataType(); + graphStatus graph_status = + TensorUtils::CalcTensorMemSize(output_shape, format, data_type, output_mem_size); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(graph_status, "CalcTensorMemSize failed!"); + return FAILED; + } + + if ((output_mem_size > UINT32_MAX) || (output_mem_size < 0)) { + GELOGE(FAILED, + "After calc virtual concat tensor memory size, output_mem_size = %ld, " + "out of data range [0, %u]", + output_mem_size, UINT32_MAX); + return FAILED; + } + + uint32_t size = static_cast(output_mem_size); + memory_offset_[0].mem_offset_ += size; + uint32_t out_size = 0; + if (ge::TensorUtils::GetSize(*(peer_op_desc->GetOutputDescPtr(peer_out_data_anchor->GetIdx())), + out_size) != ge::SUCCESS) { + GELOGE(FAILED, "GetSize failed."); + return FAILED; + } + extra_memory_size = extra_memory_size + out_size - size; + } + } + } + } + memory_offset_[0].mem_offset_ += extra_memory_size; + } + } + + GELOGI("After reassign virtual concat memory, memoffset = %zu.", memory_offset_[0].mem_offset_); + return SUCCESS; +} + +Status GraphMemoryAssigner::ReAssignMergeMemory() { + for (const ge::NodePtr &n : compute_graph_->GetDirectNode()) { + GE_IF_BOOL_EXEC(n->GetOpDesc() == nullptr, continue); + string node_type; + GE_CHK_STATUS_RET(GetOriginalType(n, node_type), "Get node type fail."); + if (node_type != STREAMMERGE) { + continue; + } + + vector> input_node_list; + for (const auto 
&in_anchor : n->GetAllInDataAnchors()) { + ge::OutDataAnchorPtr out_anchor = in_anchor->GetPeerOutAnchor(); + if (out_anchor == nullptr) { + std::string in_name; + GE_IF_BOOL_EXEC(ge::AttrUtils::GetStr(n->GetOpDesc(), ATTR_NAME_NEXT_ITERATION, in_name) && !in_name.empty(), { + ge::NodePtr in_node = compute_graph_->FindNode(in_name); + GE_CHECK_NOTNULL(in_node); + input_node_list.emplace_back(std::make_pair(0, in_node)); + }); + continue; + } + ge::NodePtr src_node = out_anchor->GetOwnerNode(); + input_node_list.emplace_back(std::make_pair(out_anchor->GetIdx(), src_node)); + } + + int64_t data_output_offset = -1; + int64_t max_output_size = -1; + for (auto &iter : input_node_list) { + int index = iter.first; + NodePtr src_node = iter.second; + GE_CHECK_NOTNULL(src_node->GetOpDesc()); + int64_t tmp_output_size = src_node->GetOpDesc()->GetOutputDesc(index).GetShape().GetShapeSize(); + if ((data_output_offset == -1) || (tmp_output_size > max_output_size)) { + vector output_list = src_node->GetOpDesc()->GetOutputOffset(); + int output_size = static_cast(output_list.size()); + if (index >= output_size) { + GELOGE(INTERNAL_ERROR, "out_anchor[%d] >= output_list[%d]", index, output_size); + return INTERNAL_ERROR; + } + + data_output_offset = output_list[index]; + max_output_size = tmp_output_size; + } + GELOGI("merge=%s, input=%s, size=%ld, offset=%ld, max_size=%ld", n->GetName().c_str(), + src_node->GetName().c_str(), tmp_output_size, data_output_offset, max_output_size); + } + + vector input_list; + for (auto &iter : input_node_list) { + int index = iter.first; + NodePtr src_node = iter.second; + GE_CHECK_NOTNULL(src_node->GetOpDesc()); + vector output_list = src_node->GetOpDesc()->GetOutputOffset(); + int output_size = static_cast(output_list.size()); + if (index >= output_size) { + GELOGE(INTERNAL_ERROR, "out_anchor[%d] >= output_list[%d]", index, output_size); + return INTERNAL_ERROR; + } + + output_list[index] = data_output_offset; + 
src_node->GetOpDesc()->SetOutputOffset(output_list); + input_list.emplace_back(data_output_offset); + } + + n->GetOpDesc()->SetInputOffset(input_list); + } + GELOGI("After reassign merge memory, memoffset = %zu.", memory_offset_[0].mem_offset_); + return SUCCESS; +} + +Status GraphMemoryAssigner::ReAssignAtomicMemory(bool is_loop_graph) { + if (compute_graph_ == nullptr) { + GELOGE(ge::PARAM_INVALID, "Graph must not be null."); + return ge::PARAM_INVALID; + } + // Atomic op memory start addr + int64_t atomic_mem_start = static_cast(memory_offset_[0].mem_offset_); + GELOGI("Begin to reAssign atomic memory, atomic initial address mem_offset = %zu!", memory_offset_[0].mem_offset_); + + for (auto &node : compute_graph_->GetDirectNode()) { + auto node_op_desc = node->GetOpDesc(); + if (node_op_desc == nullptr) { + continue; + } + + bool is_atomic = false; + // If GetBool fail, is_atomic is false. + (void)ge::AttrUtils::GetBool(node_op_desc, ATOMIC_ATTR_IS_ATOMIC_NODE, is_atomic); + if (!is_atomic) { + continue; + } + + bool is_ref = false; + // If GetBool fail, is_ref is false. + (void)ge::AttrUtils::GetBool(node_op_desc, ATTR_NAME_REFERENCE, is_ref); + if (is_ref) { + GELOGE(ge::PARAM_INVALID, "The node %s cannot have both atomic and ref attribute.", + node_op_desc->GetName().c_str()); + return ge::PARAM_INVALID; + } + + // Atomic op memory start addr of loop graph + int64_t loop_graph_atomic_mem_start = static_cast(memory_offset_[0].mem_offset_); + + // Reassign atomic node output memory + Status ret = AssignAtomicOutputMemory(node); + if (ret != SUCCESS) { + GELOGE(ret, "Assign atomic output memory failed, node is %s.", node_op_desc->GetName().c_str()); + return ret; + } + + // Check atomic workspace + map> sub_node_workspace_info; + sub_node_workspace_info = node_op_desc->TryGetExtAttr(EXT_ATTR_ATOMIC_WORKSPACE_INFO, sub_node_workspace_info); + if (!sub_node_workspace_info.empty()) { + bool is_fusion_node = false; + // If GetBool fail, is_fusion_node is false. 
+ (void)ge::AttrUtils::GetBool(node_op_desc, ATOMIC_ATTR_IS_FUSION_NODE, is_fusion_node); + + if (is_fusion_node) { + // Assign fusion atomic node workspace memory + ret = AssignFusionAtomicWorkspaceMemory(node_op_desc, sub_node_workspace_info); + } else { + // Assign single ordinary atomic node workspace memory, not include fusion node + ret = AssignOrdinaryAtomicWorkspaceMemory(node_op_desc, sub_node_workspace_info); + } + + if (ret != SUCCESS) { + GELOGE(ret, "Assign atomic workspace memory failed, node is %s.", node_op_desc->GetName().c_str()); + return ret; + } + } + + /// In networks with loop op, atomic op uses atomic_addr_clean op independently, + /// so we need to set the attr separately. + if (is_loop_graph) { + GE_CHK_STATUS_RET(SetLoopGraphAtomicAttr(node, loop_graph_atomic_mem_start)); + } + } + + // In networks without loop op, the same atomic addr clean op is used for atomic op + if (!is_loop_graph) { + // Set the address attr of atomic clean operator + int64_t atomic_mem_size = memory_offset_[0].mem_offset_ - atomic_mem_start; + if (atomic_mem_size != 0) { + GE_CHK_STATUS_RET(SetAtomicCleanAttr(nullptr, atomic_mem_start, atomic_mem_size), "SetAtomicCleanAttr failed."); + } + } + + return SUCCESS; +} + +Status GraphMemoryAssigner::AssignReferenceMemory(const ge::NodePtr &node) { + GELOGI("Current node %s needs to support the reference relationship between output and input.", + node->GetName().c_str()); + + auto out_op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(out_op_desc == nullptr, GELOGE(ge::FAILED, "out_op_desc is null."); return ge::FAILED); + vector output_list = out_op_desc->GetOutputOffset(); + + if (out_op_desc->GetOutputsSize() > output_list.size()) { + GELOGE(ge::FAILED, "The size %zu of node output desc is more than output_list's size %zu.", + out_op_desc->GetOutputsSize(), output_list.size()); + return ge::FAILED; + } + + map input_name_index; + for (const auto &input_name : out_op_desc->GetAllInputNames()) { + int index = 
out_op_desc->GetInputIndexByName(input_name); + input_name_index.emplace(input_name, index); + } + + for (auto &out_data_anchor : node->GetAllOutDataAnchors()) { + string out_data_anchor_name = out_op_desc->GetOutputNameByIndex(out_data_anchor->GetIdx()); + auto iter = input_name_index.find(out_data_anchor_name); + if (iter != input_name_index.end()) { + int index = iter->second; + GELOGI("Reference memory: input anchor index = %d, input anchor name = %s, output anchor name = %s.", index, + iter->first.c_str(), out_data_anchor_name.c_str()); + GE_CHECK_NOTNULL(node->GetInDataAnchor(index)); + auto peer_out_anchor = node->GetInDataAnchor(index)->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, continue); + int peer_out_anchor_index = peer_out_anchor->GetIdx(); + auto peer_out_node = peer_out_anchor->GetOwnerNode(); + auto peer_out_op_desc = peer_out_node->GetOpDesc(); + GE_CHECK_NOTNULL(peer_out_op_desc); + output_list[out_data_anchor->GetIdx()] = peer_out_op_desc->GetOutputOffset()[peer_out_anchor_index]; + GELOGI("Reference output : Set %s name[%s] output[%d] offset to [%ld] stream_id[%ld]", + node->GetOwnerComputeGraph()->GetName().c_str(), peer_out_op_desc->GetName().c_str(), + out_data_anchor->GetIdx(), output_list[out_data_anchor->GetIdx()], peer_out_op_desc->GetStreamId()); + } else { + GELOGI("Reference output : origin %s name[%s] output[%d] offset is [%ld] stream_id[%ld]", + node->GetOwnerComputeGraph()->GetName().c_str(), out_op_desc->GetName().c_str(), out_data_anchor->GetIdx(), + output_list[out_data_anchor->GetIdx()], out_op_desc->GetStreamId()); + } + } + + out_op_desc->SetOutputOffset(output_list); + + return ge::SUCCESS; +} + +bool GraphMemoryAssigner::CheckInputIsSupportAtomic(const ge::NodePtr &node) { + for (auto &in_data_anchor : node->GetAllInDataAnchors()) { + auto peer_out_data_anchor = in_data_anchor->GetPeerOutAnchor(); + if (peer_out_data_anchor == nullptr) { + continue; + } + auto peer_op_desc = 
peer_out_data_anchor->GetOwnerNode()->GetOpDesc(); + if (peer_op_desc == nullptr) { + continue; + } + if ((peer_op_desc->GetType() == CONSTANTOP) || (peer_op_desc->GetType() == AIPP_DATA_TYPE) || + (peer_op_desc->GetType() == VARIABLE)) { + GELOGE(ge::FAILED, + "The current node is %s, and the peer out node is %s. Currently, this scenario is not supported", + node->GetName().c_str(), peer_op_desc->GetName().c_str()); + return false; + } + } + return true; +} + +Status GraphMemoryAssigner::AssignAtomicOutputMemory(const ge::NodePtr &node) { + auto op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, GELOGE(ge::FAILED, "op_desc is null."); return ge::FAILED); + GELOGI("Begin to assign atomic output memory, node = %s.", op_desc->GetName().c_str()); + + vector atomic_output_index; + // If GetListInt fail, atomic_output_index is empty. + (void)ge::AttrUtils::GetListInt(op_desc, ATOMIC_ATTR_OUTPUT_INDEX, atomic_output_index); + + // Check atomic output + vector output_list = op_desc->GetOutputOffset(); + if (atomic_output_index.size() > output_list.size()) { + GELOGE(ge::FAILED, "The size of atomic_output_index is more than the size of output_list"); + return ge::FAILED; + } + auto output_list_size = static_cast(output_list.size()); + for (auto &output_index : atomic_output_index) { + if (output_index >= output_list_size) { + GELOGE(ge::PARAM_INVALID, "The output index %ld is more than the size %ld of output_list.", output_index, + output_list_size); + return ge::PARAM_INVALID; + } + + // If the input of the cascade op needs to clear the atomic addr, there is no need to clear it separately here + bool is_assigned_mem = false; + if (static_cast(output_index) >= node->GetAllOutDataAnchors().size()) { + GELOGE(ge::PARAM_INVALID, "Output index %ld is more than the size of node's AllOutDataAnchors.", output_index); + return ge::PARAM_INVALID; + } + auto out_data_anchor = node->GetAllOutDataAnchors().at(output_index); + GE_CHECK_NOTNULL(out_data_anchor); + auto 
input_anchors = out_data_anchor->GetPeerInDataAnchors(); + for (auto &input_anchor : input_anchors) { + auto output_node = input_anchor->GetOwnerNode(); + + /// Get input atomic attr of peer output op, if atomic_input_index[0] = -1, indicates that the atomic address + /// has been assigned + vector atomic_input_index; + (void)ge::AttrUtils::GetListInt(output_node->GetOpDesc(), ATOMIC_ATTR_INPUT_INDEX, atomic_input_index); + if (!atomic_input_index.empty() && (atomic_input_index[0] == kAllInputAddrIsAtomic)) { + is_assigned_mem = true; + break; + } + } + + // If you have already assigned an atomic address, skip it, and you don't need to reassign it. + if (is_assigned_mem) { + GELOGI( + "[IMAS]Atomic output : we have assigned atomic memory as the input of next node in " + "ReAssignContinuousMemory function."); + continue; + } + + auto output_desc = op_desc->GetAllOutputsDescPtr().at(output_index); + uint32_t size = 0; + if (ge::TensorUtils::GetSize(*output_desc, size) != SUCCESS) { + GELOGI("Get size failed"); + } + + output_list[output_index] = memory_offset_[0].mem_offset_; + + memory_offset_[0].mem_offset_ += size; + AlignMemOffset(kMemAlignSize); + } + + op_desc->SetOutputOffset(output_list); + + return ge::SUCCESS; +} + +Status GraphMemoryAssigner::AssignOrdinaryAtomicWorkspaceMemory(const ge::OpDescPtr &op_desc, + map> &workspace_info) { + GELOGI("Begin to reassign normal atomic memory, node = %s.", op_desc->GetName().c_str()); + vector workspace_vector = op_desc->GetWorkspace(); + + for (auto iter = workspace_info.begin(); iter != workspace_info.end(); ++iter) { + if (op_desc->GetName() != iter->first) { + GELOGE(ge::PARAM_INVALID, "The node name %s and the node name %s in workspace info are inconsistent.", + op_desc->GetName().c_str(), iter->first.c_str()); + return ge::PARAM_INVALID; + } + + if (iter->second.empty()) { + continue; + } + + for (auto &info_iter : iter->second) { + auto workspace_index = static_cast(info_iter.first); + auto workspace_size = 
info_iter.second; + if (workspace_index >= workspace_vector.size()) { + GELOGE(ge::PARAM_INVALID, "The workspace index %lu is more than the size %zu of workspace vector.", + workspace_index, workspace_vector.size()); + return ge::PARAM_INVALID; + } + + workspace_vector[workspace_index] = memory_offset_[0].mem_offset_; + + memory_offset_[0].mem_offset_ += workspace_size; + } + } + op_desc->SetWorkspace(workspace_vector); + + return SUCCESS; +} + +Status GraphMemoryAssigner::AssignFusionAtomicWorkspaceMemory(const ge::OpDescPtr &op_desc, + map> &workspace_info) { + GELOGI("Begin to reassign fusion atomic memory, node = %s.", op_desc->GetName().c_str()); + map> sub_node_workspace_offset; + + for (auto &iter : workspace_info) { + if (iter.second.empty()) { + continue; + } + + map index_offset; + for (auto &info_iter : iter.second) { + auto workspace_index = static_cast(info_iter.first); + auto workspace_size = info_iter.second; + + size_t workspace_offset = memory_offset_[0].mem_offset_; + + memory_offset_[0].mem_offset_ += workspace_size; + index_offset.insert(std::make_pair(workspace_index, workspace_offset)); + } + sub_node_workspace_offset.insert(std::make_pair(iter.first, index_offset)); + } + if (!(op_desc->SetExtAttr(EXT_ATTR_ATOMIC_WORKSPACE_OFFSET, sub_node_workspace_offset))) { + GELOGE(FAILED, "Set EXT_ATTR_ATOMIC_WORKSPACE_OFFSET failed, op name:%s.", op_desc->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +Status GraphMemoryAssigner::CheckOffset() { + for (const ge::NodePtr &node : compute_graph_->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + vector input_list = node->GetOpDesc()->GetInputOffset(); + for (auto input : input_list) { + if (input == ge::kInvalidOffset) { + GELOGE(FAILED, "Invalid offset in node: %s input: %ld.", node->GetName().c_str(), ge::kInvalidOffset); + return FAILED; + } + } + vector output_list = node->GetOpDesc()->GetOutputOffset(); + for (auto output : output_list) { + if (output == 
ge::kInvalidOffset) { + GELOGE(FAILED, "Invalid offset in node: %s output: %ld.", node->GetName().c_str(), ge::kInvalidOffset); + return FAILED; + } + } + vector workspace_list = node->GetOpDesc()->GetWorkspace(); + for (auto workspace : workspace_list) { + if (workspace == ge::kInvalidOffset) { + GELOGE(FAILED, "Invalid offset in node: %s workspace: %ld.", node->GetName().c_str(), ge::kInvalidOffset); + return FAILED; + } + } + } + return SUCCESS; +} + +ge::Status GraphMemoryAssigner::SetInputOffset() { + if (memory_offset_.empty()) { + GELOGE(FAILED, "memory_offset_ is empty."); + return FAILED; + } + GEEVENT("[IMAS]AfterAssignMemory : %s", compute_graph_->GetName().c_str()); + for (const ge::NodePtr &node : compute_graph_->GetDirectNode()) { + if (UpdateOpInputOffset(node) != ge::SUCCESS) { + GELOGE(ge::FAILED, "Update op input offset failed"); + return ge::FAILED; + } + } + return ge::SUCCESS; +} + +ge::Status GraphMemoryAssigner::UpdateOpInputOffset(const NodePtr &node) const { + vector input_list; + if (node->GetType() == HCOMBROADCAST) { + for (const auto &anchor : node->GetAllInDataAnchors()) { + vector output_list; + auto peer_out_anchor = anchor->GetPeerOutAnchor(); + if (peer_out_anchor == nullptr) { + continue; + } + + auto last_peer_out_node = peer_out_anchor->GetOwnerNode(); + // If the current node is broadcast and the preceding node is variable, because InputOffset has been set + // in function:AssignVarAttr2Nodes, then the InputOffset of the broadcast node is taken to update the input_list. + // Otherwise, the OutputOffset of the previous node is used to update the input_list. 
+ if (last_peer_out_node->GetType() != VARIABLE) { + auto last_peer_out_op_desc = last_peer_out_node->GetOpDesc(); + GE_CHECK_NOTNULL(last_peer_out_op_desc); + output_list = last_peer_out_op_desc->GetOutputOffset(); + if (output_list.size() > static_cast(peer_out_anchor->GetIdx())) { + input_list.emplace_back(output_list.at(peer_out_anchor->GetIdx())); + } + } else { + vector cur_node_input_list; + auto cur_node_op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(cur_node_op_desc); + cur_node_input_list = cur_node_op_desc->GetInputOffset(); + if (cur_node_input_list.size() > static_cast(anchor->GetIdx())) { + input_list.emplace_back(cur_node_input_list.at(anchor->GetIdx())); + } + } + } + } else { + for (const auto &anchor : node->GetAllInDataAnchors()) { + vector output_list; + auto peer_out_anchor = anchor->GetPeerOutAnchor(); + if (peer_out_anchor == nullptr) { + continue; + } + + // If the current node not broadcast, the OutputOffset of the previous node is used to update the input_list + auto last_peer_out_node = peer_out_anchor->GetOwnerNode(); + auto last_peer_out_op_desc = last_peer_out_node->GetOpDesc(); + GE_CHECK_NOTNULL(last_peer_out_op_desc); + output_list = last_peer_out_op_desc->GetOutputOffset(); + if (output_list.size() > static_cast(peer_out_anchor->GetIdx())) { + input_list.emplace_back(output_list.at(peer_out_anchor->GetIdx())); + } + } + } + GE_CHECK_NOTNULL(node->GetOpDesc()); + node->GetOpDesc()->SetInputOffset(input_list); + return ge::SUCCESS; +} + +Status GraphMemoryAssigner::SetLoopGraphAtomicAttr(const ge::NodePtr &node, int64_t atomic_mem_start) { + // set the address attr of atomic clean operator for loop graph + int64_t atomic_mem_size = memory_offset_[0].mem_offset_ - atomic_mem_start; + GELOGI("SetLoopGraphAtomicAttr beign, atomic_addr_clean start size is %ld, mem_size is %ld, mem_offset is %zu.", + atomic_mem_start, atomic_mem_size, memory_offset_[0].mem_offset_); + const auto &in_control_anchor = node->GetInControlAnchor(); + if 
(atomic_mem_size != 0 && in_control_anchor != nullptr) { + for (auto &peer_out_control_anchor : in_control_anchor->GetPeerOutControlAnchors()) { + if (peer_out_control_anchor == nullptr) { + continue; + } + auto peer_out_node = peer_out_control_anchor->GetOwnerNode(); + auto peer_out_node_desc = peer_out_node->GetOpDesc(); + if (peer_out_node_desc == nullptr) { + continue; + } + + GELOGI("SetLoopGraphAtomicAttr, node is %s, op type is %s.", peer_out_node_desc->GetName().c_str(), + peer_out_node_desc->GetType().c_str()); + + if (peer_out_node_desc->GetType() == ATOMICADDRCLEAN) { + GE_CHK_STATUS_EXEC(SetAtomicCleanAttr(peer_out_node, atomic_mem_start, atomic_mem_size), + GELOGE(FAILED, "SetAtomicCleanAttr failed."); + return FAILED); + } + } + } + return SUCCESS; +} + +ge::Status GraphMemoryAssigner::SetAtomicCleanAttr(const NodePtr &n, int64_t atomic_mem_start, + int64_t atomic_mem_size) { + for (ge::NodePtr &node : compute_graph_->GetDirectNode()) { + auto node_op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue); + + if (((n != nullptr) && (node->GetName() == n->GetName())) || + ((n == nullptr) && (node_op_desc->GetType() == ATOMICADDRCLEAN))) { + vector workspace_vector = node_op_desc->GetWorkspace(); + vector workspace_byte_vector = node_op_desc->GetWorkspaceBytes(); + workspace_vector.emplace_back(atomic_mem_start); + workspace_byte_vector.emplace_back(atomic_mem_size); + node_op_desc->SetWorkspace(workspace_vector); + node_op_desc->SetWorkspaceBytes(workspace_byte_vector); + + std::vector mem_start_vector; + // If GetListInt fail, mem_start_vector is empty. 
+ (void)ge::AttrUtils::GetListInt(node_op_desc, ATTR_NAME_AUTOMIC_ADD_START, mem_start_vector); + mem_start_vector.emplace_back(atomic_mem_start); + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListInt(node_op_desc, ATTR_NAME_AUTOMIC_ADD_START, mem_start_vector), + GELOGE(FAILED, "SetListInt failed."); + return FAILED); + + std::vector mem_size_vector; + // If GetListInt fail, mem_size_vector is empty. + (void)ge::AttrUtils::GetListInt(node_op_desc, ATTR_NAME_AUTOMIC_ADD_MEM_SIZE, mem_size_vector); + mem_size_vector.emplace_back(atomic_mem_size); + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListInt(node_op_desc, ATTR_NAME_AUTOMIC_ADD_MEM_SIZE, mem_size_vector), + GELOGE(FAILED, "SetListInt failed."); + return FAILED); + } + } + return SUCCESS; +} + +void GraphMemoryAssigner::AlignMemOffset(const int64_t &mem_align_size) { + if (mem_align_size <= 0) { + return; + } + memory_offset_[0].mem_offset_ = + (memory_offset_[0].mem_offset_ + mem_align_size - 1) / mem_align_size * mem_align_size; +} +} // namespace ge diff --git a/src/ge/graph/build/memory/graph_mem_assigner.h b/src/ge/graph/build/memory/graph_mem_assigner.h new file mode 100644 index 00000000..eef8718c --- /dev/null +++ b/src/ge/graph/build/memory/graph_mem_assigner.h @@ -0,0 +1,156 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_BUILD_MEMORY_GRAPH_MEM_ASSIGNER_H_ +#define GE_GRAPH_BUILD_MEMORY_GRAPH_MEM_ASSIGNER_H_ + +#include +#include +#include +#include +#include +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "graph/node.h" +#include "runtime/mem.h" + +namespace ge { +struct MemoryOffset { + MemoryOffset(rtMemType_t mem_type, size_t mem_offset) : mem_type_(mem_type), mem_offset_(mem_offset) {} + + public: + rtMemType_t mem_type_; + size_t mem_offset_; +}; + +using MemoryOffsetList = vector; + +class VariableMemoryAssigner { + public: + explicit VariableMemoryAssigner(ge::ComputeGraphPtr compute_graph) : compute_graph_(std::move(compute_graph)) {} + + VariableMemoryAssigner(const VariableMemoryAssigner &) = delete; + + VariableMemoryAssigner &operator=(const VariableMemoryAssigner &) = delete; + + virtual ~VariableMemoryAssigner() = default; + + /// + /// @ingroup ge_graph + /// @brief assign memory offset + /// @return Status result of function + /// + ge::Status Assign(); + + /// + /// @ingroup ge_graph + /// @brief assign variable attr to nodes + /// @return Status result of function + /// + ge::Status AssignVarAttr2Nodes(); + + private: + ge::ComputeGraphPtr compute_graph_; +}; + +using VariableMemoryAssignerPtr = std::shared_ptr; + +class GraphMemoryAssigner { + public: + explicit GraphMemoryAssigner(ge::ComputeGraphPtr compute_graph) : compute_graph_(std::move(compute_graph)) {} + + GraphMemoryAssigner(const GraphMemoryAssigner &) = delete; + + GraphMemoryAssigner &operator=(const GraphMemoryAssigner &) = delete; + + virtual ~GraphMemoryAssigner() = default; + + /// + /// @ingroup ge_graph + /// @brief assign memory offset + /// @return Status result of function + /// + ge::Status AssignMemory(); + + /// + /// @ingroup ge_graph + /// @brief assign variable attr to nodes, + /// must be called after all memory assigned. 
+ /// @return Status result of function + /// + ge::Status AssignVarAttr2Nodes(); + + ge::Status ReAssignMemory(bool is_loop_graph, size_t &mem_offset); + + ge::Status SetInputOffset(); + + ge::Status UpdateOpInputOffset(const NodePtr &node) const; + + ge::Status CheckOffset(); + + private: + /// + /// @ingroup ge_graph + /// @brief assign memory offset + /// @return Status result of function + /// + ge::Status ReAssignContinuousMemory(bool is_loop_graph); + + ge::Status ReAssignVirtualConcatMemory(); + + ge::Status ReAssignMergeMemory(); + + ge::Status ReAssignAtomicMemory(bool is_loop_graph); + + ge::Status AssignContinuousInputMemory(const ge::NodePtr &node); + + ge::Status AssignContinuousOutputMemory(const ge::NodePtr &node); + + ge::Status AssignReferenceMemory(const ge::NodePtr &node); + + /// + /// @brief check the input of node whether support atomic attr + /// @param node + /// @return true:supported; false:not supported + /// + bool CheckInputIsSupportAtomic(const ge::NodePtr &node); + + ge::Status AssignAtomicOutputMemory(const ge::NodePtr &node); + + ge::Status AssignOrdinaryAtomicWorkspaceMemory(const ge::OpDescPtr &op_desc, + std::map> &workspace_info); + + ge::Status AssignFusionAtomicWorkspaceMemory(const ge::OpDescPtr &op_desc, + std::map> &workspace_info); + + /// + /// @brief set loop graph atomic attr + /// @param node + /// @param atomic_mem_start: atomic op memory start address + /// + ge::Status SetLoopGraphAtomicAttr(const ge::NodePtr &node, int64_t atomic_mem_start); + + ge::Status SetAtomicCleanAttr(const ge::NodePtr &n, int64_t atomic_mem_start, int64_t atomic_mem_size); + + void AlignMemOffset(const int64_t &mem_align_size); + + MemoryOffsetList memory_offset_; + ge::ComputeGraphPtr compute_graph_; +}; +} // namespace ge + +#endif // GE_GRAPH_BUILD_MEMORY_GRAPH_MEM_ASSIGNER_H_ diff --git a/src/ge/graph/build/memory/hybrid_mem_assigner.cc b/src/ge/graph/build/memory/hybrid_mem_assigner.cc new file mode 100644 index 00000000..15fbb312 --- 
/dev/null +++ b/src/ge/graph/build/memory/hybrid_mem_assigner.cc @@ -0,0 +1,73 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/memory/hybrid_mem_assigner.h" + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/build/memory/binary_block_mem_assigner.h" +#include "graph/build/memory/max_block_mem_assigner.h" + +namespace ge { +HybridMemAssigner::HybridMemAssigner(ge::ComputeGraphPtr compute_graph) + : mem_offset_(0), compute_graph_(std::move(compute_graph)) {} + +Status HybridMemAssigner::AssignMemory(std::unique_ptr &block_assigner, size_t &mem_size) { + vector ranges; + GE_CHECK_NOTNULL(block_assigner); + if (block_assigner->GetMemoryRanges(ranges) != SUCCESS) { + GELOGE(FAILED, "GetMemoryRanges Fail!"); + return FAILED; + } + GE_IF_BOOL_EXEC(ranges.empty(), return SUCCESS); + + block_assigner->AssignMemoryWithReuse(ranges); + + mem_size = block_assigner->GetMemOffset(); + return SUCCESS; +} + +Status HybridMemAssigner::Assign() { + std::unique_ptr binary_assigner(new (std::nothrow) BinaryBlockMemAssigner(compute_graph_)); + GE_CHECK_NOTNULL(binary_assigner); + + std::unique_ptr max_assigner(new (std::nothrow) MaxBlockMemAssigner(compute_graph_)); + GE_CHECK_NOTNULL(max_assigner); + + size_t bin_mem_size = 0; + size_t max_mem_size = 0; + + GE_CHK_STATUS_RET(AssignMemory(binary_assigner, bin_mem_size), "BinaryBlock Method AssignMemory 
Fail!"); + GE_CHK_STATUS_RET(AssignMemory(max_assigner, max_mem_size), "MaxBlock Method AssignMemory Fail!"); + + std::unique_ptr priority_assigner; + + GELOGI("Binary-block memory size:%zu, max-block memory size:%zu", bin_mem_size, max_mem_size); + if (bin_mem_size <= max_mem_size) { + GELOGI("Use binary-block memory assigner method"); + priority_assigner = std::move(binary_assigner); + } else { + GELOGI("Use max-block memory assigner method"); + priority_assigner = std::move(max_assigner); + } + + priority_assigner->SetOpMemOffset(); + mem_offset_ = priority_assigner->GetMemOffset(); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/memory/hybrid_mem_assigner.h b/src/ge/graph/build/memory/hybrid_mem_assigner.h new file mode 100644 index 00000000..e9aade09 --- /dev/null +++ b/src/ge/graph/build/memory/hybrid_mem_assigner.h @@ -0,0 +1,52 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_BUILD_MEMORY_HYBRID_MEM_ASSIGNER_H_ +#define GE_GRAPH_BUILD_MEMORY_HYBRID_MEM_ASSIGNER_H_ + +#include + +#include "graph/build/memory/mem_assigner.h" +#include "graph/compute_graph.h" +#include "common/types.h" +#include "common/util.h" + +namespace ge { +class BlockMemAssigner; + +class HybridMemAssigner : public MemAssigner { + public: + explicit HybridMemAssigner(ge::ComputeGraphPtr compute_graph); + + HybridMemAssigner(const HybridMemAssigner &) = delete; + + HybridMemAssigner &operator=(const HybridMemAssigner &) = delete; + + ~HybridMemAssigner() override = default; + + Status Assign() override; + + size_t GetMemOffset() const { return mem_offset_; } + + private: + Status AssignMemory(std::unique_ptr &block_assigner, size_t &mem_size); + + size_t mem_offset_; + + ge::ComputeGraphPtr compute_graph_; +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_MEMORY_HYBRID_MEM_ASSIGNER_H_ diff --git a/src/ge/graph/build/memory/max_block_mem_assigner.cc b/src/ge/graph/build/memory/max_block_mem_assigner.cc new file mode 100644 index 00000000..c093403a --- /dev/null +++ b/src/ge/graph/build/memory/max_block_mem_assigner.cc @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/build/memory/max_block_mem_assigner.h" + +#include + +namespace ge { +Status MaxBlockMemAssigner::GetMemoryRanges(std::vector &ranges) { + std::vector all_memory_size; + + GetOutAndWorkSpaceMem(all_memory_size); + + auto it = std::max_element(std::begin(all_memory_size), std::end(all_memory_size)); + if (it != std::end(all_memory_size)) { + ranges.emplace_back(*it); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/memory/max_block_mem_assigner.h b/src/ge/graph/build/memory/max_block_mem_assigner.h new file mode 100644 index 00000000..22229737 --- /dev/null +++ b/src/ge/graph/build/memory/max_block_mem_assigner.h @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_BUILD_MEMORY_MAX_BLOCK_MEM_ASSIGNER_H_ +#define GE_GRAPH_BUILD_MEMORY_MAX_BLOCK_MEM_ASSIGNER_H_ + +#include +#include + +#include "graph/build/memory/block_mem_assigner.h" + +namespace ge { +class MaxBlockMemAssigner : public BlockMemAssigner { + public: + explicit MaxBlockMemAssigner(ge::ComputeGraphPtr compute_graph) : BlockMemAssigner(std::move(compute_graph)) {} + + MaxBlockMemAssigner(const MaxBlockMemAssigner &) = delete; + + MaxBlockMemAssigner &operator=(const MaxBlockMemAssigner &) = delete; + + ~MaxBlockMemAssigner() override = default; + + Status GetMemoryRanges(std::vector &ranges) override; +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_MEMORY_MAX_BLOCK_MEM_ASSIGNER_H_ diff --git a/src/ge/graph/build/memory/mem_assigner.h b/src/ge/graph/build/memory/mem_assigner.h new file mode 100644 index 00000000..26cc9f26 --- /dev/null +++ b/src/ge/graph/build/memory/mem_assigner.h @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_BUILD_MEMORY_MEM_ASSIGNER_H_ +#define GE_GRAPH_BUILD_MEMORY_MEM_ASSIGNER_H_ + +#include "common/ge_inner_error_codes.h" +#include "memory/memory_assigner.h" + +namespace ge { +static const int64_t kInvalidOffset = -1; + +class MemAssigner { + public: + MemAssigner() = default; + + MemAssigner(const MemAssigner &) = delete; + + MemAssigner &operator=(const MemAssigner &) = delete; + + virtual ~MemAssigner() = default; + + virtual Status Assign() = 0; +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_MEMORY_MEM_ASSIGNER_H_ diff --git a/src/ge/graph/build/memory/memory_assigner.cc b/src/ge/graph/build/memory/memory_assigner.cc new file mode 100644 index 00000000..db05b500 --- /dev/null +++ b/src/ge/graph/build/memory/memory_assigner.cc @@ -0,0 +1,61 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "memory/memory_assigner.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/build/memory/graph_mem_assigner.h" + +namespace ge { +Status MemoryAssigner::AssignMemory(bool is_loop_graph, size_t &mem_offset) { + std::unique_ptr graph_mem_assigner(new (std::nothrow) + ge::GraphMemoryAssigner(compute_graph_)); + if (graph_mem_assigner == nullptr) { + GELOGE(ge::FAILED, "Alloc GraphMemoryAssigner failed."); + return ge::FAILED; + } + + if (graph_mem_assigner->AssignMemory() != ge::SUCCESS) { + GELOGE(ge::FAILED, "Memory assigner failed"); + return ge::FAILED; + } + + // Reassign memory for special nodes + if (graph_mem_assigner->ReAssignMemory(is_loop_graph, mem_offset) != ge::SUCCESS) { + GELOGE(ge::FAILED, "Memory assigner failed"); + return ge::FAILED; + } + + // Must do variable attr assign after all the memory assigned + if (graph_mem_assigner->AssignVarAttr2Nodes() != SUCCESS) { + GELOGE(FAILED, "Variable Memory assigner failed"); + return FAILED; + } + + if (graph_mem_assigner->SetInputOffset() != ge::SUCCESS) { + GELOGE(ge::FAILED, "SetInputOffset Fail!"); + return ge::FAILED; + } + + if (graph_mem_assigner->CheckOffset() != SUCCESS) { + GELOGE(FAILED, "CheckOffset Fail!"); + return FAILED; + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/memory/var_mem_assign_util.cc b/src/ge/graph/build/memory/var_mem_assign_util.cc new file mode 100644 index 00000000..46e0348d --- /dev/null +++ b/src/ge/graph/build/memory/var_mem_assign_util.cc @@ -0,0 +1,347 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/memory/var_mem_assign_util.h" + +#include + +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/common/transop_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/tensor.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/tensor_utils.h" + +using std::string; +using std::vector; + +namespace ge { +Status VarMemAssignUtil::AssignVarMemory(ge::ComputeGraphPtr &compute_graph) { + GE_CHK_STATUS_RET(AssignMemory2VariableNode(compute_graph)); + GE_CHK_STATUS_RET(AssignMemory2HasRefAttrNode(compute_graph)); + + return SUCCESS; +} + +Status VarMemAssignUtil::AssignConstantOpMemory(ge::ComputeGraphPtr &compute_graph) { + return AssignStaticMemory2Node(compute_graph); +} + +Status VarMemAssignUtil::AssignMemory2VariableNode(ge::ComputeGraphPtr &compute_graph) { + return AssignStaticMemory2Node(compute_graph); +} + +Status VarMemAssignUtil::AssignStaticMemory2Node(ge::ComputeGraphPtr &compute_graph) { + GE_IF_BOOL_EXEC(compute_graph == nullptr, return FAILED); + for (const ge::NodePtr &n : compute_graph->GetDirectNode()) { + GE_IF_BOOL_EXEC((n->GetType() != VARIABLE) && (n->GetType() != CONSTANTOP), continue); + string ref_var_src_var_name; + GE_CHECK_NOTNULL(n->GetOpDesc()); + GE_IF_BOOL_EXEC(ge::AttrUtils::GetStr(n->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_src_var_name), continue); + 
GE_IF_BOOL_EXEC(n->GetOpDesc()->GetAllOutputsDesc().empty(), + GELOGE(FAILED, "node:%s has no OutputDesc.", n->GetName().c_str()); + return FAILED); + ge::ConstGeTensorDescPtr tensor_desc = n->GetOpDesc()->GetOutputDescPtr(0); + GE_CHECK_NOTNULL(tensor_desc); + string node_name = n->GetName(); + if (!VarManager::Instance(compute_graph->GetSessionID())->IsVarExist(node_name, *tensor_desc)) { + GE_CHK_STATUS_RET( + VarManager::Instance(compute_graph->GetSessionID())->AssignVarMem(node_name, *tensor_desc, RT_MEMORY_HBM)); + GE_IF_BOOL_EXEC(n->GetType() == VARIABLE, + GE_CHK_STATUS_RET(AssignData2Fp32Var(n, compute_graph->GetSessionID()))); + GE_CHK_STATUS_RET(VarManager::Instance(compute_graph->GetSessionID()) + ->SetAllocatedGraphId(node_name, compute_graph->GetGraphID())); + } + + uint8_t *dev_ptr = nullptr; + rtMemType_t memory_type = RT_MEMORY_HBM; + GE_CHK_STATUS_RET(VarManager::Instance(compute_graph->GetSessionID()) + ->GetVarAddr(node_name, *tensor_desc, &dev_ptr, memory_type)); + vector output_list = n->GetOpDesc()->GetOutputOffset(); + GE_IF_BOOL_EXEC(output_list.empty(), return FAILED); + output_list[0] = static_cast(reinterpret_cast(dev_ptr)); + n->GetOpDesc()->SetOutputOffset(output_list); + } + return SUCCESS; +} + +Status VarMemAssignUtil::AssignData2Fp32Var(const ge::NodePtr &node, uint64_t session_id) { + string src_var_name; + GE_CHECK_NOTNULL(node->GetOpDesc()); + if (ge::AttrUtils::GetStr(node->GetOpDesc(), VAR_ATTR_SRC_VAR_NAME, src_var_name)) { + ge::GeTensorDesc cur_tensor_desc; + uint8_t *dev_ptr = nullptr; + rtMemType_t memory_type = RT_MEMORY_HBM; + GE_CHK_STATUS_RET(VarManager::Instance(session_id)->GetCurVarDesc(src_var_name, cur_tensor_desc)); + GE_CHK_STATUS_RET( + VarManager::Instance(session_id)->GetVarAddr(src_var_name, cur_tensor_desc, &dev_ptr, memory_type)); + GE_CHK_STATUS_RET( + VarManager::Instance(session_id)->SetVarAddr(node->GetName(), cur_tensor_desc, dev_ptr, memory_type)); + } + return SUCCESS; +} + +Status 
VarMemAssignUtil::AssignVarAttr2Nodes(ge::ComputeGraphPtr &compute_graph) { + for (const ge::NodePtr &node : compute_graph->GetDirectNode()) { + GE_IF_BOOL_EXEC(node->GetType() != VARIABLE, continue); + string ref_var_src_var_name; + GE_CHECK_NOTNULL(node->GetOpDesc()); + GE_IF_BOOL_EXEC(ge::AttrUtils::GetStr(node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_src_var_name), continue); + GE_CHK_STATUS_RET(DealVariableNode(compute_graph->GetGraphID(), node, compute_graph->GetSessionID())); + } + return SUCCESS; +} + +Status VarMemAssignUtil::SetOutVariableAttr(const ge::NodePtr &node, const ge::NodePtr &var_node, int index, + uint64_t session_id) { + vector output_list; + uint8_t *dev_ptr = nullptr; + GE_CHECK_NOTNULL(node->GetOpDesc()); + output_list = node->GetOpDesc()->GetOutputOffset(); + if (output_list.empty()) { + GELOGE(PARAM_INVALID, "Output_list is empty"); + return PARAM_INVALID; + } + GE_CHECK_NOTNULL(var_node->GetOpDesc()); + GeTensorDesc var_tensor_desc = var_node->GetOpDesc()->GetOutputDesc(0); + rtMemType_t memory_type = RT_MEMORY_HBM; + GE_CHK_STATUS_RET( + VarManager::Instance(session_id)->GetVarAddr(var_node->GetName(), var_tensor_desc, &dev_ptr, memory_type)); + + int out_list_size = static_cast(output_list.size()); + GE_CHK_BOOL_RET_STATUS(index < out_list_size, FAILED, "index %d >= output_list.size() %d", index, out_list_size); + + output_list[index] = static_cast(reinterpret_cast(dev_ptr)); + GELOGI("Assign node outputOffset[index] is: %ld", output_list[index]); + node->GetOpDesc()->SetOutputOffset(output_list); + + return SUCCESS; +} + +Status VarMemAssignUtil::DealExportVariableNode(const ge::NodePtr &node, const ge::NodePtr &var_node, + uint64_t session_id) { + ge::OutDataAnchorPtr var_out_anchor = node->GetOutDataAnchor(0); + GE_IF_BOOL_EXEC(var_out_anchor == nullptr, return FAILED); + for (const ge::InDataAnchorPtr &dst_in_var_anchor : var_out_anchor->GetPeerInDataAnchors()) { + ge::NodePtr dst_node = dst_in_var_anchor->GetOwnerNode(); + if 
((dst_node->GetType() == ASSIGN) || (dst_node->GetType() == ASSIGNADD) || + (dst_node->GetType() == ASSIGNSUB)) { + if (dst_in_var_anchor == dst_node->GetInDataAnchor(0)) { + GE_CHK_STATUS_RET(DealExportVariableNode(dst_node, var_node, session_id)); + } + } + } + GE_CHK_STATUS_RET(SetOutVariableAttr(node, var_node, 0, session_id)); + return SUCCESS; +} + +Status VarMemAssignUtil::DealBroadCastNode(uint32_t graph_id, const ge::NodePtr &node, + const ge::InDataAnchorPtr &in_data_anchor, const ge::NodePtr &var_node, + uint64_t session_id) { + VarBroadCastInfo broad_cast_info; + broad_cast_info.idx = in_data_anchor->GetIdx(); + broad_cast_info.var_name = var_node->GetName(); + broad_cast_info.broadcast_name = node->GetName(); + + auto op_desc = node->GetOpDesc(); + GE_CHK_BOOL_RET_STATUS(op_desc != nullptr, FAILED, "Get broadcast op %s desc is nullptr", node->GetName().c_str()); + + GE_IF_BOOL_EXEC(broad_cast_info.idx < 0, + GELOGI("Broadcast input index must be positive, actual %d", broad_cast_info.idx); + return INTERNAL_ERROR); + + auto broad_cast_index = static_cast(broad_cast_info.idx); + auto input_tensor_desc_ptr_vistor = op_desc->GetAllInputsDescPtr(); + GE_CHK_BOOL_RET_STATUS(input_tensor_desc_ptr_vistor.size() > broad_cast_index, FAILED, + "Get broadcast op %s input tensor desc size [%zu] < idx [%d]", node->GetName().c_str(), + input_tensor_desc_ptr_vistor.size(), broad_cast_info.idx); + const ge::GeTensorDescPtr input_tensor_desc = + input_tensor_desc_ptr_vistor.at(static_cast(broad_cast_info.idx)); + uint32_t input_size = 0; + GE_CHK_STATUS(TensorUtils::GetSize(*input_tensor_desc, input_size), "get input size failed."); + broad_cast_info.input_size = input_size; + + vector output_list = op_desc->GetOutputOffset(); + GE_CHK_BOOL_RET_STATUS(output_list.size() > broad_cast_index, FAILED, + "Get broadcast op %s output_list size [%zu] < idx [%d]", node->GetName().c_str(), + output_list.size(), broad_cast_info.idx); + broad_cast_info.input_offset = 
output_list[broad_cast_info.idx]; + broad_cast_info.output_offset = output_list[broad_cast_info.idx]; + + op_desc->SetInputOffset(output_list); + + auto output_tensor_desc_ptr_vistor = op_desc->GetAllOutputsDescPtr(); + GE_CHK_BOOL_RET_STATUS(output_tensor_desc_ptr_vistor.size() > broad_cast_index, FAILED, + "Get broadcast op %s output tensor desc size [%zu] < idx [%d]", node->GetName().c_str(), + output_tensor_desc_ptr_vistor.size(), broad_cast_info.idx); + const ge::GeTensorDescPtr output_tensor_desc = + output_tensor_desc_ptr_vistor.at(static_cast(broad_cast_info.idx)); + uint32_t output_size = 0; + GE_CHK_STATUS(TensorUtils::GetSize(*output_tensor_desc, output_size), "get input size failed."); + broad_cast_info.output_size = output_size; + GE_CHK_BOOL_RET_STATUS(broad_cast_info.output_size == broad_cast_info.input_size, FAILED, + "Broadcast op input size[%lu] is not equal output size[%lu]", broad_cast_info.input_size, + broad_cast_info.output_size); + + GE_CHK_STATUS_RET(VarManager::Instance(session_id)->SaveBroadCastInfo(graph_id, broad_cast_info)); + return SUCCESS; +} + +Status VarMemAssignUtil::DealVariableNode(uint32_t graph_id, const ge::NodePtr &node, uint64_t session_id) { + GE_CHK_STATUS_RET(SetOutVariableAttr(node, node, 0, session_id)); + + for (const ge::OutDataAnchorPtr &var_out_data_anchor : node->GetAllOutDataAnchors()) { + for (const ge::InDataAnchorPtr &dst_in_data_anchor : var_out_data_anchor->GetPeerInDataAnchors()) { + ge::NodePtr dst_node = dst_in_data_anchor->GetOwnerNode(); + if (dst_node->GetType() == HCOMBROADCAST) { + GE_CHK_STATUS_RET(DealBroadCastNode(graph_id, dst_node, dst_in_data_anchor, node, session_id)); + continue; + } + + if ((dst_node->GetType() == ASSIGN) || (dst_node->GetType() == ASSIGNADD) || + (dst_node->GetType() == ASSIGNSUB)) { + if (dst_in_data_anchor == dst_node->GetInDataAnchor(0)) { + GE_CHK_STATUS_RET(DealExportVariableNode(dst_node, node, session_id)); + } + } + auto dst_type = dst_node->GetType(); + bool 
is_trans_node = (dst_type == TRANSDATA) || (dst_type == CAST) || (dst_type == TRANSPOSE) || + (dst_type == PERMUTE); + if (is_trans_node) { + NodePtr final_trans_node = GetFinalTransNode(dst_node); + GE_CHK_STATUS_RET(DealTransNode(final_trans_node)); + } + } + } + return SUCCESS; +} + +ge::NodePtr VarMemAssignUtil::GetFinalTransNode(const ge::NodePtr &trans_node) { + NodePtr final_ref_node = trans_node; + OutDataAnchorPtr trans_out_data_anchor = trans_node->GetOutDataAnchor(0); + GE_IF_BOOL_EXEC(trans_out_data_anchor == nullptr, return final_ref_node); + for (const auto &dst_in_anchor : trans_out_data_anchor->GetPeerInDataAnchors()) { + NodePtr dst_node = dst_in_anchor->GetOwnerNode(); + auto dst_type = dst_node->GetType(); + bool is_trans_node = (dst_type == TRANSDATA) || (dst_type == CAST) || (dst_type == TRANSPOSE) || + (dst_type == PERMUTE); + if (is_trans_node && (dst_in_anchor->GetIdx() == 0)) { + final_ref_node = GetFinalTransNode(dst_node); + } + } + GELOGI("Final writable node is %s", final_ref_node->GetName().c_str()); + return final_ref_node; +} + +Status VarMemAssignUtil::DealTransNode(const ge::NodePtr &final_trans_node) { + ge::OutDataAnchorPtr final_trans_out_anchor = final_trans_node->GetOutDataAnchor(0); + GE_IF_BOOL_EXEC(final_trans_out_anchor == nullptr, return SUCCESS); + for (const ge::InDataAnchorPtr &dst_in_var_anchor : final_trans_out_anchor->GetPeerInDataAnchors()) { + ge::NodePtr dst_node = dst_in_var_anchor->GetOwnerNode(); + if ((dst_node->GetType() == ASSIGN) || (dst_node->GetType() == ASSIGNADD) || + (dst_node->GetType() == ASSIGNSUB)) { + GE_CHK_STATUS_RET(DealExportTransNode(dst_node, final_trans_node)); + } + } + return SUCCESS; +} + +Status VarMemAssignUtil::DealExportTransNode(const ge::NodePtr &node, const ge::NodePtr &final_trans_node) { + ge::OutDataAnchorPtr node_out_anchor = node->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(node_out_anchor); + for (const ge::InDataAnchorPtr &dst_in_var_anchor : 
node_out_anchor->GetPeerInDataAnchors()) { + ge::NodePtr dst_node = dst_in_var_anchor->GetOwnerNode(); + if ((dst_node->GetType() == ASSIGN) || (dst_node->GetType() == ASSIGNADD) || + (dst_node->GetType() == ASSIGNSUB)) { + GE_CHK_STATUS_RET(DealExportTransNode(dst_node, final_trans_node)); + } + } + GE_CHK_STATUS_RET(SetOutTransNodeToAssign(node, final_trans_node, 0)); + return SUCCESS; +} + +Status VarMemAssignUtil::SetOutTransNodeToAssign(const ge::NodePtr &node, const ge::NodePtr &final_trans_node, + size_t index) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + GE_CHECK_NOTNULL(final_trans_node->GetOpDesc()); + // get final_trans_node outputOffset + vector final_trans_output_list = final_trans_node->GetOpDesc()->GetOutputOffset(); + GE_CHECK_SIZE(final_trans_output_list.size()); + + // get assign_node outputOffset + vector output_list = node->GetOpDesc()->GetOutputOffset(); + auto out_list_size = output_list.size(); + GE_CHECK_SIZE(out_list_size); + GE_CHK_BOOL_RET_STATUS(index < out_list_size, FAILED, "index %zu >= output_list.size() %zu", index, out_list_size); + + // final_trans_node outputOffset[0] to assign_node outputOffset[0] + GELOGI("final_trans_node outputOffset[0] is: %ld", final_trans_output_list[0]); + + output_list[index] = final_trans_output_list[0]; + GELOGI("Assign node outputOffset[0] is: %ld", output_list[index]); + node->GetOpDesc()->SetOutputOffset(output_list); + + return SUCCESS; +} + +Status VarMemAssignUtil::AssignMemory2HasRefAttrNode(ge::ComputeGraphPtr &compute_graph) { + for (const ge::NodePtr &n : compute_graph->GetDirectNode()) { + string ref_var_src_var_name; + GE_CHECK_NOTNULL(n->GetOpDesc()); + bool is_ref = ge::AttrUtils::GetStr(n->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_src_var_name); + GE_IF_BOOL_EXEC(is_ref, + GE_CHK_STATUS_RET(AssignData2VarRef(n, ref_var_src_var_name, compute_graph->GetSessionID()))); + } + return SUCCESS; +} + +Status VarMemAssignUtil::AssignData2VarRef(const ge::NodePtr &has_ref_attr_node, const string 
&src_var_name, + uint64_t session_id) { + if (!TransOpUtil::IsTransOp(has_ref_attr_node)) { + return SUCCESS; + } + // Get ref_var_src_var address + ge::NodePtr var_ref_src_var = has_ref_attr_node->GetOwnerComputeGraph()->FindNode(src_var_name); + GE_IF_BOOL_EXEC(var_ref_src_var == nullptr || var_ref_src_var->GetOpDesc() == nullptr, return FAILED); + GeTensorDesc src_tensor_desc = var_ref_src_var->GetOpDesc()->GetOutputDesc(0); + uint8_t *dev_ptr = nullptr; + GE_CHK_STATUS_RET(VarManager::Instance(session_id)->GetVarAddr(src_var_name, src_tensor_desc, &dev_ptr)); + GE_CHECK_NOTNULL(has_ref_attr_node->GetOpDesc()); + vector ref_attr_node_output_list = has_ref_attr_node->GetOpDesc()->GetOutputOffset(); + GE_CHECK_SIZE(ref_attr_node_output_list.size()); + + int out_index = 0; + bool is_get = ge::AttrUtils::GetInt(var_ref_src_var->GetOpDesc(), REF_VAR_PRE_PEER_OUT_INDEX, out_index); + if (!is_get) { + GELOGI("%s failed to get attr [REF_VAR_PRE_PEER_OUT_INDEX]", var_ref_src_var->GetName().c_str()); + } + + GE_CHK_BOOL_RET_STATUS(static_cast(out_index) < ref_attr_node_output_list.size(), FAILED, + "out_index %d >= ref_attr_node_output_list.size() %zu", out_index, + ref_attr_node_output_list.size()); + + ref_attr_node_output_list[out_index] = static_cast(reinterpret_cast(dev_ptr)); + has_ref_attr_node->GetOpDesc()->SetOutputOffset(ref_attr_node_output_list); + GELOGI("Refresh address successfully, ref node: [%s], addr: [%ld]", has_ref_attr_node->GetName().c_str(), + ref_attr_node_output_list[out_index]); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/memory/var_mem_assign_util.h b/src/ge/graph/build/memory/var_mem_assign_util.h new file mode 100644 index 00000000..d6ebe210 --- /dev/null +++ b/src/ge/graph/build/memory/var_mem_assign_util.h @@ -0,0 +1,57 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_BUILD_MEMORY_VAR_MEM_ASSIGN_UTIL_H_ +#define GE_GRAPH_BUILD_MEMORY_VAR_MEM_ASSIGN_UTIL_H_ + +#include + +#include "common/debug/log.h" +#include "common/ge_inner_error_codes.h" +#include "graph/utils/node_utils.h" + +namespace ge { +class VarMemAssignUtil { + public: + static Status AssignVarMemory(ge::ComputeGraphPtr &compute_graph); + static Status AssignConstantOpMemory(ge::ComputeGraphPtr &compute_graph); + static Status AssignStaticMemory2Node(ge::ComputeGraphPtr &compute_graph); + static Status AssignVarAttr2Nodes(ge::ComputeGraphPtr &compute_graph); + static Status AssignMemory2HasRefAttrNode(ge::ComputeGraphPtr &compute_graph); + + private: + static Status AssignMemory2VariableNode(ge::ComputeGraphPtr &compute_graph); + + static Status SetOutVariableAttr(const ge::NodePtr &node, const ge::NodePtr &var_node, int index, + uint64_t session_id); + static Status DealExportVariableNode(const ge::NodePtr &node, const ge::NodePtr &var_node, uint64_t session_id); + static Status DealVariableNode(uint32_t graph_id, const ge::NodePtr &node, uint64_t session_id); + + static Status DealBroadCastNode(uint32_t graph_id, const ge::NodePtr &node, const ge::InDataAnchorPtr &in_data_anchor, + const ge::NodePtr &var_node, uint64_t session_id); + static Status AssignData2Fp32Var(const ge::NodePtr &node, uint64_t session_id); + + static ge::NodePtr GetFinalTransNode(const ge::NodePtr &ref_node); + + static Status DealTransNode(const ge::NodePtr &final_trans_node); + static Status DealExportTransNode(const ge::NodePtr &node, 
const ge::NodePtr &final_trans_node); + static Status AssignData2VarRef(const ge::NodePtr &variable_ref, const std::string &src_var_name, + uint64_t session_id); + + static Status SetOutTransNodeToAssign(const ge::NodePtr &node, const ge::NodePtr &final_trans_node, size_t index); +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_MEMORY_VAR_MEM_ASSIGN_UTIL_H_ diff --git a/src/ge/graph/build/model_builder.cc b/src/ge/graph/build/model_builder.cc new file mode 100644 index 00000000..72563dba --- /dev/null +++ b/src/ge/graph/build/model_builder.cc @@ -0,0 +1,609 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/build/model_builder.h" + +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/buffer.h" +#include "graph/build/stream_allocator.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_attr_value.h" +#include "graph/ge_error_codes.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/optimize/common/params.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" +#include "memory/memory_assigner.h" +#include "omg/version.h" +#include "register/op_registry.h" + +using std::string; +using std::vector; +using std::map; +using std::set; +using domi::DOMI_TENSOR_NC1HWC0; +using domi::AippOpParams; +using domi::ModelTaskDef; +using ge::FAILED; +using ge::SUCCESS; +using ge::PARAM_INVALID; + +namespace { +const uint32_t kWeightsStartOffset = 512; +const int32_t kWrongIndex = -2; +const int kInvalidIndexNum = -1; + +bool IsGeLocalOp(const ge::ConstOpDescPtr &op_desc) { + auto type = op_desc->GetType(); + if (type == ge::CONSTANTOP) { + // constant op just has one output + ge::GeTensorDesc output_desc = op_desc->GetOutputDesc(0); + return !(output_desc.GetDataType() == ge::DT_STRING); + } + const set ge_local_set = {ge::STREAMMERGE, ge::MEMCPYASYNC, ge::STREAMACTIVE, ge::STREAMSWITCH, + ge::VARIABLE, ge::NOOP, ge::CONSTANT, ge::ENTER, + ge::REFENTER, ge::LOOPCOND, ge::NEXTITERATION, ge::REFNEXTITERATION, + ge::EXIT, ge::REFEXIT}; + return (ge_local_set.find(type) != ge_local_set.end()); +} +} // namespace + +namespace ge { +ModelBuilder::ModelBuilder(ge::ComputeGraphPtr compute_graph, const vector 
&subgraphs, + const map &stream_max_parallel_num, bool hcom_parallel, int mode) + : mem_offset_(0), + weight_offset_(kWeightsStartOffset), + compute_graph_(std::move(compute_graph)), + subgraphs_(subgraphs), + stream_num_(0), + event_num_(0), + stream_max_parallel_num_(stream_max_parallel_num), + hcom_parallel_(hcom_parallel), + build_mode_(mode), + max_mem_offset_(0), + platform_type_(0), + is_loop_graph_(false) {} + +ModelBuilder::~ModelBuilder() {} + +Status ModelBuilder::CalcOutputSize(const ge::NodePtr &n) { + GE_CHECK_NOTNULL(n); + auto node_op_desc = n->GetOpDesc(); + GE_CHECK_NOTNULL(node_op_desc); + uint32_t index = 0; + for (const auto &output_desc_ptr : node_op_desc->GetAllOutputsDescPtr()) { + GeTensorDesc &desc_temp = *output_desc_ptr; + + uint32_t dim_num = static_cast(desc_temp.GetShape().GetDimNum()); + GE_IF_BOOL_EXEC(dim_num > DIM_DEFAULT_SIZE, TensorUtils::SetRealDimCnt(desc_temp, dim_num)); + // calculate tensor size + uint32_t size_temp = 0; + graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(desc_temp, size_temp); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(graph_status, "GetTensorMemorySizeInBytes failed!"); + return FAILED; + } + TensorUtils::SetSize(desc_temp, size_temp); + if (node_op_desc->UpdateOutputDesc(index, desc_temp) != SUCCESS) { + GELOGE(FAILED, "UpdateOutputDesc failed."); + return FAILED; + } + + GELOGD("update output desc, dim_size: %u, mem_size: %u, format: %s, type: %s, node name:%s", dim_num, size_temp, + TypeUtils::FormatToSerialString(desc_temp.GetFormat()).c_str(), + TypeUtils::DataTypeToSerialString(desc_temp.GetDataType()).c_str(), node_op_desc->GetName().c_str()); + index++; + } + + return SUCCESS; +} + +void ModelBuilder::SetInputIsConst(const ge::NodePtr &n) { + auto node_op_desc = n->GetOpDesc(); + if (node_op_desc == nullptr) { + GELOGW("node_op_desc is nullptr!"); + return; + } + auto is_input_const = node_op_desc->GetIsInputConst(); + + // must set all true input_const to false + for (size_t 
i = 0; i < is_input_const.size(); i++) { + is_input_const[i] = false; + } + auto in_data_anchors = n->GetAllInDataAnchors(); + for (size_t index = 0; index < in_data_anchors.size(); index++) { + auto in_data_anchor = in_data_anchors.at(index); + const auto &peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, continue); + const auto &src_node = peer_out_anchor->GetOwnerNode(); + if (src_node->GetType() == CONSTANT) { + GELOGI("SetIsInputConst const"); + for (size_t i = is_input_const.size(); i <= index; ++i) { + is_input_const.push_back(false); + } + is_input_const[index] = true; + + vector weights = OpDescUtils::MutableWeights(src_node); + if (weights.empty()) { + GELOGW("SetInputIsConst weights is empty"); + return; + } + GeTensorPtr weight = weights[0]; + GE_IF_BOOL_EXEC(weight == nullptr, continue); + GeTensorDesc &tensor_desc = weight->MutableTensorDesc(); + int64_t data_offset = 0; + if (TensorUtils::GetDataOffset(tensor_desc, data_offset) != GRAPH_SUCCESS) { + GELOGW("Get Offset from weight failed"); + return; + } + auto input_tensor = node_op_desc->MutableInputDesc(static_cast(index)); + if (input_tensor == nullptr) { + GELOGW("Get input_tensor failed"); + return; + } + TensorUtils::SetDataOffset(*input_tensor, data_offset); + } else if (src_node->GetType() == CONSTANTOP) { + if ((index < is_input_const.size()) && is_input_const[index]) { + is_input_const[index] = false; + } + } + } + + std::string input_const_info = ToString(is_input_const); + GELOGD("update opdesc:%s InputConst:%s", node_op_desc->GetName().c_str(), input_const_info.c_str()); + node_op_desc->SetIsInputConst(is_input_const); +} + +Status ModelBuilder::AdjustConstWeightSize(const ge::NodePtr &node, size_t &mem_offset) { + GE_CHECK_NOTNULL(node); + if (node->GetType() == CONSTANT) { + vector weights = OpDescUtils::MutableWeights(node); + if (weights.empty()) { + GELOGE(FAILED, "weights size of node %s is empty", node->GetName().c_str()); + return 
FAILED; + } + GeTensorPtr weight = weights[0]; + if (weight == nullptr) { + GELOGE(FAILED, "weights[0] is null."); + return FAILED; + } + GeTensorDesc &tensor_desc = weight->MutableTensorDesc(); + size_t output_size = weight->GetData().size(); + TensorUtils::SetDataOffset(tensor_desc, mem_offset); + mem_offset += output_size; + } + return SUCCESS; +} + +Status ModelBuilder::SetInputOutputDesc() { + Status ret; + GELOGI("Start to SetInputOutputDesc."); + + for (const ge::NodePtr &n : compute_graph_->GetDirectNode()) { + auto node_op_desc = n->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue); + + if (!is_loop_graph_ && node_op_desc->GetType() == LOOPCOND) { + is_loop_graph_ = true; + } + + if (node_op_desc->GetType() == DATA_TYPE || node_op_desc->GetType() == AIPP_DATA_TYPE) { + GELOGD("Data node: %s.", n->GetName().c_str()); + continue; + } + + GE_IF_BOOL_EXEC(n->GetInAllNodes().empty() && n->GetOutAllNodes().empty(), continue;); + + SetInputIsConst(n); + if (IsGeLocalOp(n->GetOpDesc())) { + GE_CHK_STATUS_RET(CalcOutputSize(n), "Calculate output size failed"); + } + ret = AdjustConstWeightSize(n, weight_offset_); + GE_CHK_STATUS_RET(ret, "AdjustConstWeightSize failed"); + + GE_IF_BOOL_EXEC(((weight_offset_ > 0) && (weight_offset_ % kMemAlignSize != 0)), + weight_offset_ = (weight_offset_ + kMemAlignSize - 1) / kMemAlignSize * kMemAlignSize); + } + GE_CHK_STATUS_RET(compute_graph_->TopologicalSorting(), "TopologicalSorting failed"); + return SUCCESS; +} + +Status ModelBuilder::AssignMemory() { + std::unique_ptr mem_assigner(new (std::nothrow) ge::MemoryAssigner(compute_graph_)); + if (mem_assigner == nullptr) { + GELOGE(FAILED, "new memory allocator failed."); + return FAILED; + } + if (mem_assigner->AssignMemory(is_loop_graph_, mem_offset_) != SUCCESS) { + GELOGE(FAILED, "memory allocator failed."); + return FAILED; + } + return SUCCESS; +} + +void ModelBuilder::AddNodeInputProperty() { + for (const ge::NodePtr &node : 
compute_graph_->GetDirectNode()) { + auto node_op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, GELOGW("node_op_desc is nullptr!"); return); + vector src_name_list; + vector src_index_list; + for (const auto &in_data_anchor : node->GetAllInDataAnchors()) { + auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, GELOGW("peer_out_anchor is nullptr!"); continue); + GE_IF_BOOL_EXEC(node_op_desc->HasAttr(MERGE_PRENODE_FLAG), continue); + + ge::NodePtr src_node = peer_out_anchor->GetOwnerNode(); + src_name_list.emplace_back(src_node->GetName()); + src_index_list.emplace_back(peer_out_anchor->GetIdx()); + } + auto in_control_anchor = node->GetInControlAnchor(); + if (in_control_anchor != nullptr) { + string src_name_temp; + for (const auto &out_control_anchor : in_control_anchor->GetPeerOutControlAnchors()) { + ge::NodePtr src_node = out_control_anchor->GetOwnerNode(); + src_name_temp = src_name_temp.empty() ? src_node->GetName() : src_name_temp + ":" + src_node->GetName(); + } + GE_IF_BOOL_EXEC(!src_name_temp.empty(), src_name_list.emplace_back(src_name_temp);) + } + node_op_desc->SetSrcName(src_name_list); + node_op_desc->SetSrcIndex(src_index_list); + } + + for (const ge::NodePtr &node : compute_graph_->GetDirectNode()) { + auto node_op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, GELOGW("node_op_desc is nullptr!"); return); + GE_IF_BOOL_EXEC(node_op_desc->GetType() == NETOUTPUT, continue); + auto out_control_anchor = node->GetOutControlAnchor(); + GE_IF_BOOL_EXEC(out_control_anchor == nullptr, GELOGW("out_control_anchor is nullptr"); return); + vector dst_name_list; + vector dst_index_list; + string dst_name_temp; + for (const auto &in_control_anchor : out_control_anchor->GetPeerInControlAnchors()) { + ge::NodePtr dst_node = in_control_anchor->GetOwnerNode(); + dst_name_temp = dst_name_temp.empty() ? 
dst_node->GetName() : dst_name_temp + ":" + dst_node->GetName(); + } + GE_IF_BOOL_EXEC(!dst_name_temp.empty(), dst_name_list.emplace_back(dst_name_temp)); + + GE_IF_BOOL_EXEC(!out_control_anchor->GetPeerInControlAnchors().empty(), + dst_index_list.emplace_back(kInvalidIndexNum)); + + for (const auto &out_data_anchor : node->GetAllOutDataAnchors()) { + GE_IF_BOOL_EXEC(node_op_desc->HasAttr(MERGE_PRENODE_FLAG), break); + dst_name_temp = ""; + int64_t dst_index = kWrongIndex; // assign an impossible value to dst_index. + for (const auto &in_data_anchor : out_data_anchor->GetPeerInDataAnchors()) { + GE_IF_BOOL_EXEC(in_data_anchor == nullptr, GELOGW("in_data_anchor is nullptr"); return); + ge::NodePtr dst_node = in_data_anchor->GetOwnerNode(); + dst_name_temp = dst_name_temp.empty() ? dst_node->GetName() : dst_name_temp + ":" + dst_node->GetName(); + dst_index = in_data_anchor->GetIdx(); + } + GE_IF_BOOL_EXEC(dst_index != kWrongIndex, dst_index_list.emplace_back(dst_index)); // not found + GE_IF_BOOL_EXEC(!dst_name_temp.empty(), dst_name_list.emplace_back(dst_name_temp)); + } + node_op_desc->SetDstName(dst_name_list); + node_op_desc->SetDstIndex(dst_index_list); + } +} + +Status ModelBuilder::AdjustInputTensorFlag() { + GELOGI("Start to AdjustInputTensorFlag."); + for (const ge::NodePtr &n : compute_graph_->GetDirectNode()) { + if ((n->GetType() == DATA_TYPE) || (n->GetType() == AIPP_DATA_TYPE)) { + GELOGD("Data node: %s.", n->GetName().c_str()); + for (const auto &anchor : n->GetAllOutDataAnchors()) { + for (const auto &in_anchors : anchor->GetPeerInDataAnchors()) { + GE_IF_BOOL_EXEC(in_anchors == nullptr, continue); + auto owner_node = in_anchors->GetOwnerNode(); + auto owner_node_op_desc = owner_node->GetOpDesc(); + GE_IF_BOOL_EXEC(owner_node_op_desc == nullptr, continue); + auto input_desc = owner_node_op_desc->GetInputDesc(in_anchors->GetIdx()); + ge::TensorUtils::SetInputTensor(input_desc, true); + if (owner_node_op_desc->UpdateInputDesc(in_anchors->GetIdx(), 
input_desc) != SUCCESS) { + GELOGE(FAILED, "UpdateOutputDesc failed."); + return FAILED; + } + } + } + } + } + return SUCCESS; +} + +Status ModelBuilder::BuildModelDef(ge::Model &model) { + ClearOriginalFormat(); + + max_mem_offset_ = mem_offset_; + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetInt(&model, ATTR_MODEL_MEMORY_SIZE, max_mem_offset_), + GELOGE(FAILED, "SetInt of ATTR_MODEL_MEMORY_SIZE failed."); + return FAILED); + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetInt(&model, ATTR_MODEL_STREAM_NUM, stream_num_), + GELOGE(FAILED, "SetInt of ATTR_MODEL_STREAM_NUM failed."); + return FAILED); + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetInt(&model, ATTR_MODEL_WEIGHT_SIZE, weight_offset_), + GELOGE(FAILED, "SetInt of ATTR_MODEL_WEIGHT_SIZE failed."); + return FAILED); + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetInt(&model, ATTR_MODEL_EVENT_NUM, event_num_), + GELOGE(FAILED, "SetInt of ATTR_MODEL_EVENT_NUM failed."); + return FAILED); + model.SetName(compute_graph_->GetName()); + model.SetGraph(ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph_)); + + GELOGI("weight_offset_: %zu", weight_offset_); + + GELOGI("Set event num: %ld.", event_num_); + + if (Params::Instance() == nullptr) { + return FAILED; + } + + platform_type_ = Params::Instance()->GetTarget_8bit(); + return SUCCESS; +} + +void ModelBuilder::ClearOriginalFormat() { + for (const ge::NodePtr &n : compute_graph_->GetDirectNode()) { + auto node_op_desc = n->GetOpDesc(); + if (node_op_desc != nullptr) { + if (node_op_desc->HasAttr(ATTR_NAME_FORMAT)) { + if (node_op_desc->DelAttr(ATTR_NAME_FORMAT) != SUCCESS) { + GELOGW("DelAttr ATTR_NAME_FORMAT failed."); + } + } + + GE_IF_BOOL_EXEC(node_op_desc->HasAttr(ATTR_NAME_INFERRED_FORMAT), + if (node_op_desc->DelAttr(ATTR_NAME_INFERRED_FORMAT) != SUCCESS) { + GELOGW("DelAttr ATTR_NAME_INFERRED_FORMAT failed."); + }); + + GE_IF_BOOL_EXEC(node_op_desc->HasAttr(ATTR_NAME_PRED_PERMUTE_DELETED), + if (node_op_desc->DelAttr(ATTR_NAME_PRED_PERMUTE_DELETED) != SUCCESS) { + GELOGW("DelAttr 
ATTR_NAME_PRED_PERMUTE_DELETED failed."); + }); + + GE_IF_BOOL_EXEC(node_op_desc->HasAttr(ATTR_NAME_IGNORE_PRED_FORMAT), + if (node_op_desc->DelAttr(ATTR_NAME_IGNORE_PRED_FORMAT) != SUCCESS) { + GELOGW("DelAttr ATTR_NAME_IGNORE_PRED_FORMAT failed."); + }); + } + } +} + +Status ModelBuilder::MergeWeights() { + if (weight_offset_ == 0) { + return SUCCESS; + } + + ge::Buffer buffer(weight_offset_); + weight_buffer_ = buffer; + auto base_addr = weight_buffer_.GetData(); + + for (const ge::NodePtr &node : compute_graph_->GetAllNodes()) { + auto op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, continue); + if (node->GetType() != CONSTANT) { + continue; + } + + // Get const op weight pointer + ge::GeTensorPtr weight = nullptr; + // If MutableTensor failed, weight is nullptr. + (void)ge::AttrUtils::MutableTensor(op_desc, ATTR_NAME_WEIGHTS, weight); + if (weight == nullptr) { + GELOGE(FAILED, "Can't get const op weight, name: %s", node->GetName().c_str()); + return FAILED; + } + + // Get const op weight offset + int64_t offset = 0; + if (ge::TensorUtils::GetDataOffset(weight->GetTensorDesc(), offset) != SUCCESS) { + GELOGW("Can't get const op offset, name: %s", node->GetName().c_str()); + continue; // continue to merge if can not get offset + } + + // Get const op weight data + auto weight_data = weight->MutableData(); + + // copy const op weight data to buffer + GELOGI("Move weight data to buffer, name: %s offset: %ld", node->GetName().c_str(), offset); + ge::TensorUtils::SetWeightSize(weight->MutableTensorDesc(), static_cast(weight_data.size())); + if ((offset == 0) || (weight_data.size() == 0)) { + GELOGI("Size or offset is 0. 
size: %lu offset: %ld", weight_data.size(), offset); + continue; + } + if (weight_data.data() != nullptr) { + GE_IF_BOOL_EXEC(base_addr == nullptr, GELOGE(FAILED, "Base addr is nullptr."); return FAILED); + GE_CHK_BOOL_EXEC( + memcpy_s(base_addr + offset, weight_offset_ - offset, weight_data.data(), weight_data.size()) == EOK, + return FAILED, "call memcpy_s failed."); + } + + weight_data.clear(); + } + + return SUCCESS; +} + +Status ModelBuilder::SaveDataToModel(ge::Model &model, ge::GeModel &ge_model) { + // Add weight + ge_model.SetWeight(weight_buffer_); + + // Add TBE Kernels + for (const ge::NodePtr &n : compute_graph_->GetDirectNode()) { + auto node_op_desc = n->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue); + TBEKernelPtr tbe_kernel = node_op_desc->TryGetExtAttr(ge::OP_EXTATTR_NAME_TBE_KERNEL, TBEKernelPtr()); + GE_IF_BOOL_EXEC(tbe_kernel == nullptr, continue); + tbe_kernel_store_.AddTBEKernel(tbe_kernel); + GELOGD("Add tbe kernel bin %s", tbe_kernel->GetName().c_str()); + } + if (!tbe_kernel_store_.Build()) { + GELOGE(FAILED, "TBE Kernels store build failed!"); + return FAILED; + } + ge_model.SetTBEKernelStore(tbe_kernel_store_); + + // Add task + GeAttrValue::BYTES task_def_bytes; + if (!AttrUtils::GetZeroCopyBytes(model, MODEL_ATTR_TASKS, task_def_bytes)) { + GELOGE(INTERNAL_ERROR, "Get zero copy bytes fail."); + return INTERNAL_ERROR; + } + int byte_size = static_cast(task_def_bytes.GetSize()); + std::shared_ptr task = ge::MakeShared(); + GE_CHECK_NOTNULL(task); + GE_CHK_BOOL_EXEC(ReadProtoFromArray(task_def_bytes.GetData(), byte_size, task.get()), return INTERNAL_ERROR, + "ReadProtoFromArray failed."); + ge_model.SetModelTaskDef(task); + + // Add graph + ge_model.SetName(model.GetName()); + ge_model.SetGraph(model.GetGraph()); + ge_model.SetVersion(model.GetVersion()); + ge_model.SetPlatformVersion(model.GetPlatformVersion()); + ge_model.SetPlatformType(platform_type_); + ge_model.SetAttr(model.MutableAttrMap()); + return SUCCESS; 
+} + +void ModelBuilder::SetModelVersion(ge::Model &model) { + // set framework_version TO model + string framework_version; + uint32_t counter = 0; + Status frame_rt = PlatformVersionManager::GetPlatformVersion(framework_version); + GE_IF_BOOL_EXEC((frame_rt == SUCCESS), + string model_framework_version = framework_version + "." + std::to_string(counter); + model.SetPlatformVersion(model_framework_version);); + + // set IR Version TO model + model.SetVersion(static_cast(OM_PROTO_VERSION)); +} + +Status ModelBuilder::PreBuildModel() { + if ((compute_graph_ == nullptr) || !(compute_graph_->IsValid())) { + GELOGE(FAILED, "Graph_ is not valid."); + return FAILED; + } + GELOGI("BuildModel begin."); + + GE_CHK_STATUS_RET(SetInputOutputDesc(), "SetInputOutputDesc Failed!"); + + AddNodeInputProperty(); + + return SUCCESS; +} + +Status ModelBuilder::BuildModelForGetTask(ge::Model &model) { + GE_CHK_STATUS_RET(AdjustInputTensorFlag(), "AdjustInputTensorFlag failed!"); + + // Assign logical streams. + StreamAllocator stream_allocator(compute_graph_, subgraphs_); + GE_CHK_STATUS_RET(stream_allocator.AssignLogicalStreams(stream_max_parallel_num_, hcom_parallel_), + "Assign logical streams failed."); + + GE_CHK_STATUS_RET(AssignMemory(), "Assign Memory Failed!"); + + // Compile single op in graph build stage + GE_CHK_STATUS_RET(CompileSingleOp(), "ATC builder CompileSingleOp() return fail."); + + // Refresh real streams and insert event nodes. 
+ GE_CHK_STATUS_RET(stream_allocator.RefreshRealStream(stream_num_, event_num_), "RefreshRealStream failed."); + + GE_CHK_STATUS_RET(MergeWeights(), "MergeWeights Failed!"); + GE_CHK_STATUS_RET(BuildModelDef(model), "BuildModelDef failed!"); + + SetModelVersion(model); + + return SUCCESS; +} + +ge::Buffer ModelBuilder::GetWeightBuffer() const { return weight_buffer_; } + +Status ModelBuilder::CompileSingleOp() { + GELOGD("Begin to compile single op."); + // Create ge instance + std::shared_ptr instance = ge::GELib::GetInstance(); + if ((instance == nullptr) || !instance->InitFlag()) { + GELOGE(ge::GE_CLI_GE_NOT_INITIALIZED, "CompileSingleOp failed."); + return ge::GE_CLI_GE_NOT_INITIALIZED; + } + + GE_TIMESTAMP_CALLNUM_START(CheckAccuracySupported); + GE_TIMESTAMP_CALLNUM_START(BatchCompileOp); + std::unordered_map> node_vector_map; + for (auto &node : compute_graph_->GetAllNodes()) { + auto op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + continue; + } + + // Graph build stage only supports the individual compilation of atomic clean operator + if (op_desc->GetType() == ATOMICADDRCLEAN) { + GELOGD("Begin to compile single op, op name is %s.", op_desc->GetName().c_str()); + string kernel_lib_name = op_desc->GetOpKernelLibName(); + if (kernel_lib_name.empty()) { + // Reset op kernel lib + (void)instance->DNNEngineManagerObj().GetDNNEngineName(op_desc); + kernel_lib_name = op_desc->GetOpKernelLibName(); + if (kernel_lib_name.empty()) { + GELOGE(ge::INTERNAL_ERROR, "Get node:%s(%s) kernel lib failed.", node->GetName().c_str(), + node->GetType().c_str()); + return ge::INTERNAL_ERROR; + } + } + + OpsKernelInfoStorePtr kernel_info = instance->OpsKernelManagerObj().GetOpsKernelInfoStore(kernel_lib_name); + if (kernel_info != nullptr) { + node_vector_map[kernel_lib_name].emplace_back(node); + } else { + GELOGE(ge::GE_GRAPH_PARAM_NULLPTR, "Get op %s ops kernel info store failed", node->GetName().c_str()); + return ge::GE_GRAPH_PARAM_NULLPTR; + } + } + } + for (auto 
&it : node_vector_map) { + auto &kernel_lib_name = it.first; + auto &node_vector = it.second; + OpsKernelInfoStorePtr kernel_info = instance->OpsKernelManagerObj().GetOpsKernelInfoStore(kernel_lib_name); + GE_CHECK_NOTNULL(kernel_info); + GE_TIMESTAMP_RESTART(BatchCompileOp); + auto ret = kernel_info->CompileOp(node_vector); + GEEVENT("[GEPERFTRACE] The node size of compile op of %s is %zu", kernel_lib_name.c_str(), node_vector.size()); + GE_TIMESTAMP_ADD(BatchCompileOp); + if (ret != ge::SUCCESS) { + GELOGE(ret, "Compile op failed, kernel lib name is %s", kernel_lib_name.c_str()); + return ret; + } + } + GE_TIMESTAMP_CALLNUM_END(BatchCompileOp, "GraphBuild::CompileOp"); + GE_TIMESTAMP_CALLNUM_END(CheckAccuracySupported, "GraphBuild::CheckAccuracySupported"); + return ge::SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/model_builder.h b/src/ge/graph/build/model_builder.h new file mode 100644 index 00000000..1121a31d --- /dev/null +++ b/src/ge/graph/build/model_builder.h @@ -0,0 +1,109 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_BUILD_MODEL_BUILDER_H_ +#define GE_GRAPH_BUILD_MODEL_BUILDER_H_ + +#include +#include +#include +#include +#include +#include + +#include "common/op/ge_op_utils.h" +#include "common/tbe_kernel_store.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/compute_graph.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/model.h" +#include "graph/node.h" +#include "model/ge_model.h" +#include "omg/omg_inner_types.h" + +namespace ge { +class ModelBuilder { + public: + ModelBuilder(ge::ComputeGraphPtr whole_graph, const std::vector &subgraphs, + const std::map &stream_max_parallel_num, bool hcom_parallel, + int mode = static_cast(domi::BuildMode::GEN_TASK_WITHOUT_FUSION)); + + ModelBuilder(const ModelBuilder &) = delete; + + ModelBuilder &operator=(const ModelBuilder &op) = delete; + + ~ModelBuilder(); + + Status SaveDataToModel(ge::Model &model, ge::GeModel &ge_model); + Status PreBuildModel(); + Status BuildModelForGetTask(ge::Model &model_def); + + ge::Buffer GetWeightBuffer() const; + + protected: + Status AssignMemory(); + + void AddNodeInputProperty(); + + void ClearOriginalFormat(); + + Status MergeWeights(); + + private: + void SetInputIsConst(const ge::NodePtr &n); + + void SetModelVersion(ge::Model &model); + + Status CalcOutputSize(const ge::NodePtr &n); + + Status AdjustConstWeightSize(const ge::NodePtr &node, size_t &mem_offset); + + Status SetInputOutputDesc(); + + Status AdjustInputTensorFlag(); + + Status BuildModelDef(ge::Model &model_def); + + Status CompileSingleOp(); + + size_t mem_offset_; + + size_t weight_offset_; + + ge::ComputeGraphPtr compute_graph_; + + const std::vector &subgraphs_; + + int64_t stream_num_; + + int64_t event_num_; + + ge::Buffer weight_buffer_; + + std::map stream_max_parallel_num_; + bool hcom_parallel_; + + int build_mode_; + size_t max_mem_offset_; + + TBEKernelStore tbe_kernel_store_; + + uint8_t platform_type_; + bool is_loop_graph_; +}; +} // namespace ge 
+#endif // GE_GRAPH_BUILD_MODEL_BUILDER_H_ diff --git a/src/ge/graph/build/optimize_stream_graph.cc b/src/ge/graph/build/optimize_stream_graph.cc new file mode 100644 index 00000000..7798d1e6 --- /dev/null +++ b/src/ge/graph/build/optimize_stream_graph.cc @@ -0,0 +1,143 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/optimize_stream_graph.h" + +#include +#include +#include +#include +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/tensor_utils.h" +#include "init/gelib.h" + +using std::vector; + +namespace { +static const int64_t kInvalidStream = -1; +} // namespace +namespace ge { +OptimizeStreamGraph::~OptimizeStreamGraph() {} + +void OptimizeStreamGraph::RefreshNodeId(const ComputeGraphPtr &comp_graph, vector &subgraph_infos) { + size_t node_size = comp_graph->GetDirectNodesSize(); + GELOGI("Refresh placeholder and end nodeId start from node num: %zu", node_size); + for (const auto &sub_graph_info : subgraph_infos) { + ComputeGraphPtr sub_graph = sub_graph_info->GetSubGraph(); + if (sub_graph == nullptr) { + continue; + } + for (ge::NodePtr &node : sub_graph->GetAllNodes()) { + GE_CHECK_NOTNULL_EXEC(node->GetOpDesc(), return); + if ((node->GetType() == END) || (node->GetType() == PLACEHOLDER)) { + node->GetOpDesc()->SetId(static_cast(node_size)); + GELOGI("Refresh node %s nodeId: %ld", 
node->GetName().c_str(), node->GetOpDesc()->GetId()); + node_size++; + } + } + } +} + +bool OptimizeStreamGraph::IsSameStreamId(const ComputeGraphPtr &comp_graph) { + if (comp_graph == nullptr) { + return false; + } + std::set stream_set; + for (const ge::NodePtr &cur_node : comp_graph->GetAllNodes()) { + GE_IF_BOOL_EXEC(cur_node->GetOpDesc() == nullptr, continue); + int64_t stream_id = cur_node->GetOpDesc()->GetStreamId(); + if (stream_id == kInvalidStream) { + continue; + } + GELOGD("Node %s in subgraph %s stream id is: %ld, node num: %zu", cur_node->GetName().c_str(), + comp_graph->GetName().c_str(), stream_id, comp_graph->GetDirectNodesSize()); + stream_set.insert(stream_id); + } + if (stream_set.size() > 1) { + GELOGI("Nodes of graph: %s have different stream id, node num: %zu, different stream num: %zu.", + comp_graph->GetName().c_str(), comp_graph->GetDirectNodesSize(), stream_set.size()); + return false; + } + return true; +} + +Status OptimizeStreamGraph::OptimizeStreamedSubGraph(const ComputeGraphPtr &comp_graph, + vector &subgraph_infos, + struct RunContext &run_context) { + Status ret = SUCCESS; + GELOGI("Begin to Get optimize streamed subgraph."); + + RefreshNodeId(comp_graph, subgraph_infos); + + std::shared_ptr instance = ge::GELib::GetInstance(); + GE_CHECK_NOTNULL(instance); + + for (auto &sub_graph_info : subgraph_infos) { + ComputeGraphPtr sub_graph = sub_graph_info->GetSubGraph(); + if (sub_graph == nullptr) { + continue; + } + + std::string engine_name = sub_graph_info->GetEngineName(); + + vector graph_optimizers; + if (instance->DNNEngineManagerObj().IsEngineRegistered(engine_name)) { + instance->OpsKernelManagerObj().GetGraphOptimizerByEngine(engine_name, graph_optimizers); + GELOGI("Subgraph: %s start optimize streamed graph. 
engineName: %s, subgraph num: %zu, graph Optimizer num: %zu.", + sub_graph->GetName().c_str(), engine_name.c_str(), subgraph_infos.size(), graph_optimizers.size()); + + auto nodes = sub_graph->GetAllNodes(); + if (nodes.empty()) { + continue; + } + if (!IsSameStreamId(sub_graph)) { + GELOGI("There are more than one stream in subgraph %s", sub_graph->GetName().c_str()); + continue; + } + OpDescPtr op_desc = nodes.at(0)->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + int64_t stream_id = op_desc->GetStreamId(); + if (static_cast(stream_id) >= run_context.graphStreamList.size()) { + GELOGE(FAILED, "stream_id is bigger than run_context.graphStreamList.size()"); + return FAILED; + } + run_context.stream = run_context.graphStreamList[stream_id]; + GELOGD("Subgraph has same stream id, subgraph: %s, engine_name: %s, stream_id: %ld, rtstream: %lu.", + sub_graph->GetName().c_str(), engine_name.c_str(), stream_id, + static_cast(reinterpret_cast(run_context.stream))); + for (auto iter = graph_optimizers.begin(); iter != graph_optimizers.end(); ++iter) { + GE_CHECK_NOTNULL(*iter); + ret = (*iter)->OptimizeStreamGraph(*sub_graph, run_context); + if (ret != SUCCESS) { + GELOGE(ret, + "[optimizeStreamedSubGraph]: optimize streamed subgraph failed, subgraph: %s, engine_name: %s, graph " + "Optimizer num: %zu, ret: %u", + sub_graph->GetName().c_str(), engine_name.c_str(), graph_optimizers.size(), ret); + return ret; + } + GELOGI( + "[optimizeStreamedSubGraph]: optimize streamed subgraph success, subgraph: %s, engine_name: %s, graph " + "Optimizer num: %zu!", + sub_graph->GetName().c_str(), engine_name.c_str(), graph_optimizers.size()); + } + } + } + + return ret; +} +} // namespace ge diff --git a/src/ge/graph/build/optimize_stream_graph.h b/src/ge/graph/build/optimize_stream_graph.h new file mode 100644 index 00000000..03f4006a --- /dev/null +++ b/src/ge/graph/build/optimize_stream_graph.h @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under 
the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_BUILD_OPTIMIZE_STREAM_GRAPH_H_ +#define GE_GRAPH_BUILD_OPTIMIZE_STREAM_GRAPH_H_ + +#include + +#include "common/ge_inner_error_codes.h" +#include "common/opskernel/ops_kernel_info_types.h" +#include "framework/common/types.h" +#include "graph/compute_graph.h" +#include "graph/manager/graph_manager_utils.h" + +namespace ge { +class OptimizeStreamGraph { + public: + OptimizeStreamGraph() = default; + + OptimizeStreamGraph(const OptimizeStreamGraph &) = delete; + + OptimizeStreamGraph &operator=(const OptimizeStreamGraph &) = delete; + + virtual ~OptimizeStreamGraph(); + + Status OptimizeStreamedSubGraph(const ComputeGraphPtr &comp_graph, std::vector &subgraph_ptr_list, + struct RunContext &run_context); + + private: + void RefreshNodeId(const ComputeGraphPtr &comp_graph, std::vector &subgraph_ptr_list); + + bool IsSameStreamId(const ComputeGraphPtr &comp_graph); +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_OPTIMIZE_STREAM_GRAPH_H_ diff --git a/src/ge/graph/build/run_context.cc b/src/ge/graph/build/run_context.cc new file mode 100644 index 00000000..a52d7dba --- /dev/null +++ b/src/ge/graph/build/run_context.cc @@ -0,0 +1,147 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/run_context.h" + +#include "framework/common/debug/ge_log.h" +#include "common/util.h" +#include "graph/debug/ge_attr_define.h" + +namespace ge { +RunContextUtil::~RunContextUtil() { DestroyRtModelStreamAndEvents(); } + +Status RunContextUtil::InitMemInfo(uint8_t *data_mem_base, uint64_t data_mem_size, uint8_t *weight_mem_base, + uint64_t weight_mem_size) { + if ((data_mem_size > 0) && (data_mem_base == nullptr)) { + GELOGE(PARAM_INVALID, "InitMemInfo param data_mem_base is null but data_mem_size = %lu.", data_mem_size); + return PARAM_INVALID; + } + if ((weight_mem_size > 0) && (weight_mem_base == nullptr)) { + GELOGE(PARAM_INVALID, "InitMemInfo param weight_mem_base is null but weight_mem_size = %lu.", weight_mem_size); + return PARAM_INVALID; + } + data_mem_base_ = data_mem_base; + data_mem_size_ = data_mem_size; + weight_mem_base_ = weight_mem_base; + weight_mem_size_ = weight_mem_size; + return SUCCESS; +} + +Status RunContextUtil::CreateRtModelStreamsAndEvents(uint32_t stream_num, uint32_t event_num) { + // Create rt model + rtError_t rt_ret = rtModelCreate(&rt_model_, 0); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtModelCreate failed. rt_ret = %d", static_cast(rt_ret)); + return RT_FAILED; + } + + // Create rt Stream and bind with model + for (uint32_t i = 0; i < stream_num; ++i) { + rtStream_t stream = nullptr; + rt_ret = rtStreamCreate(&stream, 0); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtStreamCreate failed. 
rt_ret = %d, index = %u", static_cast(rt_ret), i); + return RT_FAILED; + } + stream_list_.emplace_back(stream); + + rt_ret = rtModelBindStream(rt_model_, stream, 0); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Bind stream and model failed. rt_ret = %d, index = %u", static_cast(rt_ret), i); + return RT_FAILED; + } + } + + // Create rt event + for (uint32_t i = 0; i < event_num; ++i) { + rtEvent_t event = nullptr; + rt_ret = rtEventCreate(&event); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtEventCreate failed. rt_ret = %d, index = %u", static_cast(rt_ret), i); + return RT_FAILED; + } + event_list_.emplace_back(event); + } + return SUCCESS; +} + +void RunContextUtil::DestroyRtModelStreamAndEvents() noexcept { + rtError_t rt_ret; + for (size_t i = 0; i < stream_list_.size(); i++) { + // Unbind stream to model first + (void)rtModelUnbindStream(rt_model_, stream_list_[i]); + rt_ret = rtStreamDestroy(stream_list_[i]); + if (rt_ret != RT_ERROR_NONE) { + GELOGW("Destroy stream failed. rt_ret = %d, index = %zu.", static_cast(rt_ret), i); + } + } + stream_list_.clear(); + + for (size_t i = 0; i < event_list_.size(); i++) { + rt_ret = rtEventDestroy(event_list_[i]); + if (rt_ret != RT_ERROR_NONE) { + GELOGW("Destroy event failed. rt_ret = %d, index = %zu.", static_cast(rt_ret), i); + } + } + event_list_.clear(); + + if (rt_model_ != nullptr) { + rt_ret = rtModelDestroy(rt_model_); + if (rt_ret != RT_ERROR_NONE) { + GELOGW("Destroy rt model failed. rt_ret = %d.", static_cast(rt_ret)); + } + rt_model_ = nullptr; + } +} + +Status RunContextUtil::CreateRunContext(Model &model, const ComputeGraphPtr &graph, Buffer &buffer, + const uint64_t session_id) { + GELOGI("Begin to Create RunContext, session_id = %lu", session_id); + // check params + if (graph == nullptr) { + GELOGE(PARAM_INVALID, "CreateRunContext param graph is null. 
session_id=%lu", session_id); + return PARAM_INVALID; + } + + uint32_t stream_num = 0; + if (!AttrUtils::GetInt(&model, ATTR_MODEL_STREAM_NUM, stream_num)) { + GELOGE(INTERNAL_ERROR, "Get stream_num attr from model_def failed. session_id=%lu", session_id); + return INTERNAL_ERROR; + } + GELOGI("Stream_num = %u", stream_num); + + uint32_t event_num = 0; + if (!AttrUtils::GetInt(&model, ATTR_MODEL_EVENT_NUM, event_num)) { + GELOGE(INTERNAL_ERROR, "Get event_num attr from model failed. session_id=%lu", session_id); + return INTERNAL_ERROR; + } + GELOGI("Event_num = %u", event_num); + + Status ret = CreateRtModelStreamsAndEvents(stream_num, event_num); + if (ret != SUCCESS) { + GELOGE(ret, "CreateRtModelStreamsAndEvents failed. session_id=%lu", session_id); + DestroyRtModelStreamAndEvents(); + return ret; + } + + run_context_ = {rt_model_, nullptr, session_id, data_mem_size_, data_mem_base_, + weight_mem_size_, weight_mem_base_, buffer, stream_list_, event_list_}; + + return SUCCESS; +} + +RunContext &RunContextUtil::GetRunContext() { return run_context_; } +} // namespace ge diff --git a/src/ge/graph/build/run_context.h b/src/ge/graph/build/run_context.h new file mode 100644 index 00000000..688ce83f --- /dev/null +++ b/src/ge/graph/build/run_context.h @@ -0,0 +1,66 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_BUILD_RUN_CONTEXT_H_ +#define GE_GRAPH_BUILD_RUN_CONTEXT_H_ + +#include + +#include "common/ge_inner_error_codes.h" +#include "common/opskernel/ops_kernel_info_types.h" +#include "framework/common/types.h" +#include "graph/compute_graph.h" +#include "graph/model.h" +#include "runtime/rt.h" + +namespace ge { +class RunContextUtil { + public: + RunContextUtil() = default; + + virtual ~RunContextUtil(); + + // Init mem info. + Status InitMemInfo(uint8_t *data_mem_base, uint64_t data_mem_size, uint8_t *weight_mem_base, + uint64_t weight_mem_size); + + Status CreateRunContext(Model &model_def, const ComputeGraphPtr &graph, Buffer &buffer, + const uint64_t session_id); + + RunContext &GetRunContext(); + + RunContext run_context_; + + private: + // Create Rt model/stream/event for task generate + Status CreateRtModelStreamsAndEvents(uint32_t stream_num, uint32_t event_num); + + // Destroy Rt model/stream/event + void DestroyRtModelStreamAndEvents() noexcept; + + // Model + rtModel_t rt_model_ = nullptr; + std::vector stream_list_; + std::vector event_list_; + + // Mem info + uint8_t *data_mem_base_ = nullptr; + uint64_t data_mem_size_ = 0; + uint8_t *weight_mem_base_ = nullptr; + uint64_t weight_mem_size_ = 0; +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_RUN_CONTEXT_H_ diff --git a/src/ge/graph/build/stream_allocator.cc b/src/ge/graph/build/stream_allocator.cc new file mode 100644 index 00000000..19816d69 --- /dev/null +++ b/src/ge/graph/build/stream_allocator.cc @@ -0,0 +1,944 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/stream_allocator.h" + +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/fmk_error_codes.h" +#include "framework/common/types.h" +#include "graph/build/logical_stream_allocator.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/graph_utils.h" +#include "init/gelib.h" + +using std::string; +using std::vector; +using std::map; +using std::set; + +namespace { +const int64_t kMaxNodeNumInNormalStream = 350; +const int64_t kMaxNodeNumInHcomStream = 5; + +const uint32_t kMaxSwitchStreamNum = 1; +} // namespace + +namespace ge { +Status StreamAllocator::AssignLogicalStreams(const std::map &max_parallel_num, bool hcom_parallel) { + GELOGI("AssignLogicalStreams start."); + GE_CHECK_NOTNULL(whole_graph_); + GraphUtils::DumpGEGraph(whole_graph_, "BeforeAssignedLogicalStreams_whole_graph"); + GraphUtils::DumpGEGraphToOnnx(*whole_graph_, "BeforeAssignedLogicalStreams_whole_graph"); + + auto gelib = GELib::GetInstance(); + if (gelib == nullptr) { + GELOGE(FAILED, "Get GELib instance failed."); + return FAILED; + } + + const map &scheduler_confs = gelib->DNNEngineManagerObj().GetSchedulers(); + + LogicalStreamAllocator logical_allocator(scheduler_confs, max_parallel_num, hcom_parallel); + Status status = logical_allocator.Assign(whole_graph_, subgraphs_, stream_num_); + if (status != SUCCESS) { + GELOGE(status, "Assign logical streams failed."); + return status; + } + + GraphUtils::DumpGEGraph(whole_graph_, "AfterAssignedLogicalStreams_whole_graph"); + 
GraphUtils::DumpGEGraphToOnnx(*whole_graph_, "AfterAssignedLogicalStreams_whole_graph"); + GELOGI("AssignLogicalStreams success."); + + return SUCCESS; +} + +// After allocating the logical stream in the graph, refresh the stream in the +// graph and insert the synchronization node. +Status StreamAllocator::RefreshRealStream(int64_t &stream_num, int64_t &event_num) { + GELOGI("RefreshRealStream start."); + GE_CHECK_NOTNULL(whole_graph_); + Status status = ActiveStreamsBySpecificLabels(); + if (status != SUCCESS) { + GELOGE(status, "ActiveStreams failed!"); + return status; + } + + status = InsertSyncEvents(); + if (status != SUCCESS) { + GELOGE(status, "InsertSyncEventId failed!"); + return status; + } + + status = OptimizeSyncEvents(); + if (status != SUCCESS) { + GELOGE(status, "OptimizeSyncEventId failed!"); + return status; + } + + status = SplitStreams(); + if (status != SUCCESS) { + GELOGE(status, "SplitStreams failed!"); + return status; + } + + status = ActiveStreamsForLoop(); + if (status != SUCCESS) { + GELOGE(status, "ActiveStreamsForLoop failed!"); + return status; + } + + status = AddActiveEntryStream(); + if (status != SUCCESS) { + GELOGE(status, "AddActiveEntryStream failed!"); + return status; + } + + status = RefreshContinuousEvents(); + if (status != SUCCESS) { + GELOGE(status, "RefreshContinuousEvents failed!"); + return status; + } + + status = InsertSyncEventNodes(); + if (status != SUCCESS) { + GELOGE(status, "InsertSyncEventNode failed!"); + return status; + } + + DumpEvents(); + GraphUtils::DumpGEGraph(whole_graph_, "RefreshRealStream"); + GraphUtils::DumpGEGraphToOnnx(*whole_graph_, "RefreshRealStream"); + + for (const NodePtr &node : whole_graph_->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + auto stream_id = node->GetOpDesc()->GetStreamId(); + if (stream_id == kInvalidStream) { + node->GetOpDesc()->SetStreamId(0); + } + } + + if (stream_num_ == 0) { + GELOGI("None of nodes need to assign stream, stream num is 0, it will 
cause error, so change it to 1"); + stream_num_ = 1; + } + GELOGI("stream_num_: %ld, event_num_: %u.", stream_num_, event_num_); + GELOGI("RefreshRealStream successfully."); + + stream_num = stream_num_; + event_num = static_cast(event_num_); + + return SUCCESS; +} + +// Split the stream according to the maximum number of nodes in the stream. +Status StreamAllocator::SplitStreams() { + if (stream_num_ == 0) { + GELOGI("stream_num_ is 0"); + return SUCCESS; + } + + // stream_node_num_vec records the number of all nodes on each stream + // added_stream_num_vec records the number of streams that each stream needs to increase + // new_stream_id_vec records the new physical stream id for each stream + vector stream_node_num_vec(stream_num_); + vector added_stream_num_vec(stream_num_); + vector new_stream_id_vec(stream_num_); + vector pre_node_vec(stream_num_); + vector> split_streams(stream_num_); + + int64_t last_stream_id = stream_num_ - 1; + for (auto i = 0; i <= last_stream_id; i++) { + stream_node_num_vec[i] = 0; + added_stream_num_vec[i] = 0; + new_stream_id_vec[i] = i; + pre_node_vec[i] = nullptr; + } + + for (const auto &cur_node : whole_graph_->GetDirectNode()) { + GE_CHECK_NOTNULL(cur_node->GetOpDesc()); + int64_t stream_id = cur_node->GetOpDesc()->GetStreamId(); + if (stream_id == kInvalidStream) { + continue; + } + if (stream_id > last_stream_id) { + GELOGE(FAILED, "SplitStreams:streamid(%ld) > last_stream_id(%ld)", stream_id, last_stream_id); + return FAILED; + } + stream_node_num_vec[stream_id]++; + + // The maximum number of tasks per stream. + int64_t max_node_num_one_stream = kMaxNodeNumInNormalStream; + const string op_type = cur_node->GetType(); + if ((op_type == HCOMBROADCAST) || (op_type == HCOMALLGATHER) || (op_type == HCOMALLREDUCE) || + (op_type == HCOMREDUCESCATTER)) { + max_node_num_one_stream = kMaxNodeNumInHcomStream; + } + + // Split the stream if it exceeds the maximum number of nodes in the stream. 
+ if (stream_node_num_vec[stream_id] > max_node_num_one_stream) { + last_stream_id++; + GELOGI( + "stream_node_num_vec[%ld]= %ld > max_node_num_one_stream : %ld, " + "It's time to split the stream, split newly-added stream id is %ld", + stream_id, stream_node_num_vec[stream_id], max_node_num_one_stream, last_stream_id); + + stream_node_num_vec[stream_id] = 1; + added_stream_num_vec[stream_id]++; + new_stream_id_vec[stream_id] = last_stream_id; + split_streams[stream_id].emplace(last_stream_id); + + // Add the send/recv event to the first and last nodes of the split stream. + NodePtr pre_node = pre_node_vec[stream_id]; + if (pre_node != nullptr) { + GELOGI("Add send event %u for node %s", event_num_, pre_node->GetName().c_str()); + GELOGI("Add recv event %u for node %s", event_num_, cur_node->GetName().c_str()); + AddSendEventId(pre_node, event_num_); + AddRecvEventId(cur_node, event_num_); + ++event_num_; + } + } + + /// If the split stream num is greater than 1, the node behind the same + /// stream must reset the new stream id. 
+ if (added_stream_num_vec[stream_id] >= 1) { + cur_node->GetOpDesc()->SetStreamId(new_stream_id_vec[stream_id]); + } + + pre_node_vec[stream_id] = cur_node; + } + + if (last_stream_id >= 0) { + stream_num_ = last_stream_id + 1; + } + + return UpdateActiveStreams(split_streams); +} + +Status StreamAllocator::UpdateActiveStreams(vector> &split_streams) { + for (const auto &node : whole_graph_->GetDirectNode()) { + vector active_streams; + GE_CHECK_NOTNULL(node->GetOpDesc()); + if (AttrUtils::GetListInt(node->GetOpDesc(), ATTR_NAME_ACTIVE_STREAM_LIST, active_streams)) { + vector new_active_streams = active_streams; + for (const uint32_t logical_stream : active_streams) { + if (static_cast(logical_stream) >= split_streams.size()) { + GELOGE(FAILED, "logical stream is out of range."); + return FAILED; + } + const set &new_split_streams = split_streams[logical_stream]; + if (!new_split_streams.empty()) { + for (int64_t split_stream : new_split_streams) { + specific_activated_streams_.emplace(split_stream); + new_active_streams.emplace_back(static_cast(split_stream)); + } + } + } + if (!AttrUtils::SetListInt(node->GetOpDesc(), ATTR_NAME_ACTIVE_STREAM_LIST, new_active_streams)) { + GELOGE(FAILED, "UpdateActiveStreams failed, node name : (%s).", node->GetName().c_str()); + return FAILED; + } + } + } + return SUCCESS; +} + +Status StreamAllocator::ActiveStreamsBySpecificLabels() { + // > + map> labeled_streams; + for (const auto &node : whole_graph_->GetDirectNode()) { + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + string stream_label; + if (AttrUtils::GetStr(op_desc, ATTR_NAME_STREAM_LABEL, stream_label) && !stream_label.empty()) { + int64_t stream_id = op_desc->GetStreamId(); + if (stream_id != kInvalidStream) { + labeled_streams[stream_label].emplace(stream_id); + } + } + } + + for (const auto &node : whole_graph_->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + vector activated_label_list; + if 
(!AttrUtils::GetListStr(node->GetOpDesc(), ATTR_NAME_ACTIVE_LABEL_LIST, activated_label_list) || + activated_label_list.empty()) { + continue; + } + + vector activated_stream_list; + for (string &activated_label : activated_label_list) { + specific_activated_labels_[activated_label].emplace(node); + for (int64_t activated_stream : labeled_streams[activated_label]) { + activated_stream_list.push_back(static_cast(activated_stream)); + specific_activated_streams_.emplace(activated_stream); + specific_activated_streams_nodes_map_[activated_stream].emplace(node); + GELOGI("Node %s active stream %ld by %s.", node->GetName().c_str(), activated_stream, activated_label.c_str()); + } + } + GE_CHK_BOOL_EXEC(AttrUtils::SetListInt(node->GetOpDesc(), ATTR_NAME_ACTIVE_STREAM_LIST, activated_stream_list), + GELOGE(FAILED, "SetListInt failed."); + return FAILED); + } + + return SUCCESS; +} + +Status StreamAllocator::ActiveStreamsForLoop() { + vector loop_active_streams; + for (int64_t stream_id = 0; stream_id < stream_num_; stream_id++) { + if (specific_activated_streams_.count(stream_id) == 0) { + loop_active_streams.emplace_back(static_cast(stream_id)); + } + } + // Set the stream that needs to be activated + for (const auto &node : whole_graph_->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + bool is_loop_active = false; + if (AttrUtils::GetBool(node->GetOpDesc(), ATTR_NAME_IS_LOOP_ACTIVE, is_loop_active) && is_loop_active) { + vector activated_label_list; + if (!AttrUtils::GetListStr(node->GetOpDesc(), ATTR_NAME_ACTIVE_LABEL_LIST, activated_label_list) || + activated_label_list.empty()) { + GE_CHK_BOOL_EXEC(AttrUtils::SetListInt(node->GetOpDesc(), ATTR_NAME_ACTIVE_STREAM_LIST, loop_active_streams), + GELOGE(FAILED, "SetListInt failed."); + return FAILED); + for (const auto &stream_id : loop_active_streams) { + GELOGI("Active stream %u for node: %s", stream_id, node->GetName().c_str()); + } + + break; + } + } + } + + return CheckStreamActived(); +} + +Status 
StreamAllocator::CheckStreamActived() const { + for (const auto &node : whole_graph_->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + vector active_streams; + if (AttrUtils::GetListInt(node->GetOpDesc(), ATTR_NAME_ACTIVE_STREAM_LIST, active_streams)) { + uint32_t stream_id = static_cast(node->GetOpDesc()->GetStreamId()); + auto iter = find(active_streams.begin(), active_streams.end(), stream_id); + if (iter != active_streams.end()) { + GELOGE(FAILED, "Node %s cannot active its own stream %u.", node->GetName().c_str(), stream_id); + return FAILED; + } + } + } + + return SUCCESS; +} + +// Insert the send/recv event id to the graph +Status StreamAllocator::InsertSyncEvents() { + for (const auto &cur_node : whole_graph_->GetDirectNode()) { + // Take the adjacent points, then judge whether need to insert the event + for (const OutDataAnchorPtr &anchor : cur_node->GetAllOutDataAnchors()) { + for (const InDataAnchorPtr &peer_in_anchor : anchor->GetPeerInDataAnchors()) { + NodePtr next_node = peer_in_anchor->GetOwnerNode(); + Status status = InsertOneEventInTwoNodes(cur_node, next_node); + if (status != SUCCESS) { + GELOGE(status, "InsertOneEventInTwoNodes failed!"); + return status; + } + } + } + + /// If the two nodes of the control side belong to two streams, + /// you also need to add the send/recv event. 
+ if (cur_node->GetOutControlAnchor() != nullptr) { + for (const AnchorPtr &peer_in_anchor : cur_node->GetOutControlAnchor()->GetPeerAnchors()) { + NodePtr next_node = peer_in_anchor->GetOwnerNode(); + Status status = InsertOneEventInTwoNodes(cur_node, next_node); + if (status != SUCCESS) { + GELOGE(status, "InsertOneEventInTwoNodes failed!"); + return status; + } + } + } + } + + return SUCCESS; +} + +// Insert one send/recv event in two nodes +Status StreamAllocator::InsertOneEventInTwoNodes(const NodePtr &cur_node, const NodePtr &next_node) { + GE_CHECK_NOTNULL(cur_node->GetOpDesc()); + GE_CHECK_NOTNULL(next_node->GetOpDesc()); + + // No need to insert events after node that do not assign streams. + int64_t cur_stream_id = cur_node->GetOpDesc()->GetStreamId(); + if (cur_stream_id == kInvalidStream) { + GELOGD("No need to insert event after node %s.", cur_node->GetName().c_str()); + return SUCCESS; + } + + // No need to insert events between nodes in the same stream. + int64_t next_stream_id = next_node->GetOpDesc()->GetStreamId(); + if (cur_stream_id == next_stream_id) { + return SUCCESS; + } + + // No event needs to be inserted between the active node and the activated stream. + string next_node_label; + if (AttrUtils::GetStr(next_node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, next_node_label) && !next_node_label.empty()) { + auto iter = specific_activated_labels_.find(next_node_label); + if (iter != specific_activated_labels_.end()) { + for (const auto &active_node : iter->second) { + OpDescPtr active_op = active_node->GetOpDesc(); + GE_CHECK_NOTNULL(active_op); + if ((cur_stream_id == active_op->GetStreamId()) && (cur_node->GetOpDesc()->GetId() <= active_op->GetId())) { + GELOGI("No need to insert event between node %s and %s.", cur_node->GetName().c_str(), + next_node->GetName().c_str()); + return SUCCESS; + } + } + } + } + + // Add send and receive events. 
+ AddSendEventId(cur_node, event_num_); + AddRecvEventId(next_node, event_num_); + GELOGD("Insert event %u between node %s(stream %ld) and %s(stream %ld)", event_num_, cur_node->GetName().c_str(), + cur_stream_id, next_node->GetName().c_str(), next_stream_id); + + ++event_num_; + + return SUCCESS; +} + +// Optimize the event in the graph, delete the redundant sync event according to the stream information +Status StreamAllocator::OptimizeSyncEvents() { + map> stream_nodes; + + for (const auto &node : whole_graph_->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + int64_t stream_id = node->GetOpDesc()->GetStreamId(); + stream_nodes[stream_id].emplace_back(node); + } + + Status status = OptimizeBySendEvents(stream_nodes); + if (status != SUCCESS) { + GELOGE(status, "OptimizeBySendEvents failed!"); + return status; + } + + status = OptimizeByRecvEvents(stream_nodes); + if (status != SUCCESS) { + GELOGE(status, "OptimizeByRecvEvents failed!"); + return status; + } + + status = OptimizeByStreamActivate(); + if (status != SUCCESS) { + GELOGE(status, "OptimizeByStreamActivate failed!"); + return status; + } + + return SUCCESS; +} + +/// Optimization scenario: one stream has multiple send events in one node, +/// and multiple nodes for recv events on another stream +/// Example: +/// Stream0 Stream1 +/// N1 - - - event - > N1 +/// \ | +/// \ v +/// - - event - > N2 +Status StreamAllocator::OptimizeBySendEvents(const map> &stream_nodes) { + for (const auto &one_pair : stream_nodes) { + // The nodes on a stream in order + const vector &nodes = one_pair.second; + + map send_node_to_event_id; + + for (const auto &recv_node_ptr : nodes) { + GE_CHECK_NOTNULL(recv_node_ptr); + // Get all recv events of the current node, then traverse the event + vector recv_events; + GetRecvEventIdList(recv_node_ptr, recv_events); + + for (const auto &event_id : recv_events) { + NodePtr send_node_ptr = GetNodeFromSendEventId(event_id); + GE_CHECK_NOTNULL(send_node_ptr); + + /// If the 
record to the stream is found in the map, + /// and the recv node is the node, then remove sync event + if (send_node_to_event_id.find(send_node_ptr) != send_node_to_event_id.end()) { + RmvSendEventId(send_node_ptr, event_id); + RmvRecvEventId(recv_node_ptr, event_id); + GELOGI("Remove send event %u for node: %s", event_id, send_node_ptr->GetName().c_str()); + GELOGI("Remove recv event %u for node: %s", event_id, recv_node_ptr->GetName().c_str()); + } else { + send_node_to_event_id[send_node_ptr] = event_id; + } + } + } + } + + return SUCCESS; +} + +/// Scenario: multiple send nodes on a stream sent to a single recv node on the destination stream +/// Example: +/// Stream0 Stream1 +/// N1 - - +/// | | +/// | - - event - - - +/// | | +/// V V +/// N2 - - - event - > N2 +Status StreamAllocator::OptimizeByRecvEvents(const map> &stream_nodes) { + for (const auto &one_pair : stream_nodes) { + // The nodes on a stream in order + const vector &nodes = one_pair.second; + + map recv_node_to_event_id; + + for (const auto &send_node_ptr : nodes) { + GE_CHECK_NOTNULL(send_node_ptr); + // Get all send events of the current node, then traverse the event + vector send_id_list; + GetSendEventIdList(send_node_ptr, send_id_list); + + for (const auto &event_id : send_id_list) { + NodePtr recv_node_ptr = GetNodeFromRecvEventId(event_id); + GE_CHECK_NOTNULL(recv_node_ptr); + + /// If the record to the stream is found in the map, + /// and the send node is the node, then remove sync event + auto it = recv_node_to_event_id.find(recv_node_ptr); + if (it != recv_node_to_event_id.end()) { + uint32_t pre_event_id = it->second; + NodePtr pre_send_node_ptr = GetNodeFromSendEventId(pre_event_id); + GE_CHECK_NOTNULL(pre_send_node_ptr); + + RmvSendEventId(pre_send_node_ptr, pre_event_id); + RmvRecvEventId(recv_node_ptr, pre_event_id); + GELOGI("Remove event %u between node %s and node %s.", event_id, pre_send_node_ptr->GetName().c_str(), + recv_node_ptr->GetName().c_str()); + } + 
recv_node_to_event_id[recv_node_ptr] = event_id; + } + } + } + + return SUCCESS; +} + +// In situation : stream(normal) -> stream(streamActivate)-> +// -> stream(streamSwitch) -> stream(streamActivate) -> stream(stream true or false) +// No need to insert an event between node in stream(normal) and node in stream(stream true or false) +bool StreamAllocator::IsRecvNodeActivatedBySendNode(const NodePtr &send_node_ptr, const NodePtr &recv_node_ptr) const { + GE_CHECK_NOTNULL_EXEC(send_node_ptr->GetOpDesc(), GELOGE(FAILED, "op desc is nullptr"); return false); + GE_CHECK_NOTNULL_EXEC(recv_node_ptr->GetOpDesc(), GELOGE(FAILED, "op desc is nullptr"); return false); + auto cur_stream_id = send_node_ptr->GetOpDesc()->GetStreamId(); + if (AttrUtils::HasAttr(recv_node_ptr->GetOpDesc(), ATTR_NAME_STREAM_LABEL)) { + // find streamActivate node + auto iter = specific_activated_streams_nodes_map_.find(recv_node_ptr->GetOpDesc()->GetStreamId()); + set activate_stream_nodes; + if (iter != specific_activated_streams_nodes_map_.end()) { + activate_stream_nodes = iter->second; + } + set visited_nodes{recv_node_ptr}; + while (!activate_stream_nodes.empty()) { + set activate_stream_nodes_temp; + for (const auto &activate_stream_node : activate_stream_nodes) { + GE_IF_BOOL_EXEC(activate_stream_node->GetOpDesc() == nullptr, continue); + if (visited_nodes.find(activate_stream_node) != visited_nodes.end() || + AttrUtils::HasAttr(activate_stream_node->GetOpDesc(), ATTR_NAME_IS_LOOP_ACTIVE)) { + return false; + } + visited_nodes.insert(activate_stream_node); + // nodes in stream link to streamActivate no need to add event before activated node + for (const auto &pre_activate_stream_node : activate_stream_node->GetInNodes()) { + GE_IF_BOOL_EXEC(pre_activate_stream_node->GetOpDesc() == nullptr, continue); + if (pre_activate_stream_node->GetOpDesc()->GetStreamId() == cur_stream_id && + pre_activate_stream_node->GetOpDesc()->GetId() >= send_node_ptr->GetOpDesc()->GetId()) { + return true; + } + 
} + auto iterator = specific_activated_streams_nodes_map_.find(activate_stream_node->GetOpDesc()->GetStreamId()); + if (iterator != specific_activated_streams_nodes_map_.end()) { + auto active_nodes = iterator->second; + for (const auto &active_node : active_nodes) { + activate_stream_nodes_temp.emplace(active_node); + } + } + } + activate_stream_nodes = activate_stream_nodes_temp; + } + } + return false; +} + +Status StreamAllocator::OptimizeByStreamActivate() { + auto node_to_send_events_temp = node_to_send_events_; + for (const auto &node_event_id_pair : node_to_send_events_temp) { + const NodePtr &send_node_ptr = node_event_id_pair.first; + for (const auto &event_id : node_event_id_pair.second) { + NodePtr recv_node_ptr = GetNodeFromRecvEventId(event_id); + GE_CHECK_NOTNULL(recv_node_ptr); + if (IsRecvNodeActivatedBySendNode(send_node_ptr, recv_node_ptr)) { + RmvSendEventId(send_node_ptr, event_id); + RmvRecvEventId(recv_node_ptr, event_id); + } + } + } + return SUCCESS; +} + +// Refresh events to continuous events +Status StreamAllocator::RefreshContinuousEvents() { + // Establish a mapping relationship from old to new event id + map old_to_new_events; + uint32_t new_event_id = 0; + for (const auto &one_pair : node_to_send_events_) { + for (const auto &event_id : one_pair.second) { + old_to_new_events[event_id] = new_event_id; + new_event_id++; + } + } + + // Refresh send event id + for (auto &one_pair : node_to_send_events_) { + vector &send_events = one_pair.second; + for (size_t i = 0; i < send_events.size(); i++) { + auto find_it = old_to_new_events.find(send_events[i]); + if (find_it == old_to_new_events.end()) { + GELOGE(FAILED, "RefreshContinuousEvents: invalid send event %u", send_events[i]); + return FAILED; + } + send_events[i] = find_it->second; + } + } + + // Refresh recv event id + for (auto &one_pair : node_to_recv_events_) { + vector &recv_events = one_pair.second; + for (size_t i = 0; i < recv_events.size(); i++) { + auto find_it = 
old_to_new_events.find(recv_events[i]); + if (find_it == old_to_new_events.end()) { + GELOGE(FAILED, "RefreshContinuousEvents: invalid recv event %u", recv_events[i]); + return FAILED; + } + recv_events[i] = find_it->second; + } + } + + event_num_ = static_cast(old_to_new_events.size()); + + return SUCCESS; +} + +// Insert the real send/recv node in the graph +Status StreamAllocator::InsertSyncEventNodes() { + for (const auto &node : whole_graph_->GetDirectNode()) { + // Add the node corresponding to the recv event + vector recv_event_id_list; + GetRecvEventIdList(node, recv_event_id_list); + GE_CHECK_NOTNULL(node->GetOpDesc()); + GE_CHECK_NOTNULL(node->GetInControlAnchor()); + GE_CHECK_NOTNULL(node->GetOutControlAnchor()); + for (auto &event_id : recv_event_id_list) { + string recv_node_name = "_Recv_" + to_string(event_id); + OpDescPtr op_desc_ptr = MakeShared(recv_node_name, RECV); + GE_CHECK_NOTNULL(op_desc_ptr); + + int64_t temp_stream_id = node->GetOpDesc()->GetStreamId(); + op_desc_ptr->SetStreamId(temp_stream_id); + GE_CHK_BOOL_EXEC(AttrUtils::SetInt(op_desc_ptr, RECV_ATTR_EVENT_ID, event_id), GELOGE(FAILED, "SetInt failed."); + return FAILED); + (void)AttrUtils::SetListStr(op_desc_ptr, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, + std::move(std::vector())); + NodePtr recv_node = whole_graph_->AddNode(op_desc_ptr); + GE_CHECK_NOTNULL(recv_node); + GE_CHECK_NOTNULL(recv_node->GetOutControlAnchor()); + Status status = GraphUtils::AddEdge(recv_node->GetOutControlAnchor(), node->GetInControlAnchor()); + if (status != SUCCESS) { + GELOGE(status, "Add edge for node %s and node %s failed.", recv_node->GetName().c_str(), + node->GetName().c_str()); + return status; + } + + GELOGI("Add recv %u before node: %s", event_id, node->GetName().c_str()); + } + + // Add the node corresponding to the send event + vector send_event_id_list; + GetSendEventIdList(node, send_event_id_list); + + for (auto &event_id : send_event_id_list) { + string send_node_name = "_Send_" + 
to_string(event_id); + OpDescPtr op_desc_ptr = MakeShared(send_node_name, SEND); + GE_CHECK_NOTNULL(op_desc_ptr); + + int64_t temp_stream_id = node->GetOpDesc()->GetStreamId(); + op_desc_ptr->SetStreamId(temp_stream_id); + GE_CHK_BOOL_EXEC(AttrUtils::SetInt(op_desc_ptr, SEND_ATTR_EVENT_ID, event_id), GELOGE(FAILED, "SetInt failed."); + return FAILED); + (void)AttrUtils::SetListStr(op_desc_ptr, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, + std::move(std::vector())); + NodePtr send_node = whole_graph_->AddNode(op_desc_ptr); + GE_CHECK_NOTNULL(send_node); + GE_CHECK_NOTNULL(send_node->GetInControlAnchor()); + Status status = GraphUtils::AddEdge(node->GetOutControlAnchor(), send_node->GetInControlAnchor()); + if (status != SUCCESS) { + GELOGE(status, "Add edge for node %s and node %s failed.", node->GetName().c_str(), + send_node->GetName().c_str()); + return status; + } + + GELOGI("Add send event %u after node: %s", event_id, node->GetName().c_str()); + } + } + + Status status = whole_graph_->InsertEventNodes(); + if (status != SUCCESS) { + GELOGE(status, "whole_graph_->InsertEventNodes failed"); + return status; + } + + return SUCCESS; +} + +// Insert send event id on a node +void StreamAllocator::AddSendEventId(const NodePtr &node, uint32_t event_id) { + node_to_send_events_[node].emplace_back(event_id); +} + +// Insert recv event id on a node +void StreamAllocator::AddRecvEventId(const NodePtr &node, uint32_t event_id) { + node_to_recv_events_[node].emplace_back(event_id); +} + +// Remove send event id from a node +void StreamAllocator::RmvSendEventId(const NodePtr &node, uint32_t event_id) { + auto find_it = node_to_send_events_.find(node); + if (find_it == node_to_send_events_.end()) { + return; + } + + vector &send_events = find_it->second; + for (auto it = send_events.begin(); it != send_events.end(); ++it) { + if (*it == event_id) { + send_events.erase(it); + return; + } + } +} + +// Remove recv event id from a node +void StreamAllocator::RmvRecvEventId(const NodePtr 
&node, uint32_t event_id) { + auto find_it = node_to_recv_events_.find(node); + if (find_it == node_to_recv_events_.end()) { + return; + } + + vector &recv_events = find_it->second; + for (auto it = recv_events.begin(); it != recv_events.end(); ++it) { + if (*it == event_id) { + recv_events.erase(it); + return; + } + } +} + +// Get send event id list from a node +void StreamAllocator::GetSendEventIdList(const NodePtr &node, vector &send_list) const { + send_list.clear(); + auto find_it = node_to_send_events_.find(node); + if (find_it != node_to_send_events_.end()) { + send_list = find_it->second; + } +} + +// Get recv event id list from a node +void StreamAllocator::GetRecvEventIdList(const NodePtr &node, vector &recv_list) const { + recv_list.clear(); + auto find_it = node_to_recv_events_.find(node); + if (find_it != node_to_recv_events_.end()) { + recv_list = find_it->second; + } +} + +// Get a specific send node according to the recv event +NodePtr StreamAllocator::GetNodeFromSendEventId(uint32_t send_event_id) const { + for (const auto &one_pair : node_to_send_events_) { + const vector &events = one_pair.second; + for (const auto &event_id : events) { + if (event_id == send_event_id) { + return one_pair.first; + } + } + } + + return nullptr; +} + +// Get a specific recv node according to the recv event +NodePtr StreamAllocator::GetNodeFromRecvEventId(uint32_t recv_event_id) const { + for (const auto &one_pair : node_to_recv_events_) { + const vector &events = one_pair.second; + for (const auto &event_id : events) { + if (event_id == recv_event_id) { + return one_pair.first; + } + } + } + + return nullptr; +} + +void StreamAllocator::DumpEvents() { + map> after_refresh_stream_nodes; + for (const auto &node : whole_graph_->GetDirectNode()) { + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, continue); + int64_t stream_id = node->GetOpDesc()->GetStreamId(); + after_refresh_stream_nodes[stream_id].emplace_back(node); + } + + for (const auto &one_pair : 
after_refresh_stream_nodes) { + int64_t stream_id = one_pair.first; + GELOGI("After RefreshRealStream: stream %ld.", stream_id); + + for (const auto &node : one_pair.second) { + string send_event_str; + for (const auto &send_event_id : node_to_send_events_[node]) { + send_event_str += " " + to_string(send_event_id); + } + if (!send_event_str.empty()) { + GELOGI("node: %s, send events: %s", node->GetName().c_str(), send_event_str.c_str()); + } + + string recv_event_str; + for (const auto &recv_event_id : node_to_recv_events_[node]) { + recv_event_str += " " + to_string(recv_event_id); + } + if (!recv_event_str.empty()) { + GELOGI("node: %s, recv events: %s", node->GetName().c_str(), recv_event_str.c_str()); + } + } + } +} + +// Add active entry stream for special env. +Status StreamAllocator::AddActiveEntryStream() { + auto gelib = GELib::GetInstance(); + bool head_stream = (gelib == nullptr) ? false : gelib->HeadStream(); + GELOGI("Configured head stream: %u", head_stream); + if (!head_stream) { + return SUCCESS; + } + + // Collect streams active by StreamSwitch/StreamActive node. + std::set deactive_stream; + for (ge::NodePtr &node : whole_graph_->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + Status ret = CollectDeactiveStream(node->GetOpDesc(), deactive_stream); + if (ret != SUCCESS) { + return ret; + } + } + + // Collect default active stream, Add to active entry stream. + std::vector active_stream_list; + for (int64_t stream_id = 0; stream_id < stream_num_; ++stream_id) { + if (deactive_stream.count(stream_id) == 0) { + active_stream_list.push_back(stream_id); + } + } + + int64_t new_stream_id = stream_num_; + stream_num_++; + return InsertActiveEntryStream(active_stream_list, new_stream_id); +} + +// Collect deactive stream from flowctrl op. 
+Status StreamAllocator::CollectDeactiveStream(const OpDescPtr &op_desc, std::set &deactive_streams) const { + GE_CHECK_NOTNULL(op_desc); + std::string op_type = op_desc->GetType(); + if (op_type == STREAMSWITCH) { + std::vector active_stream_list; + // If GetListInt fail, active_stream_list is empty. + (void)ge::AttrUtils::GetListInt(op_desc, ATTR_NAME_ACTIVE_STREAM_LIST, active_stream_list); + if (active_stream_list.size() != kMaxSwitchStreamNum) { + GELOGE(INTERNAL_ERROR, "Stream num of switch true branch must be %u.", kMaxSwitchStreamNum); + return INTERNAL_ERROR; + } + + deactive_streams.insert(active_stream_list[0]); + GELOGI("Flowctrl_op node:%s, flowctrl stream id:%u.", op_desc->GetName().c_str(), active_stream_list[0]); + } else if (op_type == STREAMACTIVE) { + if (op_desc->HasAttr(ATTR_NAME_SWITCH_BRANCH_NODE_LABEL)) { + std::vector active_stream_list; + if (!AttrUtils::GetListInt(op_desc, ATTR_NAME_ACTIVE_STREAM_LIST, active_stream_list)) { + GELOGE(INTERNAL_ERROR, "StreamActiveOp get attr ACTIVE_STREAM fail."); + return INTERNAL_ERROR; + } + + for (uint32_t deactive_stream : active_stream_list) { + deactive_streams.insert(deactive_stream); + GELOGI("Flowctrl_op node:%s, flowctrl stream id:%u.", op_desc->GetName().c_str(), deactive_stream); + } + } + } + + return SUCCESS; +} + +// Insert StreamActive Op for Entry Stream. 
+Status StreamAllocator::InsertActiveEntryStream(const std::vector &active_streams, int64_t stream_id) { + string node_name = "ActiveEntryStream_" + string(STREAMACTIVE); + OpDescPtr op_desc = ge::MakeShared(node_name, STREAMACTIVE); + if (op_desc == nullptr) { + GELOGE(FAILED, "Failed to new opdesc."); + return FAILED; + } + GELOGI("Create StreamActive op:%s.", op_desc->GetName().c_str()); + + GE_CHK_BOOL_EXEC( + AttrUtils::SetListStr(op_desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, std::move(std::vector())), + GELOGE(FAILED, "SetListStr failed."); + return FAILED); + + NodePtr active_node = whole_graph_->AddNodeFront(op_desc); + GE_IF_BOOL_EXEC(active_node == nullptr, + GELOGE(FAILED, "Create StreamActive op: %s failed.", op_desc->GetName().c_str()); + return INTERNAL_ERROR); + GE_CHECK_NOTNULL(active_node->GetOpDesc()); + // Add one stream for ActiveEntryStream Task. + active_node->GetOpDesc()->SetStreamId(stream_id); + + GE_CHK_BOOL_EXEC(AttrUtils::SetBool(op_desc, "is_aicpu_stream", true), GELOGE(FAILED, "SetBool failed."); + return FAILED); + GE_CHK_BOOL_EXEC(AttrUtils::SetListInt(active_node->GetOpDesc(), ATTR_NAME_ACTIVE_STREAM_LIST, active_streams), + GELOGE(FAILED, "SetListInt failed."); + return FAILED); + + std::vector group_names; + GE_CHK_BOOL_EXEC(AttrUtils::SetListStr(active_node->GetOpDesc(), ATTR_NAME_SWITCH_BRANCH_NODE_LABEL, group_names), + GELOGE(FAILED, "SetLisStr failed."); + return FAILED); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/stream_allocator.h b/src/ge/graph/build/stream_allocator.h new file mode 100644 index 00000000..6804e52c --- /dev/null +++ b/src/ge/graph/build/stream_allocator.h @@ -0,0 +1,95 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_BUILD_STREAM_ALLOCATOR_H_ +#define GE_GRAPH_BUILD_STREAM_ALLOCATOR_H_ + +#include +#include +#include +#include +#include + +#include "engine_manager/dnnengine_manager.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/compute_graph.h" +#include "graph/manager/graph_manager_utils.h" + +namespace ge { +class StreamAllocator { + public: + StreamAllocator(ComputeGraphPtr whole_graph, const std::vector &subgraphs) + : whole_graph_(std::move(whole_graph)), subgraphs_(subgraphs) {} + StreamAllocator(const StreamAllocator &) = delete; + StreamAllocator &operator=(const StreamAllocator &) = delete; + ~StreamAllocator() = default; + + Status AssignLogicalStreams(const std::map &max_parallel_num, bool hcom_parallel); + Status RefreshRealStream(int64_t &stream_num, int64_t &event_num); + + private: + Status SplitStreams(); + Status ActiveStreamsBySpecificLabels(); + Status UpdateActiveStreams(std::vector> &splited_streams); + Status ActiveStreamsForLoop(); + Status CheckStreamActived() const; + + Status InsertSyncEvents(); + Status InsertOneEventInTwoNodes(const NodePtr &cur_node_ptr, const NodePtr &next_node_ptr); + + Status OptimizeSyncEvents(); + Status OptimizeBySendEvents(const std::map> &stream_nodes); + Status OptimizeByRecvEvents(const std::map> &stream_nodes); + Status OptimizeByStreamActivate(); + + Status RefreshContinuousEvents(); + Status InsertSyncEventNodes(); + + Status AddActiveEntryStream(); + Status CollectDeactiveStream(const OpDescPtr &op_desc, std::set &deactive_streams) const; + Status 
InsertActiveEntryStream(const std::vector &active_streams, int64_t stream_id); + + void AddSendEventId(const NodePtr &node, uint32_t event_id); + void AddRecvEventId(const NodePtr &node, uint32_t event_id); + void RmvSendEventId(const NodePtr &node, uint32_t event_id); + void RmvRecvEventId(const NodePtr &node, uint32_t event_id); + void GetSendEventIdList(const NodePtr &node, std::vector &send_list) const; + void GetRecvEventIdList(const NodePtr &node, std::vector &recv_list) const; + NodePtr GetNodeFromSendEventId(uint32_t send_event_id) const; + NodePtr GetNodeFromRecvEventId(uint32_t recv_event_id) const; + + void DumpEvents(); + // Determine if the successor node of RecvNode is directly or indirectly activated by the SendNode precursor node + bool IsRecvNodeActivatedBySendNode(const NodePtr &send_node_ptr, const NodePtr &recv_node_ptr) const; + + ComputeGraphPtr whole_graph_; + const std::vector &subgraphs_; + + int64_t stream_num_{0}; + uint32_t event_num_{0}; + + std::map> specific_activated_labels_; + std::set specific_activated_streams_; + std::map> specific_activated_streams_nodes_map_; + + // send events corresponding to the node + std::map> node_to_send_events_; + + // recv events corresponding to the node + std::map> node_to_recv_events_; +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_STREAM_ALLOCATOR_H_ diff --git a/src/ge/graph/build/task_generator.cc b/src/ge/graph/build/task_generator.cc new file mode 100644 index 00000000..0d81c548 --- /dev/null +++ b/src/ge/graph/build/task_generator.cc @@ -0,0 +1,530 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/build/task_generator.h" + +#include +#include + +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_context.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/model_serialize.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" + +using std::string; +using std::vector; +using std::map; +using domi::TaskDef; +using domi::ModelTaskDef; +using domi::LogTimeStampDef; + +namespace { +const char *const kIsFirstNode = "is_first_node"; +const char *const kIsLastNode = "is_last_node"; +const char *const kIsInputVar = "INPUT_IS_VAR"; +const char *const kIsOutputVar = "OUTPUT_IS_VAR"; +const char *const kProfilingMode = "PROFILING_MODE"; +const char *const kProfilingFpPoint = "FP_POINT"; +const char *const kProfilingBpPoint = "BP_POINT"; +const uint32_t kProfilingArStep = 2; +const uint64_t kProfilingFpStartLogid = 1; +const uint64_t kProfilingBpEndLogid = 2; +const uint64_t kProfilingArStartLogid = 3; +const uint64_t kProfilingArEndLogid = 4; +const uint64_t kProfilingIterEndLogid = 255; +} // namespace +namespace ge { +TaskGenerator::TaskGenerator(uint8_t *var_mem_base, uint64_t var_mem_size) { + var_mem_base_ = var_mem_base; + var_mem_size_ = var_mem_size; +} +TaskGenerator::~TaskGenerator() {} + +Status TaskGenerator::GetTaskInfo(Model &model, ComputeGraphPtr &graph, uint64_t session_id, RunContext &run_context) { + 
GELOGI("Begin to Get TaskInfo. session_id=%lu", session_id); + // Check params + if (graph == nullptr) { + GELOGE(PARAM_INVALID, "GetTaskInfo param graph is null. session_id=%lu", session_id); + return PARAM_INVALID; + } + + std::vector task_def_list; + std::map op_name_map; + + GraphUtils::DumpGEGraph(graph, "GenerateTaskBefore"); + GraphUtils::DumpGEGraphToOnnx(*graph, "GenerateTaskBefore"); + Status ret = GenerateTask(run_context, graph, task_def_list, op_name_map); + GraphUtils::DumpGEGraph(graph, "GenerateTaskAfter"); + GraphUtils::DumpGEGraphToOnnx(*graph, "GenerateTaskAfter"); + if (ret != SUCCESS) { + GELOGE(ret, "GenerateTask failed. session_id=%lu", session_id); + return ret; + } + + // op_name_map used when graph load + graph->SetGraphOpName(op_name_map); + + // Set op_name for infer profiling + vector op_name; + for (auto &iter : op_name_map) { + op_name.push_back(iter.second); + } + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListStr(model, ATTR_MODEL_TASK_INDEX_OP_NAME, op_name), + GELOGE(FAILED, "SetListStr failed."); + return FAILED); + + GELOGI("Call GenerateTask Success, task_def_list.size:%zu, op_name_map.size:%zu", task_def_list.size(), + op_name_map.size()); + + // Init and serialize model_task_def + ModelTaskDef model_task_def; + model_task_def.set_memory_size(run_context.dataMemSize); + model_task_def.set_weight_size(run_context.weightMemSize); + for (const TaskDef &task_def_temp : task_def_list) { + TaskDef *task_def = model_task_def.add_task(); + if (task_def == nullptr) { + GELOGE(FAILED, "task_def is nullptr."); + return FAILED; + } + *task_def = task_def_temp; + } + + ret = AddModelTaskToModel(model_task_def, session_id, model, run_context); + if (ret != SUCCESS) { + GELOGE(ret, "AddModelTaskToModel failed. session_id=%lu", session_id); + return ret; + } + + GELOGI("Get TaskInfo success. 
session_id=%lu", session_id); + return SUCCESS; +} + +Status TaskGenerator::AddModelTaskToModel(const ModelTaskDef &model_task_def, uint64_t session_id, ge::Model &model, + RunContext &run_context) { + GE_CHK_BOOL_EXEC( + AttrUtils::SetInt(model, MODEL_ATTR_TASK_GEN_BASE_ADDR, reinterpret_cast(run_context.dataMemBase)), + GELOGE(FAILED, "SetInt MODEL_ATTR_TASK_GEN_BASE_ADDR failed."); + return FAILED); + GE_CHK_BOOL_EXEC( + AttrUtils::SetInt(model, MODEL_ATTR_TASK_GEN_WEIGHT_ADDR, reinterpret_cast(run_context.weightMemBase)), + GELOGE(FAILED, "SetInt MODEL_ATTR_TASK_GEN_WEIGHT_ADDR failed."); + return FAILED); + GE_CHK_BOOL_EXEC(AttrUtils::SetInt(model, ATTR_MODEL_TASK_GEN_VAR_ADDR, reinterpret_cast(var_mem_base_)), + GELOGE(FAILED, "SetInt ATTR_MODEL_TASK_GEN_VAR_ADDR failed."); + return FAILED); + GE_CHK_BOOL_EXEC(AttrUtils::SetInt(model, ATTR_MODEL_VAR_SIZE, var_mem_size_), + GELOGE(FAILED, "SetInt ATTR_MODEL_VAR_SIZE failed."); + return FAILED); + GE_CHK_BOOL_EXEC(AttrUtils::SetInt(model, MODEL_ATTR_SESSION_ID, session_id), + GELOGE(FAILED, "SetInt MODEL_ATTR_SESSION_ID failed."); + return FAILED); + + size_t task_size = model_task_def.ByteSizeLong(); + ge::Buffer serial_buff(task_size); + if (!model_task_def.SerializePartialToArray(serial_buff.GetData(), static_cast(task_size))) { + GELOGE(FAILED, "model_task_def's serialize failed, model name = %s, task_size=%zu.", model.GetName().c_str(), + task_size); + return FAILED; + } + if (!AttrUtils::SetZeroCopyBytes(model, MODEL_ATTR_TASKS, std::move(serial_buff))) { + GELOGE(FAILED, "Set model task to model failed, model name = %s, task_size=%zu.", model.GetName().c_str(), + task_size); + return FAILED; + } + + return SUCCESS; +} + +Status TaskGenerator::UpdateOpIsVarAttr(const OpDescPtr &op_desc, uint64_t session_id) { + vector input_offsets = op_desc->GetInputOffset(); + GELOGD("Update is var attr, node[name:%s(%s), id:%ld, stream_id:%ld].", op_desc->GetName().c_str(), + op_desc->GetType().c_str(), 
op_desc->GetId(), op_desc->GetStreamId()); + if (!(input_offsets.empty())) { + vector input_var; + for (int64_t input : input_offsets) { + input_var.push_back(VarManager::Instance(session_id)->IsVarAddr(input)); + } + GE_CHK_BOOL_EXEC(AttrUtils::SetListBool(op_desc, kIsInputVar, input_var), GELOGE(FAILED, "SetListBool failed."); + return FAILED); + } + + vector output_offsets = op_desc->GetOutputOffset(); + if (!(output_offsets.empty())) { + vector output_var; + for (int64_t output : output_offsets) { + output_var.push_back(VarManager::Instance(session_id)->IsVarAddr(output)); + } + GE_CHK_BOOL_EXEC(AttrUtils::SetListBool(op_desc, kIsOutputVar, output_var), GELOGE(FAILED, "SetListBool failed."); + return FAILED); + } + return SUCCESS; +} + +Status TaskGenerator::GenerateTask(RunContext &run_context, ComputeGraphPtr &graph, + vector &task_def_list, map &op_name_map) { + std::shared_ptr ge_lib = GELib::GetInstance(); + if ((ge_lib == nullptr) || !ge_lib->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GenerateTask failed."); + return GE_CLI_GE_NOT_INITIALIZED; + } + + auto ret = MarkFirstAndLastNode(graph); + if (ret != SUCCESS) { + GELOGE(ret, "MarkFirstAndLastNode failed."); + return ret; + } + + ProfilingPoint ppoint; + vector ar_ppoint; + GE_CHK_STATUS_RET(FindProfilingTaskIndex(graph, ppoint, ar_ppoint)); + + const OpsKernelManager &ops_kernel_manager = ge_lib->OpsKernelManagerObj(); + + uint32_t node_index = 0; + GE_TIMESTAMP_CALLNUM_START(GenerateTask); + for (auto &node : graph->GetAllNodes()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + if (node->GetOpDesc()->GetType() == CONCAT) { + int64_t is_node_virtual; + GE_IF_BOOL_EXEC(ge::AttrUtils::GetInt(node->GetOpDesc(), "fusion_virtual_op", is_node_virtual), continue); + } + node_index++; + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHK_STATUS_RET(UpdateOpIsVarAttr(op_desc, graph->GetSessionID())); + + string name = node->GetName(); + string type = node->GetType(); + string op_kernel_lib_name = 
op_desc->GetOpKernelLibName(); + if (op_kernel_lib_name.empty()) { + GELOGI("Node[name:%s(%s)] task no need to generate task.", name.c_str(), type.c_str()); + continue; + } + + OpsKernelInfoStorePtr kernel_info_store = ops_kernel_manager.GetOpsKernelInfoStore(op_kernel_lib_name); + if (kernel_info_store == nullptr) { + GELOGE(INTERNAL_ERROR, "No ops kernel store found. node:%s(%s), op_kernel_lib_name=%s.", name.c_str(), + type.c_str(), op_kernel_lib_name.c_str()); + return INTERNAL_ERROR; + } + + ret = UpdateAnchorStatus(node); + if (ret != SUCCESS) { + GELOGE(ret, "Call UpdateAnchorStatus node:%s(%s) failed", name.c_str(), type.c_str()); + return ret; + } + + int64_t op_id = op_desc->GetId(); + int64_t stream_id = op_desc->GetStreamId(); + if (stream_id < 0 || stream_id >= static_cast(run_context.graphStreamList.size())) { + GELOGE(INTERNAL_ERROR, "node[name:%s(%s), id:%ld] stream id is invalid, stream list size=%zu", name.c_str(), + type.c_str(), op_id, run_context.graphStreamList.size()); + return INTERNAL_ERROR; + } + + // Profiling task + size_t task_list_size_before = task_def_list.size(); + GE_CHK_STATUS_RET(InsertProfilingTaskBefore(op_desc, ppoint, ar_ppoint, node_index, task_def_list)); + run_context.stream = run_context.graphStreamList[stream_id]; + GELOGD("Call %s to generate node[name:%s(%s), id:%ld, stream_id:%ld] task.", op_kernel_lib_name.c_str(), + name.c_str(), type.c_str(), op_id, stream_id); + GE_TIMESTAMP_RESTART(GenerateTask); + ret = kernel_info_store->GenerateTask(*node, run_context, task_def_list); + GE_TIMESTAMP_ADD(GenerateTask); + if (ret != SUCCESS) { + GELOGE(ret, "Call %s to generate node[name:%s(%s), id:%ld, stream_id:%ld] task failed.", + op_kernel_lib_name.c_str(), name.c_str(), type.c_str(), op_id, stream_id); + return ret; + } + // Profiling task + GE_CHK_STATUS_RET(InsertProfilingTaskAfter(op_desc, ppoint, ar_ppoint, node_index, task_def_list)); + + size_t task_list_size_after = task_def_list.size(); + // If tasks is reduced + 
if (task_list_size_after < task_list_size_before) { + GELOGE(FAILED, "Call %s to generate node[name:%s(%s), id:%ld, stream_id:%ld] task. but task num from %zu to %zu.", + op_kernel_lib_name.c_str(), name.c_str(), type.c_str(), op_id, stream_id, task_list_size_before, + task_list_size_after); + return FAILED; + } + + // Reset stream id to ge stream id, as graph load must use ge stream to reassign stream + void *ops_kernel_info_store_ptr = kernel_info_store.get(); + for (size_t idx = task_list_size_before; idx < task_list_size_after; ++idx) { + task_def_list[idx].set_stream_id(static_cast(stream_id)); + op_name_map[idx] = name; + // Set opsKernelInfoStorePtr and op_index, the two fields be use in DistributeTask and InitTaskInfo + TaskDef *task_def_ptr = &task_def_list[idx]; + GE_CHECK_NOTNULL(task_def_ptr); + task_def_ptr->set_ops_kernel_store_ptr(reinterpret_cast(ops_kernel_info_store_ptr)); + } + + GELOGD("Call %s to generate node[name:%s(%s), id:%ld, stream_id:%ld] task finished, generate %lu task(s).", + op_kernel_lib_name.c_str(), name.c_str(), type.c_str(), op_id, stream_id, + task_list_size_after - task_list_size_before); + } + GE_TIMESTAMP_CALLNUM_END(GenerateTask, "GraphBuild::GenerateTask"); + return SUCCESS; +} + +Status TaskGenerator::UpdateAnchorStatus(const NodePtr &node) { + if (NodeUtils::SetAllAnchorStatus(node) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "NodeUtils::SetAllAnchorStatus failed."); + return INTERNAL_ERROR; + } + for (auto &anchor : node->GetAllInDataAnchors()) { + auto peer_anchor = anchor->GetPeerOutAnchor(); + if (peer_anchor == nullptr) { + if (AnchorUtils::SetStatus(anchor, ANCHOR_SUSPEND) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "AnchorUtils::SetStatus failed."); + return INTERNAL_ERROR; + } + } else if (peer_anchor->GetOwnerNode()->GetType() == CONSTANT) { + if (AnchorUtils::SetStatus(anchor, ANCHOR_CONST) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "AnchorUtils::SetStatus failed."); + return INTERNAL_ERROR; + } + } else { 
+ if (AnchorUtils::SetStatus(anchor, ANCHOR_DATA) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "AnchorUtils::SetStatus failed."); + return INTERNAL_ERROR; + } + } + } + + return SUCCESS; +} + +Status TaskGenerator::MarkFirstAndLastNode(ComputeGraphPtr &graph) { + std::shared_ptr ge_lib = GELib::GetInstance(); + if ((ge_lib == nullptr) || !ge_lib->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GE is not initialized or is finalized"); + return GE_CLI_GE_NOT_INITIALIZED; + } + + map>> engine_stream_stat; + for (auto &node : graph->GetAllNodes()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + string op_kernel_lib_name = node->GetOpDesc()->GetOpKernelLibName(); + int64_t stream_id = node->GetOpDesc()->GetStreamId(); + + if (op_kernel_lib_name.empty()) { + // Reset op kernel lib + (void)ge_lib->DNNEngineManagerObj().GetDNNEngineName(node->GetOpDesc()); + op_kernel_lib_name = node->GetOpDesc()->GetOpKernelLibName(); + if (op_kernel_lib_name.empty()) { + GELOGE(INTERNAL_ERROR, "node:%s(%s) get op kernel lib failed.", node->GetName().c_str(), + node->GetType().c_str()); + return INTERNAL_ERROR; + } + } + + auto it = engine_stream_stat.find(op_kernel_lib_name); + if (it == engine_stream_stat.end()) { + map> stream_map; + std::pair node_pair(node, node); + (void)stream_map.emplace(stream_id, node_pair); + (void)engine_stream_stat.emplace(op_kernel_lib_name, stream_map); + } else { + auto stream_it = it->second.find(stream_id); + if (stream_it == it->second.end()) { + std::pair node_pair(node, node); + (void)it->second.emplace(stream_id, node_pair); + } else { + stream_it->second.second = node; + } + } + } + + for (auto &it : engine_stream_stat) { + for (auto &stream_it : it.second) { + NodePtr &first_node = stream_it.second.first; + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetBool(first_node->GetOpDesc(), kIsFirstNode, true), + GELOGE(FAILED, "SetBool failed."); + return FAILED); + NodePtr &last_node = stream_it.second.second; + 
GE_CHK_BOOL_EXEC(ge::AttrUtils::SetBool(last_node->GetOpDesc(), kIsLastNode, true), + GELOGE(FAILED, "SetBool failed."); + return FAILED); + } + } + return SUCCESS; +} + +Status TaskGenerator::FindProfilingTaskIndex(const ComputeGraphPtr &graph, ProfilingPoint &ppoint, + vector &ar_ppoint) const { + GE_CHECK_NOTNULL(graph); + const char *is_profiling = std::getenv(kProfilingMode); + if (is_profiling == nullptr) { + return SUCCESS; + } + const char *fp_point = std::getenv(kProfilingFpPoint); + if (fp_point == nullptr) { + return SUCCESS; + } + string fp_point_str = string(fp_point); + const char *bp_point = std::getenv(kProfilingBpPoint); + if (bp_point == nullptr) { + return SUCCESS; + } + string bp_point_str = string(bp_point); + uint32_t current_idx = 0; + uint32_t iter_end = 0; + uint32_t last_bp = 0; + uint32_t first_fp = 0; + for (auto &node : graph->GetAllNodes()) { + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(node->GetOpDesc()); + current_idx++; + string op_kernel_lib_name = op_desc->GetOpKernelLibName(); + if (op_kernel_lib_name.empty()) { + continue; + } + if (op_desc->GetName() == bp_point_str) { + last_bp = current_idx; + GELOGI("Last bp name %s, idx %u", op_desc->GetName().c_str(), last_bp); + } + if (op_desc->GetType() == NETOUTPUT) { + iter_end = current_idx; + GELOGI("Iter end name %s, idx %u", op_desc->GetName().c_str(), iter_end); + } + if (op_desc->GetName() == fp_point_str) { + first_fp = current_idx; + GELOGI("First fp name %s, idx %u", op_desc->GetName().c_str(), first_fp); + } + + if (op_desc->GetType() == HCOMALLREDUCE) { + ar_ppoint.emplace_back(current_idx); + GELOGI("Allreduce name %s, idx %u", op_desc->GetName().c_str(), current_idx); + } + } + ppoint.fp_index = first_fp; + ppoint.bp_index = last_bp; + ppoint.end_index = iter_end; + return SUCCESS; +} + +Status TaskGenerator::InsertProfilingTaskBefore(const OpDescPtr &op_desc, const ProfilingPoint &ppoint, + vector &ar_ppoint, uint32_t node_index, + vector &task_def_list) { 
+ const char *is_profiling = std::getenv(kProfilingMode); + if ((is_profiling == nullptr) || (ppoint.fp_index == 0) || (ppoint.bp_index == 0) || (ppoint.end_index == 0)) { + return SUCCESS; + } + if (ppoint.fp_index == node_index) { + uint64_t jobid_log_id = ge::GetContext().JobId(); + GELOGI("The first FP operator is %s, idx %u, job_id %lu", op_desc->GetName().c_str(), node_index, jobid_log_id); + + TaskDef job_task_def; + job_task_def.set_type(RT_MODEL_TASK_PROFILER_TRACE); + job_task_def.set_stream_id(op_desc->GetStreamId()); + LogTimeStampDef *job_log_def = job_task_def.mutable_log_timestamp(); + if (job_log_def != nullptr) { + job_log_def->set_logid(jobid_log_id); + job_log_def->set_notify(false); + } + task_def_list.emplace_back(job_task_def); + TaskDef fp_task_def; + fp_task_def.set_type(RT_MODEL_TASK_PROFILER_TRACE); + fp_task_def.set_stream_id(op_desc->GetStreamId()); + LogTimeStampDef *fp_log_def = fp_task_def.mutable_log_timestamp(); + if (fp_log_def != nullptr) { + fp_log_def->set_logid(kProfilingFpStartLogid); + fp_log_def->set_notify(false); + } + task_def_list.emplace_back(fp_task_def); + } + + for (size_t i = 0; i < ar_ppoint.size(); i++) { + if (ar_ppoint[i] != node_index) { + continue; + } + GELOGI("The start allreduce operator is %s, idx %u", op_desc->GetName().c_str(), node_index); + TaskDef ar_task_def; + ar_task_def.set_type(RT_MODEL_TASK_PROFILER_TRACE); + ar_task_def.set_stream_id(op_desc->GetStreamId()); + LogTimeStampDef *ar_log_def = ar_task_def.mutable_log_timestamp(); + if (ar_log_def != nullptr) { + GE_IF_BOOL_EXEC(TypeUtils::CheckUint64MulOverflow(i, kProfilingArStep), + GELOGE(FAILED, "Multiply result is out of range."); + return FAILED); + auto log_id = i * kProfilingArStep + kProfilingArStartLogid; + ar_log_def->set_logid(log_id); + ar_log_def->set_notify(false); + } + task_def_list.push_back(ar_task_def); + } + return SUCCESS; +} + +Status TaskGenerator::InsertProfilingTaskAfter(const OpDescPtr &op_desc, const ProfilingPoint 
&ppoint, + vector &ar_ppoint, uint32_t node_index, + vector &task_def_list) { + GE_CHECK_NOTNULL(op_desc); + const char *is_profiling = std::getenv(kProfilingMode); + if ((is_profiling == nullptr) || (ppoint.fp_index == 0) || (ppoint.bp_index == 0) || (ppoint.end_index == 0)) { + return SUCCESS; + } + if (ppoint.bp_index == node_index) { + GELOGI("The last BP operator is %s, idx %u", op_desc->GetName().c_str(), node_index); + TaskDef bp_task_def; + bp_task_def.set_type(RT_MODEL_TASK_PROFILER_TRACE); + bp_task_def.set_stream_id(op_desc->GetStreamId()); + LogTimeStampDef *bp_log_def = bp_task_def.mutable_log_timestamp(); + GE_CHECK_NOTNULL(bp_log_def); + bp_log_def->set_logid(kProfilingBpEndLogid); + bp_log_def->set_notify(false); + task_def_list.emplace_back(bp_task_def); + } + if (ppoint.end_index == node_index) { + GELOGI("The iteration end operator is %s, idx %u", op_desc->GetName().c_str(), node_index); + TaskDef end_task_def; + end_task_def.set_type(RT_MODEL_TASK_PROFILER_TRACE); + end_task_def.set_stream_id(op_desc->GetStreamId()); + LogTimeStampDef *end_log_def = end_task_def.mutable_log_timestamp(); + GE_CHECK_NOTNULL(end_log_def); + end_log_def->set_logid(kProfilingIterEndLogid); + end_log_def->set_notify(true); + task_def_list.emplace_back(end_task_def); + } + + for (size_t i = 0; i < ar_ppoint.size(); i++) { + if (ar_ppoint[i] != node_index) { + continue; + } + GELOGI("The end allreduce operator is %s, idx %u", op_desc->GetName().c_str(), node_index); + TaskDef ar_task_def; + ar_task_def.set_type(RT_MODEL_TASK_PROFILER_TRACE); + ar_task_def.set_stream_id(op_desc->GetStreamId()); + LogTimeStampDef *ar_log_def = ar_task_def.mutable_log_timestamp(); + GE_CHECK_NOTNULL(ar_log_def); + GE_IF_BOOL_EXEC(TypeUtils::CheckUint64MulOverflow(i, kProfilingArStep), + GELOGE(FAILED, "Multiply result is out of range."); + return FAILED); + auto log_id = i * kProfilingArStep + kProfilingArEndLogid; + ar_log_def->set_logid(log_id); + ar_log_def->set_notify(false); + 
task_def_list.emplace_back(ar_task_def); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/build/task_generator.h b/src/ge/graph/build/task_generator.h new file mode 100644 index 00000000..ad9c1388 --- /dev/null +++ b/src/ge/graph/build/task_generator.h @@ -0,0 +1,107 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_BUILD_TASK_GENERATOR_H_ +#define GE_GRAPH_BUILD_TASK_GENERATOR_H_ + +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/opskernel/ops_kernel_info_types.h" +#include "framework/common/types.h" +#include "graph/compute_graph.h" +#include "graph/model.h" +#include "proto/task.pb.h" +#include "runtime/rt.h" + +namespace ge { +struct ProfilingPoint { + uint32_t fp_index = 0; + uint32_t bp_index = 0; + uint32_t end_index = 0; +}; + +class TaskGenerator { + public: + TaskGenerator() = default; + + TaskGenerator(const TaskGenerator &) = delete; + + TaskGenerator &operator=(const TaskGenerator &) = delete; + + virtual ~TaskGenerator(); + + TaskGenerator(uint8_t *var_mem_base, uint64_t var_mem_size); + + /// + /// get task info. 
+ /// @param model model + /// @param graph compute graph + /// @param buffer weights buffer + /// @param session_id session id + /// @return SUCCESS: success + /// other:failed + /// + Status GetTaskInfo(Model &model, ComputeGraphPtr &graph, uint64_t session_id, RunContext &run_context); + + private: + Status UpdateAnchorStatus(const NodePtr &node); + + Status UpdateOpIsVarAttr(const OpDescPtr &op_desc, uint64_t session_id); + + /// + /// call engine to generate task. + /// @param run_context run context + /// @param graph compute graph + /// @param task_def_list task def list generate by engine + /// @param op_name_map relation of task index and op + /// @return SUCCESS: success + /// Other: failed + /// + Status GenerateTask(RunContext &run_context, ComputeGraphPtr &graph, std::vector &task_def_list, + std::map &op_name_map); + + /// + /// AddModelTaskToModel + /// @param model_task_def model task + /// @param model_def model + /// @return SUCCESS: success + /// Other: failed + /// + Status AddModelTaskToModel(const domi::ModelTaskDef &model_task_def, uint64_t session_id, Model &model_def, + RunContext &run_context); + + // Mark first and last node according to the same stream and engine + Status MarkFirstAndLastNode(ComputeGraphPtr &graph); + + // profiling interface + Status FindProfilingTaskIndex(const ComputeGraphPtr &graph, ProfilingPoint &ppoint, + std::vector &ar_ppoint) const; + Status InsertProfilingTaskBefore(const OpDescPtr &op_desc, const ProfilingPoint &ppoint, + std::vector &ar_ppoint, uint32_t node_index, + std::vector &task_def_list); + Status InsertProfilingTaskAfter(const OpDescPtr &op_desc, const ProfilingPoint &ppoint, + std::vector &ar_ppoint, uint32_t node_index, + std::vector &task_def_list); + + uint8_t *var_mem_base_ = nullptr; + uint64_t var_mem_size_ = 0; +}; +} // namespace ge +#endif // GE_GRAPH_BUILD_TASK_GENERATOR_H_ diff --git a/src/ge/graph/common/bcast.cc b/src/ge/graph/common/bcast.cc new file mode 100644 index 00000000..7948ff14
--- /dev/null +++ b/src/ge/graph/common/bcast.cc @@ -0,0 +1,168 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/common/bcast.h" + +#include + +#include "common/math_util.h" +#include "common/util.h" + +using domi::Status; + +namespace ge { +Status BCast::GenerateBcastInfo(const kVecInt &sx, const kVecInt &sy) { + if (sx.size() == 0 && sy.size() == 0) { + result_.push_back(1); + x_reshape_.push_back(1); + x_bcast_.push_back(1); + y_reshape_.push_back(1); + y_bcast_.push_back(1); + } else { + kVecInt x = sx; + kVecInt y = sy; + Reverse(x); + Reverse(y); + ExtendTensorDim(x, y); + GE_RETURN_WITH_LOG_IF_ERROR(SetShapeDifferentInfo(x, y), "GenerateBcastInfo failed."); + } + ReverseAllIntermediateShapes(); + return domi::SUCCESS; +} + +Status BCast::SetShapeDifferentInfo(const kVecInt &x, const kVecInt &y) { + const int64_t n = x.size(); + for (int64_t i = 0; i < n; ++i) { + const int64_t x_i = x[i]; + GE_CHECK_GE(x_i, 0); + const int64_t y_i = y[i]; + GE_CHECK_GE(y_i, 0); + int64_t output_i = 0; + int64_t x_bcast_i = 0; + int64_t y_bcast_i = 0; + + if (x_i == y_i) { + output_i = x_i; + x_bcast_i = 1; + y_bcast_i = 1; + if (x_i == 1) { + grad_x_reduce_idx_.push_back(n - 1 - i); + grad_y_reduce_idx_.push_back(n - 1 - i); + } + } else if (x_i == 1) { + output_i = y_i; + x_bcast_i = y_i; + y_bcast_i = 1; + grad_x_reduce_idx_.push_back(n - 1 - i); + } else if (y_i == 1) { + output_i 
= x_i; + x_bcast_i = 1; + y_bcast_i = x_i; + grad_y_reduce_idx_.push_back(n - 1 - i); + } else { + GELOGE(domi::PARAM_INVALID, + "SetShapeDifferentInfo failed. Two tensor shapes are not compatible " + "according to the broadcasting rule."); + return domi::PARAM_INVALID; + } + output_.push_back(output_i); + result_.push_back(output_i); + x_reshape_.push_back(x_i); + x_bcast_.push_back(x_bcast_i); + y_reshape_.push_back(y_i); + y_bcast_.push_back(y_bcast_i); + } + return domi::SUCCESS; +} + +void BCast::ExtendTensorDim(kVecInt &v_x, kVecInt &v_y) { + if (v_x.size() > v_y.size()) { + v_y.resize(v_x.size(), 1); + } else { + v_x.resize(v_y.size(), 1); + } +} + +BCast::kVecInt BCast::TransShapeToDimVec(const GeTensorDesc &shape) { + const size_t dim_num = shape.GetShape().GetDimNum(); + BCast::kVecInt ret(dim_num); + for (size_t i = 0; i < dim_num; ++i) { + ret[i] = shape.GetShape().GetDim(i); + } + return ret; +} + +void BCast::Reverse(kVecInt &shape) { std::reverse(shape.begin(), shape.end()); } + +void BCast::ReverseAllIntermediateShapes() { + // Reverse all intermediate shape params + Reverse(x_reshape_); + Reverse(x_bcast_); + Reverse(y_reshape_); + Reverse(y_bcast_); + Reverse(result_); + Reverse(output_); + Reverse(grad_x_reduce_idx_); + Reverse(grad_y_reduce_idx_); +} + +void BCast::BCastIndexes(kVecInt &x_indexes, kVecInt &y_indexes) { + Reverse(x_reshape_); + Reverse(y_reshape_); + Reverse(output_); + + // Process 0-th dimension + int64_t x_dim = 1; + int64_t y_dim = 1; + int64_t out_dim = 1; + + // If x and y are both scalar, then output_ is empty + if (!output_.empty()) { + x_dim = x_reshape_.at(0); + y_dim = y_reshape_.at(0); + out_dim = output_.at(0); + } + + int64_t x_bias = x_dim; + int64_t y_bias = y_dim; + + for (int64_t i = 0; i < out_dim; i++) { + x_indexes.push_back(x_dim == 1 ? 0 : i); + y_indexes.push_back(y_dim == 1 ? 
0 : i); + } + + // Process the remaining dimensions + for (size_t i = 1; i < output_.size(); i++) { + x_dim = x_reshape_.at(i); // i-th dimension of x. + y_dim = y_reshape_.at(i); // i-th dimension of y. + out_dim = output_.at(i); // i-th dimension of output_. + + int64_t stride = x_indexes.size(); + for (int64_t j = 1; j < out_dim; j++) { + for (int64_t k = 0; k < stride; k++) { + x_indexes.push_back(x_indexes.at(k) + (x_dim == 1 ? 0 : (j * x_bias))); + y_indexes.push_back(y_indexes.at(k) + (y_dim == 1 ? 0 : (j * y_bias))); + } + } + x_bias *= x_dim; + y_bias *= y_dim; + } + + Reverse(x_reshape_); + Reverse(y_reshape_); + Reverse(output_); +} +} // namespace ge diff --git a/src/ge/graph/common/bcast.h b/src/ge/graph/common/bcast.h new file mode 100644 index 00000000..429f153f --- /dev/null +++ b/src/ge/graph/common/bcast.h @@ -0,0 +1,254 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_COMMON_BCAST_H_ +#define GE_GRAPH_COMMON_BCAST_H_ + +#include +#include +#include + +#include "common/debug/log.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/utils/tensor_adapter.h" +#include "unsupported/Eigen/CXX11/Tensor" + +namespace ge { +static const size_t kMinDimNum = 2; +class BCast { + public: + /// + /// @ingroup domi_calibration + /// @brief define kVecInt + /// + typedef std::vector kVecInt; + + /// + /// @ingroup domi_calibration + /// @brief constructor + /// + BCast() {} + /// + /// @ingroup domi_calibration + /// @brief destructor + /// + ~BCast() {} + + /// + /// @ingroup domi_calibration + /// @brief Not optimize intermediate shapes + /// @decrease dims, more efficient, set by user + /// @param [in] x first Tensor dim + /// @param [in] y second Tensor dim + /// @return SUCCESS broadcast message successfully generated + /// @return other broadcast message failed to generate + /// + ge::Status GenerateBcastInfo(const kVecInt &x, const kVecInt &y); + + /// + /// @ingroup domi_calibration + /// @brief get x_reshape + /// + const kVecInt &GetXReshape() const { return x_reshape_; } + + /// + /// @ingroup domi_calibration + /// @brief get x_bcast + /// + const kVecInt &GetXBcast() const { return x_bcast_; } + + /// + /// @ingroup domi_calibration + /// @brief get y_reshape + /// + const kVecInt &GetYReshape() const { return y_reshape_; } + /// + /// @ingroup domi_calibration + /// @brief get y_bcast + /// + const kVecInt &GetYBcast() const { return y_bcast_; } + /// + /// @ingroup domi_calibration + /// @brief get result_shape + /// + const kVecInt &GetResultShape() const { return result_; } + + /// + /// @ingroup domi_calibration + /// @brief get result_shape + /// + const kVecInt &GetOutputShape() const { return output_; } + const kVecInt &GetGradXReduceIdx() const { 
return grad_x_reduce_idx_; } + const kVecInt &GetGradYReduceIdx() const { return grad_y_reduce_idx_; } + + /// + /// @ingroup domi_calibration + /// @brief convert TensorDescriptor to kVecInt + /// @param [in] shape Tensor descriptor + /// @return kVecInt dim info + /// + static kVecInt TransShapeToDimVec(const GeTensorDesc &shape); + + /// + /// @ingroup domi_calibration + /// from Bcast::kVecInt to Eigen::array + /// @param [in] vec dim info + /// @return Eigen::array + /// + template + static Status ToIndexArray(const BCast::kVecInt &vec, Eigen::array &ret) { + if (vec.size() != NDIMS) { + GELOGE(domi::PARAM_INVALID, "ToIndexArray failed. size of vector = %zu is not equal to NDIMS = %d.", vec.size(), + NDIMS); + return domi::PARAM_INVALID; + } + for (int i = 0; i < NDIMS; ++i) { + ret[i] = vec[i]; + } + return domi::SUCCESS; + } + void BCastIndexes(kVecInt &x_indexes, kVecInt &y_indexes); + template + Status BCastCompute(const std::vector &input, std::vector &v_output, + const std::function &func) { + Status ret; + if (func == nullptr) { + GELOGE(domi::PARAM_INVALID, "Param func is null"); + return domi::PARAM_INVALID; + } + // Min input num is 2 + if (input.size() < kMinDimNum) { + GELOGE(domi::PARAM_INVALID, "Input size is smaller than two."); + return domi::PARAM_INVALID; + } + // Only broadcast shape + ret = + GenerateBcastInfo(TransShapeToDimVec(input[0]->GetTensorDesc()), TransShapeToDimVec(input[1]->GetTensorDesc())); + if (ret != domi::SUCCESS) { + GELOGE(ret, "Greater broadcasting failed."); + return ret; + } + + kVecInt x_indexes; + kVecInt y_indexes; + BCastIndexes(x_indexes, y_indexes); + + const void *x1_data = input[0]->GetData().data(); + const void *x2_data = input[1]->GetData().data(); + + for (size_t i = 0; i < x_indexes.size(); i++) { + int64_t x_index = x_indexes[i]; + int64_t y_index = y_indexes[i]; + auto value = func((*(reinterpret_cast(x1_data) + x_index)), + (*(reinterpret_cast(x2_data) + y_index))); + v_output.push_back(value); + } + + 
return domi::SUCCESS; + } + + template + Status BCastComputeCheck(const std::vector &input, std::vector &v_output, + const std::function &func) { + if (func == nullptr) { + GELOGE(PARAM_INVALID, "Param func is null"); + return PARAM_INVALID; + } + // Min input num is 2 + if (input.size() < kMinDimNum) { + GELOGE(PARAM_INVALID, "Input size is smaller than two."); + return PARAM_INVALID; + } + // Only broadcast shape + Status ret = + GenerateBcastInfo(TransShapeToDimVec(input[0]->GetTensorDesc()), TransShapeToDimVec(input[1]->GetTensorDesc())); + if (ret != SUCCESS) { + GELOGE(ret, "Greater broadcasting failed."); + return ret; + } + + DataType data_type = input[0]->GetTensorDesc().GetDataType(); + kVecInt x_indexes; + kVecInt y_indexes; + BCastIndexes(x_indexes, y_indexes); + + const void *x1_data = input[0]->GetData().data(); + const void *x2_data = input[1]->GetData().data(); + + for (size_t i = 0; i < x_indexes.size(); i++) { + int64_t x_index = x_indexes[i]; + int64_t y_index = y_indexes[i]; + auto value = func((*(reinterpret_cast(x1_data) + x_index)), + (*(reinterpret_cast(x2_data) + y_index)), data_type, ret); + if (ret != SUCCESS) { + GELOGE(ret, "BCastComputeCheck func execute failed, datatype is %d.", data_type); + return ret; + } + v_output.push_back(value); + } + + return SUCCESS; + } + + private: + /// + /// @ingroup domi_calibration + /// @brief reverse elements in kVecInt + /// @param [in] shape dim info + /// @return null + /// + static void Reverse(kVecInt &shape); + + /// + /// @ingroup domi_calibration + /// @brief two Tensor with different shape, set broadcast info + /// @param [in] x first input Tensor dim info + /// @param [in] y second input Tensor dim info + /// @return null + /// + ge::Status SetShapeDifferentInfo(const kVecInt &x, const kVecInt &y); + /// + /// @ingroup domi_calibration + /// @brief extend Tensor dim + /// @param [in] x first input Tensor dim info + /// @param [in] y second input Tensor dim info + /// @return null + /// + 
void ExtendTensorDim(kVecInt &x, kVecInt &y); + /// + /// @ingroup domi_calibration + /// @brief reverse all intermediate shape params + /// @param [in] void + /// @return null + /// + void ReverseAllIntermediateShapes(); + + kVecInt x_reshape_; + kVecInt x_bcast_; + kVecInt y_reshape_; + kVecInt y_bcast_; + kVecInt result_; + kVecInt output_; + kVecInt grad_x_reduce_idx_; + kVecInt grad_y_reduce_idx_; +}; +} // namespace ge + +#endif // GE_GRAPH_COMMON_BCAST_H_ diff --git a/src/ge/graph/common/omg_util.cc b/src/ge/graph/common/omg_util.cc new file mode 100644 index 00000000..334bcdc4 --- /dev/null +++ b/src/ge/graph/common/omg_util.cc @@ -0,0 +1,192 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/common/omg_util.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/graph_utils.h" + +using ge::AttrUtils; +using ge::OpDescPtr; + +namespace ge { +/// +/// @brief get the Original Type of FrameworkOp +/// @param [in] node +/// @param [out] type +/// @return Status +/// +Status GetOriginalType(const ge::NodePtr &node, string &type) { + GE_CHECK_NOTNULL(node); + type = node->GetType(); + GE_IF_BOOL_EXEC(type != FRAMEWORKOP, return SUCCESS); + GE_CHECK_NOTNULL(node->GetOpDesc()); + bool ret = ge::AttrUtils::GetStr(node->GetOpDesc(), ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, type); + if (!ret) { + GELOGE(INTERNAL_ERROR, "Get FrameWorkOp original type [%s]", type.c_str()); + return INTERNAL_ERROR; + } + GELOGD("Get FrameWorkOp original type [%s]", type.c_str()); + return SUCCESS; +} + +/// +/// @brief set op stream_label +/// @param [in] node +/// @param [in] label +/// @return Status +/// +Status SetStreamLabel(const ge::NodePtr &node, const std::string &label) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + + if (!AttrUtils::SetStr(tmp_desc, ATTR_NAME_STREAM_LABEL, label)) { + GELOGE(FAILED, "Op: %s set ATTR_NAME_STREAM_LABEL failed", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief set op cycle_event flag +/// @param [in] node +/// @return Status +/// +Status SetCycleEvent(const ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + if (!AttrUtils::SetBool(tmp_desc, ATTR_NAME_STREAM_CYCLE_EVENT_FLAG, true)) { + GELOGE(FAILED, "Op: %s set ATTR_NAME_STREAM_CYCLE_EVENT_FLAG failed", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief set op active_label_list +/// @param [in] node +/// @param [in] active_label_list +/// @return Status +/// +Status SetActiveLabelList(const ge::NodePtr 
&node, const std::vector &active_label_list) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + if (!AttrUtils::SetListStr(tmp_desc, ge::ATTR_NAME_ACTIVE_LABEL_LIST, active_label_list)) { + GELOGE(FAILED, "Op: %s set ATTR_NAME_ACTIVE_LABEL_LIST failed", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief set op branch_label +/// @param [in] node +/// @param [in] branch_label +/// @return Status +/// +Status SetSwitchBranchNodeLabel(const ge::NodePtr &node, const std::string &branch_label) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + if (!AttrUtils::SetStr(tmp_desc, ge::ATTR_NAME_SWITCH_BRANCH_NODE_LABEL, branch_label)) { + GELOGE(FAILED, "Op: %s set ATTR_NAME_SWITCH_BRANCH_NODE_LABEL failed", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief set op true_branch flag +/// @param [in] node +/// @param [in] value +/// @return Status +/// +Status SetSwitchTrueBranchFlag(const ge::NodePtr &node, bool value) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + if (!AttrUtils::SetBool(tmp_desc, ge::ATTR_NAME_SWITCH_TRUE_BRANCH_FLAG, value)) { + GELOGE(FAILED, "Op: %s set ATTR_NAME_SWITCH_TRUE_BRANCH_FLAG failed", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief set op original name +/// @param [in] node +/// @param [in] orig_name +/// @return Status +/// +Status SetOriginalNodeName(const ge::NodePtr &node, const std::string &orig_name) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + if (!AttrUtils::SetStr(tmp_desc, ge::ATTR_NAME_ORIG_NODE_NAME, orig_name)) { + GELOGE(FAILED, "Op: %s set ATTR_NAME_ORIG_NODE_NAME failed", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief set op cyclic_dependence flag +/// @param 
[in] node +/// @return Status +/// +Status SetCyclicDependenceFlag(const ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + if (!AttrUtils::SetBool(tmp_desc, ge::ATTR_NAME_CYCLIC_DEPENDENCE_FLAG, true)) { + GELOGE(FAILED, "Op: %s set ATTR_NAME_CYCLIC_DEPENDENCE_FLAG failed", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief set op next_iteration name +/// @param [in] node +/// @param [in] next +/// @return Status +/// +Status SetNextIteration(const ge::NodePtr &node, const std::string &next) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + + if (!AttrUtils::SetStr(tmp_desc, ge::ATTR_NAME_NEXT_ITERATION, next)) { + GELOGE(FAILED, "Op: %s set ATTR_NAME_NEXT_ITERATION failed", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/common/omg_util.h b/src/ge/graph/common/omg_util.h new file mode 100644 index 00000000..1f93c92b --- /dev/null +++ b/src/ge/graph/common/omg_util.h @@ -0,0 +1,101 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_COMMON_OMG_UTIL_H_ +#define GE_GRAPH_COMMON_OMG_UTIL_H_ + +#include +#include +#include +#include + +#include "common/types.h" +#include "common/util.h" +#include "graph/node.h" + +namespace ge { +/// +/// @brief get the Original Type of FrameworkOp +/// @param [in] node +/// @param [out] type +/// @return Status +/// +Status GetOriginalType(const ge::NodePtr &node, string &type); + +/// +/// @brief set op stream_label +/// @param [in] node +/// @param [in] label +/// @return Status +/// +Status SetStreamLabel(const ge::NodePtr &node, const std::string &label); + +/// +/// @brief set op cycle_event flag +/// @param [in] node +/// @return Status +/// +Status SetCycleEvent(const ge::NodePtr &node); + +/// +/// @brief set op active_label_list +/// @param [in] node +/// @param [in] label +/// @return Status +/// +Status SetActiveLabelList(const ge::NodePtr &node, const std::vector &active_label_list); + +/// +/// @brief set op branch_label +/// @param [in] node +/// @param [in] branch_label +/// @return Status +/// +Status SetSwitchBranchNodeLabel(const ge::NodePtr &node, const std::string &branch_label); + +/// +/// @brief set op true_branch flag +/// @param [in] node +/// @param [in] value +/// @return Status +/// +Status SetSwitchTrueBranchFlag(const ge::NodePtr &node, bool value); + +/// +/// @brief set op original name +/// @param [in] node +/// @param [in] orig_name +/// @return Status +/// +Status SetOriginalNodeName(const ge::NodePtr &node, const std::string &orig_name); + +/// +/// @brief set op cyclic_dependence flag +/// @param [in] node +/// @return Status +/// +Status SetCyclicDependenceFlag(const ge::NodePtr &node); + +/// +/// @brief set op next_iteration name +/// @param [in] node +/// @param [in] next +/// @return Status +/// +Status SetNextIteration(const ge::NodePtr &node, const std::string &next); +} // namespace ge + +#endif // GE_GRAPH_COMMON_OMG_UTIL_H_ diff --git a/src/ge/graph/common/transop_util.cc 
b/src/ge/graph/common/transop_util.cc new file mode 100644 index 00000000..b9754bed --- /dev/null +++ b/src/ge/graph/common/transop_util.cc @@ -0,0 +1,63 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/common/transop_util.h" + +#include "common/types.h" + +namespace { +const int kInvalidTransopDataIndex = -1; +} // namespace + +namespace ge { +TransOpUtil::TransOpUtil() { + transop_index_map_ = {{TRANSDATA, 0}, {TRANSPOSE, 0}, {TRANSPOSED, 0}, + {RESHAPE, 0}, {REFORMAT, 0}, {CAST, 0}}; +} + +TransOpUtil::~TransOpUtil() {} + +TransOpUtil &TransOpUtil::Instance() { + static TransOpUtil inst; + return inst; +} + +bool TransOpUtil::IsTransOp(const NodePtr &node) { + if (node == nullptr) { + return false; + } + return IsTransOp(node->GetType()); +} + +bool TransOpUtil::IsTransOp(const std::string &type) { + return Instance().transop_index_map_.find(type) != Instance().transop_index_map_.end(); +} + +int TransOpUtil::GetTransOpDataIndex(const NodePtr &node) { + if (node == nullptr) { + return kInvalidTransopDataIndex; + } + return GetTransOpDataIndex(node->GetType()); +} + +int TransOpUtil::GetTransOpDataIndex(const std::string &type) { + auto it = Instance().transop_index_map_.find(type); + if (it != Instance().transop_index_map_.end()) { + return it->second; + } + return kInvalidTransopDataIndex; +} +} // namespace ge diff --git a/src/ge/graph/common/transop_util.h 
b/src/ge/graph/common/transop_util.h new file mode 100644 index 00000000..041a7637 --- /dev/null +++ b/src/ge/graph/common/transop_util.h @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_COMMON_TRANSOP_UTIL_H_ +#define GE_GRAPH_COMMON_TRANSOP_UTIL_H_ + +#include +#include + +#include "graph/node.h" + +namespace ge { +class GE_FUNC_HOST_VISIBILITY GE_FUNC_DEV_VISIBILITY TransOpUtil { + public: + static bool IsTransOp(const NodePtr &node); + + static bool IsTransOp(const std::string &type); + + static int GetTransOpDataIndex(const NodePtr &node); + + static int GetTransOpDataIndex(const std::string &type); + + private: + TransOpUtil(); + + ~TransOpUtil(); + + static TransOpUtil &Instance(); + + typedef std::unordered_map transop_index_op; + transop_index_op transop_index_map_; +}; +} // namespace ge + +#endif // GE_GRAPH_COMMON_TRANSOP_UTIL_H_ diff --git a/src/ge/graph/execute/graph_execute.cc b/src/ge/graph/execute/graph_execute.cc new file mode 100644 index 00000000..d686791e --- /dev/null +++ b/src/ge/graph/execute/graph_execute.cc @@ -0,0 +1,490 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/execute/graph_execute.h" + +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/model_parser/base.h" +#include "graph/load/new_model_manager/model_manager.h" +#include "omm/csa_interact.h" +#include "runtime/dev.h" +#include "runtime/mem.h" + +namespace ge { +GraphExecutor::GraphExecutor() + : init_flag_(false), + train_graph_flag_(false), + sync_run_mutex_(nullptr), + condition_(nullptr), + graph_run_listener_(nullptr), + graph_context_(nullptr), + last_graph_id_(UINT32_MAX), + malloc_flag_(false) {} + +GraphExecutor::~GraphExecutor() { + outputs_desc_.clear(); + if (malloc_flag_) { + for (auto &buffer_addr : buffer_addr_) { + rtError_t rt_ret; + rt_ret = rtFreeHost(buffer_addr); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "[GraphManager] subgraph free buffer failed, ret: 0x%X", rt_ret); + } + } + } + malloc_flag_ = false; + buffer_addr_.clear(); +} + +Status GraphExecutor::SetCondition(std::mutex *mutex, std::condition_variable *cond, + std::shared_ptr listener) { + if (mutex == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[SetCondition] input param mutex is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + if (cond == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[SetCondition] input param cond is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + if (listener == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[SetCondition] input param listener is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + + sync_run_mutex_ = mutex; + condition_ = cond; + + graph_run_listener_ = listener; + + 
init_flag_ = true; + + return SUCCESS; +} + +Status GraphExecutor::SetGraphContext(GraphContextPtr graph_context_ptr) { + if (graph_context_ptr == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[SetGraphContext] input param graph_context_ptr is nullptr"); + return GE_GRAPH_PARAM_NULLPTR; + } + graph_context_ = graph_context_ptr; + return SUCCESS; +} + +void GraphExecutor::SetTrainFlag(bool is_train_graph) { train_graph_flag_ = is_train_graph; } + +Status GraphExecutor::FreeInOutBuffer() { + if (malloc_flag_) { + for (auto iter = buffer_addr_.begin(); iter != buffer_addr_.end(); ++iter) { + rtError_t rt_ret; + rt_ret = rtFreeHost(*iter); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "[GraphManager] subgraph free buffer failed, ret: 0x%X", rt_ret); + (void)buffer_addr_.erase(buffer_addr_.begin(), iter); + return GE_GRAPH_FREE_FAILED; + } + } + buffer_addr_.clear(); + + malloc_flag_ = false; + return SUCCESS; + } else { + GELOGI("[GraphManager] not malloc buffer."); + return SUCCESS; + } +} + +Status GraphExecutor::MallocInOutBuffer(const std::vector &buffer_size, std::vector &data_addr) { + if (malloc_flag_) { + auto all_size_same = true; + if (buffer_size.size() == buffer_size_.size()) { + for (size_t i = 0; i < buffer_size.size(); i++) { + if (buffer_size[i] != buffer_size_[i]) { + all_size_same = false; + break; + } + } + } else { + all_size_same = false; + } + if (all_size_same) { + data_addr = buffer_addr_; + return SUCCESS; + } + buffer_size_.clear(); + auto rt_ret = FreeInOutBuffer(); + if (rt_ret != SUCCESS) { + GELOGE(RT_FAILED, "[SubGraphInfo] MallocInOutBuffer free buffer failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + } + + rtError_t rt_ret; + for (size_t i = 0; i < buffer_size.size(); ++i) { + void *tmp_buf = nullptr; + rt_ret = rtMallocHost(&tmp_buf, buffer_size[i]); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "[GraphManager] subgraph malloc buffer failed, ret: 0x%X", rt_ret); + return GE_GRAPH_MALLOC_FAILED; + } + malloc_flag_ = 
true; + data_addr.push_back(tmp_buf); + buffer_addr_.push_back(tmp_buf); + } + buffer_size_ = buffer_size; + return SUCCESS; +} + +Status GraphExecutor::PrepareInputData(const std::vector &input_tensor, InputData &graph_input_data, + OutputData &graph_output_data, std::vector &output_desc) { + // Preprocessing input data + graph_input_data.index = 0; + graph_input_data.timeout = 0; + graph_input_data.timestamp = 0; + std::size_t inputSize = input_tensor.size(); + std::size_t output_size = output_desc.size(); + std::vector buffer_size_vec; + std::vector addr_vec; + + for (std::size_t i = 0; i < inputSize; ++i) { + const GeTensor *InTensor = &input_tensor[i]; + GE_CHECK_NOTNULL(InTensor); + buffer_size_vec.push_back(static_cast(InTensor->GetData().size())); + } + + for (const auto &desc : output_desc) { + buffer_size_vec.push_back(desc.size); + } + + Status ret = MallocInOutBuffer(buffer_size_vec, addr_vec); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_MALLOC_FAILED, "[GraphExecutor] Malloc mem failed"); + return GE_GRAPH_MALLOC_FAILED; + } + + for (std::size_t i = 0; i < input_tensor.size() && i < addr_vec.size(); ++i) { + const GeTensor *in_tensor = &input_tensor[i]; + GE_CHECK_NOTNULL(in_tensor); + if ((addr_vec[i] != nullptr) && (in_tensor->GetData().data() != nullptr)) { + if (memcpy_s(addr_vec[i], buffer_size_vec[i], in_tensor->GetData().data(), in_tensor->GetData().size()) != 0) { + GELOGE(GE_GRAPH_EXECUTE_FAILED, "[GraphExecutor] memcpy input data failed."); + return GE_GRAPH_EXECUTE_FAILED; + } + } + + DataBuffer in_data_buf; + in_data_buf.data = reinterpret_cast(addr_vec[i]); + in_data_buf.length = static_cast(in_tensor->GetData().size()); + in_data_buf.isDataSupportMemShare = false; + graph_input_data.blobs.push_back(in_data_buf); + } + + graph_output_data.index = 0; + + for (std::size_t j = 0; j < output_size; j++) { + auto desc = output_desc[j]; + uint32_t buffer_size = desc.size; + + DataBuffer out_data_buf; + out_data_buf.data = 
reinterpret_cast(addr_vec[inputSize + j]); + out_data_buf.length = buffer_size; + out_data_buf.isDataSupportMemShare = false; + graph_output_data.blobs.push_back(out_data_buf); + } + + return SUCCESS; +} + +Status GraphExecutor::SyncExecuteModel(uint32_t model_id, const std::vector &input_tensor, + std::vector &output_tensor) { + // Prepare input and output + std::vector inputs_desc; + std::vector output_desc; + + GELOGI("[ExecuteGraph] GetInputOutputDescInfo via new ome begin."); + Status ret = GetInputOutputDescInfo(model_id, inputs_desc, output_desc); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_GET_IN_OUT_FAILED, "[GraphExecutor] GetInputOutputDescInfo failed, modelId=%u.", model_id); + return GE_GRAPH_GET_IN_OUT_FAILED; + } + outputs_desc_.assign(output_desc.begin(), output_desc.end()); + + InputData input_data; + OutputData output_data; + input_data.model_id = model_id; + ret = PrepareInputData(input_tensor, input_data, output_data, output_desc); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_PREPARE_FAILED, "[GraphExecutor] PrepareInputData failed, modelId=%u.", model_id); + return GE_GRAPH_PREPARE_FAILED; + } + + if (graph_run_listener_->ResetResult() != SUCCESS) { + GELOGE(GE_GRAPH_EXECUTE_FAILED, "Reset result failed"); + return GE_GRAPH_EXECUTE_FAILED; + } + + // Run mode async + GELOGI("[ExecuteGraph] DataInput via new ome begin."); + ret = DataInput(input_data, output_data); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_DATA_INPUT_FAILED, "[GraphExecutor] push data failed, modelId=%u.", model_id); + return GE_GRAPH_DATA_INPUT_FAILED; + } + GELOGI("[GraphExecutor] input data push to wrapper finish, waiting for result..."); + + // Pending until async execute graph complete + { + std::unique_lock ulock(*sync_run_mutex_); + if (!graph_run_listener_->IsFinished()) { + (*condition_).wait(ulock); + } + + // Run graph return + uint32_t result_code = graph_run_listener_->GetResultCode(); + if (result_code != SUCCESS) { + GELOGE(GE_GRAPH_EXECUTE_FAILED, "[GraphExecutor] 
execute model failed, ret=%u, modelId=%u.", result_code, + model_id); + return GE_GRAPH_EXECUTE_FAILED; + } + } + for (size_t i = 0; i < output_data.blobs.size(); ++i) { + DataBuffer out_data_tmp = output_data.blobs[i]; + CHECK_FALSE_EXEC(out_data_tmp.length != 0, + GELOGE(GE_GRAPH_EXECUTE_FAILED, "Failed to allocate memory, length is 0."); + return GE_GRAPH_EXECUTE_FAILED); + std::unique_ptr out_buf_tmp(new (std::nothrow) uint8_t[out_data_tmp.length]); + if (out_buf_tmp == nullptr) { + GELOGE(FAILED, "Failed to allocate memory."); + return FAILED; + } + rtError_t ret_value = rtMemcpy(out_buf_tmp.get(), out_data_tmp.length, out_data_tmp.data, out_data_tmp.length, + RT_MEMCPY_DEVICE_TO_HOST); + CHECK_FALSE_EXEC(ret_value == RT_ERROR_NONE, + GELOGE(GE_GRAPH_EXECUTE_FAILED, "Call rt api rtMemcpy failed, ret: 0x%X", ret); + return GE_GRAPH_EXECUTE_FAILED); + GeTensor out_tensor; + std::vector shape_dims; + for (const auto &dim : output_desc[i].shape_info.dims) { + shape_dims.push_back(dim); + } + + GeShape out_shape(shape_dims); + out_tensor.MutableTensorDesc().SetShape(out_shape); + out_tensor.MutableTensorDesc().SetDataType((DataType)output_desc[i].data_type); + if (out_tensor.SetData(out_buf_tmp.get(), out_data_tmp.length) != SUCCESS) { + GELOGE(FAILED, "Out tensor set data failed"); + return FAILED; + } + output_tensor.push_back(out_tensor); + } + + GELOGI("[GraphExecutor] execute model success, modelId=%u.", model_id); + + return SUCCESS; +} + +void GraphExecutor::InitModelIdInfo(std::vector &out_model_id_info, + std::vector &sub_graph_vec, uint32_t output_size) { + for (uint32_t i = 0; i < output_size; i++) { + for (size_t j = 0; j < sub_graph_vec.size(); j++) { + if (sub_graph_vec[j]->GetOutputFlag().size() == output_size && sub_graph_vec[j]->GetOutputFlag().at(i)) { + out_model_id_info.push_back(sub_graph_vec[j]->GetModelIdInfo().model_id); + } + } + } +} + +Status GraphExecutor::FreeExecuteMemory() { + auto ret = FreeInOutBuffer(); + if (ret != SUCCESS) { + 
GELOGE(ret, "[FreeExecuteMemory] FreeInOutBuffer Error!"); + return ret; + } + + return SUCCESS; +} + +Status GraphExecutor::ExecuteGraph(GraphId graph_id, const GeModelPtr &ge_model, + const std::vector &input_tensor, std::vector &output_tensor) { + if (graph_id != last_graph_id_) { + auto ret = FreeExecuteMemory(); + if (ret != SUCCESS) { + return ret; + } + } + last_graph_id_ = graph_id; + + if (!init_flag_) { + GELOGE(GE_GRAPH_EXECUTE_NOT_INIT, "[GraphExecutor] AI Core Engine without calling SetCondition!"); + return GE_GRAPH_EXECUTE_NOT_INIT; + } + GE_CHECK_NOTNULL_EXEC(ge_model, return FAILED); + Status ret = SyncExecuteModel(ge_model->GetModelId(), input_tensor, output_tensor); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_SYNC_MODEL_FAILED, "[GraphExecutor] SyncExecuteModel Error!"); + return GE_GRAPH_SYNC_MODEL_FAILED; + } + + return SUCCESS; +} + +Status GraphExecutor::ExecuteGraphAsync(GraphId graph_id, const GeModelPtr &ge_model, + const std::vector &input_tensor, + std::vector &output_tensor) { + GELOGI("[GraphExecutor] Start to async execute graph, graph_id=%u", graph_id); + if (graph_id != last_graph_id_) { + auto ret = FreeExecuteMemory(); + if (ret != SUCCESS) { + return ret; + } + } + last_graph_id_ = graph_id; + GE_CHECK_NOTNULL_EXEC(ge_model, return FAILED); + Status ret = AsyncExecuteModel(ge_model->GetModelId(), input_tensor, output_tensor); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_SYNC_MODEL_FAILED, "[GraphExecutor] AsyncExecuteModel Error!"); + return GE_GRAPH_SYNC_MODEL_FAILED; + } + + GELOGI("[GraphExecutor] Async execute graph success, graph_id=%u", graph_id); + return SUCCESS; +} + +Status GraphExecutor::AsyncExecuteModel(uint32_t model_id, const std::vector &inputs, + std::vector &outputs) { + try { + auto model_manager = ge::ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + GELOGI("RunAsync begin.model_id %u", model_id); + + Status ret = model_manager->DataInputTensor(model_id, inputs, outputs); + if (ret != SUCCESS) { + 
GELOGE(ret, "RunAsync: DataInput fail"); + return ret; + } + + GELOGI("RunAsync success."); + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "RunAsync failed, bad memory allocation occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return MEMALLOC_FAILED; + } catch (...) { + GELOGE(FAILED, "RunAsync failed, some exceptions occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return FAILED; + } + + return SUCCESS; +} + +Status GraphExecutor::DataInput(const InputData &input_data, OutputData &output_data) { + try { + auto model_manager = ge::ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->DataInput(input_data, output_data); + if (ret != SUCCESS) { + GELOGE(ret, "DataInput: DataInput failed."); + CsaInteract::GetInstance().WriteErrorCode(ret, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return ret; + } + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "DataInput failed, bad memory allocation occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return MEMALLOC_FAILED; + } catch (...) 
{ + GELOGE(FAILED, "DataInput failed, some exceptions occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return FAILED; + } + + return SUCCESS; +} + +Status GraphExecutor::GetInputOutputDescInfo(const uint32_t model_id, vector &input_desc, + vector &output_desc) { + try { + auto model_manager = ge::ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->GetInputOutputDescInfo(model_id, input_desc, output_desc); + if (ret != SUCCESS) { + GELOGE(ret, "GetInputOutputDescInfo failed."); + CsaInteract::GetInstance().WriteErrorCode(ret, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return ret; + } + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "GetInputOutputDescInfo failed, bad memory allocation occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return MEMALLOC_FAILED; + } catch (...) { + GELOGE(FAILED, "GetInputOutputDescInfo failed, some exceptions occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return FAILED; + } + + return SUCCESS; +} + +Status GraphExecutor::GetInputOutputDescInfo(const uint32_t model_id, vector &input_desc, + vector &output_desc, + std::vector &input_formats, std::vector &out_formats) { + try { + auto model_manager = ge::ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->GetInputOutputDescInfo(model_id, input_desc, output_desc, input_formats, out_formats); + if (ret != SUCCESS) { + GELOGE(ret, "GetInputOutputDescInfo failed."); + CsaInteract::GetInstance().WriteErrorCode(ret, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return ret; + } + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "GetInputOutputDescInfo failed, bad memory allocation occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return MEMALLOC_FAILED; + } catch 
(...) { + GELOGE(FAILED, "GetInputOutputDescInfo failed, some exceptions occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return FAILED; + } + + return SUCCESS; +} + +Status GraphExecutor::GetInputOutputDescInfoForZeroCopy(uint32_t model_id, vector &input_desc, + vector &output_desc, + std::vector &input_formats, + std::vector &out_formats) { + try { + auto model_manager = ge::ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = + model_manager->GetInputOutputDescInfoForZeroCopy(model_id, input_desc, output_desc, input_formats, out_formats); + if (ret != SUCCESS) { + GELOGE(ret, "GetInputOutputDescInfoForZeroCopy failed."); + return ret; + } + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "GetInputOutputDescInfoForZeroCopy failed, bad memory allocation occur !"); + return MEMALLOC_FAILED; + } catch (...) { + GELOGE(FAILED, "GetInputOutputDescInfoForZeroCopy failed, some exceptions occur !"); + return FAILED; + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/execute/graph_execute.h b/src/ge/graph/execute/graph_execute.h new file mode 100644 index 00000000..35376184 --- /dev/null +++ b/src/ge/graph/execute/graph_execute.h @@ -0,0 +1,117 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_EXECUTE_GRAPH_EXECUTE_H_ +#define GE_GRAPH_EXECUTE_GRAPH_EXECUTE_H_ + +#include + +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/ge_types.h" +#include "common/properties_manager.h" +#include "common/string_util.h" +#include "common/types.h" +#include "common/util.h" +#include "ge/ge_api_types.h" +#include "graph/compute_graph.h" +#include "graph/manager/graph_context.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/model.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/tensor_utils.h" + +namespace ge { +class GraphExecutor { + public: + GraphExecutor(); + + virtual ~GraphExecutor(); + + Status ExecuteGraph(GraphId graph_id, const GeModelPtr &ge_model, const std::vector &input_tensor, + std::vector &output_tensor); + + Status ExecuteGraphAsync(GraphId graph_id, const GeModelPtr &ge_model, const std::vector &input_tensor, + std::vector &output_tensor); + + Status SetCondition(std::mutex *mutex, std::condition_variable *cond, std::shared_ptr listener); + + Status SetGraphContext(GraphContextPtr graph_context_ptr); + + void SetTrainFlag(bool is_train_graph); + + const std::vector &GetOutputsDesc() const { return outputs_desc_; } + + Status FreeExecuteMemory(); + + static Status DataInput(const InputData &input_data, OutputData &output_data); + + static Status GetInputOutputDescInfo(const uint32_t model_id, vector &input_desc, + vector &output_desc); + + static Status GetInputOutputDescInfo(const uint32_t model_id, vector &input_desc, + vector &output_desc, std::vector &input_formats, + std::vector &output_formats); + + static Status GetInputOutputDescInfoForZeroCopy(uint32_t model_id, vector &input_desc, + vector &output_desc, + std::vector &input_formats, + std::vector &output_formats); + + private: + Status PrepareInputData(const std::vector &input_tensor, InputData &graph_input_data, + OutputData &graph_output_data, 
std::vector &output_desc); + + Status SyncExecuteModel(uint32_t model_id, const std::vector &input_tensor, + std::vector &output_tensor); + + Status AsyncExecuteModel(uint32_t model_id, const std::vector &input_tensor, + std::vector &output_tensor); + + void InitModelIdInfo(std::vector &out_model_id_info, std::vector &sub_graph_vec, + uint32_t output_size); + + Status FreeInOutBuffer(); + + Status MallocInOutBuffer(const std::vector &buffer_size, std::vector &data_addr); + + bool init_flag_; + + bool train_graph_flag_; + // For run graph synchronous return + std::mutex *sync_run_mutex_; + std::condition_variable *condition_; + + // Run graph asynchronous call back listener + std::shared_ptr graph_run_listener_; + + GraphContextPtr graph_context_; + + std::vector outputs_desc_; + GraphId last_graph_id_; + + bool malloc_flag_; + std::vector buffer_addr_; + std::vector buffer_size_; +}; +} // namespace ge + +#endif // GE_GRAPH_EXECUTE_GRAPH_EXECUTE_H_ diff --git a/src/ge/graph/load/graph_loader.cc b/src/ge/graph/load/graph_loader.cc new file mode 100644 index 00000000..2cf03022 --- /dev/null +++ b/src/ge/graph/load/graph_loader.cc @@ -0,0 +1,431 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/graph_loader.h" + +#include +#include + +#include "common/helper/model_helper.h" +#include "common/util.h" +#include "graph/ge_context.h" +#include "graph/load/new_model_manager/davinci_model_parser.h" +#include "graph/load/new_model_manager/model_manager.h" +#include "graph/manager/graph_var_manager.h" +#include "omm/csa_interact.h" +#include "runtime/dev.h" + +namespace ge { +GraphLoader::GraphLoader() = default; + +GraphLoader::~GraphLoader() = default; + +Status GraphLoader::LoadGraph(const std::shared_ptr &ge_model_ptr, + const std::shared_ptr &model_listener, ModelIdInfo &model_id_info) { + if (ge_model_ptr == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[LoadGraph] GE load graph model_ptr is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + + if (model_listener == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[LoadGraph] GE load graph model_listener is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + + std::shared_ptr model_ptr; + if (ModelHelper::TransGeModelToModel(ge_model_ptr, model_ptr) != SUCCESS) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[LoadGraph] GE load graph ge_model_ptr trans to ModelPtr failed."); + return GE_GRAPH_PARAM_NULLPTR; + } + GELOGI("[LoadGraph] GE load graph via new ome begin."); + Status ret = LoadModelOnline(model_id_info.model_id, model_ptr, model_listener); + if (ret != SUCCESS) { + GELOGE(ret, "[LoadGraph] GE load graph LoadGraph() return fail. err: %u", ret); + return ret; + } + GELOGI("[LoadGraph] GE load graph success. 
modelId: %u", model_id_info.model_id); + return ret; +} + +Status GraphLoader::LoadGraphAsync(const std::shared_ptr &ge_model_ptr, + const std::shared_ptr &model_async_listener, + ModelIdInfo &model_id_info) { + if (ge_model_ptr == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[LoadGraphAsync] GE load graph model_ptr is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + + if (model_async_listener == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[LoadGraphAsync] GE load graph model_listener is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + + std::shared_ptr model_ptr; + if (ModelHelper::TransGeModelToModel(ge_model_ptr, model_ptr) != SUCCESS) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[LoadGraph] GE load graph ge_model_ptr trans to ModelPtr failed."); + return GE_GRAPH_PARAM_NULLPTR; + } + + GELOGI("[LoadGraphAsync] GE load graph begin."); + Status ret = LoadModelOnline(model_id_info.model_id, model_ptr, model_async_listener); + if (ret != SUCCESS) { + GELOGE(ret, "[LoadGraphAsync] GE load graph LoadGraphAsync() return fail. err: %u", ret); + return ret; + } + + GELOGI("[LoadGraphAsync] GE load graph success. 
modelId: %u", model_id_info.model_id); + return ret; +} + +Status GraphLoader::UnloadModel(uint32_t model_id) { + auto model_manager = ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + GELOGI("UnLoad model begin, model_id:%u.", model_id); + + Status ret = model_manager->Stop(model_id); + if (ret != SUCCESS) { + GELOGE(ret, "UnloadModel: Stop failed."); + } + + ret = model_manager->Unload(model_id); + if (ret != SUCCESS) { + GELOGE(ret, "UnloadModel: Unload failed."); + CsaInteract::GetInstance().WriteErrorCode(ret, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_UNLOAD); + return ret; + } + GELOGI("UnLoad model success, model_id:%u.", model_id); + return SUCCESS; +} + +Status GraphLoader::LoadModelOnline(uint32_t &model_id, std::shared_ptr &model, + const std::shared_ptr &listener) { + rtError_t rt_ret = rtSetDevice(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + CsaInteract::GetInstance().WriteErrorCode(rt_ret, ERROR_MODULE_RUNTIME, JOBSUBSTATE_GRAPH_LOAD); + return RT_FAILED; + } + + try { + GELOGI("Load begin, model_id:%u.", model_id); + auto model_manager = ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->LoadModelOnline(model_id, model, listener); + if (ret != SUCCESS) { + GELOGE(ret, "LoadModel: Load failed. 
ret = %u", ret); + CsaInteract::GetInstance().WriteErrorCode(ret, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_LOAD); + + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + } + return ret; + } + + ret = model_manager->Start(model_id); + if (ret != SUCCESS) { + if (model_manager->Unload(model_id) != SUCCESS) { + GELOGE(ret, "LoadModel: Unload failed while trying to unload after a failed start."); + } + + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + } + + GELOGE(ret, "LoadModel: Start failed."); + CsaInteract::GetInstance().WriteErrorCode(ret, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return ret; + } + + GELOGI("Load model success, model_id:%u.", model_id); + } catch (std::bad_alloc &) { + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + } + + GELOGE(MEMALLOC_FAILED, "Load model failed, bad memory allocation occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_LOAD); + return MEMALLOC_FAILED; + } catch (...) 
{ + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + } + + GELOGE(FAILED, "Load failed, some exceptions occur !"); + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_LOAD); + return FAILED; + } + + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +Status GraphLoader::GetMaxUsedMemory(uint32_t model_id, uint64_t &max_size) { + auto model_manager = ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->GetMaxUsedMemory(model_id, max_size); + if (ret != SUCCESS) { + GELOGE(ret, "GetMaxUsedMemory: GetMaxUsedMemory failed."); + return ret; + } + return SUCCESS; +} + +Status GraphLoader::LoadDataFromFile(const std::string &path, const std::string &key_path, int32_t priority, + ModelData &model_data) { + Status ret; + try { + if (!CheckInputPathValid(path)) { + GELOGE(PARAM_INVALID, "model path is invalid: %s", path.c_str()); + return PARAM_INVALID; + } + + GELOGI("Load model begin, model path is: %s", path.c_str()); + if (!key_path.empty() && !CheckInputPathValid(key_path)) { + GELOGE(PARAM_INVALID, "decrypt_key path is invalid: %s", key_path.c_str()); + return PARAM_INVALID; + } + + ret = DavinciModelParser::LoadFromFile(path.c_str(), key_path.c_str(), priority, model_data); + if (ret != SUCCESS) { + GELOGE(ret, "LoadModelFromFile: Load failed. ret = %u", ret); + return ret; + } + + return SUCCESS; + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "Load model from file failed, bad memory allocation"); + ret = MEMALLOC_FAILED; + } catch (...) 
{ + GELOGE(FAILED, "Load model from file failed with exception"); + ret = FAILED; + } + + if (model_data.model_data != nullptr) { + delete[] static_cast(model_data.model_data); + model_data.model_data = nullptr; + } + return ret; +} + +Status GraphLoader::LoadModelFromFile(const std::string &path, const std::string &key_path, int32_t priority, + const std::shared_ptr &listener, uint32_t &model_id) { + Status ret; + ModelData model_data; + + try { + ret = LoadDataFromFile(path, key_path, priority, model_data); + if (ret != SUCCESS) { + GELOGE(ret, "LoadModelFromFile: Load failed. ret = %u", ret); + if (model_data.model_data != nullptr) { + delete[] static_cast(model_data.model_data); + model_data.model_data = nullptr; + } + return ret; + } + + ret = LoadModel(model_data, listener, model_id); + if (ret != SUCCESS) { + GELOGE(ret, "LoadModel: Load failed. ret = %u", ret); + if (model_data.model_data != nullptr) { + delete[] static_cast(model_data.model_data); + model_data.model_data = nullptr; + } + } + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "Load model from file failed, bad memory allocation"); + ret = MEMALLOC_FAILED; + } catch (...) { + GELOGE(FAILED, "Load model from file failed with exception"); + ret = FAILED; + } + + if (model_data.model_data != nullptr) { + delete[] static_cast(model_data.model_data); + model_data.model_data = nullptr; + } + + return ret; +} + +Status GraphLoader::LoadModel(const ModelData &model_data, const std::shared_ptr &listener, + uint32_t &model_id) { + try { + GELOGI("Load model begin, model_id:%u.", model_id); + + // For GeOp, Open Device 0 here. 
+ GE_CHK_RT_RET(rtSetDevice(0)); + auto model_manager = ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->LoadModelOffline(model_id, model_data, listener); + if (ret != SUCCESS) { + GE_CHK_RT(rtDeviceReset(0)); + GELOGE(ret, "LoadModel: Load failed."); + return ret; + } + ret = model_manager->Start(model_id); + if (ret != SUCCESS) { + if (model_manager->Unload(model_id) != SUCCESS) { + GELOGE(FAILED, "LoadModel: Unload failed while trying to unload after a failed start."); + } + GELOGE(ret, "LoadModel: Start failed."); + return ret; + } + GELOGI("LoadModel: Start model success, model_id:%u.", model_id); + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "Load model failed, bad memory allocation occur !"); + return MEMALLOC_FAILED; + } catch (...) { + GELOGE(FAILED, "Load model failed, some exceptions occur !"); + return FAILED; + } + + return SUCCESS; +} + +Status GraphLoader::CommandHandle(const Command &command) { + try { + auto model_manager = ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->HandleCommand(command); + if (ret != SUCCESS) { + GELOGE(ret, "CommandHandle: Command Handle failed."); + + return ret; + } + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "Command handle failed, bad memory allocation occur !"); + + return MEMALLOC_FAILED; + } catch (...) { + GELOGE(FAILED, "Command handle failed, some exceptions occur !"); + + return FAILED; + } + + return SUCCESS; +} + +Status GraphLoader::LoadModelFromData(uint32_t &model_id, const ModelData &model_data, void *dev_ptr, size_t memsize, + void *weight_ptr, size_t weightsize) { + try { + GELOGI("Load model begin, model_id:%u.", model_id); + + // For ACL, Open Device from App. 
+ auto model_manager = ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = + model_manager->LoadModelOffline(model_id, model_data, nullptr, dev_ptr, memsize, weight_ptr, weightsize); + if (ret != SUCCESS) { + GELOGE(ret, "Load model failed, model_id:%u.", model_id); + return ret; + } + + GELOGI("Load model success, model_id:%u.", model_id); + } catch (std::bad_alloc &) { + GELOGE(MEMALLOC_FAILED, "Load model failed, bad memory allocation occur !"); + return MEMALLOC_FAILED; + } catch (...) { + GELOGE(FAILED, "Load model failed, some exceptions occur !"); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @ingroup ge +/// @brief Load task list from ModelData with queue. +/// @param [out] model_id: model id allocate from manager. +/// @param [in] model_data: Model data load from offline model. +/// @param [in] input_queue_ids: input queue ids create from user. +/// @param [in] output_queue_ids: input queue ids create from user. +/// @return: 0 for success / others for fail +/// +Status GraphLoader::LoadModelWithQ(uint32_t &model_id, const ModelData &model_data, + const std::vector &input_queue_ids, + const std::vector &output_queue_ids) { + GELOGI("Load model with queue begin, model_id:%u.", model_id); + + // For ACL, Open Device from App. + auto model_manager = ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->LoadModelWithQ(model_id, model_data, input_queue_ids, output_queue_ids); + if (ret != SUCCESS) { + GELOGE(ret, "Load model with queue failed, model_id:%u.", model_id); + return ret; + } + + GELOGI("Load model with queue success, model_id:%u.", model_id); + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief execute model +/// @param [in] model_id model id +/// @param [in] stream stream to execute model on +/// @param [in] async_mode is asynchronize mode. 
+/// @param [in] input_data model input data +/// @param [out] output_data model output data +/// +Status GraphLoader::ExecuteModel(uint32_t model_id, rtStream_t stream, bool async_mode, const InputData &input_data, + OutputData &output_data) { + auto model_manager = ModelManager::GetInstance(); + GE_CHECK_NOTNULL(model_manager); + Status ret = model_manager->ExecuteModel(model_id, stream, async_mode, input_data, output_data); + if (ret != SUCCESS) { + GELOGE(ret, "Execute model failed, model_id:%u.", model_id); + return ret; + } + + GELOGI("Execute model success, model_id:%u.", model_id); + return SUCCESS; +} + +Status GraphLoader::GetMemoryInfo(int64_t &free) { + rtError_t rt_ret = rtSetDevice(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + CsaInteract::GetInstance().WriteErrorCode(rt_ret, ERROR_MODULE_RUNTIME, JOBSUBSTATE_GRAPH_LOAD); + return RT_FAILED; + } + size_t total_mem = 0; + size_t free_mem = 0; + rt_ret = rtMemGetInfo(&free_mem, &total_mem); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + // Add small page memory size + free = static_cast(free_mem + VarManager::Instance(0)->GetUseMaxMemorySize() - total_mem); + GELOGI("GetMemoryInfo free[%zu], total[%zu], return free[%ld]", free_mem, total_mem, free); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/load/graph_loader.h b/src/ge/graph/load/graph_loader.h new file mode 100644 index 00000000..d0620ce7 --- /dev/null +++ b/src/ge/graph/load/graph_loader.h @@ -0,0 +1,81 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_GRAPH_LOADER_H_ +#define GE_GRAPH_LOAD_GRAPH_LOADER_H_ + +#include +#include +#include + +#include "common/debug/log.h" +#include "common/fmk_types.h" +#include "common/ge_types.h" +#include "graph/compute_graph.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/model.h" +#include "runtime/mem.h" + +namespace ge { +class GraphLoader { + public: + GraphLoader(); + + virtual ~GraphLoader(); + + GraphLoader(const GraphLoader &in) = delete; + + GraphLoader &operator=(const GraphLoader &in) = delete; + + Status LoadGraph(const std::shared_ptr &ge_model_ptr, + const std::shared_ptr &model_listener, ModelIdInfo &model_id_info); + + Status LoadGraphAsync(const std::shared_ptr &ge_model_ptr, + const std::shared_ptr &model_async_listener, ModelIdInfo &model_id_info); + + static Status UnloadModel(uint32_t model_id); + + static Status GetMaxUsedMemory(uint32_t model_id, uint64_t &max_size); + + static Status LoadModel(const ModelData &model_data, const std::shared_ptr &listener, + uint32_t &model_id); + + static Status LoadModelFromFile(const std::string &path, const std::string &key_path, int32_t priority, + const std::shared_ptr &listener, uint32_t &model_id); + + static Status CommandHandle(const Command &command); + + static Status GetMemoryInfo(int64_t &free); + + static Status LoadDataFromFile(const std::string &path, const std::string &key_path, int32_t priority, + ModelData &model_data); + + static Status LoadModelFromData(uint32_t &model_id, const ModelData &model_data, void *dev_ptr, size_t mem_size, + 
void *weight_ptr, size_t weight_size); + + static Status LoadModelWithQ(uint32_t &model_id, const ModelData &model_data, + const std::vector &input_queue_ids, + const std::vector &output_queue_ids); + + static Status ExecuteModel(uint32_t model_id, rtStream_t stream, bool async_mode, const InputData &input_data, + OutputData &output_data); + + private: + static Status LoadModelOnline(uint32_t &model_id, std::shared_ptr &model, + const std::shared_ptr &listener); +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_GRAPH_LOADER_H_ diff --git a/src/ge/graph/load/new_model_manager/data_dumper.cc b/src/ge/graph/load/new_model_manager/data_dumper.cc new file mode 100644 index 00000000..e4f1a5af --- /dev/null +++ b/src/ge/graph/load/new_model_manager/data_dumper.cc @@ -0,0 +1,323 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/data_dumper.h" + +#include + +#include "common/properties_manager.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/util.h" +#include "graph/anchor.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "graph/utils/attr_utils.h" +#include "proto/ge_ir.pb.h" +#include "proto/op_mapping_info.pb.h" +#include "runtime/mem.h" + +namespace { +const uint32_t kAicpuLoadFlag = 1; +const uint32_t kAicpuUnloadFlag = 0; +} // namespace + +static int32_t GetIrDataType(ge::DataType data_type) { + static const std::map data_type_map = { + {ge::DT_UNDEFINED, ge::proto::DT_UNDEFINED}, + {ge::DT_FLOAT, ge::proto::DT_FLOAT}, + {ge::DT_FLOAT16, ge::proto::DT_FLOAT16}, + {ge::DT_INT8, ge::proto::DT_INT8}, + {ge::DT_UINT8, ge::proto::DT_UINT8}, + {ge::DT_INT16, ge::proto::DT_INT16}, + {ge::DT_UINT16, ge::proto::DT_UINT16}, + {ge::DT_INT32, ge::proto::DT_INT32}, + {ge::DT_INT64, ge::proto::DT_INT64}, + {ge::DT_UINT32, ge::proto::DT_UINT32}, + {ge::DT_UINT64, ge::proto::DT_UINT64}, + {ge::DT_BOOL, ge::proto::DT_BOOL}, + {ge::DT_DOUBLE, ge::proto::DT_DOUBLE}, + {ge::DT_DUAL, ge::proto::DT_DUAL}, + {ge::DT_DUAL_SUB_INT8, ge::proto::DT_DUAL_SUB_INT8}, + {ge::DT_DUAL_SUB_UINT8, ge::proto::DT_DUAL_SUB_UINT8}, + {ge::DT_COMPLEX64, ge::proto::DT_COMPLEX64}, + {ge::DT_COMPLEX128, ge::proto::DT_COMPLEX128}, + {ge::DT_QINT8, ge::proto::DT_QINT8}, + {ge::DT_QINT16, ge::proto::DT_QINT16}, + {ge::DT_QINT32, ge::proto::DT_QINT32}, + {ge::DT_QUINT8, ge::proto::DT_QUINT8}, + {ge::DT_QUINT16, ge::proto::DT_QUINT16}, + {ge::DT_RESOURCE, ge::proto::DT_RESOURCE}, + {ge::DT_STRING_REF, ge::proto::DT_STRING_REF}, + {ge::DT_STRING, ge::proto::DT_STRING}, + }; + + auto iter = data_type_map.find(data_type); + if (iter == data_type_map.end()) { + return static_cast(ge::proto::DT_UNDEFINED); + } + + return static_cast(iter->second); +} + +namespace ge { +DataDumper::~DataDumper() { + 
ReleaseDevMem(&dev_mem_load_); + ReleaseDevMem(&dev_mem_unload_); +} + +void DataDumper::ReleaseDevMem(void **ptr) noexcept { + if (ptr == nullptr) { + return; + } + + if (*ptr != nullptr) { + rtError_t rt_ret = rtFree(*ptr); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtFree failed, ret: 0x%X", rt_ret); + } + + *ptr = nullptr; + } +} + +void DataDumper::SaveDumpInput(const std::shared_ptr &node) { + if (node != nullptr) { + auto input_op_desc = node->GetOpDesc(); + if (input_op_desc == nullptr) { + GELOGE(PARAM_INVALID, "input op desc is null."); + return; + } + + for (auto &out_data_anchor : node->GetAllOutDataAnchors()) { + for (auto &dst_in_data_anchor : out_data_anchor->GetPeerInDataAnchors()) { + ge::NodePtr dst_node = dst_in_data_anchor->GetOwnerNode(); + auto op_desc = dst_node->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "input op desc is null."); + return; + } + input_map_.insert( + {op_desc->GetName(), {input_op_desc, dst_in_data_anchor->GetIdx(), out_data_anchor->GetIdx()}}); + } + } + } +} + +void DataDumper::SaveDumpTask(uint32_t task_id, const std::shared_ptr &op_desc, uintptr_t args) { + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "Opdesc is nullptr"); + return; + } + + GELOGI("Save dump task %s, id: %u.", op_desc->GetName().c_str(), task_id); + op_list_.push_back({task_id, op_desc, args, true}); + + for (auto iter = input_map_.equal_range(op_desc->GetName()); iter.first != iter.second; ++iter.first) { + InnerInputMapping &inner_input_mapping = iter.first->second; + auto &data_op = inner_input_mapping.data_op; + if (data_op == nullptr) { + GELOGE(PARAM_INVALID, "data_op is null."); + return; + } + + auto output_tensor = data_op->GetOutputDescPtr(inner_input_mapping.output_anchor_index); + if (output_tensor == nullptr) { + GELOGE(PARAM_INVALID, "output_tensor is null, index: %d, size: %zu.", inner_input_mapping.output_anchor_index, + data_op->GetOutputsSize()); + return; + } + + uintptr_t data_addr = args - 
sizeof(void *) * data_op->GetInputOffset().size() + + sizeof(void *) * static_cast(inner_input_mapping.output_anchor_index); + GELOGI("Save input dump task %s, id: %u.", data_op->GetName().c_str(), task_id); + op_list_.push_back({task_id, data_op, data_addr, false, inner_input_mapping.input_anchor_index, + inner_input_mapping.output_anchor_index}); + } +} + +Status DataDumper::LoadDumpInfo() { + GELOGI("%zu op need dump in %s.", op_list_.size(), model_name_.c_str()); + if (op_list_.empty()) { + return SUCCESS; + } + + aicpu::dump::OpMappingInfo op_mapping_info; + + op_mapping_info.set_dump_path(PropertiesManager::Instance().GetDumpOutputPath() + std::to_string(device_id_) + "/"); + + op_mapping_info.set_model_name(model_name_); + op_mapping_info.set_model_id(model_id_); + op_mapping_info.set_flag(kAicpuLoadFlag); + + for (const auto &op_iter : op_list_) { + aicpu::dump::Task task; + task.set_task_id(op_iter.task_id); + task.mutable_op()->set_op_name(op_iter.op->GetName()); + task.mutable_op()->set_op_type(op_iter.op->GetType()); + + if (op_iter.is_task) { + // tbe or aicpu op + const auto &output_descs = op_iter.op->GetAllOutputsDesc(); + const std::vector output_addrs = ModelUtils::GetOutputDataAddrs(runtime_param_, op_iter.op, false); + if (output_descs.size() != output_addrs.size()) { + GELOGE(PARAM_INVALID, "Invalid output desc addrs size %zu, op %s has %zu output desc.", output_addrs.size(), + op_iter.op->GetName().c_str(), output_descs.size()); + return PARAM_INVALID; + } + + for (size_t i = 0; i < output_descs.size(); ++i) { + aicpu::dump::Output output; + output.set_data_type(static_cast(GetIrDataType(output_descs.at(i).GetDataType()))); + output.set_format(static_cast(output_descs.at(i).GetFormat())); + + for (auto dim : output_descs.at(i).GetShape().GetDims()) { + output.mutable_shape()->add_dim(dim); + } + + std::string origin_name; + int32_t origin_output_index = -1; + (void)AttrUtils::GetStr(&output_descs.at(i), ATTR_NAME_DATA_DUMP_ORIGIN_NAME, 
origin_name); + (void)AttrUtils::GetInt(&output_descs.at(i), ATTR_NAME_DATA_DUMP_ORIGIN_OUTPUT_INDEX, origin_output_index); + output.set_original_name(origin_name); + output.set_original_output_index(origin_output_index); + output.set_original_output_format(static_cast(output_descs.at(i).GetOriginFormat())); + output.set_original_output_data_type(static_cast(output_descs.at(i).GetOriginDataType())); + // due to lhisi virtual addr bug, cannot use args now + output.set_address(static_cast(reinterpret_cast(output_addrs[i]))); + + task.mutable_output()->Add(std::move(output)); + } + op_mapping_info.mutable_task()->Add(std::move(task)); + continue; + } + + // else data, const or variable op + aicpu::dump::Output output; + auto output_tensor = op_iter.op->GetOutputDescPtr(op_iter.output_anchor_index); + const std::vector output_addrs = ModelUtils::GetOutputDataAddrs(runtime_param_, op_iter.op, false); + if (output_tensor == nullptr) { + GELOGE(PARAM_INVALID, "output_tensor is null, index: %d, size: %zu.", op_iter.output_anchor_index, + op_iter.op->GetOutputsSize()); + return PARAM_INVALID; + } + + output.set_data_type(static_cast(GetIrDataType(output_tensor->GetDataType()))); + output.set_format(static_cast(output_tensor->GetFormat())); + + for (auto dim : output_tensor->GetShape().GetDims()) { + output.mutable_shape()->add_dim(dim); + } + + std::string origin_name; + int32_t origin_output_index = -1; + (void)AttrUtils::GetStr(output_tensor, ATTR_NAME_DATA_DUMP_ORIGIN_NAME, origin_name); + (void)AttrUtils::GetInt(output_tensor, ATTR_NAME_DATA_DUMP_ORIGIN_OUTPUT_INDEX, origin_output_index); + output.set_original_name(origin_name); + output.set_original_output_index(origin_output_index); + output.set_original_output_format(static_cast(output_tensor->GetOriginFormat())); + output.set_original_output_data_type(static_cast(output_tensor->GetOriginDataType())); + // due to lhisi virtual addr bug, cannot use args now + 
output.set_address(static_cast(reinterpret_cast(output_addrs[op_iter.output_anchor_index]))); + + task.mutable_output()->Add(std::move(output)); + + op_mapping_info.mutable_task()->Add(std::move(task)); + } + + std::string proto_str; + uint32_t proto_size = op_mapping_info.ByteSizeLong(); + bool ret = op_mapping_info.SerializeToString(&proto_str); + if (!ret || proto_size == 0) { + GELOGE(FAILED, "Protobuf SerializeToString failed, proto size %u.", proto_size); + return FAILED; + } + + if (dev_mem_load_ != nullptr) { + GELOGW("dev_mem_load_ has been used."); + ReleaseDevMem(&dev_mem_load_); + } + + rtError_t rt_ret = rtMalloc(&dev_mem_load_, proto_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtMalloc failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtMemcpy(dev_mem_load_, proto_size, proto_str.c_str(), proto_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtMemcpy failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtDatadumpInfoLoad(dev_mem_load_, proto_size); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtDatadumpInfoLoad failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + load_flag_ = true; + GELOGI("LoadDumpInfo success, proto size: %zu.", proto_size); + return SUCCESS; +} + +Status DataDumper::UnloadDumpInfo() { + if (!load_flag_) { + GELOGI("No need to UnloadDumpInfo."); + load_flag_ = false; + return SUCCESS; + } + + GELOGI("UnloadDumpInfo start."); + aicpu::dump::OpMappingInfo op_mapping_info; + op_mapping_info.set_model_id(model_id_); + op_mapping_info.set_flag(kAicpuUnloadFlag); + + std::string proto_str; + size_t proto_size = op_mapping_info.ByteSizeLong(); + bool ret = op_mapping_info.SerializeToString(&proto_str); + if (!ret || proto_size == 0) { + GELOGE(FAILED, "Protobuf SerializeToString failed, proto size %zu.", proto_size); + return FAILED; + } + + if (dev_mem_unload_ != nullptr) { + GELOGW("dev_mem_unload_ has been 
used."); + ReleaseDevMem(&dev_mem_unload_); + } + + rtError_t rt_ret = rtMalloc(&dev_mem_unload_, proto_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtMalloc failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtMemcpy(dev_mem_unload_, proto_size, proto_str.c_str(), proto_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtMemcpy failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtDatadumpInfoLoad(dev_mem_unload_, proto_size); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtDatadumpInfoLoad failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + load_flag_ = false; + GELOGI("UnloadDumpInfo success, proto size: %zu.", proto_size); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/data_dumper.h b/src/ge/graph/load/new_model_manager/data_dumper.h new file mode 100644 index 00000000..6a3120b4 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/data_dumper.h @@ -0,0 +1,89 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DATA_DUMPER_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DATA_DUMPER_H_ + +#include +#include +#include +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "graph/node.h" +#include "task_info/task_info.h" + +namespace ge { +class DataDumper { + public: + DataDumper() + : model_name_(), + model_id_(0), + runtime_param_(), + dev_mem_load_(nullptr), + dev_mem_unload_(nullptr), + op_list_(), + input_map_(), + load_flag_(false), + device_id_(0) {} + + ~DataDumper(); + + void SetModelName(const std::string &model_name) { model_name_ = model_name; } + void SetModelId(uint32_t model_id) { model_id_ = model_id; } + void SetMemory(const RuntimeParam &runtime_param) { runtime_param_ = runtime_param; } + void SetDeviceId(uint32_t device_id) { device_id_ = device_id; } + + void SaveDumpInput(const std::shared_ptr &node); + // args is device memory stored first output addr + void SaveDumpTask(uint32_t task_id, const std::shared_ptr &op_desc, uintptr_t args); + Status LoadDumpInfo(); + Status UnloadDumpInfo(); + + private: + void ReleaseDevMem(void **ptr) noexcept; + + std::string model_name_; + uint32_t model_id_; + RuntimeParam runtime_param_; + void *dev_mem_load_; + void *dev_mem_unload_; + + struct InnerDumpInfo; + struct InnerInputMapping; + std::vector op_list_; + std::multimap input_map_; + bool load_flag_; + uint32_t device_id_; + + struct InnerDumpInfo { + uint32_t task_id; + std::shared_ptr op; + uintptr_t args; + bool is_task; + int input_anchor_index; + int output_anchor_index; + }; + + struct InnerInputMapping { + std::shared_ptr data_op; + int input_anchor_index; + int output_anchor_index; + }; +}; +} // namespace ge + +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DATA_DUMPER_H_ diff --git a/src/ge/graph/load/new_model_manager/data_inputer.cc b/src/ge/graph/load/new_model_manager/data_inputer.cc new file mode 100644 index 00000000..594a7bcd --- /dev/null +++ 
b/src/ge/graph/load/new_model_manager/data_inputer.cc @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/data_inputer.h" + +#include + +#include "common/debug/log.h" +#include "common/scope_guard.h" +#include "common/types.h" + +namespace ge { +domi::Status InputDataWrapper::Init(const InputData &input, const OutputData &output) { + GE_CHK_BOOL_RET_STATUS(!is_init, domi::INTERNAL_ERROR, "InputDataWrapper is re-initialized"); + + input_ = input; + output_ = output; + is_init = true; + return domi::SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/data_inputer.h b/src/ge/graph/load/new_model_manager/data_inputer.h new file mode 100644 index 00000000..7e396807 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/data_inputer.h @@ -0,0 +1,146 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DATA_INPUTER_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DATA_INPUTER_H_ + +#include +#include +#include + +#include "common/blocking_queue.h" +#include "common/types.h" +#include "common/ge_types.h" + +namespace ge { +/// +/// @ingroup domi_ome +/// @brief wrapper input data +/// @author +/// +class InputDataWrapper { + public: + InputDataWrapper() : is_init(false) {} + + ~InputDataWrapper() {} + + /// + /// @ingroup domi_ome + /// @brief init InputData + /// @param [in] input use input to init InputData + /// @param [in] output data copy dest address + /// @return SUCCESS success + /// @return other init failed + /// + domi::Status Init(const InputData &input, const OutputData &output); + + /// + /// @ingroup domi_ome + /// @brief init InputData + /// @param [in] input use input to init InputData + /// @param [in] output data copy dest address + /// @return SUCCESS success + /// @return other init failed + /// + OutputData *GetOutput() { return &output_; } + + /// + /// @ingroup domi_ome + /// @brief return InputData + /// @return InputData + /// + const InputData &GetInput() const { return input_; } + + private: + OutputData output_; + InputData input_; + bool is_init; +}; + +/// +/// @ingroup domi_ome +/// @brief manage data input +/// @author +/// +class DataInputer { + public: + /// + /// @ingroup domi_ome + /// @brief constructor + /// + DataInputer() {} + + /// + /// @ingroup domi_ome + /// @brief destructor + /// + ~DataInputer() {} + + /// + /// @ingroup domi_ome + /// @brief init + /// @return SUCCESS init success + /// + domi::Status Init() { return domi::SUCCESS; } + + /// + /// @ingroup domi_ome + /// @brief is input data full + /// @return true full + /// @return false not full + /// + bool IsDataFull() { return queue_.IsFull(); } + + /// + /// @ingroup domi_ome + /// @brief add input data + /// 
@param [int] input data + /// @return SUCCESS add successful + /// @return INTERNAL_ERROR add failed + /// + domi::Status Push(const std::shared_ptr &data) { + bool success = queue_.Push(data, false); + return success ? domi::SUCCESS : domi::INTERNAL_ERROR; + } + + /// + /// @ingroup domi_ome + /// @brief pop input data + /// @param [out] save popped input data + /// @return SUCCESS pop success + /// @return INTERNAL_ERROR pop fail + /// + domi::Status Pop(std::shared_ptr &data) { + bool success = queue_.Pop(data); + return success ? domi::SUCCESS : domi::INTERNAL_ERROR; + } + + /// + /// @ingroup domi_ome + /// @brief stop receiving data, invoke thread at Pop + /// + void Stop() { queue_.Stop(); } + + private: + /// + /// @ingroup domi_ome + /// @brief save input data queue + /// + BlockingQueue> queue_; +}; +} // namespace ge + +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DATA_INPUTER_H_ diff --git a/src/ge/graph/load/new_model_manager/davinci_model.cc b/src/ge/graph/load/new_model_manager/davinci_model.cc new file mode 100644 index 00000000..4887e7d1 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/davinci_model.cc @@ -0,0 +1,2510 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/davinci_model.h" + +#include +#include +#include +#include +#include + +#include +#include +#include + +#include "cce/cce.h" +#include "cce/dnn.h" +#include "cce/optimizer/fusion_engine.h" +#include "common/debug/log.h" +#include "common/formats/formats.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "common/math/math_util.h" +#include "common/op/ge_op_utils.h" +#include "common/profiling/profiling_manager.h" +#include "common/properties_manager.h" +#include "common/scope_guard.h" +#include "common/thread_pool.h" +#include "framework/common/debug/ge_log.h" +#include "graph/compute_graph.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_context.h" +#include "graph/graph.h" +#include "graph/load/new_model_manager/tbe_handle_store.h" +#include "graph/load/output/output.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/manager/util/debug.h" +#include "graph/model_serialize.h" +#include "graph/node.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" +#include "mmpa/mmpa_api.h" +#include "model_output.h" +#include "omm/csa_interact.h" +#include "runtime/base.h" +#include "runtime/dev.h" +#include "runtime/event.h" +#include "runtime/mem.h" +#include "runtime/stream.h" + +// create std::thread, catch exceptions using try/catch +#define CREATE_STD_THREAD(thread_id, func, args) \ + do { \ + try { \ + thread_id = std::thread(func, args); \ + } catch (const std::system_error &e) { \ + GELOGE(FAILED, "Caught system_error with code:%d, meaning:%s", e.code().value(), e.what()); \ + GELOGE(FAILED, "Thread creat FAIL, Please check the left resource!"); \ + return FAILED; \ + } \ + } while (0) + +namespace ge { +namespace { +const uint32_t DEFAULT_DATA_INDEX = 0; +const uint32_t TRUE_BRANCH_STREAM_NUM = 1; +const uint32_t THREAD_NUM = 16; +const int 
kDecimal = 10; +const int kBytes = 8; + +class RtContextSwitchGuard { + public: + RtContextSwitchGuard(rtCtxMode_t mode, uint32_t device_id) : last_(nullptr), current_(nullptr) { + auto ret = rtCtxGetCurrent(&last_); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Failed to get current context from rt, error-code %d", ret); + return; + } + + ret = rtCtxCreate(¤t_, mode, static_cast(device_id)); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Failed to create new context for device %u, error-code %d", device_id, ret); + return; + } + + ret = rtCtxSetCurrent(current_); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Failed to switch context to normal, device %u", device_id); + return; + } + } + + ~RtContextSwitchGuard() { + if (current_ != nullptr) { + auto ret = rtCtxDestroy(current_); + if (ret != RT_ERROR_NONE) { + GELOGW("Failed to call rtCtxDestroy"); + } + } + if (last_ != nullptr) { + auto ret = rtCtxSetCurrent(last_); + if (ret != RT_ERROR_NONE) { + GELOGW("Failed to call rtCtxSetCurrent"); + } + } + } + + private: + rtContext_t last_; + rtContext_t current_; +}; + +int CalcVarSizeInBytes(const GeTensorDesc &desc) { + int var_size = GetSizeByDataType(desc.GetDataType()); + if (var_size <= 0) { + GELOGE(PARAM_INVALID, "Failed to calc var data size from data type %s", + TypeUtils::DataTypeToSerialString(desc.GetDataType()).c_str()); + return -1; + } + auto shape = desc.GetShape(); + auto dimNum = shape.GetDimNum(); + for (size_t dimIndex = 0; dimIndex < dimNum; ++dimIndex) { + var_size *= static_cast(shape.GetDim(dimIndex)); + } + return var_size; +} + +Status CopyVarFromDevice(uint64_t session_id, const NodePtr &var, std::unique_ptr &var_data, + const GeTensorDesc &input_desc) { + uint8_t *var_logic = nullptr; + GE_CHECK_NOTNULL(var); + auto ret = VarManager::Instance(session_id)->GetVarAddr(var->GetName(), input_desc, &var_logic); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to copy var %s from device, can not find it" + " from var 
manager %u", + var->GetName().c_str(), ret); + return INTERNAL_ERROR; + } + + uint8_t *var_addr = VarManager::Instance(session_id)->GetVarMemoryAddr(var_logic, RT_MEMORY_HBM); + if (var_addr == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to copy var %s from device, can not get var addr", var->GetName().c_str()); + return INTERNAL_ERROR; + } + + int var_size_bytes = CalcVarSizeInBytes(input_desc); + if (var_size_bytes <= 0) { + return INTERNAL_ERROR; + } + + std::unique_ptr var_host(new (std::nothrow) uint8_t[var_size_bytes]); + if (var_host == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to malloc rt-host memory, size %d", var_size_bytes); + return OUT_OF_MEMORY; + } + + ret = rtMemcpy(reinterpret_cast(var_host.get()), var_size_bytes, reinterpret_cast(var_addr), + var_size_bytes, RT_MEMCPY_DEVICE_TO_HOST); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, + "Failed to copy var memory from device, var %s, size %d," + " rt-error-code %u", + var->GetName().c_str(), var_size_bytes, ret); + return RT_FAILED; + } + + GELOGD("Copy var %s from device to host, size %d", var->GetName().c_str(), var_size_bytes); + var_data.swap(var_host); + + return SUCCESS; +} + +Status CopyVarToDevice(const NodePtr &var, const formats::TransResult &trans_result, void *var_addr) { + GE_CHECK_NOTNULL(var); + GELOGD("Copy var %s from host to device, size %zu", var->GetName().c_str(), trans_result.length); + auto ret = rtMemcpy(var_addr, trans_result.length, reinterpret_cast(trans_result.data.get()), + trans_result.length, RT_MEMCPY_HOST_TO_DEVICE); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Failed to copy memory to device, size %zu", trans_result.length); + return RT_FAILED; + } + return SUCCESS; +} + +Status TransVarOnHost(uint8_t *var_data, const VarTransRoad &trans_road, formats::TransResult &result) { + formats::TransResult resultLastTime{}; + bool use_init_data = true; + for (const auto &trans_info : trans_road) { + if (trans_info.node_type == RESHAPE || trans_info.node_type == 
REFORMAT) { + GELOGD("Skip to trans variable data on the reshape/reformat node"); + continue; + } + uint8_t *src_data = nullptr; + if (use_init_data) { + src_data = var_data; + use_init_data = false; + } else { + src_data = resultLastTime.data.get(); + } + + formats::TransResult tmp_result{}; + if (trans_info.node_type == TRANSDATA) { + auto src_format = trans_info.input.GetFormat(); + auto src_shape = trans_info.input.GetShape().GetDims(); + auto dst_format = trans_info.output.GetFormat(); + auto dst_shape = trans_info.output.GetShape().GetDims(); + auto data_type = trans_info.input.GetDataType(); + GELOGD("Trans format from %s to %s, shape %s to %s, data-type %s", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str(), + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(dst_shape).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str()); + auto ret = formats::TransFormat({src_data, src_format, dst_format, src_shape, dst_shape, data_type}, tmp_result); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to trans format from %s to %s, shape %s to %s, " + "data type %s error code %u", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str(), + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(dst_shape).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str(), ret); + return ret; + } + } else if (trans_info.node_type == CAST) { + auto input_shape = trans_info.input.GetShape(); + auto src_data_size = input_shape.GetShapeSize(); + auto src_data_type = trans_info.input.GetDataType(); + auto dst_data_type = trans_info.output.GetDataType(); + GELOGD("Trans data type from %s to %s, input shape %s, data size %ld", + TypeUtils::DataTypeToSerialString(src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(dst_data_type).c_str(), formats::ShapeToString(input_shape).c_str(), + src_data_size); + auto ret = 
formats::TransDataType({src_data, static_cast(src_data_size), src_data_type, dst_data_type}, + tmp_result); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to trans data type from %s to %s, input shape %s, data size %ld, error code %u", + TypeUtils::DataTypeToSerialString(src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(dst_data_type).c_str(), formats::ShapeToString(input_shape).c_str(), + src_data_size, ret); + return ret; + } + } else { + GELOGE(UNSUPPORTED, "Failed to trans var data, the trans type %s does not supported", + trans_info.node_type.c_str()); + return UNSUPPORTED; + } + resultLastTime = tmp_result; + } + + result = resultLastTime; + return SUCCESS; +} + +/// +/// re-alloc var memory on device using var-manager +/// free origin var memory(var manager does not support now) +/// @param session_id +/// @param var +/// @param var_size_bytes +/// @param var_device +/// @return +/// +Status ReAssignVarAddr(uint64_t session_id, const std::string &var_name, const GeTensorDesc &tensor_desc, + void **var_device) { + uint8_t *var_logic = nullptr; + Status ret = VarManager::Instance(session_id)->GetVarAddr(var_name, tensor_desc, &var_logic); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to get var %s device addr, can not find it" + " from var manager %u", + var_name.c_str(), ret); + return INTERNAL_ERROR; + } + + uint8_t *var_addr = VarManager::Instance(session_id)->GetVarMemoryAddr(var_logic, RT_MEMORY_HBM); + if (var_addr == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to convert var %s logic addr to real addr", var_name.c_str()); + return INTERNAL_ERROR; + } + *var_device = var_addr; + + return SUCCESS; +} + +Status TransVarData(const NodePtr &var, const VarTransRoad &trans_road, uint64_t session_id, uint32_t device_id) { + // do not need to do anything if only all reshape/reformat node on the trans_road + GE_CHECK_NOTNULL(var); + bool need_trans = false; + if (std::any_of(trans_road.begin(), trans_road.end(), [](const 
ge::TransNodeInfo &road) { + return road.node_type != RESHAPE && road.node_type != REFORMAT; + })) { + need_trans = true; + } + + if (!need_trans) { + return SUCCESS; + } + + // Sync var data from device + std::unique_ptr var_data; + if (trans_road.size() == 0) { + GELOGE(INTERNAL_ERROR, "Failed to get trans_road, trans_road is empty."); + return INTERNAL_ERROR; + } + const GeTensorDesc &input_desc = trans_road.begin()->input; + auto ret = CopyVarFromDevice(session_id, var, var_data, input_desc); + if (ret != SUCCESS) { + return ret; + } + + formats::TransResult trans_result{}; + ret = TransVarOnHost(var_data.get(), trans_road, trans_result); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to trans var data on host, error code %u", ret); + return ret; + } + + void *var_device = nullptr; + /// + /// It is a temporary solution to use the last GeTensorDesc to assign variable memory because the variable manager + /// depends on TensorDesc and it is difficult to be modified. The correct solution is to assign memory based on the + /// size of the converted variable. To complete the final solution, the dependency of the variable manager on + /// TensorDesc needs to be removed. This change is large and needs to be performed step by step. 
+ /// + ret = ReAssignVarAddr(session_id, var->GetName(), trans_road.rbegin()->output, &var_device); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to re-assign memory on device, size %zu", trans_result.length); + return ret; + } + + // sync new data to device + ret = CopyVarToDevice(var, trans_result, var_device); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to send var data to device"); + return ret; + } + + return SUCCESS; +} +} // namespace + +std::mutex DavinciModel::tvm_bin_mutex_; +std::set DavinciModel::tvm_bin_kernel_; + +DavinciModel::DavinciModel(int32_t priority, const std::shared_ptr &listener) + : weights_mem_base_(nullptr), + var_mem_base_(nullptr), + mem_base_(nullptr), + is_inner_mem_base_(false), + is_inner_weight_base_(false), + data_inputer_(nullptr), + dataInputTid(0), + is_model_has_inited_(false), + model_id_(0), + version_(0), + ge_model_(nullptr), + thread_id_(), + listener_(listener), + run_flg_(false), + priority_(priority), + rt_model_handle_(nullptr), + rt_model_stream_(nullptr), + is_inner_model_stream_(false), + support_mem_shared_flag_(false), + session_id_(0), + device_id_(0), + is_train_mode_(false), + model_task_def_(nullptr), + maxDumpOpNum_(0) { + op_list_.clear(); +} + +DavinciModel::~DavinciModel() { + try { + Status ret = data_dumper_.UnloadDumpInfo(); + if (ret != SUCCESS) { + GELOGW("UnloadDumpInfo fail, ret: %u.", ret); + } + + GE_CHK_STATUS(ModelRunStop()); + UnbindTaskSinkStream(); + + op_list_.clear(); + data_op_list_.clear(); + output_op_list_.clear(); + + GE_DELETE_NEW_SINGLE(data_inputer_); + + for (size_t i = 0; i < label_list_.size(); ++i) { + GE_LOGW_IF(rtLabelDestroy(label_list_[i]) != RT_ERROR_NONE, "Destroy label failed! Index: %zu", i); + } + + for (size_t i = 0; i < stream_list_.size(); ++i) { + GE_LOGW_IF(rtStreamDestroy(stream_list_[i]) != RT_ERROR_NONE, "Destroy stream failed! 
Index: %zu", i); + } + + for (size_t i = 0; i < event_list_.size(); ++i) { + GE_LOGW_IF(rtEventDestroy(event_list_[i]) != RT_ERROR_NONE, "Destroy event failed. Index: %zu", i); + } + + FreeWeightsMem(); + + FreeFeatureMapMem(); + + if (model_task_def_) { + // release rtModel + GELOGI("do ReleaseTask"); + GE_CHK_RT(rtModelDestroy(rt_model_handle_)); + ReleaseTask(); + } + + CleanTbeHandle(); + + var_mem_base_ = nullptr; + } catch (...) { + GELOGW("DavinciModel::~DavinciModel: clear op_list catch exception."); + } +} + +void DavinciModel::UnbindHcomStream() { + if (!all_hccl_stream_list_.empty()) { + for (size_t i = 0; i < all_hccl_stream_list_.size(); i++) { + GE_LOGW_IF(rtModelUnbindStream(rt_model_handle_, all_hccl_stream_list_[i]) != RT_ERROR_NONE, + "Unbind hccl stream from model failed! Index: %zu", i); + GE_LOGW_IF(rtStreamDestroy(all_hccl_stream_list_[i]) != RT_ERROR_NONE, "Destroy hccl stream for rt_model failed!") + } + } + return; +} + +void DavinciModel::ReleaseTask() { + for (const auto &task : task_list_) { + if (task != nullptr) { + GE_CHK_STATUS(task->Release()); + } + } +} + +Status DavinciModel::Assign(const GeModelPtr &ge_model) { + if (ge_model == nullptr) { + GELOGI("can't assign null ge_model"); + return FAILED; + } + ge_model_ = ge_model; + model_task_def_ = ge_model_->GetModelTaskDefPtr(); + return SUCCESS; +} + +Status DavinciModel::InitModelMem(void *dev_ptr, size_t memsize, void *weight_ptr, size_t weight_size) { + if (is_model_has_inited_) { + GELOGI("call InitModelMem more than once ."); + return FAILED; + } + is_model_has_inited_ = true; + std::size_t data_size = TotalMemSize(); + ge::Buffer weights = ge_model_->GetWeight(); + + uint8_t *weights_addr = weights.GetData(); + std::size_t weights_size = weights.GetSize(); + + GE_CHECK_LE(weights_size, ALLOC_MEMORY_MAX_SIZE); + + if ((dev_ptr != nullptr) && (memsize < TotalMemSize())) { + GELOGE(FAILED, "Invalid mem param: memsize=%zu totalsize=%zu.", memsize, TotalMemSize()); + return 
FAILED; + } + + if ((weight_ptr != nullptr) && (weight_size < weights_size)) { + GELOGE(FAILED, "Invalid mem param: weight_size=%zu totalsize=%zu.", weight_size, weights_size); + return FAILED; + } + + mem_base_ = static_cast(dev_ptr); + weights_mem_base_ = static_cast(dev_ptr); + is_inner_mem_base_ = false; + is_inner_weight_base_ = false; + + if (TotalMemSize() && mem_base_ == nullptr) { + mem_base_ = MallocFeatureMapMem(data_size); + if (mem_base_ == nullptr) { + return FAILED; + } + + weights_mem_base_ = mem_base_; + + is_inner_mem_base_ = true; + is_inner_weight_base_ = true; + } + + if (weights_size != 0) { + weights_mem_base_ = static_cast(weight_ptr); + is_inner_weight_base_ = false; + if (weight_ptr == nullptr) { + weights_mem_base_ = MallocWeightsMem(weights_size); + if (weights_mem_base_ == nullptr) { + return FAILED; + } + is_inner_weight_base_ = true; + } + GE_CHK_RT_RET(rtMemcpy(weights_mem_base_, weights_size, weights_addr, weights_size, RT_MEMCPY_HOST_TO_DEVICE)) + GELOGI("copy weights data to device"); + } + + var_mem_base_ = VarManager::Instance(session_id_)->GetVarMemoryBase(RT_MEMORY_HBM); + if (TotalVarMemSize() && var_mem_base_ == nullptr) { + Status ret = VarManager::Instance(session_id_)->MallocVarMemory(TotalVarMemSize()); + if (ret != SUCCESS) { + GELOGE(ret, "Malloc Var Memory Fail."); + return ret; + } + var_mem_base_ = VarManager::Instance(session_id_)->GetVarMemoryBase(RT_MEMORY_HBM); + } + + runtime_param_.mem_base = mem_base_; + runtime_param_.weight_base = weights_mem_base_; + runtime_param_.var_base = var_mem_base_; + return SUCCESS; +} + +void DavinciModel::InitRuntimeParams() { + int64_t value = 0; + bool ret; + ret = ge::AttrUtils::GetInt(ge_model_, ATTR_MODEL_MEMORY_SIZE, value); + runtime_param_.mem_size = ret ? (uint64_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, ATTR_MODEL_WEIGHT_SIZE, value); + runtime_param_.weight_size = ret ? 
(uint64_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, ATTR_MODEL_STREAM_NUM, value); + runtime_param_.stream_num = ret ? (uint32_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, ATTR_MODEL_EVENT_NUM, value); + runtime_param_.event_num = ret ? (uint64_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, ATTR_MODEL_BATCH_NUM, value); + runtime_param_.batch_num = ret ? (uint32_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, MODEL_ATTR_TASK_GEN_BASE_ADDR, value); + runtime_param_.logic_mem_base = ret ? (uint64_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, MODEL_ATTR_TASK_GEN_WEIGHT_ADDR, value); + runtime_param_.logic_weight_base = ret ? (uint64_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, ge::MODEL_ATTR_SESSION_ID, value); + runtime_param_.session_id = ret ? (uint64_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, ATTR_MODEL_TASK_GEN_VAR_ADDR, value); + runtime_param_.logic_var_base = ret ? (uint64_t)value : 0; + ret = ge::AttrUtils::GetInt(ge_model_, ATTR_MODEL_VAR_SIZE, value); + runtime_param_.var_size = ret ? 
(uint64_t)value : 0; + session_id_ = runtime_param_.session_id; + GELOGI("Init(),memory_size:%lu, weight_size:%lu, stream_num:%u, session_id:%lu, var_size:%lu.", + runtime_param_.mem_size, runtime_param_.weight_size, runtime_param_.stream_num, runtime_param_.session_id, + runtime_param_.var_size); + + GELOGI("Init(),event_num:%u, batch_num:%u", runtime_param_.event_num, runtime_param_.batch_num); +} + +void DavinciModel::CheckHasHcomOp() { + Graph graph = ge_model_->GetGraph(); + auto compute_graph = GraphUtils::GetComputeGraph(graph); + if (compute_graph == nullptr) { + return; + } + for (const auto &node : compute_graph->GetAllNodes()) { + OpDescPtr op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, GELOGW("Node OpDesc is nullptr"); continue); + GE_IF_BOOL_EXEC(((op_desc->GetType() == HCOMBROADCAST) || (op_desc->GetType() == HCOMALLGATHER) || + (op_desc->GetType() == HCOMALLREDUCE) || (op_desc->GetType() == HCOMSEND) || + (op_desc->GetType() == HCOMRECEIVE) || (op_desc->GetType() == HCOMREDUCESCATTER)), + uint32_t stream_id = static_cast(op_desc->GetStreamId()); + (void)hcom_streams_.emplace(stream_id); GELOGD("hcom stream: %u.", stream_id); continue); + + bool is_aicpu_stream = false; + GE_IF_BOOL_EXEC(AttrUtils::GetBool(op_desc, "is_aicpu_stream", is_aicpu_stream) && is_aicpu_stream, + uint32_t stream_id = static_cast(op_desc->GetStreamId()); + (void)aicpu_streams_.emplace(stream_id); GELOGD("aicpu stream: %u.", stream_id); continue); + } +} + +Status DavinciModel::DoTaskSink() { + // task sink is supported as model_task_def is set + if (model_task_def_) { + GELOGI("do task_sink."); + + // create model_handle to load model + GE_CHK_RT_RET(rtModelCreate(&rt_model_handle_, 0)); + + for (size_t i = 0; i < stream_list_.size(); i++) { + GE_IF_BOOL_EXEC(active_stream_indication_.count(i) > 0, GELOGI("rtModelBindStream[%zu]", i); + GE_CHK_RT_RET(rtModelBindStream(rt_model_handle_, stream_list_[i], RT_INVALID_FLAG)); continue;); + // bind 
rt_model_handel to all streams that relates to op + GE_CHK_RT_RET(rtModelBindStream(rt_model_handle_, stream_list_[i], 0)); + } + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(InitTaskInfo(*model_task_def_.get()) != SUCCESS, return FAILED, + "InitTaskInfo failed."); + + GE_CHK_STATUS_RET(DistributeTask(), "Distribute failed."); + + GE_CHK_RT_RET(rtModelLoadComplete(rt_model_handle_)); + } + return SUCCESS; +} + +// initialize op sequence and call initialization function of each op respectively +Status DavinciModel::Init(void *dev_ptr, size_t memsize, void *weight_ptr, size_t weight_size) { + // validating params + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(priority_ < 0 || priority_ > 7, return PARAM_INVALID, + "Priority must between 0-7, now is %d", priority_); + GE_CHK_BOOL_RET_STATUS(ge_model_ != nullptr, PARAM_INVALID, "GeModel is null."); + // Initializing runtime_param_ + InitRuntimeParams(); + + version_ = ge_model_->GetVersion(); + name_ = ge_model_->GetName(); + + CheckHasHcomOp(); + + for (uint32_t i = 0; i < StreamNum(); i++) { + rtStream_t stream = nullptr; + GE_MAKE_GUARD_RTSTREAM(stream); + + if (hcom_streams_.find(i) != hcom_streams_.end()) { + GE_CHK_RT_RET(rtStreamCreateWithFlags(&stream, priority_, RT_STREAM_PERSISTENT | RT_STREAM_FORCE_COPY)); + } else if (aicpu_streams_.find(i) != aicpu_streams_.end()) { + GE_CHK_RT_RET(rtStreamCreateWithFlags(&stream, priority_, RT_STREAM_PERSISTENT | RT_STREAM_AICPU)); + } else { + GE_CHK_RT_RET(rtStreamCreateWithFlags(&stream, priority_, RT_STREAM_PERSISTENT)); + } + + GE_DISMISS_GUARD(stream); + stream_list_.push_back(stream); + } + + for (uint32_t i = 0; i < EventNum(); i++) { + rtEvent_t rt_event; + GE_CHK_RT_RET(rtEventCreate(&rt_event)); + event_list_.push_back(rt_event); + } + + for (uint32_t i = 0; ((BatchNum() != 0) && (i <= BatchNum())); i++) { + rtLabel_t rtLabel; + GE_CHK_RT_RET(rtLabelCreate(&rtLabel)); + GE_CHK_BOOL_RET_STATUS(rtLabel != nullptr, FAILED, "rtLabel is nullptr!"); + label_list_.push_back(rtLabel); + } + + 
Graph graph = ge_model_->GetGraph(); + auto compute_graph = GraphUtils::GetComputeGraph(graph); + GE_CHK_BOOL_RET_STATUS(compute_graph != nullptr, INTERNAL_ERROR, "Get compute graph is nullptr!"); + + runtime_param_.graph_id = GetGraphID(compute_graph->GetName()); + + GE_TIMESTAMP_START(TransAllVarData); + GE_CHK_STATUS_RET(TransAllVarData(compute_graph, runtime_param_.graph_id), "TransAllVarData failed."); + GE_TIMESTAMP_END(TransAllVarData, "GraphLoader::TransAllVarData"); + GE_CHK_STATUS_RET(CopyVarData(compute_graph), "copy var data failed."); + + GE_TIMESTAMP_START(InitModelMem); + GE_CHK_STATUS_RET_NOLOG(InitModelMem(dev_ptr, memsize, weight_ptr, weight_size)); + GE_TIMESTAMP_END(InitModelMem, "GraphLoader::InitModelMem"); + + InitDataDumper(); + data_inputer_ = new (std::nothrow) DataInputer(); + GE_CHK_BOOL_RET_STATUS(data_inputer_ != nullptr, INTERNAL_ERROR, "data_inputer_ is nullptr!"); + + for (const ge::NodePtr &node : compute_graph->GetDirectNode()) { + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, continue); + GE_IF_BOOL_EXEC(node->GetOpDesc()->GetType() != VARIABLE, continue); + GE_IF_BOOL_EXEC(IsBroadCastOpData(node), + (void)ge::AttrUtils::SetStr(node->GetOpDesc(), VAR_ATTR_VAR_IS_BROADCAST, "var_is_restore");); + } + // for profiling + op_name_map_ = compute_graph->GetGraphOpName(); + + GE_TIMESTAMP_CALLNUM_START(LoadTBEKernelBinToOpDesc); + GE_TIMESTAMP_CALLNUM_START(InitTbeHandle); + + vector op_name; + GE_IF_BOOL_EXEC(ge::AttrUtils::GetListStr(ge_model_, ATTR_MODEL_TASK_INDEX_OP_NAME, op_name), + GELOGI("get str of task_index_op_name")); + if (op_name_map_.empty()) { + for (size_t idx = 0; idx < op_name.size(); idx++) { + op_name_map_[idx] = op_name[idx]; + } + GELOGI("infer profiling: op_name_size(%zu)", op_name.size()); + } + + auto nodes = compute_graph->GetAllNodes(); + tbekernel_store_ = ge_model_->GetTBEKernelStore(); + for (size_t i = 0; i < nodes.size(); i++) { + auto node = nodes.at(i); + GE_CHK_BOOL_RET_STATUS(node != nullptr, 
PARAM_INVALID, "CreateOp failed."); + + auto op_desc = node->GetOpDesc(); + GE_CHK_BOOL_RET_STATUS(op_desc != nullptr, PARAM_INVALID, "op_desc is null."); + op_list_[i] = op_desc; + + GE_TIMESTAMP_RESTART(LoadTBEKernelBinToOpDesc); + tbekernel_store_.LoadTBEKernelBinToOpDesc(op_desc); + GE_TIMESTAMP_ADD(LoadTBEKernelBinToOpDesc); + + if (op_desc->GetType() == DATA_TYPE || op_desc->GetType() == AIPP_DATA_TYPE || + op_desc->GetType() == ANN_DATA_TYPE) { + data_op_list_.push_back(op_desc); + GE_IF_BOOL_EXEC( + (op_desc->GetInputDescPtr(0) != nullptr && op_desc->GetInputDescPtr(0)->GetFormat() != FORMAT_FILTER_HWCK), + data_op_input_tensor_desc_map_[op_desc->GetName()] = op_desc->GetInputDescPtr(0)); + GE_IF_BOOL_EXEC( + (op_desc->GetOutputDescPtr(0) != nullptr && op_desc->GetOutputDescPtr(0)->GetFormat() != FORMAT_FRACTAL_Z), + data_op_output_tensor_desc_map_[op_desc->GetName()] = op_desc->GetOutputDescPtr(0)); + SetOutsideAddr(ModelUtils::GetOutputDataAddrs(runtime_param_, op_desc)); + data_dumper_.SaveDumpInput(node); + } + + GE_IF_BOOL_EXEC(op_desc->GetType() == VARIABLE, variable_op_list_.push_back(op_desc)); + + GE_IF_BOOL_EXEC(op_desc->GetType() == NETOUTPUT, output_op_list_.push_back(op_desc); + GE_CHK_STATUS_RET(ModelUtils::GetOutputSize(op_desc, output_size_list_, output_memory_size_list_), + "Get output size fail"); + SetOutsideAddr(ModelUtils::GetInputDataAddrs(runtime_param_, op_desc))); + + // Initialize constant op, only applies to training, ignoring inference constant op + GE_IF_BOOL_EXEC(op_desc->GetType() == CONSTANTOP, + GE_CHK_STATUS_RET(InitConstant(op_desc), "Constant init failed. %s", op_desc->GetName().c_str());); + + GE_TIMESTAMP_RESTART(InitTbeHandle); + uint32_t run_mode = static_cast(domi::ImplyType::INVALID); + GE_IF_BOOL_EXEC((AttrUtils::GetInt(op_desc, ATTR_NAME_IMPLY_TYPE, run_mode) && + run_mode == static_cast(domi::ImplyType::TVM)), + GE_CHK_STATUS_RET(InitTbeHandle(op_desc), "TBE init failed. 
%s", op_desc->GetName().c_str());); + GE_TIMESTAMP_ADD(InitTbeHandle); + + GE_CHK_STATUS_RET(MarkActiveStream(op_desc), "MarkActiveStream failed, node:%s, opIndex:%zu", + op_desc->GetName().c_str(), i); + } + GE_TIMESTAMP_CALLNUM_END(LoadTBEKernelBinToOpDesc, "GraphLoader::LoadTBEKernelBinToOpDesc"); + GE_TIMESTAMP_CALLNUM_END(InitTbeHandle, "GraphLoader::InitTbeHandle"); + + GE_TIMESTAMP_START(DoTaskSink); + auto ret = DoTaskSink(); + GE_TIMESTAMP_END(DoTaskSink, "GraphLoader::DoTaskSink"); + return ret; +} + +/// +/// @ingroup ge +/// @brief ACL case, Load task list with queue. +/// @param [in] input_queue_ids: input queue ids from user, nums equal Data Op. +/// @param [in] output_queue_ids: input queue ids from user, nums equal NetOutput Op. +/// @return: 0 for success / others for fail +/// +Status DavinciModel::SetQueIds(const std::vector &input_queue_ids, + const std::vector &output_queue_ids) { + if (input_queue_ids.empty() && output_queue_ids.empty()) { + GELOGE(PARAM_INVALID, "Para is empty"); + return PARAM_INVALID; + } + + input_queue_ids_ = input_queue_ids; + output_queue_ids_ = output_queue_ids; + return SUCCESS; +} + +/// +/// @brief define static mode and mutex mode +/// +SysMode DavinciModel::mode_ = INFERENCE; +std::mutex DavinciModel::mutex_mode_; + +/// +/// @ingroup domi_ome +/// @brief get sys mode +/// @return SysMode required system mode +/// @author +/// +SysMode DavinciModel::GetSysMode() { + std::unique_lock lock(mutex_mode_); + return mode_; +} + +/// +/// @ingroup domi_ome +/// @brief set sys mode +/// @param [in] mode to be set +/// @return Status mode set result +/// @author +/// +Status DavinciModel::SetSysMode(SysMode mode) { + GE_CHK_BOOL_RET_STATUS(mode < RESERVED, PARAM_INVALID, "DavinciModel::SetSysMode Para Error"); + + std::unique_lock lock(mutex_mode_); + mode_ = mode; + return SUCCESS; +} + +Status DavinciModel::GetInputOutputDescInfo(vector &input_desc, + vector &output_desc) { + if ((data_op_list_.empty()) || 
(data_op_list_[0]->GetInputsSize()) != 1) { + GELOGI("data_op_list_ is empty or input_desc size is not 1."); + } else { + std::vector input_formats; + GE_CHK_STATUS_RET(GetInputDescInfo(input_desc, input_formats), "get input desc info failed"); + } + + std::vector output_formats; + GE_CHK_STATUS_RET(GetOutputDescInfo(output_desc, output_formats), "get output desc info failed"); + + return SUCCESS; +} + +Status DavinciModel::GetInputOutputDescInfoForZeroCopy(vector &input_desc, + vector &output_desc) { + if ((data_op_list_.empty()) || (data_op_list_[0]->GetInputsSize()) != 1) { + GELOGE(FAILED, "OP List Pointer is null or input_desc size is not 1!"); + return FAILED; + } + + std::vector input_formats; + GE_CHK_STATUS_RET(GetInputDescInfo(input_desc, input_formats), "get input desc info failed"); + std::vector output_formats; + GE_CHK_STATUS_RET(GetOutputDescInfo(output_desc, output_formats), "get output desc info failed"); + + GE_CHK_BOOL_RET_STATUS(output_desc.size() == output_size_list_.size(), INTERNAL_ERROR, + "output_desc size[%zu] not equal output_size_list_[%zu] size!", output_desc.size(), + output_size_list_.size()); + + GE_CHECK_GE(output_memory_size_list_.size(), output_size_list_.size()); + /// For function zero copy,the memory should be aligned by 512 bytes. + /// And, because of the cce op limit, size should be lager than the real shape size. The memory should be padded by 32 + /// bytes. 
+ /// *size equals to ((tensorDesc->dataSize + 2 * 32 - 1) / 32) * 32; + for (size_t i = 0; i < output_size_list_.size(); i++) { + output_desc[i].size = output_memory_size_list_[i]; + } + + return SUCCESS; +} + +Status DavinciModel::GetInputOutputDescInfo(vector &input_desc, + vector &output_desc, + std::vector &input_formats, + std::vector &output_formats) { + if ((data_op_list_.empty()) || (data_op_list_[0]->GetInputsSize()) != 1) { + GELOGE(FAILED, "OP List Pointer is null or input_desc size is not 1!"); + return FAILED; + } + + GE_CHK_STATUS_RET(GetInputDescInfo(input_desc, input_formats), "get input desc info failed"); + + GE_CHK_STATUS_RET(GetOutputDescInfo(output_desc, output_formats), "get ouput desc info failed"); + + return SUCCESS; +} + +Status DavinciModel::GetInputOutputDescInfoForZeroCopy(vector &input_desc, + vector &output_desc, + std::vector &input_formats, + std::vector &output_formats) { + if ((data_op_list_.empty()) || (1 != data_op_list_[0]->GetInputsSize())) { + GELOGE(FAILED, "OP List Pointer is null or input_desc size is not 1!"); + return FAILED; + } + + GE_CHK_STATUS_RET(GetInputDescInfo(input_desc, input_formats), "get input desc info failed"); + + GE_CHK_STATUS_RET(GetOutputDescInfo(output_desc, output_formats), "get ouput desc info failed"); + + GE_CHK_BOOL_RET_STATUS(output_desc.size() == output_size_list_.size(), INTERNAL_ERROR, + "output_desc size[%zu] not equal output_size_list_[%zu] size!", output_desc.size(), + output_size_list_.size()); + + GE_CHECK_GE(output_memory_size_list_.size(), output_size_list_.size()); + /// For function zero copy,the momery should be aligned by 512 bytes. + /// And, because of the cce op limit, size should be lager than the real shape size. The memory should be padded by 32 + /// bytes. 
+ /// *size equals to ((tensorDesc->dataSize + 2 * 32 - 1) / 32) * 32; + for (size_t i = 0; i < output_size_list_.size(); i++) { + output_desc[i].size = output_memory_size_list_[i]; + } + + return SUCCESS; +} + +Status DavinciModel::GetInputDescInfo(vector &input_desc, std::vector &formats) { + for (std::size_t index = 0; index < data_op_list_.size(); ++index) { + InputOutputDescInfo input; + uint32_t n, c, h, w; + GE_CHECK_NOTNULL(data_op_list_[index]); + GE_CHECK_NOTNULL(data_op_list_[index]->GetInputDescPtr(0)); + Format format = data_op_list_[index]->GetOutputDescPtr(0)->GetFormat(); + n = format == FORMAT_NHWC ? NHWC_DIM_N : NCHW_DIM_N; + c = format == FORMAT_NHWC ? NHWC_DIM_C : NCHW_DIM_C; + h = format == FORMAT_NHWC ? NHWC_DIM_H : NCHW_DIM_H; + w = format == FORMAT_NHWC ? NHWC_DIM_W : NCHW_DIM_W; + + if (data_op_list_[index]->GetInputDescPtr(0)->GetShape().GetDimNum() == static_cast(NORMAL_TENSOR_SIZE)) { + input.shape_info.num = data_op_list_[index]->GetInputDescPtr(0)->GetShape().GetDim(n); + input.shape_info.height = data_op_list_[index]->GetInputDescPtr(0)->GetShape().GetDim(h); + input.shape_info.width = data_op_list_[index]->GetInputDescPtr(0)->GetShape().GetDim(w); + input.shape_info.channel = data_op_list_[index]->GetInputDescPtr(0)->GetShape().GetDim(c); + } + for (size_t k = 0; k < data_op_list_[index]->GetInputDescPtr(0)->GetShape().GetDimNum(); k++) { + input.shape_info.dims.push_back(data_op_list_[index]->GetInputDescPtr(0)->GetShape().GetDim(k)); + } + + input.data_type = data_op_list_[index]->GetInputDescPtr(0)->GetDataType(); + input.name = data_op_list_[index]->GetName(); + uint32_t input_size = 0; + GE_CHK_STATUS_RET(TensorUtils::GetSize(*data_op_list_[index]->GetInputDescPtr(0), input_size), + "get input size failed."); + input.size = input_size; + formats.push_back(format); + input_desc.push_back(input); + } + return SUCCESS; +} + +void DavinciModel::CreateOutput(uint32_t index, OpDescPtr &op_desc, InputOutputDescInfo &output, + uint32_t 
&format_result) { + /// netoutput input tensor desc + GE_IF_BOOL_EXEC(op_desc->GetInputDescPtr(index) == nullptr, GELOGE(FAILED, "OpDesc GetInputDescPtr is nullptr"); + return); + Format format = op_desc->GetInputDescPtr(index)->GetFormat(); + GeShape shape = op_desc->GetInputDescPtr(index)->GetShape(); + DataType data_type = op_desc->GetInputDescPtr(index)->GetDataType(); + + int64_t dims[] = {1, 1, 1, 1}; + format_result = format; + if (format == FORMAT_ND) { // for ND tensor + for (size_t i = 0; i < shape.GetDimNum() && i < (sizeof(dims) / sizeof(dims[0])); i++) { + dims[i] = shape.GetDim(i); + } + } else { // FOR FORMAT_NHWC or FORMAT_NCHW + dims[0] = shape.GetDim(format == FORMAT_NHWC ? NHWC_DIM_N : NCHW_DIM_N); // 0: first dim + dims[1] = shape.GetDim(format == FORMAT_NHWC ? NHWC_DIM_C : NCHW_DIM_C); // 1: second dim + dims[2] = shape.GetDim(format == FORMAT_NHWC ? NHWC_DIM_H : NCHW_DIM_H); // 2: third dim + dims[3] = shape.GetDim(format == FORMAT_NHWC ? NHWC_DIM_W : NCHW_DIM_W); // 3: forth dim + } + output.shape_info.num = dims[0]; // 0: first dim + output.shape_info.channel = dims[1]; // 1: second dim + output.shape_info.height = dims[2]; // 2: third dim + output.shape_info.width = dims[3]; // 3: forth dim + + if (op_desc->GetInputDescPtr(index)->GetFormat() == FORMAT_FRACTAL_Z) { // FraczToHWCK + int64_t k = shape.GetDim(0); // 0: first dim + int64_t c = shape.GetDim(1); // 1: second dim + int64_t h = shape.GetDim(2); // 2: third dim + int64_t w = shape.GetDim(3); // 3: forth dim + output.shape_info.dims.push_back(h); + output.shape_info.dims.push_back(w); + output.shape_info.dims.push_back(c); + output.shape_info.dims.push_back(k); + format_result = FORMAT_HWCN; + } else { + for (size_t j = 0; j < shape.GetDimNum(); j++) { + output.shape_info.dims.push_back(shape.GetDim(j)); + } + } + + int64_t tensor_size = 0; + (void)TensorUtils::CalcTensorMemSize(shape, format, data_type, tensor_size); + output.size = static_cast(tensor_size); + output.data_type = 
op_desc->GetInputDescPtr(index)->GetDataType(); +} + +Status DavinciModel::GetOutputDescInfo(vector &output_desc, std::vector &formats) { + for (size_t i = 0; i < output_op_list_.size(); i++) { + auto &op_desc = output_op_list_[i]; + uint32_t out_size = static_cast(op_desc->GetOutputsSize()); + + for (uint32_t index = 0; index < out_size; index++) { + bool is_output = false; + GE_IF_BOOL_EXEC(op_desc->GetOutputDescPtr(index) == nullptr, + GELOGE(INTERNAL_ERROR, "OpDesc GetOutputDescPtr is nullptr"); + return INTERNAL_ERROR); + GE_CHK_STATUS(TensorUtils::GetOutputTensor(*op_desc->GetOutputDescPtr(index), is_output), + "get output tensor failed."); + if (!is_output) { + continue; + } + + string output_name; + InputOutputDescInfo output; + uint32_t format_result; + CreateOutput(index, op_desc, output, format_result); + + std::vector src_name = op_desc->GetSrcName(); + std::vector src_index = op_desc->GetSrcIndex(); + GE_CHK_BOOL_RET_STATUS(src_name.size() > index && src_index.size() > index, INTERNAL_ERROR, + "construct output_name failed."); + output_name = std::string("output_") + std::to_string(index) + "_" + src_name[index] + "_" + + std::to_string(src_index[index]); + output.name = output_name; + + output_desc.push_back(output); + formats.push_back(format_result); + } + } + return SUCCESS; +} + +ge::Format DavinciModel::GetFormat() { + if ((data_op_list_.empty()) || data_op_list_[0] == nullptr || data_op_list_[0]->GetInputDescPtr(0) == nullptr) { + GELOGW("OP List Pointer is null or input_desc size is not 1!"); + return FORMAT_NCHW; + } + + return data_op_list_[0]->GetInputDescPtr(0)->GetFormat(); +} + +Status DavinciModel::CopyInputData(const InputData ¤t_data, bool device_data) { + Status ret = SUCCESS; + uint32_t data_op_index = 0; + + for (auto op_desc : data_op_list_) { + ret = CopyInputDataToModel(current_data.blobs, data_op_index, device_data); + + GE_CHK_BOOL_EXEC(ret == SUCCESS, break, "Copy input data to model ret fail, index:%u, model id:%u", + 
current_data.index, current_data.model_id); + data_op_index++; + } + return ret; +} + +Status DavinciModel::SyncVarData() { + GELOGI("SyncBroadCastData2Var model id:%u", model_id_); + Status ret = SUCCESS; + + for (auto op_desc : variable_op_list_) { + ret = + VarManager::Instance(session_id_)->SyncVarData(runtime_param_.graph_id, op_desc->GetName(), op_desc, mem_base_); + GE_CHK_BOOL_EXEC(ret == SUCCESS, break, "sync var data ret fail, model id:%u, op name:%s", model_id_, + op_desc->GetName().c_str()); + } + return ret; +} + +/// +/// @ingroup domi_ome +/// @brief copy input data to Model's firat OP. Address already malloced when Load +/// @copy need datatype transfer: FLOAT to FP16, 4D to 5D; +/// @param [in] data data pointer to be copy +/// @return Status result +/// @author +/// +Status DavinciModel::CopyInputDataToModel(const std::vector &data, uint32_t data_op_index, + bool device_data) { + GE_CHK_BOOL_RET_STATUS(!data_op_list_.empty(), PARAM_INVALID, "data_op_list_ is empty!"); + + GE_CHK_BOOL_RET_STATUS(data_op_list_.size() == data.size(), PARAM_INVALID, + "The input data list size (%zu) does not match the model input list size (%zu)", data.size(), + data_op_list_.size()); + + GE_CHK_BOOL_RET_STATUS(data_op_index < data_op_list_.size(), PARAM_INVALID, + "input data op index(%u) is invalid, exceeds input op size(%zu)", data_op_index, + data_op_list_.size()); + + /// input datatype conversion, converting FLOAT to FP16, 4D to 5D at the same time. + /// Choose respective mode in API parameters. 
+ auto op_def = data_op_list_[data_op_index]; + GE_CHK_BOOL_EXEC(op_def != nullptr, return PARAM_INVALID, "op_def is null!"); + + auto data_index = data_op_index; + if (AttrUtils::GetInt(op_def, "index", data_index)) { + GELOGI("ge_train:get new index %u , old %u", data_index, data_op_index); + } + + GE_CHK_BOOL_EXEC(data_index < data.size(), return PARAM_INVALID, "index:%u >= size:%zu", data_index, data.size()); + GE_CHK_BOOL_RET_STATUS(op_def->GetInputsSize() == 1 && op_def->GetOutputsSize() == 1, PARAM_INVALID, + "Data Op has invalid input_desc_size(%zu) or output_desc_size(%zu)", op_def->GetInputsSize(), + op_def->GetOutputsSize()); + + uint32_t input_size = 0; + GE_CHK_STATUS(TensorUtils::GetSize(*op_def->GetInputDescPtr(0), input_size), "get input size failed."); + + GE_CHK_BOOL_RET_STATUS(input_size >= data[data_index].length, PARAM_INVALID, + "input data size(%u) does not match model required size(%u), ret fail.", + data[data_index].length, input_size); + + // float to float16 + bool need_trans_flag = ModelUtils::IsInputTensorNeedTrans(data_op_list_[data_op_index], 0); + + uint32_t output_size = 0; + GE_CHK_STATUS(TensorUtils::GetSize(*op_def->GetOutputDescPtr(0), output_size), "get output size failed."); + + vector outputs = op_def->GetOutputOffset(); + GE_CHECK_VECTOR_NOT_EMPTY(outputs); + + bool need_memset = false; + (void)AttrUtils::GetBool(op_def, "_need_memset", need_memset); + if (need_memset) { + void *data_out_addr = mem_base_ + outputs[0]; + // data+allreduce output align 512 + uint32_t output_size_align = (output_size + MEM_ALIGN_SIZE - 1) / MEM_ALIGN_SIZE * MEM_ALIGN_SIZE; + GE_CHK_RT_RET(rtMemset(data_out_addr, output_size_align + 1, 0U, output_size_align)); + } + if (device_data) { + return CopyPlainData(data, data_index, data_op_index, outputs, output_size, RT_MEMCPY_DEVICE_TO_DEVICE); + } else if (need_trans_flag) { + return CopyTransData(data, data_index, data_op_index, outputs, output_size); + } else { + return CopyPlainData(data, 
data_index, data_op_index, outputs, output_size, RT_MEMCPY_HOST_TO_DEVICE); + } +} + +Status DavinciModel::CopyTransData(const std::vector &data, uint32_t data_index, uint32_t data_op_index, + const std::vector &outputs, uint32_t output_size) { + GE_CHECK_VECTOR_NOT_EMPTY(outputs); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(outputs[0] == -1, return PARAM_INVALID, "output offset is -1"); + GE_CHK_BOOL_EXEC(data_index < data.size(), return PARAM_INVALID, "index:%u >= size:%zu", data_index, data.size()); + + auto input_tensor_desc = data_op_input_tensor_desc_map_[data_op_list_[data_op_index]->GetName()]; + auto output_tensor_desc = data_op_output_tensor_desc_map_[data_op_list_[data_op_index]->GetName()]; + + uint8_t *src_data = reinterpret_cast(data[data_index].data); + + formats::TransResult tmp_result{}; + auto input_shape = input_tensor_desc->GetShape(); + auto src_data_size = input_shape.GetShapeSize(); + auto src_data_type = input_tensor_desc->GetDataType(); + auto dst_data_type = output_tensor_desc->GetDataType(); + GELOGD("Trans data type from %s to %s, input shape %s, data size %zu", + TypeUtils::DataTypeToSerialString(src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(dst_data_type).c_str(), formats::ShapeToString(input_shape).c_str(), + src_data_size); + auto ret = + formats::TransDataType({src_data, static_cast(src_data_size), src_data_type, dst_data_type}, tmp_result); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to trans data type from %s to %s, input shape %s, data size %zu, error code %d", + TypeUtils::DataTypeToSerialString(src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(dst_data_type).c_str(), formats::ShapeToString(input_shape).c_str(), + src_data_size, ret); + return ret; + } + + void *mem_addr = mem_base_ + outputs[0]; + auto rt_ret = rtMemcpy(mem_addr, runtime_param_.mem_size - outputs[0], tmp_result.data.get(), tmp_result.length, + RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Failed to 
copy memory to device, size %zu", tmp_result.length); + return RT_FAILED; + } + GELOGI("[IMAS]CopyTransData memcpy graph_%u type[F] name[%s] output[%d] datasize[%zu]", runtime_param_.graph_id, + data_op_list_[data_op_index]->GetName().c_str(), 0, tmp_result.length); + return SUCCESS; +} + +Status DavinciModel::CopyPlainData(const std::vector &data, uint32_t data_index, uint32_t data_op_index, + const std::vector &outputs, uint32_t output_size, + rtMemcpyKind_t kind) { + GE_CHK_BOOL_EXEC(data_index < data.size(), return PARAM_INVALID, "index:%u >= size:%zu", data_index, data.size()); + bool flag = data[data_index].isDataSupportMemShare && support_mem_shared_flag_; + // if data attr support zero cpy,then update addrs info to flowtable + if (flag) { + GELOGI("No need to copy input data, user's input data buffer can be shared."); + return SUCCESS; + } + + GE_CHECK_VECTOR_NOT_EMPTY(outputs); + // P2P memory space parameters + void *host_data_addr = data[data_index].data; + uint32_t copy_size = data[data_index].length; + GELOGD("data output tensor is aipp tensor,copy data only."); + + void *data_out_addr = nullptr; + if (VarManager::Instance(session_id_)->IsVarAddr(outputs[0])) { + data_out_addr = var_mem_base_ + outputs[0] - runtime_param_.logic_var_base; + } else { + data_out_addr = mem_base_ + outputs[0]; + GELOGI("output[0]=%ld, copy_size=%u, total_size=%zu", outputs[0], copy_size, TotalMemSize()); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(((uint64_t)outputs[0] + (uint64_t)copy_size) > TotalMemSize(), return INTERNAL_ERROR, + "input offset add size is large than total memory."); + } + + GE_CHK_RT_RET(rtMemcpy(data_out_addr, copy_size, host_data_addr, copy_size, kind)); + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief send Output Op result to upper layer +/// @already malloced in ModelLoad, no need to malloc again +/// @param [in] sink_op Sink Op +/// @return Status result +/// @author +/// +Status DavinciModel::CopyOutputData(uint32_t model_id, uint32_t 
data_id, OutputData &output_data) { + Status ret = SUCCESS; + if (output_op_list_.empty()) { + ret = SyncVarData(); + } else { + output_data.index = data_id; + output_data.model_id = model_id; + GE_CHK_BOOL_RET_STATUS(output_data.blobs.size() == output_size_list_.size(), INTERNAL_ERROR, + "output buffer size[%zu] not equal output_size_list[%zu] size!", output_data.blobs.size(), + output_size_list_.size()); + + // index of data in output_data + uint32_t output_data_index = 0; + for (auto &op_desc : output_op_list_) { + ret = CopyOutputDataToUser(op_desc, output_data.blobs, output_data_index); + GE_CHK_BOOL_EXEC(ret == SUCCESS, break, "Copy input data to model ret fail, index:%u, model id:%u", + output_data.index, output_data.model_id); + } + } + + (void)DumpOpInputOutput(op_list_, model_id); // dump, not care result. + return ret; +} + +Status DavinciModel::CopyOutputDataToUser(OpDescPtr &op_desc, std::vector &blobs, uint32_t &data_index) { + Output model_output(op_desc, this); + + GE_CHK_BOOL_RET_STATUS(model_output.Init() == SUCCESS, PARAM_INVALID, "make shared model_output failed"); + + vector v_output_size; + vector v_output_data_addr; + model_output.GetOutputData(v_output_data_addr, v_output_size); + + // for all output tensor, copy output data from op to designated position + for (size_t i = 0; i < v_output_size.size(); ++i) { + GE_CHK_BOOL_RET_STATUS(data_index < blobs.size(), PARAM_INVALID, + "The blobs size:%zu, data_op size:%zu, curr output size:%zu", blobs.size(), + data_op_list_.size(), v_output_size.size()); + + DataBuffer &data_buf = blobs[data_index]; + data_index++; + + uint32_t size = data_buf.length; + GE_CHK_BOOL_RET_STATUS(size <= v_output_size[i], PARAM_INVALID, + "Model output data size(%u) does not match required size(%u).", v_output_size[i], + data_buf.length); + GE_CHK_RT_RET(rtMemcpy(data_buf.data, size, v_output_data_addr[i], size, RT_MEMCPY_DEVICE_TO_DEVICE)); + } + + return SUCCESS; +} + +Status DavinciModel::SyncDataAndDump() { + Status 
ret = SUCCESS; + if (output_op_list_.empty()) { + ret = SyncVarData(); + } + + (void)DumpOpInputOutput(op_list_, model_id_); // dump, not care result. + return ret; +} + +/// +/// @ingroup domi_ome +/// @brief send Output Op result to upper layer +/// @already malloced in ModelLoad, no need to malloc again +/// @param [in] sink_op Sink Op +/// @return Status result +/// @author +/// +Status DavinciModel::ReturnResult(uint32_t model_id, uint32_t data_id, const bool rslt_flg, const bool seq_end_flag, + OutputData *output_data) { + GE_CHK_BOOL_EXEC(listener_ != nullptr, return PARAM_INVALID, "listener_ is null!"); + if (seq_end_flag) { + GELOGW("End of sequence, model id: %u", model_id); + GE_CHK_STATUS(listener_->OnComputeDone(model_id, data_id, END_OF_SEQUENCE), "OnComputeDone failed"); + return END_OF_SEQUENCE; + } + + // return result is not required + if (!rslt_flg) { + GELOGW("Compute failed, model id: %u", model_id); + GE_CHK_STATUS(listener_->OnComputeDone(model_id, data_id, INTERNAL_ERROR), "OnComputeDone failed"); + return INTERNAL_ERROR; + } + + if (output_op_list_.empty()) { + GELOGW("Output tensor list is empty, model id: %u", model_id); + GE_CHK_STATUS(listener_->OnComputeDone(model_id, data_id, INTERNAL_ERROR), "OnComputeDone failed"); + return INTERNAL_ERROR; + } + + GE_CHECK_NOTNULL(output_data); + // index of data in output_data + uint32_t data_index = 0; + + output_data->index = data_id; + output_data->model_id = model_id; + + // copy output data from op to designated position + for (auto &op_desc : output_op_list_) { + Status ret = ModelOutput::CopyResult(this, op_desc, *output_data, data_index, support_mem_shared_flag_); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "CopyResult failed, op name: %s", op_desc->GetName().c_str()); + GE_CHK_STATUS(listener_->OnComputeDone(model_id, data_id, INTERNAL_ERROR), "OnComputeDone failed"); + return INTERNAL_ERROR; + } + } + + GE_IF_BOOL_EXEC((DumpOpInputOutput(op_list_, model_id) != SUCCESS), + 
GELOGW("dump op failed, model_id: %u", model_id);); + + GE_CHK_STATUS(listener_->OnComputeDone(model_id, data_id, SUCCESS), "OnComputeDone failed"); + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief return not output to upper layer for cloud case +/// @return Status result +/// +Status DavinciModel::ReturnNoOutput(uint32_t model_id, uint32_t data_id) { + GELOGI("ReturnNoOutput model id:%u", model_id); + for (const auto &op_desc : variable_op_list_) { + Status ret = VarManager::Instance(session_id_) + ->SyncBroadCastData2Var(runtime_param_.graph_id, op_desc->GetName(), op_desc, mem_base_); + GE_CHK_BOOL_EXEC(ret == SUCCESS, break, "sync var data ret fail, model id:%u, op name:%s", model_id, + op_desc->GetName().c_str()); + } + + GE_IF_BOOL_EXEC(DumpOpInputOutput(op_list_, model_id) != SUCCESS, GELOGW("dump op failed, model_id: %u", model_id);); + GE_CHK_BOOL_EXEC(listener_ != nullptr, return PARAM_INVALID, "listener_ is null!"); + GE_CHK_STATUS(listener_->OnComputeDone(model_id, data_id, SUCCESS), "OnComputeDone failed"); + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief dump all op input and output information +/// @param [in] op_list model_id +/// @return Status result +/// +Status DavinciModel::DumpOpInputOutput(map &op_list, uint32_t model_id) { + if (op_list.empty()) { + GELOGW("op_list is empty."); + return FAILED; + } +#ifdef FMK_SUPPORT_DUMP + char *ge_dump_env = getenv("DUMP_OP"); + int dump_op_switch = (ge_dump_env != nullptr) ? 
std::strtol(ge_dump_env, nullptr, kDecimal) : 0; + // 10 for decimal number + if (dump_op_switch != 0) { + int64_t cnt = 1; + for (auto it : op_list) { + if (maxDumpOpNum_ != 0 && cnt > maxDumpOpNum_) { + GELOGW("dump op cnt > maxDumpOpNum, maxDumpOpNum: %ld.", maxDumpOpNum_); + return SUCCESS; + } + Status ret = DumpSingleOpInputOutput(it.second, model_id); + cnt++; + if (ret != SUCCESS) { + GELOGE(FAILED, "dump single op failed, model_id: %u", model_id); + return FAILED; + } + } + } +#else + GELOGW("need to define FMK_SUPPORT_DUMP for dump op input and output."); +#endif + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief dump single op input and output information +/// @param [in] dump_op model_id +/// @return Status result +/// +Status DavinciModel::DumpSingleOpInputOutput(const OpDescPtr &op_def, uint32_t model_id) { + GE_CHK_BOOL_EXEC(op_def != nullptr, return PARAM_INVALID, "op_def is null!"); + string op_name = StringUtils::ReplaceAll(op_def->GetName(), "/", "-"); + GELOGI("dump op name:%s, type:%s, model_id: %u", op_def->GetName().c_str(), op_def->GetType().c_str(), model_id); + string model_path = "./dump" + to_string(model_id); + if (mmAccess(model_path.c_str()) != EN_OK) { + int32_t ret = mmMkdir(model_path.c_str(), S_IRUSR | S_IWUSR | S_IXUSR); + if (ret != EN_OK) { + GELOGE(FAILED, "make dir failed, model_id: %u", model_id); + return FAILED; + } + } + const vector input_size_vec = ModelUtils::GetInputSize(op_def); + const vector input_addr_vec = ModelUtils::GetInputDataAddrs(runtime_param_, op_def, false); + for (size_t i = 0; i < input_addr_vec.size(); i++) { + uint32_t input_size = input_size_vec.at(i); + char input_file_name[PATH_MAX] = {0}; + if ((sprintf_s(input_file_name, PATH_MAX, "%s/dump_%u_%s_%s_input_%zu.bin", model_path.c_str(), model_id, + op_def->GetType().c_str(), op_name.c_str(), i)) == -1) { + GELOGE(FAILED, "construct input dump file path failed."); + return FAILED; + } + if ((Debug::DumpDevMem(input_file_name, 
input_addr_vec.at(i), input_size)) != SUCCESS) { + GELOGE(FAILED, "dump to input_file failed"); + return FAILED; + } + } + + const vector output_size_vec = ModelUtils::GetOutputSize(op_def); + const vector output_addr_vec = ModelUtils::GetOutputDataAddrs(runtime_param_, op_def, false); + if (!(op_def->GetType() == "Const")) { + for (size_t i = 0; i < output_addr_vec.size(); i++) { + uint32_t output_size = output_size_vec.at(i); + char output_file_name[PATH_MAX] = {0}; + if ((sprintf_s(output_file_name, PATH_MAX, "%s/dump_%u_%s_%s_output_%zu.bin", model_path.c_str(), model_id, + op_def->GetType().c_str(), op_name.c_str(), i)) == -1) { + GELOGE(FAILED, "construct output dump file path failed."); + return FAILED; + } + if ((Debug::DumpDevMem(output_file_name, output_addr_vec.at(i), output_size)) != SUCCESS) { + GELOGE(FAILED, "dump to output_file failed"); + return FAILED; + } + } + } + return SUCCESS; +} + +void *DavinciModel::Run(DavinciModel *model) { + GE_CHK_BOOL_EXEC(model != nullptr, + CsaInteract::GetInstance().WriteErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + return nullptr, "model_pointer is null!") + bool seq_end_flag = false; + uint32_t interator_count = 0; + uint32_t model_id = model->Id(); + uint32_t device_id = model->GetDeviceId(); + + GELOGI("Model Run thread start, model_id:%u", model_id); + rtError_t rt_ret = rtSetDevice(static_cast(device_id)); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(FAILED, "Model run rtsetdevice failed."); + return nullptr; + } + // DeviceReset before thread run finished! 
+ GE_MAKE_GUARD(not_used_var, [&] { GE_CHK_RT(rtDeviceReset(device_id)); }); + + while (model->RunFlag()) { + bool rslt_flg = true; + if (model->GetDataInputer() == nullptr) { + GELOGW("Data inputer is nullptr."); + CsaInteract::GetInstance().StoreInternalErrorCode(FAILED, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + break; + } + + std::shared_ptr data_wrapper; + Status ret = model->GetDataInputer()->Pop(data_wrapper); + if (data_wrapper == nullptr || ret != SUCCESS) { + GELOGI("data_wrapper is null!"); + continue; + } + GELOGI("Getting the input data, model_id:%u", model_id); + + GE_IF_BOOL_EXEC(!model->RunFlag(), break); + + InputData current_data = data_wrapper->GetInput(); + GELOGI("Model thread Run begin, model id:%u, data index:%d.", model_id, current_data.index); + + GE_TIMESTAMP_START(Model_SyncVarData); + ret = model->SyncVarData(); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG( + ret != SUCCESS, + (void)model->ReturnResult(model->model_id_, current_data.index, false, false, data_wrapper->GetOutput()); + CsaInteract::GetInstance().StoreInternalErrorCode(ret, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + continue, "Copy input data to model failed."); // [No need to check value] + GE_TIMESTAMP_END(Model_SyncVarData, "Model Run SyncVarData"); + + GELOGI("Copy input data, model id:%u", model_id); + ret = model->CopyInputData(current_data, false); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG( + ret != SUCCESS, + (void)model->ReturnResult(model->model_id_, current_data.index, false, false, data_wrapper->GetOutput()); + CsaInteract::GetInstance().StoreInternalErrorCode(ret, ERROR_MODULE_FMK, JOBSUBSTATE_GRAPH_EXEC); + continue, "Copy input data to model failed."); // [No need to check value] + + if (ProfilingManager::Instance().ProfilingOpTraceOn()) { + GELOGI("GetOpTraceIterNum:%d", ProfilingManager::Instance().GetOpTraceIterNum()); + for (int32_t i = 0; i < ProfilingManager::Instance().GetOpTraceIterNum(); i++) { + if (!ProfilingManager::Instance().ProfilingLoadFlag()) { + 
(void)ProfilingManager::Instance().StartProfiling(i); // just profiling, no need to check value + } + // collect profiling for ge + ProfilingManager::Instance().ReportProfilingData(model->GetTaskIdOpName()); + GELOGI("rtModelExecute start."); + rtError_t rt_ret_prof_on = rtModelExecute(model->rt_model_handle_, model->rt_model_stream_, 0); + GE_IF_BOOL_EXEC(rt_ret_prof_on != RT_ERROR_NONE, rslt_flg = false; (void)model->ReturnResult( + model->model_id_, current_data.index, false, false, data_wrapper->GetOutput()); + continue); // [No need to check value] + GELOGI("rtModelExecute end"); + + GELOGI("rtStreamSynchronize start."); + rt_ret_prof_on = rtStreamSynchronize(model->rt_model_stream_); + GE_IF_BOOL_EXEC(rt_ret_prof_on != RT_ERROR_NONE, rslt_flg = false; (void)model->ReturnResult( + model->model_id_, current_data.index, false, seq_end_flag, data_wrapper->GetOutput()); + continue); // [No need to check value] + GELOGI("rtStreamSynchronize end."); + ProfilingManager::Instance().StopProfiling(); // just profiling, no need to check value + } + } else { + GE_TIMESTAMP_START(rtModelExecute); + GELOGI("rtModelExecute start."); + rtError_t rt_ret_prof_off = rtModelExecute(model->rt_model_handle_, model->rt_model_stream_, 0); + GE_IF_BOOL_EXEC( + rt_ret_prof_off != RT_ERROR_NONE, rslt_flg = false; + (void)model->ReturnResult(model->model_id_, current_data.index, false, false, data_wrapper->GetOutput()); + CsaInteract::GetInstance().WriteErrorCode(rt_ret_prof_off, ERROR_MODULE_RUNTIME, JOBSUBSTATE_GRAPH_EXEC); + continue); + GELOGI("rtModelExecute end"); + GE_TIMESTAMP_END(rtModelExecute, "GraphExcute::rtModelExecute"); + + GE_TIMESTAMP_START(rtStreamSynchronize); + GELOGI("rtStreamSynchronize start."); + rt_ret_prof_off = rtStreamSynchronize(model->rt_model_stream_); + if (rt_ret_prof_off == RT_ERROR_END_OF_SEQUENCE) { + seq_end_flag = true; + } + GE_IF_BOOL_EXEC(rt_ret_prof_off != RT_ERROR_NONE, rslt_flg = false; GELOGI("seq_end_flg: %d", seq_end_flag); + 
(void)model->ReturnResult(model->model_id_, current_data.index, false, seq_end_flag, + data_wrapper->GetOutput()); // [No need to check value] + CsaInteract::GetInstance().StoreInternalErrorCode(rt_ret_prof_off, ERROR_MODULE_RUNTIME, + JOBSUBSTATE_GRAPH_EXEC); + continue); + GELOGI("rtStreamSynchronize end."); + GE_TIMESTAMP_END(rtStreamSynchronize, "GraphExcute::Wait for rtStreamSynchronize"); + + // collect profiling for ge + if (ProfilingManager::Instance().ProfilingOn()) { + ProfilingManager::Instance().ReportProfilingData(model->GetTaskIdOpName()); + } + } + + GE_TIMESTAMP_START(ReturnResult3); + // copy output data from device to host + GE_IF_BOOL_EXEC( + !model->output_op_list_.empty(), + (void)model->ReturnResult(model->model_id_, current_data.index, rslt_flg, false, data_wrapper->GetOutput())) + // copy output data from device to host for variable graph + GE_IF_BOOL_EXEC(model->output_op_list_.empty(), (void)model->ReturnNoOutput(model->model_id_, current_data.index)); + GE_TIMESTAMP_END(ReturnResult3, "GraphExcute::CopyDataFromDeviceToHost"); + + interator_count++; + GELOGI("interator_count=%u", interator_count); + } + + CsaInteract::GetInstance().WriteInternalErrorCode(); + GELOGI("Model run end, model id:%u", model->model_id_); + GEEVENT("Model Run thread end, model_id:%u", model->model_id_); + return nullptr; +} + +/// +/// @ingroup domi_ome +/// @brief call API provided by data inputer to destroy thread +/// @param [in] no +/// @return Status Destroy result +/// @author +/// +Status DavinciModel::DestroyThread() { + GE_CHK_BOOL_RET_STATUS(data_inputer_ != nullptr, INTERNAL_ERROR, "data_inputer_ is nullptr!"); + + run_flg_ = false; + + data_inputer_->Stop(); + + if (thread_id_.joinable()) { + thread_id_.join(); + } + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief create model std::thread, +/// @brief start to execute Model +/// @param [in] no +/// @return Status create model thread and execute result +/// @author +/// +Status 
DavinciModel::ModelRunStart() { + GE_CHK_BOOL_RET_STATUS((DavinciModel::GetSysMode() != RESET) && (DavinciModel::GetSysMode() != STOP), INTERNAL_ERROR, + "Model Start FAIL in wrong sys mode!"); + + GE_CHK_BOOL_RET_STATUS(data_inputer_ != nullptr, INTERNAL_ERROR, "data_inputer_ is nullptr!"); + + LockRunFlg(); + GE_MAKE_GUARD(tmp_lock, [&] { UnlockRunFlg(); }); + + GE_CHK_BOOL_RET_STATUS(!run_flg_, INTERNAL_ERROR, "Model already started!"); + + run_flg_ = true; + + // create stream instance which rt_model_handel is running on + GE_CHK_RT_RET(rtStreamCreate(&rt_model_stream_, priority_)); + is_inner_model_stream_ = true; + + string opt = "0"; + (void)ge::GetContext().GetOption("ge.maxDumpOpNum", opt); // option may not be set up, no need to check value + int64_t maxDumpOpNum = std::strtol(opt.c_str(), nullptr, kDecimal); + maxDumpOpNum_ = maxDumpOpNum; + + CREATE_STD_THREAD(thread_id_, DavinciModel::Run, this); + GELOGI("model tread create success, model id:%u", model_id_); + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief call API provided by data inputer and destroy model Thread +/// @param [in] no +/// @return Status Destroy result +/// @author +/// +Status DavinciModel::ModelRunStop() { + GE_CHK_BOOL_RET_STATUS((DavinciModel::GetSysMode() != RESET) && (DavinciModel::GetSysMode() != STOP), INTERNAL_ERROR, + "Model stop FAIL in wrong sys mode!"); + + LockRunFlg(); + GE_MAKE_GUARD(tmp_lock, [&] { UnlockRunFlg(); }); + + GE_IF_BOOL_EXEC(!run_flg_, return SUCCESS); + + GE_CHK_STATUS_RET(DestroyThread(), "DestoyThead failed!"); + + return SUCCESS; +} + +void DavinciModel::UnbindTaskSinkStream() { + // unbinding hcom stream + UnbindHcomStream(); + for (size_t i = 0; i < stream_list_.size(); i++) { + // unbind rt_model_handle and streams + GE_LOGW_IF(rtModelUnbindStream(rt_model_handle_, stream_list_[i]) != RT_ERROR_NONE, + "Unbind stream from model failed! 
Index: %zu", i); + } + + if (is_inner_model_stream_) { + // destroy stream that is bound with rt_model + GE_LOGW_IF(rtStreamDestroy(rt_model_stream_) != RT_ERROR_NONE, "Destroy stream for rt_model failed!") + } + return; +} + +Status DavinciModel::InitTaskInfo(ModelTaskDef &model_task_def) { + GELOGI("InitTaskInfo in,task size %zu", model_task_def.task().size()); + task_list_.resize(model_task_def.task_size()); + std::vector> futures(model_task_def.task_size()); + constexpr uint32_t thread_num = THREAD_NUM; + ThreadPool executor(thread_num); + rtContext_t ctx = nullptr; + rtError_t rt_ret = rtCtxGetCurrent(&ctx); + if (rt_ret != RT_ERROR_NONE || ctx == nullptr) { + GELOGE(RT_FAILED, "Failed to get current context from rt, error-code 0x%X.", rt_ret); + return RT_FAILED; + } + + for (int32_t i = 0; i < model_task_def.task_size(); ++i) { + futures[i] = executor.commit( + [](const domi::TaskDef &task, DavinciModel *model, rtContext_t ctx, int32_t idx) -> Status { + rtError_t ctx_ret = rtCtxSetCurrent(ctx); + if (ctx_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Failed to set context from rt, error-code 0x%X.", ctx_ret); + return RT_FAILED; + } + + model->task_list_[idx] = TaskInfoFactory::Instance().Create(static_cast(task.type())); + + Status ret = FAILED; + if (model->task_list_[idx] != nullptr) { + ret = model->task_list_[idx]->Init(task, model); + } + return ret; + }, + model_task_def.task(i), this, ctx, i); + } + + Status ret; + for (size_t i = 0; i < futures.size(); ++i) { + ret = futures[i].get(); + if (ret != SUCCESS) { + GELOGE(ret, "Task index %zu init fail.", i); + return ret; + } + } + + GELOGI("InitTaskInfo out"); + return SUCCESS; +} + +Status DavinciModel::DistributeTask() { + GELOGI("do Distribute."); + + op_task_id_map_.clear(); + Status ret; + for (size_t task_index = 0; task_index < task_list_.size(); ++task_index) { + auto &task = task_list_.at(task_index); + if (task == nullptr) { + GELOGW("task is null"); + continue; + } + ret = task->Distribute(); 
+ if (ret != SUCCESS) { + GELOGE(ret, "Distribute Fail!"); + return ret; + } + + // for data dump + if (reinterpret_cast(task->GetDumpArgs()) != nullptr) { + auto op_index = std::max(model_task_def_->task(task_index).kernel().context().op_index(), + model_task_def_->task(task_index).kernel_ex().op_index()); + OpDescPtr op = GetOpByIndex(op_index); + if (op == nullptr) { + GELOGE(PARAM_INVALID, "Op index %u is null, op list size %zu.", op_index, op_list_.size()); + return PARAM_INVALID; + } + + if (PropertiesManager::Instance().IsLayerNeedDump(name_, op->GetName())) { + data_dumper_.SaveDumpTask(task->GetTaskID(), op, task->GetDumpArgs()); + } + } + + // get op_name by task_index + if (task->GetCtx() != nullptr) { + auto iter = op_name_map_.find(task_index); + if (iter == op_name_map_.end()) { + continue; + } + + // else task index is found in op_name_map_ + string op_name = op_name_map_[task_index]; + op_task_id_map_[task->GetTaskID()] = op_name; + } + } + + // launch dump kernel to aicpu + ret = data_dumper_.LoadDumpInfo(); + if (ret != SUCCESS) { + GELOGE(ret, "Load dump info fail."); + return ret; + } + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief Save Data and NetOutput address info for ZeroCopy. +/// @param [in] const std::vector &outside_addrs +/// @return None. +/// +void DavinciModel::SetOutsideAddr(const std::vector &outside_addrs) { + for (auto addr : outside_addrs) { + if (outside_addrs_.find(addr) != outside_addrs_.end()) { + continue; + } + + (void)outside_addrs_.emplace(std::pair>(addr, {})); + GELOGI("SetOutsideAddr success."); + } +} + +/// +/// @ingroup domi_ome +/// @brief Save outside address used info for ZeroCopy. +/// @param [in] const std::vector &outside_addrs: address of task +/// @param [in] const char *args_offset: arguments address save the address. +/// @return None. 
+/// +void DavinciModel::SetZeroCopyAddr(const std::vector &outside_addrs, void *args_offset) { + size_t nums = outside_addrs.size(); + for (size_t i = 0; i < nums; ++i) { + std::lock_guard lock(outside_addrs_mutex_); + auto it = outside_addrs_.find(outside_addrs[i]); + if (it == outside_addrs_.end()) { + continue; + } + + it->second.push_back(static_cast(args_offset) + i * sizeof(void *)); + GELOGI("SetZeroCopyAddr of outside_addrs."); + } +} + +/// +/// @ingroup domi_ome +/// @brief Copy Inputs and Outputs addr to model for direct use. +/// @param [in] const domi::InputData &input_data: model input data. +/// @param [in] domi::OutputData &output_data: model output data. +/// @return SUCCESS handle successfully / PARAM_INVALID for failed +/// +Status DavinciModel::ModelZeroCopy(const InputData &input_data, OutputData &output_data) { + if (ZeroCopyInput(input_data) != SUCCESS) { + GELOGE(PARAM_INVALID, "ZeroCopyInput failed."); + return PARAM_INVALID; + } + + if (ZeroCopyOutput(output_data) != SUCCESS) { + GELOGE(PARAM_INVALID, "ZeroCopyOutput failed."); + return PARAM_INVALID; + } + + output_data.index = input_data.index; + output_data.model_id = model_id_; + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief Copy Data addr to model for direct use. +/// @param [in] const domi::InputData &input_data: model input data info. 
+/// @return SUCCESS handle successfully / others handle failed +/// +Status DavinciModel::ZeroCopyInput(const InputData &input_data) { + GE_CHK_BOOL_RET_STATUS(!data_op_list_.empty(), SUCCESS, "data_op_list_ is empty!"); + GE_CHK_BOOL_RET_STATUS(data_op_list_.size() == input_data.blobs.size(), PARAM_INVALID, + "The input data list size (%zu) does not match the model input list size (%zu)", + input_data.blobs.size(), data_op_list_.size()); + + const std::vector &blobs = input_data.blobs; + for (size_t data_op_index = 0; data_op_index < data_op_list_.size(); ++data_op_index) { + auto op_desc = data_op_list_[data_op_index]; + GE_CHK_BOOL_EXEC(op_desc != nullptr, return PARAM_INVALID, "op_desc is null!"); + + auto data_index = static_cast(data_op_index); + if (AttrUtils::GetInt(op_desc, "index", data_index)) { + GELOGI("ge_train:get new index %u , old %zu", data_index, data_op_index); + } + GE_CHK_BOOL_EXEC(data_index < blobs.size(), return PARAM_INVALID, "index:%u >= size:%zu", data_index, blobs.size()); + GE_CHK_BOOL_RET_STATUS(op_desc->GetInputsSize() == 1 && op_desc->GetOutputsSize() == 1, PARAM_INVALID, + "Data Op has invalid input_desc_size(%zu) or output_desc_size(%zu)", + op_desc->GetInputsSize(), op_desc->GetOutputsSize()); + + uint32_t input_size = 0; + const DataBuffer &data_buf = blobs[data_index]; + GE_CHK_STATUS(TensorUtils::GetSize(*op_desc->GetInputDescPtr(0), input_size), "get input size failed."); + GE_CHK_BOOL_RET_STATUS(input_size >= data_buf.length, PARAM_INVALID, + "input data size(%u) does not match model required size(%u), ret fail.", data_buf.length, + input_size); + + const vector &outputs = ModelUtils::GetOutputDataAddrs(runtime_param_, op_desc); + if (data_buf.data == nullptr) { + GELOGE(INTERNAL_ERROR, "data_buf.data is nullptr"); + return INTERNAL_ERROR; + } + if (!outputs.empty() && ZeroCopyImpl(outputs[0], data_buf) != SUCCESS) { + return FAILED; + } + } + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief Copy NetOutput 
addr to model for direct use.
/// @param [in] const domi::OutputData &output_data: model output data info.
/// @return SUCCESS handle successfully / others handle failed
///
Status DavinciModel::ZeroCopyOutput(const OutputData &output_data) {
  GE_CHK_BOOL_RET_STATUS(output_data.blobs.size() == output_size_list_.size(), INTERNAL_ERROR,
                         "output buffer size[%zu] not equal output_size_list[%zu] size!", output_data.blobs.size(),
                         output_size_list_.size());

  // index of data in output_data
  uint32_t output_data_index = 0;
  const std::vector<DataBuffer> &blobs = output_data.blobs;
  for (auto &op_desc : output_op_list_) {
    Output model_output(op_desc, this);
    GE_CHK_BOOL_RET_STATUS(model_output.Init() == SUCCESS, PARAM_INVALID, "init model_output failed");
    // NOTE(review): element types reconstructed (sizes logged with %u, addresses fed to
    // ZeroCopyImpl) — the template arguments were stripped in the mangled source; confirm.
    vector<uint32_t> v_output_size = ModelUtils::GetInputSize(op_desc);
    vector<void *> v_output_data_addr = ModelUtils::GetInputDataAddrs(runtime_param_, op_desc);

    // for all output tensor, copy output data from op to designated position
    for (size_t i = 0; i < op_desc->GetOutputsSize(); ++i) {
      GE_CHK_BOOL_RET_STATUS(output_data_index < blobs.size(), PARAM_INVALID,
                             "The blobs size:%zu, data_op size:%zu, curr output size:%zu", blobs.size(),
                             data_op_list_.size(), op_desc->GetOutputsSize());
      const DataBuffer &data_buf = blobs[output_data_index];
      output_data_index++;
      uint32_t size = data_buf.length;
      // Fixed: the message arguments were swapped — "output data size" is the user
      // buffer length (data_buf.length), "required size" is the model's size.
      GE_CHK_BOOL_RET_STATUS(size <= v_output_size[i], PARAM_INVALID,
                             "Model output data size(%u) does not match required size(%u).", data_buf.length,
                             v_output_size[i]);

      GELOGI("ZeroCopyOutput memcpy graph_%u type[F] name[%s] output[%lu] memsize[%u] datasize[%u]",
             runtime_param_.graph_id, op_desc->GetName().c_str(), i, data_buf.length, v_output_size[i]);
      if (ZeroCopyImpl(v_output_data_addr[i], data_buf) != SUCCESS) {
        return FAILED;
      }
    }
  }

  return SUCCESS;
}

///
/// @ingroup domi_ome
/// @brief Copy address to args_ space for direct use.
/// @param [in] const void *src_addr: source address of the Op.
+/// @param [in] const void *dst_addr: destination address of user data. +/// @return SUCCESS handle successfully / others handle failed +/// +Status DavinciModel::ZeroCopyImpl(const void *src_addr, const DataBuffer &data_buf) { + auto it = outside_addrs_.find(src_addr); + if (it == outside_addrs_.end()) { + GELOGE(FAILED, "ZeroCopyImpl failed to find outside_addrs."); + return FAILED; + } + + auto dst_addr = static_cast(data_buf.data); + auto dst_size = static_cast(data_buf.length); + Status ret = ModelUtils::ConvertVirtualAddressToPhysical(dst_addr, dst_size, dst_addr); + if (ret != SUCCESS) { + GELOGE(FAILED, "Convert virtual address to physical for dst_addr failed."); + return FAILED; + } + + for (auto &addr : it->second) { + __builtin_prefetch(addr); + rtError_t rt_err = rtMemcpy(addr, sizeof(void *), &dst_addr, sizeof(void *), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_err != RT_ERROR_NONE) { + GELOGE(FAILED, "ZeroCopyImpl: rtMemcpy failed"); + return FAILED; + } + } + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief get unique identification for op when load two or more models +/// @param [in] const OpDescPtr: current op. +/// @param [in] string identification: unique identification for current op. +/// @return SUCCESS handle successfully / others handle failed +/// +void DavinciModel::GetUniqueId(const OpDescPtr &op_desc, std::string &unique_identification) { + std::string session_graph_id; + GE_IF_BOOL_EXEC(AttrUtils::GetStr(*op_desc, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id), + GELOGI("Get original type of session_graph_id.")); + if (session_graph_id.empty()) { + return; + } else if (session_graph_id.find("-1") != string::npos) { + unique_identification = session_graph_id + "_" + to_string(model_id_); + } else { + unique_identification = session_graph_id; + } +} + +/// +/// @ingroup domi_ome +/// @brief For TVM Op, avoid Addr Reuse. 
+/// @return void* +/// +const char *DavinciModel::GetRegisterStub(const string &binfile, const string &session_graph_id) { + string binfile_key; + if (session_graph_id.empty()) { + binfile_key = binfile; + } else { + binfile_key = session_graph_id + "_" + binfile; + } + std::lock_guard lock(tvm_bin_mutex_); + auto it = tvm_bin_kernel_.find(binfile_key); + if (it != tvm_bin_kernel_.end()) { + return it->c_str(); + } else { + it = tvm_bin_kernel_.insert(tvm_bin_kernel_.end(), binfile_key); + return it->c_str(); + } +} + +/// +/// @ingroup domi_ome +/// @brief Constant Op Init. +/// @return Status +/// +Status DavinciModel::InitConstant(const ConstOpDescPtr &op_desc) const { + auto v_weights = ModelUtils::GetWeights(op_desc); + auto v_output_size = ModelUtils::GetOutputSize(op_desc); + auto v_output_addr = ModelUtils::GetOutputDataAddrs(runtime_param_, op_desc); + GE_IF_BOOL_EXEC(v_weights.empty() || v_output_size.empty() || v_output_addr.empty(), + GELOGE(PARAM_INVALID, "const op:%s not set output", op_desc->GetName().c_str()); + return PARAM_INVALID;); + + GeTensor *tensor = const_cast(v_weights[0].get()); + GE_IF_BOOL_EXEC(v_output_size[0] < tensor->GetData().size(), + GELOGE(PARAM_INVALID, "output size:%u less than weight data size:%zu", v_output_size[0], + tensor->GetData().size()); + return PARAM_INVALID;); + + GE_IF_BOOL_EXEC(tensor->GetData().size() == 0, GELOGW("const op:%s has no weight data.", op_desc->GetName().c_str()); + return SUCCESS;); + + auto desc = tensor->GetTensorDesc(); + if (desc.GetDataType() == DT_STRING) { + GeShape tensor_shape = desc.GetShape(); + /// if tensor is a scaler, it's shape size if zero, according ge_tensor.cc. + /// the logic of GetShapeSize is wrong, the scaler tensor's GetShapeSize is zero + /// and that of unknown shape is zero too. + /// unknown shape will not appear here, so we can use zero judge a tensor is scaler or not + int64_t elem_num = tensor_shape.GetShapeSize() == 0 ? 
1 : tensor_shape.GetShapeSize(); + uint64_t *buff = reinterpret_cast(tensor->MutableData().data()); + GE_CHK_BOOL_RET_STATUS(CheckInt64Uint32MulOverflow(elem_num, kBytes) == SUCCESS, FAILED, "Shape size is invalid"); + int64_t offset = elem_num * kBytes; + + uint64_t hbm_raw_data_base_addr = reinterpret_cast(v_output_addr[0]) + offset; + for (int64_t i = elem_num - 1; i >= 0; --i) { + buff[i] = hbm_raw_data_base_addr + (buff[i] - buff[0]); + } + } + GE_CHK_RT_RET(rtMemcpy(v_output_addr[0], v_output_size[0], tensor->GetData().data(), tensor->GetData().size(), + RT_MEMCPY_HOST_TO_DEVICE)); + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief TVM Op Init. +/// @return Status +/// + +Status DavinciModel::InitTbeHandle(const OpDescPtr &op_desc) { + TBEKernelPtr tbe_kernel = op_desc->TryGetExtAttr(OP_EXTATTR_NAME_TBE_KERNEL, TBEKernelPtr()); + if (tbe_kernel == nullptr) { + GELOGE(INTERNAL_ERROR, "TBE: %s can't find tvm bin file!", op_desc->GetName().c_str()); + return INTERNAL_ERROR; + } + + std::string session_graph_model_id; + GetUniqueId(op_desc, session_graph_model_id); + const char *bin_file_key = GetRegisterStub(op_desc->GetName(), session_graph_model_id); // from set, always valid. 
+ TBEHandleStore &kernel_store = TBEHandleStore::GetInstance(); + + std::lock_guard lock(tvm_bin_mutex_); + if (rtQueryFunctionRegistered(bin_file_key) != RT_ERROR_NONE) { + void *bin_handle = nullptr; + if (!kernel_store.FindTBEHandle(bin_file_key, bin_handle)) { + GELOGI("TBE: can't find the kernel_name[%s] in HandleMap", bin_file_key); + + rtDevBinary_t binary; + std::string json_string; + GE_IF_BOOL_EXEC(AttrUtils::GetStr(op_desc, TVM_ATTR_NAME_MAGIC, json_string), + GELOGI("Get original type of session_graph_id.")); + if (json_string == "RT_DEV_BINARY_MAGIC_ELF_AICPU") { + binary.magic = RT_DEV_BINARY_MAGIC_ELF_AICPU; + } else if (json_string == "RT_DEV_BINARY_MAGIC_ELF") { + binary.magic = RT_DEV_BINARY_MAGIC_ELF; + } else { + GELOGE(PARAM_INVALID, "TBE: Invalid parameter magic number! json: %s", json_string.c_str()); + return PARAM_INVALID; + } + + binary.version = 0; + binary.data = tbe_kernel->GetBinData(); + binary.length = tbe_kernel->GetBinDataSize(); + + GELOGI("TBE: binary.length: %lu", binary.length); + GE_CHK_RT_RET(rtDevBinaryRegister(&binary, &bin_handle)); + + std::string meta_data; + GE_IF_BOOL_EXEC(AttrUtils::GetStr(op_desc, TVM_ATTR_NAME_METADATA, meta_data), + GELOGI("Get original type of json_string")); + GELOGI("TBE: meta data: %s", meta_data.empty() ? 
"null" : meta_data.c_str()); + GE_IF_BOOL_EXEC(!meta_data.empty(), GE_CHK_RT_RET(rtMetadataRegister(bin_handle, meta_data.c_str()))); + + kernel_store.StoreTBEHandle(bin_file_key, bin_handle, tbe_kernel); + } else { + GELOGI("TBE: find the kernel_name[%s] in HandleMap", bin_file_key); + kernel_store.ReferTBEHandle(bin_file_key); + } + + std::string kernel_name; + GE_IF_BOOL_EXEC(AttrUtils::GetStr(op_desc, op_desc->GetName() + "_kernelname", kernel_name), + GELOGI("Get original type of kernel_name")); + GELOGI("TBE: binfile_key=%s, kernel_name=%s", bin_file_key, kernel_name.c_str()); + GE_CHK_RT_RET(rtFunctionRegister(bin_handle, bin_file_key, bin_file_key, kernel_name.c_str(), 0)); + used_tbe_handle_map_[bin_file_key] = 1; // Init used num to 1. + return SUCCESS; + } + + // Kernel registed, Increase used num in store. + StoreTbeHandle(bin_file_key); + return SUCCESS; +} + +void DavinciModel::StoreTbeHandle(const std::string &handle_key) { + // Online mode FE may call rtFunctionRegister. + TBEHandleStore &kernel_store = TBEHandleStore::GetInstance(); + + // Need protection of tvm_bin_mutex_. + auto it = used_tbe_handle_map_.find(handle_key); + if (it != used_tbe_handle_map_.end()) { + // GE registered, increase reference. + kernel_store.ReferTBEHandle(handle_key); + it->second++; + return; + } + + void *bin_handle = nullptr; + if (kernel_store.FindTBEHandle(handle_key, bin_handle)) { + // GE registered, increase reference. + used_tbe_handle_map_[handle_key] = 1; // Init used num to 1. 
+ kernel_store.ReferTBEHandle(handle_key); + } +} + +void DavinciModel::CleanTbeHandle() { + TBEHandleStore &kernel_store = TBEHandleStore::GetInstance(); + + std::lock_guard lock(tvm_bin_mutex_); + kernel_store.EraseTBEHandle(used_tbe_handle_map_); + used_tbe_handle_map_.clear(); +} + +/// +/// @ingroup domi_ome +/// @brief insert active_stream_indication_ +/// @return Status +/// +Status DavinciModel::MarkActiveStream(const OpDescPtr &op_desc) { + GE_CHECK_NOTNULL(op_desc); + std::string type = op_desc->GetType(); + GE_IF_BOOL_EXEC( + type == STREAMSWITCH, std::vector active_stream_list; + GE_LOGI_IF(!ge::AttrUtils::GetListInt(op_desc, ATTR_NAME_ACTIVE_STREAM_LIST, active_stream_list), + "GetInt ACTIVE_STREAM_LIST fail."); + if (active_stream_list.size() != TRUE_BRANCH_STREAM_NUM) { + GELOGE(INTERNAL_ERROR, "Stream num of switch true branch must be %u.", TRUE_BRANCH_STREAM_NUM); + return INTERNAL_ERROR; + } uint32_t true_stream_id = active_stream_list.front(); + active_stream_indication_.insert(true_stream_id); + GELOGI("flowctrl_op_index_map node:%s, true_stream_id=%u.", op_desc->GetName().c_str(), true_stream_id);); + GE_IF_BOOL_EXEC(type == STREAMACTIVE, if (op_desc->HasAttr(ATTR_NAME_SWITCH_BRANCH_NODE_LABEL)) { + std::vector active_stream_list; + GE_CHK_BOOL_EXEC(AttrUtils::GetListInt(op_desc, ATTR_NAME_ACTIVE_STREAM_LIST, active_stream_list), + return INTERNAL_ERROR, "StreamActiveOp get attr ACTIVE_STREAM fail."); + + for (size_t j = 0; j < active_stream_list.size(); ++j) { + active_stream_indication_.insert(active_stream_list[j]); + GELOGI("flowctrl_op_index_map node:%s, active_stream_id=%u.", op_desc->GetName().c_str(), active_stream_list[j]); + } + }); + return SUCCESS; +} + +bool DavinciModel::IsBroadCastOpData(const ge::NodePtr &var_node) { + for (const auto &out_anchor : var_node->GetAllOutDataAnchors()) { + GE_RT_FALSE_CHECK_NOTNULL(out_anchor); + for (const auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + 
GE_RT_FALSE_CHECK_NOTNULL(in_anchor); + ge::NodePtr dst_node = in_anchor->GetOwnerNode(); + GE_RT_FALSE_CHECK_NOTNULL(dst_node); + if (dst_node->GetType() == HCOMBROADCAST) { + return true; + } + } + } + return false; +} + +/// +/// @ingroup domi_ome +/// @brief Init model stream for NN model. +/// @param [in] stream user input model stream. +/// @param [in] async_mode is asynchronize mode. +/// @return Status +/// +Status DavinciModel::InitModelStream(rtStream_t stream, bool async_mode) { + // asynchronize mode, use user input stream. + if (async_mode) { + rt_model_stream_ = stream; + is_inner_model_stream_ = false; + return SUCCESS; + } + + // synchronize mode, use forbidden stream. + if (stream != nullptr) { + if ((rt_model_stream_ != nullptr) && is_inner_model_stream_) { + if (rtStreamDestroy(rt_model_stream_) != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Destroy rt_stream failed!"); + return FAILED; + } + } + + rt_model_stream_ = stream; + is_inner_model_stream_ = false; + return SUCCESS; + } + + if (rt_model_stream_ == nullptr) { + GE_CHK_RT_RET(rtStreamCreateWithFlags(&rt_model_stream_, priority_, RT_STREAM_FORBIDDEN_DEFAULT)); + is_inner_model_stream_ = true; + } + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief ACL case, do not start new thread, return execute result. +/// @param [in] stream execute model stream. +/// @param [in] async_mode is asynchronize mode. +/// @param [in] input_data model input data. +/// @param [out] output_data model output data. 
+/// +Status DavinciModel::NnExecute(rtStream_t stream, bool async_mode, const InputData &input_data, + OutputData &output_data) { + GELOGI("Model Run begin, model id:%u, data index:%d, flag:%d.", model_id_, input_data.index, async_mode); + GE_CHK_STATUS(InitModelStream(stream, async_mode), "Init model stream fail."); + + GELOGI("do rtModelExecute task sink, model id:%u", input_data.model_id); + Status ret = ModelZeroCopy(input_data, output_data); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, return INTERNAL_ERROR, "Copy input data to model failed."); + + GELOGI("current_data.index=%u", input_data.index); + + GELOGD("rtModelExecute do"); + + rtError_t rt_ret = rtModelExecute(rt_model_handle_, rt_model_stream_, 0); + GE_CHK_RT_EXEC(rt_ret, return INTERNAL_ERROR); + GELOGI("rtModelExecute end"); + + if (async_mode) { + rt_ret = rtStreamSynchronize(rt_model_stream_); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, return INTERNAL_ERROR); + } + + ret = SyncDataAndDump(); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, return INTERNAL_ERROR, "Copy Output data to user failed."); + + // collect profiling for ge + if (ProfilingManager::Instance().ProfilingOn()) { + ProfilingManager::Instance().ReportProfilingData(op_task_id_map_); + GELOGI("Acl Profiling Op name taskId report."); + } + + GELOGI("Model run end, model id:%u", model_id_); + GEEVENT("Model Run thread end, model_id:%u", model_id_); + return SUCCESS; +} + +uint8_t *DavinciModel::MallocFeatureMapMem(uint64_t data_size) { + uint8_t *mem_base = nullptr; + if (std::getenv(kEnvGeuseStaticMemory) != nullptr) { + data_size = static_cast(VarManager::Instance(0)->GetGraphMemoryMaxSize()); + string memory_key = std::to_string(0) + "_f"; + mem_base = MemManager::Instance(RT_MEMORY_HBM)->MallocMemory(memory_key, data_size, GetDeviceId()); + } else { + mem_base = MemManager::Instance(RT_MEMORY_HBM)->MallocMemory(data_size, GetDeviceId()); + } + + if (mem_base != nullptr) { + GE_CHK_RT(rtMemset(mem_base, data_size, 0U, 
data_size)); + } + return mem_base; +} + +uint8_t *DavinciModel::MallocWeightsMem(uint32_t weights_size) { + uint8_t *weights_mem_base = nullptr; + if (std::getenv(kEnvGeuseStaticMemory) != nullptr) { + string weight_memory_key = std::to_string(0) + "_w"; + weights_mem_base = + MemManager::Instance(RT_MEMORY_HBM)->MallocMemory(weight_memory_key, weights_size, GetDeviceId()); + } else { + weights_mem_base = MemManager::Instance(RT_MEMORY_HBM)->MallocMemory(weights_size, GetDeviceId()); + } + return weights_mem_base; +} + +void DavinciModel::FreeFeatureMapMem() { + if (std::getenv(kEnvGeuseStaticMemory) != nullptr) { + string weight_memory_key = std::to_string(0) + "_f"; + if (MemManager::Instance(RT_MEMORY_HBM)->GetMemoryAddr(weight_memory_key) != nullptr) { + GE_CHK_STATUS(MemManager::Instance(RT_MEMORY_HBM)->FreeMemory(weight_memory_key, GetDeviceId()), + "failed to free weight memory"); + } + mem_base_ = nullptr; + } else { + GE_IF_BOOL_EXEC(mem_base_ != nullptr && is_inner_mem_base_, + GE_CHK_STATUS(MemManager::Instance(RT_MEMORY_HBM)->FreeMemory(mem_base_, GetDeviceId()), + "failed to free feature_map memory"); + mem_base_ = nullptr); + } +} + +void DavinciModel::FreeWeightsMem() { + if (std::getenv(kEnvGeuseStaticMemory) != nullptr) { + string memory_key = std::to_string(0) + "_w"; + if (MemManager::Instance(RT_MEMORY_HBM)->GetMemoryAddr(memory_key) != nullptr) { + GE_CHK_STATUS(MemManager::Instance(RT_MEMORY_HBM)->FreeMemory(memory_key, GetDeviceId()), + "failed to free feature_map memory"); + } + weights_mem_base_ = nullptr; + } else { + GE_IF_BOOL_EXEC(weights_mem_base_ != nullptr && weights_mem_base_ != mem_base_ && is_inner_weight_base_, + GE_CHK_STATUS(MemManager::Instance(RT_MEMORY_HBM)->FreeMemory(weights_mem_base_, GetDeviceId()), + "failed to free weight memory"); + weights_mem_base_ = nullptr); + } +} + +uint32_t DavinciModel::GetGraphID(const std::string &session_graph_id) { + std::string session_id = "_"; + auto pos = 
session_graph_id.find(session_id); + if (pos != std::string::npos) { + size_t graph_id_length = session_graph_id.length() - pos - session_id.length(); + std::string graph_id = session_graph_id.substr(pos + session_id.length(), graph_id_length); + return static_cast(std::strtol(graph_id.c_str(), nullptr, kDecimal)); + } + return 0; +} + +Status DavinciModel::TransAllVarData(ComputeGraphPtr &graph, uint32_t graph_id) { + GELOGI("TransAllVarData start: session_id:%lu, graph_id: %u.", session_id_, graph_id); + + ThreadPool executor(THREAD_NUM); + std::vector> vector_future; + + rtContext_t ctx = nullptr; + rtError_t rt_ret = rtCtxGetCurrent(&ctx); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Failed to get current context, error_code is: 0x%X.", rt_ret); + return RT_FAILED; + } + + for (ge::NodePtr &node : graph->GetDirectNode()) { + if (node == nullptr) { + continue; + } + if (node->GetType() != VARIABLE) { + continue; + } + vector_future.push_back(executor.commit( + [](ge::NodePtr &node, DavinciModel *model, rtContext_t ctx, uint32_t graph_id) -> Status { + if (model == nullptr) { + GELOGE(FAILED, "DavinciModel is NULL!"); + return FAILED; + } + rtError_t rt_ret = rtCtxSetCurrent(ctx); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Failed to set context, error_code is: 0x%X.", rt_ret); + return RT_FAILED; + } + uint32_t allocated_graph_id = 0; + Status ret = + VarManager::Instance(model->session_id_)->GetAllocatedGraphId(node->GetName(), allocated_graph_id); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "var has not been allocated, node:%s, graph_id:%u.", node->GetName().c_str(), + graph_id); + return INTERNAL_ERROR; + } + uint32_t changed_graph_id = 0; + ret = VarManager::Instance(model->session_id_)->GetChangedGraphId(node->GetName(), changed_graph_id); + bool call_trans_var = + (ret == SUCCESS && changed_graph_id == graph_id && changed_graph_id != allocated_graph_id); + if (call_trans_var) { + GELOGI("VarManager::GetChangedGraphId() success, 
node:%s, graph_id:%u.", node->GetName().c_str(), graph_id); + VarTransRoad *trans_road = VarManager::Instance(model->session_id_)->GetTransRoad(node->GetName()); + if (trans_road == nullptr) { + GELOGI("The variable %s does not have any trans road", node->GetName().c_str()); + return SUCCESS; + } + ret = TransVarData(node, *trans_road, model->session_id_, model->device_id_); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "TransVarData failed, node:%s, graph_id:%u.", node->GetName().c_str(), graph_id); + return INTERNAL_ERROR; + } + VarManager::Instance(model->session_id_)->RemoveChangedGraphId(node->GetName()); + } + return SUCCESS; + }, + node, this, ctx, graph_id)); + } + + Status ret_status; + for (size_t i = 0; i < vector_future.size(); ++i) { + ret_status = vector_future[i].get(); + if (ret_status != SUCCESS) { + GELOGE(ret_status, "TransAllVarData:: trans %zu vardata failed", i); + return ret_status; + } + } + + GELOGI("TransAllVarData success."); + + return SUCCESS; +} + +void DavinciModel::InitDataDumper() { + GELOGI("data dumper init, name: %s, id: %u.", name_.c_str(), model_id_); + data_dumper_.SetModelName(name_); + data_dumper_.SetModelId(model_id_); + data_dumper_.SetMemory(runtime_param_); + + int32_t device_id = 0; + rtError_t rt_ret = rtGetDevice(&device_id); + if (rt_ret != RT_ERROR_NONE || device_id < 0) { + GELOGE(RT_FAILED, "Call rtGetDevice fail, ret = 0x%X, device_id = %d.", rt_ret, device_id); + return; + } + data_dumper_.SetDeviceId(device_id); + GELOGI("InitDataDumper end."); +} + +uint32_t DavinciModel::GetFlowctrlIndex(uint32_t op_index) { + std::lock_guard lock(flowctrl_op_index_internal_map_mutex_); + return (++flowctrl_op_index_internal_map_[op_index]) - 1; +} + +void DavinciModel::PushHcclStream(rtStream_t value) { + std::lock_guard lock(all_hccl_stream_list_mutex_); + all_hccl_stream_list_.push_back(value); +} + +Status TransTensor(uint8_t *var_data, const NodePtr &var_src, const NodePtr &var_dst, formats::TransResult &result) { + 
GE_CHECK_NOTNULL(var_src); + GE_CHECK_NOTNULL(var_src->GetOpDesc()); + GE_CHECK_NOTNULL(var_dst); + GE_CHECK_NOTNULL(var_dst->GetOpDesc()); + auto src_data_shape_size = var_src->GetOpDesc()->GetOutputDesc(0).GetShape().GetShapeSize(); + auto src_data_datatype = var_src->GetOpDesc()->GetOutputDesc(0).GetDataType(); + auto dst_data_datatype = var_dst->GetOpDesc()->GetOutputDesc(0).GetDataType(); + GE_IF_BOOL_EXEC( + src_data_datatype != dst_data_datatype, + auto ret = formats::TransDataType( + {var_data, static_cast(src_data_shape_size), src_data_datatype, dst_data_datatype}, result); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "trans var data on host failed"); + return ret; + }); + return SUCCESS; +} + +Status DavinciModel::CopyTensorFromSrcVarNode(const NodePtr &var_src, const NodePtr &var_dst) { + /// after FE fusion pass, input num of applymomentum op was changed, 0th input is var_fp32, 6th input is + /// var_fp16(new). + /// unlink edges between var_fp32 and "dst_node" (need fp16) of var_fp32, add edge between var_fp16 and dst_node. + /// need copy value from var_fp32 to var_fp16. 
+ /// [opdesc of var_src and var_dst are checked before passed in, no need to check if they are nullptr] + GE_IF_BOOL_EXEC(var_src == nullptr || var_dst == nullptr, GELOGE(FAILED, "node var is nullptr"); return FAILED); + // src_node output_desc (fp32) + GeTensorDesc output_desc = var_src->GetOpDesc()->GetOutputDesc(0); + auto src_data_type = output_desc.GetDataType(); + auto src_shape = output_desc.GetShape(); + auto src_format = output_desc.GetFormat(); + GELOGI("src_node %s, src_format %s, src_shape %s, src_type %s", var_src->GetName().c_str(), + TypeUtils::FormatToSerialString(src_format).c_str(), formats::ShapeToString(src_shape).c_str(), + TypeUtils::DataTypeToSerialString(src_data_type).c_str()); + // dst_node output_desc (fp16) + GeTensorDesc dst_tensor_desc = var_dst->GetOpDesc()->GetOutputDesc(0); + auto data_type = dst_tensor_desc.GetDataType(); + auto data_shape = dst_tensor_desc.GetShape(); + auto data_format = dst_tensor_desc.GetFormat(); + GELOGI("dst_node %s, src_format %s, src_shape %s, src_type %s", var_dst->GetName().c_str(), + TypeUtils::FormatToSerialString(data_format).c_str(), formats::ShapeToString(data_shape).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str()); + // Sync var data from device + std::unique_ptr var_src_data; + RtContextSwitchGuard switch_context(RT_CTX_NORMAL_MODE, device_id_); + // copy from src_node + auto ret = CopyVarFromDevice(session_id_, var_src, var_src_data, output_desc); + GE_IF_BOOL_EXEC(ret != SUCCESS, GELOGE(FAILED, "Copy Var From Device failed"); return ret); + // trans dtype + formats::TransResult trans_result; + ret = TransTensor(var_src_data.get(), var_src, var_dst, trans_result); + GE_IF_BOOL_EXEC(ret != SUCCESS, GELOGE(INTERNAL_ERROR, "trans var data on host failed"); return ret); + // reset src value. 
+ void *var_device = nullptr; + ret = ReAssignVarAddr(session_id_, var_dst->GetName(), dst_tensor_desc, &var_device); + GE_IF_BOOL_EXEC(ret != SUCCESS, GELOGE(INTERNAL_ERROR, "assign mem failed"); return ret); + // copy to device + ret = CopyVarToDevice(var_dst, trans_result, var_device); + GE_IF_BOOL_EXEC(ret != SUCCESS, GELOGE(ret, "Failed to send var data to device"); return ret); + return SUCCESS; +} + +Status DavinciModel::CopyVarData(ComputeGraphPtr &compute_graph) { + GELOGI("CopyVarData start: session_id:%lu.", session_id_); + if (compute_graph == nullptr) { + GELOGE(FAILED, "compute_graph is nullptr"); + return FAILED; + } + + string cp_from_node; + bool copy_value = false; + for (auto &node : compute_graph->GetAllNodes()) { + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr || node->GetOpDesc()->GetType() != VARIABLE, continue); + GE_IF_BOOL_EXEC(ge::AttrUtils::GetStr(node->GetOpDesc(), "_copy_from_var_node", cp_from_node), + GELOGI("Get original type of cp_from_node")); + if (cp_from_node.length() != 0) { + (void)ge::AttrUtils::GetBool(node->GetOpDesc(), "_copy_value", copy_value); + if (!copy_value) { + auto src_node = compute_graph->FindNode(cp_from_node); + GE_CHECK_NOTNULL(src_node); + GELOGI("current_var_node__: [%s] copy_from_var_node__: [%s].", node->GetName().c_str(), + src_node->GetName().c_str()); + auto ret = CopyTensorFromSrcVarNode(src_node, node); + GE_IF_BOOL_EXEC(ret != SUCCESS, GELOGE(FAILED, "copy tensor failed!"); return FAILED); + // only copy once + (void)ge::AttrUtils::SetBool(node->GetOpDesc(), "_copy_value", true); + } + } + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/davinci_model.h b/src/ge/graph/load/new_model_manager/davinci_model.h new file mode 100644 index 00000000..822b87eb --- /dev/null +++ b/src/ge/graph/load/new_model_manager/davinci_model.h @@ -0,0 +1,625 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DAVINCI_MODEL_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DAVINCI_MODEL_H_ + +#include +#include +#include +#include +#include +#include + +#include "cce/cce_def.hpp" +#include "cce/dnn.h" +#include "cce/dnn_base_def.hpp" +#include "cce/taskdown_common.hpp" +#include "common/ge_types.h" +#include "common/helper/model_helper.h" +#include "common/helper/om_file_helper.h" +#include "common/opskernel/ge_task_info.h" +#include "common/types.h" +#include "framework/common/util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/load/new_model_manager/data_dumper.h" +#include "graph/load/new_model_manager/data_inputer.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "graph/model.h" +#include "graph/node.h" +#include "graph/op_desc.h" +#include "graph/operator.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/tensor_utils.h" +#include "mmpa/mmpa_api.h" +#include "proto/task.pb.h" +#include "task_info/task_info.h" + +#define WEIGHTS_ADDR_TO_CCE(var) + +namespace ge { +using std::vector; +const uint32_t MEM_ALIGN_SIZE = 512; + +// comments +class DavinciModel { + public: + /// + /// @ingroup domi_ome + /// @brief DavinciModel constructor + /// @author + /// + DavinciModel(int32_t priority, const std::shared_ptr &listener); + + /// + /// @ingroup domi_ome + /// @brief DavinciModel desctructor, free Parse and Init resources + /// @author + /// + ~DavinciModel(); + + /// + /// @ingroup domi_ome 
+ /// @brief apply model to model_def_ + /// + Status Assign(const GeModelPtr &ge_model); + + /// + /// @ingroup domi_ome + /// @brief DavinciModel initialization, including Stream, ccHandle, Event, DataInputer, etc + /// @return execute result + /// @author + /// + Status Init(void *dev_ptr = nullptr, size_t memsize = 0, void *weight_ptr = nullptr, size_t weightsize = 0); + + /// + /// @ingroup ge + /// @brief ACL case, Load task list with queue. + /// @param [in] input_que_ids: input queue ids from user, nums equal Data Op. + /// @param [in] output_que_ids: input queue ids from user, nums equal NetOutput Op. + /// @return: 0 for success / others for fail + /// + Status SetQueIds(const std::vector &input_queue_ids, const std::vector &output_queue_ids); + + /// + /// @ingroup domi_ome + /// @brief Get DataInputer + /// @return model ID + /// + uint32_t Id() const { return model_id_; } + + /// + /// @ingroup domi_ome + /// @brief Get DataInputer + /// @return model ID + /// + void SetId(uint32_t model_id) { model_id_ = model_id; } + + static void *Run(DavinciModel *model_pointer); + + /// + /// @ingroup domi_ome + /// @brief NnExecute + /// @param [in] stream execute stream + /// @param [in] async_mode is asynchronize mode. 
+ /// @param [in] input_data model input data + /// @param [out] output_data model output data + /// + Status NnExecute(rtStream_t stream, bool async_mode, const InputData &input_data, OutputData &output_data); + + /// + /// @ingroup domi_ome + /// @brief get sys mode + /// @return SysMode + /// + static SysMode GetSysMode(); + + /// + /// @ingroup domi_ome + /// @brief set sys mode + /// @return Status + /// + static Status SetSysMode(SysMode mode); + + /// + /// @ingroup domi_ome + /// @brief lock mutex run flag + /// @author + /// + void LockRunFlg() { mux_run_flg_.lock(); } + + /// + /// @ingroup domi_ome + /// @brief unlock mutex run flag + /// @author + /// + void UnlockRunFlg() { mux_run_flg_.unlock(); } + + /// + /// @ingroup domi_ome + /// @brief get DataInputer + /// @return DataInputer pointer + /// + DataInputer *const GetDataInputer() const { return data_inputer_; } + + // get Stream number + uint32_t StreamNum() const { return runtime_param_.stream_num; } + + // get Event number + uint32_t EventNum() const { return runtime_param_.event_num; } + + // get batch number + uint32_t BatchNum() const { return runtime_param_.batch_num; } + + // get session id + uint64_t SessionId() const { return runtime_param_.session_id; } + + vector GetOpDesc() { + vector opDescVector; + GE_IF_BOOL_EXEC(ge::AttrUtils::GetListOpDesc(GetGeModel(), MODEL_ATTR_FUSION_MODEL_DEF, opDescVector), + GELOGI("get opDesc of opDescVector")); + return opDescVector; + } + + // get model priority + int32_t Priority() const { return priority_; } + + // get total mem size + size_t TotalMemSize() const { return runtime_param_.mem_size; } + + // model name + string Name() { return name_; } + + // version + uint32_t Version() const { return version_; } + + // get total weights mem size + size_t TotalWeightsMemSize() const { return runtime_param_.weight_size; } + + size_t TotalVarMemSize() const { return runtime_param_.var_size; } + + // get base memory address + uint8_t *MemBase() { return 
mem_base_; } + + // get weight base memory address + uint8_t *WeightsMemBase() { return weights_mem_base_; } + + uint8_t *VarMemBase() { return var_mem_base_; } + + // get Event list + const vector &GetEventList() const { return event_list_; } + + const vector &GetStreamList() const { return stream_list_; } + + const vector &GetLabelList() const { return label_list_; } + + Status DestroyThread(); + + // get Op + map GetOpList() const { return op_list_; } + + OpDescPtr GetOpByIndex(uint32_t index) { + if (op_list_.find(index) == op_list_.end()) { + return nullptr; + } + return op_list_.at(index); + } + + OpDescPtr GetVariableOp(const string &name) { + for (auto op_desc : variable_op_list_) { + if (op_desc != nullptr && op_desc->GetName() == name) { + return op_desc; + } + } + return nullptr; + } + // get taskid to op name + const map &GetTaskIdOpName() const { return op_task_id_map_; } + + // get updated task info list + std::vector GetTaskList() { return task_list_; } + + /// + /// @ingroup domi_ome + /// @brief get model input and output format + /// @return ccTensorFormat_t current model input and output format + /// + ge::Format GetFormat(); + + rtModel_t GetRtModelHandle() { + rtModel_t res = rt_model_handle_; + return res; + } + + uint64_t GetRtBaseAddr() const { return runtime_param_.logic_mem_base; } + + uint64_t GetRtWeightAddr() const { return runtime_param_.logic_weight_base; } + + uint64_t GetRtVarAddr() const { return runtime_param_.logic_var_base; } + + uint32_t GetFlowctrlIndex(uint32_t op_index); + + void PushHcclStream(rtStream_t value); + + bool IsBroadCastOpData(const ge::NodePtr &var_node); + + /// + /// @ingroup domi_ome + /// @brief For TVM Op, avoid Addr Reuse. 
+ /// @return void* + /// + static const char *GetRegisterStub(const string &tvm_binfile_key, const string &session_graph_model_id = ""); + + /// + /// @ingroup domi_ome + /// @brief get model input and output desc info + /// @param [out] input_shape model input size + /// @param [out] output_shape model output size + /// @return execute result + /// + Status GetInputOutputDescInfo(vector &input_desc, vector &output_desc); + + Status GetInputOutputDescInfo(vector &input_desc, vector &output_desc, + std::vector &inputFormats, std::vector &output_formats); + + /// + /// @ingroup domi_ome + /// @brief Get model_id. + /// @return model_id + /// + uint32_t GetModelId() const { return model_id_; } + + /// + /// @ingroup domi_ome + /// @brief get unique identification for op when load two or more models + /// @param [in] op_desc : current op. + /// @param [in] string identification: unique identification for current op. + /// @return None + /// + void GetUniqueId(const OpDescPtr &op_desc, std::string &unique_identification); + + /// + /// @ingroup domi_ome + /// @brief get model input and output desc for zero copy + /// @param [out] input_shape model input size + /// @param [out] output_shape model output size + /// @return execute result + /// + Status GetInputOutputDescInfoForZeroCopy(vector &input_desc, + vector &output_desc); + + Status GetInputOutputDescInfoForZeroCopy(vector &input_desc, + vector &output_desc, + std::vector &inputFormats, std::vector &output_formats); + + /// + /// @ingroup domi_ome + /// @brief copy input data to model + /// @return Status + /// + Status CopyInputDataToModel(const std::vector &data, uint32_t data_op_index, bool device_data); + + Status ReturnResult(uint32_t model_id, uint32_t data_id, const bool rslt_flg, const bool seq_end_flg, + OutputData *output_data); + + Status ReturnNoOutput(uint32_t model_id, uint32_t data_id); + + /// + /// @ingroup domi_ome + /// @brief dump all op input and output information + /// @param [in] op_list 
model_id + /// @return Status + /// + Status DumpOpInputOutput(map &op_list, uint32_t model_id); + + /// + /// @ingroup domi_ome + /// @brief dump single op input and output information + /// @param [in] dump_op model_id + /// @return Status + /// + Status DumpSingleOpInputOutput(const OpDescPtr &dump_op, uint32_t model_id); + + Status ModelRunStart(); + + /// + /// @ingroup domi_ome + /// @brief stop run model + /// @return Status + /// + Status ModelRunStop(); + + /// + /// @ingroup domi_ome + /// @brief model run flag + /// @return Status + /// + bool RunFlag() const { return run_flg_; } + + Status GetOutputDescInfo(vector &output_desc, std::vector &formats); + + /// + /// @ingroup domi_ome + /// @brief Set Session Id + /// @return void + /// + void SetSessionId(uint64_t session_id) { session_id_ = session_id; } + + /// + /// @ingroup domi_ome + /// @brief Get Session Id + /// @return sessionID + /// + uint64_t GetSessionId() const { return session_id_; } + + /// + /// @ingroup domi_ome + /// @brief SetDeviceId + /// @return void + /// + void SetDeviceId(uint32_t device_id) { device_id_ = device_id; } + + /// + /// @ingroup domi_ome + /// @brief Get device Id + /// @return device id + /// + uint32_t GetDeviceId() const { return device_id_; } + + /// + /// @ingroup domi_ome + /// @brief Set Train Mode + /// @return void + /// + void SetTrainMode(bool mode) { is_train_mode_ = mode; } + + /// + /// @ingroup domi_ome + /// @brief Get Train Mode + /// @return bool true + /// + bool GetTrainMode() { return is_train_mode_; } + + GeModelPtr GetGeModel() { return ge_model_; } + + const RuntimeParam &GetRuntimeParam() { return runtime_param_; } + + int32_t GetDataInputTid() const { return dataInputTid; } + void SetDataInputTid(int32_t data_input_tid) { dataInputTid = data_input_tid; } + + /// + /// @ingroup domi_ome + /// @brief Save outside address of Data or NetOutput used info for ZeroCopy. 
+ /// @param [in] const std::vector &outside_addrs: address of task + /// @param [in] const void *args_offset: arguments address save the address. + /// @return None. + /// + void SetZeroCopyAddr(const std::vector &outside_addrs_, void *args_offset); + + DavinciModel &operator=(const DavinciModel &model) = delete; + + DavinciModel(const DavinciModel &model) = delete; + + private: + // memory address of weights + uint8_t *weights_mem_base_; + uint8_t *var_mem_base_; + // memory address of model + uint8_t *mem_base_; + bool is_inner_mem_base_; + bool is_inner_weight_base_; + // input data manager + DataInputer *data_inputer_; + + int32_t dataInputTid; + + /// + /// @ingroup domi_ome + /// @brief Save Data and NetOutput address info for ZeroCopy. + /// @param [in] const std::vector &outside_addrs + /// @return None. + /// + void SetOutsideAddr(const std::vector &outside_addrs); + Status ModelZeroCopy(const InputData &input_data, OutputData &output_data); + Status ZeroCopyInput(const InputData &input_data); + Status ZeroCopyOutput(const OutputData &output_data); + Status ZeroCopyImpl(const void *src_addr, const DataBuffer &data_buf); + + Status CopyInputData(const InputData ¤t_data, bool device_data = false); + + Status CopyTransData(const std::vector &data, uint32_t data_index, uint32_t data_op_index, + const std::vector &outputs, uint32_t output_size); + + Status CopyPlainData(const std::vector &data, uint32_t data_index, uint32_t data_op_index, + const std::vector &outputs, uint32_t output_size, rtMemcpyKind_t kind); + + Status CopyOutputData(uint32_t model_id, uint32_t data_id, OutputData &output_data); + + Status CopyOutputDataToUser(OpDescPtr &op_desc, std::vector &blobs, uint32_t &data_index); + + Status SyncVarData(); + + Status SyncDataAndDump(); + + Status InitModelMem(void *dev_ptr, size_t memsize, void *weight_ptr, size_t weightsize); + + Status GetInputDescInfo(vector &input_desc, std::vector &formats); + + Status InitTaskInfo(domi::ModelTaskDef 
&modelTaskInfo); + + void UnbindHcomStream(); + + Status DistributeTask(); + + uint8_t *MallocFeatureMapMem(uint64_t data_size); + + uint8_t *MallocWeightsMem(uint32_t weights_size); + + void FreeFeatureMapMem(); + + void FreeWeightsMem(); + + void ReleaseTask(); + + void UnbindTaskSinkStream(); + + /// + /// @ingroup domi_ome + /// @brief Constant Op Init. + /// @return Status + /// + Status InitConstant(const ConstOpDescPtr &op_desc) const; + + /// + /// @ingroup domi_ome + /// @brief TVM Op Init. + /// @return Status + /// + Status InitTbeHandle(const OpDescPtr &op_desc); + + void StoreTbeHandle(const std::string &handle_key); + void CleanTbeHandle(); + + /// + /// @ingroup domi_ome + /// @brief Init model stream for NN model. + /// @return Status + /// + Status InitModelStream(rtStream_t stream, bool async_mode); + + /// + /// @ingroup domi_ome + /// @brief insert active_stream_indication_ + /// @return Status + /// + Status MarkActiveStream(const OpDescPtr &op_desc); + + void InitRuntimeParams(); + + void CheckHasHcomOp(); + + Status DoTaskSink(); + + void CreateOutput(uint32_t index, OpDescPtr &op_desc, InputOutputDescInfo &output, uint32_t &format_result); + + uint32_t GetGraphID(const std::string &session_graph_id); + + Status TransAllVarData(ComputeGraphPtr &graph, uint32_t graph_id); + Status CopyVarData(ComputeGraphPtr &graph); + Status CopyTensorFromSrcVarNode(const NodePtr &var_src, const NodePtr &var_dst); + + void InitDataDumper(); + + bool is_model_has_inited_; + uint32_t model_id_; + string name_; + uint32_t version_; + GeModelPtr ge_model_; + + map op_list_; + + // data op_desc + vector data_op_list_; + + vector output_op_list_; + + vector variable_op_list_; + + vector output_size_list_; + + // output op: save cce op actual needed memory size + vector output_memory_size_list_; + + std::thread thread_id_; + + std::shared_ptr listener_; + + bool run_flg_; + + std::mutex mux_run_flg_; + + static SysMode mode_; + + static std::mutex mutex_mode_; + + 
int32_t priority_; + + vector stream_list_; + + std::mutex all_hccl_stream_list_mutex_; + vector all_hccl_stream_list_; + + vector event_list_; + + vector label_list_; + + std::mutex outside_addrs_mutex_; + std::map> outside_addrs_; + + std::vector task_list_; + // rt_moodel_handle + rtModel_t rt_model_handle_; + + rtStream_t rt_model_stream_; + + bool is_inner_model_stream_; + + // ACL queue schedule, save queue ids for Init. + std::vector input_queue_ids_; + std::vector output_queue_ids_; + + // save input/output tensor descriptor in maps + std::map data_op_input_tensor_desc_map_; + std::map data_op_output_tensor_desc_map_; + + bool support_mem_shared_flag_; + + uint64_t session_id_; + + uint32_t device_id_; + + bool is_train_mode_; + + std::mutex flowctrl_op_index_internal_map_mutex_; + std::map flowctrl_op_index_internal_map_; + std::set active_stream_indication_; + + std::shared_ptr model_task_def_; + std::set aicpu_streams_; + std::set hcom_streams_; + RuntimeParam runtime_param_; + TBEKernelStore tbekernel_store_; + + static std::mutex tvm_bin_mutex_; // lock for tvm maps. 
+ static std::set tvm_bin_kernel_; + + std::map used_tbe_handle_map_; + + // for profiling + std::map op_name_map_; + std::map op_task_id_map_; + + int64_t maxDumpOpNum_; + // for data dump + DataDumper data_dumper_; +}; + +#define TIME_LOG_HEAD_FMT " OP_ID OP_NAME OP_TYPE ELAPSED TIME(ms)" +#define OP_TIME_LOG_FMT "%d_%-5d %-5d | %-20s | %-15s | %10f | %10d" +#define MODEL_TIME_LOG_FMT "******** Model %d ends, elapsed time: %f ms ********" +const size_t INPUT_OUTPUT_NAME_MAX_LEN = 256; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DAVINCI_MODEL_H_ diff --git a/src/ge/graph/load/new_model_manager/davinci_model_parser.cc b/src/ge/graph/load/new_model_manager/davinci_model_parser.cc new file mode 100644 index 00000000..b0fbf8e4 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/davinci_model_parser.cc @@ -0,0 +1,98 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/davinci_model_parser.h" + +#include +#include +#include +#include "securec.h" + +#include "common/debug/log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status ModelInfoParser(const ModelData &model, ModelInfo &model_info) { + GE_CHK_RT_RET(rtSetDevice(0)); + try { + uint32_t model_len = 0; + uint8_t *model_data = nullptr; + + Status ret = DavinciModelParser::ParseModelContent(model, model_data, model_len); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, GE_CHK_RT(rtDeviceReset(0)); return ret, "Parse model failed"); + + auto *file_header = reinterpret_cast(model.model_data); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(file_header == nullptr, GE_CHK_RT(rtDeviceReset(0)); + return PARAM_INVALID, "file_header is null."); + + model_info.version = file_header->version; + model_info.is_encrypt = false; + GE_IF_BOOL_EXEC(ENCRYPTED == file_header->is_encrypt, model_info.is_encrypt = true); + + std::shared_ptr davinci_model = + std::shared_ptr(new (std::nothrow) DavinciModel(model.priority, nullptr)); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(davinci_model == nullptr, GE_CHK_RT(rtDeviceReset(0)); + return PARAM_INVALID, "davinci_model is null."); + + GE_MAKE_GUARD(davinci_model, [&] { davinci_model = nullptr; }); + + ModelHelper model_helper; + ret = model_helper.LoadModel(model); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((ret != SUCCESS), GE_CHK_RT(rtDeviceReset(0)); return FAILED, "load model failed"); + + ret = davinci_model->Assign(model_helper.GetGeModel()); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, GE_CHK_RT(rtDeviceReset(0)); + return ret, "Parse davinci model data failed"); + + ret = davinci_model->Init(); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, GE_CHK_RT(rtDeviceReset(0)); + return ret, "Davinci model init failed"); + + vector input_list; + vector output_list; + + ret = davinci_model->GetInputOutputDescInfo(input_list, output_list); + + 
GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, GE_CHK_RT(rtDeviceReset(0)); + return ret, "Davinci model GetInputOutputDescInfo failed"); + + for (const auto &desc : input_list) { + model_info.input_desc.push_back(desc.shape_info); + } + for (const auto &desc : output_list) { + model_info.output_desc.push_back(desc.shape_info); + } + + model_info.name = davinci_model->Name(); + } catch (...) { + GELOGE(FAILED, "OM model parser failed, some exceptions occur !"); + GE_CHK_RT(rtDeviceReset(0)); + return FAILED; + } + + GE_CHK_RT(rtDeviceReset(0)); + + return SUCCESS; +} + +DavinciModelParser::DavinciModelParser() {} + +DavinciModelParser::~DavinciModelParser() {} +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/davinci_model_parser.h b/src/ge/graph/load/new_model_manager/davinci_model_parser.h new file mode 100644 index 00000000..8907c97d --- /dev/null +++ b/src/ge/graph/load/new_model_manager/davinci_model_parser.h @@ -0,0 +1,46 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DAVINCI_MODEL_PARSER_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DAVINCI_MODEL_PARSER_H_ + +#include +#include + +#include "common/debug/log.h" +#include "common/ge_types.h" +#include "common/model_parser/base.h" +#include "common/types.h" +#include "common/util.h" + +namespace ge { +class DavinciModelParser : public ModelParserBase { + public: + /// + /// @ingroup hiai + /// @brief constructor + /// + DavinciModelParser(); + + /// + /// @ingroup hiai + /// @brief destructor + /// + ~DavinciModelParser(); +}; +} // namespace ge + +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_DAVINCI_MODEL_PARSER_H_ diff --git a/src/ge/graph/load/new_model_manager/model_manager.cc b/src/ge/graph/load/new_model_manager/model_manager.cc new file mode 100644 index 00000000..b3325e14 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/model_manager.cc @@ -0,0 +1,773 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/model_manager.h" + +#include + +#include "cce/aicpu_engine_struct.h" +#include "cce/compiler_stub.h" +#include "common/l2_cache_optimize.h" +#include "common/profiling/profiling_manager.h" +#include "common/properties_manager.h" +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/davinci_model_parser.h" + +namespace ge { +thread_local uint32_t device_count = 0; +namespace { +const int kCmdParSize = 2; +} // namespace +std::shared_ptr ModelManager::GetInstance() { + static const std::shared_ptr instance_ptr = + shared_ptr(new (std::nothrow) ModelManager(), ModelManager::FinalizeForPtr); + return instance_ptr; +} + +ModelManager::ModelManager() { max_model_id_ = 0; } + +static Status KernelLaunchEx(aicpu::FWKAdapter::FWKOperateType opType, uint64_t session_id) { + STR_FWK_OP_KERNEL param_base = {}; + void *devicebase = nullptr; + const uint32_t kKernelType = 0; + param_base.fwkKernelType = kKernelType; + param_base.fwkKernelBase.fwk_kernel.opType = opType; + param_base.fwkKernelBase.fwk_kernel.sessionID = session_id; + + rtError_t rt_ret = rtMalloc(&(devicebase), sizeof(STR_FWK_OP_KERNEL), RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "malloc device memory failed."); + return FAILED; + } + + rt_ret = + rtMemcpy(devicebase, sizeof(STR_FWK_OP_KERNEL), ¶m_base, sizeof(STR_FWK_OP_KERNEL), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "memory copy to device failed."); + GE_CHK_RT(rtFree(devicebase)); + return FAILED; + } + + rtStream_t stream = nullptr; + rt_ret = rtStreamCreate(&stream, 0); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "create stream failed."); + GE_CHK_RT(rtFree(devicebase)); + return FAILED; + } + + rt_ret = rtKernelLaunchEx(devicebase, sizeof(STR_FWK_OP_KERNEL), 0, stream); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtKernelLaunchEx failed."); + 
GE_CHK_RT(rtFree(devicebase)); + GE_CHK_RT(rtStreamDestroy(stream)); + return FAILED; + } + rt_ret = rtStreamSynchronize(stream); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtStreamSynchronize failed."); + GE_CHK_RT(rtFree(devicebase)); + GE_CHK_RT(rtStreamDestroy(stream)); + return FAILED; + } + + rt_ret = rtFree(devicebase); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "free memory failed."); + GE_CHK_RT(rtStreamDestroy(stream)); + return FAILED; + } + rt_ret = rtStreamDestroy(stream); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtStreamDestroy failed."); + return FAILED; + } + return SUCCESS; +} + +void ModelManager::DestroyAicpuSession(uint64_t session_id) { + std::lock_guard lock(sess_ids_mutex_); + auto it = sess_ids_.find(session_id); + if (it == sess_ids_.end()) { + GELOGI("The session: %lu not created.", session_id); + return; + } else { + Status ret = KernelLaunchEx(aicpu::FWKAdapter::FWKOperateType::FWK_ADPT_SESSION_DESTROY, session_id); + if (ret != SUCCESS) { + GELOGW("The session: %lu destroy failed.", session_id); + } else { + (void)sess_ids_.erase(session_id); + GELOGI("The session: %lu destroyed.", session_id); + } + } +} + +ModelManager::~ModelManager() { + std::lock_guard lock(map_mutex_); + model_map_.clear(); + + GE_IF_BOOL_EXEC(device_count > 0, GE_CHK_RT(rtDeviceReset(0))); +} + +/// +/// @ingroup domi_ome +/// @brief set Device. 
If no device available, return failure +/// @return Status run result +/// @author +/// +Status ModelManager::SetDevice(int32_t deviceId) const { + GE_CHK_RT_RET(rtSetDevice(deviceId)); + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief load model online +/// @return Status run result +/// +Status ModelManager::LoadModelOnline(uint32_t &model_id, shared_ptr &model, + std::shared_ptr listener) { + GE_CHK_BOOL_RET_STATUS(listener.get() != nullptr, PARAM_INVALID, "Param incorrect, listener is null"); + GenModelId(&model_id); + + GE_CHK_STATUS_RET(SetDevice(static_cast(GetContext().DeviceId())), "Set device failed, model id:%u.", + model_id); + + std::shared_ptr davinci_model = MakeShared(0, listener); + if (davinci_model == nullptr) { + GELOGE(FAILED, "davinci_model is nullptr"); + return FAILED; + } + + davinci_model->SetId(model_id); + davinci_model->SetDeviceId(GetContext().DeviceId()); + + Status ret = SUCCESS; + do { + GeModelPtr ge_model; + GE_IF_BOOL_EXEC(ModelHelper::TransModelToGeModel(model, ge_model) != SUCCESS, + GELOGW("trans model to ge_model failed."); + break;); + GE_TIMESTAMP_START(Assign); + GE_IF_BOOL_EXEC(SUCCESS != (ret = davinci_model->Assign(ge_model)), GELOGW("assign model to modeldef failed."); + break;); + GE_TIMESTAMP_END(Assign, "GraphLoader::ModelAssign"); + + GE_TIMESTAMP_START(Init); + GE_IF_BOOL_EXEC(SUCCESS != (ret = davinci_model->Init()), GELOGW("DavinciInit failed."); break;); + GE_TIMESTAMP_END(Init, "GraphLoader::ModelInit"); + + InsertModel(model_id, davinci_model); + + GELOGI("Parse model %u success.", model_id); + } while (0); + + GE_CHK_RT(rtDeviceReset(static_cast(GetContext().DeviceId()))); + + return ret; +} + +void ModelManager::InsertModel(uint32_t id, std::shared_ptr &davinci_model) { + GE_CHK_BOOL_EXEC(davinci_model != nullptr, return, "davinci_model ptr is null, id: %u", id); + std::lock_guard lock(map_mutex_); + model_map_[id] = davinci_model; +} + +Status ModelManager::DeleteModel(uint32_t id) { + 
std::lock_guard lock(map_mutex_); + + auto it = model_map_.find(id); + if (it == model_map_.end()) { + GELOGE(PARAM_INVALID, "model id %u does not exists.", id); + return PARAM_INVALID; + } + + (void)model_map_.erase(it); + free_model_id_.push_back(id); + return SUCCESS; +} + +Status ModelManager::UnLoadAllModel(int32_t DeviceId) { + vector id_list; + + for (const auto &it : model_map_) { + uint32_t model_id = it.first; + GELOGI("Unload All model : model id : %u", model_id); + id_list.push_back(model_id); + GE_CHK_STATUS_RET(Stop(model_id), "UnLoadAllModel: Stop model : %u failed.", model_id); + } + + for (const auto &id : id_list) { + GE_CHK_STATUS_RET(UnloadModeldef(id), "UnLoadAllModel: Unload model : %u failed.", id); + } + + return SUCCESS; +} + +std::shared_ptr ModelManager::GetModel(uint32_t id) { + std::lock_guard lock(map_mutex_); + + auto it = model_map_.find(id); + return (it == model_map_.end()) ? nullptr : it->second; +} + +Status ModelManager::Unload(uint32_t model_id) { + GE_CHK_STATUS_RET(DeleteModel(model_id), "failed to unload model id: %u", model_id); + if (device_count > 0) { + device_count--; + GELOGI("Unload model %u success.", model_id); + } else { + GELOGI("Unload model %u success.no need reset device,device_count: %u", model_id, device_count); + } + + return SUCCESS; +} + +Status ModelManager::UnloadModeldef(uint32_t model_id) { + GE_CHK_STATUS_RET(DeleteModel(model_id), "failed to unload modeldef id: %u", model_id); + return SUCCESS; +} + +Status ModelManager::DataInput(const InputData &input_data, OutputData &output_data) { + GELOGI("calling the DataInput"); + + SysMode mode = DavinciModel::GetSysMode(); + if ((mode == RESET) || (mode == STOP)) { + GELOGE(domi::MODEL_NOT_READY, "System mode is reset or stop"); + return domi::MODEL_NOT_READY; + } + + shared_ptr data_wrap(new (std::nothrow) InputDataWrapper()); + GE_CHECK_NOTNULL(data_wrap); + + Status status = data_wrap->Init(input_data, output_data); + if (status != SUCCESS) { + 
GELOGE(domi::PUSH_DATA_FAILED, "Init InputDataWrapper failed, input data index: %u.", input_data.index); + return domi::PUSH_DATA_FAILED; + } + + uint32_t model_id = input_data.model_id; + output_data.model_id = model_id; + + std::shared_ptr model = GetModel(model_id); + + GE_CHK_BOOL_RET_STATUS(model != nullptr, PARAM_INVALID, "Invalid Model ID %u in InputData! ", model_id); + + GE_IF_BOOL_EXEC(model->GetDataInputTid() == 0, model->SetDataInputTid(mmGetTid())); + + DataInputer *inputer = model->GetDataInputer(); + GE_CHECK_NOTNULL(inputer); + if (inputer->Push(data_wrap) != SUCCESS) { + GELOGE(domi::DATA_QUEUE_ISFULL, "Data queue is full, please call again later, model_id %u ", model_id); + return domi::DATA_QUEUE_ISFULL; + } + GELOGD("Data input success, model id:%u", model_id); + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief load Input and output TensorInfor for Model +/// @return Status run result +/// +Status ModelManager::DataInputTensor(uint32_t model_id, const std::vector &inputs, + std::vector &outputs) { + SysMode mode = DavinciModel::GetSysMode(); + if ((mode == RESET) || (mode == STOP)) { + GELOGE(domi::MODEL_NOT_READY, "System mode is reset or stop"); + return domi::MODEL_NOT_READY; + } + std::shared_ptr model = GetModel(model_id); + GE_CHECK_NOTNULL(model); + + InputData input_data; + input_data.model_id = model_id; + input_data.timeout = 0; + input_data.timestamp = 0; + input_data.index = 0; + + std::size_t index = 0; + for (const auto &item : model->GetOpList()) { + auto op = item.second; + GE_CHECK_NOTNULL(op); + if (op->GetType() == DATA) { + GE_CHECK_GE(inputs.size(), 1); + GE_CHECK_GE(inputs.size() - 1, index); + + DataBuffer data; + data.data = inputs[index].data.data; + data.length = inputs[index].data.length; + input_data.blobs.push_back(data); + index++; + } + } + + CHECK_FALSE_EXEC(input_data.blobs.size() >= inputs.size(), + GELOGW("cur_inputs size = %zu, inputs size = %zu.", input_data.blobs.size(), inputs.size());); + + 
OutputData output_data; + output_data.model_id = model_id; + for (size_t i = 0; i < outputs.size(); i++) { + DataBuffer data; + data.data = outputs[i].data.data; + data.length = outputs[i].data.length; + output_data.blobs.push_back(data); + } + + shared_ptr data_wrap(new (std::nothrow) InputDataWrapper()); + GE_CHECK_NOTNULL(data_wrap); + + GE_CHK_STATUS_EXEC(data_wrap->Init(input_data, output_data), return domi::PUSH_DATA_FAILED, + "Init InputDataWrapper failed,input data model_id is : %u.", model_id); + + GE_CHK_BOOL_RET_STATUS(model != nullptr, PARAM_INVALID, "Invalid Model ID %u in InputData! ", model_id); + + DataInputer *inputer = model->GetDataInputer(); + GE_CHECK_NOTNULL(inputer); + + GE_CHK_STATUS_EXEC(inputer->Push(data_wrap), return domi::DATA_QUEUE_ISFULL, + "Data queue is full, please call again later, model_id %u ", model_id); + + GELOGD("Data input success, model id:%u", model_id); + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief create model thread, start to execute model +/// @param [in] model_id Model ID to be started +/// @return Status model run result +/// @author +/// +Status ModelManager::Start(uint32_t model_id) { + std::shared_ptr davinci_model = GetModel(model_id); + + GE_CHK_BOOL_RET_STATUS(davinci_model != nullptr, PARAM_INVALID, "Invalid Model ID %u to start! 
", model_id); + + Status status = davinci_model->ModelRunStart(); + if (status == SUCCESS) { + GELOGI("Start model %u success.", model_id); + } + + return status; +} + +/// +/// @ingroup domi_ome +/// @brief Model ID stop +/// @only when unloaded +/// @param [in] model_id Model ID to be stopped +/// @return Status model stop result +/// @author +/// +Status ModelManager::Stop(uint32_t model_id) { + std::shared_ptr davinci_model = GetModel(model_id); + GE_CHK_BOOL_RET_STATUS(davinci_model != nullptr, PARAM_INVALID, "Invalid Model ID %u to stop!", model_id); + + Status status = davinci_model->ModelRunStop(); + if (status == SUCCESS) { + GELOGI("Stop model %u success.", model_id); + } + + return status; +} + +/// +/// @ingroup domi_ome +/// @brief Command handle +/// @iterator 1 only Ieference, Debug 2 modes +/// @param [in] command command to handle +/// @return Status command handle result +/// @author +/// +Status ModelManager::HandleCommand(const Command &command) { + static const std::map> cmds = { + {"profile", HandleProfileCommand}, {"dump", HandleDumpCommand}, {"profiling", HandleAclProfilingCommand}}; + + auto iter = cmds.find(command.cmd_type); + if (iter == cmds.end()) { + GELOGE(PARAM_INVALID, "Unsupported command: %s", command.cmd_type.c_str()); + return PARAM_INVALID; + } else { + return iter->second(command); + } +} + +Status ModelManager::HandleAclProfilingCommand(const Command &command) { + if (command.cmd_params.size() < kCmdParSize) { + GELOGE(PARAM_INVALID, "When the cmd_type is 'profiling', the size of cmd_params must larger than 2."); + return PARAM_INVALID; + } + + std::string map_key = command.cmd_params[0]; + std::string value = command.cmd_params[1]; + if (map_key == PROFILE_CONFIG) { + ProfilingManager::Instance().SetProfilingConfig(value); + } + + return SUCCESS; +} + +Status ModelManager::HandleProfileCommand(const Command &command) { + if (command.cmd_params.size() < kCmdParSize) { + GELOGE(PARAM_INVALID, "When the cmd_type is 'profile', 
the size of cmd_params must larger than 2."); + return PARAM_INVALID; + } + + std::string map_key = command.cmd_params[0]; + std::string value = command.cmd_params[1]; + + GELOGI("Profiling mode, Command key:%s , value:%s ", map_key.c_str(), value.c_str()); + + auto iter = PROFILE_COMPONENT_MAP.find(map_key); + if (iter != PROFILE_COMPONENT_MAP.end()) { + std::string property_value = (value == "on") ? "1" : "0"; + PropertiesManager::Instance().SetPropertyValue(iter->second, property_value); + } + + if ((map_key == PROFILER_JOBCTX || map_key == PROFILER_TARGET_PATH || + map_key == RTS_PROFILE_PATH)) { + PropertiesManager::Instance().SetPropertyValue(map_key, value); + } + + if ((map_key == PROFILE_STOP_KEY) && (value == PROFILE_STOP_VALUE)) { + rtError_t rt_ret = rtProfilerStop(); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(PARAM_INVALID, "Call rtProfilerStop ret:%d", rt_ret); + return PARAM_INVALID; + } + } + + return SUCCESS; +} + +Status ModelManager::HandleDumpCommand(const Command &command) { + if (command.cmd_params.size() % kCmdParSize != 0) { + GELOGE(PARAM_INVALID, "When the cmd_type is 'dump', the size of cmd_params must be a even number."); + return PARAM_INVALID; + } + + std::string dump_status("off"); + std::string dump_model(DUMP_ALL_MODEL); + std::string dump_path("/"); + std::set dump_layers; + + auto iter_dump_status = std::find(command.cmd_params.begin(), command.cmd_params.end(), DUMP_STATUS); + if (iter_dump_status != command.cmd_params.end()) { + ++iter_dump_status; + if (iter_dump_status == command.cmd_params.end()) { + GELOGE(PARAM_INVALID, "Invalid access."); + return PARAM_INVALID; + } + + dump_status = *iter_dump_status; + GELOGI("dump status = %s.", dump_status.c_str()); + } + + auto iter_dump_model = std::find(command.cmd_params.begin(), command.cmd_params.end(), DUMP_MODEL); + if (iter_dump_model != command.cmd_params.end()) { + ++iter_dump_model; + if (iter_dump_model == command.cmd_params.end()) { + GELOGE(PARAM_INVALID, "Invalid 
access."); + return PARAM_INVALID; + } + + dump_model = *iter_dump_model; + GELOGI("dump model = %s.", dump_model.c_str()); + } + + if (dump_status == "off" || dump_status == "OFF") { + PropertiesManager::Instance().DeleteDumpPropertyValue(dump_model); + return SUCCESS; + } + + for (size_t i = 0; i < command.cmd_params.size() / 2; ++i) { + if (command.cmd_params.at(i * kCmdParSize).find(DUMP_LAYER) != std::string::npos) { + GELOGI("dump layer: %s.", command.cmd_params.at(i * kCmdParSize + 1).c_str()); + (void)dump_layers.insert(command.cmd_params.at(i * kCmdParSize + 1)); + } + } + + auto iter_dump_path = std::find(command.cmd_params.begin(), command.cmd_params.end(), DUMP_FILE_PATH); + if (iter_dump_path != command.cmd_params.end()) { + ++iter_dump_path; + if (iter_dump_path == command.cmd_params.end()) { + GELOGE(PARAM_INVALID, "Invalid access."); + return PARAM_INVALID; + } + + dump_path = *iter_dump_path; + + if (!dump_path.empty() && dump_path[dump_path.size() - 1] != '/') { + dump_path += "/"; + } + GELOGI("dump path = %s.", dump_path.c_str()); + } + + PropertiesManager::Instance().AddDumpPropertyValue(dump_model, dump_layers); + PropertiesManager::Instance().SetDumpOutputPath(dump_path); + return SUCCESS; +} + +Status ModelManager::GetMaxUsedMemory(const uint32_t model_id, uint64_t &max_size) { + std::shared_ptr davinci_model = GetModel(model_id); + GE_CHK_BOOL_RET_STATUS(davinci_model != nullptr, PARAM_INVALID, "GetMaxUsedMemory Failed, Invalid Model ID %u !", + model_id); + + max_size = davinci_model->TotalMemSize(); + return SUCCESS; +} + +Status ModelManager::GetInputOutputDescInfo(const uint32_t model_id, vector &input_desc, + vector &output_desc) { + std::shared_ptr davinci_model = GetModel(model_id); + GE_CHK_BOOL_RET_STATUS(davinci_model != nullptr, PARAM_INVALID, + "GetInputOutputDescInfo Failed, Invalid Model ID %u !", model_id); + + return davinci_model->GetInputOutputDescInfo(input_desc, output_desc); +} + +Status 
ModelManager::GetInputOutputDescInfoForZeroCopy(const uint32_t model_id, vector &input_desc, + vector &output_desc) { + std::shared_ptr davinci_model = GetModel(model_id); + GE_CHK_BOOL_RET_STATUS(davinci_model != nullptr, PARAM_INVALID, + "GetInputOutputDescInfo Failed, Invalid Model ID %u !", model_id); + + return davinci_model->GetInputOutputDescInfoForZeroCopy(input_desc, output_desc); +} + +Status ModelManager::GetInputOutputDescInfo(const uint32_t model_id, vector &input_desc, + vector &output_desc, + std::vector &inputFormats, std::vector &outputFormats) { + std::shared_ptr davinci_model = GetModel(model_id); + GE_CHK_BOOL_RET_STATUS(davinci_model != nullptr, PARAM_INVALID, + "GetInputOutputDescInfo Failed, Invalid Model ID %u !", model_id); + + return davinci_model->GetInputOutputDescInfo(input_desc, output_desc, inputFormats, outputFormats); +} + +Status ModelManager::GetInputOutputDescInfoForZeroCopy(const uint32_t model_id, vector &input_desc, + vector &output_desc, + std::vector &inputFormats, + std::vector &outputFormats) { + std::shared_ptr davinci_model = GetModel(model_id); + GE_CHK_BOOL_RET_STATUS(davinci_model != nullptr, PARAM_INVALID, + "GetInputOutputDescInfo Failed, Invalid Model ID %u !", model_id); + + return davinci_model->GetInputOutputDescInfoForZeroCopy(input_desc, output_desc, inputFormats, outputFormats); +} + +Status ModelManager::LoadModelOffline(uint32_t &model_id, const ModelData &model, shared_ptr listener, + void *dev_ptr, size_t mem_size, void *weight_ptr, size_t weight_size) { + GE_CHK_BOOL_RET_STATUS(model.key.empty() || access(model.key.c_str(), F_OK) == 0, PARAM_INVALID, + "input key file path is not valid!"); + GenModelId(&model_id); + + shared_ptr davinci_model = nullptr; + + ModelHelper model_helper; + Status ret = model_helper.LoadModel(model); + if (ret != SUCCESS) { + GELOGE(ret, "load model failed."); + return ret; + } + + do { + GeModelPtr ge_model = model_helper.GetGeModel(); + try { + davinci_model = 
std::make_shared(model.priority, listener); + } catch (std::bad_alloc &) { + GELOGE(FAILED, "Make shared failed"); + return FAILED; + } catch (...) { + GELOGE(FAILED, "Make shared failed since other exception raise"); + return FAILED; + } + ret = davinci_model->Assign(ge_model); + if (ret != SUCCESS) { + GELOGW("assign model failed."); + break; + } + davinci_model->SetId(model_id); + ret = davinci_model->Init(dev_ptr, mem_size, weight_ptr, weight_size); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, break, "DavinciInit failed."); + + InsertModel(model_id, davinci_model); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(davinci_model == nullptr, ret = PARAM_INVALID; break, "Insert model failed"); + + GELOGI("Parse model %u success.", model_id); + + GE_IF_BOOL_EXEC(ret == SUCCESS, device_count++); + return SUCCESS; + } while (0); + + return ret; +} + +/// +/// @ingroup ge +/// @brief ACL case, Load task list with queue. +/// @param [out] model_id: model id for manager. +/// @param [in] model_data: Model data load from offline model file. +/// @param [in] input_que_ids: input queue ids from user, num equals Data Op. +/// @param [in] output_que_ids: input queue ids from user, num equals NetOutput Op. 
+/// @return: 0 for success / others for fail +/// +Status ModelManager::LoadModelWithQ(uint32_t &model_id, const ModelData &model_data, + const std::vector &input_queue_ids, + const std::vector &output_queue_ids) { + GE_CHK_BOOL_RET_STATUS(model_data.key.empty() || access(model_data.key.c_str(), F_OK) == 0, PARAM_INVALID, + "input key file path is not valid!"); + + ModelHelper model_helper; + Status ret = model_helper.LoadModel(model_data); + if (ret != SUCCESS) { + GELOGE(ret, "load model failed."); + return ret; + } + + shared_ptr davinci_model = MakeShared(model_data.priority, nullptr); + if (davinci_model == nullptr) { + GELOGE(FAILED, "create model failed."); + return FAILED; + } + + ret = davinci_model->Assign(model_helper.GetGeModel()); + if (ret != SUCCESS) { + GELOGE(ret, "assign model failed."); + return ret; + } + + GenModelId(&model_id); + davinci_model->SetId(model_id); + davinci_model->SetSessionId(model_id); + ret = davinci_model->SetQueIds(input_queue_ids, output_queue_ids); + if (ret != SUCCESS) { + GELOGE(ret, "set model queue ids failed."); + return ret; + } + + ret = davinci_model->Init(); + if (ret != SUCCESS) { + GELOGE(ret, "init model failed."); + return ret; + } + + InsertModel(model_id, davinci_model); + GELOGI("Parse model %u success.", model_id); + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief ACL case, not start new thread, return result +/// @param [in] model_id mode id +/// @param [in] stream model stream +/// @param [in] async_mode is asynchronize mode. +/// @param [in] input_data input data +/// @param [out] output_data output data +/// +Status ModelManager::ExecuteModel(uint32_t model_id, rtStream_t stream, bool async_mode, const InputData &input_data, + OutputData &output_data) { + std::shared_ptr davinci_model = GetModel(model_id); + GE_CHK_BOOL_RET_STATUS(davinci_model != nullptr, PARAM_INVALID, "Invalid Model ID %u to start! 
", model_id); + + Status status = davinci_model->NnExecute(stream, async_mode, input_data, output_data); + if (status == SUCCESS) { + GELOGI("Execute model %u success.", model_id); + } + + return status; +} + +Status ModelManager::CreateAicpuSession(uint64_t session_id) { + std::lock_guard lock(sess_ids_mutex_); + auto it = sess_ids_.find(session_id); + // never been created by any model + if (it == sess_ids_.end()) { + Status ret = KernelLaunchEx(aicpu::FWKAdapter::FWKOperateType::FWK_ADPT_SESSION_CREATE, session_id); + if (ret == SUCCESS) { + (void)sess_ids_.insert(session_id); + GELOGI("The session: %lu create success.", session_id); + } + return ret; + } + return SUCCESS; +} + +/// +/// @ingroup ge +/// @brief get model memory size and weight +/// @param [in] const ModelData model: model type +/// @param [out] size_t memSize: model memory usage +/// size_t weightSize: model weight and memory size +/// @return SUCCESS success / others failure +/// +Status ModelManager::GetModelMemAndWeightSize(const ModelData &model, size_t &mem_size, size_t &weight_size) { + uint8_t *model_data = nullptr; + uint32_t model_len = 0; + Status ret = DavinciModelParser::ParseModelContent(model, model_data, model_len); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, return ret, "parse model content failed!"); + + OmFileLoadHelper om_file_helper; + ret = om_file_helper.Init(model_data, model_len); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, return ret, "om file helperInit failed!"); + + ModelPartition task_partition; + if (om_file_helper.GetModelPartition(ModelPartitionType::TASK_INFO, task_partition) != SUCCESS) { + GELOGE(FAILED, "get task model partition failed."); + return FAILED; + } + + std::shared_ptr model_task_def = MakeShared(); + if (model_task_def == nullptr) { + return FAILED; + } + if (task_partition.size != 0) { + if (!ReadProtoFromArray(task_partition.data, static_cast(task_partition.size), model_task_def.get())) { + GELOGE(FAILED, "ReadProtoFromArray failed."); 
+ return FAILED; + } + } + + ModelPartition partition_weight; + ret = om_file_helper.GetModelPartition(ModelPartitionType::WEIGHTS_DATA, partition_weight); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(ret != SUCCESS, return ret, "Get weight partition failed. ret = %u", ret); + + mem_size = model_task_def->memory_size(); + weight_size = partition_weight.size; + return SUCCESS; +} + +void ModelManager::GenModelId(uint32_t *id) { + if (id == nullptr) { + return; + } + + std::lock_guard lock(map_mutex_); + if (free_model_id_.empty()) { + *id = ++max_model_id_; + } else { + *id = free_model_id_.back(); + free_model_id_.pop_back(); + } +} +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/model_manager.h b/src/ge/graph/load/new_model_manager/model_manager.h new file mode 100644 index 00000000..a392a380 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/model_manager.h @@ -0,0 +1,252 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_MANAGER_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_MANAGER_H_ + +#include +#include +#include +#include +#include +#include +#include +#include "common/types.h" +#include "common/ge_types.h" +#include "common/ge_inner_error_codes.h" +#include "common/helper/model_helper.h" +#include "common/helper/om_file_helper.h" +#include "graph/model.h" +#include "runtime/base.h" +#include "graph/ge_context.h" +#include "ge/ge_api_types.h" + +namespace ge { +class DavinciModel; + +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY ModelManager { + public: + static std::shared_ptr GetInstance(); + static void FinalizeForPtr(ModelManager *) {} + + /// + /// @ingroup domi_ome + /// @brief load and init model + /// @param [in] model_id model id + /// @param [in] model including model ptr and size + /// @param [in] listener used to return result + /// @param [in/out] info model task generate info + /// @return Status run result + /// @author + /// + ge::Status LoadModelOffline(uint32_t &model_id, const ModelData &model, + std::shared_ptr listener = nullptr, void *dev_ptr = nullptr, + size_t mem_size = 0, void *weight_ptr = nullptr, size_t weight_size = 0); + + /// + /// @ingroup domi_ome + /// @brief load and init model + /// @param [out] model_id model id + /// @param [in] model modeldef datatype + /// @param [in] listener used to return result + /// @param [in] isTrainMode model type + /// @return Status run result + /// @author @ + /// + ge::Status LoadModelOnline(uint32_t &model_id, std::shared_ptr &model, + std::shared_ptr listener); + + /// + /// @ingroup ge + /// @brief ACL case, Load task list with queue. + /// @param [out] model_id: model id for manager. + /// @param [in] model_data: Model data load from offline model file. + /// @param [in] input_que_ids: input queue ids from user, num equals Data Op. + /// @param [in] output_que_ids: input queue ids from user, num equals NetOutput Op. 
+ /// @return: 0 for success / others for fail + /// + ge::Status LoadModelWithQ(uint32_t &model_id, const ModelData &model_data, + const std::vector &input_queue_ids, + const std::vector &output_queue_ids); + + /// + /// @ingroup domi_ome + /// @brief unload model and free resources + /// @param [in] model_id model id + /// @return Status run result + /// @author + /// + ge::Status Unload(uint32_t model_id); + + /// + /// @ingroup omm + /// @brief unload model and free resources + /// @param [in] model_id model id + /// @return Status run result + /// @author + /// + ge::Status UnloadModeldef(uint32_t model_id); + + /// + /// @ingroup domi_ome + /// @brief unload all models and free resources + /// @return Status run result + /// @author + /// + ge::Status UnLoadAllModel(int32_t DeviceId); + + /// + /// @ingroup domi_ome + /// @brief process input data asynchronously + /// cannot be invoked by multiple thread + /// if one fails, other continue + /// @param [in] input_data input data + /// @return SUCCESS success + /// @return PARAM_INVALID parameter invalid + /// @return MODEL_NOT_READY model not ready + /// @return PUSH_DATA_FAILED push data into model queue failed + /// @author + /// + ge::Status DataInput(const InputData &input_data, OutputData &output_data); + + ge::Status DataInputTensor(uint32_t model_id, const std::vector &inputs, + std::vector &outputs); + + /// + /// @ingroup domi_ome + /// @brief model start to run + /// + ge::Status Start(uint32_t model_id); + + /// + /// @ingroup domi_ome + /// @brief ACL case, do not start new thread, return result + /// @param [in] model_id model id + /// @param [in] stream model stream + /// @param [in] async_mode is asynchronize mode. 
+ /// @param [in] input_data model input data + /// @param [out] output_data model output data + /// + ge::Status ExecuteModel(uint32_t model_id, rtStream_t stream, bool async_mode, const InputData &input_data, + OutputData &output_data); + + /// + /// @ingroup domi_ome + /// @brief model stop + /// + ge::Status Stop(uint32_t model_id); + + /// + /// @ingroup domi_ome + /// @brief comment handle function + /// + ge::Status HandleCommand(const Command &command); + static ge::Status HandleAclProfilingCommand(const Command &command); + static ge::Status HandleProfileCommand(const Command &command); + static ge::Status HandleDumpCommand(const Command &command); + /// + /// @ingroup domi_ome + /// @brief get model memory usage + /// @param [in] model_id model id + /// @return SUCCESS success + /// @return PARAM_INVALID parameter invalid + /// + ge::Status GetMaxUsedMemory(const uint32_t model_id, uint64_t &max_size); + + /// + /// @ingroup domi_ome + /// @brief get model input and output size + /// @param [in] model_id model id + /// @param [out] input_shape input tensor + /// @param [out] output_shape output tensor + /// @return SUCCESS success + /// @return PARAM_INVALID parameter invalid + /// + ge::Status GetInputOutputDescInfo(const uint32_t model_id, std::vector &input_desc, + std::vector &output_desc); + + ge::Status GetInputOutputDescInfo(const uint32_t model_id, std::vector &input_desc, + std::vector &output_desc, + std::vector &inputFormats, std::vector &outputFormats); + + /// + /// @ingroup domi_ome + /// @brief set model input and output size zero copy + /// @param [in] model_id model id + /// @param [out] input_shape input tensor + /// @param [out] output_shape output tensor + /// @return SUCCESS success + /// @return PARAM_INVALID parameter invalid + /// + ge::Status GetInputOutputDescInfoForZeroCopy(const uint32_t model_id, std::vector &input_desc, + std::vector &output_desc); + + ge::Status GetInputOutputDescInfoForZeroCopy(const uint32_t model_id, 
std::vector &input_desc, + std::vector &output_desc, + std::vector &inputFormats, + std::vector &outputFormats); + + ge::Status SetDevice(int32_t deviceId) const; + + /// + /// @ingroup domi_ome + /// @brief Get model according to given id + /// + std::shared_ptr GetModel(uint32_t id); + + ge::Status CreateAicpuSession(uint64_t session_id); + + static ge::Status GetModelMemAndWeightSize(const ModelData &model, size_t &mem_size, size_t &weight_size); + + void DestroyAicpuSession(uint64_t session_id); + + private: + /// + /// @ingroup domi_ome + /// @brief constructor + /// + ModelManager(); + + /// + /// @ingroup domi_ome + /// @brief destructor + /// + ~ModelManager(); + + /// + /// @ingroup domi_ome + /// @brief insert new model into model manager set + /// + void InsertModel(uint32_t id, std::shared_ptr &davinci_model); + + /// + /// @ingroup domi_ome + /// @brief delete model from model manager set + /// + ge::Status DeleteModel(uint32_t id); + + void GenModelId(uint32_t *id); + + std::map> model_map_; + std::vector free_model_id_; + uint32_t max_model_id_; + std::mutex map_mutex_; + std::mutex sess_ids_mutex_; + std::set sess_ids_; +}; +} // namespace ge + +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_MANAGER_H_ diff --git a/src/ge/graph/load/new_model_manager/model_output.cc b/src/ge/graph/load/new_model_manager/model_output.cc new file mode 100644 index 00000000..24f520b3 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/model_output.cc @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/model_output.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/op/ge_op_utils.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/output/output.h" + +namespace ge { +Status ModelOutput::CopyResult(DavinciModel *model, OpDescPtr op_desc, OutputData &rslt, uint32_t &data_index, + bool support_mem_share) { + uint32_t data_begin = data_index; + std::shared_ptr model_output = MakeShared(op_desc, model); + if (model_output == nullptr) { + return INTERNAL_ERROR; + } + + if (model_output->Init() != SUCCESS) { + return INTERNAL_ERROR; + } + + return model_output->CopyResult(rslt, data_begin, data_index, support_mem_share); +} +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/model_output.h b/src/ge/graph/load/new_model_manager/model_output.h new file mode 100644 index 00000000..1b05bdd6 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/model_output.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_OUTPUT_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_OUTPUT_H_ + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "common/ge_types.h" +#include "graph/op_desc.h" + +namespace ge { +class DavinciModel; + +class ModelOutput { + public: + static Status CopyResult(DavinciModel *model, OpDescPtr op_desc, OutputData &rslt, uint32_t &data_index, + bool support_mem_share); +}; +} // namespace ge + +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_OUTPUT_H_ diff --git a/src/ge/graph/load/new_model_manager/model_utils.cc b/src/ge/graph/load/new_model_manager/model_utils.cc new file mode 100644 index 00000000..c47e669c --- /dev/null +++ b/src/ge/graph/load/new_model_manager/model_utils.cc @@ -0,0 +1,514 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/model_utils.h" + +#include + +#include "common/debug/log.h" +#include "common/op/ge_op_utils.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/tensor_utils.h" +#include "runtime/base.h" +#include "runtime/kernel.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/manager/graph_var_manager.h" + +namespace ge { +/// +/// @ingroup domi_ome +/// @brief Check is Output Op. +/// @return bool +/// +bool ModelUtils::IsOutput(ConstOpDescPtr op_desc) { + GE_CHECK_NOTNULL_EXEC(op_desc, return false); + size_t output_size = op_desc->GetOutputsSize(); + for (size_t i = 0; i < output_size; ++i) { + bool output_tensor = false; + GE_IF_BOOL_EXEC(TensorUtils::GetOutputTensor(op_desc->GetOutputDesc(i), output_tensor) != GRAPH_SUCCESS, + GELOGW("Get OutputTensor failed, name: %s, output index: %zu", op_desc->GetName().c_str(), i); + return false;); + if (output_tensor) { + return true; + } + } + + return false; +} + +/// +/// @ingroup domi_ome +/// @brief Check is the Input need trans code. +/// @return bool +/// +bool ModelUtils::IsInputTensorNeedTrans(ConstOpDescPtr op_desc, size_t tensor_index) { + GE_CHECK_NOTNULL_EXEC(op_desc, return false); + const auto &input_desc = op_desc->GetInputDesc(tensor_index); + const auto &output_desc = op_desc->GetOutputDesc(tensor_index); + + if ((output_desc.GetFormat() == FORMAT_NC1HWC0) && (output_desc.GetDataType() == DT_INT8)) { + // AIPP input, add attribute in data op to tag aipp + return false; + } + + return (input_desc.GetFormat() != output_desc.GetFormat()) || (input_desc.GetDataType() != output_desc.GetDataType()); +} + +/// +/// @ingroup domi_ome +/// @brief Get input size. 
+/// @return vector +/// +vector ModelUtils::GetInputSize(ConstOpDescPtr op_desc) { + vector v_input_size; + GE_CHECK_NOTNULL_EXEC(op_desc, return v_input_size); + const size_t inputs_size = op_desc->GetInputsSize(); + const string op_type = op_desc->GetType(); + + const vector v_is_input_const = op_desc->GetIsInputConst(); + for (size_t i = 0; i < inputs_size; ++i) { + if ((i < v_is_input_const.size()) && v_is_input_const[i] && (op_type != NETOUTPUT)) { + // TBE: add weights size to input + GE_IF_BOOL_EXEC(true, GeTensorDesc tensor_desc = op_desc->GetInputDesc(i); uint32_t tensor_size = 0; + GE_CHK_STATUS(TensorUtils::GetSize(tensor_desc, tensor_size)); + if (tensor_size) { v_input_size.push_back(tensor_size); }); + continue; + } + + uint32_t tensor_size = 0; + GE_IF_BOOL_EXEC( + TensorUtils::GetSize(op_desc->GetInputDesc(i), tensor_size) != GRAPH_SUCCESS, + GELOGI("Get size from TensorDesc failed, op : %s, input index : %zu", op_desc->GetName().c_str(), i); + continue;); + + v_input_size.push_back(tensor_size); + } + + return v_input_size; +} + +/// +/// @ingroup domi_ome +/// @brief Get output size. 
+/// @return vector +/// +vector ModelUtils::GetOutputSize(ConstOpDescPtr op_desc) { + vector v_output_size; + GE_CHECK_NOTNULL_EXEC(op_desc, return v_output_size); + + const size_t outputs_size = op_desc->GetOutputsSize(); + const vector v_output_offset = op_desc->GetOutputOffset(); + GE_IF_BOOL_EXEC(v_output_offset.size() != outputs_size, + GELOGW("Output param invalid: output_offset=%zu, outputs=%zu.", v_output_offset.size(), outputs_size); + return v_output_size;); + + for (size_t i = 0; i < outputs_size; ++i) { + uint32_t tensor_size = 0; + GE_IF_BOOL_EXEC( + TensorUtils::GetSize(op_desc->GetOutputDesc(i), tensor_size) != GRAPH_SUCCESS, + GELOGI("Get size from TensorDesc failed, op : %s, output index : %zu", op_desc->GetName().c_str(), i); + continue;); + + v_output_size.push_back(tensor_size); + } + + return v_output_size; +} + +/// +/// @ingroup domi_ome +/// @brief Get workspace size. +/// @return vector +/// +vector ModelUtils::GetWorkspaceSize(ConstOpDescPtr op_desc) { + vector v_workspace_size; + GE_CHECK_NOTNULL_EXEC(op_desc, return v_workspace_size); + + const vector v_workspace_num = op_desc->GetWorkspace(); + const vector v_workspace_bytes = op_desc->GetWorkspaceBytes(); + if (v_workspace_num.size() != v_workspace_bytes.size()) { + GELOGW("workspace_num[%zu]!= workspace_bytes[%zu]", v_workspace_num.size(), v_workspace_bytes.size()); + return v_workspace_size; + } + + for (auto workspace_bytes : v_workspace_bytes) { + v_workspace_size.push_back(workspace_bytes); + } + + return v_workspace_size; +} + +/// +/// @ingroup domi_ome +/// @brief Get weight size. 
+/// @return vector +/// +vector ModelUtils::GetWeightSize(ConstOpDescPtr op_desc) { + vector v_weight_size; + GE_CHECK_NOTNULL_EXEC(op_desc, return v_weight_size); + + // const op, get weight directly + const string type_name = op_desc->GetType(); + if ((type_name == "Const") || (type_name == "Constant")) { + ConstGeTensorPtr weight = nullptr; + if (AttrUtils::GetTensor(*op_desc, ATTR_NAME_WEIGHTS, weight)) { + v_weight_size.push_back(TensorUtils::GetWeightSize(weight)); + } + + return v_weight_size; + } + + // other ops get weight from connected constop + const size_t inputs_size = op_desc->GetInputsSize(); + const vector v_is_input_const = op_desc->GetIsInputConst(); + for (size_t i = 0; i < inputs_size; ++i) { + if ((i < v_is_input_const.size()) && v_is_input_const[i]) { + v_weight_size.push_back(TensorUtils::GetWeightSize(op_desc->GetInputDesc(i))); + } + } + + return v_weight_size; +} + +/// +/// @ingroup domi_ome +/// @brief Get weights. +/// @return vector +/// +vector ModelUtils::GetWeights(ConstOpDescPtr op_desc) { + vector v_weights; + GE_CHECK_NOTNULL_EXEC(op_desc, return v_weights); + + // const op, get weight directly + const string op_type = op_desc->GetType(); + if ((op_type == "Const") || (op_type == "Constant")) { + ConstGeTensorPtr weight = nullptr; + if (AttrUtils::GetTensor(*op_desc, ATTR_NAME_WEIGHTS, weight)) { + v_weights.push_back(weight); + } + + return v_weights; + } + + // other ops get weight from connected constop + const size_t inputs_size = op_desc->GetInputsSize(); + const vector v_is_input_const = op_desc->GetIsInputConst(); + for (size_t i = 0; i < inputs_size; ++i) { + if ((i < v_is_input_const.size()) && v_is_input_const[i]) { + ConstGeTensorPtr weight = nullptr; + GeTensorDesc tensor_desc = op_desc->GetInputDesc(i); + if (AttrUtils::GetTensor(tensor_desc, ATTR_NAME_WEIGHTS, weight)) { + v_weights.push_back(weight); + } + } + } + + return v_weights; +} + +/// +/// @ingroup domi_ome +/// @brief Save Output tensor info to vector. 
+/// @return Status +/// +Status ModelUtils::GetOutputSize(ConstOpDescPtr op_desc, vector &output_size_list, + vector &output_memory_size_list) { + GE_CHECK_NOTNULL(op_desc); + + for (size_t i = 0; i < op_desc->GetOutputsSize(); ++i) { + bool output_tensor = false; + auto output_desc = op_desc->GetOutputDesc(i); + GE_CHK_STATUS_RET(TensorUtils::GetOutputTensor(output_desc, output_tensor), + "get OutputTensor failed, op : %s, input index : %zu", op_desc->GetName().c_str(), i); + + if (output_tensor) { + // get transferred parameters such as size + uint32_t size = 0; + uint32_t memory_size = 0; + graphStatus graph_status0 = TensorUtils::GetTensorSizeInBytes(output_desc, size); + graphStatus graph_status1 = TensorUtils::GetTensorMemorySizeInBytes(output_desc, memory_size); + if ((graph_status0 != GRAPH_SUCCESS) || (graph_status1 != GRAPH_SUCCESS)) { + return INTERNAL_ERROR; + } + output_size_list.push_back(size); + output_memory_size_list.push_back(memory_size); + } + } + + return SUCCESS; +} + +/// +/// @ingroup domi_ome +/// @brief Get AiCpuOp Input descriptor. 
+/// @return vector<::tagCcAICPUTensor> +/// +vector<::tagCcAICPUTensor> ModelUtils::GetInputDescs(ConstOpDescPtr op_desc) { + // AiCpuOp::GetInputDescs + vector<::opTensor_t> v_input_descs; + GE_CHECK_NOTNULL_EXEC(op_desc, return v_input_descs); + + const size_t inputs_size = op_desc->GetInputsSize(); + const vector v_is_input_const = op_desc->GetIsInputConst(); + + for (size_t i = 0; i < inputs_size; ++i) { + if ((i < v_is_input_const.size()) && v_is_input_const[i]) { // skip Const input node + continue; + } + + uint32_t dim_cnt = 0; + const auto &descriptor = op_desc->GetInputDesc(i); + GE_CHK_BOOL_EXEC_WARN(TensorUtils::GetRealDimCnt(descriptor, dim_cnt) == GRAPH_SUCCESS, continue, + "Get dim_cnt failed"); + + opTensor_t tmp; + uint32_t tmp_fmt = descriptor.GetFormat(); + tmp.format = tagOpTensorFormat(tmp_fmt); + tmp.dim_cnt = static_cast(dim_cnt); + uint32_t tmp_type = descriptor.GetDataType(); + tmp.data_type = tagOpDataType(tmp_type); + + for (int32_t j = 0; j < 4; j++) { // 4 dims + tmp.dim[j] = (j < tmp.dim_cnt ? descriptor.GetShape().GetDim(j) : 1); + } + + v_input_descs.push_back(tmp); + } + + return v_input_descs; +} + +/// +/// @ingroup domi_ome +/// @brief Get AiCpuOp Output descriptor. 
+/// @return vector<::tagCcAICPUTensor>
+///
+vector<::tagCcAICPUTensor> ModelUtils::GetOutputDescs(ConstOpDescPtr op_desc) {  // builds one AICPU tensor descriptor per output of op_desc
+  // AiCpuOp::GetOutputDescs
+  vector<::opTensor_t> v_output_descs;  // returned empty when op_desc is null
+  GE_CHECK_NOTNULL_EXEC(op_desc, return v_output_descs);
+
+  // init op output opTensor_t struct
+  const size_t output_num = op_desc->GetOutputsSize();
+  for (size_t i = 0; i < output_num; ++i) {
+    uint32_t dim_cnt = 0;
+    const auto &descriptor = op_desc->GetOutputDesc(i);
+    GE_CHK_BOOL_EXEC_WARN(TensorUtils::GetRealDimCnt(descriptor, dim_cnt) == GRAPH_SUCCESS, continue,  // a failing output is skipped with a warning, not fatal
+                          "Get dim_cnt failed");
+
+    opTensor_t tmp;
+    uint32_t tmp_fmt = descriptor.GetFormat();
+    tmp.format = tagOpTensorFormat(tmp_fmt);  // raw enum value reinterpreted as the cce tensor-format enum
+    tmp.dim_cnt = static_cast(dim_cnt);
+    uint32_t tmp_type = descriptor.GetDataType();
+    tmp.data_type = tagOpDataType(tmp_type);  // same raw-value reinterpretation for the data-type enum
+
+    for (int32_t j = 0; j < 4; j++) {  // 4 dims
+      tmp.dim[j] = static_cast(j < tmp.dim_cnt ? descriptor.GetShape().GetDim(j) : 1);  // pad unused dims with 1
+    }
+
+    v_output_descs.push_back(tmp);
+  }
+
+  return v_output_descs;
+}
+
+///
+/// @ingroup domi_ome
+/// @brief Get input data address.
+/// @return vector
+///
+vector ModelUtils::GetInputDataAddrs(const RuntimeParam &model_param, ConstOpDescPtr op_desc,  // resolves a device address for each input: const input -> weight memory, variable offset -> variable memory, else feature-map memory
+                                     bool need_convert) {  // when true, base addresses are first translated virtual->physical
+  vector v_input_data_addr;  // init as:buf_base + op_def_->input(i));
+  GE_CHECK_NOTNULL_EXEC(op_desc, return v_input_data_addr);
+  uint64_t session_id = model_param.session_id;
+  uint8_t *mem_base = model_param.mem_base;
+  uint8_t *var_base = model_param.var_base;
+  uint8_t *weight_base = model_param.weight_base;
+  const uint64_t logic_mem_base = 0;  // logical (model-relative) bases; offsets below are relative to these
+  uint64_t logic_weight_base = 0;
+  uint64_t logic_var_base = model_param.logic_var_base;
+  uint64_t mem_size = model_param.mem_size;
+  uint64_t weight_size = model_param.weight_size;
+  uint64_t var_size = model_param.var_size;
+
+  if (need_convert) {
+    // Conversion rewrites each base pointer in place (same variable passed as input and output).
+    Status status = ConvertVirtualAddressToPhysical(mem_base, mem_size, mem_base);
+    if (status != SUCCESS) {
+      GELOGE(RT_FAILED, "Convert virtual address to physical for mem_base failed.");
+      return v_input_data_addr;
+    }
+
+    status = ConvertVirtualAddressToPhysical(weight_base, weight_size, weight_base);
+    if (status != SUCCESS) {
+      GELOGE(RT_FAILED, "Convert virtual address to physical for weight_base failed.");
+      return v_input_data_addr;
+    }
+
+    status = ConvertVirtualAddressToPhysical(var_base, var_size, var_base);
+    if (status != SUCCESS) {
+      GELOGE(RT_FAILED, "Convert virtual address to physical for var_base failed.");
+      return v_input_data_addr;
+    }
+  }
+
+  const size_t inputs_size = op_desc->GetInputsSize();
+  const vector v_input_offset = op_desc->GetInputOffset();  // offsets only for NON-const inputs, hence non_const_index below
+
+  const string op_type = op_desc->GetType();
+
+  size_t non_const_index = 0;
+  const vector v_is_input_const = op_desc->GetIsInputConst();
+  for (size_t i = 0; i < inputs_size; ++i) {
+    if ((i < v_is_input_const.size()) && v_is_input_const[i] && (op_type != NETOUTPUT)) {  // NETOUTPUT treats even const inputs as regular data
+      // TBE: add weights address to input
+      GE_IF_BOOL_EXEC(true, GeTensorDesc tensor_desc = op_desc->GetInputDesc(i); uint32_t tensor_size = 0;
+                      GE_CHK_STATUS(TensorUtils::GetSize(tensor_desc,
+                        tensor_size)); if (tensor_size) {  // zero-sized const inputs contribute no address
+                        int64_t data_offset = 0;
+                        GE_CHK_STATUS(TensorUtils::GetDataOffset(tensor_desc, data_offset));
+                        uint8_t *weight_addr = static_cast(weight_base + data_offset - logic_weight_base);
+                        v_input_data_addr.push_back(weight_addr);
+                      });
+      non_const_index++;
+      continue;
+    }
+
+    GE_IF_BOOL_EXEC(non_const_index >= v_input_offset.size(),  // defensive: stop if offsets ran out before inputs did
+                    GELOGW("offsets=%zu, inputs=%zu, index=%zu.", v_input_offset.size(), inputs_size, non_const_index);
+                    break;);
+
+    int64_t input_offset = v_input_offset[non_const_index];
+    non_const_index++;
+    GE_IF_BOOL_EXEC(var_size != 0 && ge::VarManager::Instance(session_id)->IsVarAddr(input_offset),  // offset falls in variable space -> resolve against var_base
+                    uint8_t *variable_addr = var_base + input_offset - logic_var_base;
+                    v_input_data_addr.push_back(variable_addr);
+                    continue;);
+
+    bool input_tensor = false;
+    GE_IF_BOOL_EXEC(TensorUtils::GetInputTensor(op_desc->GetOutputDesc(i), input_tensor) != GRAPH_SUCCESS,  // NOTE(review): queries GetOutputDesc(i) inside the INPUT loop — looks like it should be GetInputDesc(i); confirm upstream (warning text also says "get size" but the call checks the input-tensor flag)
+                    GELOGW("get size from TensorDesc failed, op: %s, input index: %zu", op_desc->GetName().c_str(), i);
+                    continue;);
+
+    uint8_t *mem_addr = mem_base + input_offset - logic_mem_base;  // default: plain feature-map memory
+    v_input_data_addr.push_back(mem_addr);
+  }
+
+  return v_input_data_addr;
+}
+
+///
+/// @ingroup domi_ome
+/// @brief Get output data address.
+/// @return vector
+///
+vector ModelUtils::GetOutputDataAddrs(const RuntimeParam &model_param, ConstOpDescPtr op_desc,  // resolves a device address for each output: variable offset -> variable memory, else feature-map memory
+                                      bool need_convert) {  // when true, base addresses are first translated virtual->physical
+  vector v_output_data_addr;  // init as:buf_base + op_def_->output(i)
+  GE_CHECK_NOTNULL_EXEC(op_desc, return v_output_data_addr);
+  uint64_t session_id = model_param.session_id;
+  uint8_t *mem_base = model_param.mem_base;
+  uint8_t *var_base = model_param.var_base;
+  const uint64_t logic_mem_base = 0;  // logical (model-relative) bases; offsets below are relative to these
+  uint64_t logic_var_base = model_param.logic_var_base;
+  uint64_t mem_size = model_param.mem_size;
+  uint64_t var_size = model_param.var_size;
+
+  if (need_convert) {
+    // Conversion rewrites each base pointer in place (same variable passed as input and output).
+    Status status = ConvertVirtualAddressToPhysical(mem_base, mem_size, mem_base);
+    if (status != SUCCESS) {
+      GELOGE(RT_FAILED, "Convert virtual address to physical for mem_base failed.");
+      return v_output_data_addr;
+    }
+
+    status = ConvertVirtualAddressToPhysical(var_base, var_size, var_base);
+    if (status != SUCCESS) {
+      GELOGE(RT_FAILED, "Convert virtual address to physical for var_base failed.");
+      return v_output_data_addr;
+    }
+  }
+
+  const size_t outputs_size = op_desc->GetOutputsSize();
+  const vector v_output_offset = op_desc->GetOutputOffset();
+  GE_IF_BOOL_EXEC(v_output_offset.size() != outputs_size,  // unlike inputs, outputs require exactly one offset per output
+                  GELOGW("Output param invalid: output_offset=%zu, outputs=%zu.", v_output_offset.size(), outputs_size);
+                  return v_output_data_addr;);
+
+  for (size_t i = 0; i < outputs_size; ++i) {
+    GE_IF_BOOL_EXEC(var_size != 0 && ge::VarManager::Instance(session_id)->IsVarAddr(v_output_offset[i]),  // offset falls in variable space -> resolve against var_base
+                    uint8_t *variable_addr = static_cast(var_base + v_output_offset[i] - logic_var_base);
+                    v_output_data_addr.push_back(variable_addr);
+                    continue;);
+    uint8_t *mem_addr = mem_base + v_output_offset[i] - logic_mem_base;  // default: plain feature-map memory
+    v_output_data_addr.push_back(mem_addr);
+  }
+
+  return v_output_data_addr;
+}
+
+///
+/// @ingroup domi_ome
+/// @brief Get workspace data address.
+/// @return vector
+///
+vector ModelUtils::GetWorkspaceDataAddrs(const RuntimeParam &model_param, ConstOpDescPtr op_desc) {  // resolves a device address for each workspace slot; unlike the input/output variants, conversion is unconditional (no need_convert flag)
+  vector v_workspace_data_addr;
+  GE_CHECK_NOTNULL_EXEC(op_desc, return v_workspace_data_addr);
+  uint8_t *mem_base = model_param.mem_base;
+  uint64_t mem_size = model_param.mem_size;
+
+  Status status = ConvertVirtualAddressToPhysical(mem_base, mem_size, mem_base);  // rewrites mem_base in place
+  if (status != SUCCESS) {
+    GELOGE(RT_FAILED, "Convert virtual address to physical for mem_base failed.");
+    return v_workspace_data_addr;
+  }
+
+  const vector v_workspace_num = op_desc->GetWorkspace();  // despite the name, these values are used below as offsets from mem_base
+  const vector v_workspace_bytes = op_desc->GetWorkspaceBytes();
+  if (v_workspace_num.size() != v_workspace_bytes.size()) {  // the two lists must pair up one-to-one
+    GELOGW("v_workspace_num.size()[%zu] != v_workspace_bytes.size()[%zu]", v_workspace_num.size(),
+           v_workspace_bytes.size());
+    return v_workspace_data_addr;
+  }
+
+  for (size_t i = 0; i < v_workspace_bytes.size(); ++i) {
+    int64_t workspace_num = v_workspace_num[i];
+    int64_t workspace_bytes = v_workspace_bytes[i];
+    v_workspace_data_addr.push_back(workspace_bytes == 0 ? nullptr : mem_base + workspace_num);  // zero-byte slots get nullptr so the index positions still line up
+  }
+
+  return v_workspace_data_addr;
+}
+
+Status ModelUtils::ConvertVirtualAddressToPhysical(uint8_t *virtual_address, uint64_t size,  // no-op (returns SUCCESS, leaves physical_address untouched) unless GE_USE_PHYSICAL_ADDRESS is set in the environment
+                                                   uint8_t *&physical_address) {
+  // Indicates whether use physical address.
+  const char *use_physical_address = std::getenv("GE_USE_PHYSICAL_ADDRESS");
+  if (use_physical_address == nullptr || virtual_address == 0 || size == 0) {  // NOTE(review): pointer compared against 0 — nullptr would be the idiomatic spelling
+    return SUCCESS;
+  }
+
+  rtError_t ret = rtKernelConfigTransArg(virtual_address, size, 0, reinterpret_cast(&physical_address));  // runtime fills physical_address through the out-pointer
+  if (ret != RT_ERROR_NONE) {
+    GELOGE(RT_FAILED, "Call rtKernelConfigTransArg failed, ret: 0x%X", ret);
+    return RT_FAILED;
+  }
+
+  return SUCCESS;
+}
+}  // namespace ge
diff --git a/src/ge/graph/load/new_model_manager/model_utils.h b/src/ge/graph/load/new_model_manager/model_utils.h
new file mode 100644
index 00000000..7d0c49c3
--- /dev/null
+++ b/src/ge/graph/load/new_model_manager/model_utils.h
@@ -0,0 +1,135 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ *     http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_UTILS_H_
+#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_UTILS_H_
+
+#include
+
+#include "cce/dnn.h"
+#include "cce/taskdown_api.h"
+#include "common/ge_inner_error_codes.h"
+#include "common/types.h"
+#include "graph/load/new_model_manager/task_info/task_info.h"
+#include "graph/op_desc.h"
+#include "graph/utils/tensor_adapter.h"
+
+using std::vector;
+
+namespace ge {
+class ModelUtils {  // stateless helpers for reading sizes, descriptors and device addresses out of an OpDesc
+ public:
+  ModelUtils() = default;
+  ~ModelUtils() = default;
+
+  ///
+  /// @ingroup domi_ome
+  /// @brief Check is Output Op.
+ /// @return bool + /// + static bool IsOutput(ConstOpDescPtr op_desc); + + /// + /// @ingroup domi_ome + /// @brief Check is the Input need trans code. + /// @return bool + /// + static bool IsInputTensorNeedTrans(ConstOpDescPtr op_desc, size_t tensor_index); + + /// + /// @ingroup domi_ome + /// @brief Get input size. + /// @return vector + /// + static vector GetInputSize(ConstOpDescPtr op_desc); + + /// + /// @ingroup domi_ome + /// @brief Get output size. + /// @return vector + /// + static vector GetOutputSize(ConstOpDescPtr op_desc); + + /// + /// @ingroup domi_ome + /// @brief Get workspace size. + /// @return vector + /// + static vector GetWorkspaceSize(ConstOpDescPtr op_desc); + + /// + /// @ingroup domi_ome + /// @brief Get weight size. + /// @return vector + /// + static vector GetWeightSize(ConstOpDescPtr op_desc); + + /// + /// @ingroup domi_ome + /// @brief Get weights. + /// @return vector + /// + static vector GetWeights(ConstOpDescPtr op_desc); + + /// + /// @ingroup domi_ome + /// @brief Save Output tensor info to vector. + /// @return Status + /// + static Status GetOutputSize(ConstOpDescPtr op_desc, vector &output_size_list, + vector &output_memory_size_list); + + /// + /// @ingroup domi_ome + /// @brief Get AiCpuOp Input descriptor. + /// @return vector<::tagCcAICPUTensor> + /// + static vector<::tagCcAICPUTensor> GetInputDescs(ConstOpDescPtr op_desc); + /// + /// @ingroup domi_ome + /// @brief Get AiCpuOp Output descriptor. + /// @return vector<::tagCcAICPUTensor> + /// + static vector<::tagCcAICPUTensor> GetOutputDescs(ConstOpDescPtr op_desc); + + /// + /// @ingroup domi_ome + /// @brief Get input data address. + /// @return vector + /// + static vector GetInputDataAddrs(const RuntimeParam &model_param, ConstOpDescPtr op_desc, + bool need_convert = true); + /// + /// @ingroup domi_ome + /// @brief Get output data address. 
+ /// @return vector + /// + static vector GetOutputDataAddrs(const RuntimeParam &model_param, ConstOpDescPtr op_desc, + bool need_convert = true); + + /// + /// @ingroup domi_ome + /// @brief Get workspace data address. + /// @return vector + /// + static vector GetWorkspaceDataAddrs(const RuntimeParam &model_param, ConstOpDescPtr op_desc); + + static ge::Status ConvertVirtualAddressToPhysical(uint8_t *virtual_address, uint64_t size, + uint8_t *&physical_address); +}; +} // namespace ge + +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_MODEL_UTILS_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/end_graph_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/end_graph_task_info.cc new file mode 100644 index 00000000..f14f593e --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/end_graph_task_info.cc @@ -0,0 +1,54 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/task_info/end_graph_task_info.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +Status EndGraphTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("InitEndGraphTaskInfo start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + GELOGE(ret, "SetStream fail, stream_id:%u", task_def.stream_id()); + return FAILED; + } + + model_ = davinci_model->GetRtModelHandle(); + + return SUCCESS; +} + +Status EndGraphTaskInfo::Distribute() { + GELOGI("EndGraphTaskInfo Distribute Start."); + + rtError_t rt_ret = rtEndGraph(model_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtEndGraph failed, ret: 0x%x", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_MODEL_END_GRAPH, EndGraphTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/end_graph_task_info.h b/src/ge/graph/load/new_model_manager/task_info/end_graph_task_info.h new file mode 100644 index 00000000..17f3b002 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/end_graph_task_info.h @@ -0,0 +1,36 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_END_GRAPH_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_END_GRAPH_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class EndGraphTaskInfo : public TaskInfo { + public: + EndGraphTaskInfo() : model_(0) {} + + ~EndGraphTaskInfo() override { model_ = nullptr; } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + rtModel_t model_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_END_GRAPH_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/event_record_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/event_record_task_info.cc new file mode 100644 index 00000000..edfd8d17 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/event_record_task_info.cc @@ -0,0 +1,59 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "graph/load/new_model_manager/task_info/event_record_task_info.h"
+
+#include "framework/common/debug/ge_log.h"
+#include "graph/load/new_model_manager/davinci_model.h"
+
+namespace ge {
+Status EventRecordTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) {  // binds the task to its stream and looks up the rt event it will record
+  GELOGI("EventRecordTaskInfo Init Start.");
+  if (davinci_model == nullptr) {
+    GELOGE(PARAM_INVALID, "davinci_model is null!");
+    return PARAM_INVALID;
+  }
+
+  Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList());
+  if (ret != SUCCESS) {
+    return ret;
+  }
+
+  if (task_def.event_id() >= davinci_model->EventNum()) {  // bounds check before indexing the event list below
+    GELOGE(INTERNAL_ERROR, "event_list_ size = %u cur =%u!", davinci_model->EventNum(), task_def.event_id());
+    return INTERNAL_ERROR;
+  }
+
+  if (!davinci_model->GetEventList().empty()) {  // NOTE(review): if the list is empty event_ stays null and Init still returns SUCCESS — confirm EventNum() guarantees non-empty here
+    event_ = davinci_model->GetEventList()[task_def.event_id()];
+  }
+
+  return SUCCESS;
+}
+
+Status EventRecordTaskInfo::Distribute() {  // records the event on the bound stream via the rt runtime
+  GELOGI("EventRecordTaskInfo Distribute Start.");
+  rtError_t rt_ret = rtEventRecord(event_, stream_);
+  if (rt_ret != RT_ERROR_NONE) {
+    GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret);
+    return RT_FAILED;
+  }
+
+  return SUCCESS;
+}
+
+REGISTER_TASK_INFO(RT_MODEL_TASK_EVENT_RECORD, EventRecordTaskInfo);
+}  // namespace ge
diff --git a/src/ge/graph/load/new_model_manager/task_info/event_record_task_info.h b/src/ge/graph/load/new_model_manager/task_info/event_record_task_info.h
new file mode 100644
index 00000000..04ee1779
--- /dev/null
+++ b/src/ge/graph/load/new_model_manager/task_info/event_record_task_info.h
@@ -0,0 +1,36 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_EVENT_RECORD_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_EVENT_RECORD_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class EventRecordTaskInfo : public TaskInfo { + public: + EventRecordTaskInfo() : event_(nullptr) {} + + ~EventRecordTaskInfo() override { event_ = nullptr; } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + rtEvent_t event_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_EVENT_RECORD_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/event_wait_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/event_wait_task_info.cc new file mode 100644 index 00000000..a8db158d --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/event_wait_task_info.cc @@ -0,0 +1,67 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/event_wait_task_info.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +Status EventWaitTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("EventWaitTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + if (task_def.event_id() >= davinci_model->EventNum()) { + GELOGE(INTERNAL_ERROR, "event_list_ size = %u cur =%u!", davinci_model->EventNum(), task_def.event_id()); + return INTERNAL_ERROR; + } + + if (!davinci_model->GetEventList().empty()) { + event_ = davinci_model->GetEventList()[task_def.event_id()]; + } + + event_type_ = task_def.event_ex().event_type(); + + return SUCCESS; +} + +Status EventWaitTaskInfo::Distribute() { + GELOGI("EventWaitTaskInfo Distribute Start."); + rtError_t rt_ret = rtStreamWaitEvent(stream_, event_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtEventReset(event_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_EVENT_WAIT, EventWaitTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/event_wait_task_info.h b/src/ge/graph/load/new_model_manager/task_info/event_wait_task_info.h new file mode 100644 index 00000000..f9da30b8 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/event_wait_task_info.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under 
the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_EVENT_WAIT_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_EVENT_WAIT_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class EventWaitTaskInfo : public TaskInfo { + public: + EventWaitTaskInfo() : event_(nullptr), event_type_(0) {} + + ~EventWaitTaskInfo() override { event_ = nullptr; } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + rtEvent_t event_; + uint32_t event_type_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_EVENT_WAIT_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/fusion_start_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/fusion_start_task_info.cc new file mode 100644 index 00000000..3463b41b --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/fusion_start_task_info.cc @@ -0,0 +1,50 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/fusion_start_task_info.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +Status FusionStartTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("FusionStartTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + return SUCCESS; +} + +Status FusionStartTaskInfo::Distribute() { + GELOGI("FusionStartTaskInfo Distribute Start."); + rtError_t rt_ret = rtKernelFusionStart(stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_FUSION_START, FusionStartTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/fusion_start_task_info.h b/src/ge/graph/load/new_model_manager/task_info/fusion_start_task_info.h new file mode 100644 index 00000000..7f575639 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/fusion_start_task_info.h @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_FUSION_START_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_FUSION_START_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class FusionStartTaskInfo : public TaskInfo { + public: + FusionStartTaskInfo() {} + + ~FusionStartTaskInfo() override {} + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_FUSION_START_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/fusion_stop_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/fusion_stop_task_info.cc new file mode 100644 index 00000000..27d7f345 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/fusion_stop_task_info.cc @@ -0,0 +1,50 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/task_info/fusion_stop_task_info.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +Status FusionStopTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("FusionStopTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + return SUCCESS; +} + +Status FusionStopTaskInfo::Distribute() { + GELOGI("FusionStopTaskInfo Distribute Start."); + rtError_t rt_ret = rtKernelFusionEnd(stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_FUSION_END, FusionStopTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/fusion_stop_task_info.h b/src/ge/graph/load/new_model_manager/task_info/fusion_stop_task_info.h new file mode 100644 index 00000000..66248e9f --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/fusion_stop_task_info.h @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_FUSION_STOP_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_FUSION_STOP_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class FusionStopTaskInfo : public TaskInfo { + public: + FusionStopTaskInfo() {} + + ~FusionStopTaskInfo() override {} + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_FUSION_STOP_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/hccl_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/hccl_task_info.cc new file mode 100755 index 00000000..80c31b09 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/hccl_task_info.cc @@ -0,0 +1,254 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/task_info/hccl_task_info.h" + +#include + +#include "common/opskernel/ops_kernel_info_store.h" +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/model_utils.h" + +namespace ge { +HcclTaskInfo::~HcclTaskInfo() { + if (private_def_ != nullptr) { + rtError_t ret = rtFreeHost(private_def_); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtFree Fail, ret = 0x%X.", ret); + } + + private_def_ = nullptr; + } + input_data_addr_ = nullptr; + davinci_model_ = nullptr; + ops_kernel_store_ = nullptr; + output_data_addr_ = nullptr; + workspace_addr_ = nullptr; +} + +Status HcclTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("HcclTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + davinci_model_ = davinci_model; + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + GetPrivateDefByTaskDef(task_def); + + auto hccl_def = task_def.kernel_hccl(); + hcclDataType_t data_type; + int32_t count; + uint32_t op_index = hccl_def.op_index(); + GELOGI("HcclTaskInfo Init, op_index is: %u", op_index); + std::string hccl_type = hccl_def.hccl_type(); + + // Get HCCL op + auto op_desc = davinci_model->GetOpList()[op_index]; + GE_CHECK_NOTNULL(op_desc); + + Status dmrt = HcomOmeUtil::GetHcomDataType(op_desc, data_type); + if (dmrt != SUCCESS) { + GELOGE(FAILED, "davinci_model: GetHcomDataType fail! domi error: %u", dmrt); + return FAILED; + } + + dmrt = HcomOmeUtil::GetHcomCount(op_desc, data_type, (hccl_type == HCOMALLGATHER), count); + if (dmrt != SUCCESS) { + GELOGE(FAILED, "davinci_model: GetHcomCount fail! 
domi error: %u", dmrt); + return FAILED; + } + + ret = SetAddrs(hccl_type, op_desc); + if (ret != SUCCESS) { + GELOGE(ret, "Setaddrs Fail."); + return ret; + } + + count_ = count; + hccl_type_ = hccl_type; + data_type_ = data_type; + + // GE's new process: hccl declares the need for Workspace size, and GE allocates Workspace + auto workspace_bytes = op_desc->GetWorkspaceBytes(); + if (!workspace_bytes.empty()) { + uint64_t workspace_mem_size_tmp = workspace_bytes[0]; + GELOGI("hccl need work_space_mem_size=%lu", workspace_mem_size_tmp); + if (workspace_mem_size_tmp != 0) { + workspace_mem_size_ = workspace_mem_size_tmp; + vector workspace_data_addrs = + ModelUtils::GetWorkspaceDataAddrs(davinci_model->GetRuntimeParam(), op_desc); + if (!workspace_data_addrs.empty()) { + GELOGI("Get work_space_addr"); + workspace_addr_ = workspace_data_addrs[0]; + } + } + } + // GE's new process: hccl declares the number of streams required, creates a stream by GE, and sends it to hccl + int64_t hccl_stream_num = 0; + if (!ge::AttrUtils::GetInt(op_desc, "used_stream_num", hccl_stream_num)) { + GELOGW("op_desc has no attr used_stream_num!"); + } + + GELOGI("hcclStreamNum =%ld", hccl_stream_num); + + for (int64_t i = 0; i < hccl_stream_num; ++i) { + rtStream_t stream = nullptr; + rtError_t rt_ret = + rtStreamCreateWithFlags(&stream, davinci_model->Priority(), RT_STREAM_PERSISTENT | RT_STREAM_FORCE_COPY); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + // Create slave stream, inactive by default, activated by hccl + rt_ret = rtModelBindStream(davinci_model->GetRtModelHandle(), stream, RT_MODEL_WAIT_ACTIVE_STREAM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + hccl_stream_list_.push_back(stream); + davinci_model->PushHcclStream(stream); + } + + return SUCCESS; +} + +Status HcclTaskInfo::Distribute() { + GELOGI("HcclTaskInfo Distribute 
Start. begin to call function LoadTask in hccl."); + if (ops_kernel_store_ == nullptr) { + GELOGE(INTERNAL_ERROR, "ops kernel store is null."); + return INTERNAL_ERROR; + } + + OpsKernelInfoStore *ops_kernel_info_store = reinterpret_cast(ops_kernel_store_); + GE_CHECK_NOTNULL(ops_kernel_info_store); + GETaskInfo ge_task; + TransToGETaskInfo(ge_task); + auto result = ops_kernel_info_store->LoadTask(ge_task); + if (result != HCCL_SUCCESS) { + GELOGE(INTERNAL_ERROR, "davinci_model : load task fail, return ret: %u", result); + return INTERNAL_ERROR; + } + + GELOGI("Call function LoadTask end."); + return SUCCESS; +} + +Status HcclTaskInfo::SetAddrs(const std::string &hccl_type, const std::shared_ptr &op_desc) { + domi::Status dmrt; + hcclRedOp_t op_type; + GE_CHECK_NOTNULL(davinci_model_); + auto input_data_addr_list = ModelUtils::GetInputDataAddrs(davinci_model_->GetRuntimeParam(), op_desc); + if (!input_data_addr_list.empty()) { + input_data_addr_ = input_data_addr_list[0]; + } + + void *output_data_addr = nullptr; + auto output_data_addr_list = ModelUtils::GetOutputDataAddrs(davinci_model_->GetRuntimeParam(), op_desc); + if (!output_data_addr_list.empty()) { + output_data_addr = output_data_addr_list[0]; + } + + if (hccl_type == HCOMBROADCAST) { + int64_t root_id; + dmrt = HcomOmeUtil::GetHcomRootId(op_desc, root_id); + if (dmrt != SUCCESS) { + GELOGE(FAILED, "davinci_model: GetHcomRootId fail! domi error: %u", dmrt); + return FAILED; + } + root_id_ = root_id; + } else if (hccl_type == HCOMALLGATHER || hccl_type == HCOMRECEIVE) { + output_data_addr_ = output_data_addr; + } else if (hccl_type == HCOMALLREDUCE) { + dmrt = HcomOmeUtil::GetHcomOperationType(op_desc, op_type); + if (dmrt != SUCCESS) { + GELOGE(FAILED, "davinci_model: GetHcomOperationType fail! 
domi error: %u", dmrt); + return FAILED; + } + + output_data_addr_ = output_data_addr; + op_type_ = op_type; + } else if (hccl_type == HCOMREDUCESCATTER) { + dmrt = HcomOmeUtil::GetHcomOperationType(op_desc, op_type); + if (dmrt != SUCCESS) { + GELOGE(FAILED, "davinci_model: GetHcomOperationType fail! domi error: %u", dmrt); + return FAILED; + } + + output_data_addr_ = output_data_addr; + op_type_ = op_type; + } + + return SUCCESS; +} + +void HcclTaskInfo::TransToGETaskInfo(GETaskInfo &ge_task) { + ge_task.id = id_; + ge_task.type = static_cast(RT_MODEL_TASK_HCCL); + ge_task.stream = stream_; + + ge_task.kernelHcclInfo.hccl_type = hccl_type_; + ge_task.kernelHcclInfo.inputDataAddr = input_data_addr_; + ge_task.kernelHcclInfo.outputDataAddr = output_data_addr_; + ge_task.kernelHcclInfo.workSpaceAddr = workspace_addr_; + ge_task.kernelHcclInfo.count = count_; + ge_task.kernelHcclInfo.dataType = data_type_; + ge_task.kernelHcclInfo.opType = op_type_; + ge_task.kernelHcclInfo.rootId = root_id_; + ge_task.kernelHcclInfo.workSpaceMemSize = workspace_mem_size_; + ge_task.kernelHcclInfo.hcclStreamList = hccl_stream_list_; + + ge_task.privateDef = private_def_; + ge_task.privateDefLen = private_def_len_; + ge_task.opsKernelStorePtr = ops_kernel_store_; +} + +void HcclTaskInfo::GetPrivateDefByTaskDef(const domi::TaskDef &task) { + // Get privateDef and opsKernelStorePtr from taskDef and save them in taskInfo + GELOGI("get custom info in modelTaskDef."); + ops_kernel_store_ = nullptr; + void *ops_kernel_store_name_temp = reinterpret_cast(task.ops_kernel_store_ptr()); + if (ops_kernel_store_name_temp != nullptr) { + ops_kernel_store_ = std::move(ops_kernel_store_name_temp); + std::string private_def_temp = task.private_def(); + if (!private_def_temp.empty()) { + private_def_len_ = private_def_temp.size(); + rtError_t ret = rtMallocHost(&private_def_, private_def_len_); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtMallocHost Fail, ret = 0x%X.", ret); + return; + } 
+ + ret = rtMemcpy(private_def_, private_def_len_, task.private_def().c_str(), private_def_len_, + RT_MEMCPY_HOST_TO_HOST); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rtMemcpy Fail, ret = 0x%X.", ret); + return; + } + } + } +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_HCCL, HcclTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/hccl_task_info.h b/src/ge/graph/load/new_model_manager/task_info/hccl_task_info.h new file mode 100644 index 00000000..1a2c508f --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/hccl_task_info.h @@ -0,0 +1,79 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_HCCL_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_HCCL_TASK_INFO_H_ + +#include +#include +#include + +#include "common/opskernel/ge_task_info.h" +#include "graph/load/new_model_manager/task_info/task_info.h" +#include "graph/manager/util/hcom_util.h" +namespace ge { +class HcclTaskInfo : public TaskInfo { + public: + HcclTaskInfo() + : davinci_model_(nullptr), + hccl_type_(""), + input_data_addr_(nullptr), + output_data_addr_(nullptr), + count_(0), + data_type_(HCCL_DATA_TYPE_INT8), + op_type_(HCCL_REP_OP_SUM), + root_id_(0), + id_(0), + workspace_addr_(nullptr), + workspace_mem_size_(0), + hccl_stream_list_(), + ops_kernel_store_(nullptr), + private_def_(nullptr), + private_def_len_(0) {} + + ~HcclTaskInfo() override; + + ge::Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + ge::Status Distribute() override; + + uint32_t GetTaskID() override { return id_; } + + private: + ge::Status SetAddrs(const std::string &hccl_type, const std::shared_ptr &op); + + void TransToGETaskInfo(GETaskInfo &ge_task); + + void GetPrivateDefByTaskDef(const domi::TaskDef &task); + + DavinciModel *davinci_model_; + string hccl_type_; + void *input_data_addr_; + void *output_data_addr_; + int32_t count_; + hcclDataType_t data_type_; + hcclRedOp_t op_type_; + int64_t root_id_; + uint32_t id_; + void *workspace_addr_; + uint64_t workspace_mem_size_; + vector hccl_stream_list_; + void *ops_kernel_store_; + void *private_def_; + uint32_t private_def_len_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_HCCL_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/kernel_ex_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/kernel_ex_task_info.cc new file mode 100644 index 00000000..765ae1b3 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/kernel_ex_task_info.cc @@ -0,0 +1,190 @@ +/** + * Copyright 2019-2020 
Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/kernel_ex_task_info.h" + +#include + +#include "cce/aicpu_engine_struct.h" +#include "cce/fwk_adpt_struct.h" +#include "common/ge/ge_util.h" +#include "common/properties_manager.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/fmk_error_codes.h" +#include "graph/attr_value.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/model_manager.h" + +namespace ge { +static const char *const GE_GLOBAL_STEP = "Variable"; + +Status KernelExTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("KernelExTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + auto kernel_ex_def = task_def.kernel_ex(); + + // 1. 
Copy context from kernelExDef.private to workspace + uint32_t op_index = kernel_ex_def.op_index(); + OpDescPtr op_desc = davinci_model->GetOpByIndex(op_index); + if (op_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Init aicpu task info error, index is out of range!"); + return INTERNAL_ERROR; + } + + vector workspace_data_addrs = ModelUtils::GetWorkspaceDataAddrs(davinci_model->GetRuntimeParam(), op_desc); + if (workspace_data_addrs.empty()) { + GELOGE(FAILED, "workspace_data_addrs is empty."); + return FAILED; + } else { + rtError_t rt_ret = + rtMemcpy(workspace_data_addrs[0], kernel_ex_def.task_info_size(), kernel_ex_def.task_info().data(), + kernel_ex_def.task_info_size(), RT_MEMCPY_HOST_TO_DEVICE); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, GELOGE(FAILED, "rtMemcpy error: 0x%X", rt_ret); return FAILED); + } + + // 2. Reconstruct kernelExDef.args to STR_FWK_OP_KERNEL + STR_FWK_OP_KERNEL fwk_op_kernel; + if (sizeof(STR_FWK_OP_KERNEL) != kernel_ex_def.args_size()) { + GELOGE(FAILED, "sizeof STR_FWK_OP_KERNEL is: %zu, but args_size is: %u", sizeof(STR_FWK_OP_KERNEL), + kernel_ex_def.args_size()); + return FAILED; + } + errno_t sec_ret = + memcpy_s(&fwk_op_kernel, sizeof(STR_FWK_OP_KERNEL), kernel_ex_def.args().data(), kernel_ex_def.args_size()); + if (sec_ret != EOK) { + GELOGE(FAILED, "memcpy failed, ret: %d", sec_ret); + return FAILED; + } + + // 2.1 get loop cond variable for tensor array write + uint64_t loop_cond_addr = 0; + OpDescPtr loop_cond_node = davinci_model->GetVariableOp(GE_GLOBAL_STEP); + if (loop_cond_node != nullptr) { + vector v_loop_cond_addr = ModelUtils::GetOutputDataAddrs(davinci_model->GetRuntimeParam(), loop_cond_node); + if (v_loop_cond_addr.size() != 0) { + loop_cond_addr = static_cast(reinterpret_cast(v_loop_cond_addr[0])); + } + } + + // 3. 
Set workspaceaddr, inputOutputDataAddr + uint64_t workspace_base_addr = reinterpret_cast(workspace_data_addrs[0]); + vector input_addrs = ModelUtils::GetInputDataAddrs(davinci_model->GetRuntimeParam(), op_desc); + vector output_addrs = ModelUtils::GetOutputDataAddrs(davinci_model->GetRuntimeParam(), op_desc); + vector io_addrs; + io_addrs.insert(io_addrs.end(), input_addrs.begin(), input_addrs.end()); + io_addrs.insert(io_addrs.end(), output_addrs.begin(), output_addrs.end()); + + auto addrs_size = sizeof(uint64_t) * (io_addrs.size()); + if (addrs_size > 0) { + rtError_t rt_ret = rtMalloc(&input_output_addr_, addrs_size, RT_MEMORY_HBM); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, GELOGE(RT_FAILED, "rtMalloc error, ret: 0x%X", rt_ret); return RT_FAILED;) + + rt_ret = rtMemcpy(input_output_addr_, addrs_size, io_addrs.data(), addrs_size, RT_MEMCPY_HOST_TO_DEVICE); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, GELOGE(rt_ret, "rtMemcpy to input_output_addr_ error: 0x%X", rt_ret); + return FAILED;) + + if (PropertiesManager::Instance().IsLayerNeedDump(davinci_model->Name(), op_desc->GetName())) { + dump_flag_ = RT_KERNEL_DUMPFLAG; + dump_args_ = reinterpret_cast(reinterpret_cast(input_output_addr_) + + sizeof(void *) * input_addrs.size()); + } + } + + uint64_t input_output_addr = static_cast(reinterpret_cast(input_output_addr_)); + fwk_op_kernel.fwkKernelBase.fwk_kernel.workspaceBaseAddr = workspace_base_addr; + fwk_op_kernel.fwkKernelBase.fwk_kernel.inputOutputAddr = input_output_addr; + fwk_op_kernel.fwkKernelBase.fwk_kernel.stepIDAddr = loop_cond_addr; + + // 4. Create session + auto session_id = fwk_op_kernel.fwkKernelBase.fwk_kernel.sessionID; + GELOGI("session_id: %lu", session_id); + GE_CHECK_NOTNULL(ModelManager::GetInstance()); + GE_IF_BOOL_EXEC(ModelManager::GetInstance()->CreateAicpuSession(session_id) != SUCCESS, + GELOGE(ret, "CreateAicpuSession error."); + return ret;) + + // 5. 
Return result + rtError_t rt_ret = rtMalloc(&kernel_buf_, sizeof(STR_FWK_OP_KERNEL), RT_MEMORY_HBM); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, GELOGE(rt_ret, "rtMalloc error: 0x%X", rt_ret); return FAILED;) + + rt_ret = rtMemcpy(kernel_buf_, sizeof(STR_FWK_OP_KERNEL), static_cast(&fwk_op_kernel), + sizeof(STR_FWK_OP_KERNEL), RT_MEMCPY_HOST_TO_DEVICE); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, GELOGE(rt_ret, "rtMemcpy error, ret: Ox%X", rt_ret); return FAILED;) + davinci_model->SetZeroCopyAddr(io_addrs, input_output_addr_); + + kernel_buf_size_ = sizeof(STR_FWK_OP_KERNEL); + davinci_model_ = davinci_model; + return SUCCESS; +} + +Status KernelExTaskInfo::Distribute() { + GELOGI("KernelExTaskInfo Distribute Start."); + rtError_t rt_ret = rtKernelLaunchEx(kernel_buf_, kernel_buf_size_, dump_flag_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + if (davinci_model_ == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model_ is null."); + return PARAM_INVALID; + } + + uint32_t taskid = 0; + rt_ret = rtModelGetTaskId(davinci_model_->GetRtModelHandle(), &taskid); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + task_id_ = taskid; + + return SUCCESS; +} + +Status KernelExTaskInfo::Release() { + Status ret = SUCCESS; + if (kernel_buf_ != nullptr) { + rtError_t rt_ret = rtFree(kernel_buf_); + if (rt_ret != RT_ERROR_NONE) { + GELOGW("rtFree error, ret: 0x%X", rt_ret); + ret = FAILED; + } else { + kernel_buf_ = nullptr; + } + } + if (input_output_addr_ != nullptr) { + rtError_t rt_ret = rtFree(input_output_addr_); + if (rt_ret != RT_ERROR_NONE) { + GELOGW("rtFree error, ret: 0x%X", rt_ret); + ret = FAILED; + } else { + input_output_addr_ = nullptr; + } + } + return ret; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_KERNEL_EX, KernelExTaskInfo); +} // namespace ge diff --git 
a/src/ge/graph/load/new_model_manager/task_info/kernel_ex_task_info.h b/src/ge/graph/load/new_model_manager/task_info/kernel_ex_task_info.h new file mode 100644 index 00000000..a1fd541f --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/kernel_ex_task_info.h @@ -0,0 +1,59 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_KERNEL_EX_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_KERNEL_EX_TASK_INFO_H_ + +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class KernelExTaskInfo : public TaskInfo { + public: + KernelExTaskInfo() + : task_id_(0), + dump_flag_(RT_KERNEL_DEFAULT), + kernel_buf_size_(0), + davinci_model_(nullptr), + kernel_buf_(nullptr), + input_output_addr_(nullptr), + dump_args_(nullptr) {} + + ~KernelExTaskInfo() override {} + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + Status Release() override; + + uint32_t GetTaskID() override { return task_id_; } + + uintptr_t GetDumpArgs() override { + auto ret = reinterpret_cast(dump_args_); + return ret; + } + + private: + uint32_t task_id_; + uint32_t dump_flag_; + uint32_t kernel_buf_size_; + DavinciModel *davinci_model_; + void *kernel_buf_; + void *input_output_addr_; + void *dump_args_; +}; +} // namespace ge +#endif // 
GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_KERNEL_EX_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/kernel_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/kernel_task_info.cc new file mode 100644 index 00000000..1115969a --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/kernel_task_info.cc @@ -0,0 +1,706 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/kernel_task_info.h" +#include +#include +#include +#include +#include "aicpu/common/aicpu_task_struct.h" +#include "common/properties_manager.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/l2_cache_optimize.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "runtime/kernel.h" + +namespace ge { +static constexpr uint8_t kL2LoadToDdr = 1; +static constexpr uint8_t kL2NotLoadToDdr = 0; + +Status KernelTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGD("KernelTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + davinci_model_ = davinci_model; + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + domi::KernelDef kernel_def = task_def.kernel(); + block_dim_ 
= kernel_def.block_dim(); + args_size_ = kernel_def.args_size(); + // get opcontext stored in model + const domi::KernelContext &context = kernel_def.context(); + // get kernel_type + kernel_type_ = static_cast(context.kernel_type()); + // get bin_file_key + OpDescPtr op_desc = davinci_model->GetOpByIndex(context.op_index()); + if (op_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Get op_desc failed, index is out of range!"); + return INTERNAL_ERROR; + } + string session_graph_model_id; + davinci_model->GetUniqueId(op_desc, session_graph_model_id); + const char *bin_file_key = DavinciModel::GetRegisterStub(op_desc->GetName(), session_graph_model_id); + // new aicpu kernel(rtCpuKernelLaunch) no need to check function + if (kernel_type_ == cce::ccKernelType::CCE_AI_CORE) { + rtError_t rt_ret; + rt_ret = rtGetFunctionByName(const_cast(kernel_def.stub_func().c_str()), &stub_func_); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, + GELOGE(RT_FAILED, + "execute rtGetFunctionByName failed. stub_func: %s", + kernel_def.stub_func().c_str()); + return RT_FAILED;); + } else if (kernel_type_ != cce::ccKernelType::AI_CPU) { + rtError_t rt_ret; + rt_ret = rtGetFunctionByName(bin_file_key, &stub_func_); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, + GELOGE(RT_FAILED, "execute rtGetFunctionByName failed. 
bin_file_key: %s", bin_file_key); + return RT_FAILED;); + } + + if (context.origin_op_index_size() > CC_FUSION_OP_MAX) { + GELOGE(PARAM_INVALID, "context.origin_op_index_size() is more than CC_FUSION_OP_MAX(%d)", CC_FUSION_OP_MAX); + return PARAM_INVALID; + } + + for (int32_t i = 0; i < context.origin_op_index_size(); ++i) { + ctx_.opIndex2[i] = context.origin_op_index(i); + } + ctx_.opCount = context.origin_op_index_size(); + if (kernel_type_ == cce::ccKernelType::TE) { + ctx_.opIndex = context.op_index(); + uint16_t *args_offset_tmp = reinterpret_cast(const_cast(context.args_offset().data())); + if (context.args_offset().size() / sizeof(uint16_t) < 1) { + GELOGE(FAILED, "context.args_offset().size() / sizeof(uint16_t) less than 1"); + return FAILED; + } + + ret = InitTVMTask(davinci_model, args_offset_tmp[0], kernel_def); + } else if (kernel_type_ == cce::ccKernelType::CUSTOMIZED) { + ret = InitAICPUCustomTask(davinci_model->GetOpList(), context.op_index(), kernel_def); + } else if (kernel_type_ == cce::ccKernelType::AI_CPU) { + ret = InitAicpuTask(davinci_model->GetOpList(), context.op_index(), kernel_def); + } else { + if (kernel_def.args().empty() || args_size_ == 0) { + GELOGE(FAILED, "args is null."); + return FAILED; + } + ret = InitCceTask(davinci_model, kernel_def); + } + GELOGD("KernelTaskInfo Init end."); + + return ret; +} + +Status KernelTaskInfo::Distribute() { + GELOGD("KernelTaskInfo Distribute Start."); + rtError_t rt_ret; + + if (kernel_type_ == cce::ccKernelType::AI_CPU) { + // blockDim is reserved parameter, set to 1 + rt_ret = + rtCpuKernelLaunchWithFlag(reinterpret_cast(so_name_.c_str()), + reinterpret_cast(kernel_name_.c_str()), + 1, args_, args_size_, nullptr, stream_, dump_flag_); + } else { + rt_ret = rtKernelLaunchWithFlag(stub_func_, block_dim_, args_, args_size_, static_cast(sm_desc_), + stream_, dump_flag_); + } + + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + 
uint32_t taskid = 0; + GE_CHECK_NOTNULL(davinci_model_); + rt_ret = rtModelGetTaskId(davinci_model_->GetRtModelHandle(), &taskid); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + task_id_ = taskid; + + return SUCCESS; +} + +Status KernelTaskInfo::Release() { + FreeRtMem(&args_); + FreeRtMem(&flowtable_); + FreeRtMem(&custom_info_.input_descs); + FreeRtMem(&custom_info_.input_addrs); + FreeRtMem(&custom_info_.output_descs); + FreeRtMem(&custom_info_.output_addrs); + FreeRtMem(&custom_info_.attr_handle); + + if (ctx_.argsOffset != nullptr) { + delete[] ctx_.argsOffset; + ctx_.argsOffset = nullptr; + } + + rtError_t ret = (sm_desc_ != nullptr) ? rtMemFreeManaged(sm_desc_) : RT_ERROR_NONE; + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", static_cast(ret)); + return FAILED; + } + sm_desc_ = nullptr; + + return SUCCESS; +} + +Status KernelTaskInfo::InitTVMTask(DavinciModel *davinci_model, uint16_t offset, const domi::KernelDef &kernel_def) { + GELOGD("Do InitTVMTask"); + GE_CHECK_NOTNULL(davinci_model); + // get tvm op desc + OpDescPtr op_desc = davinci_model->GetOpByIndex(ctx_.opIndex); + if (op_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "InitTVMTaskInfo error, index is out of range!"); + return INTERNAL_ERROR; + } + + // Update Stub + // When training, when the the second call to DavinciModel::init() comes here, stub_func_ is already valid, + // and does not need to be modified. + // When inferencing, stub_func_ is different from dynamic-registration to runtime, and needs to be modified. 
+ string session_graph_model_id; + const char *bin_file_key; + davinci_model->GetUniqueId(op_desc, session_graph_model_id); + bin_file_key = DavinciModel::GetRegisterStub(op_desc->GetName(), session_graph_model_id); + rtError_t rt_ret; + rt_ret = rtQueryFunctionRegistered(const_cast(bin_file_key)); + if (rt_ret != RT_ERROR_NONE) { + stub_func_ = const_cast(bin_file_key); + } + + const vector input_data_addrs = ModelUtils::GetInputDataAddrs(davinci_model->GetRuntimeParam(), op_desc); + const vector output_data_addrs = ModelUtils::GetOutputDataAddrs(davinci_model->GetRuntimeParam(), op_desc); + const vector workspace_data_addrs = + ModelUtils::GetWorkspaceDataAddrs(davinci_model->GetRuntimeParam(), op_desc); + vector tensor_device_addrs; + + tensor_device_addrs.insert(tensor_device_addrs.end(), input_data_addrs.begin(), input_data_addrs.end()); + tensor_device_addrs.insert(tensor_device_addrs.end(), output_data_addrs.begin(), output_data_addrs.end()); + tensor_device_addrs.insert(tensor_device_addrs.end(), workspace_data_addrs.begin(), workspace_data_addrs.end()); + + // malloc args memory + rt_ret = rtMalloc(&args_, args_size_, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + // copy orign args + rt_ret = rtMemcpy(args_, args_size_, static_cast(const_cast(kernel_def.args().data())), args_size_, + RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + if (args_size_ <= static_cast(offset) || + args_size_ - static_cast(offset) < static_cast(sizeof(void *) * tensor_device_addrs.size())) { + GELOGE(FAILED, "offset >= kernelInfo.argsSize or copy content beyond applied memory."); + return FAILED; + } + + // copy args + rt_ret = rtMemcpy(static_cast(args_) + offset, sizeof(void *) * tensor_device_addrs.size(), + tensor_device_addrs.data(), sizeof(void *) * tensor_device_addrs.size(), 
RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + if (PropertiesManager::Instance().IsLayerNeedDump(davinci_model->Name(), op_desc->GetName())) { + dump_flag_ = RT_KERNEL_DUMPFLAG; + dump_args_ = reinterpret_cast(reinterpret_cast(args_) + offset + + sizeof(void *) * input_data_addrs.size()); + } + + davinci_model_->SetZeroCopyAddr(tensor_device_addrs, static_cast(args_) + offset); + // update origin l2 data + string sm_desc = kernel_def.sm_desc(); + char *sm_contrl = nullptr; + rtL2Ctrl_t *l2_ctrl_info = nullptr; + if (!sm_desc.empty()) { + sm_contrl = const_cast(sm_desc.data()); + l2_ctrl_info = reinterpret_cast(sm_contrl); + + uint64_t gen_base_addr = davinci_model->GetRtBaseAddr(); + + // There is no weight for te op now. Update L2_mirror_addr by data memory base. + uint64_t data_base_addr = (uint64_t)(uintptr_t)davinci_model->MemBase() - (uint64_t)gen_base_addr; + const uint32_t l2_ctrl_info_data_count = 8; + for (uint32_t data_index = 0; data_index < l2_ctrl_info_data_count; ++data_index) { + if (l2_ctrl_info->data[data_index].L2_mirror_addr != 0) { + l2_ctrl_info->data[data_index].L2_mirror_addr += data_base_addr; + } + } + + rt_ret = rtMemAllocManaged(&sm_desc_, sm_desc.size(), RT_MEMORY_SPM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtMemcpy(sm_desc_, sm_desc.size(), sm_desc.data(), sm_desc.size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + } + GELOGD("Do InitTVMTask end"); + return SUCCESS; +} + +Status KernelTaskInfo::InitAICPUCustomTask(const std::map> &op_list, + uint32_t op_index, const domi::KernelDef &kernel_def) { + GELOGI("Do InitAICPUCustomTask"); + + auto iter = op_list.find(op_index); + if (iter == op_list.end()) { + GELOGE(INTERNAL_ERROR, "index is out 
of range, index: %u", op_index); + return INTERNAL_ERROR; + } + + auto op_desc = iter->second; + + const domi::KernelContext &context = kernel_def.context(); + const uint32_t kCustomAicpuArgsLen = 5; + ctx_.argsOffset = new (std::nothrow) uint16_t[kCustomAicpuArgsLen](); + if (ctx_.argsOffset == nullptr) { + GELOGE(PARAM_INVALID, "ctx_.argsOffset is null!"); + return PARAM_INVALID; + } + + if (context.args_offset().size() / sizeof(uint16_t) < kCustomAicpuArgsLen) { + GELOGE(PARAM_INVALID, "context.args_offset().size() / sizeof(uint16_t) is less than kCustomAicpuArgsLen"); + return PARAM_INVALID; + } + + for (uint32_t i = 0; i < kCustomAicpuArgsLen; ++i) { + ctx_.argsOffset[i] = (reinterpret_cast(const_cast(context.args_offset().data())))[i]; + } + + const std::vector input_data_addrs = + ModelUtils::GetInputDataAddrs(davinci_model_->GetRuntimeParam(), op_desc); + const std::vector output_data_addrs = + ModelUtils::GetOutputDataAddrs(davinci_model_->GetRuntimeParam(), op_desc); + + Status ret = StoreInputOutputTensor(input_data_addrs, output_data_addrs, ModelUtils::GetInputDescs(op_desc), + ModelUtils::GetOutputDescs(op_desc)); + if (ret != SUCCESS) { + GELOGE(ret, "StoreInputOutputTensor Failed"); + return ret; + } + + // attrHandle + Buffer buffer; + if (!AttrUtils::GetBytes(op_desc, ATTR_NAME_OPATTR, buffer)) { + GELOGE(FAILED, "can't find opattr bytes!."); + return FAILED; + } + + uint32_t op_attr_size = buffer.GetSize(); + if (op_attr_size == 0) { + GELOGE(PARAM_INVALID, "param op_attr_size is out of range"); + return PARAM_INVALID; + } + + rtError_t rt_ret = rtMalloc(&custom_info_.attr_handle, op_attr_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtMemcpy(custom_info_.attr_handle, op_attr_size, buffer.GetData(), op_attr_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return 
RT_FAILED; + } + + // args + char *args = const_cast(kernel_def.args().data()); + + for (uint32_t i = 0; i < kCustomAicpuArgsLen; ++i) { + if (kernel_def.args().size() < ((size_t)ctx_.argsOffset[i] + sizeof(uint64_t))) { + GELOGE(FAILED, "ctx.argsOffset[%u]: %u + sizeof(uint64_t): %zu >= kernelDef.args().size():%zu", i, + (uint32_t)ctx_.argsOffset[i], sizeof(uint64_t), kernel_def.args().size()); + return FAILED; + } + } + *(reinterpret_cast(args + ctx_.argsOffset[0])) = + reinterpret_cast(custom_info_.input_descs); // arg 0 + *(reinterpret_cast(args + ctx_.argsOffset[1])) = + reinterpret_cast(custom_info_.input_addrs); // arg 1 + *(reinterpret_cast(args + ctx_.argsOffset[2])) = + reinterpret_cast(custom_info_.output_descs); // arg 2 + *(reinterpret_cast(args + ctx_.argsOffset[3])) = + reinterpret_cast(custom_info_.output_addrs); // arg 3 + *(reinterpret_cast(args + ctx_.argsOffset[4])) = + reinterpret_cast(custom_info_.attr_handle); // arg 4 + + rt_ret = rtMalloc(&args_, args_size_, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtMemcpy(args_, kernel_def.args_size(), kernel_def.args().data(), kernel_def.args_size(), + RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + davinci_model_->SetZeroCopyAddr(input_data_addrs, custom_info_.input_addrs); + davinci_model_->SetZeroCopyAddr(output_data_addrs, custom_info_.output_addrs); + return SUCCESS; +} + +Status KernelTaskInfo::InitCceTask(DavinciModel *davinci_model, const domi::KernelDef &kernel_def) { + GELOGI("Do InitCCETask"); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + Status ret = SetContext(kernel_def); + if (ret != SUCCESS) { + GELOGE(ret, "SetContext Fail."); + return ret; + } + + string flowtable = kernel_def.flowtable(); + const 
domi::KernelContext &context = kernel_def.context(); + + if (context.is_flowtable()) { + if (flowtable.empty()) { + GELOGE(FAILED, "flowtable is null."); + return FAILED; + } + flowtable_size_ = flowtable.size(); + } + + // get smDesc stored in model + string sm_desc = kernel_def.sm_desc(); + uint64_t sm_contrl_size = sm_desc.empty() ? 0 : sizeof(rtSmDesc_t); + + // Passing the memory info when the offline-model-generated to the CCE, which uses this info for address refresh + ctx_.genDataBaseAddr = davinci_model->GetRtBaseAddr(); + ctx_.genDataBaseSize = davinci_model->TotalMemSize(); + ctx_.genWeightBaseAddr = davinci_model->GetRtWeightAddr(); + ctx_.genWeightBaseSize = davinci_model->TotalWeightsMemSize(); + ctx_.genVariableBaseAddr = davinci_model->GetRtVarAddr(); + ctx_.genVariableBaseSize = davinci_model->TotalVarMemSize(); + ctx_.l2ctrlSize = sm_contrl_size; + + if (UpdateCceArgs(sm_desc, flowtable, davinci_model, kernel_def) != SUCCESS) { + GELOGE(ret, "update cce args fail"); + return ret; + } + + // flowtable + ret = SetFlowtable(flowtable, kernel_def); + if (ret != SUCCESS) { + GELOGE(ret, "SetFlowtable Fail"); + return ret; + } + + // args + rtError_t rt_ret = rtMalloc(&args_, kernel_def.args_size(), RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtMemcpy(args_, kernel_def.args_size(), kernel_def.args().data(), kernel_def.args_size(), + RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + // L2 + if (!sm_desc.empty()) { + rt_ret = rtMemAllocManaged(&sm_desc_, sm_desc.size(), RT_MEMORY_SPM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtMemcpy(sm_desc_, sm_desc.size(), sm_desc.data(), sm_desc.size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + 
GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + } + return SUCCESS; +} + +Status KernelTaskInfo::InitAicpuTask(const std::map &op_list, uint32_t op_index, + const domi::KernelDef &kernel_def) { + GELOGI("Do InitAicpuTask"); + so_name_ = kernel_def.so_name(); + kernel_name_ = kernel_def.kernel_name(); + + auto iter = op_list.find(op_index); + if (iter == op_list.end()) { + GELOGE(INTERNAL_ERROR, "index is out of range, index: %u", op_index); + return INTERNAL_ERROR; + } + + // copy args to new host memory + std::unique_ptr args_addr(new (std::nothrow) uint8_t[args_size_]); + errno_t sec_ret = memcpy_s(static_cast(args_addr.get()), args_size_, + static_cast(kernel_def.args().data()), args_size_); + if (sec_ret != EOK) { + GELOGE(FAILED, "memcpy failed, ret: %d", sec_ret); + return FAILED; + } + + OpDescPtr op_desc = iter->second; + vector input_addrs = ModelUtils::GetInputDataAddrs(davinci_model_->GetRuntimeParam(), op_desc); + vector output_addrs = ModelUtils::GetOutputDataAddrs(davinci_model_->GetRuntimeParam(), op_desc); + vector io_addrs; + io_addrs.insert(io_addrs.end(), input_addrs.begin(), input_addrs.end()); + io_addrs.insert(io_addrs.end(), output_addrs.begin(), output_addrs.end()); + if (!io_addrs.empty()) { + // refresh io addrs + uintptr_t io_addr = + reinterpret_cast(args_addr.get()) + static_cast(sizeof(aicpu::AicpuParamHead)); + auto addrs_size = sizeof(uint64_t) * (io_addrs.size()); + sec_ret = memcpy_s(reinterpret_cast(io_addr), addrs_size, static_cast(io_addrs.data()), addrs_size); + if (sec_ret != EOK) { + GELOGE(FAILED, "memcpy failed, ret: %d", sec_ret); + return FAILED; + } + } + + // malloc device memory for args + rtError_t rt_ret = rtMalloc(static_cast(&args_), args_size_, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api(rtMalloc) failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + // copy args to device + rt_ret = rtMemcpy(args_, args_size_, 
static_cast(args_addr.get()), args_size_, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api(rtMemcpy) failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + if (PropertiesManager::Instance().IsLayerNeedDump(davinci_model_->Name(), op_desc->GetName())) { + dump_flag_ = RT_KERNEL_DUMPFLAG; + dump_args_ = reinterpret_cast(reinterpret_cast(args_) + sizeof(aicpu::AicpuParamHead) + + sizeof(void *) * input_addrs.size()); + } + + davinci_model_->SetZeroCopyAddr(io_addrs, static_cast(args_) + sizeof(aicpu::AicpuParamHead)); + return SUCCESS; +} + +Status KernelTaskInfo::StoreInputOutputTensor(const std::vector &input_data_addrs, + const std::vector &output_data_addrs, + const std::vector<::tagCcAICPUTensor> &input_descs, + const std::vector<::tagCcAICPUTensor> &output_descs) { + auto input_size = input_descs.size(); + auto output_size = output_descs.size(); + + // inputDescs + rtError_t rt_ret = rtMalloc(&custom_info_.input_descs, sizeof(opTensor_t) * input_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + for (std::size_t i = 0; i < input_size; ++i) { + rt_ret = rtMemcpy(static_cast(custom_info_.input_descs) + i, sizeof(opTensor_t), + const_cast(&input_descs[i]), sizeof(opTensor_t), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + } + + // inputAddrs + rt_ret = rtMalloc(&custom_info_.input_addrs, sizeof(opTensor_t) * input_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + if (!input_data_addrs.empty()) { + rt_ret = rtMemcpy(custom_info_.input_addrs, sizeof(void *) * input_size, &input_data_addrs[0], + sizeof(void *) * input_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", 
rt_ret); + return RT_FAILED; + } + } + + // outputDescs + rt_ret = rtMalloc(&custom_info_.output_descs, sizeof(opTensor_t) * output_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + for (std::size_t i = 0; i < output_size; ++i) { + rt_ret = rtMemcpy(static_cast(custom_info_.output_descs) + i, sizeof(opTensor_t), + const_cast(&input_descs[i]), sizeof(opTensor_t), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + } + + // outputAddrs + rt_ret = rtMalloc(&custom_info_.output_addrs, sizeof(opTensor_t) * output_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + if (!output_data_addrs.empty()) { + rt_ret = rtMemcpy(custom_info_.output_addrs, sizeof(void *) * output_size, &output_data_addrs[0], + sizeof(void *) * output_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + } + + return SUCCESS; +} + +Status KernelTaskInfo::SetContext(const domi::KernelDef &kernel_def) { + const domi::KernelContext &context = kernel_def.context(); + ctx_.kernelType = static_cast(context.kernel_type()); + ctx_.opId = context.op_id(); + ctx_.kernelFuncId = context.kernel_func_id(); + ctx_.isFlowtable = context.is_flowtable(); + ctx_.argsCount = context.args_count(); + if (ctx_.argsCount == 0) { + GELOGE(INTERNAL_ERROR, "check argsCount fail:%u.", ctx_.argsCount); + return INTERNAL_ERROR; + } + + if (context.args_offset().size() / sizeof(uint16_t) < ctx_.argsCount) { + GELOGE(PARAM_INVALID, "param [context.args_offset().size() / sizeof(uint16_t)] is less than [ctx_.argsCount]"); + return PARAM_INVALID; + } + + // ctx_.argsOffset stores the offset of the internal information of agrs_, equal to the ctx_.argsCount + 
ctx_.argsOffset = new (std::nothrow) uint16_t[ctx_.argsCount](); + if (ctx_.argsOffset == nullptr) { + GELOGE(PARAM_INVALID, "(param [ctx_.argsOffset] must not be null."); + return PARAM_INVALID; + } + + for (uint32_t i = 0; i < ctx_.argsCount; ++i) { + ctx_.argsOffset[i] = (reinterpret_cast(const_cast(context.args_offset().data())))[i]; + } + + return SUCCESS; +} + +void KernelTaskInfo::FreeRtMem(void **ptr) { + if (ptr == nullptr || *ptr == nullptr) { + return; + } + rtError_t ret = rtFree(*ptr); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", ret); + } + + *ptr = nullptr; +} + +Status KernelTaskInfo::UpdateCceArgs(std::string &sm_desc, std::string &flowtable, DavinciModel *davinci_model, + const domi::KernelDef &kernel_def) { + GE_CHECK_NOTNULL(davinci_model); + const domi::KernelContext &context = kernel_def.context(); + char *sm_contrl = nullptr; + + if (!sm_desc.empty()) { + sm_contrl = const_cast(sm_desc.data()); + } + + uint64_t data_base_addr = reinterpret_cast(reinterpret_cast(davinci_model->MemBase())) - + davinci_model->GetRtBaseAddr(); + uint64_t weight_base_addr = reinterpret_cast(reinterpret_cast(davinci_model->WeightsMemBase())) - + davinci_model->GetRtWeightAddr(); + uint64_t var_base_addr = reinterpret_cast(reinterpret_cast(davinci_model->VarMemBase())) - + davinci_model->GetRtVarAddr(); + cce::ccStatus_t cc_ret; + if (context.is_flowtable()) { + cc_ret = ccUpdateKernelArgs(ctx_, data_base_addr, weight_base_addr, var_base_addr, + const_cast(flowtable.data()), kernel_def.flowtable().size(), sm_contrl); + } else { + cc_ret = ccUpdateKernelArgs(ctx_, data_base_addr, weight_base_addr, var_base_addr, + const_cast(kernel_def.args().data()), args_size_, sm_contrl); + } + + if (cc_ret != cce::CC_STATUS_SUCCESS) { + GELOGE(CCE_FAILED, "Call cce api failed, ret: 0x%X", cc_ret); + return CCE_FAILED; + } + + return SUCCESS; +} + +Status KernelTaskInfo::SetFlowtable(std::string &flowtable, const domi::KernelDef &kernel_def) { 
+ const domi::KernelContext &context = kernel_def.context(); + if (context.is_flowtable()) { + rtError_t rt_ret = rtMalloc(&flowtable_, flowtable.size(), RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + rt_ret = rtMemcpy(flowtable_, flowtable.size(), flowtable.data(), flowtable.size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + // modify flowtable addr in args + char *args = const_cast(kernel_def.args().data()); + + if (kernel_def.args().size() < + ((reinterpret_cast(const_cast(context.args_offset().data())))[0] + sizeof(uint64_t))) { + GELOGE(FAILED, "(context.args_offset().data()))[0]:%u + sizeof(uint64_t):%zu > kernelDef.args().size():%zu", + (uint32_t)((reinterpret_cast(const_cast(context.args_offset().data())))[0]), + sizeof(uint64_t), kernel_def.args().size()); + return FAILED; + } + + *(reinterpret_cast( + args + (reinterpret_cast(const_cast(context.args_offset().data())))[0])) = + reinterpret_cast(flowtable_); + } + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_KERNEL, KernelTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/kernel_task_info.h b/src/ge/graph/load/new_model_manager/task_info/kernel_task_info.h new file mode 100644 index 00000000..03827bec --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/kernel_task_info.h @@ -0,0 +1,123 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_KERNEL_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_KERNEL_TASK_INFO_H_ + +#include +#include +#include +#include + +#include "graph/load/new_model_manager/task_info/task_info.h" +#include "graph/op_desc.h" +namespace ge { +class KernelTaskInfo : public TaskInfo { + public: + friend class DavinciModel; + + KernelTaskInfo() + : ctx_(), + stub_func_(nullptr), + args_(nullptr), + sm_desc_(nullptr), + flowtable_(nullptr), + block_dim_(0), + args_size_(0), + flowtable_size_(0), + task_id_(0), + so_name_(""), + kernel_name_(""), + kernel_type_(cce::ccKernelType::CCE_AI_CORE), + dump_flag_(RT_KERNEL_DEFAULT), + dump_args_(nullptr), + davinci_model_(nullptr) {} + + ~KernelTaskInfo() override { + davinci_model_ = nullptr; + stub_func_ = nullptr; + sm_desc_ = nullptr; + flowtable_ = nullptr; + args_ = nullptr; + } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + Status Release() override; + + cce::ccOpContext *GetCtx() override { return &ctx_; } + + uint32_t GetTaskID() override { return task_id_; } + + uintptr_t GetDumpArgs() override { + auto ret = reinterpret_cast(dump_args_); + return ret; + } + + cce::ccOpContext ctx_; + + private: + Status InitTVMTask(DavinciModel *davinci_model, uint16_t offset, const domi::KernelDef &kernel_def); + + Status InitAICPUCustomTask(const std::map> &op_list, uint32_t op_index, + const domi::KernelDef &kernel_def); + + Status InitCceTask(DavinciModel *davinci_model, const 
domi::KernelDef &kernel_def); + + Status InitAicpuTask(const std::map &op_list, uint32_t op_index, + const domi::KernelDef &kernel_def); + + Status StoreInputOutputTensor(const std::vector &input_data_addrs, + const std::vector &output_data_addrs, + const std::vector<::tagCcAICPUTensor> &input_descs, + const std::vector<::tagCcAICPUTensor> &output_descs); + + Status SetContext(const domi::KernelDef &kernel_def); + + Status UpdateCceArgs(std::string &sm_desc, std::string &flowtable, DavinciModel *davinci_model, + const domi::KernelDef &kernel_def); + + Status SetFlowtable(std::string &flowtable, const domi::KernelDef &kernel_def); + + static void FreeRtMem(void **ptr); + + void *stub_func_; + void *args_; + void *sm_desc_; + void *flowtable_; + uint32_t block_dim_; + uint32_t args_size_; + uint32_t flowtable_size_; + uint32_t task_id_; + std::string so_name_; + std::string kernel_name_; + cce::ccKernelType kernel_type_; + uint32_t dump_flag_; + void *dump_args_; + DavinciModel *davinci_model_; + + struct AICPUCustomInfo { + void *input_descs = nullptr; + void *input_addrs = nullptr; + void *output_descs = nullptr; + void *output_addrs = nullptr; + void *attr_handle = nullptr; + } custom_info_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_KERNEL_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/label_goto_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/label_goto_task_info.cc new file mode 100644 index 00000000..0aece056 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/label_goto_task_info.cc @@ -0,0 +1,54 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/label_goto_task_info.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +Status LabelGotoTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("LabelGotoTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + if (!davinci_model->GetLabelList().empty()) { + label_ = davinci_model->GetLabelList().back(); + } + + return SUCCESS; +} + +Status LabelGotoTaskInfo::Distribute() { + GELOGI("LabelGotoTaskInfo Distribute Start."); + rtError_t rt_ret = rtLabelGoto(label_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_LABEL_GOTO, LabelGotoTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/label_goto_task_info.h b/src/ge/graph/load/new_model_manager/task_info/label_goto_task_info.h new file mode 100644 index 00000000..ac78cbe2 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/label_goto_task_info.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_LABEL_GOTO_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_LABEL_GOTO_TASK_INFO_H_ + +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class LabelGotoTaskInfo : public TaskInfo { + public: + LabelGotoTaskInfo() : label_(nullptr) {} + + ~LabelGotoTaskInfo() override { label_ = nullptr; } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + void *label_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_LABEL_GOTO_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/label_set_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/label_set_task_info.cc new file mode 100644 index 00000000..397a21bd --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/label_set_task_info.cc @@ -0,0 +1,60 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/label_set_task_info.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +Status LabelSetTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("LabelSetTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + uint32_t label_id = task_def.label_id(); + if (label_id > davinci_model->BatchNum()) { + GELOGE(PARAM_INVALID, "labelId is invalid! labelId=%u, labelListSize=%u", label_id, davinci_model->BatchNum()); + return PARAM_INVALID; + } + + if (!davinci_model->GetLabelList().empty()) { + label_ = davinci_model->GetLabelList()[label_id]; + } + + return SUCCESS; +} + +Status LabelSetTaskInfo::Distribute() { + GELOGI("LabelSetTaskInfo Distribute Start."); + rtError_t rt_ret = rtLabelSet(label_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_LABEL_SET, LabelSetTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/label_set_task_info.h b/src/ge/graph/load/new_model_manager/task_info/label_set_task_info.h new file mode 100644 index 00000000..c68ffb98 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/label_set_task_info.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_LABEL_SET_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_LABEL_SET_TASK_INFO_H_ + +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class LabelSetTaskInfo : public TaskInfo { + public: + LabelSetTaskInfo() : label_(nullptr) {} + + ~LabelSetTaskInfo() override { label_ = nullptr; } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + void *label_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_LABEL_SET_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/memcpy_async_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/memcpy_async_task_info.cc new file mode 100644 index 00000000..e62228d6 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/memcpy_async_task_info.cc @@ -0,0 +1,108 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/memcpy_async_task_info.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +Status MemcpyAsyncTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("MemcpyAsyncTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + auto memcpy_async_def = task_def.memcpy_async(); + + GELOGI("InitMemcpyAsyncTaskInfo start."); + + uint64_t logic_dst = memcpy_async_def.dst(); + uint64_t logic_src = memcpy_async_def.src(); + + dst_max_ = memcpy_async_def.dst_max(); + + uint64_t update_base_addr = 0; + ret = GetUpdateBaseAddr(davinci_model, logic_src, update_base_addr); + if (ret != SUCCESS) { + return ret; + } + src_ = reinterpret_cast(update_base_addr + logic_src); + + uint64_t mem_base = reinterpret_cast(davinci_model->MemBase()); + uint64_t logic_mem_base = davinci_model->GetRtBaseAddr(); + dst_ = reinterpret_cast(mem_base + (logic_dst - logic_mem_base)); + + count_ = memcpy_async_def.count(); + kind_ = memcpy_async_def.kind(); + + return SUCCESS; +} + +Status MemcpyAsyncTaskInfo::Distribute() { + GELOGI("MemcpyAsyncTaskInfo Distribute Start."); + GELOGI("Distribute MemcpyAsync, dst_max:%lu, count:%lu, kind:%u.", dst_max_, count_, kind_); + + rtError_t rt_ret = rtMemcpyAsync(dst_, dst_max_, src_, count_, static_cast(kind_), stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +Status MemcpyAsyncTaskInfo::GetUpdateBaseAddr(DavinciModel *davinci_model, uint64_t update_addr, uint64_t &base_addr) { + 
GE_CHECK_NOTNULL(davinci_model); + uint64_t data_base_addr = reinterpret_cast(reinterpret_cast(davinci_model->MemBase())) - + davinci_model->GetRtBaseAddr(); + uint64_t weight_base_addr = reinterpret_cast(reinterpret_cast(davinci_model->WeightsMemBase())) - + davinci_model->GetRtWeightAddr(); + uint64_t var_base_addr = reinterpret_cast(reinterpret_cast(davinci_model->VarMemBase())) - + davinci_model->GetRtVarAddr(); + + uint64_t data_base_addr_start = davinci_model->GetRtBaseAddr(); + uint64_t data_base_addr_end = davinci_model->GetRtBaseAddr() + davinci_model->TotalMemSize(); + uint64_t wight_base_addr_start = davinci_model->GetRtWeightAddr(); + uint64_t wight_base_addr_end = davinci_model->GetRtWeightAddr() + davinci_model->TotalWeightsMemSize(); + uint64_t varible_base_addr_start = davinci_model->GetRtVarAddr(); + uint64_t varible_base_addr_end = davinci_model->GetRtVarAddr() + davinci_model->TotalVarMemSize(); + + if ((data_base_addr_start <= update_addr) && (update_addr <= data_base_addr_end)) { + base_addr = data_base_addr; + GELOGI("The update_addr is data address."); + } else if ((wight_base_addr_start <= update_addr) && (update_addr <= wight_base_addr_end)) { + base_addr = weight_base_addr; + GELOGI("The update_addr is weight address."); + } else if ((varible_base_addr_start <= update_addr) && (update_addr <= varible_base_addr_end)) { + base_addr = var_base_addr; + GELOGI("The update_addr is variable address."); + } else if (update_addr != 0) { + base_addr = 0; + GELOGE(PARAM_INVALID, "The update_addr is abnormal."); + return PARAM_INVALID; + } + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_MEMCPY_ASYNC, MemcpyAsyncTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/memcpy_async_task_info.h b/src/ge/graph/load/new_model_manager/task_info/memcpy_async_task_info.h new file mode 100644 index 00000000..02872f34 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/memcpy_async_task_info.h @@ -0,0 +1,45 
@@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_MEMCPY_ASYNC_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_MEMCPY_ASYNC_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class MemcpyAsyncTaskInfo : public TaskInfo { + public: + MemcpyAsyncTaskInfo() : dst_(nullptr), dst_max_(0), src_(nullptr), count_(0), kind_(0) {} + + ~MemcpyAsyncTaskInfo() override { + src_ = nullptr; + dst_ = nullptr; + } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + Status GetUpdateBaseAddr(DavinciModel *davinci_model, uint64_t update_addr, uint64_t &base_addr); + + void *dst_; + uint64_t dst_max_; + void *src_; + uint64_t count_; + uint32_t kind_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_MEMCPY_ASYNC_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/profiler_trace_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/profiler_trace_task_info.cc new file mode 100644 index 00000000..c7b3deca --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/profiler_trace_task_info.cc @@ -0,0 +1,59 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/profiler_trace_task_info.h" + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" + +namespace ge { +Status ProfilerTraceTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("ProfilerTraceTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + auto log_time_stamp_def = task_def.log_timestamp(); + GELOGI("do InitLogTimeStampTaskInfo"); + + log_id_ = log_time_stamp_def.logid(); + notify_ = log_time_stamp_def.notify(); + flat_ = log_time_stamp_def.flat(); + + return SUCCESS; +} + +Status ProfilerTraceTaskInfo::Distribute() { + GELOGI("ProfilerTraceTaskInfo Distribute Start."); + GELOGI("rtProfilerTrace: logid = %lu. 
notify = %d.", log_id_, notify_); + + rtError_t rt_ret = rtProfilerTrace(log_id_, notify_, flat_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_PROFILER_TRACE, ProfilerTraceTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/profiler_trace_task_info.h b/src/ge/graph/load/new_model_manager/task_info/profiler_trace_task_info.h new file mode 100644 index 00000000..ab07eb22 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/profiler_trace_task_info.h @@ -0,0 +1,38 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_PROFILER_TRACE_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_PROFILER_TRACE_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class ProfilerTraceTaskInfo : public TaskInfo { + public: + ProfilerTraceTaskInfo() : log_id_(0), notify_(false), flat_(0) {} + + ~ProfilerTraceTaskInfo() override {} + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + uint64_t log_id_; + bool notify_; + uint32_t flat_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_PROFILER_TRACE_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/stream_active_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/stream_active_task_info.cc new file mode 100644 index 00000000..aa2c3284 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/stream_active_task_info.cc @@ -0,0 +1,83 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/task_info/stream_active_task_info.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/debug/ge_attr_define.h" + +namespace ge { +Status StreamActiveTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("StreamActiveTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return ret; + } + + auto stream_active_def = task_def.stream_active(); + GELOGI("InitStreamActiveTaskInfo start."); + uint32_t op_index = stream_active_def.op_index(); + + uint32_t internal_index = davinci_model->GetFlowctrlIndex(op_index); + + // get StreamActive op + auto op_desc = davinci_model->GetOpList()[op_index]; + GE_CHECK_NOTNULL(op_desc); + std::vector active_stream_index_list; + if (!AttrUtils::GetListInt(op_desc, ATTR_NAME_ACTIVE_STREAM_LIST, active_stream_index_list)) { + GELOGE(INTERNAL_ERROR, "StreamActiveOp get attr ACTIVE_STREAM fail, node name:%s.", op_desc->GetName().c_str()); + return INTERNAL_ERROR; + } + + if (internal_index >= active_stream_index_list.size()) { + GELOGE(INTERNAL_ERROR, "InitStreamSwitchTaskInfo stream id index invalid. index:%u, list size:%zu.", internal_index, + active_stream_index_list.size()); + return INTERNAL_ERROR; + } + + if (active_stream_index_list[internal_index] >= davinci_model->GetStreamList().size()) { + GELOGE(INTERNAL_ERROR, "InitStreamSwitchTaskInfo stream index invalid. 
index:%u, stream list size:%zu.", + active_stream_index_list[internal_index], davinci_model->GetStreamList().size()); + return INTERNAL_ERROR; + } + + active_stream_ = davinci_model->GetStreamList()[active_stream_index_list[internal_index]]; + active_stream_id_ = stream_active_def.active_stream_id(); + + return SUCCESS; +} + +Status StreamActiveTaskInfo::Distribute() { + GELOGI("StreamActiveTaskInfo Distribute Start."); + rtError_t rt_ret = rtStreamActive(active_stream_, stream_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_STREAM_ACTIVE, StreamActiveTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/stream_active_task_info.h b/src/ge/graph/load/new_model_manager/task_info/stream_active_task_info.h new file mode 100644 index 00000000..a75e616e --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/stream_active_task_info.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_STREAM_ACTIVE_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_STREAM_ACTIVE_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class StreamActiveTaskInfo : public TaskInfo { + public: + StreamActiveTaskInfo() : active_stream_(nullptr), active_stream_id_(0) {} + + ~StreamActiveTaskInfo() override { active_stream_ = nullptr; } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + rtStream_t active_stream_; + uint32_t active_stream_id_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_STREAM_ACTIVE_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/stream_switch_task_info.cc b/src/ge/graph/load/new_model_manager/task_info/stream_switch_task_info.cc new file mode 100644 index 00000000..5dd3c061 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/stream_switch_task_info.cc @@ -0,0 +1,115 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/task_info/stream_switch_task_info.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "graph/debug/ge_attr_define.h" + +namespace ge { +namespace { +const uint32_t kTrueBranchStreamNum = 1; +} // namespace + +Status StreamSwitchTaskInfo::Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) { + GELOGI("StreamSwitchTaskInfo Init Start."); + if (davinci_model == nullptr) { + GELOGE(PARAM_INVALID, "davinci_model is null!"); + return PARAM_INVALID; + } + + Status ret = SetStream(task_def.stream_id(), davinci_model->GetStreamList()); + if (ret != SUCCESS) { + return FAILED; + } + + auto stream_switch_def = task_def.stream_switch(); + GELOGI("InitStreamSwitchTaskInfo start."); + uint32_t op_index = stream_switch_def.op_index(); + + // get StreamSwitch op + auto op_desc = davinci_model->GetOpList()[op_index]; + GE_CHECK_NOTNULL(op_desc); + auto input_data_addr = ModelUtils::GetInputDataAddrs(davinci_model->GetRuntimeParam(), op_desc); + if (!input_data_addr.empty() && input_data_addr.size() >= STREAM_SWITCH_INPUT_NUM) { + input_ptr_ = input_data_addr[0]; + value_ptr_ = input_data_addr[1]; + } + + uint32_t cond = 0; + if (!AttrUtils::GetInt(op_desc, ATTR_NAME_STREAM_SWITCH_COND, cond)) { + GELOGE(INTERNAL_ERROR, "StreamSwitchOp get attr STREAM_SWITCH_COND fail."); + return INTERNAL_ERROR; + } + cond_ = static_cast(cond); + + size_t input_size = op_desc->GetInputsSize(); + if (input_data_addr.size() != STREAM_SWITCH_INPUT_NUM || input_size != STREAM_SWITCH_INPUT_NUM) { + GELOGE(INTERNAL_ERROR, "Input num should be %u. 
inputAddr size:%zu, inputDesc size:%zu.", + STREAM_SWITCH_INPUT_NUM, input_data_addr.size(), input_size); + return INTERNAL_ERROR; + } + + vector active_stream_list; + if (!AttrUtils::GetListInt(op_desc, ATTR_NAME_ACTIVE_STREAM_LIST, active_stream_list)) { + GELOGE(INTERNAL_ERROR, "StreamSwitchOp get attr ACTIVE_STREAM_LIST fail."); + return INTERNAL_ERROR; + } + + if (active_stream_list.size() != kTrueBranchStreamNum) { + GELOGE(FAILED, "Stream num of switch true branch must be %u.", kTrueBranchStreamNum); + return FAILED; + } + + size_t true_stream_index = active_stream_list.front(); + if (true_stream_index >= davinci_model->GetStreamList().size()) { + GELOGE(INTERNAL_ERROR, "InitStreamSwitchTaskInfo stream index invalid. index:%zu, stream list size:%zu.", + true_stream_index, davinci_model->GetStreamList().size()); + return INTERNAL_ERROR; + } + + true_stream_ = davinci_model->GetStreamList()[true_stream_index]; + true_stream_id_ = stream_switch_def.true_stream_id(); + + if (op_desc->HasAttr(ATTR_NAME_SWITCH_DATA_TYPE)) { + int64_t data_type = 0; + if (!AttrUtils::GetInt(op_desc, ATTR_NAME_SWITCH_DATA_TYPE, data_type)) { + GELOGE(FAILED, "StreamSwitchOp[node:%s] get attr SWITCH_DATA_TYPE fail.", op_desc->GetName().c_str()); + return FAILED; + } + data_type_ = static_cast(data_type); + } + + return SUCCESS; +} + +Status StreamSwitchTaskInfo::Distribute() { + GELOGI("StreamSwitchTaskInfo Distribute Start."); + rtError_t rt_ret = rtStreamSwitchEx(input_ptr_, cond_, value_ptr_, true_stream_, stream_, data_type_); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Call rt api failed, ret: 0x%X", rt_ret); + return RT_FAILED; + } + + return SUCCESS; +} + +REGISTER_TASK_INFO(RT_MODEL_TASK_STREAM_SWITCH, StreamSwitchTaskInfo); +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/stream_switch_task_info.h b/src/ge/graph/load/new_model_manager/task_info/stream_switch_task_info.h new file mode 100644 index 00000000..07509c7c --- /dev/null +++ 
b/src/ge/graph/load/new_model_manager/task_info/stream_switch_task_info.h @@ -0,0 +1,51 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_STREAM_SWITCH_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_STREAM_SWITCH_TASK_INFO_H_ +#include "graph/load/new_model_manager/task_info/task_info.h" + +namespace ge { +class StreamSwitchTaskInfo : public TaskInfo { + public: + StreamSwitchTaskInfo() + : input_ptr_(nullptr), + cond_(RT_EQUAL), + value_ptr_(nullptr), + true_stream_(nullptr), + true_stream_id_(0), + data_type_(RT_SWITCH_INT32) {} + + ~StreamSwitchTaskInfo() override { + input_ptr_ = nullptr; + value_ptr_ = nullptr; + true_stream_ = nullptr; + } + + Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) override; + + Status Distribute() override; + + private: + void *input_ptr_; + rtCondition_t cond_; + void *value_ptr_; + rtStream_t true_stream_; + uint32_t true_stream_id_; + rtSwitchDataType_t data_type_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_STREAM_SWITCH_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/task_info.cc b/src/ge/graph/load/new_model_manager/task_info/task_info.cc new file mode 100644 index 00000000..01bf0690 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/task_info.cc @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 
Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/load/new_model_manager/task_info/task_info.h" + +#include + +namespace ge { +Status TaskInfo::SetStream(uint32_t stream_id, const std::vector &stream_list) { + if (stream_list.size() == 1) { + stream_ = stream_list[0]; + } else if (stream_list.size() > stream_id) { + stream_ = stream_list[stream_id]; + } else { + GELOGE(FAILED, "index: %u >= stream_list.size(): %zu.", stream_id, stream_list.size()); + return FAILED; + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/task_info/task_info.h b/src/ge/graph/load/new_model_manager/task_info/task_info.h new file mode 100644 index 00000000..09ba05de --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/task_info.h @@ -0,0 +1,71 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_TASK_INFO_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_TASK_INFO_H_ + +#include + +#include "cce/customize.h" +#include "cce/taskdown_common.hpp" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/load/new_model_manager/task_info/task_info_factory.h" +#include "proto/task.pb.h" +namespace ge { +struct RuntimeParam { + uint64_t mem_size = 0; + uint64_t logic_mem_base = 0; + uint8_t *mem_base = nullptr; + uint64_t weight_size = 0; + uint64_t logic_weight_base = 0; + uint8_t *weight_base = nullptr; + uint64_t var_size = 0; + uint64_t logic_var_base = 0; + uint8_t *var_base = nullptr; + uint32_t batch_num = 0; + uint32_t stream_num = 0; + uint32_t event_num = 0; + uint64_t session_id = 0; + uint32_t graph_id = 0; +}; + +class DavinciModel; + +class TaskInfo { + public: + TaskInfo() : stream_(nullptr) {} + + virtual ~TaskInfo() { stream_ = nullptr; } + + virtual Status Init(const domi::TaskDef &task_def, DavinciModel *davinci_model) = 0; + + virtual Status Distribute() = 0; + + virtual Status Release() { return SUCCESS; } + + virtual cce::ccOpContext *GetCtx() { return nullptr; } + + virtual uint32_t GetTaskID() { return 0xFFFFFFFF; } + + virtual uintptr_t GetDumpArgs() { return 0; } + + protected: + Status SetStream(uint32_t stream_id, const std::vector &stream_list); + + void *stream_; +}; +} // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_TASK_INFO_H_ diff --git a/src/ge/graph/load/new_model_manager/task_info/task_info_factory.h b/src/ge/graph/load/new_model_manager/task_info/task_info_factory.h new file mode 100644 index 00000000..86fda23e --- /dev/null +++ b/src/ge/graph/load/new_model_manager/task_info/task_info_factory.h @@ -0,0 +1,91 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_TASK_INFO_FACTORY_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_TASK_INFO_FACTORY_H_ + +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge/ge_util.h" +#include "runtime/rt_model.h" + +namespace ge { +class TaskInfo; +using TaskInfoPtr = std::shared_ptr; + +class TaskInfoFactory { + public: + // TaskManagerCreator function def + using TaskInfoCreatorFun = std::function; + + static TaskInfoFactory &Instance() { + static TaskInfoFactory instance; + return instance; + } + + TaskInfoPtr Create(rtModelTaskType_t task_type) { + auto iter = creator_map_.find(task_type); + if (iter == creator_map_.end()) { + GELOGW("Cannot find task type %d in inner map.", static_cast(task_type)); + return nullptr; + } + + return iter->second(); + } + + // TaskInfo registerar + class Registerar { + public: + Registerar(rtModelTaskType_t type, const TaskInfoCreatorFun func) { + TaskInfoFactory::Instance().RegisterCreator(type, func); + } + + ~Registerar() {} + }; + + private: + TaskInfoFactory() {} + + ~TaskInfoFactory() {} + + // register creator, this function will call in the constructor + void RegisterCreator(rtModelTaskType_t type, const TaskInfoCreatorFun func) { + auto iter = creator_map_.find(type); + if (iter != creator_map_.end()) { + GELOGD("TaskManagerFactory::RegisterCreator: %d creator already exist", static_cast(type)); + return; + } + + creator_map_[type] = func; + } + + std::map creator_map_; +}; + +#define REGISTER_TASK_INFO(type, clazz) 
\ + TaskInfoPtr Creator_##type##_Task_Info() { \ + std::shared_ptr ptr = nullptr; \ + ptr = MakeShared(); \ + return ptr; \ + } \ + TaskInfoFactory::Registerar g_##type##_Task_Info_Creator(type, Creator_##type##_Task_Info); +}; // namespace ge +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TASK_INFO_TASK_INFO_FACTORY_H_ + diff --git a/src/ge/graph/load/new_model_manager/tbe_handle_store.cc b/src/ge/graph/load/new_model_manager/tbe_handle_store.cc new file mode 100644 index 00000000..15967ad2 --- /dev/null +++ b/src/ge/graph/load/new_model_manager/tbe_handle_store.cc @@ -0,0 +1,138 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/new_model_manager/tbe_handle_store.h" + +#include +#include "common/ge_inner_error_codes.h" +#include "framework/common/debug/ge_log.h" +#include "runtime/kernel.h" + +namespace ge { +void TbeHandleInfo::used_inc(uint32_t num) { + if (used_ > std::numeric_limits::max() - num) { + GELOGE(INTERNAL_ERROR, "Used[%u] reach numeric max.", used_); + return; + } + + used_ += num; +} + +void TbeHandleInfo::used_dec(uint32_t num) { + if (used_ < std::numeric_limits::min() + num) { + GELOGE(INTERNAL_ERROR, "Used[%u] reach numeric min.", used_); + return; + } + + used_ -= num; +} + +uint32_t TbeHandleInfo::used_num() const { return used_; } + +void *TbeHandleInfo::handle() const { return handle_; } + +TBEHandleStore &TBEHandleStore::GetInstance() { + static TBEHandleStore instance; + + return instance; +} + +/// +/// @ingroup ge +/// @brief Find Registered TBE handle by name. +/// @param [in] name: TBE handle name to find. +/// @param [out] handle: handle names record. +/// @return true: found / false: not found. +/// +bool TBEHandleStore::FindTBEHandle(const std::string &name, void *&handle) { + std::lock_guard lock(mutex_); + auto it = kernels_.find(name); + if (it == kernels_.end()) { + return false; + } else { + TbeHandleInfo &info = it->second; + handle = info.handle(); + return true; + } +} + +/// +/// @ingroup ge +/// @brief Store registered TBE handle info. +/// @param [in] name: TBE handle name to store. +/// @param [in] handle: TBE handle addr to store. +/// @param [in] kernel: TBE kernel bin to store. 
+/// @return NA +/// +void TBEHandleStore::StoreTBEHandle(const std::string &name, void *handle, std::shared_ptr &kernel) { + std::lock_guard lock(mutex_); + auto it = kernels_.find(name); + if (it == kernels_.end()) { + TbeHandleInfo info(handle, kernel); + info.used_inc(); + kernels_.emplace(name, info); + } else { + TbeHandleInfo &info = it->second; + info.used_inc(); + } +} + +/// +/// @ingroup ge +/// @brief Increase reference of registered TBE handle info. +/// @param [in] name: handle name increase reference. +/// @return NA +/// +void TBEHandleStore::ReferTBEHandle(const std::string &name) { + std::lock_guard lock(mutex_); + auto it = kernels_.find(name); + if (it == kernels_.end()) { + GELOGE(INTERNAL_ERROR, "Kernel[%s] not found in stored.", name.c_str()); + return; + } + + TbeHandleInfo &info = it->second; + info.used_inc(); +} + +/// +/// @ingroup ge +/// @brief Erase TBE registered handle record. +/// @param [in] names: handle names erase. +/// @return NA +/// +void TBEHandleStore::EraseTBEHandle(const std::map &names) { + std::lock_guard lock(mutex_); + for (auto &item : names) { + auto it = kernels_.find(item.first); + if (it == kernels_.end()) { + GELOGE(INTERNAL_ERROR, "Kernel[%s] not found in stored.", item.first.c_str()); + continue; + } + + TbeHandleInfo &info = it->second; + if (info.used_num() > item.second) { + info.used_dec(item.second); + } else { + rtError_t rt_ret = rtDevBinaryUnRegister(info.handle()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(INTERNAL_ERROR, "Kernel[%s] UnRegister handle fail:%u.", item.first.c_str(), rt_ret); + } + kernels_.erase(it); + } + } +} +} // namespace ge diff --git a/src/ge/graph/load/new_model_manager/tbe_handle_store.h b/src/ge/graph/load/new_model_manager/tbe_handle_store.h new file mode 100644 index 00000000..3583064b --- /dev/null +++ b/src/ge/graph/load/new_model_manager/tbe_handle_store.h @@ -0,0 +1,99 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TBE_HANDLE_STORE_H_ +#define GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TBE_HANDLE_STORE_H_ + +#include + +#include +#include +#include +#include +#include + +#include "common/fmk_types.h" +#include "graph/op_kernel_bin.h" + +namespace ge { +class TbeHandleInfo { + public: + TbeHandleInfo(void *handle, std::shared_ptr &kernel) : used_(0), handle_(handle), kernel_(kernel) {} + + ~TbeHandleInfo() { handle_ = nullptr; } + + void used_inc(uint32_t num = 1); + void used_dec(uint32_t num = 1); + uint32_t used_num() const; + + void *handle() const; + + private: + uint32_t used_; + + void *handle_; + std::shared_ptr kernel_; +}; + +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY TBEHandleStore { + public: + static TBEHandleStore &GetInstance(); + + /// + /// @ingroup ge + /// @brief Find Registered TBE handle by name. + /// @param [in] name: TBE handle name to find. + /// @param [out] handle: TBE handle addr found. + /// @return true: found / false: not found. + /// + bool FindTBEHandle(const std::string &name, void *&handle); + + /// + /// @ingroup ge + /// @brief Store registered TBE handle info. + /// @param [in] name: TBE handle name to store. + /// @param [in] handle: TBE handle addr to store. + /// @param [in] kernel: TBE kernel bin to store. 
+ /// @return NA + /// + void StoreTBEHandle(const std::string &name, void *handle, std::shared_ptr &kernel); + + /// + /// @ingroup ge + /// @brief Increase reference of registered TBE handle info. + /// @param [in] name: handle name increase reference. + /// @return NA + /// + void ReferTBEHandle(const std::string &name); + + /// + /// @ingroup ge + /// @brief Erase TBE registered handle record. + /// @param [in] names: handle names erase. + /// @return NA + /// + void EraseTBEHandle(const std::map &names); + + private: + TBEHandleStore() = default; + ~TBEHandleStore() = default; + + std::mutex mutex_; + std::unordered_map kernels_; +}; +} // namespace ge + +#endif // GE_GRAPH_LOAD_NEW_MODEL_MANAGER_TBE_HANDLE_STORE_H_ diff --git a/src/ge/graph/load/output/output.cc b/src/ge/graph/load/output/output.cc new file mode 100644 index 00000000..bbc2bf4e --- /dev/null +++ b/src/ge/graph/load/output/output.cc @@ -0,0 +1,161 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/load/output/output.h" + +#include + +#include "common/properties_manager.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" + +namespace ge { +Output::Output(const OpDescPtr &op_desc, DavinciModel *model) + : base_(nullptr), + var_base_(nullptr), + logic_base_(0), + logic_var_base_(0), + model_(model), + op_desc_(op_desc), + input_num_(0) {} + +Output::~Output() { + var_base_ = nullptr; + base_ = nullptr; + model_ = nullptr; +} + +/// +/// @ingroup domi +/// @brief Initialize input/output params +/// @return Status +/// +Status Output::Init() { + if (op_desc_ == nullptr || model_ == nullptr) { + GELOGE(INTERNAL_ERROR, "The op_desc_ or model_ is nullptr."); + return INTERNAL_ERROR; + } + + base_ = model_->MemBase(); + var_base_ = model_->VarMemBase(); + logic_base_ = model_->GetRtBaseAddr(); + logic_var_base_ = model_->GetRtVarAddr(); + + input_num_ = op_desc_->GetInputsSize(); + v_input_size_.clear(); + v_input_data_addr_.clear(); + + auto input_vector = op_desc_->GetInputOffset(); + if (input_num_ != input_vector.size()) { + GELOGE(INTERNAL_ERROR, "input desc size: %zu != input offset size: %zu.", input_num_, input_vector.size()); + return INTERNAL_ERROR; + } + + for (size_t i = 0; i < input_num_; i++) { + uint32_t tensor_size = 0; + auto input_desc = op_desc_->GetInputDescPtr(i); + GE_CHECK_NOTNULL(input_desc); + Status ret = TensorUtils::GetSize(*input_desc, tensor_size); + if (ret != GRAPH_SUCCESS) { + GELOGE(ret, "Get size from TensorDesc failed, op : %s, input index : %zu", op_desc_->GetName().c_str(), i); + return ret; + } + v_input_size_.push_back(tensor_size); + + if (VarManager::Instance(model_->SessionId())->IsVarAddr(input_vector[i])) { + v_input_data_addr_.push_back(static_cast(var_base_ + input_vector[i] - logic_var_base_)); + } else { + v_input_data_addr_.push_back(static_cast(base_ + 
input_vector[i])); + } + } + + GELOGI("Init output:%lu, %lu, %lu", input_num_, v_input_size_.size(), v_input_data_addr_.size()); + + return SUCCESS; +} + +/// +/// @ingroup domi +/// @brief Copy Op Output to user space. +/// @brief when model running, Add one DataOp as input node, Add one Output Op as output node. +/// @return Status +/// +Status Output::CopyResult(OutputData &rslt, uint32_t data_begin, uint32_t &data_index, bool support_mem_share) { + uint32_t data_count = 0; + for (size_t i = 0; i < input_num_; i++) { + DataBuffer data_buf = rslt.blobs[data_begin + data_count]; + Status ret = SetDataBuf(data_buf, data_count, i, support_mem_share); + if (ret != SUCCESS) { + GELOGE(ret, "Copy data to host error. index: %zu", i); + return ret; + } + data_index = data_begin + data_count; + } + + return SUCCESS; +} + +Status Output::SetDataBuf(DataBuffer &data_buf, uint32_t &data_count, size_t i, bool support_mem_share) { + if (data_buf.length == 0) { + ++data_count; + GELOGD("Length of data_buffer is zero, No need to copy. 
output op : %s, output tensor index : %zu!", + op_desc_->GetName().c_str(), i); + return SUCCESS; + } + + auto tensor_desc = op_desc_->GetInputDescPtr(static_cast(i)); + if (tensor_desc == nullptr) { + GELOGE(FAILED, "tensor_desc is null"); + return FAILED; + } + + if (data_buf.isDataSupportMemShare && support_mem_share) { + GELOGI("No need to copy input data, user's output data buffer can be shared."); + } else { + // Copy result to Databuf + uint32_t size = v_input_size_[i]; + GELOGI("Tensor data size before: %u", size); + + graphStatus graph_status = TensorUtils::GetTensorSizeInBytes(*tensor_desc, size); + if (graph_status != ge::GRAPH_SUCCESS) { + GELOGE(graph_status, "GetTensorSizeInBytes failed!"); + return FAILED; + } + + rtError_t rt_ret = rtMemcpy(data_buf.data, size, v_input_data_addr_[i], size, RT_MEMCPY_DEVICE_TO_HOST); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtmemcpy error"); + return FAILED; + } + GELOGI("Tensor data size: %u data_buflength: %u", size, data_buf.length); + } + + ++data_count; + GELOGD("Successfully copy the output tensor memory to buffer, output op : %s, output tensor index : %zu!", + op_desc_->GetName().c_str(), i); + + return SUCCESS; +} + +void Output::GetOutputData(vector &v_data_addr, vector &v_data_size) { + for (size_t i = 0; i < input_num_; ++i) { + v_data_addr.push_back(v_input_data_addr_[i]); + v_data_size.push_back(v_input_size_[i]); + } +} +} // namespace ge diff --git a/src/ge/graph/load/output/output.h b/src/ge/graph/load/output/output.h new file mode 100644 index 00000000..cff8505e --- /dev/null +++ b/src/ge/graph/load/output/output.h @@ -0,0 +1,97 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_LOAD_OUTPUT_OUTPUT_H_ +#define GE_GRAPH_LOAD_OUTPUT_OUTPUT_H_ + +#include +#include + +#include "cce/dnn_base_def.hpp" +#include "common/debug/log.h" +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "common/op/op_parser_util.h" +#include "common/types.h" +#include "common/util.h" +#include "common/ge_types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/op_desc.h" + +namespace ge { +using std::string; +using std::vector; + +// The base class for all op +class Output { + public: + Output(const OpDescPtr &op_desc, DavinciModel *model); + virtual ~Output(); + + /// + /// @ingroup domi + /// @brief Initialize input/output params + /// @return Status + /// + virtual Status Init(); + + /// + /// @ingroup domi + /// @brief Copy Op Output to user space. + /// @brief when model running, Add one DataOp as input node, Add one Output Op as output node. + /// @return Status + /// + virtual Status CopyResult(OutputData &rslt, uint32_t data_begin, uint32_t &data_index, + bool support_mem_share); + + /// + /// @ingroup domi + /// @brief Trans Output data to fp16 + /// @return Status + /// + Status SetDataBuf(DataBuffer &data_buf, uint32_t &data_count, size_t i, bool support_mem_share); + + /// + /// @ingroup domi + /// @brief Get Output data and size. 
+ /// @return void + /// + void GetOutputData(vector &v_data_addr, vector &v_data_size); + + // Copy assignment operator and copy constructor are deleted + Output &operator=(const Output &output) = delete; + Output(const Output &output) = delete; + + protected: + // Model's base address + uint8_t *base_; + uint8_t *var_base_; + uint64_t logic_base_; + uint64_t logic_var_base_; + // The DavinciModel which ops belong to + DavinciModel *model_; + + ConstOpDescPtr op_desc_; + + // Input descriptions + size_t input_num_; + vector v_input_data_addr_; // init as:buf_base + op_def_->input(i)); + vector v_input_size_; +}; +} // namespace ge + +#endif // GE_GRAPH_LOAD_OUTPUT_OUTPUT_H_ diff --git a/src/ge/graph/manager/graph_context.cc b/src/ge/graph/manager/graph_context.cc new file mode 100644 index 00000000..6a5b2913 --- /dev/null +++ b/src/ge/graph/manager/graph_context.cc @@ -0,0 +1,98 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/manager/graph_context.h" + +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" + +namespace ge { +GraphContext::GraphContext(const GraphNodePtr &graph_node) { + if (graph_node == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "graphNode is NULL!"); + return; + } + + compute_graph_ = graph_node->GetComputeGraph(); + current_graph_id_ = graph_node->GetGraphId(); + + if (compute_graph_ == nullptr) { + std::shared_ptr graph = graph_node->GetGraph(); + if (graph == nullptr) { + GELOGE(GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL, "compute_graph by graphNode is NULL!"); + return; + } + + compute_graph_ = GraphUtils::GetComputeGraph(*graph); + return; + } +} + +Status GraphContext::SetComputeGraph(const GraphNodePtr &graph_node) { + if (graph_node == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "graphNode is NULL!"); + return GE_GRAPH_PARAM_NULLPTR; + } + + compute_graph_ = graph_node->GetComputeGraph(); + current_graph_id_ = graph_node->GetGraphId(); + + if (compute_graph_ == nullptr) { + std::shared_ptr graph = graph_node->GetGraph(); + if (graph == nullptr) { + GELOGE(GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL, "compute_graph by graphNode is NULL!"); + return GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL; + } + + compute_graph_ = GraphUtils::GetComputeGraph(*graph); + return SUCCESS; + } + return SUCCESS; +} + +Status GraphContext::Initialize(const std::map &options) const { return SUCCESS; } + +Status GraphContext::Finalize() const { return SUCCESS; } + +Status GraphContext::GetVariableTensor(const std::string &var_data_name, GeTensor &returned_tensor) { + if (var_data_name.empty()) { + GELOGE(GE_GRAPH_EMPTY_STRING_NAME, "Variable data name is empty!"); + return GE_GRAPH_EMPTY_STRING_NAME; + } + + if (GetVarNodeTensorTable().empty()) { + GELOGE(GE_GRAPH_EMPTY_VARIABLE_TENSOR_TABLE, "VarNodeTensorTable is empty!"); + return GE_GRAPH_EMPTY_VARIABLE_TENSOR_TABLE; + } + for (auto &var_record : 
GetVarNodeTensorTable()) { + if (var_data_name == std::get<0>(var_record.first)) { + returned_tensor.SetTensorDesc(var_record.second.GetTensorDesc()); + auto ret = returned_tensor.SetData(var_record.second.GetData()); + if (ret != SUCCESS) { + GELOGE(ret, "Set Tensor data failed!"); + return ret; + } + + return SUCCESS; + } + } + + GELOGE(GE_GRAPH_VARIABLE_DOES_NOT_EXIST, "VarRecord with data_name %s does NOT exist!", var_data_name.c_str()); + + return GE_GRAPH_VARIABLE_DOES_NOT_EXIST; +} +} // namespace ge diff --git a/src/ge/graph/manager/graph_context.h b/src/ge/graph/manager/graph_context.h new file mode 100644 index 00000000..6e41b326 --- /dev/null +++ b/src/ge/graph/manager/graph_context.h @@ -0,0 +1,108 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_MANAGER_GRAPH_CONTEXT_H_ +#define GE_GRAPH_MANAGER_GRAPH_CONTEXT_H_ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "graph/compute_graph.h" +#include "graph/manager/graph_manager_utils.h" + +namespace ge { +class GraphContext; + +using SessionId = uint64_t; + +using GradOpList = std::vector>; + +using VariableRecord = std::tuple; + +using OutputOpNameIndex = std::pair; + +struct key_hash : public std::unary_function { + std::size_t operator()(const ge::OutputOpNameIndex &outputOpNameIndex) const { + return (static_cast(outputOpNameIndex.first[0])) ^ outputOpNameIndex.second; + } +}; + +struct key_equal : public std::binary_function { + bool operator()(const ge::OutputOpNameIndex &varR1, const ge::OutputOpNameIndex &varR2) const { + return (varR1.first == varR2.first && varR1.second == varR2.second); + } +}; + +using VarNodeTensorTable = std::vector>; + +using SessionVarTableMap = std::map; + +using GraphContextPtr = std::shared_ptr; + +struct OutputDescInfo { + std::string op_name; + uint8_t index; + struct InputOutputDescInfo info; +}; + +/// +/// @ingroup graph +/// @brief Global graph context sharing, provide variable sharing facility for +/// multiple graphs in the same session. 
+/// @author +/// +class GraphContext { + public: + GraphContext() = default; + + ~GraphContext() = default; + + Status Initialize(const std::map &options = {}) const; + // Disable copy constructor and assignment operator + GraphContext(const GraphContext &) = delete; + + GraphContext &operator=(const GraphContext &) = delete; + + Status Finalize() const; + + Status GetVariableTensor(const std::string &var_data_name, GeTensor &returned_tensor); + + const ComputeGraphPtr &GetComputeGraph() const { return compute_graph_; } + + Status SetComputeGraph(const GraphNodePtr &graph_node); + + private: + explicit GraphContext(const GraphNodePtr &graph_node); + + ComputeGraphPtr compute_graph_ = nullptr; + + GraphId current_graph_id_ = 0; + + // Get the unique VarNode-Tensor table + static VarNodeTensorTable &GetVarNodeTensorTable() { + static VarNodeTensorTable _this; + return _this; + } +}; +} // namespace ge + +#endif // GE_GRAPH_MANAGER_GRAPH_CONTEXT_H_ diff --git a/src/ge/graph/manager/graph_manager.cc b/src/ge/graph/manager/graph_manager.cc new file mode 100644 index 00000000..9ac45dcc --- /dev/null +++ b/src/ge/graph/manager/graph_manager.cc @@ -0,0 +1,1739 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/manager/graph_manager.h" + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "common/math/math_util.h" +#include "common/thread_pool.h" +#include "common/util.h" +#include "external/graph/types.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/ge_types.h" +#include "graph/common/transop_util.h" +#include "graph/ge_context.h" +#include "graph/ge_global_options.h" +#include "graph/ge_local_context.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/passes/atomic_addr_clean_pass.h" +#include "graph/passes/compile_nodes_pass.h" +#include "graph/passes/constant_folding_pass.h" +#include "graph/passes/control_op_attr_pass.h" +#include "graph/passes/dimension_adjust_pass.h" +#include "graph/passes/identify_reference_pass.h" +#include "graph/passes/link_gen_mask_nodes_pass.h" +#include "graph/passes/multi_batch_pass.h" +#include "graph/passes/no_reshape_op_remove_pass.h" +#include "graph/passes/permute_pass.h" +#include "graph/passes/reshape_remove_pass.h" +#include "graph/passes/same_transdata_breadth_fusion_pass.h" +#include "graph/passes/transop_breadth_fusion_pass.h" +#include "graph/passes/transop_depth_fusion_pass.h" +#include "graph/passes/transop_nearby_allreduce_fusion_pass.h" +#include "graph/passes/transop_without_reshape_fusion_pass.h" +#include "graph/passes/transpose_transdata_pass.h" +#include "graph/passes/variable_op_pass.h" +#include "graph/passes/variable_ref_delete_op_pass.h" +#include "graph/utils/tensor_adapter.h" +#include "inc/pass_manager.h" +#include "init/gelib.h" + +namespace { +const char *const kSummary = "Summary"; +const char *const kSave = "Save"; +const char *const kNetOutput = "NetOutput"; +const char *const kVariable = "Variable"; +const char *const kSend = "Send"; +const char *const kRecv = "Recv"; +} // namespace + +namespace ge { 
+GraphManager::GraphManager() : thread_run_flag_(false), graph_run_listener_(nullptr), init_flag_(false) {} + +Status GraphManager::Initialize(const std::map &options) { + if (init_flag_) { + GELOGW("[Initialize] GraphManager already initialized."); + return SUCCESS; + } + + // malloc + graph_run_listener_ = MakeShared(); + if (graph_run_listener_ == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed"); + return MEMALLOC_FAILED; + } + Status ret = graph_run_listener_->SetCondition(&sync_run_mutex_, &condition_); + if (ret != SUCCESS) { + GELOGE(ret, "[Initialize] mutex and cond is invalid."); + return ret; + } + // graph context + graph_context_ = MakeShared(); + if (graph_context_ == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed."); + return MEMALLOC_FAILED; + } + + // parse option parameters + ret = ParseOptions(options); + if (ret != SUCCESS) { + GELOGE(ret, "[Initialize] parse options failed."); + return ret; + } + + graph_builder_.SetOptions(options_); + ret = graph_optimize_.SetOptions(options_); + if (ret != SUCCESS) { + GELOGE(ret, "[Initialize] Graph optimize initialize failed."); + return ret; + } + graph_preparer_.SetOptions(options_); + + ret = graph_context_->Initialize(options); + if (ret != SUCCESS) { + GELOGE(ret, "[Initialize] GraphContext initialize failed."); + return ret; + } + + graph_map_.clear(); + init_flag_ = true; + + thread_run_flag_ = true; + prerun_thread_ = std::thread(GraphManager::PreRunThread, this); + run_thread_ = std::thread(GraphManager::RunThread, this); + + return SUCCESS; +} + +Status GraphManager::Finalize() { + if (!init_flag_) { + GELOGW("GraphManager has not been initialized."); + return SUCCESS; + } + + GE_CHK_STATUS_RET(graph_executor_.FreeExecuteMemory()); + + StopQueue(this); + + if (prerun_thread_.joinable()) { + prerun_thread_.join(); + } + if (run_thread_.joinable()) { + run_thread_.join(); + } + + // check graph whether running or not + Status unload_model_ret = SUCCESS; + Status ret; + rtError_t 
rt_ret; + for (auto iter = graph_map_.begin(); iter != graph_map_.end(); ++iter) { + GraphNodePtr graph_node = iter->second; + if (graph_node->GetRunFlag()) { + GELOGW("[GraphManager] finalize failed, graphId=%u.", iter->first); + unload_model_ret = GE_GRAPH_GRAPH_IS_RUNNING; + continue; + } + + // unload model + auto ge_model = graph_node->GetGeModel(); + if (ge_model != nullptr && ge_model->GetModelId() != INVALID_MODEL_ID && graph_node->GetLoadFlag()) { + rt_ret = rtSetDevice(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGW("[GraphManager] rtSetDevice failed, modelId=%u, graphId=%u.", ge_model->GetModelId(), iter->first); + unload_model_ret = FAILED; + continue; + } + ret = GraphLoader::UnloadModel(ge_model->GetModelId()); + if (ret != SUCCESS) { + GELOGW("[GraphManager] unload model failed, modelId=%u, graphId=%u.", ge_model->GetModelId(), iter->first); + unload_model_ret = ret; + } + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGW("[GraphManager] rtDeviceReset failed, modelId=%u, graphId=%u.", ge_model->GetModelId(), iter->first); + unload_model_ret = FAILED; + continue; + } + } + } + graph_map_.clear(); + + // graph context + if (graph_context_ != nullptr) { + Status ret_final = graph_context_->Finalize(); + if (ret_final != SUCCESS) { + GELOGE(ret_final, "[GraphManager] graph context Finalize failed!"); + unload_model_ret = ret_final; + } + } + + init_flag_ = false; + return unload_model_ret; +} + +Status GraphManager::AddGraph(const GraphId &graph_id, const Graph &graph) { + if (graph_map_.find(graph_id) != graph_map_.end()) { + GELOGE(GE_GRAPH_GRAPH_ALREADY_EXIST, "[GraphManager] graph exists, graph_id = %u.", graph_id); + return GE_GRAPH_GRAPH_ALREADY_EXIST; + } + + auto compute_graph = GraphUtils::GetComputeGraph(graph); + if (compute_graph != nullptr) { + compute_graph->SetGraphID(graph_id); + } else { + GELOGE(FAILED, "compute graph is null"); + return FAILED; + } + std::string 
session_graph_id; + if (!AttrUtils::GetStr(*compute_graph, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id) || session_graph_id.empty()) { + session_graph_id = "-1_" + to_string(graph_id); + if (!AttrUtils::SetStr(*compute_graph, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id)) { + GELOGW("Set attribute of compute graph failed."); + } + GELOGW("Get graph session_graph_id attr failed, set session id to default value: [0]"); + } + + GraphNodePtr graph_node = MakeShared(graph_id); + if (graph_node == nullptr) { + GELOGE(FAILED, "GraphNode make shared failed"); + return FAILED; + } + std::shared_ptr graph_ptr = MakeShared(graph); + if (graph_ptr == nullptr) { + GELOGE(FAILED, "GraphPtr make shared failed"); + return FAILED; + } + + graph_node->SetGraph(graph_ptr); + + graph_map_.insert(std::make_pair(graph_id, graph_node)); + + GELOGI("[GraphManager] add graph success, graph_id = %u.", graph_id); + + var_acc_ctrl_.AddGraph(graph_id, compute_graph); + return SUCCESS; +} + +Status GraphManager::MergeSubGraph(ComputeGraphPtr &compute_graph, const std::vector &sub_graph_list) { + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr != nullptr && instance_ptr->InitFlag()) { + Status ret = graph_partitioner_.MergeAfterSubGraphOptimization(compute_graph, sub_graph_list); + if (ret != SUCCESS) { + GELOGE(ret, "merge end and placeholder after subGraph optimization failed."); + return FAILED; + } + + Status ret_topo = compute_graph->TopologicalSorting(); + if (ret_topo != SUCCESS) { + GELOGE(ret_topo, "[GraphManager]: TopologicalSorting the merged graph failed."); + return ret_topo; + } + } else if (!sub_graph_list.empty() && (sub_graph_list[0] != nullptr)) { + compute_graph = sub_graph_list[0]->GetSubGraph(); + } + + return SUCCESS; +} + +Status GraphManager::PreRun(const GraphNodePtr &graph_node, const std::vector &inputs, + vector &ge_models, GeModelPtr &ge_model, uint64_t session_id) { + GELOGI("Ready For PreRun Start session_id = %lu.", session_id); + 
GE_TIMESTAMP_START(PreRun); + GE_CHECK_NOTNULL(graph_node); + // it will not execute graph preprocess, optimize, parition, build if the graph has built successful. + GE_CHECK_NOTNULL(graph_node->GetGraph()); + auto compute_graph = GraphUtils::GetComputeGraph(*graph_node->GetGraph()); + GE_IF_BOOL_EXEC(compute_graph == nullptr, GELOGE(FAILED, "compute graph is NULL."); return FAILED); + GraphUtils::DumpGEGraph(compute_graph, "BeforeSummaryHandle"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph, "BeforeSummaryHandle"); + // optimize the summary op in graph: store the summary name and replace the summary ops with net_output op. + GE_TIMESTAMP_START(HandleSummaryOp); + auto ret = graph_optimize_.HandleSummaryOp(compute_graph); + GE_TIMESTAMP_END(HandleSummaryOp, "GraphManager::HandleSummaryOp"); + GE_CHK_BOOL_EXEC(ret == SUCCESS, return ret, "[RunTrainGraph] HandleSummaryOp failed."); + GE_TIMESTAMP_START(GraphPrepare); + ret = graph_preparer_.Prepare(graph_node->GetGraph(), inputs, compute_graph, session_id); + if (ret != SUCCESS) { + GELOGE(ret, "ATC RunGraph input compute graph is NULL"); + return ret; + } + GE_TIMESTAMP_END(GraphPrepare, "GraphPrepare::Prepare"); + compute_graph->SetSessionID(session_id); + GraphUtils::DumpGEGraph(compute_graph, "OptimizeOriginalGraphAfter"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph, "OptimizeOriginalGraphAfter"); + + GE_TIMESTAMP_START(InferShape); + // Origin graph infershape + GE_CHK_STATUS_EXEC(compute_graph->InferShapeInNeed(), + GELOGE(GE_GRAPH_INFERSHAPE_FAILED, " OriginGraph infershape failed"); + return GE_GRAPH_INFERSHAPE_FAILED;) + GE_TIMESTAMP_END(InferShape, "ComputeGraph::InferShapeInNeed"); + // graph partition + std::vector sub_graph_list; + GE_TIMESTAMP_START(GraphPartition); + ret = graph_partitioner_.Partition(compute_graph, sub_graph_list, GraphPartitioner::kPartitioning); + if (ret != SUCCESS) { + GELOGE(ret, "Graph partition Failed"); + return ret; + } + GE_TIMESTAMP_END(GraphPartition, 
"GraphPartitioner::Partition1"); + GE_TIMESTAMP_START(SetSubgraph); + // use default 16 multi thread + const uint32_t thread_num = 16; + ThreadPool executor(thread_num); + size_t sub_graph_list_size = sub_graph_list.size(); + std::vector> vector_future(sub_graph_list_size); + for (size_t i = 0; i < sub_graph_list_size; ++i) { + vector_future[i] = executor.commit(GraphManager::ProcessSubGraphWithMultiThreads, this, sub_graph_list[i], + session_id, GetThreadLocalContext()); + } + for (size_t i = 0; i < vector_future.size(); ++i) { + Status ret_status = vector_future[i].get(); + if (ret_status != SUCCESS) { + GELOGE(ret_status, "subgraph %zu optimize failed", i); + return ret_status; + } + } + GE_TIMESTAMP_END(SetSubgraph, "SetSubGraph"); + + ComputeGraphPtr merged_compute_graph = nullptr; + + GE_TIMESTAMP_START(MergeSubgraph); + ret = MergeSubGraph(merged_compute_graph, sub_graph_list); + if (ret != SUCCESS) { + GELOGE(ret, "Merge SubGraph Failed"); + return ret; + } + merged_compute_graph->SetSessionID(session_id); + merged_compute_graph->SetGraphID(graph_node->GetGraphId()); + GE_TIMESTAMP_END(MergeSubgraph, "GraphManager::MergeSubGraph"); + + GraphUtils::DumpGEGraph(merged_compute_graph, "mergedComputeGraph"); + GraphUtils::DumpGEGraphToOnnx(*merged_compute_graph, "mergedComputeGraph"); + + std::shared_ptr instance_ge = ge::GELib::GetInstance(); + if (instance_ge != nullptr && instance_ge->InitFlag()) { + // optimize after merge subgraph + GE_TIMESTAMP_START(OptimizeAfterMergeSubgraph); + ret = OptimizeAfterMergeSubGraph(merged_compute_graph); + if (ret != SUCCESS) { + GELOGE(ret, "Optimize after merge subgraph failed."); + return ret; + } + GE_TIMESTAMP_END(OptimizeAfterMergeSubgraph, "GraphManager::OptimizeAfterMergeSubGraph"); + } + + GraphUtils::DumpGEGraph(merged_compute_graph, "OptimizeMergeSubGraphAfter"); + GraphUtils::DumpGEGraphToOnnx(*merged_compute_graph, "OptimizeMergeSubGraphAfter"); + + // build + if (merged_compute_graph != nullptr) { + std::string 
graph_name = merged_compute_graph->GetName(); + graph_name.append("_"); + graph_name.append(std::to_string(graph_node->GetGraphId())); + merged_compute_graph->SetName(graph_name); + } + ret = graph_builder_.Build(merged_compute_graph, sub_graph_list, ge_model, session_id); + if (ret != SUCCESS) { + GELOGE(ret, "SubGraph build Failed."); + return ret; + } + + bool is_always_dump = false; + if (!PropertiesManager::Instance().GetDumpOutputPath().empty()) { + is_always_dump = true; + } + + GraphUtils::DumpGEGraph(merged_compute_graph, "Build", is_always_dump); + GraphUtils::DumpGEGraphToOnnx(*merged_compute_graph, "Build"); + + // set modelptr to subgraph + for (const auto &sub_graph_info : sub_graph_list) { + sub_graph_info->SetGeModelPtr(ge_model); + } + + ge_models.push_back(ge_model); + + GE_IF_BOOL_EXEC(sub_graph_list.empty(), GELOGE(FAILED, "Input graph must have at least one calculation op Node"); + return FAILED;); + sub_graph_list[0]->SetSubGraph(merged_compute_graph); + // set subgraphlist to graphnode + graph_node->SetSubGraph(sub_graph_list); + GE_TIMESTAMP_END(PreRun, "GraphManager::PreRun"); + GEEVENT("[GEPERFTRACE] GE PreRun End"); + return ret; +} + +Status GraphManager::StartForRunGraph(const GraphNodePtr &graph_node, const std::vector &inputs, + vector &ge_models, uint64_t session_id) { + // it will not execute graph prreprocess, optimize, parition, build if the graph has built successful. 
+ Status ret = SUCCESS; + if (IsGraphNeedBuild(graph_node)) { + if (graph_node->GetBuildFlag()) { + GELOGE(PARAM_INVALID, + "The graph %u need to re-build, you should remove it from GE " + "first, then AddGraph again and rebuild it.", + graph_node->GetGraphId()); + return PARAM_INVALID; + } + GeModelPtr ge_model = nullptr; + ret = PreRun(graph_node, inputs, ge_models, ge_model, session_id); + if (ret != SUCCESS) { + GELOGE(ret, "PreRun Failed."); + return ret; + } + ret = LoadGraph(ge_model, graph_node); + if (ret != SUCCESS) { + GELOGE(ret, "LoadGraph Failed."); + return ret; + } + graph_node->SetBuildFlag(true); + var_acc_ctrl_.SetGraphBuildEnd(graph_node->GetGraphId()); + } else if (!graph_node->GetLoadFlag()) { + GeModelPtr ge_model = graph_node->GetGeModel(); + ret = LoadGraph(ge_model, graph_node); + if (ret != SUCCESS) { + GELOGE(ret, "LoadGraph Failed."); + return ret; + } + } + return ret; +} +Status GraphManager::LoadGraph(const GeModelPtr &ge_model, const GraphNodePtr &graph_node) { + GELOGI("[LoadGraph] run_graph_flag[%d], graph_id[%u]", options_.run_graph_flag, graph_node->GetGraphId()); + if (options_.run_graph_flag && ge_model != nullptr) { + // synchronization run graph with model + std::shared_ptr model_listener = GetModelListener(); + ModelIdInfo model_id_info; + if (getenv(kEnvGeuseStaticMemory) != nullptr) { + GELOGI("[LoadGraph] GE_USE_STATIC_MEMORY is seted."); + } else { + GE_CHK_STATUS_RET(CheckAndReleaseMemory(ge_model, graph_node)) + } + GE_TIMESTAMP_START(LoadGraph); + Status ret = graph_loader_.LoadGraph(ge_model, model_listener, model_id_info); + GE_TIMESTAMP_END(LoadGraph, "GraphManager::LoadGraph"); + if (ret != SUCCESS) { + GELOGE(ret, "[StartForRunGraph] LoadGraph Failed"); + graph_node->SetRunFlag(false); + return ret; + } + graph_node->SetLoadFlag(true); + ge_model->SetModelId(model_id_info.model_id); + graph_node->SetGeModel(ge_model); + } + return SUCCESS; +} + +Status GraphManager::InnerRunGraph(GraphNodePtr &graph_node, const 
GraphId &graph_id, + const std::vector &inputs, std::vector &outputs) { + Status ret = graph_executor_.SetCondition(&sync_run_mutex_, &condition_, graph_run_listener_); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_RUNGRAPH_FAILED, "[RunGraph] set condition failed, graph_id = %u.", graph_id); + graph_node->SetRunFlag(false); + return GE_GRAPH_RUNGRAPH_FAILED; + } + + if (GetTrainFlag()) { + GE_CHK_STATUS_RET(graph_executor_.SetGraphContext(GetGraphContext())) + graph_executor_.SetTrainFlag(options_.train_graph_flag); + } + ret = graph_executor_.ExecuteGraph(graph_id, graph_node->GetGeModel(), inputs, outputs); + + graph_node->SetRunFlag(false); + if (ret != SUCCESS) { + GELOGE(ret, "[RunGraph] execute graph failed, graph_id = %u.", graph_id); + return ret; + } + return SUCCESS; +} + +Status GraphManager::RunGraph(const GraphId &graph_id, const std::vector &inputs, + std::vector &outputs, uint64_t session_id) { + std::lock_guard lock(run_mutex_); + GELOGI("[RunGraph] start to run graph, graph_id = %u, is_train_graph: %d", graph_id, GetTrainFlag()); + + if (inputs.empty()) { + GELOGI("[RunGraph] initilize sub graph has no inputs."); + } + + // find graph + GraphNodePtr graph_node = nullptr; + Status ret = GetGraphNode(graph_id, graph_node); + if (ret != SUCCESS) { + GELOGE(ret, "[RunGraph] graph not exist, graph_id = %u.", graph_id); + return ret; + } + + if (graph_node == nullptr) { + GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "[RunGraph] graph node is NULL, graph_id = %u.", graph_id); + return GE_GRAPH_GRAPH_NODE_NULL; + } + + if (graph_node->GetRunFlag()) { + GELOGE(GE_GRAPH_ALREADY_RUNNING, "[RunGraph] graph already running, graph id = %u", graph_id); + return GE_GRAPH_ALREADY_RUNNING; + } + // set graph's run flag + graph_node->SetRunFlag(true); + ComputeGraphPtr compute_graph_tmp = GraphUtils::GetComputeGraph(*(graph_node->GetGraph())); + + GE_IF_BOOL_EXEC( + GetTrainFlag(), GE_IF_BOOL_EXEC(compute_graph_tmp == nullptr, + GELOGE(GE_GRAPH_GRAPH_NODE_NULL, + "[RunGraph] 
compute_graph_tmp is NULL, graph id = %u.", graph_id); + return GE_GRAPH_GRAPH_NODE_NULL;) + // adapt for not set. + GE_IF_BOOL_EXEC(!compute_graph_tmp->GetNeedIteration(), + compute_graph_tmp->SetNeedIteration(GraphUtils::CheckIsTrainGraph(compute_graph_tmp));)) + + std::vector ge_models; + + if (options_.local_fmk_op_flag) { + graph_optimize_.TranFrameOp(compute_graph_tmp); + } + + ret = StartForRunGraph(graph_node, inputs, ge_models, session_id); + if (ret != SUCCESS) { + GELOGE(ret, "[RunGraph] StartForRunGraph failed!"); + graph_node->SetRunFlag(false); + return ret; + } + + const std::vector &all_sub_graph = graph_node->GetAllSubGraph(); + + // excute graph + ret = InnerRunGraph(graph_node, graph_id, inputs, outputs); + if (ret != SUCCESS) { + return ret; + } + + if (GetTrainFlag()) { + if (compute_graph_tmp->IsSummaryGraph()) { + ret = SummaryHandle(graph_id, outputs); + if (ret != SUCCESS) { + GELOGE(ret, "[RunGraph] SummaryHandle failed!"); + } + } + + if (!all_sub_graph.empty()) { + auto checkPointGraph = all_sub_graph[0]->GetSubGraph(); + if (IsCheckpointGraph(checkPointGraph)) { + ret = CheckpointHandle(graph_id, outputs); + if (ret != SUCCESS) { + GELOGE(ret, "[RunGraph] CheckpointHandle failed!"); + } + } + } + } + + GELOGI("[RunGraph] run graph success, graph_id = %u.", graph_id); + return SUCCESS; +} + +Status GraphManager::BuildGraph(const GraphId &graph_id, const std::vector &inputs, + std::vector &models) { + GELOGI("[BuildGraph] start to build graph, graph_id=%u.", graph_id); + if (inputs.empty()) { + GELOGW("[BuildGraph] BuildGraph warning: empty GeTensor inputs"); + } + + // find graph + GraphNodePtr graph_node = nullptr; + Status ret = GetGraphNode(graph_id, graph_node); + if (ret != SUCCESS) { + GELOGE(ret, "[BuildGraph] graph not exist, graph_id = %u.", graph_id); + return ret; + } + + if (graph_node == nullptr) { + GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "[BuildGraph] graph node is NULL, graphId = %u.", graph_id); + return 
GE_GRAPH_GRAPH_NODE_NULL; + } + + if (graph_node->GetRunFlag()) { + GELOGE(GE_GRAPH_ALREADY_RUNNING, "[BuildGraph] graph already running, graph id = %u", graph_node->GetGraphId()); + return GE_GRAPH_ALREADY_RUNNING; + } + // set graph's run flag + graph_node->SetRunFlag(true); + + struct timeval tv; + if (gettimeofday(&tv, nullptr) != 0) { + GELOGE(INTERNAL_ERROR, "get the time of day failed."); + return INTERNAL_ERROR; + } + uint64_t session_id = static_cast(tv.tv_sec * 1000000 + tv.tv_usec); // 1000000us + ret = StartForRunGraph(graph_node, inputs, models, session_id); + graph_node->SetRunFlag(false); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_PRERUN_FAILED, "[BuildGraph] StartForRunGraph failed!"); + return GE_GRAPH_PRERUN_FAILED; + } + + GELOGI("[BuildGraph] build graph success, graph_id=%u.", graph_id); + return ret; +} + +/// +/// @ingroup ge_graph +/// @brief Save extra attribute to Model +/// @param [in] model: Model attribues will save to. +/// @param [in] type: type of OpDesc. +/// @param [in] attrs: attributes of OpDesc. +/// @param [in] inputs: inputs tensor. +/// @param [in] outputs: outputs tensor. 
+/// @return: Status +/// +Status GraphManager::SaveParams(ge::GeModel &model, const std::string &type, const std::map &attrs, + const std::vector &inputs, const std::vector &outputs) { + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetStr(&model, "ATTR_MODEL_OP_TYPE", type), return FAILED, "Set Op[%s] type fail", + type.c_str()); + + for (const auto &it : attrs) { + GE_CHK_BOOL_EXEC(model.SetAttr("ATTR_MODEL_" + it.first, it.second) == GRAPH_SUCCESS, return FAILED, + "Set OpDesc attribute[%s] fail", it.first.c_str()); + } + + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListTensor(&model, "ATTR_MODEL_TENSOR_INPUTS", inputs), return FAILED, + "Set Inputs tensor list fail"); + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListTensor(&model, "ATTR_MODEL_TENSOR_OUTPUTS", outputs), return FAILED, + "Set Outputs tensor list fail"); + + return SUCCESS; +} + +Status GraphManager::RemoveGraph(const GraphId &graph_id) { + auto it = graph_map_.find(graph_id); + if (it == graph_map_.end()) { + GELOGE(GE_GRAPH_GRAPH_NOT_EXIST, "[GraphManager] Id %u does not exists.", graph_id); + return GE_GRAPH_GRAPH_NOT_EXIST; + } + + GraphNodePtr graph_node = it->second; + if ((graph_node == nullptr) || (graph_node->GetRunFlag())) { + GELOGE(GE_GRAPH_GRAPH_IS_RUNNING, "[GraphManager] Id %u is running, can't be deleted.", graph_id); + return GE_GRAPH_GRAPH_IS_RUNNING; + } + Status ret = SUCCESS; + Status middle_ret; + rtError_t rt_ret; + const std::vector &all_sub_graph = graph_node->GetAllSubGraph(); + for (size_t i = 0; i < all_sub_graph.size(); ++i) { + // must free buffer firstly + middle_ret = all_sub_graph[i]->FreeInOutBuffer(); + if (middle_ret != SUCCESS) { + GELOGE(middle_ret, "[GraphManager] RemoveGraph free mem failed, graph_id=%u.", graph_id); + ret = middle_ret; + } + if (all_sub_graph[i]->GeModelIsValid() && all_sub_graph[i]->GetModelIdInfo().model_id != INVALID_MODEL_ID) { + // unload model + GELOGI("UnloadModel via new ome."); + rt_ret = rtSetDevice(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + 
GELOGE(RT_FAILED, "[GraphManager:] rtSetDevice failed, modelId=%u, graphId=%u.", + all_sub_graph[i]->GetModelIdInfo().model_id, graph_id); + ret = FAILED; + continue; + } + middle_ret = GraphLoader::UnloadModel(all_sub_graph[i]->GetModelIdInfo().model_id); + if (middle_ret != SUCCESS) { + GELOGE(middle_ret, "[GraphManager:] unload model failed, modelId=%u, graph_id=%u.", + all_sub_graph[i]->GetModelIdInfo().model_id, graph_id); + ret = middle_ret; + } + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "[GraphManager:] unload model failed, modelId=%u, graphId=%u.", + all_sub_graph[i]->GetModelIdInfo().model_id, graph_id); + ret = FAILED; + } + } + } + var_acc_ctrl_.RemoveGraph(graph_id); + graph_map_.erase(it); + auto ge_model = graph_node->GetGeModel(); + if (ge_model != nullptr) { + GELOGI("Unload model %u.", ge_model->GetModelId()); + rt_ret = rtSetDevice(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "[GraphManager:] rtSetDevice failed, modelId=%u, graphId=%u.", ge_model->GetModelId(), + graph_id); + return FAILED; + } + middle_ret = GraphLoader::UnloadModel(ge_model->GetModelId()); + if (middle_ret != SUCCESS) { + GELOGE(middle_ret, "[GraphManager:] unload model failed, modelId=%u, graph_id=%u.", ge_model->GetModelId(), + graph_id); + ret = middle_ret; + } + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "[GraphManager:] rtDeviceReset failed, modelId=%u, graphId=%u.", ge_model->GetModelId(), + graph_id); + ret = FAILED; + } + } + GE_CHK_STATUS_RET(ret, "[GraphManager:] Remove graph failed, graph_id=%u.", graph_id); + GELOGI("[GraphManager] remove graph success, graph_id=%u.", graph_id); + return SUCCESS; +} + +Status GraphManager::ParseOptions(const std::map &options) { + Status ret; + + ParseOption(options, "ge.INPUT_NODES_SET_FP16", options_.input_nodes_set_fp16); + // parse streams max parallel num + ret = 
ParseOption(options, STREAM_MAX_PARALLEL_NUM, options_.stream_max_parallel_num); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, + "parse Key:%s value failed, it must be same format as " + "DNN_V100:2,DNN_HCCL:3", + STREAM_MAX_PARALLEL_NUM.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + + // get stream num + ret = ParseOption(options, STREAM_NUM, options_.stream_num); + if ((ret != SUCCESS) || (options_.stream_num == 0)) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.stream_num, its value %d is invalid, must be not equal zero.", + options_.stream_num); + return GE_GRAPH_OPTIONS_INVALID; + } + + // get perf level, its value please see enum PerfLevel + ret = ParseOption(options, PERF_LEVEL, options_.perf_level); + if ((ret != SUCCESS) || IsPerfLevelInvalid(options_.perf_level)) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.perfLevel, its value %d is invalid, must be enum PerfLevel type.", + options_.perf_level); + return GE_GRAPH_OPTIONS_INVALID; + } + + // get encrypt mode + ret = ParseOption(options, ENCRYPT_MODE, options_.encrypt_mode); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.encryptMode value invalid."); + return GE_GRAPH_OPTIONS_INVALID; + } + + // get ek file + ParseOption(options, EK_FILE, options_.ek_file); + + // get cert file + ParseOption(options, CERT_FILE, options_.cert_file); + + // get hw key file + ParseOption(options, HW_KEY_FILE, options_.hw_key_file); + + // get private file + ParseOption(options, PRIVATE_KEY_FILE, options_.private_key_file); + + // get framework type, its value please see enum FrameworkType + ret = ParseOption(options, FRAMEWORK_TYPE, options_.framework_type); + if (ret != SUCCESS) { + // print error log in ParseOption + return GE_GRAPH_OPTIONS_INVALID; + } + + // get calibration info file + ParseOption(options, CALIBRATION_CONF_FILE, options_.calibration_conf_file); + + // get insert op info file + ParseOption(options, INSERT_OP_FILE, options_.insert_op_file); + + // get output node name + 
ParseOption(options, OUTPUT_NODE_NAME, options_.output_node_name); + + // get function bin path + ParseOption(options, "ge.func_bin_path", options_.func_bin_path); + + // get core type + ParseOption(options, CORE_TYPE, options_.core_type); + + // get weight compress flag + ret = ParseOption(options, COMPRESS_FLAG, options_.compress_flag); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.compressFlag value is invalid, must be 0 or 1."); + return GE_GRAPH_OPTIONS_INVALID; + } + + // ge.graphType. + options_.run_graph_flag = true; + ret = ParseOption(options, RUN_FLAG, options_.run_graph_flag); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.runFlag value is invalid, must be 0 or 1."); + return GE_GRAPH_OPTIONS_INVALID; + } + + // ge.graphType + ret = ParseTrainGraphFlag(options_.run_graph_flag, options_.train_graph_flag); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.runFlag value is invalid"); + return GE_GRAPH_OPTIONS_INVALID; + } + + // parse FmkOp + options_.local_fmk_op_flag = false; + ret = ParseOption(options, LOCAL_FMKOP_FLAG, options_.local_fmk_op_flag); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.localFmkopFlag value is invalid, must be 0 or 1."); + return GE_GRAPH_OPTIONS_INVALID; + } + + // parse hcom parallel + options_.hcom_parallel = false; + ret = ParseOption(options, HCOM_PARALLEL, options_.hcom_parallel); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.hcomParallel value is invalid, must be 0 or 1."); + return GE_GRAPH_OPTIONS_INVALID; + } + + // net output node dataType + ParseOption(options, OUTPUT_DATATYPE, options_.output_datatype); + if (!options_.output_datatype.empty()) { + domi::GetContext().output_type = options_.output_datatype; + } + + // Set save_original_model flag (ge.save_original_model) + GE_CHK_STATUS_RET(ParseOption(options, SAVE_ORIGINAL_MODEL, options_.save_original_model), + "Set save original model flag fail"); + GELOGI("Set save 
original model flag %s", options_.save_original_model ? "true" : "false"); + // Original model file name + ParseOption(options, ORIGINAL_MODEL_FILE, options_.original_model_file); + + return SUCCESS; +} + +Status GraphManager::ParseTrainGraphFlag(bool &options, bool &option) { + std::shared_ptr ge_instance_ptr = ge::GELib::GetInstance(); + if (ge_instance_ptr == nullptr) { + GELOGW("[Initialize] set train_graph_flag_ to 0 when GE is not initialized or finalized."); + option = false; + } else if (!ge_instance_ptr->isTrainMode()) { + option = false; + } else { // ge_instance_ptr->isTrainMode() is true + if (!options) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, + "Key:ge.runFlag, its value %d is invalid, it must be 1 when GElib::is_train_mode_ flag is 1", options); + return GE_GRAPH_OPTIONS_INVALID; + } + option = true; + } + return SUCCESS; +} + +bool GraphManager::IsPerfLevelInvalid(int32_t perf_level) { + return ((perf_level != static_cast(GEN_TASK_WITHOUT_L2FUSION)) && + (perf_level != static_cast(GEN_TASK_WITHOUT_FUSION)) && (perf_level != -1)); +} + +void GraphManager::ParseOption(const std::map &options, const std::string &key, + std::string &option) { + auto iter = options.find(key); + if (iter != options.end()) { + option = iter->second; + } +} + +Status GraphManager::ParseOption(const std::map &options, const std::string &key, + bool &option) { + auto iter = options.find(key); + if (iter != options.end()) { + string flag = iter->second; + if (flag == "0") { + option = false; + } else if (flag == "1") { + option = true; + } else { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:%s, its value %s is invalid, it must be 0 or 1.", key.c_str(), + flag.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + } + return SUCCESS; +} + +Status GraphManager::ParseOption(const std::map &options, const std::string &key, + int &option) { + const int kDecimal = 10; + char *ptr = nullptr; + auto iter = options.find(key); + if (iter != options.end()) { + option = 
static_cast(std::strtol(iter->second.c_str(), &ptr, kDecimal)); + if (ptr != nullptr && *ptr != '\0') { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:%s, its value %s is invalid, must be int32_t type.", key.c_str(), + iter->second.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + } + return SUCCESS; +} + +void GraphManager::Trim(std::string &str) { + if (!str.empty()) { + auto it = str.find_first_not_of(" "); + if (it != std::string::npos) { + str.erase(0, it); + } + it = str.find_last_not_of(" "); + if (it != std::string::npos) { + str.erase(it + 1); + } + } +} + +Status GraphManager::ParseOption(const std::map &options, const std::string &key, + std::map &option) { + auto iter = options.find(key); + if (iter == options.end()) { + return SUCCESS; + } + GELOGI("Start to parse %s", key.c_str()); + option.clear(); + std::string op_num = iter->second; + + // split string by ',' + std::vector split; + std::istringstream f(op_num); + std::string str_tmp; + while (getline(f, str_tmp, ',')) { + split.push_back(str_tmp); + } + + for (const std::string &engine_parallel : split) { + // split engine and num by : + size_t pos = engine_parallel.find(':'); + if (pos == string::npos) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, + "engine and num must be connected by :, " + "while your input is %s", + engine_parallel.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + std::string engine_name = engine_parallel.substr(0, pos); + std::string parallel_num = engine_parallel.substr(pos + 1); + Trim(engine_name); + Trim(parallel_num); + + Status ret = CheckEngineName(engine_name, key, option); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "check engine name : %s failed, ", engine_name.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + + int num = 0; + ret = ParseParallelNum(parallel_num, key, num); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "parse parallel num failed"); + return GE_GRAPH_OPTIONS_INVALID; + } + + option.insert(std::make_pair(engine_name, num)); + } + 
GELOGI("Parse %s successfully", key.c_str()); + return SUCCESS; +} + +Status GraphManager::CheckEngineName(const std::string &engine_name, const std::string &key, + const std::map &option) { + if (engine_name.empty()) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "engine name of %s is empty", key.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + // judge whether exist in engine list + if (!GELib::GetInstance()->DNNEngineManagerObj().IsEngineRegistered(engine_name)) { + GELOGW("engine : %s is not registered in %s", engine_name.c_str(), key.c_str()); + } + + auto it_stream_repeat = option.find(engine_name); + if (it_stream_repeat != option.end()) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "engine : %s of %s is repeated", engine_name.c_str(), key.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + return SUCCESS; +} + +Status GraphManager::ParseParallelNum(const std::string ¶llel_num, const std::string &key, int &num) { + if (parallel_num.empty()) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "parallel num of %s is empty", key.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + for (char c : parallel_num) { + if (!isdigit(c)) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "%s input is invalid ", key.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + } + + try { + num = std::stoi(parallel_num); + } catch (std::invalid_argument &) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "parallel num : %s of %s is invalid argument", parallel_num.c_str(), key.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } catch (std::out_of_range &) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "parallel num : %s of %s is out of range", parallel_num.c_str(), key.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } catch (...) 
{ + GELOGE(GE_GRAPH_OPTIONS_INVALID, "parallel num : %s of %s is invalid argument", parallel_num.c_str(), key.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + + if (num < 1) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "parallel num : %s of %s must bigger than 0", parallel_num.c_str(), key.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + return SUCCESS; +} + +Status GraphManager::GetGraphNode(const GraphId &graph_id, GraphNodePtr &out) { + auto iter = graph_map_.find(graph_id); + if (iter == graph_map_.end()) { + out = nullptr; + GELOGE(GE_GRAPH_GRAPH_NOT_EXIST, "[GraphManager] graph not exist, graph_id= %u.", graph_id); + return GE_GRAPH_GRAPH_NOT_EXIST; + } + out = iter->second; + return SUCCESS; +} + +Status GraphManager::GetVariable(const std::string &name, Tensor &val) { + GeTensorPtr ge_tensor_ptr = TensorAdapter::AsGeTensorPtr(val); + GE_CHECK_NOTNULL(ge_tensor_ptr); + return GetGraphContext()->GetVariableTensor(name, *(ge_tensor_ptr.get())); +} + +Status GraphManager::SummaryHandle(const GraphId &graph_id, std::vector &outputs) { + std::vector without_summary_outputs; + std::set summary_output_index; + GELOGI("[GraphManager] SummaryHandle, outputsSize=%zu.", outputs.size()); + const std::map> &whole_summary_output_indexes = + graph_optimize_.GetSummaryOutputIndexes(); + if (whole_summary_output_indexes.find(graph_id) == whole_summary_output_indexes.end()) { + GELOGE(FAILED, "No Summary graph found in map."); + return FAILED; + } + const std::map &summary_output_indexes = whole_summary_output_indexes.at(graph_id); + GELOGI("[GraphManager] SummaryHandle, summaryOutputIndexesSize=%zu.", summary_output_indexes.size()); + std::map summary_results; + for (auto iter = summary_output_indexes.begin(); iter != summary_output_indexes.end(); ++iter) { + GELOGI("[GraphManager] SummaryHandle, summaryName=%s, outputIndex=%zu.", iter->first.c_str(), iter->second); + summary_results.emplace(iter->first, TensorAdapter::AsTensor(outputs.at(iter->second))); + 
summary_output_index.emplace(iter->second); + } + + // remove summary data from outputs + if (!summary_output_index.empty()) { + for (size_t j = 0; j < outputs.size(); ++j) { + if (summary_output_index.count(j) == 0) { + without_summary_outputs.emplace_back(outputs.at(j)); + } + } + outputs.swap(without_summary_outputs); + GELOGI("[GraphManager] SummaryHandle, after swap outputsSize=%zu.", outputs.size()); + } + + if (!summary_results.empty()) { + return PushSummaryData2ME(graph_id, summary_results); + } + + return SUCCESS; +} + +Status GraphManager::CheckpointHandle(const GraphId &graph_id, const std::vector &outputs) { + GELOGI("[GraphManager] CheckpointHandle, outputsSize=%zu.", outputs.size()); + std::vector outputs_desc = graph_executor_.GetOutputsDesc(); + GELOGI("[GraphManager] CheckpointHandle, outputsDescSize=%zu.", outputs_desc.size()); + std::map save_results; + for (size_t i = 0; i < outputs_desc.size(); ++i) { + std::string desc_name = outputs_desc.at(i).name; + auto index = desc_name.find_last_of("_"); + if (index != std::string::npos) { + desc_name = desc_name.substr(0, index); + index = desc_name.find_first_of("_"); + if (index != std::string::npos) { + desc_name = desc_name.substr(index + 1); + index = desc_name.find_first_of("_"); + if (index != std::string::npos) { + desc_name = desc_name.substr(index + 1); + } + } + } + index = desc_name.find("_trans"); + if (index != std::string::npos) { + desc_name = desc_name.substr(0, index); + } + + GELOGI("[GraphManager] CheckpointHandle, descName=%s.", desc_name.c_str()); + save_results.emplace(desc_name, TensorAdapter::AsTensor(outputs.at(i))); + } + + if (!save_results.empty()) { + return PushSaveData2ME(graph_id, save_results); + } + + return SUCCESS; +} + +Status GraphManager::RegisterCallBackFunc( + const std::string &key, + const std::function &)> &callback) { + GELOGI("[GraphManager] RegisterCallBackFunc, key=%s.", key.c_str()); + me_callback_map_[key] = callback; + return SUCCESS; +} + +Status 
GraphManager::PushSummaryData2ME(const GraphId &graph_id, + const std::map &summary_data) { + GELOGI("[GraphManager] PushSummaryData2ME, dataSize=%zu.", summary_data.size()); + auto itr = me_callback_map_.find(kSummary); + if (itr == me_callback_map_.end()) { + GELOGE(FAILED, "[GraphManager] PushSummaryData2ME failed, not found summary callback."); + return FAILED; + } + return itr->second(graph_id, summary_data); +} + +Status GraphManager::PushSaveData2ME(const GraphId &graph_id, const std::map &save_data) { + GELOGI("[GraphManager] PushSaveData2ME, dataSize=%zu.", save_data.size()); + auto itr = me_callback_map_.find(kSave); + if (itr == me_callback_map_.end()) { + GELOGE(FAILED, "[GraphManager] PushSaveData2ME failed, not found checkpoint callback."); + return FAILED; + } + return itr->second(graph_id, save_data); +} + +bool GraphManager::CheckNetOutputForCheckpointGraph(NodePtr &node) { + size_t in_data_anchor_size = node->GetAllInDataAnchors().size(); + for (size_t i = 0; i < in_data_anchor_size; ++i) { + auto in = node->GetInDataAnchor(i); + if (in == nullptr) { + return false; + } + auto peerin = in->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peerin == nullptr, return false); + if (peerin->GetOwnerNode()->GetType() != kVariable && (!TransOpUtil::IsTransOp(peerin->GetOwnerNode()))) { + return false; + } + } + return true; +} + +bool GraphManager::CheckVariableForCheckpointGraph(NodePtr &node) { + auto out = node->GetOutDataAnchor(0); + if (out == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "out is nullptr."); + return false; + } + auto peer_out = out->GetPeerInDataAnchors(); + for (size_t i = 0; i < peer_out.size(); ++i) { + if (peer_out.at(i)->GetOwnerNode()->GetType() != kNetOutput && + (!TransOpUtil::IsTransOp(peer_out.at(i)->GetOwnerNode()))) { + return false; + } + } + return true; +} + +bool GraphManager::CheckTransOpForCheckpointGraph(NodePtr &node) { + for (const auto &out_node : node->GetOutAllNodes()) { + if ((!TransOpUtil::IsTransOp(out_node)) && 
(out_node->GetType() != kNetOutput) && (out_node->GetType() != kSend)) { + return false; + } + } + + for (const auto &in_node : node->GetInAllNodes()) { + if ((!TransOpUtil::IsTransOp(in_node)) && (in_node->GetType() != kVariable) && (in_node->GetType() != kRecv)) { + return false; + } + } + return true; +} + +bool GraphManager::IsCheckpointGraph(ComputeGraphPtr &compute_graph) { + if (compute_graph == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[IsCheckpointGraph] computeGraph is nullptr."); + return false; + } + for (auto &node : compute_graph->GetAllNodes()) { + OpDescPtr op = node->GetOpDesc(); + GE_RT_FALSE_CHECK_NOTNULL(op); + if (op->GetType() == kNetOutput) { + if (!CheckNetOutputForCheckpointGraph(node)) { + return false; + } + } else if (op->GetType() == kVariable) { + if (!CheckVariableForCheckpointGraph(node)) { + return false; + } + } else if ((TransOpUtil::IsTransOp(node))) { + if (!CheckTransOpForCheckpointGraph(node)) { + return false; + } + } else if (op->GetType() != kSend && op->GetType() != kRecv) { + GELOGI("this node is not allow in checkpoint sub graph, node_type: %s, node_name: %s.", op->GetType().c_str(), + op->GetName().c_str()); + return false; + } + } + GELOGI("current graph %s is checkpoint sub graph.", compute_graph->GetName().c_str()); + return true; +} + +bool GraphManager::IsBroadCastOpData(const ge::NodePtr &var_node) { + for (auto &out_anchor : var_node->GetAllOutDataAnchors()) { + GE_RT_FALSE_CHECK_NOTNULL(out_anchor); + for (auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + GE_RT_FALSE_CHECK_NOTNULL(in_anchor); + ge::NodePtr dst_node = in_anchor->GetOwnerNode(); + GE_RT_FALSE_CHECK_NOTNULL(dst_node); + if (dst_node->GetType() == HCOMBROADCAST) { + return true; + } + } + } + return false; +} + +void GraphManager::AdjustBroadCastOpData(const ge::NodePtr &var_node) { + if (!ge::AttrUtils::SetStr(var_node->GetOpDesc(), VAR_ATTR_VAR_IS_BROADCAST, "var_is_restore")) { + GELOGW("set var_is_restore failed"); + } +} + +bool 
GraphManager::IsAssignOpData(const ge::NodePtr &var_node) { + GELOGD("IsAssignOpData var_node %s", var_node->GetName().c_str()); + std::map> assign_ops = {{ASSIGN, {0}}}; + + ge::NodePtr assign_node = nullptr; + if (ConfirmUseOpAndIndexByNode(var_node, assign_ops, assign_node)) { + return true; + } + + return false; +} + +void GraphManager::AdjustAssignOpData(const ge::NodePtr &var_node) { + if (!ge::AttrUtils::SetStr(var_node->GetOpDesc(), VAR_ATTR_VAR_IS_RESTORE, "var_is_restore")) { + GELOGW("SetStr var_is_restore failed"); + } +} + +bool GraphManager::ConfirmUseOpAndIndexByAnchor(const ge::InDataAnchorPtr &in_anchor, + const map> &confirm_ops, ge::NodePtr &use_node) { + GE_RT_FALSE_CHECK_NOTNULL(in_anchor); + ge::NodePtr dst_node = in_anchor->GetOwnerNode(); + GE_RT_FALSE_CHECK_NOTNULL(dst_node); + ge::OpDescPtr dst_op_desc = dst_node->GetOpDesc(); + GE_RT_FALSE_CHECK_NOTNULL(dst_op_desc); + const string &dst_type = dst_op_desc->GetType(); + int input_index = in_anchor->GetIdx(); + + GELOGD("ConfirmUseOpAndIndex, var name %s, dst_type = %s, input index %d", dst_node->GetName().c_str(), + dst_type.c_str(), input_index); + + if (confirm_ops.count(dst_type) > 0) { + if (confirm_ops.at(dst_type).count(input_index) > 0) { + use_node = dst_node; + return true; + } + } + return false; +} + +bool GraphManager::ConfirmUseOpAndIndexByNode(const ge::NodePtr &var_node, + const map> &confirm_ops, ge::NodePtr &use_node) { + GE_RT_FALSE_CHECK_NOTNULL(var_node); + for (auto &out_anchor : var_node->GetAllOutDataAnchors()) { + GE_RT_FALSE_CHECK_NOTNULL(out_anchor); + for (auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + GE_RT_FALSE_CHECK_NOTNULL(in_anchor); + if (ConfirmUseOpAndIndexByAnchor(in_anchor, confirm_ops, use_node)) { + return true; + } + } + } + return false; +} + +void GraphManager::ResetConstType(ge::ComputeGraphPtr &compute_graph) { + if (options_.train_graph_flag) { + for (ge::NodePtr &n : compute_graph->GetDirectNode()) { + if (n->GetOpDesc() == nullptr) 
{ + continue; + } + if (n->GetOpDesc()->GetType() == CONSTANT) { + n->GetOpDesc()->SetType(CONSTANTOP); + } + } + } + + if (!options_.train_graph_flag) { + for (ge::NodePtr &n : compute_graph->GetDirectNode()) { + if (n->GetOpDesc() == nullptr) { + continue; + } + if (n->GetOpDesc()->GetType() == CONSTANTOP) { + n->GetOpDesc()->SetType(CONSTANT); + } + } + } +} + +Status GraphManager::OptimizeAfterMergeSubGraph(ge::ComputeGraphPtr &compute_graph) { + GELOGI("Start optimize after merge sub graph."); + + GEPass ge_passes_for_shape(compute_graph); + NamesToPass names_to_passes_for_shape; + IdentifyReferencePass identify_reference_pass; + names_to_passes_for_shape.emplace_back("IdentifyReferencePass", &identify_reference_pass); + NoReshapeOpRemovePass no_reshape_op_remove_pass; + names_to_passes_for_shape.emplace_back("NoReshapeOpRemovePass", &no_reshape_op_remove_pass); + TransposeTransDataPass transpose_transdata_pass; + names_to_passes_for_shape.emplace_back("TransposeTransDataPass", &transpose_transdata_pass); + GE_TIMESTAMP_START(ge_passes_for_shape); + Status ret = ge_passes_for_shape.Run(names_to_passes_for_shape); + GE_TIMESTAMP_END(ge_passes_for_shape, "GraphManager::GePassesForShape"); + if (ret != SUCCESS) { + GELOGE(ret, "Run ge_passes_for_shape optimize for OptimizeAfterMergeSubGraph failed, ret:%d.", ret); + return ret; + } + + string options = "default"; + if (GetContext().GetOption("ge.exec.variable_acc", options) != SUCCESS) { + GELOGI("get ge.exec.variable_acc failed. 
set default value."); + } + PassManager after_merge_passes; + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) PermutePass)) + GE_IF_BOOL_EXEC(options == "default" || options == "1", GELOGI("turn on variable accelerator"); + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) VariableOpPass(&var_acc_ctrl_)))) + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) TransOpDepthFusionPass)) + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) TransOpBreadthFusionPass)) + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) VariableRefDeleteOpPass)) + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) SameTransdataBreadthFusionPass)) + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) TransOpWithoutReshapeFusionPass)) + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) CompileNodesPass)) + GE_CHK_STATUS_RET(after_merge_passes.AddPass(new (std::nothrow) AtomicAddrCleanPass)) + GE_CHK_STATUS_RET( + after_merge_passes.AddPass(new (std::nothrow) LinkGenMaskNodesPass(options_.stream_max_parallel_num))) + + GE_TIMESTAMP_START(after_merge_passes); + ret = after_merge_passes.Run(compute_graph); + GE_TIMESTAMP_END(after_merge_passes, "GraphManager::AfterMergePasses"); + if (ret != SUCCESS && ret != NOT_CHANGED) { + GELOGE(ret, "Run passes after merge sub graph failed, ret:%d.", ret); + return ret; + } + + // add variable attr for hccl broadcast,need to be removed after variable pass online + for (const ge::NodePtr &node : compute_graph->GetDirectNode()) { + if (node->GetOpDesc()->GetType() != VARIABLE) { + continue; + } + + if (IsBroadCastOpData(node)) { + AdjustBroadCastOpData(node); + } + if (IsAssignOpData(node)) { + AdjustAssignOpData(node); + } + } + + GEPass ge_passes(compute_graph); + NamesToPass names_to_passes; + TransOpNearbyAllreduceFusionPass trans_op_nearby_allreduce_fusion_pass; + names_to_passes.emplace_back("ReshapeRemovePass", 
&trans_op_nearby_allreduce_fusion_pass); + ReshapeRemovePass reshape_remove_pass; + names_to_passes.emplace_back("ReshapeRemovePass", &reshape_remove_pass); + ConstantFoldingPass constant_folding_pass; + names_to_passes.emplace_back("ConstantFoldingPass", &constant_folding_pass); + DimensionAdjustPass dimension_adjust_pass; + names_to_passes.emplace_back("DimensionAdjustPass", &dimension_adjust_pass); + GE_TIMESTAMP_START(names_to_passes); + ret = ge_passes.Run(names_to_passes); + GE_TIMESTAMP_END(names_to_passes, "GraphManager::MergedGraphNameToPasses"); + if (ret != SUCCESS) { + GELOGE(ret, "Run ge_passes optimize for OptimizeAfterMergeSubGraph failed, ret:%d.", ret); + return ret; + } + + ResetConstType(compute_graph); + + PassManager pass_for_control_attr_optimize; + GE_CHK_STATUS_RET(pass_for_control_attr_optimize.AddPass(new (std::nothrow) MultiBatchPass)) + GE_CHK_STATUS_RET(pass_for_control_attr_optimize.AddPass(new (std::nothrow) ControlOpAttrPass)) + GE_CHK_STATUS_RET(pass_for_control_attr_optimize.AddPass(new (std::nothrow) CompileNodesPass)) + + GE_TIMESTAMP_START(pass_for_control_attr_optimize); + ret = pass_for_control_attr_optimize.Run(compute_graph); + GE_TIMESTAMP_END(pass_for_control_attr_optimize, "GraphManager::ControlAttrOptimize"); + if (ret != SUCCESS && ret != NOT_CHANGED) { + GELOGE(ret, "Run ControlOpAttrPass failed"); + return ret; + } + + ret = compute_graph->TopologicalSorting(); + if (ret != SUCCESS) { + GELOGE(ret, "Graph topological sort failed, ret:%d.", ret); + return ret; + } + + GELOGI("End optimize after merge sub graph."); + return SUCCESS; +} + +Status GraphManager::LoadGraphAsync(const GeModelPtr &ge_model, const GraphNodePtr &graph_node) { + GELOGI("[LoadGraphAsync] run_graph_flag[%d], graph_id[%u]", options_.run_graph_flag, graph_node->GetGraphId()); + if (options_.run_graph_flag && ge_model != nullptr) { + // synchronization run graph with model + ModelIdInfo model_id_info; + if (getenv(kEnvGeuseStaticMemory) != nullptr) { 
+ GELOGI("[LoadGraphAsync] GE_USE_STATIC_MEMORY is seted."); + } else { + GE_CHK_STATUS_RET(CheckAndReleaseMemory(ge_model, graph_node)) + } + GE_TIMESTAMP_START(LoadGraph); + GE_CHECK_NOTNULL(graph_node->graph_run_async_listener_); + Status ret = graph_loader_.LoadGraphAsync(ge_model, graph_node->graph_run_async_listener_, model_id_info); + GE_TIMESTAMP_END(LoadGraph, "GraphManager::LoadGraphAsync"); + if (ret != SUCCESS) { + GELOGE(ret, "[LoadGraphAsync] LoadGraphAsync Failed"); + graph_node->SetRunFlag(false); + return ret; + } + ge_model->SetModelId(model_id_info.model_id); + graph_node->SetGeModel(ge_model); + } + return SUCCESS; +} + +Status GraphManager::CheckAndReleaseMemory(const GeModelPtr &ge_model, const GraphNodePtr &graph_node) { + GELOGI("CheckAndReleaseMemory graph_id[%u]", graph_node->GetGraphId()); + int64_t value = 0; + bool ret = ge::AttrUtils::GetInt(ge_model, ATTR_MODEL_MEMORY_SIZE, value); + int64_t memory_size = ret ? value : 0; + ret = ge::AttrUtils::GetInt(ge_model, ATTR_MODEL_WEIGHT_SIZE, value); + int64_t weight_size = ret ? 
value : 0; + + int64_t free_memory = 0; + Status result = GraphLoader::GetMemoryInfo(free_memory); + if (result != SUCCESS) { + return result; + } + + GELOGI( + "CheckAndReleaseMemory Graph[%u] need memory_size[%ld], weight_size[%ld]," + " Device[%u] free_memory_size[%ld]", + graph_node->GetGraphId(), memory_size, weight_size, GetContext().DeviceId(), free_memory); + if (CheckInt64AddOverflow(memory_size, weight_size) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "The sum of Memory size and weight size exceeds INT64_MAX"); + return INTERNAL_ERROR; + } + if (free_memory >= (memory_size + weight_size)) { + return SUCCESS; + } + rtError_t rt_ret; + for (auto &it : graph_map_) { + auto graph_id = it.second->GetGraphId(); + auto model = it.second->GetGeModel(); + if (model == nullptr) { + continue; + } + auto model_id = model->GetModelId(); + // not loaded,no need unload + if (!it.second->GetLoadFlag()) { + GELOGI("CheckAndReleaseMemory graph[%u] has not been loaded.", graph_id); + continue; + } + uint64_t max_memory_size = 0; + result = GraphLoader::GetMaxUsedMemory(model_id, max_memory_size); + if (result != SUCCESS) { + continue; + } + GELOGI("CheckAndReleaseMemory try to UnloadGraph[%u], model[%u] which MaxUsedMemory[%lu].", graph_id, model_id, + max_memory_size); + rt_ret = rtSetDevice(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "[GraphManager:] rtSetDevice failed, modelId=%u, graphId=%u.", model_id, graph_id); + continue; + } + result = GraphLoader::UnloadModel(model_id); + if (result != SUCCESS) { + GELOGW("[GraphManager:] unload model failed, modelId=%u, graphId=%u.", model_id, graph_id); + } + rt_ret = rtDeviceReset(GetContext().DeviceId()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "[GraphManager:] rtDeviceReset failed, modelId=%u, graphId=%u.", model_id, graph_id); + continue; + } + it.second->SetLoadFlag(false); + GELOGI("CheckAndReleaseMemory UnloadGraph[%u], model[%u] success and set LoadFlag to false.", graph_id, 
model_id); + } + return SUCCESS; +} + +Status GraphManager::ProcessSubGraphWithMultiThreads(GraphManager *graph_manager, SubGraphInfoPtr &sub_graph_info_ptr, + uint64_t session_id, const GEThreadLocalContext &ge_context) { + Status ret = SUCCESS; + GetThreadLocalContext() = ge_context; + if (sub_graph_info_ptr != nullptr && graph_manager != nullptr) { + ComputeGraphPtr compute_graph_tmp = sub_graph_info_ptr->GetSubGraph(); + const std::string &engine_name = sub_graph_info_ptr->GetEngineName(); + GELOGI("ProcessSubGraphWithMultiThreads start, graph name is %s, engine_name is %s, thread id is %lu", + compute_graph_tmp != nullptr ? compute_graph_tmp->GetName().c_str() : "", engine_name.c_str(), + pthread_self()); + GraphUtils::DumpGEGraph(compute_graph_tmp, "OptimizeSubGraphBefore"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_tmp, "OptimizeSubGraphBefore"); + GE_CHECK_NOTNULL(compute_graph_tmp); + compute_graph_tmp->SetSessionID(session_id); + ret = graph_manager->graph_optimize_.OptimizeSubGraph(compute_graph_tmp, engine_name); + if (ret != SUCCESS) { + GELOGE(ret, "SubGraph optimize Failed %s", engine_name.c_str()); + return ret; + } else { + GELOGI("SubGraph optimize success %s", engine_name.c_str()); + } + GraphUtils::DumpGEGraph(compute_graph_tmp, "OptimizeSubGraphAfter"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_tmp, "OptimizeSubGraphAfter"); + sub_graph_info_ptr->SetSubGraph(compute_graph_tmp); + GELOGI("ProcessSubGraphWithMultiThreads end, graph name is %s, engine_name is %s, thread id is %lu", + compute_graph_tmp != nullptr ? 
compute_graph_tmp->GetName().c_str() : "", engine_name.c_str(), + pthread_self()); + } else { + GELOGE(ret, "graph_manager or sub_graph_info_ptr is nullptr"); + return FAILED; + } + return SUCCESS; +} + +// run graph async on session +Status GraphManager::RunGraphAsync(const GraphId &graph_id, const std::vector &inputs, + std::vector &outputs, uint64_t session_id, + std::function callback) { + GELOGI("[GraphManager] Start to run graph async, graph_id=%u, inputsSize=%zu, outputsSize=%zu.", graph_id, + inputs.size(), outputs.size()); + + bool ret = + prerun_args_q_.Push(PreRunArgs({graph_id, inputs, outputs, session_id, GetThreadLocalContext(), callback})); + if (!ret) { + GELOGE(FAILED, "[GraphManager] Run graph async failed, graph_id=%u.", graph_id); + return FAILED; + } + + GELOGI("[GraphManager] Run graph async success, graph_id=%u.", graph_id); + return SUCCESS; +} + +void GraphManager::PreRunThread(GraphManager *graph_manager) { + if (prctl(PR_SET_NAME, ("GE_PreRun")) != 0) { + GELOGW("Set thread name failed."); + } + PreRunArgs args; + while (graph_manager->thread_run_flag_) { + bool pop_status = graph_manager->prerun_args_q_.Pop(args); + if (!pop_status) { + continue; + } + GetThreadLocalContext() = args.context; + GELOGI("A new loop start."); + std::vector ge_inputs; + for (auto const &input : args.input_tensor) { + std::vector input_dims; + std::transform(input.shapeInfo.dims.begin(), input.shapeInfo.dims.end(), std::back_inserter(input_dims), + [](uint32_t x) -> int64_t { return static_cast(x); }); + GeShape input_shape(input_dims); + GeTensorDesc input_tensor_desc; + input_tensor_desc.SetShape(input_shape); + input_tensor_desc.SetDataType(static_cast(input.dataType)); + ge_inputs.emplace_back(input_tensor_desc); + } + // find graph + GraphNodePtr graph_node = nullptr; + Status ret = graph_manager->GetGraphNode(args.graph_id, graph_node); + if (ret != SUCCESS) { + ReturnError(graph_manager, args.callback, GE_GRAPH_ALREADY_RUNNING, + "[RunGraph] graph not 
exist, graph_id=" + std::to_string(args.graph_id)); + return; + } + + graph_node->Lock(); + + if (graph_node->GetRunFlag()) { + ReturnError(graph_manager, args.callback, GE_GRAPH_GRAPH_NODE_NULL, + "[RunGraph] graph already running, graph id=" + std::to_string(args.graph_id)); + graph_node->Unlock(); + return; + } + // set graph's run flag + graph_node->SetRunFlag(true); + + ComputeGraphPtr compute_graph_tmp = GraphUtils::GetComputeGraph(*(graph_node->GetGraph())); + + if (graph_manager->GetTrainFlag()) { + if (compute_graph_tmp == nullptr) { + ReturnError(graph_manager, args.callback, GE_GRAPH_GRAPH_NODE_NULL, + "[RunGraph] compute_graph_tmp is NULL, graph id = %u."); + graph_node->Unlock(); + return; + } + + if (!compute_graph_tmp->GetNeedIteration()) { + compute_graph_tmp->SetNeedIteration(GraphUtils::CheckIsTrainGraph(compute_graph_tmp)); + } + } + + std::vector ge_models; + + if (graph_manager->options_.local_fmk_op_flag) { + graph_manager->graph_optimize_.TranFrameOp(compute_graph_tmp); + } + + // it will not execute graph preprocess, optimize, parition, build if the graph has built successful. 
+ + GELOGI("Start for run graph async."); + + GeModelPtr ge_model = nullptr; + if (graph_manager->IsGraphNeedBuild(graph_node)) { + if (graph_node->GetBuildFlag()) { + ReturnError(graph_manager, args.callback, PARAM_INVALID, + "The graph " + std::to_string(graph_node->GetGraphId()) + + " need to re-build, you should remove it" + " from GE first, then AddGraph again and rebuild it."); + return; + } + + ret = graph_manager->PreRun(graph_node, ge_inputs, ge_models, ge_model, args.session_id); + if (ret != SUCCESS) { + graph_node->SetRunFlag(false); + ReturnError(graph_manager, args.callback, ret, "PreRun Failed."); + graph_node->Unlock(); + return; + } + graph_node->SetBuildFlag(true); + graph_manager->var_acc_ctrl_.SetGraphBuildEnd(graph_node->GetGraphId()); + } else { + ge_model = graph_node->GetGeModel(); + } + + graph_manager->run_args_q_.Push(RunArgs({graph_node, args.graph_id, args.input_tensor, args.output_tensor, ge_model, + GetThreadLocalContext(), args.callback})); + GELOGI("Loop end."); + } +} + +void GraphManager::RunThread(GraphManager *graph_manager) { + if (prctl(PR_SET_NAME, ("GE_Run")) != 0) { + GELOGW("Set thread name failed."); + } + RunArgs args; + while (graph_manager->thread_run_flag_) { + bool pop_status = graph_manager->run_args_q_.Pop(args); + if (!pop_status) { + continue; + } + GELOGI("A new loop start."); + GetThreadLocalContext() = args.context; + if (args.graph_node->graph_run_async_listener_ != nullptr) { + args.graph_node->graph_run_async_listener_->SetCallback(args.callback); + } + + Status ret; + if (!args.graph_node->GetLoadFlag()) { + ret = graph_manager->LoadGraphAsync(args.ge_model, args.graph_node); + if (ret != SUCCESS) { + StopQueue(graph_manager); + ReturnError(graph_manager, args.callback, ret, "LoadGraphAsync failed, thread exit."); + args.graph_node->Unlock(); + return; + } + args.graph_node->SetLoadFlag(true); + GELOGI("LoadGraph[%u], model[%u] success and set LoadFlag to true.", args.graph_node->GetGraphId(), + 
args.ge_model->GetModelId()); + } + + if (graph_manager->GetTrainFlag()) { + ret = graph_manager->graph_executor_.SetGraphContext(graph_manager->GetGraphContext()); + if (ret != SUCCESS) { + GELOGW("[GraphManager] SetGraphContext failed, graph_id=%u.", args.graph_id); + } + graph_manager->graph_executor_.SetTrainFlag(graph_manager->options_.train_graph_flag); + } + + ret = graph_manager->graph_executor_.ExecuteGraphAsync(args.graph_id, args.graph_node->GetGeModel(), + args.input_tensor, args.output_tensor); + args.graph_node->SetRunFlag(false); + args.graph_node->Unlock(); + if (ret != SUCCESS) { + GELOGE(ret, "[GraphManager] Run graph async failed, graph_id=%u.", args.graph_id); + StopQueue(graph_manager); + return; + } + GELOGI("[GraphManager] Run graph async success, graph_id=%u.", args.graph_id); + } +} + +void GraphManager::StopQueue(GraphManager *graph_manager) { + if (graph_manager == nullptr) { + return; + } + + graph_manager->thread_run_flag_.store(false); + graph_manager->prerun_args_q_.Stop(); + graph_manager->run_args_q_.Stop(); +} + +void GraphManager::ReturnError(GraphManager *graph_manager, std::function callback, Status ret, + const string &log) { + if (graph_manager == nullptr) { + return; + } + + GELOGE(ret, "%s.", log.c_str()); + StopQueue(graph_manager); + callback(ret); +} + +bool GraphManager::IsGraphNeedRebuild(uint32_t graph_id) { + // find graph + GraphNodePtr graph_node = nullptr; + Status ret = GetGraphNode(graph_id, graph_node); + if (ret != SUCCESS) { + GELOGE(ret, "[RunGraph] graph not exist, graph_id=%u.", graph_id); + return true; + } + + if (graph_node == nullptr) { + GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "[RunGraph] graph node is NULL, graphId=%u.", graph_id); + return true; + } + + return IsGraphNeedBuild(graph_node); +} + +bool GraphManager::IsGraphNeedBuild(const GraphNodePtr &graph_node) { + return !graph_node->GetBuildFlag() || var_acc_ctrl_.IsGraphNeedRebuild(graph_node->GetGraphId()); +} +} // namespace ge diff --git 
a/src/ge/graph/manager/graph_manager.h b/src/ge/graph/manager/graph_manager.h new file mode 100644 index 00000000..cd07a679 --- /dev/null +++ b/src/ge/graph/manager/graph_manager.h @@ -0,0 +1,301 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_MANAGER_GRAPH_MANAGER_H_ +#define GE_GRAPH_MANAGER_GRAPH_MANAGER_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "common/blocking_queue.h" +#include "common/ge_inner_error_codes.h" +#include "external/graph/types.h" +#include "ge/ge_api_types.h" +#include "graph/build/graph_build.h" +#include "graph/execute/graph_execute.h" +#include "graph/ge_local_context.h" +#include "graph/load/graph_loader.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/manager/util/variable_accelerate_ctrl.h" +#include "graph/optimize/graph_optimize.h" +#include "graph/partition/graph_partition.h" +#include "graph/preprocess/graph_preprocess.h" +#include "model/ge_model.h" + +namespace ge { +class GraphManager { + public: + GraphManager(); + + ~GraphManager() = default; + + /// + /// @ingroup ge_graph + /// @brief graph manager init + /// @param [in] options user config params + /// @return Status result of function + /// + Status Initialize(const std::map &options); + + /// + /// @ingroup ge_graph + /// @brief graph manager finalize + /// @return Status result of function + /// + Status Finalize(); + + 
/// + /// @ingroup ge_graph + /// @brief add specific graph + /// @param [in] graph_id graph id + /// @param [out] Graph output graph + /// @return Status result of function + /// + Status AddGraph(const GraphId &graph_id, const Graph &graph); + + /// + /// @ingroup ge_graph + /// @brief remove specific graph + /// @param [in] graph_id graph id + /// @return Status result of function + /// + Status RemoveGraph(const GraphId &graph_id); + + /// + /// @ingroup ge_graph + /// @brief run specific graph + /// @param [in] graph_id graph id + /// @param [in] inputs input data + /// @param [out] outputs output data + /// @return Status result of function + /// + Status RunGraph(const GraphId &graph_id, const std::vector &inputs, std::vector &outputs, + uint64_t session_id = INVALID_SESSION_ID); + + /// + /// @ingroup ge_graph + /// @brief build specific graph + /// @param [in] graph_id graph id + /// @param [in] inputs input data + /// @param [out] models build result + /// @return Status result of function + /// + Status BuildGraph(const GraphId &graph_id, const std::vector &inputs, vector &models); + + /// + /// @ingroup ge_graph + /// @brief Save extra attribute to Model + /// @param [in] model: Model attribues will save to. + /// @param [in] type: type of OpDesc. 
+ /// @param [in] attrs: attributes of OpDesc + /// @param [in] inputs: input tensor + /// @param [in] outputs: output tensor + /// @return: Status + /// + Status SaveParams(ge::GeModel &model, const std::string &type, const std::map &attrs, + const std::vector &inputs, const std::vector &outputs); + + /// + /// @ingroup ge_graph + /// @brief get variable value from the session with specific session id + /// @param [in] sessionId session id + /// @param [in] name op name + /// @param [out] val out value tensor + /// @return Status result of function + /// + Status GetVariable(const std::string &name, Tensor &val); + + /// + /// @ingroup ge_graph + /// @brief run graph async on session with specific session id + /// @param [in] graph_id graph id + /// @param [in] inputs input data + /// @param [out] outputs output data + /// @param [out] callback: callback while run graph async finish + /// @return Status result of function + /// + Status RunGraphAsync(const GraphId &graph_id, const std::vector &inputs, + std::vector &outputs, uint64_t session_id, std::function callback); + + /// + /// @ingroup ge_graph + /// @brief me register the callback function to get the result of summary or checkpoin + /// @param [in] key: summary or checkpoint + /// @param [in] callbak: The real callback object of me + /// @return Status result of function + /// + Status RegisterCallBackFunc( + const std::string &key, + const std::function &)> &callback); + + const bool GetTrainFlag() const { return options_.train_graph_flag; } + + bool IsGraphNeedRebuild(uint32_t graph_id); + + private: + struct PreRunArgs { + GraphId graph_id; + std::vector input_tensor; + std::vector output_tensor; + uint64_t session_id; + GEThreadLocalContext context; + std::function callback; + }; + + struct RunArgs { + GraphNodePtr graph_node; + GraphId graph_id; + std::vector input_tensor; + std::vector output_tensor; + GeModelPtr ge_model; + GEThreadLocalContext context; + std::function callback; + }; + + Status 
GetGraphNode(const GraphId &graph_id, GraphNodePtr &out); + + std::shared_ptr GetModelListener() const { return graph_run_listener_; } + + static Status ProcessSubGraphWithMultiThreads(GraphManager *graph_manager, SubGraphInfoPtr &sub_graph_info_ptr, + uint64_t session_id, const GEThreadLocalContext &ge_context); + Status PreRun(const GraphNodePtr &graph_node, const std::vector &inputs, vector &ge_models, + GeModelPtr &ge_model, uint64_t session_id = INVALID_SESSION_ID); + + Status StartForRunGraph(const GraphNodePtr &graph_node, const std::vector &inputs, + vector &ge_models, uint64_t session_id = INVALID_SESSION_ID); + + Status InnerRunGraph(GraphNodePtr &graph_node, const GraphId &graph_id, const std::vector &inputs, + std::vector &outputs); + + Status ParseOptions(const std::map &options); + + static void ParseOption(const std::map &options, const std::string &key, + std::string &option); + + static Status ParseOption(const std::map &options, const std::string &key, bool &option); + + static Status ParseOption(const std::map &options, const std::string &key, int &option); + + static Status ParseOption(const std::map &options, const std::string &key, + std::map &option); + + static void Trim(std::string &str); + + static Status CheckEngineName(const std::string &engine_name, const std::string &key, + const std::map &option); + + static Status ParseParallelNum(const std::string ¶llel_num, const std::string &key, int &num); + + static Status ParseTrainGraphFlag(bool &options, bool &option); + + static bool IsPerfLevelInvalid(int32_t perf_level); + + Status SummaryHandle(const GraphId &graph_id, std::vector &outputs); + + Status CheckpointHandle(const GraphId &graph_id, const std::vector &outputs); + + // call the callback function of ME to push summary result data to ME + Status PushSummaryData2ME(const GraphId &graph_id, const std::map &summary_data); + + // call the callback function of ME to push save result data to ME + Status PushSaveData2ME(const GraphId 
&graph_id, const std::map &save_data); + + bool IsCheckpointGraph(ComputeGraphPtr &compute_graph); + + bool CheckNetOutputForCheckpointGraph(NodePtr &node); + + bool CheckVariableForCheckpointGraph(NodePtr &node); + + bool CheckTransOpForCheckpointGraph(NodePtr &node); + + Status MergeSubGraph(ComputeGraphPtr &compute_graph, const std::vector &sub_graph_list); + + bool IsBroadCastOpData(const ge::NodePtr &var_node); + + void AdjustBroadCastOpData(const ge::NodePtr &var_node); + + bool IsAssignOpData(const ge::NodePtr &var_node); + + void AdjustAssignOpData(const ge::NodePtr &var_node); + + bool ConfirmUseOpAndIndexByAnchor(const ge::InDataAnchorPtr &in_anchor, const map> &confirm_ops, + ge::NodePtr &use_node); + + bool ConfirmUseOpAndIndexByNode(const ge::NodePtr &var_node, const map> &confirm_ops, + ge::NodePtr &use_node); + + // graph context + std::shared_ptr GetGraphContext() const { return graph_context_; } + + void ResetConstType(ge::ComputeGraphPtr &compute_graph); + + Status OptimizeAfterMergeSubGraph(ge::ComputeGraphPtr &compute_graph); + + Status LoadGraphAsync(const GeModelPtr &ge_model, const GraphNodePtr &graph_node); + + Status CheckAndReleaseMemory(const GeModelPtr &ge_model, const GraphNodePtr &graph_node); + + Status LoadGraph(const GeModelPtr &ge_model, const GraphNodePtr &graph_node); + + bool IsGraphNeedBuild(const GraphNodePtr &graph_node); + + static void PreRunThread(GraphManager *graph_manager); + static void RunThread(GraphManager *graph_manager); + static void StopQueue(GraphManager *graph_manager); + static void ReturnError(GraphManager *graph_manager, std::function callback, Status ret, + const string &log); + + std::atomic_bool thread_run_flag_; + BlockingQueue prerun_args_q_{}; + BlockingQueue run_args_q_{}; + std::thread prerun_thread_; + std::thread run_thread_; + + std::map graph_map_; + + // for run graph synchronous return + std::mutex sync_run_mutex_; + std::condition_variable condition_; + // run graph synchronization call back 
listener + std::shared_ptr graph_run_listener_; + + // summary and checkpoint callback function list for ME, key is summary or checkpoint + std::map &)>> me_callback_map_; + + bool init_flag_; + + GraphManagerOptions options_; + + GraphPrepare graph_preparer_; + GraphOptimize graph_optimize_; + GraphPartitioner graph_partitioner_; + GraphBuilder graph_builder_; + GraphLoader graph_loader_; + GraphExecutor graph_executor_; + GraphContextPtr graph_context_ = nullptr; + + VarAccelerateCtrl var_acc_ctrl_; + + std::mutex run_mutex_; +}; +}; // namespace ge + +#endif // GE_GRAPH_MANAGER_GRAPH_MANAGER_H_ diff --git a/src/ge/graph/manager/graph_manager_utils.cc b/src/ge/graph/manager/graph_manager_utils.cc new file mode 100644 index 00000000..bd08a554 --- /dev/null +++ b/src/ge/graph/manager/graph_manager_utils.cc @@ -0,0 +1,233 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/manager/graph_manager_utils.h" + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge/ge_util.h" +#include "graph/debug/ge_attr_define.h" +#include "common/string_util.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/optimize/common/params.h" +#include "omg/omg_inner_types.h" +#include "runtime/mem.h" + +namespace ge { +GraphNode::GraphNode(GraphId graph_id) + : graph_id_(graph_id), + run_flag_(false), + subgraph_ptr_list_(), + graph_(nullptr), + compute_graph_(nullptr), + build_flag_(false), + load_flag_(false), + ge_model_(nullptr), + sem_(1) { + graph_run_async_listener_ = MakeShared(); + if (graph_run_async_listener_ == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed"); + } +} + +GraphNode::~GraphNode() = default; + +void GraphNode::Lock() { + sem_.Push(0); +} + +void GraphNode::Unlock() { + uint8_t unused; + sem_.Pop(unused); +} + +SubGraphInfo::SubGraphInfo() : subgraph_ptr_(nullptr), ge_model_ptr_(nullptr), malloc_flag_(false) {} + +SubGraphInfo::~SubGraphInfo() { + if (malloc_flag_) { + for (auto &buffer_addr : buffer_addr_) { + if (buffer_addr == nullptr) { + continue; + } + rtError_t rt_ret; + rt_ret = rtFreeHost(buffer_addr); + buffer_addr = nullptr; + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "[GraphManager] subgraph free buffer failed, modelId = %u", model_id_info_.model_id); + } + } + } +} + +Status SubGraphInfo::FreeInOutBuffer() { + if (malloc_flag_) { + for (auto iter = buffer_addr_.begin(); iter != buffer_addr_.end(); ++iter) { + rtError_t rt_ret; + rt_ret = rtFreeHost(*iter); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "[GraphManager] subgraph free buffer failed, modelId = %u", model_id_info_.model_id); + buffer_addr_.erase(buffer_addr_.begin(), iter); + return GE_GRAPH_FREE_FAILED; + } + } + buffer_addr_.clear(); + + malloc_flag_ = false; + return SUCCESS; + } else { + GELOGI("[GraphManager] not malloc buffer, modelId = %u", 
model_id_info_.model_id); + return SUCCESS; + } +} + +GraphModelListener::GraphModelListener() : result_code_(0), is_finished_(false), mutex_(nullptr), condition_(nullptr) {} + +Status GraphModelListener::SetCondition(std::mutex *mutex, std::condition_variable *cond) { + if (mutex == nullptr || cond == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphManager] param is NULL."); + return GE_GRAPH_PARAM_NULLPTR; + } + + mutex_ = mutex; + condition_ = cond; + return SUCCESS; +} + +Status GraphModelListener::OnComputeDone(uint32_t model_id, uint32_t task_id, uint32_t result) { + GELOGI( + "[GraphManager] graph compute call back, model_id:%u, task_id:%u, " + "resultCode:%u.", + model_id, task_id, result); + GE_IF_BOOL_EXEC(condition_ == nullptr, GELOGE(FAILED, "[GraphModelListener] condition is null."); return FAILED); + std::lock_guard lock(*mutex_); + result_code_ = result; + is_finished_ = true; + condition_->notify_all(); + + return SUCCESS; +} + +uint32_t GraphModelListener::GetResultCode() const { + if (!is_finished_) { + GELOGE(INTERNAL_ERROR, "[GraphManager] model not run finish."); + return INTERNAL_ERROR; + } + return result_code_; +} + +Status GraphModelListener::ResetResult() { + if (mutex_ == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphManager] param is NULL."); + return GE_GRAPH_PARAM_NULLPTR; + } + + std::lock_guard lock(*mutex_); + result_code_ = 0; + is_finished_ = false; + + return SUCCESS; +} + +void RunAsyncListener::SetCallback(const std::function &callback) { + sem_.Push(0); + callback_ = callback; +} + +Status RunAsyncListener::OnComputeDone(uint32_t model_id, uint32_t task_id, uint32_t result) { + GELOGI("[GraphManager] run graph async call back, modelId:%u, taskId:%u, resultCode:%u.", + model_id, task_id, result); + GE_CHECK_NOTNULL(callback_); + callback_(result); + uint8_t unused; + sem_.Pop(unused); + return SUCCESS; +} + +bool HasCalcOp(const ComputeGraphPtr &graph) { + if (graph == nullptr) { + return false; + } + + static const 
std::set calc_op_type = {CONVOLUTION, DECONVOLUTION, FULL_CONNECTION}; + + for (const auto &node : graph->GetAllNodes()) { + OpDescPtr op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, GELOGE(FAILED, "Node GetOpDesc is nullptr"); return false); + if (calc_op_type.find(op_desc->GetType()) != calc_op_type.end()) { + return true; + } + } + + return false; +} + +Status CheckTinyCalc(const char *cal_conf, const ComputeGraphPtr &graph) { + if ((Params::Instance() != nullptr) && (Params::Instance()->GetTarget() != TARGET_TYPE_TINY)) { + return SUCCESS; + } + + if (cal_conf != nullptr && *cal_conf != '\0') { + return SUCCESS; + } + + if (HasCalcOp(graph)) { + return GE_GRAPH_PARAM_NULLPTR; + } + + return SUCCESS; +} + +Status ParseOutNodes(const string &out_nodes) { + try { + if (!out_nodes.empty()) { + domi::GetContext().out_nodes_map.clear(); + domi::GetContext().user_out_nodes.clear(); + + vector nodes_v = StringUtils::Split(out_nodes, ';'); + for (const string &node : nodes_v) { + vector key_value_v = StringUtils::Split(node, ':'); + if (key_value_v.size() != 2) { // must contain 2 items + GELOGE(GE_GRAPH_PARAM_NULLPTR, "Invalid outNodes: %s", node.c_str()); + return GE_GRAPH_PARAM_NULLPTR; + } + auto iter = domi::GetContext().out_nodes_map.find(key_value_v[0]); + int32_t index = std::stoi(StringUtils::Trim(key_value_v[1])); + if (iter != domi::GetContext().out_nodes_map.end()) { + iter->second.emplace_back(index); + } else { + std::vector index_v; + index_v.emplace_back(index); + domi::GetContext().out_nodes_map.emplace(key_value_v[0], index_v); + } + domi::GetContext().user_out_nodes.emplace_back(key_value_v[0], index); + } + } + } catch (std::invalid_argument &) { + GELOGE(PARAM_INVALID, "out nodes: %s, key value[1] is invalid argument", out_nodes.c_str()); + return PARAM_INVALID; + } catch (std::out_of_range &) { + GELOGE(PARAM_INVALID, "out nodes: %s, key value[1] is out of range", out_nodes.c_str()); + return PARAM_INVALID; + } catch (...) 
{ + GELOGE(GE_GRAPH_PARAM_NULLPTR, "Invalid outNodes: %s", out_nodes.c_str()); + return GE_GRAPH_PARAM_NULLPTR; + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/manager/graph_manager_utils.h b/src/ge/graph/manager/graph_manager_utils.h new file mode 100644 index 00000000..725df2e0 --- /dev/null +++ b/src/ge/graph/manager/graph_manager_utils.h @@ -0,0 +1,261 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_MANAGER_GRAPH_MANAGER_UTILS_H_ +#define GE_GRAPH_MANAGER_GRAPH_MANAGER_UTILS_H_ + +#include +#include +#include +#include +#include +#include +#include +#include + +#include "common/blocking_queue.h" +#include "common/ge_types.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/compute_graph.h" +#include "graph/graph.h" +#include "graph/model.h" +#include "model/ge_model.h" +#include "register/register_fmk_types.h" + +namespace ge { +// state for graph task in life cycle +enum GraphNodeState { + GRAPH_NODE_INIT = 0, + GRAPH_NODE_READY, +}; + +using GraphId = uint32_t; +using ConstModelPtr = std::shared_ptr; +using GeModelPtr = std::shared_ptr; + +using ConstGraphPtr = std::shared_ptr; +using GraphPtr = std::shared_ptr; + +const uint64_t INVALID_SESSION_ID = 0xffffffffffffffffULL; + +struct ModelIdInfo { + uint32_t model_id{INVALID_MODEL_ID}; +}; + +class SubGraphInfo { + public: + SubGraphInfo(); + + ~SubGraphInfo(); + + void SetSubGraph(const ComputeGraphPtr &sub_graph_ptr) { subgraph_ptr_ = sub_graph_ptr; } + ComputeGraphPtr GetSubGraph() const { return subgraph_ptr_; } + + void SetEngineName(const std::string &engine_name) { engine_name_ = engine_name; } + const std::string &GetEngineName() const { return engine_name_; } + + void SetInputFlag(const std::vector &input_flag) { input_flag_ = input_flag; } + const std::vector &GetInputFlag() const { return input_flag_; } + + void SetOutputFlag(const std::vector &output_flag) { output_flag_ = output_flag; } + const std::vector &GetOutputFlag() const { return output_flag_; } + + void SetModelIdInfo(const ModelIdInfo &model_id_info) { model_id_info_ = model_id_info; } + ModelIdInfo GetModelIdInfo() const { return model_id_info_; } + + void SetGeModelPtr(const GeModelPtr &ge_model_ptr) { ge_model_ptr_ = ge_model_ptr; } + bool GeModelIsValid() const { return ge_model_ptr_ != 
nullptr; } + + Status FreeInOutBuffer(); + + void SetOutputContext(const std::string &output) { output_names_ = output; } + std::string GetOutputContext() const { return output_names_; } + + void SetStreamLabel(const std::string &stream_label) { stream_label_ = stream_label; } + const std::string &GetStreamLabel() const { return stream_label_; } + + void SetEnd2PldMap(std::unordered_map &end_map) { end_to_pld_ = end_map; } + const std::unordered_map &GetEnd2PldMap() const { return end_to_pld_; } + + void SetPld2EndMap(std::unordered_map &pld_map) { pld_to_end_ = pld_map; } + const std::unordered_map &GetPld2EndMap() const { return pld_to_end_; } + + private: + ComputeGraphPtr subgraph_ptr_; + std::string engine_name_; + std::vector input_flag_; + std::vector output_flag_; + ModelIdInfo model_id_info_; + GeModelPtr ge_model_ptr_; + bool malloc_flag_; + std::vector buffer_addr_; + std::string output_names_; + std::vector buffer_size_; + std::string stream_label_; + std::unordered_map end_to_pld_; + std::unordered_map pld_to_end_; +}; + +using SubGraphInfoPtr = std::shared_ptr; + +// for run graph async listener +class RunAsyncListener : public ge::ModelListener { + public: + RunAsyncListener() : sem_(1) {} + + ~RunAsyncListener() = default; + + void SetCallback(const std::function &callback); + + // callback + Status OnComputeDone(uint32_t model_id, uint32_t task_id, uint32_t result) override; + + private: + std::function callback_; + BlockingQueue sem_; +}; + +// single graph node info +class GraphNode { + public: + explicit GraphNode(GraphId graph_id); + ~GraphNode(); + + GraphId GetGraphId() const { return graph_id_; } + + ConstGraphPtr GetGraph() const { return graph_; } + void SetGraph(const GraphPtr &graph) { graph_ = graph; } + + ComputeGraphPtr GetComputeGraph() const { return compute_graph_; } + void SetComputeGraph(const ComputeGraphPtr &compute_graph) { compute_graph_ = compute_graph; } + + bool GetRunFlag() const { return run_flag_; } + void 
SetRunFlag(bool flag) { run_flag_ = flag; } + + void SetSubGraph(std::vector &subgraph_ptr_list) { subgraph_ptr_list_ = subgraph_ptr_list; } + const std::vector &GetAllSubGraph() const { return subgraph_ptr_list_; } + + bool GetBuildFlag() const { return build_flag_; } + void SetBuildFlag(bool buildFlag) { build_flag_ = buildFlag; } + bool GetLoadFlag() const { return load_flag_; } + void SetLoadFlag(bool load_flag) { load_flag_ = load_flag; } + void SetGeModel(const GeModelPtr &ge_model) { ge_model_ = ge_model; } + GeModelPtr GetGeModel() const { return ge_model_; } + void Lock(); + void Unlock(); + + // run graph asynchronous listener + std::shared_ptr graph_run_async_listener_; + + private: + GraphId graph_id_; + bool run_flag_; + std::vector subgraph_ptr_list_; + + GraphPtr graph_; + ComputeGraphPtr compute_graph_; + bool build_flag_; + bool load_flag_; + GeModelPtr ge_model_; + BlockingQueue sem_; +}; + +using GraphNodePtr = std::shared_ptr; +using ConstGraphNodePtr = shared_ptr; + +class GraphModelListener : public ge::ModelListener { + public: + GraphModelListener(); + + ~GraphModelListener() = default; + + // callback + Status OnComputeDone(uint32_t model_id, uint32_t task_id, uint32_t result) override; + + Status SetCondition(std::mutex *mutex, std::condition_variable *cond); + + Status ResetResult(); + + // need lock by caller + uint32_t GetResultCode() const; + + bool IsFinished() const { return is_finished_; } + + private: + uint32_t result_code_; + bool is_finished_; + + // not owner + std::mutex *mutex_; + // not owner + std::condition_variable *condition_; +}; + +Status CheckTinyCalc(const char *cal_conf, const ComputeGraphPtr &graph); + +Status ParseOutNodes(const string &out_nodes); + +struct GraphManagerOptions { + int32_t stream_num; + int32_t perf_level; + int32_t encrypt_mode; + int32_t framework_type; + std::string ek_file; + std::string cert_file; + std::string hw_key_file; + std::string private_key_file; + std::string calibration_conf_file; 
+ std::string insert_op_file; + std::string output_node_name; + std::string func_bin_path; + std::string input_nodes_set_fp16; + std::string core_type; + bool compress_flag; + bool run_graph_flag; + bool train_graph_flag; + bool local_fmk_op_flag; + bool hcom_parallel; + std::map stream_max_parallel_num; + std::string output_datatype; + std::string original_model_file; + bool save_original_model; + GraphManagerOptions() + : stream_num(1), + perf_level(domi::GEN_TASK_WITHOUT_FUSION), + encrypt_mode(-1), + framework_type(domi::FMK_TYPE_T), + ek_file(""), + cert_file(""), + hw_key_file(""), + private_key_file(""), + calibration_conf_file(""), + insert_op_file(""), + output_node_name(""), + func_bin_path(""), + core_type(""), + compress_flag(false), + run_graph_flag(false), + train_graph_flag(false), + local_fmk_op_flag(false), + hcom_parallel(false), + save_original_model(false) {} +}; +} // namespace ge + +#endif // GE_GRAPH_MANAGER_GRAPH_MANAGER_UTILS_H_ diff --git a/src/ge/graph/manager/graph_mem_allocator.cc b/src/ge/graph/manager/graph_mem_allocator.cc new file mode 100644 index 00000000..25fc5eb2 --- /dev/null +++ b/src/ge/graph/manager/graph_mem_allocator.cc @@ -0,0 +1,227 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/manager/graph_mem_allocator.h" + +#include +#include +#include + +#include "framework/common/debug/ge_log.h" + +namespace ge { +void MemoryAllocator::Initialize(uint32_t device_id) { + GELOGI("MemoryAllocator::Initialize"); + + // when redo Initialize free memory + for (auto &it : memory_base_map_) { + if (FreeMemory(it.second.memory_addr_, device_id) != ge::SUCCESS) { + GELOGW("Initialize: FreeMemory failed"); + } + } + memory_base_map_.clear(); +} + +void MemoryAllocator::Finalize(uint32_t device_id) { + GELOGI("MemoryAllocator::Finalize"); + + // free memory + for (auto &it : memory_base_map_) { + if (FreeMemory(it.second.memory_addr_, device_id) != ge::SUCCESS) { + GELOGW("Finalize: FreeMemory failed"); + } + } + memory_base_map_.clear(); +} + +uint8_t *MemoryAllocator::MallocMemory(uint64_t memory_size, uint32_t device_id) const { + uint8_t *memory_addr = nullptr; + + if (rtMalloc(reinterpret_cast(&memory_addr), memory_size, memory_type_) != RT_ERROR_NONE) { + GELOGE(ge::INTERNAL_ERROR, + "MemoryAllocator::MallocMemory device_id = %u," + " size= %lu", + device_id, memory_size); + + return nullptr; + } + + GELOGI("MemoryAllocator::MallocMemory device_id = %u, size= %lu", device_id, memory_size); + return memory_addr; +} + +Status MemoryAllocator::FreeMemory(uint8_t *memory_addr, uint32_t device_id) const { + GELOGI("MemoryAllocator::FreeMemory device_id = %u", device_id); + if (rtFree(memory_addr) != RT_ERROR_NONE) { + GELOGE(ge::INTERNAL_ERROR, "MemoryAllocator::MallocMemory device_id = %u", device_id); + return ge::INTERNAL_ERROR; + } + memory_addr = nullptr; + return ge::SUCCESS; +} + +uint8_t *MemoryAllocator::MallocMemory(const string &memory_key, uint64_t memory_size, uint32_t device_id) { + auto it = memory_base_map_.find(memory_key); + if (it != memory_base_map_.end()) { + it->second.memory_used_num_++; + return it->second.memory_addr_; + } + + uint8_t *memory_addr = MallocMemory(memory_size, device_id); + + if (memory_addr == 
nullptr) { + GELOGE(ge::INTERNAL_ERROR, + "MemoryAllocator::MallocMemory failed," + " memory_key[%s], size = %lu.", + memory_key.c_str(), memory_size); + return nullptr; + } + + MemoryInfo memory_info(memory_addr, memory_size); + memory_info.memory_used_num_++; + memory_base_map_[memory_key] = memory_info; + mem_malloced_ = true; + return memory_addr; +} + +Status MemoryAllocator::FreeMemory(const string &memory_key, uint32_t device_id) { + auto it = memory_base_map_.find(memory_key); + if (it == memory_base_map_.end()) { + if (mem_malloced_) { + GELOGW( + "MemoryAllocator::FreeMemory failed," + " memory_key[%s] was not exist, device_id = %u.", + memory_key.c_str(), device_id); + } + return ge::INTERNAL_ERROR; + } + + if (it->second.memory_used_num_ > 1) { + GELOGW("MemoryAllocator::FreeMemory memory_key[%s] should not be released, reference count %d", memory_key.c_str(), + it->second.memory_used_num_); + // reference count greater than 1 represnt that static memory is used by + // someone else, reference count decrement + it->second.memory_used_num_--; + return ge::SUCCESS; + } + + if (FreeMemory(it->second.memory_addr_, device_id) != ge::SUCCESS) { + GELOGE(ge::INTERNAL_ERROR, + "MemoryAllocator::FreeMemory rtFree failed," + " memory_key[%s]", + memory_key.c_str()); + return ge::INTERNAL_ERROR; + } + + GELOGI("MemoryAllocator::FreeMemory device_id = %u", device_id); + + memory_base_map_.erase(it); + return ge::SUCCESS; +} + +uint8_t *MemoryAllocator::GetMemoryAddr(const string &memory_key, uint32_t device_id) { + auto it = memory_base_map_.find(memory_key); + if (it == memory_base_map_.end()) { + GELOGW( + "MemoryAllocator::GetMemoryAddr failed," + " memory_key[%s] was not exist, device_id = %u.", + memory_key.c_str(), device_id); + return nullptr; + } + + return it->second.memory_addr_; +} + +MemManager::MemManager() : default_memory_allocator_(nullptr) {} + +MemManager::~MemManager() { Finalize(); } + +MemManager &MemManager::Instance() { + static MemManager 
mem_manager; + return mem_manager; +} + +MemoryAllocator *MemManager::Instance(rtMemType_t memory_type) { return Instance().GetMemoryAllocator(memory_type); } + +Status MemManager::Initialize(const std::vector &memory_type) { + std::lock_guard lock(allocator_mutex_); + MemoryAllocator *memory_allocator = nullptr; + for (unsigned int index : memory_type) { + auto it = memory_allocator_map_.find(index); + if (it == memory_allocator_map_.end()) { + memory_allocator = new (std::nothrow) MemoryAllocator(index); + + if (memory_allocator != nullptr) { + memory_allocator_map_[index] = memory_allocator; + GELOGI("Create MemoryAllocator memory type[%u] success.", index); + } else { + GELOGE(ge::INTERNAL_ERROR, "Alloc MemoryAllocator failed."); + } + } else { + memory_allocator = it->second; + } + + if (memory_allocator == nullptr) { + GELOGE(ge::INTERNAL_ERROR, "Create MemoryAllocator failed."); + return ge::INTERNAL_ERROR; + } else { + memory_allocator->Initialize(0); + } + } + + default_memory_allocator_ = new (std::nothrow) MemoryAllocator(RT_MEMORY_RESERVED); + if (default_memory_allocator_ == nullptr) { + GELOGE(ge::INTERNAL_ERROR, "Create MemoryAllocator failed."); + return ge::INTERNAL_ERROR; + } + return ge::SUCCESS; +} + +void MemManager::Finalize() noexcept { + GELOGI("Finalize."); + std::lock_guard lock(allocator_mutex_); + for (auto &memory_allocator : memory_allocator_map_) { + if (memory_allocator.second != nullptr) { + memory_allocator.second->Finalize(0); + delete memory_allocator.second; + memory_allocator.second = nullptr; + } + } + + if (default_memory_allocator_ != nullptr) { + delete default_memory_allocator_; + default_memory_allocator_ = nullptr; + } + memory_allocator_map_.clear(); +} + +MemoryAllocator *MemManager::GetMemoryAllocator(rtMemType_t memory_type) { + std::lock_guard lock(allocator_mutex_); + MemoryAllocator *memory_allocator = nullptr; + auto it = memory_allocator_map_.find(memory_type); + if (it != memory_allocator_map_.end()) { + 
memory_allocator = it->second; + } + + // Usually impossible + if (memory_allocator == nullptr) { + GELOGE(ge::INTERNAL_ERROR, "GetMemoryAllocator failed, memory type is %u.", memory_type); + return default_memory_allocator_; + } + + return memory_allocator; +} +} // namespace ge diff --git a/src/ge/graph/manager/graph_mem_allocator.h b/src/ge/graph/manager/graph_mem_allocator.h new file mode 100644 index 00000000..fa4bf42f --- /dev/null +++ b/src/ge/graph/manager/graph_mem_allocator.h @@ -0,0 +1,175 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_MANAGER_GRAPH_MEM_ALLOCATOR_H_ +#define GE_GRAPH_MANAGER_GRAPH_MEM_ALLOCATOR_H_ + +#include +#include +#include +#include +#include +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "graph/node.h" +#include "runtime/mem.h" + +namespace ge { +class MemoryInfo { + public: + MemoryInfo() : memory_addr_(nullptr), memory_size_(0), memory_used_num_(0) {} + + MemoryInfo(uint8_t *memory_addr, size_t memory_size) + : memory_addr_(memory_addr), memory_size_(memory_size), memory_used_num_(0) {} + + MemoryInfo &operator=(const MemoryInfo &op) { + if (&op == this) { + return *this; + } + + this->memory_addr_ = op.memory_addr_; + this->memory_size_ = op.memory_size_; + this->memory_used_num_ = op.memory_used_num_; + return *this; + } + + MemoryInfo(const MemoryInfo &op) { + this->memory_addr_ = op.memory_addr_; + this->memory_size_ = op.memory_size_; + this->memory_used_num_ = op.memory_used_num_; + } + virtual ~MemoryInfo() = default; + + uint8_t *memory_addr_; + uint64_t memory_size_; + int32_t memory_used_num_; +}; + +class MemoryAllocator { + public: + explicit MemoryAllocator(rtMemType_t memory_type) : memory_type_(memory_type), mem_malloced_(false) {} + + virtual ~MemoryAllocator() = default; + + /// + /// @ingroup ge_graph + /// @brief memory allocator init + /// @param [in] options user config params + /// @return void + /// + void Initialize(uint32_t device_id = 0); + + /// + /// @ingroup ge_graph + /// @brief memory allocator finalize + /// @return void + /// + void Finalize(uint32_t device_id = 0); + + /// + /// @ingroup ge_graph + /// @brief malloc memory + /// @param [in] size memory size + /// @param [in] device_id device id + /// @return memory address + /// + uint8_t *MallocMemory(uint64_t memory_size, uint32_t device_id = 0) const; + + /// + /// @ingroup ge_graph + /// @brief free memory + /// @param [in] device_id device id + /// @param [out] memory_ptr memory address ptr + /// @return Status result of function + 
/// + Status FreeMemory(uint8_t *memory_addr, uint32_t device_id = 0) const; + + /// + /// @ingroup ge_graph + /// @brief malloc memory + /// @param [in] memory_key memory key + /// @param [in] size memory size + /// @param [in] device_id device id + /// @return memory address + /// + uint8_t *MallocMemory(const string &memory_key, uint64_t memory_size, uint32_t device_id = 0); + + /// + /// @ingroup ge_graph + /// @brief free memory + /// @param [in] memory_key memory key + /// @param [in] device_id device id + /// @return Status result of function + /// + Status FreeMemory(const string &memory_key, uint32_t device_id = 0); + + /// + /// @ingroup ge_graph + /// @brief get memory address + /// @param [in] memory_key memory key + /// @param [in] device_id device id + /// @return memory address (must not free memory by it) + /// + uint8_t *GetMemoryAddr(const string &memory_key, uint32_t device_id = 0); + + private: + rtMemType_t memory_type_; + bool mem_malloced_; + map memory_base_map_; +}; + +using MemoryAllocatorPtr = std::shared_ptr; + +class MemManager { + public: + MemManager(); + virtual ~MemManager(); + static MemManager &Instance(); + static MemoryAllocator *Instance(rtMemType_t memory_type); + MemManager(const MemManager &) = delete; + MemManager &operator=(const MemManager &) = delete; + /// + /// @ingroup ge_graph + /// @brief memory allocator manager init + /// @param [in] options user config params + /// @return Status result of function + /// + Status Initialize(const std::vector &memory_type); + + /// + /// @ingroup ge_graph + /// @brief memory allocator finalize + /// @return void + /// + void Finalize() noexcept; + + private: + /// + /// @ingroup ge_graph + /// @brief ge memory allocator + /// @param [in] memory_type memory type + /// @return Status result of function + /// + MemoryAllocator *GetMemoryAllocator(rtMemType_t memory_type); + + std::map memory_allocator_map_; + MemoryAllocator *default_memory_allocator_; + std::mutex allocator_mutex_; 
+}; +}; // namespace ge + +#endif // GE_GRAPH_MANAGER_GRAPH_MEM_ALLOCATOR_H_ diff --git a/src/ge/graph/manager/graph_var_manager.cc b/src/ge/graph/manager/graph_var_manager.cc new file mode 100644 index 00000000..217bbb2c --- /dev/null +++ b/src/ge/graph/manager/graph_var_manager.cc @@ -0,0 +1,825 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/manager/graph_var_manager.h" + +#include + +#include "common/l2_cache_optimize.h" +#include "graph/debug/ge_attr_define.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "ge/ge_api_types.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/manager/trans_var_data_utils.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/type_utils.h" + +using std::string; +using std::vector; +using std::map; + +namespace ge { +VarResource::VarResource(uint64_t session_id) : session_id_(session_id) {} + +VarResource::~VarResource() { + var_offset_set_.clear(); + var_addr_mgr_map_.clear(); + cur_var_tensor_desc_map_.clear(); + var_broad_cast_info_.clear(); +} + +ge::Status VarResource::GetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t **dev_ptr, + rtMemType_t &memory_type) { + if (dev_ptr == nullptr) { + GELOGE(FAILED, "[GetVarAddr] dev_ptr is null!"); + return FAILED; + } + std::string var_key = VarKey(var_name, 
tensor_desc); + GELOGD("VarResource::GetVarAddr , var_key = %s", var_key.c_str()); + + auto iter = var_addr_mgr_map_.find(var_key); + if (iter == var_addr_mgr_map_.end()) { + GELOGE(FAILED, "VarResource::GetVarAddr failed, var_key %s", var_key.c_str()); + return FAILED; + } + + *dev_ptr = iter->second.address; + memory_type = iter->second.memory_type; + + return SUCCESS; +} + +void VarResource::SetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t *dev_ptr, + rtMemType_t memory_type) { + std::string var_key = VarKey(var_name, tensor_desc); + GELOGI("VarResource::SetVarAddr , var_key = %s, mem_type:%u", var_key.c_str(), memory_type); + if (var_addr_mgr_map_.count(var_key) == 0) { + GELOGI("SetVarAddr node_name %s, tensor_desc type %s, format %s", var_name.c_str(), + TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str(), + TypeUtils::FormatToSerialString(tensor_desc.GetFormat()).c_str()); + + VarAddrMgr var_addr_mgr; + var_addr_mgr.address = dev_ptr; + var_addr_mgr.tensor_desc = tensor_desc; + var_addr_mgr_map_[var_key] = var_addr_mgr; + } + + cur_var_tensor_desc_map_[var_name] = tensor_desc; +} + +ge::Status VarResource::SaveVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t *address, + rtMemType_t memory_type) { + std::string var_key = VarKey(var_name, tensor_desc); + GELOGD("VarResource::SaveVarAddr, var_key = %s", var_key.c_str()); + if (var_addr_mgr_map_.count(var_key) == 0) { + uint64_t logic_address = VarManager::Instance(0)->GetVarMemLogicBase() + + reinterpret_cast(reinterpret_cast(address)); + GELOGI("SaveVarAddr node_name %s, tensor_desc format %s, type %s.", var_name.c_str(), + TypeUtils::FormatToSerialString(tensor_desc.GetFormat()).c_str(), + TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str()); + VarAddrMgr var_addr_mgr; + var_addr_mgr.address = reinterpret_cast(reinterpret_cast(logic_address)); + var_addr_mgr.offset = 
reinterpret_cast(reinterpret_cast(address)); + var_addr_mgr.tensor_desc = tensor_desc; + var_addr_mgr.memory_type = memory_type; + var_addr_mgr_map_[var_key] = var_addr_mgr; + var_offset_set_.insert(logic_address); + + return SUCCESS; + } + + GELOGE(FAILED, "VarResource::SaveVarAddr, var_key %s save addr conflict", var_key.c_str()); + return FAILED; +} + +bool VarResource::IsVarExist(const std::string &var_name, const ge::GeTensorDesc &tensor_desc) { + std::string var_key = VarKey(var_name, tensor_desc); + return var_addr_mgr_map_.count(var_key) != 0; +} + +bool VarResource::IsVarExist(const std::string &var_name) { return cur_var_tensor_desc_map_.count(var_name) != 0; } + +std::string VarResource::VarKey(const std::string &var_name, const ge::GeTensorDesc &tensor_desc) { + std::string var_key(var_name); + var_key.append(std::to_string(static_cast(tensor_desc.GetFormat()))) + .append("_") + .append(std::to_string(static_cast(tensor_desc.GetDataType()))); + return var_key; +} + +ge::Status VarResource::GetCurVarDesc(const std::string &var_name, ge::GeTensorDesc &tensor_desc) { + if (cur_var_tensor_desc_map_.count(var_name) == 0) { + return FAILED; + } + tensor_desc = cur_var_tensor_desc_map_[var_name]; + return SUCCESS; +} + +ge::Status VarResource::RenewCurVarDesc(const std::string &var_name, const ge::OpDescPtr &op_desc) { + if (cur_var_tensor_desc_map_.count(var_name) == 0) { + GELOGI("There is no this node[%s] in var tensor_desc map. so no need renew!", var_name.c_str()); + return SUCCESS; + } + + if (op_desc == nullptr) { + GELOGE(FAILED, "[RenewCurVarDesc] renew var desc fail! 
input opdesc is null!"); + return FAILED; + } + + ge::GeTensorDesc curr_desc; + ge::Status ret = GetCurVarDesc(var_name, curr_desc); + if (ret != SUCCESS) { + GELOGE(FAILED, "[RenewCurVarDesc] Get var desc fail!"); + return FAILED; + } + std::string key = VarKey(var_name, curr_desc); + curr_desc.SetOriginFormat((op_desc->GetOutputDesc(0)).GetOriginFormat()); + curr_desc.SetFormat((op_desc->GetOutputDesc(0)).GetFormat()); + cur_var_tensor_desc_map_[var_name] = curr_desc; + auto iter = var_addr_mgr_map_.find(key); + if (iter == var_addr_mgr_map_.end()) { + GELOGE(FAILED, "[RenewCurVarDesc] can't find ele with key [%s]", key.c_str()); + return FAILED; + } + auto val = iter->second; + val.tensor_desc.SetOriginFormat((op_desc->GetOutputDesc(0)).GetOriginFormat()); + val.tensor_desc.SetFormat((op_desc->GetOutputDesc(0)).GetFormat()); + var_addr_mgr_map_.erase(iter); + key = VarKey(var_name, curr_desc); + var_addr_mgr_map_[key] = val; + + return SUCCESS; +} + +void VarResource::SaveBroadCastInfo(uint32_t graph_id, const VarBroadCastInfo &broad_cast_info) { + var_broad_cast_info_[graph_id][broad_cast_info.var_name] = broad_cast_info; +} + +ge::Status VarResource::SyncVarData2BroadCast(uint32_t graph_id, const std::string &var_name, + const ge::ConstOpDescPtr &var_op_desc, uint8_t *base_ptr) { + if (var_op_desc == nullptr) { + GELOGE(FAILED, "[SyncVarData2BroadCast] var opdesc is null!"); + return FAILED; + } + GE_CHECK_NOTNULL(base_ptr); + GELOGI("SyncVarData2BroadCast graph_id: %u, var_name: %s.", graph_id, var_name.c_str()); + + VarBroadCastInfo var_broadcast_info = var_broad_cast_info_[graph_id][var_name]; + uint8_t *dst_addr = base_ptr + var_broadcast_info.input_offset; + ge::GeTensorDesc var_tensor_desc = var_op_desc->GetOutputDesc(0); + + return ge::TransVarDataUtils::SyncVarData2BroadCast(var_name, var_tensor_desc, dst_addr, + var_broadcast_info.input_size, session_id_); +} + +ge::Status VarResource::SyncBroadCastData2Var(uint32_t graph_id, const std::string 
&var_name, + const ge::ConstOpDescPtr &var_op_desc, uint8_t *base_ptr) { + GELOGI("SyncBroadCastData2Var var_name: %s", var_name.c_str()); + GE_CHECK_NOTNULL(var_op_desc); + string var_is_broadcast; + bool is_broadcast = AttrUtils::GetStr(var_op_desc, VAR_ATTR_VAR_IS_BROADCAST, var_is_broadcast); + if (!is_broadcast) { + return SUCCESS; + } + + VarBroadCastInfo var_broadcast_info = var_broad_cast_info_[graph_id][var_name]; + // subgraph base_ptr could be nullptr, task it as base 0 + uint8_t *dst_addr = base_ptr + var_broadcast_info.output_offset; + ge::GeTensorDesc var_tensor_desc = var_op_desc->GetOutputDesc(0); + + return ge::TransVarDataUtils::SyncBroadCastData2Var(dst_addr, var_broadcast_info.output_size, var_name, + var_tensor_desc, session_id_); +} + +ge::Status VarResource::SyncVarData(uint32_t graph_id, const std::string &var_name, + const ge::ConstOpDescPtr &var_op_desc, uint8_t *base_ptr) { + GE_CHECK_NOTNULL(var_op_desc); + string var_is_broadcast; + bool is_broadcast = AttrUtils::GetStr(var_op_desc, VAR_ATTR_VAR_IS_BROADCAST, var_is_broadcast); + if (!is_broadcast) { + return SUCCESS; + } + + return SyncVarData2BroadCast(graph_id, var_name, var_op_desc, base_ptr); +} + +bool VarResource::IsVarAddr(const int64_t &offset) { return var_offset_set_.count(offset) > 0; } + +VarTransRoad *VarResource::GetTransRoad(const std::string &var_name) { + auto iter = var_to_trans_road_.find(var_name); + if (iter == var_to_trans_road_.end()) { + return nullptr; + } else { + return &(iter->second); + } +} + +Status VarResource::GetChangedGraphId(const std::string &var_name, uint32_t &graph_id) { + auto iter = var_names_to_changed_graph_id_.find(var_name); + if (iter == var_names_to_changed_graph_id_.end()) { + return FAILED; + } else { + graph_id = iter->second; + return SUCCESS; + } +} +Status VarResource::GetAllocatedGraphId(const std::string &var_name, uint32_t &graph_id) { + auto iter = var_names_to_allocated_graph_id_.find(var_name); + if (iter == 
var_names_to_allocated_graph_id_.end()) { + return FAILED; + } else { + graph_id = iter->second; + return SUCCESS; + } +} + +Status VarResource::SetAllocatedGraphId(const std::string &var_name, uint32_t graph_id) { + if (GetAllocatedGraphId(var_name, graph_id) == SUCCESS) { + GELOGW("VarManager var[%s] has been allocated in graph[%d]", var_name.c_str(), graph_id); + return SUCCESS; + } + var_names_to_allocated_graph_id_[var_name] = graph_id; + return SUCCESS; +} + +MemResource::MemResource() : total_size_(0), var_mem_base_(nullptr), var_mem_size_(0) {} + +Status MemResource::AssignVarMem(const std::string &var_name, uint64_t size, uint64_t session_id, size_t &mem_offset) { + size = (size + kSessionMemAlignSize - 1) / kSessionMemAlignSize * kSessionMemAlignSize; + + total_size_ = VarManager::Instance(0)->GetVarMemMaxSize(); + if (total_size_ < var_mem_size_) { + GELOGE(PARAM_INVALID, "total_size_: %lu is smaller than var_mem_size_: %lu", total_size_, var_mem_size_); + return PARAM_INVALID; + } + uint64_t free_size = total_size_ - var_mem_size_; + if (free_size < (size + kSessionMemAlignSize * 2)) { + GELOGE(PARAM_INVALID, "malloc var mem, size[%lu] > free_size[%lu]", size, free_size); + return PARAM_INVALID; + } + + mem_offset = var_mem_size_; + + // offset for next, align 512 BYTE + size = size + kSessionMemAlignSize; + var_mem_size_ = var_mem_size_ + size; + + // align 512 BYTE + var_mem_size_ = var_mem_size_ + kSessionMemAlignSize; + return SUCCESS; +} + +int64_t MemResource::GetVarMemSize() const { return var_mem_size_; } + +VarManager::VarManager(uint64_t session_id) + : version_(SessionVersion::OTHER_VERSION), + session_id_(session_id), + device_id_(0), + job_id_(0), + graph_mem_max_size_(kGraphMemoryManagerMallocMaxSize), + var_mem_max_size_(kMemoryVarManagerMallocSize), + var_mem_logic_base_(kMemoryVarLogicBase), + use_max_mem_size_(kUseMaxMemorySize) {} + +VarManager *VarManager::Instance(uint64_t session_id) { + GELOGD("VarManager::Instance, session id = 
%lu", session_id); + return VarManagerPool::Instance().GetVarManager(session_id); +} + +void VarManager::Destroy() { + std::lock_guard lock(mutex_); + GELOGI("VarManager::Destroy, session id = %lu.", session_id_); + version_ = SessionVersion::OTHER_VERSION; + device_id_ = 0; + session_id_ = 0; + for (auto &memory_resource : mem_resource_map_) { + if (memory_resource.second != nullptr) { + delete memory_resource.second; + memory_resource.second = nullptr; + } + } + mem_resource_map_.clear(); +} + +ge::Status VarManager::Init(const uint32_t &version, const uint64_t &session_id, const uint32_t &device_id, + const uint64_t &job_id) { + std::lock_guard lock(mutex_); + GELOGI("VarManager::Init, session id = %lu.", session_id); + version_ = version; + device_id_ = device_id; + session_id_ = session_id; + job_id_ = job_id; + var_resource_ = std::unique_ptr(new (std::nothrow) VarResource(session_id_)); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + return SUCCESS; +} + +const uint64_t &VarManager::SessionId() const { + std::lock_guard lock(mutex_); + return session_id_; +} + +const uint32_t &VarManager::DeviceId() const { + std::lock_guard lock(mutex_); + return device_id_; +} + +const uint64_t &VarManager::JobId() const { + std::lock_guard lock(mutex_); + return job_id_; +} + +ge::Status VarManager::SetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t *dev_ptr, + rtMemType_t memory_type) { + GELOGI("VarManager::SetVarAddr var_name = %s, data_type = %s, data_format = %s.", var_name.c_str(), + ge::TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str(), + ge::TypeUtils::FormatToSerialString(tensor_desc.GetFormat()).c_str()); + + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + var_resource_->SetVarAddr(var_name, tensor_desc, dev_ptr, memory_type); + return ge::SUCCESS; +} + 
+ge::Status VarManager::GetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t **dev_ptr, + rtMemType_t &memory_type) { + std::lock_guard lock(mutex_); + GELOGD("VarManager::GetVarAddr var_name = %s, data_type = %s, data_format = %s", var_name.c_str(), + ge::TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str(), + ge::TypeUtils::FormatToSerialString(tensor_desc.GetFormat()).c_str()); + + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + auto ret = var_resource_->GetVarAddr(var_name, tensor_desc, dev_ptr, memory_type); + if (ret != SUCCESS) { + GELOGW("GetVarAddr fail."); + return ge::INTERNAL_ERROR; + } + return SUCCESS; +} + +ge::Status VarManager::GetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t **dev_ptr) { + std::lock_guard lock(mutex_); + rtMemType_t memory_type = RT_MEMORY_HBM; + return GetVarAddr(var_name, tensor_desc, dev_ptr, memory_type); +} + +int64_t VarManager::GetVarMemSize(rtMemType_t memory_type) { + std::lock_guard lock(mutex_); + MemResource *mem_resource = nullptr; + auto iter = mem_resource_map_.find(memory_type); + if (iter == mem_resource_map_.end()) { + return 0; + } else { + mem_resource = iter->second; + } + + if (mem_resource == nullptr) { + GELOGE(ge::INTERNAL_ERROR, "MemResource is invalid."); + return 0; + } + return mem_resource->GetVarMemSize(); +} + +ge::Status VarManager::AssignVarMem(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, + rtMemType_t memory_type) { + std::lock_guard lock(mutex_); + GELOGI("VarManager::AssignVarMem var_name = %s, data_type = %s, data_format = %s.", var_name.c_str(), + ge::TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str(), + ge::TypeUtils::FormatToSerialString(tensor_desc.GetFormat()).c_str()); + + uint32_t tensor_desc_size = 0; + size_t mem_offset = 0; + ge::Status result = TensorUtils::GetSize(tensor_desc, tensor_desc_size); + if 
(result != ge::SUCCESS) { + GELOGE(result, "get size from TensorDesc failed"); + return result; + } + + MemResource *mem_resource = nullptr; + auto it = mem_resource_map_.find(memory_type); + if (it == mem_resource_map_.end()) { + mem_resource = new (std::nothrow) MemResource(); + if (mem_resource == nullptr) { + GELOGE(ge::INTERNAL_ERROR, "Alloc MemResource failed, memory_type = %u.", memory_type); + return ge::INTERNAL_ERROR; + } else { + mem_resource_map_[memory_type] = mem_resource; + } + } else { + mem_resource = it->second; + } + + if (mem_resource == nullptr) { + GELOGE(ge::INTERNAL_ERROR, "MemResource is invalid, memory_type = %u.", memory_type); + return ge::INTERNAL_ERROR; + } + result = mem_resource->AssignVarMem(var_name, tensor_desc_size, session_id_, mem_offset); + if (result != SUCCESS) { + GELOGE(ge::INTERNAL_ERROR, "AssignVarMem by offset failed."); + return ge::INTERNAL_ERROR; + } + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + + result = var_resource_->SaveVarAddr( + var_name, tensor_desc, reinterpret_cast(reinterpret_cast(mem_offset)), memory_type); + if (result != SUCCESS) { + GELOGE(ge::INTERNAL_ERROR, "AssignVarMem by offset failed."); + return ge::INTERNAL_ERROR; + } + + result = var_resource_->GetVarAddr( + var_name, tensor_desc, reinterpret_cast(reinterpret_cast(&mem_offset)), memory_type); + if (result != SUCCESS) { + GELOGE(ge::INTERNAL_ERROR, "GetVarAddr by offset failed."); + return ge::INTERNAL_ERROR; + } + + ge::GeTensorDesc cur_tensor_desc; + result = var_resource_->GetCurVarDesc(var_name, cur_tensor_desc); + if (result != SUCCESS) { + var_resource_->SetVarAddr(var_name, tensor_desc, + reinterpret_cast(reinterpret_cast(mem_offset)), memory_type); + return SUCCESS; + } + + if (cur_tensor_desc.GetFormat() != tensor_desc.GetFormat() || + cur_tensor_desc.GetDataType() != tensor_desc.GetDataType() || + cur_tensor_desc.GetShape().GetDims() != tensor_desc.GetShape().GetDims()) 
{ + GELOGI("var %s assigned new memory (format, data type, shape) (%s, %s, %zu) from (%s, %s, %zu)", var_name.c_str(), + ge::TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str(), + ge::TypeUtils::FormatToSerialString(tensor_desc.GetFormat()).c_str(), + tensor_desc.GetShape().GetDims().size(), + ge::TypeUtils::DataTypeToSerialString(cur_tensor_desc.GetDataType()).c_str(), + ge::TypeUtils::FormatToSerialString(cur_tensor_desc.GetFormat()).c_str(), + cur_tensor_desc.GetShape().GetDims().size()); + var_resource_->SetVarAddr(var_name, tensor_desc, + reinterpret_cast(reinterpret_cast(mem_offset)), memory_type); + } + + return SUCCESS; +} + +bool VarManager::IsVarExist(const std::string &var_name, const ge::GeTensorDesc &tensor_desc) { + std::lock_guard lock(mutex_); + GELOGD("VarManager::IsVarExist var_name = %s, data_type = %s, data_format = %s", var_name.c_str(), + ge::TypeUtils::FormatToSerialString(tensor_desc.GetFormat()).c_str(), + ge::TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str()); + + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return false; + } + return var_resource_->IsVarExist(var_name, tensor_desc); +} + +bool VarManager::IsVarExist(const std::string &var_name) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return false; + } + return var_resource_->IsVarExist(var_name); +} + +ge::Status VarManager::SyncVarData(uint32_t graph_id, const std::string &var_name, ge::ConstOpDescPtr var_op_desc, + uint8_t *base_ptr) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + return var_resource_->SyncVarData(graph_id, var_name, std::move(var_op_desc), base_ptr); +} + +ge::Status VarManager::GetCurVarDesc(const std::string &var_name, ge::GeTensorDesc &tensor_desc) { + std::lock_guard lock(mutex_); + GELOGI("VarManager::GetCurVarDesc var_name = 
%s.", var_name.c_str()); + + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + return var_resource_->GetCurVarDesc(var_name, tensor_desc); +} + +ge::Status VarManager::SaveBroadCastInfo(uint32_t graph_id, const VarBroadCastInfo &broad_cast_info) { + std::lock_guard lock(mutex_); + GELOGI( + "VarManager::SaveBroadCastInfo var_name = %s, broadcast name = %s, " + "idx = %d, input_offset = %ld, input_size = %lu, output_offset = %ld, " + "output_size = %lu", + broad_cast_info.var_name.c_str(), broad_cast_info.broadcast_name.c_str(), broad_cast_info.idx, + broad_cast_info.input_offset, broad_cast_info.input_size, broad_cast_info.output_offset, + broad_cast_info.output_size); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + var_resource_->SaveBroadCastInfo(graph_id, broad_cast_info); + return SUCCESS; +} + +ge::Status VarManager::RenewCurVarDesc(const std::string &var_name, ge::OpDescPtr op_desc) { + std::lock_guard lock(mutex_); + GELOGD("VarManager::RenewCurVarDesc var_name = %s.", var_name.c_str()); + + if (var_resource_ == nullptr) { + GELOGE(ge::INTERNAL_ERROR, "VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + return var_resource_->RenewCurVarDesc(var_name, std::move(op_desc)); +} + +ge::Status VarManager::SyncBroadCastData2Var(uint32_t graph_id, const std::string &var_name, + ge::ConstOpDescPtr var_op_desc, uint8_t *base_ptr) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + return var_resource_->SyncBroadCastData2Var(graph_id, var_name, std::move(var_op_desc), base_ptr); +} + +bool VarManager::IsVarAddr(const int64_t &offset) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return false; + } + return var_resource_->IsVarAddr(offset); +} + +ge::Status 
VarManager::MallocVarMemory(size_t memory_size) { + std::lock_guard lock(mutex_); + uint8_t *var_mem_base = nullptr; + string memory_key = std::to_string(session_id_); + + // malloc variable memory + size_t var_memory_size = memory_size; + + // align 512 BYTE + var_memory_size = (var_memory_size + kSessionMemAlignSize - 1) / kSessionMemAlignSize * kSessionMemAlignSize; + + var_mem_base = MemManager::Instance(RT_MEMORY_HBM)->MallocMemory(memory_key, var_memory_size); + if (var_mem_base == nullptr) { + GELOGE(ge::INTERNAL_ERROR, + "VarManager::MallocVarMemory failed " + "session_id = %s", + memory_key.c_str()); + return ge::INTERNAL_ERROR; + } + return SUCCESS; +} + +uint8_t *VarManager::GetVarMemoryBase(rtMemType_t memory_type) { + std::lock_guard lock(mutex_); + string memory_key = std::to_string(session_id_); + return MemManager::Instance(memory_type)->GetMemoryAddr(memory_key); +} + +uint8_t *VarManager::GetVarMemoryAddr(uint8_t *logic_addr, rtMemType_t memory_type) { + std::lock_guard lock(mutex_); + string mem_key = std::to_string(session_id_); + uint8_t *mem_base = MemManager::Instance(memory_type)->GetMemoryAddr(mem_key); + if (mem_base == nullptr) { + return nullptr; + } + uint8_t *mem_addr = logic_addr + reinterpret_cast(mem_base) - VarManager::Instance(0)->GetVarMemLogicBase(); + return mem_addr; +} + +ge::Status VarManager::FreeVarMemory() { + std::lock_guard lock(mutex_); + string memory_key = std::to_string(SessionId()); + return MemManager::Instance(RT_MEMORY_HBM)->FreeMemory(memory_key); +} + +ge::Status VarManager::SetTransRoad(const std::string &var_name, const VarTransRoad &trans_road) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return ge::INTERNAL_ERROR; + } + return var_resource_->SetTransRoad(var_name, trans_road); +} + +VarTransRoad *VarManager::GetTransRoad(const std::string &var_name) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager 
has not been init."); + return nullptr; + } + return var_resource_->GetTransRoad(var_name); +} + +Status VarManager::SetChangedGraphId(const std::string &var_name, uint32_t graph_id) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return INTERNAL_ERROR; + } + return var_resource_->SetChangedGraphId(var_name, graph_id); +} + +Status VarManager::GetChangedGraphId(const std::string &var_name, uint32_t &graph_id) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return INTERNAL_ERROR; + } + return var_resource_->GetChangedGraphId(var_name, graph_id); +} + +Status VarManager::SetMemoryMallocSize(const map &options) { + auto it = options.find(GRAPH_MEMORY_MAX_SIZE); + if (it == options.end()) { + graph_mem_max_size_ = kGraphMemoryManagerMallocMaxSize; + } else { + string graph_memory_manager_malloc_max_size = it->second; + ge::Status ret = ParseMemoryMallocSize(graph_memory_manager_malloc_max_size, graph_mem_max_size_); + if (ret != SUCCESS) { + GELOGE(ge::GE_GRAPH_OPTIONS_INVALID, "Parse graph memory manager malloc max size failed."); + return ge::GE_GRAPH_OPTIONS_INVALID; + } + } + + it = options.find(VARIABLE_MEMORY_MAX_SIZE); + if (it == options.end()) { + var_mem_max_size_ = kMemoryVarManagerMallocSize; + } else { + string memory_var_manager_malloc_size = it->second; + ge::Status ret = ParseMemoryMallocSize(memory_var_manager_malloc_size, var_mem_max_size_); + if (ret != SUCCESS) { + GELOGE(ge::GE_GRAPH_OPTIONS_INVALID, "Parse memory var manager malloc size failed."); + return ge::GE_GRAPH_OPTIONS_INVALID; + } + } + + var_mem_logic_base_ = graph_mem_max_size_ + kGraphMemoryBuffer; + if (var_mem_logic_base_ > kMaxMemorySize) { + GELOGE(ge::GE_GRAPH_OPTIONS_INVALID, "kMemoryVarLogicBase : %zu can not exceed max memory size : %zu.", + var_mem_logic_base_, kMaxMemorySize); + return ge::GE_GRAPH_OPTIONS_INVALID; + } + + use_max_mem_size_ = 
graph_mem_max_size_ + var_mem_max_size_; + if (use_max_mem_size_ > kMaxMemorySize) { + GELOGE(ge::GE_GRAPH_OPTIONS_INVALID, "kUseMaxMemorySize : %zu can not exceed max memory size : %zu.", + use_max_mem_size_, kMaxMemorySize); + return ge::GE_GRAPH_OPTIONS_INVALID; + } + GELOGI("Set memory malloc size successfully"); + return SUCCESS; +} + +Status VarManager::ParseMemoryMallocSize(string &memory_size, size_t &result) { + if (memory_size.empty()) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Memory malloc size input is empty."); + return GE_GRAPH_OPTIONS_INVALID; + } + // split string by '*' + vector splits; + std::istringstream str(memory_size); + string str_split; + while (getline(str, str_split, '*')) { + splits.emplace_back(str_split); + } + + result = 1; + for (string split : splits) { + // Trim + auto it = split.find_first_not_of(" "); + if (it != string::npos) { + split.erase(0, it); + } + it = split.find_last_not_of(" "); + if (it != string::npos) { + split.erase(it + 1); + } + + for (char c : split) { + if (!isdigit(c)) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Memory malloc size input contains non digit."); + return GE_GRAPH_OPTIONS_INVALID; + } + } + uint64_t num = std::strtoul(split.c_str(), nullptr, 0); + GE_IF_BOOL_EXEC(TypeUtils::CheckUint64MulOverflow(result, static_cast(num)), + GELOGE(FAILED, "Input memory size is out of range."); + return FAILED); + if ((num > kMaxMemorySize) || (result * static_cast(num) > kMaxMemorySize)) { + GELOGE(FAILED, "Input memory size can not exceed max memory size : %zu.", kMaxMemorySize); + return FAILED; + } + result *= static_cast(num); + } + + return SUCCESS; +} + +void VarManager::RemoveChangedGraphId(const std::string &var_name) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return; + } + var_resource_->RemoveChangedGraphId(var_name); +} + +Status VarManager::SetAllocatedGraphId(const std::string &var_name, uint32_t graph_id) { + std::lock_guard lock(mutex_); + 
if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return INTERNAL_ERROR; + } + return var_resource_->SetAllocatedGraphId(var_name, graph_id); +} + +Status VarManager::GetAllocatedGraphId(const std::string &var_name, uint32_t &graph_id) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return INTERNAL_ERROR; + } + return var_resource_->GetAllocatedGraphId(var_name, graph_id); +} + +void VarManager::RemoveAllocatedGraphId(const std::string &var_name) { + std::lock_guard lock(mutex_); + if (var_resource_ == nullptr) { + GELOGW("VarManager has not been init."); + return; + } + var_resource_->RemoveAllocatedGraphId(var_name); +} + +VarManagerPool::~VarManagerPool() { Destroy(); } + +VarManagerPool &VarManagerPool::Instance() { + static VarManagerPool var_manager_pool; + return var_manager_pool; +} + +void VarManagerPool::Destroy() noexcept { + std::lock_guard lock(var_manager_mutex_); + for (auto &it : var_manager_map_) { + VarManager *var_manager = it.second; + if (var_manager != nullptr) { + var_manager->Destroy(); + delete var_manager; + var_manager = nullptr; + } + } + var_manager_map_.clear(); +} + +ge::Status VarManagerPool::Init() const { return SUCCESS; } + +VarManager *VarManagerPool::GetVarManager(uint64_t session_id) { + std::lock_guard lock(var_manager_mutex_); + auto it = var_manager_map_.find(session_id); + if (it != var_manager_map_.end()) { + GELOGD("VarManagerPool::GetVarManager"); + return it->second; + } + + VarManager *var_manager = new (std::nothrow) VarManager(session_id); + if (var_manager == nullptr) { + GELOGE(INTERNAL_ERROR, + "VarManager::Instance find session by " + "session_id[%lu] failed.", + session_id); + static VarManager new_var_manager(0); + return &new_var_manager; + } + var_manager_map_[session_id] = var_manager; + return var_manager; +} +} // namespace ge diff --git a/src/ge/graph/manager/graph_var_manager.h 
b/src/ge/graph/manager/graph_var_manager.h new file mode 100644 index 00000000..c78f83db --- /dev/null +++ b/src/ge/graph/manager/graph_var_manager.h @@ -0,0 +1,299 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_MANAGER_GRAPH_VAR_MANAGER_H_ +#define GE_GRAPH_MANAGER_GRAPH_VAR_MANAGER_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/ge_types.h" +#include "framework/common/l2_cache_optimize.h" +#include "graph/ge_tensor.h" +#include "graph/op_desc.h" +#include "graph/tensor.h" +#include "runtime/mem.h" + +namespace ge { +const size_t kGraphMemoryManagerMallocMaxSize = 26UL * 1024UL * 1024UL * 1024UL; +const size_t kMemoryVarManagerMallocSize = 5UL * 1024UL * 1024UL * 1024UL; +const size_t kMemoryVarLogicBase = 32UL * 1024UL * 1024UL * 1024UL; +const size_t kUseMaxMemorySize = kGraphMemoryManagerMallocMaxSize + kMemoryVarManagerMallocSize; +const size_t kGraphMemoryBuffer = 4UL * 1024UL * 1024UL * 1024UL; +const size_t kMaxMemorySize = 256UL * 1024UL * 1024UL * 1024UL; +const char kEnvGeuseStaticMemory[] = "GE_USE_STATIC_MEMORY"; +const uint64_t kSessionMemAlignSize = 512; + +enum MemStatus { + NORMAL = 0, + COMPILE_TASK = 1, + RUN_TASK = 2, +}; + +enum SessionVersion { + ClOUD_VERSION = 0, + MINI_VERSION = 1, + OTHER_VERSION = 2, +}; + +struct MemResourceCfg { + 
uint32_t mem_status; + size_t mem_res_size; + MemResourceCfg() : mem_status(0), mem_res_size(0) {} +}; + +struct VarAddrMgr { + ge::GeTensorDesc tensor_desc; + uint8_t *address; + uint64_t offset; + rtMemType_t memory_type; + VarAddrMgr() : address(nullptr), offset(0), memory_type(RT_MEMORY_HBM) {} +}; + +struct VarBroadCastInfo { + std::string var_name; + std::string broadcast_name; + int idx; + int64_t input_offset; + uint64_t input_size; + int64_t output_offset; + uint64_t output_size; +}; + +struct VarFormatInfo { + int format; + int data_type; + std::vector dims; +}; + +struct TransNodeInfo { + std::string node_type; + GeTensorDesc input; + GeTensorDesc output; +}; + +using VarTransRoad = std::vector; + +class VarResource { + public: + explicit VarResource(uint64_t session_id_); + ~VarResource(); + + ge::Status GetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t **dev_ptr, + rtMemType_t &memory_type); + + void SetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t *dev_ptr, + rtMemType_t rtMemType_t); + + ge::Status SaveVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t *address, + rtMemType_t memory_type); + + ge::Status GetCurVarDesc(const std::string &var_name, ge::GeTensorDesc &tensor_desc); + + ge::Status RenewCurVarDesc(const std::string &var_name, const ge::OpDescPtr &op_desc); + + void SaveBroadCastInfo(uint32_t graph_id, const VarBroadCastInfo &broad_cast_info); + + ge::Status SyncVarData2BroadCast(uint32_t graph_id, const std::string &var_name, + const ge::ConstOpDescPtr &var_op_desc, uint8_t *base_ptr); + + ge::Status SyncBroadCastData2Var(uint32_t graph_id, const std::string &var_name, + const ge::ConstOpDescPtr &var_op_desc, uint8_t *base_ptr); + + ge::Status SyncVarData(uint32_t graph_id, const std::string &var_name, const ge::ConstOpDescPtr &var_op_desc, + uint8_t *base_ptr); + + Status SetTransRoad(const std::string &var_name, const VarTransRoad 
&trans_road) { + if (var_to_trans_road_.find(var_name) != var_to_trans_road_.end()) { + GELOGW("Var name: %s has already set.", var_name.c_str()); + return GRAPH_SUCCESS; + } + var_to_trans_road_[var_name] = trans_road; + return GRAPH_SUCCESS; + } + + VarTransRoad *GetTransRoad(const std::string &var_name); + + Status SetChangedGraphId(const std::string &var_name, uint32_t graph_id) { + var_names_to_changed_graph_id_[var_name] = graph_id; + return SUCCESS; + } + + Status GetChangedGraphId(const std::string &var_name, uint32_t &graph_id); + + void RemoveChangedGraphId(const std::string &var_name) { var_names_to_changed_graph_id_.erase(var_name); } + + Status SetAllocatedGraphId(const std::string &var_name, uint32_t graph_id); + Status GetAllocatedGraphId(const std::string &var_name, uint32_t &graph_id); + + void RemoveAllocatedGraphId(const std::string &var_name) { var_names_to_allocated_graph_id_.erase(var_name); } + + bool IsVarExist(const std::string &var_name, const ge::GeTensorDesc &tensor_desc); + + bool IsVarExist(const std::string &var_name); + + bool IsVarAddr(const int64_t &offset); + + private: + std::string VarKey(const std::string &var_name, const ge::GeTensorDesc &tensor_desc); + + uint64_t session_id_; + std::unordered_set var_offset_set_; + std::unordered_map var_addr_mgr_map_; + std::unordered_map cur_var_tensor_desc_map_; + std::unordered_map> var_to_trans_road_; + std::unordered_map var_names_to_changed_graph_id_; + std::unordered_map var_names_to_allocated_graph_id_; + std::map> var_broad_cast_info_; +}; + +class MemResource { + public: + MemResource(); + ~MemResource() = default; + + Status AssignVarMem(const std::string &var_name, uint64_t size, uint64_t session_id, size_t &mem_offset); + + int64_t GetVarMemSize() const; + + private: + uint64_t total_size_; + uint8_t *var_mem_base_; + uint64_t var_mem_size_; +}; + +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY VarManager { + public: + static VarManager *Instance(uint64_t session_id); + 
explicit VarManager(uint64_t session_id); + ~VarManager() = default; + + ge::Status Init(const uint32_t &version, const uint64_t &session_id, const uint32_t &device_id, + const uint64_t &job_id); + + void Destroy(); + + ge::Status AssignVarMem(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, rtMemType_t memory_type); + + ge::Status SetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t *dev_ptr, + rtMemType_t memory_type); + + ge::Status GetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t **dev_ptr, + rtMemType_t &memory_type); + + ge::Status GetVarAddr(const std::string &var_name, const ge::GeTensorDesc &tensor_desc, uint8_t **dev_ptr); + + ge::Status SyncVarData(uint32_t graph_id, const std::string &var_name, ge::ConstOpDescPtr var_op_desc, + uint8_t *base_ptr); + + ge::Status SaveBroadCastInfo(uint32_t graph_id, const VarBroadCastInfo &broad_cast_info); + + ge::Status SyncBroadCastData2Var(uint32_t graph_id, const std::string &var_name, ge::ConstOpDescPtr var_op_desc, + uint8_t *base_ptr); + + ge::Status GetCurVarDesc(const std::string &var_name, ge::GeTensorDesc &tensor_desc); + + ge::Status RenewCurVarDesc(const std::string &var_name, ge::OpDescPtr op_desc); + + ge::Status MallocVarMemory(size_t memory_size = kMemoryVarManagerMallocSize); + + ge::Status FreeVarMemory(); + + Status SetTransRoad(const std::string &var_name, const VarTransRoad &trans_road); + + VarTransRoad *GetTransRoad(const std::string &var_name); + + Status SetChangedGraphId(const std::string &var_name, uint32_t graph_id); + + Status GetChangedGraphId(const std::string &var_name, uint32_t &graph_id); + + Status SetMemoryMallocSize(const std::map &options); + + const size_t &GetGraphMemoryMaxSize() const { return graph_mem_max_size_; } + + const size_t &GetVarMemMaxSize() const { return var_mem_max_size_; } + + const size_t &GetVarMemLogicBase() const { return var_mem_logic_base_; } + + const size_t 
&GetUseMaxMemorySize() const { return use_max_mem_size_; } + + void RemoveChangedGraphId(const std::string &var_name); + + Status SetAllocatedGraphId(const std::string &var_name, uint32_t graph_id); + + Status GetAllocatedGraphId(const std::string &var_name, uint32_t &graph_id); + + void RemoveAllocatedGraphId(const std::string &var_name); + + const uint64_t &SessionId() const; + + const uint32_t &DeviceId() const; + + const uint64_t &JobId() const; + + int64_t GetVarMemSize(rtMemType_t memory_type); + + bool IsVarExist(const std::string &var_name, const ge::GeTensorDesc &tensor_desc); + + bool IsVarExist(const std::string &var_name); + + bool IsVarAddr(const int64_t &offset); + + uint8_t *GetVarMemoryBase(rtMemType_t memory_type); + + uint8_t *GetVarMemoryAddr(uint8_t *logic_addr, rtMemType_t memory_type); + + private: + uint32_t version_; + uint64_t session_id_; + uint32_t device_id_; + uint64_t job_id_; + size_t graph_mem_max_size_; + size_t var_mem_max_size_; + size_t var_mem_logic_base_; + size_t use_max_mem_size_; + std::unique_ptr var_resource_; + map mem_resource_map_; + mutable std::recursive_mutex mutex_; + + Status ParseMemoryMallocSize(std::string &memory_size, size_t &my_size); +}; + +class VarManagerPool { + public: + virtual ~VarManagerPool(); + + static VarManagerPool &Instance(); + + VarManager *GetVarManager(uint64_t session_id); + + void Destroy() noexcept; + + ge::Status Init() const; + + private: + VarManagerPool() = default; + std::mutex var_manager_mutex_; + map var_manager_map_; +}; +}; // namespace ge +#endif // GE_GRAPH_MANAGER_GRAPH_VAR_MANAGER_H_ diff --git a/src/ge/graph/manager/model_manager/event_manager.cc b/src/ge/graph/manager/model_manager/event_manager.cc new file mode 100644 index 00000000..686eb3d8 --- /dev/null +++ b/src/ge/graph/manager/model_manager/event_manager.cc @@ -0,0 +1,83 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may 
not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/manager/model_manager/event_manager.h" + +#define RETURN_IF_COND_NOT_MET(condition, ...) \ + do { \ + if (!(condition)) { \ + GELOGE(FAILED, __VA_ARGS__); \ + return; \ + } \ + } while (0); + +namespace ge { +Status EventManager::Init(size_t event_num) { + if (this->inited_) { + return SUCCESS; + } + + rtEvent_t event = nullptr; + current_idx_ = 0; + for (size_t i = 0; i < event_num; ++i) { + GE_CHK_RT_RET(rtEventCreate(&event)); + this->event_list_.push_back(event); + } + + this->inited_ = true; + + return SUCCESS; +} + +void EventManager::Release() noexcept { + for (size_t i = 0; i < this->event_list_.size(); ++i) { + rtError_t rt_ret = rtEventDestroy(this->event_list_[i]); + RETURN_IF_COND_NOT_MET(rt_ret == RT_ERROR_NONE, "Destroy event failed, idx is %zu, ret is 0x%x.", i, rt_ret); + } + this->event_list_.clear(); + + this->inited_ = false; +} + +Status EventManager::EventRecord(size_t event_idx, rtStream_t stream) { + GE_CHK_BOOL_RET_STATUS_NOLOG(this->inited_, INTERNAL_ERROR); + + GE_CHK_BOOL_RET_STATUS_NOLOG(event_idx < this->event_list_.size(), PARAM_INVALID); + + GE_CHK_RT_RET(rtEventRecord(this->event_list_[event_idx], stream)); + + current_idx_ = static_cast(event_idx); + return SUCCESS; +} + +Status EventManager::EventElapsedTime(size_t start_event_idx, size_t stop_event_idx, float &time) { + GE_CHK_BOOL_RET_STATUS_NOLOG(this->inited_, INTERNAL_ERROR); + + GE_CHK_BOOL_RET_STATUS_NOLOG(start_event_idx < this->event_list_.size() && + stop_event_idx < 
this->event_list_.size() && start_event_idx <= stop_event_idx, + PARAM_INVALID); + + GE_CHK_RT_RET(rtEventElapsedTime(&time, this->event_list_[start_event_idx], this->event_list_[stop_event_idx])); + + return SUCCESS; +} + +Status EventManager::GetEvent(uint32_t index, rtEvent_t &event) { + GE_CHK_BOOL_RET_STATUS_NOLOG(index < this->event_list_.size(), PARAM_INVALID); + event = this->event_list_[index]; + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/manager/model_manager/event_manager.h b/src/ge/graph/manager/model_manager/event_manager.h new file mode 100644 index 00000000..1d57dd52 --- /dev/null +++ b/src/ge/graph/manager/model_manager/event_manager.h @@ -0,0 +1,98 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_MANAGER_MODEL_MANAGER_EVENT_MANAGER_H_ +#define GE_GRAPH_MANAGER_MODEL_MANAGER_EVENT_MANAGER_H_ + + +#include + +#include "common/fmk_error_codes.h" +#include "common/fmk_types.h" +#include "common/util.h" +#include "runtime/event.h" + +namespace ge { +class EventManager { + public: + /// + /// @ingroup domi_ome + /// @brief constructor + /// + EventManager() : inited_(false), current_idx_(0) {} + /// + /// @ingroup domi_ome + /// @brief destructor + /// + ~EventManager() { this->Release(); } + + /// + /// @ingroup domi_ome + /// @brief init and create event list + /// @param [in] event_num event number created + /// @return exec result + /// + Status Init(size_t event_num); + + /// + /// @ingroup domi_ome + /// @brief event record + /// @param [in] event_idx event index + /// @param [in] stream related stream + /// @return exec result + /// + Status EventRecord(size_t event_idx, rtStream_t stream); + + /// + /// @ingroup domi_ome + /// @brief time between start and end in ms + /// @param [in] start_event_idx start event index + /// @param [in] stop_event_idx stop event index + /// @param [out] time + /// @return exec result + /// + Status EventElapsedTime(size_t start_event_idx, size_t stop_event_idx, float &time); + + /// + /// @ingroup domi_ome + /// @brief current event index + /// @return + /// + uint32_t CurrentIdx() const { return current_idx_; } + + /// + /// @ingroup domi_ome + /// @brief get event at specific loc + /// @param [in] index event index + /// @return + /// + Status GetEvent(uint32_t index, rtEvent_t &event); + + /// + /// @ingroup domi_ome + /// @brief release event list + /// @param [in] + /// @return + /// + void Release() noexcept; + + private: + std::vector event_list_; + bool inited_; + uint32_t current_idx_; +}; // EventManager +}; // namespace ge +#endif // GE_GRAPH_MANAGER_MODEL_MANAGER_EVENT_MANAGER_H_ diff --git a/src/ge/graph/manager/trans_var_data_utils.cc b/src/ge/graph/manager/trans_var_data_utils.cc new 
file mode 100644 index 00000000..9873ffb2 --- /dev/null +++ b/src/ge/graph/manager/trans_var_data_utils.cc @@ -0,0 +1,91 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/manager/trans_var_data_utils.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/formats/formats.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/types.h" +#include "graph/utils/type_utils.h" + +namespace ge { +Status TransVarDataUtils::SyncVarData2BroadCast(const string &var_name, const ge::GeTensorDesc &src_tensor_desc, + uint8_t *dst_addr, uint32_t dst_addr_size, uint64_t session_id) { + GE_CHK_BOOL_RET_STATUS(dst_addr != nullptr, FAILED, "dst addr is null. 
"); + uint8_t *src_host_addr = nullptr; + uint32_t src_addr_size = 0; + GE_MAKE_GUARD_RTMEM(src_host_addr); + GE_CHK_STATUS_RET(SyncTensorToHost(var_name, src_tensor_desc, &src_host_addr, src_addr_size, session_id)); + + GELOGI("src_addr_size: %u, dst_addr_size: %u", src_addr_size, dst_addr_size); + GE_CHK_BOOL_RET_STATUS(src_addr_size == dst_addr_size, FAILED, "var data size is not equal broadcast "); + + GE_CHK_RT_RET(rtMemcpy(dst_addr, dst_addr_size, src_host_addr, src_addr_size, RT_MEMCPY_HOST_TO_DEVICE)); + return SUCCESS; +} + +Status TransVarDataUtils::SyncBroadCastData2Var(uint8_t *src_addr, uint32_t src_addr_size, const string &var_name, + const ge::GeTensorDesc &dst_tensor_desc, uint64_t session_id) { + GE_CHK_BOOL_RET_STATUS(src_addr != nullptr, FAILED, "src addr is null. "); + uint8_t *host_addr = nullptr; + GE_MAKE_GUARD_RTMEM(host_addr); + GE_CHK_RT_RET(rtMallocHost(reinterpret_cast(&host_addr), src_addr_size)); + GE_CHK_RT_RET(rtMemcpy(host_addr, src_addr_size, src_addr, src_addr_size, RT_MEMCPY_DEVICE_TO_HOST)); + + GE_CHK_STATUS_RET( + SyncTensorToDevice(var_name, reinterpret_cast(host_addr), src_addr_size, dst_tensor_desc, session_id)); + + return SUCCESS; +} + +Status TransVarDataUtils::SyncTensorToHost(const string &var_name, const ge::GeTensorDesc &src_tensor_desc, + uint8_t **host_addr, uint32_t &src_tensor_size, uint64_t session_id) { + GE_CHK_STATUS_RET(ge::TensorUtils::GetSize(src_tensor_desc, src_tensor_size), "get size from TensorDesc failed"); + + uint8_t *src_addr = nullptr; + GE_CHK_STATUS_RET(VarManager::Instance(session_id)->GetVarAddr(var_name, src_tensor_desc, &src_addr)); + uint8_t *mem_addr = + src_addr - static_cast(reinterpret_cast(VarManager::Instance(0)->GetVarMemLogicBase())) + + static_cast( + reinterpret_cast(VarManager::Instance(session_id)->GetVarMemoryBase(RT_MEMORY_HBM))); + GE_CHK_RT_RET(rtMallocHost(reinterpret_cast(host_addr), src_tensor_size)); + + GE_CHK_RT_RET(rtMemcpy(*host_addr, src_tensor_size, mem_addr, 
src_tensor_size, RT_MEMCPY_DEVICE_TO_HOST)); + + GELOGI("SyncTensorToHost var_name %s, src_tensor_size %u", var_name.c_str(), src_tensor_size); + return SUCCESS; +} + +Status TransVarDataUtils::SyncTensorToDevice(const string &var_name, const uint8_t *host_addr, uint32_t addr_size, + const ge::GeTensorDesc &dst_tensor_desc, uint64_t session_id) { + uint8_t *dst_addr = nullptr; + GE_CHK_STATUS_RET(VarManager::Instance(session_id)->GetVarAddr(var_name, dst_tensor_desc, &dst_addr)); + uint8_t *mem_addr = + dst_addr - static_cast(reinterpret_cast(VarManager::Instance(0)->GetVarMemLogicBase())) + + static_cast( + reinterpret_cast(VarManager::Instance(session_id)->GetVarMemoryBase(RT_MEMORY_HBM))); + GE_CHK_RT_RET(rtMemcpy(mem_addr, addr_size, host_addr, addr_size, RT_MEMCPY_HOST_TO_DEVICE)); + + GELOGI("SyncTensorToDevice var_name %s, addr_size %u", var_name.c_str(), addr_size); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/manager/trans_var_data_utils.h b/src/ge/graph/manager/trans_var_data_utils.h new file mode 100644 index 00000000..213dad12 --- /dev/null +++ b/src/ge/graph/manager/trans_var_data_utils.h @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_MANAGER_TRANS_VAR_DATA_UTILS_H_ +#define GE_GRAPH_MANAGER_TRANS_VAR_DATA_UTILS_H_ + +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/ge_types.h" +#include "graph/utils/tensor_utils.h" + +namespace ge { +class TransVarDataUtils { + public: + static ge::Status SyncVarData2BroadCast(const string &var_name, const ge::GeTensorDesc &src_tensor_desc, + uint8_t *dst_addr, uint32_t dst_addr_size, uint64_t session_id_); + static ge::Status SyncBroadCastData2Var(uint8_t *src_addr, uint32_t src_addr_size, const string &var_name, + const ge::GeTensorDesc &dst_tensor_desc, uint64_t session_id_); + + private: + static ge::Status SyncTensorToHost(const string &var_name, const ge::GeTensorDesc &src_tensor_desc, + uint8_t **host_addr, uint32_t &addr_size, uint64_t session_id_); + + static ge::Status SyncTensorToDevice(const string &var_name, const uint8_t *host_addr, uint32_t addr_size, + const ge::GeTensorDesc &dst_tensor_desc, uint64_t session_id_); +}; +} // namespace ge + +#endif // GE_GRAPH_MANAGER_TRANS_VAR_DATA_UTILS_H_ diff --git a/src/ge/graph/manager/util/debug.cc b/src/ge/graph/manager/util/debug.cc new file mode 100755 index 00000000..67f9fdf0 --- /dev/null +++ b/src/ge/graph/manager/util/debug.cc @@ -0,0 +1,71 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/manager/util/debug.h" + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" + +using google::protobuf::Message; +using google::protobuf::io::CodedInputStream; +using google::protobuf::io::FileOutputStream; + +namespace ge { +Debug::Debug() = default; + +Debug::~Debug() = default; + +void Debug::DumpProto(const Message &proto, const char *file) { + int fd = open(file, O_WRONLY | O_CREAT | O_TRUNC, S_IRUSR | S_IWUSR | S_IRGRP | S_IROTH); + if (fd == -1) { + GELOGW("Write %s failed", file); + return; + } + auto output = ge::MakeShared(fd); + if (output == nullptr) { + GELOGW("create output failed."); + if (close(fd) != 0) { + GELOGW("close fd failed."); + } + return; + } + bool ret = google::protobuf::TextFormat::Print(proto, output.get()); + if (!ret) { + GELOGW("dump proto failed."); + } + if (close(fd) != 0) { + GELOGW("close fd failed."); + } +} + +Status Debug::DumpDevMem(const char *file, const void *addr, uint32_t size) { + uint8_t *host_addr = nullptr; + rtError_t ret = rtMallocHost(reinterpret_cast(&host_addr), size); + if (ret != RT_ERROR_NONE) { + GELOGE(FAILED, "Call rt api rtMallocHost failed."); + return FAILED; + } + GE_MAKE_GUARD_RTMEM(host_addr); + ret = rtMemcpy(host_addr, size, addr, size, RT_MEMCPY_DEVICE_TO_HOST); + if (ret != RT_ERROR_NONE) { + GELOGE(FAILED, "Call rt api rtMemcpy failed, ret: 0x%X", ret); + return FAILED; + } + + GE_CHK_STATUS_RET(MemoryDumper::DumpToFile(file, host_addr, size)); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/manager/util/debug.h b/src/ge/graph/manager/util/debug.h new file mode 100755 index 00000000..8742db4f --- /dev/null +++ b/src/ge/graph/manager/util/debug.h @@ -0,0 +1,62 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_MANAGER_UTIL_DEBUG_H_ +#define GE_GRAPH_MANAGER_UTIL_DEBUG_H_ + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/types.h" +#include "common/util.h" +#include "mmpa/mmpa_api.h" +#include "proto/om.pb.h" +#include "runtime/kernel.h" + +using google::protobuf::Message; +using google::protobuf::io::CodedInputStream; +using google::protobuf::io::FileOutputStream; + +namespace ge { +// MemoryDumper: used for output memory data in test case +class Debug { + public: + Debug(); + ~Debug(); + + static void DumpProto(const Message &proto, const char *file); + static Status DumpDevMem(const char *file, const void *addr, uint32_t size); +}; +} // namespace ge + +#endif // GE_GRAPH_MANAGER_UTIL_DEBUG_H_ diff --git a/src/ge/graph/manager/util/hcom_util.cc b/src/ge/graph/manager/util/hcom_util.cc new file mode 100644 index 00000000..a9eb323d --- /dev/null +++ b/src/ge/graph/manager/util/hcom_util.cc @@ -0,0 +1,147 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/manager/util/hcom_util.h" + +#include "common/debug/log.h" +#include "common/math/math_util.h" +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" + +namespace ge { +Status HcomOmeUtil::GetHcomDataType(const ge::ConstOpDescPtr &op_desc, hcclDataType_t &data_type) { + GE_CHECK_NOTNULL(op_desc); + ge::DataType src_data_type = ge::DT_FLOAT; + if (op_desc->GetType() == HCOMRECEIVE) { + bool ret = ge::AttrUtils::GetDataType(op_desc, HCOM_ATTR_DATA_TYPE, src_data_type); + if (ret == false) { + GELOGE(PARAM_INVALID, "op:HcomReceive, op desc no attr: dtype."); + return PARAM_INVALID; + } + } else { + auto input_desc_ptr = op_desc->GetInputDescPtr(0); + GE_CHECK_NOTNULL(input_desc_ptr); + src_data_type = input_desc_ptr->GetDataType(); + } + + auto iter = kConstOpHcomDataType.find(static_cast(src_data_type)); + if (iter == kConstOpHcomDataType.end()) { + GELOGE(PARAM_INVALID, "HcomOmeUtil:: HcomDataType cann't support! 
Current Davinci Data Type : %s", + ge::TypeUtils::DataTypeToSerialString(src_data_type).c_str()); + return PARAM_INVALID; + } + + data_type = iter->second; + return SUCCESS; +} + +Status HcomOmeUtil::GetHcomTypeSize(hcclDataType_t data_type, int32_t &size) { + auto iter = kConstOpHcomDataTypeSize.find(data_type); + GE_CHK_BOOL_EXEC(iter != kConstOpHcomDataTypeSize.end(), return PARAM_INVALID, + "HcomOmeUtil::HcomDataTypeSize , No DataTypeSize!"); + + size = iter->second; + return SUCCESS; +} + +Status HcomOmeUtil::GetHcomCount(const ge::ConstOpDescPtr &op_desc, hcclDataType_t data_type, bool is_allgather, + int &count) { + GE_CHECK_NOTNULL(op_desc); + int64_t total_size = 0; + int64_t align_size = 512; + int32_t size = 0; + GE_CHK_STATUS_RET(HcomOmeUtil::GetHcomTypeSize(data_type, size), "GetHcomCount: GetHcomTypeSize fail!"); + if (op_desc->GetType() == HCOMRECEIVE) { + vector shape_dims; + bool ret = ge::AttrUtils::GetListInt(op_desc, HCOM_ATTR_SHAPE, shape_dims); + if (ret == false) { + GELOGE(PARAM_INVALID, "op:HcomReceive, op desc no attr: shape."); + return PARAM_INVALID; + } + ge::GeShape shape = ge::GeShape(shape_dims); + auto input_size = static_cast(shape.GetShapeSize() * size); + total_size = (input_size + align_size - 1) / align_size * align_size; + } else { + for (size_t i = 0; i < op_desc->GetInputsSize(); i++) { + uint32_t input_size = 0; + int64_t block_size = 0; + GE_CHECK_NOTNULL(op_desc->GetInputDescPtr(i)); + GE_CHK_STATUS_RET(ge::TensorUtils::GetSize(*op_desc->GetInputDescPtr(i), input_size), + "get size from TensorDesc failed, op : %s, input index : %zu", op_desc->GetName().c_str(), i); + + GE_IF_BOOL_EXEC( + op_desc->GetType() == HCOMREDUCESCATTER, int32_t rank_size = 0; + GE_CHK_BOOL_RET_STATUS(ge::AttrUtils::GetInt(op_desc, HCOM_ATTR_RANK_SIZE, rank_size), PARAM_INVALID, + "get HCOM_ATTR_RANK_SIZE failed"); + GE_CHK_BOOL_RET_STATUS(rank_size != 0, PARAM_INVALID, "rank size is zero"); + int64_t shape_size = 
op_desc->GetInputDescPtr(i)->GetShape().GetShapeSize(); GE_CHK_STATUS_RET( + CheckInt64Int32MulOverflow(shape_size, size), "Product of shape size and size beyond INT64_MAX"); + block_size = (shape_size * size) / rank_size; + GE_CHK_STATUS_RET(CheckInt64AddOverflow(total_size, block_size), "Total size is beyond the INT64_MAX"); + total_size = total_size + block_size; continue;); + + int64_t shape_size = op_desc->GetInputDescPtr(i)->GetShape().GetShapeSize(); + GE_CHK_STATUS_RET(CheckInt64Int32MulOverflow(shape_size, size), + "Product of shape size and size beyond INT64_MAX"); + GE_IF_BOOL_EXEC(is_allgather, block_size = shape_size * size;); + GE_IF_BOOL_EXEC(!is_allgather, block_size = (input_size + align_size - 1) / align_size * align_size;); + GE_CHK_STATUS_RET(CheckInt64AddOverflow(total_size, block_size), "Total size is beyond the INT64_MAX"); + total_size = total_size + block_size; + } + } + + GE_CHK_BOOL_RET_STATUS(size != 0, PARAM_INVALID, "Size is zero"); + count = static_cast(total_size / size); + + GE_CHK_BOOL_EXEC(total_size % size == 0, return PARAM_INVALID, "total_size:%ld is not divisiable by size:%d.", + total_size, size); + + return SUCCESS; +} + +Status HcomOmeUtil::GetHcomOperationType(const ge::ConstOpDescPtr &op_desc, hcclRedOp_t &op_type) { + GE_CHECK_NOTNULL(op_desc); + + std::string hcom_op_type; + GE_CHK_BOOL_EXEC(ge::AttrUtils::GetStr(op_desc, HCOM_ATTR_REDUCE_TYPE, hcom_op_type), return PARAM_INVALID, + "HcomOmeUtil::Get HCOM_ATTR_REDUCE_TYPE fail, not support!"); + + if (hcom_op_type == "min") { + op_type = HCCL_REP_OP_MIN; + } else if (hcom_op_type == "max") { + op_type = HCCL_REP_OP_MAX; + } else if (hcom_op_type == "prod") { + op_type = HCCL_REP_OP_PROD; + } else if (hcom_op_type == "sum") { + op_type = HCCL_REP_OP_SUM; + } else { + GELOGE(PARAM_INVALID, "HcomOmeUtil::Get HCOM_ATTR_REDUCE_TYPE fail, [%s] not support!", hcom_op_type.c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status HcomOmeUtil::GetHcomRootId(const 
ge::ConstOpDescPtr &op_desc, int64_t &root_id) { + GE_CHECK_NOTNULL(op_desc); + GE_CHK_BOOL_EXEC(ge::AttrUtils::GetInt(op_desc, HCOM_ATTR_ROOT_RANK, root_id), return PARAM_INVALID, + "HcomOmeUtil::Get HCOM_ATTR_ROOT_INDEX fail, not support!"); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/manager/util/hcom_util.h b/src/ge/graph/manager/util/hcom_util.h new file mode 100644 index 00000000..31bf246e --- /dev/null +++ b/src/ge/graph/manager/util/hcom_util.h @@ -0,0 +1,93 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_MANAGER_UTIL_HCOM_UTIL_H_ +#define GE_GRAPH_MANAGER_UTIL_HCOM_UTIL_H_ + +#include +#include +#include + +#include "common/debug/log.h" +#include "common/string_util.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/op_desc.h" +#include "hccl/hcom.h" + +namespace ge { +using std::string; +using std::vector; + +static std::map kConstOpHcomDataType = { + {ge::DT_FLOAT, HCCL_DATA_TYPE_FLOAT}, + {ge::DT_FLOAT16, HCCL_DATA_TYPE_HALF}, + {ge::DT_INT8, HCCL_DATA_TYPE_INT8}, + {ge::DT_INT32, HCCL_DATA_TYPE_INT}, +}; + +static std::map kConstOpHcomDataTypeSize = { + {HCCL_DATA_TYPE_FLOAT, sizeof(float)}, + {HCCL_DATA_TYPE_HALF, sizeof(float) / 2}, + {HCCL_DATA_TYPE_INT8, sizeof(int8_t)}, + {HCCL_DATA_TYPE_INT, sizeof(int32_t)}, +}; + +class HcomOmeUtil { + public: + /// + /// @ingroup domi_ome + /// @brief GetHcomDataType + /// @return SUCCESS + /// @return FAIL + /// + static Status GetHcomDataType(const ge::ConstOpDescPtr &op_desc, hcclDataType_t &data_type); + + /// + /// @ingroup domi_ome + /// @brief GetHcomTypeSize + /// @return SUCCESS + /// @return FAIL + /// + static Status GetHcomTypeSize(hcclDataType_t data_type, int32_t &size); + + /// + /// @ingroup domi_ome + /// @brief GetHcomCount + /// @return SUCCESS + /// @return FAIL + /// + static Status GetHcomCount(const ge::ConstOpDescPtr &op_desc, hcclDataType_t data_type, bool is_allgather, + int &count); + + /// + /// @ingroup domi_ome + /// @brief GetHcomOperationType + /// @return SUCCESS + /// @return FAIL + /// + static Status GetHcomOperationType(const ge::ConstOpDescPtr &op_desc, hcclRedOp_t &op_type); + + /// + /// @ingroup domi_ome + /// @brief GetHcomRootId + /// @return SUCCESS + /// @return FAIL + /// + static Status GetHcomRootId(const ge::ConstOpDescPtr &op_desc, int64_t &root_id); +}; +} // namespace ge +#endif // GE_GRAPH_MANAGER_UTIL_HCOM_UTIL_H_ diff --git a/src/ge/graph/manager/util/rt_context_util.cc b/src/ge/graph/manager/util/rt_context_util.cc 
new file mode 100644 index 00000000..05120f6a --- /dev/null +++ b/src/ge/graph/manager/util/rt_context_util.cc @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/manager/util/rt_context_util.h" + +#include "framework/common/debug/ge_log.h" + +namespace ge { +void RtContextUtil::AddrtContext(rtContext_t context) { rtContexts_.emplace_back(context); } + +void RtContextUtil::DestroyrtContexts() { + GELOGI("The size of runtime context handle is %zu.", rtContexts_.size()); + for (auto &rtContext : rtContexts_) { + (void)rtCtxDestroy(rtContext); + } + rtContexts_.clear(); +} +} // namespace ge diff --git a/src/ge/graph/manager/util/rt_context_util.h b/src/ge/graph/manager/util/rt_context_util.h new file mode 100644 index 00000000..006abb9f --- /dev/null +++ b/src/ge/graph/manager/util/rt_context_util.h @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_MANAGER_UTIL_RT_CONTEXT_UTIL_H_ +#define GE_GRAPH_MANAGER_UTIL_RT_CONTEXT_UTIL_H_ + +#include + +#include "runtime/context.h" + +namespace ge { +class RtContextUtil { + public: + static RtContextUtil &GetInstance() { + static RtContextUtil instance; + return instance; + } + + void AddrtContext(rtContext_t context); + + void DestroyrtContexts(); + + RtContextUtil &operator=(const RtContextUtil &) = delete; + RtContextUtil(const RtContextUtil &RtContextUtil) = delete; + + private: + RtContextUtil() = default; + ~RtContextUtil() {} + + std::vector rtContexts_; +}; +} // namespace ge + +#endif // GE_GRAPH_MANAGER_UTIL_RT_CONTEXT_UTIL_H_ + diff --git a/src/ge/graph/manager/util/variable_accelerate_ctrl.cc b/src/ge/graph/manager/util/variable_accelerate_ctrl.cc new file mode 100644 index 00000000..522b5ee3 --- /dev/null +++ b/src/ge/graph/manager/util/variable_accelerate_ctrl.cc @@ -0,0 +1,80 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/manager/util/variable_accelerate_ctrl.h" + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" + +namespace ge { +namespace { +inline bool IsVariable(const std::string &node_type) { + return node_type == VARIABLE || node_type == VARIABLEV2 || node_type == VARHANDLEOP; +} +} + +bool VarAccelerateCtrl::IsVarPermitToChangeFormats(const std::string &var_name) { + auto iter = var_names_to_change_times_.find(var_name); + if (iter == var_names_to_change_times_.end()) { + return true; + } + return iter->second < kMaxVarChangeTimes_; +} + +void VarAccelerateCtrl::SetVarChanged(const std::string &var_name) { + auto times = ++var_names_to_change_times_[var_name]; + for (auto &graph_id_to_var_names : graph_ids_to_var_names_) { + if (graph_id_to_var_names.second.count(var_name) > 0) { + GELOGI("The format of var %s has been changed, total changed times %d, " + "the graph %u contains which should be re-build before next run", + var_name.c_str(), times, graph_id_to_var_names.first); + /// The graph being compiled right now is also added to the rebuild-list + /// and can be deleted by calling `SetGraphBuildEnd` at the end of compilation. 
+ graph_ids_need_rebuild_.insert(graph_id_to_var_names.first); + } + } +} + +void VarAccelerateCtrl::AddGraph(uint32_t graph_id, const ComputeGraphPtr &compute_graph) { + if (compute_graph == nullptr) { + GELOGE(PARAM_INVALID, "Failed to add graph %u, the compute graph is null", graph_id); + return; + } + auto &var_names = graph_ids_to_var_names_[graph_id]; + for (auto &node : compute_graph->GetAllNodes()) { + auto node_type = node->GetType(); + if (IsVariable(node_type)) { + GELOGD("Add graph %u contains variable %s", graph_id, node->GetName().c_str()); + var_names.insert(node->GetName()); + } + } + GELOGD("Add graph %u, var count %zu", graph_id, var_names.size()); +} + +void VarAccelerateCtrl::RemoveGraph(uint32_t graph_id) { + GELOGD("Remove graph %u", graph_id); + graph_ids_to_var_names_.erase(graph_id); + graph_ids_need_rebuild_.erase(graph_id); +} +bool VarAccelerateCtrl::IsGraphNeedRebuild(uint32_t graph_id) const { + return graph_ids_need_rebuild_.count(graph_id) > 0; +} +void VarAccelerateCtrl::SetGraphBuildEnd(uint32_t graph_id) { + graph_ids_need_rebuild_.erase(graph_id); + GELOGD("The graph %u has built end, remove it from the rebuild-set", graph_id); +} +} // namespace ge diff --git a/src/ge/graph/manager/util/variable_accelerate_ctrl.h b/src/ge/graph/manager/util/variable_accelerate_ctrl.h new file mode 100644 index 00000000..d8504c02 --- /dev/null +++ b/src/ge/graph/manager/util/variable_accelerate_ctrl.h @@ -0,0 +1,65 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_MANAGER_UTIL_VARIABLE_ACCELERATE_CTRL_H_ +#define GE_GRAPH_MANAGER_UTIL_VARIABLE_ACCELERATE_CTRL_H_ + +#include +#include +#include + +#include "graph/compute_graph.h" +#include "graph/node.h" + +namespace ge { +class VarAccelerateCtrl { + public: + void AddGraph(uint32_t graph_id, const ComputeGraphPtr &compute_graph); + + void RemoveGraph(uint32_t graph_id); + + void SetVarChanged(const std::string &var_name); + + bool IsGraphNeedRebuild(uint32_t graph_id) const; + + void SetGraphBuildEnd(uint32_t graph_id); + + bool IsVarPermitToChangeFormats(const std::string &var_name); + + private: + /// + /// the variable and graph relationships will construct when `AddGraph` + /// + std::map> graph_ids_to_var_names_; + + /// + /// The graph id of the graph to be rebuilt. When the format of a variable is + /// changed, the graph which contains this variable is needs to be rebuilt. + /// + std::set graph_ids_need_rebuild_; + + /// + /// Number of variable names and their format changes. + /// In order to prevent the variable format from being repeatedly changed + /// between different formats, we simply limited the variable format to + /// only one time of changing + /// + std::map var_names_to_change_times_; + static const int kMaxVarChangeTimes_ = 1; +}; +} // namespace ge + +#endif // GE_GRAPH_MANAGER_UTIL_VARIABLE_ACCELERATE_CTRL_H_ diff --git a/src/ge/graph/optimize/common/params.h b/src/ge/graph/optimize/common/params.h new file mode 100644 index 00000000..71151c1d --- /dev/null +++ b/src/ge/graph/optimize/common/params.h @@ -0,0 +1,66 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_OPTIMIZE_COMMON_PARAMS_H_ +#define GE_GRAPH_OPTIMIZE_COMMON_PARAMS_H_ + +#include + +#include "common/singleton.h" +#include "common/types.h" + +namespace ge { +class Params : public Singleton { + public: + DECLARE_SINGLETON_CLASS(Params); + + void SetTarget(const char* target) { + std::string tmp_target = (target != nullptr) ? target : ""; + +#if defined(__ANDROID__) || defined(ANDROID) + target_ = "LITE"; + target_8bit_ = TARGET_TYPE_LTTE_8BIT; +#else + target_ = "MINI"; + target_8bit_ = TARGET_TYPE_MINI_8BIT; +#endif + if (tmp_target == "mini") { + target_ = "MINI"; + target_8bit_ = TARGET_TYPE_MINI_8BIT; + } else if (tmp_target == "tiny") { + target_ = "TINY"; + target_8bit_ = TARGET_TYPE_TINY_8BIT; + } else if (tmp_target == "lite") { + target_ = "LITE"; + target_8bit_ = TARGET_TYPE_LTTE_8BIT; + } + } + + string GetTarget() const { return target_; } + + uint8_t GetTarget_8bit() const { return target_8bit_; } + ~Params() override = default; + + private: + Params() : target_("MINI") {} + + string target_; + uint8_t target_8bit_ = 0; +}; +} // namespace ge + +#endif // GE_GRAPH_OPTIMIZE_COMMON_PARAMS_H_ + diff --git a/src/ge/graph/optimize/graph_optimize.cc b/src/ge/graph/optimize/graph_optimize.cc new file mode 100644 index 00000000..8b698724 --- /dev/null +++ b/src/ge/graph/optimize/graph_optimize.cc @@ -0,0 +1,235 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/optimize/graph_optimize.h" + +#include + +#include "cce/optimizer/fusion_engine.h" +#include "framework/common/debug/ge_log.h" +#include "graph/anchor.h" +#include "graph/passes/dimension_adjust_pass.h" +#include "graph/utils/graph_utils.h" +#include "inc/pass_manager.h" +#include "init/gelib.h" +#include "opskernel_manager/ops_kernel_manager.h" + +using ge::ComputeGraph; +using ge::OpDesc; + +namespace { +const char *const kVectorEngine = "VectorEngine"; +const char *const kAicoreEngine = "AIcoreEngine"; +} // namespace + +namespace ge { +GraphOptimize::GraphOptimize() + : optimize_type_(domi::FrameworkType::FMK_TYPE_T), + cal_config_(""), + insert_op_config_(""), + parse_out_node_(""), + core_type_(kAicoreEngine), + graph_context_(nullptr) {} + +void AddNodeInputProperty(ComputeGraphPtr &compute_graph) { + if (compute_graph == nullptr) { + GELOGE(GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL, "[AddNodeInputProperty]: compute_graph is nullptr."); + return; + } + for (ge::NodePtr &node : compute_graph->GetDirectNode()) { + auto node_op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, GELOGW("node_op_desc is nullptr!"); return); + auto in_control_anchor = node->GetInControlAnchor(); + vector src_name_list; + vector input_name_list; + vector src_index_list; + GE_IF_BOOL_EXEC( + in_control_anchor != nullptr, string src_name_temp; for (auto &out_control_anchor + : in_control_anchor->GetPeerOutControlAnchors()) { + ge::NodePtr src_node = out_control_anchor->GetOwnerNode(); + GE_IF_BOOL_EXEC(src_node == nullptr, 
GELOGW("src_node is nullptr!"); continue); + src_name_temp = src_name_temp == "" ? src_node->GetName() : src_name_temp + ":" + src_node->GetName(); + } GE_IF_BOOL_EXEC(src_name_temp != "", src_name_list.emplace_back(src_name_temp); + node_op_desc->SetSrcName(src_name_list);)) + + for (auto &in_data_anchor : node->GetAllInDataAnchors()) { + auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, + GELOGW("peer_out_anchor is nullptr! node: %s", node->GetName().c_str()); + continue); + + ge::NodePtr src_node = peer_out_anchor->GetOwnerNode(); + src_name_list = node_op_desc->GetSrcName(); + src_index_list = node_op_desc->GetSrcIndex(); + src_name_list.emplace_back(src_node->GetName()); + src_index_list.emplace_back(peer_out_anchor->GetIdx()); + node_op_desc->SetSrcName(src_name_list); + node_op_desc->SetSrcIndex(src_index_list); + GE_IF_BOOL_EXEC(!(node_op_desc->GetType() == NETOUTPUT && GetContext().type == domi::FMK_TYPE_T), + ge::NodePtr peer_owner_node = peer_out_anchor->GetOwnerNode(); + input_name_list = node_op_desc->GetInputName(); input_name_list.emplace_back( + peer_owner_node->GetName() + + (peer_out_anchor->GetIdx() == 0 ? 
"" : ": " + to_string(peer_out_anchor->GetIdx()))); + node_op_desc->SetInputName(input_name_list);) + } + } +} + +Status GraphOptimize::OptimizeSubGraph(ComputeGraphPtr &compute_graph, const std::string &engine_name) { + if (compute_graph == nullptr) { + GELOGE(GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL, "[OptimizeSubGraph]: compute_graph is nullptr."); + return GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL; + } + + Status ret = SUCCESS; + vector graph_optimizer; + + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GraphOptimzer: GE is not initialized"); + return GE_CLI_GE_NOT_INITIALIZED; + } + + if (instance_ptr->DNNEngineManagerObj().IsEngineRegistered(engine_name)) { + instance_ptr->OpsKernelManagerObj().GetGraphOptimizerByEngine(engine_name, graph_optimizer); + AddNodeInputProperty(compute_graph); + + if (compute_graph->GetDirectNode().size() == 0) { + GELOGW("[OptimizeSubGraph] compute_graph do not has any node."); + return SUCCESS; + } + + for (auto iter = graph_optimizer.begin(); iter != graph_optimizer.end(); ++iter) { + ret = (*iter)->OptimizeFusedGraph(*(compute_graph)); + if (ret != SUCCESS) { + GELOGE(ret, "[OptimizeSubGraph][OptimizeFusedGraph]: graph optimize failed, ret:%d", ret); + return ret; + } + } + } else { + GELOGI("Engine: %s is not registered. 
do nothing in subGraph Optimize by ATC.", engine_name.c_str()); + } + + return ret; +} + +Status GraphOptimize::OptimizeOriginalGraph(ComputeGraphPtr &compute_graph) { + if (compute_graph == nullptr) { + GELOGE(GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL, "[OptimizeOriginalGraph]: compute_graph is nullptr."); + return GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL; + } + + Status ret = SUCCESS; + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "OptimizeOriginalGraph failed."); + return GE_CLI_GE_NOT_INITIALIZED; + } + + std::map graph_optimizer = instance_ptr->OpsKernelManagerObj().GetAllGraphOptimizerObjs(); + GELOGI("optimize by opskernel in original graph optimize phase. num of graph_optimizer is %lu.", + graph_optimizer.size()); + string exclude_core_Type = (core_type_ == kVectorEngine) ? kAicoreEngine : kVectorEngine; + GELOGD("[OptimizeOriginalGraph]: engine type will exclude: %s", exclude_core_Type.c_str()); + if (graph_optimizer.size() != 0) { + for (auto iter = graph_optimizer.begin(); iter != graph_optimizer.end(); ++iter) { + if (iter->first == exclude_core_Type) { + continue; + } + ret = (iter->second)->OptimizeOriginalGraph(*compute_graph); + if (ret != SUCCESS) { + GELOGE(ret, "[OptimizeOriginalGraph]: graph optimize failed, ret:%d", ret); + return ret; + } + } + } + return ret; +} + +Status GraphOptimize::OptimizeOriginalGraphForQuantize(ComputeGraphPtr &compute_graph) { + if (compute_graph == nullptr) { + GELOGE(GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL, "[OptimizeOriginalGraph]: compute_graph is nullptr."); + return GE_GRAPH_OPTIMIZE_COMPUTE_GRAPH_NULL; + } + + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "OptimizeOriginalGraph failed."); + return GE_CLI_GE_NOT_INITIALIZED; + } + + std::map graph_optimizer = 
instance_ptr->OpsKernelManagerObj().GetAllGraphOptimizerObjs(); + GELOGI("optimize by opskernel in original graph optimize quantize phase. num of graph_optimizer is %zu.", + graph_optimizer.size()); + Status ret = SUCCESS; + string exclude_core_Type = (core_type_ == kVectorEngine) ? kAicoreEngine : kVectorEngine; + GELOGD("[OptimizeOriginalGraphForQuantize]: engine type will exclude: %s", exclude_core_Type.c_str()); + if (graph_optimizer.size() != 0) { + for (auto iter = graph_optimizer.begin(); iter != graph_optimizer.end(); ++iter) { + if (iter->first == exclude_core_Type || iter->second == nullptr) { + continue; + } + ret = iter->second->OptimizeGraphPrepare(*compute_graph); + if (ret != SUCCESS) { + GELOGE(ret, "[OptimizeOriginalGraphForQuantize]: graph optimize failed, ret:%u", ret); + return ret; + } + } + } + return ret; +} + +Status GraphOptimize::SetOptions(const ge::GraphManagerOptions &options) { + if (options.framework_type >= static_cast(domi::FrameworkType::FMK_TYPE_RESERVED)) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Optimize Type %d invalid.", options.framework_type); + return GE_GRAPH_OPTIONS_INVALID; + } + optimize_type_ = static_cast(options.framework_type); + cal_config_ = options.calibration_conf_file; + insert_op_config_ = options.insert_op_file; + train_graph_flag_ = options.train_graph_flag; + local_fmk_op_flag_ = options.local_fmk_op_flag; + func_bin_path_ = options.func_bin_path; + core_type_ = options.core_type; + return SUCCESS; +} + +void GraphOptimize::TranFrameOp(ComputeGraphPtr &compute_graph) { + GE_CHECK_NOTNULL_JUST_RETURN(compute_graph); + vector local_framework_op_vec = { + "TensorDataset", "QueueDataset", "DeviceQueueDataset", "ParallelMapDataset", "BatchDatasetV2", + "IteratorV2", "MakeIterator", "IteratorGetNext", "FilterDataset", "MapAndBatchDatasetV2"}; + for (auto &nodePtr : compute_graph->GetAllNodes()) { + OpDescPtr op = nodePtr->GetOpDesc(); + GE_IF_BOOL_EXEC(op == nullptr, GELOGW("op is nullptr!"); continue); + // fwkop 
black-white sheet + vector::iterator iter = + std::find(local_framework_op_vec.begin(), local_framework_op_vec.end(), op->GetType()); + if (iter != local_framework_op_vec.end()) { + // set - original_type + if (!AttrUtils::SetStr(op, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, op->GetType())) { + GELOGW("TranFrameOp SetStr ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE failed"); + } + // set - framework_type + // [No need to verify return value] + op->SetType("FrameworkOp"); + if (!AttrUtils::SetInt(op, ATTR_NAME_FRAMEWORK_FWK_TYPE, domi::FrameworkType::FMK_TYPE_T)) { + GELOGW("TranFrameOp SetInt ATTR_NAME_FRAMEWORK_FWK_TYPE failed"); + } + } + } +} +} // namespace ge diff --git a/src/ge/graph/optimize/graph_optimize.h b/src/ge/graph/optimize/graph_optimize.h new file mode 100644 index 00000000..e1e7a7c0 --- /dev/null +++ b/src/ge/graph/optimize/graph_optimize.h @@ -0,0 +1,83 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_OPTIMIZE_GRAPH_OPTIMIZE_H_ +#define GE_GRAPH_OPTIMIZE_GRAPH_OPTIMIZE_H_ + +#include +#include +#include +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/ge_types.h" +#include "common/optimizer/graph_optimizer.h" +#include "graph/compute_graph.h" +#include "graph/manager/graph_context.h" +#include "graph/manager/graph_manager_utils.h" +#include "omg/omg_inner_types.h" + +namespace ge { +using GraphOptimizerPtr = std::shared_ptr; +class GraphOptimize { + public: + GraphOptimize(); + ~GraphOptimize() = default; + + // subgraph optimize + Status OptimizeSubGraph(ComputeGraphPtr &compute_graph, const std::string &engine_name); + + // original graph optimize + Status OptimizeOriginalGraph(ComputeGraphPtr &compute_graph); + + // for fe prepare optimize in quantize scene + Status OptimizeOriginalGraphForQuantize(ComputeGraphPtr &compute_graph); + + // set options + Status SetOptions(const GraphManagerOptions &options); + + const std::map> &GetSummaryOutputIndexes() const { + return summary_output_indexes_; + } + + void ClearSummaryOutputIndexes() { summary_output_indexes_.clear(); } + + // handle summary node before preRun graph + Status HandleSummaryOp(ComputeGraphPtr &compute_graph); + + void TranFrameOp(ComputeGraphPtr &compute_graph); + + private: + std::mutex mutex_; + domi::FrameworkType optimize_type_; + std::string cal_config_; + std::string insert_op_config_; + std::string parse_out_node_; + std::string core_type_; + std::vector out_nodes_name_; + std::vector out_nodes_index_; + bool train_graph_flag_ = false; + GraphContextPtr graph_context_; + bool local_fmk_op_flag_ = false; + // record the summary names for filter sumarry result. 
+ std::map> summary_output_indexes_ = {}; + std::string func_bin_path_; +}; +}; // namespace ge +#endif // GE_GRAPH_OPTIMIZE_GRAPH_OPTIMIZE_H_ diff --git a/src/ge/graph/optimize/optimizer/graph_pass.h b/src/ge/graph/optimize/optimizer/graph_pass.h new file mode 100644 index 00000000..7393fd43 --- /dev/null +++ b/src/ge/graph/optimize/optimizer/graph_pass.h @@ -0,0 +1,93 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_OPTIMIZE_OPTIMIZER_GRAPH_PASS_H_ +#define GE_GRAPH_OPTIMIZE_OPTIMIZER_GRAPH_PASS_H_ + +#include +#include + +#include "./pass.h" +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/compute_graph.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" + +namespace ge { +/// +/// @ingroup domi +/// @brief +/// @author +/// +class GraphPass : public Pass { + public: + /// + /// @param [in] graph graph to optimize + /// @return SUCCESS optimize success + /// @return NOT_CHANGED not optimized + /// @return other optimize fail + /// @author + /// + virtual Status Run(ge::ComputeGraphPtr graph) = 0; + static void RecordOriginalNames(std::vector originalNodes, const ge::NodePtr &node) { + GE_CHECK_NOTNULL_JUST_RETURN(node); + std::vector originalNames; + for (ge::NodePtr nodeTmp : originalNodes) { + 
GE_IF_BOOL_EXEC(nodeTmp == nullptr, return;) + std::vector namesTmp; + ge::OpDescPtr opdescTmp = nodeTmp->GetOpDesc(); + if (!ge::AttrUtils::GetListStr(opdescTmp, "original_op_names", namesTmp)) { + GELOGW("Get original_op_names failed"); + } + if (namesTmp.size() != 0) { + originalNames.insert(originalNames.end(), namesTmp.begin(), namesTmp.end()); + } else { + originalNames.emplace_back(opdescTmp->GetName()); + } + } + + if (originalNames.size() == 0) { + std::string tmp; + originalNames.emplace_back(tmp); + } + + GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListStr(node->GetOpDesc(), "_datadump_original_op_names", originalNames), + return, "Set original_op_names fail.") + } + + static bool IsConstNode(const ge::NodePtr &node) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "Input param node is nullptr."); + return false; + } + if (node->GetOpDesc()->GetType() == CONSTANTOP) { + return true; + } else if (node->GetOpDesc()->GetType() == FRAMEWORKOP) { + string type; + GE_CHK_BOOL_EXEC(ge::AttrUtils::GetStr(node->GetOpDesc(), ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, type), + return false, "Get original_type for op %s fail!", node->GetName().c_str()); + GE_IF_BOOL_EXEC(type == CONSTANT, GELOGI("Is const op"); return true); + return false; + } else { + return false; + } + } +}; +} // namespace ge +#endif // GE_GRAPH_OPTIMIZE_OPTIMIZER_GRAPH_PASS_H_ diff --git a/src/ge/graph/optimize/optimizer/pass.h b/src/ge/graph/optimize/optimizer/pass.h new file mode 100644 index 00000000..e545ae8b --- /dev/null +++ b/src/ge/graph/optimize/optimizer/pass.h @@ -0,0 +1,40 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_OPTIMIZE_OPTIMIZER_PASS_H_ +#define GE_GRAPH_OPTIMIZE_OPTIMIZER_PASS_H_ + +#include +#include "common/ge_inner_error_codes.h" + +namespace ge { +/// +/// @ingroup domi_omg +/// @brief pass +/// @author +/// +template +class Pass { + public: + virtual ~Pass() {} + /// + /// run pass + /// @author + /// + virtual Status Run(std::shared_ptr) = 0; +}; +} // namespace ge +#endif // GE_GRAPH_OPTIMIZE_OPTIMIZER_PASS_H_ diff --git a/src/ge/graph/optimize/summary_optimize.cc b/src/ge/graph/optimize/summary_optimize.cc new file mode 100644 index 00000000..058d5b8c --- /dev/null +++ b/src/ge/graph/optimize/summary_optimize.cc @@ -0,0 +1,108 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#include "graph/optimize/graph_optimize.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/tensor_utils.h" +#include "omg/omg_inner_types.h" + +namespace ge { +static const char *const kSummary = "Summary"; +static const int kMaxMapSize = 10000; +Status GraphOptimize::HandleSummaryOp(ComputeGraphPtr &compute_graph) { + GELOGI("[HandleSummaryOp] HandleSummaryOp start!"); + if (summary_output_indexes_.size() >= kMaxMapSize) { + GELOGE(FAILED, "Map size out of range."); + return FAILED; + } + if (summary_output_indexes_.find(compute_graph->GetGraphID()) != summary_output_indexes_.end()) { + return SUCCESS; + } + if (compute_graph == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "compute_graph is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + vector del_nodes; + vector front_nodes; + vector out_index; + std::map summary_output_indexes = {}; + size_t output_index = compute_graph->GetGraphOutNodesInfo().size(); + for (auto &node_ptr : compute_graph->GetAllNodes()) { + GE_CHECK_NOTNULL(node_ptr); + OpDescPtr op = node_ptr->GetOpDesc(); + GE_IF_BOOL_EXEC(op == nullptr, GELOGW("op is nullptr!"); continue); + + if (op->GetType() == kSummary) { + compute_graph->SetSummaryFlag(true); + auto in = node_ptr->GetInDataAnchor(0); + if (in == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "in is nullptr."); + return GE_GRAPH_PARAM_NULLPTR; + } + + auto peerin = in->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peerin == nullptr, GELOGE(GE_GRAPH_PARAM_NULLPTR, "peerin is nullptr."); + return GE_GRAPH_PARAM_NULLPTR); + + auto ret = GraphUtils::RemoveEdge(peerin, in); + if (ret != SUCCESS) { + return ret; + } + + auto front_node = peerin->GetOwnerNode(); + front_nodes.emplace_back(front_node); + auto idx = peerin->GetIdx(); + out_index.emplace_back(idx); + GELOGI("[GraphOptimize] Summary name: %s, output index: %zu", op->GetName().c_str(), output_index); + summary_output_indexes.emplace(op->GetName(), output_index); + output_index += 1; + + 
del_nodes.emplace_back(node_ptr); + } + } + summary_output_indexes_.emplace(compute_graph->GetGraphID(), summary_output_indexes); + + // add output nodes for summary + std::vector> out_nodes_info; + for (size_t i = 0; i < front_nodes.size(); i++) { + out_nodes_info.emplace_back(pair(front_nodes[i], out_index[i])); + } + compute_graph->AppendGraphOutNodesInfo(out_nodes_info); + + // delete summary node + for (auto &node_ptr : del_nodes) { + auto ret = GraphUtils::RemoveNodeWithoutRelink(compute_graph, node_ptr); + if (ret != SUCCESS) { + GELOGE(ret, "GraphUtils::RemoveNodeWithoutRelink failed."); + return ret; + } + // update Target list + vector graph_target = compute_graph->GetGraphTargetNodesInfo(); + auto iter = find(graph_target.begin(), graph_target.end(), node_ptr); + if (iter != graph_target.end()) { + GELOGI("Current node %s is as Target, remove it from target vector.", node_ptr->GetName().c_str()); + (void)graph_target.erase(iter); + compute_graph->SetGraphTargetNodesInfo(graph_target); + } + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/partition/engine_place.cc b/src/ge/graph/partition/engine_place.cc new file mode 100644 index 00000000..e9dc9a9d --- /dev/null +++ b/src/ge/graph/partition/engine_place.cc @@ -0,0 +1,85 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#include "graph/partition/engine_place.h"
+
+// NOTE(review): the four system-header arguments below were stripped by text
+// extraction (bare "#include"), as was the std::shared_ptr argument in Run()
+// (presumably std::shared_ptr<GELib>); confirm against the original file.
+#include
+#include
+#include
+#include
+
+#include "common/op/ge_op_utils.h"
+#include "graph/utils/graph_utils.h"
+#include "graph/utils/op_desc_utils.h"
+#include "init/gelib.h"
+#include "opskernel_manager/ops_kernel_manager.h"
+
+namespace ge {
+///
+/// @brief Assign a DNNEngine to every direct node of compute_graph_ and record
+///        the choice in node_engine_map_. Requires GELib to be initialized and
+///        SetComputeGraph()/ctor to have supplied a graph.
+/// @return SUCCESS when every node got an engine, FAILED otherwise.
+///
+Status EnginePlacer::Run() {
+  GELOGI("Engine placer starts.");
+  if (compute_graph_ == nullptr) {
+    // NOTE(review): logs GE_GRAPH_NULL_INPUT but returns generic FAILED --
+    // callers compare against SUCCESS only, so this looks intentional; confirm.
+    GELOGE(GE_GRAPH_NULL_INPUT, "compute_graph_ is null.");
+    return FAILED;
+  }
+  std::shared_ptr instance_ptr = ge::GELib::GetInstance();
+  if ((instance_ptr == nullptr) || (!instance_ptr->InitFlag())) {
+    GELOGE(GE_CLI_GE_NOT_INITIALIZED, "Run enginePlacer failed");
+    return FAILED;
+  }
+  // Assign engine for each node in the graph
+  for (const auto &node_ptr : compute_graph_->GetDirectNode()) {
+    GE_CHECK_NOTNULL(node_ptr);
+    GE_CHECK_NOTNULL(node_ptr->GetOpDesc());
+    std::string engine_name;
+    // Check if this node has assigned engine
+    // NOTE(review): the guard tests GetOpKernelLibName() but reads
+    // GetOpEngineName() -- presumably both are set together when an engine was
+    // pre-assigned; confirm against OpDesc semantics.
+    if ((!node_ptr->GetOpDesc()->GetOpKernelLibName().empty())) {
+      engine_name = node_ptr->GetOpDesc()->GetOpEngineName();
+    } else {
+      // Call placer cost model to get the "best" engine for this node
+      engine_name = instance_ptr->DNNEngineManagerObj().GetDNNEngineName(node_ptr->GetOpDesc());
+      // If can't get op's engine name, return failed
+      if (engine_name.empty()) {
+        GELOGE(GE_CLI_GE_NOT_INITIALIZED, "Can not find engine of op type %s",
+               node_ptr->GetOpDesc()->GetType().c_str());
+        return FAILED;
+      }
+    }
+    if (AssignEngineAndLog(node_ptr, engine_name) != SUCCESS) {
+      GELOGE(GE_GRAPH_ASSIGN_ENGINE_FAILED, "[GraphPartitioner]: AssignEngineAndLog FAILED");
+      return FAILED;
+    }
+  }
+  GELOGI("Engine placer ends.");
+  return SUCCESS;
+}
+
+///
+/// @brief Record @p engine_name as the engine of @p node_ptr in
+///        node_engine_map_, logging the assignment.
+/// @return SUCCESS, or FAILED when node_ptr or its OpDesc is null.
+///
+Status EnginePlacer::AssignEngineAndLog(ge::ConstNodePtr node_ptr, const std::string &engine_name) {
+  if (node_ptr == nullptr || node_ptr->GetOpDesc() == nullptr) {
+    GELOGE(FAILED, "node_ptr is null.");
+    return FAILED;
+  }
+
+  // private function, promise node_ptr->GetOpDesc() not null
+  GELOGI("Assigning DNNEngine %s to node %s, op type %s", engine_name.c_str(), node_ptr->GetName().c_str(),
+         node_ptr->GetOpDesc()->GetType().c_str());
+
+  // Record the node assigned engine name
+  node_engine_map_.insert(std::make_pair(node_ptr, engine_name));
+
+  return SUCCESS;
+}
+}  // namespace ge
+
diff --git a/src/ge/graph/partition/engine_place.h b/src/ge/graph/partition/engine_place.h
new file mode 100644
index 00000000..93a101ff
--- /dev/null
+++ b/src/ge/graph/partition/engine_place.h
@@ -0,0 +1,61 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GE_GRAPH_PARTITION_ENGINE_PLACE_H_
+#define GE_GRAPH_PARTITION_ENGINE_PLACE_H_
+
+// NOTE(review): the two system-header arguments below and the
+// std::unordered_map template arguments were stripped by text extraction --
+// presumably <string>, <unordered_map> and
+// std::unordered_map<ConstNodePtr, std::string> (AssignEngineAndLog in
+// engine_place.cc inserts node -> engine-name pairs); confirm against the
+// original file.
+#include
+#include
+
+#include "common/ge_inner_error_codes.h"
+#include "graph/compute_graph.h"
+
+namespace ge {
+// Maps each (const) node to the name of the engine assigned to it.
+using NodeEngineMap = std::unordered_map;
+
+///
+/// @ingroup graph/partition
+/// @brief Assigned individual DNNEngine to each node in the origin graph.
+///        After construction (or SetComputeGraph), Run() fills the
+///        node-to-engine map retrievable via GetNodeEngineMap().
+/// @author
+///
+class EnginePlacer {
+ public:
+  explicit EnginePlacer(const ComputeGraphPtr &graph) : compute_graph_(graph) {}
+  // Default construction leaves compute_graph_ null; callers must invoke
+  // SetComputeGraph() before Run() (Run() null-checks and fails otherwise).
+  EnginePlacer() = default;
+  ~EnginePlacer() = default;
+
+  // Disable copy constructor and assignment operator
+  // NOTE(review): the move overloads are declared with const rvalue
+  // references (const EnginePlacer &&) -- unconventional; the deleted copy
+  // operations alone would already suppress moves. Behavior is unaffected.
+  EnginePlacer(const EnginePlacer &) = delete;
+  EnginePlacer(const EnginePlacer &&) = delete;
+  EnginePlacer &operator=(const EnginePlacer &) = delete;
+  EnginePlacer &operator=(const EnginePlacer &&) = delete;
+
+  // Assign an engine to every direct node of the graph; SUCCESS/FAILED.
+  Status Run();
+
+  // Get the unique node-engine map
+  const NodeEngineMap *GetNodeEngineMap() const { return &node_engine_map_; }
+
+  // Replace the graph to be processed by the next Run() call.
+  void SetComputeGraph(const ComputeGraphPtr &compute_graph) { compute_graph_ = compute_graph; }
+
+ private:
+  // Log and record one node -> engine assignment into node_engine_map_.
+  Status AssignEngineAndLog(ConstNodePtr node_ptr, const std::string &engine_name);
+
+  ComputeGraphPtr compute_graph_;
+  NodeEngineMap node_engine_map_;
+};
+}  // namespace ge
+
+#endif  // GE_GRAPH_PARTITION_ENGINE_PLACE_H_
diff --git a/src/ge/graph/partition/graph_partition.cc b/src/ge/graph/partition/graph_partition.cc
new file mode 100644
index 00000000..a6f48b54
--- /dev/null
+++ b/src/ge/graph/partition/graph_partition.cc
@@ -0,0 +1,985 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/partition/graph_partition.h" + +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "common/op/ge_op_utils.h" +#include "graph/debug/ge_attr_define.h" +#include "framework/common/types.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/optimize/optimizer/graph_pass.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" +#include "opskernel_manager/ops_kernel_manager.h" + +namespace { +const char *const kEngineDefaultData = "ENGINE_DEFAULT_DATA"; +const char *const kEndType = "End"; +const char *const kPlaceHolderType = "PlaceHolder"; +const int kOneGraph = 1; // only one graph +const int kRankOne = 1; // order of graph list is 0,1,2,3..., 1 means second order +const int kRankZero = 0; // order of graph list is 0,1,2,3..., 0 means first order +} // namespace +namespace ge { +Status ge::GraphPartitioner::CheckIfEnd2PldEmpty(ge::ComputeGraphPtr &output_merged_compute_graph) { + // only one condition:no data node, one engine, there is only one graph + input graph + if (partitions_.size() == kOneGraph) { + auto partition = (*partitions_.begin()); + if (partition.first == nullptr) { + GELOGE(GE_GRAPH_EMPTY_PARTITION, "[GraphPartitioner]: partition.first is null, engine name is %s", + partition.second.c_str()); + return FAILED; + } + output_merged_compute_graph = partition.first; + // flush all nodes' engine of merged graph + engine_placer_.SetComputeGraph(output_merged_compute_graph); + 
if (engine_placer_.Run() != SUCCESS) { + GELOGE(GE_GRAPH_INIT_FAILED, "[GraphPartitioner]: engine_placer run failed"); + return FAILED; + } + } else { // if placeholder to end map is empty, it should be an exception condition + GELOGE(GE_GRAPH_EMPTY_PARTITION, "[GraphPartitioner]: placeholder to end map is empty, partitions size is not 1."); + return FAILED; + } + return SUCCESS; +} + +Status ge::GraphPartitioner::MergeAllSubGraph(ge::ComputeGraphPtr &output_merged_compute_graph, + const std::vector &sub_graph_list) { + for (size_t rank = 0; rank < rank_2_partitions_.size(); rank++) { + string temp_stream; + // sub_graph_list index is one ahead of rank_2_partitions_list index + if (rank > 0) { + temp_stream = sub_graph_list[rank - 1]->GetStreamLabel(); + } + for (const auto &node : rank_2_partitions_[rank]->GetAllNodes()) { + if (node == nullptr) { + continue; + } + if ((node->GetType() == kEndType) || (node->GetType() == kPlaceHolderType)) { + continue; + } + if (!temp_stream.empty() && !AttrUtils::HasAttr(node->GetOpDesc(), ATTR_NAME_STREAM_LABEL)) { + (void)AttrUtils::SetStr(node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, temp_stream); + } + if (node->SetOwnerComputeGraph(output_merged_compute_graph) != GRAPH_SUCCESS) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "SetownerComputeGraph failed, node %s", node->GetName().c_str()); + return FAILED; + } + (void)output_merged_compute_graph->AddNode(node); + } + } + // get session graph id from subgraph + SetMergedGraphId(output_merged_compute_graph); + return SUCCESS; +} + +void ge::GraphPartitioner::SetMergedGraphId(ge::ComputeGraphPtr &output_merged_compute_graph) { + string session_graph_id; + // get session graph id from subgraph + if (rank_2_partitions_.empty() || + !AttrUtils::GetStr(*(rank_2_partitions_[0]), ATTR_NAME_SESSION_GRAPH_ID, session_graph_id)) { + GELOGW("Get graph session_graph_id attr failed."); + } + // set session graph id into merged subgraph + if (!session_graph_id.empty()) { + GELOGI("Set session graph id %s 
in merged compute graph", session_graph_id.c_str()); + // private function, promise output_merged_compute_graph not null + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(*output_merged_compute_graph, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id), + GELOGW("SetStr ATTR_NAME_SESSION_GRAPH_ID failed");) + } +} + +Status ge::GraphPartitioner::RemoveNodeAndEdgeBetweenEndPld(ge::ComputeGraphPtr &output_merged_compute_graph, + const std::vector &sub_graph_list) { + ComputeGraphPtr new_sub_graph = MakeShared("mergedGraph"); + output_merged_compute_graph = new_sub_graph; + if ((new_sub_graph == nullptr) || (MergeAllSubGraph(output_merged_compute_graph, sub_graph_list) != SUCCESS)) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphPartitioner]: MergeAllSubGraph failed."); + return FAILED; + } + for (const auto &it : index_2_end_) { + auto &end = it.second; + auto &pld = end_2_pld_[it.second]; + if ((end != nullptr) && (pld != nullptr) && (end->GetInDataAnchor(0) != nullptr) && + (pld->GetOutDataAnchor(0) != nullptr)) { + AnchorPtr end_in_anchor = (end->GetInDataAnchor(0)->GetFirstPeerAnchor() == nullptr) + ? Anchor::DynamicAnchorCast(end->GetInControlAnchor()) + : Anchor::DynamicAnchorCast(end->GetInDataAnchor(0)); + AnchorPtr pld_out_anchor = (pld->GetOutDataAnchor(0)->GetFirstPeerAnchor() == nullptr) + ? Anchor::DynamicAnchorCast(pld->GetOutControlAnchor()) + : Anchor::DynamicAnchorCast(pld->GetOutDataAnchor(0)); + auto src_anchor = end_in_anchor->GetFirstPeerAnchor(); // src_anchor should be only 1 + if (GraphUtils::RemoveEdge(src_anchor, end_in_anchor) != GRAPH_SUCCESS) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphPartitioner]: RemoveEdge failed. 
node_name:%s, graph_name:%s", + end->GetName().c_str(), end->GetOwnerComputeGraph()->GetName().c_str()); + return FAILED; + } + GE_CHECK_NOTNULL(pld_out_anchor); + for (const auto &peer_in_anchor : pld_out_anchor->GetPeerAnchors()) { + if (GraphUtils::RemoveEdge(pld_out_anchor, peer_in_anchor) != GRAPH_SUCCESS) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphPartitioner]: RemoveEdge failed. node_name:%s, graph_name:%s", + pld->GetName().c_str(), pld->GetOwnerComputeGraph()->GetName().c_str()); + return FAILED; + } + if (GraphUtils::AddEdge(src_anchor, peer_in_anchor) != GRAPH_SUCCESS) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "merge two subgraph fail."); + return FAILED; + } + } + } else { + GELOGW("End or pld is nullptr or in data anchor of end is nullptr or out data anchor of pld is nullptr"); + } + } + return SUCCESS; +} + +void ge::GraphPartitioner::ClearAllPartitionData(Mode mode) { + transfer_graph_.clear(); + rank_2_partitions_.clear(); + partitions_2_rank_.clear(); + partitions_.clear(); + corresponding_node_in_partitions_.clear(); + index_2_end_.clear(); + cluster_2_partition_.clear(); + clusters_.clear(); + node_2_cluster_.clear(); + pld_2_end_.clear(); + end_2_pld_.clear(); + if (mode_ == kMerging) { + mode_ = kPartitioning; + } else { + mode_ = mode; + } +} + +Status ge::GraphPartitioner::MergeAfterSubGraphOptimization(ge::ComputeGraphPtr &output_merged_compute_graph, + const std::vector &sub_graph_list) { + if (mode_ != kMerging) { + GELOGE(GE_GRAPH_UNSUPPORTED, "Cannot call merging in partition mode"); + return FAILED; + } + GELOGI("Graph merge starts."); + // check input param + for (const auto &it : sub_graph_list) { + if (it == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphPartitioner]: merging sub-graphs failed, sub-graph is null"); + return FAILED; + } + } + if (end_2_pld_.empty() || pld_2_end_.empty()) { + if (CheckIfEnd2PldEmpty(output_merged_compute_graph) != SUCCESS) { + return FAILED; + } + } + GE_TIMESTAMP_START(MergeGraphRemoveNode); + if 
(RemoveNodeAndEdgeBetweenEndPld(output_merged_compute_graph, sub_graph_list) != ge::SUCCESS) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphPartitioner]: merging sub-graphs failed"); + return FAILED; + } + GE_TIMESTAMP_END(MergeGraphRemoveNode, "GraphPartitioner::MergeGraphRemoveNodeAndEdge"); + GE_TIMESTAMP_START(MergeGraphTopologicalSorting); + Status ret = output_merged_compute_graph->TopologicalSorting(); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_TOPO_SORT_FAILED, "[GraphPartitioner]: output_merged_compute_graph->TopologicalSorting failed"); + return FAILED; + } + GE_TIMESTAMP_END(MergeGraphTopologicalSorting, "GraphPartitioner::MergeGraphTopologicalSorting"); + // flush all nodes' engine of merged graph + GE_TIMESTAMP_START(MergeGraphEnginePlacerRun); + engine_placer_.SetComputeGraph(output_merged_compute_graph); + if (engine_placer_.Run() != SUCCESS) { + GELOGE(GE_GRAPH_INIT_FAILED, "[GraphPartitioner]: engine_placer run failed"); + return FAILED; + } + GE_TIMESTAMP_END(MergeGraphEnginePlacerRun, "GraphPartitioner::MergeGraphEnginePlacerRun"); + ClearAllPartitionData(kMerging); + GELOGI("Graph merge ends."); + return SUCCESS; +} + +Status ge::GraphPartitioner::UpdatePldOpDesc(const NodePtr &src_node, int output_index, OpDescPtr &pld_op_desc) { + if (src_node == nullptr || pld_op_desc == nullptr || src_node->GetOpDesc() == nullptr) { + GELOGE(FAILED, "parameter ptr is null."); + return FAILED; + } + const auto &output_desc = src_node->GetOpDesc()->GetOutputDesc(static_cast(output_index)); + GE_IF_BOOL_EXEC(pld_op_desc->AddOutputDesc(output_desc) != GRAPH_SUCCESS, GELOGE(FAILED, "AddOutputDesc failed"); + return FAILED;) + if (pld_op_desc->MutableOutputDesc(0) != nullptr) { + ge::TensorUtils::SetRealDimCnt(*(pld_op_desc->MutableOutputDesc(0).get()), + static_cast(output_desc.GetShape().GetDims().size())); + } else { + GELOGE(GE_GRAPH_ADD_PLC_END_FAILED, "[GraphPartitioner]: pld_op_desc is null."); + return FAILED; + } + // flush pld data type as original data type + 
if (output_desc.GetOriginDataType() != DT_UNDEFINED) { + pld_op_desc->MutableOutputDesc(0)->SetDataType(output_desc.GetOriginDataType()); + } else { + GELOGW("Original data type of %s is undefined![data type is %s]", src_node->GetName().c_str(), + TypeUtils::DataTypeToSerialString(output_desc.GetDataType()).c_str()); + } + // flush pld format as original format + if (output_desc.GetOriginFormat() != FORMAT_RESERVED) { + pld_op_desc->MutableOutputDesc(0)->SetFormat(output_desc.GetOriginFormat()); + pld_op_desc->MutableOutputDesc(0)->SetShape(output_desc.GetOriginShape()); + } else { + GELOGW("Original format of %s is undefined![format is %s]", src_node->GetName().c_str(), + TypeUtils::FormatToSerialString(output_desc.GetFormat()).c_str()); + } + return SUCCESS; +} + +Status ge::GraphPartitioner::UpdateEndOpDesc(const NodePtr &dst_node, int input_index, OpDescPtr &end_op_desc) { + if (dst_node == nullptr || end_op_desc == nullptr || dst_node->GetOpDesc() == nullptr) { + GELOGE(FAILED, "parameter ptr is null."); + return FAILED; + } + const auto &input_desc = dst_node->GetOpDesc()->GetInputDesc(static_cast(input_index)); + GE_IF_BOOL_EXEC(end_op_desc->AddInputDesc(input_desc) != GRAPH_SUCCESS, GELOGE(FAILED, "AddInputDesc failed"); + return FAILED;) + if (end_op_desc->MutableInputDesc(0) != nullptr) { + ge::TensorUtils::SetRealDimCnt(*(end_op_desc->MutableInputDesc(0).get()), + static_cast(input_desc.GetShape().GetDims().size())); + } else { + GELOGE(GE_GRAPH_ADD_PLC_END_FAILED, "[GraphPartitioner]: pld_op_desc is null."); + return FAILED; + } + // flush end data type as original data type + if (input_desc.GetOriginDataType() != DT_UNDEFINED) { + end_op_desc->MutableInputDesc(0)->SetDataType(input_desc.GetOriginDataType()); + } else { + GELOGI("Original data type of %s is undefined![data type is %s]", dst_node->GetName().c_str(), + + TypeUtils::DataTypeToSerialString(input_desc.GetDataType()).c_str()); + } + // flush end format as original format + if 
(input_desc.GetOriginFormat() != FORMAT_RESERVED) { + end_op_desc->MutableInputDesc(0)->SetFormat(input_desc.GetOriginFormat()); + end_op_desc->MutableInputDesc(0)->SetShape(input_desc.GetOriginShape()); + } else { + GELOGW("Original format of %s is undefined![format is %s]", dst_node->GetName().c_str(), + TypeUtils::FormatToSerialString(input_desc.GetFormat()).c_str()); + } + return SUCCESS; +} + +graphStatus ge::GraphPartitioner::AddPlaceHolderEndInSrcDstGraph(const AnchorPtr &out_anchor, + const AnchorPtr &peer_in_anchor, + const ge::ComputeGraphPtr &pld_graph, + const ge::ComputeGraphPtr &end_graph) { + GE_CHECK_NOTNULL(out_anchor); + GE_CHECK_NOTNULL(peer_in_anchor); + GE_CHECK_NOTNULL(pld_graph); + GE_CHECK_NOTNULL(end_graph); + const auto &src_node = out_anchor->GetOwnerNode(); + const auto &dst_node = peer_in_anchor->GetOwnerNode(); + string engine_end_name; + string engine_pld_name; + // link input -> end + string end_name = kEndType + std::to_string(num_of_pld_end_); + auto end_op_desc = MakeShared(end_name, END); + if (end_op_desc == nullptr) { + GELOGE(GRAPH_PARAM_INVALID, "pld_op_desc is nullptr."); + return FAILED; + } + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(end_op_desc, "peerIndex", num_of_pld_end_), GELOGW("SetInt peerIndex failed");) + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(end_op_desc, "parentOpType", dst_node->GetType()), + GELOGW("SetStr parentOpType failed");) + // replace input_desc of end with owner node's desc + int input_index = ge::AnchorUtils::GetIdx(peer_in_anchor); + bool is_need_update_desc = (input_index >= 0) && (mode_ == kPartitioning); + if (is_need_update_desc) { + if (UpdateEndOpDesc(dst_node, input_index, end_op_desc) != SUCCESS) { + GELOGE(GRAPH_PARAM_INVALID, "UpdateEndOpDesc failed, input index %d, engine name is %s", input_index, + engine_end_name.c_str()); + return FAILED; + } + } else { + GeTensorDesc input_desc; + if (end_op_desc->AddInputDesc(input_desc) != SUCCESS) { + GELOGE(GRAPH_PARAM_INVALID, "AddInputDesc failed, input 
index %d, engine name is %s", input_index, + engine_end_name.c_str()); + return FAILED; + } + } + NodePtr new_end_node = end_graph->AddNode(end_op_desc); + if (new_end_node == nullptr) { + GELOGE(GRAPH_PARAM_INVALID, "new_end_node is nullptr."); + return FAILED; + } + GE_IF_BOOL_EXEC(new_end_node->SetOwnerComputeGraph(end_graph) != GRAPH_SUCCESS, + GELOGE(GRAPH_PARAM_INVALID, "SetOwnerComputeGraph failed"); + return FAILED;) + AnchorPtr end_dst_anchor = GetEndInAnchor(out_anchor, new_end_node); + if (GraphUtils::AddEdge(out_anchor, end_dst_anchor) != GRAPH_SUCCESS) { + GELOGE(GE_GRAPH_ADD_PLC_END_FAILED, "add end node : %s node %dth out-anchor --> end in %s subgraph fail.", + src_node->GetName().c_str(), AnchorUtils::GetIdx(out_anchor), end_graph->GetName().c_str()); + return FAILED; + } + /// For fe, op id has been set in AddNode, + /// we can take op id of srcNode as the mark of parentId now + auto const &src_node_opdesc = src_node->GetOpDesc(); + GE_CHECK_NOTNULL(src_node_opdesc); + int64_t node_id = src_node_opdesc->GetId(); + const string pld_name = kPlaceHolderType + std::to_string(num_of_pld_end_); + auto pld_op_desc = MakeShared(pld_name, PLACEHOLDER); + if (pld_op_desc == nullptr) { + GELOGE(GRAPH_PARAM_INVALID, "pld_op_desc is nullptr."); + return FAILED; + } + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(pld_op_desc, "peerIndex", num_of_pld_end_), GELOGW("SetInt peerIndex failed");) + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(pld_op_desc, "parentOpType", src_node->GetType()), + GELOGW("SetStr parentOpType failed");) + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(pld_op_desc, "parentId", end_graph->GetName() + ":" + std::to_string(node_id)), + GELOGW("SetStr parentId failed");) + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(pld_op_desc, "anchorIndex", AnchorUtils::GetIdx(out_anchor)), + GELOGW("SetInt anchorIndex failed");) + // do not care over flow + num_of_pld_end_++; + // replace output_desc of pld with input node's output desc + int output_index = ge::AnchorUtils::GetIdx(out_anchor); + 
is_need_update_desc = (output_index >= 0) && (mode_ == kPartitioning); + if (is_need_update_desc) { + if (UpdatePldOpDesc(src_node, output_index, pld_op_desc) != SUCCESS) { + GELOGE(GRAPH_PARAM_INVALID, "UpdateEndOpDesc failed, output index %d, engine name is %s", output_index, + engine_pld_name.c_str()); + return FAILED; + } + } else { + GeTensorDesc output_desc; + if (pld_op_desc->AddOutputDesc(output_desc) != SUCCESS) { + GELOGE(GRAPH_PARAM_INVALID, "AddOutputDesc failed, input index %d, engine name is %s", input_index, + engine_pld_name.c_str()); + return FAILED; + } + } + NodePtr new_pld_node = pld_graph->AddNode(pld_op_desc); + if (new_pld_node == nullptr) { + GELOGE(GRAPH_PARAM_INVALID, "new_pld_node is nullptr."); + return FAILED; + } + GE_IF_BOOL_EXEC(new_pld_node->SetOwnerComputeGraph(pld_graph) != GRAPH_SUCCESS, + GELOGE(GRAPH_PARAM_INVALID, "SetOwnerComputeGraph failed"); + return FAILED;) + AnchorPtr pld_src_anchor = GetPldOutAnchor(new_pld_node, peer_in_anchor); + // link placeHolder -> computeNode + if (GraphUtils::AddEdge(pld_src_anchor, peer_in_anchor) != GRAPH_SUCCESS) { + GELOGE(GE_GRAPH_ADD_PLC_END_FAILED, + "add placeholder node : placeholder --> %s node %dth in-anchor in %s subgraph fail.", + dst_node->GetName().c_str(), AnchorUtils::GetIdx(peer_in_anchor), pld_graph->GetName().c_str()); + return FAILED; + } + index_2_end_[num_of_pld_end_] = new_end_node; + pld_2_end_[new_pld_node] = new_end_node; + end_2_pld_[new_end_node] = new_pld_node; + return SUCCESS; +} + +Status ge::GraphPartitioner::LinkInput2EndRemoveOriginalLink(ge::NodePtr input_node, ge::ComputeGraphPtr src_graph, + ge::ComputeGraphPtr dst_graph) { + if (input_node == nullptr || src_graph == nullptr || dst_graph == nullptr) { + GELOGE(FAILED, "parameter ptr is null."); + return FAILED; + } + // get the original anchors and remove the original link + for (const auto &out_data_anchor : input_node->GetAllOutAnchors()) { + for (auto &peer_in_anchor : out_data_anchor->GetPeerAnchors()) 
{ + if (peer_in_anchor->GetOwnerNode()->GetType() != kEndType) { + if (GraphUtils::RemoveEdge(out_data_anchor, peer_in_anchor) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[GraphPartitioner]: RemoveEdge() failed."); + return FAILED; + } + // link input -> end + auto ret = AddPlaceHolderEndInSrcDstGraph(out_data_anchor, peer_in_anchor, src_graph, dst_graph); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_ADD_PLC_END_FAILED, "[GraphPartitioner]: AddPlaceHolderEndInSrcDstGraph() failed."); + return ret; + } + } else { + auto end_node = peer_in_anchor->GetOwnerNode(); + if (GraphUtils::RemoveJustNode(src_graph, end_node) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[GraphPartitioner]: RemoveJustNode() failed."); + return FAILED; + } + if (end_node->SetOwnerComputeGraph(dst_graph) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[GraphPartitioner]: RemoveJustNode() failed."); + return FAILED; + } + if (dst_graph->AddNode(end_node) == nullptr) { + GELOGE(FAILED, "[GraphPartitioner]: AddNode() failed."); + return FAILED; + } + } + } + } + return SUCCESS; +} + +Status ge::GraphPartitioner::PutInputNodesInSubGraph(const ge::ComputeGraphPtr &src_graph, + const ge::ComputeGraphPtr &dst_graph) { + if (src_graph == nullptr || dst_graph == nullptr) { + GELOGE(FAILED, "parameter ptr is null."); + return FAILED; + } + for (auto &input_node : src_graph->GetAllNodes()) { + if (IsDataLike(input_node)) { + if (input_node->SetOwnerComputeGraph(dst_graph) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[GraphPartitioner]: SetOwnerComputeGraph failed."); + return FAILED; + } + // remove input node from src_graph + if (GraphUtils::RemoveJustNode(src_graph, input_node) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[GraphPartitioner]: RemoveJustNode() failed."); + return FAILED; + } + // add input node to dst_graph + if (dst_graph->AddNode(input_node) == nullptr) { + GELOGE(FAILED, "[GraphPartitioner]: AddNode() failed."); + return FAILED; + } + if (LinkInput2EndRemoveOriginalLink(input_node, src_graph, dst_graph) != ge::SUCCESS) { + 
GELOGE(FAILED, "[GraphPartitioner]: LinkInput2EndRemoveOriginalLink() failed."); + return FAILED; + } + } + } + return SUCCESS; +} + +void ge::GraphPartitioner::AddNewGraphToPartition(ge::ComputeGraphPtr &input_graph, const std::string &engine_name) { + if (input_graph == nullptr) { + GELOGW("[GraphPartitioner]: input_graph is null, engine name is %s", engine_name.c_str()); + return; + } + partitions_[input_graph] = engine_name; +} + +bool ge::GraphPartitioner::IsDataLike(ge::NodePtr node) { + return (node->GetType() == CONSTANT) || (node->GetType() == DATA) || + (node->GetType() == AIPPDATA) || (node->GetType() == CONSTANTOP) || + (node->GetType() == VARIABLE); +} + +bool ge::GraphPartitioner::HasNoInput(ge::NodePtr node) { + if (node == nullptr) { + GELOGE(FAILED, "node_ptr is null."); + return true; + } + return node->GetInNodes().empty(); +} + +Status ge::GraphPartitioner::Initialize(ge::ComputeGraphPtr compute_graph) { + GELOGI("Initialize starts."); + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || compute_graph == nullptr) { + GELOGE(GE_GRAPH_NOT_INIT, "Graph partitioner initialize failed."); + return FAILED; + } + engine_placer_.SetComputeGraph(compute_graph); + if (engine_placer_.Run() != SUCCESS) { + GELOGE(FAILED, "Engine placer run failed."); + return FAILED; + } + const NodeEngineMap *node_engine_map = engine_placer_.GetNodeEngineMap(); + size_t temp_index = 0; + for (const auto &node : compute_graph->GetDirectNode()) { + std::string temp_stream; + // node opdesc has been checked before + (void)AttrUtils::GetStr(node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, temp_stream); + + ClusterPtr new_cluster; + // data like node without input should be handle specific + if (HasNoInput(node) && IsDataLike(node)) { + ClusterPtr cluster = MakeShared(temp_index, kEngineDefaultData, temp_stream); + new_cluster = cluster; + } else { + ClusterPtr cluster = MakeShared(temp_index, node_engine_map->at(node), temp_stream); + 
new_cluster = cluster; + } + if (new_cluster == nullptr) { + GELOGE(FAILED, "[GraphPartitioner]: failed to allocate new_cluster"); + return FAILED; + } + new_cluster->nodes_.push_back(node); + if (!HasNoInput(node)) { + for (const auto &parent : node->GetInAllNodes()) { + new_cluster->in_clu_.insert(node_2_cluster_.at(parent)->index_); + node_2_cluster_.at(parent)->out_clu_.insert(temp_index); + } + } + node_2_cluster_[node] = new_cluster; + clusters_[temp_index] = new_cluster; + GELOGD("Node name is %s, engine is %s, cluster index is %zu, stream label is %s", node->GetName().c_str(), + new_cluster->engine_name_.c_str(), new_cluster->index_, new_cluster->stream_label_.c_str()); + temp_index++; + } + GELOGI("Initialize ends."); + return SUCCESS; +} + +Status ge::GraphPartitioner::AddPartitionsToGraphNode(vector &output_subgraphs, + ge::ComputeGraphPtr compute_graph) { + const std::string &input_subgraph_name = "inputNodesSubGraph"; + string session_graph_id; + if (!AttrUtils::GetStr(*compute_graph, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id)) { + GELOGW("Get graph session_graph_id attr failed."); + return INTERNAL_ERROR; + } + // the output_subgraphs have topological order + for (const auto &subGraph : rank_2_partitions_) { + if (partitions_.find(subGraph) == partitions_.end()) { + GELOGE(GE_GRAPH_EMPTY_PARTITION, "[GraphPartitioner]: partition is null."); + return FAILED; + } + auto &engine_name = partitions_.at(subGraph); + GraphUtils::DumpGEGraph(subGraph, subGraph->GetName()); + GraphUtils::DumpGEGraphToOnnx(*subGraph, subGraph->GetName()); + if (!session_graph_id.empty()) { + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(subGraph, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id), + GELOGW("SetStr ATTR_NAME_SESSION_GRAPH_ID failed");) + } + if (engine_name != input_subgraph_name) { // do not add Data subGraph into SubGraphInfo + auto sgi = MakeShared(); + if (sgi == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphPartitioner]: MakeShared sub graph info failed."); + 
return FAILED; + } + // set engine name + sgi->SetEngineName(engine_name); + // set stream label + string sub_graph_stream; + if (AttrUtils::GetStr(subGraph->GetDirectNode().at(0)->GetOpDesc(), ATTR_NAME_STREAM_LABEL, sub_graph_stream)) { + sgi->SetStreamLabel(sub_graph_stream); + } + /// for now inputFlag is the same before and after partition. It should + /// be changed according to the real partition + std::vector sub_graph_input(input_size_, true); + std::vector sub_graph_output(output_size_, true); + sgi->SetSubGraph(subGraph); + sgi->SetOutputFlag(sub_graph_output); + sgi->SetInputFlag(sub_graph_input); + sgi->SetOutputContext(output_name_); + AddEndPldInformationToSubGraphInfo(sgi); + GELOGI("[GraphPartitioner]: subGraph engine name is %s, graph name is %s, stream label is %s", + engine_name.c_str(), subGraph->GetName().c_str(), sgi->GetStreamLabel().c_str()); + output_subgraphs.push_back(sgi); + } + } + return SUCCESS; +} + +// check if two clusters can merge +bool ge::GraphPartitioner::IsMergeable(size_t parent_cluster, size_t child_cluster, size_t upper_bound) { + if ((clusters_[parent_cluster] == nullptr) || (clusters_[parent_cluster]->nodes_.empty()) || + (clusters_[child_cluster] == nullptr) || (clusters_[child_cluster]->nodes_.empty())) { + return false; + } + // Check if parent_cluster,child_cluster has same engine or stream label + if ((clusters_[parent_cluster]->engine_name_ != clusters_[child_cluster]->engine_name_) || + (clusters_[parent_cluster]->stream_label_ != clusters_[child_cluster]->stream_label_)) { + GELOGI("Parent cluster %zu engine %s stream label %s, child cluster %zu engine %s stream label %s can not merge", + parent_cluster, clusters_[parent_cluster]->engine_name_.c_str(), + clusters_[parent_cluster]->stream_label_.c_str(), child_cluster, + clusters_[child_cluster]->engine_name_.c_str(), clusters_[child_cluster]->stream_label_.c_str()); + return false; + } + // Check if parent_cluster,child_cluster is reachable + 
RemoveEdge(parent_cluster, child_cluster); + // Check if there is a path between parent and child, if return true, can not merge + if (HasSecondPath(parent_cluster, child_cluster, upper_bound)) { + GELOGI("Find second path from %zu to %zu, upper bound is %zu", parent_cluster, child_cluster, upper_bound); + InsertEdge(parent_cluster, child_cluster); + return false; + } + InsertEdge(parent_cluster, child_cluster); + return true; +} + +void ge::GraphPartitioner::MergeTwoClusters(size_t parent_cluster, size_t &child_cluster) { + // check which index is bigger + size_t big_cluster, small_cluster; + size_t child_cluster_original = child_cluster; + if (parent_cluster > child_cluster) { + small_cluster = child_cluster; + big_cluster = parent_cluster; + } else { + big_cluster = child_cluster; + small_cluster = parent_cluster; + // flush child_cluster, because it has been modified + child_cluster = small_cluster; + } + + // update node_2_cluster_ map + for (auto &node : clusters_[big_cluster]->nodes_) { + node_2_cluster_[node] = clusters_[small_cluster]; + } + // merge nodes + clusters_[small_cluster]->nodes_.splice(clusters_[small_cluster]->nodes_.end(), clusters_[big_cluster]->nodes_); + // merge all input & output to small cluster + clusters_[small_cluster]->in_clu_.insert(clusters_[big_cluster]->in_clu_.begin(), + clusters_[big_cluster]->in_clu_.end()); + clusters_[small_cluster]->out_clu_.insert(clusters_[big_cluster]->out_clu_.begin(), + clusters_[big_cluster]->out_clu_.end()); + // remove child_cluster's out parent_cluster's in between child_cluster and parent_cluster + RemoveEdge(parent_cluster, child_cluster_original); + // update in/out of the cluster with bigger index + for (auto in_clu : clusters_[big_cluster]->in_clu_) { + clusters_[in_clu]->out_clu_.insert(small_cluster); + clusters_[in_clu]->out_clu_.erase(big_cluster); + } + for (auto out_clu : clusters_[big_cluster]->out_clu_) { + clusters_[out_clu]->in_clu_.insert(small_cluster); + 
clusters_[out_clu]->in_clu_.erase(big_cluster); + } + clusters_[big_cluster] = clusters_[small_cluster]; +} + +void ge::GraphPartitioner::RemoveEdge(size_t parent_cluster, size_t child_cluster) { + clusters_[child_cluster]->in_clu_.erase(parent_cluster); + clusters_[parent_cluster]->out_clu_.erase(child_cluster); +} + +void ge::GraphPartitioner::InsertEdge(size_t from, size_t to) { + if (from == to) { + return; + } + if (!clusters_[from]->out_clu_.insert(to).second) { + // edge has already exists + return; + } + clusters_[to]->in_clu_.insert(from); +} + +void ge::GraphPartitioner::MarkClusters() { + GELOGI("MarkClusters starts. cluster size is %zu", clusters_.size()); + size_t cluster_size = clusters_.size(); + for (size_t child_cluster = 0; child_cluster < cluster_size; child_cluster++) { + auto found_child_cluster = clusters_[child_cluster]; + if (found_child_cluster == nullptr) { + GELOGW("can not found child_cluster is %zu", child_cluster); + continue; + } + auto copy_parents_clusters = found_child_cluster->in_clu_; + vector ordered_cluster; + for (const auto &parent_cluster : copy_parents_clusters) { + ordered_cluster.emplace_back(parent_cluster); + } + // sort cluster according to it's output amount + auto comp_func = [this](const size_t &parent_cluster1, const size_t &parent_cluster2) -> bool { + return clusters_[parent_cluster1]->out_clu_.size() < clusters_[parent_cluster2]->out_clu_.size(); + }; + std::sort(ordered_cluster.begin(), ordered_cluster.end(), comp_func); + auto child_merged = child_cluster; + for (const auto &parent_cluster : ordered_cluster) { + if (IsMergeable(parent_cluster, child_merged, child_cluster)) { + MergeTwoClusters(parent_cluster, child_merged); + GELOGD("Merging cluster %zu and %zu to %zu", parent_cluster, child_cluster, child_merged); + } + } + } + GELOGI("MarkClusters ends."); +} + +Status ge::GraphPartitioner::SplitSubGraphs(ge::ComputeGraphPtr compute_graph) { + GELOGI("SplitSubGraphs starts."); + if (compute_graph == nullptr) 
{ + GELOGE(FAILED, "parameter ptr is null."); + return FAILED; + } + // Create graphs for all clusters + std::unordered_set cluster_set; + // add pld&end + for (auto &node : compute_graph->GetDirectNode()) { + GELOGD("Node name is %s.", node->GetName().c_str()); + auto child_cluster = node_2_cluster_[node]; + ge::ComputeGraphPtr corresponding_graph; + // unordered_set's insert returns a pair, second of pair is bool + if (!cluster_set.insert(child_cluster).second) { + GELOGD("Old sub graph, child_cluster is %zu", child_cluster->index_); + corresponding_graph = cluster_2_partition_.at(child_cluster); + } else { + std::string graph_name = "new_sub_graph" + std::to_string(partitions_.size()); + ComputeGraphPtr new_sub_graph = MakeShared(graph_name); + if (new_sub_graph == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphPartitioner]: MakeShared() failed."); + return FAILED; + } + AddNewGraphToPartition(new_sub_graph, child_cluster->engine_name_); + corresponding_graph = new_sub_graph; + cluster_2_partition_[child_cluster] = corresponding_graph; + GELOGD("New sub graph, name is %s", graph_name.c_str()); + } + // build node to corresponding node map + NodePtr corresponding_node = corresponding_graph->AddNode(node->GetOpDesc()); + if (corresponding_node == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "[GraphPartitioner]: AddNode() failed."); + return FAILED; + } + corresponding_node_in_partitions_[node] = corresponding_node; + GE_CHK_STATUS_RET(corresponding_node->SetOwnerComputeGraph(corresponding_graph)) + for (const auto &in_anchor : node->GetAllInAnchors()) { + GELOGD("In anchor index is %d", AnchorUtils::GetIdx(in_anchor)); + for (auto &peer_out_anchor : in_anchor->GetPeerAnchors()) { + GELOGD("Peer out anchor index is %d", AnchorUtils::GetIdx(peer_out_anchor)); + // All nodes have a copy in corresponding_node_in_partitions_, so function at can not be execption + auto parent_node = corresponding_node_in_partitions_.at(peer_out_anchor->GetOwnerNode()); + 
GELOGD("Parent node name is %s", parent_node->GetName().c_str()); + // add edge + auto src_anchor = parent_node->GetOutAnchor(AnchorUtils::GetIdx(peer_out_anchor)); + auto dst_anchor = corresponding_node->GetInAnchor(AnchorUtils::GetIdx(in_anchor)); + // if child and parent's cluster is not same, add plc and end + auto parent_cluster = node_2_cluster_[peer_out_anchor->GetOwnerNode()]; + if (parent_cluster != child_cluster) { + GELOGD("Parent cluster is %zu, child_cluster is %zu", parent_cluster->index_, child_cluster->index_); + if (AddPlaceHolderEnd(peer_out_anchor, in_anchor) != ge::SUCCESS) { + GELOGE(GE_GRAPH_ADD_PLC_END_FAILED, "[GraphPartitioner]: AddPlaceHolderEndInSrcDstGraph() failed."); + return FAILED; + } + } else { // parent and child in the same cluster, add edge + GELOGD("AddEdge from parent cluster %zu to child %zu", parent_cluster->index_, child_cluster->index_); + if (GraphUtils::AddEdge(src_anchor, dst_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "AddEdge fail, from %s to %s", peer_out_anchor->GetOwnerNode()->GetName().c_str(), + in_anchor->GetOwnerNode()->GetName().c_str()); + return FAILED; + } + } + } + } + } + GELOGI("SplitSubGraphs ends."); + return SUCCESS; +} + +/// before calling this function, the direct path between src and dst are already removed. +/// return true if a second path is found +bool ge::GraphPartitioner::HasSecondPath(size_t src, size_t dst, size_t upper_bound) { + if (clusters_.at(src)->out_clu_.empty() || clusters_.at(dst)->in_clu_.empty()) { + return false; + } + /// Avoid recursion since stack space might be limited. + /// We instead keep a stack of nodes to visit. 
+ std::vector temp_stack; + std::unordered_set visited; + temp_stack.push_back(src); + while (!temp_stack.empty()) { + size_t cluster = temp_stack.back(); + temp_stack.pop_back(); + ClusterPtr cur_cluster = clusters_[cluster]; + if (!visited.insert(cluster).second) { + continue; + } + for (auto out : cur_cluster->out_clu_) { + if (out == dst) { + return true; // There is cycle + } + if (out < upper_bound) { + temp_stack.push_back(out); + } + } + } + return false; +} + +Status ge::GraphPartitioner::Partition(ge::ComputeGraphPtr compute_graph, vector &output_subgraphs, + Mode mode) { + ClearAllPartitionData(mode); + if (compute_graph == nullptr) { + GELOGE(GE_GRAPH_NULL_INPUT, "[GraphPartitioner]: compute_graph is null."); + return FAILED; + } + output_name_ = compute_graph->GetOutput(); + output_size_ = compute_graph->GetOutputSize(); + input_size_ = compute_graph->GetInputSize(); + if (output_size_ == 0) { + GELOGE(GE_GRAPH_NULL_INPUT, "The output size need to be greater than 0."); + return FAILED; + } + GELOGI("Graph partition starts, graph nodes size is %zu", compute_graph->GetDirectNodesSize()); + Status ret = compute_graph->TopologicalSorting(); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_TOPO_SORT_FAILED, "[GraphPartitioner]: subGraphPtr->TopologicalSorting failed"); + return FAILED; + } + GE_TIMESTAMP_START(GraphPartitionInitialize); + if (Initialize(compute_graph) != SUCCESS) { + GELOGE(GE_GRAPH_INIT_FAILED, "[GraphPartitioner]: initialize failed"); + return FAILED; + } + GE_TIMESTAMP_END(GraphPartitionInitialize, "GraphPartitioner::PartitionInitialize"); + GE_TIMESTAMP_START(GraphPartitionMarkClusters); + MarkClusters(); + GE_TIMESTAMP_END(GraphPartitionMarkClusters, "GraphPartitioner::PartitionMarkClusters"); + GE_TIMESTAMP_START(GraphPartitionSplitSubGraphs); + if (SplitSubGraphs(compute_graph) != SUCCESS) { + GELOGE(FAILED, "[GraphPartitioner]: SplitSubGraphs failed"); + return FAILED; + } + GE_TIMESTAMP_END(GraphPartitionSplitSubGraphs, 
"GraphPartitioner::PartitionSplitSubGraphs"); + GE_TIMESTAMP_START(GraphPartitionSortSubGraphs); + if (SortSubGraphs(compute_graph) != ge::SUCCESS) { + GELOGE(GE_GRAPH_TOPO_SORT_FAILED, "Graph Partition SortSubGraphs failed."); + return ge::FAILED; + } + GE_TIMESTAMP_END(GraphPartitionSortSubGraphs, "GraphPartitioner::PartitionSortSubGraphs"); + GE_TIMESTAMP_START(GraphPartitionAddPartitionsToGraphNode); + if (AddPartitionsToGraphNode(output_subgraphs, compute_graph) != ge::SUCCESS) { + GELOGE(GE_GRAPH_EMPTY_PARTITION, "Graph Partition AddPartitionsToGraphNode failed."); + return ge::FAILED; + } + GE_TIMESTAMP_END(GraphPartitionAddPartitionsToGraphNode, "GraphPartitioner::PartitionAddPartitionsToGraphNode"); + GELOGI("Graph partition ends. Adding partitions to SubGraphInfo, got %zu sub graphs", output_subgraphs.size()); + mode_ = kMerging; + // do not care over flow + partition_times_++; + return SUCCESS; +} + +// all the inputs are the nodes and anchors in the original graph +Status ge::GraphPartitioner::AddPlaceHolderEnd(const AnchorPtr &out_anchor, const AnchorPtr &in_anchor) { + if ((out_anchor == nullptr) || (in_anchor == nullptr)) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "src_node or dst_node is null."); + return FAILED; + } + // nodes in original graph + auto src_node = out_anchor->GetOwnerNode(); + auto dst_node = in_anchor->GetOwnerNode(); + if ((src_node == nullptr) || (dst_node == nullptr)) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "src_node or dst_node is null."); + return FAILED; + } + // All nodes have a copy in corresponding_node_in_partitions_, so function at can not be execption + auto src_anchor = corresponding_node_in_partitions_.at(src_node)->GetOutAnchor(AnchorUtils::GetIdx(out_anchor)); + auto dst_anchor = corresponding_node_in_partitions_.at(dst_node)->GetInAnchor(AnchorUtils::GetIdx(in_anchor)); + if ((src_anchor == nullptr) || (dst_anchor == nullptr)) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "src_anchor or dst_anchor is null."); + return FAILED; + } + // 
anchors in subGraph + const ComputeGraphPtr &src_subgraph = src_anchor->GetOwnerNode()->GetOwnerComputeGraph(); + const ComputeGraphPtr &dst_subgraph = dst_anchor->GetOwnerNode()->GetOwnerComputeGraph(); + // add end and pld node + auto ret = AddPlaceHolderEndInSrcDstGraph(src_anchor, dst_anchor, dst_subgraph, src_subgraph); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_ADD_PLC_END_FAILED, "[GraphPartitioner]: add placeholder end failed."); + return ret; + } + return SUCCESS; +} + +Status ge::GraphPartitioner::SortSubGraphs(const ge::ComputeGraphPtr &compute_graph) { + uint32_t rank = kRankOne; // rank 0 for data graph + ComputeGraphPtr new_input_nodes_sub_graph = MakeShared("inputNodeGraph"); + if (new_input_nodes_sub_graph == nullptr || compute_graph == nullptr) { + GELOGE(FAILED, "[GraphPartitioner]: new_input_nodes_sub_graph or compute_graph is null."); + return FAILED; + } + for (const auto &node : compute_graph->GetDirectNode()) { + // All nodes in original graph have a copy in corresponding_node_in_partitions_, so it can not be null + auto sub_graph = corresponding_node_in_partitions_.at(node)->GetOwnerComputeGraph(); + if ((partitions_2_rank_.find(sub_graph) == partitions_2_rank_.end()) && + (partitions_[sub_graph] != kEngineDefaultData)) { + partitions_2_rank_[sub_graph] = rank; + rank_2_partitions_.push_back(sub_graph); + rank++; + } else if (partitions_[sub_graph] == kEngineDefaultData) { // merge data graph + if (PutInputNodesInSubGraph(sub_graph, new_input_nodes_sub_graph) != SUCCESS) { + GELOGE(FAILED, "[GraphPartitioner]: putInputNodesInSubGraph failed."); + return FAILED; + } + auto to_be_del = partitions_.find(sub_graph); + partitions_.erase(to_be_del); + } + } + if (!new_input_nodes_sub_graph->GetDirectNode().empty()) { + rank_2_partitions_.insert(rank_2_partitions_.begin(), new_input_nodes_sub_graph); + partitions_2_rank_[new_input_nodes_sub_graph] = 0; + AddNewGraphToPartition(new_input_nodes_sub_graph, "inputNodesSubGraph"); + } + // reinit rank + 
rank = kRankZero; + for (const auto &it : rank_2_partitions_) { + // rename subGraph based on rank + if (it != nullptr) { + // rename subGraph based on rank + string graph_name = + "partition" + std::to_string(partition_times_) + "_rank" + std::to_string(rank) + "_" + it->GetName(); + it->SetName(graph_name); + } + rank++; + } + return SUCCESS; +} + +AnchorPtr ge::GraphPartitioner::GetEndInAnchor(const AnchorPtr &src_anchor, const NodePtr &end_node) { + if (src_anchor == nullptr || end_node == nullptr) { + GELOGE(FAILED, "parameter ptr is null."); + return nullptr; + } + AnchorPtr end_in_anchor; + if (Anchor::DynamicAnchorCast(src_anchor) != nullptr) { + end_in_anchor = end_node->GetInDataAnchor(0); + } else { + end_in_anchor = end_node->GetInControlAnchor(); + } + return end_in_anchor; +} + +AnchorPtr ge::GraphPartitioner::GetPldOutAnchor(const NodePtr &pld_node, const AnchorPtr &dst_anchor) { + if (pld_node == nullptr || dst_anchor == nullptr) { + GELOGE(FAILED, "parameter ptr is null."); + return nullptr; + } + AnchorPtr pld_out_anchor; + if (Anchor::DynamicAnchorCast(dst_anchor) != nullptr) { + pld_out_anchor = pld_node->GetOutDataAnchor(0); + } else { + pld_out_anchor = pld_node->GetOutControlAnchor(); + } + return pld_out_anchor; +} + +void ge::GraphPartitioner::AddEndPldInformationToSubGraphInfo(ge::SubGraphInfoPtr &sub_graph_info) { + if (sub_graph_info == nullptr) { + GELOGE(FAILED, "parameter ptr is null."); + return; + } + auto sub_graph = sub_graph_info->GetSubGraph(); + GE_CHECK_NOTNULL_JUST_RETURN(sub_graph); + NodetoNodeMap end_map; + NodetoNodeMap pld_map; + for (const auto &node : sub_graph->GetAllNodes()) { + if (node->GetType() == kEndType) { + end_map[node] = end_2_pld_.at(node); + } + if (node->GetType() == kPlaceHolderType) { + pld_map[node] = pld_2_end_.at(node); + } + } + sub_graph_info->SetEnd2PldMap(end_map); + sub_graph_info->SetPld2EndMap(pld_map); +} +} // namespace ge diff --git a/src/ge/graph/partition/graph_partition.h 
b/src/ge/graph/partition/graph_partition.h new file mode 100644 index 00000000..f6f58e47 --- /dev/null +++ b/src/ge/graph/partition/graph_partition.h @@ -0,0 +1,155 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PARTITION_GRAPH_PARTITION_H_ +#define GE_GRAPH_PARTITION_GRAPH_PARTITION_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "graph/compute_graph.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/operator_reg.h" +#include "graph/partition/engine_place.h" + +namespace ge { +using PartitionMap = std::unordered_map; +using NodetoNodeMap = std::unordered_map; +using EnginetoGraphMap = std::unordered_map; +using EdgeMap = std::set>; +using ClusterSet = std::unordered_set; +class Cluster { + public: + size_t index_; // corresponding to rank of node + ClusterSet in_clu_; // inClusters index + ClusterSet out_clu_; // outClusters index + std::list nodes_; // including node of this cluster + std::string engine_name_; // data like must be a specific engine + std::string stream_label_; + explicit Cluster(size_t index, std::string engine, std::string stream) + : index_(index), engine_name_(std::move(engine)), stream_label_(std::move(stream)) {} + ~Cluster() = default; +}; +using ClusterPtr = std::shared_ptr; + +class GraphPartitioner { + public: + /// Partition() can only be called in Partition mode. 
+ /// MergeAfterSubGraphOptimization() can only be called in Merge mode. + /// After Partition(), change to Merge mode. After MergeAfterSubGraphOptimization(), change to Partition mode + enum Mode { kPartitioning, kSecondPartitioning, kMerging }; + GraphPartitioner() : mode_(GraphPartitioner::kPartitioning) {} + ~GraphPartitioner() = default; + + // the main method that partitions the graph + // input_size and output_size are the number of inputs and outputs in the original graph + Status Partition(ComputeGraphPtr compute_graph, vector &output_subgraphs, Mode mode); + + // after partition, all SubGraph will be merged back based on end<->pld. + Status MergeAfterSubGraphOptimization(ComputeGraphPtr &output_merged_compute_graph, + const std::vector &sub_graph_list); + + private: + Status MergeAllSubGraph(ComputeGraphPtr &output_merged_compute_graph, + const std::vector &sub_graph_list); + Status CheckIfEnd2PldEmpty(ComputeGraphPtr &output_merged_compute_graph); + + // Run engine placer, assign engine, check support amd init all clusters + Status Initialize(ComputeGraphPtr compute_graph); + + /// add pld and end nodes between two sub-graphs for the specific anchors + /// all anchors are in original graph + Status AddPlaceHolderEnd(const AnchorPtr &out_anchor, const AnchorPtr &in_anchor); + void AddNewGraphToPartition(ComputeGraphPtr &input_graph, const std::string &engine_name); + Status AddPartitionsToGraphNode(vector &output_subgraphs, ComputeGraphPtr compute_graph); + + // check if the node has no input + bool HasNoInput(NodePtr node); + + // check if the node is data-like. 
Currently data-like means: data, variable, const + bool IsDataLike(NodePtr node); + + // add place holder and end node in src and dst graph + graphStatus AddPlaceHolderEndInSrcDstGraph(const AnchorPtr &out_data_anchor, const AnchorPtr &peer_in_anchor, + const ComputeGraphPtr &pld_graph, const ComputeGraphPtr &end_graph); + Status LinkInput2EndRemoveOriginalLink(NodePtr input_node, ComputeGraphPtr src_graph, ComputeGraphPtr dst_graph); + + /// After partition, put input nodes in srcGraph to dstGraph. Data will be linked to 'end'; + /// the other end will be linked to 'placeholder' + Status PutInputNodesInSubGraph(const ComputeGraphPtr &src_graph, const ComputeGraphPtr &dst_graph); + + // Sort all subGraphs topologically, store the info in sorted_partitions_ + Status SortSubGraphs(const ComputeGraphPtr &); + AnchorPtr GetEndInAnchor(const AnchorPtr &src_anchor, const NodePtr &end_node); + AnchorPtr GetPldOutAnchor(const NodePtr &pld_node, const AnchorPtr &dst_anchor); + Status RemoveNodeAndEdgeBetweenEndPld(ComputeGraphPtr &output_merged_compute_graph, + const std::vector &sub_graph_list); + void AddEndPldInformationToSubGraphInfo(SubGraphInfoPtr &sub_graph_info); + bool IsMergeable(size_t parent_cluster, size_t child_cluster, size_t upper_bound); + + // Link from->to + void InsertEdge(size_t from, size_t to); + + // Remove parent cluster's out and child cluster's in + void RemoveEdge(size_t parent_cluster, size_t child_cluster); + void MergeTwoClusters(size_t parent_cluster, size_t &child_cluster); + + // Check if there's a second path between two clusters. 
The max path length is upper_bound + bool HasSecondPath(size_t src, size_t dst, size_t upper_bound); + + // Mark all clusters + void MarkClusters(); + + /// Split all sub graph and add placeholder, end according to marks + /// traverse marked clusters and split them into sub-graphs + Status SplitSubGraphs(ComputeGraphPtr compute_graph); + Status UpdateEndOpDesc(const NodePtr &dst_node, int input_index, OpDescPtr &end_op_desc); + Status UpdatePldOpDesc(const NodePtr &dst_node, int input_index, OpDescPtr &end_op_desc); + + // Clear partition data + void ClearAllPartitionData(Mode mode); + void SetMergedGraphId(ComputeGraphPtr &output_merged_compute_graph); + + // private local variables + EnginePlacer engine_placer_; + PartitionMap partitions_; // sub-graphs after partition + std::unordered_map partitions_2_rank_; // + std::vector rank_2_partitions_; // + NodetoNodeMap corresponding_node_in_partitions_; // mapping between a node in the original graph and + uint32_t num_of_pld_end_ = 0; // a counter to track 'place holder' and 'end' + size_t input_size_ = 0; + size_t output_size_ = 0; + std::string output_name_; + NodetoNodeMap end_2_pld_; // mapping between each 'end; and 'placeHolder' node + NodetoNodeMap pld_2_end_; // mapping between each 'placeHolder' and 'end' node + std::map index_2_end_; // order mapping between peerindex and 'end' node + Mode mode_ = kPartitioning; + uint32_t partition_times_ = 0; // times of call partition + std::vector transfer_graph_; // contains all transfer graphs + std::unordered_map clusters_; // index to cluster ptr, contains all nodes + std::unordered_map> node_2_cluster_; // node map to cluster + std::unordered_map, ComputeGraphPtr> cluster_2_partition_; // cluster map to subgraph +}; +} // namespace ge + +#endif // GE_GRAPH_PARTITION_GRAPH_PARTITION_H_ diff --git a/src/ge/graph/passes/addn_pass.cc b/src/ge/graph/passes/addn_pass.cc new file mode 100644 index 00000000..c0592965 --- /dev/null +++ b/src/ge/graph/passes/addn_pass.cc @@ 
-0,0 +1,46 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/addn_pass.h" + +#include + +namespace ge { +namespace { +const size_t kInputSizeSingle = 1; +} // namespace + +Status AddNPass::Run(NodePtr &node) { + GELOGD("AddNPass running"); + if (node == nullptr) { + GELOGE(PARAM_INVALID, "param [node] must not be null."); + return PARAM_INVALID; + } + + if (node->GetType() == ADDN) { + if (node->GetOpDesc() == nullptr) { + GELOGE(PARAM_INVALID, "Param [node] op desc is null."); + return PARAM_INVALID; + } + // AddN with single input can be optimized + if (node->GetOpDesc()->GetInputsSize() == kInputSizeSingle) { + std::vector io_map = {PassUtils::GetUniqueInDataAnchorIndex(node)}; + return IsolateAndDeleteNode(node, io_map); + } + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/addn_pass.h b/src/ge/graph/passes/addn_pass.h new file mode 100644 index 00000000..dd44e3cd --- /dev/null +++ b/src/ge/graph/passes/addn_pass.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_ADDN_PASS_H_ +#define GE_GRAPH_PASSES_ADDN_PASS_H_ + +#include "common/ge_inner_error_codes.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/types.h" +#include "graph/graph.h" +#include "graph/passes/base_pass.h" +#include "graph/passes/pass_utils.h" +#include "graph/utils/graph_utils.h" + +namespace ge { +class AddNPass : public BaseNodePass { + public: + Status Run(ge::NodePtr &node) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_ADDN_PASS_H_ diff --git a/src/ge/graph/passes/aicpu_constant_folding_pass.cc b/src/ge/graph/passes/aicpu_constant_folding_pass.cc new file mode 100644 index 00000000..ecb6f1d3 --- /dev/null +++ b/src/ge/graph/passes/aicpu_constant_folding_pass.cc @@ -0,0 +1,558 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/aicpu_constant_folding_pass.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/ge/ge_util.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" + +namespace { +const char *const kKernelLibName = "aicpu_kernel"; +const uint64_t kReleaseFlag = 1; +const uint64_t kDouble = 2; +} // namespace +namespace ge { +Status AicpuConstantFoldingPass::Run(ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + GELOGD("Begin to run aicpu constant folding on node %s", node->GetName().c_str()); + if (node->GetType() == NETOUTPUT) { + GELOGI("Skip aicpu constant folding on node[netoutput] %s", node->GetName().c_str()); + return SUCCESS; + } + + vector weight_vec; + bool flag = CheckInput(node, weight_vec); + if (!flag) { + return SUCCESS; + } + OpDescPtr node_desc = node->GetOpDesc(); // checked before + vector data_vec; + vector input_addrs; + vector output_addrs; + Status ret = GetInputAddrs(weight_vec, input_addrs); + if (ret != SUCCESS) { + ReleaseMemory(input_addrs, output_addrs, data_vec); + return SUCCESS; + } + + ret = GetOutputAddrs(node_desc, output_addrs); + if (ret != SUCCESS) { + ReleaseMemory(input_addrs, output_addrs, data_vec); + return SUCCESS; + } + + ret = LaunchSingleOpRunTask(node, input_addrs, output_addrs); + if (ret != SUCCESS) { + ReleaseMemory(input_addrs, output_addrs, data_vec); + return SUCCESS; + } + GELOGI("[Node:%s] Launch singleOpRunTask success", node->GetName().c_str()); + + vector data_infos; + ret = GenerateDataPtrInfo(output_addrs, data_vec, data_infos); + if (ret != SUCCESS) { + ReleaseMemory(input_addrs, output_addrs, data_vec); + return SUCCESS; + } + GELOGI("[Node:%s] Generate dataPtrInfo success", node->GetName().c_str()); + + ret = 
LaunchMemCopyTask(data_infos); + if (ret != SUCCESS) { + ReleaseMemory(input_addrs, output_addrs, data_vec); + return SUCCESS; + } + GELOGI("[Node:%s] Launch memCopyTask success", node->GetName().c_str()); + + vector outputs; + ret = GenerateGeTensor(node_desc, data_vec, outputs); + if (ret != SUCCESS) { + ReleaseMemory(input_addrs, output_addrs, data_vec); + return SUCCESS; + } + ReleaseMemory(input_addrs, output_addrs, data_vec); + GELOGI("[Node:%s] Generate geTensor success", node->GetName().c_str()); + return Folding(node, outputs); +} + +bool AicpuConstantFoldingPass::CheckInput(const NodePtr &node, vector &weight_vec) { + OpDescPtr node_desc = node->GetOpDesc(); + if (node_desc == nullptr) { + GELOGW("Opdesc of %s is null", node->GetName().c_str()); + return false; + } + DataType data_type = node_desc->GetOutputDesc(0).GetDataType(); + Format format = node_desc->GetOutputDesc(0).GetFormat(); + GELOGD("Current [node:%s, type:%s] info: format: %s, datatype:%s", node->GetName().c_str(), node->GetType().c_str(), + TypeUtils::FormatToSerialString(format).c_str(), TypeUtils::DataTypeToSerialString(data_type).c_str()); + auto input_nodes = OpDescUtils::GetConstInputNode(*node); + if (input_nodes.empty() || input_nodes.size() != node_desc->GetInputsSize()) { + GELOGD("Const input nodes size is %zu, and nodeDesc inputsSize is %zu.", input_nodes.size(), + node_desc->GetInputsSize()); + return false; + } + weight_vec = OpDescUtils::GetInputData(input_nodes); + return true; +} + +Status AicpuConstantFoldingPass::GetInputAddrs(const vector &weight_vec, + vector &input_addrs) { + if (weight_vec.empty()) { + GELOGE(FAILED, "Weight is null"); + return FAILED; + } + for (const ConstGeTensorPtr &weight : weight_vec) { + void *input_addr = nullptr; + GE_CHK_RT_RET(rtMalloc(&input_addr, weight->GetData().size(), RT_MEMORY_HBM)); + + rtError_t rt_ret = rtMemcpy(input_addr, weight->GetData().size(), weight->GetData().data(), + weight->GetData().size(), RT_MEMCPY_HOST_TO_DEVICE); + 
if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtMemcpy error"); + GE_CHK_RT(rtFree(input_addr)); + return FAILED; + } + + AddrAndType input_info = {static_cast(reinterpret_cast(input_addr)), kData}; + input_addrs.emplace_back(input_info); + } + return SUCCESS; +} + +Status AicpuConstantFoldingPass::GetOutputAddrs(const OpDescPtr &node_desc, vector &output_addrs) { + if (node_desc->GetOutputsSize() == 0) { + GELOGE(FAILED, "Output size is 0 "); + return FAILED; + } + for (size_t i = 0; i < node_desc->GetOutputsSize(); ++i) { + void *summary_addr = nullptr; + GE_CHK_RT_RET(rtMalloc(&summary_addr, sizeof(aicpu::FWKAdapter::ResultSummary), RT_MEMORY_HBM)); + output_addrs.emplace_back(static_cast(reinterpret_cast(summary_addr))); + } + return SUCCESS; +} + +Status AicpuConstantFoldingPass::GenerateDataPtrInfo(const vector &output_addrs, + vector &data_vec, vector &data_infos) { + for (uint64_t output_addr : output_addrs) { + aicpu::FWKAdapter::ResultSummary result_summary; + GE_CHK_RT_RET(rtMemcpy(&result_summary, sizeof(aicpu::FWKAdapter::ResultSummary), + reinterpret_cast(reinterpret_cast(output_addr)), + sizeof(aicpu::FWKAdapter::ResultSummary), RT_MEMCPY_DEVICE_TO_HOST)); + void *raw_data_addr = nullptr; + GE_CHK_RT_RET(rtMalloc(&raw_data_addr, result_summary.raw_data_size, RT_MEMORY_HBM)); + + void *shape_data_addr = nullptr; + rtError_t rt_ret = rtMalloc(&shape_data_addr, result_summary.shape_data_size, RT_MEMORY_HBM); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtMalloc error"); + GE_CHK_RT(rtFree(raw_data_addr)); + return FAILED; + } + DataPtrInfo raw_data_info; + raw_data_info.release_flag = kReleaseFlag; + raw_data_info.data_size = result_summary.raw_data_size; + raw_data_info.src_ptr = result_summary.raw_data_ptr; + raw_data_info.dst_ptr = static_cast(reinterpret_cast(raw_data_addr)); + data_vec.emplace_back(raw_data_info); + + DataPtrInfo shape_data_info; + shape_data_info.release_flag = kReleaseFlag; + shape_data_info.data_size = 
result_summary.shape_data_size; + shape_data_info.src_ptr = result_summary.shape_data_ptr; + shape_data_info.dst_ptr = static_cast(reinterpret_cast(shape_data_addr)); + data_vec.emplace_back(shape_data_info); + } + for (const DataPtrInfo &data_info : data_vec) { + data_infos.emplace_back(static_cast(reinterpret_cast(&data_info))); + } + return SUCCESS; +} + +Status AicpuConstantFoldingPass::UpdateWorkSpaceAddr(string &task_info, STR_FWK_OP_KERNEL &task) const { + // Update the workspace_addr + if (task_info.empty()) { + GELOGE(FAILED, "task_info is empty "); + return FAILED; + } + void *workspace_addr = nullptr; + GE_CHK_RT_RET(rtMalloc(&workspace_addr, task_info.size(), RT_MEMORY_HBM)); + rtError_t rt_ret = + rtMemcpy(workspace_addr, task_info.size(), task_info.data(), task_info.size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtMemcpy error"); + GE_CHK_RT(rtFree(workspace_addr)); + return FAILED; + } + + uint64_t workspace_base_addr = static_cast(reinterpret_cast(workspace_addr)); + task.fwkKernelBase.fwk_kernel.workspaceBaseAddr = workspace_base_addr; + return SUCCESS; +} + +Status AicpuConstantFoldingPass::UpdateInputAndOutputAddr(const vector &io_addrs, + STR_FWK_OP_KERNEL &task) const { + auto addrs_size = sizeof(uint64_t) * (io_addrs.size()); + if (addrs_size < 1) { + GELOGE(FAILED, "addrs_size is less than 1 "); + return FAILED; + } + void *input_output_addr = nullptr; + GE_CHK_RT_RET(rtMalloc(&input_output_addr, addrs_size, RT_MEMORY_HBM)); + rtError_t rt_ret = rtMemcpy(input_output_addr, addrs_size, io_addrs.data(), addrs_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtMemcpy error"); + GE_CHK_RT(rtFree(input_output_addr)); + return FAILED; + } + + uint64_t in_out_addr = static_cast(reinterpret_cast(input_output_addr)); + task.fwkKernelBase.fwk_kernel.inputOutputAddr = in_out_addr; + return SUCCESS; +} + +Status AicpuConstantFoldingPass::UpdateSingleOpAddr(string &task_info, const 
vector &input_addrs, + const vector &outputs_addr_vec, STR_FWK_OP_KERNEL &task) { + // Build the SingleOpAddr + vector inputs_addr_vec; + for (const auto &item : input_addrs) { + inputs_addr_vec.push_back(item.input_addr); + } + vector io_addrs; + io_addrs.insert(io_addrs.end(), inputs_addr_vec.begin(), inputs_addr_vec.end()); + io_addrs.insert(io_addrs.end(), outputs_addr_vec.begin(), outputs_addr_vec.end()); + + Status ret = UpdateInputAndOutputAddr(io_addrs, task); + if (ret != SUCCESS) { + GELOGE(ret, "UpdateInputAndOutputAddr error"); + return ret; + } + ret = UpdateWorkSpaceAddr(task_info, task); + if (ret != SUCCESS) { + GELOGE(ret, "UpdateWorkSpaceAddr error"); + return ret; + } + return SUCCESS; +} + +Status AicpuConstantFoldingPass::UpdateMemCopyAddr(string &task_info, const vector &data_infos, + vector &internal_addrs, STR_FWK_OP_KERNEL &task) { + vector release_flags; + vector data_sizes; + vector src_addrs; + vector dst_addrs; + for (auto item : data_infos) { + auto *data_info_ptr = reinterpret_cast(reinterpret_cast(item)); // pointer cannot be null + release_flags.push_back(data_info_ptr->release_flag); + data_sizes.push_back(data_info_ptr->data_size); + src_addrs.push_back(data_info_ptr->src_ptr); + dst_addrs.push_back(data_info_ptr->dst_ptr); + } + vector> inputs = {release_flags, data_sizes, src_addrs, dst_addrs}; + auto data_size = sizeof(uint64_t) * (data_infos.size()); + vector io_addrs; + if (data_infos.size() > 0) { + for (const auto &item : inputs) { + void *input_addr_ptr = nullptr; + GE_CHK_RT_RET(rtMalloc(&input_addr_ptr, data_size, RT_MEMORY_HBM)); + rtError_t rt_ret = rtMemcpy(input_addr_ptr, data_size, item.data(), data_size, RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtMemcpy error"); + GE_CHK_RT(rtFree(input_addr_ptr)); + return FAILED; + } + uint64_t input_addr = static_cast(reinterpret_cast(input_addr_ptr)); + io_addrs.push_back(input_addr); + } + } + internal_addrs = io_addrs; + + Status ret = 
UpdateInputAndOutputAddr(io_addrs, task); + if (ret != SUCCESS) { + GELOGE(ret, "UpdateInputAndOutputAddr error"); + return ret; + } + ret = UpdateWorkSpaceAddr(task_info, task); + if (ret != SUCCESS) { + GELOGE(ret, "UpdateWorkSpaceAddr error"); + return ret; + } + return SUCCESS; +} + +Status AicpuConstantFoldingPass::LaunchSingleOpRunTask(const NodePtr &node, const vector &input_addrs, + const vector &output_addrs) { + void *task_buf = nullptr; + auto instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GE is not initialized"); + return GE_CLI_GE_NOT_INITIALIZED; + } + OpsKernelInfoStorePtr kernel_info = instance_ptr->OpsKernelManagerObj().GetOpsKernelInfoStore(kKernelLibName); + if (kernel_info == nullptr) { + GELOGE(FAILED, "Get op kernel info store failed"); + return FAILED; + } + STR_FWK_OP_KERNEL aicpu_task; + aicpu_task.fwkKernelBase.fwk_kernel.inputOutputAddr = 0; + aicpu_task.fwkKernelBase.fwk_kernel.workspaceBaseAddr = 0; + std::string task_info; + Status ret = kernel_info->GenSingleOpRunTask(node, aicpu_task, task_info); + if (ret != SUCCESS) { + return ret; + } + std::function callback = [&]() { + void *input_output_ptr = + reinterpret_cast(reinterpret_cast(aicpu_task.fwkKernelBase.fwk_kernel.inputOutputAddr)); + if (input_output_ptr != nullptr) { + GE_CHK_RT(rtFree(input_output_ptr)); + } + void *workspace_addr_ptr = + reinterpret_cast(reinterpret_cast(aicpu_task.fwkKernelBase.fwk_kernel.workspaceBaseAddr)); + if (workspace_addr_ptr != nullptr) { + GE_CHK_RT(rtFree(workspace_addr_ptr)); + } + }; + GE_MAKE_GUARD(release, callback); + + ret = UpdateSingleOpAddr(task_info, input_addrs, output_addrs, aicpu_task); + if (ret != SUCCESS) { + GELOGE(ret, "UpdateSingleOpAddr error"); + return ret; + } + ret = GenerateTaskForLaunch(aicpu_task, task_buf); + if (ret != SUCCESS) { + GELOGE(ret, "GenerateTaskForLaunch error"); + return ret; + } + ret = KernelLaunch(task_buf); + 
if (ret != SUCCESS) { + GELOGE(ret, "KernelLaunch error"); + return ret; + } + + return SUCCESS; +} + +Status AicpuConstantFoldingPass::LaunchMemCopyTask(const vector &data_infos) { + void *task_buf = nullptr; + auto instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr || !instance_ptr->InitFlag()) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "GE is not initialized"); + return GE_CLI_GE_NOT_INITIALIZED; + } + OpsKernelInfoStorePtr kernel_info = instance_ptr->OpsKernelManagerObj().GetOpsKernelInfoStore(kKernelLibName); + if (kernel_info == nullptr) { + GELOGE(FAILED, "Get op kernel info store failed"); + return FAILED; + } + STR_FWK_OP_KERNEL aicpu_task; + aicpu_task.fwkKernelBase.fwk_kernel.inputOutputAddr = 0; + aicpu_task.fwkKernelBase.fwk_kernel.workspaceBaseAddr = 0; + std::string task_info; + Status ret = kernel_info->GenMemCopyTask(data_infos.size(), aicpu_task, task_info); + if (ret != SUCCESS) { + return ret; + } + + vector internal_addrs; + std::function callback = [&]() { + for (auto item : internal_addrs) { + GE_CHK_RT(rtFree(reinterpret_cast(reinterpret_cast(item)))); // pointer cannot be null + } + void *input_output_ptr = + reinterpret_cast(reinterpret_cast(aicpu_task.fwkKernelBase.fwk_kernel.inputOutputAddr)); + if (input_output_ptr != nullptr) { + GE_CHK_RT(rtFree(input_output_ptr)); + } + void *workspace_addr_ptr = + reinterpret_cast(reinterpret_cast(aicpu_task.fwkKernelBase.fwk_kernel.workspaceBaseAddr)); + if (workspace_addr_ptr != nullptr) { + GE_CHK_RT(rtFree(workspace_addr_ptr)); + } + }; + GE_MAKE_GUARD(release, callback); + + ret = UpdateMemCopyAddr(task_info, data_infos, internal_addrs, aicpu_task); + if (ret != SUCCESS) { + GELOGE(ret, "UpdateMemCopyAddr error"); + return ret; + } + ret = GenerateTaskForLaunch(aicpu_task, task_buf); + if (ret != SUCCESS) { + GELOGE(ret, "GenerateTaskForLaunch error"); + return ret; + } + ret = KernelLaunch(task_buf); + if (ret != SUCCESS) { + GELOGE(ret, "KernelLaunch error"); + return ret; + } 
+ return SUCCESS; +} + +Status AicpuConstantFoldingPass::GenerateTaskForLaunch(STR_FWK_OP_KERNEL &aicpu_task, void *&task_buf) const { + GE_CHK_RT_RET(rtMalloc(&task_buf, sizeof(STR_FWK_OP_KERNEL), RT_MEMORY_HBM)); + + rtError_t rt_ret = rtMemcpy(task_buf, sizeof(STR_FWK_OP_KERNEL), reinterpret_cast(&aicpu_task), + sizeof(STR_FWK_OP_KERNEL), RT_MEMCPY_HOST_TO_DEVICE); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtMemcpy error"); + GE_CHK_RT(rtFree(task_buf)); + return FAILED; + } + return SUCCESS; +} + +Status AicpuConstantFoldingPass::KernelLaunch(void *task_buf) const { + rtModel_t model = nullptr; + rtStream_t stream = nullptr; + rtStream_t stream_run = nullptr; + std::function callback = [&]() { + if (task_buf != nullptr) { + GE_CHK_RT(rtFree(task_buf)); + } + if (model != nullptr) { + GE_CHK_RT(rtModelDestroy(model)); + } + if (stream != nullptr) { + GE_CHK_RT(rtStreamDestroy(stream)); + } + if (stream_run != nullptr) { + GE_CHK_RT(rtStreamDestroy(stream_run)); + } + }; + GE_MAKE_GUARD(release, callback); + + rtError_t rt_ret = rtModelCreate(&model, 0); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "create model failed."); + return FAILED; + } + rt_ret = rtStreamCreate(&stream, 0); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "create stream failed."); + return FAILED; + } + rt_ret = rtModelBindStream(model, stream, 0); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtModelBindStream failed."); + return FAILED; + } + rt_ret = rtKernelLaunchEx(task_buf, sizeof(STR_FWK_OP_KERNEL), 0, stream); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtKernelLaunchEx failed."); + return FAILED; + } + rt_ret = rtModelLoadComplete(model); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtModelLoadComplete failed."); + return FAILED; + } + rt_ret = rtStreamCreate(&stream_run, 0); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "create run stream failed."); + return FAILED; + } + rt_ret = rtModelExecute(model, stream_run, 0); + if (rt_ret != 
RT_ERROR_NONE) { + GELOGE(rt_ret, "rtModelExecute failed."); + return FAILED; + } + rt_ret = rtStreamSynchronize(stream_run); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(rt_ret, "rtStreamSynchronize failed."); + return FAILED; + } + return SUCCESS; +} + +Status AicpuConstantFoldingPass::GenerateGeTensor(const OpDescPtr &node_desc, const vector &data_vec, + vector &outputs) { + if ((node_desc->GetOutputsSize() * kDouble) != data_vec.size()) { + GELOGE(FAILED, "node[%s] something wrong with output size", node_desc->GetName().c_str()); + return FAILED; + } + + for (size_t i = 0; i < node_desc->GetOutputsSize(); i++) { + auto output_tensor_desc = node_desc->GetOutputDesc(static_cast(i)); + GeTensorPtr output_ptr = MakeShared(output_tensor_desc); + if (output_ptr == nullptr) { + GELOGE(FAILED, "node[%s] something wrong with construct GeTensor", node_desc->GetName().c_str()); + return FAILED; + } + const DataPtrInfo &raw_data_info = data_vec.at(i * kDouble); + uint64_t raw_data_size = raw_data_info.data_size; + std::unique_ptr data_addr(new (std::nothrow) uint8_t[raw_data_size]()); + if (data_addr == nullptr) { + GELOGE(MEMALLOC_FAILED, "new data_addr failed"); + return INTERNAL_ERROR; + } + GE_CHK_RT_RET(rtMemcpy(data_addr.get(), raw_data_size, + reinterpret_cast(reinterpret_cast(raw_data_info.dst_ptr)), raw_data_size, + RT_MEMCPY_DEVICE_TO_HOST)); + GE_IF_BOOL_EXEC(output_ptr->SetData(data_addr.get(), raw_data_size) != GRAPH_SUCCESS, + GELOGE(FAILED, "set data failed"); + return FAILED); + GELOGI("GenerateGeTensor: raw_data_size %lu", raw_data_size); + + const DataPtrInfo &shape_data_info = data_vec.at(i * kDouble + 1); + uint64_t shape_data_size = shape_data_info.data_size; + uint64_t dim_num = shape_data_size / sizeof(uint64_t); + std::unique_ptr shape_addr(new (std::nothrow) int64_t[dim_num]()); + if (shape_addr == nullptr) { + GELOGE(MEMALLOC_FAILED, "new shape_addr failed"); + return INTERNAL_ERROR; + } + GE_CHK_RT_RET(rtMemcpy(shape_addr.get(), shape_data_size, + 
reinterpret_cast(reinterpret_cast(shape_data_info.dst_ptr)), + shape_data_size, RT_MEMCPY_DEVICE_TO_HOST)); + std::vector shapeDims; + for (size_t idx = 0; idx < dim_num; idx++) { + shapeDims.push_back(shape_addr[idx]); + GELOGI("GenerateGeTensor: dim %ld", shape_addr[idx]); + } + output_ptr->MutableTensorDesc().SetShape(GeShape(shapeDims)); + + outputs.emplace_back(output_ptr); + } + return SUCCESS; +} + +void AicpuConstantFoldingPass::ReleaseMemory(const vector &input_addrs, + const vector &output_addrs, + const vector &data_vec) const { + for (const auto &item : input_addrs) { + GE_CHK_RT(rtFree(reinterpret_cast(reinterpret_cast(item.input_addr)))); + } + for (auto item : output_addrs) { + GE_CHK_RT(rtFree(reinterpret_cast(reinterpret_cast(item)))); + } + for (const auto &item : data_vec) { + GE_CHK_RT(rtFree(reinterpret_cast(reinterpret_cast(item.dst_ptr)))); + } +} +} // namespace ge diff --git a/src/ge/graph/passes/aicpu_constant_folding_pass.h b/src/ge/graph/passes/aicpu_constant_folding_pass.h new file mode 100644 index 00000000..bc495f5d --- /dev/null +++ b/src/ge/graph/passes/aicpu_constant_folding_pass.h @@ -0,0 +1,68 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_AICPU_CONSTANT_FOLDING_PASS_H_ +#define GE_GRAPH_PASSES_AICPU_CONSTANT_FOLDING_PASS_H_ + +#include +#include + +#include "common/opskernel/ops_kernel_info_store.h" +#include "graph/passes/folding_pass.h" + +namespace ge { +class AicpuConstantFoldingPass : public FoldingPass { + public: + Status Run(ge::NodePtr &node) override; + + private: + enum AddrType { kData = 0, kSummary = 1, kTypeEnd }; + + struct AddrAndType { + uint64_t input_addr; + AddrType attr_type; + } __attribute__((packed)); + + struct DataPtrInfo { + uint64_t release_flag; + uint64_t data_size; + uint64_t src_ptr; + uint64_t dst_ptr; + } __attribute__((packed)); + bool CheckInput(const ge::NodePtr &node, vector &weight_vec); + Status GetInputAddrs(const vector &weight_vec, vector &input_addrs); + Status GetOutputAddrs(const OpDescPtr &node_desc, vector &output_addrs); + Status GenerateTaskForLaunch(STR_FWK_OP_KERNEL &aicpu_task, void *&task_buf) const; + Status GenerateDataPtrInfo(const vector &output_addrs, vector &data_vec, + vector &data_infos); + Status GenerateGeTensor(const OpDescPtr &node_desc, const vector &data_vec, + vector &outputs); + Status UpdateWorkSpaceAddr(string &task_info, STR_FWK_OP_KERNEL &task) const; + Status UpdateInputAndOutputAddr(const vector &io_addrs, STR_FWK_OP_KERNEL &task) const; + Status UpdateSingleOpAddr(string &task_info, const vector &input_addrs, + const vector &outputs_addr_vec, STR_FWK_OP_KERNEL &task); + Status UpdateMemCopyAddr(string &task_info, const vector &data_infos, vector &internal_addrs, + STR_FWK_OP_KERNEL &task); + Status LaunchSingleOpRunTask(const NodePtr &node, const vector &input_addrs, + const vector &output_addrs); + Status LaunchMemCopyTask(const vector &data_infos); + void ReleaseMemory(const vector &input_addrs, const vector &output_addrs, + const vector &data_vec) const; + Status KernelLaunch(void *aicpu_task) const; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_AICPU_CONSTANT_FOLDING_PASS_H_ diff 
--git a/src/ge/graph/passes/assert_pass.cc b/src/ge/graph/passes/assert_pass.cc new file mode 100644 index 00000000..725016a9 --- /dev/null +++ b/src/ge/graph/passes/assert_pass.cc @@ -0,0 +1,101 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/assert_pass.h" + +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/util.h" + +namespace ge { +// aicpu not support string type, so current implemention is Upward traversal +Status AssertPass::Run(NodePtr &node) { + GELOGD("AssertPass running"); + if (node == nullptr) { + GELOGE(PARAM_INVALID, "param [node] must not be null."); + return PARAM_INVALID; + } + if (node->GetOpDesc() == nullptr) { + GELOGE(PARAM_INVALID, "param [node] [opDesc] must not be null."); + return PARAM_INVALID; + } + std::string op_type = node->GetOpDesc()->GetType(); + if (op_type == ASSERT) { + GELOGD("op type is assert."); + + std::vector nodes_unused; + // collect assert and other unused ops + CollectUnusedNode(node, nodes_unused); + // remove unused node + Status status = RemoveUnusedNode(nodes_unused); + if (status != SUCCESS) { + GELOGE(status, "remove unused node failed."); + return status; + } + } + return SUCCESS; +} + +void AssertPass::CollectUnusedNode(const NodePtr &assert_node, vector &nodes_unused) { + std::map 
invalid_outdata_info; + std::queue node_queue; + node_queue.push(assert_node); + + while (!node_queue.empty()) { + NodePtr cur_node = node_queue.front(); + if (cur_node == nullptr) { + continue; + } + node_queue.pop(); + nodes_unused.push_back(cur_node); + + for (const auto &src_node : cur_node->GetInDataNodes()) { + if (src_node != nullptr && src_node->GetOpDesc() != nullptr) { + auto size = ++invalid_outdata_info[src_node.get()]; + // src_node need to be deleted + if (src_node->GetOutDataNodesSize() == size && src_node->GetOpDesc()->GetType() != DATA && + src_node->GetOpDesc()->GetType() != AIPPDATA) { + node_queue.push(src_node); + } + } + } + } +} + +Status AssertPass::RemoveUnusedNode(std::vector &nodes_unused) { + for (NodePtr &node : nodes_unused) { + if (node == nullptr) { + continue; + } + std::vector assert_io_map; + size_t out_nums = node->GetAllOutDataAnchorsSize(); + while (out_nums > 0) { + assert_io_map.push_back(-1); + out_nums--; + } + + if (IsolateAndDeleteNode(node, assert_io_map) != SUCCESS) { + return FAILED; + } + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/assert_pass.h b/src/ge/graph/passes/assert_pass.h new file mode 100644 index 00000000..528f6046 --- /dev/null +++ b/src/ge/graph/passes/assert_pass.h @@ -0,0 +1,47 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_ASSERT_PASS_H_ +#define GE_GRAPH_PASSES_ASSERT_PASS_H_ + +#include + +#include "graph/passes/base_pass.h" + +namespace ge { +class AssertPass : public BaseNodePass { + public: + Status Run(NodePtr& node) override; + + private: + /// + /// collect assert and other unused ops + /// @param assert_node assert node + /// @param nodes_unused nodes to be deleted + /// @return void + /// + void CollectUnusedNode(const NodePtr &assert_node, std::vector& nodes_unused); + + /// + /// remove unused nodes from graph + /// @param graph + /// @param nodes_unused nodes to be deleted + /// @return Status + /// + Status RemoveUnusedNode(std::vector& nodes_unused); +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_ASSERT_PASS_H_ diff --git a/src/ge/graph/passes/atomic_addr_clean_pass.cc b/src/ge/graph/passes/atomic_addr_clean_pass.cc new file mode 100644 index 00000000..64315b9f --- /dev/null +++ b/src/ge/graph/passes/atomic_addr_clean_pass.cc @@ -0,0 +1,234 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/atomic_addr_clean_pass.h" + +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "common/ge_inner_error_codes.h" +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "init/gelib.h" + +namespace { +bool is_loop_graph = false; +} +namespace ge { +Status AtomicAddrCleanPass::Run(ComputeGraphPtr graph) { + if (graph == nullptr) { + GELOGE(PARAM_INVALID, "param [graph] must not be null."); + return PARAM_INVALID; + } + GELOGD("AtomicAddrCleanPass begin."); + // 1.Recoginze atomic and loop mark + vector atomic_node_vec; + for (NodePtr &node : graph->GetDirectNode()) { + if (IsAtomicOp(node)) { + atomic_node_vec.push_back(node); + } + if (!is_loop_graph && node->GetType() == LOOPCOND) { + // there is loop in this graph + GELOGD("There is no loop node. It will insert clean node follow atomic node."); + is_loop_graph = true; + } + } + if (atomic_node_vec.empty()) { + GELOGI("There is no atomic node. Ignore atomicAddrClean pass."); + return SUCCESS; + } + // 2.Insert clean node and link to atomic node + Status ret; + if (is_loop_graph) { + ret = HandleLoopGraph(graph, atomic_node_vec); + if (ret != SUCCESS) { + return ret; + } + } else { + ret = HandleNormalGraph(graph, atomic_node_vec); + if (ret != SUCCESS) { + return ret; + } + } + GELOGD("AtomicAddrCleanPass end."); + return SUCCESS; +} + +Status AtomicAddrCleanPass::HandleLoopGraph(ComputeGraphPtr &graph, const vector &atomic_node_vec) { + // Loop graph , insert clean node follow atomic node + int index = 0; + for (const auto &node : atomic_node_vec) { + // Insert atomic clean op + NodePtr clean_addr_node = InsertAtomicAddrCleanNode(graph); + if (clean_addr_node == nullptr) { + GELOGE(FAILED, "Insert AtomicAddrClean node failed. 
Ignore atomicAddrClean pass."); + return FAILED; + } + + GE_CHECK_NOTNULL(clean_addr_node->GetOpDesc()); + string node_name = clean_addr_node->GetOpDesc()->GetName(); + std::ostringstream oss; + oss << node_name << index; + node_name = oss.str(); + clean_addr_node->GetOpDesc()->SetName(node_name); // [Cascade Pointer] + GELOGD("Inserted atomic clean node name is %s", node_name.c_str()); + + auto ret = LinkToAtomicNode(node, clean_addr_node); + if (ret != SUCCESS) { + GELOGE(ret, "Link control anchor failed from atomic node to atomic_addr_clean node."); + return ret; + } + index++; + } + return SUCCESS; +} + +Status AtomicAddrCleanPass::HandleNormalGraph(ComputeGraphPtr &graph, const vector &atomic_node_vec) { + GELOGD("Not loop graph. It will insert only 1 clean node."); + // not loop graph , insert only one clean node in graph + NodePtr clean_addr_node = InsertAtomicAddrCleanNode(graph); + if (clean_addr_node == nullptr) { + GELOGE(FAILED, "Insert AtomicAddrClean node failed. Ignore atomicAddrClean pass."); + return FAILED; + } + for (const auto &node : atomic_node_vec) { + auto ret = LinkToAtomicNode(node, clean_addr_node); + if (ret != SUCCESS) { + GELOGE(ret, "Link control anchor failed from atomic node to atomic_addr_clean node."); + return ret; + } + } + + // for HCOM atomic node, add one more control link to peer-in node + for (auto &node : hcom_node_vec_) { + for (auto &in_anchor : node->GetAllInDataAnchors()) { + GE_CHECK_NOTNULL(in_anchor->GetPeerOutAnchor()); + NodePtr peer_in_node = in_anchor->GetPeerOutAnchor()->GetOwnerNode(); + Status ret = LinkToAtomicNode(peer_in_node, clean_addr_node); + if (ret != SUCCESS) { + GELOGE(ret, "Link failed, %s : %s", peer_in_node->GetName().c_str(), clean_addr_node->GetName().c_str()); + return ret; + } + } + } + return SUCCESS; +} + +NodePtr AtomicAddrCleanPass::InsertAtomicAddrCleanNode(ComputeGraphPtr &graph) { + OpDescPtr op_desc = MakeShared(NODE_NAME_ATOMIC_ADDR_CLEAN, ATOMICADDRCLEAN); + if (op_desc == nullptr) 
{ + GELOGE(INTERNAL_ERROR, "Make shared atomic addr clean op failed."); + return nullptr; + } + string session_graph_id; + if (!AttrUtils::GetStr(*graph, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id)) { + GELOGW("Get graph session_graph_id attr failed."); + } + if (!session_graph_id.empty()) { + (void)AttrUtils::SetStr(op_desc, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id); + } + + string name = op_desc->GetName() + session_graph_id; + op_desc->SetName(name); + GELOGI("Create cleanAddr op:%s.", op_desc->GetName().c_str()); + // To avoid same name between graphs, set session graph id to this node + NodePtr clean_addr_node = graph->AddNodeFront(op_desc); + return clean_addr_node; +} + +Status AtomicAddrCleanPass::LinkToAtomicNode(const NodePtr &atomic_node, NodePtr &atomic_clean_node) { + GE_IF_BOOL_EXEC(atomic_node == nullptr || atomic_clean_node == nullptr, + GELOGE(PARAM_INVALID, "param [atomic_node][atomic_clean_node] must not be null."); + return PARAM_INVALID); + InControlAnchorPtr in_ctrl_anchor = atomic_node->GetInControlAnchor(); + OutControlAnchorPtr out_ctrl_anchor = atomic_clean_node->GetOutControlAnchor(); + if (in_ctrl_anchor == nullptr || out_ctrl_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, "Get control anchor faild, dst node: %s.", atomic_node->GetName().c_str()); + return INTERNAL_ERROR; + } + + graphStatus status = GraphUtils::AddEdge(out_ctrl_anchor, in_ctrl_anchor); + if (status != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Graph add cleanAddrNode op out ctrl edge fail, dst node: %s.", + atomic_node->GetName().c_str()); + return INTERNAL_ERROR; + } + GELOGI("Graph add cleanAddrNode op out ctrl edge, dst node: %s.", atomic_node->GetName().c_str()); + std::string stream_label; + if (is_loop_graph && AttrUtils::GetStr(atomic_node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, stream_label)) { + if (!AttrUtils::SetStr(atomic_clean_node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, stream_label)) { + GELOGW("LinkToAtomicNode: SetStr failed"); + return INTERNAL_ERROR; 
+ } + } + return SUCCESS; +} + +bool AtomicAddrCleanPass::IsAtomicOp(const NodePtr &node) { + GE_IF_BOOL_EXEC(node == nullptr, GELOGE(FAILED, "node is null."); return false); + OpDescPtr op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + return false; + } + // 1.Check if isAtomic attrs exist for HCOM + std::shared_ptr instance_ptr = GELib::GetInstance(); + if ((instance_ptr == nullptr) || (!instance_ptr->InitFlag())) { + GELOGW("GELib not initialized"); + return false; + } + + OpsKernelManager &ops_kernel_manager = instance_ptr->OpsKernelManagerObj(); + vector op_info_vec = ops_kernel_manager.GetOpsKernelInfo(op_desc->GetType()); + for (const auto &op_info : op_info_vec) { + if (op_info.isAtomic) { + GELOGI("Recognized atomic op %s from HCCL engine.", op_desc->GetName().c_str()); + // check peer input is DATA + for (auto &in_data_anchor : node->GetAllInDataAnchors()) { + if (in_data_anchor->GetPeerOutAnchor() != nullptr && + in_data_anchor->GetPeerOutAnchor()->GetOwnerNode() != nullptr) { + auto peer_in_node = in_data_anchor->GetPeerOutAnchor()->GetOwnerNode(); + if (peer_in_node->GetType() == DATA) { + (void)AttrUtils::SetBool(peer_in_node->GetOpDesc(), "_need_memset", true); + GELOGI("Recognized atomic op %s from HCCL engine and input is DATA.", op_desc->GetName().c_str()); + return false; + } + } + } + hcom_node_vec_.push_back(node); + return true; + } + } + // 2.Check atomic attr in node + std::map> node_workspace_offset; + bool has_atomic_input = op_desc->HasAttr(ATOMIC_ATTR_INPUT_INDEX); + bool has_atomic_output = op_desc->HasAttr(ATOMIC_ATTR_OUTPUT_INDEX); + node_workspace_offset = op_desc->TryGetExtAttr(EXT_ATTR_ATOMIC_WORKSPACE_OFFSET, node_workspace_offset); + if (!has_atomic_input && !has_atomic_output && node_workspace_offset.empty()) { + return false; + } + + graphStatus ret = op_desc->SetAttr(ATOMIC_ATTR_IS_ATOMIC_NODE, GeAttrValue::CreateFrom(true)); + if (ret != GRAPH_SUCCESS) { + GELOGW("set attr ATOMIC_ATTR_IS_ATOMIC_NODE fail."); + } + 
GELOGI("Recognized atomic op %s from FE engine.", op_desc->GetName().c_str()); + return true; +} +} // namespace ge diff --git a/src/ge/graph/passes/atomic_addr_clean_pass.h b/src/ge/graph/passes/atomic_addr_clean_pass.h new file mode 100644 index 00000000..be7b30fe --- /dev/null +++ b/src/ge/graph/passes/atomic_addr_clean_pass.h @@ -0,0 +1,69 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_ATOMIC_ADDR_CLEAN_PASS_H_ +#define GE_GRAPH_PASSES_ATOMIC_ADDR_CLEAN_PASS_H_ + +#include + +#include "graph/graph.h" +#include "inc/graph_pass.h" + +namespace ge { +class AtomicAddrCleanPass : public GraphPass { + public: + Status Run(ComputeGraphPtr graph); + + private: + /// + /// HandleLoopGraph + /// @param graph + /// @return + /// + Status HandleLoopGraph(ComputeGraphPtr &graph, const vector &atomic_node_vec); + /// + /// HandleNormalGraph + /// @param graph + /// @return + /// + Status HandleNormalGraph(ComputeGraphPtr &graph, const vector &atomic_node_vec); + /// + /// Insert atomic clean node to graph + /// @param graph + /// @return + /// + NodePtr InsertAtomicAddrCleanNode(ComputeGraphPtr &graph); + + /// + /// Link control anchor from atomic clean node to atomic node + /// @param atomic_node + /// @param atomic_clean_node + /// @return + /// + Status LinkToAtomicNode(const NodePtr &atomic_node, NodePtr &atomic_clean_node); + + /// + /// Check if this node is atomic 
op. + /// @param node + /// @return + /// + bool IsAtomicOp(const NodePtr &node); + + vector hcom_node_vec_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_ATOMIC_ADDR_CLEAN_PASS_H_ diff --git a/src/ge/graph/passes/base_pass.cc b/src/ge/graph/passes/base_pass.cc new file mode 100644 index 00000000..eba17790 --- /dev/null +++ b/src/ge/graph/passes/base_pass.cc @@ -0,0 +1,213 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/base_pass.h" + +#include +#include + +#include "common/debug/log.h" +#include "framework/common/debug/ge_log.h" +#include "graph/compute_graph.h" +#include "graph/utils/graph_utils.h" + +namespace ge { +namespace { +const int kMaxRePassTimes = 1000; +const size_t kMaxOneInNodes = 1000; + +void GetAllNodesNoInputEdge(const ComputeGraphPtr &graph, std::queue &input_edge_nodes, + std::unordered_set &nodes_seen, std::unordered_set &nodes_last) { + nodes_last.clear(); + for (auto &node : graph->GetAllNodes()) { + if (node == nullptr) { + continue; + } + size_t in_nums = node->GetInNodes().size(); + if (in_nums == 0) { + input_edge_nodes.push(node); + nodes_seen.insert(node.get()); + } else if (in_nums > kMaxOneInNodes) { + nodes_last.insert(node); + } + } +} + +void AddNextIterNodes(const Node::Vistor &nodes, std::queue &nodes_to_pass, + std::unordered_set &nodes_seen, std::unordered_set &nodes_last) { + for (auto &node : nodes) { + if (node == nullptr) { + continue; + } + if (nodes_last.count(node) != 0) { + continue; + } + + bool all_in_nodes_seen = node->IsAllInNodesSeen(nodes_seen); + if (all_in_nodes_seen && nodes_seen.insert(node.get()).second) { + nodes_to_pass.push(node); + } + } +} + +Status RunPasses(NodePtr &node, const NamesToPass &names_to_passes, std::unordered_set &nodes_re_pass, + std::unordered_set &nodes_deleted, std::unordered_set &nodes_seen) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + GELOGD("Begin to run pass for node %s", node->GetName().c_str()); + for (const auto &name_to_pass : names_to_passes) { + if (name_to_pass.second == nullptr) { + GELOGE(INTERNAL_ERROR, "There is null pointer in passes(%s), skip it", name_to_pass.first.c_str()); + continue; + } + + GELOGD("Begin to run pass %s", name_to_pass.first.c_str()); + name_to_pass.second->init(); + auto result = name_to_pass.second->Run(node); + if (result != SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to process pass 
%s on node %s, result " + "%u, the passes will be terminated immediately.", + name_to_pass.first.c_str(), node->GetName().c_str(), result); + return result; + } + + auto nodes_to_re_pass = name_to_pass.second->GetNodesNeedRePass(); + for (const auto &node_to_re_pass : nodes_to_re_pass) { + if (node_to_re_pass == nullptr) { + GELOGW("Found null re-pass node when executing %s on node %s type %s", name_to_pass.first.c_str(), + node->GetName().c_str(), node->GetType().c_str()); + continue; + } + if (node_to_re_pass->IsAllInNodesSeen(nodes_seen)) { + GELOGD("The node %s will be re-pass later", node_to_re_pass->GetName().c_str()); + nodes_re_pass.insert(node_to_re_pass); + } else { + GELOGD("The node %s are not all seen, don't set repass this time", node_to_re_pass->GetName().c_str()); + } + } + + auto nodes_deleted_by_pass = name_to_pass.second->GetNodesDeleted(); + nodes_deleted.insert(nodes_deleted_by_pass.begin(), nodes_deleted_by_pass.end()); + if (nodes_deleted_by_pass.count(node.get()) > 0) { + GELOGD("The node %s was deleted by pass %s, stop the remain passes", node->GetName().c_str(), + name_to_pass.first.c_str()); + break; + } + } + + return SUCCESS; +} +} // namespace + +Status BaseNodePass::IsolateAndDeleteNode(NodePtr &node, const std::vector &io_map) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + GELOGI("Prepare to isolate and delete node, name:%s, type:%s.", node->GetName().c_str(), + node->GetType().c_str()); + ComputeGraphPtr graph = node->GetOwnerComputeGraph(); + if (graph == nullptr) { + GELOGE(FAILED, "[%s] The owner graph must not be null.", node->GetName().c_str()); + return FAILED; + } + + AddRePassNodesWithInOut(node); + + if (GraphUtils::IsolateNode(node, io_map) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[%s] IsolateNode failed.", node->GetName().c_str()); + return FAILED; + } + + if (GraphUtils::RemoveNodeWithoutRelink(graph, node) != SUCCESS) { + GELOGE(FAILED, "[%s] RemoveNodeWithoutRelink failed.", 
node->GetName().c_str()); + return FAILED; + } + + AddNodeDeleted(node.get()); + return SUCCESS; +} + +Status GEPass::Run(const NamesToPass &names_to_passes) { + if (graph_ == nullptr) { + GELOGE(INTERNAL_ERROR, "The graph is null"); + return INTERNAL_ERROR; + } + if (names_to_passes.empty()) { + GELOGW("No passes input, the GEPass will do nothing"); + return INTERNAL_ERROR; + } + + GELOGD("Begin to run pass on graph, passes count %zu", names_to_passes.size()); + std::queue nodes; + std::unordered_set nodes_seen; + std::unordered_set nodes_deleted; + std::unordered_set nodes_re_pass; + std::unordered_set nodes_last; + GetAllNodesNoInputEdge(graph_, nodes, nodes_seen, nodes_last); + GELOGD("Start points count %zu", nodes.size()); + int re_pass_times = 0; + + do { + for (auto &node : nodes_re_pass) { + nodes.push(node); + nodes_seen.insert(node.get()); + } + nodes_re_pass.clear(); + + while (!nodes.empty()) { + NodePtr node = nodes.front(); + nodes.pop(); + + (void)nodes_re_pass.erase(node); + GE_IF_BOOL_EXEC(node == nullptr, GELOGW("node is null"); continue); + if (nodes_deleted.count(node.get()) > 0) { + GELOGD("The node %s was deleted before, skip it.", node->GetName().c_str()); + continue; + } + + AddNextIterNodes(node->GetOutNodes(), nodes, nodes_seen, nodes_last); + + auto ret = RunPasses(node, names_to_passes, nodes_re_pass, nodes_deleted, nodes_seen); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to process passes on node %s type %s," + " error code: %u", + node->GetName().c_str(), node->GetType().c_str(), ret); + return INTERNAL_ERROR; + } + } + + for (auto &node : nodes_last) { + bool all_in_nodes_seen = node->IsAllInNodesSeen(nodes_seen); + if (all_in_nodes_seen && nodes_seen.insert(node.get()).second) { + nodes.push(node); + } + } + nodes_last.clear(); + } while ((!nodes_re_pass.empty() || !nodes.empty()) && ++re_pass_times < kMaxRePassTimes); + + if (re_pass_times == kMaxRePassTimes) { + GELOGW("re_pass_times should not come to %d", 
kMaxRePassTimes); + } + GELOGD("All passes runs end"); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/base_pass.h b/src/ge/graph/passes/base_pass.h new file mode 100644 index 00000000..53eab006 --- /dev/null +++ b/src/ge/graph/passes/base_pass.h @@ -0,0 +1,112 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_BASE_PASS_H_ +#define GE_GRAPH_PASSES_BASE_PASS_H_ + +#include +#include +#include +#include +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" +#include "graph/compute_graph.h" +#include "graph/utils/op_desc_utils.h" + +namespace ge { +class BaseNodePass { + public: + /// + /// Optimize on one node. the function can add nodes to the graph, change + /// connections between nodes while optimizing or remove nodes from the graph. 
+ /// @param node + /// @return + /// + virtual Status Run(NodePtr &node) = 0; + + virtual ~BaseNodePass() = default; + + std::unordered_set GetNodesNeedRePass() { return nodes_need_re_pass_; } + + std::unordered_set GetNodesDeleted() { return nodes_deleted_; } + + void init() { + nodes_need_re_pass_.clear(); + nodes_deleted_.clear(); + } + + protected: + Status IsolateAndDeleteNode(NodePtr &node, const std::vector &io_map); + + Status IsolateAndDeleteNode(NodePtr &node, const std::initializer_list &io_map) { + return IsolateAndDeleteNode(node, std::vector(io_map)); + } + + /// + /// Add a node to be optimized again. If you add a new node to the graph, or + /// change a node connections, and you want to make sure the node will be + /// optimized by other passes, call this function. + /// @param node + /// + void AddRePassNode(NodePtr &node) { nodes_need_re_pass_.insert(node); } + + /// + /// Add a node and it's input/output data nodes to be optimized again. + /// @param node + /// + void AddRePassNodesWithInOut(NodePtr &node) { + AddRePassNode(node); + auto out_nodes = node->GetOutNodes(); + for (auto &out_node : out_nodes) { + AddRePassNode(out_node); + } + auto in_nodes = node->GetInNodes(); + for (auto &in_node : in_nodes) { + AddRePassNode(in_node); + } + } + + /// + /// If you deleted a node from the graph, especially current node. The remain + /// iterate passes will continue process on the deleted node(if it can be + /// reached by edge connections) till the last one. Obviously it is a waste of + /// time. You can add the deleted nodes by calling this function, to stop the + /// next iterations. 
+ /// @param node + /// + void AddNodeDeleted(Node *node) { nodes_deleted_.insert(node); } + + private: + std::unordered_set nodes_need_re_pass_; + std::unordered_set nodes_deleted_; +}; + +using NamesToPass = std::vector>; + +class GEPass { + public: + explicit GEPass(ComputeGraphPtr &graph) : graph_(graph) {} + virtual ~GEPass() = default; + Status Run(const NamesToPass &names_to_passes); + + private: + ComputeGraphPtr graph_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_BASE_PASS_H_ diff --git a/src/ge/graph/passes/cast_translate_pass.cc b/src/ge/graph/passes/cast_translate_pass.cc new file mode 100644 index 00000000..e014db40 --- /dev/null +++ b/src/ge/graph/passes/cast_translate_pass.cc @@ -0,0 +1,296 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/cast_translate_pass.h" + +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/omg_util.h" +#include "graph/passes/pass_utils.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" +#include "opskernel_manager/ops_kernel_manager.h" + +namespace ge { +bool CastTranslatePass::CheckInAndOutDataAnchor(NodePtr &node) const { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return false; + } + if (node->GetOpDesc() == nullptr) { + GELOGW("Param [node] op desc is null."); + return false; + } + + auto in_anchors = node->GetAllInDataAnchors(); + auto out_anchors = node->GetAllOutDataAnchors(); + // Cast|Translate has one input one output data anchor + if (in_anchors.size() != 1 || out_anchors.size() != 1) { + return false; + } + return true; +} + +bool CastTranslatePass::IsCastNode(NodePtr &node) const { + std::string original_type; + GE_IF_BOOL_EXEC(GetOriginalType(node, original_type) != SUCCESS, + GELOGW("get original type failed"); return false); + return (original_type == CAST); +} + +bool CastTranslatePass::IsTranslateNode(NodePtr &node) const { + std::string original_type; + GE_IF_BOOL_EXEC(GetOriginalType(node, original_type) != SUCCESS, + GELOGW("get original type failed"); return false); + return (original_type == TRANSLATE); +} + +bool CastTranslatePass::IsSameCastOrTranslate(NodePtr &node, NodePtr &base_node) const { + GE_IF_BOOL_EXEC(node == nullptr, GELOGW("node is null."); return false); + GE_IF_BOOL_EXEC(base_node == nullptr, GELOGW("base_node is null."); return false); + auto op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, return false); + auto base_op_desc = base_node->GetOpDesc(); + GE_IF_BOOL_EXEC(base_op_desc == nullptr, return false); + auto in_desc = op_desc->MutableInputDesc(0); + auto out_desc = op_desc->MutableOutputDesc(0); + auto 
base_in_desc = base_op_desc->MutableInputDesc(0); + auto base_out_desc = base_op_desc->MutableOutputDesc(0); + GE_IF_BOOL_EXEC(in_desc == nullptr, GELOGW("in_desc is null."); return false); + GE_IF_BOOL_EXEC(out_desc == nullptr, GELOGW("out_desc is null."); return false); + GE_IF_BOOL_EXEC(base_in_desc == nullptr, GELOGW("base_in_desc is null."); return false); + GE_IF_BOOL_EXEC(base_out_desc == nullptr, GELOGW("base_out_desc is null."); return false); + if (in_desc->GetDataType() == base_in_desc->GetDataType() && + out_desc->GetDataType() == base_out_desc->GetDataType() && in_desc->GetFormat() == base_in_desc->GetFormat() && + out_desc->GetFormat() == base_out_desc->GetFormat()) { + return true; + } + GELOGD("Output node [%s] isn't the same Cast or Translate.", node->GetName().c_str()); + return false; +} + +bool CastTranslatePass::IsNodeNeedOptimize(NodePtr &node) const { + if (CheckInAndOutDataAnchor(node) && (IsCastNode(node) || IsTranslateNode(node))) { + return true; + } + return false; +} + +bool CastTranslatePass::CheckDstNode(NodePtr &out_node, bool &is_src_cast) const { + return (CheckInAndOutDataAnchor(out_node) && + ((!is_src_cast && IsCastNode(out_node)) || (is_src_cast && IsTranslateNode(out_node)))); +} + +bool CastTranslatePass::IsNextNodeNeedOptimize(NodePtr &node, bool &is_src_cast) const { + GE_IF_BOOL_EXEC(node == nullptr, GELOGW("cast_node is null."); return false); + const std::string &node_name = node->GetName(); + auto out_data_nodes = node->GetOutDataNodes(); + if (out_data_nodes.empty()) { + return false; + } + auto &out_node = out_data_nodes.at(0); + bool is_first = true; + // Cast-->all Translate; Translate-->all Cast + for (auto &out_data_node : out_data_nodes) { + if (out_data_node == nullptr) { + continue; + } + if (CheckDstNode(out_data_node, is_src_cast) && (is_first || IsSameCastOrTranslate(out_data_node, out_node))) { + is_first = false; + continue; + } + GELOGD("[%s] Output node is %s, can't optimize.", node_name.c_str(), 
out_data_node->GetType().c_str()); + return false; + } + + GELOGD("[%s] %zu dst nodes have the same input and output.", node_name.c_str(), out_data_nodes.size()); + return true; +} + +bool CastTranslatePass::IsOpSupportedOptimize(NodePtr &cast_node, NodePtr &trans_node, bool &is_src_cast) { + GE_IF_BOOL_EXEC(cast_node == nullptr, GELOGW("cast_node is null."); return false); + GE_IF_BOOL_EXEC(trans_node == nullptr, GELOGW("trans_node is null."); return false); + OpDescPtr trans_op_desc = trans_node->GetOpDesc(); + GE_IF_BOOL_EXEC(trans_op_desc == nullptr, GELOGW("trans_op_desc is null."); return false); + // backup datatype + DataType trans_in_datatype = trans_op_desc->GetInputDesc(0).GetDataType(); + DataType trans_out_datatype = trans_op_desc->GetOutputDesc(0).GetDataType(); + + auto cast_op_desc = cast_node->GetOpDesc(); + GE_IF_BOOL_EXEC(cast_op_desc == nullptr, GELOGW("cast_op_desc is null."); return false); + DataType cast_in_datatype = cast_op_desc->GetInputDesc(0).GetDataType(); + DataType cast_out_datatype = cast_op_desc->GetOutputDesc(0).GetDataType(); + GELOGI("CastTranslatePass, cast in %s out %s, translate in %s out %s.", + TypeUtils::DataTypeToSerialString(cast_in_datatype).c_str(), + TypeUtils::DataTypeToSerialString(cast_out_datatype).c_str(), + TypeUtils::DataTypeToSerialString(trans_in_datatype).c_str(), + TypeUtils::DataTypeToSerialString(trans_out_datatype).c_str()); + + if (is_src_cast) { + // A-->Cast-->Translate + // change Translate input datatype to be the input of Cast + // then delete Cast + // [MutableInputDesc guarantees non empty throughout the process] + trans_op_desc->MutableInputDesc(0)->SetDataType(cast_in_datatype); + } else { + // Translate-->Cast-->A + // change Translate output datatype to be the output of Cast + // then delete Cast + // [MutableInputDesc guarantees non empty throughout the process] + trans_op_desc->MutableOutputDesc(0)->SetDataType(cast_out_datatype); + } + + if (!TranslateCheckAccuracySupported(trans_op_desc)) 
{ + if (is_src_cast) { + trans_op_desc->MutableInputDesc(0)->SetDataType(trans_in_datatype); + } else { + trans_op_desc->MutableOutputDesc(0)->SetDataType(trans_out_datatype); + } + GELOGW("CheckAccuracySupported fail, don't delete Cast[%s].", cast_node->GetName().c_str()); + return false; + } + + if (is_src_cast) { + GE_IF_BOOL_EXEC( + !AttrUtils::SetInt(trans_op_desc, ATTR_NAME_INPUT_DATATYPE, static_cast(cast_in_datatype)), + GELOGW("set ATTR_NAME_INPUT_DATATYPE failed"); return false); + } else { + GE_IF_BOOL_EXEC( + !AttrUtils::SetInt(trans_op_desc, ATTR_NAME_OUTPUT_DATATYPE, static_cast(cast_out_datatype)), + GELOGW("set ATTR_NAME_INPUT_DATATYPE failed"); return false); + } + GELOGI("CastTranslatePass, translate in %d out %d.", trans_op_desc->GetInputDesc(0).GetDataType(), + trans_op_desc->GetOutputDesc(0).GetDataType()); + return true; +} + +bool CastTranslatePass::CheckOpSupportOptimize(NodePtr &node, bool &is_src_cast) { + GE_IF_BOOL_EXEC(node == nullptr, GELOGE(FAILED, "node is null."); return false); + auto out_node = node->GetOutDataNodes().at(0); + // N dst nodes have the same datatype and format, check the first node + if (is_src_cast) { + return IsOpSupportedOptimize(node, out_node, is_src_cast); + } else { + return IsOpSupportedOptimize(out_node, node, is_src_cast); + } +} + +Status CastTranslatePass::Run(NodePtr &node) { + GE_CHECK_NOTNULL(node); + + bool is_src_cast = IsCastNode(node); + if (!IsNodeNeedOptimize(node) || !IsNextNodeNeedOptimize(node, is_src_cast)) { + return SUCCESS; + } + + GELOGI("CastTranslatePass, optimize %s.", node->GetName().c_str()); + if (CheckOpSupportOptimize(node, is_src_cast)) { + if (is_src_cast) { + if (FuseDstNTranslates(node) != SUCCESS) { + return FAILED; + } + return IsolateAndDeleteNode(node, {0}); + } else { + auto out_data_nodes = node->GetOutDataNodes(); + for (auto &out_data_node : out_data_nodes) { + if (out_data_node == nullptr) { + continue; + } + if (IsolateAndDeleteNode(out_data_node, {0}) != SUCCESS) { 
+ return FAILED; + } + } + } + } + + return SUCCESS; +} + +Status CastTranslatePass::FuseDstNTranslates(NodePtr &node) { + GE_CHECK_NOTNULL(node); + auto out_data_nodes = node->GetOutDataNodes(); + size_t nums = out_data_nodes.size(); + if (nums == 1) { + return SUCCESS; + } + + auto &base_node = out_data_nodes.at(0); + GE_CHECK_NOTNULL(base_node); + for (size_t i = 1; i < nums; i++) { + auto &out_data_node = out_data_nodes.at(i); + GE_CHECK_NOTNULL(out_data_node); + AddRePassNodesWithInOut(out_data_node); + // Has checked nodes only has one in data anchor one out data anchor + GE_CHK_STATUS_RET(NodeUtils::MoveOutputEdges(out_data_node, base_node), "move out put edge failed"); + + // Relink in control anchor, delete in data anchor + auto in_ctr_anchor = out_data_node->GetInControlAnchor(); + GE_CHECK_NOTNULL(in_ctr_anchor); + for (const auto &peer_anchor : in_ctr_anchor->GetPeerOutControlAnchors()) { + GE_CHECK_NOTNULL(base_node->GetInControlAnchor()); + GE_CHK_STATUS_RET(base_node->GetInControlAnchor()->LinkFrom(peer_anchor), "link from peer anchor failed"); + } + in_ctr_anchor->UnlinkAll(); + out_data_node->GetAllInDataAnchors().at(0)->UnlinkAll(); + + ComputeGraphPtr graph = out_data_node->GetOwnerComputeGraph(); + GE_CHECK_NOTNULL(graph); + if (GraphUtils::RemoveNodeWithoutRelink(graph, out_data_node) != SUCCESS) { + GELOGE(FAILED, "[%s] RemoveNodeWithoutRelink failed.", out_data_node->GetName().c_str()); + return FAILED; + } + AddNodeDeleted(out_data_node.get()); + } + + return SUCCESS; +} + +bool CastTranslatePass::TranslateCheckAccuracySupported(const OpDescPtr &op_desc) { + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if ((instance_ptr == nullptr) || (!instance_ptr->InitFlag())) { + GELOGW("GE is not initialized or is finalized."); + return false; + } + + OpsKernelManager &ops_kernel_manager = instance_ptr->OpsKernelManagerObj(); + GE_IF_BOOL_EXEC(op_desc == nullptr, GELOGE(FAILED, "Opdesc is nullptr"); return false); + vector op_infos = 
ops_kernel_manager.GetOpsKernelInfo(op_desc->GetType()); + if (op_infos.empty()) { + GELOGI("Can not get op info by op type %s", op_desc->GetType().c_str()); + return false; + } + + std::string unsupported_reason; + for (auto &it : op_infos) { + auto kernel_map = ops_kernel_manager.GetAllOpsKernelInfoStores(); + auto &kernel_name = it.opKernelLib; + auto kernel_info_store = kernel_map.find(kernel_name); + if (kernel_info_store != kernel_map.end()) { + if (kernel_info_store->second != nullptr && + kernel_info_store->second->CheckAccuracySupported(op_desc, unsupported_reason)) { + return true; + } + } + } + GELOGI("CastTranslatePass CheckAccuracySupported[%s] fail.", op_desc->GetName().c_str()); + return false; +} +} // namespace ge diff --git a/src/ge/graph/passes/cast_translate_pass.h b/src/ge/graph/passes/cast_translate_pass.h new file mode 100644 index 00000000..a802fe9e --- /dev/null +++ b/src/ge/graph/passes/cast_translate_pass.h @@ -0,0 +1,41 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_CAST_TRANSLATE_PASS_H_ +#define GE_GRAPH_PASSES_CAST_TRANSLATE_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class CastTranslatePass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; + + private: + bool CheckInAndOutDataAnchor(NodePtr &node) const; + bool IsCastNode(NodePtr &node) const; + bool IsTranslateNode(NodePtr &node) const; + bool IsSameCastOrTranslate(NodePtr &node, NodePtr &base_node) const; + bool IsNodeNeedOptimize(NodePtr &node) const; + bool CheckDstNode(NodePtr &out_node, bool &is_src_cast) const; + bool IsNextNodeNeedOptimize(NodePtr &node, bool &is_src_cast) const; + bool IsOpSupportedOptimize(NodePtr &cast_node, NodePtr &trans_node, bool &is_src_cast); + bool CheckOpSupportOptimize(NodePtr &node, bool &is_src_cast); + Status FuseDstNTranslates(NodePtr &node); + bool TranslateCheckAccuracySupported(const OpDescPtr &op_desc); +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_CAST_TRANSLATE_PASS_H_ diff --git a/src/ge/graph/passes/compile_nodes_pass.cc b/src/ge/graph/passes/compile_nodes_pass.cc new file mode 100644 index 00000000..dfddc6ce --- /dev/null +++ b/src/ge/graph/passes/compile_nodes_pass.cc @@ -0,0 +1,129 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/compile_nodes_pass.h" + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/ge/ge_util.h" +#include "graph/op_desc.h" +#include "graph/debug/ge_attr_define.h" + +namespace { +const char *const kAICPUEngineName = "DNN_VM_AICPU"; +const char *const kAICPUKernelLibName = "aicpu_kernel"; +} // namespace + +namespace ge { +graphStatus CompileNodesPass::CompileOp(NodePtr node, + const std::shared_ptr &instance, + const string &kernel_lib_name) { + GE_CHECK_NOTNULL(node); + GE_CHECK_NOTNULL(instance); + OpsKernelInfoStorePtr kernel_info = instance->OpsKernelManagerObj().GetOpsKernelInfoStore(kernel_lib_name); + if (kernel_info == nullptr) { + GELOGE(ge::GE_GRAPH_PARAM_NULLPTR, "Get op %s ops kernel info store failed", node->GetName().c_str()); + return ge::GE_GRAPH_PARAM_NULLPTR; + } + + // check if support + auto op_desc = node->GetOpDesc(); + auto ge_desc = MakeShared(op_desc); + if (ge_desc == nullptr) { + GELOGE(GE_GRAPH_MEMORY_ALLOC_FAILED, "Fail to malloc op desc."); + return FAILED; + } + string reason; + if (!(kernel_info->CheckAccuracySupported(*ge_desc, reason, true))) { + GELOGW("Check Accuracy Supported failed, go to aicpu engine, node name is %s, reason: %s", node->GetName().c_str(), + reason.c_str()); + op_desc->SetOpEngineName(kAICPUEngineName); + op_desc->SetOpKernelLibName(kAICPUKernelLibName); + } else { + // TBE compile op + vector node_vec = {node}; + auto ret = kernel_info->CompileOp(node_vec); + if (ret != ge::SUCCESS) { + GELOGE(ret, "Compile single op failed, node name is %s", node->GetName().c_str()); + return GRAPH_FAILED; + } + } + + return GRAPH_SUCCESS; +} + +graphStatus CompileNodesPass::CompileNode(const NodePtr &node, const std::shared_ptr &instance) { + GE_CHECK_NOTNULL(node); + GE_CHECK_NOTNULL(instance); + auto op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(ge::GE_GRAPH_PARAM_NULLPTR, "Get op %s opdesc failed", 
node->GetName().c_str()); + return ge::GE_GRAPH_PARAM_NULLPTR; + } + string kernel_lib_name = op_desc->GetOpKernelLibName(); + if (kernel_lib_name.empty()) { + // reset op kernel lib + (void)instance->DNNEngineManagerObj().GetDNNEngineName(op_desc); + kernel_lib_name = op_desc->GetOpKernelLibName(); + if (kernel_lib_name.empty()) { + GELOGE(GRAPH_FAILED, "Get node:%s, type:%s kernel lib failed.", node->GetName().c_str(), + op_desc->GetType().c_str()); + return GRAPH_FAILED; + } + } + + return CompileOp(node, instance, kernel_lib_name); +} + +graphStatus CompileNodesPass::Run(ComputeGraphPtr graph) { + GELOGI("[CompileNodesPass]: optimize begin."); + if (graph == nullptr) { + return GRAPH_SUCCESS; + } + + std::shared_ptr instance = ge::GELib::GetInstance(); + if (instance == nullptr || !instance->InitFlag()) { + GELOGE(ge::GE_CLI_GE_NOT_INITIALIZED, "Run CompileNodesPass failed."); + return ge::GE_CLI_GE_NOT_INITIALIZED; + } + + for (auto &node : graph->GetAllNodes()) { + if (node == nullptr) { + continue; + } + auto op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + continue; + } + auto node_need_compile = false; + (void) ge::AttrUtils::GetBool(op_desc, ATTR_NEED_COMPILE, node_need_compile); + if (!node_need_compile) { + continue; + } + + auto ret = CompileNode(node, instance); + if (ret != GRAPH_SUCCESS) { + return ret; + } + } + + GELOGI("[CompileNodesPass]: Optimize success."); + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/compile_nodes_pass.h b/src/ge/graph/passes/compile_nodes_pass.h new file mode 100644 index 00000000..cd5622ed --- /dev/null +++ b/src/ge/graph/passes/compile_nodes_pass.h @@ -0,0 +1,41 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_COMPILE_NODES_PASS_H_ +#define GE_GRAPH_PASSES_COMPILE_NODES_PASS_H_ + +#include +#include +#include "init/gelib.h" +#include "inc/graph_pass.h" + +namespace ge { +/// +/// compile nodes +/// +class CompileNodesPass : public GraphPass { + public: + CompileNodesPass() {} + virtual ~CompileNodesPass() {} + + graphStatus Run(ComputeGraphPtr graph) override; + private: + graphStatus CompileNode(const NodePtr &node, const std::shared_ptr &instance); + graphStatus CompileOp(NodePtr node, const std::shared_ptr &instance, const string &kernel_lib_name); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_COMPILE_NODES_PASS_H_ diff --git a/src/ge/graph/passes/constant_folding_pass.cc b/src/ge/graph/passes/constant_folding_pass.cc new file mode 100644 index 00000000..dc4f4b90 --- /dev/null +++ b/src/ge/graph/passes/constant_folding_pass.cc @@ -0,0 +1,76 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/constant_folding_pass.h" + +#include + +#include "common/debug/log.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel.h" + +namespace ge { +Status ConstantFoldingPass::Run(ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + GELOGD("Begin to run constant folding on node %s", node->GetName().c_str()); + + OpDescPtr node_desc = node->GetOpDesc(); + if (node_desc == nullptr) { + return SUCCESS; + } + DataType data_type = node_desc->GetOutputDesc(0).GetDataType(); + Format format = node_desc->GetOutputDesc(0).GetFormat(); + GELOGD("current [node:%s, type:%s] info: format: %s, datatype:%s", node->GetName().c_str(), node->GetType().c_str(), + TypeUtils::FormatToSerialString(format).c_str(), TypeUtils::DataTypeToSerialString(data_type).c_str()); + auto input_nodes = OpDescUtils::GetConstInputNode(*node); + if (input_nodes.empty() || input_nodes.size() != node_desc->GetInputsSize()) { + GELOGI("Const input nodes size is %zu, and nodeDesc inputsSize is %zu.", + input_nodes.size(), node_desc->GetInputsSize()); + return SUCCESS; + } + + auto op_kernel = folding_pass::GetKernelByType(node); + if (op_kernel == nullptr) { + GELOGD("No op kernel for node %s type %s, skip the constant folding", node->GetName().c_str(), + node->GetType().c_str()); + return SUCCESS; + } + auto inputs = OpDescUtils::GetInputData(input_nodes); + vector outputs; + auto ret = op_kernel->Compute(node_desc, inputs, outputs); + if (ret != SUCCESS) { + if (ret == NOT_CHANGED) { + return SUCCESS; + } + GELOGE(INTERNAL_ERROR, "Calculate for node %s failed in constant folding", node->GetName().c_str()); + return ret; + } + if (outputs.empty()) { + GELOGE(INTERNAL_ERROR, + "Failed to constant folding on node %s," + " no output weight", + node->GetName().c_str()); + return INTERNAL_ERROR; + } 
+ + return Folding(node, outputs); +} +} // namespace ge diff --git a/src/ge/graph/passes/constant_folding_pass.h b/src/ge/graph/passes/constant_folding_pass.h new file mode 100644 index 00000000..1dcbcdc3 --- /dev/null +++ b/src/ge/graph/passes/constant_folding_pass.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_CONSTANT_FOLDING_PASS_H_ +#define GE_GRAPH_PASSES_CONSTANT_FOLDING_PASS_H_ + +#include +#include + +#include "graph/passes/folding_pass.h" + +namespace ge { +class ConstantFoldingPass : public FoldingPass { + public: + Status Run(ge::NodePtr &node) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_CONSTANT_FOLDING_PASS_H_ diff --git a/src/ge/graph/passes/constant_fuse_same_pass.cc b/src/ge/graph/passes/constant_fuse_same_pass.cc new file mode 100644 index 00000000..7bf46947 --- /dev/null +++ b/src/ge/graph/passes/constant_fuse_same_pass.cc @@ -0,0 +1,195 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/constant_fuse_same_pass.h" + +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace { +const size_t kCorrectNum = 1; +const char *const kOriginElementNumAttrName = "origin_element_num"; + +bool CheckConstInAndOut(const NodePtr &node) { + // has none in control + // has one out data anchor + if ((node->GetInControlNodes().empty()) && (node->GetAllOutDataAnchorsSize() == kCorrectNum)) { + return true; + } + return false; +} + +void GetOutDataNodeToIndexMap(NodePtr &node, std::map &out_node_to_indexs) { + auto out_data_anchor = node->GetOutDataAnchor(0); + GE_CHECK_NOTNULL_JUST_RETURN(out_data_anchor); + auto peer_in_anchors = out_data_anchor->GetPeerInDataAnchors(); + if (!peer_in_anchors.empty()) { + for (auto &anchor : peer_in_anchors) { + int index = anchor->GetIdx(); + NodePtr out_node = anchor->GetOwnerNode(); + if (out_node == nullptr) { + continue; + } + string key_name = out_node->GetName() + "-" + std::to_string(index); + out_node_to_indexs[key_name] = anchor; + } + } +} +} // namespace + +Status ConstantFuseSamePass::Run(ge::ComputeGraphPtr graph) { + if (graph == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "Compute graph is null."); + return GE_GRAPH_PARAM_NULLPTR; + } + GELOGI("ConstantFuseSamePass in."); + + std::map> fuse_nodes; + 
GetFuseConstNodes(graph, fuse_nodes); + + return FuseConstNodes(graph, fuse_nodes); +} + +void ConstantFuseSamePass::GetFuseConstNodes(ComputeGraphPtr &graph, + std::map> &fuse_nodes) { + int total_const_nums = 0; + int insert_const_nums = 0; + for (auto &node : graph->GetDirectNode()) { + if (node->GetType() != CONSTANT && node->GetType() != CONSTANTOP) { + continue; + } + OpDescPtr op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + continue; + } + ++total_const_nums; + + if (!CheckConstInAndOut(node)) { + GELOGD("The const %s does not support to fusion, skip it", node->GetName().c_str()); + continue; + } + + GeTensorPtr weight; + if (!AttrUtils::MutableTensor(op_desc, ATTR_NAME_WEIGHTS, weight)) { + GELOGW("The const node %s does not have weight attr, skip it", node->GetName().c_str()); + continue; + } + int64_t origin_element_num = -1; + if (!AttrUtils::GetInt(weight->MutableTensorDesc(), kOriginElementNumAttrName, origin_element_num)) { + GELOGI("The const %s does not have origin element num attribute, skip it", node->GetName().c_str()); + continue; + } + if (origin_element_num != 1) { + GELOGI("The const %s origin element num %ld, does not support to fusion now", node->GetName().c_str(), + origin_element_num); + continue; + } + + auto output_tensor = op_desc->MutableOutputDesc(0); + if (output_tensor == nullptr) { + GELOGW("The const %s does not have output 0, skip to fusion", node->GetName().c_str()); + continue; + } + auto data_type = output_tensor->GetDataType(); + auto type_size = GetSizeByDataType(data_type); + if (type_size < 0) { + GELOGI("The data type of const %s does not support fusion, data type %s", node->GetName().c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str()); + continue; + } + ++insert_const_nums; + + SameConstKey map_key; + map_key.data_size = type_size; + map_key.data = weight->GetData().GetData(); + map_key.data_type = data_type; + map_key.format = output_tensor->GetFormat(); + map_key.shape = 
output_tensor->GetShape().GetDims(); + fuse_nodes[map_key].emplace_back(node); + GELOGD("ConstantFuseSamePass, format %s, datatype %s, data_size %d, shape_size %zu. node name %s", + TypeUtils::FormatToSerialString(map_key.format).c_str(), + TypeUtils::DataTypeToSerialString(map_key.data_type).c_str(), map_key.data_size, map_key.shape.size(), + node->GetName().c_str()); + } + GELOGI("ConstantFuseSamePass, total_const_nums %d, insert_const_nums %d, fuse_nodes size is %zu.", total_const_nums, + insert_const_nums, fuse_nodes.size()); +} + +Status ConstantFuseSamePass::MoveOutDataEdges(NodePtr &src_node, NodePtr &dst_node) { + // key is node_name-in_index + std::map src_out_node_to_indexs; + GetOutDataNodeToIndexMap(src_node, src_out_node_to_indexs); + if (src_out_node_to_indexs.empty()) { + return SUCCESS; + } + + std::map dst_out_node_to_indexs; + GetOutDataNodeToIndexMap(dst_node, dst_out_node_to_indexs); + + auto dst_out_data_anchor = dst_node->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(dst_out_data_anchor); + auto src_out_data_anchor = src_node->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(src_out_data_anchor); + src_out_data_anchor->UnlinkAll(); + for (auto it = src_out_node_to_indexs.begin(); it != src_out_node_to_indexs.end(); ++it) { + if (dst_out_node_to_indexs.count(it->first) > 0) { + continue; // exclusion of duplication + } + auto ret = dst_out_data_anchor->LinkTo(it->second); + if (ret != SUCCESS) { + GELOGE(FAILED, "Failed to move out data edge from %s to %s", src_node->GetName().c_str(), + dst_node->GetName().c_str()); + return FAILED; + } + } + return SUCCESS; +} + +Status ConstantFuseSamePass::FuseConstNodes(ComputeGraphPtr &graph, + std::map> &fuse_nodes) { + for (auto iter = fuse_nodes.begin(); iter != fuse_nodes.end(); ++iter) { + auto nodes = iter->second; + size_t len = nodes.size(); + auto first_node = nodes.at(0); + for (size_t i = 1; i < len; ++i) { + auto node = nodes.at(i); + // the const node which can be fused has none input(both data and control 
in) + if (GraphUtils::MoveOutCtrlEdges(node, first_node) != SUCCESS) { + return FAILED; + } + if (MoveOutDataEdges(node, first_node) != SUCCESS) { + return FAILED; + } + if (GraphUtils::RemoveNodeWithoutRelink(graph, node) != SUCCESS) { + GELOGE(FAILED, "[%s] RemoveNodeWithoutRelink failed.", node->GetName().c_str()); + return FAILED; + } + } + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/constant_fuse_same_pass.h b/src/ge/graph/passes/constant_fuse_same_pass.h new file mode 100644 index 00000000..fffb784c --- /dev/null +++ b/src/ge/graph/passes/constant_fuse_same_pass.h @@ -0,0 +1,74 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_CONSTANT_FUSE_SAME_PASS_H_ +#define GE_GRAPH_PASSES_CONSTANT_FUSE_SAME_PASS_H_ + +#include +#include +#include +#include + +#include "graph/types.h" +#include "inc/graph_pass.h" + +namespace ge { +struct SameConstKey { + int data_size; + const uint8_t *data; + DataType data_type; + Format format; + std::vector shape; + + public: + bool operator<(const SameConstKey &key) const { + if (data_size != key.data_size) { + return data_size < key.data_size; + } + int ret = memcmp(data, key.data, data_size); + if (ret != 0) { + return ret < 0; + } + if (data_type != key.data_type) { + return data_type < key.data_type; + } + if (format != key.format) { + return format < key.format; + } + size_t shape_size = shape.size(); + if (shape_size != key.shape.size()) { + return shape_size < key.shape.size(); + } + for (size_t i = 0; i < shape_size; ++i) { + if (shape.at(i) != key.shape.at(i)) { + return shape.at(i) < key.shape.at(i); + } + } + return false; + } +}; + +class ConstantFuseSamePass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph) override; + + private: + void GetFuseConstNodes(ComputeGraphPtr &graph, std::map> &fuse_nodes); + Status MoveOutDataEdges(NodePtr &src_node, NodePtr &dst_node); + Status FuseConstNodes(ComputeGraphPtr &graph, std::map> &fuse_nodes); +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_CONSTANT_FUSE_SAME_PASS_H_ diff --git a/src/ge/graph/passes/control_op_attr_pass.cc b/src/ge/graph/passes/control_op_attr_pass.cc new file mode 100644 index 00000000..a10d21e6 --- /dev/null +++ b/src/ge/graph/passes/control_op_attr_pass.cc @@ -0,0 +1,251 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/control_op_attr_pass.h" + +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/graph_utils.h" +#include "init/gelib.h" + + +namespace { +const uint32_t kMaxNodeNum = 350; +} // namespace + +namespace ge { +/// +/// @brief Pass for Switch & Active Op attr +/// @param [in] graph +/// @return Status +/// +Status ControlOpAttrPass::Run(ComputeGraphPtr graph) { + GELOGD("ControlOpAttrPass Enter"); + + if (AcquireEngineInfo() != SUCCESS) { + GELOGE(FAILED, "AcquireEngineInfo fail."); + return FAILED; + } + + if (HandleStreamLabel(graph) != SUCCESS) { + GELOGE(FAILED, "HandleStreamLabel fail."); + return FAILED; + } + + if (HandleSwitchNodes(graph) != SUCCESS) { + GELOGE(FAILED, "HandleSwitchNodes fail."); + return FAILED; + } + + GELOGD("ControlOpAttrPass Leave"); + return SUCCESS; +} + +/// +/// @brief acquire engine info +/// @return Status +/// +Status ControlOpAttrPass::AcquireEngineInfo() { + auto gelib = GELib::GetInstance(); + if (gelib == nullptr) { + GELOGE(INTERNAL_ERROR, "Get GELib instance failed."); + return INTERNAL_ERROR; + } + + const map &scheduler_confs = gelib->DNNEngineManagerObj().GetSchedulers(); + for (const auto &item : scheduler_confs) { + const SchedulerConf &scheduler = item.second; + for (const auto &engine_pair : 
scheduler.cal_engines) { + EngineConfPtr engine_conf = engine_pair.second; + if (engine_conf != nullptr) { + engine_confs_[engine_pair.first] = engine_conf; + } + } + } + + return SUCCESS; +} + +/// +/// @brief Handle stream label +/// @param [in] graph +/// @return Status +/// +Status ControlOpAttrPass::HandleStreamLabel(const ComputeGraphPtr &graph) { + std::string stream_label; + for (auto &node : graph->GetDirectNode()) { + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + + const std::string type = op_desc->GetType(); + if ((type == STREAMSWITCH) || (type == STREAMSWITCHN)) { + switch_nodes_.emplace_back(node); + } + + if (!AttrUtils::GetStr(op_desc, ATTR_NAME_STREAM_LABEL, stream_label)) { + continue; + } + + auto num_iter = stream_label_num_.find(stream_label); + if (num_iter == stream_label_num_.end()) { + stream_label_num_[stream_label] = 1; + } else { + num_iter->second++; + } + + bool independent = false; + const std::string engine_name = op_desc->GetOpEngineName(); + if (!engine_name.empty()) { + auto engine_conf_iter = engine_confs_.find(engine_name); + bool exist_flag = (engine_conf_iter == engine_confs_.end()) || (engine_conf_iter->second == nullptr); + if (exist_flag) { + GELOGE(INTERNAL_ERROR, "Engine conf of node %s not found (engine name: %s).", op_desc->GetName().c_str(), + engine_name.c_str()); + return INTERNAL_ERROR; + } + independent = engine_conf_iter->second->independent; + } + + auto flag_iter = label_flag_.find(stream_label); + if (flag_iter == label_flag_.end()) { + label_flag_[stream_label] = independent ? std::make_pair(false, true) : std::make_pair(true, false); + } else if (flag_iter->second.first && flag_iter->second.second) { + continue; + } else { + bool &flag = (independent ? 
flag_iter->second.second : flag_iter->second.first); + flag = true; + } + } + + return SUCCESS; +} + +/// +/// @brief Handle Switch Op +/// @param [in] graph +/// @return Status +/// +Status ControlOpAttrPass::HandleSwitchNodes(ComputeGraphPtr &graph) { + for (auto &switch_node : switch_nodes_) { + GE_CHECK_NOTNULL(switch_node); + std::vector ori_active_label_list; + OpDescPtr switch_desc = switch_node->GetOpDesc(); + GE_CHECK_NOTNULL(switch_desc); + if (!AttrUtils::GetListStr(switch_desc, ATTR_NAME_ACTIVE_LABEL_LIST, ori_active_label_list) || + ori_active_label_list.empty()) { + GELOGE(INTERNAL_ERROR, "active label of switch %s is null", switch_node->GetName().c_str()); + return INTERNAL_ERROR; + } + + std::vector active_label_list; + std::vector active_nodes; + size_t label_num = ori_active_label_list.size(); + for (size_t i = 0; i < label_num; i++) { + const std::string active_label = ori_active_label_list[i]; + if (!CheckNeedActiveNode(active_label)) { + active_label_list.emplace_back(active_label); + continue; + } + + std::string name = switch_node->GetName() + "_" + STREAMACTIVE; + if (label_num > 0) { + name = name + "_" + std::to_string(i); + } + GELOGI("Create StreamActive op:%s.", name.c_str()); + OpDescPtr active_op_desc = MakeShared(name, STREAMACTIVE); + if (active_op_desc == nullptr) { + GELOGE(FAILED, "Create node %s fail.", name.c_str()); + return FAILED; + } + NodePtr active_node = graph->AddNode(active_op_desc); + if (active_node == nullptr) { + GELOGE(FAILED, "Create StreamActive node fail."); + return FAILED; + } + + for (NodePtr &node : switch_node->GetOutControlNodes()) { + std::string stream_label; + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + (void)AttrUtils::GetStr(op_desc, ATTR_NAME_STREAM_LABEL, stream_label); + if (stream_label != active_label) { + continue; + } + GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(switch_node->GetOutControlAnchor(), node->GetInControlAnchor()), + "remove edge failed"); + 
GE_CHK_STATUS_RET(GraphUtils::AddEdge(active_node->GetOutControlAnchor(), node->GetInControlAnchor()), + "add edge failed"); + } + + GE_CHK_STATUS_RET(SetSwitchBranchNodeLabel(active_node, name), "set switch branch node label failed"); + GE_CHK_STATUS_RET(SetStreamLabel(active_node, name), "set stream label failed"); + GE_CHK_STATUS_RET(SetActiveLabelList(active_node, {active_label}), "set active label list failed"); + + active_nodes.emplace_back(active_node); + active_label_list.emplace_back(name); + } + + GE_CHK_STATUS_RET(SetActiveLabelList(switch_node, {active_label_list}), "set active label list failed"); + + if (active_nodes.empty()) { + continue; + } + + if (!switch_node->GetOutAllNodes().empty()) { + GELOGE(FAILED, "Exist out_node holds stream_label beyond the range of active_label_list, switch_node:%s.", + switch_desc->GetName().c_str()); + return FAILED; + } + for (auto &active_node : active_nodes) { + GE_CHK_STATUS_RET(GraphUtils::AddEdge(switch_node->GetOutControlAnchor(), active_node->GetInControlAnchor()), + "add edge failed"); + } + } + + return SUCCESS; +} + +/// +/// @brief Check if insert active node +/// @param [in] stream_label +/// @return bool +/// +bool ControlOpAttrPass::CheckNeedActiveNode(const std::string &stream_label) { + if (stream_label_num_[stream_label] > kMaxNodeNum) { + return true; + } + + auto iter = label_flag_.find(stream_label); + if (iter == label_flag_.end()) { + GELOGE(INTERNAL_ERROR, "not find label %s", stream_label.c_str()); + return false; + } + if (iter->second.first && iter->second.second) { + return true; + } + + return false; +} +} // namespace ge diff --git a/src/ge/graph/passes/control_op_attr_pass.h b/src/ge/graph/passes/control_op_attr_pass.h new file mode 100644 index 00000000..d53e2191 --- /dev/null +++ b/src/ge/graph/passes/control_op_attr_pass.h @@ -0,0 +1,47 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not 
use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_CONTROL_OP_ATTR_PASS_H_ +#define GE_GRAPH_PASSES_CONTROL_OP_ATTR_PASS_H_ + +#include +#include +#include +#include +#include + +#include "engine_manager/dnnengine_manager.h" +#include "inc/graph_pass.h" + +namespace ge { +class ControlOpAttrPass : public GraphPass { + public: + Status Run(ComputeGraphPtr graph); + + private: + Status AcquireEngineInfo(); + Status HandleStreamLabel(const ComputeGraphPtr &graph); + Status HandleSwitchNodes(ComputeGraphPtr &graph); + bool CheckNeedActiveNode(const std::string &stream_label); + + std::unordered_map stream_label_num_; + // map> + std::unordered_map> label_flag_; + std::vector switch_nodes_; + std::map engine_confs_; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_CONTROL_OP_ATTR_PASS_H_ diff --git a/src/ge/graph/passes/control_trigger_pass.cc b/src/ge/graph/passes/control_trigger_pass.cc new file mode 100644 index 00000000..ee2198af --- /dev/null +++ b/src/ge/graph/passes/control_trigger_pass.cc @@ -0,0 +1,453 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/control_trigger_pass.h" + +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/type_utils.h" + +namespace ge { +Status ControlTriggerPass::Run(ComputeGraphPtr graph) { + GELOGD("ControlTriggerPass Enter"); + + GraphUtils::DumpGEGraph(graph, "BeforeControlTriggerPass"); + GraphUtils::DumpGEGraphToOnnx(*graph, "BeforeControlTriggerPass"); + + for (NodePtr &node : graph->GetDirectNode()) { + if (node->GetType() != CONTROLTRIGGER) { + continue; + } + auto in_ctrl_nodes = node->GetInControlNodes(); + for (NodePtr &in_ctrl_node : in_ctrl_nodes) { + if (HandleDynamicCtrlEdges(graph, node, in_ctrl_node) != SUCCESS) { + GELOGE(FAILED, "HandleDynamicCtrlEdges for %s->%s fail.", in_ctrl_node->GetName().c_str(), + node->GetName().c_str()); + return FAILED; + } + } + } + + GraphUtils::DumpGEGraph(graph, "AfterControlTriggerPass"); + GraphUtils::DumpGEGraphToOnnx(*graph, "AfterControlTriggerPass"); + + GELOGD("ControlTriggerPass Leave"); + return SUCCESS; +} + +/// +/// @brief Handle input ctrl edges for ControlTrigger node +/// @param [in] graph +/// @param [in] node +/// @param [in] in_ctrl_node +/// @return Status +/// +Status ControlTriggerPass::HandleDynamicCtrlEdges(ComputeGraphPtr &graph, NodePtr &node, NodePtr &in_ctrl_node) { + GE_CHECK_NOTNULL(node); + 
GE_CHECK_NOTNULL(in_ctrl_node); + GELOGI("HandleDynamicCtrlEdges: node=%s, in_ctrl_node=%s", node->GetName().c_str(), in_ctrl_node->GetName().c_str()); + NodePtr switch_node = nullptr; + bool branch_flag = false; + if (FindSwitchNode(in_ctrl_node, switch_node, branch_flag) != SUCCESS) { + GELOGE(FAILED, "FindSwitchNode fail."); + return FAILED; + } + + if (switch_node == nullptr) { + GELOGI("Not find valid switch node."); + return SUCCESS; + } + auto iter1 = control_trigger_map_.find(node); + if (iter1 != control_trigger_map_.end()) { + auto iter2 = iter1->second.find(switch_cond_map_[switch_node]); + if (iter2 != iter1->second.end()) { + NodePtr constant = (branch_flag ? iter2->second.second : iter2->second.first); + if ((GraphUtils::RemoveEdge(in_ctrl_node->GetOutControlAnchor(), node->GetInControlAnchor()) != GRAPH_SUCCESS) || + (GraphUtils::AddEdge(in_ctrl_node->GetOutControlAnchor(), constant->GetInControlAnchor()) != GRAPH_SUCCESS)) { + GELOGE(FAILED, "Replace ctrl edge fail, %s->%s, %s->%s.", in_ctrl_node->GetName().c_str(), + node->GetName().c_str(), in_ctrl_node->GetName().c_str(), constant->GetName().c_str()); + return FAILED; + } + + GELOGI("No need to insert new branch."); + return SUCCESS; + } + } + + if (InsertOppositeBranch(graph, node, in_ctrl_node, switch_node, branch_flag) != SUCCESS) { + GELOGE(FAILED, "InsertOppositeBranch fail."); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief Find switch_node for ControlTrigger node +/// @param [in] node +/// @param [out] switch_node +/// @param [out] branch_flag +/// @return Status +/// +Status ControlTriggerPass::FindSwitchNode(const NodePtr &node, NodePtr &switch_node, bool &branch_flag) { + std::set> handle_nodes; + // {node, >} + std::stack>>> nodes; + nodes.push(std::make_pair(node, std::make_pair(UINT32_MAX, std::make_pair(0, 0)))); + std::set> in_nodes; + + while (!nodes.empty()) { + auto iter = nodes.top(); + NodePtr tmp_node = iter.first; + GE_CHECK_NOTNULL(tmp_node); + nodes.pop(); + 
uint32_t index = iter.second.first; + auto num_pair = iter.second.second; + if (handle_nodes.count(std::make_pair(tmp_node, index)) > 0) { + continue; + } + switch (TransferNodeType(tmp_node, index)) { + case kCondSwitch: + if (num_pair.first == 0) { + switch_node = tmp_node; + branch_flag = (index == SWITCH_TRUE_OUTPUT); + GELOGI("FindSwitchNode succ, switch_node=%s, idx=%u", switch_node->GetName().c_str(), index); + return SUCCESS; + } + num_pair.first--; + break; + case kCondMerge: + num_pair.first++; + break; + case kLoopSwitchT: + GELOGI("in while_body, no need handle"); + return SUCCESS; + case kLoopSwitchF: + num_pair.second++; + break; + case kEnter: + if (num_pair.second > 0) { + num_pair.second--; + } + break; + case kNotControlOp: + break; + default: + GELOGE(FAILED, "invalid type"); + return FAILED; + } + + GetInNodes(tmp_node, in_nodes); + for (auto &node_idx : in_nodes) { + nodes.push(std::make_pair(node_idx.first, std::make_pair(node_idx.second, num_pair))); + } + + (void)handle_nodes.insert(std::make_pair(tmp_node, index)); + } + + return SUCCESS; +} + +/// +/// @brief Check if need insert opposite branch +/// @param [in] node +/// @param [in] index +/// @return ControlNodeType +/// +ControlNodeType ControlTriggerPass::TransferNodeType(const NodePtr &node, uint32_t index) { + const std::string type = node->GetType(); + if ((type == SWITCH) || (type == REFSWITCH)) { + if ((index != SWITCH_TRUE_OUTPUT) && (index != SWITCH_FALSE_OUTPUT)) { + GELOGI("TransferNodeType: neither true nor false branch."); + return kNotControlOp; + } + + if (FindPredInput(node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "FindPredInput fail, switch_node: %s.", node->GetName().c_str()); + return kInvalidType; + } + + NodePtr pred_node = switch_cond_map_[node]; + bool branch_flag = (index == SWITCH_TRUE_OUTPUT); + if (pred_node->GetType() != LOOPCOND) { + GELOGI("TransferNodeType: kCondSwitch node=%s, idx=%u", node->GetName().c_str(), index); + return kCondSwitch; + } else { + 
GELOGI("TransferNodeType: kLoopSwitch node=%s, idx=%u", node->GetName().c_str(), index); + return branch_flag ? kLoopSwitchT : kLoopSwitchF; + } + } else if ((type == MERGE) || (type == REFMERGE)) { + OpDescPtr merge_desc = node->GetOpDesc(); + if (merge_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "FindPredInput fail, merge_desc is null, merge_node: %s.", node->GetName().c_str()); + return kInvalidType; + } + if (!merge_desc->HasAttr(ATTR_NAME_NEXT_ITERATION)) { + return kCondMerge; + } + } else if ((type == ENTER) || (type == REFENTER)) { + return kEnter; + } + + return kNotControlOp; +} + +/// +/// @brief Get in_node & idx pairs +/// @param [in] node +/// @param [out] in_nodes +/// @return void +/// +void ControlTriggerPass::GetInNodes(const NodePtr &node, std::set> &in_nodes) { + in_nodes.clear(); + for (auto &in_ctrl_node : node->GetInControlNodes()) { + (void)in_nodes.insert(std::make_pair(in_ctrl_node, UINT32_MAX)); + } + + for (InDataAnchorPtr &in_data_anchor : node->GetAllInDataAnchors()) { + OutDataAnchorPtr peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + if (peer_out_anchor == nullptr) { + continue; + } + (void)in_nodes.insert(std::make_pair(peer_out_anchor->GetOwnerNode(), peer_out_anchor->GetIdx())); + } + return; +} + +/// +/// @brief Insert opposite branch for ControlTrigger +/// @param [in] graph +/// @param [in] ControlTrigger node +/// @param [in] in_ctrl_node +/// @param [in] switch_node +/// @param [in] branch_flag +/// @return Status +/// +Status ControlTriggerPass::InsertOppositeBranch(ComputeGraphPtr &graph, NodePtr &node, NodePtr &in_ctrl_node, + NodePtr &switch_node, bool branch_flag) { + GE_CHECK_NOTNULL(node); + GE_CHECK_NOTNULL(in_ctrl_node); + GE_CHECK_NOTNULL(switch_node); + OpDescPtr switch_desc = switch_node->GetOpDesc(); + GE_CHECK_NOTNULL(switch_desc); + + GeTensorDesc data_desc(GeShape(), FORMAT_NCHW, DT_INT32); + + NodePtr merge_node = InsertMergeNode(graph, node, in_ctrl_node, data_desc); + if (merge_node == nullptr) { + 
GELOGE(FAILED, "InsertMergeNode fail."); + return FAILED; + } + + NodePtr const_f = InsertConstNode(graph, merge_node, data_desc, false); + NodePtr const_t = InsertConstNode(graph, merge_node, data_desc, true); + if ((const_f == nullptr) || (const_t == nullptr)) { + GELOGE(FAILED, "InsertConstNode fail."); + return FAILED; + } + + NodePtr orig_const = branch_flag ? const_t : const_f; + NodePtr new_const = !branch_flag ? const_t : const_f; + uint32_t new_idx = branch_flag ? SWITCH_FALSE_OUTPUT : SWITCH_TRUE_OUTPUT; + + const std::string identity_name = switch_desc->GetName() + "_" + IDENTITY; + NodePtr identity_node = InsertIdentityNode(graph, identity_name, switch_desc->GetOutputDesc(new_idx)); + if (identity_node == nullptr) { + GELOGE(FAILED, "InsertIdentityNode fail."); + return FAILED; + } + + if (GraphUtils::AddEdge(in_ctrl_node->GetOutControlAnchor(), orig_const->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add in ctrl edge fail, %s->%s.", in_ctrl_node->GetName().c_str(), orig_const->GetName().c_str()); + return FAILED; + } + if (GraphUtils::AddEdge(switch_node->GetOutDataAnchor(new_idx), identity_node->GetInDataAnchor(0)) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add in data edge fail, %s->%s.", switch_desc->GetName().c_str(), identity_node->GetName().c_str()); + return FAILED; + } + if (GraphUtils::AddEdge(identity_node->GetOutControlAnchor(), new_const->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add in ctrl edge fail, %s->%s.", identity_node->GetName().c_str(), new_const->GetName().c_str()); + return FAILED; + } + + auto pred_const = std::make_pair(switch_cond_map_[switch_node], std::make_pair(const_f, const_t)); + auto iter = control_trigger_map_.find(node); + if (iter == control_trigger_map_.end()) { + control_trigger_map_[node] = {pred_const}; + } else { + if (!iter->second.insert(pred_const).second) { + GELOGE(FAILED, "control_trigger_map_ insert failed."); + return FAILED; + } + } + + return SUCCESS; +} + +/// +/// @brief 
Insert Merge Node +/// @param [in] graph +/// @param [in] node +/// @param [in] in_ctrl_node +/// @param [in] data_desc +/// @return NodePtr +/// +NodePtr ControlTriggerPass::InsertMergeNode(ComputeGraphPtr &graph, NodePtr &node, NodePtr &in_ctrl_node, + const GeTensorDesc &data_desc) { + const std::string name = node->GetName() + "_" + MERGE; + OpDescPtr op_desc = MakeShared(name, MERGE); + if (op_desc == nullptr) { + GELOGE(FAILED, "Create Merge op %s: create op_desc fail.", name.c_str()); + return nullptr; + } + + if ((op_desc->AddInputDesc(data_desc) != GRAPH_SUCCESS) || (op_desc->AddInputDesc(data_desc) != GRAPH_SUCCESS) || + (op_desc->AddOutputDesc(data_desc) != GRAPH_SUCCESS) || (op_desc->AddOutputDesc(data_desc) != GRAPH_SUCCESS)) { + GELOGE(INTERNAL_ERROR, "Create Merge op %s: add input/output desc fail.", name.c_str()); + return nullptr; + } + + GELOGI("Create Merge op:%s.", name.c_str()); + NodePtr merge_node = graph->AddNode(op_desc); + if (merge_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Create Merge op %s fail.", name.c_str()); + return nullptr; + } + + if ((GraphUtils::RemoveEdge(in_ctrl_node->GetOutControlAnchor(), node->GetInControlAnchor()) != GRAPH_SUCCESS) || + (GraphUtils::AddEdge(merge_node->GetOutControlAnchor(), node->GetInControlAnchor()) != GRAPH_SUCCESS)) { + GELOGE(FAILED, "Replace ctrl edge fail, %s->%s, %s->%s", in_ctrl_node->GetName().c_str(), node->GetName().c_str(), + merge_node->GetName().c_str(), node->GetName().c_str()); + return nullptr; + } + + return merge_node; +} + +/// +/// @brief Insert Const Node +/// @param [in] graph +/// @param [in] merge_node +/// @param [in] data_desc +/// @param [in] flag +/// @return NodePtr +/// +NodePtr ControlTriggerPass::InsertConstNode(ComputeGraphPtr &graph, NodePtr &merge_node, const GeTensorDesc &data_desc, + bool flag) { + const std::string name = merge_node->GetName() + "_" + CONSTANT + (flag ? 
"_t" : "_f"); + OpDescPtr op_desc = MakeShared(name, CONSTANT); + if (op_desc == nullptr) { + GELOGE(FAILED, "Create Const op %s: create op_desc fail.", name.c_str()); + return nullptr; + } + + int32_t value = 0; + GeTensorPtr const_value = MakeShared(data_desc, reinterpret_cast(&value), sizeof(int32_t)); + if (const_value == nullptr) { + GELOGE(FAILED, "Create tensor fail."); + return nullptr; + } + if (!AttrUtils::SetTensor(op_desc, ATTR_NAME_WEIGHTS, const_value)) { + GELOGE(INTERNAL_ERROR, "Create Const op %s: set attr ATTR_NAME_WEIGHTS fail.", name.c_str()); + return nullptr; + } + + if (op_desc->AddOutputDesc(data_desc) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Create Const op %s: add output desc fail.", name.c_str()); + return nullptr; + } + + GELOGI("Create Const op: %s", name.c_str()); + NodePtr const_node = graph->AddNode(op_desc); + if (const_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Create Const op %s fail.", name.c_str()); + return nullptr; + } + + uint32_t out_idx = (flag ? 
SWITCH_TRUE_OUTPUT : SWITCH_FALSE_OUTPUT); + if (GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), merge_node->GetInDataAnchor(out_idx)) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add in data edge fail, %s->%s", const_node->GetName().c_str(), merge_node->GetName().c_str()); + return nullptr; + } + + return const_node; +} + +/// +/// @brief Insert Identity Node +/// @param [in] graph +/// @param [in] name +/// @param [in] data_desc +/// @return NodePtr +/// +NodePtr ControlTriggerPass::InsertIdentityNode(ComputeGraphPtr &graph, const std::string &name, + const GeTensorDesc &data_desc) { + OpDescPtr op_desc = MakeShared(name, IDENTITY); + if (op_desc == nullptr) { + GELOGE(FAILED, "Create Identity op %s: create op_desc fail.", name.c_str()); + return nullptr; + } + + if ((op_desc->AddInputDesc(data_desc) != GRAPH_SUCCESS) || (op_desc->AddOutputDesc(data_desc) != GRAPH_SUCCESS)) { + GELOGE(INTERNAL_ERROR, "Create Identity op %s: add input/output desc fail.", name.c_str()); + return nullptr; + } + + GELOGI("Create Identity op:%s.", name.c_str()); + NodePtr identity_node = graph->AddNode(op_desc); + if (identity_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Create Identity op %s fail.", name.c_str()); + return nullptr; + } + + return identity_node; +} + +/// +/// @brief Find pred_input of switch_node +/// @param [in] switch_node +/// @param [in] name +/// @param [in] data_desc +/// @return Status +/// +Status ControlTriggerPass::FindPredInput(const NodePtr &switch_node) { + if (switch_node == nullptr) { + GELOGE(INTERNAL_ERROR, "switch_node is null"); + return INTERNAL_ERROR; + } + + InDataAnchorPtr in_cond_anchor = switch_node->GetInDataAnchor(SWITCH_PRED_INPUT); + if (in_cond_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, "in_cond_anchor is nullptr, node: %s.", switch_node->GetName().c_str()); + return INTERNAL_ERROR; + } + OutDataAnchorPtr pred_cond_anchor = in_cond_anchor->GetPeerOutAnchor(); + if (pred_cond_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, 
"pred_cond_anchor is nullptr, node: %s.", switch_node->GetName().c_str()); + return INTERNAL_ERROR; + } + + switch_cond_map_[switch_node] = pred_cond_anchor->GetOwnerNode(); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/control_trigger_pass.h b/src/ge/graph/passes/control_trigger_pass.h new file mode 100644 index 00000000..2d7182dd --- /dev/null +++ b/src/ge/graph/passes/control_trigger_pass.h @@ -0,0 +1,60 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_CONTROL_TRIGGER_PASS_H_ +#define GE_GRAPH_PASSES_CONTROL_TRIGGER_PASS_H_ + +#include +#include +#include +#include + +#include "inc/graph_pass.h" + +namespace ge { +enum ControlNodeType { + kNotControlOp, + kCondSwitch, + kCondMerge, + kLoopSwitchT, + kLoopSwitchF, + kEnter, + kInvalidType +}; + +class ControlTriggerPass : public GraphPass { + public: + Status Run(ComputeGraphPtr graph); + + private: + Status HandleDynamicCtrlEdges(ComputeGraphPtr &graph, NodePtr &node, NodePtr &in_ctrl_node); + Status FindSwitchNode(const NodePtr &node, NodePtr &switch_node, bool &branch_flag); + ControlNodeType TransferNodeType(const NodePtr &node, uint32_t index); + void GetInNodes(const NodePtr &node, std::set> &in_nodes); + Status InsertOppositeBranch(ComputeGraphPtr &graph, NodePtr &node, NodePtr &in_ctrl_node, NodePtr &switch_node, + bool branch_flag); + NodePtr InsertMergeNode(ComputeGraphPtr &graph, NodePtr &node, NodePtr &in_ctrl_node, const GeTensorDesc &data_desc); + NodePtr InsertConstNode(ComputeGraphPtr &graph, NodePtr &merge_node, const GeTensorDesc &data_desc, bool flag); + NodePtr InsertIdentityNode(ComputeGraphPtr &graph, const std::string &name, const GeTensorDesc &data_desc); + Status FindPredInput(const NodePtr &switch_node); + + // + std::unordered_map switch_cond_map_; + // > + std::unordered_map>> control_trigger_map_; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_CONTROL_TRIGGER_PASS_H_ diff --git a/src/ge/graph/passes/dimension_adjust_pass.cc b/src/ge/graph/passes/dimension_adjust_pass.cc new file mode 100644 index 00000000..ab69693a --- /dev/null +++ b/src/ge/graph/passes/dimension_adjust_pass.cc @@ -0,0 +1,73 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/dimension_adjust_pass.h" + +#include +#include +#include + +namespace ge { +namespace { +const int kDataInputIndex = 0; +const int kRemoveInputIndex = 1; +} // namespace + +Status DimensionAdjustPass::Run(ge::NodePtr &node) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "node is nullptr"); + return PARAM_INVALID; + } + + OpDescPtr op_desc_ptr = node->GetOpDesc(); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "GetOpDesc return nullptr"); + return PARAM_INVALID; + } + + string type; + Status ret = GetOriginalType(node, type); + if (ret != SUCCESS) { + GELOGE(ret, "DimensionAdjustPass get originnal type fail."); + return ret; + } + KernelFactory &factory = KernelFactory::Instance(); + shared_ptr op_kernel = factory.Create(type); + if (op_kernel == nullptr) { + return SUCCESS; + } + + // call compute function + ret = op_kernel->Compute(node); + if (ret != SUCCESS) { + if (ret == NOT_CHANGED) { + return SUCCESS; + } + GELOGE(ret, "DimensionAdjustPass compute failed"); + return ret; + } + if (node->GetAllInDataAnchors().size() > static_cast(kRemoveInputIndex)) { + ret = PassUtils::UnlinkNodeWithControlCopy(node, kRemoveInputIndex); + if (ret != SUCCESS) { + GELOGE(ret, "DimensionAdjustPass unlink node with control copy fail."); + return ret; + } + } + + std::vector data_relink_io_map = {kDataInputIndex}; + return IsolateAndDeleteNode(node, data_relink_io_map); +} +} // namespace ge diff --git a/src/ge/graph/passes/dimension_adjust_pass.h b/src/ge/graph/passes/dimension_adjust_pass.h new file mode 100644 index 
00000000..fa9d2320 --- /dev/null +++ b/src/ge/graph/passes/dimension_adjust_pass.h @@ -0,0 +1,40 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_DIMENSION_ADJUST_PASS_H_ +#define GE_GRAPH_PASSES_DIMENSION_ADJUST_PASS_H_ + +#include "common/debug/log.h" +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/common/omg_util.h" +#include "graph/passes/base_pass.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/kernel.h" +#include "inc/kernel_factory.h" +#include "graph/passes/pass_utils.h" + +namespace ge { +class DimensionAdjustPass : public BaseNodePass { + public: + Status Run(ge::NodePtr &node) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_DIMENSION_ADJUST_PASS_H_ diff --git a/src/ge/graph/passes/dimension_compute_pass.cc b/src/ge/graph/passes/dimension_compute_pass.cc new file mode 100644 index 00000000..adf60737 --- /dev/null +++ b/src/ge/graph/passes/dimension_compute_pass.cc @@ -0,0 +1,56 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/dimension_compute_pass.h" + +#include +#include + +#include "common/debug/log.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/utils/attr_utils.h" +#include "inc/kernel.h" + +namespace ge { +Status DimensionComputePass::Run(ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + auto op_kernel = folding_pass::GetKernelByType(node); + if (op_kernel == nullptr) { + return SUCCESS; + } + std::vector outputs; + auto ret = op_kernel->Compute(node, outputs); + if (ret != SUCCESS) { + if (ret == NOT_CHANGED) { + return SUCCESS; + } else { + GELOGE(ret, "DimensionComputePass Compute failed"); + return ret; + } + } + + if (outputs.empty()) { + GELOGE(INTERNAL_ERROR, + "Failed to compute dims for node %s," + " no output weight", + node->GetName().c_str()); + return INTERNAL_ERROR; + } + + return Folding(node, outputs); +} +} // namespace ge diff --git a/src/ge/graph/passes/dimension_compute_pass.h b/src/ge/graph/passes/dimension_compute_pass.h new file mode 100644 index 00000000..40110757 --- /dev/null +++ b/src/ge/graph/passes/dimension_compute_pass.h @@ -0,0 +1,29 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_DIMENSION_COMPUTE_PASS_H_ +#define GE_GRAPH_PASSES_DIMENSION_COMPUTE_PASS_H_ + +#include "graph/passes/folding_pass.h" + +namespace ge { +class DimensionComputePass : public FoldingPass { + public: + Status Run(ge::NodePtr &node) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_DIMENSION_COMPUTE_PASS_H_ diff --git a/src/ge/graph/passes/dropout_pass.cc b/src/ge/graph/passes/dropout_pass.cc new file mode 100644 index 00000000..ab88aa23 --- /dev/null +++ b/src/ge/graph/passes/dropout_pass.cc @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/dropout_pass.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/utils/node_utils.h" + +namespace ge { +/// +/// run pass +/// @param [in] node node to be optimized +/// @return Status +/// +Status DropOutPass::Run(NodePtr &node) { + GELOGD("DropOutPass running"); + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + if (node->GetOpDesc() == nullptr) { + GELOGE(PARAM_INVALID, "param [opDesc] must not be null."); + return PARAM_INVALID; + } + std::string op_type = node->GetOpDesc()->GetType(); + if (op_type == DROPOUT) { + GELOGD("op type is dropout."); + return IsolateAndDeleteNode(node, {0}); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/dropout_pass.h b/src/ge/graph/passes/dropout_pass.h new file mode 100644 index 00000000..506ee5d6 --- /dev/null +++ b/src/ge/graph/passes/dropout_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_DROPOUT_PASS_H_ +#define GE_GRAPH_PASSES_DROPOUT_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class DropOutPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_DROPOUT_PASS_H_ diff --git a/src/ge/graph/passes/end_graph_pass.cc b/src/ge/graph/passes/end_graph_pass.cc new file mode 100644 index 00000000..0a2790a8 --- /dev/null +++ b/src/ge/graph/passes/end_graph_pass.cc @@ -0,0 +1,72 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/end_graph_pass.h" + +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/passes/pass_utils.h" +#include "graph/utils/tensor_utils.h" +#include "init/gelib.h" +#include "common/ge/ge_util.h" +#include "graph/debug/ge_attr_define.h" + +namespace ge { +Status EndGraphPass::Run(ge::ComputeGraphPtr graph) { + GELOGI("EndGraphPass Run."); + if (graph == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "Compute graph is null."); + return GE_GRAPH_PARAM_NULLPTR; + } + + auto gelib = GELib::GetInstance(); + bool head_stream = (gelib == nullptr) ? 
false : gelib->HeadStream(); + if (!head_stream) { + GELOGI("Configured head stream: %d, No need EndGraph.", head_stream); + return SUCCESS; + } + + NodePtr net_output_node = graph->FindNode(NODE_NAME_NET_OUTPUT); + if (net_output_node == nullptr) { + GELOGI("No output node found."); + return SUCCESS; + } + + OpDescPtr op_desc = MakeShared(NODE_NAME_END_GRAPH, ENDGRAPH); + GE_CHECK_NOTNULL(op_desc); + GELOGI("Create EndGraph op:%s.", op_desc->GetName().c_str()); + (void) AttrUtils::SetListStr(op_desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, std::move(std::vector())); + NodePtr end_graph_node = graph->AddNode(op_desc); + if (end_graph_node == nullptr) { + GELOGI("Add EndGraph:%s node to Graph fail.", op_desc->GetName().c_str()); + return INTERNAL_ERROR; + } + + if (GraphUtils::AddEdge(net_output_node->GetOutControlAnchor(), end_graph_node->GetInControlAnchor()) != SUCCESS) { + GELOGI("Add ctrl edge to EndGraph:%s fail.", end_graph_node->GetName().c_str()); + return INTERNAL_ERROR; + } + + GELOGI("EndGraphPass Leave."); + return SUCCESS; +} +} // namespace ge + diff --git a/src/ge/graph/passes/end_graph_pass.h b/src/ge/graph/passes/end_graph_pass.h new file mode 100644 index 00000000..c6ff422a --- /dev/null +++ b/src/ge/graph/passes/end_graph_pass.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_END_GRAPH_PASS_H_ +#define GE_GRAPH_PASSES_END_GRAPH_PASS_H_ + +#include "graph/types.h" +#include "inc/graph_pass.h" + +namespace ge { +class EndGraphPass : public GraphPass { + public: + /// + /// Entry of the NetOutputPass optimizer + /// @param [in] graph: Input ComputeGraph + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status Run(ge::ComputeGraphPtr graph) override; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_END_GRAPH_PASS_H_ + diff --git a/src/ge/graph/passes/enter_pass.cc b/src/ge/graph/passes/enter_pass.cc new file mode 100644 index 00000000..bead855a --- /dev/null +++ b/src/ge/graph/passes/enter_pass.cc @@ -0,0 +1,72 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/enter_pass.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/utils/graph_utils.h" + +namespace ge { +Status EnterPass::Run(NodePtr &node) { + GELOGD("EnterPass running"); + if (node == nullptr) { + GELOGE(PARAM_INVALID, "param [node] must not be null."); + return PARAM_INVALID; + } + + if ((node->GetType() != ENTER) && (node->GetType() != REFENTER)) { + return SUCCESS; + } + + // enter node has only one input + if (node->GetInDataNodes().empty()) { + GELOGE(PARAM_INVALID, "enter_node %s has no input", node->GetName().c_str()); + return PARAM_INVALID; + } + NodePtr in_node = node->GetInDataNodes().at(0); + if (in_node == nullptr) { + GELOGE(PARAM_INVALID, "param [in_node] must not be null"); + return PARAM_INVALID; + } + + if ((in_node->GetType() != CONSTANT) && (in_node->GetType() != CONSTANTOP)) { + return SUCCESS; + } + + bool need_remove_flag = in_node->GetInControlNodes().empty() && + node->GetInControlNodes().empty() && + node->GetOutDataNodes().empty(); + if (need_remove_flag) { + for (auto &out_ctrl_node : node->GetOutControlNodes()) { + if (out_ctrl_node == nullptr) { + continue; + } + if (GraphUtils::RemoveEdge(node->GetOutControlAnchor(), out_ctrl_node->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Remove Enter ctrl output fail, %s->%s", + node->GetName().c_str(), out_ctrl_node->GetName().c_str()); + return FAILED; + } + } + } + + GELOGD("EnterPass success"); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/enter_pass.h b/src/ge/graph/passes/enter_pass.h new file mode 100644 index 00000000..04ac62ee --- /dev/null +++ b/src/ge/graph/passes/enter_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_ENTER_PASS_H_ +#define GE_GRAPH_PASSES_ENTER_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class EnterPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_ENTER_PASS_H_ diff --git a/src/ge/graph/passes/flow_ctrl_pass.cc b/src/ge/graph/passes/flow_ctrl_pass.cc new file mode 100644 index 00000000..c027eb9f --- /dev/null +++ b/src/ge/graph/passes/flow_ctrl_pass.cc @@ -0,0 +1,451 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/flow_ctrl_pass.h" + +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/passes/pass_utils.h" + +namespace ge { +// when namespace change to ge, please delete the using code. 
+ +Status FlowCtrlPass::Run(ComputeGraphPtr compute_graph) { + GE_CHECK_NOTNULL(compute_graph); + if (!PassUtils::IsNeedTrainIteFlowCtrl(compute_graph)) { + GELOGI("No need FlowCtrl"); + return NOT_CHANGED; + } + + GELOGI("FlowCtrl pass begin"); + bool graph_change = false; + // 1. Add FP/BP flow ctrl (big cycle) + for (auto &node : compute_graph->GetDirectNode()) { + if (node == nullptr) { + continue; + } + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, continue); + uint32_t true_stream_id = 0; + bool is_found = AttrUtils::GetInt(node->GetOpDesc(), ATTR_NAME_TRUE_BRANCH_STREAM, true_stream_id); + // FP/BP cycle flag is true_stream_id == 0 + if (is_found && (true_stream_id == TRUE_STREAM_ID)) { + // Add big cycle + Status ret = AddFpBpIteratorCtrl(compute_graph, node); + if (ret != SUCCESS) { + GELOGE(ret, "AddFpBpIteratorCtrl fail, node: %s.", node->GetName().c_str()); + return ret; + } + graph_change = true; + // only one big cycle, so break. + break; + } + } + + // 2. Add special node flow ctrl. eg, IteratorGetNext. (small cycle) + // NOTE: Small cycle share the variables with big cycle. + for (auto &node : compute_graph->GetDirectNode()) { + if (node == nullptr) { + continue; + } + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, continue); + bool need_cycle_flag = false; + bool is_found = AttrUtils::GetBool(node->GetOpDesc(), ATTR_NAME_STREAM_CYCLE_EVENT_FLAG, need_cycle_flag); + // small cycle flag is need_stream_cycle_event == true + if (is_found && need_cycle_flag) { + Status ret = AddSpecialNodeIteratorCtrl(compute_graph, node); + if (ret != SUCCESS) { + GELOGE(ret, "AddSpecialNodeIteratorCtrl fail, node: %s.", node->GetName().c_str()); + return ret; + } + graph_change = true; + } + } + GELOGI("FlowCtrl pass end, graph is %s.", graph_change ? "changed" : "not changed"); + return graph_change ? 
SUCCESS : NOT_CHANGED; +} + +NodePtr FlowCtrlPass::InsertOp(ComputeGraphPtr &compute_graph, const string &node_type, const string &node_name, + const std::vector &input_list, + const std::vector &output_list) { + OpDescPtr op_desc = MakeShared(node_name, node_type); + if (op_desc == nullptr) { + GELOGE(FAILED, "Make OpDesc failed, name:%s, type:%s.", node_name.c_str(), node_type.c_str()); + return nullptr; + } + + for (auto &input_desc : input_list) { + graphStatus graph_status = op_desc->AddInputDesc(input_desc); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add node:%s intput desc failed, error=%u.", node_name.c_str(), graph_status); + return nullptr; + } + } + + for (auto &output_desc : output_list) { + graphStatus graph_status = op_desc->AddOutputDesc(output_desc); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add node:%s output desc failed, error=%u.", node_name.c_str(), graph_status); + return nullptr; + } + } + + GE_IF_BOOL_EXEC(compute_graph == nullptr, GELOGE(FAILED, "compute_graph is nullptr"); return nullptr); + NodePtr node = compute_graph->AddNode(op_desc); + if (node == nullptr) { + GELOGE(FAILED, "add node failed, name:%s, type:%s.", node_name.c_str(), node_type.c_str()); + return nullptr; + } + + GELOGI("Insert op success, name:%s, type:%s.", node_name.c_str(), node_type.c_str()); + return node; +} + +NodePtr FlowCtrlPass::InsertStreamSwitchOp(ComputeGraphPtr &compute_graph, const string &switch_name, + const NodePtr &loop_cond, const NodePtr &iter_per_loop) { + GE_IF_BOOL_EXEC(loop_cond == nullptr || loop_cond->GetOpDesc() == nullptr, GELOGE(FAILED, "loop_cond is null"); + return nullptr); + GE_IF_BOOL_EXEC(iter_per_loop == nullptr || iter_per_loop->GetOpDesc() == nullptr, + GELOGE(FAILED, "iter_per_loop is nullptr"); + return nullptr); + std::vector input_desc_list = {loop_cond->GetOpDesc()->GetOutputDesc(0), + iter_per_loop->GetOpDesc()->GetOutputDesc(0)}; + std::vector output_desc_list; + NodePtr stream_switch = 
InsertOp(compute_graph, STREAMSWITCH, switch_name, input_desc_list, output_desc_list); + if (stream_switch == nullptr) { + GELOGE(FAILED, "InsertStreamSwitchOp failed, name:%s.", switch_name.c_str()); + return nullptr; + } + + // set input 0 + graphStatus add_ret = GraphUtils::AddEdge(loop_cond->GetOutDataAnchor(0), stream_switch->GetInDataAnchor(0)); + if (add_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add loop_cond_node to switch_node:%s edge failed, ret = %u.", switch_name.c_str(), add_ret); + return nullptr; + } + + // set input 1 + add_ret = GraphUtils::AddEdge(iter_per_loop->GetOutDataAnchor(0), stream_switch->GetInDataAnchor(1)); + if (add_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add iter_per_loop_node to switch_node:%s edge failed, ret = %u.", switch_name.c_str(), add_ret); + return nullptr; + } + + // stream switch op need switch cond by attr. + GE_IF_BOOL_EXEC( + !AttrUtils::SetInt(stream_switch->GetOpDesc(), ATTR_NAME_STREAM_SWITCH_COND, static_cast(RT_LESS)), + GELOGE(FAILED, "set ATTR_NAME_STREAM_SWITCH_COND failed"); + return nullptr); + + return stream_switch; +} + +NodePtr FlowCtrlPass::AddVariableNode(ComputeGraphPtr &compute_graph, const string &name) { + GE_IF_BOOL_EXEC(compute_graph == nullptr, GELOGE(FAILED, "compute_graph is nullptr"); return nullptr); + NodePtr exist_node = compute_graph->FindNode(name); + if (exist_node != nullptr) { + GELOGD("Node %s already exist, no need add.", name.c_str()); + return exist_node; + } + // fetch and set tensor desc + GeTensorDesc tensor_desc; + if (ge::VarManager::Instance(compute_graph->GetSessionID()) == nullptr) { + return nullptr; + } + Status ret = ge::VarManager::Instance(compute_graph->GetSessionID())->GetCurVarDesc(name, tensor_desc); + if (ret != SUCCESS) { + GELOGE(FAILED, "Get var desc fail, name:%s", name.c_str()); + return nullptr; + } + std::vector input_desc_list; + std::vector output_desc_list = {tensor_desc}; + // insert node + return InsertOp(compute_graph, VARIABLE, name, 
input_desc_list, output_desc_list); +} + +NodePtr FlowCtrlPass::InsertAssignOp(ge::ComputeGraphPtr &compute_graph, const string &node_type, + const string &node_name, const NodePtr &ref_node, const NodePtr &value_node) { + GE_IF_BOOL_EXEC(ref_node == nullptr || value_node == nullptr || ref_node->GetOpDesc() == nullptr || + value_node->GetOpDesc() == nullptr, + GELOGE(FAILED, "ref node or value node is null"); + return nullptr); + GeTensorDesc ref_tensor_desc = ref_node->GetOpDesc()->GetOutputDesc(0); + GeTensorDesc val_tensor_desc = value_node->GetOpDesc()->GetOutputDesc(0); + std::vector input_desc_list = {ref_tensor_desc, val_tensor_desc}; + std::vector output_desc_list = {ref_tensor_desc}; + NodePtr assign_node = InsertOp(compute_graph, node_type, node_name, input_desc_list, output_desc_list); + if (assign_node == nullptr) { + GELOGE(FAILED, "Insert node %s(%s) failed.", node_name.c_str(), node_type.c_str()); + return nullptr; + } + // assign node input 0 = ref_node + graphStatus add_ret = GraphUtils::AddEdge(ref_node->GetOutDataAnchor(0), assign_node->GetInDataAnchor(0)); + if (add_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add ref_node to %s edge failed, add_ret=%u.", node_name.c_str(), add_ret); + return nullptr; + } + // assign input 1 = value_node + add_ret = GraphUtils::AddEdge(value_node->GetOutDataAnchor(0), assign_node->GetInDataAnchor(1)); + if (add_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add value_node to %s edge failed, add_ret=%u.", node_name.c_str(), add_ret); + return nullptr; + } + + return assign_node; +} + +Status FlowCtrlPass::CreateIterCtrlTrueBranch(ComputeGraphPtr &compute_graph, const NodePtr &loop_cond_node, + const NodePtr &loop_inc_node, NodePtr &switch_node) { + /* + * loopCond + * | + * v + * switch --> AssignAdd --> active + * ^ + * | + * loopIncrement + */ + // Insert AssignAdd node + NodePtr assign_add_node = + InsertAssignOp(compute_graph, ASSIGNADD, NODE_NAME_FLOWCTRL_LOOP_ASSIGNADD, loop_cond_node, loop_inc_node); + if 
(assign_add_node == nullptr || switch_node == nullptr) { + GELOGE(PARAM_INVALID, "assign add node or switch node is null"); + return FAILED; + } + + string active_name = switch_node->GetName() + "_StreamActive"; + // add attr for stream assign model to break branch. + GE_CHK_STATUS_RET(SetStreamLabel(assign_add_node, active_name), "set stream label failed"); + + // used for stream assign to find true branch + GE_CHK_STATUS_RET(SetActiveLabelList(switch_node, {active_name}), "set active label list failed"); + + // 2. Insert active node + NodePtr active_node = InsertOp(compute_graph, STREAMACTIVE, active_name, {}, {}); + if (active_node == nullptr) { + GELOGE(FAILED, "Insert stream active node:%s for IterCtrlTrueStream failed.", active_name.c_str()); + return FAILED; + } + GE_CHK_STATUS_RET(SetStreamLabel(active_node, active_name), "set stream label failed"); + GE_IF_BOOL_EXEC(!AttrUtils::SetBool(active_node->GetOpDesc(), ATTR_NAME_IS_LOOP_ACTIVE, true), + GELOGE(FAILED, "set ATTR_NAME_IS_LOOP_ACTIVE failed"); + return FAILED); + + // add ctrl edges + graphStatus add_ret = GraphUtils::AddEdge(switch_node->GetOutControlAnchor(), assign_add_node->GetInControlAnchor()); + if (add_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add switch_node to assign_add_node ctrl edge failed, add_ret=%u.", add_ret); + return FAILED; + } + + add_ret = GraphUtils::AddEdge(assign_add_node->GetOutControlAnchor(), active_node->GetInControlAnchor()); + if (add_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add assign_add_node to active_node ctrl edge failed, add_ret=%u.", add_ret); + return FAILED; + } + + GELOGI("CreateIterCtrlTrueBranch success. 
StreamActive op:%s.", active_node->GetName().c_str()); + return SUCCESS; +} + +Status FlowCtrlPass::CreateIterCtrlFalseBranch(ComputeGraphPtr &compute_graph, const NodePtr &loop_cond_node, + const NodePtr &loop_reset_node, NodePtr &switch_node) { + /* + * loopCond + * | + * v + * switch --> Assign + * ^ + * | + * loopReset + */ + // Insert Assign node + NodePtr assign_node = + InsertAssignOp(compute_graph, ASSIGN, NODE_NAME_FLOWCTRL_LOOP_ASSIGN, loop_cond_node, loop_reset_node); + if (assign_node == nullptr || switch_node == nullptr) { + GELOGE(PARAM_INVALID, "assign_node or switch node is null"); + return FAILED; + } + + GE_CHK_STATUS_RET(SetStreamLabel(assign_node, switch_node->GetName()), "set stream label failed"); + + // 3. Insert ctrl edges + graphStatus add_ret = GraphUtils::AddEdge(switch_node->GetOutControlAnchor(), assign_node->GetInControlAnchor()); + if (add_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add switch_node to assign_node ctrl edge failed, add_ret=%u.", add_ret); + return FAILED; + } + + GELOGI("CreateIterCtrlFalseBranch success."); + return SUCCESS; +} + +Status FlowCtrlPass::AddFpBpIteratorCtrl(ComputeGraphPtr &compute_graph, NodePtr &pre_node) { + GE_IF_BOOL_EXEC(pre_node == nullptr, GELOGE(FAILED, "pre_node is nullptr"); return FAILED); + string pre_node_name = pre_node->GetName(); + GELOGI("Add FpBp Iterator ctrl, pre node:%s.", pre_node_name.c_str()); + // 1. 
Get or add variables + NodePtr loop_cond_node = AddVariableNode(compute_graph, NODE_NAME_FLOWCTRL_LOOP_COND); + if (loop_cond_node == nullptr) { + GELOGE(FAILED, "Add variable:%s failed.", NODE_NAME_FLOWCTRL_LOOP_COND.c_str()); + return FAILED; + } + NodePtr loop_inc_node = AddVariableNode(compute_graph, NODE_NAME_FLOWCTRL_LOOP_INCREMENT); + if (loop_inc_node == nullptr) { + GELOGE(FAILED, "Add variable:%s failed.", NODE_NAME_FLOWCTRL_LOOP_INCREMENT.c_str()); + return FAILED; + } + NodePtr loop_reset_node = AddVariableNode(compute_graph, NODE_NAME_FLOWCTRL_LOOP_RESETVALUE); + if (loop_reset_node == nullptr) { + GELOGE(FAILED, "Add variable:%s failed.", NODE_NAME_FLOWCTRL_LOOP_RESETVALUE.c_str()); + return FAILED; + } + NodePtr iter_per_loop_node = AddVariableNode(compute_graph, NODE_NAME_FLOWCTRL_LOOP_PER_ITER); + if (iter_per_loop_node == nullptr) { + GELOGE(FAILED, "Add variable:%s failed.", NODE_NAME_FLOWCTRL_LOOP_PER_ITER.c_str()); + return FAILED; + } + + // 2. Add StreamSwitch + string switch_name = pre_node_name + "_" + NODE_NAME_STREAM_SWITCH; + NodePtr switch_node = InsertStreamSwitchOp(compute_graph, switch_name, loop_cond_node, iter_per_loop_node); + if (switch_node == nullptr) { + GELOGE(FAILED, "InsertStreamSwitchOp:%s failed.", switch_name.c_str()); + return FAILED; + } + GE_CHK_STATUS_RET(SetStreamLabel(switch_node, switch_name), "set stream label failed"); + + graphStatus add_ret = GraphUtils::AddEdge(pre_node->GetOutControlAnchor(), switch_node->GetInControlAnchor()); + if (add_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add pre node:%s to switch_node:%s ctrl edge failed, ret = %u.", pre_node_name.c_str(), + switch_name.c_str(), add_ret); + return FAILED; + } + + // 3. 
Create switch false branch: return results and reset the loopCond
  // (Step 3 of AddFpBpIteratorCtrl, continued.)
  Status ret = CreateIterCtrlFalseBranch(compute_graph, loop_cond_node, loop_reset_node, switch_node);
  if (ret != SUCCESS) {
    GELOGE(ret, "CreateIterCtrlFalseBranch fail, pre node:%s.", pre_node_name.c_str());
    return ret;
  }

  // 4. Create switch true branch:
  // active train streams and increase the loopCond
  ret = CreateIterCtrlTrueBranch(compute_graph, loop_cond_node, loop_inc_node, switch_node);
  if (ret != SUCCESS) {
    GELOGE(ret, "CreateIterCtrlTrueBranch fail, pre node:%s.", pre_node_name.c_str());
    return ret;
  }
  return SUCCESS;
}

///
/// Add the small-cycle iteration control around a special node (e.g. the
/// MemcpyAsync fed by an iterator): a StreamSwitch re-activates the
/// data-producing stream while the loop condition holds.
///
Status FlowCtrlPass::AddSpecialNodeIteratorCtrl(ComputeGraphPtr &compute_graph, NodePtr &loop_after_node) {
  /*
   * before add:
   *    iterator
   *      |
   *      v
   *   MemcpyAsync
   *
   * after add:
   *    iterator ----------+
   *      |                | c
   *      v      c         v     c
   *   MemcpyAsync-----> switch -----> active
   *                     ^
   *                    / \
   *          itersPerLoop loopCond
   */
  GE_IF_BOOL_EXEC(loop_after_node == nullptr || compute_graph == nullptr,
                  GELOGE(FAILED, "loop after node or compute graph is null");
                  return FAILED);
  // loop_pre_node is the data producer of loop_after_node (the iterator above).
  InDataAnchorPtr in_anchor = loop_after_node->GetInDataAnchor(0);
  if (in_anchor == nullptr || in_anchor->GetPeerOutAnchor() == nullptr) {
    GELOGE(FAILED, "Find %s in data anchor failed.", loop_after_node->GetName().c_str());
    return FAILED;
  }
  NodePtr loop_pre_node = in_anchor->GetPeerOutAnchor()->GetOwnerNode();

  // 1. Get variables (they must already have been created by AddFpBpIteratorCtrl).
  NodePtr loop_cond_node = compute_graph->FindNode(NODE_NAME_FLOWCTRL_LOOP_COND);
  if (loop_cond_node == nullptr) {
    GELOGE(FAILED, "Find node :%s failed.", NODE_NAME_FLOWCTRL_LOOP_COND.c_str());
    return FAILED;
  }
  NodePtr iter_per_loop_node = compute_graph->FindNode(NODE_NAME_FLOWCTRL_LOOP_PER_ITER);
  if (iter_per_loop_node == nullptr) {
    GELOGE(FAILED, "Find node :%s failed.", NODE_NAME_FLOWCTRL_LOOP_PER_ITER.c_str());
    return FAILED;
  }

  // 2. Add StreamSwitch and edges to switch_node.
  GE_IF_BOOL_EXEC(loop_pre_node == nullptr, GELOGE(FAILED, "loop pre node is null"); return FAILED);
  string switch_name = loop_pre_node->GetName() + "_" + NODE_NAME_STREAM_SWITCH;
  NodePtr switch_node = InsertStreamSwitchOp(compute_graph, switch_name, loop_cond_node, iter_per_loop_node);
  if (switch_node == nullptr) {
    GELOGE(FAILED, "InsertStreamSwitchOp:%s failed.", switch_name.c_str());
    return FAILED;
  }

  GE_CHK_STATUS_RET(SetStreamLabel(switch_node, switch_name), "set stream label failed");

  // Trigger the switch only after both the producer (loop_pre_node) and the
  // consumer (loop_after_node) of this iteration have finished.
  graphStatus add_ret = GraphUtils::AddEdge(loop_pre_node->GetOutControlAnchor(), switch_node->GetInControlAnchor());
  if (add_ret != GRAPH_SUCCESS) {
    GELOGE(FAILED, "Add loop_pre_node:%s to switch_node:%s ctrl edge failed, ret = %u.",
           loop_pre_node->GetName().c_str(), switch_name.c_str(), add_ret);
    return FAILED;
  }
  add_ret = GraphUtils::AddEdge(loop_after_node->GetOutControlAnchor(), switch_node->GetInControlAnchor());
  if (add_ret != GRAPH_SUCCESS) {
    GELOGE(FAILED, "Add node:%s to switch_node:%s ctrl edge failed, ret = %u.", loop_after_node->GetName().c_str(),
           switch_name.c_str(), add_ret);
    return FAILED;
  }

  // 3.
Create switch true branch: only active
  // (Step 3 of AddSpecialNodeIteratorCtrl, continued.) The true branch holds a
  // single StreamActive node that re-activates the producer's stream.
  string active_name = switch_name + "_StreamActive";
  NodePtr active_node = InsertOp(compute_graph, STREAMACTIVE, active_name, {}, {});
  if (active_node == nullptr) {
    GELOGE(FAILED, "Insert stream active node:%s for SpecialNodeIteratorCtrl failed.", active_name.c_str());
    return FAILED;
  }

  GE_CHK_STATUS_RET(SetStreamLabel(active_node, active_name), "set stream label failed");

  GE_IF_BOOL_EXEC(!AttrUtils::SetBool(active_node->GetOpDesc(), ATTR_NAME_IS_LOOP_ACTIVE, true),
                  GELOGE(FAILED, "set ATTR_NAME_IS_LOOP_ACTIVE failed");
                  return FAILED);

  add_ret = GraphUtils::AddEdge(switch_node->GetOutControlAnchor(), active_node->GetInControlAnchor());
  if (add_ret != GRAPH_SUCCESS) {
    GELOGE(FAILED, "Add switch_node:%s to active_node:%s ctrl edge failed, ret = %u.", switch_name.c_str(),
           active_name.c_str(), add_ret);
    return FAILED;
  }

  // used for stream assign to find true branch
  GE_CHK_STATUS_RET(SetActiveLabelList(switch_node, {active_name}), "set active label list failed");
  // used for stream assign to find active stream
  GE_CHK_STATUS_RET(SetActiveLabelList(active_node, {loop_pre_node->GetName()}), "set active label list failed");
  return SUCCESS;
}
}  // namespace ge
diff --git a/src/ge/graph/passes/flow_ctrl_pass.h b/src/ge/graph/passes/flow_ctrl_pass.h
new file mode 100644
index 00000000..4394d027
--- /dev/null
+++ b/src/ge/graph/passes/flow_ctrl_pass.h
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GE_GRAPH_PASSES_FLOW_CTRL_PASS_H_
#define GE_GRAPH_PASSES_FLOW_CTRL_PASS_H_

#include
#include

#include "common/ge_inner_error_codes.h"
#include "inc/graph_pass.h"

namespace ge {
///
/// Add flow control to the computeGraph
///
class FlowCtrlPass : public GraphPass {
 public:
  ///
  /// Add flow control to the computeGraph.
  /// @param compute_graph graph
  /// @return SUCCESS: do success
  ///         NOT_CHANGED : do nothing
  ///         Other: failed
  ///
  Status Run(ComputeGraphPtr compute_graph) override;

 private:
  ///
  /// Universal insert node to graph.
  /// @param compute_graph graph
  /// @param node_type inserted node type
  /// @param node_name inserted node name
  /// @param input_list input desc list
  /// @param output_list output desc list
  /// @return the inserted node. if insert failed return nullptr.
  ///
  NodePtr InsertOp(ComputeGraphPtr &compute_graph, const string &node_type, const string &node_name,
                   const std::vector &input_list, const std::vector &output_list);

  ///
  /// used for insert assign and assign add node.
  /// include add input desc info.
  /// @param compute_graph graph
  /// @param node_type node type(assign/assignAdd)
  /// @param node_name node name
  /// @param ref_node assign input0
  /// @param value_node assign input1
  /// @return the inserted node. if insert failed return nullptr.
  ///
  NodePtr InsertAssignOp(ComputeGraphPtr &compute_graph, const string &node_type, const string &node_name,
                         const NodePtr &ref_node, const NodePtr &value_node);

  ///
  /// insert StreamSwitch to graph.
  /// @param compute_graph graph
  /// @param switch_name inserted StreamSwitch node name
  /// @param loop_cond loop condition
  /// @param iter_per_loop iter per loop
  /// @return the inserted node. if insert failed return nullptr.
  ///
  NodePtr InsertStreamSwitchOp(ComputeGraphPtr &compute_graph, const string &switch_name, const NodePtr &loop_cond,
                               const NodePtr &iter_per_loop);

  ///
  /// check and add variable node to graph.
  /// if the variable already exists, do nothing.
  /// @param compute_graph graph
  /// @param name inserted variable node name
  /// @return the variable node. if insert failed return nullptr.
  ///
  NodePtr AddVariableNode(ComputeGraphPtr &compute_graph, const string &name);

  ///
  /// insert GlobalStepAssignAdd to graph.
  /// just for ME, please remove when ME do itself.
  /// @param compute_graph graph
  /// @param pre_node pre node
  /// @param global_step global step node
  /// @param loop_increment_node loop increment node
  /// @return the GlobalStepAssignAdd node. if insert failed return nullptr.
  ///
  NodePtr InsertGlobalStepAssignAddOp(ComputeGraphPtr &compute_graph, NodePtr &pre_node, const NodePtr &global_step,
                                      const NodePtr &loop_increment_node);

  ///
  /// create switch true branch for big cycle.
  /// @param compute_graph graph
  /// @param loop_cond_node loop condition node
  /// @param loop_increment_node loop increment node
  /// @param switch_node switch node
  /// @return SUCCESS: do success
  ///         Other: failed
  ///
  Status CreateIterCtrlTrueBranch(ComputeGraphPtr &compute_graph, const NodePtr &loop_cond_node,
                                  const NodePtr &loop_increment_node, NodePtr &switch_node);

  ///
  /// create switch false branch for big cycle.
  /// @param compute_graph graph
  /// @param loop_cond_node loop condition node
  /// @param loop_reset_node loop reset node
  /// @param switch_node switch node
  /// @return SUCCESS: do success
  ///         Other: failed
  ///
  Status CreateIterCtrlFalseBranch(ComputeGraphPtr &compute_graph, const NodePtr &loop_cond_node,
                                   const NodePtr &loop_reset_node, NodePtr &switch_node);

  ///
  /// add Fp/Bp iterator ctrl nodes(big cycle).
  /// @param compute_graph graph
  /// @param pre_node pre node(netoutput node)
  /// @return SUCCESS: do success
  ///         Other: failed
  ///
  Status AddFpBpIteratorCtrl(ComputeGraphPtr &compute_graph, NodePtr &pre_node);

  ///
  /// add special iterator ctrl nodes(small cycle).
  /// @param compute_graph graph
  /// @param loop_after_node pre node(iterate node)
  /// @return SUCCESS: do success
  ///         Other: failed
  ///
  Status AddSpecialNodeIteratorCtrl(ComputeGraphPtr &compute_graph, NodePtr &loop_after_node);
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_FLOW_CTRL_PASS_H_
diff --git a/src/ge/graph/passes/folding_kernel/add_kernel.cc b/src/ge/graph/passes/folding_kernel/add_kernel.cc
new file mode 100644
index 00000000..5d619798
--- /dev/null
+++ b/src/ge/graph/passes/folding_kernel/add_kernel.cc
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ + +#include "graph/passes/folding_kernel/add_kernel.h" + +#include + +#include "graph/common/bcast.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kAddFirstInput = 0; +const size_t kAddSecondInput = 1; +const size_t kAddFirstOutput = 0; +const size_t kAddInputSize = 2; +const size_t kAddOutputSize = 1; + +#define SET_BCAST_ADD_CASE(DTYPE, TYPE) \ + case (DTYPE): \ + ret = BCastAdd(op_desc_ptr, input, v_output); \ + break; + +#define SET_OVERFLOW_CHECK_SIGNED_CASE(DTYPE, MAX_VALUE, MIN_VALUE) \ + case (DTYPE): \ + if (((y > 0) && (x > ((MAX_VALUE)-y))) || ((y < 0) && (x < ((MIN_VALUE)-y)))) { \ + overflow_flag = true; \ + } \ + break; + +#define SET_OVERFLOW_CHECK_UNSIGNED_CASE(DTYPE, TYPE, MAX_VALUE) \ + case (DTYPE): { \ + TYPE threshold = static_cast(static_cast(MAX_VALUE) - y); \ + if (static_cast(x) > threshold) { \ + overflow_flag = true; \ + } \ + break; \ + } +} // namespace + +template +bool AddKernel::OverflowCheck(const T &x, const T &y, DataType data_type) { + bool overflow_flag = false; + + switch (data_type) { + SET_OVERFLOW_CHECK_SIGNED_CASE(DT_INT8, INT8_MAX, INT8_MIN) + SET_OVERFLOW_CHECK_SIGNED_CASE(DT_INT16, INT16_MAX, INT16_MIN) + SET_OVERFLOW_CHECK_SIGNED_CASE(DT_INT32, INT32_MAX, INT32_MIN) + SET_OVERFLOW_CHECK_SIGNED_CASE(DT_INT64, INT64_MAX, INT64_MIN) + SET_OVERFLOW_CHECK_SIGNED_CASE(DT_FLOAT, FLT_MAX, FLT_MIN) + SET_OVERFLOW_CHECK_SIGNED_CASE(DT_DOUBLE, DBL_MAX, DBL_MIN) + SET_OVERFLOW_CHECK_UNSIGNED_CASE(DT_UINT8, uint8_t, UINT8_MAX) + SET_OVERFLOW_CHECK_UNSIGNED_CASE(DT_UINT16, uint16_t, UINT16_MAX) + SET_OVERFLOW_CHECK_UNSIGNED_CASE(DT_UINT32, uint32_t, UINT32_MAX) + SET_OVERFLOW_CHECK_UNSIGNED_CASE(DT_UINT64, uint64_t, UINT64_MAX) + default: + break; + } + + return overflow_flag; +} + +template +Status AddKernel::BCastAdd(const OpDescPtr &op_desc_ptr, const std::vector &input, + std::vector &v_output) { + // only broadcast shape + BCast bcast; + Status ret = 
bcast.GenerateBcastInfo(ge::BCast::TransShapeToDimVec(input[kAddFirstInput]->GetTensorDesc()), + ge::BCast::TransShapeToDimVec(input[kAddSecondInput]->GetTensorDesc())); + if (ret != SUCCESS) { + GELOGE(ret, "Greater broadcasting failed."); + return ret; + } + + std::vector x_indexes; + std::vector y_indexes; + bcast.BCastIndexes(x_indexes, y_indexes); + + if (input[kAddFirstInput]->GetData().size() < sizeof(InT)) { + GELOGE(FAILED, "The size of the first input is less than the size of the InT."); + return FAILED; + } + auto x1_data = reinterpret_cast(input[kAddFirstInput]->GetData().data()); + + if (input[kAddSecondInput]->GetData().size() < sizeof(InT)) { + GELOGE(FAILED, "The size of the second input is less than the size of the InT."); + return FAILED; + } + auto x2_data = reinterpret_cast(input[kAddSecondInput]->GetData().data()); + + size_t data_num = x_indexes.size(); + InT *data = nullptr; + data = new (std::nothrow) InT[data_num](); + GE_CHECK_NOTNULL(data); + + DataType data_type = input[kAddFirstInput]->GetTensorDesc().GetDataType(); + for (size_t i = 0; i < data_num; i++) { + auto x_index = *(x1_data + x_indexes[i]); + auto y_index = *(x2_data + y_indexes[i]); + if (OverflowCheck(x_index, y_index, data_type)) { + GELOGE(PARAM_INVALID, "Result of add is overflow."); + GE_DELETE_NEW_ARRAY(data); + return PARAM_INVALID; + } + data[i] = x_index + y_index; + } + + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(kAddFirstOutput)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed"); + GE_DELETE_NEW_ARRAY(data); + return MEMALLOC_FAILED; + } + if (output_ptr->SetData(reinterpret_cast(data), data_num * sizeof(InT))) { + GELOGW("GetRange: SetData failed"); + } + GE_DELETE_NEW_ARRAY(data); + + output_ptr->MutableTensorDesc().SetDataType(data_type); + vector bcast_dims = bcast.GetOutputShape(); + output_ptr->MutableTensorDesc().SetShape(GeShape(bcast_dims)); + v_output.push_back(output_ptr); + + return SUCCESS; +} + 
// Constant-folding entry point for Add: validates the two constant inputs
// and dispatches to the typed broadcast-add by data type.
// Validation failures return NOT_CHANGED (fold is skipped, op runs at runtime).
Status AddKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input,
                          std::vector &v_output) {
  if (op_desc_ptr == nullptr) {
    GELOGE(PARAM_INVALID, "Op_desc_ptr must not be null.");
    return NOT_CHANGED;
  }
  // check how many inputs
  if ((input.size() != kAddInputSize) || (op_desc_ptr->GetOutputsSize() != kAddOutputSize)) {
    GELOGE(PARAM_INVALID, "The number of input for add must be %zu, output number must be %zu.", kAddInputSize,
           kAddOutputSize);
    return NOT_CHANGED;
  }

  // input vector elements must not be null
  if ((input[kAddFirstInput] == nullptr) || (input[kAddSecondInput] == nullptr)) {
    GELOGE(PARAM_INVALID, "Input vector elements must not be null.");
    return NOT_CHANGED;
  }

  // Inputs must have the same datatype.
  DataType data_type_0 = input[kAddFirstInput]->GetTensorDesc().GetDataType();
  DataType data_type_1 = input[kAddSecondInput]->GetTensorDesc().GetDataType();
  if (data_type_0 != data_type_1) {
    GELOGE(PARAM_INVALID, "Data type of inputs for add not matched, data_type_0:%s, data_type_1:%s",
           TypeUtils::DataTypeToSerialString(data_type_0).c_str(),
           TypeUtils::DataTypeToSerialString(data_type_1).c_str());
    return NOT_CHANGED;
  }

  // Checking whether the weightdef contains data
  if ((input[kAddFirstInput]->GetData().size() == 0) || (input[kAddSecondInput]->GetData().size() == 0)) {
    GELOGW("Data size of input0 is %zu, input1 is %zu.", input[kAddFirstInput]->GetData().size(),
           input[kAddSecondInput]->GetData().size());
    return NOT_CHANGED;
  }

  Status ret = NOT_CHANGED;
  switch (data_type_0) {
    SET_BCAST_ADD_CASE(DT_INT8, int8_t)
    SET_BCAST_ADD_CASE(DT_INT16, int16_t)
    SET_BCAST_ADD_CASE(DT_INT32, int32_t)
    SET_BCAST_ADD_CASE(DT_INT64, int64_t)
    SET_BCAST_ADD_CASE(DT_UINT8, uint8_t)
    SET_BCAST_ADD_CASE(DT_UINT16, uint16_t)
    SET_BCAST_ADD_CASE(DT_UINT32, uint32_t)
    SET_BCAST_ADD_CASE(DT_UINT64, uint64_t)
    SET_BCAST_ADD_CASE(DT_FLOAT, float)
    SET_BCAST_ADD_CASE(DT_DOUBLE, double)
    default:
      GELOGI("Add kernel data type %s not support.", TypeUtils::DataTypeToSerialString(data_type_0).c_str());
      return NOT_CHANGED;
  }

  if (ret != SUCCESS) {
    // NOTE(review): "Greater" looks copy-pasted from the Greater kernel;
    // this is the Add kernel's broadcast failure path.
    GELOGE(ret, "Greater broadcasting failed.");
    return NOT_CHANGED;
  }
  return SUCCESS;
}

REGISTER_KERNEL(ADD, AddKernel);
}  // namespace ge
diff --git a/src/ge/graph/passes/folding_kernel/add_kernel.h b/src/ge/graph/passes/folding_kernel/add_kernel.h
new file mode 100644
index 00000000..d56898fc
--- /dev/null
+++ b/src/ge/graph/passes/folding_kernel/add_kernel.h
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_ADD_KERNEL_H_
#define GE_GRAPH_PASSES_FOLDING_KERNEL_ADD_KERNEL_H_

#include

#include "inc/kernel.h"

namespace ge {
// Constant-folding kernel for the Add op: computes the broadcast sum of two
// constant tensors at graph-optimization time.
class AddKernel: public Kernel {
 public:
  // Returns true when x + y would overflow for the given data type.
  template
  bool OverflowCheck(const T &x, const T &y, DataType data_type);

  // Typed broadcast add of the two constant inputs; result appended to v_output.
  template
  Status BCastAdd(const OpDescPtr &op_desc_ptr,
                  const std::vector &input,
                  std::vector &v_output);
  // Folding entry point; validates inputs and dispatches by data type.
  Status Compute(const ge::OpDescPtr op_desc_ptr,
                 const std::vector &input, std::vector &v_output) override;
};
}  // namespace ge
#endif  // GE_GRAPH_PASSES_FOLDING_KERNEL_ADD_KERNEL_H_
diff --git a/src/ge/graph/passes/folding_kernel/broadcast_args_kernel.cc b/src/ge/graph/passes/folding_kernel/broadcast_args_kernel.cc
new file mode 100644
index 00000000..b513f737
--- /dev/null
+++ b/src/ge/graph/passes/folding_kernel/broadcast_args_kernel.cc
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/passes/folding_kernel/broadcast_args_kernel.h"

#include

#include "common/op/ge_op_utils.h"
#include "common/types.h"
#include "common/util.h"
#include "framework/common/ge_inner_error_codes.h"
#include "graph/common/bcast.h"
#include "graph/passes/pass_utils.h"
#include "inc/kernel_factory.h"

namespace ge {
namespace {
const size_t kBCastArgsInputsSize = 2;
const size_t kBCastArgsOutputsSize = 1;
}  // namespace

// Constant-folding kernel for BroadcastArgs: computes the broadcast output
// shape of two constant shape tensors and emits it as a constant tensor.
Status BroadcastArgsKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input,
                                    std::vector &v_output) {
  GELOGD("BroadcastArgsKernel in");
  if (op_desc_ptr == nullptr) {
    GELOGE(PARAM_INVALID, "Parameter's invalid, Input opDescPtr is nullptr.");
    return PARAM_INVALID;
  }
  // check input size
  bool size_check =
    (op_desc_ptr->GetAllInputsDesc().size() != kBCastArgsInputsSize || input.size() != kBCastArgsInputsSize ||
     op_desc_ptr->GetAllOutputsDesc().size() != kBCastArgsOutputsSize);
  if (size_check) {
    GELOGW("input/output size error. InDesc size:%zu,"
           "OutDesc size:%zu, in size:%zu ",
           op_desc_ptr->GetAllInputsDesc().size(), op_desc_ptr->GetAllOutputsDesc().size(), input.size());
    return NOT_CHANGED;
  }

  // Read both input tensors as dimension vectors.
  vector x1_dims;
  vector x2_dims;
  DataType data_type = op_desc_ptr->GetInputDesc(0).GetDataType();
  bool result = (OpUtils::GetShapeDataFromConstTensor(input[0], data_type, x1_dims) == SUCCESS) &&
                (OpUtils::GetShapeDataFromConstTensor(input[1], data_type, x2_dims) == SUCCESS);
  if (!result) {
    GELOGE(PARAM_INVALID, "GetShapeDataFromConstTensor fail.");
    return PARAM_INVALID;
  }

  BCast bcast;
  Status ret = bcast.GenerateBcastInfo(x1_dims, x2_dims);
  if (ret != SUCCESS) {
    GELOGE(ret, "GenerateBcastInfo fail.");
    return ret;
  }

  // Emit the broadcast result shape as the single constant output.
  vector bcast_dims = bcast.GetOutputShape();
  ret = PassUtils::ConstructTensorDescWithData(op_desc_ptr->GetOutputDesc(0), bcast_dims, v_output);
  if (ret != SUCCESS) {
    GELOGE(ret, "BroadcastArgs kernel construct tensor desc fail");
    return ret;
  }

  return SUCCESS;
}

REGISTER_KERNEL(BROADCASTARGS, BroadcastArgsKernel);
}  // namespace ge
diff --git a/src/ge/graph/passes/folding_kernel/broadcast_args_kernel.h b/src/ge/graph/passes/folding_kernel/broadcast_args_kernel.h
new file mode 100644
index 00000000..6d57976c
--- /dev/null
+++ b/src/ge/graph/passes/folding_kernel/broadcast_args_kernel.h
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_BROADCAST_ARGS_KERNEL_H_
#define GE_GRAPH_PASSES_FOLDING_KERNEL_BROADCAST_ARGS_KERNEL_H_

#include

#include "inc/kernel.h"

namespace ge {
// Constant-folding kernel for BroadcastArgs: folds the broadcast output shape
// of two constant shape tensors.
class BroadcastArgsKernel : public Kernel {
 public:
  Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input,
                 std::vector &v_output) override;
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_FOLDING_KERNEL_BROADCAST_ARGS_KERNEL_H_
diff --git a/src/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel.cc b/src/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel.cc
new file mode 100644
index 00000000..5fd5d576
--- /dev/null
+++ b/src/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel.cc
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/passes/folding_kernel/broadcast_gradient_args_kernel.h"

#include

#include "common/op/ge_op_utils.h"
#include "common/types.h"
#include "common/util.h"
#include "framework/common/debug/ge_log.h"
#include "framework/common/ge_inner_error_codes.h"
#include "graph/common/bcast.h"
#include "graph/passes/pass_utils.h"
#include "inc/kernel_factory.h"

namespace ge {
namespace {
const size_t kBCastGradArgsInputsSize = 2;
const size_t kBCastGradArgsOutputsSize = 2;
}  // namespace

// Constant-folding kernel for BroadcastGradientArgs: given two constant input
// shapes, emits the reduction indices needed to sum each gradient back to its
// input shape (one output tensor per input).
Status BroadcastGradientArgsKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input,
                                            std::vector &v_output) {
  GELOGD("BroadcastGradientArgs kernel in");
  if (op_desc_ptr == nullptr) {
    GELOGE(PARAM_INVALID, "Parameter's invalid, Input opDescPtr is nullptr.");
    return PARAM_INVALID;
  }
  // check input size
  bool size_check_fail =
    (op_desc_ptr->GetAllInputsDesc().size() != kBCastGradArgsInputsSize || input.size() != kBCastGradArgsInputsSize ||
     op_desc_ptr->GetAllOutputsDesc().size() != kBCastGradArgsOutputsSize);
  if (size_check_fail) {
    GELOGW("input/output size error. InDesc size:%zu,"
           "OutDesc size:%zu, in size:%zu ",
           op_desc_ptr->GetAllInputsDesc().size(), op_desc_ptr->GetAllOutputsDesc().size(), input.size());
    return NOT_CHANGED;
  }

  // Read both constant inputs as dimension vectors.
  vector x1_dims;
  vector x2_dims;
  DataType x1_data_type = op_desc_ptr->GetInputDesc(0).GetDataType();
  DataType x2_data_type = op_desc_ptr->GetInputDesc(1).GetDataType();
  bool result = (OpUtils::GetShapeDataFromConstTensor(input[0], x1_data_type, x1_dims) == SUCCESS) &&
                (OpUtils::GetShapeDataFromConstTensor(input[1], x2_data_type, x2_dims) == SUCCESS);
  if (!result) {
    GELOGE(PARAM_INVALID, "Get shape data from const tensor fail.");
    return PARAM_INVALID;
  }

  BCast bcast;
  Status ret = bcast.GenerateBcastInfo(x1_dims, x2_dims);
  if (ret != SUCCESS) {
    GELOGE(ret, "Generate bcast info fail.");
    return ret;
  }

  // Output 0: reduce indices for grad of x; output 1: reduce indices for grad of y.
  vector> grad_reduce_idx;
  grad_reduce_idx.push_back(bcast.GetGradXReduceIdx());
  grad_reduce_idx.push_back(bcast.GetGradYReduceIdx());

  for (size_t i = 0; i < grad_reduce_idx.size(); i++) {
    ret = PassUtils::ConstructTensorDescWithData(op_desc_ptr->GetOutputDesc(i), grad_reduce_idx[i], v_output);
    if (ret != SUCCESS) {
      GELOGE(ret, "BroadcastGradientArgs kernel construct tensor desc fail");
      return ret;
    }
  }

  return SUCCESS;
}

REGISTER_KERNEL(BROADCASTGRADIENTARGS, BroadcastGradientArgsKernel);
}  // namespace ge
diff --git a/src/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel.h b/src/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel.h
new file mode 100644
index 00000000..8f183653
--- /dev/null
+++ b/src/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel.h
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_BROADCAST_GRADIENT_ARGS_KERNEL_H_
#define GE_GRAPH_PASSES_FOLDING_KERNEL_BROADCAST_GRADIENT_ARGS_KERNEL_H_

#include

#include "inc/kernel.h"

namespace ge {
// Constant-folding kernel for BroadcastGradientArgs: folds the gradient
// reduction indices for a broadcast pair of constant shapes.
class BroadcastGradientArgsKernel : public Kernel {
 public:
  Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input,
                 std::vector &v_output) override;
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_FOLDING_KERNEL_BROADCAST_GRADIENT_ARGS_KERNEL_H_
diff --git a/src/ge/graph/passes/folding_kernel/cast_kernel.cc b/src/ge/graph/passes/folding_kernel/cast_kernel.cc
new file mode 100644
index 00000000..936f6b5c
--- /dev/null
+++ b/src/ge/graph/passes/folding_kernel/cast_kernel.cc
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/passes/folding_kernel/cast_kernel.h"

#include
#include

#include "common/debug/log.h"
#include "common/formats/formats.h"
#include "common/formats/utils/formats_trans_utils.h"
#include "common/fp16_t.h"
#include "common/op/ge_op_utils.h"
#include "common/types.h"
#include "common/util.h"
#include "framework/common/debug/ge_log.h"
#include "framework/common/ge_inner_error_codes.h"
#include "graph/common/bcast.h"
#include "graph/passes/folding_kernel/kernel_utils.h"
#include "graph/utils/type_utils.h"
#include "inc/kernel_factory.h"

namespace ge {
namespace {
const size_t kCastInputSize = 1;
}
// Constant-folding kernel for Cast: converts the single constant input
// tensor's data type to the op's output data type at optimization time.
// Only a pure dtype conversion is folded — format or shape changes return
// NOT_CHANGED so the op runs at runtime instead.
Status CastKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input,
                           std::vector &v_output) {
  GELOGD("CastKernel begin.");
  if (input.size() != kCastInputSize) {
    GELOGE(PARAM_INVALID, "The number of input for cast must be %zu.", kCastInputSize);
    return PARAM_INVALID;
  }
  ConstGeTensorPtr const_weight_ptr = input[0];
  if (const_weight_ptr == nullptr) {
    GELOGE(PARAM_INVALID, "Input const_weight_ptr is nullptr.");
    return PARAM_INVALID;
  }
  const uint8_t *src_data = const_weight_ptr->GetData().data();
  if (op_desc_ptr == nullptr || src_data == nullptr) {
    GELOGE(PARAM_INVALID, "Parameter's invalid, Input opDescPtr or src_data is nullptr.");
    return PARAM_INVALID;
  }
  // Source (input desc) vs destination (output desc) type/format/shape.
  GeTensorDesc op_desc = op_desc_ptr->GetOutputDesc(0);
  GeTensorDesc op_desc_in = op_desc_ptr->GetInputDesc(0);
  auto src_data_type = op_desc_in.GetDataType();
  auto src_shape = op_desc_in.GetShape();
  auto src_format = op_desc_in.GetFormat();
  auto data_type = op_desc.GetDataType();
  auto data_shape = op_desc.GetShape();
  auto data_format = op_desc.GetFormat();
  GELOGD("current node %s, format %s, input shape %s, data type %s, weight format %s, shape %s, data type %s. "
         "output format %s, shape %s, data type %s", op_desc_ptr->GetName().c_str(),
         TypeUtils::FormatToSerialString(src_format).c_str(),
         formats::ShapeToString(src_shape).c_str(),
         TypeUtils::DataTypeToSerialString(src_data_type).c_str(),
         TypeUtils::FormatToSerialString(const_weight_ptr->GetTensorDesc().GetFormat()).c_str(),
         formats::ShapeToString(const_weight_ptr->GetTensorDesc().GetShape()).c_str(),
         TypeUtils::DataTypeToSerialString(const_weight_ptr->GetTensorDesc().GetDataType()).c_str(),
         TypeUtils::FormatToSerialString(data_format).c_str(),
         formats::ShapeToString(data_shape).c_str(),
         TypeUtils::DataTypeToSerialString(data_type).c_str());

  GE_CHECK_SIZE(const_weight_ptr->GetData().GetSize());
  auto src_data_size = src_shape.GetShapeSize();
  // A scalar weight has shape size 0 but carries exactly one element.
  if (src_data_size == 0 &&
      static_cast(const_weight_ptr->GetData().GetSize()) == GetSizeByDataType(src_data_type)) {
    src_data_size = 1;
    GELOGD("Weight of the current const node is scalar");
  }
  const formats::CastArgs cast_args{src_data, static_cast(src_data_size), src_data_type, data_type};
  formats::TransResult trans_result;
  GELOGD("Trans data type from %s to %s, shape %s, data size %ld",
         TypeUtils::DataTypeToSerialString(src_data_type).c_str(),
         TypeUtils::DataTypeToSerialString(data_type).c_str(),
         formats::ShapeToString(src_shape).c_str(), src_data_size);

  // Fold only pure dtype casts: format and shape must match and the
  // conversion must be supported by the formats library.
  if (src_format != data_format || src_shape.GetDims() != data_shape.GetDims() ||
      !formats::IsTransDataTypeSupport(cast_args)) {
    GELOGW("Transfer from data type %s to %s, format %s to %s, shape %s to %s is not supported",
           TypeUtils::DataTypeToSerialString(src_data_type).c_str(),
           TypeUtils::DataTypeToSerialString(data_type).c_str(),
           TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(data_format).c_str(),
           formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(data_shape).c_str());
    return NOT_CHANGED;
  }
  if (!KernelUtils::CheckSizeForTransOp(const_weight_ptr, op_desc_ptr)) {
    GELOGE(FAILED, "CheckSize failed, input size is not equal to weight size");
    return NOT_CHANGED;
  }
  if (formats::TransDataType(cast_args, trans_result) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to trans data type from %s to %s, shape %s, data size %ld.",
           TypeUtils::DataTypeToSerialString(src_data_type).c_str(),
           TypeUtils::DataTypeToSerialString(data_type).c_str(),
           formats::ShapeToString(src_shape).c_str(), src_data_size);
    return NOT_CHANGED;
  }

  GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0));
  if (output_ptr == nullptr) {
    return FAILED;
  }
  if (output_ptr->SetData(trans_result.data.get(), trans_result.length) != SUCCESS) {
    GELOGW("Compute: SetData failed");
    return FAILED;
  }
  v_output.push_back(output_ptr);
  return SUCCESS;
}

REGISTER_KERNEL(CAST, CastKernel);
}  // namespace ge
diff --git a/src/ge/graph/passes/folding_kernel/cast_kernel.h b/src/ge/graph/passes/folding_kernel/cast_kernel.h
new file mode 100644
index 00000000..5212bad0
--- /dev/null
+++ b/src/ge/graph/passes/folding_kernel/cast_kernel.h
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_CAST_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_CAST_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class CastKernel : public Kernel { + public: + Status Compute(const OpDescPtr attr, const std::vector &input, + std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_CAST_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/concat_offset_kernel.cc b/src/ge/graph/passes/folding_kernel/concat_offset_kernel.cc new file mode 100644 index 00000000..e734b9dd --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/concat_offset_kernel.cc @@ -0,0 +1,100 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/concat_offset_kernel.h" + +#include + +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kConcatOffsetInputIndexZero = 0; +const size_t kConcatOffsetInputIndexOne = 1; +const int kNumOne = 1; +} // namespace +Status ConcatOffsetKernel::Compute(const OpDescPtr op_desc_ptr, const vector &input, + vector &v_output) { + GELOGI("ConcatOffsetKernel in."); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "input opdesc is nullptr."); + return PARAM_INVALID; + } + // validate attrs + int N = 0; + if (!(AttrUtils::GetInt(op_desc_ptr, "N", N))) { + GELOGE(PARAM_INVALID, "Attr %s is not exist.", "N"); + return NOT_CHANGED; + } + // follow IR def, the first input is concat_dim + ConstGeTensorPtr input_0 = input[kConcatOffsetInputIndexZero]; + GE_CHECK_NOTNULL(input_0); + int32_t concat_dim = *(const_cast(reinterpret_cast(input_0->GetData().data()))); + // validate inputs + if (static_cast(input.size()) != (N + kNumOne) || input.size() <= kConcatOffsetInputIndexOne) { + GELOGE(PARAM_INVALID, "The number of input for concat offset must be equal with %d, and must be more than one.", + (N + kNumOne)); + return NOT_CHANGED; + } + + // calculate ouput dim + GeShape output_shape = input[kConcatOffsetInputIndexOne]->GetTensorDesc().GetShape(); + int64_t output_size = output_shape.GetShapeSize(); + if (concat_dim >= output_size) { + GELOGE(PARAM_INVALID, "Concat dim is biger than the size of output_shape."); + return NOT_CHANGED; + } + GELOGI("Output shape size is %ld", output_size); + int32_t offset = 0; + unique_ptr buf(new (std::nothrow) int32_t[output_size]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new buf failed"); + return INTERNAL_ERROR; + } + for (size_t i = 0; i < static_cast(N); i++) { + buf[concat_dim] 
= offset; + // generate output + GeTensorPtr output_ptr = MakeShared(); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Failed to fold node %s, out of memeory", op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + output_ptr->MutableTensorDesc().SetDataType(DT_INT32); + output_ptr->MutableTensorDesc().SetShape(output_shape); + GE_IF_BOOL_EXEC(output_ptr->SetData(reinterpret_cast(buf.get()), + static_cast(sizeof(DT_INT32) * output_size)) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "set data failed"); + return NOT_CHANGED); + v_output.push_back(output_ptr); + // caculate offset + int64_t input_dim = input[i + kConcatOffsetInputIndexOne]->GetTensorDesc().GetShape().GetDim(concat_dim); + if (input_dim > (INT32_MAX - offset)) { + GELOGE(PARAM_INVALID, " %d and %ld addition can result in overflow!.", offset, input_dim); + return INTERNAL_ERROR; + } + offset += input_dim; + } + GELOGI("ConcatOffsetKernel success."); + return SUCCESS; +} +REGISTER_KERNEL(CONCATOFFSET, ConcatOffsetKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/concat_offset_kernel.h b/src/ge/graph/passes/folding_kernel/concat_offset_kernel.h new file mode 100644 index 00000000..b1e0958a --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/concat_offset_kernel.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_CONCAT_OFFSET_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_CONCAT_OFFSET_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class ConcatOffsetKernel : public Kernel { + public: + Status Compute(const OpDescPtr attr, const std::vector &input, + std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_CONCAT_OFFSET_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/concat_v2_kernel.cc b/src/ge/graph/passes/folding_kernel/concat_v2_kernel.cc new file mode 100644 index 00000000..0879ff86 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/concat_v2_kernel.cc @@ -0,0 +1,163 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/concat_v2_kernel.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kConcatV2InputNum = 3; +const std::set concatv2_supported_type = {DT_INT32, DT_FLOAT}; + +template +void GetOutputData(std::vector &y_data, int loop, size_t &input_size, const std::vector &input) { + for (int i = 0; i < loop; i++) { + for (size_t k = 0; k < input_size; k++) { + GeShape datak_shape = input.at(k)->GetTensorDesc().GetShape(); + const T *datak = reinterpret_cast(input.at(k)->GetData().data()); + int gapk = datak_shape.GetShapeSize() / loop; // [2,3] is 6/loop + for (int j = 0; j < gapk; j++) { + y_data.push_back(datak[j + gapk * i]); + } + } + } +} + +#define SET_OUTPUT(DTYPE, TYPE) \ + case DTYPE: \ + GetOutputData(y_data_##TYPE, loop, input_size, input); \ + (void)output_ptr->SetData(reinterpret_cast(y_data_##TYPE.data()), y_data_##TYPE.size() * length); \ + break; +} // namespace + +Status ConcatV2Kernel::Compute(const ge::OpDescPtr op_desc_ptr, const vector &input, + vector &v_output) { + GELOGI("ConcatV2Kernel in."); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "input opdesc is nullptr."); + return PARAM_INVALID; + } + int tidx = -1; + Status ret = ConcatV2PreCompute(input, tidx); + if (ret != SUCCESS) { + return ret; + } + + size_t input_size = input.size(); // N + 1 + input_size--; // N + + ConstGeTensorPtr tensor0 = input.at(0); + GE_CHECK_NOTNULL(tensor0); + DataType data_type = tensor0->GetTensorDesc().GetDataType(); + uint32_t length = 0; + if (!TypeUtils::GetDataTypeLength(data_type, length)) { + GELOGW("Can't GetDataTypeLength of data_type: %s", 
TypeUtils::DataTypeToSerialString(data_type).c_str()); + return NOT_CHANGED; + } + + std::vector y_data_int32_t; + std::vector y_data_float; + + GeTensorPtr output_ptr = MakeShared(); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "MakeShared failed."); + return MEMALLOC_FAILED; + } + + GeShape data0_shape = tensor0->GetTensorDesc().GetShape(); + int loop = 1; + for (int i = 0; i < tidx; i++) { + loop *= data0_shape.GetDim(i); + } + + switch (data_type) { + SET_OUTPUT(DT_INT32, int32_t) + SET_OUTPUT(DT_FLOAT, float) + default: + break; + } + output_ptr->MutableTensorDesc().SetDataType(data_type); + output_ptr->MutableTensorDesc().SetShape(GeShape({op_desc_ptr->GetOutputDesc(0).GetShape()})); + v_output.push_back(output_ptr); + GELOGI("ConcatV2Kernel success."); + return SUCCESS; +} + +Status ConcatV2Kernel::ConcatV2PreCompute(const std::vector &input, int &tidx) { + size_t input_size = input.size(); + // N >= 2 and N + 1 >= 3 + if (input_size < kConcatV2InputNum) { + GELOGI("The number of input for ConcatV2 must not be less than %zu.", kConcatV2InputNum); + return NOT_CHANGED; + } + + for (size_t i = 0; i < input_size; i++) { + if (input[i] == nullptr) { + GELOGI("Input%zu must not be null.", i); + return NOT_CHANGED; + } + if (input.at(i)->GetData().size() == 0) { + GELOGI("Check data size fail. 
input%zu size is 0.", i); + return NOT_CHANGED; + } + } + + input_size--; + ConstGeTensorPtr tensor0 = input.at(0); + GE_CHECK_NOTNULL(tensor0); + DataType data_type = tensor0->GetTensorDesc().GetDataType(); + for (size_t i = 1; i < input_size; i++) { + if (data_type != input.at(i)->GetTensorDesc().GetDataType()) { + GELOGI("Data type of N inputs for ConcatV2 not the same, check input %zu failed.", i); + return NOT_CHANGED; + } + } + + // check if input data type is supported + if (concatv2_supported_type.find(data_type) == concatv2_supported_type.end()) { + GELOGI("ConcatV2 does not support this Data type: %s.", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return NOT_CHANGED; + } + + ConstGeTensorPtr tensor_axis = input.at(input_size); + GE_CHECK_NOTNULL(tensor_axis); + const int *axis = reinterpret_cast(tensor_axis->GetData().data()); + tidx = axis[0]; // [-rank(values), rank(values)) + int dims = static_cast(tensor0->GetTensorDesc().GetShape().GetDimNum()); // rank + if (tidx < 0) { + tidx += dims; + } + if (tidx < 0 || tidx > dims) { + GELOGI("ConcatV2 tidx not legal."); + return NOT_CHANGED; + } + + return SUCCESS; +} + +REGISTER_KERNEL(CONCATV2, ConcatV2Kernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/concat_v2_kernel.h b/src/ge/graph/passes/folding_kernel/concat_v2_kernel.h new file mode 100644 index 00000000..c1514c80 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/concat_v2_kernel.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_CONCAT_V2_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_CONCAT_V2_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class ConcatV2Kernel : public Kernel { + public: + Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + + private: + Status ConcatV2PreCompute(const std::vector &input, int &tidx); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_CONCAT_V2_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/dynamic_stitch_kernel.cc b/src/ge/graph/passes/folding_kernel/dynamic_stitch_kernel.cc new file mode 100644 index 00000000..479711c8 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/dynamic_stitch_kernel.cc @@ -0,0 +1,197 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/dynamic_stitch_kernel.h" + +#include + +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const int kDoubleAttrN = 2; +} // namespace +Status DynamicStitchKernel::Compute(const OpDescPtr op_desc_ptr, const vector &input, + vector &v_output) { + GELOGI("DynamicStitch Kernel in."); + Status validate_ret = ValidateParams(op_desc_ptr, input); + if (validate_ret != SUCCESS) { + GELOGW("Dynamic stitch kernel params validate failed."); + return validate_ret; + } + + GE_CHECK_NOTNULL(input[n_]); + auto data_type = input[n_]->GetTensorDesc().GetDataType(); + Status ret; + switch (data_type) { + case DT_INT8: + ret = GenData(input, v_output); + break; + case DT_UINT8: + ret = GenData(input, v_output); + break; + case DT_INT16: + ret = GenData(input, v_output); + break; + case DT_UINT16: + ret = GenData(input, v_output); + break; + case DT_INT32: + ret = GenData(input, v_output); + break; + case DT_INT64: + ret = GenData(input, v_output); + break; + case DT_BOOL: + ret = GenData(input, v_output); + break; + case DT_FLOAT16: + ret = GenData(input, v_output); + break; + case DT_FLOAT: + ret = GenData(input, v_output); + break; + case DT_DOUBLE: + ret = GenData(input, v_output); + break; + default: + ret = NOT_CHANGED; + GELOGI("Dynamic stitch op not support data type of %s.", TypeUtils::DataTypeToSerialString(data_type).c_str()); + break; + } + if (ret != SUCCESS) { + GELOGW("Dynamic stitch folding failed."); + return ret; + } + GELOGI("Dynamic stitch end."); + return SUCCESS; +} + +Status DynamicStitchKernel::ValidateParams(const OpDescPtr &op_desc_ptr, const std::vector &input) { + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "input opdesc is 
nullptr."); + return PARAM_INVALID; + } + // validate input + // input[0]~input[N-1] is indices, input[N]~input[2N-1] is datas + if (input.empty()) { + GELOGI("Input is empty.Ignore dynamic stitch kernel."); + return NOT_CHANGED; + } + // validate attrs + if (!(AttrUtils::GetInt(op_desc_ptr, DYNAMIC_STITCH_ATTR_NAME_NUM, n_))) { + GELOGW("Attr %s is not exist.", DYNAMIC_STITCH_ATTR_NAME_NUM.c_str()); + return NOT_CHANGED; + } + // validate attr N and input.size + if ((kDoubleAttrN * n_) != static_cast(input.size())) { + GELOGW("Input size is not not match with attr N. Ignore dynamic stitch kernel."); + return NOT_CHANGED; + } + return SUCCESS; +} + +template +void DynamicStitchKernel::ComputeMergedShape(const vector &input, GeShape &merged_shape, + map &indice_data_mapping) { + // data[i].shape = indices[i].shape + constant + size_t indice_dim = input[0]->GetTensorDesc().GetShape().GetDimNum(); + // index n_ for input is less than size of input + GeShape input_n_shape = input[n_]->GetTensorDesc().GetShape(); + int64_t dim_offset = (input_n_shape.GetDimNum() == indice_dim) ? 0 : input_n_shape.GetDim(indice_dim); + + int64_t merged_first_dim = 0; + vector indice_dims; + for (int i = 0; i < n_; i++) { + // all index for input is less than size of input + indice_dims = input[i]->GetTensorDesc().GetShape().GetDims(); + int32_t *input_indice = const_cast(reinterpret_cast(input[i]->GetData().data())); + T *input_data = const_cast(reinterpret_cast(input[i + n_]->GetData().data())); + // scaler indice has one element + if (indice_dims.empty()) { + // if indice repeated, need new data replace old data + indice_data_mapping[input_indice[0]] = input_data[0]; + merged_first_dim = (merged_first_dim > input_indice[0]) ? 
merged_first_dim : input_indice[0]; + continue; + } + // vector indice element mapping + for (const auto &dim : indice_dims) { + for (auto j = 0; j < dim; j++) { + // if indice repeated, need new data replace old data + indice_data_mapping[input_indice[j]] = input_data[j]; + merged_first_dim = (merged_first_dim > input_indice[j]) ? merged_first_dim : input_indice[j]; + } + } + } + ++merged_first_dim; + + vector merged_dim_vec = {merged_first_dim}; + if (dim_offset != 0) { + merged_dim_vec.emplace_back(dim_offset); + GELOGI("merged_shape is [ %ld, %ld].", merged_first_dim, dim_offset); + } + merged_shape = GeShape(merged_dim_vec); + GELOGI("merged_shape is [ %ld ].", merged_first_dim); +} + +template +Status DynamicStitchKernel::GenData(const vector &input, vector &v_output) { + GeShape merged_shape; + map indice_data_mapping; + ComputeMergedShape(input, merged_shape, indice_data_mapping); + + int64_t output_size = merged_shape.GetShapeSize(); + unique_ptr buf(new (std::nothrow) T[output_size]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new buf failed"); + return INTERNAL_ERROR; + } + for (const auto &indice_data : indice_data_mapping) { + auto index = indice_data.first; + buf[index] = indice_data.second; + } + + GeTensorPtr output_ptr = MakeShared(); + if (output_ptr == nullptr) { + GELOGW("Fail to malloc output."); + return NOT_CHANGED; + } + auto dtype = input[n_]->GetTensorDesc().GetDataType(); + output_ptr->MutableTensorDesc().SetDataType(dtype); + output_ptr->MutableTensorDesc().SetShape(merged_shape); + + uint32_t length = 1; + if (!TypeUtils::GetDataTypeLength(dtype, length)) { + GELOGW("Can't GetDataTypeLength of data_type: %s", TypeUtils::DataTypeToSerialString(dtype).c_str()); + return NOT_CHANGED; + } + GE_IF_BOOL_EXEC(output_ptr->SetData(reinterpret_cast(buf.get()), + static_cast(output_size * length)) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "set data failed"); + return NOT_CHANGED); + v_output.push_back(output_ptr); + return SUCCESS; +} + 
+REGISTER_KERNEL(DYNAMICSTITCH, DynamicStitchKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/dynamic_stitch_kernel.h b/src/ge/graph/passes/folding_kernel/dynamic_stitch_kernel.h new file mode 100644 index 00000000..28de381f --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/dynamic_stitch_kernel.h @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_DYNAMIC_STITCH_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_DYNAMIC_STITCH_KERNEL_H_ + +#include +#include + +#include "inc/kernel.h" + +namespace ge { +class DynamicStitchKernel : public Kernel { + public: + Status Compute(const OpDescPtr attr, const std::vector &input, + std::vector &v_output) override; + + private: + Status ValidateParams(const OpDescPtr &attr, const std::vector &input); + template + void ComputeMergedShape(const vector &input, GeShape &merged_shape, + map &indice_data_mapping); + template + Status GenData(const vector &input, vector &v_output); + + int n_; // data input number +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_DYNAMIC_STITCH_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/empty_kernel.cc b/src/ge/graph/passes/folding_kernel/empty_kernel.cc new file mode 100644 index 00000000..1e9ced22 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/empty_kernel.cc @@ -0,0 +1,144 @@ +/** + * Copyright 
2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/folding_kernel/empty_kernel.h" + +#include + +#include "common/fp16_t.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/passes/pass_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kEmptyFirstInput = 0; +const size_t kEmptyFirstOutput = 0; +const size_t kEmptyInputsSize = 1; +const size_t kEmptyOutputsSize = 1; +const size_t kShapeMaxDims = 1; +} // namespace +Status EmptyKernel::EmptyCheck(const OpDescPtr &op_desc_ptr, const std::vector &input) { + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, Input opDescPtr is nullptr."); + return PARAM_INVALID; + } + // check input size + bool size_check = + ((op_desc_ptr->GetAllInputsDesc().size() != kEmptyInputsSize) || (input.size() != kEmptyInputsSize) || + (op_desc_ptr->GetAllOutputsDesc().size() != kEmptyOutputsSize)); + if (size_check) { + GELOGE(PARAM_INVALID, "Input/Output size error. 
InDesc size:%zu, OutDesc size:%zu, in size:%zu ", + op_desc_ptr->GetAllInputsDesc().size(), op_desc_ptr->GetAllOutputsDesc().size(), input.size()); + return PARAM_INVALID; + } + + if (input.at(kEmptyFirstInput) == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, first input is nullptr."); + return PARAM_INVALID; + } + ConstGeTensorPtr shape = input.at(kEmptyFirstInput); + // Check if the dimension is 1-D + if (shape->GetTensorDesc().GetShape().GetDimNum() > kShapeMaxDims) { + GELOGE(PARAM_INVALID, "Check if the dimension is 1-D failed, dims:%zu", + shape->GetTensorDesc().GetShape().GetDimNum()); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status EmptyKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGD("Empty kernel in"); + Status ret = EmptyCheck(op_desc_ptr, input); + if (ret != SUCCESS) { + return NOT_CHANGED; + } + + ConstGeTensorPtr shape = input.at(kEmptyFirstInput); + GE_CHECK_NOTNULL(shape); + int64_t total_data_size = 1; + std::vector shape_vec; + DataType shape_type = shape->GetTensorDesc().GetDataType(); + // Calculate user input dim + if (shape_type == DT_INT32) { + ret = KernelUtils::CalcDims(shape, shape_vec, total_data_size); + } else if (shape_type == DT_INT64) { + ret = KernelUtils::CalcDims(shape, shape_vec, total_data_size); + } else { + GELOGE(PARAM_INVALID, "shape type must be DT_INT32 or DT_INT64."); + return NOT_CHANGED; + } + + if (ret != SUCCESS) { + GELOGE(ret, "CalcDims failed, dim_type: %s", TypeUtils::DataTypeToSerialString(shape_type).c_str()); + return ret; + } + + auto output_tensor_desc = op_desc_ptr->GetOutputDesc(kEmptyFirstOutput); + GeTensorPtr output_ptr = MakeShared(output_tensor_desc); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed"); + return MEMALLOC_FAILED; + } + + DataType data_type = op_desc_ptr->GetOutputDesc(kEmptyFirstOutput).GetDataType(); + ret = PARAM_INVALID; + uint64_t data = 0; + switch (data_type) { 
+#define CASE(dtype, type) \ + case dtype: \ + ret = KernelUtils::GenData(total_data_size, (type)data, output_ptr); \ + break; + CASE(DT_FLOAT, float) + CASE(DT_FLOAT16, fp16_t) + CASE(DT_INT8, int8_t) + CASE(DT_INT16, int16_t) + CASE(DT_UINT16, uint16_t) + CASE(DT_UINT8, uint8_t) + CASE(DT_INT32, int32_t) + CASE(DT_INT64, int64_t) + CASE(DT_UINT32, uint32_t) + CASE(DT_UINT64, uint64_t) + CASE(DT_BOOL, bool) + CASE(DT_DOUBLE, double) +#undef CASE + default: + GELOGW("invalid data type: %s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + break; + } + + if (ret != SUCCESS) { + GELOGE(ret, "GenData failed, data_type: %s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return ret; + } + + output_ptr->MutableTensorDesc().SetShape(GeShape(shape_vec)); + output_ptr->MutableTensorDesc().SetDataType(DataType(data_type)); + Format format = op_desc_ptr->GetOutputDesc(kEmptyFirstOutput).GetFormat(); + output_ptr->MutableTensorDesc().SetFormat(format); + v_output.push_back(output_ptr); + GELOGI("Empty kernel success"); + return SUCCESS; +} + +REGISTER_KERNEL(EMPTY, EmptyKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/empty_kernel.h b/src/ge/graph/passes/folding_kernel/empty_kernel.h new file mode 100644 index 00000000..bc426048 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/empty_kernel.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_EMPTY_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_EMPTY_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class EmptyKernel : public Kernel { + public: + Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + + protected: + Status EmptyCheck(const OpDescPtr &op_desc_ptr, const std::vector &input); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_EMPTY_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/expanddims_kernel.cc b/src/ge/graph/passes/folding_kernel/expanddims_kernel.cc new file mode 100644 index 00000000..3d999a02 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/expanddims_kernel.cc @@ -0,0 +1,88 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/expanddims_kernel.h" + +#include + +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const int kExpandDimsIndexZero = 0; +const size_t kExpandDimsOutputDescNum = 1; +const size_t kExpandDimsInputNum = 2; +} // namespace +Status ExpanddimsKernel::Compute(const NodePtr &node_ptr) { + GELOGI("Expanddims dimension kernel in."); + if (node_ptr == nullptr) { + GELOGE(PARAM_INVALID, "parameter is nullptr"); + return PARAM_INVALID; + } + Status ret = KernelUtils::CheckDimensionNodeInfo(node_ptr); + if (ret != SUCCESS) { + GELOGW("GetDimensionNodeInfo failed"); + return ret; + } + + if (!KernelUtils::CheckFormatSupported(node_ptr)) { + GELOGW("CheckFormatSupported failed"); + return NOT_CHANGED; + } + GELOGI("Expanddims dimension kernel success."); + return SUCCESS; +} +Status ExpanddimsKernel::Compute(const ge::OpDescPtr op_desc_ptr, + const std::vector &input, + std::vector &v_output) { + GELOGI("Expanddims folding kernel in."); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Input opdesc is nullptr."); + return PARAM_INVALID; + } + if ((input.size() != kExpandDimsInputNum) || (op_desc_ptr->GetOutputsSize() != kExpandDimsOutputDescNum)) { + GELOGW("Unexpected ExpandDims node, node input size: %zu, node output size: %zu, node name: %s", input.size(), + op_desc_ptr->GetOutputsSize(), op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + auto output_tensor_desc = op_desc_ptr->GetOutputDesc(kExpandDimsIndexZero); + GeTensorPtr output_ptr = MakeShared(output_tensor_desc); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Failed to fold node %s, out of memory", op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + // print output tensor information, and will be deleted + 
GELOGI("Expanddims op %s output tensor data size is %zu", op_desc_ptr->GetName().c_str(), + output_ptr->GetData().size()); + size_t data_dim_size = output_ptr->GetTensorDesc().GetShape().GetDims().size(); + GELOGI("Expanddims op %s output tensor dim size is %zu", op_desc_ptr->GetName().c_str(), data_dim_size); + + if (output_ptr->SetData(input.at(kExpandDimsIndexZero)->GetData()) != GRAPH_SUCCESS) { + GELOGW("Compute: SetData failed"); + } + v_output.emplace_back(output_ptr); + GELOGI("Expanddims folding kernel success."); + return SUCCESS; +} +REGISTER_KERNEL(EXPANDDIMS, ExpanddimsKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/expanddims_kernel.h b/src/ge/graph/passes/folding_kernel/expanddims_kernel.h new file mode 100644 index 00000000..4970d89c --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/expanddims_kernel.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
/**
 * @ingroup ge
 * @brief Expanddims optimization operator processing
 */
class ExpanddimsKernel : public Kernel {
 public:
  // Node-form entry: validates the dimension node and checks format support.
  Status Compute(const NodePtr &node_ptr) override;
  // OpDesc-form entry: folds the node by copying input[0]'s data into the
  // pre-shaped output tensor and appending it to v_output.
  Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector<ge::ConstGeTensorPtr> &input,
                 std::vector<ge::GeTensorPtr> &v_output) override;
};
+ */ + +#include "graph/passes/folding_kernel/fill_kernel.h" + +#include +#include + +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +using ge::Status; + +namespace { +const int kFillInputSize = 2; +const int kFillDimsInputIndex = 0; +const int kFillDataInputIndex = 1; +} // namespace + +namespace ge { +Status FillKernel::Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + if (input.size() != kFillInputSize) { + GELOGW("fill input size must be %d", kFillInputSize); + return NOT_CHANGED; + } + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, Input opDescPtr is nullptr."); + return PARAM_INVALID; + } + + GE_CHECK_NOTNULL(input.at(kFillDimsInputIndex)); + GE_CHECK_NOTNULL(input.at(kFillDataInputIndex)); + + ConstGeTensorPtr dims = input.at(kFillDimsInputIndex); + ConstGeTensorPtr value = input.at(kFillDataInputIndex); + // Check if the value is a scalar + if (value->GetTensorDesc().GetShape().GetDimNum() != 0) { + GELOGW("value must be a scalar."); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr; + output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed"); + return MEMALLOC_FAILED; + } + + int64_t fill_size = 1; + std::vector vec_dim; + DataType dim_type = dims->GetTensorDesc().GetDataType(); + + // Calculate user input dim + Status ret = PARAM_INVALID; + if (dim_type == DT_INT32) { + ret = KernelUtils::CalcDims(dims, vec_dim, fill_size); + } else if (dim_type == DT_INT64) { + ret = KernelUtils::CalcDims(dims, vec_dim, fill_size); + } else { + GELOGE(PARAM_INVALID, "dim type must be DT_INT32 or DT_INT64."); + return PARAM_INVALID; + } + if (ret != SUCCESS) { + GELOGE(ret, 
"CalcDims failed, dim_type: %s", TypeUtils::DataTypeToSerialString(dim_type).c_str()); + return ret; + } + + // Generating a sequence of numbers + DataType data_type = value->GetTensorDesc().GetDataType(); + ret = PARAM_INVALID; + switch (data_type) { +#define CASE(dtype, type) \ + case dtype: \ + ret = KernelUtils::GenData(fill_size, *reinterpret_cast(value->GetData().data()), \ + output_ptr); \ + break; + CASE(DT_FLOAT, float) + CASE(DT_FLOAT16, fp16_t) + CASE(DT_INT8, int8_t) + CASE(DT_INT16, int16_t) + CASE(DT_UINT16, uint16_t) + CASE(DT_UINT8, uint8_t) + CASE(DT_INT32, int32_t) + CASE(DT_INT64, int64_t) + CASE(DT_UINT32, uint32_t) + CASE(DT_UINT64, uint64_t) + CASE(DT_BOOL, bool) + CASE(DT_DOUBLE, double) +#undef CASE + default: + GELOGE(PARAM_INVALID, "invalid data type: %s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + break; + } + if (ret != SUCCESS) { + GELOGE(ret, "GenData failed, data_type: %s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return ret; + } + + output_ptr->MutableTensorDesc().SetShape(GeShape(vec_dim)); + output_ptr->MutableTensorDesc().SetDataType(DataType(data_type)); + v_output.push_back(output_ptr); + + return SUCCESS; +} +REGISTER_KERNEL(FILL, FillKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/fill_kernel.h b/src/ge/graph/passes/folding_kernel/fill_kernel.h new file mode 100644 index 00000000..a1b6b4ef --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/fill_kernel.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
/**
 * @ingroup ge
 * @brief Compile-time folding kernel for the Fill operator: materializes a
 *        tensor of the requested shape filled with a scalar value.
 */
class FillKernel : public Kernel {
 public:
  // input[0] is the dims tensor (int32/int64), input[1] the scalar value;
  // the folded tensor is appended to v_output.
  Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector<ge::ConstGeTensorPtr> &input,
                 std::vector<ge::GeTensorPtr> &v_output) override;
};
+ */ + +#include "graph/passes/folding_kernel/floordiv_kernel.h" + +#include + +#include +#include + +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kFloorDivInputX = 0; +const size_t kFloorDivInputY = 1; +const size_t kFloorDivTensorShapeIsEmpty = 0; +const size_t kFloorDivInputSize = 2; +const std::set kFloorDivSupportedType = {DT_FLOAT, DT_DOUBLE, DT_UINT8, DT_INT8, + DT_UINT16, DT_INT16, DT_INT32, DT_INT64}; +} // namespace +Status FloorDivKernel::FloorDivCheck(const OpDescPtr &op_desc_ptr, + const std::vector &input) const { + // check input size + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "input opdesc is nullptr."); + return PARAM_INVALID; + } + if (input.size() != kFloorDivInputSize) { + GELOGW("Unexpected FloorDiv node, node input size: %zu, node name: %s", input.size(), + op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + + // check dims of x and y + ConstGeTensorPtr x_tensor = input.at(kFloorDivInputX); + ConstGeTensorPtr y_tensor = input.at(kFloorDivInputY); + GE_CHECK_NOTNULL(x_tensor); + GE_CHECK_NOTNULL(y_tensor); + if (x_tensor->GetTensorDesc().GetShape().GetDimNum() != kFloorDivTensorShapeIsEmpty && + y_tensor->GetTensorDesc().GetShape().GetDimNum() != kFloorDivTensorShapeIsEmpty) { + // x and y are not scalars + vector x_dims = x_tensor->GetTensorDesc().GetShape().GetDims(); + vector y_dims = y_tensor->GetTensorDesc().GetShape().GetDims(); + if (x_dims.size() != y_dims.size()) { + GELOGW("FloorDivKernel dims of x and y do not match, node name: %s", op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } else { + for (size_t i = 0; i < x_dims.size(); ++i) { + if (x_dims[i] != y_dims[i]) { + GELOGW("FloorDivKernel dims of x and y do not match, node name: %s", op_desc_ptr->GetName().c_str()); + 
return PARAM_INVALID; + } + } + } + } + + // check data type + DataType x_data_dtype = x_tensor->GetTensorDesc().GetDataType(); + DataType y_data_dtype = y_tensor->GetTensorDesc().GetDataType(); + if (x_data_dtype != y_data_dtype) { + GELOGW("FloorDivKernel data type of x and y do not match, x data type is %s, but y data type is %s, node name: %s.", + TypeUtils::DataTypeToSerialString(x_data_dtype).c_str(), + TypeUtils::DataTypeToSerialString(y_data_dtype).c_str(), op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + if (kFloorDivSupportedType.find(x_data_dtype) == kFloorDivSupportedType.end()) { + GELOGW("FloorDivKernel data type %s not support, node name: %s", + TypeUtils::DataTypeToSerialString(x_data_dtype).c_str(), op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + + // check data + if (x_tensor->GetData().size() == 0 || y_tensor->GetData().size() == 0) { + GELOGW("FloorDivKernel data size of inputs is 0, node name: %s", op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +void FloorDivKernel::ShapeCal(const std::vector &input, GeTensorPtr output_ptr) { + vector output_dims; + size_t x_dim = input.at(kFloorDivInputX)->GetTensorDesc().GetShape().GetDimNum(); + size_t y_dim = input.at(kFloorDivInputY)->GetTensorDesc().GetShape().GetDimNum(); + if (x_dim >= y_dim) { + output_dims = input.at(kFloorDivInputX)->GetTensorDesc().GetShape().GetDims(); + } else { + output_dims = input.at(kFloorDivInputY)->GetTensorDesc().GetShape().GetDims(); + } + output_ptr->MutableTensorDesc().SetShape(GeShape(output_dims)); +} + +template +T FloorDivKernel::DivCal(const T &x_i, const T &y_i) { + if ((x_i < static_cast(0)) != (y_i < static_cast(0))) { + T abs_x_i = std::abs(x_i); + T abs_y_i = std::abs(y_i); + return static_cast(static_cast(-(abs_x_i + abs_y_i - 1) / abs_y_i)); + } else { + return static_cast(static_cast(x_i / y_i)); + } +} + +template +bool FloorDivKernel::ZeroCheck(const T &element, DataType data_type) { + 
bool result = false; + if (data_type == DT_UINT8 || data_type == DT_INT8 || data_type == DT_UINT16 || data_type == DT_INT16 || + data_type == DT_INT32 || data_type == DT_INT64) { + result = (element == 0); + } else if (data_type == DT_FLOAT) { + result = (fabs(element) < FLT_EPSILON); + } else if (data_type == DT_DOUBLE) { + result = (fabs(element) < DBL_EPSILON); + } + return result; +} + +template +Status FloorDivKernel::DataCalBroadcast(const T &x, const T &y, size_t num_x, size_t num_y, DataType data_type, + GeTensorPtr output_ptr) { + size_t data_num = (num_x > num_y) ? num_x : num_y; + unique_ptr buf(new (std::nothrow) T[data_num]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new buf failed"); + return INTERNAL_ERROR; + } + + if (num_x > num_y) { + if (ZeroCheck(y, data_type)) { + GELOGE(PARAM_INVALID, "The divisor of FloorDiv con not be zero"); + return PARAM_INVALID; + } + for (size_t i = 0; i < num_x; ++i) { + buf[i] = DivCal((&x)[i], y); + } + } else { + for (size_t i = 0; i < num_y; ++i) { + if (ZeroCheck((&y)[i], data_type)) { + GELOGE(PARAM_INVALID, "The divisor of FloorDiv con not be zero"); + return PARAM_INVALID; + } + buf[i] = DivCal(x, (&y)[i]); + } + } + if (output_ptr->SetData(reinterpret_cast(buf.get()), data_num * sizeof(T)) != GRAPH_SUCCESS) { + GELOGE(PARAM_INVALID, "set data failed"); + return PARAM_INVALID; + } + + return SUCCESS; +} + +template +Status FloorDivKernel::DataCal(const std::vector &input, GeTensorPtr output_ptr) { + ConstGeTensorPtr x_tensor = input.at(kFloorDivInputX); + ConstGeTensorPtr y_tensor = input.at(kFloorDivInputY); + GE_CHECK_NOTNULL(x_tensor); + GE_CHECK_NOTNULL(y_tensor); + T *x = const_cast(reinterpret_cast(x_tensor->GetData().GetData())); + T *y = const_cast(reinterpret_cast(y_tensor->GetData().GetData())); + if (x == nullptr || y == nullptr) { + GELOGE(PARAM_INVALID, "Input tensor is nullptr."); + return PARAM_INVALID; + } + + size_t data_num_x = x_tensor->GetData().size() / sizeof(T); + size_t 
data_num_y = y_tensor->GetData().size() / sizeof(T); + DataType data_type = x_tensor->GetTensorDesc().GetDataType(); + if (x_tensor->GetTensorDesc().GetShape().GetDimNum() == y_tensor->GetTensorDesc().GetShape().GetDimNum()) { + // x and y are both scalars or vector, no need broadcast + unique_ptr buf(new (std::nothrow) T[data_num_x]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new buf failed"); + return INTERNAL_ERROR; + } + + for (size_t i = 0; i < data_num_x; ++i) { + if (ZeroCheck(y[i], data_type)) { + GELOGE(PARAM_INVALID, "The divisor of FloorDiv con not be zero"); + return PARAM_INVALID; + } + buf[i] = DivCal(x[i], y[i]); + } + if (output_ptr->SetData(reinterpret_cast(buf.get()), data_num_x * sizeof(T)) != GRAPH_SUCCESS) { + GELOGE(PARAM_INVALID, "set data failed"); + return PARAM_INVALID; + } + } else { + // x-y is vector-scalar, need broadcast + if (DataCalBroadcast(*x, *y, data_num_x, data_num_y, data_type, output_ptr) != SUCCESS) { + return PARAM_INVALID; + } + } + return SUCCESS; +} + +Status FloorDivKernel::ComputeByDataType(DataType data_type, const std::vector &input, + GeTensorPtr output_ptr) { + Status ret; + switch (data_type) { + case DT_FLOAT: + ret = DataCal(input, output_ptr); + break; + case DT_DOUBLE: + ret = DataCal(input, output_ptr); + break; + case DT_UINT8: + ret = DataCal(input, output_ptr); + break; + case DT_INT8: + ret = DataCal(input, output_ptr); + break; + case DT_UINT16: + ret = DataCal(input, output_ptr); + break; + case DT_INT16: + ret = DataCal(input, output_ptr); + break; + case DT_INT32: + ret = DataCal(input, output_ptr); + break; + case DT_INT64: + ret = DataCal(input, output_ptr); + break; + default: + GELOGI("FloorDivKernel does not support Data type:%s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return PARAM_INVALID; + } + return ret; +} + +Status FloorDivKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGI("FloorDivKernel in"); + if 
(FloorDivCheck(op_desc_ptr, input) != SUCCESS) { + GELOGW("FloorDivKernel input is invalid, failed to fold node."); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr = MakeShared(); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed, node name %s.", op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + // calculate shape + ShapeCal(input, output_ptr); + + // calculate data and data type + DataType x_data_dtype = input.at(kFloorDivInputX)->GetTensorDesc().GetDataType(); + output_ptr->MutableTensorDesc().SetDataType(x_data_dtype); + if (ComputeByDataType(x_data_dtype, input, output_ptr) != SUCCESS) { + return NOT_CHANGED; + } + + // print output tensor information, and will be deleted + GELOGD("FloorDiv op %s output tensor data size is %zu", op_desc_ptr->GetName().c_str(), output_ptr->GetData().size()); + vector data_dims = output_ptr->GetTensorDesc().GetShape().GetDims(); + GELOGD("FloorDiv op %s output tensor dim size is %zu", op_desc_ptr->GetName().c_str(), data_dims.size()); + + v_output.emplace_back(output_ptr); + GELOGI("FloorDivKernel success."); + return SUCCESS; +} +REGISTER_KERNEL(FLOORDIV, FloorDivKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/floordiv_kernel.h b/src/ge/graph/passes/folding_kernel/floordiv_kernel.h new file mode 100644 index 00000000..c8505731 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/floordiv_kernel.h @@ -0,0 +1,50 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
// Compile-time folding kernel for FloorDiv: floor(x / y), elementwise, with
// scalar-vs-vector broadcasting.
class FloorDivKernel : public Kernel {
 public:
  Status Compute(const OpDescPtr op_desc_ptr, const std::vector<ConstGeTensorPtr> &input,
                 std::vector<GeTensorPtr> &v_output) override;

 private:
  // Validates input count, shapes, matching supported dtype, non-empty data.
  Status FloorDivCheck(const OpDescPtr &op_desc_ptr, const std::vector<ConstGeTensorPtr> &input) const;
  // Sets the output shape to the higher-rank operand's shape.
  void ShapeCal(const std::vector<ConstGeTensorPtr> &input, GeTensorPtr output_ptr);
  // floor(x_i / y_i) for a single element pair.
  template <typename T>
  T DivCal(const T &x_i, const T &y_i);
  // True when element is zero (epsilon-compared for floating types).
  template <typename T>
  bool ZeroCheck(const T &element, DataType data_type);
  // Scalar-vs-vector broadcast computation path.
  template <typename T>
  Status DataCalBroadcast(const T &x, const T &y, size_t num_x, size_t num_y, DataType data_type,
                          GeTensorPtr output_ptr);
  // Same-rank elementwise computation; falls back to broadcast otherwise.
  template <typename T>
  Status DataCal(const std::vector<ConstGeTensorPtr> &input, ge::GeTensorPtr output_ptr);
  // Dtype dispatch over DataCal<T>.
  Status ComputeByDataType(DataType data_type, const std::vector<ConstGeTensorPtr> &input, GeTensorPtr output_ptr);

  // NOTE(review): these members are never referenced by the implementation
  // visible in the .cc file — presumably leftovers; confirm before removing.
  int64_t axis_dim_;
  int64_t head_dim_;
  int64_t end_dim_;
};
// Computes floor(x / y); instantiated here with int32_t. Plain '/' truncates
// toward zero, so when the operands' signs differ the inexact quotient must be
// pushed one step toward negative infinity. The sign-differing branch does
// that using only non-negative intermediates:
//   (1 - |x|) / |y| - 1  ==  -ceil(|x| / |y|)  (for integral T)
template <typename T>
T FloorDiv(T const &x, T const &y) {
  const bool signs_differ = (x < static_cast<T>(0)) != (y < static_cast<T>(0));
  if (!signs_differ) {
    return x / y;
  }
  T abs_x = std::abs(x);
  T abs_y = std::abs(y);
  return static_cast<T>((1 - abs_x) / abs_y - 1);
}
op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGD("FloorModKernel in"); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, input opDescPtr is nullptr."); + return PARAM_INVALID; + } + Status ret = FloorModCheck(input); + if (ret != SUCCESS) { + return ret; + } + + std::vector y_data_int32_t; + DataType data_type = input[kFloorModInputX]->GetTensorDesc().GetDataType(); + BCast bcast; + switch (data_type) { + SET_BCAST_COMPUTE_CASE(DT_INT32, int32_t) + default: + ret = NOT_CHANGED; + break; + } + + if (ret != SUCCESS) { + GELOGW("BCastCompute fail, data_type: %s, ret: %s", TypeUtils::DataTypeToSerialString(data_type).c_str(), + GET_ERRORNO_STR(ret).c_str()); + return NOT_CHANGED; + } + + uint32_t length = 1; + if (!TypeUtils::GetDataTypeLength(data_type, length)) { + GELOGW("Can't GetDataTypeLength of data_type: %s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(kFloorModFirstOutput)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed, node name %s.", op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + output_ptr->MutableTensorDesc().SetShape(GeShape(bcast.GetOutputShape())); + // only return GRAPH_SUCCESS here + switch (data_type) { + SET_OUTPUT(DT_INT32, int32_t) + default: + break; + } + output_ptr->MutableTensorDesc().SetDataType(data_type); + v_output.push_back(output_ptr); + GELOGD("FloorModKernel success"); + + return SUCCESS; +} + +Status FloorModKernel::FloorModCheck(const std::vector &input) { + // check input number + if (input.size() != kFloorModInputNum) { + GELOGI("The number of input for FloorMod must be %zu.", kFloorModInputNum); + return NOT_CHANGED; + } + + ConstGeTensorPtr input_x1 = input.at(kFloorModInputX); + ConstGeTensorPtr input_x2 = input.at(kFloorModInputY); + GE_CHECK_NOTNULL(input_x1); + GE_CHECK_NOTNULL(input_x2); + // check whether 
there is data in Tensor + if (input_x1->GetData().size() == 0 || input_x2->GetData().size() == 0) { + GELOGI("Check data size fail. x1: %zu, x2: %zu", input_x1->GetData().size(), input_x2->GetData().size()); + return NOT_CHANGED; + } + + // check whether the data types are the same + DataType type = input_x1->GetTensorDesc().GetDataType(); + if (type != input_x2->GetTensorDesc().GetDataType()) { + GELOGI("Data type of inputs for FloorMod not matched."); + return NOT_CHANGED; + } + + // check if input data type is supported + if (kFloorModSupportedType.find(type) == kFloorModSupportedType.end()) { + GELOGI("FloorMod does not support this Data type: %s", TypeUtils::DataTypeToSerialString(type).c_str()); + return NOT_CHANGED; + } + + return SUCCESS; +} + +REGISTER_KERNEL(FLOORMOD, FloorModKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/floormod_kernel.h b/src/ge/graph/passes/folding_kernel/floormod_kernel.h new file mode 100644 index 00000000..faa5c8e2 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/floormod_kernel.h @@ -0,0 +1,36 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
// Compile-time folding kernel for FloorMod: mod(x, y) = x - y * floor(x / y).
class FloorModKernel : public Kernel {
 public:
  // Folds the node when both inputs are constant; result appended to v_output.
  Status Compute(const OpDescPtr op_desc_ptr, const std::vector<ConstGeTensorPtr> &input,
                 std::vector<GeTensorPtr> &v_output) override;

 private:
  // Validates input count, non-empty data, matching and supported dtype.
  Status FloorModCheck(const std::vector<ConstGeTensorPtr> &input);
};
+ */ + +#include "graph/passes/folding_kernel/gather_v2_kernel.h" + +#include +#include + +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kGatherV2InputIndexZero = 0; +const size_t kGatherV2InputIndexOne = 1; +const size_t kGatherV2InputIndexTwo = 2; +const size_t kGatherV2InputIndexThree = 3; +const size_t kGatherV2DimOne = 1; +const size_t kGatherV2InpotNum = 3; +const size_t kMaxIndicatesDims = 1; // only support scalar and 1 dims indicates_ +const std::set supported_type = {DT_FLOAT16, DT_DOUBLE, DT_INT8, DT_INT16, DT_INT16, DT_INT32, + DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, DT_UINT64}; +} // namespace +template +Status GatherV2Kernel::ProcessAxis0(ConstGeTensorPtr tensor_x, GeTensorPtr output) { + Status ret = SUCCESS; + T *data_ptr_x = reinterpret_cast(const_cast(tensor_x->GetData().data())); + T *data_ptr_y = reinterpret_cast(const_cast(output->GetData().data())); + // index is valid, and no bigger than kGatherV2InputIndexZero + size_t output_size = output->GetData().size(); + for (int64_t i = 0; i < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexZero); i++) { + T *data_ptr_x_tmp = data_ptr_x + indicates_[i] * xstride_[kGatherV2InputIndexZero]; + T *data_ptr_y_tmp = data_ptr_y + i * ystride_[kGatherV2InputIndexZero]; + size_t size = sizeof(T) * xstride_[kGatherV2InputIndexZero]; + if (data_ptr_y_tmp - data_ptr_y < 0) { + GELOGE(PARAM_INVALID, "ptr_y - ptr_y_tmp less than zero"); + return PARAM_INVALID; + } + size_t offset_size = (data_ptr_y_tmp - data_ptr_y) * sizeof(T); + auto ret_mem = memcpy_s(reinterpret_cast(data_ptr_y_tmp), output_size - offset_size, + reinterpret_cast(data_ptr_x_tmp), size); + if (ret_mem != 0) { + 
GELOGE(MEMALLOC_FAILED, "memcpy failed!"); + return MEMALLOC_FAILED; + } + } + return ret; +} + +template +Status GatherV2Kernel::ProcessAxis1(ConstGeTensorPtr tensor_x, GeTensorPtr output) { + Status ret = SUCCESS; + T *data_ptr_x = reinterpret_cast(const_cast(tensor_x->GetData().data())); + T *data_ptr_y = reinterpret_cast(const_cast(output->GetData().data())); + // index is valid, and no bigger than kGatherV2InputIndexOne + size_t output_size = output->GetData().size(); + for (int64_t i = 0; i < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexZero); i++) { + T *data_ptr_x_i = data_ptr_x + i * xstride_[kGatherV2InputIndexZero]; + T *data_ptr_y_i = data_ptr_y + i * ystride_[kGatherV2InputIndexZero]; + for (int64_t j = 0; j < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexOne); j++) { + T *data_ptr_x_tmp = data_ptr_x_i + indicates_[j] * xstride_[kGatherV2InputIndexOne]; + T *data_ptr_y_tmp = data_ptr_y_i + j * ystride_[kGatherV2InputIndexOne]; + size_t size = sizeof(T) * xstride_[kGatherV2InputIndexOne]; + if (data_ptr_y_tmp - data_ptr_y < 0) { + GELOGE(PARAM_INVALID, "ptr_y - ptr_y_tmp less than zero"); + return PARAM_INVALID; + } + size_t offset_size = (data_ptr_y_tmp - data_ptr_y) * sizeof(T); + auto ret_mem = memcpy_s(reinterpret_cast(data_ptr_y_tmp), output_size - offset_size, + reinterpret_cast(data_ptr_x_tmp), size); + if (ret_mem != 0) { + GELOGE(MEMALLOC_FAILED, "memcpy failed!"); + return MEMALLOC_FAILED; + } + } + } + return ret; +} + +template +Status GatherV2Kernel::ProcessAxis2(ConstGeTensorPtr tensor_x, GeTensorPtr output) { + Status ret = SUCCESS; + T *data_ptr_x = reinterpret_cast(const_cast(tensor_x->GetData().data())); + T *data_ptr_y = reinterpret_cast(const_cast(output->GetData().data())); + // index is valid, and no bigger than kGatherV2InputIndexTwo + size_t output_size = output->GetData().size(); + for (int64_t i = 0; i < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexZero); i++) { + T *data_ptr_x_i = 
data_ptr_x + i * xstride_[kGatherV2InputIndexZero]; + T *data_ptr_y_i = data_ptr_y + i * ystride_[kGatherV2InputIndexZero]; + for (int64_t j = 0; j < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexOne); j++) { + T *data_ptr_x_j = data_ptr_x_i + j * xstride_[kGatherV2InputIndexOne]; + T *data_ptr_y_j = data_ptr_y_i + j * ystride_[kGatherV2InputIndexOne]; + for (int64_t m = 0; m < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexTwo); m++) { + T *data_ptr_x_tmp = data_ptr_x_j + indicates_[m] * xstride_[kGatherV2InputIndexTwo]; + T *data_ptr_y_tmp = data_ptr_y_j + m * ystride_[kGatherV2InputIndexTwo]; + size_t size = sizeof(T) * xstride_[kGatherV2InputIndexTwo]; + if (data_ptr_y_tmp - data_ptr_y < 0) { + GELOGE(PARAM_INVALID, "ptr_y - ptr_y_tmp less than zero"); + return PARAM_INVALID; + } + size_t offset_size = (data_ptr_y_tmp - data_ptr_y) * sizeof(T); + auto ret_mem = memcpy_s(reinterpret_cast(data_ptr_y_tmp), output_size - offset_size, + reinterpret_cast(data_ptr_x_tmp), size); + if (ret_mem != 0) { + GELOGE(MEMALLOC_FAILED, "memcpy failed!"); + return MEMALLOC_FAILED; + } + } + } + } + return ret; +} + +template +Status GatherV2Kernel::ProcessAxis3(ConstGeTensorPtr tensor_x, GeTensorPtr output) { + Status ret = SUCCESS; + T *data_ptr_x = reinterpret_cast(const_cast(tensor_x->GetData().data())); + T *data_ptr_y = reinterpret_cast(const_cast(output->GetData().data())); + // index is valid, and no bigger than kGatherV2InputIndexThree + size_t output_size = output->GetData().size(); + for (int64_t i = 0; i < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexZero); i++) { + T *data_ptr_x_i = data_ptr_x + i * xstride_[kGatherV2InputIndexZero]; + T *data_ptr_y_i = data_ptr_y + i * ystride_[kGatherV2InputIndexZero]; + for (int64_t j = 0; j < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexOne); j++) { + T *data_ptr_x_j = data_ptr_x_i + j * xstride_[kGatherV2InputIndexOne]; + T *data_ptr_y_j = data_ptr_y_i + j * 
ystride_[kGatherV2InputIndexOne]; + for (int64_t m = 0; m < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexTwo); m++) { + T *data_ptr_x_m = data_ptr_x_j + m * xstride_[kGatherV2InputIndexTwo]; + T *data_ptr_y_m = data_ptr_y_j + m * ystride_[kGatherV2InputIndexTwo]; + for (int64_t n = 0; n < output->GetTensorDesc().GetShape().GetDim(kGatherV2InputIndexThree); n++) { + T *data_ptr_x_tmp = data_ptr_x_m + indicates_[n] * xstride_[kGatherV2InputIndexThree]; + T *data_ptr_y_tmp = data_ptr_y_m + n * ystride_[kGatherV2InputIndexThree]; + size_t size = sizeof(T) * xstride_[kGatherV2InputIndexThree]; + if (data_ptr_y_tmp - data_ptr_y < 0) { + GELOGE(PARAM_INVALID, "ptr_y - ptr_y_tmp less than zero"); + return PARAM_INVALID; + } + size_t offset_size = (data_ptr_y_tmp - data_ptr_y) * sizeof(T); + auto ret_mem = memcpy_s(reinterpret_cast(data_ptr_y_tmp), output_size - offset_size, + reinterpret_cast(data_ptr_x_tmp), size); + if (ret_mem != 0) { + GELOGE(MEMALLOC_FAILED, "memcpy failed!"); + return MEMALLOC_FAILED; + } + } + } + } + } + return ret; +} + +template +Status GatherV2Kernel::GenData(const int64_t data_num, ConstGeTensorPtr tensor_x, int64_t axis, GeTensorPtr output) { + if (data_num <= 0) { + return PARAM_INVALID; + } + if (!CheckInt64MulOverflow(data_num, sizeof(T))) { + GELOGE(PARAM_INVALID, "Int64MulOverflow, data_num:%ld, type_len:%zu.", data_num, sizeof(T)); + return PARAM_INVALID; + } + + std::unique_ptr buf(new (std::nothrow) T[data_num]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "New sizeof(T) * data_num(%zu) memory failed", static_cast(sizeof(T) * data_num)); + return MEMALLOC_FAILED; + } + GE_IF_BOOL_EXEC(output->SetData(reinterpret_cast(buf.get()), static_cast(data_num * sizeof(T))) != + GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "set data failed"); + return INTERNAL_ERROR); + + Status ret = SUCCESS; + switch (axis) { + case 0: + ret = ProcessAxis0(tensor_x, output); + break; + case 1: + ret = ProcessAxis1(tensor_x, output); + break; 
+ case 2: + ret = ProcessAxis2(tensor_x, output); + break; + case 3: + ret = ProcessAxis3(tensor_x, output); + break; + default: + GELOGI("Only support 4 dims and below but input axis is %ld", axis); + return NOT_CHANGED; + } + return ret; +} +Status GatherV2Kernel::CalcStride(std::vector &stride, std::vector dims) { + if (stride.size() != dims.size() || dims.size() == 0) { + return PARAM_INVALID; + } + int i = static_cast(dims.size() - kGatherV2DimOne); + stride[static_cast(i)] = static_cast(kGatherV2DimOne); + i--; + while (i >= 0) { + size_t index = static_cast(i) + kGatherV2DimOne; + if (!CheckInt64MulOverflow(stride[index], dims[index])) { + GELOGE(PARAM_INVALID, "Int64MulOverflow, data_num(%ld) type_len(%ld)", stride[index], dims[index]); + return PARAM_INVALID; + } + stride[static_cast(i)] = stride[index] * dims[index]; + i--; + } + return SUCCESS; +} +Status GatherV2Kernel::Process(int64_t axis, DataType data_type, ConstGeTensorPtr input_tensor_ptr, + GeTensorPtr output_ptr) { + Status ret = SUCCESS; + int64_t data_num = output_ptr->GetTensorDesc().GetShape().GetShapeSize(); + switch (data_type) { + case DT_FLOAT16: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_DOUBLE: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_INT8: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_INT16: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_INT32: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_INT64: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_UINT8: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_UINT16: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_UINT32: + ret = GenData(data_num, input_tensor_ptr, axis, output_ptr); + break; + case DT_UINT64: + ret = GenData(data_num, input_tensor_ptr, axis, 
output_ptr); + break; + default: + GELOGI("GatherV2Kernel does not support this Data type:%s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return NOT_CHANGED; + } + return ret; +} +Status GatherV2Kernel::SaveIndicesByDataType(ConstGeTensorPtr indices_tensor_ptr, GeShape &x_shape, + GeShape &indices_shape, DataType indices_data_type, size_t axis) { + if (indices_data_type == DT_INT32) { + auto indices_ptr = const_cast(reinterpret_cast(indices_tensor_ptr->GetData().data())); + for (int64_t i = 0; i < indices_shape.GetShapeSize(); i++) { + if (*(indices_ptr + i) < 0 || *(indices_ptr + i) >= x_shape.GetDim(axis)) { + GELOGE(NOT_CHANGED, "indices %ld value is not in range [0, %ld)", i, x_shape.GetDim(axis)); + return NOT_CHANGED; + } + indicates_.push_back(*(indices_ptr + i)); + } + } else { + // int64 + auto indices_ptr = const_cast(reinterpret_cast(indices_tensor_ptr->GetData().data())); + for (int64_t i = 0; i < indices_shape.GetShapeSize(); i++) { + if (*(indices_ptr + i) < 0 || *(indices_ptr + i) >= x_shape.GetDim(axis)) { + GELOGE(NOT_CHANGED, "indices %ld value is not in range [0, %ld)", i, x_shape.GetDim(axis)); + return NOT_CHANGED; + } + indicates_.push_back(*(indices_ptr + i)); + } + } + + return SUCCESS; +} +Status GatherV2Kernel::Check(const OpDescPtr &op_desc_ptr, const vector &input, + vector &v_output) const { + if (op_desc_ptr == nullptr) { + GELOGE(NOT_CHANGED, "input opdesc is nullptr."); + return NOT_CHANGED; + } + + if (input.size() != kGatherV2InpotNum) { + GELOGE(NOT_CHANGED, "The number of input for GatherV2 must be %zu.", kGatherV2InpotNum); + return NOT_CHANGED; + } + + bool is_null = (input[kGatherV2InputIndexZero] == nullptr || input[kGatherV2InputIndexOne] == nullptr || + input[kGatherV2InputIndexTwo] == nullptr); + if (is_null) { + GELOGE(NOT_CHANGED, "some input is nullptr."); + return NOT_CHANGED; + } + ConstGeTensorPtr tensor0 = input.at(kGatherV2InputIndexZero); + ConstGeTensorPtr tensor1 = input.at(kGatherV2InputIndexOne); 
+ ConstGeTensorPtr tensor2 = input.at(kGatherV2InputIndexTwo); + + bool size_is_zero = + ((tensor0->GetData().size() == 0) || (tensor1->GetData().size() == 0) || (tensor2->GetData().size() == 0)); + if (size_is_zero) { + GELOGE(NOT_CHANGED, "some input size is zero."); + return NOT_CHANGED; + } + + auto indices_shape = tensor1->GetTensorDesc().GetShape(); + auto axis_shape = tensor2->GetTensorDesc().GetShape(); + // axis must be scalar + if (axis_shape.GetDimNum() != 0) { + GELOGE(NOT_CHANGED, "axis must be scalar but its shape is %zu", axis_shape.GetDimNum()); + return NOT_CHANGED; + } + auto axis_data_type = tensor2->GetTensorDesc().GetDataType(); + bool is_valid_axis_data_type = axis_data_type == DT_INT32 || axis_data_type == DT_INT64; + if (!is_valid_axis_data_type) { + GELOGE(NOT_CHANGED, "axis datatype must be DT_INT32 or DT_INT64"); + return NOT_CHANGED; + } + + // check indices data_type && dims && every element + auto indices_data_type = tensor1->GetTensorDesc().GetDataType(); + bool is_valid_indices_data_type = indices_data_type == DT_INT32 || indices_data_type == DT_INT64; + if (!is_valid_indices_data_type) { + GELOGE(NOT_CHANGED, "indices datatype must be DT_INT32 or DT_INT64"); + return NOT_CHANGED; + } + if (indices_shape.GetDimNum() > kMaxIndicatesDims) { + GELOGE(NOT_CHANGED, "indices input only support 0 or 1 dims"); + return NOT_CHANGED; + } + return SUCCESS; +} +void GatherV2Kernel::DebugPrint(int64_t axis, const GeShape &x_shape, const GeShape &indices_shape, + const std::vector &y_shape) { + GELOGD("GatherV2Kernel axis:%ld x_shape:%zu indices_shape:%zu y_shape:%zu", axis, x_shape.GetDimNum(), + indices_shape.GetDimNum(), y_shape.size()); + for (size_t i = 0; i < x_shape.GetDimNum(); i++) { + GELOGD("GatherV2Kernel x_shape[%zu]: %ld", i, x_shape.GetDim(i)); + } + for (size_t i = 0; i < indices_shape.GetDimNum(); i++) { + GELOGD("GatherV2Kernel indices_shape[%zu]: %ld", i, indices_shape.GetDim(i)); + } + for (size_t i = 0; i < y_shape.size(); 
i++) { + GELOGD("GatherV2Kernel y_shape[%zu]: %ld", i, y_shape[i]); + } + for (auto ele : indicates_) { + GELOGD("GatherV2Kernel indices:%ld", ele); + } +} + +Status GatherV2Kernel::Compute(const OpDescPtr op_desc_ptr, const vector &input, + vector &v_output) { + GELOGI("Enter GatherV2Kernel Process."); + Status ret = Check(op_desc_ptr, input, v_output); + if (ret != SUCCESS) { + GELOGE(NOT_CHANGED, "param check failed."); + return NOT_CHANGED; + } + GELOGI("GatherV2Kernel[%s] start Process.", op_desc_ptr->GetName().c_str()); + ConstGeTensorPtr tensor0 = input.at(kGatherV2InputIndexZero); + ConstGeTensorPtr tensor1 = input.at(kGatherV2InputIndexOne); + ConstGeTensorPtr tensor2 = input.at(kGatherV2InputIndexTwo); + + auto x_shape = tensor0->GetTensorDesc().GetShape(); + auto indices_shape = tensor1->GetTensorDesc().GetShape(); + + auto axis_data_type = tensor2->GetTensorDesc().GetDataType(); + int64_t axis = axis_data_type == DT_INT32 + ? *(const_cast(reinterpret_cast(tensor2->GetData().data()))) + : *(const_cast(reinterpret_cast(tensor2->GetData().data()))); + axis = axis >= 0 ? 
axis : axis + x_shape.GetDimNum(); + // check axis value + if (axis < 0 || (axis + 1) > static_cast(x_shape.GetDimNum())) { + GELOGE(NOT_CHANGED, "axis is invalid"); + return NOT_CHANGED; + } + auto indices_data_type = tensor1->GetTensorDesc().GetDataType(); + ret = SaveIndicesByDataType(tensor1, x_shape, indices_shape, indices_data_type, static_cast(axis)); + if (ret != SUCCESS) { + GELOGE(NOT_CHANGED, "Save indeices by data type failed!"); + return ret; + } + + // check input data type + auto x_data_type = tensor0->GetTensorDesc().GetDataType(); + if (supported_type.find(x_data_type) == supported_type.end()) { + GELOGI("GatherV2Kernel does not support this Data type:%s", TypeUtils::DataTypeToSerialString(x_data_type).c_str()); + return NOT_CHANGED; + } + // calc output shape + std::vector y_shape; + for (size_t i = 0; i < static_cast(axis); i++) { + y_shape.push_back(x_shape.GetDim(i)); + } + for (size_t i = 0; i < indices_shape.GetDimNum(); i++) { + y_shape.push_back(indices_shape.GetDim(i)); + } + for (size_t i = static_cast(axis) + 1; i < x_shape.GetDimNum(); i++) { + y_shape.push_back(x_shape.GetDim(i)); + } + + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed, node name %s.", op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + output_ptr->MutableTensorDesc().SetShape(GeShape(y_shape)); + output_ptr->MutableTensorDesc().SetDataType(x_data_type); + + // added for debug + DebugPrint(axis, x_shape, indices_shape, y_shape); + + // calc stride + std::vector xstride(x_shape.GetDimNum()); + std::vector ystride(y_shape.size()); + xstride_ = xstride; + ystride_ = ystride; + auto ret_x = CalcStride(xstride_, x_shape.GetDims()); + auto ret_y = CalcStride(ystride_, y_shape); + ret = (ret_x == SUCCESS && ret_y == SUCCESS) ? 
SUCCESS : NOT_CHANGED; + if (ret != SUCCESS) { + GELOGE(ret, "CalcStride Failed"); + return ret; + } + + ret = Process(axis, x_data_type, tensor0, output_ptr); + if (ret != SUCCESS) { + GELOGE(ret, "GenData failed, data_type: %s", TypeUtils::DataTypeToSerialString(x_data_type).c_str()); + return ret; + } + + GELOGI("GatherV2Kernel Process Success."); + v_output.push_back(output_ptr); + return SUCCESS; +} +REGISTER_KERNEL(GATHERV2, GatherV2Kernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/gather_v2_kernel.h b/src/ge/graph/passes/folding_kernel/gather_v2_kernel.h new file mode 100644 index 00000000..0bf4e3ee --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/gather_v2_kernel.h @@ -0,0 +1,57 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_GATHER_V2_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_GATHER_V2_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class GatherV2Kernel : public Kernel { + public: + Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + + private: + template + Status ProcessAxis0(ConstGeTensorPtr tensor_x, GeTensorPtr output); + template + Status ProcessAxis1(ConstGeTensorPtr tensor_x, GeTensorPtr output); + template + Status ProcessAxis2(ConstGeTensorPtr tensor_x, GeTensorPtr output); + template + Status ProcessAxis3(ConstGeTensorPtr tensor_x, GeTensorPtr output); + template + Status GenData(const int64_t data_num, ConstGeTensorPtr tensor_x, int64_t axis, GeTensorPtr output); + Status Check(const OpDescPtr &op_desc_ptr, const vector &input, + vector &v_output) const; + Status CalcStride(std::vector &stride, std::vector dims); + Status SaveIndicesByDataType(ConstGeTensorPtr indices_tensor_ptr, GeShape &x_shape, GeShape &indices_shape, + DataType indices_data_type, size_t axis); + Status Process(int64_t axis, DataType data_type, ConstGeTensorPtr input_tensor_ptr, GeTensorPtr output_ptr); + void DebugPrint(int64_t axis, const GeShape &x_shape, const GeShape &indices_shape, + const std::vector &y_shape); + + private: + std::vector indicates_; + std::vector xstride_; + std::vector ystride_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_GATHER_V2_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/greater_kernel.cc b/src/ge/graph/passes/folding_kernel/greater_kernel.cc new file mode 100644 index 00000000..816d3d05 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/greater_kernel.cc @@ -0,0 +1,155 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/folding_kernel/greater_kernel.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/fp16_t.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/bcast.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +using domi::Status; +using domi::SUCCESS; + +namespace ge { +namespace { +const size_t kGreaterInputNum = 2; + +#define DEFINE_FUNC_BY_TYPE(TYPE) \ + std::function func_##TYPE = [](TYPE const &a, TYPE const &b) -> uint8_t { \ + return a > b; \ + }; + +#define SET_BCAST_COMPUTE_CASE(DTYPE, TYPE) \ + case DTYPE: \ + ret = bcast.BCastCompute(input, y_data, func_##TYPE); \ + break; + +DEFINE_FUNC_BY_TYPE(int8_t) +DEFINE_FUNC_BY_TYPE(int16_t) +DEFINE_FUNC_BY_TYPE(int32_t) +DEFINE_FUNC_BY_TYPE(int64_t) +DEFINE_FUNC_BY_TYPE(uint8_t) +DEFINE_FUNC_BY_TYPE(uint16_t) +DEFINE_FUNC_BY_TYPE(uint32_t) +DEFINE_FUNC_BY_TYPE(uint64_t) +DEFINE_FUNC_BY_TYPE(fp16_t) +DEFINE_FUNC_BY_TYPE(float) +DEFINE_FUNC_BY_TYPE(double) +DEFINE_FUNC_BY_TYPE(bool) +} // namespace + +Status GreaterKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGD("GreaterKernel in"); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, Input opDescPtr is nullptr."); + return PARAM_INVALID; + } + Status ret = GreaterCheck(input); + if (ret != SUCCESS) { + return ret; + } + + std::vector y_data; + GE_CHECK_NOTNULL(input[0]); + 
DataType data_type = input[0]->GetTensorDesc().GetDataType(); + BCast bcast; + switch (data_type) { + SET_BCAST_COMPUTE_CASE(DT_INT8, int8_t) + SET_BCAST_COMPUTE_CASE(DT_INT16, int16_t) + SET_BCAST_COMPUTE_CASE(DT_INT32, int32_t) + SET_BCAST_COMPUTE_CASE(DT_INT64, int64_t) + SET_BCAST_COMPUTE_CASE(DT_UINT8, uint8_t) + SET_BCAST_COMPUTE_CASE(DT_UINT16, uint16_t) + SET_BCAST_COMPUTE_CASE(DT_UINT32, uint32_t) + SET_BCAST_COMPUTE_CASE(DT_UINT64, uint64_t) + SET_BCAST_COMPUTE_CASE(DT_FLOAT16, fp16_t) + SET_BCAST_COMPUTE_CASE(DT_FLOAT, float) + SET_BCAST_COMPUTE_CASE(DT_DOUBLE, double) + SET_BCAST_COMPUTE_CASE(DT_BOOL, bool) + default: + ret = NOT_CHANGED; + break; + } + + if (ret != SUCCESS) { + GELOGW("BCastCompute fail, data_type:%s, ret:%s", TypeUtils::DataTypeToSerialString(data_type).c_str(), + GET_ERRORNO_STR(ret).c_str()); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr; + output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed %s.", op_desc_ptr->GetName().c_str()); + return MEMALLOC_FAILED; + } + + output_ptr->MutableTensorDesc().SetShape(GeShape(bcast.GetOutputShape())); + // only return GRAPH_SUCCESS here + GE_CHK_STATUS_RET(output_ptr->SetData(y_data)); + output_ptr->MutableTensorDesc().SetDataType(DT_BOOL); + v_output.push_back(output_ptr); + GELOGD("GreaterKernel success"); + + return SUCCESS; +} + +Status GreaterKernel::GreaterCheck(const std::vector &input) { + // check input number + if (input.size() != kGreaterInputNum) { + GELOGI("The number of input for greater must be %zu.", kGreaterInputNum); + return NOT_CHANGED; + } + + GE_CHECK_NOTNULL(input[0]); + GE_CHECK_NOTNULL(input[1]); + + ConstGeTensorPtr input_x1 = input.at(0); + ConstGeTensorPtr input_x2 = input.at(1); + // check whether there is data in Tensor + if (input_x1->GetData().size() == 0 || input_x2->GetData().size() == 0) { + GELOGI("Check data size fail. 
x1: %zu, x2:%zu", input_x1->GetData().size(), input_x2->GetData().size()); + return NOT_CHANGED; + } + + // check whether the data types are the same + if (input_x1->GetTensorDesc().GetDataType() != input_x2->GetTensorDesc().GetDataType()) { + GELOGI("Data type of inputs for greater not matched."); + return NOT_CHANGED; + } + + // check if input data type is supported + DataType type = input_x1->GetTensorDesc().GetDataType(); + if (greater_supported_type.find(type) == greater_supported_type.end()) { + GELOGI("Greater does not support this Data type:%s", TypeUtils::DataTypeToSerialString(type).c_str()); + return NOT_CHANGED; + } + + return SUCCESS; +} + +REGISTER_KERNEL(GREATER, GreaterKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/greater_kernel.h b/src/ge/graph/passes/folding_kernel/greater_kernel.h new file mode 100644 index 00000000..84b5bc87 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/greater_kernel.h @@ -0,0 +1,46 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_GREATER_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_GREATER_KERNEL_H_ + +#include +#include + +#include "common/fp16_t.h" +#include "graph/ge_tensor.h" +#include "inc/kernel.h" + +namespace ge { +class GreaterKernel : public Kernel { + public: + Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + + private: + Status ComputeOutData(ConstGeTensorPtr input_x1, ConstGeTensorPtr input_x2, std::vector &x1_indexes, + std::vector &x2_indexes, std::vector &y_data); + + Status GreaterCheck(const std::vector &input); + + const std::set greater_supported_type = { + DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE, + }; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_GREATER_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/kernel_utils.cc b/src/ge/graph/passes/folding_kernel/kernel_utils.cc new file mode 100644 index 00000000..c9568d37 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/kernel_utils.cc @@ -0,0 +1,133 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/kernel_utils.h" + +#include + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" + +namespace { +const int kDimensionShapeIndex = 0; +const int kDimensionDimsIndex = 1; +const size_t kDimensionNodeInputSize = 2; +} // namespace + +namespace ge { +Status KernelUtils::CheckDimensionNodeInfo(const NodePtr &node_ptr) { + if (node_ptr == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + auto input_nodes = node_ptr->GetInDataNodes(); + if (input_nodes.size() != kDimensionNodeInputSize) { + GELOGW("op:%s type: %s, dimension input size must be %zu, but get %zu inputs", node_ptr->GetName().c_str(), + node_ptr->GetType().c_str(), kDimensionNodeInputSize, input_nodes.size()); + return NOT_CHANGED; + } + + NodePtr dim_node = input_nodes.at(kDimensionDimsIndex); + if (dim_node == nullptr) { + GELOGE(PARAM_INVALID, "dim node is nullptr"); + return PARAM_INVALID; + } + + std::vector const_ge_tensor = OpDescUtils::GetWeights(dim_node); + if (const_ge_tensor.empty()) { + GELOGE(PARAM_INVALID, "dim node must be const op"); + return PARAM_INVALID; + } + ConstGeTensorPtr input_dim = const_ge_tensor.at(0); + if (input_dim->GetData().size() == 0) { + GELOGE(PARAM_INVALID, "dim data size is 0"); + return PARAM_INVALID; + } + + return SUCCESS; +} + +bool KernelUtils::CheckFormatSupported(const NodePtr &node_ptr) { + if (node_ptr == nullptr) { + GELOGE(FAILED, "parameter is null."); + return false; + } + OpDescPtr op_desc = node_ptr->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(FAILED, "op_desc is null"); + return false; + } + Format fmt = op_desc->GetInputDesc(kDimensionShapeIndex).GetFormat(); + if (fmt == FORMAT_NC1HWC0 || fmt == FORMAT_FRACTAL_Z) { + GELOGW("invalid format, fmt: %s", TypeUtils::FormatToSerialString(fmt).c_str()); + 
return false; + } + + return true; +} + +bool KernelUtils::CheckSizeForTransOp(const ge::ConstGeTensorPtr &const_weight_ptr, + const ge::OpDescPtr &op_desc_ptr) { + if (const_weight_ptr == nullptr || op_desc_ptr == nullptr) { + GELOGE(FAILED, "parameter invalid"); + return false; + } + auto data_size = const_weight_ptr->GetData().GetSize(); + + DataType data_type = op_desc_ptr->GetInputDesc(0).GetDataType(); + GeShape data_shape = op_desc_ptr->GetInputDesc(0).GetShape(); + Format data_format = op_desc_ptr->GetInputDesc(0).GetFormat(); + auto shape_size = op_desc_ptr->GetInputDesc(0).GetShape().GetShapeSize(); + int64_t cal_size = 0; + + auto ret = TensorUtils::CalcTensorMemSize(data_shape, data_format, data_type, cal_size); + if (ret != SUCCESS) { + GELOGE(FAILED, "CalcTensorMemSize failed"); + return false; + } + + uint32_t length = 1; + if (!TypeUtils::GetDataTypeLength(data_type, length)) { + GELOGE(PARAM_INVALID, "Input datatype %d is not support .", data_type); + return false; + } + + GELOGI("Const real value Size:%zu, op_desc Shape Size:%ld, data_type:%s.", data_size, cal_size, + TypeUtils::DataTypeToSerialString(data_type).c_str()); + if ((shape_size != 0) || (length != 0 && (data_size / static_cast(length) != 1))) { + if (!(data_size == static_cast(cal_size) && data_size != 0)) { + GELOGW("Const input data size is not equal with tensor desc shape"); + return false; + } + } + return true; +} + +bool KernelUtils::IsUnknownShape(const ge::GeShape &shape) { + vector dims = shape.GetDims(); + for (auto dim : dims) { + if (dim < 0) { + GELOGW("Shape kernel recoginze unknown shape.Ignore shape kernel."); + return true; + } + } + return false; +} +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/kernel_utils.h b/src/ge/graph/passes/folding_kernel/kernel_utils.h new file mode 100644 index 00000000..9eadf4ca --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/kernel_utils.h @@ -0,0 +1,109 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., 
Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_KERNEL_UTILS_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_KERNEL_UTILS_H_ + +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/compute_graph.h" + +namespace ge { +class KernelUtils { + public: + KernelUtils() = delete; + static Status CheckDimensionNodeInfo(const NodePtr &node_ptr); + static bool CheckFormatSupported(const NodePtr &node_ptr); + static bool CheckSizeForTransOp(const ConstGeTensorPtr &const_weight_ptr, const OpDescPtr &op_desc_ptr); + static bool IsUnknownShape(const GeShape &shape); + + /** + * Generating a sequence of numbers + * @param [in] data_num the num of generate + * @param [in] value the value to write to buffer + * @param [out] output the tensor for save sequence of numbers + * @author + */ + template + static Status GenData(const int64_t data_num, const T value, const GeTensorPtr &output) { + if (data_num > 0) { + if (!CheckInt64MulOverflow(data_num, static_cast(sizeof(T)))) { + GELOGE(PARAM_INVALID, "Int64MulOverflow, data_num(%ld) type_len(%zu)", data_num, sizeof(T)); + return PARAM_INVALID; + } + + std::unique_ptr buf(new (std::nothrow) T[data_num]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new sizeof(T) * data_num(%ld) memory failed", sizeof(T) * data_num); + return MEMALLOC_FAILED; + } + + for (int64_t i = 0; i < 
data_num; ++i) { + buf[i] = value; + } + Status ret = output->SetData(reinterpret_cast(buf.get()), data_num * sizeof(T)); + if (ret != SUCCESS) { + GELOGE(ret, " buf must not be null."); + return ret; + } + } + + return SUCCESS; + } + + /** + * Calculate dimension + * @param [in] dims save the tensor of the dimension + * @param [in] vec_dim results of each dimension + * @param [out] data_num total size of data + * @author + */ + template + static Status CalcDims(const ConstGeTensorPtr dims, std::vector &vec_dim, int64_t &data_num) { + data_num = 1; + int32_t size = dims->GetData().size() / sizeof(T); + + for (int32_t i = 0; i < size; i++) { + T dim = *(reinterpret_cast(dims->GetData().data()) + i); + if (dim < 0) { + GELOGE(PARAM_INVALID, "input dim(%d) is negative(%ld)", i, static_cast(dim)); + return PARAM_INVALID; + } + if (dim == 0) { + GELOGI("input dim(%d) is zero", i); + data_num = 0; + vec_dim.clear(); + break; + } + if (!CheckInt64MulOverflow(data_num, dim)) { + GELOGE(PARAM_INVALID, "Int64MulOverflow, data_num(%ld) dim(%ld)", data_num, static_cast(dim)); + return PARAM_INVALID; + } + + data_num *= dim; + vec_dim.push_back(dim); + } + + return SUCCESS; + } +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_KERNEL_UTILS_H_ diff --git a/src/ge/graph/passes/folding_kernel/maximum_kernel.cc b/src/ge/graph/passes/folding_kernel/maximum_kernel.cc new file mode 100644 index 00000000..7f376019 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/maximum_kernel.cc @@ -0,0 +1,195 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/folding_kernel/maximum_kernel.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/fp16_t.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/bcast.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kMaximumInputNum = 2; +const size_t kMaximumFirstInput = 0; +const size_t kMaximumSecondInput = 1; +const size_t kMaximumFirstOutput = 0; +const std::set kMaximumSupportedType = {DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_DOUBLE}; + +#define DEFINE_FUNC_BY_TYPE(TYPE) \ + std::function func_##TYPE = [](TYPE const &a, TYPE const &b) -> TYPE { \ + return (a > b ? 
a : b); \ + }; + +#define SET_BCAST_COMPUTE_CASE(DTYPE, TYPE) \ + case DTYPE: \ + ret = bcast.BCastCompute(input, y_data_##TYPE, func_##TYPE); \ + break; + +#define SET_OUTPUT(DTYPE, TYPE) \ + case DTYPE: \ + if (output_ptr->SetData(reinterpret_cast(y_data_##TYPE.data()), y_data_##TYPE.size() * length) != \ + GRAPH_SUCCESS) { \ + GELOGW("GenData: SetData failed"); \ + } \ + break; + +DEFINE_FUNC_BY_TYPE(int8_t) +DEFINE_FUNC_BY_TYPE(int16_t) +DEFINE_FUNC_BY_TYPE(int32_t) +DEFINE_FUNC_BY_TYPE(int64_t) +DEFINE_FUNC_BY_TYPE(uint8_t) +DEFINE_FUNC_BY_TYPE(uint16_t) +DEFINE_FUNC_BY_TYPE(uint32_t) +DEFINE_FUNC_BY_TYPE(uint64_t) +DEFINE_FUNC_BY_TYPE(fp16_t) +DEFINE_FUNC_BY_TYPE(float) +DEFINE_FUNC_BY_TYPE(double) +} // namespace + +Status MaximumKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGD("MaximumKernel in"); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, input opDescPtr is nullptr."); + return PARAM_INVALID; + } + Status ret = MaximumCheck(input); + if (ret != SUCCESS) { + return ret; + } + + std::vector y_data_int8_t; + std::vector y_data_int16_t; + std::vector y_data_int32_t; + std::vector y_data_int64_t; + std::vector y_data_uint8_t; + std::vector y_data_uint16_t; + std::vector y_data_uint32_t; + std::vector y_data_uint64_t; + std::vector y_data_fp16_t; + std::vector y_data_float; + std::vector y_data_double; + + if (input.empty()) { + GELOGE(FAILED, "input is empty."); + return FAILED; + } + DataType data_type = input[kMaximumFirstInput]->GetTensorDesc().GetDataType(); + BCast bcast; + switch (data_type) { + SET_BCAST_COMPUTE_CASE(DT_INT8, int8_t) + SET_BCAST_COMPUTE_CASE(DT_INT16, int16_t) + SET_BCAST_COMPUTE_CASE(DT_INT32, int32_t) + SET_BCAST_COMPUTE_CASE(DT_INT64, int64_t) + SET_BCAST_COMPUTE_CASE(DT_UINT8, uint8_t) + SET_BCAST_COMPUTE_CASE(DT_UINT16, uint16_t) + SET_BCAST_COMPUTE_CASE(DT_UINT32, uint32_t) + SET_BCAST_COMPUTE_CASE(DT_UINT64, uint64_t) + 
SET_BCAST_COMPUTE_CASE(DT_FLOAT16, fp16_t) + SET_BCAST_COMPUTE_CASE(DT_FLOAT, float) + SET_BCAST_COMPUTE_CASE(DT_DOUBLE, double) + default: + ret = NOT_CHANGED; + break; + } + + if (ret != SUCCESS) { + GELOGW("BCastCompute fail, data_type: %s, ret: %s", TypeUtils::DataTypeToSerialString(data_type).c_str(), + GET_ERRORNO_STR(ret).c_str()); + return NOT_CHANGED; + } + + uint32_t length = 1; + if (!TypeUtils::GetDataTypeLength(data_type, length)) { + GELOGW("Can't GetDataTypeLength of data_type: %s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(kMaximumFirstOutput)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed"); + return MEMALLOC_FAILED; + } + + output_ptr->MutableTensorDesc().SetShape(GeShape(bcast.GetOutputShape())); + // only return GRAPH_SUCCESS here + switch (data_type) { + SET_OUTPUT(DT_INT8, int8_t) + SET_OUTPUT(DT_INT16, int16_t) + SET_OUTPUT(DT_INT32, int32_t) + SET_OUTPUT(DT_INT64, int64_t) + SET_OUTPUT(DT_UINT8, uint8_t) + SET_OUTPUT(DT_UINT16, uint16_t) + SET_OUTPUT(DT_UINT32, uint32_t) + SET_OUTPUT(DT_UINT64, uint64_t) + SET_OUTPUT(DT_FLOAT16, fp16_t) + SET_OUTPUT(DT_FLOAT, float) + SET_OUTPUT(DT_DOUBLE, double) + default: + break; + } + output_ptr->MutableTensorDesc().SetDataType(data_type); + v_output.push_back(output_ptr); + GELOGD("MaximumKernel success"); + + return SUCCESS; +} + +Status MaximumKernel::MaximumCheck(const std::vector &input) { + // check input number + if (input.size() != kMaximumInputNum) { + GELOGI("The number of input for Maximum must be %zu.", kMaximumInputNum); + return NOT_CHANGED; + } + ConstGeTensorPtr input_x1 = input.at(kMaximumFirstInput); + ConstGeTensorPtr input_x2 = input.at(kMaximumSecondInput); + GE_CHECK_NOTNULL(input_x2); + GE_CHECK_NOTNULL(input_x2); + + // check whether there is data in Tensor + if (input_x1->GetData().size() == 0 || input_x2->GetData().size() == 0) { + 
GELOGI("Check data size fail. x1: %zu, x2: %zu", input_x1->GetData().size(), input_x2->GetData().size()); + return NOT_CHANGED; + } + + // check whether the data types are the same + DataType type = input_x1->GetTensorDesc().GetDataType(); + if (type != input_x2->GetTensorDesc().GetDataType()) { + GELOGI("Data type of inputs for Maximum not matched."); + return NOT_CHANGED; + } + + // check if input data type is supported + if (kMaximumSupportedType.find(type) == kMaximumSupportedType.end()) { + GELOGI("Maximum does not support this Data type: %s", TypeUtils::DataTypeToSerialString(type).c_str()); + return NOT_CHANGED; + } + + return SUCCESS; +} + +REGISTER_KERNEL(MAXIMUM, MaximumKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/maximum_kernel.h b/src/ge/graph/passes/folding_kernel/maximum_kernel.h new file mode 100644 index 00000000..feaa91e7 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/maximum_kernel.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_MAXIMUM_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_MAXIMUM_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class MaximumKernel : public Kernel { + public: + Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + + private: + Status MaximumCheck(const std::vector &input); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_MAXIMUM_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/mul_kernel.cc b/src/ge/graph/passes/folding_kernel/mul_kernel.cc new file mode 100644 index 00000000..f328fb88 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/mul_kernel.cc @@ -0,0 +1,162 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/mul_kernel.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/math/math_util.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/bcast.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const std::set mul_supported_type = {DT_INT32, DT_UINT32}; + +template +Status IsOverflow(T const &a, T const &b, DataType &type) { + switch (type) { + case DT_INT32: + return CheckInt32MulOverflow(a, b); + case DT_UINT32: + return CheckUint32MulOverflow(a, b); + default: + return FAILED; + } +} + +#define DEFINE_FUNC_WITH_STATUS_BY_TYPE(TYPE) \ + std::function func_##TYPE = []( \ + TYPE const &a, TYPE const &b, DataType &type, Status &ret) -> TYPE { \ + ret = IsOverflow(a, b, type); \ + if (ret != SUCCESS) { \ + return static_cast(0); \ + } \ + return a * b; \ + }; + +#define SET_BCAST_COMPUTE_CASE(DTYPE, TYPE) \ + case DTYPE: \ + ret = bcast.BCastComputeCheck(input, y_data_##TYPE, func_##TYPE); \ + break; + +#define SET_OUTPUT(DTYPE, TYPE) \ + case DTYPE: \ + (void)output_ptr->SetData(reinterpret_cast(y_data_##TYPE.data()), y_data_##TYPE.size() * length); \ + break; +DEFINE_FUNC_WITH_STATUS_BY_TYPE(int32_t) +DEFINE_FUNC_WITH_STATUS_BY_TYPE(uint32_t) +} // namespace + +Status MulKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGD("MulKernel in"); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, input opDescPtr is nullptr."); + return PARAM_INVALID; + } + Status ret = MulCheck(input); + if (ret != SUCCESS) { + return ret; + } + + std::vector y_data_int32_t; + std::vector y_data_uint32_t; + DataType data_type = input[0]->GetTensorDesc().GetDataType(); + BCast bcast; + switch (data_type) { + SET_BCAST_COMPUTE_CASE(DT_INT32, int32_t) + 
SET_BCAST_COMPUTE_CASE(DT_UINT32, uint32_t) + default: + ret = NOT_CHANGED; + break; + } + + if (ret != SUCCESS) { + GELOGW("BCastCompute fail, data_type: %s, ret: %s", TypeUtils::DataTypeToSerialString(data_type).c_str(), + GET_ERRORNO_STR(ret).c_str()); + return NOT_CHANGED; + } + + uint32_t length = 1; + if (!TypeUtils::GetDataTypeLength(data_type, length)) { + GELOGW("Can't GetDataTypeLength of data_type: %s", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed"); + return MEMALLOC_FAILED; + } + + output_ptr->MutableTensorDesc().SetShape(GeShape(bcast.GetOutputShape())); + // only return GRAPH_SUCCESS here + switch (data_type) { + SET_OUTPUT(DT_INT32, int32_t) + SET_OUTPUT(DT_UINT32, uint32_t) + default: + break; + } + output_ptr->MutableTensorDesc().SetDataType(data_type); + v_output.push_back(output_ptr); + GELOGD("MulKernel success"); + + return SUCCESS; +} + +Status MulKernel::MulCheck(const std::vector &input) { + // check input number + if (input.size() != static_cast(MUL_INPUT_NUM)) { + GELOGI("The number of input for Mul must be %u.", MUL_INPUT_NUM); + return NOT_CHANGED; + } + + ConstGeTensorPtr input_x1 = input.at(0); + ConstGeTensorPtr input_x2 = input.at(1); + GE_CHECK_NOTNULL(input_x1); + GE_CHECK_NOTNULL(input_x2); + // check whether there is data in Tensor + if (input_x1->GetData().size() == 0 || input_x2->GetData().size() == 0) { + GELOGI("Check data size fail. 
x1: %zu, x2: %zu", input_x1->GetData().size(), input_x2->GetData().size()); + return NOT_CHANGED; + } + + // check whether the data types are the same + DataType type = input_x1->GetTensorDesc().GetDataType(); + if (type != input_x2->GetTensorDesc().GetDataType()) { + GELOGI("Data type of inputs for Mul not matched."); + return NOT_CHANGED; + } + + // check if input data type is supported + if (mul_supported_type.find(type) == mul_supported_type.end()) { + GELOGI("Mul does not support this Data type: %s", TypeUtils::DataTypeToSerialString(type).c_str()); + return NOT_CHANGED; + } + + return SUCCESS; +} + +REGISTER_KERNEL(MUL, MulKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/mul_kernel.h b/src/ge/graph/passes/folding_kernel/mul_kernel.h new file mode 100644 index 00000000..3116aee8 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/mul_kernel.h @@ -0,0 +1,36 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_MUL_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_MUL_KERNEL_H_ + +#include + +#include "graph/ge_tensor.h" +#include "inc/kernel.h" + +namespace ge { +class MulKernel : public Kernel { + public: + Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + + private: + Status MulCheck(const std::vector &input); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_MUL_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/pack_kernel.cc b/src/ge/graph/passes/folding_kernel/pack_kernel.cc new file mode 100644 index 00000000..706d2211 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/pack_kernel.cc @@ -0,0 +1,220 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/pack_kernel.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace { +const int64_t kShapeItemNumMAX = 2000000000; +} // namespace +namespace ge { +Status PackKernel::Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGI("Pack kernel in."); + Status validate_ret = ValidateKernelParams(op_desc_ptr, input); + if (validate_ret != SUCCESS) { + GELOGW("Pack kernel input is invalid , can not continue compute."); + return NOT_CHANGED; + } + + GeShape final_shape; + ExpandDims(axis_, input, final_shape); + + // generate output + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0)); + if (output_ptr == nullptr) { + GELOGW("Fail to malloc output."); + return OUT_OF_MEMORY; + } + Status ret = CopyOutputData(final_shape, input, output_ptr); + if (ret != SUCCESS) { + GELOGW("Pack inputs failed. 
Ignore pack kernel."); + return NOT_CHANGED; + } + v_output.push_back(output_ptr); + return SUCCESS; +} + +Status PackKernel::ValidateKernelParams(const ge::OpDescPtr &op_desc_ptr, + const std::vector &input) { + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "input opdesc is nullptr."); + return PARAM_INVALID; + } + if (!(AttrUtils::GetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, n_))) { + GELOGE(PARAM_INVALID, "Attr %s is not exist.", PACK_ATTR_NAME_NUM.c_str()); + return PARAM_INVALID; + } + if (!(AttrUtils::GetInt(op_desc_ptr, ATTR_NAME_AXIS, axis_))) { + GELOGE(PARAM_INVALID, "Attr %s is not exist.", ATTR_NAME_AXIS.c_str()); + return PARAM_INVALID; + } + if (input.empty()) { + GELOGE(PARAM_INVALID, "The number of input for Pack should be %ld, in fact it is %zu ", n_, input.size()); + return NOT_CHANGED; + } + if (input.size() != static_cast(n_)) { + GELOGE(PARAM_INVALID, "The number of input for Pack should be %d, in fact it is %ld ", static_cast(n_), + input.size()); + return PARAM_INVALID; + } + data_type_ = op_desc_ptr->GetInputDesc(0).GetDataType(); + GeShape shape = op_desc_ptr->GetInputDesc(0).GetShape(); + if (axis_ < 0 || axis_ > (static_cast(shape.GetDimNum()) + 1)) { + GELOGW("Axis is %ld ,which is out of range [0,R+1].", axis_); + return NOT_CHANGED; + } + + Status validate_ret = ValidateInputs(op_desc_ptr, input); + if (validate_ret != SUCCESS) { + GELOGW("Validate inputs failed.Ignore pack kernel."); + return NOT_CHANGED; + } + return SUCCESS; +} + +Status PackKernel::ValidateInputs(const ge::OpDescPtr &op_desc_ptr, const std::vector &input) { + GeShape shape; + for (int64_t i = 0; i < n_; i++) { + if (input[i] == nullptr) { + GELOGW("Input %ld of pack kernel %s is null.", i, op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + // check if tensor contains data + if (input[i]->GetData().size() == 0) { + GELOGW("Inputs %ld do not have value.", i); + return NOT_CHANGED; + } + if (i == 0) { + // get first input shape + shape = 
input[0]->GetTensorDesc().GetShape(); + } + + GeTensorDesc tensor_desc = input[i]->GetTensorDesc(); + // check datatype of inputs is same or not + if (tensor_desc.GetDataType() != data_type_) { + GELOGW("Data type of inputs %ld for pack not matched, data type should be %s, but actual datatype is %s", i, + TypeUtils::DataTypeToSerialString(data_type_).c_str(), + TypeUtils::DataTypeToSerialString(tensor_desc.GetDataType()).c_str()); + return NOT_CHANGED; + } + // check shape of inputs is same or not + auto dst_shape = tensor_desc.GetShape(); + int64_t num = 1; + for (auto dim : dst_shape.GetDims()) { + if (dim < 1) { + GELOGW("Invalid zero dim in the shape %s", formats::ShapeToString(shape).c_str()); + return NOT_CHANGED; + } + num *= dim; + if (num > kShapeItemNumMAX) { + GELOGW("Shape overflow, the total count should be less than %ld!", kShapeItemNumMAX); + return NOT_CHANGED; + } + } + if (!formats::IsShapeEqual(shape, dst_shape)) { + GELOGW("Shape of input %ld is not equal with input 0.", i); + return NOT_CHANGED; + } + } + return SUCCESS; +} + +void PackKernel::ExpandDims(const int64_t axis, const std::vector &input, GeShape &final_shape) { + // expand dims + vector current_dims = input[0]->GetTensorDesc().GetShape().GetDims(); + vector final_dims; + final_dims.assign(current_dims.begin(), current_dims.end()); + + // expand dim of N + // assume there are N inputs, and shape is [A,B,C], + // if axis = 0, after pack, the output shape should be [N,A,B,C]. + // if axis = 1, after pack, the output shape should be [A,N,B,C]. 
+ // ...etc + // if axis = 3, after pack, the output shape should be [A,B,C,N] + if (axis >= static_cast(final_dims.size())) { + final_dims.emplace_back(n_); + } else { + final_dims.insert(final_dims.begin() + axis, n_); + } + final_shape = GeShape(final_dims); +} + +Status PackKernel::CopyOutputData(const GeShape &final_shape, + const std::vector &input, + ge::GeTensorPtr &output_ptr) { + int64_t times = 1; + int64_t unit = 1; + // calculate data unit + for (int64_t i = (axis_ + 1); i < static_cast(final_shape.GetDimNum()); i++) { + unit *= final_shape.GetDim(static_cast(i)); + } + // calculate get times + for (int64_t i = 0; i < axis_; i++) { + times *= final_shape.GetDim(static_cast(i)); + } + GELOGD("Copy output data times is %ld, unit is %ld.", times, unit); + + uint32_t data_size = GetSizeByDataType(data_type_); + // assume output shape is [A,N,B,C], time=A,unit=B*C + // when copy data from input, we follow time*N*unit + auto output_size = final_shape.GetShapeSize(); + std::shared_ptr buf(new (std::nothrow) uint8_t[output_size * data_size], std::default_delete()); + if (buf == nullptr) { + GELOGW("malloc buf is null.Ignore pack kernel."); + return NOT_CHANGED; + } + + size_t dst_offset = 0; + size_t src_offset = 0; + // data copy follow times*N*offset, which offset = time*unit + for (int64_t i = 0; i < times; i++) { + for (int64_t j = 0; j < n_; j++) { + // input range already check before. Range is [0,n_). 
+ const uint8_t *in_data = input[j]->GetData().data(); + auto ret = memcpy_s(buf.get() + dst_offset, output_size * data_size - dst_offset, in_data + src_offset, + data_size * unit); + if (ret != EOK) { + GELOGW("Memory copy failed."); + return NOT_CHANGED; + } + dst_offset += data_size * unit; + } + src_offset += unit * data_size; + } + + if (output_ptr->SetData(buf.get(), static_cast(output_size * data_size)) != GRAPH_SUCCESS) { + GELOGW("CopyOutputData: SetData failed"); + } + output_ptr->MutableTensorDesc().SetShape(final_shape); + output_ptr->MutableTensorDesc().SetDataType(DataType(data_type_)); + return SUCCESS; +} + +REGISTER_KERNEL(PACK, PackKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/pack_kernel.h b/src/ge/graph/passes/folding_kernel/pack_kernel.h new file mode 100644 index 00000000..b32e3fae --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/pack_kernel.h @@ -0,0 +1,46 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_PACK_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_PACK_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +/** + * @ingroup ge + * @brief Pack operator processing + * @author + */ +class PackKernel : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + private: + Status ValidateKernelParams(const ge::OpDescPtr &op_desc_ptr, const std::vector &input); + Status ValidateInputs(const ge::OpDescPtr &op_desc_ptr, const std::vector &input); + void ExpandDims(const int64_t axis, const std::vector &input, GeShape &final_shape); + Status CopyOutputData(const GeShape &final_shape, const std::vector &input, + ge::GeTensorPtr &output_ptr); + int64_t n_ = 0; // count of inputs + int64_t axis_ = 0; // axis to stack along. + DataType data_type_; // data type of inputs +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_PACK_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/permute_kernel.cc b/src/ge/graph/passes/folding_kernel/permute_kernel.cc new file mode 100644 index 00000000..a6968300 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/permute_kernel.cc @@ -0,0 +1,138 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/permute_kernel.h" + +#include +#include + +#include "common/debug/log.h" +#include "framework/common/debug/ge_log.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/common/bcast.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" +#include "common/formats/formats.h" +#include "common/formats/format_transfers/format_transfer_transpose.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "framework/common/ge_inner_error_codes.h" + + +namespace ge { +namespace { +const char *const kAttrOrder = "order"; +const char *const kAttrPerm = "perm"; +const size_t kTbePermuteInputSize = 2; +} // namespace + +Status PermuteKernel::ValidateInput(const OpDescPtr &op_desc_ptr, const std::vector &input) { + if (input.empty()) { + GELOGE(PARAM_INVALID, "Input tensor vector is empty"); + return PARAM_INVALID; + } + ConstGeTensorPtr const_weight_ptr = input[0]; + if (const_weight_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Input const_weight_ptr is nullptr."); + return PARAM_INVALID; + } + const uint8_t *src_data = const_weight_ptr->GetData().data(); + if (op_desc_ptr == nullptr || src_data == nullptr) { + GELOGE(PARAM_INVALID, "Input opDescPtr is nullptr."); + return PARAM_INVALID; + } + if (op_desc_ptr->GetInputsSize() >= kTbePermuteInputSize) { + GELOGW("trans_op has more than 1 input_size."); + return NOT_CHANGED; + } + return SUCCESS; +} + +Status PermuteKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGD("PermuteKernel begin."); + Status status = ValidateInput(op_desc_ptr, input); + if (status != SUCCESS) { + return status; + } + + ConstGeTensorPtr const_weight_ptr = input[0]; + GeTensorDesc op_desc = op_desc_ptr->GetOutputDesc(0); + GeTensorDesc op_desc_in = op_desc_ptr->GetInputDesc(0); + auto src_format = op_desc_in.GetFormat(); + 
auto src_shape = op_desc_in.GetShape().GetDims(); + auto src_data_type = op_desc_in.GetDataType(); + auto data_shape = op_desc.GetShape().GetDims(); + auto data_format = op_desc.GetFormat(); + auto data_type = op_desc.GetDataType(); + GELOGD( + "current node %s, format %s, input shape %s, data type %s, weight format %s, shape %s, data type %s. " + "output format %s, shape %s, data type %s", + op_desc_ptr->GetName().c_str(), TypeUtils::FormatToSerialString(src_format).c_str(), + formats::ShapeToString(src_shape).c_str(), TypeUtils::DataTypeToSerialString(src_data_type).c_str(), + TypeUtils::FormatToSerialString(const_weight_ptr->GetTensorDesc().GetFormat()).c_str(), + formats::ShapeToString(const_weight_ptr->GetTensorDesc().GetShape()).c_str(), + TypeUtils::DataTypeToSerialString(const_weight_ptr->GetTensorDesc().GetDataType()).c_str(), + TypeUtils::FormatToSerialString(data_format).c_str(), formats::ShapeToString(data_shape).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str()); + + vector perm_list; + if (!AttrUtils::GetListInt(op_desc_ptr, kAttrOrder, perm_list) && + !AttrUtils::GetListInt(op_desc_ptr, kAttrPerm, perm_list)) { + GELOGW("Get perm_list failed, Transpose from shape %s to %s is not supported, ", + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(data_shape).c_str()); + return NOT_CHANGED; + } + + GELOGD("Transpose from %s to %s, shape %s to %s, perm_list %s, data type %s", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(data_format).c_str(), + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(data_shape).c_str(), + formats::ShapeToString(perm_list).c_str(), TypeUtils::DataTypeToSerialString(src_data_type).c_str()); + if (data_shape.empty() || src_data_type != data_type) { + GELOGW("Transpose is not supported. 
Invalid shape (src: %s, dst: %s) or inconsistent datatype (src: %s, dst: %s)", + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(data_shape).c_str(), + TypeUtils::DataTypeToSerialString(src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str()); + return NOT_CHANGED; + } + if (!KernelUtils::CheckSizeForTransOp(const_weight_ptr, op_desc_ptr)) { + GELOGE(FAILED, "CheckSize failed, input size is not equal to weight size"); + return NOT_CHANGED; + } + const uint8_t *src_data = const_weight_ptr->GetData().data(); + formats::TransResult trans_result; + auto ret = formats::TransposeWithShapeCheck(src_data, src_shape, data_shape, src_data_type, perm_list, trans_result); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to Transpose from %s to %s, shape %s to %s, perm_list %s, data type %s", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(data_format).c_str(), + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(data_shape).c_str(), + formats::ShapeToString(perm_list).c_str(), TypeUtils::DataTypeToSerialString(src_data_type).c_str()); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0)); + GE_CHECK_NOTNULL(output_ptr); + GE_CHK_STATUS_RET(output_ptr->SetData(trans_result.data.get(), trans_result.length)); + v_output.push_back(output_ptr); + return SUCCESS; +} + +REGISTER_KERNEL(PERMUTE, PermuteKernel); +REGISTER_KERNEL(TRANSPOSE, PermuteKernel); +REGISTER_KERNEL(TRANSPOSED, PermuteKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/permute_kernel.h b/src/ge/graph/passes/folding_kernel/permute_kernel.h new file mode 100644 index 00000000..b022abd7 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/permute_kernel.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_PERMUTE_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_PERMUTE_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class PermuteKernel : public Kernel { + public: + Status Compute(const OpDescPtr attr, const std::vector &input, + std::vector &v_output) override; + + private: + Status ValidateInput(const OpDescPtr &attr, const std::vector &input); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_PERMUTE_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/range_kernel.cc b/src/ge/graph/passes/folding_kernel/range_kernel.cc new file mode 100644 index 00000000..672bca6e --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/range_kernel.cc @@ -0,0 +1,168 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/range_kernel.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/fp16_t.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +constexpr size_t kRangeInputNum = 3; +constexpr uint32_t kRangeDimNum = 0; +const std::set range_supported_type = {DT_INT32, DT_FLOAT}; +} // namespace + +Status RangeKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGD("RangeKernel in"); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, input opDescPtr is nullptr."); + return PARAM_INVALID; + } + Status ret = RangeCheck(input); + if (ret != SUCCESS) { + return ret; + } + + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed"); + return MEMALLOC_FAILED; + } + + ConstGeTensorPtr start = input.at(0); + ConstGeTensorPtr limit = input.at(1); + ConstGeTensorPtr delta = input.at(2); + DataType data_type = delta->GetTensorDesc().GetDataType(); + if (data_type == DT_FLOAT) { + if (GetRange(*reinterpret_cast(start->GetData().data()), + *reinterpret_cast(limit->GetData().data()), + *reinterpret_cast(delta->GetData().data()), output_ptr) != SUCCESS) { + return PARAM_INVALID; + } + } else if (data_type == DT_INT32) { + if (GetRange(*reinterpret_cast(start->GetData().data()), + *reinterpret_cast(limit->GetData().data()), + *reinterpret_cast(delta->GetData().data()), output_ptr) != SUCCESS) { + return PARAM_INVALID; + } + } + + output_ptr->MutableTensorDesc().SetDataType(data_type); + v_output.push_back(output_ptr); + return SUCCESS; +} + +Status RangeKernel::RangeCheck(const std::vector &input) { + // check input number + if (input.size() != kRangeInputNum) { + 
GELOGI("The number of input for Range must be %zu.", kRangeInputNum); + return NOT_CHANGED; + } + + ConstGeTensorPtr start = input.at(0); + ConstGeTensorPtr limit = input.at(1); + ConstGeTensorPtr delta = input.at(2); + + GE_CHECK_NOTNULL(start); + GE_CHECK_NOTNULL(limit); + GE_CHECK_NOTNULL(delta); + // check whether there is data in Tensor + if (start->GetData().size() == 0 || limit->GetData().size() == 0 || delta->GetData().size() == 0) { + GELOGI("Check data size fail. start: %zu, limit: %zu, delta: %zu", start->GetData().size(), limit->GetData().size(), + delta->GetData().size()); + return NOT_CHANGED; + } + + // check whether the data types are the same + DataType type = start->GetTensorDesc().GetDataType(); + if (type != limit->GetTensorDesc().GetDataType() || type != delta->GetTensorDesc().GetDataType()) { + GELOGI("Data type of inputs for Range not matched."); + return NOT_CHANGED; + } + + // check whether are all scalars + size_t range_dim = static_cast(kRangeDimNum); + bool all_scalar = start->GetTensorDesc().MutableShape().GetDimNum() == range_dim && + limit->GetTensorDesc().MutableShape().GetDimNum() == range_dim && + delta->GetTensorDesc().MutableShape().GetDimNum() == range_dim; + if (!all_scalar) { + GELOGI("Inputs for Range are not all scalars."); + return NOT_CHANGED; + } + + // check if input data type is supported + if (range_supported_type.find(type) == range_supported_type.end()) { + GELOGI("Range does not support this Data type: %s", TypeUtils::DataTypeToSerialString(type).c_str()); + return NOT_CHANGED; + } + + return SUCCESS; +} + +template +Status RangeKernel::GetRange(const T start, const T limit, const T delta, GeTensorPtr &output) { + // check whether start, limit, delta is valid + if (delta == 0) { + GELOGE(PARAM_INVALID, "Requires delta != 0"); + return PARAM_INVALID; + } + if (start > limit && delta > 0) { + GELOGE(PARAM_INVALID, "Requires start <= limit when delta > 0"); + return PARAM_INVALID; + } + if (start < limit && delta < 0) 
{ + GELOGE(PARAM_INVALID, "Requires start >= limit when delta < 0"); + return PARAM_INVALID; + } + + int64_t size = (std::is_integral::value ? ((std::abs(limit - start) + std::abs(delta) - 1) / std::abs(delta)) + : std::ceil(std::abs((limit - start) / delta))); + output->MutableTensorDesc().SetShape(GeShape()); // when size is 0 + + if (size > 0) { + unique_ptr buf(new (std::nothrow) T[size]); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "New buf failed."); + return MEMALLOC_FAILED; + } + + T val = start; + for (int64_t i = 0; i < size; ++i) { + buf[i] = val; + val += delta; + } + if (output->SetData(reinterpret_cast(buf.get()), size * sizeof(T)) != GRAPH_SUCCESS) { + GELOGW("GetRange: SetData failed"); + } + output->MutableTensorDesc().SetShape(GeShape({size})); + } + + return SUCCESS; +} + +REGISTER_KERNEL(RANGE, RangeKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/range_kernel.h b/src/ge/graph/passes/folding_kernel/range_kernel.h new file mode 100644 index 00000000..50b1c232 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/range_kernel.h @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_RANGE_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_RANGE_KERNEL_H_ + +#include + +#include "graph/ge_tensor.h" +#include "inc/kernel.h" + +namespace ge { +class RangeKernel : public Kernel { + public: + Status Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + + private: + Status RangeCheck(const std::vector &input); + + template + Status GetRange(const T start, const T limit, const T delta, GeTensorPtr &output); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_RANGE_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/rank_kernel.cc b/src/ge/graph/passes/folding_kernel/rank_kernel.cc new file mode 100644 index 00000000..2dbd5e3d --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/rank_kernel.cc @@ -0,0 +1,63 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/rank_kernel.h" + +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "inc/kernel_factory.h" +#include "omg/omg_inner_types.h" + +using ge::Status; + +namespace { +const size_t kRankInputSize = 1; +const uint32_t kRankDataInputIndex = 0; +} // namespace + +namespace ge { +Status RankKernel::Compute(const NodePtr &node, std::vector &v_output) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + size_t input_node_size = op_desc->GetInputsSize(); + if (input_node_size != kRankInputSize) { + GELOGW("input node size must be %d", kRankInputSize); + return NOT_CHANGED; + } + + GeTensorDesc input_shape = op_desc->GetInputDesc(kRankDataInputIndex); + auto ndims = input_shape.GetShape().GetDimNum(); + GeTensorDesc tensor_desc(op_desc->GetOutputDesc(0)); + GeTensorPtr output_ptr; + output_ptr = MakeShared(tensor_desc, reinterpret_cast(&ndims), sizeof(ndims)); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed"); + return MEMALLOC_FAILED; + } + v_output.push_back(output_ptr); + return SUCCESS; +} + +REGISTER_KERNEL(RANK, RankKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/rank_kernel.h b/src/ge/graph/passes/folding_kernel/rank_kernel.h new file mode 100644 index 00000000..0de4960c --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/rank_kernel.h @@ -0,0 +1,30 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_RANK_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_RANK_KERNEL_H_ + +#include +#include "inc/kernel.h" + +namespace ge { +class RankKernel : public Kernel { + public: + Status Compute(const NodePtr &node, std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_RANK_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/reduce_prod_kernel.cc b/src/ge/graph/passes/folding_kernel/reduce_prod_kernel.cc new file mode 100644 index 00000000..11fc3107 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/reduce_prod_kernel.cc @@ -0,0 +1,286 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/reduce_prod_kernel.h" + +#include +#include + +#include "common/math/math_util.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kReduceProdDataIndex = 0; +const size_t kReduceProdAxisIndex = 1; +const size_t kReduceProdMaxAxisRank = 1; +const size_t kReduceProdInputOnlyData = 1; +const size_t kReduceProdInputSize = 2; +const std::set kReduceProdSupportedType = {DT_INT32}; +} // namespace + +Status ReduceProdKernel::ReduceProdCheck(const ge::OpDescPtr &op_desc_ptr, + const std::vector &input) const { + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "input opdesc is nullptr."); + return PARAM_INVALID; + } + if (input.size() != kReduceProdInputSize) { + if (input.size() == kReduceProdInputOnlyData) { + // Input only data, which means calculate product for all elements in data_tensor. 
+ GELOGI("ReduceProd node input size is 1, which does not have param axis, node name %s", + op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + GELOGE(PARAM_INVALID, "Unexpected ReduceProd node, node input size: %zu, node name: %s", input.size(), + op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + ConstGeTensorPtr data_tensor = input.at(kReduceProdDataIndex); + ConstGeTensorPtr axis_tensor = input.at(kReduceProdAxisIndex); + GE_CHECK_NOTNULL(data_tensor); + GE_CHECK_NOTNULL(axis_tensor); + if (axis_tensor->GetTensorDesc().GetShape().GetDimNum() > kReduceProdMaxAxisRank) { + GELOGE(PARAM_INVALID, "Axis must be at most rank 1, node node: %s", op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + if (data_tensor->GetData().size() == 0 || axis_tensor->GetData().size() == 0) { + GELOGE(PARAM_INVALID, "ReduceProdKernel data size of inputs is 0, node node: %s", op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + DataType data_type = data_tensor->GetTensorDesc().GetDataType(); + if (kReduceProdSupportedType.find(data_type) == kReduceProdSupportedType.end()) { + GELOGE(PARAM_INVALID, "ReduceProdKernel data type %s not support, node name: %s", + TypeUtils::DataTypeToSerialString(data_type).c_str(), op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + + return SUCCESS; +} + +Status ReduceProdKernel::AxisCal(const std::vector &input) { + ConstGeTensorPtr data_tensor = input.at(kReduceProdDataIndex); + ConstGeTensorPtr axis_tensor = input.at(kReduceProdAxisIndex); + // support: compute for the first element of axis. 
+ vector data_dims = data_tensor->GetTensorDesc().GetShape().GetDims(); + size_t data_dim_size = data_dims.size(); + int32_t *axis = const_cast(reinterpret_cast(axis_tensor->GetData().GetData())); + GE_CHECK_NOTNULL(axis); + if (static_cast(*axis) >= data_dim_size) { + GELOGE(PARAM_INVALID, "axis is out of rank of data_dims, axis is %d.", *axis); + return PARAM_INVALID; + } + axis_dim_ = data_dims[static_cast(*axis)]; + head_dim_ = 1; + end_dim_ = 1; + bool axis_appear = false; + for (size_t i = 0; i < data_dim_size; i++) { + if (i == static_cast(*axis)) { + axis_appear = true; + continue; + } + // data_dims is the vector of dims, element in data_dims isn't negative. + if (axis_appear) { + if (data_dims[i] != 0 && end_dim_ > (INT64_MAX / data_dims[i])) { + GELOGE(INTERNAL_ERROR, "Product is overflow. multiplier 1: %ld. multiplier 2: %ld.", end_dim_, data_dims[i]); + return INTERNAL_ERROR; + } + end_dim_ *= data_dims[i]; + } else { + if (data_dims[i] != 0 && head_dim_ > (INT64_MAX / data_dims[i])) { + GELOGE(INTERNAL_ERROR, "Product is overflow. multiplier 1: %ld. 
multiplier 2: %ld.", head_dim_, data_dims[i]); + return INTERNAL_ERROR; + } + head_dim_ *= data_dims[i]; + } + } + return SUCCESS; +} + +Status ReduceProdKernel::DataCal(const std::vector &input, ge::GeTensorPtr output_ptr) { + ConstGeTensorPtr data_tensor = input.at(kReduceProdDataIndex); + DataType data_dtype = data_tensor->GetTensorDesc().GetDataType(); + if (data_dtype == DT_INT32) { + int32_t *input_data = const_cast(reinterpret_cast(data_tensor->GetData().GetData())); + GE_CHECK_NOTNULL(input_data); + size_t data_num = data_tensor->GetData().size() / sizeof(int32_t); + unique_ptr buf(new (std::nothrow) int32_t[data_num]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new buf failed"); + return INTERNAL_ERROR; + } + + int32_t tmp_x = 1; + for (int64_t i = 0; i < head_dim_; ++i) { + for (int64_t j = 0; j < end_dim_; ++j) { + // all index for input_data is less than size of input_data + tmp_x = input_data[static_cast(i * end_dim_ * axis_dim_ + j)]; + for (int64_t k = 1; k < axis_dim_; ++k) { + int32_t tmp_y = input_data[static_cast(i * end_dim_ * axis_dim_ + j + k * end_dim_)]; + if (CheckInt32MulOverflow(tmp_x, tmp_y) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Product is overflow. multiplier 1: %d. 
multiplier 2: %d.", tmp_x, tmp_y); + return INTERNAL_ERROR; + } + tmp_x *= tmp_y; + } + buf[static_cast(i * end_dim_ + j)] = tmp_x; + } + } + + GE_IF_BOOL_EXEC(output_ptr->SetData(reinterpret_cast(buf.get()), + static_cast(head_dim_ * end_dim_ * sizeof(int32_t))) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "set data failed"); + return INTERNAL_ERROR); + output_ptr->MutableTensorDesc().SetDataType(data_dtype); + } + return SUCCESS; +} + +void ReduceProdKernel::ShapeCal(const ge::OpDescPtr &op_desc_ptr, const std::vector &input, + ge::GeTensorPtr output_ptr) { + ConstGeTensorPtr data_tensor = input.at(kReduceProdDataIndex); + ConstGeTensorPtr axis_tensor = input.at(kReduceProdAxisIndex); + auto axis_data = axis_tensor->GetData().GetData(); + if (axis_data == nullptr) { + GELOGE(FAILED, "Data of axis tensor is nullptr."); + return; + } + vector data_dims = data_tensor->GetTensorDesc().GetShape().GetDims(); + int32_t data_dim_size = static_cast(data_dims.size()); + int32_t axis = *(const_cast(reinterpret_cast(axis_data))); + bool keep_dims = false; + if (!AttrUtils::GetBool(op_desc_ptr, "keep_dims", keep_dims)) { + GELOGI("Get the attr keep_dims was failed."); + } + + if (keep_dims) { + for (int32_t i = 0; i < data_dim_size; i++) { + if (i == axis) { + data_dims[i] = 1; + } + } + } else { + vector tmp_dims; + for (int32_t i = 0; i < data_dim_size; i++) { + if (i != axis) { + tmp_dims.push_back(data_dims[i]); + } + } + data_dims.clear(); + data_dims = tmp_dims; + } + output_ptr->MutableTensorDesc().SetShape(GeShape(data_dims)); +} + +Status ReduceProdKernel::ComputeNoAxis(const ge::OpDescPtr &op_desc_ptr, const std::vector &input, + ge::GeTensorPtr output_ptr) { + ConstGeTensorPtr data_tensor = input.at(kReduceProdDataIndex); + GE_CHECK_NOTNULL(data_tensor); + if (data_tensor->GetData().size() == 0) { + GELOGE(PARAM_INVALID, "ReduceProdKernel data size of inputs is 0, node node: %s", op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + DataType data_type = 
data_tensor->GetTensorDesc().GetDataType(); + if (kReduceProdSupportedType.find(data_type) == kReduceProdSupportedType.end()) { + GELOGE(PARAM_INVALID, "ReduceProdKernel data type %s not support, node name: %s", + TypeUtils::DataTypeToSerialString(data_type).c_str(), op_desc_ptr->GetName().c_str()); + return PARAM_INVALID; + } + + if (data_type == DT_INT32) { + int32_t *input_data = const_cast(reinterpret_cast(data_tensor->GetData().GetData())); + GE_CHECK_NOTNULL(input_data); + size_t data_num = data_tensor->GetData().size() / sizeof(int32_t); + unique_ptr buf(new (std::nothrow) int32_t[data_num]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new buf failed"); + return INTERNAL_ERROR; + } + + int32_t tmp_x = input_data[0]; + int32_t tmp_y = 1; + for (size_t k = 1; k < data_num; ++k) { + tmp_y = input_data[k]; + if (CheckInt32MulOverflow(tmp_x, tmp_y) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Product is overflow. multiplier 1: %d. multiplier 2: %d.", tmp_x, tmp_y); + return INTERNAL_ERROR; + } + tmp_x *= tmp_y; + } + buf[0] = tmp_x; + GE_IF_BOOL_EXEC(output_ptr->SetData(reinterpret_cast(buf.get()), sizeof(int32_t)) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "set data failed"); + return INTERNAL_ERROR); + output_ptr->MutableTensorDesc().SetDataType(data_type); + output_ptr->MutableTensorDesc().SetShape(GeShape()); + } + return SUCCESS; +} + +Status ReduceProdKernel::Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGI("ReduceProdKernel in."); + Status ret = ReduceProdCheck(op_desc_ptr, input); + if (ret != SUCCESS && ret != NOT_CHANGED) { + GELOGE(PARAM_INVALID, "ReduceProdKernel input is invalid, failed to fold node."); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr = MakeShared(); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed, node name %s.", op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + if (ret == NOT_CHANGED) { + // compute output tensor 
when no param axis + ret = ComputeNoAxis(op_desc_ptr, input, output_ptr); + if (ret != SUCCESS) { + return NOT_CHANGED; + } + } else { + // calculate axis to reduce + ret = AxisCal(input); + if (ret != SUCCESS) { + return NOT_CHANGED; + } + // calculate data and data type + ret = DataCal(input, output_ptr); + if (ret != SUCCESS) { + return NOT_CHANGED; + } + // calculate shape + ShapeCal(op_desc_ptr, input, output_ptr); + } + + // print output tensor information, and will be deleted + GELOGD("ReduceProd op %s output tensor data size is %zu", op_desc_ptr->GetName().c_str(), + output_ptr->GetData().size()); + vector data_dims = output_ptr->GetTensorDesc().GetShape().GetDims(); + GELOGD("ReduceProd op %s output tensor dim size is %zu", op_desc_ptr->GetName().c_str(), data_dims.size()); + + v_output.emplace_back(output_ptr); + GELOGI("ReduceProdKernel success."); + return SUCCESS; +} +REGISTER_KERNEL(REDUCEPROD, ReduceProdKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/reduce_prod_kernel.h b/src/ge/graph/passes/folding_kernel/reduce_prod_kernel.h new file mode 100644 index 00000000..326dd2f5 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/reduce_prod_kernel.h @@ -0,0 +1,45 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_REDUCE_PROD_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_REDUCE_PROD_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class ReduceProdKernel : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; + + private: + Status ReduceProdCheck(const ge::OpDescPtr &op_desc_ptr, const std::vector &input) const; + Status ComputeNoAxis(const ge::OpDescPtr &op_desc_ptr, const std::vector &input, + ge::GeTensorPtr output_ptr); + Status AxisCal(const std::vector &input); + Status DataCal(const std::vector &input, ge::GeTensorPtr output_ptr); + void ShapeCal(const ge::OpDescPtr &op_desc_ptr, const std::vector &input, + ge::GeTensorPtr output_ptr); + + int64_t axis_dim_; + int64_t head_dim_; + int64_t end_dim_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_REDUCE_PROD_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/reformat_kernel.cc b/src/ge/graph/passes/folding_kernel/reformat_kernel.cc new file mode 100644 index 00000000..0c84f089 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/reformat_kernel.cc @@ -0,0 +1,98 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/reformat_kernel.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "common/ge/ge_util.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kReFormatInputSize = 1; +const size_t kReformatFirstInput = 0; +const size_t kReformatFirstOutput = 0; +} // namespace + +Status ReFormatKernel::ValidateInput(const OpDescPtr &op_desc_ptr, const std::vector &input) { + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Input opDescPtr is nullptr."); + return PARAM_INVALID; + } + if (op_desc_ptr->GetInputsSize() != kReFormatInputSize) { + GELOGW("trans_op has more than 1 input_size."); + return PARAM_INVALID; + } + if (input.empty()) { + GELOGE(PARAM_INVALID, "Input tensor vector is empty"); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status ReFormatKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGD("ReFormatKernel begin."); + Status status = ValidateInput(op_desc_ptr, input); + if (status != SUCCESS) { + return status; + } + + ConstGeTensorPtr const_weight_ptr = input[kReformatFirstInput]; + if (const_weight_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, Input_0 is nullptr."); + return NOT_CHANGED; + } + + GeTensorDesc op_desc = op_desc_ptr->GetOutputDesc(kReformatFirstOutput); + GeTensorDesc op_desc_in = op_desc_ptr->GetInputDesc(kReformatFirstInput); + auto src_shape = op_desc_in.GetShape().GetDims(); + auto src_dtype = op_desc_in.GetDataType(); + auto dst_shape = op_desc.GetShape().GetDims(); + auto dst_dtype = op_desc.GetDataType(); + if (src_dtype != dst_dtype || src_shape != dst_shape) { + GELOGW("Check params failed. 
src data type %s and shape %s should be equal to dst data type %s and shape %s", + TypeUtils::DataTypeToSerialString(src_dtype).c_str(), formats::ShapeToString(src_shape).c_str(), + TypeUtils::DataTypeToSerialString(dst_dtype).c_str(), formats::ShapeToString(dst_shape).c_str()); + return NOT_CHANGED; + } + if (!KernelUtils::CheckSizeForTransOp(const_weight_ptr, op_desc_ptr)) { + GELOGE(FAILED, "CheckSize failed, input size(shape %s) is not equal to weight size(shape %s)", + formats::ShapeToString(src_shape).c_str(), + formats::ShapeToString(const_weight_ptr->GetTensorDesc().GetShape()).c_str()); + return NOT_CHANGED; + } + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(kReformatFirstOutput)); + if (output_ptr == nullptr) { + GELOGE(INTERNAL_ERROR, "Create shared ptr for GeTensor failed"); + return NOT_CHANGED; + } + GE_IF_BOOL_EXEC(output_ptr->SetData(input.at(0)->GetData()) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "set data failed"); + return NOT_CHANGED); + v_output.emplace_back(output_ptr); + GELOGD("ReFormatKernel success."); + return SUCCESS; +} + +REGISTER_KERNEL(REFORMAT, ReFormatKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/reformat_kernel.h b/src/ge/graph/passes/folding_kernel/reformat_kernel.h new file mode 100644 index 00000000..6b9f14c8 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/reformat_kernel.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_REFORMAT_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_REFORMAT_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class ReFormatKernel : public Kernel { + public: + Status Compute(const OpDescPtr attr, const std::vector &input, + std::vector &v_output) override; + + private: + Status ValidateInput(const OpDescPtr &attr, const std::vector &input); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_REFORMAT_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/reshape_kernel.cc b/src/ge/graph/passes/folding_kernel/reshape_kernel.cc new file mode 100644 index 00000000..4e925836 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/reshape_kernel.cc @@ -0,0 +1,88 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/reshape_kernel.h" + +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const int kReshapeDataIndex = 0; +const int kOutputDescFirstIndex = 0; +const size_t kReshapeOutputSize = 1; +const size_t kReshapeInputSize = 2; +} // namespace + +Status ReshapeKernel::Compute(const NodePtr &node_ptr) { + GELOGI("Reshape dimension kernel in."); + if (node_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Parameter's invalid, Input is nullptr."); + return PARAM_INVALID; + } + Status ret = KernelUtils::CheckDimensionNodeInfo(node_ptr); + if (ret != SUCCESS) { + GELOGW("GetDimensionNodeInfo failed"); + return ret; + } + + if (!KernelUtils::CheckFormatSupported(node_ptr)) { + GELOGW("CheckFormatSupported failed"); + return NOT_CHANGED; + } + GELOGI("Reshape dimension kernel success."); + return SUCCESS; +} + +Status ReshapeKernel::Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGI("Reshape folding kernel in."); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Input opdesc is nullptr."); + return PARAM_INVALID; + } + if ((input.size() != kReshapeInputSize) || (op_desc_ptr->GetOutputsSize() != kReshapeOutputSize)) { + GELOGW("Unexpected Reshape node, node input size: %zu, node output size: %zu, node name: %s", input.size(), + op_desc_ptr->GetOutputsSize(), op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + auto output_tensor_desc = op_desc_ptr->GetOutputDesc(kOutputDescFirstIndex); + GeTensorPtr output_ptr = MakeShared(output_tensor_desc); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Failed to fold node %s, out of memory", op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + // print output tensor information, and will be deleted + 
GELOGI("Reshape op %s output tensor data size is %zu", op_desc_ptr->GetName().c_str(), output_ptr->GetData().size()); + size_t data_dim_size = output_ptr->GetTensorDesc().GetShape().GetDims().size(); + GELOGI("Reshape op %s output tensor dim size is %zu", op_desc_ptr->GetName().c_str(), data_dim_size); + + if (output_ptr->SetData(input.at(kReshapeDataIndex)->GetData()) != GRAPH_SUCCESS) { + GELOGW("Compute: SetData failed"); + } + v_output.emplace_back(output_ptr); + GELOGI("Reshape folding kernel success."); + return SUCCESS; +} + +REGISTER_KERNEL(RESHAPE, ReshapeKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/reshape_kernel.h b/src/ge/graph/passes/folding_kernel/reshape_kernel.h new file mode 100644 index 00000000..0e9692d9 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/reshape_kernel.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_RESHAPE_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_RESHAPE_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +/// +/// @ingroup ge +/// @brief Reshape optimization operator processing +/// +class ReshapeKernel : public Kernel { + public: + Status Compute(const NodePtr &node_ptr) override; + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_RESHAPE_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/rsqrt_kernel.cc b/src/ge/graph/passes/folding_kernel/rsqrt_kernel.cc new file mode 100644 index 00000000..ff3199a0 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/rsqrt_kernel.cc @@ -0,0 +1,94 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/rsqrt_kernel.h" + +#include + +#include + +#include "common/debug/ge_log.h" +#include "common/debug/log.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kRsqrtInputSize = 1; +const size_t kRsqrtInputIndex0 = 0; +} // namespace +Status RsqrtKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GELOGI("RsqrtKernel in."); + GE_CHECK_NOTNULL(op_desc_ptr); + // check input size + if (input.size() != kRsqrtInputSize) { + GELOGW("The number of input for rsqrt must be %zu.", kRsqrtInputSize); + return PARAM_INVALID; + } + + ConstGeTensorPtr input_ = input.at(kRsqrtInputIndex0); + GE_CHECK_NOTNULL(input_); + const GeShape &x_shape = input_->GetTensorDesc().GetShape(); + + size_t data_size = input_->GetData().size(); + size_t data_count = data_size / sizeof(float); + + // check whether input is zero + for (size_t i = 0; i < data_count; i++) { + if (fabs(*(reinterpret_cast(input_->GetData().data()) + i)) < FLT_EPSILON) { + GELOGW("input must be not equal 0."); + return NOT_CHANGED; + } + } + if (data_count > 0) { + unique_ptr buf(new (std::nothrow) float[data_count]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new buf failed"); + return NOT_CHANGED; + } + + for (size_t i = 0; i < data_count; i++) { + float denominator = sqrt(*(reinterpret_cast(input_->GetData().data()) + i)); + if (fabs(denominator) < FLT_EPSILON) { + GELOGW("input must be not equal 0."); + return NOT_CHANGED; + } + buf[i] = 1 / denominator; + } + + GeTensorPtr output_ptr = MakeShared(); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "MakeShared GeTensor failed, node name %s.", op_desc_ptr->GetName().c_str()); + return NOT_CHANGED; + } + + output_ptr->MutableTensorDesc().SetDataType(DT_FLOAT); 
+ GE_IF_BOOL_EXEC(output_ptr->SetData(reinterpret_cast(buf.get()), data_size) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "set data failed"); + return NOT_CHANGED); + output_ptr->MutableTensorDesc().SetShape(x_shape); + v_output.push_back(output_ptr); + } + GELOGI("RsqrtKernel success."); + return SUCCESS; +} + +REGISTER_KERNEL(RSQRT, RsqrtKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/rsqrt_kernel.h b/src/ge/graph/passes/folding_kernel/rsqrt_kernel.h new file mode 100644 index 00000000..f0bf9d7e --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/rsqrt_kernel.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_RSQRT_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_RSQRT_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class RsqrtKernel : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_RSQRT_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/shape_kernel.cc b/src/ge/graph/passes/folding_kernel/shape_kernel.cc new file mode 100644 index 00000000..38beee22 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/shape_kernel.cc @@ -0,0 +1,60 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/shape_kernel.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/passes/pass_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kShapeInputSize = 1; +const size_t kShapeOutputSize = 1; +} // namespace +Status ShapeKernel::Compute(const NodePtr &node, std::vector &v_output) { + GELOGD("ShapeKernel in"); + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + bool size_check = ((op_desc->GetInputsSize() != kShapeInputSize) || (op_desc->GetOutputsSize() != kShapeOutputSize)); + if (size_check) { + GELOGW("Size check fail, inputs size:%zu, outputs size:%zu", op_desc->GetInputsSize(), op_desc->GetOutputsSize()); + return NOT_CHANGED; + } + if (KernelUtils::IsUnknownShape(op_desc->GetInputDesc(0).GetShape())) { + GELOGW("Input shape is unknown, ignore shape kernel."); + return NOT_CHANGED; + } + vector dims = op_desc->GetInputDesc(0).GetShape().GetDims(); + Status ret = PassUtils::ConstructTensorDescWithData(op_desc->GetOutputDesc(0), dims, v_output); + if (ret != SUCCESS) { + GELOGE(ret, "Shape kernel construct tensor desc failed!"); + return ret; + } + GELOGD("Shape kernel success"); + return SUCCESS; +} + +REGISTER_KERNEL(SHAPE, ShapeKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/shape_kernel.h b/src/ge/graph/passes/folding_kernel/shape_kernel.h new file mode 100644 index 00000000..8e8791e5 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/shape_kernel.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_SHAPE_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_SHAPE_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class ShapeKernel : public Kernel { + public: + Status Compute(const NodePtr &node, std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_SHAPE_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/shape_n_kernel.cc b/src/ge/graph/passes/folding_kernel/shape_n_kernel.cc new file mode 100644 index 00000000..a4dbdedb --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/shape_n_kernel.cc @@ -0,0 +1,60 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/shape_n_kernel.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/passes/pass_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +Status ShapeNKernel::Compute(const NodePtr &node, std::vector &v_output) { + GELOGD("ShapeN kernel in"); + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + if (op_desc->GetAllInputsDesc().size() != op_desc->GetAllOutputsDesc().size()) { + GELOGW("ShapeN kernel, input and output are not the same size. Input size:%zu, output size:%zu", + op_desc->GetAllInputsDesc().size(), op_desc->GetAllOutputsDesc().size()); + return NOT_CHANGED; + } + + for (size_t i = 0; i < op_desc->GetAllInputsDesc().size(); i++) { + if (KernelUtils::IsUnknownShape(op_desc->GetInputDesc(i).GetShape())) { + GELOGW("Input %zu shape is unknown, ignore shape_n kernel.", i); + return NOT_CHANGED; + } + vector dims = op_desc->GetInputDesc(i).GetShape().GetDims(); + Status ret = PassUtils::ConstructTensorDescWithData(op_desc->GetOutputDesc(i), dims, v_output); + if (ret != SUCCESS) { + GELOGE(PARAM_INVALID, "ShapeN kernel construct tensor desc failed, i:%zu", i); + return ret; + } + } + + GELOGD("ShapeN kernel success"); + return SUCCESS; +} + +REGISTER_KERNEL(SHAPEN, ShapeNKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/shape_n_kernel.h b/src/ge/graph/passes/folding_kernel/shape_n_kernel.h new file mode 100644 index 00000000..55829a39 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/shape_n_kernel.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_SHAPE_N_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_SHAPE_N_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class ShapeNKernel : public Kernel { + public: + Status Compute(const NodePtr &node, std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_SHAPE_N_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/size_kernel.cc b/src/ge/graph/passes/folding_kernel/size_kernel.cc new file mode 100644 index 00000000..c5d59fd3 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/size_kernel.cc @@ -0,0 +1,84 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/size_kernel.h" + +#include + +#include + +#include "common/debug/log.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" +#include "framework/common/util.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/passes/pass_utils.h" +#include "inc/kernel_factory.h" +#include "omg/omg_inner_types.h" + +namespace ge { +namespace { +const size_t kSizeInputSize = 1; +const size_t kSizeOutputSize = 1; +} // namespace + +Status SizeKernel::Compute(const NodePtr &node, std::vector &v_output) { + GELOGD("SizeKernel in"); + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + OpDescPtr op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "node:%s opdesc is null", node->GetName().c_str()); + return PARAM_INVALID; + } + + bool size_check = ((op_desc->GetInputsSize() != kSizeInputSize) || (op_desc->GetOutputsSize() != kSizeOutputSize)); + if (size_check) { + GELOGW("SizeKernel input size check fail, GetInputsSize:%zu", op_desc->GetInputsSize()); + return NOT_CHANGED; + } + + if (KernelUtils::IsUnknownShape(op_desc->GetInputDesc(0).GetShape())) { + GELOGW("Input shape is unknown, ignore size kernel."); + return NOT_CHANGED; + } + + int64_t size = 1; + // Calculate the number of elements of the sensor + for (int64_t dim : op_desc->GetInputDesc(0).GetShape().GetDims()) { + if (!CheckInt64MulOverflow(size, dim)) { + GELOGE(INTERNAL_ERROR, "int64 overflow!"); + return INTERNAL_ERROR; + } + size *= dim; + } + + std::vector data{size}; + Status ret = PassUtils::ConstructTensorDescWithData(op_desc->GetOutputDesc(0), data, v_output, true); + if (ret != SUCCESS) { + GELOGE(ret, "Size kernel construct tensor desc fail."); + return ret; + } + + GELOGD("Size kernel success"); + return SUCCESS; +} +REGISTER_KERNEL(SIZE, SizeKernel); +} // namespace ge 
diff --git a/src/ge/graph/passes/folding_kernel/size_kernel.h b/src/ge/graph/passes/folding_kernel/size_kernel.h new file mode 100644 index 00000000..3a309bc7 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/size_kernel.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_SIZE_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_SIZE_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class SizeKernel : public Kernel { + public: + Status Compute(const NodePtr &node, std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_SIZE_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/slice_kernel.cc b/src/ge/graph/passes/folding_kernel/slice_kernel.cc new file mode 100644 index 00000000..d14e740a --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/slice_kernel.cc @@ -0,0 +1,121 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/folding_kernel/slice_kernel.h" + +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const size_t kSliceInputSize = 3; +const size_t kSliceInputIndex0 = 0; +const size_t kSliceInputIndex1 = 1; +const size_t kSliceInputIndex2 = 2; +} // namespace + +Status SliceKernel::Compute(const OpDescPtr attr, const std::vector &input, + vector &v_output) { + GELOGI("SliceKernel in."); + if (attr == nullptr) { + GELOGE(PARAM_INVALID, "input opdescptr is nullptr."); + return NOT_CHANGED; + } + // check input size + if (input.size() != kSliceInputSize) { + GELOGE(PARAM_INVALID, "The number of input for slice must be %zu.", kSliceInputSize); + return NOT_CHANGED; + } + + ConstGeTensorPtr x_ = input[kSliceInputIndex0]; + ConstGeTensorPtr begin = input[kSliceInputIndex1]; + ConstGeTensorPtr size = input[kSliceInputIndex2]; + if (x_ == nullptr || begin == nullptr || size == nullptr) { + GELOGE(PARAM_INVALID, "input tensor is nullptr."); + return NOT_CHANGED; + } + + // data type in input_x + auto data_type = x_->GetTensorDesc().GetDataType(); + // check data type of begin and size + if (begin->GetTensorDesc().GetDataType() != DT_INT32 || size->GetTensorDesc().GetDataType() != DT_INT32) { + GELOGE(PARAM_INVALID, "Data type of begin and size for slice are not 
DT_INT32."); + return NOT_CHANGED; + } + + void *data = reinterpret_cast(const_cast(x_->GetData().data())); + int32_t *begin_data = const_cast(reinterpret_cast(begin->GetData().GetData())); + int32_t *size_data = const_cast(reinterpret_cast(size->GetData().GetData())); + GE_CHECK_NOTNULL(data); + GE_CHECK_NOTNULL(begin_data); + GE_CHECK_NOTNULL(size_data); + + size_t data_size = x_->GetData().size() / sizeof(int32_t); + size_t begin_size = begin->GetData().size() / sizeof(int32_t); + size_t size_size = size->GetData().size() / sizeof(int32_t); + const ge::GeShape &x_shape = x_->GetTensorDesc().GetShape(); + size_t dim_size = x_shape.GetDimNum(); + if (dim_size != begin_size || dim_size != size_size) { + GELOGE(PARAM_INVALID, "Data type of begin and size for slice are not DT_INT32."); + return NOT_CHANGED; + } + + std::vector input_dims; + std::vector begin_vec; + std::vector output_dims; + std::vector stride_vec; + for (size_t i = 0; i < dim_size; i++) { + int32_t begin_i = begin_data[i]; + int32_t size_i = size_data[i]; + int64_t dim_i = x_shape.GetDim(i); + if (size_i < 0) { + GE_IF_BOOL_EXEC(((dim_i - begin_i) > INT32_MAX) || ((dim_i - begin_i) < INT32_MIN), + GELOGE(PARAM_INVALID, " %ld and %d sub can result in overflow!.", dim_i, begin_i); + return INTERNAL_ERROR); + size_i = dim_i - begin_i; + } + input_dims.push_back(dim_i); + begin_vec.push_back(begin_i); + output_dims.push_back(size_i); + stride_vec.push_back(1); + } + // construct tensorDesc + ge::GeShape output_shape(output_dims); + GeTensorDesc output_tensor_desc(output_shape, FORMAT_NCHW, data_type); + GeTensorPtr output_ptr = MakeShared(output_tensor_desc); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed, node name %s.", attr->GetName().c_str()); + return NOT_CHANGED; + } + Status ret = OpUtils::SetOutputSliceData(data, static_cast(data_size), data_type, input_dims, begin_vec, + output_dims, output_ptr.get(), stride_vec); + if (ret != SUCCESS) { + 
GELOGE(INTERNAL_ERROR, "SetOutputSliceData failed."); + return NOT_CHANGED; + } + v_output.push_back(output_ptr); + GELOGI("SliceKernel success."); + return SUCCESS; +} + +REGISTER_KERNEL(SLICE, SliceKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/slice_kernel.h b/src/ge/graph/passes/folding_kernel/slice_kernel.h new file mode 100644 index 00000000..582e140a --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/slice_kernel.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_SLICE_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_SLICE_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class SliceKernel : public Kernel { + public: + Status Compute(const OpDescPtr attr, const std::vector &input, + vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_SLICE_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/squeeze_kernel.cc b/src/ge/graph/passes/folding_kernel/squeeze_kernel.cc new file mode 100644 index 00000000..c68fe564 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/squeeze_kernel.cc @@ -0,0 +1,82 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/folding_kernel/squeeze_kernel.h" + +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "inc/kernel_factory.h" + +namespace { +constexpr uint32_t kInputDescIndex = 0; +constexpr uint32_t kOutputDescIndex = 0; +constexpr size_t kSqueezeInputSize = 1; +constexpr size_t kSqueezeOutputSize = 1; +} + +namespace ge { +Status SqueezeKernel::Compute(const NodePtr &node_ptr) { + if (node_ptr == nullptr) { + GELOGE(PARAM_INVALID, "parameter is nullptr"); + return PARAM_INVALID; + } + if (!KernelUtils::CheckFormatSupported(node_ptr)) { + GELOGW("CheckFormatSupported failed"); + return NOT_CHANGED; + } + return SUCCESS; +} + +Status SqueezeKernel::Compute(const ge::OpDescPtr op_desc, const std::vector &input, + std::vector &v_output) { + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "SqueezeKernel op_desc is null."); + return PARAM_INVALID; + } + GELOGD("SqueezeKernel in: node[%s]", op_desc->GetName().c_str()); + + bool size_check = ((op_desc->GetInputsSize() != kSqueezeInputSize) || + (op_desc->GetOutputsSize() != kSqueezeOutputSize) || (input.size() != kSqueezeInputSize)); + if (size_check) { + GELOGW("Size check fail, node[%s] inputs size:%zu, outputs size:%zu", op_desc->GetName().c_str(), + op_desc->GetInputsSize(), op_desc->GetOutputsSize()); + return NOT_CHANGED; + } + + auto tensor_desc = op_desc->GetOutputDesc(kOutputDescIndex); + GeTensorPtr output_ptr = 
MakeShared(tensor_desc); + if (output_ptr == nullptr) { + GELOGE(PARAM_INVALID, "node [%s] make shared failed.", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + auto ge_tensor = input.at(kInputDescIndex); + if (ge_tensor == nullptr) { + GELOGE(PARAM_INVALID, "node [%s] get input failed.", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + if (output_ptr->SetData(ge_tensor->GetData()) != GRAPH_SUCCESS) { + GELOGW("Compute: SetData failed"); + } + v_output.emplace_back(output_ptr); + GELOGI("SqueezeKernel success: node[%s]", op_desc->GetName().c_str()); + + return SUCCESS; +} +REGISTER_KERNEL(SQUEEZE, SqueezeKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/squeeze_kernel.h b/src/ge/graph/passes/folding_kernel/squeeze_kernel.h new file mode 100644 index 00000000..8212bac6 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/squeeze_kernel.h @@ -0,0 +1,40 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_SQUEEZE_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_SQUEEZE_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +/// +/// @ingroup ge +/// @brief Squeeze optimization operator processing +/// +class SqueezeKernel : public Kernel { + public: + // DimensionAdjustPass + Status Compute(const NodePtr &node_ptr) override; + + // ConstantFoldingPass + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_SQUEEZE_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/ssd_prior_box_kernel.cc b/src/ge/graph/passes/folding_kernel/ssd_prior_box_kernel.cc new file mode 100644 index 00000000..490a8950 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/ssd_prior_box_kernel.cc @@ -0,0 +1,380 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/ssd_prior_box_kernel.h" + +#include + +#include +#include +#include + +#include "common/math/math_util.h" +#include "common/math_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "framework/common/util.h" +#include "graph/passes/pass_utils.h" +#include "graph/utils/attr_utils.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const float kMinistBias = 1e-6; +const float kAspectRationBase = 1.0; +const size_t kBoundarySize = 4; +const size_t kOutputDescFirstIndex = 0; +const size_t kDimIndexZero = 0; +const size_t kDimIndexOne = 1; +const size_t kDimIndexTwo = 2; +const size_t kDimIndexThree = 3; +const int kNumVariance = 4; +const int32_t kNumOne = 1; +const int32_t kNumTwo = 2; +const float kFloatNumTwo = 2.0; +} // namespace + +Status SsdPriorboxKernel::GetPriorSizeParam(const OpDescPtr &op_desc, int &img_width, int &img_height, float &step_w, + float &step_h, int &layer_width, int &layer_height) { + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "input opdescptr is nullptr."); + return PARAM_INVALID; + } + const GeTensorDesc tensor_desc = op_desc->GetInputDesc(kOutputDescFirstIndex); + layer_width = tensor_desc.GetShape().GetDim(kDimIndexThree); + layer_height = tensor_desc.GetShape().GetDim(kDimIndexTwo); + if (layer_height == 0 || layer_width == 0) { + GELOGE(PARAM_INVALID, "op:%s NCHW_DIM_H or NCHW_DIM_W is 0", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + int32_t img_h = 0; + int32_t img_w = 0; + if (!AttrUtils::GetInt(op_desc, SSD_PRIOR_BOX_ATTR_IMG_H, img_h)) { + GELOGE(PARAM_INVALID, "op:%s img_h attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + if (!AttrUtils::GetInt(op_desc, SSD_PRIOR_BOX_ATTR_IMG_W, img_w)) { + GELOGE(PARAM_INVALID, "op:%s img_w attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + if (img_h == 0 || img_w == 0) { + GELOGE(PARAM_INVALID, "op:%s Either img_h or img_w is null", 
op_desc->GetName().c_str()); + return PARAM_INVALID; + } else { + img_width = static_cast(img_w); + img_height = static_cast(img_h); + } + float step_height = 0.0; + float step_width = 0.0; + if (!AttrUtils::GetFloat(op_desc, SSD_PRIOR_BOX_ATTR_STEP_H, step_height)) { + GELOGE(PARAM_INVALID, "op:%s step_height attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + if (!AttrUtils::GetFloat(op_desc, SSD_PRIOR_BOX_ATTR_STEP_W, step_width)) { + GELOGE(PARAM_INVALID, "op:%s step_width attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + if ((fabs(step_height) < FLT_EPSILON) || (fabs(step_width) < FLT_EPSILON)) { + step_w = static_cast(img_width) / layer_width; + step_h = static_cast(img_height) / layer_height; + } else { + step_w = step_width; + step_h = step_height; + } + + return SUCCESS; +} + +Status SsdPriorboxKernel::GetPriorListParam(const OpDescPtr &op_desc, vector &min_size_list, + vector &max_size_list, vector &aspect_ratio_list, + vector &variance_list) { + if (!AttrUtils::GetListFloat(op_desc, SSD_PRIOR_BOX_ATTR_MIN_SIZE, min_size_list)) { + GELOGE(PARAM_INVALID, "op:%s min_size() attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + + if (!AttrUtils::GetListFloat(op_desc, SSD_PRIOR_BOX_ATTR_MAX_SIZE, max_size_list)) { + GELOGE(PARAM_INVALID, "op:%s max_size() attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + + if (!AttrUtils::GetListFloat(op_desc, SSD_PRIOR_BOX_ATTR_VARIANCE, variance_list)) { + GELOGE(PARAM_INVALID, "op:%s variance() attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + + if (!AttrUtils::GetListFloat(op_desc, SSD_PRIOR_BOX_ATTR_ASPECT_RATIO, aspect_ratio_list)) { + GELOGE(PARAM_INVALID, "op:%s aspect_ratio() attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + // if flip is true,aspect_ratio_list need add reciprocal + bool flip = false; + if (!AttrUtils::GetBool(op_desc, SSD_PRIOR_BOX_ATTR_FLIP, flip)) { + 
GELOGE(PARAM_INVALID, "op:%s flip() attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + vector aspect_ratios; + aspect_ratios.push_back(SSD_PRIORBOX_ASPECT_RATIO_VALUE); + for (size_t i = 0; i < aspect_ratio_list.size(); i++) { + float ar = aspect_ratio_list.at(i); + bool already_exist = + std::any_of(aspect_ratios.begin(), aspect_ratios.end(), [&ar](float x) { return fabs(ar - x) < kMinistBias; }); + if (!already_exist) { + aspect_ratios.push_back(ar); + if (flip) { + aspect_ratios.push_back(1. / ar); // 1. reciprocal + } + } + } + aspect_ratio_list = std::move(aspect_ratios); + return SUCCESS; +} + +Status SsdPriorboxKernel::GetPriorOtherParam(const OpDescPtr &op_desc, float &offset, bool &clip) { + if (!AttrUtils::GetBool(op_desc, SSD_PRIOR_BOX_ATTR_CLIP, clip)) { + GELOGE(PARAM_INVALID, "op:%s clip() attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + if (!AttrUtils::GetFloat(op_desc, SSD_PRIOR_BOX_ATTR_OFFSET, offset)) { + GELOGE(PARAM_INVALID, "op:%s offset() attr is null", op_desc->GetName().c_str()); + return PARAM_INVALID; + } + return SUCCESS; +} + +Status SsdPriorboxKernel::SetVariance(const vector &variance, const int dim, const int32_t layer_height, + const int32_t layer_width, const int num_priors, float *output_data) { + if (output_data == nullptr) { + GELOGE(PARAM_INVALID, "output_data is null"); + return PARAM_INVALID; + } + + output_data += dim; + if (variance.size() == 1) { + if (NnSet(dim, variance[0], output_data) != SUCCESS) { + GELOGE(PARAM_INVALID, "NnSet failed."); + return PARAM_INVALID; + } + } else { + size_t count = 0; + for (int i = 0; i < layer_height * layer_width * num_priors; ++i) { + for (size_t j = 0; j < 4; ++j) { // 4 variance + output_data[count] = variance[j]; + ++count; + } + } + } + + return SUCCESS; +} + +Status SsdPriorboxKernel::GetNumPriorAndDimSize(uint aspect_ratios_size, uint min_sizes_size, uint max_sizes_size, + int layer_width, int layer_height, int &num_priors, + int 
&dim_size) const { + if (CheckUint32MulOverflow(min_sizes_size, aspect_ratios_size) != SUCCESS) { + return PARAM_INVALID; + } + + uint tmp_value = aspect_ratios_size * min_sizes_size; + if (CheckUint32AddOverflow(tmp_value, max_sizes_size) != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to get list param."); + return PARAM_INVALID; + } + tmp_value += max_sizes_size; + + if (tmp_value > INT32_MAX) { + GELOGE(PARAM_INVALID, "Failed to get list param."); + return PARAM_INVALID; + } + num_priors = static_cast(tmp_value); + + if (CheckIntMulOverflow(layer_width, layer_height) != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to get list param."); + return PARAM_INVALID; + } + + if (CheckIntMulOverflow(layer_width * layer_height, num_priors) != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to get list param."); + return PARAM_INVALID; + } + + if (CheckIntMulOverflow(layer_width * layer_height * num_priors, kNumVariance) != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to get list param."); + return PARAM_INVALID; + } + dim_size = layer_width * layer_height * num_priors * kNumVariance; // 4 variance + + return SUCCESS; +} + +void SsdPriorboxKernel::DataCalulate(float x, float y, float box_x, float box_y, int img_x, int img_y, + vector &result) { + result.clear(); + // xmin + result.push_back((x - box_x / kFloatNumTwo) / static_cast(img_x)); + // ymin + result.push_back((y - box_y / kFloatNumTwo) / static_cast(img_y)); + // xmax + result.push_back((x + box_x / kFloatNumTwo) / static_cast(img_x)); + // ymax + result.push_back((y + box_y / kFloatNumTwo) / static_cast(img_y)); +} + +std::unique_ptr SsdPriorboxKernel::BoundaryCalulate(int dim_size, int layer_width, int layer_height, + float step_width, float step_height, int img_width, + int img_height, float offset, vector min_sizes, + vector max_sizes, vector aspect_ratios) { + // output two channel.First channel stores the mean of each prior coordinate. + // Second channel stores the variance of each prior coordinate. 
+ unique_ptr output_data(new (std::nothrow) float[dim_size * kNumTwo]()); + if (output_data == nullptr) { + GELOGE(PARAM_INVALID, "Failed to create output_data ptr."); + return nullptr; + } + int idx = 0; + vector boundaries; + for (int height_index = 0; height_index < layer_height; ++height_index) { + for (int width_index = 0; width_index < layer_width; ++width_index) { + float center_x = (width_index + offset) * step_width; + float center_y = (height_index + offset) * step_height; + for (size_t size_index = 0; size_index < min_sizes.size(); ++size_index) { + int min_size = min_sizes[size_index]; + // first prior: aspect_ratio = 1, size = min_size + float box_width = min_size; + float box_height = min_size; + DataCalulate(center_x, center_y, box_width, box_height, img_width, img_height, boundaries); + size_t index = 0; + while (index < kBoundarySize) { + output_data[idx++] = boundaries[index++]; + } + if (!max_sizes.empty()) { + int max_size = max_sizes[size_index]; + // second prior: aspect_ratio = 1, size = sqrt(min_size * max_size) + box_width = sqrt(min_size * max_size); + DataCalulate(center_x, center_y, box_width, box_width, img_width, img_height, boundaries); + index = 0; + while (index < kBoundarySize) { + output_data[idx++] = boundaries[index++]; + } + } + + // rest of priors + for (size_t ratio_index = 0; ratio_index < aspect_ratios.size(); ++ratio_index) { + float aspect_ratio = aspect_ratios[ratio_index]; + if (fabs(aspect_ratio - kAspectRationBase) < kMinistBias) { // aspect ration base:1. 
+ continue; + } + box_width = min_size * sqrt(aspect_ratio); + box_height = min_size / sqrt(aspect_ratio); + DataCalulate(center_x, center_y, box_width, box_height, img_width, img_height, boundaries); + index = 0; + while (index < kBoundarySize) { + output_data[idx++] = boundaries[index++]; + } + } + } + } + } + + return std::move(output_data); +} + +Status SsdPriorboxKernel::Compute(const NodePtr &node, std::vector &v_output) { + GELOGD("SsdPriorboxKernel in"); + OpDescPtr op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "node:%s opdesc is null", node->GetName().c_str()); + return PARAM_INVALID; + } + int img_width = 0; + int img_height = 0; + int layer_width = 0; + int layer_height = 0; + float step_width = 0.0; + float step_height = 0.0; + Status ret = GetPriorSizeParam(op_desc, img_width, img_height, step_width, step_height, layer_width, layer_height); + if (ret != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to get size param."); + return PARAM_INVALID; + } + float offset = 0.0; + bool clip = false; + ret = GetPriorOtherParam(op_desc, offset, clip); + if (ret != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to get other param."); + return PARAM_INVALID; + } + + vector min_sizes; + vector aspect_ratios; + vector variances; + vector max_sizes; + if (GetPriorListParam(op_desc, min_sizes, max_sizes, aspect_ratios, variances) != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to get list param."); + return PARAM_INVALID; + } + + int num_priors = 0; + int dim_size = 0; + ret = GetNumPriorAndDimSize(aspect_ratios.size(), min_sizes.size(), max_sizes.size(), layer_width, layer_height, + num_priors, dim_size); + if (ret != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to get other param."); + return PARAM_INVALID; + } + + auto output_data = BoundaryCalulate(dim_size, layer_width, layer_height, step_width, step_height, img_width, + img_height, offset, min_sizes, max_sizes, aspect_ratios); + if (output_data == nullptr) { + GELOGE(PARAM_INVALID, "Failed 
to create output_data ptr."); + return PARAM_INVALID; + } + + if (clip) { + for (int d = 0; d < dim_size; ++d) { + // clip the prior's coordidate such that it is within [0.0 1.0] + output_data[d] = std::min(std::max(output_data[d], 0.), 1.); + } + } + + // set the variance. + if (SetVariance(variances, dim_size, layer_height, layer_width, num_priors, output_data.get()) != SUCCESS) { + GELOGE(PARAM_INVALID, "Failed to set variance."); + return PARAM_INVALID; + } + + GeTensorDesc output_tensor_desc = op_desc->GetOutputDesc(0); + std::vector v_dims(3, 1); // 3 dims + v_dims[kDimIndexZero] = kNumOne; + v_dims[kDimIndexOne] = kNumTwo; + v_dims[kDimIndexTwo] = dim_size; + DataType data_type = output_tensor_desc.GetDataType(); + output_tensor_desc.Update(GeShape(v_dims), FORMAT_NCHW, data_type); + // make TensorDesc + GeTensorPtr output_ptr = MakeShared(output_tensor_desc); + if (output_ptr == nullptr) { + GELOGE(INTERNAL_ERROR, "Create shared ptr for GeTensor failed"); + return NOT_CHANGED; + } + GE_IF_BOOL_EXEC(output_ptr->SetData(reinterpret_cast(output_data.get()), + static_cast(dim_size * kNumTwo * sizeof(data_type))) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "set data failed"); + return INTERNAL_ERROR); + v_output.push_back(output_ptr); + return SUCCESS; +} +REGISTER_KERNEL(SSDPRIORBOX, SsdPriorboxKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/ssd_prior_box_kernel.h b/src/ge/graph/passes/folding_kernel/ssd_prior_box_kernel.h new file mode 100644 index 00000000..84e2e523 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/ssd_prior_box_kernel.h @@ -0,0 +1,113 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_SSD_PRIOR_BOX_KERNEL_H_
#define GE_GRAPH_PASSES_FOLDING_KERNEL_SSD_PRIOR_BOX_KERNEL_H_

#include <memory>
#include <vector>

#include "inc/kernel.h"

namespace ge {
// Constant-folding kernel for the SSDPriorBox op: evaluates the prior boxes
// at graph-optimization time so the runtime op can be replaced by a constant.
class SsdPriorboxKernel : public Kernel {
 public:
  ///
  /// Entry of the SsdPriorboxKernel optimizer
  /// @param [in] node: Input Node
  /// @param [out] v_output: computed output tensors of the folded node
  /// @return SUCCESS: node output compute success
  /// @return OTHERS: Execution failed
  ///
  Status Compute(const NodePtr &node, std::vector<GeTensorPtr> &v_output) override;

 private:
  ///
  /// Get image/layer/step size attr values from op_desc.
  /// @param [in] op_desc: Input op_desc
  /// @param [in/out] img_width: img_width attr_value
  /// @param [in/out] img_height: img_height attr_value
  /// @param [in/out] step_w: step_w attr_value
  /// @param [in/out] step_h: step_h attr_value
  /// @param [in/out] layer_width: layer_width attr_value
  /// @param [in/out] layer_height: layer_height attr_value
  /// @return SUCCESS: node get attr value success
  /// @return OTHERS: Execution failed
  ///
  Status GetPriorSizeParam(const OpDescPtr &op_desc, int &img_width, int &img_height, float &step_w, float &step_h,
                           int &layer_width, int &layer_height);
  ///
  /// Get scalar attrs (offset, clip) from op_desc.
  /// @param [in] op_desc: Input op_desc
  /// @param [in/out] offset: offset attr_value
  /// @param [in/out] clip: clip attr_value
  /// @return SUCCESS: get attr success
  /// @return OTHERS: Execution failed
  ///
  Status GetPriorOtherParam(const OpDescPtr &op_desc, float &offset, bool &clip);
  ///
  /// Get list attrs (min_size_list, max_size_list, aspect_ratio_list, variance_list).
  /// @param [in] op_desc: Input op_desc
  /// @param [in/out] min_size_list: min_size_list attr_value
  /// @param [in/out] max_size_list: max_size_list attr_value
  /// @param [in/out] aspect_ratio_list: aspect_ratio_list attr_value
  /// @param [in/out] variance_list: variance_list attr_value
  /// @return SUCCESS: get list attr success
  /// @return OTHERS: Execution failed
  ///
  Status GetPriorListParam(const OpDescPtr &op_desc, vector<float> &min_size_list, vector<float> &max_size_list,
                           vector<float> &aspect_ratio_list, vector<float> &variance_list);
  ///
  /// Write the variance values into the second channel of output_data.
  /// @param [in] variance: variance list
  /// @param [in] dim: output_data second channel offset
  /// @param [in] layer_height: layer_height
  /// @param [in] layer_width: layer_width
  /// @param [in] num_priors: num_priors
  /// @param [in/out] output_data: output_data
  /// @return SUCCESS: set variance success
  /// @return OTHERS: Execution failed
  ///
  Status SetVariance(const vector<float> &variance, const int dim, const int32_t layer_height,
                     const int32_t layer_width, const int num_priors, float *output_data);
  ///
  /// Compute num_priors (boxes per spatial cell) and dim_size (total floats
  /// in one output channel) from the attr list sizes and layer dimensions.
  /// @param [in] aspect_ratios_size: aspect_ratio_list size
  /// @param [in] min_sizes_size: min_size_list size
  /// @param [in] max_sizes_size: max_size_list size
  /// @param [in] layer_width: layer_width
  /// @param [in] layer_height: layer_height
  /// @param [in/out] num_priors: num_priors
  /// @param [in/out] dim_size: dim_size
  /// @return SUCCESS: computed successfully
  /// @return OTHERS: Execution failed
  ///
  Status GetNumPriorAndDimSize(uint aspect_ratios_size, uint min_sizes_size, uint max_sizes_size, int layer_width,
                               int layer_height, int &num_priors, int &dim_size) const;
  // Compute the four boundary coordinates of one box centered at (x, y) and
  // append them (normalized by the image size) to result.
  void DataCalulate(float x, float y, float box_x, float box_y, int img_x, int img_y, vector<float> &result);
  // Allocate and fill the full prior-box buffer (dim_size floats); returns
  // nullptr on allocation failure.
  std::unique_ptr<float[]> BoundaryCalulate(int dim_size, int layer_width, int layer_height, float step_width,
                                            float step_height, int img_width, int img_height, float offset,
                                            vector<float> min_sizes, vector<float> max_sizes,
                                            vector<float> aspect_ratios);
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_FOLDING_KERNEL_SSD_PRIOR_BOX_KERNEL_H_
+ */ + +#include "graph/passes/folding_kernel/strided_slice_kernel.h" + +#include + +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + + +namespace ge { +namespace { +const int32_t kNumOne = 1; +const size_t kStridedSliceInputSize = 4; +const size_t kStridedSliceInputIndex0 = 0; +const size_t kStridedSliceInputIndex1 = 1; +const size_t kStridedSliceInputIndex2 = 2; +const size_t kStridedSliceInputIndex3 = 3; +} // namespace +Status StridedSliceKernel::CheckAndGetAttr(const OpDescPtr &attr, const std::vector &input, + Attr &args) { + int64_t begin_mask = 0; + int64_t end_mask = 0; + int64_t ellipsis_mask = 0; + int64_t new_axis_mask = 0; + int64_t shrink_axis_mask = 0; + + if (attr == nullptr) { + GELOGE(PARAM_INVALID, "input opdescptr is nullptr."); + return PARAM_INVALID; + } + if (input.size() != kStridedSliceInputSize) { + GELOGE(PARAM_INVALID, "The number of input for strided slice must be %zu.", kStridedSliceInputSize); + return PARAM_INVALID; + } + if (!AttrUtils::GetInt(attr, STRIDE_SLICE_ATTR_BEGIN_MASK, begin_mask)) { + GELOGE(PARAM_INVALID, "get begin_mask attr failed."); + return PARAM_INVALID; + } + if (!AttrUtils::GetInt(attr, STRIDE_SLICE_ATTR_END_MASK, end_mask)) { + GELOGE(PARAM_INVALID, "get end_mask attr failed."); + return PARAM_INVALID; + } + if (!AttrUtils::GetInt(attr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, ellipsis_mask)) { + GELOGE(PARAM_INVALID, "get ellipsis_mask attr failed."); + return PARAM_INVALID; + } + if (!AttrUtils::GetInt(attr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, new_axis_mask)) { + GELOGE(PARAM_INVALID, "get new_axis_mask attr failed."); + return PARAM_INVALID; + } + if (!AttrUtils::GetInt(attr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, shrink_axis_mask)) { + GELOGE(PARAM_INVALID, "get shrink_axis_mask attr failed."); + 
return PARAM_INVALID; + } + if (!(ellipsis_mask == 0 && new_axis_mask == 0)) { + GELOGE(NOT_CHANGED, "ellipsis_mask or new_axis_mask must be 0 with optimizer."); + return NOT_CHANGED; + } + DataType data_type = attr->GetInputDesc(kStridedSliceInputIndex0).GetDataType(); + if (!(data_type == DT_FLOAT || data_type == DT_INT32)) { + GELOGW( + "Data type of StridedSlice OP must be float or int32." + "Constant folding will not be carried out in this condition" + "which might affect the time performance but not the accuracy"); + } + args.begin_mask = begin_mask; + args.end_mask = end_mask; + args.ellipsis_mask = ellipsis_mask; + args.new_axis_mask = new_axis_mask; + args.data_type = static_cast(data_type); + args.shrink_axis_mask = shrink_axis_mask; + + ConstGeTensorPtr weight0 = input[kStridedSliceInputIndex0]; + ConstGeTensorPtr weight1 = input[kStridedSliceInputIndex1]; + ConstGeTensorPtr weight2 = input[kStridedSliceInputIndex2]; + ConstGeTensorPtr weight3 = input[kStridedSliceInputIndex3]; + if (CheckWeight(weight0, weight1, weight2, weight3) != SUCCESS) { + GELOGE(PARAM_INVALID, "Check And Get Attr failed."); + return PARAM_INVALID; + } + + return SUCCESS; +} +Status StridedSliceKernel::CheckWeight(const ConstGeTensorPtr &weight0, const ConstGeTensorPtr &weight1, + const ConstGeTensorPtr &weight2, const ConstGeTensorPtr &weight3) const { + if (weight0 == nullptr || weight1 == nullptr || weight2 == nullptr || weight3 == nullptr) { + GELOGE(PARAM_INVALID, "weight is nullptr."); + return PARAM_INVALID; + } + if (!(weight1->GetTensorDesc().GetDataType() == DT_INT32 && weight2->GetTensorDesc().GetDataType() == DT_INT32 && + weight3->GetTensorDesc().GetDataType() == DT_INT32)) { + GELOGE(INTERNAL_ERROR, "Data type of StridedSlice OP(begin,end,strides) must be int32."); + return INTERNAL_ERROR; + } + + // check data + size_t weight0_size = weight0->GetData().size() / sizeof(int32_t); + size_t weight1_size = weight1->GetData().size() / sizeof(int32_t); + size_t 
weight2_size = weight2->GetData().size() / sizeof(int32_t); + size_t weight3_size = weight3->GetData().size() / sizeof(int32_t); + if (weight0_size == 0 || weight1_size == 0 || weight2_size == 0 || weight3_size == 0) { + GELOGW("Data size of inputs is 0."); + return PARAM_INVALID; + } + + // check dim size + size_t weight0_dim_size = weight0->GetTensorDesc().GetShape().GetDimNum(); + if (!(weight0_dim_size == weight1_size && weight0_dim_size == weight2_size && weight0_dim_size == weight3_size)) { + GELOGE(PARAM_INVALID, "The sizes of begin, end and stride is not supported."); + return NOT_CHANGED; + } + + return SUCCESS; +} + +void StridedSliceKernel::MaskCal(const bool &begin_mask_flag, const bool &end_mask_flag, const bool &shrink_mask_flag, + int32_t &begin_i, int32_t &end_i, int32_t &dim_i) const { + if (shrink_mask_flag) { + begin_i = (begin_i < 0 ? (dim_i + begin_i) : begin_i); + end_i = begin_i + kNumOne; + } else { + if (begin_mask_flag) { + begin_i = 0; + } else { + begin_i = (begin_i < 0 ? (dim_i + begin_i) : begin_i); + } + if (end_mask_flag) { + end_i = dim_i; + } else { + end_i = (end_i < 0 ? 
(dim_i + end_i) : end_i); + } + } +} + +Status StridedSliceKernel::Compute(const ge::OpDescPtr attr, const std::vector &input, + vector &v_output) { + GELOGI("StridedSliceKernel in."); + Attr args; + Status ret = CheckAndGetAttr(attr, input, args); + if (ret != SUCCESS) { + GELOGE(PARAM_INVALID, "Check And Get Attr failed."); + return NOT_CHANGED; + } + + ConstGeTensorPtr weight0 = input[kStridedSliceInputIndex0]; + ConstGeTensorPtr weight1 = input[kStridedSliceInputIndex1]; + ConstGeTensorPtr weight2 = input[kStridedSliceInputIndex2]; + ConstGeTensorPtr weight3 = input[kStridedSliceInputIndex3]; + + const GeShape x_shape = weight0->GetTensorDesc().GetShape(); + size_t dim_size = x_shape.GetDimNum(); + size_t data_size = weight0->GetData().size() / sizeof(int32_t); + + const int32_t *begin = reinterpret_cast(weight1->GetData().data()); + const int32_t *end = reinterpret_cast(weight2->GetData().data()); + const int32_t *stride = reinterpret_cast(weight3->GetData().data()); + if ((begin == nullptr) || (end == nullptr) || (stride == nullptr)) { + GELOGE(PARAM_INVALID, "input weight tensor is nullptr."); + return NOT_CHANGED; + } + + std::vector input_dims; + std::vector begin_vec; + std::vector output_dims; + std::vector stride_vec; + for (size_t i = 0; i < dim_size; i++) { + int32_t begin_i = begin[i]; + int32_t end_i = end[i]; + int32_t stride_i = stride[i]; + int32_t dim_i = static_cast(x_shape.GetDim(i)); + GELOGI("%d\t %d\t %d\t %d", begin_i, end_i, stride_i, dim_i); + uint32_t i_temp = static_cast(i); + bool begin_mask_i = (static_cast(args.begin_mask) & (1 << i_temp)); + bool end_mask_i = (static_cast(args.end_mask) & (1 << i_temp)); + bool shrink_mask_i = (static_cast(args.shrink_axis_mask) & (1 << i_temp)); + MaskCal(begin_mask_i, end_mask_i, shrink_mask_i, begin_i, end_i, dim_i); + if (stride_i <= 0 || end_i <= begin_i) { + GELOGE(INTERNAL_ERROR, "Param for stride_slice is invalid."); + return NOT_CHANGED; + } + + int64_t dim_final = (end_i - begin_i) / 
stride_i; + output_dims.push_back(dim_final); + input_dims.push_back(x_shape.GetDim(i)); + begin_vec.push_back(begin_i); + stride_vec.push_back(stride_i); + } + + GeTensorPtr output_ptr = MakeShared(); + if (output_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "MakeShared GeTensor failed, node name %s.", attr->GetName().c_str()); + return NOT_CHANGED; + } + + void *data = reinterpret_cast(const_cast(weight0->GetData().data())); + GE_CHECK_NOTNULL(data); + ret = OpUtils::SetOutputSliceData(data, static_cast(data_size), args.data_type, input_dims, begin_vec, + output_dims, output_ptr.get(), stride_vec); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "SetOutputSliceData failed."); + return NOT_CHANGED; + } + + GeTensorDesc &t_d = output_ptr->MutableTensorDesc(); + t_d.SetDataType(static_cast(args.data_type)); + + uint32_t final_dim_size = static_cast(output_dims.size()); + vector v_dims; + for (uint32_t k = 0; k < final_dim_size; k++) { + bool shrink_mask_i = (static_cast(args.shrink_axis_mask) & (1 << k)); + if (shrink_mask_i) { + continue; + } + v_dims.push_back(output_dims[k]); + } + t_d.SetShape(GeShape(v_dims)); + v_output.push_back(output_ptr); + GELOGI("StridedSliceKernel success."); + return SUCCESS; +} +REGISTER_KERNEL(STRIDEDSLICE, StridedSliceKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/strided_slice_kernel.h b/src/ge/graph/passes/folding_kernel/strided_slice_kernel.h new file mode 100644 index 00000000..047bc880 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/strided_slice_kernel.h @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_STRIDED_SLICE_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_STRIDED_SLICE_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +struct Attr { + int64_t begin_mask; + int64_t end_mask; + int64_t ellipsis_mask; + int64_t new_axis_mask; + int64_t data_type; + int64_t shrink_axis_mask; +}; + +class StridedSliceKernel : public Kernel { + public: + Status Compute(const OpDescPtr attr, const std::vector &input, + vector &v_output) override; + + private: + Status CheckAndGetAttr(const OpDescPtr &attr, const std::vector &input, Attr &args); + Status CheckWeight(const ConstGeTensorPtr &weight0, const ConstGeTensorPtr &weight1, const ConstGeTensorPtr &weight2, + const ConstGeTensorPtr &weight3) const; + void MaskCal(const bool &begin_mask_flag, const bool &end_mask_flag, const bool &shrink_mask_flag, int32_t &begin_i, + int32_t &end_i, int32_t &dim_i) const; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_STRIDED_SLICE_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/sub_kernel.cc b/src/ge/graph/passes/folding_kernel/sub_kernel.cc new file mode 100644 index 00000000..4fe71bb0 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/sub_kernel.cc @@ -0,0 +1,152 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
#include "graph/passes/folding_kernel/sub_kernel.h"

#include <memory>

#include "common/debug/log.h"
#include "common/fp16_t.h"
#include "common/op/ge_op_utils.h"
#include "graph/common/bcast.h"
#include "graph/utils/type_utils.h"
#include "inc/kernel_factory.h"

namespace ge {
namespace {
const size_t kSubFirstInput = 0;
const size_t kSubSecondInput = 1;
const size_t kSubFirstOutput = 0;
const size_t kSubOutputSize = 1;
const size_t kSubInputSize = 2;

// Defines a file-local std::function named func_<TYPE> that subtracts two
// values of TYPE; used as the element-wise op for BCastCompute below.
#define DEFINE_FUNC_BY_TYPE(TYPE)                                                             \
  std::function<TYPE(TYPE const &, TYPE const &)> func_##TYPE = [](TYPE const &a, TYPE const &b) -> TYPE { \
    return a - b;                                                                             \
  };

// Switch-case arm: run the broadcasted subtraction into y_data_<TYPE>.
// Expands inside Compute's first switch; relies on `input`, `bcast`, `ret`.
#define SET_BCAST_COMPUTE_CASE(DTYPE, TYPE)            \
  case DTYPE:                                          \
    ret = bcast.BCastCompute(input, y_data_##TYPE, func_##TYPE); \
    break;

// Switch-case arm: copy y_data_<TYPE> into the output tensor.
// Expands inside Compute's second switch; relies on `output_ptr`, `length`.
#define SET_OUTPUT(DTYPE, TYPE)                                                                                  \
  case DTYPE:                                                                                                    \
    (void)output_ptr->SetData(reinterpret_cast<uint8_t *>(y_data_##TYPE.data()), y_data_##TYPE.size() * length); \
    break;

DEFINE_FUNC_BY_TYPE(int8_t)
DEFINE_FUNC_BY_TYPE(int16_t)
DEFINE_FUNC_BY_TYPE(int32_t)
DEFINE_FUNC_BY_TYPE(int64_t)
DEFINE_FUNC_BY_TYPE(uint8_t)
DEFINE_FUNC_BY_TYPE(uint16_t)
DEFINE_FUNC_BY_TYPE(uint32_t)
DEFINE_FUNC_BY_TYPE(uint64_t)
DEFINE_FUNC_BY_TYPE(float)
DEFINE_FUNC_BY_TYPE(double)
}  // namespace

// Constant-folding kernel for Sub: computes a - b with NumPy-style
// broadcasting for const inputs and emits the result as a tensor.
// @param [in]  op_desc_ptr  op desc of the Sub node (must have 1 output)
// @param [in]  input        exactly two const input tensors
// @param [out] v_output     receives the difference tensor
// @return SUCCESS, or PARAM_INVALID / NOT_CHANGED on failure
Status SubKernel::Compute(const ge::OpDescPtr op_desc_ptr, const std::vector<ConstGeTensorPtr> &input,
                          vector<GeTensorPtr> &v_output) {
  GE_CHECK_NOTNULL(op_desc_ptr);
  // check how many inputs
  if ((input.size() != kSubInputSize) || (op_desc_ptr->GetOutputsSize() != kSubOutputSize)) {
    GELOGW("The number of input for sub must be %zu.", kSubInputSize);
    return PARAM_INVALID;
  }

  GE_CHECK_NOTNULL(input[kSubFirstInput]);
  GE_CHECK_NOTNULL(input[kSubSecondInput]);
  ConstGeTensorPtr weight0 = input[kSubFirstInput];
  ConstGeTensorPtr weight1 = input[kSubSecondInput];

  // One result buffer per supported element type; only the one matching
  // data_type is filled by the switch below.
  std::vector<int8_t> y_data_int8_t;
  std::vector<int16_t> y_data_int16_t;
  std::vector<int32_t> y_data_int32_t;
  std::vector<int64_t> y_data_int64_t;
  std::vector<uint8_t> y_data_uint8_t;
  std::vector<uint16_t> y_data_uint16_t;
  std::vector<uint32_t> y_data_uint32_t;
  std::vector<uint64_t> y_data_uint64_t;
  std::vector<fp16_t> y_data_fp16_t;
  std::vector<float> y_data_float;
  std::vector<double> y_data_double;

  Status ret;
  DataType data_type = input[kSubFirstInput]->GetTensorDesc().GetDataType();
  BCast bcast;
  // NOTE(review): DT_FLOAT16 has no compute case here, so fp16 inputs take
  // the default branch and return NOT_CHANGED — the SET_OUTPUT(DT_FLOAT16)
  // case further down is therefore unreachable.
  switch (data_type) {
    SET_BCAST_COMPUTE_CASE(DT_INT8, int8_t)
    SET_BCAST_COMPUTE_CASE(DT_INT16, int16_t)
    SET_BCAST_COMPUTE_CASE(DT_INT32, int32_t)
    SET_BCAST_COMPUTE_CASE(DT_INT64, int64_t)
    SET_BCAST_COMPUTE_CASE(DT_UINT8, uint8_t)
    SET_BCAST_COMPUTE_CASE(DT_UINT16, uint16_t)
    SET_BCAST_COMPUTE_CASE(DT_UINT32, uint32_t)
    SET_BCAST_COMPUTE_CASE(DT_UINT64, uint64_t)
    SET_BCAST_COMPUTE_CASE(DT_FLOAT, float)
    SET_BCAST_COMPUTE_CASE(DT_DOUBLE, double)
    default:
      ret = NOT_CHANGED;
      break;
  }

  if (ret != SUCCESS) {
    GELOGW("BCastCompute fail, data_type:%s, ret:%s", TypeUtils::DataTypeToSerialString(data_type).c_str(),
           GET_ERRORNO_STR(ret).c_str());
    return NOT_CHANGED;
  }

  // Byte width of one element, needed to size the SetData copy.
  uint32_t length = 1;
  if (!TypeUtils::GetDataTypeLength(data_type, length)) {
    GELOGW("Can't GetDataTypeLength of data_type: %s", TypeUtils::DataTypeToSerialString(data_type).c_str());
    return NOT_CHANGED;
  }

  auto output_tensor_desc = op_desc_ptr->GetOutputDesc(kSubFirstOutput);
  GeTensorPtr output_ptr = MakeShared<GeTensor>(output_tensor_desc);
  if (output_ptr == nullptr) {
    GELOGE(MEMALLOC_FAILED, "make_shared ge::GeTensor failed, node name %s.", op_desc_ptr->GetName().c_str());
    return NOT_CHANGED;
  }

  output_ptr->MutableTensorDesc().SetShape(GeShape(bcast.GetOutputShape()));
  // only return GRAPH_SUCCESS here
  switch (data_type) {
    SET_OUTPUT(DT_INT8, int8_t)
    SET_OUTPUT(DT_INT16, int16_t)
    SET_OUTPUT(DT_INT32, int32_t)
    SET_OUTPUT(DT_INT64, int64_t)
    SET_OUTPUT(DT_UINT8, uint8_t)
    SET_OUTPUT(DT_UINT16, uint16_t)
    SET_OUTPUT(DT_UINT32, uint32_t)
    SET_OUTPUT(DT_UINT64, uint64_t)
    SET_OUTPUT(DT_FLOAT16, fp16_t)
    SET_OUTPUT(DT_FLOAT, float)
    SET_OUTPUT(DT_DOUBLE, double)
    default:
      break;
  }
  output_ptr->MutableTensorDesc().SetDataType(data_type);
  v_output.push_back(output_ptr);

  return SUCCESS;
}

REGISTER_KERNEL(SUB, SubKernel);
}  // namespace ge
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_SUB_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_SUB_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class SubKernel : public Kernel { + public: + Status Compute(const ge::OpDescPtr attr, const std::vector &input, + vector &v_output) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_SUB_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_kernel/transdata_kernel.cc b/src/ge/graph/passes/folding_kernel/transdata_kernel.cc new file mode 100644 index 00000000..46ba8805 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/transdata_kernel.cc @@ -0,0 +1,134 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_kernel/transdata_kernel.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/formats/formats.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "common/fp16_t.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/bcast.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" + + +namespace ge { +namespace { +const size_t kTransdataInputSize = 1; +} + +Status TransdataKernel::ValidateInput(const OpDescPtr &op_desc_ptr, const std::vector &input) { + if (input.empty()) { + GELOGE(PARAM_INVALID, "Input tensor vector is empty"); + return PARAM_INVALID; + } + ConstGeTensorPtr const_weight_ptr = input[0]; + if (const_weight_ptr == nullptr) { + GELOGE(PARAM_INVALID, "Input const_weight_ptr is nullptr."); + return PARAM_INVALID; + } + const uint8_t *src_data = const_weight_ptr->GetData().data(); + if (op_desc_ptr == nullptr || src_data == nullptr) { + GELOGE(PARAM_INVALID, "Input opDescPtr is nullptr."); + return PARAM_INVALID; + } + if (op_desc_ptr->GetInputsSize() != kTransdataInputSize) { + GELOGW("trans_op has more than 1 input_size."); + return NOT_CHANGED; + } + return SUCCESS; +} + +Status TransdataKernel::Compute(const OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) { + GE_CHECK_NOTNULL(op_desc_ptr); + GELOGD("TransdataKernel begin."); + Status status = ValidateInput(op_desc_ptr, input); + if (status != SUCCESS) { + return status; + } + + ConstGeTensorPtr const_weight_ptr = input[0]; + GeTensorDesc op_desc = op_desc_ptr->GetOutputDesc(0); + GeTensorDesc op_desc_in = op_desc_ptr->GetInputDesc(0); + auto src_format = op_desc_in.GetFormat(); + auto src_shape = op_desc_in.GetShape().GetDims(); + auto src_data_type = op_desc_in.GetDataType(); 
+ auto data_shape = op_desc.GetShape().GetDims(); + auto data_format = op_desc.GetFormat(); + auto data_type = op_desc.GetDataType(); + GELOGD( + "current node %s, format %s, input shape %s, data type %s, weight format %s, shape %s, data type %s. " + "output format %s, shape %s, data type %s", + op_desc_ptr->GetName().c_str(), TypeUtils::FormatToSerialString(src_format).c_str(), + formats::ShapeToString(src_shape).c_str(), TypeUtils::DataTypeToSerialString(src_data_type).c_str(), + TypeUtils::FormatToSerialString(const_weight_ptr->GetTensorDesc().GetFormat()).c_str(), + formats::ShapeToString(const_weight_ptr->GetTensorDesc().GetShape()).c_str(), + TypeUtils::DataTypeToSerialString(const_weight_ptr->GetTensorDesc().GetDataType()).c_str(), + TypeUtils::FormatToSerialString(data_format).c_str(), formats::ShapeToString(data_shape).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str()); + + const uint8_t *src_data = const_weight_ptr->GetData().data(); + const formats::TransArgs trans_args{src_data, src_format, data_format, src_shape, data_shape, src_data_type}; + formats::TransResult trans_result; + GELOGD("Trans formats from %s to %s, shape %s to %s, data type %s", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(data_format).c_str(), + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(data_shape).c_str(), + TypeUtils::DataTypeToSerialString(src_data_type).c_str()); + + if (src_data_type != data_type || data_shape.empty() || !formats::IsTransFormatSupport(trans_args)) { + GELOGW("Transfer from format %s to %s, shape %s to %s, data type %s to %s is not supported", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(data_format).c_str(), + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(data_shape).c_str(), + TypeUtils::DataTypeToSerialString(src_data_type).c_str(), + TypeUtils::DataTypeToSerialString(data_type).c_str()); + return NOT_CHANGED; + } + if 
(!KernelUtils::CheckSizeForTransOp(const_weight_ptr, op_desc_ptr)) { + GELOGI("CheckSize failed, input size is not equal to weight size"); + return NOT_CHANGED; + } + if (formats::TransFormat(trans_args, trans_result) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to trans formats from %s to %s, shape %s to %s, data type %s", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(data_format).c_str(), + formats::ShapeToString(src_shape).c_str(), formats::ShapeToString(data_shape).c_str(), + TypeUtils::DataTypeToSerialString(src_data_type).c_str()); + return NOT_CHANGED; + } + + GeTensorPtr output_ptr = MakeShared(op_desc_ptr->GetOutputDesc(0)); + if (output_ptr == nullptr) { + GELOGE(ge::PARAM_INVALID, "Make shared failed"); + return ge::PARAM_INVALID; + } + if (output_ptr->SetData(trans_result.data.get(), trans_result.length) != GRAPH_SUCCESS) { + GELOGW("Compute: SetData failed"); + } + v_output.push_back(output_ptr); + return SUCCESS; +} + +REGISTER_KERNEL(TRANSDATA, TransdataKernel); +} // namespace ge diff --git a/src/ge/graph/passes/folding_kernel/transdata_kernel.h b/src/ge/graph/passes/folding_kernel/transdata_kernel.h new file mode 100644 index 00000000..e4cf9b39 --- /dev/null +++ b/src/ge/graph/passes/folding_kernel/transdata_kernel.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_FOLDING_KERNEL_TRANSDATA_KERNEL_H_ +#define GE_GRAPH_PASSES_FOLDING_KERNEL_TRANSDATA_KERNEL_H_ + +#include + +#include "inc/kernel.h" + +namespace ge { +class TransdataKernel : public Kernel { + public: + Status Compute(const OpDescPtr attr, const std::vector &input, + std::vector &v_output) override; + + private: + Status ValidateInput(const OpDescPtr &attr, const std::vector &input); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_FOLDING_KERNEL_TRANSDATA_KERNEL_H_ diff --git a/src/ge/graph/passes/folding_pass.cc b/src/ge/graph/passes/folding_pass.cc new file mode 100644 index 00000000..e37c8336 --- /dev/null +++ b/src/ge/graph/passes/folding_pass.cc @@ -0,0 +1,315 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/folding_pass.h" + +#include +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/node_utils.h" +#include "inc/kernel.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace folding_pass { +shared_ptr GetKernelByType(const NodePtr &node) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return nullptr; + } + KernelFactory &factory = KernelFactory::Instance(); + string type = node->GetType(); + if (type == FRAMEWORKOP) { + if (!ge::AttrUtils::GetStr(node->GetOpDesc(), ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, type)) { + return nullptr; + } + } + + return factory.Create(type); +} +} // namespace folding_pass + +namespace { +IndexsToAnchors GetIndexAndPeerInDataAnchors(NodePtr &node) { + IndexsToAnchors indexes_to_anchors; + for (auto &out_anchor : node->GetAllOutDataAnchors()) { + if (out_anchor == nullptr) { + continue; + } + for (auto &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) { + if (peer_in_anchor == nullptr) { + continue; + } + const auto &peer_node = peer_in_anchor->GetOwnerNode(); + if (peer_node == nullptr) { + continue; + } + indexes_to_anchors[out_anchor->GetIdx()].push_back(peer_in_anchor); + } + } + + return indexes_to_anchors; +} + +NodePtr AddConstNodeToGraph(GeTensorPtr &tensor, ComputeGraphPtr &graph) { + auto const_desc = OpDescUtils::CreateConstOp(tensor); + if (const_desc == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to get const desc from tensor"); + return nullptr; + } + + GE_IF_BOOL_EXEC(graph == nullptr, GELOGW("input param graph is null"); return nullptr); + (void)AttrUtils::SetListStr(const_desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, std::move(std::vector())); + return graph->AddNodeFront(const_desc); +} + +NodePtr AddIdentityNodeToGraph(const std::string &name, const GeTensorDesc &tensor, ComputeGraphPtr &graph) { + if (graph == nullptr) { + 
// Replaces a successfully-computed node with Const node(s) holding `outputs`
// (one tensor per out anchor), then deletes the node and any upstream
// producers that are left without data consumers.
Status FoldingPass::Folding(NodePtr &node, vector<GeTensorPtr> &outputs) {
  GE_CHECK_NOTNULL(node);
  GELOGD("begin folding node:%s", node->GetName().c_str());
  // Before processing nodes, collect the relations between the out anchor and the peer out data nodes
  // to prepare for const reconnection
  auto indexes_to_anchors = GetIndexAndPeerInDataAnchors(node);

  auto ret = DealWithInNodes(node);
  if (ret != SUCCESS) {
    return ret;
  }
  if (AddConstNode(node, indexes_to_anchors, outputs) != SUCCESS) {
    return INTERNAL_ERROR;
  }

  // Record the producers before isolation: after IsolateAndDeleteNode the
  // folded node's anchors are gone, and producers that fed only this node
  // become dead and must be removed as well.
  auto in_data_nodes = node->GetInDataNodes();
  std::unordered_set<NodePtr> in_data_nodes_set(in_data_nodes.begin(), in_data_nodes.end());
  if (IsolateAndDeleteNode(node, {}) != SUCCESS) {
    GELOGE(INTERNAL_ERROR, "Failed to isolate and delete node %s, type %s.", node->GetName().c_str(),
           node->GetType().c_str());
    return INTERNAL_ERROR;
  }
  for (auto iter = in_data_nodes_set.begin(); iter != in_data_nodes_set.end(); ++iter) {
    auto pre_node = *iter;
    // Producers with no remaining data consumers are dead after folding.
    if (pre_node->GetOutDataNodesSize() == 0) {
      if (IsolateAndDeleteNode(pre_node, {}) != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "Failed to isolate and delete in data node %s, type %s.", pre_node->GetName().c_str(),
               pre_node->GetType().c_str());
        return INTERNAL_ERROR;
      }
    }
  }

  return SUCCESS;
}
GE_CHECK_NOTNULL(node->GetOpDesc()); + auto graph = node->GetOwnerComputeGraph(); + auto in_data_anchors = node->GetAllInDataAnchors(); + for (auto &in_data_anchor : in_data_anchors) { + if (in_data_anchor == nullptr) { + continue; + } + auto in_node_anchor = in_data_anchor->GetPeerOutAnchor(); + if (in_node_anchor == nullptr) { + continue; + } + auto in_node = in_node_anchor->GetOwnerNode(); + if (in_node == nullptr) { + continue; + } + if ((in_node->GetType() == SWITCH) || (in_node->GetType() == REFSWITCH)) { + GELOGI("The in_node name is %s, and node type is %s.", in_node->GetName().c_str(), in_node->GetType().c_str()); + auto ret = in_node_anchor->Unlink(in_data_anchor); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to unlink anchor between const node %s to constant-folding-node %s, type %s.", + in_node->GetName().c_str(), node->GetName().c_str(), node->GetType().c_str()); + return INTERNAL_ERROR; + } + GELOGI("Unlink anchor between in_node %s and node %s success.", in_node->GetName().c_str(), + node->GetName().c_str()); + auto identity_name = node->GetName() + "_ctrl_identity_" + std::to_string(in_data_anchor->GetIdx()); + auto identity = + AddIdentityNodeToGraph(identity_name, node->GetOpDesc()->GetInputDesc(in_data_anchor->GetIdx()), graph); + if (identity == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to add identity node to graph."); + return INTERNAL_ERROR; + } + ret = GraphUtils::AddEdge(in_node_anchor, identity->GetInDataAnchor(0)); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add edge, from node %s to node %s.", in_node->GetName().c_str(), + identity->GetName().c_str()); + return INTERNAL_ERROR; + } + GELOGI("Create new identity node success."); + ret = GraphUtils::AddEdge(identity->GetOutControlAnchor(), node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add edge, from node %s to node %s.", in_node->GetName().c_str(), + node->GetName().c_str()); + return INTERNAL_ERROR; + } + 
} + } + + return SUCCESS; +} + +Status FoldingPass::AddConstNode(NodePtr &node, IndexsToAnchors indexes_to_anchors, + std::vector &v_weight) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "node is null"); + return FAILED; + } + auto graph = node->GetOwnerComputeGraph(); + for (auto &index_to_anchors : indexes_to_anchors) { + auto index = static_cast(index_to_anchors.first); + if (index >= v_weight.size()) { + GELOGE(INTERNAL_ERROR, + "Failed to constant fold on node %s type %s, " + "the out nodes num %lu calculated is less than the node out anchor index %zu", + node->GetName().c_str(), node->GetType().c_str(), v_weight.size(), index); + return INTERNAL_ERROR; + } + GeTensorPtr weight = v_weight[index]; + if (weight == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to constant fold on node %s type %s, the %lust node calculated is null", + node->GetName().c_str(), node->GetType().c_str(), index); + return INTERNAL_ERROR; + } + + auto const_node = AddConstNodeToGraph(weight, graph); + if (const_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to add dynamic const node, node name:%s, index:%zu.", node->GetName().c_str(), + index); + return INTERNAL_ERROR; + } + GELOGI("add const_node:%s, replace node %s, type %s, index %zu.", const_node->GetName().c_str(), + node->GetName().c_str(), node->GetType().c_str(), index); + // add new const to re-pass node + for (auto &in_anchor : index_to_anchors.second) { + if (in_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, "In anchor is nullptr."); + return INTERNAL_ERROR; + } + auto ret = ConnectNodeToInAnchor(in_anchor, const_node, 0); + if (ret != SUCCESS) { + return ret; + } + NodeUtils::UpdateIsInputConst(*(in_anchor->GetOwnerNode())); + } + Status ret = GraphUtils::AddEdge(node->GetOutControlAnchor(), const_node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add control edge, from node %s to const node %s.", node->GetName().c_str(), + const_node->GetName().c_str()); + return 
INTERNAL_ERROR; + } + GE_CHECK_NOTNULL(node->GetOpDesc()); + std::string stream_label; + if (AttrUtils::GetStr(node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, stream_label)) { + GE_CHECK_NOTNULL(const_node->GetOpDesc()); + if (!AttrUtils::SetStr(const_node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, stream_label)) { + GELOGE(INTERNAL_ERROR, "Failed to set stream label on dynamic const node %s, with stream label:%s.", + const_node->GetName().c_str(), stream_label.c_str()); + return INTERNAL_ERROR; + } + } + GELOGD("Add control edge when insert dynamic const, from node %s to const node %s, with stream label:%s.", + node->GetName().c_str(), const_node->GetName().c_str(), stream_label.c_str()); + } + + return SUCCESS; +} + +Status FoldingPass::RemoveNodeKeepingCtrlEdges(NodePtr &node) { + GE_IF_BOOL_EXEC(node == nullptr, GELOGE(PARAM_INVALID, "node is null"); return PARAM_INVALID); + auto ret = GraphUtils::IsolateNode(node, {}); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to isolate the folding-node %s type %s", node->GetName().c_str(), + node->GetType().c_str()); + return INTERNAL_ERROR; + } + + auto graph = node->GetOwnerComputeGraph(); + ret = GraphUtils::RemoveNodeWithoutRelink(graph, node); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to remove node %s from graph", node->GetName().c_str()); + return INTERNAL_ERROR; + } + AddNodeDeleted(node.get()); + return SUCCESS; +} + +Status FoldingPass::ConnectNodeToInAnchor(InDataAnchorPtr &in_anchor, NodePtr &node, int node_index) { + // the origin edge must be removed before add + if (in_anchor == nullptr || node == nullptr) { + GELOGE(PARAM_INVALID, "in anchor or node is null"); + return PARAM_INVALID; + } + auto peer_out_anchor = in_anchor->GetPeerOutAnchor(); + if (peer_out_anchor != nullptr) { + if (ge::GraphUtils::RemoveEdge(peer_out_anchor, in_anchor) != GRAPH_SUCCESS) { + GELOGW("RemoveEdge failed."); + } + } + + auto new_out_anchor = node->GetOutDataAnchor(node_index); + if (new_out_anchor 
== nullptr) { + GELOGE(INTERNAL_ERROR, + "Failed to add node to in anchor," + " the index %d for node %s, type %s is invalid", + node_index, node->GetName().c_str(), node->GetType().c_str()); + return INTERNAL_ERROR; + } + if (GraphUtils::AddEdge(new_out_anchor, in_anchor) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to add edge between anchors," + " new node %s, type %s", + node->GetName().c_str(), node->GetType().c_str()); + return INTERNAL_ERROR; + } + AddRePassNodesWithInOut(node); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/folding_pass.h b/src/ge/graph/passes/folding_pass.h new file mode 100644 index 00000000..41215dfe --- /dev/null +++ b/src/ge/graph/passes/folding_pass.h @@ -0,0 +1,46 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
#ifndef GE_GRAPH_PASSES_FOLDING_PASS_H_
#define GE_GRAPH_PASSES_FOLDING_PASS_H_

#include <map>
#include <memory>
#include <vector>

#include "graph/passes/base_pass.h"
#include "inc/kernel.h"

namespace ge {
namespace folding_pass {
// Returns the constant-folding kernel registered for the node's (original)
// type, or nullptr when none exists.
shared_ptr<Kernel> GetKernelByType(const NodePtr &node);
}  // namespace folding_pass

// Out-anchor index -> in-data anchors of the consumers attached to it.
// NOTE(review): template arguments were lost in extraction; reconstructed as
// map<int, vector<InDataAnchorPtr>> from GetIdx()/push_back usage in the .cc.
using IndexsToAnchors = std::map<int, std::vector<InDataAnchorPtr>>;

// Base class for constant-folding passes. Folding() replaces a node whose
// outputs were computed at compile time with Const node(s) and removes the
// producers it leaves without consumers.
class FoldingPass : public BaseNodePass {
 protected:
  // Replaces `node` with Consts holding `outputs` (one per out anchor).
  Status Folding(NodePtr &node, vector<GeTensorPtr> &outputs);

 private:
  // Adds one Const per folded output and rewires the old consumers to it.
  Status AddConstNode(NodePtr &node, IndexsToAnchors indexes_to_anchors, std::vector<GeTensorPtr> &v_weight);
  // Converts Switch/RefSwitch data inputs into control deps via an Identity.
  Status DealWithInNodes(NodePtr &node);
  // Removes a node while preserving its control-edge relations.
  Status RemoveNodeKeepingCtrlEdges(NodePtr &node);
  // Points in_anchor at out-anchor `node_index` of `node`, dropping the old
  // producer edge.
  Status ConnectNodeToInAnchor(InDataAnchorPtr &in_anchor, NodePtr &node, int node_index);
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_FOLDING_PASS_H_
namespace ge {
// Entry point: stamps every node in the graph with its inferred original data
// format (NCHW/NHWC), seeded from the Data nodes.
Status GetOriginalFormatPass::Run(ge::ComputeGraphPtr graph) {
  GE_CHECK_NOTNULL(graph);
  GE_RETURN_WITH_LOG_IF_ERROR(SetOriginalFormat(graph), "SetOriginalFormat failed");

  return SUCCESS;
}

// Propagates the original format through the graph:
//   1) resets ATTR_NAME_INFERRED_FORMAT on all nodes,
//   2) Data/AippData nodes take the global context format,
//   3) other nodes inherit the (unanimous) format of their input nodes, with
//      special handling for BiasAdd producers and Permute transposes.
// Nodes whose inputs disagree, or whose format cannot be read, are skipped
// (continue_flag) rather than failing the pass.
Status GetOriginalFormatPass::SetOriginalFormat(const ge::ComputeGraphPtr &graph) {
  GE_CHECK_NOTNULL(graph);
  int64_t ori_format = 0;
  int64_t tmp_format = 0;

  // Pass 1: mark every node's inferred format as "unknown".
  for (auto &node_ptr : graph->GetDirectNode()) {
    GE_CHECK_NOTNULL(node_ptr);

    GE_IF_BOOL_EXEC(!AttrUtils::SetInt(node_ptr->GetOpDesc(), ATTR_NAME_INFERRED_FORMAT, DOMI_TENSOR_RESERVED),
                    GELOGE(FAILED, "set ATTR_NAME_INFERRED_FORMAT failed");
                    return FAILED);
  }

  // Pass 2: seed Data nodes, then infer the rest from their producers.
  for (auto &node_ptr : graph->GetDirectNode()) {
    GE_CHECK_NOTNULL(node_ptr);
    OpDescPtr desc_ptr = node_ptr->GetOpDesc();
    GE_CHECK_NOTNULL(desc_ptr);
    auto is_data = (desc_ptr->GetType() == DATA_TYPE || desc_ptr->GetType() == AIPP_DATA_TYPE);
    if (is_data) {
      // Data nodes carry the format the user declared for the whole model.
      GELOGI("Data node: %s,format :%d", node_ptr->GetName().c_str(), domi::GetContext().format);
      ori_format = static_cast<int64_t>(domi::GetContext().format);
      GE_IF_BOOL_EXEC(!AttrUtils::SetInt(desc_ptr, ATTR_NAME_FORMAT, ori_format),
                      GELOGE(FAILED, "set ATTR_NAME_FORMAT failed");
                      return FAILED);
      GE_IF_BOOL_EXEC(!AttrUtils::SetInt(desc_ptr, ATTR_NAME_INFERRED_FORMAT, ori_format),
                      GELOGE(FAILED, "set ATTR_NAME_INFERRED_FORMAT failed");
                      return FAILED);
      continue;
    }
    int32_t i = 0;
    bool continue_flag = false;
    bool ignore_pred_format = false;
    for (auto &bias_node_ptr : node_ptr->GetInDataNodes()) {
      GE_CHECK_NOTNULL(bias_node_ptr);

      OpDescPtr bias_op_ptr = bias_node_ptr->GetOpDesc();
      GE_CHECK_NOTNULL(bias_op_ptr);

      if (bias_op_ptr->GetType() == BIASADD) {
        // BiasAdd is format-transparent: look through it to its own inputs.
        ignore_pred_format = true;
        std::size_t tmp_size = ge::OpDescUtils::GetNonConstInputsSize(bias_node_ptr);
        GE_IF_BOOL_EXEC(tmp_size > 2 || tmp_size == 0,
                        GELOGW("bias_node is node followed by %zu nodes, should be 1 or 2", tmp_size);
                        continue_flag = true; break);
        OpDescPtr tmp_first_op_ptr = bias_node_ptr->GetInDataNodes().at(0)->GetOpDesc();
        GE_CHECK_NOTNULL(tmp_first_op_ptr);
        bias_op_ptr = tmp_first_op_ptr;

        // if biasadd have 2 input edges, format should be same
        if (tmp_size == 2) {
          int64_t first_input_format = 0;
          int64_t second_input_format = 0;
          OpDescPtr tmpSecondOpPtr = bias_node_ptr->GetInDataNodes().at(1)->GetOpDesc();
          GE_CHECK_NOTNULL(tmpSecondOpPtr);
          GE_IF_BOOL_EXEC(
            !AttrUtils::GetInt(tmp_first_op_ptr, ATTR_NAME_FORMAT, first_input_format), continue_flag = true; break);
          GE_IF_BOOL_EXEC(
            !AttrUtils::GetInt(tmpSecondOpPtr, ATTR_NAME_FORMAT, second_input_format), continue_flag = true; break);

          if (first_input_format != second_input_format) {
            GELOGW("biasadd node is followed two nodes with different format, get original format failed");
            continue_flag = true;
            break;
          }
        }
      }
      GE_IF_BOOL_EXEC(!AttrUtils::GetInt(bias_op_ptr, ATTR_NAME_FORMAT, tmp_format), continue_flag = true; break;);
      // First producer fixes the candidate format; all others must agree.
      if (i == 0) {
        ori_format = tmp_format;
      }

      GE_IF_BOOL_EXEC(tmp_format != ori_format,
                      GELOGW("node: %s , original format of src nodes must be same!", bias_node_ptr->GetName().c_str());
                      continue_flag = true; break;);

      i++;
    }

    GE_IF_BOOL_EXEC(continue_flag, continue);
    OpDescPtr tmp_op_ptr = node_ptr->GetOpDesc();
    GE_CHECK_NOTNULL(tmp_op_ptr);

    // A Permute that flips NCHW<->NHWC inverts the inferred format.
    if (IsFormatTranspose(tmp_op_ptr, static_cast<int32_t>(ori_format))) {
      ori_format = (ori_format == DOMI_TENSOR_NCHW) ? DOMI_TENSOR_NHWC : DOMI_TENSOR_NCHW;
    }

    if (ignore_pred_format) {
      GE_IF_BOOL_EXEC(!AttrUtils::SetBool(tmp_op_ptr, ATTR_NAME_IGNORE_PRED_FORMAT, true),
                      GELOGE(FAILED, "remove edge failed");
                      return FAILED);
    }

    // Do not reset ATTR_NAME_FORMAT if it is set in the OpParser.
    if (!tmp_op_ptr->HasAttr(ATTR_NAME_FORMAT)) {
      GE_IF_BOOL_EXEC(!AttrUtils::SetInt(tmp_op_ptr, ATTR_NAME_FORMAT, ori_format),
                      GELOGE(FAILED, "set ATTR_NAME_FORMAT failed");
                      return FAILED);
      GE_IF_BOOL_EXEC(!AttrUtils::SetInt(tmp_op_ptr, ATTR_NAME_INFERRED_FORMAT, ori_format),
                      GELOGE(FAILED, "set ATTR_NAME_INFERRED_FORMAT failed");
                      return FAILED);
    } else {
      // An explicitly parsed format wins over the inferred one.
      int64_t existingFormat = 0;
      GE_RETURN_WITH_LOG_IF_FALSE(AttrUtils::GetInt(tmp_op_ptr, ATTR_NAME_FORMAT, existingFormat),
                                  "Get existing_format attr failed");
      if (!AttrUtils::SetInt(tmp_op_ptr, ATTR_NAME_INFERRED_FORMAT, existingFormat)) {
        GELOGE(FAILED, "set ATTR_NAME_INFERRED_FORMAT failed");
        return FAILED;
      }
    }
  }
  return SUCCESS;
}
DOMI_TENSOR_NHWC : DOMI_TENSOR_NCHW; + } + + if (ignore_pred_format) { + GE_IF_BOOL_EXEC(!AttrUtils::SetBool(tmp_op_ptr, ATTR_NAME_IGNORE_PRED_FORMAT, true), + GELOGE(FAILED, "remove edge failed"); + return FAILED); + } + + // Do not reset ATTR_NAME_FORMAT if it is set in the OpParser. + if (!tmp_op_ptr->HasAttr(ATTR_NAME_FORMAT)) { + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(tmp_op_ptr, ATTR_NAME_FORMAT, ori_format), + GELOGE(FAILED, "set ATTR_NAME_FORMAT failed"); + return FAILED); + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(tmp_op_ptr, ATTR_NAME_INFERRED_FORMAT, ori_format), + GELOGE(FAILED, "set ATTR_NAME_INFERRED_FORMAT failed"); + return FAILED); + } else { + int64_t existingFormat = 0; + GE_RETURN_WITH_LOG_IF_FALSE(AttrUtils::GetInt(tmp_op_ptr, ATTR_NAME_FORMAT, existingFormat), + "Get existing_format attr failed"); + if (!AttrUtils::SetInt(tmp_op_ptr, ATTR_NAME_INFERRED_FORMAT, existingFormat)) { + GELOGE(FAILED, "set ATTR_NAME_INFERRED_FORMAT failed"); + return FAILED; + } + } + } + return SUCCESS; +} + +bool GetOriginalFormatPass::IsFormatTranspose(const ge::OpDescPtr op_ptr, int32_t ori_format) { + GE_CHK_BOOL_EXEC(op_ptr != nullptr, return false, "opdef is nullptr"); + if (op_ptr->GetType() == PERMUTE) { + vector index_list; + GE_IF_BOOL_EXEC(!AttrUtils::GetListInt(op_ptr, PERMUTE_ATTR_ORDER, index_list), return false); + + auto index_size = index_list.size(); + + GE_IF_BOOL_EXEC(static_cast(index_size) != PERMUTE_ORDER_NUM, return false); + + int32_t perm_nchw[4] = {0, 2, 3, 1}; // 4 format nums, {0,2,3,1} NCHW -> NHWC + int32_t perm_nhwc[4] = {0, 3, 1, 2}; // 4 format nums, {0,3,1,2} NHWC -> NCHW + bool is_nchw = true; + bool is_nhwc = true; + for (size_t i = 0; i < index_size; ++i) { + is_nchw = (perm_nchw[i] != index_list[i]) ? false : is_nchw; + is_nhwc = (perm_nhwc[i] != index_list[i]) ? 
false : is_nhwc; + } + bool ret = (is_nchw && ori_format == DOMI_TENSOR_NCHW && !is_nhwc) || + (is_nhwc && ori_format == DOMI_TENSOR_NHWC && !is_nchw); + + return ret; + } + return false; +} +} // namespace ge diff --git a/src/ge/graph/passes/get_original_format_pass.h b/src/ge/graph/passes/get_original_format_pass.h new file mode 100644 index 00000000..813fb2bf --- /dev/null +++ b/src/ge/graph/passes/get_original_format_pass.h @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_GET_ORIGINAL_FORMAT_PASS_H_ +#define GE_GRAPH_PASSES_GET_ORIGINAL_FORMAT_PASS_H_ + +#include "inc/graph_pass.h" + +namespace ge { +/// Set the original format of operator which is sensitive to format in order to change the real format. +/// The original format parameters only used in online model generate phase. 
/// Set the original format of operator which is sensitive to format in order to change the real format.
/// The original format parameters only used in online model generate phase.
class GetOriginalFormatPass : public GraphPass {
 public:
  // Runs the pass over the whole graph; returns SUCCESS or a GE error code.
  Status Run(ge::ComputeGraphPtr graph);

 private:
  ///
  /// Whether format transpose or not
  /// True when op_ptr is a Permute whose order flips NCHW<->NHWC relative to
  /// ori_format.
  /// @author
  ///
  bool IsFormatTranspose(const ge::OpDescPtr op_ptr, int32_t ori_format);

  ///
  /// Set the original format of operator
  /// Seeds formats at Data nodes and propagates them through the graph.
  /// @author
  ///
  Status SetOriginalFormat(const ge::ComputeGraphPtr &graph);
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_GET_ORIGINAL_FORMAT_PASS_H_
+ */ + +#include "graph/passes/guarantee_const_pass.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/common/omg_util.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" + +namespace ge { +namespace { +const uint32_t kGuaranteeConstInputsSize = 1; +} +Status GuaranteeConstPass::Run(NodePtr &node) { + GE_CHECK_NOTNULL(node); + GE_CHECK_NOTNULL(node->GetOpDesc()); + string type; + Status status_ret = GetOriginalType(node, type); + if (status_ret != SUCCESS) { + GELOGE(status_ret, "GuaranteeConstPass get original type fail."); + return status_ret; + } + if (type != GUARANTEECONST) { + return SUCCESS; + } + if (node->GetOpDesc()->GetAllInputsDesc().size() != kGuaranteeConstInputsSize) { + GELOGE(PARAM_INVALID, "input size error. Input size:%zu", node->GetOpDesc()->GetAllInputsDesc().size()); + return PARAM_INVALID; + } + auto inDesc = node->GetOpDesc()->GetInputDesc(0); + // Input tensor cannot be a resource variable handle. + const DataType input_dtype = inDesc.GetDataType(); + if (input_dtype == DT_RESOURCE) { + GELOGE(FAILED, "Input tensor cannot be a resource variable handle in [%s].", node->GetName().c_str()); + return FAILED; + } + + return IsolateAndDeleteNode(node, {0}); +} +} // namespace ge diff --git a/src/ge/graph/passes/guarantee_const_pass.h b/src/ge/graph/passes/guarantee_const_pass.h new file mode 100644 index 00000000..7f289a10 --- /dev/null +++ b/src/ge/graph/passes/guarantee_const_pass.h @@ -0,0 +1,29 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
#ifndef GE_GRAPH_PASSES_GUARANTEE_CONST_PASS_H_
#define GE_GRAPH_PASSES_GUARANTEE_CONST_PASS_H_

#include "graph/passes/base_pass.h"

namespace ge {
// Node pass that deletes GuaranteeConst ops, forwarding their single input
// directly to the output consumers (see guarantee_const_pass.cc).
class GuaranteeConstPass : public BaseNodePass {
 public:
  Status Run(NodePtr &node) override;
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_GUARANTEE_CONST_PASS_H_
namespace ge {
// For every input of an HCCL collective op whose producer also feeds other
// consumers, inserts a MemcpyAsync node between producer and the collective,
// giving the collective a private copy of the tensor.
Status HcclMemcpyPass::Run(ge::ComputeGraphPtr graph) {
  GE_IF_BOOL_EXEC(graph == nullptr, GELOGE(PARAM_INVALID, "param [graph] must not be null."); return PARAM_INVALID);
  for (const auto &node : graph->GetDirectNode()) {
    auto op_desc = node->GetOpDesc();
    GE_IF_BOOL_EXEC(op_desc == nullptr, continue);
    if (!NeedInsertMemcpyOp(op_desc)) {
      continue;
    }

    GELOGI("hcom op is:%s.", op_desc->GetName().c_str());
    for (auto &hccl_in_anchor : node->GetAllInDataAnchors()) {
      if (hccl_in_anchor == nullptr) {
        continue;
      }
      auto src_out_anchor = hccl_in_anchor->GetPeerOutAnchor();
      GE_CHECK_NOTNULL(src_out_anchor);

      // If the producer feeds only this collective, no copy is needed.
      size_t src_out_anchor_size = src_out_anchor->GetPeerInDataAnchors().size();
      if (src_out_anchor_size <= kAnchorSize) {
        GELOGI("Data op only link to hcom op, no need to add memcpy async node.");
        continue;
      }

      GELOGI("The op %s need insert memcpy async op.", src_out_anchor->GetOwnerNode()->GetName().c_str());
      NodePtr memcpy_node = CreateMemcpyNode(graph, src_out_anchor);
      GE_CHECK_NOTNULL(memcpy_node);

      // Rewire in three steps: src -/-> hccl, memcpy -> hccl, src -> memcpy.
      Status ret1 = src_out_anchor->Unlink(hccl_in_anchor);
      if (ret1 != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "The op %s Unlink anchor %s fail.", src_out_anchor->GetOwnerNode()->GetName().c_str(),
               node->GetName().c_str());
        return FAILED;
      }
      auto out_data_anchor_0 = memcpy_node->GetOutDataAnchor(kAnchorNum);
      GE_CHECK_NOTNULL(out_data_anchor_0);
      ret1 = out_data_anchor_0->LinkTo(hccl_in_anchor);
      if (ret1 != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "The op %s link anchor %s fail.", memcpy_node->GetName().c_str(),
               node->GetName().c_str());
        return FAILED;
      }

      Status ret = src_out_anchor->LinkTo(memcpy_node->GetInDataAnchor(kAnchorNum));
      if (ret != SUCCESS) {
        GELOGE(INTERNAL_ERROR, "The op %s link anchor %s fail.", src_out_anchor->GetOwnerNode()->GetName().c_str(),
               memcpy_node->GetName().c_str());
        return FAILED;
      }
    }
  }
  return SUCCESS;
}
return FAILED; + } + + Status ret = src_out_anchor->LinkTo(memcpy_node->GetInDataAnchor(kAnchorNum)); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "The op %s link anchor %s fail.", src_out_anchor->GetOwnerNode()->GetName().c_str(), + memcpy_node->GetName().c_str()); + return FAILED; + } + } + } + return SUCCESS; +} + +/// +/// @brief Add MemcpyAsync Node +/// @param [in] ge::ComputeGraphPtr graph +/// @param [in] ge::OutDataAnchorPtr in_node +/// @return ge::NodePtr +/// +NodePtr HcclMemcpyPass::CreateMemcpyNode(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_data_anchor) { + GE_IF_BOOL_EXEC(graph == nullptr, return nullptr); + NodePtr pre_node = out_data_anchor->GetOwnerNode(); + OpDescPtr pre_op_desc = pre_node->GetOpDesc(); + if (pre_op_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "OpDesc of pre node is invalid."); + return nullptr; + } + + std::string node_name = pre_node->GetName() + "_" + MEMCPYASYNC; + node_name = CheckDuplicateName(node_name); + OpDescPtr op_desc = MakeShared(node_name.c_str(), MEMCPYASYNC); + if (op_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Create MemcpyAsync op: MakeShared op_desc fail."); + return nullptr; + } + GELOGI("Create MemcpyAsync op:%s.", op_desc->GetName().c_str()); + + graphStatus ret = op_desc->AddInputDesc(pre_op_desc->GetOutputDesc(out_data_anchor->GetIdx())); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Create MemcpyAsync op: add input desc fail."); + return nullptr; + } + + ret = op_desc->AddOutputDesc(pre_op_desc->GetOutputDesc(out_data_anchor->GetIdx())); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Create MemcpyAsync op: add output desc fail."); + return nullptr; + } + + NodePtr memcpy_node = graph->AddNode(op_desc); + if (memcpy_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Insert MemcpyAsync node fail."); + return nullptr; + } + + return memcpy_node; +} + +/// +/// @brief Check duplicate node_name +/// @param [in] std::string& node_name +/// @return std::string +/// +std::string 
HcclMemcpyPass::CheckDuplicateName(const std::string &node_name) { + std::string tmp_name = node_name; + auto iter = node_num_map_.find(tmp_name); + if (iter != node_num_map_.end()) { + tmp_name = tmp_name + "_" + std::to_string(iter->second); + (iter->second)++; + } else { + node_num_map_[tmp_name] = 1; + } + return tmp_name; +} + +/// +/// @brief Check hcom op +/// @param [in] ge::ConstOpDescPtr op_desc +/// @return bool +/// +bool HcclMemcpyPass::NeedInsertMemcpyOp(const ge::ConstOpDescPtr &op_desc) const { + return (op_desc->GetType() == HCOMALLGATHER || op_desc->GetType() == HCOMALLREDUCE || + op_desc->GetType() == HCOMREDUCESCATTER); +} +} // namespace ge diff --git a/src/ge/graph/passes/hccl_memcpy_pass.h b/src/ge/graph/passes/hccl_memcpy_pass.h new file mode 100644 index 00000000..f4762980 --- /dev/null +++ b/src/ge/graph/passes/hccl_memcpy_pass.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
#ifndef GE_GRAPH_PASSES_HCCL_MEMCPY_PASS_H_
#define GE_GRAPH_PASSES_HCCL_MEMCPY_PASS_H_

#include <string>
#include <unordered_map>

#include "graph/graph.h"
#include "inc/graph_pass.h"

namespace ge {
// Graph pass that inserts MemcpyAsync nodes in front of HCCL collective ops
// whose inputs are shared with other consumers.
class HcclMemcpyPass : public GraphPass {
 public:
  Status Run(ge::ComputeGraphPtr graph);

 private:
  // Builds a MemcpyAsync node mirroring the producer output at out_data_anchor.
  NodePtr CreateMemcpyNode(const ComputeGraphPtr &graph, const OutDataAnchorPtr &out_data_anchor);

  // De-duplicates generated node names via node_num_map_.
  std::string CheckDuplicateName(const std::string &node_name);

  // True for HCCL collective op types that require a private input copy.
  bool NeedInsertMemcpyOp(const ge::ConstOpDescPtr &op_desc) const;

  // Per-base-name counter for generated node names.
  // NOTE(review): the map's template arguments were lost in extraction;
  // string -> integer counter per CheckDuplicateName usage — confirm type.
  std::unordered_map<std::string, uint32_t> node_num_map_;
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_HCCL_MEMCPY_PASS_H_
+ */ + +#include "graph/passes/identify_reference_pass.h" + +#include +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" + +namespace ge { +Status IdentifyReferencePass::Run(NodePtr &node) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "param [node] must not be null."); + return PARAM_INVALID; + } + auto op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "OpDesc of param [node] must not be null."); + return PARAM_INVALID; + } + + auto input_names = op_desc->GetAllInputNames(); + auto outputs = op_desc->GetAllOutputName(); + for (auto &output : outputs) { + for (auto &input_name : input_names) { + if (input_name == output.first) { + bool is_ref = true; + if (AttrUtils::SetBool(op_desc, ATTR_NAME_REFERENCE, is_ref)) { + GELOGI("param [node] %s is reference node, set attribute %s to be true.", + node->GetName().c_str(), ATTR_NAME_REFERENCE.c_str()); + return SUCCESS; + } + } + } + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/identify_reference_pass.h b/src/ge/graph/passes/identify_reference_pass.h new file mode 100644 index 00000000..5f284b4c --- /dev/null +++ b/src/ge/graph/passes/identify_reference_pass.h @@ -0,0 +1,29 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
#ifndef GE_GRAPH_PASSES_IDENTIFY_REFERENCE_PASS_H_
#define GE_GRAPH_PASSES_IDENTIFY_REFERENCE_PASS_H_

#include "graph/passes/base_pass.h"

namespace ge {
// Node pass that flags ops whose output name matches an input name (write-
// through / reference semantics) by setting ATTR_NAME_REFERENCE = true.
class IdentifyReferencePass : public BaseNodePass {
 public:
  Status Run(NodePtr &node) override;
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_IDENTIFY_REFERENCE_PASS_H_
+/// +Status CheckIdentityUsable(const NodePtr &node, bool &usable) { + std::string node_type; + for (auto &in_node : node->GetInDataNodes()) { + auto ret = GetOriginalType(in_node, node_type); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to get node type from node %s", node->GetName().c_str()); + return ret; + } + if ((node_type != SWITCH) && (node_type != REFSWITCH)) { + GELOGD("skip identity %s connected to switch", node->GetName().c_str()); + break; + } + GE_CHECK_NOTNULL(node->GetOutControlAnchor()); + if (!node->GetOutControlAnchor()->GetPeerInControlAnchors().empty()) { + usable = true; + return SUCCESS; + } + } + for (auto &out_node : node->GetOutDataNodes()) { + auto ret = GetOriginalType(out_node, node_type); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to get node type from node %s", node->GetName().c_str()); + return ret; + } + if ((node_type != MERGE) && (node_type != REFMERGE)) { + GELOGD("skip identity %s connected to merge", node->GetName().c_str()); + break; + } + GE_CHECK_NOTNULL(node->GetInControlAnchor()); + if (!node->GetInControlAnchor()->GetPeerOutControlAnchors().empty()) { + usable = true; + return SUCCESS; + } + } + usable = false; + return SUCCESS; +} +} // namespace + +Status IdentityPass::Run(NodePtr &node) { + GE_CHECK_NOTNULL(node); + auto op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + string type; + Status status_ret = GetOriginalType(node, type); + if (status_ret != SUCCESS) { + GELOGE(status_ret, "Identity pass get original type fail."); + return status_ret; + } + if ((type != IDENTITY) && (type != IDENTITYN)) { + return SUCCESS; + } + + if (!force_) { + bool usable = false; + auto ret = CheckIdentityUsable(node, usable); + if (ret != SUCCESS) { + return ret; + } + if (usable) { + return SUCCESS; + } + } + size_t n = node->GetOpDesc()->GetOutputsSize(); + if (node->GetOpDesc()->GetInputsSize() != n) { + GELOGE(PARAM_INVALID, "Identity input / output size must be equal. 
in size:%lu, out size:%lu", + node->GetOpDesc()->GetInputsSize(), n); + return PARAM_INVALID; + } + std::vector io_map; + for (size_t i = 0; i < n; i++) { + io_map.push_back(i); + } + return IsolateAndDeleteNode(node, io_map); +} +} // namespace ge diff --git a/src/ge/graph/passes/identity_pass.h b/src/ge/graph/passes/identity_pass.h new file mode 100644 index 00000000..a4a80efc --- /dev/null +++ b/src/ge/graph/passes/identity_pass.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_IDENTITY_PASS_H_ +#define GE_GRAPH_PASSES_IDENTITY_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class IdentityPass : public BaseNodePass { + public: + explicit IdentityPass(bool force) : force_(force) {} + ~IdentityPass() override = default; + Status Run(NodePtr &node) override; + private: + bool force_ = false; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_IDENTITY_PASS_H_ diff --git a/src/ge/graph/passes/infershape_pass.cc b/src/ge/graph/passes/infershape_pass.cc new file mode 100644 index 00000000..ae9ec039 --- /dev/null +++ b/src/ge/graph/passes/infershape_pass.cc @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/infershape_pass.h" + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/shape_refiner.h" + +namespace ge { +Status InferShapePass::Run(NodePtr &node) { + if (ShapeRefiner::InferShapeAndType(node) != GRAPH_SUCCESS) { + GELOGE(GE_GRAPH_INFERSHAPE_FAILED, "infershape failed. node: %s", node->GetName().c_str()); + return GE_GRAPH_INFERSHAPE_FAILED; + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/infershape_pass.h b/src/ge/graph/passes/infershape_pass.h new file mode 100644 index 00000000..9e4df9a6 --- /dev/null +++ b/src/ge/graph/passes/infershape_pass.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

#ifndef GE_GRAPH_PASSES_INFERSHAPE_PASS_H_
#define GE_GRAPH_PASSES_INFERSHAPE_PASS_H_

#include "graph/passes/base_pass.h"

namespace ge {
// Per-node pass that performs shape and data-type inference.
class InferShapePass : public BaseNodePass {
 public:
  ///
  /// Entry of the InferShapePass optimizer
  /// @param [in] node: the node to run shape inference on
  /// @return SUCCESS: Execution succeed
  /// @return OTHERS: Execution failed
  ///
  Status Run(ge::NodePtr &node) override;
};
}  // namespace ge
#endif  // GE_GRAPH_PASSES_INFERSHAPE_PASS_H_

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ + +#include "graph/passes/isolated_op_remove_pass.h" + +#include "common/debug/log.h" +#include "common/types.h" +#include "common/util.h" + + +namespace ge { +Status IsolatedOpRemovePass::Run(ge::ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + for (NodePtr &node_ptr : graph->GetDirectNode()) { + GE_IF_BOOL_EXEC(node_ptr->GetOpDesc() == nullptr, continue); + if (node_ptr->GetInDataNodes().size() == 0 && node_ptr->GetOutAllNodes().size() == 0 && + !(node_ptr->GetOpDesc()->HasAttr(TO_BE_OUTPUT))) { + GE_RETURN_WITH_LOG_IF_ERROR(graph->RemoveNode(node_ptr), "remove graph node [%s] fail", + node_ptr->GetOpDesc()->GetName().c_str()); + } + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/isolated_op_remove_pass.h b/src/ge/graph/passes/isolated_op_remove_pass.h new file mode 100644 index 00000000..f17df21a --- /dev/null +++ b/src/ge/graph/passes/isolated_op_remove_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

#ifndef GE_GRAPH_PASSES_ISOLATED_OP_REMOVE_PASS_H_
#define GE_GRAPH_PASSES_ISOLATED_OP_REMOVE_PASS_H_

#include "inc/graph_pass.h"

namespace ge {
// Graph pass that deletes fully isolated nodes (no data inputs and no
// connected nodes at all) unless they are marked TO_BE_OUTPUT.
class IsolatedOpRemovePass : public GraphPass {
 public:
  // Scans the direct nodes of graph and removes isolated ones.
  Status Run(ge::ComputeGraphPtr graph);
};
}  // namespace ge
#endif  // GE_GRAPH_PASSES_ISOLATED_OP_REMOVE_PASS_H_

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/passes/iterator_op_pass.h"

// NOTE(review): four angle-bracket includes were lost in extraction here —
// restore from upstream before building.

#include "common/ge/ge_util.h"
#include "framework/common/debug/ge_log.h"
#include "graph/anchor.h"
#include "graph/common/omg_util.h"
#include "graph/graph.h"
#include "graph/node.h"
#include "graph/passes/pass_utils.h"
#include "graph/utils/graph_utils.h"

namespace ge {
// Op type string matched against the node's current (not original) type.
const char *const kGetNext = "GetNext";

// For every Iterator/IteratorV2 (by original type) or GetNext (by op type)
// node, when train-iteration flow control is enabled: insert a MemcpyAsync
// node behind it, set a cycle event on the memcpy, and give both nodes stream
// labels equal to their own names so the iterator runs as an independent loop.
Status IteratorOpPass::Run(ge::ComputeGraphPtr graph) {
  GELOGD("GetNextOpPass begin");
  GE_CHECK_NOTNULL(graph);
  if (!PassUtils::IsNeedTrainIteFlowCtrl(graph)) {
    // Nothing to do when the graph does not use iteration flow control.
    return SUCCESS;
  }
  std::string type;
  for (ge::NodePtr &node : graph->GetDirectNode()) {
    GE_CHK_STATUS_RET(GetOriginalType(node, type));
    auto op_desc = node->GetOpDesc();
    GE_CHECK_NOTNULL(op_desc);
    const string op_type = op_desc->GetType();
    if (type == "IteratorV2" || type == "Iterator" || op_type == kGetNext) {
      ge::NodePtr memcpy_async_node = InsertMemcpyAsyncNode(node, graph);
      GE_CHECK_NOTNULL(memcpy_async_node);
      GE_CHK_STATUS_RET(SetCycleEvent(memcpy_async_node), "Set cycle event fail, node:%s",
                        memcpy_async_node->GetName().c_str());

      // Stream label == node name -> each node is placed on its own stream.
      GE_CHK_STATUS_RET(SetStreamLabel(memcpy_async_node, memcpy_async_node->GetName()),
                        "Set stream label fail, node:%s", node->GetName().c_str());

      GE_CHK_STATUS_RET(SetStreamLabel(node, node->GetName()), "Set stream label fail, node:%s",
                        node->GetName().c_str());

      GELOGI("Set independent loop for iterator node success");
    }
  }
  GELOGD("GetNextOpPass end");
  return SUCCESS;
}

///
/// @brief insert memcpy after GetNext
///
/// Re-wires every data and control out-edge of pre_node so it now leaves the
/// new MemcpyAsync node, then connects pre_node's outputs to the MemcpyAsync
/// inputs one-to-one (same anchor index on both sides).
///
/// @param pre_node  iterator/GetNext node to copy behind
/// @param graph     owning graph
/// @return the inserted node, or nullptr on any failure
///
ge::NodePtr IteratorOpPass::InsertMemcpyAsyncNode(const ge::NodePtr &pre_node, const ge::ComputeGraphPtr &graph) {
  GE_CHK_BOOL_EXEC(pre_node != nullptr, GELOGW("Pre node is null."); return nullptr);
  GE_CHK_BOOL_EXEC(graph != nullptr, GELOGW("graph is null."); return nullptr);
  ge::OpDescPtr memcpy_async_op_desc = CreateMemcpyAsyncOp(pre_node);
  GE_CHK_BOOL_EXEC(memcpy_async_op_desc != nullptr, GELOGW("Create memcpyAsync op fail."); return nullptr);
  ge::NodePtr memcpy_async_node = graph->AddNode(memcpy_async_op_desc);
  GE_CHK_BOOL_EXEC(memcpy_async_node != nullptr, return nullptr, "Insert mencpy node fail.");

  // Data out: move each out-edge of pre_node onto the same-index output of
  // the MemcpyAsync node, then link pre_node's output to its same-index input.
  for (auto &out_anchor : pre_node->GetAllOutDataAnchors()) {
    if (out_anchor == nullptr) {
      continue;
    }
    ge::graphStatus status;
    GELOGI("Graph add memcpyAsync op in edge, index:%d.", out_anchor->GetIdx());
    for (auto &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) {
      GE_IF_BOOL_EXEC(peer_in_anchor == nullptr, GELOGW("peer_in_anchor is nullptr"); return nullptr);
      status = GraphUtils::RemoveEdge(out_anchor, peer_in_anchor);
      GE_CHK_BOOL_EXEC(status == GRAPH_SUCCESS, return nullptr, "Remove edge failed, index:%d.", out_anchor->GetIdx());
      status = GraphUtils::AddEdge(memcpy_async_node->GetOutDataAnchor(out_anchor->GetIdx()), peer_in_anchor);
      GE_CHK_BOOL_EXEC(status == GRAPH_SUCCESS, return nullptr,
                       "Graph add memcpyAsync op out edge fail, src index:%d, dst index:%d, dst node: %s.",
                       out_anchor->GetIdx(), peer_in_anchor->GetIdx(),
                       peer_in_anchor->GetOwnerNode()->GetName().c_str());
      GELOGI("Graph add memcpyAsync op out edge, src index:%d, dst index:%d, dst node: %s.", out_anchor->GetIdx(),
             peer_in_anchor->GetIdx(), peer_in_anchor->GetOwnerNode()->GetName().c_str());
    }
    status = GraphUtils::AddEdge(out_anchor, memcpy_async_node->GetInDataAnchor(out_anchor->GetIdx()));
    GE_CHK_BOOL_EXEC(status == GRAPH_SUCCESS, return nullptr, "Graph add memcpyAsync op in edge fail, index:%d.",
                     out_anchor->GetIdx());
  }
  // Control out: re-home pre_node's control successors onto the new node.
  OutControlAnchorPtr out_ctrl_anchor = pre_node->GetOutControlAnchor();
  GE_IF_BOOL_EXEC(out_ctrl_anchor != nullptr, for (auto &peer_in_ctrl_anchor
                                                   : out_ctrl_anchor->GetPeerInControlAnchors()) {
    ge::graphStatus status = GraphUtils::RemoveEdge(out_ctrl_anchor, peer_in_ctrl_anchor);
    GE_CHK_BOOL_EXEC(status == GRAPH_SUCCESS, return nullptr, "Remove edge failed, dst node: %s.",
                     peer_in_ctrl_anchor->GetOwnerNode()->GetName().c_str());
    status = GraphUtils::AddEdge(memcpy_async_node->GetOutControlAnchor(), peer_in_ctrl_anchor);
    GE_CHK_BOOL_EXEC(status == GRAPH_SUCCESS, return nullptr,
                     "Graph add memcpyAsync op out ctrl edge fail, dst node: %s.",
                     peer_in_ctrl_anchor->GetOwnerNode()->GetName().c_str());
    GELOGI("Graph add memcpyAsync op out ctrl edge, dst node: %s.",
           peer_in_ctrl_anchor->GetOwnerNode()->GetName().c_str());
  });
  GELOGI("Insert memcpyAsync op success.");

  return memcpy_async_node;
}

///
/// @brief create memcpy
///
/// Builds a MemcpyAsync OpDesc whose inputs and outputs both mirror every
/// output tensor desc of pre_node.
///
/// @param pre_node  node whose outputs define the memcpy's interface
/// @return the new OpDesc, or nullptr on failure
///
ge::OpDescPtr IteratorOpPass::CreateMemcpyAsyncOp(const ge::NodePtr &pre_node) {
  GE_CHK_BOOL_EXEC(pre_node != nullptr, return nullptr, "Input param invalid.");

  string node_name = pre_node->GetName() + "_MemcpyAsync";
  // NOTE(review): template argument was garbled in extraction; <OpDesc> is
  // implied by the OpDescPtr result type.
  ge::OpDescPtr op_desc = MakeShared<OpDesc>(node_name.c_str(), MEMCPYASYNC);
  if (op_desc == nullptr) {
    GELOGE(FAILED, "MakeShared fail.");
    return op_desc;
  }
  GELOGI("Create memcpyAsync op:%s.", op_desc->GetName().c_str());

  ge::OpDescPtr pre_node_op_desc = pre_node->GetOpDesc();
  GE_CHK_BOOL_EXEC(pre_node_op_desc != nullptr, return nullptr, "OpDesc of pre_node is invalid.");

  // One input and one output per pre_node output, copying its tensor desc.
  auto out_size = static_cast<uint32_t>(pre_node_op_desc->GetOutputsSize());
  GELOGI("Create memcpyAsync op, pre_node out_size: %u.", out_size);
  for (uint32_t i = 0; i < out_size; i++) {
    GE_CHK_BOOL_EXEC(op_desc->AddInputDesc(pre_node_op_desc->GetOutputDesc(i)) == GRAPH_SUCCESS, return nullptr,
                     "Create memcpyAsync op:add input desc fail.");
    GE_CHK_BOOL_EXEC(op_desc->AddOutputDesc(pre_node_op_desc->GetOutputDesc(i)) == GRAPH_SUCCESS, return nullptr,
                     "Create memcpyAsync op:add output desc fail.");
  }

  return op_desc;
}
}  // namespace ge
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GE_GRAPH_PASSES_ITERATOR_OP_PASS_H_
#define GE_GRAPH_PASSES_ITERATOR_OP_PASS_H_

#include "graph/graph.h"
#include "inc/graph_pass.h"

namespace ge {
// Gives Iterator/GetNext nodes an independent stream by inserting a
// MemcpyAsync node behind them (see iterator_op_pass.cc).
class IteratorOpPass : public GraphPass {
 public:
  IteratorOpPass() {}

  virtual ~IteratorOpPass() {}

  Status Run(ge::ComputeGraphPtr graph);

 private:
  ///
  /// @brief insert memcpy node after pre_node
  ///
  /// @param pre_node  node to copy behind
  /// @param graph     owning graph
  /// @return ge::NodePtr  inserted node, or nullptr on failure
  ///
  ge::NodePtr InsertMemcpyAsyncNode(const ge::NodePtr &pre_node, const ge::ComputeGraphPtr &graph);
  ///
  /// @brief Create a Memcpy Async Op object mirroring pre_node's outputs
  ///
  /// @param pre_node  node whose outputs define the op's interface
  /// @return ge::OpDescPtr  new OpDesc, or nullptr on failure
  ///
  ge::OpDescPtr CreateMemcpyAsyncOp(const ge::NodePtr &pre_node);
};
}  // namespace ge
#endif  // GE_GRAPH_PASSES_ITERATOR_OP_PASS_H_

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/link_gen_mask_nodes_pass.h" + +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/types.h" +#include "init/gelib.h" + +using std::set; +using std::vector; + +namespace ge { +namespace { +const size_t kGenMaskInputIndex = 1; +const size_t kDefaultMaxParallelNum = 1; +} // namespace + +LinkGenMaskNodesPass::LinkGenMaskNodesPass(const map &stream_max_parallel_num) + : GraphPass(), stream_max_parallel_num_(stream_max_parallel_num) {} + +// GenMask is the second input of DoMask and one GenMask's output may be used by multiple DoMask. +// We will control the order of GenMask according to the order of the first DoMask. 
+Status LinkGenMaskNodesPass::Run(ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + + vector gen_mask_nodes; + GetAllGenMaskNodes(graph, gen_mask_nodes); + + size_t gen_mask_group_size = gen_mask_nodes.size(); + Status status = GetGenMaskGroupSize(gen_mask_nodes, gen_mask_group_size); + if (status != SUCCESS) { + GELOGE(FAILED, "Get GenMask group size failed."); + return FAILED; + } + + if (gen_mask_group_size < 1) { + gen_mask_group_size = 1; + } + + for (size_t index = 1; index < gen_mask_nodes.size(); ++index) { + if (index % gen_mask_group_size == 0) { + GELOGI("skiped index: %zu.", index); + continue; + } + + NodePtr &src_node = gen_mask_nodes[index - 1]; + auto src_anchor = src_node->GetOutControlAnchor(); + GE_CHECK_NOTNULL(src_anchor); + + NodePtr &dest_node = gen_mask_nodes[index]; + auto dest_anchor = dest_node->GetInControlAnchor(); + GE_CHECK_NOTNULL(dest_anchor); + + graphStatus ret = src_anchor->LinkTo(dest_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "Link from %s to %s failed.", src_node->GetName().c_str(), dest_node->GetName().c_str()); + return FAILED; + } + GELOGD("Link from %s to %s.", src_node->GetName().c_str(), dest_node->GetName().c_str()); + } + + return SUCCESS; +} + +// [pointer can not be null] +bool LinkGenMaskNodesPass::AreAllInputsConst(const NodePtr &node) const { + for (const NodePtr &in_node : node->GetInDataNodes()) { + string op_type = in_node->GetType(); + if ((op_type != CONSTANT) && (op_type != CONSTANTOP)) { + return false; + } + } + return true; +} + +void LinkGenMaskNodesPass::GetAllGenMaskNodes(const ComputeGraphPtr &graph, vector &gen_mask_nodes) const { + set nodes_set; + for (const NodePtr &node : graph->GetDirectNode()) { + if (node->GetType() != DROPOUTDOMASK) { + continue; + } + + auto in_data_nodes = node->GetInDataNodes(); + if (in_data_nodes.size() > kGenMaskInputIndex) { + NodePtr &gen_mask = in_data_nodes.at(kGenMaskInputIndex); + if (AreAllInputsConst(gen_mask) && nodes_set.count(gen_mask) == 0) 
{ + gen_mask_nodes.emplace_back(gen_mask); + nodes_set.emplace(gen_mask); + } + } + } +} + +Status LinkGenMaskNodesPass::GetGenMaskGroupSize(vector &gen_mask_nodes, size_t &gen_mask_group_size) const { + if (gen_mask_nodes.empty()) { + return SUCCESS; + } + + NodePtr gen_mask_node = gen_mask_nodes.front(); + GE_CHECK_NOTNULL(gen_mask_node); + OpDescPtr gen_mask_op = gen_mask_node->GetOpDesc(); + GE_CHECK_NOTNULL(gen_mask_op); + + auto ge_lib = GELib::GetInstance(); + if ((ge_lib != nullptr) && ge_lib->InitFlag()) { + (void)ge_lib->DNNEngineManagerObj().GetDNNEngineName(gen_mask_op); + } + + size_t gen_mask_group_num = kDefaultMaxParallelNum; + string engine_name = gen_mask_op->GetOpEngineName(); + auto iter = stream_max_parallel_num_.find(engine_name); + if (iter != stream_max_parallel_num_.end()) { + gen_mask_group_num = static_cast(iter->second); + } + GELOGI("gen_mask_group_num: %zu.", gen_mask_group_num); + + if (gen_mask_group_num > 0) { + gen_mask_group_size = (gen_mask_nodes.size() + 1) / gen_mask_group_num; + } + GELOGI("gen_mask_group_size: %zu.", gen_mask_group_size); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/link_gen_mask_nodes_pass.h b/src/ge/graph/passes/link_gen_mask_nodes_pass.h new file mode 100644 index 00000000..57343137 --- /dev/null +++ b/src/ge/graph/passes/link_gen_mask_nodes_pass.h @@ -0,0 +1,47 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

#ifndef GE_GRAPH_PASSES_LINK_GEN_MASK_NODES_PASS_H_
#define GE_GRAPH_PASSES_LINK_GEN_MASK_NODES_PASS_H_

// NOTE(review): three angle-bracket includes were lost in extraction here
// (std::map/std::string/std::vector are used below) — restore from upstream.
#include <map>
#include <string>
#include <vector>

#include "graph/graph.h"
#include "inc/graph_pass.h"

namespace ge {
// Link all GenMask nodes using control edges, chaining them into at most N
// groups (N = the engine's configured max parallel stream number).
class LinkGenMaskNodesPass : public GraphPass {
 public:
  // stream_max_parallel_num: engine name -> max parallel stream number.
  // NOTE(review): template arguments garbled in extraction; reconstructed
  // from the usage in link_gen_mask_nodes_pass.cc.
  explicit LinkGenMaskNodesPass(const std::map<std::string, int> &stream_max_parallel_num);
  ~LinkGenMaskNodesPass() override = default;
  LinkGenMaskNodesPass(const LinkGenMaskNodesPass &) = delete;
  LinkGenMaskNodesPass &operator=(const LinkGenMaskNodesPass &) = delete;

  Status Run(ComputeGraphPtr graph) override;

 private:
  // True when every data input of node is Constant/ConstantOp.
  bool AreAllInputsConst(const NodePtr &node) const;
  // Collects const-fed GenMask inputs of all DropOutDoMask nodes, in order.
  void GetAllGenMaskNodes(const ComputeGraphPtr &graph, std::vector<NodePtr> &gen_mask_nodes) const;
  // Computes the per-chain node count from the engine's parallel config.
  Status GetGenMaskGroupSize(std::vector<NodePtr> &gen_mask_nodes, size_t &gen_mask_group_size) const;

  const std::map<std::string, int> stream_max_parallel_num_;
};
}  // namespace ge

#endif  // GE_GRAPH_PASSES_LINK_GEN_MASK_NODES_PASS_H_

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#include "graph/passes/merge_pass.h"

// NOTE(review): three angle-bracket includes were lost in extraction here —
// restore from upstream before building.

#include "framework/common/debug/ge_log.h"
#include "common/ge_inner_error_codes.h"
#include "common/ge/ge_util.h"
#include "graph/common/omg_util.h"
#include "graph/debug/ge_attr_define.h"
#include "graph/utils/graph_utils.h"
#include "graph/passes/pass_utils.h"

using domi::PARAM_INVALID;
using domi::SUCCESS;

namespace ge {
// Output index 1 of Merge is "value_index" (which input was taken).
const int kValueIndexOutputIndex = 1;

// Optimizes Merge nodes by live data-input count:
//   0 inputs  -> the output branch is dead; remove it.
//   1 input   -> the Merge is a pass-through; replace value_index consumers
//                with a Constant and delete the node.
//   2+ inputs -> leave untouched.
Status MergePass::Run(NodePtr &node) {
  GELOGD("MergePass running");
  if (node == nullptr) {
    GELOGE(PARAM_INVALID, "param [node] must not be null.");
    return PARAM_INVALID;
  }

  std::string op_type;
  GE_CHK_STATUS_RET(GetOriginalType(node, op_type), "get original type failed");
  if (op_type != MERGE) {
    return SUCCESS;
  }

  auto out_data_anchors = node->GetAllOutDataAnchors();
  if (out_data_anchors.empty()) {
    GELOGE(PARAM_INVALID, "[%s] Merge node output anchor is empty", node->GetName().c_str());
    return PARAM_INVALID;
  }

  auto in_data_nodes = node->GetInDataNodes();
  switch (in_data_nodes.size()) {
    case 0: {
      /// Case A: input_count = 0, the output of merge node is inactive as well
      /// In which case the output branch can be removed
      /// until another merge node is met
      // NOTE(review): vector template arguments garbled in extraction;
      // <NodePtr> matches PassUtils::RemoveBranch's outputs.
      std::vector<NodePtr> del_nodes;
      std::vector<NodePtr> end_nodes;
      Status ret = PassUtils::RemoveBranch(node, del_nodes, end_nodes);
      for (auto &end_node : end_nodes) {
        AddRePassNode(end_node);
      }
      for (const auto &delete_node : del_nodes) {
        AddNodeDeleted(delete_node.get());
      }
      return ret;
    }
    case 1: {  // Case B: input_count = 1, the merge node can be optimized out
      // io_map: data output 0 <- the single live input; value_index <- none.
      std::vector<int> merge_io_map = {PassUtils::GetUniqueInDataAnchorIndex(node), -1};
      if (merge_io_map[0] != -1 && IsNeedChangeIndexToConstant(node)) {
        int index = merge_io_map[0];
        if (ChangeIndexToConstant(node, index) != SUCCESS) {
          GELOGE(FAILED, "[%s] Change value index to be Constant failed.", node->GetName().c_str());
          return FAILED;
        }
      }
      return IsolateAndDeleteNode(node, merge_io_map);
    }
    default: {
      // Case C: input_count > 1, the merge node can not be optimized
      return SUCCESS;
    }
  }
}

// True when the value_index output (index 1) has at least one data consumer.
bool MergePass::IsNeedChangeIndexToConstant(NodePtr &node) const {
  /// value_index is the index 1 output of the Merge
  /// value_index link to other node, change it to be Constant
  GE_IF_BOOL_EXEC(node == nullptr, GELOGW("Node is nullptr"); return false);
  auto out_anchor = node->GetOutDataAnchor(kValueIndexOutputIndex);
  GE_IF_BOOL_EXEC(out_anchor == nullptr, GELOGW("Out_anchor is nullptr"); return false);
  for (const auto &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) {
    if (peer_in_anchor != nullptr && peer_in_anchor->GetOwnerNode() != nullptr) {
      GELOGI(
        "[%s] MergePass, value_index link to other node, "
        "change it to be Constant.",
        node->GetName().c_str());
      return true;
    }
  }

  return false;
}

// Replaces all consumers of the value_index output with a Constant node that
// holds value_index, keeping a control edge Merge -> Constant for ordering.
Status MergePass::ChangeIndexToConstant(NodePtr &node, int &value_index) {
  GE_CHECK_NOTNULL(node);
  ComputeGraphPtr graph = node->GetOwnerComputeGraph();
  if (graph == nullptr) {
    GELOGE(FAILED, "[%s] The owner graph must not be null.", node->GetName().c_str());
    return FAILED;
  }

  OpDescPtr constant_op_desc = nullptr;
  if (CreateConstByValue(node, value_index, constant_op_desc) != SUCCESS) {
    return FAILED;
  }
  NodePtr const_node = graph->AddNode(constant_op_desc);
  if (const_node == nullptr) {
    return FAILED;
  }

  // Change peer in anchors from value_index to new Constant node
  if (GraphUtils::ReplaceNodeAnchors(const_node, node, {}, {1}) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "[%s] ReplaceNodeAnchors failed.", node->GetName().c_str());
    return FAILED;
  }
  auto out_control_anchor = node->GetOutControlAnchor();
  GE_CHECK_NOTNULL(out_control_anchor);
  // Add control anchor between Merge and Constant
  if (out_control_anchor->LinkTo(const_node->GetInControlAnchor()) != GRAPH_SUCCESS) {
    return FAILED;
  }

  return SUCCESS;
}

// Builds a Constant OpDesc carrying value_index as a scalar int32 tensor
// whose tensor desc is cloned from Merge's value_index output.
Status MergePass::CreateConstByValue(NodePtr &node, int value_index, OpDescPtr &op_desc) {
  std::string constant_name = node->GetName() + "_value_index";
  // 1. create Constant OpDesc
  // NOTE(review): template argument garbled in extraction; <OpDesc> is
  // implied by the OpDescPtr result type.
  op_desc = MakeShared<OpDesc>(constant_name, CONSTANT);
  if (op_desc == nullptr) {
    GELOGE(FAILED, "[%s] Make shared of Constant op desc failed.", constant_name.c_str());
    return FAILED;
  }

  // 2. get OpDesc of output number one of Merge(value_index)
  OpDescPtr original_op_desc = node->GetOpDesc();
  if (original_op_desc == nullptr) {
    GELOGE(FAILED, "[%s] Op desc must not be null.", constant_name.c_str());
    return FAILED;
  }
  GeTensorDesc original_out_tensor_desc = original_op_desc->GetOutputDesc(1);
  original_out_tensor_desc.SetDataType(DT_INT32);

  // 3. create attr value of Constant, is a tensor
  // NOTE(review): template/cast arguments garbled in extraction;
  // reconstructed as GeTensor over the raw bytes of value_index.
  GeTensorPtr const_tensor_ptr =
    MakeShared<GeTensor>(original_out_tensor_desc, reinterpret_cast<uint8_t *>(&value_index), sizeof(int));
  if (const_tensor_ptr == nullptr) {
    GELOGE(FAILED, "[%s] Make shared of Constant tensor failed.", constant_name.c_str());
    return FAILED;
  }

  GE_IF_BOOL_EXEC(!AttrUtils::SetTensor(op_desc, ATTR_NAME_WEIGHTS, const_tensor_ptr),
                  GELOGE(FAILED, "get ATTR_NAME_WEIGHTS failed"); return FAILED);

  // 4. set Constant output desc
  GE_CHK_STATUS_RET(op_desc->AddOutputDesc(original_out_tensor_desc), "add out put desc failed");
  return SUCCESS;
}
}  // namespace ge

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

#ifndef GE_GRAPH_PASSES_MERGE_PASS_H_
#define GE_GRAPH_PASSES_MERGE_PASS_H_

#include "graph/passes/base_pass.h"

namespace ge {
// Node pass that removes Merge nodes with zero or one live data input
// (see merge_pass.cc for the three cases handled).
class MergePass : public BaseNodePass {
 public:
  Status Run(NodePtr &node) override;

 private:
  // True when Merge's value_index output (index 1) has any data consumer.
  bool IsNeedChangeIndexToConstant(NodePtr &node) const;
  // Replaces value_index consumers with a Constant holding value_index.
  Status ChangeIndexToConstant(NodePtr &node, int &value_index);
  // Builds the Constant OpDesc carrying value_index as an int32 tensor.
  Status CreateConstByValue(NodePtr &node, int value_index, OpDescPtr &op_desc);
};
}  // namespace ge
#endif  // GE_GRAPH_PASSES_MERGE_PASS_H_

/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ + +#include "graph/passes/multi_batch_pass.h" + +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/type_utils.h" + +namespace ge { +Status MultiBatchPass::Run(ComputeGraphPtr graph) { + GELOGD("MultiBatchPass Enter"); + + GraphUtils::DumpGEGraph(graph, "BeforeMultiBatchPass"); + GraphUtils::DumpGEGraphToOnnx(*graph, "BeforeMultiBatchPass"); + + OutDataAnchorPtr pred_value = nullptr; + Status ret = FindPredValue(graph, pred_value); + if (ret == NOT_CHANGED) { + GELOGI("SwitchN node not exist, graph not changed."); + return SUCCESS; + } + if (ret != SUCCESS) { + GELOGE(FAILED, "FindPredValue fail."); + return FAILED; + } + + std::vector> batch_shape; + if (!CheckSwitchN(batch_shape)) { + GELOGE(FAILED, "CheckSwitchN fail."); + return FAILED; + } + + FindSwitchOutNodes(batch_shape.size()); + + if (ReplaceSwitchN(graph, pred_value, batch_shape) != SUCCESS) { + GELOGE(FAILED, "Replace SwitchN nodes fail."); + return FAILED; + } + + for (NodePtr &node : bypass_nodes_) { + if (graph->RemoveNode(node) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Remove SwitchN nodes %s fail.", node->GetName().c_str()); + return FAILED; + } + } + + GraphUtils::DumpGEGraph(graph, "AfterMultiBatchPass"); + GraphUtils::DumpGEGraphToOnnx(*graph, "AfterMultiBatchPass"); + + GELOGD("MultiBatchPass Leave"); + return SUCCESS; +} + +/// +/// @brief Replace & Combine SwitchN nodes +/// @param [in] graph +/// @param [out] pred_value +/// @return Status +/// +Status MultiBatchPass::FindPredValue(const ComputeGraphPtr &graph, OutDataAnchorPtr &pred_value) { + for (NodePtr &node : graph->GetDirectNode()) { + if (node->GetType() != SWITCHN) { + continue; + } + + InDataAnchorPtr in_data_anchor = 
node->GetInDataAnchor(SWITCH_PRED_INPUT); + if (in_data_anchor == nullptr) { + GELOGE(FAILED, "FindPredInput fail, in_data_anchor is null, node:%s.", node->GetName().c_str()); + return FAILED; + } + OutDataAnchorPtr pred_input = in_data_anchor->GetPeerOutAnchor(); + if (pred_input == nullptr) { + GELOGE(FAILED, "FindPredInput fail, pred_input is null, node:%s.", node->GetName().c_str()); + return FAILED; + } + + if (pred_value == nullptr) { + pred_value = pred_input; + } else if (pred_value != pred_input) { + GELOGE(FAILED, "Multi pred_value node exist."); + return FAILED; + } + switch_n_nodes_.emplace_back(node); + } + + if (switch_n_nodes_.empty()) { + GELOGI("SwitchN node not exist."); + return NOT_CHANGED; + } + + if (pred_value == nullptr) { + GELOGE(FAILED, "FindPredInput fail, pred_value is null."); + return FAILED; + } + + GELOGI("Find pred_value %s.", pred_value->GetOwnerNode()->GetName().c_str()); + return SUCCESS; +} + +/// +/// @brief Check SwitchN nodes +/// @param [out] batch_shape +/// @return bool +/// +bool MultiBatchPass::CheckSwitchN(std::vector> &batch_shape) { + // Check if output_num of different SwitchN is same + uint32_t batch_num = 0; + for (NodePtr &node : switch_n_nodes_) { + uint32_t tmp_num = node->GetAllOutDataAnchorsSize(); + if (batch_num == 0) { + batch_num = tmp_num; + } else if (batch_num != tmp_num) { + GELOGE(FAILED, "Output size of SwitchN not equal;"); + return false; + } + } + + // Check if output_shape of different SwitchN is same + std::vector> idx_batch_shape; + for (uint32_t i = 0; i < batch_num; i++) { + idx_batch_shape.clear(); + for (NodePtr &node : switch_n_nodes_) { + std::vector output_dims; + OpDescPtr op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(FAILED, "CheckDims fail, get op_desc fail, node: %s.", node->GetName().c_str()); + return false; + } + if (!AttrUtils::GetListInt(op_desc->GetOutputDesc(i), ATTR_NAME_SWITCHN_PRED_VALUE, output_dims)) { + GELOGE(FAILED, "CheckDims fail, get attr 
ATTR_NAME_SWITCHN_PRED_VALUE fail, batch_index=%u.", i); + return false; + } + idx_batch_shape.emplace_back(output_dims); + } + if (!CheckDims(idx_batch_shape)) { + GELOGE(FAILED, "CheckDims fail, batch_index=%u.", i); + return false; + } + + batch_shape.emplace_back(idx_batch_shape[0]); + } + + // Check if dim_num of different batch is same + if (batch_shape.empty()) { + GELOGE(FAILED, "batch_shape is empty."); + return false; + } + uint32_t dim_num = batch_shape[0].size(); + for (uint32_t i = 1; i < batch_num; i++) { + uint32_t tmp_dim_num = batch_shape[i].size(); + if (dim_num != tmp_dim_num) { + GELOGE(FAILED, "dim_num not equal, batch_0:%u, batch_%u:%u.", dim_num, i, tmp_dim_num); + return false; + } + } + + return true; +} + +/// +/// @brief Find outputs of SwitchN nodes +/// @param [in] batch_num +/// @return void +/// +void MultiBatchPass::FindSwitchOutNodes(uint32_t batch_num) { + std::vector output_nodes; + for (uint32_t i = 0; i < batch_num; i++) { + output_nodes.clear(); + for (NodePtr &node : switch_n_nodes_) { + // idx is promised to be valid + OutDataAnchorPtr out_data_anchor = node->GetOutDataAnchor(i); + GE_CHECK_NOTNULL_JUST_RETURN(out_data_anchor); + for (InDataAnchorPtr &peer_in_anchor : out_data_anchor->GetPeerInDataAnchors()) { + output_nodes.emplace_back(peer_in_anchor->GetOwnerNode()); + } + } + batch_head_nodes_.emplace_back(output_nodes); + } + + return; +} + +/// +/// @brief Replace & Combine SwitchN nodes +/// @param [in] graph +/// @param [in] pred_value +/// @param [in] batch_shape +/// @return Status +/// +Status MultiBatchPass::ReplaceSwitchN(ComputeGraphPtr &graph, OutDataAnchorPtr &pred_value, + const std::vector> &batch_shape) { + NodePtr pred_value_node = pred_value->GetOwnerNode(); + // Create SwitchCase node + const std::string switch_case_name = pred_value_node->GetName() + "_" + STREAMSWITCHN; + NodePtr switch_case = CreateSwitchCaseNode(graph, switch_case_name, pred_value, batch_shape); + if (switch_case == nullptr) { + 
GELOGE(FAILED, "CreateSwitchCaseNode %s fail.", switch_case_name.c_str()); + return FAILED; + } + + for (NodePtr &switch_n_node : switch_n_nodes_) { + if (BypassSwitchN(switch_n_node, switch_case) != SUCCESS) { + GELOGE(FAILED, "Bypass SwitchN %s fail.", switch_case_name.c_str()); + return FAILED; + } + } + + // Add switchCase input edge + if (GraphUtils::AddEdge(pred_value, switch_case->GetInDataAnchor(0)) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add SwitchCase in_data_edge fail, %s->%s.", pred_value_node->GetName().c_str(), + switch_case->GetName().c_str()); + return FAILED; + } + + if (AttachLabel(switch_case) != SUCCESS) { + GELOGE(FAILED, "AttachLabel fail."); + return FAILED; + } + + return SUCCESS; +} + +/// +/// @brief Check if output_shape of different SwitchN is same +/// @param [in] output_shape +/// @return bool +/// +bool MultiBatchPass::CheckDims(const std::vector> &output_shape) const { + if (output_shape.empty()) { + GELOGE(FAILED, "CheckDims fail: output_shape is empty."); + return false; + } + + size_t num = output_shape.size(); + size_t dim_num = output_shape[0].size(); + for (size_t i = 1; i < num; i++) { + size_t tmp_dim_num = output_shape[i].size(); + if (dim_num != tmp_dim_num) { + GELOGE(FAILED, "CheckDims fail: dim_num not equal, output_0:%zu, output_%zu:%zu.", dim_num, i, tmp_dim_num); + return false; + } + } + + if (dim_num == 0) { + return true; + } + + for (size_t i = 0; i < dim_num; i++) { + int64_t dim_value = output_shape[0][i]; + for (size_t j = 1; j < num; j++) { + int64_t tmp_dim_value = output_shape[j][i]; + if (dim_value != tmp_dim_value) { + GELOGE(FAILED, "CheckDims fail: dim_value not equal, dim_index=%zu, dim_value_0:%ld, dim_value_%zu:%ld.", i, + dim_value, j, tmp_dim_value); + return false; + } + } + } + + return true; +} + +/// +/// @brief Create StreamSwitchN node +/// @param [in] graph +/// @param [in] name +/// @param [in] pred_value +/// @param [in] batch_shape +/// @return ge::NodePtr +/// +NodePtr 
MultiBatchPass::CreateSwitchCaseNode(ComputeGraphPtr &graph, const std::string &name, + const OutDataAnchorPtr &pred_value, + const std::vector> &batch_shape) { + OpDescPtr op_desc = MakeShared(name, STREAMSWITCHN); + if (op_desc == nullptr) { + GELOGE(FAILED, "Create op_desc fail, StreamSwitchN:%s.", name.c_str()); + return nullptr; + } + + GELOGI("Create StreamSwitchN op:%s.", name.c_str()); + OpDescPtr pred_desc = pred_value->GetOwnerNode()->GetOpDesc(); + if (pred_desc == nullptr) { + GELOGE(FAILED, "Get pred_desc fail, StreamSwitchN:%s.", name.c_str()); + return nullptr; + } + if (op_desc->AddInputDesc(pred_desc->GetOutputDesc(pred_value->GetIdx())) != GRAPH_SUCCESS) { + GELOGE(FAILED, "AddInputDesc fail, StreamSwitchN:%s.", name.c_str()); + return nullptr; + } + + NodePtr switch_case_node = graph->AddNode(op_desc); + if (switch_case_node == nullptr) { + GELOGE(FAILED, "Create node fail, StreamSwitchN:%s.", name.c_str()); + return nullptr; + } + + uint32_t batch_num = static_cast(batch_shape.size()); + if (!AttrUtils::SetInt(op_desc, ATTR_NAME_BATCH_NUM, batch_num)) { + GELOGE(FAILED, "set attr ATTR_NAME_BATCH_NUM fail, StreamSwitchN:%s.", name.c_str()); + return nullptr; + } + for (uint32_t i = 0; i < batch_num; i++) { + const std::string attr_name = ATTR_NAME_PRED_VALUE + "_" + std::to_string(i); + if (!AttrUtils::SetListInt(op_desc, attr_name, batch_shape[i])) { + GELOGE(FAILED, "set attr ATTR_NAME_PRED_VALUE fail, StreamSwitchN:%s.", name.c_str()); + return nullptr; + } + } + + return switch_case_node; +} + +/// +/// @brief Bypass SwitchN node +/// @param [in] switch_n_node +/// @param [in] switch_case +/// @return Status +/// +Status MultiBatchPass::BypassSwitchN(NodePtr &switch_n_node, NodePtr &switch_case) { + InDataAnchorPtr in_data_anchor = switch_n_node->GetInDataAnchor(SWITCH_DATA_INPUT); + if (in_data_anchor == nullptr) { + GELOGE(FAILED, "Check in_data_anchor fail, SwitchN:%s.", switch_n_node->GetName().c_str()); + return FAILED; + } + 
OutDataAnchorPtr peer_data_anchor = in_data_anchor->GetPeerOutAnchor(); + if (peer_data_anchor == nullptr) { + GELOGE(FAILED, "Check peer_data_anchor fail, SwitchN:%s.", switch_n_node->GetName().c_str()); + return FAILED; + } + NodePtr data_input = peer_data_anchor->GetOwnerNode(); + + // Remove SwitchN data input + if (GraphUtils::RemoveEdge(peer_data_anchor, in_data_anchor) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Remove SwitchN in_data_edge fail, %s->%s.", data_input->GetName().c_str(), + switch_n_node->GetName().c_str()); + return FAILED; + } + if (GraphUtils::AddEdge(data_input->GetOutControlAnchor(), switch_case->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add StreamSwitchN in_control_edge fail, %s->%s.", data_input->GetName().c_str(), + switch_case->GetName().c_str()); + return FAILED; + } + + // Add SwitchCase control output + for (OutDataAnchorPtr &out_data_anchor : switch_n_node->GetAllOutDataAnchors()) { + for (InDataAnchorPtr &peer_in_anchor : out_data_anchor->GetPeerInDataAnchors()) { + NodePtr data_output = peer_in_anchor->GetOwnerNode(); + if ((GraphUtils::RemoveEdge(out_data_anchor, peer_in_anchor) != GRAPH_SUCCESS) || + (GraphUtils::AddEdge(peer_data_anchor, peer_in_anchor) != GRAPH_SUCCESS)) { + GELOGE(FAILED, "Bypass SwitchN data_edge fail, %s->%s->%s.", data_input->GetName().c_str(), + switch_n_node->GetName().c_str(), data_output->GetName().c_str()); + return FAILED; + } + if (GraphUtils::AddEdge(switch_case->GetOutControlAnchor(), data_output->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Add SwitchCase out_control_edge fail, %s->%s.", switch_case->GetName().c_str(), + data_output->GetName().c_str()); + return FAILED; + } + } + } + + bypass_nodes_.emplace_back(switch_n_node); + GELOGI("Bypass SwitchN node %s success.", switch_n_node->GetName().c_str()); + return SUCCESS; +} + +/// +/// @brief Attach stream_label & batch_label for batch branch +/// @param [in] switch_case_node +/// @return Status +/// +Status 
MultiBatchPass::AttachLabel(NodePtr &switch_case_node) { + std::vector stream_label_list; + for (uint32_t i = 0; i < static_cast(batch_head_nodes_.size()); i++) { + if (AttachBatchLabel(i) != SUCCESS) { + GELOGE(FAILED, "AttachBatchLabel fail, batch_idx=%u", i); + return FAILED; + } + + const std::string stream_label = "stream_label_batch_" + std::to_string(i); + if (AttachStreamLabel(i, stream_label) != SUCCESS) { + GELOGE(FAILED, "AttachStreamLabel fail, stream_label=%s", stream_label.c_str()); + return FAILED; + } + stream_label_list.emplace_back(stream_label); + } + + return SetActiveLabelList(switch_case_node, stream_label_list); +} + +/// +/// @brief Attach batch_label for batch branch +/// @param [in] batch_idx +/// @return Status +/// +Status MultiBatchPass::AttachBatchLabel(uint32_t batch_idx) { + std::stack nodes; + for (auto &node : batch_head_nodes_[batch_idx]) { + nodes.push(node); + } + + const std::string batch_label = "Batch_" + std::to_string(batch_idx); + std::unordered_set handled_nodes; + while (!nodes.empty()) { + NodePtr cur_node = nodes.top(); + nodes.pop(); + if (handled_nodes.count(cur_node) > 0) { + continue; + } + + OpDescPtr cur_desc = cur_node->GetOpDesc(); + GE_CHECK_NOTNULL(cur_desc); + if (cur_desc->HasAttr(ATTR_NAME_BATCH_LABEL)) { + std::string tmp_label; + if (!AttrUtils::GetStr(cur_desc, ATTR_NAME_BATCH_LABEL, tmp_label)) { + GELOGE(FAILED, "get attr ATTR_NAME_BATCH_LABEL fail, node: %s.", cur_desc->GetName().c_str()); + return FAILED; + } + if (tmp_label != batch_label) { + GELOGE(FAILED, "Reach other batch_branch, node:%s, cur_label:%s, batch_label:%s.", cur_desc->GetName().c_str(), + tmp_label.c_str(), batch_label.c_str()); + return FAILED; + } + } + GELOGD("Attach batch_label %s to node %s.", batch_label.c_str(), cur_desc->GetName().c_str()); + if (!AttrUtils::SetStr(cur_desc, ATTR_NAME_BATCH_LABEL, batch_label)) { + GELOGE(FAILED, "set attr ATTR_NAME_BATCH_LABEL fail, node:%s.", cur_desc->GetName().c_str()); + return FAILED; 
+ } + + for (auto &out_node : cur_node->GetOutAllNodes()) { + OpDescPtr op_desc = out_node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + const std::string type = op_desc->GetType(); + if ((type == STREAMMERGE) && (op_desc->HasAttr(ATTR_INSERT_BY_MBATCH))) { + continue; + } + if (type == NETOUTPUT) { + GELOGE(FAILED, "Reach net_output without Merge, cur_node:%s.", cur_node->GetName().c_str()); + return FAILED; + } + nodes.push(out_node); + } + (void)handled_nodes.insert(cur_node); + } + + return SUCCESS; +} + +/// +/// @brief Attach stream_label for batch branch +/// @param [in] batch_idx +/// @param [in] stream_label +/// @return Status +/// +Status MultiBatchPass::AttachStreamLabel(uint32_t batch_idx, const std::string &stream_label) { + std::stack nodes; + for (auto &node : batch_head_nodes_[batch_idx]) { + nodes.push(node); + } + + std::unordered_set handled_nodes; + while (!nodes.empty()) { + NodePtr cur_node = nodes.top(); + nodes.pop(); + + OpDescPtr cur_desc = cur_node->GetOpDesc(); + GE_CHECK_NOTNULL(cur_desc); + if ((handled_nodes.count(cur_node) > 0) || (cur_desc->HasAttr(ATTR_NAME_STREAM_LABEL))) { + continue; + } + + GELOGD("Attach stream_label %s to node %s.", stream_label.c_str(), cur_desc->GetName().c_str()); + if (SetStreamLabel(cur_node, stream_label) != SUCCESS) { + GELOGE(FAILED, "SetStreamLabel fail, node:%s.", cur_node->GetName().c_str()); + return FAILED; + } + + for (auto &out_node : cur_node->GetOutAllNodes()) { + nodes.push(out_node); + } + + (void)handled_nodes.insert(cur_node); + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/multi_batch_pass.h b/src/ge/graph/passes/multi_batch_pass.h new file mode 100644 index 00000000..6e3f5e46 --- /dev/null +++ b/src/ge/graph/passes/multi_batch_pass.h @@ -0,0 +1,50 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_MULTI_BATCH_PASS_H_ +#define GE_GRAPH_PASSES_MULTI_BATCH_PASS_H_ + +#include +#include + +#include "inc/graph_pass.h" + +namespace ge { +class MultiBatchPass : public GraphPass { + public: + Status Run(ComputeGraphPtr graph); + + private: + Status FindPredValue(const ComputeGraphPtr &graph, OutDataAnchorPtr &pred_value); + bool CheckSwitchN(std::vector> &batch_shape); + void FindSwitchOutNodes(uint32_t batch_num); + Status ReplaceSwitchN(ComputeGraphPtr &graph, OutDataAnchorPtr &pred_value, + const std::vector> &batch_shape); + + bool CheckDims(const std::vector> &output_shape) const; + NodePtr CreateSwitchCaseNode(ComputeGraphPtr &graph, const std::string &name, const OutDataAnchorPtr &pred_value, + const std::vector> &batch_shape); + Status BypassSwitchN(NodePtr &switch_n_node, NodePtr &switch_case_node); + Status AttachLabel(NodePtr &switch_case_node); + Status AttachBatchLabel(uint32_t batch_idx); + Status AttachStreamLabel(uint32_t batch_idx, const std::string &stream_label); + + std::vector switch_n_nodes_; + std::vector bypass_nodes_; + std::vector> batch_head_nodes_; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_MULTI_BATCH_PASS_H_ diff --git a/src/ge/graph/passes/net_output_pass.cc b/src/ge/graph/passes/net_output_pass.cc new file mode 100644 index 00000000..f3d4aa54 --- /dev/null +++ b/src/ge/graph/passes/net_output_pass.cc @@ -0,0 +1,518 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/net_output_pass.h" + +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/pass_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" + + +namespace ge { +Status NetOutputPass::GetRetvalOutputInfo(const ge::NodePtr &node, + std::map> &retval_node_index_map) { + GE_CHECK_NOTNULL(node); + GE_CHECK_NOTNULL(node->GetOpDesc()); + int64_t output_index = 0; + if (!AttrUtils::GetInt(node->GetOpDesc(), RETVAL_ATTR_NAME_INDEX, output_index)) { + GELOGE(PARAM_INVALID, "Get output index failed."); + return PARAM_INVALID; + } + if (retval_node_index_map.count(output_index) > 0) { + GELOGE(PARAM_INVALID, "Retval has duplicate index."); + return PARAM_INVALID; + } + InDataAnchorPtr in_data_anchor = node->GetInDataAnchor(0); + GE_CHECK_NOTNULL(in_data_anchor); + GE_CHECK_NOTNULL(in_data_anchor->GetPeerOutAnchor()); + int32_t src_node_index = in_data_anchor->GetPeerOutAnchor()->GetIdx(); + NodePtr src_node_ptr = in_data_anchor->GetPeerOutAnchor()->GetOwnerNode(); + retval_node_index_map[output_index] = std::make_pair(src_node_ptr, src_node_index); + // if user targets include retval node,delete it from set and insert its input node instead + auto iter = targets_.find(node); + if (iter != targets_.end()) { + targets_.erase(iter); + targets_.insert(src_node_ptr); + GELOGI("node 
[%s] is in user def targets, do not output result to user!", node->GetName().c_str()); + } + is_include_special_node_ = true; + return SUCCESS; +} + +Status NetOutputPass::GetOutputNode(const ge::ComputeGraphPtr &graph, + std::vector> &output_nodes_info) { + std::map> retval_node_index_map; + for (NodePtr &node : graph->GetDirectNode()) { + Status ret = SUCCESS; + if ((node->GetOpDesc() != nullptr) && (node->GetOpDesc()->HasAttr(RETVAL_ATTR_NAME_INDEX))) { + /// Set the output according to the Retval operator, + /// identify by whether there is an index parameter + ret = GetRetvalOutputInfo(node, retval_node_index_map); + } + if (ret != SUCCESS) { + GELOGE(ret, "GetRetvalOutputInfo failed"); + return ret; + } + } + GELOGI("Get retval node size:%zu.", retval_node_index_map.size()); + std::vector> out_nodes_tmp; + /// The Netoutput output is determined by Retval, and the input order + /// of Netoutput is sorted according to the index value of Retval. + for (auto it = retval_node_index_map.begin(); it != retval_node_index_map.end(); ++it) { + out_nodes_tmp.push_back(it->second); + } + + // when user set targets, mean that no output result + for (auto &ele : graph->GetGraphOutNodesInfo()) { + auto iter = targets_.find(ele.first); + if (iter != targets_.end()) { + GELOGI("user set out node [%s] is found in user def targets, out node is prio!", (ele.first)->GetName().c_str()); + targets_.erase(iter); + } + output_nodes_info.push_back(ele); + } + GELOGI("Output node set by user or leaf node, size:%zu.", output_nodes_info.size()); + for (auto &ele : out_nodes_tmp) { + // add member, no need to remove duplicated because we need to keep all edges + output_nodes_info.push_back(ele); + } + GELOGI("Get output node, size:%zu.", output_nodes_info.size()); + + Status check_ret = CheckOutputNodeInfo(graph, output_nodes_info); + if (check_ret != SUCCESS) { + return check_ret; + } + return SUCCESS; +} + +Status NetOutputPass::CheckOutputNodeInfo(const ComputeGraphPtr &graph, + const 
std::vector> &outputs) { + for (auto &item : outputs) { + NodePtr node = item.first; + if (node == nullptr) { + GELOGE(PARAM_INVALID, "Node in outputs is null."); + return PARAM_INVALID; + } else { + if (graph->FindNode(node->GetName()) == nullptr) { + GELOGE(INTERNAL_ERROR, "Out node (%s) is not in graph.", node->GetName().c_str()); + return INTERNAL_ERROR; + } + GE_CHECK_NOTNULL(node->GetOpDesc()); + int32_t out_size = node->GetOpDesc()->GetOutputsSize(); + int32_t index = item.second; + if (index < 0 || index >= out_size) { + GELOGE(PARAM_INVALID, + "User declared out node (%s) output index:%d must be smaller " + "than node ouput size:%d and cann't be negative!", + node->GetName().c_str(), index, out_size); + return PARAM_INVALID; + } + } + } + return SUCCESS; +} + +void NetOutputPass::AddInOutForNetOutputOp(const ge::ComputeGraphPtr &graph, const ge::OpDescPtr &net_output_desc, + const ge::NodePtr &src_node, int32_t src_index) { + /// Get the output attribute of src_node, + /// and set to the input/output of net_out_node. + if (src_node == nullptr || src_node->GetOpDesc() == nullptr || net_output_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "src node or net output desc is null."); + return; + } + ge::GeTensorDesc out_desc = src_node->GetOpDesc()->GetOutputDesc(src_index); + GE_IF_BOOL_EXEC(net_output_desc->AddInputDesc(out_desc) != SUCCESS, GELOGW("add input desc failed"); return); + TensorUtils::SetOutputTensor(out_desc, true); + GE_IF_BOOL_EXEC(net_output_desc->AddOutputDesc(out_desc) != SUCCESS, GELOGW("add output desc failed"); return); +} + +Status NetOutputPass::RemoveUnusedNode(const ge::ComputeGraphPtr &graph) { + std::vector node_to_delete; + // Delete _Retval operator. 
+ for (auto &node : graph->GetDirectNode()) { + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, GELOGW("Node OpDesc is nullptr"); continue); + bool need_be_deleted = node->GetInDataNodes().size() != 0 && node->GetOutDataNodesSize() == 0 && + (node->GetOpDesc()->HasAttr(RETVAL_ATTR_NAME_INDEX)); + if (need_be_deleted) { + node_to_delete.push_back(node); + } + } + for (NodePtr &node : node_to_delete) { + auto iter = targets_.find(node); + if (iter != targets_.end()) { + GELOGI("[Net output pass] node[%s] is in user set targets.so do not remove!", node->GetName().c_str()); + continue; + } + if (graph->RemoveNode(node) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Remove node failed, node name:%s.", node->GetName().c_str()); + return INTERNAL_ERROR; + } + GELOGI("Net output pass remove node:%s.", node->GetName().c_str()); + } + return SUCCESS; +} + +Status NetOutputPass::UpdateNetOutputDesc(const ge::NodePtr &net_output) { + OpDescPtr net_output_desc = net_output->GetOpDesc(); + if (net_output_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Opdesc of net output node is nullptr."); + return INTERNAL_ERROR; + } + if (net_output_desc->GetInputsSize() == 0) { + GELOGE(INTERNAL_ERROR, "Net output node input is empty."); + return INTERNAL_ERROR; + } + + std::vector is_input_const; + for (const auto &in_anchor : net_output->GetAllInDataAnchors()) { + GE_CHECK_NOTNULL(in_anchor); + uint32_t index = static_cast(in_anchor->GetIdx()); + if (index >= net_output_desc->GetAllInputsDesc().size()) { + GELOGE(INTERNAL_ERROR, "Index is invalid, index:%u, size:%zu.", index, + net_output_desc->GetAllInputsDesc().size()); + return INTERNAL_ERROR; + } + GE_CHECK_NOTNULL(in_anchor->GetPeerOutAnchor()); + is_input_const.push_back(PassUtils::IsConstant(in_anchor->GetPeerOutAnchor()->GetOwnerNode())); + OpDescPtr src_op_desc = in_anchor->GetPeerOutAnchor()->GetOwnerNode()->GetOpDesc(); + GE_CHECK_NOTNULL(src_op_desc); + uint32_t peer_index = static_cast(in_anchor->GetPeerOutAnchor()->GetIdx()); + 
ge::GeTensorDesc output_in_desc = src_op_desc->GetOutputDesc(peer_index); + if (net_output_desc->UpdateInputDesc(index, output_in_desc) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Update input desc failed, index:%u.", index); + return INTERNAL_ERROR; + } + TensorUtils::SetOutputTensor(output_in_desc, true); + if (net_output_desc->UpdateOutputDesc(index, output_in_desc) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Update output desc failed, index:%u.", index); + return INTERNAL_ERROR; + } + GELOGI("Update desc, format:%s, data type:%s, index:%u.", + TypeUtils::FormatToSerialString(output_in_desc.GetFormat()).c_str(), + TypeUtils::DataTypeToSerialString(output_in_desc.GetDataType()).c_str(), index); + } + net_output_desc->SetIsInputConst(is_input_const); + return SUCCESS; +} + +Status NetOutputPass::AddCtrlEdgeForTargets(const ge::NodePtr &net_out_node) { + if (net_out_node == nullptr) { + GELOGE(PARAM_INVALID, "net out node is null."); + return PARAM_INVALID; + } + // Add ctrl edge for targets + for (auto &node : targets_) { + if (node == nullptr) { + continue; + } + // no need to check null because have handled it in run SaveAndRemoveTargets function + graphStatus status = GraphUtils::AddEdge(node->GetOutControlAnchor(), net_out_node->GetInControlAnchor()); + if (status != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add ctrl edge to netoutput node[%s] for target node [%s] failed!", + net_out_node->GetName().c_str(), node->GetName().c_str()); + return INTERNAL_ERROR; + } + GELOGI("Add ctrl edge to netoutput node[%s] for target node [%s] success!", net_out_node->GetName().c_str(), + node->GetName().c_str()); + } + return SUCCESS; +} + +void NetOutputPass::SaveAndRemoveTargets(const ge::ComputeGraphPtr &graph) { + // save user targets node + for (auto &node : graph->GetGraphTargetNodesInfo()) { + if (node == nullptr) { + GELOGW("User pointed targets contains null node.ignore it !"); + continue; + } + targets_.insert(node); + } + GELOGI("User pointed targets size is 
%zu !", targets_.size()); +} + +Status NetOutputPass::AddEdgesForNetOutput(const ge::ComputeGraphPtr &graph, const ge::NodePtr &net_out_node, + const std::vector> &output_nodes_info) { + int32_t net_input_index = 0; + for (auto &item : output_nodes_info) { + NodePtr src_node = item.first; + GE_CHECK_NOTNULL(src_node); + graphStatus status = + GraphUtils::AddEdge(src_node->GetOutDataAnchor(item.second), net_out_node->GetInDataAnchor(net_input_index)); + if (status != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "AddEdge failed, src name:%s, src index:%d, dst index:%d.", src_node->GetName().c_str(), + item.second, net_input_index); + return INTERNAL_ERROR; + } + GELOGI("AddEdge to output node, src name:%s, src index:%d, dst index:%d.", src_node->GetName().c_str(), item.second, + net_input_index); + net_input_index++; + } + if (RemoveUnusedNode(graph) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Remove unused nodes failed."); + return INTERNAL_ERROR; + } + if (AddCtrlEdgeForTargets(net_out_node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add ctrl edge for targets failed."); + return INTERNAL_ERROR; + } + // Add true stream, netoutput is 0 + GE_IF_BOOL_EXEC(!ge::AttrUtils::SetInt(net_out_node->GetOpDesc(), ATTR_NAME_TRUE_BRANCH_STREAM, 0), + GELOGE(INTERNAL_ERROR, "set ATTR_NAME_TRUE_BRANCH_STREAM failed"); + return INTERNAL_ERROR); + return SUCCESS; +} + +bool NetOutputPass::CheckNodeIsInOutputNodes(const ge::ComputeGraphPtr &graph, const ge::NodePtr &node) { + for (auto &ele : graph->GetGraphOutNodesInfo()) { + auto out_node = ele.first; + if (node == out_node) { + return true; + } + } + return false; +} +Status NetOutputPass::UnLinkDataAnchorOfNetoutput(const ge::ComputeGraphPtr &graph, const ge::NodePtr &net_out_node) { + if (net_out_node == nullptr) { + GELOGE(PARAM_INVALID, "net out node is null."); + return PARAM_INVALID; + } + Status ret = SUCCESS; + + // unlink all anchor to data anchor of netoutput + for (auto &in_data_anchor : net_out_node->GetAllInDataAnchors()) { + 
if (in_data_anchor == nullptr) { + continue; + } + auto peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + if (peer_out_anchor == nullptr) { + GELOGI("PeerOutAnchor is null!"); + continue; + } + auto node = peer_out_anchor->GetOwnerNode(); + auto iter = targets_.find(node); + if (iter != targets_.end()) { + if (!CheckNodeIsInOutputNodes(graph, node)) { + ret = in_data_anchor->Unlink(peer_out_anchor); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Unlink peer_out_anchor fail!"); + return ret; + } + } else { + targets_.erase(iter); + } + } + } + return ret; +} + +Status NetOutputPass::UnLinkControlAnchorOfNetoutput(const ge::ComputeGraphPtr &graph, + const ge::NodePtr &net_out_node) { + if (net_out_node == nullptr) { + GELOGE(PARAM_INVALID, "net out node is null."); + return PARAM_INVALID; + } + Status ret = SUCCESS; + auto in_control_anchor = net_out_node->GetInControlAnchor(); + if (in_control_anchor == nullptr) { + GELOGE(PARAM_INVALID, "in control anchor is null."); + return PARAM_INVALID; + } + // unlink all data anchor to control anchor of netoutput + for (auto &peer_out_data_anchor : in_control_anchor->GetPeerOutDataAnchors()) { + if (peer_out_data_anchor == nullptr) { + GELOGI("PeerOutControlAnchor is null!"); + } else { + auto node = peer_out_data_anchor->GetOwnerNode(); + auto iter = targets_.find(node); + if (iter != targets_.end()) { + if (CheckNodeIsInOutputNodes(graph, node) == false) { + ret = in_control_anchor->Unlink(peer_out_data_anchor); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Unlink peer_out_anchor fail!"); + return ret; + } + } else { + targets_.erase(iter); + } + } + } + } + /// check all control anchor to control anchor of netoutput and delete it from targets + /// to avoid duplicated add control edge; + for (auto &peer_out_control_anchor : in_control_anchor->GetPeerOutControlAnchors()) { + if (peer_out_control_anchor == nullptr) { + GELOGI("PeerOutControlAnchor is null"); + } else { + auto node = 
peer_out_control_anchor->GetOwnerNode(); + auto iter = targets_.find(node); + if (iter != targets_.end()) { + targets_.erase(iter); + } + } + } + return ret; +} + +Status NetOutputPass::UnLink(const ge::ComputeGraphPtr &graph, const ge::NodePtr &net_out_node) { + GELOGI("[NetOutputPass] Enter Unlink process."); + Status ret = UnLinkDataAnchorOfNetoutput(graph, net_out_node); + if (ret != SUCCESS) { + GELOGI("[NetOutputPass] UnLinkDataAnchorOfNetoutput process fail."); + return ret; + } + ret = UnLinkControlAnchorOfNetoutput(graph, net_out_node); + if (ret != SUCCESS) { + GELOGI("[NetOutputPass] UnLinkControlAnchorOfNetoutput process fail."); + return ret; + } + return ret; +} + +Status NetOutputPass::ProcessWithNetoutput(const ge::ComputeGraphPtr &graph, const ge::NodePtr &output_node) { + if (UpdateNetOutputDesc(output_node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Update net output desc failed."); + return INTERNAL_ERROR; + } + if (UnLink(graph, output_node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "UnLink connection between netoutput node and user set target node"); + return INTERNAL_ERROR; + } + if (AddCtrlEdgeForTargets(output_node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add ctrl edge for targets failed."); + return INTERNAL_ERROR; + } + return SUCCESS; +} + +Status NetOutputPass::AddCtrlEdgesBetweenLeafAndNetOutput(const ge::ComputeGraphPtr &graph, + const ge::NodePtr &net_out_node) { + GE_CHECK_NOTNULL(net_out_node); + for (const auto &node : graph->GetDirectNode()) { + if (node == nullptr || node->GetOpDesc() == nullptr || node->GetOpDesc()->GetType() == NETOUTPUT) { + continue; + } + if ((node->GetInControlNodes().size() != 0 || node->GetInDataNodes().size() != 0) && + node->GetOutDataNodesSize() == 0 && node->GetOutControlNodes().size() == 0) { + GE_CHK_STATUS_RET(GraphUtils::AddEdge(node->GetOutControlAnchor(), net_out_node->GetInControlAnchor()), + "add edge failed"); + GELOGI("Add ctrl edge success. 
src name :%s, dst name :%s", node->GetName().c_str(), + net_out_node->GetName().c_str()); + } + } + return SUCCESS; +} +Status NetOutputPass::Run(ge::ComputeGraphPtr graph) { + if (graph == nullptr) { + GELOGE(GE_GRAPH_PARAM_NULLPTR, "Compute graph is null."); + return GE_GRAPH_PARAM_NULLPTR; + } + GELOGI("NetOutputPass Run."); + NodePtr output_node = graph->FindNode(NODE_NAME_NET_OUTPUT); + OpDescPtr net_output_desc = nullptr; + std::vector> output_nodes_info; + + // save user targets node + SaveAndRemoveTargets(graph); + // If graph already has a netoutput node, doesn't need to create it again. + if (output_node != nullptr) { + (void)AttrUtils::SetListStr(output_node->GetOpDesc(), ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, + std::move(std::vector())); + return ProcessWithNetoutput(graph, output_node); + } else { + net_output_desc = MakeShared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + if (net_output_desc == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared net output op failed."); + return MEMALLOC_FAILED; + } + (void)AttrUtils::SetListStr(net_output_desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, + std::move(std::vector())); + Status ret = GetOutputNode(graph, output_nodes_info); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Get net output nodes failed."); + return INTERNAL_ERROR; + } + GELOGI("[NETOUTPUT PASS] OutNodesInfo size:%zu, Targets Size:%zu, is_include_special_node_:%d", + graph->GetGraphOutNodesInfo().size(), graph->GetGraphTargetNodesInfo().size(), is_include_special_node_); + // If user does not set out nodes and targets and no retval node, return false + bool is_valid = (graph->GetGraphOutNodesInfo().size() == 0) && (graph->GetGraphTargetNodesInfo().size() == 0) && + (is_include_special_node_ == false); + if (is_valid) { + GELOGI("[NETOUTPUT PASS] output_nodes and target_nodes and special nodes is empty!It means no need netoutput!"); + return SUCCESS; + } + GELOGI("[NETOUTPUT PASS] Output node size:%lu.", output_nodes_info.size()); + if 
(output_nodes_info.empty()) { + // because retval node is contained by output_nodes_info, here means targets is non-empty + auto net_output_node = graph->AddNode(net_output_desc); + if (net_output_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Add output node failed."); + return INTERNAL_ERROR; + } + GE_CHK_STATUS_RET(AddCtrlEdgeForTargets(net_output_node), "add ctrl edge for targets failed"); + // Add true stream, netoutput is 0 + GE_IF_BOOL_EXEC(!ge::AttrUtils::SetInt(net_output_node->GetOpDesc(), ATTR_NAME_TRUE_BRANCH_STREAM, 0), + GELOGE(INTERNAL_ERROR, "set ATTR_NAME_TRUE_BRANCH_STREAM failed"); + return INTERNAL_ERROR); + return SUCCESS; + } + std::vector is_input_const; + for (auto iter = output_nodes_info.begin(); iter != output_nodes_info.end();) { + ge::NodePtr src_node = (*iter).first; + if (src_node == nullptr) { + continue; + } + int32_t src_index = (*iter).second; + // if src_node is in targets_, no need to Add in and out for netoutput + auto it = targets_.find(src_node); + if (it != targets_.end()) { + iter = output_nodes_info.erase(iter); + GELOGI("node [%s] is in processed targets, do not add inout for netoutput!", src_node->GetName().c_str()); + continue; + } + AddInOutForNetOutputOp(graph, net_output_desc, src_node, src_index); + GELOGI("Add output node:%s, index:%d.", src_node->GetName().c_str(), src_index); + is_input_const.push_back(PassUtils::IsConstant(src_node)); + ++iter; + } + net_output_desc->SetIsInputConst(is_input_const); + output_node = graph->AddNode(net_output_desc); + if (output_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Add output node failed."); + return INTERNAL_ERROR; + } + if (AddEdgesForNetOutput(graph, output_node, output_nodes_info) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add edges for net output node failed."); + return INTERNAL_ERROR; + } + if (AddCtrlEdgesBetweenLeafAndNetOutput(graph, output_node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add control edges between leaf and netoutput failed."); + return INTERNAL_ERROR; + 
} + GELOGI("Add NetOutput node success."); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/net_output_pass.h b/src/ge/graph/passes/net_output_pass.h new file mode 100644 index 00000000..62287e88 --- /dev/null +++ b/src/ge/graph/passes/net_output_pass.h @@ -0,0 +1,194 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_NET_OUTPUT_PASS_H_ +#define GE_GRAPH_PASSES_NET_OUTPUT_PASS_H_ + +#include +#include +#include +#include +#include + +#include "graph/types.h" +#include "inc/graph_pass.h" + +namespace ge { +class NetOutputPass : public GraphPass { + public: + /// + /// Entry of the NetOutputPass optimizer + /// @param [in] graph: Input ComputeGraph + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status Run(ge::ComputeGraphPtr graph) override; + + private: + /// + /// The graph of identifies the network output with + /// the _Retval node, we determine if the input node is a network output here. 
+ /// @param [in] node: Input node + /// @param [in/out] retval_node_index_map: Obtained output node pair + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status GetRetvalOutputInfo(const ge::NodePtr &node, + std::map> &retval_node_index_map); + + /// + /// Get the output node of the graph + /// @param [in] graph: Input ComputeGraph + /// @param [in/out] output_nodes_info: Obtained output node pair + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status GetOutputNode(const ge::ComputeGraphPtr &graph, + std::vector> &output_nodes_info); + + /// + /// Check if the network output node is legal + /// @param [in] graph: Input ComputeGraph + /// @param [in] outputs: Output node information of graph + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status CheckOutputNodeInfo(const ComputeGraphPtr &graph, const std::vector> &outputs); + + /// + /// Set input and output for the NetOutput node + /// @param [in] graph: Input ComputeGraph + /// @param [in] net_output_desc: OpDesc of the NetOutput node + /// @param [in] src_node: Source node of the NetOutput + /// @param [in] src_index: Output index of the Source node + /// @return void + /// @author + /// + void AddInOutForNetOutputOp(const ge::ComputeGraphPtr &graph, const ge::OpDescPtr &net_output_desc, + const ge::NodePtr &src_node, int32_t src_index); + + /// + /// Delete unwanted _Retval/Save/Summary nodes + /// @param [in] graph: Input ComputeGraph + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status RemoveUnusedNode(const ge::ComputeGraphPtr &graph); + + /// + /// Update the output/input tensor description of the NetOutput node + /// @param [in] net_output: The netOutput node + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status UpdateNetOutputDesc(const 
ge::NodePtr &net_output); + + /// + /// Add ctrl edge from target node to netoutput node + /// @param [in] net_output: The netOutput node + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status AddCtrlEdgeForTargets(const ge::NodePtr &net_out_node); + + /// + /// Remove invalid node and duplicated node of user set targets + /// @param [in] : compute graph + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + void SaveAndRemoveTargets(const ge::ComputeGraphPtr &graph); + + /// + /// Add edges for the NetOutput node + /// @param [in] graph: Input ComputeGraph + /// @param [in] net_out_node: The netOutput node + /// @param [in] output_nodes_info: Output node pair + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status AddEdgesForNetOutput(const ge::ComputeGraphPtr &graph, const ge::NodePtr &net_out_node, + const std::vector> &output_nodes_info); + /// + /// Add ctrl edges for leaf node + /// @param [in] graph: Input ComputeGraph + /// @param [in] net_out_node: The netOutput node + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status AddCtrlEdgesBetweenLeafAndNetOutput(const ge::ComputeGraphPtr &graph, const ge::NodePtr &net_out_node); + /// + /// Unlink all connections between target nodes and netoutput node + /// @param [in] graph: ComputeGraph + /// @param [in] net_out_node: The netOutput node + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status UnLink(const ge::ComputeGraphPtr &graph, const ge::NodePtr &net_out_node); + /// + /// Unlink data connections between target nodes and netoutput node + /// @param [in] graph: ComputeGraph + /// @param [in] net_out_node: The netOutput node + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status 
UnLinkDataAnchorOfNetoutput(const ge::ComputeGraphPtr &graph, const ge::NodePtr &net_out_node); + /// + /// Unlink control connections between target nodes and netoutput node + /// @param [in] graph: ComputeGraph + /// @param [in] net_out_node: The netOutput node + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status UnLinkControlAnchorOfNetoutput(const ge::ComputeGraphPtr &graph, const ge::NodePtr &net_out_node); + /// + /// if user have set netoutput node , do relative process + /// @param [in] graph: ComputeGraph + /// @param [in] net_out_node: The netOutput node + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + Status ProcessWithNetoutput(const ge::ComputeGraphPtr &graph, const ge::NodePtr &output_node); + /// + /// check node whether exist in user-set output nodes + /// @param [in] graph: ComputeGraph + /// @param [in] net_out_node: The netOutput node + /// @return SUCCESS: Execution succeed + /// @return OTHERS: Execution failed + /// @author + /// + bool CheckNodeIsInOutputNodes(const ge::ComputeGraphPtr &graph, const ge::NodePtr &node); + + bool is_include_special_node_ = false; + std::set targets_; + friend class ReUpdateNetOutputPass; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_NET_OUTPUT_PASS_H_ diff --git a/src/ge/graph/passes/next_iteration_pass.cc b/src/ge/graph/passes/next_iteration_pass.cc new file mode 100644 index 00000000..f0da5346 --- /dev/null +++ b/src/ge/graph/passes/next_iteration_pass.cc @@ -0,0 +1,332 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/next_iteration_pass.h" + +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" + + +namespace ge { +Status NextIterationPass::Run(ComputeGraphPtr graph) { + GELOGD("NextIterationPass Enter"); + /// Enter-----------+ + /// +-> Merge -> Switch <- LoopCond <- Cond + /// NextIteration---+ + for (auto &node : graph->GetDirectNode()) { + const std::string type = node->GetType(); + if ((type != ENTER) && (type != REFENTER)) { + continue; + } + if (HandleEnterNode(node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "HandleEnterNode for node %s fail.", node->GetName().c_str()); + return INTERNAL_ERROR; + } + } + + if (FindWhileGroups() != SUCCESS) { + GELOGE(INTERNAL_ERROR, "FindWhileGroups fail"); + return INTERNAL_ERROR; + } + + if (!VerifyWhileGroup()) { + GELOGE(INTERNAL_ERROR, "VerifyWhileGroup fail"); + return INTERNAL_ERROR; + } + + if (HandleWhileGroup(graph) != SUCCESS) { + GELOGE(FAILED, "HandleWhileGroup fail"); + return FAILED; + } + + GELOGD("NextIterationPass Leave"); + return SUCCESS; +} + +/// +/// @brief Handle Enter node +/// @param [in] enter_node +/// @return Status +/// +Status NextIterationPass::HandleEnterNode(const NodePtr &enter_node) { + OpDescPtr enter_desc = enter_node->GetOpDesc(); + GE_CHECK_NOTNULL(enter_desc); + std::string 
frame_name; + if (!ge::AttrUtils::GetStr(enter_desc, ENTER_ATTR_FRAME_NAME, frame_name) || frame_name.empty()) { + GELOGE(FAILED, "Get attr ENTER_ATTR_FRAME_NAME fail, node: %s", enter_desc->GetName().c_str()); + return FAILED; + } + + auto iter = loop_group_map_.find(frame_name); + if (iter == loop_group_map_.end()) { + LoopCondGroupPtr loop_group = MakeShared(); + if (loop_group == nullptr) { + GELOGE(FAILED, "MakeShared for LoopCondGroup fail."); + return FAILED; + } + loop_group->enter_nodes.emplace_back(enter_node); + loop_group_map_[frame_name] = loop_group; + } else { + iter->second->enter_nodes.emplace_back(enter_node); + } + + return SUCCESS; +} + +/// +/// @brief Find while groups +/// @return Status +/// +Status NextIterationPass::FindWhileGroups() { + for (auto &loop_group_iter : loop_group_map_) { + const std::string frame_name = loop_group_iter.first; + for (auto &enter_node : loop_group_iter.second->enter_nodes) { + for (auto &out_node : enter_node->GetOutAllNodes()) { + const std::string type = out_node->GetType(); + if ((type != MERGE) && (type != REFMERGE)) { + continue; + } + + NodePtr next_node = nullptr; + if (FindTargetNode(out_node, NEXTITERATION, true, next_node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Get NextIteration node fail, frame_name: %s.", frame_name.c_str()); + return INTERNAL_ERROR; + } + + NodePtr switch_node = nullptr; + if (FindTargetNode(out_node, SWITCH, false, switch_node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Get Switch node fail, frame_name: %s.", frame_name.c_str()); + return INTERNAL_ERROR; + } + + NodePtr loop_cond = nullptr; + if (FindTargetNode(switch_node, LOOPCOND, true, loop_cond) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Get LoopCond node fail, frame_name: %s.", frame_name.c_str()); + return INTERNAL_ERROR; + } + + if (loop_group_iter.second->loop_cond == nullptr) { + loop_group_iter.second->loop_cond = loop_cond; + } else if (loop_group_iter.second->loop_cond != loop_cond) { + GELOGE(FAILED, "Multi LoopCond nodes 
exist, frame_name: %s.", frame_name.c_str()); + return FAILED; + } + loop_group_iter.second->merge_next_pairs.emplace_back(std::make_pair(out_node, next_node)); + } + } + } + + return SUCCESS; +} + +/// +/// @brief Verify if valid +/// @return bool +/// +bool NextIterationPass::VerifyWhileGroup() { + // map + for (auto &loop_group_iter : loop_group_map_) { + const std::string frame_name = loop_group_iter.first; + if (frame_name.empty()) { + GELOGE(INTERNAL_ERROR, "VerifyWhileGroup fail, frame_name is empty."); + return false; + } + + if (loop_group_iter.second->loop_cond == nullptr) { + GELOGE(INTERNAL_ERROR, "VerifyWhileGroup fail, LoopCond is null, frame_name: %s.", frame_name.c_str()); + return false; + } + + for (auto &pair_iter : loop_group_iter.second->merge_next_pairs) { + if ((pair_iter.first == nullptr) || (pair_iter.second == nullptr)) { + GELOGE(INTERNAL_ERROR, "VerifyWhileGroup fail, merge_node/next_node is null, frame_name: %s.", + frame_name.c_str()); + return false; + } + } + } + + return true; +} + +/// +/// @brief Handle while group +/// @param [in] graph +/// @return Status +/// +Status NextIterationPass::HandleWhileGroup(ComputeGraphPtr &graph) { + for (auto &loop_cond_iter : loop_group_map_) { + std::string cond_name = loop_cond_iter.second->loop_cond->GetName(); + GELOGI("HandleWhileGroup, LoopCond node: %s.", cond_name.c_str()); + + // Create Active node, Enter->Active->Merge, NextItaration->Active->Merge + NodePtr enter_active = CreateActiveNode(graph, cond_name + "_Enter_" + STREAMACTIVE); + NodePtr next_active = CreateActiveNode(graph, cond_name + "_Next_" + STREAMACTIVE); + if ((enter_active == nullptr) || (next_active == nullptr)) { + GELOGE(INTERNAL_ERROR, "CreateActiveNode fail, cond_name: %s.", cond_name.c_str()); + return INTERNAL_ERROR; + } + + for (auto &enter_node : loop_cond_iter.second->enter_nodes) { + // Enter --> Active + if (GraphUtils::AddEdge(enter_node->GetOutControlAnchor(), enter_active->GetInControlAnchor()) != 
GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add control edge fail"); + return INTERNAL_ERROR; + } + } + + for (auto &pair : loop_cond_iter.second->merge_next_pairs) { + NodePtr merge_node = pair.first; + NodePtr next_node = pair.second; + // Active --> Merge + if (GraphUtils::AddEdge(enter_active->GetOutControlAnchor(), merge_node->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add control edge fail"); + return INTERNAL_ERROR; + } + + // NextIteration --> Active + if (GraphUtils::AddEdge(next_node->GetOutControlAnchor(), next_active->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add control edge fail"); + return INTERNAL_ERROR; + } + + // break link between NextIteration and Merge + if (BreakNextIteration(next_node, merge_node) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "BreakNextIteration failed"); + return INTERNAL_ERROR; + } + } + + if ((SetActiveLabelList(enter_active, {cond_name}) != SUCCESS) || + (SetActiveLabelList(next_active, {cond_name}) != SUCCESS)) { + GELOGE(INTERNAL_ERROR, "SetActiveLabelList failed"); + return INTERNAL_ERROR; + } + } + + return SUCCESS; +} + +/// +/// @brief Create Active Node +/// @param [in] graph +/// @param [in] name +/// @return ge::NodePtr +/// +NodePtr NextIterationPass::CreateActiveNode(ComputeGraphPtr &graph, const std::string &name) { + OpDescPtr op_desc = MakeShared(name, STREAMACTIVE); + if (op_desc == nullptr) { + return nullptr; + } + + GELOGI("Create StreamActive op:%s.", op_desc->GetName().c_str()); + NodePtr active_node = graph->AddNode(op_desc); + if (active_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Create node[%s] fail.", name.c_str()); + return nullptr; + } + + if (SetSwitchBranchNodeLabel(active_node, name) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "SetSwitchBranchNodeLabel for node: %s failed.", active_node->GetName().c_str()); + return nullptr; + } + + return active_node; +} + +/// +/// @brief Break NextIteration Link & add name to merge attr +/// @param [in] next_node +/// 
@param [in] merge_node +/// @return Status +/// +Status NextIterationPass::BreakNextIteration(const NodePtr &next_node, NodePtr &merge_node) { + if ((merge_node == nullptr) || (next_node == nullptr)) { + GELOGE(PARAM_INVALID, "merge node or next node is null."); + return PARAM_INVALID; + } + for (auto &in_anchor : merge_node->GetAllInDataAnchors()) { + OutDataAnchorPtr out_anchor = in_anchor->GetPeerOutAnchor(); + if ((out_anchor == nullptr) || (out_anchor->GetOwnerNode() != next_node)) { + continue; + } + if (GraphUtils::RemoveEdge(out_anchor, in_anchor) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Remove data edge fail, %s->%s.", next_node->GetName().c_str(), + merge_node->GetName().c_str()); + return INTERNAL_ERROR; + } + if (SetNextIteration(merge_node, next_node->GetName()) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "SetNextIteration for node %s fail.", merge_node->GetName().c_str()); + return INTERNAL_ERROR; + } + } + return SUCCESS; +} + +/// +/// @brief find target node +/// @param [in] node +/// @param [in] target_type +/// @param [in] is_input +/// @param [out] target_node +/// @return Status +/// +Status NextIterationPass::FindTargetNode(const NodePtr &node, const std::string &target_type, bool is_input, + NodePtr &target_node) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "node is null."); + return PARAM_INVALID; + } + std::vector nodes; + if (is_input) { + for (auto &tmp_node : node->GetInDataNodes()) { + nodes.emplace_back(tmp_node); + } + } else { + for (auto &tmp_node : node->GetOutDataNodes()) { + nodes.emplace_back(tmp_node); + } + } + + for (auto &tmp_node : nodes) { + const std::string type = tmp_node->GetType(); + if ((target_type == LOOPCOND) && (type == target_type)) { + target_node = tmp_node; + break; + } else if ((type == target_type) || (type == "Ref" + target_type)) { + target_node = tmp_node; + break; + } + } + + if (target_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Find node %s fail", target_type.c_str()); + return INTERNAL_ERROR; + } + 
return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/next_iteration_pass.h b/src/ge/graph/passes/next_iteration_pass.h new file mode 100644 index 00000000..47a86b3a --- /dev/null +++ b/src/ge/graph/passes/next_iteration_pass.h @@ -0,0 +1,55 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_NEXT_ITERATION_PASS_H_ +#define GE_GRAPH_PASSES_NEXT_ITERATION_PASS_H_ + +#include +#include +#include +#include +#include +#include + +#include "inc/graph_pass.h" + +struct LoopCondGroup { + LoopCondGroup() : loop_cond(nullptr) {} + ge::NodePtr loop_cond; // LoopCond node + std::vector enter_nodes; // Enter nodes + std::vector> merge_next_pairs; // +}; +using LoopCondGroupPtr = std::shared_ptr; + +namespace ge { +class NextIterationPass : public GraphPass { + public: + Status Run(ComputeGraphPtr graph); + + private: + Status HandleEnterNode(const NodePtr &enter_node); + Status FindWhileGroups(); + bool VerifyWhileGroup(); + Status HandleWhileGroup(ComputeGraphPtr &graph); + NodePtr CreateActiveNode(ComputeGraphPtr &graph, const std::string &name); + Status BreakNextIteration(const NodePtr &next_node, NodePtr &merge_node); + Status FindTargetNode(const NodePtr &node, const std::string &target_type, bool is_input, NodePtr &target_node); + + // map + std::unordered_map loop_group_map_; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_NEXT_ITERATION_PASS_H_ diff 
--git a/src/ge/graph/passes/no_reshape_op_remove_pass.cc b/src/ge/graph/passes/no_reshape_op_remove_pass.cc new file mode 100644 index 00000000..cf214920 --- /dev/null +++ b/src/ge/graph/passes/no_reshape_op_remove_pass.cc @@ -0,0 +1,200 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/no_reshape_op_remove_pass.h" + +#include +#include + +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/omg/omg_inner_types.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" + +namespace ge { +namespace { +const char *const kReshapeName = "Reshape_3"; +} // namespace +Status NoReshapeOpRemovePass::Run(ge::NodePtr &node) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "param [node] must not be null"); + return PARAM_INVALID; + } + OpDescPtr op_desc_ptr = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc_ptr); + if ((op_desc_ptr->GetType() == EXPANDDIMS) || (op_desc_ptr->GetType() == SQUEEZE)) { + return CheckNodeShapeAndForamt(node); + } else if (op_desc_ptr->GetType() == RESHAPE) { + if (op_desc_ptr->GetName() == kReshapeName) { + std::vector types; + std::list path; + path.push_back(node); + types.emplace_back(PERMUTE); + 
types.emplace_back(TRANSDATA); + types.emplace_back(CORRELATION); + // check reshape out data node fit specific type + bool reshape_correlation_flag = true; + for (const auto &type : types) { + if (!CheckOutDataNodesType(type, path)) { + reshape_correlation_flag = false; + break; + } + } + if (reshape_correlation_flag) { + path.pop_front(); + GE_IF_BOOL_EXEC(!AttrUtils::SetBool(path.front()->GetOpDesc(), "reshape_correlation", reshape_correlation_flag), + GELOGE(INTERNAL_ERROR, "set reshape_correlation failed"); + return INTERNAL_ERROR); + } + path.clear(); + types.clear(); + } + + if (domi::GetContext().format == domi::DOMI_TENSOR_NCHW && !op_desc_ptr->HasAttr(PERMUTE_ATTR_ORDER)) { + std::list path; + path.push_back(node); + string correlation = CORRELATION; + if (CheckOutDataNodesType(correlation, path)) { + op_desc_ptr->SetType(PERMUTE); + if (AttrUtils::SetListInt(op_desc_ptr, PERMUTE_ATTR_ORDER, vector{2, 3, 0, 1})) { + GELOGE(INTERNAL_ERROR, "Set permute attr order failed"); + return INTERNAL_ERROR; + } + path.clear(); + return SUCCESS; + } + } + + // prefer handle linked reshape than single reshape + vector delete_nodes = CheckLinkedReshape(node); + if (delete_nodes.empty()) { + return CheckNodeShapeAndForamt(node); + } + Status ret; + for (NodePtr &delete_node : delete_nodes) { + GE_CHECK_NOTNULL(delete_node); + GELOGI("NoReshapeOpRemovePass remove node:%s", delete_node->GetName().c_str()); + ret = IsolateAndDeleteNode(delete_node, {0}); + if (ret != SUCCESS) { + GELOGE(ret, "NoReshapeOpRemovePass remove node failed,ret:%u", ret); + return ret; + } + } + } + return SUCCESS; +} + +bool NoReshapeOpRemovePass::CheckOutDataNodesType(const string &type, std::list &path) { + if (path.empty()) { + return false; + } + Node::Vistor out_data_nodes = path.back()->GetOutDataNodes(); + bool flag = false; + GE_IF_BOOL_EXEC(out_data_nodes.at(0)->GetOpDesc() == nullptr, GELOGE(FAILED, "out_data_nodes GetOpDesc is nullptr"); + return false); + if ((out_data_nodes.size() == 
1) && (out_data_nodes.at(0)->GetOpDesc()->GetType() == type)) { + path.push_back(out_data_nodes.at(0)); + flag = true; + } + return flag; +} + +// if single node input and output shape is same can be delete +Status NoReshapeOpRemovePass::CheckNodeShapeAndForamt(ge::NodePtr &node) { + bool to_be_deleted = false; + OpDescPtr op_desc_ptr = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc_ptr); + if (op_desc_ptr->GetAllInputsDescPtr().empty()) { + GELOGE(INTERNAL_ERROR, "Input num check fail. node name:%s", op_desc_ptr->GetName().c_str()); + return INTERNAL_ERROR; + } + GE_CHECK_NOTNULL(op_desc_ptr->GetInputDescPtr(0)); + if (op_desc_ptr->GetInputDescPtr(0)->GetFormat() == FORMAT_ND) { + to_be_deleted = true; + } else { + to_be_deleted = true; + // compare input and output dims + std::vector input_4dims; + GE_CHK_STATUS_RET(OpUtils::TransferDim(op_desc_ptr->GetInputDesc(0).GetShape().GetDims(), input_4dims), + "transfer dim failed"); + + std::vector output_4dims; + GE_CHK_STATUS_RET(OpUtils::TransferDim(op_desc_ptr->GetOutputDesc(0).GetShape().GetDims(), output_4dims), + "transfer dim failed"); + + size_t vec_size = (input_4dims.size() > output_4dims.size()) ? 
output_4dims.size() : input_4dims.size(); + + for (size_t i = 0; i < vec_size; i++) { + if (input_4dims[i] != output_4dims[i]) { + to_be_deleted = false; + break; + } + } + } + if (to_be_deleted) { + GELOGI("NoReshapeOpRemovePass remove node:%s", node->GetName().c_str()); + return IsolateAndDeleteNode(node, {0}); + } + return SUCCESS; +} + +// check Reshape->Reshape linked case if can be delete +vector NoReshapeOpRemovePass::CheckLinkedReshape(ge::NodePtr &node) { + std::list node_path; + std::vector delete_nodes; + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, GELOGE(FAILED, "Node OpDesc is nullptr"); return delete_nodes); + const auto &cur_input_desc = node->GetOpDesc()->GetInputDesc(0); + vector cur_input_dims = cur_input_desc.GetShape().GetDims(); + Format cur_input_format = cur_input_desc.GetFormat(); + node_path.push_back(node); + // from front to back find longest sequence reshape can be delete + while (!node_path.empty()) { + const auto src_node = node_path.back(); + if (src_node == nullptr) { + continue; + } + Node::Vistor out_data_nodes = src_node->GetOutDataNodes(); + if ((out_data_nodes.size() == 1) && (out_data_nodes.at(0)->GetOpDesc() != nullptr) + && (out_data_nodes.at(0)->GetOpDesc()->GetType() == RESHAPE)) { + NodePtr dst_node = out_data_nodes.at(0); + node_path.push_back(dst_node); + GeTensorDesc dst_output_desc = dst_node->GetOpDesc()->GetOutputDesc(0); + vector dst_output_dims = dst_output_desc.GetShape().GetDims(); + if ((cur_input_dims.size() == dst_output_dims.size()) && (cur_input_format == dst_output_desc.GetFormat())) { + bool is_reshape_delete = true; + for (size_t i = 0; i < cur_input_dims.size(); i++) { + if (cur_input_dims[i] != dst_output_dims[i]) { + is_reshape_delete = false; + } + } + if (is_reshape_delete) { + delete_nodes.insert(delete_nodes.begin(), node_path.begin(), node_path.end()); + } + } + } else { + break; + } + } + node_path.clear(); + return delete_nodes; +} +} // namespace ge diff --git 
a/src/ge/graph/passes/no_reshape_op_remove_pass.h b/src/ge/graph/passes/no_reshape_op_remove_pass.h new file mode 100644 index 00000000..8bbc6d0a --- /dev/null +++ b/src/ge/graph/passes/no_reshape_op_remove_pass.h @@ -0,0 +1,68 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_NO_RESHAPE_OP_REMOVE_PASS_H_ +#define GE_GRAPH_PASSES_NO_RESHAPE_OP_REMOVE_PASS_H_ + +#include +#include +#include + +#include "graph/passes/base_pass.h" + +namespace ge { +class NoReshapeOpRemovePass : public BaseNodePass { + public: + /// + /// Entry of the NoReshapeOpRemovePass optimizer + /// @param [in] node: Input Node + /// @return SUCCESS: Dont find need to delete node + /// @return NOT_CHANGED: find need to delete node + /// @return OTHERS: Execution failed + /// @author + /// + Status Run(ge::NodePtr &node) override; + + private: + /// + /// check node input and output dims and format if can be delete + /// @param [in] opDescPtr: To be checked opDesc + /// @return SUCCESS: Check Node Success + /// @return OTHERS: Check Node Failed + /// @author + /// + Status CheckNodeShapeAndForamt(ge::NodePtr &node); + + /// + /// check linked reshape op if can be delete + /// @param [in] node: To be compare Node with opDescPtr + /// @return vector: To be delete nodes + /// @author + /// + vector CheckLinkedReshape(ge::NodePtr &node); + + /// + /// check node input and output dims and format if can 
be delete + /// @param [in] type: Check type + /// @param [in/out] path: outnode list + /// @return TRUE: To be delete + /// @return FALSE: To be Not delete + /// @author + /// + bool CheckOutDataNodesType(const string &type, std::list &path); +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_NO_RESHAPE_OP_REMOVE_PASS_H_ diff --git a/src/ge/graph/passes/no_use_reshape_remove_pass.cc b/src/ge/graph/passes/no_use_reshape_remove_pass.cc new file mode 100644 index 00000000..3e124264 --- /dev/null +++ b/src/ge/graph/passes/no_use_reshape_remove_pass.cc @@ -0,0 +1,80 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/no_use_reshape_remove_pass.h" + +#include +#include + +#include "external/graph/types.h" +#include "framework/common/debug/ge_log.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/passes/pass_utils.h" + +namespace ge { +namespace { +const int kReshapeDataIndex = 0; +const int kReshapeShapeIndex = 1; +} // namespace +Status NoUseReshapeRemovePass::Run(ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + OpDescPtr op_desc_ptr = node->GetOpDesc(); + if (op_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "NoUseReshapeRemovePass enter. 
OpDesc is null."); + return PARAM_INVALID; + } + if (op_desc_ptr->GetType() != RESHAPE) { + return SUCCESS; + } + GELOGI("NoUseReshapeRemovePass enter."); + + bool to_be_deleted = true; + // compare input and output dims + std::vector input_4dims = op_desc_ptr->GetInputDesc(0).GetShape().GetDims(); + std::vector output_4dims = op_desc_ptr->GetOutputDesc(0).GetShape().GetDims(); + + if (input_4dims.size() != output_4dims.size()) { + GELOGI("Input and output dim size is not equal.Keep this reshape op."); + return SUCCESS; + } + + size_t vec_size = input_4dims.size(); + for (size_t i = 0; i < vec_size; i++) { + if (input_4dims[i] < 0) { + GELOGI("Input shape is unknown.Keep this reshape op."); + return SUCCESS; + } + if (input_4dims[i] != output_4dims[i]) { + to_be_deleted = false; + break; + } + } + if (to_be_deleted) { + GELOGI("NoUseReshapeRemovePass remove useless node:%s", node->GetName().c_str()); + auto ret = PassUtils::UnlinkNodeWithControlCopy(node, kReshapeShapeIndex); + if (ret != SUCCESS) { + GELOGE(ret, "DimensionAdjustPass unlink node with control copy fail."); + return ret; + } + return IsolateAndDeleteNode(node, {kReshapeDataIndex}); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/no_use_reshape_remove_pass.h b/src/ge/graph/passes/no_use_reshape_remove_pass.h new file mode 100644 index 00000000..7ca36807 --- /dev/null +++ b/src/ge/graph/passes/no_use_reshape_remove_pass.h @@ -0,0 +1,38 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_NO_USE_RESHAPE_REMOVE_PASS_H_ +#define GE_GRAPH_PASSES_NO_USE_RESHAPE_REMOVE_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class NoUseReshapeRemovePass : public BaseNodePass { + public: + /// + /// Entry of the NoUseReshapeRemovePass optimizer + /// To satisfy fusion rule of FE, remove reshape op which input & output format is same + /// @param [in] node: Input Node + /// @return SUCCESS: Dont find need to delete node + /// @return NOT_CHANGED: find need to delete node + /// @return OTHERS: Execution failed + /// @author + /// + Status Run(ge::NodePtr &node) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_NO_USE_RESHAPE_REMOVE_PASS_H_ diff --git a/src/ge/graph/passes/pass_manager.cc b/src/ge/graph/passes/pass_manager.cc new file mode 100644 index 00000000..a8919a55 --- /dev/null +++ b/src/ge/graph/passes/pass_manager.cc @@ -0,0 +1,61 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "inc/pass_manager.h" +#include "common/debug/log.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/utils/node_utils.h" + +namespace ge { +const vector& PassManager::GraphPasses() const { return graph_passes_; } + +Status PassManager::AddPass(GraphPass *pass) { + GE_CHECK_NOTNULL(pass); + graph_passes_.push_back(pass); + return SUCCESS; +} + +Status PassManager::Run(const ComputeGraphPtr &graph) { + GE_CHECK_NOTNULL(graph); + return Run(graph, graph_passes_); +} + +Status PassManager::Run(const ComputeGraphPtr &graph, vector &passes) { + GE_CHECK_NOTNULL(graph); + bool not_changed = true; + + for (auto &pass : passes) { + GE_CHECK_NOTNULL(pass); + + Status status = pass->Run(graph); + if (status == SUCCESS) { + not_changed = false; + } else if (status != NOT_CHANGED) { + GELOGE(status, "Pass Run failed"); + return status; + } + } + + return not_changed ? NOT_CHANGED : SUCCESS; +} + +PassManager::~PassManager() { + for (auto pass : graph_passes_) { + GE_DELETE_NEW_SINGLE(pass); + } +} +} // namespace ge diff --git a/src/ge/graph/passes/pass_utils.cc b/src/ge/graph/passes/pass_utils.cc new file mode 100644 index 00000000..58679614 --- /dev/null +++ b/src/ge/graph/passes/pass_utils.cc @@ -0,0 +1,338 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/pass_utils.h" + +#include +#include +#include +#include +#include + +#include "cce/dnn_base_def.hpp" +#include "common/ge/ge_util.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_tensor.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace { +const uint32_t kShapeDimSize = 1; +const uint32_t kDimSizeTwo = 2; +} // namespace + +Status PassUtils::ConstructTensorDescWithData(const GeTensorDesc &out_desc, std::vector &data, + std::vector &v_output, const bool scalar_output) { + Status ret = SUCCESS; + const uint32_t dim_size = static_cast(data.size()); + DataType data_type = out_desc.GetDataType(); + if (data_type == DT_INT32) { + unique_ptr buf(new (std::nothrow) int32_t[dim_size]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new failed"); + return MEMALLOC_FAILED; + } + for (uint32_t i = 0; i < dim_size; i++) { + if (data[i] >= INT_MAX) { + GELOGE(PARAM_INVALID, "int32 overflow, data[%u]:%ld", i, data[i]); + return PARAM_INVALID; + } + buf[i] = static_cast(data[i]); + } + ret = ConstructTensorDescWithData(out_desc, buf.get(), dim_size, v_output, scalar_output); + } else if (data_type == DT_INT64) { + unique_ptr buf(new (std::nothrow) int64_t[dim_size]()); + if (buf == nullptr) { + GELOGE(MEMALLOC_FAILED, "new failed"); + return MEMALLOC_FAILED; + } + for (uint32_t i = 0; i < dim_size; i++) { + buf[i] = data[i]; + } + ret = ConstructTensorDescWithData(out_desc, buf.get(), dim_size, v_output, scalar_output); + } else { + GELOGE(PARAM_INVALID, "Only support DT_INT32 and DT_INT64. 
data_type:%s", + TypeUtils::DataTypeToSerialString(data_type).c_str()); + return PARAM_INVALID; + } + + if (ret != SUCCESS) { + GELOGE(ret, "GetShapeTensor failed."); + return ret; + } + + return SUCCESS; +} + +template +Status PassUtils::ConstructTensorDescWithData(const GeTensorDesc &out_desc, T *buf, uint32_t len, + std::vector &v_output, const bool scalar_output) { + bool empty_shape = ((len == 1) && scalar_output) || (len == 0); + // construct TensorDesc + GeShape out_shape = (empty_shape ? GeShape() : GeShape({len})); + GeTensorDesc output_tensor_desc(out_desc); + output_tensor_desc.SetShape(out_shape); + + GeTensorPtr output_tensor_ptr = + MakeShared(output_tensor_desc, reinterpret_cast(buf), sizeof(T) * len); + if (output_tensor_ptr == nullptr) { + GELOGE(MEMALLOC_FAILED, "Make shared failed"); + return MEMALLOC_FAILED; + } + + v_output.push_back(output_tensor_ptr); + return SUCCESS; +} + +bool PassUtils::IsConstant(const ConstNodePtr &node) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "node is null"); + return false; + } + + auto src_node_type = node->GetType(); + bool is_constant = (src_node_type == CONSTANT) || (src_node_type == CONSTANTOP); + return is_constant; +} + +Status PassUtils::SetOutNodeWeight(const OutDataAnchorPtr &out_data_anchor, const NodePtr &src_node) { + GE_IF_BOOL_EXEC(src_node == nullptr, GELOGE(PARAM_INVALID, "src_node is null"); return PARAM_INVALID); + if (!IsConstant(src_node)) { + return SUCCESS; + } + + auto weights = OpDescUtils::MutableWeights(src_node); + if (weights.empty()) { + return PARAM_INVALID; + } + + auto weight = weights.at(0); + auto src_in_ctrl = src_node->GetInControlAnchor(); + if ((src_in_ctrl == nullptr) || (out_data_anchor == nullptr)) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + auto src_out_control_anchors = src_in_ctrl->GetPeerAnchors(); + for (const auto &dst_in_data : out_data_anchor->GetPeerInDataAnchors()) { + auto dst_node = dst_in_data->GetOwnerNode(); + auto dst_op_desc = 
dst_node->GetOpDesc(); + if (dst_op_desc == nullptr) { + continue; + } + + std::vector is_input_const = dst_op_desc->GetIsInputConst(); + auto input_index = static_cast(dst_in_data->GetIdx()); + if (input_index < is_input_const.size()) { + is_input_const[input_index] = true; + dst_op_desc->SetIsInputConst(is_input_const); + } + + GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(out_data_anchor, dst_in_data), "remove edge failed"); + graphStatus ret = OpDescUtils::AddConstOpToAnchor(dst_in_data, weight); + if (ret != SUCCESS) { + return ret; + } + GE_CHECK_NOTNULL(dst_in_data->GetPeerOutAnchor()); + auto dynamic_const_node = dst_in_data->GetPeerOutAnchor()->GetOwnerNode(); + GE_CHECK_NOTNULL(dynamic_const_node->GetOpDesc()); + dynamic_const_node->GetOpDesc()->SetType(src_node->GetType()); + + // restore control inputs to dynamically added constant ops, if any + for (const auto &src_out_control_anchor : src_out_control_anchors) { + GE_CHK_STATUS_RET(GraphUtils::AddEdge(src_out_control_anchor, dynamic_const_node->GetInControlAnchor()), + "add edge failed"); + } + } + + /// Before: + /// Op1 - - - > Constant ------> Switch - - - > Op2 + /// After: + /// Op1 - - - > Op2 + for (const auto &dst_in_ctrl : out_data_anchor->GetPeerInControlAnchors()) { + for (const auto &src_out_control_anchor : src_out_control_anchors) { + GE_CHK_STATUS_RET(GraphUtils::AddEdge(src_out_control_anchor, dst_in_ctrl), "add edge failed"); + } + } + + return SUCCESS; +} + +Status PassUtils::RemoveBranch(const NodePtr &node, std::vector &delete_nodes, + std::vector &end_nodes) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + GELOGI("Remove branch starting from node %s", node->GetName().c_str()); + std::queue search_queue; + search_queue.push(node); + + while (!search_queue.empty()) { + const NodePtr src_node = search_queue.front(); + if (src_node == nullptr) { + continue; + } + delete_nodes.push_back(src_node); + search_queue.pop(); + + for (const auto 
&src_out_anchor : src_node->GetAllOutAnchors()) { + for (const auto &dst_in_anchor : src_out_anchor->GetPeerAnchors()) { + if (dst_in_anchor == nullptr) { + continue; + } + auto dst_node = dst_in_anchor->GetOwnerNode(); + std::string node_type; + GE_CHK_STATUS_RET(GetOriginalType(dst_node, node_type), "get original type failed"); + if (node_type == NETOUTPUT) { + if (dst_in_anchor->IsTypeOf()) { + GELOGE(INTERNAL_ERROR, + "[%s] Inactive branch connected to " + "NetOutput with data anchor.", + node->GetName().c_str()); + return INTERNAL_ERROR; + } else { + // safe to unlink control edges + GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(src_out_anchor, dst_in_anchor), "remove edge failed"); + end_nodes.push_back(dst_node); + } + } else if (node_type == MERGE) { + /// Unlink connection between the inactive branch and Merge/NetOutput. + /// The removal of inactive nodes will be handled in PrunePass + GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(src_out_anchor, dst_in_anchor), "remove edge failed"); + end_nodes.push_back(dst_node); + GELOGD("Reach the end merge node %s, the branch removing stop", dst_node->GetName().c_str()); + } else { + search_queue.push(dst_node); + } + } + } + } + + return SUCCESS; +} + +NodePtr PassUtils::GetInDataNode(const ConstNodePtr &node, int index) { + if (node == nullptr) { + return nullptr; + } + + auto in_data_anchor = node->GetInDataAnchor(index); + if (in_data_anchor == nullptr) { + return nullptr; + } + + auto peer_out_data_anchor = in_data_anchor->GetPeerOutAnchor(); + if (peer_out_data_anchor == nullptr) { + return nullptr; + } + + auto src_node = peer_out_data_anchor->GetOwnerNode(); + return src_node; +} + +bool PassUtils::IsNeedTrainIteFlowCtrl(const ComputeGraphPtr &compute_graph) { + if (compute_graph == nullptr) { + return false; + } + if (!ge::VarManager::Instance(compute_graph->GetSessionID())->IsVarExist(NODE_NAME_FLOWCTRL_LOOP_PER_ITER)) { + return false; + } + return compute_graph->GetNeedIteration(); +} + +int 
PassUtils::GetUniqueInDataAnchorIndex(const NodePtr &node_ptr) { + const int invalid_index = -1; + if (node_ptr == nullptr) { + GELOGE(INTERNAL_ERROR, "GetUniqueInDataAnchorIndex: node is null"); + return invalid_index; + } + for (const auto &in_anchor : node_ptr->GetAllInDataAnchors()) { + if ((in_anchor != nullptr) && (in_anchor->GetPeerOutAnchor() != nullptr) && + (in_anchor->GetPeerOutAnchor()->GetOwnerNode() != nullptr)) { + return (in_anchor->GetIdx()); + } + } + GELOGE(INTERNAL_ERROR, + "GetUniqueInDataAnchorIndex: [%s] failed to find " + "in data anchor with a valid peer out node", + node_ptr->GetName().c_str()); + return invalid_index; +} + +Status PassUtils::UnlinkNodeWithControlCopy(NodePtr &node, int index) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "node is null."); + return PARAM_INVALID; + } + auto in_data_anchor = node->GetInDataAnchor(index); + if (in_data_anchor == nullptr) { + GELOGW("[%s] in_data_anchor is null with index [%d].", node->GetName().c_str(), index); + return SUCCESS; + } + auto out_data_anchor = in_data_anchor->GetPeerOutAnchor(); + if (out_data_anchor == nullptr) { + GELOGE(FAILED, "[%s] peer out_data_anchor is null with index [%d].", node->GetName().c_str(), index); + return FAILED; + } + // Remove link between father_node and node + in_data_anchor->UnlinkAll(); + + auto father_node = out_data_anchor->GetOwnerNode(); + // link father_node's in control nodes to node + if (GraphUtils::CopyInCtrlEdges(father_node, node) != GRAPH_SUCCESS) { + return FAILED; + } + return SUCCESS; +} + +Status PassUtils::RemoveInactiveBranchToMerge(const OutDataAnchorPtr &inactive_output_anchor, + std::vector &delete_nodes, std::vector &end_nodes) { + if (inactive_output_anchor == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + for (const auto &dst_anchor : inactive_output_anchor->GetPeerAnchors()) { + if (dst_anchor == nullptr) { + continue; + } + auto dst_node = dst_anchor->GetOwnerNode(); + if (dst_node != nullptr) { 
+ std::string dst_node_type; + GE_CHK_STATUS_RET(GetOriginalType(dst_node, dst_node_type), "get original type failed"); + if (dst_node_type == MERGE) { + GELOGD("[%s] Switch connected directly to Merge", inactive_output_anchor->GetOwnerNode()->GetName().c_str()); + GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(inactive_output_anchor, dst_anchor), "remove edge failed"); + continue; + } + + Status ret = PassUtils::RemoveBranch(dst_node, delete_nodes, end_nodes); + if (ret != SUCCESS) { + return ret; + } + } + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/pass_utils.h b/src/ge/graph/passes/pass_utils.h new file mode 100644 index 00000000..b80e05f1 --- /dev/null +++ b/src/ge/graph/passes/pass_utils.h @@ -0,0 +1,75 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_PASS_UTILS_H_ +#define GE_GRAPH_PASSES_PASS_UTILS_H_ + +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "graph/compute_graph.h" + +namespace ge { +class PassUtils { + public: + PassUtils() = delete; + + static NodePtr GetInDataNode(const ConstNodePtr &node, int index); + + static bool IsConstant(const ConstNodePtr &node); + + static Status SetOutNodeWeight(const OutDataAnchorPtr &out_data_anchor, const NodePtr &src_node); + + static Status RemoveBranch(const NodePtr &node, std::vector &delete_nodes, std::vector &end_nodes); + + static Status RemoveInactiveBranchToMerge(const OutDataAnchorPtr &inactive_output_anchor, + std::vector &delete_nodes, std::vector &end_nodes); + + /// + /// check is need iter flow ctrl. + /// @param compute_graph graph + /// @return true:need iter flow ctrl. + /// false:no need + /// + static bool IsNeedTrainIteFlowCtrl(const ComputeGraphPtr &compute_graph); + /// Construct a TensorDesc and put the data in it, it's shape is a list. 
+ /// If the data length is 1, it's shape is [] + static Status ConstructTensorDescWithData(const GeTensorDesc &out_desc, std::vector &data, + std::vector &v_output, const bool scalar_output = false); + + template + static Status ConstructTensorDescWithData(const GeTensorDesc &out_desc, T *buf, uint32_t len, + std::vector &v_output, const bool scalar_output = false); + /// + /// find in data anchor index with a valid peer out node existed + /// @param node_ptr + /// @return index + /// + static int GetUniqueInDataAnchorIndex(const NodePtr &node_ptr); + /// + /// unlink node's in data anchors[index]'s father node with node itself + /// then link father node's all in control nodes to node + /// if any and not connected yet + /// @param node + /// @param index: in data anchor index + /// @return + /// + static Status UnlinkNodeWithControlCopy(NodePtr &node, int index); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_PASS_UTILS_H_ diff --git a/src/ge/graph/passes/permute_pass.cc b/src/ge/graph/passes/permute_pass.cc new file mode 100644 index 00000000..d541e66a --- /dev/null +++ b/src/ge/graph/passes/permute_pass.cc @@ -0,0 +1,133 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/permute_pass.h" + +#include +#include + +#include "common/debug/log.h" +#include "common/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/kernel.h" +#include "inc/kernel_factory.h" +#include "framework/omg/omg_inner_types.h" + +using domi::DOMI_TENSOR_ND; +using domi::DOMI_TENSOR_NHWC; +using domi::GetContext; +using domi::SUCCESS; + +namespace ge { +Status PermutePass::Run(ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + std::vector isolate_nodes; + for (NodePtr &node : graph->GetAllNodes()) { + OpDescPtr op_desc_ptr = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc_ptr); + GE_IF_BOOL_EXEC( + op_desc_ptr->GetType() == PERMUTE && GetContext().type == domi::FMK_TYPE_T, + /// Input format 5D means NHWC in 4D way. So if input origin foramt is NCHW and + /// permute paramter list is [0,3,1,2], this permute can be optimised. + GE_IF_BOOL_EXEC( + GetContext().format != DOMI_TENSOR_ND, + // Get input origin foramt + for (NodePtr &n : graph->GetAllNodes()) { + GE_IF_BOOL_EXEC( + n->GetOpDesc()->GetType() == PERMUTE, std::queue < NodePtr > q_node; + q_node.push(n); + bool jump_out = false; + while (!q_node.empty()) { + NodePtr n_temp = q_node.back(); + q_node.pop(); + for (auto &inNode : n_temp->GetInDataNodes()) { + int64_t cur_format = 0; + GE_IF_BOOL_EXEC(AttrUtils::GetInt(inNode->GetOpDesc(), ATTR_NAME_FORMAT, cur_format), + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(n->GetOpDesc(), "permute_src_format", cur_format), + GELOGW("set permute_src_format failed"); + continue); + jump_out = true; + break); + q_node.push(inNode); + } + GE_IF_BOOL_EXEC(jump_out, + break); + }); + } + + int64_t permute_src_format = 0; + GE_IF_BOOL_EXEC(!AttrUtils::GetInt(op_desc_ptr, "permute_src_format", permute_src_format), + continue); + // Get dim_index_ + std::vector index_list; + GE_CHK_BOOL_RET_STATUS(AttrUtils::GetListInt(op_desc_ptr, PERMUTE_ATTR_ORDER, index_list), INTERNAL_ERROR, + "get index list failed"); 
+ + size_t index_size = index_list.size(); + GE_IF_BOOL_EXEC(index_size == 0, + continue); + + GE_IF_BOOL_EXEC( + index_size == 4 && (permute_src_format == DOMI_TENSOR_NHWC && index_list.at(0) == 0 && + index_list.at(1) == 3 && index_list.at(2) == 1 && index_list.at(3) == 2), + isolate_nodes.push_back(node); + continue); + int64_t conv_format = 0; + GE_IF_BOOL_EXEC( + index_size == 4 && + (index_list.at(0) == 0 && index_list.at(1) == 2 && index_list.at(2) == 3 && index_list.at(3) == 1), + GE_IF_BOOL_EXEC( + (node->GetOutDataNodesSize() > 0 && node->GetOutDataNodes().at(0) != nullptr && + node->GetOutDataNodes().at(0)->GetOpDesc() != nullptr) && + ((node->GetOutDataNodesSize() != 0 && + CONVOLUTION == node->GetOutDataNodes().at(0)->GetOpDesc()->GetType() && + AttrUtils::GetInt(node->GetOutDataNodes().at(0)->GetOpDesc(), ATTR_NAME_FORMAT, + conv_format) && + conv_format == DOMI_TENSOR_NHWC) || + (node->GetOutDataNodesSize() != 0 && + node->GetOutDataNodes().at(0)->GetOpDesc()->GetType() == DEPCONVOLUTION) || + (node->GetOutDataNodesSize() != 0 && + node->GetOutDataNodes().at(0)->GetOpDesc()->GetType() == DECONVOLUTION) || + (node->GetOutDataNodesSize() != 0 && + node->GetOutDataNodes().at(0)->GetOpDesc()->GetType() == PAD && + node->GetOutDataNodes().at(0)->GetOutDataNodesSize() != 0 && + node->GetOutDataNodes().at(0)->GetOutDataNodes().at(0) != nullptr && + node->GetOutDataNodes().at(0)->GetOutDataNodes().at(0)->GetOpDesc() != nullptr && + node->GetOutDataNodes().at(0)->GetOutDataNodes().at(0)->GetOpDesc()->GetType() == + CONVOLUTION)), + isolate_nodes.push_back(node); + continue);););); + } + + GE_IF_BOOL_EXEC(isolate_nodes.size() != 0, + for (auto &node : isolate_nodes) { + // Adding an attribute indicates that the predecessor Permute has been deleted for the Builder to process. 
+ for (auto &outNode : node->GetOutDataNodes()) { + OpDescPtr op_desc_ptr = outNode->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc_ptr); + if (!AttrUtils::SetBool(op_desc_ptr, ATTR_NAME_PRED_PERMUTE_DELETED, true)) { + GELOGE(INTERNAL_ERROR, "set ATTR_NAME_PRED_PERMUTE_DELETED failed"); + return INTERNAL_ERROR; + } + } + GE_RETURN_WITH_LOG_IF_ERROR(graph->RemoveNode(node), "[%s]:remove permute node failed", + node->GetOpDesc()->GetName().c_str()); + }); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/permute_pass.h b/src/ge/graph/passes/permute_pass.h new file mode 100644 index 00000000..e4415b6e --- /dev/null +++ b/src/ge/graph/passes/permute_pass.h @@ -0,0 +1,29 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_PERMUTE_PASS_H_ +#define GE_GRAPH_PASSES_PERMUTE_PASS_H_ + +#include "inc/graph_pass.h" + +namespace ge { +class PermutePass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_PERMUTE_PASS_H_ diff --git a/src/ge/graph/passes/placeholder_with_default_pass.cc b/src/ge/graph/passes/placeholder_with_default_pass.cc new file mode 100644 index 00000000..4a8ded9c --- /dev/null +++ b/src/ge/graph/passes/placeholder_with_default_pass.cc @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/placeholder_with_default_pass.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/omg_util.h" + +namespace ge { +Status PlaceholderWithDefaultPass::Run(NodePtr &node) { + GE_CHECK_NOTNULL(node); + string type; + Status status_ret = GetOriginalType(node, type); + if (status_ret != SUCCESS) { + GELOGE(status_ret, "Placeholder with default pass get original type fail."); + return status_ret; + } + if (type == PLACEHOLDERWITHDEFAULT) { + return IsolateAndDeleteNode(node, {0}); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/placeholder_with_default_pass.h b/src/ge/graph/passes/placeholder_with_default_pass.h new file mode 100644 index 00000000..d48a0a5a --- /dev/null +++ b/src/ge/graph/passes/placeholder_with_default_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_PLACEHOLDER_WITH_DEFAULT_PASS_H_ +#define GE_GRAPH_PASSES_PLACEHOLDER_WITH_DEFAULT_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class PlaceholderWithDefaultPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_PLACEHOLDER_WITH_DEFAULT_PASS_H_ diff --git a/src/ge/graph/passes/prevent_gradient_pass.cc b/src/ge/graph/passes/prevent_gradient_pass.cc new file mode 100644 index 00000000..ff4f3cc7 --- /dev/null +++ b/src/ge/graph/passes/prevent_gradient_pass.cc @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/prevent_gradient_pass.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/omg_util.h" + +namespace ge { +Status PreventGradientPass::Run(NodePtr &node) { + GE_CHECK_NOTNULL(node); + string type; + Status status_ret = GetOriginalType(node, type); + if (status_ret != SUCCESS) { + GELOGE(status_ret, "PreventGradientPass get original type fail."); + return status_ret; + } + if (type == PREVENTGRADIENT) { + return IsolateAndDeleteNode(node, {0}); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/prevent_gradient_pass.h b/src/ge/graph/passes/prevent_gradient_pass.h new file mode 100644 index 00000000..8fe02b96 --- /dev/null +++ b/src/ge/graph/passes/prevent_gradient_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_PREVENT_GRADIENT_PASS_H_ +#define GE_GRAPH_PASSES_PREVENT_GRADIENT_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class PreventGradientPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_PREVENT_GRADIENT_PASS_H_ diff --git a/src/ge/graph/passes/print_op_pass.cc b/src/ge/graph/passes/print_op_pass.cc new file mode 100644 index 00000000..c0eedc1f --- /dev/null +++ b/src/ge/graph/passes/print_op_pass.cc @@ -0,0 +1,40 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/print_op_pass.h" + +#include + +namespace ge { +Status PrintOpPass::Run(ge::NodePtr &node) { + GELOGD("PrintOpPass running"); + if (node == nullptr) { + GELOGE(PARAM_INVALID, "param [node] must not be null."); + return PARAM_INVALID; + } + string type; + Status ret = GetOriginalType(node, type); + if (ret != SUCCESS) { + GELOGE(ret, "PrintOpPass: Get node type fail"); + return ret; + } + if (type == "Print") { + // print op has only one input and output data anchor + return IsolateAndDeleteNode(node, {0}); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/print_op_pass.h b/src/ge/graph/passes/print_op_pass.h new file mode 100644 index 00000000..e9e6d67e --- /dev/null +++ b/src/ge/graph/passes/print_op_pass.h @@ -0,0 +1,36 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_PRINT_OP_PASS_H_ +#define GE_GRAPH_PASSES_PRINT_OP_PASS_H_ + +#include "framework/common/debug/ge_log.h" +#include "framework/common/types.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/graph.h" +#include "graph/passes/base_pass.h" +#include "graph/utils/graph_utils.h" +#include "graph/passes/pass_utils.h" + +namespace ge { +class PrintOpPass : public BaseNodePass { + public: + Status Run(ge::NodePtr &node) override; +}; +}; // namespace ge + +#endif // GE_GRAPH_PASSES_PRINT_OP_PASS_H_ diff --git a/src/ge/graph/passes/prune_pass.cc b/src/ge/graph/passes/prune_pass.cc new file mode 100644 index 00000000..b57c52ec --- /dev/null +++ b/src/ge/graph/passes/prune_pass.cc @@ -0,0 +1,89 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/prune_pass.h" + +#include +#include +#include + +#include "common/debug/log.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" + +namespace ge { +Status PrunePass::Run(ge::ComputeGraphPtr graph) { + GELOGD("PrunePass Start"); + if (graph == nullptr) { + GELOGE(GE_GRAPH_ISNULL, "input compute graph is NULL."); + return GE_GRAPH_ISNULL; + } + + std::vector out_nodes; + std::unordered_set nodes; + for (NodePtr &node_ptr : graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node_ptr->GetOpDesc()); + nodes.insert(node_ptr); + if (node_ptr->GetOpDesc()->GetType() == NETOUTPUT) { + out_nodes.push_back(node_ptr); + } + } + + if (out_nodes.empty()) { + GELOGW("graph [%s] does not contain NETOUTPUT type node,no return value. Do nothing!", graph->GetName().c_str()); + return ge::SUCCESS; + } + + std::unordered_set nodes_seen; + for (NodePtr &node_ptr : out_nodes) { + std::deque queue; + queue.push_back(node_ptr); + nodes_seen.insert(node_ptr); + while (!queue.empty()) { + NodePtr node = queue.front(); + GE_CHECK_NOTNULL(node->GetOpDesc()); + queue.pop_front(); + for (auto &in_node : node->GetInAllNodes()) { + if (nodes_seen.insert(in_node).second) { + queue.push_back(in_node); + } + } + } + } + + for (auto &node_ptr : nodes) { + if (nodes_seen.count(node_ptr) != 0) { + continue; + } + if (node_ptr->GetOpDesc()->GetType() == DATA || node_ptr->GetOpDesc()->GetType() == AIPPDATA) { + Status status = ge::GraphUtils::AddEdge(node_ptr->GetOutControlAnchor(), out_nodes[0]->GetInControlAnchor()); + if (status != ge::SUCCESS) { + GELOGE(INTERNAL_ERROR, "[PrunePass] add control edge fail between DATA node[%s] and NETOUTPUT node[%s]!", + node_ptr->GetOpDesc()->GetName().c_str(), out_nodes[0]->GetOpDesc()->GetName().c_str()); + return INTERNAL_ERROR; + } + GELOGI("[PrunePass] add extra control edge between DATA node[%s] and NETOUTPUT node[%s]!", + node_ptr->GetOpDesc()->GetName().c_str(), 
out_nodes[0]->GetOpDesc()->GetName().c_str()); + continue; + } + /// Common function:[RemoveNode] will delete not only input node but its constant input node also will be deleted + (void)graph->RemoveNode(node_ptr); + GELOGI("[PrunePass] remove graph node [%s]!", node_ptr->GetOpDesc()->GetName().c_str()); + } + return ge::SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/prune_pass.h b/src/ge/graph/passes/prune_pass.h new file mode 100644 index 00000000..4bc6f184 --- /dev/null +++ b/src/ge/graph/passes/prune_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_PRUNE_PASS_H_ +#define GE_GRAPH_PASSES_PRUNE_PASS_H_ + +#include "inc/graph_pass.h" + +namespace ge { +class PrunePass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph) override; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_PRUNE_PASS_H_ diff --git a/src/ge/graph/passes/reshape_remove_pass.cc b/src/ge/graph/passes/reshape_remove_pass.cc new file mode 100644 index 00000000..0491270d --- /dev/null +++ b/src/ge/graph/passes/reshape_remove_pass.cc @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/reshape_remove_pass.h" + +#include "graph/passes/pass_utils.h" + +namespace ge { +namespace { +const int kReshapeDataIndex = 0; +const int kReshapeShapeIndex = 1; +} // namespace + +Status ReshapeRemovePass::Run(NodePtr &node) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + if (node->GetType() != RESHAPE) { + return SUCCESS; + } + GELOGD("Remove reshape node %s", node->GetName().c_str()); + auto ret = PassUtils::UnlinkNodeWithControlCopy(node, kReshapeShapeIndex); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed unlink shape edge for reshape node %s", node->GetName().c_str()); + return ret; + } + return IsolateAndDeleteNode(node, {kReshapeDataIndex}); +} +} // namespace ge diff --git a/src/ge/graph/passes/reshape_remove_pass.h b/src/ge/graph/passes/reshape_remove_pass.h new file mode 100644 index 00000000..044bbdb7 --- /dev/null +++ b/src/ge/graph/passes/reshape_remove_pass.h @@ -0,0 +1,29 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_RESHAPE_REMOVE_PASS_H_ +#define GE_GRAPH_PASSES_RESHAPE_REMOVE_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class ReshapeRemovePass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_RESHAPE_REMOVE_PASS_H_ diff --git a/src/ge/graph/passes/resource_pair_add_control_pass.cc b/src/ge/graph/passes/resource_pair_add_control_pass.cc new file mode 100644 index 00000000..7c896867 --- /dev/null +++ b/src/ge/graph/passes/resource_pair_add_control_pass.cc @@ -0,0 +1,98 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/resource_pair_add_control_pass.h" + +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/tensor_adapter.h" + +namespace { +const char *const kSeparate = "/"; +const std::map kResourcePairType = {{"StackPush", "StackPop"}}; +const std::set kResourceTypes = {"StackPush", "StackPop"}; +} // namespace + +namespace ge { +Status ResourcePairAddControlPass::Run(ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + GELOGD("ResourcePairAddControlPass pass start."); + std::map> prefix_2_node_per_type; + // find all node of condition type, store with type and scope prefix key + for (auto &node : graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node); + if (kResourceTypes.find(node->GetType()) != kResourceTypes.end()) { + std::string node_name = node->GetName(); + std::string node_prefix; + size_t last_separate_index = node_name.find_last_of(kSeparate); + if (last_separate_index != std::string::npos) { + node_prefix = node_name.substr(0, last_separate_index); + } + prefix_2_node_per_type[node->GetType()][node_prefix] = node; + GELOGD("ResourcePairAddControlPass insert prefix:%s, op_name:%s, op_type:%s", node_prefix.c_str(), + node_name.c_str(), node->GetType().c_str()); + } + } + + // according type pair, find same prefix node, add control edge + for (auto &resource_type_pair : kResourcePairType) { + auto from_item_prefix_2_node = prefix_2_node_per_type.find(resource_type_pair.first); + if (from_item_prefix_2_node != prefix_2_node_per_type.end()) { + for (auto &prefix_2_node : from_item_prefix_2_node->second) { + const std::string &prefix = prefix_2_node.first; + NodePtr from_node = prefix_2_node.second; + GE_CHECK_NOTNULL(from_node); + auto to_item_prefix_2_node = prefix_2_node_per_type.find(resource_type_pair.second); + if (to_item_prefix_2_node == 
prefix_2_node_per_type.end()) { + GELOGE(PARAM_INVALID, "find peer type node fail, suffix:%s, from_type:%s, to_type:%s", prefix.c_str(), + resource_type_pair.first.c_str(), resource_type_pair.second.c_str()); + return PARAM_INVALID; + } + auto to_prefix_2_node = to_item_prefix_2_node->second.find(prefix); + if (to_prefix_2_node == to_item_prefix_2_node->second.end()) { + GELOGE(PARAM_INVALID, "find peer prefix node fail, suffix:%s, from_type:%s, to_type:%s", prefix.c_str(), + resource_type_pair.first.c_str(), resource_type_pair.second.c_str()); + return PARAM_INVALID; + } + NodePtr to_node = to_prefix_2_node->second; + GE_CHECK_NOTNULL(to_node); + auto from_anchor = from_node->GetOutControlAnchor(); + auto to_anchor = to_node->GetInControlAnchor(); + GE_CHECK_NOTNULL(from_anchor); + GE_CHECK_NOTNULL(to_anchor); + graphStatus ret = from_anchor->LinkTo(to_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(PARAM_INVALID, "link fail, from_node:%s, to_node:%s, from_type:%s, to_type:%s", + from_node->GetName().c_str(), to_node->GetName().c_str(), resource_type_pair.first.c_str(), + resource_type_pair.second.c_str()); + return PARAM_INVALID; + } + GELOGD("link success, from_node:%s, to_node:%s, from_type:%s, to_type:%s", from_node->GetName().c_str(), + to_node->GetName().c_str(), resource_type_pair.first.c_str(), resource_type_pair.second.c_str()); + } + } + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/resource_pair_add_control_pass.h b/src/ge/graph/passes/resource_pair_add_control_pass.h new file mode 100644 index 00000000..02ebd78f --- /dev/null +++ b/src/ge/graph/passes/resource_pair_add_control_pass.h @@ -0,0 +1,30 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_RESOURCE_PAIR_ADD_CONTROL_PASS_H_ +#define GE_GRAPH_PASSES_RESOURCE_PAIR_ADD_CONTROL_PASS_H_ + +#include "inc/graph_pass.h" + +namespace ge { +class ResourcePairAddControlPass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph) override; + ResourcePairAddControlPass() = default; + ~ResourcePairAddControlPass() = default; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_RESOURCE_PAIR_ADD_CONTROL_PASS_H_ diff --git a/src/ge/graph/passes/resource_pair_remove_control_pass.cc b/src/ge/graph/passes/resource_pair_remove_control_pass.cc new file mode 100644 index 00000000..2bcb7db1 --- /dev/null +++ b/src/ge/graph/passes/resource_pair_remove_control_pass.cc @@ -0,0 +1,97 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/resource_pair_remove_control_pass.h" + +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/tensor_adapter.h" + +namespace { +const char *const kSeparate = "/"; +const std::map kResourcePairType = {{"StackPush", "StackPop"}}; +const std::set kResourceTypes = {"StackPush", "StackPop"}; +} // namespace + +namespace ge { +Status ResourcePairRemoveControlPass::Run(ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + GELOGD("ResourcePairRemoveControlPass pass start."); + std::map> prefix_2_node_per_type; + // find all node of condition type, store with type and scope prefix key + for (auto &node : graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node); + if (kResourceTypes.find(node->GetType()) != kResourceTypes.end()) { + std::string node_name = node->GetName(); + std::string node_prefix; + size_t last_separate_index = node_name.find_last_of(kSeparate); + if (last_separate_index != std::string::npos) { + node_prefix = node_name.substr(0, last_separate_index); + } + prefix_2_node_per_type[node->GetType()][node_prefix] = node; + GELOGD("ResourcePairRemoveControlPass insert prefix:%s, op_name:%s, op_type:%s", node_prefix.c_str(), + node_name.c_str(), node->GetType().c_str()); + } + } + + // according type pair, find same prefix node, remove control edge + for (auto &resource_type_pair : kResourcePairType) { + auto from_item_prefix_2_node = prefix_2_node_per_type.find(resource_type_pair.first); + if (from_item_prefix_2_node != prefix_2_node_per_type.end()) { + for (auto &prefix_2_node : from_item_prefix_2_node->second) { + const std::string &prefix = prefix_2_node.first; + NodePtr from_node = prefix_2_node.second; + GE_CHECK_NOTNULL(from_node); + auto to_item_prefix_2_node = prefix_2_node_per_type.find(resource_type_pair.second); + if (to_item_prefix_2_node == 
prefix_2_node_per_type.end()) { + GELOGE(INTERNAL_ERROR, "find peer type node fail, suffix:%s, from_type:%s, to_type:%s", prefix.c_str(), + resource_type_pair.first.c_str(), resource_type_pair.second.c_str()); + return domi::PARAM_INVALID; + } + auto to_prefix_2_node = to_item_prefix_2_node->second.find(prefix); + if (to_prefix_2_node == to_item_prefix_2_node->second.end()) { + GELOGE(INTERNAL_ERROR, "find peer prefix node fail, suffix:%s, from_type:%s, to_type:%s", prefix.c_str(), + resource_type_pair.first.c_str(), resource_type_pair.second.c_str()); + return domi::PARAM_INVALID; + } + NodePtr to_node = to_prefix_2_node->second; + GE_CHECK_NOTNULL(to_node); + auto from_anchor = from_node->GetOutControlAnchor(); + GE_CHECK_NOTNULL(from_anchor); + auto to_anchor = to_node->GetInControlAnchor(); + graphStatus ret = from_anchor->Unlink(to_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "unlink fail, from_node:%s, to_node:%s, from_type:%s, to_type:%s", + from_node->GetName().c_str(), to_node->GetName().c_str(), resource_type_pair.first.c_str(), + resource_type_pair.second.c_str()); + return domi::PARAM_INVALID; + } + GELOGD("unlink success, from_node:%s, to_node:%s, from_type:%s, to_type:%s", from_node->GetName().c_str(), + to_node->GetName().c_str(), resource_type_pair.first.c_str(), resource_type_pair.second.c_str()); + } + } + } + return domi::SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/resource_pair_remove_control_pass.h b/src/ge/graph/passes/resource_pair_remove_control_pass.h new file mode 100644 index 00000000..ab40b130 --- /dev/null +++ b/src/ge/graph/passes/resource_pair_remove_control_pass.h @@ -0,0 +1,30 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_RESOURCE_PAIR_REMOVE_CONTROL_PASS_H_ +#define GE_GRAPH_PASSES_RESOURCE_PAIR_REMOVE_CONTROL_PASS_H_ + +#include "inc/graph_pass.h" + +namespace ge { +class ResourcePairRemoveControlPass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph) override; + ResourcePairRemoveControlPass() = default; + ~ResourcePairRemoveControlPass() = default; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_RESOURCE_PAIR_REMOVE_CONTROL_PASS_H_ diff --git a/src/ge/graph/passes/same_transdata_breadth_fusion_pass.cc b/src/ge/graph/passes/same_transdata_breadth_fusion_pass.cc new file mode 100644 index 00000000..9074d35e --- /dev/null +++ b/src/ge/graph/passes/same_transdata_breadth_fusion_pass.cc @@ -0,0 +1,761 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/same_transdata_breadth_fusion_pass.h" + +#include +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "init/gelib.h" + +namespace { +const char *const kRemainNode = "node_remain"; +const int kNoTransOp = 1; +} // namespace + +namespace ge { +void SameTransdataBreadthFusionPass::GetSubGraphNodesInfo() { + vector> before_transdata_nodes(sub_graph_anchors_.size()); + vector> all_transdata_nodes; + for (size_t i = 0; i < sub_graph_anchors_.size(); ++i) { + auto nodes_anchor = sub_graph_anchors_[i]; + auto iter = nodes_anchor.begin(); + auto first_out_anchor = iter->first; + GE_CHECK_NOTNULL_JUST_RETURN(first_out_anchor); + before_transdata_nodes[i].push_back(first_out_anchor->GetOwnerNode()); + GELOGD("index:%zu, node:%s, type:%s", i, first_out_anchor->GetOwnerNode()->GetName().c_str(), + first_out_anchor->GetOwnerNode()->GetType().c_str()); + while (iter != nodes_anchor.end()) { + auto in_anchor = iter->second; + GE_CHECK_NOTNULL_JUST_RETURN(in_anchor); + auto in_node = in_anchor->GetOwnerNode(); + GELOGD("index:%zu, node:%s, type:%s", i, first_out_anchor->GetOwnerNode()->GetName().c_str(), + first_out_anchor->GetOwnerNode()->GetType().c_str()); + if (in_node->GetType() == TRANSDATA) { + all_transdata_nodes.emplace_back(i, in_anchor); + } else { + before_transdata_nodes[i].push_back(in_node); + } + ++iter; + } + GELOGD("index:%zu, before trandata node size:%zu", i, before_transdata_nodes[i].size()); + } + before_transdata_nodes_.swap(before_transdata_nodes); + all_transdata_nodes_.swap(all_transdata_nodes); +} + +OpDescPtr SameTransdataBreadthFusionPass::GetCastOp(const GeTensorDesc &in_desc, const GeTensorDesc &out_desc) { + static uint32_t fusion_cast_op_count = 1; + std::stringstream cast_op_name; + 
cast_op_name << "fusion_cast_" << fusion_cast_op_count++; + auto node_op = ge::OperatorFactory::CreateOperator(cast_op_name.str(), CAST); + auto cast_op = ge::OpDescUtils::GetOpDescFromOperator(node_op); + if (cast_op == nullptr) { + GELOGE(INTERNAL_ERROR, "new fusion cast op failed!"); + return nullptr; + } + const int default_output_index = 0; + const int default_input_index = 0; + if (cast_op->GetInputsSize() == 0) { + if (cast_op->AddInputDesc(in_desc) != GRAPH_SUCCESS) { + GELOGW("AddInputDesc fail."); + } + } else { + if (cast_op->UpdateInputDesc(default_input_index, in_desc) != GRAPH_SUCCESS) { + GELOGW("UpdateInputDesc fail"); + } + } + + if (cast_op->GetOutputsSize() == 0) { + if (cast_op->AddOutputDesc(out_desc) != GRAPH_SUCCESS) { + GELOGW("AddOutputDesc fail."); + } + } else { + if (cast_op->UpdateOutputDesc(default_output_index, out_desc) != GRAPH_SUCCESS) { + GELOGW("UpdateOutputDesc fail"); + } + } + if (!AttrUtils::SetInt(cast_op, CAST_ATTR_DST_TYPE, static_cast(out_desc.GetDataType()))) { + GELOGE(INTERNAL_ERROR, "set dst_type attr failed"); + return nullptr; + } + return cast_op; +} + +void SameTransdataBreadthFusionPass::InsertSameTransdataNodeIndex(int anchors_index, + vector &same_transdata_nodes) { + auto same_iter = same_transdata_nodes.begin(); + while (same_iter != same_transdata_nodes.end()) { + if (before_transdata_nodes_[anchors_index].size() <= before_transdata_nodes_[*same_iter].size()) { + same_transdata_nodes.insert(same_iter, anchors_index); + return; + } + ++same_iter; + } + + same_transdata_nodes.push_back(anchors_index); +} + +void SameTransdataBreadthFusionPass::GetSameTransdataNode(vector &same_transdata_nodes) { + auto iter = all_transdata_nodes_.begin(); + same_transdata_nodes.push_back(iter->first); + auto node_for_compare_in_anchor = iter->second; + GE_CHECK_NOTNULL_JUST_RETURN(node_for_compare_in_anchor); + auto node_for_compare = node_for_compare_in_anchor->GetOwnerNode(); + auto op_desc_for_compare = 
node_for_compare->GetOpDesc(); + GE_CHECK_NOTNULL_JUST_RETURN(op_desc_for_compare); + bool op_compare_label = op_desc_for_compare->HasAttr(ATTR_NAME_STREAM_LABEL); + auto input_desc_for_compare = op_desc_for_compare->GetInputDescPtr(node_for_compare_in_anchor->GetIdx()); + GE_CHECK_NOTNULL_JUST_RETURN(input_desc_for_compare); + auto output_desc_for_compare = op_desc_for_compare->GetOutputDescPtr(0); + GE_CHECK_NOTNULL_JUST_RETURN(output_desc_for_compare); + iter = all_transdata_nodes_.erase(iter); + bool op_tmp_label = false; + while (iter != all_transdata_nodes_.end()) { + auto in_anchor = iter->second; + if (in_anchor == nullptr) { + continue; + } + auto node_tmp = in_anchor->GetOwnerNode(); + if (node_tmp == node_for_compare) { + ++iter; + continue; + } + GE_CHECK_NOTNULL_JUST_RETURN(node_tmp); + auto op_desc_tmp = node_tmp->GetOpDesc(); + GE_CHECK_NOTNULL_JUST_RETURN(op_desc_tmp); + auto input_desc_tmp = op_desc_tmp->GetInputDescPtr(in_anchor->GetIdx()); + auto output_desc_tmp = op_desc_tmp->GetOutputDescPtr(0); + op_tmp_label = op_desc_tmp->HasAttr(ATTR_NAME_STREAM_LABEL); + GE_CHECK_NOTNULL_JUST_RETURN(input_desc_tmp); + GE_CHECK_NOTNULL_JUST_RETURN(output_desc_tmp); + + if ((op_compare_label == op_tmp_label) && (input_desc_tmp->GetFormat() == input_desc_for_compare->GetFormat()) && + (output_desc_tmp->GetFormat() == output_desc_for_compare->GetFormat())) { + GELOGD("same transdata node:%s, src node:%s", node_tmp->GetName().c_str(), node_for_compare->GetName().c_str()); + InsertSameTransdataNodeIndex(iter->first, same_transdata_nodes); + iter = all_transdata_nodes_.erase(iter); + } else { + ++iter; + } + } +} + +graphStatus SameTransdataBreadthFusionPass::ReLinkDataOutput2PreNode(const NodePtr &transdata_node, + const OutDataAnchorPtr &pre_out_anchor, + const NodePtr &relink_node) { + GE_CHECK_NOTNULL(pre_out_anchor); + GE_CHECK_NOTNULL(transdata_node); + for (auto &out_anchor : transdata_node->GetAllOutDataAnchors()) { + // relink data edge + for (auto 
&transdata_peer_in_anchor : out_anchor->GetPeerInDataAnchors()) { + if (transdata_peer_in_anchor->GetOwnerNode() == relink_node) { + continue; + } + GELOGI("remove edge.src:%s, dst:%s", out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::RemoveEdge(out_anchor, transdata_peer_in_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "remove edge failed!src node:%s, dst node:%s", transdata_node->GetName().c_str(), + transdata_peer_in_anchor->GetOwnerNode()->GetName().c_str()); + return GRAPH_FAILED; + } + GELOGI("add edge.src:%s, dst:%s", pre_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(pre_out_anchor, transdata_peer_in_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add edge failed!src node:%s, dst node:%s", + pre_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_anchor->GetOwnerNode()->GetName().c_str()); + return GRAPH_FAILED; + } + } + } + return GRAPH_SUCCESS; +} + +graphStatus SameTransdataBreadthFusionPass::ReLinkOutDataPeerInControlNodes2PreNode( + const NodePtr &transdata_node, const OutDataAnchorPtr &pre_out_anchor) { + GE_CHECK_NOTNULL(pre_out_anchor); + GE_CHECK_NOTNULL(transdata_node); + auto transdata_peer_out_control_anchor = pre_out_anchor->GetOwnerNode()->GetOutControlAnchor(); + for (auto &out_anchor : transdata_node->GetAllOutDataAnchors()) { + for (auto &transdata_peer_in_control_anchor : out_anchor->GetPeerInControlAnchors()) { + GELOGD("remove edge.src:%s, dst:%s", out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::RemoveEdge(out_anchor, transdata_peer_in_control_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "remove edge failed!src node:%s, dst node:%s", transdata_node->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + return 
GRAPH_FAILED; + } + + if (transdata_peer_out_control_anchor == nullptr) { + GELOGD("add edge.src:%s, dst:%s", pre_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(pre_out_anchor, transdata_peer_in_control_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add edge failed!src node:%s, dst node:%s", + pre_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + return GRAPH_FAILED; + } + } else { + GELOGD("add edge.src node:%s, dst node:%s", pre_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(transdata_peer_out_control_anchor, transdata_peer_in_control_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add edge failed!src node:%s, dst node:%s", + pre_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + return GRAPH_FAILED; + } + } + } + } + return GRAPH_SUCCESS; +} + +graphStatus SameTransdataBreadthFusionPass::ReLinkTransdataOutput2PreNode(const NodePtr &transdata_node, + const OutDataAnchorPtr &pre_out_anchor, + const NodePtr &relink_node) { + GE_CHECK_NOTNULL(pre_out_anchor); + if (ReLinkDataOutput2PreNode(transdata_node, pre_out_anchor, relink_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + if (ReLinkOutDataPeerInControlNodes2PreNode(transdata_node, pre_out_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + auto transdata_peer_out_control_anchor = pre_out_anchor->GetOwnerNode()->GetOutControlAnchor(); + + return ReLinkTransdataControlOutput2PreNode(transdata_node, pre_out_anchor, transdata_peer_out_control_anchor); +} + +graphStatus SameTransdataBreadthFusionPass::ReLinkOutControlPeerInControlAnchors( + const NodePtr &transdata_node_keep, const OutDataAnchorPtr &pre_out_anchor, + const OutControlAnchorPtr 
&transdata_peer_out_control_anchor) { + GE_CHECK_NOTNULL(transdata_node_keep); + GE_CHECK_NOTNULL(pre_out_anchor); + auto out_control_anchor = transdata_node_keep->GetOutControlAnchor(); + if (out_control_anchor == nullptr) { + return GRAPH_SUCCESS; + } + + for (auto &transdata_peer_in_control_anchor : out_control_anchor->GetPeerInControlAnchors()) { + GELOGD("remove edge.src:%s, dst:%s", transdata_node_keep->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::RemoveEdge(out_control_anchor, transdata_peer_in_control_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "remove transdata control edge failed!"); + return GRAPH_FAILED; + } + + if (transdata_peer_out_control_anchor == nullptr) { + GELOGD("add edge.src:%s, dst:%s", pre_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(pre_out_anchor, transdata_peer_in_control_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add control edge failed!"); + return GRAPH_FAILED; + } + } else { + GELOGD("add edge.src:%s, dst:%s", transdata_peer_out_control_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_control_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(transdata_peer_out_control_anchor, transdata_peer_in_control_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add control edge failed!"); + return GRAPH_FAILED; + } + } + } + return GRAPH_SUCCESS; +} + +graphStatus SameTransdataBreadthFusionPass::ReLinkOutControlPeerInDataAnchors( + const NodePtr &transdata_node_keep, const OutDataAnchorPtr &pre_out_anchor, + const OutControlAnchorPtr &transdata_peer_out_control_anchor) { + GE_CHECK_NOTNULL(transdata_node_keep); + GE_CHECK_NOTNULL(pre_out_anchor); + auto out_control_anchor = transdata_node_keep->GetOutControlAnchor(); + if (out_control_anchor == nullptr) { + return GRAPH_SUCCESS; + } + for (auto &transdata_peer_in_data_anchor : 
out_control_anchor->GetPeerInDataAnchors()) { + if (transdata_peer_in_data_anchor == nullptr || transdata_peer_in_data_anchor->GetOwnerNode() == nullptr) { + continue; + } + GELOGD("remove edge.src:%s, dst:%s", transdata_node_keep->GetName().c_str(), + transdata_peer_in_data_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::RemoveEdge(out_control_anchor, transdata_peer_in_data_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "remove transdata control edge failed!"); + return GRAPH_FAILED; + } + + if (transdata_peer_out_control_anchor == nullptr) { + GELOGD("add edge.src:%s, dst:%s", pre_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_data_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(pre_out_anchor, transdata_peer_in_data_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add control edge failed!"); + return GRAPH_FAILED; + } + } else { + GELOGD("add edge.src:%s, dst:%s", transdata_peer_out_control_anchor->GetOwnerNode()->GetName().c_str(), + transdata_peer_in_data_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(transdata_peer_out_control_anchor, transdata_peer_in_data_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add control edge failed!"); + return GRAPH_FAILED; + } + } + } + return GRAPH_SUCCESS; +} + +graphStatus SameTransdataBreadthFusionPass::ReLinkTransdataControlOutput2PreNode( + const NodePtr &transdata_node_keep, const OutDataAnchorPtr &pre_out_anchor, + const OutControlAnchorPtr &transdata_peer_out_control_anchor) { + if (ReLinkOutControlPeerInControlAnchors(transdata_node_keep, pre_out_anchor, transdata_peer_out_control_anchor) != + GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + return ReLinkOutControlPeerInDataAnchors(transdata_node_keep, pre_out_anchor, transdata_peer_out_control_anchor); +} + +graphStatus SameTransdataBreadthFusionPass::Run(ComputeGraphPtr graph) { + GE_TIMESTAMP_START(SameTransdataBreadthFusionPass); + GELOGI("[SameTransdataBreadthFusionPass]: 
optimize begin."); + if (graph == nullptr) { + return GRAPH_SUCCESS; + } + + for (auto &node : graph->GetAllNodes()) { + if (IsTransOp(node) || node->GetOutDataNodes().size() <= 1) { + continue; + } + + GELOGD("Current normal node name: %s, type: %s.", node->GetName().c_str(), node->GetType().c_str()); + for (auto &out_anchor : node->GetAllOutDataAnchors()) { + vector>> sub_graph_anchors; + std::vector> nodes_list; + if (GetSubGraphsBetweenNormalAndTransdataNode(out_anchor, sub_graph_anchors, nodes_list) != GRAPH_SUCCESS) { + GELOGW("get transop failed!"); + continue; + } + + if (sub_graph_anchors.size() <= 1) { + continue; + } + sub_graph_anchors_.swap(sub_graph_anchors); + + // check reshape node + GetSubGraphNodesInfo(); + GELOGD("all trandata node size:%zu", all_transdata_nodes_.size()); + if (ExtractTransNode(graph) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + } + + GELOGI("[SameTransdataBreadthFusionPass]: Optimize success."); + GE_TIMESTAMP_END(SameTransdataBreadthFusionPass, "GraphManager::SameTransdataBreadthFusionPass"); + return GRAPH_SUCCESS; +} + +graphStatus SameTransdataBreadthFusionPass::ExtractTransNode(const ComputeGraphPtr &graph) { + while (all_transdata_nodes_.size() > 1) { + vector same_transdata_nodes; + GetSameTransdataNode(same_transdata_nodes); + GELOGD("same transdata node size:%zu", same_transdata_nodes.size()); + // reuse transdata ,new cast + if (same_transdata_nodes.size() <= 1) { + continue; + } + + int anchors_index = same_transdata_nodes[0]; + auto transdata_in_anchor = sub_graph_anchors_[anchors_index].back().second; + GE_CHECK_NOTNULL(transdata_in_anchor); + auto transdata_node_keep = transdata_in_anchor->GetOwnerNode(); + auto transdata_out_anchor = transdata_node_keep->GetOutDataAnchor(0); + GELOGD("anchor index %d, before transdata node size:%zu", anchors_index, + before_transdata_nodes_[anchors_index].size()); + if (before_transdata_nodes_[anchors_index].size() > 1) { + if (RelinkRemainTransdata(graph, 
same_transdata_nodes) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + if (LinkNewCastNode2RemainTransdata(graph, same_transdata_nodes, transdata_out_anchor, transdata_node_keep) != + GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +graphStatus SameTransdataBreadthFusionPass::RelinkRemainTransdata(const ComputeGraphPtr &graph, + const vector &same_transdata_nodes) { + int anchors_index = same_transdata_nodes[0]; + auto head_node_anchor = sub_graph_anchors_[anchors_index][0].first; + GE_CHECK_NOTNULL(head_node_anchor); + auto head_node = head_node_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(head_node->GetOpDesc()); + auto head_output_desc = head_node->GetOpDesc()->GetOutputDescPtr(head_node_anchor->GetIdx()); + auto transdata_in_anchor = sub_graph_anchors_[anchors_index].back().second; + GE_CHECK_NOTNULL(transdata_in_anchor); + auto transdata_node_keep = transdata_in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(transdata_node_keep->GetOpDesc()); + auto transdata_out_anchor = transdata_node_keep->GetOutDataAnchor(0); + GELOGD("head node:%s, transdata node keep:%s", head_node->GetName().c_str(), transdata_node_keep->GetName().c_str()); + bool reuse_nodes = AllNodeBeforeTransdataHasOneDataOut(anchors_index); + UpdateTransdataDesc(transdata_in_anchor, transdata_node_keep->GetOpDesc(), head_output_desc); + auto transdata_peer_out_anchor = sub_graph_anchors_[anchors_index].back().first; + GE_CHECK_NOTNULL(transdata_peer_out_anchor); + auto transdata_peer_out_node = transdata_peer_out_anchor->GetOwnerNode(); + GELOGI("remove edge.src:%s, dst:%s", transdata_peer_out_node->GetName().c_str(), + transdata_node_keep->GetName().c_str()); + if (GraphUtils::RemoveEdge(transdata_peer_out_anchor, transdata_in_anchor) != GRAPH_SUCCESS) { + GELOGW("remove edge failed!out node %s, in node %s", transdata_peer_out_node->GetName().c_str(), + transdata_node_keep->GetName().c_str()); + } + + GELOGI("add edge.out node %s, in node %s", 
head_node->GetName().c_str(), transdata_node_keep->GetName().c_str()); + if (GraphUtils::AddEdge(head_node_anchor, transdata_in_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add edge failed!out node %s, in node %s", head_node->GetName().c_str(), + transdata_node_keep->GetName().c_str()); + return GRAPH_FAILED; + } + + NodePtr relink_node; + // relink to transdata output nodes + if (reuse_nodes) { + if (ReuseNodesBeforeTransdata(anchors_index, transdata_out_anchor, relink_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + if (ReLinkTransdataOutput2PreNode(transdata_node_keep, transdata_peer_out_anchor, relink_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } else { + OutDataAnchorPtr pre_out_anchor = transdata_out_anchor; + if (AddCastNode(graph, same_transdata_nodes[0], pre_out_anchor, relink_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + if (ReLinkTransdataOutput2PreNode(transdata_node_keep, pre_out_anchor, relink_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +void SameTransdataBreadthFusionPass::UpdateTransdataDesc(const InDataAnchorPtr &transdata_in_anchor, + const OpDescPtr &transdata_op_desc, + const ConstGeTensorDescPtr &head_output_desc) { + if (transdata_op_desc == nullptr || transdata_in_anchor == nullptr || head_output_desc == nullptr) { + return; + } + auto mutable_input_desc = transdata_op_desc->MutableInputDesc(transdata_in_anchor->GetIdx()); + GE_CHECK_NOTNULL_JUST_RETURN(mutable_input_desc); + mutable_input_desc->SetDataType(head_output_desc->GetDataType()); + mutable_input_desc->SetOriginDataType(head_output_desc->GetOriginDataType()); + auto mutable_output_desc = transdata_op_desc->MutableOutputDesc(0); + GE_CHECK_NOTNULL_JUST_RETURN(mutable_output_desc); + mutable_output_desc->SetDataType(head_output_desc->GetDataType()); + mutable_output_desc->SetOriginDataType(head_output_desc->GetOriginDataType()); + // maybe need to check support +} + +bool 
SameTransdataBreadthFusionPass::AllNodeBeforeTransdataHasOneDataOut(int anchors_index) { + for (size_t i = 1; i < before_transdata_nodes_[anchors_index].size(); ++i) { + auto node = before_transdata_nodes_[anchors_index][i]; + if (node == nullptr) { + return false; + } + if (node->GetOutDataNodes().size() > 1 || node->GetInDataNodes().size() > 1) { + return false; + } + } + return true; +} + +graphStatus SameTransdataBreadthFusionPass::ReuseNodesBeforeTransdata(int anchors_index, + const OutDataAnchorPtr &transdata_out_anchor, + NodePtr &relink_node) { + auto head_node_anchor = sub_graph_anchors_[anchors_index][0].first; + auto head_node_peer_anchor = sub_graph_anchors_[anchors_index][0].second; + GE_CHECK_NOTNULL(head_node_anchor); + GE_CHECK_NOTNULL(head_node_peer_anchor); + GE_CHECK_NOTNULL(transdata_out_anchor); + GELOGI("remove edge.src:%s, dst:%s", head_node_anchor->GetOwnerNode()->GetName().c_str(), + head_node_peer_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::RemoveEdge(head_node_anchor, head_node_peer_anchor) != GRAPH_SUCCESS) { + GELOGW("remove edge failed!src:%s, dst:%s", head_node_anchor->GetOwnerNode()->GetName().c_str(), + head_node_peer_anchor->GetOwnerNode()->GetName().c_str()); + } + + NodePtr transdata_node_keep = transdata_out_anchor->GetOwnerNode(); + if (before_transdata_nodes_[anchors_index].size() == kNoTransOp) { + return GRAPH_SUCCESS; + } + GELOGI("add edge.src:%s, dst:%s", transdata_node_keep->GetName().c_str(), + head_node_peer_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(transdata_out_anchor, head_node_peer_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add edge.src:%s, dst:%s", transdata_node_keep->GetName().c_str(), + head_node_peer_anchor->GetOwnerNode()->GetName().c_str()); + return GRAPH_FAILED; + } + relink_node = head_node_peer_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(transdata_node_keep->GetOpDesc()); + auto transdata_output_desc = 
transdata_node_keep->GetOpDesc()->GetOutputDescPtr(0); + GE_CHECK_NOTNULL(transdata_output_desc); + for (size_t i = 0; i < sub_graph_anchors_[anchors_index].size() - 1; ++i) { + auto in_data_anchor = sub_graph_anchors_[anchors_index][i].second; + GE_CHECK_NOTNULL(in_data_anchor); + auto in_owner_node = in_data_anchor->GetOwnerNode(); + auto in_op_desc = in_owner_node->GetOpDesc(); + GE_CHECK_NOTNULL(in_op_desc); + auto input_desc = in_op_desc->GetInputDesc(in_data_anchor->GetIdx()); + CopyTensorDesc(transdata_output_desc, input_desc); + if (in_op_desc->UpdateInputDesc(in_data_anchor->GetIdx(), input_desc) != GRAPH_SUCCESS) { + GELOGE(FAILED, "UpdateInputDesc fail."); + return FAILED; + } + int output_idx = sub_graph_anchors_[anchors_index][i + 1].first->GetIdx(); + auto output_desc = in_op_desc->GetOutputDesc(output_idx); + CopyTensorDesc(transdata_output_desc, output_desc); + GE_IF_BOOL_EXEC(in_op_desc->UpdateOutputDesc(output_idx, output_desc) != GRAPH_SUCCESS, + GELOGE(GRAPH_FAILED, "update input desc failed"); + return GRAPH_FAILED); + // relink control edge + if (RelinkInControlEdge(in_owner_node, transdata_node_keep) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +void SameTransdataBreadthFusionPass::CopyTensorDesc(const ConstGeTensorDescPtr &src_desc, GeTensorDesc &dst_desc) { + if (src_desc == nullptr) { + return; + } + dst_desc.SetFormat(src_desc->GetFormat()); + dst_desc.SetOriginFormat(src_desc->GetOriginFormat()); + dst_desc.SetShape(src_desc->GetShape()); + dst_desc.SetOriginShape(src_desc->GetOriginShape()); + uint32_t real_dim = 0; + if (TensorUtils::GetRealDimCnt(*src_desc, real_dim) == GRAPH_SUCCESS) { + TensorUtils::SetRealDimCnt(dst_desc, real_dim); + } +} + +graphStatus SameTransdataBreadthFusionPass::LinkNewCastNode2RemainTransdata( + const ComputeGraphPtr &graph, const vector &same_transdata_nodes, const OutDataAnchorPtr &transdata_out_anchor, + const NodePtr &transdata_node_keep) { + for (size_t i = 1; i < 
same_transdata_nodes.size(); ++i) { + int anchors_index = same_transdata_nodes[i]; + bool reuse_nodes = AllNodeBeforeTransdataHasOneDataOut(anchors_index); + auto transdata_peer_out_anchor = sub_graph_anchors_[anchors_index].back().first; + GE_CHECK_NOTNULL(transdata_peer_out_anchor); + auto transdata_remove_in_anchor = sub_graph_anchors_[anchors_index].back().second; + GE_CHECK_NOTNULL(transdata_remove_in_anchor); + auto transdata_node_remove = transdata_remove_in_anchor->GetOwnerNode(); + if (transdata_node_remove->GetInDataNodes().size() > 1) { + continue; + } + GELOGI("remove edge.src:%s, dst:%s", transdata_peer_out_anchor->GetOwnerNode()->GetName().c_str(), + transdata_remove_in_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::RemoveEdge(transdata_peer_out_anchor, transdata_remove_in_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + OutDataAnchorPtr pre_out_anchor = nullptr; + NodePtr relink_node = nullptr; + if (reuse_nodes) { + // reuse nodes before transdata + if (ReuseNodesBeforeTransdata(anchors_index, transdata_out_anchor, relink_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + if (before_transdata_nodes_[anchors_index].size() > kNoTransOp) { + pre_out_anchor = transdata_peer_out_anchor; + } else { + pre_out_anchor = transdata_out_anchor; + } + } else { + // miss cast control edge + pre_out_anchor = transdata_out_anchor; + if (AddCastNode(graph, same_transdata_nodes[i], pre_out_anchor, relink_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + if (ReLinkTransdataOutput2PreNode(transdata_node_remove, pre_out_anchor, relink_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + if (RelinkInControlEdge(transdata_node_remove, transdata_node_keep) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + if (graph->RemoveNode(transdata_node_remove) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "remove node %s failed!", transdata_node_remove->GetName().c_str()); + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + 
+graphStatus SameTransdataBreadthFusionPass::RelinkInControlEdge(const NodePtr &node_src, const NodePtr &node_dst) { + GE_CHECK_NOTNULL(node_dst); + GE_CHECK_NOTNULL(node_src); + if (node_src->GetInControlNodes().empty()) { + return GRAPH_SUCCESS; + } + GE_CHECK_NOTNULL(node_src->GetInControlAnchor()); + for (auto &peer_out_control_anchor : node_src->GetInControlAnchor()->GetPeerOutControlAnchors()) { + GELOGD("remove edge.src:%s, dst:%s", peer_out_control_anchor->GetOwnerNode()->GetName().c_str(), + node_src->GetName().c_str()); + if (GraphUtils::RemoveEdge(peer_out_control_anchor, node_src->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "remove edge faliled!src:%s, dst:%s", + peer_out_control_anchor->GetOwnerNode()->GetName().c_str(), node_src->GetName().c_str()); + return GRAPH_FAILED; + } + GELOGD("add edge.src:%s, dst:%s", peer_out_control_anchor->GetOwnerNode()->GetName().c_str(), + node_dst->GetName().c_str()); + if (GraphUtils::AddEdge(peer_out_control_anchor, node_dst->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add edge failed!src:%s, dst:%s", peer_out_control_anchor->GetOwnerNode()->GetName().c_str(), + node_dst->GetName().c_str()); + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +graphStatus SameTransdataBreadthFusionPass::AddCastNode(const ComputeGraphPtr &graph, int anchors_index, + OutDataAnchorPtr &pre_out_anchor, NodePtr &first_link_node) { + GE_CHECK_NOTNULL(pre_out_anchor); + GE_CHECK_NOTNULL(graph); + auto pre_node = pre_out_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(pre_node->GetOpDesc()); + auto pre_output_desc = pre_node->GetOpDesc()->GetOutputDescPtr(pre_out_anchor->GetIdx()); + GE_CHECK_NOTNULL(pre_output_desc); + for (size_t i = 0; i < sub_graph_anchors_[anchors_index].size() - 1; ++i) { + auto in_data_anchor = sub_graph_anchors_[anchors_index][i].second; + GE_CHECK_NOTNULL(in_data_anchor); + auto in_owner_node = in_data_anchor->GetOwnerNode(); + auto in_op_desc = 
in_owner_node->GetOpDesc(); + GE_CHECK_NOTNULL(in_op_desc); + auto input_desc = in_op_desc->GetInputDesc(in_data_anchor->GetIdx()); + input_desc.SetFormat(pre_output_desc->GetFormat()); + input_desc.SetOriginFormat(pre_output_desc->GetOriginFormat()); + input_desc.SetShape(pre_output_desc->GetShape()); + input_desc.SetOriginShape(pre_output_desc->GetOriginShape()); + uint32_t real_dim = 0; + if (TensorUtils::GetRealDimCnt(*pre_output_desc, real_dim) != GRAPH_SUCCESS) { + GELOGW("get %s real dim cnt failed!", pre_node->GetName().c_str()); + } + TensorUtils::SetRealDimCnt(input_desc, real_dim); + auto output_desc = in_op_desc->GetOutputDesc(sub_graph_anchors_[anchors_index][i + 1].first->GetIdx()); + output_desc.SetFormat(pre_output_desc->GetFormat()); + output_desc.SetOriginFormat(pre_output_desc->GetOriginFormat()); + output_desc.SetShape(pre_output_desc->GetShape()); + output_desc.SetOriginShape(pre_output_desc->GetOriginShape()); + TensorUtils::SetRealDimCnt(output_desc, real_dim); + + auto cast_op_desc = GetCastOp(input_desc, output_desc); + if (cast_op_desc == nullptr) { + return GRAPH_FAILED; + } + + auto cast_node = graph->AddNode(cast_op_desc); + if (cast_node == nullptr) { + return GRAPH_FAILED; + } + GELOGD("add edge.src:%s, dst:%s", pre_out_anchor->GetOwnerNode()->GetName().c_str(), cast_node->GetName().c_str()); + if (GraphUtils::AddEdge(pre_out_anchor, cast_node->GetInDataAnchor(0)) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + if (i == 0) { + first_link_node = cast_node; + } + + if (!AttrUtils::SetBool(cast_op_desc, ATTR_NEED_COMPILE, true)) { + GELOGE(FAILED, "SetExtAttr fail."); + return FAILED; + } + pre_out_anchor = cast_node->GetOutDataAnchor(0); + } + return GRAPH_SUCCESS; +} + +graphStatus SameTransdataBreadthFusionPass::GetSubGraphsBetweenNormalAndTransdataNode( + OutDataAnchorPtr &out_anchor, + std::vector>> &sub_graphs_out, + std::vector> &nodes_list) { + graphStatus ret = GRAPH_SUCCESS; + if (out_anchor == nullptr) { + 
GELOGE(GRAPH_FAILED, "out data anchor is null!This should not happen!"); + return GRAPH_FAILED; + } + + for (auto &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) { + if (peer_in_anchor == nullptr || peer_in_anchor->GetOwnerNode() == nullptr || + peer_in_anchor->GetOwnerNode()->GetOpDesc() == nullptr) { + continue; + } + + nodes_list.push_back(make_pair(out_anchor, peer_in_anchor)); + auto peer_in_node = peer_in_anchor->GetOwnerNode(); + if ((peer_in_node->GetType() == TRANSDATA && peer_in_node->GetOutDataNodes().size() > 0) || + !IsHandleOp(peer_in_node)) { + sub_graphs_out.push_back(nodes_list); + nodes_list.pop_back(); + } else { + if (peer_in_node->GetType() == TRANSDATA) { + if (peer_in_node->GetOutDataNodes().size() == 0) { + nodes_list.pop_back(); + continue; + } + } + for (auto &peer_out_anchor : peer_in_node->GetAllOutDataAnchors()) { + ret = GetSubGraphsBetweenNormalAndTransdataNode(peer_out_anchor, sub_graphs_out, nodes_list); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "get all transop between normal node failed!node:%s", peer_in_node->GetName().c_str()); + return GRAPH_FAILED; + } + } + nodes_list.pop_back(); + } + } + return GRAPH_SUCCESS; +} + +bool SameTransdataBreadthFusionPass::IsTransOp(const NodePtr &node) { + if (node == nullptr) { + return false; + } + return node->GetType() == CAST || node->GetType() == TRANSPOSE || node->GetType() == TRANSPOSED || + node->GetType() == RESHAPE || node->GetType() == TRANSDATA; +} + +bool SameTransdataBreadthFusionPass::IsHandleOp(const NodePtr &node) { + if (node == nullptr) { + return false; + } + return node->GetType() == CAST || node->GetType() == TRANSDATA; +} +} // namespace ge diff --git a/src/ge/graph/passes/same_transdata_breadth_fusion_pass.h b/src/ge/graph/passes/same_transdata_breadth_fusion_pass.h new file mode 100644 index 00000000..be745056 --- /dev/null +++ b/src/ge/graph/passes/same_transdata_breadth_fusion_pass.h @@ -0,0 +1,117 @@ +/** + * Copyright 2019-2020 Huawei Technologies 
Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_SAME_TRANSDATA_BREADTH_FUSION_PASS_H_ +#define GE_GRAPH_PASSES_SAME_TRANSDATA_BREADTH_FUSION_PASS_H_ + +#include +#include + +#include "inc/graph_pass.h" + +namespace ge { +/// +/// Transform operators depth fusion +/// +class SameTransdataBreadthFusionPass : public GraphPass { + public: + SameTransdataBreadthFusionPass() {} + virtual ~SameTransdataBreadthFusionPass() {} + + graphStatus Run(ComputeGraphPtr graph) override; + + private: + graphStatus ExtractTransNode(const ComputeGraphPtr &graph); + graphStatus GetSubGraphsBetweenNormalAndTransdataNode(OutDataAnchorPtr &out_anchor, + std::vector>> &sub_graphs_out, + std::vector> &nodes_list); + + void GetSubGraphNodesInfo(); + + OpDescPtr GetCastOp(const GeTensorDesc &in_desc, const GeTensorDesc &out_desc); + + graphStatus AddCastNode(const ComputeGraphPtr &graph, + int anchors_index, + OutDataAnchorPtr &pre_out_anchor, + NodePtr &first_link_node); + + void GetSameTransdataNode(vector &same_transdata_nodes); + + graphStatus ReLinkTransdataOutput2PreNode(const NodePtr &transdata_node, const OutDataAnchorPtr &pre_out_anchor, + const NodePtr &relink_node); + + graphStatus LinkNewCastNode2RemainTransdata(const ComputeGraphPtr &graph, + const vector &same_transdata_nodes, + const OutDataAnchorPtr &transdata_out_anchor, + const NodePtr &transdata_node_keep); + + void UpdateTransdataDesc(const InDataAnchorPtr 
&transdata_in_anchor, const OpDescPtr &transdata_op_desc, + const ConstGeTensorDescPtr &head_output_desc); + + graphStatus RelinkRemainTransdata(const ComputeGraphPtr &graph, const vector &same_transdata_nodes); + + graphStatus ReLinkTransdataControlOutput2PreNode(const NodePtr &transdata_node_keep, + const OutDataAnchorPtr &pre_out_anchor, + const OutControlAnchorPtr &transdata_peer_out_control_anchor); + + graphStatus ReuseNodesBeforeTransdata(int anchors_index, const OutDataAnchorPtr &transdata_out_anchor, + NodePtr &relink_node); + + bool AllNodeBeforeTransdataHasOneDataOut(int anchors_index); + + graphStatus RelinkInControlEdge(const NodePtr &node_src, const NodePtr &node_dst); + + graphStatus ReLinkDataOutput2PreNode(const NodePtr &transdata_node, + const OutDataAnchorPtr &pre_out_anchor, + const NodePtr &relink_node); + + graphStatus ReLinkOutDataPeerInControlNodes2PreNode(const NodePtr &transdata_node, + const OutDataAnchorPtr &pre_out_anchor); + + void InsertSameTransdataNodeIndex(int anchors_index, vector &same_transdata_nodes); + + graphStatus ReLinkOutControlPeerInControlAnchors(const NodePtr &transdata_node_keep, + const OutDataAnchorPtr &pre_out_anchor, + const OutControlAnchorPtr &transdata_peer_out_control_anchor); + + graphStatus ReLinkOutControlPeerInDataAnchors(const NodePtr &transdata_node_keep, + const OutDataAnchorPtr &pre_out_anchor, + const OutControlAnchorPtr &transdata_peer_out_control_anchor); + + void CopyTensorDesc(const ConstGeTensorDescPtr &src_desc, GeTensorDesc &dst_desc); + + /// + /// judge whether an operator is a transform op or not + /// @param node + /// @return True or False + /// + static bool IsTransOp(const NodePtr &node); + + static bool IsHandleOp(const NodePtr &node); + + vector>> sub_graph_anchors_; + vector> before_transdata_nodes_; + vector> all_transdata_nodes_; + vector> sub_graph_nodes_; + vector transop_num_count_; + vector sub_graph_has_reshape_node_; + vector> peer_out_control_anchors_; + vector> 
peer_in_control_anchors_; + vector sub_graph_has_control_edge_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_SAME_TRANSDATA_BREADTH_FUSION_PASS_H_ diff --git a/src/ge/graph/passes/save_pass.cc b/src/ge/graph/passes/save_pass.cc new file mode 100644 index 00000000..92e2af8d --- /dev/null +++ b/src/ge/graph/passes/save_pass.cc @@ -0,0 +1,85 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/save_pass.h" + +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "graph/utils/graph_utils.h" + +namespace ge { +namespace { +const char *const kSave = "Save"; +const char *const kVar = "Variable"; +const char *const kVarIsSave = "save_checkpoint"; +const char *const kVarAttrVarIsSave = "_var_is_save"; +} // namespace + +Status SavePass::Run(ge::ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + vector front_nodes; + vector out_index; + vector del_nodes; + for (auto &node : graph->GetDirectNode()) { + if (node->GetType() == kSave) { + for (auto &in : node->GetAllInDataAnchors()) { + auto out_anchor = in->GetPeerOutAnchor(); + if (out_anchor != nullptr) { + ge::NodePtr peer_node = out_anchor->GetOwnerNode(); + if (peer_node->GetType() == kVar) { + front_nodes.emplace_back(peer_node); + out_index.emplace_back(out_anchor->GetIdx()); + ge::OpDescPtr op_desc = peer_node->GetOpDesc(); + 
GE_IF_BOOL_EXEC(!ge::AttrUtils::SetStr(op_desc, kVarAttrVarIsSave, kVarIsSave), + GELOGE(INTERNAL_ERROR, "get kVarAttrVarIsSave failed"); return INTERNAL_ERROR); + } + } + } + del_nodes.emplace_back(node); + } + } + // add output nodes for save + std::vector> out_nodes_info{}; + for (size_t i = 0; i < front_nodes.size(); i++) { + out_nodes_info.emplace_back(pair(front_nodes[i], out_index[i])); + } + graph->AppendGraphOutNodesInfo(out_nodes_info); + + // delete save node + for (auto &node_ptr : del_nodes) { + auto ret = graph->RemoveNode(node_ptr); + if (ret != SUCCESS) { + GELOGE(ret, "GraphUtils::RemoveNodeWithoutRelink failed."); + return ret; + } + + // update Target list + vector graph_target = graph->GetGraphTargetNodesInfo(); + auto iter = find(graph_target.begin(), graph_target.end(), node_ptr); + if (iter != graph_target.end()) { + GELOGI("Current node %s is as Target, remove it from target vector.", node_ptr->GetName().c_str()); + graph_target.erase(iter); + graph->SetGraphTargetNodesInfo(graph_target); + } + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/save_pass.h b/src/ge/graph/passes/save_pass.h new file mode 100644 index 00000000..ce8c8a7a --- /dev/null +++ b/src/ge/graph/passes/save_pass.h @@ -0,0 +1,30 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_SAVE_PASS_H_ +#define GE_GRAPH_PASSES_SAVE_PASS_H_ + +#include "graph/graph.h" +#include "inc/graph_pass.h" + +namespace ge { +class SavePass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_SAVE_PASS_H_ diff --git a/src/ge/graph/passes/shape_operate_op_remove_pass.cc b/src/ge/graph/passes/shape_operate_op_remove_pass.cc new file mode 100644 index 00000000..894f3e68 --- /dev/null +++ b/src/ge/graph/passes/shape_operate_op_remove_pass.cc @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/shape_operate_op_remove_pass.h" + +#include "common/debug/log.h" +#include "common/util.h" +#include "graph/utils/attr_utils.h" + +using domi::SUCCESS; + +namespace ge { +Status ShapeOperateOpRemovePass::Run(ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + for (auto &node : graph->GetAllNodes()) { + OpDescPtr op_desc = node->GetOpDesc(); + GE_IF_BOOL_EXEC(op_desc == nullptr, continue); + bool to_be_deleted = false; + GE_IF_BOOL_EXEC(!AttrUtils::GetBool(op_desc, ATTR_TO_BE_DELETED, to_be_deleted), to_be_deleted = false); + GE_IF_BOOL_EXEC(to_be_deleted, GE_CHK_STATUS_RET(graph->RemoveNode(node), "remove node failed!")); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/shape_operate_op_remove_pass.h b/src/ge/graph/passes/shape_operate_op_remove_pass.h new file mode 100644 index 00000000..3abe68e5 --- /dev/null +++ b/src/ge/graph/passes/shape_operate_op_remove_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_SHAPE_OPERATE_OP_REMOVE_PASS_H_ +#define GE_GRAPH_PASSES_SHAPE_OPERATE_OP_REMOVE_PASS_H_ + +#include "inc/graph_pass.h" + +namespace ge { +class ShapeOperateOpRemovePass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph) override; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_SHAPE_OPERATE_OP_REMOVE_PASS_H_ diff --git a/src/ge/graph/passes/snapshot_pass.cc b/src/ge/graph/passes/snapshot_pass.cc new file mode 100644 index 00000000..2b8577b9 --- /dev/null +++ b/src/ge/graph/passes/snapshot_pass.cc @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/snapshot_pass.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/omg_util.h" + +namespace ge { +Status SnapshotPass::Run(NodePtr &node) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + string type; + Status status_ret = GetOriginalType(node, type); + if (status_ret != SUCCESS) { + GELOGE(status_ret, "SnapshotPass get original type fail."); + return status_ret; + } + if (type == SNAPSHOT) { + return IsolateAndDeleteNode(node, {0}); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/snapshot_pass.h b/src/ge/graph/passes/snapshot_pass.h new file mode 100644 index 00000000..94062b3d --- /dev/null +++ b/src/ge/graph/passes/snapshot_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_SNAPSHOT_PASS_H_ +#define GE_GRAPH_PASSES_SNAPSHOT_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class SnapshotPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_SNAPSHOT_PASS_H_ diff --git a/src/ge/graph/passes/stop_gradient_pass.cc b/src/ge/graph/passes/stop_gradient_pass.cc new file mode 100644 index 00000000..680fbbba --- /dev/null +++ b/src/ge/graph/passes/stop_gradient_pass.cc @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/stop_gradient_pass.h" + +#include + +namespace ge { +Status StopGradientPass::Run(NodePtr &node) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + string type; + Status status_ret = GetOriginalType(node, type); + if (status_ret != SUCCESS) { + GELOGE(status_ret, "StopGradientPass get original type fail."); + return status_ret; + } + + if (type == STOPGRADIENT) { + return IsolateAndDeleteNode(node, {0}); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/stop_gradient_pass.h b/src/ge/graph/passes/stop_gradient_pass.h new file mode 100644 index 00000000..5b6e0e9e --- /dev/null +++ b/src/ge/graph/passes/stop_gradient_pass.h @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_STOP_GRADIENT_PASS_H_ +#define GE_GRAPH_PASSES_STOP_GRADIENT_PASS_H_ + +#include "framework/common/debug/ge_log.h" +#include "common/types.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/common/omg_util.h" +#include "graph/passes/base_pass.h" + +namespace ge { +class StopGradientPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_STOP_GRADIENT_PASS_H_ diff --git a/src/ge/graph/passes/switch_logic_remove_pass.cc b/src/ge/graph/passes/switch_logic_remove_pass.cc new file mode 100644 index 00000000..f3d72aad --- /dev/null +++ b/src/ge/graph/passes/switch_logic_remove_pass.cc @@ -0,0 +1,169 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/switch_logic_remove_pass.h" + +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/utils/graph_utils.h" +#include "graph/passes/pass_utils.h" +#include "common/util.h" + +namespace ge { +namespace { +using PredNodeAndOut = std::pair; +constexpr int kSwitchOutputNum = 2; +constexpr int kSwitchPredIndex = 1; + +char const *GetOutputNameFromIndex(int index) { + if ((index >= 0) && (index < kSwitchOutputNum)) { + static char const *name[kSwitchOutputNum] = {"false", "true"}; + return name[index]; + } + return "UNKNOWN"; +} + +inline bool IsSwitch(const std::string &type) { + return type == SWITCH || type == REFSWITCH; +} + +Status GetPredNode(const NodePtr &switch_node, PredNodeAndOut &pred_node_index) { + GE_CHECK_NOTNULL(switch_node); + auto pred_in_anchor = switch_node->GetInDataAnchor(kSwitchPredIndex); + if (pred_in_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to get pred node for switch %s, no pred anchor", switch_node->GetName().c_str()); + return INTERNAL_ERROR; + } + auto pred_node_anchor = pred_in_anchor->GetPeerOutAnchor(); + if (pred_node_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, + "Failed to get pred node for switch %s, node peer out anchor", + switch_node->GetName().c_str()); + return INTERNAL_ERROR; + } + auto pred_node = pred_node_anchor->GetOwnerNode(); + if (pred_node == nullptr) { + GELOGE(INTERNAL_ERROR, + "Failed to get pred node for switch %s, null node", + switch_node->GetName().c_str()); + return INTERNAL_ERROR; + } + pred_node_index.first = pred_node; + pred_node_index.second = pred_node_anchor->GetIdx(); + return SUCCESS; +} +} // namespace + +Status SwitchLogicRemovePass::Run(NodePtr &node) { + GE_CHECK_NOTNULL(node); + if (!IsSwitch(node->GetType())) { + return SUCCESS; + } + PredNodeAndOut pred_node_and_out; + auto ret = GetPredNode(node, pred_node_and_out); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to run switch logic remove pass, no pred 
node found from switch %s", + node->GetName().c_str()); + return INTERNAL_ERROR; + } + + for (int i = 0; i < kSwitchOutputNum; ++i) { + auto out_anchor = node->GetOutDataAnchor(i); + if (out_anchor == nullptr) { + GELOGW("Unexpected switch node, the %d out anchor is null", i); + return SUCCESS; + } + for (auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + if (in_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, "The in-anchor from out anchor %d node %s is null", i, node->GetName().c_str()); + return INTERNAL_ERROR; + } + auto dst_node = in_anchor->GetOwnerNode(); + if (dst_node == nullptr) { + GELOGE(INTERNAL_ERROR, "The peer node from out anchor %d node %s is null", i, node->GetName().c_str()); + return INTERNAL_ERROR; + } + if (!IsSwitch(dst_node->GetType())) { + continue; + } + PredNodeAndOut pred_node_next_switch; + ret = GetPredNode(dst_node, pred_node_next_switch); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to run switch logic remove pass, no pred node found from switch %s", + dst_node->GetName().c_str()); + return INTERNAL_ERROR; + } + if (pred_node_and_out != pred_node_next_switch) { + continue; + } + GELOGI("The switch nodes cascaded %s and %s have the save pred node %s, the %s can be remove", + node->GetName().c_str(), dst_node->GetName().c_str(), + pred_node_and_out.first->GetName().c_str(), dst_node->GetName().c_str()); + ret = RemoveSwitchNodeLogically(i, dst_node); + if (ret != SUCCESS) { + return ret; + } + } + } + + return SUCCESS; +} + +Status SwitchLogicRemovePass::RemoveSwitchNodeLogically(int parent_index, NodePtr &switch_node) { + std::vector isolate_map({-1, -1}); + for (int i = 0; i < kSwitchOutputNum; ++i) { + if (i == parent_index) { + isolate_map[i] = 0; + continue; + } + GE_CHECK_NOTNULL(switch_node); + auto out_anchor = switch_node->GetOutDataAnchor(i); + if (out_anchor == nullptr) { + GELOGW("The switch removing %s does not has %d out anchor, ignore it", switch_node->GetName().c_str(), i); + continue; + } + + 
GELOGI("Remove inactivate branch %s(%d) from switch %s", + GetOutputNameFromIndex(i), i, switch_node->GetName().c_str()); + std::vector deleted_nodes; + std::vector end_nodes; + auto ret = PassUtils::RemoveInactiveBranchToMerge(out_anchor, deleted_nodes, end_nodes); + if (ret != SUCCESS) { + return ret; + } + + for (auto &node : deleted_nodes) { + GE_CHECK_NOTNULL(node); + GELOGD("Remove node %s from inactivate branch from switch %s", + node->GetName().c_str(), switch_node->GetName().c_str()); + AddNodeDeleted(node.get()); + } + for (auto &node : end_nodes) { + GE_CHECK_NOTNULL(node); + GELOGD("Add end node %s to re-pass list, for inactivate branch from switch %s", + node->GetName().c_str(), switch_node->GetName().c_str()); + AddRePassNode(node); + } + } + GELOGI("Remove switch node cascaded %s, replace out index %d", + switch_node->GetName().c_str(), parent_index); + return IsolateAndDeleteNode(switch_node, isolate_map); +} +} // namespace ge + diff --git a/src/ge/graph/passes/switch_logic_remove_pass.h b/src/ge/graph/passes/switch_logic_remove_pass.h new file mode 100644 index 00000000..80f4eae4 --- /dev/null +++ b/src/ge/graph/passes/switch_logic_remove_pass.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_SWITCH_LOGIC_REMOVE_PASS_H_ +#define GE_GRAPH_PASSES_SWITCH_LOGIC_REMOVE_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class SwitchLogicRemovePass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; + private: + Status RemoveSwitchNodeLogically(int parent_index, NodePtr &switch_node); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_SWITCH_LOGIC_REMOVE_PASS_H_ diff --git a/src/ge/graph/passes/switch_op_pass.cc b/src/ge/graph/passes/switch_op_pass.cc new file mode 100644 index 00000000..50b1cf92 --- /dev/null +++ b/src/ge/graph/passes/switch_op_pass.cc @@ -0,0 +1,1123 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/switch_op_pass.h" + +#include +#include +#include +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" +#include "graph/common/omg_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/type_utils.h" + +namespace ge { +Status SwitchOpPass::Run(ComputeGraphPtr graph) { + GELOGD("SwitchOpPass Enter"); + + GraphUtils::DumpGEGraph(graph, "BeforeSwitchOpPass"); + GraphUtils::DumpGEGraphToOnnx(*graph, "BeforeSwitchOpPass"); + + GE_CHK_STATUS_RET(CheckCycleDependence(graph), "CheckCycleDependence fail."); + + for (auto &switch_node : switch_nodes_) { + GE_CHK_STATUS_RET(ReplaceSwitchNode(graph, switch_node), "Add StreamSwitch node fail."); + } + + for (auto &merge_node : merge_nodes_) { + GE_CHK_STATUS_RET(ReplaceMergeNode(graph, merge_node), "Add StreamMerge node fail."); + } + + GE_CHK_STATUS_RET(CombineSwitchNode(graph), "Combine StreamSwitch nodes fail."); + + for (auto &node : bypass_nodes_) { + GE_CHK_BOOL_EXEC(graph->RemoveNode(node) == GRAPH_SUCCESS, return FAILED, "Remove switch node fail."); + } + + for (auto &node : stream_switch_nodes_) { + for (auto &out_ctrl_node : node->GetOutControlNodes()) { + GELOGD("branch_head_nodes_ insert %s", out_ctrl_node->GetName().c_str()); + (void)branch_head_nodes_.insert(out_ctrl_node); + } + } + + for (auto &node : need_label_nodes_) { + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + if (!op_desc->HasAttr(ATTR_NAME_STREAM_LABEL)) { + GE_CHK_STATUS_RET(UpdateCondBranch(node), "Set cond branch fail, start node:%s", node->GetName().c_str()); + } + } + + GE_CHK_STATUS_RET(UpdateEnterNode(), "UpdateEnterNode fail."); + + GraphUtils::DumpGEGraph(graph, "AfterSwitchOpPass"); + GraphUtils::DumpGEGraphToOnnx(*graph, "AfterSwitchOpPass"); + + GELOGD("SwitchOpPass Leave"); + 
return SUCCESS; +} + +/// +/// @brief Replace Switch Op +/// @param [in] graph +/// @param [in] switch_node +/// @return Status +/// +Status SwitchOpPass::ReplaceSwitchNode(ComputeGraphPtr &graph, NodePtr &switch_node) { + std::string type; + GE_CHK_STATUS_RET(GetOriginalType(switch_node, type), "Get node type fail."); + GE_CHK_BOOL_EXEC((type == SWITCH) || (type == REFSWITCH), return FAILED, "Type of input node is not switch."); + + OutDataAnchorPtr peer_data_anchor = nullptr; + OutDataAnchorPtr peer_cond_anchor = nullptr; + GE_CHK_BOOL_EXEC(BypassSwitchNode(switch_node, peer_data_anchor, peer_cond_anchor) == SUCCESS, return FAILED, + "Bypass switch node fail."); + GE_CHECK_NOTNULL(peer_data_anchor); + GE_CHECK_NOTNULL(peer_cond_anchor); + OpDescPtr cond_desc = peer_cond_anchor->GetOwnerNode()->GetOpDesc(); + GE_CHECK_NOTNULL(cond_desc); + DataType cond_data_type = cond_desc->GetOutputDesc(peer_cond_anchor->GetIdx()).GetDataType(); + GE_CHK_BOOL_EXEC(cond_data_type == DT_BOOL, return FAILED, + "SwitchNode not support datatype %s, datatype of cond_input should be bool", + TypeUtils::DataTypeToSerialString(cond_data_type).c_str()); + + OpDescPtr switch_desc = switch_node->GetOpDesc(); + GE_CHECK_NOTNULL(switch_desc); + bool cyclic_flag = switch_desc->HasAttr(ATTR_NAME_CYCLIC_DEPENDENCE_FLAG); + + std::set out_node_list; + for (OutDataAnchorPtr &out_data_anchor : switch_node->GetAllOutDataAnchors()) { + bool true_branch_flag = (static_cast(out_data_anchor->GetIdx()) == SWITCH_TRUE_OUTPUT); + NodePtr stream_switch = nullptr; + out_node_list.clear(); + for (auto &peer_in_anchor : out_data_anchor->GetPeerAnchors()) { + GE_IF_BOOL_EXEC(stream_switch == nullptr, { + std::string suffix = (true_branch_flag ? 
"_t" : "_f"); + stream_switch = CreateStreamSwitchNode(graph, switch_node, suffix, peer_cond_anchor); + GE_CHK_BOOL_EXEC(stream_switch != nullptr, return FAILED, "Create stream_switch node fail."); + if (SetSwitchTrueBranchFlag(stream_switch, true_branch_flag) != SUCCESS) { + GELOGE(FAILED, "SetSwitchTrueBranchFlag for node %s fail.", stream_switch->GetName().c_str()); + return FAILED; + } + if (MarkBranchs(peer_cond_anchor, stream_switch, true_branch_flag) != SUCCESS) { + GELOGE(FAILED, "MarkBranchs for stream_switch %s fail.", stream_switch->GetName().c_str()); + return FAILED; + } + + if (!cyclic_flag) { + GE_CHK_STATUS(GraphUtils::AddEdge(peer_data_anchor->GetOwnerNode()->GetOutControlAnchor(), + stream_switch->GetInControlAnchor()), + "StreamSwitch node add ctl edge fail."); + } + }); + + GE_CHK_STATUS(GraphUtils::RemoveEdge(out_data_anchor, peer_in_anchor), "Remove Switch data output fail."); + + NodePtr out_node = peer_in_anchor->GetOwnerNode(); + GE_CHK_STATUS_RET(GetOriginalType(out_node, type), "Get node type fail."); + if ((type == MERGE) || (type == REFMERGE)) { + NodePtr memcpy_node = CreateMemcpyAsyncNode(graph, peer_data_anchor); + GE_CHK_BOOL_EXEC(memcpy_node != nullptr, return FAILED, "Create memcpy_async node fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(peer_data_anchor, memcpy_node->GetInDataAnchor(0)), + "MemcpyAsync node add edge fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(memcpy_node->GetOutDataAnchor(0), peer_in_anchor), + "MemcpyAsync node add edge fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(stream_switch->GetOutControlAnchor(), memcpy_node->GetInControlAnchor()), + "MemcpyAsync node add ctl edge fail."); + out_node_list.insert(memcpy_node->GetName()); + } else { + GE_CHK_STATUS(GraphUtils::AddEdge(peer_data_anchor, peer_in_anchor), "StreamSwitch node add edge fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(stream_switch->GetOutControlAnchor(), out_node->GetInControlAnchor()), + "StreamSwitch node add ctl edge fail."); + 
out_node_list.insert(out_node->GetName()); + } + } + GE_IF_BOOL_EXEC(stream_switch != nullptr, { + CopyControlEdges(switch_node, stream_switch, true); + switch_node_map_[stream_switch] = out_node_list; + if (SetOriginalNodeName(stream_switch, switch_node->GetName()) != SUCCESS) { + GELOGE(FAILED, "SetOriginalNodeName for node %s fail.", stream_switch->GetName().c_str()); + return FAILED; + } + }); + } + + RemoveControlEdges(switch_node); + (void)bypass_nodes_.insert(switch_node); + + return SUCCESS; +} + +/// +/// @brief Replace Merge Op +/// @param [in] graph +/// @param [in] merge_node +/// @return Status +/// +Status SwitchOpPass::ReplaceMergeNode(ComputeGraphPtr &graph, NodePtr &merge_node) { + std::string type; + GE_CHK_STATUS_RET(GetOriginalType(merge_node, type), "Get node type fail."); + GE_CHK_BOOL_EXEC((type == MERGE) || (type == REFMERGE), return FAILED, "Type of input node is not merge."); + + OpDescPtr merge_op_desc = merge_node->GetOpDesc(); + GE_CHECK_NOTNULL(merge_op_desc); + + const std::string node_name = merge_node->GetName(); + GELOGI("Create StreamMerge Op, name=%s.", node_name.c_str()); + OpDescPtr op_desc = MakeShared(node_name, STREAMMERGE); + if (op_desc == nullptr) { + GELOGE(FAILED, "Create op_desc fail, StreamMerge:%s.", node_name.c_str()); + return FAILED; + } + + for (InDataAnchorPtr &in_anchor : merge_node->GetAllInDataAnchors()) { + GE_CHK_BOOL_EXEC(op_desc->AddInputDesc(merge_op_desc->GetInputDesc(in_anchor->GetIdx())) == GRAPH_SUCCESS, + return FAILED, "Create StreamMerge op: add input desc fail."); + } + + for (OutDataAnchorPtr &out_anchor : merge_node->GetAllOutDataAnchors()) { + GE_CHK_BOOL_EXEC(op_desc->AddOutputDesc(merge_op_desc->GetOutputDesc(out_anchor->GetIdx())) == GRAPH_SUCCESS, + return FAILED, "Create StreamMerge op: add output desc fail."); + } + + NodePtr stream_merge = graph->AddNode(op_desc); + GE_CHK_BOOL_EXEC(stream_merge != nullptr, return FAILED, "Insert StreamMerge node fail."); + + for (InDataAnchorPtr 
&in_data_anchor : merge_node->GetAllInDataAnchors()) { + OutDataAnchorPtr peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, continue); + + GE_CHK_STATUS(GraphUtils::RemoveEdge(peer_out_anchor, in_data_anchor), "Remove Merge data input fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(peer_out_anchor, stream_merge->GetInDataAnchor(in_data_anchor->GetIdx())), + "StreamMerge node add edge fail."); + } + + for (OutDataAnchorPtr &out_data_anchor : merge_node->GetAllOutDataAnchors()) { + for (InDataAnchorPtr &peer_in_anchor : out_data_anchor->GetPeerInDataAnchors()) { + GE_CHK_STATUS(GraphUtils::RemoveEdge(out_data_anchor, peer_in_anchor), "Remove Merge data output fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(stream_merge->GetOutDataAnchor(out_data_anchor->GetIdx()), peer_in_anchor), + "StreamMerge node add edge fail."); + } + } + + ReplaceControlEdges(merge_node, stream_merge); + + if (merge_op_desc->HasAttr(ATTR_NAME_NEXT_ITERATION)) { + std::string next_iteration_name; + GE_IF_BOOL_EXEC(!AttrUtils::GetStr(merge_op_desc, ATTR_NAME_NEXT_ITERATION, next_iteration_name), + GELOGE(INTERNAL_ERROR, "get ATTR_NAME_NEXT_ITERATION failed"); + return INTERNAL_ERROR); + + GE_CHK_STATUS_RET(SetNextIteration(stream_merge, next_iteration_name), "set next iteration failed"); + } else { + need_label_nodes_.emplace_back(stream_merge); + } + + if (merge_op_desc->HasAttr(ATTR_INSERT_BY_MBATCH)) { + if (!ge::AttrUtils::SetBool(op_desc, ATTR_INSERT_BY_MBATCH, true)) { + GELOGE(FAILED, "Set attr ATTR_INSERT_BY_MBATCH fail, StreamMerge:%s.", node_name.c_str()); + return FAILED; + } + } + + (void)bypass_nodes_.insert(merge_node); + + GE_CHK_STATUS_RET(AddMemcpyAsyncNodes(graph, stream_merge), "StreamMerge add memcpy node fail."); + + return SUCCESS; +} + +/// +/// @brief Create StreamSwitch Node +/// @param [in] graph +/// @param [in] switch_node +/// @param [in] suffix +/// @param [in] peer_cond_anchor +/// @return ge::NodePtr +/// +NodePtr 
SwitchOpPass::CreateStreamSwitchNode(ComputeGraphPtr &graph, const NodePtr &switch_node, + const std::string &suffix, OutDataAnchorPtr &peer_cond_anchor) { + GE_CHK_BOOL_EXEC(switch_node != nullptr, return nullptr, "Param of merge node is null."); + OpDescPtr switch_op_desc = switch_node->GetOpDesc(); + GE_CHK_BOOL_EXEC(switch_op_desc != nullptr, return nullptr, "OpDesc of Switch node is invalid."); + GE_IF_BOOL_EXEC(switch_op_desc->GetInputsSize() != SWITCH_INPUT_NUM, { + GELOGE(FAILED, "Switch input param invalid, input_size=%lu, should be %u", switch_op_desc->GetInputsSize(), + SWITCH_INPUT_NUM); + return nullptr; + }); + + const std::string node_name = switch_node->GetName() + "_" + STREAMSWITCH + suffix; + GELOGI("Create StreamSwitch, name=%s.", node_name.c_str()); + OpDescPtr op_desc = MakeShared(node_name, STREAMSWITCH); + if (op_desc == nullptr) { + GELOGE(FAILED, "Create op_desc fail, StreamSwitch:%s.", node_name.c_str()); + return nullptr; + } + + if (!AttrUtils::SetInt(op_desc, ATTR_NAME_SWITCH_DATA_TYPE, RT_SWITCH_INT32) || + !AttrUtils::SetInt(op_desc, ATTR_NAME_STREAM_SWITCH_COND, (int64_t)RT_EQUAL)) { + GELOGE(INTERNAL_ERROR, "set int failed"); + return nullptr; + } + + // Already checked, first input is Variable will passed, second is condition will checked. 
+ GeTensorDesc cond_input_desc = switch_op_desc->GetInputDesc(SWITCH_PRED_INPUT); + GeTensorDesc input_desc(GeShape(cond_input_desc.GetShape().GetDims()), cond_input_desc.GetFormat(), DT_INT32); + GE_CHK_BOOL_EXEC(op_desc->AddInputDesc(input_desc) == GRAPH_SUCCESS, return nullptr, + "Create StreamSwitch node: add input desc fail."); + GE_CHK_BOOL_EXEC(op_desc->AddInputDesc(input_desc) == GRAPH_SUCCESS, return nullptr, + "Create StreamSwitch node: add input desc fail."); + + NodePtr stream_switch = graph->AddNode(op_desc); + GE_CHK_BOOL_EXEC(stream_switch != nullptr, return nullptr, "Insert StreamSwitch node fail."); + + GE_CHK_STATUS(GraphUtils::AddEdge(peer_cond_anchor, stream_switch->GetInDataAnchor(0)), + "StreamSwitch node add cond edge fail."); + + return stream_switch; +} + +/// +/// @brief Add MemcpyAsync Node +/// @param [in] graph +/// @param [in] in_node +/// @return ge::NodePtr +/// +NodePtr SwitchOpPass::CreateMemcpyAsyncNode(ComputeGraphPtr &graph, const OutDataAnchorPtr &out_data_anchor) { + GE_CHK_BOOL_EXEC(out_data_anchor != nullptr, return nullptr, "Param of input node is null."); + OpDescPtr pre_op_desc = out_data_anchor->GetOwnerNode()->GetOpDesc(); + GE_CHK_BOOL_EXEC(pre_op_desc != nullptr, return nullptr, "OpDesc of pre node is invalid."); + + std::string node_name = pre_op_desc->GetName() + "_" + MEMCPYASYNC; + node_name = CheckDuplicateName(node_name); + GELOGI("Create MemcpyAsync op:%s.", node_name.c_str()); + OpDescPtr op_desc = MakeShared(node_name, MEMCPYASYNC); + if (op_desc == nullptr) { + GELOGE(FAILED, "Create op_desc fail, MemcpyAsync:%s.", node_name.c_str()); + return nullptr; + } + + GE_CHK_BOOL_EXEC(op_desc->AddInputDesc(pre_op_desc->GetOutputDesc(out_data_anchor->GetIdx())) == GRAPH_SUCCESS, + return nullptr, "Create MemcpyAsync op: add input desc fail."); + GE_CHK_BOOL_EXEC(op_desc->AddOutputDesc(pre_op_desc->GetOutputDesc(out_data_anchor->GetIdx())) == GRAPH_SUCCESS, + return nullptr, "Create MemcpyAsync op: add output desc 
fail."); + + NodePtr memcpy_node = graph->AddNode(op_desc); + GE_CHK_BOOL_EXEC(memcpy_node != nullptr, return nullptr, "Insert MemcpyAsync node fail."); + + return memcpy_node; +} + +/// +/// @brief Combine switch nodes link to same cond +/// @param [in] graph +/// @return Status +/// +Status SwitchOpPass::CombineSwitchNode(ComputeGraphPtr &graph) { + for (auto iter = cond_node_map_.begin(); iter != cond_node_map_.end(); ++iter) { + OutDataAnchorPtr peer_cond_anchor = iter->first; + GE_CHECK_NOTNULL(peer_cond_anchor); + std::list false_switch_list = iter->second[SWITCH_FALSE_OUTPUT]; + std::list true_switch_list = iter->second[SWITCH_TRUE_OUTPUT]; + std::set same_cond_switch; + same_cond_switch.insert(false_switch_list.begin(), false_switch_list.end()); + same_cond_switch.insert(true_switch_list.begin(), true_switch_list.end()); + + NodePtr cond_node = peer_cond_anchor->GetOwnerNode(); + GELOGI("CombineSwitchNode: cond_node=%s", cond_node->GetName().c_str()); + + NodePtr cast_node = CreateCastOp(graph, peer_cond_anchor); + GE_CHK_BOOL_EXEC(cast_node != nullptr, return FAILED, "Create cast_node fail."); + + NodePtr active_node = CreateActiveNode(graph, cond_node); + GE_CHK_BOOL_EXEC(active_node != nullptr, return FAILED, "Create StreamActive node fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(cast_node->GetOutControlAnchor(), active_node->GetInControlAnchor()), + "StreamActive add ctl edge fail."); + if (SetActiveLabelList(active_node, {cast_node->GetName()}) != SUCCESS) { + GELOGE(FAILED, "SetActiveLabelList for node %s fail.", active_node->GetName().c_str()); + return FAILED; + } + + const std::string cond_group = cond_node->GetName(); + for (uint32_t i = 0; i < SWITCH_OUTPUT_NUM; ++i) { + bool true_branch_flag = (i == SWITCH_TRUE_OUTPUT); + std::list &switch_list = (true_branch_flag ? 
true_switch_list : false_switch_list); + GE_IF_BOOL_EXEC(switch_list.empty(), continue); + + // select first stream_switch + NodePtr stream_switch = switch_list.front(); + OpDescPtr switch_desc = stream_switch->GetOpDesc(); + GE_CHECK_NOTNULL(switch_desc); + switch_desc->SetName(cond_group + "/" + STREAMSWITCH + (true_branch_flag ? "_t" : "_f")); + stream_switch_nodes_.emplace_back(stream_switch); + need_label_nodes_.emplace_back(stream_switch); + + // 0_input: original pred input, 1_input: constant node + GE_CHK_STATUS_RET(AddConstNode(graph, stream_switch), "Add const node fail"); + GE_CHK_STATUS(GraphUtils::RemoveEdge(peer_cond_anchor, stream_switch->GetInDataAnchor(0)), + "StreamSwitch remove data edge fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(cast_node->GetOutDataAnchor(0), stream_switch->GetInDataAnchor(0)), + "Cast add data edge fail."); + + for (NodePtr &node : switch_list) { + GE_CHECK_NOTNULL(node); + GE_IF_BOOL_EXEC(node != stream_switch, { + GE_CHK_STATUS(GraphUtils::RemoveEdge(peer_cond_anchor, node->GetInDataAnchor(0)), + "StreamSwitch remove data edge fail."); + }); + GE_CHK_STATUS(ModifySwitchInCtlEdges(node, cast_node, same_cond_switch), "ModifySwitchInCtlEdges fail"); + GE_CHK_STATUS(ModifySwitchOutCtlEdges(node, stream_switch, active_node), "ModifySwitchOutCtlEdges fail"); + } + + GE_CHK_STATUS(GraphUtils::AddEdge(active_node->GetOutControlAnchor(), stream_switch->GetInControlAnchor()), + "StreamActive add ctl edge fail."); + } + } + return SUCCESS; +} + +/// +/// @brief Create Active Op +/// @param [in] graph +/// @param [in] cond_node +/// @return ge::NodePtr +/// +NodePtr SwitchOpPass::CreateActiveNode(ComputeGraphPtr &graph, NodePtr &node) { + GE_CHK_BOOL_EXEC(node != nullptr, return nullptr, "Param of pre cond_node is null."); + std::string node_name = node->GetName() + "_" + STREAMACTIVE; + node_name = CheckDuplicateName(node_name); + GELOGI("Create StreamActive op:%s.", node_name.c_str()); + OpDescPtr op_desc = MakeShared(node_name, 
STREAMACTIVE); + if (op_desc == nullptr) { + GELOGE(FAILED, "Create op_desc fail, StreamActive:%s.", node_name.c_str()); + return nullptr; + } + + NodePtr active_node = graph->AddNode(op_desc); + GE_CHK_BOOL_EXEC(active_node != nullptr, return nullptr, "Create StreamActive node fail."); + + GE_IF_BOOL_EXEC(GraphUtils::AddEdge(node->GetOutControlAnchor(), active_node->GetInControlAnchor()) != SUCCESS, + GELOGE(INTERNAL_ERROR, "add edge failed"); + return nullptr); + + GE_IF_BOOL_EXEC(SetSwitchBranchNodeLabel(active_node, node_name) != SUCCESS, + GELOGE(INTERNAL_ERROR, "set switch branch node label failed"); + return nullptr); + + return active_node; +} + +/// +/// @brief Add MemcpyAsync Op as StreamMerge in_node +/// @param [in] graph +/// @param [in] node +/// @return Status +/// +Status SwitchOpPass::AddMemcpyAsyncNodes(ComputeGraphPtr &graph, NodePtr &node) { + GE_CHK_BOOL_EXEC(node != nullptr, return FAILED, "Param of pre node is null."); + for (InDataAnchorPtr &in_data_anchor : node->GetAllInDataAnchors()) { + OutDataAnchorPtr peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, continue); + NodePtr in_node = peer_out_anchor->GetOwnerNode(); + + const std::string type = in_node->GetType(); + // For WhileLoop no need memcpy & active for merge. 
+ GE_IF_BOOL_EXEC((type == ENTER) || (type == REFENTER) || (type == NEXTITERATION) || (type == REFNEXTITERATION), + continue); + + GE_IF_BOOL_EXEC(type != MEMCPYASYNC, { + in_node = CreateMemcpyAsyncNode(graph, peer_out_anchor); + GE_CHK_BOOL_EXEC(in_node != nullptr, return FAILED, "Create MemcpyAsync node fail."); + GE_CHK_STATUS(GraphUtils::RemoveEdge(peer_out_anchor, in_data_anchor), "MemcpyAsync node remove edge fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(peer_out_anchor, in_node->GetInDataAnchor(0)), + "MemcpyAsync node add edge fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(in_node->GetOutDataAnchor(0), in_data_anchor), + "MemcpyAsync node add edge fail."); + }); + + NodePtr active_node = CreateActiveNode(graph, in_node); + GE_CHK_BOOL_EXEC(active_node != nullptr, return FAILED, "Create StreamActive node fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(active_node->GetOutControlAnchor(), node->GetInControlAnchor()), + "StreamActive add ctl edge fail."); + if (SetActiveLabelList(active_node, {node->GetName()}) != SUCCESS) { + GELOGE(FAILED, "SetActiveLabelList for node %s fail.", active_node->GetName().c_str()); + return FAILED; + } + } + + return SUCCESS; +} + +/// +/// @brief Bypass Switch Node +/// @param [in] switch_node +/// @param [out] peer_data_anchor +/// @param [out] peer_cond_anchor +/// @return Status +/// +Status SwitchOpPass::BypassSwitchNode(NodePtr &switch_node, OutDataAnchorPtr &peer_data_anchor, + OutDataAnchorPtr &peer_cond_anchor) { + GE_CHK_BOOL_EXEC(switch_node != nullptr, return FAILED, "Switch_node is null."); + for (uint32_t idx = 0; idx < SWITCH_INPUT_NUM; ++idx) { + InDataAnchorPtr in_data_anchor = switch_node->GetInDataAnchor(idx); + GE_CHK_BOOL_EXEC(in_data_anchor != nullptr, return FAILED, "Check Switch input anchor fail."); + OutDataAnchorPtr peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_CHK_BOOL_EXEC(peer_out_anchor != nullptr, return FAILED, "Check Pre node output anchor fail."); + // Remove Switch data input. 
+ GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(peer_out_anchor, in_data_anchor), "remove edge failed"); + + if (idx == SWITCH_DATA_INPUT) { + peer_data_anchor = peer_out_anchor; + } else { + if (FindSwitchCondInput(false, peer_out_anchor) != SUCCESS) { + GELOGE(FAILED, "FindSwitchCondInput fail, switch=%s", switch_node->GetName().c_str()); + return FAILED; + } + peer_cond_anchor = peer_out_anchor; + } + } + + return SUCCESS; +} + +/// +/// @brief Find Switch cond input +/// @param [in] pass_switch_flag +/// @param [out] peer_cond_anchor +/// @return Status +/// +Status SwitchOpPass::FindSwitchCondInput(bool pass_switch_flag, OutDataAnchorPtr &peer_cond_anchor) { + NodePtr tmp_node = nullptr; + string type; + bool need_pass_type = true; + while (need_pass_type) { + if (tmp_node == nullptr) { + GE_CHECK_NOTNULL(peer_cond_anchor); + tmp_node = peer_cond_anchor->GetOwnerNode(); + } else { + InDataAnchorPtr in_data_anchor = tmp_node->GetInDataAnchor(SWITCH_DATA_INPUT); + GE_CHECK_NOTNULL(in_data_anchor); + peer_cond_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_CHECK_NOTNULL(peer_cond_anchor); + tmp_node = peer_cond_anchor->GetOwnerNode(); + } + + GE_CHK_STATUS_RET(GetOriginalType(tmp_node, type), "Get node type fail"); + need_pass_type = (pass_switch_flag && ((type == SWITCH) || (type == REFSWITCH))); + } + + return SUCCESS; +} + +/// +/// @brief Mark Switch Branch +/// @param [in] peer_cond_anchor +/// @param [in] stream_switch +/// @param [in] true_branch_flag +/// @return Status +/// +Status SwitchOpPass::MarkBranchs(OutDataAnchorPtr &peer_cond_anchor, NodePtr &stream_switch, bool true_branch_flag) { + uint32_t index = true_branch_flag ? 
SWITCH_TRUE_OUTPUT : SWITCH_FALSE_OUTPUT; + GE_CHECK_NOTNULL(stream_switch); + auto it = cond_node_map_.find(peer_cond_anchor); + if (it != cond_node_map_.end()) { + GE_IF_BOOL_EXEC(it->second.size() != SWITCH_OUTPUT_NUM, { + GELOGE(INTERNAL_ERROR, "cond_node_map_ check size fail, node: %s", stream_switch->GetName().c_str()); + return FAILED; + }); + it->second[index].emplace_back(stream_switch); + } else { + std::list false_node_list; + std::list true_node_list; + std::list &node_list = true_branch_flag ? true_node_list : false_node_list; + node_list.emplace_back(stream_switch); + std::vector> switch_list; + switch_list.emplace_back(false_node_list); + switch_list.emplace_back(true_node_list); + auto result = cond_node_map_.insert( + std::pair>>(peer_cond_anchor, switch_list)); + GE_IF_BOOL_EXEC(!result.second, { + GELOGE(INTERNAL_ERROR, "cond_node_map_ insert fail, node: %s", stream_switch->GetName().c_str()); + return FAILED; + }); + } + return SUCCESS; +} + +/// +/// @brief Create cast node +/// @param [in] graph +/// @param [in] peer_cond_anchor +/// @return NodePtr +/// +NodePtr SwitchOpPass::CreateCastOp(ComputeGraphPtr &graph, OutDataAnchorPtr &peer_cond_anchor) { + GE_CHK_BOOL_EXEC(peer_cond_anchor != nullptr, return nullptr, "Param of pre cond_node is null."); + OpDescPtr cond_desc = peer_cond_anchor->GetOwnerNode()->GetOpDesc(); + GE_CHK_BOOL_EXEC(cond_desc != nullptr, return nullptr, "Get cond_desc fail."); + + const std::string cast_name = cond_desc->GetName() + "_" + CAST; + GELOGI("Create cast_node: %s, input datatype:DT_BOOL, out datatype:DT_INT32", cast_name.c_str()); + OpDescPtr cast_desc = MakeShared(cast_name, CAST); + if (cast_desc == nullptr) { + GELOGE(FAILED, "Create op_desc fail, Cast:%s.", cast_name.c_str()); + return nullptr; + } + if (!(AttrUtils::SetInt(cast_desc, CAST_ATTR_SRCT, (int64_t)DT_BOOL) && + AttrUtils::SetInt(cast_desc, CAST_ATTR_DSTT, (int64_t)DT_INT32) && + AttrUtils::SetInt(cast_desc, CAST_ATTR_DST_TYPE, (int64_t)DT_INT32) 
&& + AttrUtils::SetBool(cast_desc, CAST_ATTR_TRUNCATE, false))) { + GELOGE(FAILED, "Set CAST_ATTR_SRCT or CAST_ATTR_DSTT or CAST_ATTR_DST_TYPE or CAST_ATTR_TRUNCATE fail, node: %s.", + cast_name.c_str()); + return nullptr; + } + GeTensorDesc tensor_desc = cond_desc->GetOutputDesc(peer_cond_anchor->GetIdx()); + tensor_desc.SetDataType(DT_BOOL); + GE_CHK_BOOL_EXEC(cast_desc->AddInputDesc(tensor_desc) == SUCCESS, return nullptr, "Cast_node add input desc fail."); + tensor_desc.SetDataType(DT_INT32); + GE_CHK_BOOL_EXEC(cast_desc->AddOutputDesc(tensor_desc) == SUCCESS, return nullptr, "Cast_node add output desc fail."); + + NodePtr cast_node = graph->AddNode(cast_desc); + GE_CHK_BOOL_EXEC(cast_node != nullptr, return nullptr, "Create cast_node fail."); + + GE_CHK_STATUS(GraphUtils::AddEdge(peer_cond_anchor, cast_node->GetInDataAnchor(0)), "Cast add data edge fail."); + + return cast_node; +} + +/// +/// @brief Add const node as switch input1 +/// @param [in] graph +/// @param [in] stream_switch +/// @return Status +/// +Status SwitchOpPass::AddConstNode(ComputeGraphPtr &graph, NodePtr &stream_switch) { + GE_CHK_BOOL_EXEC(stream_switch != nullptr, return FAILED, "stream_switch is null."); + OpDescPtr op_desc = stream_switch->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + bool value = false; + GE_CHK_BOOL_EXEC(AttrUtils::GetBool(op_desc, ATTR_NAME_SWITCH_TRUE_BRANCH_FLAG, value), return FAILED, + "StreamSwitch get attr TRUE_BRANCH_STREAM fail."); + + const std::string const_node_name = op_desc->GetName() + "_Constant_" + (value ? 
"t" : "f"); + GELOGI("Create const op: %s", const_node_name.c_str()); + OpDescPtr const_op_desc = MakeShared(const_node_name, CONSTANT); + if (const_op_desc == nullptr) { + GELOGE(FAILED, "Create op_desc fail, Constant:%s.", const_node_name.c_str()); + return FAILED; + } + + auto resize_value = (int32_t)value; + GeTensorDesc data_desc = op_desc->GetInputDesc(1); + GeTensorPtr const_value = + MakeShared(data_desc, reinterpret_cast(&resize_value), sizeof(int32_t)); + if (const_value == nullptr) { + GELOGE(FAILED, "Create tensor fail."); + return FAILED; + } + GE_CHK_BOOL_EXEC(AttrUtils::SetTensor(const_op_desc, ATTR_NAME_WEIGHTS, const_value), return FAILED); + GE_CHK_BOOL_EXEC(const_op_desc->AddOutputDesc(data_desc) == GRAPH_SUCCESS, return FAILED, + "Create Const op: add output desc fail."); + + NodePtr const_node = graph->AddNode(const_op_desc); + GE_CHK_BOOL_EXEC(const_node != nullptr, return FAILED, "Insert Const node fail."); + GE_CHK_STATUS(GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), stream_switch->GetInDataAnchor(1)), + "StreamSwitch node add ctl edge fail."); + + return SUCCESS; +} + +/// +/// @brief update cond branch +/// @param [in] node +/// @return Status +/// +Status SwitchOpPass::UpdateCondBranch(NodePtr &node) { + std::string stream_label; + std::unordered_set branch_nodes; + std::unordered_set handled_set; + std::stack nodes; + nodes.push(node); + + static const std::set end_type_set = {STREAMSWITCH, STREAMMERGE}; + bool merge_flag = false; + bool exit_flag = false; + bool net_output_flag = false; + + while (!nodes.empty()) { + NodePtr cur_node = nodes.top(); + nodes.pop(); + if (handled_set.count(cur_node) > 0) { + continue; + } + GE_CHECK_NOTNULL(cur_node); + if (UpdateAttachFlag(cur_node, stream_label, merge_flag, exit_flag, net_output_flag) != SUCCESS) { + GELOGE(FAILED, "UpdateAttachFlag fail, cur_node: %s.", cur_node->GetName().c_str()); + return FAILED; + } + + const std::string type = cur_node->GetType(); + for (auto &out_node : 
cur_node->GetOutAllNodes()) { + const std::string out_type = out_node->GetType(); + bool stop_flag = (end_type_set.count(out_type) > 0) || + ((type != STREAMSWITCH) && (branch_head_nodes_.count(out_node) > 0)) || + (((type == ENTER) || (type == REFENTER)) && (out_type != STREAMACTIVE)); + if (!stop_flag) { + nodes.push(out_node); + GELOGD("branch_nodes insert %s", out_node->GetName().c_str()); + branch_nodes.insert(out_node); + } + } + handled_set.insert(cur_node); + } + + if (node->GetType() == STREAMSWITCH) { + GE_CHK_STATUS_RET(SetActiveLabelList(node, {stream_label}), "set active_label_list failed"); + } + + bool attach_flag = (merge_flag || exit_flag) && net_output_flag; + if (attach_flag) { + GELOGI("No need to keep on attaching label."); + return SUCCESS; + } + + for (NodePtr tmp_node : branch_nodes) { + GELOGD("Attach label %s to node: %s", stream_label.c_str(), tmp_node->GetName().c_str()); + GE_CHK_STATUS_RET(SetStreamLabel(tmp_node, stream_label), "set stream label failed"); + } + + return SUCCESS; +} + +/// +/// @brief update attach flag +/// @param [in] node +/// @param [out] stream_label +/// @param [out] merge_flag +/// @param [out] exit_flag +/// @param [out] net_output_flag +/// @return Status +/// +Status SwitchOpPass::UpdateAttachFlag(const NodePtr &node, std::string &stream_label, bool &merge_flag, bool &exit_flag, + bool &net_output_flag) { + const std::string type = node->GetType(); + if (type == STREAMSWITCH) { + if (node->GetInDataNodes().empty()) { + GELOGE(INTERNAL_ERROR, "cur_node %s has no input_data_node", node->GetName().c_str()); + return INTERNAL_ERROR; + } + stream_label = node->GetInDataNodes().at(0)->GetName(); + GE_CHK_STATUS_RET(SetStreamLabel(node, stream_label), "set stream label failed"); + bool value = false; + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + GE_CHK_BOOL_EXEC(AttrUtils::GetBool(op_desc, ATTR_NAME_SWITCH_TRUE_BRANCH_FLAG, value), return FAILED, + "StreamSwitch get attr TRUE_BRANCH_STREAM 
fail."); + stream_label += (value ? "_t" : "_f"); + } else if (type == STREAMMERGE) { + stream_label = node->GetName(); + GE_CHK_STATUS_RET(SetStreamLabel(node, stream_label), "set stream label failed"); + merge_flag = true; + } else if ((type == EXIT) || (type == REFEXIT)) { + GE_CHK_STATUS_RET(SetStreamLabel(node, stream_label), "set stream label failed"); + exit_flag = true; + } else if (type == NETOUTPUT) { + net_output_flag = true; + } + + return SUCCESS; +} + +/// +/// @brief update loop branch +/// @param [in] enter_nodes +/// @param [in] stream_label +/// @return Status +/// +Status SwitchOpPass::UpdateLoopBranch(const std::stack &enter_nodes, const std::string &stream_label) { + std::stack nodes(enter_nodes); + NodePtr cur_node = nullptr; + while (!nodes.empty()) { + cur_node = nodes.top(); + nodes.pop(); + for (NodePtr &out_node : cur_node->GetOutAllNodes()) { + OpDescPtr out_desc = out_node->GetOpDesc(); + GE_CHECK_NOTNULL(out_desc); + if (out_desc->HasAttr(ATTR_NAME_STREAM_LABEL)) { + continue; + } + GELOGD("Attach label %s to node: %s", stream_label.c_str(), out_node->GetName().c_str()); + GE_CHK_STATUS_RET(SetStreamLabel(out_node, stream_label), "set stream label failed"); + nodes.push(out_node); + } + } + + return SUCCESS; +} + +/// +/// @brief update enter nodes +/// @return Status +/// +Status SwitchOpPass::UpdateEnterNode() { + std::unordered_map> enter_active_map; + for (auto &enter_node : enter_nodes_) { + for (auto &out_ctrl_node : enter_node->GetOutControlNodes()) { + if (out_ctrl_node->GetType() != STREAMACTIVE) { + continue; + } + auto iter = enter_active_map.find(out_ctrl_node); + if (iter == enter_active_map.end()) { + enter_active_map[out_ctrl_node] = {enter_node}; + } else { + iter->second.emplace_back(enter_node); + } + } + } + + for (auto &pair : enter_active_map) { + std::string stream_label; + NodePtr active_node = pair.first; + GE_CHECK_NOTNULL(active_node); + OpDescPtr active_desc = active_node->GetOpDesc(); + 
GE_CHECK_NOTNULL(active_desc); + (void)AttrUtils::GetStr(active_desc, ATTR_NAME_STREAM_LABEL, stream_label); + if (stream_label.empty()) { + stream_label = active_desc->GetName(); + GE_CHK_STATUS_RET(SetStreamLabel(active_node, stream_label), "set stream label failed"); + } + std::stack enter_nodes; + for (auto &enter_node : pair.second) { + GE_CHK_STATUS_RET(SetStreamLabel(enter_node, stream_label), "set stream label failed"); + enter_nodes.emplace(enter_node); + } + + std::vector active_label_list; + if (!AttrUtils::GetListStr(active_desc, ATTR_NAME_ACTIVE_LABEL_LIST, active_label_list) || + (active_label_list.size() != 1) || active_label_list[0].empty()) { + GELOGE(INTERNAL_ERROR, "Get attr ATTR_NAME_ACTIVE_LABEL_LIST fail, node: %s", active_desc->GetName().c_str()); + return INTERNAL_ERROR; + } + if (UpdateLoopBranch(enter_nodes, active_label_list[0]) != SUCCESS) { + GELOGE(FAILED, "UpdateLoopBranch fail."); + return FAILED; + } + } + + return SUCCESS; +} + +/// +/// @brief Check duplicate node_name +/// @param [in] node_name +/// @return std::string +/// +std::string SwitchOpPass::CheckDuplicateName(const std::string &node_name) { + std::string tmp_name = node_name; + auto iter = node_num_map_.find(tmp_name); + if (iter != node_num_map_.end()) { + tmp_name = tmp_name + "_" + std::to_string(iter->second); + (iter->second)++; + } else { + node_num_map_[tmp_name] = 1; + } + return tmp_name; +} + +/// +/// @brief Check cyclic dependence +/// @param [in] graph +/// @return Status +/// +Status SwitchOpPass::CheckCycleDependence(ComputeGraphPtr &graph) { + std::string type; + std::unordered_map> cond_switch_map; + for (NodePtr &node : graph->GetDirectNode()) { + GE_CHK_STATUS_RET(GetOriginalType(node, type), "Get node type fail"); + if ((type == SWITCH) || (type == REFSWITCH)) { + InDataAnchorPtr in_cond_anchor = node->GetInDataAnchor(SWITCH_PRED_INPUT); + GE_CHK_BOOL_EXEC(in_cond_anchor != nullptr, return INTERNAL_ERROR, "Check Switch in_cond_anchor fail."); + 
OutDataAnchorPtr peer_out_anchor = in_cond_anchor->GetPeerOutAnchor(); + GE_CHK_BOOL_EXEC(peer_out_anchor != nullptr, return INTERNAL_ERROR, "Check Switch peer_out_anchor fail."); + if (FindSwitchCondInput(true, peer_out_anchor) != SUCCESS) { + GELOGE(FAILED, "FindSwitchCondInput fail, switch=%s", node->GetName().c_str()); + return FAILED; + } + + NodePtr cond_node = peer_out_anchor->GetOwnerNode(); + auto iter = cond_switch_map.find(cond_node); + if (iter == cond_switch_map.end()) { + cond_switch_map[cond_node] = {node}; + } else { + iter->second.emplace_back(node); + } + + switch_nodes_.emplace_back(node); + } else if ((type == MERGE) || (type == REFMERGE)) { + merge_nodes_.emplace_back(node); + } else if ((type == ENTER) || (type == REFENTER)) { + enter_nodes_.emplace_back(node); + } + } + + MarkCycleDependence(cond_switch_map); + + return SUCCESS; +} + +/// +/// @brief Mark cyclic dependence +/// @param [in] graph +/// @param [in] cond_switch_map +/// @return void +/// +void SwitchOpPass::MarkCycleDependence(const std::unordered_map> &cond_switch_map) { + std::stack out_nodes; + NodePtr tmp_node = nullptr; + std::unordered_set handled_set; + for (auto &iter : cond_switch_map) { + std::set switch_nodes(iter.second.begin(), iter.second.end()); + for (auto &switch_node : switch_nodes) { + GE_CHECK_NOTNULL_JUST_RETURN(switch_node); + GELOGD("CheckCycleDependence: cond_node=%s, switch=%s", iter.first->GetName().c_str(), + switch_node->GetName().c_str()); + for (const NodePtr &node : switch_node->GetOutAllNodes()) { + out_nodes.push(node); + } + } + handled_set.clear(); + while (!out_nodes.empty()) { + tmp_node = out_nodes.top(); + GE_CHECK_NOTNULL_JUST_RETURN(tmp_node); + out_nodes.pop(); + if (handled_set.count(tmp_node) > 0) { + continue; + } + GELOGD("CheckCycleDependence: tmp_node=%s", tmp_node->GetName().c_str()); + for (NodePtr &out_node : tmp_node->GetOutAllNodes()) { + if (switch_nodes.find(out_node) == switch_nodes.end()) { + out_nodes.push(out_node); + 
continue; + } + GE_IF_BOOL_EXEC(SetCyclicDependenceFlag(out_node) != SUCCESS, GELOGW("set cyclic dependence failed"); return); + auto map_iter = switch_cyclic_map_.find(out_node); + if (map_iter == switch_cyclic_map_.end()) { + switch_cyclic_map_[out_node] = {tmp_node->GetName()}; + } else { + map_iter->second.insert(tmp_node->GetName()); + } + } + handled_set.insert(tmp_node); + } + } + + return; +} + +/// +/// @brief Modify in ctl edge for switch_node +/// @param [in] switch_node +/// @param [in] cast_node +/// @param [in] same_cond_switch +/// @return Status +/// +Status SwitchOpPass::ModifySwitchInCtlEdges(NodePtr &switch_node, NodePtr &cast_node, + const std::set &same_cond_switch) { + GE_CHECK_NOTNULL(switch_node); + GE_CHECK_NOTNULL(cast_node); + GELOGI("ModifySwitchInCtlEdges: switch_node=%s, active_node=%s", switch_node->GetName().c_str(), + cast_node->GetName().c_str()); + + std::string orig_switch_name = switch_node->GetName(); + OpDescPtr switch_desc = switch_node->GetOpDesc(); + GE_CHECK_NOTNULL(switch_desc); + if (!AttrUtils::GetStr(switch_desc, ATTR_NAME_ORIG_NODE_NAME, orig_switch_name) || orig_switch_name.empty()) { + GELOGE(INTERNAL_ERROR, "Get attr ATTR_NAME_ORIG_NODE_NAME fail, node: %s", switch_desc->GetName().c_str()); + return INTERNAL_ERROR; + } + + for (NodePtr &in_ctl_node : switch_node->GetInControlNodes()) { + GE_CHK_STATUS(GraphUtils::RemoveEdge(in_ctl_node->GetOutControlAnchor(), switch_node->GetInControlAnchor()), + "Remove ctl edge fail."); + GE_IF_BOOL_EXEC(!in_ctl_node->GetOutControlAnchor()->IsLinkedWith(cast_node->GetInControlAnchor()), { + GE_CHK_STATUS(GraphUtils::AddEdge(in_ctl_node->GetOutControlAnchor(), cast_node->GetInControlAnchor()), + "Add ctl edge fail."); + }); + + GE_IF_BOOL_EXEC(in_ctl_node->GetType() != STREAMSWITCH, continue); + if (same_cond_switch.count(in_ctl_node) > 0) { + GE_CHK_STATUS(GraphUtils::RemoveEdge(in_ctl_node->GetOutControlAnchor(), cast_node->GetInControlAnchor()), + "Remove ctl edge fail."); + 
continue; + } + auto find_res1 = switch_node_map_.find(in_ctl_node); + GE_IF_BOOL_EXEC(find_res1 == switch_node_map_.end(), { + GELOGE(INTERNAL_ERROR, "StreamSwitch node %s not found in switch_node_map_.", in_ctl_node->GetName().c_str()); + return INTERNAL_ERROR; + }); + auto find_res2 = find_res1->second.find(orig_switch_name); + auto find_res3 = find_res1->second.find(cast_node->GetName()); + GE_IF_BOOL_EXEC((find_res2 != find_res1->second.end()) && (find_res3 == find_res1->second.end()), { + find_res1->second.erase(find_res2); + find_res1->second.insert(cast_node->GetName()); + continue; + }); + } + + return SUCCESS; +} + +/// +/// @brief Modify out ctl edge for switch_node +/// @param [in] switch_node +/// @param [in] stream_switch +/// @param [in] active_node +/// @return Status +/// +Status SwitchOpPass::ModifySwitchOutCtlEdges(NodePtr &switch_node, NodePtr &stream_switch, NodePtr &active_node) { + GE_CHECK_NOTNULL(switch_node); + GE_CHECK_NOTNULL(stream_switch); + GE_CHECK_NOTNULL(active_node); + GELOGI("ModifySwitchOutCtlEdges: switch_node=%s, stream_switch=%s, active_node=%s", switch_node->GetName().c_str(), + stream_switch->GetName().c_str(), active_node->GetName().c_str()); + auto find_res = switch_node_map_.find(switch_node); + GE_IF_BOOL_EXEC(find_res == switch_node_map_.end(), { + GELOGE(INTERNAL_ERROR, "StreamSwitch node %s not found in switch_node_map_.", switch_node->GetName().c_str()); + return INTERNAL_ERROR; + }); + GE_IF_BOOL_EXEC(find_res->second.empty(), { + GELOGE(INTERNAL_ERROR, "true_nodes of StreamSwitch node %s is empty.", switch_node->GetName().c_str()); + return INTERNAL_ERROR; + }); + + for (NodePtr &node : switch_node->GetOutControlNodes()) { + GE_CHK_STATUS(GraphUtils::RemoveEdge(switch_node->GetOutControlAnchor(), node->GetInControlAnchor()), + "Remove ctl edge fail."); + OpDescPtr op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + std::string orig_name = op_desc->GetName(); + 
GE_IF_BOOL_EXEC(op_desc->HasAttr(ATTR_NAME_ORIG_NODE_NAME), { + if (!AttrUtils::GetStr(op_desc, ATTR_NAME_ORIG_NODE_NAME, orig_name) || orig_name.empty()) { + GELOGE(INTERNAL_ERROR, "Get attr ATTR_NAME_ORIG_NODE_NAME fail, node: %s.", op_desc->GetName().c_str()); + return INTERNAL_ERROR; + } + }); + if (find_res->second.find(orig_name) == find_res->second.end()) { + auto active_out_control_anchor = active_node->GetOutControlAnchor(); + GE_CHECK_NOTNULL(active_out_control_anchor); + GE_IF_BOOL_EXEC(!active_out_control_anchor->IsLinkedWith(node->GetInControlAnchor()), { + GE_CHK_STATUS(GraphUtils::AddEdge(active_out_control_anchor, node->GetInControlAnchor()), "Add ctl edge fail."); + }); + } else { + auto stream_switch_out_control_anchor = stream_switch->GetOutControlAnchor(); + GE_CHECK_NOTNULL(stream_switch_out_control_anchor); + GE_IF_BOOL_EXEC(!stream_switch_out_control_anchor->IsLinkedWith(node->GetInControlAnchor()), { + GE_CHK_STATUS(GraphUtils::AddEdge(stream_switch_out_control_anchor, node->GetInControlAnchor()), + "Add ctl edge fail."); + }); + } + } + + GE_IF_BOOL_EXEC(switch_node != stream_switch, (void)bypass_nodes_.insert(switch_node)); + + return SUCCESS; +} + +/// +/// @brief Copy Control Edges +/// @param [in] old_node +/// @param [in] new_node +/// @param [in] input_check_flag +/// @return void +/// +void SwitchOpPass::CopyControlEdges(NodePtr &old_node, NodePtr &new_node, bool input_check_flag) { + GE_CHECK_NOTNULL_JUST_RETURN(old_node); + GE_CHECK_NOTNULL_JUST_RETURN(new_node); + GE_IF_BOOL_EXEC(old_node == new_node, return ); + auto iter = switch_cyclic_map_.find(old_node); + bool check_flag = input_check_flag && (iter != switch_cyclic_map_.end()); + for (NodePtr &node : old_node->GetInControlNodes()) { + if (check_flag && (iter->second.count(node->GetName()) > 0)) { + for (auto &out_node : old_node->GetOutAllNodes()) { + auto out_control_anchor = node->GetOutControlAnchor(); + GE_CHECK_NOTNULL_JUST_RETURN(out_control_anchor); + 
GE_IF_BOOL_EXEC(!out_control_anchor->IsLinkedWith(out_node->GetInControlAnchor()), { + GE_CHK_STATUS(GraphUtils::AddEdge(out_control_anchor, out_node->GetInControlAnchor()), "Add ctl edge fail."); + }); + } + } else { + auto out_control_anchor = node->GetOutControlAnchor(); + GE_CHECK_NOTNULL_JUST_RETURN(out_control_anchor); + GE_IF_BOOL_EXEC(!out_control_anchor->IsLinkedWith(new_node->GetInControlAnchor()), { + GE_CHK_STATUS(GraphUtils::AddEdge(out_control_anchor, new_node->GetInControlAnchor()), "Add in ctl edge fail."); + }); + } + } + + for (NodePtr &node : old_node->GetOutControlNodes()) { + GE_IF_BOOL_EXEC(!new_node->GetOutControlAnchor()->IsLinkedWith(node->GetInControlAnchor()), { + GE_CHK_STATUS(GraphUtils::AddEdge(new_node->GetOutControlAnchor(), node->GetInControlAnchor()), + "Add out ctl edge fail."); + }); + } +} + +/// +/// @brief Remove Control Edges +/// @param [in] node +/// @return void +/// +void SwitchOpPass::RemoveControlEdges(NodePtr &node) { + GE_CHECK_NOTNULL_JUST_RETURN(node); + for (NodePtr &in_node : node->GetInControlNodes()) { + GE_CHK_STATUS(GraphUtils::RemoveEdge(in_node->GetOutControlAnchor(), node->GetInControlAnchor()), + "Remove in ctl edge fail."); + } + + for (auto &out_data_anchor : node->GetAllOutDataAnchors()) { + for (auto &in_ctrl_anchor : out_data_anchor->GetPeerInControlAnchors()) { + GE_CHK_STATUS(GraphUtils::RemoveEdge(out_data_anchor, in_ctrl_anchor), "Remove in ctl edge fail."); + } + } + + auto out_control_anchor = node->GetOutControlAnchor(); + GE_CHECK_NOTNULL_JUST_RETURN(out_control_anchor); + for (auto &peer_anchor : out_control_anchor->GetPeerAnchors()) { + GE_CHK_STATUS(GraphUtils::RemoveEdge(out_control_anchor, peer_anchor), "Remove out ctl edge fail."); + } +} + +/// +/// @brief Replace Control Edges +/// @param [in] old_node +/// @param [in] new_node +/// @return void +/// +void SwitchOpPass::ReplaceControlEdges(NodePtr &old_node, NodePtr &new_node) { + GE_IF_BOOL_EXEC(old_node == new_node, return); + 
CopyControlEdges(old_node, new_node); + RemoveControlEdges(old_node); +} +} // namespace ge diff --git a/src/ge/graph/passes/switch_op_pass.h b/src/ge/graph/passes/switch_op_pass.h new file mode 100644 index 00000000..2bb1adf0 --- /dev/null +++ b/src/ge/graph/passes/switch_op_pass.h @@ -0,0 +1,164 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_SWITCH_OP_PASS_H_ +#define GE_GRAPH_PASSES_SWITCH_OP_PASS_H_ + +#include +#include +#include +#include +#include +#include + +#include "inc/graph_pass.h" + +namespace ge { +/* Variable Initialize Flow, take as FrameworkOp + +-----------+ + | Merge | + +-----------+ + / \ + 0/ \x + / \ + +-----------+ +-----------+ + | Switch | | Switch | + +-----------+ +-----------+ + | |F T| | + 0| | | x| + | | | | + | +-----------------------+ | + | | IsVariableInitialized | | + | +-----------------------+ | + | | | + | | | + | | | + +-----------+ +-----------+ + | Const | | VariableV2| + +-----------+ +-----------+ +*/ + +/* Switch branch op optimize, Switches in same case merge to one StreamSwitch, update following nodes' input + + +-----------+ + / | task2 | \ + T/ +-----------+ \ + +-----------+ +-----------+ / \ +-----------+ +-----------+ + | task1 | --> | Switch | | task4 | --> | noop | + +-----------+ +-----------+ \ / +-----------+ +-----------+ + F\ +-----------+ / + \ | task3 | / + +-----------+ + + cond(x < y, lambda: add(x, 
z), lambda: square(y)) + + +-----------+ +-----------+ + | Merge | +------------|StreamMerge|----------+ + +-----------+ | +-----------+ | + / \ | | | + / \ |c | |c + / \ +----------+ ----------- +----------+ + +-----------+ +-----------+ | Active_f | / \ | Active_t | + | Square | | Add | +----------+ / \ +----------+ + +-----------+ +-----------+ \ / \ / + / / \ \c / \ /c + y/ x/ \z +-----------+ +-----------+ + / / \ | Square | | Add | + +-----------+ +-----------+ +-----------+ +-----------+ +-----------+ + | Switch | | Switch | | Switch | ====> / | / | \ + +-----------+ +-----------+ +-----------+ / | / | \ + y| |F T| |x T| |z +--------+ | +--------+ | +--------+ + | | | | | | | y/read | | | x/read | | | z/read | + | +-----------+ | | | +--------+ | +--------+ | +--------+ + | | Less |-------------------+ | |c |c + | +-----------+ | | +----------------+ +----------------+ + | | | | StreamSwitch_f | | StreamSwitch_t | + | | | +----------------+ +----------------+ + +-----------+ +-----------+ +-----------+ | | + | y/read | | x/read | | z/read | | +-----------+ | + +-----------+ +-----------+ +-----------+ +-----| Less |----+ + +-----------+ +*/ +class SwitchOpPass : public GraphPass { + public: + Status Run(ComputeGraphPtr graph); + + private: + Status ReplaceSwitchNode(ComputeGraphPtr &graph, NodePtr &switch_node); + + Status ReplaceMergeNode(ComputeGraphPtr &graph, NodePtr &merge_node); + + NodePtr CreateStreamSwitchNode(ComputeGraphPtr &graph, const NodePtr &switch_node, const std::string &suffix, + OutDataAnchorPtr &peer_cond_anchor); + + NodePtr CreateMemcpyAsyncNode(ComputeGraphPtr &graph, const OutDataAnchorPtr &out_data_anchor); + + Status CombineSwitchNode(ComputeGraphPtr &graph); + + NodePtr CreateActiveNode(ComputeGraphPtr &graph, NodePtr &node); + + Status AddMemcpyAsyncNodes(ComputeGraphPtr &graph, NodePtr &stream_merge_node); + + Status BypassSwitchNode(NodePtr &switch_node, OutDataAnchorPtr &peer_data_anchor, OutDataAnchorPtr &peer_cond_anchor); + 
+ Status FindSwitchCondInput(bool pass_switch_flag, OutDataAnchorPtr &peer_cond_anchor); + + Status MarkBranchs(OutDataAnchorPtr &peer_cond_anchor, NodePtr &stream_switch_node, bool true_branch_flag); + + NodePtr CreateCastOp(ComputeGraphPtr &graph, OutDataAnchorPtr &peer_cond_anchor); + + Status AddConstNode(ComputeGraphPtr &graph, NodePtr &stream_switch_node); + + Status UpdateCondBranch(NodePtr &node); + + Status UpdateAttachFlag(const NodePtr &node, std::string &stream_label, + bool &merge_flag, bool &exit_flag, bool &net_output_flag); + + Status UpdateLoopBranch(const std::stack &enter_nodes, const std::string &stream_label); + + Status UpdateEnterNode(); + + std::string CheckDuplicateName(const std::string &node_name); + + Status CheckCycleDependence(ComputeGraphPtr &graph); + + void MarkCycleDependence(const std::unordered_map> &cond_switch_map); + + Status ModifySwitchInCtlEdges(NodePtr &switch_node, NodePtr &cast_node, const std::set &same_cond_switch); + + Status ModifySwitchOutCtlEdges(NodePtr &switch_node, NodePtr &stream_switch, NodePtr &active_node); + + void CopyControlEdges(NodePtr &old_node, NodePtr &new_node, bool input_check_flag = false); + + void RemoveControlEdges(NodePtr &node); + + void ReplaceControlEdges(NodePtr &old_node, NodePtr &new_node); + + std::vector switch_nodes_; + std::vector merge_nodes_; + std::vector enter_nodes_; + std::unordered_map> switch_cyclic_map_; + + std::set bypass_nodes_; + std::set branch_head_nodes_; + std::vector stream_switch_nodes_; + std::vector need_label_nodes_; + std::unordered_map>> cond_node_map_; + std::unordered_map> switch_node_map_; + std::unordered_map node_num_map_; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_SWITCH_OP_PASS_H_ diff --git a/src/ge/graph/passes/switch_pass.cc b/src/ge/graph/passes/switch_pass.cc new file mode 100644 index 00000000..c8565a74 --- /dev/null +++ b/src/ge/graph/passes/switch_pass.cc @@ -0,0 +1,181 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/switch_pass.h" + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/common/omg_util.h" +#include "graph/passes/pass_utils.h" +#include "graph/utils/graph_utils.h" + +namespace ge { +namespace { +const std::vector::size_type kDataInputIndex = 0; +const std::vector::size_type kPredInputIndex = 1; +const int kDefaultInputIndex = -1; + +bool ParsePred(const ConstGeTensorPtr &tensor) { + if (tensor == nullptr) { + GELOGE(FAILED, "parameter is null."); + return false; + } + const uint8_t *data_ptr = tensor->GetData().data(); + auto type = tensor->GetTensorDesc().GetDataType(); + switch (type) { + case DT_BOOL: + return *reinterpret_cast(data_ptr); + case DT_FLOAT: + return static_cast(*reinterpret_cast(data_ptr)); + case DT_DOUBLE: + return static_cast(*reinterpret_cast(data_ptr)); + case DT_INT8: + case DT_UINT8: + return static_cast(*data_ptr); + case DT_FLOAT16: + case DT_INT16: + case DT_UINT16: + return static_cast(*reinterpret_cast(data_ptr)); + case DT_INT32: + case DT_UINT32: + return static_cast(*reinterpret_cast(data_ptr)); + case DT_INT64: + case DT_UINT64: + return static_cast(*reinterpret_cast(data_ptr)); + default: + return static_cast(*data_ptr); + } +} + +bool ParseOutDataAnchors(const NodePtr &node, const NodePtr &pred_node, OutDataAnchorPtr &active_out_data_anchor, + 
OutDataAnchorPtr &inactive_out_data_anchor) { + auto tensors = OpDescUtils::MutableWeights(pred_node); + if (tensors.empty()) { + return false; + } + + bool pred_value = ParsePred(tensors[0]); + int inactive_output_index = pred_value ? 0 : 1; + + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return false; + } + GELOGI("[%s] Inactive output index = %d", node->GetName().c_str(), inactive_output_index); + for (const auto &out_anchor : node->GetAllOutDataAnchors()) { + if (out_anchor->GetIdx() == inactive_output_index) { + inactive_out_data_anchor = out_anchor; + } else { + active_out_data_anchor = out_anchor; + } + } + + return true; +} +} // namespace + +Status SwitchPass::DeleteSwitchNode(NodePtr &node, NodePtr &pred_node, const OutDataAnchorPtr &active_out_data_anchor) { + if (node == nullptr || active_out_data_anchor == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + // link pred's in control nodes to switch + if (GraphUtils::CopyInCtrlEdges(pred_node, node) != GRAPH_SUCCESS) { + return FAILED; + } + // Remove link between pred and switch + auto in_pred_anchor = node->GetInDataAnchor(kPredInputIndex); + GE_CHECK_NOTNULL(in_pred_anchor); + in_pred_anchor->UnlinkAll(); + + /// If condition Const is isolate, it will be delete with pruning + /// Isolate Switch and delete it + std::vector switch_io_map = {kDefaultInputIndex, kDefaultInputIndex}; + size_t out_index = static_cast(active_out_data_anchor->GetIdx()); + if (out_index >= switch_io_map.size()) { + GELOGE(FAILED, "[%s] out index check failed, out_index:%zu.", node->GetName().c_str(), out_index); + return FAILED; + } + switch_io_map[out_index] = kDataInputIndex; + return IsolateAndDeleteNode(node, switch_io_map); +} + +Status SwitchPass::Run(NodePtr &node) { + GELOGD("SwitchPass running"); + if (node == nullptr) { + GELOGE(PARAM_INVALID, "param [node] must not be null."); + return PARAM_INVALID; + } + + std::string op_type; + GE_CHK_STATUS_RET(GetOriginalType(node, 
op_type), "get original type failed"); + if ((op_type != SWITCH) && (op_type != REFSWITCH)) { + return SUCCESS; + } + + if (node->GetOutAllNodes().empty()) { + return SUCCESS; + } + + auto pred_node = PassUtils::GetInDataNode(node, kPredInputIndex); + if (pred_node == nullptr) { + GELOGD("[%s] Pred input is null.", node->GetName().c_str()); + return SUCCESS; + } + + // Can be optimized when pred is constant + if (!PassUtils::IsConstant(pred_node)) { + GELOGD("[%s] Pred is not constant.", node->GetName().c_str()); + return SUCCESS; + } + + auto input_node = PassUtils::GetInDataNode(node, kDataInputIndex); + if (input_node == nullptr) { + GELOGD("[%s] Data input is null.", node->GetName().c_str()); + return SUCCESS; + } + + // Get active & inactive output anchors by the value of pred + OutDataAnchorPtr active_out_data_anchor = nullptr; + OutDataAnchorPtr inactive_out_data_anchor = nullptr; + if (!ParseOutDataAnchors(node, pred_node, active_out_data_anchor, inactive_out_data_anchor)) { + return PARAM_INVALID; + } + + if (inactive_out_data_anchor != nullptr) { + GELOGI("[%s] To unlink inactive output %d", node->GetName().c_str(), inactive_out_data_anchor->GetIdx()); + std::vector del_nodes; + std::vector end_nodes; + Status ret = PassUtils::RemoveInactiveBranchToMerge(inactive_out_data_anchor, del_nodes, end_nodes); + if (ret != SUCCESS) { + return ret; + } + + for (auto &end_node : end_nodes) { + AddRePassNode(end_node); + } + for (const auto &delete_node : del_nodes) { + AddNodeDeleted(delete_node.get()); + } + } + + return DeleteSwitchNode(node, pred_node, active_out_data_anchor); +} +} // namespace ge diff --git a/src/ge/graph/passes/switch_pass.h b/src/ge/graph/passes/switch_pass.h new file mode 100644 index 00000000..04760843 --- /dev/null +++ b/src/ge/graph/passes/switch_pass.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_SWITCH_PASS_H_ +#define GE_GRAPH_PASSES_SWITCH_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class SwitchPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; + + private: + Status DeleteSwitchNode(NodePtr &node, NodePtr &pred_node, const OutDataAnchorPtr &active_out_data_anchor); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_SWITCH_PASS_H_ diff --git a/src/ge/graph/passes/transop_breadth_fusion_pass.cc b/src/ge/graph/passes/transop_breadth_fusion_pass.cc new file mode 100644 index 00000000..444ae979 --- /dev/null +++ b/src/ge/graph/passes/transop_breadth_fusion_pass.cc @@ -0,0 +1,202 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/transop_breadth_fusion_pass.h" + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/types.h" +#include "graph/common/transop_util.h" +#include "graph/utils/node_utils.h" + +namespace ge { +Status TransOpBreadthFusionPass::Run(ge::ComputeGraphPtr graph) { + if (graph == nullptr) { + return SUCCESS; + } + + for (auto const &node : graph->GetAllNodes()) { + GE_CHECK_NOTNULL(node); + auto ids_to_trans_nodes = GetOutputTransOpNodes(node); + for (auto const &id_to_trans_nodes : ids_to_trans_nodes) { + if (id_to_trans_nodes.second.size() > 1) { + GELOGI( + "Begin to breath fusion output trans-op-nodes for %s, " + "trans id %s, trans-op count %zu", + node->GetName().c_str(), id_to_trans_nodes.first.c_str(), id_to_trans_nodes.second.size()); + graphStatus status = Fusion(id_to_trans_nodes.second, graph); + if (status != GRAPH_SUCCESS) { + return FAILED; + } + } + } + } + return SUCCESS; +} + +std::string TransOpBreadthFusionPass::GetNodeId(const int anchor_index, const NodePtr &node) { + std::stringstream id; + bool trans_data_type = false; + bool trans_format = false; + bool trans_shape = false; + + GE_IF_BOOL_EXEC(node == nullptr || node->GetOpDesc() == nullptr, GELOGE(FAILED, "node is null"); return ""); + if (node->GetType() == CAST) { + trans_data_type = true; + } else if (node->GetType() == TRANSPOSE || node->GetType() == TRANSPOSED) { + trans_format = true; + trans_shape = true; + } else if (node->GetType() == TRANSDATA) { + trans_data_type = true; + trans_format = true; + trans_shape = true; + } else if (node->GetType() == RESHAPE) { + trans_shape = true; + } + + id << node->GetType() << '-' << anchor_index; + // temp solution, we should not care about which stream the trans op on + std::string stream_label; + if (AttrUtils::GetStr(node->GetOpDesc(), ATTR_NAME_STREAM_LABEL, stream_label)) { + GELOGD("Get stream label %s for node %s, add it to fusion id", stream_label.c_str(), node->GetName().c_str()); + 
id << '-' << stream_label; + } + if (trans_data_type) { + id << '-'; + id << static_cast(node->GetOpDesc()->GetInputDesc(0).GetDataType()); + id << '-'; + id << static_cast(node->GetOpDesc()->GetOutputDesc(0).GetDataType()); + } + if (trans_format) { + id << '-'; + id << static_cast(node->GetOpDesc()->GetInputDesc(0).GetFormat()); + id << '-'; + id << static_cast(node->GetOpDesc()->GetOutputDesc(0).GetFormat()); + } + if (trans_shape) { + id << '-'; + id << JoinDims(",", node->GetOpDesc()->GetInputDesc(0).GetShape().GetDims()); + id << '-'; + id << JoinDims(",", node->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims()); + } + + return id.str(); +} + +/// +/// Get all transform operators in the output of node. +/// @param node +/// @return std::map +/// key - transform operator identifer +/// value - transform operator set +/// +std::map> TransOpBreadthFusionPass::GetOutputTransOpNodes(const NodePtr &node) { + auto result = std::map>(); + if (node == nullptr) { + return result; + } + for (const auto &out_anchor : node->GetAllOutDataAnchors()) { + if (out_anchor == nullptr) { + continue; + } + for (const auto &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) { + if (peer_in_anchor == nullptr) { + continue; + } + + auto peer_node = peer_in_anchor->GetOwnerNode(); + if (peer_node == nullptr) { + continue; + } + + if (TransOpUtil::IsTransOp(peer_node) && + peer_in_anchor->GetIdx() == TransOpUtil::GetTransOpDataIndex(peer_node)) { + auto output_node_id = GetNodeId(out_anchor->GetIdx(), peer_node); + result[output_node_id].push_back(peer_node); + } + } + } + return result; +} + +/// +/// Reserving Transform operators which with smaller topo index, +/// other transform operators's output edges merge to the reserved transform operator. +/// Removed transform operators have no output edges. 
+/// @param trans_nodes +/// @param graph +/// +graphStatus TransOpBreadthFusionPass::Fusion(const std::vector &trans_nodes, ComputeGraphPtr &graph) { + if (trans_nodes.empty()) { + return GRAPH_FAILED; + } + + size_t min_index = 0; + GE_CHECK_NOTNULL(trans_nodes[0]); + auto op_desc = trans_nodes[0]->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + int64_t min_id = op_desc->GetId(); + size_t vec_size = trans_nodes.size(); + for (size_t i = 1; i < vec_size; i++) { + GE_CHECK_NOTNULL(trans_nodes[i]); + op_desc = trans_nodes[i]->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + if (op_desc->GetId() < min_id) { + min_index = i; + min_id = op_desc->GetId(); + } + } + + NodePtr node_remain = trans_nodes[min_index]; + for (size_t i = 0; i < trans_nodes.size(); ++i) { + if (min_index == i) { + continue; + } + graphStatus status = NodeUtils::MoveOutputEdges(trans_nodes[i], node_remain); + if (status != GRAPH_SUCCESS) { + return status; + } + // remove useless trans_node + status = GraphUtils::IsolateNode(trans_nodes[i], {}); + if (status != GRAPH_SUCCESS) { + return status; + } + + status = GraphUtils::RemoveNodeWithoutRelink(graph, trans_nodes[i]); + if (status != GRAPH_SUCCESS) { + return status; + } + GELOGD("[Breadth fusion] Remove node %s from graph", trans_nodes[i]->GetName().c_str()); + } + return GRAPH_SUCCESS; +} + +std::string TransOpBreadthFusionPass::JoinDims(const std::string &sp, const std::vector &dims) { + std::stringstream ss; + bool first = true; + for (int64_t dim : dims) { + if (first) { + first = false; + } else { + ss << sp; + } + ss << dim; + } + return ss.str(); +} +} // namespace ge diff --git a/src/ge/graph/passes/transop_breadth_fusion_pass.h b/src/ge/graph/passes/transop_breadth_fusion_pass.h new file mode 100644 index 00000000..8e7799e1 --- /dev/null +++ b/src/ge/graph/passes/transop_breadth_fusion_pass.h @@ -0,0 +1,45 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * 
you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_TRANSOP_BREADTH_FUSION_PASS_H_ +#define GE_GRAPH_PASSES_TRANSOP_BREADTH_FUSION_PASS_H_ + +#include +#include +#include + +#include "inc/graph_pass.h" + +namespace ge { +/// +/// Transform operators breadth fusion +/// +class TransOpBreadthFusionPass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph) final; + + private: + std::string GetNodeId(const int anchor_index, const NodePtr& node); + + std::map> GetOutputTransOpNodes(const NodePtr& node); + + graphStatus Fusion(const std::vector& trans_nodes, ComputeGraphPtr& graph); + + std::string JoinDims(const std::string& sp, const std::vector& dims); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_TRANSOP_BREADTH_FUSION_PASS_H_ diff --git a/src/ge/graph/passes/transop_depth_fusion_pass.cc b/src/ge/graph/passes/transop_depth_fusion_pass.cc new file mode 100644 index 00000000..140efce4 --- /dev/null +++ b/src/ge/graph/passes/transop_depth_fusion_pass.cc @@ -0,0 +1,312 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/transop_depth_fusion_pass.h" + +#include + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/compute_graph.h" +#include "graph/ge_tensor.h" +#include "graph/op_desc.h" +#include "graph/utils/graph_utils.h" + + +namespace ge { +graphStatus TransOpDepthFusionPass::Run(ComputeGraphPtr graph) { + GELOGI("[TransOpDepthFusionPass]: optimize in depth begin..."); + if (graph == nullptr) { + return GRAPH_SUCCESS; + } + for (const auto &node : graph->GetAllNodes()) { + GE_CHECK_NOTNULL(node); + if (IsTransOp(node)) { + continue; + } + GELOGD("Current normal node is: %s, type: %s, begin in-depth recursive", node->GetName().c_str(), + node->GetType().c_str()); + for (const auto &out_anchor : node->GetAllOutDataAnchors()) { + GE_CHECK_NOTNULL(out_anchor); + for (const auto &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) { + if (RecursiveInDepth(peer_in_anchor, graph) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Recursive failed, root node is: %s, type: %s", node->GetName().c_str(), + node->GetType().c_str()); + } + } + } + } + GELOGI("[TransOpDepthFusionPass]: Optimize in depth success..."); + return GRAPH_SUCCESS; +} + +/// @@ Method: +/// Depth-first recursive strategy was utilized to traverse all the trans ops. +/// Both trans ops will be offset when the back one's output desc is consistent +/// with it's former neighbor's input. +/// @@ Limitation: +/// The current method only judge the neighbors. Trans ops separated by some +/// other ops which can't be offset are not taken into account in current +/// @@ Recursive depth +/// To ensure that the stack does not overflow, the maximum depth in recursive is +/// set to be maxRecursiveDepth = 20. More trans ops are seen abnormally. 
+graphStatus TransOpDepthFusionPass::RecursiveInDepth(const InDataAnchorPtr &dst_in_anchor, + const ge::ComputeGraphPtr &graph) { + static unsigned int temp_depth = 0; + static const unsigned int max_recursive_depth = 20; + temp_depth++; + if (temp_depth >= max_recursive_depth) { + GELOGI( + "Caution: recursive depth is become %u." + "It's abnormally to have so many trans ops between two normal ops" + "Please check your graph in detail!" + "The search terminate here and continue to another branch.", + temp_depth); + temp_depth--; + return GRAPH_SUCCESS; + } + + if (dst_in_anchor == nullptr || dst_in_anchor->GetOwnerNode() == nullptr || + dst_in_anchor->GetOwnerNode()->GetOpDesc() == nullptr) { + GELOGE(FAILED, "parameter is null."); + return GRAPH_FAILED; + } + auto node = dst_in_anchor->GetOwnerNode(); + if (!IsTransOp(node)) { + GELOGD("Now the end of this branch, node: %s, type: %s, recursive depth: %u", node->GetName().c_str(), + node->GetType().c_str(), temp_depth); + temp_depth--; + return GRAPH_SUCCESS; + } else if (node->GetType() == RESHAPE || node->GetType() == REFORMAT) { + GELOGD("node: %s, type: %s does not change memory, just delete", node->GetName().c_str(), node->GetType().c_str()); + + auto out_anchor = node->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(out_anchor); + auto in_anchors = out_anchor->GetPeerInDataAnchors(); + GE_CHK_STATUS_RET(RemoveNode(node, graph), "remove edge failed"); + GELOGI("remove node: %s, type: %s.", node->GetName().c_str(), node->GetType().c_str()); + for (auto &in_anchor : in_anchors) { + GE_CHECK_NOTNULL(in_anchor); + GE_CHK_STATUS_RET(UpdateSrcAttr(in_anchor->GetPeerOutAnchor(), out_anchor, in_anchor), "UpdateSrcAttr failed"); + GE_CHK_STATUS_RET(RecursiveInDepth(in_anchor, graph), "RecursiveInDepth failed"); + } + } else if (trans_op_.empty() || !DescAreSymmetry(trans_op_.top(), node)) { + GELOGD("node: %s, type: %s can't be offset, push to trans_op_", node->GetName().c_str(), node->GetType().c_str()); + + 
trans_op_.push(node); + auto out_anchor = node->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(out_anchor); + for (const auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + GE_CHK_STATUS_RET(RecursiveInDepth(in_anchor, graph), "RecursiveInDepth failed"); + } + + if (node->GetOutDataNodesSize() == 0) { + GE_CHK_STATUS_RET(RemoveNode(node, graph), "remove node failed"); + GELOGI("backtracking, trans op: %s, type: %s will be removed", node->GetName().c_str(), node->GetType().c_str()); + } + GELOGD("backtracking, trans_op_ fall back. pop node: %s, type: %s.", trans_op_.top()->GetName().c_str(), + trans_op_.top()->GetType().c_str()); + trans_op_.pop(); + } else if (DescAreSymmetry(trans_op_.top(), node)) { + GELOGD("current node: %s, type: %s can be offset with node: %s, type %s", node->GetName().c_str(), + node->GetType().c_str(), trans_op_.top()->GetName().c_str(), trans_op_.top()->GetType().c_str()); + GELOGD("offset_op_ push node: %s, type: %s.", trans_op_.top()->GetName().c_str(), + trans_op_.top()->GetType().c_str()); + offset_op_.push(trans_op_.top()); + + auto in_data_anchor = node->GetInDataAnchor(0); + GE_CHECK_NOTNULL(in_data_anchor); + auto old_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_CHECK_NOTNULL(old_out_anchor); + auto new_out_anchor = trans_op_.top()->GetInDataAnchor(0)->GetPeerOutAnchor(); + GE_CHECK_NOTNULL(new_out_anchor); + GE_IF_BOOL_EXEC(RelinkEdges(new_out_anchor, old_out_anchor, in_data_anchor) != GRAPH_SUCCESS, + GELOGE(FAILED, "RelinkEdges fail."); + return FAILED) + auto out_anchor = node->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(out_anchor); + auto in_anchors = out_anchor->GetPeerInDataAnchors(); + + GELOGD("begin offset,trans_op_ pop node: %s, type: %s.", trans_op_.top()->GetName().c_str(), + trans_op_.top()->GetType().c_str()); + GELOGI("the offset node : %s, type: %s will be removed.", node->GetName().c_str(), node->GetType().c_str()); + GE_CHK_STATUS_RET(RemoveNode(node, graph), "remove node failed"); + trans_op_.pop(); + + for 
(const auto &in_anchor : in_anchors) { + GE_CHECK_NOTNULL(in_anchor); + GE_CHK_STATUS_RET(UpdateSrcAttr(in_anchor->GetPeerOutAnchor(), out_anchor, in_anchor), "UpdateSrcAttr failed"); + GE_CHK_STATUS_RET(RecursiveInDepth(in_anchor, graph), "RecursiveInDepth failed"); + } + + GELOGD("backtracking, trans_op_ push node: %s, type: %s.", offset_op_.top()->GetName().c_str(), + offset_op_.top()->GetType().c_str()); + trans_op_.push(offset_op_.top()); + offset_op_.pop(); + } + temp_depth--; + return GRAPH_SUCCESS; +} + +bool TransOpDepthFusionPass::IsTransOp(const NodePtr &node) { + if (node == nullptr) { + return false; + } + return node->GetType() == CAST || node->GetType() == RESHAPE || node->GetType() == REFORMAT || + node->GetType() == TRANSPOSE || node->GetType() == TRANSPOSED || node->GetType() == TRANSDATA; +} + +bool TransOpDepthFusionPass::DescAreSymmetry(const NodePtr &src_node, const NodePtr &dst_node) { + if (src_node == nullptr || dst_node == nullptr || src_node->GetOpDesc() == nullptr || + dst_node->GetOpDesc() == nullptr) { + return false; + } + auto src_input_desc = src_node->GetOpDesc()->GetInputDesc(0); + auto dst_output_desc = dst_node->GetOpDesc()->GetOutputDesc(0); + auto src_input_dtype = src_input_desc.GetDataType(); + auto src_input_format = src_input_desc.GetFormat(); + auto src_input_shape = src_input_desc.GetShape().GetDims(); + auto dst_output_dtype = dst_output_desc.GetDataType(); + auto dst_output_format = dst_output_desc.GetFormat(); + auto dst_output_shape = dst_output_desc.GetShape().GetDims(); + + if (src_node->GetType() == CAST && dst_node->GetType() == CAST) { + return src_input_dtype == dst_output_dtype && src_input_format == dst_output_format; + } else { + return src_input_dtype == dst_output_dtype && src_input_shape == dst_output_shape && + src_input_format == dst_output_format; + } +} + +// If the relationship was changed, the input and src name will be update +graphStatus TransOpDepthFusionPass::UpdateSrcAttr(const OutDataAnchorPtr 
&new_out_anchor, + const OutDataAnchorPtr &ori_out_anchor, + const InDataAnchorPtr &dst_in_anchor) { + if (dst_in_anchor == nullptr || dst_in_anchor->GetOwnerNode() == nullptr || + dst_in_anchor->GetOwnerNode()->GetOpDesc() == nullptr) { + GELOGW("dst_in_anchor or it's owner node and op_desc is nullptr"); + return GRAPH_SUCCESS; + } + GE_CHECK_NOTNULL(new_out_anchor); + GE_CHECK_NOTNULL(new_out_anchor->GetOwnerNode()); + GE_CHECK_NOTNULL(ori_out_anchor); + GE_CHECK_NOTNULL(ori_out_anchor->GetOwnerNode()); + auto new_name = new_out_anchor->GetOwnerNode()->GetName(); + auto ori_name = ori_out_anchor->GetOwnerNode()->GetName(); + auto dst_desc = dst_in_anchor->GetOwnerNode()->GetOpDesc(); + + auto ori_src_name = dst_desc->GetSrcName(); + auto ori_input_name = dst_desc->GetInputName(); + + std::vector new_src_name; + std::vector new_input_name; + + if (ori_src_name.empty()) { + new_src_name.push_back(new_name); + } else { + for (auto &src_name : ori_src_name) { + if (src_name == ori_name) { + new_src_name.push_back(new_name); + } else { + new_src_name.push_back(src_name); + } + } + } + + if (ori_input_name.empty()) { + new_input_name.push_back(new_name); + } else { + for (auto &input_name : ori_input_name) { + if (input_name == ori_name) { + new_input_name.push_back(new_name); + } else { + new_input_name.push_back(input_name); + } + } + } + dst_desc->SetSrcName(new_src_name); + dst_desc->SetInputName(new_input_name); + return GRAPH_SUCCESS; +} + +/// Relink the offset trans op with it's former neighbor's father node. 
+/// Note: control edge will be added to link the two offset ops, if the former op +/// has in control nodes +graphStatus TransOpDepthFusionPass::RelinkEdges(const OutDataAnchorPtr &new_out_anchor, + const OutDataAnchorPtr &old_out_anchor, + const InDataAnchorPtr &in_data_anchor) { + if (new_out_anchor == nullptr || old_out_anchor == nullptr || in_data_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, "new_out_anchor or old_out_anchor or in_data_anchor is nullptr"); + return GRAPH_FAILED; + } + if (new_out_anchor->GetOwnerNode() == nullptr || old_out_anchor->GetOwnerNode() == nullptr || + in_data_anchor->GetOwnerNode() == nullptr) { + GELOGE(INTERNAL_ERROR, "anchor's owner node is nullptr"); + return GRAPH_FAILED; + } + GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(old_out_anchor, in_data_anchor), "remove edge failed"); + GE_CHK_STATUS_RET(GraphUtils::AddEdge(new_out_anchor, in_data_anchor), "add edge failed"); + GELOGD( + "relink edges before remove node, remove data edge between node: %s, " + "type: %s and node: %s, type: %s.", + old_out_anchor->GetOwnerNode()->GetName().c_str(), old_out_anchor->GetOwnerNode()->GetType().c_str(), + in_data_anchor->GetOwnerNode()->GetName().c_str(), in_data_anchor->GetOwnerNode()->GetType().c_str()); + GELOGD( + "relink edges before remove node, add data edge between node: %s, " + "type: %s and node: %s, type: %s.", + new_out_anchor->GetOwnerNode()->GetName().c_str(), new_out_anchor->GetOwnerNode()->GetType().c_str(), + in_data_anchor->GetOwnerNode()->GetName().c_str(), in_data_anchor->GetOwnerNode()->GetType().c_str()); + + bool is_linked = false; + auto dst_node = in_data_anchor->GetOwnerNode(); + auto src_node = old_out_anchor->GetOwnerNode(); + auto in_ctrl_nodes = dst_node->GetInControlNodes(); + if (!in_ctrl_nodes.empty()) { + auto iter = std::find(in_ctrl_nodes.begin(), in_ctrl_nodes.end(), src_node); + is_linked = iter != in_ctrl_nodes.end(); + } + if (!src_node->GetInControlNodes().empty() && !is_linked) { + auto out_ctrl_anchor = 
src_node->GetOutControlAnchor(); + auto in_ctrl_anchor = dst_node->GetInControlAnchor(); + GE_CHK_STATUS_RET(GraphUtils::AddEdge(out_ctrl_anchor, in_ctrl_anchor), "add edge failed"); + GELOGD( + "relink edges before remove node, add control edge between node: %s," + " type: %s and node: %s, type: %s.", + src_node->GetName().c_str(), src_node->GetType().c_str(), dst_node->GetName().c_str(), + dst_node->GetType().c_str()); + } + return GRAPH_SUCCESS; +} + +// Remove trans op by using interface: IsolateNode & RemoveNodeWithoutRelink +graphStatus TransOpDepthFusionPass::RemoveNode(const NodePtr &node, const ge::ComputeGraphPtr &graph) { + if (node == nullptr || graph == nullptr) { + return GRAPH_FAILED; + } + if (GraphUtils::IsolateNode(node, {0}) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Isolate removed node: %s, type: %s failed", node->GetName().c_str(), + node->GetType().c_str()); + return GRAPH_FAILED; + } + if (GraphUtils::RemoveNodeWithoutRelink(graph, node) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Remove node: %s, type: %s without relink failed", node->GetName().c_str(), + node->GetType().c_str()); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/transop_depth_fusion_pass.h b/src/ge/graph/passes/transop_depth_fusion_pass.h new file mode 100644 index 00000000..7188f6c0 --- /dev/null +++ b/src/ge/graph/passes/transop_depth_fusion_pass.h @@ -0,0 +1,99 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_TRANSOP_DEPTH_FUSION_PASS_H_ +#define GE_GRAPH_PASSES_TRANSOP_DEPTH_FUSION_PASS_H_ + +#include +#include +#include + +#include "inc/graph_pass.h" + +namespace ge { +/// +/// Transform operators depth fusion +/// +class TransOpDepthFusionPass : public GraphPass { + public: + TransOpDepthFusionPass() = default; + ~TransOpDepthFusionPass() = default; + + graphStatus Run(ge::ComputeGraphPtr graph) override; + + private: + /// + /// judge whether an operator is a transform op or not + /// @param node + /// @return True or False + /// + static bool IsTransOp(const NodePtr &node); + + /// + /// two transform nodes can be offset only when the front node's input is + /// consistent with the back one's output + /// @param src_node: the front node + /// @param dst_node: the back node + /// @return True or False, whether can be offset or not + /// + static bool DescAreSymmetry(const NodePtr &src_node, const NodePtr &dst_node); + + /// + /// update the input_name and src_name info when the relationship was changed + /// @param src_out_anchor: the new peer in data anchor of dst_in_anchor + /// @param old_src_anchor: the original peer in data anchor of dst_in_anchor + /// @param dst_in_anchor: the target anchor + /// @return Status + /// + static graphStatus UpdateSrcAttr(const OutDataAnchorPtr &src_out_anchor, const OutDataAnchorPtr &old_src_anchor, + const InDataAnchorPtr &dst_in_anchor); + + /// + /// Depth-first recursive to traverse all the transops + /// @param dst_in_anchor: each in_data_anchor is set as the root in the recursive + /// @return Status + /// + graphStatus RecursiveInDepth(const InDataAnchorPtr &dst_in_anchor, const ge::ComputeGraphPtr &graph); + + /// + /// Remove transop by using interface: IsolateNode & RemoveNodeWithoutRelink + /// @param node: the trans op which will be removed + /// @return Status + /// + static 
graphStatus RemoveNode(const NodePtr &node, const ge::ComputeGraphPtr &graph); + + /// + /// Relink the offset trans op with it's former's father node. + /// Note: control edge will be added to link the two offset ops, if the former + /// trans op have in control nodes + /// @param new_out_anchor: out_data_anchor of father node of the former trans op + /// @param old_out_anchor: out_data_anchor of the former trans op + /// @param in_data_anchor: in_data_anchor of the after trans op + /// @return Status + /// + static graphStatus RelinkEdges(const OutDataAnchorPtr &new_out_anchor, const OutDataAnchorPtr &old_out_anchor, + const InDataAnchorPtr &in_data_anchor); + + /// + /// @param trans_op_ : the trans op which can't be offset at the moment + /// @param offset_op_ : the former one of the offset pair nodes + /// + std::stack trans_op_; + std::stack offset_op_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_TRANSOP_DEPTH_FUSION_PASS_H_ diff --git a/src/ge/graph/passes/transop_nearby_allreduce_fusion_pass.cc b/src/ge/graph/passes/transop_nearby_allreduce_fusion_pass.cc new file mode 100644 index 00000000..083c30ea --- /dev/null +++ b/src/ge/graph/passes/transop_nearby_allreduce_fusion_pass.cc @@ -0,0 +1,173 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/transop_nearby_allreduce_fusion_pass.h" + +#include "framework/common/debug/ge_log.h" +#include "common/debug/log.h" +#include "common/types.h" +#include "graph/utils/graph_utils.h" +#include "graph/common/transop_util.h" + +namespace ge { +Status TransOpNearbyAllreduceFusionPass::Run(NodePtr &node) { + if (node == nullptr) { + GELOGW("null node is existed in graph"); + return SUCCESS; + } + + if (node->GetType() == HCOMALLREDUCE) { + GELOGI("found allreduce op %s", node->GetName().c_str()); + Status ret = RemoveNearbyPairedTransOps(node); + if (ret != SUCCESS) { + GELOGE(FAILED, "failed to remove paired transop for allreduce op %s", node->GetName().c_str()); + return FAILED; + } + GELOGI("successfully remove paired transop for allreduce op (%s)", node->GetName().c_str()); + } + + return SUCCESS; +} + +bool TransOpNearbyAllreduceFusionPass::IsSymmetricTransOps(const NodePtr &node1, const NodePtr &node2) { + if (node1 == nullptr || node2 == nullptr || node1->GetOpDesc() == nullptr || node2->GetOpDesc() == nullptr) { + return false; + } + + if (node1->GetType() != TRANSDATA || node2->GetType() != TRANSDATA) { + return false; + } + + // two symmetric trans ops should have same type + if (node1->GetType() != node2->GetType()) { + return false; + } + + auto node1_input_desc = node1->GetOpDesc()->GetInputDesc(0); + auto node1_output_desc = node1->GetOpDesc()->GetOutputDesc(0); + + auto node2_input_desc = node2->GetOpDesc()->GetInputDesc(0); + auto node2_output_desc = node2->GetOpDesc()->GetOutputDesc(0); + + // two symmetric trans ops should have symmetric input/output datatype + GELOGD("format: nod1_input=%d, nod1_output=%d, nod2_input=%d, nod2_output=%d", + node1_input_desc.GetFormat(), node1_output_desc.GetFormat(), node2_input_desc.GetFormat(), + node2_output_desc.GetFormat()); + if (node1_input_desc.GetFormat() != node2_output_desc.GetFormat() || + node1_output_desc.GetFormat() != node2_input_desc.GetFormat()) { + return false; + } + + 
// two symmetric trans ops should have symmetric input/output format + GELOGD("datatype: nod1_input=%d, nod1_output=%d, nod2_input=%d, nod2_output=%d", + node1_input_desc.GetDataType(), node1_output_desc.GetDataType(), node2_input_desc.GetDataType(), + node2_output_desc.GetDataType()); + if (node1_input_desc.GetDataType() != node2_output_desc.GetDataType() || + node1_output_desc.GetDataType() != node2_input_desc.GetDataType()) { + return false; + } + + // two symmetric trans ops should have symmetric input/output shape + if (node1_input_desc.GetShape().GetDims() != node2_output_desc.GetShape().GetDims() || + node1_output_desc.GetShape().GetDims() != node2_input_desc.GetShape().GetDims()) { + return false; + } + return true; +} + +Status TransOpNearbyAllreduceFusionPass::RemoveNearbyPairedTransOps(const NodePtr &node) { + if (node == nullptr) { + return FAILED; + } + GELOGI("find allReduce node %s", node->GetName().c_str()); + auto in_data_anchors = node->GetAllInDataAnchors(); + auto out_data_anchors = node->GetAllOutDataAnchors(); + if (in_data_anchors.size() != out_data_anchors.size()) { + GELOGE(FAILED, "in and out data anchor size are not equal, node=%s, in_size=%zu, out_size=%zu", + node->GetName().c_str(), in_data_anchors.size(), out_data_anchors.size()); + return FAILED; + } + + size_t data_anchor_size = in_data_anchors.size(); + GELOGI("node = %s, data_anchor_size = %zu", node->GetName().c_str(), data_anchor_size); + + size_t removed_node_count = 0; + for (size_t i = 0; i < data_anchor_size; i++) { + if (in_data_anchors.at(i) == nullptr || out_data_anchors.at(i) == nullptr) { + GELOGW("node=%s has a null anchor at idx=%zu", node->GetName().c_str(), i); + continue; + } + if (in_data_anchors.at(i)->GetPeerAnchors().size() != 1) { + GELOGW("nodes=%s has abnormal in peer anchors at %zu", node->GetName().c_str(), i); + continue; + } + if (out_data_anchors.at(i)->GetPeerAnchors().size() != 1) { + GELOGW("nodes=%s has abnormal out peer anchors at %zu", 
node->GetName().c_str(), i); + continue; + } + auto in_first_peer_anchor = in_data_anchors.at(i)->GetFirstPeerAnchor(); + if (in_first_peer_anchor == nullptr) { + GELOGW("node=%s, input anchor idx=%zu, first peer anchor is null", node->GetName().c_str(), i); + continue; + } + auto out_first_peer_anchor = out_data_anchors.at(i)->GetFirstPeerAnchor(); + if (out_first_peer_anchor == nullptr) { + GELOGW("node=%s, output anchor idx=%zu, first peer anchor is null", node->GetName().c_str(), i); + continue; + } + auto in_node = in_first_peer_anchor->GetOwnerNode(); + auto out_node = out_first_peer_anchor->GetOwnerNode(); + + GELOGI("in_node=%s, out_node=%s", in_node->GetName().c_str(), out_node->GetName().c_str()); + if (!IsSymmetricTransOps(in_node, out_node)) { + GELOGD("ignore asymmetric transop %s and %s for node %s", + in_node->GetName().c_str(), out_node->GetName().c_str(), node->GetName().c_str()); + continue; + } + + // delete in_node + if (IsolateAndDeleteNode(in_node, {0}) != SUCCESS) { + GELOGE(FAILED, "remove node %s failed", in_node->GetName().c_str()); + return FAILED; + } + removed_node_count++; + + // delete out_node + if (IsolateAndDeleteNode(out_node, {0}) != SUCCESS) { + GELOGE(FAILED, "remove node %s failed", out_node->GetName().c_str()); + return FAILED; + } + removed_node_count++; + + // update allreduce input/output desc + GE_CHECK_NOTNULL(node->GetOpDesc()); + GE_CHECK_NOTNULL(in_node->GetOpDesc()); + GE_CHECK_NOTNULL(out_node->GetOpDesc()); + auto input_desc = in_node->GetOpDesc()->GetInputDesc(0); + auto output_desc = out_node->GetOpDesc()->GetOutputDesc(0); + if (node->GetOpDesc()->UpdateInputDesc(static_cast(i), input_desc) != GRAPH_SUCCESS) { + GELOGE(FAILED, "UpdateInputDesc fail."); + } + if (node->GetOpDesc()->UpdateOutputDesc(static_cast(i), output_desc) != GRAPH_SUCCESS) { + GELOGE(FAILED, "UpdateOutputDesc"); + } + GELOGI("successfully remove paired transop (%s and %s) for node %s", + in_node->GetName().c_str(), 
out_node->GetName().c_str(), node->GetName().c_str()); + } + GELOGI("successfully remove %zu pair of transops in total for node %s", removed_node_count, node->GetName().c_str()); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/transop_nearby_allreduce_fusion_pass.h b/src/ge/graph/passes/transop_nearby_allreduce_fusion_pass.h new file mode 100644 index 00000000..1cd1eeec --- /dev/null +++ b/src/ge/graph/passes/transop_nearby_allreduce_fusion_pass.h @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_TRANSOP_NEARBY_ALLREDUCE_FUSION_PASS_H_ +#define GE_GRAPH_PASSES_TRANSOP_NEARBY_ALLREDUCE_FUSION_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class TransOpNearbyAllreduceFusionPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; + + private: + bool IsSymmetricTransOps(const NodePtr &node1, const NodePtr &node2); + + Status RemoveNearbyPairedTransOps(const NodePtr &node); +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_TRANSOP_NEARBY_ALLREDUCE_FUSION_PASS_H_ diff --git a/src/ge/graph/passes/transop_without_reshape_fusion_pass.cc b/src/ge/graph/passes/transop_without_reshape_fusion_pass.cc new file mode 100644 index 00000000..16fec054 --- /dev/null +++ b/src/ge/graph/passes/transop_without_reshape_fusion_pass.cc @@ -0,0 +1,1093 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/transop_without_reshape_fusion_pass.h" + +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "framework/common/debug/ge_log.h" +#include "graph/compute_graph.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_tensor.h" +#include "graph/op_desc.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/type_utils.h" +#include "init/gelib.h" + +namespace { +const char *const kRemainNode = "node_remain"; +const int kInvalidFusionOpCount = -1; +const char *const kAttrNameSrcFormat = "src_format"; +const char *const kAttrNameDstFormat = "dst_format"; +} // namespace + +namespace ge { +void TransOpWithoutReshapeFusionPass::SetRemainNode( + const vector> &nodes_anchor) { + auto iter = nodes_anchor.begin(); + while (iter != nodes_anchor.end()) { + auto in_anchor = iter->second; + if (in_anchor == nullptr) { + return; + } + auto in_node = in_anchor->GetOwnerNode(); + ++iter; + if (in_node == nullptr) { + return; + } + if (!IsTransOp(in_node)) { + continue; + } + + auto op_desc = in_node->GetOpDesc(); + if (op_desc == nullptr) { + continue; + } + GE_IF_BOOL_EXEC(!op_desc->SetExtAttr(kRemainNode, true), GELOGE(INTERNAL_ERROR, "set ext attr failed"); return); + } +} + +bool TransOpWithoutReshapeFusionPass::FormatContinuousCheck(const OutDataAnchorPtr &out_anchor, + const InDataAnchorPtr &in_anchor) { + if (out_anchor == nullptr || in_anchor == nullptr || in_anchor->GetOwnerNode() == nullptr || + out_anchor->GetOwnerNode() == nullptr) { + return false; + } + auto in_node = in_anchor->GetOwnerNode(); + GE_IF_BOOL_EXEC(in_node == nullptr, GELOGE(INTERNAL_ERROR, "in_node is null"); return false); + auto in_op = in_node->GetOpDesc(); + auto out_owner_node = out_anchor->GetOwnerNode(); + GE_IF_BOOL_EXEC(out_owner_node == nullptr, GELOGE(INTERNAL_ERROR, "out_owner_node is null"); return false); + 
auto out_op = out_owner_node->GetOpDesc(); + GE_IF_BOOL_EXEC(in_op == nullptr, GELOGE(INTERNAL_ERROR, "in_op is null"); return false); + GE_IF_BOOL_EXEC(out_op == nullptr, GELOGE(INTERNAL_ERROR, "out_op is null"); return false); + auto in_op_desc = in_op->GetInputDescPtr(in_anchor->GetIdx()); + auto out_op_desc = out_op->GetOutputDescPtr(out_anchor->GetIdx()); + GE_IF_BOOL_EXEC(in_op_desc == nullptr, GELOGE(INTERNAL_ERROR, "in_op_desc is null"); return false); + GE_IF_BOOL_EXEC(out_op_desc == nullptr, GELOGE(INTERNAL_ERROR, "out_op_desc is null"); return false); + if (!ShapeEqualCheck(in_op_desc->GetShape(), out_op_desc->GetShape())) { + return false; + } + + if (in_op->GetType() == CAST || out_op->GetType() == CAST) { + return true; + } + + if (in_op_desc->GetFormat() == FORMAT_ND) { + return false; + } + + if (out_op_desc->GetFormat() == FORMAT_ND) { + return false; + } + + if (in_op_desc->GetFormat() != out_op_desc->GetFormat()) { + return false; + } + + return FusionFormatSupport(in_op_desc->GetFormat()); +} + +graphStatus TransOpWithoutReshapeFusionPass::GetSubGraphNodesInfo() { + vector sub_graph_has_reshape_node(sub_graph_anchors_.size(), false); + vector transop_num_count(sub_graph_anchors_.size(), 0); + vector> sub_graph_nodes(sub_graph_anchors_.size()); + for (size_t i = 0; i < sub_graph_anchors_.size(); ++i) { + auto nodes_anchor = sub_graph_anchors_[i]; + vector nodes_tmp; + auto iter = nodes_anchor.begin(); + auto first_out_anchor = iter->first; + if (first_out_anchor == nullptr) { + continue; + } + nodes_tmp.push_back(first_out_anchor->GetOwnerNode()); + while (iter != nodes_anchor.end()) { + auto in_anchor = iter->second; + GE_CHECK_NOTNULL(in_anchor); + auto in_node = in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(in_node); + if (in_node->GetType() == RESHAPE) { + sub_graph_has_reshape_node[i] = true; + break; + } + + auto out_anchor = iter->first; + GE_CHECK_NOTNULL(out_anchor); + if (!FormatContinuousCheck(out_anchor, in_anchor)) { + 
sub_graph_has_reshape_node[i] = true; + break; + } + + nodes_tmp.push_back(in_node); + if (IsTransOp(in_node)) { + // count transop num + transop_num_count[i]++; + } + ++iter; + } + sub_graph_nodes[i].swap(nodes_tmp); + if (sub_graph_has_reshape_node[i]) { + SetRemainNode(nodes_anchor); + } + } + + sub_graph_has_reshape_node_.swap(sub_graph_has_reshape_node); + transop_num_count_.swap(transop_num_count); + sub_graph_nodes_.swap(sub_graph_nodes); + return GRAPH_SUCCESS; +} + +void TransOpWithoutReshapeFusionPass::GetOutDataPeerInControlAnchors( + const size_t index, vector> &out_data_peer_in_control_anchors) { + // The caller guarantees that the index is legal. + for (size_t j = 1; j < sub_graph_anchors_[index].size(); ++j) { + auto nodes_anchor = sub_graph_anchors_[index][j]; + auto out_data_anchor = nodes_anchor.first; + GE_CHECK_NOTNULL_JUST_RETURN(out_data_anchor); + for (const auto &peer_in_control_anchor : out_data_anchor->GetPeerInControlAnchors()) { + GE_CHECK_NOTNULL_JUST_RETURN(peer_in_control_anchor); + auto peer_node = peer_in_control_anchor->GetOwnerNode(); + if (peer_node == nullptr) { + continue; + } + auto iter = std::find(sub_graph_nodes_[index].begin(), sub_graph_nodes_[index].end(), peer_node); + if (iter == sub_graph_nodes_[index].end()) { + out_data_peer_in_control_anchors[index].push_back(peer_in_control_anchor); + } else { + sub_graph_has_out_data_peer_in_control_edge_[index] = true; + } + } + } +} + +void TransOpWithoutReshapeFusionPass::GetInControlPeerOutControlAnchors( + const size_t index, vector> &in_control_peer_out_control_anchors) { + // The caller guarantees that the index is legal. 
+ for (size_t j = 1; j < sub_graph_nodes_[index].size(); ++j) { + auto node = sub_graph_nodes_[index][j]; + GE_CHECK_NOTNULL_JUST_RETURN(node); + auto in_control_anchor = node->GetInControlAnchor(); + if (in_control_anchor == nullptr) { + continue; + } + + for (const auto &peer_out_anchor : in_control_anchor->GetPeerOutControlAnchors()) { + GE_CHECK_NOTNULL_JUST_RETURN(peer_out_anchor); + auto peer_node = peer_out_anchor->GetOwnerNode(); + if (peer_node == nullptr) { + continue; + } + auto findIter = std::find(sub_graph_nodes_[index].begin(), sub_graph_nodes_[index].end(), peer_node); + if (findIter == sub_graph_nodes_[index].end()) { + in_control_peer_out_control_anchors[index].push_back(peer_out_anchor); + } else { + sub_graph_has_control_edge_[index] = true; + } + } + } +} + +void TransOpWithoutReshapeFusionPass::GetOutControlPeerAnchors( + const size_t index, vector> &out_control_peer_in_control_anchors, + vector> &out_control_peer_in_data_anchors) { + for (size_t j = 0; j < sub_graph_nodes_[index].size() - 1; ++j) { + auto node = sub_graph_nodes_[index][j]; + GE_CHECK_NOTNULL_JUST_RETURN(node); + auto out_control_anchor = node->GetOutControlAnchor(); + GE_CHECK_NOTNULL_JUST_RETURN(out_control_anchor); + + for (const auto &peer_in_anchor : out_control_anchor->GetPeerInControlAnchors()) { + GE_CHECK_NOTNULL_JUST_RETURN(peer_in_anchor); + auto peer_node = peer_in_anchor->GetOwnerNode(); + if (peer_node == nullptr) { + continue; + } + auto iter = std::find(sub_graph_nodes_[index].begin(), sub_graph_nodes_[index].end(), peer_node); + if (iter == sub_graph_nodes_[index].end()) { + if (j > 0) { + out_control_peer_in_control_anchors[index].push_back(peer_in_anchor); + } + } else { + sub_graph_has_control_edge_[index] = true; + } + } + + for (const auto &peer_in_anchor : out_control_anchor->GetPeerInDataAnchors()) { + GE_CHECK_NOTNULL_JUST_RETURN(peer_in_anchor); + auto peer_node = peer_in_anchor->GetOwnerNode(); + if (peer_node == nullptr) { + continue; + } + auto 
iter = std::find(sub_graph_nodes_[index].begin(), sub_graph_nodes_[index].end(), peer_node); + if (iter == sub_graph_nodes_[index].end()) { + if (j > 0) { + out_control_peer_in_data_anchors[index].push_back(peer_in_anchor); + } + } else { + sub_graph_has_control_edge_[index] = true; + } + } + } +} + +void TransOpWithoutReshapeFusionPass::GetControlAnchors() { + vector> in_control_peer_out_control_anchors(sub_graph_nodes_.size()); + vector> out_control_peer_in_control_anchors(sub_graph_nodes_.size()); + vector> out_control_peer_in_data_anchors(sub_graph_nodes_.size()); + vector> out_data_peer_in_control_anchors(sub_graph_nodes_.size()); + vector sub_graph_has_control_edge(sub_graph_nodes_.size(), false); + sub_graph_has_control_edge_.swap(sub_graph_has_control_edge); + vector sub_graph_has_out_data_peer_in_control_edge(sub_graph_nodes_.size(), false); + sub_graph_has_out_data_peer_in_control_edge_.swap(sub_graph_has_out_data_peer_in_control_edge); + for (size_t i = 0; i < sub_graph_nodes_.size(); ++i) { + if (sub_graph_has_reshape_node_[i]) { + continue; + } + + GetOutDataPeerInControlAnchors(i, out_data_peer_in_control_anchors); + + GetInControlPeerOutControlAnchors(i, in_control_peer_out_control_anchors); + + GetOutControlPeerAnchors(i, out_control_peer_in_control_anchors, out_control_peer_in_data_anchors); + } + + in_control_peer_out_control_anchors_.swap(in_control_peer_out_control_anchors); + out_control_peer_in_control_anchors_.swap(out_control_peer_in_control_anchors); + out_control_peer_in_data_anchors_.swap(out_control_peer_in_data_anchors); + out_data_peer_in_control_anchors_.swap(out_data_peer_in_control_anchors); +} + +void TransOpWithoutReshapeFusionPass::EraseInvalidAnchorsPair() { + auto sub_graph_iter = sub_graph_anchors_.begin(); + while (sub_graph_iter != sub_graph_anchors_.end()) { + if (sub_graph_iter->size() <= 1) { + sub_graph_iter = sub_graph_anchors_.erase(sub_graph_iter); + } else { + ++sub_graph_iter; + } + } +} + +void 
TransOpWithoutReshapeFusionPass::UpdateOutputName(const OutDataAnchorPtr &out_anchor, + const InDataAnchorPtr &old_peer_in_anchor, + const NodePtr &in_owner_node) { + if (out_anchor == nullptr || old_peer_in_anchor == nullptr || in_owner_node == nullptr) { + GELOGI("out_anchor or old_peer_in_anchor or in_owner_node is nullptr"); + return; + } + auto out_owner_node = out_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL_JUST_RETURN(out_owner_node); + GE_CHECK_NOTNULL_JUST_RETURN(old_peer_in_anchor->GetOwnerNode()); + auto old_peer_in_name = old_peer_in_anchor->GetOwnerNode()->GetName(); + auto output_op = out_owner_node->GetOpDesc(); + GE_CHECK_NOTNULL_JUST_RETURN(output_op); + auto output_names = output_op->GetAllOutputName(); + auto old_peer_in_name_iter = output_names.find(old_peer_in_name); + if (old_peer_in_name_iter != output_names.end()) { + output_names.erase(old_peer_in_name_iter); + } + output_names[in_owner_node->GetName()] = out_anchor->GetIdx(); + if (!output_op->UpdateOutputName(output_names)) { + GELOGW("output_op UpdateOutputName failed"); + } +} + +void TransOpWithoutReshapeFusionPass::UpdateInputName(const OutDataAnchorPtr &old_peer_out_anchor, + const InDataAnchorPtr &in_anchor, const NodePtr &out_owner_node) { + if (old_peer_out_anchor == nullptr || in_anchor == nullptr || out_owner_node == nullptr) { + GELOGI("old_peer_out_anchor or in_anchor or out_owner_node is nullptr"); + return; + } + auto old_node = old_peer_out_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL_JUST_RETURN(old_node); + auto old_peer_out_name = old_node->GetName(); + auto in_owner_node = in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL_JUST_RETURN(in_owner_node); + auto input_op = in_owner_node->GetOpDesc(); + GE_CHECK_NOTNULL_JUST_RETURN(input_op); + auto input_names = input_op->GetAllInputName(); + auto old_peer_out_name_iter = input_names.find(old_peer_out_name); + if (old_peer_out_name_iter != input_names.end()) { + input_names.erase(old_peer_out_name_iter); + } + 
input_names[out_owner_node->GetName()] = in_anchor->GetIdx(); + input_op->UpdateInputName(input_names); +} + +graphStatus TransOpWithoutReshapeFusionPass::RelinkSubGraphControlEdges( + const pair &begin_anchors_pair, + const pair &end_anchors_pair, const int index) { + auto out_anchor = begin_anchors_pair.first; + GE_CHECK_NOTNULL(out_anchor); + auto out_owner_node = out_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(out_owner_node); + auto in_anchor = end_anchors_pair.second; + GE_CHECK_NOTNULL(in_anchor); + auto in_owner_node = in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(in_owner_node); + if (sub_graph_has_control_edge_[index]) { + GELOGI("add control edge.src:%s, dst:%s", out_owner_node->GetName().c_str(), in_owner_node->GetName().c_str()); + if (GraphUtils::AddEdge(out_owner_node->GetOutControlAnchor(), in_owner_node->GetInControlAnchor()) != + GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + if (sub_graph_has_out_data_peer_in_control_edge_[index]) { + GELOGI("add out data 2 in contorl edge.src:%s, dst:%s", out_owner_node->GetName().c_str(), + in_owner_node->GetName().c_str()); + if (GraphUtils::AddEdge(out_anchor, in_owner_node->GetInControlAnchor()) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +graphStatus TransOpWithoutReshapeFusionPass::RelinkControlEdgesWhenDescNotChanged( + const pair &begin_anchors_pair, + const pair &end_anchors_pair, const int index) { + if (RelinkSubGraphControlEdges(begin_anchors_pair, end_anchors_pair, index) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + auto out_anchor = begin_anchors_pair.first; + GE_CHECK_NOTNULL(out_anchor); + auto out_owner_node = out_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(out_owner_node); + auto in_anchor = end_anchors_pair.second; + GE_CHECK_NOTNULL(in_anchor); + auto in_owner_node = in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(in_owner_node); + // can not remove old control edge + for (const auto &peer_in_anchor : out_control_peer_in_control_anchors_[index]) { + 
GE_CHECK_NOTNULL(peer_in_anchor); + GELOGI("add control edge.src:%s, dst:%s, dst idx:%d", out_owner_node->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str(), peer_in_anchor->GetIdx()); + if (GraphUtils::AddEdge(out_owner_node->GetOutControlAnchor(), peer_in_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + for (const auto &peer_out_anchor : in_control_peer_out_control_anchors_[index]) { + GE_CHECK_NOTNULL(peer_out_anchor); + GELOGI("add control edge.src:%s, src idx:%d, dst:%s", peer_out_anchor->GetOwnerNode()->GetName().c_str(), + peer_out_anchor->GetIdx(), in_owner_node->GetName().c_str()); + if (GraphUtils::AddEdge(peer_out_anchor, in_owner_node->GetInControlAnchor()) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + for (const auto &peer_in_anchor : out_control_peer_in_data_anchors_[index]) { + GE_CHECK_NOTNULL(peer_in_anchor); + GELOGI("add out control 2 in data edge.src:%s, dst:%s, dst idx:%d", out_owner_node->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str(), peer_in_anchor->GetIdx()); + if (GraphUtils::AddEdge(out_owner_node->GetOutControlAnchor(), peer_in_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + for (const auto &peer_in_anchor : out_data_peer_in_control_anchors_[index]) { + GE_CHECK_NOTNULL(peer_in_anchor); + GELOGI("add out data 2 in control edge.src:%s, dst:%s, dst idx:%d", out_owner_node->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str(), peer_in_anchor->GetIdx()); + if (GraphUtils::AddEdge(out_anchor, peer_in_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +graphStatus TransOpWithoutReshapeFusionPass::RelinkNodesWhenDescNotChanged( + const pair &begin_anchors_pair, + const pair &end_anchors_pair, const int index) { + auto out_anchor = begin_anchors_pair.first; + GE_CHECK_NOTNULL(out_anchor); + auto out_owner_node = out_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(out_owner_node); + auto in_anchor = 
end_anchors_pair.second; + GE_CHECK_NOTNULL(in_anchor); + auto in_owner_node = in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(in_owner_node); + GELOGI("remove edge.src %s, src idx:%d, dst:%s, dst idx:%d", + end_anchors_pair.first->GetOwnerNode()->GetName().c_str(), end_anchors_pair.first->GetIdx(), + in_owner_node->GetName().c_str(), in_anchor->GetIdx()); + GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(end_anchors_pair.first, in_anchor), "remove edge failed"); + GELOGI("relink node.src node:%s, src idx:%d, dst node:%s, dst idx:%d", out_owner_node->GetName().c_str(), + out_anchor->GetIdx(), in_owner_node->GetName().c_str(), in_anchor->GetIdx()); + if (GraphUtils::AddEdge(out_anchor, in_anchor) != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "add edge failed!src:%s, src idx:%d, dst:%s, dst idx:%d", out_owner_node->GetName().c_str(), + out_anchor->GetIdx(), in_owner_node->GetName().c_str(), in_anchor->GetIdx()); + return GRAPH_FAILED; + } else { + auto old_peer_in_anchor = begin_anchors_pair.second; + UpdateOutputName(out_anchor, old_peer_in_anchor, in_owner_node); + + auto old_peer_out_anchor = end_anchors_pair.first; + UpdateInputName(old_peer_out_anchor, in_anchor, out_owner_node); + } + + return RelinkControlEdgesWhenDescNotChanged(begin_anchors_pair, end_anchors_pair, index); +} + +OpDescPtr TransOpWithoutReshapeFusionPass::GetFormatTransferOp(const GeTensorDesc &format_trans_input_desc, + const GeTensorDesc &format_trans_output_desc) { + static uint32_t fusion_format_transfer_op_count = 1; + std::stringstream format_transfer_op_name; + format_transfer_op_name << "fusion_format_transfer_" << fusion_format_transfer_op_count++; + OpDescPtr format_transfer_op = MakeShared(format_transfer_op_name.str().c_str(), TRANSDATA); + if (format_transfer_op == nullptr) { + GELOGE(INTERNAL_ERROR, "new format transfer op failed!"); + return nullptr; + } + + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(format_transfer_op, ATTR_NAME_INPUT_FORMAT, + static_cast(format_trans_input_desc.GetFormat())), + 
GELOGE(INTERNAL_ERROR, "set ATTR_NAME_INPUT_FORMAT failed"); + return nullptr); + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(format_transfer_op, ATTR_NAME_OUTPUT_FORMAT, + static_cast(format_trans_output_desc.GetFormat())), + GELOGE(INTERNAL_ERROR, "set ATTR_NAME_OUTPUT_FORMAT failed"); + return nullptr); + + string src_format = TypeUtils::FormatToSerialString(format_trans_input_desc.GetFormat()); + string dst_format = TypeUtils::FormatToSerialString(format_trans_output_desc.GetFormat()); + + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(format_transfer_op, kAttrNameSrcFormat, src_format), + GELOGE(INTERNAL_ERROR, "set kAttrNameSrcFormat failed"); + return nullptr); + + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(format_transfer_op, kAttrNameDstFormat, dst_format), + GELOGE(INTERNAL_ERROR, "set kAttrNameDstFormat failed"); + return nullptr); + + GE_IF_BOOL_EXEC(format_transfer_op->AddInputDesc(format_trans_input_desc) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "add input desc failed"); + return nullptr); + + GE_IF_BOOL_EXEC(format_transfer_op->AddOutputDesc(format_trans_output_desc) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "add output desc failed"); + return nullptr); + + GE_IF_BOOL_EXEC(!ge::AttrUtils::SetBool(format_transfer_op, ATTR_NEED_COMPILE, true), + GELOGE(INTERNAL_ERROR, "set ext attr failed"); + return nullptr); + return format_transfer_op; +} + +OpDescPtr TransOpWithoutReshapeFusionPass::GetCastOp(const GeTensorDesc &cast_input_desc, + const GeTensorDesc &cast_output_desc) { + std::stringstream cast_op_name; + static uint32_t fusion_cast_op_count = 1; + cast_op_name << "fusion_cast_op_" << fusion_cast_op_count++; + auto node_op = ge::OperatorFactory::CreateOperator(cast_op_name.str(), CAST); + auto cast_op = ge::OpDescUtils::GetOpDescFromOperator(node_op); + if (cast_op == nullptr) { + GELOGE(INTERNAL_ERROR, "new cast op failed!"); + return nullptr; + } + const int default_input_index = 0; + const int default_output_index = 0; + if (cast_op->GetInputsSize() == 0) { + 
GE_IF_BOOL_EXEC(cast_op->AddInputDesc(cast_input_desc) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "add input desc failed"); + return nullptr); + } else { + GE_IF_BOOL_EXEC(cast_op->UpdateInputDesc(default_input_index, cast_input_desc) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "update input desc failed"); + return nullptr); + } + + if (cast_op->GetOutputsSize() == 0) { + GE_IF_BOOL_EXEC(cast_op->AddOutputDesc(cast_output_desc) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "add output desc failed"); + return nullptr); + } else { + GE_IF_BOOL_EXEC(cast_op->UpdateOutputDesc(default_output_index, cast_output_desc) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "update output desc failed"); + return nullptr); + } + + if (!AttrUtils::SetInt(cast_op, CAST_ATTR_DST_TYPE, static_cast(cast_output_desc.GetDataType()))) { + GELOGE(INTERNAL_ERROR, "set dst_type attr failed"); + return nullptr; + } + if (!AttrUtils::SetBool(cast_op, ATTR_NEED_COMPILE, true)) { + GELOGE(INTERNAL_ERROR, "set need_compile attr failed"); + return nullptr; + } + return cast_op; +} + +bool TransOpWithoutReshapeFusionPass::InsertCastFirstCheck(const GeTensorDesc &out_desc, + const GeTensorDesc &in_desc) const { + return out_desc.GetDataType() != in_desc.GetDataType() && out_desc.GetDataType() != DT_FLOAT16 && + in_desc.GetDataType() == DT_FLOAT16; +} + +void TransOpWithoutReshapeFusionPass::GetFormatTransferDesc(const GeTensorDesc &out_desc, const GeTensorDesc &in_desc, + GeTensorDesc &format_transfer_input, + GeTensorDesc &format_transfer_output) { + bool insert_cast_first = InsertCastFirstCheck(out_desc, in_desc); + if (insert_cast_first) { + format_transfer_input = out_desc; + format_transfer_input.SetDataType(in_desc.GetDataType()); + format_transfer_output = in_desc; + } else { + format_transfer_input = out_desc; + format_transfer_output = in_desc; + format_transfer_output.SetDataType(out_desc.GetDataType()); + } +} + +void TransOpWithoutReshapeFusionPass::GetCastOpDesc(const GeTensorDesc &out_desc, 
const GeTensorDesc &in_desc, + GeTensorDesc &cast_input, GeTensorDesc &cast_output) { + bool insert_cast_first = InsertCastFirstCheck(out_desc, in_desc); + if (insert_cast_first) { + cast_input = out_desc; + cast_output = out_desc; + cast_output.SetDataType(in_desc.GetDataType()); + } else { + cast_input = in_desc; + cast_input.SetDataType(out_desc.GetDataType()); + cast_output = in_desc; + } +} + +void TransOpWithoutReshapeFusionPass::GetBeginOutDescAndEndInDesc(const int index, GeTensorDesc &out_desc, + GeTensorDesc &in_desc) { + auto nodes_anchor = sub_graph_anchors_[index]; + auto out_peer_anchor = nodes_anchor.front().second; + GE_CHECK_NOTNULL_JUST_RETURN(out_peer_anchor); + auto out_owner_node = out_peer_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL_JUST_RETURN(out_owner_node); + auto out_peer_op_desc = out_owner_node->GetOpDesc(); + GE_IF_BOOL_EXEC(out_peer_op_desc == nullptr, GELOGE(INTERNAL_ERROR, "out_peer_op_desc is nullptr"); return); + out_desc = out_peer_op_desc->GetInputDesc(out_peer_anchor->GetIdx()); + + auto in_peer_anchor = nodes_anchor.back().first; + GE_CHECK_NOTNULL_JUST_RETURN(in_peer_anchor); + auto in_owner_node = in_peer_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL_JUST_RETURN(in_owner_node); + auto in_peer_op_desc = in_owner_node->GetOpDesc(); + GE_IF_BOOL_EXEC(in_peer_op_desc == nullptr, GELOGE(INTERNAL_ERROR, "in_peer_op_desc is nullptr"); return); + in_desc = in_peer_op_desc->GetOutputDesc(in_peer_anchor->GetIdx()); +} + +graphStatus TransOpWithoutReshapeFusionPass::FormatFusion(const int index, OpDescPtr &format_transfer_op, + int32_t &fusion_op_count, bool &fusion_continue) { + GeTensorDesc out_desc; + GeTensorDesc in_desc; + GetBeginOutDescAndEndInDesc(index, out_desc, in_desc); + + GeTensorDesc format_transfer_input; + GeTensorDesc format_transfer_output; + GetFormatTransferDesc(out_desc, in_desc, format_transfer_input, format_transfer_output); + + if (out_desc.GetFormat() == in_desc.GetFormat() && + 
(!ShapeEqualCheck(out_desc.GetShape(), in_desc.GetShape()) || + !ShapeEqualCheck(out_desc.GetOriginShape(), in_desc.GetOriginShape()))) { + SetRemainNode(sub_graph_anchors_[index]); + return GRAPH_SUCCESS; + } + + if (out_desc.GetFormat() != in_desc.GetFormat() && FusionFormatSupport(out_desc.GetFormat()) && + FusionFormatSupport(in_desc.GetFormat())) { + // create format transop + format_transfer_op = GetFormatTransferOp(format_transfer_input, format_transfer_output); + if (format_transfer_op == nullptr) { + return GRAPH_FAILED; + } + + if (OpAccuracyAbilityCheck(format_transfer_op)) { + ++fusion_op_count; + GELOGI("support format transfer op %s", format_transfer_op->GetName().c_str()); + } else { + GELOGW("ability not support.src format:%d, src datatype:%d, dst format:%d, dst datatype:%d", + format_transfer_input.GetFormat(), format_transfer_input.GetDataType(), format_transfer_output.GetFormat(), + format_transfer_output.GetDataType()); + fusion_op_count = kInvalidFusionOpCount; + } + } else if (out_desc.GetFormat() != in_desc.GetFormat()) { + SetRemainNode(sub_graph_anchors_[index]); + return GRAPH_SUCCESS; + } + fusion_continue = true; + return GRAPH_SUCCESS; +} + +graphStatus TransOpWithoutReshapeFusionPass::DataTypeFusion(const int index, OpDescPtr &cast_op, + int32_t &fusion_op_count) { + GeTensorDesc out_desc; + GeTensorDesc in_desc; + GetBeginOutDescAndEndInDesc(index, out_desc, in_desc); + + GeTensorDesc cast_input; + GeTensorDesc cast_output; + GetCastOpDesc(out_desc, in_desc, cast_input, cast_output); + + if (fusion_op_count != kInvalidFusionOpCount && out_desc.GetDataType() != in_desc.GetDataType()) { + // create cast op + cast_op = GetCastOp(cast_input, cast_output); + if (cast_op == nullptr) { + fusion_op_count = kInvalidFusionOpCount; + return GRAPH_FAILED; + } + + if (OpAccuracyAbilityCheck(cast_op)) { + ++fusion_op_count; + GELOGI("support cast op %s. 
src format:%d, src datatype:%d, dst format:%d, dst datatype:%d", + cast_op->GetName().c_str(), cast_input.GetFormat(), cast_input.GetDataType(), cast_output.GetFormat(), + cast_output.GetDataType()); + } else { + GELOGW("ability not support.src format:%d, src datatype:%d, dst format:%d, dst datatype:%d", + cast_input.GetFormat(), cast_input.GetDataType(), cast_output.GetFormat(), cast_output.GetDataType()); + fusion_op_count = kInvalidFusionOpCount; + } + } + return GRAPH_SUCCESS; +} + +graphStatus TransOpWithoutReshapeFusionPass::TransOpFuseHandle(const ComputeGraphPtr &graph, const int index) { + bool fusion_continue = false; + OpDescPtr format_transfer_op = nullptr; + int32_t fusion_op_count = 0; + auto fortmat_fusion_ret = FormatFusion(index, format_transfer_op, fusion_op_count, fusion_continue); + if (fortmat_fusion_ret != GRAPH_SUCCESS || !fusion_continue) { + SetRemainNode(sub_graph_anchors_[index]); + return GRAPH_SUCCESS; + } + + OpDescPtr cast_op = nullptr; + if (DataTypeFusion(index, cast_op, fusion_op_count) != GRAPH_SUCCESS) { + SetRemainNode(sub_graph_anchors_[index]); + return GRAPH_SUCCESS; + } + + if (fusion_op_count != kInvalidFusionOpCount && fusion_op_count < transop_num_count_[index]) { + GeTensorDesc out_desc; + GeTensorDesc in_desc; + GetBeginOutDescAndEndInDesc(index, out_desc, in_desc); + bool insert_cast_first = InsertCastFirstCheck(out_desc, in_desc); + if (InsertNewTransOp(graph, cast_op, format_transfer_op, index, insert_cast_first) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } else { + // remain all nodes + SetRemainNode(sub_graph_anchors_[index]); + } + return GRAPH_SUCCESS; +} + +void TransOpWithoutReshapeFusionPass::RemoveNousedNodes(const ComputeGraphPtr &graph) { + if (graph == nullptr) { + return; + } + for (size_t i = 0; i < sub_graph_nodes_.size(); ++i) { + if (sub_graph_has_reshape_node_[i]) { + continue; + } + + for (const auto &node : sub_graph_nodes_[i]) { + GE_CHECK_NOTNULL_JUST_RETURN(node); + // remove nodes + if 
(!IsTransOp(node)) { + continue; + } + + auto op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL_JUST_RETURN(op_desc); + bool node_remain_flag = op_desc->TryGetExtAttr(kRemainNode, false); + if (node_remain_flag) { + continue; + } + + GE_IF_BOOL_EXEC(!op_desc->SetExtAttr(kRemainNode, true), GELOGE(INTERNAL_ERROR, "set ext attr failed"); return); + GELOGI("remove node:%s", node->GetName().c_str()); + if (graph->RemoveNode(node) != GRAPH_SUCCESS) { + GELOGW("remove node failed!node:%s", node->GetName().c_str()); + continue; + } + } + } +} + +graphStatus TransOpWithoutReshapeFusionPass::Run(ComputeGraphPtr graph) { + GE_TIMESTAMP_START(TransOpWithoutReshapeFusionPass); + GELOGI("[TransOpWithoutReshapeFusionPass]: optimize begin."); + if (graph == nullptr) { + return GRAPH_SUCCESS; + } + + for (const auto &node : graph->GetAllNodes()) { + GE_CHECK_NOTNULL(node); + if (IsTransOp(node)) { + continue; + } + GELOGD("Current normal node name: %s, type: %s.", node->GetName().c_str(), node->GetType().c_str()); + for (const auto &out_anchor : node->GetAllOutDataAnchors()) { + GE_CHECK_NOTNULL(out_anchor); + vector>> sub_graph_anchors; + vector> nodes_list; + if (GetSubGraphsBetweenNormalNode(out_anchor, sub_graph_anchors, nodes_list) != GRAPH_SUCCESS) { + GELOGW("get transops failed!"); + continue; + } + + sub_graph_anchors_.swap(sub_graph_anchors); + EraseInvalidAnchorsPair(); + if (sub_graph_anchors_.empty()) { + continue; + } + + // check reshape node + if (GetSubGraphNodesInfo() != GRAPH_SUCCESS) { + continue; + } + + // save control edge + GetControlAnchors(); + + if (TransOpFuse(graph) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + } + GELOGI("[TransOpWithoutReshapeFusionPass]: Optimize end."); + GE_TIMESTAMP_END(TransOpWithoutReshapeFusionPass, "GraphManager::TransOpWithoutReshapeFusionPass"); + return GRAPH_SUCCESS; +} + +bool TransOpWithoutReshapeFusionPass::DescEqualCheck(ConstGeTensorDescPtr &desc_src, + ConstGeTensorDescPtr &desc_dst) const { + if (desc_src == 
nullptr || desc_dst == nullptr) { + return false; + } + if (desc_src->GetFormat() != desc_dst->GetFormat() || desc_src->GetDataType() != desc_dst->GetDataType()) { + return false; + } + + if (!ShapeEqualCheck(desc_src->GetShape(), desc_dst->GetShape())) { + return false; + } + + return ShapeEqualCheck(desc_src->GetOriginShape(), desc_dst->GetOriginShape()); +} + +bool TransOpWithoutReshapeFusionPass::ShapeEqualCheck(const GeShape &src, const GeShape &dst) const { + if (src.GetDims().size() != dst.GetDims().size()) { + return false; + } + + for (size_t i = 0; i < src.GetDims().size(); ++i) { + if (src.GetDim(i) != dst.GetDim(i)) { + return false; + } + } + return true; +} + +graphStatus TransOpWithoutReshapeFusionPass::TransOpFuse(const ComputeGraphPtr &graph) { + for (size_t i = 0; i < sub_graph_anchors_.size(); ++i) { + if (sub_graph_has_reshape_node_[i]) { + continue; + } + + auto nodes_anchor = sub_graph_anchors_[i]; + auto out_anchor = nodes_anchor.front().first; + GE_CHECK_NOTNULL(out_anchor); + auto out_op_desc = out_anchor->GetOwnerNode()->GetOpDesc(); + GE_CHECK_NOTNULL(out_op_desc); + auto out_desc = out_op_desc->GetOutputDescPtr(out_anchor->GetIdx()); + GE_CHECK_NOTNULL(out_desc); + auto in_anchor = nodes_anchor.back().second; + GE_CHECK_NOTNULL(in_anchor); + auto in_op_desc = in_anchor->GetOwnerNode()->GetOpDesc(); + GE_CHECK_NOTNULL(in_op_desc); + auto in_desc = in_op_desc->GetInputDescPtr(in_anchor->GetIdx()); + GE_CHECK_NOTNULL(in_desc); + if (FusionFormatSupport(out_desc->GetFormat()) && DescEqualCheck(out_desc, in_desc)) { + // relink begin_out to end_in + if (RelinkNodesWhenDescNotChanged(nodes_anchor.front(), nodes_anchor.back(), static_cast(i)) != + GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } else { + if (TransOpFuseHandle(graph, static_cast(i)) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + } + RemoveNousedNodes(graph); + return GRAPH_SUCCESS; +} + +graphStatus TransOpWithoutReshapeFusionPass::AddTransNode(const ComputeGraphPtr &graph, 
const OpDescPtr &transop, + NodePtr &trans_node) { + if (graph == nullptr) { + return GRAPH_SUCCESS; + } + if (transop == nullptr) { + return GRAPH_SUCCESS; + } + + trans_node = graph->AddNode(transop); + if (trans_node == nullptr) { + GELOGE(GRAPH_FAILED, "add node failed!"); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +graphStatus TransOpWithoutReshapeFusionPass::GetTransNode(const ComputeGraphPtr &graph, const OpDescPtr &cast_op, + const OpDescPtr &format_transfer_op, + const bool insert_cast_first, + std::vector &new_trans_nodes) { + NodePtr format_transfer_node; + if (AddTransNode(graph, format_transfer_op, format_transfer_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + NodePtr cast_node; + if (AddTransNode(graph, cast_op, cast_node) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + if (insert_cast_first) { + if (cast_node != nullptr) { + new_trans_nodes.push_back(cast_node); + } + if (format_transfer_node != nullptr) { + new_trans_nodes.push_back(format_transfer_node); + } + } else { + if (format_transfer_node != nullptr) { + new_trans_nodes.push_back(format_transfer_node); + } + if (cast_node != nullptr) { + new_trans_nodes.push_back(cast_node); + } + } + + if (new_trans_nodes.empty()) { + GELOGE(GRAPH_FAILED, "no new transop!this should not happen!"); + return GRAPH_FAILED; + } + return GRAPH_SUCCESS; +} + +graphStatus TransOpWithoutReshapeFusionPass::InsertNewTransOp(const ComputeGraphPtr &graph, const OpDescPtr &cast_op, + const OpDescPtr &format_transfer_op, const int index, + const bool insert_cast_first) { + std::vector new_trans_nodes; + if (GetTransNode(graph, cast_op, format_transfer_op, insert_cast_first, new_trans_nodes) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + pair begin_out = sub_graph_anchors_[index].front(); + pair end_in = sub_graph_anchors_[index].back(); + auto out_anchor = begin_out.first; + GE_CHECK_NOTNULL(out_anchor); + auto out_owner_node = out_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(out_owner_node); 
+ auto in_anchor = end_in.second; + GE_CHECK_NOTNULL(in_anchor); + auto in_owner_node = in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(in_owner_node); + GELOGI("remove edge.src:%s, src idx:%d, dst:%s, dst idx:%d", end_in.first->GetOwnerNode()->GetName().c_str(), + end_in.first->GetIdx(), in_anchor->GetOwnerNode()->GetName().c_str(), in_anchor->GetIdx()); + GE_CHK_STATUS_RET(GraphUtils::RemoveEdge(end_in.first, in_anchor), "remove edge failed"); + GELOGI("add edge.src:%s, src idx:%d, dst:%s", out_anchor->GetOwnerNode()->GetName().c_str(), out_anchor->GetIdx(), + new_trans_nodes.front()->GetName().c_str()); + if (GraphUtils::AddEdge(out_anchor, new_trans_nodes.front()->GetInAnchor(0)) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } else { + auto old_peer_in_anchor = begin_out.second; + GE_CHECK_NOTNULL(old_peer_in_anchor); + UpdateOutputName(out_anchor, old_peer_in_anchor, in_owner_node); + } + + if (new_trans_nodes.size() > 1) { + GELOGI("add edge.src:%s, dst:%s", new_trans_nodes.front()->GetName().c_str(), + new_trans_nodes.back()->GetName().c_str()); + if (GraphUtils::AddEdge(new_trans_nodes.front()->GetOutAnchor(0), new_trans_nodes.back()->GetInAnchor(0)) != + GRAPH_SUCCESS) { + return GRAPH_FAILED; + } else { + auto old_peer_out_anchor = end_in.first; + GE_CHECK_NOTNULL(old_peer_out_anchor); + UpdateInputName(old_peer_out_anchor, in_anchor, out_owner_node); + } + } + GELOGI("add edge.src:%s, dst:%s, dst idx:%d", new_trans_nodes.back()->GetName().c_str(), + in_anchor->GetOwnerNode()->GetName().c_str(), in_anchor->GetIdx()); + if (GraphUtils::AddEdge(new_trans_nodes.back()->GetOutAnchor(0), in_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + + return RelinkControlEdge(index, out_anchor, new_trans_nodes); +} + +graphStatus TransOpWithoutReshapeFusionPass::RelinkControlEdge(const int index, const OutDataAnchorPtr &out_anchor, + const vector &new_trans_nodes) { + GE_CHECK_NOTNULL(out_anchor); + if (new_trans_nodes.front() == nullptr || new_trans_nodes.back() == 
nullptr) { + return GRAPH_FAILED; + } + if (sub_graph_has_control_edge_[index]) { + GELOGI("add control edge.src:%s, dst:%s", out_anchor->GetOwnerNode()->GetName().c_str(), + new_trans_nodes.front()->GetName().c_str()); + if (GraphUtils::AddEdge(out_anchor->GetOwnerNode()->GetOutControlAnchor(), + new_trans_nodes.front()->GetInControlAnchor()) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + for (const auto &peer_in_anchor : out_control_peer_in_control_anchors_[index]) { + GE_CHECK_NOTNULL(peer_in_anchor); + GELOGI("add control edge.src:%s, dst:%s", new_trans_nodes.back()->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(new_trans_nodes.back()->GetOutControlAnchor(), peer_in_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + for (const auto &peer_out_anchor : in_control_peer_out_control_anchors_[index]) { + GE_CHECK_NOTNULL(peer_out_anchor); + GELOGI("add control edge.src:%s, dst:%s", peer_out_anchor->GetOwnerNode()->GetName().c_str(), + new_trans_nodes.front()->GetName().c_str()); + if (GraphUtils::AddEdge(peer_out_anchor, new_trans_nodes.front()->GetInControlAnchor()) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + for (const auto &peer_in_anchor : out_control_peer_in_data_anchors_[index]) { + GE_CHECK_NOTNULL(peer_in_anchor); + GELOGI("add control edge.src:%s, dst:%s", new_trans_nodes.back()->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(new_trans_nodes.back()->GetOutControlAnchor(), peer_in_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + + for (const auto &peer_in_anchor : out_data_peer_in_control_anchors_[index]) { + GE_CHECK_NOTNULL(peer_in_anchor); + GELOGI("add control edge.src:%s, dst:%s", new_trans_nodes.back()->GetName().c_str(), + peer_in_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(new_trans_nodes.back()->GetOutDataAnchor(0), peer_in_anchor) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + 
} + + if (sub_graph_has_out_data_peer_in_control_edge_[index]) { + auto in_anchor = sub_graph_anchors_[index].back().second; + GELOGI("add control edge.src:%s, dst:%s", new_trans_nodes.back()->GetName().c_str(), + in_anchor->GetOwnerNode()->GetName().c_str()); + if (GraphUtils::AddEdge(new_trans_nodes.back()->GetOutDataAnchor(0), + in_anchor->GetOwnerNode()->GetInControlAnchor()) != GRAPH_SUCCESS) { + return GRAPH_FAILED; + } + } + return GRAPH_SUCCESS; +} + +bool TransOpWithoutReshapeFusionPass::OpAccuracyAbilityCheck(const OpDescPtr &op_desc) { + auto instance = GELib::GetInstance(); + if ((instance == nullptr) || (!instance->InitFlag())) { + GELOGW("GELib is not initialized!"); + return false; + } + if (op_desc == nullptr) { + return false; + } + OpsKernelManager &ops_kernel_manager = instance->OpsKernelManagerObj(); + vector op_infos = ops_kernel_manager.GetOpsKernelInfo(op_desc->GetType()); + if (op_infos.empty()) { + GELOGI("Can not get op info by op type:%s", op_desc->GetType().c_str()); + return false; + } + + std::string unsupported_reason; + for (const auto &it : op_infos) { + auto kernel_map = ops_kernel_manager.GetAllOpsKernelInfoStores(); + auto &kernel_name = it.opKernelLib; + auto kernel_info_store = kernel_map.find(kernel_name); + if (kernel_info_store != kernel_map.end()) { + if (kernel_info_store->second != nullptr && + kernel_info_store->second->CheckAccuracySupported(op_desc, unsupported_reason)) { + op_desc->SetOpEngineName(it.engine); + op_desc->SetOpKernelLibName(kernel_name); + GELOGI("Set OpKernelLibName %s and engine name %s into op_desc %s", kernel_name.c_str(), it.engine.c_str(), + op_desc->GetName().c_str()); + return true; + } + } + } + GELOGI("op %s CheckAccuracySupported failed!reason:%s", op_desc->GetType().c_str(), unsupported_reason.c_str()); + return false; +} + +bool TransOpWithoutReshapeFusionPass::FusionFormatSupport(Format format) { + return format == FORMAT_NCHW || format == FORMAT_NHWC || format == FORMAT_FRACTAL_Z || 
format == FORMAT_NC1HWC0; +} + +graphStatus TransOpWithoutReshapeFusionPass::GetSubGraphsBetweenNormalNode( + const OutDataAnchorPtr &out_anchor, + std::vector>> &sub_graphs_out, + vector> &nodes_list) { + graphStatus ret = GRAPH_SUCCESS; + if (out_anchor == nullptr) { + return GRAPH_FAILED; + } + + for (const auto &peer_in_anchor : out_anchor->GetPeerInDataAnchors()) { + if (peer_in_anchor == nullptr || peer_in_anchor->GetOwnerNode() == nullptr || + peer_in_anchor->GetOwnerNode()->GetOpDesc() == nullptr) { + continue; + } + + nodes_list.push_back(make_pair(out_anchor, peer_in_anchor)); + auto peer_in_node = peer_in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(peer_in_node); + if (!IsTransOp(peer_in_node)) { + sub_graphs_out.push_back(nodes_list); + nodes_list.pop_back(); + } else { + for (const auto &peer_out_anchor : peer_in_node->GetAllOutDataAnchors()) { + ret = GetSubGraphsBetweenNormalNode(peer_out_anchor, sub_graphs_out, nodes_list); + if (ret != GRAPH_SUCCESS) { + GELOGE(GRAPH_FAILED, "get all transops between normal node failed!node:%s", peer_in_node->GetName().c_str()); + return GRAPH_FAILED; + } + } + nodes_list.pop_back(); + } + } + return GRAPH_SUCCESS; +} + +bool TransOpWithoutReshapeFusionPass::IsTransOp(const NodePtr &node) { + // The caller guarantees that the pointer is not null. + return node->GetType() == CAST || node->GetType() == RESHAPE || node->GetType() == TRANSPOSE || + node->GetType() == TRANSPOSED || node->GetType() == TRANSDATA; +} +} // namespace ge diff --git a/src/ge/graph/passes/transop_without_reshape_fusion_pass.h b/src/ge/graph/passes/transop_without_reshape_fusion_pass.h new file mode 100644 index 00000000..4999c731 --- /dev/null +++ b/src/ge/graph/passes/transop_without_reshape_fusion_pass.h @@ -0,0 +1,138 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_TRANSOP_WITHOUT_RESHAPE_FUSION_PASS_H_ +#define GE_GRAPH_PASSES_TRANSOP_WITHOUT_RESHAPE_FUSION_PASS_H_ + +#include +#include + +#include "inc/graph_pass.h" + +namespace ge { +/// +/// Transform operators depth fusion +/// +class TransOpWithoutReshapeFusionPass : public GraphPass { + public: + TransOpWithoutReshapeFusionPass() {} + virtual ~TransOpWithoutReshapeFusionPass() {} + + graphStatus Run(ge::ComputeGraphPtr graph) override; + + private: + void SetRemainNode(const vector> &nodes_anchor); + bool FormatContinuousCheck(const OutDataAnchorPtr &out_anchor, const InDataAnchorPtr &in_anchor); + void RemoveNousedNodes(const ComputeGraphPtr &graph); + void GetBeginOutDescAndEndInDesc(const int index, GeTensorDesc &out_desc, GeTensorDesc &in_desc); + + void GetFormatTransferDesc(const GeTensorDesc &out_desc, const GeTensorDesc &in_desc, + GeTensorDesc &format_transfer_input, GeTensorDesc &format_transfer_output); + + void GetCastOpDesc(const GeTensorDesc &out_desc, const GeTensorDesc &in_desc, GeTensorDesc &cast_input, + GeTensorDesc &cast_output); + + graphStatus FormatFusion(const int index, OpDescPtr &format_transfer_op, int32_t &fusion_op_count, + bool &fusion_continue); + + graphStatus DataTypeFusion(const int index, OpDescPtr &cast_op, int32_t &fusion_op_count); + + void GetOutDataPeerInControlAnchors(const size_t index, + vector> &out_data_peer_in_control_anchors); + + void GetInControlPeerOutControlAnchors(const size_t index, + vector> &in_control_peer_out_control_anchors); + + void 
GetOutControlPeerAnchors(const size_t index, + vector> &out_control_peer_in_control_anchors, + vector> &out_control_peer_in_data_anchors); + + graphStatus TransOpFuse(const ComputeGraphPtr &graph); + + bool OpAccuracyAbilityCheck(const OpDescPtr &op_desc); + + graphStatus GetSubGraphsBetweenNormalNode( + const OutDataAnchorPtr &out_anchor, vector>> &sub_graphs_out, + vector> &nodes_list); + + graphStatus GetSubGraphNodesInfo(); + + void GetControlAnchors(); + + graphStatus InsertNewTransOp(const ComputeGraphPtr &graph, const OpDescPtr &cast_op, + const OpDescPtr &format_transfer_op, const int index, const bool insert_cast_first); + + void EraseInvalidAnchorsPair(); + + graphStatus RelinkNodesWhenDescNotChanged(const pair &begin_anchors_pair, + const pair &end_anchors_pair, + const int index); + + OpDescPtr GetFormatTransferOp(const GeTensorDesc &out_desc, const GeTensorDesc &in_desc); + + OpDescPtr GetCastOp(const GeTensorDesc &out_desc, const GeTensorDesc &in_desc); + + graphStatus TransOpFuseHandle(const ge::ComputeGraphPtr &graph, const int index); + + graphStatus AddTransNode(const ComputeGraphPtr &graph, const OpDescPtr &transop, NodePtr &trans_node); + + bool DescEqualCheck(ConstGeTensorDescPtr &desc_src, ConstGeTensorDescPtr &desc_dst) const; + + bool ShapeEqualCheck(const GeShape &src, const GeShape &dst) const; + + bool InsertCastFirstCheck(const GeTensorDesc &out_desc, const GeTensorDesc &in_desc) const; + + graphStatus RelinkControlEdge(const int index, const OutDataAnchorPtr &out_anchor, + const vector &new_trans_nodes); + + graphStatus GetTransNode(const ComputeGraphPtr &graph, const OpDescPtr &cast_op, const OpDescPtr &format_transfer_op, + const bool insert_cast_first, std::vector &new_trans_nodes); + + void UpdateOutputName(const OutDataAnchorPtr &out_anchor, const InDataAnchorPtr &old_peer_in_anchor, + const NodePtr &in_owner_node); + void UpdateInputName(const OutDataAnchorPtr &old_peer_out_anchor, const InDataAnchorPtr &in_anchor, + const NodePtr 
&out_owner_node); + + graphStatus RelinkControlEdgesWhenDescNotChanged(const pair &begin_anchors_pair, + const pair &end_anchors_pair, + const int index); + + graphStatus RelinkSubGraphControlEdges(const pair &begin_anchors_pair, + const pair &end_anchors_pair, + const int index); + /// + /// judge whether an operator is a transform op or not + /// @param node + /// @return True or False + /// + static bool IsTransOp(const NodePtr &node); + + static bool FusionFormatSupport(Format format); + + vector>> sub_graph_anchors_; + vector> sub_graph_nodes_; + vector transop_num_count_; + vector sub_graph_has_reshape_node_; + vector> in_control_peer_out_control_anchors_; + vector> out_control_peer_in_control_anchors_; + vector> out_control_peer_in_data_anchors_; + vector> out_data_peer_in_control_anchors_; + vector sub_graph_has_control_edge_; + vector sub_graph_has_out_data_peer_in_control_edge_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_TRANSOP_WITHOUT_RESHAPE_FUSION_PASS_H_ diff --git a/src/ge/graph/passes/transpose_transdata_pass.cc b/src/ge/graph/passes/transpose_transdata_pass.cc new file mode 100644 index 00000000..891c10ef --- /dev/null +++ b/src/ge/graph/passes/transpose_transdata_pass.cc @@ -0,0 +1,234 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/transpose_transdata_pass.h" + +#include +#include +#include + +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/utils/type_utils.h" +#include "graph/debug/ge_attr_define.h" +#include "init/gelib.h" +#include "opskernel_manager/ops_kernel_manager.h" + +namespace { +const char *const kAttrNameSrcFormat = "src_format"; +} + +namespace ge { +Status TransposeTransDataPass::Run(NodePtr &node) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "param [node] must not be null."); + return PARAM_INVALID; + } + auto op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "OpDesc of param [node] must not be null."); + return PARAM_INVALID; + } + + if (op_desc->GetType() != TRANSPOSE && op_desc->GetType() != TRANSPOSED) { + return SUCCESS; + } + if (CheckOneInAndOneOutDataAnchor(node) != SUCCESS) { + return FAILED; + } + GELOGD("[%s] TransposeTransDataPass in.", node->GetName().c_str()); + + auto out_nodes = node->GetOutDataNodes(); + bool is_add_flag = false; + for (auto &out_node : out_nodes) { + GE_CHECK_NOTNULL(out_node); + OpDescPtr out_op_desc = out_node->GetOpDesc(); + if (out_op_desc == nullptr) { + GELOGE(FAILED, "OpDesc of out data node of [%s] must not be null.", node->GetName().c_str()); + return FAILED; + } + if (out_op_desc->GetType() != TRANSDATA) { + continue; + } + if (CheckOneInAndOneOutDataAnchor(out_node)) { + return FAILED; + } + if (!FusionIfNeed(op_desc, out_op_desc)) { + continue; + } + CopyInputEdges(node, out_node); + is_add_flag = true; + } + + if (is_add_flag) { + AddRePassNode(node->GetInDataNodes().at(0)); + } + if (node->GetOutDataNodesSize() == 0) { + // all output nodes of transpose has fused, delete transpose + return RemoveTranspose(node); + } + return SUCCESS; +} + +Status TransposeTransDataPass::CheckOneInAndOneOutDataAnchor(NodePtr &node) const { + GE_CHECK_NOTNULL(node); + // Trans op has one input one output data 
anchor + uint32_t in_data_anchor_nums = node->GetAllInDataAnchorsSize(); + uint32_t out_data_anchor_nums = node->GetAllOutDataAnchorsSize(); + // Trans op has one input data node, maybe has N output data nodes + uint32_t in_data_node_nums = node->GetInDataNodes().size(); + if (in_data_anchor_nums != 1 || out_data_anchor_nums != 1 || in_data_node_nums != 1) { + GELOGE(FAILED, "[%s] %s has %u in %u out data anchor, has %u in data node.", node->GetType().c_str(), + node->GetName().c_str(), in_data_anchor_nums, out_data_anchor_nums, in_data_node_nums); + return FAILED; + } + return SUCCESS; +} + +Status TransposeTransDataPass::RemoveTranspose(NodePtr &node) { + GE_CHECK_NOTNULL(node); + ComputeGraphPtr graph = node->GetOwnerComputeGraph(); + if (graph == nullptr) { + GELOGE(FAILED, "[%s] The owner graph must not be null.", node->GetName().c_str()); + return FAILED; + } + + // If delete Transpos/TransposeD, change its peer in ctrl anchor to its input node + // If not delete, need do nothing + auto origin_node_in = node->GetInDataNodes().at(0); + for (auto &peer_anchor : node->GetOutControlAnchor()->GetPeerInControlAnchors()) { + GE_CHECK_NOTNULL(origin_node_in); + GE_CHECK_NOTNULL(origin_node_in->GetOutControlAnchor()); + GE_CHK_STATUS_RET(origin_node_in->GetOutControlAnchor()->LinkTo(peer_anchor), "link failed"); + } + + for (const auto &anchor : node->GetAllInAnchors()) { + GE_CHECK_NOTNULL(anchor); + anchor->UnlinkAll(); + } + for (const auto &anchor : node->GetAllOutAnchors()) { + GE_CHECK_NOTNULL(anchor); + anchor->UnlinkAll(); + } + AddNodeDeleted(node.get()); + if (GraphUtils::RemoveNodeWithoutRelink(graph, node) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[%s] RemoveNodeWithoutRelink failed.", node->GetName().c_str()); + return FAILED; + } + return SUCCESS; +} + +bool TransposeTransDataPass::FusionIfNeed(OpDescPtr &op_desc, OpDescPtr &transdata_op_desc) { + GE_CHECK_NOTNULL(op_desc); + GE_CHECK_NOTNULL(transdata_op_desc); + auto out_input_desc = 
transdata_op_desc->MutableInputDesc(0); + GE_CHECK_NOTNULL(out_input_desc); + auto out_input_format = out_input_desc->GetFormat(); + auto out_input_shape = out_input_desc->GetShape(); + + auto input_desc = op_desc->MutableInputDesc(0); + auto out_desc = op_desc->MutableOutputDesc(0); + GE_CHECK_NOTNULL(input_desc); + GE_CHECK_NOTNULL(out_desc); + auto src_format = input_desc->GetFormat(); + auto dst_format = out_desc->GetFormat(); + auto &dst_shape = out_desc->MutableShape(); + if (dst_format != out_input_format || !formats::IsShapeEqual(dst_shape, out_input_shape) || src_format == FORMAT_ND) { + GELOGD("Output of transpose isn't the same as input of transdata, or transpose input format must not be ND."); + GELOGD("Transpose input format %s, output format %s shape %s. transdata in %s %s.", + TypeUtils::FormatToSerialString(src_format).c_str(), TypeUtils::FormatToSerialString(dst_format).c_str(), + formats::ShapeToString(dst_shape.GetDims()).c_str(), + TypeUtils::FormatToSerialString(out_input_format).c_str(), + formats::ShapeToString(out_input_shape.GetDims()).c_str()); + return false; + } + + auto &src_shape = input_desc->MutableShape(); + GELOGI("Begin to fuse transpose transdata, transpose in format %s shape %s, transdata in %s %s", + TypeUtils::FormatToSerialString(src_format).c_str(), formats::ShapeToString(src_shape.GetDims()).c_str(), + TypeUtils::FormatToSerialString(out_input_format).c_str(), + formats::ShapeToString(out_input_shape.GetDims()).c_str()); + + // Transpose can change format and shape + out_input_desc->SetFormat(src_format); + out_input_desc->SetShape(src_shape); + + if (!TransDataCheckAccuracySupported(transdata_op_desc)) { + out_input_desc->SetFormat(out_input_format); + out_input_desc->SetShape(out_input_shape); + return false; + } + + // add attr to fused TransData, then will be rebuild + string new_node_name = op_desc->GetName() + transdata_op_desc->GetName(); + transdata_op_desc->SetName(new_node_name); + 
GE_IF_BOOL_EXEC(!AttrUtils::SetBool(transdata_op_desc, ATTR_NEED_COMPILE, true), + GELOGW("set ext attr failed"); return false); + + string format_val = TypeUtils::FormatToSerialString(src_format); + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(transdata_op_desc, kAttrNameSrcFormat, format_val), + GELOGW("set kAttrNameSrcFormat failed"); return false); + GELOGI("TransposeTransDataPass, fuse to be node %s.", transdata_op_desc->GetName().c_str()); + return true; +} + +void TransposeTransDataPass::CopyInputEdges(NodePtr &origin_node, NodePtr &new_node) { + if (origin_node == nullptr || new_node == nullptr) { + return; + } + InDataAnchorPtr new_in_data_anchor = new_node->GetInDataAnchor(0); + if (new_in_data_anchor == nullptr || origin_node->GetInDataAnchor(0) == nullptr) { + return; + } + OutDataAnchorPtr out_anchor = origin_node->GetInDataAnchor(0)->GetPeerOutAnchor(); + new_in_data_anchor->UnlinkAll(); + GE_IF_BOOL_EXEC(new_in_data_anchor->LinkFrom(out_anchor) != GRAPH_SUCCESS, GELOGW("Link failed"); return); + + // control anchor only link to control anchor + GE_IF_BOOL_EXEC(GraphUtils::CopyInCtrlEdges(origin_node, new_node) != GRAPH_SUCCESS, + GELOGW("Copy in ctrl edges failed"); return); +} + +bool TransposeTransDataPass::TransDataCheckAccuracySupported(const OpDescPtr &op_desc) { + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if ((instance_ptr == nullptr) || (!instance_ptr->InitFlag())) { + GELOGW("GELib not initialized"); + return false; + } + + OpsKernelManager &ops_kernel_manager = instance_ptr->OpsKernelManagerObj(); + vector op_infos = ops_kernel_manager.GetOpsKernelInfo(op_desc->GetType()); + if (op_infos.empty()) { + GELOGW("Can not get op info by op type %s", op_desc->GetType().c_str()); + return false; + } + + std::string unsupported_reason; + for (auto &it : op_infos) { + auto kernel_map = ops_kernel_manager.GetAllOpsKernelInfoStores(); + auto &kernel_name = it.opKernelLib; + auto kernel_info_store = kernel_map.find(kernel_name); + if 
(kernel_info_store != kernel_map.end()) { + if (kernel_info_store->second->CheckAccuracySupported(op_desc, unsupported_reason, true)) { + return true; + } + } + } + GELOGI("TransposeTransDataPass CheckAccuracySupported[%s] all not support, reason:%s.", op_desc->GetName().c_str(), + unsupported_reason.c_str()); + return false; +} +} // namespace ge diff --git a/src/ge/graph/passes/transpose_transdata_pass.h b/src/ge/graph/passes/transpose_transdata_pass.h new file mode 100644 index 00000000..bf42f5de --- /dev/null +++ b/src/ge/graph/passes/transpose_transdata_pass.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_TRANSPOSE_TRANSDATA_PASS_H_ +#define GE_GRAPH_PASSES_TRANSPOSE_TRANSDATA_PASS_H_ + +#include "graph/passes/base_pass.h" + +namespace ge { +class TransposeTransDataPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; + private: + Status CheckOneInAndOneOutDataAnchor(NodePtr &node) const; + Status RemoveTranspose(NodePtr &node); + bool FusionIfNeed(OpDescPtr &op_desc, OpDescPtr &transdata_op_desc); + void CopyInputEdges(NodePtr &origin_node, NodePtr &new_node); + bool TransDataCheckAccuracySupported(const OpDescPtr &op_desc); +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_TRANSPOSE_TRANSDATA_PASS_H_ + diff --git a/src/ge/graph/passes/unused_const_pass.cc b/src/ge/graph/passes/unused_const_pass.cc new file mode 100644 index 00000000..dc8c7c07 --- /dev/null +++ b/src/ge/graph/passes/unused_const_pass.cc @@ -0,0 +1,47 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/unused_const_pass.h" + +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" + +namespace ge { +/// +/// run pass +/// @param [in] node node to be deleted +/// @return Status +/// +Status UnusedConstPass::Run(NodePtr &node) { + if (node == nullptr) { + GELOGE(FAILED, "parameter is null."); + return FAILED; + } + if (node->GetOpDesc() == nullptr) { + GELOGE(PARAM_INVALID, "param [opDesc] must not be null."); + return PARAM_INVALID; + } + + std::string op_type = node->GetOpDesc()->GetType(); + if (op_type == UNUSEDCONST) { + GELOGD("op type is unused const."); + return IsolateAndDeleteNode(node, {-1}); + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/unused_const_pass.h b/src/ge/graph/passes/unused_const_pass.h new file mode 100644 index 00000000..3c7f3460 --- /dev/null +++ b/src/ge/graph/passes/unused_const_pass.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

#ifndef GE_GRAPH_PASSES_UNUSED_CONST_PASS_H_
#define GE_GRAPH_PASSES_UNUSED_CONST_PASS_H_

#include "graph/passes/base_pass.h"

namespace ge {
// Node pass that isolates and deletes nodes whose op type is UNUSEDCONST.
class UnusedConstPass : public BaseNodePass {
 public:
  // Runs on a single node; deletes it when its type is UNUSEDCONST,
  // otherwise leaves the node untouched and returns SUCCESS.
  Status Run(NodePtr &node) override;
};
}  // namespace ge
#endif  // GE_GRAPH_PASSES_UNUSED_CONST_PASS_H_
+ */ + +#include "graph/passes/unused_op_remove_pass.h" + +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" + +using domi::SUCCESS; + +namespace ge { +const std::set kRemoveOpSet = {DROPOUT, PERMUTE, UNUSEDCONST, ASSERT}; +const std::set kOtherRemoveOpSet = {DROPOUT}; + +Status UnusedOpRemovePass::Run(ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + std::set remove_op_set; + vector nodes_to_be_deleted; + if (fmktype_ == FMK_TYPE_T) { + remove_op_set = kRemoveOpSet; + } else { + remove_op_set = kOtherRemoveOpSet; + } + + for (auto &node : graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + std::string op_type_str = node->GetOpDesc()->GetType(); + if (remove_op_set.count(op_type_str)) { + if (IsExceptions(node)) { + continue; + } + for (auto &out_anchor : node->GetAllOutDataAnchors()) { + for (auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + NodePtr dst_node = in_anchor->GetOwnerNode(); + GE_CHECK_NOTNULL(dst_node->GetOpDesc()); + int dst_index = in_anchor->GetIdx(); + std::vector list_bool; + list_bool = dst_node->GetOpDesc()->GetIsInputConst(); + GE_IF_BOOL_EXEC(list_bool.size() == 0, continue); + list_bool.erase(list_bool.begin() + dst_index); + dst_node->GetOpDesc()->SetIsInputConst(list_bool); + } + } + if (op_type_str == ASSERT) { + GE_CHK_STATUS_RET(CollectParentNode(graph, node, nodes_to_be_deleted), "remove node failed"); + } else { + GE_CHK_STATUS_RET(graph->RemoveNode(node), "remove node failed"); + } + } + } + for (auto &node : nodes_to_be_deleted) { + for (InDataAnchorPtr &inAnchor : node->GetAllInDataAnchors()) { + inAnchor->UnlinkAll(); + } + for (OutDataAnchorPtr &outAnchorPtr : node->GetAllOutDataAnchors()) { + outAnchorPtr->UnlinkAll(); + } + if (node->GetOutControlAnchor() != nullptr) { + node->GetOutControlAnchor()->UnlinkAll(); + 
} + GE_CHK_STATUS_RET(graph->RemoveNode(node), "remove node:%s failed", node->GetName().c_str()); + } + + return SUCCESS; +} + +Status UnusedOpRemovePass::CollectParentNode(const ComputeGraphPtr &graph, const NodePtr &node, + vector &node_vec) { + GE_CHECK_NOTNULL(graph); + GE_CHECK_NOTNULL(node); + node_vec.push_back(node); + std::queue node_queue; + + for (auto &src_node : node->GetInDataNodes()) { + if (src_node->GetOutDataNodesSize() == 1) { + node_queue.push(src_node); + } + } + + while (!node_queue.empty()) { + NodePtr temp = node_queue.front(); + node_queue.pop(); + + for (auto &src_node : temp->GetInDataNodes()) { + if (src_node->GetOutDataNodesSize() == 1) { + node_queue.push(src_node); + } + } + node_vec.push_back(temp); + } + + return SUCCESS; +} + +bool UnusedOpRemovePass::IsExceptions(const NodePtr &node) { + GE_CHK_BOOL_EXEC(node != nullptr, return false, "node is nullptr"); + auto op_def = node->GetOpDesc(); + GE_CHK_BOOL_EXEC(op_def != nullptr, return false, "opdesc is nullptr"); + // permute optimised in permute_pass.cpp + if (op_def->GetType() == PERMUTE) { + GE_IF_BOOL_EXEC( + (node->GetInDataNodes().size() != 0 && + (node->GetInDataNodes().at(0) != nullptr && node->GetInDataNodes().at(0)->GetOpDesc() != nullptr && + node->GetInDataNodes().at(0)->GetOpDesc()->GetType() == ATTENTIONDECODER)), + return false); + return true; + } + return false; +} +} // namespace ge diff --git a/src/ge/graph/passes/unused_op_remove_pass.h b/src/ge/graph/passes/unused_op_remove_pass.h new file mode 100644 index 00000000..525dfa7e --- /dev/null +++ b/src/ge/graph/passes/unused_op_remove_pass.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_UNUSED_OP_REMOVE_PASS_H_ +#define GE_GRAPH_PASSES_UNUSED_OP_REMOVE_PASS_H_ + +#include +#include + +#include "framework/common/ge_types.h" +#include "inc/graph_pass.h" + +namespace ge { +class UnusedOpRemovePass : public GraphPass { + public: + explicit UnusedOpRemovePass(FrameworkType type) : fmktype_(type) {} + ~UnusedOpRemovePass() {} + Status Run(ge::ComputeGraphPtr graph) override; + bool IsExceptions(const ge::NodePtr &node); + + private: + Status CollectParentNode(const ge::ComputeGraphPtr &graph, const ge::NodePtr &node, + std::vector &node_vec); + std::vector v_remove_ops; + FrameworkType fmktype_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_UNUSED_OP_REMOVE_PASS_H_ diff --git a/src/ge/graph/passes/update_net_output_pass.cc b/src/ge/graph/passes/update_net_output_pass.cc new file mode 100644 index 00000000..10a3e202 --- /dev/null +++ b/src/ge/graph/passes/update_net_output_pass.cc @@ -0,0 +1,174 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/update_net_output_pass.h" +#include +#include +#include +#include "omg/omg_inner_types.h" +#include "common/util.h" +#include "common/formats/formats.h" +#include "common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h" +#include "common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h" + +namespace ge { +static std::map kOutputTypeStrToDataType = { + {"FP32", ge::DT_FLOAT}, {"FP16", ge::DT_FLOAT16}, {"INT8", ge::DT_INT8}, {"INT16", ge::DT_INT16}, + {"UINT16", ge::DT_UINT16}, {"UINT8", ge::DT_UINT8}, {"INT32", ge::DT_INT32}, {"INT64", ge::DT_INT64}, + {"UINT32", ge::DT_UINT32}, {"UINT64", ge::DT_UINT64}, {"DOUBLE", ge::DT_DOUBLE}, +}; + +static void SetNetoutputDataType(OpDescPtr &op_desc, + uint32_t index, + ge::DataType output_data_type) { + // op_desc is judged not nullptr + auto net_output_in_desc = op_desc->MutableInputDesc(index); + if (net_output_in_desc != nullptr) { + net_output_in_desc->SetDataType(output_data_type); + net_output_in_desc->SetOriginDataType(output_data_type); + GELOGI("Update input desc, datatype:%s,", + TypeUtils::DataTypeToSerialString(op_desc->GetInputDesc(0).GetDataType()).c_str()); + } + auto net_output_out_desc = op_desc->MutableOutputDesc(index); + if (net_output_out_desc != nullptr) { + net_output_out_desc->SetDataType(output_data_type); + net_output_out_desc->SetOriginDataType(output_data_type); + GELOGI("Update out desc, datatype:%s", + TypeUtils::DataTypeToSerialString(op_desc->GetOutputDesc(0).GetDataType()).c_str()); + } +} + +static Status SetNetoutputFormat(OpDescPtr op_desc, uint32_t index, ge::Format format) { + // op_desc is judged not nullptr + auto net_output_in_desc = op_desc->MutableInputDesc(index); + GE_CHECK_NOTNULL(net_output_in_desc); + ge::Format old_format = net_output_in_desc->GetFormat(); + bool support = ((old_format == FORMAT_NC1HWC0) || + (old_format == 
FORMAT_NCHW) || + (old_format == FORMAT_NHWC)); + if (!support) { + GELOGE(INTERNAL_ERROR, "The node %s format [%s] is unsupported", op_desc->GetName().c_str(), + TypeUtils::FormatToSerialString(old_format).c_str()); + return FAILED; + } + if (old_format == FORMAT_NC1HWC0) { + GELOGI("No need to transfer format"); + return SUCCESS; + } + std::vector old_shape = net_output_in_desc->GetShape().GetDims(); + ge::DataType dt = net_output_in_desc->GetDataType(); + std::vector dst_shape_dims; + if (old_format == FORMAT_NCHW) { + formats::FormatTransferNchwNc1hwc0 transfer; + if (transfer.TransShape(old_format, old_shape, dt, format, dst_shape_dims) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "TransShape failed"); + return FAILED; + } + } + if (old_format == FORMAT_NHWC) { + formats::FormatTransferNhwcNc1hwc0 transfer; + if (transfer.TransShape(old_format, old_shape, dt, format, dst_shape_dims) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "TransShape failed"); + return FAILED; + } + } + net_output_in_desc->SetShape(ge::GeShape(dst_shape_dims)); + net_output_in_desc->SetOriginShape(ge::GeShape(dst_shape_dims)); + net_output_in_desc->SetFormat(format); + net_output_in_desc->SetOriginFormat(format); + GELOGI("Update input desc, format:%s,", + TypeUtils::FormatToSerialString(op_desc->GetInputDesc(0).GetFormat()).c_str()); + + auto net_output_out_desc = op_desc->MutableOutputDesc(index); + if (net_output_out_desc == nullptr) { + GELOGW("The opdesc is nullptr"); + return FAILED; + } + net_output_out_desc->SetShape(ge::GeShape(dst_shape_dims)); + net_output_out_desc->SetOriginShape(ge::GeShape(dst_shape_dims)); + net_output_out_desc->SetFormat(format); + net_output_out_desc->SetOriginFormat(format); + GELOGI("Update out desc, format:%s", + TypeUtils::FormatToSerialString(op_desc->GetOutputDesc(0).GetFormat()).c_str()); + return SUCCESS; +} + +Status ReUpdateNetOutputPass::Run(ge::NodePtr &node) { + GELOGD("ReUpdateNetOutputPass running"); + if (node == nullptr) { + GELOGE(FAILED, "parameter 
is null."); + return FAILED; + } + auto op_desc = node->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(FAILED, "op_desc is null."); + return FAILED; + } + + std::string op_type = op_desc->GetType(); + if (op_type != NETOUTPUT) { + return SUCCESS; + } + GELOGD("NetOutput start ReUpdateNetOutputPass"); + bool is_set_output_type = false; + ge::DataType output_data_type = ge::DT_FLOAT; + std::string output_type = domi::GetContext().output_type; + if (kOutputTypeStrToDataType.find(output_type) != kOutputTypeStrToDataType.end()) { + output_data_type = kOutputTypeStrToDataType[output_type]; + is_set_output_type = true; + } else { + GELOGW("output_type [%s] set can not find", output_type.c_str()); + is_set_output_type = false; + } + + for (const auto &in_anchor : node->GetAllInDataAnchors()) { + auto index = static_cast(in_anchor->GetIdx()); + // Update datatype + if (is_set_output_type) { + SetNetoutputDataType(op_desc, index, output_data_type); + continue; + } + // output_node is not set,check if is_output_adjust_hw_layout is set + auto peer_out = in_anchor->GetPeerOutAnchor(); + GE_CHECK_NOTNULL(peer_out); + auto own_node = peer_out->GetOwnerNode(); + GE_CHECK_NOTNULL(own_node); + OpDescPtr src_op_desc = own_node->GetOpDesc(); + GE_CHECK_NOTNULL(src_op_desc); + bool set_fp16_nc1hwc0 = false; + if (AttrUtils::GetBool(src_op_desc, "output_set_fp16_nc1hwc0", set_fp16_nc1hwc0)) { + GELOGI("This output [%s] should be set FP16 and NC1HWC0", src_op_desc->GetName().c_str()); + if (set_fp16_nc1hwc0) { + SetNetoutputDataType(op_desc, index, ge::DT_FLOAT16); + if (SetNetoutputFormat(op_desc, index, FORMAT_NC1HWC0) != SUCCESS) { + GELOGE(PARAM_INVALID, "SetNetoutputFormat failed"); + return FAILED; + } + // set the outputdesc originformat NC1HWC0, as partition insert placehold node format is based on originformat + auto src_index = static_cast(in_anchor->GetPeerOutAnchor()->GetIdx()); + auto src_output_desc = src_op_desc->MutableOutputDesc(src_index); + if (src_output_desc == 
nullptr) { + GELOGE(PARAM_INVALID, "src_output_desc is m=nullptr"); + return FAILED; + } + src_output_desc->SetOriginFormat(FORMAT_NC1HWC0); + } + } + } + GELOGD("node[%s] ReUpdateNetOutputPass done", op_type.c_str()); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/update_net_output_pass.h b/src/ge/graph/passes/update_net_output_pass.h new file mode 100644 index 00000000..571d2b9c --- /dev/null +++ b/src/ge/graph/passes/update_net_output_pass.h @@ -0,0 +1,40 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

#ifndef GE_GRAPH_PASSES_UPDATE_NET_OUTPUT_PASS_H_
#define GE_GRAPH_PASSES_UPDATE_NET_OUTPUT_PASS_H_

#include "graph/types.h"
#include "graph/passes/base_pass.h"
#include "framework/common/ge_inner_error_codes.h"
#include "framework/common/debug/ge_log.h"
#include "graph/utils/type_utils.h"
#include "graph/debug/ge_attr_define.h"

namespace ge {
// Node pass that rewrites the NetOutput op's tensor descriptions: applies the
// user-requested output data type, or switches individual outputs to
// FP16/NC1HWC0 when the producing op requests it via attribute.
class ReUpdateNetOutputPass : public BaseNodePass {
 public:
  ///
  /// Entry of the ReUpdateNetOutputPass optimizer
  /// @param [in] node: Input node
  /// @return SUCCESS: Execution succeed
  /// @return OTHERS: Execution failed
  /// @author
  ///
  Status Run(NodePtr &node) override;
};
}  // namespace ge
#endif  // GE_GRAPH_PASSES_UPDATE_NET_OUTPUT_PASS_H_
+ */ + +#include "graph/passes/var_is_initialized_op_pass.h" + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge/ge_util.h" +#include "graph/anchor.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/node.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/node_utils.h" + +namespace ge { +namespace { +const int kAssignVarRefIndex = 0; +const int kVarIsInitializedIOCnt = 1; +const int kVarIsInitVarInputIndex = 0; +} // namespace +Status VarIsInitializedOpPass::Run(NodePtr &node) { + GE_CHECK_NOTNULL(node); + auto ret = UpdateInitedVars(node); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to run var is init pass on node %s", node->GetName().c_str()); + return ret; + } + + if (node->GetType() != VARISINITIALIZEDOP) { + return SUCCESS; + } + + bool inited = false; + if (CheckSrcNode(node, inited) != SUCCESS) { + return FAILED; + } + GELOGI("The variable inited status %s on node %s", + inited ? "true" : "false", node->GetName().c_str()); + + ret = ChangeNodeToConstant(node, inited); + GELOGI("Change VarIsInitializedOp %s to be Constant %s end.", + node->GetName().c_str(), inited ? 
"true" : "false"); + return ret; +} + +Status VarIsInitializedOpPass::CheckSrcNode(const NodePtr &node, bool &inited) const { + GE_CHECK_NOTNULL(node); + auto input_nodes = node->GetInDataNodes(); + if (input_nodes.size() != kVarIsInitializedIOCnt) { + GELOGE(FAILED, + "[%s] Node input data nodes size [%zu] is not equal 1.", + node->GetName().c_str(), + input_nodes.size()); + return FAILED; + } + + auto &input_node = input_nodes.at(kVarIsInitVarInputIndex); + GE_CHECK_NOTNULL(input_node); + auto input_node_name = input_node->GetName(); + auto input_node_type = input_node->GetType(); + if (input_node_type != VARIABLE) { + GELOGE(FAILED, "[%s] Src node %s is not Variable, is %s.", node->GetName().c_str(), input_node_name.c_str(), + input_node_type.c_str()); + return FAILED; + } + + // initialized and initialized check graph must not be in the same graph + ComputeGraphPtr compute_graph = node->GetOwnerComputeGraph(); + auto session_id = compute_graph->GetSessionID(); + if (VarManager::Instance(session_id)->IsVarExist(input_node_name)) { + inited = true; + return SUCCESS; + } + GE_CHECK_NOTNULL(input_node->GetOpDesc()); + inited = IsVarInitedOnTheGraphAndNode(node, input_node->GetOpDesc()->GetId()); + return SUCCESS; +} + +Status VarIsInitializedOpPass::CreateConstant(NodePtr &node, OpDescPtr &op_desc, bool inited) { + GE_CHECK_NOTNULL(node); + // 1. create Constant OpDesc + op_desc = MakeShared(node->GetName().c_str(), CONSTANT); + if (op_desc == nullptr) { + GELOGE(FAILED, "[%s] Make shared of Constant op desc failed.", node->GetName().c_str()); + return FAILED; + } + + // 2. get OpDesc of VarIsInitializedOp + OpDescPtr original_op_desc = node->GetOpDesc(); + if (original_op_desc == nullptr) { + GELOGE(FAILED, "[%s] Op desc must not be null.", node->GetName().c_str()); + return FAILED; + } + GeTensorDesc original_desc = original_op_desc->GetOutputDesc(0); + + // 3. 
create attr value of Constant, is a tensor + bool val = inited; + GeTensorPtr const_tensor_ptr = MakeShared(original_desc, reinterpret_cast(&val), sizeof(bool)); + if (const_tensor_ptr == nullptr) { + GELOGE(FAILED, "[%s] Make shared of Constant tensor failed.", node->GetName().c_str()); + return FAILED; + } + if (!AttrUtils::SetTensor(op_desc, ATTR_NAME_WEIGHTS, const_tensor_ptr)) { + GELOGE(INTERNAL_ERROR, "get ATTR_NAME_WEIGHTS failed"); + return FAILED; + } + + // 4. set Constant output desc + GE_CHK_STATUS_RET(op_desc->AddOutputDesc(original_desc), "add out put desc failed"); + return SUCCESS; +} + +Status VarIsInitializedOpPass::ProcessInAnchor(NodePtr &node, NodePtr &new_node) { + GE_CHECK_NOTNULL(node); + GE_CHECK_NOTNULL(new_node); + auto in_anchors = node->GetAllInDataAnchors(); + auto out_anchors = node->GetAllOutDataAnchors(); + if ((in_anchors.size() != kVarIsInitializedIOCnt) || + (out_anchors.size() != kVarIsInitializedIOCnt)) { + GELOGE(FAILED, + "[%s] Node input/output data anchors" + " size [%lu][%lu] is not all equal 1.", + node->GetName().c_str(), in_anchors.size(), out_anchors.size()); + return FAILED; + } + + // 1. 
delete in data anchor of VarIsInitializedOp node + auto &in_anchor = in_anchors.at(kVarIsInitVarInputIndex); + GE_CHECK_NOTNULL(in_anchor); + auto peer_out_anchor = in_anchor->GetPeerOutAnchor(); + GE_CHECK_NOTNULL(peer_out_anchor); + if (GraphUtils::RemoveEdge(in_anchor, peer_out_anchor) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[%s] Remove in data edge failed.", node->GetName().c_str()); + return FAILED; + } + auto src_node = peer_out_anchor->GetOwnerNode(); + if (GraphUtils::AddEdge(src_node->GetOutControlAnchor(), new_node->GetInControlAnchor()) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Failed to link control edges from var %s to new const %s", + src_node->GetName().c_str(), new_node->GetName().c_str()); + return FAILED; + } + + if (GraphUtils::MoveInCtrlEdges(node, new_node) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Failed to move in ctrl edges from %s to new const", node->GetName().c_str()); + return FAILED; + } + + if (GraphUtils::MoveOutCtrlEdges(node, new_node) != GRAPH_SUCCESS) { + GELOGE(FAILED, "Failed to move out ctrl edges from %s to new const", node->GetName().c_str()); + return FAILED; + } + + return SUCCESS; +} + +Status VarIsInitializedOpPass::ChangeNodeToConstant(NodePtr &node, bool inited) { + GE_CHECK_NOTNULL(node); + ComputeGraphPtr graph = node->GetOwnerComputeGraph(); + OpDescPtr constant_op_desc = nullptr; + if (CreateConstant(node, constant_op_desc, inited) != SUCCESS) { + return FAILED; + } + + NodePtr const_node = graph->AddNodeFront(constant_op_desc); + if (const_node == nullptr) { + return FAILED; + } + + if (ProcessInAnchor(node, const_node) != SUCCESS) { + return FAILED; + } + + if (NodeUtils::MoveOutputEdges(node, const_node) != GRAPH_SUCCESS) { + GELOGE(FAILED, "[%s] Move output edges to new node failed.", node->GetName().c_str()); + return FAILED; + } + + if (GraphUtils::RemoveNodeWithoutRelink(graph, node) != SUCCESS) { + GELOGE(FAILED, "[%s] RemoveNodeWithoutRelink failed.", node->GetName().c_str()); + return FAILED; + } + + 
AddRePassNodesWithInOut(const_node); + // delete VarIsInitializedOp node from the graph + AddNodeDeleted(node.get()); + return SUCCESS; +} + +Status VarIsInitializedOpPass::UpdateInitedVars(const NodePtr &node) { + GE_CHECK_NOTNULL(node); + std::set *inited_vars = nullptr; + bool inited_vars_merged = false; + + bool init_var = false; + int64_t inited_var_id; + auto ret = CheckAndSetVarInited(node, init_var, inited_var_id); + if (ret != SUCCESS) { + return ret; + } + + if (init_var) { + inited_vars = CreateInitedVars(); + if (inited_vars == nullptr) { + return OUT_OF_MEMORY; + } + inited_vars_merged = true; + inited_vars->insert(inited_var_id); + } + + for (auto &in_node : node->GetInNodes()) { + GE_CHECK_NOTNULL(in_node->GetOpDesc()); + auto iter = nodes_to_inited_vars_.find(in_node->GetOpDesc()->GetId()); + if (iter == nodes_to_inited_vars_.end()) { + continue; + } + if (inited_vars == nullptr) { + inited_vars = iter->second; + continue; + } + if (inited_vars == iter->second) { + continue; + } + + // if there are multiple different inited_vars set, we should merge them to a new one + if (inited_vars_merged) { + inited_vars->insert(iter->second->begin(), iter->second->end()); + } else { + auto origin_inited_vars = inited_vars; + inited_vars = CreateInitedVars(); + if (inited_vars == nullptr) { + return OUT_OF_MEMORY; + } + inited_vars_merged = true; + inited_vars->insert(origin_inited_vars->begin(), origin_inited_vars->end()); + inited_vars->insert(iter->second->begin(), iter->second->end()); + } + } + + if (inited_vars != nullptr) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + nodes_to_inited_vars_[node->GetOpDesc()->GetId()] = inited_vars; + GELOGD("Inited vars on this graph when node %s, inited vars count %zu", + node->GetName().c_str(), inited_vars->size()); + } + + return SUCCESS; +} + +std::set *VarIsInitializedOpPass::CreateInitedVars() { + std::unique_ptr> inited_vars_keeper(new(std::nothrow) std::set()); + if (inited_vars_keeper == nullptr) { + 
GELOGE(OUT_OF_MEMORY, "Failed to alloc set memory"); + return nullptr; + } + auto inited_vars = inited_vars_keeper.get(); + var_inited_keeper_.emplace_back(std::move(inited_vars_keeper)); + return inited_vars; +} + +bool VarIsInitializedOpPass::IsVarInitedOnTheGraphAndNode(const NodePtr &node, int64_t var_id) const { + if (node == nullptr || node->GetOpDesc() == nullptr) { + return false; + } + auto iter = nodes_to_inited_vars_.find(node->GetOpDesc()->GetId()); + if (iter == nodes_to_inited_vars_.end()) { + return false; + } + return iter->second->count(var_id) > 0; +} + +Status VarIsInitializedOpPass::CheckAndSetVarInited(const NodePtr &node, bool &inited, int64_t &inited_var) { + GE_CHECK_NOTNULL(node); + inited = false; + if (node->GetType() != ASSIGN) { + return SUCCESS; + } + auto ref_in_anchor = node->GetInDataAnchor(kAssignVarRefIndex); + if (ref_in_anchor == nullptr) { + GELOGW("Invalid assign node on graph, no ref input. name %s", node->GetName().c_str()); + return PARAM_INVALID; + } + auto var_out_anchor = ref_in_anchor->GetPeerOutAnchor(); + if (var_out_anchor == nullptr) { + GELOGW("Invalid assign node on graph, no variable peer. name %s", node->GetName().c_str()); + return PARAM_INVALID; + } + auto var = var_out_anchor->GetOwnerNode(); + if (var == nullptr) { + GELOGW("Invalid assign node on graph, no variable peer. name %s", node->GetName().c_str()); + return PARAM_INVALID; + } + inited = true; + GE_CHECK_NOTNULL(var->GetOpDesc()); + inited_var = var->GetOpDesc()->GetId(); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/var_is_initialized_op_pass.h b/src/ge/graph/passes/var_is_initialized_op_pass.h new file mode 100644 index 00000000..83fb421f --- /dev/null +++ b/src/ge/graph/passes/var_is_initialized_op_pass.h @@ -0,0 +1,46 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_VAR_IS_INITIALIZED_OP_PASS_H_ +#define GE_GRAPH_PASSES_VAR_IS_INITIALIZED_OP_PASS_H_ + +#include +#include +#include +#include + +#include "graph/passes/base_pass.h" + +namespace ge { +class VarIsInitializedOpPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override; + + private: + Status CheckSrcNode(const NodePtr &node, bool &inited) const; + Status CreateConstant(NodePtr &node, OpDescPtr &op_desc, bool inited); + Status ProcessInAnchor(NodePtr &node, NodePtr &new_node); + Status ChangeNodeToConstant(NodePtr &node, bool inited); + Status UpdateInitedVars(const NodePtr &node); + Status CheckAndSetVarInited(const NodePtr &node, bool &inited, int64_t &inited_var); + std::set *CreateInitedVars(); + bool IsVarInitedOnTheGraphAndNode(const NodePtr &node, int64_t var_id) const; + + std::vector>> var_inited_keeper_; + std::map *> nodes_to_inited_vars_; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_VAR_IS_INITIALIZED_OP_PASS_H_ diff --git a/src/ge/graph/passes/variable_format_pass.cc b/src/ge/graph/passes/variable_format_pass.cc new file mode 100644 index 00000000..ebd5f8b6 --- /dev/null +++ b/src/ge/graph/passes/variable_format_pass.cc @@ -0,0 +1,122 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/variable_format_pass.h" + +#include +#include +#include + +#include "framework/common/debug/ge_log.h" + +namespace ge { +Status VariableFormatPass::Run(ge::ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + + for (auto &node : graph->GetDirectNode()) { + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, continue); + GE_IF_BOOL_EXEC(node->GetOpDesc()->GetType() != VARIABLE, continue); + + ge::NodePtr use_node = nullptr; + if (GetApplyMomentumOpByVariableInput(node, use_node)) { + GE_CHK_STATUS_RET(UpdateVariableOutFormat(node, use_node), "update variable out format failed"); + GE_CHK_STATUS_RET(UpdateApplyMomentumInputFormat(use_node), "update apply momentum input format failed"); + } + } + + return domi::SUCCESS; +} + +bool VariableFormatPass::GetApplyMomentumOpByVariableInput(const ge::NodePtr &var_node, ge::NodePtr &use_node) { + GE_IF_BOOL_EXEC(var_node == nullptr, return false); + + std::map> confirm_ops = {{"ApplyMomentum", {1}}}; + for (auto &out_anchor : var_node->GetAllOutDataAnchors()) { + for (auto &in_anchor : out_anchor->GetPeerInDataAnchors()) { + GE_IF_BOOL_EXEC(ConfirmUseOpAndIndexByAnchor(in_anchor, confirm_ops, use_node), return true); + } + } + + return false; +} + +bool VariableFormatPass::ConfirmUseOpAndIndexByAnchor(const ge::InDataAnchorPtr &in_anchor, + const map> &confirm_ops, + ge::NodePtr &use_node) { + GE_IF_BOOL_EXEC(in_anchor == nullptr, return false); + ge::NodePtr dst_node = in_anchor->GetOwnerNode(); + ge::OpDescPtr dst_op_desc = dst_node->GetOpDesc(); + GE_IF_BOOL_EXEC(dst_op_desc == 
nullptr, return false); + const string &dst_type = dst_op_desc->GetType(); + int input_index = in_anchor->GetIdx(); + + GELOGD("ConfirmUseOpAndIndex, var name %s, dst_type = %s, input index %d", dst_node->GetName().c_str(), + dst_type.c_str(), input_index); + + GE_IF_BOOL_EXEC( + confirm_ops.count(dst_type) > 0, + GE_IF_BOOL_EXEC(confirm_ops.at(dst_type).count(input_index) > 0, use_node = dst_node; return true);); + return false; +} + +Status VariableFormatPass::UpdateVariableOutFormat(const ge::NodePtr &var_node, ge::NodePtr &use_node) { + GE_CHECK_NOTNULL(var_node); + GE_CHECK_NOTNULL(use_node); + ge::OpDescPtr op_desc_ptr = use_node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc_ptr); + GE_CHECK_NOTNULL(use_node->GetInDataAnchor(0)); + GE_CHECK_NOTNULL(use_node->GetInDataAnchor(0)->GetPeerOutAnchor()); + NodePtr in_node = use_node->GetInDataAnchor(0)->GetPeerOutAnchor()->GetOwnerNode(); + if (in_node != nullptr) { + string in_op_type = in_node->GetType(); + if ((in_op_type == VARIABLE) && (in_node->GetOpDesc() != nullptr) && + (in_node->GetOpDesc()->MutableOutputDesc(0) != nullptr)) { + ge::Format format = in_node->GetOpDesc()->MutableOutputDesc(0)->GetFormat(); + ge::OpDescPtr cur_op_desc_ptr = var_node->GetOpDesc(); + if (cur_op_desc_ptr != nullptr) { + cur_op_desc_ptr->MutableOutputDesc(0)->SetFormat(format); + cur_op_desc_ptr->MutableOutputDesc(0)->SetOriginFormat(format); + } + } + } + return domi::SUCCESS; +} + +Status VariableFormatPass::UpdateApplyMomentumInputFormat(const ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + ge::OpDescPtr op_desc_ptr = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc_ptr); + GE_CHECK_NOTNULL(node->GetInDataAnchor(0)); + GE_CHECK_NOTNULL(node->GetInDataAnchor(0)->GetPeerOutAnchor()); + GE_CHECK_NOTNULL(op_desc_ptr->MutableInputDesc(0)); + GE_CHECK_NOTNULL(op_desc_ptr->MutableInputDesc(1)); + GE_CHECK_NOTNULL(op_desc_ptr->MutableOutputDesc(0)); + NodePtr in_node = node->GetInDataAnchor(0)->GetPeerOutAnchor()->GetOwnerNode(); + if 
(in_node != nullptr) { + string inOpType = in_node->GetType(); + if ((inOpType == VARIABLE) && (in_node->GetOpDesc() != nullptr)) { + ge::Format format = in_node->GetOpDesc()->MutableOutputDesc(0)->GetFormat(); + op_desc_ptr->MutableInputDesc(0)->SetFormat(format); + op_desc_ptr->MutableInputDesc(0)->SetOriginFormat(format); + op_desc_ptr->MutableInputDesc(1)->SetFormat(format); + op_desc_ptr->MutableInputDesc(1)->SetOriginFormat(format); + op_desc_ptr->MutableOutputDesc(0)->SetFormat(format); + op_desc_ptr->MutableOutputDesc(0)->SetOriginFormat(format); + } + } + return domi::SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/variable_format_pass.h b/src/ge/graph/passes/variable_format_pass.h new file mode 100644 index 00000000..009ae14f --- /dev/null +++ b/src/ge/graph/passes/variable_format_pass.h @@ -0,0 +1,45 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PASSES_VARIABLE_FORMAT_PASS_H_ +#define GE_GRAPH_PASSES_VARIABLE_FORMAT_PASS_H_ + +#include +#include +#include + +#include "graph/types.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/graph_pass.h" + +namespace ge { +class VariableFormatPass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph) override; + + private: + bool GetApplyMomentumOpByVariableInput(const ge::NodePtr &var_node, ge::NodePtr &use_node); + + bool ConfirmUseOpAndIndexByAnchor(const ge::InDataAnchorPtr &in_anchor, + const map > &confirm_ops, ge::NodePtr &use_node); + + Status UpdateApplyMomentumInputFormat(const ge::NodePtr &node); + + Status UpdateVariableOutFormat(const ge::NodePtr &var_node, ge::NodePtr &use_node); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_VARIABLE_FORMAT_PASS_H_ diff --git a/src/ge/graph/passes/variable_op_pass.cc b/src/ge/graph/passes/variable_op_pass.cc new file mode 100644 index 00000000..04a0ae72 --- /dev/null +++ b/src/ge/graph/passes/variable_op_pass.cc @@ -0,0 +1,595 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/variable_op_pass.h" + +#include +#include + +#include "common/formats/formats.h" +#include "common/formats/utils/formats_trans_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/ge_context.h" +#include "graph/graph.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" + +namespace ge { +namespace { +const int kTransOpOutIndex = 0; + +std::string GetKey(Format format, DataType type, const std::vector &dims) { + std::stringstream key; + key << static_cast(format) << '-'; + key << static_cast(type) << '-'; + for (auto dim : dims) { + key << dim << '-'; + } + return key.str(); +} + +Status ByPassTransNode(NodePtr &trans_node, NodePtr &ref_node) { + GE_CHECK_NOTNULL(trans_node); + GE_CHECK_NOTNULL(ref_node); + GELOGD("Begin to bypass trans node %s", trans_node->GetName().c_str()); + auto ret = GraphUtils::CopyInCtrlEdges(trans_node, ref_node); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to move control edges from trans " + "node %s to var-ref %s", + trans_node->GetName().c_str(), ref_node->GetName().c_str()); + return INTERNAL_ERROR; + } + auto ref_in_anchor = ref_node->GetInDataAnchor(0); + if (ref_in_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, + "The variable ref node %s does not have an " + "input anchor", + ref_node->GetName().c_str()); + return INTERNAL_ERROR; + } + ref_in_anchor->UnlinkAll(); + auto trans_in_anchor = trans_node->GetInDataAnchor(0); + if (trans_in_anchor == nullptr) { + GELOGE(INTERNAL_ERROR, + "Failed to get the in data anchor from trans" + " node %s type %s", + trans_node->GetName().c_str(), trans_node->GetType().c_str()); + return INTERNAL_ERROR; + } + auto prev_trans_node_out_anchor = trans_in_anchor->GetPeerOutAnchor(); + if (prev_trans_node_out_anchor == nullptr) { + GELOGW( + "The trans node %s does not have an input, so the ref node %s does" + " not have any 
inputs after bypass", + trans_node->GetName().c_str(), trans_node->GetName().c_str()); + } else { + ret = GraphUtils::AddEdge(prev_trans_node_out_anchor, ref_in_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to add edge between ref node %s " + "and the prev node of trans node %s", + ref_node->GetName().c_str(), trans_node->GetName().c_str()); + return INTERNAL_ERROR; + } + } + return SUCCESS; +} + +bool IsTransSupport(const TransNodeInfo &trans_info) { + if (trans_info.node_type == RESHAPE || trans_info.node_type == REFORMAT) { + return true; + } else if (trans_info.node_type == TRANSDATA) { + formats::TransArgs args{nullptr, + trans_info.input.GetFormat(), + trans_info.output.GetFormat(), + trans_info.input.GetShape().GetDims(), + trans_info.output.GetShape().GetDims(), + trans_info.input.GetDataType()}; + return formats::IsTransFormatSupport(args); + } else if (trans_info.node_type == CAST) { + formats::CastArgs datatype_args{nullptr, static_cast(trans_info.input.GetShape().GetShapeSize()), + trans_info.input.GetDataType(), trans_info.output.GetDataType()}; + return formats::IsTransDataTypeSupport(datatype_args); + } else { + return false; + } +} +} // namespace + +Status VariableOpPass::Run(ge::ComputeGraphPtr graph) { + if (graph == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to run variable op pass, null graph"); + return INTERNAL_ERROR; + } + + GELOGD("Begin to run variable op pass on graph %s, session %lu, graph id %u", graph->GetName().c_str(), + GetContext().SessionId(), graph->GetGraphID()); + + if (var_accelerate_ctrl_ == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to run var op pass, the variable accelerate control is null"); + return INTERNAL_ERROR; + } + + GELOGD("Begin to generate ref map for variable and refs, graph name:%s.", graph->GetName().c_str()); + if (RenewVarDesc(graph) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to renew var desc on graph"); + return GE_GRAPH_VARIABLE_OP_PASS_FAILED; + } + + if 
(GenerateVariableVariableRefMap(graph) != SUCCESS) {
    // (continuation of VariableOpPass::Run — the `if` keyword is on the previous chunk line)
    GELOGE(INTERNAL_ERROR, "Failed to generate variable map for graph %s", graph->GetName().c_str());
    return GE_GRAPH_VARIABLE_OP_PASS_FAILED;
  }

  GELOGD("Begin to fusion variables and trans nodes");
  // For every variable (keys of var_and_var_ref_map_): compute the chain of
  // trans ops that can be folded into it (the "fusion road"), record that road
  // in the VarManager, and propagate the final format to the var and its refs.
  for (auto &var_to_refs : var_and_var_ref_map_) {
    auto &node = var_to_refs.first;
    GE_CHECK_NOTNULL(node);
    GE_CHECK_NOTNULL(var_accelerate_ctrl_);
    if (!var_accelerate_ctrl_->IsVarPermitToChangeFormats(node->GetName())) {
      GELOGD("The var %s does not permit to change formats, skip it", node->GetName().c_str());
      continue;
    }

    VarTransRoad fusion_road;
    auto ret = FusionIfNeed(node, fusion_road);
    if (ret != SUCCESS) {
      return ret;
    }

    if (fusion_road.empty()) {
      GELOGD("No need to fusion variable and trans op for var %s", node->GetName().c_str());
      continue;
    }

    // start_iter/end_iter: first and last hop of the road (rbegin gives the
    // last element, i.e. the final format/type the variable data must reach).
    auto start_iter = fusion_road.begin();
    auto end_iter = fusion_road.rbegin();
    GELOGI(
        "Trans variable data for %s from format %s to %s, shape %s to %s "
        "data-type %s to %s, path len %zu success",
        node->GetName().c_str(), TypeUtils::FormatToSerialString(start_iter->input.GetFormat()).c_str(),
        TypeUtils::FormatToSerialString(end_iter->output.GetFormat()).c_str(),
        formats::ShapeToString(start_iter->input.GetShape().GetDims()).c_str(),
        formats::ShapeToString(end_iter->output.GetShape().GetDims()).c_str(),
        TypeUtils::DataTypeToSerialString(start_iter->input.GetDataType()).c_str(),
        TypeUtils::DataTypeToSerialString(end_iter->output.GetDataType()).c_str(), fusion_road.size());

    // Persist the trans road so the runtime can convert the stored variable
    // data, and remember which graph changed this variable.
    ret = VarManager::Instance(graph->GetSessionID())->SetTransRoad(node->GetName(), fusion_road);
    if (ret != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "Failed to update the format fusion road for var %s", node->GetName().c_str());
      return INTERNAL_ERROR;
    }
    ret = VarManager::Instance(graph->GetSessionID())->SetChangedGraphId(node->GetName(), graph->GetGraphID());
    if (ret != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "Failed to update the graph id for var %s", node->GetName().c_str());
      return INTERNAL_ERROR;
    }
    var_accelerate_ctrl_->SetVarChanged(node->GetName());

    GELOGD("Begin to update format info for var %s.", node->GetName().c_str());
    // NOTE(review): the template argument of std::set was stripped by the
    // paste (presumably std::set<NodePtr>); kept byte-identical here.
    std::set node_set({node});
    if (UpdateIOFormatInfo(end_iter->output, node_set) != SUCCESS) {
      return GE_GRAPH_VARIABLE_OP_PASS_FAILED;
    }
  }

  return SUCCESS;
}

// Fold the trans ops adjacent to @p var_node into it: every direct out-data
// consumer (already verified to be a compatible trans op) is isolated and
// removed; trans producers of the variable's refs are bypassed or removed.
Status VariableOpPass::DealFusion(const ge::NodePtr &var_node) {
  GE_CHECK_NOTNULL(var_node);
  GELOGD("Begin to fusion var %s with trans", var_node->GetName().c_str());
  auto graph = var_node->GetOwnerComputeGraph();
  for (auto &trans_node : var_node->GetOutDataNodes()) {
    GELOGI("Remove node %s type %s when fusion with variable %s", trans_node->GetName().c_str(),
           trans_node->GetType().c_str(), var_node->GetName().c_str());

    if (GraphUtils::IsolateNode(trans_node, {0}) != SUCCESS) {
      return GE_GRAPH_VARIABLE_OP_PASS_FAILED;
    }

    if (GraphUtils::RemoveNodeWithoutRelink(graph, trans_node) != SUCCESS) {
      return GE_GRAPH_VARIABLE_OP_PASS_FAILED;
    }
  }

  auto iterator = var_and_var_ref_map_.find(var_node);
  if (iterator == var_and_var_ref_map_.end()) {
    GELOGD("there is no var_ref of node %s", var_node->GetName().c_str());
    return SUCCESS;
  }

  for (auto ref_node : iterator->second) {
    GE_CHECK_NOTNULL(ref_node);
    for (auto &trans_node : ref_node->GetInDataNodes()) {
      GELOGI("Remove node %s type %s when fusion with variable %s", trans_node->GetName().c_str(),
             trans_node->GetType().c_str(), var_node->GetName().c_str());
      if (trans_node->GetOutDataNodes().size() > 1) {
        // The trans node feeds other consumers too, so it cannot be removed;
        // only the edge to the var-ref is rewired around it.
        GELOGD(
            "The trans node %s type %s connecting with var-ref %s has more"
            " than one output data nodes, unlink the edge between them",
            trans_node->GetName().c_str(), trans_node->GetType().c_str(), ref_node->GetName().c_str());
        if (ByPassTransNode(trans_node, ref_node) != SUCCESS) {
          GELOGE(INTERNAL_ERROR, "Failed to bypass trans node %s to ref %s", trans_node->GetName().c_str(),
ref_node->GetName().c_str());
          // (closes the GELOGE call begun on the previous chunk line)
          return INTERNAL_ERROR;
        }
      } else {
        // Sole consumer: the trans node can be dropped entirely.
        GELOGD(
            "The trans node %s type %s connecting with var-ref %s has only"
            " one output data nodes, isolate and remove it.",
            trans_node->GetName().c_str(), trans_node->GetType().c_str(), ref_node->GetName().c_str());
        if (GraphUtils::IsolateNode(trans_node, {0}) != SUCCESS) {
          return GE_GRAPH_VARIABLE_OP_PASS_FAILED;
        }
        if (GraphUtils::RemoveNodeWithoutRelink(graph, trans_node) != SUCCESS) {
          return GE_GRAPH_VARIABLE_OP_PASS_FAILED;
        }
      }
    }
  }

  return SUCCESS;
}

// Inspect every out-data consumer of @p var_node. is_matched becomes true (and
// one hop is appended to @p fusion_road) only when ALL consumers are trans ops
// that agree on a single (format, dtype, shape) target and that target is
// supported by IsTransSupport. Any disagreement or non-trans consumer leaves
// is_matched false and returns SUCCESS (the variable is simply skipped).
Status VariableOpPass::CheckSameAndTransOp(const ge::NodePtr &var_node, bool &is_matched, VarTransRoad &fusion_road) {
  // NOTE(review): std::set's template argument was stripped by the paste
  // (presumably std::set<std::string>); kept byte-identical.
  std::set data_type_and_formats;
  std::string trans_op_type;
  ge::NodePtr out_node;
  ge::GeTensorDesc output_desc;
  GE_CHECK_NOTNULL(var_node);
  for (auto &out_node_and_anchor : var_node->GetOutDataNodesAndAnchors()) {
    auto in_anchor = out_node_and_anchor.second;
    GE_CHECK_NOTNULL(in_anchor);
    out_node = out_node_and_anchor.first;
    GE_CHECK_NOTNULL(out_node);
    auto trans_op_desc = out_node->GetOpDesc();
    GE_CHECK_NOTNULL(trans_op_desc);
    trans_op_type = trans_op_desc->GetType();

    GELOGD("current node type is %s.", trans_op_type.c_str());
    // data_index < 0 means the consumer is not a known trans op at all.
    int data_index = TransOpUtil::GetTransOpDataIndex(trans_op_type);
    if (data_index < 0) {
      GELOGD("Variables only can be fusion with trans_op, the next op is %s type %s", out_node->GetName().c_str(),
             out_node->GetType().c_str());
      return SUCCESS;
    }
    if (data_index != in_anchor->GetIdx()) {
      // The variable feeds a non-data input of the trans op (e.g. a shape
      // input), so nothing would actually be transformed.
      GELOGD(
          "Variables only can be fusion with trans nodes, the next node %s"
          " type %s index %d does not trans anything(correct index %d)",
          out_node->GetName().c_str(), out_node->GetType().c_str(), in_anchor->GetIdx(), data_index);
      return SUCCESS;
    }

    output_desc = trans_op_desc->GetOutputDesc(kTransOpOutIndex);

    auto trans_op_format = output_desc.GetFormat();
    auto trans_op_data_type = output_desc.GetDataType();
    auto shape = output_desc.GetShape().GetDims();
    auto datatype_and_format = GetKey(trans_op_format, trans_op_data_type, shape);
    data_type_and_formats.insert(datatype_and_format);
  }

  if (data_type_and_formats.empty()) {
    return SUCCESS;
  }

  if (data_type_and_formats.size() > 1) {
    // Consumers disagree on the target description — fusion is not possible;
    // log the competing keys "k1|k2|..." and skip this variable.
    std::stringstream type_and_formats_stream;
    bool first_time = true;
    for (const auto &data_type_and_format : data_type_and_formats) {
      if (first_time) {
        first_time = false;
      } else {
        type_and_formats_stream << "|";
      }
      type_and_formats_stream << data_type_and_format;
    }

    GELOGW(
        "trans_op type size for var Node(%s) is over 1, Currently not"
        " supported, dataTypeAndFormats is %s.",
        var_node->GetName().c_str(), type_and_formats_stream.str().c_str());
    return SUCCESS;
  }

  // Exactly one agreed target: describe the hop using the last visited
  // consumer (all consumers are equivalent at this point).
  int tran_in_index = TransOpUtil::GetTransOpDataIndex(out_node->GetType());
  auto out_op_desc = out_node->GetOpDesc();
  GE_CHECK_NOTNULL(out_op_desc);
  TransNodeInfo trans_node_info;
  trans_node_info.node_type = out_node->GetType();
  trans_node_info.input = out_op_desc->GetInputDesc(tran_in_index);
  trans_node_info.output = out_op_desc->GetOutputDesc(kTransOpOutIndex);

  if (!IsTransSupport(trans_node_info)) {
    GELOGD("The trans node %s does not support, skip the variable accelerating", trans_node_info.node_type.c_str());
    return SUCCESS;
  }

  is_matched = true;
  fusion_road.emplace_back(trans_node_info);

  return SUCCESS;
}

// Verify that every ref of @p var_node mirrors the variable's trans structure
// (see CheckVarAndVarRefAreAlike); is_var_ref_legally reports the verdict.
// (This function continues past this chunk line.)
Status VariableOpPass::CheckVariableRefLegally(const ge::NodePtr &var_node, bool &is_var_ref_legally) {
  is_var_ref_legally = true;
  GE_CHECK_NOTNULL(var_node);
  auto iterator = var_and_var_ref_map_.find(var_node);
  if (iterator == var_and_var_ref_map_.end()) {
    GELOGD("var name %s are not in var var_ref map", var_node->GetName().c_str());
    return SUCCESS;
  }

  GELOGD("var name %s, ref var count %zu.", var_node->GetName().c_str(), iterator->second.size());

  for (const auto &var_ref_node : iterator->second) {
    if (CheckVarAndVarRefAreAlike(var_node,
var_ref_node, is_var_ref_legally) != SUCCESS) {
      // (completes the CheckVarAndVarRefAreAlike call begun on the previous chunk line)
      return GE_GRAPH_VARIABLE_OP_PASS_FAILED;
    }

    GELOGD("is_var_ref_legally is %d", is_var_ref_legally);

    if (!is_var_ref_legally) {
      // First illegal ref decides the verdict; no need to inspect the rest.
      return SUCCESS;
    }
  }
  return SUCCESS;
}

// Write @p final_output's (format, data type, shape) onto @p node's output 0
// and onto the output-0 AND input-0 descs of every ref of @p node. The ref map
// is regenerated once if @p node is not found in it.
Status VariableOpPass::UpdateVarAndRefOutputFormatInfo(const GeTensorDesc &final_output, const ge::NodePtr &node) {
  if (node == nullptr || node->GetOpDesc() == nullptr) {
    GELOGE(FAILED, "node or opdesc is nullptr");
    return FAILED;
  }
  const Format &format = final_output.GetFormat();
  const DataType &data_type = final_output.GetDataType();
  const GeShape &shape = final_output.GetShape();
  GELOGD("last ref is (%s, %s, %lu), var_ref_name is %s.", TypeUtils::DataTypeToSerialString(data_type).c_str(),
         TypeUtils::FormatToSerialString(format).c_str(), shape.GetDims().size(), node->GetName().c_str());

  auto node_desc = node->GetOpDesc()->GetOutputDesc(0);
  CopyVariableFormatDataTypeAndShape(final_output, node_desc);
  if (node->GetOpDesc()->UpdateOutputDesc(0, node_desc) != GRAPH_SUCCESS) {
    GELOGE(FAILED, "update output desc fail.");
    return FAILED;
  }
  GELOGD("node ref is (%s, %s, %lu), var_ref_name is %s.",
         TypeUtils::DataTypeToSerialString(node->GetOpDesc()->GetOutputDesc(0).GetDataType()).c_str(),
         TypeUtils::FormatToSerialString(node->GetOpDesc()->GetOutputDesc(0).GetFormat()).c_str(),
         node->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims().size(), node->GetName().c_str());

  auto iterator = var_and_var_ref_map_.find(node);
  if (iterator == var_and_var_ref_map_.end()) {
    // The map may be stale (e.g. node added after the map was built):
    // rebuild it once and retry the lookup below.
    auto graph = node->GetOwnerComputeGraph();
    if (GenerateVariableVariableRefMap(graph) != SUCCESS) {
      GELOGE(INTERNAL_ERROR, "Failed to generate variable map for graph %s", graph->GetName().c_str());
      return GE_GRAPH_VARIABLE_OP_PASS_FAILED;
    }
  }
  iterator = var_and_var_ref_map_.find(node);
  if (iterator == var_and_var_ref_map_.end()) {
    GELOGW("The var node %s which belongs to graph %s can not be found on the graph", node->GetName().c_str(),
           node->GetOwnerComputeGraph()->GetName().c_str());
    return SUCCESS;
  }

  for (const auto &var_ref_node : iterator->second) {
    auto var_ref_node_description = var_ref_node->GetOpDesc();
    GE_CHECK_NOTNULL(var_ref_node_description);

    GELOGD("var_ref_node before is (%s, %s, %zu), var_ref_name is %s.",
           TypeUtils::DataTypeToSerialString(data_type).c_str(), TypeUtils::FormatToSerialString(format).c_str(),
           shape.GetDims().size(), var_ref_node->GetName().c_str());
    // Update failures on refs are tolerated (warning only), matching the
    // best-effort behavior of the original.
    if (var_ref_node_description->UpdateOutputDesc(0, node_desc) != GRAPH_SUCCESS) {
      GELOGW("UpdateOutputDesc fail.");
    }
    if (var_ref_node_description->UpdateInputDesc(0, node_desc) != GRAPH_SUCCESS) {
      GELOGW("UpdateInputDesc fail.");
    }
    GELOGD("var_ref_node ref is (%s, %s, %zu), var_ref_name is %s.",
           TypeUtils::DataTypeToSerialString(var_ref_node_description->GetInputDesc(0).GetDataType()).c_str(),
           TypeUtils::FormatToSerialString(var_ref_node_description->GetInputDesc(0).GetFormat()).c_str(),
           var_ref_node_description->GetOutputDesc(0).GetShape().GetDims().size(), var_ref_node->GetName().c_str());
  }

  return SUCCESS;
}

// Build var_and_var_ref_map_: VARIABLE nodes without REF_VAR_SRC_VAR_NAME are
// "real" variables (keys); those with the attribute are refs, grouped under
// the variable they point at.
Status VariableOpPass::GenerateVariableVariableRefMap(const ComputeGraphPtr &compute_graph) {
  // NOTE(review): template arguments of the two std::map declarations were
  // stripped by the paste (presumably map<string, NodePtr> and
  // map<string, std::set<NodePtr>>); kept byte-identical.
  std::map names_to_var;
  std::map> names_to_refs;
  GE_CHECK_NOTNULL(compute_graph);
  for (auto &node : compute_graph->GetAllNodes()) {
    if (node->GetType() != VARIABLE) {
      continue;
    }
    std::string ref_var_name;
    if (!ge::AttrUtils::GetStr(node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_name)) {
      names_to_var[node->GetName()] = node;
    } else {
      names_to_refs[ref_var_name].insert(node);
    }
  }

  for (auto &name_to_var : names_to_var) {
    var_and_var_ref_map_[name_to_var.second] = names_to_refs[name_to_var.first];
  }
  return SUCCESS;
}

// (CheckVarAndVarRefAreAlike continues past this chunk line.)
Status VariableOpPass::CheckVarAndVarRefAreAlike(const NodePtr &var_node, const NodePtr &var_ref_node,
                                                 bool &is_var_and_variable_ref_are_alike) {
  GE_CHECK_NOTNULL(var_node);
GE_CHECK_NOTNULL(var_ref_node); + GELOGD("var_node GetOutDataNodes. name is %s.", var_node->GetName().c_str()); + const auto &var_node_trans_nodes = var_node->GetOutDataNodes(); + GELOGD("var_node_trans_nodes size is %zu.", var_node_trans_nodes.size()); + GELOGD("var_ref_node GetOutDataNodes. name is %s.", var_ref_node->GetName().c_str()); + const auto &var_ref_node_trans_nodes = var_ref_node->GetInDataNodes(); + GELOGD("var_ref_node_trans_nodes size is %zu.", var_ref_node_trans_nodes.size()); + + if (var_ref_node_trans_nodes.size() > 1) { + GELOGE(GE_GRAPH_VARIABLE_OP_PASS_FAILED, "var_ref_node_trans_nodes.size() > 1."); + return GE_GRAPH_VARIABLE_OP_PASS_FAILED; + } + + const auto &var_node_trans_node = var_node_trans_nodes.at(0); + const auto &var_ref_node_trans_node = var_ref_node_trans_nodes.at(0); + + if (CheckTransNodeAreInverse(var_node_trans_node, var_ref_node_trans_node, is_var_and_variable_ref_are_alike) != + SUCCESS) { + return GE_GRAPH_VARIABLE_OP_PASS_FAILED; + } + + return SUCCESS; +} + +Status VariableOpPass::CheckTransNodeAreInverse(const NodePtr &node_a, const NodePtr &node_b, bool &is_same) { + GELOGD("In CheckTransNodeAreInverse."); + GE_CHECK_NOTNULL(node_a); + GE_CHECK_NOTNULL(node_b); + const auto &node_a_op_desc = node_a->GetOpDesc(); + const auto &node_b_op_desc = node_b->GetOpDesc(); + GE_CHECK_NOTNULL(node_a_op_desc); + GE_CHECK_NOTNULL(node_b_op_desc); + const auto &node_a_out_op_desc = node_a_op_desc->GetOutputDesc(0); + const auto &node_a_in_op_desc = node_a_op_desc->GetInputDesc(0); + + const auto &node_b_out_op_desc = node_b_op_desc->GetOutputDesc(0); + const auto &node_b_in_op_desc = node_b_op_desc->GetInputDesc(0); + + is_same = IsOpDescSame(node_a_out_op_desc, node_b_in_op_desc) && IsOpDescSame(node_b_out_op_desc, node_a_in_op_desc); + + return SUCCESS; +} + +bool VariableOpPass::IsOpDescSame(const GeTensorDesc &op_desc_a, const GeTensorDesc &op_desc_b) { + const auto format_a = op_desc_a.GetFormat(); + const auto type_a = 
op_desc_a.GetDataType(); + const auto shape_a = op_desc_a.GetShape(); + + const auto format_b = op_desc_b.GetFormat(); + const auto type_b = op_desc_b.GetDataType(); + const auto shape_b = op_desc_b.GetShape(); + + const auto &dims_a = shape_a.GetDims(); + const auto &dims_b = shape_b.GetDims(); + GELOGD("(format, data type, shape) = (%s, %s, %zu) (%s, %s, %zu)", TypeUtils::FormatToSerialString(format_a).c_str(), + TypeUtils::DataTypeToSerialString(type_a).c_str(), dims_a.size(), + TypeUtils::FormatToSerialString(format_b).c_str(), TypeUtils::DataTypeToSerialString(type_b).c_str(), + dims_b.size()); + return (format_a == format_b) && (type_a == type_b) && (dims_a == dims_b); +} + +void VariableOpPass::CopyVariableFormatDataTypeAndShape(const GeTensorDesc &src_tensor_desc, + GeTensorDesc &dst_tensor_desc) { + dst_tensor_desc.SetShape(src_tensor_desc.GetShape()); + dst_tensor_desc.SetFormat(src_tensor_desc.GetFormat()); + dst_tensor_desc.SetDataType(src_tensor_desc.GetDataType()); +} + +Status VariableOpPass::CheckIfCouldBeOptimized(const ge::NodePtr &node, bool &flag, VarTransRoad &fusion_road) { + if (node == nullptr) { + return FAILED; + } + bool is_matched = false; + auto ret = CheckSameAndTransOp(node, is_matched, fusion_road); + if (ret != SUCCESS) { + return GE_GRAPH_VARIABLE_OP_PASS_FAILED; + } + if (!is_matched) { + flag = false; + return SUCCESS; + } + + bool is_var_ref_legally = false; + ret = CheckVariableRefLegally(node, is_var_ref_legally); + if (ret != SUCCESS) { + return GE_GRAPH_VARIABLE_OP_PASS_FAILED; + } + GELOGD("is_var_ref_legally is %d.", is_var_ref_legally); + if (!is_var_ref_legally) { + GELOGI("variable ref connection are illegally"); + flag = false; + fusion_road.clear(); + return SUCCESS; + } + + flag = true; + GELOGD("node %s, is_matched = %d is_var_ref_legally = %d, flag = %d", node->GetName().c_str(), is_matched, + is_var_ref_legally, flag); + + return SUCCESS; +} + +Status VariableOpPass::FusionIfNeed(const NodePtr &var, VarTransRoad 
&fusion_road) { + bool can_fusion = false; + while (true) { + auto ret = CheckIfCouldBeOptimized(var, can_fusion, fusion_road); + if (ret != SUCCESS) { + return ret; + } + if (!can_fusion) { + break; + } + + ret = DealFusion(var); + if (ret != SUCCESS) { + return ret; + } + } + return SUCCESS; +} + +Status VariableOpPass::UpdateIOFormatInfo(const GeTensorDesc &final_output, std::set &nodes) { + for (auto &need_set_node : nodes) { + auto ret = UpdateVarAndRefOutputFormatInfo(final_output, need_set_node); + if (ret != SUCCESS) { + return GE_GRAPH_VARIABLE_OP_PASS_FAILED; + } + } + return SUCCESS; +} + +Status VariableOpPass::RenewVarDesc(ge::ComputeGraphPtr &graph) { + GE_CHECK_NOTNULL(graph); + // renew var manager desc + Status ret = SUCCESS; + for (auto &node : graph->GetDirectNode()) { + bool is_var_node = (node->GetType() == VARIABLE) || (node->GetType() == VARIABLEV2) || + (node->GetType() == VARHANDLEOP); + if (is_var_node) { + if (!ge::VarManager::Instance(graph->GetSessionID())->IsVarExist(node->GetName())) { + GELOGI("var manager does not exist var node[%s]", node->GetName().c_str()); + continue; + } + GELOGD("var manager exist var node[%s], graph name[%s]", node->GetName().c_str(), graph->GetName().c_str()); + GE_CHECK_NOTNULL(node->GetOpDesc()); + ret = ge::VarManager::Instance(graph->GetSessionID())->RenewCurVarDesc(node->GetName(), node->GetOpDesc()); + if (ret != SUCCESS) { + GELOGE(FAILED, "var manager renew var[%s] descriptor failed!", node->GetName().c_str()); + return FAILED; + } + } + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/variable_op_pass.h b/src/ge/graph/passes/variable_op_pass.h new file mode 100644 index 00000000..f97f8d8a --- /dev/null +++ b/src/ge/graph/passes/variable_op_pass.h @@ -0,0 +1,77 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_VARIABLE_OP_PASS_H_ +#define GE_GRAPH_PASSES_VARIABLE_OP_PASS_H_ + +#include +#include + +#include "graph/common/transop_util.h" +#include "graph/graph.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/manager/util/variable_accelerate_ctrl.h" +#include "inc/graph_pass.h" + +namespace ge { +namespace variable_op { +struct NodeDesc { + ge::GeTensorDesc input; + ge::GeTensorDesc output; + bool is_update = false; +}; +} // namespace variable_op +class VariableOpPass : public GraphPass { + public: + explicit VariableOpPass(VarAccelerateCtrl *ctrl) : var_accelerate_ctrl_(ctrl) {} + + ~VariableOpPass() override = default; + + Status Run(ge::ComputeGraphPtr graph) override; + + private: + Status DealFusion(const ge::NodePtr &src_node); + + Status CheckVariableRefLegally(const ge::NodePtr &var_node, bool &is_var_legally); + + Status UpdateVarAndRefOutputFormatInfo(const GeTensorDesc &final_output, const ge::NodePtr &node); + + Status GenerateVariableVariableRefMap(const ComputeGraphPtr &compute_graph); + + Status CheckVarAndVarRefAreAlike(const NodePtr &var_node, const NodePtr &var_ref_node, + bool &is_var_and_var_ref_alike); + + bool IsOpDescSame(const GeTensorDesc &op_desc_a, const GeTensorDesc &op_desc_b); + + Status CheckTransNodeAreInverse(const NodePtr &node_a, const NodePtr &node_b, bool &is_trans_node_inverse); + + void CopyVariableFormatDataTypeAndShape(const GeTensorDesc &src_tensor_desc, GeTensorDesc &dst_tensor_desc); + + Status CheckSameAndTransOp(const ge::NodePtr &var_nodem, bool &is_matched, 
VarTransRoad &fusion_road); + + Status CheckIfCouldBeOptimized(const ge::NodePtr &node, bool &flag, VarTransRoad &fusion_road); + + Status FusionIfNeed(const NodePtr &var, VarTransRoad &fusion_road); + + Status UpdateIOFormatInfo(const GeTensorDesc &final_output, std::set &nodes); + Status RenewVarDesc(ge::ComputeGraphPtr &graph); + + std::map> var_and_var_ref_map_; + + VarAccelerateCtrl *var_accelerate_ctrl_; +}; +} // namespace ge +#endif // GE_GRAPH_PASSES_VARIABLE_OP_PASS_H_ diff --git a/src/ge/graph/passes/variable_prepare_op_pass.cc b/src/ge/graph/passes/variable_prepare_op_pass.cc new file mode 100644 index 00000000..e9a63fbc --- /dev/null +++ b/src/ge/graph/passes/variable_prepare_op_pass.cc @@ -0,0 +1,305 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/variable_prepare_op_pass.h" + +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge/ge_util.h" +#include "external/graph/graph.h" +#include "graph/node.h" +#include "graph/utils/tensor_utils.h" + + +namespace ge { +Status VariablePrepareOpPass::Run(ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + for (auto &node : graph->GetDirectNode()) { + GELOGD("before VariablePrepareOpPass, graph has node: %s, and node name: %s", node->GetType().c_str(), + node->GetName().c_str()); + } + + for (const auto &node : graph->GetDirectNode()) { + GenerateRefTypeAndInputOutputMap(node); + } + + if (ref_input_output_map_.empty()) { + GELOGI("No need to add variable_ref."); + return SUCCESS; + } + + for (auto &node : graph->GetDirectNode()) { + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, continue); + GE_IF_BOOL_EXEC(node->GetOpDesc()->GetType() != VARIABLE, continue); + Status ret = DealVariableNode(node); + if (ret != SUCCESS) { + GELOGE(ret, "variable add back edge failed"); + return FAILED; + } + } + + for (auto &node : graph->GetDirectNode()) { + GELOGD("after VariablePrepareOpPass, graph has node: %s, and node name: %s", node->GetType().c_str(), + node->GetName().c_str()); + } + + return SUCCESS; +} + +Status VariablePrepareOpPass::DealVariableNode(NodePtr &var_node) { + GE_CHECK_NOTNULL(var_node); + for (auto &dst_node_and_inanchor : var_node->GetOutDataNodesAndAnchors()) { + NodePtr dst_node = dst_node_and_inanchor.first; + GE_CHECK_NOTNULL(dst_node); + InDataAnchorPtr dst_in_data_anchor = dst_node_and_inanchor.second; + GE_CHECK_NOTNULL(dst_in_data_anchor); + int out_index = GetWritableNodeOutIndex(dst_node, dst_in_data_anchor->GetIdx()); + if (out_index >= 0) { + Status ret = DealWritableNode(dst_node, var_node, out_index); + if (ret != SUCCESS) { + return FAILED; + } + } + } + return SUCCESS; +} + +Status VariablePrepareOpPass::DealWritableNode(ge::NodePtr &writable_node, ge::NodePtr 
&var_node, int out_index) { + GE_CHECK_NOTNULL(writable_node); + GE_CHECK_NOTNULL(var_node); + NodePtr final_writable_node = writable_node; + bool is_have_peer_node = false; + for (auto &dst_node_and_inanchor : writable_node->GetOutDataNodesAndAnchors()) { + NodePtr dst_node = dst_node_and_inanchor.first; + GE_CHECK_NOTNULL(dst_node); + InDataAnchorPtr dst_in_data_anchor = dst_node_and_inanchor.second; + GE_CHECK_NOTNULL(dst_in_data_anchor); + is_have_peer_node = true; + int current_out_index = GetWritableNodeOutIndex(dst_node, dst_in_data_anchor->GetIdx()); + if (current_out_index >= 0) { + final_writable_node = GetFinalWritableNode(dst_node, current_out_index); + out_index = current_out_index; + } + + GE_CHECK_NOTNULL(final_writable_node); + Status ret = AddVariableRef(final_writable_node, var_node, out_index); + if (ret != SUCCESS) { + GELOGE(FAILED, "add variable ref failed"); + return FAILED; + } + } + if (final_writable_node->GetName() == writable_node->GetName() && !is_have_peer_node) { + Status ret = AddVariableRef(final_writable_node, var_node, out_index); + if (ret != SUCCESS) { + return FAILED; + } + } + return SUCCESS; +} + +NodePtr VariablePrepareOpPass::GetFinalWritableNode(ge::NodePtr &writable_node, int &out_index) { + NodePtr current_node = writable_node; + std::unordered_set seen_node; + while (true) { + if (seen_node.count(current_node.get())) { + GELOGE(FAILED, "There is a ring structure in the graph"); + return nullptr; + } + seen_node.insert(current_node.get()); + OutDataAnchorPtr out_anchor = current_node->GetOutDataAnchor(out_index); + if (out_anchor == nullptr) { + GELOGE(FAILED, "Failed to get data anchor by index %d", out_index); + return nullptr; + } + bool found_writeable_node = false; + auto peer_in_anchors = out_anchor->GetPeerInDataAnchors(); + for (auto &peer_in_anchor : peer_in_anchors) { + if (peer_in_anchor == nullptr) { + GELOGE(FAILED, "peer in data anchor is nullptr, node %s:%s", current_node->GetType().c_str(), + 
current_node->GetName().c_str());
        // (closes the GELOGE call begun on the previous chunk line)
        continue;
      }

      NodePtr peer_node = peer_in_anchor->GetOwnerNode();
      int current_out_index = GetWritableNodeOutIndex(peer_node, peer_in_anchor->GetIdx());
      if (current_out_index >= 0) {
        // Peer also aliases its input: keep walking the chain from it.
        current_node = peer_node;
        out_index = current_out_index;
        found_writeable_node = true;
        break;
      }
    }
    if (!found_writeable_node) {
      GELOGI("final writable node is %s", current_node->GetName().c_str());
      return current_node;
    }
  }
}

// Create a variable-ref node for @p var_node, make it a control-flow
// predecessor of every peer of @p final_writable_node, and feed it from
// final_writable_node's output @p index.
Status VariablePrepareOpPass::AddVariableRef(ge::NodePtr &final_writable_node, ge::NodePtr &var_node, int index) {
  GE_CHECK_NOTNULL(final_writable_node);
  GE_CHECK_NOTNULL(var_node);

  NodePtr var_ref_node = CreatVariableRef(final_writable_node, var_node);
  GE_CHECK_NOTNULL(var_ref_node);
  // add control anchor between var_ref_node and final peer node
  // var_ref_node need to execute before other nodes
  auto final_writable_outAnchors = final_writable_node->GetAllOutAnchors();
  for (auto &final_writable_outAnchor : final_writable_outAnchors) {
    GE_CHECK_NOTNULL(final_writable_outAnchor);
    for (auto &final_writable_peerAnchor : final_writable_outAnchor->GetPeerAnchors()) {
      GE_CHECK_NOTNULL(final_writable_peerAnchor);
      NodePtr peer_node = final_writable_peerAnchor->GetOwnerNode();
      graphStatus ret = ge::GraphUtils::AddEdge(var_ref_node->GetOutControlAnchor(), peer_node->GetInControlAnchor());
      if (ret != GRAPH_SUCCESS) {
        GELOGE(FAILED, "add control anchor between var_ref_node and final_writable peer_node failed");
        return FAILED;
      }
    }
  }
  // add edge final node:index ---> var_ref_node:0
  graphStatus ret =
      ge::GraphUtils::AddEdge(final_writable_node->GetOutDataAnchor(index), var_ref_node->GetInDataAnchor(0));
  if (ret != GRAPH_SUCCESS) {
    GELOGE(FAILED, "add data anchor between var_ref_node and final_writable peer_node failed");
    return FAILED;
  }
  return SUCCESS;
}

// Build the variable-ref node itself: same op type as the variable, one
// input/output desc copied from the variable's output 0, and the
// REF_VAR_SRC_VAR_NAME attribute pointing back at the source variable.
// NOTE(review): the static counter makes names unique per process but is not
// synchronized — presumably passes run single-threaded; verify.
ge::NodePtr VariablePrepareOpPass::CreatVariableRef(ge::NodePtr &final_writable_node, ge::NodePtr &var_node) {
  if ((final_writable_node == nullptr) || (var_node == nullptr) || (var_node->GetOwnerComputeGraph() == nullptr)) {
    GELOGE(FAILED, "parameter ptr is null.");
    return nullptr;
  }
  GELOGI("Create VarRef Op: final_writable_node: [%s] var_node: [%s]>>>>", final_writable_node->GetName().c_str(),
         var_node->GetName().c_str());

  static uint32_t var_ref_count = 0;
  std::stringstream var_ref_name;
  var_ref_name << "_to_" << final_writable_node->GetName() << "_REF_" << var_ref_count++;

  OpDescPtr var_op_desc = var_node->GetOpDesc();
  if (var_op_desc == nullptr) {
    GELOGE(FAILED, "get var opdesc is nullptr");
    return nullptr;
  }

  OpDescPtr var_ref_op_desc =
      MakeShared(var_node->GetName() + var_ref_name.str(), var_op_desc->GetType());
  if (var_ref_op_desc == nullptr) {
    GELOGE(FAILED, "var_ref opdesc is nullptr");
    return nullptr;
  }

  GE_IF_BOOL_EXEC(var_ref_op_desc->AddOutputDesc(var_op_desc->GetOutputDesc(0)) != SUCCESS,
                  GELOGW("add output desc edge failed");
                  return nullptr);
  GE_IF_BOOL_EXEC(var_ref_op_desc->AddInputDesc(var_op_desc->GetOutputDesc(0)) != SUCCESS,
                  GELOGW("add input desc edge failed");
                  return nullptr);
  NodePtr var_ref_node = var_node->GetOwnerComputeGraph()->AddNode(var_ref_op_desc);
  GE_IF_BOOL_EXEC(var_ref_node == nullptr, GELOGW("var_ref_node is null"); return nullptr);

  // NOTE(review): a failed SetStr is silently ignored (only success is
  // logged); the node is returned either way — matches original behavior.
  bool is_set_str = ge::AttrUtils::SetStr(var_ref_op_desc, REF_VAR_SRC_VAR_NAME, var_op_desc->GetName());
  if (is_set_str) {
    GELOGI("Set node [%s] REF_VAR_SRC_VAR_NAME [%s]", var_ref_node->GetName().c_str(), var_op_desc->GetName().c_str());
  }
  return var_ref_node;
}

// Map (node type, input index) -> aliased output index via
// ref_input_output_map_; -1 when the node does not write through that input.
// ASSIGN nodes get their op desc refreshed first.
// (This function continues past this chunk line.)
int VariablePrepareOpPass::GetWritableNodeOutIndex(const NodePtr &node, int input_index) {
  if (node == nullptr) {
    return -1;
  }
  GELOGI("get writable node and input index %s:%d", node->GetName().c_str(), input_index);
  auto node_type = node->GetType();
  if (node_type == ASSIGN) {
    if (UpdateAssignOpDesc(node) != SUCCESS) {
      return -1;
    }
  }

  auto
node_iter = ref_input_output_map_.find(node_type); + if (node_iter == ref_input_output_map_.end()) { + return -1; + } + + auto index_iter = node_iter->second.find(input_index); + if (index_iter == node_iter->second.end()) { + return -1; + } + return index_iter->second; +} + +void VariablePrepareOpPass::GenerateRefTypeAndInputOutputMap(const NodePtr &node) { + auto out_op_desc = node->GetOpDesc(); + map input_name_index; + for (const auto &input_name : out_op_desc->GetAllInputNames()) { + int index = out_op_desc->GetInputIndexByName(input_name); + input_name_index.emplace(input_name, index); + } + + for (auto &out_data_anchor : node->GetAllOutDataAnchors()) { + string out_data_anchor_name = out_op_desc->GetOutputNameByIndex(out_data_anchor->GetIdx()); + auto iter = input_name_index.find(out_data_anchor_name); + if (iter != input_name_index.end()) { + GELOGD("From input_name_index_map find corresponding output name and out index : [ %s : %d]", + out_data_anchor_name.c_str(), out_data_anchor->GetIdx()); + auto ref_type_iter = ref_input_output_map_.find(node->GetType()); + if (ref_type_iter != ref_input_output_map_.end()) { + GELOGD("From ref_input_output_map_ find already existed ref_type_iter. 
Type : [%s]", + ref_type_iter->first.c_str()); + auto input_output_iter = ref_type_iter->second.find(iter->second); + if (input_output_iter != ref_type_iter->second.end()) { + ref_type_iter->second.emplace(iter->second, out_data_anchor->GetIdx()); + GELOGI("Add RefInputOutputMap [ %s ] : {%d, %d}", node->GetType().c_str(), iter->second, + out_data_anchor->GetIdx()); + } + } else { + ref_input_output_map_.insert({node->GetType(), {{iter->second, out_data_anchor->GetIdx()}}}); + GELOGI("Create RefInputOutputMap { %s : {%d, %d}}", node->GetType().c_str(), iter->second, + out_data_anchor->GetIdx()); + } + } + } +} + +Status VariablePrepareOpPass::UpdateAssignOpDesc(const ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + ge::InDataAnchorPtr var_anchor = node->GetInDataAnchor(0); + GE_CHECK_NOTNULL(var_anchor); + GE_CHECK_NOTNULL(var_anchor->GetPeerOutAnchor()); + ge::NodePtr var_node = var_anchor->GetPeerOutAnchor()->GetOwnerNode(); + ge::OpDescPtr var_op_desc = var_node->GetOpDesc(); + GE_CHECK_NOTNULL(var_op_desc); + ge::GeTensorDesc var_tensor_desc = var_op_desc->GetOutputDesc(0); + + ge::OpDescPtr assign_op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(assign_op_desc); + Status update_input_desc_ret = assign_op_desc->UpdateInputDesc(0, var_tensor_desc); + Status update_output_desc_ret = assign_op_desc->UpdateOutputDesc(0, var_tensor_desc); + if (update_input_desc_ret != GRAPH_SUCCESS || update_output_desc_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "update input or output desc success"); + return FAILED; + } + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/variable_prepare_op_pass.h b/src/ge/graph/passes/variable_prepare_op_pass.h new file mode 100644 index 00000000..738faa10 --- /dev/null +++ b/src/ge/graph/passes/variable_prepare_op_pass.h @@ -0,0 +1,45 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the 
License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_VARIABLE_PREPARE_OP_PASS_H_ +#define GE_GRAPH_PASSES_VARIABLE_PREPARE_OP_PASS_H_ + +#include +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "inc/graph_pass.h" + +namespace ge { +class VariablePrepareOpPass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph); + + private: + Status DealVariableNode(ge::NodePtr &node); + Status DealWritableNode(ge::NodePtr &writable_node, ge::NodePtr &var_node, int out_index); + NodePtr GetFinalWritableNode(ge::NodePtr &writable_node, int &out_index); + Status AddVariableRef(ge::NodePtr &node, ge::NodePtr &var_node, int index); + NodePtr CreatVariableRef(ge::NodePtr &final_ref_type_node, ge::NodePtr &var_node); + int GetWritableNodeOutIndex(const NodePtr &node, int input_index); + Status UpdateAssignOpDesc(const ge::NodePtr &node); + void GenerateRefTypeAndInputOutputMap(const NodePtr &node); + + std::map> ref_input_output_map_; +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_VARIABLE_PREPARE_OP_PASS_H_ diff --git a/src/ge/graph/passes/variable_ref_delete_op_pass.cc b/src/ge/graph/passes/variable_ref_delete_op_pass.cc new file mode 100644 index 00000000..6a292d41 --- /dev/null +++ b/src/ge/graph/passes/variable_ref_delete_op_pass.cc @@ -0,0 +1,103 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/variable_ref_delete_op_pass.h" + +#include + +#include "framework/common/debug/ge_log.h" + +namespace ge { +Status VariableRefDeleteOpPass::Run(ge::ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + + for (auto &node : graph->GetDirectNode()) { + GELOGD("before VariableRefDeleteOpPass, graph has node: %s, and node name: %s", node->GetType().c_str(), + node->GetName().c_str()); + } + + for (auto &node : graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + std::string ref_var_src_var_name; + bool is_variable_ref = (node->GetOpDesc()->GetType() == VARIABLE) && + (ge::AttrUtils::GetStr(node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_src_var_name)); + if (!is_variable_ref) { + continue; + } + Status ret = DealVariableRef(graph, node, ref_var_src_var_name); + if (ret != SUCCESS) { + GELOGE(ret, "variable ref [%s] delete failed", node->GetName().c_str()); + return FAILED; + } + } + + for (auto &node : graph->GetDirectNode()) { + GELOGD("after VariableRefDeleteOpPass, graph has node: %s, and node name: %s", node->GetType().c_str(), + node->GetName().c_str()); + } + + return SUCCESS; +} + +Status VariableRefDeleteOpPass::DealVariableRef(ge::ComputeGraphPtr &graph, ge::NodePtr &variable_ref, + const std::string &ref_var_src_var_name) { + GE_CHECK_NOTNULL(graph); + GE_CHECK_NOTNULL(variable_ref); + // remove variable_ref all out anchor + for (auto &variable_ref_outAnchor : variable_ref->GetAllOutAnchors()) { + variable_ref_outAnchor->UnlinkAll(); + } + + auto inAnchor0 = variable_ref->GetInDataAnchor(0); + if 
(inAnchor0 == nullptr) { + GELOGE(FAILED, "variable_ref [%s] no input", variable_ref->GetName().c_str()); + return FAILED; + } + GE_CHECK_NOTNULL(inAnchor0->GetPeerOutAnchor()); + // get the output index of the previous node connected to the variable_ref + // prepare for refreshing address in build phase + int index = inAnchor0->GetPeerOutAnchor()->GetIdx(); + + // get previous node of variable_ref + NodePtr peer_node = inAnchor0->GetPeerOutAnchor()->GetOwnerNode(); + + // remove in anchor [0] of variable_ref + inAnchor0->UnlinkAll(); + if (ge::GraphUtils::RemoveJustNode(graph, variable_ref) != GRAPH_SUCCESS) { + GELOGE(FAILED, "remove variable_ref failed"); + return FAILED; + } + + // add attr [REF_VAR_SRC_VAR_NAME] to the previous node of the variable_ref + GE_CHECK_NOTNULL(peer_node->GetOpDesc()); + bool is_set_str = ge::AttrUtils::SetStr(peer_node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_src_var_name); + + ge::NodePtr var_ref_src_var = graph->FindNode(ref_var_src_var_name); + if (var_ref_src_var == nullptr) { + GELOGE(FAILED, "get var_ref_src_var failed"); + return FAILED; + } + + GE_CHECK_NOTNULL(var_ref_src_var->GetOpDesc()); + bool is_set_index = ge::AttrUtils::SetInt(var_ref_src_var->GetOpDesc(), REF_VAR_PRE_PEER_OUT_INDEX, index); + if (is_set_str && is_set_index) { + GELOGI("[%s]: add attr [REF_VAR_SRC_VAR_NAME: %s ] ", peer_node->GetName().c_str(), ref_var_src_var_name.c_str()); + GELOGI("[%s]: add attr [ REF_VAR_PRE_PEER_OUT_INDEX: %d ]", var_ref_src_var->GetName().c_str(), index); + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/passes/variable_ref_delete_op_pass.h b/src/ge/graph/passes/variable_ref_delete_op_pass.h new file mode 100644 index 00000000..e481f9f8 --- /dev/null +++ b/src/ge/graph/passes/variable_ref_delete_op_pass.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance 
with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PASSES_VARIABLE_REF_DELETE_OP_PASS_H_ +#define GE_GRAPH_PASSES_VARIABLE_REF_DELETE_OP_PASS_H_ + +#include + +#include "framework/common/ge_inner_error_codes.h" +#include "inc/graph_pass.h" + +namespace ge { +class VariableRefDeleteOpPass : public GraphPass { + public: + Status Run(ge::ComputeGraphPtr graph); + + private: + Status DealVariableRef(ge::ComputeGraphPtr &graph, + ge::NodePtr &variable_ref, + const std::string &ref_var_src_var_name); +}; +} // namespace ge + +#endif // GE_GRAPH_PASSES_VARIABLE_REF_DELETE_OP_PASS_H_ diff --git a/src/ge/graph/preprocess/graph_preprocess.cc b/src/ge/graph/preprocess/graph_preprocess.cc new file mode 100644 index 00000000..0a438220 --- /dev/null +++ b/src/ge/graph/preprocess/graph_preprocess.cc @@ -0,0 +1,1406 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/preprocess/graph_preprocess.h" + +#include +#include +#include +#include + +#include "common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h" +#include "common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h" +#include "common/helper/model_helper.h" +#include "common/math/math_util.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/common/transop_util.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_context.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/manager/util/rt_context_util.h" +#include "graph/optimize/graph_optimize.h" +#include "graph/passes/addn_pass.h" +#include "graph/passes/aicpu_constant_folding_pass.h" +#include "graph/passes/assert_pass.h" +#include "graph/passes/base_pass.h" +#include "graph/passes/constant_folding_pass.h" +#include "graph/passes/constant_fuse_same_pass.h" +#include "graph/passes/control_trigger_pass.h" +#include "graph/passes/dimension_adjust_pass.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/dropout_pass.h" +#include "graph/passes/end_graph_pass.h" +#include "graph/passes/enter_pass.h" +#include "graph/passes/flow_ctrl_pass.h" +#include "graph/passes/get_original_format_pass.h" +#include "graph/passes/guarantee_const_pass.h" +#include "graph/passes/hccl_memcpy_pass.h" +#include "graph/passes/identity_pass.h" +#include "graph/passes/infershape_pass.h" +#include "graph/passes/iterator_op_pass.h" +#include "graph/passes/merge_pass.h" +#include "graph/passes/net_output_pass.h" +#include "graph/passes/next_iteration_pass.h" +#include "graph/passes/no_use_reshape_remove_pass.h" +#include "graph/passes/placeholder_with_default_pass.h" +#include "graph/passes/prevent_gradient_pass.h" +#include "graph/passes/print_op_pass.h" +#include "graph/passes/prune_pass.h" +#include "graph/passes/resource_pair_add_control_pass.h" +#include "graph/passes/resource_pair_remove_control_pass.h" 
+#include "graph/passes/save_pass.h" +#include "graph/passes/shape_operate_op_remove_pass.h" +#include "graph/passes/snapshot_pass.h" +#include "graph/passes/stop_gradient_pass.h" +#include "graph/passes/switch_logic_remove_pass.h" +#include "graph/passes/switch_op_pass.h" +#include "graph/passes/switch_pass.h" +#include "graph/passes/unused_const_pass.h" +#include "graph/passes/unused_op_remove_pass.h" +#include "graph/passes/update_net_output_pass.h" +#include "graph/passes/var_is_initialized_op_pass.h" +#include "graph/passes/variable_prepare_op_pass.h" +#include "graph/preprocess/insert_op/util_insert_aipp_op.h" +#include "graph/types.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/pass_manager.h" +#include "init/gelib.h" +#include "runtime/dev.h" + +namespace ge { +namespace { +OpDescPtr CreateTensorShape(const GeTensorDesc &data_tensor) { + GeTensorPtr tensor = MakeShared(); + if (tensor == nullptr) { + GELOGE(INTERNAL_ERROR, "Create shared ptr for GeTensor failed"); + return nullptr; + } + tensor->MutableTensorDesc().SetDataType(DT_INT32); + tensor->MutableTensorDesc().SetFormat(FORMAT_ND); + auto dst_ge_shape = data_tensor.GetShape(); + auto dim_cnt = static_cast(dst_ge_shape.GetDimNum()); + if (dim_cnt == 0) { // if the dim_cnt is 0, the tensor is a scalar + tensor->MutableTensorDesc().SetShape(GeShape()); + int64_t dst_shape = 1; + if (tensor->SetData(reinterpret_cast(&dst_shape), sizeof(int64_t)) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "tensor set data failed"); + return nullptr; + } + } else { + tensor->MutableTensorDesc().SetShape(GeShape(std::vector({dim_cnt}))); + unique_ptr dst_shape(new (std::nothrow) int64_t[dim_cnt]()); + if (dst_shape == nullptr) { + GELOGE(INTERNAL_ERROR, "Create unique ptr failed"); + return nullptr; + } + for (int64_t i = 0; i < dim_cnt; ++i) { + dst_shape[i] = dst_ge_shape.GetDim(static_cast(i)); + } + GE_IF_BOOL_EXEC( + tensor->SetData(reinterpret_cast(dst_shape.get()), 
dim_cnt * sizeof(int64_t)) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "tensor set data failed"); + return nullptr;) + } + + GELOGD("Create shape input dim [%s]", dst_ge_shape.ToString().c_str()); + return OpDescUtils::CreateConstOp(tensor); +} + +void AddTransNodeAttr(const std::string &node_type, const GeTensorDesc &input, const GeTensorDesc &output, + OpDescPtr &op_desc) { + // For format transfer node, the IR definition has src/dst format attrs + if (node_type == TRANSDATA) { + GE_IF_BOOL_EXEC( + !AttrUtils::SetStr(op_desc, FORMAT_TRANSFER_SRC_FORMAT, TypeUtils::FormatToSerialString(input.GetFormat())), + GELOGW("SetStr FORMAT_TRANSFER_SRC_FORMAT failed");) + GE_IF_BOOL_EXEC( + !AttrUtils::SetStr(op_desc, FORMAT_TRANSFER_DST_FORMAT, TypeUtils::FormatToSerialString(output.GetFormat())), + GELOGW("SetStr FORMAT_TRANSFER_DST_FORMAT failed");) + } + // For cast node, the IR definition has src/dst attrs + if (node_type == CAST) { + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_SRCT, static_cast(input.GetDataType())), + GELOGW("SetInt CAST_ATTR_SRCT failed");) + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_DSTT, static_cast(output.GetDataType())), + GELOGW("SetInt CAST_ATTR_DSTT failed");) + GE_IF_BOOL_EXEC(!AttrUtils::SetInt(op_desc, CAST_ATTR_DST_TYPE, static_cast(output.GetDataType())), + GELOGW("SetInt CAST_ATTR_DST_TYPE failed");) + GE_IF_BOOL_EXEC(!AttrUtils::SetBool(op_desc, CAST_ATTR_TRUNCATE, false), + GELOGW("SetBool CAST_ATTR_TRUNCATE failed");) + } +} + +NodePtr CreateTransNode(const std::string &name, const std::string &node_type, const GeTensorDesc &input, + const GeTensorDesc &output, NodePtr &node) { + if (node == nullptr) { + GELOGE(PARAM_INVALID, "node is null."); + return nullptr; + } + auto graph = node->GetOwnerComputeGraph(); + if (graph == nullptr) { + GELOGE(PARAM_INVALID, "Owner graph is null, node name:%s.", node->GetName().c_str()); + return nullptr; + } + + auto index = TransOpUtil::GetTransOpDataIndex(node_type); + if 
(index < 0) { + GELOGE(INTERNAL_ERROR, "The trans node type %s does not exists", node_type.c_str()); + return nullptr; + } + OpDescPtr op_desc = MakeShared(name, node_type); + if (op_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Create shared ptr for OpDesc failed"); + return nullptr; + } + + // for data dump + GE_IF_BOOL_EXEC( + !AttrUtils::SetListStr(op_desc, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, std::move(std::vector())), + GELOGW("CreateTransNode: SetListStr failed");) + + // Default single input and single output + auto ret = op_desc->AddInputDesc(input); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add input desc when create node %s type %s", name.c_str(), node_type.c_str()); + return nullptr; + } + ret = op_desc->AddOutputDesc(output); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add output desc when create node %s type %s", name.c_str(), node_type.c_str()); + return nullptr; + } + + AddTransNodeAttr(node_type, input, output, op_desc); + + NodePtr shape_node = nullptr; + if (node_type == RESHAPE) { + auto shape_desc = CreateTensorShape(output); + if (shape_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to add shape for reshape %s, can not create the shape input", + node->GetName().c_str()); + return nullptr; + } + ret = op_desc->AddInputDesc(shape_desc->GetOutputDesc(0)); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add the first input for reshape %s", name.c_str()); + return nullptr; + } + + shape_node = graph->AddNode(shape_desc); + if (shape_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to add shape node for reshape %s, can not add the shape to graph", name.c_str()); + return nullptr; + } + } + + auto trans_node = graph->AddNode(op_desc); + if (trans_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to add trans node %s to graph", name.c_str()); + return nullptr; + } + + if (node_type == RESHAPE) { + if (GraphUtils::AddEdge(shape_node->GetOutDataAnchor(0), 
trans_node->GetInDataAnchor(1)) != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add shape node for reshape %s, can not add the edge", name.c_str()); + return nullptr; + } + } + + return trans_node; +} + +Status RecoverOneTransNodeForVar(const std::string &name, const TransNodeInfo &trans_node_info, NodePtr node, + NodePtr &trans_node) { + GE_CHECK_NOTNULL(node); + trans_node = CreateTransNode(name, trans_node_info.node_type, trans_node_info.output, trans_node_info.input, node); + if (trans_node == nullptr) { + return INTERNAL_ERROR; + } + + auto ret = GraphUtils::ReplaceNodeDataAnchors(trans_node, node, {}, {0}); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to replace out anchors when recover trans node for %s type %s", + node->GetName().c_str(), node->GetType().c_str()); + return INTERNAL_ERROR; + } + + ret = GraphUtils::AddEdge(node->GetOutDataAnchor(0), trans_node->GetInDataAnchor(0)); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to connect node %s to trans node %s", node->GetName().c_str(), + trans_node->GetName().c_str()); + return INTERNAL_ERROR; + } + + ret = GraphUtils::MoveOutCtrlEdges(node, trans_node); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to move out control edges from %s to %s when recover trans node.", + node->GetName().c_str(), trans_node->GetName().c_str()); + return INTERNAL_ERROR; + } + + return SUCCESS; +} + +Status RecoverOneTransNodeForVarRef(const std::string &name, const TransNodeInfo &trans_node_info, NodePtr node, + NodePtr &trans_node) { + GE_CHECK_NOTNULL(node); + trans_node = CreateTransNode(name, trans_node_info.node_type, trans_node_info.input, trans_node_info.output, node); + if (trans_node == nullptr) { + return INTERNAL_ERROR; + } + + auto ret = GraphUtils::ReplaceNodeDataAnchors(trans_node, node, {0}, {}); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to replace int anchors when recover trans node for %s type %s", + node->GetName().c_str(), 
node->GetType().c_str()); + return INTERNAL_ERROR; + } + + ret = GraphUtils::AddEdge(trans_node->GetOutDataAnchor(0), node->GetInDataAnchor(0)); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to connect trans node %s to node %s", trans_node->GetName().c_str(), + node->GetName().c_str()); + return INTERNAL_ERROR; + } + + ret = GraphUtils::MoveInCtrlEdges(node, trans_node); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to move int control edges from %s to %s when recover trans node.", + node->GetName().c_str(), trans_node->GetName().c_str()); + return INTERNAL_ERROR; + } + + return SUCCESS; +} + +Status UpdateVarFormats(const NodePtr &var, const GeTensorDesc &tensor_desc) { + GE_IF_BOOL_EXEC(var == nullptr, GELOGW("node : var is nullptr"); return INTERNAL_ERROR); + GE_CHECK_NOTNULL(var->GetOpDesc()); + if (var->GetOpDesc()->GetOutputsSize() > 0) { + auto output_desc = var->GetOpDesc()->GetOutputDesc(0); + output_desc.SetFormat(tensor_desc.GetFormat()); + output_desc.SetDataType(tensor_desc.GetDataType()); + output_desc.SetShape(tensor_desc.GetShape()); + GE_IF_BOOL_EXEC(var->GetOpDesc()->UpdateOutputDesc(0, output_desc) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "UpdateOutputDesc failed"); + return INTERNAL_ERROR;); + } + + if (var->GetOpDesc()->GetInputsSize() > 0) { + auto desc = var->GetOpDesc()->GetInputDesc(0); + desc.SetFormat(tensor_desc.GetFormat()); + desc.SetDataType(tensor_desc.GetDataType()); + desc.SetShape(tensor_desc.GetShape()); + GE_IF_BOOL_EXEC(var->GetOpDesc()->UpdateInputDesc(0, desc) != GRAPH_SUCCESS, + GELOGE(INTERNAL_ERROR, "UpdateInputDesc failed"); + return INTERNAL_ERROR;) + } + return SUCCESS; +} + +Status RecoverTransRoadForVar(const NodePtr &var, const VarTransRoad &road) { + GE_CHECK_NOTNULL(var); + int index = 0; + NodePtr last_node = var; + for (auto iter = road.rbegin(); iter != road.rend(); ++iter) { + auto trans_name = var->GetName() + "_trans_" + std::to_string(index++); + auto ret = 
RecoverOneTransNodeForVar(trans_name, *iter, last_node, last_node); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to recover trans node for variable %s, index %d, type %s", var->GetName().c_str(), + index, iter->node_type.c_str()); + return INTERNAL_ERROR; + } + GE_CHK_BOOL_EXEC((ge::AttrUtils::SetBool(last_node->GetOpDesc(), ge::ATTR_INSERTED_BY_GE, true)), + return INTERNAL_ERROR, "Set attr ATTR_INSERTED_BY_GE failed."); + GELOGD("Recover trans node %s type %s success", trans_name.c_str(), iter->node_type.c_str()); + } + if (road.empty()) { + return SUCCESS; + } + return UpdateVarFormats(var, road.rbegin()->output); +} + +Status RecoverTransRoadForVarRef(const std::set &nodes, const VarTransRoad &road) { + for (auto &var : nodes) { + GE_CHECK_NOTNULL(var); + int index = 0; + NodePtr last_node = var; + GELOGI("Recover trans nodes for variable ref %s", var->GetName().c_str()); + for (auto iter = road.rbegin(); iter != road.rend(); ++iter) { + auto trans_name = var->GetName() + "_trans_" + std::to_string(index++); + auto ret = RecoverOneTransNodeForVarRef(trans_name, *iter, last_node, last_node); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to recover trans node for variable %s, index %d, type %s", + var->GetName().c_str(), index, iter->node_type.c_str()); + return INTERNAL_ERROR; + } + GE_CHK_BOOL_EXEC((ge::AttrUtils::SetBool(last_node->GetOpDesc(), ge::ATTR_INSERTED_BY_GE, true)), + return INTERNAL_ERROR, "Set attr ATTR_INSERTED_BY_GE failed."); + } + if (!(road.empty()) && (UpdateVarFormats(var, road.rbegin()->input) != SUCCESS)) { + return INTERNAL_ERROR; + } + } + return SUCCESS; +} + +Status SetDataNodeByAipp(const ge::NodePtr &node_ptr) { + GELOGI("Start to set data node by aipp."); + auto aipp_op_desc = node_ptr->GetOpDesc(); + GE_CHECK_NOTNULL(aipp_op_desc); + auto aipp_input = aipp_op_desc->MutableInputDesc(0); + GE_CHECK_NOTNULL(aipp_input); + ge::DataType aipp_dt = aipp_input->GetDataType(); + GELOGI("Aipp [%s] input datatype is 
%s.", aipp_op_desc->GetName().c_str(), + TypeUtils::DataTypeToSerialString(aipp_dt).c_str()); + uint32_t size = 0; + graphStatus graph_ret = ge::TensorUtils::GetSize(*aipp_input, size); + if (graph_ret != GRAPH_SUCCESS) { + GELOGE(FAILED, "UpdateOutputDesc fail, graph_ret:%u", graph_ret); + return FAILED; + } + GELOGI("Get size [%u] from aipp [%s].", size, aipp_op_desc->GetName().c_str()); + if (size == 0) { + GELOGE(FAILED, "Can not get size from aipp [%s]", aipp_op_desc->GetName().c_str()); + return FAILED; + } + for (const auto &in_data_anchor : node_ptr->GetAllInDataAnchors()) { + const auto &peer_out_anchor = in_data_anchor->GetPeerOutAnchor(); + GE_IF_BOOL_EXEC(peer_out_anchor == nullptr, continue); + const auto &src_node = peer_out_anchor->GetOwnerNode(); + const auto &src_op = src_node->GetOpDesc(); + GE_IF_BOOL_EXEC(src_op == nullptr, continue); + + const GeTensorDescPtr &input = src_op->MutableInputDesc(0); + GE_CHECK_NOTNULL(input); + input->SetDataType(aipp_dt); + input->SetOriginDataType(aipp_dt); + ge::TensorUtils::SetSize(*input, size); + + const GeTensorDescPtr &output = src_op->MutableOutputDesc(0); + GE_CHECK_NOTNULL(output); + output->SetDataType(aipp_dt); + output->SetOriginDataType(aipp_dt); + ge::TensorUtils::SetSize(*output, size); + GELOGI("Set data size [%u] by aipp.", size); + } + return SUCCESS; +} + +using VarNamesToRefs = std::map>; + +VarNamesToRefs CollectVarNamesToRefs(const ComputeGraphPtr &graph) { + VarNamesToRefs names_to_refs; + std::string var_name; + if (graph == nullptr) { + GELOGE(PARAM_INVALID, "graph is null."); + return names_to_refs; + } + for (auto &node : graph->GetAllNodes()) { + if (node->GetType() != VARIABLE) { + continue; + } + if (AttrUtils::GetStr(node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, var_name)) { + (void)names_to_refs[var_name].insert(node); + } + } + return names_to_refs; +} +Status AddTransNodeBetweenTwoNodes(OutDataAnchorPtr &src_out, InDataAnchorPtr &insert_in, + OutDataAnchorPtr &insert_out) { + if 
((src_out == nullptr) || (insert_in == nullptr) || (insert_out == nullptr)) { + GELOGE(INTERNAL_ERROR, "anchor is nullptr"); + return FAILED; + } + auto vistor = src_out->GetPeerInDataAnchors(); + for (auto it = vistor.begin(); it != vistor.end(); ++it) { + InDataAnchorPtr dst_in = *it; + GE_CHK_STATUS_RET(src_out->Unlink(dst_in), "Unlink the anchor failed"); + GE_CHK_STATUS_RET(insert_out->LinkTo(dst_in), "Link the anchor failed"); + } + GE_CHK_STATUS_RET(src_out->LinkTo(insert_in), "Link the anchor failed"); + return SUCCESS; +} + +NodePtr CreateCastOp(const ge::GeShape &shape, const ge::DataType input_data_type, const ge::DataType output_data_type, + const ge::Format format, NodePtr &node) { + static uint32_t transop_count = 0; + std::string name = std::string("cast_node").append(std::to_string(transop_count++)); + + GELOGI("create cast op:%s, input datatype:%s, out datatype:%s", name.c_str(), + TypeUtils::DataTypeToSerialString(input_data_type).c_str(), + TypeUtils::DataTypeToSerialString(output_data_type).c_str()); + + GeTensorDesc input(shape, format, input_data_type); + input.SetOriginFormat(format); + input.SetOriginShape(shape); + input.SetOriginDataType(input_data_type); + ge::TensorUtils::SetRealDimCnt(input, static_cast(shape.GetDims().size())); + + GeTensorDesc output(shape, format, output_data_type); + output.SetOriginFormat(format); + output.SetOriginShape(shape); + output.SetOriginDataType(output_data_type); + ge::TensorUtils::SetRealDimCnt(output, static_cast(shape.GetDims().size())); + + return CreateTransNode(name, CAST, input, output, node); +} + +Status ProcessInputFP16(NodePtr &node_ptr) { + GE_CHECK_NOTNULL(node_ptr); + auto op_desc = node_ptr->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + const GeTensorDescPtr &input = op_desc->MutableInputDesc(0); + GE_CHECK_NOTNULL(input); + ge::DataType src_dtype = input->GetDataType(); + if (src_dtype == DT_FLOAT16) { + GELOGI("The node name %s dtype is fp16", node_ptr->GetName().c_str()); + return 
SUCCESS; + } + int64_t desc_shape = input->GetShape().GetShapeSize(); + uint32_t len = 0; + if (!TypeUtils::GetDataTypeLength(DT_FLOAT16, len)) { + GELOGE(INTERNAL_ERROR, "GET FP16 datatype length failed"); + return FAILED; + } + FMK_INT64_UINT32_MULCHECK(desc_shape, len); + int64_t shape_size = desc_shape * len; + input->SetDataType(DT_FLOAT16); + input->SetOriginDataType(DT_FLOAT16); + ge::TensorUtils::SetSize(*input, static_cast(shape_size)); + const GeTensorDescPtr &output = op_desc->MutableOutputDesc(0); + GE_CHECK_NOTNULL(output); + output->SetDataType(DT_FLOAT16); + output->SetOriginDataType(DT_FLOAT16); + ge::TensorUtils::SetSize(*output, static_cast(shape_size)); + + NodePtr cast_node = CreateCastOp(output->GetShape(), DT_FLOAT16, src_dtype, output->GetFormat(), node_ptr); + GE_CHECK_NOTNULL(cast_node); + OutDataAnchorPtr src_out = node_ptr->GetOutDataAnchor(0); + InDataAnchorPtr cast_in = cast_node->GetInDataAnchor(0); + OutDataAnchorPtr cast_out = cast_node->GetOutDataAnchor(0); + if (AddTransNodeBetweenTwoNodes(src_out, cast_in, cast_out) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "add node between two nodes failed, src name:%s, cast node name:%s.", + node_ptr->GetName().c_str(), cast_node->GetName().c_str()); + return FAILED; + } + return SUCCESS; +} + +NodePtr CreateTransdataNode(const ge::GeShape &in_shape, const ge::Format input_format, const ge::GeShape &out_shape, + const ge::Format output_format, const ge::DataType dt, NodePtr &node) { + static uint32_t transop_count = 0; + // Does not involve multithreading. 
+ std::string name = std::string("transdata_node").append(std::to_string(transop_count++)); + + GELOGI("create trandata op:%s, input format:%s, out format:%s", name.c_str(), + TypeUtils::FormatToSerialString(input_format).c_str(), TypeUtils::FormatToSerialString(output_format).c_str()); + + GeTensorDesc input(in_shape, input_format, dt); + input.SetOriginFormat(input_format); + input.SetOriginShape(in_shape); + input.SetOriginDataType(dt); + + GeTensorDesc output(out_shape, output_format, dt); + output.SetOriginFormat(output_format); + output.SetOriginShape(out_shape); + output.SetOriginDataType(dt); + + return CreateTransNode(name, TRANSDATA, input, output, node); +} + +Status ModifyInputFormatAndShape(NodePtr &node_ptr) { + GE_CHECK_NOTNULL(node_ptr); + auto op_desc = node_ptr->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + const GeTensorDescPtr &input = op_desc->MutableInputDesc(0); + GE_CHECK_NOTNULL(input); + ge::Format old_format = input->GetFormat(); + std::vector old_shape = input->GetShape().GetDims(); + ge::DataType dt = input->GetDataType(); + std::vector dst_shape_dims; + if (old_format == FORMAT_NCHW) { + formats::FormatTransferNchwNc1hwc0 transfer; + if (transfer.TransShape(old_format, old_shape, dt, FORMAT_NC1HWC0, dst_shape_dims) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "TransShape failed"); + return FAILED; + } + } else if (old_format == FORMAT_NHWC) { + formats::FormatTransferNhwcNc1hwc0 transfer; + if (transfer.TransShape(old_format, old_shape, dt, FORMAT_NC1HWC0, dst_shape_dims) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "TransShape failed"); + return FAILED; + } + } + + input->SetShape(ge::GeShape(dst_shape_dims)); + input->SetOriginShape(ge::GeShape(dst_shape_dims)); + input->SetFormat(FORMAT_NC1HWC0); + input->SetOriginFormat(FORMAT_NC1HWC0); + + auto output = op_desc->MutableOutputDesc(0); + GE_CHECK_NOTNULL(output); + output->SetShape(ge::GeShape(dst_shape_dims)); + output->SetOriginShape(ge::GeShape(dst_shape_dims)); + 
output->SetFormat(FORMAT_NC1HWC0); + output->SetOriginFormat(FORMAT_NC1HWC0); + + uint32_t size = 0; + graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(*output, size); + if (graph_status != ge::GRAPH_SUCCESS) { + GELOGE(graph_status, "GetTensorSizeInBytes failed!"); + return FAILED; + } + ge::TensorUtils::SetSize(*input, size); + ge::TensorUtils::SetSize(*output, size); + + return SUCCESS; +} +Status ProcessInputNC1HWC0(NodePtr &node_ptr) { + GE_CHECK_NOTNULL(node_ptr); + auto op_desc = node_ptr->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + const GeTensorDescPtr &input = op_desc->MutableInputDesc(0); + GE_CHECK_NOTNULL(input); + ge::Format old_format = input->GetFormat(); + ge::GeShape old_shape = input->GetShape(); + bool support = ((old_format == FORMAT_NC1HWC0) || (old_format == FORMAT_NCHW) || (old_format == FORMAT_NHWC)); + if (!support) { + GELOGE(INTERNAL_ERROR, "The format [%s] is unsupported", TypeUtils::FormatToSerialString(old_format).c_str()); + return FAILED; + } + if (old_format == FORMAT_NC1HWC0) { + GELOGI("No need to transfer format"); + return SUCCESS; + } + if (ModifyInputFormatAndShape(node_ptr) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "modify format and shape failed"); + return FAILED; + } + + NodePtr trans_node = + CreateTransdataNode(input->GetShape(), FORMAT_NC1HWC0, old_shape, old_format, input->GetDataType(), node_ptr); + GE_CHECK_NOTNULL(trans_node); + OutDataAnchorPtr src_out = node_ptr->GetOutDataAnchor(0); + InDataAnchorPtr trans_in = trans_node->GetInDataAnchor(0); + OutDataAnchorPtr trans_out = trans_node->GetOutDataAnchor(0); + if (AddTransNodeBetweenTwoNodes(src_out, trans_in, trans_out) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "add node between two nodes failed"); + return FAILED; + } + return SUCCESS; +} +} // namespace + +GraphPrepare::GraphPrepare() : compute_graph_(nullptr) {} + +GraphPrepare::~GraphPrepare() {} + +Status GraphPrepare::UpdateVariableFormats(ComputeGraphPtr &graph) { + GE_CHECK_NOTNULL(graph); + 
auto var_names_to_refs = CollectVarNamesToRefs(graph); + for (auto &node : graph->GetAllNodes()) { + if (node == nullptr) { + continue; + } + if (node->GetType() != VARIABLE) { + continue; + } + auto trans_road = VarManager::Instance(graph->GetSessionID())->GetTransRoad(node->GetName()); + if (trans_road == nullptr) { + GELOGD("The variable %s does not have any trans road", node->GetName().c_str()); + continue; + } + + GELOGI("Recover the trans road for var %s reversely", node->GetName().c_str()); + + auto ret = RecoverTransRoadForVar(node, *trans_road); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to recovery trans road for var %s", node->GetName().c_str()); + return INTERNAL_ERROR; + } + auto iter = var_names_to_refs.find(node->GetName()); + if (iter != var_names_to_refs.end()) { + ret = RecoverTransRoadForVarRef(iter->second, *trans_road); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to recovery trans road for var ref %s", node->GetName().c_str()); + return INTERNAL_ERROR; + } + } + } + + return SUCCESS; +} + +void GraphPrepare::SetOptions(const ge::GraphManagerOptions &options) { options_ = options; } + +Status GraphPrepare::Init(const ge::Graph &graph, uint64_t session_id) { + compute_graph_ = GraphUtils::GetComputeGraph(graph); + if (compute_graph_ != nullptr) { + compute_graph_->SetSessionID(session_id); + } + Status ret = CheckGraph(); + if (ret != SUCCESS) { + GELOGE(ret, "RunGraph graph check fail, ret:%u", ret); + return ret; + } + + return SUCCESS; +} + +Status GraphPrepare::CheckGraph() { + if (compute_graph_ == nullptr) { + GELOGE(GE_GRAPH_INIT_FAILED, "Graph prepare init compute graph is NULLPTR"); + return GE_GRAPH_INIT_FAILED; + } + auto nodes = compute_graph_->GetAllNodes(); + if (nodes.empty()) { + GELOGE(GE_GRAPH_INIT_FAILED, "Invalid graph, no nodes in this graph."); + return GE_GRAPH_INIT_FAILED; + } + for (const NodePtr &node : compute_graph_->GetAllNodes()) { + GE_CHECK_NOTNULL(node); + if (node->GetOpDesc() == 
nullptr) { + GELOGE(GE_GRAPH_INIT_FAILED, "Check Graph node opdesc is NULL"); + return GE_GRAPH_INIT_FAILED; + } + } + return SUCCESS; +} + +Status GraphPrepare::SetRtContext(rtContext_t rt_context, rtCtxMode_t mode) { + GELOGI("set rt_context %d, device id:%u.", static_cast(mode), ge::GetContext().DeviceId()); + GE_CHK_RT_RET(rtCtxCreate(&rt_context, mode, ge::GetContext().DeviceId())); + GE_CHK_RT_RET(rtCtxSetCurrent(rt_context)); + RtContextUtil::GetInstance().AddrtContext(rt_context); + return SUCCESS; +} + +Status GraphPrepare::AdjustDataOpOutput(const NodePtr &node) { + if (node == nullptr) { + GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "Input node is NULL"); + return GE_GRAPH_GRAPH_NODE_NULL; + } + OpDescPtr op_desc_ptr = node->GetOpDesc(); + if (op_desc_ptr == nullptr) { + GELOGE(GE_GRAPH_GRAPH_NODE_NULL, "Input node opdesc is NULL"); + return GE_GRAPH_GRAPH_NODE_NULL; + } + GeTensorDesc output = op_desc_ptr->GetOutputDesc(0); + uint32_t tensor_size = 0; + graphStatus graph_status = TensorUtils::GetTensorMemorySizeInBytes(output, tensor_size); + if (graph_status != GRAPH_SUCCESS) { + GELOGE(graph_status, "GetTensorMemorySizeInBytes failed!"); + return FAILED; + } + TensorUtils::SetSize(output, tensor_size); + graphStatus graph_ret = op_desc_ptr->UpdateOutputDesc(0, output); + if (graph_ret != GRAPH_SUCCESS) { + GELOGE(graph_ret, "UpdateOutputDesc fail, graph_ret:%u", graph_ret); + return graph_ret; + } + return SUCCESS; +} + +Status GraphPrepare::UpdateInput(const std::vector &user_input) { + compute_graph_->SaveDataFormat((ge::Format)(domi::GetContext().format)); + for (NodePtr &input_node : compute_graph_->GetAllNodes()) { + GE_CHECK_NOTNULL(input_node); + OpDescPtr op = input_node->GetOpDesc(); + GE_CHECK_NOTNULL(op); + if (op->GetType() == DATA) { + GeAttrValue::INT index = 0; + if (!(AttrUtils::GetInt(op, ATTR_NAME_INDEX, index))) { + GELOGW("Get index from data attr failed"); + continue; + } + + if ((index < 0) || (static_cast(index) >= user_input.size())) { + 
GELOGE(PARAM_INVALID, "user_input size = %zu, graph data op index = %ld.", user_input.size(), index); + return FAILED; + } + + GeTensorDesc desc(user_input[index].GetTensorDesc()); + auto format = desc.GetFormat(); + auto origin_format = desc.GetOriginFormat(); + bool is_internal = TypeUtils::IsInternalFormat(format) || TypeUtils::IsInternalFormat(origin_format); + if (is_internal) { + GELOGE(PARAM_INVALID, "Input format %s or origin_format %s is not support.", + TypeUtils::FormatToSerialString(format).c_str(), TypeUtils::FormatToSerialString(origin_format).c_str()); + return FAILED; + } + + auto data_type = desc.GetDataType(); + uint32_t length = 1; + bool type_ret = TypeUtils::GetDataTypeLength(data_type, length); + if (!type_ret) { + GELOGE(PARAM_INVALID, "Input datatype %s is not support.", + TypeUtils::DataTypeToSerialString(data_type).c_str()); + return FAILED; + } + int64_t desc_shape = desc.GetShape().GetShapeSize(); + FMK_INT64_UINT32_MULCHECK(desc_shape, length); + int64_t shape_size = desc_shape * length; + GE_IF_BOOL_EXEC(shape_size == 0, shape_size = static_cast(length)); + uint32_t size = 0; + ge::TensorUtils::GetSize(desc, size); + if ((size != 0) && (shape_size != static_cast(size))) { + GELOGE(PARAM_INVALID, "input data size =%u, shape_size =%ld.", size, shape_size); + return FAILED; + } + + ge::TensorUtils::SetSize(desc, static_cast(shape_size)); + + graphStatus graph_ret = op->UpdateInputDesc(0, desc); + if (graph_ret != GRAPH_SUCCESS) { + GELOGE(graph_ret, "UpdateInputDesc fail, graph_ret:%u", graph_ret); + return graph_ret; + } + graph_ret = op->UpdateOutputDesc(0, desc); + if (graph_ret != GRAPH_SUCCESS) { + GELOGE(graph_ret, "UpdateOutputDesc fail, graph_ret:%u", graph_ret); + return graph_ret; + } + + if (!options_.train_graph_flag) { + Status ret = AdjustDataOpOutput(input_node); + if (ret != SUCCESS) { + GELOGE(ret, "AdjustDataOpOutput fail, ret:%u", ret); + return ret; + } + } + } + } + + return SUCCESS; +} + +Status 
GraphPrepare::TryDoAipp() { + // infer and with aipp configure file, then call aipp insert + if ((!options_.train_graph_flag) && (!options_.insert_op_file.empty())) { + GraphUtils::DumpGEGraph(compute_graph_, "Before_insert_aipp"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_, "Before_insert_aipp"); + Status ret = ge::InsertNewOpUtil::Instance().Init(); + if (ret != SUCCESS) { + GELOGE(INTERNAL_ERROR, "TryDoAipp: InsertNewOpUtil instance failed."); + return INTERNAL_ERROR; + } + ret = ge::InsertNewOpUtil::Instance().Parse(options_.insert_op_file.c_str()); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIMIZE_INSERT_OP_PARSE_FAILED, "TryDoAipp: parse config file %s failed", + options_.insert_op_file.c_str()); + return GE_GRAPH_OPTIMIZE_INSERT_OP_PARSE_FAILED; + } + ret = ge::InsertNewOpUtil::Instance().InsertAippOps(compute_graph_, options_.insert_op_file); + if (ret != SUCCESS) { + GELOGE(GE_GRAPH_OPTIMIZE_INSERT_DYN_OP_FAILED, "TryDoAipp: insert aipp op ret failed, ret:%u", ret); + return GE_GRAPH_OPTIMIZE_INSERT_DYN_OP_FAILED; + } + } + return SUCCESS; +} + +Status GraphPrepare::FormatAndShapeProcess() { + Status ret = ResourcePairProcess("add"); + if (ret != SUCCESS) { + GELOGE(ret, "ResourcePairProcess failed"); + return ret; + } + + GE_TIMESTAMP_START(InferOriginFormat1); + ret = compute_graph_->InferOriginFormat(); + GE_TIMESTAMP_END(InferOriginFormat1, "GraphPrepare::InferOriginFormat1"); + if (ret != SUCCESS) { + GELOGE(ret, "Prepare Graph first inferformat failed"); + return ret; + } + GraphUtils::DumpGEGraph(compute_graph_, "after_first_inferformat"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_, "after_first_inferformat"); + + GE_TIMESTAMP_START(InferShapeForPreprocess); + ret = InferShapeForPreprocess(); + GE_TIMESTAMP_END(InferShapeForPreprocess, "GraphPrepare::InferShapeForPreprocess"); + GraphUtils::DumpGEGraph(compute_graph_, "after_infershape"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_, "after_infershape"); + if (ret != SUCCESS) { + 
GELOGE(GE_GRAPH_INFERSHAPE_FAILED, "Prepare Graph infershape failed"); + return GE_GRAPH_INFERSHAPE_FAILED; + } + + GE_TIMESTAMP_START(InferOriginFormat2); + ret = compute_graph_->InferOriginFormat(); + GE_TIMESTAMP_END(InferOriginFormat2, "GraphPrepare::InferOriginFormat2"); + if (ret != SUCCESS) { + GELOGE(ret, "Prepare Graph inferformat failed"); + return ret; + } + + ret = ResourcePairProcess("remove"); + if (ret != SUCCESS) { + return ret; + } + return ret; +} + +Status GraphPrepare::ResourcePairProcess(const std::string &action) { + PassManager control_pass; + // Graph pass tmp logic for resource infershape + if (options_.train_graph_flag) { + try { + if (action == "add") { + (void)control_pass.AddPass(new ResourcePairAddControlPass); + } else { + (void)control_pass.AddPass(new ResourcePairRemoveControlPass); + } + } catch (std::bad_alloc &e) { + GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occur, action:%s.", action.c_str()); + return INTERNAL_ERROR; + } + } + Status ret = control_pass.Run(compute_graph_); + if (ret != SUCCESS && ret != NOT_CHANGED) { + GELOGE(ret, "Run ResourcePairControlPass failed, action:%s, ret:%u.", action.c_str(), ret); + return ret; + } + return SUCCESS; +} + +Status GraphPrepare::OptimizeForDataAfterInfershape() { + for (auto node_ptr : compute_graph_->GetAllNodes()) { + GE_CHECK_NOTNULL(node_ptr); + if (node_ptr->GetType() == AIPP) { + if (SetDataNodeByAipp(node_ptr) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Set data node by aipp failed"); + return FAILED; + } + } + if (node_ptr->GetType() != DATA) { + continue; + } + bool set_fp16 = false; + if (!ge::AttrUtils::GetBool(node_ptr->GetOpDesc(), "input_fp16", set_fp16) || !set_fp16) { + continue; + } + GELOGI("input_node_set_fp16 is found, the name is %s", node_ptr->GetName().c_str()); + if (ProcessInputFP16(node_ptr) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "ProcessInputFP16 failed"); + return FAILED; + } + // check if need to set format + bool set_format = false; + if 
(!ge::AttrUtils::GetBool(node_ptr->GetOpDesc(), "input_set_nc1hwc0", set_format) || !set_format) { + continue; + } + GELOGI("Find a node [%s] should set NC1HWC0", node_ptr->GetName().c_str()); + if (ProcessInputNC1HWC0(node_ptr) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "ProcessInputNC1HWC0 failed"); + return FAILED; + } + } + return SUCCESS; +} + +void GraphPrepare::ProcessCCEFormat() { + static const char *const parser_priority = std::getenv("PARSER_PRIORITY"); + static const bool keep_cce = parser_priority != nullptr && string(parser_priority) == "cce"; + if (keep_cce) { + GELOGI("keep cce priority"); + for (const ge::NodePtr &n : compute_graph_->GetDirectNode()) { + auto node_op_desc = n->GetOpDesc(); + GE_IF_BOOL_EXEC(node_op_desc == nullptr, continue); + if (node_op_desc->GetType() == MULTIPLY || node_op_desc->GetType() == ASSIGN) { + auto input_size = static_cast(node_op_desc->GetInputsSize()); + for (uint32_t i = 0; i < input_size; ++i) { + ge::GeTensorDesc org_tensor_input = node_op_desc->GetInputDesc(i); + GELOGD("keep cce name:%s, type:%s", node_op_desc->GetName().c_str(), node_op_desc->GetType().c_str()); + if (org_tensor_input.GetFormat() == FORMAT_ND) { + org_tensor_input.SetFormat(FORMAT_NCHW); + org_tensor_input.SetOriginFormat(FORMAT_NCHW); + (void)node_op_desc->UpdateInputDesc(i, org_tensor_input); + } + } + auto output_size = static_cast(node_op_desc->GetOutputsSize()); + for (uint32_t i = 0; i < output_size; ++i) { + ge::GeTensorDesc org_tensor_output = node_op_desc->GetOutputDesc(i); + GELOGD("keep cce name:%s, type:%s", node_op_desc->GetName().c_str(), node_op_desc->GetType().c_str()); + if (org_tensor_output.GetFormat() == FORMAT_ND) { + org_tensor_output.SetFormat(FORMAT_NCHW); + org_tensor_output.SetOriginFormat(FORMAT_NCHW); + (void)node_op_desc->UpdateOutputDesc(i, org_tensor_output); + } + } + } + } + } +} + +Status GraphPrepare::OptimizeBeforeInfershape() { + PassManager graph_passes_before_infershape; + // Graph pass + try { + if 
(options_.train_graph_flag) { + (void)graph_passes_before_infershape.AddPass(new SavePass); + } + (void)graph_passes_before_infershape.AddPass(new NetOutputPass); + } catch (std::bad_alloc &e) { + GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occurs."); + return INTERNAL_ERROR; + } + GE_TIMESTAMP_START(graph_passes_before_infershape); + Status ret = graph_passes_before_infershape.Run(compute_graph_); + GE_TIMESTAMP_END(graph_passes_before_infershape, "GraphPrepare::BeforeInfershape"); + bool status = (ret != SUCCESS && ret != NOT_CHANGED); + if (status) { + GELOGE(ret, "Run graph_passes_before_infershape failed, ret:%u.", ret); + return ret; + } + + graphStatus ret_topo = compute_graph_->TopologicalSorting(); + if (ret_topo != GRAPH_SUCCESS) { + GELOGE(ret_topo, "Graph topological sort failed, ret:%u.", ret_topo); + return ret_topo; + } + return SUCCESS; +} + +void GraphPrepare::SaveOriginalGraphToOmModel() { + if (options_.save_original_model) { + ModelHelper model_helper; + Status ret = model_helper.SaveOriginalGraphToOmModel(ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph_), + options_.original_model_file); + if (ret != SUCCESS) { + // If save original model fail, process continue + GELOGW("SaveOriginalGraphToOmModel fail"); + } + } +} + +Status GraphPrepare::Preprocess(const std::vector &user_input) { + // rtContext_t... 
+ Status ret = SetRtContext(rtContext_t(), RT_CTX_GEN_MODE); + if (ret != SUCCESS) { + GELOGE(ret, "Set rt context failed."); + return ret; + } + + ret = CheckUserInput(user_input); + if (ret != SUCCESS) { + GELOGE(ret, "Check user input failed."); + return ret; + } + + compute_graph_->SetInputSize(user_input.size()); + + ret = UpdateInput(user_input); + if (ret != SUCCESS) { + GELOGE(ret, "UpdateInput fail, ret:%u", ret); + return ret; + } + GraphUtils::DumpGEGraph(compute_graph_, "after_update_input"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_, "after_update_input"); + if (user_input.size() != 0) { + ret = CheckConstOp(); + if (ret != SUCCESS) { + GELOGE(ret, "CheckConstOp fail, ret:%u", ret); + return ret; + } + } else { + ret = compute_graph_->TopologicalSorting(); + if (ret != SUCCESS) { + GELOGE(ret, "graph prepare error: compute_graph_->Topological Sorting"); + return FAILED; + } + } + ret = TryDoAipp(); + if (ret != SUCCESS) { + return ret; + } + + ret = OptimizeBeforeInfershape(); + if (ret != SUCCESS) { + GELOGE(ret, "OptimizeBeforeInfershape failed."); + return ret; + } + GE_TIMESTAMP_START(FormatAndShapeProcess); + ret = FormatAndShapeProcess(); + GE_TIMESTAMP_END(FormatAndShapeProcess, "GraphPrepare::FormatAndShapeProcess"); + if (ret != SUCCESS) { + GELOGE(ret, "FormatAndShape process failed"); + return ret; + } + GraphUtils::DumpGEGraph(compute_graph_, "after_inferformat_before_preprocess"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_, "after_inferformat_before_preprocess"); + + ProcessCCEFormat(); + + ret = OptimizeForDataAfterInfershape(); + if (ret != SUCCESS) { + GELOGE(ret, "Optimize for input if set inputfp16 failed."); + return ret; + } + + SaveOriginalGraphToOmModel(); + + GE_TIMESTAMP_START(OptimizeForPreprocess); + ret = OptimizeForPreprocess(); + GE_TIMESTAMP_END(OptimizeForPreprocess, "GraphPrepare::OptimizeForPreprocess"); + if (ret != SUCCESS) { + GELOGE(ret, "Optimize for preprocess failed."); + return ret; + } + 
GELOGI("Optimize for preprocess success."); + + GE_TIMESTAMP_START(UpdateVariableFormats); + ret = UpdateVariableFormats(compute_graph_); + GE_TIMESTAMP_END(UpdateVariableFormats, "GraphPrepare::UpdateVariableFormats"); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to update variables formats"); + return ret; + } + GELOGI("Update variable formats success."); + + GraphUtils::DumpGEGraph(compute_graph_, "Optimize_after_preprocess"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_, "Optimize_after_preprocess"); + return SUCCESS; +} + +Status GraphPrepare::Prepare(ConstGraphPtr graph, const std::vector &user_input, + ge::ComputeGraphPtr &compute_graph, uint64_t session_id) { + // train graph flag + if (options_.train_graph_flag) { + domi::GetContext().train_flag = true; + } + domi::GetContext().type = static_cast(options_.framework_type); + + if (graph == nullptr) { + GELOGE(GE_GRAPH_NULL_INPUT, "Input Graph is NULL"); + return GE_GRAPH_NULL_INPUT; + } + const Graph &const_graph = *graph; + Status ret = Init(const_graph, session_id); + if (ret != SUCCESS) { + GELOGE(ret, "Init graph_prepare fail, ret:%u", ret); + return ret; + } + + GraphUtils::DumpGEGraph(compute_graph_, "BeforePreprocess"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_, "BeforePreprocess"); + + GE_TIMESTAMP_START(Preprocess); + ret = Preprocess(user_input); + GE_TIMESTAMP_END(Preprocess, "GraphPrepare::Preprocess"); + if (ret != SUCCESS) { + GELOGE(ret, "Run graph_prepare fail, ret:%u", ret); + return ret; + } + // OriginalGraph optimize + GraphOptimize graph_optimize; + ret = graph_optimize.SetOptions(options_); + GE_CHK_STATUS_RET(ret, "Graph optimize initial fail"); + if (options_.local_fmk_op_flag) { + graph_optimize.TranFrameOp(compute_graph_); + } + + GraphUtils::DumpGEGraph(compute_graph_, "Prepare"); + GraphUtils::DumpGEGraphToOnnx(*compute_graph_, "Prepare"); + + if (!domi::GetContext().train_flag) { + GE_TIMESTAMP_START(OptimizeOriginalGraphForQuantize); + ret = 
graph_optimize.OptimizeOriginalGraphForQuantize(compute_graph_); + GE_TIMESTAMP_END(OptimizeOriginalGraphForQuantize, "GraphPrepare::OptimizeOriginalGraphForQuantize"); + if (ret != SUCCESS) { + GELOGE(ret, "originalGraph optimize for Quantize Failed"); + return ret; + } + } + GE_TIMESTAMP_START(OptimizeOriginalGraph); + ret = graph_optimize.OptimizeOriginalGraph(compute_graph_); + GE_TIMESTAMP_END(OptimizeOriginalGraph, "GraphPrepare::OptimizeOriginalGraph"); + if (ret != SUCCESS) { + GELOGE(ret, "originalGraph optimize Failed"); + return ret; + } + + compute_graph = compute_graph_; + return SUCCESS; +} + +Status GraphPrepare::CheckConstOp() { + for (auto &node_ptr : compute_graph_->GetAllNodes()) { + GE_CHECK_NOTNULL(node_ptr); + if (node_ptr->GetType() == CONSTANT) { + Status ret = VerifyConstOp(node_ptr); + GE_CHK_BOOL_RET_STATUS(ret == SUCCESS, ret, "Const Op Check failed"); + } else if (node_ptr->GetType() == FRAMEWORKOP) { + auto op_desc = node_ptr->GetOpDesc(); + if (op_desc == nullptr) { + GELOGE(PARAM_INVALID, "Get op desc failed"); + return PARAM_INVALID; + } + std::string original_type; + GE_IF_BOOL_EXEC(ge::AttrUtils::GetStr(op_desc, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, original_type), + GELOGI("Get FrameWorkOp original type [%s]", original_type.c_str())); + GELOGI("original type is %s", original_type.c_str()); + if (original_type == CONSTANT) { + Status ret = VerifyConstOp(node_ptr); + GE_CHK_BOOL_RET_STATUS(ret == SUCCESS, ret, "Const Op Check failed"); + } + } + } + return SUCCESS; +} +Status GraphPrepare::VerifyConstOp(const NodePtr &node) { + GE_CHECK_NOTNULL(node); + auto op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + ConstGeTensorPtr ge_tensor_ptr; + if (!(AttrUtils::GetTensor(op_desc, ATTR_NAME_WEIGHTS, ge_tensor_ptr))) { + GELOGE(PARAM_INVALID, "Get value from const attr failed"); + return PARAM_INVALID; + } + GE_CHECK_NOTNULL(ge_tensor_ptr); + auto data_size = ge_tensor_ptr->GetData().GetSize(); + auto ge_tensor_desc = 
ge_tensor_ptr->GetTensorDesc(); + int64_t shape_size = ge_tensor_desc.GetShape().GetShapeSize(); + auto data_type = ge_tensor_desc.GetDataType(); + uint32_t length = 1; + bool type_ret = TypeUtils::GetDataTypeLength(data_type, length); + if (!type_ret) { + GELOGE(PARAM_INVALID, "Input datatype %s is not support.", TypeUtils::DataTypeToSerialString(data_type).c_str()); + return FAILED; + } + FMK_INT64_UINT32_MULCHECK(shape_size, length); + GELOGI("Const real value Size:%zu, op_desc Shape Size:%ld, data_type:%s.", data_size, shape_size * length, + TypeUtils::DataTypeToSerialString(data_type).c_str()); + if ((shape_size != 0) || (data_size / length != 1)) { + GE_CHK_BOOL_EXEC(data_size == static_cast(shape_size * length) && data_size != 0, + return GRAPH_PARAM_INVALID, "Const input data size is not equal with tensor desc shape"); + } + return SUCCESS; +} + +Status GraphPrepare::CheckUserInput(const std::vector &user_input) { + if (user_input.empty()) { + return SUCCESS; + } + unsigned int node_num = 0; + unsigned int data_num = 0; + for (NodePtr &input_node : compute_graph_->GetAllNodes()) { + GE_CHECK_NOTNULL(input_node); + OpDescPtr op = input_node->GetOpDesc(); + GE_CHECK_NOTNULL(op); + node_num++; + if (op->GetType() == DATA || op->GetType() == AIPPDATA) { + data_num++; + GeAttrValue::INT index = 0; + if (!(AttrUtils::GetInt(op, ATTR_NAME_INDEX, index))) { + GELOGE(GE_GRAPH_INIT_FAILED, "Get index from attr failed"); + return GE_GRAPH_INIT_FAILED; + } + if ((index < 0) || (static_cast(index) >= user_input.size())) { + GELOGE(GE_GRAPH_INIT_FAILED, "user_input size:%zu, data op index:%ld.", user_input.size(), index); + return GE_GRAPH_INIT_FAILED; + } + GeTensorDesc desc(user_input[index].GetTensorDesc()); + + for (size_t i = 0; i < desc.GetShape().GetDimNum(); ++i) { + if (desc.GetShape().GetDim(i) <= 0) { + GELOGE(GE_GRAPH_INIT_FAILED, "data dim %zu is not supported, need > 0, real:%ld.", i, + desc.GetShape().GetDim(i)); + return GE_GRAPH_INIT_FAILED; + } + } + } 
+ } + if (node_num <= data_num) { + GELOGW("Prepare check user input, data_num = %u, node_num = %u", data_num, node_num); + } + return SUCCESS; +} + +Status GraphPrepare::InferShapeForPreprocess() { + GELOGI("Start infershape for preprocess."); + GEPass ge_passes(compute_graph_); + NamesToPass names_to_passes; + AssertPass assert_pass; + if (!options_.train_graph_flag) { + names_to_passes.emplace_back("AssertPass", &assert_pass); + } + InferShapePass infer_shape_pass; + names_to_passes.emplace_back("InferShapePass", &infer_shape_pass); + DimensionComputePass dimension_compute_pass; + names_to_passes.emplace_back("DimensionComputePass", &dimension_compute_pass); + ConstantFoldingPass constant_folding_pass; + names_to_passes.emplace_back("ConstantFoldingPass", &constant_folding_pass); + + int32_t dev_count = 0; + AicpuConstantFoldingPass aicpu_constant_folding_pass; + const char *aicpu_constant_folding_on = std::getenv("AICPU_CONSTANT_FOLDING_ON"); + rtError_t rt_err = RT_ERROR_NONE; + if (aicpu_constant_folding_on != nullptr) { + rt_err = rtGetDeviceCount(&dev_count); + if (rt_err == RT_ERROR_NONE) { + Status result = SetRtContext(rtContext_t(), RT_CTX_NORMAL_MODE); + if (result != SUCCESS) { + GELOGE(result, "Set rt context failed."); + return result; + } + names_to_passes.emplace_back("AicpuConstantFoldingPass", &aicpu_constant_folding_pass); + } + } + Status ret = ge_passes.Run(names_to_passes); + if (aicpu_constant_folding_on != nullptr) { + if (rt_err == RT_ERROR_NONE) { + Status result = SetRtContext(rtContext_t(), RT_CTX_GEN_MODE); + if (result != SUCCESS) { + GELOGE(result, "Set rt context failed."); + return result; + } + } + } + if (ret != SUCCESS) { + GELOGE(ret, "Run ge_passes infershape for preprocess failed, ret:%u.", ret); + return ret; + } + return SUCCESS; +} + +Status GraphPrepare::OptimizeForPreprocess() { + GELOGI("Start optimize for preprocess."); + + PassManager original_graph_passes; + // Graph pass + try { + 
(void)original_graph_passes.AddPass(new ConstantFuseSamePass); + (void)original_graph_passes.AddPass(new VariablePrepareOpPass); + (void)original_graph_passes.AddPass(new IteratorOpPass); + (void)original_graph_passes.AddPass(new ShapeOperateOpRemovePass); + } catch (std::bad_alloc &e) { + GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occurs."); + return INTERNAL_ERROR; + } + + GE_TIMESTAMP_START(original_graph_passes); + Status ret = original_graph_passes.Run(compute_graph_); + GE_TIMESTAMP_END(original_graph_passes, "GraphPrepare::OriginalGraphPasses"); + if (ret != SUCCESS && ret != NOT_CHANGED) { + GELOGE(ret, "Run graph passes optimize for preprocess failed, ret:%u.", ret); + return ret; + } + // New pass + GEPass ge_passes(compute_graph_); + NamesToPass names_to_passes; + EnterPass enter_pass; + names_to_passes.emplace_back("EnterPass", &enter_pass); + AddNPass addn_pass; + names_to_passes.emplace_back("AddNPass", &addn_pass); + PrintOpPass print_pass; + names_to_passes.emplace_back("PrintOpPass", &print_pass); + NoUseReshapeRemovePass no_use_reshape_remove_pass; + names_to_passes.emplace_back("NoUseReshapeRemovePass", &no_use_reshape_remove_pass); + + // for infer + DropOutPass dropout_pass; + AssertPass assert_pass; + ReUpdateNetOutputPass re_update_net_output_pass; + if (!options_.train_graph_flag) { + names_to_passes.emplace_back("ReUpdateNetOutputPass", &re_update_net_output_pass); + names_to_passes.emplace_back("DropOutPass", &dropout_pass); + names_to_passes.emplace_back("AssertPass", &assert_pass); + } + UnusedConstPass unused_const_pass; + names_to_passes.emplace_back("UnusedConstPass", &unused_const_pass); + StopGradientPass stop_gradient_pass; + names_to_passes.emplace_back("StopGradientPass", &stop_gradient_pass); + PreventGradientPass prevent_gradient_pass; + names_to_passes.emplace_back("PreventGradientPass", &prevent_gradient_pass); + PlaceholderWithDefaultPass placeholder_with_default_pass; + 
names_to_passes.emplace_back("PlaceholderWithDefaultPass", &placeholder_with_default_pass); + SnapshotPass snapshot_pass; + names_to_passes.emplace_back("SnapshotPass", &snapshot_pass); + GuaranteeConstPass guarantee_const_pass; + names_to_passes.emplace_back("GuaranteeConstPass", &guarantee_const_pass); + VarIsInitializedOpPass var_is_initialized_pass; + names_to_passes.emplace_back("VarIsInitializedOpPass", &var_is_initialized_pass); + IdentityPass identity_pass(false); + names_to_passes.emplace_back("IdentityPass", &identity_pass); + SwitchPass switch_pass; + names_to_passes.emplace_back("SwitchPass", &switch_pass); + SwitchLogicRemovePass switch_logic_remove_pass; + names_to_passes.emplace_back("SwitchLogicRemovePass", &switch_logic_remove_pass); + MergePass merge_pass; + names_to_passes.emplace_back("MergePass", &merge_pass); + GE_TIMESTAMP_START(names_to_passes); + ret = ge_passes.Run(names_to_passes); + GE_TIMESTAMP_END(names_to_passes, "GraphPrepare::NamesToPasses"); + if (ret != SUCCESS) { + GELOGE(ret, "Run ge_passes optimize for preprocess failed, ret:%u.", ret); + return ret; + } + + PassManager graph_pass; + try { + (void)graph_pass.AddPass(new PrunePass); + (void)graph_pass.AddPass(new NextIterationPass); + (void)graph_pass.AddPass(new ControlTriggerPass); + (void)graph_pass.AddPass(new SwitchOpPass); + (void)graph_pass.AddPass(new HcclMemcpyPass); + (void)graph_pass.AddPass(new FlowCtrlPass); + (void)graph_pass.AddPass(new EndGraphPass); + } catch (std::bad_alloc &e) { + GELOGE(INTERNAL_ERROR, "Add pass failed, bad memory allocation occurs."); + return INTERNAL_ERROR; + } + + ret = graph_pass.Run(compute_graph_); + if (ret != SUCCESS && ret != NOT_CHANGED) { + GELOGE(ret, "Run graph passes optimize for preprocess failed, ret:%u.", ret); + return ret; + } + + NamesToPass identity_remove_pass; + GE_TIMESTAMP_START(identity_remove_pass); + IdentityPass identity_force_pass(true); // after SwitchOpPass + identity_remove_pass.emplace_back("IdentityPass", 
&identity_force_pass); + ret = ge_passes.Run(identity_remove_pass); + GE_TIMESTAMP_END(identity_remove_pass, "GraphPrepare::IdentityRemovePass"); + if (ret != SUCCESS) { + GELOGE(ret, "Run identity remove pass for preprocess failed, ret:%u.", ret); + return ret; + } + // The constant for train is CONSTANTOP, and is CONSTANT for inference. They will be unified in future. + if (options_.train_graph_flag) { + for (ge::NodePtr &n : compute_graph_->GetDirectNode()) { + // This can ensure that n is not a null pointer + if (n->GetOpDesc()->GetType() == CONSTANT) { + n->GetOpDesc()->SetType(CONSTANTOP); + } + } + } + + ret = compute_graph_->TopologicalSorting(); + if (ret != SUCCESS) { + GELOGE(ret, "Graph topological sort failed, ret:%u.", ret); + return ret; + } + + GELOGI("End optimize for preprocess."); + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/preprocess/graph_preprocess.h b/src/ge/graph/preprocess/graph_preprocess.h new file mode 100644 index 00000000..00a3f22b --- /dev/null +++ b/src/ge/graph/preprocess/graph_preprocess.h @@ -0,0 +1,77 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PREPROCESS_GRAPH_PREPROCESS_H_ +#define GE_GRAPH_PREPROCESS_GRAPH_PREPROCESS_H_ + +#include +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/model_parser/base.h" +#include "common/properties_manager.h" +#include "common/string_util.h" +#include "common/types.h" +#include "common/util.h" +#include "graph/compute_graph.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/model.h" +#include "graph/node.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/tensor_utils.h" +#include "omg/omg_inner_types.h" +#include "runtime/context.h" + +namespace ge { +class GraphPrepare { + public: + GraphPrepare(); + virtual ~GraphPrepare(); + GraphPrepare(const GraphPrepare &in) = delete; + GraphPrepare &operator=(const GraphPrepare &in) = delete; + Status Prepare(ConstGraphPtr graph, const std::vector &user_input, + ge::ComputeGraphPtr &compute_graph, uint64_t session_id = 0); + void SetOptions(const GraphManagerOptions &options); + + private: + Status Init(const ge::Graph &graph, uint64_t session_id = 0); + Status Preprocess(const std::vector &user_input); + Status CheckGraph(); + Status SetRtContext(rtContext_t rt_context, rtCtxMode_t mode); + Status AdjustDataOpOutput(const NodePtr &node); + Status UpdateInput(const std::vector &user_input); + Status CheckConstOp(); + Status VerifyConstOp(const NodePtr &node); + Status CheckUserInput(const std::vector &user_input); + Status OptimizeForPreprocess(); + Status InferShapeForPreprocess(); + Status TryDoAipp(); + Status OptimizeForDataAfterInfershape(); + Status UpdateVariableFormats(ComputeGraphPtr &graph); + Status FormatAndShapeProcess(); + Status ResourcePairProcess(const std::string& action); + void ProcessCCEFormat(); + Status OptimizeBeforeInfershape(); + void SaveOriginalGraphToOmModel(); + ge::ComputeGraphPtr compute_graph_; + GraphManagerOptions options_; +}; +} // namespace ge +#endif // 
GE_GRAPH_PREPROCESS_GRAPH_PREPROCESS_H_ diff --git a/src/ge/graph/preprocess/insert_op/base_insert_op.cc b/src/ge/graph/preprocess/insert_op/base_insert_op.cc new file mode 100644 index 00000000..db8403b2 --- /dev/null +++ b/src/ge/graph/preprocess/insert_op/base_insert_op.cc @@ -0,0 +1,224 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/preprocess/insert_op/base_insert_op.h" + +#include +#include + +#include "common/ge/ge_util.h" +#include "common/math/math_util.h" +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "common/util.h" +#include "external/graph/operator.h" +#include "external/graph/operator_factory.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/op_desc.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" + +namespace ge { +static const char *const kAippConfigPath = "aipp_config_route"; +static const uint32_t kImageRatioYuv420SpU8Mul = 3; +static const uint32_t kImageRatioYuv420SpU8Div = 2; +static const uint32_t kImageRatioXrgb8888U8 = 4; +static const uint32_t kImageRatioRgb888U8 = 3; + +Status InsertOpBase::InsertAippToGraph(ComputeGraphPtr &graph, std::string &aipp_config_path, + ge::NodePtr &inserted_aipp_node) { + 
GE_CHECK_NOTNULL(graph); + NodePtr target_input = nullptr; + std::vector> target_edges; + GE_CHK_STATUS_RET(this->GetTargetPosition(graph, target_input, target_edges), "Get data nodes position failed"); + OpDescPtr op_desc = ge::MakeShared("", ""); + if (op_desc == nullptr) { + return FAILED; + } + GE_CHK_STATUS_RET(this->GenerateOpDesc(op_desc), "Generate aipp node opdesc failed"); + ge::GeAttrValue::NamedAttrs aipp_attr; + GE_IF_BOOL_EXEC(!AttrUtils::GetNamedAttrs(op_desc, ATTR_NAME_AIPP, aipp_attr), + GELOGW("InsertAippToGraph: GetNamedAttrs failed"); + return FAILED) + + auto opdesc_src_data = target_input->GetOpDesc()->GetOutputDesc(0); + if (opdesc_src_data.GetDataType() != DT_FLOAT) { + GELOGW("The datatype of data node %s is not FP32", target_input->GetName().c_str()); + opdesc_src_data.SetDataType(DT_FLOAT); + } + + static uint32_t op_idx = 0; + // Does not involve multithreading. + std::string current_name = std::string("aipp_node").append(std::to_string(op_idx++)); + auto aipp_op = ge::OperatorFactory::CreateOperator(current_name, "Aipp"); + GE_CHK_BOOL_RET_STATUS(!aipp_op.IsEmpty(), PARAM_INVALID, "Aipp is not registered"); + auto aipp_opdesc_ptr = ge::OpDescUtils::GetOpDescFromOperator(aipp_op); + GE_CHECK_NOTNULL(aipp_opdesc_ptr); + GE_IF_BOOL_EXEC(!AttrUtils::SetNamedAttrs(aipp_opdesc_ptr, ATTR_NAME_AIPP, aipp_attr), + GELOGE(FAILED, "SetNameAttrs failed"); + return FAILED;) + + unique_ptr aipp_params(new (std::nothrow) domi::AippOpParams()); + GE_CHECK_NOTNULL(aipp_params); + GE_CHK_STATUS_RET(ge::OpUtils::ConvertAippParams(aipp_attr, aipp_params.get()), "Get aipp params failed") + GE_CHK_STATUS_RET(aipp_opdesc_ptr->UpdateInputDesc(0, opdesc_src_data)) + + if (aipp_params->aipp_mode() == domi::AippOpParams::dynamic) { + Status ret = aipp_opdesc_ptr->UpdateInputDesc(1, opdesc_src_data); + if (ret != SUCCESS) { + return FAILED; + } + } + GE_IF_BOOL_EXEC(!AttrUtils::SetStr(aipp_opdesc_ptr, kAippConfigPath, aipp_config_path), + GELOGW("SetStr 
kAippConfigPath failed");) + GELOGI("Aipp config path is %s", aipp_config_path.c_str()); + + // for data dump + GE_IF_BOOL_EXEC(!AttrUtils::SetListStr(aipp_opdesc_ptr, ATTR_NAME_DATA_DUMP_ORIGIN_OP_NAMES, + std::move(std::vector())), + GELOGW("InsertAippToGraph: SetListStr failed");) + + NodePtr insert_op = graph->AddNode(aipp_opdesc_ptr); + GE_CHECK_NOTNULL(insert_op); + OutDataAnchorPtr target_input_out = target_input->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(target_input_out); + InDataAnchorPtr insert_op_in = insert_op->GetInDataAnchor(0); + GE_CHECK_NOTNULL(insert_op_in); + OutDataAnchorPtr insert_op_out = insert_op->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(insert_op_out); + + inserted_aipp_node = insert_op; + if (target_edges.size() == 1) { + OutDataAnchorPtr src_out = target_edges[0].first; + InDataAnchorPtr dst_in = target_edges[0].second; + GE_CHK_STATUS_RET(GraphUtils::InsertNodeBetweenDataAnchors(src_out, dst_in, insert_op)) + return SUCCESS; + } + for (auto &edge : target_edges) { + OutDataAnchorPtr src_out = edge.first; + GE_CHECK_NOTNULL(src_out); + InDataAnchorPtr dst_in = edge.second; + GE_CHK_STATUS_RET(src_out->Unlink(dst_in), "Unlink the anchor failed"); + GE_CHK_STATUS_RET(insert_op_out->LinkTo(dst_in), "Link the anchor failed"); + } + GE_CHK_STATUS_RET(target_input_out->LinkTo(insert_op_in), "Link the anchor failed"); + return SUCCESS; +} + +uint32_t InsertOpBase::AdjustDataSize(const GeTensorDesc &input_desc, unique_ptr &aipp_params) { + GE_CHECK_NOTNULL(aipp_params); + if (aipp_params->aipp_mode() == domi::AippOpParams::static_) { + uint32_t size = input_desc.GetShape().GetDim(NCHW_DIM_N); + const uint32_t h = (input_desc.GetFormat() == ge::FORMAT_NHWC) ? NHWC_DIM_H : NCHW_DIM_H; + const uint32_t w = (input_desc.GetFormat() == ge::FORMAT_NHWC) ? NHWC_DIM_W : NCHW_DIM_W; + const uint32_t shape_h = + aipp_params->src_image_size_h() ? 
aipp_params->src_image_size_h() : input_desc.GetShape().GetDim(h); + FMK_UINT32_MULCHECK(size, shape_h); + size *= shape_h; + const uint32_t shape_w = + aipp_params->src_image_size_w() ? aipp_params->src_image_size_w() : input_desc.GetShape().GetDim(w); + FMK_UINT32_MULCHECK(size, shape_w); + size *= shape_w; + if (aipp_params->input_format() == domi::AippOpParams::YUV420SP_U8) { + FMK_UINT32_MULCHECK((size / kImageRatioYuv420SpU8Div), kImageRatioYuv420SpU8Mul); + size = size / kImageRatioYuv420SpU8Div * kImageRatioYuv420SpU8Mul; // avoid use float + } else if (aipp_params->input_format() == domi::AippOpParams::XRGB8888_U8) { + FMK_UINT32_MULCHECK(size, kImageRatioXrgb8888U8); + size *= kImageRatioXrgb8888U8; + } else if (aipp_params->input_format() == domi::AippOpParams::RGB888_U8) { + FMK_UINT32_MULCHECK(size, kImageRatioRgb888U8); + size *= kImageRatioRgb888U8; + } + return size; + } else { + return aipp_params->max_src_image_size(); + } +} + +Status InsertOpBase::InsertOpToGraph(ComputeGraphPtr graph) { + GE_CHECK_NOTNULL(graph); + NodePtr target_input = nullptr; + std::vector> target_edges; + GE_CHK_STATUS_RET(this->GetTargetPosition(graph, target_input, target_edges), "Get nodes position failed"); + + // insertOp + OpDescPtr op_desc = MakeShared("", ""); + if (op_desc == nullptr) { + return FAILED; + } + GE_CHK_STATUS_RET(this->GenerateOpDesc(op_desc), "Generate aipp node failed"); + NodePtr insert_op = graph->AddNode(op_desc); + GE_CHECK_NOTNULL(insert_op); + OutDataAnchorPtr target_input_out = target_input->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(target_input_out); + InDataAnchorPtr insert_op_in = insert_op->GetInDataAnchor(0); + GE_CHECK_NOTNULL(insert_op_in); + OutDataAnchorPtr insert_op_out = insert_op->GetOutDataAnchor(0); + GE_CHECK_NOTNULL(insert_op_out); + + if (target_edges.size() == 1) { + OutDataAnchorPtr src_out = target_edges[0].first; + InDataAnchorPtr dst_in = target_edges[0].second; + 
GE_CHK_STATUS_RET(GraphUtils::InsertNodeBetweenDataAnchors(src_out, dst_in, insert_op), + "InsertNodeBetweenDataAnchors failed"); + + return SUCCESS; + } + + for (auto &edge : target_edges) { + OutDataAnchorPtr src_out = edge.first; + GE_CHECK_NOTNULL(src_out); + InDataAnchorPtr dst_in = edge.second; + + GE_CHK_STATUS_RET(src_out->Unlink(dst_in), "Unlink the anchor failed"); + + GE_CHK_STATUS_RET(insert_op_out->LinkTo(dst_in), "Link the anchor failed"); + } + + GE_CHK_STATUS_RET(target_input_out->LinkTo(insert_op_in), "Link the anchor failed"); + + return SUCCESS; +} + +Status InsertOpBase::GetInputNode(ComputeGraphPtr graph, NodePtr &target_input, uint32_t rank) { + GE_CHECK_NOTNULL(graph); + std::vector input_nodes; + + for (ge::NodePtr &node : graph->GetAllNodes()) { + GE_CHECK_NOTNULL(node); + + ge::OpDescPtr op = node->GetOpDesc(); + GE_CHECK_NOTNULL(op); + + if (op->GetType() == DATA_TYPE) { + GE_CHK_BOOL_RET_STATUS(node->GetOutDataNodes().size() > 0, FAILED, "Data node %s has no output", + node->GetName().c_str()); + input_nodes.push_back(node); + } + } + + GE_CHK_BOOL_RET_STATUS(rank < input_nodes.size(), PARAM_INVALID, + "Get intput of index %d failed, There is %zu input nodes", rank, input_nodes.size()); + + target_input = input_nodes[rank]; + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/graph/preprocess/insert_op/base_insert_op.h b/src/ge/graph/preprocess/insert_op/base_insert_op.h new file mode 100644 index 00000000..db826ece --- /dev/null +++ b/src/ge/graph/preprocess/insert_op/base_insert_op.h @@ -0,0 +1,104 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PREPROCESS_INSERT_OP_BASE_INSERT_OP_H_ +#define GE_GRAPH_PREPROCESS_INSERT_OP_BASE_INSERT_OP_H_ + +#include +#include +#include +#include + +#include "common/fmk_error_codes.h" +#include "common/types.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/compute_graph.h" +#include "proto/om.pb.h" +#include "proto/insert_op.pb.h" + +namespace ge { +class InsertOpBase { + public: + virtual ~InsertOpBase() = default; + + /// + /// @ingroup ge_omg + /// @brief Configure the default insertop parameter + /// + virtual Status SetDefaultParams() = 0; + + /// + /// @ingroup ge_omg + /// @brief Verify the insertop parameter + /// + virtual Status ValidateParams() = 0; + + /// + /// @ingroup ge_omg + /// @brief Insert the insert_op operator into the network graph + /// @param [in] graph + /// + virtual Status InsertOpToGraph(ge::ComputeGraphPtr graph); + + /// + /// @ingroup ge_omg + /// @brief Insert aipp operator into the network graph + /// @param [in] graph + /// @param [in] aippConfigPath aipp + /// + virtual Status InsertAippToGraph(ge::ComputeGraphPtr &graph, std::string &aippConfigPath, + ge::NodePtr &inserted_aipp_node); + + /// + /// @ingroup ge_omg + /// @brief get aipp mode : static or dyanmic + /// @param [in] aipp node + /// + virtual domi::AippOpParams::AippMode GetAippMode() = 0; + + protected: + /// + /// @ingroup ge_omg + /// @brief Get the input operator in the model + /// + static Status GetInputNode(ge::ComputeGraphPtr graph, ge::NodePtr &target_input, uint32_t rank); + + /// + /// @ingroup 
ge_omg + /// @brief Get the size of data bases on the input + /// + uint32_t AdjustDataSize(const ge::GeTensorDesc &desc, std::unique_ptr &aipp_params); + + /// + /// @ingroup ge_omg + /// @brief Generate the insert_op operator + /// + virtual Status GenerateOpDesc(ge::OpDescPtr op_desc) = 0; + + /// + /// @ingroup ge_omg + /// @brief Get the target operator + /// @param [in] graph graph + /// @param [in|out] target_input target operator + /// @param [in|out] target_edges target edge + /// + virtual Status GetTargetPosition(ge::ComputeGraphPtr graph, ge::NodePtr &target_input, + std::vector> &target_edges) = 0; +}; +} // namespace ge + +#endif // GE_GRAPH_PREPROCESS_INSERT_OP_BASE_INSERT_OP_H_ + diff --git a/src/ge/graph/preprocess/insert_op/ge_aipp_op.cc b/src/ge/graph/preprocess/insert_op/ge_aipp_op.cc new file mode 100644 index 00000000..129a27f2 --- /dev/null +++ b/src/ge/graph/preprocess/insert_op/ge_aipp_op.cc @@ -0,0 +1,382 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/preprocess/insert_op/ge_aipp_op.h" + +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "common/util.h" +#include "graph/optimize/common/params.h" + + +#define SAVE_AIPP_ATTR(KEY, SAVE_TYPE) \ + do { \ + (void)aipp_attrs.SetAttr(#KEY, GeAttrValue::CreateFrom(aipp_params_->KEY())); \ + } while (0) + +#define SAVE_AIPP_ATTR_LIST(KEY, SAVE_TYPE) \ + do { \ + if (aipp_params_->KEY##_size() > 0) { \ + (void)aipp_attrs.SetAttr(#KEY, GeAttrValue::CreateFrom(aipp_params_->KEY(0))); \ + } \ + } while (0) + +namespace { +const int32_t kDefaultMatrixR0C0Yuv2Rgb = 298; +const int32_t kDefaultMatrixR0C1Yuv2Rgb = 0; +const int32_t kDefaultMatrixR0C2Yuv2Rgb = 409; +const int32_t kDefaultMatrixR1C0Yuv2Rgb = 298; +const int32_t kDefaultMatrixR1C1Yuv2Rgb = -100; +const int32_t kDefaultMatrixR1C2Yuv2Rgb = -208; +const int32_t kDefaultMatrixR2C0Yuv2Rgb = 298; +const int32_t kDefaultMatrixR2C1Yuv2Rgb = 516; +const int32_t kDefaultMatrixR2C2Yuv2Rgb = 0; +const int32_t kDefaultMatrixR0C0Rgb2Yuv = 66; +const int32_t kDefaultMatrixR0C1Rgb2Yuv = 129; +const int32_t kDefaultMatrixR0C2Rgb2Yuv = 25; +const int32_t kDefaultMatrixR1C0Rgb2Yuv = -38; +const int32_t kDefaultMatrixR1C1Rgb2Yuv = -74; +const int32_t kDefaultMatrixR1C2Rgb2Yuv = 112; +const int32_t kDefaultMatrixR2C0Rgb2Yuv = 112; +const int32_t kDefaultMatrixR2C1Rgb2Yuv = -94; +const int32_t kDefaultMatrixR2C2Rgb2Yuv = -18; +const int32_t kDefaultOutputBias0 = 16; +const int32_t kDefaultOutputBias1 = 128; +const int32_t kDefaultOutputBias2 = 128; +const int32_t kDefaultInputBias0 = 16; +const int32_t kDefaultInputBias1 = 128; +const int32_t kDefaultInputBias2 = 128; +const float kDefaultVarReciChn = 1.0; +} // namespace + +namespace ge { +namespace { +const std::set kInsertAippExceptOp = {SHAPE, SSDPRIORBOX}; +} + +Status AippOp::Init(domi::AippOpParams *aipp_params) { + aipp_params_ = new (std::nothrow) 
domi::AippOpParams(); + if (aipp_params_ == nullptr) { + return FAILED; + } + aipp_params_->CopyFrom(*aipp_params); + return SUCCESS; +} + +AippOp::~AippOp() { + if (aipp_params_ != nullptr) { + delete aipp_params_; + aipp_params_ = nullptr; + } +} + +domi::AippOpParams::AippMode AippOp::GetAippMode() { + if (aipp_params_ == nullptr) { + return domi::AippOpParams::undefined; + } + return aipp_params_->aipp_mode(); +} + +Status AippOp::GetTargetPosition(ComputeGraphPtr graph, NodePtr &target_input, + std::vector> &target_edges) { + GE_CHECK_NOTNULL(graph); + target_input = nullptr; + target_edges.clear(); + + GE_CHECK_NOTNULL(aipp_params_); + const uint32_t related_input_rank = aipp_params_->related_input_rank(); + GE_CHK_STATUS_RET(GetInputNode(graph, target_input, related_input_rank), "get target input node failed"); + + const bool is_edge_configed = aipp_params_->input_edge_idx_size() > 0; + + GE_CHK_BOOL_RET_STATUS( + !is_edge_configed || aipp_params_->input_edge_idx(0) < target_input->GetOutDataNodes().size(), PARAM_INVALID, + "input_edge_idx %u should smaller than out edge size of target input %zu ", aipp_params_->input_edge_idx(0), + target_input->GetOutDataNodes().size()); + + uint32_t i = 0; + for (OutDataAnchorPtr &src_out : target_input->GetAllOutDataAnchors()) { + GE_RETURN_WITH_LOG_IF_FALSE(src_out != nullptr, "OutDataAnchor is null."); + auto vistor = src_out->GetPeerInDataAnchors(); + for (auto it = vistor.begin(); it != vistor.end(); ++it, ++i) { + InDataAnchorPtr dst_in = *it; + GE_RETURN_WITH_LOG_IF_FALSE(dst_in != nullptr, "InDataAnchor is null."); + + if ((is_edge_configed && i == aipp_params_->input_edge_idx(0)) || !is_edge_configed) { + NodePtr dst_node = dst_in->GetOwnerNode(); + OpDescPtr dst_op = dst_node->GetOpDesc(); + if (kInsertAippExceptOp.find(dst_op->GetType()) == kInsertAippExceptOp.end()) { + target_edges.push_back(make_pair(src_out, dst_in)); + continue; + } + + GE_CHK_BOOL_RET_STATUS(!is_edge_configed, PARAM_INVALID, "index %d of 
input node is %s node, can not do aipp", + aipp_params_->input_edge_idx(0), dst_op->GetType().c_str()); + } + } + } + + GE_CHK_BOOL_RET_STATUS(target_edges.size() > 0, FAILED, "get target edges failed"); + + return SUCCESS; +} + +Status AippOp::SetDefaultParams() { + GE_CHECK_NOTNULL(aipp_params_); + const domi::AippOpParams::AippMode aipp_mode = aipp_params_->aipp_mode(); + if (aipp_mode == domi::AippOpParams::static_) { + if (aipp_params_->csc_switch()) { + SetCscDefaultValue(); + } + + SetDtcDefaultValue(); + + GELOGI("parse aipp params:input_format:%s, csc_switch:%d.", + domi::AippOpParams::InputFormat_Name(aipp_params_->input_format()).c_str(), aipp_params_->csc_switch()); + + GELOGI("parse aipp params:mean_chn_0:%d, mean_chn_1:%d, mean_chn_2:%d.", aipp_params_->mean_chn_0(), + aipp_params_->mean_chn_1(), aipp_params_->mean_chn_2()); + + GELOGI("parse aipp params:min_chn_0:%f, min_chn_1:%f, min_chn_2:%f.", aipp_params_->min_chn_0(), + aipp_params_->min_chn_1(), aipp_params_->min_chn_2()); + + GE_IF_BOOL_EXEC(!aipp_params_->crop(), aipp_params_->set_load_start_pos_h(0); + aipp_params_->set_load_start_pos_w(0); aipp_params_->set_crop_size_h(0); + aipp_params_->set_crop_size_w(0);); + + GE_IF_BOOL_EXEC(!aipp_params_->resize(), aipp_params_->set_resize_output_h(0); + aipp_params_->set_resize_output_w(0);); + + GE_IF_BOOL_EXEC(!aipp_params_->padding(), aipp_params_->set_left_padding_size(0); + aipp_params_->set_right_padding_size(0); aipp_params_->set_top_padding_size(0); + aipp_params_->set_bottom_padding_size(0);); + } + + return SUCCESS; +} + +Status AippOp::ValidateParams() { + GE_CHECK_NOTNULL(aipp_params_); + GE_CHK_BOOL_RET_STATUS(aipp_params_->aipp_mode() != domi::AippOpParams::undefined, PARAM_INVALID, + "when insert AIPP op, aipp_mode must be configured as static or dynamic "); + + GE_CHK_BOOL_RET_STATUS(aipp_params_->var_reci_chn_0_size() <= 1, PARAM_INVALID, + "The parameter var_reci_chn_0 can not be configed repeatedly"); + 
GE_CHK_BOOL_RET_STATUS(aipp_params_->var_reci_chn_1_size() <= 1, PARAM_INVALID, + "The parameter var_reci_chn_1 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->var_reci_chn_2_size() <= 1, PARAM_INVALID, + "The parameter var_reci_chn_2 can not be configed repeatedly"); + + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r0c0_size() <= 1, PARAM_INVALID, + "The parameter matrix_r0c0 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r0c1_size() <= 1, PARAM_INVALID, + "The parameter matrix_r0c1 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r0c2_size() <= 1, PARAM_INVALID, + "The parameter matrix_r0c2 can not be configed repeatedly"); + + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r1c0_size() <= 1, PARAM_INVALID, + "The parameter matrix_r1c0 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r1c1_size() <= 1, PARAM_INVALID, + "The parameter matrix_r1c1 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r1c2_size() <= 1, PARAM_INVALID, + "The parameter matrix_r1c2 can not be configed repeatedly"); + + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r2c0_size() <= 1, PARAM_INVALID, + "The parameter matrix_r2c0 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r2c1_size() <= 1, PARAM_INVALID, + "The parameter matrix_r2c1 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->matrix_r2c2_size() <= 1, PARAM_INVALID, + "The parameter matrix_r2c2 can not be configed repeatedly"); + + GE_CHK_BOOL_RET_STATUS(aipp_params_->output_bias_0_size() <= 1, PARAM_INVALID, + "The parameter output_bias_0 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->output_bias_1_size() <= 1, PARAM_INVALID, + "The parameter output_bias_1 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->output_bias_2_size() <= 1, PARAM_INVALID, + "The parameter output_bias_2 can not be 
configed repeatedly"); + + GE_CHK_BOOL_RET_STATUS(aipp_params_->input_bias_0_size() <= 1, PARAM_INVALID, + "The parameter input_bias_0 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->input_bias_1_size() <= 1, PARAM_INVALID, + "The parameter input_bias_1 can not be configed repeatedly"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->input_bias_2_size() <= 1, PARAM_INVALID, + "The parameter input_bias_2 can not be configed repeatedly"); + + GE_CHK_BOOL_RET_STATUS(aipp_params_->input_edge_idx_size() <= 1, PARAM_INVALID, + "The parameter input_edge_idx can not be configed repeatedly"); + + const domi::AippOpParams::AippMode aipp_mode = aipp_params_->aipp_mode(); + if (aipp_mode == domi::AippOpParams::dynamic) { + GE_CHK_BOOL_RET_STATUS(aipp_params_->max_src_image_size() > 0, PARAM_INVALID, + "for dynamic AIPP params, max_src_image_size must greater than 0"); + } else { + GE_CHK_BOOL_RET_STATUS(aipp_params_->input_format() != domi::AippOpParams::UNDEFINED, PARAM_INVALID, + "Input format of AIPP conf is undefined"); + + GE_CHK_BOOL_RET_STATUS(aipp_params_->src_image_size_w() >= 0, PARAM_INVALID, + "src_image_size_w must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->src_image_size_h() >= 0, PARAM_INVALID, + "src_image_size_h must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->load_start_pos_w() >= 0, PARAM_INVALID, + "load_start_pos_w must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->load_start_pos_h() >= 0, PARAM_INVALID, + "load_start_pos_h must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->crop_size_w() >= 0, PARAM_INVALID, + "crop_size_w must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->resize_output_w() >= 0, PARAM_INVALID, + "resize_output_w must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->resize_output_h() >= 0, PARAM_INVALID, + "resize_output_h must not be configed smaller than 0"); + 
GE_CHK_BOOL_RET_STATUS(aipp_params_->left_padding_size() >= 0, PARAM_INVALID, + "left_padding_size must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->right_padding_size() >= 0, PARAM_INVALID, + "right_padding_size must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->top_padding_size() >= 0, PARAM_INVALID, + "top_padding_size must not be configed smaller than 0"); + GE_CHK_BOOL_RET_STATUS(aipp_params_->bottom_padding_size() >= 0, PARAM_INVALID, + "bottom_padding_size must not be configed smaller than 0"); + } + + return SUCCESS; +} + +void AippOp::SetCscDefaultValue() { + GE_CHECK_NOTNULL_JUST_RETURN(aipp_params_); + if (aipp_params_->input_format() == domi::AippOpParams::YUV420SP_U8) { + CHECK_FALSE_EXEC(aipp_params_->matrix_r0c0_size() > 0, aipp_params_->add_matrix_r0c0(kDefaultMatrixR2C0Yuv2Rgb)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r0c1_size() > 0, aipp_params_->add_matrix_r0c1(kDefaultMatrixR2C1Yuv2Rgb)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r0c2_size() > 0, aipp_params_->add_matrix_r0c2(kDefaultMatrixR2C2Yuv2Rgb)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r1c0_size() > 0, aipp_params_->add_matrix_r1c0(kDefaultMatrixR1C0Yuv2Rgb)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r1c1_size() > 0, aipp_params_->add_matrix_r1c1(kDefaultMatrixR1C1Yuv2Rgb)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r1c2_size() > 0, aipp_params_->add_matrix_r1c2(kDefaultMatrixR1C2Yuv2Rgb)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r2c0_size() > 0, aipp_params_->add_matrix_r2c0(kDefaultMatrixR0C0Yuv2Rgb)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r2c1_size() > 0, aipp_params_->add_matrix_r2c1(kDefaultMatrixR0C1Yuv2Rgb)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r2c2_size() > 0, aipp_params_->add_matrix_r2c2(kDefaultMatrixR0C2Yuv2Rgb)); + } else { + CHECK_FALSE_EXEC(aipp_params_->matrix_r0c0_size() > 0, aipp_params_->add_matrix_r0c0(kDefaultMatrixR0C0Rgb2Yuv)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r0c1_size() > 0, 
aipp_params_->add_matrix_r0c1(kDefaultMatrixR0C1Rgb2Yuv)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r0c2_size() > 0, aipp_params_->add_matrix_r0c2(kDefaultMatrixR0C2Rgb2Yuv)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r1c0_size() > 0, aipp_params_->add_matrix_r1c0(kDefaultMatrixR1C0Rgb2Yuv)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r1c1_size() > 0, aipp_params_->add_matrix_r1c1(kDefaultMatrixR1C1Rgb2Yuv)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r1c2_size() > 0, aipp_params_->add_matrix_r1c2(kDefaultMatrixR1C2Rgb2Yuv)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r2c0_size() > 0, aipp_params_->add_matrix_r2c0(kDefaultMatrixR2C0Rgb2Yuv)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r2c1_size() > 0, aipp_params_->add_matrix_r2c1(kDefaultMatrixR2C1Rgb2Yuv)); + CHECK_FALSE_EXEC(aipp_params_->matrix_r2c2_size() > 0, aipp_params_->add_matrix_r2c2(kDefaultMatrixR2C2Rgb2Yuv)); + } + CHECK_FALSE_EXEC(aipp_params_->input_bias_0_size() > 0, aipp_params_->add_input_bias_0(kDefaultInputBias0)); + CHECK_FALSE_EXEC(aipp_params_->input_bias_1_size() > 0, aipp_params_->add_input_bias_1(kDefaultInputBias1)); + CHECK_FALSE_EXEC(aipp_params_->input_bias_2_size() > 0, aipp_params_->add_input_bias_2(kDefaultInputBias2)); + CHECK_FALSE_EXEC(aipp_params_->output_bias_0_size() > 0, aipp_params_->add_output_bias_0(kDefaultOutputBias0)); + CHECK_FALSE_EXEC(aipp_params_->output_bias_1_size() > 0, aipp_params_->add_output_bias_1(kDefaultOutputBias1)); + CHECK_FALSE_EXEC(aipp_params_->output_bias_2_size() > 0, aipp_params_->add_output_bias_2(kDefaultOutputBias2)); +} + +void AippOp::SetDtcDefaultValue() { + GE_CHECK_NOTNULL_JUST_RETURN(aipp_params_); + CHECK_FALSE_EXEC(aipp_params_->var_reci_chn_0_size() > 0, aipp_params_->add_var_reci_chn_0(kDefaultVarReciChn)); + CHECK_FALSE_EXEC(aipp_params_->var_reci_chn_1_size() > 0, aipp_params_->add_var_reci_chn_1(kDefaultVarReciChn)); + CHECK_FALSE_EXEC(aipp_params_->var_reci_chn_2_size() > 0, aipp_params_->add_var_reci_chn_2(kDefaultVarReciChn)); +} + +Status 
AippOp::GenerateOpDesc(OpDescPtr op_desc) { + GE_CHECK_NOTNULL(op_desc); + + static int op_idx = 0; + op_desc->SetName(std::string("aipp_node").append(std::to_string(op_idx++))); + op_desc->SetType(AIPP); + + // Add two InputDesc, add the second after the first one is added successfully. + if ((op_desc->AddInputDesc(GeTensorDesc()) != GRAPH_SUCCESS) || + (op_desc->AddInputDesc(GeTensorDesc()) != GRAPH_SUCCESS)) { + GELOGE(FAILED, "failed to add input desc"); + return FAILED; + } + + if (op_desc->AddOutputDesc(GeTensorDesc()) != GRAPH_SUCCESS) { + GELOGE(FAILED, "add output desc failed."); + return FAILED; + } + GeAttrValue::NamedAttrs aipp_attrs; + ConvertParamToAttr(aipp_attrs); + + GE_IF_BOOL_EXEC(!AttrUtils::SetNamedAttrs(op_desc, ATTR_NAME_AIPP, aipp_attrs), + GELOGE(FAILED, "failed to set ATTR_NAME_AIPP"); return FAILED); + + return SUCCESS; +} + +void AippOp::ConvertParamToAttr(GeAttrValue::NamedAttrs &aipp_attrs) { + GE_CHECK_NOTNULL_JUST_RETURN(aipp_params_); + SAVE_AIPP_ATTR(aipp_mode, GeAttrValue::INT); + + if (aipp_params_->aipp_mode() == domi::AippOpParams::static_) { + SAVE_AIPP_ATTR(input_format, GeAttrValue::INT); + SAVE_AIPP_ATTR(csc_switch, GeAttrValue::BOOL); + SAVE_AIPP_ATTR(crop, GeAttrValue::BOOL); + SAVE_AIPP_ATTR(resize, GeAttrValue::BOOL); + SAVE_AIPP_ATTR(load_start_pos_w, GeAttrValue::INT); + SAVE_AIPP_ATTR(load_start_pos_h, GeAttrValue::INT); + SAVE_AIPP_ATTR(crop_size_w, GeAttrValue::INT); + SAVE_AIPP_ATTR(crop_size_h, GeAttrValue::INT); + SAVE_AIPP_ATTR(resize, GeAttrValue::BOOL); + SAVE_AIPP_ATTR(resize_output_w, GeAttrValue::INT); + SAVE_AIPP_ATTR(resize_output_h, GeAttrValue::INT); + SAVE_AIPP_ATTR(padding, GeAttrValue::BOOL); + SAVE_AIPP_ATTR(left_padding_size, GeAttrValue::INT); + SAVE_AIPP_ATTR(right_padding_size, GeAttrValue::INT); + SAVE_AIPP_ATTR(top_padding_size, GeAttrValue::INT); + SAVE_AIPP_ATTR(bottom_padding_size, GeAttrValue::INT); + SAVE_AIPP_ATTR(src_image_size_w, GeAttrValue::INT); + SAVE_AIPP_ATTR(src_image_size_h, 
GeAttrValue::INT); + SAVE_AIPP_ATTR(cpadding_value, GeAttrValue::FLOAT); + SAVE_AIPP_ATTR(rbuv_swap_switch, GeAttrValue::BOOL); + SAVE_AIPP_ATTR(ax_swap_switch, GeAttrValue::BOOL); + SAVE_AIPP_ATTR(single_line_mode, GeAttrValue::BOOL); + SAVE_AIPP_ATTR(mean_chn_0, GeAttrValue::INT); + SAVE_AIPP_ATTR(mean_chn_1, GeAttrValue::INT); + SAVE_AIPP_ATTR(mean_chn_2, GeAttrValue::INT); + SAVE_AIPP_ATTR(min_chn_0, GeAttrValue::FLOAT); + SAVE_AIPP_ATTR(min_chn_1, GeAttrValue::FLOAT); + SAVE_AIPP_ATTR(min_chn_2, GeAttrValue::FLOAT); + SAVE_AIPP_ATTR_LIST(var_reci_chn_0, GeAttrValue::FLOAT); + SAVE_AIPP_ATTR_LIST(var_reci_chn_1, GeAttrValue::FLOAT); + SAVE_AIPP_ATTR_LIST(var_reci_chn_2, GeAttrValue::FLOAT); + SAVE_AIPP_ATTR_LIST(matrix_r0c0, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(matrix_r0c1, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(matrix_r0c2, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(matrix_r1c0, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(matrix_r1c1, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(matrix_r1c2, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(matrix_r2c0, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(matrix_r2c1, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(matrix_r2c2, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(output_bias_0, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(output_bias_1, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(output_bias_2, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(input_bias_0, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(input_bias_1, GeAttrValue::INT); + SAVE_AIPP_ATTR_LIST(input_bias_2, GeAttrValue::INT); + } else { + SAVE_AIPP_ATTR(max_src_image_size, GeAttrValue::INT); + SAVE_AIPP_ATTR(support_rotation, GeAttrValue::BOOL); + } +} +} // namespace ge diff --git a/src/ge/graph/preprocess/insert_op/ge_aipp_op.h b/src/ge/graph/preprocess/insert_op/ge_aipp_op.h new file mode 100644 index 00000000..0e813d95 --- /dev/null +++ b/src/ge/graph/preprocess/insert_op/ge_aipp_op.h @@ -0,0 +1,86 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the 
Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PREPROCESS_INSERT_OP_GE_AIPP_OP_H_ +#define GE_GRAPH_PREPROCESS_INSERT_OP_GE_AIPP_OP_H_ + +#include +#include + +#include "common/op/attr_value_util.h" +#include "graph/preprocess/insert_op/base_insert_op.h" +#include "proto/insert_op.pb.h" + +namespace ge { +class AippOp : public InsertOpBase { + public: + AippOp() {} + Status Init(domi::AippOpParams *aipp_params); + + ~AippOp() override; + + /// + /// @ingroup domi_omg + /// @brief Set Default Params + /// + Status SetDefaultParams() override; + + /// + /// @ingroup domi_omg + /// @brief Validate Params + /// + Status ValidateParams() override; + + protected: + /// + /// @ingroup domi_omg + /// @brief Generate Op Desc + /// + + Status GenerateOpDesc(ge::OpDescPtr op_desc) override; + + /// + /// @ingroup domi_omg + /// @brief Get Target Position + /// @param [in] graph graph + /// @param [in|out] target_input target input + /// @param [in|out] target_edges target edges + /// + Status GetTargetPosition(ge::ComputeGraphPtr graph, ge::NodePtr &target_input, + std::vector> &target_edges) override; + + domi::AippOpParams::AippMode GetAippMode() override; + + private: + AippOp& operator=(const AippOp& aipp_op); + AippOp(const AippOp& aipp_op); + + /// + /// @ingroup domi_omg + /// @brief Convert Param To Attr + /// + void ConvertParamToAttr(ge::GeAttrValue::NamedAttrs &aipp_attrs); + void SetCscDefaultValue(); + + void SetDtcDefaultValue(); + + domi::AippOpParams 
*aipp_params_; + ge::NodePtr aipp_node_ = nullptr; +}; +} // namespace ge + +#endif // GE_GRAPH_PREPROCESS_INSERT_OP_GE_AIPP_OP_H_ + diff --git a/src/ge/graph/preprocess/insert_op/util_insert_aipp_op.cc b/src/ge/graph/preprocess/insert_op/util_insert_aipp_op.cc new file mode 100644 index 00000000..c1f1f344 --- /dev/null +++ b/src/ge/graph/preprocess/insert_op/util_insert_aipp_op.cc @@ -0,0 +1,408 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/preprocess/insert_op/util_insert_aipp_op.h" + +#include +#include + +#include "common/dynamic_aipp.h" +#include "common/ge/ge_util.h" +#include "common/op/ge_op_utils.h" +#include "common/util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/omg/omg_inner_types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/preprocess/insert_op/ge_aipp_op.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" + +using domi::AippOpParams; + +namespace ge { +Status InsertNewOpUtil::Init() { + insert_op_conf_.reset((new (std::nothrow) domi::InsertNewOps())); + GE_CHECK_NOTNULL(insert_op_conf_); + return SUCCESS; +} + +namespace { +constexpr uint64_t kMinTransferShape = 3; +constexpr int64_t kMaxBatchCountNum = 32768; + +Status ExpandDimsAndNormalizedToNCHW(ge::Format src_format, const std::vector &src_dims, + std::vector &nchw_dims) { + GELOGD("Enter ExpandDimsAndNormalizedToNCHW process!"); + // The input of 3-dimension and 4-dimension is considered as picture dimension, + // which needs to be converted according to specific format + if (src_dims.size() != DIM_DEFAULT_SIZE && src_dims.size() != kMinTransferShape) { + GELOGE(PARAM_INVALID, "expand and normalize format failed, src size [%lu] is not in range [3,4]", src_dims.size()); + return PARAM_INVALID; + } + + switch (src_format) { + case ge::FORMAT_NCHW: + if (src_dims.size() == DIM_DEFAULT_SIZE) { + nchw_dims = src_dims; + } else { + nchw_dims.push_back(1); + nchw_dims.push_back(src_dims[0]); + nchw_dims.push_back(src_dims[1]); + nchw_dims.push_back(src_dims[2]); + } + break; + case ge::FORMAT_NHWC: + if (src_dims.size() == DIM_DEFAULT_SIZE) { + nchw_dims.push_back(src_dims[NHWC_DIM_N]); + nchw_dims.push_back(src_dims[NHWC_DIM_C]); + nchw_dims.push_back(src_dims[NHWC_DIM_H]); + 
nchw_dims.push_back(src_dims[NHWC_DIM_W]); + } else { + nchw_dims.push_back(1); + nchw_dims.push_back(src_dims[HWC_DIM_C]); + nchw_dims.push_back(src_dims[HWC_DIM_H]); + nchw_dims.push_back(src_dims[HWC_DIM_W]); + } + break; + default: + GELOGE(PARAM_INVALID, "Not support src format: %d", src_format); + return PARAM_INVALID; + } + + return ge::SUCCESS; +} +Status GetDataOpDims(const ge::NodePtr data_node, ge::Format format, std::vector &nchw_dims) { + GELOGD("Enter GetDataOpDims process!"); + + auto data_input_desc_ptr = data_node->GetOpDesc()->GetInputDescPtr(0); // GetOpDesc() has check null before logic + if (data_input_desc_ptr == nullptr) { + GELOGE(PARAM_INVALID, "data_node's input desc object is null"); + return PARAM_INVALID; + } + auto shape = data_input_desc_ptr->GetShape().GetDims(); + if ((shape.size() < kMinTransferShape) && (shape.size() > DIM_DEFAULT_SIZE)) { + GELOGE(PARAM_INVALID, "when dynamic aipp, shape must be in range [3, 4], but is %lu", shape.size()); + return PARAM_INVALID; + } + + return ExpandDimsAndNormalizedToNCHW(format, shape, nchw_dims); +} +} // namespace +Status InsertNewOpUtil::Parse(const char *conf_path) { + if (conf_path == nullptr || *conf_path == '\0') { + return SUCCESS; + } + + GE_CHK_BOOL_RET_STATUS(ReadProtoFromText(conf_path, insert_op_conf_.get()), FAILED, "Read AIPP conf file error: %s", + conf_path); + + GE_CHK_STATUS_RET(CheckPositionNotRepeat(), "Check insert position of op failed"); + + for (int i = 0; i < insert_op_conf_->aipp_op_size(); i++) { + domi::AippOpParams *aipp_op_params = insert_op_conf_->mutable_aipp_op(i); + std::unique_ptr aipp_op(new (std::nothrow) AippOp()); + GE_CHECK_NOTNULL(aipp_op); + GE_CHK_STATUS_RET(aipp_op->Init(aipp_op_params), "Aipp op init failed."); + insert_ops_.push_back(std::move(aipp_op)); + } + + for (auto &dynamic_op : insert_ops_) { + GE_CHECK_NOTNULL(dynamic_op); + GE_CHK_STATUS_RET(dynamic_op->ValidateParams(), "Validate insert_op config file failed"); + 
GE_CHK_STATUS_RET(dynamic_op->SetDefaultParams(), "Set default value of insert_op failed"); + } + + return SUCCESS; +} + +Status InsertNewOpUtil::AddAippInputData(const ge::NodePtr &aipp_node, const ge::ComputeGraphPtr &graph) { + GELOGD("Enter add aipp data node process!"); + static int index = 0; + + // get previous node, it should be DATA + auto data_node = aipp_node->GetInDataNodes().at(0); + if (data_node->GetOpDesc() == nullptr) { + GELOGE(PARAM_INVALID, "data node has no opdesc!"); + return PARAM_INVALID; + } + if (data_node->GetOpDesc()->GetType() != DATA) { + GELOGE(PARAM_INVALID, "aipp node should follow one data node, but previous node's type is %s", + data_node->GetOpDesc()->GetType().c_str()); + return PARAM_INVALID; + } + auto ori_data_format = static_cast(static_cast(domi::GetContext().format)); + if (ori_data_format != FORMAT_NCHW && ori_data_format != FORMAT_NHWC) { + GELOGE(PARAM_INVALID, "when dynamic aipp,input_format must be NCHW or NHWC, but [%s] format is %s", + data_node->GetName().c_str(), ge::TypeUtils::FormatToSerialString(ori_data_format).c_str()); + return PARAM_INVALID; + } + + std::vector nchw_dims; + auto ret = GetDataOpDims(data_node, ori_data_format, nchw_dims); + if (ret != ge::SUCCESS) { + GELOGE(PARAM_INVALID, "get data_node dims and transfer to nchw_dims failed!"); + return PARAM_INVALID; + } + + auto batch_count = nchw_dims[NCHW_DIM_N]; + // new add aipp_data ops for dynamic aipp param input + OpDescPtr op_desc_ptr_data = + ge::MakeShared(std::string("aipp_data_").append(std::to_string(index++)), AIPPDATA); + + // calc max size + if (batch_count <= 0 || batch_count > kMaxBatchCountNum) { + GELOGE(PARAM_INVALID, "batch_cout must be in range(0, %ld]", kMaxBatchCountNum); + return PARAM_INVALID; + } + uint64_t max_dynamic_aipp_size = sizeof(kAippDynamicPara) + (batch_count - 1) * sizeof(kAippDynamicBatchPara); + + GELOGI("Add aipp input data, batch count: %ld, max_dynamic_aipp_size: %ld", batch_count, max_dynamic_aipp_size); + 
vector input_shape_dim(1, 1); + input_shape_dim[0] = static_cast(max_dynamic_aipp_size); + GeShape input_shape(input_shape_dim); + // construct input tensor + GeTensorDesc input_tensor(input_shape, FORMAT_ND, DT_UINT8); + TensorUtils::SetReuseInput(input_tensor, false); + TensorUtils::SetSize(input_tensor, static_cast(max_dynamic_aipp_size)); + + auto stat1 = op_desc_ptr_data->AddInputDesc(input_tensor); + + GeShape output_shape(input_shape_dim); + // construct output tensor + GeTensorDesc output_tensor(output_shape, FORMAT_ND, DT_UINT8); + TensorUtils::SetReuseInput(output_tensor, false); + TensorUtils::SetSize(output_tensor, static_cast(max_dynamic_aipp_size)); + auto stat2 = op_desc_ptr_data->AddOutputDesc(output_tensor); + + NodePtr aipp_data_node_ptr = graph->AddNode(op_desc_ptr_data); + if (aipp_data_node_ptr == nullptr) { + GELOGE(INTERNAL_ERROR, "graph add node failed."); + return INTERNAL_ERROR; + } + // add node desc for aipp node + GE_CHECK_NOTNULL(aipp_node->GetOpDesc()); + auto stat3 = aipp_node->GetOpDesc()->UpdateInputDesc(1, output_tensor); + if (stat1 != SUCCESS || stat2 != SUCCESS || stat3 != SUCCESS) { + GELOGE(INTERNAL_ERROR, "node process desc failed!"); + return INTERNAL_ERROR; + } + // aipp_node should have two input data but now tbe only one input + if (GraphUtils::AddEdge(aipp_data_node_ptr->GetOutDataAnchor(0), aipp_node->GetInDataAnchor(1)) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "Add Anchor anchor between aipp data node and aipp failed!"); + return INTERNAL_ERROR; + } + + return SUCCESS; +} + +Status InsertNewOpUtil::InsertNewOps(const ComputeGraphPtr &graph) { + GE_CHECK_NOTNULL(graph); + for (auto &insert_op : insert_ops_) { + GE_CHK_STATUS_RET(insert_op->InsertOpToGraph(graph), "insert op to graph failed"); + } + + GE_CHK_STATUS_RET(CheckGraph(graph), "after inserting all ops, check graph failed"); + + GE_CHK_STATUS_RET(graph->TopologicalSorting(), "after insert dynamic op, sort graph failed"); + + ClearNewOps(); + + return SUCCESS; +} 
+ +Status InsertNewOpUtil::InsertAippOps(ComputeGraphPtr &graph, std::string &aippConfigPath) { + GE_CHECK_NOTNULL(graph); + for (auto &insert_op : insert_ops_) { + AippOpParams::AippMode aipp_mode = insert_op->GetAippMode(); + ge::NodePtr aipp_node = nullptr; + GE_CHK_STATUS_RET(insert_op->InsertAippToGraph(graph, aippConfigPath, aipp_node), "insert op to graph failed"); + if (aipp_node == nullptr) { + GELOGE(FAILED, "aipp node is null!"); + return FAILED; + } + if (aipp_mode == AippOpParams::dynamic) { + Status stat = AddAippInputData(aipp_node, graph); + if (stat != SUCCESS) { + GELOGE(FAILED, "Add aipp input data failed"); + return FAILED; + } + } + } + + GE_CHK_STATUS_RET(CheckGraph(graph), "after inserting all ops, check graph failed"); + + GE_CHK_STATUS_RET(graph->TopologicalSorting(), "after insert dynamic op, sort graph failed"); + + ClearNewOps(); + + return SUCCESS; +} + +void InsertNewOpUtil::ClearNewOps() { + if (insert_op_conf_ != nullptr) { + insert_op_conf_->Clear(); + insert_ops_.clear(); + } +} + +Status InsertNewOpUtil::CheckPositionNotRepeat() { + for (int i = 0; i < insert_op_conf_->aipp_op_size(); i++) { + const domi::AippOpParams *item = insert_op_conf_->mutable_aipp_op(i); + + for (int j = i + 1; j < insert_op_conf_->aipp_op_size(); j++) { + const domi::AippOpParams *another_item = insert_op_conf_->mutable_aipp_op(j); + + GE_IF_BOOL_EXEC(item->related_input_rank() != another_item->related_input_rank(), continue;); + + GE_IF_BOOL_EXEC( + item->input_edge_idx_size() == 0 || another_item->input_edge_idx_size() == 0 || + item->input_edge_idx(0) == another_item->input_edge_idx(0), + GELOGE(PARAM_INVALID, + "Can not insert aipp op to the same position! 
please check related_input_rank and input_edge_idx."); + return PARAM_INVALID;); + } + } + + return SUCCESS; +} + +Status InsertNewOpUtil::CheckGraph(const ComputeGraphPtr &graph) { + GE_CHECK_NOTNULL(graph); + domi::AippOpParams::AippMode aippMode = domi::AippOpParams::undefined; + + for (const auto &node : graph->GetAllNodes()) { + if (node->GetType() != DATA) { + continue; + } + + std::vector aippNodes; + for (const auto &anchor : node->GetAllOutDataAnchors()) { + for (const auto &inAnchor : anchor->GetPeerInDataAnchors()) { + const std::string &nodeType = inAnchor->GetOwnerNode()->GetType(); + + GE_IF_BOOL_EXEC(nodeType == SSDPRIORBOX || nodeType == SHAPE, continue;); + + GE_CHK_BOOL_RET_STATUS(aippNodes.size() == 0 || nodeType == AIPP, PARAM_INVALID, + "Can not config part of outputs of Data node to support AIPP, config all of the " + "outputs of Data to support AIPP, or config none of them"); + + if (nodeType == AIPP) { + aippNodes.push_back(inAnchor->GetOwnerNode()); + continue; + } + } + } + + std::unique_ptr aippParams(new (std::nothrow) domi::AippOpParams()); + GE_CHECK_NOTNULL(aippParams); + + GE_IF_BOOL_EXEC(aippNodes.size() > 0, GE_CHK_STATUS(GetAippParams(aippParams, aippNodes[0])); + aippMode = (aippMode == domi::AippOpParams::undefined) ? 
aippParams->aipp_mode() : aippMode; + GE_CHK_BOOL_RET_STATUS(aippMode == aippParams->aipp_mode(), PARAM_INVALID, + "The aipp_mode of all aipp_op must be the same");); + + GE_IF_BOOL_EXEC(aippNodes.size() > 1, for (decltype(aippNodes)::size_type i = 1; i < aippNodes.size(); i++) { + std::unique_ptr currAippParam(new (std::nothrow) domi::AippOpParams()); + GE_CHECK_NOTNULL(currAippParam); + GE_CHK_STATUS(GetAippParams(currAippParam, aippNodes[i])); + + GE_CHK_BOOL_RET_STATUS(aippMode == currAippParam->aipp_mode(), PARAM_INVALID, + "The aipp_mode of all aipp_op must be the same"); + if (aippMode == domi::AippOpParams::static_) { + GE_CHK_BOOL_RET_STATUS(aippParams->input_format() == currAippParam->input_format(), PARAM_INVALID, + "The input_format of all aipp_ops after one Data should be the same"); + GE_CHK_BOOL_RET_STATUS(aippParams->src_image_size_w() == currAippParam->src_image_size_w(), PARAM_INVALID, + "The src_image_size_w of all aipp_ops after one Data should be the same"); + GE_CHK_BOOL_RET_STATUS(aippParams->src_image_size_h() == currAippParam->src_image_size_h(), PARAM_INVALID, + "The src_image_size_h of all aipp_ops after one Data should be the same"); + } else { + GE_CHK_BOOL_RET_STATUS(aippParams->max_src_image_size() == currAippParam->max_src_image_size(), PARAM_INVALID, + "The max_src_image_size of all aipp_ops after one Data should be the same"); + } + }); + } + + return SUCCESS; +} + +Status InsertNewOpUtil::GetAippParams(const std::unique_ptr &aipp_params, + const NodePtr &aipp_node) { + GE_CHECK_NOTNULL(aipp_node); + ge::GeAttrValue::NamedAttrs aipp_attr; + const OpDescPtr tmpOpPtr = aipp_node->GetOpDesc(); + GE_CHECK_NOTNULL(tmpOpPtr); + GE_CHK_BOOL_RET_STATUS(AttrUtils::GetNamedAttrs(tmpOpPtr, ATTR_NAME_AIPP, aipp_attr), FAILED, + "Aipp node should contain param aipp!"); + GE_CHK_STATUS_RET(OpUtils::ConvertAippParams(aipp_attr, aipp_params.get()), "get aipp params failed"); + + return SUCCESS; +} + +Status 
InsertNewOpUtil::AddMultiShapeInputData(const ge::ComputeGraphPtr &graph) { + GE_CHECK_NOTNULL(graph); + for (auto &node : graph->GetDirectNode()) { + GE_CHECK_NOTNULL(node->GetOpDesc()); + if (node->GetOpDesc()->GetType() != MULTISHAPE) { + continue; + } + + GE_CHK_BOOL_RET_STATUS(node->GetInDataNodes().size() == 1, FAILED, + "multi_shape node should follow one data node, but size of input edges is %zu", + node->GetInDataNodes().size()); + + NodePtr dataNode = node->GetInDataNodes().at(0); + GE_CHK_BOOL_RET_STATUS(dataNode->GetOpDesc()->GetType() == DATA, FAILED, + "multi_shape node should follow one data node, but previous node's type is %s", + dataNode->GetOpDesc()->GetType().c_str()); + + OpDescPtr opDescPtrData = MakeShared(std::string("multi_shape_data"), DATA); + if (opDescPtrData == nullptr) { + return PARAM_INVALID; + } + + const uint32_t shapeSize = 4; + const uint32_t REALDIM_CNT = 4; + + vector inputShapeDim(4, 1); // 4 dimensions: NCHW + inputShapeDim[0] = shapeSize; + + GeShape inputShape(inputShapeDim); + GeTensorDesc input_tensor(inputShape, FORMAT_NCHW, DT_UINT32); + TensorUtils::SetReuseInput(input_tensor, false); + TensorUtils::SetSize(input_tensor, shapeSize * sizeof(uint32_t)); + GE_CHK_STATUS_RET(opDescPtrData->AddInputDesc(input_tensor)); + + GeShape outputShape(inputShapeDim); + GeTensorDesc output_tensor(outputShape, FORMAT_NCHW, DT_UINT32); + TensorUtils::SetReuseInput(output_tensor, false); + TensorUtils::SetSize(output_tensor, shapeSize * sizeof(uint32_t)); + TensorUtils::SetRealDimCnt(output_tensor, REALDIM_CNT); + + GE_CHK_STATUS_RET(opDescPtrData->AddOutputDesc(output_tensor), "AddOutputDesc failed!"); + + NodePtr shapeDataNodePtr = graph->AddNode(opDescPtrData); + GE_CHECK_NOTNULL(shapeDataNodePtr); + GE_CHK_STATUS_RET(GraphUtils::AddEdge(shapeDataNodePtr->GetOutDataAnchor(0), node->GetInDataAnchor(1)), + "Add Anchor anchor between shape data and multi_shape failed!"); + } + + return SUCCESS; +} +} // namespace ge diff --git 
a/src/ge/graph/preprocess/insert_op/util_insert_aipp_op.h b/src/ge/graph/preprocess/insert_op/util_insert_aipp_op.h new file mode 100644 index 00000000..e6dc28ce --- /dev/null +++ b/src/ge/graph/preprocess/insert_op/util_insert_aipp_op.h @@ -0,0 +1,69 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_GRAPH_PREPROCESS_INSERT_OP_UTIL_INSERT_AIPP_OP_H_ +#define GE_GRAPH_PREPROCESS_INSERT_OP_UTIL_INSERT_AIPP_OP_H_ + +#include +#include +#include + +#include "graph/compute_graph.h" +#include "graph/preprocess/insert_op/base_insert_op.h" +#include "proto/insert_op.pb.h" + +namespace ge { +enum AippType { kOldType, kNewType }; + +class InsertNewOpUtil { + public: + static InsertNewOpUtil &Instance() { + static InsertNewOpUtil instance; + return instance; + } + + Status Init(); + + Status Parse(const char *conf_path); + + Status InsertNewOps(const ge::ComputeGraphPtr &graph); + + Status InsertAippOps(ge::ComputeGraphPtr &graph, std::string &aipp_config_path); + + void ClearNewOps(); + + private: + Status CheckPositionNotRepeat(); + + Status AddMultiShapeInputData(const ge::ComputeGraphPtr &graph); + + Status GetAippParams(const std::unique_ptr &aipp_params, const ge::NodePtr &aipp_node); + + Status CheckGraph(const ge::ComputeGraphPtr &graph); + + InsertNewOpUtil() {} + + Status AddAippInputData(const ge::NodePtr &aipp_node, const ge::ComputeGraphPtr &graph); + + ~InsertNewOpUtil() = 
default; + + std::vector> insert_ops_; + + std::unique_ptr insert_op_conf_; +}; +} // namespace ge + +#endif // GE_GRAPH_PREPROCESS_INSERT_OP_UTIL_INSERT_AIPP_OP_H_ diff --git a/src/ge/graph/preprocess/multi_batch_copy_graph.cc b/src/ge/graph/preprocess/multi_batch_copy_graph.cc new file mode 100644 index 00000000..c7ab6fef --- /dev/null +++ b/src/ge/graph/preprocess/multi_batch_copy_graph.cc @@ -0,0 +1,915 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/preprocess/multi_batch_copy_graph.h" + +#include +#include +#include + +#include "common/formats/utils/formats_trans_utils.h" +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/string_util.h" +#include "framework/common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_context.h" +#include "graph/passes/prune_pass.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/node_utils.h" + +namespace ge { +namespace multibatch { +namespace { +const int kSwitchNDataIndex = 0; +const int kSwitchNPredIndex = 1; +const int kDataOutIndex = 0; +const int kDataInIndex = 0; +const int kMergeDataOutIndex = 0; +const size_t kMaxShapesCount = 16; +const size_t kMinShapesCount = 2; + +inline bool IsDataLikeType(const std::string &node_type) { + return (node_type == DATA) || (node_type == AIPP); +} + +NodePtr InsertMergeNodeToGraph(const std::string &name, size_t input_num, const ComputeGraphPtr &graph) { + OpDescPtr desc = MakeShared(); + if (desc == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to insert merge node, name %s", name.c_str()); + return nullptr; + } + desc->SetName(name); + desc->SetType(MERGE); + GeTensorDesc tensor_desc; + for (size_t i = 0; i < input_num; ++i) { + auto ret = desc->AddInputDesc("x" + std::to_string(i), tensor_desc); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to create merge node %s, failed to add input %zu, error-code %u", name.c_str(), i, + ret); + return nullptr; + } + } + auto ret = desc->AddOutputDesc("y", tensor_desc); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to create merge node %s, failed to add output 'y', error-code %u", name.c_str(), + ret); + return nullptr; + } + tensor_desc.SetDataType(DT_INT32); + ret = desc->AddOutputDesc("value_index", tensor_desc); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, 
"Failed to create merge node %s, failed to add output 'value_index', error-code %u", + name.c_str(), ret); + return nullptr; + } + + if (!AttrUtils::SetBool(desc, ATTR_INSERT_BY_MBATCH, true)) { + GELOGE(INTERNAL_ERROR, "Failed to create merge node %s, failed to add attr", name.c_str()); + return nullptr; + } + return graph->AddNode(desc); +} + +NodePtr InsertCopyNode(const NodePtr &node, const std::string &name) { + auto src_op_desc = node->GetOpDesc(); + if (src_op_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to copy node %s to %s, the OpDesc is null", node->GetName().c_str(), name.c_str()); + return nullptr; + } + auto desc = AttrUtils::CopyOpDesc(src_op_desc); + if (desc == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to create op desc for copy node for node %s name %s", node->GetName().c_str(), + name.c_str()); + return nullptr; + } + desc->SetName(name); + desc->CopyAttrsFrom(*src_op_desc); + for (uint32_t i = 0; i < node->GetAllInDataAnchorsSize(); ++i) { + auto input_desc = desc->MutableInputDesc(i); + if (input_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to get input desc by index %u from node %s when copy from %s", i, + desc->GetName().c_str(), node->GetName().c_str()); + return nullptr; + } + input_desc->CopyAttrsFrom(src_op_desc->GetInputDesc(i)); + } + for (uint32_t i = 0; i < node->GetAllOutDataAnchorsSize(); ++i) { + auto output_desc = desc->MutableOutputDesc(i); + if (output_desc == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to get output desc by index %u from node %s when copy from %s", i, + desc->GetName().c_str(), node->GetName().c_str()); + return nullptr; + } + output_desc->CopyAttrsFrom(src_op_desc->GetOutputDesc(i)); + } + auto graph = node->GetOwnerComputeGraph(); + return graph->AddNode(desc); +} + +Status CalcShape(const std::vector &batch_shape, GeShape &data_shape) { + size_t batch_shape_index = 0; + for (size_t i = 0; i < data_shape.GetDimNum(); ++i) { + if (data_shape.GetDim(i) < 0) { + if (batch_shape_index >= 
batch_shape.size()) { + GELOGE(PARAM_INVALID, + "Failed to calc tensor shape, the batch shape count %zu, doees not match the data shape %s", + batch_shape.size(), data_shape.ToString().c_str()); + return PARAM_INVALID; + } + data_shape.SetDim(i, batch_shape[batch_shape_index++]); + } + } + if (batch_shape_index != batch_shape.size()) { + GELOGE(PARAM_INVALID, "Failed to calc tensor shape, the batch shape count %zu, does not match the data shape %s", + batch_shape.size(), data_shape.ToString().c_str()); + return PARAM_INVALID; + } + return SUCCESS; +} + +bool IsAllDimsPositive(const std::vector &dims) { + for (auto dim : dims) { + if (dim <= 0) { + return false; + } + } + return true; +} + +NodePtr InsertConst(const std::string &name, const ComputeGraphPtr &graph) { + auto desc = MakeShared(); + if (desc == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to create const op %s, out of memory", name.c_str()); + return nullptr; + } + desc->SetName(name); + desc->SetType(CONSTANT); + GeTensor tensor; + tensor.SetData(std::vector({0})); + if (!AttrUtils::SetTensor(desc, ATTR_NAME_WEIGHTS, tensor)) { + GELOGE(OUT_OF_MEMORY, "Failed to init tensor value for const %s", name.c_str()); + return nullptr; + } + if (!AttrUtils::SetBool(desc, ATTR_INSERT_BY_MBATCH, true)) { + GELOGE(OUT_OF_MEMORY, "Failed to set insert flag for const node %s", name.c_str()); + return nullptr; + } + if (desc->AddOutputDesc(GeTensorDesc()) != GRAPH_SUCCESS) { + GELOGE(OUT_OF_MEMORY, "Failed to add output desc for const node %s", name.c_str()); + return nullptr; + } + return graph->AddNode(desc); +} + +bool IsOnlyOutputToAipp(const NodePtr &node) { + for (const auto &out_node : node->GetOutDataNodes()) { + if (out_node->GetType() != AIPP) { + return false; + } + } + return true; +} + +Status CheckDataShape(const std::vector &nodes) { + size_t unknown_shape_count = 0; + for (const auto &node : nodes) { + if (node->GetType() != DATA) { + continue; + } + for (auto dim : NodeUtils::GetOutputDesc(*node, 
kDataOutIndex).GetShape().GetDims()) { + if (dim < 0) { + unknown_shape_count++; + break; + } + } + } + if (unknown_shape_count == 0) { + GELOGE(PARAM_INVALID, "There are no unknown shape data, the dynamic batch/imagesize options will be ignored"); + return PARAM_INVALID; + } + + return SUCCESS; +} +} // namespace + +Status MultiBatchGraphCopyer::CopyGraph() { + auto ret = Init(); + if (ret != SUCCESS) { + return ret; + } + + ret = CheckDataShape(origin_data_nodes_); + if (ret != SUCCESS) { + return ret; + } + + ret = CreateNewNodes(); + if (ret != SUCCESS) { + return ret; + } + + ret = LinkEdges(); + if (ret != SUCCESS) { + return ret; + } + + GELOGI("Begin to remove useless nodes by prune pass after copy process"); + PrunePass prune_pass; + ret = prune_pass.Run(graph_); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to prune"); + return ret; + } + return CheckCopyResult(origin_data_nodes_); +} + +Status MultiBatchGraphCopyer::Init() { + auto ret = CheckArguments(); + if (ret != SUCCESS) { + return ret; + } + auto tmp_all_nodes = graph_->GetAllNodes(); + for (auto &node : tmp_all_nodes) { + origin_all_nodes_.emplace_back(node); + if (IsDataLikeType(node->GetType())) { + origin_data_nodes_.emplace_back(node); + } + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::CreateNewNodes() { + shape_data_ = InsertShapeDataNode(); + if (shape_data_ == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to create the shape data node for muti-batch"); + return INTERNAL_ERROR; + } + + for (const auto &node : origin_all_nodes_) { + auto node_type = node->GetType(); + Status ret = INTERNAL_ERROR; + auto branch_status = GetNodeStatus(node); + GELOGD("Process node %s, status %d", node->GetName().c_str(), static_cast(branch_status)); + switch (branch_status) { + case kNodeStartNode: + ret = InsertSwitchNForData(node); + if (ret == SUCCESS) { + ret = UpdateMaxShapeToData(node); + } + break; + case kNodeInBatchBranch: + ret = CopyNodeInBatchBranch(node); + break; + case 
kNodeOutBatchBranch: + ret = InsertMergeForEdgeNode(node); + break; + default: + GELOGE(INTERNAL_ERROR, "Unexpected status %d on node %s", static_cast(branch_status), + node->GetName().c_str()); + break; + } + if (ret != SUCCESS) { + GELOGE(ret, "Failed to deal with node %s in multi-batch process", node->GetName().c_str()); + return ret; + } + } + return SUCCESS; +} +NodeStatus MultiBatchGraphCopyer::GetNodeStatus(const NodePtr &node) { + if (node->GetType() == NETOUTPUT) { + return kNodeOutBatchBranch; + } + if (IsDataLikeType(node->GetType()) && !IsOnlyOutputToAipp(node)) { + return kNodeStartNode; + } + for (auto &in_node : node->GetInDataNodes()) { + if (IsInBatchBranch(in_node)) { + return kNodeInBatchBranch; + } + } + return kNodeOutBatchBranch; +} +NodePtr MultiBatchGraphCopyer::InsertMergeNode(const NodePtr &node, int index) { + if (index < 0) { + // the merge node must has data inputs, if origin connection is a control + // edge, we use data edge instead + index = 0; + } + + auto &merge_nodes = nodes_to_merge_nodes_[node.get()]; + if (merge_nodes.empty()) { + auto count = node->GetAllOutDataAnchorsSize(); + if (count == 0) { + count = 1; + } + merge_nodes.resize(count, nullptr); + } + + if (merge_nodes.at(index) != nullptr) { + return merge_nodes[index]; + } + + auto merge_node_name = node->GetName() + "_huawei_mbatch_merge_" + std::to_string(index); + auto merge_node = InsertMergeNodeToGraph(merge_node_name, shapes_.size(), node->GetOwnerComputeGraph()); + if (merge_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to create merge node for node %s, out index %d", node->GetName().c_str(), index); + return nullptr; + } + merge_nodes[index] = merge_node; + GELOGI("Create merge node %s for node %s index %d", merge_node_name.c_str(), node->GetName().c_str(), index); + return merge_node; +} +Status MultiBatchGraphCopyer::CopyInDataEdges(const NodePtr &origin_node, int batch_num, const NodePtr ©ed_node) { + for (auto &in_anchor : 
origin_node->GetAllInDataAnchors()) { + auto origin_src_anchor = in_anchor->GetPeerOutAnchor(); + if (origin_src_anchor == nullptr) { + GELOGD("The node %s does not have input on index %d", origin_node->GetName().c_str(), in_anchor->GetIdx()); + continue; + } + auto origin_src_node = origin_src_anchor->GetOwnerNode(); + auto dst_anchor = copyed_node->GetInDataAnchor(in_anchor->GetIdx()); + GE_CHECK_NOTNULL(dst_anchor); + auto switchn_iter = data_nodes_to_switchn_.find(origin_src_node.get()); + if (switchn_iter != data_nodes_to_switchn_.end()) { + auto ret = GraphUtils::AddEdge(switchn_iter->second->GetOutDataAnchor(batch_num), dst_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add data edge between %s(%d) to %s(%d), error-code %u", + switchn_iter->second->GetName().c_str(), batch_num, copyed_node->GetName().c_str(), in_anchor->GetIdx(), + ret); + return INTERNAL_ERROR; + } + GELOGD("Add data edge from %s(%d) to %s(%d)", switchn_iter->second->GetName().c_str(), batch_num, + copyed_node->GetName().c_str(), in_anchor->GetIdx()); + continue; + } + + auto batch_branch_iter = nodes_to_batch_nodes_.find(origin_src_node.get()); + if (batch_branch_iter != nodes_to_batch_nodes_.end()) { + auto src_batch_node = batch_branch_iter->second.at(batch_num); + auto ret = GraphUtils::AddEdge(src_batch_node->GetOutDataAnchor(origin_src_anchor->GetIdx()), dst_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add data edge between %s(%d) to %s(%d), error-code %u", + src_batch_node->GetName().c_str(), batch_num, copyed_node->GetName().c_str(), in_anchor->GetIdx(), ret); + return INTERNAL_ERROR; + } + GELOGD("Add data edge from %s(%d) to %s(%d)", src_batch_node->GetName().c_str(), batch_num, + copyed_node->GetName().c_str(), in_anchor->GetIdx()); + continue; + } + + auto ret = GraphUtils::AddEdge(origin_src_anchor, dst_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add data edge between origin node %s(%d) to 
copyed %s(%d)", + origin_src_node->GetName().c_str(), origin_src_anchor->GetIdx(), copyed_node->GetName().c_str(), + dst_anchor->GetIdx()); + return INTERNAL_ERROR; + } + GELOGD("Add data edge between branch-out %s(%d) to branch-in %s(%d)", origin_src_node->GetName().c_str(), + origin_src_anchor->GetIdx(), copyed_node->GetName().c_str(), dst_anchor->GetIdx()); + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::CopyInControlEdges(const NodePtr &node, int batch_num, const NodePtr ©ed_node) { + for (auto &origin_src_node : node->GetInControlNodes()) { + auto switchn_iter = data_nodes_to_switchn_.find(origin_src_node.get()); + if (switchn_iter != data_nodes_to_switchn_.end()) { + // reconnect data node + auto ret = GraphUtils::AddEdge(switchn_iter->second->GetOutControlAnchor(), copyed_node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add control edge between %s to %s, error-code %u", + switchn_iter->second->GetName().c_str(), copyed_node->GetName().c_str(), ret); + return INTERNAL_ERROR; + } + GELOGD("Add control edge from %s to %s", switchn_iter->second->GetName().c_str(), copyed_node->GetName().c_str()); + continue; + } + + auto batch_branch_iter = nodes_to_batch_nodes_.find(origin_src_node.get()); + if (batch_branch_iter != nodes_to_batch_nodes_.end()) { + // reconnect node in batch branch + auto src_batch_node = batch_branch_iter->second.at(batch_num); + auto ret = GraphUtils::AddEdge(src_batch_node->GetOutControlAnchor(), copyed_node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add data edge between %s to %s, error-code %u", + src_batch_node->GetName().c_str(), copyed_node->GetName().c_str(), ret); + return INTERNAL_ERROR; + } + GELOGD("Add control edge from %s to %s", src_batch_node->GetName().c_str(), copyed_node->GetName().c_str()); + continue; + } + + auto ret = GraphUtils::AddEdge(origin_src_node->GetOutControlAnchor(), copyed_node->GetInControlAnchor()); + if (ret 
!= GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add control edge from origin %s to copyed %s", + origin_src_node->GetName().c_str(), copyed_node->GetName().c_str()); + return INTERNAL_ERROR; + } + GELOGD("Add control edge between branch-out %s to branch-in %s", origin_src_node->GetName().c_str(), + copyed_node->GetName().c_str()); + } + return SUCCESS; +} +NodePtr MultiBatchGraphCopyer::InsertShapeDataNode() { + auto desc = MakeShared(); + if (desc == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to create shape data node, out of memory"); + return nullptr; + } + desc->SetName("huawei_mbatch_shape_data"); + desc->SetType(DATA); + + GeTensorDesc tensor_desc; + tensor_desc.SetFormat(FORMAT_ND); + tensor_desc.SetShape(GeShape({static_cast(shapes_.at(0).size())})); + tensor_desc.SetDataType(DT_INT64); + auto ret = desc->AddInputDesc(tensor_desc); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add input desc for created data"); + return nullptr; + } + ret = desc->AddOutputDesc(tensor_desc); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add output desc for created data"); + return nullptr; + } + + if (!AttrUtils::SetBool(desc, ATTR_INSERT_BY_MBATCH, true)) { + GELOGE(INTERNAL_ERROR, "Failed to add attr for created data"); + return nullptr; + } + + auto data_node = graph_->AddNode(desc); + if (data_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to add shape data node to graph"); + return nullptr; + } + ret = GraphUtils::AppendInputNode(graph_, data_node); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to append data node %s as input to graph", data_node->GetName().c_str()); + return nullptr; + } + + return data_node; +} +Status MultiBatchGraphCopyer::CheckArguments() { + if (graph_ == nullptr) { + GELOGE(PARAM_INVALID, "Failed to copy graph, the graph is null"); + return PARAM_INVALID; + } + if (shapes_.size() < kMinShapesCount) { + GELOGE(PARAM_INVALID, "The minimum batch-shapes count is %zu", 
kMinShapesCount); + return PARAM_INVALID; + } + if (shapes_.size() > kMaxShapesCount) { + GELOGE(PARAM_INVALID, "The max batch-shapes count is %zu", kMaxShapesCount); + return PARAM_INVALID; + } + std::set> shapes_set; + size_t shape_size = shapes_.at(0).size(); + for (auto &shape : shapes_) { + if (shape_size != shape.size()) { + GELOGE(PARAM_INVALID, "All batch shapes size must be the same"); + return PARAM_INVALID; + } + for (auto dim : shape) { + if (dim <= 0) { + GELOGE(PARAM_INVALID, "Invalid dim %ld, all dims must more than 0", dim); + return PARAM_INVALID; + } + } + shapes_set.insert(shape); + } + if (shapes_set.size() != shapes_.size()) { + GELOGE(PARAM_INVALID, "There are duplicated batch-shapes, please check"); + return PARAM_INVALID; + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::CheckCopyResult(const std::vector &start_nodes) { + for (auto &node : start_nodes) { + if (IsOnlyOutputToAipp(node)) { + continue; + } + auto dims = NodeUtils::GetOutputDesc(*node, kDataOutIndex).GetShape().GetDims(); + if (!IsAllDimsPositive(dims)) { + GELOGE(INTERNAL_ERROR, "Failed to copy multi batch graph, the node %s still has unknown shape %s", + node->GetName().c_str(), formats::ShapeToString(dims).c_str()); + return INTERNAL_ERROR; + } + } + return SUCCESS; +} +bool MultiBatchGraphCopyer::IsInBatchBranch(const NodePtr &node) { + return (nodes_to_batch_nodes_.count(node.get()) > 0) || (data_nodes_to_switchn_.count(node.get()) > 0); +} +Status MultiBatchGraphCopyer::LinkDataToMerge(const NodePtr &data, const NodePtr &merge) { + // The caller should make sure that the there is a SwitchN node in the map + auto &switchn = data_nodes_to_switchn_[data.get()]; + GELOGI("Link edge bwetween data %s to merge %s throw switchn %s", data->GetName().c_str(), merge->GetName().c_str(), + switchn->GetName().c_str()); + for (size_t i = 0; i < shapes_.size(); ++i) { + auto ret = GraphUtils::AddEdge(switchn->GetOutDataAnchor(i), merge->GetInDataAnchor(i)); + if (ret != 
GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add edge between switchn %s(%zu) to merge %s(%zu), error-code %u", + switchn->GetName().c_str(), i, merge->GetName().c_str(), i, ret); + return INTERNAL_ERROR; + } + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::LinkNodeToMerge(const NodePtr &node, int out_index, const NodePtr &merge) { + auto ©ed_nodes = nodes_to_batch_nodes_[node.get()]; + if (copyed_nodes.size() != shapes_.size()) { + GELOGE(INTERNAL_ERROR, + "Failed to create merge node for node %s, the copyed nodes for it count %zu different with shape %zu", + node->GetName().c_str(), copyed_nodes.size(), shapes_.size()); + return INTERNAL_ERROR; + } + for (size_t i = 0; i < copyed_nodes.size(); ++i) { + auto src_node = copyed_nodes[i]; + if (src_node->GetAllOutDataAnchorsSize() == 0) { + // if the node does not has any data output, we should create an const for it, like this: + // c d + // node ---> const ---> merge + auto const_name = src_node->GetName() + "_merge_const"; + GELOGI("The node %s on the batch branch edge does not have any data output, create a const %s for it", + src_node->GetName().c_str(), const_name.c_str()); + auto const_node = InsertConst(const_name, graph_); + if (const_node == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to create const for node %s to connect to a merge node", + src_node->GetName().c_str()); + return OUT_OF_MEMORY; + } + auto ret = GraphUtils::AddEdge(src_node->GetOutControlAnchor(), const_node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add control edge from %s to %s", src_node->GetName().c_str(), + const_node->GetName().c_str()); + return INTERNAL_ERROR; + } + src_node = const_node; + } + auto ret = GraphUtils::AddEdge(src_node->GetOutDataAnchor(out_index), merge->GetInDataAnchor(i)); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, + "Failed to add edge between copyed node %s(%d) to inserted merge node %s(%zu), error-code %u", + 
copyed_nodes[i]->GetName().c_str(), out_index, merge->GetName().c_str(), i, ret); + return INTERNAL_ERROR; + } + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::UpdateMaxShapeToData(const NodePtr &data) { + auto data_shape = NodeUtils::GetOutputDesc(*data, kDataOutIndex).GetShape(); + if (IsAllDimsPositive(data_shape.GetDims())) { + return SUCCESS; + } + + size_t max_shape_index = 0; + int64_t max_size = 0; + for (size_t i = 0; i < shapes_.size(); ++i) { + int64_t size = 1; + for (auto dim : shapes_[i]) { + if (INT64_MAX / dim < size) { + GELOGE(PARAM_INVALID, "The shape %s size overflow", formats::ShapeToString(shapes_[i]).c_str()); + return PARAM_INVALID; + } + size *= dim; + } + if (size > max_size) { + max_size = size; + max_shape_index = i; + } + } + + // must not be error, the calc result has been checked in function InsertSwitchNForData + (void)CalcShape(shapes_[max_shape_index], data_shape); + + auto ret = NodeUtils::UpdateOutputShape(*data, kDataOutIndex, data_shape); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to update output shape for data %s", data->GetName().c_str()); + return INTERNAL_ERROR; + } + ret = NodeUtils::UpdateInputShape(*data, kDataInIndex, data_shape); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to update input shape for data %s", data->GetName().c_str()); + return INTERNAL_ERROR; + } + GELOGI("Update the data %s input/output shape to the max %s", data->GetName().c_str(), + formats::ShapeToString(data_shape).c_str()); + return SUCCESS; +} +Status MultiBatchGraphCopyer::InsertSwitchNForData(const NodePtr &data) { + auto data_shape = NodeUtils::GetOutputDesc(*data, kDataOutIndex).GetShape(); + if (IsAllDimsPositive(data_shape.GetDims())) { + GELOGI("The shape of data %s are positive(%s), skip the multi batch process", data->GetName().c_str(), + data_shape.ToString().c_str()); + return SUCCESS; + } + + auto switchn_desc = MakeShared(); + if (switchn_desc == nullptr) { + GELOGE(OUT_OF_MEMORY, 
"Failed to create switchn for data %s", data->GetName().c_str()); + return OUT_OF_MEMORY; + } + switchn_desc->SetName(data->GetName() + "_huawei_mbatch_switchn"); + switchn_desc->SetType(SWITCHN); + + GeTensorDesc tensor(NodeUtils::GetOutputDesc(*data, kDataOutIndex)); + if (switchn_desc->AddInputDesc(tensor) != GRAPH_SUCCESS) { // data + GELOGE(FAILED, "Failed to add inpit desc."); + return FAILED; + } + + GeTensorDesc pred_tensor; + if (switchn_desc->AddInputDesc(pred_tensor) != GRAPH_SUCCESS) { // pred + GELOGE(FAILED, "Failed to add inpit desc."); + return FAILED; + } + for (size_t i = 0; i < shapes_.size(); ++i) { + auto shape = data_shape; + auto ret = CalcShape(shapes_.at(i), shape); + if (ret != SUCCESS) { + GELOGE(ret, "Failed to calculate the batched shape for data node %s, the shapes may not match", + data->GetName().c_str()); + return ret; + } + tensor.SetShape(shape); + if (!AttrUtils::SetListInt(tensor, ATTR_NAME_SWITCHN_PRED_VALUE, shapes_.at(i))) { + GELOGE(INTERNAL_ERROR, "Failed to add attr value on output %zu tensor", i); + return INTERNAL_ERROR; + } + if (switchn_desc->AddOutputDesc(tensor) != GRAPH_SUCCESS) { // pred + GELOGE(FAILED, "Failed to add inpit desc."); + return FAILED; + } + GELOGD("The SwitchN %s output index %zu, shape %s", switchn_desc->GetName().c_str(), i, shape.ToString().c_str()); + } + + if (!AttrUtils::SetBool(switchn_desc, ATTR_INSERT_BY_MBATCH, true)) { + GELOGE(INTERNAL_ERROR, "Failed to add insert attr on switchn node %s", switchn_desc->GetName().c_str()); + return INTERNAL_ERROR; + } + + auto switchn = graph_->AddNode(switchn_desc); + if (switchn == nullptr) { + GELOGE(OUT_OF_MEMORY, "Failed to create switchn %s from desc", switchn_desc->GetName().c_str()); + return OUT_OF_MEMORY; + } + data_nodes_to_switchn_[data.get()] = switchn; + return SUCCESS; +} +Status MultiBatchGraphCopyer::InsertMergeForEdgeNode(const NodePtr &node) { + for (auto &in_data_anchor : node->GetAllInDataAnchors()) { + auto src_out_anchor = 
in_data_anchor->GetPeerOutAnchor(); + if (src_out_anchor == nullptr) { + GELOGD("The node %s does not has input at index %d", node->GetName().c_str(), in_data_anchor->GetIdx()); + continue; + } + auto in_node = src_out_anchor->GetOwnerNode(); + if (!IsInBatchBranch(in_node)) { + continue; + } + auto merge_node = InsertMergeNode(in_node, src_out_anchor->GetIdx()); + if (merge_node == nullptr) { + return INTERNAL_ERROR; + } + } + + for (auto &in_node : node->GetInControlNodes()) { + if (!IsInBatchBranch(in_node)) { + continue; + } + auto merge_node = InsertMergeNode(in_node, -1); + if (merge_node == nullptr) { + return INTERNAL_ERROR; + } + } + + return SUCCESS; +} +Status MultiBatchGraphCopyer::CopyNodeInBatchBranch(const NodePtr &node) { + auto ©ed_nodes = nodes_to_batch_nodes_[node.get()]; + for (size_t i = 0; i < shapes_.size(); ++i) { + auto copyed_node = InsertCopyNode(node, node->GetName() + "_huawei_mbatch_batch_" + std::to_string(i)); + if (copyed_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to add node to graph when copy node %s", node->GetName().c_str()); + return INTERNAL_ERROR; + } + copyed_nodes.emplace_back(copyed_node); + GELOGI("Copy node %s type %s for shape %s, new node name %s", node->GetName().c_str(), node->GetType().c_str(), + formats::JoinToString(shapes_.at(i)).c_str(), copyed_node->GetName().c_str()); + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::LinkEdges() { + Status ret; + for (const auto &node : origin_all_nodes_) { + if (data_nodes_to_switchn_.count(node.get()) > 0) { + ret = LinkDataToSwitchN(node); + if (ret != SUCCESS) { + return ret; + } + } + if (nodes_to_merge_nodes_.count(node.get()) > 0) { + ret = LinkToMerge(node); + if (ret != SUCCESS) { + return ret; + } + } + if (nodes_to_batch_nodes_.count(node.get()) > 0) { + ret = LinkToNodeInBranch(node); + } else { + ret = LinkToNodeOutBranch(node); + } + if (ret != SUCCESS) { + return ret; + } + } + return SUCCESS; +} +Status 
MultiBatchGraphCopyer::LinkDataToSwitchN(const NodePtr &data) { + auto switchn = data_nodes_to_switchn_[data.get()]; + auto ret = + GraphUtils::AddEdge(shape_data_->GetOutDataAnchor(kDataOutIndex), switchn->GetInDataAnchor(kSwitchNPredIndex)); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to link shape data %s to switchn %s", shape_data_->GetName().c_str(), + switchn->GetName().c_str()); + return INTERNAL_ERROR; + } + + ret = GraphUtils::AddEdge(data->GetOutDataAnchor(kDataOutIndex), switchn->GetInDataAnchor(kSwitchNDataIndex)); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to link data %s to switchn %s", data->GetName().c_str(), switchn->GetName().c_str()); + return INTERNAL_ERROR; + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::LinkToMerge(const NodePtr &node) { + auto &merge_nodes = nodes_to_merge_nodes_[node.get()]; + for (size_t i = 0; i < merge_nodes.size(); ++i) { + auto merge_node = merge_nodes[i]; + if (merge_node == nullptr) { + continue; + } + if (nodes_to_batch_nodes_.count(node.get()) > 0) { + auto ret = LinkNodeToMerge(node, i, merge_node); + if (ret != SUCCESS) { + return ret; + } + continue; + } + if (data_nodes_to_switchn_.count(node.get()) > 0) { + auto ret = LinkDataToMerge(node, merge_node); + if (ret != SUCCESS) { + return ret; + } + continue; + } + GELOGE(INTERNAL_ERROR, "The merge node %s is created, index %zu, but can not find the src node", + merge_node->GetName().c_str(), i); + return INTERNAL_ERROR; + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::LinkToNodeInBranch(const NodePtr &node) { + auto &branch_nodes = nodes_to_batch_nodes_[node.get()]; + for (size_t i = 0; i < branch_nodes.size(); ++i) { + auto ret = CopyInDataEdges(node, i, branch_nodes[i]); + if (ret != SUCCESS) { + return ret; + } + ret = CopyInControlEdges(node, i, branch_nodes[i]); + if (ret != SUCCESS) { + return ret; + } + } + return SUCCESS; +} +Status MultiBatchGraphCopyer::LinkToNodeOutBranch(const NodePtr &node) { + for 
(auto &in_data_anchor : node->GetAllInDataAnchors()) { + auto src_out_anchor = in_data_anchor->GetPeerOutAnchor(); + if (src_out_anchor == nullptr) { + GELOGD("The node %s does not has input at index %d", node->GetName().c_str(), in_data_anchor->GetIdx()); + continue; + } + auto in_node = src_out_anchor->GetOwnerNode(); + if (!IsInBatchBranch(in_node)) { + continue; + } + auto iter = nodes_to_merge_nodes_.find(in_node.get()); + if (iter == nodes_to_merge_nodes_.end()) { + GELOGE(INTERNAL_ERROR, "Failed to link IO data edge from %s(%d) to %s(%d), no merge node found", + in_node->GetName().c_str(), src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx()); + return INTERNAL_ERROR; + } + auto merge_node = iter->second[src_out_anchor->GetIdx()]; + if (merge_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to link IO data edge from %s(%d) to %s(%d), no merge node found", + in_node->GetName().c_str(), src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx()); + return INTERNAL_ERROR; + } + auto ret = src_out_anchor->Unlink(in_data_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to unlink the control edge from %s(%d) to %s(%d)", in_node->GetName().c_str(), + src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx()); + return INTERNAL_ERROR; + } + ret = GraphUtils::AddEdge(merge_node->GetOutDataAnchor(kMergeDataOutIndex), in_data_anchor); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add data edge from %s(%d) to %s(%d)", merge_node->GetName().c_str(), + src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx()); + return INTERNAL_ERROR; + } + GELOGI("Link data edge from merge %s(from %s(%d)) to %s(%d)", merge_node->GetName().c_str(), + in_node->GetName().c_str(), src_out_anchor->GetIdx(), node->GetName().c_str(), in_data_anchor->GetIdx()); + } + + for (auto &in_node : node->GetInControlNodes()) { + if (!IsInBatchBranch(in_node)) { + continue; + } + auto 
iter = nodes_to_merge_nodes_.find(in_node.get()); + if (iter == nodes_to_merge_nodes_.end()) { + GELOGE(INTERNAL_ERROR, "Failed to link IO control edge from %s to %s, no merge node found", + in_node->GetName().c_str(), node->GetName().c_str()); + return INTERNAL_ERROR; + } + auto merge_node = iter->second[0]; + if (merge_node == nullptr) { + GELOGE(INTERNAL_ERROR, "Failed to link IO control edge from %s to %s, no merge node found", + in_node->GetName().c_str(), node->GetName().c_str()); + return INTERNAL_ERROR; + } + auto ret = in_node->GetOutControlAnchor()->Unlink(node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to unlink the control edge from %s to %s", in_node->GetName().c_str(), + node->GetName().c_str()); + return INTERNAL_ERROR; + } + ret = GraphUtils::AddEdge(merge_node->GetOutControlAnchor(), node->GetInControlAnchor()); + if (ret != GRAPH_SUCCESS) { + GELOGE(INTERNAL_ERROR, "Failed to add control edge from %s to %s", merge_node->GetName().c_str(), + node->GetName().c_str()); + return INTERNAL_ERROR; + } + GELOGI("Link control edge from merge %s(from %s) to %s", merge_node->GetName().c_str(), in_node->GetName().c_str(), + node->GetName().c_str()); + } + + return SUCCESS; +} + +Status ProcessMultiBatch(ComputeGraphPtr &graph) { + const int kDecimal = 10; + std::vector> shapes; + std::string option; + if (GetContext().GetOption("ge.dynamic_batchsize", option) == GRAPH_SUCCESS) { + GELOGD("Found dynamic batch option, value %s", option.c_str()); + std::vector dims = StringUtils::Split(option, ','); + for (const auto &dim : dims) { + shapes.emplace_back(std::vector({std::strtol(dim.c_str(), nullptr, kDecimal)})); + GELOGI("Found dynamic batch, shape %s", formats::JoinToString(*shapes.rbegin()).c_str()); + } + } + if (GetContext().GetOption("ge.dynamic_imagesize", option) == GRAPH_SUCCESS) { + GELOGD("Found dynamic image size option, value %s", option.c_str()); + std::vector shape_strs = StringUtils::Split(option, ';'); 
+ for (const auto &shape_str : shape_strs) { + std::vector shape; + std::vector dims = StringUtils::Split(shape_str, ','); + for (const auto &dim : dims) { + shape.emplace_back(std::strtol(dim.c_str(), nullptr, kDecimal)); + } + shapes.emplace_back(shape); + GELOGI("Found dynamic image size, shape %s", formats::JoinToString(shape).c_str()); + } + } + if (shapes.empty()) { + GELOGD("There is no multi-batch options, no need to process multi-batch copys"); + return SUCCESS; + } + + GELOGI("Begin to copy graph for multi-batch"); + multibatch::MultiBatchGraphCopyer copyer(graph); + for (auto &shape : shapes) { + copyer.AddShape(shape); + } + return copyer.CopyGraph(); +} +} // namespace multibatch +} // namespace ge diff --git a/src/ge/graph/preprocess/multi_batch_copy_graph.h b/src/ge/graph/preprocess/multi_batch_copy_graph.h new file mode 100644 index 00000000..ca0fe828 --- /dev/null +++ b/src/ge/graph/preprocess/multi_batch_copy_graph.h @@ -0,0 +1,116 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_GRAPH_PREPROCESS_MULTI_BATCH_COPY_GRAPH_H_ +#define GE_GRAPH_PREPROCESS_MULTI_BATCH_COPY_GRAPH_H_ + +#include +#include +#include + +#include "external/ge/ge_api_error_codes.h" +#include "graph/compute_graph.h" + +namespace ge { +namespace multibatch { +Status ProcessMultiBatch(ComputeGraphPtr &graph); + +enum NodeStatus { + kNodeInBatchBranch, + kNodeOutBatchBranch, + kNodeStartNode, +}; + +class MultiBatchGraphCopyer { + public: + explicit MultiBatchGraphCopyer(ComputeGraphPtr &graph) : graph_(graph) {} + ~MultiBatchGraphCopyer() = default; + + void AddShape(const std::vector &shape) { + shapes_.emplace_back(shape); + } + + Status CopyGraph(); + + private: + Status Init(); + Status CheckArguments(); + + // add nodes functions + Status CreateNewNodes(); + + NodePtr InsertShapeDataNode(); + Status InsertSwitchNForData(const NodePtr &data); + Status UpdateMaxShapeToData(const NodePtr &data); + + Status InsertMergeForEdgeNode(const NodePtr &node); + + /// + /// Insert a merge node for src node `node` on output index `index`. The merge node will be used to merge all nodes + /// in batch-branch to one output to the node out of the batch-branch. + /// Cond 1: If the `index` is -1, then the src node link a data edge(at output 0) to the merge node, + /// Cond 2: In condition 1, if the src node does not have any data output, we create a const node after it, + /// the result like this: + /// src_node ---------> const_for_src_node --------> merge + /// control data + /// Cond 3: If the src node is a data-like node, the SwitchN after it will be link to the merge node. 
+ /// @param node + /// @param index + /// @return + /// + NodePtr InsertMergeNode(const NodePtr &node, int index); + Status CopyNodeInBatchBranch(const NodePtr &node); + + // link edges functions + Status LinkEdges(); + Status LinkDataToSwitchN(const NodePtr &data); + Status LinkToMerge(const NodePtr &node); + Status LinkToNodeInBranch(const NodePtr &node); + Status LinkToNodeOutBranch(const NodePtr &node); + Status LinkDataToMerge(const NodePtr &data, const NodePtr &merge); + Status LinkNodeToMerge(const NodePtr &node, int out_index, const NodePtr &merge); + Status CopyInDataEdges(const NodePtr &origin_node, int batch_num, const NodePtr ©ed_node); + Status CopyInControlEdges(const NodePtr &node, int batch_num, const NodePtr ©ed_node); + + bool IsInBatchBranch(const NodePtr &node); + NodeStatus GetNodeStatus(const NodePtr &node); + Status CheckCopyResult(const std::vector &start_nodes); + + // arguments + ComputeGraphPtr graph_; + std::vector> shapes_; + + // the shape data node created + NodePtr shape_data_; + + // all nodes in the origin graph + std::vector origin_all_nodes_; + + // all data nodes in the origin graph + std::vector origin_data_nodes_; + + // the nodes in-batch-branch, and the nodes copyed by shapes + std::map> nodes_to_batch_nodes_; + + // the data nodes, and the SwitchN nodes inserted after it + std::map data_nodes_to_switchn_; + + // the nodes on the in/out-batch-branch edge, and the merge nodes inserted after it + std::map> nodes_to_merge_nodes_; +}; +} // namespace multibatch +} // namespace ge +#endif // GE_GRAPH_PREPROCESS_MULTI_BATCH_COPY_GRAPH_H_ diff --git a/src/ge/inc/graph_pass.h b/src/ge/inc/graph_pass.h new file mode 100644 index 00000000..8eb241c8 --- /dev/null +++ b/src/ge/inc/graph_pass.h @@ -0,0 +1,91 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_INC_GRAPH_PASS_H_ +#define GE_INC_GRAPH_PASS_H_ + +#include +#include + +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "framework/common/debug/ge_log.h" +#include "graph/compute_graph.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "inc/pass.h" + +namespace ge { +/// +/// @ingroup domi_omg +/// @brief graph pass +/// @author +/// +class GraphPass : public Pass { + public: + /// + /// run graph pass + /// @param [in] graph graph to be optimized + /// @return SUCCESS optimize successfully + /// @return NOT_CHANGED not optimized + /// @return others optimized failed + /// @author + /// + virtual Status Run(ge::ComputeGraphPtr graph) = 0; + static void RecordOriginalNames(std::vector original_nodes, const ge::NodePtr &node) { + GE_CHECK_NOTNULL_JUST_RETURN(node); + std::vector original_names; + for (ge::NodePtr &node_tmp : original_nodes) { + std::vector names_tmp; + ge::OpDescPtr opdesc_tmp = node_tmp->GetOpDesc(); + GE_CHECK_NOTNULL_JUST_RETURN(opdesc_tmp); + Status ret = ge::AttrUtils::GetListStr(opdesc_tmp, "_datadump_original_op_names", names_tmp); + if (ret != domi::SUCCESS) { + GELOGW("get the original_op_names fail."); + } + if (names_tmp.size() != 0) { + original_names.insert(original_names.end(), names_tmp.begin(), names_tmp.end()); + } else { + original_names.push_back(opdesc_tmp->GetName()); + } + } + + if (original_names.size() == 0) { + std::string tmp; + original_names.push_back(tmp); + } + 
GE_CHK_BOOL_EXEC(ge::AttrUtils::SetListStr(node->GetOpDesc(), "_datadump_original_op_names", original_names), + return, "Set original_op_names fail."); + } + + static bool IsConstNode(const ge::NodePtr &node) { + GE_IF_BOOL_EXEC(node->GetOpDesc() == nullptr, GELOGE(FAILED, "Node GetOpDesc is nullptr"); return false); + if (node->GetOpDesc()->GetType() == CONSTANTOP) { + return true; + } else if (node->GetOpDesc()->GetType() == FRAMEWORKOP) { + string type; + GE_CHK_BOOL_EXEC(ge::AttrUtils::GetStr(node->GetOpDesc(), ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, type), + return false, "Get original_type for op %s fail!", node->GetName().c_str()); + GE_IF_BOOL_EXEC(type == CONSTANT, GELOGI("Is const op"); return true); + return false; + } else { + return false; + } + } +}; +} // namespace ge + +#endif // GE_INC_GRAPH_PASS_H_ diff --git a/src/ge/inc/kernel.h b/src/ge/inc/kernel.h new file mode 100644 index 00000000..ec0e5e40 --- /dev/null +++ b/src/ge/inc/kernel.h @@ -0,0 +1,85 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_INC_KERNEL_H_ +#define GE_INC_KERNEL_H_ + +#include + +#include "common/op/ge_op_utils.h" +#include "graph/compute_graph.h" +#include "graph/graph.h" +#include "graph/op_desc.h" + +using std::vector; +using std::unique_ptr; +using std::shared_ptr; + +namespace ge { +/// +/// @ingroup domi_omg +/// @brief Kernel interface +/// @author +/// +class Kernel { + public: + /// + /// Constant calculation interface, the result is appended to output + /// @param [in] op_desc_ptr Operator related parameters + /// @param [in] input Constant to be calculated + /// @param [inout] output Save calculation results + /// @author + /// + virtual Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector& input, + std::vector& v_output) { + (void)op_desc_ptr; + (void)input; + (void)v_output; + return NOT_CHANGED; + } + + /// + /// Data description transformation interface + /// @param [in] op_desc_ptr Operator related parameters + /// @param [in] input Data description(include dimension、format、data type etc.) 
+ /// @param [inout] output save the transformation result + /// @author + /// + virtual Status Compute(const ge::OpDescPtr op_desc_ptr, const ge::GeTensorDescPtr input, ge::GeTensorDescPtr output) { + (void)op_desc_ptr; + (void)input; + (void)output; + return NOT_CHANGED; + } + + virtual Status Compute(const NodePtr& node, std::vector& v_output) { + (void)node; + (void)v_output; + return NOT_CHANGED; + } + + virtual Status Compute(const NodePtr& node_ptr) { + (void)node_ptr; + return NOT_CHANGED; + } + + /// + /// Destructor + /// + virtual ~Kernel() {} +}; +} // namespace ge +#endif // GE_INC_KERNEL_H_ diff --git a/src/ge/inc/kernel_factory.h b/src/ge/inc/kernel_factory.h new file mode 100644 index 00000000..8e5912eb --- /dev/null +++ b/src/ge/inc/kernel_factory.h @@ -0,0 +1,107 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_INC_KERNEL_FACTORY_H_ +#define GE_INC_KERNEL_FACTORY_H_ + +#include +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "graph/graph.h" + +using std::string; + +namespace ge { +class Kernel; + +/// +/// @ingroup domi_omg +/// @brief kernel create factory +/// @author +/// +class KernelFactory { + public: + // KernelCreator(function), type definition + using KERNEL_CREATOR_FUN = std::function(void)>; + + /// + /// Get singleton instance + /// + static KernelFactory &Instance() { + static KernelFactory instance; + return instance; + } + + /// + /// create Kernel + /// @param [in] op_type operation type + /// + std::shared_ptr Create(const std::string &op_type) { + std::map::iterator iter = creator_map_.find(op_type); + if (iter != creator_map_.end()) { + return iter->second(); + } + + return nullptr; + } + + // Kernel registration function to register different types of kernel to the factory + class Registerar { + public: + /// + /// @ingroup domi_omg + /// @brief Constructor + /// @param [in] type operation type + /// @param [in| fun kernel function of the operation + /// + Registerar(const string &type, const KERNEL_CREATOR_FUN &fun) { + KernelFactory::Instance().RegisterCreator(type, fun); + } + ~Registerar() {} + }; + + protected: + KernelFactory() {} + ~KernelFactory() {} + + // register creator, this function will call in the constructor + void RegisterCreator(const string &type, const KERNEL_CREATOR_FUN &fun) { + std::map::iterator iter = creator_map_.find(type); + if (iter != creator_map_.end()) { + GELOGD("KernelFactory::RegisterCreator: %s creator already exist", type.c_str()); + return; + } + + creator_map_[type] = fun; + } + + private: + std::map creator_map_; +}; + +#define REGISTER_KERNEL(type, clazz) \ + std::shared_ptr Creator_##type##_Kernel() { \ + std::shared_ptr ptr = nullptr; \ + ptr = MakeShared(); \ + return ptr; \ + } \ + KernelFactory::Registerar 
g_##type##_Kernel_Creator(type, Creator_##type##_Kernel) +}; // end namespace ge +#endif // GE_INC_KERNEL_FACTORY_H_ diff --git a/src/ge/inc/pass.h b/src/ge/inc/pass.h new file mode 100644 index 00000000..9f8519e1 --- /dev/null +++ b/src/ge/inc/pass.h @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_INC_PASS_H_ +#define GE_INC_PASS_H_ + +#include + +#include "common/fmk_error_codes.h" + +namespace ge { +/// +/// @ingroup domi_omg +/// @brief pass +/// @author +/// +template +class Pass { + public: + virtual ~Pass() {} + /// + /// run pass + /// @author + /// + virtual Status Run(std::shared_ptr) = 0; +}; +} // namespace ge + +#endif // GE_INC_PASS_H_ diff --git a/src/ge/inc/pass_manager.h b/src/ge/inc/pass_manager.h new file mode 100644 index 00000000..6a40b173 --- /dev/null +++ b/src/ge/inc/pass_manager.h @@ -0,0 +1,74 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_INC_PASS_MANAGER_H_ +#define GE_INC_PASS_MANAGER_H_ + +#include + +#include "inc/graph_pass.h" + +using std::vector; + +namespace ge { +/// +/// @ingroup domi_omg +/// @brief pass manager +/// @author +/// +class PassManager { + public: + /// + /// get graph passes + /// @author + /// + const vector &GraphPasses() const; + + /// + /// Add graph pass + /// @param [in] pass Pass to be added, it will be destroyed when pass manager destroys. + /// @author + /// + Status AddPass(GraphPass *pass); + + /// + /// Optimize graph with added pass + /// @param [inout] graph graph to be optimized + /// @return SUCCESS optimize successfully + /// @return NOT_CHANGED not optimized + /// @return others optimize failed + /// @author + /// + Status Run(const ge::ComputeGraphPtr &graph); + + /// + /// Optimize graph with specified pass + /// @param [inout] graph graph to be optimized + /// @param [in] passes passes to be used + /// @return SUCCESS optimize successfully + /// @return NOT_CHANGED not optimized + /// @return others optimized failed + /// @author + /// + static Status Run(const ge::ComputeGraphPtr &graph, vector &passes); + + ~PassManager(); + + private: + vector graph_passes_; +}; +} // namespace ge +#endif // GE_INC_PASS_MANAGER_H_ diff --git a/src/ge/init/gelib.cc b/src/ge/init/gelib.cc new file mode 100644 index 00000000..c8852346 --- /dev/null +++ b/src/ge/init/gelib.cc @@ -0,0 +1,386 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "init/gelib.h" + +#include +#include + +#include +#include +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge/plugin_manager.h" +#include "common/ge/ge_util.h" +#include "common/profiling/profiling_manager.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/manager/graph_var_manager.h" +#include "runtime/kernel.h" +#include "graph/ge_context.h" +#include "graph/ge_global_options.h" +#include "ge/ge_api_types.h" +#include "cce/aicpu_engine.h" +#include "cce/fwk_adpt_struct.h" +#include "graph/load/new_model_manager/model_manager.h" +#include "omm/csa_interact.h" +#include "common/properties_manager.h" + +using Json = nlohmann::json; + +namespace ge { +namespace { +const int kDecimal = 10; +} // namespace +static std::shared_ptr instancePtr_ = nullptr; + +// Initial each module of GE, if one failed, release all +Status GELib::Initialize(const map &options) { + GELOGI("initial start"); + GEEVENT("[GEPERFTRACE] GE Init Start"); + // Multiple initializations are not allowed + instancePtr_ = MakeShared(); + if (instancePtr_ == nullptr) { + GELOGE(GE_CLI_INIT_FAILED, "GeLib initialize failed, malloc shared_ptr failed."); + return GE_CLI_INIT_FAILED; + } + GetMutableGlobalOptions().insert(options.begin(), options.end()); + GetThreadLocalContext().SetGlobalOption(GetMutableGlobalOptions()); + GE_TIMESTAMP_START(Init); + Status ret = instancePtr_->InnerInitialize(options); + if (ret != SUCCESS) { + GELOGE(ret, "GeLib initial failed."); + instancePtr_ = nullptr; + return ret; + } + 
GE_TIMESTAMP_END(Init, "GELib::Initialize"); + return SUCCESS; +} + +Status GELib::InnerInitialize(const map &options) { + // Multiple initializations are not allowed + if (init_flag_) { + GELOGW("multi initializations"); + return SUCCESS; + } + + GELOGI("GE System initial."); + Status init_system_status = SystemInitialize(options); + if (init_system_status != SUCCESS) { + GELOGE(init_system_status); + RollbackInit(); + return init_system_status; + } + + GELOGI("engineManager initial."); + Status init_em_status = engine_manager_.Initialize(options); + if (init_em_status != SUCCESS) { + GELOGE(init_em_status); + RollbackInit(); + return init_em_status; + } + + GELOGI("opsManager initial."); + Status init_ops_status = ops_manager_.Initialize(options); + if (init_ops_status != SUCCESS) { + GELOGE(init_ops_status); + RollbackInit(); + return init_ops_status; + } + + GELOGI("sessionManager initial."); + Status init_sm_status = session_manager_.Initialize(options); + if (init_sm_status != SUCCESS) { + GELOGE(init_sm_status); + RollbackInit(); + return init_sm_status; + } + + GELOGI("memoryMallocSize initial."); + Status init_mem_status = VarManager::Instance(0)->SetMemoryMallocSize(options); + if (init_mem_status != SUCCESS) { + GELOGE(init_mem_status, "failed to set malloc size"); + RollbackInit(); + return init_mem_status; + } + + init_flag_ = true; + GELOGI("GeLib initial success."); + return SUCCESS; +} + +Status GELib::SystemInitialize(const map &options) { + Status status = FAILED; + auto iter = options.find(OPTION_GRAPH_RUN_MODE); + if (iter != options.end()) { + if (GraphRunMode(std::strtol(iter->second.c_str(), nullptr, kDecimal)) >= TRAIN) { + is_train_mode_ = true; + } + } + + iter = options.find(HEAD_STREAM); + head_stream_ = (iter != options.end()) ? 
std::strtol(iter->second.c_str(), nullptr, kDecimal) : false; + + iter = options.find(OPTION_EXEC_ENABLE_DUMP); + if (iter != options.end()) { + int32_t enable_dump_flag = 1; + auto path_iter = options.find(OPTION_EXEC_DUMP_PATH); + if (iter->second == std::to_string(enable_dump_flag) && path_iter != options.end()) { + std::string dump_path = path_iter->second; + if (!dump_path.empty() && dump_path[dump_path.size() - 1] != '/') { + dump_path += "/"; + } + + PropertiesManager::Instance().AddDumpPropertyValue(DUMP_ALL_MODEL, {}); + PropertiesManager::Instance().SetDumpOutputPath(dump_path); + } + } + + if (is_train_mode_) { + InitOptions(options); + status = InitSystemWithOptions(this->options_); + } else { + status = InitSystemWithoutOptions(); + } + return status; +} + +void GELib::InitOptions(const map &options) { + this->options_.session_id = 0; + auto iter = options.find(OPTION_EXEC_SESSION_ID); + if (iter != options.end()) { + this->options_.session_id = std::strtoll(iter->second.c_str(), nullptr, kDecimal); + } + this->options_.device_id = 0; + iter = options.find(OPTION_EXEC_DEVICE_ID); + if (iter != options.end()) { + this->options_.device_id = static_cast(std::strtol(iter->second.c_str(), nullptr, kDecimal)); + } + this->options_.job_id = 0; + iter = options.find(OPTION_EXEC_JOB_ID); + if (iter != options.end()) { + this->options_.job_id = std::strtoll(iter->second.c_str(), nullptr, kDecimal); + } + this->options_.isUseHcom = false; + iter = options.find(OPTION_EXEC_IS_USEHCOM); + if (iter != options.end()) { + std::istringstream(iter->second) >> this->options_.isUseHcom; + } + this->options_.deployMode = false; + iter = options.find(OPTION_EXEC_DEPLOY_MODE); + if (iter != options.end()) { + std::istringstream(iter->second) >> this->options_.deployMode; + } + + iter = options.find(OPTION_EXEC_POD_NAME); + if (iter != options.end()) { + this->options_.podName = iter->second.c_str(); + } + + iter = options.find(OPTION_EXEC_RANK_ID); + if (iter != 
options.end()) { + this->options_.rankId = std::strtoll(iter->second.c_str(), nullptr, kDecimal); + } + iter = options.find(OPTION_EXEC_RANK_TABLE_FILE); + if (iter != options.end()) { + this->options_.rankTableFile = iter->second.c_str(); + } + this->options_.enable_atomic = true; + iter = options.find(OPTION_EXEC_ATOMIC_FLAG); + GE_IF_BOOL_EXEC(iter != options.end(), + this->options_.enable_atomic = std::strtol(iter->second.c_str(), nullptr, kDecimal)); + GELOGI("ge InnerInitialize, the enable_atomic_flag in options_ is %d", this->options_.enable_atomic); +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status GELib::InitSystemWithOptions(Options &options) { + GELOGI("Training init GELib. session Id:%ld, device id :%d ", options.session_id, options.device_id); + GEEVENT("System init with options begin, job id %ld", options.job_id); + std::lock_guard lock(status_mutex_); + GE_IF_BOOL_EXEC(is_system_inited && !is_shutdown, + GELOGW("System init with options is already inited and not shutdown."); + return SUCCESS); + GetContext().Init(); + + // profiling init + if (ProfilingManager::Instance().Init(options) != SUCCESS) { + GELOGW("Profiling init failed."); + } + + std::vector mem_type; + mem_type.push_back(RT_MEMORY_HBM); + Status initMmStatus = MemManager::Instance().Initialize(mem_type); + if (initMmStatus != SUCCESS) { + GELOGE(initMmStatus, "[Initialize] MemoryAllocatorManager initialize failed."); + return initMmStatus; + } + + // Update CSA file + CsaInteract::GetInstance().Init(options.device_id, options.job_id); + Status ret = CsaInteract::GetInstance().WriteJobState(JOBSTATE_RUNNING, JOBSUBSTATE_ENV_INIT); + GE_LOGE_IF(ret != SUCCESS, "write job state failed, ret:%u", ret); + options.physical_device_id = options.device_id; + + // The physical ID is transferred to the logical ID. 
FMK receives physical ID and needs to be converted + uint32_t dev_logic_index = 0; + rtError_t rt_ret = rtGetDeviceIndexByPhyId(static_cast(options.device_id), &dev_logic_index); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, + GELOGE(rt_ret, "rtGetDeviceIndexByPhyId transform index by phyId %d failed", options.device_id); + CsaInteract::GetInstance().WriteErrorCode(rt_ret, ERROR_MODULE_RUNTIME, JOBSUBSTATE_ENV_INIT); + return FAILED); + options.device_id = static_cast(dev_logic_index); + GELOGI("rtGetDeviceIndexByPhyId physical device id:%d,logical device id:%u", options.device_id, dev_logic_index); + + GetContext().SetCtxDeviceId(dev_logic_index); + + GE_CHK_RT_RET(rtSetDevice(options.device_id)); + cce::cceSysInit(); + // In the scenario that the automatic add fusion is set, but there is no cleanaddr operator, + // maybe need to check it + is_system_inited = true; + is_shutdown = false; + + GELOGI("Training init GELib success."); + + return SUCCESS; +} + +Status GELib::SystemShutdownWithOptions(const Options &options) { + GELOGI("Training finalize GELib begin."); + + std::lock_guard lock(status_mutex_); + GE_IF_BOOL_EXEC(is_shutdown || !is_system_inited, + GELOGW("System Shutdown with options is already is_shutdown or system does not inited. 
" + "is_shutdown:%d is_omm_inited:%d", + is_shutdown, is_system_inited); + return SUCCESS); + + GE_CHK_RT(rtDeviceReset(options.device_id)); + + // Update CSA file + Status ret = CsaInteract::GetInstance().WriteJobState(JOBSTATE_SUCCEED); + GE_LOGE_IF(ret != SUCCESS, "write job state failed, ret:%u", ret); + + if (!ProfilingManager::Instance().ProfilingOpTraceOn() && ProfilingManager::Instance().ProfilingOn()) { + ProfilingManager::Instance().StopProfiling(); + } + + is_system_inited = false; + is_shutdown = true; + + GELOGI("Training finalize GELib success."); + + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status GELib::InitSystemWithoutOptions() { + GELOGI("Inference Init GELib begin."); + + std::vector mem_type; + mem_type.push_back(RT_MEMORY_HBM); + Status initMmStatus = MemManager::Instance().Initialize(mem_type); + if (initMmStatus != SUCCESS) { + GELOGE(initMmStatus, "[Initialize] MemoryAllocatorManager initialize failed."); + return initMmStatus; + } + + static bool is_inited = false; + if (is_inited) { + GELOGW("System init without options is already inited, don't need to init again."); + return SUCCESS; + } + cce::cceSysInit(); + is_inited = true; + GELOGI("Inference init GELib success."); + + return SUCCESS; +} + +string GELib::GetPath() { return PluginManager::GetPath(); } + +// Finalize all modules +Status GELib::Finalize() { + GELOGI("finalization start"); + // Finalization is not allowed before initialization + if (!init_flag_) { + GELOGW("not initialize"); + return SUCCESS; + } + + GELOGI("engineManager finalization."); + Status final_em_status = engine_manager_.Finalize(); + GELOGI("sessionManager finalization."); + Status final_sm_status = session_manager_.Finalize(); + + if (final_em_status != SUCCESS) { + GELOGE(final_em_status); + return final_em_status; + } + + if (final_sm_status != SUCCESS) { + GELOGE(final_sm_status); + return final_sm_status; + } + + GELOGI("opsManager finalization."); + Status final_ops_status 
= ops_manager_.Finalize(); + if (final_ops_status != SUCCESS) { + GELOGE(final_ops_status); + return final_ops_status; + } + + GELOGI("VarManagerPool finalization."); + VarManagerPool::Instance().Destroy(); + + GELOGI("MemManager finalization."); + MemManager::Instance().Finalize(); + +#ifdef DAVINCI_CLOUD + if (is_train_mode_) { + GELOGI("System ShutDown."); + Status shutdown_status = SystemShutdownWithOptions(this->options_); + if (shutdown_status != SUCCESS) { + GELOGE(shutdown_status); + return shutdown_status; + } + } + is_train_mode_ = false; +#endif + + instancePtr_ = nullptr; + init_flag_ = false; + GELOGI("finalization success."); + return SUCCESS; +} + +// Get Singleton Instance +std::shared_ptr GELib::GetInstance() { return instancePtr_; } + +void GELib::RollbackInit() { + if (engine_manager_.init_flag_) { + (void)engine_manager_.Finalize(); + } + if (ops_manager_.init_flag_) { + (void)ops_manager_.Finalize(); + } + if (session_manager_.init_flag_) { + (void)session_manager_.Finalize(); + } + MemManager::Instance().Finalize(); + VarManagerPool::Instance().Destroy(); +} +} // namespace ge diff --git a/src/ge/init/gelib.h b/src/ge/init/gelib.h new file mode 100644 index 00000000..b0198c1d --- /dev/null +++ b/src/ge/init/gelib.h @@ -0,0 +1,97 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_INIT_GELIB_H_ +#define GE_INIT_GELIB_H_ + +#include +#include +#include +#include + +#include "engine_manager/dnnengine_manager.h" +#include "opskernel_manager/ops_kernel_manager.h" +#include "session/session_manager.h" +#include "common/ge_inner_error_codes.h" +#include "common/ge_types.h" + +using std::string; +using std::map; +using std::vector; + +namespace ge { +class GELib { + public: + GELib() = default; + ~GELib() = default; + + // get GELib singleton instance + static std::shared_ptr GetInstance(); + + // GE Environment Initialize, return Status: SUCCESS,FAILED + static Status Initialize(const map &options); + + static string GetPath(); + + // GE Environment Finalize, return Status: SUCCESS,FAILED + Status Finalize(); + + // get DNNEngineManager object + DNNEngineManager &DNNEngineManagerObj() { return engine_manager_; } + + // get OpsKernelManager object + OpsKernelManager &OpsKernelManagerObj() { return ops_manager_; } + + // get SessionManager object + SessionManager &SessionManagerObj() { return session_manager_; } + + // get Initial flag + bool InitFlag() const { return init_flag_; } + + // get TrainMode flag + bool isTrainMode() const { return is_train_mode_; } + + // add head stream to model + bool HeadStream() const { return head_stream_; } + + Status InitSystemWithoutOptions(); + Status InitSystemWithOptions(Options &options); + Status SystemShutdownWithOptions(const Options &options); + + private: + GELib(const GELib &); + const GELib &operator=(const GELib &); + Status InnerInitialize(const map &options); + Status SystemInitialize(const map &options); + void RollbackInit(); + void InitOptions(const map &options); + + DNNEngineManager engine_manager_; + OpsKernelManager ops_manager_; + SessionManager session_manager_; + std::mutex status_mutex_; + bool init_flag_ = false; + Options options_; + bool is_train_mode_ = false; + bool is_system_inited = false; + bool is_shutdown = false; + bool is_use_hcom = false; + + bool 
head_stream_ = false; +}; +} // namespace ge + +#endif // GE_INIT_GELIB_H_ diff --git a/src/ge/model/ge_model.cc b/src/ge/model/ge_model.cc new file mode 100644 index 00000000..f9b80eee --- /dev/null +++ b/src/ge/model/ge_model.cc @@ -0,0 +1,82 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "model/ge_model.h" + +#include + +#include "common/debug/log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/attr_utils.h" + +namespace ge { +void GeModel::Init() { + (void)AttrUtils::SetInt(this, ATTR_MODEL_MEMORY_SIZE, 0); + (void)AttrUtils::SetInt(this, ATTR_MODEL_STREAM_NUM, 0); + (void)AttrUtils::SetInt(this, ATTR_MODEL_EVENT_NUM, 0); + (void)AttrUtils::SetInt(this, ATTR_MODEL_WEIGHT_SIZE, 0); + (void)AttrUtils::SetStr(this, ATTR_MODEL_TARGET_TYPE, TARGET_TYPE_MINI); + version_ = 0; + // default attrSize = 5 +} + +GeModel::GeModel() { + attrs_.InitDefault(); + Init(); +} + +const Graph &GeModel::GetGraph() const { return this->graph_; } + +std::shared_ptr GeModel::GetModelTaskDefPtr() const { return this->task_; } + +const TBEKernelStore &GeModel::GetTBEKernelStore() const { return this->tbe_kernal_store_; } + +Buffer GeModel::GetWeight() const { return this->weights_buffer_; } + +std::string GeModel::GetName() const { return this->name_; } + +uint32_t GeModel::GetVersion() const { return this->version_; } + +std::string GeModel::GetPlatformVersion() const { return 
this->platform_version_; } + +uint8_t GeModel::GetPlatformType() const { return this->platform_type_; } + +void GeModel::SetGraph(const Graph &graph) { this->graph_ = graph; } + +void GeModel::SetModelTaskDef(const std::shared_ptr &task) { this->task_ = task; } + +void GeModel::SetTBEKernelStore(const TBEKernelStore &tbe_kernal_store) { + this->tbe_kernal_store_ = tbe_kernal_store; +} + +void GeModel::SetWeight(const Buffer &weights_buffer) { this->weights_buffer_ = weights_buffer; } + +void GeModel::SetName(const std::string &name) { this->name_ = name; } + +void GeModel::SetVersion(uint32_t version) { this->version_ = version; } + +void GeModel::SetPlatformVersion(const std::string &platform_version) { this->platform_version_ = platform_version; } + +void GeModel::SetPlatformType(uint8_t platform_type) { this->platform_type_ = platform_type; } + +void GeModel::SetAttr(const ProtoAttrMapHelper &attrs) { attrs_ = attrs; } + +ProtoAttrMapHelper GeModel::MutableAttrMap() { return attrs_; } + +ConstProtoAttrMapHelper GeModel::GetAttrMap() const { + return ConstProtoAttrMapHelper(attrs_.GetProtoOwner(), attrs_.GetProtoMsg()); +} +} // namespace ge diff --git a/src/ge/model/ge_model.h b/src/ge/model/ge_model.h new file mode 100644 index 00000000..7c8581fb --- /dev/null +++ b/src/ge/model/ge_model.h @@ -0,0 +1,92 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_MODEL_GE_MODEL_H_ +#define GE_MODEL_GE_MODEL_H_ + +#include +#include +#include + +#include "common/tbe_kernel_store.h" +#include "framework/common/debug/log.h" +#include "framework/common/fmk_error_codes.h" +#include "graph/buffer.h" +#include "graph/graph.h" +#include "proto/task.pb.h" + +namespace ge { +const uint32_t INVALID_MODEL_ID = 0xFFFFFFFFUL; +class GE_FUNC_DEV_VISIBILITY GE_FUNC_HOST_VISIBILITY GeModel : public AttrHolder { + public: + GeModel(); + ~GeModel() = default; + GeModel(const GeModel &other) = delete; + GeModel &operator=(const GeModel &other) = delete; + + const Graph &GetGraph() const; + std::shared_ptr GetModelTaskDefPtr() const; + const TBEKernelStore &GetTBEKernelStore() const; + Buffer GetWeight() const; + + std::string GetName() const; + uint32_t GetVersion() const; + std::string GetPlatformVersion() const; + uint8_t GetPlatformType() const; + + void SetGraph(const Graph &graph); + void SetModelTaskDef(const std::shared_ptr &task); + void SetTBEKernelStore(const TBEKernelStore &tbe_kernal_store); + void SetWeight(const Buffer &weights_buffer); + + void SetName(const std::string &name); + void SetVersion(uint32_t version); + void SetPlatformVersion(const std::string &platform_version); + void SetPlatformType(uint8_t platform_type); + + void SetAttr(const ProtoAttrMapHelper &attrs); + + ProtoAttrMapHelper MutableAttrMap() override; + + using AttrHolder::SetAttr; + using AttrHolder::GetAllAttrs; + using AttrHolder::GetAllAttrNames; + + void SetModelId(uint32_t model_id) { model_id_ = model_id; } + uint32_t GetModelId() const { return model_id_; } + + protected: + ConstProtoAttrMapHelper GetAttrMap() const override; + + private: + void Init(); + + ProtoAttrMapHelper attrs_; + + Graph graph_; + std::shared_ptr task_; + TBEKernelStore tbe_kernal_store_; + Buffer weights_buffer_; + + std::string name_; + uint32_t version_ = {0}; + std::string platform_version_; + uint8_t platform_type_ = {0}; + uint32_t model_id_ = 
INVALID_MODEL_ID; +}; +}; // namespace ge +using GeModelPtr = std::shared_ptr; +#endif // GE_MODEL_GE_MODEL_H_ diff --git a/src/ge/omm/csa_interact.cc b/src/ge/omm/csa_interact.cc new file mode 100644 index 00000000..4b50f966 --- /dev/null +++ b/src/ge/omm/csa_interact.cc @@ -0,0 +1,255 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "omm/csa_interact.h" + +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "framework/common/util.h" +#include "graph/ge_context.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/utils/tensor_utils.h" +#include "mmpa/mmpa_api.h" +#include "nlohmann/json.hpp" + +namespace ge { +namespace { +const char FMK_STATUS_FILE_DIR_ENV[] = "FMK_STATUS_FILE_DIR"; +const char JOBSTATE_FILE_NAME[] = "jobstateupdate_framework"; +const char HCOM_DETECT_FILE_NAME[] = "hcom_detection_result"; + +const char FILE_SEPARATE[] = "/"; + +const uint32_t CSA_DIR_RWX_RIGHT = 0750; +} // namespace + +/// +/// @brief Obtain CsaInteract instance +/// @return CsaInteract instance +/// +CsaInteract &CsaInteract::GetInstance() { + static CsaInteract instance; + return instance; +} + +/// +/// @brief CsaInteract instance initialization +/// @param [in] dev_index device index +/// @param [in] job_id job id +/// @return void +/// +void CsaInteract::Init(int32_t dev_index, int64_t job_id) { + if (!is_init_) { + dev_index_ = 
dev_index; + job_id_ = job_id; + char *file_dir_env = std::getenv(FMK_STATUS_FILE_DIR_ENV); + string csa_path_prefix; + if (file_dir_env != nullptr) { + csa_path_prefix = file_dir_env; + } + if (!csa_path_prefix.empty()) { + job_state_file_ = csa_path_prefix + std::to_string(dev_index_) + FILE_SEPARATE + JOBSTATE_FILE_NAME; + hcom_detect_file_ = csa_path_prefix + std::to_string(dev_index_) + FILE_SEPARATE + HCOM_DETECT_FILE_NAME; + } + is_init_ = true; + } +} + +/// +/// @brief Update job state file +/// @param [in] job_state job state +/// @param [in] job_sub_state detailed job state +/// @param [in] module_ret_errcode sub module training failure error code +/// @param [in] error_module error module identified by FMK +/// @return Status +/// +Status CsaInteract::WriteJobState(JobState job_state, JobSubState job_sub_state, uint32_t module_ret_errcode, + ErrorModule error_module) { + if (!is_init_) { + GELOGE(INTERNAL_ERROR, "CsaInteract has not init, can't WriteJobState"); + return INTERNAL_ERROR; + } + if ((curr_state_ == JOBSTATE_FAILED) || (curr_state_ == JOBSTATE_KILLED)) { + return SUCCESS; + } + + if (job_state_file_.empty()) { + return SUCCESS; + } + + std::string content; + try { + nlohmann::json content_json; + content_json["job_id"] = job_id_; + content_json["jobstate"] = job_state; + // Only the running or running failure state has a job sub state + if ((job_state == JOBSTATE_RUNNING) || (job_state == JOBSTATE_FAILED)) { + content_json["job_sub_state"] = job_sub_state; + } + content_json["time"] = CurrentTimeInStr(); + // Write error code only if run failed + if (job_state == JOBSTATE_FAILED) { + content_json["errorcode"] = module_ret_errcode; + content_json["errmodule"] = error_module; + } + + content = content_json.dump(); + } catch (const nlohmann::json::exception &e) { + GELOGE(INTERNAL_ERROR, "build jobstate content json string failed, exception:%s job_state:%u", e.what(), job_state); + return INTERNAL_ERROR; + } + + if (WriteFile(job_state_file_, 
content) != SUCCESS) { + // The error log subfunction has been printed and will not print again + return INTERNAL_ERROR; + } + + curr_state_ = job_state; + return SUCCESS; +} + +/// +/// @brief Update error code in the job state file +/// @param [in] module_ret_errcode sub module training failure error code +/// @param [in] error_module error module identified by FMK +/// @param [in] job_sub_state detailed job state +/// @return void +/// +void CsaInteract::WriteErrorCode(uint32_t module_ret_errcode, ErrorModule error_module, JobSubState job_sub_state) { + // The error log subfunction has been printed and will not print again + Status ret = WriteJobState(JOBSTATE_FAILED, job_sub_state, module_ret_errcode, error_module); + if (ret != SUCCESS) { + GELOGW("write error code fail. ret_code: %u, status: %u", module_ret_errcode, job_sub_state); + } +} + +/// +/// @brief Record errors that occurred durning the training +/// @param [in] module_ret_errcode sub module training failure error code +/// @param [in] error_module error module identified by FMK +/// @param [in] job_sub_state detailed job state +/// @return void +/// +void CsaInteract::StoreInternalErrorCode(uint32_t module_ret_errcode, ErrorModule error_module, + JobSubState job_sub_state) { + is_have_internal_error_ = true; + + csa_error_code_.module_ret_errcode = module_ret_errcode; + csa_error_code_.error_module = error_module; + csa_error_code_.job_sub_state = job_sub_state; +} + +/// +/// @brief Update training error code in the job state file +/// @return void +/// +void CsaInteract::WriteInternalErrorCode() { + if (is_have_internal_error_) { + WriteErrorCode(csa_error_code_.module_ret_errcode, csa_error_code_.error_module, csa_error_code_.job_sub_state); + } +} + +/// +/// @brief Update network connectivity detect file +/// @param [in] content network connectivity content +/// @return Status +/// +Status CsaInteract::WriteHcomDetection(const std::string &content) { + if (!is_init_) { + GELOGE(INTERNAL_ERROR, 
"CsaInteract has not init, can't WriteJobState"); + return INTERNAL_ERROR; + } + + if (hcom_detect_file_.empty()) { + return SUCCESS; + } + + return WriteFile(hcom_detect_file_, content); +} + +/// +/// @ingroup WriteFile +/// @brief Write the content into the file. If the file does not exist, create the file +/// @param [in] file_name: File name to be written +/// @param [in] content: Contents to be written +/// @return Status +/// +Status CsaInteract::WriteFile(const std::string &file_name, const std::string &content) { + // if file path is not exist, then make path + INT32 flags = O_WRONLY | O_TRUNC | O_CREAT; + int32_t fd = mmOpen2(file_name.c_str(), flags, M_IRUSR | M_IWUSR | S_IRGRP); + if (fd == EN_ERROR) { + if (MakePath(file_name) != SUCCESS) { + GELOGE(INTERNAL_ERROR, "csainteract create file path fail, errno is %d", errno); + return INTERNAL_ERROR; + } + fd = mmOpen2(file_name.c_str(), flags, M_IRUSR | M_IWUSR | S_IRGRP); + if (fd == EN_ERROR) { + GELOGE(INTERNAL_ERROR, "open file fail, errno is %d", errno); + return INTERNAL_ERROR; + } + } + + ssize_t ret = write(fd, content.c_str(), content.length()); + if (ret == EN_ERROR) { + GELOGE(INTERNAL_ERROR, "write file fail, errno is %d", errno); + ret = mmClose(fd); + if (ret == EN_ERROR) { + GELOGE(INTERNAL_ERROR, "close file fail, error is %d", errno); + } + return INTERNAL_ERROR; + } + ret = mmClose(fd); + if (ret == EN_ERROR) { + GELOGE(INTERNAL_ERROR, "close file fail, error is %d", errno); + return INTERNAL_ERROR; + } + + return SUCCESS; +} + +/// +/// @ingroup MakePath +/// @brief Verify whether the file path exists, if not, recursively create the folder +/// @param [in] file_name: File name to be verified +/// @return Status +/// +Status CsaInteract::MakePath(const std::string &file_name) { + std::size_t found = file_name.find_last_of("/"); + if (found == std::string::npos) { + return PARAM_INVALID; + } + + std::string file_path = file_name.substr(0, found + 1); + if (mmAccess(file_path.c_str()) == 
EN_OK) { + return SUCCESS; + } + + found = file_path.find_first_of("/"); + while (found != std::string::npos) { + std::string pre_path = file_path.substr(0, found + 1); + if (mmAccess(pre_path.c_str()) != EN_OK) { + if (mmMkdir(pre_path.c_str(), S_IRWXU) != EN_OK) { + GELOGE(INTERNAL_ERROR, "csainteract mkdir fail, errno is %d", errno); + return INTERNAL_ERROR; + } + } + found = file_path.find_first_of("/", found + 1); + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/omm/csa_interact.h b/src/ge/omm/csa_interact.h new file mode 100644 index 00000000..0a609e09 --- /dev/null +++ b/src/ge/omm/csa_interact.h @@ -0,0 +1,183 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OMM_CSA_INTERACT_H_ +#define GE_OMM_CSA_INTERACT_H_ + +#include + +#include "framework/common/ge_inner_error_codes.h" + +namespace ge { +enum JobState { + JOBSTATE_WAITING = 1, + JOBSTATE_RUNNING, + JOBSTATE_KILLING, + JOBSTATE_SUCCEED, + JOBSTATE_FAILED, + JOBSTATE_KILLED, + JOBSTATE_UNKOWN +}; + +enum JobSubState { + JOBSUBSTATE_ENV_INIT = 201, + JOBSUBSTATE_ENV_FIN, + JOBSUBSTATE_RESOUCE_ALLOC, + JOBSUBSTATE_MODEL_COMPILE, + JOBSUBSTATE_GRAPH_PREPARE, + JOBSUBSTATE_GRAPH_SPLIT, + JOBSUBSTATE_GRAPH_OPTIMIZE, + JOBSUBSTATE_GRAPH_BUILD, + JOBSUBSTATE_GRAPH_LOAD, + JOBSUBSTATE_GRAPH_EXEC, + JOBSUBSTATE_GRAPH_UNLOAD, + JOBSUBSTATE_OTHER +}; + +enum ErrorModule { + ERROR_MODULE_DRIVER = 0x01, + ERROR_MODULE_RUNTIME = 0x04, + ERROR_MODULE_CCE = 0x06, + ERROR_MODULE_FMK = 0x08, + ERROR_MODULE_HCCL = 0x12 +}; + +struct CsaErrorCode { + CsaErrorCode() + : module_ret_errcode(0), + error_module(ERROR_MODULE_FMK), + job_sub_state(JOBSUBSTATE_OTHER) {} + ~CsaErrorCode() {} + uint32_t module_ret_errcode; + ErrorModule error_module; + JobSubState job_sub_state; +}; +class CsaInteract { + public: + /// + /// @brief Obtain CsaInteract instance + /// @return CsaInteract instance + /// + static CsaInteract& GetInstance(); + + /// + /// @brief CsaInteract instance initialization + /// @param [in] dev_index device index + /// @param [in] job_id job id + /// @return void + /// + void Init(int32_t dev_index, int64_t job_id); + + /// + /// @brief Update job state file + /// @param [in] job_state job state + /// @param [in] job_sub_state detailed job state + /// @param [in] module_ret_errcode sub module training failure error code + /// @param [in] error_module error module identified by FMK + /// @return Status + /// + Status WriteJobState(JobState job_state, + JobSubState job_sub_state = JOBSUBSTATE_OTHER, + uint32_t module_ret_errcode = SUCCESS, + ErrorModule error_module = ERROR_MODULE_FMK); + + /// + /// @brief Update error code in the job state file + /// @param 
[in] module_ret_errcode sub module training failure error code + /// @param [in] error_module error module identified by FMK + /// @param [in] job_sub_state detailed job state + /// @return void + /// + void WriteErrorCode(uint32_t module_ret_errcode, ErrorModule error_module, + JobSubState job_sub_state); + + /// + /// @brief Record errors that occurred durning the training + /// @param [in] module_ret_errcode sub module training failure error code + /// @param [in] error_module error module identified by FMK + /// @param [in] job_sub_state detailed job state + /// @return void + /// + void StoreInternalErrorCode(uint32_t module_ret_errcode, + ErrorModule error_module, + JobSubState job_sub_state); + + /// + /// @brief Update training error code in the job state file + /// @return void + /// + void WriteInternalErrorCode(); + + /// + /// @brief Update network connectivity detect file + /// @param [in] content network connectivity content + /// @return Status + /// + Status WriteHcomDetection(const std::string& content); + + private: + CsaInteract() + : dev_index_(0), + job_id_(0), + is_init_(false), + curr_state_(JOBSTATE_UNKOWN), + is_have_internal_error_(false) {} + + ~CsaInteract() {} + + CsaInteract(const CsaInteract&) = delete; + CsaInteract(CsaInteract&&) = delete; + CsaInteract& operator=(const CsaInteract&) = delete; + CsaInteract& operator=(CsaInteract&&) = delete; + + /// + /// @ingroup WriteFile + /// @brief Write the content into the file. 
If the file does not exist, create the file + /// @param [in] file_name: File name to be written + /// @param [in] content: Contents to be written + /// @return Status + /// + Status WriteFile(const std::string& file_name, const std::string& content); + + /// + /// @ingroup MakePath + /// @brief Verify whether the file path exists, if not, recursively create the folder + /// @param [in] file_name: File name to be verified + /// @return Status + /// + Status MakePath(const std::string& file_name); + + // device index + int32_t dev_index_; + // job id + int64_t job_id_; + // is initialization complete + bool is_init_; + // current job state + JobState curr_state_; + // job state file + std::string job_state_file_; + // network connectivity detect file + std::string hcom_detect_file_; + // identification of internal errors that occurred during the training + bool is_have_internal_error_; + // error code information + CsaErrorCode csa_error_code_; +}; +} // namespace ge + +#endif // GE_OMM_CSA_INTERACT_H_ + diff --git a/src/ge/opskernel_manager/ops_kernel_manager.cc b/src/ge/opskernel_manager/ops_kernel_manager.cc new file mode 100755 index 00000000..c07fd51b --- /dev/null +++ b/src/ge/opskernel_manager/ops_kernel_manager.cc @@ -0,0 +1,382 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "opskernel_manager/ops_kernel_manager.h" + +#include +#include +#include +#include + +#include "../init/gelib.h" +#include "framework/common/debug/ge_log.h" +#include "ge/ge_api.h" + +namespace { +const char *const kInitialize = "Initialize"; +const char *const kGetOpsKernelInfoStores = "GetOpsKernelInfoStores"; +const char *const kGetGraphOptimizerObjs = "GetGraphOptimizerObjs"; +const char *const kFinalize = "Finalize"; +} + +namespace ge { +OpsKernelManager::OpsKernelManager() + : plugin_manager_(), init_flag_(false), enable_fe_flag_(false), enable_aicpu_flag_(false) {} + +OpsKernelManager::~OpsKernelManager() { + graph_optimizers_.clear(); + ops_kernel_store_.clear(); + ops_kernel_info_.clear(); +} + +Status OpsKernelManager::Initialize(const map &options_const) { + if (init_flag_) { + GELOGW("OpsKernelManager has been initialized."); + return SUCCESS; + } + std::map options(options_const); + Status ret = InitPluginOptions(options); + if (ret != SUCCESS) { + GELOGE(ret, "[OpsKernelManager] [Initialize] parse pluginFlag from ge options failed."); + return ret; + } + + vector func_check_list = {kInitialize, kGetOpsKernelInfoStores, kGetGraphOptimizerObjs, kFinalize}; + string extern_engine_path; + + auto iter = options.find(OPTION_EXEC_IS_USEHCOM); + if (iter == options.end()) { + GELOGI("OPTION_EXEC_IS_USEHCOM is not set, default is single P"); + options.emplace("ge.exec.isUseHcom", to_string(0)); + } + + GetExternalEnginePath(extern_engine_path); + GELOGI("OPTION_EXEC_EXTERN_PLUGIN_PATH=%s.", extern_engine_path.c_str()); + + ret = plugin_manager_.LoadSo(extern_engine_path, func_check_list); + if (ret == SUCCESS) { + initialize_ = options; + Status rst0 = plugin_manager_.InvokeAll&, Status>(kInitialize, initialize_); + Status rst1 = + plugin_manager_.InvokeAll&>(kGetOpsKernelInfoStores, ops_kernel_store_); + Status rst2 = plugin_manager_.InvokeAll&>(kGetGraphOptimizerObjs, graph_optimizers_); + if ((rst0 != SUCCESS) || (rst1 != SUCCESS) || (rst2 
!= SUCCESS)) { + GELOGE(GE_OPS_GET_NO_VALID_SO); + return GE_OPS_GET_NO_VALID_SO; + } + ret = CheckPluginPtr(); + if (ret != SUCCESS) { + return ret; + } + ret = InitOpKernelInfoStores(options); + if (ret != SUCCESS) { + return ret; + } + InitOpsKernelInfo(); + ret = InitGraphOptimzers(options); + if (ret != SUCCESS) { + return ret; + } + init_flag_ = true; + return SUCCESS; + } else { + GELOGE(ret, "Failed to find any valid so file."); + return ret; + } +} + +void OpsKernelManager::GetExternalEnginePath(std::string &extern_engine_path) { + GELOGI("Enter get external engine so path schedule"); + const char *path_env = std::getenv("ASCEND_ENGINE_PATH"); + if (path_env != nullptr) { + extern_engine_path = path_env; + GELOGI("OpsKernelManager get external engine so path from env."); + return; + } + std::string path_base = PluginManager::GetPath(); + std::string so_path = "plugin/opskernel/"; + std::string path = path_base + so_path; + extern_engine_path = (path + "libfe.so" + ":") + + (path + "libge_local_engine.so" + ":") + + (path + "librts_engine.so" + ":") + + (path + "libaicpu_engine.so" + ":") + + (path_base + "libhccl.so"); +} + +Status OpsKernelManager::InitPluginOptions(const map &options) { + Status ret; + + // parse fe + ret = ParsePluginOptions(options, GE_FE_FLAG, enable_fe_flag_); + if (ret != SUCCESS) { + return ret; + } + + // parse aiCpu + ret = ParsePluginOptions(options, GE_AICPU_FLAG, enable_aicpu_flag_); + if (ret != SUCCESS) { + return ret; + } + + return SUCCESS; +} + +Status OpsKernelManager::ParsePluginOptions(const map &options, const string &plugin_name, + bool &enable_flag) { + GELOGI("Parse the Plugin Options, plugin_name:%s.", plugin_name.c_str()); + auto iter = options.find(plugin_name); + if (iter != options.end()) { + try { + int32_t flag = std::stoi(iter->second.c_str()); + if (flag == 0) { + enable_flag = false; + } else if (flag == 1) { + enable_flag = true; + } else { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:%s, its value %s is 
invalid, it must be 0 or 1.", plugin_name.c_str(), + iter->second.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + } catch (std::invalid_argument &) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.feFlag, its value %s is invalid_argument, it must be 0 or 1.", + iter->second.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } catch (std::out_of_range &) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:ge.feFlag, its value %s is out of range, it must be 0 or 1.", + iter->second.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } catch (...) { + GELOGE(GE_GRAPH_OPTIONS_INVALID, "Key:%s, its value %s is invalid, it must be 0 or 1.", plugin_name.c_str(), + iter->second.c_str()); + return GE_GRAPH_OPTIONS_INVALID; + } + } else { + GELOGI("Not find key %s, set to default value false.", plugin_name.c_str()); + enable_flag = false; + } + + return SUCCESS; +} + +Status OpsKernelManager::CheckPluginPtr() { + for (auto iter = ops_kernel_store_.begin(); iter != ops_kernel_store_.end(); ++iter) { + if (iter->second == nullptr) { + GELOGE(INTERNAL_ERROR, "CheckPluginPtr OpsKernelInfoStorePtr is null"); + return FAILED; + } + } + for (auto iter1 = graph_optimizers_.begin(); iter1 != graph_optimizers_.end(); ++iter1) { + if (iter1->second == nullptr) { + GELOGE(INTERNAL_ERROR, "CheckPluginPtr GraphOptimizerPtr is null"); + return FAILED; + } + } + return SUCCESS; +} + +Status OpsKernelManager::InitOpKernelInfoStores(const map &options) { + GELOGI("The number of OpKernelInfoStoreObjs are %lu.", ops_kernel_store_.size()); + for (const auto &it : ops_kernel_store_) { + GELOGI("OpKernelInfoStore name: %s.", (it.first).c_str()); + Status ret = it.second->Initialize(options); + if (ret != SUCCESS) { + GELOGE(GE_OPS_KERNEL_STORE_INIT_FAILED, "OpKernelInfoStore: %s initialize failed.", (it.first).c_str()); + return GE_OPS_KERNEL_STORE_INIT_FAILED; + } + } + + return SUCCESS; +} + +void OpsKernelManager::InitOpsKernelInfo() { + ops_kernel_info_.clear(); + for (const auto &it : ops_kernel_store_) { + map 
op_infos{}; + it.second->GetAllOpsKernelInfo(op_infos); + for (const auto &op_info_it : op_infos) { + auto op_info_copy = op_info_it.second; + // flush ops kernel + op_info_copy.opKernelLib = it.first; + ops_kernel_info_[op_info_it.first].emplace_back(op_info_copy); + GELOGD("OpKernelInfoStore name: %s, found op type is %s, engine name is %s, opkernel name is %s", + (it.first).c_str(), op_info_it.first.c_str(), op_info_it.second.engine.c_str(), + op_info_it.second.opKernelLib.c_str()); + } + } + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "InitOpsKernelInfo failed."); + return; + } + // sort opinfo of ops_kernel_info_ + for (auto &it : ops_kernel_info_) { + if (it.second.empty()) { + continue; + } + auto comp_func = [this, &instance_ptr](const OpInfo &op_a, const OpInfo &op_b) -> bool { + const string &a = op_a.engine; + const string &b = op_b.engine; + // check if a or b is registered + if (!(instance_ptr->DNNEngineManagerObj().IsEngineRegistered(a))) { + return false; + } + if (!(instance_ptr->DNNEngineManagerObj().IsEngineRegistered(b))) { + return true; + } + // compare compute cost of a and b, IsEngineRegistered make sure engine is not nullptr + auto engine_a = instance_ptr->DNNEngineManagerObj().GetEngine(a); + auto engine_b = instance_ptr->DNNEngineManagerObj().GetEngine(b); + DNNEngineAttribute attr_a, attr_b; + engine_a->GetAttributes(attr_a); + engine_b->GetAttributes(attr_b); + return attr_a.compute_cost < attr_b.compute_cost; + }; + // Sort the OpInfos based on the compute cost of the engine + std::sort(it.second.begin(), it.second.end(), comp_func); + } + GELOGI("Init opsKernelInfo finished, size is %zu", ops_kernel_info_.size()); +} + +Status OpsKernelManager::InitGraphOptimzers(const map &options) { + GELOGI("The number of GraphOptimzerObjs are %zu.", graph_optimizers_.size()); + for (const auto &it : graph_optimizers_) { + GELOGI("GraphOptimzer name: %s.", 
(it.first).c_str()); + GraphOptimizerAttribute attrs; + GE_CHK_STATUS_RET(it.second->GetAttributes(attrs)) + std::shared_ptr instance_ptr = ge::GELib::GetInstance(); + if (instance_ptr == nullptr) { + GELOGE(GE_CLI_GE_NOT_INITIALIZED, "InitGraphOptimzers failed."); + return GE_CLI_GE_NOT_INITIALIZED; + } + if (!instance_ptr->DNNEngineManagerObj().IsEngineRegistered(attrs.engineName)) { + GELOGW("Engine: %s is not registered.", attrs.engineName.c_str()); + continue; + } + Status ret = it.second->Initialize(options); + if (ret != SUCCESS) { + GELOGE(GE_OPS_GRAPH_OPTIMIZER_INIT_FAILED, "GraphOptimzer: %s initialize failed.", (it.first).c_str()); + return GE_OPS_GRAPH_OPTIMIZER_INIT_FAILED; + } + } + + return SUCCESS; +} + +Status OpsKernelManager::Finalize() { + if (!init_flag_) { + GELOGW("Finalize is not allowed, initialize first is necessary."); + return SUCCESS; + } + GELOGI("free ops kernel resource."); + for (auto iter = ops_kernel_store_.begin(); iter != ops_kernel_store_.end(); ++iter) { + GELOGI("OpsKernelStore finalize, name: %s.", (iter->first).c_str()); + Status status = iter->second->Finalize(); + if (SUCCESS != status) { + GELOGE(status, "OpsKernelStore finalize failed, name: %s.", (iter->first).c_str()); + return status; + } + } + for (auto iter = graph_optimizers_.begin(); iter != graph_optimizers_.end(); ++iter) { + GELOGI("GraphOptimzers finalize, name: %s.", (iter->first).c_str()); + Status status = iter->second->Finalize(); + if (status != SUCCESS) { + GELOGE(status, "GraphOptimzers finalize failed, name: %s.", (iter->first).c_str()); + return status; + } + } + + Status ret = FinalizeOpsKernel(); + if (ret != SUCCESS) { + GELOGE(ret, "free ops kernel resource failed."); + return ret; + } + + init_flag_ = false; + return SUCCESS; +} + +const vector &OpsKernelManager::GetOpsKernelInfo(const string &op_type) { + auto find = ops_kernel_info_.find(op_type); + if (find != ops_kernel_info_.end()) { + return find->second; + } else { + InitOpsKernelInfo(); 
+ find = ops_kernel_info_.find(op_type); + if (find != ops_kernel_info_.end()) { + return find->second; + } + GELOGW("Failed to get opsKernelInfo object by type: %s.", op_type.c_str()); + return empty_op_info_; + } +} + +const map> &OpsKernelManager::GetAllOpsKernelInfo() const { return ops_kernel_info_; } + +OpsKernelInfoStorePtr OpsKernelManager::GetOpsKernelInfoStore(const std::string &name) const { + auto find = ops_kernel_store_.find(name); + if (find != ops_kernel_store_.end()) { + return find->second; + } + + GELOGW("Failed to get opsKernelInfoStore object by name. OpKernelLibName is %s", name.c_str()); + return nullptr; +} + +const map &OpsKernelManager::GetAllOpsKernelInfoStores() const { + return ops_kernel_store_; +} + +const map &OpsKernelManager::GetAllGraphOptimizerObjs() const { return graph_optimizers_; } + +void OpsKernelManager::GetGraphOptimizerByEngine(const std::string &engine_name, + vector &graph_optimizer) const { + for (const auto &it : graph_optimizers_) { + GraphOptimizerAttribute attrs; + if (it.second->GetAttributes(attrs) != SUCCESS) { + GELOGW("Get GraphOptimzer name: %s attributes failed.", (it.first).c_str()); + continue; + } + if (attrs.engineName == engine_name) { + GELOGI("GetGraphOptimizerByEngine GraphOptimzer name: %s, engineName: %s", (it.first).c_str(), + attrs.engineName.c_str()); + graph_optimizer.push_back(it.second); + } + } + + if (graph_optimizer.empty()) { + GELOGI("GetGraphOptimizerByEngine EngineName %s has no graph_optimizer.", engine_name.c_str()); + } +} + +bool OpsKernelManager::GetEnableFeFlag() const { return enable_fe_flag_; } + + +bool OpsKernelManager::GetEnableAICPUFlag() const { return enable_aicpu_flag_; } + + +bool OpsKernelManager::GetEnablePluginFlag() const { return (enable_fe_flag_ || enable_aicpu_flag_); } + +Status OpsKernelManager::FinalizeOpsKernel() { + GELOGI("ge invoke ops kernal finalize."); + Status ret = plugin_manager_.InvokeAll(kFinalize); + if (ret != SUCCESS) { + GELOGE(ret, 
"[Finalize] invoke Fe finalize failed."); + return ret; + } + + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/opskernel_manager/ops_kernel_manager.h b/src/ge/opskernel_manager/ops_kernel_manager.h new file mode 100644 index 00000000..f779d2ec --- /dev/null +++ b/src/ge/opskernel_manager/ops_kernel_manager.h @@ -0,0 +1,119 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OPSKERNEL_MANAGER_OPS_KERNEL_MANAGER_H_ +#define GE_OPSKERNEL_MANAGER_OPS_KERNEL_MANAGER_H_ + +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/ge/plugin_manager.h" +#include "common/ge_inner_error_codes.h" +#include "common/opskernel/ops_kernel_info_store.h" +#include "common/optimizer/graph_optimizer.h" +#include "graph/optimize/graph_optimize.h" +#include "framework/common/ge_inner_error_codes.h" +#include "ge/ge_api_types.h" +#include "runtime/base.h" + +using std::string; +using std::map; +using std::vector; + +namespace ge { +using OpsKernelInfoStorePtr = std::shared_ptr; + +class OpsKernelManager { + public: + friend class GELib; + + // get opsKernelInfo by type + const vector &GetOpsKernelInfo(const string &op_type); + + // get all opsKernelInfo + const map> &GetAllOpsKernelInfo() const; + + // get opsKernelInfoStore by name + OpsKernelInfoStorePtr GetOpsKernelInfoStore(const std::string &name) const; + + // get all opsKernelInfoStore + const map 
&GetAllOpsKernelInfoStores() const; + + // get all graph_optimizer + const map &GetAllGraphOptimizerObjs() const; + + // get subgraphOptimizer by engine name + void GetGraphOptimizerByEngine(const std::string &engine_name, vector &graph_optimizer) const; + + // get enableFeFlag + bool GetEnableFeFlag() const; + + // get enableAICPUFlag + bool GetEnableAICPUFlag() const; + + // get enablePluginFlag + bool GetEnablePluginFlag() const; + + // Finalize other ops kernel resource + Status FinalizeOpsKernel(); + + private: + OpsKernelManager(); + ~OpsKernelManager(); + + // opsKernelManager initialize, load all opsKernelInfoStore and graph_optimizer + Status Initialize(const map &options); + + // opsKernelManager finalize, unload all opsKernelInfoStore and graph_optimizer + Status Finalize(); + + Status InitOpKernelInfoStores(const map &options); + + Status CheckPluginPtr(); + + void GetExternalEnginePath(std::string &path); + + void InitOpsKernelInfo(); + + Status InitGraphOptimzers(const map &options); + + Status InitPluginOptions(const map &options); + + Status ParsePluginOptions(const map &options, const string &plugin_name, bool &enable_flag); + + PluginManager plugin_manager_; + // opsKernelInfoStore + map ops_kernel_store_{}; + // graph_optimizer + map graph_optimizers_{}; + // opsKernelInfo + map> ops_kernel_info_{}; + + map initialize_{}; + + vector empty_op_info_{}; + + bool init_flag_; + + bool enable_fe_flag_ = false; + + bool enable_aicpu_flag_ = false; +}; +} // namespace ge +#endif // GE_OPSKERNEL_MANAGER_OPS_KERNEL_MANAGER_H_ diff --git a/src/ge/plugin/engine/CMakeLists.txt b/src/ge/plugin/engine/CMakeLists.txt new file mode 100644 index 00000000..45c3d302 --- /dev/null +++ b/src/ge/plugin/engine/CMakeLists.txt @@ -0,0 +1,45 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +# libengine.so +file(GLOB_RECURSE SRC_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "*.cc" + ) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}) +include_directories(${GE_SOURCE_DIR}/src) +include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/framework/common) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) +include_directories(${GE_SOURCE_DIR}/build) + +######### libengine.so ############# +add_library(engine SHARED ${SRC_LIST}) +target_compile_definitions(engine PRIVATE + REUSE_MEMORY=1 + PLATFORM_CLOUD + PROTOBUF_INLINE_NOT_IN_HEADERS=0 + Werror) +target_link_libraries(engine + ${slog} + rt + dl) diff --git a/src/ge/plugin/engine/dnnengines.cc b/src/ge/plugin/engine/dnnengines.cc new file mode 100644 index 00000000..e75fb74b --- /dev/null +++ b/src/ge/plugin/engine/dnnengines.cc @@ -0,0 +1,113 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "plugin/engine/dnnengines.h" + +#include +#include +#include + +namespace ge { +AICoreDNNEngine::AICoreDNNEngine(const std::string &engine_name) { + engine_attribute_.engine_name = engine_name; + engine_attribute_.compute_cost = COST_0; + engine_attribute_.runtime_type = DEVICE; + engine_attribute_.engine_input_format = FORMAT_RESERVED; + engine_attribute_.engine_output_format = FORMAT_RESERVED; +} + +AICoreDNNEngine::AICoreDNNEngine(const DNNEngineAttribute &attrs) { engine_attribute_ = attrs; } + +Status AICoreDNNEngine::Initialize(const std::map &options) { return SUCCESS; } + +Status AICoreDNNEngine::Finalize() { return SUCCESS; } + +void AICoreDNNEngine::GetAttributes(DNNEngineAttribute &attrs) const { attrs = engine_attribute_; } + +VectorCoreDNNEngine::VectorCoreDNNEngine(const std::string &engine_name) { + engine_attribute_.engine_name = engine_name; + engine_attribute_.compute_cost = COST_1; + engine_attribute_.runtime_type = DEVICE; + engine_attribute_.engine_input_format = FORMAT_RESERVED; + engine_attribute_.engine_output_format = FORMAT_RESERVED; +} + +VectorCoreDNNEngine::VectorCoreDNNEngine(const DNNEngineAttribute &attrs) { engine_attribute_ = attrs; } + +Status VectorCoreDNNEngine::Initialize(const std::map &options) { return SUCCESS; } + +Status VectorCoreDNNEngine::Finalize() { return SUCCESS; } + +void VectorCoreDNNEngine::GetAttributes(DNNEngineAttribute &attrs) const { attrs = engine_attribute_; } + +AICpuDNNEngine::AICpuDNNEngine(const std::string &engine_name) { + engine_attribute_.engine_name = engine_name; 
+ engine_attribute_.compute_cost = COST_2; + engine_attribute_.runtime_type = DEVICE; + engine_attribute_.engine_input_format = FORMAT_RESERVED; + engine_attribute_.engine_output_format = FORMAT_RESERVED; +} + +AICpuDNNEngine::AICpuDNNEngine(const DNNEngineAttribute &attrs) { engine_attribute_ = attrs; } + +Status AICpuDNNEngine::Initialize(const std::map &options) { return SUCCESS; } + +Status AICpuDNNEngine::Finalize() { return SUCCESS; } + +void AICpuDNNEngine::GetAttributes(DNNEngineAttribute &attrs) const { attrs = engine_attribute_; } + +GeLocalDNNEngine::GeLocalDNNEngine(const std::string &engine_name) { + engine_attribute_.engine_name = engine_name; + engine_attribute_.engine_input_format = FORMAT_RESERVED; + engine_attribute_.engine_output_format = FORMAT_RESERVED; +} + +GeLocalDNNEngine::GeLocalDNNEngine(const DNNEngineAttribute &attrs) { engine_attribute_ = attrs; } + +Status GeLocalDNNEngine::Initialize(const std::map &options) { return SUCCESS; } + +Status GeLocalDNNEngine::Finalize() { return SUCCESS; } + +void GeLocalDNNEngine::GetAttributes(DNNEngineAttribute &attrs) const { attrs = engine_attribute_; } + +RtsDNNEngine::RtsDNNEngine(const std::string &engine_name) { + engine_attribute_.engine_name = engine_name; + engine_attribute_.engine_input_format = FORMAT_RESERVED; + engine_attribute_.engine_output_format = FORMAT_RESERVED; +} + +RtsDNNEngine::RtsDNNEngine(const DNNEngineAttribute &attrs) { engine_attribute_ = attrs; } + +Status RtsDNNEngine::Initialize(const std::map &options) { return SUCCESS; } + +Status RtsDNNEngine::Finalize() { return SUCCESS; } + +void RtsDNNEngine::GetAttributes(DNNEngineAttribute &attrs) const { attrs = engine_attribute_; } + +HcclDNNEngine::HcclDNNEngine(const std::string &engine_name) { + engine_attribute_.engine_name = engine_name; + engine_attribute_.engine_input_format = FORMAT_RESERVED; + engine_attribute_.engine_output_format = FORMAT_RESERVED; +} + +HcclDNNEngine::HcclDNNEngine(const DNNEngineAttribute &attrs) 
{ engine_attribute_ = attrs; } + +Status HcclDNNEngine::Initialize(const std::map &options) { return SUCCESS; } + +Status HcclDNNEngine::Finalize() { return SUCCESS; } + +void HcclDNNEngine::GetAttributes(DNNEngineAttribute &attrs) const { attrs = engine_attribute_; } +} // namespace ge diff --git a/src/ge/plugin/engine/dnnengines.h b/src/ge/plugin/engine/dnnengines.h new file mode 100644 index 00000000..6f669cc9 --- /dev/null +++ b/src/ge/plugin/engine/dnnengines.h @@ -0,0 +1,119 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_PLUGIN_ENGINE_DNNENGINES_H_ +#define GE_PLUGIN_ENGINE_DNNENGINES_H_ + +#include +#include +#include + +#include "engine/dnnengine.h" +#include "plugin/engine/engine_manage.h" + +namespace ge { +class AICoreDNNEngine : public DNNEngine { + public: + AICoreDNNEngine() = default; + explicit AICoreDNNEngine(const std::string &engine_name); + explicit AICoreDNNEngine(const DNNEngineAttribute &attrs); + ~AICoreDNNEngine() = default; + + Status Initialize(const std::map &options); + Status Finalize(); + void GetAttributes(DNNEngineAttribute &attr) const; + + private: + DNNEngineAttribute engine_attribute_; +}; + +class VectorCoreDNNEngine : public DNNEngine { + public: + VectorCoreDNNEngine() = default; + explicit VectorCoreDNNEngine(const std::string &engine_name); + explicit VectorCoreDNNEngine(const DNNEngineAttribute &attrs); + ~VectorCoreDNNEngine() = default; + + Status Initialize(const std::map &options); + Status Finalize(); + void GetAttributes(DNNEngineAttribute &attr) const; + + private: + DNNEngineAttribute engine_attribute_; +}; + + +class AICpuDNNEngine : public DNNEngine { + public: + AICpuDNNEngine() = default; + explicit AICpuDNNEngine(const std::string &engine_name); + explicit AICpuDNNEngine(const DNNEngineAttribute &attrs); + ~AICpuDNNEngine() = default; + + Status Initialize(const std::map &options); + Status Finalize(); + void GetAttributes(DNNEngineAttribute &attr) const; + + private: + DNNEngineAttribute engine_attribute_; +}; + +class GeLocalDNNEngine : public DNNEngine { + public: + GeLocalDNNEngine() = default; + explicit GeLocalDNNEngine(const std::string &engine_name); + explicit GeLocalDNNEngine(const DNNEngineAttribute &attrs); + ~GeLocalDNNEngine() = default; + + Status Initialize(const std::map &options); + Status Finalize(); + void GetAttributes(DNNEngineAttribute &attr) const; + + private: + DNNEngineAttribute engine_attribute_; +}; + +class RtsDNNEngine : public DNNEngine { + public: + RtsDNNEngine() = default; + 
explicit RtsDNNEngine(const std::string &engine_name); + explicit RtsDNNEngine(const DNNEngineAttribute &attrs); + ~RtsDNNEngine() = default; + + Status Initialize(const std::map &options); + Status Finalize(); + void GetAttributes(DNNEngineAttribute &attr) const; + + private: + DNNEngineAttribute engine_attribute_; +}; + +class HcclDNNEngine : public DNNEngine { + public: + HcclDNNEngine() = default; + explicit HcclDNNEngine(const std::string &engine_name); + explicit HcclDNNEngine(const DNNEngineAttribute &attrs); + ~HcclDNNEngine() = default; + + Status Initialize(const std::map &options); + Status Finalize(); + void GetAttributes(DNNEngineAttribute &attr) const; + + private: + DNNEngineAttribute engine_attribute_; +}; +} // namespace ge +#endif // GE_PLUGIN_ENGINE_DNNENGINES_H_ diff --git a/src/ge/plugin/engine/engine_manage.cc b/src/ge/plugin/engine/engine_manage.cc new file mode 100644 index 00000000..d29c3ac7 --- /dev/null +++ b/src/ge/plugin/engine/engine_manage.cc @@ -0,0 +1,150 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "plugin/engine/engine_manage.h" + +#include +#include +#include + +#include "common/ge/ge_util.h" +#include "framework/common/debug/ge_log.h" +#include "plugin/engine/dnnengines.h" + +namespace ge { +std::unique_ptr> EngineManager::engine_map_; + +Status EngineManager::RegisterEngine(const std::string &engine_name, DNNEnginePtr engine_ptr) { + if (engine_ptr == nullptr) { + GELOGE(FAILED, "enginePtr is nullptr"); + return FAILED; + } + + if (engine_map_ == nullptr) { + engine_map_.reset(new (std::nothrow) std::map()); + } + + auto it = engine_map_->find(engine_name); + if (it != engine_map_->end()) { + GELOGW("engine %s already exist.", engine_name.c_str()); + return FAILED; + } + engine_map_->emplace(engine_name, engine_ptr); + return SUCCESS; +} + +DNNEnginePtr EngineManager::GetEngine(const std::string &engine_name) { + auto it = engine_map_->find(engine_name); + if (it == engine_map_->end()) { + GELOGW("engine %s not exist.", engine_name.c_str()); + return nullptr; + } + + auto engine = it->second; + return engine; +} + +void GetDNNEngineObjs(std::map &engines) { + const std::string ai_core = "AIcoreEngine"; + std::vector mem_type_aicore; + mem_type_aicore.emplace_back(GE_ENGINE_ATTR_MEM_TYPE_HBM); + DNNEngineAttribute attr_aicore = {ai_core, mem_type_aicore, COST_0, DEVICE, FORMAT_RESERVED, FORMAT_RESERVED}; + DNNEnginePtr aicore_engine_ptr = MakeShared(attr_aicore); + if (aicore_engine_ptr == nullptr) { + GELOGE(ge::FAILED, "make aiCoreEnginePtr failed"); + return; + } + if (EngineManager::RegisterEngine(ai_core, aicore_engine_ptr) != SUCCESS) { + GELOGW("register ai_core failed"); + } + + const std::string vector_core = "VectorEngine"; + std::vector mem_type_aivcore; + mem_type_aivcore.emplace_back(GE_ENGINE_ATTR_MEM_TYPE_HBM); + DNNEngineAttribute attr_vector_core = {vector_core, mem_type_aivcore, COST_1, + DEVICE, FORMAT_RESERVED, FORMAT_RESERVED}; + DNNEnginePtr vectorcore_engine_ptr = MakeShared(attr_vector_core); + if 
(vectorcore_engine_ptr == nullptr) { + GELOGE(ge::FAILED, "make vectorCoreEnginePtr failed"); + return; + } + + if (EngineManager::RegisterEngine(vector_core, vectorcore_engine_ptr) != SUCCESS) { + GELOGW("register vector_core failed"); + } + + const std::string vm_aicpu = "DNN_VM_AICPU"; + std::vector mem_type_aicpu; + mem_type_aicpu.emplace_back(GE_ENGINE_ATTR_MEM_TYPE_HBM); + DNNEngineAttribute attr_aicpu = {vm_aicpu, mem_type_aicpu, COST_2, DEVICE, FORMAT_RESERVED, FORMAT_RESERVED}; + DNNEnginePtr vm_engine_ptr = MakeShared(attr_aicpu); + if (vm_engine_ptr == nullptr) { + GELOGE(ge::FAILED, "make vm_engine_ptr failed"); + return; + } + if (EngineManager::RegisterEngine(vm_aicpu, vm_engine_ptr) != SUCCESS) { + GELOGW("register vmAicpuEngine failed"); + } + + const std::string vm_ge_local = "DNN_VM_GE_LOCAL"; + std::vector mem_type_ge_local; + mem_type_ge_local.emplace_back(GE_ENGINE_ATTR_MEM_TYPE_HBM); + // GeLocal use minimum priority, set it as 9 + DNNEngineAttribute attr_ge_local = {vm_ge_local, mem_type_ge_local, COST_9, DEVICE, FORMAT_RESERVED, FORMAT_RESERVED}; + DNNEnginePtr ge_local_engine = MakeShared(attr_ge_local); + if (ge_local_engine == nullptr) { + GELOGE(ge::FAILED, "make ge_local_engine failed"); + return; + } + if (EngineManager::RegisterEngine(vm_ge_local, ge_local_engine) != SUCCESS) { + GELOGW("register ge_local_engine failed"); + } + + const std::string vm_rts = "DNN_VM_RTS"; + std::vector mem_type_rts; + mem_type_rts.emplace_back(GE_ENGINE_ATTR_MEM_TYPE_HBM); + DNNEngineAttribute attr_rts = {vm_rts, mem_type_rts, COST_1, DEVICE, FORMAT_RESERVED, FORMAT_RESERVED}; + DNNEnginePtr rts_engine = MakeShared(attr_rts); + if (rts_engine == nullptr) { + GELOGE(ge::FAILED, "make rts_engine failed"); + return; + } + if (EngineManager::RegisterEngine(vm_rts, rts_engine) != SUCCESS) { + GELOGW("register rts_engine failed"); + } + + const std::string dnn_hccl = "DNN_HCCL"; + std::vector mem_type_hccl; + 
mem_type_hccl.emplace_back(GE_ENGINE_ATTR_MEM_TYPE_HBM); + DNNEngineAttribute attr_hccl = {dnn_hccl, mem_type_hccl, COST_1, DEVICE, FORMAT_RESERVED, FORMAT_RESERVED}; + DNNEnginePtr hccl_engine = MakeShared(attr_hccl); + if (hccl_engine == nullptr) { + GELOGE(ge::FAILED, "make hccl_engine failed"); + return; + } + if (EngineManager::RegisterEngine(dnn_hccl, hccl_engine) != SUCCESS) { + GELOGW("register hccl_engine failed"); + } + + for (auto it = EngineManager::engine_map_->begin(); it != EngineManager::engine_map_->end(); ++it) { + GELOGI("get engine %s from engine plugin.", it->first.c_str()); + engines.emplace(std::pair(it->first, it->second)); + } + + GELOGI("after get engine, engine size: %zu", engines.size()); + return; +} +} // namespace ge diff --git a/src/ge/plugin/engine/engine_manage.h b/src/ge/plugin/engine/engine_manage.h new file mode 100644 index 00000000..5203ad3a --- /dev/null +++ b/src/ge/plugin/engine/engine_manage.h @@ -0,0 +1,40 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_PLUGIN_ENGINE_ENGINE_MANAGE_H_ +#define GE_PLUGIN_ENGINE_ENGINE_MANAGE_H_ + +#include +#include +#include +#include + +#include "engine/dnnengine.h" + +namespace ge { +using DNNEnginePtr = std::shared_ptr; +class EngineManager { + public: + static Status RegisterEngine(const std::string &engine_name, DNNEnginePtr engine_ptr); + static DNNEnginePtr GetEngine(const std::string &engine_name); + static std::unique_ptr> engine_map_; +}; + +extern "C" { +void GetDNNEngineObjs(std::map &engines); +} +} // namespace ge +#endif // GE_PLUGIN_ENGINE_ENGINE_MANAGE_H_ diff --git a/src/ge/session/inner_session.cc b/src/ge/session/inner_session.cc new file mode 100644 index 00000000..9f90ffd8 --- /dev/null +++ b/src/ge/session/inner_session.cc @@ -0,0 +1,203 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "session/inner_session.h" + +#include +#include +#include + +#include "graph/load/new_model_manager/model_manager.h" +#include "graph/ge_global_options.h" +#include "graph/ge_local_context.h" +#include "graph/ge_context.h" +#include "framework/common/debug/ge_log.h" +#include "common/util.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/utils/tensor_adapter.h" +#include "runtime/mem.h" + +namespace ge { +static std::mutex mutex_; // BuildGraph and RunGraph use + +InnerSession::InnerSession(uint64_t session_id, const std::map &options) + : init_flag_(false), session_id_(session_id), options_(options) {} + +Status InnerSession::Initialize() { + if (init_flag_) { + GELOGW("[InnerSession:%lu] session already initialize.", session_id_); + return SUCCESS; + } + UpdateThreadContext(); + + GE_CHK_RT_RET(rtSetDevice(GetContext().DeviceId())); + + Status ret = graph_manager_.Initialize(options_); + if (ret != SUCCESS) { + GELOGE(ret, "[InnerSession:%lu] initialize failed.", session_id_); + return ret; + } + + int32_t version = static_cast(SessionVersion::ClOUD_VERSION); + const int kDefaultDeviceId = 0; + const int kDefaultJobId = 0; + ret = VarManager::Instance(session_id_)->Init(version, session_id_, kDefaultDeviceId, kDefaultJobId); + if (ret != SUCCESS) { + GELOGE(ret, "failed to init session instance"); + } + init_flag_ = true; + return SUCCESS; +} + +Status InnerSession::Finalize() { + std::lock_guard lock(resource_mutex_); + if (!init_flag_) { + GELOGW("[InnerSession:%lu] session does not initialize.", session_id_); + return SUCCESS; + } + UpdateThreadContext(); + Status ret = graph_manager_.Finalize(); + if (ret != SUCCESS) { + // Subsequent code execution is required, so no return is required + GELOGE(ret, "[InnerSession:%lu] finalize failed.", session_id_); + } + + ModelManager::GetInstance()->DestroyAicpuSession(session_id_); + init_flag_ = false; + // release var memory + GELOGI("VarManager free var memory."); + 
(void)VarManager::Instance(session_id_)->FreeVarMemory(); + GE_CHK_RT(rtDeviceReset(static_cast(GetContext().DeviceId()))); + + return ret; +} + +Status InnerSession::GetVariable(const std::string &name, Tensor &val) { + UpdateThreadContext(); + return graph_manager_.GetVariable(name, val); +} + +Status InnerSession::AddGraph(uint32_t graph_id, const Graph &graph) { + std::lock_guard lock(resource_mutex_); + if (!init_flag_) { + GELOGE(GE_SESS_INIT_FAILED, "[InnerSession:%lu] initialize failed.", session_id_); + return GE_SESS_INIT_FAILED; + } + UpdateThreadContext(); + Status ret = graph_manager_.AddGraph(graph_id, graph); + if (ret != SUCCESS) { + GELOGE(ret, "[InnerSession:%lu] add graph %u failed.", session_id_, graph_id); + return ret; + } + + GELOGI("[InnerSession:%lu] add graph success, graph_id=%u.", session_id_, graph_id); + return SUCCESS; +} + +Status InnerSession::RunGraph(uint32_t graph_id, const std::vector &inputs, std::vector &outputs) { + GELOGI("[InnerSession:%lu] run graph on session, graph_id=%u.", session_id_, graph_id); + if (mutex_.try_lock()) { + std::lock_guard lock(mutex_, std::adopt_lock); + if (!init_flag_) { + GELOGE(GE_SESS_INIT_FAILED, "[InnerSession:%lu] initialize failed.", session_id_); + return GE_SESS_INIT_FAILED; + } + UpdateThreadContext(); + vector geInputs; + for (auto &item : inputs) { + geInputs.push_back(TensorAdapter::AsGeTensor(item)); + } + vector geOutputs; + Status ret = graph_manager_.RunGraph(graph_id, geInputs, geOutputs, session_id_); + domi::GetContext().out_nodes_map.clear(); + domi::GetContext().user_out_nodes.clear(); + if (ret != SUCCESS) { + GELOGE(ret, "[InnerSession:%lu] run graph failed, graph_id=%u.", session_id_, graph_id); + return ret; + } + outputs.clear(); + for (auto &item : geOutputs) { + outputs.push_back(TensorAdapter::AsTensor(item)); + } + + GELOGI("[InnerSession:%lu] run graph success, graph_id=%u.", session_id_, graph_id); + return SUCCESS; + } else { + GELOGE(GE_SESS_ALREADY_RUNNING, 
"[InnerSession:%lu] run graph failed, graph_id=%u.", session_id_, graph_id); + return GE_SESS_ALREADY_RUNNING; + } +} + +Status InnerSession::RemoveGraph(uint32_t graph_id) { + std::lock_guard lock(resource_mutex_); + if (!init_flag_) { + GELOGE(GE_SESS_INIT_FAILED, "[InnerSession:%lu] initialize failed.", session_id_); + return GE_SESS_INIT_FAILED; + } + UpdateThreadContext(); + Status ret = graph_manager_.RemoveGraph(graph_id); + if (ret != SUCCESS) { + GELOGE(ret, "[InnerSession:%lu] remove graph failed, graph_id=%u.", session_id_, graph_id); + return ret; + } + + GELOGI("[InnerSession:%lu] remove graph success, graph_id=%u.", session_id_, graph_id); + return SUCCESS; +} + +Status InnerSession::RegisterCallBackFunc( + const std::string &key, + const std::function &)> &callback) { + std::lock_guard lock(resource_mutex_); + if (!init_flag_) { + GELOGE(GE_SESS_INIT_FAILED, "[InnerSession:%lu] initialize failed.", session_id_); + return GE_SESS_INIT_FAILED; + } + UpdateThreadContext(); + Status ret = graph_manager_.RegisterCallBackFunc(key, callback); + if (ret != SUCCESS) { + GELOGE(ret, "[InnerSession:%lu] register %s callback function failed.", session_id_, key.c_str()); + return ret; + } + + GELOGI("[InnerSession:%lu] register %s callback function success.", session_id_, key.c_str()); + return SUCCESS; +} + +Status InnerSession::RunGraphAsync(uint32_t graph_id, const std::vector &inputs, + std::vector &outputs, std::function callback) { + UpdateThreadContext(); + GELOGI("[InnerSession:%lu] run graph on session, graph_id=%u.", session_id_, graph_id); + Status ret = graph_manager_.RunGraphAsync(graph_id, inputs, outputs, session_id_, callback); + if (ret != SUCCESS) { + GELOGE(ret, "[InnerSession:%lu] run graph failed, graph_id=%u.", session_id_, graph_id); + return ret; + } + GELOGI("[InnerSession:%lu] run graph success, graph_id=%u.", session_id_, graph_id); + return ret; +} + +const GraphManager &InnerSession::getGraphManagerObj() const { return graph_manager_; 
} + +void InnerSession::UpdateThreadContext() { + GetThreadLocalContext().SetGlobalOption(GetMutableGlobalOptions()); + GetThreadLocalContext().SetSessionOption(options_); +} +bool InnerSession::IsGraphNeedRebuild(uint32_t graph_id) { + UpdateThreadContext(); + return graph_manager_.IsGraphNeedRebuild(graph_id); +} +} // namespace ge diff --git a/src/ge/session/inner_session.h b/src/ge/session/inner_session.h new file mode 100644 index 00000000..38d5a9f6 --- /dev/null +++ b/src/ge/session/inner_session.h @@ -0,0 +1,68 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_SESSION_INNER_SESSION_H_ +#define GE_SESSION_INNER_SESSION_H_ + +#include +#include +#include + +#include "framework/common/ge_types.h" +#include "ge/ge_api_types.h" +#include "graph/manager/graph_manager.h" + +namespace ge { +class InnerSession { + public: + InnerSession(uint64_t session_id, const std::map &options); + + ~InnerSession() = default; + + Status Initialize(); + + Status AddGraph(uint32_t graph_id, const Graph &graph); + + Status RunGraph(uint32_t graph_id, const std::vector &inputs, std::vector &outputs); + + Status RemoveGraph(uint32_t graph_id); + + Status RunGraphAsync(uint32_t graph_id, const std::vector &inputs, std::vector &outputs, + std::function callback); + + Status Finalize(); + + Status GetVariable(const std::string &name, Tensor &val); + + Status RegisterCallBackFunc( + const std::string &key, + const std::function &)> &callback); + + const GraphManager &getGraphManagerObj() const; + + bool IsGraphNeedRebuild(uint32_t graph_id); + + private: + bool init_flag_; + uint64_t session_id_; + std::map options_; + GraphManager graph_manager_; + std::mutex resource_mutex_; // AddGraph, RemoveGraph and Finalize use + void UpdateThreadContext(); +}; +} // namespace ge + +#endif // GE_SESSION_INNER_SESSION_H_ diff --git a/src/ge/session/session_manager.cc b/src/ge/session/session_manager.cc new file mode 100644 index 00000000..5fe28723 --- /dev/null +++ b/src/ge/session/session_manager.cc @@ -0,0 +1,277 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "session/session_manager.h" + +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "common/ge/ge_util.h" +#include "graph/manager/util/rt_context_util.h" +#include "graph/load/new_model_manager/model_manager.h" +#include "graph/ge_context.h" + +using std::map; +using std::string; +using std::vector; + +namespace ge { +Status SessionManager::Initialize(const std::map &options) { + if (init_flag_) { + GELOGW("Session Manager has been initialized."); + return SUCCESS; + } + init_flag_ = true; + return SUCCESS; +} + +Status SessionManager::Finalize() { + if (!init_flag_) { + GELOGW("Session Manager has not been initialized."); + return SUCCESS; + } + std::lock_guard lock(mutex_); + for (auto iter = session_manager_map_.begin(); iter != session_manager_map_.end(); ++iter) { + (void)iter->second->Finalize(); + } + session_manager_map_.clear(); + init_flag_ = false; + return SUCCESS; +} + +Status SessionManager::SetrtContext(rtContext_t rt_context) { + GELOGI("set rt_context RT_CTX_NORMAL_MODE, device id:%u.", GetContext().DeviceId()); + GE_CHK_RT_RET(rtCtxCreate(&rt_context, RT_CTX_NORMAL_MODE, static_cast(GetContext().DeviceId()))); + GE_CHK_RT_RET(rtCtxSetCurrent(rt_context)); + RtContextUtil::GetInstance().AddrtContext(rt_context); + return SUCCESS; +} + +Status SessionManager::CreateSession(const std::map &options, SessionId &session_id) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + SessionId next_session_id = 0; + + std::lock_guard lock(mutex_); + Status next_session_id_ret = GetNextSessionId(next_session_id); + if (next_session_id_ret != SUCCESS) { + return next_session_id_ret; + } + + SessionPtr session_ptr = MakeShared(next_session_id, options); + if (session_ptr == nullptr) { + return MEMALLOC_FAILED; + } + Status ret = session_ptr->Initialize(); + if (ret != 
SUCCESS) { + return ret; + } + + (void)session_manager_map_.emplace(std::pair(next_session_id, session_ptr)); + session_id = next_session_id; + + // create a context + ret = SetrtContext(rtContext_t()); + + return ret; +} + +Status SessionManager::DestroySession(SessionId session_id) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + std::lock_guard lock(mutex_); + std::map::iterator it = session_manager_map_.find(session_id); + if (it == session_manager_map_.end()) { + return GE_SESSION_NOT_EXIST; + } + + if (ModelManager::GetInstance() != nullptr) { + ModelManager::GetInstance()->DestroyAicpuSession(session_id); + } + + // Unified destruct rt_context + RtContextUtil::GetInstance().DestroyrtContexts(); + + SessionPtr inner_session = it->second; + Status ret = inner_session->Finalize(); + if (ret != SUCCESS) { + return ret; + } + (void)session_manager_map_.erase(session_id); + return ret; +} + +Status SessionManager::GetVariable(SessionId session_id, const std::string &name, Tensor &val) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + SessionPtr inner_session = nullptr; + { + std::lock_guard lock(mutex_); + std::map::iterator it = session_manager_map_.find(session_id); + if (it == session_manager_map_.end()) { + return GE_SESSION_NOT_EXIST; + } else { + inner_session = it->second; + } + } + return inner_session->GetVariable(name, val); +} + +Status SessionManager::AddGraph(SessionId session_id, uint32_t graph_id, const Graph &graph) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + SessionPtr inner_session = nullptr; + { + std::lock_guard lock(mutex_); + std::map::iterator it = session_manager_map_.find(session_id); + if (it == session_manager_map_.end()) { + return GE_SESSION_NOT_EXIST; + } else { + inner_session = it->second; + } + auto compute_graph = GraphUtils::GetComputeGraph(graph); + 
std::string session_graph_id = std::to_string(session_id) + "_" + std::to_string(graph_id); + if (!AttrUtils::SetStr(*compute_graph, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id)) { + GELOGW("Set graph session_graph_id attr failed."); + } else { + GELOGD("Set graph session_graph_id attr to [%s]", session_graph_id.c_str()); + } + } + return inner_session->AddGraph(graph_id, graph); +} + +Status SessionManager::RunGraph(SessionId session_id, uint32_t graph_id, const std::vector &inputs, + std::vector &outputs) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + SessionPtr inner_session = nullptr; + { + std::lock_guard lock(mutex_); + std::map::iterator it = session_manager_map_.find(session_id); + if (it == session_manager_map_.end()) { + return GE_SESSION_NOT_EXIST; + } else { + inner_session = it->second; + } + } + return inner_session->RunGraph(graph_id, inputs, outputs); +} + +Status SessionManager::RemoveGraph(SessionId session_id, uint32_t graph_id) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + SessionPtr inner_session = nullptr; + { + std::lock_guard lock(mutex_); + std::map::iterator it = session_manager_map_.find(session_id); + if (it == session_manager_map_.end()) { + return GE_SESSION_NOT_EXIST; + } else { + inner_session = it->second; + } + } + return inner_session->RemoveGraph(graph_id); +} + +bool SessionManager::HasSession(SessionId session_id) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return false; + } + return session_manager_map_.find(session_id) != session_manager_map_.end(); +} + +Status SessionManager::GetNextSessionId(SessionId &next_session_id) const { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + static SessionId session_id = 0; + + next_session_id = session_id++; + return SUCCESS; +} + +Status SessionManager::RegisterCallBackFunc( + SessionId 
session_id, const std::string &key, + const std::function &)> &callback) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + SessionPtr inner_session = nullptr; + { + std::lock_guard lock(mutex_); + std::map::iterator it = session_manager_map_.find(session_id); + if (it == session_manager_map_.end()) { + return GE_SESSION_NOT_EXIST; + } else { + inner_session = it->second; + } + } + return inner_session->RegisterCallBackFunc(key, callback); +} + +Status SessionManager::RunGraphAsync(SessionId session_id, uint32_t graph_id, const std::vector &inputs, + std::vector &outputs, std::function callback) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return GE_SESSION_MANAGER_NOT_INIT; + } + SessionPtr inner_session = nullptr; + { + std::lock_guard lock(mutex_); + std::map::iterator it = session_manager_map_.find(session_id); + if (it == session_manager_map_.end()) { + return GE_SESSION_NOT_EXIST; + } else { + inner_session = it->second; + } + } + return inner_session->RunGraphAsync(graph_id, inputs, outputs, callback); +} +bool SessionManager::IsGraphNeedRebuild(SessionId session_id, uint32_t graph_id) { + if (!init_flag_) { + GELOGE(GE_SESSION_MANAGER_NOT_INIT); + return true; + } + SessionPtr inner_session = nullptr; + { + std::lock_guard lock(mutex_); + auto it = session_manager_map_.find(session_id); + if (it == session_manager_map_.end()) { + GELOGE(GE_SESSION_NOT_EXIST, "The session %lu does not exists", session_id); + return true; + } else { + inner_session = it->second; + } + } + return inner_session->IsGraphNeedRebuild(graph_id); +} +}; // namespace ge diff --git a/src/ge/session/session_manager.h b/src/ge/session/session_manager.h new file mode 100644 index 00000000..3df3e007 --- /dev/null +++ b/src/ge/session/session_manager.h @@ -0,0 +1,152 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use 
this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_SESSION_SESSION_MANAGER_H_ +#define GE_SESSION_SESSION_MANAGER_H_ + +#include +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "ge/ge_api_types.h" +#include "session/inner_session.h" + +namespace ge { +using SessionPtr = std::shared_ptr; + +class SessionManager { + friend class GELib; + + public: + Status SetrtContext(rtContext_t rt_context); + /// + /// @ingroup ge_session + /// @brief create session + /// @param [in] options session config options + /// @param [out] session_id session id + /// @return Status result of function + /// + Status CreateSession(const std::map &options, SessionId &session_id); + + /// + /// @ingroup ge_session + /// @brief destroy the session with specific session id + /// @param [in] session_id session id + /// @return Status result of function + /// + Status DestroySession(SessionId session_id); + + /// + /// @ingroup ge_session + /// @brief add a graph to the session with specific session id + /// @param [in] session_id session id + /// @param [in] graph_id graph id + /// @param [in] graph the graph to add + /// @return Status result of function + /// + Status AddGraph(SessionId session_id, uint32_t graph_id, const ge::Graph &graph); + + /// + /// @ingroup ge_session + /// @brief run a graph of the session with specific session id + /// @param [in] session_id session id + /// @param [in] graph_id graph id + /// @param [in] inputs input data + /// @param [out] outputs output data + /// @return Status result 
of function + /// + Status RunGraph(SessionId session_id, uint32_t graph_id, const std::vector &inputs, + std::vector &outputs); + + /// + /// @ingroup ge_session + /// @brief remove a graph from the session with specific session id + /// @param [in] session_id session id + /// @param [in] graph_id graph id + /// @return Status result of function + /// + Status RemoveGraph(SessionId session_id, uint32_t graph_id); + + /// + /// @ingroup ge_session + /// @brief get variable value from the session with specific session id + /// @param [in] session_id session id + /// @param [in] name op name + /// @param [out] val out value tensor + /// @return Status result of function + /// + Status GetVariable(SessionId session_id, const std::string &name, Tensor &val); + + /// + /// @ingroup ge_session + /// @brief run a graph of the session with specific session id for train asynchronously + /// @param [in] session_id session id + /// @param [in] graph_id graph id + /// @param [in] inputs input data + /// @param [out] outputs output data + /// @return Status result of function + /// + Status RunGraphAsync(SessionId session_id, uint32_t graph_id, const std::vector &inputs, + std::vector &outputs, std::function callback); + + /// + /// @ingroup ge_graph + /// @brief me register the callback function to get the result of summary or checkpoin + /// @param [in] session_id session id + /// @param [in] key: summary or checkpoint + /// @param [in] callbak: The real callback object of me + /// @return Status result of function + /// + Status RegisterCallBackFunc( + SessionId session_id, const std::string &key, + const std::function &)> &callback); + + bool IsGraphNeedRebuild(SessionId session_id, uint32_t graph_id); + + private: + SessionManager() = default; + + ~SessionManager() = default; + + /// + /// @ingroup ge_session + /// @brief initialize session manager + /// @param [in] options session manager config options + /// @return Status result of function + /// + Status 
Initialize(const std::map &options); + + /// + /// @ingroup ge_session + /// @brief finalize session manager + /// @return Status result of function + /// + Status Finalize(); + + bool HasSession(SessionId session_id); + + Status GetNextSessionId(SessionId &next_session_id) const; + + std::map session_manager_map_; + std::mutex mutex_; + bool init_flag_ = false; +}; +}; // namespace ge + +#endif // GE_SESSION_SESSION_MANAGER_H_ diff --git a/src/ge/single_op/single_op.cc b/src/ge/single_op/single_op.cc new file mode 100644 index 00000000..59a17d38 --- /dev/null +++ b/src/ge/single_op/single_op.cc @@ -0,0 +1,159 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "single_op/single_op.h" + +#include "common/fmk_types.h" +#include "common/profiling/profiling_manager.h" +#include "framework/common/debug/ge_log.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "runtime/mem.h" + +namespace ge { +namespace { +const size_t kDataMemAlignSize = 32; + +size_t GetAlignedSize(uint32_t size) { + size_t aligned_size = (size + 2 * kDataMemAlignSize - 1) / kDataMemAlignSize * kDataMemAlignSize; + return aligned_size; +} +} // namespace +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY SingleOp::~SingleOp() { + for (auto task : tasks_) { + delete task; + task = nullptr; + } +} + +Status SingleOp::ValidateArgs(const std::vector &inputs, const std::vector &outputs) { + auto num_inputs = inputs.size(); + if (num_inputs != input_sizes_.size()) { + GELOGE(PARAM_INVALID, "Input num mismatch. model expect %zu, but given %zu", input_addr_list_.size(), + inputs.size()); + return PARAM_INVALID; + } + + for (size_t i = 0; i < num_inputs; ++i) { + // preventing from read out of bound + size_t aligned_size = GetAlignedSize(inputs[i].length); + if (aligned_size < input_sizes_[i]) { + GELOGE(PARAM_INVALID, "Input size mismatch. index = %zu, model expect %zu, but given %zu(after align)", i, + input_sizes_[i], aligned_size); + return PARAM_INVALID; + } + } + + auto num_outputs = outputs.size(); + if (num_outputs != output_sizes_.size()) { + GELOGE(PARAM_INVALID, "output num mismatch. model expect %zu, but given %zu", output_sizes_.size(), outputs.size()); + return PARAM_INVALID; + } + + for (size_t i = 0; i < num_outputs; ++i) { + // preventing from write out of bound + size_t aligned_size = GetAlignedSize(outputs[i].length); + if (aligned_size < output_sizes_[i]) { + GELOGE(PARAM_INVALID, "Output size mismatch. 
index = %zu, model expect %zu, but given %zu(after align)", i, + output_sizes_[i], aligned_size); + return PARAM_INVALID; + } + } + + return SUCCESS; +} + +Status SingleOp::UpdateArgs(const std::vector &inputs, const std::vector &outputs) { + size_t arg_index = 0; + if (use_physical_addr_) { + for (auto &input : inputs) { + auto *addr = reinterpret_cast(input.data); + size_t aligned_size = GetAlignedSize(input.length); + auto ret = ModelUtils::ConvertVirtualAddressToPhysical(addr, aligned_size, addr); + if (ret != SUCCESS) { + GELOGE(ret, "ConvertVirtualAddressToPhysical failed. Arg index = %zu", arg_index); + return ret; + } + args_[arg_index++] = reinterpret_cast(addr); + } + + for (auto &output : outputs) { + auto *addr = reinterpret_cast(output.data); + size_t aligned_size = GetAlignedSize(output.length); + auto ret = ModelUtils::ConvertVirtualAddressToPhysical(addr, aligned_size, addr); + if (ret != SUCCESS) { + GELOGE(ret, "ConvertVirtualAddressToPhysical failed. Arg index = %zu", arg_index); + return ret; + } + args_[arg_index++] = reinterpret_cast(addr); + } + } else { + for (auto &input : inputs) { + args_[arg_index++] = reinterpret_cast(input.data); + } + + for (auto &output : outputs) { + args_[arg_index++] = reinterpret_cast(output.data); + } + } + + size_t num_args = arg_table_.size(); + for (size_t i = 0; i < num_args; ++i) { + std::vector &ptr_to_arg_in_tasks = arg_table_[i]; + if (ptr_to_arg_in_tasks.empty()) { + GELOGW("found NO arg address to update for arg[%lu]", i); + continue; + } + + for (uintptr_t *arg_addr : ptr_to_arg_in_tasks) { + *arg_addr = args_[i]; + } + } + + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY Status SingleOp::ExecuteAsync(const std::vector &inputs, + const std::vector &outputs) { + Status ret = ValidateArgs(inputs, outputs); + if (ret != SUCCESS) { + return ret; + } + + ret = UpdateArgs(inputs, outputs); + if (ret != SUCCESS) { + return ret; + } + + for (auto &task : tasks_) { + ret = 
task->LaunchKernel(stream_); + if (ret != SUCCESS) { + return ret; + } + } + if (ProfilingManager::Instance().ProfilingOpTraceOn()) { + GELOGI("Op trace on, iter num:%d", ProfilingManager::Instance().GetOpTraceIterNum()); + ret = rtStreamSynchronize(stream_); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Invoke rtStreamSynchronize failed."); + return ret; + } + ProfilingManager::Instance().StopProfiling(); + } + return ret; +} + +void SingleOp::SetStream(rtStream_t stream) { stream_ = stream; } +} // namespace ge diff --git a/src/ge/single_op/single_op.h b/src/ge/single_op/single_op.h new file mode 100644 index 00000000..ba025c0b --- /dev/null +++ b/src/ge/single_op/single_op.h @@ -0,0 +1,56 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_SINGLE_OP_SINGLE_OP_H_ +#define GE_SINGLE_OP_SINGLE_OP_H_ + +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "framework/executor/ge_executor.h" +#include "runtime/stream.h" +#include "task/op_task.h" + +namespace ge { +class SingleOp { + public: + SingleOp() = default; + ~SingleOp(); + + Status ExecuteAsync(const std::vector &inputs, const std::vector &outputs); + void SetStream(rtStream_t stream); + + private: + Status ValidateArgs(const std::vector &inputs, const std::vector &outputs); + Status UpdateArgs(const std::vector &inputs, const std::vector &outputs); + + friend class SingleOpModel; + rtStream_t stream_ = nullptr; + std::vector input_addr_list_; + std::vector input_sizes_; + std::vector output_addr_list_; + std::vector output_sizes_; + std::vector args_; + + std::vector tasks_; + std::vector> arg_table_; + bool use_physical_addr_ = false; +}; +} // namespace ge +#endif // GE_SINGLE_OP_SINGLE_OP_H_ diff --git a/src/ge/single_op/single_op_manager.cc b/src/ge/single_op/single_op_manager.cc new file mode 100644 index 00000000..8014cc2a --- /dev/null +++ b/src/ge/single_op/single_op_manager.cc @@ -0,0 +1,149 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "single_op/single_op_manager.h" + +#include +#include + +#include "runtime/dev.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY +SingleOpManager::~SingleOpManager() { + for (auto &it : stream_resources_) { + delete it.second; + it.second = nullptr; + } +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY +Status SingleOpManager::GetOpFromModel(const std::string &model_name, + const ModelData &model_data, + void *stream, + SingleOp **single_op) { + if (single_op == nullptr) { + GELOGE(PARAM_INVALID, "single op is null"); + return PARAM_INVALID; + } + uintptr_t resource_id; + // runtime uses NULL to denote a default stream for each device + if (stream == nullptr) { + // use device id as resource key instead + int32_t dev_id = 0; + auto rt_err = rtGetDevice(&dev_id); + if (rt_err != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Get current device id failed. ret = %d", static_cast(rt_err)); + return RT_FAILED; + } + + GELOGI("GetOpFromModel with default stream. device id = %d", dev_id); + resource_id = static_cast(dev_id); + } else { + resource_id = reinterpret_cast(stream); + } + + GELOGI("GetOpFromModel in. model name = %s, resource id = 0x%lx", + model_name.c_str(), + static_cast(resource_id)); + + StreamResource *res = GetResource(resource_id); + if (res == nullptr) { + GELOGE(MEMALLOC_FAILED, "GetResource failed"); + return MEMALLOC_FAILED; + } + + SingleOp *op = res->GetOperator(model_data.model_data); + if (op != nullptr) { + GELOGD("Got operator from stream cache"); + *single_op = op; + return SUCCESS; + } + + SingleOpModel model(model_name, model_data.model_data, model_data.model_len); + auto ret = model.Init(); + if (ret != SUCCESS) { + GELOGE(ret, "Init model failed. 
model = %s, ret = %u", model_name.c_str(), ret); + return ret; + } + + auto *new_op = new(std::nothrow)SingleOp(); + if (new_op == nullptr) { + GELOGE(MEMALLOC_FAILED, "new SingleOp failed"); + return MEMALLOC_FAILED; + } + + GELOGI("To build operator: %s", model_name.c_str()); + ret = model.BuildOp(*res, *new_op); + if (ret != SUCCESS) { + GELOGE(ret, "Build op failed. op = %s, resource id = 0x%lx, ret = %u", + model_name.c_str(), + static_cast(resource_id), + ret); + delete new_op; + new_op = nullptr; + return ret; + } + + // stream is nullable + new_op->SetStream(stream); + res->CacheOperator(model_data.model_data, new_op); + *single_op = new_op; + return SUCCESS; +} + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY +Status SingleOpManager::ReleaseResource(void *stream) { + auto resource_id = reinterpret_cast(stream); + GELOGI("ReleaseResource in. resource id = 0x%lx", static_cast(resource_id)); + std::lock_guard lock(mutex_); + auto it = stream_resources_.find(resource_id); + if (it == stream_resources_.end()) { + return SUCCESS; + } + delete it->second; + it->second = nullptr; + (void)stream_resources_.erase(it); + return SUCCESS; +} + +StreamResource *SingleOpManager::GetResource(uintptr_t resource_id) { + std::lock_guard lock(mutex_); + auto it = stream_resources_.find(resource_id); + StreamResource *res = nullptr; + if (it == stream_resources_.end()) { + res = new (std::nothrow)StreamResource(); + if (res != nullptr) { + stream_resources_.emplace(resource_id, res); + } + } else { + res = it->second; + } + + return res; +} + +StreamResource *SingleOpManager::TryGetResource(uintptr_t resource_id) { + std::lock_guard lock(mutex_); + auto it = stream_resources_.find(resource_id); + if (it == stream_resources_.end()) { + return nullptr; + } + + return it->second; +} +} // namespace ge diff --git a/src/ge/single_op/single_op_manager.h b/src/ge/single_op/single_op_manager.h new file mode 100644 index 00000000..15d32316 --- /dev/null +++ 
b/src/ge/single_op/single_op_manager.h @@ -0,0 +1,50 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_SINGLE_OP_SINGLE_OP_MANAGER_H_ +#define GE_SINGLE_OP_SINGLE_OP_MANAGER_H_ + +#include +#include +#include + +#include "single_op/single_op_model.h" +#include "single_op/stream_resource.h" + +namespace ge { +class SingleOpManager { + public: + ~SingleOpManager(); + + static SingleOpManager &GetInstance() { + static SingleOpManager instance; + return instance; + } + + Status GetOpFromModel(const std::string &key, const ge::ModelData &model_data, void *stream, SingleOp **single_op); + + Status ReleaseResource(void *stream); + + private: + StreamResource *GetResource(uintptr_t resource_id); + StreamResource *TryGetResource(uintptr_t resource_id); + + std::mutex mutex_; + std::unordered_map stream_resources_; +}; +} // namespace ge + +#endif // GE_SINGLE_OP_SINGLE_OP_MANAGER_H_ diff --git a/src/ge/single_op/single_op_model.cc b/src/ge/single_op/single_op_model.cc new file mode 100644 index 00000000..51a77694 --- /dev/null +++ b/src/ge/single_op/single_op_model.cc @@ -0,0 +1,316 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "single_op/single_op_model.h" + +#include +#include +#include + +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/tensor_utils.h" +#include "runtime/rt.h" +#include "task/tbe_task_builder.h" + +using std::vector; +using std::unique_ptr; +using domi::TaskDef; + +namespace ge { +namespace { +const size_t kDataOutputNum = 1; +} // namespace +SingleOpModel::SingleOpModel(const std::string &model_name, const void *model_data, uint32_t model_size) + : model_name_(model_name), ori_model_data_(model_data), ori_model_size_(model_size) {} + +Status SingleOpModel::Init() { + auto ret = InitModel(); + if (ret != SUCCESS) { + return ret; + } + + return ParseInputsAndOutputs(); +} + +Status SingleOpModel::InitModel() { + ge::ModelData model; + model.model_len = ori_model_size_; + model.model_data = const_cast(ori_model_data_); + + auto ret = model_helper_.LoadModel(model); + if (ret != SUCCESS) { + GELOGE(ret, "LoadModel failed"); + return ret; + } + + return SUCCESS; +} + +void SingleOpModel::ParseOpModelParams(ModelHelper &model_helper, SingleOpModelParam ¶m) { + int64_t value = 0; + bool ret = false; + std::shared_ptr model = model_helper.GetGeModel(); + GE_CHECK_NOTNULL_JUST_RETURN(model); + ret = ge::AttrUtils::GetInt(model, ATTR_MODEL_MEMORY_SIZE, value); + param.memory_size = ret ? 
static_cast(value) : 0; + ret = ge::AttrUtils::GetInt(model, ATTR_MODEL_WEIGHT_SIZE, value); + param.weight_size = ret ? static_cast(value) : 0; + ret = ge::AttrUtils::GetInt(model, MODEL_ATTR_TASK_GEN_BASE_ADDR, value); + param.base_addr = ret ? static_cast(value) : 0; + ret = ge::AttrUtils::GetInt(model, MODEL_ATTR_TASK_GEN_WEIGHT_ADDR, value); + param.weight_addr = ret ? static_cast(value) : 0; + + GELOGI("ParseOpModelParams(), memory_size:%lu, weight_size:%lu.", param.memory_size, + param.weight_size); +} + +Status SingleOpModel::InitModelMem(StreamResource &res) { + ParseOpModelParams(model_helper_, model_params_); + + if (model_params_.memory_size > ALLOC_MEMORY_MAX_SIZE || model_params_.weight_size > ALLOC_MEMORY_MAX_SIZE) { + GELOGE(PARAM_INVALID, "Can not alloc memory larger than %lu. memory size = %lu, weight size = %lu", + ALLOC_MEMORY_MAX_SIZE, model_params_.memory_size, model_params_.weight_size); + return PARAM_INVALID; + } + + if (model_params_.memory_size > 0) { + model_params_.mem_base = res.MallocMemory(model_params_.memory_size); + if (model_params_.mem_base == nullptr) { + return RT_FAILED; + } + } + + if (model_params_.weight_size > 0) { + model_params_.weight_base = res.MallocWeight(model_params_.weight_size); + if (model_params_.weight_base == nullptr) { + // no need to free memory, for that was handled by StreamResources + return RT_FAILED; + } + } + + return SUCCESS; +} + +Status SingleOpModel::ParseInputNode(const OpDescPtr &op_desc) { + vector offsets = op_desc->GetOutputOffset(); + if (offsets.size() != kDataOutputNum) { + GELOGE(PARAM_INVALID, "Data op should have only one output, but got %zu", op_desc->GetOutputOffset().size()); + return PARAM_INVALID; + } + + auto output_desc = op_desc->GetOutputDescPtr(0); + GE_CHECK_NOTNULL(output_desc); + uint32_t tensor_size = 0; + (void)TensorUtils::GetSize(*output_desc, tensor_size); + input_offset_list_.emplace_back(offsets[0]); + input_sizes_.emplace_back(tensor_size); + GELOGI("[%s] parse 
input node: %s, size = %u, offset = %u", model_name_.c_str(), op_desc->GetName().c_str(), + tensor_size, static_cast(offsets[0])); + return SUCCESS; +} + +void SingleOpModel::ParseOutputNode(const OpDescPtr &op_desc) { + vector offsets = op_desc->GetInputOffset(); + for (uint32_t k = 0; k < static_cast(offsets.size()); ++k) { + auto input_desc = op_desc->GetInputDescPtr(k); + if (input_desc == nullptr) { + continue; + } + uint32_t tensor_size = 0; + (void)TensorUtils::GetSize(*input_desc, tensor_size); + output_offset_list_.emplace_back(offsets[k]); + output_sizes_.emplace_back(tensor_size); + GELOGI("[%s] parse output node: %s, size = %u, offset = %u", model_name_.c_str(), op_desc->GetName().c_str(), + tensor_size, static_cast(offsets[k])); + } +} + +Status SingleOpModel::ParseInputsAndOutputs() { + auto ge_model = model_helper_.GetGeModel(); + GE_CHECK_NOTNULL(ge_model); + Graph graph = ge_model->GetGraph(); + auto compute_graph = GraphUtils::GetComputeGraph(graph); + if (compute_graph == nullptr) { + GELOGE(PARAM_INVALID, "[%s] compute_graph is null", model_name_.c_str()); + return PARAM_INVALID; + } + + auto nodes = compute_graph->GetAllNodes(); + size_t model_op_size = nodes.size(); + GELOGI("[%s] node size = %zu", model_name_.c_str(), model_op_size); + + for (size_t i = 0; i < model_op_size; ++i) { + auto node = nodes.at(i); + auto op_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(op_desc); + op_list_[i] = op_desc; + auto op_type = op_desc->GetType(); + GELOGI("[%s] node[%zu] = %s, type = %s", model_name_.c_str(), i, node->GetName().c_str(), op_type.c_str()); + + if (op_type == DATA_TYPE || op_type == AIPP_DATA_TYPE) { + auto ret = ParseInputNode(op_desc); + if (ret != SUCCESS) { + return ret; + } + } + + if (op_type == NETOUTPUT) { + ParseOutputNode(op_desc); + } + + ge_model->GetTBEKernelStore().LoadTBEKernelBinToOpDesc(op_desc); + } + + return SUCCESS; +} + +Status SingleOpModel::SetInputsAndOutputs(SingleOp &single_op) { + // for lhisi + const char 
*use_physical_address = std::getenv("GE_USE_PHYSICAL_ADDRESS"); + if (use_physical_address != nullptr) { + single_op.use_physical_addr_ = true; + } + + int arg_index = 0; + for (size_t i = 0; i < input_offset_list_.size(); ++i) { + auto *addr = model_params_.mem_base + input_offset_list_[i]; + auto ret = ModelUtils::ConvertVirtualAddressToPhysical(addr, input_sizes_[i], addr); + if (ret != SUCCESS) { + GELOGE(ret, "ConvertVirtualAddressToPhysical failed. Input index = %zu", i); + return ret; + } + model_params_.addr_mapping_.emplace(reinterpret_cast(addr), arg_index++); + single_op.input_sizes_.emplace_back(input_sizes_[i]); + single_op.input_addr_list_.emplace_back(addr); + } + + for (size_t i = 0; i < output_offset_list_.size(); ++i) { + auto *addr = model_params_.mem_base + output_offset_list_[i]; + auto ret = ModelUtils::ConvertVirtualAddressToPhysical(addr, output_sizes_[i], addr); + if (ret != SUCCESS) { + GELOGE(ret, "ConvertVirtualAddressToPhysical failed. Output index = %zu", i); + return ret; + } + model_params_.addr_mapping_.emplace(reinterpret_cast(addr), arg_index++); + single_op.output_sizes_.emplace_back(output_sizes_[i]); + single_op.output_addr_list_.emplace_back(addr); + } + + single_op.args_.resize(arg_index); + return SUCCESS; +} + +Status SingleOpModel::BuildTaskList(SingleOp &single_op) { + auto ge_model = model_helper_.GetGeModel(); + GE_CHECK_NOTNULL(ge_model); + auto tasks = ge_model->GetModelTaskDefPtr()->task(); + for (int i = 0; i < tasks.size(); ++i) { + const TaskDef &task_def = tasks[i]; + GELOGI("[%s] Task[%d], type = %u, DebugString = %s", model_name_.c_str(), i, task_def.type(), + task_def.DebugString().c_str()); + auto task_type = static_cast(task_def.type()); + if (task_type == RT_MODEL_TASK_KERNEL) { + OpTask *task = nullptr; + auto ret = BuildKernelTask(task_def.kernel(), single_op, &task); + if (ret != SUCCESS) { + return ret; + } + + single_op.tasks_.emplace_back(task); + } else if (task_type == RT_MODEL_TASK_KERNEL_EX) { + 
GELOGD("BuildKernelExTask is not supported. modelName = %s", model_name_.c_str()); + return UNSUPPORTED; + } else { + // skip + GELOGD("Skip task type: %d", static_cast(task_type)); + } + } + return SUCCESS; +} + +void SingleOpModel::ParseArgTable(TbeOpTask *task, SingleOp &op) { + if (task == nullptr) { + GELOGE(PARAM_INVALID, "tbe op task is nullptr"); + return; + } + // args: addr1, addr2, addr3 ... + auto *args = const_cast(reinterpret_cast(task->GetArgs())); + size_t arg_size = task->GetArgSize(); + for (size_t i = 0; i < arg_size / sizeof(void *); ++i) { + uintptr_t *ptr_to_addr = args + i; + uintptr_t addr = *ptr_to_addr; + auto iter = model_params_.addr_mapping_.find(addr); + if (iter != model_params_.addr_mapping_.end()) { + int arg_index = iter->second; + GELOGI("%s args[%zu] mapped to user designated args[%d]", task->GetStubName().c_str(), i, arg_index); + op.arg_table_[iter->second].emplace_back(ptr_to_addr); + } + } +} + +Status SingleOpModel::BuildKernelTask(const domi::KernelDef &kernel_def, SingleOp &single_op, OpTask **task) { + GE_CHECK_NOTNULL(task); + const auto &context = kernel_def.context(); + auto kernel_type = static_cast(context.kernel_type()); + if (kernel_type != cce::ccKernelType::TE) { + GELOGE(UNSUPPORTED, "Only TBE kernel is supported, but got %u", context.kernel_type()); + return UNSUPPORTED; + } + + auto iter = op_list_.find(context.op_index()); + if (iter == op_list_.end()) { + GELOGE(INTERNAL_ERROR, "op desc not found. 
op index = %u", context.op_index()); + return INTERNAL_ERROR; + } + + auto *tbe_task = new (std::nothrow) TbeOpTask(); + if (tbe_task == nullptr) { + GELOGE(MEMALLOC_FAILED, "create tbe op task failed"); + return MEMALLOC_FAILED; + } + + auto builder = TbeTaskBuilder(model_name_, iter->second, kernel_def); + auto ret = builder.BuildTask(*tbe_task, model_params_); + if (ret != SUCCESS) { + delete tbe_task; + tbe_task = nullptr; + return ret; + } + + single_op.arg_table_.resize(single_op.input_sizes_.size() + single_op.output_sizes_.size()); + ParseArgTable(tbe_task, single_op); + + *task = tbe_task; + return SUCCESS; +} + +Status SingleOpModel::BuildOp(StreamResource &resource, SingleOp &single_op) { + auto ret = InitModelMem(resource); + if (ret != SUCCESS) { + return ret; + } + + ret = SetInputsAndOutputs(single_op); + if (ret != SUCCESS) { + return ret; + } + return BuildTaskList(single_op); +} +} // namespace ge diff --git a/src/ge/single_op/single_op_model.h b/src/ge/single_op/single_op_model.h new file mode 100644 index 00000000..c8880b06 --- /dev/null +++ b/src/ge/single_op/single_op_model.h @@ -0,0 +1,86 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_SINGLE_OP_SINGLE_OP_MODEL_H_ +#define GE_SINGLE_OP_SINGLE_OP_MODEL_H_ + +#include +#include +#include +#include +#include + +#include "common/helper/model_helper.h" +#include "graph/load/new_model_manager/davinci_model_parser.h" +#include "single_op/single_op.h" +#include "single_op/stream_resource.h" + +namespace ge { +struct SingleOpModelParam { + uint64_t base_addr = 0; + uint64_t memory_size = 0; + uint64_t weight_addr = 0; + uint64_t weight_size = 0; + + uint8_t *mem_base = nullptr; + uint8_t *weight_base = nullptr; + + std::map addr_mapping_; +}; + +class SingleOpModel { + public: + SingleOpModel(const std::string &model_name, + const void *model_data, + uint32_t model_size); + ~SingleOpModel() = default; + + Status Init(); + Status BuildOp(StreamResource &resource, SingleOp &single_op); + + private: + Status InitModel(); + Status ParseInputsAndOutputs(); + Status SetInputsAndOutputs(SingleOp &single_op); + + Status InitModelMem(StreamResource &resource); + + Status ParseInputNode(const OpDescPtr &op_desc); + void ParseOutputNode(const OpDescPtr &op_desc); + + Status BuildTaskList(SingleOp &single_op); + Status BuildKernelTask(const domi::KernelDef &kernel_def, SingleOp &single_op, OpTask **task); + + static void ParseOpModelParams(ModelHelper &model_helper, SingleOpModelParam ¶m); + void ParseArgTable(TbeOpTask *task, SingleOp &op); + + std::string model_name_; + const void *ori_model_data_; + uint32_t ori_model_size_; + + ModelHelper model_helper_; + + map op_list_; + SingleOpModelParam model_params_; + + std::vector input_offset_list_; + std::vector input_sizes_; + std::vector output_offset_list_; + std::vector output_sizes_; +}; +} // namespace ge + +#endif // GE_SINGLE_OP_SINGLE_OP_MODEL_H_ diff --git a/src/ge/single_op/stream_resource.cc b/src/ge/single_op/stream_resource.cc new file mode 100644 index 00000000..0ba51fe3 --- /dev/null +++ b/src/ge/single_op/stream_resource.cc @@ -0,0 +1,98 @@ +/** + * Copyright 2019-2020 Huawei 
Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "single_op/stream_resource.h" + +#include "common/ge_inner_error_codes.h" +#include "framework/common/debug/ge_log.h" +#include "framework/common/debug/log.h" +#include "runtime/rt.h" + +namespace ge { +StreamResource::~StreamResource() { + for (auto it : op_map_) { + // it's safe to delete a nullptr + delete it.second; + it.second = nullptr; + } + + for (auto mem : memory_list_) { + if (mem != nullptr) { + auto rt_ret = rtFree(mem); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, GELOGE(RT_FAILED, "rtFree failed")); + } + } + + for (auto weight : weight_list_) { + if (weight != nullptr) { + auto rt_ret = rtFree(weight); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, GELOGE(RT_FAILED, "rtFree failed")); + } + } +} + +void StreamResource::CacheOperator(const void *key, SingleOp *single_op) { + op_map_[key] = single_op; +} + +SingleOp *StreamResource::GetOperator(const void *key) { + auto it = op_map_.find(key); + if (it == op_map_.end()) { + return nullptr; + } + + return it->second; +} + +uint8_t *StreamResource::DoMallocMemory(size_t size, size_t &max_allocated, std::vector &allocated) { + if (size <= max_allocated && !allocated.empty()) { + GELOGD("reuse last memory"); + return allocated.back(); + } + + uint8_t *buffer = nullptr; + auto ret = rtMalloc(reinterpret_cast(&buffer), size, RT_MEMORY_HBM); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtMalloc failed, size = %zu, 
ret = %d", size, ret); + return nullptr; + } + + ret = rtMemset(buffer, size, 0U, size); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtMemset failed, ret = %d", ret); + auto rt_ret = rtFree(buffer); + GE_IF_BOOL_EXEC(rt_ret != RT_ERROR_NONE, GELOGE(RT_FAILED, "rtFree failed")); + return nullptr; + } + + GELOGD("Malloc new memory succeeded. size = %zu", size); + max_allocated = size; + allocated.emplace_back(buffer); + return buffer; +} + +uint8_t *StreamResource::MallocMemory(size_t size) { + GELOGD("To Malloc memory, size = %zu", size); + uint8_t *buffer = DoMallocMemory(size, max_memory_size_, memory_list_); + return buffer; +} + +uint8_t *StreamResource::MallocWeight(size_t size) { + GELOGD("To Malloc weight, size = %zu", size); + uint8_t *buffer = DoMallocMemory(size, max_weight_size_, weight_list_); + return buffer; +} +} // namespace ge diff --git a/src/ge/single_op/stream_resource.h b/src/ge/single_op/stream_resource.h new file mode 100644 index 00000000..043a718c --- /dev/null +++ b/src/ge/single_op/stream_resource.h @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_SINGLE_OP_STREAM_RESOURCE_H_ +#define GE_SINGLE_OP_STREAM_RESOURCE_H_ + +#include +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "runtime/stream.h" +#include "single_op/single_op.h" + +namespace ge { +class StreamResource { + public: + StreamResource() = default; + ~StreamResource(); + + StreamResource(const StreamResource &) = delete; + StreamResource(StreamResource &&) = delete; + StreamResource &operator=(const StreamResource &) = delete; + StreamResource &operator=(StreamResource &&) = delete; + + void CacheOperator(const void *key, SingleOp *single_op); + + SingleOp *GetOperator(const void *key); + + uint8_t *MallocMemory(size_t size); + uint8_t *MallocWeight(size_t size); + + private: + static uint8_t *DoMallocMemory(size_t size, size_t &max_allocated, std::vector &allocated); + + size_t max_memory_size_ = 0; + size_t max_weight_size_ = 0; + std::vector memory_list_; + std::vector weight_list_; + std::unordered_map op_map_; +}; +} // namespace ge + +#endif // GE_SINGLE_OP_STREAM_RESOURCE_H_ diff --git a/src/ge/single_op/task/build_task_utils.cc b/src/ge/single_op/task/build_task_utils.cc new file mode 100644 index 00000000..82b77031 --- /dev/null +++ b/src/ge/single_op/task/build_task_utils.cc @@ -0,0 +1,66 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "single_op/task/build_task_utils.h" + +#include "runtime/rt.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "graph/manager/graph_var_manager.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +namespace { +const uint64_t kSessionId = UINT64_MAX; +uint8_t *kVarBase = nullptr; +const uint64_t kLogicVarBase = 0; +const uint64_t kVarSize = 0; +} + +std::vector> BuildTaskUtils::GetAddresses(const OpDescPtr &op_desc, + const SingleOpModelParam ¶m) { + std::vector> ret; + RuntimeParam runtime_para; + runtime_para.mem_size = param.memory_size; + runtime_para.logic_mem_base = param.base_addr; + runtime_para.mem_base = param.mem_base; + runtime_para.weight_size = param.weight_size; + runtime_para.logic_weight_base = param.weight_addr; + runtime_para.weight_base = param.weight_base; + runtime_para.var_size = kVarSize; + runtime_para.logic_var_base = kLogicVarBase; + runtime_para.var_base = kVarBase; + runtime_para.session_id = kSessionId; + + ret.emplace_back(ModelUtils::GetInputDataAddrs(runtime_para, op_desc)); + ret.emplace_back(ModelUtils::GetOutputDataAddrs(runtime_para, op_desc)); + ret.emplace_back(ModelUtils::GetWorkspaceDataAddrs(runtime_para, op_desc)); + return ret; +} + +std::vector BuildTaskUtils::JoinAddresses(const std::vector> &addresses) { + std::vector ret; + for (auto &address : addresses) { + ret.insert(ret.end(), address.begin(), address.end()); + } + return ret; +} + +std::vector BuildTaskUtils::GetKernelArgs(const OpDescPtr &op_desc, + const SingleOpModelParam ¶m) { + auto addresses = GetAddresses(op_desc, param); + return JoinAddresses(addresses); +} +} // namespace ge diff --git a/src/ge/single_op/task/build_task_utils.h b/src/ge/single_op/task/build_task_utils.h new file mode 100644 index 00000000..a5030e69 --- /dev/null +++ b/src/ge/single_op/task/build_task_utils.h @@ -0,0 +1,36 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the 
"License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_SINGLE_OP_TASK_BUILD_TASK_UTILS_H_ +#define GE_SINGLE_OP_TASK_BUILD_TASK_UTILS_H_ + +#include + +#include "graph/op_desc.h" +#include "single_op/single_op.h" +#include "single_op/single_op_model.h" + +namespace ge { +class BuildTaskUtils { + public: + static constexpr int kAddressIndexWorkspace = 2; + + static std::vector> GetAddresses(const OpDescPtr &op_desc, const SingleOpModelParam ¶m); + static std::vector JoinAddresses(const std::vector> &addresses); + static std::vector GetKernelArgs(const OpDescPtr &op_desc, const SingleOpModelParam ¶m); +}; +} // namespace ge +#endif // GE_SINGLE_OP_TASK_BUILD_TASK_UTILS_H_ diff --git a/src/ge/single_op/task/op_task.cc b/src/ge/single_op/task/op_task.cc new file mode 100644 index 00000000..f983e080 --- /dev/null +++ b/src/ge/single_op/task/op_task.cc @@ -0,0 +1,74 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "single_op/task/op_task.h" + +#include "runtime/rt.h" +#include "framework/common/debug/ge_log.h" + +namespace ge { +void TbeOpTask::SetStubFunc(const std::string &name, const void *stub_func) { + this->stub_name_ = name; + this->stub_func_ = stub_func; +} + +void TbeOpTask::SetKernelArgs(void *args, size_t arg_size, uint32_t block_dim) { + args_ = args; + arg_size_ = arg_size; + block_dim_ = block_dim; +} + +void TbeOpTask::SetSmDesc(void *sm_desc) { + sm_desc_ = sm_desc; +} + +TbeOpTask::~TbeOpTask() { + if (args_ != nullptr) { + (void)rtFreeHost(args_); + } + + if (sm_desc_ != nullptr) { + (void)rtMemFreeManaged(sm_desc_); + } +} + +const void* TbeOpTask::GetArgs() const { + return args_; +} + +size_t TbeOpTask::GetArgSize() const { + return arg_size_; +} + +const std::string& TbeOpTask::GetStubName() const { + return stub_name_; +} + +Status TbeOpTask::LaunchKernel(rtStream_t stream) { + GELOGD("To invoke rtKernelLaunch. task = %s, block_dim = %u", + this->stub_name_.c_str(), + block_dim_); + auto *sm_desc = reinterpret_cast(sm_desc_); + auto ret = rtKernelLaunch(stub_func_, block_dim_, args_, static_cast(arg_size_), sm_desc, stream); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "Invoke rtKernelLaunch failed. ret = %d, task = %s", ret, this->stub_name_.c_str()); + return RT_FAILED; + } + + GELOGD("Invoke rtKernelLaunch succeeded. task = %s", this->stub_name_.c_str()); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/single_op/task/op_task.h b/src/ge/single_op/task/op_task.h new file mode 100644 index 00000000..5cda8ba4 --- /dev/null +++ b/src/ge/single_op/task/op_task.h @@ -0,0 +1,57 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_SINGLE_OP_TASK_OP_TASK_H_ +#define GE_SINGLE_OP_TASK_OP_TASK_H_ + +#include +#include + +#include "runtime/stream.h" +#include "common/ge_inner_error_codes.h" +#include "graph/op_kernel_bin.h" + +namespace ge { +class OpTask { + public: + OpTask() = default; + virtual ~OpTask() = default; + virtual Status LaunchKernel(rtStream_t stream) = 0; +}; + +class TbeOpTask : public OpTask { + public: + ~TbeOpTask() override; + Status LaunchKernel(rtStream_t stream) override; + + void SetSmDesc(void *sm_desc); + void SetStubFunc(const std::string &name, const void *stub_func); + void SetKernelArgs(void *args, size_t arg_size, uint32_t block_dim); + const void* GetArgs() const; + size_t GetArgSize() const; + const std::string& GetStubName() const; + + private: + const void *stub_func_ = nullptr; + void *args_ = nullptr; + size_t arg_size_ = 0; + uint32_t block_dim_ = 1; + void *sm_desc_ = nullptr; + std::string stub_name_; +}; +} // namespace ge + +#endif // GE_SINGLE_OP_TASK_OP_TASK_H_ diff --git a/src/ge/single_op/task/tbe_task_builder.cc b/src/ge/single_op/task/tbe_task_builder.cc new file mode 100644 index 00000000..c2c56398 --- /dev/null +++ b/src/ge/single_op/task/tbe_task_builder.cc @@ -0,0 +1,303 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "single_op/task/tbe_task_builder.h" + +#include +#include +#include + +#include "common/helper/model_helper.h" +#include "framework/common/debug/ge_log.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "graph/load/new_model_manager/task_info/task_info.h" +#include "graph/manager/graph_var_manager.h" +#include "runtime/rt.h" +#include "single_op/task/build_task_utils.h" + +namespace ge { +namespace { +std::mutex g_reg_mutex; + +inline void GetKernelName(const OpDescPtr &op_desc, std::string &kernel_name) { + (void)AttrUtils::GetStr(op_desc, op_desc->GetName() + "_kernelname", kernel_name); +} + +inline TBEKernelPtr GetTbeKernel(const OpDescPtr &op_desc) { + return op_desc->TryGetExtAttr(ge::OP_EXTATTR_NAME_TBE_KERNEL, TBEKernelPtr()); +} +} // namespace + +KernelHolder::KernelHolder(const char *stub_func, std::shared_ptr kernel_bin) + : stub_func_(stub_func), bin_handle_(nullptr), kernel_bin_(std::move(kernel_bin)) {} + +KernelHolder::~KernelHolder() { + if (bin_handle_ != nullptr) { + GE_CHK_RT(rtDevBinaryUnRegister(bin_handle_)); + } +} + +KernelBinRegistry::~KernelBinRegistry() { + for (auto &iter : registered_bins_) { + delete iter.second; + iter.second = nullptr; + } +} + +const char *KernelBinRegistry::GetUnique(const string &stub_func) { + std::lock_guard lock(mutex_); + auto it = unique_stubs_.find(stub_func); + if (it != unique_stubs_.end()) { + return it->c_str(); + } else { + it = unique_stubs_.insert(unique_stubs_.end(), stub_func); + return it->c_str(); + } +} + 
+const char *KernelBinRegistry::GetStubFunc(const std::string &stub_name) { + std::lock_guard lock(mutex_); + auto iter = registered_bins_.find(stub_name); + if (iter != registered_bins_.end()) { + return iter->second->stub_func_; + } + + return nullptr; +} + +bool KernelBinRegistry::AddKernel(const std::string &stub_name, const KernelHolder *holder) { + std::lock_guard lock(mutex_); + auto ret = registered_bins_.emplace(stub_name, holder); + return ret.second; +} + +TbeTaskBuilder::TbeTaskBuilder(const std::string &model_name, const OpDescPtr &op_desc, + const domi::KernelDef &kernel_def) + : op_desc_(op_desc), kernel_def_(kernel_def), stub_name_(model_name + "/" + op_desc->GetName() + "_tvmbin") {} + +Status TbeTaskBuilder::DoRegisterBinary(const OpKernelBin &kernel_bin, void **bin_handle) const { + rtDevBinary_t binary; + binary.version = 0; + binary.data = kernel_bin.GetBinData(); + binary.length = kernel_bin.GetBinDataSize(); + binary.magic = RT_DEV_BINARY_MAGIC_ELF; + auto ret = rtDevBinaryRegister(&binary, bin_handle); + if (ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtDevBinaryRegister failed, bin key = %s, rt ret = %d", stub_name_.c_str(), + static_cast(ret)); + return RT_FAILED; + } + + return SUCCESS; +} + +Status TbeTaskBuilder::DoRegisterMeta(void *bin_handle) { + std::string meta_data; + (void)AttrUtils::GetStr(op_desc_, TVM_ATTR_NAME_METADATA, meta_data); + GELOGI("TBE: meta data: %s", meta_data.empty() ? "null" : meta_data.c_str()); + if (!meta_data.empty()) { + auto rt_ret = rtMetadataRegister(bin_handle, meta_data.c_str()); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtMetadataRegister failed. 
bin key = %s, meta_data = %s, rt ret = %d", stub_name_.c_str(), + meta_data.c_str(), static_cast(rt_ret)); + return RT_FAILED; + } + } + + return SUCCESS; +} + +Status TbeTaskBuilder::DoRegisterFunction(void *bin_handle, const char *stub_name, const char *kernel_name) { + auto rt_ret = rtFunctionRegister(bin_handle, stub_name, stub_name, kernel_name, FUNC_MODE_NORMAL); + if (rt_ret != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtFunctionRegister failed. bin key = %s, kernel name = %s, rt ret = %d", stub_name, kernel_name, + static_cast(rt_ret)); + return RT_FAILED; + } + + return SUCCESS; +} + +Status TbeTaskBuilder::DoRegisterKernel(const ge::OpKernelBin &tbe_kernel, const char *bin_file_key, + void **bin_handle) { + std::string kernel_name; + GetKernelName(op_desc_, kernel_name); + + void *handle = nullptr; + auto ret = DoRegisterBinary(tbe_kernel, &handle); + if (ret != SUCCESS) { + return ret; + } + + ret = DoRegisterMeta(handle); + if (ret != SUCCESS) { + GE_CHK_RT(rtDevBinaryUnRegister(handle)); + return ret; + } + + ret = DoRegisterFunction(handle, bin_file_key, kernel_name.c_str()); + if (ret != SUCCESS) { + GE_CHK_RT(rtDevBinaryUnRegister(handle)); + return ret; + } + + GELOGI("Register function succeeded: kernel_name = %s", kernel_name.c_str()); + *bin_handle = handle; + return SUCCESS; +} + +Status TbeTaskBuilder::RegisterKernel(TbeOpTask &task) { + KernelBinRegistry ®istry = KernelBinRegistry::GetInstance(); + // check if already registered + const char *stub_func = registry.GetStubFunc(stub_name_); + if (stub_func != nullptr) { + task.SetStubFunc(stub_name_, stub_func); + return SUCCESS; + } + + // to avoid repeat register + std::lock_guard lock(g_reg_mutex); + // check again + stub_func = registry.GetStubFunc(stub_name_); + if (stub_func == nullptr) { + stub_func = registry.GetUnique(stub_name_); + GELOGI("RegisterKernel begin, stub_func = %s", stub_func); + + auto tbe_kernel = GetTbeKernel(op_desc_); + if (tbe_kernel == nullptr) { + GELOGE(PARAM_INVALID, 
"OP EXT ATTR NAME TBE_KERNEL not found. op = %s", op_desc_->GetName().c_str()); + return PARAM_INVALID; + } + + auto *holder = new (std::nothrow) KernelHolder(stub_func, tbe_kernel); + if (holder == nullptr) { + GELOGE(MEMALLOC_FAILED, "create KernelHodler failed."); + return MEMALLOC_FAILED; + } + + void *bin_handle = nullptr; + auto ret = DoRegisterKernel(*tbe_kernel, stub_func, &bin_handle); + if (ret == SUCCESS) { + holder->SetBinHandle(bin_handle); + if (!registry.AddKernel(stub_name_, holder)) { + // should not happen. only one thread can reach here + delete holder; + holder = nullptr; + GELOGE(INTERNAL_ERROR, "Add kernel failed. stub name = %s", stub_name_.c_str()); + return INTERNAL_ERROR; + } + } else { + delete holder; + holder = nullptr; + } + } + + task.SetStubFunc(stub_name_, stub_func); + return SUCCESS; +} + +Status TbeTaskBuilder::GetSmDesc(void **sm_desc, const SingleOpModelParam ¶m) const { + const std::string &sm_desc_str = kernel_def_.sm_desc(); + if (sm_desc_str.empty()) { + *sm_desc = nullptr; + } else { + GELOGD("To process sm desc, size = %zu", sm_desc_str.size()); + char *sm_control = const_cast(sm_desc_str.data()); + auto *l2_ctrl_info = reinterpret_cast(sm_control); + uint64_t gen_base_addr = param.base_addr; + // There is no weight for te op now. Update L2_mirror_addr by data memory base. 
+ uint64_t data_base_addr = static_cast(reinterpret_cast(param.mem_base)) - gen_base_addr; + for (auto &data_index : l2_ctrl_info->data) { + if (data_index.L2_mirror_addr != 0) { + data_index.L2_mirror_addr += data_base_addr; + } + } + + auto rtRet = rtMemAllocManaged(sm_desc, sm_desc_str.size(), RT_MEMORY_SPM); + if (rtRet != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtMemAllocManaged failed, ret: %d", static_cast(rtRet)); + return RT_FAILED; + } + + rtRet = rtMemcpy(*sm_desc, sm_desc_str.size(), sm_desc_str.data(), sm_desc_str.size(), RT_MEMCPY_HOST_TO_DEVICE); + if (rtRet != RT_ERROR_NONE) { + (void)rtMemFreeManaged(*sm_desc); + GELOGE(RT_FAILED, "rtMemcpy, ret: %d", static_cast(rtRet)); + return RT_FAILED; + } + } + + return SUCCESS; +} + +Status TbeTaskBuilder::SetKernelArgs(TbeOpTask &task, const SingleOpModelParam ¶m) { + uint8_t *args = nullptr; + size_t arg_size = kernel_def_.args_size(); + auto rtRet = rtMallocHost(reinterpret_cast(&args), arg_size); + if (rtRet != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtMallocHost failed, size = %zu, ret = %d", arg_size, static_cast(rtRet)); + return RT_FAILED; + } + + task.SetKernelArgs(args, arg_size, kernel_def_.block_dim()); + + rtRet = rtMemcpy(args, arg_size, kernel_def_.args().data(), arg_size, RT_MEMCPY_HOST_TO_HOST); + if (rtRet != RT_ERROR_NONE) { + GELOGE(RT_FAILED, "rtMemcpy args failed, size = %zu, ret = %d", arg_size, static_cast(rtRet)); + return RT_FAILED; + } + + const domi::KernelContext &context = kernel_def_.context(); + const auto *args_offset_tmp = reinterpret_cast(context.args_offset().data()); + uint16_t offset = *args_offset_tmp; + + // copy args + std::vector tensor_device_addr_vec = BuildTaskUtils::GetKernelArgs(op_desc_, param); + void *src_addr = reinterpret_cast(tensor_device_addr_vec.data()); + uint64_t src_len = sizeof(void *) * tensor_device_addr_vec.size(); + rtRet = rtMemcpy(args + offset, arg_size - offset, src_addr, src_len, RT_MEMCPY_HOST_TO_HOST); + if (rtRet != RT_ERROR_NONE) { + 
GELOGE(RT_FAILED, "rtMemcpy addresses failed, ret = %d", static_cast(rtRet)); + return RT_FAILED; + } + + return SUCCESS; +} + +Status TbeTaskBuilder::BuildTask(TbeOpTask &task, const SingleOpModelParam ¶m) { + GELOGD("Build tbe task begin"); + auto ret = SetKernelArgs(task, param); + if (ret != SUCCESS) { + return ret; + } + + ret = RegisterKernel(task); + if (ret != SUCCESS) { + return ret; + } + + void *stub_func = nullptr; + auto rtRet = rtGetFunctionByName(stub_name_.c_str(), &stub_func); + if (rtRet != SUCCESS) { + GELOGE(RT_FAILED, "rtGetFunctionByName failed."); + return RT_FAILED; + } + + task.SetStubFunc(stub_name_, stub_func); + return SUCCESS; +} +} // namespace ge diff --git a/src/ge/single_op/task/tbe_task_builder.h b/src/ge/single_op/task/tbe_task_builder.h new file mode 100644 index 00000000..25441289 --- /dev/null +++ b/src/ge/single_op/task/tbe_task_builder.h @@ -0,0 +1,90 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_SINGLE_OP_TASK_TBE_TASK_BUILDER_H_ +#define GE_SINGLE_OP_TASK_TBE_TASK_BUILDER_H_ + +#include +#include +#include +#include +#include + +#include "graph/op_desc.h" +#include "single_op/single_op.h" +#include "single_op/single_op_model.h" + +namespace ge { +class KernelHolder { + public: + KernelHolder(const char *stub_func, std::shared_ptr kernel_bin); + ~KernelHolder(); + + void SetBinHandle(void *bin_handle) { bin_handle_ = bin_handle; } + + private: + friend class KernelBinRegistry; + const char *stub_func_; + void *bin_handle_; + std::shared_ptr kernel_bin_; +}; + +class KernelBinRegistry { + public: + ~KernelBinRegistry(); + + static KernelBinRegistry &GetInstance() { + static KernelBinRegistry instance; + return instance; + } + + const char *GetUnique(const string &stub_func); + + const char *GetStubFunc(const std::string &stub_name); + + bool AddKernel(const std::string &stub_name, const KernelHolder *holder); + + private: + std::map registered_bins_; + std::set unique_stubs_; + std::mutex mutex_; +}; + +class TbeTaskBuilder { + public: + TbeTaskBuilder(const std::string &model_name, const OpDescPtr &op_desc, const domi::KernelDef &kernel_def); + ~TbeTaskBuilder() = default; + + Status BuildTask(TbeOpTask &task, const SingleOpModelParam ¶m); + + private: + Status SetKernelArgs(TbeOpTask &task, const SingleOpModelParam ¶m); + Status GetSmDesc(void **sm_desc, const SingleOpModelParam ¶m) const; + + Status RegisterKernel(TbeOpTask &task); + Status DoRegisterKernel(const OpKernelBin &kernel_bin, const char *bin_file_key, void **bin_handle); + Status DoRegisterBinary(const OpKernelBin &kernel_bin, void **bin_handle) const; + Status DoRegisterMeta(void *bin_handle); + + static Status DoRegisterFunction(void *bin_handle, const char *stub_name, const char *kernel_name); + + const OpDescPtr &op_desc_; + const domi::KernelDef &kernel_def_; + const std::string stub_name_; +}; +} // namespace ge + +#endif // GE_SINGLE_OP_TASK_TBE_TASK_BUILDER_H_ diff 
--git a/src/proto/CMakeLists.txt b/src/proto/CMakeLists.txt new file mode 100644 index 00000000..16112443 --- /dev/null +++ b/src/proto/CMakeLists.txt @@ -0,0 +1,66 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +cmake_minimum_required(VERSION 2.8) + +project(GeProto[CXX]) + +set(CMAKE_CXX_STANDARD 11) + +# add all proto files, generate corresponding .h and .cc files +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "om.proto" + "ge_ir.proto" + "insert_op.proto" + "task.proto" + "fwk_adapter.proto" + "op_mapping_info.proto" + ) + +file(GLOB_RECURSE ONNX_PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "${onnx_INC}/onnx/onnx.proto" + ) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) +ge_protobuf_generate(ge PROTO_ONNX_SRCS PROTO_ONNX_HDRS ${ONNX_PROTO_LIST}) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}/src) +include_directories(${GE_SOURCE_DIR}/src/common) +include_directories(${GE_SOURCE_DIR}/src/common/graph) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/inc/common) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) 
+include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/protobuf/src) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +set(CMAKE_CXX_FLAGS "-Wno-unused-variable ${CMAKE_CXX_FLAGS}") + +######### libproto_common.so ############# +add_library(proto_common STATIC ${PROTO_SRCS} ${PROTO_ONNX_SRCS}) +target_compile_definitions(proto_common PRIVATE + DAVINCI_CLOUD) +target_link_libraries(proto_common + ${PROTOBUF_LIBRARY} + ${c_sec} + ${slog} + rt + dl) diff --git a/src/proto/fusion_model.proto b/src/proto/fusion_model.proto new file mode 100644 index 00000000..2ff6b77a --- /dev/null +++ b/src/proto/fusion_model.proto @@ -0,0 +1,25 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +import "om.proto"; +package domi; + +message FusionModelDef { + string version = 1; + repeated OpDef fusion_op = 2; +} diff --git a/src/proto/fwk_adapter.proto b/src/proto/fwk_adapter.proto new file mode 100644 index 00000000..96368d55 --- /dev/null +++ b/src/proto/fwk_adapter.proto @@ -0,0 +1,42 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package aicpu.FWKAdapter; +option cc_enable_arenas = true; + + +// Defines an struct for input and output. +message TensorDataInfo { + + // value DataType + uint32 dtype = 1; + + // shape dim + repeated int64 dim = 2; + + // data point addr + int64 data_addr = 3; +}; + +message KernelRunParam { + // input + repeated TensorDataInfo input = 1; + // output + repeated TensorDataInfo output = 2; +}; + diff --git a/src/proto/ge_api.proto b/src/proto/ge_api.proto new file mode 100644 index 00000000..ac5b3b3a --- /dev/null +++ b/src/proto/ge_api.proto @@ -0,0 +1,104 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +syntax = "proto3"; +package ge.api_pb; + +import "ge_ir.proto"; + +// GE initialize +message GEInitialize { + map options = 1; +}; + +// initialize response +message GEInitializeResponse { + uint32 status = 1; + uint32 clientId = 2; +}; + +// GE finalize +message GEFinalize { + bool final = 1; + uint32 clientId = 2; +}; + +message GEFinalizeResponse { + uint32 status = 1; +}; + +// GE Session +message CreateSession{ + map options = 1; +}; + +message CreateSessionResponse { + uint32 status = 1; + uint64 sessionId = 2; +}; + +//GE AddGraph +//model serialize :: serializegraph +message SessionAddGraph{ + uint32 graphId = 1; + uint64 sessionId = 2; + ge.proto.GraphDef graph = 3; +}; + +message SessionAddGraphResponse { + uint32 status = 1; +}; + +//GE SessionRemoveGraph +message SessionRemoveGraph{ + uint32 graphId = 1; + uint64 sessionId = 2; +}; + +message SessionRemoveGraphResponse { + uint32 status = 1; +}; + +message SessionRunGraph{ + uint32 graphId = 1; + uint64 sessionId = 2; + repeated ge.proto.TensorDef tensor = 3; +}; + +message SessionBuildGraph{ + uint32 graphId = 1; + uint64 sessionId = 2; + repeated ge.proto.TensorDef tensor = 3; + string savePath = 4; +}; + +message SessionRunGraphResponse { + uint32 status = 1; + repeated ge.proto.TensorDef tensor = 2; +}; + +message SessionBuildGraphResponse { + uint32 status = 1; +}; + +message DestroySession{ + bool final = 1; + uint64 sessionId = 2; +}; + +message DestroySessionResponse { + uint32 status = 1; +}; diff --git a/src/proto/ge_ir.proto b/src/proto/ge_ir.proto new file mode 100644 index 00000000..96962346 --- /dev/null +++ b/src/proto/ge_ir.proto @@ -0,0 +1,205 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package ge.proto; + +enum DataType +{ + DT_UNDEFINED = 0; // Used to indicate a DataType field has not been set. + DT_FLOAT = 1; // float type + DT_FLOAT16 = 2; // fp16 type + DT_INT8 = 3; // int8 type + DT_UINT8 = 4; // uint8 type + DT_INT16 = 5; // int16 type + DT_UINT16 = 6; // uint16 type + DT_INT32 = 7; // + DT_INT64 = 8; // int64 type + DT_UINT32 = 9; // unsigned int32 + DT_UINT64 = 10; // unsigned int64 + DT_BOOL = 11; // bool type + DT_DOUBLE = 12; // double type + DT_STRING = 13; // string type + DT_DUAL_SUB_INT8 = 14; /**< dual output int8 type */ + DT_DUAL_SUB_UINT8 = 15; /**< dual output uint8 type */ + DT_COMPLEX64 = 16; // complex64 type + DT_COMPLEX128 = 17; // complex128 type + DT_QINT8 = 18; // qint8 type + DT_QINT16 = 19; // qint16 type + DT_QINT32 = 20; // qint32 type + DT_QUINT8 = 21; // quint8 type + DT_QUINT16 = 22; // quint16 type + DT_RESOURCE = 23; // resource type + DT_STRING_REF = 24; // string_ref type + DT_DUAL = 25; /**< dual output type */ +} + +message AttrDef +{ + message ListValue + { + enum ListValueType{ + VT_LIST_NONE = 0; + VT_LIST_STRING = 1; + VT_LIST_INT = 2; + VT_LIST_FLOAT = 3; + VT_LIST_BOOL = 4; + VT_LIST_BYTES = 5; + VT_LIST_TENSOR_DESC = 6; + VT_LIST_TENSOR = 7; + VT_LIST_GRAPH = 8; + VT_LIST_NAMED_ATTRS = 9; + VT_LIST_DATA_TYPE = 10; + } + repeated bytes s = 2; // "list(string)" + repeated int64 i = 3; // "list(int)" + repeated float f = 4; // "list(float)" + repeated bool b = 5; // "list(bool)" + repeated bytes bt = 7; + repeated TensorDescriptor td = 8; + repeated 
TensorDef t = 9; + repeated GraphDef g = 10; + repeated NamedAttrs na = 11; + repeated int64 dt = 12; // list ge::DataType + + ListValueType val_type = 20; + } + + message ListListInt{ + message ListInt{ + repeated int64 list_i = 1; // list int + } + repeated ListInt list_list_i = 1; // list list int + } + + oneof value + { + bytes s = 2; // "string" + int64 i = 3; // "int" + float f = 4; // "float" + bool b = 5; // "bool" + bytes bt = 7; + ListValue list = 1; // any "list(...)" + NamedAttrs func = 10; // Used to support attr nesting + TensorDescriptor td = 11; // GeTensorDesc type + TensorDef t = 12; // GeTensor type + GraphDef g = 13; // Graph type + ListListInt list_list_int = 14; // List List Int type + int64 dt = 15; // ge::DataType + } +} + +// A list of attr names and their values. The whole list is attached +// with a string name. E.g., MatMul[T=float]. +message NamedAttrs +{ + string name = 1; + map attr = 2; +} + +// Shape / dimension description, using row-major order +message ShapeDef +{ + repeated int64 dim = 1; // Size of each dimension +} + +// Multidimensional data description +message TensorDescriptor +{ + string name = 1; // Optional parameter, tensor name + + DataType dtype = 2; // tensor datatype + ShapeDef shape = 3; // Shape / dimension + string layout = 4; // Tensor format, eg: "NCHW", "NHWC", "CHW", "ND" + + bool has_out_attr = 9; + int64 size = 10; + int64 weight_size = 11; + bool reuse_input = 12; + bool output_tensor = 13; + string device_type = 14; + bool input_tensor =15; + int64 real_dim_cnt = 16; + int64 reuse_input_index = 17; + int64 data_offset = 18; + int64 cmps_size = 19; + string cmps_tab = 20; + int64 cmps_tab_offset = 21; + + map attr = 5; // Set of extra parameter fields +} + +// GeTensor definition +message TensorDef +{ + TensorDescriptor desc = 1; // Tensor description + bytes data = 2; // Tensor data +} + + +// Operator description +message OpDef +{ + string name = 1; // name + string type = 2; // type + + repeated string 
input = 5; // input original op name + outgoing index. op_name:index + + map attr = 10; // Set of operator parameter fields + + bool has_out_attr = 20; + int64 id = 21; + int64 stream_id =22; + repeated string input_name = 23; + repeated string src_name = 24; + repeated int64 src_index = 25; + repeated string dst_name = 26; + repeated int64 dst_index = 27; + repeated int64 input_i = 28; + repeated int64 output_i = 29; + repeated int64 workspace = 30; + repeated int64 workspace_bytes = 31; + repeated bool is_input_const = 32; + repeated TensorDescriptor input_desc = 33; + repeated TensorDescriptor output_desc = 34; +} + +// Graph definition +message GraphDef +{ + string name = 1; // name + + repeated string input = 4; // Graph input + repeated string output = 5; // Graph output + + repeated OpDef op = 6; // List of operators + + map attr = 11; // Extended field +} + +// model definition +message ModelDef +{ + string name = 1; // name + uint32 version = 2; // IR Proto verion + string custom_version = 3; // User model version number, passed in by user + + repeated GraphDef graph = 7; // Graph definition,graph[0] represents the main diagram in modeldef + + map attr = 11; // Extended field +} + diff --git a/src/proto/insert_op.proto b/src/proto/insert_op.proto new file mode 100644 index 00000000..fd5bd3ec --- /dev/null +++ b/src/proto/insert_op.proto @@ -0,0 +1,142 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; + +package domi; + +message InsertNewOps { + repeated AippOpParams aipp_op = 1; + repeated MultiShapeOpParams multi_shape_op = 2; +} + +message AippOpParams { + enum InputFormat { + UNDEFINED = 0; + YUV420SP_U8 = 1; + XRGB8888_U8 = 2; + RGB888_U8 = 3; + YUV400_U8 = 4; + NC1HWC0DI_FP16 = 5; + NC1HWC0DI_S8 = 6; + ARGB8888_U8 = 7; + YUYV_U8 = 8; + YUV422SP_U8 = 9; + AYUV444_U8 = 10; + RAW10 = 11; + RAW12 = 12; + RAW16 = 13; + RAW24 = 14; + } + + enum AippMode { + undefined = 0; + static = 1; + dynamic = 2; + } + + // AIPPģʽ־̬AIPPͶ̬AIPP + AippMode aipp_mode = 1; + + // related_input_rankΪΪͣ÷Χ>=0, <=DataӵĸĬֵΪ0 + // ʶģ͵ĵڼAIPPģ룬ҪԵ2AIPPrelated_input_rankΪ1 + uint32 related_input_rank = 2; + + // input_edge_idxΪѡΪͣ÷ΧΪ>=0 + // øòãڶDataӲͬͬAIPPòûãĬ϶related_input_rankָģAIPP + // ֵ <= Dataߵĸ + repeated uint32 input_edge_idx = 3; + + // [Begin] ̬AIPPþ̬AIPPʱЧ + uint32 max_src_image_size = 4; + + // Ƿ֧תĬϲ֧֣֧תʱжĿռʧ + bool support_rotation = 5; + + // [End] ̬AIPP + + + // [Begin] ̬AIPPö̬AIPPʱЧ + InputFormat input_format = 51; + bool csc_switch = 52; + float cpadding_value = 53; + bool rbuv_swap_switch = 54; + bool ax_swap_switch = 55; + bool single_line_mode = 56; + + int32 src_image_size_w = 57; + int32 src_image_size_h = 58; + + bool crop = 59; + int32 load_start_pos_w = 60; + int32 load_start_pos_h = 61; + int32 crop_size_w = 62; + int32 crop_size_h = 63; + + bool resize = 64; + int32 resize_output_w = 65; + int32 resize_output_h = 66; + + bool padding = 67; + int32 left_padding_size = 68; + int32 right_padding_size = 69; + int32 top_padding_size = 70; + int32 bottom_padding_size = 71; + + int32 mean_chn_0 = 10; + int32 mean_chn_1 = 11; + int32 mean_chn_2 = 12; + int32 mean_chn_3 = 19; + float min_chn_0 = 13; + float min_chn_1 = 14; + float min_chn_2 = 15; + float min_chn_3 = 20; + repeated float var_reci_chn_0 = 16; + repeated float var_reci_chn_1 
= 17; + repeated float var_reci_chn_2 = 18; + repeated float var_reci_chn_3 = 21; + + repeated int32 matrix_r0c0 = 30; + repeated int32 matrix_r0c1 = 31; + repeated int32 matrix_r0c2 = 32; + repeated int32 matrix_r1c0 = 33; + repeated int32 matrix_r1c1 = 34; + repeated int32 matrix_r1c2 = 35; + repeated int32 matrix_r2c0 = 36; + repeated int32 matrix_r2c1 = 37; + repeated int32 matrix_r2c2 = 38; + repeated int32 output_bias_0 = 39; + repeated int32 output_bias_1 = 40; + repeated int32 output_bias_2 = 41; + repeated int32 input_bias_0 = 42; + repeated int32 input_bias_1 = 43; + repeated int32 input_bias_2 = 44; + + // [End] ̬AIPP +} + +message MultiShapeOpParams { + enum MultiShapeMode { + batch = 0; //̬batch + resolution = 1; //ֱ̬ʣչ + } + + MultiShapeMode mode = 1; //ģʽ + uint32 related_input_rank = 2; //Ӳ뵽ĸ + + + repeated uint32 batch_list = 11; //batch_listֵbatch_listĸ28֮ +} diff --git a/src/proto/om.proto b/src/proto/om.proto new file mode 100644 index 00000000..dd992191 --- /dev/null +++ b/src/proto/om.proto @@ -0,0 +1,401 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +syntax = "proto3"; + +package domi; + +enum TargetType +{ + MINI = 0; + TINY = 1; + LITE = 2; +} + +// offline model +message ModelDef { + string name = 1; + uint32 version = 2; + + uint64 memory_size = 10; + uint32 stream_num = 11; + uint32 event_num = 12; + uint64 weight_size = 13; + uint32 label_num = 15; + repeated OpDef op = 20; + TargetType target_type = 23; + + map attr = 30; +}; + +// operator define +message OpDef { + string name = 1; + string type = 2; + + uint32 id = 3; + uint32 stream_id = 4; + + repeated string input_name = 5; + + repeated string src_name = 8; + repeated int32 src_index = 9; + repeated int64 input = 10; + repeated int64 output = 11; + repeated TensorDescriptor input_desc = 12; + repeated TensorDescriptor output_desc = 13; + repeated WeightDef weights = 14; + repeated string dst_name = 15; + repeated int32 dst_index = 16; + + repeated int64 workspace = 20; + repeated uint32 workspace_bytes = 21; + + repeated string weight_name = 22; + repeated bool is_input_const = 23; + + map attr = 30; + + QuantizeFactorParams quantize_factor = 31; + + oneof op_params { + // start at 100 here + SendOpParams sender_param = 100; + RecvOpParams receiver_param = 200; + ConvolutionOpParams convolution_param = 300; + PoolingOpParams pooling_param = 400; + EltwiseOpParams eltwise_param = 500; + BatchNormOpParams batchnorm_param = 600; + ScaleOpParams scale_param = 700; + FullConnectionOpParams full_connection_param = 800; + SoftmaxOpParams softmax_param = 900; + ActivationOpParams activation_param = 1000; + ReshapeOpParams reshape_param = 1100; + } +}; + +message SendOpParams { + uint32 event_id = 1; +}; + +message RecvOpParams { + uint32 event_id = 1; +}; + +enum QuantizeScaleType +{ + VECTOR_SCALE = 0; + SCALAR_SCALE = 1; +} + +enum QuantizeScaleMode +{ + NORMAL_MODE = 0; + SQRT_MODE = 1; +} + +enum QuantizeAlgorithm +{ + NON_OFFSET_ALGO = 0; + HALF_OFFSET_ALGO = 1; + ALL_OFFSET_ALGO = 2; +} +message QuantizeFactor +{ + QuantizeScaleMode scale_mode 
= 1; + bytes scale_value = 2; + int64 scale_offset = 3; + bytes offset_data_value = 4; + int64 offset_data_offset = 5; + bytes offset_weight_value = 6; + int64 offset_weight_offset = 7; + bytes offset_pad_value = 8; + int64 offset_pad_offset = 9; +}; + +message QuantizeCalcFactor +{ + bytes offsetw = 1; + int64 offsetw_offset = 2; + bytes offsetd = 3; + int64 offsetd_offset = 4; + bytes scalereq = 5; + int64 scaledreq_offset = 6; + bytes offsetdnext = 7; + int64 offsetdnext_offset = 8; +} + +message QuantizeFactorParams +{ + QuantizeAlgorithm quantize_algo = 1; + QuantizeScaleType scale_type = 2; + QuantizeFactor quantize_param = 3; + QuantizeFactor dequantize_param = 4; + QuantizeFactor requantize_param = 5; + QuantizeCalcFactor quantizecalc_param = 6; +}; + +message ConvolutionOpParams { + int32 mode = 1; + int32 algo = 2; + int32 pad_mode = 3; + uint32 group = 4; + uint32 num_output = 5; + + repeated uint32 pad = 10; + repeated uint32 stride = 11; + repeated uint32 dilation = 12; + repeated uint32 kernel = 13; + + float alpha = 20; + float beta = 21; + + WeightDef filter = 40; + WeightDef bias = 41; + + bool relu_flag = 62; + repeated uint32 adj = 70; + repeated uint32 target_shape = 71; + repeated uint32 before_pad = 72; +}; + +message PoolingOpParams { + int32 mode = 1; + int32 nan_opt = 2; + int32 pad_mode = 3; + bool global_pooling = 4; + + repeated uint32 window = 10; + repeated uint32 pad = 11; + repeated uint32 stride = 12; + bool ceil_mode = 13; + int32 data_mode = 14; + + float alpha = 20; + float beta = 21; + repeated uint32 before_pad = 22; +}; + +message EltwiseOpParams { + int32 mode = 1; + repeated float coeff = 2; + float alpha = 3; + float beta = 4; + repeated WeightDef weight = 5; + bool relu_flag = 6; +}; + +message ActivationOpParams { + int32 mode = 1; + float coef = 2; + float alpha = 3; + float beta = 4; +}; + +message BatchNormOpParams { + int32 mode = 1; + + float alpha = 2; + float beta = 3; + double epsilon = 4;//optinal,[default = 
1e-5] + bool use_global_stats = 5; //optinal,by default true,testing mode + float moving_average_fraction = 6; //optinal,[default = .999]; + + WeightDef estimated_mean = 7; + WeightDef estimated_variance = 8; + + WeightDef scale = 9; + WeightDef bias = 10; +}; + +message ScaleOpParams { + WeightDef scale = 1; + WeightDef bias = 2; +}; + +message ReshapeOpParams { + float alpha = 1; + float beta = 2; + ShapeDef shape = 3; + int32 axis = 4; + int32 num_axes = 5; + int32 format = 6; +}; + +message SoftmaxOpParams { + int32 algo = 1; + int32 mode = 2; + float alpha = 3; + float beta = 4; +}; + +message FullConnectionOpParams { + WeightDef filter = 1; + WeightDef bias = 2; + uint32 num_output = 3; + bool relu_flag = 12; +}; + +message FlattenOpParams { + float alpha = 1; + float beta = 2; + int32 start_axis = 3; + int32 end_axis = 4; +} + +message AddLimitedOpParams { + float alpha = 1; + float beta = 2; + int32 axis = 3; + bool broadcast = 4; + + repeated WeightDef weight = 10; +}; + +message MulLimitedOpParams { + float alpha = 1; + float beta = 2; + int32 axis = 3; + bool broadcast = 4; + + repeated WeightDef weight = 10; +}; + +message AddOpParams { + float alpha = 1; + float beta = 2; + + repeated WeightDef weight = 10; +}; + +message MulOpParams { + float alpha = 1; + float beta = 2; + + repeated WeightDef weight = 10; +}; + +message SubOpParams { + float alpha = 1; + float beta = 2; + + repeated WeightDef weight = 10; +}; + +message BiasAddOpParams { + float alpha = 1; + float beta = 2; + + WeightDef bias = 10; +}; + +message MatMulOpParams { + float alpha = 1; + float beta = 2; + bool transposeX = 3; + bool transposeW = 4; + + WeightDef filter = 10; + WeightDef bias = 12; +}; + +message RsqrtOpParams { + float alpha = 1; + float beta = 2; +}; + + +message WeightDef { + int32 format = 1; + int32 data_type = 2; + ShapeDef shape = 3; + bytes data = 4; + int64 data_offset = 5; + uint32 cmps_size = 6; + bytes cmps_tab = 7; + int64 cmps_tab_offset = 10; + CompressInfo 
cmps_info = 8; + AllOffsetQuantizeInfo alloffset_quantize_info = 11; +} + +message ShapeDef { + repeated int64 dim = 1; +} + +enum DeviceType { + NPU = 0; // In default, we will use NPU. + CPU = 1; // CPU +} + +message AllOffsetQuantizeInfo { + float scale = 1; + int32 offset = 2; +} + +message TensorDescriptor { + int32 format = 1; + int32 data_type = 2; + repeated int64 dim = 3; + uint32 size = 4; + bool reuse_input = 5; + bool output_tensor = 7; + DeviceType device_type = 8; + bool input_tensor = 9; + uint32 real_dim_cnt = 10; + uint32 reuse_input_index = 11; + AllOffsetQuantizeInfo alloffset_quantize_info = 12; +} + +message CompressInfo { + int32 blockRow = 1; // block row + int32 blockCol = 2; // block col + int32 fractalK = 3; // fractal K + int32 fractalN = 4; // fractal N + int32 lastFractalK = 5; // K of last fractal + int32 lastFractalN = 6; // N of last fractal + int32 cubeSize = 7; // cube's length + int32 loadDir = 8; // data load directtiono 0:col load 1:row load +} + +message AttrDef { + message ListValue { + repeated string s = 2; // "list(string)" + repeated int64 i = 3 [packed = true]; // "list(int)" + repeated float f = 4 [packed = true]; // "list(float)" + repeated bool b = 5 [packed = true]; // "list(bool)" + repeated uint32 u = 6 [packed = true]; // "list(uint)" + repeated bytes bt = 7; + } + + oneof value { + string s = 2; // "string" + int64 i = 3; // "int" + float f = 4; // "float" + bool b = 5; // "bool" + uint32 u = 6; // "uint32" + bytes bt = 7; + ListValue list = 1; // any "list(...)" + NamedAttrs func = 10; + } +} + +// A list of attr names and their values. The whole list is attached +// with a string name. E.g., MatMul[T=float]. 
+message NamedAttrs { + string name = 1; + map attr = 2; +} + diff --git a/src/proto/op_mapping_info.proto b/src/proto/op_mapping_info.proto new file mode 100644 index 00000000..9a460907 --- /dev/null +++ b/src/proto/op_mapping_info.proto @@ -0,0 +1,57 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +package aicpu.dump; + +message Shape { + repeated uint64 dim = 1; +} + +message Output { + int32 data_type = 1; + int32 format = 2; + Shape shape = 3; + uint64 address = 4; + string original_name = 5; + int32 original_output_index = 6; + int32 original_output_data_type = 7; + int32 original_output_format = 8; +}; + +message Op { + string op_name = 1; + string op_type = 2; +}; + +message Task { + uint32 task_id = 1; + uint32 stream_id = 2; + Op op = 3; + repeated Output output = 4; +}; + +message OpMappingInfo { + string dump_path = 1; + oneof model_name_param { + string model_name = 2; + } + oneof model_id_param { + uint32 model_id = 3; + } + uint32 flag = 4; // 0x01 load, 0x00 unload + repeated Task task = 5; +}; \ No newline at end of file diff --git a/src/proto/optimizer_priority.proto b/src/proto/optimizer_priority.proto new file mode 100644 index 00000000..3327be8a --- /dev/null +++ b/src/proto/optimizer_priority.proto @@ -0,0 +1,23 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * 
you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +syntax = "proto3"; +package ge.optimizers; + +// Default: GE>FE>AICPU +message Priority{ + repeated string optimizer = 1; +} \ No newline at end of file diff --git a/src/proto/task.proto b/src/proto/task.proto new file mode 100644 index 00000000..3eb8de5c --- /dev/null +++ b/src/proto/task.proto @@ -0,0 +1,144 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +syntax = "proto3"; + +package domi; + +message ModelTaskDef { + string version = 1; + + map attr = 9; // Extended field + repeated TaskDef task = 10; + + uint64 memory_size = 11; + uint32 stream_num = 12; + uint32 event_num = 13; + uint64 weight_size = 14; + + repeated bytes op = 15; // input/output opdef in bytes + + uint64 base_addr = 16; // base addr + uint64 weight_addr = 17; // weight addr + uint32 batch_num = 18; +} + + +message TaskDef { + uint32 id = 1; + uint32 type = 2; + + uint32 stream_id = 10; + uint32 event_id = 11; + + KernelDef kernel = 20; + KernelExDef kernel_ex = 21; + KernelHcclDef kernel_hccl = 25; + EventExDef event_ex = 26; + LogTimeStampDef log_timestamp = 28; + + uint32 label_id = 30; + + MemcpyAsyncDef memcpy_async = 31; + StreamSwitchDef stream_switch = 32; + StreamActiveDef stream_active = 33; + bytes private_def = 34; + uint64 ops_kernel_store_ptr = 35; // adjustments to other fields in the future + StreamSwitchNDef stream_switch_n = 36; +} + +message KernelDef { + KernelContext context = 1; + + string stub_func = 10; + uint32 block_dim = 11; + uint32 args_size = 12; + bytes args = 13; + bytes sm_desc = 14; + bytes flowtable = 15; + string so_name = 16; + string kernel_name = 17; +} + +message KernelContext { + uint32 kernel_type = 1; + uint32 op_id = 2; // OP type in CCE + uint32 kernel_func_id = 3; + uint32 op_index = 4; // TE/Custom operator + bool is_flowtable = 5; // Identify whether args is a flowtable structure + bytes args_offset = 6; // args offset information + uint32 args_count = 7; // args count + repeated uint32 origin_op_index = 8; +} + + +message KernelExDef { + uint32 flags = 1; + + uint32 op_index = 4; + uint32 args_size = 12; + bytes args = 13; + bytes task_info = 14; // serialized nodeDef, funcDef, inputoutput + uint32 task_info_size = 15; +} + + +message KernelHcclDef { + uint32 op_index = 8; + string hccl_type = 9; +} + + +message EventExDef { + uint32 op_index = 1; + uint32 event_type = 2; +} + +message 
LogTimeStampDef { + uint64 logid = 1; + bool notify = 2; + uint32 flat = 3; +} + +message MemcpyAsyncDef { + uint64 dst = 1; + uint64 dst_max = 2; + uint64 src = 3; + uint64 count = 4; + uint32 kind = 5; +} + +message StreamSwitchDef { + uint32 op_index = 1; + uint32 true_stream_id = 2; + int64 value = 3; + uint64 value_ptr = 4; + uint32 data_type = 5; +} + +message StreamActiveDef { + uint32 op_index = 1; + uint32 active_stream_id = 2; +} + +message StreamSwitchNDef { + uint32 op_index = 1; + uint32 size = 2; + repeated int64 target_value = 3; + repeated uint32 true_stream_id = 4; + uint32 element_size = 5; + uint32 data_type = 6; +} diff --git a/tests/CMakeLists.txt b/tests/CMakeLists.txt new file mode 100644 index 00000000..53529082 --- /dev/null +++ b/tests/CMakeLists.txt @@ -0,0 +1,32 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +project(tests CXX C) +find_package(Threads) +add_subdirectory(depends/cce) +add_subdirectory(depends/slog) +add_subdirectory(depends/mmpa) +add_subdirectory(depends/runtime) +add_subdirectory(depends/omg) +add_subdirectory(depends/hccl) +add_subdirectory(depends/profiler) + +if (ENABLE_GE_COV OR ENABLE_GE_UT) + add_subdirectory(ut) +endif() + +if (ENABLE_GE_ST) + add_subdirectory(st) +endif() diff --git a/tests/depends/cce/CMakeLists.txt b/tests/depends/cce/CMakeLists.txt new file mode 100644 index 00000000..24aaa096 --- /dev/null +++ b/tests/depends/cce/CMakeLists.txt @@ -0,0 +1,73 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +cmake_minimum_required(VERSION 2.8) + +project(STUB_CCE) + +set(CMAKE_CXX_STANDARD 11) + +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/src/common) +include_directories(${GE_SOURCE_DIR}/src/common/graph) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "${GE_SOURCE_DIR}/src/proto/om.proto" + "${GE_SOURCE_DIR}/src/proto/ge_ir.proto" + "${GE_SOURCE_DIR}/src/proto/task.proto" +) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) + +file(GLOB_RECURSE SRCS RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "${GE_SOURCE_DIR}/src/common/graph/ge_attr_define.cc" + "${GE_SOURCE_DIR}/src/common/graph/anchor.cc" + "${GE_SOURCE_DIR}/src/common/graph/ge_attr_value.cc" + "${GE_SOURCE_DIR}/src/common/graph/buffer.cc" + "${GE_SOURCE_DIR}/src/common/graph/compute_graph.cc" + "${GE_SOURCE_DIR}/src/common/graph/graph.cc" + "${GE_SOURCE_DIR}/src/common/graph/model.cc" + "${GE_SOURCE_DIR}/src/common/graph/model_serialize.cc" + "${GE_SOURCE_DIR}/src/common/graph/node.cc" + "${GE_SOURCE_DIR}/src/common/graph/op_desc.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator_factory.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator_factory_impl.cc" + "${GE_SOURCE_DIR}/src/common/graph/tensor.cc" + 
"${GE_SOURCE_DIR}/src/common/graph/detail/attributes_holder.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/anchor_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/graph_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/node_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/op_desc_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/type_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/op_imp.cc" + "${GE_SOURCE_DIR}/src/common/graph/shape_refiner.cc" + "${GE_SOURCE_DIR}/src/common/graph/ge_tensor.cc" + "${GE_SOURCE_DIR}/src/common/graph/opsproto/opsproto_manager.cc" +) +add_library(cce_ge_stub SHARED src/cce_stub.cc ${PROTO_SRCS} ${PROTO_HDRS}) +target_link_libraries(cce_ge_stub ge_protobuf::protobuf) + +add_library(cce_stub SHARED ${SRCS} ${PROTO_SRCS} ${PROTO_HDRS}) +target_link_libraries(cce_stub ge_protobuf::protobuf) diff --git a/tests/depends/cce/src/cce_stub.cc b/tests/depends/cce/src/cce_stub.cc new file mode 100644 index 00000000..6ce332ad --- /dev/null +++ b/tests/depends/cce/src/cce_stub.cc @@ -0,0 +1,577 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include + +#include "cce/optimizer/fusion_engine.h" +#include "common/op/attr_value_util.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/graph_utils.h" + +using namespace cce; +using namespace std; +using namespace ge; +using namespace fusion; + +uint64_t global_mem_base = 0; + +namespace cce { +#define DIM_MAX_SIZE 8 +static const uint32_t C0 = 16; +struct tagCcPad {}; +struct tagCcConvolution {}; + +struct tagCcLRN {}; + +struct tagCcFasterRcnnProposal {}; +struct tagCcRoiAlign {}; +struct tagCcBatchNorm {}; +struct tagCcDetectpostprocess {}; + +struct tagCcSsdDetectionOutput {}; + +struct tagCcRefinedetDetectionOutput {}; + +struct tagCcMsrGenerateRpnProposals {}; + +struct tagCcFilter { + vector dims; +}; + +struct tagCcTensor { + ccTensorFormat_t format; + ccDataType_t data_type; + uint32_t dim_cnt; + int32_t real_dim_cnt; + uint32_t data_size; + int32_t dim_buf[DIM_MAX_SIZE]; + int32_t stride_buf[DIM_MAX_SIZE]; +}; + +typedef struct tagCcPooling { + ccPoolingMode_t mode; + ccPaddingMode_t pad_mode; + ccNanPropagation_t max_pooling_nan_opt; + uint32_t dim_cnt; + int32_t window_dim[6]; + int32_t padding[6]; + int32_t stride[6]; +} ccPooling_t; + +struct tagCcActivation {}; + +struct tagCcFasterRcnnDetectionOutput {}; +struct tagCcSpatialTransformer {}; + +struct tagCcPower {}; +struct tagCcResizeBilinear {}; +struct tagCcSsdNormalize {}; +struct tagCcSsdPostProcessor {}; +struct tagCcSsdPriorBox {}; +struct tagCcPsRoiPooling {}; + +struct tagMsrFastRcnnPredictions {}; +struct tagCcPRelu {}; +struct tagCcStridedSlice {}; + +struct tagCcStridedSliceAttrs {}; + +struct tagCcRnn {}; + +struct tagCcArgmaxmin {}; + +typedef struct tagCcLog { + ccDataType_t data_type; + uint32_t param_cnt; +} ccLog_t; +typedef struct tagCcLog *ccLogDescriptor_t; + +struct tagCcPadV2 {}; + +ccStatus_t ccGetPadV2OutputDim(const ccTensorDescriptor_t x_desc, const ccPadV2Descriptor_t pad_desc, int32_t *dim_cnt, + int32_t 
dim[], int32_t dim_len) { + *dim_cnt = 4; + dim[0] = 1; + dim[1] = 2; + dim[2] = 2; + dim[3] = 3; + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccPadV2Forward(ccHandle_t handle, const ccPadV2Descriptor_t pad_desc, const void *alpha, + const ccTensorDescriptor_t x_desc, const void *x, const void *beta, + const ccTensorDescriptor_t output_desc, void *output) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccCreatePadV2Descriptor(ccPadV2Descriptor_t *pad_desc) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccDestroyPadV2Descriptor(ccPadV2Descriptor_t *pad_desc) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccSetKernelOpMap(ccHandle_t handle) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccDataDumpForward(ccHandle_t handle, const void *buffer, const uint64_t buf_len, const uint32_t task_index) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetPadV2Descriptor(ccPadV2Descriptor_t pad_desc, const int32_t pad_shape_cnt, + const int32_t pad_shape_low[], const int32_t pad_shape_high[], + const ccPadMode_t pad_mode, const void *pad_value, const ccDataType_t pad_value_type) { + return CC_STATUS_SUCCESS; +} + +struct tagCcYoloDetectionOutput { + ccYoloVersion_t yolo_version; + uint32_t net_h; + uint32_t net_w; + uint32_t post_top_k; + uint32_t classes; + float nms_threshold; + float iou_thre_decay; + float coor_scale_factor; + bool relative; + float obj_threshold; + float cls_threshold; + uint32_t bias_num; + float *bias; +}; + +struct tagCcYoloRegion {}; + +struct tagCcEltwise {}; + +struct tagCcHashTableLookup {}; + +struct tagCcEmbeddingAttnDecoder {}; +struct tagNonMaxSuppression {}; + +struct tagCcArcSinCos {}; +struct tagCcPow {}; +struct tagCcConcatFive2Four_t {}; +struct tagCcConcatFour2Five_t {}; + +ccStatus_t ccCreatePowDescriptor(ccPowDescriptor_t *pow_desc) { + *pow_desc = new tagCcPow(); + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetPowDescriptor(ccPowDescriptor_t pow_desc, ccDataType_t data_type, uint32_t param_cnt) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t 
ccDestroyPowDescriptor(ccPowDescriptor_t *pow_desc) { + if (nullptr == pow_desc) { + return CC_STATUS_BAD_PARAM; + } + + delete *pow_desc; + *pow_desc = 0; + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccPowForward(ccHandle_t handle, const ccPowDescriptor_t pow_desc, const void *pow_param, const void *alpha, + const ccTensorDescriptor_t x_desc, const void *x, const ccTensorDescriptor_t y_desc, + const void *y, const void *beta, const ccTensorDescriptor_t z_desc, void *z) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccLogicalOrForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t x_desc, const void *x, + const ccTensorDescriptor_t y_desc, const void *y, const void *beta, + const ccTensorDescriptor_t output_desc, void *output) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccCompareForward(ccHandle_t handle, ccCompareType_t compare_type, const void *alpha, + const ccTensorDescriptor_t x_desc, const void *x, const ccTensorDescriptor_t y_desc, + const void *y, const void *beta, const ccTensorDescriptor_t output_desc, void *output) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccGetCompareOutputDim(const ccTensorDescriptor_t x_desc, const ccTensorDescriptor_t y_desc, int32_t *dim_cnt, + int32_t *dim, int32_t dim_len) { + *dim_cnt = 4; + dim[0] = 1; + dim[1] = 1; + dim[2] = 1; + dim[3] = 1; + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccArcTanForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t x_desc, const void *x, + const void *beta, const ccTensorDescriptor_t y_desc, void *y) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccAtanhForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t x_desc, const void *x, + const void *beta, const ccTensorDescriptor_t y_desc, void *y) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccIsDepthwiseHighPerformance(int32_t input_n, int32_t input_c, int32_t input_h, int32_t input_w, + int32_t filter_n, int32_t filter_c, int32_t filter_h, int32_t filter_w, + int32_t dilation_h, 
int32_t dilation_w, int32_t pad_h_head, int32_t pad_h_tail, + int32_t pad_w_head, int32_t pad_w_tail, int32_t stride_h, int32_t stride_w, + int32_t group_num, bool &is_high_performance, bool is_quant, + ccDataType_t input_data_type, ccDataType_t output_data_type) { + is_high_performance = true; + return CC_STATUS_SUCCESS; +} + +struct tagCcSpaceToBatch {}; + +struct tagCcBatchToSpace {}; + +struct tagCcResizeNearestNeighbor {}; + +ccStatus_t ccGetStream(ccHandle_t handle, rtStream_t *stream_id) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccGetRtVersion(uint32_t *count) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccDestroyTensorDescriptor(ccTensorDescriptor_t *tensor_desc) { + if (nullptr == tensor_desc) { + return CC_STATUS_BAD_PARAM; + } + delete *tensor_desc; + *tensor_desc = 0; + return CC_STATUS_SUCCESS; +} +ccStatus_t ccDestroyFilterDescriptor(ccFilterDescriptor_t *filter_desc) { + delete *filter_desc; + *filter_desc = 0; + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccGetFilterSizeInBytes(const ccFilterDescriptor_t filter_desc, uint32_t *size) { + *size = filter_desc->dims[0] * filter_desc->dims[1] * filter_desc->dims[2] * filter_desc->dims[3] * sizeof(float); + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccTransFilter(const ccFilterDescriptor_t w_desc, const void *w, ccFilterDescriptor_t y_desc, void *y, + uint32_t y_size_in_bytes) { + y = const_cast(w); + + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccCreateTensorDescriptor(ccTensorDescriptor_t *tensor_desc) { + *tensor_desc = new tagCcTensor(); + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetTensor4dDescriptor(ccTensorDescriptor_t tensor_desc, ccTensorFormat_t format, ccDataType_t data_type, + int32_t n, int32_t c, int32_t h, int32_t w) { + if (CC_TENSOR_NHWC == format) { + tensor_desc->dim_buf[0] = n; + tensor_desc->dim_buf[1] = h; + tensor_desc->dim_buf[2] = w; + tensor_desc->dim_buf[3] = c; + } else { + tensor_desc->dim_buf[0] = n; + tensor_desc->dim_buf[1] = c; + tensor_desc->dim_buf[2] = h; + 
tensor_desc->dim_buf[3] = w; + } + tensor_desc->dim_cnt = 4; + tensor_desc->data_type = data_type; + tensor_desc->format = format; + tensor_desc->data_size = n * c * h * w * sizeof(data_type); + return CC_STATUS_SUCCESS; +} +ccStatus_t ccGetTensorSizeInBytes(const ccTensorDescriptor_t tensor_desc, uint32_t *size) { + if ((NULL == tensor_desc) || (NULL == size)) { + return CC_STATUS_BAD_PARAM; + } + *size = tensor_desc->data_size; + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccGetTensorMemorySizeInBytes(const ccTensorDescriptor_t tensor_desc, uint32_t *size) { + *size = tensor_desc->data_size; + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccCreateFilterDescriptor(ccFilterDescriptor_t *filter_desc) { + *filter_desc = new tagCcFilter(); + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetFilter4dDescriptor(ccFilterDescriptor_t filter_desc, ccTensorFormat_t format, ccDataType_t data_type, + int32_t k, int32_t c, int32_t h, int32_t w) { + filter_desc->dims.push_back(k); + filter_desc->dims.push_back(c); + filter_desc->dims.push_back(h); + filter_desc->dims.push_back(w); + + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetFilterFractalDescriptor(ccFilterDescriptor_t filter_desc, ccTensorFormat_t format, + ccDataType_t data_type, int32_t k, int32_t c, int32_t h, int32_t w) { + filter_desc->dims.push_back(k); + filter_desc->dims.push_back(c); + filter_desc->dims.push_back(h); + filter_desc->dims.push_back(w); + + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetStream(ccHandle_t handle, rtStream_t stream_id) { return CC_STATUS_SUCCESS; } +ccStatus_t ccCreatePoolingMaskDescriptor(ccTensorDescriptor_t *pooling_mask_desc) { + *pooling_mask_desc = new tagCcTensor(); + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetPoolingMaskTensorDescriptor(ccTensorDescriptor_t tensor_desc, ccTensorFormat_t format, + ccDataType_t data_type, int32_t n, int32_t c, int32_t h, int32_t w, + int32_t window_h, int32_t window_w) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t 
ccSetFilter6dDescriptor(ccTensorDescriptor_t filter_desc, ccTensorFormat_t format, ccDataType_t data_type, + int32_t c1, int32_t h, int32_t w, int32_t n, int32_t co, int32_t c0) { + return CC_STATUS_SUCCESS; +} + +/// @ingroup dnn +/// @brief get the format and dimcnt of GeTensor +/// @param [in] tensor_desc descriptor of tensor +/// @param [in|out] format point to format +/// @return ccStatus_t +ccStatus_t ccGetTensorFormat(const ccTensorDescriptor_t tensor_desc, ccTensorFormat_t *format) { + *format = tensor_desc->format; + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccTransTensor(const ccTensorDescriptor_t x_desc, const void *x, const ccTensorDescriptor_t y_desc, void *y, + uint32_t y_size_in_bytes) { + return CC_STATUS_SUCCESS; +} +void cceSysInit() {} + +bool compilerStubFree() { return true; } + +bool compilerStubInit() { return true; } + +ccStatus_t ccSetInt8Filter4dDescriptor(ccFilterDescriptor_t filter_desc, ccTensorFormat_t format, + ccDataType_t data_type, int32_t k, int32_t c, int32_t h, int32_t w, + ccDataType_t output_data_type) { + filter_desc->dims.push_back(k); + filter_desc->dims.push_back(c); + filter_desc->dims.push_back(h); + filter_desc->dims.push_back(w); + + return CC_STATUS_SUCCESS; +} +ccStatus_t ccSetTensorNdDescriptor(ccTensorDescriptor_t tensor_desc, ccDataType_t data_type, int32_t dim_cnt, + int32_t dimA[]) { + tensor_desc->data_type = data_type; + tensor_desc->data_size = sizeof(data_type); + for (int32_t i = 0; i < dim_cnt; i++) { + tensor_desc->data_size = tensor_desc->data_size * dimA[i]; + } + tensor_desc->format = CC_TENSOR_ND; + return CC_STATUS_SUCCESS; +} + +ccStatus_t CceProfilingConfig(const char *target, const char *job_ctx, uint32_t flag) { return CC_STATUS_SUCCESS; } +ccStatus_t ccSetTensorRealDimCnt(ccTensorDescriptor_t tensor_desc, int32_t real_dim_cnt) { + if (tensor_desc != NULL && tensor_desc != nullptr) { + tensor_desc->real_dim_cnt = real_dim_cnt; + } + return CC_STATUS_SUCCESS; +} + +ccStatus_t 
ccGetTensorRealDimCnt(ccTensorDescriptor_t tensor_desc, int32_t *real_dim_cnt) { + *real_dim_cnt = tensor_desc->real_dim_cnt; + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetQuantizeFactors(ccQuantizeDescriptor_t quantize_info, ccScaleValueMode_t scale_val_mode, + const uint16_t *scale, const uint16_t *offset, const uint8_t *offset_pad) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetReQuantizeFactors(ccQuantizeDescriptor_t quantize_info, ccScaleValueMode_t scale_val_mode, + const uint16_t *scale_rq, const uint16_t *next_layer_offset, + const int32_t *offset_w) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetDeQuantizeFactors(ccQuantizeDescriptor_t quantize_info, ccScaleValueMode_t scale_val_mode, + const uint16_t *scale_dq, const int32_t *offset_w) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetQuantizeAlgoAndScaleType(ccQuantizeDescriptor_t quantize_info, ccQuantizeAlgo_t quant_algo, + ccScaleType_t scale_type, bool relu_flag) { + return CC_STATUS_SUCCESS; +} +ccStatus_t ccPrintTimeStat() { return CC_STATUS_SUCCESS; } +ccStatus_t ccSetModelId(ccHandle_t handle, uint32_t model_id) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccGetKernelContext(rtStream_t stream_id, ccOpContext &op_context) { + if (stream_id == nullptr) { + op_context.kernelType = ccKernelType::TE; + } else { + op_context.kernelType = ccKernelType::CCE_AI_CORE; + op_context.opId = 1; + op_context.kernelFuncId = 1; + op_context.isFlowtable = true; + op_context.opCount = 1; + op_context.opIndex2[0] = 0; + } + + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccUpdateKernelArgs(ccOpContext &op_context, uint64_t data_base_addr, uint64_t weight_base_addr, + uint64_t variable_base_addr, void *args_addr, uint64_t args_size, void *l2ctrl_addr) { + return CC_STATUS_SUCCESS; +} +ccStatus_t ccGetKernelArgsAddrs(ccOpContext &op_context, void *args_addr, uint64_t args_size, void *l2ctrl_addr, + std::vector &op_addrs_info) { + // cce + ccOpAddrsInfo tmp_op_addrs_info; + uint64_t tmp_input = 
(uint64_t)global_mem_base; + tmp_op_addrs_info.addrPos = &tmp_input; + tmp_op_addrs_info.addrData = tmp_input; + op_addrs_info.push_back(tmp_op_addrs_info); + + uint64_t tmp_output = (uint64_t)(global_mem_base + 5476352); + tmp_op_addrs_info.addrPos = &tmp_output; + tmp_op_addrs_info.addrData = tmp_output; + op_addrs_info.push_back(tmp_op_addrs_info); + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccSetKernelArgs(std::vector &date_info) { return CC_STATUS_SUCCESS; } +} // namespace cce +// ccFusion no namespace +ccStatus_t ccFusionStart(ccHandle_t handle, uint32_t graph_id, uint32_t init_flag, CceFusionMemCfg_t mem_cfg) { + return CC_STATUS_SUCCESS; +} + +//???ccFusion ????namespace cce?? +ccStatus_t ccFusionStart(ccHandle_t handle, uint32_t graph_id, uint32_t init_flag, uint32_t addr_change_flag) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t ccFusionEnd(ccHandle_t handle, uint32_t graph_id) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccFusionTaskEnd(ccHandle_t handle, uint32_t graph_id) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccKernelLaunchRepeat(ccHandle_t handle) { return CC_STATUS_SUCCESS; } + +ccStatus_t ccKernelDelete(ccHandle_t handle) { return CC_STATUS_SUCCESS; } + +ccStatus_t cce::ccSetTensorFormat(cce::tagCcTensor *, cce::tagCcTensorFormat) { return CC_STATUS_SUCCESS; } + +namespace fusion { +uint32_t BufferFusion(std::shared_ptr, std::shared_ptr, bool) { return 0; } + +uint32_t BufferFusionTrain(std::shared_ptr, std::shared_ptr) { return 0; } + +uint32_t GraphFusionTrain(ge::ComputeGraphPtr orig_graph, ge::ComputeGraphPtr fusion_graph) { return 0; } +} // namespace fusion +namespace fusion { +using namespace ge; + +uint32_t Fusion(ComputeGraphPtr model_graph, ComputeGraphPtr fusion_graph, kScopeNodeMap_t &te_fusion_map) { + OpDescPtr op_def_a = std::make_shared(); + op_def_a->SetName("reduction_nd"); + op_def_a->SetType("reduction_nd"); + + GeTensorDescPtr v_input_desc = std::make_shared(); + op_def_a->AddInputDesc(*v_input_desc); + + vector v_input; 
+ v_input.push_back(0); + op_def_a->SetInputOffset(v_input); + + GeTensorDesc input_desc = op_def_a->GetInputDesc(0); + input_desc.SetFormat(FORMAT_NCHW); + input_desc.SetDataType(DT_FLOAT); + input_desc.SetShape(GeShape({1, 3, 5, 5})); + ge::TensorUtils::SetSize(input_desc, 192); + ge::TensorUtils::SetRealDimCnt(input_desc, 4); + + GeTensorDescPtr output_desc = std::make_shared(); + op_def_a->AddOutputDesc(*output_desc); + + output_desc->SetFormat(FORMAT_NCHW); + output_desc->SetDataType(DT_FLOAT); + output_desc->SetShape(GeShape({1, 3, 5})); + ge::TensorUtils::SetSize(*output_desc, 96); + ge::TensorUtils::SetRealDimCnt(*output_desc, 3); + + OpDescPtr op_def_b = std::make_shared(); + op_def_b->SetName("transdata_1"); + op_def_b->SetType("TransData"); + + int stream_num = 1; + int flag = 0; + + // make_graph_nd(graph); + NodePtr node_a = fusion_graph->AddNode(op_def_a); + NodePtr node_b = fusion_graph->AddNode(op_def_b); + + GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0)); + int32_t a = 1; + int32_t b = 2; + + AttrUtils::SetInt(op_def_a, "fusion_scope", a); + AttrUtils::SetInt(op_def_b, "fusion_scope", b); + + vector node_list1; + node_list1.push_back(node_a); + vector node_list2; + node_list2.push_back(node_b); + te_fusion_map[1] = node_list1; + te_fusion_map[2] = node_list2; + + return FUSION_STATUS_SUCCESS; +} + +uint32_t FusionTaskBuild(cce::ccHandle_t cc_handle, ge::ComputeGraphPtr fusion_graph, ge::Buffer &buffer, + ModelRes &model_res, std::vector &task_def_list_) { + TaskDef task_def_temp; + task_def_list_.push_back(task_def_temp); + + return FUSION_STATUS_SUCCESS; +} +uint32_t GraphFusion(ge::ComputeGraphPtr orig_graph, ge::ComputeGraphPtr fusion_graph) { + *fusion_graph = *orig_graph; + return FUSION_STATUS_SUCCESS; +} + +void FusionTaskBuildComplete(std::vector cc_handle_list) { return; } + +} // namespace fusion + +ccStatus_t cce::ccSetTensorDescriptorQuantizeParam(ccTensorDescriptor_t tensor_desc, + const 
ccVecQuantizePara_t *vec_quantize_para) { + return CC_STATUS_SUCCESS; +} + +ccStatus_t cce::ccSetAllOffsetQuantizeFactors(ccQuantizeDescriptor_t quantize_info, const uint8_t *offset_w, + const uint8_t *offset_d, const uint16_t *scale_req, + const uint16_t *offset_d_next) { + return CC_STATUS_SUCCESS; +} diff --git a/tests/depends/hccl/CMakeLists.txt b/tests/depends/hccl/CMakeLists.txt new file mode 100644 index 00000000..9e3826dd --- /dev/null +++ b/tests/depends/hccl/CMakeLists.txt @@ -0,0 +1,26 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +cmake_minimum_required(VERSION 2.8) +project(hccl_stub) + +file(GLOB_RECURSE SRC_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "src/hccl_stub.cc" +) + +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/inc) + +add_library(hccl_stub SHARED ${SRC_FILES}) \ No newline at end of file diff --git a/tests/depends/hccl/src/hccl_stub.cc b/tests/depends/hccl/src/hccl_stub.cc new file mode 100644 index 00000000..8bfeb715 --- /dev/null +++ b/tests/depends/hccl/src/hccl_stub.cc @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "hccl/hcom.h" + +hcclResult_t hcom_all_gather(const char *tag, void *input_count_ptr, void *output_ptr, u64 input_count, + hcclDataType_t data_type, const char *group, rtStream_t stream) { + return HCCL_SUCCESS; +} + +hcclResult_t hcom_broadcast(const char *tag, void *ptr, u64 count, hcclDataType_t data_type, u32 root, + const char *group, rtStream_t stream) { + return HCCL_SUCCESS; +} + +hcclResult_t hcom_all_reduce(const char *tag, void *input_ptr, void *output_ptr, u64 count, hcclDataType_t data_type, + hcclRedOp_t op, const char *group, rtStream_t stream) { + return HCCL_SUCCESS; +} + +hcclResult_t hcom_get_split_strategy(const char *group, const struct model_feature *feature, u32 max_segment_num, + u32 *segment_num, u32 *segment_idx) { + return HCCL_SUCCESS; +} + +hcclResult_t hcom_reduce_scatter(const char *tag, void *input_ptr, void *output_ptr, u64 count, + hcclDataType_t data_type, hcclRedOp_t op, const char *group, rtStream_t stream) { + return HCCL_SUCCESS; +} \ No newline at end of file diff --git a/tests/depends/mmpa/CMakeLists.txt b/tests/depends/mmpa/CMakeLists.txt new file mode 100644 index 00000000..8485a600 --- /dev/null +++ b/tests/depends/mmpa/CMakeLists.txt @@ -0,0 +1,31 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +cmake_minimum_required(VERSION 2.8) + +project(STUB_MMPA) + +file(GLOB_RECURSE SRCS RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "src/mmpa_stub.cc" +) + +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) + +add_library(mmpa_stub SHARED ${SRCS}) +target_link_libraries(mmpa_stub ge_protobuf::protobuf) diff --git a/tests/depends/mmpa/src/mmpa_stub.cc b/tests/depends/mmpa/src/mmpa_stub.cc new file mode 100644 index 00000000..bdf24326 --- /dev/null +++ b/tests/depends/mmpa/src/mmpa_stub.cc @@ -0,0 +1,210 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "mmpa/mmpa_api.h" + +#include "common/types.h" +#include "common/util.h" + +using namespace domi; + +INT32 mmOpen(const CHAR *path_name, INT32 flags) { + INT32 fd = HANDLE_INVALID_VALUE; + + if (NULL == path_name) { + syslog(LOG_ERR, "The path name pointer is null.\r\n"); + return EN_INVALID_PARAM; + } + if (0 == (flags & (O_RDONLY | O_WRONLY | O_RDWR | O_CREAT))) { + syslog(LOG_ERR, "The file open mode is error.\r\n"); + return EN_INVALID_PARAM; + } + + fd = open(path_name, flags); + if (fd < MMPA_ZERO) { + syslog(LOG_ERR, "Open file failed, errno is %s.\r\n", strerror(errno)); + return EN_ERROR; + } + return fd; +} + +INT32 mmOpen2(const CHAR *path_name, INT32 flags, MODE mode) { + INT32 fd = HANDLE_INVALID_VALUE; + + if (NULL == path_name) { + syslog(LOG_ERR, "The path name pointer is null.\r\n"); + return EN_INVALID_PARAM; + } + if (MMPA_ZERO == (flags & (O_RDONLY | O_WRONLY | O_RDWR | O_CREAT))) { + syslog(LOG_ERR, "The file open mode is error.\r\n"); + return EN_INVALID_PARAM; + } + if ((MMPA_ZERO == (mode & (S_IRUSR | S_IREAD))) && (MMPA_ZERO == (mode & (S_IWUSR | S_IWRITE)))) { + syslog(LOG_ERR, "The permission mode of the file is error.\r\n"); + return EN_INVALID_PARAM; + } + + fd = open(path_name, flags, mode); + if (fd < MMPA_ZERO) { + syslog(LOG_ERR, "Open file failed, errno is %s.\r\n", strerror(errno)); + return EN_ERROR; + } + return fd; +} + +INT32 mmClose(INT32 fd) { + INT32 result = EN_OK; + + if (fd < MMPA_ZERO) { + syslog(LOG_ERR, "The file fd is invalid.\r\n"); + return EN_INVALID_PARAM; + } + + result = close(fd); + if (EN_OK != result) { + syslog(LOG_ERR, "Close the file failed, errno is %s.\r\n", strerror(errno)); + return EN_ERROR; + } + return EN_OK; +} + +mmSsize_t mmWrite(INT32 fd, VOID *mm_buf, UINT32 mm_count) { + mmSsize_t result = MMPA_ZERO; + + if ((fd < MMPA_ZERO) || (NULL == mm_buf)) { + syslog(LOG_ERR, "Input parameter invalid.\r\n"); + return EN_INVALID_PARAM; + } + + result = write(fd, mm_buf, 
(size_t)mm_count); + if (result < MMPA_ZERO) { + syslog(LOG_ERR, "Write buf to file failed, errno is %s.\r\n", strerror(errno)); + return EN_ERROR; + } + return result; +} + +mmSsize_t mmRead(INT32 fd, VOID *mm_buf, UINT32 mm_count) { + mmSsize_t result = MMPA_ZERO; + + if ((fd < MMPA_ZERO) || (NULL == mm_buf)) { + syslog(LOG_ERR, "Input parameter invalid.\r\n"); + return EN_INVALID_PARAM; + } + + result = read(fd, mm_buf, (size_t)mm_count); + if (result < MMPA_ZERO) { + syslog(LOG_ERR, "Read file to buf failed, errno is %s.\r\n", strerror(errno)); + return EN_ERROR; + } + return result; +} + +INT32 mmMkdir(const CHAR *lp_path_name, mmMode_t mode) { + INT32 t_mode = mode; + INT32 ret = EN_OK; + + if (NULL == lp_path_name) { + syslog(LOG_ERR, "The input path is null.\r\n"); + return EN_INVALID_PARAM; + } + + if (t_mode < MMPA_ZERO) { + syslog(LOG_ERR, "The input mode is wrong.\r\n"); + return EN_INVALID_PARAM; + } + + ret = mkdir(lp_path_name, mode); + + if (EN_OK != ret) { + syslog(LOG_ERR, "Failed to create the directory, the ret is %s.\r\n", strerror(errno)); + return EN_ERROR; + } + return EN_OK; +} + +void *memCpyS(void *dest, const void *src, UINT32 count) { + char *tmp = (char *)dest; + char *s = (char *)src; + + if (MMPA_ZERO == count) { + return dest; + } + + while (count--) { + *tmp++ = *s++; + } + return dest; +} + +INT32 mmRmdir(const CHAR *lp_path_name) { return rmdir(lp_path_name); } + +mmTimespec mmGetTickCount() { + mmTimespec rts; + struct timespec ts = {0}; + (void)clock_gettime(CLOCK_MONOTONIC_RAW, &ts); + rts.tv_sec = ts.tv_sec; + rts.tv_nsec = ts.tv_nsec; + return rts; +} + +INT32 mmGetTid() { + INT32 ret = (INT32)syscall(SYS_gettid); + + if (ret < MMPA_ZERO) { + return EN_ERROR; + } + + return ret; +} + +INT32 mmAccess(const CHAR *path_name) { + if (path_name == NULL) { + return EN_INVALID_PARAM; + } + + INT32 ret = access(path_name, F_OK); + if (ret != EN_OK) { + return EN_ERROR; + } + return EN_OK; +} + +INT32 mmStatGet(const CHAR *path, 
mmStat_t *buffer) { + if ((path == NULL) || (buffer == NULL)) { + return EN_INVALID_PARAM; + } + + INT32 ret = stat(path, buffer); + if (ret != EN_OK) { + return EN_ERROR; + } + return EN_OK; +} + +INT32 mmGetFileSize(const CHAR *file_name, ULONGLONG *length) { + if ((file_name == NULL) || (length == NULL)) { + return EN_INVALID_PARAM; + } + struct stat file_stat; + (void)memset_s(&file_stat, sizeof(file_stat), 0, sizeof(file_stat)); // unsafe_function_ignore: memset + INT32 ret = lstat(file_name, &file_stat); + if (ret < MMPA_ZERO) { + return EN_ERROR; + } + *length = (ULONGLONG)file_stat.st_size; + return EN_OK; +} diff --git a/tests/depends/omg/CMakeLists.txt b/tests/depends/omg/CMakeLists.txt new file mode 100644 index 00000000..fb63dc90 --- /dev/null +++ b/tests/depends/omg/CMakeLists.txt @@ -0,0 +1,46 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +cmake_minimum_required(VERSION 2.8) + +project(OMG_CCE) + +set(CMAKE_CXX_STANDARD 11) + +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "${GE_SOURCE_DIR}/src/proto/om.proto" + "${GE_SOURCE_DIR}/src/proto/task.proto" +) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) + +file(GLOB_RECURSE SRCS RELATIVE ${CMAKE_CURRENT_LIST_DIR} +# "${GE_SOURCE_DIR}/src/ge/common/util.cc" + "src/omg_stub.cc" +) + +add_library(omg_stub SHARED ${SRCS} ${PROTO_SRCS} ${PROTO_HDRS}) +target_link_libraries(omg_stub ge_protobuf::protobuf) diff --git a/tests/depends/omg/src/omg_stub.cc b/tests/depends/omg/src/omg_stub.cc new file mode 100644 index 00000000..7197dac7 --- /dev/null +++ b/tests/depends/omg/src/omg_stub.cc @@ -0,0 +1,876 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include + +#include "mmpa/mmpa_api.h" +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/types.h" +#include "common/util.h" +#include "common/string_util.h" +#include "common/properties_manager.h" +#include "common/model_parser/base.h" +#include "graph/model.h" +#include "cce/dnn.h" +#include "ge/ge_api_types.h" +#include "framework/common/ge_types.h" +#include "graph/utils/op_desc_utils.h" +#include "common/profiling/profiling_manager.h" + +using domi::domiTensorFormat_t; +using namespace cce; +using namespace ge; + +struct PROC_PARAM { + uint8_t *model_name; + + // ISV Ek buffer + uint8_t *model_key; + uint32_t model_key_len; + + // ISV root certificate buffer + uint8_t *root_cert; + uint32_t root_cert_len; + + // ISV private key buffer + uint8_t *pri_key; + uint32_t pri_key_len; + + // Raw AI Module Image buffer + uint8_t *ai_image; + uint32_t ai_image_len; + + // ISV HW key buffer + uint8_t *hw_key; + uint32_t hw_key_len; +}; + +#ifdef __cplusplus +extern "C" { +#endif +using namespace ge; +namespace { +const char FMK_STATUS_FILE_DIR_ENV[] = "FMK_STATUS_FILE_DIR"; +const char JOBSTATE_FILE_NAME[] = "jobstateupdate_framework"; +const char HCOM_DETECT_FILE_NAME[] = "hcom_detection_result"; +const char FILE_SEPARATE[] = "/"; +} // namespace + +#ifdef __cplusplus +} +#endif + +namespace ge { +struct GeModelPartition { + ModelPartitionType type_ = MODEL_DEF; + uint8_t *data_ = nullptr; + size_t size_ = 0; + + GeModelPartition() = default; + + GeModelPartition(const GeModelPartition &partition){}; + + GeModelPartition &operator=(const GeModelPartition &partition) = delete; + + ~GeModelPartition() { + if (data_ != nullptr) { + delete[] data_; + data_ = nullptr; + } + } + + Status SetData(uint8_t *data, size_t size) { + size_ = size; + data_ = new (std::nothrow) 
uint8_t[size](); + errno_t err; + err = memcpy_s(data_, size_, data, size); + if (err) { + GELOGE(ge::FAILED, "[GeModel Partition] Error occur when copy GeModel Partition data."); + return FAILED; + } + return SUCCESS; + } + + Status SetType(ModelPartitionType type) { + type_ = type; + return SUCCESS; + } +}; +struct OmFileContext { + vector partition_datas_; + vector partition_table_; + uint32_t model_data_len_; +}; + +class SubGraphInfo; +using SubGraphInfoPtr = std::shared_ptr; + +using GeModelPartitionPtr = std::shared_ptr; +using ModelPtr = std::shared_ptr; +class GeModel { + public: + explicit GeModel(const ModelPtr &model_ptr); + ~GeModel() = default; + GeModel(const GeModel &other) = delete; + GeModel &operator=(const GeModel &other) = delete; + + ModelPtr GetModelPtr() const; + Status AddPartition(uint8_t *data, size_t size, ModelPartitionType type); + Status GetPartition(ModelPartitionType type, GeModelPartitionPtr &partition); + uint8_t GetPlatformType() const; + void SetPlatformType(const uint8_t platform_type) { platform_type_ = platform_type; } + + private: + std::map partitions_; + ModelPtr model_ = nullptr; + uint8_t platform_type_ = {0}; +}; +using GeModelPtr = std::shared_ptr; + +GeModel::GeModel(const ModelPtr &model_ptr) { this->model_ = model_ptr; } + +ModelPtr GeModel::GetModelPtr() const { return this->model_; } + +uint8_t GeModel::GetPlatformType() const { return platform_type_; } + +Status GeModel::AddPartition(uint8_t *data, size_t size, ModelPartitionType type) { + if (size == 0) { + return FAILED; + } + + if (data == nullptr) { + return FAILED; + } + + auto iter = partitions_.find(type); + if (iter != partitions_.end()) { + return FAILED; + } + + GeModelPartitionPtr partition = nullptr; + GE_MAKE_SHARED(partition = std::make_shared(), return FAILED); + Status ret = partition->SetType(type); + if (ret != SUCCESS) { + return FAILED; + } + ret = partition->SetData(data, size); + if (ret != SUCCESS) { + return FAILED; + } + + 
partitions_.insert(std::pair(type, partition)); + return SUCCESS; +} + +Status GeModel::GetPartition(ModelPartitionType type, GeModelPartitionPtr &partition) { + auto iter = partitions_.find(type); + if (iter == partitions_.end()) { + return FAILED; + } + + partition = iter->second; + return SUCCESS; +} +class OmFileSaveHelper { + public: + OmFileSaveHelper(); + ~OmFileSaveHelper(); + vector &GetModelPartitions(); + ModelPartitionTable *GetPartitionTable(); + ModelFileHeader model_header_; + ModelFileHeader &GetModelFileHeader() { return model_header_; } + void AddPartition(GeModelPartition &partition); + + private: + OmFileContext context_; +}; + +OmFileSaveHelper::OmFileSaveHelper() {} + +OmFileSaveHelper::~OmFileSaveHelper() {} + +vector &OmFileSaveHelper::GetModelPartitions() { + static std::vector tmp; + return tmp; +} + +ModelPartitionTable *OmFileSaveHelper::GetPartitionTable() { return nullptr; } + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void OmFileSaveHelper::AddPartition(GeModelPartition &partition) { + context_.partition_datas_.push_back(partition); + context_.model_data_len_ += partition.size_; +} +class ModelBuilder { + public: + ModelBuilder(ge::ComputeGraphPtr compute_graph, const std::vector &subgraphs, + const std::map &stream_max_parallel_num, bool hcom_parallel, int mode); + virtual ~ModelBuilder(); + Status BuildModel(ge::Model &model_def); + Status SaveWeightsToModel(ge::Model &model); + Status SaveDataToModel(ge::Model &model, ge::GeModel &ge_model); + Status PreBuildModel(); + Status BuildModelForGetTask(ge::Model &model_def); + ge::Buffer GetWeightBuffer() const; + void SetModelVersion(ge::Model &model_def); + + public: + ge::Buffer weight_buffer_; +}; + +ModelBuilder::ModelBuilder(ge::ComputeGraphPtr compute_graph, const std::vector &subgraphs, + const std::map &stream_max_parallel_num, bool hcom_parallel, int mode) { + weight_buffer_ = ge::Buffer(4100000); +} + +ModelBuilder::~ModelBuilder() {} + +Status 
ModelBuilder::SaveWeightsToModel(ge::Model &model) { return SUCCESS; } + +Status ModelBuilder::BuildModel(ge::Model &model_def) { return SUCCESS; } + +Status ModelBuilder::SaveDataToModel(ge::Model &model, ge::GeModel &ge_model) { return SUCCESS; } + +Status ModelBuilder::PreBuildModel() { return SUCCESS; } + +Status ModelBuilder::BuildModelForGetTask(ge::Model &model_def) { return SUCCESS; } + +void ModelBuilder::SetModelVersion(ge::Model &model_def) { return; } + +ge::Buffer ModelBuilder::GetWeightBuffer() const { return ge::Buffer(4100000); } + +} // namespace ge + +using ProcParam = struct PROC_PARAM; + +namespace ge { +#include +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NCHW_DIM_N = 0; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NCHW_DIM_C = 1; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NCHW_DIM_H = 2; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NCHW_DIM_W = 3; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NHWC_DIM_N = 0; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NHWC_DIM_H = 1; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NHWC_DIM_W = 2; +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY extern const uint32_t NHWC_DIM_C = 3; + +const uint32_t MODEL_FILE_MAGIC_NUM = 0x444F4D49; +const uint32_t MODEL_FILE_HEAD_LEN = 256; +const uint32_t MODEL_VERSION = 0x10000000; +const int MAX_FILE_SIZE_LIMIT = INT_MAX; +bool FC_WEIGHT_COMPRESS_FLAG = false; + +bool ReadBytesFromBinaryFile(const char *file_name, char **buffer, int &length) { + length = 10; + *buffer = new (std::nothrow) char[10](); + GE_CHK_BOOL_TRUE_EXEC_RET_STATUS(*buffer == nullptr, false, "new an object failed."); + return true; +} +bool ReadProtoFromText(const char *file, google::protobuf::Message *message) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((nullptr == file || nullptr == message), return false, + "incorrect 
parameter. nullptr == file || nullptr == message"); + string real_path = RealPath(file); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(), return false, "proto file path '%s' not valid", file); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(GetFileLength(real_path.c_str()) == -1, return false, "file size not valid."); + std::ifstream fs(real_path.c_str(), std::ifstream::in); + + if (!fs.is_open()) { + GELOGE(ge::FAILED, "proto file '%s' open fail.", file); + return false; + } + google::protobuf::io::IstreamInputStream input(&fs); + bool ret = google::protobuf::TextFormat::Parse(&input, message); + GE_IF_BOOL_EXEC(ret != true, + GELOGI("call [google::protobuf::TextFormat::Parse] func ret fail, please check your text file.")); + fs.close(); + return ret; +} + +uint64_t GetCurrentTimestap() { return 0; } + +// get length of file +long GetFileLength(const std::string &input_file) { + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(input_file.empty(), return -1, "input_file path is null."); + string real_path = RealPath(input_file.c_str()); + + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(real_path.empty(), return -1, "input_file path '%s' not valid", input_file.c_str()); + unsigned long long file_length = 0; + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(mmGetFileSize(input_file.c_str(), &file_length) != EN_OK, return -1, + "open file failed."); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG((file_length <= 0), return -1, "file length <= 0, not valid."); + GE_CHK_BOOL_TRUE_EXEC_WITH_LOG(file_length > MAX_FILE_SIZE_LIMIT, return -1, "file size %ld is out of limit: %d.", + file_length, MAX_FILE_SIZE_LIMIT); + return file_length; +} +string RealPath(const char *path) { + string s = path; + if (s.size() >= PATH_MAX) { + return ""; + } + if (s == "." 
|| s == "1") { + return path; + // for insert_aipp_op unittest + } else if (s.substr(0, 3) == "llt") { + return path; + } else { + return "22"; + } +} + +bool CheckInputPathValid(const string &file_path) { return true; } +bool ReadProtoFromArray(const void *data, int size, Message *proto) { return true; } + +struct ModelPartition { + ModelPartitionType type; + uint8_t *data = 0; + uint32_t size = 0; +}; + +class InsertNewOpUtil { + public: + InsertNewOpUtil(); + ~InsertNewOpUtil(); + Status InsertNewOps(const ComputeGraphPtr &graph); + Status InsertAippOps(ge::ComputeGraphPtr graph, std::string &aipp_config_path); + Status Parse(const char *conf_path); +}; + +InsertNewOpUtil::InsertNewOpUtil() {} + +Status InsertNewOpUtil::InsertNewOps(const ComputeGraphPtr &graph) { return SUCCESS; } + +Status InsertNewOpUtil::InsertAippOps(ge::ComputeGraphPtr graph, std::string &aipp_config_path) { return SUCCESS; } + +Status InsertNewOpUtil::Parse(const char *conf_path) { return SUCCESS; } + +Status InitOME() { return SUCCESS; } +class GraphOptimizer { + public: + Status Optimize(); + Status OptimizeAfterCal(); + Status AdjustDataOpDesc(); + Status InsertTransOp(); + Status FusionFmkop(); + Status Optimize4Cloud(); + Status Optimize4FlowCtrl(); + Status OptimizeBeforeBuild(); +}; +Status GraphOptimizer::Optimize() { return SUCCESS; } + +Status Init(Options options) { return SUCCESS; } + +Status Shutdown(Options options) { return SUCCESS; } + +class Session { + public: + // singleton + static Session *Instance(); + const uint32_t &DeviceId() const; +}; + +const uint32_t &Session::DeviceId() const { return 0; } + +Session *Session::Instance() { + static Session instance; + return &instance; +} +struct OmgContext { + domiTensorFormat_t format; + + // get input format from cmd + std::unordered_map input_nodes_format_map; + std::vector output_formats; + + // user-designate input dims + std::vector>> user_input_dims; + // global input dims + std::unordered_map> input_dims; + + // 
solve rename op e.g: Detectionoutput:SsdDetectiontOutput + std::map op_conf_map; + // save output node of network: key is op name, value = index, index is the output index of op + std::map> out_nodes_map; + // user-designate out nodes (this is used for determing the orders) + std::vector> user_out_nodes; + // save the path of cutsom_aicpu + std::vector aicpu_op_run_paths; + // save ddk + std::string ddk_version; + // save format + domiTensorFormat_t net_format; + + FrameworkType type; + // RunMode run_mode; + bool train_flag = false; + + std::string output_type; + + /// save the name of network + /// eg:faster-rcnn, based on FirstStageProcessor after scope_fusion is faster-rcnn + /// then reorder conv+reshape of FirstStageBoxPredictor/BoxEncodingPredictor + /// need to delete op of reshape + std::string net_name; +}; +} // namespace ge + +namespace domi { +ge::OmgContext &GetContext() { + static ge::OmgContext tmp; + return tmp; +} +} // namespace domi + +namespace ge { +class OpUtils { + public: + static Status InitTensorDescriptor(const GeTensorDesc &tensor, ccTensorDescriptor_t &cc_tensor); + static Status InitTensorDescriptor(int32_t format, int32_t data_type, const std::vector &dim, + ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt); + static void DestroyTensorDescriptor(ccTensorDescriptor_t &cc_tensor); +}; +Status OpUtils::InitTensorDescriptor(const GeTensorDesc &tensor, ccTensorDescriptor_t &cc_tensor) { + ccCreatePoolingMaskDescriptor(&cc_tensor); + return SUCCESS; +} +Status OpUtils::InitTensorDescriptor(int32_t format, int32_t data_type, const std::vector &dim, + ccTensorDescriptor_t &cc_tensor, uint32_t real_dim_cnt) { + Status ret = SUCCESS; + return ret; +} + +class FileSaver { + public: + Status SaveToFile(const string &file_path, ModelFileHeader &model_file_header, + ModelPartitionTable &model_partition_table, const std::vector &partition_datas); + Status SaveToFileWithEncrypt(const std::string file_path, const ProcParam proc_param, + const 
ModelFileHeader *model_file_header, bool check_sum); +}; + +Status FileSaver::SaveToFile(const string &file_path, ModelFileHeader &model_file_header, + ModelPartitionTable &model_partition_table, + const std::vector &partition_datas) { + return SUCCESS; +} + +Status FileSaver::SaveToFileWithEncrypt(const std::string file_path, const ProcParam proc_param, + const ModelFileHeader *model_file_header, bool check_sum) { + return SUCCESS; +} + +class ModelSaver : public FileSaver {}; + +FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY void OpUtils::DestroyTensorDescriptor( + ccTensorDescriptor_t &cc_tensor) { + if (nullptr != cc_tensor) { + ccStatus_t ret = ccDestroyTensorDescriptor(&cc_tensor); + GE_LOGE_IF(CC_STATUS_SUCCESS != ret, "ccDestroyTensorDescriptor failed. ret = %d", ret); + cc_tensor = nullptr; + } +} + +} // namespace ge + +namespace domi { +class OpRegistrationData {}; + +class OpRegistry { + public: + static OpRegistry *Instance(); + std::vector registration_datas; + + ImplyType GetImplyType(const std::string &op_type); + void GetOpTypeByImplyType(std::vector &vec_op_type, const ImplyType &imply_type); +}; + +OpRegistry *OpRegistry::Instance() { + static OpRegistry instance; + return &instance; +} + +void OpRegistry::GetOpTypeByImplyType(std::vector &vec_op_type, const ImplyType &imply_type) { + if (imply_type == ImplyType::AI_CPU) { + vec_op_type.push_back("square"); + } +} + +class OpRegistrationTbe { + public: + static OpRegistrationTbe *Instance(); + + bool Finalize(OpRegistrationData ®_data, bool is_train); +}; + +OpRegistrationTbe *OpRegistrationTbe::Instance() { + static OpRegistrationTbe instance; + return &instance; +} + +bool OpRegistrationTbe::Finalize(OpRegistrationData ®_data, bool is_train) { return true; } +} // namespace domi + +namespace ge { +class GraphPrepare { + private: + Status OptimizeForPreprocess(ge::ComputeGraphPtr &compute_graph); +}; + +Status GraphPrepare::OptimizeForPreprocess(ge::ComputeGraphPtr &compute_graph) { return 
SUCCESS; } +} // namespace ge + +namespace ge { + +Status GetOriginalType(const ge::NodePtr &node, string &type) { + type = node->GetType(); + GE_IF_BOOL_EXEC(type != FRAMEWORKOP, return SUCCESS); + ge::AttrUtils::GetStr(node->GetOpDesc(), "original_type", type); + return SUCCESS; +} + +Status SetCycleEvent(const ge::NodePtr &node) { return SUCCESS; } + +Status SetStreamLabel(const ge::NodePtr &node, const std::string &label) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = AttrUtils::CloneOpDesc(node->GetOpDesc()); + GE_CHECK_NOTNULL(tmp_desc); + + if (!AttrUtils::SetStr(tmp_desc, "_stream_label", label)) { + GELOGE(ge::FAILED, "Op :%s set ATTR_NAME_STREAM_LABEL failed", node->GetName().c_str()); + return FAILED; + } + return SUCCESS; +} + +Status SetActiveLabelList(const ge::NodePtr &node, const std::vector &label) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + // add list of active_label + if (!AttrUtils::SetListStr(tmp_desc, "_active_label", label)) { + GELOGE(ge::FAILED, "Op: %s set ATTR_NAME_ACTIVE_LABEL_LIST failed", node->GetName().c_str()); + return FAILED; + } + return SUCCESS; +} + +Status SetSwitchBranchNodeLabel(const ge::NodePtr &node, const std::string &branch_label) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + // add branch_label of switch + if (!AttrUtils::SetStr(tmp_desc, "_switch_branch_node_label", branch_label)) { + GELOGE(ge::FAILED, "Op :%s set ATTR_NAME_SWITCH_BRANCH_NODE_LABEL failed", node->GetName().c_str()); + return FAILED; + } + return SUCCESS; +} + +Status SetSwitchTrueBranchFlag(const ge::NodePtr &node, bool value) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + // add switch_true_branch_flag + if (!AttrUtils::SetBool(tmp_desc, "_switch_true_branch_flag", value)) { + GELOGE(ge::FAILED, "Op :%s set ATTR_NAME_SWITCH_TRUE_BRANCH_FLAG failed", node->GetName().c_str()); + 
return FAILED; + } + return SUCCESS; +} + +Status SetOriginalNodeName(const ge::NodePtr &node, const std::string &orig_name) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + // record original_node_name + if (!AttrUtils::SetStr(tmp_desc, "_original_node_name", orig_name)) { + GELOGE(ge::FAILED, "Op :%s set ATTR_NAME_ORIG_NODE_NAME failed", node->GetName().c_str()); + return FAILED; + } + return SUCCESS; +} + +Status SetCyclicDependenceFlag(const ge::NodePtr &node) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + // add cyclic_dependence_flag + if (!AttrUtils::SetBool(tmp_desc, "_cyclic_dependence_flag", true)) { + GELOGE(ge::FAILED, "Op :%s set ATTR_NAME_CYCLIC_DEPENDENCE_FLAG failed", node->GetName().c_str()); + return FAILED; + } + return SUCCESS; +} + +Status SetNextIteration(const ge::NodePtr &node, const std::string &next) { + GE_CHECK_NOTNULL(node); + OpDescPtr tmp_desc = node->GetOpDesc(); + GE_CHECK_NOTNULL(tmp_desc); + + if (!AttrUtils::SetStr(tmp_desc, "_next_iteration_node", next)) { + GELOGE(ge::FAILED, "Op: %s set ATTR_NAME_NEXT_ITERATION failed", node->GetName().c_str()); + return FAILED; + } + return SUCCESS; +} +} // namespace ge + +namespace cce { +bool ccGetFuncState(ccFuncParamType_t type) { return true; } +} // namespace cce + +namespace ge { +Status UnloadModel(uint32_t model_id) { return SUCCESS; } + +Status GetInputOutputDescInfo(uint32_t model_id, vector &input_desc, + vector &output_desc) { + return SUCCESS; +} + +Status DataInput(const InputData *input_data, OutputData *output_data) { return SUCCESS; } + +class ModelManager { + public: + static std::shared_ptr GetInstance(); + static void FinalizeForPtr(ModelManager *) {} + Status DataInputTensor(uint32_t model_id, const std::vector &inputs, + std::vector &outputs); + Status DataInput(const InputData &input_data, OutputData &output_data); + Status GetInputOutputDescInfo(const uint32_t 
model_id, std::vector &input_desc, + std::vector &output_desc); + Status GetInputOutputDescInfo(const uint32_t model_id, std::vector &input_desc, + std::vector &output_desc, std::vector &input_formats, + std::vector &output_formats); + Status GetInputOutputDescInfoForZeroCopy(const uint32_t model_id, std::vector &input_desc, + std::vector &output_desc, + std::vector &input_formats, std::vector &output_formats); + Status Stop(uint32_t model_id); + Status Unload(uint32_t model_id); + Status LoadModelOnline(uint32_t &model_id, std::shared_ptr &model, + std::shared_ptr listener); + Status Start(uint32_t model_id); + Status GetMaxUsedMemory(const uint32_t model_id, uint64_t &max_size); + Status LoadModelOffline(uint32_t &model_id, const ModelData &model, std::shared_ptr listener = nullptr, + void *dev_ptr = nullptr, size_t mem_size = 0, void *weight_ptr = nullptr, + size_t weight_size = 0); + Status LoadModelWithQ(uint32_t &model_id, const ModelData &model_data, const std::vector &input_queue_ids, + const std::vector &output_queue_ids); + + Status HandleCommand(const Command &command); + Status ExecuteModel(uint32_t model_id, rtStream_t stream, bool async_mode, const InputData &input_data, + OutputData &output_data); + void DestroyAicpuSession(uint64_t session_id); +}; +void ModelManager::DestroyAicpuSession(uint64_t session_id) {} +std::shared_ptr ModelManager::GetInstance() { + static std::shared_ptr instance_ptr = + shared_ptr(new ModelManager(), ModelManager::FinalizeForPtr); + return instance_ptr; +} + +Status ModelManager::DataInputTensor(uint32_t model_id, const std::vector &inputs, + std::vector &outputs) { + return SUCCESS; +} + +Status ModelManager::DataInput(const InputData &input_data, OutputData &output_data) { return SUCCESS; } + +Status ModelManager::GetInputOutputDescInfo(const uint32_t model_id, std::vector &input_desc, + std::vector &output_desc, + std::vector &input_formats, + std::vector &output_formats) { + return SUCCESS; +} + +Status 
ModelManager::GetInputOutputDescInfo(const uint32_t model_id, std::vector &input_desc, + std::vector &output_desc) { + return SUCCESS; +} + +Status ModelManager::GetInputOutputDescInfoForZeroCopy(const uint32_t model_id, + std::vector &input_desc, + std::vector &output_desc, + std::vector &input_formats, + std::vector &output_formats) { + return SUCCESS; +} + +Status ModelManager::Stop(uint32_t model_id) { return SUCCESS; } + +Status ModelManager::Unload(uint32_t model_id) { return SUCCESS; } + +Status ModelManager::LoadModelOnline(uint32_t &model_id, std::shared_ptr &model, + std::shared_ptr listener) { + return SUCCESS; +} + +Status ModelManager::Start(uint32_t model_id) { return SUCCESS; } + +Status ModelManager::GetMaxUsedMemory(const uint32_t model_id, uint64_t &max_size) { return SUCCESS; } + +Status ModelManager::LoadModelOffline(uint32_t &model_id, const ModelData &model, shared_ptr listener, + void *dev_ptr, size_t mem_size, void *weight_ptr, size_t weight_size) { + return SUCCESS; +} + +Status ModelManager::LoadModelWithQ(uint32_t &model_id, const ModelData &model_data, + const std::vector &input_queue_ids, + const std::vector &output_queue_ids) { + return SUCCESS; +} + +Status ModelManager::HandleCommand(const Command &command) { return SUCCESS; } + +Status ModelManager::ExecuteModel(uint32_t model_id, rtStream_t stream, bool async_mode, const InputData &input_data, + OutputData &output_data) { + return SUCCESS; +} + +} // namespace ge + +namespace ge { + +enum JobState { + JOBSTATE_WAITING = 1, + JOBSTATE_RUNNING, + JOBSTATE_KILLING, + JOBSTATE_SUCCEED, + JOBSTATE_FAILED, + JOBSTATE_KILLED, + JOBSTATE_UNKOWN +}; + +enum JobSubState { + JOBSUBSTATE_ENV_INIT = 201, + JOBSUBSTATE_ENV_FIN, + JOBSUBSTATE_RESOUCE_ALLOC, + JOBSUBSTATE_MODEL_COMPILE, + JOBSUBSTATE_GRAPH_PREPARE, + JOBSUBSTATE_GRAPH_SPLIT, + JOBSUBSTATE_GRAPH_OPTIMIZE, + JOBSUBSTATE_GRAPH_BUILD, + JOBSUBSTATE_GRAPH_LOAD, + JOBSUBSTATE_GRAPH_EXEC, + JOBSUBSTATE_GRAPH_UNLOAD, + JOBSUBSTATE_OTHER 
+}; + +enum ErrorModule { + ERROR_MODULE_DRIVER = 0x01, + ERROR_MODULE_RUNTIME = 0x04, + ERROR_MODULE_CCE = 0x06, + ERROR_MODULE_FMK = 0x08, + ERROR_MODULE_HCCL = 0x12 +}; + +class CsaInteract { + public: + CsaInteract &GetInstance(); + void WriteErrorCode(uint32_t module_ret_errcode, ErrorModule error_module, JobSubState job_sub_state); + void Init(int32_t dev_index, int64_t job_id); + Status WriteJobState(JobState job_state, JobSubState job_sub_state = JOBSUBSTATE_OTHER, + uint32_t module_ret_errcode = SUCCESS, ErrorModule error_module = ERROR_MODULE_FMK); + // device index + int32_t dev_index_; + // job id + int64_t job_id_; + // is initialization complete + bool is_init_; + // current job state + JobState curr_state_; + // job state file + std::string job_state_file_; + // network connectivity detect file + std::string hcom_detect_file_; + // identification of internal errors that occurred during the training + bool is_have_internal_error_; +}; + +CsaInteract &CsaInteract::GetInstance() { + static CsaInteract instance; + return instance; +} + +void CsaInteract::Init(int32_t dev_index, int64_t job_id) { + if (!is_init_) { + dev_index_ = dev_index; + job_id_ = job_id; + string csa_path_prefix; + if (std::getenv(FMK_STATUS_FILE_DIR_ENV) != nullptr) { + csa_path_prefix = std::getenv(FMK_STATUS_FILE_DIR_ENV); + } + if (!csa_path_prefix.empty()) { + std::string job_state_file = csa_path_prefix + std::to_string(dev_index_) + FILE_SEPARATE + JOBSTATE_FILE_NAME; + std::string hcom_detect_file = + csa_path_prefix + std::to_string(dev_index_) + FILE_SEPARATE + HCOM_DETECT_FILE_NAME; + job_state_file_ = RealPath(job_state_file.c_str()); + hcom_detect_file_ = RealPath(hcom_detect_file.c_str()); + } + is_init_ = true; + } +} + +void CsaInteract::WriteErrorCode(uint32_t module_ret_errcode, ErrorModule error_module, JobSubState job_sub_state) {} + +} // namespace ge + +Status ModelParserBase::LoadFromFile(const char *model_path, const char *key, int32_t priority, + 
ge::ModelData &model_data) { + return SUCCESS; +} + +Status CsaInteract::WriteJobState(JobState job_state, JobSubState job_sub_state, uint32_t module_ret_errcode, + ErrorModule error_module) { + return SUCCESS; +} + +namespace ge { + +static std::map data_type_to_length = { + {DT_BOOL, sizeof(bool)}, {DT_INT64, sizeof(int64_t)}, {DT_UINT64, sizeof(int64_t)}, {DT_FLOAT, sizeof(float)}, + {DT_INT32, sizeof(int32_t)}, {DT_UINT32, sizeof(int32_t)}, {DT_INT8, sizeof(char)}, {DT_UINT8, sizeof(char)}, + {DT_INT16, sizeof(int16_t)}, {DT_UINT16, sizeof(int16_t)}, {DT_FLOAT16, sizeof(int16_t)}, {DT_DOUBLE, sizeof(double)}, +}; + +class TypeUtils { + public: + static bool GetDataTypeLength(ge::DataType data_type, uint32_t &length); + static bool CheckUint64MulOverflow(uint64_t a, uint32_t b); +}; + +bool TypeUtils::GetDataTypeLength(ge::DataType data_type, uint32_t &length) { + auto it = data_type_to_length.find(data_type); + if (it != data_type_to_length.end()) { + length = it->second; + return true; + } else { + return false; + } +} + +bool TypeUtils::CheckUint64MulOverflow(uint64_t a, uint32_t b) { + // Not overflow + if (a == 0) { + return false; + } + if ((ULLONG_MAX / a) >= b) { + return false; + } + return true; +} +} // namespace ge diff --git a/tests/depends/profiler/CMakeLists.txt b/tests/depends/profiler/CMakeLists.txt new file mode 100644 index 00000000..cc6d2ca5 --- /dev/null +++ b/tests/depends/profiler/CMakeLists.txt @@ -0,0 +1,24 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +cmake_minimum_required(VERSION 2.8) +project(profiler_stub) + +file(GLOB_RECURSE SRC_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "src/profiler_stub.cc" +) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) + +add_library(profiler_stub SHARED ${SRC_FILES}) \ No newline at end of file diff --git a/tests/depends/profiler/src/profiler_stub.cc b/tests/depends/profiler/src/profiler_stub.cc new file mode 100644 index 00000000..1ed49fd8 --- /dev/null +++ b/tests/depends/profiler/src/profiler_stub.cc @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "toolchain/prof_engine.h" +#include "toolchain/prof_mgr_core.h" + +void * ProfMgrStartUp(const ProfMgrCfg *cfg) +{ + return nullptr; +} + +int ProfMgrStop(void *handle) +{ + return 0; +} + +int Msprof::Engine::RegisterEngine(const std::string& module, const Msprof::Engine::EngineIntf* engine) +{ + return 0; +} + diff --git a/tests/depends/runtime/CMakeLists.txt b/tests/depends/runtime/CMakeLists.txt new file mode 100644 index 00000000..dbbaa8fc --- /dev/null +++ b/tests/depends/runtime/CMakeLists.txt @@ -0,0 +1,28 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +cmake_minimum_required(VERSION 2.8) + +project(STUB_MMPA) + +file(GLOB_RECURSE SRCS RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "src/runtime_stub.cc" + ) + +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) + +add_library(runtime_stub SHARED ${SRCS}) diff --git a/tests/depends/runtime/src/runtime_stub.cc b/tests/depends/runtime/src/runtime_stub.cc new file mode 100644 index 00000000..5ab36af3 --- /dev/null +++ b/tests/depends/runtime/src/runtime_stub.cc @@ -0,0 +1,287 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define EVENT_LENTH 10 + +rtError_t rtCtxSetCurrent(rtContext_t ctx) { return RT_ERROR_NONE; } + +rtError_t rtGetStreamId(rtStream_t stream, int32_t *stream_id) { + *stream_id = 0; + return RT_ERROR_NONE; +} + +rtError_t rtCtxGetCurrent(rtContext_t *ctx) { + int x = 1; + *ctx = (void *)x; + return RT_ERROR_NONE; +} + +rtError_t rtCtxSetDryRun(rtContext_t ctx, rtDryRunFlag_t enable, uint32_t flag) { return RT_ERROR_NONE; } + +rtError_t rtEventGetTimeStamp(uint64_t *time, rtEvent_t event) { + *time = 12345; + return RT_ERROR_NONE; +} + +rtError_t rtEventCreate(rtEvent_t *event) { + *event = new int[EVENT_LENTH]; + return RT_ERROR_NONE; +} +rtError_t rtEventRecord(rtEvent_t event, rtStream_t stream) { return RT_ERROR_NONE; } + +rtError_t rtEventSynchronize(rtEvent_t event) { return RT_ERROR_NONE; } + +rtError_t rtEventDestroy(rtEvent_t event) { + delete[](int *) event; + return RT_ERROR_NONE; +} + +rtError_t rtMalloc(void **dev_ptr, uint64_t size, rtMemType_t type) { + *dev_ptr = new uint8_t[size]; + return RT_ERROR_NONE; +} + +rtError_t rtMemset(void *dev_ptr, uint64_t dest_max, uint32_t value, uint64_t count) { return RT_ERROR_NONE; } + +rtError_t rtFree(void *dev_ptr) { + delete[](uint8_t *) dev_ptr; + return RT_ERROR_NONE; +} + +rtError_t rtMallocHost(void **host_ptr, uint64_t size) { + *host_ptr = new uint8_t[size]; + return RT_ERROR_NONE; +} + +rtError_t rtFreeHost(void *host_ptr) { + delete[](uint8_t *) host_ptr; + return RT_ERROR_NONE; +} + +rtError_t rtStreamCreate(rtStream_t *stream, int32_t priority) { + *stream = new uint32_t; + return RT_ERROR_NONE; +} + +rtError_t rtStreamDestroy(rtStream_t stream) { + if (stream != nullptr) { + delete (uint32_t *)stream; + } + return RT_ERROR_NONE; +} + +rtError_t rtSetDevice(int32_t device) { return RT_ERROR_NONE; } + +rtError_t rtStreamSynchronize(rtStream_t stream) { return RT_ERROR_NONE; } + +rtError_t rtMemcpy(void *dst, uint64_t dest_max, const void *src, uint64_t count, 
rtMemcpyKind_t kind) { +#ifdef OTQT_UT + if (dest_max == 12 && count == 12) { // UTEST_kernelinfo_manager.all_success special treatment + memcpy_s(dst, dest_max, src, count); + } +#endif + return RT_ERROR_NONE; +} +rtError_t rtMemcpyAsync(void *dst, uint64_t dest_max, const void *src, uint64_t count, rtMemcpyKind_t kind, + rtStream_t stream) { + return RT_ERROR_NONE; +} + +rtError_t rtStreamWaitEvent(rtStream_t stream, rtEvent_t event) { return RT_ERROR_NONE; } + +rtError_t rtGetDeviceCount(int32_t *count) { + *count = 1; + return RT_ERROR_NONE; +} + +rtError_t rtDeviceReset(int32_t device) { return RT_ERROR_NONE; } + +rtError_t rtEventElapsedTime(float *time, rtEvent_t start, rtEvent_t end) { + *time = 10.0f; + return RT_ERROR_NONE; +} +rtError_t rtFunctionRegister(void *bin_handle, const void *stub_func, const char *stub_name, const void *dev_func) { + return RT_ERROR_NONE; +} + +rtError_t rtFunctionRegister(void *bin_handle, const void *stub_func, const char *stub_name, const void *dev_func, + uint32_t func_mode) { + return RT_ERROR_NONE; +} + +rtError_t rtDevBinaryRegister(const rtDevBinary_t *bin, void **handle) { return RT_ERROR_NONE; } + +rtError_t rtKernelConfigTransArg(const void *ptr, uint64_t size, uint32_t flag, void **arg) { return RT_ERROR_NONE; } + +rtError_t rtKernelLaunch(const void *stub_func, uint32_t block_dim, void *args, uint32_t args_size, rtSmDesc_t *sm_desc, + rtStream_t stream) { + return RT_ERROR_NONE; +} +rtError_t rtSetupArgument(const void *arg, uint32_t size, uint32_t offset) { return RT_ERROR_NONE; } +rtError_t rtLaunch(const void *stub_func) { return RT_ERROR_NONE; } +rtError_t rtDevBinaryUnRegister(void *handle) { return RT_ERROR_NONE; } +rtError_t rtConfigureCall(uint32_t num_blocks, rtSmDesc_t *sm_desc, rtStream_t stream) { return RT_ERROR_NONE; } + +rtError_t rtSetProfDir(char *prof_dir) { return RT_ERROR_NONE; } + +rtError_t rtSetProfDirEx(char *prof_dir, char *address, char *job_ctx) { return RT_ERROR_NONE; } + +rtError_t 
rtAiCoreMemorySizes(rtAiCoreMemorySize_t *aicore_memory_size) { return RT_ERROR_NONE; } + +rtError_t rtSetKernelReportCallback(rtKernelReportCallback callback) { + rtKernelInfo rt_kernel_info = {0}; + rt_kernel_info.arg_size = 12; + rt_kernel_info.task_offset = 100; + rt_kernel_info.arg = (void *)100; + rt_kernel_info.module_addr = (void *)100; + rt_kernel_info.module_size = 100; + + rtStream_t stream; + callback(stream, &rt_kernel_info); + return RT_ERROR_NONE; +} + +rtError_t rtMemAdvise(void *ptr, uint64_t size, uint32_t advise) { return RT_ERROR_NONE; } + +/// @ingroup rt_kernel +/// @brief start fusion kernels. +/// @param [in] stream stream for fusion kernels +/// @return RT_ERROR_NONE for ok, errno for failed +rtError_t rtKernelFusionStart(rtStream_t stream) { return RT_ERROR_NONE; } + +/// @ingroup rt_kernel +/// @brief end fusion kernels. +/// @param [in] stream stream for fusion kernels +/// @return RT_ERROR_NONE for ok, errno for failed +rtError_t rtKernelFusionEnd(rtStream_t stream) { return RT_ERROR_NONE; } +rtError_t rtMemGetInfo(size_t *free, size_t *total) { + *free = 512UL * 1024UL * 1024UL; + *total = 1024UL * 1024UL * 1024UL; + return RT_ERROR_NONE; +} + +rtError_t rtMemAllocManaged(void **ptr, uint64_t size, uint32_t flag) { return RT_ERROR_NONE; } + +rtError_t rtMemFreeManaged(void *ptr) { return RT_ERROR_NONE; } + +rtError_t rtMetadataRegister(void *handle, const char *meta_data) { return RT_ERROR_NONE; } +rtError_t rtSetTaskGenCallback(rtTaskGenCallback callback) { return RT_ERROR_NONE; } + +rtError_t rtModelCreate(rtModel_t *model, uint32_t flag) { + *model = new uint32_t; + return RT_ERROR_NONE; +} + +rtError_t rtModelDestroy(rtModel_t model) { + delete model; + return RT_ERROR_NONE; +} + +rtError_t rtModelBindStream(rtModel_t model, rtStream_t stream, uint32_t flag) { return RT_ERROR_NONE; } +rtError_t rtModelUnbindStream(rtModel_t model, rtStream_t stream) { return RT_ERROR_NONE; } +rtError_t rtModelExecute(rtModel_t model, rtStream_t 
stream, uint32_t flag) { return RT_ERROR_NONE; } + +rtError_t rtGetFunctionByName(const char *stub_name, void **stub_func) { + *(char **)stub_func = "func"; + return RT_ERROR_NONE; +} + +rtError_t rtQueryFunctionRegistered(const char *stub_name) { return RT_ERROR_NONE; } + +rtError_t rtCtxCreate(rtContext_t *ctx, uint32_t flags, int32_t device) { return RT_ERROR_NONE; } + +rtError_t rtKernelLaunchEx(void *args, uint32_t args_size, uint32_t flags, rtStream_t stream_) { return RT_ERROR_NONE; } + +rtError_t rtCpuKernelLaunch(const void *so_name, const void *kernel_name, uint32_t block_dim, const void *args, + uint32_t args_size, rtSmDesc_t *sm_desc, rtStream_t stream) { + return RT_ERROR_NONE; +} + +rtError_t rtModelGetTaskId(void *handle, uint32_t *task_id) { + *task_id = 0; + return RT_ERROR_NONE; +} +rtError_t rtEndGraph(rtModel_t model, rtStream_t stream) { return RT_ERROR_NONE; } +rtError_t rtProfilerStop(void) { return RT_ERROR_NONE; } + +rtError_t rtSetDvfsProfile(DvfsProfileMode mode) { return RT_ERROR_NONE; } + +rtError_t rtUnsetDvfsProfile() { return RT_ERROR_NONE; } + +rtError_t rtGetDvfsProfile(DvfsProfileMode *pmode) { return RT_ERROR_NONE; } + +rtError_t rtCtxDestroy(rtContext_t ctx) { return RT_ERROR_NONE; } + +rtError_t rtProfilerInit(const char *prof_dir, const char *address, const char *job_ctx) { return RT_ERROR_NONE; } + +rtError_t rtProfilerStart(void) { return RT_ERROR_NONE; } + +rtError_t rtLabelCreate(rtLabel_t *label) { return RT_ERROR_NONE; } + +rtError_t rtLabelDestroy(rtLabel_t label) { return RT_ERROR_NONE; } + +rtError_t rtLabelSet(rtLabel_t label, rtStream_t stream) { return RT_ERROR_NONE; } + +rtError_t rtLabelSwitch(void *ptr, rtCondition_t condition, uint32_t value, rtLabel_t true_label, rtStream_t stream) { + return RT_ERROR_NONE; +} + +rtError_t rtLabelGoto(rtLabel_t label, rtStream_t stream) { return RT_ERROR_NONE; } + +rtError_t rtInvalidCache(uint64_t base, uint32_t len) { return RT_ERROR_NONE; } + +rtError_t 
rtModelLoadComplete(rtModel_t model) { return RT_ERROR_NONE; } + +rtError_t rtStreamCreateWithFlags(rtStream_t *stream, int32_t priority, uint32_t flags) { + *stream = new uint32_t; + return RT_ERROR_NONE; +} + +rtError_t rtFlushCache(uint64_t base, uint32_t len) { return RT_ERROR_NONE; } + +rtError_t rtProfilerTrace(uint64_t id, bool notify, uint32_t flags, rtStream_t stream_) { return RT_ERROR_NONE; } + +rtError_t rtMemSetRC(const void *dev_ptr, uint64_t size, uint32_t read_count) { return RT_ERROR_NONE; } + +rtError_t rtStreamSwitch(void *ptr, rtCondition_t condition, int64_t value, rtStream_t true_stream, rtStream_t stream) { + return RT_ERROR_NONE; +} + +rtError_t rtStreamSwitchEx(void *ptr, rtCondition_t condition, void *value_ptr, rtStream_t true_stream, + rtStream_t stream, rtSwitchDataType_t data_type) { + return RT_ERROR_NONE; +} + +rtError_t rtStreamActive(rtStream_t active_stream, rtStream_t stream) { return RT_ERROR_NONE; } + +rtError_t rtEventReset(rtEvent_t event, rtStream_t stream) { return RT_ERROR_NONE; } + +rtError_t rtGetDevice(int32_t *device) { return RT_ERROR_NONE; } + +rtError_t rtDatadumpInfoLoad(const void *dump_info, uint32_t length) { return RT_ERROR_NONE; } + +rtError_t rtKernelLaunchWithFlag(const void *stub_func, uint32_t block_dim, void *args, uint32_t args_size, + rtSmDesc_t *sm_desc, rtStream_t stream_, uint32_t flags) { + return RT_ERROR_NONE; +} + +rtError_t rtCpuKernelLaunchWithFlag(const void *so_name, const void *kernel_name, uint32_t core_dim, const void *args, + uint32_t args_size, rtL2Ctrl_t *l2ctrl, rtStream_t stream_, uint32_t flags) { + return RT_ERROR_NONE; +} \ No newline at end of file diff --git a/tests/depends/slog/CMakeLists.txt b/tests/depends/slog/CMakeLists.txt new file mode 100644 index 00000000..1538fd57 --- /dev/null +++ b/tests/depends/slog/CMakeLists.txt @@ -0,0 +1,23 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not 
use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +cmake_minimum_required(VERSION 2.8) +project(slog_stub) + +file(GLOB_RECURSE SRC_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "src/*.cc" +) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +add_library(slog_stub SHARED ${SRC_FILES}) \ No newline at end of file diff --git a/tests/depends/slog/src/slog_stub.cc b/tests/depends/slog/src/slog_stub.cc new file mode 100644 index 00000000..a27deba1 --- /dev/null +++ b/tests/depends/slog/src/slog_stub.cc @@ -0,0 +1,41 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "toolchain/slog.h" + +#include +#include +#include + +void dav_log(int module_id, const char *fmt, ...) {} + +void DlogErrorInner(int module_id, const char *fmt, ...) { dav_log(module_id, fmt); } + +void DlogWarnInner(int module_id, const char *fmt, ...) 
{ dav_log(module_id, fmt); } + +void DlogInfoInner(int module_id, const char *fmt, ...) { dav_log(module_id, fmt); } + +void DlogDebugInner(int module_id, const char *fmt, ...) { dav_log(module_id, fmt); } + +void DlogEventInner(int module_id, const char *fmt, ...) { dav_log(module_id, fmt); } + +void DlogInner(int module_id, int level, const char *fmt, ...) { dav_log(module_id, fmt); } + +void DlogWithKVInner(int module_id, int level, KeyValue *pst_kv_array, int kv_num, const char *fmt, ...) { + dav_log(module_id, fmt); +} + +int dlog_getlevel(int module_id, int *enable_event) { return DLOG_DEBUG; } diff --git a/tests/st/CMakeLists.txt b/tests/st/CMakeLists.txt new file mode 100644 index 00000000..56babec1 --- /dev/null +++ b/tests/st/CMakeLists.txt @@ -0,0 +1,42 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +cmake_minimum_required(VERSION 3.0) +set(CMAKE_CXX_STANDARD 11) +project(ge_st CXX C) + +set(CMAKE_CXX_FLAGS "-O1 -fPIC -Wl,-unresolved-symbols=ignore-in-shared-libs") + + +file(GLOB_RECURSE RES50_TRAIN_SRCS RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "resnet50/resnet50_train.cc" + "resnet50/common.cc" +) + +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/ge) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/ops) +include_directories(/usr/local/HiAI/opp/op_proto/built-in/inc) + +add_executable(st_resnet50_train ${RES50_TRAIN_SRCS}) +target_link_libraries(st_resnet50_train + ${PROTOBUF_LIBRARY} + ge_client_train ge_memory +) \ No newline at end of file diff --git a/tests/st/resnet50/common.cc b/tests/st/resnet50/common.cc new file mode 100755 index 00000000..c1d54205 --- /dev/null +++ b/tests/st/resnet50/common.cc @@ -0,0 +1,765 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include + +#include "common.h" +#include "model.h" + +#define MAX_HEAD_SIZE 50 + +using namespace std; +using namespace ge; + +void update_op_format(Operator ops, Format format) { + printf("set format begin.........\n"); + ge::TensorDesc tensor_desc_x = ops.GetInputDesc("x"); + ge::TensorDesc tensor_desc_y = ops.GetOutputDesc("y"); + Format f_x0 = tensor_desc_x.GetFormat(); + Format f_y0 = tensor_desc_x.GetFormat(); + printf("before set x format:%d \n", f_x0); + printf("before set y format:%d \n", f_y0); + printf("format to be set is :%d \n", format); + tensor_desc_x.SetFormat(format); + tensor_desc_y.SetFormat(format); + ops.UpdateInputDesc("x", tensor_desc_x); + ops.UpdateOutputDesc("y", tensor_desc_y); + Format f_x = tensor_desc_x.GetFormat(); + Format f_y = tensor_desc_y.GetFormat(); + printf("after set x format:%d \n", f_x); + printf("after set y format:%d \n", f_y); +} + +/// getDimInfo: get dim info from data file +/// param: +/// fp: the testing datafile object +/// +/// return : +/// dim_info: array to store the info of the dim in datafile, like [4,3,3,6,3,162(3*3*6*3)],4 is dim size,3,3,6,3 is the +/// dim shape data_size: the size of the testing data including the data file +void getDimInfo(FILE *fp, std::vector &dim_info) { + // get dim info from hisi testing data file + uint32_t *dim_buffer = (uint32_t *)malloc(MAX_HEAD_SIZE * sizeof(uint32_t)); + fread(dim_buffer, sizeof(uint32_t), MAX_HEAD_SIZE, fp); + dim_info.push_back(*dim_buffer); // get dim size + + // get data shape to compute the datasize + uint64_t data_size = 1; + uint32_t i = 1; + for (; i <= dim_info[0]; i++) { + dim_info.push_back(*(dim_buffer + i)); + data_size *= *(dim_buffer + i); + } + dim_info.push_back(data_size); + + free(dim_buffer); +} + +/// readTestDataFile: read test date from hisi .t datafile +/// param: +/// infile: the path of hisi .t datafile +/// return: +/// dim_info: array to store the info of the dim in 
datafile, like [4,3,3,6,3],4 is dim size,3,3,6,3 is the dim shape +void *readTestDataFile(std::string infile, std::vector &dim_info) { + FILE *fp; + fp = fopen(infile.c_str(), "r"); + + if (fp == NULL) { + printf("ERROR: cant't open file %s\n", infile.c_str()); + return NULL; + } else { + getDimInfo(fp, dim_info); + uint64_t data_size = dim_info[dim_info.size() - 1]; + + fclose(fp); + + fp = fopen(infile.c_str(), "r"); + if (fp == NULL) { + printf("ERROR: cant't open file %s\n", infile.c_str()); + return NULL; + } + uint32_t *memory = (uint32_t *)malloc((dim_info[0] + 1 + data_size) * sizeof(uint32_t)); + fread(memory, sizeof(uint32_t), (dim_info[0] + 1 + data_size), fp); + fclose(fp); + return memory + (dim_info[0] + 1); + } +} + +void *readUint8TestDataFile(std::string infile, int size) { + FILE *fp; + fp = fopen(infile.c_str(), "r"); + + if (fp == NULL) { + printf("ERROR: cant't open file %s\n", infile.c_str()); + return NULL; + } + uint8_t *memory = (uint8_t *)malloc((size) * sizeof(uint8_t)); + fread(memory, sizeof(uint8_t), (size), fp); + fclose(fp); + return memory; +} + +/// allclose +/// param: +/// a:compared file a +/// b:compared file b +/// count: the count size which will compare +/// rtol: +/// atol: +/// return: +/// true or false +bool allclose(float *a, float *b, uint64_t count, float rtol = 1e-05, float atol = 1e-08) { + uint32_t i = 0; + + for (; i < count; ++i) { + if (fabs(a[i] - b[i]) > (atol + rtol * fabs(b[i]))) { + printf("compara failed: i= %d, a[i]=%f, b[i]=%f,atol=%f,rtol=%f\n", i, a[i], b[i], atol, rtol); + return false; + } + } + + return true; +} + +/// compFp32WithTData: compare the data with the data in hisi .t file +/// param: +/// actual_output_data: the result of ge +/// expected_data_file: the path of hisi .t result file +/// rtol: +/// atol: +/// return: +/// true of false +bool compFp32WithTData(float *actual_output_data, std::string expected_data_file, float rtol = 1e-05, float atol = 1e-08) { + std::vector dim_info; + float 
*expected_output_data = (float *)readTestDataFile(expected_data_file, dim_info); + + uint32_t i = 1; + uint64_t data_size = 1; + for (; i <= dim_info[0]; i++) { + data_size *= dim_info[i]; + } + return allclose(actual_output_data, expected_output_data, data_size, rtol, atol); +} + +int SwitchDatatype(DataType dt) { + int size = 1; + if (dt == ge::DT_FLOAT) size = 4; + if (dt == ge::DT_INT32) size = 4; + if (dt == ge::DT_FLOAT16) size = 2; + if (dt == ge::DT_INT64) size = 8; + return size; +} + +ge::Tensor genTensor(std::vector tensor_shape, Format format, DataType dt) { + int size = 1; + for (int i = 0; i < tensor_shape.size(); i++) { + size = size * tensor_shape[i]; + } + + int data_type_size = SwitchDatatype(dt); + + size = abs(size * data_type_size); + vector data_value; + + if (size == 0) { + TensorDesc input_tensor_desc = TensorDesc(ge::Shape(tensor_shape), format, dt); + input_tensor_desc.SetRealDimCnt(tensor_shape.size()); + Tensor gen_tensor = Tensor(input_tensor_desc, data_value); + return gen_tensor; + } + for (int i = 0; i < size; i++) { + data_value.push_back(1); + } + TensorDesc input_tensor_desc = TensorDesc(ge::Shape(tensor_shape), format, dt); + input_tensor_desc.SetRealDimCnt(tensor_shape.size()); + Tensor gen_tensor = Tensor(input_tensor_desc, data_value); + return gen_tensor; +} + +ge::Tensor genTensor_withVaule(std::vector tensor_shape, float value) { + int size = 1; + for (int i = 0; i < tensor_shape.size(); i++) { + size = size * tensor_shape[i]; + } + + float *data_value = new float[size]; + for (int i = 0; i < size; i++) { + *(data_value + i) = value; + } + Tensor gen_ge_tensor; + TensorDesc input_tensor_desc = TensorDesc(ge::Shape(tensor_shape), FORMAT_NCHW); + gen_ge_tensor.SetTensorDesc(input_tensor_desc); + gen_ge_tensor.SetData((uint8_t *)data_value, size * 4); + + return gen_ge_tensor; +} + +Tensor genTesnor_Shape_as_data(std::vector tensor_shape) { + Format format = FORMAT_NCHW; + DataType dt = DT_INT32; + int size = 
tensor_shape.size(); + int32_t *tensor_data = new int32_t[size]; + std::cout << "shape tensor size:" << size << endl; + for (int i = 0; i < size; i++) { + *(tensor_data + i) = tensor_shape[i]; + } + + Tensor gen_tensor; + TensorDesc input_tensor_desc = TensorDesc(ge::Shape({size}), FORMAT_NCHW, DT_INT32); + gen_tensor.SetData((uint8_t *)tensor_data, size * GetDatTypeSize(dt)); + gen_tensor.SetTensorDesc(input_tensor_desc); + + return gen_tensor; +} + +/// train_flag is 0 when infer; train_flag is 1 when train; train_flag is 0 default +/// run_mode_path is not 0,1,2 when TBE; run_mode_path is 1 when FE; run_mode_path is 0 default +/// run_mode_path is 2 now when AICPU, ge.enabledlocalFmkop is 1 +ge::Status GEInitialize_api(string train_flag, string run_mode_path) { + ge::Status ret; + if (run_mode_path == "0") { + const std::map config = { + {"device_id", "0,2,4,6"}, + {"rank_table_file", "hccl from csa/paas"}, + {"ge.graphRunMode", train_flag}, + {"ge.aicpuFlag", "1"}, + {"ge.feFlag", "1"}, + {DDK_VERSION_FLAG, "1.60.T17.B830"}, + {"ge.soLoadPath", + "/usr/local/HiAI/runtime/lib64/plugin/opskernel/libfe.so:/usr/local/HiAI/runtime/lib64/plugin/opskernel/" + "libaicpu_plugin.so"}}; + ret = ge::GEInitialize(config); + } else if (run_mode_path == "1") { + const std::map config = { + {"device_id", "0,2,4,6"}, + {"rank_table_file", "hccl from csa/paas"}, + {"ge.graphRunMode", train_flag}, + {"ge.feFlag", "1"}, + {DDK_VERSION_FLAG, "1.60.T17.B830"}, + {TBE_PLUGIN_PATH_FLAG, "/usr/local/HiAI/runtime/lib64/tbe_plugin/bert"}, + {"ge.soLoadPath", "/usr/local/HiAI/runtime/lib64/plugin/opskernel/libfe.so"}}; + ret = ge::GEInitialize(config); + } else if (run_mode_path == "2") { + const std::map config = {{"device_id", "0,2,4,6"}, + {"rank_table_file", "hccl from csa/paas"}, + {"ge.graphRunMode", train_flag}, + {LOCAL_FMKOP_FLAG, "1"}}; + ret = ge::GEInitialize(config); + } else { + const std::map config = { + {"device_id", "0,2,4,6"}, + {"rank_table_file", "hccl from 
csa/paas"}, + {"ge.graphRunMode", train_flag}, + {DDK_VERSION_FLAG, "1.60.T17.B830"}, + {TBE_PLUGIN_PATH_FLAG, "/usr/local/HiAI/runtime/lib64/tbe_plugin/" + run_mode_path}}; + ret = ge::GEInitialize(config); + } + std::cout << "GEInitialize_ret is " << ret << std::endl; + + return ret; +} + +/// train_flag is infer default +/// run_mode: is multi group of [fe,aicpu,bert,deeplabv3,mobilenetv2,single_path_nas,ssd] +/// but bert,deeplabv3,mobilenetv2,single_path_nas,ssd can only set one value from array +/// eg:"fe,aicpu,bert" or "fe", default is “fe” +/// "fe,aicpu,bert" remain open fe aicpu and bert +ge::Status GEInitialize_api_new(string train_flag, string run_mode) { + ge::Status ret; + vector modes; + + char *strs = new char[run_mode.length() + 1]; + strcpy(strs, run_mode.c_str()); + const char *delim = ","; + char *p = strtok(strs, delim); + while (p) { + string s = p; // transform substr to string + modes.push_back(s); // save to result array + p = strtok(NULL, delim); + } + + std::map config = { + {"device_id", "0,2,4,6"}, + {"rank_table_file", "hccl from csa/paas"}, + {DDK_VERSION_FLAG, "1.60.T17.B830"}, + {"ge.opsProtoLibPath", "/usr/local/HiAI/runtime/ops/op_proto/built-in/libopsproto.so"}}; + if (train_flag == "infer") + config.insert(pair("ge.graphRunMode", "0")); + else if (train_flag == "train") + config.insert(pair("ge.graphRunMode", "1")); + else + std::cout << "GeInitialize give the error param" << std::endl; + + for (int i = 0; i < modes.size(); i++) { + if (modes[i] == "fe") { + config.insert(pair("ge.feFlag", "1")); + if (config.find("ge.soLoadPath") != config.end()) { + config["ge.soLoadPath"] = + "/usr/local/HiAI/runtime/lib64/plugin/opskernel/libfe.so:/usr/local/HiAI/runtime/lib64/plugin/opskernel/" + "libaicpu_plugin.so:/usr/local/HiAI/runtime/lib64/plugin/opskernel/libge_local_engine.so:/usr/local/HiAI/" + "runtime/lib64/plugin/opskernel/librts_engine.so"; + } else { + config.insert(pair( + "ge.soLoadPath", + 
"/usr/local/HiAI/runtime/lib64/plugin/opskernel/libfe.so:/usr/local/HiAI/runtime/lib64/plugin/opskernel/" + "libge_local_engine.so:/usr/local/HiAI/runtime/lib64/plugin/opskernel/librts_engine.so")); + } + } else if (modes[i] == "aicpu") { + config.insert(pair("ge.aicpuFlag", "1")); + if (config.find("ge.soLoadPath") != config.end()) { + config["ge.soLoadPath"] = + "/usr/local/HiAI/runtime/lib64/plugin/opskernel/libfe.so:/usr/local/HiAI/runtime/lib64/plugin/opskernel/" + "libaicpu_plugin.so:/usr/local/HiAI/runtime/lib64/plugin/opskernel/libge_local_engine.so:/usr/local/HiAI/" + "runtime/lib64/plugin/opskernel/librts_engine.so"; + } else { + config.insert(pair( + "ge.soLoadPath", + "/usr/local/HiAI/runtime/lib64/plugin/opskernel/libaicpu_plugin.so:/usr/local/HiAI/runtime/lib64/plugin/" + "opskernel/libge_local_engine.so:/usr/local/HiAI/runtime/lib64/plugin/opskernel/librts_engine.so")); + } + } else if (modes[i] == "bert" || modes[i] == "deeplabv3" || modes[i] == "mobilenetv2" || + modes[i] == "single_path_nas" || modes[i] == "ssd") { + config.insert(pair(TBE_PLUGIN_PATH_FLAG, "/usr/local/HiAI/runtime/lib64/tbe_plugin/" + modes[i])); + } else if (modes[i] == "plugin") { + + } else + std::cout << "GeInitialize give the error param" << std::endl; + } + ret = ge::GEInitialize(config); + + std::cout << "GEInitialize_ret is " << ret << std::endl; + + return ret; +} + +ge::Status GEFinalize_api() { + ge::Status ret = ge::GEFinalize(); + std::cout << "GEFinalize ret is " << ret << std::endl; + + return ret; +} + +/// set train_flag +/// if run_mode_path is "fe" remain FE process; "fe,plugin" is FE and TBE plugin process +/// "aicpu" is open aicpu plugin +int RunGraph_initData(Graph &graph, string op_name, map> attr_test, string train_flag, + string run_mode_path) { + std::map options = {{RUN_FLAG, "1"}}; + uint32_t graph_id = 0; + + ge::Status ret = GEInitialize_api_new(train_flag, run_mode_path); + EXPECT_EQ(ret, ge::SUCCESS); + + ge::Session *session = new 
Session(options); + ASSERT_TRUE(session != NULL); + + std::vector input; + if (attr_test.find("input1") != attr_test.end()) { + Tensor input_tensor = genTensor(attr_test["input1"]); + input.push_back(input_tensor); + } + if (attr_test.find("input2") != attr_test.end()) { + Tensor input_tensor = genTensor(attr_test["input2"]); + input.push_back(input_tensor); + } + if (attr_test.find("input3") != attr_test.end()) { + Tensor input_tensor = genTensor(attr_test["input3"]); + input.push_back(input_tensor); + } + std::vector output; + + ret = session->AddGraph(graph_id, graph); + EXPECT_EQ(ret, ge::SUCCESS); + if (train_flag == "1") { + setenv("GE_TRAIN", "1", true); + ret = session->RunGraph(graph_id, input, output); + setenv("GE_TRAIN", "0", true); + } else { + ret = session->RunGraph(graph_id, input, output); + } + delete session; + GEFinalize_api(); + + if (ret != ge::SUCCESS) { + std::cout << " run graph failed" << std::endl; + return -1; + } else { + return 0; + } +} + +ge::Status session_add_and_run_graph(ge::Session *session, uint32_t graph_id, Graph &graph, std::vector inputs, + std::vector &outputs) { + ge::Status ret = session->AddGraph(graph_id, graph); + EXPECT_EQ(ret, ge::SUCCESS); + ret = session->RunGraph(graph_id, inputs, outputs); + + return ret; +} + +ge::Session *create_session() { + // Init session + std::map options = {{"a", "b"}, {TRAIN_FLAG, "1"}}; + ge::Session *session = new Session(options); + ASSERT_TRUE(session != NULL); + + return session; +} + +ge::Session *create_aipp_session() { + // Init session + std::map options = {{"a", "b"}, {TRAIN_FLAG, "1"}, {"ge.insertOpFile", "/root/host/ge/aipp.cfg"}}; + ge::Session *session = new Session(options); + ASSERT_TRUE(session != NULL); + + return session; +} + +int buildCheckPointGraph(Graph &graph, map variables) { + std::vector inputs{}; + std::vector outputs{}; + + for (map::iterator it = variables.begin(); it != variables.end(); ++it) { + auto var = op::Variable(string(it->first)); + 
var.update_output_desc_y(it->second); + inputs.push_back(var); + graph.AddOp(var); + } + + auto save = op::Save().create_dynamic_input_tensors(inputs.size()); + for (int i = 0; i < inputs.size(); i++) { + save.set_dynamic_input_tensors(i, inputs[i]); + } + + graph.SetInputs(inputs).SetOutputs(outputs); + return 0; +} + +int buildInitGraph(Graph &graph, std::vector desc_var, std::vector name_var, + std::vector values_var) { + std::vector inputs{}; + std::vector outputs{}; + + for (int i = 0; i < desc_var.size(); i++) { + desc_var[i].SetRealDimCnt(desc_var[i].GetShape().GetDimNum()); + auto tensor_data = genTensor_withVaule(desc_var[i].GetShape().GetDims(), values_var[i]); + auto var_constant = op::Constant().set_attr_value(tensor_data); + var_constant.update_output_desc_y(desc_var[i]); + + auto var_init = op::Variable(string(name_var[i])); + var_init.update_output_desc_y(desc_var[i]); + auto var_assign = op::Assign().set_input_ref(var_init).set_input_value(var_constant); + inputs.push_back(var_init); + } + graph.SetInputs(inputs).SetOutputs(outputs); + return 0; +} + +int buildInitGraph_other_dataType(Graph &graph, std::vector desc_var, std::vector name_var) { + std::vector inputs{}; + std::vector outputs{}; + + for (int i = 0; i < desc_var.size(); i++) { + desc_var[i].SetRealDimCnt(desc_var[i].GetShape().GetDimNum()); + auto tensor_data = genTensor(desc_var[i].GetShape().GetDims(), desc_var[i].GetFormat(), desc_var[i].GetDataType()); + auto var_constant = op::Constant().set_attr_value(tensor_data); + var_constant.update_output_desc_y(desc_var[i]); + + auto var_init = op::Variable(string(name_var[i])); + var_init.update_output_desc_y(desc_var[i]); + auto var_assign = op::Assign().set_input_ref(var_init).set_input_value(var_constant); + inputs.push_back(var_init); + + graph.AddOp(var_constant); + graph.AddOp(var_init); + graph.AddOp(var_assign); + } + graph.SetInputs(inputs).SetOutputs(outputs); + return 0; +} + +bool build_multi_input_multi_output_graph(Graph 
&graph) { + auto data1 = op::Data("Data1").set_attr_index(0); + auto data2 = op::Data("Data2").set_attr_index(1); + + vector dim_info; + + auto relu1 = op::Relu("Relu1").set_input_x(data1); + auto relu2 = op::Relu("Relu2").set_input_x(data2); + + auto eltwise = op::Eltwise("Eltwise") + .create_dynamic_input___input(2) + .set_dynamic_input___input(0, relu1) + .set_dynamic_input___input(1, relu2) + .set_attr_mode(1) + .set_attr_coeff({1, 1}); + + auto eltwise1 = op::Eltwise("Eltwise1") + .create_dynamic_input___input(2) + .set_dynamic_input___input(0, eltwise) + .set_dynamic_input___input(1, eltwise) + .set_attr_mode(1) + .set_attr_coeff({1, 1}); + + auto eltwise2 = op::Eltwise("Eltwise2") + .create_dynamic_input___input(2) + .set_dynamic_input___input(0, eltwise) + .set_dynamic_input___input(1, eltwise) + .set_attr_mode(1) + .set_attr_coeff({1, 1}); + + std::vector inputs{data1, data2}; + std::vector outputs{eltwise1, eltwise2}; + graph.SetInputs(inputs).SetOutputs(outputs); + return true; +} + +void build_big_graph(Graph &graph, map> attr) { + auto data = op::Data("Data").set_attr_index(0); + auto weight = op::Const("weight1").set_attr_value(genTensor(attr["weight"])); + vector weight_shape(attr["weight"].begin(), attr["weight"].end()); + TensorDesc weight_desc(ge::Shape(weight_shape), FORMAT_NCHW, DT_FLOAT); + weight.update_output_desc_y(weight_desc); + auto conv_1 = op::Conv2D("conv1").set_input_x(data).set_input_filter(weight); + + auto conv_2 = op::Conv2D("conv2").set_input_x(conv_1).set_input_filter(weight); + auto conv_3 = op::Conv2D("conv3").set_input_x(conv_2).set_input_filter(weight); + auto conv_4 = op::Conv2D("conv4").set_input_x(conv_3).set_input_filter(weight); + auto conv_5 = op::Conv2D("conv5").set_input_x(conv_4).set_input_filter(weight); + auto conv_6 = op::Conv2D("conv6").set_input_x(conv_5).set_input_filter(weight); + auto conv_7 = op::Conv2D("conv7").set_input_x(conv_6).set_input_filter(weight); + auto conv_8 = 
op::Conv2D("conv8").set_input_x(conv_7).set_input_filter(weight); + auto conv_9 = op::Conv2D("conv9").set_input_x(conv_8).set_input_filter(weight); + auto conv_10 = op::Conv2D("conv10").set_input_x(conv_9).set_input_filter(weight); + auto conv_11 = op::Conv2D("conv11").set_input_x(conv_10).set_input_filter(weight); + auto conv_12 = op::Conv2D("conv12").set_input_x(conv_11).set_input_filter(weight); + auto conv_13 = op::Conv2D("conv13").set_input_x(conv_12).set_input_filter(weight); + auto conv_14 = op::Conv2D("conv14").set_input_x(conv_13).set_input_filter(weight); + auto conv_15 = op::Conv2D("conv15").set_input_x(conv_14).set_input_filter(weight); + auto conv_16 = op::Conv2D("conv16").set_input_x(conv_15).set_input_filter(weight); + auto conv_17 = op::Conv2D("conv17").set_input_x(conv_16).set_input_filter(weight); + auto conv_18 = op::Conv2D("conv18").set_input_x(conv_17).set_input_filter(weight); + auto conv_19 = op::Conv2D("conv19").set_input_x(conv_18).set_input_filter(weight); + auto conv_20 = op::Conv2D("conv20").set_input_x(conv_19).set_input_filter(weight); + auto conv_21 = op::Conv2D("conv21").set_input_x(conv_20).set_input_filter(weight); + auto conv_22 = op::Conv2D("conv22").set_input_x(conv_21).set_input_filter(weight); + auto conv_23 = op::Conv2D("conv23").set_input_x(conv_22).set_input_filter(weight); + auto conv_24 = op::Conv2D("conv24").set_input_x(conv_23).set_input_filter(weight); + auto conv_25 = op::Conv2D("conv25").set_input_x(conv_24).set_input_filter(weight); + auto conv_26 = op::Conv2D("conv26").set_input_x(conv_25).set_input_filter(weight); + auto conv_27 = op::Conv2D("conv27").set_input_x(conv_26).set_input_filter(weight); + auto conv_28 = op::Conv2D("conv28").set_input_x(conv_27).set_input_filter(weight); + auto conv_29 = op::Conv2D("conv29").set_input_x(conv_28).set_input_filter(weight); + auto conv_30 = op::Conv2D("conv30").set_input_x(conv_29).set_input_filter(weight); + auto conv_31 = 
op::Conv2D("conv31").set_input_x(conv_30).set_input_filter(weight); + auto conv_32 = op::Conv2D("conv32").set_input_x(conv_31).set_input_filter(weight); + auto conv_33 = op::Conv2D("conv33").set_input_x(conv_32).set_input_filter(weight); + auto conv_34 = op::Conv2D("conv34").set_input_x(conv_33).set_input_filter(weight); + auto conv_35 = op::Conv2D("conv35").set_input_x(conv_34).set_input_filter(weight); + auto conv_36 = op::Conv2D("conv36").set_input_x(conv_35).set_input_filter(weight); + auto conv_37 = op::Conv2D("conv37").set_input_x(conv_36).set_input_filter(weight); + auto conv_38 = op::Conv2D("conv38").set_input_x(conv_37).set_input_filter(weight); + auto conv_39 = op::Conv2D("conv39").set_input_x(conv_38).set_input_filter(weight); + auto conv_40 = op::Conv2D("conv40").set_input_x(conv_39).set_input_filter(weight); + auto conv_41 = op::Conv2D("conv41").set_input_x(conv_40).set_input_filter(weight); + auto conv_42 = op::Conv2D("conv42").set_input_x(conv_41).set_input_filter(weight); + auto conv_43 = op::Conv2D("conv43").set_input_x(conv_42).set_input_filter(weight); + auto conv_44 = op::Conv2D("conv44").set_input_x(conv_43).set_input_filter(weight); + auto conv_45 = op::Conv2D("conv45").set_input_x(conv_44).set_input_filter(weight); + auto conv_46 = op::Conv2D("conv46").set_input_x(conv_45).set_input_filter(weight); + auto conv_47 = op::Conv2D("conv47").set_input_x(conv_46).set_input_filter(weight); + auto conv_48 = op::Conv2D("conv48").set_input_x(conv_47).set_input_filter(weight); + auto conv_49 = op::Conv2D("conv49").set_input_x(conv_48).set_input_filter(weight); + auto conv_50 = op::Conv2D("conv50").set_input_x(conv_49).set_input_filter(weight); + auto conv_51 = op::Conv2D("conv51").set_input_x(conv_50).set_input_filter(weight); + auto conv_52 = op::Conv2D("conv52").set_input_x(conv_51).set_input_filter(weight); + auto conv_53 = op::Conv2D("conv53").set_input_x(conv_52).set_input_filter(weight); + auto conv_54 = 
op::Conv2D("conv54").set_input_x(conv_53).set_input_filter(weight); + auto conv_55 = op::Conv2D("conv55").set_input_x(conv_54).set_input_filter(weight); + auto conv_56 = op::Conv2D("conv56").set_input_x(conv_55).set_input_filter(weight); + auto conv_57 = op::Conv2D("conv57").set_input_x(conv_56).set_input_filter(weight); + auto conv_58 = op::Conv2D("conv58").set_input_x(conv_57).set_input_filter(weight); + auto conv_59 = op::Conv2D("conv59").set_input_x(conv_58).set_input_filter(weight); + auto conv_60 = op::Conv2D("conv60").set_input_x(conv_59).set_input_filter(weight); + auto conv_61 = op::Conv2D("conv61").set_input_x(conv_60).set_input_filter(weight); + auto conv_62 = op::Conv2D("conv62").set_input_x(conv_61).set_input_filter(weight); + auto conv_63 = op::Conv2D("conv63").set_input_x(conv_62).set_input_filter(weight); + auto conv_64 = op::Conv2D("conv64").set_input_x(conv_63).set_input_filter(weight); + auto conv_65 = op::Conv2D("conv65").set_input_x(conv_64).set_input_filter(weight); + auto conv_66 = op::Conv2D("conv66").set_input_x(conv_65).set_input_filter(weight); + auto conv_67 = op::Conv2D("conv67").set_input_x(conv_66).set_input_filter(weight); + auto conv_68 = op::Conv2D("conv68").set_input_x(conv_67).set_input_filter(weight); + auto conv_69 = op::Conv2D("conv69").set_input_x(conv_68).set_input_filter(weight); + auto conv_70 = op::Conv2D("conv70").set_input_x(conv_69).set_input_filter(weight); + auto conv_71 = op::Conv2D("conv71").set_input_x(conv_70).set_input_filter(weight); + auto conv_72 = op::Conv2D("conv72").set_input_x(conv_71).set_input_filter(weight); + auto conv_73 = op::Conv2D("conv73").set_input_x(conv_72).set_input_filter(weight); + auto conv_74 = op::Conv2D("conv74").set_input_x(conv_73).set_input_filter(weight); + auto conv_75 = op::Conv2D("conv75").set_input_x(conv_74).set_input_filter(weight); + auto conv_76 = op::Conv2D("conv76").set_input_x(conv_75).set_input_filter(weight); + auto conv_77 = 
op::Conv2D("conv77").set_input_x(conv_76).set_input_filter(weight); + auto conv_78 = op::Conv2D("conv78").set_input_x(conv_77).set_input_filter(weight); + auto conv_79 = op::Conv2D("conv79").set_input_x(conv_78).set_input_filter(weight); + auto conv_80 = op::Conv2D("conv80").set_input_x(conv_79).set_input_filter(weight); + auto conv_81 = op::Conv2D("conv81").set_input_x(conv_80).set_input_filter(weight); + auto conv_82 = op::Conv2D("conv82").set_input_x(conv_81).set_input_filter(weight); + auto conv_83 = op::Conv2D("conv83").set_input_x(conv_82).set_input_filter(weight); + auto conv_84 = op::Conv2D("conv84").set_input_x(conv_83).set_input_filter(weight); + auto conv_85 = op::Conv2D("conv85").set_input_x(conv_84).set_input_filter(weight); + auto conv_86 = op::Conv2D("conv86").set_input_x(conv_85).set_input_filter(weight); + auto conv_87 = op::Conv2D("conv87").set_input_x(conv_86).set_input_filter(weight); + auto conv_88 = op::Conv2D("conv88").set_input_x(conv_87).set_input_filter(weight); + auto conv_89 = op::Conv2D("conv89").set_input_x(conv_88).set_input_filter(weight); + auto conv_90 = op::Conv2D("conv90").set_input_x(conv_89).set_input_filter(weight); + auto conv_91 = op::Conv2D("conv91").set_input_x(conv_80).set_input_filter(weight); + auto conv_92 = op::Conv2D("conv92").set_input_x(conv_91).set_input_filter(weight); + auto conv_93 = op::Conv2D("conv93").set_input_x(conv_92).set_input_filter(weight); + auto conv_94 = op::Conv2D("conv94").set_input_x(conv_93).set_input_filter(weight); + auto conv_95 = op::Conv2D("conv95").set_input_x(conv_94).set_input_filter(weight); + auto conv_96 = op::Conv2D("conv96").set_input_x(conv_95).set_input_filter(weight); + auto conv_97 = op::Conv2D("conv97").set_input_x(conv_96).set_input_filter(weight); + auto conv_98 = op::Conv2D("conv98").set_input_x(conv_97).set_input_filter(weight); + auto conv_99 = op::Conv2D("conv99").set_input_x(conv_98).set_input_filter(weight); + auto conv_100 = 
op::Conv2D("conv100").set_input_x(conv_99).set_input_filter(weight); + auto conv_101 = op::Conv2D("conv101").set_input_x(conv_100).set_input_filter(weight); + auto conv_102 = op::Conv2D("conv102").set_input_x(conv_101).set_input_filter(weight); + auto conv_103 = op::Conv2D("conv103").set_input_x(conv_102).set_input_filter(weight); + auto conv_104 = op::Conv2D("conv104").set_input_x(conv_103).set_input_filter(weight); + auto conv_105 = op::Conv2D("conv105").set_input_x(conv_104).set_input_filter(weight); + auto conv_106 = op::Conv2D("conv106").set_input_x(conv_105).set_input_filter(weight); + auto conv_107 = op::Conv2D("conv107").set_input_x(conv_106).set_input_filter(weight); + auto conv_108 = op::Conv2D("conv108").set_input_x(conv_107).set_input_filter(weight); + auto conv_109 = op::Conv2D("conv109").set_input_x(conv_108).set_input_filter(weight); + auto conv_110 = op::Conv2D("conv110").set_input_x(conv_109).set_input_filter(weight); + auto conv_111 = op::Conv2D("conv111").set_input_x(conv_110).set_input_filter(weight); + auto conv_112 = op::Conv2D("conv112").set_input_x(conv_111).set_input_filter(weight); + auto conv_113 = op::Conv2D("conv113").set_input_x(conv_112).set_input_filter(weight); + auto conv_114 = op::Conv2D("conv114").set_input_x(conv_113).set_input_filter(weight); + auto conv_115 = op::Conv2D("conv115").set_input_x(conv_114).set_input_filter(weight); + auto conv_116 = op::Conv2D("conv116").set_input_x(conv_115).set_input_filter(weight); + auto conv_117 = op::Conv2D("conv117").set_input_x(conv_116).set_input_filter(weight); + auto conv_118 = op::Conv2D("conv118").set_input_x(conv_117).set_input_filter(weight); + auto conv_119 = op::Conv2D("conv119").set_input_x(conv_118).set_input_filter(weight); + auto conv_120 = op::Conv2D("conv120").set_input_x(conv_119).set_input_filter(weight); + auto conv_121 = op::Conv2D("conv121").set_input_x(conv_120).set_input_filter(weight); + auto conv_122 = 
op::Conv2D("conv122").set_input_x(conv_121).set_input_filter(weight); + auto conv_123 = op::Conv2D("conv123").set_input_x(conv_122).set_input_filter(weight); + auto conv_124 = op::Conv2D("conv124").set_input_x(conv_123).set_input_filter(weight); + auto conv_125 = op::Conv2D("conv125").set_input_x(conv_124).set_input_filter(weight); + auto conv_126 = op::Conv2D("conv126").set_input_x(conv_125).set_input_filter(weight); + auto conv_127 = op::Conv2D("conv127").set_input_x(conv_126).set_input_filter(weight); + auto conv_128 = op::Conv2D("conv128").set_input_x(conv_127).set_input_filter(weight); + auto conv_129 = op::Conv2D("conv129").set_input_x(conv_128).set_input_filter(weight); + auto conv_130 = op::Conv2D("conv130").set_input_x(conv_129).set_input_filter(weight); + + std::vector inputs{data}; + std::vector outputs{conv_130}; + graph.SetInputs(inputs).SetOutputs(outputs); +} + +int GetDatTypeSize(DataType dt) { + int dailation = 1; + if (dt == ge::DT_FLOAT) + dailation = 4; + else if (dt == ge::DT_FLOAT16) + dailation = 2; + else if (dt == ge::DT_INT16) + dailation = 2; + else if (dt == ge::DT_UINT16) + dailation = 2; + else if (dt == ge::DT_INT32) + dailation = 4; + else if (dt == ge::DT_UINT32) + dailation = 4; + else if (dt == ge::DT_INT64) + dailation = 8; + else if (dt == ge::DT_UINT64) + dailation = 8; + else if (dt == ge::DT_INT8) + dailation = 1; + + return dailation; +} + +int buildConvGraph_new(Graph &graph, std::vector desc_var, std::vector name_var, int flag, + Format format) { + auto data_x_shape = op::Data("xShape").set_attr_index(0); + auto var = op::Variable(name_var[0]); + auto var1 = op::Variable(name_var[1]); //add one seat of ApplyMomentum() + auto label1 = op::Variable(name_var[2]); //add one seat of ApplyMomentum() + auto conv2dgrad = op::Conv2DBackpropFilterD("output_1"); + auto test2 = op::ApplyMomentum(); + + var.update_output_desc_y(desc_var[0]); + var1.update_output_desc_y(desc_var[1]); + label1.update_output_desc_y(desc_var[2]); + + 
graph.AddOp(var); + graph.AddOp(var1); + graph.AddOp(label1); + + auto conv2d = op::Conv2D().set_input_x(data_x_shape).set_input_filter(var).set_attr_strides({1, 1, 1, 1}); + update_op_format(conv2d, format); + ge::TensorDesc tensor_desc_w = conv2d.GetInputDesc("filter"); + tensor_desc_w.SetFormat(format); + conv2d.UpdateInputDesc("filter", tensor_desc_w); + + if (flag >= 1) { + conv2dgrad.set_input_x(data_x_shape) + .set_attr_filter_sizes(desc_var[0].GetShape().GetDims()) + .set_input_out_backprop(conv2d) + .set_attr_strides({1, 1}) + .set_attr_pads({0, 0, 0, 0}); + update_op_format(conv2dgrad, format); + graph.AddOp(conv2dgrad); + } + if (flag >= 2) { + // set conv2dgrad var + test2.set_input_accum(var1) + .set_input_grad(conv2dgrad) + .set_input_lr(label1) + .set_input_momentum(label1) + .set_input_var(var); + graph.AddOp(test2); + } + + std::vector inputs{data_x_shape}; // set all val + std::vector outputs{conv2d}; + graph.SetInputs(inputs).SetOutputs(outputs); + graph.AddOp(conv2d); + + return 0; +} + +/// load bin data_fail +/// input_path: path of bin data_file +/// shapes: the shape of Tensor +/// ft: the format of Tensor +/// dt: the dataType of Tensor +Tensor load_variable_input_data(string input_path, std::vector shapes, Format ft, DataType dt) { + vector dim_info1; + + uint8_t *input_data = (uint8_t *)readTestDataFile(input_path, dim_info1); // common.h + TensorDesc input_tensor_desc = TensorDesc(ge::Shape(shapes), ft, dt); + input_tensor_desc.SetRealDimCnt(shapes.size()); + Tensor input_tensor = Tensor(input_tensor_desc, input_data, GetDatTypeSize(dt) * dim_info1[dim_info1[0] + 1]); + return input_tensor; +} diff --git a/tests/st/resnet50/common.h b/tests/st/resnet50/common.h new file mode 100644 index 00000000..75805db7 --- /dev/null +++ b/tests/st/resnet50/common.h @@ -0,0 +1,102 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ST_RESNET50_GE_COMMON_H_ +#define ST_RESNET50_GE_COMMON_H_ +#include "common/ge_inner_error_codes.h" +#include "utils/tensor_utils.h" + +#define MY_USER_GE_LOGI(...) GE_LOG_INFO(1, __VA_ARGS__) +#define MY_USER_GE_LOGW(...) GE_LOG_WARN(1, __VA_ARGS__) +#define MY_USER_GE_LOGE(...) GE_LOG_ERROR(1, 3, __VA_ARGS__) + +#ifndef USER_GE_LOGI +#define USER_GE_LOGI MY_USER_GE_LOGI +#endif // USER_GE_LOGI + +#ifndef USER_GE_LOGW +#define USER_GE_LOGW MY_USER_GE_LOGW +#endif // USER_GE_LOGW + +#ifndef USER_GE_LOGE +#define USER_GE_LOGE MY_USER_GE_LOGE +#endif // USER_GE_LOGE + +/// train_flag is 0 when infer, train_flag is 1 when train.this param is set for RunGranph_readData() and +/// RunGraph_initData() +#define TRAIN_FLAG_INFER "infer" +#define TRAIN_FLAG_TRAIN "train" + +#include +#include +#include +#include +#include +#include +#include + +#include "ge_api.h" +#include "graph.h" +#include "ptest.h" +#include "ops/all_ops.h" +using namespace std; +using namespace ge; + +// read bin file and compile result +void update_op_format(Operator ops, Format format = ge::FORMAT_NCHW); +void getDimInfo(FILE *fp, std::vector &dim_info); +void *readTestDataFile(std::string infile, std::vector &dim_info); +void *readUint8TestDataFile(std::string infile, int size); +bool allclose(float *a, float *b, uint64_t count, float rtol, float atol); +bool compFp32WithTData(float *actual_output_data, std::string expected_data_file, float rtol, float atol); +Tensor load_variable_input_data(string input_path, std::vector shapes, Format 
ft = ge::FORMAT_NCHW, + DataType dt = ge::DT_FLOAT); +// constructor Tensor +int GetDatTypeSize(DataType dt); +ge::Tensor genTensor(std::vector tensor_shape, Format format = ge::FORMAT_NCHW, DataType dt = ge::DT_FLOAT); +ge::Tensor genTensor_withVaule(std::vector tensor_shape, float value = 1); +Tensor genTesnor_Shape_as_data(std::vector tensor_shape); +// Init GE +ge::Status GEInitialize_api(string train_flag = "0", string run_mode_path = "0"); +ge::Status GEInitialize_api_new(string train_flag = "infer", string run_mode = "fe"); +ge::Status GEFinalize_api(); +// constructor session and build graph +ge::Session *create_aipp_session(); +ge::Session *create_session(); +ge::Status session_add_and_run_graph(ge::Session *session, uint32_t graphId, Graph &graph, std::vector inputs, + std::vector &outputs); + +// common interface for infer +int RunGraph_initData(Graph &graph, string op_name, map> attr_test, + string train_flag = "infer", string run_mode_path = "fe"); +void Inputs_load_Data(string op_name, std::vector &input, map> attr_test, + Format format = ge::FORMAT_NCHW, DataType dt = ge::DT_FLOAT); +bool comparaData(std::vector &output, string op_name, map> attr_test); +int RunGraph_readData(Graph &graph, string op_name, map> attr_test, + string train_flag = "infer", string run_mode_path = "fe", Format format = ge::FORMAT_NCHW, + DataType dt = ge::DT_FLOAT); + +// common interface for train +int buildCheckPointGraph(Graph &graph, map variables); +int buildInitGraph(Graph &graph, std::vector desc_var, std::vector name_var, + std::vector values_var); +int buildInitGraph_other_dataType(Graph &graph, std::vector desc_var, std::vector name_var); + +bool build_multi_input_multi_output_graph(Graph &graph); +void build_big_graph(Graph &graph, map> attr); +int buildConvGraph_new(Graph &graph, std::vector desc_var, std::vector name_var, int flag = 2); + +#endif // ST_RESNET50_GE_COMMON_H_ diff --git a/tests/st/resnet50/ptest.h b/tests/st/resnet50/ptest.h new file mode 100644 
index 00000000..568969f8 --- /dev/null +++ b/tests/st/resnet50/ptest.h @@ -0,0 +1,225 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef ST_RESNET50_PTEST_H_ +#define ST_RESNET50_PTEST_H_ + +#include +#include +#include +#include +#include +#include +#include +#include +#include + +namespace ptest { +class assertion_error : public std::exception { + public: + const char *what() const throw() { return "Assertion Exception"; } +}; + +class TestFixture { + public: + virtual void SetUp() {} + virtual void TearDown() {} + void Run() { _func(); } + void BindFunction(std::function function) { _func = function; } + void SetName(const std::string &name) { _name = name; } + std::string Name() const { return _name; } + virtual ~TestFixture() {} + + private: + std::function _func; + std::string _name; +}; + +enum TestResult { SUCCESS, FAILED, UNAVAILABLE, UNKNOWN, NOCASEFOUND }; + +class TestManager { + public: + static TestManager &GetSingleton() { + static TestManager instance; + return instance; + } + void RegisterTest(const std::string &name, TestFixture *fixture) { _testfixtures[name] = fixture; } + + const std::string GetRunningTestcaseName() const { return _running_testcase_name; } + + const std::list GetAllTestNames() const { + std::list result; + for (auto &t : _testfixtures) { + result.push_back(t.first); + } + return result; + } + + TestResult RunTest(const std::string &name) { + if 
(_testfixtures.find(name) == _testfixtures.end()) { + return NOCASEFOUND; + } + + _running_testcase_name = name; + + do { + SetTestResult(name, UNKNOWN); + _testfixtures[name]->SetUp(); + if (_testresults[name] == FAILED) { + _testresults[name] = UNAVAILABLE; + break; + } + SetTestResult(name, SUCCESS); + try { + _testfixtures[name]->Run(); + } catch (assertion_error &e) { + // Do nothing as the error has been handled by the TestManager. + } + _testfixtures[name]->TearDown(); + } while (0); + + return _testresults[name]; + } + void SetTestResult(const std::string &name, TestResult result) { _testresults[name] = result; } + TestResult GetTestResult(const std::string &name) { return _testresults[name]; } + + private: + std::map _testfixtures; + std::map _testresults; + std::string _running_testcase_name; +}; + +class TestFixtureRegister { + public: + TestFixtureRegister(const std::string &name, TestFixture *fixture, std::function function) { + fixture->BindFunction(function); + fixture->SetName(name); + TestManager::GetSingleton().RegisterTest(name, fixture); + } +}; +} // namespace ptest + +#define _STR(x) #x +#define _EMPTY_NAMESPACE + +#define _TEST(NAMESPACE, FIXTURECLASS, TESTNAME, CASENAME) \ + void g_func_##TESTNAME##_##CASENAME(void); \ + NAMESPACE::FIXTURECLASS g_fixture_##TESTNAME##_##CASENAME; \ + ptest::TestFixtureRegister g_register_##TESTNAME##_##CASENAME( \ + _STR(TESTNAME##_##CASENAME), &g_fixture_##TESTNAME##_##CASENAME, g_func_##TESTNAME##_##CASENAME); \ + void g_func_##TESTNAME##_##CASENAME(void) + +#define TEST(TESTNAME, CASENAME) _TEST(ptest, TestFixture, TESTNAME, CASENAME) + +#define TEST_F(TESTFIXTURE, CASENAME) _TEST(_EMPTY_NAMESPACE, TESTFIXTURE, TESTFIXTURE, CASENAME) + +#define EXPECT_TRUE(X) \ + do { \ + if (!(X)) { \ + std::string test_name = ptest::TestManager::GetSingleton().GetRunningTestcaseName(); \ + ptest::TestManager::GetSingleton().SetTestResult(test_name, ptest::FAILED); \ + std::cerr << #X << "Expectation Failed\n" \ + << 
"Testcase Name: " << test_name << "\n" \ + << "File: " __FILE__ << "\tLine:" << __LINE__ << std::endl; \ + } \ + } while (0); + +// With the macro definition ensures that the compiler can detect compiler warning. +#define Max_Log_Len 1024 +#define PRINT_ERR(lpszFormat, ...) \ + do { \ + char szTmpBuf[Max_Log_Len + 1] = {0}; \ + snprintf(szTmpBuf, Max_Log_Len, lpszFormat, ##__VA_ARGS__); \ + std::cerr << szTmpBuf << std::endl; \ + } while (0) + +// Increase the content of print error messages and error to facilitate rapid analysis +#define EXPECT_TRUE_C(X, ERR_TYPE, format, ...) \ + do { \ + if (!(X)) { \ + std::string test_name = ptest::TestManager::GetSingleton().GetRunningTestcaseName(); \ + ptest::TestManager::GetSingleton().SetTestResult(test_name, ptest::FAILED); \ + std::cerr << #X << " Expectation Failed." \ + << "Testcase Name: " << test_name << " File:" __FILE__ << " Line:" << __LINE__ << std::endl; \ + PRINT_ERR("[" ERR_TYPE "]" format, ##__VA_ARGS__); \ + } \ + } while (0) + +#define ASSERT_TRUE(X) \ + do { \ + if (!(X)) { \ + std::string test_name = ptest::TestManager::GetSingleton().GetRunningTestcaseName(); \ + ptest::TestManager::GetSingleton().SetTestResult(test_name, ptest::FAILED); \ + std::cerr << #X << "Assertion Failed\n" \ + << "Testcase Name: " << test_name << "\n" \ + << "File: " __FILE__ << "\tLine:" << __LINE__ << std::endl; \ + throw ptest::assertion_error(); \ + } \ + } while (0); + +// Add printing error information and error line content for quick analysis +#define ASSERT_TRUE_C(X, ERR_TYPE, format, ...) \ + do { \ + if (!(X)) { \ + std::string test_name = ptest::TestManager::GetSingleton().GetRunningTestcaseName(); \ + ptest::TestManager::GetSingleton().SetTestResult(test_name, ptest::FAILED); \ + std::cerr << #X << " Assertion Failed." 
\ + << "Testcase Name: " << test_name << " File:" __FILE__ << " Line:" << __LINE__ << std::endl; \ + PRINT_ERR("[" ERR_TYPE "]" format, ##__VA_ARGS__); \ + throw ptest::assertion_error(); \ + } \ + } while (0); + +#define CONFIG_ERR "CONFIG_ERR" +#define LOAD_MODEL_ERR "LOAD_MODEL_ERR" +#define FILE_READ_ERR "FILE_READ_ERR" +#define RUN_ERROR "RUN_ERROR" +#define MEM_ERROR "MEM_ERROR" +#define RESULT_ERR "RESULT_ERR" + +#define EXPECT_FALSE(X) EXPECT_TRUE(!(X)) +#define EXPECT_EQ(X, Y) EXPECT_TRUE(((X) == (Y))) +#define EXPECT_NE(X, Y) EXPECT_TRUE(((X) != (Y))) +#define EXPECT_GT(X, Y) EXPECT_TRUE(((X) > (Y))) +#define EXPECT_GE(X, Y) EXPECT_TRUE(((X) >= (Y))) +#define EXPECT_LT(X, Y) EXPECT_TRUE(((X) < (Y))) +#define EXPECT_LE(X, Y) EXPECT_TRUE(((X) <= (Y))) + +#define EXPECT_FALSE_C(X, ERR_TYPE, format, ...) EXPECT_TRUE_C(!(X), ERR_TYPE, format, ##__VA_ARGS__) +#define EXPECT_EQ_C(X, Y, ERR_TYPE, format, ...) EXPECT_TRUE_C(((X) == (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define EXPECT_NE_C(X, Y, ERR_TYPE, format, ...) EXPECT_TRUE_C(((X) != (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define EXPECT_GT_C(X, Y, ERR_TYPE, format, ...) EXPECT_TRUE_C(((X) > (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define EXPECT_GE_C(X, Y, ERR_TYPE, format, ...) EXPECT_TRUE_C(((X) >= (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define EXPECT_LT_C(X, Y, ERR_TYPE, format, ...) EXPECT_TRUE_C(((X) < (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define EXPECT_LE_C(X, Y, ERR_TYPE, format, ...) EXPECT_TRUE_C(((X) <= (Y)), ERR_TYPE, format, ##__VA_ARGS__) + +#define ASSERT_FALSE(X) ASSERT_TRUE(!(X)) +#define ASSERT_EQ(X, Y) ASSERT_TRUE(((X) == (Y))) +#define ASSERT_NE(X, Y) ASSERT_TRUE(((X) != (Y))) +#define ASSERT_GT(X, Y) ASSERT_TRUE(((X) > (Y))) +#define ASSERT_GE(X, Y) ASSERT_TRUE(((X) >= (Y))) +#define ASSERT_LT(X, Y) ASSERT_TRUE(((X) < (Y))) +#define ASSERT_LE(X, Y) ASSERT_TRUE(((X) <= (Y))) + +#define ASSERT_FALSE_C(X, ERR_TYPE, format, ...) 
ASSERT_TRUE_C(!(X), ERR_TYPE, format, ##__VA_ARGS__) +#define ASSERT_EQ_C(X, Y, ERR_TYPE, format, ...) ASSERT_TRUE_C(((X) == (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define ASSERT_NE_C(X, Y, ERR_TYPE, format, ...) ASSERT_TRUE_C(((X) != (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define ASSERT_GT_C(X, Y, ERR_TYPE, format, ...) ASSERT_TRUE_C(((X) > (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define ASSERT_GE_C(X, Y, ERR_TYPE, format, ...) ASSERT_TRUE_C(((X) >= (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define ASSERT_LT_C(X, Y, ERR_TYPE, format, ...) ASSERT_TRUE_C(((X) < (Y)), ERR_TYPE, format, ##__VA_ARGS__) +#define ASSERT_LE_C(X, Y, ERR_TYPE, format, ...) ASSERT_TRUE_C(((X) <= (Y)), ERR_TYPE, format, ##__VA_ARGS__) + +#endif // ST_RESNET50_PTEST_H_ diff --git a/tests/st/resnet50/resnet50_train.cc b/tests/st/resnet50/resnet50_train.cc new file mode 100644 index 00000000..4242439f --- /dev/null +++ b/tests/st/resnet50/resnet50_train.cc @@ -0,0 +1,852 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include + +#include "common.h" +#include "ge_api.h" +#include "graph.h" +#include "ops/all_ops.h" +#include "types.h" +#include "utils/tensor_utils.h" + +using namespace std; +using namespace ge; +using namespace op; + +typedef bool (*Func)(Graph &graph); + +#define PADDING_MODE 6 +#define GRAD_PADDING_MODE 3 +vector pad_1{1, 1, 1, 1}; +vector pad_0{0, 0, 0, 0}; +vector stride_1{1, 1}; +vector stride_2{2, 2}; + +// (int out_channels, int h, int w, vector stride{1,1}, vector pad{1,1,1,1}, op::Data() input) +#define GENERATE_CONV_VAR(LAYER, BLK, OPNUM, in_channels, out_channels, h, w, stride, pad, input) \ + auto &LAYER##_##BLK##_##OPNUM##_input = input; \ + \ + TensorDesc LAYER##_##BLK##_##OPNUM##_desc(ge::Shape({out_channels, in_channels, h, w}), FORMAT_NCHW, DT_FLOAT); \ + auto LAYER##_##BLK##_##OPNUM##_weight = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_weight"); \ + LAYER##_##BLK##_##OPNUM##_weight.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mom_weight = \ + op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_mom_weight"); \ + LAYER##_##BLK##_##OPNUM##_mom_weight.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + LAYER##_##BLK##_##OPNUM##_mom_weight.update_input_desc_x(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + cout << string(#LAYER) + string(#BLK) + string(#OPNUM) << "'s weight shape is:" << in_channels << out_channels << h \ + << w << endl; \ + cout << string(#LAYER) + string(#BLK) + string(#OPNUM) \ + << "'s input_x op's shape is:" << input.GetOutputDesc("y").GetShape().GetDim(2) << endl; \ + auto LAYER##_##BLK##_##OPNUM##_tmp_dims = input.GetOutputDesc("y").GetShape().GetDims(); \ + for (auto LAYER##_##BLK##_##OPNUM##_tmp_it = LAYER##_##BLK##_##OPNUM##_tmp_dims.begin(); \ + LAYER##_##BLK##_##OPNUM##_tmp_it != LAYER##_##BLK##_##OPNUM##_tmp_dims.end(); \ + LAYER##_##BLK##_##OPNUM##_tmp_it++) { \ + cout << 
*LAYER##_##BLK##_##OPNUM##_tmp_it; \ + } \ + cout << endl; \ + \ + auto LAYER##_##BLK##_##OPNUM = op::Conv2D(string(#LAYER) + string(#BLK) + string(#OPNUM)) \ + .set_input_x(input, "y") \ + .set_input_filter(LAYER##_##BLK##_##OPNUM##_weight) \ + .set_attr_strides({1, 1, stride[0], stride[1]}) \ + .set_attr_pads(pad); \ + update_op_format(LAYER##_##BLK##_##OPNUM); + +#define GENERATE_CONSTANT(LAYER, BLK, OPNUM, CONSTNAME) \ + Tensor LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_tensor; \ + float *LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_data = new float[LAYER##_##BLK##_##OPNUM##_size]; \ + for (int i = 0; i < (int)LAYER##_##BLK##_##OPNUM##_size; i++) { \ + *(LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_data + i) = 0.01; \ + } \ + LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_tensor.SetData((uint8_t *)LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_data, \ + LAYER##_##BLK##_##OPNUM##_size * sizeof(float)); \ + LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_tensor.SetTensorDesc(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_constant = \ + op::Constant().set_attr_value(LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_tensor); \ + LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_constant.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + delete[] LAYER##_##BLK##_##OPNUM##_##CONSTNAME##_data; + +#define GENERATE_CONV_VAR_VAR(LAYER, BLK, OPNUM, in_channels, out_channels, h, w, stride, pad, input) \ + TensorDesc LAYER##_##BLK##_##OPNUM##_desc(ge::Shape({out_channels, in_channels, h, w}), FORMAT_NCHW, DT_FLOAT); \ + uint32_t LAYER##_##BLK##_##OPNUM##_size = LAYER##_##BLK##_##OPNUM##_desc.GetShape().GetShapeSize(); \ + auto LAYER##_##BLK##_##OPNUM##_weight = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_weight"); \ + LAYER##_##BLK##_##OPNUM##_weight.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mom_weight = \ + op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_mom_weight"); \ + 
LAYER##_##BLK##_##OPNUM##_mom_weight.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + GENERATE_CONSTANT(LAYER, BLK, OPNUM, weight); \ + auto LAYER##_##BLK##_##OPNUM##_weight_assign = op::Assign() \ + .set_input_ref(LAYER##_##BLK##_##OPNUM##_weight) \ + .set_input_value(LAYER##_##BLK##_##OPNUM##_weight_constant); \ + \ + GENERATE_CONSTANT(LAYER, BLK, OPNUM, mom_weight); \ + auto LAYER##_##BLK##_##OPNUM##_mom_weight_assign = \ + op::Assign() \ + .set_input_ref(LAYER##_##BLK##_##OPNUM##_mom_weight) \ + .set_input_value(LAYER##_##BLK##_##OPNUM##_mom_weight_constant); \ + \ + input.push_back(LAYER##_##BLK##_##OPNUM##_weight); \ + input.push_back(LAYER##_##BLK##_##OPNUM##_mom_weight); + +// (int out_channels, Operator& input) +#define GENERATE_BN_VAR(LAYER, BLK, OPNUM, out_channels, input) \ + auto &LAYER##_##BLK##_##OPNUM##_input = input; \ + \ + TensorDesc LAYER##_##BLK##_##OPNUM##_desc(ge::Shape({1, out_channels, 1, 1}), FORMAT_NCHW, DT_FLOAT); \ + auto LAYER##_##BLK##_##OPNUM##_scale = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_scale"); \ + LAYER##_##BLK##_##OPNUM##_scale.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mom_scale = \ + op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_mom_scale"); \ + LAYER##_##BLK##_##OPNUM##_mom_scale.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_b = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_b"); \ + LAYER##_##BLK##_##OPNUM##_b.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mom_b = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_mom_b"); \ + LAYER##_##BLK##_##OPNUM##_mom_b.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mean = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_mean"); \ + 
LAYER##_##BLK##_##OPNUM##_mean.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + auto LAYER##_##BLK##_##OPNUM##_variance = \ + op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_variance"); \ + LAYER##_##BLK##_##OPNUM##_variance.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM = op::FusedBatchNorm(string(#LAYER) + string(#BLK) + string(#OPNUM)) \ + .set_input_x(input, "y") \ + .set_input_scale(LAYER##_##BLK##_##OPNUM##_scale) \ + .set_input_b(LAYER##_##BLK##_##OPNUM##_b) \ + .set_input_mean(LAYER##_##BLK##_##OPNUM##_mean) \ + .set_input_variance(LAYER##_##BLK##_##OPNUM##_variance) \ + .set_attr_mode(1) \ + .set_attr_epsilon(1e-5) \ + .set_attr_is_training(true); + +#define GENERATE_BN_VAR_VAR(LAYER, BLK, OPNUM, out_channels, input) \ + TensorDesc LAYER##_##BLK##_##OPNUM##_desc(ge::Shape({1, out_channels, 1, 1}), FORMAT_NCHW, DT_FLOAT); \ + uint32_t LAYER##_##BLK##_##OPNUM##_size = LAYER##_##BLK##_##OPNUM##_desc.GetShape().GetShapeSize(); \ + auto LAYER##_##BLK##_##OPNUM##_scale = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_scale"); \ + LAYER##_##BLK##_##OPNUM##_scale.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mom_scale = \ + op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_mom_scale"); \ + LAYER##_##BLK##_##OPNUM##_mom_scale.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_b = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_b"); \ + LAYER##_##BLK##_##OPNUM##_b.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mom_b = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_mom_b"); \ + LAYER##_##BLK##_##OPNUM##_mom_b.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mean = op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_mean"); \ + 
LAYER##_##BLK##_##OPNUM##_mean.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + auto LAYER##_##BLK##_##OPNUM##_variance = \ + op::Variable(string(#LAYER) + string(#BLK) + string(#OPNUM) + "_variance"); \ + LAYER##_##BLK##_##OPNUM##_variance.update_output_desc_y(LAYER##_##BLK##_##OPNUM##_desc); \ + \ + GENERATE_CONSTANT(LAYER, BLK, OPNUM, scale); \ + \ + auto LAYER##_##BLK##_##OPNUM##_scale_assign = op::Assign() \ + .set_input_ref(LAYER##_##BLK##_##OPNUM##_scale) \ + .set_input_value(LAYER##_##BLK##_##OPNUM##_scale_constant); \ + GENERATE_CONSTANT(LAYER, BLK, OPNUM, mom_scale); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mom_scale_assign = \ + op::Assign() \ + .set_input_ref(LAYER##_##BLK##_##OPNUM##_mom_scale) \ + .set_input_value(LAYER##_##BLK##_##OPNUM##_mom_scale_constant); \ + \ + GENERATE_CONSTANT(LAYER, BLK, OPNUM, b); \ + \ + auto LAYER##_##BLK##_##OPNUM##_b_assign = \ + op::Assign().set_input_ref(LAYER##_##BLK##_##OPNUM##_b).set_input_value(LAYER##_##BLK##_##OPNUM##_b_constant); \ + \ + GENERATE_CONSTANT(LAYER, BLK, OPNUM, mom_b); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mom_b_assign = op::Assign() \ + .set_input_ref(LAYER##_##BLK##_##OPNUM##_mom_b) \ + .set_input_value(LAYER##_##BLK##_##OPNUM##_mom_b_constant); \ + GENERATE_CONSTANT(LAYER, BLK, OPNUM, mean); \ + \ + auto LAYER##_##BLK##_##OPNUM##_mean_assign = op::Assign() \ + .set_input_ref(LAYER##_##BLK##_##OPNUM##_mean) \ + .set_input_value(LAYER##_##BLK##_##OPNUM##_mean_constant); \ + \ + GENERATE_CONSTANT(LAYER, BLK, OPNUM, variance); \ + \ + auto LAYER##_##BLK##_##OPNUM##_variance_assign = op::Assign() \ + .set_input_ref(LAYER##_##BLK##_##OPNUM##_variance) \ + .set_input_value(LAYER##_##BLK##_##OPNUM##_variance_constant); \ + \ + input.push_back(LAYER##_##BLK##_##OPNUM##_scale); \ + input.push_back(LAYER##_##BLK##_##OPNUM##_mom_scale); \ + input.push_back(LAYER##_##BLK##_##OPNUM##_b); \ + input.push_back(LAYER##_##BLK##_##OPNUM##_mom_b); \ + input.push_back(LAYER##_##BLK##_##OPNUM##_mean); \ + 
input.push_back(LAYER##_##BLK##_##OPNUM##_variance); + +// (int out_channels, Operator& input) +#define GENERATE_RELU_VAR(LAYER, BLK, OPNUM, input) \ + auto &LAYER##_##BLK##_##OPNUM##_input = input; \ + auto LAYER##_##BLK##_##OPNUM = op::Relu(string(#LAYER) + string(#BLK) + string(#OPNUM)).set_input_x(input, "y"); + +// (int out_channels, Operator& input) +#define GENERATE_MAXPOOL_VAR(LAYER, BLK, OPNUM, input) \ + auto &LAYER##_##BLK##_##OPNUM##_input = input; \ + \ + auto LAYER##_##BLK##_##OPNUM = op::MaxPoolWithArgmax(string(#LAYER) + string(#BLK) + string(#OPNUM)) \ + .set_input_x(input, "y") \ + .set_attr_ksize({1, 3, 3, 1}) \ + .set_attr_padding("SAME") \ + .set_attr_strides({1, 2, 2, 1}); + +// (int out_channels, Operator& input) +#define GENERATE_ADD_VAR(LAYER, BLK, OPNUM, input_x1, input_x2) \ + auto LAYER##_##BLK##_##OPNUM = \ + op::Add(string(#LAYER) + string(#BLK) + string(#OPNUM)).set_input_x1(input_x1, "y").set_input_x2(input_x2, "y"); + +// (int in_channels, int out_channels,vector stride{1,1}, Operator& input) +#define MAKE_RESIDUAL_BLOCK(LAYER, BLK, in_channels, out_channels, stride, input) \ + auto &LAYER##_##BLK##_input = input; \ + auto &LAYER##_##BLK##_stride = stride; \ + int LAYER##_##BLK##_out_chls = out_channels / 4; \ + \ + GENERATE_CONV_VAR(LAYER, BLK, conv1, in_channels, LAYER##_##BLK##_out_chls, 1, 1, stride, pad_0, input); \ + GENERATE_BN_VAR(LAYER, BLK, bn1, LAYER##_##BLK##_out_chls, LAYER##_##BLK##_conv1); \ + GENERATE_RELU_VAR(LAYER, BLK, relu1, LAYER##_##BLK##_bn1); \ + \ + GENERATE_CONV_VAR(LAYER, BLK, conv2, LAYER##_##BLK##_out_chls, LAYER##_##BLK##_out_chls, 3, 3, stride_1, pad_1, \ + LAYER##_##BLK##_relu1); \ + GENERATE_BN_VAR(LAYER, BLK, bn2, LAYER##_##BLK##_out_chls, LAYER##_##BLK##_conv2); \ + GENERATE_RELU_VAR(LAYER, BLK, relu2, LAYER##_##BLK##_bn2); \ + \ + GENERATE_CONV_VAR(LAYER, BLK, conv3, LAYER##_##BLK##_out_chls, out_channels, 1, 1, stride_1, pad_0, \ + LAYER##_##BLK##_relu2); \ + GENERATE_BN_VAR(LAYER, BLK, bn3, 
out_channels, LAYER##_##BLK##_conv3); \ + \ + GENERATE_CONV_VAR(LAYER, BLK, conv4, in_channels, out_channels, 1, 1, stride, pad_0, input); \ + GENERATE_BN_VAR(LAYER, BLK, bn4, out_channels, LAYER##_##BLK##_conv4); \ + \ + GENERATE_ADD_VAR(LAYER, BLK, add5, LAYER##_##BLK##_bn3, LAYER##_##BLK##_bn4); \ + GENERATE_RELU_VAR(LAYER, BLK, relu5, LAYER##_##BLK##_add5); \ + \ + auto &LAYER##_##BLK##_output = LAYER##_##BLK##_relu5; \ + auto &LAYER##_##BLK##_output_label = "y"; + +#define MAKE_RESIDUAL_BLOCK_VAR(LAYER, BLK, in_channels, out_channels, stride, input) \ + int LAYER##_##BLK##_out_chls = out_channels / 4; \ + GENERATE_CONV_VAR_VAR(LAYER, BLK, conv1, in_channels, LAYER##_##BLK##_out_chls, 1, 1, stride, pad_0, input); \ + GENERATE_BN_VAR_VAR(LAYER, BLK, bn1, LAYER##_##BLK##_out_chls, input); \ + \ + GENERATE_CONV_VAR_VAR(LAYER, BLK, conv2, LAYER##_##BLK##_out_chls, LAYER##_##BLK##_out_chls, 3, 3, stride_1, pad_1, \ + input); \ + GENERATE_BN_VAR_VAR(LAYER, BLK, bn2, LAYER##_##BLK##_out_chls, input); \ + \ + GENERATE_CONV_VAR_VAR(LAYER, BLK, conv3, LAYER##_##BLK##_out_chls, out_channels, 1, 1, stride_1, pad_0, input); \ + GENERATE_BN_VAR_VAR(LAYER, BLK, bn3, out_channels, input); \ + \ + GENERATE_CONV_VAR_VAR(LAYER, BLK, conv4, in_channels, out_channels, 1, 1, stride, pad_0, input); \ + GENERATE_BN_VAR_VAR(LAYER, BLK, bn4, out_channels, input); + +// (int in_channels, int out_channels,vector stride{1,1}, Operator& input) +#define MAKE_NORMAL_BLOCK(LAYER, BLK, in_channels, out_channels, stride, input) \ + auto &LAYER##_##BLK##_input = input; \ + auto &LAYER##_##BLK##_stride = stride; \ + int LAYER##_##BLK##_out_chls = out_channels / 4; \ + \ + GENERATE_CONV_VAR(LAYER, BLK, conv1, in_channels, LAYER##_##BLK##_out_chls, 1, 1, stride, pad_0, input); \ + GENERATE_BN_VAR(LAYER, BLK, bn1, LAYER##_##BLK##_out_chls, LAYER##_##BLK##_conv1); \ + GENERATE_RELU_VAR(LAYER, BLK, relu1, LAYER##_##BLK##_bn1); \ + \ + GENERATE_CONV_VAR(LAYER, BLK, conv2, LAYER##_##BLK##_out_chls, 
LAYER##_##BLK##_out_chls, 3, 3, stride_1, pad_1, \ + LAYER##_##BLK##_relu1); \ + GENERATE_BN_VAR(LAYER, BLK, bn2, LAYER##_##BLK##_out_chls, LAYER##_##BLK##_conv2); \ + GENERATE_RELU_VAR(LAYER, BLK, relu2, LAYER##_##BLK##_bn2); \ + \ + GENERATE_CONV_VAR(LAYER, BLK, conv3, LAYER##_##BLK##_out_chls, out_channels, 1, 1, stride_1, pad_0, \ + LAYER##_##BLK##_relu2); \ + GENERATE_BN_VAR(LAYER, BLK, bn3, out_channels, LAYER##_##BLK##_conv3); \ + \ + GENERATE_ADD_VAR(LAYER, BLK, add5, LAYER##_##BLK##_bn3, input); \ + GENERATE_RELU_VAR(LAYER, BLK, relu5, LAYER##_##BLK##_add5); \ + \ + auto &LAYER##_##BLK##_output = LAYER##_##BLK##_relu5; \ + auto &LAYER##_##BLK##_output_label = "y"; + +#define MAKE_NORMAL_BLOCK_VAR(LAYER, BLK, in_channels, out_channels, stride, input) \ + int LAYER##_##BLK##_out_chls = out_channels / 4; \ + GENERATE_CONV_VAR_VAR(LAYER, BLK, conv1, in_channels, LAYER##_##BLK##_out_chls, 1, 1, stride, pad_0, input); \ + GENERATE_BN_VAR_VAR(LAYER, BLK, bn1, LAYER##_##BLK##_out_chls, input); \ + \ + GENERATE_CONV_VAR_VAR(LAYER, BLK, conv2, LAYER##_##BLK##_out_chls, LAYER##_##BLK##_out_chls, 3, 3, stride_1, pad_1, \ + input); \ + GENERATE_BN_VAR_VAR(LAYER, BLK, bn2, LAYER##_##BLK##_out_chls, input); \ + \ + GENERATE_CONV_VAR_VAR(LAYER, BLK, conv3, LAYER##_##BLK##_out_chls, out_channels, 1, 1, stride_1, pad_0, input); \ + GENERATE_BN_VAR_VAR(LAYER, BLK, bn3, out_channels, input); + +// (int in_channels, int out_channels,vector stride{1,1}, Operator& input) +#define MAKE_RESIDUAL_LAYER(LAYER, in_channels, out_channels, stride, input) \ + MAKE_RESIDUAL_BLOCK(LAYER, blk1, in_channels, out_channels, stride, input); \ + \ + auto &LAYER##_output = LAYER##_blk1_output; \ + auto &LAYER##_output_label = LAYER##_blk1_output_label; + +#define MAKE_RESIDUAL_LAYER_VAR(LAYER, in_channels, out_channels, stride, input) \ + MAKE_RESIDUAL_BLOCK_VAR(LAYER, blk1, in_channels, out_channels, stride, input); + +// (int in_channels, int out_channels,vector stride{1,1}, Operator& input) 
+#define MAKE_NORMAL_LAYER(LAYER, in_channels, out_channels, stride, input) \ + MAKE_NORMAL_BLOCK(LAYER, blk1, in_channels, out_channels, stride, input); \ + \ + auto &LAYER##_output = LAYER##_blk1_output; \ + auto &LAYER##_output_label = LAYER##_blk1_output_label; + +#define MAKE_NORMAL_LAYER_VAR(LAYER, in_channels, out_channels, stride, input) \ + MAKE_NORMAL_BLOCK_VAR(LAYER, blk1, in_channels, out_channels, stride, input); + +#define MAKE_RESNET50(input) \ + MAKE_RESIDUAL_LAYER(layer1, 64, 256, stride_1, input) \ + MAKE_NORMAL_LAYER(layer2, 256, 256, stride_1, layer1_output) \ + MAKE_NORMAL_LAYER(layer3, 256, 256, stride_1, layer2_output) \ + MAKE_RESIDUAL_LAYER(layer4, 256, 512, stride_2, layer3_output) \ + MAKE_NORMAL_LAYER(layer5, 512, 512, stride_1, layer4_output) \ + MAKE_NORMAL_LAYER(layer6, 512, 512, stride_1, layer5_output) \ + MAKE_NORMAL_LAYER(layer7, 512, 512, stride_1, layer6_output) \ + MAKE_RESIDUAL_LAYER(layer8, 512, 1024, stride_2, layer7_output) \ + MAKE_NORMAL_LAYER(layer9, 1024, 1024, stride_1, layer8_output) \ + MAKE_NORMAL_LAYER(layer10, 1024, 1024, stride_1, layer9_output) \ + MAKE_NORMAL_LAYER(layer11, 1024, 1024, stride_1, layer10_output) \ + MAKE_NORMAL_LAYER(layer12, 1024, 1024, stride_1, layer11_output) \ + MAKE_NORMAL_LAYER(layer13, 1024, 1024, stride_1, layer12_output) \ + MAKE_RESIDUAL_LAYER(layer14, 1024, 2048, stride_2, layer13_output) \ + MAKE_NORMAL_LAYER(layer15, 2048, 2048, stride_1, layer14_output) \ + MAKE_NORMAL_LAYER(layer16, 2048, 2048, stride_1, layer15_output) \ + \ + auto &resnet50_output = layer16_output; \ + auto &resnet50_output_label = layer16_output_label; + +#define MAKE_RESNET50_VAR(inputs) \ + MAKE_RESIDUAL_LAYER_VAR(layer1, 64, 256, stride_1, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer2, 256, 256, stride_1, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer3, 256, 256, stride_1, inputs) \ + MAKE_RESIDUAL_LAYER_VAR(layer4, 256, 512, stride_2, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer5, 512, 512, stride_1, inputs) \ + 
MAKE_NORMAL_LAYER_VAR(layer6, 512, 512, stride_1, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer7, 512, 512, stride_1, inputs) \ + MAKE_RESIDUAL_LAYER_VAR(layer8, 512, 1024, stride_2, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer9, 1024, 1024, stride_1, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer10, 1024, 1024, stride_1, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer11, 1024, 1024, stride_1, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer12, 1024, 1024, stride_1, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer13, 1024, 1024, stride_1, inputs) \ + MAKE_RESIDUAL_LAYER_VAR(layer14, 1024, 2048, stride_2, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer15, 2048, 2048, stride_1, inputs) \ + MAKE_NORMAL_LAYER_VAR(layer16, 2048, 2048, stride_1, inputs) \ +//--------------------------------------------------------------------------------------------- + +// (Operator& input) +#define GENERATE_BIASADD_GRAD(LAYER, BLK, OPNUM, input) \ + auto LAYER##_##BLK##_##OPNUM##_grad = \ + op::BiasAddGrad(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("grad")) \ + .set_input_x(input, input.name_out_dx()); + +// (Operator& input) +#define GENERATE_MATMUL_GRAD(LAYER, BLK, OPNUM, input) \ + auto LAYER##_##BLK##_##OPNUM##_grad = \ + op::MatMul(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("grad")).set_input_x1(input); + +// (Operator& input) +#define GENERATE_RESHAPE_GRAD(LAYER, BLK, OPNUM, input) \ + auto LAYER##_##BLK##_##OPNUM##_grad = \ + op::Reshape(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("grad")).set_input_tensor(input); + +// (Operator& input_grad, Operator& input_maxpool) +#define GENERATE_MAXPOOL_GRAD(LAYER, BLK, OPNUM, input_grad, input_maxpool) \ + auto LAYER##_##BLK##_##OPNUM##_grad = \ + op::MaxPoolGradWithArgmax(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("grad")) \ + .set_input_x(LAYER##_##BLK##_##OPNUM##_input, "y") \ + .set_input_grad(input_grad) \ + .set_input_argmax(input_maxpool, input_maxpool.name_out_argmax()) \ + .set_attr_ksize({1, 1, 3, 3}) \ + .set_attr_strides({1, 1, 
2, 2}) \ + .set_attr_padding("SAME"); + +// (Operator& input_dy) +#define GENERATE_RELU_GRAD(LAYER, BLK, OPNUM, input_dy, dy_label) \ + auto LAYER##_##BLK##_##OPNUM##_grad = op::ReluGrad(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("grad")) \ + .set_input_gradients(input_dy, dy_label) \ + .set_input_features(LAYER##_##BLK##_##OPNUM, "y"); + +// (Operator& input_dy) +#define GENERATE_BN_GRAD(LAYER, BLK, OPNUM, input_dy) \ + auto LAYER##_##BLK##_##OPNUM##_grad = \ + op::FusedBatchNormGrad(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("grad")) \ + .set_input_dy(input_dy, "backprops") \ + .set_input_x(LAYER##_##BLK##_##OPNUM##_input, "y") \ + .set_input_scale(LAYER##_##BLK##_##OPNUM##_scale) \ + .set_input_save_mean(LAYER##_##BLK##_##OPNUM, "save_mean") \ + .set_input_save_inv_variance(LAYER##_##BLK##_##OPNUM, "save_inv_variance") \ + .set_attr_epsilon(0.0001); \ + \ + auto LAYER##_##BLK##_##OPNUM##_momentum_scale = \ + op::ApplyMomentum() \ + .set_input_accum(LAYER##_##BLK##_##OPNUM##_mom_scale) \ + .set_input_grad(LAYER##_##BLK##_##OPNUM##_grad, LAYER##_##BLK##_##OPNUM##_grad.name_out_bn_scale()) \ + .set_input_lr(label1) \ + .set_input_momentum(label1) \ + .set_input_var(LAYER##_##BLK##_##OPNUM##_scale); \ + \ + auto LAYER##_##BLK##_##OPNUM##_momentum_b = \ + op::ApplyMomentum() \ + .set_input_accum(LAYER##_##BLK##_##OPNUM##_mom_b) \ + .set_input_grad(LAYER##_##BLK##_##OPNUM##_grad, LAYER##_##BLK##_##OPNUM##_grad.name_out_bn_bias()) \ + .set_input_lr(label1) \ + .set_input_momentum(label1) \ + .set_input_var(LAYER##_##BLK##_##OPNUM##_b); + +// (Operator& input) +#define GENERATE_CONV_PROP_FILTER(LAYER, BLK, OPNUM, input_bngrad, stride) \ + auto LAYER##_##BLK##_##OPNUM##_propfilter = \ + op::Conv2DBackpropFilterD(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("_propfilter")) \ + .set_input_x(LAYER##_##BLK##_##OPNUM##_input, "y") \ + .set_attr_filter_sizes(LAYER##_##BLK##_##OPNUM##_desc.GetShape().GetDims()) \ + 
.set_input_out_backprop(input_bngrad, input_bngrad.name_out_dx()) \ + .set_attr_strides(stride) \ + .set_attr_pads({1, 1, 1, 1}); \ + \ + update_op_format(LAYER##_##BLK##_##OPNUM##_propfilter); \ + auto LAYER##_##BLK##_##OPNUM##_momentum_weight = op::ApplyMomentum() \ + .set_input_accum(LAYER##_##BLK##_##OPNUM##_mom_weight) \ + .set_input_grad(LAYER##_##BLK##_##OPNUM##_propfilter) \ + .set_input_lr(label1) \ + .set_input_momentum(label1) \ + .set_input_var(LAYER##_##BLK##_##OPNUM##_weight); + +///.set_attr_input_sizes({input_bngrad.name_out_dx().GetOutputDesc().GetShape().GetDim(0),LAYER##_##BLK##_##OPNUM##_weight.GetOutputDesc().GetShape().GetDim(1), +///input_bngrad.name_out_dx().GetOutputDesc().GetShape().GetDim(2)*stride[2], +///input_bngrad.name_out_dx().GetOutputDesc().GetShape().GetDim(3)*stride[3]}) +#define GENERATE_CONV_PROP_INPUT(LAYER, BLK, OPNUM, input_bngrad, stride) \ + auto LAYER##_##BLK##_##OPNUM##_propinput = \ + op::Conv2DBackpropInputD(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("_propinput")) \ + .set_attr_input_sizes(LAYER##_##BLK##_##OPNUM##_input.GetOutputDesc("y").GetShape().GetDims()) \ + .set_input_filters(LAYER##_##BLK##_##OPNUM##_weight) \ + .set_input_out_backprop(input_bngrad, input_bngrad.name_out_dx()) \ + .set_attr_strides(stride) \ + .set_attr_pads({1, 1, 1, 1}); \ + cout << string(#LAYER) + string(#BLK) + string(#OPNUM) + "_propinput" \ + << "'s input_x op's shape is:" << input_bngrad.GetOutputDesc("dx").GetShape().GetDim(3) * stride[3] << endl; \ + cout << string(#LAYER) + string(#BLK) + string(#OPNUM) + "_propinput" \ + << "'s input_x op's shape is:" << input_bngrad.GetOutputDesc("dx").GetShape().GetDim(2) * stride[2] << endl; \ + \ + update_op_format(LAYER##_##BLK##_##OPNUM##_propinput); \ + auto &LAYER##_##BLK##_##OPNUM##_propinput_label = "y" + +// (int out_channels, Operator& input) +#define GENERATE_ADD_GRAD(LAYER, BLK, OPNUM, input_x1, input_x1_label, input_x2, input_x2_label) \ + auto 
LAYER##_##BLK##_##OPNUM##_grad = op::Add(string(#LAYER) + string(#BLK) + string(#OPNUM) + string("grad")) \ + .set_input_x1(input_x1, input_x1_label) \ + .set_input_x2(input_x2, input_x2_label); + +// (Operator& input) +#define MAKE_RESIDUAL_BLOCK_GRAD(LAYER, BLK, input_dy, dy_label) \ + GENERATE_RELU_GRAD(LAYER, BLK, relu5, input_dy, dy_label); \ + \ + GENERATE_BN_GRAD(LAYER, BLK, bn4, LAYER##_##BLK##_relu5_grad); \ + GENERATE_CONV_PROP_FILTER(LAYER, BLK, conv4, LAYER##_##BLK##_bn4_grad, LAYER##_##BLK##_stride); \ + GENERATE_CONV_PROP_INPUT(LAYER, BLK, conv4, LAYER##_##BLK##_bn4_grad, LAYER##_##BLK##_stride); \ + \ + GENERATE_BN_GRAD(LAYER, BLK, bn3, LAYER##_##BLK##_relu5_grad); \ + GENERATE_CONV_PROP_FILTER(LAYER, BLK, conv3, LAYER##_##BLK##_bn3_grad, stride_1); \ + GENERATE_CONV_PROP_INPUT(LAYER, BLK, conv3, LAYER##_##BLK##_bn3_grad, stride_1); \ + \ + GENERATE_RELU_GRAD(LAYER, BLK, relu2, LAYER##_##BLK##_conv3_propinput, "y"); \ + GENERATE_BN_GRAD(LAYER, BLK, bn2, LAYER##_##BLK##_relu2_grad); \ + GENERATE_CONV_PROP_FILTER(LAYER, BLK, conv2, LAYER##_##BLK##_bn2_grad, stride_1); \ + GENERATE_CONV_PROP_INPUT(LAYER, BLK, conv2, LAYER##_##BLK##_bn2_grad, stride_1); \ + \ + GENERATE_RELU_GRAD(LAYER, BLK, relu1, LAYER##_##BLK##_conv2_propinput, "y"); \ + GENERATE_BN_GRAD(LAYER, BLK, bn1, LAYER##_##BLK##_relu1_grad); \ + GENERATE_CONV_PROP_FILTER(LAYER, BLK, conv1, LAYER##_##BLK##_bn1_grad, LAYER##_##BLK##_stride); \ + GENERATE_CONV_PROP_INPUT(LAYER, BLK, conv1, LAYER##_##BLK##_bn1_grad, LAYER##_##BLK##_stride); \ + \ + GENERATE_ADD_GRAD(LAYER, BLK, add5, LAYER##_##BLK##_conv1_propinput, LAYER##_##BLK##_conv1_propinput_label, \ + LAYER##_##BLK##_conv4_propinput, LAYER##_##BLK##_conv4_propinput_label); \ + \ + auto &LAYER##_##BLK##_grad_output = LAYER##_##BLK##_add5_grad; \ + auto &LAYER##_##BLK##_grad_output_label = "y" + +// (Operator& input) +#define MAKE_NORMAL_BLOCK_GRAD(LAYER, BLK, input_dy, dy_label) \ + GENERATE_RELU_GRAD(LAYER, BLK, relu5, input_dy, dy_label); 
\ + \ + GENERATE_BN_GRAD(LAYER, BLK, bn3, LAYER##_##BLK##_relu5_grad); \ + GENERATE_CONV_PROP_FILTER(LAYER, BLK, conv3, LAYER##_##BLK##_bn3_grad, stride_1); \ + GENERATE_CONV_PROP_INPUT(LAYER, BLK, conv3, LAYER##_##BLK##_bn3_grad, stride_1); \ + \ + GENERATE_RELU_GRAD(LAYER, BLK, relu2, LAYER##_##BLK##_conv3_propinput, "y"); \ + GENERATE_BN_GRAD(LAYER, BLK, bn2, LAYER##_##BLK##_relu2_grad); \ + GENERATE_CONV_PROP_FILTER(LAYER, BLK, conv2, LAYER##_##BLK##_bn2_grad, stride_1); \ + GENERATE_CONV_PROP_INPUT(LAYER, BLK, conv2, LAYER##_##BLK##_bn2_grad, stride_1); \ + \ + GENERATE_RELU_GRAD(LAYER, BLK, relu1, LAYER##_##BLK##_conv2_propinput, "y"); \ + GENERATE_BN_GRAD(LAYER, BLK, bn1, LAYER##_##BLK##_relu1_grad); \ + GENERATE_CONV_PROP_FILTER(LAYER, BLK, conv1, LAYER##_##BLK##_bn1_grad, LAYER##_##BLK##_stride); \ + GENERATE_CONV_PROP_INPUT(LAYER, BLK, conv1, LAYER##_##BLK##_bn1_grad, LAYER##_##BLK##_stride); \ + \ + GENERATE_ADD_GRAD(LAYER, BLK, add5, LAYER##_##BLK##_conv1_propinput, LAYER##_##BLK##_conv1_propinput_label, \ + input_dy, dy_label); \ + \ + auto &LAYER##_##BLK##_grad_output = LAYER##_##BLK##_add5_grad; \ + auto &LAYER##_##BLK##_grad_output_label = "y" + +// (Operator& input_dy) +#define MAKE_RESIDUAL_LAYER_GRAD(LAYER, input_dy, dy_label) \ + MAKE_RESIDUAL_BLOCK_GRAD(LAYER, blk1, input_dy, dy_label); \ + \ + auto &LAYER##_grad_output = LAYER##_blk1_grad_output; \ + auto &LAYER##_grad_output_label = LAYER##_blk1_grad_output_label; + +// (Operator& input_dy) +#define MAKE_NORMAL_LAYER_GRAD(LAYER, input_dy, dy_label) \ + MAKE_NORMAL_BLOCK_GRAD(LAYER, blk1, input_dy, dy_label); \ + \ + auto &LAYER##_grad_output = LAYER##_blk1_grad_output; \ + auto &LAYER##_grad_output_label = LAYER##_blk1_grad_output_label; + +#define MAKE_RESNET50_GRAD(input_dy, dy_label) \ + MAKE_NORMAL_LAYER_GRAD(layer16, input_dy, dy_label) \ + MAKE_NORMAL_LAYER_GRAD(layer15, layer16_grad_output, layer16_grad_output_label) \ + MAKE_RESIDUAL_LAYER_GRAD(layer14, layer15_grad_output, 
layer15_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer13, layer14_grad_output, layer14_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer12, layer13_grad_output, layer13_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer11, layer12_grad_output, layer12_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer10, layer11_grad_output, layer11_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer9, layer10_grad_output, layer10_grad_output_label) \ + MAKE_RESIDUAL_LAYER_GRAD(layer8, layer9_grad_output, layer9_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer7, layer8_grad_output, layer8_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer6, layer7_grad_output, layer7_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer5, layer6_grad_output, layer6_grad_output_label) \ + MAKE_RESIDUAL_LAYER_GRAD(layer4, layer5_grad_output, layer5_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer3, layer4_grad_output, layer4_grad_output_label) \ + MAKE_NORMAL_LAYER_GRAD(layer2, layer3_grad_output, layer3_grad_output_label) \ + MAKE_RESIDUAL_LAYER_GRAD(layer1, layer2_grad_output, layer2_grad_output_label) \ + \ + auto &resnet50_grad_output = layer1_grad_output; \ + auto &resnet50_grad_output_label = layer1_grad_output_label; + +bool resnet50(Graph &graph) { + auto data = op::Data().set_attr_index(0); + auto data1 = op::Data().set_attr_index(1); + TensorDesc shape_desc(ge::Shape({32, 3, 224, 224}), FORMAT_NCHW, DT_FLOAT); + data.update_output_desc_out(shape_desc); + + TensorDesc desc(ge::Shape({64, 3, 7, 7}), FORMAT_NCHW, DT_FLOAT); + + auto var = op::Variable("conv2d_var"); + var.update_output_desc_y(desc); + var.update_input_desc_x(desc); + + auto varw1 = op::Variable("conv2d_varw1"); + varw1.update_output_desc_y(desc); + + auto conv2d = op::Conv2D("Translate") + .set_input_x(data) + .set_input_filter(var) + .set_attr_strides({1, 1, 2, 2}) + .set_attr_pads({2, 3, 2, 3}); + TensorDesc desc_y; + desc_y.SetFormat(FORMAT_NCHW); // shape: 32 64 112 112 + conv2d.update_output_desc_y(desc_y); 
+ + TensorDesc desc1(ge::Shape({1, 64, 1, 1}), FORMAT_NCHW, DT_FLOAT); + auto var1 = op::Variable("bn_var1"); + var1.update_output_desc_y(desc1); + + auto var2 = op::Variable("bn_var2"); + var2.update_output_desc_y(desc1); + + auto var3 = op::Variable("bn_var3"); + var3.update_output_desc_y(desc1); + + auto var4 = op::Variable("bn_var4"); + var4.update_output_desc_y(desc1); + + TensorDesc desc2(ge::Shape({2048, 1001}), FORMAT_NCHW, DT_FLOAT); + + auto var5 = op::Variable("var5"); + var5.update_output_desc_y(desc2); + + auto var6 = op::Variable("var6"); + var6.update_output_desc_y(desc2); + + TensorDesc desclabel(ge::Shape({1, 1001, 1, 1}), FORMAT_NCHW, DT_FLOAT); + + auto label1 = op::Variable("label1"); + label1.update_output_desc_y(desclabel); + + TensorDesc descmatlabel(ge::Shape({1, 1001, 1, 1}), FORMAT_NCHW, DT_FLOAT); + auto matvar = op::Variable("matvar"); + matvar.update_output_desc_y(descmatlabel); + + auto matvar1 = op::Variable("matvar1"); + matvar1.update_output_desc_y(descmatlabel); + + auto bn = op::FusedBatchNorm() + .set_input_x(conv2d, "y") + .set_input_scale(var1) + .set_input_b(var2) + .set_input_mean(var3) + .set_input_variance(var4) + .set_attr_mode(1) + .set_attr_epsilon(1e-5) + .set_attr_is_training(true) + .set_attr_is_training_fusion(true) + .set_attr_moving_average_fraction(994352128); + + auto relu = op::Relu().set_input_x(bn, "y"); + + auto maxpool = op::MaxPoolWithArgmax() + .set_input_x(relu, "y") + .set_attr_ksize({1, 3, 3, 1}) + .set_attr_padding("SAME") + .set_attr_strides({1, 2, 2, 1}); + + MAKE_RESNET50(maxpool); + std::vector inputs{data}; //,var,var1,layer1_blk1_bn1_b,var3,var4}; + std::vector outputs{}; + + graph.SetInputs(inputs).SetOutputs(outputs); + return true; +} + +#define GENERATE_CONSTANT_USE_DESC(OPNUM, desc, val) \ + uint32_t OPNUM##_size = desc.GetShape().GetShapeSize(); \ + Tensor OPNUM##_tensor; \ + OPNUM##_tensor.SetTensorDesc(desc); \ + if (desc.GetDataType() == DT_FLOAT) { \ + float *OPNUM##_data = new 
float[OPNUM##_size]; \ + for (int i = 0; i < (int)OPNUM##_size; i++) { \ + *(OPNUM##_data + i) = val; \ + } \ + OPNUM##_tensor.SetData((uint8_t *)OPNUM##_data, OPNUM##_size * sizeof(float)); \ + delete[] OPNUM##_data; \ + } \ + if (desc.GetDataType() == DT_INT64) { \ + int64_t *OPNUM##_data = new int64_t[OPNUM##_size]; \ + for (int i = 0; i < (int)OPNUM##_size; i++) { \ + *(OPNUM##_data + i) = val; \ + } \ + OPNUM##_tensor.SetData((uint8_t *)OPNUM##_data, OPNUM##_size * sizeof(int64_t)); \ + delete[] OPNUM##_data; \ + } \ + auto OPNUM##_constant = op::Constant().set_attr_value(OPNUM##_tensor); \ + OPNUM##_constant.update_output_desc_y(desc); + +#define GENERATE_VAR_LAYER(OPNUM, desc, input) \ + auto OPNUM##_weight = op::Variable(string(#OPNUM)); \ + OPNUM##_weight.update_output_desc_y(desc); \ + auto OPNUM##_assign = op::Assign().set_input_ref(OPNUM##_weight).set_input_value(OPNUM##_constant); \ + \ + input.push_back(OPNUM##_weight); + +#define GENERATE_VAR_LAYER_1(OPNUM, desc, var_format, input, name) \ + auto OPNUM##_weight = op::Variable(string(name)); \ + OPNUM##_weight.update_output_desc_y(desc); \ + auto OPNUM##_assign = op::Assign().set_input_ref(OPNUM##_weight).set_input_value(OPNUM##_constant); \ + \ + input.push_back(OPNUM##_weight); + +int BuildInitVarGraph(Graph &graph) { + std::vector inputs{}; + std::vector outputs{}; + + TensorDesc desc(ge::Shape({64, 3, 7, 7}), FORMAT_NCHW, DT_FLOAT); + GENERATE_CONSTANT_USE_DESC(conv2d_var, desc, 0.01); + GENERATE_VAR_LAYER(conv2d_var, desc, inputs); + + GENERATE_CONSTANT_USE_DESC(conv2d_varw1, desc, 0.01); + GENERATE_VAR_LAYER(conv2d_varw1, desc, inputs); + + TensorDesc desc1(ge::Shape({1, 64, 1, 1}), FORMAT_NCHW, DT_FLOAT); + GENERATE_CONSTANT_USE_DESC(bn_var1, desc1, 0.01); + GENERATE_VAR_LAYER(bn_var1, desc1, inputs); + GENERATE_CONSTANT_USE_DESC(bn_var2, desc1, 0.01); + GENERATE_VAR_LAYER(bn_var2, desc1, inputs); + GENERATE_CONSTANT_USE_DESC(bn_var3, desc1, 0.01); + GENERATE_VAR_LAYER(bn_var3, desc1, inputs); 
+ GENERATE_CONSTANT_USE_DESC(bn_var4, desc1, 0.01); + GENERATE_VAR_LAYER(bn_var4, desc1, inputs); + + TensorDesc desc2(ge::Shape({2048, 1001}), FORMAT_NCHW, DT_FLOAT); + GENERATE_CONSTANT_USE_DESC(var5, desc2, 0.01); + GENERATE_VAR_LAYER(var5, desc2, inputs); + GENERATE_CONSTANT_USE_DESC(var6, desc2, 0.01); + GENERATE_VAR_LAYER(var6, desc2, inputs); + + TensorDesc desclabel(ge::Shape({1, 1001, 1, 1}), FORMAT_NCHW, DT_FLOAT); + GENERATE_CONSTANT_USE_DESC(label1, desclabel, 0.1); + GENERATE_VAR_LAYER(label1, desclabel, inputs); + + TensorDesc descmatlabel(ge::Shape({1, 1001, 1, 1}), FORMAT_NCHW, DT_FLOAT); + GENERATE_CONSTANT_USE_DESC(matvar, descmatlabel, 0.01); + GENERATE_VAR_LAYER(matvar, descmatlabel, inputs); + GENERATE_CONSTANT_USE_DESC(matvar1, descmatlabel, 0.01); + GENERATE_VAR_LAYER(matvar1, descmatlabel, inputs); + + MAKE_RESNET50_VAR(inputs); + + TensorDesc ctrl(ge::Shape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT64); + + GENERATE_CONSTANT_USE_DESC(iterations_per_loop, ctrl, 100); + GENERATE_VAR_LAYER_1(iterations_per_loop, ctrl, "4D", inputs, "npu_runconfig/iterations_per_loop"); + GENERATE_CONSTANT_USE_DESC(loop_cond, ctrl, 0); + GENERATE_VAR_LAYER_1(loop_cond, ctrl, "4D", inputs, "npu_runconfig/loop_cond"); + GENERATE_CONSTANT_USE_DESC(one, ctrl, 1); + GENERATE_VAR_LAYER_1(one, ctrl, "4D", inputs, "npu_runconfig/one"); + GENERATE_CONSTANT_USE_DESC(zero, ctrl, 0); + GENERATE_VAR_LAYER_1(zero, ctrl, "4D", inputs, "npu_runconfig/zero"); + + graph.SetInputs(inputs).SetOutputs(outputs); + return 0; +} +int TestBuildGraphTest(Func fun, Graph &graph, vector &inputs, vector &outputs) { + bool graph_ret = fun(graph); + ge::Tensor shapeTensor; + TensorDesc shape_desc(ge::Shape({32, 3, 224, 224}), FORMAT_NCHW, DT_FLOAT); + uint32_t sizeshape = shape_desc.GetShape().GetShapeSize(); + printf("[test] desc size filter shape:%u\n", sizeshape); + shapeTensor.SetTensorDesc(shape_desc); + vector dataValuec; + for (int i = 0; i < sizeshape; i++) { + // 
dataValuec.push_back((float)(i%255)); + dataValuec.push_back(1); + } + + shapeTensor.SetData((uint8_t *)dataValuec.data(), 4 * sizeshape); + inputs.push_back(shapeTensor); + + ge::Tensor shapeTensor1; + TensorDesc shape_desc1(ge::Shape({1, 32, 1, 1}), FORMAT_NCHW, DT_FLOAT); + uint32_t sizeshape1 = shape_desc1.GetShape().GetShapeSize(); + printf("[test] desc size filter shape:%u\n", sizeshape1); + shapeTensor1.SetTensorDesc(shape_desc1); + vector dataValuec1; + for (int i = 0; i < sizeshape1; i++) { + dataValuec1.push_back(1); + } + + shapeTensor1.SetData((uint8_t *)dataValuec1.data(), 4 * sizeshape1); + // inputs.push_back(shapeTensor1); + + return 0; +} +int runTrainGraph(Func fun, int loopCount) { + printf("GE BBIT begin...\n"); + std::chrono::system_clock::time_point start = std::chrono::system_clock::now(); + + std::map ge_options = { + {"device_id", "0"}, {"rank_table_file", ""}, {"graphType", "1"}, {"ge.graphRunMode", "2"}}; + + std::map session_options = {{"a", "b"}, {TRAIN_FLAG, "1"}}; + + ge::Status ret; + + // init ge + ret = GEInitialize_api_new("train", "fe,plugin"); + printf("ge::GEInitialize ret:%d\n", ret); + + // init session + ge::Session session(session_options); + + int graphId_initvar = 1; + ge::Graph graph_initvar("initVarGraph"); + bool graph_ret = BuildInitVarGraph(graph_initvar); + + // session addgraph + int graphId = 0; + + // build graph + ge::Graph graph("bigGraph"); + std::vector inputs; + ge::Tensor outputTensor; + std::vector outputs; + graph_ret = TestBuildGraphTest(fun, graph, inputs, outputs); + printf("TestReluGrad ret:%d\n", graph_ret); + + ret = session.AddGraph(graphId_initvar, graph_initvar); + printf("session.AddVarGraph ret:%d\n", ret); + if (ret) return ret; + + ret = session.AddGraph(graphId, graph); + printf("session.AddGraph ret:%d\n", ret); + if (ret) return ret; + + std::vector inputs1; + std::vector outputs1; + ret = session.RunGraph(graphId_initvar, inputs1, outputs1); + + if (ret != SUCCESS) { + return ret; + } + 
// add loop for test of stabilty: + for (int i = 0; i < loopCount; i++) { + // session rungraph + printf("loopCount:%d\n", loopCount); + ret = session.RunGraph(graphId, inputs, outputs); + printf("session.RunGraph ret:%d\n", ret); + if (ret) return ret; + + // define 99999 as loop forever + if (loopCount == 99999) i = 0; + } + std::chrono::system_clock::time_point end = std::chrono::system_clock::now(); + auto millisecondsduration = std::chrono::duration_cast(end - start); + auto ms = millisecondsduration.count(); + std::stringstream ss; + ss << ms << "ms"; + std::string run_time = ss.str(); + printf("run time is : %s \n", run_time.c_str()); + + return 0; +} + +int main(int argc, char *argv[]) { + // add loop for test of stabilty: + int loopCount = 1; + if (argc >= 2) loopCount = atoi(argv[1]); + + Status ret = SUCCESS; + ret = runTrainGraph(resnet50, loopCount); + if (ret == SUCCESS) { + std::cout << "[train resnet50 success]" << std::endl; + } else { + std::cout << "!!! train resnet50 fail !!!" << std::endl; + } + return ret; +} diff --git a/tests/st/test_ge_st.py b/tests/st/test_ge_st.py new file mode 100644 index 00000000..9c366364 --- /dev/null +++ b/tests/st/test_ge_st.py @@ -0,0 +1,56 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +""" +ge st test. 
+""" +import pytest +import subprocess +import os + +@pytest.mark.level0 +@pytest.mark.platform_arm_ascend_training +@pytest.mark.platform_x86_ascend_training +@pytest.mark.env_card +@pytest.mark.component_ge +def test_resnet50_train(): + ge_st_dir=os.environ.get('GE_ST_DIR', + '/home/jenkins/workspace/release_pkg/gate/graphengine_lib') + ge_lib_dir=os.environ.get('GRAPHENGINE_LIB', '/home/jenkins/workspace/release_pkg/gate/graphengine_lib') + + real_pythonpath=os.environ.get('REAL_PYTHONPATH') + pythonpath=os.environ.get('PYTHONPATH') + if real_pythonpath: + if pythonpath: + os.environ['PYTHONPATH']=real_pythonpath+':'+pythonpath + else: + os.environ['PYTHONPATH']=real_pythonpath + print('PYTHONPATH: '+os.environ.get('PYTHONPATH')) + + os.environ['ASCEND_OPP_PATH']='/usr/local/HiAI/runtime/ops' + os.environ['ASCEND_ENGINE_PATH']='/usr/local/HiAI/runtime/lib64/plugin/opskernel/libaicpu_ms_engine.so:' \ + '/usr/local/HiAI/runtime/lib64/plugin/opskernel/libfe.so:' \ + '/usr/local/HiAI/runtime/lib64/plugin/opskernel/librts_engine.so:'+ \ + ge_lib_dir + '/libge_local_engine.so' + print('ASCEND_OPP_PATH: '+os.environ.get('ASCEND_OPP_PATH')) + print('ASCEND_ENGINE_PATH: '+os.environ.get('ASCEND_ENGINE_PATH')) + print('LD_LIBRARY_PATH: '+os.environ.get('LD_LIBRARY_PATH')) + + cmd=ge_st_dir + '/st_resnet50_train' + print('cmd: '+cmd) + os.environ['SLOG_PRINT_TO_STDOUT']="1" + ret=subprocess.call([cmd], shell=True) + assert ret==0 + diff --git a/tests/ut/CMakeLists.txt b/tests/ut/CMakeLists.txt new file mode 100644 index 00000000..f975eeb9 --- /dev/null +++ b/tests/ut/CMakeLists.txt @@ -0,0 +1,24 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. 
+# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. +# ============================================================================ + +project(ut CXX C) + +if (ENABLE_GE_COV) + set(COVERAGE_COMPILER_FLAGS "-g --coverage -fprofile-arcs -fPIC -O0 -ftest-coverage") + set(CMAKE_CXX_FLAGS "${COVERAGE_COMPILER_FLAGS}") +endif() + +add_subdirectory(common/graph) +add_subdirectory(ge) diff --git a/tests/ut/common/graph/CMakeLists.txt b/tests/ut/common/graph/CMakeLists.txt new file mode 100644 index 00000000..9ac2d428 --- /dev/null +++ b/tests/ut/common/graph/CMakeLists.txt @@ -0,0 +1,100 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +project(ut_libgraph) + +set(CMAKE_CXX_STANDARD 11) + +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "${GE_SOURCE_DIR}/src/proto/om.proto" + "${GE_SOURCE_DIR}/src/proto/ge_ir.proto" + "${onnx_INC}/onnx/onnx.proto" +) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}/src) +include_directories(${GE_SOURCE_DIR}/src/common) +include_directories(${GE_SOURCE_DIR}/src/common/graph) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/inc/common) +include_directories(${GE_SOURCE_DIR}/inc/ops) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/ops) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(/usr/local/HiAI/opp/op_proto/built-in/inc) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +file(GLOB_RECURSE UT_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "testcase/ge_graph/ge_anchor_utils_unittest.cc" + "testcase/ge_graph/ge_def_type_unittest.cc" + "testcase/ge_graph/ge_graph_anchor_unittest.cc" + "testcase/ge_graph/ge_model_serialize_unittest.cc" + "testcase/ge_graph/ge_node_unittest.cc" + "testcase/ge_graph/ge_opdesc_unittest.cc" + "testcase/ge_graph/ge_tensor_unittest.cc" + "testcase/ge_graph/graph_builder_utils.cc" + "testcase/ge_graph/ge_reg_unittest.cc" + "testcase/ge_graph/ge_format_refiner_unittest.cc" + "testcase/ge_graph/ge_opsproto_manager_unittest.cc" + "testcase/ge_graph/ge_operator_unittest.cc" + 
"testcase/ge_graph/ge_model_unittest.cc" +) + +file(GLOB_RECURSE SRC_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/common/graph/option/ge_local_context.cc" + "${GE_SOURCE_DIR}/src/common/graph/option/ge_context.cc" + "${GE_SOURCE_DIR}/src/common/graph/anchor.cc" + "${GE_SOURCE_DIR}/src/common/graph/ge_attr_value.cc" + "${GE_SOURCE_DIR}/src/common/graph/attr_value.cc" + "${GE_SOURCE_DIR}/src/common/graph/buffer.cc" + "${GE_SOURCE_DIR}/src/common/graph/compute_graph.cc" + "${GE_SOURCE_DIR}/src/common/graph/ge_attr_define.cc" + "${GE_SOURCE_DIR}/src/common/graph/graph.cc" + "${GE_SOURCE_DIR}/src/common/graph/model.cc" + "${GE_SOURCE_DIR}/src/common/graph/model_serialize.cc" + "${GE_SOURCE_DIR}/src/common/graph/node.cc" + "${GE_SOURCE_DIR}/src/common/graph/op_desc.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator_reg.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator_factory.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator_factory_impl.cc" + "${GE_SOURCE_DIR}/src/common/graph/range_vistor.cc" + "${GE_SOURCE_DIR}/src/common/graph/tensor.cc" + "${GE_SOURCE_DIR}/src/common/graph/ge_tensor.cc" + "${GE_SOURCE_DIR}/src/common/graph/shape_refiner.cc" + "${GE_SOURCE_DIR}/src/common/graph/format_refiner.cc" + "${GE_SOURCE_DIR}/src/common/graph/inference_context.cc" + "${GE_SOURCE_DIR}/src/common/graph/detail/attributes_holder.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/anchor_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/graph_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/node_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/op_desc_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/type_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/ge_ir_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/tensor_utils.cc" + "${GE_SOURCE_DIR}/src/common/ops/op_imp.cc" + "${GE_SOURCE_DIR}/src/common/graph/opsproto/opsproto_manager.cc" +) + +add_executable(ut_libgraph ${UT_FILES} ${SRC_FILES} 
${PROTO_SRCS} ${PROTO_HDRS}) +target_link_libraries(ut_libgraph graphengine::gtest graphengine::gtest_main slog_stub ge_protobuf::protobuf ${c_sec} rt dl) diff --git a/tests/ut/common/graph/ops_stub.h b/tests/ut/common/graph/ops_stub.h new file mode 100644 index 00000000..27d927f1 --- /dev/null +++ b/tests/ut/common/graph/ops_stub.h @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef UT_COMMON_GRAPH_OP_STUB_H_ +#define UT_COMMON_GRAPH_OP_STUB_H_ + +#include "external/graph/operator_reg.h" + +// for ir +namespace ge { +// Data +REG_OP(Data) + .INPUT(data, TensorType::ALL()) + .OUTPUT(out, TensorType::ALL()) + .ATTR(index, Int, 0) + .OP_END_FACTORY_REG(Data) + + // Flatten + REG_OP(Flatten) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .OP_END_FACTORY_REG(Flatten) + +} // namespace ge + +#endif // UT_COMMON_GRAPH_OP_STUB_H_ diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_anchor_utils_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_anchor_utils_unittest.cc new file mode 100644 index 00000000..7f7f3465 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_anchor_utils_unittest.cc @@ -0,0 +1,81 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define protected public +#include "graph/utils/anchor_utils.h" + +#include "graph/anchor.h" +#include "graph/node.h" +#include "graph/utils/graph_utils.h" +#undef protected + +using namespace ge; + +class UtestGeAnchorUtils : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeAnchorUtils, base) { + ComputeGraphPtr graph_ptr = std::make_shared("name"); + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + InDataAnchorPtr a1 = std::make_shared(n1, 0); + + EXPECT_EQ(AnchorUtils::SetFormat(a1, FORMAT_ND), GRAPH_SUCCESS); + Format f1 = AnchorUtils::GetFormat(a1); + EXPECT_EQ(f1, FORMAT_ND); + + InDataAnchorPtr a2 = std::make_shared(n1, 0); + EXPECT_EQ(AnchorUtils::SetFormat(nullptr, FORMAT_ND), GRAPH_FAILED); + Format f2 = AnchorUtils::GetFormat(nullptr); + EXPECT_EQ(f2, FORMAT_RESERVED); + + // has control edge + OpDescPtr desc_ptr1 = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr1->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr1->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr1->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + OpDescPtr desc_ptr2 = std::make_shared("name2", "type2"); + EXPECT_EQ(desc_ptr2->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddInputDesc("w", 
GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + ComputeGraphPtr graph_ptr1 = std::make_shared("name"); + n1 = graph_ptr1->AddNode(desc_ptr1); + NodePtr n2 = graph_ptr1->AddNode(desc_ptr2); + + EXPECT_EQ(GraphUtils::AddEdge(n1->GetOutControlAnchor(), n2->GetInControlAnchor()), GRAPH_SUCCESS); + EXPECT_EQ(AnchorUtils::HasControlEdge(n1->GetOutControlAnchor()), true); + EXPECT_EQ(AnchorUtils::IsControlEdge(n1->GetOutControlAnchor(), n2->GetInControlAnchor()), true); + EXPECT_EQ(GraphUtils::RemoveEdge(n1->GetOutControlAnchor(), n2->GetInControlAnchor()), GRAPH_SUCCESS); + + EXPECT_EQ(GraphUtils::AddEdge(n1->GetOutDataAnchor(0), n2->GetInControlAnchor()), GRAPH_SUCCESS); + EXPECT_EQ(AnchorUtils::HasControlEdge(n1->GetOutDataAnchor(0)), true); + EXPECT_EQ(AnchorUtils::IsControlEdge(n1->GetOutDataAnchor(0), n2->GetInControlAnchor()), true); + EXPECT_EQ(GraphUtils::RemoveEdge(n1->GetOutDataAnchor(0), n2->GetInControlAnchor()), GRAPH_SUCCESS); + + EXPECT_EQ(GraphUtils::AddEdge(n1->GetOutDataAnchor(0), n2->GetInDataAnchor(0)), GRAPH_SUCCESS); + EXPECT_EQ(AnchorUtils::HasControlEdge(n1->GetOutDataAnchor(0)), false); + EXPECT_EQ(AnchorUtils::IsControlEdge(n1->GetOutDataAnchor(0), n2->GetInDataAnchor(0)), false); + EXPECT_EQ(GraphUtils::RemoveEdge(n1->GetOutDataAnchor(0), n2->GetInDataAnchor(0)), GRAPH_SUCCESS); +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_def_type_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_def_type_unittest.cc new file mode 100644 index 00000000..10d6dc86 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_def_type_unittest.cc @@ -0,0 +1,75 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include "graph/ge_attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" + +using namespace std; +using namespace ge; + +class UtestGeTestDefType : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeTestDefType, base) { + CompressInfo com1; + com1.set_blockrow(1); + int32_t a = com1.blockrow; + EXPECT_EQ(a, 1); + +} + +TEST_F(UtestGeTestDefType, quant) { + OpDescPtr desc_ptr1 = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr1->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr1->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr1->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + EXPECT_EQ(OpDescUtils::HasQuantizeFactorParams(desc_ptr1), false); + EXPECT_EQ(OpDescUtils::HasQuantizeFactorParams(*desc_ptr1), false); + QuantizeFactorParams q1; + EXPECT_EQ(q1.has_quantize_param(), false); + QuantizeFactor *qf1 = q1.mutable_quantize_param(); + EXPECT_EQ(q1.has_quantize_param(), true); + + string s1 = "value1"; + q1.quantize_param.set_scale_value(s1.data(), s1.size()); + EXPECT_EQ(OpDescUtils::SetQuantizeFactorParams(desc_ptr1, q1), GRAPH_SUCCESS); + QuantizeFactorParams q2; + EXPECT_EQ(OpDescUtils::GetQuantizeFactorParams(desc_ptr1, q2), GRAPH_SUCCESS); + string s2((char *)q2.quantize_param.scale_value.GetData(), 
q2.quantize_param.scale_value.GetSize()); + EXPECT_EQ(s2, "value1"); + + float f[2] = {1, 2}; + string s(static_cast(static_cast(f)), 2 * sizeof(float)); + q1.quantize_param.set_scale_value(f, 2 * sizeof(float)); + EXPECT_EQ(OpDescUtils::SetQuantizeFactorParams(*desc_ptr1, q1), GRAPH_SUCCESS); + QuantizeFactorParams q3; + EXPECT_EQ(OpDescUtils::GetQuantizeFactorParams(*desc_ptr1, q3), GRAPH_SUCCESS); + Buffer &b = q3.quantize_param.scale_value; + float f1[2]; + memcpy(f1, b.GetData(), b.GetSize()); + EXPECT_EQ(f1[0], 1); + EXPECT_EQ(f1[1], 2); +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_format_refiner_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_format_refiner_unittest.cc new file mode 100644 index 00000000..a76ad02e --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_format_refiner_unittest.cc @@ -0,0 +1,661 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include + +#include "graph_builder_utils.h" + +#define private public +#define protected public +#include "format_refiner.h" +#undef private +#undef protected + +namespace ge { +class UtestFormatRefiner : public testing::Test { + protected: + void SetUp() { + char *which_op = getenv("WHICH_OP"); + if (which_op != nullptr) { + is_set_env = true; + return; + ; + } + int ret = setenv("WHICH_OP", "GEOP", 0); + } + + void TearDown() { + if (!is_set_env) { + unsetenv("WHICH_OP"); + } + } + + private: + bool is_set_env{false}; +}; + +namespace { + +/// +/// netoutput1 +/// | +/// relu1 +/// | +/// conv1 +/// / \ +/// var1 var2 +/// +ut::GraphBuilder BuildGraph1() { + auto builder = ut::GraphBuilder("g1"); + auto var1 = builder.AddNDNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNDNode("var2", "Variable", 0, 1); + auto conv1 = builder.AddNDNode("conv1", "Conv2D", 2, 1); + auto conv_data = conv1->GetOpDesc()->GetInputDesc(0); + conv_data.SetFormat(FORMAT_NCHW); + conv_data.SetShape(GeShape(std::vector({1, 3, 224, 224}))); + conv1->GetOpDesc()->UpdateInputDesc(0, conv_data); + auto weight = conv1->GetOpDesc()->GetInputDesc(1); + weight.SetFormat(FORMAT_HWCN); + weight.SetShape(GeShape(std::vector({1, 1, 3, 256}))); + conv1->GetOpDesc()->UpdateInputDesc(1, weight); + auto conv_out = conv1->GetOpDesc()->GetOutputDesc(0); + conv_out.SetFormat(FORMAT_NCHW); + conv_out.SetShape(GeShape(std::vector({1, 256, 224, 224}))); + conv1->GetOpDesc()->UpdateOutputDesc(0, conv_out); + auto relu1 = builder.AddNDNode("relu1", "Relu", 1, 1); + auto netoutput1 = builder.AddNDNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(var1, 0, conv1, 0); + builder.AddDataEdge(var2, 0, conv1, 1); + builder.AddDataEdge(conv1, 0, relu1, 0); + builder.AddDataEdge(relu1, 0, netoutput1, 0); + FormatRefiner::SetInferOrigineFormatFlag(true); + return builder; +} + +/// +/// netoutput1 +/// | +/// relu1 +/// | +/// bn1 ----------------- +/// | \ \ \ \ 
+/// conv1 var3 var4 var5 var6 +/// | \ +/// var1 var2 +/// +ut::GraphBuilder BuildGraph2() { + auto builder = ut::GraphBuilder("g2"); + auto var1 = builder.AddNDNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNDNode("var2", "Variable", 0, 1); + auto var3 = builder.AddNDNode("var3", "Variable", 0, 1); + auto var4 = builder.AddNDNode("var4", "Variable", 0, 1); + auto var5 = builder.AddNDNode("var5", "Variable", 0, 1); + auto var6 = builder.AddNDNode("var6", "Variable", 0, 1); + auto conv1 = builder.AddNDNode("conv1", "Conv2D", 2, 1); + auto conv_data = conv1->GetOpDesc()->GetInputDesc(0); + conv_data.SetFormat(FORMAT_NHWC); + conv_data.SetShape(GeShape(std::vector({1, 3, 224, 224}))); + conv1->GetOpDesc()->UpdateInputDesc(0, conv_data); + auto weight = conv1->GetOpDesc()->GetInputDesc(1); + weight.SetFormat(FORMAT_HWCN); + weight.SetShape(GeShape(std::vector({1, 1, 3, 256}))); + conv1->GetOpDesc()->UpdateInputDesc(1, weight); + auto conv_out = conv1->GetOpDesc()->GetOutputDesc(0); + conv_out.SetFormat(FORMAT_NHWC); + conv_out.SetShape(GeShape(std::vector({1, 256, 224, 224}))); + conv1->GetOpDesc()->UpdateOutputDesc(0, conv_out); + auto bn1 = builder.AddNDNode("bn1", "BatchNorm", 5, 1); + auto relu1 = builder.AddNDNode("relu1", "Relu", 1, 1); + auto netoutput1 = builder.AddNDNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(var1, 0, conv1, 0); + builder.AddDataEdge(var2, 0, conv1, 1); + builder.AddDataEdge(conv1, 0, bn1, 0); + builder.AddDataEdge(var3, 0, bn1, 1); + builder.AddDataEdge(var4, 0, bn1, 2); + builder.AddDataEdge(var5, 0, bn1, 3); + builder.AddDataEdge(var6, 0, bn1, 4); + builder.AddDataEdge(bn1, 0, relu1, 0); + builder.AddDataEdge(relu1, 0, netoutput1, 0); + FormatRefiner::SetInferOrigineFormatFlag(true); + return builder; +} + +/// +/// netoutput1 +/// | +/// conv2 +/// | \ +/// relu1 var3 +/// | +/// conv1 +/// / \ +/// var1 var2 +/// +ut::GraphBuilder BuildGraph3() { + auto builder = ut::GraphBuilder("g3"); + auto var1 = 
builder.AddNDNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNDNode("var2", "Variable", 0, 1); + auto var3 = builder.AddNDNode("var3", "Variable", 0, 1); + auto conv1 = builder.AddNDNode("conv1", "Conv2D", 2, 1); + auto conv_data = conv1->GetOpDesc()->GetInputDesc(0); + conv_data.SetFormat(FORMAT_NCHW); + conv_data.SetShape(GeShape(std::vector({1, 3, 224, 224}))); + conv1->GetOpDesc()->UpdateInputDesc(0, conv_data); + auto weight = conv1->GetOpDesc()->GetInputDesc(1); + weight.SetFormat(FORMAT_HWCN); + weight.SetShape(GeShape(std::vector({1, 1, 3, 256}))); + conv1->GetOpDesc()->UpdateInputDesc(1, weight); + auto conv_out = conv1->GetOpDesc()->GetOutputDesc(0); + conv_out.SetFormat(FORMAT_NCHW); + conv_out.SetShape(GeShape(std::vector({1, 256, 224, 224}))); + conv1->GetOpDesc()->UpdateOutputDesc(0, conv_out); + auto relu1 = builder.AddNDNode("relu1", "Relu", 1, 1); + auto conv2 = builder.AddNDNode("conv2", "Conv2D", 2, 1); + conv_data = conv2->GetOpDesc()->GetInputDesc(0); + conv_data.SetFormat(FORMAT_NHWC); + conv_data.SetShape(GeShape(std::vector({1, 3, 224, 224}))); + conv2->GetOpDesc()->UpdateInputDesc(0, conv_data); + weight = conv2->GetOpDesc()->GetInputDesc(1); + weight.SetFormat(FORMAT_HWCN); + weight.SetShape(GeShape(std::vector({1, 1, 3, 256}))); + conv2->GetOpDesc()->UpdateInputDesc(1, weight); + conv_out = conv2->GetOpDesc()->GetOutputDesc(0); + conv_out.SetFormat(FORMAT_NHWC); + conv_out.SetShape(GeShape(std::vector({1, 256, 224, 224}))); + conv2->GetOpDesc()->UpdateOutputDesc(0, conv_out); + auto netoutput1 = builder.AddNDNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(var1, 0, conv1, 0); + builder.AddDataEdge(var2, 0, conv1, 1); + builder.AddDataEdge(conv1, 0, relu1, 0); + builder.AddDataEdge(relu1, 0, conv2, 0); + builder.AddDataEdge(var3, 0, conv2, 1); + builder.AddDataEdge(conv2, 0, netoutput1, 0); + FormatRefiner::SetInferOrigineFormatFlag(true); + return builder; +} + +/// +/// netoutput1 +/// | +/// conv2 +/// | \ +/// 
relu1 var3 +/// | +/// bn1 +/// | +/// conv1 +/// / \ +/// var1 var2 +/// +ut::GraphBuilder BuildGraph4() { + auto builder = ut::GraphBuilder("g4"); + auto var1 = builder.AddNDNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNDNode("var2", "Variable", 0, 1); + auto var3 = builder.AddNDNode("var3", "Variable", 0, 1); + auto conv1 = builder.AddNDNode("conv1", "Conv2D", 2, 1); + auto conv_data = conv1->GetOpDesc()->GetInputDesc(0); + conv_data.SetFormat(FORMAT_NHWC); + conv_data.SetShape(GeShape(std::vector({1, 3, 224, 224}))); + conv1->GetOpDesc()->UpdateInputDesc(0, conv_data); + auto weight = conv1->GetOpDesc()->GetInputDesc(1); + weight.SetFormat(FORMAT_HWCN); + weight.SetShape(GeShape(std::vector({1, 1, 3, 256}))); + conv1->GetOpDesc()->UpdateInputDesc(1, weight); + auto conv_out = conv1->GetOpDesc()->GetOutputDesc(0); + conv_out.SetFormat(FORMAT_NHWC); + conv_out.SetShape(GeShape(std::vector({1, 256, 224, 224}))); + conv1->GetOpDesc()->UpdateOutputDesc(0, conv_out); + auto bn1 = builder.AddNDNode("bn1", "BatchNorm", 1, 1); + auto relu1 = builder.AddNDNode("relu1", "Relu", 1, 1); + auto conv2 = builder.AddNDNode("conv2", "Conv2D", 2, 1); + conv_data = conv2->GetOpDesc()->GetInputDesc(0); + conv_data.SetFormat(FORMAT_NHWC); + conv_data.SetShape(GeShape(std::vector({1, 3, 224, 224}))); + conv2->GetOpDesc()->UpdateInputDesc(0, conv_data); + weight = conv2->GetOpDesc()->GetInputDesc(1); + weight.SetFormat(FORMAT_HWCN); + weight.SetShape(GeShape(std::vector({1, 1, 3, 256}))); + conv2->GetOpDesc()->UpdateInputDesc(1, weight); + conv_out = conv2->GetOpDesc()->GetOutputDesc(0); + conv_out.SetFormat(FORMAT_NHWC); + conv_out.SetShape(GeShape(std::vector({1, 256, 224, 224}))); + conv2->GetOpDesc()->UpdateOutputDesc(0, conv_out); + auto netoutput1 = builder.AddNDNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(var1, 0, conv1, 0); + builder.AddDataEdge(var2, 0, conv1, 1); + builder.AddDataEdge(conv1, 0, bn1, 0); + builder.AddDataEdge(bn1, 0, relu1, 0); + 
builder.AddDataEdge(relu1, 0, conv2, 0); + builder.AddDataEdge(var3, 0, conv2, 1); + builder.AddDataEdge(conv2, 0, netoutput1, 0); + FormatRefiner::SetInferOrigineFormatFlag(true); + return builder; +} + +/// +/// netoutput1 +/// | +/// apply1 +/// / \ +/// relug1 --> bng1 \ +/// \ / | \ \ +/// relu1 | | \ +/// \| | | +/// | | | +/// bn1 | | +/// \ | | +/// conv1 | +/// / \| +/// / | +/// data1 var1 +/// +ut::GraphBuilder BuilderGraph5() { + auto builder = ut::GraphBuilder("g5"); + auto data1 = builder.AddNDNode("data1", "Data", 0, 1); + auto var1 = builder.AddNDNode("var1", "Variable", 0, 1); + auto conv1 = builder.AddNDNode("conv1", "Conv2D", 2, 1); + auto conv_data = conv1->GetOpDesc()->GetInputDesc(0); + conv_data.SetFormat(FORMAT_NHWC); + conv_data.SetShape(GeShape(std::vector({1, 3, 224, 224}))); + conv1->GetOpDesc()->UpdateInputDesc(0, conv_data); + auto weight = conv1->GetOpDesc()->GetInputDesc(1); + weight.SetFormat(FORMAT_HWCN); + weight.SetShape(GeShape(std::vector({1, 1, 3, 256}))); + conv1->GetOpDesc()->UpdateInputDesc(1, weight); + auto conv_out = conv1->GetOpDesc()->GetOutputDesc(0); + conv_out.SetFormat(FORMAT_NHWC); + conv_out.SetShape(GeShape(std::vector({1, 256, 224, 224}))); + conv1->GetOpDesc()->UpdateOutputDesc(0, conv_out); + auto bn1 = builder.AddNDNode("bn1", "BatchNorm", 1, 1); + auto relu1 = builder.AddNDNode("relu1", "Relu", 1, 1); + auto relug1 = builder.AddNDNode("relug1", "ReluGrad", 1, 1); + auto bng1 = builder.AddNDNode("bng1", "BatchNormGrad", 4, 1); + auto apply1 = builder.AddNDNode("apply1", "ApplyMomentum", 2, 1); + auto netoutput1 = builder.AddNDNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(data1, 0, conv1, 0); + builder.AddDataEdge(var1, 0, conv1, 1); + builder.AddDataEdge(var1, 0, apply1, 1); + builder.AddDataEdge(conv1, 0, bn1, 0); + builder.AddDataEdge(conv1, 0, bng1, 3); + builder.AddDataEdge(bn1, 0, relu1, 0); + builder.AddDataEdge(bn1, 0, bng1, 2); + builder.AddDataEdge(relu1, 0, relug1, 0); + 
builder.AddDataEdge(relu1, 0, bng1, 1); + builder.AddDataEdge(relug1, 0, bng1, 0); + builder.AddDataEdge(bng1, 0, apply1, 0); + builder.AddDataEdge(apply1, 0, netoutput1, 0); + FormatRefiner::SetInferOrigineFormatFlag(true); + return builder; +} + +/// +/// netoutput1 +/// | +/// AddN +/// / \ \ +/// L2Loss GatherV2 Constant +/// / \ +/// Data1 Data2 +/// +ut::GraphBuilder BuildGraph6() { + auto builder = ut::GraphBuilder("g1"); + auto data1 = builder.AddNDNode("data1", "Data", 1, 1); + auto data2 = builder.AddNDNode("data2", "Data", 1, 1); + auto loss = builder.AddNDNode("loss", "L2Loss", 1, 1); + auto gather = builder.AddNDNode("gather", "GatherV2", 1, 1); + auto addn = builder.AddNDNode("addN", "AddN", 3, 1); + auto netoutput = builder.AddNDNode("netoutput", "NetOutput", 1, 0); + auto constant = builder.AddNDNode("constant", "Constant", 0, 1); + + auto data1_input = data1->GetOpDesc()->GetInputDesc(0); + data1_input.SetFormat(FORMAT_HWCN); + data1->GetOpDesc()->UpdateInputDesc(0, data1_input); + auto data1_output = data1->GetOpDesc()->GetOutputDesc(0); + data1_output.SetFormat(FORMAT_HWCN); + data1->GetOpDesc()->UpdateOutputDesc(0, data1_output); + + auto net_input = netoutput->GetOpDesc()->GetInputDesc(0); + net_input.SetFormat(FORMAT_NCHW); + netoutput->GetOpDesc()->UpdateInputDesc(0, net_input); + + auto data2_input = data2->GetOpDesc()->GetInputDesc(0); + data2_input.SetFormat(FORMAT_HWCN); + data2->GetOpDesc()->UpdateInputDesc(0, data2_input); + auto data2_output = data2->GetOpDesc()->GetOutputDesc(0); + data2_output.SetFormat(FORMAT_HWCN); + data2->GetOpDesc()->UpdateOutputDesc(0, data2_output); + + builder.AddDataEdge(data1, 0, loss, 0); + builder.AddDataEdge(data2, 0, gather, 0); + builder.AddDataEdge(loss, 0, addn, 0); + builder.AddDataEdge(gather, 0, addn, 1); + builder.AddDataEdge(constant, 0, addn, 2); + builder.AddDataEdge(addn, 0, netoutput, 0); + + FormatRefiner::SetInferOrigineFormatFlag(true); + + return builder; +} + +/// +/// netoutput1 +/// | 
+/// AddN +/// / \ \ +/// L2Loss GatherV2 Constant +/// / \ +/// Data1 Data2 +/// +ut::GraphBuilder BuildGraph7() { + auto builder = ut::GraphBuilder("g1"); + auto data1 = builder.AddNDNode("data1", "Data", 1, 1); + auto data2 = builder.AddNDNode("data2", "Data", 1, 1); + auto loss = builder.AddNDNode("loss", "L2Loss", 1, 1); + auto gather = builder.AddNDNode("gather", "GatherV2", 1, 1); + auto addn = builder.AddNDNode("addN", "AddN", 3, 1); + auto netoutput = builder.AddNDNode("netoutput", "NetOutput", 1, 0); + auto constant = builder.AddNDNode("constant", "Constant", 0, 1); + + auto data1_input = data1->GetOpDesc()->GetInputDesc(0); + data1->GetOpDesc()->UpdateInputDesc(0, data1_input); + auto data1_output = data1->GetOpDesc()->GetOutputDesc(0); + data1->GetOpDesc()->UpdateOutputDesc(0, data1_output); + + auto net_input = netoutput->GetOpDesc()->GetInputDesc(0); + netoutput->GetOpDesc()->UpdateInputDesc(0, net_input); + + auto data2_input = data2->GetOpDesc()->GetInputDesc(0); + data2->GetOpDesc()->UpdateInputDesc(0, data2_input); + auto data2_output = data2->GetOpDesc()->GetOutputDesc(0); + data2->GetOpDesc()->UpdateOutputDesc(0, data2_output); + + builder.AddDataEdge(data1, 0, loss, 0); + builder.AddDataEdge(data2, 0, gather, 0); + builder.AddDataEdge(loss, 0, addn, 0); + builder.AddDataEdge(gather, 0, addn, 1); + builder.AddDataEdge(constant, 0, addn, 2); + builder.AddDataEdge(addn, 0, netoutput, 0); + + FormatRefiner::SetInferOrigineFormatFlag(true); + + return builder; +} + +/// +/// data2 +/// | +/// data1 relu +/// | +/// reshape +/// \ / +/// conv +/// | +/// netoutput +/// +ut::GraphBuilder BuildGraph8() { + auto builder = ut::GraphBuilder("g8"); + + auto data1 = builder.AddNDNode("data1", "Data", 1, 1); + auto data2 = builder.AddNDNode("data2", "Data", 1, 1); + auto relu = builder.AddNDNode("relu", "Relu", 1, 1); + auto reshape = builder.AddNDNode("reshape", "Reshape", 1, 1); + auto conv = builder.AddNDNode("conv", "Conv2D", 2, 1); + auto netoutput = 
builder.AddNDNode("netoutput", "NetOutput", 1, 0); + + auto reshape_data = reshape->GetOpDesc()->GetInputDesc(0); + reshape_data.SetFormat(FORMAT_ND); + reshape_data.SetOriginFormat(FORMAT_ND); + reshape_data.SetShape(GeShape(std::vector({224, 224}))); + reshape_data.SetShape(GeShape(std::vector({224, 224}))); + reshape->GetOpDesc()->UpdateInputDesc(0, reshape_data); + reshape->GetOpDesc()->UpdateOutputDesc(0, reshape_data); + + auto conv_data = conv->GetOpDesc()->GetInputDesc(0); + conv_data.SetFormat(FORMAT_NHWC); + conv_data.SetShape(GeShape(std::vector({1, 3, 224, 224}))); + conv->GetOpDesc()->UpdateInputDesc(0, conv_data); + auto weight = conv->GetOpDesc()->GetInputDesc(1); + weight.SetFormat(FORMAT_HWCN); + weight.SetShape(GeShape(std::vector({1, 1, 3, 256}))); + conv->GetOpDesc()->UpdateInputDesc(1, weight); + auto conv_out = conv->GetOpDesc()->GetOutputDesc(0); + conv_out.SetFormat(FORMAT_NHWC); + conv_out.SetShape(GeShape(std::vector({1, 256, 224, 224}))); + conv->GetOpDesc()->UpdateOutputDesc(0, conv_out); + + builder.AddDataEdge(data1, 0, conv, 0); + builder.AddDataEdge(data2, 0, relu, 0); + builder.AddDataEdge(relu, 0, reshape, 0); + builder.AddDataEdge(reshape, 0, conv, 1); + builder.AddDataEdge(conv, 0, netoutput, 0); + FormatRefiner::SetInferOrigineFormatFlag(true); + return builder; +} +} // namespace + +TEST_F(UtestFormatRefiner, data_format) { + auto builder = BuildGraph8(); + auto graph = builder.GetGraph(); + FormatRefiner::SetInferOrigineFormatFlag(false); + graph->SaveDataFormat(FORMAT_NCHW); + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); + auto data2 = graph->FindNode("data2"); + auto relu = graph->FindNode("relu"); + EXPECT_EQ(data2->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NCHW); + EXPECT_EQ(data2->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NCHW); + EXPECT_EQ(relu->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NCHW); + EXPECT_EQ(relu->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), 
FORMAT_NCHW); + FormatRefiner::SetInferOrigineFormatFlag(true); +} + +TEST_F(UtestFormatRefiner, constant_fail) { + FormatRefiner::SetInferOrigineFormatFlag(true); + auto builder = BuildGraph6(); + auto graph = builder.GetGraph(); + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_FAILED); +} + +TEST_F(UtestFormatRefiner, scalar_nodes_infer) { + FormatRefiner::SetInferOrigineFormatFlag(true); + auto builder = BuildGraph6(); + auto graph = builder.GetGraph(); + auto constant = graph->FindNode("constant"); + ge::GeTensorPtr value = std::make_shared(); + AttrUtils::SetTensor(constant->GetOpDesc(), "value", value); + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); +} + +TEST_F(UtestFormatRefiner, forward_and_default_infer_func) { + auto builder = BuildGraph1(); + auto graph = builder.GetGraph(); + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); + auto var1 = graph->FindNode("var1"); + EXPECT_EQ(var1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NCHW); + auto var2 = graph->FindNode("var2"); + EXPECT_EQ(var2->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_HWCN); + auto relu1 = graph->FindNode("relu1"); + EXPECT_EQ(relu1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NCHW); + EXPECT_EQ(relu1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NCHW); + auto netoutput1 = graph->FindNode("netoutput1"); + EXPECT_EQ(netoutput1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NCHW); + auto conv1 = graph->FindNode("conv1"); + EXPECT_EQ(conv1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NCHW); + EXPECT_EQ(conv1->GetOpDesc()->GetInputDesc(1).GetOriginFormat(), FORMAT_HWCN); + EXPECT_EQ(conv1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NCHW); +} + +TEST_F(UtestFormatRefiner, forward_and_specifed_infer_func) { + auto builder = BuildGraph1(); + auto graph = builder.GetGraph(); + auto relu1 = graph->FindNode("relu1"); + relu1->GetOpDesc()->AddInferFormatFunc([](Operator 
&op) { + auto output1 = op.GetOutputDesc(0); + output1.SetOriginFormat(FORMAT_NHWC); + op.UpdateOutputDesc("0", output1); + return GRAPH_SUCCESS; + }); + + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); + auto var1 = graph->FindNode("var1"); + EXPECT_EQ(var1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NCHW); + auto var2 = graph->FindNode("var2"); + EXPECT_EQ(var2->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_HWCN); + EXPECT_EQ(relu1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NCHW); + EXPECT_EQ(relu1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + auto netoutput1 = graph->FindNode("netoutput1"); + EXPECT_EQ(netoutput1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NHWC); +} + +TEST_F(UtestFormatRefiner, failed_when_infer) { + auto builder = BuildGraph1(); + auto graph = builder.GetGraph(); + auto relu1 = graph->FindNode("relu1"); + relu1->GetOpDesc()->AddInferFormatFunc([](Operator &op) { return GRAPH_FAILED; }); + + EXPECT_NE(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); +} + +TEST_F(UtestFormatRefiner, forward_backward) { + auto builder = BuildGraph2(); + auto graph = builder.GetGraph(); + + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); + auto bn1 = graph->FindNode("bn1"); + EXPECT_EQ(bn1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(bn1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + for (auto name : {"var3", "var4", "var5", "var6"}) { + auto node = graph->FindNode(name); + EXPECT_EQ(node->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + } +} + +TEST_F(UtestFormatRefiner, format_conflict) { + auto builder = BuildGraph3(); + auto graph = builder.GetGraph(); + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); +} + +TEST_F(UtestFormatRefiner, infer_stop_nd) { + auto builder = BuildGraph1(); + auto graph = builder.GetGraph(); + auto relu1 = graph->FindNode("relu1"); + 
relu1->GetOpDesc()->AddInferFormatFunc([](Operator &op) { return GRAPH_SUCCESS; }); + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); + auto var1 = graph->FindNode("var1"); + EXPECT_EQ(var1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NCHW); + auto var2 = graph->FindNode("var2"); + EXPECT_EQ(var2->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_HWCN); + relu1 = graph->FindNode("relu1"); + EXPECT_EQ(relu1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NCHW); + EXPECT_EQ(relu1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_ND); + auto netoutput1 = graph->FindNode("netoutput1"); + EXPECT_EQ(netoutput1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_ND); + auto conv1 = graph->FindNode("conv1"); + EXPECT_EQ(conv1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NCHW); + EXPECT_EQ(conv1->GetOpDesc()->GetInputDesc(1).GetOriginFormat(), FORMAT_HWCN); + EXPECT_EQ(conv1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NCHW); +} + +TEST_F(UtestFormatRefiner, infer_stop_same_format) { + auto builder = BuildGraph4(); + auto graph = builder.GetGraph(); + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); +} + +TEST_F(UtestFormatRefiner, forward_multi_output) { + auto builder = BuilderGraph5(); + auto graph = builder.GetGraph(); + auto apply1 = graph->FindNode("apply1"); + apply1->GetOpDesc()->AddInferFormatFunc([](Operator &op) { + auto out = op.GetOutputDesc(0); + out.SetOriginFormat(FORMAT_NHWC); + op.UpdateOutputDesc("0", out); + auto in0 = op.GetInputDesc(0); + in0.SetOriginFormat(FORMAT_NHWC); + op.UpdateInputDesc("0", in0); + auto in1 = op.GetInputDesc(1); + in1.SetOriginFormat(FORMAT_HWCN); + op.UpdateInputDesc("1", in1); + return GRAPH_SUCCESS; + }); + + EXPECT_EQ(FormatRefiner::InferOrigineFormat(graph), GRAPH_SUCCESS); + + auto data1 = graph->FindNode("data1"); + EXPECT_EQ(data1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + auto var1 = 
graph->FindNode("var1"); + EXPECT_EQ(var1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_HWCN); + auto bn1 = graph->FindNode("bn1"); + EXPECT_EQ(bn1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(bn1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NHWC); + auto relu1 = graph->FindNode("relu1"); + EXPECT_EQ(relu1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(relu1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NHWC); + auto relug1 = graph->FindNode("relug1"); + EXPECT_EQ(relug1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(relug1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NHWC); + auto bng1 = graph->FindNode("bng1"); + EXPECT_EQ(bng1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(bng1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(bng1->GetOpDesc()->GetInputDesc(1).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(bng1->GetOpDesc()->GetInputDesc(2).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(bng1->GetOpDesc()->GetInputDesc(3).GetOriginFormat(), FORMAT_NHWC); + + EXPECT_EQ(apply1->GetOpDesc()->GetOutputDesc(0).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(apply1->GetOpDesc()->GetInputDesc(0).GetOriginFormat(), FORMAT_NHWC); + EXPECT_EQ(apply1->GetOpDesc()->GetInputDesc(1).GetOriginFormat(), FORMAT_HWCN); +} + +TEST_F(UtestFormatRefiner, get_anchor_points_failed) { + ge::ComputeGraphPtr graph = nullptr; + std::vector anchor_points; + std::vector data_nodes; + std::unordered_map node_status; + auto status = FormatRefiner::GetAnchorPoints(graph, anchor_points, data_nodes, node_status); + EXPECT_EQ(status, GRAPH_FAILED); +} + +TEST_F(UtestFormatRefiner, anchor_process_failed) { + ge::NodePtr anchor_node; + std::unordered_map node_status; + auto status = FormatRefiner::AnchorProcess(anchor_node, node_status); + EXPECT_EQ(status, GRAPH_FAILED); +} + +TEST_F(UtestFormatRefiner, 
infer_origine_format_failed) { + ge::ComputeGraphPtr graph = nullptr; + auto status = FormatRefiner::InferOrigineFormat(graph); + EXPECT_EQ(status, GRAPH_FAILED); +} + +TEST_F(UtestFormatRefiner, save_format) { + FormatRefiner::SetInferOrigineFormatFlag(true); + auto builder = BuildGraph6(); + auto graph = builder.GetGraph(); + graph->SaveDataFormat(FORMAT_NHWC); + auto save_format = graph->GetDataFormat(); + EXPECT_EQ(save_format, FORMAT_NHWC); + graph->SaveDataFormat(FORMAT_ND); +} +} // namespace ge \ No newline at end of file diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_graph_anchor_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_graph_anchor_unittest.cc new file mode 100644 index 00000000..79777641 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_graph_anchor_unittest.cc @@ -0,0 +1,386 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define protected public +#define private public +#include "graph/anchor.h" + +#include "graph/node.h" +#include "graph/utils/anchor_utils.h" +#include "graph/utils/graph_utils.h" +#undef protected +#undef private + +using namespace ge; +using namespace std; + +class UtestGeAnchor : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeAnchor, data_anchor_test) { + ComputeGraphPtr graph_ptr = std::make_shared("graph"); + OpDescPtr in_op_ptr_1 = std::make_shared("in_op_1", "float"); + in_op_ptr_1->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr_1->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr in_owner_node_1 = graph_ptr->AddNode(in_op_ptr_1); + InDataAnchorPtr in_data_anchor = in_owner_node_1->GetInDataAnchor(0); + + OpDescPtr in_op_ptr_2 = std::make_shared("in_op_2", "float"); + in_op_ptr_2->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr_2->AddInputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr_2->AddOutputDesc("z", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr in_owner_node_2 = graph_ptr->AddNode(in_op_ptr_2); + InDataAnchorPtr in_data_anchor_x = in_owner_node_2->GetInDataAnchor(0); + InDataAnchorPtr in_data_anchor_y = in_owner_node_2->GetInDataAnchor(1); + InControlAnchorPtr in_control_anchor = in_owner_node_2->GetInControlAnchor(); + + OpDescPtr out_op_ptr_1 = std::make_shared("out_op_1", "float"); + out_op_ptr_1->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + out_op_ptr_1->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr out_owner_node_1 = graph_ptr->AddNode(out_op_ptr_1); + OutDataAnchorPtr out_data_anchor_1 = out_owner_node_1->GetOutDataAnchor(0); + + OpDescPtr out_op_ptr_2 = std::make_shared("out_op_2", "float"); + out_op_ptr_2->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), 
FORMAT_NCHW)); + out_op_ptr_2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr out_owner_node_2 = graph_ptr->AddNode(out_op_ptr_2); + OutDataAnchorPtr out_data_anchor_2 = out_owner_node_2->GetOutDataAnchor(0); + + EXPECT_EQ((in_data_anchor->LinkFrom(out_data_anchor_1)), GRAPH_SUCCESS); + EXPECT_EQ(out_data_anchor_1->LinkTo(in_data_anchor_x), GRAPH_SUCCESS); + EXPECT_EQ(in_data_anchor_y->LinkFrom(out_data_anchor_2), GRAPH_SUCCESS); + EXPECT_EQ(out_data_anchor_2->LinkTo(in_control_anchor), GRAPH_SUCCESS); + EXPECT_EQ(in_control_anchor->GetPeerOutDataAnchors().size(), 1); + EXPECT_EQ(out_data_anchor_2->GetPeerAnchors().size(), 2); + EXPECT_EQ(out_data_anchor_2->GetPeerInDataAnchors().size(), 1); + EXPECT_EQ(out_data_anchor_2->GetPeerInControlAnchors().size(), 1); + EXPECT_EQ(out_data_anchor_1->GetPeerAnchors().size(), 2); + EXPECT_NE(in_data_anchor_y->GetPeerOutAnchor(), nullptr); + EXPECT_EQ(in_data_anchor_x->GetIdx(), 0); + EXPECT_NE(in_data_anchor_y->GetOwnerNode(), nullptr); + EXPECT_EQ(out_data_anchor_1->GetPeerInDataAnchors().size(), 2); + EXPECT_EQ(in_data_anchor_x->Unlink(in_data_anchor_y), GRAPH_FAILED); + EXPECT_EQ(in_data_anchor->Unlink(out_data_anchor_2), GRAPH_FAILED); + EXPECT_EQ(out_data_anchor_2->Unlink(in_data_anchor_x), GRAPH_FAILED); + out_data_anchor_1->UnlinkAll(); + EXPECT_EQ(out_data_anchor_1->GetPeerInDataAnchors().size(), 0); + out_data_anchor_2->UnlinkAll(); + EXPECT_EQ(out_data_anchor_2->GetPeerAnchors().size(), 0); +} + +TEST_F(UtestGeAnchor, data_anchor_exception_test) { + ComputeGraphPtr graph_ptr = std::make_shared("graph"); + OpDescPtr in_op_ptr = std::make_shared("in_op_1", "float"); + in_op_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr in_owner_node = graph_ptr->AddNode(in_op_ptr); + InDataAnchorPtr in_data_anchor = in_owner_node->GetInDataAnchor(0); + + OpDescPtr out_op_ptr_1 = 
std::make_shared("out_op_1", "float"); + out_op_ptr_1->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + out_op_ptr_1->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr out_owner_node_1 = graph_ptr->AddNode(out_op_ptr_1); + OutDataAnchorPtr out_data_anchor_1 = out_owner_node_1->GetOutDataAnchor(0); + + OpDescPtr out_op_ptr_2 = std::make_shared("out_op_2", "float"); + out_op_ptr_2->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + out_op_ptr_2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr out_owner_node_2 = graph_ptr->AddNode(out_op_ptr_2); + + OutDataAnchorPtr out_data_anchor_2 = out_owner_node_2->GetOutDataAnchor(0); + + EXPECT_EQ(in_data_anchor->LinkFrom(nullptr), GRAPH_FAILED); + EXPECT_EQ(out_data_anchor_2->LinkTo(InDataAnchorPtr(nullptr)), GRAPH_FAILED); + EXPECT_EQ(out_data_anchor_2->LinkTo(InControlAnchorPtr(nullptr)), GRAPH_FAILED); + EXPECT_EQ(in_data_anchor->Unlink(nullptr), GRAPH_FAILED); + in_data_anchor->LinkFrom(out_data_anchor_1); + EXPECT_EQ(out_data_anchor_2->LinkTo(in_data_anchor), GRAPH_FAILED); + EXPECT_EQ(in_data_anchor->LinkFrom(out_data_anchor_2), GRAPH_FAILED); + EXPECT_EQ(in_data_anchor->Unlink(out_data_anchor_2), GRAPH_FAILED); + in_data_anchor->Unlink(out_data_anchor_1); + EXPECT_EQ(in_data_anchor->GetPeerOutAnchor(), nullptr); +} + +TEST_F(UtestGeAnchor, control_anchor_test) { + ComputeGraphPtr graph_ptr = std::make_shared("graph"); + OpDescPtr in_op_ptr_1 = std::make_shared("in_op_1", "float"); + in_op_ptr_1->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr_1->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr in_owner_node_1 = graph_ptr->AddNode(in_op_ptr_1); + InControlAnchorPtr in_control_anchor_1 = in_owner_node_1->GetInControlAnchor(); + + OpDescPtr in_op_ptr_2 = std::make_shared("in_op_2", "float"); + in_op_ptr_2->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 
8, 8}), FORMAT_NCHW)); + in_op_ptr_2->AddInputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr_2->AddOutputDesc("z", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr in_owner_node_2 = graph_ptr->AddNode(in_op_ptr_2); + InControlAnchorPtr in_control_anchor_2 = in_owner_node_2->GetInControlAnchor(); + + OpDescPtr out_op_ptr_1 = std::make_shared("out_op_1", "float"); + out_op_ptr_1->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + out_op_ptr_1->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr out_owner_node_1 = graph_ptr->AddNode(out_op_ptr_1); + OutControlAnchorPtr out_control_anchor_1 = out_owner_node_1->GetOutControlAnchor(); + + OpDescPtr out_op_ptr_2 = std::make_shared("out_op_2", "float"); + out_op_ptr_2->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + out_op_ptr_2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr out_owner_node_2 = graph_ptr->AddNode(out_op_ptr_2); + + OutControlAnchorPtr out_control_anchor_2 = out_owner_node_2->GetOutControlAnchor(); + + EXPECT_EQ(in_control_anchor_1->LinkFrom(out_control_anchor_1), GRAPH_SUCCESS); + EXPECT_EQ(out_control_anchor_1->LinkTo(in_control_anchor_2), GRAPH_SUCCESS); + EXPECT_EQ(in_control_anchor_2->LinkFrom(out_control_anchor_2), GRAPH_SUCCESS); + EXPECT_EQ(in_control_anchor_1->GetPeerAnchors().size(), 1); + EXPECT_EQ(in_control_anchor_2->GetPeerOutControlAnchors().size(), 2); + EXPECT_NE(in_control_anchor_2->GetOwnerNode(), nullptr); + EXPECT_EQ(out_control_anchor_1->GetPeerInControlAnchors().size(), 2); + + EXPECT_EQ(in_control_anchor_1->Unlink(out_control_anchor_2), GRAPH_FAILED); + EXPECT_EQ(out_control_anchor_2->Unlink(in_control_anchor_1), GRAPH_FAILED); + EXPECT_EQ(in_control_anchor_1->Unlink(in_control_anchor_2), GRAPH_FAILED); + + EXPECT_EQ(out_control_anchor_2->Unlink(in_control_anchor_2), GRAPH_SUCCESS); + 
EXPECT_EQ(in_control_anchor_2->GetPeerOutControlAnchors().size(), 1); + EXPECT_EQ(out_control_anchor_1->GetPeerAnchors().size(), 2); + out_control_anchor_1->UnlinkAll(); + EXPECT_EQ(out_control_anchor_1->GetPeerAnchors().size(), 0); +} + +TEST_F(UtestGeAnchor, control_anchor_exception_test) { + ComputeGraphPtr graph_ptr = std::make_shared("graph"); + OpDescPtr in_op_ptr = std::make_shared("in_op_1", "float"); + in_op_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr in_owner_node = graph_ptr->AddNode(in_op_ptr); + InControlAnchorPtr in_control_anchor = in_owner_node->GetInControlAnchor(); + + OpDescPtr out_op_ptr_1 = std::make_shared("out_op_1", "float"); + out_op_ptr_1->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + out_op_ptr_1->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr out_owner_node_1 = graph_ptr->AddNode(out_op_ptr_1); + OutControlAnchorPtr out_control_anchor_1 = out_owner_node_1->GetOutControlAnchor(); + + OpDescPtr out_op_ptr_2 = std::make_shared("out_op_2", "float"); + out_op_ptr_2->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + out_op_ptr_2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr out_owner_node_2 = graph_ptr->AddNode(out_op_ptr_2); + + OutControlAnchorPtr out_control_anchor_2 = out_owner_node_2->GetOutControlAnchor(); + + EXPECT_EQ(in_control_anchor->LinkFrom(nullptr), GRAPH_FAILED); + EXPECT_EQ(out_control_anchor_1->LinkTo(InControlAnchorPtr(nullptr)), GRAPH_FAILED); + EXPECT_EQ(in_control_anchor->Unlink(nullptr), GRAPH_FAILED); + in_control_anchor->LinkFrom(out_control_anchor_1); + EXPECT_EQ(in_control_anchor->Unlink(out_control_anchor_2), GRAPH_FAILED); + in_control_anchor->Unlink(out_control_anchor_1); + EXPECT_EQ(in_control_anchor->GetPeerOutControlAnchors().size(), 0); +} + +TEST_F(UtestGeAnchor, 
anchor_utils_test) { + ComputeGraphPtr graph_ptr = std::make_shared("graph"); + OpDescPtr relu_op_ptr = std::make_shared("relu", "float"); + relu_op_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + relu_op_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr relu_node = graph_ptr->AddNode(relu_op_ptr); + + EXPECT_EQ(AnchorUtils::SetFormat(relu_node->GetInDataAnchor(0), FORMAT_NC1HWC0), GRAPH_SUCCESS); + EXPECT_EQ(AnchorUtils::GetFormat(relu_node->GetInDataAnchor(0)), FORMAT_NC1HWC0); + + // exception + EXPECT_EQ(AnchorUtils::SetFormat(relu_node->GetInDataAnchor(2), FORMAT_NCHW), GRAPH_FAILED); + EXPECT_EQ(AnchorUtils::GetFormat(relu_node->GetInDataAnchor(2)), FORMAT_RESERVED); + + EXPECT_EQ(AnchorUtils::SetFormat(relu_node->GetOutDataAnchor(0), FORMAT_NC1HWC0), GRAPH_SUCCESS); + EXPECT_EQ(AnchorUtils::GetFormat(relu_node->GetOutDataAnchor(0)), FORMAT_NC1HWC0); + + // exception + EXPECT_EQ(AnchorUtils::SetFormat(relu_node->GetOutDataAnchor(0), FORMAT_RESERVED), GRAPH_FAILED); + EXPECT_EQ(AnchorUtils::GetFormat(relu_node->GetOutDataAnchor(1)), FORMAT_RESERVED); +} + +TEST_F(UtestGeAnchor, graph_utils_test) { + ComputeGraphPtr graph_ptr = std::make_shared("graph"); + OpDescPtr conv_op_ptr = std::make_shared("conv", "float"); + conv_op_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)); + conv_op_ptr->AddInputDesc("w", GeTensorDesc(GeShape({32, 16, 3, 3}), FORMAT_FRACTAL_Z)); + conv_op_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr conv_node = graph_ptr->AddNode(conv_op_ptr); + + OpDescPtr bn_op_ptr = std::make_shared("bn", "float"); + bn_op_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + bn_op_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr bn_node = graph_ptr->AddNode(bn_op_ptr); + + EXPECT_EQ(GraphUtils::AddEdge(nullptr, conv_node->GetInDataAnchor(0)), GRAPH_FAILED); + 
EXPECT_EQ(GraphUtils::AddEdge(nullptr, FORMAT_NCHW, conv_node->GetInDataAnchor(0), FORMAT_NCHW), GRAPH_FAILED); + + EXPECT_EQ(GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), bn_node->GetInDataAnchor(0)), GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::RemoveEdge(conv_node->GetOutDataAnchor(0), bn_node->GetInDataAnchor(0)), GRAPH_SUCCESS); + EXPECT_EQ( + GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), FORMAT_NC1HWC0, bn_node->GetInDataAnchor(0), FORMAT_NC1HWC0), + GRAPH_SUCCESS); + + EXPECT_EQ(GraphUtils::AddEdge(OutControlAnchorPtr(nullptr), bn_node->GetInControlAnchor()), GRAPH_FAILED); + EXPECT_EQ(GraphUtils::AddEdge(conv_node->GetOutControlAnchor(), bn_node->GetInControlAnchor()), GRAPH_SUCCESS); + + OpDescPtr relu_op_ptr = std::make_shared("relu", "float"); + relu_op_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + relu_op_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr relu_node = graph_ptr->AddNode(relu_op_ptr); + + EXPECT_EQ(GraphUtils::ReplaceEdgeDst(conv_node->GetOutControlAnchor(), bn_node->GetInControlAnchor(), + relu_node->GetInControlAnchor()), + GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::ReplaceEdgeDst(conv_node->GetOutControlAnchor(), bn_node->GetInControlAnchor(), + relu_node->GetInControlAnchor()), + GRAPH_FAILED); + EXPECT_EQ(GraphUtils::RemoveEdge(conv_node->GetOutControlAnchor(), bn_node->GetInControlAnchor()), GRAPH_FAILED); + EXPECT_EQ(GraphUtils::RemoveEdge(conv_node->GetOutControlAnchor(), relu_node->GetInControlAnchor()), GRAPH_SUCCESS); + + EXPECT_EQ(GraphUtils::ReplaceEdgeDst(conv_node->GetOutDataAnchor(0), bn_node->GetInDataAnchor(0), + relu_node->GetInDataAnchor(0)), + GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::ReplaceEdgeDst(conv_node->GetOutDataAnchor(0), bn_node->GetInDataAnchor(0), + relu_node->GetInDataAnchor(0)), + GRAPH_FAILED); + EXPECT_EQ(GraphUtils::RemoveEdge(conv_node->GetOutDataAnchor(0), bn_node->GetInDataAnchor(0)), GRAPH_FAILED); + + 
EXPECT_EQ(GraphUtils::AddEdge(OutDataAnchorPtr(nullptr), bn_node->GetInControlAnchor()), GRAPH_FAILED); + EXPECT_EQ(GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), bn_node->GetInControlAnchor()), GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::RemoveEdge(conv_node->GetOutDataAnchor(0), bn_node->GetInControlAnchor()), GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::RemoveEdge(conv_node->GetOutDataAnchor(0), bn_node->GetInControlAnchor()), GRAPH_FAILED); +} + +TEST_F(UtestGeAnchor, data_anchor_replace_peer) { + ComputeGraphPtr graph_ptr = std::make_shared("graph"); + OpDescPtr in_op_ptr = std::make_shared("in_op_1", "float"); + in_op_ptr->AddInputDesc("x1", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddInputDesc("x2", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddInputDesc("x3", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddOutputDesc("y1", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddOutputDesc("y2", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddOutputDesc("y3", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr node1 = graph_ptr->AddNode(in_op_ptr); + NodePtr node2 = graph_ptr->AddNode(in_op_ptr); + NodePtr node3 = graph_ptr->AddNode(in_op_ptr); + + OutDataAnchorPtr out_data_anchor = node1->GetOutDataAnchor(1); + InDataAnchorPtr in_data_anchor = node2->GetInDataAnchor(1); + EXPECT_EQ(out_data_anchor != nullptr, true); + EXPECT_EQ(in_data_anchor != nullptr, true); + EXPECT_EQ(node1->GetOutDataAnchor(1)->LinkTo(node2->GetInDataAnchor(0)), GRAPH_SUCCESS); + EXPECT_EQ(node1->GetOutDataAnchor(1)->LinkTo(node2->GetInDataAnchor(1)), GRAPH_SUCCESS); + EXPECT_EQ(node1->GetOutDataAnchor(1)->LinkTo(node2->GetInDataAnchor(2)), GRAPH_SUCCESS); + + int out_idx = 0; + for (; out_idx < out_data_anchor->peer_anchors_.size(); out_idx++) { + if (out_data_anchor->peer_anchors_[out_idx].lock() == in_data_anchor) { + break; + } + } + EXPECT_EQ(out_idx, 1); + + int in_idx = 0; + 
for (; in_idx < in_data_anchor->peer_anchors_.size(); in_idx++) { + if (in_data_anchor->peer_anchors_[in_idx].lock() == out_data_anchor) { + break; + } + } + EXPECT_EQ(in_idx, 0); + + out_data_anchor->ReplacePeer(in_data_anchor, node3->GetInDataAnchor(1), node3->GetOutDataAnchor(1)); + + int out_idx1 = 0; + for (; out_idx1 < out_data_anchor->peer_anchors_.size(); out_idx1++) { + if (out_data_anchor->peer_anchors_[out_idx1].lock() == node3->GetInDataAnchor(1)) { + break; + } + } + EXPECT_EQ(out_idx1, out_idx); + + int in_idx1 = 0; + for (; in_idx1 < in_data_anchor->peer_anchors_.size(); in_idx1++) { + if (in_data_anchor->peer_anchors_[in_idx1].lock() == node3->GetOutDataAnchor(1)) { + break; + } + } + EXPECT_EQ(in_idx1, in_idx); +} + +TEST_F(UtestGeAnchor, graph_utils_insert_node) { + ComputeGraphPtr graph_ptr = std::make_shared("graph"); + OpDescPtr in_op_ptr = std::make_shared("in_op_1", "float"); + in_op_ptr->AddInputDesc("x1", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddInputDesc("x2", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddInputDesc("x3", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddOutputDesc("y1", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddOutputDesc("y2", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + in_op_ptr->AddOutputDesc("y3", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + NodePtr node1 = graph_ptr->AddNode(in_op_ptr); + NodePtr node2 = graph_ptr->AddNode(in_op_ptr); + NodePtr node3 = graph_ptr->AddNode(in_op_ptr); + + OutDataAnchorPtr out_data_anchor = node1->GetOutDataAnchor(1); + InDataAnchorPtr in_data_anchor = node2->GetInDataAnchor(1); + EXPECT_EQ(out_data_anchor != nullptr, true); + EXPECT_EQ(in_data_anchor != nullptr, true); + EXPECT_EQ(node1->GetOutDataAnchor(1)->LinkTo(node2->GetInDataAnchor(0)), GRAPH_SUCCESS); + EXPECT_EQ(node1->GetOutDataAnchor(1)->LinkTo(node2->GetInDataAnchor(1)), GRAPH_SUCCESS); + 
EXPECT_EQ(node1->GetOutDataAnchor(1)->LinkTo(node2->GetInDataAnchor(2)), GRAPH_SUCCESS); + + int out_idx = 0; + for (; out_idx < out_data_anchor->peer_anchors_.size(); out_idx++) { + if (out_data_anchor->peer_anchors_[out_idx].lock() == in_data_anchor) { + break; + } + } + EXPECT_EQ(out_idx, 1); + + int in_idx = 0; + for (; in_idx < in_data_anchor->peer_anchors_.size(); in_idx++) { + if (in_data_anchor->peer_anchors_[in_idx].lock() == out_data_anchor) { + break; + } + } + EXPECT_EQ(in_idx, 0); + + GraphUtils::InsertNodeBetweenDataAnchors(out_data_anchor, in_data_anchor, node3); + + int out_idx1 = 0; + for (; out_idx1 < out_data_anchor->peer_anchors_.size(); out_idx1++) { + if (out_data_anchor->peer_anchors_[out_idx1].lock() == node3->GetInDataAnchor(0)) { + break; + } + } + EXPECT_EQ(out_idx1, out_idx); + + int in_idx1 = 0; + for (; in_idx1 < in_data_anchor->peer_anchors_.size(); in_idx1++) { + if (in_data_anchor->peer_anchors_[in_idx1].lock() == node3->GetOutDataAnchor(0)) { + break; + } + } + EXPECT_EQ(in_idx1, in_idx); +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_model_serialize_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_model_serialize_unittest.cc new file mode 100644 index 00000000..291fcbfa --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_model_serialize_unittest.cc @@ -0,0 +1,1616 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define private public +#define protected public +#include "graph/model_serialize.h" + +#include "graph/detail/model_serialize_imp.h" +#include "graph/ge_attr_value.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/tensor_utils.h" +#undef private +#undef protected + +#include "proto/ge_ir.pb.h" + +using namespace ge; +using std::string; +using std::vector; + +bool LinkEdge(NodePtr src_node, int32_t src_index, NodePtr dst_node, int32_t dst_index) { + if (src_index >= 0) { + auto src_anchor = src_node->GetOutDataAnchor(src_index); + auto dst_anchor = dst_node->GetInDataAnchor(dst_index); + src_anchor->LinkTo(dst_anchor); + } else { + auto src_anchor = src_node->GetOutControlAnchor(); + auto dst_anchor = dst_node->GetInControlAnchor(); + src_anchor->LinkTo(dst_anchor); + } +} + +NodePtr CreateNode(OpDescPtr op, ComputeGraphPtr owner_graph) { return owner_graph->AddNode(op); } + +void CompareShape(const vector &shape1, const vector &shape2) { + EXPECT_EQ(shape1.size(), shape2.size()); + if (shape1.size() == shape2.size()) { + for (int i = 0; i < shape1.size(); i++) { + EXPECT_EQ(shape1[i], shape2[i]); + } + } +} + +template +void CompareList(const vector &val1, const vector &val2) { + EXPECT_EQ(val1.size(), val2.size()); + if (val1.size() == val2.size()) { + for (int i = 0; i < val1.size(); i++) { + EXPECT_EQ(val1[i], val2[i]); + } + } +} + +static bool NamedAttrsSimpleCmp(const GeAttrValue &left, const GeAttrValue &right) { + GeAttrValue::NamedAttrs val1, val2; + left.GetValue(val1); + right.GetValue(val2); + if (val1.GetName() != val2.GetName()) { + return false; + } + auto attrs1 = val1.GetAllAttrs(); + auto attrs2 = val2.GetAllAttrs(); + if (attrs1.size() != attrs1.size()) { + return false; + } + + for (auto it : attrs1) { + auto it2 = attrs2.find(it.first); + if (it2 == attrs2.end()) { // simple check + return false; + } + if 
(it.second.GetValueType() != it2->second.GetValueType()) { + return false; + } + switch (it.second.GetValueType()) { + case GeAttrValue::VT_INT: { + int64_t i1 = 0, i2 = 0; + it.second.GetValue(i1); + it2->second.GetValue(i2); + if (i1 != i2) { + return false; + } + } + case GeAttrValue::VT_FLOAT: { + GeAttrValue::FLOAT i1 = 0, i2 = 0; + it.second.GetValue(i1); + it2->second.GetValue(i2); + if (i1 != i2) { + return false; + } + } + case GeAttrValue::VT_STRING: { + string i1, i2; + it.second.GetValue(i1); + it2->second.GetValue(i2); + if (i1 != i2) { + return false; + } + } + case GeAttrValue::VT_BOOL: { + bool i1 = false, i2 = false; + it.second.GetValue(i1); + it2->second.GetValue(i2); + if (i1 != i2) { + return false; + } + } + } + } + return true; +} + +static GeAttrValue::NamedAttrs CreateNamedAttrs(const string &name, std::map map) { + GeAttrValue::NamedAttrs named_attrs; + named_attrs.SetName(name); + for (auto it : map) { + named_attrs.SetAttr(it.first, it.second); + } + return named_attrs; +} + +TEST(UtestGeModelSerialize, simple) { + Model model("model_name", "custom version3.0"); + model.SetAttr("model_key1", GeAttrValue::CreateFrom(123)); + model.SetAttr("model_key2", GeAttrValue::CreateFrom(456.78f)); + model.SetAttr("model_key3", GeAttrValue::CreateFrom("abcd")); + model.SetAttr("model_key4", GeAttrValue::CreateFrom({123, 456})); + model.SetAttr("model_key5", GeAttrValue::CreateFrom({456.78f, 998.90f})); + model.SetAttr("model_key6", GeAttrValue::CreateFrom({"abcd", "happy"})); + model.SetAttr("model_key7", GeAttrValue::CreateFrom(false)); + model.SetAttr("model_key8", GeAttrValue::CreateFrom({true, false})); + + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("input", "Input"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto input = CreateNode(input_op, compute_graph); + // w1 + auto w1_op = std::make_shared("w1", "ConstOp"); + 
w1_op->AddOutputDesc(GeTensorDesc(GeShape({12, 2, 64, 64, 16}), FORMAT_NC1HWC0, DT_FLOAT16)); + auto w1 = CreateNode(w1_op, compute_graph); + + // node1 + auto node1_op = std::make_shared("node1", "Conv2D"); + node1_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + node1_op->AddInputDesc(GeTensorDesc(GeShape({12, 2, 64, 64, 16}), FORMAT_NC1HWC0, DT_FLOAT16)); + node1_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto node1 = CreateNode(node1_op, compute_graph); + + // Attr set + node1_op->SetAttr("node_key1", GeAttrValue::CreateFrom(Buffer(10))); + node1_op->SetAttr("node_key2", GeAttrValue::CreateFrom({Buffer(20), Buffer(30)})); + auto named_attrs1 = GeAttrValue::CreateFrom( + CreateNamedAttrs("my_name", {{"int_val", GeAttrValue::CreateFrom(123)}, + {"str_val", GeAttrValue::CreateFrom("abc")}, + {"float_val", GeAttrValue::CreateFrom(345.345)}})); + + node1_op->SetAttr("node_key3", std::move(named_attrs1)); + auto list_named_attrs = GeAttrValue::CreateFrom( + {CreateNamedAttrs("my_name", {{"int_val", GeAttrValue::CreateFrom(123)}, + {"float_val", GeAttrValue::CreateFrom(345.345)}}), + CreateNamedAttrs("my_name2", {{"str_val", GeAttrValue::CreateFrom("abc")}, + {"float_val", GeAttrValue::CreateFrom(345.345)}})}); + node1_op->SetAttr("node_key4", std::move(list_named_attrs)); + // tensor + auto tensor_data1 = "qwertyui"; + auto tensor1 = + std::make_shared(GeTensorDesc(GeShape({2, 2, 2}), FORMAT_NCHW, DT_INT8), (uint8_t *)tensor_data1, 8); + auto tensor_data2 = "asdfqwertyui"; + auto tensor2 = + std::make_shared(GeTensorDesc(GeShape({3, 2, 2}), FORMAT_ND, DT_UINT8), (uint8_t *)tensor_data2, 12); + auto tensor_data3 = "ghjkasdfqwertyui"; + auto tensor3 = + std::make_shared(GeTensorDesc(GeShape({4, 2, 2}), FORMAT_ND, DT_UINT16), (uint8_t *)tensor_data3, 16); + node1_op->SetAttr("node_key5", GeAttrValue::CreateFrom(tensor1)); + node1_op->SetAttr("node_key6", GeAttrValue::CreateFrom({tensor2, 
tensor3})); + + auto tensor_desc = GeTensorDesc(GeShape({2, 2, 2}), FORMAT_NCHW, DT_INT16); + TensorUtils::SetSize(tensor_desc, 100); + node1_op->SetAttr("node_key7", GeAttrValue::CreateFrom(tensor_desc)); + node1_op->SetAttr("node_key8", GeAttrValue::CreateFrom( + {GeTensorDesc(GeShape({2, 2, 2}), FORMAT_NCHW, DT_INT32), + GeTensorDesc(GeShape({2, 2, 2}), FORMAT_NCHW, DT_UINT32), + GeTensorDesc(GeShape({2, 2, 2}), FORMAT_NCHW, DT_INT64), + GeTensorDesc(GeShape({2, 2, 2}), FORMAT_NCHW, DT_UINT64), + GeTensorDesc(GeShape({2, 2, 2}), FORMAT_NCHW, DT_BOOL), + GeTensorDesc(GeShape({2, 2, 2}), FORMAT_NCHW, DT_DOUBLE)})); + + LinkEdge(input, 0, node1, 0); + LinkEdge(w1, 0, node1, 1); + + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + Buffer buffer; + ASSERT_EQ(model.Save(buffer), GRAPH_SUCCESS); + EXPECT_TRUE(buffer.GetData() != nullptr); + + Model model2; + ASSERT_EQ(Model::Load(buffer.GetData(), buffer.GetSize(), model2), GRAPH_SUCCESS); + EXPECT_EQ(model2.GetName(), "model_name"); + GeAttrValue::INT model_val1; + AttrUtils::GetInt(&model2, "model_key1", model_val1); + EXPECT_EQ(model_val1, 123); + + GeAttrValue::FLOAT model_val2; + AttrUtils::GetFloat(&model2, "model_key2", model_val2); + EXPECT_EQ(model_val2, (float)456.78f); + + GeAttrValue::STR model_val3; + AttrUtils::GetStr(&model2, "model_key3", model_val3); + EXPECT_EQ(model_val3, "abcd"); + + GeAttrValue::LIST_INT model_val4; + AttrUtils::GetListInt(&model2, "model_key4", model_val4); + CompareList(model_val4, {123, 456}); + + GeAttrValue::LIST_FLOAT model_val5; + AttrUtils::GetListFloat(&model2, "model_key5", model_val5); + CompareList(model_val5, {456.78f, 998.90f}); + + GeAttrValue::LIST_STR model_val6; + AttrUtils::GetListStr(&model2, "model_key6", model_val6); + CompareList(model_val6, {"abcd", "happy"}); + + GeAttrValue::BOOL model_val7; + EXPECT_EQ(AttrUtils::GetBool(&model2, "model_key7", model_val7), true); + EXPECT_EQ(model_val7, false); + + 
GeAttrValue::LIST_BOOL model_val8; + AttrUtils::GetListBool(&model2, "model_key8", model_val8); + CompareList(model_val8, {true, false}); + + auto graph2 = model2.GetGraph(); + const auto &s_graph = GraphUtils::GetComputeGraph(graph2); + ASSERT_TRUE(s_graph != nullptr); + auto s_nodes = s_graph->GetDirectNode(); + ASSERT_EQ(3, s_nodes.size()); + + auto s_input = s_nodes.at(0); + auto s_w1 = s_nodes.at(1); + auto s_nod1 = s_nodes.at(2); + { + auto s_op = s_input->GetOpDesc(); + EXPECT_EQ(s_op->GetName(), "input"); + EXPECT_EQ(s_op->GetType(), "Input"); + auto s_input_descs = s_op->GetAllInputsDesc(); + ASSERT_EQ(s_input_descs.size(), 0); + auto s_output_descs = s_op->GetAllOutputsDesc(); + ASSERT_EQ(s_output_descs.size(), 1); + auto desc1 = s_output_descs.at(0); + EXPECT_EQ(desc1.GetFormat(), FORMAT_NCHW); + EXPECT_EQ(desc1.GetDataType(), DT_FLOAT); + CompareShape(desc1.GetShape().GetDims(), vector{12, 32, 64, 64}); + + auto out_anchor = s_input->GetOutDataAnchor(0); + auto peer_anchors = out_anchor->GetPeerInDataAnchors(); + ASSERT_EQ(peer_anchors.size(), 1); + auto peer_anchor = peer_anchors.at(0); + ASSERT_EQ(peer_anchor->GetIdx(), 0); + ASSERT_EQ(peer_anchor->GetOwnerNode(), s_nod1); + } + + { + auto s_op = s_w1->GetOpDesc(); + EXPECT_EQ(s_op->GetName(), "w1"); + EXPECT_EQ(s_op->GetType(), "ConstOp"); + auto s_input_descs = s_op->GetAllInputsDesc(); + ASSERT_EQ(s_input_descs.size(), 0); + auto s_output_descs = s_op->GetAllOutputsDesc(); + ASSERT_EQ(s_output_descs.size(), 1); + auto desc1 = s_output_descs.at(0); + EXPECT_EQ(desc1.GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(desc1.GetDataType(), DT_FLOAT16); + CompareShape(desc1.GetShape().GetDims(), vector{12, 2, 64, 64, 16}); + + auto out_anchor = s_w1->GetOutDataAnchor(0); + auto peer_anchors = out_anchor->GetPeerInDataAnchors(); + ASSERT_EQ(peer_anchors.size(), 1); + auto peer_anchor = peer_anchors.at(0); + ASSERT_EQ(peer_anchor->GetIdx(), 1); + ASSERT_EQ(peer_anchor->GetOwnerNode(), s_nod1); + } + { + auto s_op 
= s_nod1->GetOpDesc(); + EXPECT_EQ(s_op->GetName(), "node1"); + EXPECT_EQ(s_op->GetType(), "Conv2D"); + auto s_input_descs = s_op->GetAllInputsDesc(); + ASSERT_EQ(s_input_descs.size(), 2); + + auto desc1 = s_input_descs.at(0); + EXPECT_EQ(desc1.GetFormat(), FORMAT_NCHW); + EXPECT_EQ(desc1.GetDataType(), DT_FLOAT); + CompareShape(desc1.GetShape().GetDims(), vector{12, 32, 64, 64}); + + auto desc2 = s_input_descs.at(1); + EXPECT_EQ(desc2.GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(desc2.GetDataType(), DT_FLOAT16); + CompareShape(desc2.GetShape().GetDims(), vector{12, 2, 64, 64, 16}); + + auto s_output_descs = s_op->GetAllOutputsDesc(); + ASSERT_EQ(s_output_descs.size(), 1); + auto desc3 = s_output_descs.at(0); + EXPECT_EQ(desc3.GetFormat(), FORMAT_NCHW); + EXPECT_EQ(desc3.GetDataType(), DT_FLOAT); + CompareShape(desc3.GetShape().GetDims(), vector{12, 32, 64, 64}); + + auto out_anchor = s_nod1->GetOutDataAnchor(0); + auto peer_anchors = out_anchor->GetPeerInDataAnchors(); + ASSERT_EQ(peer_anchors.size(), 0); + + // node attrs + GeAttrValue::BYTES node_val1; + AttrUtils::GetBytes(s_op, "node_key1", node_val1); + ASSERT_EQ(node_val1.GetSize(), 10); + + GeAttrValue::LIST_BYTES node_val2; + AttrUtils::GetListBytes(s_op, "node_key2", node_val2); + ASSERT_EQ(node_val2.size(), 2); + ASSERT_EQ(node_val2[0].GetSize(), 20); + ASSERT_EQ(node_val2[1].GetSize(), 30); + + GeAttrValue s_named_attrs; + s_op->GetAttr("node_key3", s_named_attrs); + EXPECT_TRUE(NamedAttrsSimpleCmp(s_named_attrs, named_attrs1)); + + GeAttrValue s_list_named_attrs; + s_op->GetAttr("node_key4", s_list_named_attrs); + EXPECT_TRUE(NamedAttrsSimpleCmp(s_list_named_attrs, list_named_attrs)); + + ConstGeTensorPtr s_tensor; + AttrUtils::GetTensor(s_op, "node_key5", s_tensor); + ASSERT_TRUE(s_tensor != nullptr); + string str((char *)s_tensor->GetData().data(), s_tensor->GetData().size()); + EXPECT_EQ(str, "qwertyui"); + + vector s_list_tensor; + AttrUtils::GetListTensor(s_op, "node_key6", s_list_tensor); + 
ASSERT_EQ(s_list_tensor.size(), 2); + string str2((char *)s_list_tensor[0]->GetData().data(), s_list_tensor[0]->GetData().size()); + EXPECT_EQ(str2, "asdfqwertyui"); + string str3((char *)s_list_tensor[1]->GetData().data(), s_list_tensor[1]->GetData().size()); + EXPECT_EQ(str3, "ghjkasdfqwertyui"); + + GeTensorDesc s_tensor_desc; + AttrUtils::GetTensorDesc(s_op, "node_key7", s_tensor_desc); + EXPECT_EQ(s_tensor_desc.GetFormat(), FORMAT_NCHW); + EXPECT_EQ(s_tensor_desc.GetDataType(), DT_INT16); + uint32_t size = 0; + TensorUtils::GetSize(s_tensor_desc, size); + EXPECT_EQ(size, 100); + + vector s_list_tensor_desc; + AttrUtils::GetListTensorDesc(s_op, "node_key8", s_list_tensor_desc); + ASSERT_EQ(s_list_tensor_desc.size(), 6); + EXPECT_EQ(s_list_tensor_desc[0].GetDataType(), DT_INT32); + EXPECT_EQ(s_list_tensor_desc[1].GetDataType(), DT_UINT32); + EXPECT_EQ(s_list_tensor_desc[2].GetDataType(), DT_INT64); + EXPECT_EQ(s_list_tensor_desc[3].GetDataType(), DT_UINT64); + EXPECT_EQ(s_list_tensor_desc[4].GetDataType(), DT_BOOL); + EXPECT_EQ(s_list_tensor_desc[5].GetDataType(), DT_DOUBLE); + } +} + +TEST(UtestGeModelSerialize, op_desc) { + // node1_op + auto node1_op = std::make_shared("node1", "Conv2D"); + node1_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + node1_op->AddInputDesc(GeTensorDesc(GeShape({12, 2, 64, 64, 16}), FORMAT_NC1HWC0, DT_FLOAT16)); + node1_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + + // Attr set + node1_op->SetAttr("node_key1", GeAttrValue::CreateFrom(Buffer(10))); + node1_op->SetAttr("node_key2", GeAttrValue::CreateFrom({Buffer(20), Buffer(30)})); + auto named_attrs1 = GeAttrValue::CreateFrom( + CreateNamedAttrs("my_name", {{"int_val", GeAttrValue::CreateFrom(123)}, + {"str_val", GeAttrValue::CreateFrom("abc")}, + {"float_val", GeAttrValue::CreateFrom(345.345)}})); + + node1_op->SetAttr("node_key3", std::move(named_attrs1)); + auto list_named_attrs = GeAttrValue::CreateFrom( 
+ {CreateNamedAttrs("my_name", {{"int_val", GeAttrValue::CreateFrom(123)}, + {"float_val", GeAttrValue::CreateFrom(345.345)}}), + CreateNamedAttrs("my_name2", {{"str_val", GeAttrValue::CreateFrom("abc")}, + {"float_val", GeAttrValue::CreateFrom(345.345)}})}); + node1_op->SetAttr("node_key4", std::move(list_named_attrs)); + + ModelSerialize model_serialize; + Buffer buffer = model_serialize.SerializeOpDesc(node1_op); + EXPECT_TRUE(buffer.GetData() != nullptr); + + auto s_op = model_serialize.UnserializeOpDesc(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(s_op != nullptr); + + { + EXPECT_EQ(s_op->GetName(), "node1"); + EXPECT_EQ(s_op->GetType(), "Conv2D"); + auto s_input_descs = s_op->GetAllInputsDesc(); + ASSERT_EQ(s_input_descs.size(), 2); + + auto desc1 = s_input_descs.at(0); + EXPECT_EQ(desc1.GetFormat(), FORMAT_NCHW); + EXPECT_EQ(desc1.GetDataType(), DT_FLOAT); + CompareShape(desc1.GetShape().GetDims(), vector{12, 32, 64, 64}); + + auto desc2 = s_input_descs.at(1); + EXPECT_EQ(desc2.GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(desc2.GetDataType(), DT_FLOAT16); + CompareShape(desc2.GetShape().GetDims(), vector{12, 2, 64, 64, 16}); + + auto s_output_descs = s_op->GetAllOutputsDesc(); + ASSERT_EQ(s_output_descs.size(), 1); + auto desc3 = s_output_descs.at(0); + EXPECT_EQ(desc3.GetFormat(), FORMAT_NCHW); + EXPECT_EQ(desc3.GetDataType(), DT_FLOAT); + CompareShape(desc3.GetShape().GetDims(), vector{12, 32, 64, 64}); + + // node attrs + GeAttrValue::BYTES node_val1; + AttrUtils::GetBytes(s_op, "node_key1", node_val1); + ASSERT_EQ(node_val1.GetSize(), 10); + + GeAttrValue::LIST_BYTES node_val2; + AttrUtils::GetListBytes(s_op, "node_key2", node_val2); + ASSERT_EQ(node_val2.size(), 2); + ASSERT_EQ(node_val2[0].GetSize(), 20); + ASSERT_EQ(node_val2[1].GetSize(), 30); + + GeAttrValue s_named_attrs; + s_op->GetAttr("node_key3", s_named_attrs); + EXPECT_TRUE(NamedAttrsSimpleCmp(s_named_attrs, named_attrs1)); + + GeAttrValue s_list_named_attrs; + s_op->GetAttr("node_key4", 
s_list_named_attrs); + EXPECT_TRUE(NamedAttrsSimpleCmp(s_list_named_attrs, list_named_attrs)); + } +} + +TEST(UtestGeModelSerialize, opdesc_as_attr_value) { + // node1_op + auto node1_op = std::make_shared("node1", "Conv2D"); + node1_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + node1_op->AddInputDesc(GeTensorDesc(GeShape({12, 2, 64, 64, 16}), FORMAT_NC1HWC0, DT_FLOAT16)); + node1_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + + // Attr set + node1_op->SetAttr("node_key1", GeAttrValue::CreateFrom(Buffer(10))); + node1_op->SetAttr("node_key2", GeAttrValue::CreateFrom({Buffer(20), Buffer(30)})); + auto named_attrs1 = GeAttrValue::CreateFrom( + CreateNamedAttrs("my_name", {{"int_val", GeAttrValue::CreateFrom(123)}, + {"str_val", GeAttrValue::CreateFrom("abc")}, + {"float_val", GeAttrValue::CreateFrom(345.345)}})); + + node1_op->SetAttr("node_key3", std::move(named_attrs1)); + auto list_named_attrs = GeAttrValue::CreateFrom( + {CreateNamedAttrs("my_name", {{"int_val", GeAttrValue::CreateFrom(123)}, + {"float_val", GeAttrValue::CreateFrom(345.345)}}), + CreateNamedAttrs("my_name2", {{"str_val", GeAttrValue::CreateFrom("abc")}, + {"float_val", GeAttrValue::CreateFrom(345.345)}})}); + node1_op->SetAttr("node_key4", std::move(list_named_attrs)); + + Model model; + EXPECT_TRUE(AttrUtils::SetListOpDesc(&model, "my_key", vector{node1_op})); + EXPECT_TRUE(AttrUtils::SetListInt(&model, "my_key2", {123})); + EXPECT_TRUE(AttrUtils::SetListBytes(&model, "my_key3", {Buffer(100)})); + + vector op_list; + EXPECT_FALSE(AttrUtils::GetListOpDesc(&model, "my_error_key", op_list)); + EXPECT_FALSE(AttrUtils::GetListOpDesc(&model, "my_key2", op_list)); + + EXPECT_TRUE(AttrUtils::GetListOpDesc(&model, "my_key", op_list)); + + ASSERT_TRUE(op_list.size() > 0); + auto s_op = op_list[0]; + + { + EXPECT_EQ(s_op->GetName(), "node1"); + EXPECT_EQ(s_op->GetType(), "Conv2D"); + auto s_input_descs = s_op->GetAllInputsDesc(); + 
ASSERT_EQ(s_input_descs.size(), 2); + + auto desc1 = s_input_descs.at(0); + EXPECT_EQ(desc1.GetFormat(), FORMAT_NCHW); + EXPECT_EQ(desc1.GetDataType(), DT_FLOAT); + CompareShape(desc1.GetShape().GetDims(), vector{12, 32, 64, 64}); + + auto desc2 = s_input_descs.at(1); + EXPECT_EQ(desc2.GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(desc2.GetDataType(), DT_FLOAT16); + CompareShape(desc2.GetShape().GetDims(), vector{12, 2, 64, 64, 16}); + + auto s_output_descs = s_op->GetAllOutputsDesc(); + ASSERT_EQ(s_output_descs.size(), 1); + auto desc3 = s_output_descs.at(0); + EXPECT_EQ(desc3.GetFormat(), FORMAT_NCHW); + EXPECT_EQ(desc3.GetDataType(), DT_FLOAT); + CompareShape(desc3.GetShape().GetDims(), vector{12, 32, 64, 64}); + + // node attrs + GeAttrValue::BYTES node_val1; + AttrUtils::GetBytes(s_op, "node_key1", node_val1); + ASSERT_EQ(node_val1.GetSize(), 10); + + GeAttrValue::LIST_BYTES node_val2; + AttrUtils::GetListBytes(s_op, "node_key2", node_val2); + ASSERT_EQ(node_val2.size(), 2); + ASSERT_EQ(node_val2[0].GetSize(), 20); + ASSERT_EQ(node_val2[1].GetSize(), 30); + + GeAttrValue s_named_attrs; + s_op->GetAttr("node_key3", s_named_attrs); + EXPECT_TRUE(NamedAttrsSimpleCmp(s_named_attrs, named_attrs1)); + + GeAttrValue s_list_named_attrs; + s_op->GetAttr("node_key4", s_list_named_attrs); + EXPECT_TRUE(NamedAttrsSimpleCmp(s_list_named_attrs, list_named_attrs)); + } +} + +TEST(UtestGeModelSerialize, test_sub_graph) { + Model model("model_name", "custom version3.0"); + { + auto compute_graph = std::make_shared("graph_name"); + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto input = CreateNode(input_op, compute_graph); + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + auto sub_compute_graph = std::make_shared("sub_graph"); + // input + auto sub_graph_input_op = std::make_shared("sub_graph_test", "TestOp2"); + 
sub_graph_input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto sub_graph_input = CreateNode(sub_graph_input_op, sub_compute_graph); + + AttrUtils::SetGraph(input_op, "sub_graph", sub_compute_graph); + } + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + ASSERT_GE(buffer.GetSize(), 0); + ASSERT_GE(serialize.GetSerializeModelSize(model), 0); + + auto model2 = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(model2.GetGraph().IsValid()); + auto graph2 = GraphUtils::GetComputeGraph(model2.GetGraph()); + EXPECT_EQ(graph2->GetName(), "graph_name"); + auto nodes2 = graph2->GetDirectNode(); + ASSERT_EQ(nodes2.size(), 1); + auto node2 = nodes2.at(0); + EXPECT_EQ(node2->GetName(), "test"); + auto node2_op = node2->GetOpDesc(); + EXPECT_EQ(node2_op->GetType(), "TestOp"); + auto node2_input_descs = node2_op->GetAllInputsDesc(); + ASSERT_EQ(node2_input_descs.size(), 1); + auto node2_input_desc = node2_input_descs.at(0); + + ComputeGraphPtr sub_compute_graph2; + ASSERT_TRUE(AttrUtils::GetGraph(node2_op, "sub_graph", sub_compute_graph2)); + EXPECT_EQ(sub_compute_graph2->GetName(), "sub_graph"); + auto sub_nodes2 = sub_compute_graph2->GetDirectNode(); + ASSERT_EQ(sub_nodes2.size(), 1); + auto sub_node2 = sub_nodes2.at(0); + EXPECT_EQ(sub_node2->GetName(), "sub_graph_test"); + ASSERT_EQ(sub_node2->GetAllInDataAnchors().size(), 1); + auto sub_node_op2 = sub_node2->GetOpDesc(); + EXPECT_EQ(sub_node_op2->GetType(), "TestOp2"); + ASSERT_EQ(sub_node_op2->GetAllInputsDesc().size(), 1); + auto sub_node2_input_desc = sub_node_op2->GetAllInputsDesc().at(0); + EXPECT_EQ(sub_node2_input_desc.GetShape().GetDim(1), 32); +} + +TEST(UtestGeModelSerialize, test_list_sub_graph) { + Model model("model_name", "custom version3.0"); + { + auto compute_graph = std::make_shared("graph_name"); + // input + auto input_op = std::make_shared("test", "TestOp"); + 
input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto input = CreateNode(input_op, compute_graph); + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + auto sub_compute_graph1 = std::make_shared("sub_graph1"); + // input + auto sub_graph_input_op1 = std::make_shared("sub_graph_test1", "TestOp2"); + sub_graph_input_op1->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto sub_graph_input1 = CreateNode(sub_graph_input_op1, sub_compute_graph1); + + auto sub_compute_graph2 = std::make_shared("sub_graph2"); + // input + auto sub_graph_input_op2 = std::make_shared("sub_graph_test2", "TestOp2"); + sub_graph_input_op2->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto sub_graph_input2 = CreateNode(sub_graph_input_op2, sub_compute_graph2); + + AttrUtils::SetListGraph(input_op, "sub_graph", vector{sub_compute_graph1, sub_compute_graph2}); + } + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + ASSERT_GE(buffer.GetSize(), 0); + + auto model2 = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(model2.GetGraph().IsValid()); + auto graph2 = GraphUtils::GetComputeGraph(model2.GetGraph()); + EXPECT_EQ(graph2->GetName(), "graph_name"); + auto nodes2 = graph2->GetDirectNode(); + ASSERT_EQ(nodes2.size(), 1); + auto node2 = nodes2.at(0); + auto node2_op = node2->GetOpDesc(); + + vector list_sub_compute_graph; + ASSERT_TRUE(AttrUtils::GetListGraph(node2_op, "sub_graph", list_sub_compute_graph)); + ASSERT_EQ(list_sub_compute_graph.size(), 2); + + EXPECT_EQ(list_sub_compute_graph[0]->GetName(), "sub_graph1"); + EXPECT_EQ(list_sub_compute_graph[1]->GetName(), "sub_graph2"); + + auto sub_nodes21 = list_sub_compute_graph[0]->GetDirectNode(); + ASSERT_EQ(sub_nodes21.size(), 1); + auto sub_node21 = sub_nodes21.at(0); + EXPECT_EQ(sub_node21->GetName(), "sub_graph_test1"); + + auto 
sub_nodes22 = list_sub_compute_graph[1]->GetDirectNode(); + ASSERT_EQ(sub_nodes22.size(), 1); + auto sub_node22 = sub_nodes22.at(0); + EXPECT_EQ(sub_node22->GetName(), "sub_graph_test2"); +} + +TEST(UtestGeModelSerialize, test_format) { + Model model("model_name", "custom version3.0"); + { + auto compute_graph = std::make_shared("graph_name"); + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NHWC, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_ND, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NC1HWC0, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_FRACTAL_Z, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NC1C0HWPAD, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NHWC1C0, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_FSR_NCHW, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_FRACTAL_DECONV, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_BN_WEIGHT, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_CHWN, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_FILTER_HWCK, DT_FLOAT)); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_FRACTAL_Z_C04, DT_FLOAT)); + auto input = CreateNode(input_op, compute_graph); + model.SetGraph(GraphUtils::CreateGraphFromComputeGraph(compute_graph)); + } + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + ASSERT_GE(buffer.GetSize(), 0); + auto model2 = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(model2.GetGraph().IsValid()); + + 
auto graph = model2.GetGraph(); + ASSERT_TRUE(GraphUtils::GetComputeGraph(graph) != nullptr); + ASSERT_EQ(GraphUtils::GetComputeGraph(graph)->GetDirectNode().size(), 1); + + auto op = GraphUtils::GetComputeGraph(graph)->GetDirectNode().at(0)->GetOpDesc(); + auto input_descs = op->GetAllInputsDesc(); + ASSERT_EQ(input_descs.size(), 13); + EXPECT_EQ(input_descs.at(0).GetFormat(), FORMAT_NCHW); + EXPECT_EQ(input_descs.at(1).GetFormat(), FORMAT_NHWC); + EXPECT_EQ(input_descs.at(2).GetFormat(), FORMAT_ND); + EXPECT_EQ(input_descs.at(3).GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(input_descs.at(4).GetFormat(), FORMAT_FRACTAL_Z); + EXPECT_EQ(input_descs.at(5).GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(input_descs.at(6).GetFormat(), FORMAT_NHWC1C0); + EXPECT_EQ(input_descs.at(7).GetFormat(), FORMAT_FSR_NCHW); + EXPECT_EQ(input_descs.at(8).GetFormat(), FORMAT_FRACTAL_DECONV); + EXPECT_EQ(input_descs.at(9).GetFormat(), FORMAT_BN_WEIGHT); + EXPECT_EQ(input_descs.at(10).GetFormat(), FORMAT_CHWN); + EXPECT_EQ(input_descs.at(11).GetFormat(), FORMAT_FILTER_HWCK); + EXPECT_EQ(input_descs.at(12).GetFormat(), FORMAT_FRACTAL_Z_C04); +} + +TEST(UtestGeModelSerialize, test_control_edge) { + Model model("model_name", "custom version3.0"); + { + auto compute_graph = std::make_shared("graph_name"); + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto input = CreateNode(input_op, compute_graph); + // sink + auto sink_op = std::make_shared("test2", "Sink"); + auto sink = CreateNode(sink_op, compute_graph); + LinkEdge(sink, -1, input, -1); + + // sink2 + auto sink_op2 = std::make_shared("test3", "Sink"); + auto sink2 = CreateNode(sink_op2, compute_graph); + LinkEdge(sink2, -1, input, -1); + + // dest + auto dest_op = std::make_shared("test4", "Dest"); + auto dest = CreateNode(dest_op, compute_graph); + LinkEdge(input, -1, dest, -1); + + compute_graph->AddInputNode(sink); + 
compute_graph->AddInputNode(sink2); + compute_graph->AddOutputNode(dest); + + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + } + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + EXPECT_GE(buffer.GetSize(), 0); + + auto model2 = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(model2.GetGraph().IsValid()); + auto graph = GraphUtils::GetComputeGraph(model2.GetGraph()); + EXPECT_EQ(graph->GetName(), "graph_name"); + auto nodes = graph->GetDirectNode(); + ASSERT_EQ(nodes.size(), 4); + + auto node1 = nodes.at(0); + auto sink = nodes.at(1); + auto sink2 = nodes.at(2); + auto dest = nodes.at(3); + EXPECT_EQ(node1->GetName(), "test"); + EXPECT_EQ(sink->GetName(), "test2"); + ASSERT_EQ(node1->GetAllInDataAnchors().size(), 1); + auto anchor1 = node1->GetAllInDataAnchors().at(0); + EXPECT_EQ(anchor1->GetPeerAnchors().size(), 0); + + auto contorl_in_anchor1 = node1->GetInControlAnchor(); + ASSERT_EQ(contorl_in_anchor1->GetPeerAnchors().size(), 2); + + EXPECT_EQ(contorl_in_anchor1->GetPeerAnchors().at(0)->GetOwnerNode(), sink); + EXPECT_EQ(contorl_in_anchor1->GetPeerAnchors().at(1)->GetOwnerNode(), sink2); + + auto contorl_out_anchor1 = node1->GetOutControlAnchor(); + ASSERT_EQ(contorl_out_anchor1->GetPeerAnchors().size(), 1); + EXPECT_EQ(contorl_out_anchor1->GetPeerAnchors().at(0)->GetOwnerNode(), dest); + + auto input_nodes = graph->GetInputNodes(); + ASSERT_EQ(input_nodes.size(), 2); + EXPECT_EQ(input_nodes.at(0), sink); + EXPECT_EQ(input_nodes.at(1), sink2); + + auto output_nodes = graph->GetOutputNodes(); + ASSERT_EQ(output_nodes.size(), 1); + EXPECT_EQ(output_nodes.at(0), dest); +} + +TEST(UtestGeModelSerialize, test_serialize_graph) { + auto compute_graph = std::make_shared("graph_name"); + { + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddInputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto input = 
CreateNode(input_op, compute_graph); + // sink + auto sink_op = std::make_shared("test2", "Sink"); + auto sink = CreateNode(sink_op, compute_graph); + LinkEdge(sink, -1, input, -1); + + // sink2 + auto sink_op2 = std::make_shared("test3", "Sink"); + auto sink2 = CreateNode(sink_op2, compute_graph); + LinkEdge(sink2, -1, input, -1); + + // dest + auto dest_op = std::make_shared("test4", "Dest"); + auto dest = CreateNode(dest_op, compute_graph); + LinkEdge(input, -1, dest, -1); + + compute_graph->AddInputNode(sink); + compute_graph->AddInputNode(sink2); + compute_graph->AddOutputNode(dest); + } + ModelSerialize serialize; + auto buffer = serialize.SerializeGraph(compute_graph); + EXPECT_GE(buffer.GetSize(), 0); + + auto graph = serialize.UnserializeGraph(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(graph != nullptr); + EXPECT_EQ(graph->GetName(), "graph_name"); + auto nodes = graph->GetDirectNode(); + ASSERT_EQ(nodes.size(), 4); + + auto node1 = nodes.at(0); + auto sink = nodes.at(1); + auto sink2 = nodes.at(2); + auto dest = nodes.at(3); + EXPECT_EQ(node1->GetName(), "test"); + EXPECT_EQ(sink->GetName(), "test2"); + ASSERT_EQ(node1->GetAllInDataAnchors().size(), 1); + auto anchor1 = node1->GetAllInDataAnchors().at(0); + EXPECT_EQ(anchor1->GetPeerAnchors().size(), 0); + + auto contorl_in_anchor1 = node1->GetInControlAnchor(); + ASSERT_EQ(contorl_in_anchor1->GetPeerAnchors().size(), 2); + + EXPECT_EQ(contorl_in_anchor1->GetPeerAnchors().at(0)->GetOwnerNode(), sink); + EXPECT_EQ(contorl_in_anchor1->GetPeerAnchors().at(1)->GetOwnerNode(), sink2); + + auto contorl_out_anchor1 = node1->GetOutControlAnchor(); + ASSERT_EQ(contorl_out_anchor1->GetPeerAnchors().size(), 1); + EXPECT_EQ(contorl_out_anchor1->GetPeerAnchors().at(0)->GetOwnerNode(), dest); + + auto input_nodes = graph->GetInputNodes(); + ASSERT_EQ(input_nodes.size(), 2); + EXPECT_EQ(input_nodes.at(0), sink); + EXPECT_EQ(input_nodes.at(1), sink2); + + auto output_nodes = graph->GetOutputNodes(); + 
ASSERT_EQ(output_nodes.size(), 1); + EXPECT_EQ(output_nodes.at(0), dest); +} + +TEST(UtestGeModelSerialize, test_invalid_model) { + { // empty graph + Model model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + EXPECT_EQ(buffer.GetSize(), 0); + } +} + +TEST(UtestGeModelSerialize, test_invalid_graph) { + { // empty graph + + ComputeGraphPtr graph = nullptr; + + ModelSerialize serialize; + auto buffer = serialize.SerializeGraph(graph); + EXPECT_EQ(buffer.GetSize(), 0); + } +} + +TEST(UtestGeModelSerialize, test_invalid_opdesc) { + { // empty OpDesc + OpDescPtr op_desc = nullptr; + ModelSerialize serialize; + auto buffer = serialize.SerializeOpDesc(op_desc); + EXPECT_EQ(buffer.GetSize(), 0); + } +} + +TEST(UtestGeModelSerialize, test_invalid_tensor_desc) { + { // valid test + Model model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + auto input = CreateNode(input_op, compute_graph); + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + EXPECT_GE(buffer.GetSize(), 0); + } + { // invalid format + Model model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_RESERVED, DT_FLOAT)); // invalid format + auto input = CreateNode(input_op, compute_graph); + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + ASSERT_GE(buffer.GetSize(), 
0); + auto model2 = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(model2.IsValid()); + auto graph_new = GraphUtils::GetComputeGraph(model2.GetGraph()); + ASSERT_TRUE(graph_new != nullptr); + auto node_list_new = graph_new->GetAllNodes(); + ASSERT_EQ(node_list_new.size(), 1); + auto opdesc_new = node_list_new.at(0)->GetOpDesc(); + ASSERT_TRUE(opdesc_new != nullptr); + auto output_desc_list_new = opdesc_new->GetAllOutputsDesc(); + ASSERT_EQ(output_desc_list_new.size(), 1); + auto output_desc_new = output_desc_list_new.at(0); + EXPECT_EQ(output_desc_new.GetDataType(), DT_FLOAT); + EXPECT_EQ(output_desc_new.GetFormat(), FORMAT_RESERVED); + } + { // DT_UNDEFINED datatype + Model model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_UNDEFINED)); + auto input = CreateNode(input_op, compute_graph); + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + ASSERT_GE(buffer.GetSize(), 0); + auto model2 = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(model2.IsValid()); + auto graph_new = GraphUtils::GetComputeGraph(model2.GetGraph()); + ASSERT_TRUE(graph_new != nullptr); + auto node_list_new = graph_new->GetAllNodes(); + ASSERT_EQ(node_list_new.size(), 1); + auto opdesc_new = node_list_new.at(0)->GetOpDesc(); + ASSERT_TRUE(opdesc_new != nullptr); + auto output_desc_list_new = opdesc_new->GetAllOutputsDesc(); + ASSERT_EQ(output_desc_list_new.size(), 1); + auto output_desc_new = output_desc_list_new.at(0); + EXPECT_EQ(output_desc_new.GetDataType(), DT_UNDEFINED); + EXPECT_EQ(output_desc_new.GetFormat(), FORMAT_NCHW); + } +} + +TEST(UtestGeModelSerialize, test_invalid_attrs) { + { // valid test + Model 
model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + + GeAttrValue::NamedAttrs named_attrs; + named_attrs.SetAttr("key1", GeAttrValue::CreateFrom(10)); + AttrUtils::SetNamedAttrs(input_op, "key", named_attrs); + + auto input = CreateNode(input_op, compute_graph); + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + EXPECT_GE(buffer.GetSize(), 0); + } + { // none type + Model model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + + GeAttrValue::NamedAttrs named_attrs; + EXPECT_EQ(named_attrs.SetAttr("key1", GeAttrValue()), GRAPH_FAILED); + } + { // bytes attr len is 0 + Model model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + + GeAttrValue::NamedAttrs named_attrs; + named_attrs.SetAttr("key1", GeAttrValue::CreateFrom(GeAttrValue::BYTES(0))); + AttrUtils::SetNamedAttrs(input_op, "key", named_attrs); + + auto input = CreateNode(input_op, compute_graph); + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + EXPECT_GE(buffer.GetSize(), 0); + + auto model2 = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + EXPECT_TRUE(model2.IsValid()); + } + { // invalid list bytes attr + Model model("model_name", "custom 
version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + + GeAttrValue::NamedAttrs named_attrs; + named_attrs.SetAttr("key1", GeAttrValue::CreateFrom({GeAttrValue::BYTES(0)})); + AttrUtils::SetNamedAttrs(input_op, "key", named_attrs); + + auto input = CreateNode(input_op, compute_graph); + Graph graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + + ModelSerialize serialize; + auto buffer = serialize.SerializeModel(model); + EXPECT_GE(buffer.GetSize(), 0); + } + { // invalid graph attr + Model model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + + GeAttrValue::NamedAttrs named_attrs; + EXPECT_EQ(named_attrs.SetAttr("key1", GeAttrValue::CreateFrom(nullptr)), GRAPH_FAILED); + GeAttrValue value; + EXPECT_EQ(named_attrs.GetAttr("key1", value), GRAPH_FAILED); + EXPECT_TRUE(value.IsEmpty()); + } + { // invalid list graph attr + Model model("model_name", "custom version3.0"); + auto compute_graph = std::make_shared("graph_name"); + + // input + auto input_op = std::make_shared("test", "TestOp"); + input_op->AddOutputDesc(GeTensorDesc(GeShape({12, 32, 64, 64}), FORMAT_NCHW, DT_FLOAT)); + + GeAttrValue::NamedAttrs named_attrs; + EXPECT_EQ(named_attrs.SetAttr("key1", GeAttrValue::CreateFrom({nullptr})), GRAPH_FAILED); + GeAttrValue value; + EXPECT_EQ(named_attrs.GetAttr("key1", value), GRAPH_FAILED); + EXPECT_TRUE(value.IsEmpty()); + } +} + +TEST(UtestGeModelSerialize, test_model_serialize_imp_invalid_param) { + ModelSerializeImp imp; + EXPECT_FALSE(imp.SerializeModel(Model(), nullptr)); + EXPECT_FALSE(imp.SerializeGraph(nullptr, nullptr)); + 
EXPECT_FALSE(imp.SerializeNode(nullptr, nullptr)); + EXPECT_FALSE(imp.SerializeOpDesc(nullptr, nullptr)); + + auto graph = std::make_shared("test_graph"); + auto node = graph->AddNode(std::make_shared()); + node->op_ = nullptr; + proto::ModelDef model_def; + Model model; + model.SetGraph(GraphUtils::CreateGraphFromComputeGraph(graph)); + EXPECT_FALSE(imp.SerializeModel(model, &model_def)); + + ModelSerialize serialize; + EXPECT_EQ(serialize.GetSerializeModelSize(model), 0); +} + +TEST(UtestGeModelSerialize, test_parse_node_false) { + ModelSerializeImp imp; + string node_index = "invalid_index"; + string node_name = "name"; + int32_t index = 1; + EXPECT_EQ(imp.ParseNodeIndex(node_index, node_name, index), false); +} + +TEST(UtestGeModelSerialize, test_invalid_tensor) { + ModelSerializeImp imp; + EXPECT_EQ(imp.SerializeTensor(nullptr, nullptr), false); + + try { + ConstGeTensorPtr tensor_ptr = std::make_shared(); + EXPECT_EQ(imp.SerializeTensor(tensor_ptr, nullptr), false); + } catch (...) { + } +} + +TEST(UTEST_ge_model_unserialize, test_invalid_tensor) { + ModelSerializeImp imp; + EXPECT_EQ(imp.SerializeTensor(nullptr, nullptr), false); + + try { + ConstGeTensorPtr tensor_ptr = std::make_shared(); + EXPECT_EQ(imp.SerializeTensor(tensor_ptr, nullptr), false); + } catch (...) 
{ + } +} + +TEST(UTEST_ge_model_unserialize, test_invalid_TensorDesc) { + { // valid + proto::ModelDef mode_def; + auto attrs = mode_def.mutable_attr(); + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto tensor_desc_attr = attr_def->mutable_td(); + tensor_desc_attr->set_layout("NCHW"); + tensor_desc_attr->set_dtype(proto::DataType::DT_INT8); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + } + { // invalid layout + proto::ModelDef mode_def; + auto attrs = mode_def.mutable_attr(); + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto tensor_desc_attr = attr_def->mutable_td(); + tensor_desc_attr->set_layout("InvalidLayout"); + tensor_desc_attr->set_dtype(proto::DataType::DT_INT8); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + GeTensorDesc tensor_desc; + EXPECT_TRUE(AttrUtils::GetTensorDesc(model, "key1", tensor_desc)); + EXPECT_EQ(tensor_desc.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc.GetDataType(), DT_INT8); + } + { // invalid datatype + proto::ModelDef mode_def; + auto attrs = mode_def.mutable_attr(); + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto tensor_desc_attr = attr_def->mutable_td(); // tensor desc + tensor_desc_attr->set_layout("NHWC"); + tensor_desc_attr->set_dtype((proto::DataType)100); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + GeTensorDesc tensor_desc; + EXPECT_TRUE(AttrUtils::GetTensorDesc(model, "key1", tensor_desc)); + EXPECT_EQ(tensor_desc.GetFormat(), FORMAT_NHWC); + EXPECT_EQ(tensor_desc.GetDataType(), DT_UNDEFINED); + } + { // invalid datatype + proto::ModelDef mode_def; + auto attrs = mode_def.mutable_attr(); + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto tensor_desc_attr = attr_def->mutable_t()->mutable_desc(); // tensor + tensor_desc_attr->set_layout("NHWC"); + tensor_desc_attr->set_dtype((proto::DataType)100); + + ModelSerializeImp imp; + Model 
model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + ConstGeTensorPtr tensor; + EXPECT_TRUE(AttrUtils::GetTensor(model, "key1", tensor)); + ASSERT_TRUE(tensor != nullptr); + auto tensor_desc = tensor->GetTensorDesc(); + EXPECT_EQ(tensor_desc.GetFormat(), FORMAT_NHWC); + EXPECT_EQ(tensor_desc.GetDataType(), DT_UNDEFINED); + } + { // invalid attrmap + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->mutable_attr(); // graph attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto tensor_desc_attr = attr_def->mutable_t()->mutable_desc(); // tensor + tensor_desc_attr->set_layout("NCHW"); + tensor_desc_attr->set_dtype(proto::DataType::DT_INT8); + auto attrs1 = tensor_desc_attr->mutable_attr(); + auto attr1 = (*attrs1)["key2"]; // empty attr + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + ConstGeTensorPtr tensor; + EXPECT_TRUE(AttrUtils::GetTensor(graph, "key1", tensor)); + ASSERT_TRUE(tensor != nullptr); + auto tensor_desc = tensor->GetTensorDesc(); + GeAttrValue attr_value; + EXPECT_EQ(tensor_desc.GetAttr("key2", attr_value), GRAPH_SUCCESS); + EXPECT_EQ(attr_value.GetValueType(), GeAttrValue::VT_NONE); + } + { // invalid attrmap2 + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto tensor_desc_attr = attr_def->mutable_t()->mutable_desc(); // tensor + tensor_desc_attr->set_layout("NCHW"); + tensor_desc_attr->set_dtype(proto::DataType::DT_INT8); + auto attrs1 = tensor_desc_attr->mutable_attr(); + auto attr1 = (*attrs1)["key2"].mutable_list(); // empty list attr + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + 
ASSERT_EQ(nodes.size(), 1); + ConstGeTensorPtr tensor; + EXPECT_TRUE(AttrUtils::GetTensor(nodes.at(0)->GetOpDesc(), "key1", tensor)); + ASSERT_TRUE(tensor != nullptr); + auto tensor_desc = tensor->GetTensorDesc(); + GeAttrValue attr_value; + EXPECT_EQ(tensor_desc.GetAttr("key2", attr_value), GRAPH_SUCCESS); + EXPECT_EQ(attr_value.GetValueType(), GeAttrValue::VT_NONE); + } +} +TEST(UTEST_ge_model_unserialize, test_invalid_attr) { + { // invalid graph + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto graph_attr = attr_def->mutable_g(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + ComputeGraphPtr graph_attr_new; + EXPECT_TRUE(AttrUtils::GetGraph(nodes.at(0)->GetOpDesc(), "key1", graph_attr_new)); + ASSERT_TRUE(graph_attr_new != nullptr); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(graph_attr_new, "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } + { // invalid list graph + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + attr_def->mutable_list()->set_val_type(ge::proto::AttrDef_ListValue_ListValueType_VT_LIST_GRAPH); + auto graph_attr = attr_def->mutable_list()->add_g(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + 
tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + vector graph_list_attr; + EXPECT_TRUE(AttrUtils::GetListGraph(nodes.at(0)->GetOpDesc(), "key1", graph_list_attr)); + ASSERT_EQ(graph_list_attr.size(), 1); + ASSERT_TRUE(graph_list_attr[0] != nullptr); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(graph_list_attr[0], "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } + { // invalid named_attrs + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto graph_attr = attr_def->mutable_func(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + GeAttrValue::NAMED_ATTRS named_attrs; + EXPECT_TRUE(AttrUtils::GetNamedAttrs(nodes.at(0)->GetOpDesc(), "key1", named_attrs)); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(named_attrs, "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } + { // invalid list named_attrs + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + 
attr_def->mutable_list()->set_val_type(ge::proto::AttrDef_ListValue_ListValueType_VT_LIST_NAMED_ATTRS); + auto graph_attr = attr_def->mutable_list()->add_na(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + GeAttrValue::LIST_NAMED_ATTRS named_attrs; + EXPECT_TRUE(AttrUtils::GetListNamedAttrs(nodes.at(0)->GetOpDesc(), "key1", named_attrs)); + ASSERT_EQ(named_attrs.size(), 1); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(named_attrs.at(0), "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } + { // invalid tensor_desc + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto graph_attr = attr_def->mutable_td(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + GeTensorDesc tensor_desc; + EXPECT_TRUE(AttrUtils::GetTensorDesc(nodes.at(0)->GetOpDesc(), "key1", tensor_desc)); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(tensor_desc, "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + 
EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } + { // invalid list tensor_desc + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + attr_def->mutable_list()->set_val_type(ge::proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR_DESC); + auto graph_attr = attr_def->mutable_list()->add_td(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + vector tensor_desc; + EXPECT_TRUE(AttrUtils::GetListTensorDesc(nodes.at(0)->GetOpDesc(), "key1", tensor_desc)); + ASSERT_EQ(tensor_desc.size(), 1); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(tensor_desc.at(0), "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } + { // invalid tensor + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + auto graph_attr = attr_def->mutable_t()->mutable_desc(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + ConstGeTensorPtr tensor; + 
EXPECT_TRUE(AttrUtils::GetTensor(nodes.at(0)->GetOpDesc(), "key1", tensor)); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(tensor->GetTensorDesc(), "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } + { // invalid list tensor + proto::ModelDef mode_def; + auto attrs = mode_def.add_graph()->add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + attr_def->mutable_list()->set_val_type(ge::proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR); + auto graph_attr = attr_def->mutable_list()->add_t()->mutable_desc(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Model model; + EXPECT_TRUE(imp.UnserializeModel(model, mode_def)); + auto graph = GraphUtils::GetComputeGraph(model.GetGraph()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + vector tensor; + EXPECT_TRUE(AttrUtils::GetListTensor(nodes.at(0)->GetOpDesc(), "key1", tensor)); + ASSERT_EQ(tensor.size(), 1); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(tensor.at(0)->GetTensorDesc(), "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } + { // invalid list tensor + proto::GraphDef graph_def; + auto attrs = graph_def.add_op()->mutable_attr(); // node attr + + proto::AttrDef *attr_def = &(*attrs)["key1"]; + attr_def->mutable_list()->set_val_type(ge::proto::AttrDef_ListValue_ListValueType_VT_LIST_TENSOR); + auto graph_attr = attr_def->mutable_list()->add_t()->mutable_desc(); + auto attrs_of_graph = graph_attr->mutable_attr(); + auto tensor_val = (*attrs_of_graph)["key2"].mutable_td(); + tensor_val->set_dtype(proto::DT_INT8); + 
tensor_val->set_layout("invalidLayout"); + + ModelSerializeImp imp; + Buffer buffer(graph_def.ByteSizeLong()); + graph_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto graph = serialize.UnserializeGraph(buffer.GetData(), buffer.GetSize()); + ASSERT_TRUE(graph != nullptr); + auto nodes = graph->GetAllNodes(); + ASSERT_EQ(nodes.size(), 1); + vector tensor; + EXPECT_TRUE(AttrUtils::GetListTensor(nodes.at(0)->GetOpDesc(), "key1", tensor)); + ASSERT_EQ(tensor.size(), 1); + GeTensorDesc tensor_desc1; + EXPECT_TRUE(AttrUtils::GetTensorDesc(tensor.at(0)->GetTensorDesc(), "key2", tensor_desc1)); + EXPECT_EQ(tensor_desc1.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc1.GetDataType(), DT_INT8); + } +} + +TEST(UTEST_ge_model_unserialize, test_invalid_input_output) { + // model invalid node input + { + proto::ModelDef model_def; + auto op_def = model_def.add_graph()->add_op(); // node attr + op_def->add_input("invalidNodeName:0"); + + Buffer buffer(model_def.ByteSizeLong()); + model_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto model = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(model.IsValid()); + } + // model invalid node control input + { + proto::ModelDef model_def; + auto op_def = model_def.add_graph()->add_op(); // node attr + op_def->add_input("invalidNodeName:-1"); + + Buffer buffer(model_def.ByteSizeLong()); + model_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto model = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(model.IsValid()); + } + // model invalid graph input + { + proto::ModelDef model_def; + model_def.add_graph()->add_input("invalidNodeName:0"); + + Buffer buffer(model_def.ByteSizeLong()); + model_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto 
model = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(model.IsValid()); + } + // model invalid graph input + { + proto::ModelDef model_def; + model_def.add_graph()->add_output("invalidNodeName:0"); + + Buffer buffer(model_def.ByteSizeLong()); + model_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto model = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(model.IsValid()); + } + // graph invalid node input + { + proto::GraphDef graph_def; + auto op_def = graph_def.add_op(); // node attr + op_def->add_input("invalidNodeName:0"); + + Buffer buffer(graph_def.ByteSizeLong()); + graph_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto graph = serialize.UnserializeGraph(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(graph != nullptr); + } + // graph invalid node control input + { + proto::GraphDef graph_def; + auto op_def = graph_def.add_op(); // node attr + op_def->add_input("invalidNodeName:-1"); + + Buffer buffer(graph_def.ByteSizeLong()); + graph_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto graph = serialize.UnserializeGraph(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(graph != nullptr); + } + // graph invalid graph input + { + proto::GraphDef graph_def; + graph_def.add_input("invalidNodeName:0"); + + Buffer buffer(graph_def.ByteSizeLong()); + graph_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto graph = serialize.UnserializeGraph(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(graph != nullptr); + } + // graph invalid graph output + { + proto::GraphDef graph_def; + graph_def.add_output("invalidNodeName:0"); + + Buffer buffer(graph_def.ByteSizeLong()); + graph_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize 
serialize; + auto graph = serialize.UnserializeGraph(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(graph != nullptr); + } + // model invalid node input anchor + { + proto::ModelDef model_def; + auto graph_def = model_def.add_graph(); + auto node_def1 = graph_def->add_op(); // node attr + node_def1->set_name("node1"); + + auto node_def2 = graph_def->add_op(); // node attr + node_def2->add_input("node1:0"); + + Buffer buffer(model_def.ByteSizeLong()); + model_def.SerializeToArray(buffer.GetData(), static_cast(buffer.GetSize())); + + ModelSerialize serialize; + auto model = serialize.UnserializeModel(buffer.GetData(), buffer.GetSize()); + EXPECT_FALSE(model.IsValid()); + } +} + +TEST(UTEST_ge_model_unserialize, test_invalid_CodeBuffer) { + { + char buffer[100] = "sdfasf"; + ModelSerialize serialize; + auto graph = serialize.UnserializeGraph((uint8_t *)buffer, 100); + EXPECT_FALSE(graph != nullptr); + } + { + char buffer[100] = "sdfasf"; + ModelSerialize serialize; + auto model = serialize.UnserializeModel((uint8_t *)buffer, 100); + EXPECT_FALSE(model.IsValid()); + } + { + char buffer[100] = "sdfasf"; + ModelSerialize serialize; + auto op_desc = serialize.UnserializeOpDesc((uint8_t *)buffer, 100); + EXPECT_FALSE(op_desc != nullptr); + } + { + ModelSerialize serialize; + auto graph = serialize.UnserializeGraph((uint8_t *)nullptr, 100); + EXPECT_FALSE(graph != nullptr); + } + { + ModelSerialize serialize; + auto model = serialize.UnserializeModel((uint8_t *)nullptr, 100); + EXPECT_FALSE(model.IsValid()); + } + { + ModelSerialize serialize; + auto op_desc = serialize.UnserializeOpDesc((uint8_t *)nullptr, 100); + EXPECT_FALSE(op_desc != nullptr); + } +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_model_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_model_unittest.cc new file mode 100644 index 00000000..07bd90f5 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_model_unittest.cc @@ -0,0 +1,80 @@ +/** + * Copyright 2019-2020 Huawei 
Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/model.h" + +#include + +#include "graph/compute_graph.h" +#include "graph/debug/graph_debug.h" + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestGeModelUnittest : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +ge::ComputeGraphPtr CreateSaveGraph() { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + // variable1 + ge::OpDescPtr variable_op = std::make_shared(); + variable_op->SetType("Variable"); + variable_op->SetName("Variable1"); + variable_op->AddInputDesc(ge::GeTensorDesc()); + variable_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr variable_node = graph->AddNode(variable_op); + // save1 + ge::OpDescPtr save_op = std::make_shared(); + save_op->SetType("Save"); + save_op->SetName("Save1"); + save_op->AddInputDesc(ge::GeTensorDesc()); + save_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr save_node = graph->AddNode(save_op); + + // add edge + ge::GraphUtils::AddEdge(variable_node->GetOutDataAnchor(0), save_node->GetInDataAnchor(0)); + + return graph; +} + +TEST_F(UtestGeModelUnittest, save_model_to_file_success) { + ge::ComputeGraphPtr compute_graph = CreateSaveGraph(); + auto all_nodes = compute_graph->GetAllNodes(); + for (auto node : all_nodes) { + auto op_desc = node->GetOpDesc(); + GeTensorDesc weight_desc; + op_desc->AddOptionalInputDesc("test", weight_desc); + for 
(auto in_anchor_ptr : node->GetAllInDataAnchors()) { + bool is_optional = op_desc->IsOptionalInput(in_anchor_ptr->GetIdx()); + } + } + ge::Graph ge_graph = ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph); + string file_name = "model_data.pb"; + setenv("DUMP_MODEL", "1", true); + // EXPECT_EQ(ge_graph.SaveToFile(file_name), GRAPH_FAILED); + setenv("DUMP_MODEL", "0", true); +} + +TEST_F(UtestGeModelUnittest, load_model_from_file_success) { + ge::Graph ge_graph; + string file_name = "model_data.pb"; + // EXPECT_EQ(ge_graph.LoadFromFile(file_name), GRAPH_SUCCESS); +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_node_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_node_unittest.cc new file mode 100644 index 00000000..be0fe27d --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_node_unittest.cc @@ -0,0 +1,226 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define protected public +#define private public +#include "graph/node.h" + +#include "graph/ge_attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/tensor_utils.h" +#undef protected +#undef private + +using namespace std; +using namespace ge; + +class UtestGeNode : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeNode, node) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + OpDescPtr desc_ptr2 = std::make_shared("name2", "type2"); + EXPECT_EQ(desc_ptr2->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + NodePtr n2 = graph_ptr->AddNode(desc_ptr); + NodePtr n3 = graph_ptr->AddNode(desc_ptr); + NodePtr n4 = graph_ptr->AddNode(desc_ptr); + + EXPECT_EQ(n3->Init(), GRAPH_SUCCESS); + EXPECT_EQ(n4->Init(), GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::AddEdge(n3->GetOutDataAnchor(0), n4->GetInDataAnchor(0)), GRAPH_SUCCESS); + + EXPECT_EQ(n3->GetOwnerComputeGraph(), graph_ptr); + EXPECT_EQ(n3->GetName(), "name1"); + EXPECT_EQ(n3->GetOpDesc(), desc_ptr); + int i = 0; + for (auto in : n3->GetAllOutDataAnchors()) { + EXPECT_EQ(in->GetIdx(), i++); + } + i = 0; + for (auto in : n3->GetAllInDataAnchors()) { + EXPECT_EQ(in->GetIdx(), 
i++); + } + EXPECT_EQ(n3->GetInControlAnchor() != nullptr, true); + EXPECT_EQ(n3->GetOutControlAnchor() != nullptr, true); + + for (auto innode : n4->GetInDataNodes()) { + EXPECT_EQ(innode, n3); + } + + for (auto outnode : n3->GetOutDataNodes()) { + EXPECT_EQ(outnode, n4); + } +} + +TEST_F(UtestGeNode, out_nodes) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + OpDescPtr desc_ptr2 = std::make_shared("name2", "type2"); + EXPECT_EQ(desc_ptr2->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + NodePtr n2 = graph_ptr->AddNode(desc_ptr); + NodePtr n3 = graph_ptr->AddNode(desc_ptr); + NodePtr n4 = graph_ptr->AddNode(desc_ptr); + + EXPECT_EQ(GraphUtils::AddEdge(n1->GetOutDataAnchor(0), n2->GetInDataAnchor(0)), GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::AddEdge(n1->GetOutDataAnchor(0), n3->GetInControlAnchor()), GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::AddEdge(n1->GetOutControlAnchor(), n4->GetInControlAnchor()), GRAPH_SUCCESS); + EXPECT_EQ(GraphUtils::AddEdge(n2->GetOutDataAnchor(0), n4->GetInDataAnchor(0)), GRAPH_SUCCESS); + EXPECT_EQ(n1->GetOutDataNodes().size(), 1); + EXPECT_EQ(n1->GetOutDataNodes().at(0), n2); + EXPECT_EQ(n1->GetOutControlNodes().size(), 2); + EXPECT_EQ(n1->GetOutControlNodes().at(0), n3); + EXPECT_EQ(n1->GetOutControlNodes().at(1), n4); + 
EXPECT_EQ(n1->GetOutAllNodes().size(), 3); + EXPECT_EQ(n1->GetOutAllNodes().at(0), n2); + EXPECT_EQ(n1->GetOutAllNodes().at(1), n3); + EXPECT_EQ(n1->GetOutAllNodes().at(2), n4); + EXPECT_EQ(n4->GetInControlNodes().size(), 1); + EXPECT_EQ(n4->GetInDataNodes().size(), 1); + EXPECT_EQ(n4->GetInAllNodes().size(), 2); + + EXPECT_EQ(n1->GetOutDataNodesAndAnchors().size(), 1); + EXPECT_EQ(n1->GetOutDataNodesAndAnchors().at(0).first, n2); + EXPECT_EQ(n1->GetOutDataNodesAndAnchors().at(0).second, n2->GetAllInDataAnchors().at(0)); + EXPECT_EQ(n2->GetInDataNodesAndAnchors().size(), 1); + EXPECT_EQ(n2->GetInDataNodesAndAnchors().at(0).first, n1); + EXPECT_EQ(n2->GetInDataNodesAndAnchors().at(0).second, n1->GetAllOutDataAnchors().at(0)); + + OutDataAnchorPtr a1; + InControlAnchorPtr a2; + EXPECT_EQ(NodeUtils::GetDataOutAnchorAndControlInAnchor(n1, a1, a2), GRAPH_SUCCESS); + EXPECT_EQ(a1, n1->GetOutDataAnchor(0)); + EXPECT_EQ(a2, n3->GetInControlAnchor()); + + a1 = nullptr; + a2 = nullptr; + EXPECT_EQ(NodeUtils::GetDataOutAnchorAndControlInAnchor(n4, a1, a2), GRAPH_FAILED); +} + +TEST_F(UtestGeNode, update_opdesc) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + OpDescPtr desc_ptr2 = std::make_shared("name2", "type2"); + EXPECT_EQ(desc_ptr2->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); 
+ + EXPECT_EQ(n1->UpdateOpDesc(desc_ptr2), GRAPH_SUCCESS); +} + +TEST_F(UtestGeNode, add_link_from) { + OpDescPtr desc_ptr = std::make_shared("name", "type"); + EXPECT_EQ(desc_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + NodePtr n2 = graph_ptr->AddNode(desc_ptr); + EXPECT_EQ(n2->AddLinkFrom(n1), GRAPH_SUCCESS); + EXPECT_EQ(n2->AddLinkFromForParse(n1), GRAPH_SUCCESS); + NodePtr n3 = graph_ptr->AddNode(desc_ptr); + NodePtr n4 = graph_ptr->AddNode(desc_ptr); + NodePtr n5 = graph_ptr->AddNode(desc_ptr); + EXPECT_EQ(n3->AddLinkFrom("x", n4), GRAPH_SUCCESS); + EXPECT_EQ(n3->AddLinkFrom(0, n5), GRAPH_SUCCESS); + desc_ptr->input_name_idx_.insert(make_pair("__input1", 1)); + EXPECT_EQ(n2->AddLinkFrom(n1), GRAPH_SUCCESS); + + OpDescPtr desc_ptr1 = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr1->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + ComputeGraphPtr graph_ptr1 = std::make_shared("name1"); + NodePtr n7 = graph_ptr1->AddNode(desc_ptr1); + NodePtr n8 = graph_ptr1->AddNode(desc_ptr1); + EXPECT_EQ(n8->AddLinkFromForParse(n7), GRAPH_PARAM_INVALID); +} + +TEST_F(UtestGeNode, add_link_from_fail) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + + NodePtr node_ptr = std::make_shared(); + EXPECT_EQ(n1->AddLinkFrom(node_ptr), GRAPH_PARAM_INVALID); + EXPECT_EQ(n1->AddLinkFrom(1, node_ptr), GRAPH_PARAM_INVALID); + EXPECT_EQ(n1->AddLinkFrom("test", node_ptr), GRAPH_PARAM_INVALID); + EXPECT_EQ(n1->AddLinkFromForParse(node_ptr), GRAPH_PARAM_INVALID); +} + +TEST_F(UtestGeNode, verify_failed) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + 
EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + + EXPECT_EQ(n1->Verify(), GRAPH_SUCCESS); +} + +TEST_F(UtestGeNode, infer_origin_format_success) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + + EXPECT_EQ(n1->InferOriginFormat(), GRAPH_SUCCESS); +} + +TEST_F(UtestGeNode, node_anchor_is_equal) { + ComputeGraphPtr graph_ptr = std::make_shared("name"); + OpDescPtr desc_ptr_src = std::make_shared("str_node", "type"); + EXPECT_EQ(desc_ptr_src->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr_src->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + NodePtr str_node = graph_ptr->AddNode(desc_ptr_src); + + OpDescPtr desc_ptr_peer = std::make_shared("peer_node", "type"); + EXPECT_EQ(desc_ptr_peer->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr_peer->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + NodePtr peer_node = graph_ptr->AddNode(desc_ptr_peer); + EXPECT_EQ(peer_node->AddLinkFrom(str_node), GRAPH_SUCCESS); + EXPECT_EQ(str_node->NodeAnchorIsEqual(str_node->GetOutAnchor(0), str_node->GetOutAnchor(0), 0), true); +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_opdesc_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_opdesc_unittest.cc new file mode 100644 index 00000000..5378b632 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_opdesc_unittest.cc @@ -0,0 +1,238 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache 
License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define protected public +#define private public +#include "graph/op_desc.h" + +#include "graph/compute_graph.h" +#include "graph/ge_attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/node.h" +#include "graph/operator_factory.h" +#include "utils/op_desc_utils.h" +#undef protected +#undef private + +using namespace std; +using namespace ge; + +class UtestGeOpdesc : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeOpdesc, ge_test_opdesc_common) { + string name = "Conv2d"; + string type = "Data"; + OpDescPtr op_desc = std::make_shared(name, type); + EXPECT_TRUE(op_desc); + EXPECT_EQ(name, op_desc->GetName()); + EXPECT_EQ(type, op_desc->GetType()); + name = name + "_modify"; + type = type + "_modify"; + op_desc->SetName(name); + op_desc->SetType(type); + EXPECT_EQ(name, op_desc->GetName()); + EXPECT_EQ(type, op_desc->GetType()); +} + +TEST_F(UtestGeOpdesc, clear_all_output_desc) { + auto g = std::make_shared("Test"); + + // creat node + ::ge::OpDescPtr desc = std::make_shared("", ""); + desc->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)); + desc->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)); + desc->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + auto node = g->AddNode(desc); + bool ret = OpDescUtils::ClearOutputDesc(node); + EXPECT_EQ(true, ret); +} + +TEST_F(UtestGeOpdesc, 
clear_output_desc_by_index) { + auto g = std::make_shared("Test"); + + // creat node + ::ge::OpDescPtr desc = std::make_shared("", ""); + desc->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)); + desc->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)); + desc->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)); + desc->AddOutputDesc("z", GeTensorDesc(GeShape({1, 1, 8, 8}), FORMAT_NCHW)); + auto node = g->AddNode(desc); + bool ret = OpDescUtils::ClearOutputDesc(desc, 1); + EXPECT_EQ(true, ret); +} + +TEST_F(UtestGeOpdesc, ge_test_opdesc_inputs) { + string name = "Conv2d"; + string type = "Data"; + OpDescPtr op_desc = std::make_shared(name, type); + EXPECT_TRUE(op_desc); + GeTensorDesc te_desc1(GeShape({1, 2, 3, 4}), FORMAT_NCHW, DT_FLOAT); + EXPECT_EQ(GRAPH_SUCCESS, op_desc->AddInputDesc(te_desc1)); + GeTensorDesc te_desc2(GeShape({4, 5, 6, 7}), FORMAT_NCHW, DT_FLOAT); + EXPECT_EQ(GRAPH_SUCCESS, op_desc->AddInputDesc("w", te_desc2)); + GeTensorDesc te_desc3(GeShape({8, 9, 10, 11}), FORMAT_NCHW, DT_FLOAT); + EXPECT_EQ(GRAPH_SUCCESS, op_desc->AddInputDesc("w", te_desc3)); + EXPECT_EQ(GRAPH_SUCCESS, op_desc->AddInputDesc(1, te_desc3)); + EXPECT_EQ(GRAPH_SUCCESS, op_desc->AddInputDesc(2, te_desc3)); + + GeTensorDesc te_desc4(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + EXPECT_EQ(op_desc->UpdateInputDesc(1, te_desc4), GRAPH_SUCCESS); + EXPECT_EQ(op_desc->UpdateInputDesc(4, te_desc4), GRAPH_FAILED); + EXPECT_EQ(op_desc->UpdateInputDesc("w", te_desc4), GRAPH_SUCCESS); + EXPECT_EQ(op_desc->UpdateInputDesc("weight", te_desc4), GRAPH_FAILED); + + GeTensorDesc get_te1 = op_desc->GetInputDesc(1); + GeTensorDesc get_te2 = op_desc->GetInputDesc(4); + GeTensorDesc get_te4 = op_desc->GetInputDesc("w"); + GeTensorDesc get_te3 = op_desc->GetInputDesc("weight"); + + EXPECT_EQ(op_desc->GetInputNameByIndex(1), "w"); + EXPECT_EQ(op_desc->GetInputNameByIndex(3), ""); + + auto vistor_in = op_desc->GetAllInputsDesc(); + 
EXPECT_EQ(vistor_in.size(), 3); + + auto input_size = op_desc->GetInputsSize(); + EXPECT_EQ(input_size, 3); +} + +TEST_F(UtestGeOpdesc, ge_test_opdesc_outputs) { + string name = "Conv2d"; + string type = "Data"; + OpDescPtr op_desc = std::make_shared(name, type); + EXPECT_TRUE(op_desc); + GeTensorDesc te_desc1(GeShape({1, 2, 3, 4}), FORMAT_NCHW, DT_FLOAT); + EXPECT_EQ(GRAPH_SUCCESS, op_desc->AddOutputDesc(te_desc1)); + GeTensorDesc te_desc2(GeShape({4, 5, 6, 7}), FORMAT_NCHW, DT_FLOAT); + EXPECT_EQ(GRAPH_SUCCESS, op_desc->AddOutputDesc("w", te_desc2)); + GeTensorDesc te_desc3(GeShape({8, 9, 10, 11}), FORMAT_NCHW, DT_FLOAT); + EXPECT_EQ(GRAPH_FAILED, op_desc->AddOutputDesc("w", te_desc3)); + + GeTensorDesc te_desc4(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + EXPECT_EQ(op_desc->UpdateOutputDesc(1, te_desc4), GRAPH_SUCCESS); + EXPECT_EQ(op_desc->UpdateOutputDesc(4, te_desc4), GRAPH_FAILED); + EXPECT_EQ(op_desc->UpdateOutputDesc("w", te_desc4), GRAPH_SUCCESS); + EXPECT_EQ(op_desc->UpdateOutputDesc("weight", te_desc4), GRAPH_FAILED); + + GeTensorDesc get_te1 = op_desc->GetOutputDesc(1); + GeTensorDesc get_te2 = op_desc->GetOutputDesc(4); + GeTensorDesc get_te4 = op_desc->GetOutputDesc("w"); + GeTensorDesc get_te3 = op_desc->GetOutputDesc("weight"); + + auto vistor_in = op_desc->GetAllOutputsDesc(); + EXPECT_EQ(vistor_in.size(), 2); +} + +TEST_F(UtestGeOpdesc, ge_test_opdesc_attrs) { + string name = "Conv2d"; + string type = "Data"; + OpDescPtr op_desc = std::make_shared(name, type); + EXPECT_TRUE(op_desc); + auto defautl_attr_size = op_desc->GetAllAttrs().size(); + + static const string PAD = "pad"; + static const string BIAS = "bias"; + + op_desc->SetAttr(PAD, GeAttrValue::CreateFrom(6)); + op_desc->SetAttr(BIAS, GeAttrValue::CreateFrom(0)); + + GeAttrValue at; + EXPECT_EQ(op_desc->GetAttr(PAD, at), GRAPH_SUCCESS); + int get_attr = -1; + at.GetValue(get_attr); + EXPECT_EQ(get_attr, 6); + EXPECT_EQ(op_desc->GetAttr("xxx", at), GRAPH_FAILED); + 
EXPECT_EQ(op_desc->GetAttr(BIAS, at), GRAPH_SUCCESS); + EXPECT_EQ(op_desc->GetAttr("bia", at), GRAPH_FAILED); + EXPECT_TRUE(op_desc->HasAttr(BIAS)); + EXPECT_FALSE(op_desc->HasAttr("xxx")); + + EXPECT_EQ(2, op_desc->GetAllAttrs().size() - defautl_attr_size); + EXPECT_EQ(op_desc->DelAttr("xxx"), GRAPH_FAILED); + EXPECT_EQ(op_desc->DelAttr(PAD), GRAPH_SUCCESS); + EXPECT_EQ(1, op_desc->GetAllAttrs().size() - defautl_attr_size); +} + +graphStatus InferFunctionStub(Operator &op) { return GRAPH_FAILED; } + +TEST_F(UtestGeOpdesc, ge_test_opdesc_call_infer_func_failed) { + GeTensorDesc ge_tensor_desc(GeShape({1, 2, 3, 4}), ge::FORMAT_NCHW, DT_FLOAT16); + auto addn_op_desc = std::make_shared("AddN", "AddN"); + addn_op_desc->AddInputDesc(ge_tensor_desc); + addn_op_desc->AddOutputDesc(ge_tensor_desc); + addn_op_desc->AddInferFunc(InferFunctionStub); + auto graph = std::make_shared("test"); + auto addn_node = std::make_shared(addn_op_desc, graph); + addn_node->Init(); + Operator op = OpDescUtils::CreateOperatorFromNode(addn_node); + + graphStatus ret = addn_op_desc->CallInferFunc(op); + EXPECT_EQ(ret, GRAPH_FAILED); +} + +graphStatus InferFunctionSuccessStub(Operator &op) { return GRAPH_SUCCESS; } + +TEST_F(UtestGeOpdesc, ge_test_opdesc_call_infer_func_success) { + auto addn_op_desc = std::make_shared("AddN", "AddN"); + addn_op_desc->AddInferFunc(InferFunctionSuccessStub); + auto graph = std::make_shared("test"); + auto addn_node = std::make_shared(addn_op_desc, graph); + addn_node->Init(); + Operator op = OpDescUtils::CreateOperatorFromNode(addn_node); + + graphStatus ret = addn_op_desc->CallInferFunc(op); + EXPECT_EQ(ret, GRAPH_SUCCESS); +} + +TEST_F(UtestGeOpdesc, ge_test_opdesc_infer_shape_and_type) { + auto addn_op_desc = std::make_shared("name", "type"); + graphStatus ret = addn_op_desc->InferShapeAndType(); + EXPECT_EQ(ret, GRAPH_SUCCESS); +} + +TEST_F(UtestGeOpdesc, default_infer_format_success) { + auto addn_op_desc = std::make_shared("name", "type"); + std::function 
func = nullptr; + addn_op_desc->AddInferFormatFunc(func); + auto fun1 = addn_op_desc->GetInferFormatFunc(); + graphStatus ret = addn_op_desc->DefaultInferFormat(); + EXPECT_EQ(ret, GRAPH_SUCCESS); +} + +TEST_F(UtestGeOpdesc, call_infer_format_func_success) { + auto addn_op_desc = std::make_shared("name", "type"); + Operator op; + graphStatus ret = addn_op_desc->CallInferFormatFunc(op); + EXPECT_EQ(ret, GRAPH_SUCCESS); +} + +TEST_F(UtestGeOpdesc, add_dynamic_output_desc) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr->AddDynamicOutputDesc("x", 1, false), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddDynamicOutputDesc("x1", 1, false), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddDynamicOutputDesc("x", 1, false), GRAPH_FAILED); + + OpDescPtr desc_ptr2 = std::make_shared("name2", "type2"); + EXPECT_EQ(desc_ptr2->AddDynamicOutputDesc("x", 1), GRAPH_SUCCESS); +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_opdesc_utils_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_opdesc_utils_unittest.cc new file mode 100644 index 00000000..827bac55 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_opdesc_utils_unittest.cc @@ -0,0 +1,133 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define protected public +#define private public +#include "graph/utils/op_desc_utils.h" + +#include "debug/ge_op_types.h" +#include "graph/compute_graph.h" +#include "graph/ge_attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/node.h" +#include "graph/op_desc.h" +#include "graph/operator.h" +#undef protected +#undef private + +using namespace std; +using namespace ge; + +class UtestGeOpdescUtils : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeOpdescUtils, CreateOperatorFromDesc) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + GeAttrValue test_attr = GeAttrValue::CreateFrom(1); + desc_ptr->SetAttr("test_attr", std::move(test_attr)); + + Operator oprt = OpDescUtils::CreateOperatorFromOpDesc(desc_ptr); + + GeAttrValue::INT out; + oprt.GetAttr("test_attr", out); + EXPECT_EQ(out, 1); + + TensorDesc input_desc1 = oprt.GetInputDesc("x"); + EXPECT_TRUE(input_desc1.GetShape().GetDimNum() == 4); + EXPECT_TRUE(input_desc1.GetShape().GetDim(0) == 1); + EXPECT_TRUE(input_desc1.GetShape().GetDim(1) == 16); + EXPECT_TRUE(input_desc1.GetShape().GetDim(2) == 16); + EXPECT_TRUE(input_desc1.GetShape().GetDim(3) == 16); + + TensorDesc input_desc2 = oprt.GetInputDesc(1); + EXPECT_TRUE(input_desc2.GetShape().GetDimNum() == 4); + EXPECT_TRUE(input_desc2.GetShape().GetDim(0) == 1); + EXPECT_TRUE(input_desc2.GetShape().GetDim(1) == 1); + EXPECT_TRUE(input_desc2.GetShape().GetDim(2) == 1); + EXPECT_TRUE(input_desc2.GetShape().GetDim(3) == 1); + + OpDescPtr out_ptr = OpDescUtils::GetOpDescFromOperator(oprt); + EXPECT_TRUE(out_ptr == desc_ptr); + + string name1 = 
out_ptr->GetName(); + string name2 = oprt.GetName(); + EXPECT_TRUE(name1 == name2); +} + +TEST_F(UtestGeOpdescUtils, clear_input_desc) { + OpDescPtr desc_ptr = std::make_shared("name1", "type1"); + EXPECT_EQ(desc_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + OpDescPtr desc_ptr2 = std::make_shared("name2", "type2"); + EXPECT_EQ(desc_ptr2->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + NodePtr n2 = graph_ptr->AddNode(desc_ptr); + EXPECT_TRUE(OpDescUtils::ClearInputDesc(n1)); + EXPECT_TRUE(OpDescUtils::ClearInputDesc(desc_ptr2, 0)); +} + +TEST_F(UtestGeOpdescUtils, mutable_weights) { + OpDescPtr desc_ptr = std::make_shared("name1", CONSTANT); + EXPECT_EQ(desc_ptr->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + OpDescPtr desc_ptr2 = std::make_shared("name2", "type2"); + EXPECT_EQ(desc_ptr2->AddInputDesc("x", GeTensorDesc(GeShape({1, 16, 16, 16}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddInputDesc("w", GeTensorDesc(GeShape({1, 1, 1, 1}), FORMAT_NCHW)), GRAPH_SUCCESS); + EXPECT_EQ(desc_ptr2->AddOutputDesc("y", GeTensorDesc(GeShape({1, 32, 8, 8}), FORMAT_NCHW)), GRAPH_SUCCESS); + + 
ComputeGraphPtr graph_ptr = std::make_shared("name"); + NodePtr n1 = graph_ptr->AddNode(desc_ptr); + NodePtr n2 = graph_ptr->AddNode(desc_ptr); + + float f[1] = {1.0}; + GeTensorDesc tensor_desc(GeShape({1})); + GeTensorPtr tensor = std::make_shared(tensor_desc, (const uint8_t *)f, 1 * sizeof(float)); + + OpDescPtr null_opdesc = nullptr; + + EXPECT_EQ(GRAPH_PARAM_INVALID, OpDescUtils::SetWeights(desc_ptr, nullptr)); + EXPECT_EQ(GRAPH_SUCCESS, OpDescUtils::SetWeights(desc_ptr, tensor)); + EXPECT_EQ(GRAPH_SUCCESS, OpDescUtils::SetWeights(*desc_ptr2.get(), tensor)); + EXPECT_EQ(GRAPH_FAILED, OpDescUtils::SetWeights(*desc_ptr2.get(), nullptr)); + + EXPECT_NE(nullptr, OpDescUtils::MutableWeights(desc_ptr)); + EXPECT_NE(nullptr, OpDescUtils::MutableWeights(*desc_ptr.get())); + + EXPECT_EQ(nullptr, OpDescUtils::MutableWeights(null_opdesc)); + + EXPECT_EQ(nullptr, OpDescUtils::CreateOperatorFromOpDesc(desc_ptr)); + + auto tensor_vec = OpDescUtils::GetWeights(n1); + EXPECT_NE(0, tensor_vec.size()); +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_operator_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_operator_unittest.cc new file mode 100644 index 00000000..f621b848 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_operator_unittest.cc @@ -0,0 +1,185 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define private public +#define protected public +#include "graph/operator.h" + +#include "graph/def_types.h" +#include "graph/ge_attr_value.h" +#include "graph/ge_tensor.h" +#include "graph/graph.h" +#include "graph/operator_factory_impl.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#undef private +#undef protected + +using namespace std; +using namespace ge; + +class UtestGeOperator : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + string vec2str(vector &vec) { + string str((char *)vec.data(), vec.size()); + return str; + } +}; + +TEST_F(UtestGeOperator, quant) { + Operator op("quant"); + + UsrQuantizeFactorParams q1; + q1.quantize_algo = USR_HALF_OFFSET_ALGO; + q1.scale_type = USR_SCALAR_SCALE; + + q1.quantize_param.scale_mode = USR_SQRT_MODE; + string s1 = "value1"; + q1.quantize_param.set_scale_value(s1.data(), s1.size()); + q1.quantize_param.scale_offset = 5; + string s2 = "value2"; + q1.quantize_param.set_offset_data_value(s2.data(), s2.size()); + q1.quantize_param.offset_data_offset = 6; + string s3 = "value3"; + q1.quantize_param.set_offset_weight_value(s3.data(), s3.size()); + q1.quantize_param.offset_weight_offset = 7; + string s4 = "value4"; + q1.quantize_param.set_offset_pad_value(s4.data(), s4.size()); + q1.quantize_param.offset_pad_offset = 8; + + q1.dequantize_param.scale_mode = USR_SQRT_MODE; + q1.dequantize_param.set_scale_value(s1.data(), s1.size()); + q1.dequantize_param.scale_offset = 15; + q1.dequantize_param.set_offset_data_value(s2.data(), s2.size()); + q1.dequantize_param.offset_data_offset = 16; + q1.dequantize_param.set_offset_weight_value(s3.data(), s3.size()); + q1.dequantize_param.offset_weight_offset = 17; + q1.dequantize_param.set_offset_pad_value(s4.data(), s4.size()); + q1.dequantize_param.offset_pad_offset = 18; + + q1.requantize_param.scale_mode = USR_SQRT_MODE; + 
q1.requantize_param.set_scale_value(s1.data(), s1.size()); + q1.requantize_param.scale_offset = 25; + q1.requantize_param.set_offset_data_value(s2.data(), s2.size()); + q1.requantize_param.offset_data_offset = 26; + q1.requantize_param.set_offset_weight_value(s3.data(), s3.size()); + q1.requantize_param.offset_weight_offset = 27; + q1.requantize_param.set_offset_pad_value(s4.data(), s4.size()); + q1.requantize_param.offset_pad_offset = 28; + + q1.quantizecalc_param.set_offsetw(s1.data(), s1.size()); + q1.quantizecalc_param.set_offsetd(s2.data(), s2.size()); + q1.quantizecalc_param.set_scalereq(s3.data(), s3.size()); + q1.quantizecalc_param.set_offsetdnext(s4.data(), s4.size()); + q1.quantizecalc_param.offsetw_offset = 34; + q1.quantizecalc_param.offsetd_offset = 35; + q1.quantizecalc_param.scaledreq_offset = 36; + q1.quantizecalc_param.offsetdnext_offset = 37; + + op.SetAttr("quantize_factor", q1); + UsrQuantizeFactorParams q2; + op.GetAttr("quantize_factor", q2); + + EXPECT_EQ(q2.quantize_algo, USR_HALF_OFFSET_ALGO); + EXPECT_EQ(q2.scale_type, USR_SCALAR_SCALE); + + EXPECT_EQ(q2.quantize_param.scale_mode, USR_SQRT_MODE); + EXPECT_EQ(vec2str(q2.quantize_param.scale_value), s1); + + EXPECT_EQ(q2.quantize_param.scale_offset, 5); + EXPECT_EQ(vec2str(q2.quantize_param.offset_data_value), s2); + EXPECT_EQ(q2.quantize_param.offset_data_offset, 6); + EXPECT_EQ(vec2str(q2.quantize_param.offset_weight_value), s3); + EXPECT_EQ(q2.quantize_param.offset_weight_offset, 7); + EXPECT_EQ(vec2str(q2.quantize_param.offset_pad_value), s4); + EXPECT_EQ(q2.quantize_param.offset_pad_offset, 8); + + EXPECT_EQ(q2.dequantize_param.scale_mode, USR_SQRT_MODE); + EXPECT_EQ(vec2str(q2.dequantize_param.scale_value), s1); + EXPECT_EQ(q2.dequantize_param.scale_offset, 15); + EXPECT_EQ(vec2str(q2.dequantize_param.offset_data_value), s2); + EXPECT_EQ(q2.dequantize_param.offset_data_offset, 16); + EXPECT_EQ(vec2str(q2.dequantize_param.offset_weight_value), s3); + 
EXPECT_EQ(q2.dequantize_param.offset_weight_offset, 17); + EXPECT_EQ(vec2str(q2.dequantize_param.offset_pad_value), s4); + EXPECT_EQ(q2.dequantize_param.offset_pad_offset, 18); + + EXPECT_EQ(q2.requantize_param.scale_mode, USR_SQRT_MODE); + EXPECT_EQ(vec2str(q2.requantize_param.scale_value), s1); + EXPECT_EQ(q2.requantize_param.scale_offset, 25); + EXPECT_EQ(vec2str(q2.requantize_param.offset_data_value), s2); + EXPECT_EQ(q2.requantize_param.offset_data_offset, 26); + EXPECT_EQ(vec2str(q2.requantize_param.offset_weight_value), s3); + EXPECT_EQ(q2.requantize_param.offset_weight_offset, 27); + EXPECT_EQ(vec2str(q2.requantize_param.offset_pad_value), s4); + EXPECT_EQ(q2.requantize_param.offset_pad_offset, 28); + + EXPECT_EQ(vec2str(q2.quantizecalc_param.offsetw), s1); + EXPECT_EQ(vec2str(q2.quantizecalc_param.offsetd), s2); + EXPECT_EQ(vec2str(q2.quantizecalc_param.scalereq), s3); + EXPECT_EQ(vec2str(q2.quantizecalc_param.offsetdnext), s4); + EXPECT_EQ(q2.quantizecalc_param.offsetw_offset, 34); + EXPECT_EQ(q2.quantizecalc_param.offsetd_offset, 35); + EXPECT_EQ(q2.quantizecalc_param.scaledreq_offset, 36); + EXPECT_EQ(q2.quantizecalc_param.offsetdnext_offset, 37); + + EXPECT_EQ(QuantizeFactorHasData(q2.quantize_param), true); + EXPECT_EQ(QuantizeFactorHasData(q2.dequantize_param), true); + EXPECT_EQ(QuantizeFactorHasData(q2.requantize_param), true); + EXPECT_EQ(QuantizeFactorHasData(q2.quantizecalc_param), true); +} + +TEST_F(UtestGeOperator, try_get_input_desc) { + Operator data("data0"); + + TensorDesc td; + graphStatus ret = data.TryGetInputDesc("const", td); + EXPECT_EQ(ret, GRAPH_FAILED); +} + +TEST_F(UtestGeOperator, get_dynamic_input_num) { + Operator const_node("constNode"); + + (void)const_node.DynamicInputRegister("data", 2, 1); + int num = const_node.GetDynamicInputNum("data"); + EXPECT_EQ(num, 2); +} + +TEST_F(UtestGeOperator, infer_format_func_register) { + Operator add("add"); + std::function func = nullptr; + add.InferFormatFuncRegister(func); +} + 
+graphStatus TestFunc(Operator &op) { return 0; } +TEST_F(UtestGeOperator, get_infer_format_func_register) { + (void)OperatorFactoryImpl::GetInferFormatFunc("add"); + std::function func = TestFunc; + OperatorFactoryImpl::RegisterInferFormatFunc("add", TestFunc); + (void)OperatorFactoryImpl::GetInferFormatFunc("add"); +} + +TEST_F(UtestGeOperator, get_attr_names_and_types) { + Operator attr("attr"); + (void)attr.GetAllAttrNamesAndTypes(); +} \ No newline at end of file diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_opsproto_manager_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_opsproto_manager_unittest.cc new file mode 100644 index 00000000..a4f36e39 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_opsproto_manager_unittest.cc @@ -0,0 +1,65 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include +#include "graph/opsproto_manager.h" +#undef protected +#undef private + +using namespace ge; +using namespace testing; +using namespace std; + +class UtestOpsprotoManager : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + public: +}; + +TEST_F(UtestOpsprotoManager, initialize_failure) { + OpsProtoManager *manager = OpsProtoManager::Instance(); + std::map options; + options["a"] = "a"; + bool ret = manager->Initialize(options); + EXPECT_EQ(ret, false); + + options["ge.opsProtoLibPath"] = ""; + ret = manager->Initialize(options); + EXPECT_EQ(ret, true); + + options["ge.opsProtoLibPath"] = "path1:path2"; + ret = manager->Initialize(options); + EXPECT_EQ(ret, true); + + options["ge.opsProtoLibPath"] = "/usr/local/HiAI/path1.so:$ASCEND_HOME/path2"; + EXPECT_EQ(ret, true); + + mkdir("test_ops_proto_manager", S_IRUSR); + + options["ge.opsProtoLibPath"] = "test_ops_proto_manager"; + ret = manager->Initialize(options); + EXPECT_EQ(ret, true); + rmdir("test_proto_manager"); + + manager->Finalize(); +} \ No newline at end of file diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_out_tensor_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_out_tensor_unittest.cc new file mode 100644 index 00000000..cb2a19ed --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_out_tensor_unittest.cc @@ -0,0 +1,351 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include "graph/ge_attr_value.h" +#include "graph/tensor.h" +#include "graph/utils/tensor_adapter.h" +#include "graph/utils/tensor_utils.h" + +using namespace std; +using namespace ge; + +class UtestGeOutTensor : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeOutTensor, shape) { + Shape a; + EXPECT_EQ(a.GetDim(0), 0); + EXPECT_EQ(a.GetShapeSize(), 0); + EXPECT_EQ(a.SetDim(0, 0), GRAPH_FAILED); + + vector vec({1, 2, 3, 4}); + Shape b(vec); + Shape c({1, 2, 3, 4}); + EXPECT_EQ(c.GetDimNum(), 4); + EXPECT_EQ(c.GetDim(2), 3); + EXPECT_EQ(c.GetDim(5), 0); + EXPECT_EQ(c.SetDim(10, 0), GRAPH_FAILED); + + EXPECT_EQ(c.SetDim(2, 2), GRAPH_SUCCESS); + EXPECT_EQ(c.GetDim(2), 2); + vector vec1 = c.GetDims(); + EXPECT_EQ(c.GetDim(0), vec1[0]); + EXPECT_EQ(c.GetDim(1), vec1[1]); + EXPECT_EQ(c.GetDim(2), vec1[2]); + EXPECT_EQ(c.GetDim(3), vec1[3]); + + EXPECT_EQ(c.GetShapeSize(), 16); +} + +TEST_F(UtestGeOutTensor, tensor_desc) { + TensorDesc a; + Shape s({1, 2, 3, 4}); + TensorDesc b(s); + Shape s1 = b.GetShape(); + EXPECT_EQ(s1.GetDim(0), s.GetDim(0)); + auto shape_m1 = b.GetShape(); + shape_m1.SetDim(0, 2); + b.SetShape(shape_m1); + EXPECT_EQ(b.GetShape().GetDim(0), 2); + Shape s2({3, 2, 3, 4}); + b.SetShape(s2); + EXPECT_EQ(b.GetShape().GetDim(0), 3); + + EXPECT_EQ(b.GetFormat(), FORMAT_NCHW); + b.SetFormat(FORMAT_RESERVED); + EXPECT_EQ(b.GetFormat(), FORMAT_RESERVED); + + EXPECT_EQ(b.GetDataType(), DT_FLOAT); + b.SetDataType(DT_INT8); + EXPECT_EQ(b.GetDataType(), DT_INT8); + + TensorDesc c; + c.Update(Shape({1}), FORMAT_NCHW); + c.Update(s, FORMAT_NCHW); + c.SetSize(1); + + TensorDesc d; + d = c; // Clone; + EXPECT_EQ(d.GetSize(), 1); + d.SetSize(12); + EXPECT_EQ(d.GetSize(), 12); + + TensorDesc e = c; + EXPECT_EQ(e.GetSize(), 1); + + TensorDesc f = c; + EXPECT_EQ(f.GetSize(), 1); +} + 
+TEST_F(UtestGeOutTensor, tensor) { + Shape s({1, 2, 3, 4}); + TensorDesc tensor_desc(s); + std::vector data({1, 2, 3, 4}); + Tensor a; + Tensor b(tensor_desc); + Tensor c(tensor_desc, data); + Tensor d(tensor_desc, data.data(), data.size()); + + ASSERT_EQ(a.GetSize(), 0); + ASSERT_EQ(b.GetSize(), 0); + ASSERT_EQ(c.GetSize(), data.size()); + ASSERT_EQ(d.GetSize(), data.size()); + EXPECT_EQ(c.GetData()[0], uint8_t(1)); + EXPECT_EQ(c.GetData()[1], uint8_t(2)); + EXPECT_EQ(d.GetData()[2], uint8_t(3)); + EXPECT_EQ(d.GetData()[3], uint8_t(4)); + EXPECT_EQ(d.GetTensorDesc().GetFormat(), FORMAT_NCHW); + EXPECT_EQ(b.GetTensorDesc().GetShape().GetDim(0), 1); + EXPECT_EQ(c.GetTensorDesc().GetShape().GetDim(1), 2); + EXPECT_EQ(d.GetTensorDesc().GetShape().GetDim(2), 3); + + Shape s1 = b.GetTensorDesc().GetShape(); + EXPECT_EQ(s1.GetDim(0), 1); + EXPECT_EQ(b.GetTensorDesc().GetDataType(), DT_FLOAT); + EXPECT_EQ(b.GetTensorDesc().GetFormat(), FORMAT_NCHW); + + auto tensor_desc_m1 = b.GetTensorDesc(); + tensor_desc_m1.SetDataType(DT_INT8); + b.SetTensorDesc(tensor_desc_m1); + EXPECT_EQ(b.GetTensorDesc().GetDataType(), DT_INT8); + EXPECT_EQ(b.GetTensorDesc().GetFormat(), FORMAT_NCHW); + + EXPECT_EQ(b.GetTensorDesc().GetSize(), 0); + auto tensor_desc_m2 = b.GetTensorDesc(); + tensor_desc_m2.SetFormat(FORMAT_NC1HWC0); + tensor_desc_m2.SetSize(112); + b.SetTensorDesc(tensor_desc_m2); + EXPECT_EQ(b.GetTensorDesc().GetDataType(), DT_INT8); + EXPECT_EQ(b.GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(b.GetTensorDesc().GetSize(), 112); + + auto data1 = c.GetData(); + c.SetData(data); + c.SetData(data.data(), data.size()); + EXPECT_EQ(c.GetSize(), data.size()); + EXPECT_EQ(c.GetData()[0], uint8_t(1)); + EXPECT_EQ(c.GetData()[1], uint8_t(2)); + EXPECT_EQ(c.GetData()[2], uint8_t(3)); + EXPECT_EQ(c.GetData()[3], uint8_t(4)); + + Tensor e(std::move(tensor_desc), std::move(data)); + EXPECT_EQ(e.GetSize(), data.size()); + EXPECT_EQ(e.GetData()[2], uint8_t(3)); + + Tensor f = 
e.Clone(); + e.GetData()[2] = 5; + EXPECT_EQ(e.GetData()[2], uint8_t(5)); + EXPECT_EQ(f.GetSize(), data.size()); + EXPECT_EQ(f.GetData()[2], uint8_t(3)); +} + +TEST_F(UtestGeOutTensor, test_shape_copy) { + Shape shape; + EXPECT_EQ(shape.GetDimNum(), 0); + + Shape shape2 = shape; + EXPECT_EQ(shape2.GetDimNum(), 0); + + Shape shape3({1, 2, 3}); + shape2 = shape3; + EXPECT_EQ(shape2.GetDimNum(), 3); + EXPECT_EQ(shape3.GetDimNum(), 3); +} + +TEST_F(UtestGeOutTensor, test_tensor_adapter_as_ge_tensor) { + TensorDesc tensor_desc(Shape({2, 3, 4, 5}), FORMAT_NC1HWC0, DT_FLOAT16); + tensor_desc.SetSize(120); + vector data = {3, 4, 5, 6, 7, 8}; + Tensor tensor(tensor_desc, data); + + GeTensor ge_tensor = TensorAdapter::AsGeTensor(tensor); + EXPECT_EQ(ge_tensor.GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(ge_tensor.GetTensorDesc().GetDataType(), DT_FLOAT16); + uint32_t size = 0; + TensorUtils::GetSize(ge_tensor.GetTensorDesc(), size); + EXPECT_EQ(size, 120); + auto dims = ge_tensor.GetTensorDesc().GetShape().GetDims(); + ASSERT_EQ(dims.size(), 4); + EXPECT_EQ(dims[0], 2); + EXPECT_EQ(dims[3], 5); + EXPECT_EQ(ge_tensor.GetData().GetSize(), 6); + EXPECT_EQ(ge_tensor.GetData().GetData()[0], 3); + EXPECT_EQ(ge_tensor.GetData().GetData()[5], 8); + + auto ge_tensor_ptr = TensorAdapter::AsGeTensorPtr(tensor); + EXPECT_EQ(ge_tensor_ptr->GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(ge_tensor_ptr->GetTensorDesc().GetDataType(), DT_FLOAT16); + + const Tensor tensor2 = tensor; + const GeTensor ge_tensor2 = TensorAdapter::AsGeTensor(tensor2); + EXPECT_EQ(ge_tensor2.GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(ge_tensor2.GetTensorDesc().GetDataType(), DT_FLOAT16); + TensorUtils::GetSize(ge_tensor2.GetTensorDesc(), size); + EXPECT_EQ(size, 120); + auto dims2 = ge_tensor2.GetTensorDesc().GetShape().GetDims(); + ASSERT_EQ(dims2.size(), 4); + EXPECT_EQ(dims2[0], 2); + EXPECT_EQ(dims2[3], 5); + EXPECT_EQ(ge_tensor2.GetData().GetSize(), 6); + 
EXPECT_EQ(ge_tensor2.GetData().GetData()[0], 3); + EXPECT_EQ(ge_tensor2.GetData().GetData()[5], 8); + + auto ge_tensor_ptr2 = TensorAdapter::AsGeTensorPtr(tensor2); + EXPECT_EQ(ge_tensor_ptr2->GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(ge_tensor_ptr2->GetTensorDesc().GetDataType(), DT_FLOAT16); + + // modify format + ge_tensor.MutableTensorDesc().SetFormat(FORMAT_NC1C0HWPAD); + EXPECT_EQ(ge_tensor.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(tensor.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(ge_tensor2.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(tensor2.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + + EXPECT_EQ(ge_tensor_ptr->GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(ge_tensor_ptr2->GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + + // modify datatype + tensor_desc.SetDataType(DT_INT32); + tensor.SetTensorDesc(tensor_desc); + EXPECT_EQ(tensor.GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(ge_tensor.GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(tensor2.GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(ge_tensor2.GetTensorDesc().GetDataType(), DT_INT32); + + EXPECT_EQ(ge_tensor_ptr->GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(ge_tensor_ptr2->GetTensorDesc().GetDataType(), DT_INT32); +} + +TEST_F(UtestGeOutTensor, test_tensor_adapter_as_tensor) { + GeTensorDesc ge_tensor_desc(GeShape({2, 3, 4, 5}), FORMAT_NC1HWC0, DT_FLOAT16); + TensorUtils::SetSize(ge_tensor_desc, 120); + vector data = {3, 4, 5, 6, 7, 8}; + GeTensor ge_tensor(ge_tensor_desc, data); + + Tensor tensor = TensorAdapter::AsTensor(ge_tensor); + EXPECT_EQ(tensor.GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(tensor.GetTensorDesc().GetDataType(), DT_FLOAT16); + EXPECT_EQ(tensor.GetTensorDesc().GetSize(), 120); + + auto dims = tensor.GetTensorDesc().GetShape().GetDims(); + ASSERT_EQ(dims.size(), 4); + EXPECT_EQ(dims[0], 2); + EXPECT_EQ(dims[3], 5); + EXPECT_EQ(tensor.GetSize(), 6); 
+ EXPECT_EQ(tensor.GetData()[0], 3); + EXPECT_EQ(tensor.GetData()[5], 8); + + const GeTensor ge_tensor2 = ge_tensor; + const Tensor tensor2 = TensorAdapter::AsTensor(ge_tensor2); + EXPECT_EQ(tensor2.GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(tensor2.GetTensorDesc().GetDataType(), DT_FLOAT16); + EXPECT_EQ(tensor2.GetTensorDesc().GetSize(), 120); + auto dims2 = tensor2.GetTensorDesc().GetShape().GetDims(); + ASSERT_EQ(dims2.size(), 4); + EXPECT_EQ(dims2[0], 2); + EXPECT_EQ(dims2[3], 5); + EXPECT_EQ(tensor2.GetSize(), 6); + EXPECT_EQ(tensor2.GetData()[0], 3); + EXPECT_EQ(tensor2.GetData()[5], 8); + + // modify format + ge_tensor.MutableTensorDesc().SetFormat(FORMAT_NC1C0HWPAD); + EXPECT_EQ(ge_tensor.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(tensor.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(ge_tensor2.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(tensor2.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + + // modify datatype + auto tensor_desc = TensorAdapter::GeTensorDesc2TensorDesc(ge_tensor_desc); + tensor_desc.SetDataType(DT_INT32); + tensor.SetTensorDesc(tensor_desc); + EXPECT_EQ(tensor.GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(ge_tensor.GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(tensor2.GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(ge_tensor2.GetTensorDesc().GetDataType(), DT_INT32); +} + +TEST_F(UtestGeOutTensor, test_tensor_adapter_transfer2_ge_tensor) { + TensorDesc tensor_desc(Shape({2, 3, 4, 5}), FORMAT_NC1HWC0, DT_FLOAT16); + tensor_desc.SetSize(120); + vector data = {3, 4, 5, 6, 7, 8}; + Tensor tensor(tensor_desc, data); + + auto get_tensor_ptr = TensorAdapter::Tensor2GeTensor(tensor); + + EXPECT_EQ(get_tensor_ptr->GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(get_tensor_ptr->GetTensorDesc().GetDataType(), DT_FLOAT16); + uint32_t size = 0; + TensorUtils::GetSize(get_tensor_ptr->GetTensorDesc(), size); + EXPECT_EQ(size, 120); + auto dims = 
get_tensor_ptr->GetTensorDesc().GetShape().GetDims(); + ASSERT_EQ(dims.size(), 4); + EXPECT_EQ(dims[0], 2); + EXPECT_EQ(dims[3], 5); + EXPECT_EQ(get_tensor_ptr->GetData().GetSize(), 6); + EXPECT_EQ(get_tensor_ptr->GetData().GetData()[0], 3); + EXPECT_EQ(get_tensor_ptr->GetData().GetData()[5], 8); + + // modify format + get_tensor_ptr->MutableTensorDesc().SetFormat(FORMAT_NC1C0HWPAD); + EXPECT_EQ(get_tensor_ptr->GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(tensor.GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); // copy, not change + + // modify datatype + tensor_desc.SetDataType(DT_INT32); + tensor.SetTensorDesc(tensor_desc); + EXPECT_EQ(tensor.GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(get_tensor_ptr->GetTensorDesc().GetDataType(), DT_FLOAT16); // copy, not change +} + +TEST_F(UtestGeOutTensor, test_tensor_adapter_transfer2_tensor) { + GeTensorDesc ge_tensor_desc(GeShape({2, 3, 4, 5}), FORMAT_NC1HWC0, DT_FLOAT16); + TensorUtils::SetSize(ge_tensor_desc, 120); + vector data = {3, 4, 5, 6, 7, 8}; + GeTensor ge_tensor(ge_tensor_desc, data); + + Tensor tensor = TensorAdapter::GeTensor2Tensor(std::make_shared(ge_tensor)); + EXPECT_EQ(tensor.GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); + EXPECT_EQ(tensor.GetTensorDesc().GetDataType(), DT_FLOAT16); + EXPECT_EQ(tensor.GetTensorDesc().GetSize(), 120); + + auto dims = tensor.GetTensorDesc().GetShape().GetDims(); + ASSERT_EQ(dims.size(), 4); + EXPECT_EQ(dims[0], 2); + EXPECT_EQ(dims[3], 5); + EXPECT_EQ(tensor.GetSize(), 6); + EXPECT_EQ(tensor.GetData()[0], 3); + EXPECT_EQ(tensor.GetData()[5], 8); + + // modify format + ge_tensor.MutableTensorDesc().SetFormat(FORMAT_NC1C0HWPAD); + EXPECT_EQ(ge_tensor.GetTensorDesc().GetFormat(), FORMAT_NC1C0HWPAD); + EXPECT_EQ(tensor.GetTensorDesc().GetFormat(), FORMAT_NC1HWC0); // copy, not change + + // modify datatype + auto tensor_desc = TensorAdapter::GeTensorDesc2TensorDesc(ge_tensor_desc); + tensor_desc.SetDataType(DT_INT32); + tensor.SetTensorDesc(tensor_desc); 
+ EXPECT_EQ(tensor.GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(ge_tensor.GetTensorDesc().GetDataType(), DT_FLOAT16); // copy, not change +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_reg_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_reg_unittest.cc new file mode 100644 index 00000000..48e6a069 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_reg_unittest.cc @@ -0,0 +1,142 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define protected public +#define private public +#include "graph/graph.h" +#include "graph/model.h" +#include "graph/utils/tensor_utils.h" +#include "ops_stub.h" +#undef protected +#undef private + +using namespace std; +using namespace ge; + +class UtestGeOperatorReg : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeOperatorReg, ge_test_operator_reg_test) { + TensorDesc desc(ge::Shape({1, 3, 224, 224})); + uint32_t size = desc.GetShape().GetShapeSize(); + desc.SetSize(size); + auto data = op::Data("Data").set_attr_index(0); + data.update_input_desc_data(desc); + data.update_output_desc_out(desc); + + auto flatten = op::Flatten("Flatten").set_input_x(data, data.name_out_out()); + + std::vector inputs{data}; + std::vector outputs{flatten}; + std::vector targets{flatten}; + + Graph graph("test_graph"); + graph.SetInputs(inputs).SetOutputs(outputs).SetTargets(targets); + EXPECT_EQ(true, graph.IsValid()); + + Graph graph1("test_graph1"); + auto data1 = op::Data("Data1").set_attr_index(0); + data1.update_input_desc_data(desc); + data1.update_output_desc_out(desc); + std::vector targets1{data1}; + graph1.SetInputs(inputs).SetTargets(targets1); +} + +TEST_F(UtestGeOperatorReg, test_set_outputs) { + TensorDesc desc(ge::Shape({1, 3, 224, 224})); + uint32_t size = desc.GetShape().GetShapeSize(); + desc.SetSize(size); + auto data = op::Data("Data").set_attr_index(0); + data.update_input_desc_data(desc); + data.update_output_desc_out(desc); + + auto flatten = op::Flatten("Flatten").set_input_x(data, data.name_out_out()); + std::vector inputs{data}; + std::vector targets{flatten}; + std::vector> outputs{{flatten, "Flattern"}}; + + Graph graph("test_graph"); + graph.SetInputs(inputs).SetOutputs(outputs).SetTargets(targets); + EXPECT_EQ(true, graph.IsValid()); +} + +TEST_F(UtestGeOperatorReg, test_setoutputs_node_not_exist) { + TensorDesc desc(ge::Shape({1, 3, 224, 224})); + uint32_t size = 
desc.GetShape().GetShapeSize(); + desc.SetSize(size); + auto data0 = op::Data("Data0").set_attr_index(0); + data0.update_input_desc_data(desc); + data0.update_output_desc_out(desc); + + auto data1 = op::Data("Data1").set_attr_index(0); + data1.update_input_desc_data(desc); + data1.update_output_desc_out(desc); + + std::vector inputs{data0}; + std::vector outputs{data1}; + + Graph graph("test_graph"); + graph.SetInputs(inputs).SetOutputs(outputs); +} + +bool buildGraph1(Graph &graph) { + auto data = op::Data("data").set_attr_index(0); + graphStatus ret = graph.AddOp(data); + EXPECT_EQ(GRAPH_SUCCESS, ret); + + auto flatten = op::Flatten("flatten").set_input_x(data); + ret = graph.AddOp(flatten); + EXPECT_EQ(GRAPH_SUCCESS, ret); + + return true; +} + +TEST_F(UtestGeOperatorReg, test_add_op) { + Graph graph("simpleGraph"); + bool ret_graph = buildGraph1(graph); + EXPECT_EQ(ret_graph, true); + + std::vector op_name; + graphStatus ret = graph.GetAllOpName(op_name); + EXPECT_EQ(GRAPH_SUCCESS, ret); + for (unsigned int i = 0; i < op_name.size(); i++) { + std::cout << "opname: " << op_name[i] << std::endl; + } + EXPECT_EQ(op_name.size(), 2); + + Operator op; + ret = graph.FindOpByName("dat", op); + EXPECT_EQ(ret, GRAPH_FAILED); + ret = graph.FindOpByName("data", op); + EXPECT_EQ(ret, GRAPH_SUCCESS); + ret = graph.FindOpByName("flatten", op); + EXPECT_EQ(ret, GRAPH_SUCCESS); + Operator data_op; + (void)graph.FindOpByName("data", data_op); + Operator f_op; + (void)graph.FindOpByName("flatten", f_op); + data_op.GetOutputsSize(); + std::vector inputs{data_op}; + std::vector outputs{f_op}; + graph.SetInputs(inputs).SetOutputs(outputs); +} diff --git a/tests/ut/common/graph/testcase/ge_graph/ge_tensor_unittest.cc b/tests/ut/common/graph/testcase/ge_graph/ge_tensor_unittest.cc new file mode 100644 index 00000000..a4af8399 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/ge_tensor_unittest.cc @@ -0,0 +1,378 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * 
+ * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include + +#define private public +#define protected public +#include "graph/ge_tensor.h" + +#include "graph/ge_attr_value.h" +#include "graph/tensor.h" +#include "graph/utils/tensor_utils.h" +#undef private +#undef protected + +using namespace std; +using namespace ge; + +class UtestGeTensor : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGeTensor, origin_shape_format) { + GeTensorDesc a; + GeShape shape({1, 2, 3, 4}); + a.SetOriginShape(shape); + a.SetOriginFormat(FORMAT_NCHW); + EXPECT_EQ(a.GetOriginShape().GetShapeSize(), 24); + EXPECT_EQ(a.GetOriginFormat(), FORMAT_NCHW); +} + +TEST_F(UtestGeTensor, get_shape_size) { + vector vec2{-1, 1, 2, 4}; + Shape shape2(vec2); + shape2.GetShapeSize(); + + vector vec3{-1, 2, 4, INT64_MAX}; + Shape shape3(vec3); + shape3.GetShapeSize(); + + vector vec4{-1, 2, 4, INT64_MAX}; + Shape shape4(vec4); + shape4.GetShapeSize(); + + vector vec1{1, 2, 3, 4}; + Shape shape1(vec1); + EXPECT_EQ(shape1.GetShapeSize(), 24); +} + +TEST_F(UtestGeTensor, shape) { + GeShape a; + EXPECT_EQ(a.GetDim(0), 0); + EXPECT_EQ(a.GetShapeSize(), 0); + EXPECT_EQ(a.SetDim(0, 0), GRAPH_FAILED); + + vector vec({1, 2, 3, 4}); + GeShape b(vec); + GeShape c({1, 2, 3, 4}); + EXPECT_EQ(c.GetDimNum(), 4); + EXPECT_EQ(c.GetDim(2), 3); + EXPECT_EQ(c.GetDim(5), 0); + EXPECT_EQ(c.SetDim(10, 0), GRAPH_FAILED); + + EXPECT_EQ(c.SetDim(2, 
2), GRAPH_SUCCESS); + EXPECT_EQ(c.GetDim(2), 2); + vector vec1 = c.GetDims(); + EXPECT_EQ(c.GetDim(0), vec1[0]); + EXPECT_EQ(c.GetDim(1), vec1[1]); + EXPECT_EQ(c.GetDim(2), vec1[2]); + EXPECT_EQ(c.GetDim(3), vec1[3]); + + EXPECT_EQ(c.GetShapeSize(), 16); +} + +TEST_F(UtestGeTensor, ge_shape_to_string1) { + GeShape shape1({1, 2, 3, 4}); + EXPECT_EQ(shape1.ToString(), "1,2,3,4"); + GeShape shape2; + EXPECT_EQ(shape2.ToString(), ""); +} + +TEST_F(UtestGeTensor, tensor_desc) { + GeTensorDesc a; + GeShape s({1, 2, 3, 4}); + GeTensorDesc b(s, FORMAT_NCHW); + GeShape s1 = b.GetShape(); + EXPECT_EQ(s1.GetDim(0), s.GetDim(0)); + b.MutableShape().SetDim(0, 2); + EXPECT_EQ(b.GetShape().GetDim(0), 2); + GeShape s2({3, 2, 3, 4}); + b.SetShape(s2); + EXPECT_EQ(b.GetShape().GetDim(0), 3); + + EXPECT_EQ(b.GetFormat(), FORMAT_NCHW); + b.SetFormat(FORMAT_RESERVED); + EXPECT_EQ(b.GetFormat(), FORMAT_RESERVED); + + EXPECT_EQ(b.GetDataType(), DT_FLOAT); + b.SetDataType(DT_INT8); + EXPECT_EQ(b.GetDataType(), DT_INT8); + + GeTensorDesc c; + c.Update(GeShape({1}), FORMAT_NCHW); + c.Update(s, FORMAT_NCHW); + uint32_t size1 = 1; + TensorUtils::SetSize(c, size1); + GeTensorDesc d; + d = c.Clone(); + GeTensorDesc e = c; + uint32_t size2 = 0; + EXPECT_EQ(TensorUtils::GetSize(e, size2), GRAPH_SUCCESS); + EXPECT_EQ(size2, 1); + + GeTensorDesc f = c; + size2 = 0; + EXPECT_EQ(TensorUtils::GetSize(f, size2), GRAPH_SUCCESS); + EXPECT_EQ(size2, 1); + EXPECT_EQ(c.IsValid(), GRAPH_SUCCESS); + c.Update(GeShape(), FORMAT_RESERVED, DT_UNDEFINED); + EXPECT_EQ(c.IsValid(), GRAPH_PARAM_INVALID); +} + +TEST_F(UtestGeTensor, tensor) { + GeShape s({1, 2, 3, 4}); + GeTensorDesc tensor_desc(s); + std::vector data({1, 2, 3, 4}); + GeTensor a; + GeTensor b(tensor_desc); + GeTensor c(tensor_desc, data); + GeTensor d(tensor_desc, data.data(), data.size()); + + GeShape s1 = b.GetTensorDesc().GetShape(); + EXPECT_EQ(s1.GetDim(0), 1); + EXPECT_EQ(b.GetTensorDesc().GetDataType(), DT_FLOAT); + 
b.MutableTensorDesc().SetDataType(DT_INT8); + EXPECT_EQ(b.GetTensorDesc().GetDataType(), DT_INT8); + b.SetTensorDesc(tensor_desc); + + auto data1 = c.GetData(); + c.SetData(data); + c.SetData(data.data(), data.size()); + EXPECT_EQ(c.GetData()[0], uint8_t(1)); + EXPECT_EQ(c.GetData()[1], uint8_t(2)); + EXPECT_EQ(c.MutableData().GetData()[2], uint8_t(3)); + EXPECT_EQ(c.MutableData().GetData()[3], uint8_t(4)); + + GeTensor e(std::move(tensor_desc), std::move(data)); + EXPECT_EQ(e.GetData().GetSize(), data.size()); + EXPECT_EQ(e.GetData()[2], uint8_t(3)); + + GeTensor f = e.Clone(); + e.MutableData().data()[2] = 5; + EXPECT_EQ(e.GetData().data()[2], uint8_t(5)); + EXPECT_EQ(f.GetData().GetSize(), data.size()); + EXPECT_EQ(f.GetData()[2], uint8_t(3)); +} + +TEST_F(UtestGeTensor, test_shape_copy_move) { + GeShape shape(nullptr, nullptr); + EXPECT_EQ(shape.GetDimNum(), 0); + + GeShape shape2 = shape; + EXPECT_EQ(shape2.GetDimNum(), 0); + + GeShape shape3({1, 2, 3}); + shape2 = shape3; + EXPECT_EQ(shape2.GetDimNum(), 3); + EXPECT_EQ(shape3.GetDimNum(), 3); + + GeShape shape4 = std::move(shape3); + EXPECT_EQ(shape4.GetDimNum(), 3); + EXPECT_EQ(shape3.GetDimNum(), 0); + + GeShape shape5; + EXPECT_EQ(shape5.GetDimNum(), 0); + shape5 = std::move(shape4); + EXPECT_EQ(shape5.GetDimNum(), 3); + EXPECT_EQ(shape4.GetDimNum(), 3); +} + +TEST_F(UtestGeTensor, test_tensor_desc_invalid_null) { + GeTensorDesc tensor_desc(nullptr, nullptr); + EXPECT_EQ(tensor_desc.GetDataType(), DT_UNDEFINED); + EXPECT_EQ(tensor_desc.GetFormat(), FORMAT_RESERVED); + EXPECT_EQ(tensor_desc.MutableShape().shape_def_.GetProtoMsg(), nullptr); + + GeTensorDesc tensor_desc2; + EXPECT_EQ(tensor_desc2.GetDataType(), DT_FLOAT); + EXPECT_EQ(tensor_desc2.GetFormat(), FORMAT_ND); + + tensor_desc2.SetDataType(DT_DUAL_SUB_INT8); + EXPECT_EQ(tensor_desc2.GetDataType(), DT_DUAL_SUB_INT8); + + CompressInfo info; + EXPECT_EQ(TensorUtils::GetCmpsInfo(tensor_desc2, info), GRAPH_FAILED); + 
TensorUtils::SetCmpsInfo(tensor_desc2, info); + EXPECT_EQ(TensorUtils::GetCmpsInfo(tensor_desc2, info), GRAPH_SUCCESS); + + AllOffsetQuantizeInfo quantize_info; + EXPECT_FALSE(TensorUtils::HasAlloffsetQuantizeInfo(tensor_desc2)); + EXPECT_EQ(TensorUtils::GetAlloffsetQuantizeInfo(tensor_desc2, quantize_info), GRAPH_FAILED); + TensorUtils::SetAlloffsetQuantizeInfo(tensor_desc2, quantize_info); + EXPECT_EQ(TensorUtils::GetAlloffsetQuantizeInfo(tensor_desc2, quantize_info), GRAPH_SUCCESS); + EXPECT_TRUE(TensorUtils::HasAlloffsetQuantizeInfo(tensor_desc2)); + + TensorUtils::SetWeightSize(tensor_desc, 100); + EXPECT_EQ(TensorUtils::GetWeightSize(tensor_desc), 0); +} + +TEST_F(UtestGeTensor, test_tensor_invalid_null) { + ProtoMsgOwner msg_owner; + GeTensor tensor(msg_owner, nullptr); + EXPECT_EQ(tensor.GetData().size(), 0); + EXPECT_EQ(tensor.MutableData().size(), 0); + EXPECT_EQ(tensor.SetData(Buffer(100)), ge::GRAPH_PARAM_INVALID); + + TensorUtils::SetWeightSize(tensor.MutableTensorDesc(), 100); + EXPECT_EQ(TensorUtils::GetWeightSize(tensor), 0); + + auto tensor_ptr = std::make_shared(msg_owner, nullptr); + TensorUtils::SetWeightSize(tensor_ptr->MutableTensorDesc(), 100); + EXPECT_EQ(TensorUtils::GetWeightSize(tensor_ptr), 0); + + GeTensor tensor1 = tensor; + EXPECT_EQ(TensorUtils::GetWeightSize(tensor1), 0); +} + +TEST_F(UtestGeTensor, test_tensor_utils_weight_size) { + GeTensor tensor; + EXPECT_EQ(tensor.GetData().size(), 0); + EXPECT_EQ(tensor.MutableData().size(), 0); + EXPECT_EQ(tensor.SetData(Buffer(100)), GRAPH_SUCCESS); + + TensorUtils::SetWeightSize(tensor.MutableTensorDesc(), 100); + EXPECT_EQ(TensorUtils::GetWeightSize(tensor), 100); + + uint8_t buffer[100]; + EXPECT_TRUE(TensorUtils::GetWeightAddr(tensor, buffer) != nullptr); + + auto tensor_ptr = std::make_shared(); + TensorUtils::SetWeightSize(tensor_ptr->MutableTensorDesc(), 100); + EXPECT_EQ(TensorUtils::GetWeightSize(tensor_ptr), 100); + EXPECT_TRUE(TensorUtils::GetWeightAddr(tensor_ptr, buffer) != 
nullptr); + + GeTensor tensor1 = tensor; + EXPECT_EQ(TensorUtils::GetWeightSize(tensor1), 100); + + GeTensor tensor2(GeTensorDesc(), Buffer(100)); + EXPECT_EQ(tensor2.GetData().size(), 100); + EXPECT_EQ(tensor2.MutableData().size(), 100); + + GeTensor tensor3; + tensor3 = tensor2; + EXPECT_EQ(tensor3.GetData().size(), 100); + EXPECT_EQ(tensor3.MutableData().size(), 100); + + TensorUtils::SetDataOffset(tensor3.MutableTensorDesc(), 20); + EXPECT_EQ(TensorUtils::GetWeightAddr(tensor3, buffer), buffer + 20); +} + +TEST_F(UtestGeTensor, test_tensor_valid) { + // Tensor(const TensorDesc &tensor_desc, const std::vector &data) + Shape shape({1, 1, 1}); + TensorDesc tensor_desc(shape); + std::vector data({1, 2, 3, 4}); + Tensor tensor1(tensor_desc, data); + EXPECT_EQ(tensor1.IsValid(), GRAPH_SUCCESS); + + // Tensor(const TensorDesc &tensor_desc, const uint8_t *data, size_t size) + TensorDesc tensor_desc2(Shape({3, 3, 3}), FORMAT_NCHW, DT_FLOAT); + uint32_t size2 = 3 * 3 * 3 * 4; + uint8_t data2[3 * 3 * 3 * 4] = {0}; + Tensor tensor2(tensor_desc2, data2, size2); + EXPECT_EQ(tensor2.IsValid(), GRAPH_SUCCESS); + + // Tensor(TensorDesc &&tensor_desc, std::vector &&data) + Tensor tensor3(std::move(tensor_desc), std::move(data)); + EXPECT_EQ(tensor3.IsValid(), GRAPH_SUCCESS); + + // DT_UNDEFINED + TensorDesc tensor_desc3(Shape({3, 3, 3}), FORMAT_NCHW, DT_UNDEFINED); + Tensor tensor4(tensor_desc3, data2, size2); + EXPECT_EQ(tensor4.IsValid(), GRAPH_SUCCESS); + + // Tensor() + Tensor tensor5; + EXPECT_EQ(tensor5.IsValid(), GRAPH_SUCCESS); + tensor5.SetTensorDesc(tensor_desc); + tensor5.SetData(data); + EXPECT_EQ(tensor5.IsValid(), GRAPH_SUCCESS); + + // scalar 1 + uint8_t data6[4] = {1, 2, 3, 4}; + Tensor tensor6; + tensor6.SetData(data6, 4); + EXPECT_EQ(tensor6.IsValid(), GRAPH_SUCCESS); + + // scalar 2 + TensorDesc tensor_desc7(Shape(), FORMAT_NCHW, DT_FLOAT); + float data7 = 2; + Tensor tensor7(tensor_desc7, (uint8_t *)&data7, sizeof(float)); + EXPECT_EQ(tensor7.IsValid(), 
GRAPH_SUCCESS); + + // string scalar + TensorDesc tensor_desc8(Shape(), FORMAT_NCHW, DT_STRING); + Tensor tensor8; + tensor8.SetTensorDesc(tensor_desc8); + string data8 = "A handsome boy write this code"; + EXPECT_EQ(tensor8.SetData(data8), GRAPH_SUCCESS); + EXPECT_EQ(tensor8.IsValid(), GRAPH_SUCCESS); + + // string vector + TensorDesc tensor_desc9(Shape({2}), FORMAT_NCHW, DT_STRING); + vector data9 = {"A handsome boy write this code", "very handsome"}; + Tensor tensor9(tensor_desc9); + EXPECT_EQ(tensor9.SetData(data9), GRAPH_SUCCESS); + EXPECT_EQ(tensor9.IsValid(), GRAPH_SUCCESS); + + vector empty_data9; + EXPECT_EQ(tensor9.SetData(empty_data9), GRAPH_FAILED); +} + +TEST_F(UtestGeTensor, test_tensor_invalid) { + // Tensor(const TensorDesc &tensor_desc, const std::vector &data) + Shape shape({1, 1, 1}); + TensorDesc tensor_desc(shape); + std::vector data({1, 2, 3, 4, 5}); + Tensor tensor1(tensor_desc, data); + EXPECT_EQ(tensor1.IsValid(), GRAPH_SUCCESS); + + // Tensor(const TensorDesc &tensor_desc, const uint8_t *data, size_t size) + TensorDesc tensor_desc2(Shape({3, 3, 3}), FORMAT_NCHW, DT_FLOAT); + uint32_t size2 = 3 * 3 * 3; + uint8_t data2[3 * 3 * 3] = {0}; + Tensor tensor2(tensor_desc2, data2, size2); + EXPECT_EQ(tensor2.IsValid(), GRAPH_SUCCESS); + + // Tensor(TensorDesc &&tensor_desc, std::vector &&data) + Tensor tensor3(std::move(tensor_desc), std::move(data)); + EXPECT_EQ(tensor3.IsValid(), GRAPH_SUCCESS); + + // Tensor() + Tensor tensor4; + tensor4.SetTensorDesc(tensor_desc); + EXPECT_EQ(tensor4.IsValid(), GRAPH_SUCCESS); + tensor4.SetData(data); + EXPECT_EQ(tensor4.IsValid(), GRAPH_SUCCESS); + + Tensor tensor5; + tensor5.SetData(data); + EXPECT_EQ(tensor5.IsValid(), GRAPH_SUCCESS); + tensor5.SetTensorDesc(tensor_desc); + EXPECT_EQ(tensor5.IsValid(), GRAPH_SUCCESS); + + // scalar + TensorDesc tensor_desc6(Shape(), FORMAT_NCHW, DT_FLOAT); + uint8_t data6 = 2; + Tensor tensor6(tensor_desc6, &data6, 1); + EXPECT_EQ(tensor6.IsValid(), GRAPH_SUCCESS); +} diff 
--git a/tests/ut/common/graph/testcase/ge_graph/graph_builder_utils.cc b/tests/ut/common/graph/testcase/ge_graph/graph_builder_utils.cc new file mode 100644 index 00000000..4044d670 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/graph_builder_utils.cc @@ -0,0 +1,49 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph_builder_utils.h" + +#include "graph/utils/graph_utils.h" + +namespace ge { +namespace ut { +NodePtr GraphBuilder::AddNode(const std::string &name, const std::string &type, int in_cnt, int out_cnt, Format format, + DataType data_type, std::vector shape) { + auto tensor_desc = std::make_shared(); + tensor_desc->SetShape(GeShape(std::move(shape))); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + + auto op_desc = std::make_shared(name, type); + for (int i = 0; i < in_cnt; ++i) { + op_desc->AddInputDesc(tensor_desc->Clone()); + op_desc->RestoreInputNameIdx(std::to_string(i), i); + } + for (int i = 0; i < out_cnt; ++i) { + op_desc->AddOutputDesc(tensor_desc->Clone()); + op_desc->RestoreOutputNameIdx(std::to_string(i), i); + } + + return graph_->AddNode(op_desc); +} +void GraphBuilder::AddDataEdge(NodePtr &src_node, int src_idx, NodePtr &dst_node, int dst_idx) { + GraphUtils::AddEdge(src_node->GetOutDataAnchor(src_idx), dst_node->GetInDataAnchor(dst_idx)); +} +void GraphBuilder::AddControlEdge(NodePtr &src_node, NodePtr &dst_node) { 
+ GraphUtils::AddEdge(src_node->GetOutControlAnchor(), dst_node->GetInControlAnchor()); +} +} // namespace ut +} // namespace ge \ No newline at end of file diff --git a/tests/ut/common/graph/testcase/ge_graph/graph_builder_utils.h b/tests/ut/common/graph/testcase/ge_graph/graph_builder_utils.h new file mode 100644 index 00000000..45c75b28 --- /dev/null +++ b/tests/ut/common/graph/testcase/ge_graph/graph_builder_utils.h @@ -0,0 +1,51 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef UT_COMMON_GRAPH_TESTCASE_GE_GRAPH_GRAPH_BUILDER_UTILS_H_ +#define UT_COMMON_GRAPH_TESTCASE_GE_GRAPH_GRAPH_BUILDER_UTILS_H_ + +#include +#include + +#include "graph/compute_graph.h" +#include "graph/graph.h" +#include "graph/node.h" + +namespace ge { +namespace ut { +class GraphBuilder { + public: + explicit GraphBuilder(const std::string &name) { graph_ = std::make_shared(name); } + NodePtr AddNode(const std::string &name, const std::string &type, int in_cnt, int out_cnt, + Format format = FORMAT_NCHW, DataType data_type = DT_FLOAT, + std::vector shape = {1, 1, 224, 224}); + NodePtr AddNDNode(const std::string &name, const std::string &type, int in_cnt, int out_cnt) { + return AddNode(name, type, in_cnt, out_cnt, FORMAT_ND, DT_FLOAT, {1, 1, 224, 224}); + } + void AddDataEdge(NodePtr &src_node, int src_idx, NodePtr &dst_node, int dst_idx); + void AddControlEdge(NodePtr &src_node, NodePtr &dst_node); + ComputeGraphPtr GetGraph() { + graph_->TopologicalSorting(); + return graph_; + } + + private: + ComputeGraphPtr graph_; +}; +} // namespace ut +} // namespace ge + +#endif // UT_COMMON_GRAPH_TESTCASE_GE_GRAPH_GRAPH_BUILDER_UTILS_H_ diff --git a/tests/ut/common/graph/testcase/main.cc b/tests/ut/common/graph/testcase/main.cc new file mode 100644 index 00000000..42a4188b --- /dev/null +++ b/tests/ut/common/graph/testcase/main.cc @@ -0,0 +1,23 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +int main(int argc, char **argv) { + testing::InitGoogleTest(&argc, argv); + int ret = RUN_ALL_TESTS(); + return ret; +} diff --git a/tests/ut/ge/CMakeLists.txt b/tests/ut/ge/CMakeLists.txt new file mode 100755 index 00000000..46f18e84 --- /dev/null +++ b/tests/ut/ge/CMakeLists.txt @@ -0,0 +1,587 @@ +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +project(ut_ge) + +set(CMAKE_CXX_STANDARD 11) + +file(GLOB_RECURSE PROTO_LIST RELATIVE ${CMAKE_CURRENT_LIST_DIR} + "${GE_SOURCE_DIR}/src/proto/om.proto" + "${GE_SOURCE_DIR}/src/proto/ge_ir.proto" + "${GE_SOURCE_DIR}/src/proto/ge_api.proto" + "${GE_SOURCE_DIR}/src/proto/fwk_adapter.proto" + "${GE_SOURCE_DIR}/src/proto/op_mapping_info.proto" + "${GE_SOURCE_DIR}/src/proto/ge_api.proto" + "${onnx_INC}/onnx/onnx.proto" + ) + +ge_protobuf_generate(ge PROTO_SRCS PROTO_HDRS ${PROTO_LIST}) + +# include directories +include_directories(${CMAKE_CURRENT_LIST_DIR}) +include_directories(${GE_SOURCE_DIR}) +include_directories(${GE_SOURCE_DIR}/src/ge/inc) +include_directories(${GE_SOURCE_DIR}/src) +include_directories(${GE_SOURCE_DIR}/src/ge) +include_directories(${GE_SOURCE_DIR}/src/common) +include_directories(${GE_SOURCE_DIR}/src/common/graph) +include_directories(${GE_SOURCE_DIR}/inc) +include_directories(${GE_SOURCE_DIR}/inc/external) +include_directories(${GE_SOURCE_DIR}/inc/external/graph) +include_directories(${GE_SOURCE_DIR}/inc/graph) +include_directories(${GE_SOURCE_DIR}/inc/framework) +include_directories(${GE_SOURCE_DIR}/inc/common) +include_directories(${GE_SOURCE_DIR}/inc/ops) +include_directories(${GE_SOURCE_DIR}/third_party/eigen) +include_directories(${GE_SOURCE_DIR}/third_party/securec/include) +include_directories(${GE_SOURCE_DIR}/third_party/json/include) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/ops) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc) +include_directories(${GE_SOURCE_DIR}/third_party/fwkacllib/inc/cce) +include_directories(${GE_SOURCE_DIR}/tests/ut/ge) +include_directories(/usr/local/HiAI/opp/op_proto/built-in/inc) +include_directories(${CMAKE_BINARY_DIR}) +include_directories(${CMAKE_BINARY_DIR}/proto/ge) + +file(GLOB_RECURSE COMMON_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/common/properties_manager.cc" 
+ "${GE_SOURCE_DIR}/src/ge/common/ge/plugin_manager.cc" + "${GE_SOURCE_DIR}/src/ge/common/ge/tbe_plugin_manager.cc" + "${GE_SOURCE_DIR}/src/common/graph/option/ge_local_context.cc" + "${GE_SOURCE_DIR}/src/common/graph/option/ge_context.cc" + "${GE_SOURCE_DIR}/src/ge/common/types.cc" + "${GE_SOURCE_DIR}/src/ge/common/op_map.cc" + "${GE_SOURCE_DIR}/src/ge/common/fmk_error_codes.cc" + "${GE_SOURCE_DIR}/src/ge/common/op/ge_op_utils.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/util/node_searcher/need_rebuild_node_searcher.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/util/variable_accelerate_ctrl.cc" + "${GE_SOURCE_DIR}/src/ge/opskernel_manager/ops_kernel_manager.cc" + "${GE_SOURCE_DIR}/src/ge/generator/ge_generator.cc" + "${GE_SOURCE_DIR}/src/ge/generator/generator_api.cc" + "${GE_SOURCE_DIR}/src/ge/graph/common/omg_util.cc" + "${GE_SOURCE_DIR}/src/ge/graph/common/bcast.cc" + "${GE_SOURCE_DIR}/src/ge/common/util.cc" + "${GE_SOURCE_DIR}/src/common/graph/ge_attr_define.cc" + "${GE_SOURCE_DIR}/src/common/graph/anchor.cc" + "${GE_SOURCE_DIR}/src/common/graph/ge_attr_value.cc" + "${GE_SOURCE_DIR}/src/common/graph/attr_value.cc" + "${GE_SOURCE_DIR}/src/common/graph/buffer.cc" + "${GE_SOURCE_DIR}/src/common/graph/compute_graph.cc" + "${GE_SOURCE_DIR}/src/common/graph/graph.cc" + "${GE_SOURCE_DIR}/src/common/graph/inference_context.cc" + "${GE_SOURCE_DIR}/src/common/graph/shape_refiner.cc" + "${GE_SOURCE_DIR}/src/common/graph/model.cc" + "${GE_SOURCE_DIR}/src/common/graph/model_serialize.cc" + "${GE_SOURCE_DIR}/src/common/graph/node.cc" + "${GE_SOURCE_DIR}/src/common/graph/op_desc.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator_factory.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator_factory_impl.cc" + "${GE_SOURCE_DIR}/src/common/graph/operator_reg.cc" + "${GE_SOURCE_DIR}/src/common/graph/range_vistor.cc" + "${GE_SOURCE_DIR}/src/common/graph/ge_tensor.cc" + "${GE_SOURCE_DIR}/src/common/graph/tensor.cc" + 
"${GE_SOURCE_DIR}/src/common/graph/detail/attributes_holder.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/anchor_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/graph_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/ge_ir_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/node_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/op_desc_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/tensor_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/utils/type_utils.cc" + "${GE_SOURCE_DIR}/src/common/graph/debug/graph_debug.cc" + "${GE_SOURCE_DIR}/src/common/graph/opsproto/opsproto_manager.cc" + "${GE_SOURCE_DIR}/src/common/graph/op_imp.cc" + "${GE_SOURCE_DIR}/src/common/register/register.cc" + "${GE_SOURCE_DIR}/src/common/register/auto_mapping_util.cc" + "${GE_SOURCE_DIR}/src/common/register/tensor_assign.cc" + "${GE_SOURCE_DIR}/src/common/graph/format_refiner.cc" +) + +file(GLOB_RECURSE COMMON_FORMAT_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/common/fp16_t.cc" + "${GE_SOURCE_DIR}/src/ge/common/ge_format_util.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/formats.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/datatype_transfer.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_transpose.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_nchw_nc1hwc0.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_fractal_z.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_fractal_nz.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_fractal_zz.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nchw.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.cc" + 
"${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_fracz_nchw.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_fracz_nhwc.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/format_transfers/format_transfer_fracz_hwcn.cc" + "${GE_SOURCE_DIR}/src/ge/common/formats/utils/formats_trans_utils.cc" +) + +file(GLOB_RECURSE GRAPH_OPTIMIZE_COMMON_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/graph/optimize/graph_optimize.cc" + "${GE_SOURCE_DIR}/src/ge/graph/optimize/summary_optimize.cc" +) + + +file(GLOB_RECURSE GRAPH_PREPARE_COMMON_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/graph/preprocess/graph_preprocess.cc" + "${GE_SOURCE_DIR}/src/ge/graph/preprocess/insert_op/util_insert_aipp_op.cc" + "${GE_SOURCE_DIR}/src/ge/graph/preprocess/insert_op/ge_aipp_op.cc" + "${GE_SOURCE_DIR}/src/ge/graph/preprocess/insert_op/base_insert_op.cc" +) + +file(GLOB_RECURSE GRAPH_PARTITION_COMMON_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/graph/partition/graph_partition.cc" + "${GE_SOURCE_DIR}/src/ge/plugin/engine/dnnengines.cc" + "${GE_SOURCE_DIR}/src/ge/graph/partition/engine_place.cc" +) + +file(GLOB_RECURSE GRAPH_LOAD_COMMON_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/graph/load/graph_loader.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/graph_manager_utils.cc" + "${GE_SOURCE_DIR}/src/ge/omm/csa_interact.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/graph_mem_allocator.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/graph_var_manager.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/trans_var_data_utils.cc" + "${GE_SOURCE_DIR}/src/ge/common/util.cc" +) + +file(GLOB_RECURSE DISTINCT_GRAPH_LOAD_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/graph/manager/util/hcom_util.cc" + 
"${GE_SOURCE_DIR}/src/ge/graph/manager/util/debug.cc" + "${GE_SOURCE_DIR}/src/ge/common/properties_manager.cc" + "${GE_SOURCE_DIR}/src/ge/common/profiling/profiling_manager.cc" + "${GE_SOURCE_DIR}/src/ge/common/model_parser/base.cc" + "${GE_SOURCE_DIR}/src/ge/common/tbe_kernel_store.cc" + "${GE_SOURCE_DIR}/src/ge/common/util.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/data_dumper.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/data_inputer.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/davinci_model.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/davinci_model_parser.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/model_manager.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/model_output.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/model_utils.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/tbe_handle_store.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/event_record_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/event_wait_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/fusion_start_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/fusion_stop_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/hccl_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/kernel_ex_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/kernel_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/label_goto_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/label_set_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/memcpy_async_task_info.cc" + 
"${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/profiler_trace_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/stream_active_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/stream_switch_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/new_model_manager/task_info/end_graph_task_info.cc" + "${GE_SOURCE_DIR}/src/ge/graph/load/output/output.cc" + "${GE_SOURCE_DIR}/src/ge/model/ge_model.cc" + "${GE_SOURCE_DIR}/src/ge/common/helper/model_helper.cc" + "${GE_SOURCE_DIR}/src/ge/common/helper/om_file_helper.cc" + "${GE_SOURCE_DIR}/src/ge/common/debug/memory_dumper.cc" + "${GE_SOURCE_DIR}/src/ge/executor/ge_executor.cc" + "${GE_SOURCE_DIR}/src/ge/common/auth/file_saver.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/model_manager/event_manager.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/custom/custom_op.cc" + ) + +file(GLOB_RECURSE GRAPH_EXECUTE_COMMON_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/graph/execute/graph_execute.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/graph_manager.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/graph_context.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/util/rt_context_util.cc" + "${GE_SOURCE_DIR}/src/ge/graph/manager/graph_context.h" + "${GE_SOURCE_DIR}/src/ge/common/thread_pool.cc" +) + +file(GLOB_RECURSE GRAPH_BUILD_COMMON_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/graph/build/graph_build.cc" + "${GE_SOURCE_DIR}/src/ge/graph/build/new_model/task_generator.cc" + "${GE_SOURCE_DIR}/src/ge/init/gelib.cc" + "${GE_SOURCE_DIR}/src/ge/client/ge_api.cc" + "${GE_SOURCE_DIR}/src/ge/session/inner_session.cc" + "${GE_SOURCE_DIR}/src/ge/session/session_manager.cc" + "${GE_SOURCE_DIR}/src/ge/engine_manager/dnnengine_manager.cc" + "${GE_SOURCE_DIR}/src/ge/plugin/engine/engine_manage.cc" + "${GE_SOURCE_DIR}/src/ge/graph/build/logical_stream_allocator.cc" + "${GE_SOURCE_DIR}/src/ge/graph/build/stream_allocator.cc" + 
"${GE_SOURCE_DIR}/src/ge/graph/build/memory/block_mem_assigner.cc" + "${GE_SOURCE_DIR}/src/ge/graph/build/memory/binary_block_mem_assigner.cc" + "${GE_SOURCE_DIR}/src/ge/graph/build/memory/hybrid_mem_assigner.cc" + "${GE_SOURCE_DIR}/src/ge/graph/build/memory/max_block_mem_assigner.cc" + "${GE_SOURCE_DIR}/src/ge/model/ge_model.cc" + "${GE_SOURCE_DIR}/src/ge/common/helper/model_helper.cc" + "${GE_SOURCE_DIR}/src/ge/common/helper/om_file_helper.cc" + "${GE_SOURCE_DIR}/src/ge/common/tbe_kernel_store.cc" + "${GE_SOURCE_DIR}/src/ge/common/model_parser/base.cc" + "${GE_SOURCE_DIR}/src/ge/graph/build/run_context.cc" + "${GE_SOURCE_DIR}/src/ge/graph/build/optimize_stream_graph.cc" +) + +file(GLOB_RECURSE GRAPH_PASS_COMMON_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/graph/passes/pass_manager.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/base_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/variable_prepare_op_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/variable_ref_delete_op_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/atomic_addr_clean_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/constant_folding_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/iterator_fusion_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/iterator_op_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/net_output_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/update_net_output_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/end_graph_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/node_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/print_op_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/dimension_compute_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/dimension_adjust_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/get_original_format_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/stop_gradient_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/guarantee_const_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/prevent_gradient_pass.cc" + 
"${GE_SOURCE_DIR}/src/ge/graph/passes/identity_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/placeholder_with_default_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/snapshot_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/shape_operate_op_remove_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/unused_op_remove_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/isolated_op_remove_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/permute_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/var_is_initialized_op_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/cast_translate_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/dropout_scope_split_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/prune_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/enter_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/next_iteration_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/switch_op_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/pass_utils.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/addn_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/save_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/merge_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/switch_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/switch_logic_remove_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/assert_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/dropout_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/unused_const_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/reshape_remove_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/resource_pair_add_control_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/resource_pair_remove_control_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/transop_breadth_fusion_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/transop_without_reshape_fusion_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/transop_depth_fusion_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/transop_nearby_allreduce_fusion_pass.cc" + 
"${GE_SOURCE_DIR}/src/ge/graph/passes/same_transdata_breadth_fusion_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/compile_nodes_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/common/transop_util.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/flow_ctrl_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/optimize/optimizer/allreduce_fusion_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/folding_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/variable_op_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/transpose_transdata_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/hccl_memcpy_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/no_reshape_op_remove_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/no_use_reshape_remove_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/control_op_attr_pass.cc" + "${GE_SOURCE_DIR}/src/ge/graph/passes/infershape_pass.cc" +) + +file(GLOB_RECURSE KERNEL_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/broadcast_args_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/greater_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/gather_v2_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/maximum_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/floormod_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/floordiv_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/range_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/mul_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/shape_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/shape_n_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/size_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/rank_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/fill_kernel.cc" 
+"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/empty_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/cast_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/transdata_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/permute_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/squeeze_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/reshape_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/reformat_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/kernel_utils.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/expanddims_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/ssd_prior_box_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/pack_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/strided_slice_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/concat_v2_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/add_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/sub_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/reduce_prod_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/rsqrt_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/concat_offset_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/slice_kernel.cc" +"${GE_SOURCE_DIR}/src/ge/graph/passes/folding_kernel/dynamic_stitch_kernel.cc" +) + +file(GLOB_RECURSE SINGLE_OP_SRC_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "${GE_SOURCE_DIR}/src/ge/single_op/task/build_task_utils.cc" + "${GE_SOURCE_DIR}/src/ge/single_op/task/op_task.cc" + "${GE_SOURCE_DIR}/src/ge/single_op/task/tbe_task_builder.cc" + "${GE_SOURCE_DIR}/src/ge/single_op/single_op.cc" + "${GE_SOURCE_DIR}/src/ge/single_op/single_op_model.cc" + "${GE_SOURCE_DIR}/src/ge/single_op/stream_resource.cc" + "${GE_SOURCE_DIR}/src/ge/single_op/single_op_manager.cc" +) + +# test files +file(GLOB_RECURSE 
COMMON_TEST_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "graph/passes/graph_builder_utils.cc" + "test.cc" +) + +file(GLOB_RECURSE DISTINCT_GRAPH_LOAD_TEST_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "graph/load/data_dumper_unittest.cc" + "graph/load/new_model_manager_data_inputer_unittest.cc" + "graph/load/new_model_manager_davinci_model_unittest.cc" + "graph/load/new_model_manager_model_manager_unittest.cc" + "graph/load/new_model_manager_task_build_unittest.cc" + "graph/load/end_graph_task_unittest.cc" + "graph/load/new_model_manager_event_manager_unittest.cc" + "graph/load/output_net_output_unittest.cc" + "graph/load/tbe_handle_store_unittest.cc" + "graph/graph_load_unittest.cc" + "graph/ge_executor_unittest.cc" +) + +file(GLOB_RECURSE PASS_TEST_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "graph/passes/prune_pass_unittest.cc" + "graph/passes/enter_pass_unittest.cc" + "graph/passes/switch_op_pass_unittest.cc" + "graph/passes/get_original_format_pass_unittest.cc" + "graph/passes/pass_manager_unittest.cc" + "graph/passes/permute_pass_unittest.cc" + "graph/passes/print_op_pass_unittest.cc" + "graph/passes/shape_operate_op_remove_pass_unittest.cc" + "graph/passes/unused_and_isolated_op_remove_pass_unittest.cc" + "graph/passes/variable_op_pass_unittest.cc" + "graph/passes/base_pass_unittest.cc" + "graph/passes/addn_pass_unittest.cc" + "graph/passes/save_pass_unittest.cc" + "graph/passes/merge_pass_unittest.cc" + "graph/passes/switch_pass_unittest.cc" + "graph/passes/switch_logic_remove_pass_unittest.cc" + "graph/passes/assert_pass_unittest.cc" + "graph/passes/dropout_pass_unittest.cc" + "graph/passes/unused_const_pass_unittest.cc" + "graph/passes/reshape_remove_pass_unittest.cc" + "graph/passes/resource_pair_control_pass_unittest.cc" + "graph/passes/trans_op_breadth_fusion_pass_unittest.cc" + "graph/passes/trans_op_depth_fusion_pass_unittest.cc" + "graph/passes/transop_nearby_allreduce_fusion_pass_unittest.cc" + "graph/passes/constant_folding_pass_unittest.cc" + 
"graph/passes/stop_gradient_pass_unittest.cc" + "graph/passes/prevent_gradient_pass_unittest.cc" + "graph/passes/identity_pass_unittest.cc" + "graph/passes/placeholder_with_default_pass_unittest.cc" + "graph/passes/snapshot_pass_unittest.cc" + "graph/passes/guarantee_const_pass_unittest.cc" + "graph/passes/flow_ctrl_pass_unittest.cc" + "graph/passes/dimension_compute_pass_unittest.cc" + "graph/passes/variable_prepare_pass_unittest.cc" + "graph/passes/variable_ref_delete_pass_unittest.cc" + "graph/passes/dimension_adjust_pass_unittest.cc" + "graph/passes/pass_utils_unittest.cc" + "graph/passes/net_output_pass_unittest.cc" + "graph/passes/update_net_output_pass_unittest.cc" + "graph/passes/no_reshape_op_remove_pass_unittest.cc" + "graph/passes/no_use_reshape_remove_pass_unittest.cc" + "graph/passes/infershape_pass_unittest.cc" +) + +file(GLOB_RECURSE KERNEL_TEST_FILES ${CMAKE_CURRENT_SOURCE_DIR} + "graph/passes/folding_kernel/greater_kernel_unittest.cc" + "graph/passes/folding_kernel/maximum_kernel_unittest.cc" + "graph/passes/folding_kernel/floormod_kernel_unittest.cc" + "graph/passes/folding_kernel/floordiv_kernel_unittest.cc" + "graph/passes/folding_kernel/range_kernel_unittest.cc" + "graph/passes/folding_kernel/mul_kernel_unittest.cc" + "graph/passes/folding_kernel/shape_kernel_unittest.cc" + "graph/passes/folding_kernel/shape_n_kernel_unittest.cc" + "graph/passes/folding_kernel/size_kernel_unittest.cc" + "graph/passes/folding_kernel/broadcast_args_kernel_unittest.cc" + "graph/passes/folding_kernel/broadcast_gradient_args_kernel_unittest.cc" + "graph/passes/folding_kernel/fill_kernel_unittest.cc" + "graph/passes/folding_kernel/empty_kernel_unittest.cc" + "graph/passes/folding_kernel/rank_kernel_unittest.cc" + "graph/passes/folding_kernel/expanddims_kernel_unittest.cc" + "graph/passes/folding_kernel/reshape_kernel_unittest.cc" + "graph/passes/folding_kernel/reformat_kernel_unittest.cc" + "graph/passes/folding_kernel/squeeze_kernel_unittest.cc" + 
"graph/passes/folding_kernel/transdata_kernel_unittest.cc" + "graph/passes/folding_kernel/permute_kernel_unittest.cc" + "graph/passes/folding_kernel/cast_kernel_unittest.cc" + "graph/passes/folding_kernel/ssd_prior_box_kernel_unittest.cc" + "graph/passes/folding_kernel/strided_slice_kernel_unittest.cc" + "graph/passes/folding_kernel/pack_kernel_unittest.cc" + "graph/passes/folding_kernel/concat_v2_kernel_unittest.cc" + "graph/passes/folding_kernel/add_kernel_unittest.cc" + "graph/passes/folding_kernel/sub_kernel_unittest.cc" + "graph/passes/folding_kernel/reduce_prod_kernel_unittest.cc" + "graph/passes/folding_kernel/rsqrt_kernel_unittest.cc" + "graph/passes/folding_kernel/concat_offset_kernel_unittest.cc" + "graph/passes/folding_kernel/gather_v2_kernel_unittest.cc" + "graph/passes/folding_kernel/slice_kernel_unittest.cc" + "graph/passes/folding_kernel/dynamic_stitch_kernel_unittest.cc" +) + +file(GLOB_RECURSE MULTI_PARTS_TEST_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "graph_ir/ge_operator_factory_unittest.cc" + "graph/transop_util_unittest.cc" + "common/datatype_transfer_unittest.cc" + "common/format_transfer_unittest.cc" + "common/format_transfer_transpose_unittest.cc" + "common/format_transfer_nchw_5d_unittest.cc" + "common/format_transfer_nchw_fractalz_unittest.cc" + "common/format_transfer_hwcn_fractalz_unittest.cc" + "common/format_transfer_nhwc_fractalz_unittest.cc" + "common/format_transfer_fractal_nz_unittest.cc" + "common/format_transfer_fractal_zz_unittest.cc" + "common/format_transfer_nhwc_5d_unittest.cc" + "common/format_transfer_5d_nchw_unittest.cc" + "common/format_transfer_5d_nhwc_unittest.cc" + "common/format_transfer_hwcn_c1hwncoc0_unittest.cc" + "common/format_transfer_c1hwncoc0_hwcn_unittest.cc" + "common/format_transfer_fracz_nchw_unittest.cc" + "common/format_transfer_fracz_nhwc_unittest.cc" + "common/format_transfer_fracz_hwcn_unittest.cc" + "common/ge_format_util_unittest.cc" + "graph/variable_accelerate_ctrl_unittest.cc" + 
"graph/build/logical_stream_allocator_unittest.cc" + "graph/build/mem_assigner_unittest.cc" +) + +file(GLOB_RECURSE SINGLE_OP_TEST_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "single_op/single_op_model_unittest.cc" + "single_op/single_op_manager_unittest.cc" + "single_op/stream_resource_unittest.cc" +) + +file(GLOB_RECURSE PROFILING_MNG_TEST_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "profiling/ge_profiling_manager_unittest.cc" +) + +file(GLOB_RECURSE OTHERS_TEST_FILES RELATIVE ${CMAKE_CURRENT_SOURCE_DIR} + "plugin_manager/ge_util_unittest.cc" +) + +list(APPEND COMMON_SHARED_LIBRARIES + omg_stub + ${c_sec} + slog_stub + cce_ge_stub + runtime_stub + profiler_stub + mmpa_stub + hccl_stub +) +# build common +add_library(ge_ut_common STATIC ${COMMON_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build common format +add_library(ge_ut_common_format STATIC ${COMMON_SRC_FILES} ${COMMON_FORMAT_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build graph prepare common +add_library(ge_prepare_common STATIC ${GRAPH_PREPARE_COMMON_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build graph optimize common +add_library(ge_optimize_common STATIC ${GRAPH_OPTIMIZE_COMMON_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build graph partition common +add_library(ge_partition_common STATIC ${GRAPH_PARTITION_COMMON_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build build graph load common +add_library(ge_load_common STATIC ${GRAPH_LOAD_COMMON_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build graph execute common +add_library(ge_execute_common STATIC ${GRAPH_EXECUTE_COMMON_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build graph build common +add_library(ge_build_common STATIC ${GRAPH_BUILD_COMMON_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build graph pass common +add_library(ge_pass_common STATIC ${GRAPH_PASS_COMMON_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# build single_op common +add_library(ge_single_op STATIC ${SINGLE_OP_SRC_FILES} ${PROTO_SRCS} ${PROTO_HDRS}) + +# ut binary + +# 
libge_mutiparts_utest +add_executable(ut_libge_multiparts_utest + ${COMMON_TEST_FILES} + ${COMMON_FORMAT_SRC_FILES} + ${MULTI_PARTS_TEST_FILES} +) +target_link_libraries(ut_libge_multiparts_utest + ge_build_common ge_load_common ge_build_common ge_execute_common ge_optimize_common ge_partition_common ge_pass_common + ge_prepare_common ge_single_op ge_ut_common + graphengine::gtest graphengine::gtest_main ge_protobuf::protobuf rt dl +) +target_link_libraries(ut_libge_multiparts_utest ${COMMON_SHARED_LIBRARIES} ge_protobuf::protobuf) + +# libge_others_utest +add_executable(ut_libge_others_utest + ${COMMON_TEST_FILES} + ${COMMON_FORMAT_SRC_FILES} + ${PASS_TEST_FILES} + ${EXECUTE_TEST_FILES} + ${OTHERS_TEST_FILES} +) +target_link_libraries(ut_libge_others_utest + ge_execute_common ge_load_common ge_pass_common ge_ut_common + graphengine::gtest graphengine::gtest_main ge_protobuf::protobuf rt dl +) +target_link_libraries(ut_libge_others_utest ${COMMON_SHARED_LIBRARIES} ge_protobuf::protobuf) + +# libge_kernel_utest +add_executable(ut_libge_kernel_utest + ${COMMON_TEST_FILES} + ${COMMON_FORMAT_SRC_FILES} + ${KERNEL_TEST_FILES} + ${KERNEL_SRC_FILES} +) +target_link_libraries(ut_libge_kernel_utest + ge_pass_common ge_ut_common ge_load_common + graphengine::gtest graphengine::gtest_main ge_protobuf::protobuf rt dl +) +target_link_libraries(ut_libge_kernel_utest ${COMMON_SHARED_LIBRARIES} ge_protobuf::protobuf) + +# libge_distinct_load_utest +add_executable(ut_libge_distinct_load_utest + ${COMMON_TEST_FILES} + ${DISTINCT_GRAPH_LOAD_TEST_FILES} + ${DISTINCT_GRAPH_LOAD_SRC_FILES} + ${SINGLE_OP_TEST_FILES} + ${PROFILING_MNG_TEST_FILES} +) +target_link_libraries(ut_libge_distinct_load_utest ${COMMON_SHARED_LIBRARIES} + ge_execute_common ge_ut_common ge_ut_common_format ge_pass_common ge_load_common + ge_single_op ge_prepare_common + ge_optimize_common ge_build_common ge_partition_common + graphengine::gtest graphengine::gtest_main ge_protobuf::protobuf rt dl pthread +) diff 
--git a/tests/ut/ge/common/datatype_transfer_unittest.cc b/tests/ut/ge/common/datatype_transfer_unittest.cc new file mode 100644 index 00000000..5f11b272 --- /dev/null +++ b/tests/ut/ge/common/datatype_transfer_unittest.cc @@ -0,0 +1,401 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "common/formats/format_transfers/datatype_transfer.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/formats/formats.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UtestDataTypeTransfer : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestDataTypeTransfer, fp16_fp32) { + fp16_t data[1 * 4 * 4 * 2] = { + 15272, 12501, 13940, 10024, 13356, 13068, 12088, 13733, 15257, 14104, 11089, 15298, 10597, 14359, 14402, 14748, + 14596, 14063, 14674, 13393, 12937, 13466, 14313, 13295, 15000, 15167, 15311, 13122, 10691, 15165, 14621, 14000, + }; + + float ret[1 * 4 * 4 * 2] = { + 0.957031, 0.151001, 0.40332, 0.0279541, 0.260742, 0.220215, 0.112793, 0.352783, 0.949707, 0.443359, 0.0571594, + 0.969727, 0.0421448, 0.51123, 0.532227, 0.701172, 0.626953, 0.43335, 0.665039, 0.269775, 0.204224, 0.287598, + 0.494385, 0.247925, 0.824219, 0.905762, 0.976074, 0.226807, 0.0450134, 0.904785, 0.63916, 0.417969, + }; + TransResult result; + DataTypeTransfer transfer; + CastArgs args{reinterpret_cast(data), 
sizeof(ret) / sizeof(ret[0]), DT_FLOAT16, DT_FLOAT}; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + bool is_equal = true; + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + if (abs((reinterpret_cast(result.data.get()))[i] - ret[i]) > 1.0e-6) { + is_equal = false; + break; + } + } + EXPECT_FLOAT_EQ(is_equal, true); + + TransResult result2; + DataTypeTransfer transfer2; + CastArgs args2{reinterpret_cast(ret), sizeof(ret) / sizeof(ret[0]), DT_FLOAT, DT_FLOAT16}; + EXPECT_EQ(transfer2.TransDataType(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data)); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result2.data.get()))[i].val, data[i].val); + } + EXPECT_EQ(TransDataType(args2, result2), SUCCESS); +} + +TEST_F(UtestDataTypeTransfer, int32_fp16) { + int32_t data[1 * 4 * 4 * 2] = { + 15272, 12501, 13940, 10024, 13356, 13068, 12088, 13733, 15257, 14104, 11089, 15298, 10597, 14359, 14402, 14748, + 14596, 14063, 14674, 13393, 12937, 13466, 14313, 13295, 15000, 15167, 15311, 13122, 10691, 15165, 14621, 14000, + }; + + fp16_t ret[1 * 4 * 4 * 2] = { + 29557, 29211, 29390, 28901, 29318, 29282, 29159, 29365, 29555, 29411, 29034, 29560, 28973, 29443, 29448, 29492, + 29472, 29406, 29482, 29322, 29265, 29331, 29437, 29310, 29523, 29544, 29562, 29288, 28984, 29544, 29476, 29398, + }; + TransResult result; + DataTypeTransfer transfer; + CastArgs args{reinterpret_cast(data), sizeof(ret) / sizeof(ret[0]), DT_INT32, DT_FLOAT16}; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i].val, ret[i].val); + } + + TransResult result2; + DataTypeTransfer transfer2; + CastArgs args2{reinterpret_cast(ret), sizeof(data) / sizeof(data[0]), DT_FLOAT16, DT_INT32}; + EXPECT_EQ(transfer2.TransDataType(args2, 
result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data)); + bool is_equal = true; + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + if (abs((reinterpret_cast(result2.data.get()))[i] - data[i]) / abs(data[i]) > 0.05) { + is_equal = false; + break; + } + } + EXPECT_EQ(is_equal, true); +} + +TEST_F(UtestDataTypeTransfer, fp32_fp16) { + fp16_t data[1 * 4 * 4 * 16] = { + 15272, 12501, 13940, 10024, 13356, 13068, 12088, 13733, 15257, 14104, 11089, 15298, 10597, 14359, 14402, 14748, + 14596, 14063, 14674, 13393, 12937, 13466, 14313, 13295, 15000, 15167, 15311, 13122, 10691, 15165, 14621, 14000, + 13584, 14715, 15105, 14479, 14007, 9846, 14325, 12765, 13343, 13988, 10021, 14598, 14623, 15077, 15204, 12528, + 12024, 14236, 14857, 13009, 15216, 12916, 12754, 14807, 15174, 15075, 12998, 13834, 15174, 13674, 15251, 12683, + 13116, 14819, 11956, 14416, 14717, 14954, 15267, 15143, 15292, 9704, 14781, 14965, 14808, 15008, 11416, 15074, + 14168, 14417, 13441, 10673, 14945, 15114, 15358, 15116, 11950, 12057, 15321, 14973, 14950, 13984, 14900, 11361, + 10161, 14742, 13366, 13683, 13439, 12865, 10623, 14720, 14545, 13063, 10190, 12474, 9850, 15088, 15228, 14195, + 13428, 12443, 14719, 14816, 13231, 12818, 13667, 9680, 14814, 13924, 12757, 15178, 13444, 13673, 14405, 12711, + 15279, 14207, 9089, 13774, 13008, 14685, 13887, 15293, 13983, 14590, 15232, 15285, 15071, 14974, 15257, 13900, + 14907, 15269, 10955, 13635, 15132, 15026, 14218, 14498, 15235, 11243, 14704, 11563, 14394, 6840, 13619, 14655, + 12830, 14094, 12487, 13016, 13128, 15082, 6517, 14170, 14713, 14208, 13583, 12831, 15064, 13157, 13761, 14456, + 14905, 14798, 11391, 14668, 13906, 11053, 12381, 15210, 13567, 15159, 15270, 15073, 13887, 11861, 14615, 12627, + 15209, 14630, 13394, 14228, 14184, 13719, 14805, 13748, 14215, 13234, 13053, 14651, 14753, 14560, 12289, 14957, + 12826, 14788, 15236, 14249, 15211, 14329, 14830, 14793, 13202, 14635, 14489, 14664, 10751, 10992, 13459, 13658, + 14947, 14484, 15045, 
14431, 14644, 13939, 14088, 14092, 14765, 14096, 14696, 13201, 15162, 14751, 14119, 13506, + 14659, 15355, 14904, 13374, 15048, 15188, 14733, 14307, 12518, 12511, 15187, 11018, 13072, 15023, 11355, 14216, + }; + + float ret[1 * 4 * 4 * 16] = { + 0.957031, 0.151001, 0.40332, 0.0279541, 0.260742, 0.220215, 0.112793, 0.352783, 0.949707, 0.443359, + 0.0571594, 0.969727, 0.0421448, 0.51123, 0.532227, 0.701172, 0.626953, 0.43335, 0.665039, 0.269775, + 0.204224, 0.287598, 0.494385, 0.247925, 0.824219, 0.905762, 0.976074, 0.226807, 0.0450134, 0.904785, + 0.63916, 0.417969, 0.316406, 0.685059, 0.875488, 0.569824, 0.419678, 0.025238, 0.497314, 0.183228, + 0.257568, 0.415039, 0.0279083, 0.62793, 0.640137, 0.861816, 0.923828, 0.154297, 0.108887, 0.475586, + 0.754395, 0.213013, 0.929688, 0.20166, 0.181885, 0.72998, 0.90918, 0.86084, 0.21167, 0.377441, + 0.90918, 0.338379, 0.946777, 0.173218, 0.226074, 0.73584, 0.104736, 0.539062, 0.686035, 0.801758, + 0.95459, 0.894043, 0.966797, 0.0230713, 0.717285, 0.807129, 0.730469, 0.828125, 0.0717773, 0.860352, + 0.458984, 0.539551, 0.281494, 0.0444641, 0.797363, 0.879883, 0.999023, 0.880859, 0.10437, 0.110901, + 0.980957, 0.811035, 0.799805, 0.414062, 0.775391, 0.0684204, 0.0300446, 0.698242, 0.263184, 0.340576, + 0.281006, 0.195435, 0.0429382, 0.6875, 0.602051, 0.219604, 0.0304871, 0.147705, 0.0252991, 0.867188, + 0.935547, 0.465576, 0.27832, 0.143921, 0.687012, 0.734375, 0.240112, 0.189697, 0.33667, 0.0227051, + 0.733398, 0.399414, 0.182251, 0.911133, 0.282227, 0.338135, 0.533691, 0.176636, 0.960449, 0.468506, + 0.0146561, 0.362793, 0.212891, 0.67041, 0.390381, 0.967285, 0.413818, 0.624023, 0.9375, 0.963379, + 0.858887, 0.811523, 0.949707, 0.393555, 0.778809, 0.955566, 0.0530701, 0.328857, 0.888672, 0.836914, + 0.471191, 0.579102, 0.938965, 0.0618591, 0.679688, 0.0807495, 0.52832, 0.00328064, 0.324951, 0.655762, + 0.191162, 0.440918, 0.149292, 0.213867, 0.227539, 0.864258, 0.00266457, 0.459473, 0.684082, 0.46875, + 0.316162, 0.191284, 
0.855469, 0.231079, 0.359619, 0.558594, 0.777832, 0.725586, 0.0702515, 0.662109, + 0.39502, 0.0560608, 0.136353, 0.926758, 0.312256, 0.901855, 0.956055, 0.859863, 0.390381, 0.098938, + 0.63623, 0.166382, 0.92627, 0.643555, 0.27002, 0.473633, 0.462891, 0.349365, 0.729004, 0.356445, + 0.470459, 0.240479, 0.218384, 0.653809, 0.703613, 0.609375, 0.125122, 0.803223, 0.190674, 0.720703, + 0.939453, 0.47876, 0.927246, 0.498291, 0.741211, 0.723145, 0.236572, 0.645996, 0.574707, 0.660156, + 0.0468445, 0.0541992, 0.285889, 0.334473, 0.79834, 0.572266, 0.846191, 0.546387, 0.650391, 0.403076, + 0.439453, 0.44043, 0.709473, 0.441406, 0.675781, 0.23645, 0.90332, 0.702637, 0.447021, 0.297363, + 0.657715, 0.997559, 0.777344, 0.265137, 0.847656, 0.916016, 0.693848, 0.49292, 0.153076, 0.152222, + 0.915527, 0.0549927, 0.220703, 0.835449, 0.0680542, 0.470703, + }; + TransResult result; + DataTypeTransfer transfer; + CastArgs args{reinterpret_cast(data), sizeof(ret) / sizeof(ret[0]), DT_FLOAT16, DT_FLOAT}; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + bool is_equal = true; + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + if (abs((reinterpret_cast(result.data.get()))[i] - ret[i]) > 1.0e-6) { + is_equal = false; + break; + } + } + EXPECT_FLOAT_EQ(is_equal, true); + + TransResult result2; + DataTypeTransfer transfer2; + CastArgs args2{reinterpret_cast(ret), sizeof(data) / sizeof(data[0]), DT_FLOAT, DT_FLOAT16}; + EXPECT_EQ(transfer2.TransDataType(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result2.data.get()))[i].val, data[i].val); + } +} + +TEST_F(UtestDataTypeTransfer, int8_fp32) { + int8_t data[2 * 4 * 5 * 3] = { + 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, + 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, + 
1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, + 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 8, 9, 10, + }; + + float ret[2 * 4 * 5 * 3] = { + 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, + 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, + 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, + 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 8, 9, 10, + }; + + TransResult result; + DataTypeTransfer transfer; + CastArgs args{reinterpret_cast(data), sizeof(ret) / sizeof(ret[0]), DT_INT8, DT_FLOAT}; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestDataTypeTransfer, int8_int32) { + int8_t data[2 * 4 * 5 * 3] = { + 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, + 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, + 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, + 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 8, 9, 10, + }; + + int32_t ret[2 * 4 * 5 * 3] = { + 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, + 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, + 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, + 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 8, 9, 10, + }; + + TransResult result; + DataTypeTransfer transfer; + CastArgs args{reinterpret_cast(data), sizeof(ret) / sizeof(ret[0]), DT_INT8, DT_INT32}; + 
EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestDataTypeTransfer, uint8_fp32) { + uint8_t data[1 * 4 * 4 * 3] = { + 2, 6, 1, 6, 11, 12, 30, 24, 4, 28, 22, 25, 20, 5, 18, 15, 23, 27, 1, 25, 26, 24, 11, 8, + 21, 15, 6, 5, 23, 17, 11, 18, 21, 24, 14, 20, 19, 12, 23, 16, 3, 9, 10, 3, 15, 31, 18, 9, + }; + float ret[1 * 4 * 4 * 3] = { + 2, 6, 1, 6, 11, 12, 30, 24, 4, 28, 22, 25, 20, 5, 18, 15, 23, 27, 1, 25, 26, 24, 11, 8, + 21, 15, 6, 5, 23, 17, 11, 18, 21, 24, 14, 20, 19, 12, 23, 16, 3, 9, 10, 3, 15, 31, 18, 9, + }; + + CastArgs args{data, sizeof(ret) / sizeof(ret[0]), DT_UINT8, DT_FLOAT}; + TransResult result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestDataTypeTransfer, uint8_int32) { + uint8_t data[1 * 4 * 4 * 3] = { + 2, 6, 1, 6, 11, 12, 30, 24, 4, 28, 22, 25, 20, 5, 18, 15, 23, 27, 1, 25, 26, 24, 11, 8, + 21, 15, 6, 5, 23, 17, 11, 18, 21, 24, 14, 20, 19, 12, 23, 16, 3, 9, 10, 3, 15, 31, 18, 9, + }; + float ret[1 * 4 * 4 * 3] = { + 2, 6, 1, 6, 11, 12, 30, 24, 4, 28, 22, 25, 20, 5, 18, 15, 23, 27, 1, 25, 26, 24, 11, 8, + 21, 15, 6, 5, 23, 17, 11, 18, 21, 24, 14, 20, 19, 12, 23, 16, 3, 9, 10, 3, 15, 31, 18, 9, + }; + + CastArgs args{data, sizeof(ret) / sizeof(ret[0]), DT_UINT8, DT_INT32}; + TransResult result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestDataTypeTransfer, fp32_int32) { + float data[2 * 3 * 2 * 
2] = { + 12069.558428388797, 15153.257385100667, 14984.26436591075, 14609.400052018833, 14685.809894536767, + 15086.047829821913, 14608.516342177387, 15045.212419446521, 14990.208555280951, 15160.085043556863, + 12590.929413431828, 13329.114266064971, 14156.37125633003, 13280.133356778959, 12436.203018490567, + 15326.305606200556, 14378.984205090384, 14309.322926704968, 15127.534200544495, 14504.079809440058, + 14404.89917121715, 10767.05264755489, 13679.223916928482, 14460.12063510443, + }; + float ret[2 * 3 * 2 * 2] = { + 12069, 15153, 14984, 14609, 14685, 15086, 14608, 15045, 14990, 15160, 12590, 13329, + 14156, 13280, 12436, 15326, 14378, 14309, 15127, 14504, 14404, 10767, 13679, 14460, + }; + CastArgs args{reinterpret_cast(data), sizeof(ret) / sizeof(ret[0]), DT_FLOAT, DT_INT32}; + TransResult result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestDataTypeTransfer, int32_fp32) { + int32_t data[1 * 3 * 4 * 4] = { + 15322, 14946, 12601, 14058, 12641, 14470, 14686, 15052, 11964, 14846, 13154, 13571, 14947, 12467, 12786, 14238, + 15334, 14814, 13608, 12163, 14169, 15274, 14780, 15303, 14986, 14841, 14290, 13785, 13878, 11576, 14411, 14522, + 14394, 13508, 13021, 14691, 13263, 15145, 14724, 15167, 14523, 13334, 14834, 13844, 9902, 14984, 15051, 14511, + }; + float ret[1 * 3 * 4 * 4] = { + 15322, 14946, 12601, 14058, 12641, 14470, 14686, 15052, 11964, 14846, 13154, 13571, 14947, 12467, 12786, 14238, + 15334, 14814, 13608, 12163, 14169, 15274, 14780, 15303, 14986, 14841, 14290, 13785, 13878, 11576, 14411, 14522, + 14394, 13508, 13021, 14691, 13263, 15145, 14724, 15167, 14523, 13334, 14834, 13844, 9902, 14984, 15051, 14511, + }; + CastArgs args{reinterpret_cast(data), sizeof(ret) / sizeof(ret[0]), DT_INT32, DT_FLOAT}; + TransResult 
result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestDataTypeTransfer, int32_uint8) { + int32_t data[2 * 4 * 5 * 3] = { + 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, + 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, + 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, + 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 8, 9, 10, + }; + + uint8_t ret[2 * 4 * 5 * 3] = { + 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, + 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, + 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, + 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 8, 9, 10, + }; + CastArgs args{reinterpret_cast(data), sizeof(ret) / sizeof(ret[0]), DT_INT32, DT_UINT8}; + TransResult result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestDataTypeTransfer, int32_int8) { + int32_t data[2 * 4 * 5 * 3] = { + 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, + 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, + 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, + 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 8, 9, 10, + }; + + int8_t ret[2 * 4 * 5 
* 3] = { + 0, 1, 2, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, + 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, + 1, 2, 3, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 2, 3, 4, 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, + 3, 4, 5, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 4, 5, 6, 5, 6, 7, 6, 7, 8, 7, 8, 9, 8, 9, 10, + }; + CastArgs args{reinterpret_cast(data), sizeof(ret) / sizeof(ret[0]), DT_INT32, DT_INT8}; + TransResult result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestDataTypeTransfer, invalid_src_data_type) { + uint8_t data[1 * 4 * 4 * 1] = {0}; + CastArgs args{reinterpret_cast(data), 16, DT_UNDEFINED, DT_FLOAT}; + TransResult result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), UNSUPPORTED); +} + +TEST_F(UtestDataTypeTransfer, src_shape_empry) { + uint8_t data[1 * 4 * 4 * 1] = {0}; + + DataTypeTransfer transfer; + CastArgs args{reinterpret_cast(data), 0, DT_UINT8, DT_INT32}; + + TransResult result; + EXPECT_EQ(transfer.TransDataType(args, result), PARAM_INVALID); +} + +TEST_F(UtestDataTypeTransfer, unsupprot_trans) { + bool data[1 * 4 * 4 * 1] = {0}; + CastArgs args{reinterpret_cast(data), 16, DT_BOOL, DT_INT8}; + TransResult result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), UNSUPPORTED); + EXPECT_EQ(TransDataType(args, result), UNSUPPORTED); +} + +TEST_F(UtestDataTypeTransfer, unsupprot_trans2) { + bool data[1 * 4 * 4 * 1] = {0}; + CastArgs args{reinterpret_cast(data), 16, DT_BOOL, DT_INT32}; + TransResult result; + + DataTypeTransfer transfer; + EXPECT_EQ(transfer.TransDataType(args, result), UNSUPPORTED); + EXPECT_EQ(TransDataType(args, result), UNSUPPORTED); +} +} // namespace formats +} 
// namespace ge diff --git a/tests/ut/ge/common/format_transfer_5d_nchw_unittest.cc b/tests/ut/ge/common/format_transfer_5d_nchw_unittest.cc new file mode 100644 index 00000000..4a7d3672 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_5d_nchw_unittest.cc @@ -0,0 +1,647 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "common/formats/format_transfers/format_transfer_nc1hwc0_nchw.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UTEST_FormatTransferNc1hwc0ToNchw : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, nc1hwc0_to_nchw_uint8) { + uint8_t data_5d[1 * 1 * 4 * 4 * 32] = { + 1, 101, 201, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 2, 102, 202, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 3, 103, 203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 4, 104, 204, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 5, 105, 205, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6, 106, 206, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 7, 107, 207, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8, 108, 208, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9, 109, 209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10, 110, 210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11, 111, 211, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12, 112, 212, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13, 113, 213, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14, 114, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15, 115, 215, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 16, 116, 216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint8_t data[1 * 3 * 4 * 4] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, + 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216}; + + TransArgs args{data_5d, FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 32}, {1, 3, 4, 4}, DT_UINT8}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ(*(result.data.get() + i), data[i]); + } +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, nc1hwc0_to_nchw_uint8_32c) { + uint8_t data_5d[1 * 1 * 4 * 4 * 32] = { + 0, 16, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, 1, 17, 33, 49, 65, 81, 97, + 113, 129, 145, 161, 177, 193, 209, 225, 241, 1, 17, 33, 49, 65, 81, 97, 113, 129, 145, 161, 177, 193, 209, + 225, 241, 2, 18, 34, 50, 66, 82, 
98, 114, 130, 146, 162, 178, 194, 210, 226, 242, 2, 18, 34, 50, 66, + 82, 98, 114, 130, 146, 162, 178, 194, 210, 226, 242, 3, 19, 35, 51, 67, 83, 99, 115, 131, 147, 163, 179, + 195, 211, 227, 243, 3, 19, 35, 51, 67, 83, 99, 115, 131, 147, 163, 179, 195, 211, 227, 243, 4, 20, 36, + 52, 68, 84, 100, 116, 132, 148, 164, 180, 196, 212, 228, 244, 4, 20, 36, 52, 68, 84, 100, 116, 132, 148, + 164, 180, 196, 212, 228, 244, 5, 21, 37, 53, 69, 85, 101, 117, 133, 149, 165, 181, 197, 213, 229, 245, 5, + 21, 37, 53, 69, 85, 101, 117, 133, 149, 165, 181, 197, 213, 229, 245, 6, 22, 38, 54, 70, 86, 102, 118, + 134, 150, 166, 182, 198, 214, 230, 246, 6, 22, 38, 54, 70, 86, 102, 118, 134, 150, 166, 182, 198, 214, 230, + 246, 7, 23, 39, 55, 71, 87, 103, 119, 135, 151, 167, 183, 199, 215, 231, 247, 7, 23, 39, 55, 71, 87, + 103, 119, 135, 151, 167, 183, 199, 215, 231, 247, 8, 24, 40, 56, 72, 88, 104, 120, 136, 152, 168, 184, 200, + 216, 232, 248, 8, 24, 40, 56, 72, 88, 104, 120, 136, 152, 168, 184, 200, 216, 232, 248, 9, 25, 41, 57, + 73, 89, 105, 121, 137, 153, 169, 185, 201, 217, 233, 249, 9, 25, 41, 57, 73, 89, 105, 121, 137, 153, 169, + 185, 201, 217, 233, 249, 10, 26, 42, 58, 74, 90, 106, 122, 138, 154, 170, 186, 202, 218, 234, 250, 10, 26, + 42, 58, 74, 90, 106, 122, 138, 154, 170, 186, 202, 218, 234, 250, 11, 27, 43, 59, 75, 91, 107, 123, 139, + 155, 171, 187, 203, 219, 235, 251, 11, 27, 43, 59, 75, 91, 107, 123, 139, 155, 171, 187, 203, 219, 235, 251, + 12, 28, 44, 60, 76, 92, 108, 124, 140, 156, 172, 188, 204, 220, 236, 252, 12, 28, 44, 60, 76, 92, 108, + 124, 140, 156, 172, 188, 204, 220, 236, 252, 13, 29, 45, 61, 77, 93, 109, 125, 141, 157, 173, 189, 205, 221, + 237, 253, 13, 29, 45, 61, 77, 93, 109, 125, 141, 157, 173, 189, 205, 221, 237, 253, 14, 30, 46, 62, 78, + 94, 110, 126, 142, 158, 174, 190, 206, 222, 238, 254, 14, 30, 46, 62, 78, 94, 110, 126, 142, 158, 174, 190, + 206, 222, 238, 254, 15, 31, 47, 63, 79, 95, 111, 127, 143, 159, 175, 191, 207, 223, 239, 0, 15, 31, 
47, + 63, 79, 95, 111, 127, 143, 159, 175, 191, 207, 223, 239, 0, 16, 32, 48, 64, 80, 96, 112, 128, 144, 160, + 176, 192, 208, 224, 240, 1, + }; + uint8_t data[1 * 32 * 4 * 4] = { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, + 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, + 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, + 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, + 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, + 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, + 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, + 253, 254, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, + 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, + 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, + 136, 137, 138, 139, 140, 141, 142, 143, 144, 
145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, + 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, + 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, + 251, 252, 253, 254, 0, 1, + }; + + TransArgs args{data_5d, FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 32}, {1, 32, 4, 4}, DT_UINT8}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ(*(result.data.get() + i), data[i]); + } +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, nc1hwc0_to_nchwfp16_single) { + uint16_t data[1 * 1 * 1 * 1] = {13425}; + uint16_t data_5d[1 * 1 * 1 * 1 * 16] = { + 13425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 1, 1, 16}, {1, 1, 1, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, nc1hwc0_to_nchw_fp16) { + uint16_t data_5d[1 * 1 * 4 * 4 * 16] = { + 14862, 14023, 14816, 11390, 14779, 11715, 14638, 14509, 14441, 13503, 14814, 13795, 13847, 14745, 15059, 13746, + 8282, 15213, 14495, 14221, 15299, 14126, 14322, 11435, 14238, 15287, 15273, 14807, 15163, 14883, 14912, 12849, + 14921, 14896, 15259, 14167, 13653, 14609, 12054, 13925, 14673, 14744, 
10492, 14718, 12709, 14964, 14952, 15255, + 15123, 14600, 14627, 12936, 14972, 11265, 13908, 13264, 13523, 14115, 13541, 13446, 8391, 10700, 14445, 11586, + 15088, 12353, 15138, 15162, 15290, 14850, 14862, 14131, 10827, 14038, 12047, 13469, 13654, 15009, 15189, 12226, + 15295, 15216, 15254, 14722, 15216, 14427, 12508, 14553, 15158, 11562, 15171, 14775, 15161, 13439, 15097, 8566, + 14780, 14590, 12232, 14709, 12227, 14429, 15354, 12318, 13589, 14488, 14560, 15145, 14640, 14740, 14165, 12610, + 15230, 15069, 15161, 14221, 14997, 14818, 11639, 13424, 15310, 11455, 13646, 12862, 12989, 15032, 13677, 14551, + 14510, 15157, 14759, 15108, 12693, 14636, 14575, 13679, 12659, 15217, 11509, 10551, 14286, 13520, 13470, 12282, + 14734, 14821, 14667, 12783, 11479, 13083, 12017, 10590, 12052, 15280, 12302, 14675, 14546, 14962, 12830, 13908, + 12601, 12343, 11266, 14534, 14482, 12896, 12191, 14423, 15219, 14576, 10054, 15120, 15021, 14918, 13288, 14529, + 14616, 14758, 15067, 12250, 14251, 14158, 14423, 13791, 15224, 14959, 14637, 12702, 15359, 15334, 15144, 9603, + 14632, 14955, 15207, 11988, 12695, 15015, 15123, 14705, 14942, 14763, 13985, 14995, 13719, 14939, 14033, 14696, + 14174, 15183, 14922, 11460, 14856, 14838, 14643, 14625, 15080, 14732, 15128, 15241, 13484, 14453, 14983, 13360, + 13613, 13815, 14349, 12802, 14386, 14675, 13697, 13069, 12385, 14200, 13553, 14937, 14979, 14747, 14663, 14843, + 13120, 14373, 13953, 15234, 13729, 13999, 13505, 13100, 14788, 15024, 14270, 13430, 15140, 15104, 14409, 14096, + }; + uint16_t data[1 * 16 * 4 * 4] = { + 14862, 8282, 14921, 15123, 15088, 15295, 14780, 15230, 14510, 14734, 12601, 14616, 14632, 14174, 13613, 13120, + 14023, 15213, 14896, 14600, 12353, 15216, 14590, 15069, 15157, 14821, 12343, 14758, 14955, 15183, 13815, 14373, + 14816, 14495, 15259, 14627, 15138, 15254, 12232, 15161, 14759, 14667, 11266, 15067, 15207, 14922, 14349, 13953, + 11390, 14221, 14167, 12936, 15162, 14722, 14709, 14221, 15108, 12783, 14534, 12250, 11988, 
11460, 12802, 15234, + 14779, 15299, 13653, 14972, 15290, 15216, 12227, 14997, 12693, 11479, 14482, 14251, 12695, 14856, 14386, 13729, + 11715, 14126, 14609, 11265, 14850, 14427, 14429, 14818, 14636, 13083, 12896, 14158, 15015, 14838, 14675, 13999, + 14638, 14322, 12054, 13908, 14862, 12508, 15354, 11639, 14575, 12017, 12191, 14423, 15123, 14643, 13697, 13505, + 14509, 11435, 13925, 13264, 14131, 14553, 12318, 13424, 13679, 10590, 14423, 13791, 14705, 14625, 13069, 13100, + 14441, 14238, 14673, 13523, 10827, 15158, 13589, 15310, 12659, 12052, 15219, 15224, 14942, 15080, 12385, 14788, + 13503, 15287, 14744, 14115, 14038, 11562, 14488, 11455, 15217, 15280, 14576, 14959, 14763, 14732, 14200, 15024, + 14814, 15273, 10492, 13541, 12047, 15171, 14560, 13646, 11509, 12302, 10054, 14637, 13985, 15128, 13553, 14270, + 13795, 14807, 14718, 13446, 13469, 14775, 15145, 12862, 10551, 14675, 15120, 12702, 14995, 15241, 14937, 13430, + 13847, 15163, 12709, 8391, 13654, 15161, 14640, 12989, 14286, 14546, 15021, 15359, 13719, 13484, 14979, 15140, + 14745, 14883, 14964, 10700, 15009, 13439, 14740, 15032, 13520, 14962, 14918, 15334, 14939, 14453, 14747, 15104, + 15059, 14912, 14952, 14445, 15189, 15097, 14165, 13677, 13470, 12830, 13288, 15144, 14033, 14983, 14663, 14409, + 13746, 12849, 15255, 11586, 12226, 8566, 12610, 14551, 12282, 13908, 14529, 9603, 14696, 13360, 14843, 14096, + }; + + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 16}, {1, 16, 4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, nc1hwc0_to_nchw_fp16_17c) { + uint16_t data_5d[1 * 2 * 4 * 4 * 16] = { + 14568, 14421, 14797, 14544, 13105, 13370, 15172, 14381, 15270, 12195, 
13233, 15350, 13571, 10961, 10330, 13865, + 13864, 11756, 13374, 14495, 11969, 11541, 13723, 14035, 12632, 12958, 15051, 13749, 15124, 13845, 13742, 14891, + 14728, 15305, 13959, 14165, 13934, 14044, 14807, 14168, 14771, 13262, 14305, 14341, 14705, 11336, 13744, 12774, + 9931, 15085, 14748, 14941, 15255, 13546, 9992, 15150, 14654, 15272, 14954, 14247, 14786, 14715, 14736, 14596, + 13909, 15000, 14607, 13415, 15009, 12345, 14431, 11364, 12769, 14173, 15184, 12405, 14950, 14596, 14995, 14367, + 15238, 12103, 15218, 13477, 14363, 15129, 13895, 14014, 14424, 12544, 15284, 14554, 14972, 14031, 12372, 14881, + 13252, 13647, 14843, 13552, 12019, 13836, 14410, 14987, 13967, 15002, 14597, 13275, 15273, 15332, 13185, 13735, + 14643, 10549, 14527, 14460, 14840, 13478, 14703, 14563, 10958, 14177, 15050, 15096, 14875, 14397, 14143, 13434, + 14995, 12539, 15308, 14687, 14654, 14183, 12403, 14639, 14824, 12740, 7525, 14283, 14554, 14766, 15104, 15021, + 14363, 7084, 12950, 14904, 15248, 11753, 14283, 15168, 14818, 13238, 15014, 12556, 12828, 14049, 14558, 15137, + 15298, 14873, 14293, 13047, 14263, 13515, 14198, 13906, 14489, 9541, 13617, 13656, 13406, 14513, 13360, 13604, + 14911, 12182, 14215, 14713, 12221, 9609, 15221, 15010, 11716, 12863, 15349, 14575, 13794, 15164, 14754, 11357, + 15106, 9793, 11630, 10997, 14930, 14946, 15178, 12638, 14766, 14440, 14660, 13576, 14684, 15073, 12315, 15039, + 14522, 11816, 13598, 14531, 14892, 14832, 12603, 11430, 13780, 14985, 14740, 14461, 10439, 11816, 14360, 14929, + 12107, 12592, 14456, 13482, 14755, 15340, 15225, 14599, 14681, 14962, 12123, 10863, 15160, 13471, 12378, 15048, + 14647, 15209, 15114, 13465, 14893, 14925, 14931, 12569, 12727, 15193, 13636, 15240, 14501, 14656, 13539, 14598, + 14699, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15057, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13683, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14368, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8654, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 11915, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14933, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14449, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13736, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12031, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14550, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14823, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[1 * 17 * 4 * 4] = { + 14568, 13864, 14728, 9931, 13909, 15238, 13252, 14643, 14995, 14363, 15298, 14911, 15106, 14522, 12107, 14647, + 14421, 11756, 15305, 15085, 15000, 12103, 13647, 10549, 12539, 7084, 14873, 12182, 9793, 11816, 12592, 15209, + 14797, 13374, 13959, 14748, 14607, 15218, 14843, 14527, 15308, 12950, 14293, 14215, 11630, 13598, 14456, 15114, + 14544, 14495, 14165, 14941, 13415, 13477, 13552, 14460, 14687, 14904, 13047, 14713, 10997, 14531, 13482, 13465, + 13105, 11969, 13934, 15255, 15009, 14363, 12019, 14840, 14654, 15248, 14263, 12221, 14930, 14892, 14755, 14893, + 13370, 11541, 14044, 13546, 12345, 15129, 13836, 13478, 14183, 11753, 13515, 9609, 14946, 14832, 15340, 14925, + 15172, 13723, 14807, 9992, 14431, 13895, 14410, 14703, 12403, 14283, 14198, 15221, 15178, 12603, 15225, 14931, + 14381, 14035, 14168, 15150, 11364, 14014, 14987, 14563, 14639, 15168, 13906, 15010, 12638, 11430, 14599, 12569, + 15270, 12632, 14771, 14654, 12769, 14424, 13967, 10958, 14824, 14818, 14489, 11716, 14766, 13780, 14681, 12727, + 12195, 12958, 13262, 15272, 14173, 12544, 15002, 14177, 12740, 13238, 9541, 12863, 14440, 14985, 14962, 15193, + 13233, 15051, 14305, 14954, 15184, 15284, 14597, 15050, 7525, 15014, 13617, 15349, 14660, 14740, 12123, 13636, + 15350, 13749, 14341, 14247, 12405, 14554, 13275, 15096, 14283, 12556, 13656, 14575, 13576, 14461, 
10863, 15240, + 13571, 15124, 14705, 14786, 14950, 14972, 15273, 14875, 14554, 12828, 13406, 13794, 14684, 10439, 15160, 14501, + 10961, 13845, 11336, 14715, 14596, 14031, 15332, 14397, 14766, 14049, 14513, 15164, 15073, 11816, 13471, 14656, + 10330, 13742, 13744, 14736, 14995, 12372, 13185, 14143, 15104, 14558, 13360, 14754, 12315, 14360, 12378, 13539, + 13865, 14891, 12774, 14596, 14367, 14881, 13735, 13434, 15021, 15137, 13604, 11357, 15039, 14929, 15048, 14598, + 14699, 15057, 13683, 14368, 8654, 11915, 15315, 14933, 14449, 15148, 13736, 14541, 12031, 15255, 14550, 14823, + }; + + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 2, 4, 4, 16}, {1, 17, 4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, nc1hwc0_to_nchw_float) { + float data[1 * 8 * 4 * 4] = { + 0.5892849569036265, 0.6803315204121938, 0.5177982416755607, 0.12129040093083365, 0.04237103024867994, + 0.7428342506399291, 0.5359802823824235, 0.04499999698629653, 0.7610018014391726, 0.38621323898777005, + 0.4360751167195973, 0.3834964892197801, 0.5888008090373326, 0.09836678109117547, 0.9950749943600654, + 0.9635155267139188, 0.5378606253569519, 0.2383735299723022, 0.5049400994471483, 0.2967628815119744, + 0.9172822428045107, 0.5429433505121389, 0.38499549462545024, 0.23619965842338686, 0.720664799527641, + 0.02313921408863151, 0.6488943229526023, 0.3390551602851408, 0.728417105824467, 0.9053225912171141, + 0.32494694533300583, 0.9119093270624166, 0.9647657094436359, 0.7219930950678662, 0.36167953499559, + 0.5984012357524195, 0.9544874847178995, 0.02306924612189265, 0.8026403495895027, 0.22551907272533667, + 0.36263992795411604, 0.58886941262115, 0.5735986398876265, 
0.5252128788659909, 0.0827150730694497, + 0.17498225712307047, 0.4845825388200229, 0.40605108821850533, 0.9274359210940875, 0.7147299778467197, + 0.32288439175726646, 0.4065504767493492, 0.6286803275241362, 0.20374542713340105, 0.7445032000224268, + 0.9674821461856206, 0.909400577299532, 0.40363134678641066, 0.9627522330737276, 0.6933785292758723, + 0.9641353478602301, 0.7754020225695061, 0.620702777688872, 0.11214574817054179, 0.894884208921027, + 0.7101293717077931, 0.36970203638442056, 0.9356214764169016, 0.8339204066613951, 0.516307604153244, + 0.7030058401326411, 0.3991170380257899, 0.691216036157706, 0.7414799310134091, 0.22811510970169568, + 0.5609880702374889, 0.22477373948238633, 0.12881731266251306, 0.4573255943473218, 0.17517491298262455, + 0.4664711535236884, 0.36304572216421005, 0.04147865556156949, 0.321799545851576, 0.3134658252359267, + 0.9168822528697251, 0.2070779910124293, 0.4370570617679451, 0.5458639932730591, 0.34286569365484054, + 0.4939443382175456, 0.3412383781775876, 0.299800764913397, 0.3458667905629188, 0.5037122283316625, + 0.13792096399324794, 0.5296944120355204, 0.6593970795972891, 0.672741074468844, 0.5297456647001881, + 0.006066715407170187, 0.8287877066716592, 0.18033462634801634, 0.5000576732820233, 0.8853254925542572, + 0.38219052838295775, 0.17776888090118503, 0.2556143927933693, 0.46146366919906867, 0.4037875054768396, + 0.062043324444360226, 0.4479202861693887, 0.25183795798980213, 0.6102048134444441, 0.9471408150891643, + 0.8789211226767781, 0.5987926543415545, 0.08687291331362201, 0.09890376596175199, 0.39921593538893263, + 0.8463226026274682, 0.5365747044508772, 0.762082525622205, 0.6515229727575028, 0.10831064130367352, + 0.4628228725538879, 0.820619798511191, 0.5779888725124475, + }; + float data_5d[1 * 1 * 4 * 4 * 16] = { + 0.5892849569036265, + 0.5378606253569519, + 0.9647657094436359, + 0.9274359210940875, + 0.894884208921027, + 0.4664711535236884, + 0.5296944120355204, + 0.25183795798980213, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6803315204121938, + 0.2383735299723022, + 0.7219930950678662, + 0.7147299778467197, + 0.7101293717077931, + 0.36304572216421005, + 0.6593970795972891, + 0.6102048134444441, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5177982416755607, + 0.5049400994471483, + 0.36167953499559, + 0.32288439175726646, + 0.36970203638442056, + 0.04147865556156949, + 0.672741074468844, + 0.9471408150891643, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12129040093083365, + 0.2967628815119744, + 0.5984012357524195, + 0.4065504767493492, + 0.9356214764169016, + 0.321799545851576, + 0.5297456647001881, + 0.8789211226767781, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04237103024867994, + 0.9172822428045107, + 0.9544874847178995, + 0.6286803275241362, + 0.8339204066613951, + 0.3134658252359267, + 0.006066715407170187, + 0.5987926543415545, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7428342506399291, + 0.5429433505121389, + 0.02306924612189265, + 0.20374542713340105, + 0.516307604153244, + 0.9168822528697251, + 0.8287877066716592, + 0.08687291331362201, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5359802823824235, + 0.38499549462545024, + 0.8026403495895027, + 0.7445032000224268, + 0.7030058401326411, + 0.2070779910124293, + 0.18033462634801634, + 0.09890376596175199, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04499999698629653, + 0.23619965842338686, + 0.22551907272533667, + 0.9674821461856206, + 0.3991170380257899, + 0.4370570617679451, + 0.5000576732820233, + 0.39921593538893263, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7610018014391726, + 0.720664799527641, + 0.36263992795411604, + 0.909400577299532, + 0.691216036157706, + 0.5458639932730591, + 0.8853254925542572, + 0.8463226026274682, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.38621323898777005, + 0.02313921408863151, + 0.58886941262115, + 0.40363134678641066, + 
0.7414799310134091, + 0.34286569365484054, + 0.38219052838295775, + 0.5365747044508772, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4360751167195973, + 0.6488943229526023, + 0.5735986398876265, + 0.9627522330737276, + 0.22811510970169568, + 0.4939443382175456, + 0.17776888090118503, + 0.762082525622205, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3834964892197801, + 0.3390551602851408, + 0.5252128788659909, + 0.6933785292758723, + 0.5609880702374889, + 0.3412383781775876, + 0.2556143927933693, + 0.6515229727575028, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5888008090373326, + 0.728417105824467, + 0.0827150730694497, + 0.9641353478602301, + 0.22477373948238633, + 0.299800764913397, + 0.46146366919906867, + 0.10831064130367352, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.09836678109117547, + 0.9053225912171141, + 0.17498225712307047, + 0.7754020225695061, + 0.12881731266251306, + 0.3458667905629188, + 0.4037875054768396, + 0.4628228725538879, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9950749943600654, + 0.32494694533300583, + 0.4845825388200229, + 0.620702777688872, + 0.4573255943473218, + 0.5037122283316625, + 0.062043324444360226, + 0.820619798511191, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9635155267139188, + 0.9119093270624166, + 0.40605108821850533, + 0.11214574817054179, + 0.17517491298262455, + 0.13792096399324794, + 0.4479202861693887, + 0.5779888725124475, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 16}, {1, 8, 4, 4}, DT_FLOAT}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + 
+TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, invalid_src_shape1) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 16}, {1, 0, 4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, invalid_src_shape2) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 16, 0}, {1, 0, 4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, invalid_dst_shape1) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 16}, {4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, invalid_dst_shape2) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 16}, {1, 0, 4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, invalid_src_dst_shape_relation) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 16}, {1, 17, 4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, invalid_src_format) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_RESERVED, FORMAT_NCHW, {1, 1, 4, 4, 16}, {1, 1, 4, 4}, 
DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, UNSUPPORTED); +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, invalid_dst_format) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 16}, {1, 1, 4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UTEST_FormatTransferNc1hwc0ToNchw, invalid_src_data_type) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 16}, {1, 1, 1, 4, 4}, DT_UNDEFINED}; + TransResult result; + + FormatTransferNc1hwc0Nchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_5d_nhwc_unittest.cc b/tests/ut/ge/common/format_transfer_5d_nhwc_unittest.cc new file mode 100644 index 00000000..c811884f --- /dev/null +++ b/tests/ut/ge/common/format_transfer_5d_nhwc_unittest.cc @@ -0,0 +1,762 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_nc1hwc0_nhwc.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UtestFormatTransfer5dNhwc : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransfer5dNhwc, nc1hwc0_to_nhwc_uint8) { + uint8_t data[1 * 4 * 4 * 3] = { + 2, 6, 1, 6, 11, 12, 30, 24, 4, 28, 22, 25, 20, 5, 18, 15, 23, 27, 1, 25, 26, 24, 11, 8, + 21, 15, 6, 5, 23, 17, 11, 18, 21, 24, 14, 20, 19, 12, 23, 16, 3, 9, 10, 3, 15, 31, 18, 9, + }; + + uint8_t data_5d[1 * 1 * 4 * 4 * 32] = { + 2, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 30, 24, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 28, 22, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 20, 5, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15, 23, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 24, 11, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 21, 15, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 5, 23, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11, 18, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 24, 14, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 19, 12, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 16, 3, 9, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10, 3, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 31, 18, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{data_5d, FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 32}, {1, 4, 4, 3}, DT_UINT8}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransfer5dNhwc, nc1hwc0_to_nhwc_uint8_32c) { + uint8_t data[1 * 4 * 4 * 32] = { + 21, 14, 25, 9, 25, 3, 8, 21, 13, 29, 15, 6, 9, 30, 2, 9, 10, 25, 10, 3, 10, 21, 28, 31, 5, 6, 9, + 18, 5, 16, 28, 21, 3, 1, 25, 5, 20, 25, 2, 21, 11, 19, 12, 7, 25, 6, 26, 17, 2, 31, 26, 23, 28, 23, + 4, 12, 0, 6, 16, 29, 18, 2, 27, 21, 14, 15, 27, 14, 30, 4, 28, 26, 6, 21, 8, 28, 4, 20, 20, 14, 14, + 21, 15, 14, 2, 0, 3, 21, 22, 28, 23, 2, 23, 23, 17, 6, 17, 5, 28, 23, 6, 2, 0, 26, 3, 24, 30, 9, + 23, 24, 5, 12, 27, 14, 5, 30, 20, 20, 23, 12, 6, 24, 11, 16, 13, 19, 18, 16, 12, 5, 23, 15, 29, 31, 2, + 17, 6, 28, 27, 9, 18, 7, 19, 10, 28, 14, 28, 16, 17, 8, 18, 7, 16, 0, 24, 26, 9, 10, 8, 26, 23, 4, + 21, 21, 13, 9, 24, 13, 0, 6, 10, 3, 14, 21, 22, 28, 4, 13, 20, 20, 10, 17, 24, 31, 6, 14, 21, 29, 31, + 0, 5, 28, 0, 2, 18, 25, 0, 28, 14, 27, 0, 25, 20, 15, 9, 9, 24, 22, 0, 4, 24, 28, 17, 14, 27, 14, + 30, 10, 8, 30, 29, 23, 11, 9, 23, 17, 7, 31, 22, 12, 6, 9, 10, 15, 12, 14, 11, 12, 19, 19, 29, 9, 21, + 12, 1, 27, 29, 8, 11, 3, 7, 18, 13, 15, 23, 5, 9, 6, 8, 31, 20, 12, 14, 26, 31, 16, 18, 5, 15, 1, + 27, 8, 21, 15, 13, 13, 5, 11, 18, 9, 25, 31, 17, 23, 5, 10, 9, 22, 19, 14, 13, 27, 11, 20, 1, 16, 2, + 12, 2, 11, 28, 20, 9, 2, 31, 9, 7, 3, 16, 26, 7, 21, 11, 24, 25, 5, 7, 28, 19, 6, 6, 4, 0, 6, + 28, 6, 20, 
10, 12, 16, 7, 3, 3, 17, 1, 11, 21, 5, 14, 18, 17, 17, 2, 21, 15, 20, 0, 15, 13, 29, 21, + 11, 15, 25, 25, 25, 9, 21, 25, 15, 19, 1, 31, 21, 18, 18, 14, 5, 25, 26, 2, 17, 15, 8, 27, 15, 20, 11, + 30, 27, 1, 3, 0, 22, 8, 11, 1, 24, 29, 28, 24, 31, 15, 31, 8, 15, 8, 0, 0, 20, 19, 21, 21, 20, 25, + 31, 24, 20, 3, 3, 4, 14, 15, 4, 12, 26, 22, 13, 10, 13, 22, 9, 14, 12, 7, 13, 19, 3, 23, 24, 3, 15, + 23, 25, 16, 5, 30, 21, 10, 28, 7, 9, 18, 9, 3, 12, 25, 9, 18, 31, 15, 11, 1, 11, 10, 8, 28, 4, 19, + 27, 22, 17, 10, 23, 25, 2, 19, 16, 2, 19, 28, 25, 24, 2, 2, 4, 17, 4, 12, 26, 4, 25, 31, 27, 31, 14, + 17, 13, 24, 5, 10, 10, 17, 26, 16, 15, 25, 18, 15, 10, 22, 13, 30, 16, 23, 10, 23, 24, 10, 14, 5, 17, + }; + + uint8_t data_5d[1 * 1 * 4 * 4 * 32] = { + 21, 14, 25, 9, 25, 3, 8, 21, 13, 29, 15, 6, 9, 30, 2, 9, 10, 25, 10, 3, 10, 21, 28, 31, 5, 6, 9, + 18, 5, 16, 28, 21, 3, 1, 25, 5, 20, 25, 2, 21, 11, 19, 12, 7, 25, 6, 26, 17, 2, 31, 26, 23, 28, 23, + 4, 12, 0, 6, 16, 29, 18, 2, 27, 21, 14, 15, 27, 14, 30, 4, 28, 26, 6, 21, 8, 28, 4, 20, 20, 14, 14, + 21, 15, 14, 2, 0, 3, 21, 22, 28, 23, 2, 23, 23, 17, 6, 17, 5, 28, 23, 6, 2, 0, 26, 3, 24, 30, 9, + 23, 24, 5, 12, 27, 14, 5, 30, 20, 20, 23, 12, 6, 24, 11, 16, 13, 19, 18, 16, 12, 5, 23, 15, 29, 31, 2, + 17, 6, 28, 27, 9, 18, 7, 19, 10, 28, 14, 28, 16, 17, 8, 18, 7, 16, 0, 24, 26, 9, 10, 8, 26, 23, 4, + 21, 21, 13, 9, 24, 13, 0, 6, 10, 3, 14, 21, 22, 28, 4, 13, 20, 20, 10, 17, 24, 31, 6, 14, 21, 29, 31, + 0, 5, 28, 0, 2, 18, 25, 0, 28, 14, 27, 0, 25, 20, 15, 9, 9, 24, 22, 0, 4, 24, 28, 17, 14, 27, 14, + 30, 10, 8, 30, 29, 23, 11, 9, 23, 17, 7, 31, 22, 12, 6, 9, 10, 15, 12, 14, 11, 12, 19, 19, 29, 9, 21, + 12, 1, 27, 29, 8, 11, 3, 7, 18, 13, 15, 23, 5, 9, 6, 8, 31, 20, 12, 14, 26, 31, 16, 18, 5, 15, 1, + 27, 8, 21, 15, 13, 13, 5, 11, 18, 9, 25, 31, 17, 23, 5, 10, 9, 22, 19, 14, 13, 27, 11, 20, 1, 16, 2, + 12, 2, 11, 28, 20, 9, 2, 31, 9, 7, 3, 16, 26, 7, 21, 11, 24, 25, 5, 7, 28, 19, 6, 6, 4, 0, 6, + 28, 6, 20, 10, 12, 16, 7, 3, 
3, 17, 1, 11, 21, 5, 14, 18, 17, 17, 2, 21, 15, 20, 0, 15, 13, 29, 21, + 11, 15, 25, 25, 25, 9, 21, 25, 15, 19, 1, 31, 21, 18, 18, 14, 5, 25, 26, 2, 17, 15, 8, 27, 15, 20, 11, + 30, 27, 1, 3, 0, 22, 8, 11, 1, 24, 29, 28, 24, 31, 15, 31, 8, 15, 8, 0, 0, 20, 19, 21, 21, 20, 25, + 31, 24, 20, 3, 3, 4, 14, 15, 4, 12, 26, 22, 13, 10, 13, 22, 9, 14, 12, 7, 13, 19, 3, 23, 24, 3, 15, + 23, 25, 16, 5, 30, 21, 10, 28, 7, 9, 18, 9, 3, 12, 25, 9, 18, 31, 15, 11, 1, 11, 10, 8, 28, 4, 19, + 27, 22, 17, 10, 23, 25, 2, 19, 16, 2, 19, 28, 25, 24, 2, 2, 4, 17, 4, 12, 26, 4, 25, 31, 27, 31, 14, + 17, 13, 24, 5, 10, 10, 17, 26, 16, 15, 25, 18, 15, 10, 22, 13, 30, 16, 23, 10, 23, 24, 10, 14, 5, 17, + }; + + TransArgs args{data_5d, FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 32}, {1, 4, 4, 32}, DT_UINT8}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransfer5dNhwc, nc1hwc0_to_nhwc_fp16_single) { + uint16_t data[1 * 1 * 1 * 1] = {13425}; + uint16_t data_5d[1 * 1 * 1 * 1 * 16] = { + 13425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferNc1hwc0Nhwc transfer; + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 1, 1, 16}, {1, 1, 1, 1}, DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransfer5dNhwc, nc1hwc0_to_nhwc_fp16) { + uint16_t data[1 * 4 * 4 * 16] = { + 15272, 12501, 13940, 10024, 13356, 13068, 12088, 13733, 15257, 14104, 11089, 15298, 10597, 14359, 14402, 14748, + 14596, 14063, 14674, 13393, 12937, 13466, 14313, 13295, 15000, 15167, 
15311, 13122, 10691, 15165, 14621, 14000, + 13584, 14715, 15105, 14479, 14007, 9846, 14325, 12765, 13343, 13988, 10021, 14598, 14623, 15077, 15204, 12528, + 12024, 14236, 14857, 13009, 15216, 12916, 12754, 14807, 15174, 15075, 12998, 13834, 15174, 13674, 15251, 12683, + 13116, 14819, 11956, 14416, 14717, 14954, 15267, 15143, 15292, 9704, 14781, 14965, 14808, 15008, 11416, 15074, + 14168, 14417, 13441, 10673, 14945, 15114, 15358, 15116, 11950, 12057, 15321, 14973, 14950, 13984, 14900, 11361, + 10161, 14742, 13366, 13683, 13439, 12865, 10623, 14720, 14545, 13063, 10190, 12474, 9850, 15088, 15228, 14195, + 13428, 12443, 14719, 14816, 13231, 12818, 13667, 9680, 14814, 13924, 12757, 15178, 13444, 13673, 14405, 12711, + 15279, 14207, 9089, 13774, 13008, 14685, 13887, 15293, 13983, 14590, 15232, 15285, 15071, 14974, 15257, 13900, + 14907, 15269, 10955, 13635, 15132, 15026, 14218, 14498, 15235, 11243, 14704, 11563, 14394, 6840, 13619, 14655, + 12830, 14094, 12487, 13016, 13128, 15082, 6517, 14170, 14713, 14208, 13583, 12831, 15064, 13157, 13761, 14456, + 14905, 14798, 11391, 14668, 13906, 11053, 12381, 15210, 13567, 15159, 15270, 15073, 13887, 11861, 14615, 12627, + 15209, 14630, 13394, 14228, 14184, 13719, 14805, 13748, 14215, 13234, 13053, 14651, 14753, 14560, 12289, 14957, + 12826, 14788, 15236, 14249, 15211, 14329, 14830, 14793, 13202, 14635, 14489, 14664, 10751, 10992, 13459, 13658, + 14947, 14484, 15045, 14431, 14644, 13939, 14088, 14092, 14765, 14096, 14696, 13201, 15162, 14751, 14119, 13506, + 14659, 15355, 14904, 13374, 15048, 15188, 14733, 14307, 12518, 12511, 15187, 11018, 13072, 15023, 11355, 14216, + }; + + uint16_t data_5d[1 * 1 * 4 * 4 * 16] = { + 15272, 12501, 13940, 10024, 13356, 13068, 12088, 13733, 15257, 14104, 11089, 15298, 10597, 14359, 14402, 14748, + 14596, 14063, 14674, 13393, 12937, 13466, 14313, 13295, 15000, 15167, 15311, 13122, 10691, 15165, 14621, 14000, + 13584, 14715, 15105, 14479, 14007, 9846, 14325, 12765, 13343, 13988, 10021, 14598, 
14623, 15077, 15204, 12528, + 12024, 14236, 14857, 13009, 15216, 12916, 12754, 14807, 15174, 15075, 12998, 13834, 15174, 13674, 15251, 12683, + 13116, 14819, 11956, 14416, 14717, 14954, 15267, 15143, 15292, 9704, 14781, 14965, 14808, 15008, 11416, 15074, + 14168, 14417, 13441, 10673, 14945, 15114, 15358, 15116, 11950, 12057, 15321, 14973, 14950, 13984, 14900, 11361, + 10161, 14742, 13366, 13683, 13439, 12865, 10623, 14720, 14545, 13063, 10190, 12474, 9850, 15088, 15228, 14195, + 13428, 12443, 14719, 14816, 13231, 12818, 13667, 9680, 14814, 13924, 12757, 15178, 13444, 13673, 14405, 12711, + 15279, 14207, 9089, 13774, 13008, 14685, 13887, 15293, 13983, 14590, 15232, 15285, 15071, 14974, 15257, 13900, + 14907, 15269, 10955, 13635, 15132, 15026, 14218, 14498, 15235, 11243, 14704, 11563, 14394, 6840, 13619, 14655, + 12830, 14094, 12487, 13016, 13128, 15082, 6517, 14170, 14713, 14208, 13583, 12831, 15064, 13157, 13761, 14456, + 14905, 14798, 11391, 14668, 13906, 11053, 12381, 15210, 13567, 15159, 15270, 15073, 13887, 11861, 14615, 12627, + 15209, 14630, 13394, 14228, 14184, 13719, 14805, 13748, 14215, 13234, 13053, 14651, 14753, 14560, 12289, 14957, + 12826, 14788, 15236, 14249, 15211, 14329, 14830, 14793, 13202, 14635, 14489, 14664, 10751, 10992, 13459, 13658, + 14947, 14484, 15045, 14431, 14644, 13939, 14088, 14092, 14765, 14096, 14696, 13201, 15162, 14751, 14119, 13506, + 14659, 15355, 14904, 13374, 15048, 15188, 14733, 14307, 12518, 12511, 15187, 11018, 13072, 15023, 11355, 14216, + }; + + FormatTransferNc1hwc0Nhwc transfer; + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 16}, {1, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransfer5dNhwc, nc1hwc0_to_nhwc_fp16_17c) { + 
uint16_t data[1 * 4 * 4 * 17] = { + 13688, 13163, 15170, 12549, 12876, 15228, 14672, 14988, 13134, 14510, 9810, 15108, 14863, 13964, 13563, 12807, + 13855, 13713, 14783, 14854, 13049, 11513, 15025, 14681, 13663, 13722, 14976, 15068, 14816, 13831, 13642, 15235, + 13133, 14666, 14169, 11361, 14948, 14421, 14255, 14285, 15057, 11992, 14788, 13201, 15119, 14856, 14793, 12473, + 15215, 14789, 14728, 14311, 12980, 10457, 10908, 12896, 14619, 15085, 12907, 11787, 13731, 13228, 15259, 14651, + 13829, 12858, 14998, 13957, 15122, 13691, 13185, 11770, 13198, 12714, 15199, 13931, 9780, 14569, 15353, 14807, + 14196, 14570, 14753, 14042, 14983, 12953, 14874, 15089, 13383, 13795, 12129, 14556, 13240, 14965, 13331, 11285, + 15188, 15110, 14909, 14485, 14336, 13854, 13931, 15042, 13277, 14478, 12890, 14101, 14639, 14380, 14453, 15222, + 13431, 13650, 14330, 15188, 15118, 13645, 13878, 8366, 15192, 9233, 13516, 14812, 15289, 14551, 14772, 10564, + 13672, 14892, 11295, 15075, 15080, 14504, 11827, 15286, 13510, 13808, 10051, 12669, 15104, 14790, 15166, 12735, + 13908, 15202, 15103, 13600, 10602, 14248, 14810, 14408, 14317, 11545, 14353, 12064, 14610, 14610, 14557, 14438, + 11772, 14486, 14024, 15136, 11316, 15070, 14996, 13987, 12120, 14548, 13976, 13462, 13614, 14785, 12854, 11411, + 14179, 13944, 12796, 13491, 13781, 14549, 13506, 13369, 14249, 14973, 12837, 14507, 13462, 14467, 14856, 14851, + 14744, 15209, 12085, 13802, 13234, 15098, 12964, 14569, 15206, 11899, 11525, 15082, 14794, 14982, 14552, 15075, + 14741, 14730, 15013, 13483, 14997, 13342, 11416, 12620, 15125, 13969, 13737, 14041, 11219, 15355, 11361, 12378, + 13717, 13299, 14856, 12113, 15291, 12946, 15038, 14854, 13568, 13635, 14525, 12769, 13663, 13672, 14164, 14763, + 14661, 12871, 10567, 13383, 14679, 13331, 15247, 14464, 14642, 13930, 15218, 15040, 14614, 10533, 13511, 13577, + 12696, 11780, 11060, 15069, 11544, 15333, 15295, 14505, 11951, 12790, 14115, 14978, 12227, 14806, 15238, 14393, + }; + uint16_t data_5d[1 * 2 * 
4 * 4 * 16] = { + 13688, 13163, 15170, 12549, 12876, 15228, 14672, 14988, 13134, 14510, 9810, 15108, 14863, 13964, 13563, 12807, + 13713, 14783, 14854, 13049, 11513, 15025, 14681, 13663, 13722, 14976, 15068, 14816, 13831, 13642, 15235, 13133, + 14169, 11361, 14948, 14421, 14255, 14285, 15057, 11992, 14788, 13201, 15119, 14856, 14793, 12473, 15215, 14789, + 14311, 12980, 10457, 10908, 12896, 14619, 15085, 12907, 11787, 13731, 13228, 15259, 14651, 13829, 12858, 14998, + 15122, 13691, 13185, 11770, 13198, 12714, 15199, 13931, 9780, 14569, 15353, 14807, 14196, 14570, 14753, 14042, + 12953, 14874, 15089, 13383, 13795, 12129, 14556, 13240, 14965, 13331, 11285, 15188, 15110, 14909, 14485, 14336, + 13931, 15042, 13277, 14478, 12890, 14101, 14639, 14380, 14453, 15222, 13431, 13650, 14330, 15188, 15118, 13645, + 8366, 15192, 9233, 13516, 14812, 15289, 14551, 14772, 10564, 13672, 14892, 11295, 15075, 15080, 14504, 11827, + 13510, 13808, 10051, 12669, 15104, 14790, 15166, 12735, 13908, 15202, 15103, 13600, 10602, 14248, 14810, 14408, + 11545, 14353, 12064, 14610, 14610, 14557, 14438, 11772, 14486, 14024, 15136, 11316, 15070, 14996, 13987, 12120, + 13976, 13462, 13614, 14785, 12854, 11411, 14179, 13944, 12796, 13491, 13781, 14549, 13506, 13369, 14249, 14973, + 14507, 13462, 14467, 14856, 14851, 14744, 15209, 12085, 13802, 13234, 15098, 12964, 14569, 15206, 11899, 11525, + 14794, 14982, 14552, 15075, 14741, 14730, 15013, 13483, 14997, 13342, 11416, 12620, 15125, 13969, 13737, 14041, + 15355, 11361, 12378, 13717, 13299, 14856, 12113, 15291, 12946, 15038, 14854, 13568, 13635, 14525, 12769, 13663, + 14164, 14763, 14661, 12871, 10567, 13383, 14679, 13331, 15247, 14464, 14642, 13930, 15218, 15040, 14614, 10533, + 13577, 12696, 11780, 11060, 15069, 11544, 15333, 15295, 14505, 11951, 12790, 14115, 14978, 12227, 14806, 15238, + 13855, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
13957, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14983, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13854, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13878, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15286, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14317, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14548, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12837, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11219, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13672, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13511, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + FormatTransferNc1hwc0Nhwc transfer; + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 2, 4, 4, 16}, {1, 4, 4, 17}, DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransfer5dNhwc, nc1hwc0_to_nhwc_float) { + float data[1 * 4 * 4 * 8] = { + 0.14395748132615127, 0.7088975015001723, 0.33509522683279236, 0.519409599070846, 0.6877815703183492, + 0.11202024034801872, 0.006276379581528935, 0.3691877050360296, 0.8286682894080306, 0.8577776441477236, + 0.8620479285149965, 0.4785995315212451, 0.44290479161201646, 0.9298880355625483, 0.2079760936942212, + 0.7584010867023864, 0.04130212504113229, 0.6886546611913413, 0.8905605080864101, 0.44890343542909616, + 0.033926825396621396, 0.5658782347882929, 0.0154316787055232, 0.6715229410700493, 0.15104032692043634, + 0.7987494899164227, 0.4107814620344469, 0.2739026396531693, 0.78314190163481, 0.5572462878749022, + 0.49687645697979144, 0.2610836567590066, 0.662984178264575, 0.8272197084584877, 0.951921638821051, + 0.22020310043156965, 0.7970234862803476, 0.786770080635131, 
0.2782844900661975, 0.2214492309528462, + 0.05973945191243013, 0.7787265114728507, 0.5885108317539937, 0.6350434845578384, 0.03432358265902924, + 0.13814464833713236, 0.47716000964132366, 0.2172979558296817, 0.28184686482223664, 0.023912988786341294, + 0.564989222222373, 0.6350727555041364, 0.17406682486362202, 0.5782687973077343, 0.7691109852619834, + 0.6283233021413348, 0.7351740165991039, 0.3215521304014334, 0.6774009330532079, 0.33739099291474717, + 0.9568828717913317, 0.00406630044661338, 0.022773887476273513, 0.0062475315550286625, 0.11386475535418572, + 0.31803152343086083, 0.5060149804451273, 0.9748224337922627, 0.08021564523597269, 0.36955307731376397, + 0.45745050755121797, 0.6991568446588684, 0.48452855411290163, 0.1687682253709446, 0.8171081226272253, + 0.5722562245860371, 0.38631439575235693, 0.4152775169941805, 0.5471240543016923, 0.47255359909361083, + 0.19979061254107167, 0.6128813529241708, 0.23241802167600212, 0.6598280464895825, 0.39993127352459, + 0.6179092276151944, 0.3842495249973191, 0.07172557002264568, 0.5232161572150006, 0.33507445318217577, + 0.6669179668737779, 0.5710568144146737, 0.09743181036899662, 0.1960181228757637, 0.024614338703409122, + 0.04305198418453349, 0.8272287766449594, 0.3104293133165287, 0.295404336140902, 0.869972288744926, + 0.6598182869917978, 0.1256465164983911, 0.6611169004945606, 0.887335228528663, 0.30319799367763645, + 0.10221678669180034, 0.822023968653782, 0.7054515545991238, 0.7026671130911287, 0.6583675813685899, + 0.14794276026959252, 0.12379423708188408, 0.010717044340432524, 0.3335554745873852, 0.6960727743111309, + 0.835599614916433, 0.6695589997837782, 0.928169629281005, 0.2751019740519224, 0.09543122169280194, + 0.5117813618227156, 0.33444700996623, 0.5634565397240759, 0.5205229823558587, 0.7650601279838857, + 0.517468037811738, 0.5880785947369374, 0.2177496979194814, + }; + float data_5d[1 * 1 * 4 * 4 * 16] = { + 0.14395748132615127, + 0.7088975015001723, + 0.33509522683279236, + 0.519409599070846, + 
0.6877815703183492, + 0.11202024034801872, + 0.006276379581528935, + 0.3691877050360296, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8286682894080306, + 0.8577776441477236, + 0.8620479285149965, + 0.4785995315212451, + 0.44290479161201646, + 0.9298880355625483, + 0.2079760936942212, + 0.7584010867023864, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04130212504113229, + 0.6886546611913413, + 0.8905605080864101, + 0.44890343542909616, + 0.033926825396621396, + 0.5658782347882929, + 0.0154316787055232, + 0.6715229410700493, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15104032692043634, + 0.7987494899164227, + 0.4107814620344469, + 0.2739026396531693, + 0.78314190163481, + 0.5572462878749022, + 0.49687645697979144, + 0.2610836567590066, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.662984178264575, + 0.8272197084584877, + 0.951921638821051, + 0.22020310043156965, + 0.7970234862803476, + 0.786770080635131, + 0.2782844900661975, + 0.2214492309528462, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05973945191243013, + 0.7787265114728507, + 0.5885108317539937, + 0.6350434845578384, + 0.03432358265902924, + 0.13814464833713236, + 0.47716000964132366, + 0.2172979558296817, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.28184686482223664, + 0.023912988786341294, + 0.564989222222373, + 0.6350727555041364, + 0.17406682486362202, + 0.5782687973077343, + 0.7691109852619834, + 0.6283233021413348, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7351740165991039, + 0.3215521304014334, + 0.6774009330532079, + 0.33739099291474717, + 0.9568828717913317, + 0.00406630044661338, + 0.022773887476273513, + 0.0062475315550286625, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11386475535418572, + 0.31803152343086083, + 0.5060149804451273, + 0.9748224337922627, + 0.08021564523597269, + 0.36955307731376397, + 0.45745050755121797, + 0.6991568446588684, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.48452855411290163, + 0.1687682253709446, + 0.8171081226272253, + 0.5722562245860371, + 0.38631439575235693, + 0.4152775169941805, + 0.5471240543016923, + 0.47255359909361083, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19979061254107167, + 0.6128813529241708, + 0.23241802167600212, + 0.6598280464895825, + 0.39993127352459, + 0.6179092276151944, + 0.3842495249973191, + 0.07172557002264568, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5232161572150006, + 0.33507445318217577, + 0.6669179668737779, + 0.5710568144146737, + 0.09743181036899662, + 0.1960181228757637, + 0.024614338703409122, + 0.04305198418453349, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8272287766449594, + 0.3104293133165287, + 0.295404336140902, + 0.869972288744926, + 0.6598182869917978, + 0.1256465164983911, + 0.6611169004945606, + 0.887335228528663, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30319799367763645, + 0.10221678669180034, + 0.822023968653782, + 0.7054515545991238, + 0.7026671130911287, + 0.6583675813685899, + 0.14794276026959252, + 0.12379423708188408, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.010717044340432524, + 0.3335554745873852, + 0.6960727743111309, + 0.835599614916433, + 0.6695589997837782, + 0.928169629281005, + 0.2751019740519224, + 0.09543122169280194, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5117813618227156, + 0.33444700996623, + 0.5634565397240759, + 0.5205229823558587, + 0.7650601279838857, + 0.517468037811738, + 0.5880785947369374, + 0.2177496979194814, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + FormatTransferNc1hwc0Nhwc transfer; + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 16}, {1, 4, 4, 8}, DT_FLOAT}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); 
++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransfer5dNhwc, nc1hwc0_to_nhwc_float2) { + float data[1 * 4 * 4 * 16] = { + 0.4564786036237277, 0.9979708631827585, 0.3433449574519194, 0.8327170836954324, 0.7102312568870411, + 0.4620266296757265, 0.731409804774576, 0.6657620022925489, 0.9035335884051056, 0.9985587438582897, + 0.9914301800355473, 0.7964115237958334, 0.8584244270496925, 0.1378300681142447, 0.9146423401653642, + 0.5838660267355298, 0.08206209108887697, 0.5978772929814459, 0.8606477151292675, 0.9621549085527097, + 0.9903169628823607, 0.4413502878053672, 0.3419319864126722, 0.2055590347352939, 0.6958247381061989, + 0.31025181192729134, 0.8754696913785533, 0.16342168578297833, 0.48930764038405494, 0.4313174744262651, + 0.9702299296220505, 0.8608633387702506, 0.32414390364635537, 0.9499311305911101, 0.16162894064122346, + 0.7903673191014232, 0.9747281169251742, 0.19047786660677712, 0.5261388562183582, 0.08832658004278726, + 0.5026034609888064, 0.6012786562129157, 0.22023272766428525, 0.020620813194720755, 0.05621537431872736, + 0.9065752732717621, 0.10241901312343715, 0.7468164462101752, 0.6025800857266902, 0.8563704540567573, + 0.06781353150900471, 0.07449933352495186, 0.6462834271091005, 0.6443763466531751, 0.23443689347408292, + 0.9007764794631198, 0.2654578696798139, 0.34714459047552515, 0.9442670098376124, 0.6551617300899828, + 0.18577821984901555, 0.6554056318808226, 0.9256976155576719, 0.5652951773970069, 0.9036782198563219, + 0.4044957431656302, 0.7720355215505535, 0.9615844951249943, 0.9583990983695638, 0.6734156011463083, + 0.07600462174220979, 0.6483143942496717, 0.7721284471233, 0.8844698574851142, 0.36446559980424187, + 0.2020712252813367, 0.5532440010466321, 0.774082923402127, 0.15753243710488063, 0.7494487845016365, + 0.9777126596615223, 0.5977366091149126, 0.6118678708384551, 0.5776021887764191, 0.7052275323758874, + 0.8643536179940521, 0.7351423461892231, 0.7472712957396752, 
0.8320997849162026, 0.6893202120458873, + 0.6061132885338082, 0.7922244558608248, 0.0694183157723195, 0.46637306821083124, 0.24761782321363746, + 0.841220929773547, 0.19251486945606122, 0.38619121136754864, 0.7777480845728865, 0.5153172040040658, + 0.3221852259862178, 0.4422626646996969, 0.37297798349444533, 0.28636326416033275, 0.872322772220719, + 0.36684031320178934, 0.19151281097764805, 0.011093279580771997, 0.4565429665149845, 0.33857376446626597, + 0.3893706571125203, 0.09763853688754631, 0.7857370586828217, 0.8196400350274258, 0.6233161446810015, + 0.7290476585781567, 0.32302816180960103, 0.6465065602583131, 0.7039055178319722, 0.737269028941926, + 0.4423557810299573, 0.2936287529863649, 0.17383709070689168, 0.07431649542543162, 0.3395265133684905, + 0.6974196233605686, 0.029369533449977392, 0.8462674586537903, 0.1862823571798382, 0.7055362309875918, + 0.17654051410721183, 0.6964155103280214, 0.8712303990874309, 0.8568724052794551, 0.12588302504561832, + 0.7811576698822645, 0.4767670325567058, 0.6159667262200001, 0.05490717393005784, 0.6057928538737336, + 0.8546815077994012, 0.35062523335992557, 0.12129862795339719, 0.26548933441616984, 0.7385743748060322, + 0.40555441839717865, 0.9982561364349957, 0.3188263567371774, 0.9424891762411117, 0.6987901399365776, + 0.20540961831616766, 0.02167040277535437, 0.3185967031398763, 0.019782643683121437, 0.8438032683953371, + 0.31134590764527503, 0.4314797108424774, 0.8627081052949851, 0.8372865189589538, 0.06727616337496733, + 0.3842235730532829, 0.5620567599469514, 0.4872978433211791, 0.6475139880108287, 0.415262171705042, + 0.16946425200394755, 0.37100485677595374, 0.5675133342481045, 0.39388891080376875, 0.11961877709354252, + 0.6050560699851201, 0.6113694663807391, 0.7148151069557451, 0.3227410199494769, 0.47070000531233336, + 0.6184325422121354, 0.9366505496068971, 0.044456665466675815, 0.6052379671242412, 0.0566508245272247, + 0.293920203298791, 0.13061352402232362, 0.11203690960569768, 0.8258966037746421, 
0.2946765132214273, + 0.8466489444530028, 0.7451823806430503, 0.15781221269704837, 0.4021853233314594, 0.664241200519387, + 0.7091761574997846, 0.7412572946122742, 0.12408378577361201, 0.6351997345547254, 0.49923734082324256, + 0.7532652811776691, 0.4831886413654197, 0.8560909415971528, 0.5359252954295073, 0.27940116425452954, + 0.697831712634371, 0.6651941742331731, 0.38894273544941194, 0.6511702859506332, 0.24281567468797605, + 0.28501013033328104, 0.4337819166171949, 0.5961407181707284, 0.5496662828770404, 0.41579648284745674, + 0.8751091806383153, 0.20914271806373042, 0.5223981065749688, 0.36853485871088787, 0.6950926163206822, + 0.8626788290432134, 0.280786741801478, 0.9968042088585791, 0.047574943064607855, 0.3553705848433375, + 0.8500631428597956, 0.7900997773399084, 0.08208295892311868, 0.5799468056312337, 0.8678034851684076, + 0.6620161902490288, 0.10118441445571336, 0.5346674104167647, 0.46546006518032723, 0.7854958793456643, + 0.41889328134628867, 0.4788457107828109, 0.5693837686243997, 0.03982329678460883, 0.5036591611514133, + 0.8634869905751454, 0.36418146420377306, 0.3560197611754259, 0.4237274215048007, 0.9272113781908002, + 0.4733639065953018, 0.11277189215022076, 0.46944385729018046, 0.5240510466702447, 0.3809929220315893, + 0.9044904830984387, 0.630375764858229, 0.40395182381843286, 0.39508838980681005, 0.762649660569511, + 0.7194310117846976, 0.992973488796045, 0.22591279583891666, 0.4331673497772569, 0.6014661361937058, + 0.7590490257651524, + }; + float data_5d[1 * 1 * 4 * 4 * 16] = { + 0.4564786036237277, 0.9979708631827585, 0.3433449574519194, 0.8327170836954324, 0.7102312568870411, + 0.4620266296757265, 0.731409804774576, 0.6657620022925489, 0.9035335884051056, 0.9985587438582897, + 0.9914301800355473, 0.7964115237958334, 0.8584244270496925, 0.1378300681142447, 0.9146423401653642, + 0.5838660267355298, 0.08206209108887697, 0.5978772929814459, 0.8606477151292675, 0.9621549085527097, + 0.9903169628823607, 0.4413502878053672, 
0.3419319864126722, 0.2055590347352939, 0.6958247381061989, + 0.31025181192729134, 0.8754696913785533, 0.16342168578297833, 0.48930764038405494, 0.4313174744262651, + 0.9702299296220505, 0.8608633387702506, 0.32414390364635537, 0.9499311305911101, 0.16162894064122346, + 0.7903673191014232, 0.9747281169251742, 0.19047786660677712, 0.5261388562183582, 0.08832658004278726, + 0.5026034609888064, 0.6012786562129157, 0.22023272766428525, 0.020620813194720755, 0.05621537431872736, + 0.9065752732717621, 0.10241901312343715, 0.7468164462101752, 0.6025800857266902, 0.8563704540567573, + 0.06781353150900471, 0.07449933352495186, 0.6462834271091005, 0.6443763466531751, 0.23443689347408292, + 0.9007764794631198, 0.2654578696798139, 0.34714459047552515, 0.9442670098376124, 0.6551617300899828, + 0.18577821984901555, 0.6554056318808226, 0.9256976155576719, 0.5652951773970069, 0.9036782198563219, + 0.4044957431656302, 0.7720355215505535, 0.9615844951249943, 0.9583990983695638, 0.6734156011463083, + 0.07600462174220979, 0.6483143942496717, 0.7721284471233, 0.8844698574851142, 0.36446559980424187, + 0.2020712252813367, 0.5532440010466321, 0.774082923402127, 0.15753243710488063, 0.7494487845016365, + 0.9777126596615223, 0.5977366091149126, 0.6118678708384551, 0.5776021887764191, 0.7052275323758874, + 0.8643536179940521, 0.7351423461892231, 0.7472712957396752, 0.8320997849162026, 0.6893202120458873, + 0.6061132885338082, 0.7922244558608248, 0.0694183157723195, 0.46637306821083124, 0.24761782321363746, + 0.841220929773547, 0.19251486945606122, 0.38619121136754864, 0.7777480845728865, 0.5153172040040658, + 0.3221852259862178, 0.4422626646996969, 0.37297798349444533, 0.28636326416033275, 0.872322772220719, + 0.36684031320178934, 0.19151281097764805, 0.011093279580771997, 0.4565429665149845, 0.33857376446626597, + 0.3893706571125203, 0.09763853688754631, 0.7857370586828217, 0.8196400350274258, 0.6233161446810015, + 0.7290476585781567, 0.32302816180960103, 0.6465065602583131, 
0.7039055178319722, 0.737269028941926, + 0.4423557810299573, 0.2936287529863649, 0.17383709070689168, 0.07431649542543162, 0.3395265133684905, + 0.6974196233605686, 0.029369533449977392, 0.8462674586537903, 0.1862823571798382, 0.7055362309875918, + 0.17654051410721183, 0.6964155103280214, 0.8712303990874309, 0.8568724052794551, 0.12588302504561832, + 0.7811576698822645, 0.4767670325567058, 0.6159667262200001, 0.05490717393005784, 0.6057928538737336, + 0.8546815077994012, 0.35062523335992557, 0.12129862795339719, 0.26548933441616984, 0.7385743748060322, + 0.40555441839717865, 0.9982561364349957, 0.3188263567371774, 0.9424891762411117, 0.6987901399365776, + 0.20540961831616766, 0.02167040277535437, 0.3185967031398763, 0.019782643683121437, 0.8438032683953371, + 0.31134590764527503, 0.4314797108424774, 0.8627081052949851, 0.8372865189589538, 0.06727616337496733, + 0.3842235730532829, 0.5620567599469514, 0.4872978433211791, 0.6475139880108287, 0.415262171705042, + 0.16946425200394755, 0.37100485677595374, 0.5675133342481045, 0.39388891080376875, 0.11961877709354252, + 0.6050560699851201, 0.6113694663807391, 0.7148151069557451, 0.3227410199494769, 0.47070000531233336, + 0.6184325422121354, 0.9366505496068971, 0.044456665466675815, 0.6052379671242412, 0.0566508245272247, + 0.293920203298791, 0.13061352402232362, 0.11203690960569768, 0.8258966037746421, 0.2946765132214273, + 0.8466489444530028, 0.7451823806430503, 0.15781221269704837, 0.4021853233314594, 0.664241200519387, + 0.7091761574997846, 0.7412572946122742, 0.12408378577361201, 0.6351997345547254, 0.49923734082324256, + 0.7532652811776691, 0.4831886413654197, 0.8560909415971528, 0.5359252954295073, 0.27940116425452954, + 0.697831712634371, 0.6651941742331731, 0.38894273544941194, 0.6511702859506332, 0.24281567468797605, + 0.28501013033328104, 0.4337819166171949, 0.5961407181707284, 0.5496662828770404, 0.41579648284745674, + 0.8751091806383153, 0.20914271806373042, 0.5223981065749688, 0.36853485871088787, 
0.6950926163206822, + 0.8626788290432134, 0.280786741801478, 0.9968042088585791, 0.047574943064607855, 0.3553705848433375, + 0.8500631428597956, 0.7900997773399084, 0.08208295892311868, 0.5799468056312337, 0.8678034851684076, + 0.6620161902490288, 0.10118441445571336, 0.5346674104167647, 0.46546006518032723, 0.7854958793456643, + 0.41889328134628867, 0.4788457107828109, 0.5693837686243997, 0.03982329678460883, 0.5036591611514133, + 0.8634869905751454, 0.36418146420377306, 0.3560197611754259, 0.4237274215048007, 0.9272113781908002, + 0.4733639065953018, 0.11277189215022076, 0.46944385729018046, 0.5240510466702447, 0.3809929220315893, + 0.9044904830984387, 0.630375764858229, 0.40395182381843286, 0.39508838980681005, 0.762649660569511, + 0.7194310117846976, 0.992973488796045, 0.22591279583891666, 0.4331673497772569, 0.6014661361937058, + 0.7590490257651524, + }; + + FormatTransferNc1hwc0Nhwc transfer; + TransArgs args{ + reinterpret_cast(data_5d), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 16}, {1, 4, 4, 16}, DT_FLOAT}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, UNSUPPORTED); +} + +TEST_F(UtestFormatTransfer5dNhwc, invalid_src_format) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_RESERVED, FORMAT_NHWC, {1, 1, 4, 4, 16}, {1, 4, 4, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransfer5dNhwc, invalid_src_shape1) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 16, 1}, 
{1, 4, 4, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransfer5dNhwc, InvalidSrcShape2) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NHWC, {1, -1, 4, 4, 16}, {1, 4, 4, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransfer5dNhwc, invalid_src_data_type) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NHWC, {1, -1, 4, 4, 16}, {1, 4, 4, 1}, DT_UNDEFINED}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransfer5dNhwc, invalid_dst_format) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NCHW, {1, 1, 4, 4, 16}, {1, 4, 4, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransfer5dNhwc, invalid_dst_shape1) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 16}, {1, 4, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransfer5dNhwc, invalid_dst_shape2) { + uint16_t data[1 * 1 * 4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 16}, {1, 4, 4, -1}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransfer5dNhwc, invalid_src_dst_shape_relation) { + uint16_t data[1 * 1 * 
4 * 4 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1HWC0, FORMAT_NHWC, {1, 1, 4, 4, 16}, {1, 4, 4, 17}, DT_FLOAT16}; + TransResult result; + + FormatTransferNc1hwc0Nhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_c1hwncoc0_hwcn_unittest.cc b/tests/ut/ge/common/format_transfer_c1hwncoc0_hwcn_unittest.cc new file mode 100644 index 00000000..570310f3 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_c1hwncoc0_hwcn_unittest.cc @@ -0,0 +1,13713 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_c1hwncoc0_hwcn.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UtestFormatTransferC1hwncoc0Hwcn : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invalid_data_type_uint8) { + uint8_t data[1 * 4 * 4 * 1 * 16 * 16] = {1}; + + TransArgs args{data, FORMAT_C1HWNCoC0, FORMAT_HWCN, {1, 4, 4, 1, 16, 16}, {4, 4, 3, 1}, DT_UINT8}; + TransResult result; + + FormatTransferC1hwncoc0Hwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invalid_data_type_int32) { + int32_t data[1 * 4 * 1 * 1 * 16 * 16] = {1}; + + FormatTransferC1hwncoc0Hwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_C1HWNCoC0, FORMAT_HWCN, {1, 4, 4, 1, 16, 16}, {4, 4, 3, 1}, DT_INT32}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invalid_src_format_nc1khkwhwc0) { + float data[1 * 4 * 1 * 1 * 16 * 16] = {1}; + + FormatTransferC1hwncoc0Hwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NC1KHKWHWC0, FORMAT_HWCN, {1, 4, 4, 1, 16, 16}, {4, 4, 3, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invalid_dst_format_nchw) { + float data[1 * 4 * 1 * 1 * 16 * 16] = {1}; + + FormatTransferC1hwncoc0Hwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_C1HWNCoC0, FORMAT_NCHW, {1, 4, 4, 1, 16, 16}, {4, 4, 3, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invalid_src_shape) { + float data[1 * 4 * 4 * 1 * 16 * 16] = {1}; 
+ + FormatTransferC1hwncoc0Hwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_C1HWNCoC0, FORMAT_HWCN, {1, 4, 4, 1, 16}, {4, 4, 3, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invalid_src_shape2) { + float data[1 * 4 * 4 * 1 * 16 * 16] = {1}; + + FormatTransferC1hwncoc0Hwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_C1HWNCoC0, FORMAT_HWCN, {1, 4, 4, 1, 16, -16}, {4, 4, 3, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invali_dst_shape) { + float data[1 * 4 * 4 * 1 * 16 * 16] = {1}; + + FormatTransferC1hwncoc0Hwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_C1HWNCoC0, FORMAT_HWCN, {1, 4, 4, 1, 16, 16}, {4, 4, 3}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invalid_dst_shape2) { + float data[1 * 4 * 4 * 1 * 16 * 16] = {1}; + + FormatTransferC1hwncoc0Hwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_C1HWNCoC0, FORMAT_HWCN, {1, 4, 4, 1, 16, 16}, {4, 4, 3, -1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_invalid_src_dst_shape_relation) { + float data[1 * 4 * 4 * 1 * 16 * 16] = {1}; + + FormatTransferC1hwncoc0Hwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_C1HWNCoC0, FORMAT_HWCN, {1, 4, 4, 1, 16, 16}, {4, 4, 17, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_fp16_success_lt_cube) { + uint16_t data_6d[1 * 1 * 1 * 1 * 16 * 16] = { + 15113, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[1 * 1 * 1 * 1] = {15113}; + + TransArgs args{reinterpret_cast(data_6d), + FORMAT_C1HWNCoC0, + FORMAT_HWCN, + {1, 1, 1, 1, 16, 16}, + {1, 1, 1, 1}, + DT_FLOAT16}; + TransResult result; + + FormatTransferC1hwncoc0Hwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, UNSUPPORTED); +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_gp16_success_eq_cube) { + uint16_t data_6d[1 * 4 * 4 * 1 * 16 * 16] = { + 12645, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14633, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14433, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15165, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15047, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13830, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14508, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15278, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14154, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13532, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13839, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14970, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14273, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14616, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14711, 11952, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14887, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14729, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13614, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14522, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14147, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 11144, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13564, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14352, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14662, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13086, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14067, 15036, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14841, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14326, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14089, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14852, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14623, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15283, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14344, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 13962, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15084, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14657, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14896, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14563, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 11217, 13603, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14815, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 11567, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14761, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15035, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14499, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14721, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14891, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14768, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14392, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14785, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15012, 15007, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12596, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13899, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12321, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14661, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14890, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13671, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12573, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15222, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12603, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14647, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13872, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13590, 14681, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 12216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13767, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13402, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 12884, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14936, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14808, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14305, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14438, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13720, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14046, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14748, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13212, 14000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13509, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14440, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14659, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 11826, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 8774, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 15114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14939, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13985, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14522, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14855, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14781, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13573, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 15106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13698, 14476, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15228, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15058, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12869, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12086, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14357, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14130, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13966, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14526, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 14678, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15108, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14579, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14976, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13274, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14262, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15120, 14780, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14257, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15284, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14800, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15091, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15299, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 13724, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14127, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15056, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14654, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14881, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 13736, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14030, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14227, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14382, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15140, 14068, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14013, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12639, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14995, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 10596, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 10501, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15029, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 13543, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12682, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14597, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14390, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15301, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14582, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15214, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 13571, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14725, 13897, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13078, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9868, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13569, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15358, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14417, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14445, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15126, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14470, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13145, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14483, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14557, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14139, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14513, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14773, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12032, 15055, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14267, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13681, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14748, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13844, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13620, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15016, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14635, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14348, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14655, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15240, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15359, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14784, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15093, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14534, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14619, 11677, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12206, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15270, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13777, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14574, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12283, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11265, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11326, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13932, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15055, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13331, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14461, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14841, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15065, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13824, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15185, 14438, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10315, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13020, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14978, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13350, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12751, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14639, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12404, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13012, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13042, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13779, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12811, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14088, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13573, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13606, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14758, 11869, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14837, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14567, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15208, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15274, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14937, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14079, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14031, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15192, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15309, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14776, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14386, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13435, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14708, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11464, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15279, 13535, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14943, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14386, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14293, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14413, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15271, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14957, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15186, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7837, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14912, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15279, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13994, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13416, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11451, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13456, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12447, + }; + uint16_t data[4 * 4 * 16 * 1] = { + 12645, 14633, 15320, 14433, 15165, 15047, 13830, 14508, 15278, 14154, 13532, 13839, 14970, 14273, 14616, 14711, + 11952, 14887, 14729, 13614, 13478, 14522, 14147, 11144, 15107, 13564, 14352, 14639, 14662, 13086, 13693, 14067, + 15036, 14841, 14326, 14089, 14852, 14623, 15283, 14344, 13962, 15084, 14657, 14154, 14200, 14896, 14563, 11217, + 13603, 14815, 15010, 11567, 14761, 15134, 15035, 14146, 14499, 14180, 14721, 14891, 14768, 14392, 14785, 15012, + 15007, 12596, 13899, 12321, 14661, 14890, 13671, 12573, 13413, 14209, 15222, 12603, 15129, 14647, 13872, 13590, + 14681, 12216, 13767, 13402, 12884, 14936, 14808, 14355, 14305, 14438, 14119, 14776, 13720, 14046, 14748, 13212, + 14000, 13509, 
14440, 14659, 11826, 14333, 8774, 15114, 14939, 13985, 14522, 14855, 14781, 13573, 15106, 13698, + 14476, 15228, 15058, 12869, 12086, 14357, 14130, 13966, 14526, 14678, 15108, 14579, 14976, 13274, 14262, 15120, + 14780, 14257, 15284, 14800, 15091, 15299, 13724, 14127, 15056, 14654, 14881, 13736, 14030, 14227, 14382, 15140, + 14068, 14013, 12639, 14995, 10596, 10501, 15029, 13543, 12682, 14597, 14390, 15301, 14582, 15214, 13571, 14725, + 13897, 13078, 9868, 13569, 15358, 14417, 14445, 15126, 14470, 13145, 14483, 14557, 14139, 14513, 14773, 12032, + 15055, 14267, 13681, 14748, 13844, 13620, 15016, 14635, 14348, 14655, 15240, 15359, 14784, 15093, 14534, 14619, + 11677, 12206, 15270, 13777, 14574, 12283, 11265, 11326, 13932, 15055, 13331, 14461, 14841, 15065, 13824, 15185, + 14438, 10315, 13020, 14978, 13350, 12751, 14639, 12404, 13012, 13042, 13779, 12811, 14088, 13573, 13606, 14758, + 11869, 14837, 14567, 15208, 15274, 14937, 14079, 14031, 15192, 15309, 14776, 14386, 13435, 14708, 11464, 15279, + 13535, 14943, 14386, 14293, 14413, 15271, 14957, 15186, 7837, 14912, 15279, 13994, 13416, 11451, 13456, 12447, + }; + TransArgs args{reinterpret_cast(data_6d), + FORMAT_C1HWNCoC0, + FORMAT_HWCN, + {1, 4, 4, 1, 16, 16}, + {4, 4, 16, 1}, + DT_FLOAT16}; + TransResult result; + + FormatTransferC1hwncoc0Hwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_fp16_success_gt_cube) { + uint16_t data_6d[2 * 4 * 4 * 1 * 16 * 16] = { + 14565, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14517, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 11937, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14175, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14079, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 10294, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 12826, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14537, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14158, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15202, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15009, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13370, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10297, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13339, + 13739, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14959, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14412, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14675, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14290, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13366, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14483, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15075, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15324, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14819, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13401, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13947, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15193, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15069, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14371, + 14604, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14434, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14664, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15124, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15237, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14180, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14446, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 13470, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14892, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15273, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14384, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14983, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14848, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13982, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14507, + 13019, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 13484, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13850, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14821, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 15158, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14394, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15062, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15360, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 13755, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15137, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15133, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15246, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14083, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14781, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14839, + 11574, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14966, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15049, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14617, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13950, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 15347, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12890, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 11499, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14547, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15283, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12599, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11355, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14027, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11928, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 14581, + 13533, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15194, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15139, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13626, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14135, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13564, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14206, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14806, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14390, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14395, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15223, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14989, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14575, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14499, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11659, + 15100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 7824, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 13215, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13748, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15033, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 9676, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14966, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15319, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 9847, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14483, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14010, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14596, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13987, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9443, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13319, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14000, + 13287, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13881, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 12626, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14531, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 15077, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13117, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14297, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15319, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14457, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14373, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14892, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12128, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15103, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15036, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14850, + 14549, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14697, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14790, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 12319, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14378, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14239, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14715, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12741, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15331, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13285, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15222, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14856, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13820, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14906, + 8603, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13362, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 8135, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12912, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14646, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14950, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14595, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 13315, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 11524, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13995, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15203, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15317, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13332, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14233, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13959, + 13423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 13521, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 5104, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14853, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13550, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13039, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12624, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15286, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14205, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7388, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13467, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14655, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14535, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14807, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14402, + 11269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 11427, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15094, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 5359, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13681, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15216, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15229, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 7516, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14872, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14047, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13049, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14044, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13898, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11374, + 14186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13970, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13584, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14485, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 12866, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14090, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15182, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14753, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15306, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14519, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14418, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10923, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14409, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14982, + 14684, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14658, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14390, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14884, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14511, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 11804, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14798, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13697, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14483, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12489, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15208, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14126, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14920, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14559, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10435, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9400, + 13650, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 12556, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13767, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 14448, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 15211, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13578, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15011, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 11703, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14306, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14676, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15254, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13989, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14771, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13538, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15253, + 11941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14819, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14892, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14592, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14545, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 9841, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12710, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15109, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 13382, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13875, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12953, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13008, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14341, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13954, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13552, + 12829, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13569, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14414, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13760, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14659, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13960, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14606, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12947, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13410, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12637, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15053, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14568, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 8541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14414, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 12651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15345, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 12891, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12785, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14614, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14806, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13831, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15311, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14869, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 14147, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 10617, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14483, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14958, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15002, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[4 * 4 * 18 * 1] = { + 14565, 14517, 11937, 14184, 12256, 14175, 14079, 10294, 12826, 14537, 14158, 15202, 15009, 13370, 10297, 13339, + 12829, 13569, 13739, 14959, 15082, 14412, 14675, 14290, 13366, 14483, 15075, 15324, 14819, 13401, 13947, 15193, + 15069, 14371, 14625, 14414, 14604, 14434, 14664, 15124, 15237, 14269, 14180, 14446, 13470, 14892, 15273, 14384, + 14983, 14848, 13982, 14507, 13760, 14659, 13019, 13304, 13484, 13850, 14821, 15158, 14394, 15062, 15360, 13755, + 15137, 15133, 15246, 14083, 14781, 14839, 13960, 14606, 11574, 14966, 15049, 14617, 15210, 13950, 15347, 12890, + 11499, 14547, 15283, 12599, 11355, 14027, 11928, 14581, 12947, 13410, 13533, 15194, 15139, 13626, 14253, 14135, + 13564, 14206, 14806, 14390, 14395, 15223, 14989, 14575, 14499, 11659, 12637, 15203, 15100, 7824, 13215, 13748, + 15033, 9676, 14966, 15319, 9847, 14483, 14010, 14596, 13987, 9443, 13319, 14000, 12776, 15053, 13287, 13881, + 12626, 14531, 15207, 15077, 13117, 14297, 15319, 14457, 14373, 14892, 12128, 15103, 15036, 14850, 14568, 8541, + 14549, 13355, 14697, 14790, 15146, 12319, 14378, 14239, 14715, 12741, 15331, 13285, 15222, 14856, 13820, 14906, + 14414, 12651, 8603, 13362, 14541, 8135, 12912, 14646, 14950, 14595, 13315, 11524, 13995, 15203, 15317, 13332, + 14233, 13959, 15345, 12891, 13423, 14152, 13521, 5104, 14853, 13550, 13039, 12624, 15286, 14205, 7388, 13467, + 14655, 14535, 14807, 14402, 12785, 14614, 11269, 11427, 15094, 15351, 14353, 5359, 13681, 15216, 15229, 7516, + 14872, 14047, 13049, 14044, 13898, 11374, 
14806, 13831, 14186, 13970, 15100, 13584, 14987, 14485, 12866, 14090, + 15182, 14753, 15306, 14519, 14418, 10923, 14409, 14982, 15311, 14869, 14684, 14658, 14390, 14884, 14511, 11804, + 14798, 13697, 14483, 12489, 15208, 14126, 14920, 14559, 10435, 9400, 14147, 10617, 13650, 14651, 12556, 13767, + 14448, 15211, 13578, 15011, 11703, 14306, 14676, 15254, 13989, 14771, 13538, 15253, 15192, 14483, 11941, 14819, + 14166, 14892, 14592, 14545, 9841, 12710, 15109, 13382, 13875, 12953, 13008, 14341, 13954, 13552, 14958, 15002, + }; + + TransArgs args{reinterpret_cast(data_6d), + FORMAT_C1HWNCoC0, + FORMAT_HWCN, + {2, 4, 4, 1, 16, 16}, + {4, 4, 18, 1}, + DT_FLOAT16}; + TransResult result; + + FormatTransferC1hwncoc0Hwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_fp32_success_lt_cube) { + float data_6d[1 * 1 * 1 * 1 * 16 * 16] = { + 0.029033836332871932, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + float data[1 * 1 * 1 * 1] = { + 0.029033836332871932, + }; + + TransArgs args{reinterpret_cast(data_6d), + FORMAT_C1HWNCoC0, + FORMAT_HWCN, + {1, 1, 1, 1, 16, 16}, + {1, 1, 1, 1}, + DT_FLOAT}; + TransResult result; + + FormatTransferC1hwncoc0Hwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, sixd_to_hwcn_fp32_success_eq_cube) { + float data_6d[1 * 4 * 4 * 1 * 16 * 16] = { + 0.9287460024109794, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2406040495018288, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4469466172807033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18882162922891632, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.76109939494347, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8840655482887357, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9872180535026731, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11661622466386445, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.01235434128678281, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6695347367567285, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7330905362956434, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9500990356218342, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6457271811424727, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6912643273019475, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6703753186632049, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5526387021634535, + 0.08705872980636764, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4385500267268264, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5216635886669073, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4630255672949294, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8447529479061752, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23010207882669031, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.29600005325577006, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02000550424397074, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7628668574925462, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5151224887847649, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8359422516194682, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0915549204131687, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.28249447760074176, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5593289876304682, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7529148849244871, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35147879233628254, + 0.8522667732376881, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6188752944349367, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4125801589727227, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.5575123955704276, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9606574527884366, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9277948431850809, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.939489083802638, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24504726014528755, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02999449647781749, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30412310024943934, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1569667109270132, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20531688682553262, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.511866170077109, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30737250907136127, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5262560471173453, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.051581442743936834, + 0.28808257291456085, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9790858030799796, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8557661260282156, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.36218305985767596, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23228812370501528, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6202104009666045, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8867642180949732, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22763923689310117, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.683892145506164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15600116320152568, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8441515372048103, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7606861500676428, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7065017767052714, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16378490819181213, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2436941641993159, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24179674334818047, + 0.33003082683163454, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5592171765943758, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24805778500361175, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42431250531190945, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9250793225267471, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6342480713430114, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5202507413267883, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8366451214336564, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41107414664728825, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8830127077533332, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4229721229885849, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3907250704851828, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5473355612929265, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2579362446014144, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.29814407939629906, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7974491298876576, + 0.11314089555697293, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.09686198188162443, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.766867100263332, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.26442474662174176, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2933189470390257, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22549268624349572, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4961002633634892, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1580013775297051, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.21598290255717667, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4655927560826495, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8277472438455392, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41612131449467127, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3335383280743127, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10846113574338023, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6339633064975212, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9151650782934098, + 0.21942078738580506, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.549211704289178, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.900038868763936, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42750089237786226, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24495290801771097, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7807783442418869, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11582898086933058, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.273017019466652, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6167952968186619, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.805968619144528, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5592353328539852, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17408130039506065, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9140423648414512, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.397637193906782, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2429466103320661, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7576964175710638, + 0.27755909265772416, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35174929928544163, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.025116336045789667, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8200439777524748, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15720187100200855, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8405458279462585, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9743776693254922, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5093505030290263, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5248426457649971, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6903619402857111, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9615336231506699, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2657925151967536, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9335887667671454, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4655908804279567, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.589653875263392, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34991672103804417, + 0.8853866814617952, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2622863324354565, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5873699968555803, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35658721497888446, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07549717596969518, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30570308676663926, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.016710650584320863, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.060511009580757724, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.27730366170461584, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6467835576800469, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.78946400211903, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12673883796963992, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6211809769142388, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4535415813018152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.01900632641535227, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07429317586616391, + 0.08873439700873431, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.5183778209898324, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07045645847676274, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20871881410884752, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12099162032633193, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06756553561714795, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18622090431815963, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2550314449072233, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9548117463313748, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4963000444548905, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9069349499388143, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9577869735201033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4562239464601897, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3153033546637033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.38483711369107565, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30047291174136637, + 0.9659042903615469, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08121765524841995, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4948994117457166, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6527347570733716, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4342822182228542, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12011671561156101, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16733239805529432, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9971875623339772, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.652088843546415, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.892783518485851, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1797647996524161, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16357215367441935, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07445336658438717, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7175786171125849, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5373097051865485, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7588555623293853, + 0.24491199195515445, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6006434355933403, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2707709862387566, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5089645596354377, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10921056750583202, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4455355571312122, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9457933356842753, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12147470400771088, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03149362879979012, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.48572911640356164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3250179215480892, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35311147401543064, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2092635837818405, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49042076332409634, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40242274607334083, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.004886615073804279, + 
0.20426340630702144, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1838638359609932, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7753027700256112, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35858437729817183, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6549264421919153, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9518718749485302, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7867904941578512, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18952975452560894, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5722812243941315, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22334762573351363, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2530877482635505, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.630808154618444, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49578220340765145, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.015165276035877806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06819096090677312, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.3485257589215519, + 0.19486401838003786, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7005237519796033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9170064488757839, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3028155964556515, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9707152112263164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8647697904557152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4458911971404024, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08084238914494124, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.31600901382771396, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4219203574297017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3165211497269509, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5452055491181059, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9380819907542078, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07699622976151643, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6722737835384232, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5828829255010801, + 0.4074030107848773, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12009262426356326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1299589517611489, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7879216523704174, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6406279562372446, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3387666455887235, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.25359168817784106, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3664733566676899, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2961304938638407, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.884096601158495, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.594299500006145, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.841562685279157, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.844669921819271, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9523065435900523, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9996692237492393, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8429188149689719, + 0.5180064341547981, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35347561778319236, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5513515344510534, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9818366109994351, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7653855239166565, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.39074108458136203, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6052080786642133, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.01814107282507771, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6614808714120729, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5817911146666129, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6468528789424193, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5107046740684658, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7835858669708228, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5639340388879576, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.18955202007187844, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6703755117543645, + + }; + float data[4 * 4 * 16 * 1] = { + 0.9287460024109794, 0.2406040495018288, 0.4469466172807033, 0.18882162922891632, 0.76109939494347, + 0.8840655482887357, 0.9872180535026731, 0.11661622466386445, 0.01235434128678281, 0.6695347367567285, + 0.7330905362956434, 0.9500990356218342, 0.6457271811424727, 0.6912643273019475, 0.6703753186632049, + 0.5526387021634535, 0.08705872980636764, 0.4385500267268264, 0.5216635886669073, 0.4630255672949294, + 0.8447529479061752, 0.23010207882669031, 0.29600005325577006, 0.02000550424397074, 0.7628668574925462, + 0.5151224887847649, 0.8359422516194682, 0.0915549204131687, 0.28249447760074176, 0.5593289876304682, + 0.7529148849244871, 0.35147879233628254, 0.8522667732376881, 0.6188752944349367, 0.4125801589727227, + 0.5575123955704276, 0.9606574527884366, 0.9277948431850809, 0.939489083802638, 0.24504726014528755, + 0.02999449647781749, 0.30412310024943934, 0.1569667109270132, 0.20531688682553262, 0.511866170077109, + 0.30737250907136127, 0.5262560471173453, 0.051581442743936834, 0.28808257291456085, 0.9790858030799796, + 0.8557661260282156, 0.36218305985767596, 0.23228812370501528, 0.6202104009666045, 0.8867642180949732, + 0.22763923689310117, 0.683892145506164, 0.15600116320152568, 0.8441515372048103, 0.7606861500676428, + 0.7065017767052714, 0.16378490819181213, 0.2436941641993159, 0.24179674334818047, 0.33003082683163454, + 0.5592171765943758, 0.24805778500361175, 0.42431250531190945, 0.9250793225267471, 0.6342480713430114, + 0.5202507413267883, 0.8366451214336564, 0.41107414664728825, 0.8830127077533332, 0.4229721229885849, + 0.3907250704851828, 0.5473355612929265, 0.2579362446014144, 0.29814407939629906, 0.7974491298876576, + 0.11314089555697293, 0.09686198188162443, 0.766867100263332, 0.26442474662174176, 0.2933189470390257, + 0.22549268624349572, 0.4961002633634892, 
0.1580013775297051, 0.21598290255717667, 0.4655927560826495, + 0.8277472438455392, 0.41612131449467127, 0.3335383280743127, 0.10846113574338023, 0.6339633064975212, + 0.9151650782934098, 0.21942078738580506, 0.549211704289178, 0.900038868763936, 0.42750089237786226, + 0.24495290801771097, 0.7807783442418869, 0.11582898086933058, 0.273017019466652, 0.6167952968186619, + 0.805968619144528, 0.5592353328539852, 0.17408130039506065, 0.9140423648414512, 0.397637193906782, + 0.2429466103320661, 0.7576964175710638, 0.27755909265772416, 0.35174929928544163, 0.025116336045789667, + 0.8200439777524748, 0.15720187100200855, 0.8405458279462585, 0.9743776693254922, 0.5093505030290263, + 0.5248426457649971, 0.6903619402857111, 0.9615336231506699, 0.2657925151967536, 0.9335887667671454, + 0.4655908804279567, 0.589653875263392, 0.34991672103804417, 0.8853866814617952, 0.2622863324354565, + 0.5873699968555803, 0.35658721497888446, 0.07549717596969518, 0.30570308676663926, 0.016710650584320863, + 0.060511009580757724, 0.27730366170461584, 0.6467835576800469, 0.78946400211903, 0.12673883796963992, + 0.6211809769142388, 0.4535415813018152, 0.01900632641535227, 0.07429317586616391, 0.08873439700873431, + 0.5183778209898324, 0.07045645847676274, 0.20871881410884752, 0.12099162032633193, 0.06756553561714795, + 0.18622090431815963, 0.2550314449072233, 0.9548117463313748, 0.4963000444548905, 0.9069349499388143, + 0.9577869735201033, 0.4562239464601897, 0.3153033546637033, 0.38483711369107565, 0.30047291174136637, + 0.9659042903615469, 0.08121765524841995, 0.4948994117457166, 0.6527347570733716, 0.4342822182228542, + 0.12011671561156101, 0.16733239805529432, 0.9971875623339772, 0.652088843546415, 0.892783518485851, + 0.1797647996524161, 0.16357215367441935, 0.07445336658438717, 0.7175786171125849, 0.5373097051865485, + 0.7588555623293853, 0.24491199195515445, 0.6006434355933403, 0.2707709862387566, 0.5089645596354377, + 0.10921056750583202, 0.4455355571312122, 0.9457933356842753, 
0.12147470400771088, 0.03149362879979012, + 0.48572911640356164, 0.3250179215480892, 0.35311147401543064, 0.2092635837818405, 0.49042076332409634, + 0.40242274607334083, 0.004886615073804279, 0.20426340630702144, 0.1838638359609932, 0.7753027700256112, + 0.35858437729817183, 0.6549264421919153, 0.9518718749485302, 0.7867904941578512, 0.18952975452560894, + 0.5722812243941315, 0.22334762573351363, 0.2530877482635505, 0.630808154618444, 0.49578220340765145, + 0.015165276035877806, 0.06819096090677312, 0.3485257589215519, 0.19486401838003786, 0.7005237519796033, + 0.9170064488757839, 0.3028155964556515, 0.9707152112263164, 0.8647697904557152, 0.4458911971404024, + 0.08084238914494124, 0.31600901382771396, 0.4219203574297017, 0.3165211497269509, 0.5452055491181059, + 0.9380819907542078, 0.07699622976151643, 0.6722737835384232, 0.5828829255010801, 0.4074030107848773, + 0.12009262426356326, 0.1299589517611489, 0.7879216523704174, 0.6406279562372446, 0.3387666455887235, + 0.25359168817784106, 0.3664733566676899, 0.2961304938638407, 0.884096601158495, 0.594299500006145, + 0.841562685279157, 0.844669921819271, 0.9523065435900523, 0.9996692237492393, 0.8429188149689719, + 0.5180064341547981, 0.35347561778319236, 0.5513515344510534, 0.9818366109994351, 0.7653855239166565, + 0.39074108458136203, 0.6052080786642133, 0.01814107282507771, 0.6614808714120729, 0.5817911146666129, + 0.6468528789424193, 0.5107046740684658, 0.7835858669708228, 0.5639340388879576, 0.18955202007187844, + 0.6703755117543645, + }; + + TransArgs args{reinterpret_cast(data_6d), + FORMAT_C1HWNCoC0, + FORMAT_HWCN, + {1, 4, 4, 1, 16, 16}, + {4, 4, 16, 1}, + DT_FLOAT}; + TransResult result; + + FormatTransferC1hwncoc0Hwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferC1hwncoc0Hwcn, 
sixd_to_hwcn_fp32_success_gt_cube) { + float data_6d[2 * 4 * 4 * 1 * 16 * 16] = { + 0.6720256978880832, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6298321784394392, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8295463903639503, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08068822718676427, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2763754699096522, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.46152014633273986, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20230989654712017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8265439766280472, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6978840683277164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3947669140970781, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6967164722950044, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8077474483635809, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19827523419269066, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3826597464491931, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5075781383319881, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16553192090689484, + 0.38764251162629126, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.030725684367002737, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5854377878139135, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9635834400583236, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49041179401730606, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.621085945352873, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.44172994709534286, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5833817236813902, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8600448001807334, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.442614470737162, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17070317633972198, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9988020807965683, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2240540555141428, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6044781869127227, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.30577703539138534, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02297159726606912, + 0.902324022353879, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5538644692423377, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18435254776067966, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3895724610639357, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19087655730583952, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.056732904450757515, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.25552017902227, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06563475489528825, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.756805330230557, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10559137855731648, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8400314490094558, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8792165029212121, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8836700846101576, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6520614003045484, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14016100663096442, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.581151084050165, + 0.3855507003349319, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7520089723082347, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6856546484511095, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9754907615630698, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14302334875233447, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7040998143942422, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6427806893397862, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11796374565827439, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5399207555415639, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.638659628241251, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9749369607001012, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3505202643968658, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08579439029326386, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15728399281291716, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08269816307243716, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6624434213972792, + 0.8329561803018197, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.032120763663658014, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9566737019034673, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24486631938044456, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10932492657226744, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9610284338669389, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41097347240583404, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0735833696925311, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3780115946364353, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7357275308740584, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9117467893165317, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23215135900531814, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2552443333765917, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.4365079028469411, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8294987620958296, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34605401854979034, + 0.7609918535852507, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6057237297402626, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.29981454756176007, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17165620361712652, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18458981725403534, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.004254743220997059, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06033948937486866, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7790401829914644, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11568714682253833, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.43683466739272303, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8819350949367237, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8005961689683059, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08186128360840594, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7976491103318911, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2240607437129538, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4000304869187492, + 0.012780956383760311, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6224623388838404, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11070174075269568, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5259081698751006, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3517889145201747, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3628829064705983, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8437125312778323, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6056651611116248, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7177350438363246, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6999540632573293, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3403428629909401, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07348171626497002, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.39195415450273896, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.449983124098813, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1982705926999152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3580904389416687, + 0.3676453029272587, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5707966169974507, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.506818390006326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1751770315457628, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.718498080506697, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8079214106434996, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8595179550008838, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40706953073355634, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2043863176329428, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.43753654062005165, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12359348546795079, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8688872460691682, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.26121855072313027, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.56509648986008, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.550065776630837, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8998629077235578, + 0.5642214251740352, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05231827080996221, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8038883833385772, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30952956046069646, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3598608732338813, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6145804575957855, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3320551452477549, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9638909795216967, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08869226956064591, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7530597192935157, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.31240869693130824, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40612578513052544, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0640935173214825, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9762953742908935, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9345851271589867, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.013659298980408296, + 0.5776657549782201, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3210961667727359, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8655969484625035, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4905974887025172, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.020861530507902182, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41836177257417506, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22840946125158446, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.999760661824541, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5176796791644477, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7590124368612039, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.29805000994564823, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.004710630015977935, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6177707492426662, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7451246003472498, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1523039655234233, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9075278261410844, + 0.289575591103169, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3070639729185224, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9742328874167968, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6318899822466207, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9217385935243647, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19404567944345485, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3139318217228174, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8371916762608241, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1506427751612257, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2829262340657469, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49640016645570884, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8637019386815881, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8771476858744097, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4744543148622167, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18561372295225653, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7729955705289158, + 0.12537024160436816, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9736015268504483, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7426156935434283, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07981904307037135, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2516303608793887, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4821737490291119, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3919587137996561, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6940336352360019, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20204425364829914, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.061410960956086225, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9501863128037674, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.47696633050116755, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17107444463847665, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08457274936327863, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06611426945217114, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42385232680469254, + 0.4515792934324635, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3980126878557152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6118931459268907, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9536200260871817, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6941502008716358, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14593827491155287, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.653804411206232, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17122496175413993, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8935085488292329, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4470478148103193, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3008635842212459, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6544943596338767, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.00840302099207646, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.32006536950974807, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6513831279668915, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5465500886719159, + 0.17860722906905457, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6553483167887741, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5329073894590799, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6082473386866861, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14094030378222067, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4377704114236626, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8187469149625034, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7573680466444704, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5892725685846012, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8539266550501383, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.09341956386406303, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6593752073372753, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24313527975813853, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5784193841594126, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17778501843915462, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5629482573422732, + 0.7087739397180405, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8282323774102832, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8730568803071196, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41526292072882587, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7439760697782625, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7918970028413459, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4563153895380918, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5717854219119896, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.01748112771668553, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22682216394271615, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.9958918322474324, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.48183531723667616, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8935003354855042, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40177759725197093, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6872178583919453, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7159034109642741, + 0.29836411859959233, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6068625352293453, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5925920016853471, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8072024784646464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9109206349529911, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.722094298160962, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10120402884670032, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07658301410630952, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11917426063790237, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9973204706041966, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7135005026702921, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1702573277105751, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14969293539722228, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7224044611911594, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49954902657480205, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8418937883473929, + 0.7574068656303569, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8345474929377174, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6092749558646937, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0981101859240252, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23183561828290822, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4928795451972683, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8445559494723369, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5667390062688675, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.08684759407061038, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9827470249624215, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6501001434308644, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.4390783631610483, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5535182266162149, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14208763968019233, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8956981051296642, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.32727466525204096, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5560394910334464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4261919078800247, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8948665916989693, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4927558911152501, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.25548747469984023, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8044527167142301, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.008000018527419495, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15764366047828826, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2280696013454805, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5298168210264326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9221856932277527, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5202041093485866, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8916464595265667, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15828089333695605, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22935018929177786, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14044259339375098, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + float data[4 * 4 * 18 * 1] = { + 0.6720256978880832, 
0.6298321784394392, 0.8295463903639503, 0.08068822718676427, 0.2763754699096522, + 0.46152014633273986, 0.20230989654712017, 0.8265439766280472, 0.6978840683277164, 0.3947669140970781, + 0.6967164722950044, 0.8077474483635809, 0.19827523419269066, 0.3826597464491931, 0.5075781383319881, + 0.16553192090689484, 0.7574068656303569, 0.8345474929377174, 0.38764251162629126, 0.030725684367002737, + 0.5854377878139135, 0.9635834400583236, 0.49041179401730606, 0.621085945352873, 0.44172994709534286, + 0.5833817236813902, 0.8600448001807334, 0.442614470737162, 0.17070317633972198, 0.9988020807965683, + 0.2240540555141428, 0.6044781869127227, 0.30577703539138534, 0.02297159726606912, 0.6092749558646937, + 0.0981101859240252, 0.902324022353879, 0.5538644692423377, 0.18435254776067966, 0.3895724610639357, + 0.19087655730583952, 0.056732904450757515, 0.25552017902227, 0.06563475489528825, 0.756805330230557, + 0.10559137855731648, 0.8400314490094558, 0.8792165029212121, 0.8836700846101576, 0.6520614003045484, + 0.14016100663096442, 0.581151084050165, 0.23183561828290822, 0.4928795451972683, 0.3855507003349319, + 0.7520089723082347, 0.6856546484511095, 0.9754907615630698, 0.14302334875233447, 0.7040998143942422, + 0.6427806893397862, 0.11796374565827439, 0.5399207555415639, 0.638659628241251, 0.9749369607001012, + 0.3505202643968658, 0.08579439029326386, 0.15728399281291716, 0.08269816307243716, 0.6624434213972792, + 0.8445559494723369, 0.5667390062688675, 0.8329561803018197, 0.032120763663658014, 0.9566737019034673, + 0.24486631938044456, 0.10932492657226744, 0.9610284338669389, 0.41097347240583404, 0.0735833696925311, + 0.3780115946364353, 0.7357275308740584, 0.9117467893165317, 0.23215135900531814, 0.2552443333765917, + 0.4365079028469411, 0.8294987620958296, 0.34605401854979034, 0.08684759407061038, 0.9827470249624215, + 0.7609918535852507, 0.6057237297402626, 0.29981454756176007, 0.17165620361712652, 0.18458981725403534, + 0.004254743220997059, 0.06033948937486866, 
0.7790401829914644, 0.11568714682253833, 0.43683466739272303, + 0.8819350949367237, 0.8005961689683059, 0.08186128360840594, 0.7976491103318911, 0.2240607437129538, + 0.4000304869187492, 0.6501001434308644, 0.4390783631610483, 0.012780956383760311, 0.6224623388838404, + 0.11070174075269568, 0.5259081698751006, 0.3517889145201747, 0.3628829064705983, 0.8437125312778323, + 0.6056651611116248, 0.7177350438363246, 0.6999540632573293, 0.3403428629909401, 0.07348171626497002, + 0.39195415450273896, 0.449983124098813, 0.1982705926999152, 0.3580904389416687, 0.5535182266162149, + 0.14208763968019233, 0.3676453029272587, 0.5707966169974507, 0.506818390006326, 0.1751770315457628, + 0.718498080506697, 0.8079214106434996, 0.8595179550008838, 0.40706953073355634, 0.2043863176329428, + 0.43753654062005165, 0.12359348546795079, 0.8688872460691682, 0.26121855072313027, 0.56509648986008, + 0.550065776630837, 0.8998629077235578, 0.8956981051296642, 0.32727466525204096, 0.5642214251740352, + 0.05231827080996221, 0.8038883833385772, 0.30952956046069646, 0.3598608732338813, 0.6145804575957855, + 0.3320551452477549, 0.9638909795216967, 0.08869226956064591, 0.7530597192935157, 0.31240869693130824, + 0.40612578513052544, 0.0640935173214825, 0.9762953742908935, 0.9345851271589867, 0.013659298980408296, + 0.5560394910334464, 0.4261919078800247, 0.5776657549782201, 0.3210961667727359, 0.8655969484625035, + 0.4905974887025172, 0.020861530507902182, 0.41836177257417506, 0.22840946125158446, 0.999760661824541, + 0.5176796791644477, 0.7590124368612039, 0.29805000994564823, 0.004710630015977935, 0.6177707492426662, + 0.7451246003472498, 0.1523039655234233, 0.9075278261410844, 0.8948665916989693, 0.4927558911152501, + 0.289575591103169, 0.3070639729185224, 0.9742328874167968, 0.6318899822466207, 0.9217385935243647, + 0.19404567944345485, 0.3139318217228174, 0.8371916762608241, 0.1506427751612257, 0.2829262340657469, + 0.49640016645570884, 0.8637019386815881, 0.8771476858744097, 0.4744543148622167, 
0.18561372295225653, + 0.7729955705289158, 0.25548747469984023, 0.8044527167142301, 0.12537024160436816, 0.9736015268504483, + 0.7426156935434283, 0.07981904307037135, 0.2516303608793887, 0.4821737490291119, 0.3919587137996561, + 0.6940336352360019, 0.20204425364829914, 0.061410960956086225, 0.9501863128037674, 0.47696633050116755, + 0.17107444463847665, 0.08457274936327863, 0.06611426945217114, 0.42385232680469254, 0.008000018527419495, + 0.15764366047828826, 0.4515792934324635, 0.3980126878557152, 0.6118931459268907, 0.9536200260871817, + 0.6941502008716358, 0.14593827491155287, 0.653804411206232, 0.17122496175413993, 0.8935085488292329, + 0.4470478148103193, 0.3008635842212459, 0.6544943596338767, 0.00840302099207646, 0.32006536950974807, + 0.6513831279668915, 0.5465500886719159, 0.2280696013454805, 0.5298168210264326, 0.17860722906905457, + 0.6553483167887741, 0.5329073894590799, 0.6082473386866861, 0.14094030378222067, 0.4377704114236626, + 0.8187469149625034, 0.7573680466444704, 0.5892725685846012, 0.8539266550501383, 0.09341956386406303, + 0.6593752073372753, 0.24313527975813853, 0.5784193841594126, 0.17778501843915462, 0.5629482573422732, + 0.9221856932277527, 0.5202041093485866, 0.7087739397180405, 0.8282323774102832, 0.8730568803071196, + 0.41526292072882587, 0.7439760697782625, 0.7918970028413459, 0.4563153895380918, 0.5717854219119896, + 0.01748112771668553, 0.22682216394271615, 0.9958918322474324, 0.48183531723667616, 0.8935003354855042, + 0.40177759725197093, 0.6872178583919453, 0.7159034109642741, 0.8916464595265667, 0.15828089333695605, + 0.29836411859959233, 0.6068625352293453, 0.5925920016853471, 0.8072024784646464, 0.9109206349529911, + 0.722094298160962, 0.10120402884670032, 0.07658301410630952, 0.11917426063790237, 0.9973204706041966, + 0.7135005026702921, 0.1702573277105751, 0.14969293539722228, 0.7224044611911594, 0.49954902657480205, + 0.8418937883473929, 0.22935018929177786, 0.14044259339375098, + }; + + TransArgs 
args{reinterpret_cast(data_6d), + FORMAT_C1HWNCoC0, + FORMAT_HWCN, + {2, 4, 4, 1, 16, 16}, + {4, 4, 18, 1}, + DT_FLOAT}; + TransResult result; + + FormatTransferC1hwncoc0Hwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_fractal_nz_unittest.cc b/tests/ut/ge/common/format_transfer_fractal_nz_unittest.cc new file mode 100644 index 00000000..bf2a35be --- /dev/null +++ b/tests/ut/ge/common/format_transfer_fractal_nz_unittest.cc @@ -0,0 +1,9167 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "common/formats/format_transfers/format_transfer_fractal_nz.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/formats/formats.h" +#include "common/fp16_t.h" +#include "time.h" + +namespace ge { +namespace formats { +class UtestFormatTransferNdFractNz : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferNdFractNz, nd_shape1_uint8_1) { + uint8_t data[1] = { + 176, + }; + uint8_t ret[1 * 1 * 32 * 32] = { + 176, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {1}, {1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {1, 1, 32, 32}, {1}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) 
/ sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape1_uint8_2) { + uint8_t data[32] = { + 194, 182, 243, 9, 141, 3, 25, 168, 123, 253, 25, 2, 76, 207, 206, 214, + 212, 36, 10, 104, 185, 61, 195, 52, 187, 87, 54, 43, 87, 13, 67, 85, + }; + uint8_t ret[1 * 1 * 32 * 32] = { + 194, 182, 243, 9, 141, 3, 25, 168, 123, 253, 25, 2, 76, 207, 206, 214, 212, 36, 10, 104, 185, 61, 195, 52, 187, + 87, 54, 43, 87, 13, 67, 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {32}, {1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + 
EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {1, 1, 32, 32}, {32}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape1_uint8_3) { + uint8_t data[33] = { + 173, 126, 65, 202, 177, 161, 81, 98, 165, 98, 206, 162, 209, 58, 160, 171, 124, + 99, 45, 160, 68, 125, 39, 2, 43, 36, 211, 200, 250, 63, 195, 121, 95, + }; + uint8_t ret[2 * 1 * 32 * 32] = { + 173, 126, 65, 202, 177, 161, 81, 98, 165, 98, 206, 162, 209, 58, 160, 171, 124, 99, 45, 160, 68, 125, 39, 2, 43, + 36, 211, 200, 250, 63, 195, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 95, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {33}, {2, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 1, 32, 32}, {33}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, 
nd_shape2_uint8_1) { + uint8_t data[32 * 32] = { + 47, 78, 47, 180, 246, 76, 157, 127, 63, 0, 168, 23, 148, 198, 180, 190, 43, 187, 76, 67, 77, 246, 11, + 149, 240, 236, 136, 123, 51, 95, 7, 163, 163, 64, 157, 230, 247, 122, 67, 106, 150, 20, 231, 118, 43, 208, + 190, 201, 149, 180, 47, 223, 69, 51, 222, 2, 254, 161, 69, 17, 122, 79, 98, 85, 77, 97, 127, 144, 216, + 217, 230, 175, 115, 121, 86, 110, 81, 37, 143, 189, 23, 63, 214, 235, 188, 193, 125, 236, 57, 76, 192, 144, + 20, 2, 247, 153, 101, 114, 201, 218, 221, 62, 203, 222, 1, 229, 100, 223, 65, 62, 78, 210, 115, 149, 115, + 171, 55, 186, 67, 16, 154, 252, 104, 6, 154, 142, 129, 223, 84, 221, 82, 202, 113, 167, 143, 136, 203, 237, + 195, 40, 47, 228, 125, 49, 156, 201, 187, 96, 2, 120, 227, 239, 134, 105, 153, 232, 228, 42, 65, 11, 203, + 79, 90, 240, 54, 131, 16, 30, 171, 98, 228, 61, 160, 117, 80, 38, 223, 25, 15, 227, 245, 11, 241, 195, + 178, 250, 187, 220, 80, 77, 101, 21, 79, 194, 33, 95, 91, 165, 93, 74, 120, 124, 214, 92, 101, 181, 89, + 79, 159, 229, 184, 133, 188, 200, 200, 97, 223, 92, 42, 56, 51, 21, 103, 65, 8, 10, 25, 232, 29, 9, + 209, 126, 37, 175, 179, 92, 239, 25, 253, 53, 154, 202, 233, 11, 83, 146, 160, 3, 209, 227, 68, 162, 190, + 175, 206, 153, 230, 120, 249, 65, 105, 117, 145, 169, 247, 155, 163, 78, 184, 172, 41, 67, 49, 177, 70, 22, + 44, 205, 35, 95, 188, 140, 27, 123, 187, 116, 80, 189, 11, 161, 20, 181, 10, 159, 29, 36, 48, 165, 108, + 201, 153, 42, 64, 57, 29, 195, 190, 65, 112, 13, 150, 18, 49, 50, 168, 70, 206, 155, 58, 91, 215, 209, + 250, 76, 81, 114, 131, 159, 208, 97, 164, 238, 186, 21, 106, 179, 70, 177, 171, 204, 31, 39, 120, 161, 143, + 43, 15, 28, 218, 8, 95, 43, 134, 116, 33, 160, 123, 184, 235, 8, 245, 131, 111, 239, 159, 42, 71, 224, + 120, 169, 254, 219, 131, 234, 207, 85, 23, 117, 39, 212, 156, 210, 11, 51, 148, 198, 35, 163, 155, 102, 183, + 11, 59, 176, 17, 143, 64, 101, 7, 146, 164, 140, 18, 91, 213, 216, 103, 84, 252, 239, 31, 169, 239, 128, + 99, 7, 94, 175, 
129, 185, 97, 175, 200, 231, 100, 44, 242, 246, 73, 211, 89, 143, 141, 23, 116, 118, 97, + 104, 93, 55, 118, 246, 236, 38, 58, 178, 53, 19, 2, 232, 186, 253, 109, 63, 56, 37, 242, 64, 47, 246, + 84, 23, 215, 134, 200, 20, 60, 104, 228, 36, 172, 100, 27, 128, 219, 56, 155, 233, 197, 157, 66, 122, 120, + 4, 73, 240, 165, 70, 225, 193, 40, 124, 120, 57, 213, 109, 159, 162, 254, 76, 5, 183, 184, 98, 104, 138, + 226, 53, 20, 165, 170, 84, 177, 176, 123, 223, 225, 82, 127, 241, 125, 124, 21, 109, 65, 106, 235, 254, 37, + 226, 106, 172, 188, 184, 89, 22, 20, 175, 9, 42, 166, 236, 17, 250, 249, 251, 143, 87, 185, 47, 223, 171, + 217, 43, 147, 129, 126, 175, 105, 65, 213, 205, 116, 136, 6, 126, 195, 87, 150, 85, 29, 2, 144, 10, 165, + 120, 156, 193, 246, 168, 65, 108, 250, 32, 118, 205, 95, 10, 87, 48, 13, 251, 218, 42, 197, 14, 151, 145, + 123, 5, 153, 90, 11, 207, 5, 93, 2, 131, 236, 10, 51, 157, 62, 241, 194, 45, 181, 92, 190, 7, 222, + 194, 95, 121, 31, 92, 188, 69, 39, 238, 63, 19, 155, 144, 178, 235, 34, 200, 158, 89, 69, 59, 74, 97, + 52, 25, 4, 84, 244, 65, 181, 10, 119, 34, 30, 7, 22, 239, 176, 15, 7, 159, 92, 195, 171, 101, 174, + 32, 201, 218, 20, 231, 54, 252, 158, 166, 63, 250, 45, 49, 151, 170, 208, 99, 161, 27, 122, 72, 213, 177, + 218, 140, 56, 99, 185, 216, 95, 240, 139, 108, 116, 138, 253, 146, 41, 130, 114, 66, 241, 140, 117, 196, 35, + 76, 223, 198, 248, 58, 23, 156, 9, 128, 139, 87, 125, 186, 137, 18, 41, 166, 230, 165, 44, 24, 183, 6, + 20, 84, 26, 44, 26, 173, 39, 3, 97, 146, 165, 223, 190, 231, 52, 65, 219, 183, 85, 150, 198, 53, 156, + 220, 30, 42, 95, 107, 245, 122, 93, 124, 217, 227, 188, 154, 169, 51, 246, 143, 219, 60, 180, 42, 211, 99, + 243, 128, 132, 130, 104, 103, 204, 227, 179, 236, 119, 175, 26, 33, 67, 174, 178, 94, 30, 182, 246, 58, 121, + 253, 220, 141, 141, 234, 82, 223, 199, 212, 94, 2, 95, 50, 169, 233, 109, 14, 96, 75, 214, 107, 181, 15, + 141, 180, 100, 75, 247, 159, 212, 177, 126, 166, 112, 192, 111, 100, 91, 150, 236, 62, 140, 
60, 158, 150, 53, + 237, 156, 253, 37, 143, 95, 150, 66, 252, 112, 248, 115, 46, 30, 93, 21, 193, 82, 20, 185, 26, 167, 197, + 165, 161, 207, 245, 44, 58, 110, 63, 81, 169, 66, 55, 85, 56, 4, 58, 145, 226, 167, 47, 41, 176, 228, + 238, 190, 28, 111, 216, 142, 230, 229, 4, 205, 52, 105, 40, 159, 222, 41, 189, 195, 20, 254, 15, 141, 37, + 73, 136, 165, 12, 166, 70, 39, 204, 28, 113, 233, 142, 99, 4, 2, 87, 29, 165, 57, 152, 163, 167, 42, + 127, 189, 21, 164, 141, 93, 149, 142, 85, 48, 117, 38, 94, 244, 120, 80, 180, 10, 200, 80, 249, 213, 209, + 104, 27, 145, 116, 144, 180, 182, 89, 166, 127, 19, 214, 210, 165, 16, 158, 31, 166, 62, 235, 38, 229, 12, + 130, 116, 52, 230, 149, 30, 185, 119, 207, 99, 104, 228, 198, 52, 192, 69, 97, 237, 142, 19, 2, 194, 34, + 197, 4, 82, 50, 0, 101, 238, 37, 62, 153, 106, 183, + }; + uint8_t ret[1 * 1 * 32 * 32] = { + 47, 78, 47, 180, 246, 76, 157, 127, 63, 0, 168, 23, 148, 198, 180, 190, 43, 187, 76, 67, 77, 246, 11, + 149, 240, 236, 136, 123, 51, 95, 7, 163, 163, 64, 157, 230, 247, 122, 67, 106, 150, 20, 231, 118, 43, 208, + 190, 201, 149, 180, 47, 223, 69, 51, 222, 2, 254, 161, 69, 17, 122, 79, 98, 85, 77, 97, 127, 144, 216, + 217, 230, 175, 115, 121, 86, 110, 81, 37, 143, 189, 23, 63, 214, 235, 188, 193, 125, 236, 57, 76, 192, 144, + 20, 2, 247, 153, 101, 114, 201, 218, 221, 62, 203, 222, 1, 229, 100, 223, 65, 62, 78, 210, 115, 149, 115, + 171, 55, 186, 67, 16, 154, 252, 104, 6, 154, 142, 129, 223, 84, 221, 82, 202, 113, 167, 143, 136, 203, 237, + 195, 40, 47, 228, 125, 49, 156, 201, 187, 96, 2, 120, 227, 239, 134, 105, 153, 232, 228, 42, 65, 11, 203, + 79, 90, 240, 54, 131, 16, 30, 171, 98, 228, 61, 160, 117, 80, 38, 223, 25, 15, 227, 245, 11, 241, 195, + 178, 250, 187, 220, 80, 77, 101, 21, 79, 194, 33, 95, 91, 165, 93, 74, 120, 124, 214, 92, 101, 181, 89, + 79, 159, 229, 184, 133, 188, 200, 200, 97, 223, 92, 42, 56, 51, 21, 103, 65, 8, 10, 25, 232, 29, 9, + 209, 126, 37, 175, 179, 92, 239, 25, 253, 53, 154, 202, 233, 11, 83, 
146, 160, 3, 209, 227, 68, 162, 190, + 175, 206, 153, 230, 120, 249, 65, 105, 117, 145, 169, 247, 155, 163, 78, 184, 172, 41, 67, 49, 177, 70, 22, + 44, 205, 35, 95, 188, 140, 27, 123, 187, 116, 80, 189, 11, 161, 20, 181, 10, 159, 29, 36, 48, 165, 108, + 201, 153, 42, 64, 57, 29, 195, 190, 65, 112, 13, 150, 18, 49, 50, 168, 70, 206, 155, 58, 91, 215, 209, + 250, 76, 81, 114, 131, 159, 208, 97, 164, 238, 186, 21, 106, 179, 70, 177, 171, 204, 31, 39, 120, 161, 143, + 43, 15, 28, 218, 8, 95, 43, 134, 116, 33, 160, 123, 184, 235, 8, 245, 131, 111, 239, 159, 42, 71, 224, + 120, 169, 254, 219, 131, 234, 207, 85, 23, 117, 39, 212, 156, 210, 11, 51, 148, 198, 35, 163, 155, 102, 183, + 11, 59, 176, 17, 143, 64, 101, 7, 146, 164, 140, 18, 91, 213, 216, 103, 84, 252, 239, 31, 169, 239, 128, + 99, 7, 94, 175, 129, 185, 97, 175, 200, 231, 100, 44, 242, 246, 73, 211, 89, 143, 141, 23, 116, 118, 97, + 104, 93, 55, 118, 246, 236, 38, 58, 178, 53, 19, 2, 232, 186, 253, 109, 63, 56, 37, 242, 64, 47, 246, + 84, 23, 215, 134, 200, 20, 60, 104, 228, 36, 172, 100, 27, 128, 219, 56, 155, 233, 197, 157, 66, 122, 120, + 4, 73, 240, 165, 70, 225, 193, 40, 124, 120, 57, 213, 109, 159, 162, 254, 76, 5, 183, 184, 98, 104, 138, + 226, 53, 20, 165, 170, 84, 177, 176, 123, 223, 225, 82, 127, 241, 125, 124, 21, 109, 65, 106, 235, 254, 37, + 226, 106, 172, 188, 184, 89, 22, 20, 175, 9, 42, 166, 236, 17, 250, 249, 251, 143, 87, 185, 47, 223, 171, + 217, 43, 147, 129, 126, 175, 105, 65, 213, 205, 116, 136, 6, 126, 195, 87, 150, 85, 29, 2, 144, 10, 165, + 120, 156, 193, 246, 168, 65, 108, 250, 32, 118, 205, 95, 10, 87, 48, 13, 251, 218, 42, 197, 14, 151, 145, + 123, 5, 153, 90, 11, 207, 5, 93, 2, 131, 236, 10, 51, 157, 62, 241, 194, 45, 181, 92, 190, 7, 222, + 194, 95, 121, 31, 92, 188, 69, 39, 238, 63, 19, 155, 144, 178, 235, 34, 200, 158, 89, 69, 59, 74, 97, + 52, 25, 4, 84, 244, 65, 181, 10, 119, 34, 30, 7, 22, 239, 176, 15, 7, 159, 92, 195, 171, 101, 174, + 32, 201, 218, 20, 231, 54, 252, 158, 
166, 63, 250, 45, 49, 151, 170, 208, 99, 161, 27, 122, 72, 213, 177, + 218, 140, 56, 99, 185, 216, 95, 240, 139, 108, 116, 138, 253, 146, 41, 130, 114, 66, 241, 140, 117, 196, 35, + 76, 223, 198, 248, 58, 23, 156, 9, 128, 139, 87, 125, 186, 137, 18, 41, 166, 230, 165, 44, 24, 183, 6, + 20, 84, 26, 44, 26, 173, 39, 3, 97, 146, 165, 223, 190, 231, 52, 65, 219, 183, 85, 150, 198, 53, 156, + 220, 30, 42, 95, 107, 245, 122, 93, 124, 217, 227, 188, 154, 169, 51, 246, 143, 219, 60, 180, 42, 211, 99, + 243, 128, 132, 130, 104, 103, 204, 227, 179, 236, 119, 175, 26, 33, 67, 174, 178, 94, 30, 182, 246, 58, 121, + 253, 220, 141, 141, 234, 82, 223, 199, 212, 94, 2, 95, 50, 169, 233, 109, 14, 96, 75, 214, 107, 181, 15, + 141, 180, 100, 75, 247, 159, 212, 177, 126, 166, 112, 192, 111, 100, 91, 150, 236, 62, 140, 60, 158, 150, 53, + 237, 156, 253, 37, 143, 95, 150, 66, 252, 112, 248, 115, 46, 30, 93, 21, 193, 82, 20, 185, 26, 167, 197, + 165, 161, 207, 245, 44, 58, 110, 63, 81, 169, 66, 55, 85, 56, 4, 58, 145, 226, 167, 47, 41, 176, 228, + 238, 190, 28, 111, 216, 142, 230, 229, 4, 205, 52, 105, 40, 159, 222, 41, 189, 195, 20, 254, 15, 141, 37, + 73, 136, 165, 12, 166, 70, 39, 204, 28, 113, 233, 142, 99, 4, 2, 87, 29, 165, 57, 152, 163, 167, 42, + 127, 189, 21, 164, 141, 93, 149, 142, 85, 48, 117, 38, 94, 244, 120, 80, 180, 10, 200, 80, 249, 213, 209, + 104, 27, 145, 116, 144, 180, 182, 89, 166, 127, 19, 214, 210, 165, 16, 158, 31, 166, 62, 235, 38, 229, 12, + 130, 116, 52, 230, 149, 30, 185, 119, 207, 99, 104, 228, 198, 52, 192, 69, 97, 237, 142, 19, 2, 194, 34, + 197, 4, 82, 50, 0, 101, 238, 37, 62, 153, 106, 183, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {32, 32}, {1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + 
EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {1, 1, 32, 32}, {32, 32}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape2_uint8_2) { + uint8_t data[30 * 33] = { + 127, 144, 140, 41, 204, 45, 12, 150, 135, 155, 45, 38, 69, 114, 12, 212, 232, 135, 115, 165, 156, 127, 85, + 184, 113, 175, 123, 248, 81, 184, 162, 162, 56, 140, 68, 78, 157, 101, 218, 13, 133, 233, 167, 229, 236, 110, + 249, 168, 0, 45, 159, 73, 121, 70, 110, 127, 115, 171, 68, 237, 38, 121, 119, 114, 192, 105, 95, 167, 216, + 200, 80, 17, 111, 172, 206, 106, 122, 203, 51, 105, 2, 191, 38, 193, 247, 83, 252, 230, 45, 117, 215, 172, + 217, 186, 91, 241, 165, 168, 104, 205, 176, 172, 114, 232, 173, 152, 40, 15, 10, 144, 35, 120, 102, 161, 141, + 121, 191, 167, 69, 49, 69, 18, 105, 98, 56, 82, 78, 28, 150, 122, 223, 175, 125, 38, 72, 225, 248, 124, + 205, 12, 74, 18, 157, 5, 249, 179, 137, 228, 92, 50, 6, 27, 9, 207, 155, 135, 36, 3, 170, 54, 107, + 147, 225, 92, 47, 228, 234, 206, 22, 31, 28, 112, 167, 209, 46, 75, 10, 111, 113, 230, 27, 169, 225, 35, + 82, 117, 111, 2, 153, 96, 33, 243, 118, 174, 168, 164, 161, 152, 137, 161, 23, 13, 53, 51, 240, 236, 185, + 16, 62, 175, 63, 80, 97, 6, 221, 97, 103, 92, 216, 85, 225, 154, 27, 141, 146, 184, 199, 132, 216, 165, + 234, 108, 139, 49, 190, 164, 196, 195, 53, 219, 239, 87, 101, 183, 48, 98, 222, 66, 99, 172, 191, 189, 189, + 174, 131, 209, 108, 185, 141, 212, 124, 169, 117, 9, 131, 155, 107, 122, 212, 95, 9, 246, 202, 13, 188, 25, + 35, 20, 138, 155, 7, 18, 33, 251, 49, 142, 157, 215, 125, 20, 183, 8, 252, 122, 247, 11, 47, 37, 140, + 60, 12, 45, 
192, 21, 176, 116, 130, 96, 64, 97, 108, 214, 58, 129, 227, 1, 94, 4, 252, 138, 100, 240, + 52, 178, 12, 148, 160, 143, 84, 60, 165, 21, 112, 249, 173, 171, 83, 218, 72, 126, 157, 45, 78, 150, 65, + 165, 107, 158, 11, 165, 136, 20, 37, 209, 141, 138, 108, 220, 69, 177, 107, 194, 144, 130, 135, 226, 106, 46, + 24, 88, 47, 67, 37, 241, 73, 129, 223, 129, 218, 127, 156, 145, 113, 53, 226, 173, 84, 230, 82, 166, 244, + 12, 219, 12, 203, 172, 118, 10, 170, 252, 179, 144, 217, 185, 130, 149, 22, 58, 38, 142, 246, 77, 186, 214, + 121, 129, 120, 125, 192, 205, 231, 165, 1, 3, 246, 53, 221, 119, 93, 140, 189, 94, 114, 75, 116, 76, 45, + 29, 93, 237, 143, 234, 225, 222, 67, 37, 98, 37, 144, 222, 49, 215, 156, 56, 185, 109, 75, 143, 36, 165, + 220, 227, 142, 19, 127, 232, 84, 235, 202, 56, 184, 204, 48, 155, 75, 183, 74, 75, 55, 77, 246, 21, 202, + 184, 248, 84, 89, 47, 213, 194, 167, 144, 184, 108, 47, 60, 78, 12, 221, 30, 169, 162, 247, 1, 96, 225, + 73, 6, 143, 192, 134, 162, 231, 174, 172, 145, 199, 45, 135, 117, 70, 218, 226, 94, 43, 215, 130, 161, 139, + 48, 231, 191, 142, 189, 140, 185, 194, 41, 230, 160, 105, 211, 130, 141, 249, 88, 240, 38, 13, 12, 152, 248, + 143, 250, 120, 215, 176, 246, 236, 131, 228, 187, 172, 136, 95, 143, 154, 59, 111, 150, 35, 82, 33, 11, 126, + 92, 205, 91, 249, 125, 7, 112, 220, 189, 234, 66, 58, 161, 149, 7, 38, 101, 109, 120, 249, 212, 164, 72, + 221, 162, 96, 31, 64, 125, 203, 74, 117, 87, 233, 21, 156, 43, 174, 89, 34, 75, 217, 164, 145, 84, 201, + 105, 178, 155, 54, 73, 176, 102, 39, 31, 138, 164, 23, 167, 250, 239, 44, 47, 181, 11, 149, 192, 92, 68, + 77, 141, 157, 194, 160, 104, 220, 229, 95, 63, 47, 242, 75, 102, 219, 60, 136, 253, 117, 31, 23, 75, 152, + 35, 44, 91, 143, 42, 15, 58, 156, 198, 93, 234, 95, 70, 238, 196, 209, 94, 12, 222, 92, 177, 87, 205, + 222, 17, 19, 67, 94, 109, 145, 156, 155, 40, 50, 188, 145, 221, 174, 61, 117, 57, 100, 151, 59, 216, 20, + 63, 1, 100, 163, 25, 111, 156, 249, 34, 54, 73, 104, 199, 156, 47, 113, 
63, 251, 244, 177, 124, 45, 48, + 224, 227, 205, 218, 149, 80, 68, 80, 127, 243, 55, 127, 113, 28, 18, 30, 181, 253, 177, 197, 53, 150, 17, + 103, 195, 219, 178, 126, 167, 156, 4, 240, 69, 34, 89, 213, 200, 239, 133, 164, 42, 46, 165, 47, 121, 209, + 232, 236, 45, 89, 222, 141, 37, 128, 52, 22, 62, 148, 245, 107, 115, 250, 150, 5, 191, 158, 20, 204, 169, + 232, 98, 250, 71, 179, 74, 253, 130, 96, 110, 254, 42, 106, 196, 121, 251, 225, 67, 88, 2, 128, 101, 152, + 59, 86, 241, 200, 76, 202, 2, 26, 166, 211, 92, 63, 39, 18, 250, 116, 0, 61, 238, 71, 218, 20, 118, + 125, 144, 74, 8, 220, 15, 27, 108, 5, 36, 61, 83, 4, 190, 195, 127, 52, 250, 1, 57, 172, 234, 214, + 161, 163, 78, 80, 235, 41, 229, 141, 208, 246, 185, 50, 56, 185, 61, 47, 243, 204, 54, 172, 223, 165, 32, + 31, 125, 12, 57, 11, 20, 148, 141, 92, 107, 13, 76, 89, 59, 182, 40, 120, 21, 104, 202, 210, 61, 114, + 239, 150, 58, 38, 74, 253, 38, 163, 215, 59, 47, 185, 204, 1, 88, 133, 74, 179, 76, 25, 232, 158, 211, + 19, 181, 42, 104, 8, 191, 98, 233, 9, 16, 71, 62, 205, 195, 241, 246, 118, 180, 136, 52, 80, 127, 174, + 90, 63, 127, 196, 152, 93, 207, 243, 163, 27, 55, 17, 131, 142, 230, 83, 235, 227, 31, 151, 126, 24, 48, + 110, + }; + uint8_t ret[2 * 1 * 32 * 32] = { + 127, 144, 140, 41, 204, 45, 12, 150, 135, 155, 45, 38, 69, 114, 12, 212, 232, 135, 115, 165, 156, 127, 85, + 184, 113, 175, 123, 248, 81, 184, 162, 162, 140, 68, 78, 157, 101, 218, 13, 133, 233, 167, 229, 236, 110, 249, + 168, 0, 45, 159, 73, 121, 70, 110, 127, 115, 171, 68, 237, 38, 121, 119, 114, 192, 95, 167, 216, 200, 80, + 17, 111, 172, 206, 106, 122, 203, 51, 105, 2, 191, 38, 193, 247, 83, 252, 230, 45, 117, 215, 172, 217, 186, + 91, 241, 165, 168, 205, 176, 172, 114, 232, 173, 152, 40, 15, 10, 144, 35, 120, 102, 161, 141, 121, 191, 167, + 69, 49, 69, 18, 105, 98, 56, 82, 78, 28, 150, 122, 223, 125, 38, 72, 225, 248, 124, 205, 12, 74, 18, + 157, 5, 249, 179, 137, 228, 92, 50, 6, 27, 9, 207, 155, 135, 36, 3, 170, 54, 107, 147, 225, 92, 
228, + 234, 206, 22, 31, 28, 112, 167, 209, 46, 75, 10, 111, 113, 230, 27, 169, 225, 35, 82, 117, 111, 2, 153, + 96, 33, 243, 118, 174, 168, 164, 161, 137, 161, 23, 13, 53, 51, 240, 236, 185, 16, 62, 175, 63, 80, 97, + 6, 221, 97, 103, 92, 216, 85, 225, 154, 27, 141, 146, 184, 199, 132, 216, 165, 108, 139, 49, 190, 164, 196, + 195, 53, 219, 239, 87, 101, 183, 48, 98, 222, 66, 99, 172, 191, 189, 189, 174, 131, 209, 108, 185, 141, 212, + 124, 169, 117, 131, 155, 107, 122, 212, 95, 9, 246, 202, 13, 188, 25, 35, 20, 138, 155, 7, 18, 33, 251, + 49, 142, 157, 215, 125, 20, 183, 8, 252, 122, 247, 11, 37, 140, 60, 12, 45, 192, 21, 176, 116, 130, 96, + 64, 97, 108, 214, 58, 129, 227, 1, 94, 4, 252, 138, 100, 240, 52, 178, 12, 148, 160, 143, 84, 165, 21, + 112, 249, 173, 171, 83, 218, 72, 126, 157, 45, 78, 150, 65, 165, 107, 158, 11, 165, 136, 20, 37, 209, 141, + 138, 108, 220, 69, 177, 107, 194, 130, 135, 226, 106, 46, 24, 88, 47, 67, 37, 241, 73, 129, 223, 129, 218, + 127, 156, 145, 113, 53, 226, 173, 84, 230, 82, 166, 244, 12, 219, 12, 203, 118, 10, 170, 252, 179, 144, 217, + 185, 130, 149, 22, 58, 38, 142, 246, 77, 186, 214, 121, 129, 120, 125, 192, 205, 231, 165, 1, 3, 246, 53, + 221, 119, 140, 189, 94, 114, 75, 116, 76, 45, 29, 93, 237, 143, 234, 225, 222, 67, 37, 98, 37, 144, 222, + 49, 215, 156, 56, 185, 109, 75, 143, 36, 165, 220, 142, 19, 127, 232, 84, 235, 202, 56, 184, 204, 48, 155, + 75, 183, 74, 75, 55, 77, 246, 21, 202, 184, 248, 84, 89, 47, 213, 194, 167, 144, 184, 108, 60, 78, 12, + 221, 30, 169, 162, 247, 1, 96, 225, 73, 6, 143, 192, 134, 162, 231, 174, 172, 145, 199, 45, 135, 117, 70, + 218, 226, 94, 43, 215, 130, 139, 48, 231, 191, 142, 189, 140, 185, 194, 41, 230, 160, 105, 211, 130, 141, 249, + 88, 240, 38, 13, 12, 152, 248, 143, 250, 120, 215, 176, 246, 236, 131, 187, 172, 136, 95, 143, 154, 59, 111, + 150, 35, 82, 33, 11, 126, 92, 205, 91, 249, 125, 7, 112, 220, 189, 234, 66, 58, 161, 149, 7, 38, 101, + 109, 249, 212, 164, 72, 221, 162, 96, 31, 64, 
125, 203, 74, 117, 87, 233, 21, 156, 43, 174, 89, 34, 75, + 217, 164, 145, 84, 201, 105, 178, 155, 54, 73, 102, 39, 31, 138, 164, 23, 167, 250, 239, 44, 47, 181, 11, + 149, 192, 92, 68, 77, 141, 157, 194, 160, 104, 220, 229, 95, 63, 47, 242, 75, 102, 219, 136, 253, 117, 31, + 23, 75, 152, 35, 44, 91, 143, 42, 15, 58, 156, 198, 93, 234, 95, 70, 238, 196, 209, 94, 12, 222, 92, + 177, 87, 205, 222, 17, 67, 94, 109, 145, 156, 155, 40, 50, 188, 145, 221, 174, 61, 117, 57, 100, 151, 59, + 216, 20, 63, 1, 100, 163, 25, 111, 156, 249, 34, 54, 73, 104, 156, 47, 113, 63, 251, 244, 177, 124, 45, + 48, 224, 227, 205, 218, 149, 80, 68, 80, 127, 243, 55, 127, 113, 28, 18, 30, 181, 253, 177, 197, 53, 150, + 103, 195, 219, 178, 126, 167, 156, 4, 240, 69, 34, 89, 213, 200, 239, 133, 164, 42, 46, 165, 47, 121, 209, + 232, 236, 45, 89, 222, 141, 37, 128, 52, 62, 148, 245, 107, 115, 250, 150, 5, 191, 158, 20, 204, 169, 232, + 98, 250, 71, 179, 74, 253, 130, 96, 110, 254, 42, 106, 196, 121, 251, 225, 67, 88, 128, 101, 152, 59, 86, + 241, 200, 76, 202, 2, 26, 166, 211, 92, 63, 39, 18, 250, 116, 0, 61, 238, 71, 218, 20, 118, 125, 144, + 74, 8, 220, 15, 108, 5, 36, 61, 83, 4, 190, 195, 127, 52, 250, 1, 57, 172, 234, 214, 161, 163, 78, + 80, 235, 41, 229, 141, 208, 246, 185, 50, 56, 185, 61, 47, 204, 54, 172, 223, 165, 32, 31, 125, 12, 57, + 11, 20, 148, 141, 92, 107, 13, 76, 89, 59, 182, 40, 120, 21, 104, 202, 210, 61, 114, 239, 150, 58, 74, + 253, 38, 163, 215, 59, 47, 185, 204, 1, 88, 133, 74, 179, 76, 25, 232, 158, 211, 19, 181, 42, 104, 8, + 191, 98, 233, 9, 16, 71, 62, 205, 241, 246, 118, 180, 136, 52, 80, 127, 174, 90, 63, 127, 196, 152, 93, + 207, 243, 163, 27, 55, 17, 131, 142, 230, 83, 235, 227, 31, 151, 126, 24, 48, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 105, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 104, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 175, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 234, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 47, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 144, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 172, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 93, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 227, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 228, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 176, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 60, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 199, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 22, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 2, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 243, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {30, 33}, {2, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 1, 32, 32}, {30, 33}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + 
+TEST_F(UtestFormatTransferNdFractNz, nd_shape3_uint8) { + uint8_t data[2 * 32 * 32] = { + 110, 223, 166, 62, 135, 226, 67, 118, 86, 236, 22, 196, 106, 156, 36, 141, 249, 66, 207, 150, 50, 223, 43, + 156, 35, 17, 10, 27, 190, 239, 52, 70, 85, 92, 35, 246, 169, 88, 13, 6, 216, 237, 2, 191, 65, 17, + 37, 28, 195, 39, 65, 148, 155, 192, 230, 243, 98, 249, 94, 192, 211, 65, 164, 217, 48, 18, 121, 246, 29, + 63, 196, 155, 208, 28, 79, 207, 111, 148, 44, 180, 233, 12, 157, 211, 227, 6, 59, 103, 0, 9, 164, 65, + 9, 53, 93, 70, 2, 26, 75, 96, 144, 37, 43, 140, 168, 160, 68, 202, 9, 84, 212, 231, 52, 218, 140, + 185, 87, 17, 118, 30, 123, 35, 113, 207, 126, 25, 231, 168, 18, 200, 209, 40, 89, 174, 148, 117, 250, 126, + 45, 240, 5, 187, 70, 142, 32, 49, 30, 88, 67, 154, 63, 100, 62, 120, 115, 132, 189, 69, 177, 79, 147, + 226, 22, 244, 1, 166, 232, 3, 7, 198, 25, 183, 64, 59, 145, 126, 23, 7, 169, 195, 10, 50, 150, 196, + 238, 141, 187, 169, 169, 80, 226, 225, 166, 72, 208, 16, 149, 38, 137, 213, 159, 104, 48, 203, 213, 56, 127, + 26, 182, 242, 111, 31, 140, 117, 251, 59, 84, 42, 205, 204, 79, 235, 196, 245, 74, 148, 91, 26, 182, 240, + 100, 78, 171, 14, 221, 88, 186, 246, 107, 200, 91, 70, 204, 135, 213, 226, 115, 33, 128, 76, 106, 232, 76, + 98, 56, 232, 19, 172, 243, 11, 47, 192, 160, 232, 23, 78, 19, 75, 169, 243, 25, 4, 72, 140, 36, 1, + 44, 31, 232, 247, 15, 79, 140, 103, 181, 140, 24, 117, 155, 96, 151, 112, 17, 209, 204, 139, 223, 89, 122, + 149, 41, 41, 201, 4, 206, 171, 172, 207, 185, 156, 140, 204, 45, 124, 8, 45, 176, 66, 135, 232, 29, 17, + 220, 160, 246, 97, 110, 200, 201, 103, 155, 195, 73, 183, 235, 67, 211, 214, 23, 196, 217, 57, 57, 27, 106, + 100, 90, 190, 155, 38, 143, 164, 12, 140, 30, 36, 175, 229, 177, 121, 164, 92, 181, 221, 26, 239, 184, 229, + 233, 32, 139, 120, 12, 206, 72, 227, 223, 161, 12, 211, 188, 10, 69, 190, 23, 247, 215, 65, 228, 246, 77, + 20, 205, 17, 220, 198, 79, 44, 167, 88, 43, 77, 155, 111, 135, 52, 102, 178, 92, 197, 151, 167, 38, 
44, + 246, 84, 140, 126, 192, 12, 215, 172, 233, 98, 52, 189, 30, 222, 98, 195, 149, 83, 160, 227, 26, 37, 207, + 129, 31, 235, 146, 184, 101, 152, 34, 32, 109, 130, 142, 113, 148, 73, 125, 190, 201, 9, 50, 48, 93, 71, + 180, 16, 107, 22, 160, 111, 213, 235, 124, 231, 140, 226, 135, 107, 245, 45, 130, 126, 241, 46, 226, 154, 238, + 241, 148, 10, 192, 60, 110, 89, 31, 194, 221, 217, 118, 242, 243, 220, 76, 201, 164, 29, 6, 26, 162, 133, + 204, 252, 149, 12, 104, 181, 172, 226, 131, 207, 144, 236, 230, 154, 240, 189, 204, 196, 204, 0, 101, 102, 68, + 95, 56, 72, 51, 12, 170, 154, 76, 153, 209, 219, 68, 111, 64, 82, 56, 205, 204, 218, 150, 102, 197, 157, + 103, 133, 35, 186, 36, 144, 74, 150, 74, 110, 178, 143, 137, 45, 169, 106, 114, 250, 67, 217, 248, 46, 244, + 249, 50, 158, 138, 183, 220, 91, 193, 229, 159, 22, 206, 94, 10, 63, 128, 211, 162, 28, 40, 100, 8, 121, + 178, 78, 247, 244, 98, 14, 165, 28, 65, 26, 65, 185, 233, 51, 153, 165, 205, 12, 116, 97, 88, 157, 153, + 23, 160, 141, 162, 119, 10, 86, 122, 21, 240, 141, 100, 248, 87, 190, 170, 204, 162, 68, 216, 196, 129, 254, + 164, 157, 123, 51, 6, 27, 76, 22, 106, 82, 209, 83, 164, 8, 76, 219, 107, 252, 119, 49, 106, 213, 218, + 155, 250, 68, 132, 157, 193, 135, 15, 173, 90, 134, 131, 67, 45, 76, 178, 116, 50, 53, 84, 183, 46, 50, + 123, 92, 167, 230, 223, 55, 208, 228, 165, 247, 254, 177, 21, 187, 192, 75, 182, 209, 243, 11, 0, 189, 72, + 82, 24, 165, 223, 143, 131, 133, 120, 211, 75, 152, 185, 55, 70, 52, 44, 109, 72, 235, 140, 88, 189, 205, + 44, 81, 44, 172, 22, 115, 114, 187, 100, 157, 198, 182, 77, 123, 13, 2, 201, 152, 64, 157, 186, 31, 21, + 17, 242, 30, 116, 178, 88, 140, 232, 36, 133, 123, 86, 58, 181, 34, 126, 241, 67, 147, 95, 151, 245, 9, + 142, 16, 58, 20, 6, 223, 211, 43, 96, 251, 188, 193, 164, 229, 29, 242, 109, 46, 195, 7, 14, 37, 222, + 38, 135, 157, 39, 5, 146, 158, 198, 230, 32, 2, 115, 200, 151, 31, 20, 239, 150, 50, 201, 211, 217, 64, + 106, 232, 109, 101, 102, 32, 0, 153, 92, 233, 50, 
243, 78, 12, 155, 16, 45, 34, 127, 138, 41, 249, 11, + 117, 230, 239, 63, 230, 134, 173, 229, 32, 176, 61, 165, 244, 41, 139, 107, 34, 43, 48, 114, 4, 237, 154, + 215, 140, 71, 234, 4, 25, 192, 31, 238, 139, 58, 123, 207, 150, 94, 121, 116, 49, 87, 25, 242, 224, 95, + 205, 189, 205, 9, 20, 51, 25, 175, 73, 91, 139, 117, 86, 75, 52, 106, 201, 10, 70, 93, 124, 203, 107, + 223, 2, 184, 147, 223, 9, 29, 11, 211, 170, 247, 72, 17, 4, 67, 74, 251, 103, 190, 20, 88, 206, 91, + 153, 202, 42, 47, 94, 186, 8, 22, 119, 241, 72, 203, 180, 79, 186, 181, 78, 218, 157, 173, 138, 156, 33, + 149, 175, 246, 179, 166, 117, 108, 148, 147, 181, 67, 11, 163, 99, 248, 161, 31, 195, 49, 122, 35, 214, 65, + 81, 41, 150, 180, 134, 185, 212, 242, 123, 231, 227, 129, 211, 80, 248, 154, 243, 219, 89, 57, 197, 167, 245, + 177, 38, 112, 178, 214, 111, 244, 218, 149, 240, 49, 243, 14, 70, 150, 74, 4, 223, 188, 214, 112, 23, 101, + 176, 118, 207, 106, 156, 240, 62, 19, 206, 120, 181, 145, 239, 229, 166, 189, 35, 168, 145, 144, 106, 189, 87, + 163, 147, 91, 228, 111, 180, 90, 199, 64, 164, 36, 8, 165, 118, 209, 245, 39, 214, 168, 15, 20, 110, 95, + 89, 37, 71, 52, 107, 228, 229, 240, 207, 47, 169, 146, 46, 49, 130, 11, 47, 0, 190, 230, 205, 80, 13, + 209, 238, 146, 131, 187, 175, 27, 41, 249, 252, 148, 178, 23, 207, 124, 219, 7, 156, 31, 105, 116, 19, 140, + 63, 66, 5, 131, 157, 213, 9, 137, 39, 68, 89, 94, 28, 101, 229, 26, 103, 138, 186, 38, 251, 207, 136, + 132, 182, 105, 114, 31, 6, 41, 22, 37, 124, 184, 51, 67, 245, 77, 229, 136, 241, 167, 7, 53, 20, 63, + 246, 35, 97, 80, 150, 79, 51, 252, 89, 1, 29, 228, 130, 123, 32, 5, 51, 252, 105, 46, 236, 16, 209, + 246, 238, 169, 79, 87, 223, 209, 93, 105, 193, 35, 152, 14, 46, 192, 201, 117, 247, 142, 157, 84, 191, 9, + 165, 209, 72, 226, 213, 42, 159, 133, 56, 151, 196, 89, 35, 159, 254, 98, 55, 155, 92, 216, 186, 65, 226, + 98, 121, 58, 157, 167, 112, 201, 194, 32, 139, 164, 119, 225, 246, 51, 68, 187, 40, 39, 162, 248, 107, 254, + 48, 66, 112, 112, 
3, 43, 231, 251, 219, 26, 12, 118, 116, 89, 135, 139, 20, 142, 89, 46, 168, 96, 109, + 212, 216, 243, 221, 34, 154, 243, 119, 158, 170, 246, 59, 28, 137, 213, 173, 16, 51, 194, 62, 163, 82, 89, + 11, 54, 164, 146, 92, 42, 61, 110, 97, 116, 8, 117, 193, 140, 30, 99, 50, 174, 115, 72, 102, 96, 153, + 57, 16, 34, 100, 128, 190, 59, 19, 252, 223, 252, 222, 5, 70, 58, 40, 63, 131, 250, 76, 77, 210, 207, + 248, 127, 55, 22, 22, 176, 185, 110, 246, 199, 32, 73, 137, 58, 177, 193, 236, 169, 22, 117, 240, 58, 206, + 115, 23, 51, 243, 188, 139, 32, 219, 233, 162, 246, 242, 209, 197, 210, 78, 235, 31, 190, 61, 115, 218, 243, + 213, 90, 98, 241, 200, 232, 198, 225, 112, 106, 104, 250, 198, 21, 142, 71, 135, 113, 43, 19, 239, 96, 179, + 136, 91, 176, 202, 238, 251, 75, 8, 112, 50, 235, 184, 192, 131, 133, 125, 102, 122, 17, 215, 146, 20, 127, + 130, 207, 149, 21, 47, 240, 9, 151, 131, 41, 37, 201, 36, 99, 202, 108, 195, 162, 252, 240, 172, 66, 243, + 11, 91, 237, 23, 237, 187, 2, 253, 85, 169, 239, 239, 128, 79, 201, 220, 187, 148, 87, 100, 17, 107, 158, + 164, 162, 99, 6, 138, 176, 138, 78, 10, 161, 209, 163, 231, 0, 57, 59, 109, 110, 80, 102, 220, 97, 38, + 10, 3, 119, 172, 72, 180, 125, 48, 63, 78, 127, 252, 80, 18, 205, 106, 230, 85, 27, 235, 116, 211, 7, + 156, 117, 42, 212, 217, 0, 115, 214, 146, 37, 240, 143, 235, 251, 201, 109, 106, 213, 148, 238, 123, 101, 141, + 94, 215, 69, 3, 29, 228, 98, 210, 191, 218, 148, 137, 231, 224, 130, 213, 23, 59, 183, 217, 47, 176, 229, + 210, 118, 209, 175, 166, 250, 207, 156, 60, 161, 169, 63, 179, 214, 231, 97, 144, 28, 115, 212, 106, 206, 244, + 12, 165, 29, 167, 89, 243, 214, 249, 113, 250, 229, 121, 210, 94, 29, 229, 81, 158, 148, 139, 34, 187, 138, + 65, 250, 254, 183, 155, 216, 152, 184, 122, 242, 149, 28, 171, 248, 210, 142, 78, 101, 246, 22, 253, 129, 138, + 225, 179, 234, 188, 15, 50, 145, 87, 192, 223, 46, 245, 87, 10, 0, 39, 207, 41, 62, 213, 233, 10, 45, + 185, 138, 67, 11, 16, 7, 150, 194, 233, 131, 110, 237, 130, 190, 139, 
195, 252, 41, 157, 196, 192, 163, 182, + 181, 202, 121, 189, 162, 48, 33, 69, 3, 66, 209, 42, 139, 9, 85, 136, 163, 183, 218, 12, 22, 20, 139, + 26, 33, 245, 179, 199, 120, 76, 239, 206, 181, 117, 231, 173, 216, 96, 98, 253, 250, 171, 31, 214, 228, 14, + 159, 39, 37, 153, 87, 183, 213, 18, 71, 147, 128, 150, 63, 85, 92, 225, 55, 5, 167, 39, 69, 178, 131, + 73, 38, 146, 175, 154, 58, 57, 98, 145, 219, 79, 145, 27, 143, 237, 155, 56, 239, 83, 114, 125, 58, 175, + 187, 35, 109, 36, 160, 85, 251, 192, 94, 194, 83, 22, 42, 104, 93, 103, 32, 217, 110, 131, 227, 142, 199, + 83, 59, 10, 104, 54, 41, 138, 47, 100, 6, 152, 59, 103, 98, 149, 29, 233, 125, 119, 131, 210, 64, 133, + 249, 34, 157, 132, 69, 200, 20, 45, 235, 126, 43, 195, 180, 67, 95, 75, 43, 247, 152, 84, 212, 86, 151, + 159, 167, 197, 107, 175, 230, 14, 99, 252, 54, 138, 74, 152, 75, 245, 88, 99, 114, 32, 146, 187, 156, 39, + 18, 68, 7, 184, 215, 115, 245, 129, 154, 73, 59, 194, 245, 96, 181, 49, 11, 4, 103, 185, 246, 152, 29, + 213, 9, 162, 23, 44, 89, 119, 214, 216, 139, 223, 188, 5, 165, 72, 183, 41, 231, 253, 29, 66, 102, 243, + 44, 92, 122, 32, 164, 63, 128, 38, 178, 162, 21, 164, 9, 29, 153, 244, 199, 192, 206, 249, 115, 142, 118, + 169, 186, 239, 34, 171, 211, 74, 92, 103, 167, 110, 66, 221, 67, 74, 189, 23, 236, 189, 120, 249, 87, 99, + 194, 247, 133, 211, 69, 49, 15, 27, 133, 233, 181, 15, 112, 181, 200, 149, 229, 103, 93, 254, 253, 182, 74, + 35, 171, 114, 50, 230, 106, 138, 170, 4, 95, 23, 53, 181, 87, 90, 85, 18, 98, 233, 194, 16, 18, 83, + 81, 234, 4, 42, 99, 225, 166, 46, 51, 71, 227, 170, 176, 51, 37, 12, 26, 198, 216, 247, 10, 150, 230, + 15, + }; + uint8_t ret[2 * 1 * 1 * 32 * 32] = { + 110, 223, 166, 62, 135, 226, 67, 118, 86, 236, 22, 196, 106, 156, 36, 141, 249, 66, 207, 150, 50, 223, 43, + 156, 35, 17, 10, 27, 190, 239, 52, 70, 85, 92, 35, 246, 169, 88, 13, 6, 216, 237, 2, 191, 65, 17, + 37, 28, 195, 39, 65, 148, 155, 192, 230, 243, 98, 249, 94, 192, 211, 65, 164, 217, 48, 18, 121, 246, 29, 
+ 63, 196, 155, 208, 28, 79, 207, 111, 148, 44, 180, 233, 12, 157, 211, 227, 6, 59, 103, 0, 9, 164, 65, + 9, 53, 93, 70, 2, 26, 75, 96, 144, 37, 43, 140, 168, 160, 68, 202, 9, 84, 212, 231, 52, 218, 140, + 185, 87, 17, 118, 30, 123, 35, 113, 207, 126, 25, 231, 168, 18, 200, 209, 40, 89, 174, 148, 117, 250, 126, + 45, 240, 5, 187, 70, 142, 32, 49, 30, 88, 67, 154, 63, 100, 62, 120, 115, 132, 189, 69, 177, 79, 147, + 226, 22, 244, 1, 166, 232, 3, 7, 198, 25, 183, 64, 59, 145, 126, 23, 7, 169, 195, 10, 50, 150, 196, + 238, 141, 187, 169, 169, 80, 226, 225, 166, 72, 208, 16, 149, 38, 137, 213, 159, 104, 48, 203, 213, 56, 127, + 26, 182, 242, 111, 31, 140, 117, 251, 59, 84, 42, 205, 204, 79, 235, 196, 245, 74, 148, 91, 26, 182, 240, + 100, 78, 171, 14, 221, 88, 186, 246, 107, 200, 91, 70, 204, 135, 213, 226, 115, 33, 128, 76, 106, 232, 76, + 98, 56, 232, 19, 172, 243, 11, 47, 192, 160, 232, 23, 78, 19, 75, 169, 243, 25, 4, 72, 140, 36, 1, + 44, 31, 232, 247, 15, 79, 140, 103, 181, 140, 24, 117, 155, 96, 151, 112, 17, 209, 204, 139, 223, 89, 122, + 149, 41, 41, 201, 4, 206, 171, 172, 207, 185, 156, 140, 204, 45, 124, 8, 45, 176, 66, 135, 232, 29, 17, + 220, 160, 246, 97, 110, 200, 201, 103, 155, 195, 73, 183, 235, 67, 211, 214, 23, 196, 217, 57, 57, 27, 106, + 100, 90, 190, 155, 38, 143, 164, 12, 140, 30, 36, 175, 229, 177, 121, 164, 92, 181, 221, 26, 239, 184, 229, + 233, 32, 139, 120, 12, 206, 72, 227, 223, 161, 12, 211, 188, 10, 69, 190, 23, 247, 215, 65, 228, 246, 77, + 20, 205, 17, 220, 198, 79, 44, 167, 88, 43, 77, 155, 111, 135, 52, 102, 178, 92, 197, 151, 167, 38, 44, + 246, 84, 140, 126, 192, 12, 215, 172, 233, 98, 52, 189, 30, 222, 98, 195, 149, 83, 160, 227, 26, 37, 207, + 129, 31, 235, 146, 184, 101, 152, 34, 32, 109, 130, 142, 113, 148, 73, 125, 190, 201, 9, 50, 48, 93, 71, + 180, 16, 107, 22, 160, 111, 213, 235, 124, 231, 140, 226, 135, 107, 245, 45, 130, 126, 241, 46, 226, 154, 238, + 241, 148, 10, 192, 60, 110, 89, 31, 194, 221, 217, 118, 242, 243, 220, 
76, 201, 164, 29, 6, 26, 162, 133, + 204, 252, 149, 12, 104, 181, 172, 226, 131, 207, 144, 236, 230, 154, 240, 189, 204, 196, 204, 0, 101, 102, 68, + 95, 56, 72, 51, 12, 170, 154, 76, 153, 209, 219, 68, 111, 64, 82, 56, 205, 204, 218, 150, 102, 197, 157, + 103, 133, 35, 186, 36, 144, 74, 150, 74, 110, 178, 143, 137, 45, 169, 106, 114, 250, 67, 217, 248, 46, 244, + 249, 50, 158, 138, 183, 220, 91, 193, 229, 159, 22, 206, 94, 10, 63, 128, 211, 162, 28, 40, 100, 8, 121, + 178, 78, 247, 244, 98, 14, 165, 28, 65, 26, 65, 185, 233, 51, 153, 165, 205, 12, 116, 97, 88, 157, 153, + 23, 160, 141, 162, 119, 10, 86, 122, 21, 240, 141, 100, 248, 87, 190, 170, 204, 162, 68, 216, 196, 129, 254, + 164, 157, 123, 51, 6, 27, 76, 22, 106, 82, 209, 83, 164, 8, 76, 219, 107, 252, 119, 49, 106, 213, 218, + 155, 250, 68, 132, 157, 193, 135, 15, 173, 90, 134, 131, 67, 45, 76, 178, 116, 50, 53, 84, 183, 46, 50, + 123, 92, 167, 230, 223, 55, 208, 228, 165, 247, 254, 177, 21, 187, 192, 75, 182, 209, 243, 11, 0, 189, 72, + 82, 24, 165, 223, 143, 131, 133, 120, 211, 75, 152, 185, 55, 70, 52, 44, 109, 72, 235, 140, 88, 189, 205, + 44, 81, 44, 172, 22, 115, 114, 187, 100, 157, 198, 182, 77, 123, 13, 2, 201, 152, 64, 157, 186, 31, 21, + 17, 242, 30, 116, 178, 88, 140, 232, 36, 133, 123, 86, 58, 181, 34, 126, 241, 67, 147, 95, 151, 245, 9, + 142, 16, 58, 20, 6, 223, 211, 43, 96, 251, 188, 193, 164, 229, 29, 242, 109, 46, 195, 7, 14, 37, 222, + 38, 135, 157, 39, 5, 146, 158, 198, 230, 32, 2, 115, 200, 151, 31, 20, 239, 150, 50, 201, 211, 217, 64, + 106, 232, 109, 101, 102, 32, 0, 153, 92, 233, 50, 243, 78, 12, 155, 16, 45, 34, 127, 138, 41, 249, 11, + 117, 230, 239, 63, 230, 134, 173, 229, 32, 176, 61, 165, 244, 41, 139, 107, 34, 43, 48, 114, 4, 237, 154, + 215, 140, 71, 234, 4, 25, 192, 31, 238, 139, 58, 123, 207, 150, 94, 121, 116, 49, 87, 25, 242, 224, 95, + 205, 189, 205, 9, 20, 51, 25, 175, 73, 91, 139, 117, 86, 75, 52, 106, 201, 10, 70, 93, 124, 203, 107, + 223, 2, 184, 147, 223, 9, 29, 11, 
211, 170, 247, 72, 17, 4, 67, 74, 251, 103, 190, 20, 88, 206, 91, + 153, 202, 42, 47, 94, 186, 8, 22, 119, 241, 72, 203, 180, 79, 186, 181, 78, 218, 157, 173, 138, 156, 33, + 149, 175, 246, 179, 166, 117, 108, 148, 147, 181, 67, 11, 163, 99, 248, 161, 31, 195, 49, 122, 35, 214, 65, + 81, 41, 150, 180, 134, 185, 212, 242, 123, 231, 227, 129, 211, 80, 248, 154, 243, 219, 89, 57, 197, 167, 245, + 177, 38, 112, 178, 214, 111, 244, 218, 149, 240, 49, 243, 14, 70, 150, 74, 4, 223, 188, 214, 112, 23, 101, + 176, 118, 207, 106, 156, 240, 62, 19, 206, 120, 181, 145, 239, 229, 166, 189, 35, 168, 145, 144, 106, 189, 87, + 163, 147, 91, 228, 111, 180, 90, 199, 64, 164, 36, 8, 165, 118, 209, 245, 39, 214, 168, 15, 20, 110, 95, + 89, 37, 71, 52, 107, 228, 229, 240, 207, 47, 169, 146, 46, 49, 130, 11, 47, 0, 190, 230, 205, 80, 13, + 209, 238, 146, 131, 187, 175, 27, 41, 249, 252, 148, 178, 23, 207, 124, 219, 7, 156, 31, 105, 116, 19, 140, + 63, 66, 5, 131, 157, 213, 9, 137, 39, 68, 89, 94, 28, 101, 229, 26, 103, 138, 186, 38, 251, 207, 136, + 132, 182, 105, 114, 31, 6, 41, 22, 37, 124, 184, 51, 67, 245, 77, 229, 136, 241, 167, 7, 53, 20, 63, + 246, 35, 97, 80, 150, 79, 51, 252, 89, 1, 29, 228, 130, 123, 32, 5, 51, 252, 105, 46, 236, 16, 209, + 246, 238, 169, 79, 87, 223, 209, 93, 105, 193, 35, 152, 14, 46, 192, 201, 117, 247, 142, 157, 84, 191, 9, + 165, 209, 72, 226, 213, 42, 159, 133, 56, 151, 196, 89, 35, 159, 254, 98, 55, 155, 92, 216, 186, 65, 226, + 98, 121, 58, 157, 167, 112, 201, 194, 32, 139, 164, 119, 225, 246, 51, 68, 187, 40, 39, 162, 248, 107, 254, + 48, 66, 112, 112, 3, 43, 231, 251, 219, 26, 12, 118, 116, 89, 135, 139, 20, 142, 89, 46, 168, 96, 109, + 212, 216, 243, 221, 34, 154, 243, 119, 158, 170, 246, 59, 28, 137, 213, 173, 16, 51, 194, 62, 163, 82, 89, + 11, 54, 164, 146, 92, 42, 61, 110, 97, 116, 8, 117, 193, 140, 30, 99, 50, 174, 115, 72, 102, 96, 153, + 57, 16, 34, 100, 128, 190, 59, 19, 252, 223, 252, 222, 5, 70, 58, 40, 63, 131, 250, 76, 77, 210, 207, + 
248, 127, 55, 22, 22, 176, 185, 110, 246, 199, 32, 73, 137, 58, 177, 193, 236, 169, 22, 117, 240, 58, 206, + 115, 23, 51, 243, 188, 139, 32, 219, 233, 162, 246, 242, 209, 197, 210, 78, 235, 31, 190, 61, 115, 218, 243, + 213, 90, 98, 241, 200, 232, 198, 225, 112, 106, 104, 250, 198, 21, 142, 71, 135, 113, 43, 19, 239, 96, 179, + 136, 91, 176, 202, 238, 251, 75, 8, 112, 50, 235, 184, 192, 131, 133, 125, 102, 122, 17, 215, 146, 20, 127, + 130, 207, 149, 21, 47, 240, 9, 151, 131, 41, 37, 201, 36, 99, 202, 108, 195, 162, 252, 240, 172, 66, 243, + 11, 91, 237, 23, 237, 187, 2, 253, 85, 169, 239, 239, 128, 79, 201, 220, 187, 148, 87, 100, 17, 107, 158, + 164, 162, 99, 6, 138, 176, 138, 78, 10, 161, 209, 163, 231, 0, 57, 59, 109, 110, 80, 102, 220, 97, 38, + 10, 3, 119, 172, 72, 180, 125, 48, 63, 78, 127, 252, 80, 18, 205, 106, 230, 85, 27, 235, 116, 211, 7, + 156, 117, 42, 212, 217, 0, 115, 214, 146, 37, 240, 143, 235, 251, 201, 109, 106, 213, 148, 238, 123, 101, 141, + 94, 215, 69, 3, 29, 228, 98, 210, 191, 218, 148, 137, 231, 224, 130, 213, 23, 59, 183, 217, 47, 176, 229, + 210, 118, 209, 175, 166, 250, 207, 156, 60, 161, 169, 63, 179, 214, 231, 97, 144, 28, 115, 212, 106, 206, 244, + 12, 165, 29, 167, 89, 243, 214, 249, 113, 250, 229, 121, 210, 94, 29, 229, 81, 158, 148, 139, 34, 187, 138, + 65, 250, 254, 183, 155, 216, 152, 184, 122, 242, 149, 28, 171, 248, 210, 142, 78, 101, 246, 22, 253, 129, 138, + 225, 179, 234, 188, 15, 50, 145, 87, 192, 223, 46, 245, 87, 10, 0, 39, 207, 41, 62, 213, 233, 10, 45, + 185, 138, 67, 11, 16, 7, 150, 194, 233, 131, 110, 237, 130, 190, 139, 195, 252, 41, 157, 196, 192, 163, 182, + 181, 202, 121, 189, 162, 48, 33, 69, 3, 66, 209, 42, 139, 9, 85, 136, 163, 183, 218, 12, 22, 20, 139, + 26, 33, 245, 179, 199, 120, 76, 239, 206, 181, 117, 231, 173, 216, 96, 98, 253, 250, 171, 31, 214, 228, 14, + 159, 39, 37, 153, 87, 183, 213, 18, 71, 147, 128, 150, 63, 85, 92, 225, 55, 5, 167, 39, 69, 178, 131, + 73, 38, 146, 175, 154, 58, 57, 98, 145, 219, 
79, 145, 27, 143, 237, 155, 56, 239, 83, 114, 125, 58, 175, + 187, 35, 109, 36, 160, 85, 251, 192, 94, 194, 83, 22, 42, 104, 93, 103, 32, 217, 110, 131, 227, 142, 199, + 83, 59, 10, 104, 54, 41, 138, 47, 100, 6, 152, 59, 103, 98, 149, 29, 233, 125, 119, 131, 210, 64, 133, + 249, 34, 157, 132, 69, 200, 20, 45, 235, 126, 43, 195, 180, 67, 95, 75, 43, 247, 152, 84, 212, 86, 151, + 159, 167, 197, 107, 175, 230, 14, 99, 252, 54, 138, 74, 152, 75, 245, 88, 99, 114, 32, 146, 187, 156, 39, + 18, 68, 7, 184, 215, 115, 245, 129, 154, 73, 59, 194, 245, 96, 181, 49, 11, 4, 103, 185, 246, 152, 29, + 213, 9, 162, 23, 44, 89, 119, 214, 216, 139, 223, 188, 5, 165, 72, 183, 41, 231, 253, 29, 66, 102, 243, + 44, 92, 122, 32, 164, 63, 128, 38, 178, 162, 21, 164, 9, 29, 153, 244, 199, 192, 206, 249, 115, 142, 118, + 169, 186, 239, 34, 171, 211, 74, 92, 103, 167, 110, 66, 221, 67, 74, 189, 23, 236, 189, 120, 249, 87, 99, + 194, 247, 133, 211, 69, 49, 15, 27, 133, 233, 181, 15, 112, 181, 200, 149, 229, 103, 93, 254, 253, 182, 74, + 35, 171, 114, 50, 230, 106, 138, 170, 4, 95, 23, 53, 181, 87, 90, 85, 18, 98, 233, 194, 16, 18, 83, + 81, 234, 4, 42, 99, 225, 166, 46, 51, 71, 227, 170, 176, 51, 37, 12, 26, 198, 216, 247, 10, 150, 230, + 15, + }; + + FormatTransferFractalNz transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {2, 32, 32}, {2, 1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 1, 1, 32, 32}, {2, 32, 32}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / 
sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape4_uint8_1) { + uint8_t data[2 * 3 * 4 * 32] = { + 85, 14, 85, 145, 50, 114, 71, 246, 101, 16, 101, 237, 4, 192, 118, 148, 119, 42, 185, 105, 189, 38, 138, + 149, 123, 8, 222, 30, 153, 182, 13, 117, 143, 112, 111, 234, 72, 115, 6, 141, 169, 86, 57, 254, 200, 31, + 111, 214, 190, 130, 221, 182, 236, 153, 120, 251, 98, 178, 35, 197, 176, 50, 34, 102, 72, 217, 134, 139, 230, + 42, 254, 235, 79, 76, 102, 34, 111, 49, 131, 102, 178, 247, 44, 213, 215, 142, 96, 236, 167, 123, 221, 8, + 22, 5, 93, 6, 84, 5, 198, 222, 67, 210, 229, 72, 220, 149, 74, 173, 71, 20, 171, 42, 92, 236, 234, + 249, 27, 25, 21, 156, 93, 173, 14, 252, 90, 251, 177, 32, 224, 251, 192, 71, 53, 181, 127, 66, 104, 148, + 221, 27, 82, 233, 102, 107, 138, 182, 181, 159, 136, 71, 97, 40, 22, 133, 112, 159, 229, 252, 34, 122, 115, + 52, 149, 167, 220, 174, 67, 201, 158, 78, 96, 145, 31, 100, 133, 30, 3, 232, 55, 230, 175, 211, 117, 100, + 12, 182, 104, 156, 29, 119, 227, 25, 159, 50, 136, 181, 43, 96, 138, 130, 58, 31, 127, 231, 163, 121, 46, + 42, 133, 238, 69, 19, 81, 147, 87, 28, 84, 57, 140, 37, 3, 15, 114, 34, 119, 215, 77, 71, 178, 127, + 130, 146, 179, 193, 248, 51, 174, 51, 199, 211, 35, 98, 75, 100, 55, 86, 171, 167, 188, 201, 118, 140, 159, + 236, 137, 180, 240, 41, 61, 227, 80, 240, 26, 194, 163, 10, 18, 167, 79, 9, 36, 70, 46, 233, 167, 187, + 23, 204, 45, 184, 60, 143, 149, 181, 186, 229, 147, 249, 83, 36, 94, 161, 96, 177, 209, 210, 194, 30, 103, + 94, 232, 215, 156, 35, 220, 252, 128, 85, 161, 115, 209, 254, 11, 229, 69, 68, 16, 173, 115, 51, 188, 84, + 197, 166, 46, 228, 248, 91, 131, 201, 180, 242, 112, 145, 118, 216, 59, 56, 76, 166, 144, 118, 147, 190, 79, + 249, 138, 39, 172, 224, 104, 15, 147, 203, 194, 160, 114, 212, 85, 57, 118, 163, 146, 235, 22, 113, 225, 74, + 47, 227, 151, 187, 3, 104, 222, 52, 63, 176, 183, 228, 48, 57, 210, 240, 244, 33, 
140, 109, 150, 236, 111, + 6, 122, 34, 198, 143, 41, 160, 114, 87, 19, 100, 67, 105, 17, 23, 207, 207, 165, 154, 197, 195, 216, 122, + 85, 71, 188, 136, 22, 221, 120, 105, 250, 197, 244, 25, 31, 235, 26, 82, 20, 2, 161, 190, 145, 105, 181, + 93, 220, 189, 123, 28, 125, 146, 40, 136, 233, 195, 243, 90, 188, 166, 191, 143, 81, 162, 172, 155, 171, 81, + 193, 146, 241, 54, 43, 27, 123, 252, 118, 186, 206, 189, 137, 190, 27, 108, 191, 224, 165, 176, 35, 84, 180, + 239, 1, 154, 90, 88, 119, 188, 14, 206, 130, 201, 48, 162, 22, 47, 154, 178, 69, 23, 16, 125, 41, 205, + 154, 150, 69, 38, 232, 246, 92, 24, 110, 154, 116, 44, 172, 151, 12, 212, 237, 167, 72, 212, 34, 152, 70, + 137, 18, 63, 107, 49, 62, 47, 55, 73, 23, 94, 82, 198, 25, 15, 205, 206, 55, 193, 27, 105, 182, 226, + 43, 114, 78, 239, 88, 140, 59, 174, 235, 234, 87, 164, 243, 119, 165, 0, 135, 34, 87, 134, 174, 36, 11, + 178, 6, 32, 77, 35, 148, 136, 111, 224, 184, 68, 222, 77, 148, 224, 180, 111, 226, 153, 137, 137, 68, 107, + 242, 178, 165, 75, 29, 58, 180, 55, 68, 149, 97, 179, 170, 65, 214, 104, 212, 186, 175, 231, 205, 157, 108, + 127, 17, 233, 201, 208, 25, 136, 254, 21, 155, 199, 155, 80, 25, 94, 63, 207, 225, 54, 91, 154, 122, 193, + 97, 235, 18, 122, 97, 189, 4, 149, 229, 239, 33, 52, 187, 7, 82, 229, 93, 184, 150, 218, 48, 150, 196, + 152, 3, 233, 183, 182, 25, 178, 0, 236, 49, 20, 189, 6, 173, 95, 227, 105, 161, 27, 250, 121, 14, 135, + 103, 229, 188, 81, 9, 234, 88, 37, 52, 59, 84, 117, 204, 242, 225, 34, 2, 104, 66, 141, 109, 151, 47, + 124, 247, 79, 70, 104, 63, 13, 198, 242, 110, 131, 247, 20, 246, 65, 238, 119, 86, 42, 21, 43, 175, 45, + 231, 78, 143, 118, 8, 100, 133, 27, 59, 188, 61, 105, 165, 183, 81, 96, 26, 106, 153, 151, 186, 232, 54, + 231, 35, 95, 172, 56, 208, 126, 198, 239, + }; + uint8_t ret[2 * 3 * 1 * 1 * 32 * 32] = { + 85, 14, 85, 145, 50, 114, 71, 246, 101, 16, 101, 237, 4, 192, 118, 148, 119, 42, 185, 105, 189, 38, 138, + 149, 123, 8, 222, 30, 153, 182, 13, 117, 143, 112, 111, 234, 
72, 115, 6, 141, 169, 86, 57, 254, 200, 31, + 111, 214, 190, 130, 221, 182, 236, 153, 120, 251, 98, 178, 35, 197, 176, 50, 34, 102, 72, 217, 134, 139, 230, + 42, 254, 235, 79, 76, 102, 34, 111, 49, 131, 102, 178, 247, 44, 213, 215, 142, 96, 236, 167, 123, 221, 8, + 22, 5, 93, 6, 84, 5, 198, 222, 67, 210, 229, 72, 220, 149, 74, 173, 71, 20, 171, 42, 92, 236, 234, + 249, 27, 25, 21, 156, 93, 173, 14, 252, 90, 251, 177, 32, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 224, 251, 192, 71, 53, 181, 127, 66, 104, 148, 221, + 27, 82, 233, 102, 107, 138, 182, 181, 159, 136, 71, 97, 40, 22, 133, 112, 159, 229, 252, 34, 122, 115, 52, + 149, 167, 220, 174, 67, 201, 158, 78, 96, 145, 31, 100, 133, 30, 3, 232, 55, 230, 175, 211, 117, 100, 12, + 182, 104, 156, 29, 119, 227, 25, 159, 50, 136, 181, 43, 96, 138, 130, 58, 31, 127, 231, 163, 121, 46, 42, + 133, 238, 69, 19, 81, 147, 87, 28, 84, 57, 140, 37, 3, 15, 114, 34, 119, 215, 77, 71, 178, 127, 130, + 146, 179, 193, 248, 51, 174, 51, 199, 211, 35, 98, 75, 100, 55, 86, 171, 167, 188, 201, 118, 140, 159, 236, + 137, 180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 240, 41, 61, 227, 80, 240, 26, 194, 163, 10, 18, 167, 79, 9, 36, 70, 46, 233, 167, 187, 23, 204, + 45, 184, 60, 143, 149, 181, 186, 229, 147, 249, 83, 36, 94, 161, 96, 177, 209, 210, 194, 30, 103, 94, 232, + 215, 156, 35, 220, 252, 128, 85, 161, 115, 209, 254, 11, 229, 69, 68, 16, 173, 115, 51, 188, 84, 197, 166, + 46, 228, 248, 91, 131, 201, 180, 242, 112, 145, 118, 216, 59, 56, 76, 166, 144, 118, 147, 190, 79, 249, 138, + 39, 172, 224, 104, 15, 147, 203, 194, 160, 114, 212, 85, 57, 118, 163, 146, 235, 22, 113, 225, 74, 47, 227, + 151, 187, 3, 104, 222, 52, 63, 176, 183, 228, 48, 57, 210, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 244, 33, 140, 109, 150, 236, 111, 6, 122, 34, + 198, 143, 41, 160, 
114, 87, 19, 100, 67, 105, 17, 23, 207, 207, 165, 154, 197, 195, 216, 122, 85, 71, 188, + 136, 22, 221, 120, 105, 250, 197, 244, 25, 31, 235, 26, 82, 20, 2, 161, 190, 145, 105, 181, 93, 220, 189, + 123, 28, 125, 146, 40, 136, 233, 195, 243, 90, 188, 166, 191, 143, 81, 162, 172, 155, 171, 81, 193, 146, 241, + 54, 43, 27, 123, 252, 118, 186, 206, 189, 137, 190, 27, 108, 191, 224, 165, 176, 35, 84, 180, 239, 1, 154, + 90, 88, 119, 188, 14, 206, 130, 201, 48, 162, 22, 47, 154, 178, 69, 23, 16, 125, 41, 205, 154, 150, 69, + 38, 232, 246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 92, 24, 110, 154, 116, 44, 172, 151, 12, 212, 237, 167, 72, 212, 34, 152, 70, 137, 18, 63, 107, + 49, 62, 47, 55, 73, 23, 94, 82, 198, 25, 15, 205, 206, 55, 193, 27, 105, 182, 226, 43, 114, 78, 239, + 88, 140, 59, 174, 235, 234, 87, 164, 243, 119, 165, 0, 135, 34, 87, 134, 174, 36, 11, 178, 6, 32, 77, + 35, 148, 136, 111, 224, 184, 68, 222, 77, 148, 224, 180, 111, 226, 153, 137, 137, 68, 107, 242, 178, 165, 75, + 29, 58, 180, 55, 68, 149, 97, 179, 170, 65, 214, 104, 212, 186, 175, 231, 205, 157, 108, 127, 17, 233, 201, + 208, 25, 136, 254, 21, 155, 199, 155, 80, 25, 94, 63, 207, 225, 54, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 91, 154, 122, 193, 97, 235, 18, 122, 97, + 189, 4, 149, 229, 239, 33, 52, 187, 7, 82, 229, 93, 184, 150, 218, 48, 150, 196, 152, 3, 233, 183, 182, + 25, 178, 0, 236, 49, 20, 189, 6, 173, 95, 227, 105, 161, 27, 250, 121, 14, 135, 103, 229, 188, 81, 9, + 234, 88, 37, 52, 59, 84, 117, 204, 242, 225, 34, 2, 104, 66, 141, 109, 151, 47, 124, 247, 79, 70, 104, + 63, 13, 198, 242, 110, 131, 247, 20, 246, 65, 238, 119, 86, 42, 21, 43, 175, 45, 231, 78, 143, 118, 8, + 100, 133, 27, 59, 188, 61, 105, 165, 183, 81, 96, 26, 106, 153, 151, 186, 232, 54, 231, 35, 95, 172, 56, + 208, 126, 198, 239, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {2, 3, 4, 32}, {2, 3, 1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 3, 1, 1, 32, 32}, {2, 3, 4, 32}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape4_uint8_2) { + uint8_t data[2 * 3 * 4 * 33] = { + 52, 248, 168, 232, 216, 6, 57, 232, 23, 51, 58, 199, 97, 233, 34, 233, 253, 246, 140, 158, 15, 242, 67, + 94, 7, 87, 198, 238, 158, 196, 171, 112, 217, 115, 221, 218, 208, 169, 161, 197, 234, 74, 98, 234, 121, 4, + 100, 8, 63, 109, 26, 23, 160, 242, 143, 227, 74, 30, 1, 39, 128, 38, 136, 2, 23, 249, 92, 57, 72, + 172, 185, 10, 5, 12, 89, 204, 161, 209, 245, 10, 160, 195, 120, 45, 210, 182, 236, 63, 170, 16, 132, 166, + 217, 183, 24, 40, 239, 122, 76, 26, 3, 102, 209, 149, 246, 178, 31, 78, 18, 40, 79, 201, 238, 145, 33, + 20, 168, 155, 222, 111, 200, 235, 123, 191, 25, 143, 222, 114, 81, 105, 60, 4, 210, 22, 87, 144, 187, 226, + 115, 42, 96, 18, 197, 34, 19, 160, 152, 174, 1, 206, 196, 253, 187, 180, 7, 224, 56, 153, 96, 130, 107, + 102, 143, 114, 48, 59, 249, 241, 134, 212, 174, 231, 147, 25, 166, 184, 74, 162, 138, 179, 121, 8, 56, 214, + 2, 100, 88, 6, 28, 203, 251, 170, 81, 21, 238, 104, 61, 156, 178, 111, 161, 49, 241, 117, 28, 55, 171, + 112, 29, 162, 205, 57, 99, 236, 129, 114, 148, 
209, 116, 72, 220, 195, 93, 151, 206, 20, 188, 127, 60, 231, + 62, 181, 119, 145, 120, 53, 145, 45, 61, 56, 20, 4, 197, 141, 133, 198, 59, 98, 46, 225, 221, 159, 16, + 209, 90, 217, 238, 127, 149, 93, 174, 144, 29, 27, 46, 94, 126, 32, 182, 183, 123, 60, 183, 242, 8, 104, + 52, 107, 58, 147, 90, 0, 23, 95, 51, 243, 191, 12, 199, 30, 253, 120, 154, 68, 170, 83, 41, 235, 197, + 234, 171, 57, 161, 254, 19, 44, 37, 180, 35, 157, 101, 92, 99, 182, 225, 74, 231, 162, 96, 197, 167, 91, + 29, 81, 52, 10, 202, 81, 81, 191, 89, 81, 244, 52, 78, 239, 123, 6, 164, 186, 91, 170, 46, 135, 134, + 126, 199, 118, 153, 32, 82, 181, 130, 97, 16, 36, 71, 105, 227, 148, 138, 122, 250, 32, 47, 42, 79, 221, + 25, 181, 231, 99, 42, 143, 174, 182, 93, 2, 78, 171, 7, 219, 132, 57, 75, 74, 240, 73, 74, 27, 184, + 35, 212, 18, 28, 154, 59, 88, 129, 148, 228, 191, 104, 163, 157, 23, 67, 202, 171, 1, 187, 132, 216, 233, + 152, 231, 35, 94, 25, 209, 79, 152, 207, 103, 192, 169, 42, 60, 64, 124, 140, 183, 224, 53, 162, 7, 3, + 162, 9, 232, 156, 216, 180, 34, 155, 202, 88, 44, 91, 190, 221, 38, 95, 62, 131, 1, 74, 238, 174, 200, + 201, 240, 75, 127, 125, 131, 76, 132, 62, 169, 211, 173, 29, 142, 162, 226, 237, 166, 252, 12, 85, 164, 76, + 228, 23, 126, 189, 203, 51, 135, 76, 90, 195, 89, 220, 55, 206, 231, 7, 52, 61, 128, 87, 134, 243, 203, + 46, 5, 243, 241, 228, 17, 58, 75, 143, 228, 13, 151, 205, 194, 62, 50, 124, 223, 59, 178, 201, 181, 27, + 129, 9, 241, 214, 241, 177, 110, 63, 215, 104, 118, 117, 137, 89, 16, 57, 46, 13, 243, 20, 65, 209, 35, + 241, 127, 117, 161, 71, 37, 89, 10, 18, 38, 19, 120, 137, 198, 143, 240, 74, 240, 119, 242, 50, 64, 47, + 111, 226, 49, 141, 215, 219, 212, 113, 119, 98, 168, 185, 174, 172, 89, 86, 245, 26, 46, 120, 155, 12, 209, + 131, 232, 147, 65, 181, 184, 75, 185, 195, 10, 62, 174, 77, 194, 129, 30, 48, 247, 199, 251, 35, 143, 162, + 80, 26, 11, 117, 12, 232, 46, 75, 215, 114, 127, 34, 211, 133, 244, 233, 117, 75, 5, 201, 47, 120, 197, + 250, 223, 104, 4, 36, 5, 
248, 12, 62, 48, 249, 91, 116, 145, 57, 213, 121, 104, 191, 33, 158, 12, 138, + 107, 70, 105, 110, 189, 17, 58, 112, 45, 178, 110, 72, 31, 80, 249, 54, 76, 45, 54, 88, 114, 213, 240, + 248, 70, 76, 216, 191, 217, 26, 215, 88, 23, 49, 246, 162, 196, 131, 157, 233, 42, 78, 235, 228, 215, 217, + 77, 70, 29, 236, 125, 148, 124, 139, 205, 177, 118, 13, 162, 93, 44, 62, 155, 86, 190, 219, 230, 13, 193, + 174, 233, 35, 14, 107, 61, 83, 181, 89, 198, 200, 127, 161, 58, 79, 18, 152, 157, 74, 241, 93, 241, 68, + 102, 35, 254, 142, 116, 215, 252, 242, 124, 98, 178, 243, 172, 158, 144, 108, 201, 24, 202, 243, 114, 35, 74, + 0, 70, 78, 28, 46, 52, 213, 195, 159, 42, + }; + uint8_t ret[2 * 3 * 2 * 1 * 32 * 32] = { + 52, 248, 168, 232, 216, 6, 57, 232, 23, 51, 58, 199, 97, 233, 34, 233, 253, 246, 140, 158, 15, 242, 67, + 94, 7, 87, 198, 238, 158, 196, 171, 112, 115, 221, 218, 208, 169, 161, 197, 234, 74, 98, 234, 121, 4, 100, + 8, 63, 109, 26, 23, 160, 242, 143, 227, 74, 30, 1, 39, 128, 38, 136, 2, 23, 92, 57, 72, 172, 185, + 10, 5, 12, 89, 204, 161, 209, 245, 10, 160, 195, 120, 45, 210, 182, 236, 63, 170, 16, 132, 166, 217, 183, + 24, 40, 239, 122, 26, 3, 102, 209, 149, 246, 178, 31, 78, 18, 40, 79, 201, 238, 145, 33, 20, 168, 155, + 222, 111, 200, 235, 123, 191, 25, 143, 222, 114, 81, 105, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 217, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 249, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 76, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 210, 22, 87, 144, 187, 226, 115, 42, 96, 18, 197, 34, 19, 160, 152, 174, 1, 206, 196, 253, 187, 180, + 7, 224, 56, 153, 96, 130, 107, 102, 143, 114, 59, 249, 241, 134, 212, 174, 231, 147, 25, 166, 184, 74, 162, + 138, 179, 121, 8, 56, 214, 2, 100, 88, 6, 28, 203, 251, 170, 81, 21, 238, 104, 61, 178, 111, 161, 49, + 241, 117, 28, 55, 171, 112, 29, 162, 205, 57, 99, 236, 129, 114, 148, 209, 116, 72, 220, 195, 93, 151, 206, + 20, 188, 127, 60, 231, 181, 119, 145, 120, 53, 145, 45, 61, 56, 20, 4, 197, 141, 133, 198, 59, 98, 46, + 225, 221, 159, 16, 209, 90, 217, 238, 127, 149, 93, 174, 144, 29, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 48, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 156, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 46, 94, 126, 32, 182, 183, 123, 60, 183, 242, 8, 104, 52, 107, 58, 147, 90, 0, 23, 95, 51, + 243, 191, 12, 199, 30, 253, 120, 154, 68, 170, 83, 235, 197, 234, 171, 57, 161, 254, 19, 44, 37, 180, 35, + 157, 101, 92, 99, 182, 225, 
74, 231, 162, 96, 197, 167, 91, 29, 81, 52, 10, 202, 81, 81, 89, 81, 244, + 52, 78, 239, 123, 6, 164, 186, 91, 170, 46, 135, 134, 126, 199, 118, 153, 32, 82, 181, 130, 97, 16, 36, + 71, 105, 227, 148, 138, 122, 32, 47, 42, 79, 221, 25, 181, 231, 99, 42, 143, 174, 182, 93, 2, 78, 171, + 7, 219, 132, 57, 75, 74, 240, 73, 74, 27, 184, 35, 212, 18, 28, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 41, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 250, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 154, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 59, 88, 129, 148, 228, 191, 104, 163, 157, 23, 67, 202, 171, 1, 187, 132, 216, 233, 152, 231, + 35, 94, 25, 209, 79, 152, 207, 103, 192, 169, 42, 60, 124, 140, 183, 224, 53, 162, 7, 3, 162, 9, 232, + 156, 216, 180, 34, 155, 202, 88, 44, 91, 190, 221, 38, 95, 62, 131, 1, 74, 238, 174, 200, 201, 75, 127, + 125, 131, 76, 132, 62, 169, 211, 173, 29, 142, 162, 226, 237, 166, 252, 12, 85, 164, 76, 228, 23, 126, 189, + 203, 51, 135, 76, 90, 195, 89, 55, 206, 231, 7, 52, 61, 128, 87, 134, 243, 203, 46, 5, 243, 241, 228, + 17, 58, 75, 143, 228, 13, 151, 205, 194, 62, 50, 124, 223, 59, 178, 201, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 181, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 27, 129, 9, 241, 214, 241, 177, 110, 63, 215, 104, 118, 117, 137, 89, 16, 57, 46, 13, + 243, 20, 65, 209, 35, 241, 127, 117, 161, 71, 37, 89, 10, 38, 19, 120, 137, 198, 143, 240, 74, 240, 119, + 242, 50, 64, 47, 111, 226, 49, 141, 215, 219, 212, 113, 119, 98, 168, 185, 174, 172, 89, 86, 245, 26, 120, + 155, 12, 209, 131, 232, 147, 65, 181, 184, 75, 185, 195, 10, 62, 174, 77, 194, 129, 30, 48, 247, 199, 251, + 35, 143, 162, 80, 26, 11, 117, 12, 46, 75, 215, 114, 127, 34, 211, 133, 244, 233, 117, 75, 5, 201, 47, + 120, 197, 250, 223, 104, 4, 36, 5, 248, 12, 62, 48, 249, 91, 116, 145, 57, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 18, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 46, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 232, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 213, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 121, 104, 191, 33, 158, 12, 138, 107, 70, 105, 110, 189, 17, 58, 112, 45, 178, 110, + 72, 31, 80, 249, 54, 76, 45, 54, 88, 114, 213, 240, 248, 70, 216, 191, 217, 26, 215, 88, 23, 49, 246, + 162, 196, 131, 157, 233, 42, 78, 235, 228, 215, 217, 77, 70, 29, 236, 125, 148, 124, 139, 205, 177, 118, 13, + 93, 44, 62, 155, 86, 190, 219, 230, 13, 193, 174, 233, 35, 14, 107, 61, 83, 181, 89, 198, 200, 127, 161, + 58, 79, 18, 152, 157, 74, 241, 93, 241, 102, 35, 254, 142, 116, 215, 252, 242, 124, 98, 178, 243, 172, 158, + 144, 108, 201, 24, 202, 243, 114, 35, 74, 0, 70, 78, 28, 46, 52, 213, 195, 159, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 76, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 162, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 42, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, 
FORMAT_FRACTAL_NZ, {2, 3, 4, 33}, {2, 3, 2, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 3, 2, 1, 32, 32}, {2, 3, 4, 33}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape1_fp16_1) { + uint16_t data[1] = {15298}; + uint16_t ret[1 * 1 * 16 * 16] = { + 15298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {1}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < 
sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {1, 1, 16, 16}, {1}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape1_fp16_2) { + uint16_t data[15] = { + 14396, 15084, 15205, 15113, 14336, 13738, 15301, 15244, 11605, 12532, 12476, 12812, 15042, 14469, 14447, + }; + + uint16_t ret[1 * 1 * 16 * 16] = { + 14396, 15084, 15205, 15113, 14336, 13738, 15301, 15244, 11605, 12532, 12476, 12812, 15042, 14469, 14447, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {15}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + 
EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {1, 1, 16, 16}, {15}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape1_fp16_17) { + uint16_t data[17] = { + 12414, 14598, 9916, 13501, 14162, 10687, 14114, 13019, 15344, + 14505, 14443, 14429, 14846, 14810, 14866, 14155, 11389, + }; + uint16_t ret[2 * 1 * 16 * 16] = { + 12414, 14598, 9916, 13501, 14162, 10687, 14114, 13019, 15344, 14505, 14443, 14429, 14846, 14810, 14866, 14155, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 11389, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {17}, {2, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 1, 16, 16}, {17}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape2_fp16_lt_cube) { + uint16_t data[1 * 1] = { + 15298, + }; + uint16_t ret[1 * 1 * 16 * 16] = { + 15298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {1, 1}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {1, 1, 16, 16}, {1, 1}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape2_fp16_eq_cube) { + uint16_t data[16 * 16] = { + 15023, 14612, 15081, 12835, 14874, 15149, 13652, 15110, 14848, 14036, 13512, 14386, 11849, 14530, 13828, 13930, + 14380, 14759, 13350, 15216, 14638, 12730, 14545, 15337, 14680, 12383, 9617, 13410, 13531, 14910, 14039, 15353, + 14827, 14388, 15312, 13804, 14042, 15127, 14739, 12524, 11523, 15063, 12156, 10173, 14850, 13303, 14890, 14409, + 12562, 11147, 15040, 14602, 14800, 14806, 14556, 15295, 14799, 15345, 14531, 15101, 15258, 12423, 14116, 13513, + 15023, 9054, 13172, 14555, 15170, 13835, 13291, 15211, 13469, 14362, 12255, 11921, 15103, 14972, 13437, 14769, + 14376, 14253, 14829, 12051, 15240, 15318, 14845, 13541, 15126, 13757, 12055, 14772, 12652, 7449, 12143, 14846, + 13031, 15287, 14828, 14525, 12504, 12700, 13977, 14559, 15086, 12995, 14136, 14313, 14534, 14824, 11548, 12033, + 14358, 14592, 15360, 12312, 13607, 11532, 12772, 11878, 14546, 13322, 
14124, 13507, 15044, 15012, 14657, 14341, + 15088, 14846, 14957, 14860, 14826, 13977, 14557, 14106, 10848, 13691, 14355, 15078, 14355, 11530, 11720, 13849, + 13692, 14786, 6425, 13848, 8909, 10261, 13502, 12837, 12819, 11658, 15035, 11295, 14190, 14923, 12409, 14470, + 10467, 14903, 14952, 13936, 12559, 14967, 8628, 13685, 12665, 13643, 13720, 15137, 10802, 12425, 12274, 15259, + 15030, 14501, 14943, 12473, 15254, 13442, 13908, 14537, 10579, 15249, 14514, 14834, 14371, 12354, 14397, 14764, + 14343, 15326, 11343, 13873, 14873, 13394, 14522, 14173, 9848, 14347, 13027, 13164, 14692, 14342, 11434, 15144, + 15346, 14809, 15330, 14318, 13783, 13595, 15112, 14847, 14366, 14770, 14573, 12783, 10261, 13906, 13933, 14975, + 14484, 14586, 14058, 13779, 14595, 14540, 12425, 15070, 14434, 15048, 14875, 12944, 13621, 13993, 10807, 14620, + 13771, 13421, 15224, 14947, 13752, 14836, 14124, 13230, 14185, 15145, 15082, 14819, 12868, 14462, 14960, 13543, + }; + uint16_t ret[1 * 1 * 16 * 16] = { + 15023, 14612, 15081, 12835, 14874, 15149, 13652, 15110, 14848, 14036, 13512, 14386, 11849, 14530, 13828, 13930, + 14380, 14759, 13350, 15216, 14638, 12730, 14545, 15337, 14680, 12383, 9617, 13410, 13531, 14910, 14039, 15353, + 14827, 14388, 15312, 13804, 14042, 15127, 14739, 12524, 11523, 15063, 12156, 10173, 14850, 13303, 14890, 14409, + 12562, 11147, 15040, 14602, 14800, 14806, 14556, 15295, 14799, 15345, 14531, 15101, 15258, 12423, 14116, 13513, + 15023, 9054, 13172, 14555, 15170, 13835, 13291, 15211, 13469, 14362, 12255, 11921, 15103, 14972, 13437, 14769, + 14376, 14253, 14829, 12051, 15240, 15318, 14845, 13541, 15126, 13757, 12055, 14772, 12652, 7449, 12143, 14846, + 13031, 15287, 14828, 14525, 12504, 12700, 13977, 14559, 15086, 12995, 14136, 14313, 14534, 14824, 11548, 12033, + 14358, 14592, 15360, 12312, 13607, 11532, 12772, 11878, 14546, 13322, 14124, 13507, 15044, 15012, 14657, 14341, + 15088, 14846, 14957, 14860, 14826, 13977, 14557, 14106, 10848, 13691, 14355, 15078, 14355, 11530, 
11720, 13849, + 13692, 14786, 6425, 13848, 8909, 10261, 13502, 12837, 12819, 11658, 15035, 11295, 14190, 14923, 12409, 14470, + 10467, 14903, 14952, 13936, 12559, 14967, 8628, 13685, 12665, 13643, 13720, 15137, 10802, 12425, 12274, 15259, + 15030, 14501, 14943, 12473, 15254, 13442, 13908, 14537, 10579, 15249, 14514, 14834, 14371, 12354, 14397, 14764, + 14343, 15326, 11343, 13873, 14873, 13394, 14522, 14173, 9848, 14347, 13027, 13164, 14692, 14342, 11434, 15144, + 15346, 14809, 15330, 14318, 13783, 13595, 15112, 14847, 14366, 14770, 14573, 12783, 10261, 13906, 13933, 14975, + 14484, 14586, 14058, 13779, 14595, 14540, 12425, 15070, 14434, 15048, 14875, 12944, 13621, 13993, 10807, 14620, + 13771, 13421, 15224, 14947, 13752, 14836, 14124, 13230, 14185, 15145, 15082, 14819, 12868, 14462, 14960, 13543, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {16, 16}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {1, 1, 16, 16}, {16, 16}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape2_fp16_bt_cube) { + uint16_t data[17 * 17] = { + 13662, 14610, 14474, 14923, 14368, 14666, 15104, 14562, 13724, 15218, 15128, 12691, 12533, 15299, 13238, 15205, + 13707, 14975, 11272, 9758, 15355, 14665, 14185, 15145, 12189, 12080, 15238, 11535, 12583, 13443, 14808, 9307, + 12003, 13952, 
14663, 14717, 15005, 15046, 14935, 12887, 14334, 15267, 13781, 11960, 11527, 13839, 14623, 12042, + 15032, 12200, 15331, 15099, 11710, 15344, 13350, 12958, 15026, 15009, 14975, 15168, 15308, 14891, 10173, 15178, + 13484, 14803, 5640, 15097, 14876, 14466, 14321, 14769, 14030, 14593, 13651, 14915, 14939, 14129, 14266, 15353, + 14176, 14033, 14899, 15295, 13806, 15335, 14704, 13453, 13973, 14946, 14778, 13658, 13817, 12284, 15310, 14573, + 15092, 11556, 14010, 15285, 15224, 14415, 14416, 14267, 13769, 15087, 14703, 14785, 14997, 13715, 15240, 14832, + 12986, 10470, 14585, 12599, 14580, 15148, 15224, 15198, 14689, 15220, 14893, 15210, 14395, 12158, 14983, 11536, + 14766, 14352, 14730, 10461, 13677, 13519, 14250, 14910, 14798, 13310, 15315, 8482, 12198, 14496, 10964, 12832, + 15159, 15277, 14356, 15267, 14255, 13964, 14685, 12434, 13629, 12445, 14936, 13465, 13457, 15203, 13259, 12179, + 11561, 13570, 14219, 12253, 15191, 12973, 14491, 12556, 11324, 13478, 13908, 15148, 13858, 12349, 15093, 13225, + 14566, 11695, 14521, 14990, 13768, 13051, 13170, 13388, 14859, 12443, 15160, 14516, 13247, 12775, 13522, 14360, + 14664, 14986, 12542, 14363, 15271, 14422, 13573, 14342, 15206, 14705, 15095, 13836, 15212, 14619, 13880, 14634, + 15114, 12986, 14256, 14717, 15024, 15131, 14527, 12591, 13944, 14605, 14441, 14800, 12477, 13455, 14527, 14968, + 13970, 12611, 14455, 15221, 14419, 13924, 15268, 13875, 13521, 14872, 14614, 12206, 15235, 11125, 14982, 14509, + 15098, 14978, 12263, 14535, 13207, 14490, 15225, 14878, 14757, 14951, 12629, 13027, 11620, 14371, 14146, 14833, + 11001, 14672, 14658, 13925, 14528, 14290, 14456, 14173, 14949, 12333, 12236, 13683, 15116, 14828, 13426, 15322, + 13277, 15314, 14786, 14703, 13890, 14661, 10342, 14796, 13330, 14998, 14403, 14078, 14871, 11508, 14323, 11436, + 14368, + }; + uint16_t ret[2 * 2 * 16 * 16] = { + 13662, 14610, 14474, 14923, 14368, 14666, 15104, 14562, 13724, 15218, 15128, 12691, 12533, 15299, 13238, 15205, + 14975, 11272, 9758, 15355, 
14665, 14185, 15145, 12189, 12080, 15238, 11535, 12583, 13443, 14808, 9307, 12003, + 14663, 14717, 15005, 15046, 14935, 12887, 14334, 15267, 13781, 11960, 11527, 13839, 14623, 12042, 15032, 12200, + 15099, 11710, 15344, 13350, 12958, 15026, 15009, 14975, 15168, 15308, 14891, 10173, 15178, 13484, 14803, 5640, + 14876, 14466, 14321, 14769, 14030, 14593, 13651, 14915, 14939, 14129, 14266, 15353, 14176, 14033, 14899, 15295, + 15335, 14704, 13453, 13973, 14946, 14778, 13658, 13817, 12284, 15310, 14573, 15092, 11556, 14010, 15285, 15224, + 14416, 14267, 13769, 15087, 14703, 14785, 14997, 13715, 15240, 14832, 12986, 10470, 14585, 12599, 14580, 15148, + 15198, 14689, 15220, 14893, 15210, 14395, 12158, 14983, 11536, 14766, 14352, 14730, 10461, 13677, 13519, 14250, + 14798, 13310, 15315, 8482, 12198, 14496, 10964, 12832, 15159, 15277, 14356, 15267, 14255, 13964, 14685, 12434, + 12445, 14936, 13465, 13457, 15203, 13259, 12179, 11561, 13570, 14219, 12253, 15191, 12973, 14491, 12556, 11324, + 13908, 15148, 13858, 12349, 15093, 13225, 14566, 11695, 14521, 14990, 13768, 13051, 13170, 13388, 14859, 12443, + 14516, 13247, 12775, 13522, 14360, 14664, 14986, 12542, 14363, 15271, 14422, 13573, 14342, 15206, 14705, 15095, + 15212, 14619, 13880, 14634, 15114, 12986, 14256, 14717, 15024, 15131, 14527, 12591, 13944, 14605, 14441, 14800, + 13455, 14527, 14968, 13970, 12611, 14455, 15221, 14419, 13924, 15268, 13875, 13521, 14872, 14614, 12206, 15235, + 14982, 14509, 15098, 14978, 12263, 14535, 13207, 14490, 15225, 14878, 14757, 14951, 12629, 13027, 11620, 14371, + 14833, 11001, 14672, 14658, 13925, 14528, 14290, 14456, 14173, 14949, 12333, 12236, 13683, 15116, 14828, 13426, + 13277, 15314, 14786, 14703, 13890, 14661, 10342, 14796, 13330, 14998, 14403, 14078, 14871, 11508, 14323, 11436, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13707, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13952, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15331, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15097, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13806, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14415, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14910, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13629, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13836, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12477, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15322, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14368, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {17, 17}, {2, 2, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 2, 16, 16}, {17, 17}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape2_fp16_bt_cube2) { + uint16_t data[15 * 17] = { + 13984, 15309, 14687, 15012, 13546, 5757, 13664, 14888, 13415, 11619, 14844, 15296, 14527, 14740, 14202, 13892, + 13612, 13694, 12624, 14353, 14717, 12610, 15259, 15005, 14785, 14505, 12518, 14368, 14784, 13988, 13606, 15051, + 14421, 9956, 15157, 13565, 13482, 15032, 10603, 15251, 14966, 13910, 9878, 15249, 14559, 15056, 14339, 13208, + 13714, 15162, 13749, 15001, 14643, 14293, 13792, 9517, 13732, 12938, 15180, 14149, 12330, 14026, 14678, 11979, + 9373, 12142, 14542, 14979, 12248, 14923, 14068, 13732, 14410, 13366, 13795, 12315, 14444, 14806, 15145, 14416, + 15079, 15012, 14635, 14751, 14345, 15327, 10808, 15195, 14788, 12895, 12271, 12437, 12297, 11606, 14954, 14753, + 14131, 14014, 15312, 15018, 15178, 14950, 13521, 15063, 15165, 14438, 14535, 15350, 13133, 14331, 13157, 14667, + 15093, 13472, 14155, 14407, 14958, 11307, 
14806, 13644, 9861, 14927, 14574, 14727, 14888, 14474, 14027, 12409, + 15054, 10922, 12313, 13337, 13645, 14812, 14635, 14200, 13674, 13863, 12255, 14175, 13494, 15287, 14981, 13635, + 14259, 14517, 11989, 13467, 11705, 14539, 15304, 13791, 10817, 14220, 14849, 13068, 13337, 11710, 15034, 15188, + 14799, 15214, 14798, 13523, 14919, 15292, 13676, 14603, 10039, 15268, 14717, 11092, 11533, 14989, 14516, 13759, + 13459, 14620, 14159, 13090, 14814, 15344, 15153, 14214, 14683, 15251, 14676, 13381, 15126, 15346, 13542, 14744, + 15055, 14232, 14670, 10211, 13648, 15156, 14030, 12342, 13609, 14376, 13477, 14579, 15173, 15276, 14864, 15294, + 13281, 11630, 9285, 14847, 13219, 14664, 15252, 14198, 15213, 13443, 12339, 14079, 13788, 15304, 14932, 15346, + 15208, 14305, 15259, 11820, 15067, 10837, 11285, 11989, 15032, 11854, 14738, 14365, 13529, 14699, 14273, 12198, + 14542, 12227, 15360, 14999, 10498, 14782, 14549, 14651, 11737, 14529, 15044, 14735, 10683, 15355, 14630, + }; + uint16_t ret[2 * 1 * 16 * 16] = { + 13984, 15309, 14687, 15012, 13546, 5757, 13664, 14888, 13415, 11619, 14844, 15296, 14527, 14740, 14202, 13892, + 13694, 12624, 14353, 14717, 12610, 15259, 15005, 14785, 14505, 12518, 14368, 14784, 13988, 13606, 15051, 14421, + 15157, 13565, 13482, 15032, 10603, 15251, 14966, 13910, 9878, 15249, 14559, 15056, 14339, 13208, 13714, 15162, + 15001, 14643, 14293, 13792, 9517, 13732, 12938, 15180, 14149, 12330, 14026, 14678, 11979, 9373, 12142, 14542, + 12248, 14923, 14068, 13732, 14410, 13366, 13795, 12315, 14444, 14806, 15145, 14416, 15079, 15012, 14635, 14751, + 15327, 10808, 15195, 14788, 12895, 12271, 12437, 12297, 11606, 14954, 14753, 14131, 14014, 15312, 15018, 15178, + 13521, 15063, 15165, 14438, 14535, 15350, 13133, 14331, 13157, 14667, 15093, 13472, 14155, 14407, 14958, 11307, + 13644, 9861, 14927, 14574, 14727, 14888, 14474, 14027, 12409, 15054, 10922, 12313, 13337, 13645, 14812, 14635, + 13674, 13863, 12255, 14175, 13494, 15287, 14981, 13635, 14259, 14517, 11989, 
13467, 11705, 14539, 15304, 13791, + 14220, 14849, 13068, 13337, 11710, 15034, 15188, 14799, 15214, 14798, 13523, 14919, 15292, 13676, 14603, 10039, + 14717, 11092, 11533, 14989, 14516, 13759, 13459, 14620, 14159, 13090, 14814, 15344, 15153, 14214, 14683, 15251, + 13381, 15126, 15346, 13542, 14744, 15055, 14232, 14670, 10211, 13648, 15156, 14030, 12342, 13609, 14376, 13477, + 15173, 15276, 14864, 15294, 13281, 11630, 9285, 14847, 13219, 14664, 15252, 14198, 15213, 13443, 12339, 14079, + 15304, 14932, 15346, 15208, 14305, 15259, 11820, 15067, 10837, 11285, 11989, 15032, 11854, 14738, 14365, 13529, + 14273, 12198, 14542, 12227, 15360, 14999, 10498, 14782, 14549, 14651, 11737, 14529, 15044, 14735, 10683, 15355, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13612, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9956, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13749, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14979, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14345, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14950, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14806, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10817, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15268, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14676, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14579, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13788, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14699, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14630, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {15, 17}, {2, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + 
EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 1, 16, 16}, {15, 17}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape3_fp16) { + uint16_t data[2 * 4 * 17] = { + 14048, 10542, 14296, 15275, 13485, 13171, 14457, 13836, 14419, 14642, 13846, 12295, 15267, 13606, 10887, 14342, + 13293, 14639, 14817, 14463, 15081, 14824, 14599, 14053, 14711, 14912, 15249, 15327, 15131, 13504, 9495, 12905, + 10185, 12650, 12423, 14293, 13922, 13335, 15249, 14355, 12930, 7097, 14121, 14494, 13242, 14305, 15180, 15272, + 12432, 11561, 8031, 13089, 14898, 13582, 11267, 14449, 14847, 11742, 15349, 14744, 14773, 15270, 15198, 11861, + 14172, 14523, 13865, 12930, 12860, 14823, 13583, 14664, 13818, 14550, 14743, 15228, 13850, 15114, 13699, 14964, + 14785, 14490, 11593, 14639, 14924, 14457, 14903, 15036, 15076, 4349, 15271, 13926, 14913, 15078, 9526, 13779, + 14572, 13331, 15246, 13603, 14729, 14508, 14948, 15226, 15171, 13050, 13602, 15051, 12979, 11946, 15219, 12984, + 12561, 14049, 14910, 14352, 15356, 12397, 15046, 14445, 14555, 13245, 14687, 14424, 13651, 13615, 9557, 15190, + 10581, 14783, 9610, 14871, 13393, 14193, 15217, 14855, + }; + uint16_t ret[2 * 2 * 1 * 16 * 16] = { + 14048, 10542, 14296, 15275, 13485, 13171, 14457, 13836, 14419, 14642, 13846, 12295, 15267, 13606, 10887, 14342, + 14639, 14817, 14463, 15081, 14824, 14599, 14053, 14711, 14912, 15249, 15327, 15131, 13504, 9495, 12905, 10185, + 12423, 14293, 13922, 13335, 15249, 14355, 12930, 7097, 14121, 14494, 13242, 14305, 15180, 15272, 12432, 11561, + 13089, 14898, 13582, 11267, 14449, 14847, 11742, 15349, 
14744, 14773, 15270, 15198, 11861, 14172, 14523, 13865, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13293, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12650, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8031, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12930, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12860, 14823, 13583, 14664, 13818, 14550, 14743, 15228, 13850, 15114, 13699, 14964, 14785, 14490, 11593, 14639, + 14457, 14903, 15036, 15076, 4349, 15271, 13926, 14913, 15078, 9526, 13779, 14572, 13331, 15246, 13603, 14729, + 14948, 15226, 15171, 13050, 13602, 15051, 12979, 11946, 15219, 12984, 12561, 14049, 14910, 14352, 15356, 12397, + 14445, 14555, 13245, 14687, 14424, 13651, 13615, 9557, 15190, 10581, 14783, 9610, 14871, 13393, 14193, 15217, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14924, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14508, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15046, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14855, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalNz transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {2, 4, 17}, {2, 2, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalNzND transfer2; + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 2, 1, 16, 16}, {2, 4, 17}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, 
result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape4_fp16) { + uint16_t data[2 * 2 * 17 * 4] = { + 12210, 13522, 12430, 13868, 14463, 12261, 12394, 15327, 14988, 14663, 14310, 12308, 14886, 15036, 13655, 12774, + 13715, 13322, 13198, 14931, 14944, 10231, 14824, 14512, 14493, 14936, 14513, 14481, 13061, 14808, 14637, 13011, + 15351, 15277, 13709, 9313, 14684, 14460, 14576, 13978, 14945, 13652, 14162, 12974, 11122, 15207, 14677, 12431, + 14361, 14347, 14675, 12983, 14020, 13429, 13678, 14861, 14016, 13590, 13322, 9523, 10130, 15338, 11862, 15194, + 14545, 14488, 14159, 15192, 13563, 14782, 13852, 7998, 14920, 12686, 14363, 13754, 14350, 13814, 15258, 14156, + 14198, 14849, 13955, 15126, 13663, 14033, 14483, 12880, 14765, 12977, 14017, 14881, 10395, 14950, 13676, 12497, + 11587, 13427, 14507, 12408, 14615, 12010, 14586, 13531, 9126, 14077, 12947, 13723, 15185, 15262, 15288, 14608, + 15211, 13514, 12745, 14905, 14579, 14199, 14990, 15012, 13932, 13096, 13995, 10413, 9657, 13398, 15304, 10993, + 13516, 14415, 11920, 13584, 13772, 15204, 14925, 14462, 12207, 14373, 14882, 10069, 13641, 12941, 13577, 13330, + 14191, 13926, 13325, 13662, 13478, 14251, 13212, 15161, 14471, 14691, 13904, 12831, 14277, 14566, 14577, 14575, + 12646, 15218, 13438, 13827, 15323, 15245, 12022, 13928, 13358, 15286, 14556, 14414, 12664, 11754, 13737, 15360, + 14533, 14148, 15259, 14354, 14253, 15358, 13804, 13513, 14825, 13973, 14492, 14943, 15124, 14221, 13908, 12768, + 14923, 14801, 15134, 13681, 15313, 10562, 8965, 14670, 15028, 13264, 14901, 14973, 14120, 12946, 13663, 13418, + 9930, 15264, 13267, 11311, 14857, 15204, 14787, 14466, 11394, 14305, 14712, 11728, 14401, 13790, 15359, 15108, + 13342, 15088, 14348, 12047, 14544, 13244, 15299, 14790, 14565, 14827, 12551, 12386, 15074, 
13453, 10206, 14530, + 14922, 11713, 14811, 12342, 14867, 13452, 12332, 11289, 15105, 14896, 15182, 14087, 11717, 14525, 12705, 15096, + 13561, 15094, 13168, 15007, 14888, 14556, 15156, 14829, 12482, 14449, 14379, 14233, 12640, 15000, 13268, 15342, + }; + uint16_t ret[2 * 2 * 1 * 2 * 16 * 16] = { + 12210, 13522, 12430, 13868, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14463, 12261, 12394, 15327, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14988, 14663, 14310, 12308, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14886, 15036, 13655, 12774, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13715, 13322, 13198, 14931, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14944, 10231, 14824, 14512, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14493, 14936, 14513, 14481, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13061, 14808, 14637, 13011, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15351, 15277, 13709, 9313, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14684, 14460, 14576, 13978, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14945, 13652, 14162, 12974, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11122, 15207, 14677, 12431, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14361, 14347, 14675, 12983, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14020, 13429, 13678, 14861, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14016, 13590, 13322, 9523, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10130, 15338, 11862, 15194, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14545, 14488, 14159, 15192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13563, 14782, 13852, 7998, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14920, 12686, 14363, 13754, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14350, 13814, 15258, 14156, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14198, 14849, 13955, 15126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13663, 14033, 14483, 12880, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14765, 12977, 14017, 14881, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10395, 14950, 13676, 12497, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11587, 13427, 14507, 12408, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14615, 12010, 14586, 13531, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9126, 14077, 12947, 13723, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15185, 15262, 15288, 14608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15211, 13514, 12745, 14905, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14579, 14199, 14990, 15012, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13932, 13096, 13995, 10413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9657, 13398, 15304, 10993, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13516, 14415, 11920, 13584, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13772, 15204, 14925, 14462, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12207, 14373, 14882, 10069, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13641, 12941, 13577, 13330, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14191, 13926, 13325, 13662, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13478, 14251, 13212, 15161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14471, 14691, 13904, 12831, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14277, 14566, 14577, 14575, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12646, 15218, 13438, 13827, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15323, 15245, 12022, 13928, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13358, 15286, 14556, 14414, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12664, 11754, 13737, 15360, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14533, 14148, 15259, 14354, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14253, 15358, 13804, 13513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14825, 13973, 14492, 14943, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15124, 14221, 13908, 12768, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14923, 14801, 15134, 13681, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15313, 10562, 8965, 14670, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15028, 13264, 14901, 14973, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14120, 12946, 13663, 13418, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9930, 15264, 13267, 
11311, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14857, 15204, 14787, 14466, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11394, 14305, 14712, 11728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14401, 13790, 15359, 15108, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13342, 15088, 14348, 12047, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14544, 13244, 15299, 14790, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14565, 14827, 12551, 12386, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15074, 13453, 10206, 14530, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14922, 11713, 14811, 12342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14867, 13452, 12332, 11289, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15105, 14896, 15182, 14087, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11717, 14525, 12705, 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13561, 15094, 13168, 15007, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14888, 14556, 15156, 14829, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12482, 14449, 14379, 14233, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12640, 15000, 13268, 15342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {2, 2, 17, 4}, {2, 2, 1, 2, 16, 16}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalNz transfer; + 
EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 2, 1, 2, 16, 16}, {2, 2, 17, 4}, DT_FLOAT16}; + TransResult result2; + FormatTransferFractalNzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } + EXPECT_EQ( + transfer2.TransShape(args2.src_format, args2.src_shape, args2.src_data_type, args2.dst_format, args2.dst_shape), + UNSUPPORTED); +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape5_fp16) { + uint16_t data[2 * 2 * 2 * 17 * 4] = { + 13556, 15090, 13431, 15346, 14703, 12989, 14001, 12860, 13575, 12502, 14798, 15138, 14631, 12157, 14835, 15238, + 12694, 15169, 15085, 14099, 14607, 12480, 14215, 13775, 14927, 11856, 15337, 14506, 14543, 7807, 14832, 10676, + 13073, 13826, 14517, 14412, 13708, 14177, 15000, 13601, 13602, 13334, 12356, 14735, 12911, 13328, 15254, 10387, + 15178, 14461, 14904, 10405, 14517, 15036, 15269, 11255, 11794, 14079, 13985, 14900, 15348, 14743, 10052, 13384, + 15316, 15335, 14157, 13419, 11505, 13544, 12810, 14669, 12757, 15333, 14752, 15037, 15145, 13280, 13592, 10910, + 14744, 13751, 12814, 14982, 14472, 15305, 14934, 15110, 14994, 15006, 14992, 15265, 11530, 14069, 15037, 8437, + 14039, 15081, 12730, 14487, 15310, 13424, 14573, 12558, 14849, 14986, 11413, 13644, 12642, 13521, 13799, 14189, + 11610, 14352, 14088, 13861, 14593, 14717, 14329, 13764, 13170, 13856, 13098, 14325, 14698, 14849, 12469, 14720, + 14304, 14576, 14982, 14447, 14757, 12881, 
13558, 14911, 14554, 14337, 14352, 14555, 14389, 12870, 13010, 13167, + 13268, 14942, 14911, 15197, 8206, 14737, 14671, 14054, 14813, 12949, 14572, 15021, 12855, 13987, 12985, 14355, + 15062, 13801, 14908, 12510, 13709, 15026, 12941, 15232, 13479, 15154, 14795, 14828, 13822, 13856, 14727, 12783, + 13970, 14933, 15262, 14714, 11584, 12866, 15210, 14814, 11503, 14593, 15172, 14393, 10815, 13132, 14913, 14604, + 14181, 14879, 14672, 14774, 14897, 13458, 11012, 13005, 13111, 13352, 14459, 11855, 14548, 13371, 14764, 14338, + 13650, 13757, 12772, 15167, 13692, 14370, 15212, 14216, 13461, 10527, 14846, 15332, 14618, 14004, 13418, 15285, + 15229, 10738, 12875, 12084, 14031, 12661, 15150, 14428, 13571, 14880, 14502, 10309, 12040, 14806, 14822, 13210, + 14393, 14655, 15005, 13270, 14886, 10217, 14173, 13759, 14735, 12563, 12367, 10329, 14646, 12655, 14272, 12609, + 15237, 12138, 14607, 14227, 15165, 13831, 14258, 13192, 15224, 11343, 13923, 13314, 15272, 12291, 14374, 14475, + 14371, 13431, 13291, 12922, 10251, 13184, 13515, 14492, 14766, 14170, 12844, 11903, 13872, 14757, 14971, 14913, + 15144, 13792, 11809, 11480, 14205, 14657, 12875, 15073, 13630, 13329, 14373, 13506, 14163, 15304, 13232, 14395, + 14460, 13353, 15082, 14870, 12710, 13103, 13336, 12321, 12647, 15168, 14898, 13728, 12686, 14789, 15240, 14579, + 14397, 15174, 14279, 14121, 11157, 14978, 13277, 12842, 14598, 15286, 13462, 14271, 14094, 14960, 13514, 14075, + 12637, 14892, 14818, 15027, 15014, 15357, 14838, 14989, 12848, 15091, 13763, 13669, 15239, 14462, 14615, 15082, + 14330, 13958, 14246, 15207, 14803, 15161, 12998, 13896, 12147, 14778, 15046, 13333, 14858, 15355, 15162, 12372, + 13517, 13776, 14294, 14426, 14045, 14515, 13986, 14662, 14177, 14523, 13650, 15292, 14592, 14360, 14693, 14678, + 13446, 14333, 11799, 12098, 8510, 10784, 13598, 14546, 14495, 12675, 13347, 8987, 13801, 12245, 10710, 15308, + 15007, 10601, 15296, 13662, 11378, 13952, 13798, 12348, 11870, 13585, 14267, 14469, 14754, 13968, 14876, 
14450, + 15165, 14067, 14933, 12365, 15338, 14738, 13092, 13725, 14389, 14073, 14400, 13726, 15150, 14789, 15266, 15153, + 15205, 14654, 14273, 9239, 15187, 14684, 12777, 14878, 8817, 15316, 14618, 14897, 14279, 14911, 15207, 12417, + 15259, 14703, 14236, 15065, 14220, 11487, 15283, 14223, 15250, 15049, 14920, 15189, 14776, 13008, 15171, 15003, + 13927, 13856, 14854, 12342, 15219, 13735, 15170, 15203, 13618, 11929, 14821, 15149, 15123, 14251, 14990, 11703, + 14754, 13892, 13407, 13905, 14442, 14741, 14178, 13100, 15285, 13124, 15017, 15054, 14027, 13320, 13337, 15204, + 13910, 13596, 14558, 14393, 14669, 13791, 14094, 14617, 13563, 15187, 13985, 11905, 14285, 14040, 11892, 12635, + 14749, 15181, 14808, 12451, 14639, 15123, 14346, 15036, 12163, 15157, 14339, 13836, 14722, 14431, 14497, 13198, + 11770, 11317, 13766, 14760, 14773, 14848, 14593, 13575, 11623, 14680, 13875, 14539, 14379, 14348, 15332, 14475, + }; + uint16_t ret[2 * 2 * 2 * 1 * 2 * 16 * 16] = { + 13556, 15090, 13431, 15346, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14703, 12989, 14001, 12860, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13575, 12502, 14798, 15138, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14631, 12157, 14835, 15238, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12694, 15169, 15085, 14099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14607, 12480, 14215, 13775, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14927, 11856, 15337, 14506, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14543, 7807, 14832, 10676, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13073, 13826, 14517, 14412, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13708, 14177, 15000, 13601, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13602, 13334, 12356, 14735, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12911, 13328, 15254, 10387, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15178, 14461, 14904, 10405, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14517, 15036, 15269, 11255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11794, 14079, 13985, 14900, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15348, 14743, 10052, 13384, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 15316, 15335, 14157, 13419, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11505, 13544, 12810, 14669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12757, 15333, 14752, 15037, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15145, 13280, 13592, 10910, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14744, 13751, 12814, 14982, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14472, 15305, 14934, 15110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14994, 15006, 14992, 15265, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11530, 14069, 15037, 8437, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14039, 15081, 12730, 14487, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15310, 13424, 14573, 12558, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14849, 14986, 11413, 13644, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12642, 13521, 13799, 14189, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11610, 14352, 14088, 13861, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14593, 14717, 14329, 13764, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13170, 13856, 13098, 14325, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14698, 14849, 12469, 14720, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14304, 14576, 14982, 14447, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14757, 12881, 13558, 14911, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14554, 14337, 14352, 14555, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14389, 12870, 13010, 13167, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13268, 14942, 14911, 15197, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8206, 14737, 14671, 14054, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14813, 12949, 14572, 15021, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12855, 13987, 12985, 14355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15062, 13801, 14908, 12510, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13709, 15026, 12941, 15232, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13479, 15154, 14795, 14828, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13822, 13856, 14727, 12783, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13970, 14933, 15262, 14714, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11584, 12866, 15210, 14814, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11503, 14593, 15172, 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10815, 13132, 14913, 14604, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14181, 14879, 14672, 14774, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14897, 13458, 11012, 13005, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13111, 13352, 14459, 11855, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14548, 13371, 14764, 14338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13650, 13757, 12772, 15167, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13692, 14370, 15212, 14216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13461, 10527, 14846, 15332, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14618, 14004, 13418, 15285, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15229, 10738, 12875, 12084, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14031, 12661, 15150, 14428, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13571, 14880, 14502, 10309, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12040, 14806, 14822, 13210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 14655, 15005, 13270, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14886, 10217, 14173, 13759, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14735, 12563, 12367, 10329, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14646, 12655, 14272, 12609, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15237, 12138, 14607, 14227, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15165, 13831, 14258, 13192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15224, 11343, 13923, 13314, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15272, 12291, 14374, 14475, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14371, 13431, 13291, 12922, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10251, 13184, 13515, 14492, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14766, 14170, 12844, 11903, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13872, 14757, 14971, 14913, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15144, 13792, 11809, 11480, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14205, 14657, 12875, 15073, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13630, 13329, 14373, 13506, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14163, 15304, 13232, 14395, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14460, 13353, 15082, 14870, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12710, 13103, 13336, 12321, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12647, 15168, 14898, 13728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12686, 14789, 15240, 14579, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14397, 15174, 14279, 14121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11157, 14978, 13277, 12842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14598, 15286, 13462, 14271, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14094, 14960, 13514, 14075, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12637, 14892, 14818, 15027, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15014, 15357, 14838, 14989, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12848, 15091, 13763, 13669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15239, 14462, 14615, 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14330, 13958, 14246, 15207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14803, 15161, 12998, 13896, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12147, 14778, 15046, 13333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14858, 15355, 15162, 12372, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13517, 13776, 14294, 14426, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14045, 14515, 13986, 14662, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14177, 14523, 13650, 15292, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14592, 14360, 14693, 14678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13446, 14333, 11799, 12098, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8510, 10784, 13598, 14546, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14495, 12675, 13347, 8987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13801, 12245, 10710, 15308, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15007, 10601, 15296, 13662, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11378, 13952, 13798, 12348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11870, 13585, 14267, 14469, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14754, 13968, 14876, 14450, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15165, 14067, 14933, 12365, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15338, 14738, 13092, 13725, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14389, 14073, 14400, 13726, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15150, 14789, 15266, 15153, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15205, 14654, 14273, 9239, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15187, 14684, 12777, 14878, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8817, 15316, 14618, 14897, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14279, 14911, 15207, 12417, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15259, 14703, 14236, 15065, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14220, 11487, 15283, 14223, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15250, 15049, 14920, 15189, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14776, 13008, 15171, 15003, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13927, 13856, 14854, 12342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15219, 13735, 15170, 15203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13618, 11929, 14821, 15149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15123, 14251, 14990, 11703, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14754, 13892, 13407, 13905, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14442, 14741, 14178, 13100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15285, 13124, 15017, 15054, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14027, 13320, 13337, 15204, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13910, 13596, 14558, 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14669, 13791, 14094, 14617, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13563, 15187, 13985, 11905, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14285, 14040, 11892, 12635, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14749, 15181, 14808, 12451, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14639, 15123, 14346, 15036, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12163, 15157, 14339, 13836, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14722, 14431, 14497, 13198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11770, 11317, 13766, 14760, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14773, 14848, 14593, 13575, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11623, 14680, 13875, 14539, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14379, 14348, 15332, 14475, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{reinterpret_cast(data), 
FORMAT_ND, FORMAT_FRACTAL_NZ, {2, 2, 2, 17, 4}, + {2, 2, 2, 1, 2, 16, 16}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalNz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, + {2, 2, 2, 1, 2, 16, 16}, {2, 2, 2, 17, 4}, DT_FLOAT16}; + TransResult result2; + FormatTransferFractalNzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nd_shape4_fp32) { + float data[2 * 2 * 17 * 4] = { + 0.24017745833005677, 0.7908338513866925, 0.5051460913827948, 0.9212520685117512, 0.4329735845708327, + 0.9207470159046747, 0.05926231862060216, 0.76990655686985, 0.7223645626864388, 0.43501433129012024, + 0.40570186933951713, 0.702542637118438, 0.773774946386804, 0.04265471007054322, 0.2989896038480955, + 0.4033603882338742, 0.34393668393267907, 0.5850246556661213, 0.6750891081060844, 0.3559427848582608, + 0.43614052169082185, 0.6908271139843668, 0.5897294883672143, 0.35488198940932736, 0.4682340192505966, + 0.9113037459116056, 0.7443829866997046, 0.89433687460791, 0.31510061278863843, 0.20044500575711433, + 0.28142410758015657, 0.4517900601137299, 0.6351885056619392, 0.8974159512127593, 0.3331668058021574, + 0.11222970046456449, 0.8732340412519595, 0.2148878909636669, 0.33598072192817896, 0.5817085292780417, + 0.14893294317017536, 0.5551645528327099, 0.9796426641977305, 0.9568166955425949, 0.7718066236111724, + 0.6300751765176865, 
0.8497117123321973, 0.3534708093622494, 0.7657382003166453, 0.587899478133755, + 0.7784779515613647, 0.4015232698113339, 0.9254034195951792, 0.9806776040632871, 0.7398963961756377, + 0.4717105017726926, 0.13075121515420907, 0.26501153374416997, 0.4592668571870513, 0.38083145381689754, + 0.5048080389976721, 0.8316384355591627, 0.016176084697146043, 0.3500580014319852, 0.2494321620580643, + 0.8363651463924084, 0.39040381268116464, 0.41421765968696656, 0.2296129377253905, 0.3795273985435502, + 0.49840831219300297, 0.5660689163115291, 0.5410166356568628, 0.2932118573951793, 0.7231596801124212, + 0.7615642755162464, 0.5700242312079427, 0.015282443180747451, 0.6074141579987347, 0.606840341727317, + 0.7958320122497057, 0.46657383708965916, 0.9198103681172877, 0.7466699920139369, 0.8798355998800057, + 0.5490826769093224, 0.1569764914287578, 0.631652567858096, 0.07538015959634248, 0.30771430912374564, + 0.801341360218133, 0.4112029724744082, 0.24872460704563282, 0.7988220100765487, 0.8956893776310078, + 0.3523457305677019, 0.7607249251896161, 0.9508908308534454, 0.9844149475432433, 0.46137662395819556, + 0.3361994725719326, 0.910215423356587, 0.8514253530131357, 0.18564786925351284, 0.031197422157041577, + 0.32088308811207233, 0.5268494210366956, 0.37652532271007444, 0.018319746627926126, 0.6805316533631041, + 0.6637986306311675, 0.8682215174258817, 0.9928743926162217, 0.11762205726513975, 0.6195005538182846, + 0.573208212736639, 0.5325639815459885, 0.8685720994574258, 0.30416206083796893, 0.29375443689500746, + 0.722600953459129, 0.060879922974677037, 0.5516867775841858, 0.7282801588163559, 0.1544836784918584, + 0.41799093214709193, 0.980749303446289, 0.43370624352513687, 0.36172612101052615, 0.3028306626687869, + 0.2230506430289495, 0.05431581276922193, 0.7968231625249534, 0.8068938615817157, 0.45578821844224493, + 0.0018389569240472303, 0.12845645761430868, 0.43174872314200496, 0.19019706886091625, 0.7206755294822818, + 0.31089686267676875, 0.34478094342047017, 
0.20388910753878797, 0.9013725029423029, 0.20172359299220255, + 0.1629922568981731, 0.6954990727866708, 0.26497630445509435, 0.12379402401488193, 0.6447543753857757, + 0.32969746233649067, 0.4546871355393207, 0.9032509525867825, 0.9858786808247757, 0.4862602610112142, + 0.08219085113577962, 0.7870912428425353, 0.9013772471762193, 0.09947185442909168, 0.8752347829007687, + 0.39322080280348903, 0.38553330390114127, 0.815521169321101, 0.25465238460393447, 0.7719638078574547, + 0.3681634855586098, 0.12917191531069916, 0.8782893607825132, 0.31538202975647667, 0.11060152128642453, + 0.8877996039186895, 0.6277653065585466, 0.788572654306122, 0.8473770431762598, 0.06733131234431977, + 0.8257265623261241, 0.7358077509239135, 0.6897284238471844, 0.16316388905274182, 0.6419388952410704, + 0.46373928769713935, 0.22624570338477634, 0.8344650905883917, 0.5220387776690497, 0.8223957140404234, + 0.8522442229878241, 0.914872169915326, 0.9507088797212623, 0.5886632711634477, 0.18329757039598793, + 0.43052254564266745, 0.3273224327191193, 0.7027336757262003, 0.6825647115162772, 0.9175905076205535, + 0.5938919379015825, 0.05279831332678442, 0.9610878906434714, 0.8151949508556603, 0.9200058002592004, + 0.4101544181795893, 0.09912168423937695, 0.17364500943816064, 0.4429247543426651, 0.14062873380913987, + 0.23738371369937172, 0.4938182494622466, 0.28673035611440445, 0.6816179119575762, 0.5172878882496988, + 0.7506142571467134, 0.32139544952437704, 0.3032752890874719, 0.8444155718538534, 0.8331913949169982, + 0.5432084580245907, 0.07799994151132295, 0.6807639318778701, 0.22042219816817854, 0.9407382759228957, + 0.847128737153653, 0.393277915570015, 0.36572620008744783, 0.9932234924651848, 0.980452452787659, + 0.09199460655585212, 0.2040881385507629, 0.6416200216420471, 0.3163820661665575, 0.0178114452425554, + 0.19209600193274623, 0.56386270765364, 0.5883501002605002, 0.047290373739652725, 0.9429035695153023, + 0.29861798784711935, 0.8987397733067158, 0.31081233810078046, 
0.32273697333672224, 0.6123857767652331, + 0.47907753776865447, 0.38230796360390273, 0.015510368346530967, 0.10606747306100672, 0.5930796121652361, + 0.3716827365867703, 0.6965466612015544, 0.9129958695419979, 0.13740941341338297, 0.6057045902472779, + 0.795600140320669, 0.9353438814017689, 0.7814345681667847, 0.6606618076700961, 0.15167442010087617, + 0.3024865384516856, 0.4121791513857548, 0.8073450149573994, 0.16939447194962576, 0.05472329091313577, + 0.7853320936770362, 0.7413721006805821, 0.7875008545125217, 0.949175848170841, 0.13480031206099008, + 0.6583276122829612, 0.3606029738996872, 0.3139689644221102, 0.18118778976359917, 0.5639618456607737, + 0.8471462699161201, 0.5527251348233345, + }; + float ret[2 * 2 * 1 * 2 * 16 * 16] = { + 0.24017745833005677, + 0.7908338513866925, + 0.5051460913827948, + 0.9212520685117512, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4329735845708327, + 0.9207470159046747, + 0.05926231862060216, + 0.76990655686985, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7223645626864388, + 0.43501433129012024, + 0.40570186933951713, + 0.702542637118438, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.773774946386804, + 0.04265471007054322, + 0.2989896038480955, + 0.4033603882338742, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34393668393267907, + 0.5850246556661213, + 0.6750891081060844, + 0.3559427848582608, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.43614052169082185, + 0.6908271139843668, + 0.5897294883672143, + 0.35488198940932736, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4682340192505966, + 0.9113037459116056, + 0.7443829866997046, + 0.89433687460791, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.31510061278863843, + 
0.20044500575711433, + 0.28142410758015657, + 0.4517900601137299, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6351885056619392, + 0.8974159512127593, + 0.3331668058021574, + 0.11222970046456449, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8732340412519595, + 0.2148878909636669, + 0.33598072192817896, + 0.5817085292780417, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14893294317017536, + 0.5551645528327099, + 0.9796426641977305, + 0.9568166955425949, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7718066236111724, + 0.6300751765176865, + 0.8497117123321973, + 0.3534708093622494, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7657382003166453, + 0.587899478133755, + 0.7784779515613647, + 0.4015232698113339, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9254034195951792, + 0.9806776040632871, + 0.7398963961756377, + 0.4717105017726926, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.13075121515420907, + 0.26501153374416997, + 0.4592668571870513, + 0.38083145381689754, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5048080389976721, + 0.8316384355591627, + 0.016176084697146043, + 0.3500580014319852, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2494321620580643, + 0.8363651463924084, + 0.39040381268116464, + 0.41421765968696656, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2296129377253905, + 0.3795273985435502, + 0.49840831219300297, + 0.5660689163115291, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5410166356568628, + 0.2932118573951793, + 0.7231596801124212, + 0.7615642755162464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5700242312079427, + 0.015282443180747451, + 0.6074141579987347, + 0.606840341727317, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7958320122497057, + 0.46657383708965916, + 0.9198103681172877, + 0.7466699920139369, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8798355998800057, + 0.5490826769093224, + 0.1569764914287578, + 0.631652567858096, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07538015959634248, + 0.30771430912374564, + 0.801341360218133, + 0.4112029724744082, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24872460704563282, + 0.7988220100765487, + 0.8956893776310078, + 0.3523457305677019, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7607249251896161, + 0.9508908308534454, + 0.9844149475432433, + 0.46137662395819556, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3361994725719326, + 0.910215423356587, + 0.8514253530131357, + 0.18564786925351284, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.031197422157041577, + 0.32088308811207233, + 0.5268494210366956, + 0.37652532271007444, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.018319746627926126, + 0.6805316533631041, + 0.6637986306311675, + 0.8682215174258817, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9928743926162217, + 0.11762205726513975, + 0.6195005538182846, + 0.573208212736639, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5325639815459885, + 0.8685720994574258, + 0.30416206083796893, + 0.29375443689500746, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.722600953459129, + 0.060879922974677037, + 0.5516867775841858, + 0.7282801588163559, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1544836784918584, + 0.41799093214709193, + 0.980749303446289, + 0.43370624352513687, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.36172612101052615, + 0.3028306626687869, + 0.2230506430289495, + 0.05431581276922193, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7968231625249534, + 0.8068938615817157, + 0.45578821844224493, + 0.0018389569240472303, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.12845645761430868, + 0.43174872314200496, + 0.19019706886091625, + 0.7206755294822818, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.31089686267676875, + 0.34478094342047017, + 0.20388910753878797, + 0.9013725029423029, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20172359299220255, + 0.1629922568981731, + 0.6954990727866708, + 0.26497630445509435, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12379402401488193, + 0.6447543753857757, + 0.32969746233649067, + 0.4546871355393207, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9032509525867825, + 0.9858786808247757, + 0.4862602610112142, + 0.08219085113577962, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7870912428425353, + 0.9013772471762193, + 0.09947185442909168, + 0.8752347829007687, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.39322080280348903, + 0.38553330390114127, + 0.815521169321101, + 0.25465238460393447, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7719638078574547, + 0.3681634855586098, + 0.12917191531069916, + 0.8782893607825132, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.31538202975647667, + 0.11060152128642453, + 0.8877996039186895, + 0.6277653065585466, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.788572654306122, + 0.8473770431762598, + 0.06733131234431977, + 0.8257265623261241, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7358077509239135, + 0.6897284238471844, + 0.16316388905274182, + 0.6419388952410704, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.46373928769713935, + 0.22624570338477634, + 
0.8344650905883917, + 0.5220387776690497, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8223957140404234, + 0.8522442229878241, + 0.914872169915326, + 0.9507088797212623, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5886632711634477, + 0.18329757039598793, + 0.43052254564266745, + 0.3273224327191193, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7027336757262003, + 0.6825647115162772, + 0.9175905076205535, + 0.5938919379015825, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05279831332678442, + 0.9610878906434714, + 0.8151949508556603, + 0.9200058002592004, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4101544181795893, + 0.09912168423937695, + 0.17364500943816064, + 0.4429247543426651, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14062873380913987, + 0.23738371369937172, + 0.4938182494622466, + 0.28673035611440445, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6816179119575762, + 0.5172878882496988, + 0.7506142571467134, + 0.32139544952437704, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3032752890874719, + 0.8444155718538534, + 0.8331913949169982, + 0.5432084580245907, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07799994151132295, + 0.6807639318778701, + 0.22042219816817854, + 0.9407382759228957, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.847128737153653, + 0.393277915570015, + 0.36572620008744783, + 0.9932234924651848, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.980452452787659, + 0.09199460655585212, + 0.2040881385507629, + 0.6416200216420471, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3163820661665575, + 0.0178114452425554, + 0.19209600193274623, + 0.56386270765364, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5883501002605002, + 0.047290373739652725, + 0.9429035695153023, + 0.29861798784711935, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8987397733067158, + 0.31081233810078046, + 0.32273697333672224, + 0.6123857767652331, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.47907753776865447, + 0.38230796360390273, + 0.015510368346530967, + 0.10606747306100672, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5930796121652361, + 0.3716827365867703, + 0.6965466612015544, + 0.9129958695419979, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.13740941341338297, + 0.6057045902472779, + 0.795600140320669, + 0.9353438814017689, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7814345681667847, + 0.6606618076700961, + 0.15167442010087617, + 0.3024865384516856, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4121791513857548, + 0.8073450149573994, + 0.16939447194962576, + 0.05472329091313577, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7853320936770362, + 0.7413721006805821, + 0.7875008545125217, + 0.949175848170841, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.13480031206099008, + 0.6583276122829612, + 0.3606029738996872, + 0.3139689644221102, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18118778976359917, + 0.5639618456607737, + 0.8471462699161201, + 0.5527251348233345, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_NZ, {2, 2, 17, 4}, {2, 2, 1, 2, 16, 16}, DT_FLOAT}; + TransResult result; + FormatTransferFractalNz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_ND, {2, 2, 1, 2, 16, 16}, {2, 2, 17, 4}, DT_FLOAT}; + TransResult result2; + 
FormatTransferFractalNzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 4); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nchw_shape4_fp32) { + float data[2 * 2 * 17 * 4] = { + 0.5602121231402468, 0.12800383781971492, 0.5326714230167173, 0.6272243646573483, 0.02789503350921596, + 0.6760986699314114, 0.8240135711596471, 0.34248414683295103, 0.7280572978193823, 0.03408620931709949, + 0.17897548862797752, 0.049319216130346066, 0.5285626554498787, 0.6543935963248121, 0.9499835158011221, + 0.15696623695207612, 0.5518593327582763, 0.735480571158555, 0.27252722389251094, 0.5952131357932664, + 0.9342664862715897, 0.06807185097642088, 0.7986155333967675, 0.2940533147166918, 0.7717279809493778, + 0.06500681776149064, 0.40209523004375736, 0.6402504455119793, 0.7183356244742499, 0.8848092118855054, + 0.9623394724948869, 0.6404196604920734, 0.3129415157617603, 0.6566440258339626, 0.4829961676502589, + 0.9095794636705585, 0.38431275502739315, 0.6618934269256913, 0.016948846804644813, 0.8487902640408118, + 0.0019437036739571312, 0.9593039469007972, 0.7369345068397335, 0.8038376866300426, 0.23733142070927404, + 0.6766568636607861, 0.8791428417567961, 0.34970553354412104, 0.7767111731260719, 0.36939908341858885, + 0.8877771223343094, 0.16656518085775396, 0.40524411235449553, 0.5094980580412446, 0.42932740059809227, + 0.4806992101733476, 0.942893176736217, 0.7569186327365722, 0.43762218258484487, 0.5709277573723915, + 0.6626357139000086, 0.9388125948827755, 0.6922120795940312, 0.2802353109920688, 0.6384950189782832, + 0.8203631774543025, 0.4819185524506592, 0.5612388571573312, 0.7890794105898543, 0.026657068661979144, + 0.36745236086958255, 0.09421420582014717, 0.4125170256726969, 0.8372759010112841, 0.6858892915066466, + 0.055965911139327695, 0.733752513401978, 0.2860600524805865, 
0.24581639133981414, 0.4657602259317729, + 0.9844669862500401, 0.04259392169423548, 0.2850934545160856, 0.6116234365122941, 0.8431280227198134, + 0.09330478841127465, 0.10738171141015296, 0.8058302145583697, 0.1887687053820234, 0.020772300699924195, + 0.2675743523191082, 0.509425440737449, 0.7454162260498424, 0.3601811686584533, 0.6356766970010271, + 0.08368780784222996, 0.7954204147818181, 0.6808861341529187, 0.9732894436382853, 0.028456208873444133, + 0.6898670482181255, 0.5602400644580263, 0.007835682106110964, 0.3126424433906586, 0.5461294624203212, + 0.24974364067650745, 0.3225823914937893, 0.5003866335569956, 0.661414559901519, 0.6611594211086985, + 0.0832376382837211, 0.6088151725555888, 0.31350612996189475, 0.6824719404358928, 0.8962030999552082, + 0.5898412128782159, 0.6192600820436853, 0.7065475830586582, 0.47775907849535804, 0.9872123954283326, + 0.5794892396998665, 0.9821886556152676, 0.5938251530775381, 0.30404687707509925, 0.03598332590135811, + 0.43853593213910447, 0.42005264249981566, 0.27092494528759103, 0.5857315286790321, 0.3453845905764671, + 0.38938830465869656, 0.2686851760469917, 0.6080813798369535, 0.8975830535375084, 0.16063980186497728, + 0.6586597679036962, 0.34853632098020637, 0.5222409587324605, 0.34629031735429283, 0.9571001776320724, + 0.22758512028439548, 0.4961198993089996, 0.7035427414896834, 0.9873475820942967, 0.9225755101854336, + 0.4345542179267431, 0.5826852296872961, 0.3959711566516181, 0.8959668161588287, 0.5781473185927918, + 0.15770465606442519, 0.2465330879179335, 0.9967009535833719, 0.7209513334268077, 0.3050224221409039, + 0.9219568626656348, 0.718546385528138, 0.21647873996460432, 0.9485312530824154, 0.7544823112339518, + 0.8793678793431775, 0.5321808027467093, 0.44421663094592234, 0.3540210579398514, 0.5201311798545218, + 0.42005202877685466, 0.9185755141210294, 0.2513817846312163, 0.2726794018274953, 0.4146719265832651, + 0.9556854071727073, 0.34185681700133563, 0.23008112912632606, 0.5530759925523093, 
0.1287401965557975, + 0.9329184289796552, 0.597049889010525, 0.6117591325403197, 0.4119313490448945, 0.3232267846716024, + 0.20173136300954464, 0.6002148229523541, 0.7815012855520117, 0.28544406461634386, 0.8269928516978476, + 0.10932038205185535, 0.6143160563886838, 0.2184224010482163, 0.5522809330115714, 0.5677845896560437, + 0.8579879882249971, 0.6355837987328417, 0.04362984397169789, 0.7754836644431609, 0.1431977287912315, + 0.6092936034964014, 0.4596033225489746, 0.3592578817865715, 0.553616658068951, 0.19183381195695703, + 0.45079770418260423, 0.2799744643950717, 0.6432910866105234, 0.08411912299187807, 0.6745840838964664, + 0.07598087848264623, 0.8240250634502845, 0.36570374341381084, 0.006762919216348862, 0.7968845960590147, + 0.02514782044366326, 0.3068699746421577, 0.1138875791627122, 0.4292015547916156, 0.3153636011167973, + 0.9394545926418999, 0.5658737300910498, 0.5162676744048261, 0.06243779693597862, 0.6687945075056573, + 0.6108948588331585, 0.251069063724365, 0.9194382362252776, 0.427182897606301, 0.2859283493741377, + 0.47567202696913746, 0.4158263080352007, 0.13267581511787707, 0.7282071557718046, 0.1269283431875673, + 0.3225554819437485, 0.3474737724799578, 0.9174000239171781, 0.575526441418363, 0.6424082388100079, + 0.7795858447358935, 0.4961176629005909, 0.7783772378271568, 0.42736590566493216, 0.47753749552007785, + 0.1624392018841635, 0.028751267485759735, 0.32060419387936456, 0.9219882035969446, 0.8615631839985258, + 0.769906654883067, 0.8950660850360729, 0.5997750400061773, 0.7849875817664871, 0.4668319723460155, + 0.395114919896967, 0.8253563646816371, 0.07626413874831572, 0.11553536946297527, 0.5967093302357965, + 0.6250874863042443, 0.9295880168076048, 0.09201501553706759, 0.4002992747710562, 0.5020954829641272, + 0.8107293478061589, 0.7991328274514402, 0.14082794482093408, 0.19565070959320263, 0.8149919267700367, + 0.9462274058046314, 0.7109368887841762, 0.3664044938712081, 0.9577840290332994, 0.134161476174524, + 0.08818671344845908, 
0.6137505449578752, + }; + float ret[2 * 2 * 1 * 2 * 16 * 16] = { + 0.5602121231402468, + 0.12800383781971492, + 0.5326714230167173, + 0.6272243646573483, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02789503350921596, + 0.6760986699314114, + 0.8240135711596471, + 0.34248414683295103, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7280572978193823, + 0.03408620931709949, + 0.17897548862797752, + 0.049319216130346066, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5285626554498787, + 0.6543935963248121, + 0.9499835158011221, + 0.15696623695207612, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5518593327582763, + 0.735480571158555, + 0.27252722389251094, + 0.5952131357932664, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9342664862715897, + 0.06807185097642088, + 0.7986155333967675, + 0.2940533147166918, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7717279809493778, + 0.06500681776149064, + 0.40209523004375736, + 0.6402504455119793, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7183356244742499, + 0.8848092118855054, + 0.9623394724948869, + 0.6404196604920734, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3129415157617603, + 0.6566440258339626, + 0.4829961676502589, + 0.9095794636705585, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.38431275502739315, + 0.6618934269256913, + 0.016948846804644813, + 0.8487902640408118, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0019437036739571312, + 0.9593039469007972, + 0.7369345068397335, + 0.8038376866300426, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23733142070927404, + 
0.6766568636607861, + 0.8791428417567961, + 0.34970553354412104, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7767111731260719, + 0.36939908341858885, + 0.8877771223343094, + 0.16656518085775396, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40524411235449553, + 0.5094980580412446, + 0.42932740059809227, + 0.4806992101733476, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.942893176736217, + 0.7569186327365722, + 0.43762218258484487, + 0.5709277573723915, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6626357139000086, + 0.9388125948827755, + 0.6922120795940312, + 0.2802353109920688, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6384950189782832, + 0.8203631774543025, + 0.4819185524506592, + 0.5612388571573312, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7890794105898543, + 0.026657068661979144, + 0.36745236086958255, + 0.09421420582014717, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4125170256726969, + 0.8372759010112841, + 0.6858892915066466, + 0.055965911139327695, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.733752513401978, + 0.2860600524805865, + 0.24581639133981414, + 0.4657602259317729, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9844669862500401, + 0.04259392169423548, + 0.2850934545160856, + 0.6116234365122941, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8431280227198134, + 0.09330478841127465, + 0.10738171141015296, + 0.8058302145583697, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1887687053820234, + 0.020772300699924195, + 0.2675743523191082, + 0.509425440737449, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7454162260498424, + 0.3601811686584533, + 0.6356766970010271, + 0.08368780784222996, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7954204147818181, + 0.6808861341529187, + 0.9732894436382853, + 0.028456208873444133, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6898670482181255, + 0.5602400644580263, + 0.007835682106110964, + 0.3126424433906586, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5461294624203212, + 0.24974364067650745, + 0.3225823914937893, + 0.5003866335569956, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.661414559901519, + 0.6611594211086985, + 0.0832376382837211, + 0.6088151725555888, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.31350612996189475, + 0.6824719404358928, + 0.8962030999552082, + 0.5898412128782159, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6192600820436853, + 0.7065475830586582, + 0.47775907849535804, + 0.9872123954283326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5794892396998665, + 0.9821886556152676, + 0.5938251530775381, + 0.30404687707509925, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03598332590135811, + 0.43853593213910447, + 0.42005264249981566, + 0.27092494528759103, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5857315286790321, + 0.3453845905764671, + 0.38938830465869656, + 0.2686851760469917, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6080813798369535, + 0.8975830535375084, + 0.16063980186497728, + 0.6586597679036962, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34853632098020637, + 0.5222409587324605, + 0.34629031735429283, + 0.9571001776320724, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22758512028439548, + 0.4961198993089996, + 0.7035427414896834, + 0.9873475820942967, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9225755101854336, + 0.4345542179267431, + 0.5826852296872961, + 0.3959711566516181, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8959668161588287, + 0.5781473185927918, + 0.15770465606442519, + 0.2465330879179335, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.9967009535833719, + 0.7209513334268077, + 0.3050224221409039, + 0.9219568626656348, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.718546385528138, + 0.21647873996460432, + 0.9485312530824154, + 0.7544823112339518, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8793678793431775, + 0.5321808027467093, + 0.44421663094592234, + 0.3540210579398514, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5201311798545218, + 0.42005202877685466, + 0.9185755141210294, + 0.2513817846312163, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2726794018274953, + 0.4146719265832651, + 0.9556854071727073, + 0.34185681700133563, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23008112912632606, + 0.5530759925523093, + 0.1287401965557975, + 0.9329184289796552, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.597049889010525, + 0.6117591325403197, + 0.4119313490448945, + 0.3232267846716024, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20173136300954464, + 0.6002148229523541, + 0.7815012855520117, + 0.28544406461634386, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8269928516978476, + 0.10932038205185535, + 0.6143160563886838, + 0.2184224010482163, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5522809330115714, + 0.5677845896560437, + 0.8579879882249971, + 0.6355837987328417, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04362984397169789, + 0.7754836644431609, + 0.1431977287912315, + 0.6092936034964014, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4596033225489746, + 0.3592578817865715, + 0.553616658068951, + 
0.19183381195695703, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.45079770418260423, + 0.2799744643950717, + 0.6432910866105234, + 0.08411912299187807, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6745840838964664, + 
0.07598087848264623, + 0.8240250634502845, + 0.36570374341381084, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.006762919216348862, + 0.7968845960590147, + 0.02514782044366326, + 0.3068699746421577, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1138875791627122, + 0.4292015547916156, + 0.3153636011167973, + 0.9394545926418999, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5658737300910498, + 0.5162676744048261, + 0.06243779693597862, + 0.6687945075056573, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6108948588331585, + 0.251069063724365, + 0.9194382362252776, + 0.427182897606301, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2859283493741377, + 0.47567202696913746, + 0.4158263080352007, + 0.13267581511787707, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7282071557718046, + 0.1269283431875673, + 0.3225554819437485, + 0.3474737724799578, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9174000239171781, + 0.575526441418363, + 0.6424082388100079, + 0.7795858447358935, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4961176629005909, + 0.7783772378271568, + 0.42736590566493216, + 0.47753749552007785, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1624392018841635, + 0.028751267485759735, + 0.32060419387936456, + 0.9219882035969446, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8615631839985258, + 0.769906654883067, + 0.8950660850360729, + 0.5997750400061773, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7849875817664871, + 0.4668319723460155, + 0.395114919896967, + 0.8253563646816371, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07626413874831572, + 0.11553536946297527, + 0.5967093302357965, + 0.6250874863042443, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9295880168076048, + 0.09201501553706759, + 0.4002992747710562, + 0.5020954829641272, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8107293478061589, + 0.7991328274514402, + 0.14082794482093408, + 0.19565070959320263, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8149919267700367, + 0.9462274058046314, + 0.7109368887841762, + 0.3664044938712081, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9577840290332994, + 0.134161476174524, + 0.08818671344845908, + 0.6137505449578752, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_NZ, {2, 2, 17, 4}, {2, 2, 1, 2, 16, 16}, DT_FLOAT}; + TransResult result; + FormatTransferFractalNz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_NCHW, {2, 2, 1, 2, 16, 16}, {2, 2, 17, 4}, DT_FLOAT}; + TransResult result2; + FormatTransferFractalNzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 4); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, nhwc_shape4_fp32) { + float data[2 * 2 * 4 * 17] = { + 0.011873719514500558, 0.9055337066931509, 0.4007962186879912, 0.4823248697007644, 0.434413711781758, + 0.3374081761653097, 0.1327889293124861, 0.9573280427327941, 0.900157174839599, 0.22675418134305847, + 0.915880789401114, 0.2858704345543902, 0.9538630233786596, 0.7750326328379964, 0.6241316443191395, + 
0.9671130471718147, 0.4519972100893155, 0.27178741914922444, 0.9858432029701361, 0.19476602256637698, + 0.3880385111012623, 0.4904599528435152, 0.7763741995316538, 0.595041172451146, 0.7697729614020185, + 0.9008648456464086, 0.10660640559911716, 0.9872130150760993, 0.8863345663931051, 0.42187474552924187, + 0.890712319631421, 0.8511675560554783, 0.49085053954474256, 0.6393638683917573, 0.7753996529731574, + 0.4520989748269998, 0.8049570752479891, 0.8587842070040622, 0.8086535260498522, 0.2874426357486748, + 0.4655016269762732, 0.2966483939283724, 0.034319962943348825, 0.8204809948347593, 0.009691641525112815, + 0.08010193588797776, 0.965026232132812, 0.7971692934454786, 0.8820844501303087, 0.3328045944663507, + 0.18561195288144405, 0.07216530096679319, 0.005908780438422023, 0.06520472370046182, 0.14742586552942527, + 0.1584733061658462, 0.40478873960253803, 0.8517076637562077, 0.38362604730995475, 0.6242972689768637, + 0.8029032121893791, 0.9087739592614803, 0.4937699233327367, 0.45546332276267887, 0.798105153390389, + 0.1314550983849262, 0.2591920182847739, 0.9114933413684086, 0.3414109593297737, 0.026092765672044882, + 0.2931518007384959, 0.8469358330691886, 0.47102665902579355, 0.40483320725316896, 0.12104263689198036, + 0.7610259859295765, 0.18319775575048725, 0.9581675087575537, 0.6023459738089741, 0.9770086789537543, + 0.32030513714161946, 0.9998097997775989, 0.9030541284918164, 0.929507484163007, 0.5701279464990141, + 0.485477868042418, 0.10576997065572546, 0.9869010419984975, 0.7601188774307379, 0.7299650375549, + 0.47395049565443736, 0.2987941122063468, 0.28780906999224076, 0.5007695250663617, 0.9980887820922187, + 0.6505062822604325, 0.2551175826121026, 0.8805142041510927, 0.540881304210333, 0.512165946641996, + 0.7391492488205184, 0.5349603795874779, 0.8477996679763127, 0.36974774843557734, 0.5386600638597664, + 0.8265820196734138, 0.8331013533783092, 0.3184354327418504, 0.21196431290299877, 0.0007803293886190499, + 0.9031099441741426, 
0.7338031418981182, 0.4181974944656337, 0.9475100023819251, 0.9164113945149563, + 0.9880971962092363, 0.971369242048584, 0.46347470842977934, 0.514856183416368, 0.5806407538118334, + 0.47403794573443647, 0.4983463935801685, 0.10528401666738596, 0.1526320361717619, 0.28061664920854257, + 0.30961878366674567, 0.44923363657730486, 0.9484407824169037, 0.5159122159882427, 0.5167660592705424, + 0.7367297318467558, 0.5538203882599549, 0.4506813979810824, 0.2442886238881219, 0.759900709991574, + 0.6650499928247546, 0.48708807614210625, 0.15306454148232096, 0.06357296925269262, 0.32107378247726914, + 0.4549262540499909, 0.5730762333418195, 0.657776134839074, 0.46288302719843766, 0.47841000194019123, + 0.44891897033375505, 0.10930401547813162, 0.24339407947523917, 0.12836594551381142, 0.6934045473476478, + 0.3961572539291023, 0.26876738129298816, 0.3666361092088235, 0.021036984332677644, 0.8996511029257301, + 0.8028217206118277, 0.02125903018390507, 0.02961138398355878, 0.6235739131455775, 0.6760579858568656, + 0.5857480476460779, 0.01234894215599136, 0.932659669453309, 0.5769971719464481, 0.4431403578173735, + 0.6210058271168252, 0.44512130009247053, 0.1256576731684177, 0.7554367685462694, 0.4552800403309736, + 0.5782247194165684, 0.9378829346189987, 0.4767719439722925, 0.588459429493016, 0.38309400222741374, + 0.45826986645737, 0.6923380843272136, 0.910198917887028, 0.27557533418495017, 0.5929635576694986, + 0.853359402144766, 0.7887863171140258, 0.6975730076388099, 0.46150867673575524, 0.8701541258470236, + 0.11825577576640478, 0.12613178124889635, 0.6295407278784825, 0.19813460927054138, 0.6415083047302164, + 0.6485982020831015, 0.503325917338586, 0.05296267036442437, 0.84952445870043, 0.45866219246495377, + 0.32768756890079054, 0.589085245208557, 0.34892536724353695, 0.91035413546058, 0.9880017939439389, + 0.40415901103861485, 0.578040771005287, 0.3959288005375471, 0.16148211530085899, 0.9318405934814316, + 0.6756677973943621, 0.4606385109370067, 0.905962237773528, 
0.2177532221826981, 0.4261085792963576, + 0.980991006583082, 0.2412848972859596, 0.8431509870043342, 0.48095393083099247, 0.9748865288169085, + 0.7190166864903728, 0.013915463190590227, 0.1845161452237204, 0.9027340647387055, 0.661002921841787, + 0.27262309567650456, 0.980513934833282, 0.905065693190242, 0.48817348627366275, 0.5491844851920269, + 0.41304323921393715, 0.9759785094279378, 0.853997033979551, 0.002002500366118709, 0.6174459093057074, + 0.34462573854351475, 0.14693603152033852, 0.5662501103103056, 0.3249376766553893, 0.9306275547982649, + 0.859786096152122, 0.28218315978146635, 0.3279232246229644, 0.48861872218445535, 0.9233508569342559, + 0.05722530428945971, 0.9083781089398145, 0.4339485341608773, 0.7203410635948555, 0.8095638597988294, + 0.7400838811513829, 0.7086563132366934, 0.21415793475415057, 0.1710534847076408, 0.7560622473588406, + 0.5861151794417773, 0.886760410951225, 0.24883451496070186, 0.16786802676698942, 0.13821448342001896, + 0.5543758072261663, 0.23877866570604567, 0.5009385005089074, 0.5186822212396692, 0.2418401387392375, + 0.6913013072430841, 0.45462902660124316, 0.1971623092606769, 0.9959491383675012, 0.1351262179824929, + 0.8588580970731138, 0.1773361844666207, 0.3586325230242825, 0.8750229859703434, 0.26477337004803103, + 0.8958200538690423, 0.710860220058141, + }; + float ret[2 * 2 * 1 * 2 * 16 * 16] = { + 0.011873719514500558, + 0.9055337066931509, + 0.4007962186879912, + 0.4823248697007644, + 0.434413711781758, + 0.3374081761653097, + 0.1327889293124861, + 0.9573280427327941, + 0.900157174839599, + 0.22675418134305847, + 0.915880789401114, + 0.2858704345543902, + 0.9538630233786596, + 0.7750326328379964, + 0.6241316443191395, + 0.9671130471718147, + 0.27178741914922444, + 0.9858432029701361, + 0.19476602256637698, + 0.3880385111012623, + 0.4904599528435152, + 0.7763741995316538, + 0.595041172451146, + 0.7697729614020185, + 0.9008648456464086, + 0.10660640559911716, + 0.9872130150760993, + 0.8863345663931051, + 
0.42187474552924187, + 0.890712319631421, + 0.8511675560554783, + 0.49085053954474256, + 0.7753996529731574, + 0.4520989748269998, + 0.8049570752479891, + 0.8587842070040622, + 0.8086535260498522, + 0.2874426357486748, + 0.4655016269762732, + 0.2966483939283724, + 0.034319962943348825, + 0.8204809948347593, + 0.009691641525112815, + 0.08010193588797776, + 0.965026232132812, + 0.7971692934454786, + 0.8820844501303087, + 0.3328045944663507, + 0.07216530096679319, + 0.005908780438422023, + 0.06520472370046182, + 0.14742586552942527, + 0.1584733061658462, + 0.40478873960253803, + 0.8517076637562077, + 0.38362604730995475, + 0.6242972689768637, + 0.8029032121893791, + 0.9087739592614803, + 0.4937699233327367, + 0.45546332276267887, + 0.798105153390389, + 0.1314550983849262, + 0.2591920182847739, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4519972100893155, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6393638683917573, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18561195288144405, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9114933413684086, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.3414109593297737, + 0.026092765672044882, + 0.2931518007384959, + 0.8469358330691886, + 0.47102665902579355, + 0.40483320725316896, + 0.12104263689198036, + 0.7610259859295765, + 0.18319775575048725, + 0.9581675087575537, + 0.6023459738089741, + 0.9770086789537543, + 0.32030513714161946, + 0.9998097997775989, + 0.9030541284918164, + 0.929507484163007, + 0.485477868042418, + 0.10576997065572546, + 0.9869010419984975, + 0.7601188774307379, + 0.7299650375549, + 0.47395049565443736, + 0.2987941122063468, + 0.28780906999224076, + 0.5007695250663617, + 0.9980887820922187, + 0.6505062822604325, + 0.2551175826121026, + 0.8805142041510927, + 0.540881304210333, + 0.512165946641996, + 0.7391492488205184, + 0.8477996679763127, + 0.36974774843557734, + 0.5386600638597664, + 0.8265820196734138, + 0.8331013533783092, + 0.3184354327418504, + 0.21196431290299877, + 0.0007803293886190499, + 0.9031099441741426, + 0.7338031418981182, + 0.4181974944656337, + 0.9475100023819251, + 0.9164113945149563, + 0.9880971962092363, + 0.971369242048584, + 0.46347470842977934, + 0.5806407538118334, + 0.47403794573443647, + 0.4983463935801685, + 0.10528401666738596, + 0.1526320361717619, + 0.28061664920854257, + 0.30961878366674567, + 0.44923363657730486, + 0.9484407824169037, + 0.5159122159882427, + 0.5167660592705424, + 0.7367297318467558, + 0.5538203882599549, + 0.4506813979810824, + 0.2442886238881219, + 0.759900709991574, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5701279464990141, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5349603795874779, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.514856183416368, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6650499928247546, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.48708807614210625, + 0.15306454148232096, + 0.06357296925269262, + 0.32107378247726914, + 0.4549262540499909, + 0.5730762333418195, + 0.657776134839074, + 0.46288302719843766, + 0.47841000194019123, + 0.44891897033375505, + 0.10930401547813162, + 0.24339407947523917, + 0.12836594551381142, + 0.6934045473476478, + 0.3961572539291023, + 0.26876738129298816, + 0.021036984332677644, + 0.8996511029257301, + 0.8028217206118277, + 0.02125903018390507, + 0.02961138398355878, + 0.6235739131455775, + 0.6760579858568656, + 0.5857480476460779, + 0.01234894215599136, + 0.932659669453309, + 0.5769971719464481, + 0.4431403578173735, + 0.6210058271168252, + 0.44512130009247053, + 0.1256576731684177, + 0.7554367685462694, + 0.5782247194165684, + 0.9378829346189987, + 0.4767719439722925, + 0.588459429493016, + 0.38309400222741374, + 0.45826986645737, + 0.6923380843272136, + 0.910198917887028, + 0.27557533418495017, + 0.5929635576694986, + 0.853359402144766, + 0.7887863171140258, + 0.6975730076388099, + 0.46150867673575524, + 0.8701541258470236, + 0.11825577576640478, + 0.6295407278784825, + 0.19813460927054138, + 0.6415083047302164, + 0.6485982020831015, + 0.503325917338586, + 0.05296267036442437, + 0.84952445870043, + 0.45866219246495377, + 0.32768756890079054, + 0.589085245208557, + 0.34892536724353695, + 0.91035413546058, + 0.9880017939439389, + 0.40415901103861485, 
+ 0.578040771005287, + 0.3959288005375471, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3666361092088235, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4552800403309736, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12613178124889635, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16148211530085899, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9318405934814316, + 0.6756677973943621, + 0.4606385109370067, + 0.905962237773528, + 0.2177532221826981, + 0.4261085792963576, + 0.980991006583082, + 0.2412848972859596, + 0.8431509870043342, + 0.48095393083099247, + 0.9748865288169085, + 0.7190166864903728, + 0.013915463190590227, + 0.1845161452237204, + 0.9027340647387055, + 0.661002921841787, + 0.980513934833282, + 0.905065693190242, + 0.48817348627366275, + 0.5491844851920269, + 0.41304323921393715, + 0.9759785094279378, + 0.853997033979551, + 0.002002500366118709, + 0.6174459093057074, + 0.34462573854351475, + 0.14693603152033852, + 0.5662501103103056, + 0.3249376766553893, + 0.9306275547982649, + 0.859786096152122, + 0.28218315978146635, + 0.48861872218445535, + 0.9233508569342559, + 
0.05722530428945971, + 0.9083781089398145, + 0.4339485341608773, + 0.7203410635948555, + 0.8095638597988294, + 0.7400838811513829, + 0.7086563132366934, + 0.21415793475415057, + 0.1710534847076408, + 0.7560622473588406, + 0.5861151794417773, + 0.886760410951225, + 0.24883451496070186, + 0.16786802676698942, + 0.5543758072261663, + 0.23877866570604567, + 0.5009385005089074, + 0.5186822212396692, + 0.2418401387392375, + 0.6913013072430841, + 0.45462902660124316, + 0.1971623092606769, + 0.9959491383675012, + 0.1351262179824929, + 0.8588580970731138, + 0.1773361844666207, + 0.3586325230242825, + 0.8750229859703434, + 0.26477337004803103, + 0.8958200538690423, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.27262309567650456, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3279232246229644, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.13821448342001896, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.710860220058141, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_NZ, {2, 2, 4, 17}, {2, 2, 2, 1, 16, 16}, DT_FLOAT}; + 
TransResult result; + FormatTransferFractalNz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_NZ, FORMAT_NHWC, {2, 2, 2, 1, 16, 16}, {2, 2, 4, 17}, DT_FLOAT}; + TransResult result2; + FormatTransferFractalNzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 4); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_src_shape) { + uint16_t data[1 * 4 * 4 * 1] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_NZ, {1, 4, 4}, {1, 1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalNz transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_src_data_type) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_NHWC, + FORMAT_FRACTAL_NZ, + { + 1, + 1, + 4, + 4, + }, + {1, 1, 1, 16, 16}, + DT_UNDEFINED}; + TransResult result; + FormatTransferFractalNz transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_src_format) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + TransArgs args{reinterpret_cast(data), + 
FORMAT_HWCN, + FORMAT_FRACTAL_NZ, + {1, 1, 4, 4}, + {1, 1, 1, 1, 16, 16}, + DT_FLOAT16}; + TransResult result; + FormatTransferFractalNz transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + PARAM_INVALID); + EXPECT_EQ(TransFormat(args, result), UNSUPPORTED); +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_dst_shape) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_NZ, {1, 1, 4, 4}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalNz transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_dst_shape2) { + uint16_t data[1 * 1 * 1 * 1 * 16 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_NZ, FORMAT_NHWC, {1, 1, 1, 1, 16, 16}, {1, 4, 4}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalNzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_src_data_type2) { + uint16_t data[1 * 1 * 1 * 16 * 16] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_FRACTAL_NZ, + FORMAT_NHWC, + {1, 1, 1, 16, 16}, + { + 1, + 1, + 4, + 4, + }, + DT_UNDEFINED}; + TransResult result; + FormatTransferFractalNzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_dst_format2) { + uint16_t data[1 * 1 * 1 * 1 * 16 * 16] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_FRACTAL_NZ, + FORMAT_HWCN, + {1, 1, 1, 1, 16, 16}, + {1, 1, 4, 4}, + DT_FLOAT16}; + TransResult result; + FormatTransferFractalNzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + 
EXPECT_EQ(TransFormat(args, result), UNSUPPORTED); +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_src_shape2) { + uint16_t data[1 * 1 * 1 * 1 * 16 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_NZ, FORMAT_NHWC, {1, 1, 16, 16}, {1, 1, 4, 4}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalNzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractNz, invalid_src_dst_shape_relation) { + uint16_t data[1 * 1 * 2 * 1 * 16 * 16] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_FRACTAL_NZ, + FORMAT_NHWC, + {1, 1, 2, 1, 16, 16}, + {1, 1, 17, 15}, + DT_FLOAT16}; + TransResult result; + FormatTransferFractalNzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_fractal_zz_unittest.cc b/tests/ut/ge/common/format_transfer_fractal_zz_unittest.cc new file mode 100644 index 00000000..c80f72ca --- /dev/null +++ b/tests/ut/ge/common/format_transfer_fractal_zz_unittest.cc @@ -0,0 +1,7991 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "common/formats/format_transfers/format_transfer_fractal_zz.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/formats/formats.h" +#include "common/fp16_t.h" +#include "time.h" + +namespace ge { +namespace formats { +class UtestFormatTransferNdFractZz : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferNdFractZz, nd_shape1_uint8_1) { + uint8_t data[1] = { + 176, + }; + uint8_t ret[1 * 1 * 32 * 32] = { + 176, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {1}, {1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {1, 1, 32, 32}, {1}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) 
/ sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape1_uint8_2) { + uint8_t data[32] = { + 6, 252, 167, 37, 140, 227, 115, 64, 179, 18, 63, 134, 23, 184, 175, 128, + 233, 103, 57, 50, 191, 16, 128, 175, 138, 239, 243, 13, 170, 126, 186, 84, + }; + uint8_t ret[1 * 1 * 32 * 32] = { + 6, 252, 167, 37, 140, 227, 115, 64, 179, 18, 63, 134, 23, 184, 175, 128, 233, 103, 57, 50, 191, 16, 128, 175, + 138, 239, 243, 13, 170, 126, 186, 84, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {32}, {1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + 
EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {1, 1, 32, 32}, {32}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape2_uint8_1) { + uint8_t data[32 * 32] = { + 208, 160, 225, 29, 237, 101, 212, 233, 14, 238, 254, 35, 252, 25, 84, 2, 218, 117, 5, 253, 58, 196, 72, + 41, 127, 17, 34, 159, 138, 216, 184, 13, 197, 30, 203, 249, 15, 97, 128, 241, 88, 66, 202, 112, 54, 27, + 57, 236, 112, 186, 58, 108, 0, 133, 79, 85, 180, 17, 55, 198, 147, 16, 98, 108, 211, 167, 81, 1, 15, + 35, 166, 206, 84, 135, 40, 41, 126, 59, 179, 69, 6, 234, 144, 168, 232, 19, 117, 155, 120, 37, 92, 114, + 206, 11, 76, 186, 203, 0, 108, 102, 116, 155, 50, 67, 113, 5, 52, 212, 73, 163, 86, 124, 110, 156, 134, + 247, 9, 189, 27, 147, 103, 61, 221, 238, 119, 28, 73, 229, 162, 51, 78, 5, 40, 186, 120, 96, 41, 226, + 60, 148, 134, 250, 66, 76, 81, 25, 245, 171, 116, 120, 71, 141, 187, 178, 115, 120, 245, 57, 68, 0, 190, + 158, 82, 103, 181, 138, 236, 125, 32, 193, 217, 128, 204, 54, 126, 232, 170, 236, 162, 1, 43, 102, 182, 225, + 170, 210, 35, 242, 241, 175, 4, 9, 16, 145, 21, 198, 244, 23, 84, 187, 151, 116, 152, 182, 50, 247, 227, + 47, 217, 66, 135, 55, 144, 25, 172, 114, 166, 130, 170, 33, 37, 134, 233, 70, 152, 38, 225, 250, 122, 11, + 206, 24, 89, 80, 119, 51, 84, 199, 140, 82, 103, 158, 140, 46, 9, 86, 24, 85, 35, 181, 100, 141, 43, + 97, 8, 79, 162, 113, 1, 119, 15, 60, 72, 195, 93, 223, 106, 192, 245, 123, 37, 33, 43, 115, 230, 228, + 67, 64, 1, 146, 227, 152, 150, 167, 78, 209, 178, 242, 142, 72, 52, 54, 253, 246, 225, 179, 41, 194, 28, + 185, 201, 119, 182, 209, 223, 216, 217, 
91, 219, 39, 136, 253, 216, 128, 186, 87, 235, 126, 138, 12, 206, 31, + 220, 172, 212, 37, 178, 130, 171, 202, 73, 163, 22, 73, 82, 196, 40, 222, 50, 60, 94, 93, 11, 176, 133, + 160, 107, 44, 158, 104, 177, 248, 85, 192, 214, 114, 206, 138, 173, 170, 231, 182, 246, 70, 61, 126, 193, 55, + 62, 230, 87, 124, 158, 55, 236, 73, 125, 171, 78, 177, 225, 233, 12, 216, 224, 3, 245, 60, 102, 5, 93, + 151, 48, 78, 86, 8, 90, 97, 187, 134, 48, 59, 18, 69, 235, 22, 79, 62, 57, 75, 139, 93, 112, 69, + 237, 189, 93, 120, 27, 147, 159, 107, 128, 252, 19, 195, 155, 65, 219, 180, 191, 47, 236, 122, 56, 82, 97, + 153, 155, 49, 28, 88, 151, 170, 212, 204, 74, 183, 139, 218, 110, 49, 175, 241, 247, 18, 227, 57, 12, 235, + 167, 13, 30, 65, 205, 184, 170, 126, 244, 23, 144, 157, 169, 225, 226, 248, 111, 76, 45, 232, 18, 185, 8, + 211, 189, 44, 48, 86, 224, 213, 222, 241, 66, 40, 7, 207, 104, 128, 232, 72, 108, 87, 114, 214, 84, 142, + 110, 129, 85, 1, 70, 194, 169, 40, 51, 128, 63, 51, 73, 193, 198, 137, 105, 121, 75, 171, 52, 173, 227, + 206, 22, 214, 110, 17, 210, 191, 178, 192, 191, 13, 224, 181, 232, 156, 79, 222, 59, 7, 152, 136, 109, 243, + 234, 31, 238, 51, 17, 20, 200, 122, 254, 209, 61, 58, 35, 159, 51, 60, 245, 217, 50, 31, 30, 146, 116, + 55, 58, 168, 50, 27, 213, 34, 155, 36, 161, 247, 146, 209, 170, 108, 50, 232, 61, 121, 61, 92, 239, 101, + 35, 34, 57, 176, 175, 65, 128, 126, 226, 66, 41, 52, 150, 35, 83, 84, 67, 48, 129, 4, 1, 172, 234, + 70, 182, 87, 239, 33, 130, 59, 177, 198, 46, 143, 115, 128, 8, 194, 231, 207, 227, 50, 127, 101, 131, 139, + 207, 241, 63, 142, 191, 30, 225, 110, 51, 220, 131, 46, 196, 158, 78, 191, 200, 110, 172, 44, 193, 22, 225, + 254, 93, 170, 73, 242, 92, 114, 37, 1, 191, 120, 234, 205, 236, 95, 44, 44, 25, 146, 48, 35, 191, 120, + 128, 7, 77, 58, 40, 170, 177, 17, 242, 41, 73, 116, 50, 210, 88, 100, 32, 175, 195, 37, 222, 98, 229, + 139, 152, 145, 108, 57, 168, 30, 1, 183, 170, 55, 78, 26, 228, 103, 11, 145, 61, 33, 224, 115, 103, 145, + 17, 207, 
109, 35, 202, 188, 124, 134, 233, 200, 49, 146, 240, 75, 118, 41, 105, 122, 51, 235, 191, 44, 58, + 16, 95, 225, 105, 237, 186, 204, 155, 199, 21, 31, 159, 175, 234, 231, 43, 163, 184, 122, 182, 87, 11, 228, + 174, 169, 167, 178, 35, 143, 169, 107, 182, 29, 109, 249, 46, 177, 87, 158, 168, 185, 18, 62, 55, 119, 52, + 108, 203, 251, 16, 161, 15, 46, 6, 246, 93, 161, 89, 165, 142, 182, 43, 141, 152, 124, 187, 7, 243, 243, + 157, 90, 170, 163, 133, 113, 24, 6, 224, 98, 36, 195, 68, 114, 65, 249, 165, 164, 239, 0, 193, 224, 254, + 188, 119, 189, 199, 38, 131, 74, 34, 220, 118, 101, 178, 56, 57, 158, 4, 226, 143, 13, 236, 147, 189, 35, + 22, 103, 112, 212, 43, 51, 213, 49, 175, 26, 222, 177, 204, 82, 191, 222, 61, 160, 160, 137, 159, 254, 69, + 51, 207, 35, 137, 129, 201, 113, 160, 213, 192, 252, 73, 230, 179, 139, 208, 83, 170, 34, 190, 249, 130, 215, + 95, 119, 52, 122, 43, 108, 8, 123, 65, 130, 61, 230, 229, 178, 145, 112, 246, 86, 136, 190, 236, 98, 245, + 83, 173, 30, 143, 199, 89, 84, 149, 93, 56, 109, 230, 239, 117, 52, 222, 108, 158, 116, 126, 234, 114, 201, + 27, 21, 118, 165, 215, 23, 241, 163, 188, 0, 230, 225, 209, 250, 199, 252, 45, 184, 205, 159, 240, 186, 35, + 73, 129, 195, 45, 120, 15, 75, 22, 5, 12, 68, 165, 8, 26, 170, 104, 98, 7, 252, 242, 111, 108, 154, + 240, 32, 96, 103, 138, 22, 210, 231, 193, 122, 221, 214, + }; + uint8_t ret[1 * 1 * 32 * 32] = { + 208, 160, 225, 29, 237, 101, 212, 233, 14, 238, 254, 35, 252, 25, 84, 2, 218, 117, 5, 253, 58, 196, 72, + 41, 127, 17, 34, 159, 138, 216, 184, 13, 197, 30, 203, 249, 15, 97, 128, 241, 88, 66, 202, 112, 54, 27, + 57, 236, 112, 186, 58, 108, 0, 133, 79, 85, 180, 17, 55, 198, 147, 16, 98, 108, 211, 167, 81, 1, 15, + 35, 166, 206, 84, 135, 40, 41, 126, 59, 179, 69, 6, 234, 144, 168, 232, 19, 117, 155, 120, 37, 92, 114, + 206, 11, 76, 186, 203, 0, 108, 102, 116, 155, 50, 67, 113, 5, 52, 212, 73, 163, 86, 124, 110, 156, 134, + 247, 9, 189, 27, 147, 103, 61, 221, 238, 119, 28, 73, 229, 162, 51, 78, 5, 40, 
186, 120, 96, 41, 226, + 60, 148, 134, 250, 66, 76, 81, 25, 245, 171, 116, 120, 71, 141, 187, 178, 115, 120, 245, 57, 68, 0, 190, + 158, 82, 103, 181, 138, 236, 125, 32, 193, 217, 128, 204, 54, 126, 232, 170, 236, 162, 1, 43, 102, 182, 225, + 170, 210, 35, 242, 241, 175, 4, 9, 16, 145, 21, 198, 244, 23, 84, 187, 151, 116, 152, 182, 50, 247, 227, + 47, 217, 66, 135, 55, 144, 25, 172, 114, 166, 130, 170, 33, 37, 134, 233, 70, 152, 38, 225, 250, 122, 11, + 206, 24, 89, 80, 119, 51, 84, 199, 140, 82, 103, 158, 140, 46, 9, 86, 24, 85, 35, 181, 100, 141, 43, + 97, 8, 79, 162, 113, 1, 119, 15, 60, 72, 195, 93, 223, 106, 192, 245, 123, 37, 33, 43, 115, 230, 228, + 67, 64, 1, 146, 227, 152, 150, 167, 78, 209, 178, 242, 142, 72, 52, 54, 253, 246, 225, 179, 41, 194, 28, + 185, 201, 119, 182, 209, 223, 216, 217, 91, 219, 39, 136, 253, 216, 128, 186, 87, 235, 126, 138, 12, 206, 31, + 220, 172, 212, 37, 178, 130, 171, 202, 73, 163, 22, 73, 82, 196, 40, 222, 50, 60, 94, 93, 11, 176, 133, + 160, 107, 44, 158, 104, 177, 248, 85, 192, 214, 114, 206, 138, 173, 170, 231, 182, 246, 70, 61, 126, 193, 55, + 62, 230, 87, 124, 158, 55, 236, 73, 125, 171, 78, 177, 225, 233, 12, 216, 224, 3, 245, 60, 102, 5, 93, + 151, 48, 78, 86, 8, 90, 97, 187, 134, 48, 59, 18, 69, 235, 22, 79, 62, 57, 75, 139, 93, 112, 69, + 237, 189, 93, 120, 27, 147, 159, 107, 128, 252, 19, 195, 155, 65, 219, 180, 191, 47, 236, 122, 56, 82, 97, + 153, 155, 49, 28, 88, 151, 170, 212, 204, 74, 183, 139, 218, 110, 49, 175, 241, 247, 18, 227, 57, 12, 235, + 167, 13, 30, 65, 205, 184, 170, 126, 244, 23, 144, 157, 169, 225, 226, 248, 111, 76, 45, 232, 18, 185, 8, + 211, 189, 44, 48, 86, 224, 213, 222, 241, 66, 40, 7, 207, 104, 128, 232, 72, 108, 87, 114, 214, 84, 142, + 110, 129, 85, 1, 70, 194, 169, 40, 51, 128, 63, 51, 73, 193, 198, 137, 105, 121, 75, 171, 52, 173, 227, + 206, 22, 214, 110, 17, 210, 191, 178, 192, 191, 13, 224, 181, 232, 156, 79, 222, 59, 7, 152, 136, 109, 243, + 234, 31, 238, 51, 17, 20, 200, 122, 254, 
209, 61, 58, 35, 159, 51, 60, 245, 217, 50, 31, 30, 146, 116, + 55, 58, 168, 50, 27, 213, 34, 155, 36, 161, 247, 146, 209, 170, 108, 50, 232, 61, 121, 61, 92, 239, 101, + 35, 34, 57, 176, 175, 65, 128, 126, 226, 66, 41, 52, 150, 35, 83, 84, 67, 48, 129, 4, 1, 172, 234, + 70, 182, 87, 239, 33, 130, 59, 177, 198, 46, 143, 115, 128, 8, 194, 231, 207, 227, 50, 127, 101, 131, 139, + 207, 241, 63, 142, 191, 30, 225, 110, 51, 220, 131, 46, 196, 158, 78, 191, 200, 110, 172, 44, 193, 22, 225, + 254, 93, 170, 73, 242, 92, 114, 37, 1, 191, 120, 234, 205, 236, 95, 44, 44, 25, 146, 48, 35, 191, 120, + 128, 7, 77, 58, 40, 170, 177, 17, 242, 41, 73, 116, 50, 210, 88, 100, 32, 175, 195, 37, 222, 98, 229, + 139, 152, 145, 108, 57, 168, 30, 1, 183, 170, 55, 78, 26, 228, 103, 11, 145, 61, 33, 224, 115, 103, 145, + 17, 207, 109, 35, 202, 188, 124, 134, 233, 200, 49, 146, 240, 75, 118, 41, 105, 122, 51, 235, 191, 44, 58, + 16, 95, 225, 105, 237, 186, 204, 155, 199, 21, 31, 159, 175, 234, 231, 43, 163, 184, 122, 182, 87, 11, 228, + 174, 169, 167, 178, 35, 143, 169, 107, 182, 29, 109, 249, 46, 177, 87, 158, 168, 185, 18, 62, 55, 119, 52, + 108, 203, 251, 16, 161, 15, 46, 6, 246, 93, 161, 89, 165, 142, 182, 43, 141, 152, 124, 187, 7, 243, 243, + 157, 90, 170, 163, 133, 113, 24, 6, 224, 98, 36, 195, 68, 114, 65, 249, 165, 164, 239, 0, 193, 224, 254, + 188, 119, 189, 199, 38, 131, 74, 34, 220, 118, 101, 178, 56, 57, 158, 4, 226, 143, 13, 236, 147, 189, 35, + 22, 103, 112, 212, 43, 51, 213, 49, 175, 26, 222, 177, 204, 82, 191, 222, 61, 160, 160, 137, 159, 254, 69, + 51, 207, 35, 137, 129, 201, 113, 160, 213, 192, 252, 73, 230, 179, 139, 208, 83, 170, 34, 190, 249, 130, 215, + 95, 119, 52, 122, 43, 108, 8, 123, 65, 130, 61, 230, 229, 178, 145, 112, 246, 86, 136, 190, 236, 98, 245, + 83, 173, 30, 143, 199, 89, 84, 149, 93, 56, 109, 230, 239, 117, 52, 222, 108, 158, 116, 126, 234, 114, 201, + 27, 21, 118, 165, 215, 23, 241, 163, 188, 0, 230, 225, 209, 250, 199, 252, 45, 184, 205, 159, 240, 186, 
35, + 73, 129, 195, 45, 120, 15, 75, 22, 5, 12, 68, 165, 8, 26, 170, 104, 98, 7, 252, 242, 111, 108, 154, + 240, 32, 96, 103, 138, 22, 210, 231, 193, 122, 221, 214, + }; + + FormatTransferFractalZz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {32, 32}, {1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {1, 1, 32, 32}, {32, 32}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape2_uint8_2) { + uint8_t data[30 * 33] = { + 184, 109, 202, 127, 3, 82, 191, 133, 0, 197, 127, 115, 149, 156, 126, 88, 84, 116, 125, 205, 68, 166, 145, + 139, 193, 43, 173, 235, 19, 147, 191, 35, 126, 138, 161, 82, 130, 77, 112, 47, 115, 163, 235, 114, 51, 2, + 53, 60, 54, 129, 74, 249, 54, 200, 173, 240, 126, 70, 184, 138, 16, 79, 117, 233, 238, 144, 242, 230, 157, + 18, 87, 185, 98, 8, 236, 232, 2, 108, 3, 180, 177, 164, 128, 194, 171, 93, 64, 194, 214, 31, 86, 114, + 109, 253, 174, 185, 167, 239, 79, 96, 84, 253, 7, 153, 205, 209, 120, 107, 216, 172, 239, 178, 159, 19, 161, + 114, 194, 183, 170, 173, 201, 234, 202, 131, 78, 0, 61, 3, 101, 23, 98, 39, 208, 173, 243, 93, 104, 78, + 232, 242, 73, 122, 87, 83, 12, 136, 209, 61, 148, 234, 220, 183, 44, 71, 127, 239, 55, 136, 73, 115, 29, + 206, 72, 175, 214, 92, 196, 19, 242, 106, 220, 102, 215, 50, 78, 116, 74, 225, 221, 168, 83, 213, 239, 197, + 137, 209, 184, 35, 149, 125, 98, 216, 
115, 231, 152, 179, 244, 44, 152, 89, 107, 147, 37, 218, 131, 55, 96, + 25, 36, 103, 19, 211, 194, 172, 113, 43, 200, 211, 190, 63, 133, 120, 234, 129, 48, 95, 218, 66, 253, 188, + 66, 204, 75, 218, 210, 126, 215, 2, 98, 205, 108, 56, 87, 128, 16, 231, 24, 165, 162, 27, 85, 89, 117, + 240, 239, 179, 96, 215, 198, 110, 80, 178, 68, 186, 46, 83, 102, 202, 216, 124, 5, 86, 178, 62, 198, 229, + 100, 54, 111, 16, 206, 151, 245, 100, 179, 100, 33, 106, 97, 5, 61, 43, 111, 47, 135, 210, 87, 135, 36, + 1, 212, 102, 217, 14, 183, 122, 27, 134, 224, 107, 186, 5, 29, 204, 114, 64, 54, 48, 48, 32, 219, 89, + 95, 50, 97, 155, 23, 125, 131, 222, 181, 220, 113, 171, 178, 168, 75, 215, 150, 102, 142, 245, 116, 181, 133, + 164, 241, 43, 61, 12, 214, 23, 54, 161, 100, 202, 17, 43, 54, 66, 28, 92, 77, 114, 246, 207, 22, 63, + 74, 97, 100, 58, 167, 232, 16, 180, 59, 133, 2, 99, 95, 46, 103, 22, 168, 4, 71, 88, 167, 44, 9, + 60, 232, 226, 128, 254, 236, 206, 120, 230, 76, 214, 93, 46, 225, 99, 145, 123, 193, 112, 124, 30, 13, 204, + 108, 148, 252, 234, 70, 122, 121, 94, 230, 73, 175, 224, 233, 2, 135, 110, 197, 224, 60, 196, 107, 248, 67, + 122, 224, 59, 44, 178, 131, 23, 36, 2, 185, 155, 152, 3, 203, 0, 50, 230, 131, 254, 235, 55, 199, 16, + 61, 6, 203, 26, 250, 148, 213, 182, 40, 9, 99, 87, 46, 140, 56, 144, 81, 12, 50, 232, 75, 110, 208, + 108, 77, 244, 80, 90, 49, 221, 162, 189, 57, 118, 67, 199, 116, 248, 80, 232, 77, 52, 196, 107, 21, 91, + 10, 96, 225, 82, 88, 6, 121, 241, 201, 251, 192, 53, 252, 66, 123, 116, 151, 138, 190, 102, 82, 13, 159, + 158, 184, 182, 156, 159, 143, 120, 26, 9, 122, 163, 64, 26, 185, 176, 66, 194, 235, 168, 166, 46, 199, 113, + 167, 21, 203, 157, 163, 117, 222, 177, 161, 133, 34, 241, 28, 241, 80, 71, 117, 90, 51, 133, 134, 180, 214, + 75, 24, 165, 74, 187, 248, 49, 86, 97, 214, 60, 161, 88, 136, 66, 229, 81, 29, 17, 90, 187, 226, 35, + 160, 254, 63, 245, 156, 151, 117, 152, 247, 178, 232, 213, 91, 233, 167, 172, 190, 1, 251, 245, 94, 7, 94, + 8, 108, 
148, 199, 153, 69, 217, 202, 33, 22, 143, 216, 131, 134, 236, 76, 84, 1, 1, 146, 205, 5, 195, + 216, 136, 181, 93, 54, 1, 249, 54, 239, 101, 130, 59, 50, 230, 197, 17, 72, 135, 1, 221, 164, 175, 60, + 185, 149, 6, 153, 121, 249, 124, 47, 216, 248, 124, 222, 253, 91, 25, 251, 17, 114, 116, 67, 15, 17, 209, + 195, 9, 228, 168, 241, 165, 238, 57, 148, 249, 185, 56, 136, 167, 100, 114, 169, 41, 199, 74, 179, 19, 143, + 71, 215, 181, 222, 157, 144, 178, 57, 167, 177, 1, 161, 79, 19, 33, 243, 245, 152, 102, 160, 221, 58, 182, + 140, 225, 94, 247, 23, 90, 248, 207, 36, 185, 27, 28, 220, 15, 73, 198, 228, 45, 2, 55, 153, 2, 86, + 103, 221, 189, 19, 39, 202, 123, 103, 64, 110, 213, 58, 33, 8, 77, 141, 0, 205, 89, 2, 51, 26, 249, + 28, 34, 143, 69, 85, 231, 62, 125, 211, 23, 39, 196, 207, 151, 91, 171, 232, 236, 229, 82, 125, 111, 180, + 246, 48, 98, 53, 5, 121, 236, 123, 102, 33, 134, 137, 24, 209, 173, 202, 224, 148, 166, 71, 99, 163, 120, + 66, 13, 88, 65, 212, 123, 52, 45, 164, 154, 101, 144, 127, 212, 199, 31, 170, 21, 126, 203, 86, 21, 168, + 165, 253, 243, 144, 105, 62, 188, 80, 45, 33, 192, 190, 248, 211, 240, 178, 157, 14, 145, 95, 248, 194, 143, + 65, 217, 70, 160, 85, 178, 233, 159, 96, 216, 181, 252, 95, 47, 220, 20, 239, 113, 43, 98, 9, 154, 253, + 132, 69, 131, 109, 238, 37, 252, 46, 88, 188, 167, 153, 28, 100, 116, 175, 142, 52, 209, 228, 152, 116, 98, + 102, 15, 52, 214, 47, 204, 185, 5, 189, 56, 211, 125, 155, 76, 59, 234, 223, 47, 10, 166, 59, 112, 108, + 32, 214, 88, 159, 229, 47, 209, 237, 9, 95, 189, 180, 54, 208, 31, 201, 88, 78, 167, 166, 48, 8, 54, + 8, 191, 176, 224, 167, 204, 185, 243, 211, 40, 57, 49, 118, 207, 5, 189, 56, 138, 69, 38, 56, 149, 156, + 110, + }; + uint8_t ret[2 * 1 * 32 * 32] = { + 184, 109, 202, 127, 3, 82, 191, 133, 0, 197, 127, 115, 149, 156, 126, 88, 84, 116, 125, 205, 68, 166, 145, + 139, 193, 43, 173, 235, 19, 147, 191, 35, 138, 161, 82, 130, 77, 112, 47, 115, 163, 235, 114, 51, 2, 53, + 60, 54, 129, 74, 249, 54, 200, 173, 
240, 126, 70, 184, 138, 16, 79, 117, 233, 238, 242, 230, 157, 18, 87, + 185, 98, 8, 236, 232, 2, 108, 3, 180, 177, 164, 128, 194, 171, 93, 64, 194, 214, 31, 86, 114, 109, 253, + 174, 185, 167, 239, 96, 84, 253, 7, 153, 205, 209, 120, 107, 216, 172, 239, 178, 159, 19, 161, 114, 194, 183, + 170, 173, 201, 234, 202, 131, 78, 0, 61, 3, 101, 23, 98, 208, 173, 243, 93, 104, 78, 232, 242, 73, 122, + 87, 83, 12, 136, 209, 61, 148, 234, 220, 183, 44, 71, 127, 239, 55, 136, 73, 115, 29, 206, 72, 175, 92, + 196, 19, 242, 106, 220, 102, 215, 50, 78, 116, 74, 225, 221, 168, 83, 213, 239, 197, 137, 209, 184, 35, 149, + 125, 98, 216, 115, 231, 152, 179, 244, 152, 89, 107, 147, 37, 218, 131, 55, 96, 25, 36, 103, 19, 211, 194, + 172, 113, 43, 200, 211, 190, 63, 133, 120, 234, 129, 48, 95, 218, 66, 253, 188, 204, 75, 218, 210, 126, 215, + 2, 98, 205, 108, 56, 87, 128, 16, 231, 24, 165, 162, 27, 85, 89, 117, 240, 239, 179, 96, 215, 198, 110, + 80, 178, 68, 46, 83, 102, 202, 216, 124, 5, 86, 178, 62, 198, 229, 100, 54, 111, 16, 206, 151, 245, 100, + 179, 100, 33, 106, 97, 5, 61, 43, 111, 47, 135, 210, 135, 36, 1, 212, 102, 217, 14, 183, 122, 27, 134, + 224, 107, 186, 5, 29, 204, 114, 64, 54, 48, 48, 32, 219, 89, 95, 50, 97, 155, 23, 125, 131, 181, 220, + 113, 171, 178, 168, 75, 215, 150, 102, 142, 245, 116, 181, 133, 164, 241, 43, 61, 12, 214, 23, 54, 161, 100, + 202, 17, 43, 54, 66, 28, 92, 114, 246, 207, 22, 63, 74, 97, 100, 58, 167, 232, 16, 180, 59, 133, 2, + 99, 95, 46, 103, 22, 168, 4, 71, 88, 167, 44, 9, 60, 232, 226, 128, 236, 206, 120, 230, 76, 214, 93, + 46, 225, 99, 145, 123, 193, 112, 124, 30, 13, 204, 108, 148, 252, 234, 70, 122, 121, 94, 230, 73, 175, 224, + 233, 2, 110, 197, 224, 60, 196, 107, 248, 67, 122, 224, 59, 44, 178, 131, 23, 36, 2, 185, 155, 152, 3, + 203, 0, 50, 230, 131, 254, 235, 55, 199, 16, 61, 203, 26, 250, 148, 213, 182, 40, 9, 99, 87, 46, 140, + 56, 144, 81, 12, 50, 232, 75, 110, 208, 108, 77, 244, 80, 90, 49, 221, 162, 189, 57, 118, 199, 116, 248, + 
80, 232, 77, 52, 196, 107, 21, 91, 10, 96, 225, 82, 88, 6, 121, 241, 201, 251, 192, 53, 252, 66, 123, + 116, 151, 138, 190, 102, 82, 159, 158, 184, 182, 156, 159, 143, 120, 26, 9, 122, 163, 64, 26, 185, 176, 66, + 194, 235, 168, 166, 46, 199, 113, 167, 21, 203, 157, 163, 117, 222, 177, 133, 34, 241, 28, 241, 80, 71, 117, + 90, 51, 133, 134, 180, 214, 75, 24, 165, 74, 187, 248, 49, 86, 97, 214, 60, 161, 88, 136, 66, 229, 81, + 29, 90, 187, 226, 35, 160, 254, 63, 245, 156, 151, 117, 152, 247, 178, 232, 213, 91, 233, 167, 172, 190, 1, + 251, 245, 94, 7, 94, 8, 108, 148, 199, 153, 217, 202, 33, 22, 143, 216, 131, 134, 236, 76, 84, 1, 1, + 146, 205, 5, 195, 216, 136, 181, 93, 54, 1, 249, 54, 239, 101, 130, 59, 50, 230, 197, 72, 135, 1, 221, + 164, 175, 60, 185, 149, 6, 153, 121, 249, 124, 47, 216, 248, 124, 222, 253, 91, 25, 251, 17, 114, 116, 67, + 15, 17, 209, 195, 9, 168, 241, 165, 238, 57, 148, 249, 185, 56, 136, 167, 100, 114, 169, 41, 199, 74, 179, + 19, 143, 71, 215, 181, 222, 157, 144, 178, 57, 167, 177, 1, 161, 19, 33, 243, 245, 152, 102, 160, 221, 58, + 182, 140, 225, 94, 247, 23, 90, 248, 207, 36, 185, 27, 28, 220, 15, 73, 198, 228, 45, 2, 55, 153, 2, + 103, 221, 189, 19, 39, 202, 123, 103, 64, 110, 213, 58, 33, 8, 77, 141, 0, 205, 89, 2, 51, 26, 249, + 28, 34, 143, 69, 85, 231, 62, 125, 211, 39, 196, 207, 151, 91, 171, 232, 236, 229, 82, 125, 111, 180, 246, + 48, 98, 53, 5, 121, 236, 123, 102, 33, 134, 137, 24, 209, 173, 202, 224, 148, 166, 99, 163, 120, 66, 13, + 88, 65, 212, 123, 52, 45, 164, 154, 101, 144, 127, 212, 199, 31, 170, 21, 126, 203, 86, 21, 168, 165, 253, + 243, 144, 105, 62, 80, 45, 33, 192, 190, 248, 211, 240, 178, 157, 14, 145, 95, 248, 194, 143, 65, 217, 70, + 160, 85, 178, 233, 159, 96, 216, 181, 252, 95, 47, 220, 20, 113, 43, 98, 9, 154, 253, 132, 69, 131, 109, + 238, 37, 252, 46, 88, 188, 167, 153, 28, 100, 116, 175, 142, 52, 209, 228, 152, 116, 98, 102, 15, 52, 47, + 204, 185, 5, 189, 56, 211, 125, 155, 76, 59, 234, 223, 47, 10, 166, 
59, 112, 108, 32, 214, 88, 159, 229, + 47, 209, 237, 9, 95, 189, 180, 54, 31, 201, 88, 78, 167, 166, 48, 8, 54, 8, 191, 176, 224, 167, 204, + 185, 243, 211, 40, 57, 49, 118, 207, 5, 189, 56, 138, 69, 38, 56, 149, 156, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 144, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 79, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 66, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 87, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 222, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 254, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 135, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 6, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 161, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 17, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 228, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 79, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 86, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 71, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 188, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 239, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 208, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {30, 33}, {1, 2, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + 
EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {1, 2, 32, 32}, {30, 33}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape3_uint8) { + uint8_t data[2 * 30 * 32] = { + 252, 93, 197, 26, 218, 156, 186, 88, 248, 251, 232, 100, 3, 79, 56, 127, 142, 20, 63, 109, 145, 151, 206, + 52, 164, 20, 233, 117, 57, 218, 169, 158, 174, 118, 118, 54, 99, 37, 194, 209, 116, 38, 26, 72, 110, 60, + 215, 102, 169, 202, 118, 70, 238, 238, 179, 241, 137, 125, 154, 116, 59, 231, 154, 208, 242, 187, 206, 179, 99, + 254, 40, 52, 124, 4, 217, 111, 26, 184, 153, 59, 20, 85, 150, 0, 15, 25, 144, 134, 116, 54, 184, 97, + 189, 140, 40, 188, 144, 116, 64, 65, 207, 244, 139, 211, 202, 0, 170, 75, 219, 246, 93, 99, 60, 28, 46, + 78, 73, 205, 79, 250, 95, 198, 169, 79, 40, 253, 247, 101, 114, 170, 105, 148, 245, 237, 157, 46, 248, 212, + 157, 228, 116, 56, 43, 245, 181, 147, 6, 168, 217, 141, 196, 48, 37, 93, 234, 154, 189, 64, 16, 85, 184, + 113, 154, 125, 54, 84, 21, 171, 245, 63, 96, 194, 58, 176, 75, 64, 161, 59, 249, 160, 240, 52, 253, 122, + 185, 123, 42, 133, 139, 98, 191, 162, 71, 157, 143, 60, 233, 20, 58, 0, 171, 253, 34, 159, 65, 35, 247, + 107, 17, 78, 88, 182, 193, 66, 119, 177, 143, 231, 15, 35, 193, 137, 96, 175, 36, 59, 36, 62, 167, 196, + 67, 44, 172, 218, 159, 122, 94, 72, 54, 65, 23, 69, 244, 219, 117, 180, 72, 162, 88, 118, 24, 202, 93, + 67, 55, 86, 93, 122, 15, 92, 151, 28, 95, 62, 246, 203, 4, 23, 92, 175, 153, 31, 59, 142, 6, 199, + 198, 214, 78, 20, 51, 228, 193, 246, 145, 144, 129, 31, 150, 151, 206, 13, 5, 2, 78, 161, 233, 50, 217, + 254, 211, 0, 155, 182, 74, 
98, 64, 126, 218, 1, 18, 202, 146, 176, 74, 127, 45, 192, 101, 153, 122, 101, + 88, 46, 128, 221, 86, 74, 119, 185, 213, 91, 167, 69, 211, 129, 73, 114, 250, 216, 52, 250, 184, 149, 69, + 147, 205, 82, 115, 246, 98, 75, 139, 102, 125, 26, 200, 205, 191, 197, 70, 2, 11, 244, 192, 34, 88, 187, + 101, 238, 227, 150, 124, 215, 145, 91, 102, 212, 17, 117, 134, 71, 111, 129, 39, 253, 35, 241, 213, 18, 129, + 239, 216, 181, 170, 103, 123, 226, 142, 24, 128, 169, 160, 56, 219, 6, 62, 251, 167, 229, 31, 99, 177, 139, + 137, 232, 135, 59, 25, 197, 128, 62, 208, 8, 240, 131, 64, 147, 98, 64, 24, 72, 213, 145, 148, 164, 81, + 180, 185, 155, 40, 72, 39, 69, 111, 208, 121, 212, 57, 143, 36, 203, 77, 19, 167, 154, 190, 123, 52, 191, + 7, 105, 154, 142, 152, 59, 58, 101, 28, 195, 223, 215, 209, 127, 218, 121, 217, 46, 200, 192, 197, 188, 134, + 159, 49, 75, 67, 247, 75, 96, 50, 163, 109, 147, 147, 38, 120, 2, 129, 171, 32, 198, 127, 203, 196, 111, + 81, 253, 90, 80, 125, 234, 17, 205, 216, 55, 140, 227, 131, 180, 0, 208, 36, 85, 16, 69, 163, 58, 177, + 136, 109, 243, 160, 64, 76, 181, 182, 35, 182, 118, 243, 93, 4, 176, 208, 61, 193, 171, 105, 33, 89, 81, + 72, 220, 94, 153, 36, 96, 223, 47, 198, 150, 21, 254, 130, 10, 221, 81, 206, 47, 119, 151, 243, 123, 206, + 94, 164, 130, 152, 147, 226, 135, 109, 249, 147, 216, 225, 1, 46, 158, 26, 137, 71, 10, 127, 124, 172, 50, + 247, 82, 11, 8, 252, 179, 23, 107, 80, 114, 168, 194, 23, 95, 86, 93, 177, 71, 82, 180, 161, 95, 58, + 146, 154, 219, 84, 101, 214, 56, 125, 76, 89, 172, 244, 203, 242, 104, 77, 23, 6, 229, 223, 172, 146, 184, + 68, 245, 12, 117, 171, 221, 170, 201, 201, 108, 228, 119, 221, 89, 107, 202, 41, 162, 168, 157, 139, 225, 178, + 179, 163, 53, 64, 37, 75, 226, 249, 197, 40, 184, 132, 153, 126, 158, 233, 170, 198, 142, 132, 205, 40, 18, + 128, 10, 246, 240, 156, 109, 233, 132, 6, 145, 184, 166, 64, 19, 187, 31, 60, 37, 241, 61, 108, 1, 70, + 78, 149, 134, 91, 2, 169, 181, 72, 79, 218, 240, 69, 45, 168, 162, 123, 106, 77, 
252, 91, 66, 38, 111, + 132, 249, 213, 254, 155, 113, 74, 30, 187, 123, 210, 94, 106, 47, 155, 85, 208, 220, 187, 213, 173, 50, 163, + 77, 110, 52, 59, 135, 138, 153, 93, 61, 18, 150, 50, 104, 95, 155, 115, 203, 90, 13, 250, 94, 65, 112, + 198, 136, 81, 215, 167, 103, 145, 116, 245, 213, 12, 78, 144, 43, 133, 4, 35, 21, 157, 149, 186, 118, 131, + 48, 145, 16, 149, 0, 128, 135, 250, 134, 207, 114, 211, 3, 178, 184, 170, 216, 233, 237, 182, 153, 132, 24, + 36, 117, 217, 242, 187, 109, 12, 79, 196, 124, 200, 138, 160, 43, 242, 55, 157, 185, 252, 40, 46, 232, 112, + 137, 166, 61, 5, 67, 136, 32, 212, 172, 55, 58, 133, 72, 248, 101, 19, 65, 208, 134, 66, 76, 63, 178, + 207, 133, 20, 191, 48, 31, 116, 67, 233, 116, 119, 216, 17, 137, 201, 219, 181, 161, 114, 128, 41, 127, 0, + 226, 87, 43, 241, 2, 67, 136, 230, 104, 43, 178, 23, 155, 74, 139, 175, 226, 121, 135, 190, 227, 179, 148, + 188, 133, 209, 32, 230, 128, 158, 248, 146, 131, 226, 37, 168, 228, 48, 78, 112, 81, 137, 99, 0, 11, 184, + 126, 87, 179, 71, 24, 143, 200, 217, 48, 219, 152, 14, 204, 150, 156, 239, 220, 59, 111, 23, 167, 180, 220, + 191, 128, 50, 82, 173, 69, 42, 215, 95, 171, 114, 210, 15, 149, 155, 35, 250, 250, 153, 208, 64, 191, 231, + 232, 238, 153, 219, 188, 23, 82, 252, 1, 50, 33, 38, 204, 169, 118, 30, 129, 33, 130, 239, 208, 153, 221, + 94, 212, 87, 127, 156, 109, 151, 176, 168, 41, 168, 46, 31, 228, 81, 201, 154, 113, 190, 201, 254, 169, 113, + 205, 43, 157, 96, 138, 118, 181, 233, 40, 173, 120, 198, 64, 96, 162, 175, 1, 62, 84, 231, 29, 29, 204, + 201, 141, 253, 149, 186, 176, 118, 197, 18, 131, 78, 198, 98, 80, 203, 133, 22, 34, 201, 120, 73, 93, 223, + 128, 222, 213, 218, 66, 252, 130, 12, 221, 124, 102, 80, 150, 57, 238, 38, 136, 183, 96, 8, 227, 52, 235, + 62, 197, 12, 62, 25, 78, 222, 91, 193, 140, 86, 210, 56, 187, 34, 222, 25, 0, 182, 117, 246, 232, 7, + 79, 70, 47, 60, 233, 194, 203, 13, 186, 15, 115, 17, 246, 130, 212, 105, 115, 142, 151, 7, 135, 115, 51, + 241, 67, 228, 61, 204, 208, 
95, 149, 164, 146, 145, 161, 186, 108, 180, 50, 166, 70, 143, 95, 108, 114, 28, + 127, 19, 69, 161, 156, 86, 150, 147, 72, 97, 238, 172, 62, 216, 98, 5, 96, 57, 216, 204, 159, 43, 82, + 104, 233, 56, 2, 55, 148, 2, 80, 146, 132, 62, 76, 229, 59, 212, 236, 227, 1, 3, 217, 23, 36, 238, + 155, 157, 254, 175, 45, 228, 185, 102, 199, 43, 28, 72, 228, 171, 137, 9, 113, 215, 69, 26, 62, 138, 10, + 10, 117, 9, 145, 125, 216, 195, 111, 110, 154, 118, 36, 159, 155, 165, 198, 70, 10, 254, 245, 218, 93, 53, + 105, 188, 86, 140, 204, 9, 150, 234, 126, 157, 62, 41, 3, 22, 80, 190, 15, 81, 41, 69, 42, 124, 87, + 21, 54, 119, 209, 48, 215, 153, 94, 210, 139, 165, 171, 153, 70, 123, 51, 151, 0, 12, 222, 191, 144, 42, + 5, 13, 71, 172, 73, 92, 120, 213, 226, 46, 161, 63, 136, 2, 205, 123, 67, 112, 206, 176, 21, 28, 4, + 29, 89, 45, 246, 65, 204, 178, 175, 48, 20, 167, 54, 146, 37, 206, 91, 110, 195, 9, 179, 16, 69, 15, + 209, 100, 60, 217, 178, 159, 9, 25, 11, 154, 215, 138, 214, 248, 249, 217, 146, 123, 100, 50, 33, 86, 197, + 115, 47, 30, 169, 162, 121, 92, 21, 21, 140, 113, 28, 100, 153, 211, 106, 68, 21, 56, 54, 135, 155, 192, + 105, 192, 97, 132, 105, 132, 191, 244, 82, 141, 213, 97, 122, 62, 204, 112, 164, 24, 26, 11, 171, 173, 114, + 233, 204, 125, 105, 36, 123, 236, 221, 115, 7, 221, 4, 34, 69, 183, 99, 63, 135, 245, 14, 90, 236, 239, + 201, 84, 167, 250, 83, 254, 58, 4, 56, 253, 83, 53, 25, 234, 30, 122, 85, 242, 51, 113, 241, 242, 25, + 8, 8, 70, 151, 252, 84, 86, 27, 102, 111, 61, 235, 131, 171, 182, 48, 179, 75, 18, 168, 168, 190, 33, + 21, 156, 215, 232, 103, 10, 142, 199, 201, 118, 144, 163, 215, 146, 88, 181, 12, 63, 229, 61, 203, 120, 149, + 158, 149, 66, 88, 4, 116, 188, 126, 80, 76, 69, 70, 213, 230, 17, 43, 124, 165, 215, 38, 15, 124, 124, + 191, 95, 125, 228, 74, 234, 103, 244, 34, 196, 24, 70, 246, 248, 54, 126, 198, 229, 180, 217, 95, 148, 208, + 187, 119, 181, 239, 194, 206, 245, 16, 253, 20, 219, 9, 18, 59, 136, 233, 138, 145, 88, 162, 143, 210, 111, + 226, 
176, 172, 159, 248, 2, 30, 31, 9, 229, 146, 142, 86, 240, 77, 102, 156, 225, 102, 114, 78, 19, 245, + 237, 137, 133, 66, 110, 204, 100, 3, 8, 161, 164, 38, 14, 21, 203, 152, 212, 129, 199, 189, 188, 13, 248, + 223, 4, 26, 124, 253, 239, 174, 84, 42, 43, 117, 113, 229, 120, 113, 164, 100, 4, 146, 49, 68, 4, 87, + 102, 53, 43, 101, 86, 148, 195, 21, 16, 88, 213, 122, 239, 97, 210, 223, 83, 237, 124, 104, 4, 184, 24, + 193, 40, 108, 222, 254, 103, 2, 134, 240, 221, 68, 72, 76, 247, 119, 238, 102, 9, 6, 43, 143, 151, 243, + 34, 10, 254, 107, 211, 233, 100, 234, 103, 31, 218, 170, 219, 242, 250, 186, 196, 208, 37, 107, 182, 62, 137, + 248, 13, 241, 230, 104, 40, 138, 231, 171, 213, 35, 104, 253, 195, 6, 26, 34, 133, 33, 195, 81, 186, 111, + 9, 104, 86, 3, 91, 96, 195, 157, 107, 148, 90, 135, 110, 104, 222, 37, 183, 10, 221, 150, 27, 243, 127, + 69, 191, 126, 127, 208, 65, 198, 50, 24, 49, 134, 131, 4, 241, 87, 75, 44, 248, 19, 86, 215, 200, 130, + 20, 205, 86, 197, 156, 8, 142, 109, 220, 75, 195, 157, 73, 201, 202, 80, 59, 242, 9, 8, 131, 153, 2, + 245, 76, 38, 195, 197, 157, 118, 50, 246, 44, 254, 33, 65, 31, 216, 187, 155, 35, 28, 238, 147, 175, 122, + 246, 104, 188, 192, 81, 55, 138, 165, 228, 126, 240, 88, 99, 22, 58, 133, 150, 59, 141, 35, 243, 66, 198, + 245, 111, 229, 113, 246, 248, 48, 155, 231, 170, 214, 76, 21, 208, 105, 140, 141, 93, 23, 187, 73, 140, 201, + 46, 254, 174, 232, 206, 80, 71, 35, 91, 217, 143, 167, 83, 89, 151, 233, 29, 170, 211, 87, 40, 98, 62, + 161, 237, 32, 236, 65, 48, 27, 162, 180, 7, 1, + }; + uint8_t ret[2 * 1 * 1 * 32 * 32] = { + 252, 93, 197, 26, 218, 156, 186, 88, 248, 251, 232, 100, 3, 79, 56, 127, 142, 20, 63, 109, 145, 151, 206, + 52, 164, 20, 233, 117, 57, 218, 169, 158, 174, 118, 118, 54, 99, 37, 194, 209, 116, 38, 26, 72, 110, 60, + 215, 102, 169, 202, 118, 70, 238, 238, 179, 241, 137, 125, 154, 116, 59, 231, 154, 208, 242, 187, 206, 179, 99, + 254, 40, 52, 124, 4, 217, 111, 26, 184, 153, 59, 20, 85, 150, 0, 15, 25, 144, 134, 
116, 54, 184, 97, + 189, 140, 40, 188, 144, 116, 64, 65, 207, 244, 139, 211, 202, 0, 170, 75, 219, 246, 93, 99, 60, 28, 46, + 78, 73, 205, 79, 250, 95, 198, 169, 79, 40, 253, 247, 101, 114, 170, 105, 148, 245, 237, 157, 46, 248, 212, + 157, 228, 116, 56, 43, 245, 181, 147, 6, 168, 217, 141, 196, 48, 37, 93, 234, 154, 189, 64, 16, 85, 184, + 113, 154, 125, 54, 84, 21, 171, 245, 63, 96, 194, 58, 176, 75, 64, 161, 59, 249, 160, 240, 52, 253, 122, + 185, 123, 42, 133, 139, 98, 191, 162, 71, 157, 143, 60, 233, 20, 58, 0, 171, 253, 34, 159, 65, 35, 247, + 107, 17, 78, 88, 182, 193, 66, 119, 177, 143, 231, 15, 35, 193, 137, 96, 175, 36, 59, 36, 62, 167, 196, + 67, 44, 172, 218, 159, 122, 94, 72, 54, 65, 23, 69, 244, 219, 117, 180, 72, 162, 88, 118, 24, 202, 93, + 67, 55, 86, 93, 122, 15, 92, 151, 28, 95, 62, 246, 203, 4, 23, 92, 175, 153, 31, 59, 142, 6, 199, + 198, 214, 78, 20, 51, 228, 193, 246, 145, 144, 129, 31, 150, 151, 206, 13, 5, 2, 78, 161, 233, 50, 217, + 254, 211, 0, 155, 182, 74, 98, 64, 126, 218, 1, 18, 202, 146, 176, 74, 127, 45, 192, 101, 153, 122, 101, + 88, 46, 128, 221, 86, 74, 119, 185, 213, 91, 167, 69, 211, 129, 73, 114, 250, 216, 52, 250, 184, 149, 69, + 147, 205, 82, 115, 246, 98, 75, 139, 102, 125, 26, 200, 205, 191, 197, 70, 2, 11, 244, 192, 34, 88, 187, + 101, 238, 227, 150, 124, 215, 145, 91, 102, 212, 17, 117, 134, 71, 111, 129, 39, 253, 35, 241, 213, 18, 129, + 239, 216, 181, 170, 103, 123, 226, 142, 24, 128, 169, 160, 56, 219, 6, 62, 251, 167, 229, 31, 99, 177, 139, + 137, 232, 135, 59, 25, 197, 128, 62, 208, 8, 240, 131, 64, 147, 98, 64, 24, 72, 213, 145, 148, 164, 81, + 180, 185, 155, 40, 72, 39, 69, 111, 208, 121, 212, 57, 143, 36, 203, 77, 19, 167, 154, 190, 123, 52, 191, + 7, 105, 154, 142, 152, 59, 58, 101, 28, 195, 223, 215, 209, 127, 218, 121, 217, 46, 200, 192, 197, 188, 134, + 159, 49, 75, 67, 247, 75, 96, 50, 163, 109, 147, 147, 38, 120, 2, 129, 171, 32, 198, 127, 203, 196, 111, + 81, 253, 90, 80, 125, 234, 17, 205, 216, 55, 140, 
227, 131, 180, 0, 208, 36, 85, 16, 69, 163, 58, 177, + 136, 109, 243, 160, 64, 76, 181, 182, 35, 182, 118, 243, 93, 4, 176, 208, 61, 193, 171, 105, 33, 89, 81, + 72, 220, 94, 153, 36, 96, 223, 47, 198, 150, 21, 254, 130, 10, 221, 81, 206, 47, 119, 151, 243, 123, 206, + 94, 164, 130, 152, 147, 226, 135, 109, 249, 147, 216, 225, 1, 46, 158, 26, 137, 71, 10, 127, 124, 172, 50, + 247, 82, 11, 8, 252, 179, 23, 107, 80, 114, 168, 194, 23, 95, 86, 93, 177, 71, 82, 180, 161, 95, 58, + 146, 154, 219, 84, 101, 214, 56, 125, 76, 89, 172, 244, 203, 242, 104, 77, 23, 6, 229, 223, 172, 146, 184, + 68, 245, 12, 117, 171, 221, 170, 201, 201, 108, 228, 119, 221, 89, 107, 202, 41, 162, 168, 157, 139, 225, 178, + 179, 163, 53, 64, 37, 75, 226, 249, 197, 40, 184, 132, 153, 126, 158, 233, 170, 198, 142, 132, 205, 40, 18, + 128, 10, 246, 240, 156, 109, 233, 132, 6, 145, 184, 166, 64, 19, 187, 31, 60, 37, 241, 61, 108, 1, 70, + 78, 149, 134, 91, 2, 169, 181, 72, 79, 218, 240, 69, 45, 168, 162, 123, 106, 77, 252, 91, 66, 38, 111, + 132, 249, 213, 254, 155, 113, 74, 30, 187, 123, 210, 94, 106, 47, 155, 85, 208, 220, 187, 213, 173, 50, 163, + 77, 110, 52, 59, 135, 138, 153, 93, 61, 18, 150, 50, 104, 95, 155, 115, 203, 90, 13, 250, 94, 65, 112, + 198, 136, 81, 215, 167, 103, 145, 116, 245, 213, 12, 78, 144, 43, 133, 4, 35, 21, 157, 149, 186, 118, 131, + 48, 145, 16, 149, 0, 128, 135, 250, 134, 207, 114, 211, 3, 178, 184, 170, 216, 233, 237, 182, 153, 132, 24, + 36, 117, 217, 242, 187, 109, 12, 79, 196, 124, 200, 138, 160, 43, 242, 55, 157, 185, 252, 40, 46, 232, 112, + 137, 166, 61, 5, 67, 136, 32, 212, 172, 55, 58, 133, 72, 248, 101, 19, 65, 208, 134, 66, 76, 63, 178, + 207, 133, 20, 191, 48, 31, 116, 67, 233, 116, 119, 216, 17, 137, 201, 219, 181, 161, 114, 128, 41, 127, 0, + 226, 87, 43, 241, 2, 67, 136, 230, 104, 43, 178, 23, 155, 74, 139, 175, 226, 121, 135, 190, 227, 179, 148, + 188, 133, 209, 32, 230, 128, 158, 248, 146, 131, 226, 37, 168, 228, 48, 78, 112, 81, 137, 99, 0, 11, 184, + 
126, 87, 179, 71, 24, 143, 200, 217, 48, 219, 152, 14, 204, 150, 156, 239, 220, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 111, 23, 167, 180, 220, 191, 128, 50, 82, 173, + 69, 42, 215, 95, 171, 114, 210, 15, 149, 155, 35, 250, 250, 153, 208, 64, 191, 231, 232, 238, 153, 219, 188, + 23, 82, 252, 1, 50, 33, 38, 204, 169, 118, 30, 129, 33, 130, 239, 208, 153, 221, 94, 212, 87, 127, 156, + 109, 151, 176, 168, 41, 168, 46, 31, 228, 81, 201, 154, 113, 190, 201, 254, 169, 113, 205, 43, 157, 96, 138, + 118, 181, 233, 40, 173, 120, 198, 64, 96, 162, 175, 1, 62, 84, 231, 29, 29, 204, 201, 141, 253, 149, 186, + 176, 118, 197, 18, 131, 78, 198, 98, 80, 203, 133, 22, 34, 201, 120, 73, 93, 223, 128, 222, 213, 218, 66, + 252, 130, 12, 221, 124, 102, 80, 150, 57, 238, 38, 136, 183, 96, 8, 227, 52, 235, 62, 197, 12, 62, 25, + 78, 222, 91, 193, 140, 86, 210, 56, 187, 34, 222, 25, 0, 182, 117, 246, 232, 7, 79, 70, 47, 60, 233, + 194, 203, 13, 186, 15, 115, 17, 246, 130, 212, 105, 115, 142, 151, 7, 135, 115, 51, 241, 67, 228, 61, 204, + 208, 95, 149, 164, 146, 145, 161, 186, 108, 180, 50, 166, 70, 143, 95, 108, 114, 28, 127, 19, 69, 161, 156, + 86, 150, 147, 72, 97, 238, 172, 62, 216, 98, 5, 96, 57, 216, 204, 159, 43, 82, 104, 233, 56, 2, 55, + 148, 2, 80, 146, 132, 62, 76, 229, 59, 212, 236, 227, 1, 3, 217, 23, 36, 238, 155, 157, 254, 175, 45, + 228, 185, 102, 199, 43, 28, 72, 228, 171, 137, 9, 113, 215, 69, 26, 62, 138, 10, 10, 117, 9, 145, 125, + 216, 195, 111, 110, 154, 118, 36, 159, 155, 165, 198, 70, 10, 254, 245, 218, 93, 53, 105, 188, 86, 140, 204, + 9, 150, 234, 126, 157, 62, 41, 3, 22, 80, 190, 15, 81, 41, 69, 42, 124, 87, 21, 54, 119, 209, 48, + 215, 153, 94, 210, 139, 165, 171, 153, 70, 123, 51, 151, 0, 12, 222, 191, 144, 42, 5, 13, 71, 172, 73, + 92, 120, 213, 226, 46, 161, 63, 136, 2, 205, 123, 67, 112, 206, 176, 
21, 28, 4, 29, 89, 45, 246, 65, + 204, 178, 175, 48, 20, 167, 54, 146, 37, 206, 91, 110, 195, 9, 179, 16, 69, 15, 209, 100, 60, 217, 178, + 159, 9, 25, 11, 154, 215, 138, 214, 248, 249, 217, 146, 123, 100, 50, 33, 86, 197, 115, 47, 30, 169, 162, + 121, 92, 21, 21, 140, 113, 28, 100, 153, 211, 106, 68, 21, 56, 54, 135, 155, 192, 105, 192, 97, 132, 105, + 132, 191, 244, 82, 141, 213, 97, 122, 62, 204, 112, 164, 24, 26, 11, 171, 173, 114, 233, 204, 125, 105, 36, + 123, 236, 221, 115, 7, 221, 4, 34, 69, 183, 99, 63, 135, 245, 14, 90, 236, 239, 201, 84, 167, 250, 83, + 254, 58, 4, 56, 253, 83, 53, 25, 234, 30, 122, 85, 242, 51, 113, 241, 242, 25, 8, 8, 70, 151, 252, + 84, 86, 27, 102, 111, 61, 235, 131, 171, 182, 48, 179, 75, 18, 168, 168, 190, 33, 21, 156, 215, 232, 103, + 10, 142, 199, 201, 118, 144, 163, 215, 146, 88, 181, 12, 63, 229, 61, 203, 120, 149, 158, 149, 66, 88, 4, + 116, 188, 126, 80, 76, 69, 70, 213, 230, 17, 43, 124, 165, 215, 38, 15, 124, 124, 191, 95, 125, 228, 74, + 234, 103, 244, 34, 196, 24, 70, 246, 248, 54, 126, 198, 229, 180, 217, 95, 148, 208, 187, 119, 181, 239, 194, + 206, 245, 16, 253, 20, 219, 9, 18, 59, 136, 233, 138, 145, 88, 162, 143, 210, 111, 226, 176, 172, 159, 248, + 2, 30, 31, 9, 229, 146, 142, 86, 240, 77, 102, 156, 225, 102, 114, 78, 19, 245, 237, 137, 133, 66, 110, + 204, 100, 3, 8, 161, 164, 38, 14, 21, 203, 152, 212, 129, 199, 189, 188, 13, 248, 223, 4, 26, 124, 253, + 239, 174, 84, 42, 43, 117, 113, 229, 120, 113, 164, 100, 4, 146, 49, 68, 4, 87, 102, 53, 43, 101, 86, + 148, 195, 21, 16, 88, 213, 122, 239, 97, 210, 223, 83, 237, 124, 104, 4, 184, 24, 193, 40, 108, 222, 254, + 103, 2, 134, 240, 221, 68, 72, 76, 247, 119, 238, 102, 9, 6, 43, 143, 151, 243, 34, 10, 254, 107, 211, + 233, 100, 234, 103, 31, 218, 170, 219, 242, 250, 186, 196, 208, 37, 107, 182, 62, 137, 248, 13, 241, 230, 104, + 40, 138, 231, 171, 213, 35, 104, 253, 195, 6, 26, 34, 133, 33, 195, 81, 186, 111, 9, 104, 86, 3, 91, + 96, 195, 157, 107, 148, 90, 135, 110, 
104, 222, 37, 183, 10, 221, 150, 27, 243, 127, 69, 191, 126, 127, 208, + 65, 198, 50, 24, 49, 134, 131, 4, 241, 87, 75, 44, 248, 19, 86, 215, 200, 130, 20, 205, 86, 197, 156, + 8, 142, 109, 220, 75, 195, 157, 73, 201, 202, 80, 59, 242, 9, 8, 131, 153, 2, 245, 76, 38, 195, 197, + 157, 118, 50, 246, 44, 254, 33, 65, 31, 216, 187, 155, 35, 28, 238, 147, 175, 122, 246, 104, 188, 192, 81, + 55, 138, 165, 228, 126, 240, 88, 99, 22, 58, 133, 150, 59, 141, 35, 243, 66, 198, 245, 111, 229, 113, 246, + 248, 48, 155, 231, 170, 214, 76, 21, 208, 105, 140, 141, 93, 23, 187, 73, 140, 201, 46, 254, 174, 232, 206, + 80, 71, 35, 91, 217, 143, 167, 83, 89, 151, 233, 29, 170, 211, 87, 40, 98, 62, 161, 237, 32, 236, 65, + 48, 27, 162, 180, 7, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {2, 30, 32}, {2, 1, 1, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {2, 1, 1, 32, 32}, {2, 30, 32}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape4_uint8_1) { + uint8_t data[2 * 3 * 4 * 32] = { + 148, 135, 243, 39, 238, 232, 237, 101, 186, 112, 180, 123, 109, 64, 123, 101, 47, 159, 63, 69, 188, 123, 
197, + 110, 164, 105, 80, 57, 21, 7, 239, 181, 12, 43, 107, 213, 56, 66, 189, 132, 136, 184, 153, 167, 150, 48, + 184, 216, 88, 203, 221, 46, 47, 249, 106, 106, 123, 44, 186, 75, 132, 17, 102, 97, 173, 140, 125, 194, 47, + 176, 0, 89, 193, 206, 217, 203, 226, 25, 230, 233, 218, 100, 224, 90, 202, 18, 82, 61, 153, 204, 90, 76, + 173, 85, 153, 28, 10, 196, 150, 120, 179, 220, 115, 228, 143, 233, 80, 25, 40, 87, 171, 28, 54, 114, 52, + 18, 51, 153, 214, 214, 242, 103, 59, 173, 29, 172, 154, 97, 85, 136, 120, 131, 150, 96, 158, 90, 206, 105, + 90, 155, 117, 106, 3, 90, 184, 127, 248, 163, 147, 222, 112, 5, 154, 194, 82, 40, 210, 79, 188, 67, 168, + 19, 181, 109, 168, 95, 26, 149, 154, 70, 21, 198, 177, 81, 144, 137, 221, 140, 164, 126, 138, 242, 219, 8, + 111, 232, 145, 69, 180, 118, 49, 101, 240, 155, 141, 197, 241, 128, 214, 175, 126, 214, 167, 21, 30, 122, 159, + 180, 228, 180, 198, 31, 75, 56, 81, 213, 181, 112, 97, 60, 138, 29, 21, 173, 98, 184, 181, 39, 170, 151, + 143, 64, 189, 179, 124, 216, 178, 196, 206, 182, 239, 77, 240, 53, 109, 206, 7, 173, 120, 8, 166, 128, 65, + 180, 10, 36, 252, 144, 65, 62, 22, 149, 85, 133, 136, 149, 172, 14, 121, 112, 187, 87, 212, 171, 222, 205, + 23, 29, 123, 86, 54, 59, 166, 111, 250, 101, 165, 229, 20, 194, 106, 100, 144, 147, 78, 211, 203, 202, 73, + 29, 193, 207, 107, 209, 48, 159, 133, 203, 135, 86, 228, 158, 219, 22, 229, 194, 215, 58, 137, 79, 137, 198, + 224, 200, 140, 65, 88, 20, 108, 237, 21, 251, 199, 138, 222, 254, 53, 107, 154, 227, 25, 96, 72, 219, 96, + 224, 165, 22, 100, 89, 174, 32, 107, 209, 46, 39, 231, 163, 157, 184, 89, 243, 18, 123, 254, 153, 77, 174, + 6, 204, 140, 63, 172, 75, 69, 110, 150, 87, 12, 15, 180, 57, 193, 196, 193, 170, 34, 30, 87, 167, 25, + 219, 114, 179, 113, 79, 138, 199, 138, 244, 33, 125, 240, 201, 56, 93, 52, 222, 51, 57, 247, 73, 150, 234, + 47, 250, 39, 36, 63, 19, 236, 128, 206, 67, 29, 74, 124, 246, 154, 142, 41, 6, 44, 43, 165, 135, 94, + 249, 240, 122, 117, 160, 95, 186, 104, 158, 
66, 133, 112, 15, 173, 212, 77, 231, 203, 253, 237, 210, 95, 203, + 63, 193, 93, 144, 14, 137, 131, 222, 110, 249, 195, 168, 238, 230, 44, 112, 216, 254, 83, 217, 142, 168, 109, + 161, 5, 4, 30, 40, 251, 144, 88, 140, 49, 127, 254, 254, 120, 174, 204, 210, 144, 93, 122, 185, 158, 64, + 99, 140, 119, 5, 15, 31, 100, 152, 202, 45, 228, 112, 1, 75, 147, 168, 111, 72, 185, 174, 100, 111, 242, + 67, 18, 226, 172, 132, 2, 112, 130, 82, 6, 32, 126, 81, 149, 254, 23, 177, 56, 51, 232, 183, 182, 12, + 162, 144, 150, 205, 40, 250, 17, 226, 36, 44, 166, 207, 47, 32, 205, 15, 37, 167, 129, 97, 55, 232, 99, + 122, 85, 19, 16, 218, 161, 38, 197, 133, 124, 85, 39, 176, 149, 247, 78, 163, 67, 139, 218, 57, 151, 60, + 242, 184, 115, 79, 198, 98, 176, 175, 130, 116, 118, 150, 245, 213, 228, 217, 56, 237, 214, 196, 197, 223, 243, + 71, 209, 159, 153, 199, 48, 168, 195, 107, 2, 4, 24, 67, 201, 65, 216, 135, 89, 57, 151, 28, 225, 154, + 252, 153, 157, 60, 46, 126, 134, 212, 237, 233, 176, 2, 131, 111, 124, 245, 216, 209, 23, 244, 228, 190, 140, + 155, 102, 121, 113, 103, 130, 154, 189, 119, 205, 89, 169, 58, 195, 15, 11, 107, 253, 214, 201, 201, 209, 199, + 52, 214, 46, 97, 98, 21, 46, 61, 188, 200, 138, 200, 125, 154, 123, 98, 97, 121, 56, 201, 47, 134, 35, + 181, 164, 61, 126, 152, 29, 122, 16, 119, 70, 181, 142, 217, 117, 34, 144, 224, 26, 152, 58, 31, 138, 5, + 89, 130, 172, 158, 199, 87, 175, 220, 185, 27, 170, 126, 34, 163, 14, 130, 158, 175, 249, 87, 190, 139, 200, + 41, 222, 126, 200, 147, 14, 32, 193, 211, + }; + uint8_t ret[2 * 3 * 1 * 1 * 32 * 32] = { + 148, 135, 243, 39, 238, 232, 237, 101, 186, 112, 180, 123, 109, 64, 123, 101, 47, 159, 63, 69, 188, 123, 197, + 110, 164, 105, 80, 57, 21, 7, 239, 181, 12, 43, 107, 213, 56, 66, 189, 132, 136, 184, 153, 167, 150, 48, + 184, 216, 88, 203, 221, 46, 47, 249, 106, 106, 123, 44, 186, 75, 132, 17, 102, 97, 173, 140, 125, 194, 47, + 176, 0, 89, 193, 206, 217, 203, 226, 25, 230, 233, 218, 100, 224, 90, 202, 18, 82, 61, 153, 204, 90, 
76, + 173, 85, 153, 28, 10, 196, 150, 120, 179, 220, 115, 228, 143, 233, 80, 25, 40, 87, 171, 28, 54, 114, 52, + 18, 51, 153, 214, 214, 242, 103, 59, 173, 29, 172, 154, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 85, 136, 120, 131, 150, 96, 158, 90, 206, 105, 90, + 155, 117, 106, 3, 90, 184, 127, 248, 163, 147, 222, 112, 5, 154, 194, 82, 40, 210, 79, 188, 67, 168, 19, + 181, 109, 168, 95, 26, 149, 154, 70, 21, 198, 177, 81, 144, 137, 221, 140, 164, 126, 138, 242, 219, 8, 111, + 232, 145, 69, 180, 118, 49, 101, 240, 155, 141, 197, 241, 128, 214, 175, 126, 214, 167, 21, 30, 122, 159, 180, + 228, 180, 198, 31, 75, 56, 81, 213, 181, 112, 97, 60, 138, 29, 21, 173, 98, 184, 181, 39, 170, 151, 143, + 64, 189, 179, 124, 216, 178, 196, 206, 182, 239, 77, 240, 53, 109, 206, 7, 173, 120, 8, 166, 128, 65, 180, + 10, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 252, 144, 65, 62, 22, 149, 85, 133, 136, 149, 172, 14, 121, 112, 187, 87, 212, 171, 222, 205, 23, 29, + 123, 86, 54, 59, 166, 111, 250, 101, 165, 229, 20, 194, 106, 100, 144, 147, 78, 211, 203, 202, 73, 29, 193, + 207, 107, 209, 48, 159, 133, 203, 135, 86, 228, 158, 219, 22, 229, 194, 215, 58, 137, 79, 137, 198, 224, 200, + 140, 65, 88, 20, 108, 237, 21, 251, 199, 138, 222, 254, 53, 107, 154, 227, 25, 96, 72, 219, 96, 224, 165, + 22, 100, 89, 174, 32, 107, 209, 46, 39, 231, 163, 157, 184, 89, 243, 18, 123, 254, 153, 77, 174, 6, 204, + 140, 63, 172, 75, 69, 110, 150, 87, 12, 15, 180, 57, 193, 196, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 193, 170, 34, 30, 87, 167, 25, 219, 114, 179, + 113, 79, 138, 199, 138, 244, 33, 125, 240, 201, 56, 93, 52, 222, 51, 57, 247, 73, 150, 234, 47, 250, 39, + 36, 63, 19, 236, 128, 206, 67, 29, 74, 124, 246, 154, 142, 41, 6, 44, 43, 165, 135, 94, 249, 240, 122, + 117, 160, 95, 186, 104, 158, 66, 133, 112, 15, 173, 
212, 77, 231, 203, 253, 237, 210, 95, 203, 63, 193, 93, + 144, 14, 137, 131, 222, 110, 249, 195, 168, 238, 230, 44, 112, 216, 254, 83, 217, 142, 168, 109, 161, 5, 4, + 30, 40, 251, 144, 88, 140, 49, 127, 254, 254, 120, 174, 204, 210, 144, 93, 122, 185, 158, 64, 99, 140, 119, + 5, 15, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 100, 152, 202, 45, 228, 112, 1, 75, 147, 168, 111, 72, 185, 174, 100, 111, 242, 67, 18, 226, 172, + 132, 2, 112, 130, 82, 6, 32, 126, 81, 149, 254, 23, 177, 56, 51, 232, 183, 182, 12, 162, 144, 150, 205, + 40, 250, 17, 226, 36, 44, 166, 207, 47, 32, 205, 15, 37, 167, 129, 97, 55, 232, 99, 122, 85, 19, 16, + 218, 161, 38, 197, 133, 124, 85, 39, 176, 149, 247, 78, 163, 67, 139, 218, 57, 151, 60, 242, 184, 115, 79, + 198, 98, 176, 175, 130, 116, 118, 150, 245, 213, 228, 217, 56, 237, 214, 196, 197, 223, 243, 71, 209, 159, 153, + 199, 48, 168, 195, 107, 2, 4, 24, 67, 201, 65, 216, 135, 89, 57, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 151, 28, 225, 154, 252, 153, 157, 60, 46, + 126, 134, 212, 237, 233, 176, 2, 131, 111, 124, 245, 216, 209, 23, 244, 228, 190, 140, 155, 102, 121, 113, 103, + 130, 154, 189, 119, 205, 89, 169, 58, 195, 15, 11, 107, 253, 214, 201, 201, 209, 199, 52, 214, 46, 97, 98, + 21, 46, 61, 188, 200, 138, 200, 125, 154, 123, 98, 97, 121, 56, 201, 47, 134, 35, 181, 164, 61, 126, 152, + 29, 122, 16, 119, 70, 181, 142, 217, 117, 34, 144, 224, 26, 152, 58, 31, 138, 5, 89, 130, 172, 158, 199, + 87, 175, 220, 185, 27, 170, 126, 34, 163, 14, 130, 158, 175, 249, 87, 190, 139, 200, 41, 222, 126, 200, 147, + 14, 32, 193, 211, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {2, 3, 4, 32}, {2, 3, 1, 1, 32, 32}, DT_UINT8}; + TransResult result; + 
EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {2, 3, 1, 1, 32, 32}, {2, 3, 4, 32}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape4_uint8_2) { + uint8_t data[2 * 3 * 4 * 33] = { + 18, 158, 60, 43, 108, 88, 198, 190, 25, 94, 196, 98, 41, 45, 64, 161, 136, 109, 60, 6, 165, 120, 9, + 65, 99, 106, 141, 0, 119, 100, 60, 35, 111, 178, 104, 158, 221, 115, 103, 85, 223, 231, 149, 174, 238, 120, + 35, 53, 53, 134, 89, 253, 123, 19, 75, 78, 174, 231, 196, 87, 74, 134, 69, 230, 237, 12, 162, 210, 182, + 194, 10, 102, 131, 240, 110, 69, 0, 98, 112, 180, 39, 8, 109, 57, 163, 136, 177, 133, 223, 146, 128, 128, + 173, 100, 83, 226, 16, 17, 46, 196, 116, 165, 65, 66, 193, 99, 214, 47, 199, 226, 233, 7, 250, 215, 133, + 104, 29, 20, 54, 1, 42, 231, 49, 48, 116, 214, 52, 11, 165, 164, 168, 230, 26, 135, 110, 134, 140, 125, + 214, 204, 231, 57, 193, 44, 101, 245, 81, 29, 84, 76, 60, 234, 59, 53, 138, 132, 44, 238, 22, 18, 80, + 72, 84, 178, 222, 53, 44, 87, 19, 196, 112, 58, 237, 201, 203, 59, 218, 75, 105, 234, 95, 175, 36, 64, + 96, 12, 74, 56, 7, 171, 235, 138, 60, 243, 226, 242, 61, 116, 164, 185, 52, 82, 154, 160, 16, 47, 23, + 214, 173, 82, 144, 210, 144, 150, 38, 236, 51, 49, 205, 79, 221, 180, 216, 196, 233, 149, 81, 68, 51, 233, + 94, 219, 175, 24, 207, 205, 130, 108, 213, 31, 128, 41, 142, 7, 82, 133, 207, 9, 224, 128, 214, 24, 76, + 172, 228, 139, 33, 168, 9, 127, 187, 196, 121, 78, 
201, 240, 18, 157, 203, 244, 157, 38, 246, 231, 94, 29, + 251, 18, 186, 33, 108, 212, 212, 100, 95, 16, 69, 163, 36, 155, 178, 68, 102, 8, 174, 123, 101, 88, 51, + 112, 237, 39, 50, 237, 64, 136, 5, 144, 183, 11, 81, 115, 248, 165, 196, 91, 3, 185, 126, 38, 25, 102, + 203, 238, 174, 156, 36, 53, 90, 168, 240, 77, 145, 20, 100, 197, 157, 183, 91, 116, 165, 209, 142, 212, 211, + 150, 20, 20, 38, 149, 220, 218, 171, 134, 18, 184, 105, 9, 192, 243, 140, 134, 111, 55, 161, 236, 211, 115, + 58, 159, 114, 211, 53, 234, 82, 38, 38, 112, 227, 159, 210, 129, 61, 249, 64, 31, 136, 110, 254, 226, 10, + 223, 223, 241, 192, 160, 231, 207, 151, 35, 41, 244, 154, 129, 127, 32, 173, 113, 111, 42, 221, 73, 141, 72, + 196, 90, 41, 115, 220, 207, 123, 89, 210, 119, 133, 5, 3, 70, 71, 0, 88, 134, 182, 152, 12, 223, 179, + 101, 100, 92, 195, 190, 253, 230, 218, 215, 164, 92, 50, 191, 50, 152, 171, 226, 207, 74, 154, 213, 139, 26, + 60, 59, 121, 177, 229, 139, 52, 203, 111, 120, 53, 109, 35, 177, 59, 254, 188, 185, 189, 81, 233, 223, 164, + 26, 143, 125, 65, 41, 243, 231, 96, 174, 157, 30, 106, 78, 185, 252, 11, 98, 19, 34, 119, 34, 240, 141, + 39, 189, 48, 149, 166, 176, 2, 142, 185, 231, 239, 49, 188, 234, 77, 213, 99, 249, 169, 165, 153, 100, 249, + 161, 9, 101, 41, 141, 182, 162, 241, 58, 129, 177, 37, 233, 97, 163, 43, 24, 254, 34, 49, 165, 94, 36, + 47, 19, 32, 85, 162, 190, 153, 183, 135, 204, 91, 79, 45, 72, 26, 19, 197, 179, 246, 241, 27, 141, 177, + 9, 8, 140, 112, 212, 5, 197, 86, 2, 170, 125, 179, 138, 240, 204, 99, 59, 190, 65, 113, 181, 166, 159, + 40, 154, 81, 12, 141, 122, 185, 220, 203, 22, 82, 105, 146, 52, 118, 16, 10, 99, 183, 103, 18, 170, 142, + 204, 49, 73, 250, 61, 169, 243, 85, 203, 17, 170, 133, 171, 179, 237, 163, 131, 150, 71, 171, 120, 213, 3, + 147, 138, 223, 171, 235, 51, 5, 155, 19, 241, 253, 152, 19, 44, 250, 110, 244, 211, 170, 226, 44, 181, 208, + 68, 143, 94, 85, 229, 211, 16, 23, 219, 94, 35, 159, 63, 208, 207, 7, 189, 254, 207, 38, 6, 105, 92, + 104, 
73, 156, 219, 191, 108, 99, 60, 49, 42, 4, 245, 158, 239, 131, 42, 206, 187, 214, 73, 138, 59, 85, + 54, 100, 38, 133, 182, 157, 187, 150, 4, 78, 120, 22, 19, 119, 91, 52, 156, 127, 9, 93, 6, 172, 209, + 114, 249, 190, 67, 235, 169, 133, 125, 66, 214, 234, 146, 219, 149, 193, 150, 35, 56, 98, 38, 233, 125, 88, + 89, 73, 146, 230, 47, 159, 52, 31, 23, 164, 103, 250, 228, 57, 25, 2, 219, 56, 122, 58, 190, 232, 120, + 205, 35, 25, 76, 182, 69, 137, 27, 5, 12, + }; + uint8_t ret[2 * 3 * 1 * 2 * 32 * 32] = { + 18, 158, 60, 43, 108, 88, 198, 190, 25, 94, 196, 98, 41, 45, 64, 161, 136, 109, 60, 6, 165, 120, 9, + 65, 99, 106, 141, 0, 119, 100, 60, 35, 178, 104, 158, 221, 115, 103, 85, 223, 231, 149, 174, 238, 120, 35, + 53, 53, 134, 89, 253, 123, 19, 75, 78, 174, 231, 196, 87, 74, 134, 69, 230, 237, 162, 210, 182, 194, 10, + 102, 131, 240, 110, 69, 0, 98, 112, 180, 39, 8, 109, 57, 163, 136, 177, 133, 223, 146, 128, 128, 173, 100, + 83, 226, 16, 17, 196, 116, 165, 65, 66, 193, 99, 214, 47, 199, 226, 233, 7, 250, 215, 133, 104, 29, 20, + 54, 1, 42, 231, 49, 48, 116, 214, 52, 11, 165, 164, 168, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 46, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 230, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 26, 135, 110, 134, 140, 125, 214, 204, 231, 57, 193, 44, 101, 245, 81, 29, 84, 76, 60, 234, 59, 53, + 138, 132, 44, 238, 22, 18, 80, 72, 84, 178, 53, 44, 87, 19, 196, 112, 58, 237, 201, 203, 59, 218, 75, + 105, 234, 95, 175, 36, 64, 96, 12, 74, 56, 7, 171, 235, 138, 60, 243, 226, 242, 61, 164, 185, 52, 82, + 154, 160, 16, 47, 23, 214, 173, 82, 144, 210, 144, 150, 38, 236, 51, 49, 205, 79, 221, 180, 216, 196, 233, + 149, 81, 68, 51, 233, 219, 175, 24, 207, 205, 130, 108, 213, 31, 128, 41, 142, 7, 82, 133, 207, 9, 224, + 128, 214, 24, 76, 172, 228, 139, 33, 168, 9, 127, 187, 196, 121, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 222, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 78, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 201, 240, 18, 157, 203, 244, 157, 38, 246, 231, 94, 29, 251, 18, 186, 33, 108, 212, 212, 100, 95, + 16, 69, 163, 36, 155, 178, 68, 102, 8, 174, 123, 88, 51, 112, 237, 39, 50, 237, 64, 136, 5, 144, 183, + 11, 81, 115, 248, 165, 196, 91, 3, 185, 126, 38, 25, 102, 203, 238, 174, 156, 36, 53, 90, 240, 77, 145, + 20, 100, 197, 157, 183, 91, 116, 165, 209, 142, 212, 211, 150, 20, 20, 38, 149, 220, 218, 171, 134, 18, 184, + 105, 9, 192, 
243, 140, 134, 55, 161, 236, 211, 115, 58, 159, 114, 211, 53, 234, 82, 38, 38, 112, 227, 159, + 210, 129, 61, 249, 64, 31, 136, 110, 254, 226, 10, 223, 223, 241, 192, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 101, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 168, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 160, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 231, 207, 151, 35, 41, 244, 154, 129, 127, 32, 173, 113, 111, 42, 221, 73, 141, 72, 196, 90, + 41, 115, 220, 207, 123, 89, 210, 119, 133, 5, 3, 70, 0, 88, 134, 182, 152, 12, 223, 179, 101, 100, 92, + 195, 190, 253, 230, 218, 215, 164, 92, 50, 191, 50, 152, 171, 226, 207, 74, 154, 213, 139, 26, 60, 121, 177, + 229, 139, 52, 203, 111, 120, 53, 109, 35, 177, 59, 254, 188, 185, 189, 81, 233, 223, 164, 26, 143, 125, 65, + 41, 243, 231, 96, 174, 157, 30, 78, 185, 252, 11, 98, 19, 34, 119, 34, 240, 141, 39, 189, 48, 149, 166, + 176, 2, 142, 185, 231, 239, 49, 188, 234, 77, 213, 99, 249, 169, 165, 153, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 71, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 59, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 249, 161, 9, 101, 41, 141, 182, 162, 241, 58, 129, 177, 37, 233, 97, 163, 43, 24, 254, + 34, 49, 165, 94, 36, 47, 19, 32, 85, 162, 190, 153, 183, 204, 91, 79, 45, 72, 26, 19, 197, 179, 246, + 241, 27, 141, 177, 9, 8, 140, 112, 212, 5, 197, 86, 2, 170, 125, 179, 138, 240, 204, 99, 59, 190, 113, + 181, 166, 159, 40, 154, 81, 12, 141, 122, 185, 220, 203, 22, 82, 105, 146, 52, 118, 16, 10, 99, 183, 103, + 18, 170, 142, 204, 49, 73, 250, 61, 243, 85, 203, 17, 170, 133, 171, 179, 237, 163, 131, 150, 71, 171, 120, + 213, 3, 147, 138, 223, 171, 235, 51, 5, 155, 19, 241, 253, 152, 19, 44, 250, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 135, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 65, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 244, 211, 170, 226, 44, 181, 208, 68, 143, 94, 85, 229, 211, 16, 23, 219, 94, 35, + 159, 63, 208, 207, 7, 189, 254, 207, 38, 6, 105, 92, 104, 73, 219, 191, 108, 99, 60, 49, 42, 4, 245, + 158, 239, 131, 42, 206, 187, 214, 73, 138, 59, 85, 54, 100, 38, 133, 182, 157, 187, 150, 4, 78, 120, 22, + 119, 91, 52, 156, 127, 9, 93, 6, 172, 209, 114, 249, 190, 67, 235, 169, 133, 125, 66, 214, 234, 146, 219, + 149, 193, 150, 35, 56, 98, 38, 233, 125, 89, 73, 146, 230, 47, 159, 52, 31, 23, 164, 103, 250, 228, 57, + 25, 2, 219, 56, 122, 58, 190, 232, 120, 205, 35, 25, 76, 182, 69, 137, 27, 5, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 156, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 88, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {2, 3, 4, 33}, {2, 3, 1, 2, 32, 32}, DT_UINT8}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + 
for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {2, 3, 1, 2, 32, 32}, {2, 3, 4, 33}, DT_UINT8}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0])); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape1_fp16_1) { + uint16_t data[1] = {14805}; + uint16_t ret[1 * 1 * 16 * 16] = { + 14805, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {1}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, 
FORMAT_ND, {1, 1, 16, 16}, {1}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape1_fp16_2) { + uint16_t data[15] = { + 13976, 13979, 13424, 13692, 14268, 13703, 14220, 13135, 13693, 13965, 12067, 14261, 14716, 14975, 13763, + }; + + uint16_t ret[1 * 1 * 16 * 16] = { + 13976, 13979, 13424, 13692, 14268, 13703, 14220, 13135, 13693, 13965, 12067, 14261, 14716, 14975, 13763, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {15}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {1, 1, 16, 16}, {15}, DT_FLOAT16}; + TransResult 
result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape2_fp16_lt_cube) { + uint16_t data[1 * 16] = { + 15300, 13152, 14880, 15296, 14710, 13100, 13380, 15207, 15273, 13920, 14607, 13777, 14943, 14517, 15138, 15198, + }; + uint16_t ret[1 * 1 * 16 * 16] = { + 15300, 13152, 14880, 15296, 14710, 13100, 13380, 15207, 15273, 13920, 14607, 13777, 14943, 14517, 15138, 15198, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {1, 16}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {1, 1, 16, 16}, {1, 16}, DT_FLOAT16}; + TransResult result2; + 
EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape2_fp16_bt_cube) { + uint16_t data[15 * 17] = { + 13136, 13938, 13692, 14295, 15066, 15291, 14157, 14975, 14355, 15311, 12473, 13320, 13125, 13649, 13016, 15193, + 11689, 13992, 13147, 15051, 14496, 11768, 15080, 14341, 14215, 12872, 12580, 14679, 14670, 15058, 11421, 12526, + 14768, 14792, 13796, 14983, 15175, 14399, 14858, 12314, 11303, 15151, 14493, 15153, 14328, 13337, 11597, 14341, + 14482, 14325, 13750, 12838, 12433, 13391, 14811, 15346, 13415, 14518, 13790, 14547, 10431, 14110, 15298, 14310, + 12360, 12432, 13530, 14788, 11648, 13664, 14697, 14219, 13649, 14621, 14898, 14588, 13875, 14302, 14599, 14620, + 14884, 14977, 14728, 14506, 14855, 15326, 13580, 14575, 14426, 15284, 12298, 13859, 13743, 14833, 15096, 10690, + 13315, 13237, 13799, 14673, 13948, 14728, 13372, 15186, 15013, 14558, 15144, 14004, 14390, 14773, 14941, 15133, + 14678, 13959, 12531, 13905, 12597, 14344, 14513, 11837, 13348, 14198, 14936, 15120, 13568, 12183, 13921, 14910, + 13254, 11297, 14298, 14729, 12965, 13326, 15005, 13947, 15095, 12504, 14994, 15355, 14176, 12754, 13388, 14572, + 13598, 15154, 14542, 2456, 13328, 14108, 12614, 14357, 14524, 14887, 14628, 14856, 14437, 14022, 14686, 10009, + 12263, 14832, 14516, 14331, 14472, 14569, 12931, 14562, 14404, 14582, 14079, 14079, 11643, 12625, 13684, 12119, + 14728, 14379, 14513, 14384, 14791, 7282, 10282, 14394, 14584, 12495, 15004, 8552, 13711, 15079, 14360, 12401, + 14452, 12655, 14601, 11849, 14306, 13585, 10584, 14535, 11525, 14830, 13127, 13605, 13200, 14650, 13011, 12334, + 14387, 14501, 12816, 14917, 13389, 14660, 14716, 13227, 13387, 14959, 13128, 7764, 13873, 13452, 14378, 13317, + 14482, 13781, 11235, 12167, 15153, 14285, 13952, 
14930, 15093, 14145, 14392, 15038, 14442, 13980, 14247, 12048, + 14542, 15012, 12545, 13550, 9689, 14928, 13519, 14253, 15310, 4717, 14838, 15210, 14477, 14316, 12144, + }; + uint16_t ret[1 * 2 * 16 * 16] = { + 13136, 13938, 13692, 14295, 15066, 15291, 14157, 14975, 14355, 15311, 12473, 13320, 13125, 13649, 13016, 15193, + 13992, 13147, 15051, 14496, 11768, 15080, 14341, 14215, 12872, 12580, 14679, 14670, 15058, 11421, 12526, 14768, + 13796, 14983, 15175, 14399, 14858, 12314, 11303, 15151, 14493, 15153, 14328, 13337, 11597, 14341, 14482, 14325, + 12838, 12433, 13391, 14811, 15346, 13415, 14518, 13790, 14547, 10431, 14110, 15298, 14310, 12360, 12432, 13530, + 11648, 13664, 14697, 14219, 13649, 14621, 14898, 14588, 13875, 14302, 14599, 14620, 14884, 14977, 14728, 14506, + 15326, 13580, 14575, 14426, 15284, 12298, 13859, 13743, 14833, 15096, 10690, 13315, 13237, 13799, 14673, 13948, + 13372, 15186, 15013, 14558, 15144, 14004, 14390, 14773, 14941, 15133, 14678, 13959, 12531, 13905, 12597, 14344, + 11837, 13348, 14198, 14936, 15120, 13568, 12183, 13921, 14910, 13254, 11297, 14298, 14729, 12965, 13326, 15005, + 15095, 12504, 14994, 15355, 14176, 12754, 13388, 14572, 13598, 15154, 14542, 2456, 13328, 14108, 12614, 14357, + 14887, 14628, 14856, 14437, 14022, 14686, 10009, 12263, 14832, 14516, 14331, 14472, 14569, 12931, 14562, 14404, + 14079, 14079, 11643, 12625, 13684, 12119, 14728, 14379, 14513, 14384, 14791, 7282, 10282, 14394, 14584, 12495, + 8552, 13711, 15079, 14360, 12401, 14452, 12655, 14601, 11849, 14306, 13585, 10584, 14535, 11525, 14830, 13127, + 13200, 14650, 13011, 12334, 14387, 14501, 12816, 14917, 13389, 14660, 14716, 13227, 13387, 14959, 13128, 7764, + 13452, 14378, 13317, 14482, 13781, 11235, 12167, 15153, 14285, 13952, 14930, 15093, 14145, 14392, 15038, 14442, + 14247, 12048, 14542, 15012, 12545, 13550, 9689, 14928, 13519, 14253, 15310, 4717, 14838, 15210, 14477, 14316, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11689, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 14792, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13750, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14788, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14855, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13947, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14524, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14582, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15004, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13605, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13873, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13980, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12144, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {15, 17}, {1, 2, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {1, 2, 16, 16}, {15, 17}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape3_fp16) { + uint16_t data[2 * 4 * 17] = { + 14356, 14196, 13985, 15033, 14593, 13229, 15318, 14470, 14100, 14818, 14847, 14989, 13011, 14733, 10638, 12097, + 15335, 10990, 14628, 14757, 15124, 14145, 10114, 14396, 15245, 14402, 15193, 11308, 14658, 14565, 15185, 14891, + 
15020, 13353, 15346, 13820, 11782, 14436, 11308, 12817, 13936, 15058, 14104, 13169, 13833, 15175, 9882, 11003, + 15219, 14533, 14894, 13845, 8417, 15160, 13856, 15086, 14683, 15158, 14653, 13931, 14588, 13947, 15046, 14568, + 12336, 15339, 14343, 8727, 13185, 14113, 14384, 13943, 15189, 15323, 12386, 13539, 14971, 12917, 12787, 13738, + 11293, 14121, 14757, 15202, 14586, 12485, 14816, 14622, 14352, 14774, 12407, 14750, 14368, 14650, 14876, 15086, + 14808, 13749, 13358, 14550, 14031, 13210, 15228, 8667, 14913, 15138, 14055, 14865, 15290, 14167, 13875, 14433, + 15156, 13546, 15023, 13825, 14349, 13734, 14023, 14934, 11822, 14858, 14871, 13453, 14480, 13670, 13346, 11262, + 15088, 13484, 15274, 13007, 12458, 13530, 14978, 15155, + }; + uint16_t ret[2 * 1 * 2 * 16 * 16] = { + 14356, 14196, 13985, 15033, 14593, 13229, 15318, 14470, 14100, 14818, 14847, 14989, 13011, 14733, 10638, 12097, + 10990, 14628, 14757, 15124, 14145, 10114, 14396, 15245, 14402, 15193, 11308, 14658, 14565, 15185, 14891, 15020, + 15346, 13820, 11782, 14436, 11308, 12817, 13936, 15058, 14104, 13169, 13833, 15175, 9882, 11003, 15219, 14533, + 13845, 8417, 15160, 13856, 15086, 14683, 15158, 14653, 13931, 14588, 13947, 15046, 14568, 12336, 15339, 14343, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15335, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14894, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
8727, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13185, 14113, 14384, 13943, 15189, 15323, 12386, 13539, 14971, 12917, 12787, 13738, 11293, 14121, 14757, 15202, + 12485, 14816, 14622, 14352, 14774, 12407, 14750, 14368, 14650, 14876, 15086, 14808, 13749, 13358, 14550, 14031, + 15228, 8667, 14913, 15138, 14055, 14865, 15290, 14167, 13875, 14433, 15156, 13546, 15023, 13825, 14349, 13734, + 14934, 11822, 14858, 14871, 13453, 14480, 13670, 13346, 11262, 15088, 13484, 15274, 13007, 12458, 13530, 14978, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14023, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZz transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {2, 4, 17}, {2, 1, 2, 16, 16}, DT_FLOAT16}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + + FormatTransferFractalZzND transfer2; + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {2, 1, 2, 16, 16}, {2, 4, 17}, DT_FLOAT16}; + TransResult result2; + EXPECT_EQ(transfer2.TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape4_fp16_1) { + uint16_t data[1 * 1 * 17 * 4] = { + 15326, 14638, 15204, 11500, 13867, 13869, 14727, 13806, 14006, 13821, 14019, 14505, 11796, 11517, + 14469, 13619, 14439, 14777, 14089, 14677, 13678, 15328, 11413, 14218, 14677, 14790, 13502, 13417, + 14288, 14807, 13460, 14627, 14607, 12875, 13632, 13706, 14012, 14699, 12040, 13948, 14099, 13854, + 11998, 14920, 14080, 13218, 10556, 15298, 15131, 14444, 10388, 15036, 13989, 14702, 14443, 14180, + 14565, 12548, 15124, 14783, 13679, 14072, 13832, 13606, 11777, 11673, 14339, 13487, + }; + uint16_t ret[1 * 
1 * 2 * 1 * 16 * 16] = { + 15326, 14638, 15204, 11500, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13867, 13869, 14727, 13806, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14006, 13821, 14019, 14505, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11796, 11517, 14469, 13619, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14439, 14777, 14089, 14677, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13678, 15328, 11413, 14218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14677, 14790, 13502, 13417, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14288, 14807, 13460, 14627, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14607, 12875, 13632, 13706, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14012, 14699, 12040, 13948, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14099, 13854, 11998, 14920, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14080, 13218, 10556, 15298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15131, 14444, 10388, 15036, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13989, 14702, 14443, 14180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14565, 12548, 15124, 14783, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13679, 14072, 13832, 13606, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11777, 11673, 14339, 13487, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {1, 1, 17, 4}, {1, 1, 
2, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {1, 1, 2, 1, 16, 16}, {1, 1, 17, 4}, DT_FLOAT16}; + TransResult result2; + FormatTransferFractalZzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } + EXPECT_EQ( + transfer2.TransShape(args2.src_format, args2.src_shape, args2.src_data_type, args2.dst_format, args2.dst_shape), + UNSUPPORTED); +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape4_fp16) { + uint16_t data[2 * 2 * 17 * 4] = { + 13717, 14633, 13393, 15273, 14219, 14977, 15129, 15049, 15185, 14727, 14121, 13586, 10579, 15226, 15260, 11991, + 15078, 14415, 14178, 15075, 12726, 14277, 10874, 9451, 12911, 11791, 15013, 12924, 15180, 13784, 7774, 15348, + 14477, 14247, 14523, 15246, 14223, 14805, 14335, 12387, 9019, 15012, 15220, 15254, 11593, 13539, 14436, 14677, + 12538, 13629, 15131, 14448, 13915, 12483, 14604, 7842, 14802, 14612, 15327, 12582, 13820, 14096, 12041, 15051, + 14481, 14387, 14791, 11639, 14035, 14968, 15245, 13959, 15076, 14043, 15351, 13718, 14065, 11168, 14670, 13385, + 14415, 10126, 14327, 12871, 13278, 14263, 13842, 13420, 14813, 15179, 15201, 12669, 10909, 13404, 15012, 14775, + 14167, 14296, 14659, 14999, 8257, 15345, 11292, 14284, 14388, 14215, 14899, 13700, 15117, 11280, 14407, 15226, + 14659, 14238, 9645, 14763, 14259, 11179, 14093, 15064, 14546, 10329, 14517, 
11502, 11346, 14389, 15343, 13370, + 15305, 14455, 15170, 12374, 15135, 13232, 11257, 13393, 13192, 14629, 13648, 15159, 14576, 13830, 14427, 14739, + 12837, 13978, 14633, 15185, 14971, 11183, 15123, 14586, 12630, 14836, 14897, 13545, 14842, 14514, 14407, 12336, + 15142, 15291, 14817, 13495, 15149, 15219, 15302, 14088, 9507, 12779, 14672, 12172, 14361, 9038, 10286, 11767, + 11743, 14965, 14847, 14598, 14362, 13553, 14834, 15149, 15325, 14845, 13870, 15102, 14942, 15359, 14904, 15034, + 9360, 13457, 13630, 12647, 13767, 15264, 13700, 12651, 14721, 14055, 15035, 14461, 13251, 13468, 15235, 14382, + 14905, 14092, 12383, 14141, 15303, 12123, 14006, 14228, 15136, 13119, 14391, 14624, 13095, 14981, 14994, 12624, + 14894, 12580, 13922, 15265, 12878, 14602, 14165, 14303, 11757, 15331, 15343, 15149, 13209, 11594, 15247, 14459, + 13118, 12868, 13982, 11374, 13367, 14585, 13846, 15319, 14810, 13662, 12718, 13320, 14489, 12071, 13509, 15225, + 14608, 14576, 13899, 14718, 14972, 13522, 12321, 11909, 13750, 14365, 14762, 14729, 12767, 14197, 13849, 12160, + }; + uint16_t ret[2 * 2 * 2 * 1 * 16 * 16] = { + 13717, 14633, 13393, 15273, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14219, 14977, 15129, 15049, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15185, 14727, 14121, 13586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10579, 15226, 15260, 11991, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15078, 14415, 14178, 15075, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12726, 14277, 10874, 9451, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12911, 11791, 15013, 12924, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15180, 13784, 7774, 15348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14477, 14247, 14523, 15246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14223, 14805, 14335, 12387, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9019, 15012, 15220, 15254, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11593, 13539, 14436, 14677, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12538, 13629, 15131, 14448, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13915, 12483, 14604, 7842, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 14802, 14612, 15327, 12582, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13820, 14096, 12041, 15051, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14481, 14387, 14791, 11639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14035, 14968, 15245, 13959, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15076, 14043, 15351, 13718, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14065, 11168, 14670, 13385, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14415, 10126, 14327, 12871, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13278, 14263, 13842, 13420, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14813, 15179, 15201, 12669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10909, 13404, 15012, 14775, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14167, 14296, 14659, 14999, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8257, 15345, 11292, 14284, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14388, 14215, 14899, 13700, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15117, 11280, 14407, 15226, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14659, 14238, 9645, 14763, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14259, 11179, 14093, 15064, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14546, 10329, 14517, 11502, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11346, 14389, 15343, 13370, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15305, 14455, 15170, 12374, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15135, 13232, 11257, 13393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13192, 14629, 13648, 15159, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14576, 13830, 14427, 14739, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12837, 13978, 14633, 15185, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14971, 11183, 15123, 14586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12630, 14836, 14897, 13545, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14842, 14514, 14407, 12336, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15142, 15291, 14817, 13495, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15149, 15219, 15302, 14088, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9507, 12779, 14672, 12172, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14361, 9038, 10286, 11767, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11743, 14965, 14847, 14598, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14362, 13553, 14834, 15149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15325, 14845, 13870, 15102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14942, 15359, 14904, 15034, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9360, 13457, 13630, 12647, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13767, 15264, 13700, 12651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14721, 14055, 15035, 14461, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13251, 13468, 15235, 14382, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14905, 14092, 12383, 14141, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15303, 12123, 14006, 14228, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15136, 13119, 14391, 14624, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13095, 14981, 14994, 12624, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14894, 12580, 13922, 15265, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12878, 14602, 14165, 14303, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11757, 15331, 15343, 15149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13209, 11594, 15247, 14459, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13118, 12868, 13982, 11374, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13367, 14585, 13846, 15319, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14810, 13662, 12718, 13320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14489, 12071, 13509, 15225, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14608, 14576, 13899, 14718, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14972, 13522, 12321, 11909, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13750, 14365, 14762, 14729, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12767, 14197, 13849, 12160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {2, 2, 17, 4}, {2, 2, 2, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {2, 2, 2, 1, 16, 16}, {2, 2, 17, 4}, DT_FLOAT16}; + TransResult result2; + FormatTransferFractalZzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } + EXPECT_EQ( + transfer2.TransShape(args2.src_format, args2.src_shape, args2.src_data_type, args2.dst_format, args2.dst_shape), + UNSUPPORTED); +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape5_fp16) { + uint16_t data[2 * 3 * 2 * 3 * 17] = { + 13832, 14161, 15182, 12654, 14878, 12188, 14405, 14401, 14635, 14215, 15223, 14238, 15133, 11718, 15331, 14338, + 15265, 15194, 15288, 13191, 14573, 14512, 14999, 13420, 
12967, 14867, 13486, 14285, 11731, 15012, 15185, 14578, + 14604, 14930, 15339, 14672, 14361, 14595, 13897, 13702, 14738, 14940, 15055, 10452, 13759, 14578, 12756, 14084, + 14632, 10550, 14379, 13063, 15277, 12710, 13758, 14430, 14032, 12959, 12610, 14780, 15077, 14376, 14767, 14531, + 12999, 14750, 14625, 14733, 13516, 14989, 14483, 13888, 14280, 14438, 15054, 14699, 11164, 14506, 9849, 14973, + 14036, 12835, 14942, 15314, 11479, 14805, 13564, 14216, 14489, 12263, 12317, 14394, 15330, 14865, 15101, 13487, + 14978, 14717, 14730, 9234, 14369, 12551, 13386, 14797, 15142, 14638, 13065, 14524, 14501, 14516, 15074, 9012, + 14343, 11870, 15004, 14822, 13897, 13746, 14365, 12776, 13974, 11111, 14798, 14519, 11743, 13554, 15174, 14761, + 13111, 15119, 13462, 13807, 13867, 13672, 8661, 14533, 12666, 12279, 13297, 14739, 13949, 14644, 15039, 14740, + 14543, 15101, 14416, 13802, 13635, 14776, 14331, 14353, 14947, 14299, 12744, 15291, 14536, 11738, 14455, 15308, + 15266, 13019, 13572, 11926, 15082, 14224, 9462, 13668, 14824, 12329, 12870, 14772, 14834, 11708, 15172, 14120, + 13451, 15360, 13950, 13965, 13200, 10682, 13446, 13298, 15052, 14316, 13322, 15079, 14510, 15047, 15209, 12569, + 14585, 14428, 11620, 14371, 15045, 14808, 14919, 14586, 14743, 14510, 12366, 12928, 14632, 13414, 13375, 14798, + 13449, 13741, 14342, 15150, 14384, 15197, 14590, 13859, 14973, 15093, 13743, 12435, 15349, 15007, 12547, 15053, + 14896, 15134, 14397, 14418, 13524, 14763, 13316, 12556, 15102, 15036, 12881, 13259, 14450, 14956, 13726, 12271, + 14861, 12837, 14857, 13393, 13747, 14731, 14557, 10441, 15208, 12014, 14473, 14014, 13970, 13328, 15348, 15291, + 15250, 13773, 14774, 14388, 14110, 12128, 13446, 13528, 13884, 11343, 15344, 15110, 13023, 11379, 10818, 14879, + 13726, 14487, 15089, 14818, 15154, 14075, 15287, 14476, 14361, 14792, 14905, 14017, 14056, 12394, 14860, 14831, + 15246, 9459, 14900, 14641, 11659, 15087, 15043, 12976, 15048, 14800, 14994, 15204, 9229, 14139, 12185, 15170, + 11514, 
14916, 14476, 9861, 14664, 13726, 13386, 15268, 14659, 15240, 14942, 15287, 14214, 15340, 12256, 12964, + 12739, 14755, 14766, 13935, 11940, 12670, 14601, 11843, 14484, 15131, 14896, 14675, 15174, 10600, 13778, 12872, + 11877, 14065, 12621, 14708, 15078, 15146, 12562, 15177, 14771, 14191, 15296, 14136, 14995, 14103, 14462, 13363, + 14599, 15154, 14747, 11554, 15275, 14737, 14484, 11567, 12337, 11946, 15254, 11493, 14168, 14868, 15090, 13811, + 14064, 14996, 15243, 14645, 15240, 14774, 13726, 12929, 14818, 14524, 13709, 14709, 12375, 14916, 13434, 14960, + 13966, 14502, 14232, 15030, 15145, 15174, 13891, 13952, 14401, 12912, 13329, 15200, 14691, 13676, 14057, 11970, + 15359, 14434, 14461, 14734, 14446, 14236, 12909, 13688, 8230, 14875, 15305, 13715, 14729, 13571, 14578, 13491, + 15049, 12556, 14350, 14044, 13558, 14617, 13512, 15214, 14461, 11904, 14929, 14996, 13421, 14285, 12716, 13199, + 14107, 13368, 15012, 15150, 13428, 13113, 12456, 14275, 14909, 13438, 14050, 13672, 14489, 11595, 14776, 14038, + 14285, 12346, 13721, 12983, 13332, 14940, 13479, 14683, 13765, 15125, 13420, 14035, 9788, 15254, 15200, 14645, + 13748, 14510, 15293, 14706, 14444, 14586, 14786, 10598, 14997, 14358, 15144, 13528, 7493, 10094, 11973, 12700, + 14358, 12935, 12858, 14628, 13953, 13500, 14436, 14342, 15329, 14811, 11681, 13516, 12939, 15358, 12927, 14526, + 8919, 9601, 15163, 12303, 9854, 14850, 14704, 14548, 12500, 13563, 14251, 14267, 14862, 14164, 14370, 13450, + 14482, 14247, 13076, 14777, 13678, 12668, 14488, 8794, 13883, 13602, 10705, 12641, 13569, 13477, 15042, 15269, + 14449, 14489, 14341, 14127, 14554, 11366, 15130, 13479, 13384, 14574, 11501, 9470, 15122, 14179, 14480, 13224, + 12376, 14420, 14469, 14920, 12054, 7632, 14994, 13384, 11545, 15233, 14744, 14554, 14136, 13251, 14072, 13863, + 13176, 12532, 11110, 15262, 14771, 15191, 14622, 14752, 12759, 15150, 14699, 14797, 13573, 13782, 12854, 14870, + 13485, 15242, 14253, 14472, 14578, 14140, 12008, 14335, 11901, 14540, 14607, 
15197, 13362, 14992, 14699, 14469, + 15065, 14702, 15180, 12374, 11178, 14562, 14063, 14387, 14358, 12712, 14251, 15251, 14755, 10911, 14955, 14890, + 15255, 12943, 14027, 14621, + }; + uint16_t ret[2 * 3 * 2 * 1 * 2 * 16 * 16] = { + 13832, 14161, 15182, 12654, 14878, 12188, 14405, 14401, 14635, 14215, 15223, 14238, 15133, 11718, 15331, 14338, + 15194, 15288, 13191, 14573, 14512, 14999, 13420, 12967, 14867, 13486, 14285, 11731, 15012, 15185, 14578, 14604, + 15339, 14672, 14361, 14595, 13897, 13702, 14738, 14940, 15055, 10452, 13759, 14578, 12756, 14084, 14632, 10550, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15265, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14930, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14379, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13063, 15277, 12710, 13758, 14430, 14032, 12959, 12610, 14780, 15077, 14376, 14767, 14531, 12999, 14750, 14625, + 13516, 14989, 14483, 13888, 14280, 14438, 15054, 14699, 11164, 14506, 9849, 14973, 14036, 12835, 14942, 15314, + 14805, 13564, 14216, 14489, 12263, 12317, 14394, 15330, 14865, 15101, 13487, 14978, 14717, 14730, 9234, 14369, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14733, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11479, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12551, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13386, 14797, 15142, 14638, 13065, 14524, 14501, 14516, 15074, 9012, 14343, 11870, 15004, 14822, 13897, 13746, + 12776, 13974, 11111, 14798, 14519, 11743, 
13554, 15174, 14761, 13111, 15119, 13462, 13807, 13867, 13672, 8661, + 12666, 12279, 13297, 14739, 13949, 14644, 15039, 14740, 14543, 15101, 14416, 13802, 13635, 14776, 14331, 14353, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14365, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14533, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14947, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14299, 12744, 15291, 14536, 11738, 14455, 15308, 15266, 13019, 13572, 11926, 15082, 14224, 9462, 13668, 14824, + 12870, 14772, 14834, 11708, 15172, 14120, 13451, 15360, 13950, 13965, 13200, 10682, 13446, 13298, 15052, 14316, + 15079, 14510, 15047, 15209, 12569, 14585, 14428, 11620, 14371, 15045, 14808, 14919, 14586, 14743, 14510, 12366, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12329, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13322, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12928, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14632, 13414, 13375, 14798, 13449, 13741, 14342, 15150, 14384, 15197, 14590, 13859, 14973, 15093, 13743, 12435, + 15007, 12547, 15053, 14896, 15134, 14397, 14418, 13524, 14763, 13316, 12556, 15102, 15036, 12881, 13259, 14450, + 13726, 12271, 14861, 12837, 14857, 13393, 13747, 14731, 14557, 10441, 15208, 12014, 14473, 14014, 13970, 13328, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15349, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14956, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15291, 15250, 13773, 14774, 14388, 14110, 12128, 13446, 13528, 13884, 11343, 15344, 15110, 13023, 11379, 10818, + 13726, 14487, 15089, 14818, 15154, 14075, 15287, 14476, 14361, 14792, 14905, 14017, 14056, 12394, 14860, 14831, + 9459, 14900, 14641, 11659, 15087, 15043, 12976, 15048, 14800, 14994, 15204, 9229, 14139, 12185, 15170, 11514, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14916, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14476, 9861, 14664, 13726, 13386, 15268, 14659, 15240, 14942, 15287, 14214, 15340, 12256, 12964, 12739, 14755, + 13935, 11940, 12670, 14601, 11843, 14484, 15131, 14896, 14675, 15174, 10600, 13778, 12872, 11877, 14065, 12621, + 15078, 15146, 12562, 15177, 14771, 14191, 15296, 14136, 14995, 14103, 14462, 13363, 14599, 15154, 14747, 11554, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14766, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14708, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15275, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14737, 14484, 11567, 12337, 11946, 15254, 11493, 14168, 14868, 15090, 13811, 14064, 14996, 15243, 14645, 15240, + 13726, 12929, 14818, 14524, 13709, 14709, 12375, 14916, 13434, 14960, 13966, 14502, 14232, 15030, 15145, 15174, + 13952, 14401, 12912, 13329, 15200, 14691, 13676, 14057, 11970, 15359, 14434, 14461, 14734, 14446, 14236, 12909, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14774, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13891, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13688, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8230, 14875, 15305, 13715, 14729, 13571, 14578, 13491, 15049, 12556, 14350, 14044, 13558, 14617, 13512, 15214, + 11904, 14929, 14996, 13421, 14285, 12716, 13199, 14107, 13368, 15012, 15150, 13428, 13113, 12456, 14275, 14909, + 14050, 13672, 14489, 11595, 14776, 14038, 14285, 12346, 13721, 12983, 13332, 14940, 13479, 14683, 13765, 15125, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14461, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13438, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13420, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14035, 9788, 15254, 15200, 14645, 13748, 14510, 15293, 14706, 14444, 14586, 14786, 10598, 14997, 14358, 15144, + 7493, 10094, 11973, 12700, 14358, 12935, 12858, 14628, 13953, 13500, 14436, 14342, 15329, 14811, 11681, 13516, + 15358, 12927, 14526, 8919, 9601, 15163, 12303, 9854, 14850, 14704, 14548, 12500, 13563, 14251, 14267, 14862, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13528, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12939, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14370, 13450, 14482, 14247, 13076, 14777, 13678, 12668, 14488, 8794, 13883, 13602, 10705, 12641, 13569, 13477, + 15269, 14449, 14489, 14341, 14127, 14554, 11366, 15130, 13479, 13384, 14574, 11501, 9470, 15122, 14179, 14480, + 12376, 14420, 14469, 14920, 12054, 7632, 14994, 13384, 11545, 15233, 14744, 14554, 14136, 13251, 14072, 13863, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15042, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13176, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12532, 11110, 15262, 14771, 15191, 14622, 14752, 12759, 15150, 14699, 14797, 13573, 13782, 12854, 14870, 13485, + 14253, 14472, 14578, 14140, 12008, 14335, 11901, 14540, 14607, 15197, 13362, 14992, 14699, 14469, 15065, 14702, + 12374, 11178, 14562, 14063, 14387, 14358, 12712, 14251, 15251, 14755, 10911, 14955, 14890, 15255, 12943, 14027, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15242, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14621, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {2, 3, 2, 3, 17}, + {2, 3, 2, 1, 2, 16, 16}, 
DT_FLOAT16}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, + {2, 3, 2, 1, 2, 16, 16}, {2, 3, 2, 3, 17}, DT_FLOAT16}; + TransResult result2; + FormatTransferFractalZzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 2); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nd_shape4_fp32) { + float data[2 * 2 * 4 * 17] = { + 0.48734783127746584, 0.15190233146679444, 0.6309953031435181, 0.6571782855267494, 0.18640007179772766, + 0.8708466062957234, 0.8354796070795485, 0.5937101170057715, 0.20286466757490806, 0.7236210224379754, + 0.5133016305159891, 0.30759221385705016, 0.8034168117461314, 0.8396583387580794, 0.6499070393851767, + 0.8144802952530074, 0.5948098768034782, 0.5950270148901504, 0.06571103810980217, 0.04113537524070798, + 0.850404641124823, 0.9948419071548489, 0.6215161021063617, 0.16249192772658316, 0.5050913431975846, + 0.7937775887578682, 0.028831707551000396, 0.28325013514723507, 0.8211410723546999, 0.7176669570041827, + 0.6588421729934614, 0.16841110318562091, 0.7095162395962725, 0.39576308638615343, 0.10528222016675215, + 0.43028024321798863, 0.044868717722925666, 0.8408329229287285, 0.931315710167798, 0.5383338657309856, + 0.7474342292121668, 0.6531557890786839, 0.7071920439731455, 0.10085996724687896, 0.21146039946088158, + 0.3211131278764192, 0.921636927547933, 0.9981429184367644, 0.09662709007192716, 
0.4734277260159877, + 0.835128648845281, 0.9867914228220531, 0.4415437181821612, 0.45416217225683975, 0.27983653489892013, + 0.7609050274817578, 0.23017630407947565, 0.7702388069981185, 0.2477211058028833, 0.8261031804962897, + 0.4877356097679747, 0.8602972626219345, 0.7063671926943388, 0.4048930504139826, 0.7880778896294389, + 0.5829037833793927, 0.6209757776523838, 0.6085773554247663, 0.4679223140485166, 0.027438781009462065, + 0.3716878195231198, 0.0199579978088682, 0.36102038094904887, 0.6431222830463951, 0.34741587153320386, + 0.22603314514462725, 0.5067781241362863, 0.8342422249794591, 0.26646178333204906, 0.6759305818665823, + 0.6690347797724682, 0.7929141594499515, 0.4097471168728528, 0.9675174260723787, 0.3525636088638997, + 0.8824227903662591, 0.004340246025094863, 0.5813145980752394, 0.5704711233756934, 0.8954523668644979, + 0.6484554011109014, 0.17555592535956188, 0.5157569568469266, 0.8812349707210847, 0.812251544084032, + 0.8656711072891791, 0.8172850573004784, 0.5949828072747959, 0.9049741100221382, 0.8879859467554044, + 0.8313146429660512, 0.0457129718803142, 0.4065258282791677, 0.15727897761125864, 0.3484205075303167, + 0.20316484509187538, 0.040710637233631664, 0.9422636673185534, 0.0668039513759997, 0.5166874092345884, + 0.28992632700217513, 0.45448621751855833, 0.8428703807764792, 0.8095396267374237, 0.9405578679861039, + 0.6690338086676076, 0.6856661561482512, 0.509377133767494, 0.18949568385264326, 0.746092919110688, + 0.718560590882892, 0.19987548889823148, 0.5985766244393099, 0.3892282635563432, 0.042174527314988386, + 0.28834424048937324, 0.5647006422391396, 0.10768865385191584, 0.6680480168326073, 0.48502265172153936, + 0.20107347266422226, 0.06700154189232066, 0.2583110319336359, 0.14494240789279178, 0.214153857788978, + 0.9730538073350009, 0.7358124562940163, 0.9023289319229876, 0.5584131484343305, 0.7165017329880333, + 0.5698554353567142, 0.8659927309025036, 0.4755112121320433, 0.3580491896897282, 0.7820043061394484, + 
0.3875849451386024, 0.6442406157037353, 0.9250353474149741, 0.8698903846139167, 0.25516670302899325, + 0.34742866403193917, 0.5540345282385027, 0.1249381619067389, 0.6511396228200564, 0.0518584845004707, + 0.47687327587566675, 0.7391090941667979, 0.050741858095834425, 0.3565270757329517, 0.42851750996605265, + 0.5294163914194764, 0.7398537176473816, 0.9078831020615753, 0.1572735298694834, 0.09459429130511421, + 0.3617954816433758, 0.3029217652293019, 0.5997775212822984, 0.248580737678893, 0.9543674249350337, + 0.954451145757734, 0.1962316838207071, 0.6745526891253325, 0.88651822532634, 0.9624023505476093, + 0.8682513367267045, 0.5281261201765712, 0.15965941469601175, 0.4441328893966944, 0.2923804418476723, + 0.06490779221949527, 0.0017403590798413804, 0.8520009823265204, 0.44971843937991374, 0.3386034163640277, + 0.7702096738809434, 0.7334994568530535, 0.8830585641425194, 0.6618970151878655, 0.4848376040578781, + 0.5049250727515491, 0.5785873080885714, 0.9952698162199863, 0.43430072374659, 0.6188847870072389, + 0.7581591778750364, 0.6383739016829548, 0.7567651057215206, 0.7513738640734398, 0.49253050728145975, + 0.8139147833902005, 0.14316930311349307, 0.5945922505817162, 0.6035610085176568, 0.9930256611842178, + 0.5403654583619014, 0.8979267433404176, 0.28173125276394995, 0.3467577704282401, 0.23380875880551844, + 0.7134712817327136, 0.7883287163765654, 0.7846645621192855, 0.3189860693282949, 0.8804937197162858, + 0.7337066619168504, 0.37284629923791457, 0.3537077448212921, 0.16015819664911035, 0.555665025454486, + 0.27860376850661894, 0.8312480037258619, 0.19232821858251292, 0.9001473519318394, 0.5047301751313313, + 0.38814468343949315, 0.39857403311434425, 0.1371117310249017, 0.6115736287886191, 0.3291784075711639, + 0.0050888243214743945, 0.28200919754529385, 0.757181510166513, 0.3924420485744937, 0.3675296019027998, + 0.5701179208189977, 0.11717175159145532, 0.8646010844866165, 0.32028644383186755, 0.7555585921315293, + 0.8421362025797383, 
0.014565912419013505, 0.1192955679481531, 0.19321039675423968, 0.2792456334803882, + 0.6943539216498176, 0.22242515779974714, 0.0312889230468939, 0.5201858645943402, 0.3820715745426422, + 0.915240687220273, 0.5018375488170763, 0.4544010301976812, 0.10290809465603667, 0.9988616579631764, + 0.4639631681412365, 0.36800589920527194, 0.02608047026799898, 0.3907130363579898, 0.22964761662220634, + 0.36479183564369644, 0.8837663008817578, 0.8767224380658272, 0.09642232491729474, 0.37809846987796636, + 0.8852159908183284, 0.30341830122754354, 0.7551325894584977, 0.5477351512879798, 0.05477218246488447, + 0.5564337547416925, 0.8988484613080273, + }; + float ret[2 * 2 * 1 * 2 * 16 * 16] = { + 0.48734783127746584, + 0.15190233146679444, + 0.6309953031435181, + 0.6571782855267494, + 0.18640007179772766, + 0.8708466062957234, + 0.8354796070795485, + 0.5937101170057715, + 0.20286466757490806, + 0.7236210224379754, + 0.5133016305159891, + 0.30759221385705016, + 0.8034168117461314, + 0.8396583387580794, + 0.6499070393851767, + 0.8144802952530074, + 0.5950270148901504, + 0.06571103810980217, + 0.04113537524070798, + 0.850404641124823, + 0.9948419071548489, + 0.6215161021063617, + 0.16249192772658316, + 0.5050913431975846, + 0.7937775887578682, + 0.028831707551000396, + 0.28325013514723507, + 0.8211410723546999, + 0.7176669570041827, + 0.6588421729934614, + 0.16841110318562091, + 0.7095162395962725, + 0.10528222016675215, + 0.43028024321798863, + 0.044868717722925666, + 0.8408329229287285, + 0.931315710167798, + 0.5383338657309856, + 0.7474342292121668, + 0.6531557890786839, + 0.7071920439731455, + 0.10085996724687896, + 0.21146039946088158, + 0.3211131278764192, + 0.921636927547933, + 0.9981429184367644, + 0.09662709007192716, + 0.4734277260159877, + 0.9867914228220531, + 0.4415437181821612, + 0.45416217225683975, + 0.27983653489892013, + 0.7609050274817578, + 0.23017630407947565, + 0.7702388069981185, + 0.2477211058028833, + 0.8261031804962897, + 0.4877356097679747, + 
0.8602972626219345, + 0.7063671926943388, + 0.4048930504139826, + 0.7880778896294389, + 0.5829037833793927, + 0.6209757776523838, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5948098768034782, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.39576308638615343, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.835128648845281, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6085773554247663, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4679223140485166, + 0.027438781009462065, + 0.3716878195231198, + 0.0199579978088682, + 0.36102038094904887, + 0.6431222830463951, + 0.34741587153320386, + 0.22603314514462725, + 0.5067781241362863, + 0.8342422249794591, + 0.26646178333204906, + 0.6759305818665823, + 0.6690347797724682, + 0.7929141594499515, + 0.4097471168728528, + 0.9675174260723787, + 0.8824227903662591, + 0.004340246025094863, + 0.5813145980752394, + 0.5704711233756934, + 0.8954523668644979, + 0.6484554011109014, + 0.17555592535956188, + 0.5157569568469266, + 0.8812349707210847, + 0.812251544084032, + 0.8656711072891791, + 0.8172850573004784, + 0.5949828072747959, + 0.9049741100221382, + 
0.8879859467554044, + 0.8313146429660512, + 0.4065258282791677, + 0.15727897761125864, + 0.3484205075303167, + 0.20316484509187538, + 0.040710637233631664, + 0.9422636673185534, + 0.0668039513759997, + 0.5166874092345884, + 0.28992632700217513, + 0.45448621751855833, + 0.8428703807764792, + 0.8095396267374237, + 0.9405578679861039, + 0.6690338086676076, + 0.6856661561482512, + 0.509377133767494, + 0.746092919110688, + 0.718560590882892, + 0.19987548889823148, + 0.5985766244393099, + 0.3892282635563432, + 0.042174527314988386, + 0.28834424048937324, + 0.5647006422391396, + 0.10768865385191584, + 0.6680480168326073, + 0.48502265172153936, + 0.20107347266422226, + 0.06700154189232066, + 0.2583110319336359, + 0.14494240789279178, + 0.214153857788978, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3525636088638997, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0457129718803142, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18949568385264326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9730538073350009, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7358124562940163, + 0.9023289319229876, 
+ 0.5584131484343305, + 0.7165017329880333, + 0.5698554353567142, + 0.8659927309025036, + 0.4755112121320433, + 0.3580491896897282, + 0.7820043061394484, + 0.3875849451386024, + 0.6442406157037353, + 0.9250353474149741, + 0.8698903846139167, + 0.25516670302899325, + 0.34742866403193917, + 0.5540345282385027, + 0.6511396228200564, + 0.0518584845004707, + 0.47687327587566675, + 0.7391090941667979, + 0.050741858095834425, + 0.3565270757329517, + 0.42851750996605265, + 0.5294163914194764, + 0.7398537176473816, + 0.9078831020615753, + 0.1572735298694834, + 0.09459429130511421, + 0.3617954816433758, + 0.3029217652293019, + 0.5997775212822984, + 0.248580737678893, + 0.954451145757734, + 0.1962316838207071, + 0.6745526891253325, + 0.88651822532634, + 0.9624023505476093, + 0.8682513367267045, + 0.5281261201765712, + 0.15965941469601175, + 0.4441328893966944, + 0.2923804418476723, + 0.06490779221949527, + 0.0017403590798413804, + 0.8520009823265204, + 0.44971843937991374, + 0.3386034163640277, + 0.7702096738809434, + 0.8830585641425194, + 0.6618970151878655, + 0.4848376040578781, + 0.5049250727515491, + 0.5785873080885714, + 0.9952698162199863, + 0.43430072374659, + 0.6188847870072389, + 0.7581591778750364, + 0.6383739016829548, + 0.7567651057215206, + 0.7513738640734398, + 0.49253050728145975, + 0.8139147833902005, + 0.14316930311349307, + 0.5945922505817162, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1249381619067389, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9543674249350337, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7334994568530535, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6035610085176568, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9930256611842178, + 0.5403654583619014, + 0.8979267433404176, + 0.28173125276394995, + 0.3467577704282401, + 0.23380875880551844, + 0.7134712817327136, + 0.7883287163765654, + 0.7846645621192855, + 0.3189860693282949, + 0.8804937197162858, + 0.7337066619168504, + 0.37284629923791457, + 0.3537077448212921, + 0.16015819664911035, + 0.555665025454486, + 0.8312480037258619, + 0.19232821858251292, + 0.9001473519318394, + 0.5047301751313313, + 0.38814468343949315, + 0.39857403311434425, + 0.1371117310249017, + 0.6115736287886191, + 0.3291784075711639, + 0.0050888243214743945, + 0.28200919754529385, + 0.757181510166513, + 0.3924420485744937, + 0.3675296019027998, + 0.5701179208189977, + 0.11717175159145532, + 0.32028644383186755, + 0.7555585921315293, + 0.8421362025797383, + 0.014565912419013505, + 0.1192955679481531, + 0.19321039675423968, + 0.2792456334803882, + 0.6943539216498176, + 0.22242515779974714, + 0.0312889230468939, + 0.5201858645943402, + 0.3820715745426422, + 0.915240687220273, + 0.5018375488170763, + 0.4544010301976812, + 0.10290809465603667, + 0.4639631681412365, + 0.36800589920527194, + 0.02608047026799898, + 0.3907130363579898, + 0.22964761662220634, + 0.36479183564369644, + 0.8837663008817578, + 0.8767224380658272, + 0.09642232491729474, + 0.37809846987796636, + 0.8852159908183284, + 0.30341830122754354, + 0.7551325894584977, + 0.5477351512879798, + 0.05477218246488447, + 
0.5564337547416925, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.27860376850661894, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8646010844866165, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9988616579631764, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8988484613080273, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_ND, FORMAT_FRACTAL_ZZ, {2, 2, 4, 17}, {2, 2, 1, 2, 16, 16}, DT_FLOAT}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_ND, {2, 2, 1, 2, 16, 16}, {2, 2, 4, 17}, DT_FLOAT}; + TransResult result2; + FormatTransferFractalZzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), 
SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 4); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nchw_fp32) { + float data[1 * 2 * 4 * 17] = { + 0.48354565998830756, 0.5288379191643942, 0.6564947180129607, 0.7340645053567824, 0.38024908879386665, + 0.21585634536453502, 0.07072412555001373, 0.3895052227714314, 0.6943410020138906, 0.25354216910391947, + 0.12426328263652953, 0.4029102597944607, 0.3372883475188658, 0.15188057276427525, 0.5012013044152124, + 0.2517370960382155, 0.171046514195472, 0.030114607358494028, 0.6244909419097497, 0.35110098875166074, + 0.5870767726787481, 0.4984577853475606, 0.8268546800139805, 0.33992771536906896, 0.6460327104116448, + 0.9313283133633905, 0.2989738630411848, 0.2738679126057951, 0.06777160291006379, 0.4087307988402077, + 0.46439703610597116, 0.16710550387532463, 0.6145733123000938, 0.6662604590567247, 0.9305072712135973, + 0.8323094426588402, 0.898281245473632, 0.6976122377419587, 0.20317433978716226, 0.14557923584209076, + 0.36832234829498045, 0.9415737658692604, 0.816288063336007, 0.7422170145942777, 0.19178381426007074, + 0.20831566936693735, 0.5544509915059798, 0.36345356512424987, 0.19945926108107215, 0.45935424005820946, + 0.7679089618456956, 0.8404241327512916, 0.14812383323193712, 0.30500408542184043, 0.6568831766890362, + 0.6926143461584111, 0.5399882410175864, 0.1892429432304602, 0.07467176864247294, 0.39941943666029067, + 0.2853795635416202, 0.4533368695021506, 0.6172309334055194, 0.9181934347544681, 0.04114781094262976, + 0.19628237211682176, 0.2992459720151508, 0.8065474976443873, 0.43556366882268016, 0.29680994009755635, + 0.027028192513735783, 0.1333937532467585, 0.7057053295919739, 0.26552163258627426, 0.18921931005168524, + 0.24298428082662826, 0.002365124316292211, 0.14448627328010977, 0.22643484261451152, 0.5302073755142979, + 0.9383674394413312, 
0.9285339529978078, 0.33604609763869764, 0.9591323699000807, 0.4758520057631518, + 0.5724188248299613, 0.08772714551221295, 0.327875416428662, 0.7661605531724149, 0.5610963691380759, + 0.5663803171694582, 0.96865287455917, 0.7105003163177777, 0.17674291676252174, 0.4553989259943879, + 0.06394523733703428, 0.47436999492415177, 0.317587265492125, 0.6702452379659213, 0.23264947092844812, + 0.9535389507438575, 0.13653273271434707, 0.8550107222862715, 0.9171716650149612, 0.33036188965071134, + 0.5093434924862221, 0.9127693374642095, 0.15948127768481335, 0.9385875661217681, 0.47327218726414644, + 0.562557864399026, 0.5094836574054532, 0.39525315162429187, 0.39044442327833495, 0.42354272355881273, + 0.630368716154984, 0.5294499358806938, 0.18840958534702223, 0.9116004700132844, 0.217051263222375, + 0.3355053440865866, 0.7249031228856988, 0.11569833736241464, 0.17220924930082948, 0.6841828482580393, + 0.6451877670507526, 0.9960171769322085, 0.22836997946145943, 0.8876738074952507, 0.9036808362741429, + 0.3386767083756137, 0.7215167652596821, 0.7222152655225196, 0.5268864163210882, 0.5220848656018956, + 0.5033325975056796, + }; + float ret[1 * 2 * 1 * 2 * 16 * 16] = { + 0.48354565998830756, + 0.5288379191643942, + 0.6564947180129607, + 0.7340645053567824, + 0.38024908879386665, + 0.21585634536453502, + 0.07072412555001373, + 0.3895052227714314, + 0.6943410020138906, + 0.25354216910391947, + 0.12426328263652953, + 0.4029102597944607, + 0.3372883475188658, + 0.15188057276427525, + 0.5012013044152124, + 0.2517370960382155, + 0.030114607358494028, + 0.6244909419097497, + 0.35110098875166074, + 0.5870767726787481, + 0.4984577853475606, + 0.8268546800139805, + 0.33992771536906896, + 0.6460327104116448, + 0.9313283133633905, + 0.2989738630411848, + 0.2738679126057951, + 0.06777160291006379, + 0.4087307988402077, + 0.46439703610597116, + 0.16710550387532463, + 0.6145733123000938, + 0.9305072712135973, + 0.8323094426588402, + 0.898281245473632, + 0.6976122377419587, + 
0.20317433978716226, + 0.14557923584209076, + 0.36832234829498045, + 0.9415737658692604, + 0.816288063336007, + 0.7422170145942777, + 0.19178381426007074, + 0.20831566936693735, + 0.5544509915059798, + 0.36345356512424987, + 0.19945926108107215, + 0.45935424005820946, + 0.8404241327512916, + 0.14812383323193712, + 0.30500408542184043, + 0.6568831766890362, + 0.6926143461584111, + 0.5399882410175864, + 0.1892429432304602, + 0.07467176864247294, + 0.39941943666029067, + 0.2853795635416202, + 0.4533368695021506, + 0.6172309334055194, + 0.9181934347544681, + 0.04114781094262976, + 0.19628237211682176, + 0.2992459720151508, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.171046514195472, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6662604590567247, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7679089618456956, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8065474976443873, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.43556366882268016, + 0.29680994009755635, + 0.027028192513735783, + 0.1333937532467585, + 0.7057053295919739, + 0.26552163258627426, + 0.18921931005168524, + 
0.24298428082662826, + 0.002365124316292211, + 0.14448627328010977, + 0.22643484261451152, + 0.5302073755142979, + 0.9383674394413312, + 0.9285339529978078, + 0.33604609763869764, + 0.9591323699000807, + 0.5724188248299613, + 0.08772714551221295, + 0.327875416428662, + 0.7661605531724149, + 0.5610963691380759, + 0.5663803171694582, + 0.96865287455917, + 0.7105003163177777, + 0.17674291676252174, + 0.4553989259943879, + 0.06394523733703428, + 0.47436999492415177, + 0.317587265492125, + 0.6702452379659213, + 0.23264947092844812, + 0.9535389507438575, + 0.8550107222862715, + 0.9171716650149612, + 0.33036188965071134, + 0.5093434924862221, + 0.9127693374642095, + 0.15948127768481335, + 0.9385875661217681, + 0.47327218726414644, + 0.562557864399026, + 0.5094836574054532, + 0.39525315162429187, + 0.39044442327833495, + 0.42354272355881273, + 0.630368716154984, + 0.5294499358806938, + 0.18840958534702223, + 0.217051263222375, + 0.3355053440865866, + 0.7249031228856988, + 0.11569833736241464, + 0.17220924930082948, + 0.6841828482580393, + 0.6451877670507526, + 0.9960171769322085, + 0.22836997946145943, + 0.8876738074952507, + 0.9036808362741429, + 0.3386767083756137, + 0.7215167652596821, + 0.7222152655225196, + 0.5268864163210882, + 0.5220848656018956, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4758520057631518, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.13653273271434707, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9116004700132844, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5033325975056796, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_ZZ, {1, 2, 4, 17}, {1, 2, 1, 2, 16, 16}, DT_FLOAT}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_NCHW, {1, 2, 1, 2, 16, 16}, {1, 2, 4, 17}, DT_FLOAT}; + TransResult result2; + FormatTransferFractalZzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 4); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, nhwc_fp32) { + float data[2 * 2 * 17 * 4] = { + 0.6826642868668084, 0.043866050762608744, 0.17666253295970225, 0.6377947294887623, 0.5731253161317631, + 0.8660479153259933, 0.3283955922096047, 0.28822941576304395, 0.266035796785115, 0.2668960688194587, + 0.13558803601784464, 0.014302374775957749, 0.22218488108454748, 0.5502228577797044, 0.8537365635590441, + 0.3449278734931246, 0.7079460210099658, 0.32166794924574393, 0.6016995473528488, 0.5155731631574363, + 0.4350255443350567, 
0.15194948171848122, 0.17060018740751715, 0.5585688800367302, 0.5106144168384212, + 0.7774792500405002, 0.9166771685022536, 0.1520171589804501, 0.055349159562811945, 0.08597516811763906, + 0.8318337876627744, 0.8013703637699361, 0.4567924562463306, 0.236971745638061, 0.8091749658245033, + 0.3048528694425596, 0.8440505279254544, 0.4565011814466987, 0.8444823800442499, 0.2665125487284098, + 0.7942646379904031, 0.3003394430517031, 0.6060659316793948, 0.27949558854026524, 0.22970861575053936, + 0.39464357988879895, 0.6323358476347488, 0.4900446166203536, 0.4262678398449171, 0.5175669011162926, + 0.8074101200638781, 0.46309351804139254, 0.38526259936740015, 0.9475081441242449, 0.8026773601813839, + 0.06648433150135313, 0.33054450113462375, 0.16206263925129671, 0.014661402248451694, 0.665996111688725, + 0.49445514708553606, 0.1970970197451115, 0.932784874403773, 0.8203255119427988, 0.8368935109284026, + 0.16366406201739148, 0.24295756148468395, 0.5998796055465441, 0.8329503908566426, 0.6233943209050588, + 0.5485288788944125, 0.4936593563676702, 0.6353616329318031, 0.1622857757417404, 0.9106806866582022, + 0.6660538060703398, 0.7791820943318373, 0.1706647725627367, 0.6381242174232316, 0.5563114541831535, + 0.34546090886259295, 0.6183240367763264, 0.35075893676319736, 0.7802417875294433, 0.9353130445205744, + 0.26121944223135474, 0.5879440609405164, 0.14403352098963806, 0.9478402093648892, 0.1091812289297438, + 0.6835183818698114, 0.5384651415019075, 0.3727099433058595, 0.07027289982611373, 0.24479654654168947, + 0.6340807114324638, 0.1089351825903393, 0.4261205290528358, 0.788035046472595, 0.1669274692010293, + 0.7891031034793119, 0.30903585028517444, 0.44496600737518255, 0.17917072427208325, 0.8400527538813541, + 0.871910504463008, 0.38573905907087824, 0.10475763959813045, 0.6067697302192192, 0.3664373646929884, + 0.8540231355708695, 0.7361917987131372, 0.04323780187994608, 0.4571086189944381, 0.5232490913448236, + 0.2034034806401901, 0.35033687234667865, 
0.4983408469170202, 0.06750931495858226, 0.15379622192977516, + 0.43307977818364696, 0.1349960580625169, 0.5823669234697881, 0.5317808621747713, 0.22318251516739263, + 0.49734566581161355, 0.011911822059888721, 0.9508092497909877, 0.904895935832329, 0.9258062283860066, + 0.27726879265908166, 0.5614587634330378, 0.4836523068545914, 0.2276530565354099, 0.4634085711120596, + 0.22179062615921818, 0.6018307008437298, 0.40590069858881495, 0.5126780415762812, 0.6465924460988698, + 0.2605000666954156, 0.11171926959828349, 0.6105601403074952, 0.5534360444770465, 0.8109441285089057, + 0.587338599177806, 0.20351854196971497, 0.3698354775123749, 0.3381739805155217, 0.8338993543749309, + 0.5934550069127651, 0.3500320504998983, 0.44079385400682924, 0.6233180976082278, 0.46480077933911323, + 0.34927091073628136, 0.4187070529833151, 0.7423954257982627, 0.38881437522154527, 0.7714112671689478, + 0.8179509335133315, 0.3831078838732509, 0.5079490533023165, 0.03606601875029403, 0.04250359963248451, + 0.12750451249951045, 0.9971879993139143, 0.05829183629354873, 0.3265538538455145, 0.7778408294948845, + 0.044318813495665244, 0.31281482741835165, 0.8394089943853243, 0.9311574975953711, 0.4945645162287725, + 0.3126359293664953, 0.7723066998764473, 0.30410825487593607, 0.899894640728656, 0.6467261185695091, + 0.6538998794010177, 0.21853150677202715, 0.5021502873311521, 0.9431876774620246, 0.477918638219775, + 0.633568735592873, 0.5180803503595832, 0.9533087354778479, 0.7385933023116928, 0.5938150594439631, + 0.8502221479425777, 0.13090966344672805, 0.15291414392453007, 0.329710111032374, 0.3905939714054877, + 0.45771800705289356, 0.6192100451790177, 0.5126529214276997, 0.8229108192573972, 0.73202196858338, + 0.4785584310461053, 0.8823363509107982, 0.3792618584525941, 0.1591318008380328, 0.8922797495718489, + 0.41579350628761014, 0.6755235112503503, 0.9742082066104254, 0.719115743910399, 0.3112817152639481, + 0.7897030377170158, 0.3120282966936694, 0.3362586043272986, 0.33468051317300296, 
0.32925703998421263, + 0.5022858273437679, 0.8307640088142761, 0.454233538180681, 0.493034504851536, 0.6675154014187731, + 0.6084033780738657, 0.39712687614397224, 0.46986115672066886, 0.50569748391499, 0.011927554221092929, + 0.7665958947310153, 0.2096478710154812, 0.8348997760875302, 0.9346732345703666, 0.7366480312487773, + 0.9447711246630602, 0.13470562438257616, 0.7036874707129143, 0.6739655054223922, 0.8441899947297865, + 0.1048650889727486, 0.9958266923005692, 0.22846263420411372, 0.3116180316667533, 0.8696545362199898, + 0.623634518307979, 0.4372726660035817, 0.7072767829114394, 0.36685317131241946, 0.6113192123495405, + 0.5403136926716755, 0.7969503412555408, 0.8976391022871576, 0.6904457546606565, 0.256293407218113, + 0.12407625950806878, 0.8932902373278819, 0.12956469654646197, 0.889619024273833, 0.009357463204375138, + 0.2117754631989739, 0.8874412877293805, 0.27088254423859315, 0.46310127400524936, 0.31001925997527413, + 0.6263861695214231, 0.8998318442583092, 0.5619257152280644, 0.09296453039644181, 0.5991036267324175, + 0.5085470750797965, 0.49798421993443254, 0.4492691325524484, 0.19410472899785058, 0.3865163021583399, + 0.49187965812508505, 0.5724473984180373, + }; + float ret[2 * 2 * 2 * 1 * 16 * 16] = { + 0.6826642868668084, + 0.043866050762608744, + 0.17666253295970225, + 0.6377947294887623, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5731253161317631, + 0.8660479153259933, + 0.3283955922096047, + 0.28822941576304395, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.266035796785115, + 0.2668960688194587, + 0.13558803601784464, + 0.014302374775957749, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22218488108454748, + 0.5502228577797044, + 0.8537365635590441, + 0.3449278734931246, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7079460210099658, + 0.32166794924574393, + 
0.6016995473528488, + 0.5155731631574363, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4350255443350567, + 0.15194948171848122, + 0.17060018740751715, + 0.5585688800367302, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5106144168384212, + 0.7774792500405002, + 0.9166771685022536, + 0.1520171589804501, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.055349159562811945, + 0.08597516811763906, + 0.8318337876627744, + 0.8013703637699361, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4567924562463306, + 0.236971745638061, + 0.8091749658245033, + 0.3048528694425596, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8440505279254544, + 0.4565011814466987, + 0.8444823800442499, + 0.2665125487284098, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7942646379904031, + 0.3003394430517031, + 0.6060659316793948, + 0.27949558854026524, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22970861575053936, + 0.39464357988879895, + 0.6323358476347488, + 0.4900446166203536, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4262678398449171, + 0.5175669011162926, + 0.8074101200638781, + 0.46309351804139254, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.38526259936740015, + 0.9475081441242449, + 0.8026773601813839, + 0.06648433150135313, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.33054450113462375, + 0.16206263925129671, + 0.014661402248451694, + 0.665996111688725, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49445514708553606, + 0.1970970197451115, + 0.932784874403773, + 0.8203255119427988, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8368935109284026, + 0.16366406201739148, + 0.24295756148468395, + 0.5998796055465441, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8329503908566426, + 0.6233943209050588, + 0.5485288788944125, + 0.4936593563676702, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6353616329318031, + 0.1622857757417404, + 0.9106806866582022, + 0.6660538060703398, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7791820943318373, + 0.1706647725627367, + 0.6381242174232316, + 0.5563114541831535, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34546090886259295, + 0.6183240367763264, + 0.35075893676319736, + 0.7802417875294433, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9353130445205744, + 0.26121944223135474, + 0.5879440609405164, + 0.14403352098963806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9478402093648892, + 0.1091812289297438, + 0.6835183818698114, + 0.5384651415019075, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3727099433058595, + 0.07027289982611373, + 0.24479654654168947, + 0.6340807114324638, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1089351825903393, + 0.4261205290528358, + 0.788035046472595, + 0.1669274692010293, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7891031034793119, + 0.30903585028517444, + 0.44496600737518255, + 0.17917072427208325, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8400527538813541, + 0.871910504463008, + 0.38573905907087824, + 0.10475763959813045, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6067697302192192, + 0.3664373646929884, + 0.8540231355708695, + 0.7361917987131372, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04323780187994608, + 0.4571086189944381, + 0.5232490913448236, + 0.2034034806401901, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35033687234667865, + 
0.4983408469170202, + 0.06750931495858226, + 0.15379622192977516, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.43307977818364696, + 0.1349960580625169, + 0.5823669234697881, + 0.5317808621747713, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22318251516739263, + 0.49734566581161355, + 0.011911822059888721, + 0.9508092497909877, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.904895935832329, + 0.9258062283860066, + 0.27726879265908166, + 0.5614587634330378, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4836523068545914, + 0.2276530565354099, + 0.4634085711120596, + 0.22179062615921818, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6018307008437298, + 0.40590069858881495, + 0.5126780415762812, + 0.6465924460988698, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2605000666954156, + 0.11171926959828349, + 0.6105601403074952, + 0.5534360444770465, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8109441285089057, + 0.587338599177806, + 0.20351854196971497, + 0.3698354775123749, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3381739805155217, + 0.8338993543749309, + 0.5934550069127651, + 0.3500320504998983, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.44079385400682924, + 0.6233180976082278, + 0.46480077933911323, + 0.34927091073628136, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4187070529833151, + 0.7423954257982627, + 0.38881437522154527, + 0.7714112671689478, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8179509335133315, + 0.3831078838732509, + 0.5079490533023165, + 0.03606601875029403, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04250359963248451, + 0.12750451249951045, + 0.9971879993139143, + 0.05829183629354873, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3265538538455145, + 0.7778408294948845, + 0.044318813495665244, + 
0.31281482741835165, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8394089943853243, + 0.9311574975953711, + 0.4945645162287725, + 0.3126359293664953, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7723066998764473, + 0.30410825487593607, + 0.899894640728656, + 0.6467261185695091, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6538998794010177, + 0.21853150677202715, + 0.5021502873311521, + 0.9431876774620246, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.477918638219775, + 0.633568735592873, + 0.5180803503595832, + 0.9533087354778479, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7385933023116928, + 0.5938150594439631, + 0.8502221479425777, + 0.13090966344672805, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15291414392453007, + 0.329710111032374, + 0.3905939714054877, + 0.45771800705289356, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6192100451790177, + 0.5126529214276997, + 0.8229108192573972, + 0.73202196858338, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4785584310461053, + 0.8823363509107982, + 0.3792618584525941, + 0.1591318008380328, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8922797495718489, + 0.41579350628761014, + 0.6755235112503503, + 0.9742082066104254, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.719115743910399, + 0.3112817152639481, + 0.7897030377170158, + 0.3120282966936694, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3362586043272986, + 0.33468051317300296, + 0.32925703998421263, + 0.5022858273437679, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8307640088142761, + 0.454233538180681, + 0.493034504851536, + 0.6675154014187731, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6084033780738657, + 0.39712687614397224, + 0.46986115672066886, + 0.50569748391499, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.011927554221092929, + 0.7665958947310153, + 0.2096478710154812, + 0.8348997760875302, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9346732345703666, + 0.7366480312487773, + 0.9447711246630602, + 0.13470562438257616, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7036874707129143, + 0.6739655054223922, + 0.8441899947297865, + 0.1048650889727486, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9958266923005692, + 0.22846263420411372, + 0.3116180316667533, + 0.8696545362199898, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.623634518307979, + 0.4372726660035817, + 0.7072767829114394, + 0.36685317131241946, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6113192123495405, + 0.5403136926716755, + 0.7969503412555408, + 0.8976391022871576, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6904457546606565, + 0.256293407218113, + 0.12407625950806878, + 0.8932902373278819, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12956469654646197, + 0.889619024273833, + 0.009357463204375138, + 0.2117754631989739, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8874412877293805, + 0.27088254423859315, + 0.46310127400524936, + 0.31001925997527413, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6263861695214231, + 0.8998318442583092, + 0.5619257152280644, + 0.09296453039644181, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5991036267324175, + 0.5085470750797965, + 0.49798421993443254, + 0.4492691325524484, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19410472899785058, + 0.3865163021583399, + 0.49187965812508505, + 
0.5724473984180373, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_ZZ, {2, 2, 17, 4}, {2, 2, 2, 1, 16, 16}, DT_FLOAT}; + TransResult result; + FormatTransferFractalZz transfer; + 
EXPECT_EQ(TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); + + TransArgs args2{ + reinterpret_cast(ret), FORMAT_FRACTAL_ZZ, FORMAT_NHWC, {2, 2, 2, 1, 16, 16}, {2, 2, 17, 4}, DT_FLOAT}; + TransResult result2; + FormatTransferFractalZzND transfer2; + EXPECT_EQ(TransFormat(args2, result2), SUCCESS); + EXPECT_EQ(result2.length, sizeof(data) / sizeof(data[0]) * 4); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result2.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferNdFractZz, invalid_src_shape) { + uint16_t data[1 * 4 * 4 * 1] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_ZZ, {1, 4, 4}, {1, 1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractZz, invalid_src_data_type) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_NHWC, + FORMAT_FRACTAL_ZZ, + { + 1, + 1, + 4, + 4, + }, + {1, 1, 1, 16, 16}, + DT_UNDEFINED}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractZz, invalid_src_format) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_HWCN, + FORMAT_FRACTAL_ZZ, + {1, 1, 4, 4}, + {1, 1, 1, 1, 
16, 16}, + DT_FLOAT16}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + PARAM_INVALID); + EXPECT_EQ(TransFormat(args, result), UNSUPPORTED); +} + +TEST_F(UtestFormatTransferNdFractZz, invalid_dst_shape) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_ZZ, {1, 1, 4, 4}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalZz transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape), + SUCCESS); +} + +TEST_F(UtestFormatTransferNdFractZz, invalid_dst_shape2) { + uint16_t data[1 * 1 * 1 * 1 * 16 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_ZZ, FORMAT_NHWC, {1, 1, 1, 1, 16, 16}, {1, 4, 4}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalZzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractZz, invalid_src_data_type2) { + uint16_t data[1 * 1 * 1 * 16 * 16] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_FRACTAL_ZZ, + FORMAT_NHWC, + {1, 1, 1, 16, 16}, + { + 1, + 1, + 4, + 4, + }, + DT_UNDEFINED}; + TransResult result; + FormatTransferFractalZzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractZz, invalid_dst_format2) { + uint16_t data[1 * 1 * 1 * 1 * 16 * 16] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_FRACTAL_ZZ, + FORMAT_HWCN, + {1, 1, 1, 1, 16, 16}, + {1, 1, 4, 4}, + DT_FLOAT16}; + TransResult result; + FormatTransferFractalZzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + EXPECT_EQ(TransFormat(args, result), UNSUPPORTED); +} + 
+TEST_F(UtestFormatTransferNdFractZz, invalid_src_shape2) { + uint16_t data[1 * 1 * 1 * 1 * 16 * 16] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_ZZ, FORMAT_NHWC, {1, 1, 16, 16}, {1, 1, 4, 4}, DT_FLOAT16}; + TransResult result; + FormatTransferFractalZzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNdFractZz, invalid_src_dst_shape_relation) { + uint16_t data[1 * 1 * 2 * 1 * 16 * 16] = {0}; + TransArgs args{reinterpret_cast(data), + FORMAT_FRACTAL_ZZ, + FORMAT_NHWC, + {1, 1, 2, 1, 16, 16}, + {1, 1, 15, 17}, + DT_FLOAT16}; + TransResult result; + FormatTransferFractalZzND transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_fracz_hwcn_unittest.cc b/tests/ut/ge/common/format_transfer_fracz_hwcn_unittest.cc new file mode 100644 index 00000000..bc203ff3 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_fracz_hwcn_unittest.cc @@ -0,0 +1,6898 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_fracz_hwcn.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UtestFormatTransferFracZHwcn : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_data_type_invalid_data_type) { + uint8_t data[16 * 1 * 16 * 16] = {1}; + + TransArgs args{data, FORMAT_FRACTAL_Z, FORMAT_HWCN, {16, 1, 32, 16}, {4, 4, 1, 1}, DT_UNDEFINED}; + TransResult result; + + FormatTransferFracZHwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_src_format_reserved) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_RESERVED, FORMAT_HWCN, {16, 1, 16, 16}, {4, 4, 1, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_dst_format_reserved) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_RESERVED, {16, 1, 16, 16}, {4, 4, 1, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_src_shape) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_HWCN, {16, 1, 1, 16, 16}, {4, 4, 1, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_src_shape2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + 
reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_HWCN, {16, -1, 16, 16}, {4, 4, 1, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_dst_shape) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_HWCN, {16, 1, 16, 16}, {4, 4, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_dst_shape2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_HWCN, {16, 1, 16, 16}, {4, 4, -1, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_src_dst_shape_relation1) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_HWCN, {16, 1, 16, 16}, {4, 4, 17, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_invalid_src_dst_shape_relation2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_HWCN, {16, 1, 16, 16}, {4, 4, 1, 17}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_fp16_success_lt_cube) { + uint16_t data_4d[1 * 1 * 16 * 16] = { + 14822, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[1 * 1 * 1 * 1] = { + 14822, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_HWCN, {1, 1, 16, 16}, {1, 1, 1, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZHwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_fp16_success_eq_cube) { + uint16_t data_4d[4 * 1 * 16 * 16] = { + 9254, 9808, 12314, 14964, 15320, 11958, 14734, 12285, 12664, 14762, 15338, 14864, 14989, 15042, 13537, 12967, + 14370, 11563, 14686, 14765, 6146, 14641, 14825, 13758, 14427, 14774, 14510, 14906, 14717, 12654, 11194, 13658, + 15115, 14700, 12754, 11263, 14550, 14609, 13676, 15286, 13811, 15010, 13137, 15086, 14176, 14242, 13476, 12807, + 12974, 14927, 14992, 15001, 12314, 15348, 13879, 15127, 14728, 11712, 15311, 13665, 9579, 14996, 12895, 15249, + 11813, 14360, 14957, 14906, 15357, 11777, 13499, 12547, 7477, 14713, 12719, 15193, 11497, 11936, 14421, 14014, + 15213, 14100, 10567, 14414, 9380, 13397, 14428, 11044, 14904, 12958, 15088, 15188, 14994, 15031, 14115, 14352, + 8484, 14791, 11353, 14359, 15354, 15246, 14423, 9961, 11284, 14085, 14643, 15183, 13439, 13375, 13252, 12470, + 14426, 14827, 14663, 13982, 15126, 15346, 14727, 12647, 
14815, 14321, 10971, 14038, 14902, 12415, 14357, 13571, + 14689, 15065, 14794, 11504, 14698, 14253, 13380, 12805, 13797, 14627, 14017, 10845, 14582, 14869, 13567, 15265, + 14107, 13407, 13917, 15226, 14034, 15343, 15258, 14178, 14593, 13625, 13918, 13515, 14207, 15084, 15178, 13027, + 13477, 9288, 12640, 14660, 12607, 15270, 13900, 15156, 14962, 14434, 12310, 14439, 14073, 14692, 15213, 15145, + 12302, 15078, 10682, 8360, 15256, 14801, 14936, 14708, 14680, 14052, 13577, 14780, 10408, 15145, 12419, 15265, + 14900, 14612, 13750, 14428, 14625, 13533, 13654, 11131, 14362, 14571, 12116, 12137, 15058, 12849, 11897, 13640, + 15224, 15023, 15289, 4884, 13302, 14033, 14977, 14743, 14785, 14615, 13925, 14471, 14574, 12883, 15100, 13668, + 11702, 11637, 11013, 14757, 15046, 11736, 14669, 14371, 14720, 13893, 11595, 15109, 12415, 14408, 13321, 12791, + 12817, 12533, 14835, 14670, 14095, 13019, 15037, 15291, 13539, 14963, 13055, 13072, 12319, 15305, 15258, 14156, + 14669, 14412, 12449, 14693, 14592, 14002, 14219, 10475, 13567, 15006, 14249, 15051, 14998, 13900, 14858, 10314, + 15301, 15210, 14528, 14832, 13871, 14029, 14739, 14742, 13017, 15344, 13962, 14374, 15097, 12718, 13726, 11288, + 10655, 14873, 14566, 12878, 14520, 14819, 15115, 13500, 11464, 14684, 15024, 14527, 13446, 14934, 13954, 11160, + 13498, 14571, 13377, 14247, 14691, 15341, 12222, 13146, 14234, 14961, 14869, 10908, 13639, 14799, 12839, 14969, + 15211, 15082, 14818, 13295, 14892, 13585, 10640, 14044, 14139, 10387, 13315, 14363, 13863, 13415, 14326, 15030, + 15280, 15217, 15172, 13564, 14291, 10036, 15344, 15274, 12265, 11798, 13584, 15075, 13659, 15116, 14881, 12500, + 14627, 9877, 13400, 15020, 11961, 14856, 14255, 6203, 13984, 13572, 14403, 15354, 13728, 14874, 14849, 14694, + 14462, 15105, 14228, 9727, 13873, 13910, 13577, 14152, 11295, 14572, 15155, 12656, 13703, 14807, 14725, 14907, + 14322, 14482, 15112, 8303, 14163, 13970, 14087, 14276, 14336, 14557, 14255, 14640, 14966, 14426, 13329, 13891, + 13692, 
10423, 14630, 12685, 13244, 14359, 12750, 14677, 15295, 14243, 14422, 14178, 14623, 14117, 13387, 14649, + 13853, 14885, 11291, 15295, 14399, 15047, 15338, 11707, 13937, 13962, 12573, 14845, 14410, 13935, 12701, 14474, + 14605, 13172, 13965, 11917, 13729, 13601, 15128, 14986, 14227, 14603, 13596, 13663, 14864, 13336, 15341, 14261, + 14671, 15133, 13495, 14387, 15308, 14201, 14107, 14193, 9416, 15009, 14942, 14759, 15218, 15172, 14008, 13605, + 14961, 13211, 12617, 14472, 14510, 13968, 15230, 14413, 15254, 14829, 14740, 15314, 13192, 15321, 13375, 15287, + 14533, 14546, 14838, 15284, 14978, 14418, 14749, 15285, 13631, 14637, 15233, 14583, 15358, 14694, 15075, 14006, + 15042, 13341, 14791, 14404, 14401, 14543, 14609, 13877, 14581, 13513, 13959, 13656, 13559, 15086, 13102, 12707, + 12366, 13106, 12647, 14381, 15003, 14587, 14981, 10618, 14570, 12555, 13656, 14070, 15289, 3801, 13908, 15052, + 15227, 14696, 14949, 14292, 14339, 9995, 14526, 15181, 15147, 13843, 13520, 11276, 14455, 13650, 14076, 13107, + 14370, 13651, 15211, 15172, 11747, 14520, 13497, 12307, 9476, 11792, 15353, 14573, 14342, 14252, 12604, 15326, + 13828, 13654, 13891, 15187, 14739, 13578, 12493, 14816, 14539, 13780, 14409, 14207, 14351, 14308, 14508, 13241, + 14390, 7997, 14902, 10892, 15220, 14772, 11973, 15081, 11390, 14957, 14647, 14881, 8089, 15345, 14546, 13712, + 14139, 15087, 13870, 14943, 13450, 14439, 13201, 14239, 11054, 14928, 14443, 13789, 12925, 15312, 13915, 14665, + 14976, 13816, 15015, 15091, 14645, 12833, 15310, 14556, 12385, 7916, 13730, 15212, 14382, 14437, 14701, 15256, + 13204, 13603, 10229, 11798, 14925, 12702, 14966, 15174, 15236, 13170, 14904, 12368, 15183, 14933, 14574, 12521, + 14676, 14315, 15228, 13266, 13811, 13409, 11744, 15064, 13472, 13606, 14669, 15268, 12872, 13914, 13782, 14895, + 12858, 12754, 14330, 15122, 14658, 14773, 15328, 10775, 12319, 13474, 14203, 13552, 14323, 14940, 14983, 14407, + 13769, 15332, 15360, 14662, 12716, 14630, 14712, 14532, 15016, 13804, 
13112, 13650, 15201, 14993, 13636, 11165, + 14651, 14671, 13865, 14527, 14326, 13755, 13399, 14834, 13899, 13681, 11506, 13406, 14294, 13135, 9807, 15113, + 13951, 14012, 11954, 14379, 14730, 12604, 12516, 14637, 13905, 14203, 14158, 14735, 11521, 14769, 10761, 14158, + 14647, 14783, 14551, 14338, 12664, 15078, 13049, 14783, 13194, 15252, 14988, 14923, 14395, 12582, 14538, 10491, + 14843, 13609, 12710, 13806, 14358, 14794, 14908, 14161, 14885, 13527, 14638, 15146, 13358, 11445, 14039, 14754, + 9236, 15172, 14838, 12705, 14387, 15069, 14638, 12133, 12560, 13382, 14616, 10303, 14593, 13415, 14266, 14986, + 12917, 11836, 14865, 11406, 15353, 12771, 12955, 14673, 12633, 13525, 11425, 13112, 13516, 13232, 11946, 14345, + 13746, 14009, 13385, 14838, 13991, 15258, 15046, 10990, 9407, 14763, 14637, 12530, 14804, 15341, 13765, 12177, + 14545, 14983, 12315, 14381, 13688, 14967, 13539, 15264, 13341, 13931, 15204, 14455, 14590, 14433, 13293, 13503, + 15002, 13896, 14673, 15020, 11256, 15130, 14911, 13757, 14493, 14969, 14069, 6585, 13131, 12946, 15173, 14687, + 7536, 14171, 15169, 11482, 14079, 14833, 13446, 15348, 13965, 13779, 14601, 12126, 14277, 14515, 15060, 14600, + 15173, 14799, 15168, 14752, 13523, 14409, 14369, 12535, 12290, 15355, 14284, 12924, 15230, 13471, 13711, 13050, + 14104, 14792, 15208, 11858, 13929, 14534, 13852, 15352, 14469, 15293, 14318, 14362, 12273, 14811, 13223, 12908, + 15067, 5531, 13562, 14874, 14376, 13028, 15073, 13512, 14347, 13293, 15218, 12350, 14174, 10169, 13333, 11814, + 15152, 14020, 14447, 14753, 15349, 14772, 15220, 14235, 10795, 14596, 14593, 14929, 14092, 15246, 15114, 14686, + 15140, 14848, 14495, 13230, 14804, 13847, 15132, 13467, 12491, 13701, 13848, 14682, 7191, 14771, 14354, 14841, + 13645, 12889, 12196, 14052, 14614, 12446, 13295, 13004, 13422, 14743, 12386, 11764, 15334, 13080, 14883, 13450, + 13390, 12353, 14936, 11279, 15089, 9586, 12649, 14718, 12961, 14114, 14383, 13558, 13366, 14745, 13930, 15214, + 15186, 14259, 14519, 
11904, 10138, 14068, 14833, 12266, 11271, 12727, 14984, 14482, 13063, 15139, 14367, 15105, + 14829, 14868, 14750, 11535, 13272, 13556, 15290, 15147, 12188, 15356, 15023, 13514, 13558, 11827, 14330, 14562, + 12742, 14210, 14735, 8360, 14940, 11634, 14529, 12089, 7702, 13384, 14789, 14909, 11698, 13389, 13429, 14320, + 13259, 14533, 11509, 14308, 7516, 14521, 14928, 15212, 14638, 13400, 9815, 14251, 13370, 14737, 14241, 13099, + }; + uint16_t data[2 * 2 * 16 * 16] = { + 9254, 14370, 15115, 12974, 11813, 15213, 8484, 14426, 14689, 14107, 13477, 12302, 14900, 15224, 11702, 12817, + 9808, 11563, 14700, 14927, 14360, 14100, 14791, 14827, 15065, 13407, 9288, 15078, 14612, 15023, 11637, 12533, + 12314, 14686, 12754, 14992, 14957, 10567, 11353, 14663, 14794, 13917, 12640, 10682, 13750, 15289, 11013, 14835, + 14964, 14765, 11263, 15001, 14906, 14414, 14359, 13982, 11504, 15226, 14660, 8360, 14428, 4884, 14757, 14670, + 15320, 6146, 14550, 12314, 15357, 9380, 15354, 15126, 14698, 14034, 12607, 15256, 14625, 13302, 15046, 14095, + 11958, 14641, 14609, 15348, 11777, 13397, 15246, 15346, 14253, 15343, 15270, 14801, 13533, 14033, 11736, 13019, + 14734, 14825, 13676, 13879, 13499, 14428, 14423, 14727, 13380, 15258, 13900, 14936, 13654, 14977, 14669, 15037, + 12285, 13758, 15286, 15127, 12547, 11044, 9961, 12647, 12805, 14178, 15156, 14708, 11131, 14743, 14371, 15291, + 12664, 14427, 13811, 14728, 7477, 14904, 11284, 14815, 13797, 14593, 14962, 14680, 14362, 14785, 14720, 13539, + 14762, 14774, 15010, 11712, 14713, 12958, 14085, 14321, 14627, 13625, 14434, 14052, 14571, 14615, 13893, 14963, + 15338, 14510, 13137, 15311, 12719, 15088, 14643, 10971, 14017, 13918, 12310, 13577, 12116, 13925, 11595, 13055, + 14864, 14906, 15086, 13665, 15193, 15188, 15183, 14038, 10845, 13515, 14439, 14780, 12137, 14471, 15109, 13072, + 14989, 14717, 14176, 9579, 11497, 14994, 13439, 14902, 14582, 14207, 14073, 10408, 15058, 14574, 12415, 12319, + 15042, 12654, 14242, 14996, 11936, 15031, 13375, 12415, 
14869, 15084, 14692, 15145, 12849, 12883, 14408, 15305, + 13537, 11194, 13476, 12895, 14421, 14115, 13252, 14357, 13567, 15178, 15213, 12419, 11897, 15100, 13321, 15258, + 12967, 13658, 12807, 15249, 14014, 14352, 12470, 13571, 15265, 13027, 15145, 15265, 13640, 13668, 12791, 14156, + 14669, 15301, 10655, 13498, 15211, 15280, 14627, 14462, 14322, 13692, 13853, 14605, 14671, 14961, 14533, 15042, + 14412, 15210, 14873, 14571, 15082, 15217, 9877, 15105, 14482, 10423, 14885, 13172, 15133, 13211, 14546, 13341, + 12449, 14528, 14566, 13377, 14818, 15172, 13400, 14228, 15112, 14630, 11291, 13965, 13495, 12617, 14838, 14791, + 14693, 14832, 12878, 14247, 13295, 13564, 15020, 9727, 8303, 12685, 15295, 11917, 14387, 14472, 15284, 14404, + 14592, 13871, 14520, 14691, 14892, 14291, 11961, 13873, 14163, 13244, 14399, 13729, 15308, 14510, 14978, 14401, + 14002, 14029, 14819, 15341, 13585, 10036, 14856, 13910, 13970, 14359, 15047, 13601, 14201, 13968, 14418, 14543, + 14219, 14739, 15115, 12222, 10640, 15344, 14255, 13577, 14087, 12750, 15338, 15128, 14107, 15230, 14749, 14609, + 10475, 14742, 13500, 13146, 14044, 15274, 6203, 14152, 14276, 14677, 11707, 14986, 14193, 14413, 15285, 13877, + 13567, 13017, 11464, 14234, 14139, 12265, 13984, 11295, 14336, 15295, 13937, 14227, 9416, 15254, 13631, 14581, + 15006, 15344, 14684, 14961, 10387, 11798, 13572, 14572, 14557, 14243, 13962, 14603, 15009, 14829, 14637, 13513, + 14249, 13962, 15024, 14869, 13315, 13584, 14403, 15155, 14255, 14422, 12573, 13596, 14942, 14740, 15233, 13959, + 15051, 14374, 14527, 10908, 14363, 15075, 15354, 12656, 14640, 14178, 14845, 13663, 14759, 15314, 14583, 13656, + 14998, 15097, 13446, 13639, 13863, 13659, 13728, 13703, 14966, 14623, 14410, 14864, 15218, 13192, 15358, 13559, + 13900, 12718, 14934, 14799, 13415, 15116, 14874, 14807, 14426, 14117, 13935, 13336, 15172, 15321, 14694, 15086, + 14858, 13726, 13954, 12839, 14326, 14881, 14849, 14725, 13329, 13387, 12701, 15341, 14008, 13375, 15075, 13102, + 10314, 
11288, 11160, 14969, 15030, 12500, 14694, 14907, 13891, 14649, 14474, 14261, 13605, 15287, 14006, 12707, + 12366, 15227, 14370, 13828, 14390, 14139, 14976, 13204, 14676, 12858, 13769, 14651, 13951, 14647, 14843, 9236, + 13106, 14696, 13651, 13654, 7997, 15087, 13816, 13603, 14315, 12754, 15332, 14671, 14012, 14783, 13609, 15172, + 12647, 14949, 15211, 13891, 14902, 13870, 15015, 10229, 15228, 14330, 15360, 13865, 11954, 14551, 12710, 14838, + 14381, 14292, 15172, 15187, 10892, 14943, 15091, 11798, 13266, 15122, 14662, 14527, 14379, 14338, 13806, 12705, + 15003, 14339, 11747, 14739, 15220, 13450, 14645, 14925, 13811, 14658, 12716, 14326, 14730, 12664, 14358, 14387, + 14587, 9995, 14520, 13578, 14772, 14439, 12833, 12702, 13409, 14773, 14630, 13755, 12604, 15078, 14794, 15069, + 14981, 14526, 13497, 12493, 11973, 13201, 15310, 14966, 11744, 15328, 14712, 13399, 12516, 13049, 14908, 14638, + 10618, 15181, 12307, 14816, 15081, 14239, 14556, 15174, 15064, 10775, 14532, 14834, 14637, 14783, 14161, 12133, + 14570, 15147, 9476, 14539, 11390, 11054, 12385, 15236, 13472, 12319, 15016, 13899, 13905, 13194, 14885, 12560, + 12555, 13843, 11792, 13780, 14957, 14928, 7916, 13170, 13606, 13474, 13804, 13681, 14203, 15252, 13527, 13382, + 13656, 13520, 15353, 14409, 14647, 14443, 13730, 14904, 14669, 14203, 13112, 11506, 14158, 14988, 14638, 14616, + 14070, 11276, 14573, 14207, 14881, 13789, 15212, 12368, 15268, 13552, 13650, 13406, 14735, 14923, 15146, 10303, + 15289, 14455, 14342, 14351, 8089, 12925, 14382, 15183, 12872, 14323, 15201, 14294, 11521, 14395, 13358, 14593, + 3801, 13650, 14252, 14308, 15345, 15312, 14437, 14933, 13914, 14940, 14993, 13135, 14769, 12582, 11445, 13415, + 13908, 14076, 12604, 14508, 14546, 13915, 14701, 14574, 13782, 14983, 13636, 9807, 10761, 14538, 14039, 14266, + 15052, 13107, 15326, 13241, 13712, 14665, 15256, 12521, 14895, 14407, 11165, 15113, 14158, 10491, 14754, 14986, + 12917, 13746, 14545, 15002, 7536, 15173, 14104, 15067, 15152, 15140, 13645, 
13390, 15186, 14829, 12742, 13259, + 11836, 14009, 14983, 13896, 14171, 14799, 14792, 5531, 14020, 14848, 12889, 12353, 14259, 14868, 14210, 14533, + 14865, 13385, 12315, 14673, 15169, 15168, 15208, 13562, 14447, 14495, 12196, 14936, 14519, 14750, 14735, 11509, + 11406, 14838, 14381, 15020, 11482, 14752, 11858, 14874, 14753, 13230, 14052, 11279, 11904, 11535, 8360, 14308, + 15353, 13991, 13688, 11256, 14079, 13523, 13929, 14376, 15349, 14804, 14614, 15089, 10138, 13272, 14940, 7516, + 12771, 15258, 14967, 15130, 14833, 14409, 14534, 13028, 14772, 13847, 12446, 9586, 14068, 13556, 11634, 14521, + 12955, 15046, 13539, 14911, 13446, 14369, 13852, 15073, 15220, 15132, 13295, 12649, 14833, 15290, 14529, 14928, + 14673, 10990, 15264, 13757, 15348, 12535, 15352, 13512, 14235, 13467, 13004, 14718, 12266, 15147, 12089, 15212, + 12633, 9407, 13341, 14493, 13965, 12290, 14469, 14347, 10795, 12491, 13422, 12961, 11271, 12188, 7702, 14638, + 13525, 14763, 13931, 14969, 13779, 15355, 15293, 13293, 14596, 13701, 14743, 14114, 12727, 15356, 13384, 13400, + 11425, 14637, 15204, 14069, 14601, 14284, 14318, 15218, 14593, 13848, 12386, 14383, 14984, 15023, 14789, 9815, + 13112, 12530, 14455, 6585, 12126, 12924, 14362, 12350, 14929, 14682, 11764, 13558, 14482, 13514, 14909, 14251, + 13516, 14804, 14590, 13131, 14277, 15230, 12273, 14174, 14092, 7191, 15334, 13366, 13063, 13558, 11698, 13370, + 13232, 15341, 14433, 12946, 14515, 13471, 14811, 10169, 15246, 14771, 13080, 14745, 15139, 11827, 13389, 14737, + 11946, 13765, 13293, 15173, 15060, 13711, 13223, 13333, 15114, 14354, 14883, 13930, 14367, 14330, 13429, 14241, + 14345, 12177, 13503, 14687, 14600, 13050, 12908, 11814, 14686, 14841, 13450, 15214, 15105, 14562, 14320, 13099, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_HWCN, {4, 1, 16, 16}, {2, 2, 16, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZHwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + 
EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, UNSUPPORTED); +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_fp16_success_gt_cube) { + uint16_t data_4d[8 * 2 * 16 * 16] = { + 14689, 14783, 12157, 13663, 15025, 14257, 14996, 13713, 13589, 13602, 14197, 14027, 15094, 14944, 11478, 13782, + 15209, 13106, 14608, 14397, 14859, 11337, 10505, 13485, 14446, 12817, 15224, 15077, 12943, 15008, 15282, 14380, + 13504, 12994, 9789, 15178, 13351, 13583, 10664, 14524, 13859, 11265, 11401, 12523, 13353, 14540, 11907, 15192, + 14693, 13383, 13882, 15187, 15256, 15357, 14759, 13647, 14364, 14377, 13344, 13686, 14571, 13798, 13158, 12056, + 11969, 14193, 12967, 13634, 10244, 15185, 13743, 12621, 14423, 15292, 15325, 14784, 14041, 15272, 13629, 15252, + 13435, 15113, 12388, 13524, 14783, 13525, 15191, 13639, 9632, 11262, 12830, 13326, 13442, 11949, 14165, 13539, + 14972, 15165, 11833, 14778, 13295, 14617, 14238, 14715, 13940, 13728, 12434, 14304, 13130, 13798, 11861, 14667, + 14765, 13988, 14243, 14286, 14973, 15329, 14026, 12162, 12730, 14433, 14102, 14444, 15272, 14870, 14910, 10260, + 11779, 14083, 13641, 12013, 15349, 12572, 14445, 14151, 14235, 15180, 15341, 13953, 14738, 14943, 13985, 14382, + 12363, 7861, 14778, 15304, 13954, 14467, 12597, 14655, 11317, 14878, 13390, 13995, 12438, 15345, 13497, 13235, + 12079, 14488, 15295, 11342, 13032, 5182, 14545, 14817, 12049, 14873, 13940, 13374, 13842, 14016, 13842, 14368, + 15228, 14441, 15076, 13998, 13488, 10489, 15013, 15289, 10887, 14646, 15199, 14682, 10731, 15078, 14992, 13486, + 13760, 6560, 15112, 15286, 14412, 14509, 12793, 14910, 12597, 15171, 14770, 13926, 13911, 11273, 14547, 11730, + 14600, 13716, 15025, 14976, 13176, 11222, 13081, 12611, 14272, 15194, 
13794, 15017, 15074, 15337, 13784, 13402, + 14718, 13102, 13839, 11883, 14794, 15042, 12703, 10509, 13033, 14516, 13079, 14549, 12656, 15046, 14507, 6337, + 15029, 11631, 15277, 15308, 12406, 11838, 14015, 14170, 14014, 15210, 12628, 12923, 15041, 15035, 14362, 13705, + 11145, 11327, 13769, 15254, 14763, 11372, 14844, 11086, 15090, 15285, 12687, 13542, 10442, 14508, 12400, 14874, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14538, 14282, 15116, 14874, 14001, 14122, 9771, 14751, 11277, 12285, 13154, 14633, 12130, 15264, 14130, 14016, + 15221, 15209, 14914, 13639, 13638, 15033, 11526, 13354, 15082, 14432, 10349, 13521, 14596, 14090, 14905, 13028, + 14625, 13631, 14854, 13574, 11604, 13841, 14481, 14842, 14987, 13801, 13996, 14229, 14667, 14102, 11421, 13424, + 14201, 12749, 12382, 15253, 13907, 13948, 15225, 13080, 13436, 13802, 14655, 14805, 13169, 15236, 12947, 14168, + 14472, 15054, 15065, 15057, 10218, 15134, 14810, 13595, 14186, 14482, 10693, 13913, 14020, 14844, 14521, 13427, + 14875, 13396, 14267, 14489, 14886, 14451, 14598, 10904, 13256, 14423, 13501, 15008, 12546, 13590, 14819, 14667, + 15010, 14873, 15295, 15293, 14217, 11956, 13442, 14544, 13505, 12319, 15287, 13706, 15263, 14513, 12802, 14306, + 12724, 12975, 15146, 13507, 14926, 13717, 15049, 7635, 13514, 
14873, 15237, 14785, 12525, 12462, 13857, 15245, + 14795, 13545, 15239, 14158, 13571, 13996, 14229, 14996, 14550, 14406, 13410, 14201, 11326, 13706, 13960, 12486, + 14559, 13381, 12735, 13344, 15057, 14893, 13489, 12327, 9239, 15194, 15228, 14625, 14694, 15064, 14735, 14568, + 15081, 12371, 14795, 15175, 14914, 13614, 14816, 13632, 15119, 13492, 14925, 14152, 15001, 14911, 14789, 13469, + 13826, 15332, 12677, 13407, 13735, 12924, 12928, 14678, 14557, 14392, 15298, 14761, 14540, 14885, 15217, 15015, + 14449, 13850, 14327, 15180, 14851, 15323, 12525, 13690, 13208, 14954, 14369, 15066, 7842, 15280, 11926, 12176, + 4578, 14266, 14127, 15259, 14717, 15345, 15162, 14581, 15042, 14455, 14928, 14411, 13614, 14349, 14837, 13393, + 15248, 10384, 14728, 12498, 15134, 14855, 15086, 12432, 13813, 14639, 13357, 13350, 15128, 13675, 14953, 14374, + 14290, 15037, 14834, 14745, 8915, 8997, 14352, 14125, 15277, 13936, 14789, 13911, 9817, 14990, 15213, 15198, + 15173, 14793, 14624, 13795, 10258, 9738, 15269, 14056, 15036, 15038, 14257, 10306, 14543, 14420, 14739, 14723, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14404, 13903, 14861, 13767, 11706, 15073, 15332, 14884, 14041, 15089, 15264, 14604, 14775, 14004, 15040, 13680, + 15261, 15099, 14640, 11929, 13555, 15139, 12401, 14243, 11341, 
15187, 15331, 13351, 13111, 14583, 15129, 15115, + 11846, 14398, 13483, 14420, 14770, 14211, 15053, 14135, 11703, 13373, 14610, 15092, 11562, 12700, 13820, 11822, + 14517, 13562, 14887, 14795, 13151, 13081, 13907, 15014, 14060, 14755, 15010, 14339, 14371, 15059, 15109, 14925, + 14277, 14836, 14077, 14644, 12741, 14994, 10634, 14887, 14912, 14047, 10874, 9192, 14557, 13932, 14520, 12248, + 10424, 13853, 11735, 14058, 14823, 14588, 8527, 13403, 8761, 13375, 14661, 8118, 14331, 14452, 9840, 13718, + 14998, 11612, 12513, 14873, 12387, 15043, 14968, 14873, 13681, 13298, 12917, 14587, 13898, 14378, 14965, 14794, + 13690, 12386, 14033, 14343, 15341, 14476, 15180, 14510, 14966, 13153, 14611, 14061, 15105, 12933, 14852, 13862, + 14379, 13261, 12776, 15139, 15341, 12338, 12951, 14245, 15220, 8655, 15105, 14551, 14923, 12605, 10939, 15240, + 14535, 15082, 13937, 14263, 14662, 11335, 14557, 14627, 12721, 11821, 10410, 14869, 11905, 15191, 12705, 13991, + 15316, 8461, 14541, 12343, 14026, 15342, 13706, 13813, 12940, 14729, 14805, 14114, 11007, 14809, 14796, 14505, + 14675, 14338, 13781, 15123, 13634, 13817, 14474, 12563, 13962, 14231, 14555, 13776, 13072, 14976, 15025, 13660, + 12770, 14716, 13835, 14656, 11412, 15269, 14852, 15018, 14907, 12634, 15036, 13968, 11077, 14628, 15085, 14860, + 14019, 11328, 15097, 14033, 15162, 12699, 14425, 14724, 15087, 15340, 15208, 14539, 13360, 14611, 13062, 15042, + 14306, 14333, 14353, 11266, 8071, 14850, 13740, 14648, 15193, 15262, 13316, 15147, 14906, 13780, 11802, 13601, + 14999, 14572, 15234, 12484, 13979, 14896, 13604, 12848, 15065, 14590, 14671, 15296, 12796, 14158, 14762, 14108, + 14438, 14954, 15140, 13301, 15047, 14484, 13129, 13233, 12740, 12925, 14603, 12690, 15230, 14835, 15025, 15157, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15244, 13923, 14036, 12650, 13151, 14358, 14418, 14649, 13759, 13583, 14366, 14333, 15290, 14090, 14877, 11805, + 15347, 12687, 14713, 11262, 14466, 12561, 14591, 11287, 15218, 12923, 9547, 13503, 13685, 15149, 14722, 12766, + 15080, 11477, 15240, 11105, 15116, 13938, 15248, 15048, 13502, 12737, 14423, 15172, 12866, 15063, 14649, 13609, + 13151, 14400, 14454, 10564, 12829, 14741, 12475, 13188, 12623, 14714, 15109, 11311, 14329, 15027, 15352, 11766, + 14862, 14838, 13591, 14771, 15148, 15319, 14934, 14553, 15300, 14594, 14928, 13354, 14194, 14839, 9871, 13245, + 14799, 15262, 15183, 12100, 12445, 3125, 14645, 15287, 14414, 13828, 14910, 14781, 11418, 14876, 14889, 14296, + 14237, 11439, 14438, 7727, 14569, 12594, 14299, 14382, 12642, 13932, 14627, 15343, 12534, 14728, 14931, 14993, + 15025, 12525, 13760, 14675, 13718, 14682, 14901, 12546, 15227, 14080, 12703, 14410, 12979, 12689, 14361, 13620, + 14862, 12453, 14462, 14686, 11993, 10744, 14365, 15321, 14990, 15308, 14759, 14074, 14676, 15143, 15270, 14820, + 10859, 15074, 13371, 13141, 14729, 15150, 12441, 14380, 12705, 14449, 14719, 14925, 12762, 10963, 14801, 14072, + 14767, 15360, 13355, 13524, 15006, 14452, 14435, 6080, 14181, 7887, 13760, 14661, 14912, 15202, 14133, 14390, + 14445, 14668, 13224, 13840, 14590, 15323, 11181, 15010, 15049, 11869, 14596, 12759, 14821, 14796, 11383, 15129, + 14960, 14945, 15352, 14960, 14754, 12929, 13633, 15243, 11711, 15118, 13557, 8992, 12517, 12865, 15354, 14531, + 11003, 13886, 
15324, 15341, 14936, 14741, 13349, 13420, 14600, 11986, 12700, 13413, 14859, 14490, 13428, 14467, + 13052, 15308, 13948, 13719, 11020, 14385, 13578, 8279, 13489, 14762, 7461, 12408, 13733, 12134, 13639, 14065, + 14345, 13501, 11785, 14848, 14317, 10619, 13642, 14874, 13103, 13443, 13494, 13743, 11965, 12209, 15274, 13803, + 14892, 14450, 13195, 11389, 14760, 15325, 13028, 14510, 14139, 14554, 14373, 14505, 12719, 14130, 12819, 13330, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15331, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15222, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14902, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13480, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14853, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13271, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13814, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13968, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8354, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14441, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15019, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14929, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14876, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15138, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15174, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
13631, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14708, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15077, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14173, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8900, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14916, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12820, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14600, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13976, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15025, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10182, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14616, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15279, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14491, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15006, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15087, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14737, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14143, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13354, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9329, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14229, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13825, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14162, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15232, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9797, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10507, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13981, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14188, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14759, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13569, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12389, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11230, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14406, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14237, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14623, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10557, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14452, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12876, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14676, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15335, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13518, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14983, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14262, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12052, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12803, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[2 * 2 * 17 * 17] = { + 14689, 15209, 13504, 14693, 11969, 13435, 14972, 14765, 11779, 12363, 12079, 15228, 13760, 
14600, 14718, 15029, + 11145, 14783, 13106, 12994, 13383, 14193, 15113, 15165, 13988, 14083, 7861, 14488, 14441, 6560, 13716, 13102, + 11631, 11327, 12157, 14608, 9789, 13882, 12967, 12388, 11833, 14243, 13641, 14778, 15295, 15076, 15112, 15025, + 13839, 15277, 13769, 13663, 14397, 15178, 15187, 13634, 13524, 14778, 14286, 12013, 15304, 11342, 13998, 15286, + 14976, 11883, 15308, 15254, 15025, 14859, 13351, 15256, 10244, 14783, 13295, 14973, 15349, 13954, 13032, 13488, + 14412, 13176, 14794, 12406, 14763, 14257, 11337, 13583, 15357, 15185, 13525, 14617, 15329, 12572, 14467, 5182, + 10489, 14509, 11222, 15042, 11838, 11372, 14996, 10505, 10664, 14759, 13743, 15191, 14238, 14026, 14445, 12597, + 14545, 15013, 12793, 13081, 12703, 14015, 14844, 13713, 13485, 14524, 13647, 12621, 13639, 14715, 12162, 14151, + 14655, 14817, 15289, 14910, 12611, 10509, 14170, 11086, 13589, 14446, 13859, 14364, 14423, 9632, 13940, 12730, + 14235, 11317, 12049, 10887, 12597, 14272, 13033, 14014, 15090, 13602, 12817, 11265, 14377, 15292, 11262, 13728, + 14433, 15180, 14878, 14873, 14646, 15171, 15194, 14516, 15210, 15285, 14197, 15224, 11401, 13344, 15325, 12830, + 12434, 14102, 15341, 13390, 13940, 15199, 14770, 13794, 13079, 12628, 12687, 14027, 15077, 12523, 13686, 14784, + 13326, 14304, 14444, 13953, 13995, 13374, 14682, 13926, 15017, 14549, 12923, 13542, 15094, 12943, 13353, 14571, + 14041, 13442, 13130, 15272, 14738, 12438, 13842, 10731, 13911, 15074, 12656, 15041, 10442, 14944, 15008, 14540, + 13798, 15272, 11949, 13798, 14870, 14943, 15345, 14016, 15078, 11273, 15337, 15046, 15035, 14508, 11478, 15282, + 11907, 13158, 13629, 14165, 11861, 14910, 13985, 13497, 13842, 14992, 14547, 13784, 14507, 14362, 12400, 13782, + 14380, 15192, 12056, 15252, 13539, 14667, 10260, 14382, 13235, 14368, 13486, 11730, 13402, 6337, 13705, 14874, + 15331, 15222, 14902, 13480, 14853, 13271, 13814, 13968, 8354, 14441, 15019, 14929, 14876, 15138, 15174, 13631, + 14708, 14538, 15221, 14625, 14201, 14472, 
14875, 15010, 12724, 14795, 14559, 15081, 13826, 14449, 4578, 15248, + 14290, 15173, 14282, 15209, 13631, 12749, 15054, 13396, 14873, 12975, 13545, 13381, 12371, 15332, 13850, 14266, + 10384, 15037, 14793, 15116, 14914, 14854, 12382, 15065, 14267, 15295, 15146, 15239, 12735, 14795, 12677, 14327, + 14127, 14728, 14834, 14624, 14874, 13639, 13574, 15253, 15057, 14489, 15293, 13507, 14158, 13344, 15175, 13407, + 15180, 15259, 12498, 14745, 13795, 14001, 13638, 11604, 13907, 10218, 14886, 14217, 14926, 13571, 15057, 14914, + 13735, 14851, 14717, 15134, 8915, 10258, 14122, 15033, 13841, 13948, 15134, 14451, 11956, 13717, 13996, 14893, + 13614, 12924, 15323, 15345, 14855, 8997, 9738, 9771, 11526, 14481, 15225, 14810, 14598, 13442, 15049, 14229, + 13489, 14816, 12928, 12525, 15162, 15086, 14352, 15269, 14751, 13354, 14842, 13080, 13595, 10904, 14544, 7635, + 14996, 12327, 13632, 14678, 13690, 14581, 12432, 14125, 14056, 11277, 15082, 14987, 13436, 14186, 13256, 13505, + 13514, 14550, 9239, 15119, 14557, 13208, 15042, 13813, 15277, 15036, 12285, 14432, 13801, 13802, 14482, 14423, + 12319, 14873, 14406, 15194, 13492, 14392, 14954, 14455, 14639, 13936, 15038, 13154, 10349, 13996, 14655, 10693, + 13501, 15287, 15237, 13410, 15228, 14925, 15298, 14369, 14928, 13357, 14789, 14257, 14633, 13521, 14229, 14805, + 13913, 15008, 13706, 14785, 14201, 14625, 14152, 14761, 15066, 14411, 13350, 13911, 10306, 12130, 14596, 14667, + 13169, 14020, 12546, 15263, 12525, 11326, 14694, 15001, 14540, 7842, 13614, 15128, 9817, 14543, 15264, 14090, + 14102, 15236, 14844, 13590, 14513, 12462, 13706, 15064, 14911, 14885, 15280, 14349, 13675, 14990, 14420, 14130, + 14905, 11421, 12947, 14521, 14819, 12802, 13857, 13960, 14735, 14789, 15217, 11926, 14837, 14953, 15213, 14739, + 14016, 13028, 13424, 14168, 13427, 14667, 14306, 15245, 12486, 14568, 13469, 15015, 12176, 13393, 14374, 15198, + 14723, 15077, 14173, 8900, 14916, 12820, 14600, 13976, 15025, 10182, 14616, 13423, 15099, 15279, 14491, 15006, + 
15087, 14737, 14404, 15261, 11846, 14517, 14277, 10424, 14998, 13690, 14379, 14535, 15316, 14675, 12770, 14019, + 14306, 14999, 14438, 13903, 15099, 14398, 13562, 14836, 13853, 11612, 12386, 13261, 15082, 8461, 14338, 14716, + 11328, 14333, 14572, 14954, 14861, 14640, 13483, 14887, 14077, 11735, 12513, 14033, 12776, 13937, 14541, 13781, + 13835, 15097, 14353, 15234, 15140, 13767, 11929, 14420, 14795, 14644, 14058, 14873, 14343, 15139, 14263, 12343, + 15123, 14656, 14033, 11266, 12484, 13301, 11706, 13555, 14770, 13151, 12741, 14823, 12387, 15341, 15341, 14662, + 14026, 13634, 11412, 15162, 8071, 13979, 15047, 15073, 15139, 14211, 13081, 14994, 14588, 15043, 14476, 12338, + 11335, 15342, 13817, 15269, 12699, 14850, 14896, 14484, 15332, 12401, 15053, 13907, 10634, 8527, 14968, 15180, + 12951, 14557, 13706, 14474, 14852, 14425, 13740, 13604, 13129, 14884, 14243, 14135, 15014, 14887, 13403, 14873, + 14510, 14245, 14627, 13813, 12563, 15018, 14724, 14648, 12848, 13233, 14041, 11341, 11703, 14060, 14912, 8761, + 13681, 14966, 15220, 12721, 12940, 13962, 14907, 15087, 15193, 15065, 12740, 15089, 15187, 13373, 14755, 14047, + 13375, 13298, 13153, 8655, 11821, 14729, 14231, 12634, 15340, 15262, 14590, 12925, 15264, 15331, 14610, 15010, + 10874, 14661, 12917, 14611, 15105, 10410, 14805, 14555, 15036, 15208, 13316, 14671, 14603, 14604, 13351, 15092, + 14339, 9192, 8118, 14587, 14061, 14551, 14869, 14114, 13776, 13968, 14539, 15147, 15296, 12690, 14775, 13111, + 11562, 14371, 14557, 14331, 13898, 15105, 14923, 11905, 11007, 13072, 11077, 13360, 14906, 12796, 15230, 14004, + 14583, 12700, 15059, 13932, 14452, 14378, 12933, 12605, 15191, 14809, 14976, 14628, 14611, 13780, 14158, 14835, + 15040, 15129, 13820, 15109, 14520, 9840, 14965, 14852, 10939, 12705, 14796, 15025, 15085, 13062, 11802, 14762, + 15025, 13680, 15115, 11822, 14925, 12248, 13718, 14794, 13862, 15240, 13991, 14505, 13660, 14860, 15042, 13601, + 14108, 15157, 14143, 13354, 9329, 14229, 13825, 14162, 15315, 15232, 
15110, 9797, 10507, 15186, 13981, 14188, + 14759, 13569, 12389, 15244, 15347, 15080, 13151, 14862, 14799, 14237, 15025, 14862, 10859, 14767, 14445, 14960, + 11003, 13052, 14345, 14892, 13923, 12687, 11477, 14400, 14838, 15262, 11439, 12525, 12453, 15074, 15360, 14668, + 14945, 13886, 15308, 13501, 14450, 14036, 14713, 15240, 14454, 13591, 15183, 14438, 13760, 14462, 13371, 13355, + 13224, 15352, 15324, 13948, 11785, 13195, 12650, 11262, 11105, 10564, 14771, 12100, 7727, 14675, 14686, 13141, + 13524, 13840, 14960, 15341, 13719, 14848, 11389, 13151, 14466, 15116, 12829, 15148, 12445, 14569, 13718, 11993, + 14729, 15006, 14590, 14754, 14936, 11020, 14317, 14760, 14358, 12561, 13938, 14741, 15319, 3125, 12594, 14682, + 10744, 15150, 14452, 15323, 12929, 14741, 14385, 10619, 15325, 14418, 14591, 15248, 12475, 14934, 14645, 14299, + 14901, 14365, 12441, 14435, 11181, 13633, 13349, 13578, 13642, 13028, 14649, 11287, 15048, 13188, 14553, 15287, + 14382, 12546, 15321, 14380, 6080, 15010, 15243, 13420, 8279, 14874, 14510, 13759, 15218, 13502, 12623, 15300, + 14414, 12642, 15227, 14990, 12705, 14181, 15049, 11711, 14600, 13489, 13103, 14139, 13583, 12923, 12737, 14714, + 14594, 13828, 13932, 14080, 15308, 14449, 7887, 11869, 15118, 11986, 14762, 13443, 14554, 14366, 9547, 14423, + 15109, 14928, 14910, 14627, 12703, 14759, 14719, 13760, 14596, 13557, 12700, 7461, 13494, 14373, 14333, 13503, + 15172, 11311, 13354, 14781, 15343, 14410, 14074, 14925, 14661, 12759, 8992, 13413, 12408, 13743, 14505, 15290, + 13685, 12866, 14329, 14194, 11418, 12534, 12979, 14676, 12762, 14912, 14821, 12517, 14859, 13733, 11965, 12719, + 14090, 15149, 15063, 15027, 14839, 14876, 14728, 12689, 15143, 10963, 15202, 14796, 12865, 14490, 12134, 12209, + 14130, 14877, 14722, 14649, 15352, 9871, 14889, 14931, 14361, 15270, 14801, 14133, 11383, 15354, 13428, 13639, + 15274, 12819, 11805, 12766, 13609, 11766, 13245, 14296, 14993, 13620, 14820, 14072, 14390, 15129, 14531, 14467, + 14065, 13803, 13330, 11230, 
14406, 14237, 14623, 15209, 10557, 13728, 14452, 12876, 14676, 15335, 13518, 14983, + 14407, 14262, 12052, 12803, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_HWCN, {8, 2, 16, 16}, {2, 2, 17, 17}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZHwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_fp32_success_lt_cube) { + float data_4d[1 * 1 * 16 * 16] = { + 0.7735707016927454, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + float data[1 * 1 * 1 * 1] = { + 0.7735707016927454, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_HWCN, {1, 1, 16, 16}, {1, 1, 1, 1}, DT_FLOAT}; + TransResult result; + + FormatTransferFracZHwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_fp32_success_eq_cube) { + float data_4d[4 * 1 * 16 * 16] = { + 0.6831664612720662, 0.46807409193230387, 0.6844620901431121, 0.44734933345075634, 0.6589946419451493, + 0.45407279559796754, 0.8079953053990072, 0.7711440578240174, 0.2222480197158998, 0.2946079420692501, + 0.08262985158363112, 0.6551984526443926, 0.12350662771089382, 0.07529018039533752, 0.7104137297407095, + 0.7972267016544862, 0.6704100492257761, 0.38380538447623347, 0.20504291496685378, 0.8937458228458971, + 0.5953133421005451, 0.4024209042151684, 0.2708997526965613, 0.9947039772657421, 0.05191087431024033, + 0.527684234902843, 0.5822965426031946, 0.34051690161871384, 0.618964448418473, 0.7083129376138301, + 0.9430455365564758, 0.8428917434812192, 0.1100008024925273, 0.8658867150465022, 0.37785518796941187, + 0.0560682924999949, 0.8917013967072703, 0.29011820100715613, 0.841641605015567, 0.28710785229202607, + 0.5550365115148658, 0.899317020625125, 0.9469770833029874, 0.481722161169593, 0.35659645959724773, + 
0.4753321858251086, 0.018165235960573067, 0.5901523502446024, 0.06782629901187387, 0.5846316383032825, + 0.3343937523173849, 0.516557711551506, 0.5927516352022604, 0.09425462440794818, 0.3842649634085703, + 0.42456237358629856, 0.20273646764485553, 0.12179285856927158, 0.3434820114860655, 0.5418600485584334, + 0.537288775668625, 0.6345679130390505, 0.6150793068687287, 0.16942428803365528, 0.011962277754646644, + 0.33383308483647145, 0.7229490738382419, 0.8723731071128644, 0.4562484024783662, 0.31296996922277387, + 0.3988577244476993, 0.7166828085794636, 0.9261781977899168, 0.7180806906192968, 0.3570445695122011, + 0.11624287490051621, 0.04304626670554146, 0.3568430078652245, 0.22391301646447914, 0.22682002973089832, + 0.01218595314175075, 0.554901867035217, 0.2134313172685377, 0.9956484063389531, 0.2961420578323939, + 0.21025993218922812, 0.03497098648361374, 0.8444663243772668, 0.02360231224920384, 0.6752210926078791, + 0.18555970633957697, 0.05634985529260572, 0.17545433876008154, 0.20734533736133154, 0.6476268799888912, + 0.4521612477872248, 0.34992521007432076, 0.9899336751886556, 0.02291890899604221, 0.9615364801116805, + 0.7837811830236121, 0.7375328865078891, 0.4505680465750297, 0.7978480168417859, 0.2066279104002401, + 0.09440474356926032, 0.3436308066695164, 0.6174168480342069, 0.12290174347647109, 0.33173796997985794, + 0.43112101816430504, 0.40304239503355177, 0.031929425925011135, 0.843148774421602, 0.43932783284222643, + 0.10429153549692427, 0.2669115094745256, 0.2501512958138894, 0.15875525021129788, 0.05380204382715681, + 0.7668998634194449, 0.29285471597450274, 0.8414998513397356, 0.008673589506358104, 0.7346896807722134, + 0.7683317364722542, 0.5349102121913144, 0.08391554522130873, 0.01747534679308327, 0.42657242063886225, + 0.3127446423322787, 0.9950170425256947, 0.5254215517249816, 0.45548344249057005, 0.29438124781919195, + 0.3431514950412976, 0.46159896792164, 0.2663182629571247, 0.14187222894379747, 0.03516199633720751, + 0.8871942666614756, 
0.7883386536030593, 0.6954694188692718, 0.5149870428091292, 0.6935106138241113, + 0.6375971960455311, 0.3071082642145303, 0.45361717586917905, 0.42316905468276345, 0.5414478686661514, + 0.43314211633167776, 0.18402472219501342, 0.2952875335419224, 0.6863539467549075, 0.7896909379393948, + 0.6515549755281398, 0.09222274447852541, 0.7031577793063953, 0.6336940353910993, 0.5368054627466105, + 0.4048242728667739, 0.4780005481374817, 0.28667136829518425, 0.5501136320860092, 0.24405829809957202, + 0.3891135788358344, 0.848283969134549, 0.6896238704509293, 0.4906467181577798, 0.06369584024032915, + 0.6040586446248664, 0.48037417839972163, 0.19339964886791605, 0.4045444987172572, 0.27121319053675963, + 0.27422624363244774, 0.8807898332792651, 0.8429603983261464, 0.5250908186015898, 0.41429943016156967, + 0.8245389032113795, 0.05260608835007785, 0.9857231222727136, 0.43722621091188607, 0.15375510884645693, + 0.4958653433337298, 0.8300430827109714, 0.2071098335922219, 0.8563173170431349, 0.6582593541664329, + 0.3147633699411625, 0.5579354687623471, 0.37552051711003365, 0.6165825942348354, 0.055699679454250584, + 0.45314086306788737, 0.5828334501321621, 0.10560619781955471, 0.10175287058893512, 0.6806324103865506, + 0.06222750656276943, 0.7483015426806955, 0.23890864348090746, 0.1042878700523937, 0.8136578720368534, + 0.9040410601874307, 0.02624241542169059, 0.7932359546968593, 0.7944394689825284, 0.5321276778145092, + 0.0535571856697612, 0.9266505353414602, 0.7888192017690357, 0.0933256511080266, 0.5923909155484899, + 0.06805780980492815, 0.974417825179212, 0.41566199696641626, 0.48642427283294987, 0.10997939958993308, + 0.8429828847103998, 0.6260279433004406, 0.21491223899537837, 0.0504983090565575, 0.5425208877913553, + 0.5069119006322486, 0.678551795781891, 0.14011533201044424, 0.43278585657967816, 0.926593576262934, + 0.9702293073631606, 0.36018301931740304, 0.7931543445611511, 0.9404026082713018, 0.5463036464543467, + 0.17330678996166182, 0.7894006098150973, 
0.442734368440455, 0.8981027042449529, 0.33497157875463235, + 0.5788717206660638, 0.986669415303064, 0.12806259068029724, 0.3279938067944209, 0.34756560511018186, + 0.9085775315386622, 0.4810608942027086, 0.7344984959546728, 0.08118053512401946, 0.12385988616582788, + 0.19373242493951304, 0.08095871969642188, 0.9001565264037388, 0.3588283237184189, 0.29536303816010245, + 0.8060034335764349, 0.46546383867107344, 0.7205892937122093, 0.025925230173998726, 0.5102876474425719, + 0.9197592030619824, 0.11333553766433013, 0.13886358508848384, 0.05643823802653081, 0.8226616539197039, + 0.9076379169526932, 0.8129624750328708, 0.4518451662674847, 0.22149088921320348, 0.8541020027122685, + 0.23318802256305826, 0.150063466262228, 0.814450981092272, 0.42312227098878497, 0.7013267301080973, + 0.42513846632876096, 0.6810194156003911, 0.3794681153385391, 0.34920332246724384, 0.38420512983502575, + 0.1525078295516621, 0.8090605987311331, 0.0193905398760037, 0.31996157111293577, 0.7922829336516499, + 0.46226143243492035, 0.6416293524794912, 0.8908665837266646, 0.2637275626853073, 0.8565214584341261, + 0.09724835107213159, 0.12295592223905671, 0.6323054179956534, 0.4759654290551254, 0.5619941553685122, + 0.8584563839072712, 0.2278756939300155, 0.7908206301991599, 0.5596737212791594, 0.10163359233318092, + 0.32976304303206283, 0.4670590666020392, 0.17554298026024773, 0.14240827362868846, 0.5034614767751614, + 0.794893343621075, 0.493481917833547, 0.9925824631695594, 0.6812892829063387, 0.7887908051078045, + 0.6106188172917685, 0.36632090169684395, 0.4400036570026227, 0.694934848977295, 0.2736956467267132, + 0.3993747238415968, 0.5884066975152941, 0.377732416633768, 0.26652656251072004, 0.06154436361479121, + 0.38243421602723326, 0.03741283704650877, 0.7719142177120245, 0.32654545113159816, 0.9811099286760184, + 0.40549583385019305, 0.8524557457758495, 0.54269330786462, 0.46531629198714, 0.3866394496126604, + 0.10902348291155928, 0.317460556630631, 0.5420053903087978, 
0.37886498413688585, 0.43249107307395207, + 0.6569751230209416, 0.7507342081719128, 0.8673927700405522, 0.14771320822513423, 0.49125083349457255, + 0.3485655420225199, 0.08039429774533102, 0.7853285592704566, 0.45904421355532876, 0.020751099069649337, + 0.8809860202717111, 0.1117483917540818, 0.4022651174022852, 0.7840742107173263, 0.4621822083435302, + 0.19568628449811376, 0.920198564024848, 0.8047471610165974, 0.05809292818063094, 0.2909407249624676, + 0.9959738352958282, 0.808405726749241, 0.4988825048856117, 0.530080132504718, 0.5526298311529374, + 0.2407659161095752, 0.8964997582949811, 0.5367622534230377, 0.24723353671430526, 0.07103293249287579, + 0.09655940030600907, 0.7648122752472973, 0.010032358443275102, 0.5842326307910283, 0.777742793860481, + 0.75986430850467, 0.39473463341772097, 0.33415825689773015, 0.28625931855104614, 0.724555891803827, + 0.11640935897348226, 0.9250059573755629, 0.47536466030424107, 0.43992889231568155, 0.8513889913773053, + 0.8572119394581306, 0.37622646274683946, 0.40142738686603874, 0.012995801393796858, 0.7935336924086659, + 0.40563890672238834, 0.45466564739918536, 0.570849287281551, 0.22547374818094368, 0.32476277113745666, + 0.16935924210176234, 0.7396115806050879, 0.27737221116673494, 0.45507109078354513, 0.1846995471935421, + 0.5897312486952494, 0.5395726401976545, 0.7349955594555352, 0.40693340266579436, 0.11690371538489985, + 0.6079902004546779, 0.73771380405663, 0.10541819520665319, 0.026977686728101857, 0.4500187981233643, + 0.06474126489274601, 0.98761531692832, 0.09410821162123428, 0.33686296834615204, 0.26837316011632395, + 0.12729624843562048, 0.9433519670635382, 0.8707313735578062, 0.16079780642882713, 0.28429857709682227, + 0.5571251732839778, 0.5348694047790081, 0.6833195598884997, 0.15182871738993353, 0.491045171071926, + 0.29326496215358233, 0.49162045819706357, 0.6304841404403468, 0.07059592099413725, 0.4270449247566951, + 0.704714010708951, 0.5156429184165244, 0.705737670330288, 0.5715047898300967, 
0.5661583143004812, + 0.5902161490364923, 0.3979903309706505, 0.4888094450313448, 0.26627701774793255, 0.48998657626108477, + 0.1554004362113669, 0.936004512466527, 0.11825433228258497, 0.12549561732281167, 0.7012030577838808, + 0.21436889983722973, 0.8699582487347918, 0.0192743915338478, 0.6625797270870113, 0.855336187908017, + 0.3754536075891237, 0.23405113906650388, 0.4192396615212136, 0.22127280436177366, 0.43164069610611944, + 0.1675513073887226, 0.13001517540034935, 0.42345926820248136, 0.8689876090935275, 0.07580867233524491, + 0.6776796046633388, 0.6704560487979574, 0.04473925731958006, 0.46163054855599905, 0.3900650788591161, + 0.8241054911600366, 0.9547114281792741, 0.056092304981274155, 0.8508601819697378, 0.48847793129215134, + 0.23180635419589213, 0.7348713872810098, 0.03678780892522715, 0.48024185650458984, 0.2680856225282512, + 0.12413177831862876, 0.9583478777724984, 0.5387699863266349, 0.16980871122153518, 0.5395003971944796, + 0.24004337638983342, 0.24786316910119555, 0.026233819587968332, 0.6996171147385536, 0.279604293162519, + 0.7889897218117005, 0.4784414591841616, 0.726571033219543, 0.5194933223935144, 0.5102932009299628, + 0.9717415537588983, 0.38287488582381835, 0.6999407132066563, 0.8127931321646774, 0.5275124422319207, + 0.13770686280221067, 0.7871890380955198, 0.47406589969297297, 0.9106454871097285, 0.00781691329115608, + 0.8950293014208512, 0.6523119271938245, 0.7226667621938894, 0.292174813070197, 0.9142077799335105, + 0.6551343366610451, 0.48180056169665797, 0.7040011936196882, 0.5086501980098859, 0.042503208992013986, + 0.8821281169320708, 0.8395700105317818, 0.38480406941859957, 0.5145990085613465, 0.23109838051074427, + 0.47085915459268524, 0.6392541863634652, 0.0011517350795945402, 0.4352514529217414, 0.3391838669145525, + 0.6974288480613592, 0.28322528648853385, 0.33208031793114734, 0.021060973453037035, 0.909582371173019, + 0.19463134219614753, 0.7781693131100709, 0.9365643543694223, 0.36457252683522756, 0.14941527683842093, + 
0.4559443017244216, 0.3354620354653778, 0.4215935674188409, 0.3179828820733691, 0.6808184152525771, + 0.9775827838703164, 0.5751067756152548, 0.44886211693328903, 0.764238534300501, 0.6276312526017124, + 0.051834948496720656, 0.27004389295833675, 0.7029901104801899, 0.6278899666238881, 0.24278691454078338, + 0.36546325633284493, 0.9812980074238206, 0.44834020622301407, 0.14966162014933604, 0.7868719388708943, + 0.7065586095515894, 0.6364835932247739, 0.8264421581923695, 0.0035118223254034797, 0.6443681787183393, + 0.41067458217386676, 0.892389047362956, 0.29660097478905667, 0.4148792774498472, 0.9167695771391512, + 0.7621107615417726, 0.8628333420437356, 0.4706749812166604, 0.42395916668728717, 0.6871439339671128, + 0.16343019791595848, 0.8555600890910067, 0.18663880545899425, 0.7659119030802831, 0.0024571713622885127, + 0.12474601741306279, 0.17624502657490493, 0.07102168443181744, 0.2766138040829982, 0.70250560426075, + 0.9104989798750418, 0.11601224618291417, 0.9163050771088229, 0.2921218451649672, 0.9156480830306547, + 0.1260635770067572, 0.45284937850754714, 0.623127746212215, 0.43434315006170543, 0.5657931668646882, + 0.7648823870386725, 0.023867149366012086, 0.06309889991154916, 0.9420037583644638, 0.4937826265620643, + 0.14250597504899842, 0.3822802235503563, 0.5421376354869643, 0.5373297123213626, 0.33019397705840037, + 0.7070970521308528, 0.44663016285658497, 0.8608042708557483, 0.747760955239228, 0.9259911753589466, + 0.7822278423256982, 0.7210764104642025, 0.16707350078699712, 0.4757286389194296, 0.2875045651754615, + 0.2803191369257856, 0.5051855485918096, 0.24485897195744344, 0.14399027852311141, 0.4662949176984196, + 0.43489193254471825, 0.8331805112602307, 0.8313466700309183, 0.12136494753753657, 0.7231965027565961, + 0.9924878844025083, 0.808533876962095, 0.616556845043056, 0.038379037197397214, 0.23360888711220384, + 0.43286210942540415, 0.29356145513705545, 0.8868555851092578, 0.5940921036615089, 0.08472555325190867, + 0.9991923980972558, 
0.3941053921380935, 0.9808635163437694, 0.17685203056882326, 0.19798316729300292, + 0.9526742194230162, 0.3856873300304975, 0.761629773068807, 0.5975318578336405, 0.47064284297450454, + 0.5275436273483901, 0.004751033126319526, 0.2720049656556979, 0.7411408944034059, 0.8451888247183171, + 0.39647310980996087, 0.2714703487234702, 0.7053853428678178, 0.7798825181178566, 0.27979140115483725, + 0.4141899501695743, 0.6643473929144047, 0.4803346887258785, 0.7869592887064103, 0.6208782186564221, + 0.06589826828656975, 0.46618942111551465, 0.9751736868358482, 0.2137674562471874, 0.57656914189662, + 0.03911968762741813, 0.8312531636073637, 0.2133084787107239, 0.8614874801300464, 0.8540106732789453, + 0.16622952991905793, 0.7118917475703413, 0.4954313852390353, 0.38787474702322755, 0.6446085650793983, + 0.6872004100798245, 0.17514420258469143, 0.8259781152662251, 0.26945596320651144, 0.5641334745297626, + 0.5606745080228348, 0.3307599402107675, 0.28117583723512807, 0.7379929284335528, 0.7456300751677036, + 0.3092126324432367, 0.34900299452977235, 0.8218969478205516, 0.14020555470061447, 0.4938456319074028, + 0.147703797288803, 0.17087811285859922, 0.8144254559841663, 0.48391169406642554, 0.3566778694735223, + 0.4652122740697997, 0.6761799412268091, 0.19766898733665061, 0.25563122791668114, 0.5748277086082783, + 0.617600425956734, 0.6262548164405474, 0.27037612729706706, 0.16535271213628477, 0.9472888972576935, + 0.9069136347688539, 0.15153092403338186, 0.29182221282307463, 0.9770461371713691, 0.4794895181659964, + 0.36861133501387167, 0.5527764007134675, 0.1356744427576113, 0.9052873135897423, 0.21421666418111796, + 0.7423918859901605, 0.43426111117080757, 0.40160624612048856, 0.713919830422035, 0.9058501544549272, + 0.8463815001810163, 0.18644481649281086, 0.4476560563653188, 0.4942428147993275, 0.381079777122051, + 0.220047268980334, 0.9809706016212754, 0.2771049132714265, 0.7477400509890159, 0.009918541286799809, + 0.10406917247312042, 0.2508639245179938, 
0.4625791481571955, 0.28272129944647095, 0.18172893239062426, + 0.8437188991596719, 0.12014088192118189, 0.4396215411214064, 0.11879153339008564, 0.22660695439179968, + 0.17063556592155904, 0.3409459367181056, 0.05411876516404135, 0.8405051547703243, 0.09057923342992225, + 0.7828794366278582, 0.19865367646322263, 0.5869964513283881, 0.5652719533881609, 0.40483259940948424, + 0.2529773635219552, 0.9766799456588338, 0.2489315994112682, 0.036903591509810596, 0.8189861113024955, + 0.9170748964519404, 0.5102931342925842, 0.8296909764204485, 0.628604548871245, 0.9922349376500471, + 0.9603755134468333, 0.6839676923991466, 0.6167503396510216, 0.4675060134913003, 0.5175723647127469, + 0.09017342631334502, 0.45465076557776485, 0.8567399789576292, 0.3644482467677622, 0.7746037570681462, + 0.6575020735435918, 0.038718639734492744, 0.18822021734070737, 0.14171689374558305, 0.8821388976738747, + 0.2871856735550452, 0.24166612486805816, 0.31446232390877327, 0.8071819782936023, 0.32295183679656514, + 0.2686347182149137, 0.35413975002107856, 0.981199266871307, 0.10562689284011406, 0.30603152447000326, + 0.5688125740478548, 0.582767706006426, 0.8475163038855884, 0.9583135940309339, 0.25447979211694116, + 0.7197133644724168, 0.2696356893402736, 0.13742826030613675, 0.5404155070995436, 0.3378761975061685, + 0.18649116769634477, 0.9769467982418673, 0.3822258721388825, 0.49512574517741414, 0.05874681009041205, + 0.5164094633106974, 0.5174261983921327, 0.20993774248784414, 0.0023076194317925847, 0.43600250461129864, + 0.6658781626439674, 0.01252483392898629, 0.06819848778854909, 0.9700512147479452, 0.07699155770525956, + 0.3594024013470921, 0.2651796388826595, 0.5383156359262355, 0.617136759546696, 0.4343278373265286, + 0.5238057040715188, 0.44249065473043125, 0.8674355770611861, 0.9623252657198269, 0.9520731395173417, + 0.17225566614426924, 0.8160451706486238, 0.259692323124718, 0.4015736852642504, 0.7518319270504007, + 0.0031347298909260024, 0.04706957304444048, 0.8399826103171201, 
0.8314931338503961, 0.6602858559189395, + 0.27331544209481184, 0.6693215409966775, 0.7588986683209246, 0.22818233479986216, 0.5899292800086123, + 0.6814191735363567, 0.014049664372590032, 0.5329943911422461, 0.9095428798401878, 0.6050458792626395, + 0.46343882793690616, 0.5676060591456907, 0.9110051463445652, 0.20152636139953495, 0.9737591946763959, + 0.583386157842853, 0.6022580717413262, 0.7449507594166878, 0.6845438556043444, 0.6765620708428703, + 0.7482079102220434, 0.09317226220309016, 0.28047904770696586, 0.06528309412938993, 0.6332579040841826, + 0.6990000524368737, 0.6571893331380114, 0.29236622278798463, 0.6697757773566023, 0.2662937875726342, + 0.0982538860561668, 0.7287839713639058, 0.3171535568464592, 0.011063131370617874, 0.6040478455499635, + 0.7622303773870567, 0.10707790453440946, 0.2375388719117737, 0.4852907979238138, 0.93357557715282, + 0.2745010968952273, 0.07248344456646405, 0.5973802075585944, 0.4965442982192305, 0.5521873153411291, + 0.4343712963398306, 0.7127225734661942, 0.0023092611850982214, 0.6563813829254826, 0.07691029522466009, + 0.5116904879280413, 0.22400011032606482, 0.7235608133931579, 0.6137472473092337, 0.8656980825419603, + 0.07711735430740096, 0.39099296558300245, 0.5796161795730663, 0.8370908606922681, 0.1537414279185617, + 0.3081203425517297, 0.5426627874981451, 0.4102282338438321, 0.2115074984972014, 0.38423989072855613, + 0.4934963691876564, 0.8898006701959784, 0.0020026786914055794, 0.6624110868318063, 0.05603948420954197, + 0.22422042503104178, 0.771909773905311, 0.5068158178625776, 0.6770516665696477, 0.6116620002796381, + 0.7586660213684354, 0.2733861671206076, 0.5933021353506663, 0.40756763501282034, 0.7319840379060449, + 0.36401778528445394, 0.39957079625047065, 0.7399712679852177, 0.8997372794861339, 0.008223235280982344, + 0.15488052909951278, 0.5101878953761034, 0.5990007053028108, 0.7838294980264872, 0.6639559101930096, + 0.9843797936868443, 0.03810999309518304, 0.6850769508040535, 0.41247626245057356, 
0.05904018343949069, + 0.8688083036380179, 0.46384003598712475, 0.2540779469158905, 0.2336662188944475, 0.6862110148169684, + 0.03107169521257047, 0.66191467125411, 0.26237569141698025, 0.8635178174057434, 0.25596259039679425, + 0.04455216237864856, 0.8195300753708519, 0.704808857028874, 0.8519580131779299, 0.4272465292038824, + 0.852044036967288, 0.7388641307494216, 0.7534985606386517, 0.39846011104236867, 0.7388394895852001, + 0.6459391314681788, 0.640260664909671, 0.5111460586748117, 0.80443448939929, 0.42719889494650143, + 0.37982798514793914, 0.7790515150101655, 0.1426225105392278, 0.7573338301154167, 0.8238299352115052, + 0.2516629563087275, 0.47670319089362045, 0.8768104265717728, 0.6813174782132152, 0.36538436401335717, + 0.9538984147659229, 0.33895553956798596, 0.5042328480905443, 0.35888628786369703, 0.5696183464330247, + 0.826581992069732, 0.6712396401944386, 0.9509250432331467, 0.2514001644292745, 0.04628408349794133, + 0.44692487935732084, 0.5286987408008823, 0.18227867796825847, 0.2756670744092533, 0.5890085618833815, + 0.15361676424072057, 0.3112065842869294, 0.16172945211030598, 0.9577072093023093, 0.05125732646208947, + 0.005878771424188023, 0.9449442428947165, 0.7018453422389489, 0.15454991391401007, 0.5821480481063563, + 0.9078593647977083, 0.632878554207089, 0.0002496443870546594, 0.024594532234186506, 0.2180635337072514, + 0.5423671524932987, 0.6584820098951952, 0.7680927998881413, 0.2665259835637993, 0.3673595477453472, + 0.41151637359492077, 0.7294317133290991, 0.08634325268259369, 0.537550613453831, 0.3272511551932701, + 0.23951615810506843, 0.18854433867049736, 0.6554860518436072, 0.35432188934380204, 0.09660389753167853, + 0.29355884866525306, 0.6975982642757892, 0.37792729759190136, 0.6841746291087246, 0.12206175290131605, + 0.3702383556858654, 0.8928022055477935, 0.7169042496671583, 0.18378045974821677, 0.6003995894084154, + 0.7236686851446756, 0.8437574011780651, 0.20121382443408364, 0.34854507526650924, 0.35175573461559784, + 
0.44949384517169155, 0.4741917424025329, 0.7729441143536179, 0.550204417695396, 0.8409788814852021, + 0.15312848002048507, 0.8616757892110115, 0.5966950825260615, 0.6754406021840288, 0.027021667330288657, + 0.8327295607228997, 0.6058217168383617, 0.9904431061326998, 0.1806575194352621, 0.3665641617585177, + 0.42527027826024877, 0.09384753816567692, 0.7751872225238126, 0.3825117200801388, + }; + float data[2 * 2 * 16 * 16] = { + 0.6831664612720662, 0.6704100492257761, 0.1100008024925273, 0.06782629901187387, 0.011962277754646644, + 0.01218595314175075, 0.34992521007432076, 0.031929425925011135, 0.01747534679308327, 0.6935106138241113, + 0.4048242728667739, 0.8807898332792651, 0.37552051711003365, 0.7944394689825284, 0.5425208877913553, + 0.5788717206660638, 0.46807409193230387, 0.38380538447623347, 0.8658867150465022, 0.5846316383032825, + 0.33383308483647145, 0.554901867035217, 0.9899336751886556, 0.843148774421602, 0.42657242063886225, + 0.6375971960455311, 0.4780005481374817, 0.8429603983261464, 0.6165825942348354, 0.5321276778145092, + 0.5069119006322486, 0.986669415303064, 0.6844620901431121, 0.20504291496685378, 0.37785518796941187, + 0.3343937523173849, 0.7229490738382419, 0.2134313172685377, 0.02291890899604221, 0.43932783284222643, + 0.3127446423322787, 0.3071082642145303, 0.28667136829518425, 0.5250908186015898, 0.055699679454250584, + 0.0535571856697612, 0.678551795781891, 0.12806259068029724, 0.44734933345075634, 0.8937458228458971, + 0.0560682924999949, 0.516557711551506, 0.8723731071128644, 0.9956484063389531, 0.9615364801116805, + 0.10429153549692427, 0.9950170425256947, 0.45361717586917905, 0.5501136320860092, 0.41429943016156967, + 0.45314086306788737, 0.9266505353414602, 0.14011533201044424, 0.3279938067944209, 0.6589946419451493, + 0.5953133421005451, 0.8917013967072703, 0.5927516352022604, 0.4562484024783662, 0.2961420578323939, + 0.7837811830236121, 0.2669115094745256, 0.5254215517249816, 0.42316905468276345, 0.24405829809957202, + 
0.8245389032113795, 0.5828334501321621, 0.7888192017690357, 0.43278585657967816, 0.34756560511018186, + 0.45407279559796754, 0.4024209042151684, 0.29011820100715613, 0.09425462440794818, 0.31296996922277387, + 0.21025993218922812, 0.7375328865078891, 0.2501512958138894, 0.45548344249057005, 0.5414478686661514, + 0.3891135788358344, 0.05260608835007785, 0.10560619781955471, 0.0933256511080266, 0.926593576262934, + 0.9085775315386622, 0.8079953053990072, 0.2708997526965613, 0.841641605015567, 0.3842649634085703, + 0.3988577244476993, 0.03497098648361374, 0.4505680465750297, 0.15875525021129788, 0.29438124781919195, + 0.43314211633167776, 0.848283969134549, 0.9857231222727136, 0.10175287058893512, 0.5923909155484899, + 0.9702293073631606, 0.4810608942027086, 0.7711440578240174, 0.9947039772657421, 0.28710785229202607, + 0.42456237358629856, 0.7166828085794636, 0.8444663243772668, 0.7978480168417859, 0.05380204382715681, + 0.3431514950412976, 0.18402472219501342, 0.6896238704509293, 0.43722621091188607, 0.6806324103865506, + 0.06805780980492815, 0.36018301931740304, 0.7344984959546728, 0.2222480197158998, 0.05191087431024033, + 0.5550365115148658, 0.20273646764485553, 0.9261781977899168, 0.02360231224920384, 0.2066279104002401, + 0.7668998634194449, 0.46159896792164, 0.2952875335419224, 0.4906467181577798, 0.15375510884645693, + 0.06222750656276943, 0.974417825179212, 0.7931543445611511, 0.08118053512401946, 0.2946079420692501, + 0.527684234902843, 0.899317020625125, 0.12179285856927158, 0.7180806906192968, 0.6752210926078791, + 0.09440474356926032, 0.29285471597450274, 0.2663182629571247, 0.6863539467549075, 0.06369584024032915, + 0.4958653433337298, 0.7483015426806955, 0.41566199696641626, 0.9404026082713018, 0.12385988616582788, + 0.08262985158363112, 0.5822965426031946, 0.9469770833029874, 0.3434820114860655, 0.3570445695122011, + 0.18555970633957697, 0.3436308066695164, 0.8414998513397356, 0.14187222894379747, 0.7896909379393948, + 0.6040586446248664, 
0.8300430827109714, 0.23890864348090746, 0.48642427283294987, 0.5463036464543467, + 0.19373242493951304, 0.6551984526443926, 0.34051690161871384, 0.481722161169593, 0.5418600485584334, + 0.11624287490051621, 0.05634985529260572, 0.6174168480342069, 0.008673589506358104, 0.03516199633720751, + 0.6515549755281398, 0.48037417839972163, 0.2071098335922219, 0.1042878700523937, 0.10997939958993308, + 0.17330678996166182, 0.08095871969642188, 0.12350662771089382, 0.618964448418473, 0.35659645959724773, + 0.537288775668625, 0.04304626670554146, 0.17545433876008154, 0.12290174347647109, 0.7346896807722134, + 0.8871942666614756, 0.09222274447852541, 0.19339964886791605, 0.8563173170431349, 0.8136578720368534, + 0.8429828847103998, 0.7894006098150973, 0.9001565264037388, 0.07529018039533752, 0.7083129376138301, + 0.4753321858251086, 0.6345679130390505, 0.3568430078652245, 0.20734533736133154, 0.33173796997985794, + 0.7683317364722542, 0.7883386536030593, 0.7031577793063953, 0.4045444987172572, 0.6582593541664329, + 0.9040410601874307, 0.6260279433004406, 0.442734368440455, 0.3588283237184189, 0.7104137297407095, + 0.9430455365564758, 0.018165235960573067, 0.6150793068687287, 0.22391301646447914, 0.6476268799888912, + 0.43112101816430504, 0.5349102121913144, 0.6954694188692718, 0.6336940353910993, 0.27121319053675963, + 0.3147633699411625, 0.02624241542169059, 0.21491223899537837, 0.8981027042449529, 0.29536303816010245, + 0.7972267016544862, 0.8428917434812192, 0.5901523502446024, 0.16942428803365528, 0.22682002973089832, + 0.4521612477872248, 0.40304239503355177, 0.08391554522130873, 0.5149870428091292, 0.5368054627466105, + 0.27422624363244774, 0.5579354687623471, 0.7932359546968593, 0.0504983090565575, 0.33497157875463235, + 0.8060034335764349, 0.46546383867107344, 0.814450981092272, 0.2637275626853073, 0.5034614767751614, + 0.38243421602723326, 0.7507342081719128, 0.8047471610165974, 0.5842326307910283, 0.7935336924086659, + 0.6079902004546779, 0.5348694047790081, 
0.4888094450313448, 0.22127280436177366, 0.48847793129215134, + 0.7889897218117005, 0.6523119271938245, 0.7205892937122093, 0.42312227098878497, 0.8565214584341261, + 0.794893343621075, 0.03741283704650877, 0.8673927700405522, 0.05809292818063094, 0.777742793860481, + 0.40563890672238834, 0.73771380405663, 0.6833195598884997, 0.26627701774793255, 0.43164069610611944, + 0.23180635419589213, 0.4784414591841616, 0.7226667621938894, 0.025925230173998726, 0.7013267301080973, + 0.09724835107213159, 0.493481917833547, 0.7719142177120245, 0.14771320822513423, 0.2909407249624676, + 0.75986430850467, 0.45466564739918536, 0.10541819520665319, 0.15182871738993353, 0.48998657626108477, + 0.1675513073887226, 0.7348713872810098, 0.726571033219543, 0.292174813070197, 0.5102876474425719, + 0.42513846632876096, 0.12295592223905671, 0.9925824631695594, 0.32654545113159816, 0.49125083349457255, + 0.9959738352958282, 0.39473463341772097, 0.570849287281551, 0.026977686728101857, 0.491045171071926, + 0.1554004362113669, 0.13001517540034935, 0.03678780892522715, 0.5194933223935144, 0.9142077799335105, + 0.9197592030619824, 0.6810194156003911, 0.6323054179956534, 0.6812892829063387, 0.9811099286760184, + 0.3485655420225199, 0.808405726749241, 0.33415825689773015, 0.22547374818094368, 0.4500187981233643, + 0.29326496215358233, 0.936004512466527, 0.42345926820248136, 0.48024185650458984, 0.5102932009299628, + 0.6551343366610451, 0.11333553766433013, 0.3794681153385391, 0.4759654290551254, 0.7887908051078045, + 0.40549583385019305, 0.08039429774533102, 0.4988825048856117, 0.28625931855104614, 0.32476277113745666, + 0.06474126489274601, 0.49162045819706357, 0.11825433228258497, 0.8689876090935275, 0.2680856225282512, + 0.9717415537588983, 0.48180056169665797, 0.13886358508848384, 0.34920332246724384, 0.5619941553685122, + 0.6106188172917685, 0.8524557457758495, 0.7853285592704566, 0.530080132504718, 0.724555891803827, + 0.16935924210176234, 0.98761531692832, 0.6304841404403468, 
0.12549561732281167, 0.07580867233524491, + 0.12413177831862876, 0.38287488582381835, 0.7040011936196882, 0.05643823802653081, 0.38420512983502575, + 0.8584563839072712, 0.36632090169684395, 0.54269330786462, 0.45904421355532876, 0.5526298311529374, + 0.11640935897348226, 0.7396115806050879, 0.09410821162123428, 0.07059592099413725, 0.7012030577838808, + 0.6776796046633388, 0.9583478777724984, 0.6999407132066563, 0.5086501980098859, 0.8226616539197039, + 0.1525078295516621, 0.2278756939300155, 0.4400036570026227, 0.46531629198714, 0.020751099069649337, + 0.2407659161095752, 0.9250059573755629, 0.27737221116673494, 0.33686296834615204, 0.4270449247566951, + 0.21436889983722973, 0.6704560487979574, 0.5387699863266349, 0.8127931321646774, 0.042503208992013986, + 0.9076379169526932, 0.8090605987311331, 0.7908206301991599, 0.694934848977295, 0.3866394496126604, + 0.8809860202717111, 0.8964997582949811, 0.47536466030424107, 0.45507109078354513, 0.26837316011632395, + 0.704714010708951, 0.8699582487347918, 0.04473925731958006, 0.16980871122153518, 0.5275124422319207, + 0.8821281169320708, 0.8129624750328708, 0.0193905398760037, 0.5596737212791594, 0.2736956467267132, + 0.10902348291155928, 0.1117483917540818, 0.5367622534230377, 0.43992889231568155, 0.1846995471935421, + 0.12729624843562048, 0.5156429184165244, 0.0192743915338478, 0.46163054855599905, 0.5395003971944796, + 0.13770686280221067, 0.8395700105317818, 0.4518451662674847, 0.31996157111293577, 0.10163359233318092, + 0.3993747238415968, 0.317460556630631, 0.4022651174022852, 0.24723353671430526, 0.8513889913773053, + 0.5897312486952494, 0.9433519670635382, 0.705737670330288, 0.6625797270870113, 0.3900650788591161, + 0.24004337638983342, 0.7871890380955198, 0.38480406941859957, 0.22149088921320348, 0.7922829336516499, + 0.32976304303206283, 0.5884066975152941, 0.5420053903087978, 0.7840742107173263, 0.07103293249287579, + 0.8572119394581306, 0.5395726401976545, 0.8707313735578062, 0.5715047898300967, 
0.855336187908017, + 0.8241054911600366, 0.24786316910119555, 0.47406589969297297, 0.5145990085613465, 0.8541020027122685, + 0.46226143243492035, 0.4670590666020392, 0.377732416633768, 0.37886498413688585, 0.4621822083435302, + 0.09655940030600907, 0.37622646274683946, 0.7349955594555352, 0.16079780642882713, 0.5661583143004812, + 0.3754536075891237, 0.9547114281792741, 0.026233819587968332, 0.9106454871097285, 0.23109838051074427, + 0.23318802256305826, 0.6416293524794912, 0.17554298026024773, 0.26652656251072004, 0.43249107307395207, + 0.19568628449811376, 0.7648122752472973, 0.40142738686603874, 0.40693340266579436, 0.28429857709682227, + 0.5902161490364923, 0.23405113906650388, 0.056092304981274155, 0.6996171147385536, 0.00781691329115608, + 0.47085915459268524, 0.150063466262228, 0.8908665837266646, 0.14240827362868846, 0.06154436361479121, + 0.6569751230209416, 0.920198564024848, 0.010032358443275102, 0.012995801393796858, 0.11690371538489985, + 0.5571251732839778, 0.3979903309706505, 0.4192396615212136, 0.8508601819697378, 0.279604293162519, + 0.8950293014208512, 0.6392541863634652, 0.0011517350795945402, 0.3179828820733691, 0.7868719388708943, + 0.16343019791595848, 0.45284937850754714, 0.8608042708557483, 0.12136494753753657, 0.19798316729300292, + 0.4141899501695743, 0.7118917475703413, 0.8218969478205516, 0.16535271213628477, 0.9058501544549272, + 0.8437188991596719, 0.9766799456588338, 0.8567399789576292, 0.4352514529217414, 0.6808184152525771, + 0.7065586095515894, 0.8555600890910067, 0.623127746212215, 0.747760955239228, 0.7231965027565961, + 0.9526742194230162, 0.6643473929144047, 0.4954313852390353, 0.14020555470061447, 0.9472888972576935, + 0.8463815001810163, 0.12014088192118189, 0.2489315994112682, 0.3644482467677622, 0.3391838669145525, + 0.9775827838703164, 0.6364835932247739, 0.18663880545899425, 0.43434315006170543, 0.9259911753589466, + 0.9924878844025083, 0.3856873300304975, 0.4803346887258785, 0.38787474702322755, 0.4938456319074028, + 
0.9069136347688539, 0.18644481649281086, 0.4396215411214064, 0.036903591509810596, 0.7746037570681462, + 0.6974288480613592, 0.5751067756152548, 0.8264421581923695, 0.7659119030802831, 0.5657931668646882, + 0.7822278423256982, 0.808533876962095, 0.761629773068807, 0.7869592887064103, 0.6446085650793983, + 0.147703797288803, 0.15153092403338186, 0.4476560563653188, 0.11879153339008564, 0.8189861113024955, + 0.6575020735435918, 0.28322528648853385, 0.44886211693328903, 0.0035118223254034797, 0.0024571713622885127, + 0.7648823870386725, 0.7210764104642025, 0.616556845043056, 0.5975318578336405, 0.6208782186564221, + 0.6872004100798245, 0.17087811285859922, 0.29182221282307463, 0.4942428147993275, 0.22660695439179968, + 0.9170748964519404, 0.038718639734492744, 0.33208031793114734, 0.764238534300501, 0.6443681787183393, + 0.12474601741306279, 0.023867149366012086, 0.16707350078699712, 0.038379037197397214, 0.47064284297450454, + 0.06589826828656975, 0.17514420258469143, 0.8144254559841663, 0.9770461371713691, 0.381079777122051, + 0.17063556592155904, 0.5102931342925842, 0.18822021734070737, 0.021060973453037035, 0.6276312526017124, + 0.41067458217386676, 0.17624502657490493, 0.06309889991154916, 0.4757286389194296, 0.23360888711220384, + 0.5275436273483901, 0.46618942111551465, 0.8259781152662251, 0.48391169406642554, 0.4794895181659964, + 0.220047268980334, 0.3409459367181056, 0.8296909764204485, 0.14171689374558305, 0.909582371173019, + 0.051834948496720656, 0.892389047362956, 0.07102168443181744, 0.9420037583644638, 0.2875045651754615, + 0.43286210942540415, 0.004751033126319526, 0.9751736868358482, 0.26945596320651144, 0.3566778694735223, + 0.36861133501387167, 0.9809706016212754, 0.05411876516404135, 0.628604548871245, 0.8821388976738747, + 0.19463134219614753, 0.27004389295833675, 0.29660097478905667, 0.2766138040829982, 0.4937826265620643, + 0.2803191369257856, 0.29356145513705545, 0.2720049656556979, 0.2137674562471874, 0.5641334745297626, + 0.4652122740697997, 
0.5527764007134675, 0.2771049132714265, 0.8405051547703243, 0.9922349376500471, + 0.2871856735550452, 0.7781693131100709, 0.7029901104801899, 0.4148792774498472, 0.70250560426075, + 0.14250597504899842, 0.5051855485918096, 0.8868555851092578, 0.7411408944034059, 0.57656914189662, + 0.5606745080228348, 0.6761799412268091, 0.1356744427576113, 0.7477400509890159, 0.09057923342992225, + 0.9603755134468333, 0.24166612486805816, 0.9365643543694223, 0.6278899666238881, 0.9167695771391512, + 0.9104989798750418, 0.3822802235503563, 0.24485897195744344, 0.5940921036615089, 0.8451888247183171, + 0.03911968762741813, 0.3307599402107675, 0.19766898733665061, 0.9052873135897423, 0.009918541286799809, + 0.7828794366278582, 0.6839676923991466, 0.31446232390877327, 0.36457252683522756, 0.24278691454078338, + 0.7621107615417726, 0.11601224618291417, 0.5421376354869643, 0.14399027852311141, 0.08472555325190867, + 0.39647310980996087, 0.8312531636073637, 0.28117583723512807, 0.25563122791668114, 0.21421666418111796, + 0.10406917247312042, 0.19865367646322263, 0.6167503396510216, 0.8071819782936023, 0.14941527683842093, + 0.36546325633284493, 0.8628333420437356, 0.9163050771088229, 0.5373297123213626, 0.4662949176984196, + 0.9991923980972558, 0.2714703487234702, 0.2133084787107239, 0.7379929284335528, 0.5748277086082783, + 0.7423918859901605, 0.2508639245179938, 0.5869964513283881, 0.4675060134913003, 0.32295183679656514, + 0.4559443017244216, 0.9812980074238206, 0.4706749812166604, 0.2921218451649672, 0.33019397705840037, + 0.43489193254471825, 0.3941053921380935, 0.7053853428678178, 0.8614874801300464, 0.7456300751677036, + 0.617600425956734, 0.43426111117080757, 0.4625791481571955, 0.5652719533881609, 0.5175723647127469, + 0.2686347182149137, 0.3354620354653778, 0.44834020622301407, 0.42395916668728717, 0.9156480830306547, + 0.7070970521308528, 0.8331805112602307, 0.9808635163437694, 0.7798825181178566, 0.8540106732789453, + 0.3092126324432367, 0.6262548164405474, 
0.40160624612048856, 0.28272129944647095, 0.40483259940948424, + 0.09017342631334502, 0.35413975002107856, 0.4215935674188409, 0.14966162014933604, 0.6871439339671128, + 0.1260635770067572, 0.44663016285658497, 0.8313466700309183, 0.17685203056882326, 0.27979140115483725, + 0.16622952991905793, 0.34900299452977235, 0.27037612729706706, 0.713919830422035, 0.18172893239062426, + 0.2529773635219552, 0.45465076557776485, 0.981199266871307, 0.10562689284011406, 0.05874681009041205, + 0.5238057040715188, 0.6693215409966775, 0.7449507594166878, 0.011063131370617874, 0.07691029522466009, + 0.4934963691876564, 0.39957079625047065, 0.2540779469158905, 0.39846011104236867, 0.36538436401335717, + 0.15361676424072057, 0.6584820098951952, 0.37792729759190136, 0.550204417695396, 0.30603152447000326, + 0.5164094633106974, 0.44249065473043125, 0.7588986683209246, 0.6845438556043444, 0.6040478455499635, + 0.5116904879280413, 0.8898006701959784, 0.7399712679852177, 0.2336662188944475, 0.7388394895852001, + 0.9538984147659229, 0.3112065842869294, 0.7680927998881413, 0.6841746291087246, 0.8409788814852021, + 0.5688125740478548, 0.5174261983921327, 0.8674355770611861, 0.22818233479986216, 0.6765620708428703, + 0.7622303773870567, 0.22400011032606482, 0.0020026786914055794, 0.8997372794861339, 0.6862110148169684, + 0.6459391314681788, 0.33895553956798596, 0.16172945211030598, 0.2665259835637993, 0.12206175290131605, + 0.15312848002048507, 0.582767706006426, 0.20993774248784414, 0.9623252657198269, 0.5899292800086123, + 0.7482079102220434, 0.10707790453440946, 0.7235608133931579, 0.6624110868318063, 0.008223235280982344, + 0.03107169521257047, 0.640260664909671, 0.5042328480905443, 0.9577072093023093, 0.3673595477453472, + 0.3702383556858654, 0.8616757892110115, 0.8475163038855884, 0.0023076194317925847, 0.9520731395173417, + 0.6814191735363567, 0.09317226220309016, 0.2375388719117737, 0.6137472473092337, 0.05603948420954197, + 0.15488052909951278, 0.66191467125411, 0.5111460586748117, 
0.35888628786369703, 0.05125732646208947, + 0.41151637359492077, 0.8928022055477935, 0.5966950825260615, 0.9583135940309339, 0.43600250461129864, + 0.17225566614426924, 0.014049664372590032, 0.28047904770696586, 0.4852907979238138, 0.8656980825419603, + 0.22422042503104178, 0.5101878953761034, 0.26237569141698025, 0.80443448939929, 0.5696183464330247, + 0.005878771424188023, 0.7294317133290991, 0.7169042496671583, 0.6754406021840288, 0.25447979211694116, + 0.6658781626439674, 0.8160451706486238, 0.5329943911422461, 0.06528309412938993, 0.93357557715282, + 0.07711735430740096, 0.771909773905311, 0.5990007053028108, 0.8635178174057434, 0.42719889494650143, + 0.826581992069732, 0.9449442428947165, 0.08634325268259369, 0.18378045974821677, 0.027021667330288657, + 0.7197133644724168, 0.01252483392898629, 0.259692323124718, 0.9095428798401878, 0.6332579040841826, + 0.2745010968952273, 0.39099296558300245, 0.5068158178625776, 0.7838294980264872, 0.25596259039679425, + 0.37982798514793914, 0.6712396401944386, 0.7018453422389489, 0.537550613453831, 0.6003995894084154, + 0.8327295607228997, 0.2696356893402736, 0.06819848778854909, 0.4015736852642504, 0.6050458792626395, + 0.6990000524368737, 0.07248344456646405, 0.5796161795730663, 0.6770516665696477, 0.6639559101930096, + 0.04455216237864856, 0.7790515150101655, 0.9509250432331467, 0.15454991391401007, 0.3272511551932701, + 0.7236686851446756, 0.6058217168383617, 0.13742826030613675, 0.9700512147479452, 0.7518319270504007, + 0.46343882793690616, 0.6571893331380114, 0.5973802075585944, 0.8370908606922681, 0.6116620002796381, + 0.9843797936868443, 0.8195300753708519, 0.1426225105392278, 0.2514001644292745, 0.5821480481063563, + 0.23951615810506843, 0.8437574011780651, 0.9904431061326998, 0.5404155070995436, 0.07699155770525956, + 0.0031347298909260024, 0.5676060591456907, 0.29236622278798463, 0.4965442982192305, 0.1537414279185617, + 0.7586660213684354, 0.03810999309518304, 0.704808857028874, 0.7573338301154167, 
0.04628408349794133, + 0.9078593647977083, 0.18854433867049736, 0.20121382443408364, 0.1806575194352621, 0.3378761975061685, + 0.3594024013470921, 0.04706957304444048, 0.9110051463445652, 0.6697757773566023, 0.5521873153411291, + 0.3081203425517297, 0.2733861671206076, 0.6850769508040535, 0.8519580131779299, 0.8238299352115052, + 0.44692487935732084, 0.632878554207089, 0.6554860518436072, 0.34854507526650924, 0.3665641617585177, + 0.18649116769634477, 0.2651796388826595, 0.8399826103171201, 0.20152636139953495, 0.2662937875726342, + 0.4343712963398306, 0.5426627874981451, 0.5933021353506663, 0.41247626245057356, 0.4272465292038824, + 0.2516629563087275, 0.5286987408008823, 0.0002496443870546594, 0.35432188934380204, 0.35175573461559784, + 0.42527027826024877, 0.9769467982418673, 0.5383156359262355, 0.8314931338503961, 0.9737591946763959, + 0.0982538860561668, 0.7127225734661942, 0.4102282338438321, 0.40756763501282034, 0.05904018343949069, + 0.852044036967288, 0.47670319089362045, 0.18227867796825847, 0.024594532234186506, 0.09660389753167853, + 0.44949384517169155, 0.09384753816567692, 0.3822258721388825, 0.617136759546696, 0.6602858559189395, + 0.583386157842853, 0.7287839713639058, 0.0023092611850982214, 0.2115074984972014, 0.7319840379060449, + 0.8688083036380179, 0.7388641307494216, 0.8768104265717728, 0.2756670744092533, 0.2180635337072514, + 0.29355884866525306, 0.4741917424025329, 0.7751872225238126, 0.49512574517741414, 0.4343278373265286, + 0.27331544209481184, 0.6022580717413262, 0.3171535568464592, 0.6563813829254826, 0.38423989072855613, + 0.36401778528445394, 0.46384003598712475, 0.7534985606386517, 0.6813174782132152, 0.5890085618833815, + 0.5423671524932987, 0.6975982642757892, 0.7729441143536179, 0.3825117200801388, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_HWCN, {4, 1, 16, 16}, {2, 2, 16, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferFracZHwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), 
SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFracZHwcn, fracz_to_hwcn_fp32_success_gt_cube) { + float data_4d[8 * 2 * 16 * 16] = { + 0.5262470840946074, + 0.18562000104563015, + 0.5615369673938634, + 0.8587599751019871, + 0.5509884807270933, + 0.4014820342237435, + 0.7254703259240384, + 0.9221892139151201, + 0.9051123582670892, + 0.6318024147565033, + 0.27926503492559474, + 0.8492756730986297, + 0.5155877026309951, + 0.8045325552445173, + 0.174201977964776, + 0.5701019652356103, + 0.2599275570805496, + 0.5475276584465685, + 0.8049760798836795, + 0.6775131703780617, + 0.7617937037969288, + 0.47784809966988495, + 0.1989437316686401, + 0.31742454191999847, + 0.42181386769365103, + 0.6639389629436601, + 0.8686676471676089, + 0.22309451034455818, + 0.5211799202598583, + 0.1976179240322029, + 0.4939811012007336, + 0.41770317978608806, + 0.9853040510244775, + 0.09778460335243722, + 0.07684018109314716, + 0.6335452075218833, + 0.39168888702909943, + 0.7592483976125082, + 0.42637965326347016, + 0.9153832428923505, + 0.36904313835896374, + 0.07085538901262733, + 0.706538437689299, + 0.6788589266221415, + 0.025410373401994035, + 0.9663406916171869, + 0.7549932146581845, + 0.39059111438681227, + 0.7662270734030949, + 0.9369607968872835, + 0.836905044826716, + 0.06934671759319733, + 0.9881681709664655, + 0.806353074077933, + 0.2533875804046045, + 0.981068057585326, + 0.8900874340490004, + 0.889802896968625, + 0.9032637461433765, + 0.7476143557236724, + 0.4211218161868183, + 0.7313928694621235, + 0.9977056137259912, + 0.6235316600325075, + 0.530841434614198, + 0.3507058699739156, + 0.5708014024932662, + 0.3052844915173061, + 0.08674476352301563, + 0.9899624018687854, + 0.2841260136074859, + 0.051224583281113345, + 0.027358738238934976, + 0.03973713677188995, + 0.13261013546882394, + 0.8036940145332488, + 
0.9556631725208989, + 0.6389017364085043, + 0.7826300377042417, + 0.1920886526285347, + 0.44626734531908463, + 0.36736233181384337, + 0.31658943655651617, + 0.16206123425822094, + 0.6013214250236357, + 0.8119977866990217, + 0.19554158383602693, + 0.1269243503058195, + 0.28551778681471607, + 0.8624172944156852, + 0.630982390896547, + 0.5150584517188221, + 0.898451762546253, + 0.15993269832272117, + 0.264293959730718, + 0.8837861921740301, + 0.3119271649617613, + 0.5470288990041243, + 0.25284498064012806, + 0.6309942568896834, + 0.38862251626596367, + 0.16790321800722863, + 0.2651072827146369, + 0.8471427057082983, + 0.9089180108604413, + 0.7391668948415816, + 0.9815538780528091, + 0.8074529608513048, + 0.5922343123206459, + 0.2881933320989839, + 0.6437064951687319, + 0.00244999543928337, + 0.6626855421687033, + 0.14280222685394472, + 0.6162999456081352, + 0.1468663503533323, + 0.8529965121334725, + 0.1978111472284153, + 0.5348799410677707, + 0.011886125274325088, + 0.10375139630325214, + 0.3154865236914467, + 0.11940648503190532, + 0.9219521205464487, + 0.42714348866071816, + 0.47918295431768, + 0.8028029041388223, + 0.9032023458460028, + 0.8752102914244295, + 0.6611616854266359, + 0.3803093435446827, + 0.6385030188827648, + 0.1353615951681887, + 0.30251437601177134, + 0.1483796925608314, + 0.35159318416873464, + 0.8915997500927774, + 0.303678071770221, + 0.3999637600541244, + 0.3123999460017278, + 0.5944175367864415, + 0.477825893004286, + 0.1143959237748291, + 0.5759571178613899, + 0.33084687222105125, + 0.9772473817588189, + 0.4481765323045367, + 0.43834356117895534, + 0.0521211814839041, + 0.9959859264509981, + 0.556498522203592, + 0.5401203817789871, + 0.6884504501624745, + 0.46149787037437584, + 0.4987208340668672, + 0.3260055497429517, + 0.8894971894751379, + 0.2999085208541824, + 0.4714671205220138, + 0.554781248162564, + 0.6531461176818982, + 0.6016776985132304, + 0.8489721823323437, + 0.8033939962925624, + 0.6100591466551525, + 0.15561202809244157, + 
0.8570216966490248, + 0.04933408890510327, + 0.9428985656137959, + 0.8888712125662346, + 0.4460696278783418, + 0.042811137094394214, + 0.47756000468041915, + 0.4447815966411831, + 0.7415256538727033, + 0.5175571912577063, + 0.5459428512726848, + 0.430703741441283, + 0.6682760511716276, + 0.5249175028286187, + 0.21691499814951154, + 0.15230729594665604, + 0.809611471896218, + 0.7673952593308585, + 0.44091834777473504, + 0.18391016758687107, + 0.3090706970892756, + 0.4816380510206846, + 0.7610279070927591, + 0.8983282086951327, + 0.32481136297228796, + 0.05408771148868641, + 0.41268456318648417, + 0.9057607332635397, + 0.9989705556041563, + 0.4749202032027596, + 0.7092037993551189, + 0.529660792438232, + 0.7937955587055074, + 0.20212932501794834, + 0.28423816905534316, + 0.088265655192032, + 0.6588572308735293, + 0.9770898113470604, + 0.9511870017238692, + 0.7896856360130685, + 0.9579790122176625, + 0.027702827316338885, + 0.48174502997628343, + 0.55272674865965, + 0.2895276989780864, + 0.591794387856851, + 0.8461377506230634, + 0.019816952365595886, + 0.5416760712708776, + 0.6982028114825873, + 0.20128047489619438, + 0.6449865423234372, + 0.5849788271829304, + 0.5856503944043783, + 0.664750968203006, + 0.6859801602679525, + 0.6279271887461749, + 0.9025322959302119, + 0.7298232209177002, + 0.18511892014922127, + 0.8866868870526441, + 0.3849372094271225, + 0.30662066021542467, + 0.46579144492276736, + 0.7048016949513274, + 0.9670977496947673, + 0.5804499113140081, + 0.28784685447468095, + 0.054424369814474405, + 0.9912230663260104, + 0.1596593206175151, + 0.300616943079272, + 0.34437010406551705, + 0.5327961410085527, + 0.997023168187204, + 0.53451581075253, + 0.9983971734501372, + 0.06338609804492179, + 0.05077506229920048, + 0.9793760450641456, + 0.1596794108571158, + 0.6880104017436701, + 0.9414981905155704, + 0.267538173836512, + 0.5863833444906936, + 0.732536193840161, + 0.5167681922631239, + 0.27073135067757326, + 0.7977612357066789, + 0.9720933373941855, + 
0.12333578898998188, + 0.7203386689061504, + 0.48210589435647444, + 0.0589251101400885, + 0.283961844422973, + 0.6480555870064912, + 0.9904271116015025, + 0.16730737556899755, + 0.3116581806526617, + 0.362512313135534, + 0.8288575816171124, + 0.13388772277018135, + 0.6894476027681907, + 0.45285366443683217, + 0.31371373738067987, + 0.17723311527819086, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7918682117509727, + 0.9611897727382896, + 0.04764154842922541, + 0.6359791414767878, + 0.9590555164457821, + 0.11632079446525156, + 0.05430567932876784, + 0.20665132910683937, + 0.8674524226969336, + 0.16866101140444134, + 0.20246253236279066, + 0.763565017073811, + 0.345089429452039, + 0.3460664692865518, + 0.48093210416987786, + 0.3644431089494301, + 0.25491368945321924, + 0.08098628401032482, + 0.34169536734404193, + 0.9718774298853685, + 0.9154182706335288, + 0.9062064954352042, + 0.5711512432074957, + 0.2564513917885336, + 0.5413848234939881, + 0.9713537390911877, + 0.500434682755821, + 0.514956672854547, + 0.42387498806569357, + 0.26206043781283317, + 0.6836396369695441, + 0.021097153792147383, + 0.6786050109290417, + 0.21451639840748682, + 0.17867782316557934, + 0.5301916140657921, + 0.04887411317718848, + 0.6874767491375144, + 0.34440312445911503, + 0.2203555055348826, + 0.9564545919115185, + 0.8143108696586439, + 0.043501889994028375, + 0.6412821290242677, + 0.553940474472896, + 0.8814523193341739, + 0.9386230279466492, + 0.6413668921501134, + 0.8477125241211667, + 0.5768977156844659, + 0.02509653619032559, + 0.600470723278435, + 0.13304323107759775, + 0.2258362439419268, + 0.12109588787627701, + 0.5778491195905766, + 0.4831985839345372, + 0.26621359640781606, + 0.5248138586045901, + 0.15133563665166538, + 0.7122946781750881, + 0.9196954749538162, + 0.02499106790133776, + 0.3107186380818101, + 0.8849321592347881, + 0.26513297341603714, + 0.7563252602059222, + 0.3047056693815837, + 0.5393661935414817, + 0.6794552085831682, + 0.5036956130457365, + 0.44199698196247816, + 0.3175796945764736, + 0.08215570422238005, + 0.41823960616201317, + 0.754620854174738, + 0.49778930944748756, + 0.013175970443288665, + 0.020922504745778947, + 0.003980535400756824, + 0.9267716104061233, + 0.7324561265662716, + 0.08315155168910016, + 0.01701205986875387, + 0.3407684898687162, + 0.4002532098706061, + 0.4143238858611441, + 
0.9286489819890967, + 0.47310127132389546, + 0.13220260240206427, + 0.20923524191957044, + 0.9616288151903299, + 0.08787827317100672, + 0.9651538291433787, + 0.6623553583485186, + 0.469297808440877, + 0.7183148924993774, + 0.6113987795131144, + 0.8599360432496744, + 0.3545446489011168, + 0.24796297355379537, + 0.6129527919109219, + 0.7545156467368622, + 0.16106749553848354, + 0.03129977774073123, + 0.03762079121167172, + 0.5746001932664768, + 0.48416383003273333, + 0.4116271712176073, + 0.9664622879357454, + 0.11335985397026105, + 0.18781138643845274, + 0.6788020686244602, + 0.97410981182431, + 0.006232202864729031, + 0.8903502367183758, + 0.6616846208040407, + 0.9181514938492773, + 0.8903238156374075, + 0.7289567024055328, + 0.09465791619238773, + 0.8809942300084808, + 0.3157636473721608, + 0.17626499659844597, + 0.03298376792526192, + 0.2547219272659277, + 0.156105149395114, + 0.9962229356813803, + 0.5311528841668889, + 0.28602537656896476, + 0.7970828784609515, + 0.6761119017648953, + 0.9920202949016083, + 0.8379077569584945, + 0.9237649345790961, + 0.7800275096211687, + 0.6487646791937893, + 0.3871595989095862, + 0.7504657703944658, + 0.19691548800582348, + 0.9077995672389654, + 0.07846151780675747, + 0.45865493017794867, + 0.9424117884794422, + 0.1093467426944017, + 0.082588601751441, + 0.524531268699229, + 0.668933677287048, + 0.15633491687065648, + 0.3670325266283059, + 0.5595352995199334, + 0.4602184718863326, + 0.004173349234257051, + 0.3522705354218415, + 0.00701735725790209, + 0.7105429002989472, + 0.9071076418105887, + 0.03449929613125302, + 0.7646711144904659, + 0.8790055264273281, + 0.611804778445946, + 0.4667126388636281, + 0.6613146415694391, + 0.5268301179582384, + 0.026261169320518363, + 0.5339699697866235, + 0.5018247103213517, + 0.8320790096886941, + 0.8860987348048975, + 0.14563040682141926, + 0.8817930001278131, + 0.8278691246165878, + 0.9987878921135839, + 0.11132378249497388, + 0.9497821106888364, + 0.3584038351896536, + 0.46525512153228654, 
+ 0.8382315395128299, + 0.7461233724121545, + 0.9773425643718713, + 0.16633653783250157, + 0.052806185584314935, + 0.08791699089229132, + 0.8836683716945654, + 0.9289313672930872, + 0.973217088329034, + 0.771556223654079, + 0.10395315243238379, + 0.1544937655681795, + 0.7459123276004201, + 0.253649950983154, + 0.5865220856902467, + 0.7043630522166378, + 0.04933702607569013, + 0.4650602243220291, + 0.7807337230105253, + 0.4141931338432051, + 0.7987039854123447, + 0.4626691086984821, + 0.9131505890812511, + 0.9708329061611259, + 0.06342421633800865, + 0.5791031128679329, + 0.5582173499302838, + 0.25291732105237286, + 0.3116061877585733, + 0.8614576279397507, + 0.0767019478308203, + 0.44016047563477556, + 0.0005326172316258981, + 0.1784929253615195, + 0.3058007319157179, + 0.46844833009651166, + 0.5895726611280803, + 0.06678373419547612, + 0.7040449162909432, + 0.25717075199620165, + 0.6725005753375712, + 0.9557077641602835, + 0.9239316412606712, + 0.6645990918035876, + 0.12784746937617553, + 0.26399573580277913, + 0.4377855787782695, + 0.09121426936776944, + 0.5865353966441388, + 0.7668412602690984, + 0.2713475977837423, + 0.8679827200390235, + 0.11522134299953346, + 0.8976253770659826, + 0.8647007528629761, + 0.34192084390485955, + 0.8894506890532137, + 0.63919490915165, + 0.6054931809892881, + 0.47255437589764915, + 0.12253067262655448, + 0.26769500242894306, + 0.6007392726471552, + 0.8705469085182395, + 0.8620736373525405, + 0.9271678636205023, + 0.5544805715424246, + 0.38243534174482696, + 0.13421234322757603, + 0.052478391790792944, + 0.8146791198796317, + 0.4168823676588157, + 0.22129737642388003, + 0.8107995120994241, + 0.5901150707686857, + 0.4777615517705799, + 0.38903273975067665, + 0.10660146655633229, + 0.5539509687747731, + 0.17215698913489486, + 0.6306945280084336, + 0.7385021172863362, + 0.6015664973527335, + 0.9244808767432451, + 0.1657941846464276, + 0.8162929960857885, + 0.5023926042439503, + 0.4780097638691799, + 0.49357870687275085, + 
0.8357730181411765, + 0.8515700237526853, + 0.004448282702601558, + 0.4566782228864755, + 0.8620968379208583, + 0.14823858663651712, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4796467505964924, + 0.09103544362646132, + 0.9254689001478529, + 0.8000810110714159, + 0.05050480490924614, + 0.5267360009479215, + 0.3670167360070553, + 0.6579293314393269, + 
0.35374182053158953, + 0.9798945307705291, + 0.7849211483431402, + 0.02428987467416799, + 0.3449537236772878, + 0.8912149326674362, + 0.6573943617231828, + 0.527839378558378, + 0.6903679291544869, + 0.6775107296333263, + 0.14779175118597787, + 0.9736194862266951, + 0.07188406888915577, + 0.4111679064023177, + 0.12650025225065376, + 0.52354628216733, + 0.8189300707822506, + 0.5698010276007985, + 0.5532788316792159, + 0.253357557252365, + 0.5728748980725252, + 0.6942557299464097, + 0.7044186941401308, + 0.4575541987011301, + 0.9400618464944154, + 0.10117982191354724, + 0.19696083132414388, + 0.5094291973347561, + 0.3636373390399269, + 0.13345061046806383, + 0.013845886991579959, + 0.4074209331005676, + 0.7714215773473824, + 0.5709760852241303, + 0.5868065461349722, + 0.8685667940719852, + 0.7209041850732071, + 0.2367984531926669, + 0.8154031716623844, + 0.9518430806804513, + 0.6174497372078122, + 0.07637851015892627, + 0.43579476640614967, + 0.8503495803323085, + 0.6482183809971421, + 0.7433371730802318, + 0.9443490362158871, + 0.6801247783621012, + 0.4860916942789072, + 0.7308547905086359, + 0.9696472233804034, + 0.32594628878515897, + 0.9921630819475203, + 0.8994861603528518, + 0.8603656823318697, + 0.5770391799057243, + 0.7591513267491136, + 0.9584463569118296, + 0.6134307586410246, + 0.6296221119340748, + 0.735266347418902, + 0.3119013329695528, + 0.14874596710326138, + 0.9282208421152665, + 0.7091568048853436, + 0.34579047562430887, + 0.7847637185773797, + 0.19931150694854305, + 0.07041934621086088, + 0.3568737629913865, + 0.06838422316937898, + 0.06116336748279105, + 0.29182113691713263, + 0.8881704417177094, + 0.6371115962253767, + 0.779165675792864, + 0.2609423421314738, + 0.029581058448018727, + 0.6073209693848843, + 0.36214013008585844, + 0.1033360965975485, + 0.4807162361096927, + 0.0006487337432082851, + 0.9017522677471346, + 0.5487237401878415, + 0.5793736927661238, + 0.9008211806019828, + 0.7725544638973622, + 0.40683809791301695, + 0.9911241870823463, 
+ 0.6008861931407631, + 0.1870684081477867, + 0.001390312382257064, + 0.737060682804882, + 0.46388334782877216, + 0.37346697818658003, + 0.25692629387974253, + 0.9870663016043032, + 0.7975887097949725, + 0.40692604522463827, + 0.6369051078738235, + 0.8350551237810963, + 0.1538035934869676, + 0.10671648828459357, + 0.6855441241850057, + 0.3417072689174143, + 0.9835005351975031, + 0.28225150911101116, + 0.4867468616871172, + 0.04855763799354207, + 0.1098695842088494, + 0.13168747524145386, + 0.0063239982168763476, + 0.2630946287007021, + 0.5506417091731192, + 0.6902764075602817, + 0.9616603400642277, + 0.7592693855776683, + 0.039397303780642434, + 0.688541995884717, + 0.33627384815926264, + 0.22416462198856957, + 0.18211187854500788, + 0.6974968168258074, + 0.39362298364780024, + 0.6025135199348383, + 0.7219978807943509, + 0.5206552125072349, + 0.41399365541263755, + 0.8566536950947363, + 0.5582351457222449, + 0.6415261809393741, + 0.77549325800219, + 0.24982822569089858, + 0.44113929947515973, + 0.7107378206312127, + 0.7174843858993726, + 0.1835425354927137, + 0.7691578614285655, + 0.9627117835302494, + 0.17762270668038382, + 0.9322016562688125, + 0.14306445919007293, + 0.5056598810020343, + 0.6160528437592933, + 0.8186427731917715, + 0.9971410624826377, + 0.6071798192068825, + 0.16675314377923556, + 0.7721123612576728, + 0.3881273097877219, + 0.7313291731374914, + 0.4265957012941558, + 0.5678358270399689, + 0.9764530981527227, + 0.133492746039518, + 0.863293008208188, + 0.4732854692138253, + 0.4499116083864544, + 0.1270869099841716, + 0.9872683922874925, + 0.5448276967988157, + 0.8085286239910665, + 0.06807167107365653, + 0.04531123904698342, + 0.41130846764003626, + 0.45217981256195905, + 0.49500737451758503, + 0.09481108205452049, + 0.703301230417089, + 0.9275298163403998, + 0.30182501278118523, + 0.7608826601501297, + 0.6740131359666773, + 0.8793410938504569, + 0.2123848510847658, + 0.6701705548801383, + 0.42758276519556737, + 0.6511816867092731, + 
0.5589656957444312, + 0.18241400468703362, + 0.11800518188559928, + 0.23245112466812745, + 0.33442635506669294, + 0.2206907485865991, + 0.36676192127614693, + 0.04439801298756285, + 0.6600144215196212, + 0.5318460079756204, + 0.6242678706062978, + 0.8501099959402697, + 0.3166843415770534, + 0.9866943323945742, + 0.1271784248683363, + 0.2337609565804203, + 0.9941213515971187, + 0.4219072337419115, + 0.40040064093676375, + 0.4988434763150812, + 0.6130125527795475, + 0.43836508334434177, + 0.42173628909953265, + 0.551017371163644, + 0.1968119456969003, + 0.5438157260240885, + 0.3196241929013254, + 0.7032342631302387, + 0.7509854732305814, + 0.8708226403361269, + 0.19588962063567117, + 0.5327356266169655, + 0.9022612012427644, + 0.6348708519190531, + 0.8096424198527009, + 0.41653427054689374, + 0.7691900499918106, + 0.03858693381829026, + 0.4300682786265866, + 0.7205889229919754, + 0.16380617828733002, + 0.769813510621498, + 0.029126233415790015, + 0.8007234290388204, + 0.7172873280713993, + 0.8865635181190356, + 0.3141150285693117, + 0.12120186066609784, + 0.9078437887038926, + 0.21719230415637303, + 0.1018441581467846, + 0.1034164351076462, + 0.7456351056689471, + 0.7149571552182608, + 0.14219662693688206, + 0.4842804471308868, + 0.8078383801034981, + 0.6583387212866237, + 0.5327422453923659, + 0.8248017025613454, + 0.27813867932753855, + 0.5645966730958535, + 0.6058861014253342, + 0.35297993714423737, + 0.63331261629013, + 0.8200569850384922, + 0.716327213866516, + 0.6497134763355603, + 0.3096907632260236, + 0.9373405756268672, + 0.8150029538555531, + 0.8805866651024098, + 0.6809539681627669, + 0.529136834340884, + 0.91457749551686, + 0.05284929495805646, + 0.5246414587679292, + 0.4141033527397324, + 0.06715003321072976, + 0.31646575772900487, + 0.06436568796081588, + 0.9022988974146449, + 0.2794888632765272, + 0.4377172334266779, + 0.26114230800062044, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7937225342067015, + 0.8471619893457908, + 0.382560187301876, + 0.049569157865243074, + 0.8083240090993435, + 0.43064622796152574, + 0.4400382572906255, + 0.3311939137649188, + 0.0625158359350092, + 0.4260791952020392, + 0.9688391237644197, + 0.5935914756673489, + 0.700652008671759, + 0.6009468709183672, + 0.1401191406929443, + 0.3893546805316366, + 0.6392629267080432, + 0.5942232265682109, + 0.3190534230287244, + 
0.4905970487167317, + 0.19598627194813623, + 0.8547771895292302, + 0.11000661087563446, + 0.6246452113273748, + 0.39160036459673786, + 0.9635687246998184, + 0.9037991893107805, + 0.28944940732885804, + 0.6416750259188707, + 0.7410021886161868, + 0.4351739951164847, + 0.4604759643994246, + 0.4735878147669671, + 0.17081584343839806, + 0.55642212629952, + 0.9359363889293013, + 0.24011337538080146, + 0.5602994188130606, + 0.937966627900615, + 0.6815237923749249, + 0.43130097279022905, + 0.040576248270011606, + 0.5248760897670668, + 0.07143611768224323, + 0.739003452854678, + 0.696269191178893, + 0.4029047150659123, + 0.6527838205694618, + 0.8991559081336787, + 0.19626250900544784, + 0.4552609450593593, + 0.7657379455376203, + 0.375097519078059, + 0.6517484005602143, + 0.3482023897442358, + 0.3227991650920835, + 0.7595996642185282, + 0.4506662406445222, + 0.5805460564741822, + 0.970640600339885, + 0.9136311948354005, + 0.06868418937047505, + 0.6699294765915349, + 0.9292554265448705, + 0.7668004493627582, + 0.934304380974092, + 0.9273512496420965, + 0.2721534676723715, + 0.1318375396328999, + 0.31477582187626985, + 0.5608305618704201, + 0.5020648939439555, + 0.7051596280927722, + 0.5536870372663544, + 0.3509837109554569, + 0.04482234945829422, + 0.5502105249747118, + 0.33605904856179736, + 0.6573926282763929, + 0.3612392305962885, + 0.616605158291584, + 0.06340444724152905, + 0.34886696285669094, + 0.4764309946485116, + 0.4838760745556058, + 0.7565179897153195, + 0.567040908728685, + 0.23017428519331895, + 0.8078082635701143, + 0.27079559433796196, + 0.7301172602117544, + 0.4721967273155353, + 0.7580554296017025, + 0.15353543339760822, + 0.9164508003679109, + 0.06549665619801204, + 0.4189440850618942, + 0.9067731924701304, + 0.900530266743957, + 0.4537271021133793, + 0.9771787467311074, + 0.22565407587908615, + 0.6087511473257247, + 0.7641796940749915, + 0.9165594168049757, + 0.4529731827372153, + 0.14946012617105275, + 0.3023666280729985, + 0.07003818242446003, + 
0.21699652767305433, + 0.4601016058547346, + 0.05544507896604156, + 0.21881213058804527, + 0.3635574756455685, + 0.9213723551697597, + 0.5229363131827134, + 0.5303858912662304, + 0.30404241435399293, + 0.2823783601265032, + 0.07829789350704186, + 0.45609692566641136, + 0.30394717751467293, + 0.5486040186666324, + 0.7292874312983759, + 0.39612552881451235, + 0.8905554735112293, + 0.9997214438162066, + 0.24623724982926498, + 0.15404736318394507, + 0.9585949210149967, + 0.5611380768799336, + 0.8565596193614202, + 0.30139303668464923, + 0.8803718916197911, + 0.7186502688876168, + 0.040973773653967926, + 0.3186458398987321, + 0.9417743345487153, + 0.5338975712422733, + 0.5351131006083096, + 0.48703692646781394, + 0.3998355695810606, + 0.9511835082140031, + 0.015068329553183823, + 0.9514719168931699, + 0.8896806595274409, + 0.1693984075782411, + 0.649436416272076, + 0.12993891116603073, + 0.4137964030742799, + 0.4198632568272431, + 0.94546991269337, + 0.9097533917281811, + 0.1595973054129839, + 0.44042026945061663, + 0.8820488003536908, + 0.5012908826773911, + 0.7362667515842057, + 0.23887605701823889, + 0.050919626909062043, + 0.8572388895279167, + 0.24230683806375908, + 0.9789876065987205, + 0.02577272568800959, + 0.04847260524386765, + 0.8790220739851453, + 0.8259888928683129, + 0.7876333393448203, + 0.534415606150436, + 0.524944603773529, + 0.6484483129128772, + 0.1959992842019812, + 0.2913253081495982, + 0.9812815339236836, + 0.9141244622742661, + 0.359039158748261, + 0.7695651684353306, + 0.42486391206575047, + 0.5990540756999841, + 0.9426749539934983, + 0.9010709535970589, + 0.4006967107899765, + 0.05965337112024971, + 0.2636616405004455, + 0.1630656009058158, + 0.6600573849870056, + 0.5982932509679235, + 0.4722945712586679, + 0.5683895822123131, + 0.42086188276413306, + 0.24465444867106156, + 0.9608194950832173, + 0.5200964904844124, + 0.5693783254589333, + 0.2948703269163482, + 0.6580250607035528, + 0.2066636969129345, + 0.7118073681091255, + 
0.39804210128074613, + 0.9635766491233103, + 0.6092085952172871, + 0.7535188400072915, + 0.5024079324325041, + 0.16868421008162604, + 0.8238181390218491, + 0.4261354705753385, + 0.5389422916165942, + 0.1935051147588388, + 0.4641435536186225, + 0.5856389236236841, + 0.4054553126348215, + 0.6054538946034959, + 0.9695393749554523, + 0.9457957224013442, + 0.5763590906954813, + 0.14317888051290673, + 0.6613881656920997, + 0.13345728238389631, + 0.15934952832630112, + 0.939100968047668, + 0.8968600751556971, + 0.42908909714058385, + 0.1249239270120327, + 0.45554771240835235, + 0.03166344639403551, + 0.7391544336186706, + 0.8539578133878286, + 0.956548439779998, + 0.9130349746153508, + 0.7685272816303742, + 0.1421473448098215, + 0.7662780919434238, + 0.37561050141094254, + 0.4194083778307489, + 0.044677234299907, + 0.8066650727668283, + 0.6586789174328685, + 0.5966904035729225, + 0.6167832291917559, + 0.4551359427269517, + 0.3498043078989227, + 0.23984707278091588, + 0.7129570728658369, + 0.014037720747974713, + 0.3330714042912156, + 0.08774103421013557, + 0.3270588678120382, + 0.7787717251199148, + 0.20655533285441197, + 0.27327236506564223, + 0.5180437643536496, + 0.8428255880041005, + 0.3823116443736294, + 0.722291236125945, + 0.6444181146074366, + 0.9885862378989301, + 0.9857887951736034, + 0.6857869301950458, + 0.539156732806676, + 0.8953652205195822, + 0.6509836675522569, + 0.44046857608248824, + 0.8854425633754791, + 0.47614344327016855, + 0.42052902928145486, + 0.21739488894691872, + 0.06636511541523027, + 0.4162563664751737, + 0.801092747448573, + 0.2306325468658711, + 0.742728341300588, + 0.5085867099448891, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5640187603293254, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8159182286466172, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9689945218115408, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.023787420592958464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.562112480129902, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06362909341697354, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17544861898792408, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30293762582734984, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9450217161448506, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24917343952483273, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7624941576787222, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.33204566137170677, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.326342732626145, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8509648176357671, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20725907729195658, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7784497985223777, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9653764160329492, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9934545025486349, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9573970946648828, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.568010974298202, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8499024752990427, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08265269165731448, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7337208488716614, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.09393092375466039, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19708111012900464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6379106204223915, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10374622622411978, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.39949025762970214, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5461369552483832, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6355007220053059, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5808980500273655, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6080856168865105, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9917314649792296, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7529081189940015, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.017252629974433442, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10324581445230152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14560930585353093, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.91044107726182, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3082826533242927, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7781267748623237, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2614646975706304, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7182573197994735, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9344083597750972, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7302738535933991, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5426671684224912, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8728332634973097, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30988741330774505, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5421274741939771, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.25135035258940974, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9806913272944415, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2007751004710696, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7379813875076469, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4595423552899529, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3425280440560493, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7184009460589201, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7937379190850589, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40486160917649405, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.632494477827828, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.00043012606449288615, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.47308337689057733, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.5873125066010546, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5642130408118529, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05585449108260587, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0009067465907667271, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4213200693458017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.32810117888017454, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03201803778046464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17672885224825907, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + float data[2 * 2 * 17 * 17] = { + 0.5262470840946074, 0.2599275570805496, 0.9853040510244775, 0.7662270734030949, 0.530841434614198, + 0.44626734531908463, 0.3119271649617613, 0.6626855421687033, 0.8752102914244295, 0.33084687222105125, + 0.6531461176818982, 0.5459428512726848, 0.41268456318648417, 0.48174502997628343, 0.7298232209177002, + 0.997023168187204, 0.12333578898998188, 0.18562000104563015, 0.5475276584465685, 0.09778460335243722, + 0.9369607968872835, 0.3507058699739156, 0.36736233181384337, 0.5470288990041243, 0.14280222685394472, + 0.6611616854266359, 0.9772473817588189, 0.6016776985132304, 0.430703741441283, 0.9057607332635397, + 0.55272674865965, 0.18511892014922127, 0.53451581075253, 0.7203386689061504, 0.5615369673938634, + 0.8049760798836795, 0.07684018109314716, 0.836905044826716, 0.5708014024932662, 0.31658943655651617, + 0.25284498064012806, 0.6162999456081352, 0.3803093435446827, 0.4481765323045367, 0.8489721823323437, + 0.6682760511716276, 0.9989705556041563, 0.2895276989780864, 0.8866868870526441, 0.9983971734501372, + 0.48210589435647444, 0.8587599751019871, 0.6775131703780617, 0.6335452075218833, 0.06934671759319733, + 0.3052844915173061, 0.16206123425822094, 0.6309942568896834, 0.1468663503533323, 0.6385030188827648, + 
0.43834356117895534, 0.8033939962925624, 0.5249175028286187, 0.4749202032027596, 0.591794387856851, + 0.3849372094271225, 0.06338609804492179, 0.0589251101400885, 0.5509884807270933, 0.7617937037969288, + 0.39168888702909943, 0.9881681709664655, 0.08674476352301563, 0.6013214250236357, 0.38862251626596367, + 0.8529965121334725, 0.1353615951681887, 0.0521211814839041, 0.6100591466551525, 0.21691499814951154, + 0.7092037993551189, 0.8461377506230634, 0.30662066021542467, 0.05077506229920048, 0.283961844422973, + 0.4014820342237435, 0.47784809966988495, 0.7592483976125082, 0.806353074077933, 0.9899624018687854, + 0.8119977866990217, 0.16790321800722863, 0.1978111472284153, 0.30251437601177134, 0.9959859264509981, + 0.15561202809244157, 0.15230729594665604, 0.529660792438232, 0.019816952365595886, 0.46579144492276736, + 0.9793760450641456, 0.6480555870064912, 0.7254703259240384, 0.1989437316686401, 0.42637965326347016, + 0.2533875804046045, 0.2841260136074859, 0.19554158383602693, 0.2651072827146369, 0.5348799410677707, + 0.1483796925608314, 0.556498522203592, 0.8570216966490248, 0.809611471896218, 0.7937955587055074, + 0.5416760712708776, 0.7048016949513274, 0.1596794108571158, 0.9904271116015025, 0.9221892139151201, + 0.31742454191999847, 0.9153832428923505, 0.981068057585326, 0.051224583281113345, 0.1269243503058195, + 0.8471427057082983, 0.011886125274325088, 0.35159318416873464, 0.5401203817789871, 0.04933408890510327, + 0.7673952593308585, 0.20212932501794834, 0.6982028114825873, 0.9670977496947673, 0.6880104017436701, + 0.16730737556899755, 0.9051123582670892, 0.42181386769365103, 0.36904313835896374, 0.8900874340490004, + 0.027358738238934976, 0.28551778681471607, 0.9089180108604413, 0.10375139630325214, 0.8915997500927774, + 0.6884504501624745, 0.9428985656137959, 0.44091834777473504, 0.28423816905534316, 0.20128047489619438, + 0.5804499113140081, 0.9414981905155704, 0.3116581806526617, 0.6318024147565033, 0.6639389629436601, + 0.07085538901262733, 
0.889802896968625, 0.03973713677188995, 0.8624172944156852, 0.7391668948415816, + 0.3154865236914467, 0.303678071770221, 0.46149787037437584, 0.8888712125662346, 0.18391016758687107, + 0.088265655192032, 0.6449865423234372, 0.28784685447468095, 0.267538173836512, 0.362512313135534, + 0.27926503492559474, 0.8686676471676089, 0.706538437689299, 0.9032637461433765, 0.13261013546882394, + 0.630982390896547, 0.9815538780528091, 0.11940648503190532, 0.3999637600541244, 0.4987208340668672, + 0.4460696278783418, 0.3090706970892756, 0.6588572308735293, 0.5849788271829304, 0.054424369814474405, + 0.5863833444906936, 0.8288575816171124, 0.8492756730986297, 0.22309451034455818, 0.6788589266221415, + 0.7476143557236724, 0.8036940145332488, 0.5150584517188221, 0.8074529608513048, 0.9219521205464487, + 0.3123999460017278, 0.3260055497429517, 0.042811137094394214, 0.4816380510206846, 0.9770898113470604, + 0.5856503944043783, 0.9912230663260104, 0.732536193840161, 0.13388772277018135, 0.5155877026309951, + 0.5211799202598583, 0.025410373401994035, 0.4211218161868183, 0.9556631725208989, 0.898451762546253, + 0.5922343123206459, 0.42714348866071816, 0.5944175367864415, 0.8894971894751379, 0.47756000468041915, + 0.7610279070927591, 0.9511870017238692, 0.664750968203006, 0.1596593206175151, 0.5167681922631239, + 0.6894476027681907, 0.8045325552445173, 0.1976179240322029, 0.9663406916171869, 0.7313928694621235, + 0.6389017364085043, 0.15993269832272117, 0.2881933320989839, 0.47918295431768, 0.477825893004286, + 0.2999085208541824, 0.4447815966411831, 0.8983282086951327, 0.7896856360130685, 0.6859801602679525, + 0.300616943079272, 0.27073135067757326, 0.45285366443683217, 0.174201977964776, 0.4939811012007336, + 0.7549932146581845, 0.9977056137259912, 0.7826300377042417, 0.264293959730718, 0.6437064951687319, + 0.8028029041388223, 0.1143959237748291, 0.4714671205220138, 0.7415256538727033, 0.32481136297228796, + 0.9579790122176625, 0.6279271887461749, 0.34437010406551705, 
0.7977612357066789, 0.31371373738067987, + 0.5701019652356103, 0.41770317978608806, 0.39059111438681227, 0.6235316600325075, 0.1920886526285347, + 0.8837861921740301, 0.00244999543928337, 0.9032023458460028, 0.5759571178613899, 0.554781248162564, + 0.5175571912577063, 0.05408771148868641, 0.027702827316338885, 0.9025322959302119, 0.5327961410085527, + 0.9720933373941855, 0.17723311527819086, 0.5640187603293254, 0.8159182286466172, 0.9689945218115408, + 0.023787420592958464, 0.562112480129902, 0.06362909341697354, 0.17544861898792408, 0.30293762582734984, + 0.9450217161448506, 0.24917343952483273, 0.7624941576787222, 0.33204566137170677, 0.326342732626145, + 0.8509648176357671, 0.20725907729195658, 0.7784497985223777, 0.9653764160329492, 0.7918682117509727, + 0.25491368945321924, 0.6786050109290417, 0.8477125241211667, 0.8849321592347881, 0.9267716104061233, + 0.7183148924993774, 0.6788020686244602, 0.5311528841668889, 0.1093467426944017, 0.611804778445946, + 0.46525512153228654, 0.7043630522166378, 0.44016047563477556, 0.09121426936776944, 0.8705469085182395, + 0.17215698913489486, 0.9611897727382896, 0.08098628401032482, 0.21451639840748682, 0.5768977156844659, + 0.26513297341603714, 0.7324561265662716, 0.6113987795131144, 0.97410981182431, 0.28602537656896476, + 0.082588601751441, 0.4667126388636281, 0.8382315395128299, 0.04933702607569013, 0.0005326172316258981, + 0.5865353966441388, 0.8620736373525405, 0.6306945280084336, 0.04764154842922541, 0.34169536734404193, + 0.17867782316557934, 0.02509653619032559, 0.7563252602059222, 0.08315155168910016, 0.8599360432496744, + 0.006232202864729031, 0.7970828784609515, 0.524531268699229, 0.6613146415694391, 0.7461233724121545, + 0.4650602243220291, 0.1784929253615195, 0.7668412602690984, 0.9271678636205023, 0.7385021172863362, + 0.6359791414767878, 0.9718774298853685, 0.5301916140657921, 0.600470723278435, 0.3047056693815837, + 0.01701205986875387, 0.3545446489011168, 0.8903502367183758, 0.6761119017648953, 
0.668933677287048, + 0.5268301179582384, 0.9773425643718713, 0.7807337230105253, 0.3058007319157179, 0.2713475977837423, + 0.5544805715424246, 0.6015664973527335, 0.9590555164457821, 0.9154182706335288, 0.04887411317718848, + 0.13304323107759775, 0.5393661935414817, 0.3407684898687162, 0.24796297355379537, 0.6616846208040407, + 0.9920202949016083, 0.15633491687065648, 0.026261169320518363, 0.16633653783250157, 0.4141931338432051, + 0.46844833009651166, 0.8679827200390235, 0.38243534174482696, 0.9244808767432451, 0.11632079446525156, + 0.9062064954352042, 0.6874767491375144, 0.2258362439419268, 0.6794552085831682, 0.4002532098706061, + 0.6129527919109219, 0.9181514938492773, 0.8379077569584945, 0.3670325266283059, 0.5339699697866235, + 0.052806185584314935, 0.7987039854123447, 0.5895726611280803, 0.11522134299953346, 0.13421234322757603, + 0.1657941846464276, 0.05430567932876784, 0.5711512432074957, 0.34440312445911503, 0.12109588787627701, + 0.5036956130457365, 0.4143238858611441, 0.7545156467368622, 0.8903238156374075, 0.9237649345790961, + 0.5595352995199334, 0.5018247103213517, 0.08791699089229132, 0.4626691086984821, 0.06678373419547612, + 0.8976253770659826, 0.052478391790792944, 0.8162929960857885, 0.20665132910683937, 0.2564513917885336, + 0.2203555055348826, 0.5778491195905766, 0.44199698196247816, 0.9286489819890967, 0.16106749553848354, + 0.7289567024055328, 0.7800275096211687, 0.4602184718863326, 0.8320790096886941, 0.8836683716945654, + 0.9131505890812511, 0.7040449162909432, 0.8647007528629761, 0.8146791198796317, 0.5023926042439503, + 0.8674524226969336, 0.5413848234939881, 0.9564545919115185, 0.4831985839345372, 0.3175796945764736, + 0.47310127132389546, 0.03129977774073123, 0.09465791619238773, 0.6487646791937893, 0.004173349234257051, + 0.8860987348048975, 0.9289313672930872, 0.9708329061611259, 0.25717075199620165, 0.34192084390485955, + 0.4168823676588157, 0.4780097638691799, 0.16866101140444134, 0.9713537390911877, 0.8143108696586439, + 
0.26621359640781606, 0.08215570422238005, 0.13220260240206427, 0.03762079121167172, 0.8809942300084808, + 0.3871595989095862, 0.3522705354218415, 0.14563040682141926, 0.973217088329034, 0.06342421633800865, + 0.6725005753375712, 0.8894506890532137, 0.22129737642388003, 0.49357870687275085, 0.20246253236279066, + 0.500434682755821, 0.043501889994028375, 0.5248138586045901, 0.41823960616201317, 0.20923524191957044, + 0.5746001932664768, 0.3157636473721608, 0.7504657703944658, 0.00701735725790209, 0.8817930001278131, + 0.771556223654079, 0.5791031128679329, 0.9557077641602835, 0.63919490915165, 0.8107995120994241, + 0.8357730181411765, 0.763565017073811, 0.514956672854547, 0.6412821290242677, 0.15133563665166538, + 0.754620854174738, 0.9616288151903299, 0.48416383003273333, 0.17626499659844597, 0.19691548800582348, + 0.7105429002989472, 0.8278691246165878, 0.10395315243238379, 0.5582173499302838, 0.9239316412606712, + 0.6054931809892881, 0.5901150707686857, 0.8515700237526853, 0.345089429452039, 0.42387498806569357, + 0.553940474472896, 0.7122946781750881, 0.49778930944748756, 0.08787827317100672, 0.4116271712176073, + 0.03298376792526192, 0.9077995672389654, 0.9071076418105887, 0.9987878921135839, 0.1544937655681795, + 0.25291732105237286, 0.6645990918035876, 0.47255437589764915, 0.4777615517705799, 0.004448282702601558, + 0.3460664692865518, 0.26206043781283317, 0.8814523193341739, 0.9196954749538162, 0.013175970443288665, + 0.9651538291433787, 0.9664622879357454, 0.2547219272659277, 0.07846151780675747, 0.03449929613125302, + 0.11132378249497388, 0.7459123276004201, 0.3116061877585733, 0.12784746937617553, 0.12253067262655448, + 0.38903273975067665, 0.4566782228864755, 0.48093210416987786, 0.6836396369695441, 0.9386230279466492, + 0.02499106790133776, 0.020922504745778947, 0.6623553583485186, 0.11335985397026105, 0.156105149395114, + 0.45865493017794867, 0.7646711144904659, 0.9497821106888364, 0.253649950983154, 0.8614576279397507, + 0.26399573580277913, 
0.26769500242894306, 0.10660146655633229, 0.8620968379208583, 0.3644431089494301, + 0.021097153792147383, 0.6413668921501134, 0.3107186380818101, 0.003980535400756824, 0.469297808440877, + 0.18781138643845274, 0.9962229356813803, 0.9424117884794422, 0.8790055264273281, 0.3584038351896536, + 0.5865220856902467, 0.0767019478308203, 0.4377855787782695, 0.6007392726471552, 0.5539509687747731, + 0.14823858663651712, 0.9934545025486349, 0.9573970946648828, 0.568010974298202, 0.8499024752990427, + 0.08265269165731448, 0.7337208488716614, 0.09393092375466039, 0.19708111012900464, 0.6379106204223915, + 0.10374622622411978, 0.39949025762970214, 0.5461369552483832, 0.6355007220053059, 0.5808980500273655, + 0.6080856168865105, 0.9917314649792296, 0.7529081189940015, 0.4796467505964924, 0.6903679291544869, + 0.9400618464944154, 0.6174497372078122, 0.7591513267491136, 0.29182113691713263, 0.40683809791301695, + 0.6855441241850057, 0.33627384815926264, 0.7174843858993726, 0.4265957012941558, 0.09481108205452049, + 0.2206907485865991, 0.43836508334434177, 0.03858693381829026, 0.7149571552182608, 0.9373405756268672, + 0.09103544362646132, 0.6775107296333263, 0.10117982191354724, 0.07637851015892627, 0.9584463569118296, + 0.8881704417177094, 0.9911241870823463, 0.3417072689174143, 0.22416462198856957, 0.1835425354927137, + 0.5678358270399689, 0.703301230417089, 0.36676192127614693, 0.42173628909953265, 0.4300682786265866, + 0.14219662693688206, 0.8150029538555531, 0.9254689001478529, 0.14779175118597787, 0.19696083132414388, + 0.43579476640614967, 0.6134307586410246, 0.6371115962253767, 0.6008861931407631, 0.9835005351975031, + 0.18211187854500788, 0.7691578614285655, 0.9764530981527227, 0.9275298163403998, 0.04439801298756285, + 0.551017371163644, 0.7205889229919754, 0.4842804471308868, 0.8805866651024098, 0.8000810110714159, + 0.9736194862266951, 0.5094291973347561, 0.8503495803323085, 0.6296221119340748, 0.779165675792864, + 0.1870684081477867, 0.28225150911101116, 
0.6974968168258074, 0.9627117835302494, 0.133492746039518, + 0.30182501278118523, 0.6600144215196212, 0.1968119456969003, 0.16380617828733002, 0.8078383801034981, + 0.6809539681627669, 0.05050480490924614, 0.07188406888915577, 0.3636373390399269, 0.6482183809971421, + 0.735266347418902, 0.2609423421314738, 0.001390312382257064, 0.4867468616871172, 0.39362298364780024, + 0.17762270668038382, 0.863293008208188, 0.7608826601501297, 0.5318460079756204, 0.5438157260240885, + 0.769813510621498, 0.6583387212866237, 0.529136834340884, 0.5267360009479215, 0.4111679064023177, + 0.13345061046806383, 0.7433371730802318, 0.3119013329695528, 0.029581058448018727, 0.737060682804882, + 0.04855763799354207, 0.6025135199348383, 0.9322016562688125, 0.4732854692138253, 0.6740131359666773, + 0.6242678706062978, 0.3196241929013254, 0.029126233415790015, 0.5327422453923659, 0.91457749551686, + 0.3670167360070553, 0.12650025225065376, 0.013845886991579959, 0.9443490362158871, 0.14874596710326138, + 0.6073209693848843, 0.46388334782877216, 0.1098695842088494, 0.7219978807943509, 0.14306445919007293, + 0.4499116083864544, 0.8793410938504569, 0.8501099959402697, 0.7032342631302387, 0.8007234290388204, + 0.8248017025613454, 0.05284929495805646, 0.6579293314393269, 0.52354628216733, 0.4074209331005676, + 0.6801247783621012, 0.9282208421152665, 0.36214013008585844, 0.37346697818658003, 0.13168747524145386, + 0.5206552125072349, 0.5056598810020343, 0.1270869099841716, 0.2123848510847658, 0.3166843415770534, + 0.7509854732305814, 0.7172873280713993, 0.27813867932753855, 0.5246414587679292, 0.35374182053158953, + 0.8189300707822506, 0.7714215773473824, 0.4860916942789072, 0.7091568048853436, 0.1033360965975485, + 0.25692629387974253, 0.0063239982168763476, 0.41399365541263755, 0.6160528437592933, 0.9872683922874925, + 0.6701705548801383, 0.9866943323945742, 0.8708226403361269, 0.8865635181190356, 0.5645966730958535, + 0.4141033527397324, 0.9798945307705291, 0.5698010276007985, 0.5709760852241303, 
0.7308547905086359, + 0.34579047562430887, 0.4807162361096927, 0.9870663016043032, 0.2630946287007021, 0.8566536950947363, + 0.8186427731917715, 0.5448276967988157, 0.42758276519556737, 0.1271784248683363, 0.19588962063567117, + 0.3141150285693117, 0.6058861014253342, 0.06715003321072976, 0.7849211483431402, 0.5532788316792159, + 0.5868065461349722, 0.9696472233804034, 0.7847637185773797, 0.0006487337432082851, 0.7975887097949725, + 0.5506417091731192, 0.5582351457222449, 0.9971410624826377, 0.8085286239910665, 0.6511816867092731, + 0.2337609565804203, 0.5327356266169655, 0.12120186066609784, 0.35297993714423737, 0.31646575772900487, + 0.02428987467416799, 0.253357557252365, 0.8685667940719852, 0.32594628878515897, 0.19931150694854305, + 0.9017522677471346, 0.40692604522463827, 0.6902764075602817, 0.6415261809393741, 0.6071798192068825, + 0.06807167107365653, 0.5589656957444312, 0.9941213515971187, 0.9022612012427644, 0.9078437887038926, + 0.63331261629013, 0.06436568796081588, 0.3449537236772878, 0.5728748980725252, 0.7209041850732071, + 0.9921630819475203, 0.07041934621086088, 0.5487237401878415, 0.6369051078738235, 0.9616603400642277, + 0.77549325800219, 0.16675314377923556, 0.04531123904698342, 0.18241400468703362, 0.4219072337419115, + 0.6348708519190531, 0.21719230415637303, 0.8200569850384922, 0.9022988974146449, 0.8912149326674362, + 0.6942557299464097, 0.2367984531926669, 0.8994861603528518, 0.3568737629913865, 0.5793736927661238, + 0.8350551237810963, 0.7592693855776683, 0.24982822569089858, 0.7721123612576728, 0.41130846764003626, + 0.11800518188559928, 0.40040064093676375, 0.8096424198527009, 0.1018441581467846, 0.716327213866516, + 0.2794888632765272, 0.6573943617231828, 0.7044186941401308, 0.8154031716623844, 0.8603656823318697, + 0.06838422316937898, 0.9008211806019828, 0.1538035934869676, 0.039397303780642434, 0.44113929947515973, + 0.3881273097877219, 0.45217981256195905, 0.23245112466812745, 0.4988434763150812, 0.41653427054689374, + 
0.1034164351076462, 0.6497134763355603, 0.4377172334266779, 0.527839378558378, 0.4575541987011301, + 0.9518430806804513, 0.5770391799057243, 0.06116336748279105, 0.7725544638973622, 0.10671648828459357, + 0.688541995884717, 0.7107378206312127, 0.7313291731374914, 0.49500737451758503, 0.33442635506669294, + 0.6130125527795475, 0.7691900499918106, 0.7456351056689471, 0.3096907632260236, 0.26114230800062044, + 0.017252629974433442, 0.10324581445230152, 0.14560930585353093, 0.91044107726182, 0.3082826533242927, + 0.7781267748623237, 0.2614646975706304, 0.7182573197994735, 0.9344083597750972, 0.7302738535933991, + 0.5426671684224912, 0.8728332634973097, 0.30988741330774505, 0.5421274741939771, 0.25135035258940974, + 0.9806913272944415, 0.2007751004710696, 0.7937225342067015, 0.6392629267080432, 0.4735878147669671, + 0.8991559081336787, 0.7668004493627582, 0.616605158291584, 0.4189440850618942, 0.21881213058804527, + 0.15404736318394507, 0.9514719168931699, 0.8572388895279167, 0.7695651684353306, 0.5200964904844124, + 0.4641435536186225, 0.03166344639403551, 0.3498043078989227, 0.9857887951736034, 0.8471619893457908, + 0.5942232265682109, 0.17081584343839806, 0.19626250900544784, 0.934304380974092, 0.06340444724152905, + 0.9067731924701304, 0.3635574756455685, 0.9585949210149967, 0.8896806595274409, 0.24230683806375908, + 0.42486391206575047, 0.5693783254589333, 0.5856389236236841, 0.7391544336186706, 0.23984707278091588, + 0.6857869301950458, 0.382560187301876, 0.3190534230287244, 0.55642212629952, 0.4552609450593593, + 0.9273512496420965, 0.34886696285669094, 0.900530266743957, 0.9213723551697597, 0.5611380768799336, + 0.1693984075782411, 0.9789876065987205, 0.5990540756999841, 0.2948703269163482, 0.4054553126348215, + 0.8539578133878286, 0.7129570728658369, 0.539156732806676, 0.049569157865243074, 0.4905970487167317, + 0.9359363889293013, 0.7657379455376203, 0.2721534676723715, 0.4764309946485116, 0.4537271021133793, + 0.5229363131827134, 0.8565596193614202, 
0.649436416272076, 0.02577272568800959, 0.9426749539934983, + 0.6580250607035528, 0.6054538946034959, 0.956548439779998, 0.014037720747974713, 0.8953652205195822, + 0.8083240090993435, 0.19598627194813623, 0.24011337538080146, 0.375097519078059, 0.1318375396328999, + 0.4838760745556058, 0.9771787467311074, 0.5303858912662304, 0.30139303668464923, 0.12993891116603073, + 0.04847260524386765, 0.9010709535970589, 0.2066636969129345, 0.9695393749554523, 0.9130349746153508, + 0.3330714042912156, 0.6509836675522569, 0.43064622796152574, 0.8547771895292302, 0.5602994188130606, + 0.6517484005602143, 0.31477582187626985, 0.7565179897153195, 0.22565407587908615, 0.30404241435399293, + 0.8803718916197911, 0.4137964030742799, 0.8790220739851453, 0.4006967107899765, 0.7118073681091255, + 0.9457957224013442, 0.7685272816303742, 0.08774103421013557, 0.44046857608248824, 0.4400382572906255, + 0.11000661087563446, 0.937966627900615, 0.3482023897442358, 0.5608305618704201, 0.567040908728685, + 0.6087511473257247, 0.2823783601265032, 0.7186502688876168, 0.4198632568272431, 0.8259888928683129, + 0.05965337112024971, 0.39804210128074613, 0.5763590906954813, 0.1421473448098215, 0.3270588678120382, + 0.8854425633754791, 0.3311939137649188, 0.6246452113273748, 0.6815237923749249, 0.3227991650920835, + 0.5020648939439555, 0.23017428519331895, 0.7641796940749915, 0.07829789350704186, 0.040973773653967926, + 0.94546991269337, 0.7876333393448203, 0.2636616405004455, 0.9635766491233103, 0.14317888051290673, + 0.7662780919434238, 0.7787717251199148, 0.47614344327016855, 0.0625158359350092, 0.39160036459673786, + 0.43130097279022905, 0.7595996642185282, 0.7051596280927722, 0.8078082635701143, 0.9165594168049757, + 0.45609692566641136, 0.3186458398987321, 0.9097533917281811, 0.534415606150436, 0.1630656009058158, + 0.6092085952172871, 0.6613881656920997, 0.37561050141094254, 0.20655533285441197, 0.42052902928145486, + 0.4260791952020392, 0.9635687246998184, 0.040576248270011606, 
0.4506662406445222, 0.5536870372663544, + 0.27079559433796196, 0.4529731827372153, 0.30394717751467293, 0.9417743345487153, 0.1595973054129839, + 0.524944603773529, 0.6600573849870056, 0.7535188400072915, 0.13345728238389631, 0.4194083778307489, + 0.27327236506564223, 0.21739488894691872, 0.9688391237644197, 0.9037991893107805, 0.5248760897670668, + 0.5805460564741822, 0.3509837109554569, 0.7301172602117544, 0.14946012617105275, 0.5486040186666324, + 0.5338975712422733, 0.44042026945061663, 0.6484483129128772, 0.5982932509679235, 0.5024079324325041, + 0.15934952832630112, 0.044677234299907, 0.5180437643536496, 0.06636511541523027, 0.5935914756673489, + 0.28944940732885804, 0.07143611768224323, 0.970640600339885, 0.04482234945829422, 0.4721967273155353, + 0.3023666280729985, 0.7292874312983759, 0.5351131006083096, 0.8820488003536908, 0.1959992842019812, + 0.4722945712586679, 0.16868421008162604, 0.939100968047668, 0.8066650727668283, 0.8428255880041005, + 0.4162563664751737, 0.700652008671759, 0.6416750259188707, 0.739003452854678, 0.9136311948354005, + 0.5502105249747118, 0.7580554296017025, 0.07003818242446003, 0.39612552881451235, 0.48703692646781394, + 0.5012908826773911, 0.2913253081495982, 0.5683895822123131, 0.8238181390218491, 0.8968600751556971, + 0.6586789174328685, 0.3823116443736294, 0.801092747448573, 0.6009468709183672, 0.7410021886161868, + 0.696269191178893, 0.06868418937047505, 0.33605904856179736, 0.15353543339760822, 0.21699652767305433, + 0.8905554735112293, 0.3998355695810606, 0.7362667515842057, 0.9812815339236836, 0.42086188276413306, + 0.4261354705753385, 0.42908909714058385, 0.5966904035729225, 0.722291236125945, 0.2306325468658711, + 0.1401191406929443, 0.4351739951164847, 0.4029047150659123, 0.6699294765915349, 0.6573926282763929, + 0.9164508003679109, 0.4601016058547346, 0.9997214438162066, 0.9511835082140031, 0.23887605701823889, + 0.9141244622742661, 0.24465444867106156, 0.5389422916165942, 0.1249239270120327, 0.6167832291917559, + 
0.6444181146074366, 0.742728341300588, 0.3893546805316366, 0.4604759643994246, 0.6527838205694618, + 0.9292554265448705, 0.3612392305962885, 0.06549665619801204, 0.05544507896604156, 0.24623724982926498, + 0.015068329553183823, 0.050919626909062043, 0.359039158748261, 0.9608194950832173, 0.1935051147588388, + 0.45554771240835235, 0.4551359427269517, 0.9885862378989301, 0.5085867099448891, 0.7379813875076469, + 0.4595423552899529, 0.3425280440560493, 0.7184009460589201, 0.7937379190850589, 0.40486160917649405, + 0.632494477827828, 0.00043012606449288615, 0.47308337689057733, 0.5873125066010546, 0.5642130408118529, + 0.05585449108260587, 0.0009067465907667271, 0.4213200693458017, 0.32810117888017454, 0.03201803778046464, + 0.17672885224825907, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_HWCN, {8, 2, 16, 16}, {2, 2, 17, 17}, DT_FLOAT}; + TransResult result; + + FormatTransferFracZHwcn transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFracZHwcn, uint8_1c_2n_padcn) { + uint8_t ret[3 * 3 * 30 * 20] = { + 125, 165, 84, 250, 90, 201, 42, 35, 194, 76, 108, 1, 163, 194, 146, 247, 138, 181, 145, 159, 85, 36, 236, + 254, 208, 66, 249, 45, 64, 248, 69, 1, 16, 161, 132, 24, 2, 83, 105, 188, 69, 16, 222, 28, 216, 131, + 23, 103, 108, 233, 74, 151, 110, 194, 179, 67, 219, 178, 42, 220, 166, 204, 119, 208, 224, 81, 67, 189, 123, + 153, 63, 249, 140, 167, 75, 238, 162, 203, 244, 38, 119, 216, 11, 87, 236, 242, 23, 132, 11, 243, 69, 60, + 112, 190, 37, 37, 246, 160, 203, 76, 70, 242, 137, 18, 3, 147, 188, 146, 144, 119, 30, 48, 154, 150, 71, + 89, 7, 195, 59, 102, 157, 63, 45, 113, 219, 13, 231, 120, 243, 115, 26, 100, 12, 74, 10, 111, 17, 113, + 4, 242, 170, 244, 72, 149, 218, 238, 39, 95, 80, 123, 194, 191, 56, 85, 102, 120, 29, 
134, 180, 35, 14, + 187, 134, 72, 109, 4, 237, 99, 146, 72, 46, 141, 138, 198, 190, 210, 222, 7, 105, 101, 11, 78, 212, 251, + 21, 97, 48, 90, 107, 103, 182, 247, 161, 151, 169, 97, 103, 106, 34, 220, 225, 56, 31, 16, 58, 83, 32, + 136, 251, 241, 194, 21, 38, 40, 118, 82, 72, 145, 222, 98, 215, 206, 64, 241, 51, 170, 122, 110, 178, 66, + 156, 61, 225, 193, 10, 80, 137, 51, 94, 49, 182, 35, 177, 158, 194, 194, 216, 0, 167, 114, 234, 59, 101, + 81, 176, 45, 20, 220, 26, 209, 243, 78, 53, 35, 158, 44, 14, 245, 215, 170, 1, 167, 120, 243, 169, 22, + 39, 74, 110, 20, 153, 88, 246, 190, 207, 65, 125, 97, 42, 109, 84, 46, 250, 175, 175, 235, 156, 230, 194, + 206, 117, 218, 64, 210, 249, 11, 110, 27, 92, 190, 34, 76, 48, 41, 32, 73, 19, 234, 30, 243, 91, 9, + 11, 6, 241, 12, 190, 77, 180, 181, 249, 55, 76, 39, 129, 50, 87, 70, 64, 241, 88, 46, 169, 60, 213, + 160, 42, 47, 174, 183, 58, 220, 234, 167, 107, 130, 91, 155, 67, 32, 206, 177, 229, 27, 30, 230, 110, 106, + 102, 251, 86, 74, 191, 44, 194, 28, 23, 95, 227, 89, 18, 106, 65, 172, 134, 136, 118, 4, 107, 84, 225, + 242, 28, 53, 108, 96, 21, 212, 240, 6, 118, 153, 90, 221, 122, 85, 18, 31, 4, 32, 74, 141, 242, 45, + 17, 104, 24, 199, 101, 114, 25, 204, 22, 148, 54, 248, 249, 221, 58, 2, 153, 184, 206, 244, 194, 5, 250, + 45, 98, 113, 5, 37, 222, 82, 199, 161, 106, 178, 251, 141, 214, 183, 4, 62, 36, 208, 34, 32, 86, 141, + 101, 71, 208, 213, 230, 212, 196, 211, 76, 27, 42, 20, 203, 153, 90, 51, 203, 224, 216, 209, 134, 136, 115, + 42, 62, 77, 91, 14, 139, 13, 75, 163, 1, 34, 196, 241, 225, 135, 189, 191, 166, 242, 63, 175, 180, 74, + 202, 79, 187, 2, 220, 136, 54, 24, 97, 204, 28, 89, 165, 167, 32, 96, 124, 244, 205, 108, 185, 6, 16, + 197, 140, 56, 188, 223, 179, 200, 31, 11, 92, 180, 103, 175, 143, 138, 200, 151, 67, 168, 140, 234, 253, 183, + 249, 7, 61, 33, 1, 134, 99, 239, 107, 127, 185, 200, 22, 205, 182, 42, 161, 181, 187, 173, 150, 29, 66, + 147, 116, 19, 132, 33, 0, 217, 88, 116, 30, 185, 223, 233, 186, 72, 
195, 170, 95, 234, 139, 5, 215, 32, + 31, 121, 188, 121, 6, 77, 124, 154, 38, 159, 137, 218, 119, 239, 53, 87, 243, 130, 217, 194, 180, 101, 72, + 26, 233, 14, 153, 247, 23, 9, 168, 79, 206, 62, 96, 2, 84, 119, 241, 118, 91, 240, 162, 61, 227, 135, + 98, 223, 112, 86, 67, 244, 181, 187, 113, 125, 240, 45, 62, 186, 247, 19, 20, 241, 185, 165, 25, 84, 68, + 138, 109, 75, 218, 95, 37, 116, 172, 70, 190, 176, 56, 104, 169, 184, 68, 164, 128, 106, 207, 2, 253, 36, + 109, 198, 4, 112, 6, 198, 215, 130, 11, 233, 165, 79, 169, 214, 110, 67, 244, 196, 179, 158, 57, 110, 126, + 136, 58, 119, 106, 124, 8, 63, 42, 57, 12, 183, 69, 75, 168, 228, 8, 204, 112, 200, 57, 186, 124, 148, + 27, 109, 175, 234, 111, 41, 186, 23, 134, 174, 25, 150, 38, 42, 47, 74, 101, 27, 140, 18, 115, 251, 111, + 231, 162, 179, 169, 53, 120, 145, 1, 79, 51, 63, 174, 7, 208, 92, 69, 12, 56, 25, 182, 28, 108, 36, + 109, 190, 107, 1, 103, 130, 86, 41, 236, 6, 97, 41, 110, 78, 78, 17, 149, 66, 189, 175, 148, 50, 74, + 37, 73, 230, 180, 201, 77, 208, 206, 110, 158, 26, 115, 206, 97, 49, 187, 165, 139, 253, 87, 253, 116, 74, + 31, 159, 78, 179, 8, 65, 111, 16, 34, 7, 134, 109, 97, 55, 181, 244, 16, 137, 95, 112, 82, 129, 97, + 47, 215, 64, 246, 95, 98, 254, 160, 237, 71, 76, 49, 223, 47, 74, 45, 162, 197, 156, 34, 19, 222, 58, + 138, 208, 121, 218, 199, 45, 49, 190, 211, 167, 62, 54, 25, 101, 86, 212, 147, 28, 236, 253, 251, 67, 12, + 178, 152, 199, 9, 218, 9, 164, 195, 250, 241, 249, 143, 222, 250, 94, 12, 36, 232, 219, 128, 148, 186, 21, + 100, 50, 32, 205, 89, 252, 201, 99, 153, 71, 70, 227, 199, 189, 139, 20, 100, 88, 12, 70, 90, 212, 70, + 51, 37, 76, 186, 252, 19, 245, 18, 251, 176, 110, 239, 240, 29, 200, 232, 79, 192, 71, 172, 190, 97, 217, + 150, 29, 36, 31, 110, 183, 54, 197, 38, 163, 190, 188, 9, 234, 186, 180, 229, 19, 126, 196, 242, 27, 180, + 198, 206, 197, 1, 136, 210, 245, 185, 251, 135, 65, 150, 194, 190, 174, 41, 107, 151, 70, 225, 21, 227, 4, + 65, 220, 92, 210, 68, 145, 121, 13, 155, 197, 
122, 4, 1, 133, 198, 134, 148, 132, 195, 133, 189, 129, 114, + 231, 23, 110, 91, 16, 254, 123, 223, 18, 157, 191, 183, 131, 25, 105, 15, 28, 195, 14, 217, 71, 75, 218, + 145, 35, 206, 193, 178, 240, 73, 219, 30, 135, 144, 177, 32, 30, 252, 82, 110, 188, 21, 196, 172, 180, 52, + 166, 191, 107, 210, 133, 67, 114, 5, 91, 140, 74, 101, 220, 95, 69, 253, 179, 67, 87, 148, 141, 12, 78, + 84, 6, 221, 203, 214, 0, 85, 30, 22, 248, 69, 222, 67, 84, 176, 14, 105, 188, 77, 75, 152, 188, 88, + 89, 180, 122, 164, 76, 149, 254, 45, 132, 151, 142, 85, 188, 44, 52, 88, 170, 209, 47, 220, 109, 225, 178, + 56, 143, 26, 157, 69, 216, 190, 138, 238, 67, 105, 18, 127, 201, 192, 254, 102, 17, 235, 160, 1, 109, 146, + 51, 209, 65, 233, 39, 106, 215, 128, 189, 113, 89, 73, 86, 116, 32, 149, 97, 45, 116, 49, 46, 227, 67, + 240, 44, 158, 98, 31, 238, 88, 104, 136, 175, 85, 101, 37, 10, 89, 108, 120, 194, 252, 101, 252, 162, 25, + 107, 134, 2, 181, 135, 199, 105, 143, 235, 21, 79, 154, 73, 93, 153, 158, 106, 20, 242, 140, 62, 209, 148, + 128, 52, 189, 216, 143, 29, 135, 120, 170, 227, 88, 209, 143, 73, 44, 87, 227, 162, 79, 48, 45, 246, 171, + 112, 61, 63, 170, 101, 99, 31, 9, 140, 77, 93, 250, 157, 184, 178, 242, 174, 76, 42, 67, 96, 105, 233, + 169, 100, 200, 96, 181, 19, 111, 132, 143, 4, 244, 97, 134, 140, 207, 135, 118, 165, 86, 194, 141, 44, 21, + 232, 169, 87, 235, 231, 146, 196, 225, 37, 47, 24, 215, 3, 200, 216, 63, 29, 89, 117, 90, 205, 129, 128, + 126, 174, 220, 78, 4, 95, 79, 26, 126, 132, 214, 153, 165, 79, 228, 29, 61, 109, 100, 82, 20, 164, 246, + 101, 141, 234, 126, 89, 70, 102, 45, 245, 161, 125, 48, 158, 253, 141, 194, 13, 14, 94, 111, 88, 15, 178, + 119, 4, 52, 99, 78, 143, 165, 140, 11, 222, 72, 3, 103, 226, 62, 177, 39, 76, 237, 110, 32, 172, 215, + 143, 0, 167, 18, 119, 163, 216, 176, 202, 54, 191, 81, 102, 207, 28, 171, 118, 239, 244, 82, 23, 98, 140, + 151, 122, 1, 140, 35, 251, 120, 203, 203, 139, 2, 64, 153, 224, 37, 120, 136, 37, 211, 25, 107, 69, 94, + 190, 141, 
48, 57, 145, 251, 132, 245, 22, 36, 219, 207, 130, 85, 42, 219, 114, 181, 223, 66, 247, 65, 224, + 176, 118, 140, 169, 170, 8, 239, 92, 196, 77, 98, 59, 211, 141, 247, 222, 41, 128, 142, 22, 210, 129, 204, + 80, 168, 124, 159, 76, 248, 39, 146, 134, 112, 31, 202, 18, 208, 119, 43, 9, 66, 24, 251, 23, 146, 18, + 241, 61, 218, 169, 186, 196, 19, 152, 115, 33, 188, 171, 178, 115, 240, 82, 70, 251, 112, 188, 33, 48, 245, + 180, 53, 135, 210, 114, 99, 212, 57, 107, 241, 116, 76, 129, 6, 109, 83, 209, 125, 15, 77, 234, 250, 116, + 175, 191, 234, 59, 141, 51, 58, 48, 46, 154, 128, 19, 135, 112, 109, 227, 227, 125, 201, 32, 52, 150, 158, + 12, 125, 172, 28, 111, 243, 100, 149, 77, 159, 45, 147, 23, 47, 144, 189, 176, 84, 155, 208, 213, 252, 4, + 228, 232, 253, 108, 206, 23, 183, 75, 248, 226, 253, 170, 147, 106, 123, 217, 89, 155, 76, 247, 61, 80, 32, + 47, 197, 72, 10, 223, 14, 222, 40, 82, 16, 137, 228, 214, 213, 213, 158, 125, 159, 154, 148, 250, 149, 24, + 198, 154, 227, 142, 59, 64, 15, 19, 20, 105, 89, 39, 228, 51, 99, 105, 245, 89, 182, 116, 94, 70, 181, + 59, 159, 84, 104, 16, 7, 230, 13, 66, 201, 191, 143, 217, 10, 79, 105, 169, 209, 202, 133, 194, 41, 33, + 183, 33, 10, 154, 8, 212, 141, 215, 192, 22, 100, 125, 44, 211, 82, 23, 228, 158, 240, 100, 70, 254, 25, + 44, 232, 199, 251, 206, 160, 78, 138, 76, 96, 245, 78, 167, 142, 32, 59, 61, 207, 248, 4, 5, 85, 170, + 151, 243, 56, 33, 130, 6, 226, 84, 249, 108, 236, 1, 178, 130, 14, 206, 54, 75, 136, 15, 167, 188, 201, + 59, 213, 147, 110, 64, 89, 31, 248, 87, 8, 113, 19, 59, 67, 8, 93, 10, 66, 185, 95, 0, 35, 181, + 231, 157, 117, 7, 230, 125, 107, 162, 15, 158, 61, 112, 101, 183, 100, 195, 253, 71, 248, 137, 92, 12, 245, + 243, 223, 64, 161, 242, 228, 99, 45, 223, 67, 1, 88, 227, 86, 82, 83, 162, 81, 81, 199, 67, 181, 231, + 126, 222, 22, 8, 33, 236, 65, 46, 209, 227, 67, 32, 170, 60, 188, 128, 126, 102, 42, 66, 49, 133, 26, + 219, 40, 231, 155, 179, 233, 246, 149, 203, 21, 126, 253, 140, 6, 197, 26, 247, 3, 251, 
238, 172, 201, 86, + 124, 25, 197, 182, 254, 160, 91, 220, 135, 144, 216, 86, 189, 202, 32, 216, 250, 160, 197, 252, 105, 217, 93, + 224, 229, 147, 244, 30, 13, 82, 95, 240, 28, 89, 58, 181, 52, 108, 218, 129, 15, 156, 159, 138, 57, 175, + 89, 118, 74, 66, 175, 133, 24, 132, 198, 102, 218, 45, 236, 245, 202, 210, 132, 61, 84, 218, 80, 203, 11, + 167, 107, 93, 85, 223, 52, 29, 205, 199, 29, 85, 103, 12, 222, 240, 29, 87, 9, 214, 182, 45, 46, 198, + 214, 18, 65, 2, 159, 92, 175, 93, 175, 1, 238, 180, 237, 193, 124, 152, 8, 217, 127, 18, 7, 81, 33, + 190, 28, 177, 6, 186, 40, 44, 221, 34, 178, 150, 241, 42, 139, 86, 224, 50, 74, 137, 230, 246, 14, 237, + 11, 90, 25, 108, 184, 73, 93, 159, 111, 73, 5, 126, 62, 39, 200, 188, 249, 195, 30, 47, 225, 147, 254, + 185, 99, 60, 208, 90, 226, 211, 92, 235, 191, 50, 60, 221, 98, 113, 248, 234, 108, 80, 198, 64, 74, 213, + 13, 13, 155, 200, 33, 240, 226, 180, 42, 173, 241, 252, 252, 184, 25, 124, 19, 106, 73, 191, 135, 4, 169, + 141, 167, 105, 143, 151, 211, 49, 117, 200, 160, 105, 178, 130, 102, 157, 51, 82, 54, 13, 204, 176, 247, 133, + 126, 146, 253, 208, 53, 78, 48, 114, 26, 41, 202, 59, 167, 132, 68, 37, 97, 25, 61, 220, 27, 142, 72, + 64, 57, 122, 64, 63, 253, 181, 128, 40, 235, 89, 172, 183, 252, 133, 126, 181, 0, 181, 254, 156, 148, 105, + 192, 250, 100, 31, 57, 169, 64, 174, 161, 137, 95, 243, 158, 64, 192, 220, 132, 32, 62, 15, 226, 238, 81, + 106, 214, 145, 143, 231, 106, 32, 34, 64, 10, 53, 90, 186, 210, 185, 23, 241, 181, 69, 34, 254, 38, 182, + 122, 2, 180, 201, 197, 23, 80, 147, 58, 200, 131, 198, 90, 152, 57, 30, 31, 169, 221, 74, 196, 126, 186, + 9, 254, 243, 70, 243, 90, 223, 127, 93, 34, 76, 106, 227, 10, 150, 180, 134, 71, 53, 142, 56, 199, 231, + 167, 209, 115, 52, 224, 160, 121, 203, 124, 182, 79, 123, 166, 39, 125, 121, 170, 180, 92, 234, 200, 134, 249, + 41, 78, 174, 194, 238, 211, 51, 54, 112, 163, 153, 233, 143, 164, 6, 121, 133, 198, 4, 113, 29, 38, 51, + 142, 245, 145, 68, 27, 102, 146, 44, 119, 18, 
169, 165, 241, 177, 250, 135, 81, 127, 171, 76, 121, 76, 60, + 237, 166, 221, 229, 84, 230, 17, 190, 130, 112, 47, 207, 171, 127, 86, 241, 169, 147, 45, 171, 246, 181, 222, + 103, 132, 73, 111, 46, 177, 187, 151, 254, 251, 134, 197, 93, 253, 143, 214, 165, 66, 224, 113, 97, 131, 32, + 222, 64, 181, 168, 193, 225, 12, 242, 89, 199, 139, 166, 236, 195, 135, 33, 174, 58, 98, 23, 86, 32, 130, + 60, 151, 82, 207, 123, 173, 211, 44, 221, 165, 51, 69, 36, 238, 204, 130, 135, 68, 10, 181, 201, 195, 15, + 72, 148, 119, 74, 114, 115, 102, 164, 185, 101, 107, 185, 221, 251, 67, 143, 208, 27, 80, 198, 95, 33, 189, + 237, 83, 129, 27, 33, 161, 98, 22, 113, 194, 46, 33, 177, 24, 141, 232, 218, 13, 47, 13, 139, 198, 5, + 34, 157, 5, 120, 68, 118, 45, 186, 48, 160, 218, 25, 55, 226, 213, 34, 104, 132, 178, 66, 149, 173, 81, + 154, 100, 170, 126, 10, 251, 65, 132, 130, 132, 207, 117, 26, 234, 13, 56, 181, 29, 191, 7, 29, 3, 91, + 190, 201, 159, 106, 79, 180, 97, 123, 63, 10, 38, 38, 195, 152, 108, 144, 146, 46, 10, 23, 146, 148, 101, + 224, 33, 88, 246, 12, 32, 179, 149, 45, 6, 253, 157, 36, 173, 189, 139, 20, 21, 55, 8, 101, 9, 157, + 137, 237, 154, 249, 90, 24, 119, 186, 81, 211, 251, 225, 21, 143, 99, 184, 222, 224, 113, 93, 142, 29, 89, + 185, 101, 147, 201, 123, 38, 165, 204, 204, 9, 243, 130, 246, 143, 83, 213, 6, 216, 5, 92, 218, 22, 142, + 201, 34, 253, 54, 168, 80, 177, 217, 45, 82, 104, 241, 222, 70, 198, 241, 196, 187, 153, 5, 143, 193, 145, + 135, 11, 47, 50, 146, 186, 246, 86, 218, 253, 47, 14, 168, 50, 91, 82, 235, 127, 174, 75, 137, 63, 236, + 198, 81, 190, 119, 154, 214, 92, 66, 0, 22, 235, 5, 226, 244, 141, 13, 254, 57, 162, 90, 17, 106, 209, + 240, 197, 6, 49, 86, 164, 230, 37, 17, 81, 194, 245, 130, 253, 167, 31, 150, 3, 226, 221, 64, 105, 83, + 247, 134, 96, 132, 221, 10, 161, 215, 151, 154, 194, 46, 71, 44, 250, 29, 228, 218, 48, 125, 133, 12, 70, + 86, 188, 182, 2, 109, 87, 225, 139, 106, 104, 51, 213, 46, 175, 194, 90, 24, 21, 29, 151, 154, 246, 0, + 125, 11, 
186, 137, 105, 45, 192, 205, 114, 240, 170, 219, 139, 56, 134, 103, 205, 160, 115, 41, 124, 220, 87, + 177, 161, 242, 250, 220, 239, 114, 59, 171, 9, 56, 124, 31, 108, 184, 80, 135, 144, 222, 144, 26, 10, 205, + 163, 115, 243, 247, 237, 231, 91, 5, 173, 80, 82, 137, 124, 237, 79, 103, 7, 88, 48, 72, 176, 86, 23, + 139, 239, 244, 91, 244, 49, 127, 102, 195, 42, 103, 101, 167, 41, 156, 158, 213, 55, 92, 28, 254, 138, 33, + 35, 114, 83, 200, 249, 61, 25, 138, 228, 67, 152, 168, 143, 141, 142, 33, 164, 47, 18, 83, 123, 11, 15, + 224, 12, 231, 84, 0, 132, 184, 48, 77, 81, 219, 163, 109, 96, 225, 15, 250, 29, 196, 212, 250, 206, 137, + 191, 127, 142, 104, 168, 146, 103, 5, 175, 148, 181, 23, 142, 47, 64, 133, 238, 50, 167, 189, 10, 235, 253, + 65, 22, 140, 217, 222, 28, 60, 27, 191, 43, 252, 215, 169, 34, 228, 117, 125, 113, 221, 132, 38, 100, 156, + 206, 73, 134, 67, 78, 180, 50, 77, 219, 75, 142, 94, 204, 170, 80, 52, 84, 200, 216, 105, 244, 173, 133, + 217, 151, 120, 78, 215, 188, 2, 241, 180, 214, 122, 91, 47, 150, 181, 132, 16, 59, 98, 27, 143, 3, 172, + 169, 122, 127, 101, 154, 52, 169, 86, 82, 167, 108, 198, 12, 166, 109, 231, 138, 79, 194, 234, 179, 70, 67, + 216, 55, 107, 13, 217, 150, 165, 109, 176, 183, 95, 175, 89, 35, 32, 80, 105, 186, 41, 95, 33, 93, 248, + 211, 127, 207, 171, 218, 203, 158, 26, 4, 205, 213, 110, 72, 68, 93, 248, 122, 7, 205, 47, 224, 248, 214, + 254, 173, 51, 79, 42, 114, 83, 107, 136, 44, 94, 240, 147, 220, 27, 32, 33, 62, 33, 46, 145, 4, 101, + 123, 8, 53, 156, 20, 105, 201, 101, 111, 136, 244, 97, 131, 193, 10, 172, 103, 187, 103, 88, 233, 253, 153, + 57, 67, 206, 188, 95, 227, 150, 9, 29, 17, 149, 77, 13, 138, 107, 38, 122, 246, 208, 199, 50, 12, 117, + 199, 48, 22, 65, 146, 27, 152, 196, 235, 187, 33, 84, 164, 163, 170, 240, 242, 230, 157, 91, 118, 238, 76, + 131, 21, 103, 180, 158, 67, 33, 170, 63, 249, 4, 140, 172, 22, 28, 224, 174, 112, 11, 218, 38, 177, 33, + 142, 193, 226, 245, 92, 104, 13, 239, 212, 34, 131, 179, 172, 11, 119, 
119, 233, 92, 107, 108, 35, 81, 248, + 23, 107, 20, 179, 17, 170, 129, 26, 144, 98, 13, 139, 199, 152, 66, 234, 211, 9, 237, 144, 45, 75, 176, + 130, 56, 17, 80, 241, 98, 66, 88, 152, 166, 175, 209, 238, 239, 9, 164, 77, 247, 239, 187, 145, 83, 181, + 146, 61, 117, 56, 229, 237, 69, 28, 60, 107, 190, 165, 222, 215, 32, 129, 202, 17, 159, 93, 168, 127, 105, + 157, 58, 239, 161, 126, 123, 31, 188, 115, 227, 207, 109, 158, 70, 51, 67, 234, 37, 100, 127, 199, 132, 99, + 73, 117, 182, 153, 15, 94, 168, 63, 136, 91, 224, 221, 3, 154, 216, 38, 121, 229, 135, 220, 32, 105, 168, + 197, 197, 96, 52, 122, 65, 210, 43, 185, 206, 28, 114, 197, 112, 86, 68, 153, 21, 136, 141, 121, 172, 212, + 1, 119, 163, 100, 144, 36, 67, 72, 249, 129, 10, 64, 186, 76, 189, 171, 212, 214, 154, 100, 32, 33, 2, + 138, 46, 85, 204, 55, 57, 176, 188, 112, 184, 104, 230, 65, 87, 8, 123, 234, 138, 225, 213, 131, 181, 189, + 63, 78, 27, 142, 19, 161, 178, 225, 0, 225, 101, 76, 177, 2, 184, 158, 196, 251, 230, 250, 250, 6, 81, + 248, 94, 129, 12, 94, 216, 0, 208, 252, 241, 179, 27, 242, 147, 212, 173, 27, 172, 7, 12, 34, 139, 247, + 240, 248, 25, 129, 254, 51, 216, 143, 78, 150, 235, 212, 193, 218, 169, 199, 226, 92, 61, 165, 247, 1, 73, + 150, 9, 34, 10, 161, 58, 81, 66, 189, 170, 134, 3, 69, 33, 82, 201, 42, 164, 129, 76, 187, 165, 100, + 32, 94, 229, 182, 73, 69, 5, 143, 232, 163, 254, 162, 70, 118, 70, 132, 147, 81, 51, 201, 150, 224, 225, + 8, 89, 136, 221, 253, 194, 47, 143, 216, 39, 54, 24, 61, 21, 130, 24, 235, 121, 135, 72, 160, 64, 169, + 74, 240, 236, 107, 24, 62, 252, 38, 186, 4, 88, 41, 5, 238, 130, 51, 187, 244, 74, 48, 24, 24, 252, + 228, 181, 146, 12, 186, 47, 181, 156, 86, 214, 226, 82, 120, 245, 86, 195, 64, 216, 239, 221, 36, 86, 246, + 226, 222, 42, 251, 31, 186, 217, 24, 167, 53, 65, 39, 9, 61, 247, 235, 198, 56, 149, 87, 215, 185, 172, + 146, 164, 193, 207, 113, 35, 226, 99, 119, 233, 44, 118, 199, 136, 66, 1, 110, 4, 196, 148, 132, 215, 132, + 236, 55, 212, 89, 245, 151, 201, 
164, 223, 52, 130, 184, 55, 240, 80, 75, 107, 172, 114, 165, 94, 178, 150, + 197, 8, 195, 45, 244, 127, 169, 250, 173, 144, 30, 151, 141, 132, 99, 194, 234, 140, 34, 3, 224, 226, 179, + 177, 68, 230, 71, 250, 42, 188, 142, 98, 98, 193, 250, 3, 211, 176, 78, 164, 119, 125, 161, 192, 241, 36, + 205, 166, 52, 52, 92, 14, 239, 10, 13, 197, 66, 230, 39, 233, 191, 220, 136, 11, 11, 219, 0, 231, 248, + 183, 174, 73, 120, 6, 26, 254, 85, 217, 208, 250, 195, 109, 55, 215, 235, 218, 82, 8, 224, 120, 66, 244, + 172, 150, 13, 134, 96, 22, 61, 72, 160, 52, 235, 62, 205, 132, 97, 232, 134, 138, 28, 5, 211, 252, 135, + 35, 87, 5, 138, 144, 190, 12, 148, 147, 39, 89, 39, 221, 147, 238, 253, 21, 157, 216, 55, 53, 177, 83, + 8, 122, 71, 62, 105, 203, 122, 111, 250, 108, 239, 234, 72, 218, 69, 168, 8, 132, 112, 21, 18, 46, 102, + 143, 55, 228, 94, 77, 173, 23, 229, 80, 185, 11, 212, 237, 122, 248, 195, 136, 85, 228, 94, 154, 57, 21, + 115, 94, 10, 250, 22, 69, 207, 97, 62, 243, 102, 210, 162, 96, 90, 197, 105, 73, 191, 90, 32, 166, 246, + 88, 228, 125, 222, 53, 197, 178, 2, 2, 217, 45, 216, 131, 231, 102, 175, 251, 187, 254, 162, 212, 9, 41, + 153, 188, 110, 101, 70, 37, 73, 94, 240, 20, 92, 21, 89, 26, 191, 154, 219, 67, 175, 203, 253, 42, 83, + 112, 158, 58, 114, 238, 154, 217, 14, 212, 112, 168, 138, 200, 18, 71, 155, 139, 69, 137, 64, 47, 210, 248, + 145, 159, 58, 129, 14, 225, 235, 35, 70, 19, 99, 19, 120, 194, 0, 5, 126, 122, 95, 189, 98, 34, 171, + 229, 71, 244, 138, 153, 176, 209, 34, 76, 19, 86, 19, 88, 232, 170, 232, 6, 217, 234, 92, 109, 145, 200, + 70, 222, 171, 30, 237, 221, 42, 224, 102, 121, 115, 197, 17, 20, 92, 100, 171, 41, 190, 236, 122, 202, 95, + 11, 198, 177, 204, 48, 150, 183, 149, 144, 212, 182, 121, 86, 119, 48, 20, 141, 168, 158, 158, 202, 90, 214, + 24, 144, 88, 182, 19, 13, 105, 16, 99, 80, 68, 205, 191, 198, 85, 241, 203, 71, 121, 184, 198, 21, 188, + 189, 171, 61, 131, 75, 185, 26, 156, 228, 222, 207, 3, 232, 202, 8, 229, 31, 200, 52, 61, 243, 248, 219, 
+ 114, 136, 5, 233, 136, 235, 82, 87, 231, 41, 239, 132, 205, 144, 44, 139, 99, 217, 145, 249, 86, 159, 238, + 82, 130, 36, 33, 54, 105, 142, 52, 157, 193, 223, 118, 29, 16, 43, 114, 214, 84, 159, 25, 60, 25, 218, + 196, 15, 22, 220, 162, 138, 174, 96, 188, 49, 172, 204, 106, 140, 91, 199, 17, 242, 145, 96, 179, 34, 189, + 178, 224, 235, 245, 132, 159, 87, 57, 182, 174, 229, 102, 211, 86, 6, 210, 250, 33, 228, 41, 201, 155, 172, + 40, 5, 27, 41, 134, 224, 232, 97, 87, 219, 28, 122, 219, 12, 224, 66, 11, 161, 22, 106, 130, 222, 38, + 145, 134, 175, 184, 135, 93, 146, 224, 61, 28, 119, 138, 59, 136, 143, 24, 206, 52, 250, 99, 195, 71, 26, + 163, 29, 201, 97, 252, 230, 88, 160, 229, 234, 217, 97, 242, 161, 228, 204, 98, 75, 81, 155, 51, 184, 123, + 50, 26, 120, 50, 222, 50, 23, 79, 179, 183, 175, 182, 227, 27, 164, 181, 158, 126, 4, 229, 252, 206, 81, + 65, 89, 230, 179, 82, 54, 64, 229, 147, 44, 34, 139, 36, 98, 244, 56, 12, 133, 229, 156, 249, 82, 163, + 77, 120, 142, 79, 92, 251, 109, 173, 94, 194, 226, 43, 181, 118, 253, 215, 204, 23, 26, 161, 253, 206, 251, + 159, 163, 46, 246, 114, 201, 243, 66, 20, 251, 91, 9, 188, 25, 39, 82, 22, 146, 26, 232, 222, 252, 81, + 228, 238, 250, 212, 61, 75, 3, 155, 129, 64, 45, 102, 222, 172, 194, 133, 227, 35, 135, 142, 144, 21, 100, + 42, 75, 254, 68, 249, 83, 102, 98, 51, 24, 58, 114, 214, 146, 0, 80, 20, 66, 205, 95, 21, 39, 150, + 87, 65, 221, 153, 44, 223, 195, 180, 90, 63, 232, 177, 27, 52, 122, 95, 251, 235, 4, 209, 253, 109, 154, + 238, 198, 9, 161, 172, 68, 24, 87, 113, 100, 209, 227, 86, 230, 42, 251, 203, 8, 43, 60, 44, 145, 159, + 24, 102, 226, 114, 96, 107, 103, 103, 187, 136, 217, 25, 2, 92, 57, 89, 180, 109, 236, 8, 119, 59, 130, + 182, 143, 1, 238, 71, 164, 78, 17, 139, 31, 32, 203, 201, 223, 89, 148, 23, 129, 222, 19, 243, 139, 10, + 236, 107, 83, 141, 10, 130, 158, 10, 72, 215, 54, 157, 114, 158, 6, 19, 74, 132, 142, 66, 14, 246, 47, + 252, 34, 104, 238, 142, 130, 228, 73, 236, 1, 63, 239, 32, 190, 81, 45, 46, 
55, 208, 94, 126, 134, 149, + 64, 230, 6, 104, 39, 121, 183, 110, 29, 229, 42, 80, 138, 200, 64, 100, 184, 103, 11, 190, 51, 221, 201, + 146, 71, 27, 192, 68, 157, 199, 56, 218, 151, 161, 127, 7, 217, 173, 200, 60, 207, 137, 27, 211, 88, 67, + 41, 26, 131, 186, 16, 109, 216, 54, 143, 58, 74, 68, 165, 79, 5, 235, 133, 58, 155, 81, 177, 52, 95, + 50, 174, 62, 11, 234, 204, 224, 230, 251, 151, 144, 15, 215, 60, 95, 41, 21, 168, 239, 21, 251, 21, 60, + 88, 212, 200, 140, 58, 196, 248, 242, 191, 92, 222, 166, 106, 122, 61, 94, 201, 63, 128, 163, 154, 225, 50, + 29, 26, 81, 138, 177, 56, 180, 54, 188, 139, 92, 141, 111, 251, 60, 14, 108, 218, 225, 37, 193, 54, 158, + 112, 159, 252, 156, 108, 115, 181, 201, 230, 119, 32, 57, 125, 206, 125, 126, 56, 142, 11, 196, 241, 61, 94, + 91, 52, 239, 60, 5, 207, 169, 35, 47, 116, 178, 223, 89, 55, 122, 29, 12, 34, 217, 238, 160, 253, 39, + 158, 75, 214, 216, 127, 52, 129, 233, 31, 145, 121, 196, 85, 243, 2, 155, 38, 31, 75, 11, 46, 65, 151, + 9, 33, 27, 58, 165, 132, 24, 3, 128, 58, 118, 131, 108, 5, 137, 5, 192, 40, 193, 86, 106, 185, 91, + 161, 146, 85, 115, 131, 216, 203, 249, 54, 157, 198, 72, 101, 114, 248, 71, 113, 163, 184, 82, 185, 87, 68, + 179, 222, 59, 75, 212, 183, 170, 248, 236, 227, 41, 248, 115, 99, 30, 202, 68, 217, 180, 50, 65, 168, 23, + 14, 21, 167, 146, 234, 213, 60, 11, 108, 159, 74, 195, 218, 181, 94, 61, 75, 42, 92, 52, 205, 56, 26, + 219, 33, 208, 154, 131, 163, 185, 145, 91, 120, 35, 43, 112, 59, 96, 84, 48, 130, 78, 184, 144, 32, 166, + 68, 172, 148, 196, 41, 12, 205, 85, 252, 66, 29, 56, 159, 81, 88, 177, 91, 86, 238, 39, 12, 129, 6, + 27, 212, 233, 9, 175, 3, 140, 45, 65, 75, 48, 243, 239, 107, 55, 63, 10, 91, 121, 245, 147, 59, 157, + 38, 118, 158, 244, 204, 130, 125, 183, 25, 206, 46, 130, 162, 238, 130, 253, 87, 102, 102, 59, 173, 36, 249, + 216, 46, 75, 185, 60, 149, 188, 93, 98, 6, 161, 102, 213, 69, 1, 72, 215, 75, 189, 6, 164, 201, 217, + 99, 113, 170, 103, 246, 230, 45, 66, 36, 81, 90, 68, 85, 
227, 76, 30, 108, 167, 83, 95, 47, 107, 223, + 235, 133, 91, 199, 66, 70, 55, 152, 0, 185, 166, 219, 209, 144, 43, 111, 113, 151, 228, 157, 190, 173, 16, + 53, 79, 102, 171, 3, 44, 119, 151, 165, 163, 97, 34, 250, 159, 159, 161, 238, 38, 205, 137, 3, 169, 7, + 18, 67, 56, 115, 166, 116, 187, 220, 93, 179, 162, 198, 214, 86, 61, 14, 254, 186, 53, 226, 6, 200, 55, + 227, 202, 78, 87, 123, 121, 246, 133, 132, 223, 191, 124, 168, 162, 167, 101, 20, 138, 250, 115, 87, 151, 127, + 52, 37, 126, 251, 193, 131, 38, 77, 40, 56, 199, 161, 222, 205, 240, 116, 198, 186, 129, 236, 81, 94, 159, + 48, 94, 208, 52, 137, 82, 52, 3, 219, 109, 242, 16, 143, 183, 111, 10, 108, 54, 166, 119, 70, 118, 101, + 206, 136, 209, 181, 2, 253, 149, 166, 139, 232, 71, 157, 220, 89, 222, 71, 44, 177, 194, 14, 55, 166, 71, + 158, 154, 187, 40, 119, 170, 46, 150, 16, 201, 146, 23, 121, 212, 38, 184, 7, 55, 181, 46, 188, 152, 53, + 86, 30, 85, 65, 96, 107, 75, 61, 40, 181, 108, 215, 64, 81, 172, 149, 128, 148, 53, 90, 67, 135, 13, + 192, 198, 235, 39, 202, 62, 94, 184, 95, 176, 25, 23, 121, 15, 182, 182, 169, 67, 153, 39, 89, 50, 144, + 184, 97, 132, 127, 217, 75, 125, 79, 53, 18, 191, 105, 183, 22, 199, 111, 254, 140, 138, 11, 249, 147, 107, + 121, 41, 55, 225, 101, 109, 214, 220, 244, 99, 182, 143, 147, 166, 194, 190, 129, 218, 244, 116, 229, 169, 222, + 190, 78, 42, 201, 16, 4, 165, 23, 253, 254, 229, 238, 3, 201, 133, 104, 213, 17, 129, 215, 58, 76, 193, + 232, 242, 43, 196, 145, 215, 203, 66, 71, 167, 191, 14, 41, 29, 81, 21, 160, 26, 90, 7, 9, 219, 186, + 84, 12, 71, 200, 23, 55, 118, 115, 155, 61, 235, 8, 107, 147, 62, 224, 96, 246, 23, 149, 81, 67, 56, + 21, 12, 248, 158, 25, 7, 16, 112, 127, 82, 31, 184, 243, 207, 151, 47, 93, 69, 242, 208, 192, 52, 250, + 84, 78, 237, 217, 83, 10, 163, 232, 121, 22, 3, 203, 164, 106, 203, 153, 145, 157, 156, 156, 78, 164, 47, + 232, 56, 151, 195, 3, 218, 246, 10, 162, 150, 207, 185, 236, 128, 82, 221, 16, 161, 15, 248, 209, 110, 19, + 135, 50, 22, 190, 96, 
222, 24, 144, 133, 127, 174, 130, 13, 15, 78, 115, 233, 62, 111, 161, 30, 200, 26, + 141, 47, 240, 49, 243, 11, 50, 94, 231, 242, 37, 195, 93, 8, 186, 102, 195, 31, 253, 134, 219, 139, 56, + 244, 165, 192, 242, 204, 165, 99, 228, 87, 27, 45, 67, 84, 46, 225, 205, 121, 205, 186, 28, 2, 248, 249, + 90, 113, 214, 183, 84, 160, 148, 42, 183, 146, 189, 191, 81, 153, 150, 25, 44, 87, + }; + uint8_t data[9 * 2 * 16 * 32] = { + 125, 85, 69, 166, 119, 70, 157, 170, 14, 11, 225, 215, 182, 243, 153, 117, 91, 88, 206, 18, 118, 25, 5, + 101, 134, 166, 32, 103, 107, 0, 0, 0, 165, 36, 16, 204, 216, 242, 63, 244, 187, 78, 56, 206, 35, 78, + 88, 218, 9, 46, 177, 106, 153, 204, 37, 71, 136, 242, 96, 175, 127, 217, 0, 0, 84, 236, 222, 119, 11, + 137, 45, 72, 134, 212, 31, 64, 177, 53, 246, 64, 11, 169, 229, 65, 90, 22, 222, 208, 115, 63, 124, 143, + 185, 88, 0, 0, 250, 254, 28, 208, 87, 18, 113, 149, 72, 251, 16, 241, 158, 35, 190, 210, 6, 60, 27, + 172, 221, 148, 82, 213, 42, 175, 244, 138, 200, 116, 0, 0, 90, 208, 216, 224, 236, 3, 219, 218, 109, 21, + 58, 51, 194, 158, 207, 249, 241, 213, 30, 134, 122, 54, 199, 230, 62, 180, 205, 200, 22, 30, 0, 0, 201, + 66, 131, 81, 242, 147, 13, 238, 4, 97, 83, 170, 194, 44, 65, 11, 12, 160, 230, 136, 85, 248, 161, 212, + 77, 74, 108, 151, 205, 185, 0, 0, 42, 249, 23, 67, 23, 188, 231, 39, 237, 48, 32, 122, 216, 14, 125, + 110, 190, 42, 110, 118, 18, 249, 106, 196, 91, 202, 185, 67, 182, 223, 0, 0, 35, 45, 103, 189, 132, 146, + 120, 95, 99, 90, 136, 110, 0, 245, 97, 27, 77, 47, 106, 4, 31, 221, 178, 211, 14, 79, 6, 168, 42, + 233, 0, 0, 194, 64, 108, 123, 11, 144, 243, 80, 146, 107, 251, 178, 167, 215, 42, 92, 180, 174, 102, 107, + 4, 58, 251, 76, 139, 187, 16, 140, 161, 186, 0, 0, 76, 248, 233, 153, 243, 119, 115, 123, 72, 103, 241, + 66, 114, 170, 109, 190, 181, 183, 251, 84, 32, 2, 141, 27, 13, 2, 197, 234, 181, 72, 0, 0, 108, 69, + 74, 63, 69, 30, 26, 194, 46, 182, 194, 156, 234, 1, 84, 34, 249, 58, 86, 225, 74, 153, 214, 42, 75, + 220, 
140, 253, 187, 195, 0, 0, 1, 1, 151, 249, 60, 48, 100, 191, 141, 247, 21, 61, 59, 167, 46, 76, + 55, 220, 74, 242, 141, 184, 183, 20, 163, 136, 56, 183, 173, 170, 0, 0, 163, 16, 110, 140, 112, 154, 12, + 56, 138, 161, 38, 225, 101, 120, 250, 48, 76, 234, 191, 28, 242, 206, 4, 203, 1, 54, 188, 249, 150, 95, + 0, 0, 194, 161, 194, 167, 190, 150, 74, 85, 198, 151, 40, 193, 81, 243, 175, 41, 39, 167, 44, 53, 45, + 244, 62, 153, 34, 24, 223, 7, 29, 234, 0, 0, 146, 132, 179, 75, 37, 71, 10, 102, 190, 169, 118, 10, + 176, 169, 175, 32, 129, 107, 194, 108, 17, 194, 36, 90, 196, 97, 179, 61, 66, 139, 0, 0, 247, 24, 67, + 238, 37, 89, 111, 120, 210, 97, 82, 80, 45, 22, 235, 73, 50, 130, 28, 96, 104, 5, 208, 51, 241, 204, + 200, 33, 147, 5, 0, 0, 138, 2, 219, 162, 246, 7, 17, 29, 222, 103, 72, 137, 20, 39, 156, 19, 87, + 91, 23, 21, 24, 250, 34, 203, 225, 28, 31, 1, 116, 215, 0, 0, 181, 83, 178, 203, 160, 195, 113, 134, + 7, 106, 145, 51, 220, 74, 230, 234, 70, 155, 95, 212, 199, 45, 32, 224, 135, 89, 11, 134, 19, 32, 0, + 0, 145, 105, 42, 244, 203, 59, 4, 180, 105, 34, 222, 94, 26, 110, 194, 30, 64, 67, 227, 240, 101, 98, + 86, 216, 189, 165, 92, 99, 132, 31, 0, 0, 159, 188, 220, 38, 76, 102, 242, 35, 101, 220, 98, 49, 209, + 20, 206, 243, 241, 32, 89, 6, 114, 113, 141, 209, 191, 167, 180, 239, 33, 121, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 188, 72, 162, 20, 169, 165, 42, 111, 162, 108, 189, + 187, 97, 71, 49, 9, 100, 90, 192, 186, 150, 155, 254, 206, 52, 148, 105, 44, 105, 128, 0, 0, 121, 26, + 61, 241, 184, 79, 57, 41, 179, 36, 175, 165, 55, 76, 190, 218, 50, 212, 71, 180, 194, 197, 123, 193, 166, + 141, 188, 52, 18, 189, 0, 0, 6, 233, 227, 185, 68, 169, 12, 186, 169, 109, 148, 139, 181, 49, 211, 9, + 32, 70, 172, 229, 190, 122, 223, 178, 191, 12, 77, 88, 127, 113, 0, 0, 77, 14, 135, 165, 164, 214, 183, + 23, 53, 190, 50, 253, 244, 223, 167, 164, 205, 51, 190, 19, 174, 4, 18, 240, 107, 78, 75, 170, 201, 89, + 0, 0, 124, 153, 98, 25, 128, 110, 69, 134, 120, 107, 74, 87, 16, 47, 62, 195, 89, 37, 97, 126, 41, + 1, 157, 73, 210, 84, 152, 209, 192, 73, 0, 0, 154, 247, 223, 84, 106, 67, 75, 174, 145, 1, 37, 253, + 137, 74, 54, 250, 252, 76, 217, 196, 107, 133, 191, 219, 133, 6, 188, 47, 254, 86, 0, 0, 38, 23, 112, + 68, 207, 244, 168, 25, 1, 103, 73, 116, 95, 45, 25, 241, 201, 186, 150, 242, 151, 198, 183, 30, 67, 221, + 88, 220, 102, 116, 0, 0, 159, 9, 86, 138, 2, 196, 228, 150, 79, 130, 230, 74, 112, 162, 101, 249, 99, + 252, 29, 27, 70, 134, 131, 135, 114, 203, 89, 109, 17, 32, 0, 0, 137, 168, 67, 109, 253, 179, 8, 38, + 51, 86, 180, 31, 82, 197, 86, 143, 153, 19, 36, 180, 225, 148, 25, 144, 5, 214, 180, 225, 235, 149, 0, + 0, 218, 79, 244, 75, 36, 158, 204, 42, 63, 41, 201, 159, 129, 156, 212, 222, 71, 245, 31, 198, 21, 132, + 105, 177, 91, 0, 122, 178, 160, 97, 0, 0, 119, 206, 181, 218, 109, 57, 112, 
47, 174, 236, 77, 78, 97, + 34, 147, 250, 70, 18, 110, 206, 227, 195, 15, 32, 140, 85, 164, 56, 1, 45, 0, 0, 239, 62, 187, 95, + 198, 110, 200, 74, 7, 6, 208, 179, 47, 19, 28, 94, 227, 251, 183, 197, 4, 133, 28, 30, 74, 30, 76, + 143, 109, 116, 0, 0, 53, 96, 113, 37, 4, 126, 57, 101, 208, 97, 206, 8, 215, 222, 236, 12, 199, 176, + 54, 1, 65, 189, 195, 252, 101, 22, 149, 26, 146, 49, 0, 0, 87, 2, 125, 116, 112, 136, 186, 27, 92, + 41, 110, 65, 64, 58, 253, 36, 189, 110, 197, 136, 220, 129, 14, 82, 220, 248, 254, 157, 51, 46, 0, 0, + 243, 84, 240, 172, 6, 58, 124, 140, 69, 110, 158, 111, 246, 138, 251, 232, 139, 239, 38, 210, 92, 114, 217, + 110, 95, 69, 45, 69, 209, 227, 0, 0, 130, 119, 45, 70, 198, 119, 148, 18, 12, 78, 26, 16, 95, 208, + 67, 219, 20, 240, 163, 245, 210, 231, 71, 188, 69, 222, 132, 216, 65, 67, 0, 0, 217, 241, 62, 190, 215, + 106, 27, 115, 56, 78, 115, 34, 98, 121, 12, 128, 100, 29, 190, 185, 68, 23, 75, 21, 253, 67, 151, 190, + 233, 240, 0, 0, 194, 118, 186, 176, 130, 124, 109, 251, 25, 17, 206, 7, 254, 218, 178, 148, 88, 200, 188, + 251, 145, 110, 218, 196, 179, 84, 142, 138, 39, 44, 0, 0, 180, 91, 247, 56, 11, 8, 175, 111, 182, 149, + 97, 134, 160, 199, 152, 186, 12, 232, 9, 135, 121, 91, 145, 172, 67, 176, 85, 238, 106, 158, 0, 0, 101, + 240, 19, 104, 233, 63, 234, 231, 28, 66, 49, 109, 237, 45, 199, 21, 70, 79, 234, 65, 13, 16, 35, 180, + 87, 14, 188, 67, 215, 98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 31, 134, 209, 79, 242, 134, 47, 79, 126, 119, 32, 239, 37, 207, 196, 248, 218, 245, 77, 227, 47, 253, + 40, 59, 159, 41, 240, 59, 178, 8, 0, 0, 238, 2, 148, 48, 174, 140, 24, 26, 89, 4, 172, 244, 120, + 130, 77, 39, 169, 180, 234, 125, 144, 170, 82, 64, 84, 33, 100, 61, 130, 113, 0, 0, 88, 181, 128, 45, + 76, 207, 215, 126, 70, 52, 215, 82, 136, 85, 98, 146, 186, 53, 250, 201, 189, 147, 16, 15, 104, 183, 70, + 207, 14, 19, 0, 0, 104, 135, 52, 246, 42, 135, 3, 132, 102, 99, 143, 23, 37, 42, 59, 134, 196, 135, + 116, 32, 176, 106, 137, 19, 16, 33, 254, 248, 206, 59, 0, 0, 136, 199, 189, 171, 67, 118, 200, 214, 45, + 78, 0, 98, 211, 219, 211, 112, 19, 210, 175, 52, 84, 123, 228, 20, 7, 10, 25, 4, 54, 67, 0, 0, + 175, 105, 216, 112, 96, 165, 216, 153, 245, 143, 167, 140, 25, 114, 141, 31, 152, 114, 191, 150, 155, 217, 214, + 105, 230, 154, 44, 5, 75, 8, 0, 0, 85, 143, 143, 61, 105, 86, 63, 165, 161, 165, 18, 151, 107, 181, + 247, 202, 115, 99, 234, 158, 208, 89, 213, 89, 13, 8, 232, 85, 136, 93, 0, 0, 101, 235, 29, 63, 233, + 194, 29, 79, 125, 140, 119, 122, 69, 223, 222, 18, 33, 212, 59, 12, 213, 155, 213, 39, 66, 212, 199, 170, + 15, 10, 0, 0, 37, 21, 135, 170, 169, 141, 89, 228, 48, 11, 163, 1, 94, 66, 41, 208, 188, 57, 141, + 125, 252, 76, 158, 228, 201, 141, 251, 151, 167, 66, 0, 0, 10, 79, 120, 101, 100, 44, 117, 29, 158, 222, + 216, 140, 190, 247, 128, 119, 171, 107, 51, 
172, 4, 247, 125, 51, 191, 215, 206, 243, 188, 185, 0, 0, 89, + 154, 170, 99, 200, 21, 90, 61, 253, 72, 176, 35, 141, 65, 142, 43, 178, 241, 58, 28, 228, 61, 159, 99, + 143, 192, 160, 56, 201, 95, 0, 0, 108, 73, 227, 31, 96, 232, 205, 109, 141, 3, 202, 251, 48, 224, 22, + 9, 115, 116, 48, 111, 232, 80, 154, 105, 217, 22, 78, 33, 59, 0, 0, 0, 120, 93, 88, 9, 181, 169, + 129, 100, 194, 103, 54, 120, 57, 176, 210, 66, 240, 76, 46, 243, 253, 32, 148, 245, 10, 100, 138, 130, 213, + 35, 0, 0, 194, 153, 209, 140, 19, 87, 128, 82, 13, 226, 191, 203, 145, 118, 129, 24, 82, 129, 154, 100, + 108, 47, 250, 89, 79, 125, 76, 6, 147, 181, 0, 0, 252, 158, 143, 77, 111, 235, 126, 20, 14, 62, 81, + 203, 251, 140, 204, 251, 70, 6, 128, 149, 206, 197, 149, 182, 105, 44, 96, 226, 110, 231, 0, 0, 101, 106, + 73, 93, 132, 231, 174, 164, 94, 177, 102, 139, 132, 169, 80, 23, 251, 109, 19, 77, 23, 72, 24, 116, 169, + 211, 245, 84, 64, 157, 0, 0, 252, 20, 44, 250, 143, 146, 220, 246, 111, 39, 207, 2, 245, 170, 168, 146, + 112, 83, 135, 159, 183, 10, 198, 94, 209, 82, 78, 249, 89, 117, 0, 0, 162, 242, 87, 157, 4, 196, 78, + 101, 88, 76, 28, 64, 22, 8, 124, 18, 188, 209, 112, 45, 75, 223, 154, 70, 202, 23, 167, 108, 31, 7, + 0, 0, 25, 140, 227, 184, 244, 225, 4, 141, 15, 237, 171, 153, 36, 239, 159, 241, 33, 125, 109, 147, 248, + 14, 227, 181, 133, 228, 142, 236, 248, 230, 0, 0, 107, 62, 162, 178, 97, 37, 95, 234, 178, 110, 118, 224, + 219, 92, 76, 61, 48, 15, 227, 23, 226, 222, 142, 59, 194, 158, 32, 1, 87, 125, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 161, 126, 49, 3, 32, 58, 198, 52, 65, + 33, 230, 249, 98, 241, 117, 53, 57, 148, 62, 23, 90, 34, 121, 194, 142, 121, 147, 143, 166, 0, 0, 162, + 242, 222, 133, 251, 216, 181, 102, 29, 2, 190, 246, 195, 113, 252, 200, 78, 122, 105, 15, 241, 152, 76, 203, + 238, 245, 76, 45, 214, 236, 0, 0, 15, 228, 22, 26, 238, 250, 52, 218, 205, 159, 28, 14, 30, 248, 252, + 160, 48, 64, 192, 226, 181, 57, 106, 124, 211, 145, 60, 171, 165, 195, 0, 0, 158, 99, 8, 219, 172, 160, + 108, 45, 199, 92, 177, 237, 47, 234, 184, 105, 114, 63, 250, 238, 69, 30, 227, 182, 51, 68, 237, 246, 66, + 135, 0, 0, 61, 45, 33, 40, 201, 197, 218, 236, 29, 175, 6, 11, 225, 108, 25, 178, 26, 253, 100, 81, + 34, 31, 10, 79, 54, 27, 166, 181, 224, 33, 0, 0, 112, 223, 236, 231, 86, 252, 129, 245, 85, 93, 186, + 90, 147, 80, 124, 130, 41, 181, 31, 106, 254, 169, 150, 123, 112, 102, 221, 222, 113, 174, 0, 0, 101, 67, + 65, 155, 124, 105, 15, 202, 103, 175, 40, 25, 254, 198, 19, 102, 202, 128, 57, 214, 38, 221, 180, 166, 163, + 146, 229, 103, 97, 58, 0, 0, 183, 1, 46, 179, 25, 217, 156, 210, 12, 1, 44, 108, 185, 64, 106, 157, + 59, 40, 169, 145, 182, 74, 134, 39, 153, 44, 84, 132, 131, 98, 0, 0, 100, 88, 209, 233, 197, 93, 159, + 132, 222, 238, 221, 184, 99, 74, 73, 51, 167, 235, 64, 143, 122, 196, 71, 125, 233, 
119, 230, 73, 32, 23, + 0, 0, 195, 227, 227, 246, 182, 224, 138, 61, 240, 180, 34, 73, 60, 213, 191, 82, 132, 89, 174, 231, 2, + 126, 53, 121, 143, 18, 17, 111, 222, 86, 0, 0, 253, 86, 67, 149, 254, 229, 57, 84, 29, 237, 178, 93, + 208, 13, 135, 54, 68, 172, 161, 106, 180, 186, 142, 170, 164, 169, 190, 46, 64, 32, 0, 0, 71, 82, 32, + 203, 160, 147, 175, 218, 87, 193, 150, 159, 90, 13, 4, 13, 37, 183, 137, 32, 201, 9, 56, 180, 6, 165, + 130, 177, 181, 130, 0, 0, 248, 83, 170, 21, 91, 244, 89, 80, 9, 124, 241, 111, 226, 155, 169, 204, 97, + 252, 95, 34, 197, 254, 199, 92, 121, 241, 112, 187, 168, 60, 0, 0, 137, 162, 60, 126, 220, 30, 118, 203, + 214, 152, 42, 73, 211, 200, 141, 176, 25, 133, 243, 64, 23, 243, 231, 234, 133, 177, 47, 151, 193, 151, 0, + 0, 92, 81, 188, 253, 135, 13, 74, 11, 182, 8, 139, 5, 92, 33, 167, 247, 61, 126, 158, 10, 80, 70, + 167, 200, 198, 250, 207, 254, 225, 82, 0, 0, 12, 81, 128, 140, 144, 82, 66, 167, 45, 217, 86, 126, 235, + 240, 105, 133, 220, 181, 64, 53, 147, 243, 209, 134, 4, 135, 171, 251, 12, 207, 0, 0, 245, 199, 126, 6, + 216, 95, 175, 107, 46, 127, 224, 62, 191, 226, 143, 126, 27, 0, 192, 90, 58, 90, 115, 249, 113, 81, 127, + 134, 242, 123, 0, 0, 243, 67, 102, 197, 86, 240, 133, 93, 198, 18, 50, 39, 50, 180, 151, 146, 142, 181, + 220, 186, 200, 223, 52, 41, 29, 127, 86, 197, 89, 173, 0, 0, 223, 181, 42, 26, 189, 28, 24, 85, 214, + 7, 74, 200, 60, 42, 211, 253, 72, 254, 132, 210, 131, 127, 224, 78, 38, 171, 241, 93, 199, 211, 0, 0, + 64, 231, 66, 247, 202, 89, 132, 223, 18, 81, 137, 188, 221, 173, 49, 208, 64, 156, 32, 185, 198, 93, 160, + 174, 51, 76, 169, 253, 139, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 221, 115, 129, 5, 66, 181, 152, 253, 186, 123, 34, 193, 174, 13, 130, 154, 225, 137, 177, 26, 88, + 156, 168, 77, 146, 140, 156, 105, 16, 166, 0, 0, 165, 102, 27, 34, 149, 29, 108, 157, 81, 38, 253, 145, + 75, 254, 253, 194, 139, 105, 161, 10, 48, 158, 143, 81, 103, 217, 206, 244, 59, 109, 0, 0, 51, 164, 33, + 157, 173, 191, 144, 36, 211, 165, 54, 135, 137, 57, 167, 46, 106, 45, 242, 205, 72, 213, 141, 219, 5, 222, + 73, 173, 98, 231, 0, 0, 69, 185, 161, 5, 81, 7, 146, 173, 251, 204, 168, 11, 63, 162, 31, 71, 104, + 192, 250, 163, 176, 55, 142, 163, 175, 28, 134, 133, 27, 138, 0, 0, 36, 101, 98, 120, 154, 29, 46, 189, + 225, 204, 80, 47, 236, 90, 150, 44, 51, 205, 220, 115, 86, 92, 33, 109, 148, 60, 67, 217, 143, 79, 0, + 0, 238, 107, 22, 68, 100, 3, 10, 139, 21, 9, 177, 50, 198, 17, 3, 250, 213, 114, 239, 243, 23, 28, + 164, 96, 181, 27, 78, 151, 3, 194, 0, 0, 204, 185, 113, 118, 170, 91, 23, 20, 143, 243, 217, 146, 81, + 106, 226, 29, 46, 240, 114, 247, 139, 254, 47, 225, 23, 191, 180, 120, 172, 234, 0, 0, 130, 221, 194, 45, + 126, 190, 146, 21, 99, 130, 45, 186, 190, 209, 221, 228, 175, 170, 59, 237, 239, 138, 18, 15, 142, 43, 50, + 78, 169, 179, 0, 0, 
135, 251, 46, 186, 10, 201, 148, 55, 184, 246, 82, 246, 119, 240, 64, 218, 194, 219, + 171, 231, 244, 33, 83, 250, 47, 252, 77, 215, 122, 70, 0, 0, 68, 67, 33, 48, 251, 159, 101, 8, 222, + 143, 104, 86, 154, 197, 105, 48, 90, 139, 9, 91, 91, 35, 123, 29, 64, 215, 219, 188, 127, 67, 0, 0, + 10, 143, 177, 160, 65, 106, 224, 101, 224, 83, 241, 218, 214, 6, 83, 125, 24, 56, 56, 5, 244, 114, 11, + 196, 133, 169, 75, 2, 101, 216, 0, 0, 181, 208, 24, 218, 132, 79, 33, 9, 113, 213, 222, 253, 92, 49, + 247, 133, 21, 134, 124, 173, 49, 83, 15, 212, 238, 34, 142, 241, 154, 55, 0, 0, 201, 27, 141, 25, 130, + 180, 88, 157, 93, 6, 70, 47, 66, 86, 134, 12, 29, 103, 31, 80, 127, 200, 224, 250, 50, 228, 94, 180, + 52, 107, 0, 0, 195, 80, 232, 55, 132, 97, 246, 137, 142, 216, 198, 14, 0, 164, 96, 70, 151, 205, 108, + 82, 102, 249, 12, 206, 167, 117, 204, 214, 169, 13, 0, 0, 15, 198, 218, 226, 207, 123, 12, 237, 29, 5, + 241, 168, 22, 230, 132, 86, 154, 160, 184, 137, 195, 61, 231, 137, 189, 125, 170, 122, 86, 217, 0, 0, 72, + 95, 13, 213, 117, 63, 32, 154, 89, 92, 196, 50, 235, 37, 221, 188, 246, 115, 80, 124, 42, 25, 84, 191, + 10, 113, 80, 91, 82, 150, 0, 0, 148, 33, 47, 34, 26, 10, 179, 249, 185, 218, 187, 91, 5, 17, 10, + 182, 0, 41, 135, 237, 103, 138, 0, 127, 235, 221, 52, 47, 167, 165, 0, 0, 119, 189, 13, 104, 234, 38, + 149, 90, 101, 22, 153, 82, 226, 81, 161, 2, 125, 124, 144, 79, 101, 228, 132, 142, 253, 132, 84, 150, 108, + 109, 0, 0, 74, 237, 139, 132, 13, 38, 45, 24, 147, 142, 5, 235, 244, 194, 215, 109, 11, 220, 222, 103, + 167, 67, 184, 104, 65, 38, 200, 181, 198, 176, 0, 0, 114, 83, 198, 178, 56, 195, 6, 119, 201, 201, 143, + 127, 141, 245, 151, 87, 186, 87, 144, 7, 41, 152, 48, 168, 22, 100, 216, 132, 12, 183, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 95, 26, 42, 8, 253, 208, 240, 172, 34, + 129, 80, 146, 168, 37, 216, 114, 249, 57, 27, 81, 12, 226, 33, 254, 143, 24, 181, 86, 149, 1, 0, 0, + 175, 4, 114, 53, 153, 199, 242, 22, 131, 26, 241, 61, 127, 100, 38, 197, 129, 176, 142, 248, 34, 92, 82, + 162, 216, 62, 146, 246, 87, 110, 0, 0, 89, 205, 83, 156, 57, 50, 230, 28, 179, 144, 98, 117, 105, 127, + 121, 112, 10, 188, 19, 94, 139, 61, 201, 70, 39, 252, 12, 226, 215, 4, 0, 0, 35, 213, 107, 20, 67, + 12, 157, 224, 172, 98, 66, 56, 157, 199, 229, 86, 64, 112, 161, 129, 247, 165, 42, 118, 54, 38, 186, 222, + 185, 196, 0, 0, 32, 110, 136, 105, 206, 117, 91, 174, 11, 13, 88, 229, 58, 132, 135, 68, 186, 184, 178, + 12, 240, 247, 164, 70, 24, 186, 47, 42, 172, 148, 0, 0, 80, 72, 44, 201, 188, 199, 118, 112, 119, 139, + 152, 237, 239, 99, 220, 153, 76, 104, 225, 94, 248, 1, 129, 132, 61, 4, 181, 251, 146, 132, 0, 0, 105, + 68, 94, 101, 95, 48, 238, 11, 119, 199, 166, 69, 161, 73, 32, 21, 189, 230, 0, 216, 25, 73, 76, 147, + 21, 88, 156, 31, 164, 215, 0, 0, 186, 93, 240, 111, 227, 22, 76, 218, 233, 152, 175, 
28, 126, 117, 105, + 136, 171, 65, 225, 0, 129, 150, 187, 81, 130, 41, 86, 186, 193, 132, 0, 0, 41, 248, 147, 136, 150, 65, + 131, 38, 92, 66, 209, 60, 123, 182, 168, 141, 212, 87, 101, 208, 254, 9, 165, 51, 24, 5, 214, 217, 207, + 236, 0, 0, 95, 122, 220, 244, 9, 146, 21, 177, 107, 234, 238, 107, 31, 153, 197, 121, 214, 8, 76, 252, + 51, 34, 100, 201, 235, 238, 226, 24, 113, 55, 0, 0, 33, 7, 27, 97, 29, 27, 103, 33, 108, 211, 239, + 190, 188, 15, 197, 172, 154, 123, 177, 241, 216, 10, 32, 150, 121, 130, 82, 167, 35, 212, 0, 0, 93, 205, + 32, 131, 17, 152, 180, 142, 35, 9, 9, 165, 115, 94, 96, 212, 100, 234, 2, 179, 143, 161, 94, 224, 135, + 51, 120, 53, 226, 89, 0, 0, 248, 47, 33, 193, 149, 196, 158, 193, 81, 237, 164, 222, 227, 168, 52, 1, + 32, 138, 184, 27, 78, 58, 229, 225, 72, 187, 245, 65, 99, 245, 0, 0, 211, 224, 62, 10, 77, 235, 67, + 226, 248, 144, 77, 215, 207, 63, 122, 119, 33, 225, 158, 242, 150, 81, 182, 8, 160, 244, 86, 39, 119, 151, + 0, 0, 127, 248, 33, 172, 13, 187, 33, 245, 23, 45, 247, 32, 109, 136, 65, 163, 2, 213, 196, 147, 235, + 66, 73, 89, 64, 74, 195, 9, 233, 201, 0, 0, 207, 214, 46, 103, 138, 33, 170, 92, 107, 75, 239, 129, + 158, 91, 210, 100, 138, 131, 251, 212, 212, 189, 69, 136, 169, 48, 64, 61, 44, 164, 0, 0, 171, 254, 145, + 187, 107, 84, 63, 104, 20, 176, 187, 202, 70, 224, 43, 144, 46, 181, 230, 173, 193, 170, 5, 221, 74, 24, + 216, 247, 118, 223, 0, 0, 218, 173, 4, 103, 38, 164, 249, 13, 179, 130, 145, 17, 51, 221, 185, 36, 85, + 189, 250, 27, 218, 134, 143, 253, 240, 24, 239, 235, 199, 52, 0, 0, 203, 51, 101, 88, 122, 163, 4, 239, + 17, 56, 83, 159, 67, 3, 206, 67, 204, 63, 250, 172, 169, 3, 232, 194, 236, 252, 221, 198, 136, 130, 0, + 0, 158, 79, 123, 233, 246, 170, 140, 212, 170, 17, 181, 93, 234, 154, 28, 72, 55, 78, 6, 7, 199, 69, + 163, 47, 107, 228, 36, 56, 66, 184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 55, 144, 188, 52, 183, 120, 138, 238, 234, 80, 69, 125, 41, 203, 139, 194, 86, 224, 48, 144, + 21, 52, 139, 29, 49, 87, 41, 145, 195, 75, 0, 0, 240, 30, 142, 92, 174, 66, 28, 253, 72, 185, 207, + 222, 153, 253, 69, 0, 19, 102, 150, 88, 188, 61, 99, 16, 172, 57, 134, 134, 71, 81, 0, 0, 80, 151, + 98, 14, 73, 244, 5, 21, 218, 11, 97, 53, 188, 42, 137, 5, 88, 121, 183, 182, 189, 243, 217, 43, 204, + 182, 224, 175, 26, 155, 0, 0, 75, 141, 98, 239, 120, 172, 211, 157, 69, 212, 62, 197, 110, 83, 64, 126, + 232, 115, 149, 19, 171, 248, 145, 114, 106, 174, 232, 184, 163, 51, 0, 0, 107, 132, 193, 10, 6, 150, 252, + 216, 168, 237, 243, 178, 101, 112, 47, 122, 170, 197, 144, 13, 61, 219, 249, 214, 140, 229, 97, 135, 29, 184, + 0, 0, 172, 99, 250, 13, 26, 13, 135, 55, 8, 122, 102, 2, 70, 158, 210, 95, 232, 17, 212, 105, 131, + 114, 86, 84, 91, 102, 87, 93, 201, 123, 0, 0, 114, 194, 3, 197, 254, 134, 35, 53, 132, 248, 210, 2, + 37, 58, 248, 189, 6, 20, 182, 16, 75, 
136, 159, 159, 199, 211, 219, 146, 97, 50, 0, 0, 165, 234, 211, + 66, 85, 96, 87, 177, 112, 195, 162, 217, 73, 114, 145, 98, 217, 92, 121, 99, 185, 5, 238, 25, 17, 86, + 28, 224, 252, 26, 0, 0, 94, 140, 176, 230, 217, 22, 5, 83, 21, 136, 96, 45, 94, 238, 159, 34, 234, + 100, 86, 80, 26, 233, 82, 60, 242, 6, 122, 61, 230, 120, 0, 0, 178, 34, 78, 39, 208, 61, 138, 8, + 18, 85, 90, 216, 240, 154, 58, 171, 92, 171, 119, 68, 156, 136, 130, 25, 145, 210, 219, 28, 88, 50, 0, + 0, 150, 3, 164, 233, 250, 72, 144, 122, 46, 228, 197, 131, 20, 217, 129, 229, 109, 41, 48, 205, 228, 235, + 36, 218, 96, 250, 12, 119, 160, 222, 0, 0, 197, 224, 119, 191, 195, 160, 190, 71, 102, 94, 105, 231, 92, + 14, 14, 71, 145, 190, 20, 191, 222, 82, 33, 196, 179, 33, 224, 138, 229, 50, 0, 0, 8, 226, 125, 220, + 109, 52, 12, 62, 143, 154, 73, 102, 21, 212, 225, 244, 200, 236, 141, 198, 207, 87, 54, 15, 34, 228, 66, + 59, 234, 23, 0, 0, 195, 179, 161, 136, 55, 235, 148, 105, 55, 57, 191, 175, 89, 112, 235, 138, 70, 122, + 168, 85, 3, 231, 105, 22, 189, 41, 11, 136, 217, 79, 0, 0, 45, 177, 192, 11, 215, 62, 147, 203, 228, + 21, 90, 251, 26, 168, 35, 153, 222, 202, 158, 241, 232, 41, 142, 220, 178, 201, 161, 143, 97, 179, 0, 0, + 244, 68, 241, 11, 235, 205, 39, 122, 94, 115, 32, 187, 191, 138, 70, 176, 171, 95, 158, 203, 202, 239, 52, + 162, 224, 155, 22, 24, 242, 183, 0, 0, 127, 230, 36, 219, 218, 132, 89, 111, 77, 94, 166, 254, 154, 200, + 19, 209, 30, 11, 202, 71, 8, 132, 157, 138, 235, 172, 106, 206, 161, 175, 0, 0, 169, 71, 205, 0, 82, + 97, 39, 250, 173, 10, 246, 162, 219, 18, 99, 34, 237, 198, 90, 121, 229, 205, 193, 174, 245, 40, 130, 52, + 228, 182, 0, 0, 250, 250, 166, 231, 8, 232, 221, 108, 23, 250, 88, 212, 67, 71, 19, 76, 221, 177, 214, + 184, 31, 144, 223, 96, 132, 5, 222, 250, 204, 227, 0, 0, 173, 42, 52, 248, 224, 134, 147, 239, 229, 22, + 228, 9, 175, 155, 120, 19, 42, 204, 24, 198, 200, 44, 118, 188, 159, 27, 38, 99, 98, 27, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 164, 139, 94, 201, 250, 100, 95, 251, + 230, 217, 17, 10, 34, 134, 11, 200, 165, 151, 248, 138, 112, 241, 34, 2, 131, 54, 183, 167, 26, 184, 0, + 0, 181, 36, 194, 243, 212, 42, 21, 235, 42, 25, 139, 130, 104, 149, 190, 60, 79, 144, 242, 177, 159, 61, + 217, 155, 108, 157, 170, 146, 219, 144, 0, 0, 158, 98, 226, 66, 61, 75, 39, 4, 251, 2, 31, 158, 238, + 64, 51, 207, 5, 15, 191, 56, 252, 94, 238, 38, 5, 198, 248, 234, 33, 32, 0, 0, 126, 244, 43, 20, + 75, 254, 150, 209, 203, 92, 32, 10, 142, 230, 221, 137, 235, 215, 92, 180, 156, 91, 160, 31, 137, 72, 236, + 213, 208, 166, 0, 0, 4, 56, 181, 251, 3, 68, 87, 253, 8, 57, 203, 72, 130, 6, 201, 27, 133, 60, + 222, 54, 108, 52, 253, 75, 5, 101, 227, 60, 154, 68, 0, 0, 229, 12, 118, 91, 155, 249, 65, 109, 43, + 89, 201, 215, 228, 104, 146, 211, 58, 95, 166, 188, 115, 239, 39, 11, 192, 114, 41, 11, 131, 172, 
0, 0, + 252, 133, 253, 9, 129, 83, 221, 154, 60, 180, 223, 54, 73, 39, 71, 88, 155, 41, 106, 139, 181, 60, 158, + 46, 40, 248, 248, 108, 163, 148, 0, 0, 206, 229, 215, 188, 64, 102, 153, 238, 44, 109, 89, 157, 236, 121, + 27, 67, 81, 21, 122, 92, 201, 5, 75, 65, 193, 71, 115, 159, 185, 196, 0, 0, 81, 156, 204, 25, 45, + 98, 44, 198, 145, 236, 148, 114, 1, 183, 192, 41, 177, 168, 61, 141, 230, 207, 214, 151, 86, 113, 99, 74, + 145, 41, 0, 0, 65, 249, 23, 39, 102, 51, 223, 9, 159, 8, 23, 158, 63, 110, 68, 26, 52, 239, 94, + 111, 119, 169, 216, 9, 106, 163, 30, 195, 91, 12, 0, 0, 89, 82, 26, 82, 222, 24, 195, 161, 24, 119, + 129, 6, 239, 29, 157, 131, 95, 21, 201, 251, 32, 35, 127, 33, 185, 184, 202, 218, 120, 205, 0, 0, 230, + 163, 161, 22, 172, 58, 180, 172, 102, 59, 222, 19, 32, 229, 199, 186, 50, 251, 63, 60, 57, 47, 52, 27, + 91, 82, 68, 181, 35, 85, 0, 0, 179, 77, 253, 146, 194, 114, 90, 68, 226, 130, 19, 74, 190, 42, 56, + 16, 174, 21, 128, 14, 125, 116, 129, 58, 161, 185, 217, 94, 43, 252, 0, 0, 82, 120, 206, 26, 133, 214, + 63, 24, 114, 182, 243, 132, 81, 80, 218, 109, 62, 60, 163, 108, 206, 178, 233, 165, 146, 87, 180, 61, 112, + 66, 0, 0, 54, 142, 251, 232, 227, 146, 232, 87, 96, 143, 139, 142, 45, 138, 151, 216, 11, 88, 154, 218, + 125, 223, 31, 132, 85, 68, 50, 75, 59, 29, 0, 0, 64, 79, 159, 222, 35, 0, 177, 113, 107, 1, 10, + 66, 46, 200, 161, 54, 234, 212, 225, 225, 126, 89, 145, 24, 115, 179, 65, 42, 96, 56, 0, 0, 229, 92, + 163, 252, 135, 80, 27, 100, 103, 238, 236, 14, 55, 64, 127, 143, 204, 200, 50, 37, 56, 55, 121, 3, 131, + 222, 168, 92, 84, 159, 0, 0, 147, 251, 46, 81, 142, 20, 52, 209, 103, 71, 107, 246, 208, 100, 7, 58, + 224, 140, 29, 193, 142, 122, 196, 128, 216, 59, 23, 52, 48, 81, 0, 0, 44, 109, 246, 228, 144, 66, 122, + 227, 187, 164, 83, 47, 94, 184, 217, 74, 230, 58, 26, 54, 11, 29, 85, 58, 203, 75, 14, 205, 130, 88, + 0, 0, 34, 173, 114, 238, 21, 205, 95, 86, 136, 78, 141, 252, 126, 103, 173, 68, 251, 196, 81, 158, 196, + 12, 243, 118, 
249, 212, 21, 56, 78, 177, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 91, 107, 46, 93, 246, 133, 173, 205, 14, 168, 56, 52, 181, 158, 188, 148, 182, 105, 244, + 4, 43, 186, 149, 93, 106, 207, 144, 243, 165, 248, 0, 0, 86, 55, 130, 98, 230, 91, 16, 137, 254, 162, + 199, 3, 2, 154, 152, 53, 182, 183, 99, 165, 196, 84, 81, 69, 203, 185, 133, 11, 192, 249, 0, 0, 238, + 63, 162, 6, 45, 199, 53, 3, 186, 167, 161, 219, 253, 187, 53, 90, 169, 22, 182, 23, 145, 12, 67, 242, + 153, 236, 127, 50, 242, 90, 0, 0, 39, 10, 238, 161, 66, 66, 79, 169, 53, 101, 222, 109, 149, 40, 86, + 67, 67, 199, 143, 253, 215, 71, 56, 208, 145, 128, 174, 94, 204, 113, 0, 0, 12, 91, 130, 102, 36, 70, + 102, 7, 226, 20, 205, 242, 166, 119, 30, 135, 153, 111, 147, 254, 203, 200, 21, 192, 157, 82, 130, 231, 165, + 214, 0, 0, 129, 121, 253, 213, 81, 55, 171, 18, 6, 138, 240, 
16, 139, 170, 85, 13, 39, 254, 166, 229, + 66, 23, 12, 52, 156, 221, 13, 242, 99, 183, 0, 0, 6, 245, 87, 69, 90, 152, 3, 67, 200, 250, 116, + 143, 232, 46, 65, 192, 89, 140, 194, 238, 71, 55, 248, 250, 156, 16, 15, 37, 228, 84, 0, 0, 27, 147, + 102, 1, 68, 0, 44, 56, 55, 115, 198, 183, 71, 150, 96, 198, 50, 138, 190, 3, 167, 118, 158, 84, 78, + 161, 78, 195, 87, 160, 0, 0, 212, 59, 102, 72, 85, 185, 119, 115, 227, 87, 186, 111, 157, 16, 107, 235, + 144, 11, 129, 201, 191, 115, 25, 78, 164, 15, 115, 93, 27, 148, 0, 0, 233, 157, 59, 215, 227, 166, 151, + 166, 202, 151, 129, 10, 220, 201, 75, 39, 184, 249, 218, 133, 14, 155, 7, 237, 47, 248, 233, 8, 45, 42, + 0, 0, 9, 38, 173, 75, 76, 219, 165, 116, 78, 127, 236, 108, 89, 146, 61, 202, 97, 147, 244, 104, 41, + 61, 16, 217, 232, 209, 62, 186, 67, 183, 0, 0, 175, 118, 36, 189, 30, 209, 163, 187, 87, 52, 81, 54, + 222, 23, 40, 62, 132, 107, 116, 213, 29, 235, 112, 83, 56, 110, 111, 102, 84, 146, 0, 0, 3, 158, 249, + 6, 108, 144, 97, 220, 123, 37, 94, 166, 71, 121, 181, 94, 127, 121, 229, 17, 81, 8, 127, 10, 151, 19, + 161, 195, 46, 189, 0, 0, 140, 244, 216, 164, 167, 43, 34, 93, 121, 126, 159, 119, 44, 212, 108, 184, 217, + 41, 169, 129, 21, 107, 82, 163, 195, 135, 30, 31, 225, 191, 0, 0, 45, 204, 46, 201, 83, 111, 250, 179, + 246, 251, 48, 70, 177, 38, 215, 95, 75, 55, 222, 215, 160, 147, 31, 232, 3, 50, 200, 253, 205, 81, 0, + 0, 65, 130, 75, 217, 95, 113, 159, 162, 133, 193, 94, 118, 194, 184, 64, 176, 125, 225, 190, 58, 26, 62, + 184, 121, 218, 22, 26, 134, 121, 153, 0, 0, 75, 125, 185, 99, 47, 151, 159, 198, 132, 131, 208, 101, 14, + 7, 81, 25, 79, 101, 78, 76, 90, 224, 243, 22, 246, 190, 141, 219, 205, 150, 0, 0, 48, 183, 60, 113, + 107, 228, 161, 214, 223, 38, 52, 206, 55, 55, 172, 23, 53, 109, 42, 193, 7, 96, 207, 3, 10, 96, 47, + 139, 186, 25, 0, 0, 243, 25, 149, 170, 223, 157, 238, 86, 191, 77, 137, 136, 166, 181, 149, 121, 18, 214, + 201, 232, 9, 246, 151, 203, 162, 222, 240, 56, 28, 44, 0, 0, 239, 206, 188, 
103, 235, 190, 38, 61, 124, + 40, 82, 209, 71, 46, 128, 15, 191, 220, 16, 242, 219, 23, 47, 164, 150, 24, 49, 244, 2, 87, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_HWCN, std::vector({9, 2, 16, 32}), + std::vector({3, 3, 30, 20}), DT_UINT8}; + FormatTransferFracZHwcn transfer; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferFracZHwcn, fp16_1c_1n_pad_cn) { + uint16_t ret[8 * 8 * 3 * 1] = { + 13127, 14445, 15133, 14580, 14605, 15056, 15081, 14421, 14987, 12263, 12622, 12833, 14095, 14596, 15333, 14456, + 15074, 14849, 
13348, 13798, 12663, 14461, 11588, 14507, 12632, 14783, 14086, 14026, 15276, 15278, 14795, 15200, + 15109, 14483, 12120, 14598, 15255, 12325, 11741, 15188, 14898, 13586, 11772, 14704, 13636, 15059, 14681, 14961, + 15070, 13559, 14070, 11768, 12341, 12688, 13874, 11886, 13396, 13482, 14753, 14270, 15218, 14829, 14957, 14860, + 15101, 14467, 15054, 6483, 13929, 14187, 13979, 14475, 14110, 14652, 15293, 15305, 14856, 14307, 14923, 14941, + 15135, 15188, 14778, 13442, 14724, 11632, 15133, 14435, 15289, 13856, 14529, 8967, 11880, 13731, 15354, 13363, + 12063, 13343, 14769, 15251, 15282, 15002, 14695, 9259, 13478, 14118, 13586, 14224, 13911, 15058, 12668, 13077, + 13212, 14749, 12748, 14577, 13872, 13508, 14117, 13519, 14989, 10590, 14688, 15162, 14365, 12335, 12946, 11510, + 15229, 12313, 14507, 13152, 13320, 14705, 13071, 15181, 8469, 15041, 14998, 14824, 14634, 14866, 10316, 14118, + 14719, 14350, 15129, 15233, 13609, 14561, 13526, 14397, 12084, 14020, 14034, 14577, 15041, 13822, 14535, 15320, + 14194, 15112, 11318, 14200, 14878, 13645, 14536, 11995, 15134, 13599, 14490, 12348, 14143, 13132, 14998, 14342, + 10944, 14585, 14425, 15088, 15120, 14816, 14879, 14120, 11740, 15202, 14342, 15044, 14402, 15057, 14616, 14987, + }; + uint16_t data[64 * 1 * 16 * 16] = { + 13127, 14445, 15133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14580, 14605, 15056, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15081, 14421, 14987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12263, 12622, 12833, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14095, 14596, 15333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14456, 15074, 14849, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13348, 13798, 12663, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14461, 11588, 14507, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12632, 14783, 14086, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14026, 15276, 15278, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14795, 15200, 15109, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14483, 12120, 14598, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15255, 12325, 11741, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15188, 14898, 13586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11772, 14704, 13636, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15059, 14681, 14961, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 15070, 13559, 14070, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11768, 12341, 12688, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13874, 11886, 13396, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13482, 14753, 14270, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15218, 14829, 14957, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 14860, 15101, 14467, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15054, 6483, 13929, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14187, 13979, 14475, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14110, 14652, 15293, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15305, 14856, 14307, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14923, 14941, 
15135, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15188, 14778, 13442, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14724, 11632, 15133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14435, 15289, 13856, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14529, 8967, 11880, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13731, 15354, 13363, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12063, 13343, 14769, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15251, 15282, 15002, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14695, 9259, 13478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14118, 13586, 14224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13911, 15058, 12668, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13077, 13212, 14749, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12748, 14577, 13872, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13508, 14117, 13519, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14989, 10590, 14688, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15162, 14365, 12335, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12946, 11510, 15229, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12313, 14507, 13152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13320, 14705, 13071, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15181, 8469, 15041, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14998, 14824, 14634, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14866, 10316, 14118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14719, 14350, 15129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15233, 13609, 14561, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13526, 14397, 12084, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14020, 14034, 14577, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15041, 13822, 14535, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15320, 14194, 15112, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11318, 14200, 14878, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13645, 14536, 11995, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15134, 13599, 14490, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12348, 14143, 13132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14998, 14342, 10944, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14585, 14425, 15088, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15120, 14816, 14879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14120, 11740, 15202, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14342, 15044, 14402, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15057, 14616, 14987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFracZHwcn transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_HWCN, std::vector({64, 1, 16, 16}), + std::vector({8, 8, 3, 1}), DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} +} // namespace formats +} // namespace ge diff --git a/tests/ut/ge/common/format_transfer_fracz_nchw_unittest.cc b/tests/ut/ge/common/format_transfer_fracz_nchw_unittest.cc new file mode 100644 index 00000000..1b363625 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_fracz_nchw_unittest.cc @@ -0,0 +1,10489 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_fracz_nchw.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UtestFormatTransferFraczNchw : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_invalid_data_type) { + uint8_t data[16 * 1 * 32 * 16] = {1}; + + TransArgs args{data, FORMAT_FRACTAL_Z, FORMAT_NCHW, {16, 1, 32, 16}, {1, 1, 4, 4}, DT_UNDEFINED}; + TransResult result; + + FormatTransferFracZNchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_Invalid_src_format_reserved) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_RESERVED, FORMAT_NCHW, {16, 1, 16, 16}, {1, 1, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_invalid_dst_format_reserved) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_RESERVED, {16, 1, 16, 16}, {1, 1, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_invalid_src_shape) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, {16, 1, 1, 16, 16}, {1, 1, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_invalid_src_shape2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), 
FORMAT_FRACTAL_Z, FORMAT_NCHW, {16, 1, -16, 16}, {1, 1, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_invalid_dst_shape) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, {16, 1, 16, 16}, {1, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_invalid_dst_shape2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, {16, 1, 16, 16}, {1, -1, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_invalid_src_dst_shape_relation1) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, {16, 1, 16, 16}, {1, 17, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_invalid_src_dst_shape_relation2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, {16, 1, 16, 16}, {17, 1, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_fp16_success_lt_cube) { + uint16_t data_4d[1 * 1 * 16 * 16] = { + 13040, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[1 * 1 * 1 * 1] = { + 13040, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NCHW, {1, 1, 16, 16}, {1, 1, 1, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZNchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_fp16_success_eq_cube) { + uint16_t data_4d[4 * 1 * 16 * 16] = { + 15321, 14501, 14483, 13936, 15043, 13673, 12307, 13885, 4847, 15263, 13716, 15066, 14419, 15319, 9266, 14930, + 10867, 15258, 15241, 11237, 15191, 15087, 14477, 10812, 15069, 9464, 7729, 14554, 15221, 14669, 14987, 14285, + 15201, 12849, 15322, 15137, 15088, 14748, 13775, 11884, 15230, 14420, 13994, 15304, 14805, 13879, 14491, 14404, + 14787, 15313, 9650, 15062, 12020, 13837, 14360, 14635, 12925, 14525, 14668, 15016, 14857, 13496, 15269, 13929, + 15157, 12904, 14142, 14221, 12612, 15184, 14286, 14212, 14339, 13084, 13186, 14730, 11482, 14712, 14820, 14965, + 13685, 8037, 14072, 13687, 15179, 14509, 14434, 15260, 14824, 13738, 13709, 15131, 11415, 14541, 13876, 14083, + 14844, 15291, 14663, 14954, 14046, 12994, 12355, 12498, 14382, 14003, 13924, 13914, 15342, 13374, 14012, 13714, + 15217, 14495, 9197, 13551, 13981, 11611, 14250, 12013, 14498, 14992, 13532, 11597, 
15354, 15321, 14483, 14470, + 14032, 11620, 14527, 15108, 15105, 14942, 13826, 9073, 14412, 14109, 14718, 13055, 13486, 15285, 14326, 13160, + 13669, 13807, 14516, 13405, 15020, 14336, 13866, 14850, 14987, 11233, 13271, 15030, 13129, 10303, 15201, 12672, + 15010, 11951, 14837, 13946, 10496, 14971, 14106, 13516, 12637, 14084, 13097, 12852, 14811, 12480, 14914, 15073, + 11546, 13817, 14617, 13540, 12857, 14473, 13375, 14729, 13752, 14813, 14286, 12731, 14300, 10109, 14881, 13741, + 14706, 13214, 14164, 14549, 13435, 14038, 15090, 15058, 14382, 14118, 14606, 13358, 14086, 14881, 14289, 14537, + 15347, 10719, 11996, 13391, 14272, 15135, 14728, 15134, 11692, 12862, 12646, 14786, 14944, 14416, 15092, 13003, + 8681, 15041, 14187, 11620, 11693, 14667, 14583, 14792, 14642, 14493, 13267, 13506, 14712, 15291, 15079, 15337, + 14735, 11768, 12961, 14219, 14846, 14494, 14729, 13060, 14464, 15269, 14425, 15333, 13674, 11749, 14606, 11590, + 12886, 13850, 14716, 13941, 14643, 14744, 15163, 15141, 13983, 14858, 12830, 13025, 14988, 14285, 14740, 13352, + 11972, 12316, 13555, 15197, 13984, 15069, 13775, 14626, 12878, 14771, 12878, 15137, 14383, 12753, 13520, 14902, + 13924, 11361, 10655, 9100, 13999, 15220, 15166, 13336, 12293, 10088, 11778, 15081, 14045, 14406, 13509, 9504, + 14371, 13225, 15034, 14703, 14686, 14808, 13976, 9695, 14616, 14631, 14859, 13826, 14998, 15101, 13471, 14673, + 8286, 14988, 13555, 14705, 15105, 14937, 10691, 15195, 7636, 13144, 15128, 14778, 15130, 13607, 13974, 14622, + 13249, 14119, 12877, 14659, 14075, 13603, 13973, 14976, 14296, 11888, 14241, 13299, 15292, 12780, 14819, 15081, + 14631, 14631, 13379, 14471, 14757, 14252, 14349, 15024, 13475, 15320, 13755, 14478, 15228, 13543, 9409, 11631, + 15184, 14076, 15043, 14265, 15241, 11449, 15078, 14865, 12988, 14643, 14339, 14504, 6814, 13461, 15347, 15053, + 13509, 12315, 12104, 14685, 15141, 15253, 14752, 13715, 13940, 13548, 14359, 12752, 7951, 14404, 13046, 14376, + 15334, 14907, 15135, 13383, 13520, 13990, 
13881, 12601, 14975, 13665, 13909, 13901, 12795, 14043, 15240, 15324, + 14427, 14493, 14279, 13421, 14165, 10574, 15358, 13503, 12607, 13488, 13919, 14707, 12374, 15050, 14569, 14277, + 14198, 12343, 13527, 14970, 14988, 14855, 12149, 15119, 13368, 15227, 13476, 14857, 13668, 14744, 13224, 13701, + 13270, 14597, 13880, 13364, 12825, 13980, 10073, 15330, 12665, 14870, 14886, 14702, 14577, 15333, 14759, 15003, + 13233, 11785, 15345, 10633, 15260, 14557, 15030, 9786, 14747, 11463, 11585, 14625, 13729, 14911, 15189, 14083, + 13764, 14552, 10020, 12569, 14805, 11953, 15147, 14240, 13118, 12816, 14696, 13327, 13627, 15095, 14043, 13298, + 14615, 15027, 13456, 14340, 12897, 14207, 13463, 14715, 14580, 13762, 14954, 11439, 15224, 13949, 14756, 14388, + 10292, 12758, 14301, 14076, 15130, 15224, 15297, 13788, 14156, 14302, 14879, 13440, 13880, 11505, 13511, 14961, + 14366, 14674, 14709, 14618, 15082, 9169, 15350, 13438, 12408, 14954, 13990, 15256, 12514, 13144, 13893, 11435, + 13429, 14448, 14499, 12315, 15284, 14809, 13124, 9087, 14867, 14483, 12832, 13040, 14751, 14554, 14490, 14826, + 14355, 15221, 15122, 11546, 14906, 14486, 15225, 10278, 13774, 14724, 11395, 11465, 15244, 13935, 15322, 9695, + 14966, 14081, 13138, 13616, 13736, 12922, 13494, 13992, 15297, 12437, 11768, 14151, 15332, 13332, 14841, 15236, + 14435, 14608, 13683, 15143, 14395, 15319, 14938, 15110, 15236, 11931, 12694, 11947, 14570, 14369, 15144, 14535, + 15048, 14735, 15217, 12554, 13548, 14092, 14719, 15086, 14539, 13947, 11368, 14496, 13421, 12350, 13215, 12610, + 15316, 9328, 14670, 15166, 14062, 15338, 14535, 12646, 13907, 13461, 14725, 14683, 13344, 13222, 14474, 14745, + 14084, 14972, 15212, 12449, 14218, 11659, 15286, 12451, 9711, 12833, 13730, 12460, 13748, 11887, 9794, 15355, + 13291, 11506, 14543, 13526, 12386, 14689, 13129, 12716, 14726, 14135, 12690, 15035, 13825, 15024, 14735, 13333, + 14906, 14092, 14458, 14576, 11812, 14960, 14741, 13067, 15348, 14798, 15101, 9753, 14040, 14780, 12358, 15206, 
+ 15144, 13346, 13120, 13470, 14845, 13907, 15229, 13879, 12299, 14952, 15061, 14391, 15084, 14760, 13896, 14320, + 15338, 14404, 15097, 14815, 13002, 13366, 15333, 14624, 12032, 11321, 14102, 12735, 13433, 12332, 13779, 15070, + 15248, 11753, 12877, 13162, 13740, 14906, 14746, 14618, 13075, 13339, 15016, 14879, 15117, 12455, 14950, 14084, + 12651, 15233, 12770, 12456, 12289, 15170, 12421, 14159, 13430, 14361, 14422, 13700, 14693, 14835, 14234, 14830, + 15197, 14830, 13318, 15217, 13361, 9601, 14396, 14383, 14724, 12701, 12424, 15183, 10020, 13572, 15196, 14493, + 15259, 14687, 13772, 15168, 14343, 12708, 15149, 11266, 13343, 11713, 13761, 14513, 15063, 9882, 14773, 15235, + 13125, 14347, 14993, 14868, 14425, 14803, 14950, 15123, 14736, 11347, 13960, 12695, 13338, 10399, 10862, 15249, + 14968, 15201, 11690, 14989, 14946, 15144, 14526, 14773, 15027, 8041, 12886, 15298, 14655, 13168, 14068, 14810, + 10837, 14681, 12314, 12583, 14510, 14758, 14750, 14545, 15168, 14825, 14803, 11993, 13811, 15269, 13376, 14697, + 13312, 14764, 13857, 14524, 14852, 14172, 13180, 13348, 14413, 11386, 14733, 14822, 14770, 12511, 13369, 14240, + 14831, 15141, 13868, 13562, 15162, 14840, 15129, 13387, 14974, 13710, 14834, 14922, 13966, 13712, 15194, 13610, + 14951, 13562, 14678, 13369, 13551, 12896, 14704, 10316, 14428, 14880, 15264, 13866, 12114, 12500, 13385, 10455, + 14679, 14471, 12361, 15320, 14386, 13844, 14864, 11922, 10933, 13356, 14782, 14598, 13236, 14829, 15231, 14350, + 14550, 14964, 14845, 11692, 15325, 14383, 14090, 14873, 12926, 15343, 13661, 14803, 12968, 13741, 14004, 14699, + 12099, 13531, 14968, 14644, 15170, 15256, 12070, 14853, 11500, 15343, 14004, 12168, 11066, 13855, 10086, 14083, + 14107, 14870, 8556, 12951, 10031, 10387, 14534, 11951, 14015, 10938, 14463, 15349, 14973, 12431, 15119, 13704, + 14527, 12773, 14853, 15148, 13012, 14921, 14187, 14027, 10103, 15266, 13372, 12727, 15100, 14009, 13683, 15234, + 12717, 14650, 14601, 14453, 13799, 14819, 13656, 12366, 15349, 
11187, 9283, 15028, 14000, 14023, 14979, 14105, + 14899, 13435, 11447, 11158, 14662, 12884, 15025, 9574, 15064, 15027, 13888, 15105, 11581, 13353, 13919, 14064, + 12278, 13483, 13835, 13839, 9470, 15330, 14400, 13076, 11430, 13613, 14875, 14824, 14792, 12630, 14557, 14563, + 13672, 14934, 15327, 13292, 15152, 12363, 12970, 14119, 14671, 15097, 14815, 12299, 15154, 15122, 14722, 13551, + }; + uint16_t data[16 * 16 * 2 * 2] = { + 15321, 12886, 10292, 15259, 14501, 13850, 12758, 14687, 14483, 14716, 14301, 13772, 13936, 13941, 14076, 15168, + 15043, 14643, 15130, 14343, 13673, 14744, 15224, 12708, 12307, 15163, 15297, 15149, 13885, 15141, 13788, 11266, + 4847, 13983, 14156, 13343, 15263, 14858, 14302, 11713, 13716, 12830, 14879, 13761, 15066, 13025, 13440, 14513, + 14419, 14988, 13880, 15063, 15319, 14285, 11505, 9882, 9266, 14740, 13511, 14773, 14930, 13352, 14961, 15235, + 10867, 11972, 14366, 13125, 15258, 12316, 14674, 14347, 15241, 13555, 14709, 14993, 11237, 15197, 14618, 14868, + 15191, 13984, 15082, 14425, 15087, 15069, 9169, 14803, 14477, 13775, 15350, 14950, 10812, 14626, 13438, 15123, + 15069, 12878, 12408, 14736, 9464, 14771, 14954, 11347, 7729, 12878, 13990, 13960, 14554, 15137, 15256, 12695, + 15221, 14383, 12514, 13338, 14669, 12753, 13144, 10399, 14987, 13520, 13893, 10862, 14285, 14902, 11435, 15249, + 15201, 13924, 13429, 14968, 12849, 11361, 14448, 15201, 15322, 10655, 14499, 11690, 15137, 9100, 12315, 14989, + 15088, 13999, 15284, 14946, 14748, 15220, 14809, 15144, 13775, 15166, 13124, 14526, 11884, 13336, 9087, 14773, + 15230, 12293, 14867, 15027, 14420, 10088, 14483, 8041, 13994, 11778, 12832, 12886, 15304, 15081, 13040, 15298, + 14805, 14045, 14751, 14655, 13879, 14406, 14554, 13168, 14491, 13509, 14490, 14068, 14404, 9504, 14826, 14810, + 14787, 14371, 14355, 10837, 15313, 13225, 15221, 14681, 9650, 15034, 15122, 12314, 15062, 14703, 11546, 12583, + 12020, 14686, 14906, 14510, 13837, 14808, 14486, 14758, 14360, 13976, 15225, 14750, 14635, 9695, 
10278, 14545, + 12925, 14616, 13774, 15168, 14525, 14631, 14724, 14825, 14668, 14859, 11395, 14803, 15016, 13826, 11465, 11993, + 14857, 14998, 15244, 13811, 13496, 15101, 13935, 15269, 15269, 13471, 15322, 13376, 13929, 14673, 9695, 14697, + 15157, 8286, 14966, 13312, 12904, 14988, 14081, 14764, 14142, 13555, 13138, 13857, 14221, 14705, 13616, 14524, + 12612, 15105, 13736, 14852, 15184, 14937, 12922, 14172, 14286, 10691, 13494, 13180, 14212, 15195, 13992, 13348, + 14339, 7636, 15297, 14413, 13084, 13144, 12437, 11386, 13186, 15128, 11768, 14733, 14730, 14778, 14151, 14822, + 11482, 15130, 15332, 14770, 14712, 13607, 13332, 12511, 14820, 13974, 14841, 13369, 14965, 14622, 15236, 14240, + 13685, 13249, 14435, 14831, 8037, 14119, 14608, 15141, 14072, 12877, 13683, 13868, 13687, 14659, 15143, 13562, + 15179, 14075, 14395, 15162, 14509, 13603, 15319, 14840, 14434, 13973, 14938, 15129, 15260, 14976, 15110, 13387, + 14824, 14296, 15236, 14974, 13738, 11888, 11931, 13710, 13709, 14241, 12694, 14834, 15131, 13299, 11947, 14922, + 11415, 15292, 14570, 13966, 14541, 12780, 14369, 13712, 13876, 14819, 15144, 15194, 14083, 15081, 14535, 13610, + 14844, 14631, 15048, 14951, 15291, 14631, 14735, 13562, 14663, 13379, 15217, 14678, 14954, 14471, 12554, 13369, + 14046, 14757, 13548, 13551, 12994, 14252, 14092, 12896, 12355, 14349, 14719, 14704, 12498, 15024, 15086, 10316, + 14382, 13475, 14539, 14428, 14003, 15320, 13947, 14880, 13924, 13755, 11368, 15264, 13914, 14478, 14496, 13866, + 15342, 15228, 13421, 12114, 13374, 13543, 12350, 12500, 14012, 9409, 13215, 13385, 13714, 11631, 12610, 10455, + 15217, 15184, 15316, 14679, 14495, 14076, 9328, 14471, 9197, 15043, 14670, 12361, 13551, 14265, 15166, 15320, + 13981, 15241, 14062, 14386, 11611, 11449, 15338, 13844, 14250, 15078, 14535, 14864, 12013, 14865, 12646, 11922, + 14498, 12988, 13907, 10933, 14992, 14643, 13461, 13356, 13532, 14339, 14725, 14782, 11597, 14504, 14683, 14598, + 15354, 6814, 13344, 13236, 15321, 13461, 13222, 
14829, 14483, 15347, 14474, 15231, 14470, 15053, 14745, 14350, + 14032, 13509, 14084, 14550, 11620, 12315, 14972, 14964, 14527, 12104, 15212, 14845, 15108, 14685, 12449, 11692, + 15105, 15141, 14218, 15325, 14942, 15253, 11659, 14383, 13826, 14752, 15286, 14090, 9073, 13715, 12451, 14873, + 14412, 13940, 9711, 12926, 14109, 13548, 12833, 15343, 14718, 14359, 13730, 13661, 13055, 12752, 12460, 14803, + 13486, 7951, 13748, 12968, 15285, 14404, 11887, 13741, 14326, 13046, 9794, 14004, 13160, 14376, 15355, 14699, + 13669, 15334, 13291, 12099, 13807, 14907, 11506, 13531, 14516, 15135, 14543, 14968, 13405, 13383, 13526, 14644, + 15020, 13520, 12386, 15170, 14336, 13990, 14689, 15256, 13866, 13881, 13129, 12070, 14850, 12601, 12716, 14853, + 14987, 14975, 14726, 11500, 11233, 13665, 14135, 15343, 13271, 13909, 12690, 14004, 15030, 13901, 15035, 12168, + 13129, 12795, 13825, 11066, 10303, 14043, 15024, 13855, 15201, 15240, 14735, 10086, 12672, 15324, 13333, 14083, + 15010, 14427, 14906, 14107, 11951, 14493, 14092, 14870, 14837, 14279, 14458, 8556, 13946, 13421, 14576, 12951, + 10496, 14165, 11812, 10031, 14971, 10574, 14960, 10387, 14106, 15358, 14741, 14534, 13516, 13503, 13067, 11951, + 12637, 12607, 15348, 14015, 14084, 13488, 14798, 10938, 13097, 13919, 15101, 14463, 12852, 14707, 9753, 15349, + 14811, 12374, 14040, 14973, 12480, 15050, 14780, 12431, 14914, 14569, 12358, 15119, 15073, 14277, 15206, 13704, + 11546, 14198, 15144, 14527, 13817, 12343, 13346, 12773, 14617, 13527, 13120, 14853, 13540, 14970, 13470, 15148, + 12857, 14988, 14845, 13012, 14473, 14855, 13907, 14921, 13375, 12149, 15229, 14187, 14729, 15119, 13879, 14027, + 13752, 13368, 12299, 10103, 14813, 15227, 14952, 15266, 14286, 13476, 15061, 13372, 12731, 14857, 14391, 12727, + 14300, 13668, 15084, 15100, 10109, 14744, 14760, 14009, 14881, 13224, 13896, 13683, 13741, 13701, 14320, 15234, + 14706, 13270, 15338, 12717, 13214, 14597, 14404, 14650, 14164, 13880, 15097, 14601, 14549, 13364, 14815, 14453, + 
13435, 12825, 13002, 13799, 14038, 13980, 13366, 14819, 15090, 10073, 15333, 13656, 15058, 15330, 14624, 12366, + 14382, 12665, 12032, 15349, 14118, 14870, 11321, 11187, 14606, 14886, 14102, 9283, 13358, 14702, 12735, 15028, + 14086, 14577, 13433, 14000, 14881, 15333, 12332, 14023, 14289, 14759, 13779, 14979, 14537, 15003, 15070, 14105, + 15347, 13233, 15248, 14899, 10719, 11785, 11753, 13435, 11996, 15345, 12877, 11447, 13391, 10633, 13162, 11158, + 14272, 15260, 13740, 14662, 15135, 14557, 14906, 12884, 14728, 15030, 14746, 15025, 15134, 9786, 14618, 9574, + 11692, 14747, 13075, 15064, 12862, 11463, 13339, 15027, 12646, 11585, 15016, 13888, 14786, 14625, 14879, 15105, + 14944, 13729, 15117, 11581, 14416, 14911, 12455, 13353, 15092, 15189, 14950, 13919, 13003, 14083, 14084, 14064, + 8681, 13764, 12651, 12278, 15041, 14552, 15233, 13483, 14187, 10020, 12770, 13835, 11620, 12569, 12456, 13839, + 11693, 14805, 12289, 9470, 14667, 11953, 15170, 15330, 14583, 15147, 12421, 14400, 14792, 14240, 14159, 13076, + 14642, 13118, 13430, 11430, 14493, 12816, 14361, 13613, 13267, 14696, 14422, 14875, 13506, 13327, 13700, 14824, + 14712, 13627, 14693, 14792, 15291, 15095, 14835, 12630, 15079, 14043, 14234, 14557, 15337, 13298, 14830, 14563, + 14735, 14615, 15197, 13672, 11768, 15027, 14830, 14934, 12961, 13456, 13318, 15327, 14219, 14340, 15217, 13292, + 14846, 12897, 13361, 15152, 14494, 14207, 9601, 12363, 14729, 13463, 14396, 12970, 13060, 14715, 14383, 14119, + 14464, 14580, 14724, 14671, 15269, 13762, 12701, 15097, 14425, 14954, 12424, 14815, 15333, 11439, 15183, 12299, + 13674, 15224, 10020, 15154, 11749, 13949, 13572, 15122, 14606, 14756, 15196, 14722, 11590, 14388, 14493, 13551, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NCHW, {4, 1, 16, 16}, {16, 16, 2, 2}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZNchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for 
(int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, UNSUPPORTED); +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_fp16_success_gt_cube) { + uint16_t data_4d[8 * 2 * 16 * 16] = { + 14154, 14684, 14727, 13722, 14690, 14935, 14421, 13953, 12940, 12530, 14800, 14640, 14945, 13325, 9403, 14385, + 13906, 13803, 13931, 15357, 14369, 14980, 14535, 5279, 15317, 13421, 14772, 14603, 10823, 14745, 15225, 15067, + 12558, 15054, 10046, 14110, 13129, 11599, 9248, 14644, 10818, 14568, 14743, 14885, 14629, 12260, 12808, 15242, + 11947, 14247, 12952, 11328, 13163, 15182, 14328, 14944, 12718, 13768, 15312, 12814, 15297, 14043, 14478, 14790, + 14411, 14373, 14325, 14025, 14205, 15186, 13849, 15307, 14734, 13643, 15281, 14473, 5762, 13974, 6694, 15220, + 15079, 13535, 14741, 8934, 14764, 13322, 14812, 14932, 14580, 14064, 15027, 14373, 14620, 15236, 15246, 12821, + 8629, 7304, 14729, 10175, 13538, 14521, 11603, 13960, 14852, 15138, 14708, 11553, 13516, 15351, 14635, 14532, + 11791, 14493, 13742, 15049, 13081, 14433, 13545, 14912, 14482, 15320, 13218, 15265, 13532, 12787, 13425, 14713, + 14760, 15347, 13096, 11612, 14251, 15237, 12356, 15276, 14359, 14245, 13778, 14509, 15278, 14521, 15065, 14704, + 13425, 12037, 14441, 15194, 14650, 14131, 14811, 14373, 10475, 15121, 11375, 9445, 11907, 14355, 14583, 15262, + 14939, 15243, 15056, 12781, 15003, 13862, 13329, 14617, 11336, 14337, 14778, 14716, 8625, 13549, 13190, 15293, + 15098, 14455, 15180, 15082, 14121, 12136, 13707, 13597, 13483, 15292, 15171, 15220, 13349, 13013, 15070, 14766, + 14707, 13540, 13779, 14457, 13684, 13347, 14087, 12587, 12677, 11717, 15282, 12870, 14710, 14482, 14167, 14831, + 14669, 15125, 13674, 15151, 14981, 13730, 14962, 13548, 15286, 15083, 13917, 13323, 14693, 14461, 15077, 15229, + 14385, 
14464, 13659, 11526, 14506, 14463, 14427, 13615, 11809, 14998, 15025, 15115, 13023, 14517, 13778, 14345, + 13024, 13770, 15184, 13291, 13454, 14736, 14465, 13678, 14183, 11796, 14389, 13549, 15297, 14870, 11984, 15230, + 14956, 12907, 15241, 13404, 14466, 15211, 13506, 5438, 14443, 13097, 15153, 14254, 13329, 14648, 15295, 12227, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11800, 12358, 14316, 12598, 13583, 8102, 14411, 14479, 14557, 15017, 15153, 11356, 14136, 8613, 15125, 13679, + 11053, 13226, 11435, 15073, 15220, 14361, 13055, 12462, 15119, 13677, 11813, 14653, 9952, 13571, 13645, 14280, + 14501, 13499, 14900, 15211, 14420, 15268, 15025, 14933, 13993, 15303, 14487, 12617, 13585, 14460, 12358, 13565, + 12293, 13924, 14311, 14576, 13357, 12506, 11957, 15203, 13746, 15329, 14796, 12237, 14906, 15050, 12760, 13392, + 12750, 11969, 14920, 14600, 15327, 14775, 7403, 13519, 15348, 15103, 11304, 11048, 14523, 13798, 13774, 14755, + 13771, 14988, 11779, 14298, 14563, 14384, 13986, 15323, 14718, 14860, 14085, 12370, 14686, 14467, 15030, 13812, + 13382, 12618, 14118, 14452, 11174, 11473, 14109, 14605, 12979, 14736, 15207, 14737, 15193, 13449, 13957, 14396, + 13852, 1878, 14919, 13765, 14563, 14505, 13745, 14634, 15087, 14368, 13795, 14069, 15353, 14643, 12714, 12792, + 14056, 
14923, 14819, 11435, 14751, 14441, 14160, 10889, 9986, 7205, 14862, 14598, 14741, 14325, 12374, 14987, + 13673, 14869, 11179, 14934, 14436, 10938, 14941, 8729, 13266, 14625, 13982, 8387, 14026, 12480, 14168, 8390, + 14696, 8866, 13506, 14369, 14029, 14936, 14969, 15066, 13177, 15282, 13286, 15056, 14178, 13413, 15308, 6845, + 14532, 12612, 10677, 13312, 11024, 14404, 14585, 13696, 13914, 14397, 12951, 15074, 14445, 14755, 15024, 15270, + 14463, 14953, 13485, 14672, 14036, 12445, 12533, 13715, 14323, 14783, 11897, 12776, 11327, 14698, 14690, 13010, + 13831, 13249, 12859, 10272, 13878, 14753, 14122, 12725, 10978, 15320, 14607, 14884, 14836, 15070, 13231, 9284, + 14372, 14985, 13348, 14939, 14916, 15302, 15311, 14139, 13058, 14226, 13932, 11407, 12343, 14710, 5722, 8516, + 15113, 14820, 14658, 12285, 13870, 15056, 15086, 13957, 15162, 13755, 15124, 14350, 15099, 14817, 14727, 13388, + 11990, 14651, 15344, 11849, 14892, 12450, 12593, 13974, 14782, 15105, 13778, 13788, 15151, 13586, 14046, 14400, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13512, 14640, 14461, 10825, 13981, 12378, 13832, 14227, 13920, 12225, 10915, 14182, 14444, 13064, 15072, 14836, + 14973, 14923, 15184, 14532, 14095, 9147, 14710, 12578, 15043, 14200, 13632, 14806, 14299, 13382, 14640, 14747, + 14917, 12732, 
14266, 12630, 12611, 14319, 11974, 15103, 13543, 14535, 13446, 15240, 13313, 14928, 14465, 12020, + 13141, 13175, 14724, 14817, 14639, 13378, 14696, 12602, 14982, 12295, 13923, 12940, 12651, 11972, 15324, 14715, + 14990, 15083, 14750, 8474, 13334, 14199, 11237, 11631, 13509, 15045, 14897, 9206, 13220, 14569, 15204, 7383, + 14189, 14549, 12888, 15104, 13199, 11908, 14612, 13819, 13813, 12517, 10472, 12990, 15176, 14045, 13634, 15000, + 9699, 14619, 13175, 14292, 15082, 14888, 13390, 12366, 14212, 10265, 14337, 14022, 13839, 13570, 14608, 14819, + 11608, 12516, 11311, 14933, 15186, 14829, 15227, 10570, 15091, 14722, 10043, 12823, 14933, 15355, 14393, 13497, + 13333, 14904, 14567, 14895, 12612, 13737, 11376, 13772, 14523, 10706, 12092, 13318, 14824, 13816, 14837, 11674, + 14373, 12621, 14442, 14424, 14388, 13065, 14722, 14355, 15340, 12170, 12517, 15266, 14767, 14043, 14630, 14440, + 11406, 14385, 14981, 14970, 12614, 12085, 15304, 12188, 12739, 14061, 11147, 14850, 11490, 14949, 13713, 7462, + 14444, 15019, 14319, 13325, 5092, 10389, 15093, 14802, 14628, 14786, 13637, 13629, 12970, 10894, 12485, 11866, + 14129, 14570, 15027, 14723, 15047, 14604, 14319, 14614, 13108, 13319, 13199, 14501, 14840, 11856, 13794, 14902, + 13609, 13467, 14848, 12820, 14397, 14863, 14685, 15285, 14472, 14739, 14583, 13708, 15077, 14041, 14212, 13567, + 14074, 14695, 13370, 9930, 14304, 15315, 14821, 13994, 13469, 12706, 12675, 14357, 14028, 13777, 14463, 13905, + 13754, 15008, 14980, 13714, 13490, 15058, 13770, 14504, 14316, 14734, 14575, 14604, 11725, 11796, 12995, 15170, + 14396, 14583, 14839, 15070, 14791, 14068, 10384, 10719, 12585, 14690, 14779, 13934, 15314, 12019, 15067, 14448, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10308, 14741, 12058, 14995, 14428, 14722, 15321, 15183, 14091, 13848, 13365, 12050, 14716, 13000, 14552, 12681, + 11216, 15252, 13499, 12581, 15181, 14955, 13063, 13897, 14016, 15309, 13825, 15228, 14840, 13324, 13516, 13078, + 15144, 15016, 14905, 14395, 12703, 14347, 13033, 11565, 14654, 14098, 14281, 14443, 13940, 9779, 14714, 12614, + 14573, 12248, 14439, 12143, 14548, 7240, 14180, 14096, 15004, 14926, 14948, 11338, 13864, 15138, 14975, 14767, + 13601, 13063, 15104, 13401, 11289, 15045, 14177, 13620, 11498, 15131, 12756, 14399, 11377, 15101, 12778, 12935, + 13883, 15294, 14741, 14532, 13800, 9177, 14953, 15249, 13365, 14995, 14838, 13245, 12433, 13683, 11853, 14642, + 13543, 15183, 12583, 14399, 15031, 14006, 10820, 14875, 13137, 13705, 13612, 15317, 15242, 15101, 15248, 14775, + 14142, 13980, 14424, 15318, 12946, 10844, 15148, 13020, 12949, 14393, 15021, 15026, 14548, 14016, 13323, 12955, + 14877, 12928, 14541, 14669, 14096, 12731, 14645, 14491, 12676, 13304, 15165, 13506, 14748, 13327, 13579, 14431, + 14351, 13225, 12845, 12674, 14701, 14949, 12380, 13545, 13713, 12862, 14987, 13983, 12618, 15124, 15326, 15170, + 14030, 13322, 14443, 14960, 10340, 14820, 9438, 14923, 11185, 10851, 14603, 14517, 14264, 14507, 13530, 14177, + 12951, 15351, 11045, 15065, 11293, 14979, 14865, 14845, 14958, 14534, 11270, 14390, 14793, 13774, 12444, 13114, + 13725, 14625, 12422, 13912, 13766, 13048, 14569, 10447, 13123, 14987, 7305, 13692, 15024, 12935, 11960, 14928, + 14260, 13770, 14945, 14897, 13331, 14123, 9608, 14570, 14778, 14530, 13492, 
15281, 15088, 11678, 12623, 14531, + 13388, 12533, 10379, 14162, 15100, 12815, 8714, 14044, 12414, 15087, 13566, 11750, 13354, 13371, 13660, 14537, + 15021, 15228, 14674, 14424, 12422, 14996, 13398, 14737, 14667, 15152, 14512, 14338, 14463, 14789, 13383, 13544, + 15161, 14430, 15167, 13795, 14443, 7770, 14507, 13644, 14576, 11515, 13661, 15097, 15248, 15031, 12337, 15007, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12041, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14750, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14571, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14750, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15295, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14736, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13354, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14898, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14673, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12841, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14384, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14768, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14181, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13577, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10846, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12467, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14377, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14213, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14975, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14173, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14416, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14626, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14876, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11506, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14122, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14363, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14442, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14404, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14554, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14322, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13762, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13481, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14396, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14777, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14808, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13968, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14766, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14684, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13635, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14036, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12037, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14479, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13976, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11516, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14177, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15172, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14726, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13388, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13888, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14665, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14499, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12528, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15072, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10487, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[17 * 17 * 2 * 2] = { + 14154, 11800, 13512, 10308, 14684, 12358, 14640, 14741, 14727, 14316, 14461, 12058, 13722, 12598, 10825, 14995, + 14690, 13583, 13981, 14428, 14935, 8102, 
12378, 14722, 14421, 14411, 13832, 15321, 13953, 14479, 14227, 15183, + 12940, 14557, 13920, 14091, 12530, 15017, 12225, 13848, 14800, 15153, 10915, 13365, 14640, 11356, 14182, 12050, + 14945, 14136, 14444, 14716, 13325, 8613, 13064, 13000, 9403, 15125, 15072, 14552, 14385, 13679, 14836, 12681, + 12041, 10846, 10269, 13635, 13906, 11053, 14973, 11216, 13803, 13226, 14923, 15252, 13931, 11435, 15184, 13499, + 15357, 15073, 14532, 12581, 14369, 15220, 14095, 15181, 14980, 14361, 9147, 14955, 14535, 13055, 14710, 13063, + 5279, 12462, 12578, 13897, 15317, 15119, 15043, 14016, 13421, 13677, 14200, 15309, 14772, 11813, 13632, 13825, + 14603, 14653, 14806, 15228, 10823, 9952, 14299, 14840, 14745, 13571, 13382, 13324, 15225, 13645, 14640, 13516, + 15067, 14280, 14747, 13078, 14750, 14315, 14404, 14036, 12558, 14501, 14917, 15144, 15054, 13499, 12732, 15016, + 10046, 14900, 14266, 14905, 14110, 15211, 12630, 14395, 13129, 14420, 12611, 12703, 11599, 15268, 14319, 14347, + 9248, 15025, 11974, 13033, 14644, 14933, 15103, 11565, 10818, 13993, 13543, 14654, 14568, 15303, 14535, 14098, + 14743, 14487, 13446, 14281, 14885, 12617, 15240, 14443, 14629, 13585, 13313, 13940, 12260, 14460, 14928, 9779, + 12808, 12358, 14465, 14714, 15242, 13565, 12020, 12614, 14240, 14129, 14554, 12037, 11947, 12293, 13141, 14573, + 14247, 13924, 13175, 12248, 12952, 14311, 14724, 14439, 11328, 14576, 14817, 12143, 13163, 13357, 14639, 14548, + 15182, 12506, 13378, 7240, 14328, 11957, 14696, 14180, 14944, 15203, 12602, 14096, 12718, 13746, 14982, 15004, + 13768, 15329, 12295, 14926, 15312, 14796, 13923, 14948, 12814, 12237, 12940, 11338, 15297, 14906, 12651, 13864, + 14043, 15050, 11972, 15138, 14478, 12760, 15324, 14975, 14790, 13392, 14715, 14767, 13541, 12467, 14322, 14479, + 14411, 12750, 14990, 13601, 14373, 11969, 15083, 13063, 14325, 14920, 14750, 15104, 14025, 14600, 8474, 13401, + 14205, 15327, 13334, 11289, 15186, 14775, 14199, 15045, 13849, 7403, 11237, 14177, 15307, 13519, 11631, 13620, + 
14734, 15348, 13509, 11498, 13643, 15103, 15045, 15131, 15281, 11304, 14897, 12756, 14473, 11048, 9206, 14399, + 5762, 14523, 13220, 11377, 13974, 13798, 14569, 15101, 6694, 13774, 15204, 12778, 15220, 14755, 7383, 12935, + 14571, 14351, 13762, 13976, 15079, 13771, 14189, 13883, 13535, 14988, 14549, 15294, 14741, 11779, 12888, 14741, + 8934, 14298, 15104, 14532, 14764, 14563, 13199, 13800, 13322, 14384, 11908, 9177, 14812, 13986, 14612, 14953, + 14932, 15323, 13819, 15249, 14580, 14718, 13813, 13365, 14064, 14860, 12517, 14995, 15027, 14085, 10472, 14838, + 14373, 12370, 12990, 13245, 14620, 14686, 15176, 12433, 15236, 14467, 14045, 13683, 15246, 15030, 13634, 11853, + 12821, 13812, 15000, 14642, 14750, 14377, 13481, 11516, 8629, 13382, 9699, 13543, 7304, 12618, 14619, 15183, + 14729, 14118, 13175, 12583, 10175, 14452, 14292, 14399, 13538, 11174, 15082, 15031, 14521, 11473, 14888, 14006, + 11603, 14109, 13390, 10820, 13960, 14605, 12366, 14875, 14852, 12979, 14212, 13137, 15138, 14736, 10265, 13705, + 14708, 15207, 14337, 13612, 11553, 14737, 14022, 15317, 13516, 15193, 13839, 15242, 15351, 13449, 13570, 15101, + 14635, 13957, 14608, 15248, 14532, 14396, 14819, 14775, 15295, 14213, 14396, 14177, 11791, 13852, 11608, 14142, + 14493, 1878, 12516, 13980, 13742, 14919, 11311, 14424, 15049, 13765, 14933, 15318, 13081, 14563, 15186, 12946, + 14433, 14505, 14829, 10844, 13545, 13745, 15227, 15148, 14912, 14634, 10570, 13020, 14482, 15087, 15091, 12949, + 15320, 14368, 14722, 14393, 13218, 13795, 10043, 15021, 15265, 14069, 12823, 15026, 13532, 15353, 14933, 14548, + 12787, 14643, 15355, 14016, 13425, 12714, 14393, 13323, 14713, 12792, 13497, 12955, 14736, 14975, 14777, 15172, + 14760, 14056, 13333, 14877, 15347, 14923, 14904, 12928, 13096, 14819, 14567, 14541, 11612, 11435, 14895, 14669, + 14251, 14751, 12612, 14096, 15237, 14441, 13737, 12731, 12356, 14160, 11376, 14645, 15276, 10889, 13772, 14491, + 14359, 9986, 14523, 12676, 14245, 7205, 10706, 13304, 13778, 14862, 
12092, 15165, 14509, 14598, 13318, 13506, + 15278, 14741, 14824, 14748, 14521, 14325, 13816, 13327, 15065, 12374, 14837, 13579, 14704, 14987, 11674, 14431, + 13354, 14173, 14808, 14726, 13425, 13673, 14373, 14351, 12037, 14869, 12621, 13225, 14441, 11179, 14442, 12845, + 15194, 14934, 14424, 12674, 14650, 14436, 14388, 14701, 14131, 10938, 13065, 14949, 14811, 14941, 14722, 12380, + 14373, 8729, 14355, 13545, 10475, 13266, 15340, 13713, 15121, 14625, 12170, 12862, 11375, 13982, 12517, 14987, + 9445, 8387, 15266, 13983, 11907, 14026, 14767, 12618, 14355, 12480, 14043, 15124, 14583, 14168, 14630, 15326, + 15262, 8390, 14440, 15170, 10879, 14416, 13164, 13388, 14939, 14696, 11406, 14030, 15243, 8866, 14385, 13322, + 15056, 13506, 14981, 14443, 12781, 14369, 14970, 14960, 15003, 14029, 12614, 10340, 13862, 14936, 12085, 14820, + 13329, 14969, 15304, 9438, 14617, 15066, 12188, 14923, 11336, 13177, 12739, 11185, 14337, 15282, 14061, 10851, + 14778, 13286, 11147, 14603, 14716, 15056, 14850, 14517, 8625, 14178, 11490, 14264, 13549, 13413, 14949, 14507, + 13190, 15308, 13713, 13530, 15293, 6845, 7462, 14177, 14898, 14626, 13393, 13888, 15098, 14532, 14444, 12951, + 14455, 12612, 15019, 15351, 15180, 10677, 14319, 11045, 15082, 13312, 13325, 15065, 14121, 11024, 5092, 11293, + 12136, 14404, 10389, 14979, 13707, 14585, 15093, 14865, 13597, 13696, 14802, 14845, 13483, 13914, 14628, 14958, + 15292, 14397, 14786, 14534, 15171, 12951, 13637, 11270, 15220, 15074, 13629, 14390, 13349, 14445, 12970, 14793, + 13013, 14755, 10894, 13774, 15070, 15024, 12485, 12444, 14766, 15270, 11866, 13114, 14673, 14876, 15099, 14665, + 14707, 14463, 14129, 13725, 13540, 14953, 14570, 14625, 13779, 13485, 15027, 12422, 14457, 14672, 14723, 13912, + 13684, 14036, 15047, 13766, 13347, 12445, 14604, 13048, 14087, 12533, 14319, 14569, 12587, 13715, 14614, 10447, + 12677, 14323, 13108, 13123, 11717, 14783, 13319, 14987, 15282, 11897, 13199, 7305, 12870, 12776, 14501, 13692, + 14710, 11327, 14840, 15024, 
14482, 14698, 11856, 12935, 14167, 14690, 13794, 11960, 14831, 13010, 14902, 14928, + 12841, 11506, 13082, 14364, 14669, 13831, 13609, 14260, 15125, 13249, 13467, 13770, 13674, 12859, 14848, 14945, + 15151, 10272, 12820, 14897, 14981, 13878, 14397, 13331, 13730, 14753, 14863, 14123, 14962, 14122, 14685, 9608, + 13548, 12725, 15285, 14570, 15286, 10978, 14472, 14778, 15083, 15320, 14739, 14530, 13917, 14607, 14583, 13492, + 13323, 14884, 13708, 15281, 14693, 14836, 15077, 15088, 14461, 15070, 14041, 11678, 15077, 13231, 14212, 12623, + 15229, 9284, 13567, 14531, 14384, 14122, 13968, 14499, 14385, 14372, 14074, 13388, 14464, 14985, 14695, 12533, + 13659, 13348, 13370, 10379, 11526, 14939, 9930, 14162, 14506, 14916, 14304, 15100, 14463, 15302, 15315, 12815, + 14427, 15311, 14821, 8714, 13615, 14139, 13994, 14044, 11809, 13058, 13469, 12414, 14998, 14226, 12706, 15087, + 15025, 13932, 12675, 13566, 15115, 11407, 14357, 11750, 13023, 12343, 14028, 13354, 14517, 14710, 13777, 13371, + 13778, 5722, 14463, 13660, 14345, 8516, 13905, 14537, 14768, 14363, 14128, 12528, 13024, 15113, 13754, 15021, + 13770, 14820, 15008, 15228, 15184, 14658, 14980, 14674, 13291, 12285, 13714, 14424, 13454, 13870, 13490, 12422, + 14736, 15056, 15058, 14996, 14465, 15086, 13770, 13398, 13678, 13957, 14504, 14737, 14183, 15162, 14316, 14667, + 11796, 13755, 14734, 15152, 14389, 15124, 14575, 14512, 13549, 14350, 14604, 14338, 15297, 15099, 11725, 14463, + 14870, 14817, 11796, 14789, 11984, 14727, 12995, 13383, 15230, 13388, 15170, 13544, 14181, 13801, 14766, 15072, + 14956, 11990, 14396, 15161, 12907, 14651, 14583, 14430, 15241, 15344, 14839, 15167, 13404, 11849, 15070, 13795, + 14466, 14892, 14791, 14443, 15211, 12450, 14068, 7770, 13506, 12593, 10384, 14507, 5438, 13974, 10719, 13644, + 14443, 14782, 12585, 14576, 13097, 15105, 14690, 11515, 15153, 13778, 14779, 13661, 14254, 13788, 13934, 15097, + 13329, 15151, 15314, 15248, 14648, 13586, 12019, 15031, 15295, 14046, 15067, 12337, 12227, 14400, 
14448, 15007, + 13577, 14442, 14684, 10487, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NCHW, {8, 2, 16, 16}, {17, 17, 2, 2}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZNchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_fp32_success_lt_cube) { + float data_4d[1 * 1 * 16 * 16] = { + 0.09754133710736013, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + float data[1 * 1 * 1 * 1] = { + 0.09754133710736013, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NCHW, {1, 1, 16, 16}, {1, 1, 1, 1}, DT_FLOAT}; + TransResult result; + + FormatTransferFracZNchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, fracz_to_nchw_fp32_success_eq_cube) { + float data_4d[4 * 1 * 16 * 16] = { + 0.8168560091594788, 0.9509842896547284, 0.5833092887893205, 0.6589822147276274, 0.47045074330618286, + 0.9281547983684513, 0.600092769881412, 0.17898594440529647, 0.8576318058687813, 0.938720727628625, + 0.8033558006015729, 0.24200277760696154, 0.6186692952513718, 0.835520484071766, 0.4311907295387437, + 0.617409884383429, 0.814295189333189, 0.9794132570935358, 0.06066024635820566, 0.7862193019409862, + 0.15602380534563587, 0.5061528494608266, 0.023818815576188035, 0.8383178948092223, 0.006568502107600405, + 0.10855668820302111, 0.6390430740654722, 0.7105474488440885, 0.18442298188011697, 0.39633873084569105, + 0.02203410245430748, 0.9585879381347081, 0.6253312439146503, 0.8797792374489929, 0.564915192256268, + 0.17059305879396147, 0.13449603025595858, 0.5132140444074369, 0.888281188122505, 0.7458979631897087, + 0.20784801035172418, 0.5316210502378305, 0.264930003499788, 0.822790801630084, 0.1275310727535136, + 0.17621014057762763, 0.20121209254119576, 0.9629309059771185, 0.16152061993669253, 
0.9184351717655141, + 0.5768371846556524, 0.09692577674404401, 0.19365802143801403, 0.6713471682467593, 0.4464779971247871, + 0.35841838325964526, 0.8838362049508657, 0.14644641430223437, 0.020600348804988733, 0.5830140247131442, + 0.9120723994416113, 0.2890771263757931, 0.6731885002986142, 0.05978784194051445, 0.49836605146120294, + 0.5361904204459131, 0.7037203835218252, 0.2337470724618712, 0.9359869249295187, 0.6511806581115431, + 0.16960732523446453, 0.6331176515629879, 0.29776350367890303, 0.898087610056731, 0.7050336566805279, + 0.1779294951964464, 0.8407454580002686, 0.9030176530428079, 0.3962354901081948, 0.8480860206951764, + 0.44641475102114236, 0.6189982722879348, 0.5677669260248176, 0.8756562316364245, 0.9218814825285827, + 0.3385841911868406, 0.43452691499150753, 0.43495420630177895, 0.2167458539368593, 0.2747435364989407, + 0.44742375825894276, 0.28845466817065657, 0.8351637336690974, 0.08351581783753559, 0.22109616153818434, + 0.007403305376558755, 0.3096894526703965, 0.07174528411870695, 0.8392748235221433, 0.10064316912490501, + 0.854298828885103, 0.33412876368740774, 0.6625524883774276, 0.5441341649229283, 0.0046000314503452655, + 0.8961701205958728, 0.4738158753347098, 0.6638762364422152, 0.6280238370222847, 0.15693613434765008, + 0.647995408143734, 0.7489801614723965, 0.00681896126863879, 0.3314307419400613, 0.4745661412858506, + 0.26383036751434474, 0.4052284090198067, 0.852589324987285, 0.9548352483736139, 0.13954018638569377, + 0.8944096051223135, 0.4668506648602152, 0.6601070226329898, 0.36285239316259754, 0.17661350678122167, + 0.8132552504971018, 0.4667529963466772, 0.469826848277653, 0.858475203494622, 0.6680567128563278, + 0.5990851321154904, 0.4999661773857067, 0.9946626628204948, 0.8020650217923826, 0.6303835339592258, + 0.976235178750195, 0.5329535915098538, 0.3159470095831284, 0.2252686632477826, 0.8962128673665215, + 0.5509642305707612, 0.6420467128229859, 0.21302728758817935, 0.46137147849437576, 0.24492029694593542, + 
0.3933137970579751, 0.5590786322601159, 0.1663466717888198, 0.3789353856819866, 0.7835470327519002, + 0.5172370168586898, 0.1893090477523638, 0.8628182031087973, 0.48556423350072164, 0.3184362891300201, + 0.3405437685721455, 0.30773451388643114, 0.3310473758050274, 0.26617106280235514, 0.8881836032717928, + 0.0019748406404110908, 0.11411778617269408, 0.8813082291176062, 0.6974321018129322, 0.7107020765056907, + 0.5959190816016348, 0.7285173323071417, 0.11388295655175429, 0.36364630195494363, 0.430570120947953, + 0.794418360007892, 0.9589303369282219, 0.8049303691996378, 0.20783858732450966, 0.813758326094692, + 0.42831396861230087, 0.8338790183483511, 0.6240323008565776, 0.7998340910323554, 0.9920403620580774, + 0.5709015334284396, 0.05223128642784547, 0.03675975391122588, 0.8923908481198983, 0.38875135912006153, + 0.15633456851623195, 0.47645536361462515, 0.4524629303911627, 0.16560337215822885, 0.20511084091555487, + 0.24422253257338689, 0.5399578206643209, 0.5895995758457725, 0.49512837325997494, 0.9866682191699911, + 0.1565576231389576, 0.11031857761037567, 0.7053415600250933, 0.6070071446106846, 0.5611511345020163, + 0.8381768551013316, 0.24164170248232963, 0.49323005501992434, 0.03687616857370135, 0.3376426574069038, + 0.5267449154082644, 0.8576270193692666, 0.6450736154582144, 0.819101464517182, 0.1144351292725918, + 0.4017280714518162, 0.5556116757224207, 0.9221926500998648, 0.19774518884072245, 0.8122964973474701, + 0.3262859226363999, 0.9107762918587479, 0.7931491228346562, 0.17300525008891987, 0.8896454505060492, + 0.6975214852271043, 0.11085715794712914, 0.5406368884178097, 0.24236996044129, 0.005040450959806875, + 0.5307190997595231, 0.21917947859334652, 0.12836684746170424, 0.04341533031285172, 0.40282978335635267, + 0.10689728015561661, 0.7062597786898408, 0.10060052958019916, 0.7519581440272585, 0.33898387421578013, + 0.7197323638730856, 0.10818828151427162, 0.257128306617908, 0.9431399649274476, 0.3427575441649615, + 0.027590449104072157, 
0.344080675836019, 0.9188712333199375, 0.7774798184435455, 0.4703208696983341, + 0.4877224153511128, 0.9744804142777878, 0.3865392856063704, 0.9086582770947371, 0.8118173580897312, + 0.7127504119865766, 0.5428116981162028, 0.6567208354036572, 0.9941028268677842, 0.7089451172899908, + 0.1291678032399346, 0.8143848013695553, 0.08371796772497964, 0.7572744073194271, 0.7355714422434593, + 0.6474000922419452, 0.3543097445054675, 0.9775931220354745, 0.39195795313344217, 0.734977049152991, + 0.40992200643773524, 0.8956803193572815, 0.5304486284524556, 0.6917120158692178, 0.16698225613025464, + 0.25305871430218674, 0.2773774239009309, 0.2585681607191772, 0.34706972398955294, 0.03412426914877709, + 0.5866199298906816, 0.8611266571199963, 0.5615543838156779, 0.3107543580607147, 0.49795816525650705, + 0.8863226516778276, 0.2299424424942591, 0.43831819986329346, 0.4518885193253971, 0.6383902848395534, + 0.6430345264842787, 0.9609476845062578, 0.8590313057683825, 0.946593498563966, 0.2983608041225252, + 0.7282427899662679, 0.7739583717212988, 0.5164712358293232, 0.3829975311218565, 0.49259771833928856, + 0.164872639082684, 0.7908460033382655, 0.37086685026871635, 0.2832773609739344, 0.5560741120852402, + 0.423183068847399, 0.7638596377247268, 0.7000310278304417, 0.3259451790595508, 0.8949307563709777, + 0.45246027501617325, 0.4285428045299762, 0.4446071658359174, 0.3628421665222693, 0.5001169820074454, + 0.35067001891108707, 0.36640616781084445, 0.1319510785443646, 0.6374188461865751, 0.9619721858079823, + 0.450648239278473, 0.9458594644503746, 0.8368954310764464, 0.5842754845864907, 0.8095185597678237, + 0.8024953167276908, 0.05684876774993508, 0.04398365551438299, 0.3654011515745126, 0.6548055931649895, + 0.3431922859529116, 0.7930336989599233, 0.9661409956880415, 0.9404276716436277, 0.12139618629958815, + 0.8684061415265188, 0.03925836062942434, 0.24546224115198678, 0.4418476578509971, 0.3092537970650777, + 0.6166894942612399, 0.8914552592267662, 0.7601940155928462, 
0.46273264284275795, 0.9910667860131801, + 0.989613199284938, 0.1350031504055178, 0.6601097097467896, 0.707338878990174, 0.35275396575269313, + 0.26552339318174023, 0.03836960346510854, 0.8575456787092489, 0.5490190619411595, 0.7267212712570938, + 0.2762364592057298, 0.701780668337549, 0.2774126338983015, 0.7886402903198428, 0.3112319568588495, + 0.9914504745956655, 0.23751441228513104, 0.7672314578717838, 0.5243418879999135, 0.020762148251404766, + 0.24795198841784238, 0.14033068437521923, 0.9820365012566946, 0.6543938822239167, 0.9640711664489289, + 0.8966714624583073, 0.5377330391683879, 0.31007202504736653, 0.8106485186894752, 0.20714445608926513, + 0.9437903821587161, 0.5964319454038916, 0.5451424408949661, 0.6477227012675425, 0.542586343018901, + 0.06833388853078093, 0.32606464238339183, 0.5581681811335306, 0.3275977545031692, 0.28340841019953833, + 0.7481897117282391, 0.30369871162505524, 0.47988765582449644, 0.7199247952470547, 0.7740570642269338, + 0.3792168788107044, 0.08724745594330285, 0.10057488876028953, 0.9035165171151448, 0.6383129952890263, + 0.5618959248529879, 0.9325368374505515, 0.08110775144153692, 0.0711648088478326, 0.16467109290939597, + 0.14915808899876637, 0.4351685924488591, 0.09918991161233881, 0.26787331851893303, 0.3752180188092137, + 0.9570321603275969, 0.018174267700791447, 0.5354470030366117, 0.08197310494768051, 0.1698909520507832, + 0.36465305001153236, 0.9686532192504956, 0.018478913704299882, 0.7490592279789233, 0.66958227563015, + 0.8065219848142055, 0.04008517538279088, 0.872515125292373, 0.4860298654033053, 0.18322168851434328, + 0.6048670557593928, 0.5013006566885004, 0.9184904088200172, 0.11545698833942053, 0.9388634472496009, + 0.5090817884855765, 0.4259104411681456, 0.6001774322912643, 0.7998639326761543, 0.3517388371715816, + 0.22736693792043783, 0.3089134680838309, 0.80724353471677, 0.3305659100205217, 0.7622798133203901, + 0.548658338677721, 0.41969457789024045, 0.8220890665327201, 0.7478312908945212, 
0.8507041436520468, + 0.871258831698886, 0.10663552610035198, 0.9838857866221765, 0.7750037566395708, 0.2589287012836461, + 0.5509778371334433, 0.12181367833394996, 0.7187267185588344, 0.23212767811353918, 0.08317782184394085, + 0.8583710732117895, 0.6114511863557442, 0.9301399797123336, 0.919567831488458, 0.8688510005455988, + 0.4514271103179398, 0.03260164380485031, 0.7919825170425685, 0.8840906648864124, 0.18755046657366148, + 0.3532735787477914, 0.0316474703213836, 0.1128557788135679, 0.8453920161239551, 0.9442933089890817, + 0.7275184011552345, 0.5187615618800065, 0.547374563712834, 0.9316544035727097, 0.17710067883829017, + 0.8461317931422729, 0.6975272247451271, 0.6638162363778531, 0.43962725522043367, 0.9722150986994611, + 0.9924930481370859, 0.48465595076579016, 0.3638622689991824, 0.29202054581835457, 0.836306784595344, + 0.9599966862466585, 0.24014743456248733, 0.057907977353944706, 0.4052971131253651, 0.409528724694306, + 0.016921739543765635, 0.9944957404740558, 0.497678370549514, 0.3109661172605419, 0.65494173043691, + 0.8700685824205098, 0.28898664019111053, 0.1656176601583519, 0.6720371646498426, 0.5068669682523493, + 0.45612210493017646, 0.12612039229185712, 0.6216751159917858, 0.229399433957176, 0.39090320483261953, + 0.42480641493443805, 0.30915922531455675, 0.6718290412296023, 0.6662306327726546, 0.38675267059285845, + 0.5278348214068792, 0.928656700953216, 0.3404487585426379, 0.08665641437850102, 0.7659937015592914, + 0.2481889944626371, 0.0820735670409255, 0.6591059848854349, 0.8575519848132668, 0.4312086049658347, + 0.40734998866870453, 0.9127160055196917, 0.17831086075491986, 0.4987696353380683, 0.24969463334542352, + 0.25387278255025425, 0.45260915183568373, 0.9489936531210268, 0.25731793764593913, 0.549497380000946, + 0.04987042645173634, 0.6176062723559267, 0.4974628172419382, 0.22002087852372565, 0.8478032105864682, + 0.9771214815295913, 0.25763265280560277, 0.9468669882728353, 0.8788003530387016, 0.009612198576430142, + 
0.6015103951914454, 0.07840696980709017, 0.8574066041211744, 0.8512521760763662, 0.6294403926212478, + 0.31422988517015304, 0.03260665549833752, 0.5459645373661592, 0.566558385068647, 0.4394158960380766, + 0.20460252793714095, 0.6301246345403493, 0.959197925681073, 0.13161493994917695, 0.25354386299396436, + 0.3435784686761524, 0.30447875804616154, 0.2641596508968739, 0.18034575029023447, 0.9141084597985908, + 0.2686654682752302, 0.2423135345770029, 0.1369363683392847, 0.9046537057606677, 0.08651204329137108, + 0.587229796493613, 0.14998627678593768, 0.9951223899404943, 0.05284484108795895, 0.7069841606969918, + 0.3457427661753919, 0.5677182815844276, 0.8867620993871848, 0.15609582018314616, 0.8248984097469988, + 0.5215435683614805, 0.7622504327833796, 0.42547271680893395, 0.5963613199856507, 0.07376145722251282, + 0.7431532306702398, 0.8719344267165662, 0.7967791529178645, 0.41533986772266474, 0.655829342011151, + 0.5547342350977146, 0.7346706935343904, 0.31834800355814097, 0.6917315704923358, 0.7974655877028941, + 0.7856595575225411, 0.33875107001435634, 0.2234786808748691, 0.6521145402408668, 0.6726573436211514, + 0.8733807978796556, 0.9621610691368013, 0.34279676512041746, 0.6716812009164921, 0.1403220922375994, + 0.6971779423432034, 0.600741255229763, 0.8852669468778349, 0.635908978571998, 0.17265456191779038, + 0.5999955102379257, 0.6672321014924975, 0.45626209843205523, 0.9458311544541305, 0.6570898489991635, + 0.9047193195927493, 0.7631557896257812, 0.2717021832489478, 0.2372662172825153, 0.17562859770047534, + 0.7519080171095838, 0.545009922910364, 0.061896113732203495, 0.32781141126662117, 0.38143075565845896, + 0.7499092284286264, 0.5997967230277158, 0.5919689972495351, 0.9749273043906194, 0.7129627435131244, + 0.8212988182775148, 0.5283680741464317, 0.9175043929909029, 0.02808909882978694, 0.5160217262690808, + 0.15792767510896566, 0.6904311507714506, 0.5623044553967744, 0.3686699379521433, 0.9022023913778509, + 0.5747619290732661, 0.018612257619440964, 
0.36118967316669814, 0.8403688184533271, 0.09695520335025176, + 0.5400152625182123, 0.9407961067118134, 0.2255517082156655, 0.7439862596864373, 0.3371838848204518, + 0.05583253517461406, 0.8674952421961661, 0.15771469591732024, 0.9328305900348385, 0.6064305145965554, + 0.24440769376582883, 0.9138651868178408, 0.17029307748314682, 0.2627206088303473, 0.4039569145649755, + 0.27976198708603306, 0.9081095529075438, 0.7341822172341462, 0.6535583901663519, 0.9037218781902312, + 0.4132853954269039, 0.8677647022630963, 0.1363785587788613, 0.031726056012278736, 0.6967913663146085, + 0.251338912609961, 0.4504258736016229, 0.7939957881108681, 0.14373999152171169, 0.1345170110145837, + 0.22939821164356122, 0.2783262247792664, 0.5065783963181841, 0.2983062713783934, 0.09420519718693543, + 0.7678426568123674, 0.547205839024813, 0.9173821772312987, 0.9147495466711926, 0.6808591135362148, + 0.9433708812876275, 0.1888338795032064, 0.806439515054873, 0.5478245245285066, 0.06243129077708354, + 0.6738377942022481, 0.39313272333964333, 0.7107772272634887, 0.37140531406236, 0.9752231781906548, + 0.15530983015174504, 0.5083889494703363, 0.45787573647433233, 0.719369064985418, 0.695487795959419, + 0.648910517283219, 0.37255976835344096, 0.5128959126867926, 0.8462728472107226, 0.07161197909954886, + 0.5971577610206942, 0.3474139774062083, 0.17309257680758672, 0.4557553537522939, 0.48640839826996807, + 0.556565593151529, 0.909822162078459, 0.7815653980073078, 0.5558710994447176, 0.939412569745768, + 0.22458732015840954, 0.8854274720474302, 0.6612317372691434, 0.2353120266565194, 0.5431618026688126, + 0.8080683403154149, 0.9649378581633239, 0.5911987082154285, 0.4014005464684842, 0.31309623122426067, + 0.9209060982654941, 0.35675947143505937, 0.997932692240173, 0.05977641223564234, 0.4906372581279175, + 0.12540566557242627, 0.8113504889733711, 0.03439967972502145, 0.6996834889783037, 0.8580431256846641, + 0.29550733460934664, 0.8647558928256841, 0.17166159986929208, 0.2988438217337216, 
0.30685222281881586, + 0.8740648760980403, 0.5733339597519914, 0.30588789631733104, 0.32034957581661505, 0.40447880890518706, + 0.05643993675567216, 0.9841350296823745, 0.0235057813209415, 0.5907496794641183, 0.9938255962978501, + 0.666736772901298, 0.09142543413958681, 0.5867777814038379, 0.10954805879898033, 0.5192786459845283, + 0.577284845201679, 0.4663457531259627, 0.9661498415023864, 0.7418291449321575, 0.6919757941011632, + 0.938162816686544, 0.2574986744080595, 0.5743986035644238, 0.3722004061266849, 0.48731682130765364, + 0.48804003791575845, 0.26568686568088584, 0.3024462081682625, 0.4927729450565037, 0.8208388909384069, + 0.3524966116601721, 0.7699774350342129, 0.3699307222802808, 0.6854571865797977, 0.05342232186562068, + 0.2568140216774778, 0.07986634967315454, 0.42206069298473226, 0.503112433651799, 0.2709998780180116, + 0.8540634740401672, 0.804771765143114, 0.08307842440728763, 0.5855745330669161, 0.20132581514653336, + 0.4733824279238499, 0.5397079569901397, 0.22332659849685244, 0.5681449614592287, 0.34305090560502416, + 0.19045524744723108, 0.14510857090406903, 0.8994852388900417, 0.3606955413718713, 0.03434264356844052, + 0.8425487893633322, 0.14471220280317265, 0.795235579857456, 0.9928577361495664, 0.8944565820128865, + 0.5951646624476978, 0.3387207739399991, 0.5254294787876147, 0.7528483715664833, 0.7724822370664601, + 0.06341949098827515, 0.1104781806479469, 0.2871131014826662, 0.9763926620716555, 0.8365936991760566, + 0.6894468038940823, 0.5516079036449986, 0.46459727852607835, 0.29698554757837436, 0.8641332259921782, + 0.2903577124285951, 0.5222172363827591, 0.2781390218294064, 0.6782975009525524, 0.43142531465001865, + 0.5482135764861943, 0.7958468590852784, 0.4601983363144355, 0.8293797711344699, 0.8218089614461451, + 0.2840468119059041, 0.5435124947402741, 0.292138895069197, 0.7093564706598441, 0.8634789517279025, + 0.7425214561915762, 0.4281558028893979, 0.23760087848913258, 0.12788063731046162, 0.4256176878159583, + 0.879074694521102, 
0.9422873213978823, 0.1972547796962546, 0.7242073256517022, 0.33232344535504643, + 0.5537318465846828, 0.05869543998802962, 0.018360338257800812, 0.11030166295839527, 0.932707044606341, + 0.8123473545874034, 0.5693378326924512, 0.11500464247603825, 0.28180799331414497, 0.8696676356158933, + 0.34191489224832605, 0.8143563939660721, 0.010614668169477537, 0.7342136789417802, 0.8532271966112469, + 0.5748001835729629, 0.259112776070359, 0.5624154624891504, 0.09445036349110147, 0.3181656968422939, + 0.8939438952061383, 0.36760093868600874, 0.12440825480994533, 0.8224276403148182, 0.9440369737584494, + 0.8535593738819319, 0.012696538588551909, 0.39742245686374533, 0.3981006126038902, 0.12670159963685623, + 0.2225579855362756, 0.3639484396143289, 0.0736550465308492, 0.10747766849305518, 0.772278857461047, + 0.6434712254626346, 0.5946129067876548, 0.575837578716988, 0.6174587426513194, 0.9867408326366033, + 0.7658708170703061, 0.06043464109847374, 0.013078794647001901, 0.3219023249140547, 0.6928581061373904, + 0.29883540042062473, 0.6351799737621933, 0.2450520777737294, 0.7173739254073681, 0.2686491080989024, + 0.709877147040917, 0.17955632096592133, 0.5365934363287246, 0.692273457387911, 0.33888954272328764, + 0.2998404098945733, 0.13370452878408368, 0.8849589530269457, 0.43138580638305324, 0.8546451329124286, + 0.3457307101229713, 0.13674533003654288, 0.5925656225040199, 0.37366051705050807, 0.39713227198955114, + 0.8874356560086295, 0.8448286162871339, 0.2999165142924891, 0.6272603347680122, 0.9780875560006471, + 0.19514027598932304, 0.15829426674672442, 0.3554952077139394, 0.5987084697855811, 0.13497880870204337, + 0.9370079887875561, 0.9741398726037899, 0.9947348781247561, 0.5404091154422912, 0.6175609477753102, + 0.6131714181668053, 0.002477181737273959, 0.562280412091289, 0.7526435019202964, 0.3886096650631513, + 0.7821589398562553, 0.4695756803811453, 0.33999744031557955, 0.026417766980760926, 0.425174218763559, + 0.9712645643794963, 0.9696591839902201, 
0.5373884989988515, 0.2875715965653579, 0.5325921358773241, + 0.9375649075751361, 0.7495572291868463, 0.2845140573246496, 0.46222375013708306, 0.84766692046905, + 0.6096220988114378, 0.15281616741547543, 0.8976612157167602, 0.9598524638502606, 0.23176953671336242, + 0.35578130428870847, 0.9120284563249282, 0.7833430085511944, 0.4083339394119787, 0.15267251534727655, + 0.6987791734238211, 0.318356175762636, 0.30599782277384735, 0.4823778703838705, 0.34246199799786725, + 0.45326212230640783, 0.9298832251551966, 0.80478120813917, 0.3842837068723949, 0.33483707922857575, + 0.5349291719254071, 0.6969575869879915, 0.7640239512132223, 0.19254059499782594, 0.0015221041677935254, + 0.411096175108574, 0.07717616484569123, 0.26137439637971194, 0.4751277053825842, 0.1870851529008637, + 0.7035527103294765, 0.22997764922641362, 0.6298971009845201, 0.41996421105921633, 0.24160305460943277, + 0.20894095942263502, 0.34448120238498403, 0.4793234914333593, 0.6012743927518429, 0.7412751836227155, + 0.23117503944651796, 0.7061590551786392, 0.6047964625652399, 0.26804051988359034, 0.20076795111638046, + 0.4676035290247943, 0.1882473105335125, 0.9180400475167111, 0.9989722501677417, 0.1895142045817545, + 0.3578897315238895, 0.7435437908679559, 0.5298733651365005, 0.2304246678026204, 0.6011480070982935, + 0.10039562955799841, 0.8701814345396093, 0.0940865963515104, 0.0003793943320034021, 0.5157733659631437, + 0.8546444054631211, 0.45404164701093686, 0.25421124457554, 0.8613953032846574, 0.7864680405695876, + 0.8470290022128667, 0.9414818442785001, 0.7626297442507252, 0.09357238052156414, 0.12743456850506396, + 0.5938704185634249, 0.7419615280910089, 0.621961090816814, 0.9067310219255523, 0.1360738049917236, + 0.5398831308323078, 0.1684504073334684, 0.7202214965416577, 0.7983453971630412, 0.33506042716085815, + 0.884016368776341, 0.6997701510258648, 0.5994184647706472, 0.24249478773185507, 0.19198624273422238, + 0.14966846053861182, 0.8872570608294355, 0.3925258780570219, 
0.6037222439415791, 0.4108783367719028, + 0.032474872356025974, 0.15007040476252542, 0.5703765565370817, 0.03564049082735243, 0.9251552343127925, + 0.333522021214367, 0.6463190763686415, 0.544050804224951, 0.1428602387810044, + }; + float data[16 * 16 * 2 * 2] = { + 0.8168560091594788, 0.8143848013695553, 0.17831086075491986, 0.5855745330669161, 0.9509842896547284, + 0.08371796772497964, 0.4987696353380683, 0.20132581514653336, 0.5833092887893205, 0.7572744073194271, + 0.24969463334542352, 0.4733824279238499, 0.6589822147276274, 0.7355714422434593, 0.25387278255025425, + 0.5397079569901397, 0.47045074330618286, 0.6474000922419452, 0.45260915183568373, 0.22332659849685244, + 0.9281547983684513, 0.3543097445054675, 0.9489936531210268, 0.5681449614592287, 0.600092769881412, + 0.9775931220354745, 0.25731793764593913, 0.34305090560502416, 0.17898594440529647, 0.39195795313344217, + 0.549497380000946, 0.19045524744723108, 0.8576318058687813, 0.734977049152991, 0.04987042645173634, + 0.14510857090406903, 0.938720727628625, 0.40992200643773524, 0.6176062723559267, 0.8994852388900417, + 0.8033558006015729, 0.8956803193572815, 0.4974628172419382, 0.3606955413718713, 0.24200277760696154, + 0.5304486284524556, 0.22002087852372565, 0.03434264356844052, 0.6186692952513718, 0.6917120158692178, + 0.8478032105864682, 0.8425487893633322, 0.835520484071766, 0.16698225613025464, 0.9771214815295913, + 0.14471220280317265, 0.4311907295387437, 0.25305871430218674, 0.25763265280560277, 0.795235579857456, + 0.617409884383429, 0.2773774239009309, 0.9468669882728353, 0.9928577361495664, 0.814295189333189, + 0.2585681607191772, 0.8788003530387016, 0.8944565820128865, 0.9794132570935358, 0.34706972398955294, + 0.009612198576430142, 0.5951646624476978, 0.06066024635820566, 0.03412426914877709, 0.6015103951914454, + 0.3387207739399991, 0.7862193019409862, 0.5866199298906816, 0.07840696980709017, 0.5254294787876147, + 0.15602380534563587, 0.8611266571199963, 0.8574066041211744, 
0.7528483715664833, 0.5061528494608266, + 0.5615543838156779, 0.8512521760763662, 0.7724822370664601, 0.023818815576188035, 0.3107543580607147, + 0.6294403926212478, 0.06341949098827515, 0.8383178948092223, 0.49795816525650705, 0.31422988517015304, + 0.1104781806479469, 0.006568502107600405, 0.8863226516778276, 0.03260665549833752, 0.2871131014826662, + 0.10855668820302111, 0.2299424424942591, 0.5459645373661592, 0.9763926620716555, 0.6390430740654722, + 0.43831819986329346, 0.566558385068647, 0.8365936991760566, 0.7105474488440885, 0.4518885193253971, + 0.4394158960380766, 0.6894468038940823, 0.18442298188011697, 0.6383902848395534, 0.20460252793714095, + 0.5516079036449986, 0.39633873084569105, 0.6430345264842787, 0.6301246345403493, 0.46459727852607835, + 0.02203410245430748, 0.9609476845062578, 0.959197925681073, 0.29698554757837436, 0.9585879381347081, + 0.8590313057683825, 0.13161493994917695, 0.8641332259921782, 0.6253312439146503, 0.946593498563966, + 0.25354386299396436, 0.2903577124285951, 0.8797792374489929, 0.2983608041225252, 0.3435784686761524, + 0.5222172363827591, 0.564915192256268, 0.7282427899662679, 0.30447875804616154, 0.2781390218294064, + 0.17059305879396147, 0.7739583717212988, 0.2641596508968739, 0.6782975009525524, 0.13449603025595858, + 0.5164712358293232, 0.18034575029023447, 0.43142531465001865, 0.5132140444074369, 0.3829975311218565, + 0.9141084597985908, 0.5482135764861943, 0.888281188122505, 0.49259771833928856, 0.2686654682752302, + 0.7958468590852784, 0.7458979631897087, 0.164872639082684, 0.2423135345770029, 0.4601983363144355, + 0.20784801035172418, 0.7908460033382655, 0.1369363683392847, 0.8293797711344699, 0.5316210502378305, + 0.37086685026871635, 0.9046537057606677, 0.8218089614461451, 0.264930003499788, 0.2832773609739344, + 0.08651204329137108, 0.2840468119059041, 0.822790801630084, 0.5560741120852402, 0.587229796493613, + 0.5435124947402741, 0.1275310727535136, 0.423183068847399, 0.14998627678593768, 0.292138895069197, + 
0.17621014057762763, 0.7638596377247268, 0.9951223899404943, 0.7093564706598441, 0.20121209254119576, + 0.7000310278304417, 0.05284484108795895, 0.8634789517279025, 0.9629309059771185, 0.3259451790595508, + 0.7069841606969918, 0.7425214561915762, 0.16152061993669253, 0.8949307563709777, 0.3457427661753919, + 0.4281558028893979, 0.9184351717655141, 0.45246027501617325, 0.5677182815844276, 0.23760087848913258, + 0.5768371846556524, 0.4285428045299762, 0.8867620993871848, 0.12788063731046162, 0.09692577674404401, + 0.4446071658359174, 0.15609582018314616, 0.4256176878159583, 0.19365802143801403, 0.3628421665222693, + 0.8248984097469988, 0.879074694521102, 0.6713471682467593, 0.5001169820074454, 0.5215435683614805, + 0.9422873213978823, 0.4464779971247871, 0.35067001891108707, 0.7622504327833796, 0.1972547796962546, + 0.35841838325964526, 0.36640616781084445, 0.42547271680893395, 0.7242073256517022, 0.8838362049508657, + 0.1319510785443646, 0.5963613199856507, 0.33232344535504643, 0.14644641430223437, 0.6374188461865751, + 0.07376145722251282, 0.5537318465846828, 0.020600348804988733, 0.9619721858079823, 0.7431532306702398, + 0.05869543998802962, 0.5830140247131442, 0.450648239278473, 0.8719344267165662, 0.018360338257800812, + 0.9120723994416113, 0.9458594644503746, 0.7967791529178645, 0.11030166295839527, 0.2890771263757931, + 0.8368954310764464, 0.41533986772266474, 0.932707044606341, 0.6731885002986142, 0.5842754845864907, + 0.655829342011151, 0.8123473545874034, 0.05978784194051445, 0.8095185597678237, 0.5547342350977146, + 0.5693378326924512, 0.49836605146120294, 0.8024953167276908, 0.7346706935343904, 0.11500464247603825, + 0.5361904204459131, 0.05684876774993508, 0.31834800355814097, 0.28180799331414497, 0.7037203835218252, + 0.04398365551438299, 0.6917315704923358, 0.8696676356158933, 0.2337470724618712, 0.3654011515745126, + 0.7974655877028941, 0.34191489224832605, 0.9359869249295187, 0.6548055931649895, 0.7856595575225411, + 0.8143563939660721, 
0.6511806581115431, 0.3431922859529116, 0.33875107001435634, 0.010614668169477537, + 0.16960732523446453, 0.7930336989599233, 0.2234786808748691, 0.7342136789417802, 0.6331176515629879, + 0.9661409956880415, 0.6521145402408668, 0.8532271966112469, 0.29776350367890303, 0.9404276716436277, + 0.6726573436211514, 0.5748001835729629, 0.898087610056731, 0.12139618629958815, 0.8733807978796556, + 0.259112776070359, 0.7050336566805279, 0.8684061415265188, 0.9621610691368013, 0.5624154624891504, + 0.1779294951964464, 0.03925836062942434, 0.34279676512041746, 0.09445036349110147, 0.8407454580002686, + 0.24546224115198678, 0.6716812009164921, 0.3181656968422939, 0.9030176530428079, 0.4418476578509971, + 0.1403220922375994, 0.8939438952061383, 0.3962354901081948, 0.3092537970650777, 0.6971779423432034, + 0.36760093868600874, 0.8480860206951764, 0.6166894942612399, 0.600741255229763, 0.12440825480994533, + 0.44641475102114236, 0.8914552592267662, 0.8852669468778349, 0.8224276403148182, 0.6189982722879348, + 0.7601940155928462, 0.635908978571998, 0.9440369737584494, 0.5677669260248176, 0.46273264284275795, + 0.17265456191779038, 0.8535593738819319, 0.8756562316364245, 0.9910667860131801, 0.5999955102379257, + 0.012696538588551909, 0.9218814825285827, 0.989613199284938, 0.6672321014924975, 0.39742245686374533, + 0.3385841911868406, 0.1350031504055178, 0.45626209843205523, 0.3981006126038902, 0.43452691499150753, + 0.6601097097467896, 0.9458311544541305, 0.12670159963685623, 0.43495420630177895, 0.707338878990174, + 0.6570898489991635, 0.2225579855362756, 0.2167458539368593, 0.35275396575269313, 0.9047193195927493, + 0.3639484396143289, 0.2747435364989407, 0.26552339318174023, 0.7631557896257812, 0.0736550465308492, + 0.44742375825894276, 0.03836960346510854, 0.2717021832489478, 0.10747766849305518, 0.28845466817065657, + 0.8575456787092489, 0.2372662172825153, 0.772278857461047, 0.8351637336690974, 0.5490190619411595, + 0.17562859770047534, 0.6434712254626346, 
0.08351581783753559, 0.7267212712570938, 0.7519080171095838, + 0.5946129067876548, 0.22109616153818434, 0.2762364592057298, 0.545009922910364, 0.575837578716988, + 0.007403305376558755, 0.701780668337549, 0.061896113732203495, 0.6174587426513194, 0.3096894526703965, + 0.2774126338983015, 0.32781141126662117, 0.9867408326366033, 0.07174528411870695, 0.7886402903198428, + 0.38143075565845896, 0.7658708170703061, 0.8392748235221433, 0.3112319568588495, 0.7499092284286264, + 0.06043464109847374, 0.10064316912490501, 0.9914504745956655, 0.5997967230277158, 0.013078794647001901, + 0.854298828885103, 0.23751441228513104, 0.5919689972495351, 0.3219023249140547, 0.33412876368740774, + 0.7672314578717838, 0.9749273043906194, 0.6928581061373904, 0.6625524883774276, 0.5243418879999135, + 0.7129627435131244, 0.29883540042062473, 0.5441341649229283, 0.020762148251404766, 0.8212988182775148, + 0.6351799737621933, 0.0046000314503452655, 0.24795198841784238, 0.5283680741464317, 0.2450520777737294, + 0.8961701205958728, 0.14033068437521923, 0.9175043929909029, 0.7173739254073681, 0.4738158753347098, + 0.9820365012566946, 0.02808909882978694, 0.2686491080989024, 0.6638762364422152, 0.6543938822239167, + 0.5160217262690808, 0.709877147040917, 0.6280238370222847, 0.9640711664489289, 0.15792767510896566, + 0.17955632096592133, 0.15693613434765008, 0.8966714624583073, 0.6904311507714506, 0.5365934363287246, + 0.647995408143734, 0.5377330391683879, 0.5623044553967744, 0.692273457387911, 0.7489801614723965, + 0.31007202504736653, 0.3686699379521433, 0.33888954272328764, 0.00681896126863879, 0.8106485186894752, + 0.9022023913778509, 0.2998404098945733, 0.3314307419400613, 0.20714445608926513, 0.5747619290732661, + 0.13370452878408368, 0.4745661412858506, 0.9437903821587161, 0.018612257619440964, 0.8849589530269457, + 0.26383036751434474, 0.5964319454038916, 0.36118967316669814, 0.43138580638305324, 0.4052284090198067, + 0.5451424408949661, 0.8403688184533271, 0.8546451329124286, 
0.852589324987285, 0.6477227012675425, + 0.09695520335025176, 0.3457307101229713, 0.9548352483736139, 0.542586343018901, 0.5400152625182123, + 0.13674533003654288, 0.13954018638569377, 0.06833388853078093, 0.9407961067118134, 0.5925656225040199, + 0.8944096051223135, 0.32606464238339183, 0.2255517082156655, 0.37366051705050807, 0.4668506648602152, + 0.5581681811335306, 0.7439862596864373, 0.39713227198955114, 0.6601070226329898, 0.3275977545031692, + 0.3371838848204518, 0.8874356560086295, 0.36285239316259754, 0.28340841019953833, 0.05583253517461406, + 0.8448286162871339, 0.17661350678122167, 0.7481897117282391, 0.8674952421961661, 0.2999165142924891, + 0.8132552504971018, 0.30369871162505524, 0.15771469591732024, 0.6272603347680122, 0.4667529963466772, + 0.47988765582449644, 0.9328305900348385, 0.9780875560006471, 0.469826848277653, 0.7199247952470547, + 0.6064305145965554, 0.19514027598932304, 0.858475203494622, 0.7740570642269338, 0.24440769376582883, + 0.15829426674672442, 0.6680567128563278, 0.3792168788107044, 0.9138651868178408, 0.3554952077139394, + 0.5990851321154904, 0.08724745594330285, 0.17029307748314682, 0.5987084697855811, 0.4999661773857067, + 0.10057488876028953, 0.2627206088303473, 0.13497880870204337, 0.9946626628204948, 0.9035165171151448, + 0.4039569145649755, 0.9370079887875561, 0.8020650217923826, 0.6383129952890263, 0.27976198708603306, + 0.9741398726037899, 0.6303835339592258, 0.5618959248529879, 0.9081095529075438, 0.9947348781247561, + 0.976235178750195, 0.9325368374505515, 0.7341822172341462, 0.5404091154422912, 0.5329535915098538, + 0.08110775144153692, 0.6535583901663519, 0.6175609477753102, 0.3159470095831284, 0.0711648088478326, + 0.9037218781902312, 0.6131714181668053, 0.2252686632477826, 0.16467109290939597, 0.4132853954269039, + 0.002477181737273959, 0.8962128673665215, 0.14915808899876637, 0.8677647022630963, 0.562280412091289, + 0.5509642305707612, 0.4351685924488591, 0.1363785587788613, 0.7526435019202964, 0.6420467128229859, 
+ 0.09918991161233881, 0.031726056012278736, 0.3886096650631513, 0.21302728758817935, 0.26787331851893303, + 0.6967913663146085, 0.7821589398562553, 0.46137147849437576, 0.3752180188092137, 0.251338912609961, + 0.4695756803811453, 0.24492029694593542, 0.9570321603275969, 0.4504258736016229, 0.33999744031557955, + 0.3933137970579751, 0.018174267700791447, 0.7939957881108681, 0.026417766980760926, 0.5590786322601159, + 0.5354470030366117, 0.14373999152171169, 0.425174218763559, 0.1663466717888198, 0.08197310494768051, + 0.1345170110145837, 0.9712645643794963, 0.3789353856819866, 0.1698909520507832, 0.22939821164356122, + 0.9696591839902201, 0.7835470327519002, 0.36465305001153236, 0.2783262247792664, 0.5373884989988515, + 0.5172370168586898, 0.9686532192504956, 0.5065783963181841, 0.2875715965653579, 0.1893090477523638, + 0.018478913704299882, 0.2983062713783934, 0.5325921358773241, 0.8628182031087973, 0.7490592279789233, + 0.09420519718693543, 0.9375649075751361, 0.48556423350072164, 0.66958227563015, 0.7678426568123674, + 0.7495572291868463, 0.3184362891300201, 0.8065219848142055, 0.547205839024813, 0.2845140573246496, + 0.3405437685721455, 0.04008517538279088, 0.9173821772312987, 0.46222375013708306, 0.30773451388643114, + 0.872515125292373, 0.9147495466711926, 0.84766692046905, 0.3310473758050274, 0.4860298654033053, + 0.6808591135362148, 0.6096220988114378, 0.26617106280235514, 0.18322168851434328, 0.9433708812876275, + 0.15281616741547543, 0.8881836032717928, 0.6048670557593928, 0.1888338795032064, 0.8976612157167602, + 0.0019748406404110908, 0.5013006566885004, 0.806439515054873, 0.9598524638502606, 0.11411778617269408, + 0.9184904088200172, 0.5478245245285066, 0.23176953671336242, 0.8813082291176062, 0.11545698833942053, + 0.06243129077708354, 0.35578130428870847, 0.6974321018129322, 0.9388634472496009, 0.6738377942022481, + 0.9120284563249282, 0.7107020765056907, 0.5090817884855765, 0.39313272333964333, 0.7833430085511944, + 0.5959190816016348, 
0.4259104411681456, 0.7107772272634887, 0.4083339394119787, 0.7285173323071417, + 0.6001774322912643, 0.37140531406236, 0.15267251534727655, 0.11388295655175429, 0.7998639326761543, + 0.9752231781906548, 0.6987791734238211, 0.36364630195494363, 0.3517388371715816, 0.15530983015174504, + 0.318356175762636, 0.430570120947953, 0.22736693792043783, 0.5083889494703363, 0.30599782277384735, + 0.794418360007892, 0.3089134680838309, 0.45787573647433233, 0.4823778703838705, 0.9589303369282219, + 0.80724353471677, 0.719369064985418, 0.34246199799786725, 0.8049303691996378, 0.3305659100205217, + 0.695487795959419, 0.45326212230640783, 0.20783858732450966, 0.7622798133203901, 0.648910517283219, + 0.9298832251551966, 0.813758326094692, 0.548658338677721, 0.37255976835344096, 0.80478120813917, + 0.42831396861230087, 0.41969457789024045, 0.5128959126867926, 0.3842837068723949, 0.8338790183483511, + 0.8220890665327201, 0.8462728472107226, 0.33483707922857575, 0.6240323008565776, 0.7478312908945212, + 0.07161197909954886, 0.5349291719254071, 0.7998340910323554, 0.8507041436520468, 0.5971577610206942, + 0.6969575869879915, 0.9920403620580774, 0.871258831698886, 0.3474139774062083, 0.7640239512132223, + 0.5709015334284396, 0.10663552610035198, 0.17309257680758672, 0.19254059499782594, 0.05223128642784547, + 0.9838857866221765, 0.4557553537522939, 0.0015221041677935254, 0.03675975391122588, 0.7750037566395708, + 0.48640839826996807, 0.411096175108574, 0.8923908481198983, 0.2589287012836461, 0.556565593151529, + 0.07717616484569123, 0.38875135912006153, 0.5509778371334433, 0.909822162078459, 0.26137439637971194, + 0.15633456851623195, 0.12181367833394996, 0.7815653980073078, 0.4751277053825842, 0.47645536361462515, + 0.7187267185588344, 0.5558710994447176, 0.1870851529008637, 0.4524629303911627, 0.23212767811353918, + 0.939412569745768, 0.7035527103294765, 0.16560337215822885, 0.08317782184394085, 0.22458732015840954, + 0.22997764922641362, 0.20511084091555487, 0.8583710732117895, 
0.8854274720474302, 0.6298971009845201, + 0.24422253257338689, 0.6114511863557442, 0.6612317372691434, 0.41996421105921633, 0.5399578206643209, + 0.9301399797123336, 0.2353120266565194, 0.24160305460943277, 0.5895995758457725, 0.919567831488458, + 0.5431618026688126, 0.20894095942263502, 0.49512837325997494, 0.8688510005455988, 0.8080683403154149, + 0.34448120238498403, 0.9866682191699911, 0.4514271103179398, 0.9649378581633239, 0.4793234914333593, + 0.1565576231389576, 0.03260164380485031, 0.5911987082154285, 0.6012743927518429, 0.11031857761037567, + 0.7919825170425685, 0.4014005464684842, 0.7412751836227155, 0.7053415600250933, 0.8840906648864124, + 0.31309623122426067, 0.23117503944651796, 0.6070071446106846, 0.18755046657366148, 0.9209060982654941, + 0.7061590551786392, 0.5611511345020163, 0.3532735787477914, 0.35675947143505937, 0.6047964625652399, + 0.8381768551013316, 0.0316474703213836, 0.997932692240173, 0.26804051988359034, 0.24164170248232963, + 0.1128557788135679, 0.05977641223564234, 0.20076795111638046, 0.49323005501992434, 0.8453920161239551, + 0.4906372581279175, 0.4676035290247943, 0.03687616857370135, 0.9442933089890817, 0.12540566557242627, + 0.1882473105335125, 0.3376426574069038, 0.7275184011552345, 0.8113504889733711, 0.9180400475167111, + 0.5267449154082644, 0.5187615618800065, 0.03439967972502145, 0.9989722501677417, 0.8576270193692666, + 0.547374563712834, 0.6996834889783037, 0.1895142045817545, 0.6450736154582144, 0.9316544035727097, + 0.8580431256846641, 0.3578897315238895, 0.819101464517182, 0.17710067883829017, 0.29550733460934664, + 0.7435437908679559, 0.1144351292725918, 0.8461317931422729, 0.8647558928256841, 0.5298733651365005, + 0.4017280714518162, 0.6975272247451271, 0.17166159986929208, 0.2304246678026204, 0.5556116757224207, + 0.6638162363778531, 0.2988438217337216, 0.6011480070982935, 0.9221926500998648, 0.43962725522043367, + 0.30685222281881586, 0.10039562955799841, 0.19774518884072245, 0.9722150986994611, 
0.8740648760980403, + 0.8701814345396093, 0.8122964973474701, 0.9924930481370859, 0.5733339597519914, 0.0940865963515104, + 0.3262859226363999, 0.48465595076579016, 0.30588789631733104, 0.0003793943320034021, 0.9107762918587479, + 0.3638622689991824, 0.32034957581661505, 0.5157733659631437, 0.7931491228346562, 0.29202054581835457, + 0.40447880890518706, 0.8546444054631211, 0.17300525008891987, 0.836306784595344, 0.05643993675567216, + 0.45404164701093686, 0.8896454505060492, 0.9599966862466585, 0.9841350296823745, 0.25421124457554, + 0.6975214852271043, 0.24014743456248733, 0.0235057813209415, 0.8613953032846574, 0.11085715794712914, + 0.057907977353944706, 0.5907496794641183, 0.7864680405695876, 0.5406368884178097, 0.4052971131253651, + 0.9938255962978501, 0.8470290022128667, 0.24236996044129, 0.409528724694306, 0.666736772901298, + 0.9414818442785001, 0.005040450959806875, 0.016921739543765635, 0.09142543413958681, 0.7626297442507252, + 0.5307190997595231, 0.9944957404740558, 0.5867777814038379, 0.09357238052156414, 0.21917947859334652, + 0.497678370549514, 0.10954805879898033, 0.12743456850506396, 0.12836684746170424, 0.3109661172605419, + 0.5192786459845283, 0.5938704185634249, 0.04341533031285172, 0.65494173043691, 0.577284845201679, + 0.7419615280910089, 0.40282978335635267, 0.8700685824205098, 0.4663457531259627, 0.621961090816814, + 0.10689728015561661, 0.28898664019111053, 0.9661498415023864, 0.9067310219255523, 0.7062597786898408, + 0.1656176601583519, 0.7418291449321575, 0.1360738049917236, 0.10060052958019916, 0.6720371646498426, + 0.6919757941011632, 0.5398831308323078, 0.7519581440272585, 0.5068669682523493, 0.938162816686544, + 0.1684504073334684, 0.33898387421578013, 0.45612210493017646, 0.2574986744080595, 0.7202214965416577, + 0.7197323638730856, 0.12612039229185712, 0.5743986035644238, 0.7983453971630412, 0.10818828151427162, + 0.6216751159917858, 0.3722004061266849, 0.33506042716085815, 0.257128306617908, 0.229399433957176, + 
0.48731682130765364, 0.884016368776341, 0.9431399649274476, 0.39090320483261953, 0.48804003791575845, + 0.6997701510258648, 0.3427575441649615, 0.42480641493443805, 0.26568686568088584, 0.5994184647706472, + 0.027590449104072157, 0.30915922531455675, 0.3024462081682625, 0.24249478773185507, 0.344080675836019, + 0.6718290412296023, 0.4927729450565037, 0.19198624273422238, 0.9188712333199375, 0.6662306327726546, + 0.8208388909384069, 0.14966846053861182, 0.7774798184435455, 0.38675267059285845, 0.3524966116601721, + 0.8872570608294355, 0.4703208696983341, 0.5278348214068792, 0.7699774350342129, 0.3925258780570219, + 0.4877224153511128, 0.928656700953216, 0.3699307222802808, 0.6037222439415791, 0.9744804142777878, + 0.3404487585426379, 0.6854571865797977, 0.4108783367719028, 0.3865392856063704, 0.08665641437850102, + 0.05342232186562068, 0.032474872356025974, 0.9086582770947371, 0.7659937015592914, 0.2568140216774778, + 0.15007040476252542, 0.8118173580897312, 0.2481889944626371, 0.07986634967315454, 0.5703765565370817, + 0.7127504119865766, 0.0820735670409255, 0.42206069298473226, 0.03564049082735243, 0.5428116981162028, + 0.6591059848854349, 0.503112433651799, 0.9251552343127925, 0.6567208354036572, 0.8575519848132668, + 0.2709998780180116, 0.333522021214367, 0.9941028268677842, 0.4312086049658347, 0.8540634740401672, + 0.6463190763686415, 0.7089451172899908, 0.40734998866870453, 0.804771765143114, 0.544050804224951, + 0.1291678032399346, 0.9127160055196917, 0.08307842440728763, 0.1428602387810044, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NCHW, {4, 1, 16, 16}, {16, 16, 2, 2}, DT_FLOAT}; + TransResult result; + + FormatTransferFracZNchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, 
fracz_to_nchw_fp32_success_gt_cube) { + float data_4d[8 * 2 * 16 * 16] = { + 0.13861991091475812, + 0.10525121122814463, + 0.14870114967919246, + 0.37158545264352527, + 0.08879297931832297, + 0.06760373986806234, + 0.003849344223073925, + 0.7561138729196587, + 0.0028097578732378947, + 0.7353701611370006, + 0.6933417527357065, + 0.2656723224637475, + 0.31405414074086935, + 0.9820863796986057, + 0.6698167465680883, + 0.7025073707567491, + 0.7925940267469043, + 0.9858147072404602, + 0.727796317012897, + 0.015787104612503655, + 0.27491105976633057, + 0.6369448325207008, + 0.6896774205279625, + 0.9323244549718339, + 0.6913756703206293, + 0.16754362854376104, + 0.8188111598667317, + 0.5428410503366073, + 0.5125348441029931, + 0.9913510386173713, + 0.9375861536812142, + 0.8736979003360933, + 0.7260342849793483, + 0.20120410264885236, + 0.7646333976747771, + 0.15173165326001736, + 0.39265689606394716, + 0.15243591466584205, + 0.010340535191060374, + 0.7563590156106308, + 0.9033088706649143, + 0.5709275013402282, + 0.4128767392139201, + 0.18461034076886662, + 0.917605405768162, + 0.5914222027681203, + 0.8848126150904408, + 0.02129918275230247, + 0.7696659137833982, + 0.8785872487273547, + 0.37446354377616475, + 0.7646072372892175, + 0.7758773698800284, + 0.3065250204557999, + 0.4170857632470394, + 0.044089732363661094, + 0.6890756783794824, + 0.43909351458338675, + 0.2939490692189499, + 0.2459954395643219, + 0.4618194259116881, + 0.2407026393868288, + 0.36086002695619024, + 0.8640851072532453, + 0.15128698698720322, + 0.10445646966322941, + 0.6279086302955916, + 0.30063695907830557, + 0.5803620249830622, + 0.834738368359478, + 0.9658333522412966, + 0.49084190134047334, + 0.5982948387159709, + 0.7364746708042765, + 0.1728040800468621, + 0.8960926950526985, + 0.5284394832775934, + 0.6568173944147312, + 0.7069769008278068, + 0.5579428147048966, + 0.13357691464798138, + 0.2908730982610468, + 0.5010437198187823, + 0.09963121799246955, + 0.17217086189255992, + 0.5648254121117188, 
+ 0.053705219700942, + 0.35796688301072, + 0.20469063893497697, + 0.09371763139775569, + 0.17616094680644334, + 0.15162118086324772, + 0.690327705519752, + 0.9014654398254837, + 0.8026738672485313, + 0.2336754606791367, + 0.21401989030280077, + 0.3050069506431471, + 0.6236653000860762, + 0.713337515092525, + 0.10737739320492923, + 0.3873591518740387, + 0.8659119048707833, + 0.5925084869238261, + 0.9111021456064748, + 0.919429204772624, + 0.8384109327495086, + 0.1575614925244445, + 0.80509149343632, + 0.28370128790625326, + 0.4282317439660449, + 0.2357917781969051, + 0.011471956973760244, + 0.17339338632884005, + 0.6715959811967953, + 0.233220125983571, + 0.05314850242766944, + 0.7056619496920902, + 0.5456598510028011, + 0.13598966999855922, + 0.012529344446802426, + 0.6201816276837667, + 0.006964022371572898, + 0.3412241888432692, + 0.8049499889919485, + 0.667343472513223, + 0.4701897375183579, + 0.7418169336190366, + 0.9737001474297444, + 0.20654566517027428, + 0.17248149069917285, + 0.053157049516807664, + 0.2861359192158687, + 0.7593751161265202, + 0.06755860607493347, + 0.3852737431079606, + 0.04992360626473957, + 0.302489320076742, + 0.40132951312095344, + 0.6795152678016566, + 0.782572010580318, + 0.5746635474611804, + 0.7966442996538724, + 0.15459601428614667, + 0.3693961500059507, + 0.06411018686027592, + 0.22945572978660644, + 0.8731427819714899, + 0.5076890561556178, + 0.12930627273556972, + 0.9656927645096538, + 0.38569829114808596, + 0.12375154698912139, + 0.2509506112091433, + 0.1319370980474388, + 0.38289710756686746, + 0.25840864046822043, + 0.38571855983419834, + 0.8364680728786794, + 0.05987516981191243, + 0.42570991590805674, + 0.4119405321035826, + 0.17762967468331525, + 0.637507115987954, + 0.6450032164935743, + 0.5654207482006985, + 0.6329507004942213, + 0.33844084372877925, + 0.9049256528358469, + 0.10047293005385105, + 0.8194138858161407, + 0.6769310231737116, + 0.20440622491432292, + 0.8741638758927367, + 0.44505905665328027, + 
0.21280609696170782, + 0.011781058613303919, + 0.47688438712340286, + 0.07768770074191944, + 0.00822444546154455, + 0.9681521716228816, + 0.776893063617345, + 0.17870297652809786, + 0.9918631665731272, + 0.18754660546807156, + 0.09674403006985133, + 0.7256042711333616, + 0.37613730048899263, + 0.5668621385905648, + 0.8147168836664814, + 0.7145827173057614, + 0.5419252027429404, + 0.8624241578915682, + 0.9767778331103008, + 0.33134453444767853, + 0.1511696460358204, + 0.931109609080415, + 0.26760280136617476, + 0.720504118679755, + 0.561054639152762, + 0.04403144974993356, + 0.6650312577642018, + 0.20842812187870807, + 0.9709344182765282, + 0.29749853648546964, + 0.16720208620538812, + 0.936915662933375, + 0.786663329801668, + 0.8726576278463952, + 0.9747036251493297, + 0.09534090496984604, + 0.6034024789176371, + 0.9165559738358574, + 0.17251694852573773, + 0.6518257199411946, + 0.30944313225985576, + 0.7563828055308841, + 0.253253633473531, + 0.5629789915185913, + 0.15465010252531697, + 0.6402301841568493, + 0.0795608311951832, + 0.13552737276834392, + 0.4753501403209113, + 0.12763429765746592, + 0.7976683366910478, + 0.8941181691336204, + 0.5075573010849087, + 0.18532317502693563, + 0.6848997362918184, + 0.5714156890506997, + 0.44174703642978874, + 0.3424985993076507, + 0.951509130793907, + 0.08295791787636164, + 0.11549926102263153, + 0.2749400181421586, + 0.9113280686842665, + 0.7990989912334494, + 0.14710662300775412, + 0.46955053458590934, + 0.40065138391111943, + 0.5752642927349247, + 0.07116527059062705, + 0.2712956260859689, + 0.080480044589748, + 0.6450046812325141, + 0.531319360316121, + 0.781899452578717, + 0.11552970112165872, + 0.5611500316920904, + 0.9249303707149428, + 0.3779808468902329, + 0.04917726412895995, + 0.7272035505938381, + 0.20229470167487462, + 0.6225381036572762, + 0.09665526780685885, + 0.4790754927951175, + 0.8852506071141146, + 0.6378272080403014, + 0.4302617641125852, + 0.17529425791527875, + 0.2911580454791233, + 
0.5685378955963167, + 0.5996629884166993, + 0.35379997432878574, + 0.4812449632594248, + 0.9327970145025657, + 0.29959697003490815, + 0.18566739299261037, + 0.7906655716605615, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8795377572972203, + 0.1470755313991291, + 0.561247554034285, + 0.7662220587760888, + 0.2986541461597556, + 0.42411698786743934, + 
0.7611568879677035, + 0.11264806170566222, + 0.440973443278477, + 0.25390038851594676, + 0.26612481125474574, + 0.3350292840944643, + 0.3005887681812899, + 0.22284371079118692, + 0.5659697003311785, + 0.7122019087559107, + 0.9567807277929921, + 0.20818145030885005, + 0.60588935414239, + 0.5040087971937715, + 0.29392139130299244, + 0.6363756825181826, + 0.7333242098434483, + 0.261729101999164, + 0.9569771739814522, + 0.6143153706299556, + 0.631409359723997, + 0.011901826035510932, + 0.2955498735301396, + 0.8816750025849788, + 0.14633316716600764, + 0.4793915962021015, + 0.5704212237455046, + 0.13150180417478674, + 0.31690962084442686, + 0.21228124048991093, + 0.7296604549037735, + 0.6650141171161462, + 0.04241446875266841, + 0.3321108610667033, + 0.9575885483833089, + 0.2436150246808676, + 0.987210551195358, + 0.6372128000142347, + 0.761113851092484, + 0.7437116188736885, + 0.45594654788563493, + 0.830052343780784, + 0.7982349711510114, + 0.004734651371140819, + 0.6690845068463606, + 0.7188268519310974, + 0.9407991624965886, + 0.1865674886269657, + 0.27113574284356157, + 0.7096913513550435, + 0.08449102565738409, + 0.9898338931888807, + 0.24247197683608523, + 0.16556601945810856, + 0.8106885120353601, + 0.4321572361784677, + 0.7113731972624318, + 0.007494933722813291, + 0.8141904135220546, + 0.7395150888511759, + 0.547581203606972, + 0.6530263306464384, + 0.007521198939880125, + 0.7474194348631291, + 0.5777144381034068, + 0.8584438346701353, + 0.16969711093899553, + 0.21749292052270808, + 0.36244880368539034, + 0.6945674442909555, + 0.04384851936719336, + 0.7586877459463743, + 0.869698410395238, + 0.9767059033674064, + 0.9446884919932719, + 0.6572566972190457, + 0.2048989668957929, + 0.8170741278189959, + 0.5547941031298822, + 0.7309773397133232, + 0.5063767684051376, + 0.2772421373998536, + 0.04711937267440225, + 0.23774692855986723, + 0.6737215431713256, + 0.30986446944527846, + 0.5491562617797402, + 0.1401554977665822, + 0.7932244713519243, + 0.7323878899162881, 
+ 0.002396651178786957, + 0.08089773378821119, + 0.461475864945851, + 0.5224297784281214, + 0.3141596605465167, + 0.48263368203794865, + 0.2464902259465227, + 0.4728058512337572, + 0.47155205473777895, + 0.9833083681599707, + 0.03332507249957106, + 0.7163502359065218, + 0.3421635067549482, + 0.9787191868196575, + 0.8307200462203801, + 0.12284932223048928, + 0.5773682448264239, + 0.6558446451611865, + 0.680394068488586, + 0.7310433251859529, + 0.35523994189400077, + 0.6766207358391074, + 0.9060741761912895, + 0.04914274667362528, + 0.19144189303663528, + 0.4845610368675931, + 0.6541048336512872, + 0.8575012513297512, + 0.8537472449635101, + 0.7450443706280852, + 0.5803426777495934, + 0.5689408381290824, + 0.9092918118236812, + 0.17460334682241974, + 0.2340700994442091, + 0.9298928386669005, + 0.36914916933156305, + 0.12622765437152517, + 0.10689808095884157, + 0.4310771822646119, + 0.13267160640865372, + 0.14895973120547823, + 0.1397935054459304, + 0.9349783221173635, + 0.2503384778850122, + 0.31822595375881446, + 0.9913550891610706, + 0.55454466322197, + 0.6118908274540877, + 0.8779734145096334, + 0.3543109331520725, + 0.306988901348892, + 0.6726982566869614, + 0.02467090616045342, + 0.06553857351644254, + 0.22442983249751902, + 0.26395669898138985, + 0.9632237865411843, + 0.732461885956622, + 0.7317620554576368, + 0.49519103539917386, + 0.18413692212953858, + 0.7150341406645073, + 0.04522592798730041, + 0.6447408662943692, + 0.9385896789116761, + 0.24032378135246224, + 0.9682515458629827, + 0.26982819483722165, + 0.48178308190918895, + 0.5823940174941661, + 0.409359485776034, + 0.40752327445425973, + 0.1471890531783654, + 0.6242049562872195, + 0.18811560788574455, + 0.9893442282921309, + 0.42331698403095386, + 0.9302534567908484, + 0.35889687716970375, + 0.8660877509389459, + 0.928772577903708, + 0.6158032483682454, + 0.16070664756432096, + 0.9661412869875025, + 0.31898622890124084, + 0.5441507345809006, + 0.5055786252454072, + 0.8853269573231654, + 
0.9931343022347453, + 0.08922742117815974, + 0.45857685361148903, + 0.7770746229113067, + 0.0887419464763255, + 0.3162106953425323, + 0.8536288391503049, + 0.28011809028393886, + 0.8521904076959731, + 0.7248634019855653, + 0.49869013399498285, + 0.4053194525003626, + 0.4735972943090352, + 0.17389059420154984, + 0.03366201552897685, + 0.9823521603852087, + 0.7808072048924388, + 0.5526270573062637, + 0.6738010436288211, + 0.5432293708851752, + 0.6914357121517429, + 0.43842610640884583, + 0.8530609582658267, + 0.11278100676094394, + 0.4941714606912455, + 0.4638603168540302, + 0.22194186056037057, + 0.6716327950358132, + 0.9166902550664012, + 0.8351982565565297, + 0.8412216162402254, + 0.2222539480109168, + 0.5991520863203645, + 0.38864001352791677, + 0.34985786042192857, + 0.15795832535816468, + 0.9661077058919707, + 0.012634417342274218, + 0.343423762898566, + 0.451146025504531, + 0.8761611666855926, + 0.8608539041919699, + 0.8903503433345549, + 0.6548981638110877, + 0.35486660560902894, + 0.08811346271007414, + 0.8110740131813556, + 0.8539951703461579, + 0.9555089087268872, + 0.9562723010283051, + 0.4681219537960889, + 0.37592691228594466, + 0.9992573696993887, + 0.664341812871904, + 0.23944640184088473, + 0.2963560937896125, + 0.508501991516788, + 0.15376590814347213, + 0.7379530846527411, + 0.7977010542015935, + 0.8915772781308771, + 0.5524912580999269, + 0.30375439556599226, + 0.6566031761406997, + 0.31858523647827175, + 0.019979751727917994, + 0.8743281776278087, + 0.3810202153747745, + 0.5203446091374957, + 0.966155481985703, + 0.37469353816237017, + 0.7352375292410388, + 0.3666447891178437, + 0.015451749872628873, + 0.7832706823237181, + 0.3999266945419052, + 0.4364277580092467, + 0.7230546551485972, + 0.157525724298896, + 0.5729599421962962, + 0.19803754933587325, + 0.8333412543498672, + 0.7867785122337513, + 0.27896398037168957, + 0.5484736202867737, + 0.32766071211385694, + 0.38499709012670014, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5293502876783279, + 0.8291193070208391, + 0.42200289273599556, + 0.686857118791328, + 0.9305878823942679, + 0.509816187082568, + 0.41487977014592914, + 0.09554332355010453, + 0.7412162233285089, + 0.9973832859416821, + 0.23910159953071597, + 0.48004632089879007, + 0.039772947348565624, + 0.0894049198451472, + 0.01874007337969552, + 0.11961507320612019, + 0.7101967112871143, + 
0.7197179842368491, + 0.578232652038329, + 0.040072115029379685, + 0.5804692341544302, + 0.6623947182585623, + 0.5438434786671921, + 0.17343133597811278, + 0.7367546132475674, + 0.13201613557039393, + 0.09897754229118771, + 0.8999983118049748, + 0.6003901478395222, + 0.32512047896820595, + 0.9128032309113029, + 0.9031362578958313, + 0.8276019986094242, + 0.16844251099981267, + 0.14661295960750242, + 0.5333431363401799, + 0.8299745503174504, + 0.5339333337288752, + 0.9106147969126901, + 0.19580399811145166, + 0.7078571833791006, + 0.48145075892895806, + 0.603131492234237, + 0.27265318902818514, + 0.8791795132792125, + 0.8411100928304986, + 0.017593478154503073, + 0.7060157268133952, + 0.7724279248361262, + 0.8861613857589661, + 0.343469114076287, + 0.017394361915610057, + 0.7883518755638007, + 0.4391735391309788, + 0.2504695401963266, + 0.15170174477220055, + 0.21393977565249778, + 0.443592550980236, + 0.5095334554083597, + 0.09938394670258921, + 0.028315604429974273, + 0.45463003066865415, + 0.8298597748035649, + 0.4532423883161699, + 0.7007554265498704, + 0.40115166637181754, + 0.1470799829764481, + 0.6294304491599311, + 0.23594389336137767, + 0.7112579137154827, + 0.10185947547633445, + 0.20139399288408033, + 0.1180404666237802, + 0.8399634215048786, + 0.7632940118923165, + 0.5303499842405933, + 0.4155261517501555, + 0.5548711377923504, + 0.025360358738519495, + 0.8232568061375535, + 0.3346104779456238, + 0.532915432070937, + 0.40015851318727125, + 0.5389460419617879, + 0.5987416086426901, + 0.6698552912716527, + 0.780388759983075, + 0.7925588085624273, + 0.45037796758318804, + 0.23963041265644203, + 0.36874366609595643, + 0.18375410637584955, + 0.5281492850328181, + 0.5460266989110848, + 0.9280948067239118, + 0.9873043353493716, + 0.03442022110787202, + 0.6291590014933156, + 0.9981627789260562, + 0.3200659468426875, + 0.44180687340667857, + 0.3349303332479078, + 0.4553140241842599, + 0.05359923274228118, + 0.03929951143652333, + 0.8592328433935742, + 
0.6149351679083243, + 0.16259509931077443, + 0.24314980248993667, + 0.7595415381121309, + 0.7096499077090591, + 0.6615524634203029, + 0.6035851122706498, + 0.7343833438614558, + 0.41474303482746533, + 0.4310140395683666, + 0.5462996571286937, + 0.636139222807529, + 0.4261420957770189, + 0.1721447584349456, + 0.47147571375284814, + 0.5003705454959895, + 0.41863524425463694, + 0.1212972250835862, + 0.9945715392091543, + 0.40446452849666314, + 0.5562287852609348, + 0.46298522943000064, + 0.21327330991620574, + 0.4174038781545222, + 0.5506923312964094, + 0.08960554649117725, + 0.16821297722710693, + 0.009183189735727626, + 0.9519318593697729, + 0.7911885910393738, + 0.30243793737732894, + 0.36171955618702634, + 0.7275163676699398, + 0.6017302317686811, + 0.9211449821926627, + 0.016789744324152922, + 0.2231842692779209, + 0.7320508107990614, + 0.9172186193742928, + 0.38726858040604806, + 0.45346727152383326, + 0.9736037921150315, + 0.1415848835987198, + 0.4287144407465886, + 0.7192727473967828, + 0.44212966241115104, + 0.6075560359436376, + 0.2699313871787882, + 0.9501669443744405, + 0.6891624698228346, + 0.03594820662269016, + 0.4140665514245061, + 0.19123529007541273, + 0.6956985142613639, + 0.18608296200597518, + 0.21395244570112526, + 0.14994545679313542, + 0.7955362084226552, + 0.2908836237948339, + 0.3189721351237217, + 0.9113691555501472, + 0.16608774824486627, + 0.3652540906020737, + 0.7661105179559459, + 0.5503285960900492, + 0.6296482965637312, + 0.021618970592347186, + 0.3222338893974612, + 0.1771909927336177, + 0.36132216279703644, + 0.7031384528295931, + 0.5596365181728887, + 0.8053366142728067, + 0.8638400429864582, + 0.9085095227101969, + 0.5058802974670702, + 0.022489996885331487, + 0.16136704513340805, + 0.6783741471392065, + 0.2433432886640784, + 0.08811202713859834, + 0.7019149642340254, + 0.7074398653385896, + 0.4188458869744829, + 0.7417870718217474, + 0.09314542994725039, + 0.9526702722469567, + 0.15447695419829233, + 0.5234613373790115, + 
0.4587900976769065, + 0.4666560847640946, + 0.47741158025412755, + 0.3941302129542221, + 0.8725938986300577, + 0.1868618851608026, + 0.5960817832633942, + 0.5402399164827054, + 0.6542430096117432, + 0.14093404302294743, + 0.7351008222411094, + 0.626896869991658, + 0.8153132939963299, + 0.4692693688236108, + 0.05305160105808626, + 0.2791581608581686, + 0.9228439462893766, + 0.7332915821155096, + 0.6628517982577824, + 0.04860139460088986, + 0.08951277138503844, + 0.8320411266990881, + 0.2048707134105746, + 0.8068872022870146, + 0.4948504180620279, + 0.41184033137111975, + 0.7190998979639045, + 0.8380990899424366, + 0.086092548343773, + 0.9820085929439971, + 0.32879915468366416, + 0.84204761146793, + 0.8700722120474124, + 0.48076406766202595, + 0.0732106249839285, + 0.14783939754527176, + 0.574931463245764, + 0.7079277349244375, + 0.7292094480906886, + 0.7047746244055081, + 0.7943898405857803, + 0.45038405067793374, + 0.3012144754381675, + 0.27280381214968086, + 0.12926601852738606, + 0.05313715099993188, + 0.9954587172603774, + 0.4718694982334253, + 0.4298764101970639, + 0.6681098129873408, + 0.42036967115181456, + 0.0839795459090027, + 0.5792609679185063, + 0.37605072282304686, + 0.6583979311718429, + 0.5148337803671061, + 0.385262882179612, + 0.5273305274418582, + 0.6199363284810673, + 0.16762752670302516, + 0.017492223678952867, + 0.30648818939511857, + 0.1260323700300645, + 0.7715815662819216, + 0.4481707153463781, + 0.763405910124729, + 0.09025576212085595, + 0.5033159147423758, + 0.35268716544351064, + 0.25698746611256706, + 0.8420450508984809, + 0.5982144085578488, + 0.9586114553080439, + 0.6344034099803877, + 0.5143253458855047, + 0.5249453898611923, + 0.12136272756597788, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.37446715213889414, + 0.37998233432424766, + 0.2031003471352949, + 0.12415625519029183, + 0.6085733581700042, + 0.3796334566305014, + 0.8371386332055144, + 0.012793800556977097, + 0.5833205323286814, + 0.09779108988244167, + 0.5520359287683562, + 0.4849558923188444, + 0.5282674162369237, + 0.06675479822979058, + 0.5553542608092429, + 0.39696323881119, + 0.2671520815086441, + 0.5412004706206277, + 0.9803949953224173, + 0.18482177407515232, + 0.28474674412781464, + 0.5217727387416771, + 0.27255844427862186, + 0.09233866919232392, + 0.11346891778995816, + 0.6851858317061092, + 0.0005627333655138234, + 
0.9053829799883957, + 0.1141296807683726, + 0.7571659430911946, + 0.030398776009645023, + 0.8521206636214926, + 0.686751963460763, + 0.9234485268943335, + 0.4262558321277006, + 0.3659515307852249, + 0.8886441531294881, + 0.6990554941733981, + 0.8768036198178328, + 0.3319762164814921, + 0.7118863871542145, + 0.8291405634796339, + 0.7454138001482092, + 0.6496653584298466, + 0.44449868173307283, + 0.9805574793354571, + 0.62370311211052, + 0.5317392897133469, + 0.5670304904635779, + 0.007148288012197801, + 0.0857835687436056, + 0.4467901200429536, + 0.661795939990852, + 0.13366433186289184, + 0.8378440756500211, + 0.4622738238198597, + 0.9381592583428692, + 0.9596830650619335, + 0.3450364035776594, + 0.3035151186951639, + 0.4686002937786584, + 0.9453220019662685, + 0.6452195208794166, + 0.4358498394990138, + 0.5212431731505457, + 0.19932525488647057, + 0.9898306221037652, + 0.4747038016224169, + 0.82876683912034, + 0.9175597660672588, + 0.28147168770097997, + 0.25502934165477764, + 0.6415827981200575, + 0.799877067418722, + 0.5105555039916515, + 0.9146513147957873, + 0.9060172079755968, + 0.791556691837571, + 0.32491506165874695, + 0.6486302438986716, + 0.4569527670153789, + 0.9495997246091489, + 0.9396139468185104, + 0.45899358803544554, + 0.6894229831946286, + 0.270746137652799, + 0.7248139481233704, + 0.7626497479115815, + 0.75066781795124, + 0.22192123430700905, + 0.28396757247535576, + 0.9112190358293802, + 0.26330626036820004, + 0.3113617142082512, + 0.07148685551219547, + 0.7195158974755017, + 0.43477271771982284, + 0.5787097784473253, + 0.7313260977369199, + 0.9107728219751681, + 0.032906572995198724, + 0.7598530618731676, + 0.9818987602819136, + 0.4657556371687326, + 0.594695390220099, + 0.6883643754551224, + 0.2719751089503445, + 0.37581000966714084, + 0.83824915279393, + 0.06549247203877806, + 0.36632445386556045, + 0.2322214574176914, + 0.4704668571121897, + 0.14006001098754373, + 0.9681288328156152, + 0.47809152528622967, + 0.9657400895521029, + 
0.47776444371478033, + 0.9687034478573067, + 0.07412119197941236, + 0.5721744237867166, + 0.6141593977164898, + 0.042054099789581856, + 0.8459542642975082, + 0.5189399037858603, + 0.2535302003984826, + 0.36275145211112614, + 0.48400839758585756, + 0.6285648741302996, + 0.6191394018077834, + 0.7749665246023152, + 0.45757180036095835, + 0.7671537731273556, + 0.4709601630937571, + 0.42941454508912114, + 0.8657921575600451, + 0.8985445127220502, + 0.9861050189074767, + 0.24757531177032255, + 0.978850701352717, + 0.7158918887968034, + 0.6831064106036061, + 0.9180071448105787, + 0.8376882158653953, + 0.9638665261744293, + 0.45042997757613545, + 0.21665688963627494, + 0.35195278013601794, + 0.05267969117623417, + 0.5389555311153045, + 0.9114851765836308, + 0.6258773970631035, + 0.7199341249146247, + 0.7923169276434796, + 0.8366591904387193, + 0.18053239868755144, + 0.272265164206111, + 0.331101485981824, + 0.8612879717194625, + 0.634731689137481, + 0.6486972068084338, + 0.33847889999990544, + 0.9967302039665963, + 0.2414374570780865, + 0.847995824351949, + 0.11948257056250688, + 0.02712104372887425, + 0.1840501737732555, + 0.9860125291135977, + 0.2193886006138287, + 0.7272713759543263, + 0.21513305795319226, + 0.5524594198562498, + 0.7513328175757993, + 0.09209983389065812, + 0.8560345875874015, + 0.37399657249679696, + 0.08589741028223441, + 0.3236249823421554, + 0.8698790447233965, + 0.1790385977070852, + 0.818425585420191, + 0.5829548995026376, + 0.984699782767251, + 0.18726766084750612, + 0.0658699939740679, + 0.8821854015107203, + 0.2788585976377317, + 0.2940684401683791, + 0.6238528546466062, + 0.9857679125734509, + 0.5518248962183375, + 0.3293403217991683, + 0.39029609682738287, + 0.6664252543793544, + 0.9743791622455666, + 0.22571944921717335, + 0.526565707851956, + 0.9769750936911633, + 0.39225171270804193, + 0.3389503252328022, + 0.30958977269472954, + 0.9213149201546954, + 0.5618971748260793, + 0.8745377717246547, + 0.5394045987877338, + 0.19551840539914045, + 
0.35059087479820794, + 0.7836610133411863, + 0.29958461037233053, + 0.3938045042892838, + 0.18104328507119194, + 0.06778745965102362, + 0.9561014878015668, + 0.4551212110068099, + 0.025066292417908254, + 0.9492800499708482, + 0.47352200454570115, + 0.48246698916058783, + 0.18473893932854824, + 0.20994529454414756, + 0.33427933653414255, + 0.2864221132522502, + 0.3156841132503668, + 0.27292955984128087, + 0.4566805827078497, + 0.1462692343227675, + 0.7712660332064996, + 0.32975091626883435, + 0.5574361427043032, + 0.017003207714762714, + 0.5102761455044432, + 0.4111481217664793, + 0.4409066019171828, + 0.9072393279560211, + 0.5483245331582209, + 0.5609330056140861, + 0.6302384260721924, + 0.6742101055320844, + 0.5015443794327109, + 0.9202060801646266, + 0.4150966747261118, + 0.7791425089745136, + 0.3891997488372002, + 0.10129548673467059, + 0.36666062118419507, + 0.636617168184616, + 0.814697212596663, + 0.001881280677498931, + 0.60526396245784, + 0.721749351841282, + 0.95594196409197, + 0.7585796885360693, + 0.919887225009393, + 0.6189213811964427, + 0.2548479350612681, + 0.9162607004972148, + 0.7172637798754251, + 0.283564079660791, + 0.9552100378519058, + 0.6670310568579203, + 0.5654935550366178, + 0.24678578258378903, + 0.6118146734984746, + 0.07729267856727107, + 0.3301131217627997, + 0.24512151253567938, + 0.6908428767174394, + 0.5206197306406314, + 0.14555190477564528, + 0.3111331699242166, + 0.7052840640615085, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3764290776930458, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4146979809830683, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9958462749035231, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15849243309504923, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9572203123547415, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.541530670814326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.767028446649636, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.1309162637452962, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10659462196323277, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6772010208911962, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.43806594461121307, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5769624715772051, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19737388285992674, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9971339679357473, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.647689169873493, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3909836391230528, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02543957664974894, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8964726438537993, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30631061637899615, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.862029888701881, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.005941495896108373, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.556525443906726, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6817726202638454, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8759690783659748, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5360842728537408, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.3718155827833767, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2659724158347494, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7644468243071734, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20475493002236655, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3995961251457969, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9932862480814585, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34550246822699693, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7738675159774514, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.024816608620341962, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5665559410658447, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9452069954015251, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8017745565146187, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5238548480735659, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8706467667318868, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4353807234830298, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8306436124180614, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.09064214693350137, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8685785467629582, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8345253702291049, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11103772926010236, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7070617835227128, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23790479221880045, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9264437066213292, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.45351833013599707, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8606352509445246, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8580755998879297, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5067469023858207, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.929885851198275, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6782413640337536, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7098499981352698, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5823811990622354, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9674534075204786, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5798472290187143, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7175920799518698, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30953641019091327, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12167573120389918, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.31084959197890827, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34157322002831525, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.13141740731351004, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18204780231842665, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7288130733024423, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4139691275664631, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1475584382157734, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + float data[17 * 17 * 2 * 2] = { + 0.13861991091475812, 0.8795377572972203, 0.5293502876783279, 0.37446715213889414, 0.10525121122814463, + 0.1470755313991291, 0.8291193070208391, 0.37998233432424766, 0.14870114967919246, 0.561247554034285, + 0.42200289273599556, 0.2031003471352949, 0.37158545264352527, 0.7662220587760888, 0.686857118791328, + 0.12415625519029183, 0.08879297931832297, 0.2986541461597556, 0.9305878823942679, 0.6085733581700042, + 0.06760373986806234, 0.42411698786743934, 0.509816187082568, 0.3796334566305014, 0.003849344223073925, + 0.7611568879677035, 0.41487977014592914, 0.8371386332055144, 0.7561138729196587, 0.11264806170566222, + 0.09554332355010453, 0.012793800556977097, 0.0028097578732378947, 0.440973443278477, 0.7412162233285089, + 0.5833205323286814, 0.7353701611370006, 0.25390038851594676, 0.9973832859416821, 0.09779108988244167, + 0.6933417527357065, 0.26612481125474574, 0.23910159953071597, 0.5520359287683562, 0.2656723224637475, + 0.3350292840944643, 0.48004632089879007, 0.4849558923188444, 0.31405414074086935, 0.3005887681812899, + 0.039772947348565624, 0.5282674162369237, 0.9820863796986057, 0.22284371079118692, 0.0894049198451472, + 0.06675479822979058, 0.6698167465680883, 0.5659697003311785, 0.01874007337969552, 0.5553542608092429, + 0.7025073707567491, 0.7122019087559107, 0.11961507320612019, 0.39696323881119, 0.3764290776930458, + 0.8964726438537993, 0.5665559410658447, 0.5067469023858207, 0.7925940267469043, 
0.9567807277929921, + 0.7101967112871143, 0.2671520815086441, 0.9858147072404602, 0.20818145030885005, 0.7197179842368491, + 0.5412004706206277, 0.727796317012897, 0.60588935414239, 0.578232652038329, 0.9803949953224173, + 0.015787104612503655, 0.5040087971937715, 0.040072115029379685, 0.18482177407515232, 0.27491105976633057, + 0.29392139130299244, 0.5804692341544302, 0.28474674412781464, 0.6369448325207008, 0.6363756825181826, + 0.6623947182585623, 0.5217727387416771, 0.6896774205279625, 0.7333242098434483, 0.5438434786671921, + 0.27255844427862186, 0.9323244549718339, 0.261729101999164, 0.17343133597811278, 0.09233866919232392, + 0.6913756703206293, 0.9569771739814522, 0.7367546132475674, 0.11346891778995816, 0.16754362854376104, + 0.6143153706299556, 0.13201613557039393, 0.6851858317061092, 0.8188111598667317, 0.631409359723997, + 0.09897754229118771, 0.0005627333655138234, 0.5428410503366073, 0.011901826035510932, 0.8999983118049748, + 0.9053829799883957, 0.5125348441029931, 0.2955498735301396, 0.6003901478395222, 0.1141296807683726, + 0.9913510386173713, 0.8816750025849788, 0.32512047896820595, 0.7571659430911946, 0.9375861536812142, + 0.14633316716600764, 0.9128032309113029, 0.030398776009645023, 0.8736979003360933, 0.4793915962021015, + 0.9031362578958313, 0.8521206636214926, 0.4146979809830683, 0.30631061637899615, 0.9452069954015251, + 0.929885851198275, 0.7260342849793483, 0.5704212237455046, 0.8276019986094242, 0.686751963460763, + 0.20120410264885236, 0.13150180417478674, 0.16844251099981267, 0.9234485268943335, 0.7646333976747771, + 0.31690962084442686, 0.14661295960750242, 0.4262558321277006, 0.15173165326001736, 0.21228124048991093, + 0.5333431363401799, 0.3659515307852249, 0.39265689606394716, 0.7296604549037735, 0.8299745503174504, + 0.8886441531294881, 0.15243591466584205, 0.6650141171161462, 0.5339333337288752, 0.6990554941733981, + 0.010340535191060374, 0.04241446875266841, 0.9106147969126901, 0.8768036198178328, 0.7563590156106308, + 
0.3321108610667033, 0.19580399811145166, 0.3319762164814921, 0.9033088706649143, 0.9575885483833089, + 0.7078571833791006, 0.7118863871542145, 0.5709275013402282, 0.2436150246808676, 0.48145075892895806, + 0.8291405634796339, 0.4128767392139201, 0.987210551195358, 0.603131492234237, 0.7454138001482092, + 0.18461034076886662, 0.6372128000142347, 0.27265318902818514, 0.6496653584298466, 0.917605405768162, + 0.761113851092484, 0.8791795132792125, 0.44449868173307283, 0.5914222027681203, 0.7437116188736885, + 0.8411100928304986, 0.9805574793354571, 0.8848126150904408, 0.45594654788563493, 0.017593478154503073, + 0.62370311211052, 0.02129918275230247, 0.830052343780784, 0.7060157268133952, 0.5317392897133469, + 0.9958462749035231, 0.862029888701881, 0.8017745565146187, 0.6782413640337536, 0.7696659137833982, + 0.7982349711510114, 0.7724279248361262, 0.5670304904635779, 0.8785872487273547, 0.004734651371140819, + 0.8861613857589661, 0.007148288012197801, 0.37446354377616475, 0.6690845068463606, 0.343469114076287, + 0.0857835687436056, 0.7646072372892175, 0.7188268519310974, 0.017394361915610057, 0.4467901200429536, + 0.7758773698800284, 0.9407991624965886, 0.7883518755638007, 0.661795939990852, 0.3065250204557999, + 0.1865674886269657, 0.4391735391309788, 0.13366433186289184, 0.4170857632470394, 0.27113574284356157, + 0.2504695401963266, 0.8378440756500211, 0.044089732363661094, 0.7096913513550435, 0.15170174477220055, + 0.4622738238198597, 0.6890756783794824, 0.08449102565738409, 0.21393977565249778, 0.9381592583428692, + 0.43909351458338675, 0.9898338931888807, 0.443592550980236, 0.9596830650619335, 0.2939490692189499, + 0.24247197683608523, 0.5095334554083597, 0.3450364035776594, 0.2459954395643219, 0.16556601945810856, + 0.09938394670258921, 0.3035151186951639, 0.4618194259116881, 0.8106885120353601, 0.028315604429974273, + 0.4686002937786584, 0.2407026393868288, 0.4321572361784677, 0.45463003066865415, 0.9453220019662685, + 0.36086002695619024, 0.7113731972624318, 
0.8298597748035649, 0.6452195208794166, 0.8640851072532453, + 0.007494933722813291, 0.4532423883161699, 0.4358498394990138, 0.15849243309504923, 0.005941495896108373, + 0.5238548480735659, 0.7098499981352698, 0.15128698698720322, 0.8141904135220546, 0.7007554265498704, + 0.5212431731505457, 0.10445646966322941, 0.7395150888511759, 0.40115166637181754, 0.19932525488647057, + 0.6279086302955916, 0.547581203606972, 0.1470799829764481, 0.9898306221037652, 0.30063695907830557, + 0.6530263306464384, 0.6294304491599311, 0.4747038016224169, 0.5803620249830622, 0.007521198939880125, + 0.23594389336137767, 0.82876683912034, 0.834738368359478, 0.7474194348631291, 0.7112579137154827, + 0.9175597660672588, 0.9658333522412966, 0.5777144381034068, 0.10185947547633445, 0.28147168770097997, + 0.49084190134047334, 0.8584438346701353, 0.20139399288408033, 0.25502934165477764, 0.5982948387159709, + 0.16969711093899553, 0.1180404666237802, 0.6415827981200575, 0.7364746708042765, 0.21749292052270808, + 0.8399634215048786, 0.799877067418722, 0.1728040800468621, 0.36244880368539034, 0.7632940118923165, + 0.5105555039916515, 0.8960926950526985, 0.6945674442909555, 0.5303499842405933, 0.9146513147957873, + 0.5284394832775934, 0.04384851936719336, 0.4155261517501555, 0.9060172079755968, 0.6568173944147312, + 0.7586877459463743, 0.5548711377923504, 0.791556691837571, 0.7069769008278068, 0.869698410395238, + 0.025360358738519495, 0.32491506165874695, 0.5579428147048966, 0.9767059033674064, 0.8232568061375535, + 0.6486302438986716, 0.9572203123547415, 0.556525443906726, 0.8706467667318868, 0.5823811990622354, + 0.13357691464798138, 0.9446884919932719, 0.3346104779456238, 0.4569527670153789, 0.2908730982610468, + 0.6572566972190457, 0.532915432070937, 0.9495997246091489, 0.5010437198187823, 0.2048989668957929, + 0.40015851318727125, 0.9396139468185104, 0.09963121799246955, 0.8170741278189959, 0.5389460419617879, + 0.45899358803544554, 0.17217086189255992, 0.5547941031298822, 0.5987416086426901, 
0.6894229831946286, + 0.5648254121117188, 0.7309773397133232, 0.6698552912716527, 0.270746137652799, 0.053705219700942, + 0.5063767684051376, 0.780388759983075, 0.7248139481233704, 0.35796688301072, 0.2772421373998536, + 0.7925588085624273, 0.7626497479115815, 0.20469063893497697, 0.04711937267440225, 0.45037796758318804, + 0.75066781795124, 0.09371763139775569, 0.23774692855986723, 0.23963041265644203, 0.22192123430700905, + 0.17616094680644334, 0.6737215431713256, 0.36874366609595643, 0.28396757247535576, 0.15162118086324772, + 0.30986446944527846, 0.18375410637584955, 0.9112190358293802, 0.690327705519752, 0.5491562617797402, + 0.5281492850328181, 0.26330626036820004, 0.9014654398254837, 0.1401554977665822, 0.5460266989110848, + 0.3113617142082512, 0.8026738672485313, 0.7932244713519243, 0.9280948067239118, 0.07148685551219547, + 0.2336754606791367, 0.7323878899162881, 0.9873043353493716, 0.7195158974755017, 0.541530670814326, + 0.6817726202638454, 0.4353807234830298, 0.9674534075204786, 0.21401989030280077, 0.002396651178786957, + 0.03442022110787202, 0.43477271771982284, 0.3050069506431471, 0.08089773378821119, 0.6291590014933156, + 0.5787097784473253, 0.6236653000860762, 0.461475864945851, 0.9981627789260562, 0.7313260977369199, + 0.713337515092525, 0.5224297784281214, 0.3200659468426875, 0.9107728219751681, 0.10737739320492923, + 0.3141596605465167, 0.44180687340667857, 0.032906572995198724, 0.3873591518740387, 0.48263368203794865, + 0.3349303332479078, 0.7598530618731676, 0.8659119048707833, 0.2464902259465227, 0.4553140241842599, + 0.9818987602819136, 0.5925084869238261, 0.4728058512337572, 0.05359923274228118, 0.4657556371687326, + 0.9111021456064748, 0.47155205473777895, 0.03929951143652333, 0.594695390220099, 0.919429204772624, + 0.9833083681599707, 0.8592328433935742, 0.6883643754551224, 0.8384109327495086, 0.03332507249957106, + 0.6149351679083243, 0.2719751089503445, 0.1575614925244445, 0.7163502359065218, 0.16259509931077443, + 0.37581000966714084, 
0.80509149343632, 0.3421635067549482, 0.24314980248993667, 0.83824915279393, + 0.28370128790625326, 0.9787191868196575, 0.7595415381121309, 0.06549247203877806, 0.4282317439660449, + 0.8307200462203801, 0.7096499077090591, 0.36632445386556045, 0.2357917781969051, 0.12284932223048928, + 0.6615524634203029, 0.2322214574176914, 0.767028446649636, 0.8759690783659748, 0.8306436124180614, + 0.5798472290187143, 0.011471956973760244, 0.5773682448264239, 0.6035851122706498, 0.4704668571121897, + 0.17339338632884005, 0.6558446451611865, 0.7343833438614558, 0.14006001098754373, 0.6715959811967953, + 0.680394068488586, 0.41474303482746533, 0.9681288328156152, 0.233220125983571, 0.7310433251859529, + 0.4310140395683666, 0.47809152528622967, 0.05314850242766944, 0.35523994189400077, 0.5462996571286937, + 0.9657400895521029, 0.7056619496920902, 0.6766207358391074, 0.636139222807529, 0.47776444371478033, + 0.5456598510028011, 0.9060741761912895, 0.4261420957770189, 0.9687034478573067, 0.13598966999855922, + 0.04914274667362528, 0.1721447584349456, 0.07412119197941236, 0.012529344446802426, 0.19144189303663528, + 0.47147571375284814, 0.5721744237867166, 0.6201816276837667, 0.4845610368675931, 0.5003705454959895, + 0.6141593977164898, 0.006964022371572898, 0.6541048336512872, 0.41863524425463694, 0.042054099789581856, + 0.3412241888432692, 0.8575012513297512, 0.1212972250835862, 0.8459542642975082, 0.8049499889919485, + 0.8537472449635101, 0.9945715392091543, 0.5189399037858603, 0.667343472513223, 0.7450443706280852, + 0.40446452849666314, 0.2535302003984826, 0.4701897375183579, 0.5803426777495934, 0.5562287852609348, + 0.36275145211112614, 0.7418169336190366, 0.5689408381290824, 0.46298522943000064, 0.48400839758585756, + 0.1309162637452962, 0.5360842728537408, 0.09064214693350137, 0.7175920799518698, 0.9737001474297444, + 0.9092918118236812, 0.21327330991620574, 0.6285648741302996, 0.20654566517027428, 0.17460334682241974, + 0.4174038781545222, 0.6191394018077834, 
0.17248149069917285, 0.2340700994442091, 0.5506923312964094, + 0.7749665246023152, 0.053157049516807664, 0.9298928386669005, 0.08960554649117725, 0.45757180036095835, + 0.2861359192158687, 0.36914916933156305, 0.16821297722710693, 0.7671537731273556, 0.7593751161265202, + 0.12622765437152517, 0.009183189735727626, 0.4709601630937571, 0.06755860607493347, 0.10689808095884157, + 0.9519318593697729, 0.42941454508912114, 0.3852737431079606, 0.4310771822646119, 0.7911885910393738, + 0.8657921575600451, 0.04992360626473957, 0.13267160640865372, 0.30243793737732894, 0.8985445127220502, + 0.302489320076742, 0.14895973120547823, 0.36171955618702634, 0.9861050189074767, 0.40132951312095344, + 0.1397935054459304, 0.7275163676699398, 0.24757531177032255, 0.6795152678016566, 0.9349783221173635, + 0.6017302317686811, 0.978850701352717, 0.782572010580318, 0.2503384778850122, 0.9211449821926627, + 0.7158918887968034, 0.5746635474611804, 0.31822595375881446, 0.016789744324152922, 0.6831064106036061, + 0.7966442996538724, 0.9913550891610706, 0.2231842692779209, 0.9180071448105787, 0.15459601428614667, + 0.55454466322197, 0.7320508107990614, 0.8376882158653953, 0.10659462196323277, 0.3718155827833767, + 0.8685785467629582, 0.30953641019091327, 0.3693961500059507, 0.6118908274540877, 0.9172186193742928, + 0.9638665261744293, 0.06411018686027592, 0.8779734145096334, 0.38726858040604806, 0.45042997757613545, + 0.22945572978660644, 0.3543109331520725, 0.45346727152383326, 0.21665688963627494, 0.8731427819714899, + 0.306988901348892, 0.9736037921150315, 0.35195278013601794, 0.5076890561556178, 0.6726982566869614, + 0.1415848835987198, 0.05267969117623417, 0.12930627273556972, 0.02467090616045342, 0.4287144407465886, + 0.5389555311153045, 0.9656927645096538, 0.06553857351644254, 0.7192727473967828, 0.9114851765836308, + 0.38569829114808596, 0.22442983249751902, 0.44212966241115104, 0.6258773970631035, 0.12375154698912139, + 0.26395669898138985, 0.6075560359436376, 0.7199341249146247, 
0.2509506112091433, 0.9632237865411843, + 0.2699313871787882, 0.7923169276434796, 0.1319370980474388, 0.732461885956622, 0.9501669443744405, + 0.8366591904387193, 0.38289710756686746, 0.7317620554576368, 0.6891624698228346, 0.18053239868755144, + 0.25840864046822043, 0.49519103539917386, 0.03594820662269016, 0.272265164206111, 0.38571855983419834, + 0.18413692212953858, 0.4140665514245061, 0.331101485981824, 0.8364680728786794, 0.7150341406645073, + 0.19123529007541273, 0.8612879717194625, 0.05987516981191243, 0.04522592798730041, 0.6956985142613639, + 0.634731689137481, 0.6772010208911962, 0.2659724158347494, 0.8345253702291049, 0.12167573120389918, + 0.42570991590805674, 0.6447408662943692, 0.18608296200597518, 0.6486972068084338, 0.4119405321035826, + 0.9385896789116761, 0.21395244570112526, 0.33847889999990544, 0.17762967468331525, 0.24032378135246224, + 0.14994545679313542, 0.9967302039665963, 0.637507115987954, 0.9682515458629827, 0.7955362084226552, + 0.2414374570780865, 0.6450032164935743, 0.26982819483722165, 0.2908836237948339, 0.847995824351949, + 0.5654207482006985, 0.48178308190918895, 0.3189721351237217, 0.11948257056250688, 0.6329507004942213, + 0.5823940174941661, 0.9113691555501472, 0.02712104372887425, 0.33844084372877925, 0.409359485776034, + 0.16608774824486627, 0.1840501737732555, 0.9049256528358469, 0.40752327445425973, 0.3652540906020737, + 0.9860125291135977, 0.10047293005385105, 0.1471890531783654, 0.7661105179559459, 0.2193886006138287, + 0.8194138858161407, 0.6242049562872195, 0.5503285960900492, 0.7272713759543263, 0.6769310231737116, + 0.18811560788574455, 0.6296482965637312, 0.21513305795319226, 0.20440622491432292, 0.9893442282921309, + 0.021618970592347186, 0.5524594198562498, 0.8741638758927367, 0.42331698403095386, 0.3222338893974612, + 0.7513328175757993, 0.44505905665328027, 0.9302534567908484, 0.1771909927336177, 0.09209983389065812, + 0.21280609696170782, 0.35889687716970375, 0.36132216279703644, 0.8560345875874015, 
0.43806594461121307, + 0.7644468243071734, 0.11103772926010236, 0.31084959197890827, 0.011781058613303919, 0.8660877509389459, + 0.7031384528295931, 0.37399657249679696, 0.47688438712340286, 0.928772577903708, 0.5596365181728887, + 0.08589741028223441, 0.07768770074191944, 0.6158032483682454, 0.8053366142728067, 0.3236249823421554, + 0.00822444546154455, 0.16070664756432096, 0.8638400429864582, 0.8698790447233965, 0.9681521716228816, + 0.9661412869875025, 0.9085095227101969, 0.1790385977070852, 0.776893063617345, 0.31898622890124084, + 0.5058802974670702, 0.818425585420191, 0.17870297652809786, 0.5441507345809006, 0.022489996885331487, + 0.5829548995026376, 0.9918631665731272, 0.5055786252454072, 0.16136704513340805, 0.984699782767251, + 0.18754660546807156, 0.8853269573231654, 0.6783741471392065, 0.18726766084750612, 0.09674403006985133, + 0.9931343022347453, 0.2433432886640784, 0.0658699939740679, 0.7256042711333616, 0.08922742117815974, + 0.08811202713859834, 0.8821854015107203, 0.37613730048899263, 0.45857685361148903, 0.7019149642340254, + 0.2788585976377317, 0.5668621385905648, 0.7770746229113067, 0.7074398653385896, 0.2940684401683791, + 0.8147168836664814, 0.0887419464763255, 0.4188458869744829, 0.6238528546466062, 0.7145827173057614, + 0.3162106953425323, 0.7417870718217474, 0.9857679125734509, 0.5419252027429404, 0.8536288391503049, + 0.09314542994725039, 0.5518248962183375, 0.5769624715772051, 0.20475493002236655, 0.7070617835227128, + 0.34157322002831525, 0.8624241578915682, 0.28011809028393886, 0.9526702722469567, 0.3293403217991683, + 0.9767778331103008, 0.8521904076959731, 0.15447695419829233, 0.39029609682738287, 0.33134453444767853, + 0.7248634019855653, 0.5234613373790115, 0.6664252543793544, 0.1511696460358204, 0.49869013399498285, + 0.4587900976769065, 0.9743791622455666, 0.931109609080415, 0.4053194525003626, 0.4666560847640946, + 0.22571944921717335, 0.26760280136617476, 0.4735972943090352, 0.47741158025412755, 0.526565707851956, + 
0.720504118679755, 0.17389059420154984, 0.3941302129542221, 0.9769750936911633, 0.561054639152762, + 0.03366201552897685, 0.8725938986300577, 0.39225171270804193, 0.04403144974993356, 0.9823521603852087, + 0.1868618851608026, 0.3389503252328022, 0.6650312577642018, 0.7808072048924388, 0.5960817832633942, + 0.30958977269472954, 0.20842812187870807, 0.5526270573062637, 0.5402399164827054, 0.9213149201546954, + 0.9709344182765282, 0.6738010436288211, 0.6542430096117432, 0.5618971748260793, 0.29749853648546964, + 0.5432293708851752, 0.14093404302294743, 0.8745377717246547, 0.16720208620538812, 0.6914357121517429, + 0.7351008222411094, 0.5394045987877338, 0.936915662933375, 0.43842610640884583, 0.626896869991658, + 0.19551840539914045, 0.786663329801668, 0.8530609582658267, 0.8153132939963299, 0.35059087479820794, + 0.19737388285992674, 0.3995961251457969, 0.23790479221880045, 0.13141740731351004, 0.8726576278463952, + 0.11278100676094394, 0.4692693688236108, 0.7836610133411863, 0.9747036251493297, 0.4941714606912455, + 0.05305160105808626, 0.29958461037233053, 0.09534090496984604, 0.4638603168540302, 0.2791581608581686, + 0.3938045042892838, 0.6034024789176371, 0.22194186056037057, 0.9228439462893766, 0.18104328507119194, + 0.9165559738358574, 0.6716327950358132, 0.7332915821155096, 0.06778745965102362, 0.17251694852573773, + 0.9166902550664012, 0.6628517982577824, 0.9561014878015668, 0.6518257199411946, 0.8351982565565297, + 0.04860139460088986, 0.4551212110068099, 0.30944313225985576, 0.8412216162402254, 0.08951277138503844, + 0.025066292417908254, 0.7563828055308841, 0.2222539480109168, 0.8320411266990881, 0.9492800499708482, + 0.253253633473531, 0.5991520863203645, 0.2048707134105746, 0.47352200454570115, 0.5629789915185913, + 0.38864001352791677, 0.8068872022870146, 0.48246698916058783, 0.15465010252531697, 0.34985786042192857, + 0.4948504180620279, 0.18473893932854824, 0.6402301841568493, 0.15795832535816468, 0.41184033137111975, + 0.20994529454414756, 
0.0795608311951832, 0.9661077058919707, 0.7190998979639045, 0.33427933653414255, + 0.13552737276834392, 0.012634417342274218, 0.8380990899424366, 0.2864221132522502, 0.4753501403209113, + 0.343423762898566, 0.086092548343773, 0.3156841132503668, 0.9971339679357473, 0.9932862480814585, + 0.9264437066213292, 0.18204780231842665, 0.12763429765746592, 0.451146025504531, 0.9820085929439971, + 0.27292955984128087, 0.7976683366910478, 0.8761611666855926, 0.32879915468366416, 0.4566805827078497, + 0.8941181691336204, 0.8608539041919699, 0.84204761146793, 0.1462692343227675, 0.5075573010849087, + 0.8903503433345549, 0.8700722120474124, 0.7712660332064996, 0.18532317502693563, 0.6548981638110877, + 0.48076406766202595, 0.32975091626883435, 0.6848997362918184, 0.35486660560902894, 0.0732106249839285, + 0.5574361427043032, 0.5714156890506997, 0.08811346271007414, 0.14783939754527176, 0.017003207714762714, + 0.44174703642978874, 0.8110740131813556, 0.574931463245764, 0.5102761455044432, 0.3424985993076507, + 0.8539951703461579, 0.7079277349244375, 0.4111481217664793, 0.951509130793907, 0.9555089087268872, + 0.7292094480906886, 0.4409066019171828, 0.08295791787636164, 0.9562723010283051, 0.7047746244055081, + 0.9072393279560211, 0.11549926102263153, 0.4681219537960889, 0.7943898405857803, 0.5483245331582209, + 0.2749400181421586, 0.37592691228594466, 0.45038405067793374, 0.5609330056140861, 0.9113280686842665, + 0.9992573696993887, 0.3012144754381675, 0.6302384260721924, 0.7990989912334494, 0.664341812871904, + 0.27280381214968086, 0.6742101055320844, 0.14710662300775412, 0.23944640184088473, 0.12926601852738606, + 0.5015443794327109, 0.647689169873493, 0.34550246822699693, 0.45351833013599707, 0.7288130733024423, + 0.46955053458590934, 0.2963560937896125, 0.05313715099993188, 0.9202060801646266, 0.40065138391111943, + 0.508501991516788, 0.9954587172603774, 0.4150966747261118, 0.5752642927349247, 0.15376590814347213, + 0.4718694982334253, 0.7791425089745136, 0.07116527059062705, 
0.7379530846527411, 0.4298764101970639, + 0.3891997488372002, 0.2712956260859689, 0.7977010542015935, 0.6681098129873408, 0.10129548673467059, + 0.080480044589748, 0.8915772781308771, 0.42036967115181456, 0.36666062118419507, 0.6450046812325141, + 0.5524912580999269, 0.0839795459090027, 0.636617168184616, 0.531319360316121, 0.30375439556599226, + 0.5792609679185063, 0.814697212596663, 0.781899452578717, 0.6566031761406997, 0.37605072282304686, + 0.001881280677498931, 0.11552970112165872, 0.31858523647827175, 0.6583979311718429, 0.60526396245784, + 0.5611500316920904, 0.019979751727917994, 0.5148337803671061, 0.721749351841282, 0.9249303707149428, + 0.8743281776278087, 0.385262882179612, 0.95594196409197, 0.3779808468902329, 0.3810202153747745, + 0.5273305274418582, 0.7585796885360693, 0.04917726412895995, 0.5203446091374957, 0.6199363284810673, + 0.919887225009393, 0.7272035505938381, 0.966155481985703, 0.16762752670302516, 0.6189213811964427, + 0.20229470167487462, 0.37469353816237017, 0.017492223678952867, 0.2548479350612681, 0.3909836391230528, + 0.7738675159774514, 0.8606352509445246, 0.4139691275664631, 0.6225381036572762, 0.7352375292410388, + 0.30648818939511857, 0.9162607004972148, 0.09665526780685885, 0.3666447891178437, 0.1260323700300645, + 0.7172637798754251, 0.4790754927951175, 0.015451749872628873, 0.7715815662819216, 0.283564079660791, + 0.8852506071141146, 0.7832706823237181, 0.4481707153463781, 0.9552100378519058, 0.6378272080403014, + 0.3999266945419052, 0.763405910124729, 0.6670310568579203, 0.4302617641125852, 0.4364277580092467, + 0.09025576212085595, 0.5654935550366178, 0.17529425791527875, 0.7230546551485972, 0.5033159147423758, + 0.24678578258378903, 0.2911580454791233, 0.157525724298896, 0.35268716544351064, 0.6118146734984746, + 0.5685378955963167, 0.5729599421962962, 0.25698746611256706, 0.07729267856727107, 0.5996629884166993, + 0.19803754933587325, 0.8420450508984809, 0.3301131217627997, 0.35379997432878574, 0.8333412543498672, + 
0.5982144085578488, 0.24512151253567938, 0.4812449632594248, 0.7867785122337513, 0.9586114553080439, + 0.6908428767174394, 0.9327970145025657, 0.27896398037168957, 0.6344034099803877, 0.5206197306406314, + 0.29959697003490815, 0.5484736202867737, 0.5143253458855047, 0.14555190477564528, 0.18566739299261037, + 0.32766071211385694, 0.5249453898611923, 0.3111331699242166, 0.7906655716605615, 0.38499709012670014, + 0.12136272756597788, 0.7052840640615085, 0.02543957664974894, 0.024816608620341962, 0.8580755998879297, + 0.1475584382157734, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NCHW, {8, 2, 16, 16}, {17, 17, 2, 2}, DT_FLOAT}; + TransResult result; + + FormatTransferFracZNchw transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, int8_1) { + uint8_t ret[1 * 1 * 1 * 1] = {70}; + uint8_t data[1 * 1 * 16 * 32] = { + 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, {1, 1, 16, 32}, {1, 1, 1, 1}, DT_INT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, uint8_2c_2n) { + uint8_t ret[32 * 64 * 3 * 3] = { + 116, 236, 243, 202, 173, 49, 65, 195, 129, 147, 208, 52, 95, 22, 14, 0, 158, 123, 48, 214, 35, 238, 226, + 165, 2, 152, 179, 65, 128, 243, 151, 3, 125, 102, 201, 0, 40, 143, 164, 240, 58, 38, 208, 47, 98, 100, + 174, 44, 73, 56, 234, 197, 184, 44, 194, 8, 25, 56, 21, 212, 78, 69, 123, 33, 97, 232, 197, 162, 177, + 89, 214, 158, 93, 20, 117, 48, 95, 252, 253, 244, 153, 130, 131, 63, 28, 138, 102, 148, 196, 214, 44, 92, + 184, 205, 227, 94, 54, 120, 212, 108, 3, 188, 164, 193, 192, 65, 178, 15, 251, 111, 186, 189, 219, 125, 210, + 30, 1, 49, 89, 141, 253, 167, 223, 121, 13, 161, 249, 133, 41, 254, 51, 42, 163, 46, 203, 225, 46, 92, + 28, 62, 106, 68, 232, 21, 210, 128, 190, 23, 109, 116, 98, 124, 203, 23, 3, 119, 254, 214, 50, 184, 168, + 223, 163, 208, 45, 106, 247, 202, 25, 100, 152, 80, 226, 149, 178, 136, 37, 28, 199, 
53, 144, 19, 232, 201, + 218, 223, 13, 160, 213, 249, 55, 108, 96, 199, 33, 61, 157, 54, 202, 80, 28, 233, 104, 125, 167, 0, 155, + 157, 233, 95, 111, 15, 225, 2, 95, 166, 185, 205, 203, 135, 51, 40, 192, 21, 203, 38, 129, 233, 73, 184, + 145, 206, 45, 126, 167, 170, 77, 202, 98, 180, 59, 231, 130, 187, 242, 199, 40, 223, 159, 220, 206, 147, 87, + 249, 251, 189, 122, 84, 183, 31, 66, 122, 159, 249, 47, 179, 81, 88, 77, 13, 245, 122, 62, 146, 211, 198, + 145, 149, 165, 235, 132, 96, 229, 210, 28, 207, 208, 29, 197, 214, 131, 27, 2, 217, 212, 51, 233, 27, 44, + 226, 210, 139, 68, 24, 186, 9, 53, 114, 44, 173, 13, 141, 85, 60, 15, 156, 78, 197, 64, 183, 204, 217, + 46, 200, 7, 160, 132, 173, 108, 251, 110, 4, 222, 40, 254, 99, 92, 156, 5, 69, 84, 243, 4, 156, 26, + 20, 124, 0, 251, 212, 241, 151, 153, 48, 193, 249, 70, 48, 32, 214, 101, 242, 38, 21, 248, 57, 124, 161, + 104, 26, 143, 217, 25, 107, 200, 194, 191, 241, 36, 211, 16, 164, 114, 181, 106, 59, 176, 201, 199, 231, 72, + 162, 98, 158, 10, 136, 42, 192, 32, 242, 32, 162, 169, 52, 246, 174, 221, 223, 95, 162, 185, 245, 29, 129, + 140, 187, 239, 30, 193, 130, 119, 2, 39, 251, 227, 103, 94, 253, 151, 71, 208, 251, 220, 56, 74, 79, 23, + 192, 42, 56, 65, 73, 246, 29, 195, 78, 58, 120, 203, 165, 163, 201, 198, 217, 163, 116, 78, 74, 7, 195, + 198, 90, 88, 123, 71, 128, 80, 141, 24, 157, 159, 55, 250, 149, 184, 38, 139, 86, 32, 191, 24, 204, 82, + 116, 195, 146, 206, 141, 103, 221, 44, 76, 230, 13, 110, 0, 184, 186, 48, 130, 232, 34, 190, 119, 48, 167, + 116, 149, 199, 66, 22, 237, 68, 79, 194, 217, 110, 97, 32, 21, 207, 110, 23, 1, 191, 224, 97, 127, 166, + 204, 49, 223, 142, 92, 158, 199, 6, 34, 211, 73, 72, 173, 74, 30, 103, 62, 229, 192, 30, 102, 57, 222, + 212, 118, 132, 212, 190, 126, 181, 149, 158, 223, 29, 132, 167, 156, 48, 85, 246, 217, 88, 124, 131, 159, 237, + 252, 60, 81, 54, 71, 76, 82, 242, 78, 167, 192, 250, 5, 142, 9, 27, 168, 246, 75, 171, 200, 128, 119, + 49, 241, 101, 4, 222, 182, 23, 154, 39, 
215, 108, 79, 40, 182, 79, 79, 49, 20, 69, 240, 105, 246, 215, + 111, 36, 108, 192, 69, 223, 179, 205, 100, 136, 13, 160, 93, 91, 132, 98, 39, 69, 246, 141, 237, 135, 55, + 151, 51, 153, 39, 129, 139, 59, 67, 90, 197, 231, 47, 129, 40, 241, 165, 191, 131, 18, 95, 147, 135, 49, + 228, 186, 84, 161, 178, 64, 123, 229, 28, 71, 112, 139, 195, 21, 99, 106, 31, 118, 101, 121, 7, 91, 128, + 136, 124, 147, 8, 241, 227, 130, 39, 151, 123, 234, 239, 240, 232, 211, 110, 176, 32, 91, 153, 230, 99, 53, + 174, 105, 231, 236, 115, 146, 97, 87, 172, 25, 204, 221, 103, 23, 208, 83, 125, 81, 87, 18, 233, 126, 188, + 244, 102, 140, 120, 37, 68, 128, 36, 177, 23, 236, 65, 70, 217, 217, 51, 189, 170, 99, 242, 77, 187, 153, + 85, 70, 111, 176, 94, 108, 100, 17, 67, 226, 89, 50, 201, 87, 106, 210, 234, 156, 51, 88, 9, 204, 4, + 132, 196, 163, 144, 232, 14, 49, 124, 77, 230, 81, 146, 128, 112, 226, 2, 230, 72, 106, 254, 126, 130, 191, + 2, 104, 232, 136, 73, 202, 174, 183, 32, 174, 27, 185, 186, 88, 173, 165, 62, 214, 23, 166, 35, 27, 9, + 106, 104, 144, 133, 112, 231, 191, 10, 8, 169, 204, 70, 149, 175, 2, 116, 26, 142, 44, 127, 123, 58, 56, + 203, 215, 226, 196, 81, 244, 147, 68, 74, 79, 122, 230, 79, 132, 121, 133, 93, 198, 240, 12, 222, 134, 141, + 84, 46, 2, 164, 39, 173, 159, 219, 24, 200, 0, 78, 41, 113, 82, 168, 214, 112, 237, 253, 149, 163, 119, + 244, 99, 218, 150, 153, 182, 109, 134, 87, 156, 175, 133, 72, 158, 166, 146, 181, 38, 135, 12, 138, 217, 157, + 187, 103, 200, 238, 239, 46, 26, 127, 20, 205, 101, 143, 79, 51, 221, 90, 204, 10, 216, 175, 95, 17, 78, + 63, 152, 111, 108, 168, 118, 150, 208, 194, 183, 23, 73, 36, 48, 11, 183, 126, 169, 222, 112, 189, 185, 157, + 59, 130, 174, 219, 216, 175, 140, 192, 69, 141, 224, 79, 32, 12, 44, 190, 199, 189, 76, 26, 103, 9, 43, + 22, 80, 218, 135, 235, 101, 253, 75, 154, 32, 216, 185, 145, 38, 243, 189, 234, 154, 180, 36, 130, 152, 138, + 22, 236, 147, 71, 135, 205, 76, 118, 25, 3, 178, 252, 222, 207, 131, 207, 181, 249, 50, 232, 51, 
111, 213, + 43, 126, 43, 65, 193, 68, 214, 12, 76, 116, 135, 118, 187, 233, 123, 75, 58, 36, 251, 46, 184, 119, 105, + 226, 182, 103, 185, 57, 219, 175, 240, 251, 2, 193, 44, 173, 166, 45, 140, 185, 90, 130, 45, 110, 225, 54, + 3, 94, 175, 208, 154, 150, 146, 12, 37, 89, 253, 71, 171, 191, 30, 139, 121, 94, 51, 16, 110, 100, 72, + 27, 87, 199, 230, 73, 95, 202, 79, 90, 20, 55, 181, 157, 180, 11, 116, 238, 236, 51, 24, 251, 119, 211, + 210, 40, 107, 186, 66, 43, 148, 37, 27, 182, 217, 65, 235, 20, 9, 54, 166, 99, 48, 182, 254, 149, 212, + 188, 55, 124, 50, 163, 177, 2, 109, 98, 246, 194, 16, 250, 203, 181, 0, 31, 183, 151, 222, 168, 145, 44, + 73, 109, 28, 75, 221, 205, 106, 56, 254, 24, 25, 182, 152, 216, 45, 31, 163, 185, 134, 241, 168, 236, 74, + 212, 152, 172, 4, 206, 72, 252, 213, 156, 241, 153, 204, 205, 95, 116, 212, 237, 132, 125, 29, 166, 127, 144, + 170, 28, 183, 12, 221, 217, 9, 129, 187, 41, 112, 29, 192, 40, 40, 162, 125, 157, 32, 191, 17, 215, 57, + 55, 101, 143, 130, 160, 213, 139, 6, 24, 3, 227, 193, 142, 228, 73, 86, 205, 184, 46, 187, 95, 85, 23, + 34, 136, 76, 246, 225, 13, 26, 138, 191, 143, 183, 98, 178, 195, 172, 92, 115, 244, 239, 6, 113, 101, 74, + 237, 75, 184, 186, 36, 43, 178, 244, 150, 16, 68, 123, 46, 94, 206, 139, 153, 144, 107, 11, 139, 76, 181, + 129, 38, 225, 250, 207, 174, 181, 129, 46, 103, 145, 219, 121, 124, 167, 102, 205, 115, 148, 30, 179, 219, 144, + 209, 99, 40, 186, 92, 214, 17, 184, 36, 0, 154, 188, 100, 131, 223, 47, 110, 31, 2, 58, 210, 212, 139, + 161, 184, 48, 168, 222, 241, 15, 247, 23, 21, 3, 36, 120, 235, 50, 125, 45, 187, 187, 78, 13, 176, 123, + 94, 118, 134, 35, 143, 85, 129, 82, 129, 218, 21, 93, 71, 81, 151, 10, 68, 21, 172, 136, 146, 230, 230, + 206, 144, 158, 204, 207, 101, 65, 30, 29, 33, 246, 131, 143, 254, 12, 74, 29, 193, 251, 185, 118, 107, 143, + 107, 104, 124, 32, 109, 245, 150, 239, 106, 81, 89, 158, 181, 159, 87, 99, 112, 60, 126, 46, 204, 143, 15, + 93, 252, 62, 191, 159, 155, 232, 92, 171, 31, 30, 
101, 74, 25, 94, 174, 58, 162, 166, 250, 167, 127, 23, + 203, 87, 250, 145, 60, 12, 169, 50, 174, 96, 87, 21, 114, 37, 50, 73, 131, 198, 129, 154, 187, 127, 191, + 145, 105, 65, 71, 61, 48, 45, 99, 136, 145, 240, 74, 55, 8, 199, 109, 168, 101, 252, 61, 4, 163, 57, + 26, 147, 182, 240, 87, 203, 209, 71, 123, 23, 196, 72, 246, 198, 74, 170, 40, 37, 205, 178, 190, 24, 40, + 25, 44, 224, 206, 62, 57, 27, 75, 188, 212, 246, 245, 240, 86, 65, 56, 160, 89, 169, 246, 227, 72, 3, + 51, 250, 31, 126, 78, 139, 185, 59, 230, 133, 193, 215, 224, 76, 235, 12, 64, 124, 176, 174, 173, 125, 113, + 61, 236, 43, 215, 177, 31, 175, 139, 8, 243, 76, 87, 150, 173, 229, 101, 81, 201, 71, 98, 165, 244, 137, + 89, 155, 18, 183, 59, 104, 216, 215, 13, 83, 85, 76, 80, 179, 204, 176, 150, 162, 133, 106, 123, 207, 213, + 105, 161, 243, 46, 179, 201, 82, 63, 62, 230, 226, 178, 110, 13, 147, 188, 164, 203, 10, 232, 71, 77, 179, + 39, 94, 100, 194, 81, 48, 216, 190, 242, 244, 60, 116, 150, 69, 101, 156, 231, 91, 100, 243, 236, 106, 65, + 146, 20, 126, 19, 55, 21, 221, 45, 245, 155, 233, 218, 161, 250, 138, 193, 177, 53, 146, 230, 204, 163, 144, + 148, 85, 91, 144, 227, 138, 71, 54, 165, 36, 117, 30, 149, 54, 87, 198, 177, 246, 252, 36, 63, 134, 209, + 144, 190, 111, 24, 145, 44, 158, 1, 117, 224, 223, 155, 147, 74, 10, 174, 98, 244, 29, 9, 138, 141, 93, + 27, 236, 175, 187, 131, 86, 161, 177, 153, 153, 189, 228, 132, 1, 164, 190, 14, 117, 56, 215, 68, 103, 3, + 190, 201, 127, 112, 48, 207, 36, 33, 79, 36, 194, 138, 185, 48, 72, 111, 90, 0, 148, 167, 123, 49, 164, + 86, 178, 40, 178, 66, 2, 197, 196, 108, 105, 118, 191, 205, 33, 172, 208, 162, 74, 174, 47, 166, 87, 205, + 6, 215, 68, 20, 157, 125, 241, 74, 31, 239, 161, 219, 143, 118, 29, 111, 44, 96, 23, 16, 245, 233, 164, + 201, 59, 221, 115, 238, 146, 9, 98, 36, 202, 64, 25, 144, 93, 157, 57, 192, 125, 200, 96, 137, 154, 80, + 112, 117, 254, 246, 159, 98, 180, 45, 101, 145, 189, 128, 199, 216, 176, 194, 238, 33, 4, 203, 226, 140, 50, + 196, 57, 
249, 66, 229, 60, 46, 98, 202, 230, 65, 234, 169, 212, 10, 0, 68, 99, 171, 68, 1, 91, 81, + 89, 166, 209, 23, 245, 241, 150, 62, 65, 75, 183, 151, 55, 94, 160, 37, 200, 9, 111, 15, 235, 223, 179, + 200, 120, 53, 90, 145, 106, 183, 195, 37, 209, 44, 237, 77, 223, 144, 232, 86, 233, 73, 224, 60, 16, 234, + 254, 12, 45, 200, 208, 76, 13, 41, 97, 65, 140, 231, 2, 223, 224, 153, 182, 210, 54, 1, 95, 198, 204, + 232, 239, 250, 161, 204, 250, 123, 115, 53, 46, 113, 68, 0, 249, 178, 191, 48, 122, 85, 130, 154, 231, 29, + 245, 184, 184, 237, 225, 89, 22, 168, 105, 85, 162, 18, 100, 242, 39, 175, 8, 75, 202, 77, 210, 20, 124, + 70, 229, 78, 179, 6, 139, 243, 83, 92, 144, 114, 188, 67, 231, 246, 136, 220, 176, 67, 56, 186, 28, 216, + 126, 233, 104, 197, 178, 77, 204, 220, 81, 29, 124, 198, 158, 123, 175, 190, 219, 57, 47, 209, 162, 224, 216, + 121, 161, 24, 79, 48, 189, 164, 239, 232, 191, 193, 97, 21, 86, 35, 219, 236, 86, 161, 27, 119, 224, 144, + 34, 179, 250, 52, 242, 223, 102, 185, 84, 163, 245, 178, 218, 158, 124, 68, 57, 190, 9, 79, 171, 1, 45, + 223, 100, 151, 249, 205, 37, 235, 224, 135, 81, 119, 209, 183, 68, 158, 179, 30, 121, 202, 74, 206, 115, 250, + 147, 245, 182, 33, 55, 152, 46, 139, 166, 154, 186, 69, 227, 229, 85, 218, 178, 172, 110, 120, 225, 69, 212, + 101, 15, 19, 91, 228, 89, 0, 195, 62, 230, 2, 136, 143, 122, 86, 71, 108, 32, 254, 21, 106, 247, 65, + 195, 45, 225, 248, 105, 30, 225, 127, 227, 41, 232, 27, 254, 162, 104, 227, 97, 187, 227, 144, 18, 225, 6, + 242, 189, 45, 114, 153, 48, 165, 152, 174, 87, 223, 122, 176, 35, 245, 221, 195, 186, 98, 185, 119, 21, 133, + 43, 193, 113, 37, 239, 79, 97, 36, 199, 1, 253, 185, 233, 167, 199, 102, 77, 85, 151, 153, 165, 161, 112, + 246, 77, 232, 210, 82, 119, 164, 121, 235, 251, 192, 137, 194, 45, 79, 117, 8, 20, 9, 4, 71, 26, 168, + 191, 251, 126, 152, 114, 133, 24, 137, 9, 222, 189, 24, 236, 158, 192, 108, 251, 206, 190, 134, 235, 216, 219, + 116, 70, 48, 116, 136, 182, 253, 177, 59, 161, 64, 38, 31, 248, 217, 
134, 17, 105, 24, 194, 243, 135, 93, + 56, 148, 203, 7, 242, 53, 28, 171, 254, 213, 38, 29, 22, 153, 156, 127, 216, 225, 28, 16, 40, 186, 146, + 46, 64, 15, 175, 213, 9, 97, 55, 82, 103, 133, 27, 167, 109, 3, 56, 195, 178, 214, 127, 52, 198, 118, + 28, 186, 49, 209, 239, 176, 211, 205, 73, 132, 111, 215, 99, 230, 85, 39, 226, 28, 222, 252, 32, 60, 155, + 92, 1, 26, 70, 157, 224, 186, 183, 247, 142, 11, 127, 254, 65, 86, 197, 98, 251, 17, 241, 99, 215, 159, + 140, 33, 252, 190, 201, 142, 33, 134, 106, 249, 91, 11, 102, 161, 221, 91, 188, 78, 127, 204, 95, 0, 249, + 159, 49, 22, 107, 180, 138, 41, 245, 145, 4, 108, 219, 32, 243, 186, 137, 104, 237, 194, 160, 171, 72, 239, + 72, 176, 10, 41, 151, 234, 74, 7, 161, 145, 203, 72, 42, 228, 162, 134, 206, 252, 18, 14, 202, 67, 55, + 185, 59, 122, 218, 1, 185, 45, 97, 181, 13, 89, 9, 8, 206, 226, 112, 2, 130, 51, 192, 105, 200, 47, + 79, 235, 146, 78, 205, 116, 21, 113, 186, 227, 32, 172, 168, 229, 74, 115, 136, 212, 218, 198, 17, 15, 238, + 118, 182, 85, 91, 143, 234, 187, 130, 237, 90, 104, 221, 97, 164, 183, 33, 99, 45, 233, 228, 89, 18, 177, + 205, 190, 119, 33, 49, 136, 1, 196, 97, 95, 33, 69, 244, 108, 58, 158, 156, 56, 170, 98, 88, 8, 75, + 186, 157, 240, 231, 145, 204, 142, 122, 160, 167, 144, 172, 251, 214, 22, 225, 8, 89, 243, 153, 204, 128, 193, + 187, 84, 231, 9, 55, 196, 198, 14, 54, 106, 121, 95, 231, 229, 48, 97, 90, 113, 124, 250, 126, 4, 111, + 211, 171, 22, 121, 42, 151, 151, 217, 51, 248, 69, 161, 245, 33, 119, 34, 54, 129, 46, 39, 42, 157, 145, + 219, 125, 194, 201, 169, 64, 106, 64, 17, 101, 138, 89, 148, 63, 2, 20, 202, 24, 109, 42, 236, 57, 95, + 250, 53, 41, 163, 5, 67, 194, 128, 253, 76, 224, 216, 100, 187, 32, 34, 6, 87, 196, 201, 151, 193, 133, + 65, 144, 53, 198, 47, 84, 9, 41, 154, 128, 169, 58, 158, 238, 178, 96, 172, 215, 37, 171, 28, 62, 9, + 107, 198, 21, 208, 157, 31, 243, 115, 240, 49, 172, 114, 235, 12, 23, 197, 231, 8, 54, 14, 16, 114, 108, + 52, 221, 155, 140, 33, 204, 132, 100, 106, 
220, 130, 113, 168, 44, 193, 3, 66, 250, 79, 232, 54, 20, 108, + 83, 176, 133, 130, 236, 6, 216, 161, 101, 108, 86, 127, 98, 179, 22, 225, 210, 111, 86, 216, 66, 145, 190, + 248, 114, 117, 124, 154, 34, 133, 155, 1, 203, 23, 180, 84, 225, 140, 7, 25, 63, 64, 180, 190, 110, 215, + 181, 1, 98, 142, 183, 222, 90, 218, 114, 175, 152, 116, 34, 248, 80, 195, 54, 169, 183, 23, 115, 55, 212, + 124, 29, 211, 117, 162, 65, 111, 203, 87, 109, 176, 223, 20, 153, 249, 38, 138, 72, 104, 162, 243, 73, 52, + 116, 114, 12, 82, 184, 247, 70, 82, 196, 219, 143, 17, 200, 219, 79, 95, 18, 154, 225, 125, 86, 201, 73, + 194, 249, 37, 54, 150, 141, 35, 179, 7, 19, 13, 10, 30, 98, 224, 239, 98, 15, 159, 170, 245, 149, 39, + 6, 162, 83, 123, 104, 211, 25, 245, 249, 153, 175, 254, 181, 254, 167, 53, 86, 110, 111, 204, 5, 230, 230, + 181, 74, 223, 85, 123, 99, 118, 221, 52, 106, 214, 209, 163, 12, 143, 174, 194, 214, 17, 201, 200, 119, 87, + 112, 128, 27, 130, 220, 7, 209, 29, 164, 6, 144, 4, 225, 9, 240, 106, 108, 50, 206, 32, 123, 141, 58, + 79, 173, 180, 225, 51, 179, 177, 248, 44, 44, 55, 172, 146, 176, 166, 26, 114, 231, 79, 113, 56, 175, 149, + 213, 215, 161, 47, 149, 221, 9, 22, 239, 155, 192, 91, 9, 46, 145, 164, 248, 70, 220, 150, 62, 104, 203, + 254, 50, 123, 108, 175, 68, 54, 178, 28, 101, 189, 13, 30, 79, 65, 235, 143, 40, 54, 149, 213, 14, 107, + 103, 11, 130, 24, 174, 129, 128, 96, 137, 210, 189, 138, 222, 19, 200, 17, 188, 150, 16, 67, 28, 232, 240, + 50, 54, 86, 117, 0, 128, 163, 70, 14, 67, 39, 100, 44, 42, 24, 136, 10, 42, 237, 79, 252, 98, 233, + 245, 63, 34, 80, 172, 83, 18, 58, 32, 10, 148, 137, 12, 47, 81, 62, 76, 250, 68, 230, 90, 18, 139, + 185, 158, 7, 236, 225, 54, 50, 237, 53, 85, 48, 29, 175, 235, 76, 199, 127, 67, 123, 180, 193, 49, 218, + 83, 239, 72, 24, 174, 200, 165, 39, 169, 97, 192, 42, 20, 58, 93, 124, 253, 40, 202, 72, 39, 88, 108, + 221, 171, 196, 245, 238, 184, 209, 35, 98, 111, 218, 196, 211, 65, 44, 239, 51, 225, 137, 87, 226, 141, 186, + 225, 68, 163, 
33, 152, 77, 37, 120, 236, 102, 134, 30, 252, 170, 51, 10, 36, 122, 214, 51, 162, 159, 178, + 164, 15, 137, 60, 248, 195, 115, 251, 102, 149, 90, 210, 151, 222, 175, 60, 189, 182, 179, 162, 250, 159, 91, + 233, 164, 88, 9, 51, 153, 57, 122, 177, 247, 96, 133, 186, 99, 60, 153, 178, 205, 44, 26, 117, 79, 243, + 217, 184, 3, 253, 108, 9, 218, 195, 9, 104, 247, 68, 58, 60, 253, 98, 44, 94, 31, 58, 132, 244, 180, + 13, 178, 189, 208, 31, 57, 61, 212, 127, 100, 184, 49, 64, 104, 46, 136, 78, 219, 210, 108, 181, 247, 207, + 184, 250, 182, 31, 147, 194, 118, 84, 40, 61, 41, 245, 227, 159, 56, 118, 124, 26, 203, 150, 168, 210, 78, + 105, 133, 191, 185, 170, 82, 81, 84, 179, 112, 155, 131, 238, 172, 99, 253, 201, 168, 47, 92, 148, 216, 63, + 87, 187, 27, 110, 58, 182, 196, 112, 206, 164, 71, 82, 197, 36, 157, 151, 160, 219, 113, 6, 247, 99, 139, + 79, 187, 245, 125, 36, 132, 181, 72, 120, 34, 72, 51, 33, 143, 135, 149, 30, 83, 155, 151, 177, 102, 3, + 179, 76, 144, 9, 77, 116, 6, 16, 28, 208, 194, 68, 150, 26, 38, 94, 179, 123, 108, 21, 196, 10, 2, + 164, 75, 118, 147, 231, 19, 222, 69, 224, 174, 59, 198, 80, 126, 199, 32, 224, 173, 188, 251, 7, 15, 18, + 115, 208, 228, 128, 160, 107, 115, 218, 195, 80, 217, 192, 87, 209, 182, 215, 90, 65, 52, 248, 68, 167, 18, + 120, 226, 68, 211, 246, 179, 82, 36, 3, 222, 212, 218, 39, 206, 190, 141, 245, 210, 9, 92, 126, 31, 106, + 64, 159, 14, 191, 32, 38, 246, 237, 85, 69, 48, 215, 62, 249, 14, 74, 211, 99, 209, 247, 194, 252, 9, + 210, 84, 110, 98, 169, 176, 15, 98, 110, 19, 114, 118, 160, 193, 14, 169, 249, 63, 240, 210, 170, 211, 10, + 24, 142, 70, 20, 111, 175, 251, 89, 219, 25, 56, 205, 16, 15, 52, 50, 177, 226, 81, 106, 149, 128, 87, + 153, 219, 117, 151, 195, 201, 74, 177, 128, 146, 173, 247, 160, 29, 142, 107, 155, 153, 93, 39, 67, 45, 252, + 143, 208, 48, 201, 199, 91, 183, 6, 247, 21, 57, 191, 19, 173, 212, 20, 5, 101, 147, 72, 190, 160, 132, + 108, 171, 242, 102, 50, 56, 211, 216, 129, 177, 210, 3, 247, 194, 11, 36, 116, 
131, 198, 1, 148, 88, 78, + 143, 48, 175, 190, 244, 2, 84, 167, 202, 190, 27, 216, 249, 155, 36, 167, 111, 47, 64, 66, 109, 245, 113, + 228, 80, 212, 228, 60, 100, 98, 4, 176, 250, 96, 7, 40, 67, 152, 209, 226, 69, 61, 16, 71, 178, 52, + 138, 220, 77, 210, 93, 133, 168, 99, 159, 14, 61, 248, 38, 17, 195, 177, 10, 110, 12, 132, 242, 53, 193, + 185, 236, 20, 118, 3, 252, 221, 168, 132, 179, 226, 67, 133, 229, 228, 34, 89, 124, 198, 252, 79, 141, 223, + 7, 199, 201, 126, 100, 25, 27, 125, 47, 252, 79, 55, 164, 21, 176, 179, 44, 131, 234, 57, 59, 113, 179, + 11, 73, 1, 30, 252, 39, 98, 84, 167, 99, 35, 55, 65, 54, 159, 111, 15, 158, 9, 134, 61, 213, 226, + 235, 134, 61, 186, 198, 212, 183, 98, 1, 28, 16, 75, 3, 150, 193, 200, 90, 123, 69, 104, 212, 113, 170, + 106, 100, 136, 91, 19, 196, 89, 79, 129, 111, 226, 161, 59, 223, 109, 243, 199, 249, 143, 181, 230, 231, 68, + 173, 126, 138, 91, 9, 119, 81, 238, 154, 86, 0, 190, 214, 145, 71, 223, 48, 78, 150, 68, 167, 143, 189, + 87, 216, 98, 52, 94, 109, 45, 181, 2, 170, 102, 248, 95, 116, 98, 99, 91, 147, 242, 153, 121, 164, 76, + 14, 92, 75, 209, 33, 47, 17, 242, 180, 59, 6, 74, 80, 35, 136, 89, 60, 53, 79, 84, 176, 225, 52, + 249, 29, 65, 75, 147, 123, 67, 247, 115, 197, 228, 28, 160, 250, 118, 207, 67, 63, 38, 129, 63, 223, 218, + 238, 148, 9, 137, 189, 182, 14, 103, 225, 86, 21, 92, 57, 100, 197, 220, 250, 185, 41, 173, 73, 4, 9, + 165, 194, 216, 90, 59, 239, 252, 20, 99, 95, 155, 51, 60, 14, 189, 129, 127, 101, 34, 186, 31, 116, 226, + 52, 162, 250, 72, 174, 221, 218, 154, 82, 218, 176, 246, 2, 44, 97, 153, 170, 146, 216, 171, 53, 246, 70, + 248, 163, 241, 83, 9, 204, 185, 180, 40, 9, 184, 54, 198, 211, 168, 202, 63, 13, 244, 223, 216, 155, 105, + 115, 59, 123, 168, 251, 252, 137, 175, 138, 253, 16, 82, 160, 238, 140, 233, 20, 142, 173, 229, 52, 96, 137, + 98, 89, 119, 114, 128, 159, 203, 22, 204, 101, 186, 80, 118, 217, 106, 53, 120, 137, 187, 98, 251, 172, 151, + 208, 34, 230, 254, 10, 95, 210, 130, 54, 211, 243, 21, 
19, 188, 30, 113, 194, 16, 201, 101, 166, 79, 192, + 81, 86, 56, 174, 122, 197, 116, 251, 224, 50, 248, 37, 51, 169, 6, 88, 3, 231, 44, 192, 78, 18, 7, + 102, 144, 53, 181, 223, 17, 147, 168, 160, 20, 22, 74, 243, 146, 141, 86, 165, 44, 17, 188, 84, 236, 2, + 110, 45, 154, 134, 152, 158, 190, 71, 130, 77, 89, 49, 213, 65, 135, 171, 185, 176, 173, 181, 169, 248, 23, + 76, 227, 241, 205, 168, 229, 91, 42, 53, 102, 95, 186, 254, 91, 155, 116, 95, 254, 70, 194, 3, 28, 221, + 137, 188, 139, 164, 103, 113, 232, 233, 16, 124, 177, 36, 152, 157, 216, 119, 230, 73, 100, 133, 143, 101, 122, + 145, 0, 56, 2, 129, 62, 103, 134, 201, 145, 187, 189, 90, 180, 159, 0, 106, 154, 130, 121, 228, 27, 43, + 145, 147, 20, 191, 66, 82, 23, 214, 242, 202, 199, 53, 143, 164, 3, 246, 100, 15, 172, 59, 227, 129, 161, + 115, 97, 239, 104, 85, 201, 167, 96, 12, 58, 102, 213, 104, 191, 64, 211, 59, 112, 35, 120, 83, 143, 240, + 106, 47, 244, 41, 189, 254, 175, 21, 181, 141, 166, 197, 133, 44, 238, 175, 134, 170, 37, 23, 90, 87, 121, + 102, 59, 54, 219, 82, 120, 194, 127, 152, 177, 63, 229, 69, 234, 235, 103, 128, 178, 97, 60, 126, 217, 245, + 246, 242, 220, 69, 167, 98, 235, 121, 76, 42, 148, 206, 199, 26, 133, 143, 56, 253, 200, 204, 154, 36, 158, + 120, 195, 145, 149, 214, 125, 193, 127, 49, 201, 87, 65, 75, 14, 199, 46, 242, 126, 179, 194, 211, 6, 34, + 176, 127, 99, 30, 101, 21, 49, 8, 7, 206, 97, 229, 63, 231, 43, 51, 33, 201, 206, 65, 82, 12, 253, + 176, 200, 182, 153, 48, 55, 80, 113, 245, 207, 237, 151, 6, 41, 135, 253, 74, 208, 170, 229, 36, 92, 26, + 24, 226, 239, 77, 141, 59, 222, 148, 78, 32, 79, 65, 195, 111, 28, 243, 193, 84, 59, 46, 22, 155, 137, + 149, 113, 186, 16, 115, 211, 18, 138, 74, 234, 20, 183, 203, 12, 140, 119, 72, 187, 188, 150, 137, 205, 20, + 233, 82, 86, 216, 127, 103, 145, 197, 57, 239, 27, 228, 69, 20, 175, 193, 229, 232, 68, 112, 120, 141, 6, + 121, 194, 70, 166, 204, 31, 134, 49, 54, 54, 199, 126, 26, 24, 237, 227, 38, 179, 31, 154, 143, 32, 22, + 105, 33, 232, 
224, 156, 204, 152, 237, 38, 219, 221, 44, 73, 16, 251, 206, 56, 180, 194, 181, 2, 64, 205, + 163, 162, 194, 230, 64, 3, 6, 9, 25, 16, 115, 195, 158, 109, 78, 159, 49, 167, 150, 159, 57, 9, 144, + 71, 128, 200, 254, 136, 19, 104, 37, 36, 26, 229, 112, 139, 139, 40, 242, 5, 128, 131, 33, 43, 148, 206, + 17, 153, 116, 74, 116, 40, 40, 85, 160, 231, 240, 185, 239, 76, 66, 144, 228, 137, 253, 212, 139, 115, 62, + 137, 203, 90, 207, 53, 226, 141, 148, 198, 212, 183, 165, 42, 208, 37, 17, 71, 183, 116, 186, 198, 249, 121, + 170, 159, 215, 87, 158, 40, 110, 117, 201, 93, 216, 64, 69, 86, 109, 80, 35, 142, 125, 1, 144, 62, 21, + 185, 61, 58, 230, 74, 144, 151, 26, 112, 128, 131, 20, 100, 209, 31, 81, 100, 102, 21, 211, 182, 52, 161, + 72, 154, 171, 186, 137, 209, 212, 169, 124, 23, 214, 143, 176, 198, 4, 4, 149, 73, 151, 56, 140, 253, 17, + 68, 142, 196, 246, 15, 191, 62, 233, 33, 87, 108, 173, 7, 96, 101, 140, 9, 193, 121, 71, 88, 135, 179, + 200, 10, 60, 161, 186, 167, 235, 98, 185, 174, 229, 16, 127, 216, 104, 159, 72, 154, 238, 36, 153, 132, 240, + 98, 132, 248, 64, 91, 52, 238, 166, 172, 158, 208, 123, 119, 189, 56, 6, 226, 69, 223, 106, 59, 3, 135, + 125, 184, 237, 19, 24, 189, 25, 119, 82, 13, 11, 167, 31, 220, 163, 73, 16, 141, 239, 222, 211, 5, 63, + 237, 182, 138, 199, 24, 168, 44, 125, 177, 164, 95, 185, 68, 177, 206, 37, 177, 61, 95, 97, 181, 128, 158, + 170, 172, 155, 143, 209, 216, 218, 220, 77, 209, 74, 207, 60, 115, 254, 104, 87, 65, 89, 224, 18, 55, 180, + 239, 95, 57, 137, 195, 245, 22, 0, 164, 207, 171, 142, 237, 233, 87, 56, 214, 182, 68, 0, 1, 225, 65, + 41, 240, 124, 68, 40, 245, 57, 237, 45, 177, 221, 137, 78, 11, 176, 84, 25, 251, 203, 161, 228, 26, 138, + 212, 240, 6, 21, 143, 161, 232, 108, 240, 35, 84, 148, 124, 36, 75, 239, 135, 150, 112, 45, 1, 116, 214, + 1, 59, 214, 62, 152, 143, 154, 166, 115, 32, 113, 153, 7, 191, 147, 202, 66, 64, 157, 186, 249, 164, 162, + 170, 226, 180, 234, 104, 134, 59, 168, 92, 52, 90, 106, 76, 239, 231, 14, 237, 15, 
131, 81, 92, 9, 95, + 190, 21, 28, 96, 190, 44, 26, 212, 83, 34, 8, 106, 5, 119, 66, 72, 200, 136, 252, 212, 104, 131, 93, + 150, 37, 67, 188, 26, 105, 125, 201, 41, 248, 61, 238, 176, 254, 205, 157, 37, 194, 191, 239, 3, 128, 90, + 221, 250, 223, 180, 111, 131, 174, 22, 235, 151, 229, 108, 1, 42, 55, 58, 118, 166, 28, 157, 107, 140, 243, + 84, 129, 138, 94, 122, 47, 107, 223, 26, 228, 89, 30, 143, 160, 219, 238, 153, 58, 12, 110, 180, 104, 98, + 245, 8, 138, 108, 160, 182, 252, 79, 162, 122, 83, 131, 54, 210, 120, 167, 53, 209, 54, 4, 147, 138, 244, + 169, 238, 169, 25, 2, 230, 73, 200, 163, 52, 86, 40, 25, 238, 244, 33, 116, 148, 173, 147, 135, 39, 75, + 86, 51, 82, 114, 23, 218, 161, 170, 53, 219, 211, 12, 208, 157, 169, 18, 113, 37, 99, 192, 2, 184, 8, + 171, 164, 92, 160, 112, 234, 8, 213, 83, 45, 99, 160, 60, 243, 9, 72, 62, 9, 20, 130, 217, 47, 82, + 78, 17, 57, 28, 187, 192, 196, 192, 225, 143, 87, 14, 161, 67, 75, 71, 97, 209, 62, 254, 65, 223, 239, + 46, 53, 251, 91, 237, 205, 149, 158, 5, 168, 32, 4, 10, 160, 66, 155, 1, 113, 225, 82, 148, 224, 18, + 216, 244, 134, 107, 47, 218, 142, 52, 125, 203, 81, 169, 79, 11, 33, 252, 90, 152, 47, 59, 121, 182, 150, + 200, 108, 246, 137, 103, 227, 2, 252, 120, 186, 137, 21, 192, 202, 98, 173, 49, 230, 39, 43, 178, 121, 21, + 113, 178, 244, 235, 68, 8, 97, 23, 181, 11, 166, 162, 179, 66, 10, 19, 77, 139, 38, 50, 213, 134, 242, + 151, 37, 116, 112, 185, 132, 118, 239, 18, 9, 180, 117, 229, 66, 118, 228, 109, 45, 64, 67, 177, 112, 173, + 245, 212, 25, 119, 108, 231, 206, 41, 211, 111, 111, 174, 43, 172, 225, 191, 43, 161, 118, 157, 108, 49, 214, + 215, 125, 54, 236, 242, 167, 205, 75, 35, 12, 124, 238, 163, 73, 238, 226, 235, 43, 199, 69, 147, 20, 187, + 157, 210, 250, 178, 43, 92, 20, 125, 51, 151, 183, 94, 85, 14, 23, 221, 244, 230, 22, 75, 194, 198, 213, + 65, 152, 200, 102, 79, 197, 202, 222, 228, 10, 198, 73, 150, 102, 26, 244, 41, 161, 196, 142, 52, 63, 138, + 74, 140, 121, 119, 192, 209, 183, 138, 53, 132, 29, 
121, 161, 138, 249, 70, 46, 233, 57, 211, 60, 215, 232, + 54, 249, 118, 230, 200, 77, 233, 5, 115, 145, 203, 66, 248, 227, 171, 86, 125, 133, 25, 19, 147, 181, 243, + 40, 207, 58, 199, 15, 105, 244, 245, 15, 140, 244, 159, 62, 229, 80, 209, 252, 245, 154, 72, 113, 122, 80, + 160, 94, 146, 71, 83, 19, 28, 206, 174, 20, 30, 70, 234, 203, 242, 190, 243, 27, 83, 243, 145, 51, 217, + 70, 36, 72, 139, 82, 41, 94, 29, 68, 46, 68, 161, 76, 56, 170, 37, 20, 20, 21, 24, 219, 173, 28, + 54, 66, 125, 168, 8, 193, 232, 20, 220, 81, 187, 18, 37, 186, 78, 82, 86, 244, 184, 23, 120, 101, 245, + 229, 248, 192, 185, 208, 23, 136, 85, 110, 125, 203, 63, 44, 158, 155, 194, 97, 220, 229, 60, 159, 80, 8, + 175, 90, 93, 227, 69, 113, 139, 93, 220, 169, 100, 144, 27, 72, 242, 169, 247, 195, 37, 237, 223, 122, 138, + 70, 78, 68, 139, 102, 164, 228, 51, 227, 199, 1, 193, 164, 178, 126, 99, 99, 172, 20, 24, 50, 57, 5, + 69, 113, 39, 109, 77, 210, 232, 70, 80, 122, 168, 156, 238, 118, 88, 234, 190, 130, 95, 94, 195, 161, 197, + 215, 245, 236, 5, 29, 104, 233, 238, 207, 197, 155, 85, 202, 202, 195, 232, 79, 11, 43, 233, 49, 106, 121, + 60, 219, 6, 99, 174, 206, 121, 202, 245, 136, 5, 35, 121, 174, 51, 231, 40, 75, 13, 127, 143, 159, 225, + 158, 19, 66, 191, 193, 104, 96, 37, 128, 106, 92, 68, 1, 210, 198, 37, 78, 121, 197, 160, 72, 95, 109, + 84, 134, 65, 1, 37, 11, 214, 143, 101, 214, 86, 201, 218, 116, 213, 189, 57, 103, 69, 89, 102, 75, 114, + 170, 46, 139, 156, 241, 193, 86, 243, 50, 60, 223, 142, 250, 81, 21, 22, 37, 192, 102, 100, 186, 214, 146, + 147, 87, 230, 180, 77, 70, 12, 6, 150, 198, 11, 47, 149, 126, 132, 57, 204, 254, 230, 141, 246, 14, 51, + 160, 227, 167, 137, 176, 214, 186, 7, 218, 69, 250, 109, 123, 30, 150, 176, 44, 204, 61, 33, 135, 193, 172, + 233, 221, 251, 109, 154, 13, 245, 105, 245, 43, 184, 209, 46, 116, 214, 90, 131, 0, 52, 91, 165, 118, 140, + 243, 118, 81, 240, 88, 34, 72, 157, 177, 138, 101, 16, 45, 16, 98, 222, 7, 173, 80, 236, 106, 201, 80, + 1, 115, 189, 246, 
141, 208, 88, 9, 167, 75, 200, 34, 210, 173, 249, 171, 219, 144, 160, 6, 231, 135, 228, + 193, 146, 16, 176, 117, 20, 170, 62, 72, 168, 41, 207, 138, 168, 88, 86, 241, 72, 205, 154, 78, 93, 168, + 229, 29, 166, 232, 149, 66, 130, 154, 236, 240, 91, 88, 155, 196, 139, 203, 76, 181, 105, 49, 67, 3, 127, + 28, 214, 233, 107, 10, 110, 60, 248, 222, 73, 110, 127, 44, 228, 87, 180, 215, 49, 146, 48, 129, 203, 227, + 197, 11, 23, 107, 189, 234, 38, 195, 42, 19, 13, 40, 254, 15, 97, 223, 251, 35, 236, 238, 29, 173, 199, + 181, 47, 185, 71, 125, 198, 121, 25, 211, 172, 75, 18, 225, 164, 161, 65, 198, 247, 125, 226, 249, 154, 48, + 207, 148, 151, 46, 45, 81, 154, 173, 208, 133, 107, 161, 0, 74, 14, 192, 209, 254, 126, 126, 187, 33, 171, + 210, 18, 227, 72, 174, 250, 23, 70, 71, 150, 152, 193, 43, 113, 150, 68, 247, 169, 113, 248, 210, 120, 118, + 198, 206, 229, 154, 232, 80, 175, 68, 247, 58, 57, 248, 230, 141, 93, 49, 197, 159, 177, 120, 11, 137, 188, + 42, 61, 110, 223, 29, 156, 166, 212, 135, 51, 53, 0, 191, 167, 223, 196, 231, 229, 30, 200, 248, 57, 192, + 87, 49, 172, 185, 90, 48, 123, 222, 244, 169, 22, 198, 196, 239, 95, 160, 196, 19, 127, 223, 224, 72, 24, + 62, 147, 80, 65, 77, 9, 1, 203, 210, 75, 51, 138, 250, 40, 114, 127, 230, 237, 88, 69, 173, 200, 56, + 0, 221, 88, 249, 104, 237, 12, 159, 182, 85, 113, 38, 55, 25, 132, 180, 167, 145, 192, 209, 167, 102, 17, + 21, 40, 23, 244, 10, 21, 73, 207, 43, 2, 159, 127, 0, 51, 70, 238, 78, 32, 238, 212, 154, 47, 143, + 60, 38, 135, 253, 133, 93, 69, 3, 249, 40, 157, 166, 138, 216, 169, 108, 142, 37, 254, 143, 48, 20, 101, + 79, 72, 198, 233, 64, 118, 116, 136, 117, 231, 51, 247, 165, 206, 24, 35, 241, 83, 55, 86, 15, 28, 96, + 40, 71, 171, 92, 129, 77, 9, 133, 159, 213, 173, 220, 104, 221, 247, 97, 208, 18, 163, 182, 199, 52, 81, + 128, 206, 100, 154, 116, 246, 86, 183, 204, 115, 117, 207, 184, 160, 76, 172, 220, 96, 225, 65, 212, 107, 223, + 148, 59, 213, 3, 204, 98, 228, 51, 68, 208, 143, 72, 61, 211, 121, 236, 238, 
202, 1, 140, 154, 6, 136, + 129, 118, 57, 51, 133, 123, 225, 211, 11, 243, 103, 170, 230, 131, 222, 160, 49, 66, 251, 246, 92, 69, 126, + 145, 76, 236, 9, 3, 156, 165, 87, 175, 179, 18, 85, 241, 24, 220, 84, 182, 110, 145, 28, 142, 171, 252, + 226, 36, 72, 243, 111, 35, 157, 151, 207, 199, 239, 85, 133, 100, 157, 105, 35, 89, 17, 27, 44, 190, 136, + 19, 70, 122, 180, 163, 47, 99, 123, 95, 2, 138, 103, 21, 190, 31, 198, 125, 40, 32, 148, 36, 139, 122, + 140, 165, 139, 169, 91, 38, 96, 88, 20, 159, 165, 101, 18, 228, 53, 171, 237, 71, 53, 25, 253, 202, 250, + 187, 44, 227, 29, 105, 237, 96, 0, 11, 73, 161, 128, 105, 213, 137, 68, 72, 187, 60, 51, 10, 155, 34, + 115, 94, 246, 81, 212, 135, 156, 164, 43, 152, 220, 84, 168, 94, 151, 156, 62, 42, 10, 79, 57, 102, 233, + 58, 56, 242, 150, 14, 23, 80, 247, 186, 249, 110, 88, 18, 55, 202, 203, 83, 16, 101, 199, 234, 212, 101, + 123, 240, 49, 204, 227, 45, 45, 175, 74, 196, 34, 96, 8, 163, 248, 84, 164, 19, 73, 24, 111, 92, 216, + 121, 1, 175, 213, 171, 182, 72, 106, 11, 216, 152, 102, 37, 186, 164, 104, 85, 112, 245, 154, 24, 226, 248, + 31, 88, 90, 34, 134, 59, 139, 92, 173, 80, 144, 186, 222, 134, 229, 1, 103, 113, 124, 226, 83, 50, 4, + 226, 206, 159, 207, 137, 139, 136, 14, 105, 131, 148, 85, 46, 197, 146, 248, 156, 221, 145, 252, 36, 67, 159, + 9, 164, 252, 29, 94, 45, 190, 41, 252, 133, 79, 42, 82, 38, 15, 30, 221, 97, 224, 238, 136, 1, 210, + 128, 104, 218, 203, 204, 160, 95, 162, 207, 55, 119, 20, 215, 45, 177, 89, 18, 96, 237, 64, 137, 89, 89, + 130, 175, 188, 62, 20, 237, 104, 243, 145, 187, 87, 59, 242, 147, 178, 109, 65, 211, 100, 177, 110, 144, 175, + 34, 73, 20, 102, 202, 117, 8, 123, 171, 17, 193, 101, 142, 200, 6, 30, 214, 17, 69, 8, 38, 190, 122, + 68, 213, 5, 222, 185, 140, 16, 7, 245, 47, 221, 44, 50, 88, 99, 105, 2, 18, 59, 42, 72, 217, 34, + 34, 169, 76, 35, 148, 143, 57, 165, 150, 98, 141, 236, 176, 197, 125, 58, 15, 154, 221, 143, 233, 246, 62, + 248, 17, 15, 39, 74, 236, 46, 5, 220, 43, 217, 231, 
127, 224, 94, 80, 58, 143, 140, 8, 56, 86, 246, + 68, 96, 54, 179, 105, 143, 5, 180, 60, 194, 57, 19, 218, 112, 129, 124, 7, 222, 75, 47, 91, 45, 192, + 190, 251, 88, 4, 23, 156, 113, 170, 99, 49, 110, 241, 65, 100, 51, 66, 3, 108, 106, 60, 121, 122, 71, + 43, 242, 161, 22, 249, 231, 182, 118, 237, 12, 33, 242, 134, 87, 106, 112, 0, 106, 193, 60, 133, 112, 245, + 99, 93, 147, 244, 26, 55, 90, 1, 107, 7, 29, 87, 115, 26, 243, 240, 153, 24, 220, 252, 63, 116, 162, + 76, 11, 18, 140, 173, 31, 51, 6, 197, 219, 240, 239, 21, 20, 143, 95, 32, 153, 210, 58, 127, 248, 194, + 24, 109, 31, 67, 147, 194, 30, 49, 28, 227, 240, 60, 100, 191, 3, 73, 57, 12, 241, 97, 154, 112, 88, + 82, 85, 24, 100, 150, 184, 78, 132, 146, 87, 151, 86, 196, 207, 65, 24, 88, 117, 110, 192, 203, 194, 8, + 218, 92, 117, 250, 132, 108, 83, 157, 253, 136, 238, 218, 27, 226, 51, 195, 126, 21, 45, 241, 122, 212, 87, + 250, 36, 87, 246, 233, 204, 183, 219, 95, 56, 202, 47, 181, 68, 106, 197, 192, 44, 110, 78, 219, 158, 78, + 75, 98, 53, 11, 127, 2, 220, 40, 49, 64, 104, 70, 239, 92, 9, 134, 115, 135, 166, 235, 104, 130, 106, + 129, 10, 97, 233, 251, 21, 127, 17, 48, 244, 200, 51, 165, 176, 114, 111, 175, 186, 206, 189, 209, 151, 6, + 128, 217, 16, 203, 5, 216, 169, 216, 67, 238, 22, 220, 123, 83, 174, 86, 26, 223, 15, 131, 61, 23, 151, + 50, 108, 240, 129, 229, 176, 153, 135, 129, 106, 10, 115, 204, 162, 186, 43, 102, 241, 107, 195, 221, 55, 254, + 214, 56, 218, 131, 221, 151, 48, 118, 104, 162, 166, 221, 118, 125, 142, 130, 252, 62, 33, 94, 65, 95, 80, + 108, 9, 180, 113, 128, 123, 213, 12, 237, 233, 53, 167, 2, 244, 113, 39, 157, 234, 53, 56, 154, 85, 199, + 143, 84, 216, 89, 163, 16, 179, 166, 152, 196, 210, 20, 138, 148, 109, 187, 179, 111, 136, 82, 135, 112, 97, + 210, 216, 200, 236, 216, 82, 30, 149, 246, 247, 220, 244, 226, 32, 247, 42, 250, 124, 9, 168, 47, 113, 127, + 6, 152, 73, 82, 124, 214, 72, 92, 79, 148, 197, 104, 78, 7, 68, 169, 138, 126, 196, 131, 139, 72, 219, + 205, 49, 92, 231, 32, 
119, 93, 14, 218, 120, 222, 68, 141, 29, 23, 185, 210, 225, 141, 65, 157, 231, 245, + 2, 205, 239, 138, 114, 197, 191, 229, 196, 171, 249, 107, 177, 200, 188, 218, 2, 113, 231, 248, 133, 44, 90, + 157, 41, 44, 46, 205, 4, 79, 46, 30, 48, 218, 154, 157, 127, 249, 192, 4, 147, 176, 190, 107, 64, 47, + 6, 75, 30, 73, 90, 244, 200, 170, 21, 152, 37, 236, 108, 66, 184, 191, 97, 210, 3, 148, 101, 141, 189, + 189, 47, 95, 230, 63, 163, 227, 211, 235, 159, 213, 155, 207, 191, 46, 41, 24, 169, 167, 144, 99, 220, 113, + 50, 18, 49, 156, 11, 64, 159, 172, 46, 139, 115, 12, 240, 71, 70, 34, 118, 203, 32, 21, 178, 69, 92, + 37, 160, 11, 167, 38, 115, 196, 122, 62, 126, 92, 244, 122, 178, 18, 81, 248, 53, 208, 248, 21, 9, 177, + 75, 171, 244, 45, 110, 128, 179, 61, 71, 250, 242, 52, 152, 160, 104, 45, 247, 106, 122, 16, 78, 90, 191, + 180, 103, 178, 118, 223, 94, 122, 188, 36, 114, 129, 81, 104, 122, 165, 180, 120, 15, 60, 164, 127, 237, 118, + 17, 115, 150, 28, 253, 10, 51, 245, 9, 178, 13, 57, 137, 125, 75, 66, 118, 40, 95, 54, 99, 21, 184, + 52, 52, 58, 130, 136, 80, 71, 180, 27, 127, 58, 8, 33, 76, 160, 172, 109, 235, 224, 189, 174, 169, 202, + 14, 51, 168, 62, 168, 130, 223, 248, 136, 92, 22, 26, 144, 211, 212, 120, 13, 218, 199, 159, 57, 216, 218, + 188, 92, 223, 95, 138, 164, 251, 76, 185, 61, 170, 169, 173, 95, 73, 111, 54, 90, 71, 149, 197, 211, 98, + 229, 26, 44, 167, 102, 10, 46, 38, 58, 203, 36, 212, 115, 127, 162, 215, 11, 148, 116, 132, 4, 49, 94, + 31, 84, 156, 173, 250, 84, 2, 74, 157, 55, 50, 40, 72, 56, 92, 234, 129, 56, 189, 205, 32, 3, 129, + 250, 208, 184, 161, 174, 106, 142, 86, 24, 167, 103, 243, 143, 238, 150, 158, 56, 75, 222, 212, 30, 188, 37, + 155, 150, 218, 191, 162, 67, 163, 109, 53, 222, 117, 244, 24, 246, 169, 223, 119, 223, 201, 173, 63, 209, 209, + 51, 191, 246, 183, 12, 218, 60, 179, 195, 104, 199, 132, 131, 203, 82, 26, 217, 204, 215, 97, 131, 20, 63, + 137, 233, 208, 201, 198, 254, 99, 181, 254, 120, 129, 72, 167, 47, 243, 60, 96, 201, 187, 
6, 40, 240, 133, + 180, 88, 238, 168, 222, 54, 129, 57, 80, 27, 190, 201, 35, 174, 129, 195, 229, 29, 251, 22, 151, 254, 59, + 30, 84, 39, 155, 150, 146, 133, 54, 54, 123, 116, 132, 218, 195, 83, 51, 171, 210, 193, 254, 235, 154, 64, + 107, 24, 228, 225, 224, 32, 198, 46, 31, 74, 100, 76, 78, 112, 63, 56, 16, 17, 163, 17, 2, 222, 227, + 160, 196, 16, 86, 68, 217, 50, 12, 117, 128, 73, 47, 83, 41, 219, 39, 150, 218, 21, 20, 243, 13, 159, + 175, 16, 183, 103, 149, 83, 16, 112, 117, 251, 56, 0, 248, 237, 14, 124, 232, 90, 63, 82, 139, 4, 125, + 242, 159, 40, 84, 70, 74, 233, 51, 80, 145, 183, 165, 0, 36, 210, 227, 149, 133, 236, 123, 233, 253, 28, + 121, 25, 46, 93, 43, 196, 4, 182, 92, 39, 128, 120, 31, 95, 200, 73, 32, 144, 92, 63, 96, 184, 67, + 28, 169, 245, 227, 209, 216, 17, 158, 180, 180, 99, 92, 219, 112, 135, 89, 193, 236, 188, 252, 177, 168, 121, + 63, 131, 122, 131, 12, 142, 149, 7, 13, 125, 25, 218, 240, 121, 186, 148, 251, 47, 145, 82, 152, 163, 133, + 133, 80, 130, 134, 170, 212, 218, 223, 134, 106, 245, 73, 244, 92, 100, 52, 96, 225, 27, 154, 81, 65, 215, + 208, 227, 113, 36, 83, 84, 14, 120, 42, 59, 76, 176, 53, 253, 130, 79, 86, 243, 217, 167, 248, 169, 123, + 134, 211, 172, 82, 115, 91, 31, 165, 210, 103, 155, 27, 252, 164, 36, 50, 207, 69, 215, 100, 155, 52, 12, + 242, 5, 103, 201, 225, 97, 115, 20, 160, 5, 218, 202, 201, 169, 216, 22, 100, 246, 196, 166, 82, 42, 11, + 200, 2, 14, 153, 75, 61, 11, 91, 202, 167, 217, 196, 109, 225, 6, 67, 16, 237, 88, 41, 114, 54, 229, + 73, 209, 26, 203, 70, 11, 253, 18, 166, 64, 63, 24, 253, 150, 108, 99, 184, 182, 171, 0, 172, 167, 190, + 51, 157, 207, 182, 155, 122, 21, 16, 5, 252, 111, 232, 185, 45, 243, 98, 54, 174, 86, 65, 63, 129, 234, + 127, 53, 224, 210, 66, 139, 157, 173, 86, 43, 0, 187, 160, 244, 198, 200, 79, 226, 39, 190, 239, 40, 26, + 150, 22, 200, 3, 142, 180, 115, 168, 251, 38, 163, 54, 127, 227, 236, 94, 111, 130, 60, 24, 152, 191, 108, + 33, 32, 132, 188, 26, 18, 135, 224, 233, 187, 166, 33, 5, 
105, 161, 5, 97, 61, 56, 152, 184, 136, 70, + 206, 32, 247, 233, 244, 8, 193, 154, 58, 143, 157, 176, 69, 157, 70, 244, 232, 74, 1, 58, 13, 151, 90, + 10, 47, 22, 177, 154, 89, 199, 39, 196, 55, 133, 67, 157, 52, 68, 200, 162, 173, 92, 78, 147, 15, 12, + 172, 218, 1, 7, 234, 169, 225, 0, 212, 164, 148, 145, 25, 186, 170, 54, 47, 238, 101, 163, 61, 201, 173, + 144, 26, 76, 26, 43, 188, 105, 0, 3, 177, 221, 119, 138, 36, 170, 21, 133, 165, 138, 38, 30, 7, 134, + 163, 221, 214, 199, 228, 158, 89, 211, 126, 36, 53, 51, 175, 87, 139, 167, 112, 144, 157, 247, 84, 183, 139, + 236, 206, 147, 161, 177, 57, 79, 239, 173, 91, 252, 138, 90, 87, 3, 154, 51, 96, 141, 177, 47, 197, 44, + 108, 170, 221, 156, 243, 136, 54, 248, 228, 40, 245, 232, 4, 212, 133, 153, 215, 120, 119, 129, 112, 121, 71, + 31, 75, 173, 144, 127, 172, 1, 93, 131, 159, 117, 166, 231, 179, 34, 172, 31, 78, 167, 193, 122, 119, 184, + 201, 236, 156, 108, 174, 163, 9, 239, 229, 27, 69, 8, 192, 16, 63, 244, 58, 2, 247, 204, 154, 246, 80, + 113, 30, 104, 236, 109, 56, 210, 254, 4, 164, 136, 147, 5, 16, 135, 245, 237, 172, 207, 150, 122, 39, 84, + 244, 216, 74, 171, 9, 244, 235, 118, 211, 169, 24, 22, 241, 173, 127, 224, 143, 158, 102, 94, 37, 64, 23, + 221, 57, 227, 2, 128, 11, 158, 221, 96, 28, 188, 213, 217, 221, 31, 31, 77, 106, 65, 185, 34, 135, 19, + 202, 81, 49, 88, 252, 244, 194, 111, 180, 221, 203, 220, 54, 237, 42, 220, 209, 37, 136, 127, 191, 238, 40, + 152, 152, 9, 112, 45, 146, 79, 196, 243, 72, 54, 129, 193, 167, 194, 142, 29, 162, 107, 254, 147, 111, 14, + 210, 208, 56, 205, 221, 227, 83, 44, 123, 148, 248, 140, 13, 208, 230, 92, 166, 190, 158, 159, 182, 32, 118, + 197, 243, 79, 194, 78, 122, 89, 125, 167, 31, 179, 70, 62, 231, 110, 45, 126, 221, 77, 116, 95, 191, 170, + 205, 218, 156, 146, 113, 96, 115, 88, 122, 243, 236, 228, 139, 191, 175, 189, 254, 57, 210, 192, 244, 127, 239, + 89, 28, 19, 178, 153, 155, 208, 110, 224, 97, 141, 104, 112, 89, 6, 73, 196, 115, 24, 204, 183, 253, 112, + 191, 177, 
111, 253, 225, 195, 41, 75, 19, 31, 148, 148, 171, 21, 32, 186, 228, 92, 227, 167, 133, 143, 61, + 215, 215, 218, 215, 211, 143, 6, 21, 166, 161, 24, 113, 37, 169, 18, 79, 54, 114, 24, 54, 227, 151, 210, + 144, 113, 22, 82, 94, 170, 116, 41, 43, 148, 38, 95, 235, 158, 100, 121, 1, 146, 220, 194, 59, 229, 130, + 70, 19, 146, 34, 33, 190, 5, 31, 250, 142, 68, 79, 9, 126, 105, 225, 3, 112, 4, 203, 84, 123, 182, + 189, 82, 127, 207, 247, 106, 211, 130, 222, 241, 105, 216, 75, 25, 147, 121, 206, 124, 146, 35, 134, 128, 191, + 29, 105, 161, 121, 75, 67, 7, 99, 232, 216, 194, 211, 218, 144, 154, 197, 157, 204, 242, 128, 221, 121, 87, + 102, 86, 27, 203, 18, 65, 101, 129, 208, 254, 216, 181, 55, 55, 43, 81, 36, 85, 23, 211, 57, 130, 18, + 78, 103, 190, 171, 208, 198, 0, 166, 252, 152, 220, 141, 55, 141, 226, 157, 214, 139, 104, 250, 49, 2, 109, + 62, 31, 182, 112, 223, 182, 166, 151, 174, 153, 97, 194, 123, 121, 34, 13, 28, 218, 32, 83, 217, 182, 12, + 160, 95, 85, 169, 216, 128, 188, 251, 203, 222, 223, 99, 241, 243, 173, 106, 216, 158, 134, 69, 157, 71, 217, + 40, 217, 144, 7, 2, 22, 51, 77, 95, 33, 204, 178, 218, 113, 171, 37, 160, 185, 190, 40, 179, 126, 123, + 48, 127, 12, 179, 168, 247, 24, 79, 70, 18, 197, 65, 51, 138, 225, 104, 124, 213, 83, 95, 145, 80, 210, + 250, 41, 101, 82, 211, 99, 118, 53, 87, 98, 254, 215, 75, 194, 93, 96, 180, 51, 142, 109, 152, 42, 199, + 34, 233, 125, 219, 200, 234, 20, 228, 58, 18, 165, 161, 218, 100, 74, 20, 177, 72, 74, 3, 39, 74, 68, + 90, 188, 93, 217, 55, 61, 129, 102, 228, 192, 27, 150, 234, 138, 250, 168, 59, 179, 82, 241, 43, 184, 253, + 164, 121, 247, 78, 134, 104, 162, 222, 27, 84, 81, 135, 100, 238, 168, 50, 42, 207, 136, 206, 216, 115, 220, + 143, 15, 74, 130, 16, 33, 216, 36, 182, 211, 100, 187, 33, 103, 72, 67, 167, 90, 226, 121, 159, 243, 163, + 10, 230, 192, 219, 210, 196, 202, 64, 163, 7, 169, 215, 169, 85, 19, 50, 14, 93, 182, 71, 218, 154, 25, + 134, 248, 196, 77, 156, 154, 155, 113, 199, 76, 39, 162, 180, 130, 105, 
249, 33, 155, 8, 172, 200, 64, 198, + 50, 182, 240, 26, 214, 7, 104, 103, 199, 49, 98, 183, 177, 135, 116, 136, 11, 172, 197, 163, 147, 229, 165, + 151, 180, 248, 132, 250, 176, 174, 15, 73, 230, 159, 141, 29, 167, 82, 140, 61, 76, 221, 167, 177, 32, 32, + 104, 5, 94, 201, 36, 154, 123, 19, 99, 63, 226, 105, 108, 35, 215, 33, 155, 159, 181, 182, 10, 105, 112, + 81, 185, 108, 177, 94, 104, 71, 173, 118, 170, 118, 34, 131, 51, 148, 16, 113, 30, 147, 56, 200, 167, 7, + 227, 236, 235, 166, 124, 178, 48, 242, 236, 188, 109, 144, 35, 30, 68, 3, 53, 103, 58, 67, 135, 167, 219, + 138, 45, 205, 104, 9, 196, 53, 24, 16, 180, 191, 2, 208, 76, 57, 60, 24, 197, 54, 190, 31, 91, 23, + 169, 47, 135, 108, 21, 107, 169, 3, 137, 216, 129, 112, 42, 124, 229, 128, 48, 245, 112, 78, 167, 213, 133, + 229, 9, 98, 156, 243, 184, 54, 251, 37, 45, 64, 33, 189, 137, 49, 108, 241, 51, 169, 72, 123, 202, 89, + 174, 168, 238, 1, 219, 241, 231, 247, 219, 60, 75, 34, 106, 219, 126, 221, 58, 155, 223, 235, 46, 238, 160, + 153, 214, 146, 46, 243, 19, 59, 244, 253, 225, 190, 176, 135, 28, 176, 91, 226, 68, 69, 100, 125, 154, 66, + 98, 230, 206, 199, 119, 229, 176, 218, 176, 32, 8, 72, 19, 151, 21, 167, 122, 10, 51, 227, 98, 34, 214, + 139, 211, 5, 92, 123, 133, 116, 152, 127, 206, 32, 43, 195, 41, 57, 151, 112, 252, 250, 171, 151, 227, 195, + 245, 145, 136, 63, 254, 93, 59, 124, 197, 132, 202, 252, 161, 77, 220, 106, 249, 48, 88, 199, 10, 100, 233, + 204, 210, 105, 196, 210, 240, 133, 83, 112, 17, 33, 53, 211, 183, 204, 105, 192, 51, 27, 9, 93, 249, 156, + 177, 155, 218, 174, 36, 140, 153, 151, 81, 79, 99, 176, 183, 10, 126, 173, 136, 180, 164, 244, 136, 89, 16, + 90, 164, 175, 236, 142, 125, 87, 54, 38, 19, 244, 221, 141, 100, 216, 69, 36, 207, 139, 162, 184, 0, 61, + 67, 223, 231, 144, 138, 59, 121, 21, 145, 134, 109, 120, 18, 114, 242, 85, 185, 34, 241, 104, 230, 219, 47, + 251, 64, 247, 80, 106, 38, 200, 1, 13, 93, 235, 86, 21, 241, 79, 19, 247, 184, 11, 10, 243, 180, 47, + 37, 67, 92, 31, 199, 
202, 245, 156, 238, 245, 206, 54, 95, 50, 166, 6, 119, 186, 172, 248, 91, 225, 165, + 51, 52, 35, 214, 88, 81, 44, 251, 18, 203, 50, 242, 89, 196, 220, 137, 60, 154, 41, 249, 104, 115, 99, + 250, 113, 100, 85, 22, 195, 202, 194, 47, 71, 50, 106, 101, 83, 174, 217, 91, 16, 73, 25, 26, 106, 202, + 240, 237, 244, 141, 119, 14, 32, 79, 80, 173, 107, 64, 207, 93, 188, 210, 5, 2, 185, 107, 203, 68, 113, + 78, 167, 222, 67, 220, 244, 111, 152, 131, 69, 182, 20, 249, 222, 148, 175, 102, 16, 70, 236, 68, 1, 160, + 111, 67, 135, 75, 172, 193, 225, 133, 45, 124, 137, 151, 197, 25, 12, 73, 11, 125, 67, 58, 141, 245, 37, + 69, 112, 58, 37, 29, 134, 178, 51, 239, 140, 145, 64, 196, 88, 193, 122, 196, 90, 80, 141, 185, 35, 19, + 55, 68, 104, 204, 160, 151, 114, 43, 189, 162, 122, 241, 142, 150, 46, 95, 180, 19, 134, 42, 219, 127, 146, + 211, 16, 251, 171, 83, 152, 63, 97, 174, 81, 53, 240, 36, 26, 119, 201, 254, 64, 48, 188, 50, 203, 85, + 179, 172, 172, 242, 21, 66, 53, 155, 191, 157, 12, 106, 231, 130, 1, 246, 203, 116, 134, 234, 222, 77, 102, + 59, 211, 176, 111, 228, 202, 165, 219, 30, 230, 191, 81, 162, 189, 80, 158, 142, 150, 194, 2, 156, 97, 189, + 6, 226, 78, 29, 167, 38, 232, 188, 239, 158, 212, 209, 182, 25, 23, 138, 198, 83, 117, 160, 245, 92, 47, + 164, 16, 163, 186, 194, 137, 203, 99, 144, 185, 233, 113, 99, 120, 23, 53, 221, 245, 179, 213, 209, 119, 99, + 140, 120, 156, 59, 124, 19, 56, 81, 13, 234, 113, 147, 192, 208, 114, 243, 146, 133, 157, 143, 7, 206, 230, + 253, 49, 203, 104, 201, 248, 49, 229, 14, 81, 125, 78, 54, 49, 55, 82, 215, 106, 97, 147, 83, 118, 31, + 142, 19, 189, 206, 18, 142, 73, 56, 39, 41, 223, 179, 43, 239, 168, 153, 200, 54, 39, 113, 108, 2, 27, + 134, 93, 75, 187, 246, 100, 114, 80, 3, 32, 203, 239, 95, 125, 92, 207, 72, 249, 137, 171, 150, 98, 175, + 185, 191, 227, 158, 44, 169, 42, 2, 249, 108, 164, 154, 70, 143, 113, 41, 20, 22, 222, 114, 85, 216, 130, + 112, 183, 130, 187, 93, 114, 224, 175, 93, 137, 85, 253, 85, 9, 20, 84, 248, 120, 185, 
210, 149, 5, 189, + 174, 228, 93, 21, 137, 202, 74, 13, 161, 200, 11, 79, 207, 74, 83, 227, 140, 254, 138, 162, 175, 120, 244, + 242, 64, 54, 61, 247, 232, 241, 139, 37, 198, 4, 95, 235, 104, 160, 207, 100, 50, 221, 127, 149, 125, 52, + 212, 100, 94, 20, 50, 167, 228, 52, 47, 56, 251, 133, 229, 41, 16, 152, 149, 199, 108, 239, 248, 136, 115, + 242, 74, 173, 45, 17, 178, 85, 50, 161, 183, 208, 42, 211, 174, 12, 150, 10, 6, 126, 27, 25, 126, 165, + 80, 24, 54, 29, 4, 88, 193, 219, 233, 77, 183, 73, 155, 55, 11, 65, 103, 208, 227, 194, 78, 231, 250, + 254, 30, 93, 238, 178, 164, 167, 124, 62, 206, 154, 36, 213, 5, 149, 247, 122, 3, 164, 217, 112, 68, 111, + 46, 127, 60, 217, 169, 164, 130, 154, 19, 2, 160, 175, 214, 55, 170, 73, 142, 57, 206, 9, 198, 77, 80, + 158, 142, 179, 10, 203, 74, 228, 104, 75, 149, 177, 105, 30, 60, 248, 131, 59, 80, 177, 213, 24, 238, 148, + 98, 23, 126, 128, 236, 50, 131, 254, 230, 160, 162, 160, 86, 236, 131, 242, 165, 58, 98, 32, 243, 34, 40, + 226, 46, 192, 130, 246, 137, 229, 117, 29, 233, 11, 105, 160, 1, 201, 55, 173, 169, 87, 211, 121, 43, 71, + 123, 243, 47, 121, 242, 211, 194, 194, 176, 7, 153, 52, 221, 246, 152, 206, 252, 155, 239, 216, 236, 53, 156, + 188, 244, 95, 104, 2, 229, 65, 139, 26, 230, 149, 118, 150, 253, 130, 156, 23, 189, 34, 12, 23, 221, 2, + 75, 216, 151, 5, 80, 198, 104, 197, 219, 71, 29, 103, 140, 241, 125, 254, 76, 34, 69, 31, 107, 235, 71, + 195, 144, 150, 14, 217, 163, 45, 41, 19, 171, 204, 149, 171, 85, 124, 57, 119, 96, 235, 2, 242, 254, 53, + 180, 91, 145, 40, 38, 228, 189, 175, 217, 184, 15, 38, 222, 169, 100, 68, 11, 175, 91, 15, 175, 9, 51, + 199, 207, 40, 244, 179, 53, 193, 53, 50, 232, 127, 88, 1, 120, 177, 228, 107, 43, 188, 216, 5, 201, 243, + 29, 190, 140, 10, 178, 30, 221, 169, 190, 204, 204, 56, 213, 5, 83, 147, 97, 61, 7, 151, 182, 224, 142, + 215, 7, 187, 184, 22, 142, 170, 236, 70, 252, 223, 234, 239, 16, 228, 149, 196, 44, 124, 83, 42, 251, 209, + 47, 152, 184, 33, 78, 185, 200, 148, 116, 42, 
52, 222, 72, 99, 25, 13, 206, 135, 244, 19, 131, 114, 150, + 178, 103, 162, 76, 183, 89, 221, 252, 139, 165, 80, 250, 201, 190, 181, 189, 249, 224, 170, 100, 184, 242, 215, + 149, 205, 192, 89, 108, 61, 57, 216, 225, 188, 63, 160, 159, 128, 170, 23, 43, 110, 75, 30, 232, 194, 75, + 87, 80, 88, 175, 33, 69, 48, 215, 143, 106, 192, 179, 113, 151, 203, 112, 170, 241, 199, 250, 16, 220, 143, + 179, 200, 224, 83, 226, 43, 254, 151, 105, 170, 215, 175, 84, 99, 32, 220, 181, 61, 236, 108, 234, 70, 181, + 58, 119, 146, 10, 168, 201, 84, 16, 84, 74, 70, 55, 110, 114, 129, 23, 2, 229, 252, 243, 48, 98, 26, + 33, 55, 148, 145, 46, 40, 127, 119, 34, 122, 245, 183, 213, 208, 221, 237, 113, 189, 175, 223, 41, 220, 53, + 129, 148, 98, 9, 161, 63, 71, 26, 167, 124, 113, 58, 177, 155, 126, 207, 89, 125, 110, 80, 236, 5, 187, + 179, 216, 187, 176, 187, 69, 39, 118, 121, 0, 149, 216, 169, 251, 51, 19, 6, 136, 139, 245, 210, 148, 149, + 52, 30, 94, 229, 144, 85, 206, 117, 56, 232, 252, 176, 149, 148, 25, 6, 70, 142, 118, 190, 180, 15, 7, + 166, 239, 203, 216, 247, 108, 227, 100, 210, 18, 54, 80, 44, 97, 202, 36, 196, 93, 140, 153, 58, 248, 110, + 70, 32, 238, 246, 53, 15, 223, 75, 93, 38, 194, 65, 172, 251, 133, 229, 113, 180, 139, 232, 157, 243, 123, + 140, 154, 112, 98, 26, 17, 155, 20, 199, 222, 183, 106, 214, 175, 167, 148, 158, 131, 155, 11, 225, 126, 55, + 132, 247, 79, 88, 36, 8, 150, 0, 160, 100, 31, 51, 175, 191, 253, 207, 127, 47, 3, 208, 38, 29, 76, + 238, 137, 75, 10, 43, 107, 167, 222, 167, 53, 15, 160, 201, 156, 108, 95, 236, 2, 193, 219, 178, 35, 168, + 48, 144, 68, 19, 102, 235, 220, 17, 59, 237, 57, 112, 222, 150, 149, 68, 11, 157, 240, 7, 123, 235, 164, + 139, 33, 5, 128, 59, 219, 54, 43, 38, 239, 139, 142, 247, 46, 47, 241, 44, 229, 72, 62, 192, 25, 154, + 111, 49, 97, 225, 5, 95, 234, 62, 10, 185, 93, 244, 20, 76, 64, 36, 247, 180, 254, 176, 205, 80, 209, + 156, 206, 26, 6, 34, 100, 57, 66, 245, 236, 149, 66, 224, 182, 91, 132, 173, 129, 180, 235, 13, 33, 107, + 
180, 240, 117, 139, 106, 94, 111, 86, 190, 102, 92, 12, 194, 53, 96, 60, 165, 158, 0, 12, 75, 191, 198, + 37, 162, 236, 212, 162, 102, 233, 16, 194, 223, 168, 217, 98, 191, 171, 204, 46, 20, 101, 164, 153, 32, 239, + 85, 118, 2, 162, 200, 204, 28, 113, 111, 160, 97, 252, 33, 213, 185, 105, 73, 191, 48, 182, 92, 146, 101, + 47, 54, 52, 165, 94, 180, 55, 100, 36, 91, 10, 111, 239, 232, 65, 175, 86, 109, 84, 211, 241, 119, 29, + 254, 12, 134, 86, 99, 156, 253, 69, 106, 156, 10, 119, 187, 207, 38, 223, 187, 129, 114, 176, 198, 10, 87, + 211, 168, 44, 170, 148, 110, 40, 211, 65, 46, 215, 200, 160, 165, 225, 225, 60, 99, 145, 86, 201, 225, 126, + 110, 54, 66, 227, 220, 128, 60, 90, 182, 251, 19, 123, 146, 162, 160, 51, 95, 82, 113, 220, 229, 52, 216, + 169, 132, 89, 215, 4, 51, 224, 244, 5, 141, 165, 25, 192, 204, 18, 239, 122, 239, 47, 46, 248, 30, 143, + 83, 49, 32, 248, 1, 23, 45, 192, 88, 153, 133, 227, 9, 234, 47, 124, 92, 33, 248, 176, 242, 107, 223, + 230, 57, 19, 30, 246, 212, 137, 107, 37, 32, 192, 51, 234, 85, 236, 63, 184, 72, 203, 120, 171, 252, 57, + 76, 72, 248, 58, 198, 157, 213, 106, 73, 236, 3, 152, 60, 59, 27, 79, 92, 127, 84, 4, 90, 102, 196, + 128, 210, 166, 38, 217, 173, 238, 92, 222, 252, 206, 103, 99, 101, 230, 233, 142, 45, 88, 66, 113, 2, 73, + 166, 135, 123, 31, 7, 187, 89, 141, 18, 174, 6, 210, 145, 117, 242, 187, 151, 85, 197, 103, 84, 94, 222, + 124, 199, 29, 84, 88, 243, 178, 74, 28, 97, 191, 120, 6, 34, 164, 138, 92, 221, 219, 167, 147, 82, 192, + 50, 86, 154, 95, 44, 152, 158, 140, 130, 3, 18, 128, 25, 153, 181, 155, 175, 246, 45, 27, 2, 43, 221, + 140, 240, 230, 98, 193, 0, 114, 79, 103, 86, 182, 28, 56, 36, 216, 166, 43, 239, 118, 101, 231, 188, 244, + 68, 129, 191, 121, 242, 228, 89, 145, 242, 213, 28, 100, 75, 118, 23, 175, 189, 90, 69, 11, 243, 20, 97, + 242, 236, 188, 63, 58, 110, 53, 45, 182, 96, 68, 145, 152, 146, 71, 96, 245, 227, 155, 156, 145, 127, 201, + 68, 1, 183, 129, 29, 199, 99, 54, 120, 3, 195, 198, 62, 210, 125, 192, 
212, 7, 127, 10, 29, 106, 181, + 65, 64, 71, 62, 105, 50, 42, 129, 33, 95, 68, 16, 73, 155, 12, 131, 126, 108, 0, 13, 247, 87, 75, + 35, 248, 173, 175, 120, 218, 185, 189, 117, 84, 215, 100, 165, 157, 134, 196, 20, 144, 51, 139, 72, 45, 173, + 34, 84, 225, 138, 101, 65, 34, 120, 212, 68, 85, 26, 165, 62, 190, 40, 1, 5, 248, 224, 2, 188, 207, + 185, 226, 164, 96, 228, 179, 90, 170, 219, 108, 174, 146, 128, 137, 191, 76, 112, 97, 203, 6, 174, 241, 227, + 126, 60, 28, 119, 125, 102, 179, 234, 133, 124, 167, 9, 27, 87, 158, 170, 76, 125, 191, 129, 166, 84, 228, + 1, 247, 2, 111, 1, 107, 14, 119, 182, 85, 151, 9, 152, 88, 5, 120, 158, 26, 183, 151, 170, 120, 32, + 23, 19, 158, 124, 181, 125, 175, 176, 31, 135, 162, 4, 116, 101, 43, 84, 54, 52, 75, 247, 127, 66, 132, + 35, 77, 110, 190, 63, 150, 6, 117, 249, 131, 130, 194, 164, 27, 92, 252, 99, 206, 76, 181, 250, 216, 217, + 65, 187, 253, 207, 130, 193, 69, 12, 3, 147, 80, 46, 73, 251, 179, 100, 16, 30, 88, 59, 114, 20, 160, + 63, 219, 1, 28, 133, 37, 42, 25, 212, 71, 243, 37, 224, 177, 217, 156, 33, 3, 83, 37, 68, 73, 194, + 182, 168, 141, 57, 24, 46, 10, 144, 37, 235, 28, 229, 145, 105, 24, 157, 62, 120, 27, 230, 37, 237, 149, + 204, 66, 32, 230, 75, 111, 219, 61, 85, 102, 176, 232, 247, 11, 80, 56, 66, 187, 115, 161, 170, 210, 25, + 51, 188, 200, 224, 206, 196, 24, 21, 109, 152, 159, 187, 170, 29, 188, 87, 40, 33, 104, 195, 45, 90, 176, + 55, 97, 56, 32, 20, 70, 236, 41, 16, 228, 108, 215, 232, 89, 157, 87, 209, 206, 216, 136, 26, 34, 75, + 49, 115, 194, 71, 213, 186, 48, 193, 227, 253, 146, 169, 53, 52, 134, 195, 43, 10, 47, 34, 132, 212, 145, + 199, 123, 50, 122, 140, 131, 208, 137, 80, 86, 65, 217, 117, 204, 165, 53, 61, 162, 184, 245, 183, 98, 148, + 196, 103, 7, 171, 44, 30, 57, 54, 134, 150, 201, 29, 197, 4, 17, 149, 130, 31, 11, 247, 97, 213, 65, + 145, 26, 4, 179, 187, 36, 171, 40, 164, 192, 77, 40, 75, 151, 205, 26, 31, 242, 208, 210, 147, 238, 223, + 246, 170, 252, 118, 97, 2, 52, 188, 35, 216, 28, 76, 250, 
54, 79, 52, 227, 187, 178, 92, 246, 213, 159, + 215, 3, 217, 179, 132, 51, 124, 79, 196, 53, 228, 212, 80, 50, 115, 63, 107, 83, 53, 160, 176, 118, 247, + 126, 163, 98, 4, 26, 167, 127, 18, 157, 35, 107, 173, 64, 186, 201, 251, 64, 219, 189, 52, 76, 71, 47, + 47, 69, 153, 118, 147, 115, 110, 30, 151, 155, 68, 78, 131, 75, 134, 157, 72, 180, 254, 220, 150, 9, 100, + 124, 68, 178, 23, 15, 187, 84, 32, 149, 23, 159, 64, 180, 122, 230, 94, 237, 11, 107, 167, 100, 22, 96, + 25, 145, 68, 47, 142, 71, 8, 20, 194, 35, 165, 229, 47, 133, 87, 156, 54, 127, 175, 62, 246, 61, 216, + 11, 206, 101, 244, 233, 84, 146, 182, 67, 88, 5, 159, 209, 15, 165, 193, 103, 195, 144, 186, 150, 82, 132, + 223, 48, 145, 103, 44, 13, 7, 210, 128, 97, 128, 123, 246, 157, 59, 31, 246, 85, 85, 242, 78, 81, 92, + 240, 18, 207, 192, 53, 157, 31, 209, 133, 25, 212, 114, 94, 70, 150, 102, 150, 182, 37, 214, 64, 218, 224, + 124, 166, 132, 56, 66, 80, 189, 100, 171, 19, 89, 113, 48, 220, 49, 91, 216, 159, 57, 51, 102, 249, 169, + 207, 29, 253, 208, 198, 110, 101, 158, 190, 87, 100, 213, 35, 55, 205, 100, 157, 137, 77, 128, 254, 61, 184, + 9, 129, 145, 161, 227, 209, 198, 14, 119, 172, 250, 78, 84, 163, 157, 227, 123, 206, 150, 155, 2, 84, 7, + 180, 224, 132, 134, 40, 157, 154, 215, 80, 231, 118, 114, 110, 12, 101, 4, 222, 85, 46, 156, 181, 30, 126, + 136, 5, 91, 185, 93, 166, 33, 38, 44, 225, 144, 21, 30, 244, 172, 72, 229, 52, 35, 74, 162, 46, 61, + 47, 93, 24, 241, 60, 253, 190, 104, 11, 205, 106, 115, 74, 19, 254, 139, 229, 191, 15, 119, 211, 91, 126, + 77, 6, 84, 180, 157, 169, 73, 147, 154, 201, 132, 65, 118, 185, 156, 154, 6, 176, 67, 87, 128, 75, 244, + 78, 181, 43, 29, 72, 49, 217, 137, 132, 158, 11, 179, 103, 170, 92, 179, 231, 61, 209, 191, 83, 138, 66, + 209, 129, 31, 254, 185, 41, 36, 239, 230, 97, 83, 199, 115, 243, 20, 230, 194, 102, 103, 254, 202, 214, 253, + 85, 32, 17, 85, 211, 149, 96, 88, 37, 12, 154, 209, 176, 73, 53, 153, 132, 199, 18, 5, 80, 58, 48, + 139, 85, 212, 192, 138, 209, 
88, 62, 250, 242, 46, 230, 90, 196, 36, 23, 214, 55, 157, 249, 220, 121, 162, + 4, 133, 22, 40, 180, 111, 157, 87, 200, 5, 167, 159, 63, 190, 176, 172, 65, 135, 220, 119, 78, 94, 151, + 62, 58, 161, 104, 200, 84, 191, 254, 43, 190, 92, 19, 101, 22, 25, 182, 22, 123, 54, 191, 246, 195, 139, + 246, 173, 150, 56, 147, 222, 149, 83, 181, 209, 199, 27, 117, 95, 151, 163, 167, 253, 64, 187, 240, 145, 193, + 153, 88, 94, 31, 211, 228, 154, 207, 219, 243, 213, 212, 138, 237, 216, 13, 82, 208, 113, 249, 30, 44, 251, + 239, 110, 40, 166, 96, 191, 78, 120, 96, 242, 212, 55, 13, 70, 160, 15, 71, 150, 122, 132, 111, 142, 43, + 97, 93, 41, 120, 222, 41, 171, 64, 178, 2, 37, 79, 77, 82, 108, 106, 181, 223, 57, 203, 248, 232, 138, + 76, 97, 69, 172, 98, 191, 227, 112, 58, 175, 13, 162, 147, 103, 164, 219, 93, 31, 127, 22, 65, 27, 143, + 142, 157, 106, 249, 239, 4, 145, 135, 174, 181, 132, 184, 125, 117, 193, 87, 198, 219, 241, 13, 136, 149, 11, + 221, 102, 72, 245, 34, 112, 230, 240, 66, 85, 180, 143, 147, 142, 152, 3, 78, 16, 52, 90, 144, 41, 229, + 238, 74, 79, 15, 58, 137, 251, 101, 93, 43, 4, 32, 170, 31, 156, 47, 249, 40, 231, 78, 195, 138, 139, + 138, 249, 119, 183, 118, 197, 167, 248, 229, 118, 251, 199, 203, 180, 209, 17, 227, 166, 215, 214, 20, 213, 254, + 54, 181, 83, 8, 18, 243, 42, 93, 167, 246, 113, 246, 67, 215, 36, 94, 235, 75, 113, 205, 148, 107, 92, + 148, 11, 38, 20, 186, 247, 237, 188, 234, 241, 176, 154, 44, 140, 115, 251, 55, 253, 151, 136, 175, 219, 71, + 250, 17, 248, 129, 47, 208, 68, 191, 130, 22, 57, 192, 226, 88, 253, 248, 48, 253, 47, 198, 234, 78, 210, + 106, 27, 127, 245, 113, 103, 192, 48, 48, 65, 191, 41, 36, 131, 191, 109, 29, 143, 187, 82, 84, 123, 165, + 65, 125, 122, 151, 81, 253, 14, 42, 68, 159, 234, 153, 196, 53, 154, 221, 123, 4, 19, 239, 253, 87, 249, + 123, 219, 67, 124, 19, 196, 88, 197, 37, 82, 115, 48, 72, 68, 19, 41, 118, 182, 21, 36, 55, 173, 157, + 100, 168, 98, 115, 119, 34, 66, 221, 181, 213, 58, 185, 203, 196, 68, 245, 21, 29, 
154, 212, 162, 134, 175, + 66, 91, 170, 153, 147, 200, 176, 169, 142, 213, 168, 149, 133, 120, 114, 98, 32, 254, 102, 202, 226, 245, 129, + 72, 219, 153, 49, 172, 81, 43, 73, 184, 234, 8, 251, 121, 54, 16, 165, 241, 132, 50, 201, 223, 249, 150, + 193, 52, 131, 161, 128, 8, 183, 8, 186, 173, 87, 73, 117, 92, 13, 221, 31, 52, 123, 247, 165, 209, 35, + 232, 162, 227, 1, 66, 225, 82, 205, 185, 139, 211, 160, 46, 65, 179, 41, 216, 29, 162, 182, 134, 130, 28, + 65, 22, 80, 163, 137, 237, 217, 46, 251, 119, 30, 1, 96, 8, 225, 36, 143, 226, 41, 211, 159, 234, 152, + 89, 221, 181, 204, 145, 138, 233, 166, 83, 7, 161, 248, 239, 134, 115, 2, 31, 165, 225, 153, 234, 129, 64, + 13, 65, 75, 145, 57, 4, 113, 243, 131, 144, 99, 203, 13, 5, 182, 142, 93, 117, 30, 171, 55, 109, 56, + 136, 248, 156, 93, 241, 117, 16, 196, 139, 110, 43, 6, 111, 16, 209, 209, 59, 2, 208, 149, 56, 169, 3, + 84, 203, 152, 18, 233, 21, 98, 90, 82, 177, 226, 5, 212, 237, 196, 122, 112, 204, 244, 186, 115, 65, 194, + 112, 55, 141, 7, 147, 176, 164, 102, 138, 53, 233, 37, 46, 143, 142, 87, 208, 205, 35, 154, 187, 117, 81, + 42, 69, 82, 64, 207, 243, 219, 89, 141, 49, 215, 89, 222, 120, 41, 134, 23, 146, 154, 226, 9, 196, 51, + 211, 79, 33, 174, 66, 51, 34, 187, 66, 191, 204, 29, 239, 246, 238, 60, 234, 217, 170, 119, 221, 138, 7, + 222, 88, 122, 195, 111, 68, 209, 22, 202, 243, 105, 96, 148, 12, 187, 33, 30, 140, 244, 52, 204, 177, 179, + 9, 203, 118, 245, 193, 146, 18, 173, 188, 84, 207, 96, 16, 84, 0, 101, 172, 148, 102, 199, 213, 21, 238, + 138, 19, 157, 104, 95, 24, 36, 246, 41, 156, 179, 225, 233, 227, 148, 66, 53, 254, 141, 140, 23, 191, 9, + 49, 10, 67, 55, 57, 227, 213, 212, 98, 67, 207, 234, 36, 73, 180, 166, 94, 230, 175, 166, 58, 196, 234, + 182, 131, 66, 36, 108, 91, 251, 6, 39, 33, 209, 187, 199, 182, 41, 155, 65, 250, 153, 61, 33, 112, 42, + 201, 142, 148, 96, 61, 96, 160, 42, 233, 195, 115, 14, 207, 175, 197, 232, 112, 124, 41, 190, 232, 56, 104, + 164, 75, 68, 249, 144, 184, 237, 56, 129, 87, 
124, 236, 63, 101, 132, 47, 148, 252, 78, 121, 0, 7, 81, + 192, 216, 67, 136, 212, 125, 191, 167, 167, 159, 233, 160, 112, 40, 14, 62, 48, 100, 104, 129, 48, 241, 152, + 67, 58, 135, 25, 99, 88, 254, 113, 89, 126, 211, 208, 15, 249, 245, 250, 233, 7, 129, 91, 157, 225, 63, + 137, 248, 196, 66, 120, 69, 73, 203, 209, 87, 163, 120, 111, 85, 68, 18, 47, 187, 148, 151, 226, 41, 99, + 56, 154, 58, 238, 24, 7, 22, 173, 133, 162, 246, 216, 112, 252, 199, 58, 224, 184, 23, 115, 209, 232, 244, + 218, 74, 63, 117, 69, 196, 198, 210, 52, 199, 150, 62, 236, 66, 47, 161, 94, 172, 4, 110, 179, 41, 172, + 139, 72, 246, 177, 237, 253, 8, 50, 135, 20, 93, 25, 21, 101, 160, 145, 159, 74, 99, 69, 132, 248, 46, + 182, 20, 199, 160, 210, 6, 45, 49, 178, 124, 128, 249, 192, 94, 121, 186, 76, 15, 149, 103, 42, 130, 247, + 242, 3, 97, 171, 32, 157, 56, 112, 202, 116, 124, 175, 49, 49, 13, 146, 94, 109, 61, 14, 186, 161, 145, + 66, 80, 41, 57, 133, 218, 79, 58, 214, 219, 170, 143, 106, 98, 82, 164, 229, 187, 121, 170, 147, 28, 126, + 49, 134, 16, 121, 104, 213, 207, 55, 31, 39, 165, 103, 61, 67, 209, 4, 15, 24, 72, 155, 150, 105, 149, + 141, 253, 245, 124, 137, 126, 219, 28, 27, 131, 66, 242, 215, 90, 51, 159, 134, 136, 223, 66, 241, 136, 2, + 25, 235, 176, 83, 13, 210, 136, 4, 233, 191, 162, 15, 101, 231, 113, 56, 155, 4, 128, 40, 0, 132, 4, + 195, 148, 210, 245, 69, 22, 5, 156, 62, 43, 254, 39, 236, 55, 178, 211, 26, 61, 95, 249, 158, 98, 58, + 126, 178, 45, 20, 4, 153, 216, 85, 218, 125, 2, 219, 11, 33, 202, 145, 46, 192, 201, 249, 197, 22, 231, + 132, 73, 181, 153, 253, 64, 209, 137, 253, 174, 100, 228, 157, 28, 32, 78, 145, 1, 188, 251, 147, 2, 105, + 174, 239, 64, 244, 189, 118, 147, 55, 37, 82, 130, 241, 200, 35, 167, 90, 181, 232, 171, 177, 5, 223, 204, + 175, 130, 121, 27, 202, 124, 20, 15, 18, 233, 247, 32, 8, 218, 100, 128, 235, 202, 196, 0, 6, 202, 97, + 14, 85, 247, 199, 155, 8, 99, 3, 1, 239, 18, 200, 131, 130, 114, 145, 111, 96, 53, 201, 164, 168, 87, + 67, 209, 194, 45, 
237, 41, 65, 94, 48, 187, 239, 126, 181, 121, 87, 63, 143, 176, 50, 131, 15, 11, 132, + 242, 29, 126, 198, 51, 16, 79, 89, 207, 228, 220, 76, 97, 8, 237, 197, 192, 205, 89, 201, 224, 143, 44, + 50, 135, 189, 226, 212, 1, 252, 71, 204, 33, 24, 234, 46, 125, 85, 205, 192, 106, 241, 43, 198, 246, 55, + 193, 238, 227, 185, 103, 104, 145, 139, 224, 113, 133, 25, 136, 143, 93, 146, 44, 96, 150, 145, 202, 102, 135, + 32, 215, 241, 150, 139, 34, 202, 208, 10, 22, 82, 249, 79, 99, 189, 174, 240, 36, 254, 8, 142, 86, 148, + 159, 197, 235, 34, 146, 99, 94, 242, 45, 195, 178, 23, 164, 245, 32, 52, 46, 244, 233, 217, 79, 87, 159, + 48, 138, 69, 34, 93, 1, 171, 191, 56, 47, 90, 26, 215, 139, 248, 194, 138, 99, 138, 197, 145, 102, 230, + 180, 96, 115, 32, 156, 204, 162, 180, 82, 191, 5, 92, 199, 107, 79, 253, 207, 37, 46, 160, 158, 28, 136, + 240, 145, 103, 52, 186, 250, 170, 142, 123, 8, 101, 182, 242, 41, 29, 206, 115, 196, 62, 163, 29, 191, 58, + 19, 157, 99, 239, 228, 138, 33, 233, 177, 189, 137, 60, 6, 228, 52, 139, 213, 125, 36, 47, 165, 175, 168, + 111, 215, 212, 92, 94, 26, 195, 247, 83, 154, 66, 237, 232, 156, 250, 192, 162, 156, 113, 23, 10, 219, 72, + 195, 148, 19, 0, 56, 27, 78, 147, 62, 141, 184, 253, 244, 129, 60, 218, 69, 175, 229, 72, 199, 198, 234, + 37, 137, 38, 20, 153, 46, 216, 145, 51, 164, 141, 43, 219, 11, 151, 40, 242, 225, 116, 227, 110, 192, 132, + 97, 39, 120, 222, 133, 187, 122, 136, 67, 206, 92, 226, 184, 132, 75, 88, 194, 235, 13, 8, 10, 46, 87, + 54, 101, 36, 249, 89, 4, 164, 89, 106, 167, 141, 125, 159, 7, 14, 221, 42, 36, 60, 51, 20, 148, 145, + 2, 80, 243, 99, 60, 73, 220, 146, 217, 130, 167, 230, 235, 170, 49, 11, 169, 39, 214, 242, 44, 202, 32, + 117, 126, 20, 3, 155, 147, 142, 21, 210, 35, 192, 151, 226, 199, 14, 72, 84, 219, 49, 133, 122, 93, 186, + 38, 77, 32, 242, 12, 154, 188, 182, 215, 52, 10, 92, 149, 115, 79, 13, 131, 24, 110, 158, 76, 110, 98, + 126, 85, 241, 130, 27, 71, 40, 60, 112, 18, 201, 63, 215, 235, 42, 190, 155, 35, 204, 114, 
138, 199, 175, + 169, 183, 0, 231, 193, 171, 207, 217, 58, 158, 254, 113, 32, 208, 106, 31, 223, 64, 1, 81, 16, 89, 161, + 51, 212, 158, 20, 179, 95, 248, 28, 189, 89, 89, 83, 71, 191, 186, 233, 157, 18, 171, 160, 49, 74, 145, + 155, 194, 202, 195, 86, 45, 60, 183, 156, 22, 145, 188, 182, 202, 176, 160, 78, 138, 124, 119, 25, 252, 79, + 16, 113, 97, 248, 79, 84, 60, 107, 47, 75, 197, 187, 138, 1, 2, 114, 184, 226, 10, 166, 88, 71, 93, + 10, 119, 15, 129, 151, 119, 50, 62, 142, 134, 211, 108, 0, 93, 254, 235, 178, 65, 108, 138, 184, 67, 124, + 250, 149, 45, 228, 168, 14, 237, 39, 193, 119, 35, 14, 144, 88, 225, 208, 72, 170, 182, 197, 14, 133, 117, + 197, 247, 121, 125, 134, 149, 53, 62, 148, 169, 131, 42, 193, 169, 87, 110, 192, 38, 26, 31, 133, 198, 79, + 94, 42, 233, 51, 133, 184, 155, 47, 136, 103, 15, 252, 19, 115, 97, 16, 122, 24, 56, 50, 170, 153, 22, + 126, 149, 108, 86, 124, 111, 159, 120, 206, 57, 31, 53, 0, 136, 6, 19, 234, 222, 235, 112, 209, 29, 4, + 171, 251, 56, 27, 132, 197, 137, 55, 187, 40, 50, 90, 101, 64, 169, 74, 241, 33, 73, 172, 167, 148, 183, + 59, 141, 84, 163, 215, 19, 211, 188, 108, 218, 145, 42, 240, 207, 73, 47, 104, 146, 68, 165, 127, 3, 119, + 46, 252, 193, 104, 39, 196, 64, 73, 19, 71, 61, 16, 135, 199, 47, 138, 142, 208, 74, 242, 76, 17, 84, + 61, 208, 53, 145, 190, 32, 149, 160, 35, 161, 101, 60, 59, 69, 66, 41, 193, 147, 177, 146, 227, 236, 16, + 17, 185, 104, 17, 192, 167, 82, 6, 172, 5, 87, 16, 76, 21, 102, 144, 164, 183, 69, 37, 120, 208, 20, + 78, 41, 149, 9, 176, 83, 94, 33, 144, 7, 158, 128, 86, 24, 121, 101, 37, 98, 70, 180, 214, 31, 66, + 252, 124, 54, 194, 79, 202, 126, 245, 49, 90, 32, 204, 90, 61, 171, 170, 52, 5, 1, 121, 93, 78, 221, + 134, 75, 114, 232, 176, 168, 182, 123, 210, 230, 9, 208, 225, 121, 103, 121, 58, 151, 19, 127, 231, 81, 71, + 158, 197, 55, 194, 9, 40, 121, 206, 239, 5, 64, 55, 98, 120, 183, 187, 164, 160, 138, 213, 193, 52, 67, + 72, 6, 81, 176, 42, 192, 170, 89, 231, 240, 165, 226, 178, 196, 221, 128, 
39, 10, 196, 188, 181, 64, 129, + 46, 121, 5, 14, 74, 4, 164, 168, 130, 22, 174, 162, 172, 121, 155, 180, 101, 84, 54, 94, 34, 71, 87, + 138, 210, 223, 231, 126, 76, 194, 187, 37, 118, 99, 133, 77, 218, 113, 75, 198, 130, 21, 230, 4, 231, 184, + 150, 213, 251, 3, 2, 34, 10, 72, 116, 77, 201, 2, 117, 239, 99, 211, 47, 9, 97, 53, 210, 113, 248, + 218, 109, 246, 119, 254, 94, 11, 30, 121, 169, 29, 127, 62, 74, 12, 0, 117, 104, 107, 86, 241, 180, 154, + 10, 83, 31, 140, 109, 241, 31, 131, 68, 184, 95, 16, 102, 190, 248, 32, 88, 235, 54, 249, 179, 177, 59, + 38, 55, 6, 109, 173, 91, 105, 86, 117, 60, 102, 56, 3, 17, 155, 187, 133, 245, 48, 13, 122, 205, 97, + 207, 11, 221, 5, 128, 21, 253, 132, 51, 183, 15, 201, 179, 106, 206, 196, 1, 68, 89, 82, 147, 139, 212, + 1, 121, 206, 22, 149, 27, 208, 137, 74, 139, 107, 140, 197, 111, 84, 224, 94, 179, 218, 2, 217, 77, 62, + 54, 71, 145, 245, 195, 141, 31, 127, 210, 44, 167, 202, 229, 0, 167, 215, 102, 24, 244, 189, 40, 5, 182, + 167, 26, 127, 207, 45, 122, 184, 29, 178, 92, 111, 152, 97, 72, 148, 181, 75, 138, 17, 28, 215, 155, 147, + 155, 73, 67, 86, 67, 140, 177, 185, 145, 140, 14, 181, 56, 210, 234, 57, 235, 234, 137, 36, 50, 110, 251, + 117, 211, 97, 138, 174, 189, 133, 26, 216, 155, 108, 209, 249, 52, 9, 50, 112, 150, 164, 235, 105, 144, 115, + 238, 191, 69, 108, 238, 152, 79, 22, 162, 181, 41, 222, 252, 124, 241, 125, 106, 122, 166, 36, 234, 4, 86, + 202, 2, 42, 204, 139, 16, 253, 108, 180, 210, 6, 162, 108, 48, 0, 27, 184, 200, 13, 86, 181, 52, 216, + 55, 118, 124, 228, 38, 6, 252, 20, 148, 79, 130, 179, 208, 83, 8, 8, 108, 223, 240, 101, 135, 97, 94, + 168, 160, 198, 227, 53, 251, 62, 173, 186, 247, 214, 238, 109, 235, 84, 140, 52, 206, 175, 17, 46, 148, 237, + 142, 177, 149, 80, 66, 235, 229, 152, 94, 173, 1, 254, 11, 53, 180, 130, 16, 107, 214, 1, 204, 13, 197, + 123, 6, 19, 85, 204, 25, 38, 220, 231, 213, 95, 251, 123, 172, 239, 164, 19, 130, 162, 189, 117, 214, 47, + 73, 219, 152, 168, 138, 83, 48, 67, 110, 253, 
112, 219, 79, 245, 76, 178, 227, 115, 83, 222, 181, 159, 131, + 150, 82, 154, 25, 18, 190, 47, 117, 110, 189, 40, 73, 53, 70, 238, 233, 69, 203, 121, 104, 125, 21, 63, + 69, 217, 80, 36, 164, 190, 165, 138, 48, 20, 188, 82, 38, 144, 214, 226, 253, 10, 229, 49, 221, 205, 141, + 15, 182, 227, 16, 4, 80, 220, 120, 225, 90, 99, 71, 73, 67, 136, 44, 46, 137, 194, 23, 113, 252, 85, + 76, 152, 239, 152, 69, 200, 243, 56, 201, 3, 6, 191, 94, 105, 232, 89, 156, 44, 118, 104, 71, 117, 2, + 43, 219, 142, 79, 229, 88, 4, 110, 170, 139, 162, 94, 223, 60, 53, 224, 131, 108, 228, 63, 251, 156, 235, + 3, 9, 194, 108, 37, 212, 161, 172, 46, 172, 178, 172, 203, 1, 180, 219, 39, 173, 2, 198, 33, 50, 199, + 20, 214, 242, 61, 21, 138, 217, 17, 101, 67, 245, 215, 16, 91, 8, 25, 222, 222, 82, 38, 113, 232, 125, + 208, 189, 71, 11, 106, 143, 127, 138, 36, 229, 58, 74, 127, 132, 162, 33, 162, 144, 45, 95, 26, 172, 83, + 247, 65, 148, 235, 243, 220, 98, 54, 88, 3, 203, 179, 196, 77, 8, 183, 58, 64, 0, 100, 122, 183, 142, + 154, 133, 62, 105, 240, 213, 172, 60, 73, 59, 109, 64, 81, 43, 81, 238, 251, 164, 171, 103, 32, 62, 50, + 75, 126, 192, 161, 188, 4, 81, 85, 12, 14, 227, 104, 143, 130, 246, 24, 109, 123, 236, 21, 197, 14, 118, + 241, 1, 137, 162, 197, 5, 117, 121, 239, 199, 163, 52, 220, 16, 180, 72, 28, 248, 25, 168, 151, 128, 15, + 235, 249, 124, 35, 90, 24, 91, 123, 49, 4, 249, 191, 32, 197, 102, 189, 55, 241, 78, 68, 67, 213, 14, + 72, 44, 10, 183, 37, 71, 248, 54, 140, 134, 243, 237, 29, 10, 212, 21, 238, 139, 37, 24, 181, 211, 221, + 88, 184, 127, 95, 56, 35, 63, 202, 129, 247, 77, 193, 192, 86, 195, 153, 226, 95, 56, 1, 149, 226, 216, + 249, 137, 129, 225, 175, 157, 122, 206, 110, 164, 241, 54, 17, 137, 56, 28, 144, 179, 80, 19, 39, 59, 6, + 245, 185, 35, 116, 211, 210, 249, 163, 73, 83, 48, 123, 63, 189, 65, 53, 156, 115, 41, 104, 22, 205, 151, + 86, 168, 3, 122, 116, 213, 147, 241, 193, 65, 173, 77, 211, 4, 192, 9, 176, 199, 217, 102, 98, 160, 158, + 57, 14, 17, 95, 190, 68, 
204, 133, 15, 188, 131, 204, 109, 104, 110, 50, 41, 89, 155, 213, 98, 248, 55, + 155, 239, 24, 35, 235, 197, 170, 241, 144, 203, 133, 207, 207, 215, 219, 57, 21, 98, 33, 94, 38, 87, 194, + 26, 147, 192, 169, 51, 60, 191, 203, 26, 70, 54, 146, 55, 251, 155, 2, 120, 207, 197, 24, 32, 223, 136, + 50, 150, 162, 233, 157, 60, 226, 82, 162, 134, 129, 48, 90, 241, 253, 101, 230, 160, 94, 156, 138, 39, 221, + 190, 133, 158, 247, 106, 121, 143, 117, 20, 213, 75, 211, 187, 70, 227, 91, 8, 193, 24, 153, 47, 212, 186, + 17, 253, 75, 250, 7, 159, 213, 41, 226, 166, 58, 132, 173, 150, 102, 228, 57, 36, 66, 177, 0, 84, 212, + 114, 111, 15, 215, 223, 10, 195, 79, 227, 6, 66, 30, 35, 29, 18, 160, 150, 230, 159, 7, 204, 243, 142, + 232, 117, 138, 186, 153, 104, 182, 69, 83, 142, 173, 206, 173, 127, 91, 156, 191, 24, 13, 214, 241, 223, 96, + 222, 16, 179, 106, 152, 140, 102, 75, 91, 242, 117, 154, 206, 106, 127, 173, 203, 206, 10, 209, 146, 136, 67, + 156, 226, 59, 123, 25, 130, 209, 69, 58, 37, 230, 27, 251, 193, 74, 93, 87, 152, 221, 130, 98, 82, 77, + 189, 86, 167, 19, 201, 81, 94, 247, 247, 233, 198, 126, 172, 81, 126, 45, 178, 152, 105, 192, 227, 58, 108, + 117, 151, 9, 220, 80, 134, 233, 214, 104, 136, 153, 198, 30, 233, 33, 229, 200, 62, 248, 18, 9, 73, 60, + 33, 105, 208, 223, 18, 104, 209, 244, 197, 107, 240, 37, 197, 13, 108, 106, 56, 22, 109, 183, 59, 150, 238, + 16, 12, 149, 138, 155, 234, 219, 57, 105, 251, 86, 197, 141, 47, 170, 188, 73, 237, 73, 247, 88, 124, 184, + 67, 57, 218, 237, 174, 252, 0, 236, 31, 248, 225, 54, 222, 85, 59, 11, 64, 32, 142, 131, 44, 33, 55, + 53, 29, 109, 46, 22, 85, 238, 101, 188, 229, 249, 182, 194, 91, 106, 54, 192, 242, 66, 73, 142, 141, 117, + 5, 188, 45, 216, 112, 230, 131, 227, 126, 192, 153, 141, 83, 212, 117, 240, 33, 6, 33, 27, 203, 27, 100, + 18, 95, 150, 208, 161, 166, 30, 110, 197, 14, 94, 113, 52, 162, 159, 42, 136, 179, 152, 34, 78, 28, 170, + 146, 134, 198, 203, 52, 240, 7, 241, 130, 230, 75, 180, 228, 250, 220, 93, 170, 165, 
218, 193, 181, 220, 160, + 110, 199, 206, 116, 117, 5, 107, 2, 193, 164, 184, 135, 25, 42, 45, 86, 166, 232, 134, 103, 126, 254, 177, + 63, 138, 79, 184, 208, 55, 15, 187, 49, 37, 122, 209, 3, 26, 107, 80, 207, 115, 137, 89, 26, 251, 138, + 140, 132, 127, 24, 254, 79, 124, 234, 123, 49, 16, 30, 121, 102, 99, 130, 198, 140, 75, 158, 189, 69, 31, + 223, 153, 112, 43, 24, 232, 170, 19, 239, 135, 167, 18, 250, 32, 248, 21, 46, 237, 238, 129, 194, 124, 80, + 237, 178, 186, 248, 22, 193, 145, 170, 154, 120, 185, 199, 88, 133, 238, 3, 20, 123, 21, 82, 144, 202, 66, + 100, 123, 82, 225, 48, 239, 149, 78, 157, 81, 16, 61, 59, 58, 178, 99, 163, 239, 105, 195, 224, 66, 126, + 43, 129, 178, 206, 36, 76, 132, 31, 28, 116, 229, 19, 64, 50, 100, 112, 235, 162, 101, 68, 66, 51, 236, + 17, 199, 73, 137, 149, 237, 231, 174, 134, 7, 246, 25, 74, 8, 64, 28, 230, 86, 176, 116, 89, 205, 140, + 42, 245, 17, 188, 105, 197, 40, 207, 32, 248, 31, 54, 127, 178, 171, 123, 159, 25, 106, 212, 123, 170, 158, + 220, 191, 112, 202, 225, 39, 133, 30, 92, 50, 151, 46, 147, 169, 129, 179, 72, 186, 250, 243, 3, 122, 16, + 201, 118, 8, 159, 83, 8, 45, 12, 36, 167, 249, 65, 186, 159, 209, 252, 122, 123, 71, 57, 155, 217, 244, + 185, 153, 132, 31, 195, 229, 2, 225, 141, 203, 31, 113, 79, 22, 37, 15, 36, 204, 200, 177, 65, 162, 250, + 235, 128, 134, 42, 252, 129, 253, 81, 222, 230, 237, 156, 213, 39, 186, 251, 86, 231, 87, 203, 16, 134, 98, + 157, 194, 117, 67, 231, 75, 9, 233, 236, 66, 9, 67, 116, 137, 155, 142, 194, 185, 18, 184, 150, 170, 223, + 184, 147, 170, 67, 67, 130, 67, 155, 226, 52, 76, 111, 65, 185, 15, 191, 148, 212, 145, 210, 242, 124, 165, + 42, 242, 137, 225, 110, 68, 113, 197, 213, 193, 176, 80, 12, 5, 223, 197, 227, 118, 85, 35, 144, 102, 71, + 110, 137, 94, 74, 132, 245, 156, 100, 201, 181, 205, 41, 200, 36, 34, 48, 94, 154, 228, 229, 207, 63, 9, + 248, 213, 167, 133, 189, 75, 115, 57, 29, 152, 163, 119, 188, 26, 208, 157, 32, 59, 219, 214, 8, 96, 140, + 51, 16, 251, 170, 23, 30, 26, 0, 
164, 153, 45, 63, 112, 150, 121, 254, 112, 250, 168, 124, 114, 70, 50, + 178, 232, 239, 77, 23, 169, 224, 208, 41, 187, 157, 30, 38, 2, 205, 187, 86, 134, 120, 87, 207, 1, 245, + 167, 98, 6, 156, 153, 76, 228, 246, 198, 170, 142, 244, 205, 98, 240, 68, 209, 151, 12, 39, 59, 147, 157, + 155, 111, 144, 172, 213, 135, 159, 183, 32, 48, 141, 8, 126, 235, 16, 17, 95, 250, 47, 28, 24, 56, 211, + 74, 118, 40, 196, 159, 249, 151, 241, 239, 20, 2, 52, 207, 233, 46, 89, 41, 107, 11, 157, 246, 55, 27, + 25, 228, 34, 42, 106, 166, 10, 247, 193, 6, 119, 109, 42, 28, 95, 48, 101, 38, 223, 96, 29, 9, 50, + 197, 233, 191, 188, 93, 181, 36, 55, 1, 22, 205, 120, 220, 72, 26, 175, 22, 187, 240, 24, 58, 194, 102, + 214, 63, 9, 106, 120, 237, 150, 25, 143, 240, 233, 91, 220, 146, 95, 226, 33, 102, 224, 78, 151, 108, 183, + 225, 156, 181, 54, 178, 203, 177, 20, 208, 63, 57, 120, 48, 59, 19, 171, 224, 73, 139, 42, 206, 65, 95, + 43, 23, 155, 47, 252, 169, 214, 142, 221, 155, 123, 150, 164, 53, 49, 244, 228, 120, 159, 78, 0, 108, 117, + 161, 186, 192, 162, 133, 11, 195, 60, 139, 101, 149, 218, 106, 120, 235, 107, 85, 104, 25, 123, 212, 67, 55, + 191, 7, 127, 142, 223, 222, 200, 218, 5, 227, 237, 5, 90, 13, 219, 141, 11, 222, 150, 202, 169, 193, 105, + 195, 126, 134, 130, 45, 238, 58, 180, 94, 140, 252, 43, 103, 20, 238, 73, 69, 111, 134, 124, 174, 77, 111, + 5, 58, 115, 188, 27, 248, 214, 73, 23, 72, 231, 69, 61, 141, 177, 173, 124, 159, 238, 27, 236, 18, 43, + 236, 138, 116, 16, 42, 19, 34, 69, 94, 128, 210, 146, 249, 181, 157, 209, 171, 140, 158, 93, 183, 89, 145, + 239, 244, 6, 164, 245, 129, 233, 10, 187, 239, 246, 26, 56, 32, 70, 207, 3, 187, 121, 58, 69, 87, 213, + 238, 245, 102, 227, 116, 246, 192, 62, 24, 162, 25, 127, 67, 33, 74, 6, 215, 199, 87, 193, 24, 163, 39, + 48, 51, 159, 217, 27, 220, 53, 199, 170, 116, 25, 145, 223, 137, 249, 245, 2, 225, 116, 94, 20, 252, 189, + 89, 57, 94, 82, 163, 48, 119, 235, 204, 202, 33, 155, 176, 53, 131, 254, 82, 106, 160, 74, 95, 17, 157, + 
128, 212, 26, 231, 225, 10, 67, 84, 139, 63, 147, 31, 61, 123, 177, 239, 66, 198, 62, 210, 19, 40, 243, + 145, 160, 245, 251, 243, 52, 134, 49, 111, 29, 135, 90, 166, 157, 89, 222, 44, 126, 29, 139, 196, 73, 13, + 126, 31, 143, 227, 84, 10, 91, 36, 43, 239, 208, 10, 108, 248, 136, 139, 127, 61, 51, 180, 75, 118, 77, + 222, 79, 226, 72, 174, 252, 25, 15, 241, 51, 168, 254, 49, 106, 167, 164, 217, 48, 203, 251, 93, 74, 179, + 51, 235, 229, 77, 242, 79, 195, 193, 207, 89, 121, 201, 179, 61, 182, 116, 188, 4, 210, 249, 56, 178, 95, + 200, 201, 184, 137, 163, 113, 28, 33, 243, 204, 243, 7, 178, 57, 239, 195, 105, 124, 162, 231, 201, 202, 9, + 78, 17, 245, 211, 85, 42, 192, 205, 212, 141, 90, 121, 237, 12, 192, 245, 46, 4, 250, 33, 221, 253, 78, + 30, 170, 196, 25, 147, 102, 143, 16, 167, 247, 79, 93, 175, 70, 243, 77, 135, 211, 127, 190, 130, 53, 230, + 81, 67, 246, 111, 84, 230, 183, 213, 14, 58, 57, 32, 67, 40, 44, 242, 235, 73, 169, 110, 46, 213, 35, + 33, 21, 62, 137, 58, 230, 227, 166, 55, 28, 158, 222, 106, 163, 64, 132, 55, 132, 244, 59, 131, 24, 160, + 249, 60, 176, 192, 21, 66, 128, 191, 177, 91, 25, 158, 168, 7, 49, 174, 182, 90, 3, 212, 70, 90, 250, + 55, 10, 234, 26, 34, 190, 219, 228, 169, 182, 130, 31, 113, 81, 3, 3, 49, 53, 38, 99, 182, 77, 173, + 148, 110, 174, 180, 180, 240, 16, 253, 223, 173, 23, 155, 29, 71, 44, 188, 227, 11, 233, 14, 58, 193, 60, + 25, 192, 187, 23, 171, 115, 14, 33, 218, 103, 135, 8, 157, 250, 218, 61, 199, 77, 77, 73, 214, 211, 191, + 165, 96, 118, 241, 113, 203, 224, 12, 47, 115, 105, 140, 15, 73, 8, 195, 119, 92, 231, 198, 161, 238, 40, + 89, 108, 30, 52, 222, 146, 189, 8, 238, 191, 243, 156, 214, 245, 107, 198, 132, 53, 7, 115, 169, 130, 67, + 40, 39, 240, 218, 225, 33, 15, 182, 52, 97, 87, 134, 19, 167, 145, 5, 139, 27, 250, 102, 160, 201, 150, + 13, 180, 51, 55, 40, 160, 212, 247, 126, 245, 114, 164, 25, 180, 174, 179, 221, 251, 226, 9, 129, 71, 79, + 140, 79, 73, 224, 78, 205, 15, 131, 174, 211, 154, 173, 171, 254, 10, 224, 
77, 145, 212, 247, 132, 99, 4, + 213, 43, 68, 228, 49, 70, 142, 161, 65, 137, 113, 134, 105, 169, 144, 66, 98, 58, 183, 56, 71, 234, 211, + 199, 26, 21, 38, 189, 198, 236, 221, 163, 177, 230, 96, 67, 175, 63, 122, 209, 232, 226, 127, 224, 229, 40, + 205, 120, 235, 82, 54, 106, 179, 246, 66, 170, 139, 241, 87, 185, 12, 38, 222, 138, 181, 150, 7, 82, 16, + 36, 172, 155, 10, 68, 219, 184, 17, 243, 249, 188, 62, 228, 90, 120, 248, 73, 40, 75, 180, 46, 170, 215, + 139, 196, 59, 182, 142, 40, 151, 149, 175, 172, 38, 69, 134, 166, 196, 111, 63, 37, 152, 33, 58, 39, 34, + 63, 238, 202, 220, 4, 54, 63, 168, 144, 76, 213, 111, 185, 235, 132, 165, 122, 133, 119, 132, 238, 94, 154, + 174, 178, 38, 59, 76, 214, 53, 99, 109, 207, 9, 228, 148, 128, 101, 169, 91, 39, 60, 186, 22, 45, 56, + 22, 125, 211, 81, 14, 191, 181, 122, 76, 63, 196, 181, 128, 61, 115, 34, 155, 122, 121, 127, 247, 247, 177, + 134, 214, 4, 119, 177, 251, 5, 197, 56, 141, 252, 129, 97, 22, 87, 231, 28, 166, 104, 114, 189, 149, 89, + 226, 131, 24, 252, 150, 13, 211, 225, 106, 132, 139, 33, 196, 70, 243, 49, 64, 215, 182, 34, 200, 61, 89, + 160, 131, 2, 66, 81, 162, 174, 176, 190, 202, 236, 50, 184, 66, 136, 210, 15, 178, 12, 251, 96, 129, 208, + 42, 249, 241, 42, 26, 74, 141, 51, 155, 131, 29, 27, 173, 179, 156, 89, 51, 35, 96, 218, 140, 112, 169, + 164, 73, 221, 252, 230, 108, 92, 61, 250, 160, 165, 70, 138, 36, 38, 250, 87, 224, 201, 84, 152, 216, 132, + 215, 163, 137, 105, 81, 82, 196, 239, 14, 222, 234, 74, 233, 142, 194, 12, 163, 214, 184, 251, 162, 124, 212, + 1, 43, 187, 138, 36, 113, 217, 24, 232, 148, 140, 90, 101, 92, 31, 166, 155, 227, 58, 111, 99, 249, 244, + 136, 108, 102, 70, 10, 58, 210, 73, 133, 162, 70, 254, 38, 149, 239, 154, 222, 234, 100, 64, 200, 109, 199, + 27, 77, 141, 239, 122, 212, 165, 213, 216, 158, 192, 83, 25, 186, 239, 4, 156, 69, 104, 4, 81, 226, 148, + 61, 210, 128, 185, 226, 220, 22, 47, 215, 163, 177, 232, 228, 196, 54, 22, 33, 250, 77, 156, 78, 31, 37, + 87, 19, 114, 207, 19, 
153, 45, 118, 161, 126, 117, 154, 228, 42, 70, 88, 124, 233, 142, 184, 206, 206, 190, + 151, 60, 24, 46, 185, 176, 202, 180, 87, 251, 208, 13, 9, 84, 105, 190, 186, 37, 174, 35, 2, 198, 135, + 174, 70, 253, 210, 215, 214, 79, 53, 97, 86, 94, 208, 134, 28, 89, 170, 198, 9, 103, 132, 40, 40, 215, + 242, 75, 114, 70, 8, 163, 220, 207, 181, 54, 70, 11, 10, 238, 84, 177, 151, 143, 16, 16, 197, 217, 64, + 155, 151, 197, 247, 121, 179, 235, 51, 28, 173, 99, 202, 85, 197, 116, 89, 55, 196, 223, 144, 99, 128, 1, + 246, 36, 107, 65, 179, 127, 181, 252, 164, 147, 147, 245, 136, 59, 212, 120, 171, 111, 165, 135, 172, 214, 164, + 184, 154, 183, 12, 157, 205, 190, 193, 110, 53, 95, 127, 182, 177, 183, 202, 213, 101, 19, 224, 26, 164, 86, + 211, 139, 148, 93, 237, 127, 238, 100, 237, 46, 217, 18, 57, 78, 32, 125, 167, 130, 16, 202, 44, 207, 81, + 206, 140, 183, 95, 152, 28, 76, 119, 83, 248, 230, 15, 139, 212, 91, 242, 185, 34, 173, 147, 167, 30, 133, + 110, 173, 57, 220, 202, 220, 135, 196, 163, 47, 174, 5, 151, 232, 119, 28, 37, 36, 136, 150, 91, 205, 191, + 112, 189, 128, 119, 111, 169, 104, 224, 106, 243, 198, 53, 108, 138, 221, 84, 160, 211, 232, 168, 184, 120, 231, + 16, 248, 138, 90, 107, 54, 248, 33, 54, 65, 185, 206, 26, 186, 112, 97, 113, 84, 94, 227, 233, 182, 148, + 51, 4, 14, 198, 28, 243, 4, 33, 81, 5, 146, 103, 248, 225, 50, 247, 219, 79, 248, 74, 53, 176, 201, + 214, 136, 12, 34, 147, 251, 113, 11, 118, 146, 130, 226, 106, 85, 56, 166, 40, 104, 115, 151, 124, 4, 130, + 220, 229, 84, 115, 8, 219, 197, 95, 161, 118, 94, 166, 213, 62, 46, 142, 121, 153, 79, 221, 97, 112, 131, + 66, 174, 18, 33, 154, 221, 45, 46, 105, 249, 159, 135, 107, 186, 135, 15, 184, 84, 213, 37, 33, 64, 223, + 166, 223, 35, 55, 187, 68, 23, 208, 161, 103, 12, 28, 73, 158, 248, 75, 243, 7, 81, 177, 39, 109, 155, + 66, 8, 18, 237, 119, 199, 19, 34, 188, 22, 123, 96, 86, 9, 140, 49, 216, 160, 227, 245, 27, 161, 215, + 112, 59, 149, 141, 56, 228, 41, 146, 190, 107, 146, 147, 243, 39, 228, 229, 49, 
22, 175, 128, 185, 201, 62, + 190, 11, 195, 202, 127, 172, 157, 94, 30, 140, 48, 175, 41, 227, 244, 166, 97, 238, 228, 153, 137, 19, 136, + 215, 237, 41, 164, 72, 216, 85, 143, 245, 86, 149, 105, 43, 115, 251, 174, 19, 236, 158, 186, 225, 124, 38, + 167, 215, 39, 58, 218, 248, 22, 92, 251, 240, 175, 221, 132, 116, 54, 201, 106, 172, 41, 61, 221, 83, 52, + 206, 154, 90, 195, 218, 238, 16, 36, 124, 216, 75, 146, 68, 152, 192, 176, 59, 75, 62, 214, 55, 79, 225, + 129, 53, 205, 30, 52, 120, 16, 105, 245, 177, 45, 163, 102, 243, 227, 86, 12, 21, 123, 85, 85, 92, 11, + 25, 155, 198, 106, 199, 108, 35, 103, 223, 5, 153, 54, 149, 42, 152, 151, 141, 45, 79, 115, 168, 158, 74, + 1, 113, 68, 26, 63, 79, 120, 215, 192, 106, 228, 125, 169, 216, 136, 83, 189, 212, 154, 72, 18, 22, 154, + 197, 61, 18, 129, 223, 110, 74, 237, 179, 38, 146, 135, 19, 25, 197, 220, 20, 202, 26, 239, 1, 153, 101, + 22, 154, 4, 213, 36, 73, 111, 216, 190, 227, 181, 176, 39, 152, 97, 224, 124, 154, 89, 126, 73, 2, 157, + 239, 41, 246, 35, 185, 212, 187, 53, 77, 29, 16, 244, 130, 169, 6, 130, 224, 237, 88, 81, 33, 222, 222, + 186, 237, 216, 63, 1, 71, 130, 182, 137, 24, 13, 92, 109, 86, 119, 179, 129, 182, 198, 229, 35, 250, 69, + 93, 88, 133, 243, 169, 247, 141, 153, 106, 29, 102, 127, 108, 91, 230, 53, 235, 139, 14, 178, 165, 28, 102, + 44, 151, 227, 236, 59, 149, 68, 44, 65, 125, 135, 198, 222, 207, 203, 51, 188, 210, 249, 53, 229, 247, 18, + 119, 111, 232, 120, 17, 82, 22, 54, 196, 160, 182, 196, 26, 29, 186, 46, 22, 99, 220, 253, 191, 148, 140, + 121, 238, 135, 207, 129, 68, 43, 142, 212, 98, 55, 159, 141, 137, 19, 149, 58, 241, 23, 47, 73, 243, 37, + 172, 34, 138, 122, 7, 117, 83, 152, 215, 218, 202, 61, 46, 168, 79, 55, 207, 43, 168, 50, 211, 220, 204, + 194, 75, 179, 24, 132, 85, 190, 185, 123, 116, 76, 69, 228, 203, 88, 42, 241, 107, 103, 117, 222, 62, 95, + 85, 14, 42, 219, 27, 54, 32, 245, 2, 32, 37, 104, 138, 248, 31, 188, 90, 245, 200, 41, 218, 232, 24, + 106, 187, 187, 203, 212, 21, 158, 
137, 147, 170, 253, 98, 214, 76, 29, 234, 110, 95, 234, 67, 89, 197, 92, + 148, 230, 171, 161, 4, 13, 188, 37, 92, 24, 244, 137, 146, 89, 116, 72, 63, 177, 59, 141, 86, 228, 222, + 99, 91, 199, 50, 192, 161, 79, 228, 70, 16, 64, 127, 41, 21, 132, 167, 209, 11, 235, 106, 32, 70, 196, + 179, 27, 40, 246, 195, 9, 89, 37, 33, 89, 92, 33, 208, 176, 144, 150, 144, 110, 102, 36, 37, 131, 143, + 32, 62, 226, 116, 4, 106, 6, 100, 156, 56, 202, 231, 83, 138, 22, 42, 147, 111, 229, 209, 99, 55, 44, + 72, 162, 254, 182, 1, 158, 132, 27, 18, 141, 208, 7, 189, 207, 244, 186, 129, 191, 47, 220, 144, 87, 151, + 240, 38, 40, 34, 168, 220, 221, 7, 30, 81, 21, 121, 162, 112, 105, 101, 234, 9, 237, 65, 199, 160, 229, + 227, 197, 217, 177, 12, 52, 20, 48, 159, 79, 239, 84, 144, 224, 67, 178, 140, 97, 100, 229, 247, 121, 68, + 248, 112, 40, 219, 25, 98, 240, 229, 87, 60, 239, 130, 138, 238, 205, 142, 221, 140, 83, 99, 166, 199, 183, + 135, 4, 36, 130, 66, 81, 171, 219, 227, 67, 237, 147, 50, 49, 223, 65, 82, 147, 9, 26, 157, 130, 112, + 93, 3, 36, 74, 106, 58, 241, 205, 91, 35, 137, 148, 201, 81, 4, 73, 17, 232, 30, 83, 175, 242, 187, + 165, 151, 164, 150, 60, 238, 196, 108, 186, 250, 96, 22, 125, 73, 28, 155, 242, 163, 173, 42, 93, 111, 197, + 18, 191, 19, 199, 180, 250, 86, 132, 243, 244, 237, 143, 144, 157, 29, 41, 182, 239, 1, 251, 152, 124, 186, + 45, 7, 88, 142, 125, 100, 82, 73, 145, 177, 201, 185, 251, 114, 131, 73, 64, 209, 111, 220, 231, 21, 60, + 8, 203, 239, 230, 103, 254, 209, 60, 57, 246, 139, 123, 109, 178, 242, 212, 138, 238, 182, 39, 181, 52, 189, + 188, 240, 49, 250, 189, 219, 184, 27, 17, 41, 253, 216, 94, 134, 98, 205, 168, 84, 105, 92, 27, 136, 181, + 140, 186, 208, 218, 119, 180, 56, 72, 0, 173, 93, 188, 111, 168, 206, 21, 249, 160, 126, 180, 21, 14, 214, + 32, 11, 183, 239, 88, 68, 120, 212, 89, 208, 109, 32, 74, 59, 48, 109, 143, 161, 187, 196, 36, 55, 89, + 111, 132, 124, 59, 112, 141, 91, 97, 254, 12, 208, 246, 190, 196, 83, 244, 223, 191, 66, 64, 236, 42, 205, 
+ 131, 105, 75, 211, 224, 201, 169, 203, 182, 48, 36, 245, 91, 118, 227, 5, 42, 36, 183, 185, 134, 224, 196, + 84, 3, 32, 162, 200, 154, 223, 99, 215, 191, 251, 227, 47, 31, 167, 35, 210, 230, 163, 224, 39, 43, 70, + 165, 213, 168, 193, 40, 122, 199, 16, 228, 11, 233, 169, 2, 112, 135, 207, 206, 158, 24, 71, 134, 246, 207, + 202, 138, 211, 79, 123, 132, 100, 12, 2, 186, 181, 227, 9, 30, 124, 195, 52, 39, 42, 53, 182, 222, 136, + 151, 118, 154, 98, 116, 155, 228, 239, 242, 197, 236, 57, 172, 188, 229, 178, 125, 151, 162, 147, 146, 115, 86, + 118, 92, 248, 57, 31, 181, 113, 185, 36, 133, 239, 143, 77, 72, 148, 122, 9, 0, 45, 83, 29, 86, 143, + 54, 112, 106, 240, 144, 57, 183, 1, 5, 1, 58, 194, 244, 155, 132, 235, 203, 26, 138, 152, 181, 23, 218, + 244, 219, 215, 234, 43, 228, 245, 27, 201, 87, 237, 187, 15, 163, 189, 3, 209, 86, 247, 252, 58, 106, 145, + 209, 133, 229, 212, 213, 21, 224, 133, 250, 95, 145, 235, 103, 37, 84, 100, 32, 225, 19, 24, 224, 22, 111, + 14, 245, 9, 99, 0, 192, 25, 111, 16, 105, 189, 212, 142, 35, 124, 215, 198, 21, 199, 155, 40, 98, 17, + 105, 119, 236, 71, 84, 50, 189, 57, 115, + }; + uint8_t data[18 * 2 * 16 * 32] = { + 116, 147, 48, 65, 40, 100, 194, 33, 93, 130, 44, 108, 251, 49, 249, 225, 210, 23, 163, 80, 144, 249, 202, + 157, 185, 38, 167, 187, 87, 122, 245, 235, 60, 192, 171, 182, 79, 111, 136, 246, 129, 40, 49, 28, 118, 8, + 240, 99, 87, 125, 140, 65, 77, 100, 210, 196, 81, 254, 202, 173, 106, 169, 44, 81, 124, 16, 168, 106, 31, + 152, 153, 29, 217, 40, 55, 3, 46, 225, 195, 74, 150, 144, 250, 121, 219, 184, 110, 48, 36, 13, 129, 10, + 144, 246, 185, 245, 24, 147, 141, 177, 14, 127, 138, 123, 197, 208, 215, 161, 16, 146, 157, 112, 145, 4, 229, + 212, 81, 65, 9, 90, 77, 16, 41, 182, 250, 68, 154, 22, 136, 248, 93, 254, 225, 175, 167, 198, 205, 226, + 26, 127, 99, 33, 91, 49, 108, 160, 234, 162, 185, 13, 51, 205, 229, 238, 237, 45, 33, 244, 8, 122, 141, + 224, 6, 153, 111, 123, 12, 87, 164, 50, 225, 146, 175, 22, 248, 123, 13, 213, 128, 
17, 54, 39, 79, 83, + 81, 185, 85, 123, 174, 58, 108, 98, 82, 141, 159, 48, 247, 176, 14, 24, 25, 81, 195, 29, 252, 247, 101, + 102, 247, 88, 167, 111, 212, 7, 71, 168, 177, 236, 226, 252, 25, 176, 11, 99, 130, 194, 56, 37, 78, 147, + 86, 45, 89, 181, 229, 155, 137, 124, 100, 129, 180, 43, 242, 15, 104, 104, 143, 21, 134, 54, 229, 126, 235, + 143, 195, 87, 112, 102, 186, 176, 253, 233, 9, 60, 16, 153, 238, 6, 184, 11, 222, 168, 206, 170, 209, 89, + 195, 233, 65, 45, 251, 21, 124, 116, 166, 66, 180, 106, 11, 38, 185, 66, 173, 211, 161, 236, 163, 20, 125, + 244, 200, 73, 52, 183, 70, 249, 203, 19, 105, 80, 160, 20, 83, 82, 56, 28, 220, 244, 185, 44, 200, 6, + 20, 88, 229, 240, 105, 10, 228, 227, 42, 35, 71, 225, 154, 173, 209, 227, 193, 210, 175, 49, 61, 53, 200, + 48, 95, 62, 75, 88, 104, 25, 84, 57, 80, 203, 240, 34, 24, 182, 164, 31, 80, 124, 137, 197, 159, 252, + 97, 203, 215, 89, 243, 65, 20, 101, 38, 16, 105, 169, 141, 143, 236, 94, 204, 55, 118, 252, 180, 167, 154, + 179, 187, 216, 220, 168, 214, 68, 205, 120, 141, 114, 200, 90, 30, 147, 73, 108, 141, 211, 24, 49, 12, 178, + 196, 81, 174, 59, 54, 210, 225, 78, 222, 12, 150, 183, 0, 139, 233, 227, 25, 128, 63, 216, 135, 63, 125, + 145, 170, 92, 215, 42, 243, 82, 252, 52, 20, 100, 133, 31, 159, 167, 174, 16, 80, 4, 172, 171, 241, 64, + 221, 77, 49, 220, 191, 79, 142, 208, 248, 159, 122, 110, 205, 243, 210, 153, 89, 112, 19, 92, 20, 188, 27, + 241, 104, 168, 143, 211, 226, 210, 85, 25, 199, 155, 26, 177, 229, 15, 61, 94, 105, 10, 71, 16, 236, 109, + 67, 196, 57, 169, 216, 112, 60, 100, 106, 26, 32, 210, 167, 182, 236, 193, 12, 69, 140, 80, 160, 150, 146, + 174, 64, 242, 231, 77, 219, 142, 78, 209, 245, 203, 53, 120, 113, 143, 80, 128, 86, 34, 117, 173, 47, 52, + 236, 65, 156, 216, 29, 31, 163, 124, 180, 184, 91, 179, 120, 243, 190, 61, 184, 239, 251, 148, 206, 162, 250, + 184, 193, 102, 150, 164, 38, 229, 225, 20, 80, 66, 173, 117, 12, 75, 233, 204, 118, 97, 182, 180, 65, 254, + 156, 114, 148, 165, 126, 182, 82, 
215, 192, 30, 139, 65, 190, 185, 108, 203, 125, 87, 228, 182, 26, 124, 116, + 66, 117, 99, 253, 46, 114, 42, 156, 168, 28, 230, 111, 80, 51, 152, 104, 20, 89, 75, 64, 189, 91, 29, + 100, 128, 209, 157, 180, 231, 46, 93, 244, 61, 11, 191, 180, 118, 75, 137, 231, 31, 199, 202, 96, 153, 85, + 46, 249, 111, 176, 62, 78, 48, 29, 122, 153, 253, 88, 41, 168, 58, 212, 200, 114, 72, 234, 50, 128, 92, + 35, 185, 29, 163, 96, 234, 166, 31, 75, 203, 55, 16, 209, 203, 63, 209, 187, 238, 112, 232, 210, 94, 246, + 25, 132, 45, 186, 3, 124, 14, 218, 82, 49, 39, 72, 137, 90, 2, 233, 4, 245, 236, 98, 85, 46, 181, + 19, 189, 36, 94, 156, 72, 62, 175, 20, 219, 192, 136, 194, 36, 125, 20, 220, 11, 126, 192, 133, 154, 79, + 126, 18, 204, 193, 208, 161, 189, 18, 195, 124, 32, 121, 168, 103, 158, 5, 138, 42, 196, 129, 130, 84, 231, + 77, 231, 72, 47, 246, 127, 241, 31, 32, 55, 102, 13, 21, 206, 1, 139, 218, 195, 227, 71, 113, 243, 89, + 219, 162, 63, 212, 180, 20, 67, 82, 106, 132, 83, 88, 64, 105, 81, 62, 85, 109, 137, 52, 151, 91, 189, + 44, 243, 24, 35, 123, 251, 82, 247, 178, 9, 198, 9, 209, 106, 12, 86, 247, 252, 59, 53, 229, 66, 112, + 212, 100, 197, 179, 203, 228, 220, 2, 166, 79, 209, 26, 124, 110, 5, 71, 201, 154, 133, 188, 96, 0, 112, + 239, 30, 207, 228, 68, 111, 141, 28, 249, 46, 25, 6, 223, 93, 72, 102, 143, 102, 54, 48, 65, 142, 252, + 167, 51, 89, 210, 163, 57, 9, 212, 4, 25, 175, 53, 213, 235, 62, 222, 131, 128, 174, 10, 130, 99, 240, + 44, 25, 103, 77, 113, 73, 40, 238, 211, 49, 131, 236, 251, 74, 156, 164, 160, 201, 81, 142, 212, 232, 227, + 70, 38, 109, 213, 156, 128, 232, 78, 45, 88, 60, 208, 35, 214, 89, 242, 54, 208, 243, 18, 96, 27, 41, + 229, 11, 48, 153, 216, 251, 167, 240, 41, 218, 152, 225, 245, 21, 106, 149, 158, 215, 189, 18, 135, 1, 111, + 224, 41, 16, 156, 111, 182, 189, 87, 7, 234, 217, 84, 247, 240, 142, 4, 237, 26, 58, 4, 165, 250, 173, + 180, 157, 186, 145, 209, 230, 109, 52, 27, 168, 208, 188, 236, 208, 214, 128, 143, 174, 8, 97, 20, 131, 92, + 3, 111, 
89, 133, 46, 128, 3, 208, 226, 19, 55, 80, 233, 205, 129, 170, 242, 249, 159, 122, 132, 81, 250, + 200, 23, 79, 36, 13, 141, 139, 241, 228, 71, 101, 241, 232, 53, 172, 81, 120, 70, 187, 17, 234, 163, 146, + 126, 174, 165, 104, 204, 127, 244, 50, 250, 145, 56, 163, 172, 204, 166, 9, 162, 101, 227, 187, 13, 172, 237, + 16, 107, 207, 124, 144, 36, 31, 168, 120, 176, 82, 68, 158, 131, 118, 150, 145, 74, 93, 153, 117, 112, 185, + 49, 196, 162, 68, 219, 245, 9, 57, 117, 189, 203, 60, 10, 89, 75, 111, 145, 223, 234, 97, 210, 161, 0, + 231, 168, 182, 217, 56, 213, 28, 213, 109, 118, 73, 28, 70, 254, 215, 134, 188, 22, 219, 171, 74, 134, 59, + 89, 192, 116, 74, 118, 90, 233, 49, 108, 75, 160, 35, 239, 162, 175, 204, 99, 143, 112, 6, 206, 51, 176, + 149, 239, 70, 108, 30, 14, 96, 188, 86, 100, 252, 18, 62, 158, 48, 180, 200, 93, 221, 111, 36, 245, 14, + 215, 194, 15, 169, 142, 56, 106, 201, 142, 143, 21, 147, 50, 194, 78, 202, 47, 228, 40, 178, 99, 10, 20, + 67, 79, 27, 179, 73, 35, 54, 16, 174, 51, 18, 168, 165, 154, 49, 169, 91, 116, 188, 177, 133, 62, 159, + 145, 202, 172, 85, 191, 240, 181, 170, 219, 69, 217, 121, 56, 145, 65, 128, 21, 137, 198, 17, 33, 193, 161, + 127, 132, 166, 226, 237, 167, 211, 44, 37, 172, 74, 224, 245, 87, 41, 177, 203, 143, 36, 214, 115, 64, 234, + 76, 166, 50, 132, 118, 245, 111, 118, 242, 73, 187, 51, 230, 102, 150, 63, 138, 46, 118, 66, 147, 244, 209, + 94, 30, 243, 41, 170, 54, 81, 184, 208, 158, 34, 231, 170, 86, 29, 91, 49, 110, 87, 197, 19, 236, 125, + 164, 48, 208, 254, 72, 43, 120, 68, 197, 110, 0, 248, 123, 160, 147, 51, 69, 237, 132, 168, 102, 247, 83, + 49, 96, 111, 72, 104, 88, 144, 226, 139, 146, 9, 133, 224, 204, 45, 89, 145, 211, 102, 142, 190, 7, 2, + 76, 236, 233, 46, 80, 162, 254, 104, 62, 113, 2, 85, 166, 179, 200, 244, 47, 72, 169, 49, 222, 65, 197, + 188, 157, 48, 176, 90, 66, 189, 235, 169, 156, 240, 69, 122, 248, 129, 30, 123, 193, 224, 112, 227, 117, 218, + 103, 248, 4, 51, 149, 46, 120, 96, 17, 89, 131, 25, 82, 212, 100, 
208, 59, 217, 115, 164, 12, 160, 246, + 153, 75, 117, 193, 163, 63, 113, 164, 207, 9, 173, 23, 96, 106, 88, 54, 238, 196, 29, 56, 140, 182, 89, + 45, 218, 236, 192, 155, 6, 191, 31, 227, 177, 93, 150, 43, 162, 50, 15, 100, 121, 196, 19, 134, 76, 8, + 214, 135, 165, 73, 76, 201, 108, 105, 173, 113, 235, 144, 135, 53, 60, 47, 129, 78, 154, 85, 101, 106, 79, + 5, 222, 20, 68, 225, 73, 112, 145, 141, 151, 46, 211, 81, 48, 21, 130, 102, 30, 150, 29, 182, 92, 99, + 221, 156, 147, 7, 177, 236, 236, 40, 29, 169, 121, 221, 53, 139, 23, 151, 103, 107, 45, 57, 91, 15, 15, + 53, 177, 29, 204, 7, 22, 16, 209, 116, 135, 76, 201, 242, 219, 235, 149, 139, 239, 72, 5, 76, 209, 245, + 129, 139, 194, 191, 16, 46, 2, 252, 92, 55, 175, 12, 10, 176, 110, 225, 110, 251, 113, 4, 204, 143, 72, + 34, 40, 226, 174, 6, 102, 158, 1, 85, 183, 181, 101, 132, 249, 206, 207, 73, 20, 25, 33, 141, 229, 37, + 219, 56, 188, 159, 195, 70, 157, 49, 218, 100, 216, 253, 213, 254, 198, 227, 224, 118, 156, 166, 172, 47, 205, + 15, 157, 185, 244, 132, 61, 254, 115, 214, 88, 132, 212, 230, 220, 157, 172, 58, 210, 48, 143, 151, 196, 87, + 197, 118, 98, 185, 162, 176, 98, 219, 8, 201, 8, 13, 232, 139, 162, 137, 8, 152, 83, 165, 145, 13, 109, + 196, 59, 152, 137, 87, 148, 24, 252, 244, 52, 172, 177, 21, 248, 49, 76, 97, 175, 186, 79, 164, 134, 165, + 155, 126, 51, 25, 191, 128, 69, 55, 58, 218, 192, 153, 157, 137, 47, 26, 250, 195, 141, 229, 153, 11, 132, + 67, 235, 249, 159, 148, 146, 169, 20, 151, 122, 188, 13, 85, 201, 114, 171, 106, 51, 89, 171, 86, 54, 204, + 93, 182, 121, 197, 64, 213, 192, 221, 46, 22, 54, 126, 218, 184, 116, 9, 119, 62, 180, 131, 88, 6, 56, + 122, 253, 196, 121, 107, 2, 141, 16, 73, 252, 56, 156, 142, 94, 251, 161, 219, 214, 245, 38, 143, 162, 247, + 3, 0, 240, 43, 50, 12, 123, 162, 220, 128, 123, 55, 10, 237, 181, 63, 25, 193, 77, 247, 152, 220, 30, + 73, 244, 56, 149, 197, 88, 0, 11, 29, 249, 73, 230, 117, 18, 14, 152, 52, 250, 160, 193, 232, 184, 3, + 251, 234, 68, 223, 110, 181, 228, 
189, 26, 140, 164, 250, 77, 38, 1, 246, 209, 144, 8, 24, 151, 89, 228, + 119, 96, 181, 26, 214, 240, 224, 178, 59, 95, 221, 25, 164, 235, 121, 249, 113, 239, 78, 141, 250, 147, 70, + 230, 14, 73, 137, 106, 24, 191, 182, 234, 31, 182, 16, 188, 192, 135, 73, 203, 8, 89, 191, 225, 64, 2, + 50, 96, 141, 89, 73, 165, 84, 82, 194, 1, 148, 58, 10, 149, 199, 216, 69, 185, 228, 31, 118, 124, 24, + 13, 2, 79, 170, 75, 70, 161, 7, 237, 86, 161, 146, 49, 195, 175, 137, 85, 174, 215, 175, 61, 238, 192, + 129, 177, 123, 199, 42, 74, 192, 212, 129, 19, 153, 216, 124, 246, 244, 56, 229, 1, 207, 151, 30, 9, 177, + 144, 121, 229, 221, 36, 147, 157, 241, 73, 151, 96, 42, 250, 29, 45, 177, 111, 103, 178, 189, 17, 84, 218, + 111, 243, 52, 35, 243, 164, 44, 25, 232, 117, 63, 184, 188, 186, 141, 41, 92, 190, 119, 45, 149, 232, 108, + 28, 95, 203, 233, 77, 199, 251, 249, 62, 96, 54, 5, 128, 154, 49, 108, 160, 237, 59, 165, 186, 112, 121, + 227, 211, 174, 25, 87, 37, 217, 153, 67, 156, 144, 128, 130, 183, 62, 144, 70, 123, 147, 163, 203, 44, 254, + 185, 4, 205, 127, 129, 125, 143, 193, 95, 26, 92, 75, 68, 11, 174, 167, 209, 0, 2, 222, 235, 123, 129, + 21, 204, 143, 107, 239, 44, 10, 27, 153, 56, 48, 48, 164, 108, 74, 20, 143, 233, 98, 192, 254, 128, 226, + 46, 0, 166, 183, 15, 106, 144, 254, 65, 54, 204, 249, 29, 105, 253, 134, 148, 38, 16, 9, 3, 28, 132, + 222, 157, 65, 159, 106, 78, 107, 32, 72, 7, 206, 122, 9, 105, 21, 115, 182, 104, 228, 136, 58, 186, 167, + 179, 98, 83, 254, 5, 118, 174, 128, 144, 32, 179, 166, 213, 155, 220, 175, 79, 107, 137, 150, 117, 44, 98, + 58, 76, 7, 29, 193, 165, 124, 171, 218, 3, 210, 191, 62, 252, 98, 249, 70, 205, 149, 74, 107, 208, 57, + 72, 56, 11, 143, 190, 64, 60, 67, 52, 159, 110, 118, 133, 141, 125, 44, 1, 55, 211, 201, 122, 169, 7, + 160, 44, 134, 213, 248, 42, 95, 139, 36, 143, 103, 0, 147, 199, 59, 201, 64, 106, 141, 37, 82, 234, 245, + 76, 253, 149, 75, 131, 211, 209, 4, 68, 87, 121, 186, 216, 240, 172, 69, 19, 31, 5, 125, 177, 155, 207, + 18, 
22, 56, 240, 221, 161, 161, 75, 1, 32, 157, 104, 239, 162, 213, 118, 228, 212, 111, 157, 167, 238, 157, + 151, 22, 79, 102, 138, 53, 233, 230, 248, 181, 245, 252, 146, 70, 145, 94, 37, 66, 187, 23, 23, 155, 210, + 135, 62, 241, 166, 88, 67, 60, 180, 11, 13, 238, 198, 161, 207, 133, 126, 174, 113, 118, 247, 159, 223, 191, + 57, 222, 196, 80, 138, 173, 12, 180, 94, 233, 186, 16, 204, 8, 92, 106, 85, 90, 186, 83, 136, 248, 164, + 79, 238, 160, 177, 130, 187, 100, 202, 200, 122, 245, 18, 35, 176, 246, 5, 58, 186, 214, 162, 33, 128, 244, + 199, 152, 111, 236, 226, 113, 92, 138, 92, 68, 157, 191, 218, 41, 218, 190, 244, 184, 189, 159, 167, 11, 71, + 92, 62, 53, 195, 84, 116, 254, 32, 63, 160, 128, 21, 149, 237, 125, 80, 133, 93, 31, 184, 158, 193, 122, + 218, 152, 218, 52, 227, 76, 167, 91, 36, 242, 5, 196, 215, 173, 166, 122, 9, 244, 30, 136, 150, 244, 127, + 221, 28, 65, 252, 237, 40, 243, 162, 205, 13, 32, 125, 126, 156, 228, 244, 208, 73, 177, 148, 167, 72, 217, + 234, 184, 222, 42, 74, 187, 159, 202, 50, 248, 39, 172, 7, 116, 151, 230, 221, 36, 35, 112, 118, 30, 166, + 35, 167, 24, 24, 135, 112, 167, 41, 22, 83, 202, 80, 2, 67, 249, 1, 133, 11, 58, 64, 185, 114, 95, + 16, 53, 188, 66, 1, 59, 230, 194, 167, 25, 47, 144, 245, 59, 192, 206, 213, 50, 131, 226, 233, 87, 242, + 246, 156, 26, 189, 5, 140, 235, 41, 119, 145, 38, 175, 193, 228, 190, 204, 151, 142, 228, 47, 42, 244, 183, + 190, 215, 178, 220, 68, 33, 139, 62, 95, 64, 156, 236, 180, 106, 53, 198, 194, 20, 162, 33, 146, 100, 86, + 134, 119, 198, 40, 225, 54, 19, 220, 51, 18, 83, 45, 120, 1, 164, 146, 174, 179, 170, 247, 151, 151, 125, + 43, 35, 131, 76, 130, 251, 160, 212, 3, 57, 145, 237, 61, 66, 200, 187, 45, 236, 87, 115, 224, 171, 159, + 208, 35, 61, 14, 123, 132, 114, 181, 33, 72, 93, 106, 119, 169, 156, 78, 158, 209, 185, 243, 253, 37, 199, + 192, 90, 121, 87, 65, 161, 106, 65, 187, 81, 53, 249, 37, 182, 115, 203, 134, 169, 32, 153, 251, 223, 183, + 221, 162, 211, 182, 237, 225, 89, 7, 225, 57, 5, 56, 
139, 2, 18, 248, 163, 151, 7, 199, 218, 199, 4, + 237, 101, 46, 178, 15, 171, 49, 161, 58, 229, 16, 103, 150, 219, 159, 235, 162, 40, 22, 178, 126, 125, 201, + 253, 99, 60, 165, 195, 192, 148, 184, 72, 46, 151, 97, 206, 13, 89, 7, 145, 217, 39, 3, 226, 93, 182, + 131, 241, 63, 138, 207, 31, 212, 89, 160, 45, 194, 90, 78, 123, 58, 55, 55, 193, 170, 128, 121, 174, 94, + 76, 113, 150, 77, 97, 254, 74, 154, 68, 235, 109, 3, 205, 132, 1, 206, 140, 217, 31, 4, 67, 85, 201, + 44, 79, 223, 156, 172, 39, 242, 215, 113, 127, 33, 65, 203, 100, 213, 81, 75, 14, 236, 197, 16, 15, 49, + 241, 183, 29, 211, 202, 130, 74, 189, 233, 105, 80, 233, 60, 197, 22, 138, 141, 124, 236, 64, 109, 182, 142, + 131, 240, 95, 94, 34, 240, 220, 110, 164, 134, 208, 26, 138, 123, 113, 197, 137, 205, 229, 75, 208, 51, 153, + 168, 23, 2, 245, 198, 151, 172, 126, 56, 241, 41, 34, 109, 29, 36, 175, 63, 233, 78, 203, 19, 43, 155, + 15, 217, 229, 201, 56, 28, 195, 17, 90, 33, 102, 243, 81, 58, 169, 58, 163, 160, 177, 90, 26, 113, 77, + 253, 227, 187, 8, 214, 224, 195, 108, 243, 106, 215, 66, 184, 129, 51, 51, 221, 70, 152, 196, 12, 43, 140, + 111, 58, 239, 27, 158, 104, 226, 196, 37, 161, 233, 46, 9, 198, 53, 198, 114, 11, 103, 81, 119, 9, 215, + 190, 22, 202, 41, 19, 143, 19, 39, 221, 221, 16, 176, 53, 45, 85, 108, 152, 1, 106, 154, 223, 25, 101, + 190, 154, 35, 130, 202, 209, 158, 244, 240, 81, 237, 12, 224, 68, 87, 140, 130, 50, 130, 205, 17, 164, 22, + 93, 86, 41, 7, 201, 220, 254, 242, 188, 41, 105, 119, 168, 202, 95, 238, 151, 240, 73, 56, 197, 48, 28, + 205, 164, 189, 253, 254, 28, 23, 254, 106, 178, 201, 96, 233, 111, 135, 73, 202, 40, 189, 47, 146, 229, 71, + 142, 119, 39, 20, 192, 93, 135, 67, 191, 84, 139, 7, 130, 110, 105, 204, 18, 68, 217, 85, 226, 51, 232, + 112, 191, 32, 214, 133, 149, 58, 68, 177, 181, 73, 24, 134, 206, 95, 144, 187, 157, 130, 142, 85, 138, 115, + 184, 123, 139, 181, 102, 99, 154, 58, 241, 50, 94, 218, 172, 207, 254, 143, 106, 158, 174, 236, 189, 215, 207, + 72, 86, 105, 
174, 157, 118, 164, 36, 125, 246, 199, 140, 98, 68, 209, 151, 235, 183, 232, 12, 140, 1, 250, + 178, 245, 85, 177, 17, 203, 29, 40, 97, 56, 186, 111, 252, 224, 86, 140, 249, 127, 180, 243, 239, 161, 252, + 218, 8, 200, 113, 136, 85, 221, 89, 1, 158, 157, 144, 7, 15, 123, 181, 230, 221, 194, 27, 4, 123, 177, + 26, 215, 192, 150, 68, 65, 103, 210, 16, 0, 42, 233, 32, 250, 236, 175, 49, 39, 253, 196, 196, 222, 9, + 32, 249, 9, 110, 63, 20, 16, 128, 177, 155, 48, 191, 190, 211, 36, 48, 27, 66, 100, 152, 138, 14, 12, + 3, 229, 223, 47, 131, 30, 65, 243, 101, 197, 6, 102, 20, 17, 152, 65, 23, 53, 254, 164, 152, 101, 134, + 106, 20, 53, 227, 167, 211, 47, 166, 23, 120, 235, 246, 42, 200, 214, 14, 20, 182, 212, 4, 142, 108, 71, + 167, 104, 98, 158, 223, 24, 220, 63, 177, 61, 143, 60, 55, 0, 214, 124, 137, 228, 232, 239, 59, 113, 186, + 134, 231, 179, 134, 239, 109, 25, 174, 108, 205, 226, 210, 183, 75, 197, 26, 74, 132, 57, 200, 227, 243, 15, + 245, 71, 234, 51, 29, 20, 125, 18, 120, 136, 194, 173, 228, 72, 72, 232, 155, 3, 248, 215, 23, 40, 29, + 121, 65, 148, 107, 126, 250, 150, 198, 58, 177, 29, 167, 192, 244, 19, 65, 250, 200, 159, 167, 151, 58, 249, + 101, 227, 163, 216, 11, 112, 34, 222, 50, 14, 156, 252, 42, 136, 95, 89, 175, 87, 177, 117, 6, 68, 47, + 59, 148, 197, 62, 220, 143, 43, 56, 166, 94, 123, 113, 143, 196, 136, 216, 32, 127, 79, 126, 231, 141, 231, + 229, 2, 44, 154, 107, 200, 191, 47, 213, 144, 64, 70, 37, 126, 208, 229, 39, 132, 235, 198, 56, 196, 73, + 20, 83, 14, 242, 145, 236, 43, 95, 67, 180, 236, 131, 240, 163, 223, 96, 113, 176, 248, 31, 50, 5, 218, + 166, 120, 144, 231, 119, 239, 58, 104, 147, 122, 235, 224, 57, 188, 185, 244, 42, 152, 72, 107, 221, 208, 118, + 167, 221, 146, 139, 127, 110, 196, 111, 148, 133, 74, 55, 138, 253, 27, 207, 130, 33, 243, 64, 14, 196, 162, + 200, 104, 136, 180, 159, 167, 154, 215, 81, 170, 147, 124, 30, 219, 16, 197, 108, 42, 213, 249, 195, 174, 240, + 173, 185, 220, 222, 160, 45, 125, 37, 196, 35, 43, 180, 251, 240, 
50, 53, 246, 211, 191, 2, 38, 23, 164, + 185, 179, 124, 208, 230, 24, 131, 242, 46, 11, 211, 211, 152, 188, 230, 34, 80, 241, 71, 19, 96, 40, 222, + 9, 53, 107, 140, 56, 182, 170, 149, 152, 52, 19, 89, 181, 149, 35, 17, 11, 5, 142, 192, 234, 36, 206, + 149, 235, 94, 96, 37, 223, 101, 200, 213, 101, 36, 109, 86, 187, 10, 211, 60, 66, 123, 229, 224, 239, 49, + 173, 212, 5, 96, 128, 241, 234, 76, 2, 9, 170, 175, 84, 77, 130, 181, 193, 179, 63, 71, 83, 24, 105, + 149, 85, 187, 224, 170, 90, 41, 209, 194, 124, 19, 57, 198, 55, 184, 119, 206, 134, 110, 30, 38, 229, 24, + 115, 211, 73, 154, 181, 11, 191, 41, 20, 85, 12, 18, 138, 196, 162, 200, 135, 104, 27, 191, 82, 253, 154, + 123, 82, 21, 119, 196, 175, 142, 254, 49, 121, 249, 8, 31, 227, 160, 134, 217, 36, 221, 161, 153, 4, 182, + 136, 110, 208, 233, 196, 120, 226, 22, 58, 74, 150, 110, 253, 160, 182, 124, 149, 32, 49, 145, 214, 187, 121, + 61, 105, 28, 134, 176, 15, 0, 5, 211, 178, 2, 249, 64, 239, 6, 175, 247, 162, 19, 253, 199, 216, 40, + 39, 92, 8, 4, 14, 2, 130, 214, 155, 199, 186, 215, 24, 130, 215, 199, 217, 223, 158, 83, 49, 60, 79, + 61, 221, 210, 151, 194, 98, 52, 89, 39, 5, 162, 34, 194, 75, 213, 201, 53, 94, 12, 10, 184, 54, 173, + 17, 97, 51, 68, 22, 197, 77, 127, 80, 136, 76, 3, 118, 229, 60, 235, 46, 173, 61, 16, 232, 138, 162, + 148, 179, 122, 172, 238, 126, 227, 21, 5, 180, 235, 4, 78, 37, 10, 221, 129, 209, 93, 86, 198, 192, 134, + 33, 33, 107, 109, 155, 47, 184, 31, 32, 46, 194, 141, 227, 33, 150, 113, 78, 7, 93, 199, 184, 103, 55, + 107, 140, 49, 197, 227, 94, 41, 207, 115, 157, 16, 45, 124, 169, 205, 167, 170, 12, 213, 235, 211, 239, 107, + 42, 42, 9, 55, 22, 9, 91, 151, 177, 171, 23, 123, 241, 48, 77, 179, 178, 33, 105, 245, 121, 221, 143, + 77, 67, 57, 110, 230, 64, 249, 91, 3, 34, 81, 173, 223, 11, 23, 157, 211, 12, 119, 30, 156, 132, 182, + 81, 66, 208, 155, 35, 252, 138, 216, 239, 163, 187, 90, 99, 210, 154, 77, 192, 4, 220, 54, 87, 126, 142, + 185, 84, 135, 97, 9, 70, 10, 12, 177, 199, 140, 
112, 107, 175, 127, 227, 136, 245, 236, 58, 132, 83, 36, + 59, 205, 163, 85, 35, 151, 113, 228, 72, 110, 197, 22, 227, 89, 185, 169, 231, 99, 132, 186, 38, 21, 65, + 52, 67, 248, 60, 83, 66, 49, 112, 91, 232, 150, 125, 111, 132, 182, 88, 185, 231, 209, 212, 240, 253, 92, + 180, 206, 173, 22, 226, 3, 58, 56, 21, 162, 95, 138, 227, 193, 219, 167, 51, 62, 109, 214, 247, 136, 218, + 199, 104, 15, 51, 184, 98, 223, 122, 179, 211, 210, 76, 9, 49, 215, 69, 69, 91, 55, 90, 131, 161, 195, + 91, 39, 176, 231, 221, 233, 128, 51, 70, 89, 88, 14, 226, 2, 174, 23, 112, 175, 56, 74, 2, 0, 109, + 25, 241, 72, 116, 170, 41, 32, 160, 228, 23, 191, 244, 186, 46, 76, 129, 205, 40, 188, 210, 15, 125, 118, + 21, 136, 101, 12, 107, 81, 1, 98, 175, 228, 68, 36, 111, 178, 118, 47, 125, 29, 201, 202, 200, 159, 216, + 50, 202, 99, 23, 55, 223, 195, 86, 45, 231, 95, 123, 191, 184, 162, 59, 105, 7, 22, 186, 55, 195, 49, + 215, 32, 186, 197, 33, 91, 204, 138, 186, 72, 145, 18, 1, 206, 47, 186, 212, 91, 97, 18, 196, 156, 240, + 172, 19, 159, 104, 254, 230, 52, 214, 130, 225, 141, 248, 114, 161, 91, 62, 54, 235, 11, 189, 67, 128, 24, + 245, 10, 68, 225, 235, 218, 169, 40, 245, 211, 212, 92, 38, 14, 210, 19, 240, 111, 15, 87, 128, 153, 201, + 19, 160, 216, 116, 175, 216, 109, 98, 209, 220, 61, 132, 252, 228, 7, 252, 234, 252, 54, 21, 166, 116, 88, + 144, 22, 188, 158, 135, 76, 102, 70, 103, 157, 122, 201, 154, 191, 143, 129, 96, 59, 244, 197, 90, 194, 103, + 242, 148, 204, 125, 199, 100, 52, 169, 149, 196, 173, 88, 235, 159, 132, 208, 106, 189, 163, 237, 164, 95, 209, + 115, 180, 164, 182, 68, 78, 26, 108, 135, 214, 153, 249, 59, 14, 66, 242, 18, 45, 119, 43, 49, 75, 235, + 250, 94, 194, 202, 244, 140, 29, 211, 77, 171, 40, 140, 154, 83, 203, 217, 68, 20, 168, 37, 101, 85, 97, + 249, 193, 168, 205, 149, 196, 127, 222, 49, 107, 254, 173, 25, 198, 151, 161, 187, 23, 68, 206, 57, 120, 156, + 223, 87, 169, 127, 77, 40, 56, 182, 145, 156, 56, 110, 199, 45, 248, 121, 216, 245, 134, 134, 4, 105, 221, + 
29, 82, 1, 162, 18, 188, 59, 110, 8, 30, 213, 221, 42, 143, 125, 248, 43, 140, 102, 218, 221, 65, 213, + 39, 84, 210, 82, 82, 247, 6, 148, 196, 32, 29, 245, 196, 113, 46, 157, 64, 170, 97, 95, 155, 99, 159, + 34, 160, 92, 248, 29, 155, 218, 154, 46, 16, 16, 47, 243, 16, 124, 159, 183, 123, 196, 200, 28, 180, 188, + 12, 121, 133, 134, 225, 36, 53, 169, 165, 207, 103, 202, 82, 119, 127, 179, 184, 229, 2, 236, 5, 39, 118, + 143, 227, 213, 34, 194, 220, 152, 54, 254, 227, 230, 197, 31, 77, 113, 191, 239, 224, 115, 253, 171, 143, 3, + 61, 250, 164, 84, 136, 16, 103, 163, 163, 93, 77, 180, 64, 103, 11, 248, 141, 177, 123, 33, 185, 118, 56, + 178, 68, 138, 180, 54, 21, 124, 133, 104, 202, 217, 237, 107, 107, 244, 148, 111, 124, 67, 29, 88, 19, 189, + 19, 171, 36, 203, 155, 203, 176, 81, 156, 232, 138, 16, 233, 213, 19, 114, 253, 238, 254, 165, 192, 105, 121, + 194, 206, 244, 149, 12, 198, 125, 195, 171, 235, 38, 169, 51, 50, 43, 10, 213, 224, 236, 196, 184, 222, 131, + 221, 189, 205, 168, 59, 157, 128, 247, 25, 62, 247, 26, 66, 13, 111, 60, 162, 168, 164, 204, 185, 47, 91, + 84, 99, 207, 87, 65, 99, 227, 146, 52, 244, 122, 32, 34, 68, 248, 228, 137, 227, 133, 125, 111, 152, 120, + 176, 54, 110, 194, 250, 69, 100, 219, 243, 37, 46, 24, 204, 102, 115, 206, 29, 176, 16, 206, 71, 166, 89, + 51, 110, 205, 9, 172, 150, 40, 12, 126, 44, 52, 241, 74, 91, 147, 6, 43, 179, 83, 36, 230, 32, 154, + 5, 209, 36, 4, 5, 220, 200, 127, 41, 84, 14, 221, 219, 115, 36, 34, 68, 66, 213, 102, 172, 54, 150, + 186, 52, 1, 46, 130, 46, 143, 181, 248, 234, 113, 142, 248, 43, 149, 21, 66, 111, 41, 173, 224, 63, 62, + 179, 8, 145, 20, 128, 103, 157, 13, 66, 219, 121, 104, 67, 149, 27, 136, 83, 101, 132, 156, 26, 45, 219, + 197, 209, 228, 228, 168, 83, 156, 0, 244, 198, 145, 242, 120, 226, 10, 164, 221, 80, 167, 242, 147, 14, 38, + 52, 110, 27, 235, 175, 58, 64, 20, 71, 74, 183, 202, 171, 134, 230, 19, 9, 120, 67, 231, 10, 14, 172, + 71, 187, 198, 251, 2, 210, 11, 0, 83, 95, 249, 91, 155, 207, 183, 
89, 149, 111, 62, 210, 220, 44, 152, + 6, 104, 88, 53, 3, 172, 2, 21, 91, 125, 36, 144, 235, 196, 183, 60, 251, 192, 104, 197, 117, 72, 249, + 249, 68, 71, 212, 88, 247, 69, 87, 167, 126, 227, 233, 229, 105, 240, 183, 234, 170, 67, 248, 142, 22, 91, + 117, 126, 6, 208, 52, 28, 241, 170, 206, 135, 126, 15, 80, 132, 16, 213, 118, 74, 200, 63, 57, 32, 251, + 63, 114, 224, 187, 98, 142, 39, 135, 16, 74, 20, 11, 106, 28, 50, 1, 187, 106, 220, 108, 20, 224, 155, + 150, 51, 203, 242, 61, 95, 243, 124, 211, 237, 253, 16, 135, 246, 32, 46, 227, 132, 60, 25, 212, 190, 3, + 148, 173, 233, 171, 250, 191, 47, 92, 52, 214, 139, 34, 162, 136, 42, 131, 96, 230, 36, 132, 14, 214, 138, + 101, 249, 73, 222, 141, 83, 81, 22, 22, 19, 117, 184, 176, 105, 174, 86, 103, 8, 238, 28, 39, 19, 49, + 59, 146, 128, 172, 244, 215, 86, 158, 218, 116, 52, 124, 75, 30, 102, 92, 103, 141, 68, 125, 18, 74, 220, + 154, 181, 126, 212, 6, 83, 55, 27, 129, 40, 121, 199, 20, 178, 112, 239, 99, 81, 223, 93, 35, 30, 60, + 73, 197, 243, 239, 142, 251, 21, 60, 138, 49, 216, 27, 56, 21, 49, 14, 165, 125, 38, 234, 212, 177, 252, + 102, 94, 192, 125, 223, 42, 106, 116, 50, 202, 37, 223, 33, 125, 225, 40, 145, 180, 159, 84, 81, 198, 28, + 82, 27, 241, 108, 240, 223, 132, 151, 197, 18, 178, 21, 128, 151, 32, 236, 103, 126, 36, 189, 111, 50, 9, + 49, 2, 104, 27, 166, 231, 2, 203, 79, 109, 31, 28, 182, 168, 252, 212, 28, 112, 191, 213, 73, 34, 143, + 239, 36, 94, 181, 46, 115, 186, 100, 212, 247, 45, 134, 93, 146, 65, 74, 104, 89, 117, 244, 187, 132, 103, + 33, 90, 40, 191, 166, 241, 111, 59, 64, 96, 98, 176, 196, 230, 171, 245, 94, 179, 37, 233, 200, 2, 198, + 115, 48, 184, 18, 161, 24, 242, 153, 146, 82, 178, 209, 99, 60, 183, 98, 252, 11, 95, 41, 137, 176, 203, + 14, 185, 226, 79, 227, 218, 143, 164, 177, 97, 56, 231, 251, 13, 170, 211, 167, 181, 106, 17, 220, 9, 58, + 44, 231, 47, 9, 104, 178, 143, 130, 138, 28, 163, 136, 63, 148, 230, 54, 76, 83, 97, 202, 238, 65, 218, + 126, 246, 74, 84, 114, 210, 175, 52, 
153, 146, 93, 199, 173, 132, 129, 131, 190, 249, 245, 4, 226, 77, 248, + 242, 221, 34, 199, 79, 57, 39, 159, 19, 79, 251, 3, 53, 74, 84, 190, 171, 227, 95, 194, 113, 216, 145, + 145, 130, 66, 164, 161, 12, 112, 41, 133, 87, 127, 128, 220, 206, 154, 193, 46, 209, 161, 124, 73, 246, 7, + 135, 98, 72, 248, 123, 59, 25, 73, 182, 95, 97, 216, 254, 239, 207, 68, 40, 11, 138, 240, 150, 62, 7, + 164, 168, 237, 10, 151, 9, 64, 108, 172, 214, 35, 43, 178, 85, 198, 222, 41, 121, 121, 60, 233, 86, 207, + 244, 72, 19, 242, 70, 46, 21, 8, 186, 245, 110, 220, 171, 146, 41, 154, 66, 139, 28, 73, 146, 189, 15, + 199, 211, 247, 46, 0, 33, 70, 247, 229, 248, 11, 166, 196, 49, 22, 223, 9, 114, 0, 85, 192, 62, 242, + 88, 234, 45, 84, 1, 152, 154, 59, 229, 226, 131, 145, 94, 38, 210, 207, 96, 62, 242, 144, 123, 214, 5, + 44, 72, 57, 58, 17, 217, 8, 241, 131, 118, 95, 12, 157, 216, 20, 135, 30, 42, 152, 197, 131, 119, 23, + 2, 171, 231, 205, 127, 47, 21, 210, 230, 207, 220, 172, 118, 11, 244, 21, 251, 150, 195, 64, 31, 17, 86, + 83, 13, 112, 232, 40, 165, 233, 4, 73, 169, 99, 252, 142, 186, 133, 106, 27, 83, 253, 123, 210, 69, 201, + 201, 42, 129, 172, 34, 201, 27, 247, 109, 16, 84, 211, 158, 2, 217, 135, 111, 209, 9, 129, 147, 83, 92, + 243, 179, 116, 96, 175, 89, 97, 24, 225, 21, 61, 39, 129, 168, 121, 81, 206, 33, 72, 10, 7, 182, 156, + 130, 198, 199, 172, 132, 29, 32, 19, 155, 108, 34, 200, 48, 3, 45, 191, 190, 107, 229, 229, 115, 194, 91, + 244, 64, 203, 111, 175, 67, 137, 58, 134, 193, 55, 162, 134, 83, 26, 85, 191, 116, 111, 162, 97, 188, 198, + 163, 113, 209, 56, 243, 49, 148, 230, 58, 130, 160, 43, 194, 252, 95, 118, 23, 104, 254, 144, 204, 2, 228, + 100, 199, 232, 188, 178, 5, 142, 70, 44, 33, 72, 114, 252, 249, 192, 48, 237, 240, 59, 46, 154, 10, 180, + 6, 224, 33, 86, 165, 236, 217, 153, 28, 105, 54, 10, 211, 156, 38, 211, 46, 145, 220, 162, 216, 5, 239, + 248, 84, 85, 224, 179, 191, 126, 124, 191, 1, 88, 32, 31, 52, 190, 164, 216, 12, 16, 1, 37, 68, 10, + 157, 66, 176, 161, 
196, 188, 55, 228, 216, 213, 132, 113, 102, 101, 100, 129, 250, 155, 157, 101, 136, 225, 35, + 60, 19, 126, 154, 176, 29, 103, 138, 239, 194, 17, 209, 80, 88, 23, 133, 167, 119, 84, 245, 36, 123, 42, + 123, 67, 48, 55, 66, 245, 91, 168, 202, 81, 16, 193, 173, 123, 66, 65, 28, 251, 226, 204, 239, 129, 243, + 93, 156, 6, 56, 98, 120, 85, 99, 133, 184, 117, 236, 41, 50, 159, 199, 249, 42, 56, 146, 80, 170, 170, + 213, 209, 141, 131, 223, 13, 231, 4, 62, 61, 20, 11, 22, 137, 138, 52, 111, 154, 113, 56, 129, 234, 51, + 225, 222, 184, 46, 89, 42, 243, 230, 44, 142, 72, 77, 10, 158, 71, 42, 169, 158, 1, 179, 191, 145, 156, + 126, 170, 75, 9, 127, 40, 183, 72, 240, 196, 74, 121, 87, 37, 130, 3, 117, 113, 30, 117, 31, 16, 179, + 105, 187, 11, 15, 82, 27, 84, 54, 44, 120, 46, 239, 191, 71, 4, 224, 9, 178, 198, 138, 8, 208, 229, + 45, 243, 77, 142, 73, 164, 161, 143, 14, 121, 28, 124, 191, 67, 248, 21, 184, 77, 58, 152, 19, 172, 58, + 214, 200, 208, 37, 59, 219, 188, 57, 225, 131, 85, 106, 5, 192, 33, 161, 162, 170, 130, 165, 116, 25, 254, + 187, 207, 127, 30, 193, 85, 132, 36, 9, 29, 59, 170, 112, 70, 208, 86, 6, 244, 59, 159, 17, 118, 2, + 157, 166, 95, 197, 22, 240, 120, 146, 183, 208, 73, 47, 164, 168, 251, 79, 182, 200, 204, 162, 85, 12, 78, + 167, 211, 111, 67, 213, 166, 55, 176, 158, 70, 219, 3, 110, 23, 14, 115, 218, 165, 115, 231, 222, 245, 33, + 200, 174, 210, 249, 29, 218, 108, 38, 215, 222, 184, 36, 92, 244, 133, 234, 239, 25, 226, 47, 33, 114, 154, + 206, 202, 190, 70, 94, 132, 163, 84, 73, 109, 34, 216, 149, 147, 185, 157, 166, 237, 149, 186, 248, 54, 206, + 216, 62, 52, 243, 11, 223, 45, 26, 169, 22, 237, 20, 4, 176, 73, 187, 130, 138, 44, 18, 191, 34, 162, + 160, 48, 140, 40, 130, 166, 171, 65, 3, 137, 83, 238, 28, 18, 244, 1, 125, 114, 60, 57, 238, 250, 94, + 136, 72, 249, 65, 0, 2, 102, 208, 197, 78, 89, 253, 148, 54, 65, 210, 121, 163, 68, 98, 184, 25, 28, + 13, 61, 167, 2, 192, 206, 59, 220, 183, 88, 145, 207, 242, 168, 101, 79, 105, 179, 98, 51, 231, 95, 
64, + 99, 136, 123, 91, 115, 23, 188, 177, 170, 176, 201, 204, 124, 230, 232, 185, 35, 191, 116, 215, 122, 98, 183, + 75, 152, 236, 213, 237, 183, 29, 17, 139, 86, 136, 183, 6, 43, 206, 129, 103, 148, 92, 131, 139, 23, 187, + 35, 71, 230, 30, 29, 124, 158, 224, 29, 131, 1, 3, 79, 0, 178, 205, 87, 74, 44, 221, 25, 137, 180, + 194, 57, 65, 68, 241, 160, 200, 209, 73, 208, 223, 204, 53, 122, 237, 100, 64, 194, 53, 156, 46, 103, 214, + 239, 230, 155, 247, 251, 190, 102, 0, 245, 104, 10, 72, 202, 45, 112, 235, 32, 198, 234, 183, 205, 95, 170, + 145, 214, 10, 245, 25, 53, 74, 214, 201, 7, 240, 79, 44, 79, 149, 46, 203, 28, 40, 24, 222, 232, 70, + 10, 34, 137, 90, 50, 199, 239, 192, 72, 184, 44, 39, 31, 237, 211, 110, 118, 170, 251, 50, 219, 173, 39, + 91, 212, 108, 177, 198, 244, 155, 113, 176, 69, 210, 38, 53, 168, 89, 201, 55, 59, 98, 111, 188, 192, 224, + 231, 181, 243, 236, 71, 185, 241, 186, 3, 232, 119, 0, 187, 121, 82, 3, 115, 58, 35, 189, 44, 121, 152, + 178, 69, 199, 36, 127, 242, 31, 72, 23, 151, 15, 96, 179, 185, 154, 64, 119, 3, 119, 16, 138, 185, 181, + 218, 104, 95, 171, 0, 245, 176, 212, 35, 112, 152, 191, 162, 92, 15, 19, 37, 180, 67, 231, 225, 215, 12, + 199, 43, 14, 213, 228, 161, 119, 161, 215, 5, 125, 58, 159, 113, 28, 190, 36, 68, 24, 193, 78, 229, 125, + 229, 219, 16, 207, 78, 130, 203, 214, 110, 48, 234, 97, 181, 172, 125, 45, 74, 171, 71, 169, 154, 230, 137, + 212, 231, 172, 198, 224, 1, 127, 221, 113, 209, 42, 150, 18, 212, 175, 164, 175, 102, 24, 139, 1, 206, 148, + 252, 45, 15, 128, 55, 237, 20, 147, 175, 171, 17, 222, 50, 217, 165, 15, 15, 231, 56, 107, 221, 125, 80, + 237, 234, 89, 138, 112, 149, 250, 73, 104, 139, 93, 185, 205, 249, 248, 4, 249, 6, 152, 3, 63, 191, 113, + 46, 203, 167, 122, 9, 22, 146, 83, 107, 74, 163, 68, 41, 159, 117, 90, 84, 0, 253, 182, 32, 245, 92, + 177, 149, 148, 80, 245, 154, 84, 130, 134, 103, 215, 225, 169, 11, 112, 1, 172, 236, 69, 204, 56, 135, 244, + 169, 102, 128, 221, 19, 180, 37, 112, 193, 111, 44, 166, 79, 
70, 95, 115, 189, 28, 141, 204, 195, 32, 215, + 74, 102, 59, 247, 135, 216, 216, 67, 230, 169, 71, 154, 105, 50, 49, 197, 250, 167, 32, 99, 159, 177, 131, + 167, 242, 53, 205, 2, 31, 169, 128, 9, 99, 47, 16, 141, 207, 68, 152, 102, 135, 151, 141, 178, 122, 68, + 122, 42, 152, 119, 179, 157, 134, 228, 189, 189, 239, 83, 186, 99, 119, 81, 146, 203, 98, 160, 98, 246, 1, + 71, 176, 155, 104, 150, 221, 197, 76, 150, 149, 242, 189, 68, 207, 127, 216, 30, 83, 215, 252, 124, 78, 99, + 150, 139, 224, 89, 144, 57, 7, 219, 47, 111, 185, 254, 34, 182, 107, 190, 158, 212, 98, 32, 113, 73, 52, + 111, 241, 253, 223, 168, 215, 86, 128, 160, 169, 141, 47, 1, 225, 26, 2, 90, 76, 60, 167, 129, 107, 5, + 23, 135, 75, 63, 27, 217, 3, 30, 28, 224, 73, 144, 62, 32, 232, 170, 24, 87, 97, 108, 136, 186, 56, + 48, 249, 158, 157, 145, 78, 2, 154, 4, 5, 144, 74, 253, 254, 77, 201, 67, 72, 170, 66, 230, 102, 85, + 176, 58, 62, 214, 22, 159, 78, 191, 113, 131, 165, 68, 4, 124, 72, 173, 221, 21, 170, 149, 226, 43, 165, + 52, 87, 247, 225, 179, 65, 119, 41, 145, 134, 64, 131, 117, 93, 111, 169, 90, 69, 68, 56, 162, 23, 69, + 66, 172, 135, 74, 160, 192, 130, 112, 94, 41, 143, 147, 207, 4, 253, 66, 66, 210, 113, 195, 43, 95, 4, + 33, 231, 253, 33, 139, 215, 66, 23, 27, 60, 37, 164, 116, 133, 132, 87, 106, 36, 99, 235, 202, 21, 84, + 32, 92, 76, 40, 190, 183, 254, 81, 95, 186, 155, 22, 245, 52, 114, 208, 231, 121, 187, 6, 165, 188, 4, + 155, 138, 118, 21, 2, 239, 248, 121, 104, 140, 102, 177, 86, 133, 221, 201, 147, 208, 224, 71, 167, 225, 137, + 152, 94, 117, 110, 131, 194, 172, 33, 217, 25, 189, 58, 95, 220, 8, 154, 59, 171, 188, 130, 118, 239, 248, + 35, 32, 213, 54, 238, 127, 193, 37, 221, 201, 81, 108, 104, 62, 223, 197, 150, 57, 73, 218, 54, 44, 238, + 54, 188, 153, 27, 166, 159, 146, 230, 218, 117, 42, 177, 49, 115, 24, 121, 176, 35, 245, 34, 248, 152, 219, + 23, 150, 50, 41, 134, 156, 205, 147, 183, 95, 40, 52, 246, 10, 48, 233, 205, 24, 237, 95, 225, 63, 139, + 252, 53, 254, 93, 195, 116, 
201, 243, 231, 42, 192, 30, 247, 127, 84, 40, 35, 55, 132, 192, 168, 90, 228, + 49, 174, 155, 58, 14, 61, 96, 105, 198, 146, 107, 196, 61, 176, 15, 241, 27, 140, 92, 250, 163, 234, 251, + 113, 31, 136, 162, 100, 122, 186, 148, 215, 250, 207, 228, 206, 180, 186, 253, 208, 40, 220, 177, 158, 155, 188, + 160, 141, 243, 201, 94, 97, 41, 105, 225, 22, 201, 154, 75, 214, 120, 227, 25, 5, 79, 63, 216, 154, 179, + 202, 213, 39, 2, 53, 224, 22, 72, 141, 47, 168, 112, 229, 159, 97, 219, 138, 199, 219, 82, 36, 148, 175, + 196, 155, 191, 237, 251, 100, 131, 8, 246, 182, 189, 134, 181, 0, 160, 195, 158, 152, 201, 47, 184, 69, 214, + 244, 196, 120, 178, 30, 13, 46, 232, 124, 168, 100, 199, 160, 157, 0, 95, 21, 45, 231, 206, 31, 77, 149, + 208, 78, 246, 4, 40, 246, 205, 39, 153, 47, 147, 123, 106, 124, 234, 153, 146, 208, 244, 23, 99, 94, 87, + 4, 77, 72, 136, 186, 27, 10, 26, 226, 230, 246, 151, 221, 216, 74, 156, 132, 12, 192, 215, 6, 205, 76, + 98, 113, 178, 139, 38, 145, 30, 214, 223, 161, 21, 187, 143, 81, 230, 29, 193, 32, 181, 223, 9, 86, 164, + 190, 36, 148, 66, 33, 205, 31, 96, 115, 144, 154, 45, 238, 249, 234, 1, 150, 37, 120, 44, 224, 76, 224, + 232, 46, 85, 225, 242, 38, 243, 28, 127, 64, 133, 127, 176, 85, 92, 142, 17, 201, 161, 249, 145, 237, 41, + 42, 67, 97, 2, 146, 172, 17, 187, 33, 190, 33, 98, 204, 22, 30, 149, 245, 86, 223, 209, 200, 209, 106, + 173, 55, 113, 221, 145, 254, 101, 54, 174, 19, 240, 14, 42, 80, 12, 18, 237, 127, 72, 42, 39, 209, 239, + 206, 106, 85, 99, 98, 160, 211, 89, 177, 117, 247, 67, 183, 20, 171, 210, 1, 2, 36, 228, 250, 61, 93, + 17, 193, 132, 124, 126, 164, 113, 84, 15, 30, 81, 50, 44, 223, 146, 2, 130, 176, 205, 254, 28, 233, 230, + 56, 189, 228, 23, 246, 97, 102, 120, 254, 238, 102, 177, 97, 167, 26, 158, 49, 126, 81, 154, 214, 56, 191, + 101, 200, 174, 238, 91, 189, 135, 82, 141, 199, 68, 128, 220, 87, 57, 142, 1, 57, 84, 240, 84, 45, 143, + 147, 170, 52, 131, 77, 116, 117, 177, 206, 191, 125, 124, 69, 92, 23, 65, 10, 196, 192, 138, 
232, 115, 133, + 199, 62, 122, 206, 243, 72, 161, 219, 232, 82, 248, 203, 60, 144, 176, 138, 93, 154, 76, 233, 127, 129, 38, + 223, 47, 75, 226, 81, 14, 210, 150, 113, 232, 141, 188, 135, 229, 185, 196, 72, 203, 230, 88, 38, 167, 10, + 14, 55, 101, 74, 19, 213, 37, 226, 92, 103, 159, 85, 36, 190, 30, 104, 119, 64, 237, 178, 34, 17, 69, + 185, 88, 34, 150, 154, 39, 127, 86, 195, 151, 142, 108, 233, 53, 163, 148, 97, 246, 124, 82, 78, 72, 14, + 210, 239, 107, 133, 79, 192, 75, 37, 148, 163, 46, 50, 139, 32, 38, 178, 177, 151, 133, 51, 24, 100, 17, + 217, 219, 175, 251, 63, 70, 36, 28, 92, 144, 227, 219, 168, 7, 251, 130, 73, 81, 14, 79, 211, 155, 100, + 97, 216, 200, 121, 93, 31, 156, 8, 154, 210, 245, 216, 24, 94, 11, 31, 202, 221, 136, 45, 167, 14, 123, + 190, 194, 62, 191, 88, 254, 19, 104, 183, 41, 186, 215, 68, 228, 179, 78, 100, 115, 36, 167, 192, 215, 218, + 155, 249, 182, 98, 163, 176, 82, 104, 63, 181, 94, 51, 7, 236, 103, 104, 208, 91, 3, 48, 98, 250, 71, + 73, 119, 93, 113, 131, 16, 75, 197, 245, 51, 196, 104, 241, 219, 63, 201, 172, 12, 234, 202, 80, 6, 158, + 117, 194, 120, 99, 13, 133, 104, 23, 162, 32, 137, 201, 123, 7, 239, 2, 253, 2, 219, 34, 14, 171, 254, + 175, 11, 40, 88, 5, 221, 147, 7, 223, 83, 185, 25, 178, 165, 170, 108, 68, 112, 123, 54, 241, 49, 93, + 176, 100, 91, 180, 102, 0, 162, 191, 239, 111, 191, 165, 239, 119, 69, 187, 44, 200, 201, 60, 51, 132, 165, + 46, 23, 138, 165, 188, 170, 112, 28, 9, 166, 14, 120, 19, 162, 247, 150, 92, 65, 147, 88, 133, 177, 194, + 37, 120, 230, 247, 210, 21, 40, 56, 215, 26, 48, 66, 220, 169, 190, 137, 161, 84, 84, 215, 222, 91, 21, + 162, 190, 139, 6, 132, 87, 49, 92, 209, 97, 103, 211, 73, 48, 250, 55, 40, 63, 94, 254, 103, 191, 65, + 159, 19, 19, 68, 157, 181, 29, 153, 133, 245, 73, 241, 131, 73, 165, 82, 41, 22, 30, 211, 138, 115, 13, + 144, 30, 241, 16, 3, 82, 73, 18, 154, 246, 115, 196, 47, 139, 20, 99, 210, 94, 247, 202, 109, 57, 106, + 28, 55, 15, 245, 242, 241, 136, 56, 148, 254, 249, 153, 202, 132, 
174, 233, 213, 212, 237, 10, 78, 218, 137, + 141, 227, 187, 75, 54, 167, 60, 60, 170, 32, 210, 219, 242, 149, 110, 60, 155, 0, 113, 16, 248, 233, 194, + 145, 49, 5, 232, 225, 81, 206, 164, 81, 226, 181, 164, 180, 210, 99, 230, 34, 99, 218, 169, 107, 109, 190, + 59, 117, 245, 5, 179, 139, 137, 94, 145, 202, 90, 194, 69, 105, 2, 170, 108, 108, 203, 50, 17, 222, 71, + 74, 26, 98, 183, 133, 109, 103, 4, 246, 241, 199, 25, 90, 197, 14, 140, 139, 95, 192, 230, 130, 81, 126, + 117, 136, 248, 18, 13, 238, 105, 237, 237, 222, 33, 101, 192, 45, 141, 203, 30, 42, 134, 75, 193, 5, 45, + 63, 37, 137, 254, 102, 80, 144, 156, 48, 213, 163, 214, 30, 121, 178, 187, 120, 153, 98, 157, 32, 250, 196, + 207, 55, 247, 101, 191, 120, 58, 150, 226, 156, 57, 42, 169, 49, 49, 74, 193, 188, 184, 7, 201, 192, 245, + 170, 79, 190, 230, 44, 33, 28, 244, 21, 7, 250, 169, 53, 180, 29, 193, 33, 199, 118, 140, 161, 189, 198, + 70, 89, 190, 178, 42, 173, 112, 61, 87, 137, 74, 162, 217, 166, 108, 70, 64, 212, 239, 61, 163, 77, 19, + 42, 190, 87, 37, 210, 134, 40, 207, 151, 248, 66, 22, 227, 56, 39, 62, 30, 238, 164, 43, 124, 92, 106, + 90, 146, 55, 16, 86, 155, 153, 115, 79, 136, 197, 38, 26, 36, 152, 157, 77, 237, 42, 162, 208, 220, 220, + 105, 227, 79, 100, 25, 238, 183, 227, 147, 74, 201, 242, 108, 242, 19, 143, 152, 82, 73, 203, 139, 39, 219, + 98, 140, 173, 126, 129, 123, 179, 0, 98, 44, 123, 158, 153, 214, 212, 15, 1, 161, 203, 21, 203, 223, 152, + 53, 213, 54, 155, 166, 203, 126, 130, 147, 66, 13, 165, 29, 167, 75, 222, 182, 215, 100, 69, 39, 129, 135, + 229, 31, 147, 239, 230, 97, 83, 102, 236, 242, 108, 106, 132, 230, 106, 73, 88, 9, 8, 142, 196, 79, 194, + 222, 205, 45, 212, 241, 125, 221, 40, 57, 24, 184, 246, 178, 101, 244, 153, 225, 219, 179, 17, 47, 184, 3, + 78, 85, 151, 206, 33, 251, 109, 159, 155, 138, 161, 190, 201, 194, 167, 2, 172, 6, 239, 23, 238, 93, 80, + 101, 33, 66, 169, 91, 62, 200, 53, 237, 60, 13, 153, 239, 113, 130, 89, 39, 31, 135, 171, 216, 15, 27, + 52, 211, 39, 1, 11, 
241, 142, 221, 159, 4, 194, 151, 228, 55, 181, 130, 78, 168, 15, 130, 99, 119, 69, + 88, 142, 225, 98, 39, 249, 110, 85, 163, 119, 29, 108, 180, 172, 56, 9, 164, 50, 189, 149, 129, 200, 50, + 67, 237, 172, 47, 139, 53, 67, 24, 20, 88, 35, 51, 190, 64, 69, 209, 169, 193, 10, 219, 226, 151, 160, + 45, 6, 5, 242, 3, 148, 84, 167, 80, 96, 16, 133, 195, 185, 179, 198, 100, 21, 179, 167, 158, 113, 86, + 248, 192, 17, 141, 110, 77, 173, 168, 91, 221, 16, 73, 2, 90, 27, 214, 100, 239, 213, 83, 175, 175, 59, + 63, 60, 98, 133, 120, 201, 179, 100, 171, 143, 140, 62, 140, 10, 229, 36, 52, 56, 125, 13, 239, 24, 177, + 158, 77, 65, 137, 237, 225, 237, 25, 6, 148, 1, 154, 202, 226, 90, 81, 139, 112, 229, 112, 41, 43, 54, + 238, 147, 20, 221, 152, 198, 142, 209, 249, 54, 145, 25, 15, 229, 80, 174, 27, 139, 76, 173, 20, 86, 192, + 63, 159, 160, 117, 168, 168, 236, 181, 107, 44, 203, 195, 251, 185, 18, 249, 154, 192, 18, 152, 248, 80, 93, + 42, 51, 30, 90, 239, 24, 210, 237, 249, 55, 102, 79, 23, 202, 123, 196, 73, 171, 186, 248, 173, 113, 207, + 46, 67, 41, 221, 218, 20, 137, 104, 109, 73, 193, 8, 140, 99, 34, 98, 221, 74, 224, 246, 221, 48, 130, + 9, 53, 56, 16, 109, 210, 247, 9, 124, 7, 219, 218, 225, 138, 177, 44, 46, 4, 30, 236, 101, 227, 41, + 18, 115, 21, 115, 18, 75, 254, 54, 171, 228, 76, 2, 50, 39, 16, 56, 82, 74, 210, 121, 39, 92, 209, + 112, 121, 13, 47, 134, 244, 65, 120, 86, 172, 27, 155, 115, 22, 2, 71, 131, 78, 108, 192, 246, 254, 237, + 74, 22, 37, 158, 31, 81, 203, 127, 146, 194, 210, 148, 158, 78, 231, 170, 122, 57, 178, 112, 253, 75, 228, + 218, 90, 192, 82, 134, 238, 220, 182, 90, 219, 169, 154, 113, 33, 240, 183, 147, 174, 140, 5, 226, 182, 104, + 148, 227, 188, 58, 9, 76, 23, 137, 245, 156, 113, 50, 25, 14, 188, 78, 69, 70, 172, 25, 37, 239, 90, + 204, 142, 127, 97, 254, 172, 106, 222, 165, 158, 226, 212, 160, 137, 23, 140, 234, 157, 201, 126, 160, 243, 229, + 55, 243, 153, 216, 229, 130, 75, 71, 69, 217, 85, 53, 217, 175, 244, 1, 201, 169, 97, 187, 234, 42, 
200, + 13, 103, 80, 100, 61, 19, 222, 235, 43, 44, 97, 244, 205, 57, 132, 240, 92, 12, 102, 171, 85, 160, 48, + 94, 232, 29, 106, 129, 170, 160, 225, 90, 95, 89, 25, 248, 45, 101, 62, 207, 219, 97, 119, 27, 84, 119, + 158, 158, 4, 127, 6, 252, 187, 80, 59, 37, 217, 182, 235, 27, 75, 11, 25, 109, 33, 32, 232, 34, 193, + 80, 49, 207, 87, 77, 227, 163, 7, 80, 85, 185, 30, 46, 104, 229, 84, 65, 128, 217, 179, 129, 83, 254, + 149, 53, 139, 242, 157, 180, 190, 151, 43, 192, 109, 125, 234, 239, 196, 19, 100, 213, 154, 147, 120, 129, 184, + 132, 161, 117, 209, 205, 216, 80, 1, 159, 233, 2, 65, 99, 171, 117, 209, 84, 177, 203, 47, 58, 216, 209, + 198, 161, 72, 93, 69, 6, 121, 242, 116, 61, 133, 98, 126, 31, 24, 124, 215, 136, 4, 155, 210, 39, 158, + 216, 145, 73, 100, 177, 125, 92, 232, 219, 147, 69, 38, 43, 110, 122, 88, 101, 141, 51, 73, 49, 117, 35, + 49, 12, 115, 98, 112, 35, 231, 32, 89, 28, 157, 202, 188, 90, 1, 176, 121, 71, 239, 160, 176, 178, 64, + 168, 101, 223, 133, 4, 10, 211, 109, 29, 86, 241, 248, 38, 60, 48, 128, 106, 212, 74, 179, 245, 229, 99, + 23, 200, 232, 43, 139, 228, 37, 1, 199, 101, 222, 11, 127, 172, 54, 58, 62, 64, 32, 81, 24, 1, 163, + 168, 24, 102, 72, 134, 37, 56, 86, 27, 98, 94, 45, 151, 153, 18, 104, 108, 16, 251, 73, 174, 85, 55, + 188, 242, 216, 83, 27, 110, 136, 198, 180, 181, 107, 86, 138, 122, 89, 79, 99, 12, 102, 100, 94, 167, 119, + 8, 26, 254, 232, 157, 87, 76, 240, 155, 48, 47, 159, 233, 27, 193, 38, 188, 220, 194, 25, 33, 181, 120, + 206, 214, 244, 106, 179, 207, 4, 137, 178, 202, 205, 46, 196, 93, 130, 183, 242, 21, 158, 59, 66, 49, 55, + 182, 38, 180, 71, 60, 218, 77, 241, 15, 238, 8, 132, 243, 160, 202, 12, 26, 179, 169, 250, 224, 105, 233, + 124, 24, 155, 102, 254, 200, 165, 4, 210, 177, 156, 153, 70, 151, 251, 174, 215, 28, 215, 181, 143, 75, 8, + 123, 245, 228, 228, 190, 140, 228, 72, 115, 38, 251, 172, 195, 68, 79, 105, 12, 198, 54, 168, 120, 83, 61, + 146, 239, 73, 97, 239, 29, 88, 147, 254, 7, 144, 221, 101, 197, 239, 229, 
98, 205, 135, 67, 9, 106, 81, + 187, 186, 163, 199, 144, 124, 73, 64, 239, 123, 181, 184, 205, 186, 93, 180, 197, 27, 53, 156, 7, 40, 4, + 151, 101, 26, 36, 201, 42, 174, 140, 251, 220, 73, 163, 195, 24, 86, 206, 0, 48, 79, 23, 223, 72, 102, + 181, 85, 132, 141, 24, 112, 150, 72, 217, 127, 204, 111, 73, 189, 140, 190, 80, 216, 36, 205, 131, 43, 116, + 251, 57, 166, 54, 37, 94, 230, 157, 119, 37, 166, 87, 93, 31, 166, 60, 37, 191, 136, 101, 240, 246, 24, + 75, 160, 31, 215, 173, 175, 101, 155, 85, 106, 201, 147, 39, 244, 100, 55, 250, 144, 165, 246, 175, 229, 114, + 56, 77, 175, 121, 191, 161, 242, 158, 45, 135, 121, 33, 227, 69, 195, 108, 225, 27, 18, 165, 221, 193, 253, + 153, 119, 79, 191, 222, 190, 8, 231, 95, 126, 151, 34, 125, 138, 42, 67, 32, 65, 128, 37, 157, 12, 108, + 106, 250, 130, 98, 145, 155, 25, 98, 116, 115, 111, 38, 114, 143, 125, 225, 33, 252, 159, 251, 189, 88, 133, + 117, 218, 98, 178, 184, 108, 194, 56, 105, 112, 47, 58, 36, 139, 120, 83, 9, 150, 10, 69, 224, 228, 192, + 68, 9, 198, 150, 170, 129, 249, 91, 214, 143, 181, 91, 75, 74, 176, 67, 207, 148, 21, 173, 239, 189, 52, + 218, 216, 9, 211, 105, 138, 142, 114, 118, 172, 194, 21, 43, 176, 207, 170, 141, 111, 137, 74, 187, 216, 69, + 141, 49, 38, 232, 44, 2, 6, 159, 128, 229, 33, 40, 66, 137, 212, 116, 158, 86, 21, 92, 26, 72, 37, + 61, 239, 131, 55, 84, 228, 12, 160, 210, 244, 163, 148, 114, 208, 184, 213, 62, 57, 14, 65, 149, 155, 244, + 81, 59, 227, 98, 113, 80, 93, 247, 68, 193, 50, 232, 234, 245, 155, 233, 206, 51, 158, 106, 197, 37, 116, + 114, 50, 192, 180, 149, 14, 7, 44, 251, 209, 165, 72, 222, 115, 17, 43, 32, 253, 138, 20, 136, 241, 171, + 220, 199, 86, 172, 59, 143, 140, 123, 222, 145, 179, 145, 111, 100, 136, 95, 40, 169, 18, 202, 0, 72, 246, + 68, 194, 75, 23, 100, 71, 237, 106, 244, 115, 116, 6, 32, 31, 60, 154, 78, 24, 92, 238, 241, 204, 106, + 75, 64, 166, 251, 176, 6, 67, 223, 129, 171, 242, 16, 94, 165, 17, 178, 95, 136, 76, 202, 136, 218, 95, + 173, 211, 38, 11, 156, 40, 32, 
142, 158, 150, 117, 173, 218, 82, 137, 120, 187, 222, 14, 196, 114, 253, 99, + 157, 111, 65, 139, 198, 150, 38, 60, 26, 105, 70, 58, 74, 177, 157, 15, 0, 47, 76, 119, 30, 89, 167, + 206, 252, 177, 136, 215, 37, 151, 41, 1, 146, 79, 84, 211, 121, 105, 194, 128, 65, 43, 78, 152, 104, 223, + 121, 12, 203, 158, 7, 218, 126, 79, 124, 101, 215, 152, 20, 243, 137, 89, 219, 155, 46, 135, 154, 218, 122, + 5, 43, 151, 59, 106, 210, 33, 9, 140, 126, 90, 19, 139, 138, 114, 47, 13, 184, 31, 95, 225, 251, 248, + 55, 142, 41, 39, 246, 125, 175, 249, 22, 187, 85, 5, 13, 140, 54, 95, 149, 228, 152, 74, 208, 27, 88, + 11, 254, 206, 164, 169, 55, 80, 75, 57, 23, 80, 192, 250, 43, 32, 58, 74, 252, 46, 208, 53, 167, 125, + 176, 169, 148, 117, 70, 203, 80, 58, 223, 229, 154, 183, 11, 8, 253, 238, 53, 192, 92, 19, 51, 171, 213, + 79, 210, 206, 66, 187, 242, 124, 97, 219, 44, 153, 221, 103, 239, 121, 75, 20, 45, 245, 183, 198, 29, 42, + 131, 248, 215, 227, 10, 122, 117, 98, 54, 130, 4, 40, 147, 52, 52, 3, 228, 160, 167, 201, 47, 155, 254, + 15, 122, 96, 194, 127, 244, 209, 82, 210, 246, 207, 114, 190, 54, 147, 95, 193, 219, 208, 166, 13, 142, 64, + 181, 69, 162, 65, 145, 87, 102, 180, 90, 137, 156, 138, 118, 215, 18, 215, 92, 234, 253, 129, 226, 226, 186, + 176, 142, 42, 49, 154, 66, 246, 7, 202, 140, 245, 16, 21, 246, 53, 67, 234, 58, 251, 155, 142, 115, 190, + 184, 132, 192, 159, 104, 99, 249, 228, 147, 147, 90, 130, 247, 0, 8, 114, 67, 187, 50, 51, 8, 44, 204, + 106, 185, 136, 102, 208, 240, 235, 23, 79, 171, 194, 96, 5, 160, 250, 29, 182, 252, 107, 184, 15, 108, 184, + 237, 208, 247, 131, 31, 184, 97, 126, 57, 235, 132, 64, 183, 108, 146, 104, 135, 17, 160, 193, 104, 16, 120, + 94, 101, 0, 182, 178, 138, 86, 56, 110, 26, 112, 69, 222, 234, 253, 27, 118, 130, 101, 251, 84, 142, 173, + 214, 204, 172, 47, 110, 115, 25, 53, 21, 138, 253, 195, 249, 164, 80, 211, 189, 151, 193, 199, 95, 109, 248, + 241, 21, 192, 146, 32, 226, 101, 133, 75, 153, 159, 102, 114, 6, 159, 153, 127, 96, 91, 206, 
130, 153, 167, + 129, 193, 238, 100, 81, 105, 36, 50, 236, 134, 86, 188, 127, 170, 30, 72, 8, 65, 155, 2, 15, 128, 237, + 203, 75, 155, 184, 52, 145, 228, 192, 218, 212, 200, 141, 126, 252, 124, 248, 177, 236, 128, 158, 245, 32, 213, + 24, 199, 217, 223, 252, 235, 82, 26, 31, 19, 134, 222, 31, 208, 180, 53, 218, 19, 201, 247, 221, 73, 173, + 132, 142, 66, 26, 230, 127, 106, 12, 36, 249, 75, 142, 166, 34, 144, 133, 59, 148, 45, 122, 155, 4, 129, + 189, 16, 121, 197, 1, 164, 111, 12, 182, 164, 100, 167, 183, 15, 167, 135, 28, 189, 198, 168, 54, 112, 51, + 5, 248, 147, 85, 130, 161, 153, 33, 107, 64, 81, 71, 119, 93, 29, 14, 59, 207, 18, 196, 99, 207, 141, + 243, 152, 207, 179, 69, 222, 32, 188, 187, 253, 67, 13, 116, 99, 16, 235, 195, 176, 143, 21, 120, 109, 132, + 208, 64, 201, 227, 84, 191, 163, 40, 112, 207, 2, 39, 98, 172, 115, 185, 9, 106, 194, 181, 245, 3, 133, + 145, 24, 192, 124, 105, 214, 44, 114, 78, 160, 254, 156, 153, 242, 143, 211, 199, 192, 221, 187, 227, 56, 246, + 201, 198, 157, 32, 141, 184, 167, 194, 1, 142, 173, 57, 149, 246, 121, 84, 200, 237, 153, 158, 157, 20, 10, + 108, 36, 185, 192, 199, 218, 185, 130, 76, 207, 126, 135, 46, 219, 45, 3, 89, 51, 73, 180, 211, 27, 99, + 99, 252, 30, 250, 12, 50, 145, 145, 252, 87, 198, 40, 188, 89, 126, 224, 125, 139, 81, 18, 76, 123, 82, + 188, 94, 60, 243, 21, 138, 148, 36, 252, 8, 78, 188, 186, 204, 190, 161, 193, 27, 223, 124, 223, 81, 202, + 55, 229, 212, 62, 32, 248, 254, 225, 152, 195, 113, 185, 165, 164, 117, 251, 189, 134, 89, 9, 231, 4, 217, + 54, 194, 89, 236, 194, 34, 144, 169, 171, 31, 23, 52, 220, 79, 236, 179, 190, 1, 63, 142, 34, 55, 203, + 138, 12, 17, 86, 137, 152, 170, 178, 102, 182, 9, 186, 79, 195, 44, 189, 49, 181, 118, 118, 133, 155, 92, + 182, 157, 79, 34, 155, 77, 26, 2, 224, 173, 128, 87, 167, 134, 212, 193, 106, 111, 143, 9, 145, 189, 2, + 147, 209, 80, 225, 247, 67, 9, 92, 73, 252, 129, 162, 176, 171, 204, 168, 115, 253, 173, 128, 217, 151, 211, + 49, 51, 200, 237, 229, 59, 28, 149, 
234, 188, 127, 20, 6, 54, 179, 224, 73, 64, 9, 49, 200, 112, 43, + 40, 144, 203, 183, 186, 40, 109, 185, 9, 212, 200, 67, 238, 3, 174, 58, 129, 89, 110, 182, 120, 169, 52, + 173, 23, 157, 8, 83, 9, 28, 161, 223, 158, 1, 134, 169, 121, 2, 173, 178, 8, 220, 195, 139, 164, 57, + 70, 190, 236, 85, 49, 121, 231, 19, 92, 160, 11, 213, 170, 60, 102, 77, 126, 51, 218, 204, 109, 46, 118, + 157, 7, 189, 21, 2, 238, 133, 216, 101, 117, 83, 92, 104, 52, 183, 220, 213, 72, 154, 225, 160, 76, 18, + 28, 35, 157, 19, 2, 32, 91, 228, 250, 11, 187, 81, 96, 57, 47, 156, 51, 43, 12, 193, 26, 26, 162, + 197, 153, 67, 100, 112, 132, 88, 117, 218, 122, 183, 197, 98, 104, 235, 21, 114, 128, 238, 15, 229, 244, 52, + 78, 122, 180, 115, 13, 54, 80, 160, 14, 92, 199, 138, 95, 98, 58, 148, 173, 72, 3, 86, 56, 218, 244, + 63, 60, 26, 233, 129, 6, 54, 153, 109, 54, 18, 184, 207, 232, 63, 157, 200, 22, 163, 24, 18, 161, 206, + 143, 1, 154, 52, 12, 212, 238, 26, 138, 7, 211, 112, 147, 138, 47, 54, 211, 169, 210, 43, 146, 34, 9, + 123, 130, 206, 161, 211, 221, 101, 81, 103, 220, 250, 182, 34, 160, 222, 134, 2, 113, 123, 70, 213, 82, 75, + 42, 228, 184, 49, 174, 60, 223, 243, 28, 66, 176, 10, 92, 195, 227, 124, 249, 105, 53, 93, 153, 173, 164, + 244, 162, 59, 242, 251, 93, 11, 199, 50, 165, 18, 49, 82, 19, 223, 113, 100, 92, 185, 108, 222, 93, 9, + 189, 161, 254, 61, 235, 125, 52, 149, 173, 42, 25, 193, 65, 30, 154, 217, 164, 170, 158, 149, 216, 43, 88, + 179, 16, 254, 220, 119, 70, 243, 40, 221, 129, 124, 110, 187, 251, 149, 56, 142, 216, 44, 248, 75, 113, 112, + 106, 225, 150, 207, 137, 15, 88, 33, 30, 234, 252, 106, 92, 166, 103, 113, 89, 187, 199, 191, 167, 152, 181, + 140, 86, 118, 242, 118, 97, 182, 227, 129, 62, 106, 129, 126, 173, 100, 253, 47, 140, 204, 148, 134, 31, 179, + 75, 238, 188, 227, 217, 212, 176, 127, 251, 69, 68, 220, 187, 230, 25, 35, 175, 233, 15, 132, 128, 85, 192, + 94, 92, 191, 222, 151, 153, 243, 113, 96, 70, 43, 178, 223, 172, 147, 27, 135, 198, 72, 143, 144, 251, 47, + 
249, 251, 214, 243, 36, 148, 241, 151, 47, 88, 5, 115, 164, 87, 69, 215, 226, 51, 238, 222, 243, 244, 193, + 84, 238, 41, 254, 55, 36, 196, 6, 65, 148, 14, 232, 237, 47, 216, 233, 129, 88, 245, 157, 2, 55, 181, + 121, 32, 6, 99, 145, 209, 239, 131, 16, 237, 50, 33, 241, 103, 143, 135, 10, 36, 34, 164, 87, 191, 138, + 115, 92, 158, 170, 206, 202, 79, 47, 226, 129, 0, 67, 39, 72, 121, 42, 133, 155, 16, 149, 31, 112, 197, + 169, 59, 218, 68, 39, 199, 84, 35, 147, 17, 76, 208, 33, 37, 167, 167, 92, 17, 67, 210, 251, 216, 150, + 108, 252, 4, 108, 184, 124, 179, 135, 62, 140, 177, 1, 1, 25, 239, 73, 253, 83, 18, 70, 63, 48, 10, + 153, 137, 241, 19, 210, 65, 86, 65, 217, 190, 104, 55, 144, 98, 169, 55, 223, 82, 230, 158, 211, 47, 213, + 228, 111, 66, 7, 104, 91, 222, 242, 10, 198, 112, 18, 194, 145, 3, 123, 16, 195, 76, 100, 17, 7, 176, + 105, 178, 158, 92, 186, 159, 186, 217, 225, 36, 134, 156, 16, 9, 142, 147, 76, 210, 120, 162, 106, 67, 218, + 11, 134, 43, 174, 214, 173, 138, 210, 93, 129, 70, 238, 162, 87, 27, 137, 189, 204, 106, 231, 61, 40, 49, + 44, 143, 10, 75, 7, 225, 167, 150, 126, 251, 224, 171, 99, 161, 98, 21, 96, 224, 179, 38, 172, 188, 180, + 40, 196, 63, 76, 119, 76, 128, 56, 76, 122, 119, 97, 149, 16, 179, 116, 246, 147, 165, 157, 177, 86, 237, + 130, 95, 139, 30, 196, 37, 128, 53, 184, 248, 97, 4, 146, 74, 251, 56, 220, 118, 79, 154, 186, 223, 33, + 130, 179, 88, 102, 178, 149, 203, 119, 160, 220, 129, 137, 37, 215, 43, 24, 228, 62, 245, 90, 187, 98, 89, + 188, 72, 91, 64, 106, 9, 144, 32, 14, 212, 143, 124, 246, 236, 169, 5, 3, 251, 224, 122, 135, 202, 186, + 42, 116, 188, 86, 36, 0, 240, 244, 23, 27, 209, 229, 235, 224, 25, 215, 119, 131, 226, 44, 197, 132, 99, + 26, 48, 38, 217, 16, 231, 32, 223, 239, 103, 74, 29, 198, 90, 159, 191, 103, 186, 116, 217, 191, 92, 74, + 222, 158, 217, 133, 46, 0, 253, 182, 166, 187, 205, 216, 168, 48, 157, 69, 189, 135, 145, 152, 118, 181, 43, + 118, 184, 175, 140, 94, 253, 16, 95, 11, 210, 182, 48, 112, 62, 101, 167, 
169, 73, 105, 240, 61, 203, 74, + 25, 212, 169, 78, 76, 113, 8, 201, 183, 80, 207, 63, 164, 100, 116, 236, 221, 193, 85, 117, 36, 75, 179, + 67, 28, 220, 219, 24, 97, 119, 102, 68, 100, 119, 74, 152, 85, 101, 230, 254, 105, 162, 6, 174, 186, 37, + 233, 161, 121, 8, 126, 24, 235, 243, 55, 229, 111, 51, 129, 201, 148, 57, 128, 6, 53, 58, 28, 243, 197, + 221, 130, 232, 6, 22, 248, 203, 64, 183, 248, 212, 87, 72, 82, 200, 201, 87, 77, 51, 164, 149, 179, 51, + 99, 243, 9, 94, 208, 64, 247, 84, 124, 191, 131, 148, 196, 151, 187, 72, 151, 116, 38, 164, 174, 188, 160, + 209, 18, 61, 183, 200, 100, 226, 181, 119, 71, 87, 170, 242, 33, 35, 52, 115, 63, 137, 57, 4, 20, 127, + 250, 246, 53, 185, 202, 59, 16, 229, 159, 106, 208, 6, 8, 33, 182, 151, 36, 222, 243, 113, 20, 150, 103, + 175, 121, 54, 31, 156, 16, 205, 25, 167, 254, 139, 148, 85, 228, 90, 165, 198, 110, 80, 61, 95, 83, 136, + 188, 176, 128, 22, 118, 138, 30, 180, 252, 167, 238, 86, 147, 218, 169, 171, 45, 20, 187, 67, 239, 5, 113, + 107, 79, 182, 252, 49, 244, 175, 169, 37, 102, 178, 5, 80, 130, 5, 202, 106, 202, 40, 66, 68, 72, 214, + 189, 46, 223, 100, 70, 132, 160, 69, 61, 154, 116, 140, 177, 173, 246, 40, 159, 212, 93, 169, 79, 231, 55, + 129, 221, 81, 204, 96, 3, 61, 6, 211, 49, 236, 85, 142, 157, 105, 70, 138, 148, 38, 53, 187, 73, 60, + 212, 54, 19, 91, 113, 66, 242, 33, 60, 55, 243, 76, 219, 210, 147, 191, 88, 146, 117, 250, 27, 212, 219, + 192, 53, 70, 104, 127, 111, 217, 22, 131, 176, 45, 152, 90, 188, 120, 150, 57, 99, 71, 172, 51, 22, 159, + 164, 73, 229, 203, 116, 250, 56, 129, 24, 75, 191, 24, 209, 179, 217, 208, 72, 40, 129, 75, 225, 229, 166, + 182, 182, 185, 129, 173, 79, 200, 54, 152, 135, 5, 32, 157, 58, 89, 68, 172, 164, 101, 43, 36, 134, 126, + 144, 161, 90, 197, 248, 143, 18, 144, 148, 220, 33, 126, 182, 222, 124, 121, 218, 121, 129, 36, 190, 141, 49, + 166, 13, 95, 223, 69, 22, 171, 48, 18, 83, 211, 194, 199, 58, 54, 108, 168, 75, 235, 19, 176, 98, 32, + 51, 123, 41, 195, 197, 48, 196, 211, 
249, 151, 136, 175, 221, 184, 121, 85, 64, 235, 10, 202, 166, 51, 203, + 229, 215, 189, 179, 108, 114, 207, 191, 164, 114, 114, 20, 174, 200, 138, 247, 104, 52, 47, 199, 45, 211, 126, + 219, 103, 93, 36, 112, 130, 73, 142, 177, 225, 110, 175, 113, 220, 151, 181, 146, 55, 48, 127, 237, 148, 113, + 80, 69, 51, 52, 232, 118, 247, 97, 110, 93, 180, 98, 214, 126, 0, 127, 75, 160, 153, 248, 246, 85, 57, + 73, 127, 38, 99, 2, 141, 151, 29, 120, 147, 158, 155, 240, 182, 101, 228, 23, 242, 96, 155, 29, 210, 181, + 33, 108, 175, 165, 146, 34, 131, 165, 196, 150, 11, 187, 151, 223, 35, 187, 179, 80, 118, 18, 64, 153, 78, + 150, 84, 94, 145, 165, 62, 84, 165, 223, 97, 85, 53, 70, 19, 246, 149, 163, 88, 213, 249, 191, 160, 97, + 2, 57, 98, 103, 143, 174, 219, 245, 147, 41, 101, 249, 119, 199, 20, 42, 94, 11, 176, 136, 208, 253, 212, + 65, 102, 208, 82, 89, 9, 34, 60, 88, 105, 52, 146, 0, 138, 156, 141, 57, 73, 234, 39, 250, 96, 207, + 56, 56, 148, 67, 160, 48, 254, 250, 28, 105, 37, 232, 27, 8, 202, 3, 111, 194, 126, 15, 79, 197, 135, + 24, 43, 104, 93, 32, 22, 254, 146, 245, 159, 56, 99, 32, 199, 28, 142, 115, 176, 16, 75, 10, 151, 93, + 124, 193, 170, 125, 193, 198, 47, 122, 108, 53, 209, 137, 74, 141, 145, 165, 196, 47, 61, 161, 177, 192, 21, + 20, 144, 98, 215, 26, 111, 28, 140, 234, 117, 155, 164, 238, 124, 86, 180, 200, 228, 208, 97, 173, 52, 149, + 254, 204, 38, 164, 219, 112, 222, 190, 238, 69, 20, 229, 226, 129, 54, 39, 249, 53, 168, 173, 102, 68, 110, + 155, 203, 33, 51, 251, 136, 162, 160, 247, 187, 212, 41, 57, 15, 30, 204, 182, 156, 16, 117, 209, 140, 43, + 250, 124, 170, 20, 82, 61, 224, 132, 112, 199, 246, 116, 197, 171, 220, 50, 250, 83, 159, 244, 141, 204, 42, + 213, 134, 233, 194, 170, 111, 242, 159, 133, 120, 55, 5, 222, 130, 103, 77, 73, 124, 116, 146, 183, 233, 207, + 245, 25, 193, 220, 249, 89, 202, 160, 225, 123, 243, 111, 126, 227, 108, 118, 115, 33, 145, 13, 245, 226, 78, + 254, 4, 65, 58, 38, 67, 229, 246, 222, 155, 62, 46, 151, 111, 238, 213, 132, 
214, 101, 22, 63, 121, 177, + 22, 89, 197, 235, 89, 36, 147, 135, 205, 183, 211, 46, 16, 152, 212, 133, 163, 36, 119, 108, 120, 33, 113, + 14, 103, 53, 113, 166, 229, 94, 221, 221, 135, 166, 222, 182, 129, 133, 127, 165, 68, 51, 111, 182, 253, 68, + 19, 172, 218, 168, 132, 203, 95, 2, 245, 203, 214, 197, 37, 63, 199, 127, 32, 89, 150, 62, 214, 89, 161, + 59, 190, 42, 203, 42, 32, 227, 39, 199, 207, 138, 181, 53, 155, 229, 118, 133, 45, 144, 155, 218, 201, 86, + 212, 103, 22, 111, 198, 236, 27, 210, 173, 64, 173, 92, 20, 193, 21, 25, 164, 72, 242, 95, 30, 94, 79, + 195, 217, 88, 55, 24, 221, 48, 149, 110, 224, 158, 30, 212, 223, 88, 93, 2, 78, 149, 109, 146, 103, 101, + 175, 118, 11, 59, 141, 76, 235, 38, 138, 25, 249, 65, 187, 119, 240, 185, 175, 71, 110, 202, 116, 40, 217, + 182, 60, 191, 74, 127, 50, 131, 65, 74, 4, 209, 170, 44, 246, 246, 139, 235, 61, 243, 71, 59, 179, 213, + 62, 203, 194, 150, 106, 45, 177, 91, 30, 63, 202, 6, 231, 216, 81, 57, 79, 21, 224, 185, 57, 151, 209, + 206, 46, 218, 15, 2, 21, 30, 104, 242, 87, 98, 239, 167, 112, 235, 20, 152, 236, 216, 153, 196, 48, 211, + 248, 46, 169, 63, 95, 253, 87, 198, 158, 62, 115, 231, 155, 113, 54, 216, 225, 114, 23, 180, 222, 80, 124, + 109, 104, 184, 219, 73, 226, 37, 10, 15, 90, 162, 153, 60, 217, 104, 31, 31, 104, 207, 40, 26, 185, 238, + 216, 112, 160, 245, 51, 177, 6, 94, 75, 59, 251, 107, 182, 120, 213, 98, 90, 136, 161, 230, 81, 223, 216, + 102, 153, 47, 136, 249, 197, 38, 189, 100, 9, 99, 101, 72, 2, 246, 180, 63, 123, 82, 52, 203, 53, 34, + 34, 7, 201, 153, 6, 92, 148, 193, 186, 183, 137, 145, 193, 194, 199, 154, 204, 251, 163, 16, 150, 136, 139, + 206, 160, 137, 207, 42, 249, 117, 35, 58, 190, 34, 252, 26, 254, 90, 235, 166, 94, 143, 104, 79, 53, 169, + 40, 135, 161, 18, 164, 99, 130, 192, 75, 46, 168, 225, 47, 11, 150, 120, 230, 235, 90, 100, 237, 164, 126, + 69, 122, 95, 29, 202, 121, 245, 75, 191, 1, 95, 143, 57, 139, 142, 186, 12, 57, 227, 250, 33, 13, 214, + 243, 138, 80, 141, 23, 127, 154, 
69, 108, 72, 51, 86, 77, 247, 128, 115, 225, 204, 211, 136, 11, 66, 9, + 241, 171, 151, 35, 122, 103, 36, 96, 171, 44, 161, 51, 135, 179, 218, 45, 170, 3, 161, 242, 133, 90, 240, + 11, 240, 58, 194, 3, 82, 87, 110, 132, 226, 87, 95, 44, 11, 239, 130, 17, 175, 16, 220, 61, 153, 110, + 160, 191, 36, 15, 28, 137, 21, 180, 109, 168, 26, 57, 251, 111, 26, 36, 132, 84, 92, 250, 167, 222, 162, + 246, 209, 195, 204, 201, 167, 240, 57, 61, 6, 73, 64, 171, 155, 45, 234, 86, 226, 3, 127, 191, 224, 97, + 247, 176, 13, 199, 200, 218, 148, 163, 188, 170, 163, 36, 157, 177, 87, 44, 228, 6, 79, 113, 38, 194, 190, + 105, 189, 241, 146, 75, 144, 87, 208, 85, 171, 55, 2, 151, 28, 85, 99, 157, 51, 37, 127, 197, 95, 99, + 93, 34, 18, 251, 241, 238, 34, 46, 59, 91, 230, 8, 227, 133, 57, 245, 132, 88, 210, 183, 156, 81, 180, + 236, 141, 0, 21, 185, 247, 86, 243, 245, 6, 52, 50, 14, 106, 206, 43, 2, 80, 72, 227, 154, 85, 224, + 84, 228, 11, 162, 232, 160, 212, 56, 108, 17, 174, 165, 233, 208, 238, 213, 68, 154, 142, 179, 105, 188, 75, + 33, 151, 143, 105, 61, 10, 110, 98, 119, 113, 98, 58, 236, 39, 19, 30, 252, 190, 108, 202, 70, 38, 139, + 26, 175, 55, 160, 47, 10, 201, 133, 176, 212, 236, 76, 236, 84, 217, 101, 73, 18, 85, 84, 6, 82, 140, + 175, 230, 28, 231, 89, 175, 236, 68, 156, 199, 125, 65, 95, 0, 120, 157, 169, 132, 208, 53, 103, 201, 247, + 36, 205, 246, 216, 178, 132, 50, 247, 157, 219, 118, 131, 9, 32, 237, 68, 229, 246, 146, 193, 48, 128, 242, + 157, 150, 101, 195, 83, 167, 94, 212, 30, 78, 15, 93, 37, 203, 191, 164, 142, 181, 241, 34, 142, 229, 93, + 40, 183, 203, 213, 93, 235, 38, 154, 175, 68, 248, 237, 194, 138, 205, 64, 222, 196, 187, 234, 122, 96, 204, + 18, 101, 19, 179, 140, 227, 180, 182, 33, 153, 61, 175, 104, 129, 252, 136, 112, 241, 113, 233, 32, 174, 82, + 171, 202, 218, 97, 1, 96, 45, 181, 11, 89, 192, 189, 234, 198, 145, 146, 215, 82, 8, 99, 32, 48, 47, + 138, 156, 107, 136, 123, 196, 160, 113, 197, 166, 119, 254, 250, 119, 182, 134, 169, 79, 136, 24, 86, 0, 29, 
+ 55, 241, 84, 42, 127, 64, 138, 208, 101, 146, 167, 102, 78, 7, 70, 102, 127, 152, 215, 177, 57, 211, 108, + 235, 152, 241, 202, 210, 13, 38, 83, 94, 186, 206, 80, 11, 13, 220, 19, 152, 219, 181, 47, 233, 217, 188, + 49, 95, 225, 17, 59, 163, 156, 3, 77, 98, 204, 50, 239, 133, 94, 60, 155, 50, 134, 94, 106, 70, 186, + 226, 36, 215, 35, 243, 69, 191, 179, 154, 146, 75, 24, 32, 80, 154, 123, 225, 59, 66, 31, 235, 73, 25, + 89, 40, 123, 191, 151, 243, 8, 209, 185, 203, 200, 252, 39, 98, 236, 185, 67, 65, 124, 78, 11, 235, 191, + 227, 150, 45, 20, 111, 23, 159, 16, 249, 89, 10, 3, 102, 127, 24, 53, 245, 57, 33, 74, 10, 177, 145, + 29, 29, 84, 248, 77, 169, 15, 5, 180, 114, 9, 205, 10, 213, 137, 183, 189, 175, 40, 66, 138, 10, 228, + 170, 149, 63, 202, 111, 238, 53, 169, 125, 196, 127, 251, 87, 226, 217, 51, 55, 107, 245, 172, 190, 202, 139, + 217, 202, 28, 91, 110, 47, 136, 111, 138, 231, 54, 84, 198, 248, 176, 11, 40, 84, 166, 97, 45, 15, 223, + 222, 137, 182, 243, 108, 28, 44, 188, 232, 196, 191, 43, 149, 34, 202, 50, 85, 88, 85, 32, 200, 212, 76, + 92, 92, 177, 50, 41, 70, 37, 144, 226, 32, 208, 187, 112, 196, 205, 182, 36, 162, 47, 43, 16, 206, 211, + 227, 182, 228, 178, 92, 239, 83, 57, 132, 244, 87, 247, 213, 37, 111, 16, 21, 71, 2, 139, 13, 183, 108, + 156, 124, 249, 248, 107, 114, 162, 32, 162, 193, 253, 23, 78, 163, 123, 250, 204, 44, 130, 199, 97, 97, 199, + 103, 118, 29, 124, 198, 164, 41, 163, 134, 181, 200, 143, 95, 150, 183, 130, 224, 26, 101, 243, 22, 3, 50, + 193, 233, 105, 251, 90, 208, 171, 100, 79, 238, 107, 65, 254, 126, 159, 25, 23, 174, 198, 71, 55, 163, 71, + 40, 224, 245, 227, 185, 12, 236, 76, 98, 104, 204, 105, 230, 10, 81, 69, 65, 245, 53, 144, 149, 134, 77, + 139, 246, 126, 29, 47, 48, 86, 144, 84, 190, 249, 183, 115, 139, 178, 19, 136, 106, 225, 227, 189, 223, 185, + 79, 199, 246, 251, 9, 114, 158, 219, 204, 198, 97, 171, 69, 39, 64, 2, 250, 76, 196, 47, 238, 9, 240, + 8, 140, 168, 20, 161, 210, 117, 180, 190, 90, 195, 29, 176, 162, 247, 
79, 194, 141, 120, 36, 137, 210, 250, + 57, 153, 184, 247, 58, 57, 46, 184, 61, 203, 170, 172, 63, 206, 219, 125, 33, 102, 16, 179, 118, 198, 7, + 115, 215, 226, 226, 1, 123, 91, 59, 231, 238, 48, 98, 248, 121, 17, 89, 29, 228, 129, 182, 197, 165, 95, + 34, 174, 44, 70, 40, 13, 168, 160, 96, 22, 120, 230, 176, 206, 206, 48, 41, 26, 78, 84, 16, 203, 205, + 197, 229, 70, 126, 143, 152, 206, 162, 115, 159, 19, 40, 17, 231, 253, 53, 208, 121, 201, 142, 230, 21, 8, + 212, 105, 205, 221, 151, 28, 122, 160, 98, 162, 209, 25, 25, 39, 170, 113, 92, 160, 217, 196, 71, 53, 32, + 82, 218, 33, 200, 186, 39, 68, 93, 144, 223, 228, 99, 113, 168, 94, 104, 195, 60, 136, 13, 193, 210, 109, + 101, 103, 156, 250, 214, 6, 204, 167, 109, 135, 245, 90, 118, 101, 236, 208, 244, 0, 47, 3, 142, 198, 247, + 15, 9, 97, 206, 117, 65, 98, 121, 129, 243, 251, 3, 24, 252, 207, 89, 180, 21, 139, 88, 237, 227, 128, + 10, 156, 105, 112, 192, 99, 108, 22, 134, 112, 1, 153, 18, 239, 127, 30, 73, 85, 151, 192, 108, 51, 250, + 56, 110, 127, 92, 106, 48, 186, 203, 123, 23, 135, 128, 104, 180, 114, 60, 253, 125, 184, 27, 235, 62, 144, + 216, 76, 54, 44, 212, 4, 2, 234, 208, 103, 212, 67, 169, 51, 104, 215, 198, 47, 133, 80, 11, 67, 209, + 63, 0, 122, 243, 127, 43, 39, 142, 227, 108, 233, 61, 233, 69, 151, 39, 162, 1, 145, 61, 105, 21, 221, + 53, 247, 57, 3, 108, 40, 21, 54, 22, 95, 59, 5, 225, 82, 105, 35, 67, 154, 102, 254, 23, 208, 141, + 109, 174, 218, 169, 241, 71, 77, 160, 12, 65, 145, 118, 96, 233, 165, 37, 51, 1, 106, 238, 244, 226, 206, + 72, 98, 116, 151, 145, 202, 199, 240, 204, 177, 79, 164, 142, 100, 61, 145, 34, 80, 21, 180, 156, 119, 35, + 242, 81, 97, 18, 239, 27, 3, 249, 158, 70, 216, 175, 248, 93, 79, 175, 241, 207, 100, 251, 239, 178, 12, + 80, 77, 227, 178, 5, 111, 19, 57, 10, 30, 63, 30, 69, 203, 179, 170, 236, 168, 114, 26, 34, 189, 9, + 177, 5, 118, 6, 94, 176, 180, 227, 36, 32, 194, 232, 17, 167, 132, 100, 3, 43, 156, 227, 242, 137, 63, + 72, 3, 4, 173, 230, 166, 174, 197, 88, 34, 
192, 130, 246, 98, 56, 188, 145, 189, 188, 145, 145, 99, 192, + 64, 68, 13, 218, 134, 53, 212, 137, 61, 7, 29, 97, 171, 26, 170, 28, 92, 51, 115, 126, 35, 189, 147, + 75, 100, 149, 11, 47, 47, 61, 182, 103, 145, 123, 78, 31, 102, 22, 139, 181, 253, 31, 138, 44, 120, 71, + 41, 79, 248, 227, 219, 157, 132, 13, 112, 152, 238, 43, 231, 118, 180, 254, 167, 75, 20, 44, 219, 191, 48, + 196, 112, 53, 35, 207, 120, 51, 66, 217, 195, 148, 177, 173, 172, 157, 225, 23, 213, 166, 131, 209, 61, 96, + 197, 164, 87, 78, 212, 40, 152, 89, 7, 78, 239, 130, 177, 124, 100, 14, 239, 53, 237, 121, 132, 207, 205, + 226, 46, 246, 139, 44, 241, 249, 142, 94, 52, 138, 90, 197, 204, 79, 240, 8, 62, 78, 97, 187, 88, 50, + 235, 149, 35, 197, 149, 87, 94, 103, 56, 124, 136, 4, 187, 33, 163, 240, 3, 73, 142, 53, 60, 227, 82, + 144, 41, 158, 180, 24, 207, 97, 155, 185, 235, 97, 209, 105, 79, 125, 2, 6, 86, 6, 8, 168, 247, 175, + 66, 53, 197, 231, 130, 168, 79, 159, 117, 69, 80, 82, 221, 56, 175, 137, 6, 73, 115, 122, 211, 160, 133, + 41, 24, 207, 38, 191, 2, 150, 129, 156, 121, 227, 17, 166, 66, 223, 29, 142, 83, 24, 106, 206, 136, 158, + 232, 248, 237, 120, 21, 48, 58, 126, 28, 162, 137, 74, 205, 207, 159, 112, 46, 3, 45, 252, 153, 31, 177, + 129, 186, 157, 66, 18, 67, 185, 165, 0, 195, 107, 7, 237, 202, 238, 238, 5, 72, 238, 42, 181, 145, 187, + 187, 227, 67, 163, 199, 2, 94, 155, 95, 67, 239, 160, 135, 139, 10, 136, 222, 130, 182, 139, 51, 164, 129, + 15, 224, 43, 113, 56, 198, 63, 205, 170, 181, 68, 90, 215, 175, 37, 220, 185, 94, 99, 91, 211, 181, 247, + 5, 231, 131, 64, 28, 196, 65, 136, 214, 193, 213, 148, 18, 44, 76, 242, 173, 174, 150, 169, 221, 16, 65, + 94, 28, 225, 201, 118, 104, 115, 213, 112, 46, 184, 35, 186, 24, 198, 169, 91, 102, 65, 210, 120, 26, 148, + 142, 58, 138, 61, 211, 190, 42, 14, 37, 41, 21, 29, 148, 24, 59, 192, 21, 196, 33, 110, 116, 11, 109, + 196, 141, 83, 131, 48, 183, 200, 31, 70, 228, 158, 79, 9, 222, 239, 125, 248, 143, 29, 183, 235, 219, 237, + 252, 21, 84, 14, 
105, 199, 84, 217, 68, 141, 204, 251, 5, 0, 70, 57, 200, 181, 98, 162, 185, 130, 151, + 192, 58, 116, 71, 149, 82, 76, 232, 66, 32, 127, 6, 62, 132, 132, 131, 240, 39, 113, 119, 87, 38, 238, + 79, 17, 208, 126, 174, 79, 103, 253, 189, 236, 178, 232, 68, 123, 226, 2, 130, 154, 191, 72, 90, 236, 186, + 235, 149, 46, 155, 94, 203, 96, 129, 61, 8, 57, 123, 37, 206, 240, 72, 59, 64, 43, 87, 165, 216, 176, + 161, 226, 232, 48, 101, 146, 155, 146, 227, 54, 209, 210, 243, 136, 233, 124, 209, 189, 35, 34, 163, 9, 205, + 68, 250, 166, 172, 91, 143, 247, 127, 97, 45, 122, 119, 97, 102, 77, 192, 4, 133, 192, 116, 128, 14, 90, + 22, 161, 42, 106, 20, 53, 224, 201, 84, 178, 107, 49, 54, 33, 44, 108, 101, 111, 124, 84, 110, 218, 54, + 211, 223, 243, 70, 95, 249, 186, 236, 122, 60, 151, 159, 122, 178, 3, 68, 132, 61, 136, 250, 41, 150, 82, + 99, 87, 164, 113, 36, 143, 3, 28, 123, 147, 80, 15, 218, 90, 68, 235, 28, 69, 19, 223, 68, 154, 78, + 52, 95, 164, 242, 60, 65, 28, 63, 14, 220, 194, 155, 186, 221, 97, 248, 9, 244, 251, 238, 137, 204, 137, + 254, 127, 97, 65, 55, 135, 24, 32, 59, 115, 12, 20, 57, 232, 166, 26, 32, 237, 56, 194, 195, 57, 104, + 242, 153, 240, 212, 226, 37, 170, 93, 125, 74, 28, 106, 104, 125, 157, 250, 229, 157, 47, 219, 245, 122, 54, + 2, 238, 75, 53, 37, 160, 60, 47, 192, 97, 251, 4, 148, 142, 252, 108, 137, 43, 8, 227, 27, 122, 51, + 99, 39, 156, 195, 233, 232, 219, 5, 127, 104, 198, 84, 214, 69, 241, 81, 146, 150, 254, 137, 123, 193, 105, + 131, 81, 16, 106, 88, 10, 51, 143, 249, 37, 233, 165, 28, 133, 208, 100, 207, 212, 228, 236, 118, 103, 246, + 156, 220, 226, 199, 17, 163, 190, 122, 20, 71, 29, 105, 155, 164, 143, 129, 190, 49, 106, 249, 87, 245, 107, + 24, 140, 21, 248, 49, 57, 24, 86, 203, 83, 195, 36, 202, 78, 2, 9, 129, 244, 206, 5, 83, 151, 129, + 179, 45, 103, 129, 164, 10, 75, 52, 127, 224, 168, 211, 218, 185, 90, 167, 115, 49, 74, 129, 184, 243, 30, + 163, 223, 191, 199, 97, 254, 243, 180, 27, 91, 16, 26, 24, 172, 21, 98, 53, 0, 190, 180, 236, 
33, 187, + 56, 244, 157, 90, 196, 173, 7, 25, 201, 0, 133, 214, 51, 84, 79, 154, 170, 245, 166, 114, 82, 235, 229, + 31, 3, 127, 216, 134, 7, 197, 86, 216, 211, 198, 226, 62, 153, 32, 216, 243, 217, 95, 185, 179, 51, 80, + 53, 180, 125, 161, 45, 169, 219, 219, 160, 253, 68, 199, 19, 34, 152, 112, 136, 252, 10, 133, 105, 155, 99, + 244, 125, 216, 67, 134, 241, 106, 241, 47, 238, 186, 214, 89, 125, 147, 142, 168, 134, 32, 137, 44, 143, 130, + 93, 120, 21, 207, 120, 139, 100, 94, 133, 248, 85, 150, 24, 183, 194, 164, 149, 46, 2, 206, 203, 60, 160, + 232, 48, 112, 200, 215, 108, 201, 129, 33, 122, 175, 161, 155, 187, 121, 136, 229, 149, 15, 100, 196, 238, 65, + 157, 155, 148, 247, 31, 208, 107, 108, 9, 107, 107, 184, 248, 152, 90, 238, 233, 135, 6, 103, 243, 164, 50, + 3, 45, 193, 36, 244, 242, 90, 63, 152, 127, 54, 212, 71, 16, 247, 185, 196, 52, 145, 80, 162, 171, 197, + 213, 40, 31, 252, 76, 246, 124, 63, 163, 107, 52, 115, 134, 124, 23, 107, 142, 133, 216, 67, 195, 103, 246, + 81, 209, 150, 25, 246, 209, 64, 211, 237, 251, 96, 150, 120, 77, 232, 112, 93, 106, 184, 136, 230, 3, 74, + 4, 78, 197, 209, 54, 246, 113, 186, 140, 71, 130, 253, 122, 55, 233, 154, 243, 41, 211, 191, 170, 111, 12, + 179, 188, 148, 104, 233, 191, 212, 94, 66, 187, 33, 160, 232, 75, 124, 121, 125, 14, 67, 126, 129, 145, 64, + 241, 5, 20, 128, 85, 18, 201, 41, 87, 242, 228, 89, 212, 125, 55, 224, 96, 150, 79, 86, 242, 46, 69, + 26, 145, 162, 253, 145, 101, 163, 138, 248, 138, 71, 62, 178, 45, 14, 14, 53, 110, 42, 15, 50, 111, 6, + 171, 40, 73, 215, 207, 119, 19, 208, 145, 59, 236, 6, 164, 149, 128, 214, 244, 45, 72, 147, 145, 234, 138, + 249, 144, 22, 106, 42, 162, 181, 252, 8, 160, 214, 17, 235, 180, 123, 213, 162, 138, 245, 131, 110, 203, 36, + 38, 205, 1, 157, 56, 245, 83, 41, 116, 4, 158, 15, 89, 35, 207, 87, 203, 120, 162, 48, 138, 143, 91, + 253, 58, 177, 10, 18, 232, 142, 13, 152, 106, 67, 189, 170, 21, 178, 185, 82, 239, 178, 43, 116, 101, 149, + 8, 140, 32, 25, 202, 147, 122, 12, 122, 
132, 113, 65, 253, 251, 194, 9, 184, 130, 15, 42, 108, 60, 85, + 127, 5, 169, 58, 73, 58, 231, 27, 19, 157, 239, 239, 121, 116, 33, 39, 170, 225, 82, 176, 17, 84, 66, + 245, 90, 196, 91, 139, 79, 67, 52, 27, 55, 25, 71, 131, 77, 68, 134, 71, 236, 122, 120, 139, 150, 219, + 120, 139, 172, 152, 4, 235, 154, 109, 39, 81, 128, 247, 197, 28, 24, 155, 173, 223, 179, 59, 164, 110, 101, + 93, 57, 207, 119, 185, 57, 5, 91, 104, 84, 248, 185, 227, 243, 50, 214, 146, 115, 8, 62, 131, 105, 84, + 55, 237, 13, 229, 247, 230, 44, 125, 249, 17, 29, 140, 212, 241, 122, 46, 220, 185, 241, 42, 104, 218, 158, + 234, 230, 244, 141, 161, 132, 179, 89, 102, 4, 183, 32, 36, 91, 244, 105, 36, 185, 154, 167, 165, 11, 24, + 123, 30, 136, 242, 151, 57, 77, 86, 1, 203, 215, 187, 58, 224, 100, 245, 189, 155, 50, 212, 24, 85, 217, + 110, 69, 251, 48, 124, 194, 106, 158, 169, 245, 119, 71, 42, 120, 78, 128, 184, 116, 230, 34, 22, 21, 166, + 34, 229, 212, 167, 159, 12, 173, 82, 244, 156, 135, 239, 51, 78, 194, 169, 219, 32, 9, 75, 234, 147, 252, + 51, 214, 75, 182, 193, 45, 150, 30, 27, 20, 51, 66, 20, 212, 204, 232, 174, 87, 87, 154, 48, 199, 26, + 23, 205, 62, 86, 3, 230, 124, 215, 150, 244, 215, 150, 243, 178, 71, 216, 156, 20, 233, 230, 138, 87, 144, + 20, 83, 220, 104, 198, 162, 164, 219, 179, 245, 79, 37, 158, 147, 154, 110, 228, 122, 65, 227, 187, 114, 176, + 21, 36, 77, 232, 137, 71, 24, 108, 70, 193, 54, 113, 121, 245, 157, 64, 202, 41, 216, 151, 9, 96, 198, + 172, 14, 204, 193, 83, 108, 86, 154, 225, 215, 114, 169, 117, 20, 73, 82, 18, 37, 225, 102, 214, 248, 222, + 91, 177, 205, 253, 58, 244, 212, 78, 182, 245, 168, 81, 253, 187, 71, 6, 132, 135, 179, 208, 108, 231, 126, + 18, 195, 65, 211, 134, 16, 104, 196, 109, 173, 86, 150, 94, 116, 76, 180, 53, 75, 160, 223, 103, 250, 216, + 51, 31, 218, 153, 163, 184, 223, 252, 140, 98, 101, 187, 10, 99, 229, 82, 80, 253, 226, 79, 46, 211, 140, + 233, 239, 68, 204, 24, 22, 38, 180, 230, 158, 9, 37, 5, 116, 185, 139, 141, 17, 159, 216, 1, 144, 96, 
+ 5, 131, 201, 37, 223, 108, 107, 107, 238, 8, 83, 4, 230, 244, 86, 219, 99, 112, 243, 82, 225, 209, 91, + 10, 224, 52, 90, 246, 21, 178, 97, 69, 72, 138, 227, 172, 109, 238, 161, 238, 79, 6, 35, 143, 96, 37, + 134, 86, 89, 193, 21, 147, 198, 230, 176, 30, 172, 245, 0, 240, 45, 201, 9, 21, 70, 60, 40, 254, 64, + 206, 96, 159, 18, 154, 184, 107, 51, 238, 57, 170, 92, 165, 84, 36, 239, 27, 47, 31, 140, 159, 53, 105, + 213, 34, 43, 5, 124, 251, 110, 60, 231, 106, 99, 7, 220, 173, 20, 194, 28, 12, 100, 196, 194, 157, 126, + 87, 47, 219, 220, 134, 10, 200, 189, 216, 174, 50, 106, 61, 247, 178, 81, 127, 51, 66, 52, 58, 189, 130, + 212, 188, 61, 71, 102, 127, 94, 157, 56, 161, 143, 188, 109, 119, 246, 132, 131, 99, 60, 88, 190, 202, 237, + 203, 253, 167, 16, 54, 224, 187, 239, 115, 94, 32, 166, 152, 8, 70, 10, 55, 92, 234, 186, 173, 3, 165, + 199, 175, 183, 239, 51, 221, 232, 161, 24, 94, 158, 130, 250, 112, 207, 75, 128, 99, 157, 27, 181, 57, 0, + 157, 31, 97, 83, 128, 173, 40, 33, 190, 168, 138, 210, 87, 51, 219, 218, 64, 72, 241, 126, 153, 225, 69, + 119, 151, 214, 127, 252, 63, 161, 100, 83, 192, 218, 176, 136, 87, 69, 223, 109, 104, 38, 79, 37, 245, 172, + 88, 196, 78, 83, 73, 153, 93, 203, 171, 169, 113, 112, 137, 185, 137, 74, 244, 37, 50, 20, 229, 136, 50, + 10, 54, 73, 78, 167, 247, 127, 160, 9, 74, 248, 159, 194, 215, 170, 224, 175, 234, 84, 23, 55, 245, 223, + 63, 126, 179, 0, 139, 144, 148, 7, 210, 93, 246, 172, 243, 20, 158, 79, 51, 38, 167, 95, 234, 223, 37, + 72, 58, 60, 102, 92, 142, 123, 210, 84, 178, 138, 86, 18, 27, 0, 216, 68, 213, 69, 58, 146, 201, 120, + 7, 62, 73, 87, 189, 20, 134, 199, 86, 184, 44, 4, 65, 164, 242, 118, 250, 213, 79, 107, 98, 173, 76, + 110, 157, 68, 159, 167, 71, 87, 11, 88, 144, 44, 157, 92, 133, 182, 182, 173, 199, 187, 228, 216, 239, 242, + 122, 222, 82, 138, 58, 31, 249, 125, 149, 240, 78, 79, 32, 195, 167, 17, 181, 113, 205, 247, 115, 250, 22, + 47, 112, 141, 37, 187, 219, 134, 79, 204, 119, 68, 187, 9, 84, 102, 95, 227, 9, 
98, 230, 36, 199, 112, + 42, 112, 68, 236, 0, 191, 62, 58, 211, 91, 1, 244, 200, 223, 15, 235, 247, 200, 164, 65, 63, 29, 220, + 201, 1, 85, 193, 113, 150, 139, 99, 148, 45, 244, 34, 215, 102, 180, 207, 103, 182, 29, 124, 79, 1, 93, + 142, 65, 228, 144, 133, 62, 192, 233, 252, 170, 159, 19, 251, 50, 172, 19, 73, 46, 71, 74, 190, 69, 16, + 172, 183, 9, 86, 31, 189, 122, 148, 155, 140, 137, 174, 52, 115, 162, 122, 204, 108, 52, 20, 108, 198, 238, + 46, 229, 130, 6, 95, 189, 83, 76, 150, 189, 121, 164, 144, 141, 149, 122, 28, 185, 48, 104, 213, 192, 57, + 188, 155, 235, 215, 194, 26, 207, 233, 90, 39, 117, 8, 75, 132, 0, 195, 160, 117, 173, 214, 140, 127, 156, + 69, 19, 46, 186, 199, 144, 149, 99, 129, 229, 68, 237, 64, 42, 248, 106, 225, 169, 16, 36, 123, 31, 79, + 162, 81, 86, 117, 67, 150, 67, 191, 242, 117, 139, 104, 142, 90, 193, 180, 69, 115, 69, 236, 34, 209, 244, + 246, 58, 246, 74, 48, 116, 116, 163, 53, 157, 139, 198, 251, 166, 73, 36, 127, 226, 40, 97, 250, 40, 180, + 79, 174, 145, 228, 105, 234, 221, 209, 235, 241, 7, 184, 248, 196, 38, 33, 54, 132, 174, 207, 60, 14, 61, + 177, 56, 166, 252, 151, 99, 144, 127, 212, 184, 53, 19, 237, 78, 81, 83, 34, 220, 151, 205, 224, 160, 138, + 206, 233, 4, 247, 136, 130, 151, 219, 46, 66, 249, 213, 187, 216, 92, 35, 141, 53, 151, 135, 53, 82, 186, + 121, 98, 23, 7, 168, 204, 123, 107, 219, 138, 232, 137, 110, 171, 137, 86, 79, 167, 27, 92, 36, 106, 239, + 74, 55, 97, 223, 75, 245, 134, 223, 35, 213, 233, 71, 132, 124, 151, 197, 162, 31, 72, 143, 5, 26, 234, + 15, 106, 133, 32, 9, 212, 40, 189, 51, 186, 60, 46, 4, 84, 212, 32, 161, 191, 59, 10, 52, 29, 2, + 208, 56, 203, 74, 80, 38, 195, 13, 190, 237, 207, 204, 211, 192, 190, 156, 237, 222, 159, 168, 99, 175, 12, + 46, 221, 63, 183, 222, 216, 12, 43, 154, 154, 71, 222, 111, 12, 58, 103, 44, 110, 146, 139, 87, 55, 24, + 43, 9, 188, 143, 92, 58, 250, 21, 187, 45, 109, 147, 196, 178, 57, 65, 51, 133, 176, 177, 173, 137, 13, + 162, 46, 110, 77, 190, 231, 126, 218, 204, 71, 
198, 190, 124, 92, 176, 197, 158, 224, 239, 236, 250, 178, 171, + 235, 179, 245, 186, 120, 89, 86, 195, 41, 227, 153, 35, 133, 199, 85, 210, 194, 26, 137, 251, 48, 187, 106, + 124, 42, 33, 145, 17, 24, 163, 100, 193, 41, 172, 21, 114, 16, 132, 3, 176, 86, 216, 34, 140, 181, 175, + 183, 162, 153, 52, 196, 154, 54, 68, 134, 51, 195, 175, 233, 247, 44, 108, 60, 180, 127, 219, 31, 227, 210, + 84, 201, 27, 82, 247, 181, 149, 76, 194, 21, 19, 199, 115, 80, 52, 246, 61, 75, 212, 89, 243, 126, 0, + 68, 109, 98, 14, 59, 79, 147, 250, 218, 225, 185, 90, 60, 116, 154, 170, 241, 54, 216, 137, 233, 89, 186, + 98, 95, 30, 63, 12, 113, 74, 239, 65, 22, 18, 119, 82, 27, 112, 31, 237, 105, 219, 194, 64, 109, 144, + 36, 128, 74, 239, 115, 148, 71, 215, 64, 144, 151, 190, 119, 93, 41, 194, 180, 1, 140, 223, 153, 138, 131, + 147, 73, 33, 51, 211, 192, 234, 9, 78, 143, 62, 237, 160, 18, 125, 152, 137, 192, 121, 23, 113, 242, 70, + 199, 20, 77, 118, 197, 207, 11, 99, 121, 159, 37, 78, 65, 201, 102, 86, 22, 87, 11, 141, 214, 150, 233, + 43, 52, 88, 16, 80, 167, 73, 238, 38, 157, 143, 118, 24, 40, 213, 163, 116, 160, 223, 68, 202, 51, 230, + 69, 87, 182, 72, 85, 44, 99, 198, 165, 165, 25, 237, 137, 115, 152, 180, 7, 88, 241, 121, 182, 112, 93, + 29, 252, 31, 143, 24, 227, 241, 150, 207, 8, 253, 21, 246, 181, 158, 40, 115, 97, 51, 209, 169, 86, 108, + 10, 71, 106, 118, 104, 237, 245, 118, 58, 8, 174, 223, 120, 92, 170, 149, 10, 162, 31, 55, 189, 174, 238, + 37, 53, 223, 183, 131, 20, 181, 96, 238, 201, 167, 88, 70, 150, 190, 5, 174, 210, 160, 40, 168, 111, 132, + 33, 184, 193, 244, 47, 133, 78, 169, 170, 144, 177, 138, 228, 87, 139, 173, 96, 156, 4, 24, 54, 170, 100, + 70, 142, 4, 247, 25, 191, 232, 204, 203, 55, 130, 166, 214, 182, 194, 217, 188, 106, 217, 204, 40, 247, 225, + 250, 98, 142, 200, 100, 33, 123, 231, 221, 214, 190, 100, 229, 21, 139, 206, 250, 254, 77, 233, 112, 51, 174, + 183, 89, 54, 36, 231, 120, 230, 200, 19, 67, 206, 248, 81, 220, 54, 118, 56, 200, 75, 239, 150, 42, 41, 
+ 183, 85, 210, 202, 83, 242, 198, 221, 50, 41, 115, 161, 6, 29, 155, 231, 124, 122, 60, 175, 198, 228, 131, + 128, 75, 143, 241, 83, 84, 70, 16, 2, 148, 183, 41, 71, 207, 216, 149, 245, 85, 25, 166, 18, 140, 53, + 251, 123, 199, 131, 88, 175, 29, 222, 236, 47, 230, 32, 203, 198, 59, 196, 222, 45, 31, 145, 94, 74, 92, + 154, 128, 2, 114, 166, 129, 28, 11, 110, 71, 68, 3, 127, 105, 155, 75, 117, 144, 195, 123, 65, 245, 30, + 17, 145, 192, 208, 97, 54, 159, 196, 83, 4, 64, 71, 30, 72, 178, 64, 100, 8, 156, 206, 5, 186, 13, + 59, 240, 25, 37, 22, 150, 27, 240, 154, 13, 110, 212, 132, 41, 108, 76, 175, 127, 239, 117, 11, 66, 16, + 15, 170, 138, 248, 227, 83, 246, 148, 237, 251, 17, 57, 198, 204, 7, 46, 117, 89, 23, 33, 29, 221, 209, + 33, 203, 207, 199, 24, 148, 49, 67, 175, 108, 182, 42, 233, 124, 249, 63, 7, 167, 48, 135, 208, 157, 188, + 189, 35, 204, 18, 202, 199, 131, 168, 94, 143, 126, 76, 224, 252, 205, 238, 133, 145, 34, 189, 159, 195, 233, + 93, 139, 230, 82, 37, 52, 242, 191, 119, 84, 2, 10, 134, 108, 168, 88, 117, 148, 38, 51, 19, 153, 120, + 234, 56, 90, 167, 211, 47, 252, 61, 242, 32, 66, 17, 5, 69, 176, 24, 66, 40, 184, 181, 73, 14, 36, + 189, 9, 238, 181, 166, 139, 48, 216, 148, 223, 227, 109, 148, 152, 16, 19, 251, 117, 48, 178, 82, 40, 104, + 190, 214, 15, 226, 206, 144, 35, 123, 22, 147, 9, 14, 131, 213, 197, 219, 26, 70, 197, 157, 241, 221, 20, + 193, 250, 173, 84, 79, 150, 138, 206, 241, 102, 173, 226, 31, 239, 237, 248, 88, 202, 78, 163, 178, 19, 66, + 231, 28, 245, 31, 212, 39, 129, 201, 167, 71, 195, 22, 250, 222, 231, 67, 116, 170, 155, 148, 137, 161, 101, + 25, 223, 13, 105, 94, 111, 188, 61, 18, 69, 171, 6, 26, 69, 192, 6, 51, 25, 94, 48, 131, 128, 63, + 62, 243, 157, 13, 43, 61, 72, 39, 87, 102, 160, 174, 140, 211, 212, 49, 169, 211, 163, 232, 82, 87, 82, + 17, 73, 59, 69, 58, 63, 165, 178, 9, 186, 191, 115, 134, 141, 104, 150, 197, 202, 99, 181, 120, 154, 95, + 224, 127, 32, 206, 248, 173, 202, 232, 191, 106, 211, 90, 26, 182, 33, 219, 12, 
226, 124, 197, 142, 174, 159, + 37, 68, 63, 109, 250, 153, 235, 227, 198, 229, 22, 46, 238, 55, 47, 117, 79, 194, 116, 103, 27, 248, 24, + 147, 95, 161, 146, 228, 228, 209, 40, 33, 37, 6, 88, 59, 89, 254, 191, 211, 91, 224, 99, 210, 168, 169, + 134, 100, 195, 118, 236, 147, 181, 148, 54, 1, 138, 43, 163, 145, 250, 225, 99, 142, 98, 57, 233, 9, 15, + 200, 222, 243, 241, 214, 104, 241, 176, 136, 246, 129, 39, 251, 65, 165, 7, 141, 139, 146, 110, 119, 68, 110, + 49, 73, 30, 126, 48, 252, 134, 219, 214, 218, 133, 138, 26, 90, 152, 23, 112, 175, 44, 22, 32, 180, 135, + 207, 213, 76, 36, 185, 173, 225, 12, 121, 199, 181, 251, 148, 54, 55, 15, 171, 162, 145, 114, 127, 99, 168, + 182, 72, 190, 27, 56, 250, 193, 174, 31, 229, 89, 83, 133, 179, 13, 179, 242, 91, 19, 161, 163, 54, 177, + 111, 70, 144, 67, 178, 123, 216, 232, 86, 52, 218, 1, 224, 30, 182, 69, 225, 0, 71, 45, 232, 144, 48, + 245, 43, 1, 151, 82, 45, 168, 9, 206, 116, 84, 121, 250, 151, 119, 219, 101, 109, 5, 187, 133, 154, 215, + 208, 235, 114, 100, 66, 133, 127, 66, 133, 7, 1, 152, 23, 65, 249, 116, 219, 225, 150, 163, 30, 162, 115, + 60, 164, 96, 26, 9, 253, 13, 100, 210, 147, 159, 78, 179, 168, 110, 197, 99, 72, 30, 144, 68, 196, 222, + 32, 208, 217, 248, 179, 186, 3, 113, 79, 199, 138, 190, 167, 45, 99, 92, 6, 84, 123, 118, 238, 86, 41, + 59, 14, 226, 82, 146, 83, 198, 155, 175, 20, 119, 80, 251, 210, 101, 231, 253, 245, 208, 77, 195, 155, 138, + 72, 86, 228, 120, 134, 227, 33, 221, 181, 3, 78, 71, 26, 131, 116, 76, 62, 198, 183, 87, 69, 62, 26, + 44, 66, 150, 248, 191, 111, 42, 243, 26, 58, 108, 54, 138, 200, 116, 82, 12, 2, 8, 72, 17, 87, 254, + 205, 66, 216, 203, 47, 103, 202, 21, 181, 139, 169, 78, 1, 24, 210, 88, 215, 197, 43, 174, 174, 225, 128, + 121, 1, 218, 75, 243, 37, 230, 47, 246, 186, 176, 221, 184, 91, 34, 98, 1, 75, 207, 78, 135, 166, 48, + 116, 35, 71, 173, 182, 246, 76, 148, 208, 1, 133, 131, 126, 175, 110, 243, 133, 190, 123, 125, 139, 101, 253, + 96, 68, 94, 220, 60, 222, 4, 65, 122, 118, 
0, 147, 87, 63, 51, 95, 109, 240, 97, 184, 65, 218, 136, + 45, 233, 68, 78, 49, 135, 233, 165, 151, 216, 26, 240, 115, 250, 122, 223, 122, 118, 9, 40, 130, 33, 169, + 248, 13, 223, 169, 197, 46, 215, 84, 50, 205, 106, 150, 155, 222, 201, 12, 203, 63, 254, 201, 168, 35, 217, + 41, 11, 108, 51, 252, 86, 66, 244, 26, 251, 130, 188, 5, 136, 154, 232, 22, 67, 147, 225, 54, 26, 221, + 38, 158, 139, 236, 91, 141, 243, 212, 113, 227, 116, 121, 19, 68, 203, 106, 147, 29, 216, 242, 18, 55, 18, + 252, 139, 112, 123, 182, 251, 216, 144, 178, 179, 24, 104, 41, 254, 109, 234, 74, 189, 202, 247, 58, 146, 176, + 125, 176, 167, 211, 32, 171, 93, 220, 204, 17, 27, 36, 10, 16, 38, 207, 144, 18, 219, 1, 247, 92, 54, + 91, 44, 137, 49, 31, 39, 54, 187, 95, 98, 2, 20, 130, 253, 149, 74, 227, 64, 4, 127, 167, 16, 242, + 183, 126, 4, 55, 250, 62, 3, 217, 214, 77, 104, 59, 170, 87, 106, 199, 226, 99, 181, 84, 229, 145, 213, + 220, 26, 89, 187, 216, 210, 206, 6, 239, 54, 153, 15, 133, 140, 222, 155, 36, 191, 76, 167, 2, 124, 57, + 192, 120, 157, 27, 128, 252, 88, 7, 117, 222, 28, 221, 95, 25, 43, 79, 43, 191, 100, 243, 53, 96, 1, + 195, 10, 50, 12, 35, 84, 51, 43, 50, 217, 183, 57, 149, 26, 77, 210, 2, 79, 215, 53, 53, 26, 186, + 47, 151, 180, 23, 180, 22, 20, 54, 101, 159, 150, 7, 31, 18, 212, 214, 123, 56, 117, 145, 207, 82, 40, + 55, 111, 171, 106, 97, 13, 22, 4, 193, 221, 85, 52, 58, 31, 139, 229, 166, 8, 67, 107, 188, 55, 248, + 192, 234, 244, 147, 143, 81, 141, 146, 174, 239, 138, 22, 30, 118, 96, 213, 36, 66, 10, 207, 166, 91, 41, + 201, 195, 41, 144, 101, 81, 167, 100, 25, 15, 225, 251, 118, 167, 175, 233, 196, 155, 130, 87, 48, 176, 198, + 97, 143, 71, 192, 227, 25, 202, 202, 174, 197, 178, 217, 1, 248, 180, 191, 46, 186, 41, 58, 25, 60, 114, + 119, 211, 138, 14, 225, 197, 169, 26, 133, 115, 22, 206, 222, 27, 101, 148, 188, 104, 193, 16, 76, 149, 41, + 185, 87, 37, 83, 121, 252, 5, 29, 75, 67, 181, 50, 133, 50, 191, 41, 36, 16, 0, 55, 79, 240, 53, + 235, 237, 94, 107, 85, 123, 214, 
67, 227, 154, 73, 125, 165, 226, 182, 216, 110, 179, 116, 63, 205, 241, 176, + 17, 204, 98, 170, 57, 147, 54, 24, 60, 253, 190, 213, 24, 7, 150, 212, 227, 230, 186, 173, 223, 75, 203, + 59, 223, 135, 238, 22, 133, 66, 157, 239, 206, 64, 51, 174, 230, 17, 54, 123, 133, 179, 118, 249, 57, 229, + 37, 235, 230, 87, 231, 137, 223, 226, 212, 225, 186, 149, 123, 222, 219, 195, 140, 134, 27, 141, 43, 94, 140, + 164, 56, 87, 62, 215, 159, 145, 20, 119, 254, 212, 147, 210, 52, 89, 126, 239, 51, 174, 240, 134, 160, 212, + 179, 79, 154, 247, 70, 144, 199, 177, 226, 54, 185, 16, 243, 40, 182, 134, 39, 168, 122, 38, 228, 22, 181, + 34, 214, 252, 114, 13, 247, 85, 128, 252, 171, 183, 127, 26, 238, 125, 140, 230, 147, 220, 119, 112, 243, 232, + 107, 186, 148, 81, 79, 34, 106, 4, 95, 121, 18, 135, 33, 23, 1, 86, 69, 106, 139, 236, 222, 247, 54, + 22, 135, 159, 73, 83, 55, 75, 76, 117, 54, 31, 106, 170, 234, 4, 89, 222, 70, 11, 246, 208, 131, 100, + 68, 48, 111, 12, 66, 224, 118, 196, 215, 230, 193, 2, 246, 12, 52, 154, 57, 146, 113, 122, 112, 58, 152, + 228, 189, 209, 95, 19, 0, 35, 17, 115, + }; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, std::vector({18, 2, 16, 32}), + std::vector({32, 64, 3, 3}), DT_INT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, uint8_2c_2n_pad) { + uint8_t ret[30 * 40 * 3 * 3] = { + 240, 195, 239, 13, 48, 181, 136, 177, 217, 211, 45, 33, 95, 18, 221, 5, 112, 37, 76, 243, 215, 65, 119, + 101, 119, 96, 145, 44, 108, 73, 72, 170, 49, 80, 168, 160, 3, 74, 18, 163, 51, 140, 154, 65, 185, 198, + 253, 26, 28, 246, 116, 4, 84, 189, 126, 186, 154, 217, 252, 13, 241, 5, 159, 60, 6, 53, 68, 48, 221, + 70, 126, 26, 124, 115, 220, 31, 249, 
248, 159, 102, 106, 45, 25, 228, 90, 136, 121, 227, 156, 138, 242, 199, + 109, 114, 156, 205, 2, 175, 109, 246, 9, 64, 236, 157, 207, 238, 133, 0, 105, 146, 3, 33, 210, 222, 104, + 130, 151, 69, 30, 247, 210, 220, 163, 242, 245, 61, 129, 225, 135, 76, 33, 213, 90, 182, 151, 230, 40, 99, + 143, 131, 141, 84, 135, 232, 218, 249, 72, 181, 244, 133, 111, 221, 89, 122, 192, 211, 176, 223, 135, 231, 171, + 181, 49, 134, 90, 83, 212, 129, 89, 41, 85, 224, 87, 45, 45, 54, 132, 60, 132, 52, 200, 202, 197, 61, + 220, 26, 9, 114, 217, 134, 201, 240, 217, 48, 213, 57, 212, 92, 254, 217, 229, 151, 151, 249, 37, 178, 61, + 171, 196, 252, 37, 139, 69, 67, 126, 143, 166, 136, 138, 43, 186, 92, 199, 162, 47, 50, 151, 40, 240, 35, + 2, 38, 0, 204, 153, 244, 230, 105, 168, 124, 232, 187, 108, 189, 19, 166, 149, 201, 99, 235, 71, 147, 241, + 214, 70, 12, 232, 86, 141, 39, 140, 155, 179, 86, 161, 67, 139, 213, 223, 212, 136, 117, 164, 28, 118, 17, + 56, 211, 240, 221, 22, 11, 254, 170, 144, 16, 238, 94, 19, 246, 179, 133, 205, 16, 65, 91, 72, 146, 132, + 22, 88, 228, 212, 26, 177, 129, 65, 223, 46, 96, 152, 92, 246, 207, 126, 18, 201, 192, 9, 6, 209, 117, + 200, 161, 222, 82, 216, 213, 33, 18, 172, 249, 167, 192, 47, 254, 50, 162, 34, 129, 241, 25, 28, 254, 241, + 100, 17, 240, 172, 97, 97, 129, 102, 78, 133, 67, 191, 99, 160, 209, 34, 10, 209, 242, 109, 135, 31, 243, + 244, 3, 209, 71, 186, 200, 245, 123, 157, 152, 160, 89, 30, 252, 101, 118, 175, 112, 6, 87, 144, 21, 196, + 217, 54, 4, 160, 85, 194, 31, 188, 242, 180, 144, 96, 172, 214, 251, 39, 151, 249, 146, 178, 122, 8, 166, + 8, 198, 34, 171, 154, 248, 28, 95, 78, 106, 26, 224, 175, 45, 241, 41, 6, 16, 96, 19, 120, 91, 21, + 153, 208, 34, 205, 154, 14, 215, 194, 170, 16, 176, 141, 104, 115, 81, 46, 212, 245, 120, 135, 83, 121, 84, + 71, 159, 240, 156, 44, 37, 161, 183, 233, 66, 156, 203, 54, 4, 111, 62, 100, 171, 209, 127, 87, 118, 8, + 233, 177, 120, 126, 224, 141, 148, 184, 16, 60, 111, 66, 232, 80, 165, 40, 130, 54, 206, 171, 
249, 112, 236, + 216, 117, 166, 211, 42, 71, 68, 168, 197, 52, 99, 151, 231, 208, 231, 37, 38, 55, 164, 214, 202, 13, 212, + 18, 16, 89, 103, 113, 109, 2, 53, 103, 86, 128, 60, 58, 235, 40, 49, 118, 197, 182, 118, 3, 8, 75, + 97, 114, 69, 110, 74, 126, 211, 32, 76, 82, 49, 213, 153, 133, 218, 237, 17, 87, 130, 94, 206, 152, 116, + 252, 221, 52, 57, 225, 168, 203, 245, 85, 174, 250, 17, 114, 223, 66, 22, 229, 2, 157, 55, 12, 131, 16, + 213, 240, 129, 150, 234, 118, 78, 52, 199, 222, 102, 59, 226, 251, 68, 204, 188, 203, 221, 51, 172, 147, 49, + 169, 143, 5, 94, 26, 205, 155, 192, 126, 228, 26, 181, 107, 64, 7, 132, 88, 79, 5, 165, 182, 99, 106, + 39, 104, 2, 82, 141, 246, 136, 56, 229, 248, 42, 30, 238, 194, 15, 154, 20, 3, 8, 231, 119, 235, 179, + 204, 175, 46, 64, 109, 214, 79, 82, 150, 188, 18, 96, 231, 191, 189, 46, 136, 178, 11, 77, 151, 165, 111, + 29, 143, 65, 254, 80, 128, 43, 10, 62, 162, 120, 99, 199, 246, 245, 101, 36, 222, 191, 220, 13, 164, 168, + 195, 141, 20, 229, 23, 240, 195, 200, 52, 83, 225, 85, 56, 53, 211, 121, 119, 123, 224, 225, 196, 63, 12, + 17, 11, 32, 210, 86, 119, 26, 19, 13, 60, 76, 196, 89, 7, 55, 167, 118, 105, 90, 120, 177, 6, 223, + 31, 127, 128, 234, 203, 157, 237, 208, 117, 112, 250, 47, 168, 28, 197, 193, 109, 214, 201, 41, 248, 191, 227, + 124, 163, 71, 13, 5, 125, 50, 184, 120, 226, 171, 114, 95, 29, 207, 194, 111, 232, 157, 80, 214, 8, 206, + 104, 63, 198, 115, 178, 229, 52, 201, 203, 173, 84, 58, 185, 69, 75, 118, 162, 92, 2, 56, 212, 108, 18, + 105, 200, 156, 122, 161, 14, 222, 67, 188, 231, 85, 9, 22, 111, 250, 156, 235, 120, 236, 48, 5, 208, 236, + 150, 84, 224, 67, 81, 166, 110, 4, 194, 70, 196, 200, 160, 172, 177, 8, 31, 42, 198, 62, 30, 230, 7, + 50, 6, 252, 59, 57, 205, 214, 85, 80, 157, 75, 129, 210, 213, 142, 220, 133, 235, 97, 113, 195, 149, 2, + 73, 25, 105, 159, 238, 200, 77, 99, 73, 0, 143, 4, 6, 158, 134, 238, 244, 145, 96, 188, 192, 53, 31, + 189, 178, 181, 31, 241, 218, 112, 155, 125, 92, 146, 179, 119, 191, 
248, 210, 223, 123, 116, 171, 57, 199, 204, + 75, 221, 234, 33, 170, 44, 162, 6, 212, 236, 217, 108, 251, 140, 23, 124, 196, 209, 181, 39, 221, 242, 23, + 238, 203, 48, 58, 44, 86, 56, 116, 57, 54, 7, 70, 190, 130, 79, 254, 41, 216, 103, 32, 50, 237, 215, + 93, 252, 40, 219, 253, 18, 123, 185, 113, 42, 53, 97, 246, 241, 63, 89, 162, 248, 114, 63, 182, 233, 175, + 186, 235, 245, 171, 135, 86, 191, 30, 229, 101, 200, 77, 76, 88, 70, 225, 222, 68, 148, 48, 142, 48, 251, + 20, 26, 197, 20, 236, 199, 16, 54, 247, 115, 52, 6, 158, 30, 132, 93, 76, 62, 220, 65, 216, 175, 35, + 47, 151, 152, 3, 58, 149, 20, 223, 163, 44, 202, 37, 65, 54, 11, 21, 134, 49, 183, 35, 72, 92, 65, + 55, 213, 36, 112, 3, 188, 62, 68, 128, 31, 196, 78, 249, 63, 108, 59, 157, 142, 248, 6, 45, 183, 177, + 216, 136, 223, 111, 162, 222, 194, 33, 129, 144, 115, 146, 170, 55, 136, 141, 29, 61, 63, 33, 35, 153, 196, + 150, 173, 80, 84, 207, 174, 40, 137, 82, 171, 68, 106, 184, 191, 90, 210, 253, 148, 102, 77, 175, 158, 194, + 6, 134, 215, 225, 49, 253, 226, 179, 44, 176, 137, 78, 143, 72, 96, 109, 7, 68, 188, 118, 176, 108, 7, + 34, 17, 76, 254, 168, 27, 88, 148, 184, 11, 42, 242, 140, 2, 249, 240, 210, 135, 213, 12, 230, 118, 212, + 31, 239, 234, 133, 104, 72, 171, 111, 18, 169, 120, 27, 119, 165, 204, 90, 83, 54, 196, 229, 37, 52, 19, + 132, 1, 121, 156, 139, 180, 102, 11, 159, 66, 222, 242, 246, 114, 38, 118, 179, 71, 112, 144, 108, 181, 188, + 62, 181, 108, 150, 224, 101, 97, 22, 131, 174, 116, 148, 175, 187, 53, 40, 74, 204, 97, 87, 130, 177, 194, + 137, 94, 57, 79, 225, 38, 222, 152, 199, 132, 208, 25, 31, 222, 130, 242, 20, 25, 212, 135, 234, 71, 116, + 193, 200, 120, 122, 124, 168, 230, 20, 213, 137, 63, 4, 6, 90, 116, 123, 182, 4, 20, 177, 62, 42, 242, + 197, 184, 89, 117, 192, 49, 236, 241, 231, 233, 69, 218, 144, 239, 236, 202, 75, 135, 141, 59, 249, 196, 37, + 246, 185, 181, 51, 42, 62, 56, 62, 193, 47, 210, 26, 95, 84, 151, 236, 60, 119, 66, 252, 75, 231, 96, + 16, 226, 106, 184, 8, 64, 174, 
61, 208, 114, 43, 87, 71, 23, 95, 116, 100, 6, 187, 5, 189, 191, 36, + 28, 97, 10, 32, 236, 35, 150, 171, 184, 94, 121, 57, 176, 100, 174, 42, 180, 47, 0, 132, 146, 150, 172, + 84, 83, 6, 223, 109, 86, 87, 181, 113, 107, 54, 36, 53, 1, 130, 214, 19, 122, 30, 191, 183, 151, 252, + 48, 123, 66, 114, 198, 178, 199, 107, 188, 191, 22, 187, 146, 183, 93, 74, 99, 74, 16, 47, 110, 219, 65, + 57, 201, 94, 129, 208, 162, 111, 98, 24, 103, 215, 20, 176, 41, 220, 250, 55, 61, 4, 111, 121, 66, 190, + 127, 170, 175, 237, 59, 115, 136, 21, 87, 26, 53, 168, 247, 55, 10, 135, 31, 97, 204, 171, 151, 127, 206, + 122, 96, 225, 131, 155, 178, 105, 149, 203, 169, 59, 129, 5, 63, 136, 114, 92, 62, 9, 172, 230, 86, 213, + 168, 251, 213, 87, 136, 231, 240, 168, 98, 182, 228, 103, 21, 9, 208, 241, 203, 144, 168, 75, 34, 26, 189, + 220, 175, 187, 179, 217, 107, 177, 29, 235, 185, 210, 177, 155, 151, 114, 3, 163, 27, 123, 155, 207, 204, 36, + 237, 211, 57, 246, 157, 251, 231, 251, 59, 90, 235, 189, 78, 141, 221, 108, 132, 143, 218, 50, 180, 157, 97, + 119, 213, 240, 86, 51, 32, 1, 215, 47, 116, 103, 76, 238, 44, 235, 54, 72, 228, 156, 194, 29, 197, 169, + 149, 219, 29, 146, 144, 136, 102, 26, 168, 20, 217, 22, 47, 207, 244, 250, 107, 232, 169, 209, 3, 30, 240, + 90, 14, 42, 106, 170, 194, 212, 24, 170, 154, 91, 23, 244, 56, 108, 159, 156, 210, 97, 181, 151, 31, 87, + 250, 26, 107, 225, 253, 154, 225, 83, 72, 206, 139, 241, 207, 94, 17, 227, 150, 236, 10, 238, 26, 123, 108, + 133, 200, 204, 247, 175, 184, 15, 42, 87, 92, 9, 210, 9, 77, 200, 189, 6, 204, 41, 82, 110, 24, 63, + 152, 224, 35, 108, 124, 6, 108, 226, 144, 179, 37, 156, 128, 206, 73, 175, 136, 38, 238, 133, 196, 188, 123, + 123, 17, 50, 168, 66, 49, 32, 80, 190, 164, 109, 226, 148, 32, 131, 215, 190, 190, 189, 40, 206, 227, 79, + 190, 242, 17, 234, 1, 7, 161, 237, 63, 24, 67, 150, 161, 192, 18, 253, 0, 222, 114, 159, 22, 61, 17, + 70, 114, 58, 180, 220, 169, 48, 186, 82, 242, 213, 76, 149, 99, 170, 104, 252, 123, 242, 218, 125, 68, 
166, + 60, 85, 49, 239, 169, 145, 210, 69, 165, 235, 216, 45, 201, 106, 124, 192, 143, 236, 45, 229, 177, 31, 165, + 76, 147, 21, 0, 52, 98, 195, 109, 192, 94, 66, 223, 47, 8, 146, 81, 41, 127, 159, 46, 111, 67, 94, + 55, 205, 220, 59, 160, 216, 198, 79, 47, 34, 152, 188, 47, 161, 61, 160, 206, 98, 80, 195, 122, 99, 165, + 86, 130, 166, 90, 201, 40, 7, 177, 52, 118, 203, 225, 158, 151, 189, 102, 98, 82, 198, 121, 20, 125, 142, + 113, 190, 123, 13, 228, 206, 124, 58, 108, 26, 90, 18, 240, 78, 161, 172, 172, 21, 66, 54, 122, 146, 96, + 46, 109, 152, 28, 26, 138, 10, 247, 15, 16, 147, 116, 208, 220, 46, 139, 27, 142, 86, 240, 230, 32, 71, + 68, 232, 254, 223, 94, 221, 29, 248, 198, 72, 5, 212, 154, 15, 46, 29, 207, 76, 69, 146, 190, 176, 251, + 123, 126, 232, 134, 46, 29, 23, 236, 22, 240, 76, 46, 194, 27, 237, 179, 234, 211, 194, 164, 230, 237, 14, + 46, 248, 82, 61, 86, 64, 44, 66, 190, 69, 79, 111, 96, 152, 150, 234, 77, 237, 186, 216, 9, 110, 158, + 250, 169, 188, 140, 212, 28, 31, 95, 99, 193, 37, 125, 71, 95, 40, 14, 155, 153, 207, 131, 165, 42, 96, + 108, 127, 30, 217, 98, 181, 26, 172, 223, 174, 165, 81, 68, 233, 65, 192, 178, 78, 143, 170, 68, 12, 61, + 6, 64, 142, 173, 78, 94, 191, 89, 97, 112, 90, 200, 212, 193, 199, 15, 179, 119, 136, 205, 114, 234, 220, + 155, 152, 173, 159, 129, 62, 108, 39, 94, 209, 20, 131, 124, 66, 167, 162, 141, 223, 106, 58, 103, 210, 171, + 153, 222, 157, 11, 81, 172, 224, 25, 242, 144, 84, 45, 40, 225, 192, 13, 18, 157, 250, 89, 166, 71, 130, + 3, 98, 148, 66, 41, 81, 116, 9, 80, 9, 19, 205, 44, 228, 183, 146, 68, 57, 104, 122, 218, 139, 58, + 46, 128, 243, 19, 4, 47, 227, 12, 43, 96, 93, 149, 44, 89, 204, 187, 58, 58, 223, 154, 97, 146, 56, + 221, 118, 86, 169, 253, 48, 136, 40, 219, 142, 185, 14, 77, 168, 78, 187, 202, 124, 245, 250, 108, 12, 241, + 136, 136, 175, 243, 117, 217, 4, 11, 115, 21, 14, 221, 185, 250, 125, 245, 13, 71, 212, 32, 230, 8, 94, + 247, 132, 197, 18, 206, 178, 131, 38, 93, 22, 230, 174, 112, 144, 10, 88, 
75, 132, 77, 23, 252, 22, 18, + 101, 130, 88, 180, 18, 244, 253, 145, 67, 206, 30, 109, 141, 2, 11, 64, 15, 74, 16, 201, 114, 40, 17, + 225, 146, 231, 35, 9, 55, 49, 143, 181, 104, 102, 88, 171, 38, 68, 4, 91, 225, 232, 186, 100, 210, 41, + 59, 54, 30, 67, 129, 3, 163, 125, 218, 188, 203, 40, 210, 7, 34, 250, 245, 10, 245, 139, 106, 3, 237, + 202, 72, 61, 143, 79, 213, 17, 53, 73, 203, 129, 231, 229, 133, 216, 186, 188, 251, 187, 244, 58, 136, 116, + 1, 178, 142, 172, 20, 233, 134, 247, 86, 186, 59, 161, 77, 187, 86, 45, 177, 241, 41, 114, 35, 156, 173, + 70, 92, 178, 198, 138, 91, 29, 206, 80, 26, 61, 8, 88, 240, 237, 88, 209, 57, 191, 78, 203, 42, 12, + 154, 105, 165, 52, 52, 29, 70, 172, 52, 210, 154, 190, 205, 26, 123, 223, 66, 108, 4, 45, 191, 162, 58, + 142, 205, 254, 64, 205, 84, 235, 41, 50, 18, 77, 97, 86, 52, 189, 80, 198, 254, 78, 26, 131, 142, 69, + 210, 45, 123, 5, 162, 7, 208, 32, 46, 123, 37, 195, 30, 60, 160, 223, 165, 98, 110, 34, 41, 44, 166, + 156, 204, 121, 75, 120, 24, 193, 5, 31, 125, 203, 39, 229, 11, 192, 90, 243, 142, 135, 199, 165, 143, 140, + 37, 133, 30, 65, 18, 88, 11, 190, 181, 126, 158, 2, 174, 225, 221, 151, 73, 64, 216, 212, 28, 40, 131, + 85, 236, 20, 143, 125, 245, 239, 86, 178, 236, 179, 9, 160, 49, 231, 97, 93, 190, 28, 31, 91, 70, 39, + 64, 96, 28, 218, 154, 62, 214, 84, 19, 153, 2, 193, 100, 23, 185, 182, 102, 66, 206, 222, 149, 247, 247, + 19, 193, 64, 56, 68, 149, 175, 139, 30, 221, 10, 242, 35, 166, 113, 63, 206, 186, 163, 217, 25, 17, 217, + 3, 89, 113, 182, 117, 105, 159, 112, 62, 140, 65, 244, 120, 118, 88, 175, 157, 195, 140, 143, 107, 33, 198, + 11, 8, 241, 59, 225, 190, 135, 228, 19, 34, 228, 125, 196, 107, 50, 245, 204, 73, 9, 81, 126, 199, 86, + 88, 178, 220, 53, 87, 74, 24, 188, 156, 104, 122, 20, 111, 16, 102, 212, 169, 10, 172, 50, 167, 89, 28, + 87, 180, 47, 247, 113, 94, 186, 96, 99, 16, 231, 7, 228, 103, 49, 45, 27, 55, 123, 212, 130, 212, 38, + 70, 118, 57, 205, 7, 156, 146, 79, 35, 57, 9, 182, 32, 214, 32, 
23, 85, 154, 3, 61, 60, 240, 96, + 105, 153, 63, 3, 229, 235, 129, 194, 129, 233, 71, 122, 8, 195, 137, 39, 158, 86, 2, 147, 77, 28, 41, + 234, 144, 180, 213, 36, 254, 157, 160, 80, 99, 125, 205, 57, 170, 136, 228, 54, 58, 185, 181, 121, 42, 229, + 155, 233, 247, 66, 20, 91, 34, 212, 197, 99, 157, 10, 45, 249, 81, 250, 32, 192, 232, 167, 102, 205, 211, + 168, 240, 103, 117, 91, 82, 184, 28, 85, 221, 91, 167, 233, 46, 254, 95, 182, 186, 219, 28, 102, 154, 227, + 65, 35, 63, 59, 133, 173, 145, 133, 192, 208, 126, 248, 179, 120, 137, 177, 241, 57, 0, 154, 142, 48, 73, + 69, 166, 192, 96, 175, 162, 14, 253, 31, 67, 71, 150, 49, 24, 85, 93, 156, 20, 243, 141, 127, 185, 56, + 71, 60, 137, 29, 51, 87, 39, 138, 130, 32, 229, 70, 180, 35, 252, 134, 167, 121, 33, 246, 56, 143, 42, + 245, 44, 77, 235, 181, 13, 15, 81, 212, 64, 75, 109, 25, 84, 134, 38, 31, 52, 168, 205, 52, 152, 157, + 231, 198, 161, 117, 102, 15, 247, 59, 213, 48, 57, 29, 170, 58, 211, 174, 40, 54, 239, 188, 1, 240, 174, + 197, 70, 149, 134, 94, 48, 49, 251, 144, 51, 153, 185, 169, 222, 68, 237, 130, 187, 13, 74, 224, 43, 17, + 124, 133, 125, 87, 192, 247, 186, 158, 221, 54, 223, 233, 245, 249, 169, 204, 100, 206, 17, 127, 167, 139, 140, + 82, 248, 223, 92, 193, 158, 120, 240, 89, 195, 139, 218, 248, 248, 228, 2, 21, 184, 193, 120, 233, 42, 37, + 209, 20, 81, 131, 72, 196, 124, 21, 216, 160, 168, 250, 208, 109, 225, 126, 148, 204, 106, 101, 13, 235, 239, + 156, 211, 4, 47, 216, 254, 113, 157, 186, 126, 253, 5, 230, 4, 67, 223, 76, 193, 198, 164, 5, 113, 20, + 197, 160, 29, 119, 72, 15, 221, 150, 159, 179, 113, 65, 121, 46, 156, 230, 111, 230, 187, 2, 246, 77, 240, + 10, 146, 68, 199, 198, 38, 120, 201, 4, 27, 103, 236, 116, 13, 224, 57, 126, 138, 232, 206, 227, 131, 251, + 34, 229, 4, 253, 96, 79, 10, 72, 73, 50, 20, 245, 69, 231, 134, 7, 48, 207, 41, 138, 241, 158, 80, + 160, 55, 140, 3, 201, 105, 12, 182, 49, 154, 91, 5, 22, 233, 92, 231, 40, 47, 215, 16, 197, 82, 175, + 105, 76, 229, 30, 9, 88, 29, 203, 
193, 199, 129, 14, 239, 168, 129, 192, 158, 100, 248, 88, 64, 139, 177, + 123, 30, 132, 170, 32, 165, 55, 215, 127, 246, 204, 110, 177, 37, 131, 221, 68, 208, 217, 31, 248, 14, 227, + 97, 123, 100, 196, 119, 41, 206, 110, 224, 192, 165, 235, 142, 116, 241, 98, 222, 69, 246, 89, 108, 113, 229, + 35, 69, 70, 21, 129, 130, 30, 36, 27, 143, 247, 243, 63, 185, 206, 229, 204, 171, 201, 45, 60, 93, 227, + 113, 243, 113, 226, 165, 144, 231, 81, 137, 46, 3, 61, 22, 129, 12, 63, 63, 253, 130, 235, 29, 161, 75, + 178, 214, 152, 203, 80, 181, 193, 146, 141, 80, 98, 130, 194, 93, 118, 134, 207, 69, 146, 74, 248, 46, 125, + 39, 253, 253, 64, 30, 172, 73, 199, 1, 228, 64, 221, 111, 166, 64, 184, 121, 248, 72, 12, 118, 237, 122, + 126, 218, 20, 246, 213, 246, 51, 59, 228, 77, 171, 156, 237, 247, 238, 60, 27, 162, 153, 219, 15, 253, 209, + 48, 179, 202, 191, 62, 100, 219, 193, 243, 246, 178, 43, 99, 30, 238, 122, 134, 153, 118, 67, 238, 243, 112, + 28, 241, 194, 249, 147, 235, 24, 182, 127, 2, 142, 70, 181, 247, 162, 32, 158, 53, 214, 251, 197, 85, 70, + 88, 54, 128, 90, 159, 13, 135, 244, 10, 30, 31, 165, 27, 70, 105, 104, 53, 183, 35, 243, 122, 152, 216, + 98, 141, 196, 169, 222, 223, 143, 245, 210, 212, 35, 171, 140, 66, 198, 20, 245, 70, 140, 190, 106, 7, 183, + 199, 38, 46, 85, 52, 23, 1, 199, 239, 241, 202, 96, 140, 236, 154, 182, 50, 82, 211, 211, 104, 217, 217, + 252, 94, 204, 239, 98, 64, 210, 33, 201, 226, 208, 151, 112, 46, 90, 212, 22, 53, 212, 221, 34, 66, 177, + 204, 205, 99, 8, 79, 134, 141, 236, 157, 183, 192, 123, 212, 234, 39, 208, 105, 185, 215, 235, 34, 95, 158, + 54, 94, 46, 42, 172, 47, 34, 233, 238, 195, 45, 6, 166, 27, 11, 22, 243, 126, 9, 239, 62, 238, 84, + 181, 224, 223, 232, 227, 60, 46, 242, 67, 92, 68, 205, 62, 210, 199, 29, 25, 205, 190, 30, 57, 112, 112, + 183, 114, 223, 105, 213, 118, 113, 86, 152, 121, 22, 91, 167, 37, 135, 221, 124, 192, 73, 45, 213, 8, 37, + 51, 152, 18, 17, 114, 73, 63, 78, 65, 8, 136, 151, 20, 33, 181, 61, 223, 9, 99, 138, 166, 
114, 150, + 30, 99, 143, 155, 198, 203, 142, 218, 152, 238, 15, 57, 164, 25, 179, 61, 172, 16, 243, 27, 17, 139, 31, + 9, 237, 128, 224, 247, 112, 165, 178, 203, 102, 129, 31, 38, 62, 242, 121, 166, 241, 241, 172, 26, 222, 91, + 101, 15, 11, 115, 121, 20, 34, 38, 24, 206, 238, 215, 104, 221, 214, 81, 84, 96, 26, 225, 20, 152, 149, + 50, 251, 152, 62, 69, 94, 18, 174, 220, 172, 25, 206, 162, 201, 55, 150, 108, 123, 160, 81, 167, 168, 46, + 118, 243, 12, 15, 218, 125, 152, 216, 79, 175, 79, 65, 114, 244, 175, 127, 163, 90, 202, 68, 127, 80, 135, + 206, 9, 237, 117, 228, 244, 163, 26, 211, 25, 123, 107, 96, 61, 74, 132, 202, 180, 55, 73, 196, 65, 252, + 62, 60, 99, 218, 17, 223, 118, 132, 74, 75, 88, 139, 48, 125, 60, 14, 10, 45, 24, 95, 129, 159, 2, + 90, 174, 204, 18, 184, 240, 56, 210, 34, 93, 191, 143, 165, 38, 139, 23, 36, 49, 140, 181, 252, 148, 245, + 137, 240, 153, 31, 62, 111, 22, 14, 156, 200, 97, 175, 157, 253, 104, 178, 161, 184, 192, 97, 73, 11, 38, + 39, 197, 162, 144, 107, 164, 27, 253, 194, 42, 149, 138, 40, 194, 166, 138, 28, 100, 16, 97, 229, 112, 70, + 163, 229, 69, 164, 33, 72, 49, 232, 172, 207, 80, 23, 150, 241, 25, 216, 249, 120, 70, 236, 203, 81, 29, + 173, 31, 160, 34, 234, 133, 144, 187, 36, 160, 100, 56, 179, 51, 25, 128, 162, 63, 129, 184, 172, 224, 113, + 209, 223, 158, 190, 123, 123, 4, 137, 127, 164, 184, 26, 17, 106, 41, 29, 55, 232, 240, 88, 197, 89, 13, + 75, 254, 163, 201, 61, 135, 179, 184, 69, 41, 70, 136, 104, 211, 82, 123, 70, 188, 51, 247, 246, 5, 163, + 125, 107, 81, 125, 150, 156, 7, 22, 48, 156, 108, 5, 252, 181, 48, 48, 230, 253, 62, 88, 151, 228, 94, + 91, 178, 159, 35, 30, 30, 206, 218, 92, 182, 14, 88, 97, 40, 45, 57, 234, 154, 38, 62, 193, 7, 244, + 111, 43, 140, 96, 210, 130, 189, 189, 42, 35, 92, 57, 202, 236, 67, 254, 243, 189, 129, 135, 129, 171, 124, + 206, 41, 178, 62, 101, 166, 215, 146, 15, 44, 138, 145, 20, 170, 200, 240, 72, 193, 71, 154, 249, 68, 117, + 178, 66, 206, 113, 58, 124, 154, 172, 140, 32, 56, 75, 
81, 235, 141, 252, 107, 124, 143, 148, 100, 61, 12, + 79, 133, 129, 77, 0, 142, 187, 251, 16, 57, 251, 105, 141, 91, 77, 89, 132, 141, 114, 198, 168, 17, 244, + 194, 60, 36, 93, 107, 146, 177, 122, 77, 15, 100, 184, 200, 127, 45, 209, 142, 121, 65, 211, 126, 179, 161, + 51, 105, 28, 126, 14, 186, 54, 131, 107, 80, 199, 232, 215, 178, 36, 253, 42, 36, 60, 106, 140, 114, 126, + 198, 248, 147, 44, 5, 78, 21, 34, 241, 61, 223, 92, 31, 249, 239, 242, 84, 80, 26, 9, 80, 36, 81, + 3, 77, 10, 233, 35, 65, 79, 125, 81, 136, 35, 10, 248, 165, 65, 91, 23, 76, 168, 206, 179, 35, 186, + 179, 61, 128, 46, 8, 158, 38, 152, 208, 239, 57, 64, 194, 155, 187, 12, 96, 78, 177, 232, 75, 21, 98, + 115, 69, 191, 240, 82, 9, 185, 129, 227, 115, 132, 141, 169, 254, 116, 152, 38, 64, 175, 148, 219, 210, 61, + 248, 109, 167, 13, 148, 159, 78, 16, 59, 102, 29, 8, 220, 113, 161, 204, 150, 4, 25, 53, 58, 22, 158, + 90, 207, 155, 240, 61, 109, 101, 213, 213, 168, 38, 137, 91, 125, 68, 133, 103, 102, 176, 95, 60, 142, 121, + 159, 177, 148, 2, 65, 249, 36, 215, 171, 89, 142, 238, 125, 78, 201, 1, 175, 50, 120, 129, 177, 216, 76, + 201, 234, 41, 159, 149, 53, 206, 125, 4, 97, 13, 33, 181, 232, 235, 234, 220, 14, 165, 76, 15, 125, 140, + 246, 6, 104, 191, 155, 58, 247, 90, 106, 57, 247, 30, 60, 150, 248, 124, 122, 147, 174, 155, 170, 155, 192, + 50, 108, 43, 189, 209, 138, 188, 171, 223, 32, 17, 134, 231, 205, 205, 136, 199, 225, 220, 183, 6, 161, 53, + 107, 141, 58, 35, 1, 166, 92, 25, 41, 88, 16, 182, 59, 0, 99, 73, 100, 238, 225, 32, 38, 113, 171, + 239, 144, 236, 229, 176, 139, 46, 165, 235, 5, 192, 78, 202, 63, 131, 223, 101, 18, 128, 22, 103, 126, 148, + 114, 251, 86, 166, 194, 150, 14, 2, 68, 67, 177, 190, 203, 10, 75, 52, 151, 4, 36, 77, 72, 229, 87, + 137, 203, 51, 54, 34, 72, 164, 121, 30, 146, 127, 63, 8, 100, 119, 33, 71, 91, 81, 80, 16, 17, 17, + 106, 73, 195, 162, 110, 184, 65, 37, 180, 208, 171, 195, 28, 204, 60, 199, 21, 10, 241, 156, 172, 179, 71, + 229, 180, 235, 238, 143, 150, 93, 
164, 219, 230, 84, 46, 190, 123, 251, 237, 154, 88, 196, 147, 132, 74, 113, + 239, 40, 185, 184, 154, 76, 31, 48, 234, 57, 69, 147, 158, 173, 54, 49, 26, 168, 209, 205, 109, 112, 184, + 104, 30, 115, 28, 43, 87, 2, 218, 171, 103, 87, 113, 211, 98, 229, 55, 118, 169, 139, 82, 215, 249, 67, + 222, 122, 121, 110, 196, 231, 201, 125, 239, 59, 139, 79, 157, 162, 93, 121, 154, 92, 28, 232, 200, 35, 44, + 38, 114, 44, 126, 72, 68, 7, 79, 44, 91, 215, 6, 139, 16, 109, 240, 13, 227, 167, 241, 49, 75, 58, + 120, 64, 48, 38, 159, 128, 136, 55, 223, 187, 233, 75, 46, 167, 101, 213, 132, 23, 245, 41, 160, 146, 223, + 44, 253, 28, 214, 82, 220, 205, 118, 95, 113, 206, 0, 111, 6, 3, 74, 238, 217, 37, 164, 76, 246, 45, + 0, 229, 233, 70, 196, 60, 96, 46, 196, 7, 251, 156, 77, 124, 6, 80, 157, 158, 64, 6, 223, 205, 254, + 124, 37, 0, 247, 150, 212, 205, 8, 109, 99, 164, 146, 238, 76, 94, 131, 156, 195, 93, 247, 101, 125, 171, + 145, 87, 130, 213, 227, 6, 207, 151, 84, 9, 60, 15, 141, 55, 30, 14, 173, 26, 115, 247, 169, 239, 3, + 231, 17, 15, 217, 124, 139, 213, 139, 8, 39, 210, 203, 231, 126, 175, 46, 182, 54, 241, 244, 98, 183, 77, + 194, 138, 72, 221, 201, 77, 219, 115, 10, 252, 62, 152, 20, 16, 101, 26, 82, 86, 87, 86, 58, 104, 7, + 116, 102, 6, 216, 87, 204, 132, 192, 199, 86, 11, 111, 77, 86, 130, 89, 15, 31, 179, 107, 44, 100, 180, + 207, 30, 16, 214, 39, 130, 154, 201, 197, 220, 10, 177, 182, 233, 162, 146, 189, 108, 109, 35, 113, 96, 105, + 101, 32, 24, 18, 203, 101, 130, 40, 100, 38, 128, 180, 56, 114, 70, 30, 15, 160, 11, 33, 164, 192, 1, + 157, 234, 104, 202, 202, 172, 69, 221, 252, 203, 75, 95, 5, 99, 230, 11, 220, 44, 113, 227, 224, 234, 69, + 168, 8, 208, 93, 229, 254, 216, 154, 186, 40, 79, 94, 224, 233, 55, 78, 108, 217, 154, 4, 203, 164, 204, + 73, 159, 56, 185, 159, 128, 28, 115, 97, 244, 32, 131, 76, 19, 32, 209, 163, 148, 196, 8, 241, 46, 197, + 92, 70, 103, 212, 233, 31, 79, 44, 160, 96, 53, 97, 208, 120, 81, 251, 253, 90, 47, 102, 103, 174, 91, + 113, 130, 56, 
211, 172, 220, 81, 126, 111, 84, 75, 8, 208, 134, 144, 165, 203, 151, 134, 238, 137, 10, 177, + 107, 121, 245, 60, 0, 33, 234, 190, 171, 56, 25, 22, 124, 148, 32, 210, 144, 210, 142, 31, 119, 253, 109, + 129, 103, 17, 103, 194, 140, 36, 123, 47, 180, 220, 5, 64, 232, 251, 5, 15, 87, 45, 87, 40, 45, 161, + 71, 149, 30, 158, 11, 204, 167, 214, 152, 120, 32, 170, 122, 53, 135, 217, 152, 118, 137, 9, 55, 208, 113, + 205, 238, 9, 23, 74, 141, 225, 180, 213, 184, 222, 218, 171, 103, 0, 195, 25, 65, 161, 162, 25, 87, 78, + 202, 12, 37, 13, 58, 172, 94, 132, 77, 134, 228, 117, 45, 183, 45, 81, 80, 151, 58, 173, 225, 42, 6, + 31, 64, 4, 194, 131, 175, 129, 204, 102, 105, 115, 106, 239, 132, 225, 22, 213, 163, 137, 161, 193, 76, 126, + 52, 94, 190, 239, 34, 246, 161, 43, 163, 250, 214, 124, 235, 209, 191, 147, 53, 136, 181, 47, 134, 31, 181, + 32, 24, 211, 158, 83, 254, 122, 64, 252, 245, 236, 0, 211, 73, 191, 45, 175, 239, 137, 43, 62, 228, 108, + 204, 172, 2, 164, 150, 162, 170, 202, 39, 10, 101, 19, 185, 189, 30, 181, 3, 238, 187, 164, 194, 106, 87, + 169, 121, 146, 40, 29, 239, 58, 246, 238, 231, 99, 52, 249, 92, 54, 193, 46, 13, 250, 82, 126, 76, 252, + 3, 185, 69, 74, 42, 150, 74, 36, 51, 88, 113, 136, 231, 101, 106, 234, 163, 43, 10, 172, 26, 222, 59, + 27, 212, 74, 92, 148, 14, 40, 36, 37, 249, 221, 111, 148, 79, 44, 168, 196, 174, 248, 80, 151, 153, 73, + 200, 193, 210, 245, 97, 19, 203, 149, 58, 216, 191, 164, 167, 106, 64, 82, 216, 151, 153, 194, 143, 187, 133, + 117, 252, 33, 23, 203, 28, 167, 30, 214, 167, 204, 18, 79, 112, 216, 231, 189, 219, 118, 194, 115, 126, 51, + 177, 55, 212, 122, 52, 215, 149, 9, 61, 56, 186, 53, 249, 6, 199, 43, 40, 91, 44, 94, 191, 245, 14, + 215, 197, 46, 179, 226, 10, 235, 230, 99, 251, 164, 222, 25, 2, 97, 208, 115, 77, 174, 251, 199, 10, 136, + 72, 137, 48, 113, 3, 96, 247, 57, 0, 211, 168, 254, 201, 24, 94, 226, 205, 44, 71, 177, 227, 248, 242, + 90, 81, 242, 157, 77, 4, 46, 127, 249, 40, 47, 44, 115, 85, 120, 147, 135, 160, 42, 
192, 25, 17, 166, + 50, 188, 205, 215, 16, 200, 132, 36, 113, 157, 30, 147, 72, 41, 213, 229, 19, 250, 218, 105, 227, 218, 241, + 35, 95, 246, 41, 126, 96, 60, 62, 95, 63, 230, 21, 101, 230, 66, 46, 117, 173, 114, 195, 148, 23, 122, + 116, 55, 102, 14, 162, 203, 202, 250, 118, 169, 237, 169, 95, 207, 134, 235, 97, 194, 185, 192, 153, 189, 247, + 90, 33, 231, 119, 3, 149, 32, 62, 72, 71, 80, 189, 100, 29, 226, 87, 109, 54, 84, 155, 19, 133, 13, + 144, 62, 237, 236, 30, 78, 164, 25, 24, 89, 63, 174, 207, 198, 58, 208, 111, 93, 192, 109, 252, 201, 145, + 155, 123, 132, 172, 123, 185, 46, 100, 183, 63, 205, 59, 183, 224, 27, 216, 9, 111, 186, 90, 24, 70, 234, + 47, 181, 201, 189, 176, 87, 71, 227, 52, 55, 101, 55, 25, 240, 120, 9, 207, 73, 172, 49, 201, 251, 194, + 249, 197, 232, 95, 224, 240, 245, 216, 160, 67, 157, 27, 104, 251, 182, 132, 48, 6, 150, 228, 219, 162, 78, + 57, 12, 126, 161, 183, 77, 210, 76, 22, 153, 44, 143, 252, 6, 119, 179, 24, 196, 70, 195, 247, 125, 202, + 171, 100, 100, 155, 117, 19, 15, 132, 64, 62, 176, 38, 225, 63, 128, 244, 216, 76, 229, 243, 44, 100, 247, + 227, 44, 175, 47, 251, 3, 240, 78, 209, 126, 56, 152, 94, 10, 149, 137, 99, 97, 35, 152, 244, 18, 119, + 227, 93, 198, 119, 203, 224, 30, 157, 24, 178, 6, 55, 108, 3, 105, 30, 95, 80, 89, 136, 125, 68, 233, + 215, 159, 68, 73, 3, 62, 46, 200, 130, 24, 70, 10, 254, 218, 141, 132, 8, 4, 94, 232, 226, 223, 190, + 164, 6, 254, 94, 110, 72, 221, 37, 135, 172, 67, 108, 194, 105, 146, 81, 47, 166, 11, 33, 190, 131, 218, + 69, 242, 178, 5, 55, 195, 1, 38, 85, 61, 110, 239, 93, 227, 178, 184, 63, 168, 192, 53, 78, 93, 159, + 174, 222, 123, 111, 156, 101, 150, 172, 225, 86, 60, 79, 234, 78, 144, 122, 74, 195, 208, 46, 234, 189, 130, + 51, 37, 242, 104, 153, 193, 243, 71, 172, 14, 93, 241, 243, 117, 66, 199, 56, 223, 25, 125, 44, 112, 63, + 50, 142, 241, 62, 206, 35, 198, 31, 73, 44, 75, 231, 108, 124, 80, 94, 70, 157, 225, 203, 31, 75, 112, + 82, 228, 104, 93, 195, 131, 221, 168, 10, 38, 240, 173, 
64, 186, 195, 217, 12, 137, 6, 233, 242, 104, 190, + 114, 243, 52, 17, 40, 234, 157, 201, 94, 147, 46, 237, 58, 64, 52, 226, 228, 228, 203, 161, 229, 59, 225, + 200, 120, 200, 195, 209, 77, 122, 143, 112, 133, 51, 210, 95, 159, 51, 248, 128, 150, 165, 187, 40, 198, 188, + 188, 160, 100, 137, 140, 80, 189, 10, 4, 59, 78, 218, 242, 107, 230, 69, 247, 167, 222, 57, 58, 120, 41, + 25, 33, 141, 188, 184, 7, 236, 75, 221, 86, 108, 246, 146, 54, 15, 10, 168, 183, 232, 199, 129, 167, 93, + 96, 67, 168, 232, 162, 145, 157, 165, 142, 100, 21, 83, 50, 225, 100, 180, 121, 52, 254, 61, 180, 136, 87, + 161, 140, 138, 191, 134, 140, 105, 23, 105, 132, 122, 176, 213, 12, 40, 186, 34, 182, 242, 175, 45, 54, 74, + 230, 218, 64, 136, 165, 249, 84, 191, 143, 172, 174, 138, 194, 96, 20, 146, 206, 232, 120, 137, 93, 112, 18, + 28, 71, 106, 30, 174, 205, 220, 96, 144, 120, 17, 169, 245, 82, 20, 35, 155, 243, 65, 194, 168, 59, 136, + 32, 10, 171, 207, 80, 117, 221, 90, 87, 197, 242, 243, 5, 14, 171, 93, 14, 232, 166, 172, 122, 57, 38, + 140, 243, 199, 26, 41, 123, 53, 31, 51, 233, 73, 246, 59, 33, 108, 197, 178, 229, 252, 92, 187, 148, 96, + 2, 194, 78, 66, 245, 203, 204, 95, 252, 244, 208, 81, 189, 13, 189, 35, 73, 2, 77, 240, 173, 174, 73, + 197, 204, 162, 148, 62, 35, 2, 66, 189, 188, 142, 117, 59, 203, 37, 160, 30, 238, 225, 220, 159, 12, 249, + 117, 95, 94, 52, 191, 177, 32, 95, 160, 201, 51, 164, 136, 252, 73, 202, 8, 232, 235, 135, 101, 51, 35, + 230, 61, 44, 69, 205, 181, 32, 97, 243, 242, 118, 68, 231, 118, 40, 210, 241, 163, 201, 179, 81, 10, 213, + 229, 78, 119, 221, 74, 33, 178, 214, 80, 221, 121, 150, 126, 157, 19, 165, 6, 53, 17, 134, 209, 166, 85, + 241, 185, 94, 126, 52, 200, 110, 21, 52, 58, 128, 113, 186, 9, 8, 198, 76, 215, 55, 33, 213, 170, 163, + 247, 226, 81, 71, 164, 173, 124, 101, 163, 88, 222, 13, 252, 141, 69, 135, 183, 100, 223, 71, 96, 131, 145, + 61, 150, 47, 42, 141, 224, 175, 214, 163, 70, 222, 20, 233, 193, 186, 242, 106, 80, 108, 226, 92, 131, 52, + 133, 
206, 149, 86, 20, 218, 43, 196, 30, 168, 141, 245, 225, 129, 62, 37, 207, 164, 254, 93, 17, 16, 214, + 12, 110, 186, 238, 186, 10, 68, 104, 159, 87, 2, 2, 131, 210, 107, 89, 137, 19, 36, 7, 114, 139, 98, + 221, 90, 133, 236, 115, 233, 215, 246, 110, 180, 164, 114, 224, 166, 95, 77, 219, 195, 50, 35, 122, 181, 119, + 107, 150, 45, 98, 126, 186, 70, 83, 145, 144, 171, 251, 251, 188, 183, 151, 115, 59, 133, 210, 141, 16, 164, + 198, 78, 30, 110, 154, 139, 65, 253, 164, 21, 226, 241, 83, 228, 174, 141, 237, 199, 111, 5, 220, 131, 55, + 222, 145, 231, 183, 197, 150, 181, 69, 156, 241, 124, 244, 11, 200, 253, 105, 196, 108, 135, 20, 237, 182, 223, + 35, 128, 14, 241, 42, 96, 152, 181, 89, 93, 141, 242, 167, 220, 225, 44, 58, 120, 30, 189, 91, 245, 244, + 94, 94, 189, 223, 92, 149, 17, 6, 111, 10, 214, 210, 180, 23, 142, 143, 8, 159, 144, 50, 100, 78, 2, + 11, 128, 173, 160, 239, 30, 92, 76, 165, 81, 180, 54, 119, 83, 220, 204, 47, 122, 253, 249, 235, 99, 146, + 127, 237, 185, 20, 30, 3, 128, 146, 126, 52, 52, 31, 251, 103, 247, 185, 42, 243, 208, 134, 191, 119, 226, + 141, 139, 5, 80, 107, 50, 131, 247, 8, 157, 251, 151, 96, 246, 111, 239, 158, 211, 28, 97, 76, 125, 214, + 161, 10, 2, 171, 74, 140, 188, 232, 189, 10, 77, 113, 45, 253, 94, 206, 222, 153, 141, 2, 206, 180, 104, + 166, 207, 0, 192, 149, 8, 46, 104, 174, 171, 39, 152, 176, 192, 132, 6, 2, 44, 70, 241, 32, 176, 172, + 49, 72, 79, 171, 62, 1, 107, 77, 42, 217, 53, 96, 145, 193, 61, 207, 15, 242, 154, 131, 252, 197, 128, + 64, 157, 254, 250, 160, 224, 157, 197, 18, 224, 67, 246, 2, 23, 35, 90, 186, 182, 148, 177, 42, 118, 35, + 146, 104, 27, 233, 207, 140, 224, 103, 180, 166, 49, 226, 75, 87, 84, 165, 160, 122, 116, 182, 134, 49, 250, + 41, 177, 165, 146, 50, 44, 173, 229, 49, 195, 30, 234, 183, 118, 236, 63, 201, 71, 154, 86, 87, 15, 34, + 22, 192, 87, 90, 39, 120, 166, 59, 176, 192, 245, 203, 178, 184, 153, 245, 90, 91, 27, 215, 31, 48, 210, + 125, 252, 30, 164, 229, 137, 43, 214, 244, 226, 214, 77, 157, 217, 
239, 237, 205, 146, 37, 11, 68, 179, 93, + 1, 199, 22, 185, 177, 128, 110, 101, 96, 5, 4, 245, 71, 70, 164, 8, 189, 136, 43, 201, 156, 174, 153, + 22, 254, 250, 10, 186, 37, 148, 16, 209, 145, 95, 246, 43, 154, 48, 131, 149, 2, 158, 149, 164, 79, 245, + 64, 180, 78, 99, 113, 20, 10, 236, 249, 99, 69, 93, 78, 194, 217, 87, 156, 20, 251, 126, 77, 139, 28, + 80, 175, 42, 16, 200, 17, 197, 174, 233, 218, 201, 245, 31, 55, 85, 213, 136, 62, 163, 223, 153, 224, 59, + 97, 59, 25, 85, 153, 128, 251, 118, 39, 227, 31, 28, 214, 228, 109, 253, 139, 163, 224, 119, 223, 253, 152, + 214, 65, 73, 88, 15, 16, 73, 82, 210, 141, 147, 167, 236, 88, 211, 26, 210, 16, 218, 84, 134, 35, 208, + 22, 195, 32, 160, 112, 27, 189, 83, 166, 201, 201, 205, 114, 49, 18, 163, 5, 40, 12, 198, 151, 245, 33, + 161, 170, 10, 206, 12, 48, 248, 244, 96, 47, 90, 37, 71, 93, 76, 47, 188, 101, 243, 62, 212, 66, 8, + 160, 30, 176, 254, 222, 118, 69, 236, 51, 146, 51, 148, 12, 48, 201, 19, 84, 206, 37, 165, 252, 56, 23, + 122, 124, 223, 150, 107, 192, 67, 148, 71, 29, 125, 185, 213, 151, 214, 127, 140, 225, 140, 92, 35, 152, 220, + 93, 1, 172, 112, 205, 184, 110, 1, 114, 17, 20, 36, 240, 120, 75, 228, 142, 90, 31, 147, 195, 186, 42, + 95, 74, 188, 26, 3, 196, 53, 217, 126, 18, 100, 96, 68, 66, 145, 249, 48, 117, 248, 21, 130, 8, 12, + 9, 1, 136, 206, 140, 232, 31, 226, 163, 114, 239, 7, 24, 204, 188, 32, 41, 157, 33, 99, 240, 249, 11, + 23, 107, 175, 224, 154, 5, 182, 50, 74, 185, 34, 241, 196, 220, 168, 38, 40, 94, 107, 63, 125, 218, 244, + 43, 176, 5, 93, 144, 82, 64, 83, 59, 160, 104, 6, 179, 38, 106, 163, 164, 188, 102, 17, 183, 199, 152, + 28, 198, 147, 113, 80, 188, 242, 211, 146, 18, 238, 154, 43, 172, 86, 170, 7, 12, 66, 60, 10, 170, 138, + 95, 18, 119, 122, 251, 58, 119, 221, 46, 6, 202, 24, 26, 162, 97, 0, 155, 9, 163, 232, 14, 254, 69, + 241, 156, 101, 19, 153, 87, 156, 56, 30, 43, 21, 158, 207, 77, 208, 220, 23, 133, 11, 229, 205, 59, 80, + 149, 88, 222, 191, 86, 40, 160, 69, 203, 20, 240, 229, 
125, 173, 133, 167, 48, 173, 128, 101, 23, 227, 40, + 240, 74, 39, 164, 238, 10, 112, 220, 163, 128, 12, 211, 5, 6, 44, 14, 62, 231, 70, 41, 52, 242, 88, + 40, 192, 63, 27, 109, 70, 124, 240, 237, 192, 223, 202, 105, 77, 189, 43, 70, 109, 171, 221, 64, 196, 218, + 47, 204, 214, 217, 162, 172, 230, 44, 240, 216, 253, 41, 117, 138, 161, 156, 205, 32, 229, 86, 157, 86, 121, + 145, 91, 40, 41, 47, 65, 101, 199, 111, 225, 82, 238, 150, 3, 11, 65, 137, 203, 122, 156, 174, 125, 73, + 20, 104, 164, 157, 227, 44, 69, 231, 72, 146, 175, 45, 198, 167, 105, 228, 240, 236, 222, 250, 151, 211, 28, + 48, 171, 88, 16, 55, 5, 117, 251, 58, 66, 0, 90, 173, 243, 119, 217, 22, 101, 117, 75, 238, 10, 16, + 78, 209, 84, 50, 1, 194, 54, 177, 187, 226, 182, 20, 53, 126, 233, 167, 14, 219, 88, 28, 99, 99, 92, + 77, 27, 196, 240, 115, 180, 216, 141, 25, 201, 119, 213, 27, 163, 14, 159, 163, 47, 183, 77, 216, 99, 110, + 118, 244, 15, 159, 119, 99, 114, 197, 151, 165, 66, 250, 216, 149, 184, 42, 17, 167, 195, 166, 143, 60, 87, + 95, 81, 250, 47, 189, 149, 144, 8, 40, 233, 252, 129, 55, 145, 132, 193, 168, 253, 77, 21, 69, 131, 33, + 143, 179, 178, 100, 69, 99, 177, 161, 189, 221, 191, 216, 23, 16, 109, 110, 46, 107, 253, 90, 88, 216, 90, + 206, 48, 234, 146, 60, 181, 96, 49, 149, 105, 201, 5, 174, 92, 27, 166, 115, 16, 135, 153, 117, 133, 181, + 170, 106, 179, 106, 65, 226, 79, 253, 237, 110, 247, 215, 26, 6, 131, 47, 74, 193, 231, 115, 252, 165, 249, + 92, 160, 40, 161, 45, 241, 59, 224, 233, 154, 211, 77, 211, 96, 15, 135, 193, 238, 52, 196, 76, 139, 131, + 136, 254, 70, 211, 195, 228, 188, 12, 174, 110, 56, 220, 178, 140, 180, 152, 57, 185, 75, 165, 158, 155, 37, + 50, 27, 173, 34, 119, 209, 183, 252, 142, 34, 135, 148, 95, 85, 127, 75, 79, 76, 172, 247, 160, 196, 157, + 103, 163, 69, 47, 169, 159, 229, 58, 92, 240, 9, 43, 134, 98, 137, 121, 100, 40, 178, 57, 190, 140, 199, + 39, 104, 218, 18, 40, 117, 193, 74, 206, 101, 66, 224, 33, 138, 248, 4, 129, 49, 19, 113, 20, 166, 122, + 158, 58, 
50, 17, 136, 175, 217, 51, 35, 66, 196, 1, 113, 159, 157, 33, 195, 207, 206, 182, 143, 29, 193, + 177, 243, 251, 109, 104, 119, 19, 47, 148, 219, 186, 51, 98, 210, 164, 92, 131, 194, 38, 52, 117, 12, 147, + 164, 40, 93, 247, 77, 147, 68, 245, 151, 46, 20, 55, 136, 72, 12, 182, 38, 146, 102, 103, 40, 41, 76, + 133, 104, 78, 96, 71, 131, 113, 89, 217, 90, 46, 52, 29, 195, 199, 5, 196, 233, 16, 58, 249, 226, 83, + 119, 97, 140, 101, 151, 177, 137, 158, 249, 56, 84, 81, 24, 101, 17, 4, 214, 148, 12, 185, 54, 144, 121, + 44, 98, 240, 144, 219, 192, 7, 9, 138, 31, 148, 48, 77, 151, 253, 213, 200, 146, 207, 230, 108, 158, 245, + 72, 61, 23, 243, 217, 2, 217, 164, 136, 96, 144, 116, 91, 43, 23, 163, 0, 248, 119, 25, 68, 162, 167, + 205, 39, 37, 124, 171, 71, 109, 43, 134, 224, 97, 244, 82, 69, 38, 215, 134, 111, 56, 145, 22, 236, 146, + 23, 182, 172, 143, 4, 61, 85, 199, 236, 174, 109, 79, 215, 149, 108, 117, 250, 206, 79, 238, 100, 218, 21, + 207, 5, 1, 212, 233, 118, 222, 100, 77, 171, 61, 80, 164, 228, 177, 69, 52, 238, 249, 227, 19, 217, 174, + 156, 185, 55, 109, 203, 221, 148, 197, 186, 89, 207, 120, 134, 187, 180, 78, 173, 126, 22, 46, 138, 167, 228, + 211, 191, 1, 85, 63, 156, 61, 48, 198, 42, 158, 223, 172, 9, 240, 176, 194, 43, 26, 88, 152, 228, 241, + 12, 126, 237, 235, 224, 120, 144, 113, 135, 44, 142, 77, 129, 191, 125, 137, 44, 59, 39, 78, 168, 82, 107, + 6, 230, 50, 69, 43, 171, 102, 135, 220, 50, 39, 253, 90, 70, 85, 242, 243, 254, 217, 222, 51, 87, 70, + 65, 46, 165, 32, 233, 249, 251, 130, 217, 93, 94, 228, 175, 185, 253, 192, 177, 220, 85, 186, 79, 185, 55, + 42, 253, 70, 129, 103, 40, 174, 89, 56, 209, 42, 169, 21, 45, 126, 86, 123, 163, 125, 151, 179, 186, 22, + 140, 152, 30, 125, 132, 17, 13, 22, 32, 119, 76, 174, 137, 186, 168, 21, 57, 168, 91, 95, 52, 78, 195, + 26, 109, 13, 145, 107, 44, 143, 133, 180, 241, 187, 237, 139, 136, 20, 94, 129, 211, 145, 164, 165, 239, 250, + 195, 195, 208, 139, 169, 110, 73, 32, 205, 151, 124, 101, 186, 222, 88, 248, 
231, 64, 61, 232, 225, 5, 99, + 61, 79, 160, 161, 170, 68, 244, 7, 68, 115, 230, 115, 48, 76, 18, 167, 106, 195, 81, 90, 86, 164, 207, + 120, 155, 101, 238, 186, 11, 106, 26, 235, 233, 13, 190, 56, 35, 59, 82, 112, 98, 174, 221, 184, 230, 132, + 131, 173, 123, 50, 45, 190, 69, 129, 220, 6, 99, 15, 63, 246, 178, 31, 107, 14, 28, 30, 19, 138, 239, + 43, 49, 73, 248, 220, 37, 177, 26, 231, 97, 224, 102, 57, 203, 190, 140, 225, 60, 27, 215, 162, 116, 187, + 164, 152, 126, 8, 171, 167, 206, 127, 17, 208, 41, 127, 232, 157, 154, 20, 216, 146, 108, 15, 224, 58, 7, + 77, 10, 20, 223, 182, 144, 159, 44, 108, 85, 91, 124, 71, 240, 21, 18, 13, 157, 172, 147, 206, 237, 89, + 205, 28, 57, 101, 114, 220, 188, 214, 147, 104, 76, 96, 138, 49, 241, 202, 29, 78, 97, 2, 92, 132, 126, + 175, 143, 92, 159, 243, 98, 168, 82, 236, 210, 162, 58, 70, 120, 240, 117, 46, 173, 82, 192, 179, 209, 14, + 217, 58, 227, 65, 185, 208, 103, 110, 138, 118, 235, 128, 100, 201, 15, 222, 123, 206, 204, 33, 162, 136, 168, + 34, 159, 18, 15, 43, 168, 234, 196, 187, 80, 136, 217, 224, 153, 216, 189, 124, 225, 91, 185, 62, 248, 127, + 144, 234, 9, 18, 233, 176, 10, 12, 60, 123, 176, 83, 109, 194, 74, 159, 70, 69, 37, 101, 209, 235, 163, + 185, 125, 166, 25, 182, 114, 26, 3, 173, 73, 95, 7, 2, 165, 75, 127, 29, 61, 181, 66, 124, 62, 23, + 169, 195, 75, 80, 102, 57, 208, 156, 205, 120, 4, 248, 161, 130, 118, 226, 33, 139, 176, 227, 236, 147, 180, + 74, 222, 51, 78, 134, 252, 102, 46, 128, 158, 219, 71, 178, 51, 34, 215, 44, 166, 156, 35, 100, 34, 241, + 162, 2, 14, 155, 7, 173, 220, 58, 39, 69, 242, 184, 21, 227, 236, 174, 57, 36, 53, 133, 20, 198, 184, + 118, 82, 121, 212, 88, 185, 129, 193, 251, 84, 8, 188, 193, 54, 107, 149, 109, 130, 110, 151, 71, 238, 225, + 2, 196, 44, 211, 108, 249, 136, 187, 206, 88, 241, 45, 48, 188, 113, 133, 172, 75, 245, 90, 245, 107, 79, + 12, 112, 108, 96, 188, 61, 233, 75, 55, 175, 253, 210, 28, 145, 125, 244, 77, 116, 130, 84, 208, 190, 221, + 92, 171, 217, 197, 10, 28, 54, 
214, 153, 39, 114, 128, 57, 110, 149, 162, 227, 141, 228, 186, 103, 10, 198, + 74, 254, 107, 98, 214, 18, 87, 27, 129, 5, 244, 156, 152, 146, 108, 196, 179, 191, 239, 85, 199, 214, 194, + 164, 216, 6, 150, 238, 92, 154, 232, 164, 61, 155, 118, 174, 113, 189, 63, 135, 125, 24, 229, 119, 200, 7, + 41, 62, 63, 113, 160, 69, 147, 155, 103, 80, 27, 183, 228, 178, 62, 94, 16, 126, 238, 223, 44, 127, 8, + 47, 77, 237, 93, 170, 12, 163, 119, 154, 221, 100, 10, 94, 247, 9, 155, 185, 213, 218, 23, 42, 118, 119, + 159, 177, 11, 10, 111, 215, 170, 80, 51, 167, 127, 108, 87, 194, 207, 133, 168, 233, 4, 166, 175, 57, 78, + 65, 241, 6, 246, 77, 51, 195, 218, 147, 208, 33, 42, 146, 25, 86, 233, 201, 53, 74, 98, 88, 99, 162, + 170, 31, 72, 224, 126, 253, 129, 143, 136, 193, 17, 160, 133, 73, 192, 215, 62, 129, 212, 103, 3, 215, 92, + 98, 162, 130, 0, 182, 132, 14, 100, 172, 111, 66, 2, 218, 84, 253, 221, 53, 181, 86, 140, 79, 44, 8, + 146, 116, 94, 159, 135, 247, 64, 161, 236, 192, 239, 167, 77, 104, 137, 42, 135, 212, 111, 188, 132, 231, 104, + 3, 26, 107, 168, 172, 197, 101, 65, 51, 237, 88, 49, 154, 47, 195, 76, 136, 126, 109, 166, 50, 222, 150, + 102, 76, 63, 5, 253, 21, 137, 177, 77, 62, 96, 236, 182, 236, 119, 110, 166, 143, 58, 82, 218, 35, 68, + 224, 22, 48, 5, 32, 88, 98, 120, 93, 172, 138, 129, 148, 161, 25, 238, 254, 85, 217, 130, 235, 49, 122, + 145, 94, 166, 114, 141, 163, 154, 172, 180, 162, 82, 248, 223, 187, 63, 167, 106, 30, 218, 77, 184, 97, 42, + 120, 38, 141, 101, 100, 177, 176, 29, 147, 250, 84, 227, 241, 240, 220, 139, 217, 13, 110, 131, 133, 239, 88, + 209, 108, 176, 3, 184, 15, 179, 20, 234, 60, 163, 222, 69, 212, 229, 205, 254, 237, 45, 8, 58, 152, 230, + 20, 204, 188, 206, 79, 160, 35, 178, 88, 75, 246, 136, 215, 233, 239, 44, 214, 224, 219, 109, 79, 12, 117, + 170, 2, 234, 9, 212, 187, 94, 57, 81, 35, 155, 29, 106, 117, 171, 71, 156, 30, 212, 49, 169, 42, 33, + 85, 115, 9, 54, 161, 186, 55, 76, 93, 95, 191, 245, 206, 46, 215, 206, 250, 70, 92, 111, 199, 
62, 251, + 72, 147, 64, 52, 152, 121, 16, 228, 108, 78, 215, 195, 33, 25, 218, 63, 31, 1, 164, 193, 28, 32, 39, + 171, 87, 40, 136, 234, 85, 170, 125, 16, 45, 33, 84, 90, 151, 21, 135, 204, 108, 203, 17, 67, 249, 29, + 10, 32, 8, 132, 55, 41, 215, 50, 53, 173, 138, 154, 70, 30, 186, 34, 163, 31, 1, 182, 47, 247, 74, + 12, 210, 242, 66, 47, 150, 106, 14, 17, 236, 127, 84, 220, 192, 242, 118, 56, 197, 219, 93, 188, 70, 123, + 252, 8, 63, 162, 63, 146, 111, 230, 242, 8, 141, 213, 192, 83, 25, 81, 174, 127, 209, 31, 155, 194, 90, + 237, 121, 129, 222, 232, 2, 240, 185, 38, 0, 85, 243, 247, 184, 132, 148, 200, 143, 55, 82, 205, 233, 234, + 138, 242, 29, 168, 83, 4, 65, 170, 155, 205, 96, 37, 93, 176, 133, 142, 221, 203, 162, 71, 20, 207, 60, + 71, 104, 220, 16, 197, 251, 74, 177, 35, 190, 219, 226, 22, 28, 100, 159, 128, 94, 244, 172, 101, 38, 17, + 107, 205, 104, 186, 67, 44, 218, 50, 144, 122, 63, 135, 24, 63, 77, 214, 183, 199, 173, 163, 27, 65, 183, + 203, 103, 21, 5, 51, 227, 77, 130, 146, 226, 36, 191, 247, 59, 126, 87, 236, 146, 67, 13, 242, 57, 82, + 59, 159, 236, 202, 212, 195, 211, 119, 171, 63, 121, 37, 44, 96, 36, 88, 211, 232, 67, 202, 113, 42, 25, + 32, 216, 99, 133, 76, 166, 115, 108, 130, 155, 227, 211, 99, 64, 228, 81, 155, 113, 220, 62, 122, 100, 21, + 252, 235, 244, 154, 23, 96, 77, 233, 56, 243, 240, 69, 57, 102, 15, 181, 40, 243, 30, 121, 119, 105, 90, + 210, 231, 247, 197, 7, 164, 8, 117, 28, 231, 186, 111, 57, 163, 46, 216, 103, 36, 79, 19, 49, 40, 119, + 68, 56, 157, 46, 53, 7, 51, 27, 33, 25, 192, 176, 43, 225, 141, 36, 73, 147, 27, 13, 174, 76, 72, + 80, 65, 215, 124, 10, 104, 11, 189, 215, 116, 168, 133, 31, 50, 136, 167, 88, 253, 24, 137, 94, 108, 75, + 227, 99, 2, 203, 202, 182, 244, 96, 163, 63, 42, 147, 91, 162, 15, 252, 144, 32, 110, 4, 128, 34, 185, + 38, 151, 1, 142, 81, 80, 180, 126, 30, 208, 56, 92, 105, 49, 38, 65, 246, 159, 18, 29, 155, 114, 86, + 108, 91, 144, 143, 107, 181, 125, 97, 99, 127, 179, 250, 181, 27, 218, 37, 132, 106, 
177, 124, 59, 101, 77, + 127, 8, 189, 117, 67, 108, 221, 73, 170, 197, 183, 188, 175, 151, 116, 24, 242, 93, 147, 44, 208, 100, 168, + 50, 132, 21, 100, 50, 177, 13, 37, 204, 50, 247, 153, 245, 172, 225, 52, 95, 112, 142, 218, 58, 74, 192, + 1, 84, 213, 150, 40, 246, 38, 250, 35, 71, 127, 121, 14, 180, 90, 12, 111, 196, 120, 115, 156, 158, 110, + 218, 177, 223, 0, 237, 180, 37, 233, 246, 73, 166, 154, 166, 99, 218, 73, 178, 20, 194, 135, 84, 116, 157, + 41, 137, 159, 52, 249, 25, 51, 220, 9, 170, 74, 230, 240, 171, 95, 28, 201, 75, 173, 193, 12, 76, 9, + 30, 203, 250, 138, 126, 52, 160, 186, 118, 4, 250, 31, 97, 217, 219, 128, 60, 227, 83, 208, 160, 214, 61, + 90, 12, 228, 76, 64, 105, 176, 242, 176, 13, 239, 174, 28, 98, 128, 35, 246, 51, 191, 97, 56, 182, 196, + 77, 209, 84, 167, 89, 5, 102, 249, 211, 9, 42, 80, 27, 90, 184, 42, 223, 120, 214, 63, 216, 250, 196, + 144, 140, 166, 96, 53, 50, 208, 195, 52, 99, 157, 139, 200, 240, 196, 83, 119, 69, 142, 116, 116, 168, 120, + 168, 70, 124, 230, 39, 203, 65, 21, 100, 85, 233, 21, 147, 26, 135, 136, 164, 99, 165, 204, 110, 242, 200, + 234, 125, 125, 78, 173, 33, 50, 134, 251, 44, 31, 164, 106, 159, 211, 185, 240, 249, 31, 121, 85, 194, 102, + 121, 130, 250, 229, 203, 61, 33, 9, 47, 230, 207, 63, 200, 24, 216, 119, 137, 85, 232, 93, 207, 178, 228, + 165, 198, 201, 45, 145, 50, 92, 57, 47, 193, 221, 17, 124, 81, 229, 102, 52, 107, 247, 118, 17, 93, 75, + 66, 224, 214, 131, 110, 49, 161, 12, 206, 237, 116, 196, 161, 201, 130, 31, 158, 138, 192, 250, 177, 3, 5, + 84, 188, 51, 174, 17, 226, 167, 133, 62, 58, 192, 120, 196, 54, 114, 139, 106, 132, 230, 24, 182, 121, 29, + 44, 64, 82, 161, 227, 112, 177, 76, 109, 243, 248, 74, 230, 39, 78, 170, 214, 54, 92, 86, 249, 103, 190, + 70, 101, 30, 155, 144, 58, 167, 21, 103, 174, 248, 148, 236, 154, 107, 181, 22, 13, 101, 204, 111, 135, 23, + 42, 172, 15, 156, 102, 41, 176, 212, 192, 128, 189, 161, 41, 131, 33, 240, 106, 69, 105, 159, 124, 223, 90, + 145, 86, 21, 20, 57, 89, 46, 
133, 15, 65, 65, 209, 167, 21, 153, 77, 125, 145, 227, 58, 24, 52, 206, + 118, 12, 205, 215, 218, 22, 19, 227, 74, 232, 136, 92, 87, 119, 0, 121, 12, 126, 248, 114, 244, 94, 118, + 135, 197, 54, 70, 205, 45, 106, 28, 42, 107, 105, 238, 28, 147, 128, 158, 198, 110, 17, 93, 7, 228, 8, + 43, 213, 134, 41, 84, 33, 221, 246, 203, 87, 39, 197, 7, 210, 4, 168, 42, 192, 209, 71, 103, 91, 213, + 181, 32, 27, 20, 191, 13, 115, 29, 55, 19, 186, 93, 169, 137, 247, 127, 161, 166, 177, 124, 191, 189, 68, + 173, 172, 196, 28, 33, 139, 153, 148, 69, 121, 156, 66, 113, 78, 242, 178, 59, 170, 239, 112, 54, 2, 196, + 107, 110, 208, 162, 71, 135, 126, 253, 35, 102, 41, 148, 172, 21, 170, 47, 184, 66, 224, 197, 207, 193, 146, + 248, 43, 5, 235, 232, 198, 66, 130, 196, 81, 133, 138, 81, 222, 204, 248, 59, 214, 203, 97, 147, 20, 159, + 10, 130, 54, 61, 32, 245, 208, 246, 87, 235, 12, 230, 104, 219, 121, 234, 253, 45, 112, 234, 219, 242, 172, + 146, 200, 219, 59, 199, 233, 202, 234, 159, 126, 8, 14, 188, 23, 130, 254, 114, 12, 86, 47, 184, 231, 136, + 230, 50, 83, 234, 152, 221, 145, 203, 252, 171, 190, 63, 199, 2, 68, 169, 247, 9, 105, 61, 26, 209, 9, + 65, 173, 251, 109, 36, 163, 26, 70, 97, 132, 7, 16, 153, 246, 189, 166, 247, 252, 79, 68, 229, 82, 191, + 160, 134, 214, 208, 29, 39, 26, 204, 211, 102, 207, 171, 5, 178, 88, 220, 107, 165, 136, 147, 248, 93, 130, + 61, 10, 132, 88, 130, 77, 161, 230, 237, 138, 125, 129, 154, 97, 113, 181, 61, 71, 223, 148, 233, 136, 126, + 152, 116, 67, 149, 82, 67, 200, 6, 20, 171, 12, 225, 173, 123, 143, 165, 90, 18, 226, 41, 55, 188, 147, + 230, 20, 131, 245, 108, 124, 149, 111, 35, 95, 95, 200, 182, 72, 8, 119, 93, 46, 133, 50, 241, 147, 175, + 193, 225, 85, 236, 239, 111, 55, 41, 187, 33, 148, 247, 194, 61, 79, 92, 82, 167, 244, 46, 254, 219, 28, + 19, 135, 98, 29, 10, 247, 213, 43, 99, 130, 221, 56, 89, 226, 118, 180, 139, 31, 213, 39, 72, 112, 208, + 180, 79, 35, 107, 164, 14, 245, 201, 78, 221, 111, 155, 58, 220, 68, 31, 27, 244, 79, 240, 196, 
137, 60, + 139, 43, 171, 49, 50, 20, 227, 166, 119, 24, 139, 193, 115, 53, 72, 90, 1, 220, 237, 199, 78, 201, 249, + 27, 161, 195, 179, 215, 130, 183, 129, 69, 176, 49, 186, 145, 20, 210, 111, 7, 215, 12, 217, 69, 76, 177, + 104, 115, 253, 73, 67, 40, 110, 98, 80, 254, 184, 120, 250, 97, 250, 101, 214, 65, 59, 210, 251, 139, 210, + 104, 52, 29, 36, 48, 120, 224, 147, 235, 157, 253, 104, 249, 178, 31, 15, 9, 232, 128, 88, 38, 59, 156, + 64, 125, 128, 141, 106, 238, 30, 31, 149, 56, 135, 42, 198, 148, 220, 251, 4, 5, 224, 28, 212, 54, 47, + 212, 69, 3, 41, 244, 131, 80, 180, 1, 44, 211, 190, 170, 127, 208, 221, 21, 187, 178, 192, 51, 26, 203, + 80, 213, 153, 127, 245, 245, 106, 60, 84, 162, 159, 208, 213, 124, 135, 127, 69, 211, 44, 78, 179, 127, 196, + 244, 237, 237, 138, 34, 32, 123, 62, 228, 119, 146, 87, 167, 23, 147, 13, 115, 112, 141, 222, 100, 95, 143, + 50, 120, 65, 186, 70, 61, 203, 82, 216, 109, 211, 225, 63, 174, 247, 130, 10, 7, 214, 146, 58, 94, 195, + 173, 71, 185, 15, 83, 251, 15, 50, 194, 2, 217, 59, 229, 231, 11, 47, 85, 124, 112, 60, 152, 217, 214, + 133, 99, 10, 129, 211, 182, 17, 47, 248, 225, 103, 230, 188, 145, 199, 178, 102, 38, 142, 89, 17, 245, 99, + 22, 242, 37, 84, 242, 152, 108, 96, 95, 181, 229, 65, 58, 77, 114, 238, 65, 206, 71, 243, 94, 208, 251, + 6, 120, 124, 164, 234, 158, 123, 104, 214, 125, 150, 230, 183, 12, 215, 91, 247, 118, 195, 204, 106, 233, 215, + 197, 93, 226, 41, 220, 91, 106, 38, 188, 46, 75, 9, 111, 49, 40, 252, 21, 196, 160, 247, 3, 12, 142, + 49, 229, 44, 220, 2, 8, 195, 75, 66, 30, 181, 106, 177, 61, 58, 7, 100, 226, 60, 144, 226, 175, 253, + 17, 90, 75, 102, 157, 178, 183, 161, 220, 121, 72, 192, 96, 205, 27, 171, 128, 114, 253, 177, 38, 120, 54, + 194, 230, 47, 106, 3, 109, 157, 79, 204, 200, 201, 73, 130, 178, 148, 78, 218, 46, 210, 56, 232, 142, 101, + 75, 132, 142, 165, 241, 77, 28, 162, 69, 72, 58, 116, 98, 79, 118, 66, 253, 117, 100, 177, 252, 46, 249, + 49, 79, 108, 102, 204, 171, 101, 13, 179, 40, 218, 242, 
173, 155, 221, 113, 61, 39, 124, 54, 64, 153, 163, + 225, 89, 62, 10, 250, 104, 171, 133, 171, 125, 95, 170, 114, 250, 10, 112, 47, 146, 197, 125, 192, 16, 88, + 187, 230, 241, 4, 69, 243, 4, 97, 195, 77, 197, 197, 25, 136, 111, 57, 96, 11, 123, 159, 108, 9, 191, + 178, 35, 164, 121, 122, 174, 74, 228, 250, 19, 224, 238, 184, 71, 133, 78, 145, 108, 165, 147, 111, 141, 234, + 101, 156, 208, 115, 96, 98, 252, 129, 33, 219, 169, 177, 180, 29, 208, 175, 114, 142, 119, 118, 98, 190, 250, + 163, 235, 127, 155, 38, 61, 104, 72, 70, 111, 114, 3, 59, 172, 120, 50, 249, 212, 43, 157, 136, 93, 166, + 208, 130, 136, 125, 184, 96, 188, 159, 220, 193, 9, 232, 37, 171, 105, 10, 71, 102, 45, 67, 26, 12, 183, + 122, 243, 88, 60, 248, 78, 4, 220, 167, 244, 123, 174, 53, 238, 208, 81, 161, 100, 32, 204, 70, 73, 173, + 184, 63, 161, 32, 84, 106, 222, 208, 126, 210, 147, 117, 77, 215, 74, 186, 118, 52, 9, 5, 171, 16, 252, + 241, 91, 168, 127, 121, 246, 221, 13, 12, 202, 167, 143, 232, 167, 134, 106, 67, 31, 186, 198, 134, 59, 39, + 220, 107, 96, 104, 187, 89, 201, 185, 19, 119, 11, 73, 21, 233, 101, 60, 116, 140, 175, 17, 46, 248, 151, + 149, 26, 38, 114, 189, 220, 86, 183, 222, 117, 62, 65, 114, 220, 237, 216, 219, 27, 15, 172, 110, 237, 130, + 117, 80, 166, 135, 187, 99, 226, 59, 9, 140, 137, 164, 167, 241, 172, 236, 216, 149, 74, 177, 185, 125, 190, + 241, 33, 6, 23, 53, 231, 77, 63, 32, 35, 235, 12, 120, 228, 152, 4, 226, 229, 190, 176, 181, 207, 45, + 58, 171, 25, 25, 178, 222, 23, 145, 38, 108, 122, 139, 209, 154, 249, 237, 97, 253, 234, 13, 131, 158, 6, + 63, 32, 254, 93, 82, 8, 8, 2, 138, 164, 92, 95, 48, 3, 95, 105, 126, 167, 123, 225, 224, 170, 233, + 198, 52, 122, 175, 173, 172, 6, 91, 248, 179, 92, 148, 177, 123, 148, 91, 98, 191, 205, 182, 187, 146, 191, + 16, 85, 162, 86, 39, 136, 226, 251, 10, 70, 1, 183, 139, 140, 23, 182, 185, 14, 164, 232, 191, 15, 213, + 128, 185, 215, 148, 46, 154, 47, 237, 220, 249, 168, 226, 105, + }; + uint8_t data[18 * 2 * 16 * 32] = { + 240, 
211, 76, 44, 3, 198, 126, 60, 124, 45, 242, 246, 105, 69, 129, 230, 218, 122, 49, 224, 200, 134, 254, + 171, 166, 50, 153, 189, 241, 155, 136, 221, 34, 3, 160, 87, 194, 251, 8, 106, 96, 154, 115, 84, 233, 171, + 126, 232, 112, 168, 38, 89, 60, 3, 211, 237, 221, 250, 55, 118, 68, 169, 228, 5, 200, 119, 32, 196, 177, + 237, 193, 163, 171, 80, 229, 75, 105, 231, 236, 81, 172, 7, 80, 235, 159, 6, 53, 155, 223, 234, 108, 221, + 56, 254, 252, 53, 65, 128, 142, 111, 170, 153, 137, 253, 215, 78, 176, 88, 240, 239, 120, 229, 180, 38, 62, + 174, 97, 225, 222, 116, 213, 4, 117, 144, 196, 62, 60, 106, 93, 57, 103, 4, 59, 55, 206, 203, 62, 87, + 21, 26, 29, 163, 57, 189, 180, 1, 54, 219, 217, 209, 194, 108, 250, 206, 10, 175, 77, 63, 144, 38, 210, + 192, 147, 66, 46, 216, 61, 86, 118, 198, 228, 78, 96, 15, 142, 223, 154, 176, 236, 234, 82, 111, 9, 31, + 14, 127, 165, 170, 94, 199, 155, 209, 146, 40, 202, 175, 221, 230, 131, 88, 130, 30, 201, 55, 68, 59, 188, + 245, 79, 133, 116, 86, 241, 198, 88, 42, 172, 66, 254, 97, 131, 208, 223, 204, 23, 247, 30, 186, 182, 120, + 33, 228, 204, 220, 20, 167, 186, 45, 118, 9, 61, 235, 137, 234, 99, 185, 20, 249, 211, 85, 186, 59, 179, + 48, 253, 156, 48, 68, 124, 54, 17, 193, 248, 37, 216, 204, 47, 230, 113, 150, 111, 68, 236, 227, 10, 7, + 55, 91, 16, 88, 129, 123, 246, 217, 119, 116, 229, 27, 118, 51, 60, 179, 178, 67, 235, 162, 88, 30, 35, + 222, 66, 183, 239, 82, 239, 112, 66, 236, 105, 46, 6, 62, 46, 29, 114, 22, 45, 73, 181, 30, 162, 168, + 216, 163, 237, 107, 196, 118, 14, 174, 191, 181, 111, 104, 39, 42, 16, 33, 241, 29, 36, 63, 190, 17, 89, + 184, 70, 81, 5, 151, 206, 57, 0, 91, 244, 77, 121, 126, 215, 114, 34, 84, 10, 10, 179, 38, 12, 69, + 132, 148, 159, 161, 90, 168, 176, 65, 78, 76, 4, 14, 191, 60, 155, 171, 77, 72, 119, 106, 208, 241, 143, + 123, 113, 234, 168, 28, 211, 249, 125, 154, 44, 6, 49, 136, 213, 253, 206, 164, 60, 6, 124, 99, 93, 227, + 55, 3, 177, 113, 130, 30, 234, 75, 227, 254, 55, 73, 244, 196, 233, 120, 91, 111, 
151, 60, 124, 253, 123, + 15, 30, 170, 55, 225, 195, 12, 228, 173, 175, 225, 40, 249, 76, 36, 163, 74, 111, 151, 203, 82, 252, 204, + 194, 215, 199, 215, 251, 174, 3, 24, 242, 249, 160, 215, 72, 218, 62, 117, 102, 169, 153, 32, 172, 224, 251, + 78, 22, 196, 155, 225, 100, 78, 99, 198, 55, 125, 46, 132, 6, 67, 33, 195, 178, 174, 86, 208, 153, 117, + 63, 73, 157, 93, 64, 104, 108, 199, 145, 100, 161, 132, 242, 165, 96, 18, 144, 243, 207, 5, 57, 31, 178, + 78, 81, 173, 2, 160, 95, 51, 135, 181, 40, 229, 221, 17, 52, 9, 186, 2, 114, 215, 77, 150, 171, 210, + 139, 174, 222, 241, 135, 42, 220, 244, 111, 159, 160, 119, 99, 146, 42, 5, 151, 76, 188, 206, 207, 39, 241, + 1, 71, 90, 178, 48, 214, 205, 22, 245, 156, 148, 131, 180, 69, 126, 17, 85, 97, 227, 224, 15, 88, 208, + 166, 40, 206, 71, 66, 236, 84, 223, 185, 35, 185, 107, 144, 38, 152, 146, 12, 122, 26, 254, 56, 23, 222, + 229, 23, 112, 14, 192, 223, 221, 172, 161, 145, 225, 122, 227, 167, 28, 58, 101, 50, 53, 179, 191, 90, 181, + 27, 170, 110, 231, 45, 96, 131, 174, 185, 34, 95, 196, 58, 100, 218, 224, 20, 217, 33, 243, 186, 52, 147, + 12, 133, 90, 16, 151, 134, 172, 79, 100, 222, 69, 185, 207, 46, 156, 240, 12, 44, 39, 43, 70, 70, 217, + 220, 129, 21, 186, 22, 57, 13, 237, 165, 73, 248, 79, 230, 90, 17, 146, 223, 71, 237, 214, 29, 92, 58, + 179, 103, 222, 159, 136, 185, 176, 74, 185, 73, 181, 102, 130, 180, 128, 166, 155, 21, 198, 193, 109, 44, 45, + 164, 61, 24, 160, 178, 8, 154, 213, 10, 87, 57, 218, 201, 72, 160, 3, 14, 221, 116, 239, 188, 197, 195, + 102, 62, 58, 32, 161, 122, 180, 30, 101, 71, 115, 191, 111, 121, 218, 171, 45, 203, 55, 30, 74, 17, 197, + 162, 192, 194, 185, 200, 29, 37, 20, 74, 159, 205, 63, 163, 227, 126, 59, 63, 67, 27, 73, 215, 133, 94, + 244, 252, 151, 56, 29, 181, 218, 127, 197, 147, 50, 172, 192, 35, 196, 0, 166, 116, 220, 201, 250, 31, 160, + 176, 35, 209, 42, 102, 47, 85, 45, 124, 93, 12, 158, 51, 120, 182, 177, 170, 101, 248, 204, 41, 33, 145, + 65, 227, 218, 119, 118, 42, 110, 41, 7, 
91, 29, 161, 196, 121, 146, 126, 86, 152, 2, 9, 97, 252, 208, + 5, 93, 230, 61, 67, 225, 55, 149, 119, 225, 148, 46, 247, 118, 180, 221, 79, 50, 53, 249, 69, 215, 80, + 221, 213, 159, 78, 32, 147, 50, 109, 214, 83, 231, 214, 248, 38, 84, 58, 208, 104, 247, 226, 9, 3, 195, + 7, 90, 72, 177, 109, 148, 75, 72, 9, 228, 145, 208, 177, 98, 104, 50, 130, 9, 67, 78, 208, 184, 210, + 9, 121, 167, 39, 19, 140, 114, 114, 237, 59, 216, 6, 12, 181, 23, 237, 32, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 195, 45, 243, 108, 74, 253, 186, 6, 115, 25, 199, + 9, 146, 30, 225, 40, 249, 192, 134, 87, 202, 201, 217, 196, 136, 151, 244, 19, 214, 179, 117, 22, 10, 209, + 89, 144, 31, 39, 198, 26, 19, 14, 81, 71, 66, 209, 224, 80, 236, 197, 55, 103, 58, 8, 32, 17, 52, + 17, 12, 78, 204, 143, 26, 165, 52, 123, 210, 89, 6, 208, 109, 71, 114, 214, 52, 118, 200, 85, 48, 166, + 177, 50, 157, 97, 238, 158, 31, 125, 123, 33, 251, 242, 116, 41, 40, 97, 55, 31, 248, 162, 55, 196, 82, + 148, 225, 143, 108, 148, 210, 234, 27, 37, 102, 118, 181, 116, 87, 38, 130, 193, 137, 20, 192, 239, 37, 193, + 119, 184, 74, 201, 215, 111, 115, 10, 122, 169, 9, 136, 9, 189, 235, 27, 246, 78, 157, 215, 72, 29, 22, + 3, 212, 159, 26, 139, 238, 184, 200, 152, 179, 238, 69, 143, 21, 223, 111, 198, 160, 130, 203, 121, 206, 161, + 46, 16, 86, 94, 15, 251, 22, 211, 61, 96, 110, 95, 155, 30, 81, 68, 191, 15, 152, 20, 56, 219, 124, + 243, 185, 8, 38, 75, 88, 109, 114, 49, 4, 54, 203, 139, 213, 216, 1, 186, 41, 138, 240, 12, 52, 108, + 64, 86, 142, 32, 165, 121, 185, 19, 221, 163, 117, 118, 198, 19, 73, 53, 111, 89, 96, 27, 57, 182, 60, + 129, 39, 144, 125, 181, 91, 81, 168, 221, 219, 133, 120, 73, 31, 20, 49, 237, 133, 223, 127, 158, 228, 209, + 160, 106, 216, 4, 20, 159, 230, 199, 116, 131, 72, 48, 140, 5, 197, 29, 192, 30, 204, 31, 41, 241, 35, + 143, 237, 59, 27, 202, 
43, 238, 24, 32, 54, 31, 243, 223, 198, 199, 241, 211, 98, 46, 177, 157, 185, 42, + 166, 238, 242, 25, 223, 91, 213, 63, 61, 99, 201, 46, 79, 90, 117, 96, 65, 132, 10, 204, 143, 252, 22, + 178, 197, 149, 97, 72, 25, 173, 160, 129, 123, 106, 13, 69, 188, 125, 252, 228, 218, 234, 142, 77, 194, 15, + 65, 14, 178, 126, 241, 80, 233, 248, 35, 152, 96, 191, 141, 219, 78, 204, 207, 38, 95, 249, 201, 201, 97, + 165, 155, 150, 192, 223, 72, 164, 33, 73, 171, 156, 150, 251, 239, 57, 209, 43, 98, 67, 239, 92, 126, 139, + 75, 55, 132, 28, 0, 76, 96, 80, 37, 164, 247, 6, 30, 231, 182, 96, 40, 15, 104, 95, 224, 216, 78, + 159, 32, 8, 31, 81, 113, 84, 134, 0, 148, 109, 47, 87, 158, 122, 208, 180, 25, 37, 117, 225, 129, 22, + 29, 92, 252, 51, 43, 92, 148, 153, 149, 216, 33, 18, 115, 149, 43, 197, 164, 251, 96, 94, 90, 40, 42, + 16, 41, 241, 95, 173, 14, 95, 189, 62, 49, 240, 182, 57, 153, 70, 117, 63, 247, 209, 97, 119, 108, 68, + 200, 8, 254, 108, 190, 1, 184, 222, 60, 46, 193, 66, 50, 44, 225, 195, 186, 190, 246, 129, 157, 180, 140, + 122, 175, 249, 20, 28, 120, 65, 80, 14, 38, 51, 229, 66, 189, 174, 66, 30, 94, 164, 101, 32, 210, 78, + 121, 134, 200, 8, 238, 131, 139, 246, 219, 45, 251, 141, 65, 141, 145, 124, 20, 96, 225, 94, 10, 144, 239, + 83, 146, 126, 243, 80, 96, 125, 232, 222, 0, 152, 32, 107, 154, 39, 184, 210, 244, 146, 185, 71, 174, 16, + 149, 78, 93, 77, 197, 213, 59, 31, 119, 16, 211, 22, 201, 12, 12, 93, 8, 51, 206, 150, 213, 152, 34, + 63, 82, 106, 28, 18, 66, 251, 162, 69, 30, 133, 191, 125, 227, 220, 62, 63, 202, 64, 230, 156, 91, 82, + 156, 44, 105, 48, 66, 117, 1, 126, 178, 216, 88, 96, 166, 106, 247, 115, 241, 15, 136, 110, 75, 119, 85, + 157, 92, 40, 18, 33, 166, 51, 195, 251, 51, 117, 68, 182, 104, 46, 58, 177, 111, 143, 215, 218, 100, 52, + 55, 120, 138, 61, 176, 126, 142, 78, 171, 85, 65, 93, 85, 103, 45, 22, 32, 168, 145, 139, 239, 32, 231, + 160, 115, 86, 208, 108, 182, 240, 89, 147, 78, 159, 70, 209, 110, 123, 18, 217, 62, 10, 159, 125, 95, 66, + 57, 
118, 74, 158, 156, 7, 227, 184, 251, 130, 211, 48, 216, 155, 229, 69, 62, 47, 221, 218, 111, 194, 78, + 147, 53, 224, 133, 215, 100, 53, 94, 167, 132, 101, 76, 76, 96, 82, 88, 25, 145, 162, 218, 100, 156, 9, + 245, 199, 16, 63, 87, 33, 17, 41, 186, 12, 236, 219, 63, 83, 90, 38, 143, 168, 93, 207, 177, 128, 104, + 135, 27, 77, 87, 159, 121, 202, 33, 147, 124, 31, 108, 96, 144, 1, 92, 155, 125, 37, 8, 183, 44, 177, + 225, 1, 71, 120, 237, 99, 157, 9, 75, 138, 97, 214, 242, 246, 84, 80, 121, 230, 232, 145, 81, 75, 206, + 138, 174, 196, 121, 76, 214, 30, 148, 111, 176, 240, 86, 65, 58, 22, 0, 135, 107, 17, 84, 210, 213, 55, + 166, 28, 234, 200, 8, 47, 221, 68, 65, 132, 79, 29, 178, 130, 237, 71, 149, 173, 188, 111, 93, 85, 247, + 254, 213, 180, 79, 111, 240, 20, 72, 27, 176, 12, 180, 21, 153, 208, 179, 123, 13, 120, 211, 146, 251, 11, + 133, 225, 142, 242, 77, 251, 214, 118, 41, 111, 12, 75, 100, 75, 192, 38, 157, 78, 132, 58, 191, 250, 108, + 115, 180, 190, 72, 249, 136, 232, 26, 4, 81, 63, 147, 5, 246, 134, 220, 119, 175, 189, 220, 130, 9, 149, + 23, 120, 207, 145, 97, 254, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 239, 33, 215, 73, 18, 26, 154, 53, 220, 228, 109, 64, 3, 247, 135, 99, 72, 211, 90, 45, 197, 240, + 229, 252, 138, 40, 230, 166, 70, 86, 164, 11, 209, 71, 30, 21, 188, 151, 34, 224, 120, 215, 46, 159, 156, + 127, 141, 165, 216, 52, 164, 113, 235, 75, 76, 87, 57, 114, 131, 52, 188, 5, 181, 182, 83, 224, 86, 7, + 223, 117, 214, 13, 95, 8, 201, 162, 156, 9, 5, 110, 8, 6, 75, 113, 200, 134, 189, 92, 116, 170, 140, + 23, 57, 216, 219, 246, 213, 196, 6, 222, 136, 150, 171, 102, 49, 72, 7, 184, 135, 133, 119, 52, 11, 179, + 108, 148, 130, 222, 242, 200, 63, 177, 49, 236, 246, 47, 66, 8, 99, 94, 20, 121, 136, 135, 96, 59, 172, + 231, 208, 220, 185, 123, 157, 141, 97, 47, 228, 146, 47, 30, 24, 156, 107, 241, 26, 
15, 189, 224, 37, 133, + 165, 236, 0, 47, 67, 79, 206, 166, 225, 20, 124, 172, 109, 147, 240, 221, 46, 123, 240, 194, 86, 152, 158, + 99, 153, 217, 68, 12, 89, 179, 173, 131, 221, 142, 245, 117, 250, 94, 93, 132, 180, 141, 40, 143, 91, 30, + 40, 106, 17, 186, 178, 59, 114, 91, 237, 154, 210, 4, 205, 52, 69, 46, 98, 75, 182, 193, 10, 217, 105, + 88, 11, 34, 9, 87, 16, 28, 99, 55, 205, 32, 240, 194, 158, 180, 205, 121, 34, 250, 240, 91, 28, 173, + 137, 69, 67, 243, 251, 130, 125, 233, 167, 120, 2, 20, 168, 101, 254, 67, 197, 179, 187, 198, 13, 251, 73, + 207, 3, 22, 82, 203, 158, 132, 110, 248, 206, 98, 69, 247, 122, 228, 162, 191, 99, 243, 182, 158, 128, 165, + 122, 143, 20, 38, 202, 211, 64, 90, 204, 183, 215, 172, 27, 84, 67, 205, 105, 167, 8, 78, 223, 143, 55, + 118, 175, 202, 228, 61, 252, 74, 45, 18, 165, 148, 14, 161, 162, 138, 229, 49, 216, 31, 100, 184, 123, 41, + 75, 41, 51, 150, 181, 94, 92, 154, 187, 89, 60, 100, 211, 186, 36, 198, 61, 26, 35, 165, 186, 208, 78, + 240, 169, 210, 16, 150, 155, 137, 60, 36, 1, 234, 13, 76, 58, 248, 50, 32, 229, 121, 71, 195, 195, 172, + 93, 237, 40, 69, 205, 87, 229, 222, 59, 28, 72, 16, 58, 223, 23, 214, 111, 246, 46, 157, 0, 146, 101, + 207, 14, 17, 233, 105, 100, 160, 202, 5, 234, 154, 108, 56, 131, 241, 79, 251, 130, 75, 238, 33, 32, 129, + 180, 45, 11, 53, 113, 213, 65, 13, 45, 42, 204, 213, 239, 54, 3, 88, 10, 148, 79, 73, 58, 151, 23, + 79, 126, 9, 40, 46, 222, 199, 247, 226, 81, 47, 192, 200, 213, 35, 63, 114, 162, 207, 247, 72, 201, 245, + 132, 12, 44, 195, 19, 128, 227, 126, 35, 203, 3, 233, 130, 4, 94, 194, 131, 38, 63, 123, 79, 234, 243, + 199, 142, 75, 203, 131, 195, 114, 146, 167, 165, 121, 138, 176, 45, 84, 146, 71, 17, 194, 117, 171, 140, 233, + 252, 245, 13, 73, 189, 238, 52, 136, 51, 97, 241, 119, 150, 209, 110, 198, 186, 210, 98, 110, 195, 98, 251, + 16, 253, 237, 231, 244, 237, 152, 44, 94, 214, 50, 30, 220, 127, 52, 208, 107, 246, 214, 189, 153, 192, 176, + 176, 77, 86, 120, 153, 125, 226, 37, 177, 70, 
153, 209, 2, 99, 78, 139, 174, 136, 25, 28, 223, 73, 26, + 195, 201, 198, 48, 76, 160, 146, 37, 107, 151, 220, 241, 125, 64, 163, 198, 238, 60, 58, 97, 241, 43, 11, + 86, 173, 40, 163, 231, 27, 105, 196, 44, 205, 40, 238, 174, 69, 228, 171, 0, 75, 194, 233, 100, 23, 216, + 49, 115, 179, 215, 252, 59, 135, 254, 56, 165, 209, 127, 103, 240, 178, 40, 138, 122, 35, 207, 109, 98, 12, + 245, 38, 78, 52, 249, 137, 56, 4, 149, 21, 77, 238, 109, 134, 167, 48, 194, 237, 77, 168, 102, 242, 46, + 94, 186, 40, 126, 140, 119, 91, 107, 136, 250, 205, 64, 161, 48, 164, 41, 15, 144, 21, 205, 104, 97, 243, + 120, 14, 138, 206, 15, 224, 248, 12, 70, 166, 7, 124, 208, 226, 222, 219, 35, 173, 236, 118, 84, 110, 108, + 188, 6, 118, 119, 147, 94, 77, 100, 23, 215, 207, 65, 208, 74, 126, 73, 92, 172, 181, 159, 77, 231, 65, + 136, 63, 236, 218, 98, 238, 94, 82, 77, 177, 30, 54, 206, 62, 228, 31, 40, 84, 67, 215, 34, 210, 127, + 93, 146, 25, 237, 0, 55, 83, 176, 60, 35, 94, 186, 24, 65, 130, 236, 236, 37, 113, 25, 27, 10, 50, + 75, 163, 32, 142, 105, 114, 97, 132, 189, 188, 208, 13, 52, 84, 127, 115, 180, 218, 41, 170, 173, 126, 217, + 61, 176, 51, 167, 27, 130, 207, 93, 50, 229, 66, 237, 192, 17, 54, 29, 109, 54, 155, 236, 135, 212, 106, + 21, 209, 24, 19, 121, 197, 105, 93, 33, 4, 181, 19, 177, 33, 253, 219, 14, 184, 145, 169, 173, 7, 68, + 39, 88, 61, 138, 223, 82, 123, 147, 35, 46, 236, 194, 219, 43, 139, 35, 155, 196, 227, 90, 161, 49, 217, + 1, 187, 127, 213, 127, 62, 115, 65, 225, 58, 15, 47, 99, 103, 89, 152, 114, 6, 125, 195, 220, 49, 142, + 66, 226, 102, 96, 120, 79, 218, 142, 116, 178, 19, 165, 96, 29, 250, 70, 212, 125, 37, 12, 220, 161, 161, + 117, 171, 221, 106, 107, 11, 17, 220, 237, 117, 140, 74, 53, 228, 45, 38, 253, 93, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 95, 65, 72, 163, 28, 217, 68, 31, 90, + 114, 236, 33, 
210, 76, 143, 181, 176, 83, 45, 61, 217, 151, 37, 43, 240, 105, 149, 12, 161, 28, 254, 242, + 186, 252, 196, 242, 249, 171, 175, 91, 194, 212, 240, 203, 87, 148, 40, 117, 99, 214, 109, 40, 97, 82, 130, + 225, 223, 16, 199, 203, 94, 107, 99, 225, 225, 119, 55, 31, 112, 201, 5, 29, 206, 203, 92, 122, 22, 208, + 4, 31, 252, 129, 195, 77, 238, 178, 146, 171, 44, 23, 238, 54, 103, 253, 241, 36, 78, 45, 194, 141, 173, + 68, 77, 253, 96, 34, 11, 213, 104, 165, 19, 159, 71, 150, 175, 177, 152, 20, 120, 4, 62, 236, 202, 185, + 210, 252, 64, 74, 129, 176, 66, 21, 31, 225, 129, 230, 240, 241, 175, 210, 155, 251, 221, 119, 116, 156, 144, + 207, 240, 170, 210, 225, 207, 123, 42, 6, 35, 156, 196, 235, 45, 52, 8, 94, 47, 98, 90, 158, 125, 58, + 172, 152, 116, 230, 29, 29, 126, 76, 164, 64, 150, 250, 193, 207, 98, 233, 61, 97, 119, 159, 124, 118, 185, + 250, 217, 125, 247, 22, 77, 18, 2, 17, 181, 225, 67, 210, 3, 53, 188, 142, 161, 35, 29, 88, 105, 154, + 45, 84, 189, 210, 123, 110, 120, 102, 64, 242, 25, 159, 175, 8, 228, 81, 74, 102, 87, 16, 123, 7, 214, + 96, 129, 86, 213, 57, 42, 212, 32, 103, 167, 102, 145, 177, 166, 71, 141, 144, 187, 87, 245, 139, 240, 21, + 81, 250, 13, 113, 223, 160, 113, 2, 38, 224, 34, 50, 41, 201, 233, 175, 193, 100, 170, 177, 14, 110, 222, + 70, 243, 126, 77, 153, 62, 30, 112, 127, 53, 90, 27, 152, 245, 245, 46, 96, 104, 210, 212, 205, 192, 235, + 47, 11, 181, 92, 190, 213, 37, 37, 65, 9, 155, 150, 243, 79, 68, 244, 74, 62, 75, 24, 184, 38, 245, + 156, 184, 144, 40, 112, 232, 249, 160, 56, 172, 4, 29, 254, 70, 247, 156, 48, 91, 182, 38, 251, 132, 36, + 184, 126, 54, 253, 248, 223, 9, 65, 65, 179, 239, 177, 82, 254, 61, 59, 4, 240, 91, 142, 215, 175, 41, + 33, 15, 247, 124, 108, 17, 87, 30, 91, 162, 28, 179, 164, 154, 185, 147, 109, 2, 55, 122, 139, 232, 68, + 109, 120, 187, 245, 82, 6, 45, 196, 158, 247, 238, 125, 151, 173, 15, 162, 101, 38, 11, 202, 99, 69, 186, + 217, 185, 76, 46, 44, 253, 56, 8, 137, 234, 210, 103, 220, 87, 204, 135, 205, 184, 161, 
58, 183, 6, 102, + 163, 58, 193, 185, 113, 172, 14, 44, 200, 216, 153, 203, 112, 51, 61, 91, 179, 25, 10, 57, 205, 242, 44, + 25, 132, 229, 95, 230, 195, 203, 134, 90, 71, 251, 216, 48, 126, 143, 247, 15, 244, 44, 56, 152, 224, 105, + 215, 24, 94, 110, 105, 218, 85, 168, 111, 234, 189, 71, 56, 241, 231, 31, 221, 217, 243, 54, 93, 142, 52, + 191, 213, 54, 191, 206, 106, 169, 168, 221, 93, 243, 73, 92, 203, 189, 197, 188, 225, 191, 252, 35, 243, 163, + 221, 126, 166, 21, 76, 10, 107, 221, 180, 50, 126, 188, 164, 164, 199, 183, 11, 182, 181, 58, 189, 210, 100, + 92, 204, 237, 52, 134, 50, 111, 161, 10, 141, 149, 192, 172, 42, 87, 166, 245, 252, 214, 11, 128, 164, 22, + 145, 158, 113, 194, 28, 233, 62, 85, 214, 253, 82, 210, 32, 205, 151, 248, 47, 30, 51, 165, 192, 214, 93, + 196, 218, 83, 164, 147, 154, 10, 119, 0, 156, 21, 229, 40, 133, 240, 128, 70, 109, 77, 218, 240, 32, 41, + 150, 125, 231, 240, 88, 90, 238, 54, 167, 69, 16, 90, 149, 16, 106, 26, 165, 224, 193, 70, 220, 158, 183, + 75, 163, 9, 57, 117, 248, 158, 66, 206, 104, 210, 147, 151, 146, 96, 29, 226, 158, 145, 61, 108, 207, 171, + 249, 203, 187, 228, 198, 43, 235, 129, 82, 135, 243, 165, 228, 79, 174, 86, 152, 76, 95, 44, 20, 195, 151, + 61, 170, 76, 207, 127, 224, 159, 18, 28, 76, 2, 98, 240, 217, 118, 204, 43, 153, 127, 60, 69, 25, 2, + 62, 156, 33, 51, 71, 100, 220, 174, 82, 8, 151, 249, 113, 150, 174, 200, 155, 16, 237, 10, 42, 170, 133, + 241, 33, 98, 253, 192, 98, 111, 86, 135, 104, 104, 51, 126, 5, 182, 35, 120, 254, 166, 248, 184, 176, 212, + 161, 46, 251, 108, 1, 136, 90, 249, 50, 163, 242, 84, 188, 111, 81, 121, 85, 82, 4, 133, 71, 190, 244, + 67, 63, 183, 146, 146, 202, 44, 42, 192, 13, 104, 136, 227, 63, 110, 81, 49, 86, 99, 106, 117, 175, 100, + 37, 95, 213, 121, 156, 37, 73, 137, 74, 193, 52, 219, 90, 13, 191, 89, 90, 250, 63, 207, 92, 102, 224, + 116, 250, 226, 114, 44, 243, 92, 144, 154, 23, 192, 69, 20, 167, 52, 227, 12, 54, 238, 7, 221, 168, 32, + 186, 124, 139, 45, 59, 188, 231, 203, 
247, 251, 16, 229, 26, 220, 10, 125, 148, 67, 143, 230, 95, 133, 239, + 61, 28, 99, 31, 107, 58, 137, 166, 1, 195, 186, 69, 44, 178, 245, 124, 196, 228, 112, 186, 63, 94, 50, + 85, 10, 230, 17, 108, 238, 120, 150, 204, 91, 40, 49, 30, 60, 157, 205, 54, 204, 46, 165, 98, 35, 224, + 147, 98, 208, 163, 111, 43, 184, 171, 183, 167, 100, 32, 77, 16, 13, 67, 96, 73, 46, 86, 216, 80, 137, + 177, 231, 152, 58, 108, 234, 82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 48, 18, 119, 170, 51, 246, 252, 48, 249, 136, 156, 157, 210, 220, 33, 131, 244, 223, 212, 54, 220, + 48, 151, 139, 186, 35, 168, 201, 232, 67, 118, 170, 109, 200, 101, 217, 180, 146, 154, 45, 21, 170, 245, 156, + 54, 118, 184, 130, 166, 151, 202, 2, 49, 114, 49, 94, 168, 66, 213, 222, 221, 26, 64, 106, 85, 196, 26, + 167, 127, 250, 41, 125, 207, 104, 173, 2, 161, 111, 236, 194, 42, 59, 210, 149, 99, 244, 181, 179, 57, 162, + 124, 203, 7, 32, 18, 63, 112, 249, 183, 33, 29, 80, 106, 175, 226, 109, 17, 42, 12, 72, 204, 132, 66, + 112, 224, 187, 194, 199, 25, 122, 6, 42, 241, 75, 181, 26, 75, 174, 16, 208, 41, 190, 87, 97, 131, 5, + 86, 168, 203, 187, 177, 207, 231, 108, 213, 103, 194, 136, 244, 90, 154, 97, 253, 94, 108, 87, 204, 108, 128, + 188, 216, 229, 98, 146, 55, 34, 80, 201, 151, 142, 108, 21, 28, 208, 32, 248, 207, 232, 46, 230, 44, 234, + 169, 37, 131, 181, 65, 6, 112, 136, 129, 66, 86, 14, 108, 4, 245, 132, 230, 23, 244, 11, 225, 104, 232, + 129, 7, 237, 73, 251, 172, 77, 156, 206, 209, 165, 190, 191, 235, 80, 45, 37, 34, 24, 66, 56, 35, 17, + 112, 157, 241, 125, 126, 24, 212, 180, 231, 212, 156, 32, 105, 233, 2, 36, 170, 229, 197, 192, 117, 233, 154, + 133, 241, 192, 150, 127, 51, 13, 192, 249, 140, 89, 184, 131, 208, 235, 157, 76, 29, 65, 246, 120, 57, 229, + 20, 138, 105, 92, 105, 199, 248, 32, 37, 227, 224, 69, 21, 63, 218, 171, 219, 100, 238, 28, 
2, 214, 159, + 70, 216, 210, 70, 85, 140, 217, 33, 22, 99, 123, 34, 34, 22, 224, 68, 30, 118, 135, 51, 8, 99, 198, + 108, 12, 65, 127, 163, 132, 60, 88, 95, 240, 139, 137, 200, 192, 107, 194, 70, 172, 120, 34, 179, 224, 137, + 55, 163, 136, 246, 7, 48, 178, 14, 62, 16, 141, 93, 200, 179, 131, 42, 147, 92, 80, 79, 91, 61, 57, + 232, 9, 116, 248, 102, 25, 61, 125, 121, 171, 50, 159, 181, 125, 90, 122, 43, 134, 137, 146, 81, 110, 204, + 71, 219, 88, 184, 158, 112, 218, 118, 121, 79, 200, 7, 240, 64, 233, 41, 220, 3, 0, 7, 64, 150, 76, + 171, 84, 26, 217, 146, 32, 128, 33, 172, 230, 168, 40, 154, 159, 19, 197, 160, 90, 211, 208, 10, 190, 144, + 17, 5, 40, 167, 217, 238, 222, 162, 172, 45, 31, 105, 137, 246, 46, 69, 136, 26, 40, 168, 193, 191, 194, + 28, 216, 177, 56, 44, 226, 2, 136, 0, 44, 157, 115, 17, 36, 19, 246, 21, 148, 202, 235, 33, 80, 194, + 160, 6, 161, 252, 125, 132, 216, 175, 152, 244, 30, 30, 159, 70, 232, 72, 146, 69, 61, 192, 156, 78, 130, + 172, 223, 62, 108, 75, 168, 12, 52, 15, 96, 100, 254, 134, 12, 74, 143, 232, 30, 245, 59, 90, 14, 199, + 246, 187, 204, 35, 204, 142, 220, 177, 73, 230, 242, 201, 74, 157, 85, 52, 215, 68, 89, 90, 164, 35, 186, + 183, 198, 21, 111, 197, 200, 223, 89, 120, 223, 180, 78, 76, 47, 185, 31, 191, 131, 239, 10, 77, 2, 8, + 132, 49, 217, 15, 59, 90, 30, 77, 68, 110, 8, 254, 95, 149, 20, 217, 80, 218, 163, 153, 228, 152, 210, + 16, 160, 114, 245, 244, 188, 176, 148, 252, 67, 127, 1, 220, 244, 59, 188, 113, 43, 170, 221, 155, 101, 158, + 205, 160, 167, 74, 12, 41, 70, 189, 47, 216, 229, 47, 3, 73, 72, 236, 16, 173, 10, 177, 14, 99, 109, + 206, 105, 135, 65, 6, 249, 233, 238, 211, 178, 155, 252, 79, 69, 43, 190, 193, 4, 58, 196, 182, 119, 164, + 164, 46, 102, 71, 195, 83, 249, 22, 85, 117, 5, 61, 227, 221, 180, 211, 42, 26, 224, 191, 107, 220, 254, + 32, 175, 185, 89, 123, 30, 174, 52, 143, 94, 195, 124, 232, 68, 18, 120, 232, 58, 44, 13, 57, 96, 92, + 168, 117, 58, 235, 33, 168, 216, 144, 123, 37, 182, 165, 23, 205, 139, 78, 
178, 34, 58, 57, 121, 188, 71, + 136, 133, 238, 113, 7, 103, 126, 93, 94, 118, 80, 168, 6, 42, 88, 129, 215, 162, 66, 140, 247, 137, 3, + 237, 109, 253, 236, 68, 93, 85, 114, 223, 97, 29, 49, 186, 215, 72, 78, 164, 234, 151, 29, 53, 31, 66, + 220, 70, 230, 174, 129, 243, 205, 65, 142, 104, 219, 172, 44, 77, 203, 226, 67, 212, 96, 25, 176, 174, 11, + 167, 99, 42, 4, 80, 38, 108, 127, 177, 67, 151, 168, 204, 112, 150, 14, 158, 233, 178, 159, 230, 12, 160, + 128, 12, 239, 97, 5, 184, 229, 200, 178, 57, 52, 214, 196, 177, 167, 139, 64, 248, 86, 58, 107, 42, 128, + 105, 57, 21, 206, 74, 126, 70, 28, 228, 246, 42, 27, 93, 191, 153, 112, 199, 23, 136, 252, 9, 109, 153, + 82, 204, 107, 132, 129, 233, 200, 165, 20, 95, 50, 111, 79, 19, 130, 213, 164, 220, 60, 119, 220, 179, 145, + 76, 211, 192, 245, 135, 244, 119, 141, 70, 174, 195, 194, 124, 129, 188, 245, 96, 65, 124, 230, 106, 106, 252, + 229, 181, 144, 178, 27, 194, 200, 210, 241, 79, 164, 238, 111, 252, 175, 235, 114, 157, 96, 105, 122, 244, 32, + 84, 215, 252, 12, 31, 104, 21, 248, 183, 219, 166, 164, 185, 77, 4, 171, 122, 13, 8, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 181, 221, 101, 49, 140, 116, 13, 221, 248, + 121, 205, 207, 222, 163, 213, 141, 133, 135, 129, 132, 26, 213, 249, 69, 92, 2, 124, 99, 86, 139, 17, 144, + 135, 245, 118, 54, 144, 178, 248, 241, 153, 16, 120, 44, 4, 8, 16, 54, 211, 231, 13, 53, 118, 69, 213, + 206, 203, 22, 240, 102, 51, 205, 7, 39, 56, 63, 19, 118, 128, 47, 248, 50, 194, 63, 84, 56, 14, 250, + 150, 70, 198, 57, 213, 2, 73, 145, 31, 119, 199, 6, 196, 48, 70, 50, 123, 89, 3, 63, 177, 129, 61, + 84, 184, 158, 179, 7, 76, 242, 230, 171, 90, 1, 222, 144, 101, 53, 137, 132, 212, 124, 90, 242, 231, 135, + 51, 95, 231, 61, 47, 162, 220, 127, 26, 204, 155, 63, 213, 98, 144, 179, 155, 204, 251, 132, 240, 76, 29, + 102, 250, 14, 91, 181, 154, 
17, 133, 92, 41, 124, 206, 123, 45, 177, 195, 81, 205, 152, 195, 40, 189, 113, + 26, 66, 26, 220, 71, 198, 76, 134, 194, 237, 66, 77, 188, 125, 165, 26, 192, 64, 90, 205, 62, 167, 169, + 77, 12, 11, 13, 197, 174, 252, 253, 64, 146, 102, 186, 3, 34, 202, 203, 187, 20, 187, 173, 80, 57, 52, + 205, 162, 41, 198, 123, 195, 41, 193, 206, 68, 166, 217, 62, 195, 59, 196, 199, 188, 169, 47, 7, 130, 146, + 23, 153, 71, 147, 254, 136, 155, 99, 232, 91, 46, 227, 192, 57, 96, 49, 185, 153, 74, 247, 169, 82, 195, + 193, 72, 109, 239, 186, 193, 119, 121, 77, 201, 126, 4, 245, 241, 12, 231, 76, 129, 88, 165, 131, 97, 192, + 246, 129, 185, 20, 156, 15, 219, 122, 241, 142, 251, 13, 105, 98, 212, 140, 52, 236, 217, 201, 53, 8, 212, + 95, 233, 243, 223, 205, 57, 113, 221, 152, 136, 138, 203, 123, 15, 114, 80, 26, 202, 99, 139, 129, 56, 23, + 240, 97, 97, 164, 166, 163, 207, 70, 234, 51, 113, 127, 232, 201, 104, 5, 22, 230, 159, 88, 193, 57, 114, + 107, 127, 161, 107, 36, 44, 31, 36, 125, 23, 128, 64, 75, 185, 152, 109, 29, 53, 109, 68, 159, 89, 120, + 149, 232, 140, 106, 147, 189, 231, 203, 127, 80, 184, 60, 229, 230, 196, 154, 173, 184, 171, 169, 110, 157, 35, + 79, 13, 48, 75, 160, 205, 74, 229, 251, 6, 212, 94, 145, 9, 115, 124, 189, 24, 180, 164, 69, 11, 8, + 79, 4, 128, 32, 92, 96, 47, 172, 134, 177, 171, 210, 103, 64, 45, 214, 152, 9, 218, 25, 94, 81, 64, + 115, 161, 238, 13, 74, 231, 222, 36, 196, 210, 164, 143, 167, 231, 55, 186, 94, 10, 97, 72, 211, 71, 77, + 85, 166, 113, 250, 41, 101, 23, 250, 97, 231, 189, 249, 67, 150, 183, 6, 202, 64, 76, 47, 94, 18, 157, + 95, 68, 10, 226, 221, 81, 242, 110, 53, 101, 144, 51, 14, 25, 206, 124, 112, 10, 137, 17, 10, 67, 21, + 61, 140, 40, 230, 172, 120, 174, 82, 136, 87, 232, 26, 59, 148, 95, 73, 162, 117, 159, 32, 202, 61, 118, + 179, 33, 19, 241, 58, 55, 104, 137, 133, 114, 122, 70, 151, 78, 226, 5, 150, 253, 35, 93, 30, 92, 23, + 2, 165, 122, 20, 251, 119, 247, 158, 2, 113, 206, 46, 6, 72, 53, 34, 176, 91, 164, 157, 179, 101, 189, + 
250, 246, 164, 10, 87, 175, 201, 223, 128, 109, 214, 141, 218, 112, 49, 33, 96, 101, 254, 12, 56, 148, 140, + 172, 168, 43, 160, 102, 80, 172, 138, 46, 9, 19, 207, 59, 69, 48, 39, 211, 52, 124, 43, 204, 253, 86, + 65, 11, 20, 146, 222, 55, 243, 16, 187, 219, 177, 110, 48, 201, 153, 226, 131, 92, 154, 52, 195, 140, 37, + 142, 76, 47, 134, 140, 74, 129, 50, 1, 143, 19, 92, 40, 20, 103, 131, 199, 119, 56, 236, 199, 250, 1, + 80, 19, 148, 78, 191, 158, 88, 120, 125, 6, 50, 217, 233, 185, 55, 56, 163, 125, 137, 78, 133, 129, 208, + 101, 225, 244, 167, 155, 157, 7, 108, 157, 101, 138, 132, 82, 46, 227, 128, 162, 234, 189, 234, 176, 101, 114, + 75, 169, 120, 176, 134, 51, 241, 39, 36, 212, 193, 238, 187, 172, 92, 189, 41, 80, 238, 170, 247, 119, 51, + 233, 246, 146, 99, 143, 62, 130, 2, 79, 64, 42, 26, 88, 166, 21, 119, 224, 172, 217, 141, 187, 42, 147, + 169, 55, 206, 147, 215, 193, 85, 21, 10, 173, 1, 47, 192, 123, 242, 127, 222, 247, 233, 170, 221, 220, 226, + 101, 218, 214, 103, 36, 13, 195, 36, 32, 43, 76, 189, 88, 2, 147, 128, 180, 65, 91, 179, 124, 108, 116, + 50, 50, 142, 40, 180, 110, 246, 20, 52, 240, 76, 186, 60, 228, 174, 56, 102, 42, 203, 24, 228, 47, 107, + 131, 161, 3, 133, 106, 82, 74, 249, 167, 181, 172, 189, 159, 89, 153, 118, 232, 248, 205, 147, 8, 203, 192, + 20, 169, 189, 148, 234, 233, 130, 230, 171, 105, 36, 246, 191, 211, 165, 88, 154, 136, 6, 90, 131, 200, 241, + 55, 92, 135, 221, 39, 14, 68, 139, 24, 237, 215, 20, 177, 190, 51, 106, 127, 237, 146, 222, 61, 247, 173, + 2, 112, 211, 145, 99, 95, 206, 164, 183, 233, 38, 21, 44, 106, 226, 183, 171, 230, 201, 56, 77, 118, 121, + 184, 141, 129, 114, 127, 3, 136, 188, 10, 243, 123, 204, 106, 74, 241, 202, 186, 187, 233, 151, 222, 27, 135, + 167, 125, 63, 226, 25, 139, 131, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 136, 5, 119, 80, 154, 4, 241, 70, 
159, 227, 2, 238, 104, 242, 90, 84, 111, 231, 89, 60, + 9, 57, 37, 67, 199, 38, 232, 235, 141, 213, 56, 16, 31, 123, 175, 4, 96, 122, 28, 41, 208, 176, 135, + 37, 111, 233, 60, 206, 42, 208, 212, 103, 197, 110, 153, 152, 245, 229, 129, 59, 172, 155, 132, 104, 53, 12, + 13, 105, 234, 168, 191, 184, 111, 198, 58, 212, 222, 156, 84, 196, 62, 205, 142, 73, 0, 96, 241, 191, 204, + 212, 209, 58, 190, 237, 185, 162, 188, 108, 216, 144, 63, 207, 191, 194, 44, 68, 254, 140, 118, 111, 83, 121, + 242, 108, 97, 40, 94, 208, 135, 168, 116, 197, 233, 141, 42, 84, 96, 208, 110, 111, 250, 170, 53, 171, 178, + 136, 168, 182, 168, 217, 151, 36, 59, 143, 86, 238, 197, 26, 107, 42, 23, 151, 225, 227, 200, 9, 82, 6, + 73, 123, 201, 31, 109, 41, 220, 188, 122, 7, 102, 190, 90, 54, 138, 46, 68, 72, 69, 46, 27, 14, 190, + 237, 140, 71, 42, 172, 178, 142, 200, 114, 108, 162, 253, 168, 241, 115, 71, 18, 112, 22, 145, 15, 231, 88, + 100, 163, 250, 72, 129, 244, 233, 86, 70, 26, 191, 52, 26, 58, 50, 254, 5, 30, 44, 5, 222, 149, 113, + 3, 140, 140, 225, 107, 86, 156, 10, 247, 228, 212, 79, 85, 63, 122, 77, 157, 228, 233, 157, 167, 82, 254, + 65, 208, 0, 175, 24, 56, 185, 224, 186, 204, 248, 139, 120, 196, 225, 156, 126, 198, 72, 46, 240, 4, 138, + 253, 69, 158, 182, 40, 229, 14, 64, 55, 221, 123, 165, 89, 130, 206, 246, 237, 253, 193, 134, 194, 70, 197, + 135, 104, 141, 35, 190, 23, 154, 252, 226, 212, 79, 234, 158, 238, 126, 232, 62, 112, 86, 124, 18, 151, 166, + 142, 160, 218, 244, 135, 211, 180, 218, 48, 159, 210, 36, 153, 175, 73, 27, 138, 229, 80, 236, 133, 25, 209, + 164, 240, 61, 211, 163, 48, 253, 35, 97, 7, 251, 198, 146, 45, 51, 80, 60, 5, 249, 81, 81, 76, 46, + 194, 21, 129, 38, 167, 8, 58, 101, 133, 177, 142, 129, 53, 235, 246, 57, 174, 209, 205, 51, 63, 16, 65, + 199, 180, 84, 147, 76, 54, 104, 103, 139, 196, 162, 44, 44, 227, 38, 46, 146, 118, 238, 233, 156, 223, 205, + 131, 87, 60, 247, 139, 108, 18, 56, 192, 221, 220, 208, 94, 203, 28, 209, 70, 53, 102, 220, 144, 107, 56, + 
142, 194, 232, 161, 152, 118, 23, 171, 87, 132, 80, 4, 106, 193, 231, 250, 42, 101, 59, 37, 174, 245, 167, + 187, 30, 189, 212, 53, 191, 235, 208, 137, 168, 177, 4, 120, 50, 157, 218, 126, 230, 122, 118, 194, 119, 100, + 197, 157, 228, 77, 119, 171, 62, 229, 251, 10, 119, 24, 80, 73, 254, 223, 37, 47, 178, 239, 78, 150, 122, + 37, 93, 125, 35, 80, 82, 38, 6, 40, 168, 168, 83, 180, 105, 186, 218, 174, 137, 205, 20, 32, 197, 166, + 41, 33, 96, 252, 2, 148, 59, 12, 95, 8, 44, 68, 81, 178, 165, 185, 128, 33, 159, 19, 236, 224, 181, + 83, 115, 30, 241, 220, 181, 105, 128, 141, 189, 149, 142, 11, 81, 253, 30, 103, 226, 8, 211, 171, 45, 180, + 104, 2, 79, 96, 22, 192, 27, 229, 217, 93, 96, 136, 10, 43, 79, 236, 156, 42, 245, 153, 251, 253, 65, + 147, 84, 27, 18, 161, 47, 243, 222, 48, 23, 71, 225, 112, 38, 176, 104, 17, 188, 86, 95, 6, 163, 153, + 77, 80, 203, 173, 164, 5, 242, 240, 70, 214, 41, 157, 101, 65, 104, 175, 250, 5, 119, 78, 226, 88, 161, + 46, 234, 5, 117, 79, 47, 160, 211, 196, 228, 180, 50, 34, 172, 169, 98, 199, 206, 49, 17, 113, 29, 47, + 131, 93, 55, 40, 113, 5, 97, 84, 146, 236, 206, 212, 164, 217, 197, 173, 1, 223, 152, 144, 137, 230, 39, + 222, 249, 253, 42, 209, 125, 132, 186, 195, 180, 211, 139, 186, 5, 7, 106, 101, 154, 77, 85, 172, 114, 49, + 126, 236, 173, 65, 100, 136, 196, 124, 9, 83, 209, 26, 127, 195, 4, 227, 252, 34, 162, 69, 53, 88, 54, + 225, 206, 75, 154, 63, 62, 27, 223, 12, 9, 159, 167, 4, 77, 25, 162, 136, 129, 0, 218, 44, 161, 135, + 107, 49, 50, 137, 110, 22, 138, 130, 163, 63, 120, 250, 42, 76, 250, 64, 195, 28, 170, 135, 32, 138, 182, + 150, 242, 252, 8, 209, 232, 184, 234, 155, 203, 16, 22, 38, 50, 183, 21, 191, 242, 211, 88, 216, 225, 72, + 215, 253, 203, 91, 34, 126, 246, 144, 250, 59, 221, 24, 132, 247, 218, 246, 90, 218, 73, 194, 249, 171, 9, + 118, 227, 76, 28, 182, 249, 223, 61, 216, 165, 193, 247, 110, 201, 5, 62, 132, 161, 230, 103, 21, 22, 15, + 161, 124, 46, 77, 12, 136, 114, 45, 128, 43, 87, 209, 191, 137, 68, 69, 219, 
202, 254, 50, 190, 61, 163, + 189, 160, 102, 136, 130, 97, 126, 20, 18, 245, 182, 147, 41, 82, 98, 56, 72, 245, 31, 43, 139, 199, 130, + 210, 104, 170, 26, 60, 69, 237, 87, 100, 203, 130, 71, 217, 60, 182, 199, 22, 181, 71, 234, 12, 215, 188, + 196, 220, 177, 175, 161, 128, 47, 73, 232, 28, 66, 122, 71, 234, 33, 142, 155, 59, 93, 159, 71, 88, 174, + 70, 222, 186, 91, 167, 198, 89, 101, 149, 117, 15, 187, 241, 190, 32, 229, 25, 209, 158, 2, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 177, 112, 96, 168, 65, 84, 5, 126, + 102, 156, 175, 133, 130, 245, 182, 135, 221, 171, 41, 132, 114, 212, 178, 126, 162, 0, 187, 71, 39, 223, 211, + 238, 243, 157, 112, 160, 172, 8, 95, 6, 34, 141, 83, 161, 62, 177, 111, 171, 71, 231, 18, 86, 182, 74, + 133, 116, 85, 2, 150, 226, 147, 192, 88, 2, 211, 17, 60, 90, 203, 28, 227, 120, 232, 115, 185, 108, 67, + 235, 224, 200, 30, 214, 220, 25, 143, 188, 218, 248, 75, 236, 181, 44, 130, 215, 113, 248, 62, 59, 136, 115, + 33, 174, 90, 6, 176, 188, 168, 2, 212, 18, 54, 156, 246, 181, 22, 74, 57, 25, 234, 230, 123, 184, 69, + 59, 62, 151, 16, 114, 219, 98, 55, 175, 168, 151, 105, 114, 251, 228, 75, 107, 114, 237, 90, 218, 51, 44, + 169, 168, 232, 106, 244, 31, 83, 150, 204, 210, 110, 108, 175, 17, 106, 165, 192, 127, 59, 47, 99, 177, 98, + 123, 18, 122, 10, 139, 232, 5, 146, 29, 237, 46, 69, 186, 212, 95, 96, 223, 78, 173, 212, 234, 39, 141, + 48, 78, 136, 21, 212, 206, 144, 18, 67, 74, 35, 171, 210, 125, 245, 61, 231, 58, 134, 45, 92, 61, 78, + 29, 123, 142, 18, 78, 162, 60, 166, 31, 149, 175, 63, 89, 65, 143, 190, 50, 88, 104, 172, 113, 103, 38, + 35, 154, 3, 8, 28, 160, 54, 247, 10, 102, 184, 95, 35, 126, 154, 162, 85, 71, 169, 43, 158, 100, 223, + 218, 233, 124, 126, 211, 253, 164, 15, 156, 10, 27, 232, 96, 231, 80, 49, 47, 30, 239, 139, 215, 68, 100, + 235, 108, 30, 229, 213, 247, 209, 
243, 153, 249, 181, 85, 244, 53, 196, 171, 106, 1, 182, 94, 208, 221, 134, + 39, 54, 195, 9, 227, 210, 112, 152, 192, 17, 20, 114, 218, 81, 125, 175, 206, 25, 55, 17, 125, 2, 34, + 49, 31, 157, 11, 253, 28, 69, 23, 203, 144, 128, 223, 184, 88, 135, 82, 125, 156, 62, 30, 40, 244, 105, + 168, 177, 209, 105, 199, 106, 78, 239, 3, 136, 168, 8, 155, 98, 227, 64, 13, 220, 22, 213, 103, 148, 238, + 177, 206, 234, 6, 247, 155, 138, 205, 54, 8, 17, 37, 21, 235, 46, 132, 31, 49, 30, 87, 82, 231, 93, + 38, 91, 167, 159, 167, 223, 95, 217, 70, 77, 205, 8, 156, 130, 15, 169, 213, 109, 203, 114, 1, 252, 44, + 93, 224, 164, 115, 163, 103, 97, 103, 81, 165, 121, 25, 31, 140, 251, 71, 120, 137, 74, 103, 78, 77, 151, + 194, 239, 76, 99, 82, 150, 106, 27, 249, 248, 97, 106, 133, 214, 219, 122, 249, 245, 230, 115, 48, 254, 227, + 46, 147, 188, 30, 105, 96, 66, 116, 169, 185, 3, 29, 232, 27, 219, 210, 179, 100, 176, 243, 3, 149, 227, + 178, 89, 3, 218, 190, 135, 166, 5, 93, 93, 172, 74, 242, 241, 44, 198, 94, 228, 240, 233, 234, 183, 232, + 50, 136, 23, 34, 64, 138, 93, 220, 35, 10, 242, 172, 123, 108, 2, 244, 77, 62, 203, 249, 160, 232, 69, + 231, 10, 214, 6, 94, 113, 213, 87, 36, 115, 166, 119, 145, 59, 110, 83, 131, 69, 196, 14, 242, 91, 17, + 143, 128, 180, 249, 3, 247, 141, 157, 28, 74, 253, 104, 174, 44, 171, 145, 192, 245, 215, 137, 239, 1, 5, + 43, 186, 154, 245, 249, 20, 16, 31, 224, 118, 139, 73, 167, 134, 189, 163, 170, 90, 62, 118, 201, 122, 29, + 140, 205, 40, 5, 6, 183, 242, 170, 18, 202, 232, 87, 208, 149, 20, 128, 238, 6, 88, 237, 109, 217, 117, + 86, 199, 137, 164, 45, 151, 117, 217, 209, 182, 28, 189, 107, 146, 174, 133, 253, 74, 40, 77, 76, 188, 152, + 27, 135, 247, 159, 137, 39, 101, 19, 136, 159, 193, 148, 194, 247, 136, 41, 89, 196, 140, 81, 23, 174, 79, + 233, 228, 174, 186, 126, 85, 172, 228, 113, 44, 50, 253, 51, 251, 192, 253, 42, 151, 17, 168, 26, 241, 145, + 169, 222, 99, 68, 195, 238, 20, 10, 91, 147, 220, 241, 175, 210, 82, 185, 201, 168, 187, 225, 18, 
109, 235, + 3, 29, 75, 248, 236, 102, 215, 2, 242, 133, 185, 107, 2, 88, 245, 232, 135, 63, 183, 44, 163, 155, 177, + 127, 166, 51, 86, 170, 193, 212, 182, 84, 8, 236, 212, 168, 154, 222, 177, 166, 48, 129, 235, 154, 167, 38, + 84, 33, 93, 70, 52, 33, 32, 125, 204, 8, 154, 47, 106, 118, 8, 141, 31, 2, 132, 138, 205, 162, 197, + 28, 17, 144, 199, 5, 247, 57, 119, 211, 99, 141, 80, 116, 24, 202, 162, 185, 30, 159, 143, 181, 101, 73, + 242, 21, 153, 58, 38, 12, 177, 166, 135, 25, 95, 30, 4, 83, 64, 98, 196, 211, 120, 33, 119, 198, 221, + 118, 49, 130, 84, 58, 230, 227, 39, 190, 103, 13, 156, 41, 223, 133, 125, 205, 92, 244, 106, 158, 213, 39, + 71, 13, 247, 173, 121, 242, 234, 114, 83, 63, 26, 26, 166, 134, 207, 147, 77, 113, 152, 171, 226, 108, 72, + 175, 187, 167, 29, 89, 112, 201, 27, 171, 193, 78, 183, 111, 115, 127, 203, 84, 211, 138, 167, 95, 82, 10, + 185, 59, 152, 17, 178, 242, 229, 243, 158, 215, 197, 46, 160, 2, 61, 253, 220, 114, 106, 130, 142, 162, 253, + 174, 133, 101, 219, 119, 38, 172, 166, 220, 102, 60, 53, 73, 208, 118, 168, 143, 134, 201, 60, 26, 62, 172, + 99, 172, 241, 35, 190, 178, 154, 6, 138, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 217, 37, 145, 160, 185, 189, 159, 26, 106, 138, 109, 0, 151, 61, 151, 232, 89, 181, 85, + 52, 217, 92, 61, 143, 47, 204, 108, 147, 140, 212, 240, 94, 244, 152, 6, 85, 214, 166, 78, 16, 205, 104, + 121, 183, 100, 120, 66, 249, 68, 37, 16, 128, 118, 126, 218, 252, 174, 157, 234, 251, 49, 126, 79, 82, 121, + 11, 76, 120, 157, 197, 124, 226, 157, 178, 69, 18, 188, 120, 67, 160, 230, 85, 133, 105, 4, 192, 112, 210, + 221, 217, 39, 86, 79, 93, 42, 114, 68, 157, 223, 146, 35, 40, 210, 134, 137, 118, 27, 249, 31, 169, 196, + 139, 114, 188, 131, 204, 79, 31, 71, 20, 182, 89, 218, 249, 56, 236, 226, 43, 65, 24, 61, 237, 247, 127, + 149, 92, 213, 103, 34, 177, 3, 211, 235, 
50, 32, 235, 149, 20, 169, 170, 56, 87, 72, 236, 247, 9, 24, + 226, 136, 50, 124, 76, 94, 159, 160, 161, 165, 52, 82, 13, 240, 146, 247, 27, 254, 212, 190, 23, 179, 248, + 79, 216, 28, 40, 108, 174, 143, 78, 193, 220, 94, 223, 136, 187, 136, 14, 32, 178, 10, 101, 206, 16, 9, + 38, 41, 218, 10, 143, 229, 136, 247, 177, 178, 8, 203, 70, 223, 205, 77, 26, 7, 160, 156, 125, 247, 139, + 206, 113, 244, 107, 135, 245, 178, 122, 50, 94, 49, 70, 57, 3, 229, 195, 41, 80, 58, 66, 45, 205, 28, + 182, 63, 248, 142, 14, 93, 60, 222, 17, 221, 206, 92, 248, 42, 21, 148, 4, 5, 5, 221, 230, 146, 103, + 206, 79, 134, 160, 154, 215, 9, 168, 177, 127, 208, 196, 142, 113, 36, 204, 246, 238, 48, 246, 118, 147, 247, + 70, 10, 183, 169, 140, 7, 199, 50, 204, 151, 34, 141, 208, 94, 45, 239, 60, 199, 183, 121, 73, 114, 33, + 150, 152, 167, 152, 127, 9, 123, 73, 223, 60, 90, 93, 140, 62, 253, 38, 194, 100, 164, 150, 81, 187, 162, + 158, 26, 197, 179, 123, 107, 108, 88, 30, 45, 111, 141, 17, 122, 142, 28, 232, 140, 21, 242, 77, 35, 206, + 158, 187, 115, 115, 175, 148, 113, 158, 213, 102, 2, 125, 216, 125, 220, 104, 30, 170, 188, 136, 34, 100, 17, + 180, 10, 238, 190, 74, 48, 26, 115, 113, 215, 201, 121, 114, 215, 241, 128, 101, 44, 113, 37, 196, 124, 254, + 109, 195, 213, 141, 239, 139, 35, 101, 70, 157, 203, 113, 229, 233, 204, 97, 148, 212, 208, 174, 126, 203, 245, + 22, 119, 36, 5, 149, 32, 9, 141, 0, 202, 134, 58, 131, 132, 126, 52, 126, 74, 234, 212, 221, 80, 19, + 64, 117, 167, 118, 52, 6, 14, 99, 77, 113, 201, 248, 127, 135, 205, 147, 227, 60, 46, 55, 237, 192, 149, + 226, 95, 104, 162, 76, 24, 100, 38, 44, 240, 137, 93, 6, 136, 62, 141, 164, 172, 11, 55, 227, 159, 225, + 195, 104, 243, 112, 31, 70, 104, 173, 242, 157, 232, 162, 225, 87, 105, 182, 136, 194, 112, 96, 155, 171, 243, + 122, 53, 197, 194, 208, 240, 35, 37, 117, 201, 235, 205, 118, 213, 80, 53, 126, 186, 170, 2, 7, 233, 95, + 107, 144, 133, 154, 228, 55, 156, 108, 241, 167, 245, 6, 8, 173, 54, 235, 128, 185, 139, 251, 97, 
140, 94, + 166, 171, 70, 62, 193, 87, 203, 31, 43, 237, 199, 4, 201, 37, 48, 64, 99, 251, 200, 55, 59, 39, 163, + 88, 236, 35, 83, 5, 10, 37, 212, 69, 19, 124, 125, 92, 184, 94, 93, 179, 199, 211, 7, 119, 24, 14, + 156, 220, 88, 240, 101, 10, 44, 40, 192, 171, 162, 138, 121, 111, 203, 157, 198, 211, 251, 22, 84, 20, 99, + 221, 253, 60, 92, 181, 237, 193, 161, 211, 139, 12, 57, 173, 148, 160, 229, 121, 104, 66, 113, 175, 157, 177, + 219, 38, 77, 72, 76, 217, 233, 101, 24, 182, 109, 238, 118, 177, 156, 89, 22, 63, 9, 241, 135, 59, 69, + 90, 87, 130, 177, 70, 169, 179, 13, 21, 109, 187, 164, 110, 88, 61, 115, 81, 186, 216, 20, 124, 206, 188, + 202, 143, 162, 192, 208, 15, 34, 80, 91, 233, 194, 163, 173, 61, 80, 161, 147, 46, 44, 14, 184, 20, 129, + 149, 196, 241, 90, 164, 125, 113, 228, 127, 119, 185, 11, 108, 175, 195, 233, 31, 17, 103, 132, 253, 146, 192, + 111, 172, 47, 150, 77, 143, 5, 148, 49, 172, 106, 141, 227, 85, 95, 92, 152, 25, 39, 16, 108, 132, 70, + 247, 14, 56, 63, 213, 155, 240, 148, 242, 96, 71, 251, 100, 107, 122, 173, 51, 59, 82, 171, 232, 133, 36, + 65, 168, 137, 182, 15, 38, 208, 18, 107, 27, 77, 170, 93, 100, 245, 74, 250, 111, 223, 154, 84, 51, 28, + 203, 250, 208, 105, 128, 77, 9, 214, 9, 137, 201, 17, 17, 161, 31, 188, 192, 24, 112, 78, 70, 174, 101, + 102, 131, 90, 15, 145, 215, 87, 94, 28, 198, 134, 197, 103, 115, 127, 172, 156, 172, 159, 12, 234, 199, 209, + 70, 247, 214, 171, 248, 161, 181, 116, 12, 41, 124, 8, 193, 33, 244, 10, 226, 208, 78, 244, 49, 115, 201, + 129, 7, 253, 208, 80, 162, 44, 34, 23, 143, 216, 7, 15, 229, 217, 47, 102, 37, 65, 94, 123, 91, 93, + 75, 247, 8, 58, 17, 121, 253, 3, 178, 101, 69, 117, 74, 78, 156, 169, 118, 61, 120, 208, 193, 45, 248, + 238, 173, 126, 52, 127, 232, 59, 185, 116, 38, 65, 110, 226, 236, 33, 235, 176, 222, 249, 63, 164, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 19, 146, 65, 18, 222, 192, 28, + 129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 141, 194, 179, 150, 178, 254, 199, 164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 86, 70, 20, 115, 220, 58, 54, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 87, 189, 150, + 42, 83, 54, 191, 178, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 168, 148, 227, 237, 0, 58, 76, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 81, 225, 130, 80, 57, 19, 44, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 203, 199, 88, 221, 85, 236, 28, 154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 137, 70, 56, 15, 38, 198, 57, 188, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 171, 226, 22, 161, + 146, 207, 253, 221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 238, 243, 247, 62, 91, 24, 96, 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 92, 135, 166, 200, 178, 32, 143, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 199, 58, 182, 38, 46, 223, 251, 177, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 54, 221, 20, 104, 192, 15, 16, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 250, 181, 83, 73, + 108, 39, 238, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 87, 62, 63, 109, 185, 27, 47, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 201, 228, 200, 210, 40, 189, 69, 33, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 163, + 163, 100, 42, 233, 131, 196, 207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 61, 64, 224, 148, 207, 87, 250, 49, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 110, 228, 74, 100, 21, 232, + 188, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 99, 141, 163, 15, 250, 143, 144, 193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 101, 121, 138, 146, 243, 91, 162, 43, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 59, + 131, 6, 28, 220, 203, 187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 245, 233, 244, 171, 114, 186, 18, 108, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 241, 239, 20, 254, 188, 136, 79, + 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 76, 64, 21, 56, 243, 197, 57, 40, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 50, 196, 168, 85, 165, 173, 159, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 66, 54, 126, + 47, 43, 133, 97, 245, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 73, 250, 139, 147, 9, 128, 42, 212, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 204, 155, 163, 171, 146, 4, 25, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 92, 225, 172, 148, 16, 70, 164, 46, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 246, 132, 223, 201, 82, 47, 254, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 246, 15, 204, 188, 11, 80, 246, 168, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 182, 191, 225, 26, 52, 65, 149, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 71, 191, 171, 180, 6, 36, 183, 199, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
66, 32, 79, 63, 222, + 180, 149, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 58, 172, 192, 3, 9, 104, 4, 89, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 165, 11, 151, 236, 179, 31, 62, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, + 180, 143, 81, 31, 161, 29, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 201, 165, 129, 75, 141, 69, 64, 111, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 27, 112, 242, 101, 206, + 26, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 140, 57, 129, 215, 240, 66, 56, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 225, 35, 59, 113, 165, 101, 86, 190, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 241, + 201, 16, 7, 199, 31, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 94, 214, 47, 254, 191, 204, 10, 187, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 109, 237, 174, 252, 46, 216, 181, + 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 94, 228, 195, 95, 198, 10, 247, 141, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 247, 88, 223, 141, 193, 52, 30, 164, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 207, 157, 67, + 177, 140, 84, 41, 195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 142, 188, 96, 130, 31, 32, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 92, 25, 47, 159, 216, 60, 8, 168, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 17, 44, 31, 207, 217, 43, 167, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 82, 173, 99, 30, 37, 190, 164, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 
75, 77, 217, + 128, 103, 87, 196, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 240, 88, 234, 237, 206, 215, 12, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 166, 228, 252, 243, 30, 7, 163, 119, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 216, 208, 83, 70, 233, 204, 33, 211, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 113, 2, 253, 184, 5, 138, 147, 208, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 67, 97, 210, 235, 232, + 141, 198, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 177, 171, 221, 225, 125, 197, 69, 136, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 95, 224, 6, 91, 85, 1, 232, 154, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 179, 22, 46, 192, 216, 254, + 241, 78, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 136, 154, 175, 18, 77, 128, 245, 195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 233, 30, 222, 197, 6, 216, 20, 21, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 36, + 184, 47, 223, 53, 151, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 49, 131, 190, 24, 114, 220, 99, 166, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 103, 224, 13, 98, 19, 122, 47, + 204, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 229, 143, 190, 73, 20, 9, 91, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 35, 42, 212, 52, 117, 170, 240, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 144, 12, + 178, 80, 
146, 30, 166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 57, 17, 165, 121, 15, 238, 225, 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, 202, 171, 146, 72, 206, 75, 100, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 220, 1, 0, 171, 235, 18, 166, 203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 210, 244, 77, 101, 116, 86, 179, 39, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 190, 124, 134, 122, + 45, 172, 101, 164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 54, 236, 207, 201, 100, 9, 201, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 147, 203, 209, 159, 188, 4, 167, 188, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 226, 222, 71, 224, 186, 133, 168, 254, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 254, 246, 42, 224, 165, 177, 30, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 90, 26, 68, 8, + 226, 41, 175, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 77, 201, 183, 119, 149, 87, 40, 253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 98, 148, 230, 2, 23, 205, 224, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, + 112, 123, 15, 19, 177, 140, 152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 79, 55, 116, 197, 57, 10, 27, 179, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 220, 209, 60, 45, 79, 233, + 117, 81, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 115, 81, 235, 240, 121, 164, 46, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 250, 195, 119, 124, 21, 110, 50, 185, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 78, 196, + 35, 66, 
235, 81, 20, 246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 40, 250, 104, 157, 128, 106, 148, 47, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 252, 101, 113, 89, 95, 125, 243, + 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 48, 170, 91, 98, 162, 183, 191, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 133, 88, 96, 9, 213, 50, 100, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 20, 46, 96, 151, 43, 101, 141, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 175, 229, 68, 20, 158, 175, 223, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 95, 28, 94, 0, 109, 1, 252, 188, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 215, 242, 67, + 159, 169, 170, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 210, 25, 18, 148, 205, 218, 227, 187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 140, 181, 64, 143, 160, 70, 84, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 87, 252, 245, 64, 168, 102, 58, 174, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 60, 231, 63, 214, 98, 74, 172, 64, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 164, 139, 178, 166, 11, + 215, 20, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 210, 236, 124, 15, 193, 113, 81, 61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 183, 166, 99, 239, 5, 128, 194, 10, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 203, + 98, 219, 26, 102, 11, 107, 130, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 239, 235, 31, 64, 175, 2, 19, 194, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 84, 30, 198, 145, 183, 111, + 189, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 46, 161, 77, 51, 188, 59, 222, 184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 81, 13, 96, 175, 242, 206, 141, 93, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 242, 250, + 2, 118, 103, 160, 165, 234, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 17, 31, 3, 66, 12, 163, 157, 224, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 119, 77, 99, 184, 95, 233, + 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 214, 240, 48, 108, 217, 163, 39, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 235, 98, 50, 63, 138, 26, 225, 126, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 175, 130, + 10, 110, 198, 129, 191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 139, 108, 163, 8, 160, 239, 170, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 108, 155, 244, 69, 119, 8, 216, 56, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 196, 52, 69, 230, 147, 242, 134, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 242, 107, 102, 224, 232, 222, 159, 87, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 101, 52, 253, + 88, 238, 220, 212, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 46, 13, 61, 62, 170, 192, 4, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 233, 248, 191, 86, 139, 15, 237, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 205, 228, 152, 6, 33, + 162, 17, 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 229, 3, 64, 231, 165, 10, 36, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 186, 101, 148, 236, 30, 35, 163, 49, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, + 97, 121, 132, 86, 130, 48, 191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 80, 190, 17, 150, 22, 48, 104, 85, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 171, 242, 157, 66, 44, 139, + 12, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 192, 37, 126, 216, 125, 49, 39, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 134, 44, 75, 205, 15, 211, 197, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 93, 81, + 63, 152, 130, 248, 73, 184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 25, 31, 203, 241, 115, 104, 152, 174, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 130, 67, 206, 44, 71, 58, 235, + 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 6, 92, 73, 144, 192, 22, 150, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 231, 183, 115, 82, 6, 111, 44, 154, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 209, 181, + 252, 239, 164, 185, 106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 155, 78, 58, 155, 63, 186, 176, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 237, 229, 122, 248, 160, 78, 57, 7, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 71, 252, 131, 214, 106, 149, 245, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 154, 160, 23, 35, 180, 122, 146, 183, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 147, 196, 145, + 9, 114, 33, 154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 196, 213, 216, 114, 42, 81, 252, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 148, 144, 77, 158, 164, 0, 37, 244, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 233, 174, 45, 246, 239, 231, 60, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 112, 253, 84, 28, 149, 74, 5, 239, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 217, 176, 222, 58, 35, + 44, 2, 155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 130, 113, 154, 57, 105, 117, 103, 157, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 144, 99, 142, 39, 26, 200, 251, 249, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 178, + 110, 41, 197, 198, 204, 10, 235, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 98, 214, 29, 104, 38, 30, 251, 69, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 249, 179, 39, 10, 114, 16, + 97, 96, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 95, 198, 179, 205, 39, 140, 213, 220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 16, 212, 92, 209, 18, 34, 240, 191, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 8, 109, 191, 111, 62, 222, + 229, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 235, 200, 48, 199, 132, 47, 44, 183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 10, 57, 146, 87, 214, 123, 22, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 190, 190, 234, + 161, 61, 186, 252, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 153, 144, 250, 41, 228, 58, 43, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 90, 133, 158, 212, 245, 231, 64, 153, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 138, 167, 77, 109, 52, 247, 174, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 227, 137, 253, 203, 194, 46, 199, 121, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 179, 9, 102, 241, + 121, 221, 149, 220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 189, 254, 41, 138, 154, 124, 141, 79, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 161, 25, 100, 236, 78, 103, 14, 52, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 126, 77, 10, 86, 216, 77, 100, 201, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 246, 191, 32, 245, 137, 150, 189, 87, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 164, 208, 123, 205, + 90, 87, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 58, 59, 143, 128, 100, 218, 58, 236, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 164, 141, 145, 163, 80, 86, 225, 16, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 131, + 224, 35, 146, 166, 116, 50, 118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 36, 195, 53, 249, 1, 239, 99, 5, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 27, 99, 197, 17, 250, + 129, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 12, 219, 151, 245, 136, 248, 124, 82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 221, 190, 178, 43, 97, 27, 171, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 108, 210, + 208, 54, 162, 254, 244, 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13, 3, 69, 152, 178, 214, 234, 29, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 155, 220, 23, 102, 90, 28, 36, + 46, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 140, 157, 116, 203, 135, 234, 44, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 208, 148, 207, 66, 248, 130, 12, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 80, 65, 36, + 249, 59, 31, 4, 3, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 49, 40, 124, 250, 250, 88, 195, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 105, 52, 92, 182, 136, 23, 128, 249, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 65, 26, 246, 117, + 172, 129, 172, 99, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 42, 231, 214, 189, 29, 162, 191, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 245, 77, 142, 16, 93, 151, 202, 35, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6, 32, 176, 150, 181, 19, 66, 187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 164, 189, 1, 192, 17, 82, 123, 239, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 222, 84, 89, 81, 183, + 46, 96, 223, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 243, 30, 2, 28, 239, 97, 96, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 130, 121, 235, 25, 152, 59, 40, 149, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 113, + 46, 130, 80, 93, 125, 1, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 61, 237, 129, 172, 20, 214, 50, 172, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 189, 243, 178, 145, 249, 154, + 252, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 53, 41, 238, 229, 202, 126, 2, 151, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 175, 194, 252, 87, 87, 86, 180, 197, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 161, 147, + 24, 236, 43, 162, 30, 169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 133, 25, 111, 132, 59, 24, 71, 9, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 225, 112, 150, 137, 242, 120, + 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 173, 69, 61, 70, 108, 20, 129, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 252, 157, 90, 104, 49, 182, 44, 236, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 186, 217, + 48, 136, 7, 240, 182, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 115, 163, 110, 151, 167, 47, 55, 131, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 185, 192, 253, 72, 96, 119, 171, 69, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 190, 184, 69, 31, 49, 224, 215, 167, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, 28, 190, 214, 227, 107, 156, 199, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 184, 212, 230, + 88, 224, 9, 106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 227, 62, 96, 15, 210, 231, 79, 53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 166, 139, 116, 65, 136, 125, 31, 121, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 170, 162, 172, 193, 130, 59, 54, 230, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 254, 59, 48, 178, 156, 149, 5, 41, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 79, 218, 54, 104, 10, + 187, 77, 123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 126, 122, 148, 187, 226, 182, 185, 168, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 91, 177, 207, 200, 249, 241, 97, 160, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 119, 79, 46, 143, 120, + 220, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 171, 76, 48, 54, 76, 152, 37, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 187, 236, 100, 172, 113, 122, 114, 146, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 109, 40, + 7, 18, 70, 242, 242, 169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 157, 45, 166, 116, 146, 128, 93, 154, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 142, 65, 174, 40, 86, 93, 28, + 193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 32, 33, 181, 84, 157, 213, 54, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 243, 3, 235, 181, 118, 39, 228, 72, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 172, 128, 31, + 26, 34, 81, 251, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 42, 189, 62, 20, 68, 172, 107, 129, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
107, 88, 225, 176, 63, 148, 68, 4, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 46, 138, 62, 86, 204, 130, 207, 220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 53, 211, 0, 62, 170, 181, 121, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 24, 93, 172, + 183, 70, 227, 207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 52, 200, 133, 165, 140, 107, 41, 221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 124, 135, 150, 222, 226, 218, 62, 12, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 197, 197, 186, 27, 226, 134, 173, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 120, 42, 126, 117, 206, 24, 249, 50, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 180, 14, 118, 165, 195, + 189, 145, 33, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 54, 7, 213, 61, 144, 25, 71, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 230, 129, 107, 73, 102, 162, 206, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 188, + 145, 221, 153, 141, 98, 152, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 131, 15, 229, 20, 75, 219, 212, 117, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 211, 122, 77, 181, 231, 186, + 19, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 96, 200, 168, 21, 164, 125, 164, 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 239, 71, 21, 146, 196, 214, 61, 104, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 184, 210, + 120, 31, 64, 56, 224, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 108, 242, 64, 171, 112, 230, 197, 159, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 167, 175, 177, 146, 251, 185, 215, + 226, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 72, 129, 126, + 161, 167, 25, 97, 209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 238, 235, 82, 136, 65, 99, 13, 195, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 135, 88, 251, 247, 62, 3, 65, 92, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 5, 35, 174, 84, 107, 30, 198, 183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 226, 206, 161, 253, 114, 213, 218, 145, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 40, 71, 9, + 68, 243, 149, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 135, 18, 225, 131, 178, 190, 218, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 229, 246, 13, 134, 231, 48, 239, 94, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 113, 61, 29, 193, 134, 253, 64, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 224, 38, 222, 38, 84, 152, 206, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 129, 101, 170, 117, + 140, 124, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 141, 16, 32, 139, 131, 114, 67, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 182, 72, 152, 58, 132, 89, 30, 10, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 163, + 136, 158, 211, 228, 202, 3, 146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 144, 89, 192, 123, 224, 234, 52, 73, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 226, 
120, 51, 187, 80, 230, + 25, 86, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 101, 183, 47, 20, 92, 43, 37, 110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 18, 182, 233, 75, 49, 229, 201, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 75, 95, + 18, 248, 140, 204, 11, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 216, 159, 244, 66, 166, 149, 132, 143, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 144, 9, 200, 23, 116, 68, 109, + 215, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 35, 132, 220, 14, 248, 57, 116, 127, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 61, 125, 92, 39, 228, 214, 146, 194, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 133, 179, 205, + 204, 246, 109, 187, 171, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 99, 100, 233, 40, 247, 111, 49, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 53, 240, 120, 100, 99, 78, 106, 194, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 112, 135, 170, 248, 81, 203, 32, 219, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 120, 251, 224, 15, 125, 135, 28, 131, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 102, 173, 153, 133, + 47, 241, 197, 108, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 123, 173, 123, 191, 10, 14, 148, 105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, 
std::vector({18, 2, 16, 32}), + std::vector({30, 40, 3, 3}), DT_INT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, fp16_2c_2n_pad) { + uint16_t ret[31 * 17 * 4 * 4]{ + 14402, 14379, 14633, 14774, 13826, 14869, 13703, 15068, 13624, 13537, 14521, 15114, 13281, 14976, 12899, 15129, + 12289, 12461, 12096, 10696, 14558, 14464, 14389, 15323, 15219, 13308, 14041, 13518, 14388, 15320, 14145, 12633, + 13852, 14410, 14115, 11497, 14819, 13048, 15357, 14681, 15248, 14640, 13757, 13980, 14653, 14856, 11482, 12271, + 13540, 12429, 13971, 14888, 12988, 15321, 14609, 13548, 14965, 15148, 15160, 12685, 14235, 14747, 14941, 15335, + 14810, 12870, 11429, 14307, 14957, 14165, 13507, 15335, 15060, 15034, 15311, 13682, 14949, 14545, 14311, 14478, + 11027, 15231, 14833, 10577, 14400, 14471, 12798, 15263, 15010, 14107, 10812, 14272, 14756, 14933, 11793, 13558, + 12387, 14480, 9432, 14724, 13686, 15178, 7127, 14452, 13930, 14178, 14911, 13538, 13327, 12942, 15308, 14348, + 11288, 10750, 10395, 14163, 14618, 14458, 11317, 14390, 15194, 14971, 13180, 15247, 14157, 12746, 13356, 12826, + 14169, 15177, 11319, 14063, 13328, 14065, 12473, 15172, 12830, 15062, 14396, 14919, 8095, 14663, 15285, 11074, + 14805, 13884, 14428, 14944, 14431, 13189, 10181, 15146, 14422, 14689, 15068, 14981, 14198, 14383, 14843, 14488, + 15159, 14158, 14843, 14811, 13607, 15082, 13934, 13595, 14830, 14087, 15157, 14820, 12592, 14315, 14161, 14704, + 13632, 13253, 14232, 11742, 14661, 11794, 14768, 10439, 15237, 14869, 12268, 14814, 14315, 15076, 14144, 8808, + 8595, 13506, 12548, 13451, 14936, 13946, 9166, 11487, 13611, 15071, 14916, 12429, 14425, 12981, 14181, 13510, + 13403, 12498, 7919, 15307, 14149, 15057, 14686, 13501, 13620, 13326, 14896, 15300, 14529, 
12639, 13929, 12700, + 9001, 13153, 15088, 14621, 12825, 14106, 14612, 15019, 13647, 11647, 15259, 15148, 14431, 14651, 15262, 15345, + 14821, 14708, 14305, 14696, 13775, 12556, 14553, 14596, 11235, 14732, 14839, 14391, 13207, 12301, 15282, 13496, + 14065, 13863, 15014, 14777, 14892, 12814, 13125, 15044, 14655, 15189, 14590, 12415, 13728, 14926, 14932, 14764, + 15355, 12676, 14813, 11644, 15105, 12800, 14405, 10732, 12721, 15127, 14816, 12449, 15309, 13831, 13569, 15207, + 14719, 9604, 14686, 13284, 14833, 14904, 15088, 14678, 15095, 14809, 13398, 15326, 14636, 13248, 14586, 14772, + 14958, 15172, 14342, 14089, 14258, 13405, 14868, 14431, 12673, 14949, 13859, 10562, 14993, 13561, 13484, 14952, + 10920, 13863, 13174, 12359, 13959, 15042, 14786, 13899, 14605, 14381, 15227, 13005, 13322, 14089, 14018, 13082, + 9389, 15312, 14400, 14499, 14433, 14837, 13899, 14042, 14997, 12504, 14596, 10240, 14044, 7133, 14595, 14483, + 13408, 14436, 14498, 13580, 14944, 14805, 14672, 12729, 13016, 13193, 15092, 12977, 15251, 15107, 12157, 14744, + 12314, 13490, 12616, 12784, 14866, 15288, 13778, 15029, 12163, 13002, 14842, 13699, 14487, 15243, 11768, 13120, + 11907, 15083, 9064, 15080, 13562, 14200, 14107, 14491, 13819, 12799, 14530, 12957, 11736, 14283, 14595, 13042, + 14759, 13908, 12837, 12547, 12613, 14559, 14426, 14984, 14467, 14962, 14260, 14309, 14643, 13685, 15219, 12760, + 14419, 12270, 13379, 15254, 14804, 14015, 11742, 14901, 11807, 13657, 13213, 14718, 14772, 13837, 13311, 13895, + 15124, 12599, 15281, 13663, 13340, 14732, 15192, 13832, 14527, 10077, 15051, 13593, 14514, 13150, 15300, 13191, + 14591, 15223, 14225, 13678, 12922, 13770, 13051, 9329, 12291, 14056, 14141, 13903, 11930, 15097, 14819, 13038, + 13422, 12463, 13027, 13441, 13934, 13548, 15294, 15243, 15243, 12819, 14495, 14501, 14370, 15066, 15228, 12863, + 14465, 14561, 11927, 14072, 15105, 9506, 15066, 12170, 12578, 14915, 14633, 14105, 9144, 15321, 5703, 15248, + 13347, 14493, 14870, 13255, 14797, 13812, 
12955, 14658, 15091, 14919, 15262, 13537, 14312, 14754, 15260, 14447, + 14063, 12352, 14120, 15329, 15133, 14514, 14408, 15279, 13726, 12232, 14959, 13399, 14996, 15065, 14424, 15060, + 14792, 12424, 9458, 10585, 14736, 13592, 14694, 14952, 14586, 14830, 12547, 14625, 12400, 14950, 14851, 14374, + 14776, 14537, 14831, 14373, 15333, 14774, 12938, 15357, 13384, 13289, 13207, 15004, 13933, 15145, 13975, 12681, + 14719, 11611, 15038, 14519, 12573, 13596, 13429, 14441, 14376, 14861, 12909, 14600, 14605, 14604, 14023, 9499, + 15329, 12643, 13931, 15027, 12382, 15240, 14812, 13583, 14716, 13851, 7459, 14481, 14132, 9350, 15247, 14706, + 14763, 14922, 13712, 13187, 15063, 14154, 13719, 15282, 12223, 14027, 13892, 11560, 14396, 14595, 11991, 13362, + 13881, 14035, 14883, 14387, 14096, 14763, 14230, 12512, 12235, 14182, 12596, 12340, 12388, 14753, 12462, 14831, + 14609, 11970, 14616, 11512, 14016, 14776, 13673, 14152, 14832, 13716, 12119, 14614, 12102, 11829, 15047, 13935, + 12510, 14910, 14223, 14306, 13757, 11326, 15331, 14992, 12210, 14593, 10983, 14917, 15068, 11989, 14329, 10062, + 11670, 14490, 13614, 13573, 14299, 15072, 15249, 12920, 11776, 10044, 12957, 10587, 13772, 14282, 14918, 14542, + 15296, 14761, 13670, 14483, 12979, 12365, 12350, 11432, 13509, 14614, 14531, 13746, 13114, 15170, 14216, 14510, + 15309, 15061, 14720, 14117, 14479, 14618, 12389, 13789, 15227, 15248, 13544, 14556, 14727, 13365, 14559, 15345, + 14152, 13666, 13492, 12714, 14525, 12291, 13700, 12309, 14195, 14593, 14509, 11536, 13750, 11849, 11484, 13422, + 14890, 14130, 13209, 14456, 14495, 13529, 15096, 11671, 13535, 4916, 15115, 15142, 15137, 15026, 14921, 14830, + 13556, 15296, 11716, 15265, 14297, 14645, 11518, 15117, 15291, 13955, 13785, 13452, 14107, 15063, 14635, 12740, + 12447, 13639, 14816, 14567, 15025, 14881, 12991, 14427, 15102, 12799, 13379, 12608, 10686, 14496, 14301, 12078, + 13171, 15249, 12861, 14418, 14322, 14303, 13629, 13272, 11448, 11306, 13939, 14624, 13506, 14898, 13155, 
13737, + 13672, 8727, 14087, 10373, 13354, 12724, 13212, 11178, 15113, 14428, 14480, 15003, 14924, 13614, 14491, 13762, + 13601, 14831, 10198, 14248, 14119, 13918, 11619, 14970, 14298, 13402, 14792, 15078, 14575, 14180, 14120, 13085, + 14945, 10349, 13979, 15274, 15353, 14721, 13903, 14031, 10803, 14533, 15343, 13146, 14572, 14455, 15118, 14374, + 14355, 14741, 14569, 14673, 15071, 15261, 13330, 14744, 10303, 12502, 9636, 14870, 15097, 11490, 14144, 14772, + 12883, 15080, 15068, 14411, 14392, 13305, 14984, 13344, 13793, 15031, 10720, 14150, 14554, 14996, 12558, 14502, + 14015, 14391, 14409, 12520, 13362, 9595, 13370, 14939, 14396, 15285, 13944, 15225, 15329, 13146, 14257, 12734, + 12895, 14718, 14972, 14462, 15358, 13349, 14435, 15199, 15062, 13372, 11941, 14224, 15000, 15316, 12490, 13597, + 14170, 14301, 13483, 12592, 14349, 15174, 14623, 15014, 15117, 14418, 13620, 15215, 15066, 13678, 13843, 14395, + 14818, 14930, 11160, 14885, 15337, 14179, 14816, 11916, 13010, 14099, 12337, 15046, 11758, 12519, 15011, 14447, + 15274, 14129, 12313, 13104, 15269, 15000, 13469, 14529, 15326, 14479, 14093, 15346, 8366, 12904, 13518, 14932, + 13020, 14326, 13228, 11688, 14668, 14621, 14498, 15187, 9846, 12259, 11449, 14393, 15122, 15215, 15244, 15197, + 14222, 13851, 14602, 14095, 15329, 13367, 14406, 14468, 14612, 11665, 14929, 15237, 13639, 10141, 14844, 14258, + 14246, 15293, 14673, 8674, 12969, 14865, 8639, 14870, 14329, 13712, 13804, 15270, 11621, 14359, 14244, 12867, + 12003, 14246, 11905, 14517, 12346, 14095, 12049, 13745, 12856, 13578, 14913, 15042, 10547, 12764, 11510, 15233, + 11254, 12144, 15025, 12548, 14223, 12241, 14655, 15204, 15285, 14538, 14288, 14430, 14410, 14952, 14862, 13123, + 14901, 14809, 10021, 14583, 13431, 14631, 11804, 14232, 15201, 13574, 12117, 14683, 14117, 14945, 10261, 13901, + 13659, 12441, 15117, 15064, 14558, 13502, 15118, 12588, 15345, 14768, 13934, 14882, 14278, 14318, 13492, 14823, + 14984, 13698, 15153, 13103, 13445, 14964, 14408, 12287, 
15235, 13431, 14156, 13610, 14783, 12545, 8340, 14414, + 14857, 15236, 14503, 12692, 13097, 13652, 14563, 13493, 13275, 14246, 14919, 13941, 14055, 10566, 13511, 14873, + 14532, 15026, 13462, 13427, 15137, 14076, 14352, 14771, 15001, 13558, 15329, 14700, 12150, 15021, 12616, 12598, + 14496, 10637, 15339, 13123, 10664, 12994, 12358, 14219, 14679, 13296, 10598, 12601, 12072, 13737, 14608, 12265, + 13605, 14544, 14120, 10293, 12592, 12775, 15207, 14873, 13208, 14159, 13388, 15178, 14949, 12487, 14084, 13458, + 14803, 14678, 10311, 13432, 14963, 14553, 9121, 14458, 13688, 7712, 14816, 13571, 12702, 14273, 14340, 13322, + 14524, 12307, 15128, 14966, 14986, 14754, 11664, 13407, 14351, 13937, 14286, 14787, 14558, 15306, 13877, 15095, + 10024, 13911, 11470, 12557, 12031, 12972, 9385, 8415, 12979, 15047, 15191, 15318, 13354, 14737, 14281, 12678, + 14259, 15175, 12619, 14568, 14361, 14178, 13938, 14643, 11526, 6603, 13799, 12600, 13587, 14678, 14336, 14753, + 14899, 15000, 13442, 12131, 15034, 14496, 14900, 13754, 14595, 14638, 15118, 13036, 14553, 13369, 10026, 12056, + 10336, 13365, 13318, 13572, 15017, 15292, 13038, 15236, 13734, 14787, 14854, 14689, 11582, 14656, 12765, 14876, + 14532, 15001, 14462, 12341, 14861, 13452, 11285, 15176, 14689, 15185, 10596, 14263, 12985, 14651, 15152, 14289, + 12566, 14693, 14980, 13878, 14407, 15024, 11678, 13704, 12398, 14661, 14308, 15030, 12942, 11771, 13970, 11887, + 14884, 14497, 13346, 13997, 15341, 14505, 13124, 12266, 14682, 13100, 14981, 13271, 14217, 14753, 14958, 11132, + 10386, 14887, 10339, 14997, 13711, 14176, 14488, 14775, 14396, 14816, 13629, 10435, 11719, 15329, 15249, 14634, + 14316, 12578, 14495, 13166, 14400, 14770, 14949, 14766, 15209, 13365, 14640, 12379, 10286, 11653, 14512, 14333, + 14828, 14367, 13259, 14640, 15313, 12292, 14965, 13609, 11280, 14185, 14988, 12355, 13961, 14861, 13765, 14876, + 14809, 14256, 14816, 14879, 14709, 14789, 15304, 10730, 14779, 14742, 12668, 14546, 14120, 13370, 13705, 14487, + 14208, 
12827, 13700, 15161, 15127, 13212, 14851, 15299, 11610, 13495, 14632, 13567, 15011, 14351, 13677, 13749, + 14952, 13042, 12911, 12277, 8940, 14490, 13899, 13740, 13183, 15285, 12333, 14677, 12922, 14876, 14586, 15255, + 14336, 13771, 13893, 10751, 15160, 14727, 12719, 14521, 13543, 13335, 11533, 14471, 15223, 13650, 12913, 13232, + 12187, 13831, 14669, 14927, 10954, 14735, 15145, 15354, 13745, 14104, 13140, 13314, 14233, 14235, 14076, 9419, + 13557, 13797, 15124, 13409, 13437, 14427, 15224, 14516, 15148, 12570, 15177, 15305, 14738, 14345, 15062, 13113, + 15065, 15268, 14457, 14429, 13989, 15345, 12562, 14693, 12792, 14599, 14705, 12584, 14341, 14666, 14704, 15125, + 15203, 15168, 14563, 14919, 14458, 14713, 15133, 10414, 14050, 14137, 13552, 14631, 14852, 11985, 14657, 13709, + 15318, 13592, 13906, 15264, 14346, 14383, 13413, 11588, 15274, 14520, 13724, 11957, 14378, 15343, 13504, 8430, + 12324, 15333, 15166, 14671, 14929, 13684, 15089, 15132, 14644, 11365, 14799, 14793, 13464, 10510, 11789, 14475, + 13915, 13043, 11106, 14425, 10392, 14334, 14601, 13881, 14806, 12667, 13199, 15038, 15071, 14125, 13853, 14072, + 8520, 15348, 14892, 14677, 14398, 12964, 14925, 15354, 15147, 7907, 15279, 8991, 15134, 13014, 12733, 14976, + 13101, 14720, 14597, 13876, 15291, 12167, 14932, 14552, 14733, 14115, 15194, 14965, 13977, 12078, 15155, 13150, + 14928, 14460, 15320, 13591, 12367, 12637, 13333, 14913, 14071, 14945, 13727, 15046, 13730, 14598, 14505, 13565, + 15148, 14043, 12485, 13313, 14418, 15330, 12989, 14830, 11773, 14848, 13375, 13934, 14450, 14707, 10599, 11062, + 12837, 14486, 14134, 11291, 12551, 14924, 14941, 14537, 11541, 11701, 15246, 13647, 14537, 14807, 12417, 15349, + 14358, 10748, 13921, 14313, 14450, 15036, 11314, 15189, 15325, 15022, 12346, 13532, 11988, 14031, 13781, 13720, + 14794, 14262, 12826, 13999, 14488, 14548, 14486, 14608, 14506, 14795, 13047, 14115, 13453, 14574, 14709, 15089, + 11756, 14985, 13921, 14319, 15095, 15048, 9072, 14221, 14687, 13538, 
15146, 15349, 10473, 15117, 14543, 14737, + 13444, 13232, 14758, 14107, 14916, 14465, 13839, 14705, 12335, 14895, 15094, 14380, 14846, 14259, 14560, 14169, + 14120, 14590, 14909, 13828, 14416, 11265, 12928, 14484, 14768, 14556, 13587, 13663, 15224, 12994, 12499, 14668, + 13324, 12376, 14365, 15272, 12284, 9254, 14683, 13013, 14983, 14678, 13719, 13892, 14809, 12920, 12124, 14598, + 15242, 14716, 12603, 14484, 13667, 14577, 13609, 13738, 13610, 14957, 14039, 13726, 14926, 14841, 13801, 14397, + 14846, 14813, 13223, 14103, 13726, 14937, 13802, 13998, 15357, 11519, 14223, 13892, 10310, 12705, 13898, 14420, + 14609, 12310, 13127, 13717, 14808, 15135, 13812, 14919, 13141, 14121, 14334, 15344, 13068, 12426, 14684, 14360, + 14722, 13740, 15191, 14366, 14886, 14963, 9200, 14976, 14685, 14699, 9141, 15272, 14659, 10228, 9283, 13381, + 15019, 14201, 14982, 13275, 12011, 10080, 14860, 15234, 14214, 10350, 13058, 14946, 14381, 12535, 13609, 12958, + 13205, 13379, 12878, 14444, 14410, 9216, 15096, 11936, 12797, 14075, 13097, 15288, 13033, 13332, 13356, 13602, + 13693, 13640, 14189, 14996, 14884, 12579, 12380, 13260, 12342, 13752, 14011, 11327, 13457, 14389, 14633, 11956, + 15019, 13591, 14970, 12885, 14709, 12549, 14703, 13261, 13386, 12190, 14735, 12495, 15043, 14773, 14525, 13638, + 13263, 13190, 15115, 13558, 15335, 14339, 14657, 14576, 14986, 12278, 13165, 14350, 14285, 12520, 14352, 15086, + 13500, 14543, 12713, 14974, 12206, 14805, 12983, 14725, 15346, 15205, 14608, 15288, 14822, 14113, 14154, 14589, + 11845, 10802, 13664, 14438, 14610, 13225, 14806, 15335, 15291, 13492, 13898, 13236, 14320, 11590, 14939, 14067, + 12709, 15231, 14020, 12789, 14247, 15103, 12310, 15300, 14202, 14934, 14846, 15099, 14747, 13318, 14956, 11622, + 13293, 11537, 14562, 14954, 14466, 15231, 14985, 14602, 15196, 11438, 15077, 13013, 15336, 14054, 15259, 14921, + 14946, 13782, 14515, 15163, 15209, 14635, 13588, 14531, 13896, 15002, 14128, 12954, 13115, 12727, 14605, 14765, + 12727, 10729, 15122, 
13587, 15182, 12600, 12573, 13185, 15246, 13212, 14801, 12867, 12970, 14045, 12805, 13278, + 12279, 13332, 15343, 11654, 14639, 14236, 14206, 13996, 14830, 13807, 14569, 14925, 11143, 15238, 12921, 14506, + 13721, 11635, 11395, 13805, 13531, 12448, 13846, 14956, 14969, 14459, 8896, 11699, 15268, 14760, 14092, 15108, + 14261, 13524, 11082, 15267, 14698, 14538, 15246, 13931, 13360, 12631, 11603, 12644, 14511, 14441, 14138, 11403, + 6931, 11244, 14740, 12551, 13701, 14025, 14010, 14093, 14089, 10293, 14566, 11685, 14287, 13800, 14778, 11014, + 14579, 12494, 12097, 15187, 13845, 14585, 14226, 14721, 12898, 13520, 15100, 14909, 14914, 12903, 11760, 14973, + 15140, 13989, 15132, 12325, 11439, 14354, 13598, 14044, 13476, 15127, 14442, 15264, 12641, 14482, 14176, 12967, + 15252, 15284, 14941, 12852, 14604, 14734, 13837, 14652, 11673, 11775, 13576, 14874, 13277, 14181, 12487, 11848, + 14686, 13862, 14863, 14838, 15190, 12476, 14150, 15086, 10807, 15109, 15088, 11388, 14756, 15116, 14835, 13624, + 13994, 14603, 14428, 14401, 9397, 15154, 15255, 12694, 14838, 14629, 12282, 14071, 12932, 14771, 13389, 14924, + 14934, 13622, 12155, 15213, 13368, 14948, 15338, 12117, 13520, 14842, 14433, 12527, 11826, 14710, 15044, 15303, + 14897, 12856, 9555, 15251, 14343, 14140, 14668, 14414, 13466, 14325, 15299, 12382, 14039, 12219, 13351, 15065, + 14748, 15165, 13747, 14928, 14534, 15315, 13606, 14041, 14693, 14465, 13872, 14128, 13819, 14550, 13906, 11164, + 12729, 13434, 15117, 14501, 14046, 9922, 13786, 15146, 14963, 13558, 13745, 14680, 15342, 15131, 14751, 14361, + 14560, 14572, 9091, 15157, 15277, 15067, 14572, 14382, 11844, 15010, 13441, 11357, 15009, 12956, 11136, 13992, + 14591, 14424, 11442, 10234, 14000, 14953, 11525, 11595, 12761, 13803, 14853, 14657, 11671, 15206, 12328, 14433, + 14318, 15242, 15334, 14105, 13293, 14587, 13374, 13929, 14496, 15010, 13529, 13139, 11919, 14729, 15036, 14897, + 13643, 14869, 14680, 14818, 14977, 13522, 14858, 13828, 14504, 14643, 12404, 15323, 
14493, 14452, 14690, 14085, + 15213, 12322, 14657, 12091, 10428, 15237, 15018, 12504, 14724, 10557, 12848, 14513, 15019, 14133, 15177, 13432, + 14626, 14700, 15288, 14329, 14737, 14907, 14807, 15160, 14947, 14667, 14324, 13166, 14550, 14825, 13000, 13679, + 14254, 15336, 14832, 10152, 14760, 13558, 15229, 15236, 13740, 13447, 12418, 15126, 14406, 14782, 13450, 12375, + 14498, 13617, 15327, 14803, 14594, 11370, 15316, 15088, 15234, 14523, 13344, 13490, 14474, 13950, 14235, 14695, + 15019, 10878, 14530, 15328, 13517, 14899, 13853, 13170, 15091, 13688, 13374, 12492, 12501, 14989, 12819, 14953, + 15162, 11790, 14762, 15142, 14970, 14456, 15130, 15254, 7194, 12757, 14484, 14094, 14314, 14822, 14293, 12370, + 14222, 14230, 11369, 10653, 15112, 13350, 14369, 15086, 15065, 14386, 12633, 15206, 14509, 14550, 11417, 12341, + 14382, 13544, 12046, 12672, 13351, 11326, 13994, 11902, 14590, 11731, 14552, 14004, 14271, 14960, 11860, 15181, + 14808, 15139, 14239, 12911, 11824, 14597, 15006, 14226, 14707, 14541, 10802, 14287, 12944, 13546, 13690, 14370, + 15215, 12874, 14444, 14842, 15183, 14251, 14406, 14445, 14749, 14117, 13784, 10214, 7321, 15291, 13293, 13683, + 14977, 14437, 14427, 14444, 13677, 13013, 15073, 15348, 5073, 15042, 12116, 12793, 14959, 15047, 14079, 13684, + 14343, 15094, 13630, 8237, 15109, 14686, 14783, 15218, 14503, 10056, 12288, 13024, 14552, 14721, 14799, 14325, + 14110, 10034, 14440, 14515, 12718, 14832, 14370, 14063, 14474, 12985, 14760, 14627, 13422, 12796, 15218, 13300, + 14728, 13460, 14185, 12908, 9219, 14892, 15032, 13379, 11454, 13616, 12854, 7760, 14685, 13049, 14458, 13569, + 14801, 12359, 15358, 14428, 15136, 14019, 14014, 13255, 14668, 8243, 11920, 15161, 14722, 13984, 13535, 14682, + 14096, 13631, 15096, 9218, 15347, 13791, 10685, 14481, 13793, 13429, 14307, 15253, 14798, 14351, 13315, 13562, + 14190, 13440, 14596, 14433, 11343, 11530, 4925, 14968, 10188, 15344, 15094, 10408, 15248, 15339, 12326, 14868, + 15069, 13762, 13507, 15114, 9799, 14562, 
14985, 14077, 13698, 13323, 11698, 12813, 13906, 13726, 14279, 14656, + 14546, 14905, 12182, 14777, 12893, 12312, 14101, 9896, 14965, 14677, 10655, 12688, 15278, 14888, 14893, 14537, + 14639, 12550, 15128, 14666, 14379, 14723, 13339, 15116, 14368, 14744, 11817, 15255, 13789, 13541, 11920, 15188, + 13854, 14582, 15258, 13371, 15119, 13665, 15075, 14713, 13231, 15155, 11286, 13870, 12703, 14481, 14464, 9057, + 13561, 12519, 13328, 14986, 14656, 14479, 13804, 10682, 12985, 14935, 12912, 13770, 12355, 11449, 9670, 13714, + 12404, 14664, 13382, 11184, 14713, 14093, 14547, 13433, 14669, 14482, 15163, 13946, 14521, 15145, 15145, 14465, + 14199, 14553, 14445, 14102, 14743, 14238, 14464, 14524, 14546, 14885, 14920, 15027, 13515, 15314, 11534, 14510, + 12155, 13654, 15311, 13957, 13445, 14112, 14298, 15358, 13316, 13472, 14672, 11218, 14498, 14862, 12507, 12882, + 13052, 15352, 14266, 15101, 15284, 9318, 13471, 15069, 11003, 9380, 14529, 13474, 9958, 14949, 12900, 13514, + 14615, 15101, 14393, 11326, 14671, 15011, 11166, 15167, 15198, 14745, 13075, 14726, 15012, 15320, 14952, 15251, + 8692, 14574, 14805, 13973, 10449, 13937, 14144, 14675, 10440, 15337, 13138, 14544, 15088, 15218, 15346, 14690, + 14454, 14744, 15317, 14778, 14915, 14942, 13830, 15288, 14237, 13099, 13504, 12516, 14775, 13523, 13570, 14821, + 13955, 12251, 15329, 14560, 11675, 15305, 14830, 14473, 12337, 14818, 14404, 13443, 14876, 13857, 10871, 15016, + 14348, 11255, 14980, 14038, 13835, 14487, 14440, 9509, 15004, 11235, 13522, 13627, 14482, 14019, 10659, 14776, + 14669, 14438, 13958, 14695, 15073, 13004, 13469, 10959, 13679, 13413, 14504, 14660, 14983, 14322, 12697, 14634, + 11157, 15262, 13184, 14070, 14294, 14928, 14581, 14592, 14384, 12975, 15314, 13474, 13877, 13756, 14657, 11925, + 14725, 10879, 14426, 14095, 13314, 13728, 14996, 11853, 14118, 15112, 14969, 12360, 15083, 14519, 15218, 14755, + 14013, 14460, 14342, 15143, 14800, 14388, 10658, 13742, 15351, 14182, 11547, 14074, 13026, 13801, 14874, 14577, 
+ 10298, 14690, 14592, 13917, 15093, 13848, 15141, 12502, 15011, 12813, 14734, 14088, 15222, 13015, 13795, 14514, + 13418, 14639, 14623, 14689, 13773, 14156, 15300, 14591, 13250, 14332, 14905, 15354, 13686, 10633, 11730, 14490, + 10995, 15314, 14516, 15251, 14372, 14883, 12828, 14779, 14965, 14958, 15107, 13954, 15189, 14755, 14170, 14143, + 13411, 15079, 15332, 14533, 14477, 12481, 11910, 14374, 13119, 13170, 13908, 13999, 10315, 11690, 14592, 12098, + 14635, 15153, 15033, 14213, 14362, 14363, 15233, 12589, 14695, 15205, 9897, 12297, 12789, 14555, 14551, 15029, + 14876, 14607, 14530, 14681, 15249, 14664, 14074, 14430, 14839, 15230, 13781, 12222, 13710, 14487, 15168, 14748, + 14851, 13996, 14147, 13595, 13173, 13576, 14825, 15011, 13717, 11066, 8014, 14771, 14859, 14256, 12243, 14516, + 13306, 14167, 14488, 14483, 14433, 14540, 10296, 13495, 15021, 15054, 14788, 15322, 11598, 13564, 14472, 14139, + 13907, 14901, 15019, 14818, 15222, 14525, 6472, 13969, 14399, 14337, 14205, 13871, 13848, 12702, 13646, 9531, + 14367, 13147, 15210, 14307, 14375, 12863, 13301, 14488, 14391, 12766, 10000, 13605, 14604, 11090, 14970, 13087, + 13818, 14311, 14461, 14756, 12764, 13897, 14411, 14595, 14016, 15359, 12874, 14143, 9806, 15349, 14591, 15199, + 14603, 12178, 14918, 14408, 10838, 14980, 13930, 13054, 15065, 12418, 13865, 7336, 15131, 13077, 14193, 14901, + 13917, 14388, 15027, 14375, 13635, 12519, 13658, 12588, 11456, 14437, 14373, 14738, 11235, 14269, 15329, 12515, + 13571, 15357, 15086, 14778, 14860, 10133, 13691, 14097, 15173, 14934, 14442, 14951, 14405, 15188, 13948, 15335, + 12292, 10437, 14963, 13320, 12041, 14768, 12774, 14656, 13852, 13624, 11927, 15193, 14651, 15267, 14617, 14867, + 13815, 15084, 14973, 15238, 14606, 14491, 14457, 12787, 15143, 13930, 12877, 13706, 7898, 13954, 14947, 11315, + 14143, 14408, 14397, 15167, 11191, 14279, 13385, 11270, 12510, 14445, 9398, 12842, 14241, 14889, 12265, 15279, + 12729, 14791, 14615, 12734, 13448, 15303, 12395, 11327, 13078, 
11758, 15062, 14169, 11721, 14325, 11429, 15326, + 14858, 14912, 14523, 11814, 12967, 14788, 13480, 14427, 13523, 11151, 14598, 13381, 14579, 13875, 12986, 13345, + 15061, 14584, 14954, 12876, 15126, 13938, 15240, 9596, 12696, 15070, 12366, 14768, 13142, 14970, 14938, 13394, + 15024, 13196, 14492, 13531, 14826, 14218, 14183, 13774, 12475, 13375, 13106, 13068, 13406, 13963, 11598, 14553, + 15344, 14388, 14660, 14709, 14981, 13993, 14804, 14524, 15034, 14845, 15232, 13489, 13562, 14442, 14570, 10464, + 14674, 14588, 15128, 11673, 14958, 12747, 13111, 14488, 12986, 14640, 14749, 12370, 14886, 14405, 14835, 13564, + 14804, 14193, 14428, 13621, 15111, 14766, 12161, 15046, 13311, 14535, 14962, 9925, 15058, 13956, 14436, 15067, + 13627, 14915, 13198, 15265, 11041, 14758, 13433, 14663, 15095, 14063, 15242, 14721, 13923, 14661, 14768, 13190, + 13595, 15183, 15025, 14342, 13992, 15134, 13397, 13406, 14617, 12811, 14564, 11967, 12291, 14439, 13390, 15029, + 12462, 14922, 11796, 13895, 11696, 11550, 11153, 14576, 14512, 13791, 15028, 15162, 13886, 14251, 14957, 13106, + 12942, 15118, 13754, 14828, 14786, 13267, 13484, 14093, 14924, 14659, 14269, 14758, 14662, 14473, 13054, 12338, + 14556, 14491, 10344, 15154, 13659, 15176, 13262, 14342, 15033, 14855, 10100, 15157, 14492, 14590, 14693, 15167, + 14479, 13039, 12936, 15105, 15300, 14390, 14721, 15286, 14022, 15269, 15054, 15129, 8785, 15279, 15213, 14579, + 15022, 14974, 13814, 14826, 12961, 13032, 15101, 14650, 14667, 14759, 13584, 14126, 14705, 15312, 13255, 12368, + 14480, 14845, 13778, 15055, 13481, 15199, 14392, 14377, 13732, 12418, 13193, 10893, 14093, 14900, 14875, 14287, + 14651, 13376, 13428, 14856, 13953, 7701, 12967, 13071, 13972, 14590, 13375, 13267, 13319, 15103, 14467, 14676, + 15247, 14354, 12732, 12222, 14445, 12821, 13096, 14216, 10315, 14903, 14611, 14922, 14371, 15129, 4195, 14490, + 14809, 15131, 14407, 14474, 12547, 14612, 15050, 14870, 13957, 15111, 11912, 14078, 13297, 12912, 12705, 14064, + 14470, 12635, 
13246, 14890, 14996, 14240, 14258, 11484, 11762, 12959, 14210, 14992, 14752, 14690, 14166, 15245, + 15184, 14944, 14762, 13533, 13247, 13172, 13298, 11818, 14519, 14816, 14297, 15240, 14933, 14609, 14194, 14118, + 14802, 14679, 10268, 14549, 14583, 14106, 13745, 13472, 12738, 15175, 13091, 14910, 13149, 13013, 15188, 14384, + 12978, 13913, 14164, 15012, 15063, 14524, 13910, 14018, 11713, 13615, 15098, 11827, 14319, 14625, 14352, 15154, + 15009, 15109, 13468, 15206, 15331, 11571, 13163, 14590, 13805, 14527, 14903, 14301, 15048, 14788, 15112, 14798, + 15202, 13552, 14436, 6181, 15349, 14674, 13595, 14654, 13776, 14460, 15187, 14121, 10985, 14695, 13565, 15267, + 14529, 14077, 11718, 15025, 15059, 14203, 15296, 14050, 14075, 13587, 14270, 14654, 15077, 13120, 12581, 13168, + 13084, 15065, 14979, 13511, 10628, 14836, 14917, 13534, 13017, 13475, 13106, 14724, 14658, 14112, 14909, 14834, + 11909, 9046, 14861, 14215, 14658, 14982, 15165, 14611, 13693, 13530, 12456, 11465, 15309, 13991, 12447, 14392, + 12829, 14797, 13928, 14497, 14949, 15277, 13876, 13152, 15306, 13578, 14514, 14836, 15038, 14719, 13551, 14985, + 15295, 15152, 14641, 14852, 13345, 14391, 14900, 14750, 13871, 14655, 10900, 14496, 14519, 13317, 12369, 13895, + 14812, 14474, 14702, 14441, 13287, 14544, 13580, 13006, 13714, 14145, 11718, 13914, 14080, 14953, 12856, 13405, + 14878, 11354, 12458, 15245, 6968, 15275, 13654, 15123, 14722, 13091, 15114, 14521, 13682, 14861, 13661, 15033, + 15096, 13724, 14597, 14216, 15168, 15191, 13811, 14961, 13695, 14493, 12913, 10037, 14333, 14541, 14488, 14233, + 13653, 14821, 14620, 12786, 13684, 13172, 15324, 13266, 14558, 15283, 13413, 13585, 11960, 12677, 14803, 14648, + 14935, 8686, 14603, 14495, 15181, 8810, 14953, 14259, 10367, 13338, 13196, 13562, 12674, 14062, 12494, 13778, + 13949, 15155, 10232, 13347, 12586, 13700, 15001, 15053, 14406, 15283, 15113, 13179, 14814, 13603, 14553, 12922, + 15068, 14645, 14343, 12467, 14444, 13263, 13261, 13472, 14442, 14787, 9223, 
14937, 13905, 11757, 11957, 9520, + 15269, 14280, 15360, 12646, 15134, 10573, 14929, 13898, 14412, 13546, 14720, 14686, 14884, 13703, 15300, 13830, + 13831, 13560, 12329, 15206, 12724, 13434, 10310, 15258, 13807, 13712, 14567, 13899, 12316, 13908, 10307, 14078, + 13386, 14454, 14416, 14573, 13452, 14279, 15148, 15107, 14965, 11718, 14824, 14493, 14700, 12437, 11859, 13337, + 14766, 12429, 8166, 13597, 14371, 12949, 14903, 12516, 12142, 14405, 14460, 13764, 12754, 9314, 13746, 15191, + 14954, 11976, 11019, 12273, 15022, 15185, 14690, 14833, 12032, 13876, 14432, 13087, 14468, 12550, 15120, 14099, + 14239, 7793, 15011, 13346, 12886, 14524, 13676, 14551, 14047, 15129, 15139, 15281, 14064, 8987, 11608, 14020, + 14171, 13349, 14075, 11284, 14345, 12222, 14771, 11824, 14323, 13036, 14575, 13444, 15130, 14382, 14829, 15230, + 11621, 11970, 14114, 14446, 9328, 9011, 12151, 11692, 14402, 11492, 14995, 13940, 11870, 11060, 15028, 13925, + 15245, 11974, 14414, 12268, 14528, 14474, 12725, 10772, 14032, 14549, 14689, 13250, 13111, 15021, 14252, 14606, + 13899, 11055, 14717, 13935, 13394, 14669, 13397, 12845, 15062, 13751, 13067, 14737, 14785, 14353, 12338, 14374, + 15152, 14006, 14340, 15023, 14302, 13270, 9584, 14651, 9547, 11630, 9113, 14233, 13544, 14740, 13494, 7937, + 15235, 13838, 15139, 12175, 15294, 12139, 13397, 13355, 15305, 15334, 15235, 12423, 14927, 14386, 14231, 15081, + 12208, 12354, 14753, 14930, 13302, 15027, 13815, 15317, 13396, 12925, 14072, 15178, 12934, 15250, 13788, 15094, + 14666, 14891, 15240, 14322, 15292, 14873, 14885, 14194, 14386, 14550, 14282, 13957, 10080, 15262, 12999, 14374, + 13278, 15255, 14814, 12500, 12829, 15351, 14535, 7340, 14355, 13560, 14710, 11705, 14046, 9436, 14335, 15336, + 14746, 13720, 14748, 14429, 15314, 10338, 13590, 12994, 12711, 10352, 15017, 15191, 12328, 14217, 14031, 8978, + 14092, 14502, 14476, 14175, 14816, 11195, 14330, 7635, 12453, 14345, 14294, 13076, 13732, 13844, 12662, 14746, + 13507, 10339, 14488, 14375, 14267, 
10468, 13784, 14559, 15122, 12805, 12492, 14108, 14914, 15278, 14483, 14837, + 15111, 14936, 11043, 14225, 12337, 12922, 11299, 14401, 11737, 15336, 13506, 14963, 13920, 13635, 15057, 12443, + 13682, 13696, 14510, 14464, 13664, 12852, 13677, 15101, 9868, 13736, 11041, 14682, 10934, 13995, 13938, 14773, + 14883, 15107, 12898, 10470, 13757, 15329, 13268, 11995, 9404, 14685, 13773, 14005, 15350, 14881, 8440, 11460, + 15186, 11576, 13609, 13521, 15094, 13019, 15053, 14869, 8654, 14177, 13757, 15184, 14796, 15223, 14639, 14236, + 12843, 15216, 12786, 14523, 14433, 15216, 15359, 11906, 13810, 13255, 9798, 14534, 13997, 14603, 14175, 8503, + 14421, 15054, 13935, 13100, 14262, 14025, 13674, 14659, 11623, 14140, 12974, 12839, 14814, 14039, 15089, 14702, + 12372, 14624, 14567, 13429, 15222, 12075, 14833, 14950, 14351, 15219, 14446, 15109, 10346, 14987, 10597, 14511, + 12425, 13412, 12992, 14254, 15285, 14664, 12385, 10345, 14662, 12555, 12195, 14780, 14982, 14338, 13834, 13591, + 13297, 14306, 11696, 12366, 15004, 10601, 15337, 11481, 9434, 14448, 9658, 15302, 12412, 14804, 15083, 13645, + 13970, 10568, 15028, 15032, 14089, 14565, 14208, 11671, 14048, 13703, 13742, 15102, 11890, 15039, 15312, 12096, + 15276, 14587, 14636, 12712, 11922, 14456, 15157, 15243, 14453, 13956, 14931, 14704, 13211, 13950, 14730, 14453, + 11920, 13559, 14499, 15247, 15192, 14549, 14784, 15227, 15195, 14323, 14846, 15217, 14526, 14757, 14421, 14490, + 15006, 15300, 14751, 14670, 14744, 13911, 14540, 13809, 14929, 15171, 14542, 11798, 7608, 15027, 15270, 13910, + 14257, 14269, 15252, 15326, 14025, 15072, 14971, 15075, 13726, 14701, 14320, 14783, 15091, 14131, 15034, 14809, + 13132, 12607, 14273, 8883, 15242, 14912, 14463, 15209, 13899, 14203, 12161, 14588, 14696, 15027, 10214, 9859, + 14866, 14541, 13307, 13942, 14988, 14966, 14055, 11382, 12514, 13357, 15116, 15198, 13887, 14733, 14149, 11043, + 13398, 14415, 13844, 14724, 13491, 12650, 12399, 15318, 13635, 14545, 12761, 15147, 15345, 12575, 12903, 
15091, + 15335, 12965, 11071, 14433, 15083, 14579, 14457, 14605, 13615, 14625, 14822, 12612, 13824, 14899, 13272, 12568, + 15000, 15145, 14553, 12196, 13125, 14821, 14490, 10374, 15060, 13522, 13260, 10312, 14880, 15147, 10594, 13063, + 13704, 10865, 12454, 14995, 11604, 14229, 14423, 14656, 12259, 14356, 15196, 13328, 14774, 14539, 15231, 12664, + 15281, 12531, 11382, 14129, 13901, 14870, 6364, 14784, 14948, 13543, 14983, 14922, 14041, 14019, 13343, 12483, + 13112, 14627, 13440, 15098, 10310, 13792, 13283, 14056, 13234, 11589, 15271, 15029, 13713, 14406, 14936, 12531, + 13769, 14380, 14607, 14725, 10571, 15332, 13591, 13719, 13610, 13550, 12995, 10556, 15009, 11221, 14773, 13754, + 14760, 14476, 15161, 14151, 12334, 14001, 13908, 14638, 14556, 14519, 13760, 14484, 15119, 14913, 15295, 15352, + 13475, 13385, 13849, 14591, 13386, 14882, 13477, 14897, 15331, 14323, 11677, 12727, 14820, 15175, 12305, 15089, + 13698, 12942, 14990, 15325, 10272, 14616, 13892, 13245, 12515, 15206, 15335, 13751, 14655, 15293, 14615, 13728, + 14499, 10869, 13858, 10495, 13301, 15217, 10530, 15223, 15072, 15103, 15026, 14369, 12334, 13944, 13113, 11477, + 15030, 11206, 14570, 12650, 15064, 15235, 14196, 14995, 11554, 14940, 15032, 14926, 14429, 9951, 7949, 12352, + 14342, 14110, 14867, 15110, 11879, 13263, 9198, 14179, 11654, 10586, 14363, 14853, 13663, 15132, 15000, 11202, + 15154, 15039, 14131, 15086, 14576, 13333, 14071, 14903, 14101, 14358, 14872, 15310, 14259, 14276, 14883, 12859, + 14952, 9443, 15356, 13387, 10482, 13661, 14759, 14925, 13582, 14954, 13753, 13435, 11618, 15309, 14394, 8557, + 9074, 12794, 12642, 13799, 14958, 14636, 15199, 13112, 14605, 13572, 15275, 10187, 12647, 11477, 15307, 13822, + 12046, 14946, 13217, 14354, 11616, 9404, 15175, 15073, 12639, 13673, 14934, 14401, 15121, 13522, 13544, 13402, + 14860, 14995, 11878, 15048, 14554, 15318, 12198, 15073, 14676, 14714, 15101, 15200, 14091, 9043, 14360, 13676, + 14751, 14134, 14944, 14023, 14794, 15017, 12898, 15021, 
14604, 7564, 13562, 12906, 14538, 14880, 14421, 10691, + 13603, 14693, 10750, 15143, 13244, 15227, 15348, 12597, 15274, 9717, 14657, 15279, 13483, 14893, 14897, 15003, + 11537, 14666, 14647, 14346, 14444, 14077, 11350, 15350, 14545, 14139, 11250, 15019, 15153, 10032, 14917, 11804, + 14708, 14989, 13837, 14518, 14136, 11937, 10184, 9024, 14700, 12563, 13622, 13355, 15303, 12896, 11127, 14072, + 12316, 11793, 14591, 14516, 14047, 15058, 15169, 14790, 14400, 14969, 15131, 14651, 11594, 12839, 12531, 11966, + 11701, 12480, 14343, 14706, 14854, 8858, 14391, 12986, 9641, 12041, 15102, 13387, 14800, 13480, 14685, 15007, + 14308, 15113, 12894, 14400, 14623, 14339, 14836, 14229, 14999, 15183, 15186, 14433, 14366, 15326, 12592, 13977, + 11710, 13660, 14669, 13687, 15082, 11505, 15240, 14765, 11556, 14301, 14419, 14551, 11562, 8593, 15233, 14454, + 13911, 13025, 14586, 13830, 13854, 14795, 14585, 13021, 15043, 14382, 14856, 13391, 14498, 10118, 15031, 14459, + 15075, 14645, 14970, 14554, 13204, 15063, 11593, 15236, 11083, 14828, 14402, 14118, 15267, 15130, 14239, 11890, + 14568, 14029, 12816, 14444, 14768, 14568, 13293, 12624, 14104, 15130, 11097, 14722, 14114, 14666, 15016, 14098, + 12946, 14780, 13995, 14353, 14714, 14210, 14839, 14827, 14856, 14383, 15183, 14891, 13669, 12011, 13867, 14407, + 12962, 14497, 11248, 15151, 13330, 14573, 14629, 11126, 14386, 14505, 15235, 13990, 12398, 12737, 15320, 13689, + 12315, 15299, 15353, 14423, 12857, 13913, 11089, 14815, 12375, 14363, 14167, 5843, 14270, 10912, 15334, 14974, + 12828, 15145, 14566, 8487, 14109, 13482, 14511, 13124, 12995, 10760, 9888, 12131, 13029, 14774, 14753, 11621, + 14600, 12294, 14838, 14858, 10505, 8317, 12171, 11843, 13416, 13472, 14476, 15192, 15225, 12419, 12701, 11774, + 15237, 14905, 12767, 13453, 15276, 14624, 14492, 14994, 14927, 12977, 13534, 15148, 14450, 14430, 14847, 12809, + 12665, 14661, 14363, 14810, 13856, 15180, 14863, 10524, 15252, 15338, 12744, 14414, 15071, 14359, 15179, 14080, + 12387, 11443, 
12677, 14534, 13383, 15047, 14229, 14922, 15085, 15199, 14564, 14604, 15015, 13123, 14850, 14352, + 15167, 13140, 15029, 13427, 15325, 13804, 14680, 13076, 14730, 14584, 12131, 15079, 13722, 12935, 14042, 12959, + 14544, 14311, 14034, 13325, 14371, 15037, 15245, 13394, 15341, 14846, 15200, 12226, 14481, 15153, 13791, 13544, + 15042, 14003, 10915, 14918, 13744, 14885, 13817, 14614, 14234, 14324, 13018, 7803, 13872, 14394, 14559, 14411, + 12462, 13561, 14965, 15296, 14888, 13830, 14374, 15058, 9350, 12027, 13109, 10866, 11952, 14949, 11986, 12972, + 15284, 15314, 14393, 15080, 13009, 13054, 14489, 14349, 15060, 15346, 13829, 14738, 13001, 14113, 15197, 15266, + 14667, 14892, 13749, 14842, 13484, 15277, 14934, 11430, 14652, 14523, 14142, 14102, 14710, 14684, 14747, 14629, + 14184, 14809, 14888, 14704, 13659, 8931, 12806, 14686, 13643, 13870, 14821, 13970, 14538, 13348, 15275, 15013, + 14449, 13535, 12912, 14026, 14535, 14791, 14636, 14919, 13215, 13064, 14718, 15067, 14735, 15086, 13160, 15166, + 14428, 12842, 14157, 13991, 13033, 14553, 14843, 13633, 10203, 13332, 11350, 14543, 15116, 12414, 14448, 14163, + 14351, 15227, 12477, 10914, 15251, 15165, 12029, 13639, 14742, 14160, 13458, 14071, 12694, 13651, 15170, 13738, + 15348, 14534, 13838, 14816, 13440, 14583, 13870, 14790, 12977, 13485, 14263, 14180, 14141, 13458, 14344, 13829, + 15299, 13468, 12290, 15045, 13479, 13324, 12783, 13423, 15356, 14701, 14819, 15247, 15168, 14803, 15001, 14381, + 14783, 14777, 12232, 14649, 13830, 13881, 14715, 14598, 14228, 13714, 14899, 11606, 14990, 15243, 14447, 14666, + 12810, 12869, 13419, 14902, 14471, 12596, 13506, 14675, 14781, 13091, 11836, 13890, 13575, 12783, 13626, 15249, + 14127, 11685, 14643, 14866, 15209, 14734, 15174, 13694, 14448, 14463, 15002, 11493, 14597, 11518, 14376, 14878, + 14890, 14879, 14877, 14437, 12633, 14694, 14376, 13787, 13935, 14509, 12015, 13911, 10412, 14356, 13635, 14485, + 14184, 13661, 14879, 14425, 13784, 13115, 13423, 12274, 14956, 14562, 13231, 
14480, 13787, 14634, 13367, 15023, + 9201, 14314, 13383, 14023, 12862, 14394, 13738, 14260, 14205, 11792, 13408, 13782, 13336, 14364, 13908, 11462, + 12311, 12576, 14649, 15283, 13615, 9514, 12444, 11583, 14368, 15151, 11930, 13709, 15341, 13764, 14136, 14618, + 15125, 13520, 13527, 14573, 13439, 13875, 14558, 13380, 15054, 15324, 13611, 13827, 14384, 14562, 11662, 15282, + 13550, 15102, 14441, 14357, 11794, 15347, 12865, 13426, 15304, 12958, 14337, 13780, 14827, 13552, 13239, 9025, + 15200, 13322, 15160, 11566, 13377, 12064, 14347, 12401, 14781, 13065, 15196, 11400, 13904, 11792, 15069, 10056, + 13236, 12698, 13934, 15055, 13452, 14342, 15335, 11267, 10657, 15243, 13508, 15072, 14229, 13272, 11325, 12482, + 14286, 14268, 13328, 14607, 15347, 11916, 13690, 15311, 14554, 13239, 12894, 12473, 14331, 14166, 14682, 14399, + 14352, 13505, 14586, 12724, 14386, 13715, 13522, 14742, 13900, 15350, 11714, 14587, 14880, 13093, 9847, 11935, + 14444, 15323, 13407, 12836, 14979, 13902, 14786, 15215, 14452, 12792, 15190, 11537, 14209, 14020, 14223, 14529, + 14558, 14348, 13580, 13413, 13897, 12100, 14362, 13759, 13681, 12425, 11316, 12289, 10221, 14624, 14389, 13849, + 9967, 13954, 13475, 13747, 12796, 13801, 14716, 9595, 13863, 13439, 14151, 13455, 14378, 14673, 14973, 15329, + 14572, 14903, 11751, 14717, 12501, 14774, 12440, 13786, 13466, 15105, 15326, 14526, 15247, 11360, 15085, 10444, + 15254, 13785, 15161, 14463, 13963, 12740, 14663, 12602, 12423, 15022, 15287, 15094, 13762, 14245, 15009, 10458, + 15045, 14378, 14851, 15013, 13629, 14707, 14844, 14856, 13130, 11235, 14572, 12745, 15124, 14856, 14663, 15164, + 13801, 13589, 14579, 14579, 15051, 13822, 14773, 14785, 15163, 15296, 14340, 9218, 13800, 14758, 10946, 14283, + 14841, 15077, 12361, 12294, 14902, 15298, 9077, 14738, 11731, 13945, 14503, 15088, 14845, 13310, 12595, 15047, + 12947, 14844, 15143, 14879, 15350, 14155, 12870, 14311, 14533, 14790, 14626, 15056, 14774, 14676, 13998, 15122, + 15065, 14877, 11779, 15221, 
12400, 14852, 11268, 12707, 14894, 14394, 15076, 15208, 12305, 12681, 9601, 15061, + 14880, 14023, 14347, 14948, 14509, 14043, 13449, 14501, 12325, 13123, 14068, 14648, 15287, 11917, 14156, 15336, + 14239, 14486, 15340, 15299, 13238, 15250, 14771, 14582, 14494, 15251, 12422, 14387, 14592, 15018, 11295, 13797, + 15183, 14448, 15178, 13466, 14809, 13527, 14844, 13607, 14009, 14424, 14483, 14917, 13862, 12183, 14518, 15325, + 12408, 14507, 14467, 10446, 15239, 14589, 13446, 13323, 15151, 13850, 15212, 15032, 9171, 13263, 12537, 14232, + 14605, 13758, 13465, 15047, 15104, 12808, 13647, 14156, 12925, 15040, 13402, 12307, 13419, 13681, 10824, 15121, + 14977, 15168, 8601, 13726, 13226, 14671, 14683, 15115, 14477, 10156, 14674, 9955, 14593, 14529, 14509, 13841, + 12482, 12245, 14510, 13818, 15030, 12719, 13705, 11588, 14835, 14728, 14647, 12872, 15168, 12440, 14633, 14858, + 14857, 14379, 11762, 15354, 12257, 13862, 13822, 15100, 15222, 14653, 14210, 10061, 12761, 15073, 12603, 14648, + 14744, 14242, 14033, 14450, 14353, 15096, 14537, 14608, 14148, 14565, 15006, 14341, 14892, 14063, 14577, 14813, + 11292, 11658, 13531, 12399, 13550, 14930, 15140, 14805, 14077, 13784, 14530, 14633, 14663, 15030, 14325, 15091, + 11937, 14744, 13428, 15264, 10249, 14423, 12749, 15186, 14784, 13583, 15210, 14675, 14776, 11081, 14560, 12648, + 14706, 11331, 13998, 15216, 14165, 13174, 15121, 14098, 13705, 14000, 12721, 14866, 14976, 14591, 12694, 14668, + 15322, 14951, 11058, 15054, 15016, 14918, 15204, 10408, 13474, 15004, 13927, 15325, 13314, 11958, 11881, 15276, + 13482, 15038, 13442, 14677, 15044, 14393, 13774, 13179, 14610, 15110, 15354, 13700, 15073, 15136, 15345, 10408, + 12420, 14948, 15027, 14375, 15211, 14673, 14670, 13165, 14458, 15241, 12381, 12137, 12674, 13898, 15215, 10465, + 14965, 15088, 11686, 14636, 15160, 13466, 14668, 12900, 14355, 14464, 15243, 14069, 14822, 14443, 12140, 15050, + 12978, 14722, 14933, 13466, 13653, 11384, 14565, 15129, 14702, 12955, 14850, 15208, 14765, 
13995, 14573, 14160, + 14939, 15243, 15052, 13989, 12291, 13939, 13758, 14569, 14786, 14795, 15076, 13356, 15289, 14057, 14656, 13669, + 15313, 14382, 15124, 15181, 10955, 11986, 13730, 14500, 12446, 15179, 15261, 13354, 13832, 10504, 13104, 14655, + 14848, 15343, 14766, 14871, 15055, 15010, 14424, 14574, 13750, 15057, 10670, 13682, 14804, 13954, 14137, 15008, + 15123, 14736, 11392, 14873, 8800, 14063, 13741, 14311, 13968, 14548, 13510, 14550, 14411, 13927, 14680, 15149, + 13955, 15104, 15291, 13049, 13594, 13568, 13231, 13574, 13497, 12142, 13559, 9415, 15166, 12744, 14727, 15034, + 15176, 14563, 14762, 15052, 14758, 14347, 13698, 12820, 12855, 14204, 10514, 12219, 14909, 14617, 14107, 14635, + 13256, 14717, 9723, 13506, 15046, 15136, 14174, 13975, 11803, 14339, 14573, 14245, 14581, 14437, 14586, 15302, + 13717, 10431, 14811, 13462, 14590, 14448, 11559, 14707, 14628, 14852, 8620, 14355, 14703, 15311, 14857, 13467, + 12591, 15346, 15062, 13480, 14687, 14993, 13695, 12358, 13082, 15275, 13998, 11465, 15126, 15068, 15337, 11320, + 14557, 15197, 15193, 15236, 13394, 14350, 13599, 14687, 12309, 11507, 10362, 14609, 14984, 15004, 11186, 15279, + 14749, 13735, 10349, 14844, 14336, 14946, 14945, 11370, 14529, 15108, 14784, 14134, 13050, 7602, 15053, 15145, + 8647, 14642, 13312, 13027, 13651, 14852, 10707, 10558, 14802, 13474, 10743, 14551, 15107, 15221, 14947, 13339, + 14672, 14661, 13594, 14467, 15016, 11392, 14937, 15038, 14758, 13761, 14881, 14889, 14908, 15353, 15277, 15047, + 11742, 11502, 14974, 12319, 15041, 13839, 13208, 14426, 13469, 13802, 13378, 14736, 14379, 14844, 13652, 8492, + 12934, 12512, 15282, 12825, 13545, 11065, 15255, 14599, 14231, 14374, 13660, 13331, 15357, 14487, 13831, 13484, + 12885, 15320, 13357, 10661, 15152, 14588, 14871, 14631, 14884, 14918, 15118, 12648, 14384, 15278, 12764, 14358, + 15266, 15152, 13630, 15270, 13812, 13645, 14720, 12818, 14635, 14610, 14459, 11684, 12170, 14855, 14462, 14790, + 15263, 14818, 14902, 14720, 14193, 15071, 
13320, 14799, 14696, 13878, 14676, 14757, 15186, 13187, 14954, 14121, + 13137, 10891, 6814, 14841, 12873, 14341, 10999, 14054, 9437, 15338, 14387, 14961, 13363, 11666, 15013, 15256, + 14374, 14429, 14823, 14546, 12956, 14562, 13556, 11866, 12615, 11663, 14999, 14788, 14094, 12805, 14735, 14492, + 15272, 15334, 12348, 13485, 13936, 14347, 14846, 15039, 8456, 13099, 14541, 15216, 15131, 12928, 14759, 14495, + 15169, 12609, 15254, 15219, 14021, 14474, 10317, 12306, 15133, 13066, 13982, 15263, 14898, 12768, 14923, 14379, + 8550, 13661, 11617, 14136, 14434, 14600, 14804, 10307, 14727, 14252, 14125, 14019, 14342, 14658, 8551, 13603, + 14026, 14380, 14628, 13957, 14928, 14828, 14698, 14684, 14499, 15254, 13918, 11677, 15090, 7348, 14570, 15282, + 14688, 14429, 14255, 15203, 15053, 13689, 15150, 11834, 14248, 14581, 13424, 15117, 15226, 13047, 15358, 13101, + 15111, 12389, 13503, 14669, 14978, 10874, 14451, 14905, 11598, 12711, 12457, 12509, 14958, 13835, 15245, 14512, + 14892, 15235, 14730, 14742, 14763, 15183, 13892, 9403, 11706, 13388, 14744, 15101, 14340, 11651, 14816, 14199, + 12049, 12891, 14654, 14070, 14622, 11950, 12602, 11437, 15003, 13708, 10829, 13684, 14612, 12457, 14734, 15169, + 14532, 12516, 14792, 14410, 14896, 11471, 14749, 14648, 13853, 13940, 15230, 14919, 12815, 14692, 14971, 14894, + 12929, 13803, 13820, 14587, 15301, 12894, 15291, 15074, 14528, 12384, 11625, 14932, 13927, 12098, 15208, 14209, + 13731, 14302, 14659, 14104, 15050, 15168, 11603, 14955, 13639, 15192, 13706, 12004, 13914, 14889, 12952, 14965, + 15015, 13926, 13421, 13831, 13302, 15281, 14435, 15200, 15201, 14525, 14871, 13786, 14786, 13845, 12846, 15284, + 10406, 14361, 12091, 14027, 11064, 14024, 14509, 15173, 13439, 15316, 15211, 14691, 14459, 14825, 14985, 14068, + 15283, 15141, 12595, 15156, 14745, 13250, 14731, 13605, 10531, 14766, 14743, 7719, 12572, 15299, 14813, 9544, + 15056, 14492, 14134, 14349, 12485, 14944, 15268, 15262, 13316, 12479, 15015, 15137, 12060, 14087, 8634, 13332, + 
14663, 14630, 14844, 14418, 13010, 14714, 13587, 14381, 14663, 15227, 13199, 12794, 10287, 12794, 15070, 15286, + 15197, 13694, 13339, 12925, 14215, 13405, 14042, 14504, 14413, 7297, 11460, 13755, 14665, 15135, 12693, 14710, + 14070, 15018, 11345, 14412, 14190, 14627, 14476, 14706, 11985, 12960, 12476, 14670, 13979, 13823, 14531, 11613, + 15157, 13256, 14182, 13995, 14598, 14129, 14818, 14354, 13562, 15157, 13403, 14884, 15070, 14469, 7817, 15354, + 15176, 14412, 13778, 15287, 11413, 13354, 14939, 14743, 12773, 12697, 15277, 6460, 9430, 14422, 14175, 13708, + 14837, 14184, 14931, 13683, 15059, 9382, 14748, 15337, 15208, 13079, 13169, 11961, 13653, 13172, 15264, 13892, + 12923, 14555, 15042, 15192, 14840, 10852, 13206, 13409, 13928, 15121, 15293, 13286, 11688, 13470, 14279, 13712, + 15067, 15251, 15119, 15000, 12990, 11939, 15068, 14642, 14549, 11424, 14080, 15241, 14436, 11489, 14764, 13608, + 13773, 15016, 13958, 14010, 15262, 10867, 14655, 14268, 14146, 12680, 13554, 14853, 13386, 14456, 6596, 13578, + 14392, 14257, 14068, 15253, 13948, 15345, 12468, 14789, 14797, 14262, 12442, 12137, 14209, 14897, 14706, 14036, + 13864, 14495, 15257, 14249, 12620, 11935, 14429, 14944, 9880, 14308, 14985, 13602, 14062, 14513, 14619, 14619, + 13479, 14148, 15093, 14898, 14478, 10035, 14518, 14568, 11792, 10448, 14066, 14396, 14526, 12444, 14492, 14832, + 15246, 14570, 14847, 13838, 13647, 13619, 13369, 12170, 14764, 14260, 15210, 15333, 13840, 15330, 10603, 12956, + 11879, 13319, 13486, 14861, 12444, 15148, 12260, 13883, 15229, 8657, 14791, 11885, 13974, 13965, 13054, 14467, + 14585, 15169, 13739, 14553, 14592, 11569, 14974, 11447, 14669, 13722, 15323, 15354, 12540, 13733, 13316, 13883, + 13794, 12711, 14538, 13990, 14162, 11920, 15254, 13104, 14314, 14656, 14265, 14870, 14728, 14928, 14601, 11593, + 14499, 14371, 15130, 14401, 14271, 15204, 13466, 10327, 14915, 14369, 14558, 15033, 15181, 13571, 14544, 13838, + 14802, 14993, 14807, 13981, 13440, 14509, 14908, 13366, 14729, 14400, 
14069, 15355, 13579, 12734, 12521, 15191, + 12367, 13813, 12499, 13611, 15353, 11898, 13545, 13485, 14855, 14838, 14994, 15008, 15201, 14482, 12599, 14717, + 15129, 12538, 14800, 13579, 13803, 14700, 15052, 15263, 13969, 13624, 15037, 14664, 13679, 14512, 14599, 12866, + 15331, 14997, 10530, 15156, 8540, 11104, 12978, 14420, 11660, 13529, 15105, 15104, 14547, 15152, 14708, 13553, + 14589, 14861, 14715, 13974, 14138, 15085, 14428, 14988, 10718, 8447, 12346, 14911, 10079, 13734, 13769, 11320, + 13571, 15193, 14722, 13554, 14681, 11813, 14879, 14463, 14496, 9413, 14842, 14301, 8373, 12670, 14922, 13374, + 13482, 14736, 14937, 14389, 14784, 13805, 14056, 12294, 14388, 14858, 14255, 11977, 15089, 14767, 14526, 13573, + 14394, 4533, 13305, 15323, 14778, 14439, 15245, 14787, 15065, 14569, 14737, 14393, 13436, 11410, 13328, 14363, + 14457, 14850, 14940, 14737, 13467, 12699, 14791, 14837, 13227, 11964, 12820, 14343, 14652, 12819, 13172, 9403, + 12846, 13029, 15320, 14357, 13517, 14409, 13883, 14633, 14170, 13605, 12654, 12836, 15218, 12825, 13819, 15343, + 14652, 14177, 13399, 14935, 14795, 12223, 8417, 13237, 14723, 12390, 15235, 14766, 13794, 14931, 10934, 13873, + 13441, 15229, 15070, 14470, 14727, 12384, 14086, 14020, 13123, 14508, 14283, 12698, 14695, 13675, 14303, 14781, + 11368, 14818, 14769, 12365, 15021, 13636, 11600, 12941, 10553, 13340, 13623, 14500, 14495, 13686, 14878, 9553, + 15347, 13569, 15077, 13978, 14633, 14366, 14734, 11567, 14819, 15181, 14493, 14934, 11434, 12069, 8124, 13937, + 12779, 13882, 12688, 12607, 11536, 13286, 14842, 12943, 15063, 15235, 12396, 12836, 13729, 13955, 13354, 12206, + 14722, 14725, 14882, 14331, 13062, 12238, 15056, 12383, 14600, 13919, 13549, 14637, 14457, 15206, 14921, 15322, + 13745, 13966, 13834, 12056, 15182, 13361, 13241, 14595, 14569, 14151, 13894, 14471, 14750, 14125, 14543, 14692, + 14396, 14946, 13685, 14406, 15136, 14855, 11687, 11361, 12020, 14204, 14453, 14192, 13953, 13530, 13798, 14836, + 15323, 13791, 6802, 11662, 
13729, 13820, 12201, 14394, 13081, 15075, 14552, 15029, 14900, 14171, 15034, 12846, + 14403, 15108, 13742, 12894, 12616, 12140, 15112, 13062, 15275, 15330, 15102, 14811, 14345, 11805, 14823, 14908, + 10843, 11432, 14331, 14534, 14904, 13632, 14901, 14608, 15235, 13339, 12439, 12884, 14743, 14993, 12955, 15352, + 13657, 14445, 15268, 14367, 15062, 14112, 15191, 13039, 13644, 9388, 12825, 14442, 8968, 14774, 14768, 14397, + 14527, 11595, 15315, 14090, 13574, 14398, 13983, 14528, 13407, 15057, 12694, 12308, 13501, 15087, 12814, 14597, + 14543, 13988, 15102, 14528, 15183, 15269, 14748, 14863, 12307, 14477, 13811, 14963, 14411, 14289, 15046, 14143, + 14413, 15102, 13398, 14488, 14613, 14445, 14640, 14237, 13787, 14745, 13997, 14062, 14203, 14745, 14910, 11696, + 11866, 15283, 14687, 14863, 14575, 14416, 13608, 11278, 12354, 14029, 10065, 11497, 15088, 14288, 12731, 14374, + 14673, 13409, 14984, 13964, 15177, 13336, 14968, 15322, 13318, 14185, 14513, 14934, 13765, 11321, 11502, 15102, + 14595, 12407, 13290, 15341, 14324, 14574, 11884, 13976, 14845, 15295, 14812, 12533, 14305, 14504, 15217, 13916, + 12352, 12135, 13514, 14708, 12463, 15311, 14754, 11609, 12191, 12946, 13473, 14256, 13611, 15098, 14096, 14190, + 14620, 15187, 13110, 15256, 14350, 14875, 15298, 13768, 13135, 12571, 12791, 14962, 11454, 13589, 15056, 13450, + 14372, 15339, 14748, 14637, 13541, 13113, 14973, 14365, 14179, 15094, 14328, 13988, 12431, 13506, 13701, 15129, + 13934, 14282, 12588, 12783, 14136, 15139, 13208, 11604, 13987, 14492, 15066, 15160, 13916, 15000, 14933, 14920, + 14477, 15035, 14708, 14807, 14595, 14909, 14392, 13060, 12175, 12319, 15048, 15042, 15239, 14225, 13839, 11822, + 11687, 12978, 12295, 12723, 13447, 13747, 12509, 14791, 13546, 12556, 11463, 13813, 15101, 14552, 13640, 15233, + 15349, 14306, 11914, 14545, 13667, 14339, 15216, 15337, 14929, 15210, 14968, 15163, 14880, 10865, 13472, 14898, + 13762, 14404, 14968, 14097, 12860, 14855, 14594, 12749, 12261, 13587, 11341, 14976, 15032, 
13645, 13642, 14593, + 12631, 14859, 15328, 14443, 14883, 13676, 12750, 14914, 12478, 14448, 14479, 14372, 14959, 10572, 15028, 12774, + 11892, 14338, 14022, 13578, 8592, 15212, 12442, 15285, 14068, 14693, 15234, 14674, 13428, 14811, 15075, 14934, + 13379, 13535, 14892, 15235, 10946, 14025, 14654, 10445, 14602, 14812, 14784, 14915, 15156, 13932, 14471, 14347, + 13748, 10517, 14949, 14376, 14763, 14557, 8887, 14254, 11931, 14077, 14348, 15000, 12868, 14066, 14484, 14450, + 13053, 15324, 13417, 10492, 15303, 12556, 15126, 14709, 13139, 4628, 12320, 12889, 14880, 14928, 13619, 15266, + 14172, 14669, 14639, 14227, 6439, 15124, 15348, 14005, 14229, 13329, 14464, 14187, 12725, 10842, 13327, 9268, + 14416, 13911, 14573, 14257, 15345, 14405, 15275, 14777, 14906, 14049, 12833, 14454, 14404, 13085, 14529, 15297, + 9122, 14587, 14701, 14517, 13778, 13626, 15196, 13562, 14997, 15012, 15018, 14337, 10812, 13980, 14757, 9337, + 14190, 14338, 14468, 13074, 13625, 11728, 15133, 15016, 14764, 15272, 12601, 12817, 13679, 10430, 14998, 14273, + 13939, 15074, 7623, 13956, 14840, 14451, 14631, 14809, 12458, 14018, 15296, 15282, 15352, 15270, 13596, 15174, + 14895, 13076, 15360, 15012, 9883, 14560, 12924, 14390, 14770, 13920, 14451, 11888, 13889, 13851, 14590, 9064, + 14580, 14566, 14885, 12834, 14114, 14291, 15214, 14800, 12595, 15039, 14974, 14712, 14393, 14104, 13665, 14082, + 13387, 14621, 14298, 14665, 15209, 13838, 15231, 11768, 14593, 14819, 14719, 12716, 14057, 11861, 15055, 15048, + 14882, 14444, 14867, 15113, 14145, 12243, 12469, 14780, 15092, 15049, 14989, 12968, 14521, 12539, 13691, 15183, + 14436, 14832, 13729, 14075, 15115, 12702, 15163, 10247, 14573, 8219, 14241, 14828, 11904, 13621, 14136, 14733, + 13201, 13622, 12630, 14663, 12188, 14666, 14886, 14129, 12408, 15157, 12347, 11331, 13946, 15241, 14868, 12449, + 15039, 11182, 10966, 11274, 15200, 14894, 13209, 13263, 14569, 14588, 14848, 14553, 13475, 15220, 13456, 14415, + 10837, 14552, 14239, 12808, 13942, 15155, 14857, 
15312, 12626, 13018, 10659, 14960, 15140, 9377, 13010, 14596, + 11398, 15308, 15346, 14613, 15005, 13828, 13094, 11437, 13585, 8563, 13709, 11745, 12184, 15000, 13747, 13726, + 15275, 14269, 11740, 13731, 12946, 13482, 14757, 14098, 9730, 14406, 12121, 14426, 12043, 14933, 13788, 15278, + 15245, 14726, 12908, 9269, 14307, 13260, 10118, 9705, 15271, 12439, 15087, 13329, 5618, 11082, 14739, 14017, + 14809, 12637, 14318, 13759, 13028, 15114, 13482, 14547, 14099, 14969, 14941, 15029, 13417, 13061, 14456, 14580, + 14355, 12679, 14383, 14773, 13982, 15356, 14015, 13770, 9833, 15118, 13012, 14753, 13754, 14661, 13381, 14793, + 15359, 13504, 14697, 13818, 14061, 12522, 12457, 14482, 15323, 15124, 15149, 12324, 15095, 14605, 14842, 15231, + 11869, 14826, 13947, 14939, 14396, 12589, 15280, 13844, 14235, 14341, 14270, 15039, 14830, 12090, 15239, 13321, + 10164, 13816, 14003, 13577, 13036, 14429, 15191, 14617, 14347, 15111, 15081, 9540, 14417, 11294, 10946, 13312, + 15226, 13790, 15118, 15076, 14599, 14321, 12659, 11447, 13916, 14712, 14664, 13603, 14349, 14511, 14313, 14997, + 14592, 13480, 15315, 14060, 15145, 11816, 14979, 14689, 13526, 14384, 11446, 13163, 14508, 13747, 15197, 13352, + 13638, 15038, 13408, 14401, 15179, 14720, 10039, 14040, 12626, 15184, 13555, 13497, 12684, 14102, 12329, 15232, + 15138, 15183, 15097, 14793, 11650, 14901, 15302, 9593, 4843, 14832, 13773, 14905, 14265, 14932, 13187, 14729, + 11821, 14511, 15062, 14857, 14069, 15091, 14040, 13796, 13964, 15038, 9525, 13530, 14503, 15220, 15178, 15251, + 14889, 14781, 13466, 14301, 5960, 14708, 15152, 14328, 14462, 15212, 12308, 14697, 14177, 12548, 14432, 13066, + 14319, 14688, 14631, 13712, 12265, 15102, 14613, 15214, 15147, 13771, 15295, 15327, 14799, 14598, 13369, 15096, + 15280, 10941, 15271, 12022, 6008, 13833, 14368, 15107, 13742, 15150, 14422, 13809, 12752, 13892, 14970, 13439, + 11849, 14618, 14338, 15027, 15274, 14262, 15052, 12752, 14465, 14718, 13152, 14862, 15176, 15322, 13321, 13541, + 14801, 
13681, 14774, 14451, 14211, 14833, 7702, 13656, 15343, 15113, 12539, 15121, 11621, 15107, 12521, 15199, + 15155, 15059, 13913, 12693, 14426, 13741, 15188, 13108, 13473, 11440, 15244, 13496, 14074, 11542, 14943, 13725, + 14847, 15338, 15192, 13091, 11462, 14068, 12011, 13414, 14725, 12559, 13480, 13204, 14476, 15160, 11130, 14965, + 12413, 13181, 11323, 14102, 13629, 12569, 15295, 14289, 11858, 15188, 14684, 14345, 13631, 10795, 14829, 15104, + 12605, 13779, 13644, 14973, 12487, 15129, 15117, 12943, 13955, 11945, 13586, 15099, 14161, 14857, 11468, 12674, + 14302, 15359, 15193, 14541, 14997, 14959, 12240, 14371, 13906, 14994, 14812, 11908, 14996, 15081, 14687, 14034, + 14236, 15202, 13226, 14394, 14795, 14941, 13213, 14489, 14243, 12731, 14554, 15058, 14413, 14780, 14478, 9564, + 13488, 14087, 12455, 12319, 14372, 12067, 14653, 14929, 11319, 12423, 14730, 11816, 14905, 13869, 14998, 12353, + 15153, 11400, 14878, 14442, 14979, 13117, 14876, 15131, 12066, 14827, 13256, 14434, 15034, 14676, 14565, 14342, + 14586, 14998, 13991, 13945, 14710, 11035, 14278, 8191, 13485, 10994, 13691, 13870, 15041, 12543, 14567, 11883, + 14315, 10675, 14922, 14174, 13970, 14525, 13345, 14299, 13712, 13780, 12453, 13792, 14168, 15249, 13773, 13095, + 14635, 14181, 9979, 15296, 14242, 14822, 15178, 15217, 15240, 15293, 6673, 13100, 12507, 14737, 15297, 12822, + 13609, 13456, 14498, 13737, 14238, 15154, 15162, 12381, 12487, 15005, 14755, 14035, 15127, 13867, 13552, 14708, + 14408, 14632, 13985, 12505, 14428, 12193, 15144, 14814, 14136, 10856, 15323, 13718, 14898, 14420, 14613, 15247, + 14435, 15282, 15328, 12070, 12309, 12353, 14372, 12436, 12509, 14405, 14365, 14104, 11142, 14356, 13802, 14645, + 14729, 11306, 13769, 14032, 14079, 12062, 13511, 14757, 14870, 13899, 14017, 10908, 12775, 15208, 15311, 13155, + 13844, 12694, 13469, 15111, 14731, 15253, 11769, 15321, 13362, 14340, 14530, 11752, 14944, 15003, 10983, 14587, + 13668, 14297, 14753, 14664, 13716, 7524, 14226, 13325, 14005, 13822, 
13733, 14814, 14429, 13838, 14762, 14819, + 13404, 15198, 13777, 15028, 12461, 11820, 15322, 15271, 15272, 15261, 14433, 15270, 13475, 14914, 14291, 13913, + 14993, 14827, 13021, 15205, 14103, 15269, 14012, 13998, 14363, 14717, 14073, 14354, 14620, 13949, 8820, 14674, + 14513, 15234, 14817, 15261, 14672, 14204, 14255, 14568, 13714, 14426, 14792, 15227, 15249, 15354, 15110, 13300, + 14499, 13342, 12744, 12545, 14793, 12379, 13956, 12976, 13531, 14448, 14871, 14075, 15342, 14987, 11271, 13757, + 14288, 14529, 14427, 10754, 15270, 14779, 15099, 15015, 14422, 14240, 14318, 14864, 14723, 13796, 15056, 8992, + 14638, 14798, 14805, 13607, 14582, 14729, 15249, 13968, 15141, 14409, 15157, 14910, 11573, 15292, 14468, 14810, + 13946, 12579, 15338, 12081, 11006, 14520, 14689, 15105, 14918, 13899, 14565, 14112, 15098, 13661, 8369, 14241, + 15084, 15092, 14357, 14304, 14795, 14892, 12919, 14117, 11091, 15087, 11757, 14770, 12652, 14913, 14950, 14566, + 14046, 14859, 14424, 14120, 10261, 14716, 13908, 14382, 14222, 15056, 11358, 15135, 14089, 14461, 15161, 14986, + 9750, 14419, 14575, 13452, 15022, 11635, 12514, 14825, 14809, 13786, 14243, 15286, 14582, 14694, 11667, 14578, + 14236, 13541, 13141, 14387, 13456, 12328, 13463, 14978, 13447, 14197, 13404, 14790, 14455, 12464, 14952, 15054, + 14230, 15345, 15068, 12740, 10990, 14415, 11246, 15082, 13807, 14851, 14209, 12491, 14365, 13470, 15068, 15239, + 15334, 14380, 15301, 14213, 14975, 15153, 15298, 13132, 14797, 15074, 14693, 10339, 13275, 14888, 13913, 14988, + 13333, 13978, 13172, 14911, 14182, 14087, 11002, 14496, 13252, 12401, 12953, 10311, 13550, 11147, 12478, 14905, + 13980, 14658, 13389, 10828, 11774, 14244, 14798, 14136, 14985, 15096, 14188, 12666, 15278, 15148, 13105, 14537, + 13381, 15288, 14128, 11950, 14543, 12310, 14681, 12642, 14648, 13457, 14867, 13673, 12935, 12601, 13826, 13648, + 14158, 15008, 13845, 12310, 14296, 11970, 9376, 15169, 14804, 15233, 12148, 13661, 14109, 14848, 12650, 14056, + 13403, 14733, 13948, 
14133, 14577, 14913, 15205, 13368, 15332, 14686, 9734, 15197, 14868, 14154, 13910, 14587, + 14360, 15078, 13164, 10991, 12172, 14371, 15054, 13330, 14268, 14366, 10471, 15228, 15056, 10547, 13450, 14932, + 14622, 15102, 6953, 14903, 15197, 14692, 14394, 14832, 12289, 11804, 14921, 13740, 15294, 15327, 13828, 12516, + 14809, 11953, 14881, 14904, 13660, 13445, 14110, 14393, 13912, 14916, 12206, 12822, 11444, 13569, 12719, 14622, + 13975, 14346, 11920, 14665, 14874, 14503, 15204, 10579, 14421, 14755, 11834, 14016, 14365, 14875, 14882, 15270, + 14883, 15077, 14451, 14359, 13630, 12341, 14080, 13145, 14889, 14701, 14153, 14858, 14405, 14520, 14228, 15241, + 14256, 12457, 12594, 14769, 14721, 14057, 10622, 14858, 14216, 14937, 13803, 14433, 11798, 14455, 14588, 14960, + 15203, 14880, 13751, 14239, 14789, 12766, 13402, 13637, 14934, 9540, 12527, 13865, 13938, 13110, 14683, 11623, + 13637, 14468, 14620, 14176, 14983, 14089, 15006, 14285, 14891, 11746, 13412, 14953, 12237, 15166, 15289, 12726, + 13372, 12311, 13028, 14811, 14679, 13462, 15167, 15162, 14272, 14478, 14278, 14904, 15015, 13329, 14730, 14406, + 13785, 14827, 11517, 15169, 8074, 14524, 14702, 13869, 12013, 15083, 14379, 12803, 13686, 15242, 13633, 14502, + 12105, 13801, 14912, 14471, 15054, 10619, 13939, 14862, 14294, 14148, 10916, 15006, 12382, 12645, 14851, 13488, + 15241, 13408, 14340, 14257, 14748, 15141, 14354, 14771, 13501, 14683, 15165, 13634, 14689, 14231, 12290, 14570, + 13515, 13066, 14374, 14658, 14095, 12732, 13869, 14729, 14046, 11412, 14495, 13596, 13340, 9729, 14166, 10964, + 14955, 9924, 13897, 14671, 12765, 14778, 11458, 11524, 13595, 14585, 13531, 13522, 15273, 13808, 10686, 13249, + 10557, 14922, 14445, 10205, 14911, 12784, 13285, 14575, 12407, 14639, 13432, 11651, 11760, 14604, 14612, 10442, + 13692, 11749, 14212, 11993, 15031, 13937, 14710, 14182, 14815, 13010, 14209, 8251, 15173, 12699, 14558, 12306, + 14690, 15297, 14758, 13955, 12726, 10271, 13428, 14030, 13861, 15113, 15262, 14412, 
11573, 14319, 14602, 13676, + 13984, 14236, 15026, 15301, 13938, 13962, 15164, 14736, 14917, 12314, 11705, 15116, 11228, 14690, 11166, 15122, + 12329, 14400, 11242, 15308, 14928, 15253, 14765, 14840, 14472, 14873, 12347, 13971, 14127, 14702, 11270, 13435, + 12525, 13642, 15082, 15003, 14699, 12820, 12100, 14302, 14542, 14338, 14005, 10563, 15262, 12763, 15023, 14169, + }; + + uint16_t data[32 * 2 * 16 * 16]{ + 14402, 12289, 13852, 13540, 14810, 11027, 12387, 11288, 14169, 14805, 15159, 13632, 8595, 13403, 9001, 14821, + 15355, 14719, 14958, 10920, 9389, 13408, 12314, 11907, 14759, 14419, 15124, 14591, 13422, 14465, 13347, 14063, + 14776, 14719, 15329, 14763, 13881, 14609, 12510, 11670, 15296, 15309, 14152, 14890, 13556, 12447, 13171, 13672, + 14945, 14355, 12883, 14015, 12895, 14170, 14818, 15274, 13020, 14222, 14246, 12003, 11254, 14901, 13659, 14984, + 14532, 14496, 13605, 14803, 14524, 10024, 14259, 14899, 10336, 14532, 12566, 14884, 10386, 14316, 14828, 14809, + 14952, 14336, 12187, 13557, 15065, 15203, 15318, 12324, 13915, 8520, 13101, 14928, 15148, 12837, 14358, 14794, + 13444, 14120, 13324, 15242, 14846, 14609, 14722, 15019, 13205, 13693, 15019, 13263, 13500, 11845, 12709, 13293, + 12727, 12279, 13721, 14261, 6931, 14579, 15140, 15252, 14686, 13994, 14934, 14897, 14748, 12729, 14560, 14591, + 13643, 15213, 14626, 14254, 14498, 15019, 15162, 14222, 14382, 14808, 15215, 14977, 14343, 14110, 14728, 14801, + 14190, 15069, 14546, 14639, 13854, 13561, 12404, 14199, 12155, 13052, 14615, 8692, 14454, 13955, 14348, 14669, + 14725, 14013, 10298, 13418, 10995, 13411, 14635, 14876, 14851, 13306, 13907, 14367, 13818, 14603, 13917, 13571, + 13815, 14143, 12729, 14858, 15061, 15024, 15344, 14674, 14804, 13627, 13595, 12462, 12942, 14556, 14479, 15022, + 14651, 15247, 14809, 14470, 15184, 14802, 12978, 15009, 15202, 14529, 13084, 11909, 12829, 15295, 14812, 14878, + 13653, 14935, 13949, 15068, 15269, 13831, 13386, 14766, 14954, 14239, 14171, 11621, 15245, 13899, 15152, 
15235, + 14666, 13278, 14746, 14092, 13507, 15111, 13682, 14883, 15186, 12843, 14421, 12372, 12425, 13297, 13970, 15276, + 15006, 14257, 13132, 14866, 13398, 15335, 15000, 13704, 15281, 13112, 13769, 14760, 13475, 13698, 14499, 15030, + 15154, 14952, 9074, 12046, 14860, 14751, 13603, 11537, 14708, 12316, 11701, 14308, 11710, 13911, 15075, 14568, + 12962, 12315, 12828, 14600, 15237, 12665, 12387, 15167, 14544, 15042, 12462, 15284, 14667, 14184, 14449, 14428, + 15348, 15299, 14783, 12810, 14127, 14890, 14184, 9201, 12311, 15125, 13550, 15200, 13236, 14286, 14352, 14444, + 9967, 14572, 15254, 15045, 13801, 14841, 12947, 15065, 14880, 14239, 15183, 12408, 14605, 14977, 12482, 14857, + 11292, 11937, 14706, 15322, 13482, 12420, 14965, 12978, 14939, 15313, 14848, 15123, 13955, 15176, 13256, 13717, + 14557, 14749, 8647, 14672, 11742, 12934, 12885, 15266, 15263, 13137, 14374, 15272, 15169, 8550, 14026, 14688, + 14892, 12049, 14532, 12929, 13731, 15015, 10406, 15283, 15056, 14663, 15197, 14070, 15157, 15176, 14837, 12923, + 13773, 14392, 13864, 13479, 15246, 11879, 14585, 13794, 14499, 14802, 12367, 15129, 15331, 14589, 13571, 13482, + 14457, 12846, 14652, 13441, 11368, 15347, 12779, 14722, 13745, 14396, 15323, 14403, 10843, 13657, 14527, 14543, + 11866, 14673, 14595, 12352, 14620, 14372, 13934, 14477, 11687, 15349, 13762, 12631, 11892, 13379, 13748, 13053, + 14416, 9122, 14190, 13939, 14895, 14580, 13387, 14882, 14436, 13201, 15039, 10837, 11398, 15275, 15245, 14809, + 15359, 11869, 10164, 15226, 14592, 13638, 15138, 11821, 14889, 14319, 15280, 11849, 14801, 15155, 14847, 12413, + 14302, 14236, 13488, 15153, 14586, 14315, 14635, 13609, 14408, 14435, 14729, 13844, 13668, 13404, 14993, 14513, + 14288, 14638, 13946, 15084, 14046, 9750, 14236, 14230, 15334, 13333, 13980, 13381, 14158, 13403, 14360, 14622, + 13975, 14883, 14256, 15203, 13637, 13372, 13785, 12105, 15241, 13515, 14955, 10557, 13692, 14690, 13984, 12329, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14379, 
12461, 14410, 12429, 12870, 15231, 14480, 10750, 15177, 13884, 14158, 13253, 13506, 12498, 13153, 14708, + 12676, 9604, 15172, 13863, 15312, 14436, 13490, 15083, 13908, 12270, 12599, 15223, 12463, 14561, 14493, 12352, + 14537, 11611, 12643, 14922, 14035, 11970, 14910, 14490, 14761, 15061, 13666, 14130, 15296, 13639, 15249, 8727, + 10349, 14741, 15080, 14391, 14718, 14301, 14930, 14129, 14326, 13851, 15293, 14246, 12144, 14809, 12441, 13698, + 15026, 10637, 14544, 14678, 12307, 13911, 15175, 15000, 13365, 15001, 14693, 14497, 14887, 12578, 14367, 14256, + 13042, 13771, 13831, 13797, 15268, 15168, 13592, 15333, 13043, 15348, 14720, 14460, 14043, 14486, 10748, 14262, + 13232, 14590, 12376, 14716, 14813, 12310, 13740, 14201, 13379, 13640, 13591, 13190, 14543, 10802, 15231, 11537, + 10729, 13332, 11635, 13524, 11244, 12494, 13989, 15284, 13862, 14603, 13622, 12856, 15165, 13434, 14572, 14424, + 14869, 12322, 14700, 15336, 13617, 10878, 11790, 14230, 13544, 15139, 12874, 14437, 15094, 10034, 13460, 12359, + 13440, 13762, 14905, 12550, 14582, 12519, 14664, 14553, 13654, 15352, 15101, 14574, 14744, 12251, 11255, 14438, + 10879, 14460, 14690, 14639, 15314, 15079, 15153, 14607, 13996, 14167, 14901, 13147, 14311, 12178, 14388, 15357, + 15084, 14408, 14791, 14912, 14584, 13196, 14388, 14588, 14193, 14915, 15183, 14922, 15118, 14491, 13039, 14974, + 13376, 14354, 15131, 12635, 14944, 14679, 13913, 15109, 13552, 14077, 15065, 9046, 14797, 15152, 14474, 11354, + 14821, 8686, 15155, 14645, 14280, 13560, 14454, 12429, 11976, 7793, 13349, 11970, 11974, 11055, 14006, 13838, + 14891, 15255, 13720, 14502, 10339, 14936, 13696, 15107, 11576, 15216, 15054, 14624, 13412, 14306, 10568, 14587, + 15300, 14269, 12607, 14541, 14415, 12965, 15145, 10865, 12531, 14627, 14380, 14476, 13385, 12942, 10869, 11206, + 15039, 9443, 12794, 14946, 14995, 14134, 14693, 14666, 14989, 11793, 12480, 15113, 13660, 13025, 14645, 14029, + 14497, 15299, 15145, 12294, 14905, 14661, 11443, 13140, 14311, 14003, 
13561, 15314, 14892, 14809, 13535, 12842, + 14534, 13468, 14777, 12869, 11685, 14879, 13661, 14314, 12576, 13520, 15102, 13322, 12698, 14268, 13505, 15323, + 13954, 14903, 13785, 14378, 13589, 15077, 14844, 14877, 14023, 14486, 14448, 14507, 13758, 15168, 12245, 14379, + 11658, 14744, 11331, 14951, 15038, 14948, 15088, 14722, 15243, 14382, 15343, 14736, 15104, 14563, 14717, 10431, + 15197, 13735, 14642, 14661, 11502, 12512, 15320, 15152, 14818, 10891, 14429, 15334, 12609, 13661, 14380, 14429, + 15235, 12891, 12516, 13803, 14302, 13926, 14361, 15141, 14492, 14630, 13694, 15018, 13256, 14412, 14184, 14555, + 15016, 14257, 14495, 14148, 14570, 13319, 15169, 12711, 14371, 14993, 13813, 12538, 14997, 14861, 15193, 14736, + 14850, 13029, 14177, 15229, 14818, 13569, 13882, 14725, 13966, 14946, 13791, 15108, 11432, 14445, 11595, 13988, + 15283, 13409, 12407, 12135, 15187, 15339, 14282, 15035, 12978, 14306, 14404, 14859, 14338, 13535, 10517, 15324, + 13911, 14587, 14338, 15074, 13076, 14566, 14621, 14444, 14832, 13622, 11182, 14552, 15308, 14269, 14726, 12637, + 13504, 14826, 13816, 13790, 13480, 15038, 15183, 14511, 14781, 14688, 10941, 14618, 13681, 15059, 15338, 13181, + 15359, 15202, 14087, 11400, 14998, 10675, 14181, 13456, 14632, 15282, 11306, 12694, 14297, 15198, 14827, 15234, + 14529, 14798, 12579, 15092, 14859, 14419, 13541, 15345, 14380, 13978, 14658, 15288, 15008, 14733, 15078, 15102, + 14346, 15077, 12457, 14880, 14468, 12311, 14827, 13801, 13408, 13066, 9924, 14922, 11749, 15297, 14236, 14400, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14633, 12096, 14115, 13971, 11429, 14833, 9432, 10395, 11319, 14428, 14843, 14232, 12548, 7919, 15088, 14305, + 14813, 14686, 14342, 13174, 14400, 14498, 12616, 9064, 12837, 13379, 15281, 14225, 13027, 11927, 14870, 14120, + 14831, 15038, 13931, 13712, 14883, 14616, 14223, 13614, 13670, 14720, 13492, 13209, 11716, 14816, 12861, 14087, + 13979, 14569, 15068, 14409, 14972, 13483, 11160, 12313, 13228, 14602, 14673, 11905, 
15025, 10021, 15117, 15153, + 13462, 15339, 14120, 10311, 15128, 11470, 12619, 13442, 13318, 14462, 14980, 13346, 10339, 14495, 13259, 14816, + 12911, 13893, 14669, 15124, 14457, 14563, 13906, 15166, 11106, 14892, 14597, 15320, 12485, 14134, 13921, 12826, + 14758, 14909, 14365, 12603, 13223, 13127, 15191, 14982, 12878, 14189, 14970, 15115, 12713, 13664, 14020, 14562, + 15122, 15343, 11395, 11082, 14740, 12097, 15132, 14941, 14863, 14428, 12155, 9555, 13747, 15117, 9091, 11442, + 14680, 14657, 15288, 14832, 15327, 14530, 14762, 11369, 12046, 14239, 14444, 14427, 13630, 14440, 14185, 15358, + 14596, 13507, 12182, 15128, 15258, 13328, 13382, 14445, 15311, 14266, 14393, 14805, 15317, 15329, 14980, 13958, + 14426, 14342, 14592, 14623, 14516, 15332, 15033, 14530, 14147, 14488, 15019, 15210, 14461, 14918, 15027, 15086, + 14973, 14397, 14615, 14523, 14954, 14492, 14660, 15128, 14428, 13198, 15025, 11796, 13754, 10344, 12936, 13814, + 13428, 12732, 14407, 13246, 14762, 10268, 14164, 13468, 14436, 11718, 14979, 14861, 13928, 14641, 14702, 12458, + 14620, 14603, 10232, 14343, 15360, 12329, 14416, 8166, 11019, 15011, 14075, 14114, 14414, 14717, 14340, 15139, + 15240, 14814, 14748, 14476, 14488, 11043, 14510, 12898, 13609, 12786, 13935, 14567, 12992, 11696, 15028, 14636, + 14751, 15252, 14273, 13307, 13844, 11071, 14553, 12454, 11382, 13440, 14607, 15161, 13849, 14990, 13858, 14570, + 14131, 15356, 12642, 13217, 11878, 14944, 10750, 14647, 13837, 14591, 14343, 12894, 14669, 14586, 14970, 12816, + 11248, 15353, 14566, 14838, 12767, 14363, 12677, 15029, 14034, 10915, 14965, 14393, 13749, 14888, 12912, 14157, + 13838, 12290, 12232, 13419, 14643, 14877, 14879, 13383, 14649, 13527, 14441, 15160, 13934, 13328, 14586, 13407, + 13475, 11751, 15161, 14851, 14579, 12361, 15143, 11779, 14347, 15340, 15178, 14467, 13465, 8601, 14510, 11762, + 13531, 13428, 13998, 11058, 13442, 15027, 11686, 14933, 15052, 15124, 14766, 11392, 15291, 14762, 9723, 14811, + 15193, 10349, 13312, 13594, 14974, 
15282, 13357, 13630, 14902, 6814, 14823, 12348, 15254, 11617, 14628, 14255, + 14730, 14654, 14792, 13820, 14659, 13421, 12091, 12595, 14134, 14844, 13339, 11345, 14182, 13778, 14931, 15042, + 13958, 14068, 15257, 15093, 14847, 13486, 13739, 14538, 15130, 14807, 12499, 14800, 10530, 14715, 14722, 14937, + 14940, 15320, 13399, 15070, 14769, 15077, 12688, 14882, 13834, 13685, 6802, 13742, 14331, 15268, 15315, 15102, + 14687, 14984, 13290, 13514, 13110, 14748, 12588, 14708, 12295, 11914, 14968, 15328, 14022, 14892, 14949, 13417, + 14573, 14701, 14468, 7623, 15360, 14885, 14298, 14867, 13729, 12630, 10966, 14239, 15346, 11740, 12908, 14318, + 14697, 13947, 14003, 15118, 15315, 13408, 15097, 15062, 13466, 14631, 15271, 14338, 14774, 13913, 15192, 11323, + 15193, 13226, 12455, 14878, 13991, 14922, 9979, 14498, 13985, 15328, 13769, 13469, 14753, 13777, 13021, 14817, + 14427, 14805, 15338, 14357, 14424, 14575, 13141, 15068, 15301, 13172, 13389, 14128, 13845, 13948, 13164, 6953, + 11920, 14451, 12594, 13751, 14620, 13028, 11517, 14912, 14340, 14374, 13897, 14445, 14212, 14758, 15026, 11242, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14774, 10696, 11497, 14888, 14307, 10577, 14724, 14163, 14063, 14944, 14811, 11742, 13451, 15307, 14621, 14696, + 11644, 13284, 14089, 12359, 14499, 13580, 12784, 15080, 12547, 15254, 13663, 13678, 13441, 14072, 13255, 15329, + 14373, 14519, 15027, 13187, 14387, 11512, 14306, 13573, 14483, 14117, 12714, 14456, 15265, 14567, 14418, 10373, + 15274, 14673, 14411, 12520, 14462, 12592, 14885, 13104, 11688, 14095, 8674, 14517, 12548, 14583, 15064, 13103, + 13427, 13123, 10293, 13432, 14966, 12557, 14568, 12131, 13572, 12341, 13878, 13997, 14997, 13166, 14640, 14879, + 12277, 10751, 14927, 13409, 14429, 14919, 15264, 14671, 14425, 14677, 13876, 13591, 13313, 11291, 14313, 13999, + 14107, 13828, 15272, 14484, 14103, 13717, 14366, 13275, 14444, 14996, 12885, 13558, 14974, 14438, 12789, 14954, + 13587, 11654, 13805, 15267, 12551, 15187, 12325, 
12852, 14838, 14401, 15213, 15251, 14928, 14501, 15157, 10234, + 14818, 12091, 14329, 10152, 14803, 15328, 15142, 10653, 12672, 12911, 14842, 14444, 8237, 14515, 12908, 14428, + 14433, 15114, 14777, 14666, 13371, 14986, 11184, 14102, 13957, 15101, 11326, 13973, 14778, 14560, 14038, 14695, + 14095, 15143, 13917, 14689, 15251, 14533, 14213, 14681, 13595, 14483, 14818, 14307, 14756, 14408, 14375, 14778, + 15238, 15167, 12734, 11814, 12876, 13531, 14709, 11673, 13621, 15265, 14342, 13895, 14828, 15154, 15105, 14826, + 14856, 12222, 14474, 14890, 13533, 14549, 15012, 15206, 6181, 15025, 13511, 14215, 14497, 14852, 14441, 15245, + 12786, 14495, 13347, 12467, 12646, 15206, 14573, 13597, 12273, 13346, 11284, 14446, 12268, 13935, 15023, 12175, + 14322, 12500, 14429, 14175, 14375, 14225, 14464, 10470, 13521, 14523, 13100, 13429, 14254, 12366, 15032, 12712, + 14670, 15326, 8883, 13942, 14724, 14433, 12196, 14995, 14129, 15098, 14725, 14151, 14591, 15325, 10495, 12650, + 15086, 13387, 13799, 14354, 15048, 14023, 15143, 14346, 14518, 14516, 14706, 14400, 13687, 13830, 14554, 14444, + 15151, 14423, 8487, 14858, 13453, 14810, 14534, 13427, 13325, 14918, 15296, 15080, 14842, 14704, 14026, 13991, + 14816, 15045, 14649, 14902, 14866, 14437, 14425, 14023, 15283, 14573, 14357, 11566, 15055, 14607, 12724, 12836, + 13747, 14717, 14463, 15013, 14579, 12294, 14879, 15221, 14948, 15299, 13466, 10446, 15047, 13726, 13818, 15354, + 12399, 15264, 15216, 15054, 14677, 14375, 14636, 13466, 13989, 15181, 14871, 14873, 13049, 15052, 13506, 13462, + 15236, 14844, 13027, 14467, 12319, 12825, 10661, 15270, 14720, 14841, 14546, 13485, 15219, 14136, 13957, 15203, + 14742, 14070, 14410, 14587, 14104, 13831, 14027, 15156, 14349, 14418, 12925, 14412, 13995, 15287, 13683, 15192, + 14010, 15253, 14249, 14898, 13838, 14861, 14553, 13990, 14401, 13981, 13611, 13579, 15156, 13974, 13554, 14389, + 14737, 14357, 14935, 14470, 12365, 13978, 12607, 14331, 12056, 14406, 11662, 12894, 14534, 14367, 14090, 14528, + 
14863, 13964, 15341, 14708, 15256, 14637, 12783, 14807, 12723, 14545, 14097, 14443, 13578, 15235, 14376, 10492, + 14257, 14517, 13074, 13956, 15012, 12834, 14665, 15113, 14075, 14663, 11274, 12808, 14613, 13731, 9269, 13759, + 13818, 14939, 13577, 15076, 14060, 14401, 14793, 14857, 14301, 13712, 12022, 15027, 14451, 12693, 13091, 14102, + 14541, 14394, 12319, 14442, 13945, 14174, 15296, 13737, 12505, 12070, 14032, 15111, 14664, 15028, 15205, 15261, + 10754, 13607, 12081, 14304, 14120, 13452, 14387, 12740, 14213, 14911, 10828, 11950, 12310, 14133, 10991, 14903, + 14665, 14359, 14769, 14239, 14176, 14811, 15169, 14471, 14257, 14658, 14671, 10205, 11993, 13955, 15301, 15308, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13826, 14558, 14819, 12988, 14957, 14400, 13686, 14618, 13328, 14431, 13607, 14661, 14936, 14149, 12825, 13775, + 15105, 14833, 14258, 13959, 14433, 14944, 14866, 13562, 12613, 14804, 13340, 12922, 13934, 15105, 14797, 15133, + 15333, 12573, 12382, 15063, 14096, 14016, 13757, 14299, 12979, 14479, 14525, 14495, 14297, 15025, 14322, 13354, + 15353, 15071, 14392, 13362, 15358, 14349, 15337, 15269, 14668, 15329, 12969, 12346, 14223, 13431, 14558, 13445, + 15137, 10664, 12592, 14963, 14986, 12031, 14361, 15034, 15017, 14861, 14407, 15341, 13711, 14400, 15313, 14709, + 8940, 15160, 10954, 13437, 13989, 14458, 14346, 14929, 10392, 14398, 15291, 12367, 14418, 12551, 14450, 14488, + 14916, 14416, 12284, 13667, 13726, 14808, 14886, 12011, 14410, 14884, 14709, 15335, 12206, 14610, 14247, 14466, + 15182, 14639, 13531, 14698, 13701, 13845, 11439, 14604, 15190, 9397, 13368, 14343, 14534, 14046, 15277, 14000, + 14977, 10428, 14737, 14760, 14594, 13517, 14970, 15112, 13351, 11824, 15183, 13677, 15109, 12718, 9219, 15136, + 11343, 9799, 12893, 14379, 15119, 14656, 14713, 14743, 13445, 15284, 14671, 10449, 14915, 11675, 13835, 15073, + 13314, 14800, 15093, 13773, 14372, 14477, 14362, 15249, 13173, 14433, 15222, 14375, 12764, 10838, 13635, 14860, + 14606, 11191, 
13448, 12967, 15126, 14826, 14981, 14958, 15111, 11041, 13992, 11696, 14786, 13659, 15300, 12961, + 13953, 14445, 12547, 14996, 13247, 14583, 15063, 15331, 15349, 15059, 10628, 14658, 14949, 13345, 13287, 6968, + 13684, 15181, 12586, 14444, 15134, 12724, 13452, 14371, 15022, 12886, 14345, 9328, 14528, 13394, 14302, 15294, + 15292, 12829, 15314, 14816, 14267, 12337, 13664, 13757, 15094, 14433, 14262, 15222, 15285, 15004, 14089, 11922, + 14744, 14025, 15242, 14988, 13491, 15083, 13125, 11604, 13901, 10310, 10571, 12334, 13386, 10272, 13301, 15064, + 14576, 10482, 14958, 11616, 14554, 14794, 13244, 14444, 14136, 14047, 14854, 14623, 15082, 13854, 13204, 14768, + 13330, 12857, 14109, 10505, 15276, 13856, 13383, 15325, 14371, 13744, 14888, 13009, 13484, 13659, 14535, 13033, + 13440, 13479, 13830, 14471, 15209, 12633, 13784, 12862, 13615, 13439, 11794, 13377, 13452, 15347, 14386, 14979, + 12796, 12501, 13963, 13629, 15051, 14902, 15350, 12400, 14509, 13238, 14809, 15239, 15104, 13226, 15030, 12257, + 13550, 10249, 14165, 15016, 15044, 15211, 15160, 13653, 12291, 10955, 15055, 8800, 13594, 14758, 15046, 14590, + 13394, 14336, 13651, 15016, 15041, 13545, 15152, 13812, 14193, 12873, 12956, 13936, 14021, 14434, 14928, 15053, + 14763, 14622, 14896, 15301, 15050, 13302, 11064, 14745, 12485, 13010, 14215, 14190, 14598, 11413, 15059, 14840, + 15262, 13948, 12620, 14478, 13647, 12444, 14592, 14162, 14271, 13440, 15353, 13803, 8540, 14138, 14681, 14784, + 13467, 13517, 14795, 14727, 15021, 14633, 11536, 13062, 15182, 15136, 13729, 12616, 14904, 15062, 13574, 15183, + 14575, 15177, 14324, 12463, 14350, 13541, 14136, 14595, 13447, 13667, 12860, 14883, 8592, 10946, 14763, 15303, + 15345, 13778, 13625, 14840, 9883, 14114, 15209, 14145, 15115, 12188, 15200, 13942, 15005, 12946, 14307, 13028, + 14061, 14396, 13036, 14599, 15145, 15179, 11650, 14069, 5960, 12265, 6008, 15274, 14211, 14426, 11462, 13629, + 14997, 14795, 14372, 14979, 14710, 13970, 14242, 14238, 14428, 12309, 14079, 14731, 
13716, 12461, 14103, 14672, + 15270, 14582, 11006, 14795, 10261, 15022, 13456, 10990, 14975, 14182, 11774, 14543, 14296, 14577, 12172, 15197, + 14874, 13630, 14721, 14789, 14983, 14679, 8074, 15054, 14748, 14095, 12765, 14911, 15031, 12726, 13938, 14928, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14869, 14464, 13048, 15321, 14165, 14471, 15178, 14458, 14065, 13189, 15082, 11794, 13946, 15057, 14106, 12556, + 12800, 14904, 13405, 15042, 14837, 14805, 15288, 14200, 14559, 14015, 14732, 13770, 13548, 9506, 13812, 14514, + 14774, 13596, 15240, 14154, 14763, 14776, 11326, 15072, 12365, 14618, 12291, 13529, 14645, 14881, 14303, 12724, + 14721, 15261, 13305, 9595, 13349, 15174, 14179, 15000, 14621, 13367, 14865, 14095, 12241, 14631, 13502, 14964, + 14076, 12994, 12775, 14553, 14754, 12972, 14178, 14496, 15292, 13452, 15024, 14505, 14176, 14770, 12292, 14789, + 14490, 14727, 14735, 14427, 15345, 14713, 14383, 13684, 14334, 12964, 12167, 12637, 15330, 14924, 15036, 14548, + 14465, 11265, 9254, 14577, 14937, 15135, 14963, 10080, 9216, 12579, 12549, 14339, 14805, 13225, 15103, 15231, + 12600, 14236, 12448, 14538, 14025, 14585, 14354, 14734, 12476, 15154, 14948, 14140, 15315, 9922, 15067, 14953, + 13522, 15237, 14907, 13558, 11370, 14899, 14456, 13350, 11326, 14597, 14251, 13013, 14686, 14832, 14892, 14019, + 11530, 14562, 12312, 14723, 13665, 14479, 14093, 14238, 14112, 9318, 15011, 13937, 14942, 15305, 14487, 13004, + 13728, 14388, 13848, 14156, 14883, 12481, 14363, 14664, 13576, 14540, 14525, 12863, 13897, 14980, 12519, 10133, + 14491, 14279, 15303, 14788, 13938, 14218, 13993, 12747, 14766, 14758, 15134, 11550, 13267, 15176, 14390, 13032, + 7701, 12821, 14612, 14240, 13172, 14106, 14524, 11571, 14674, 14203, 14836, 14982, 15277, 14391, 14544, 15275, + 13172, 8810, 13700, 13263, 10573, 13434, 14279, 12949, 15185, 14524, 12222, 9011, 14474, 14669, 13270, 12139, + 14873, 15351, 10338, 11195, 10468, 12922, 12852, 15329, 13019, 15216, 14025, 12075, 14664, 10601, 14565, 
14456, + 13911, 15072, 14912, 14966, 12650, 14579, 14821, 14229, 14870, 13792, 15332, 14001, 14882, 14616, 15217, 15235, + 13333, 13661, 14636, 9404, 15318, 15017, 15227, 14077, 11937, 15058, 8858, 14339, 11505, 14795, 15063, 14568, + 14573, 13913, 13482, 8317, 14624, 15180, 15047, 13804, 15037, 14885, 13830, 13054, 15277, 8931, 14791, 14553, + 14583, 13324, 13881, 12596, 14734, 14694, 13115, 14394, 9514, 13875, 15347, 12064, 14342, 11916, 13715, 13902, + 13801, 14774, 12740, 14707, 13822, 15298, 14155, 14852, 14043, 15250, 13527, 14589, 12808, 14671, 12719, 13862, + 14930, 14423, 13174, 14918, 14393, 14673, 13466, 11384, 13939, 11986, 15010, 14063, 13568, 14347, 15136, 14448, + 14350, 14946, 14852, 11392, 13839, 11065, 14588, 13645, 15071, 14341, 14562, 14347, 14474, 14600, 14828, 13689, + 15183, 11950, 11471, 12894, 15168, 15281, 14024, 13250, 14944, 14714, 13405, 14627, 14129, 13354, 9382, 10852, + 10867, 15345, 11935, 10035, 13619, 15148, 11569, 11920, 15204, 14509, 11898, 14700, 11104, 15085, 11813, 13805, + 12699, 14409, 12223, 12384, 13636, 14366, 13286, 12238, 13361, 14855, 13820, 12140, 13632, 14112, 14398, 15269, + 14416, 13336, 14574, 15311, 14875, 13113, 15139, 14909, 13747, 14339, 14855, 13676, 15212, 14025, 14557, 12556, + 14405, 13626, 11728, 14451, 14560, 14291, 13838, 12243, 12702, 14666, 14894, 15155, 13828, 13482, 13260, 15114, + 12522, 12589, 14429, 14321, 11816, 14720, 14901, 15091, 14708, 15102, 13833, 14262, 14833, 13741, 14068, 12569, + 14959, 14941, 12067, 13117, 11035, 14525, 14822, 15154, 12193, 12353, 12062, 15253, 7524, 11820, 15269, 14204, + 14779, 14729, 14520, 14892, 14716, 11635, 12328, 14415, 15153, 14087, 14244, 12310, 11970, 14913, 14371, 14692, + 14503, 12341, 14057, 12766, 14089, 13462, 14524, 10619, 15141, 12732, 14778, 12784, 13937, 10271, 13962, 15253, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13703, 14389, 15357, 14609, 13507, 12798, 7127, 11317, 12473, 10181, 13934, 14768, 9166, 14686, 14612, 14553, + 14405, 
15088, 14868, 14786, 13899, 14672, 13778, 14107, 14426, 11742, 15192, 13051, 15294, 15066, 12955, 14408, + 12938, 13429, 14812, 13719, 14230, 13673, 15331, 15249, 12350, 12389, 13700, 15096, 11518, 12991, 13629, 13212, + 13903, 13330, 14984, 13370, 14435, 14623, 14816, 13469, 14498, 14406, 8639, 12049, 14655, 11804, 15118, 14408, + 14352, 12358, 15207, 9121, 11664, 9385, 13938, 14900, 13038, 11285, 11678, 13124, 14488, 14949, 14965, 15304, + 13899, 12719, 15145, 15224, 12562, 15133, 13413, 15089, 14601, 14925, 14932, 13333, 12989, 14941, 11314, 14486, + 13839, 12928, 14683, 13609, 13802, 13812, 9200, 14860, 15096, 12380, 14703, 14657, 12983, 14806, 12310, 14985, + 12573, 14206, 13846, 15246, 14010, 14226, 13598, 13837, 14150, 15255, 15338, 14668, 13606, 13786, 14572, 11525, + 14858, 15018, 14807, 15229, 15316, 13853, 15130, 14369, 13994, 15006, 14406, 15073, 14783, 14370, 15032, 14014, + 4925, 14985, 14101, 13339, 15075, 13804, 14547, 14464, 14298, 13471, 11166, 14144, 13830, 14830, 14440, 13469, + 14996, 10658, 15141, 15300, 12828, 11910, 15233, 14074, 14825, 10296, 6472, 13301, 14411, 13930, 13658, 13691, + 14457, 13385, 12395, 13480, 15240, 14183, 14804, 13111, 12161, 13433, 13397, 11153, 13484, 13262, 14721, 15101, + 12967, 13096, 15050, 14258, 13298, 13745, 13910, 13163, 13595, 15296, 14917, 15165, 13876, 14900, 13580, 13654, + 15324, 14953, 15001, 13261, 14929, 10310, 15148, 14903, 14690, 13676, 14771, 12151, 12725, 13397, 9584, 13397, + 14885, 14535, 13590, 14330, 13784, 11299, 13677, 13268, 15053, 15359, 13674, 14833, 12385, 15337, 14208, 15157, + 14540, 14971, 14463, 14055, 12399, 14457, 14490, 14423, 6364, 13283, 13591, 13908, 13477, 13892, 10530, 14196, + 14071, 14759, 15199, 15175, 12198, 12898, 15348, 11350, 10184, 15169, 14391, 14836, 15240, 14585, 11593, 13293, + 14629, 11089, 14511, 12171, 14492, 14863, 14229, 14680, 15245, 13817, 14374, 14489, 14934, 12806, 14636, 14843, + 13870, 12783, 14715, 13506, 15174, 14376, 13423, 13738, 12444, 14558, 12865, 
14347, 15335, 13690, 13522, 14786, + 14716, 12440, 14663, 14844, 14773, 9077, 12870, 11268, 13449, 14771, 14844, 13446, 13647, 14683, 13705, 13822, + 15140, 12749, 15121, 15204, 13774, 14670, 14668, 14565, 13758, 13730, 14424, 13741, 13231, 13698, 14174, 11559, + 13599, 14945, 10707, 14937, 13208, 15255, 14871, 14720, 13320, 10999, 13556, 14846, 10317, 14804, 14698, 15150, + 13892, 12602, 14749, 15291, 11603, 14435, 14509, 14731, 15268, 13587, 14042, 14476, 14818, 14939, 14748, 13206, + 14655, 12468, 14429, 14518, 13369, 12260, 14974, 15254, 13466, 14908, 13545, 15052, 12978, 14428, 14879, 14056, + 14791, 13883, 8417, 14086, 11600, 14734, 14842, 15056, 13241, 11687, 12201, 15112, 14901, 15191, 13983, 14748, + 13608, 14968, 11884, 14754, 15298, 14973, 13208, 14392, 12509, 15216, 14594, 12750, 12442, 14654, 8887, 15126, + 15275, 15196, 15133, 14631, 12924, 15214, 15231, 12469, 15163, 14886, 13209, 14857, 13094, 14757, 10118, 13482, + 12457, 15280, 15191, 12659, 14979, 10039, 15302, 14040, 15152, 14613, 14368, 15052, 7702, 15188, 12011, 15295, + 12240, 13213, 14653, 14876, 14278, 13345, 15178, 15162, 15144, 14372, 13511, 11769, 14226, 15322, 14012, 14255, + 15099, 15249, 14689, 12919, 13908, 12514, 13463, 11246, 15298, 11002, 14798, 14681, 9376, 15205, 15054, 14394, + 15204, 14080, 10622, 13402, 15006, 15167, 14702, 13939, 14354, 13869, 11458, 13285, 14710, 13428, 15164, 14765, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15068, 15323, 14681, 13548, 15335, 15263, 14452, 14390, 15172, 15146, 13595, 10439, 11487, 13501, 15019, 14596, + 10732, 14678, 14431, 13899, 14042, 12729, 15029, 14491, 14984, 14901, 13832, 9329, 15243, 12170, 14658, 15279, + 15357, 14441, 13583, 15282, 12512, 14152, 14992, 12920, 11432, 13789, 12309, 11671, 15117, 14427, 13272, 11178, + 14031, 14744, 13344, 14939, 15199, 15014, 11916, 14529, 15187, 14468, 14870, 13745, 15204, 14232, 12588, 12287, + 14771, 14219, 14873, 14458, 13407, 8415, 14643, 13754, 15236, 15176, 13704, 12266, 14775, 
14766, 13609, 10730, + 13740, 14521, 15354, 14516, 14693, 10414, 11588, 15132, 13881, 15354, 14552, 14913, 14830, 14537, 15189, 14608, + 14705, 14484, 13013, 13738, 13998, 14919, 14976, 15234, 11936, 13260, 13261, 14576, 14725, 15335, 15300, 14602, + 13185, 13996, 14956, 13931, 14093, 14721, 14044, 14652, 15086, 12694, 12117, 14414, 14041, 15146, 14382, 11595, + 13828, 12504, 15160, 15236, 15088, 13170, 15254, 15086, 11902, 14226, 14445, 15348, 15218, 14063, 13379, 13255, + 14968, 14077, 9896, 15116, 14713, 10682, 13433, 14524, 15358, 15069, 15167, 14675, 15288, 14473, 9509, 10959, + 11853, 13742, 12502, 14591, 14779, 14374, 12589, 14430, 15011, 13495, 13969, 14488, 14595, 13054, 12588, 14097, + 12787, 11270, 11327, 14427, 9596, 13774, 14524, 14488, 15046, 14663, 13406, 14576, 14093, 14342, 15286, 14650, + 13071, 14216, 14870, 11484, 11818, 13472, 14018, 14590, 14654, 14050, 13534, 14611, 13152, 14750, 13006, 15123, + 13266, 14259, 15053, 13472, 13898, 15258, 15107, 12516, 14833, 14551, 11824, 11692, 10772, 12845, 14651, 13355, + 14194, 7340, 12994, 7635, 14559, 14401, 15101, 11995, 14869, 11906, 14659, 14950, 10345, 11481, 11671, 15243, + 13809, 15075, 15209, 11382, 15318, 14605, 10374, 14656, 14784, 14056, 13719, 14638, 14897, 13245, 15223, 14995, + 14903, 14925, 13112, 15073, 15073, 15021, 12597, 15350, 9024, 14790, 12986, 14229, 14765, 13021, 15236, 12624, + 11126, 14815, 13124, 11843, 14994, 10524, 14922, 13076, 13394, 14614, 15058, 14349, 11430, 14686, 14919, 13633, + 14790, 13423, 14598, 14675, 13694, 13787, 12274, 14260, 11583, 13380, 13426, 12401, 11267, 15311, 14742, 15215, + 9595, 13786, 12602, 14856, 14785, 14738, 14311, 12707, 14501, 14582, 13607, 13323, 14156, 15115, 11588, 15100, + 14805, 15186, 14098, 10408, 13179, 13165, 12900, 15129, 14569, 14500, 14574, 14311, 13574, 12820, 13975, 14707, + 14687, 11370, 10558, 15038, 14426, 14599, 14631, 12818, 14799, 14054, 11866, 15039, 12306, 10307, 14684, 11834, + 9403, 11437, 14648, 15074, 14955, 15200, 
15173, 13605, 15262, 14381, 14504, 14706, 14354, 14743, 15337, 13409, + 14268, 14789, 14944, 14568, 12170, 13883, 11447, 13104, 10327, 13366, 13485, 15263, 14420, 14988, 14463, 12294, + 14837, 14633, 13237, 14020, 12941, 11567, 12943, 12383, 14595, 11361, 14394, 13062, 14608, 13039, 14528, 14863, + 11278, 15322, 13976, 11609, 13768, 14365, 11604, 13060, 14791, 15337, 12749, 14914, 15285, 10445, 14254, 14709, + 14777, 13562, 15016, 14809, 14390, 14800, 11768, 14780, 10247, 14129, 13263, 15312, 11437, 14098, 9705, 14547, + 14482, 13844, 14617, 11447, 14689, 14040, 9593, 13796, 14328, 15214, 15107, 12752, 13656, 13108, 13414, 14289, + 14371, 14489, 14929, 15131, 8191, 14299, 15217, 12381, 14814, 12436, 14757, 15321, 13325, 15271, 13998, 14568, + 15015, 13968, 15105, 14117, 14382, 14825, 14978, 15082, 13132, 14496, 14136, 12642, 15169, 13368, 13330, 14832, + 10579, 13145, 14858, 13637, 14285, 15162, 13869, 14862, 14771, 14729, 11524, 14575, 14182, 14030, 14736, 14840, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13624, 15219, 15248, 14965, 15060, 15010, 13930, 15194, 12830, 14422, 14830, 15237, 13611, 13620, 13647, 11235, + 12721, 15095, 12673, 14605, 14997, 13016, 12163, 13819, 14467, 11807, 14527, 12291, 15243, 12578, 15091, 13726, + 13384, 14376, 14716, 12223, 12235, 14832, 12210, 11776, 13509, 15227, 14195, 13535, 15291, 15102, 11448, 15113, + 10803, 10303, 13793, 14396, 15062, 15117, 13010, 15326, 9846, 14612, 14329, 12856, 15285, 15201, 15345, 15235, + 15001, 14679, 13208, 13688, 14351, 12979, 11526, 14595, 13734, 14689, 12398, 14682, 14396, 15209, 11280, 14779, + 13183, 13543, 13745, 15148, 12792, 14050, 15274, 14644, 14806, 15147, 14733, 14071, 11773, 11541, 15325, 14506, + 12335, 14768, 14983, 13610, 15357, 13141, 14685, 14214, 12797, 12342, 13386, 14986, 15346, 15291, 14202, 15196, + 15246, 14830, 14969, 13360, 14089, 12898, 13476, 11673, 10807, 14838, 13520, 13466, 14693, 14963, 11844, 12761, + 14504, 14724, 14947, 13740, 15234, 15091, 7194, 15065, 
14590, 14707, 14749, 5073, 14503, 14474, 11454, 14668, + 10188, 13698, 14965, 14368, 13231, 12985, 14669, 14546, 13316, 11003, 15198, 10440, 14237, 12337, 15004, 13679, + 14118, 15351, 15011, 13250, 14965, 13119, 14695, 14839, 13717, 15021, 14399, 14391, 14016, 15065, 11456, 15173, + 15143, 12510, 13078, 13523, 12696, 12475, 15034, 12986, 13311, 15095, 14617, 14512, 14924, 15033, 14022, 14667, + 13972, 10315, 13957, 11762, 14519, 12738, 11713, 13805, 13776, 14075, 13017, 13693, 15306, 13871, 13714, 14722, + 14558, 10367, 14406, 14442, 14412, 13807, 14965, 12142, 12032, 14047, 14323, 14402, 14032, 15062, 9547, 15305, + 14386, 14355, 12711, 12453, 15122, 11737, 9868, 9404, 8654, 13810, 11623, 14351, 14662, 9434, 14048, 14453, + 14929, 13726, 13899, 12514, 13635, 13615, 15060, 12259, 14948, 13234, 13610, 14556, 15331, 12515, 15072, 11554, + 14101, 13582, 14605, 12639, 14676, 14604, 15274, 14545, 14700, 14400, 9641, 14999, 11556, 15043, 11083, 14104, + 14386, 12375, 12995, 13416, 14927, 15252, 15085, 14730, 15341, 14234, 9350, 15060, 14652, 13643, 13215, 10203, + 12977, 15356, 14228, 14781, 14448, 13935, 14956, 14205, 14368, 15054, 15304, 14781, 10657, 14554, 13900, 14452, + 13863, 13466, 12423, 13130, 15163, 11731, 14533, 14894, 12325, 14494, 14009, 15151, 12925, 14477, 14835, 15222, + 14077, 14784, 13705, 13474, 14610, 14458, 14355, 14702, 14786, 12446, 13750, 13968, 13497, 12855, 11803, 14628, + 12309, 14529, 14802, 14758, 13469, 14231, 14884, 14635, 14696, 9437, 12615, 8456, 15133, 14727, 14499, 14248, + 11706, 15003, 13853, 14528, 13639, 15201, 13439, 10531, 13316, 14663, 14413, 11985, 13562, 12773, 15208, 13928, + 14146, 14797, 9880, 11792, 14764, 15229, 14669, 14314, 14915, 14729, 14855, 13969, 11660, 10718, 14496, 14388, + 13227, 14170, 14723, 13123, 10553, 14819, 15063, 14600, 14569, 12020, 13081, 15275, 15235, 13644, 13407, 12307, + 12354, 13318, 14845, 12191, 13135, 14179, 13987, 12175, 13546, 14929, 12261, 12478, 14068, 14602, 11931, 13139, + 14906, 14997, 
14764, 12458, 14770, 12595, 14593, 15092, 14573, 12408, 14569, 12626, 13585, 9730, 15271, 14099, + 15323, 14235, 14347, 13916, 13526, 12626, 4843, 13964, 14462, 15147, 13742, 14465, 15343, 13473, 14725, 11858, + 13906, 14243, 11319, 12066, 13485, 13712, 15240, 12487, 14136, 12509, 14870, 13362, 14005, 15272, 14363, 13714, + 14422, 15141, 14918, 11091, 14222, 14809, 13447, 13807, 14797, 13252, 14985, 14648, 14804, 15332, 14268, 12289, + 14421, 14889, 14216, 14934, 14891, 14272, 12013, 14294, 13501, 14046, 13595, 12407, 14815, 13861, 14917, 14472, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13537, 13308, 14640, 15148, 15034, 14107, 14178, 14971, 15062, 14689, 14087, 14869, 15071, 13326, 11647, 14732, + 15127, 14809, 14949, 14381, 12504, 13193, 13002, 12799, 14962, 13657, 10077, 14056, 12819, 14915, 14919, 12232, + 13289, 14861, 13851, 14027, 14182, 13716, 14593, 10044, 14614, 15248, 14593, 4916, 13955, 12799, 11306, 14428, + 14533, 12502, 15031, 15285, 13372, 14418, 14099, 14479, 12259, 11665, 13712, 13578, 14538, 13574, 14768, 13431, + 13558, 13296, 14159, 7712, 13937, 15047, 6603, 14638, 14787, 15185, 14661, 13100, 14816, 13365, 14185, 14742, + 15285, 13335, 14104, 12570, 14599, 14137, 14520, 11365, 12667, 7907, 14115, 14945, 14848, 11701, 15022, 14795, + 14895, 14556, 14678, 14957, 11519, 14121, 14699, 10350, 14075, 13752, 12190, 12278, 15205, 13492, 14934, 11438, + 13212, 13807, 14459, 12631, 10293, 13520, 15127, 11775, 15109, 14629, 14842, 14325, 14465, 13558, 15010, 13803, + 14643, 10557, 14667, 13447, 14523, 13688, 12757, 14386, 11731, 14541, 14117, 15042, 10056, 12985, 13616, 8243, + 15344, 13323, 14677, 14744, 15155, 14935, 14482, 14885, 13472, 9380, 14745, 15337, 13099, 14818, 11235, 13413, + 15112, 14182, 12813, 14332, 14958, 13170, 15205, 15230, 11066, 15054, 14337, 12766, 15359, 12418, 14437, 14934, + 13930, 14445, 11758, 11151, 15070, 13375, 14845, 14640, 14535, 14063, 12811, 13791, 14659, 14855, 15269, 14759, + 14590, 14903, 15111, 12959, 
14816, 15175, 13615, 14527, 14460, 13587, 13475, 13530, 13578, 14655, 14145, 13091, + 15283, 13338, 15283, 14787, 13546, 13712, 11718, 14405, 13876, 15129, 13036, 11492, 14549, 13751, 11630, 15334, + 14550, 13560, 10352, 14345, 12805, 15336, 13736, 14685, 14177, 13255, 14140, 15219, 12555, 14448, 13703, 13956, + 15171, 14701, 14203, 13357, 14545, 14625, 13522, 14356, 13543, 11589, 13550, 14519, 14323, 15206, 15103, 14940, + 14358, 14954, 13572, 13673, 14714, 7564, 9717, 14139, 12563, 14969, 12041, 15183, 14301, 14382, 14828, 15130, + 14505, 14363, 10760, 13472, 12977, 15338, 15199, 14584, 14846, 14324, 12027, 15346, 14523, 13870, 13064, 13332, + 13485, 14701, 13714, 13091, 14463, 14509, 14562, 11792, 15151, 15324, 12958, 13065, 15243, 13239, 15350, 12792, + 13439, 15105, 15022, 11235, 15296, 13945, 14790, 14394, 13123, 15251, 14424, 13850, 15040, 10156, 14728, 14653, + 13784, 13583, 14000, 15004, 15110, 15241, 14464, 12955, 14795, 15179, 15057, 14548, 12142, 14204, 14339, 14852, + 11507, 15108, 13474, 13761, 13802, 14374, 14918, 14610, 13878, 15338, 11663, 13099, 13066, 14252, 15254, 14581, + 13388, 13708, 13940, 12384, 15192, 14525, 15316, 14766, 12479, 15227, 7297, 12960, 15157, 12697, 13079, 15121, + 12680, 14262, 14308, 10448, 14260, 8657, 13722, 14656, 14369, 14400, 14838, 13624, 13529, 8447, 9413, 14858, + 11964, 13605, 12390, 14508, 13340, 15181, 15235, 13919, 14151, 14204, 15075, 15330, 13339, 9388, 15057, 14477, + 14029, 14185, 15295, 12946, 12571, 15094, 14492, 12319, 12556, 15210, 13587, 14448, 14693, 14812, 14077, 4628, + 14049, 15012, 15272, 14018, 13920, 15039, 14819, 15049, 8219, 15157, 14588, 13018, 8563, 14406, 12439, 14969, + 15124, 14341, 15111, 14712, 14384, 15184, 14832, 15038, 15212, 13771, 15150, 14718, 15113, 11440, 12559, 15188, + 14994, 12731, 12423, 14827, 10994, 13780, 15293, 15005, 10856, 14405, 13899, 14340, 13822, 15261, 14717, 14426, + 14240, 14409, 13899, 15087, 15056, 13786, 14197, 14851, 15074, 12401, 15096, 13457, 15233, 14686, 
14366, 11804, + 14755, 14701, 14937, 9540, 11746, 14478, 15083, 14148, 14683, 11412, 14585, 14639, 13010, 15113, 12314, 14873, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14521, 14041, 13757, 15160, 15311, 10812, 14911, 13180, 14396, 15068, 15157, 12268, 14916, 14896, 15259, 14839, + 14816, 13398, 13859, 15227, 14596, 15092, 14842, 14530, 14260, 13213, 15051, 14141, 14495, 14633, 15262, 14959, + 13207, 12909, 7459, 13892, 12596, 12119, 10983, 12957, 14531, 13544, 14509, 15115, 13785, 13379, 13939, 14480, + 15343, 9636, 10720, 13944, 11941, 13620, 12337, 14093, 11449, 14929, 13804, 14913, 14288, 12117, 13934, 14156, + 15329, 10598, 13388, 14816, 14286, 15191, 13799, 15118, 14854, 10596, 14308, 14981, 13629, 14640, 14988, 12668, + 12333, 11533, 13140, 15177, 14705, 13552, 13724, 14799, 13199, 15279, 15194, 13727, 13375, 15246, 12346, 13047, + 15094, 13587, 13719, 14039, 14223, 14334, 9141, 13058, 13097, 14011, 14735, 13165, 14608, 13898, 14846, 15077, + 14801, 14569, 8896, 11603, 14566, 15100, 14442, 13576, 15088, 12282, 14433, 15299, 13872, 13745, 13441, 14853, + 12404, 12848, 14324, 12418, 13344, 13374, 14484, 12633, 14552, 10802, 13784, 12116, 12288, 14760, 12854, 11920, + 15094, 11698, 10655, 11817, 11286, 12912, 15163, 14920, 14672, 14529, 13075, 13138, 13504, 14404, 13522, 14504, + 14969, 11547, 14734, 14905, 15107, 13908, 9897, 13781, 8014, 14788, 14205, 10000, 12874, 13865, 14373, 14442, + 12877, 9398, 15062, 14598, 12366, 13106, 15232, 14749, 14962, 15242, 14564, 15028, 14269, 10100, 15054, 13584, + 13375, 14611, 11912, 14210, 14297, 13091, 15098, 14903, 15187, 14270, 13106, 12456, 14514, 10900, 11718, 15114, + 13413, 13196, 15113, 9223, 14720, 14567, 14824, 14460, 14432, 15139, 14575, 14995, 14689, 13067, 9113, 15235, + 14282, 14710, 15017, 14294, 12492, 13506, 11041, 13773, 13757, 9798, 12974, 14446, 12195, 9658, 13742, 14931, + 14542, 14320, 12161, 15116, 12761, 14822, 13260, 15196, 14983, 15271, 12995, 13760, 11677, 15335, 15026, 15032, + 14872, 
13753, 15275, 14934, 15101, 13562, 14657, 11250, 13622, 15131, 15102, 15186, 14419, 14856, 14402, 11097, + 15235, 14167, 9888, 14476, 13534, 12744, 14564, 12131, 15200, 13018, 13109, 13829, 14142, 14821, 14718, 11350, + 14263, 14819, 14899, 11836, 15002, 12015, 13231, 13408, 11930, 13611, 14337, 15196, 13508, 12894, 11714, 15190, + 14151, 15326, 15287, 14572, 14340, 14503, 14626, 15076, 14068, 12422, 14483, 15212, 13402, 14674, 14647, 14210, + 14530, 15210, 12721, 13927, 15354, 12381, 15243, 14850, 15076, 15261, 10670, 13510, 13559, 10514, 14573, 8620, + 10362, 14784, 10743, 14881, 13378, 13660, 15118, 14459, 14676, 14387, 14999, 14541, 13982, 14125, 13918, 13424, + 14744, 10829, 15230, 11625, 13706, 14871, 15211, 14743, 15015, 13199, 11460, 12476, 13403, 15277, 13169, 15293, + 13554, 12442, 14985, 14066, 15210, 14791, 15323, 14265, 14558, 14069, 14994, 15037, 15105, 12346, 14842, 14255, + 12820, 12654, 15235, 14283, 13623, 14493, 12396, 13549, 13894, 14453, 14552, 15102, 12439, 12825, 12694, 13811, + 10065, 14513, 14812, 13473, 12791, 14328, 15066, 15048, 11463, 14968, 11341, 14479, 15234, 14784, 14348, 12320, + 12833, 15018, 12601, 15296, 14451, 14974, 14719, 14989, 14241, 12347, 14848, 10659, 13709, 12121, 15087, 14941, + 15149, 14270, 15081, 14664, 11446, 13555, 13773, 9525, 12308, 15295, 14422, 13152, 12539, 15244, 13480, 14684, + 14812, 14554, 14730, 13256, 13691, 12453, 6673, 14755, 15323, 14365, 14017, 14530, 13733, 14433, 14073, 14792, + 14318, 15157, 14565, 11757, 11358, 14243, 13404, 14209, 14693, 12953, 14188, 14867, 12148, 9734, 10471, 14921, + 11834, 14153, 13803, 12527, 13412, 14278, 14379, 10916, 15165, 14495, 13531, 13432, 14209, 15262, 11705, 12347, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15114, 13518, 13980, 12685, 13682, 14272, 13538, 15247, 14919, 14981, 14820, 14814, 12429, 15300, 15148, 14391, + 12449, 15326, 10562, 13005, 10240, 12977, 13699, 12957, 14309, 14718, 13593, 13903, 14501, 14105, 13537, 13399, + 15004, 14600, 14481, 
11560, 12340, 14614, 14917, 10587, 13746, 14556, 11536, 15142, 13452, 12608, 14624, 15003, + 13146, 14870, 14150, 15225, 14224, 15215, 15046, 15346, 14393, 15237, 15270, 15042, 14430, 14683, 14882, 13610, + 14700, 12601, 15178, 13571, 14787, 15318, 12600, 13036, 14689, 14263, 15030, 13271, 10435, 12379, 12355, 14546, + 14677, 14471, 13314, 15305, 12584, 14631, 11957, 14793, 15038, 8991, 14965, 15046, 13934, 13647, 13532, 14115, + 14380, 13663, 13892, 13726, 13892, 15344, 15272, 14946, 15288, 11327, 12495, 14350, 15288, 13236, 15099, 13013, + 12867, 14925, 11699, 12644, 11685, 14909, 15264, 14874, 11388, 14071, 12527, 12382, 14128, 14680, 11357, 14657, + 15323, 14513, 13166, 15126, 13490, 12492, 14094, 15206, 14004, 14287, 10214, 12793, 13024, 14627, 7760, 15161, + 10408, 12813, 12688, 15255, 13870, 13770, 13946, 15027, 11218, 13474, 14726, 14544, 12516, 13443, 13627, 14660, + 12360, 14074, 14088, 15354, 13954, 13999, 12297, 12222, 14771, 15322, 13871, 13605, 14143, 7336, 14738, 14951, + 13706, 12842, 14169, 13381, 14768, 13068, 13489, 12370, 9925, 14721, 11967, 15162, 14758, 15157, 15129, 14126, + 13267, 14922, 14078, 14992, 15240, 14910, 11827, 14301, 14121, 14654, 14724, 11465, 14836, 14496, 13914, 14521, + 13585, 13562, 13179, 14937, 14686, 13899, 14493, 13764, 13087, 15281, 13444, 13940, 13250, 14737, 14233, 12423, + 13957, 11705, 15191, 13076, 14108, 14963, 14682, 14005, 15184, 14534, 12839, 15109, 14780, 15302, 15102, 14704, + 11798, 14783, 14588, 15198, 15147, 12612, 10312, 13328, 14922, 15029, 10556, 14484, 12727, 13751, 14369, 14926, + 15310, 13435, 10187, 14401, 15200, 12906, 15279, 15019, 13355, 14651, 13387, 14433, 14551, 13391, 14118, 14722, + 13990, 5843, 12131, 15192, 15148, 14414, 14604, 15079, 12226, 7803, 10866, 14738, 14102, 13970, 15067, 14543, + 14180, 15247, 11606, 13890, 11493, 13911, 14480, 13782, 13709, 13827, 13780, 11400, 15072, 12473, 14587, 11537, + 13455, 14526, 15094, 12745, 9218, 15088, 15056, 15208, 14648, 14387, 14917, 15032, 
12307, 9955, 12872, 10061, + 14633, 14675, 14866, 15325, 13700, 12137, 14069, 15208, 13356, 13354, 13682, 14550, 9415, 12219, 14245, 14355, + 14609, 14134, 14551, 14889, 14736, 13331, 12648, 11684, 14757, 14961, 14788, 15216, 15263, 14019, 11677, 15117, + 15101, 13684, 14919, 14932, 12004, 13786, 14691, 7719, 15137, 12794, 13755, 14670, 14884, 6460, 11961, 13286, + 14853, 12137, 13602, 14396, 15333, 11885, 15354, 14870, 15033, 15355, 15008, 14664, 15104, 14911, 14301, 11977, + 14343, 12836, 14766, 12698, 14500, 14934, 12836, 14637, 14471, 14192, 15029, 14811, 12884, 14442, 12308, 14963, + 11497, 14934, 12533, 14256, 14962, 13988, 15160, 15042, 13813, 15163, 14976, 14372, 14674, 14915, 15000, 12889, + 14454, 14337, 12817, 15282, 11888, 14712, 12716, 12968, 14828, 11331, 14553, 14960, 11745, 14426, 13329, 15029, + 12324, 15039, 9540, 13603, 13163, 13497, 14905, 13530, 14697, 15327, 13809, 14862, 15121, 13496, 13204, 14345, + 11908, 15058, 11816, 14434, 13870, 13792, 13100, 14035, 13718, 14104, 10908, 11752, 14814, 15270, 14354, 15227, + 14864, 14910, 14112, 14770, 15135, 15286, 14790, 12491, 10339, 10311, 12666, 13673, 13661, 15197, 15228, 13740, + 14016, 14858, 14433, 13865, 14953, 14904, 12803, 15006, 13634, 13596, 13522, 11651, 8251, 14412, 15116, 13971, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13281, 14388, 14653, 14235, 14949, 14756, 13327, 14157, 8095, 14198, 12592, 14315, 14425, 14529, 14431, 13207, + 15309, 14636, 14993, 13322, 14044, 15251, 14487, 11736, 14643, 14772, 14514, 11930, 14370, 9144, 14312, 14996, + 13933, 14605, 14132, 14396, 12388, 12102, 15068, 13772, 13114, 14727, 13750, 15137, 14107, 10686, 13506, 14924, + 14572, 15097, 14554, 15329, 15000, 15066, 11758, 8366, 15122, 13639, 11621, 10547, 14410, 14117, 14278, 14783, + 12150, 12072, 14949, 12702, 14558, 13354, 13587, 14553, 11582, 12985, 12942, 14217, 11719, 10286, 13961, 14120, + 12922, 15223, 14233, 14738, 14341, 14852, 14378, 13464, 15071, 15134, 13977, 13730, 14450, 14537, 11988, 
13453, + 14846, 15224, 14809, 14926, 10310, 13068, 14659, 14381, 13033, 13457, 15043, 14285, 14822, 14320, 14747, 15336, + 12970, 11143, 15268, 14511, 14287, 14914, 12641, 13277, 14756, 12932, 11826, 14039, 13819, 15342, 15009, 11671, + 14493, 15019, 14550, 14406, 14474, 12501, 14314, 14509, 14271, 12944, 7321, 14959, 14552, 13422, 14685, 14722, + 15248, 13906, 15278, 13789, 12703, 12355, 14521, 13515, 14498, 9958, 15012, 15088, 14775, 14876, 14482, 14983, + 15083, 13026, 15222, 13686, 15189, 10315, 12789, 13710, 14859, 11598, 13848, 14604, 9806, 15131, 11235, 14405, + 7898, 14241, 11721, 14579, 13142, 13406, 13562, 14886, 15058, 13923, 12291, 13886, 14662, 14492, 8785, 14705, + 13319, 14371, 13297, 14752, 14933, 13149, 14319, 15048, 10985, 15077, 14658, 15309, 15038, 14519, 14080, 13682, + 11960, 12674, 14814, 13905, 14884, 12316, 14700, 12754, 14468, 14064, 15130, 11870, 13111, 14785, 13544, 14927, + 10080, 14046, 12328, 13732, 14914, 13920, 10934, 15350, 14796, 13997, 14814, 10346, 14982, 12412, 11890, 13211, + 7608, 15091, 14696, 13887, 15345, 13824, 14880, 14774, 14041, 13713, 15009, 15119, 14820, 14655, 12334, 14429, + 14259, 11618, 12647, 15121, 14091, 14538, 13483, 15153, 15303, 11594, 14800, 14366, 11562, 14498, 15267, 14114, + 12398, 14270, 13029, 15225, 14450, 15071, 15015, 13722, 14481, 13872, 11952, 13001, 14710, 14538, 14735, 15116, + 14141, 15168, 14990, 13575, 14597, 10412, 13787, 13336, 15341, 14384, 14827, 13904, 14229, 14331, 14880, 14209, + 14378, 15247, 13762, 15124, 13800, 14845, 14774, 12305, 15287, 14592, 13862, 9171, 13419, 14593, 15168, 12761, + 14663, 14776, 14976, 13314, 15073, 12674, 14822, 14765, 15289, 13832, 14804, 14411, 15166, 14909, 14581, 14703, + 14984, 13050, 15107, 14908, 14379, 15357, 14384, 12170, 15186, 13363, 14094, 15131, 14898, 14342, 15090, 15226, + 14340, 14612, 12815, 13927, 13914, 14786, 14459, 12572, 12060, 10287, 14665, 13979, 15070, 9430, 13653, 11688, + 13386, 14209, 14062, 14526, 13840, 13974, 12540, 14728, 
15181, 13579, 15201, 13679, 14547, 10079, 8373, 15089, + 14652, 15218, 13794, 14695, 14495, 11434, 13729, 14457, 14750, 13953, 14900, 14345, 14743, 8968, 13501, 14411, + 15088, 13765, 14305, 13611, 11454, 12431, 13916, 15239, 15101, 14880, 15032, 14959, 13428, 15156, 12868, 14880, + 14404, 10812, 13679, 15352, 13889, 14393, 14057, 14521, 11904, 13946, 13475, 15140, 12184, 12043, 5618, 13417, + 15095, 14830, 14417, 14349, 14508, 12684, 14265, 14503, 14177, 14799, 12752, 15176, 11621, 14074, 14476, 13631, + 14996, 14413, 14905, 15034, 15041, 14168, 12507, 15127, 14898, 11142, 12775, 14944, 14429, 13475, 14620, 15249, + 14723, 11573, 15098, 12652, 14089, 14582, 14455, 14365, 13275, 13550, 15278, 12935, 14109, 14868, 15056, 15294, + 14365, 14405, 11798, 13938, 12237, 15015, 13686, 12382, 14689, 13340, 15273, 11760, 15173, 11573, 11228, 14127, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14976, 15320, 14856, 14747, 14545, 14933, 12942, 12746, 14663, 14383, 14315, 15076, 12981, 12639, 14651, 12301, + 13831, 13248, 13561, 14089, 7133, 15107, 15243, 14283, 13685, 13837, 13150, 15097, 15066, 15321, 14754, 15065, + 15145, 14604, 9350, 14595, 14753, 11829, 11989, 14282, 15170, 13365, 11849, 15026, 15063, 14496, 14898, 13614, + 14455, 11490, 14996, 13146, 15316, 13678, 12519, 12904, 15215, 10141, 14359, 12764, 14952, 14945, 14318, 12545, + 15021, 13737, 12487, 14273, 15306, 14737, 14678, 13369, 14656, 14651, 11771, 14753, 15329, 11653, 14861, 13370, + 14876, 13650, 14235, 14345, 14666, 11985, 15343, 10510, 14125, 13014, 12078, 14598, 14707, 14807, 14031, 14574, + 14259, 12994, 12920, 14841, 12705, 12426, 10228, 12535, 13332, 14389, 14773, 12520, 14113, 11590, 13318, 14054, + 14045, 15238, 14760, 14441, 13800, 12903, 14482, 14181, 15116, 14771, 14710, 12219, 14550, 15131, 12956, 15206, + 14452, 14133, 14825, 14782, 13950, 14989, 14822, 14550, 14960, 13546, 15291, 15047, 14721, 12796, 13049, 13984, + 15339, 13726, 14888, 13541, 14481, 11449, 15145, 15314, 14862, 14949, 
15320, 15218, 13523, 13857, 14019, 14322, + 14519, 13801, 13015, 10633, 14755, 11690, 14555, 14487, 14256, 13564, 12702, 11090, 15349, 13077, 14269, 15188, + 13954, 14889, 14325, 13875, 14970, 13963, 14442, 14405, 13956, 14661, 14439, 14251, 14473, 14590, 15279, 15312, + 15103, 15129, 12912, 14690, 14609, 13013, 14625, 14788, 14695, 13120, 14112, 13991, 14719, 13317, 14953, 14861, + 12677, 14062, 13603, 11757, 13703, 13908, 12437, 9314, 12550, 8987, 14382, 11060, 15021, 14353, 14740, 14386, + 15262, 9436, 14217, 13844, 15278, 13635, 13995, 14881, 15223, 14603, 14039, 14987, 14338, 14804, 15039, 13950, + 15027, 14131, 15027, 14733, 12575, 14899, 15147, 14539, 14019, 14406, 11221, 14913, 15175, 15293, 13944, 9951, + 14276, 15309, 11477, 13522, 9043, 14880, 14893, 10032, 12896, 12839, 13480, 15326, 8593, 10118, 15130, 14666, + 12737, 10912, 14774, 12419, 14430, 14359, 13123, 12935, 15153, 14394, 14949, 14113, 14684, 13348, 15086, 12414, + 13458, 14803, 15243, 12783, 11518, 14356, 14634, 14364, 13764, 14562, 13552, 11792, 13272, 14166, 13093, 14020, + 14673, 11360, 14245, 14856, 14758, 13310, 14676, 12681, 11917, 15018, 12183, 13263, 13681, 14529, 12440, 15073, + 15030, 11081, 14591, 11958, 15136, 13898, 14443, 13995, 14057, 10504, 13954, 13927, 12744, 14617, 14437, 15311, + 15004, 7602, 15221, 15353, 14844, 14487, 15278, 14855, 13187, 11666, 12805, 12928, 12768, 14658, 7348, 13047, + 11651, 12457, 14692, 12098, 14889, 13845, 14825, 15299, 14087, 12794, 15135, 13823, 14469, 14422, 13172, 13470, + 14456, 14897, 14513, 12444, 15330, 13965, 13733, 14928, 13571, 12734, 14482, 14512, 15152, 13734, 12670, 14767, + 12819, 12825, 14931, 13675, 13686, 12069, 13955, 15206, 14125, 13530, 14171, 11805, 14993, 14774, 15087, 14289, + 14288, 11321, 14504, 15098, 13589, 13506, 15000, 14225, 14552, 10865, 13645, 10572, 14811, 13932, 14066, 14928, + 13085, 13980, 10430, 15270, 13851, 14104, 11861, 12539, 13621, 15241, 15220, 9377, 15000, 14933, 11082, 13061, + 14605, 12090, 11294, 
14511, 13747, 14102, 14932, 15220, 12548, 14598, 13892, 15322, 15107, 11542, 15160, 10795, + 15081, 14780, 13869, 14676, 12543, 15249, 14737, 13867, 14420, 14356, 15208, 15003, 13838, 14914, 13949, 15354, + 13796, 15292, 13661, 14913, 14461, 14694, 12464, 13470, 14888, 11147, 15148, 12601, 14848, 14154, 10547, 15327, + 14875, 14520, 14455, 13110, 15166, 13329, 15242, 12645, 14231, 9729, 13808, 14604, 12699, 14319, 14690, 14702, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12899, 14145, 11482, 14941, 14311, 11793, 15308, 13356, 15285, 14843, 14161, 14144, 14181, 13929, 15262, 15282, + 13569, 14586, 13484, 14018, 14595, 12157, 11768, 14595, 15219, 13311, 15300, 14819, 15228, 5703, 15260, 14424, + 13975, 14023, 15247, 11991, 12462, 15047, 14329, 14918, 14216, 14559, 11484, 14921, 14635, 14301, 13155, 14491, + 15118, 14144, 12558, 14257, 12490, 13843, 15011, 13518, 15244, 14844, 14244, 11510, 14862, 10261, 13492, 8340, + 12616, 14608, 14084, 14340, 13877, 14281, 14336, 10026, 12765, 15152, 13970, 14958, 15249, 14512, 13765, 13705, + 14586, 12913, 14076, 15062, 14704, 14657, 13504, 11789, 13853, 12733, 15155, 14505, 10599, 12417, 13781, 14709, + 14560, 12499, 12124, 13801, 13898, 14684, 9283, 13609, 13356, 14633, 14525, 14352, 14154, 14939, 14956, 15259, + 12805, 12921, 14092, 14138, 14778, 11760, 14176, 12487, 14835, 13389, 15044, 13351, 13906, 14751, 11136, 12328, + 14690, 15177, 13000, 13450, 14235, 12819, 14293, 11417, 11860, 13690, 13293, 14079, 14799, 15218, 14458, 13535, + 12326, 14279, 14893, 11920, 14464, 9670, 15145, 11534, 12507, 12900, 14952, 15346, 13570, 10871, 10659, 12697, + 15218, 14874, 13795, 11730, 14170, 14592, 14551, 15168, 12243, 14472, 13646, 14970, 14591, 14193, 15329, 13948, + 14947, 12265, 11429, 12986, 14938, 11598, 14570, 14835, 14436, 14768, 13390, 14957, 13054, 14693, 15213, 13255, + 14467, 4195, 12705, 14166, 14194, 15188, 14352, 15112, 13565, 12581, 14909, 12447, 13551, 12369, 12856, 13661, + 14803, 12494, 14553, 11957, 15300, 
10307, 11859, 13746, 15120, 11608, 14829, 15028, 14252, 12338, 13494, 14231, + 12999, 14335, 14031, 12662, 14483, 15057, 13938, 8440, 14639, 14175, 15089, 10597, 13834, 15083, 15312, 14730, + 15270, 15034, 10214, 14149, 12903, 13272, 10594, 15231, 13343, 14936, 14773, 15295, 12305, 14615, 13113, 7949, + 14883, 14394, 15307, 13544, 14360, 14421, 14897, 14917, 11127, 12531, 14685, 12592, 15233, 15031, 14239, 15016, + 15320, 15334, 14753, 12701, 14847, 15179, 14850, 14042, 13791, 14559, 11986, 15197, 14747, 15275, 13160, 14448, + 14344, 15001, 14447, 13626, 14376, 13635, 13367, 13908, 14136, 11662, 13239, 15069, 11325, 14682, 9847, 14223, + 14973, 15085, 15009, 14663, 10946, 12595, 13998, 9601, 14156, 11295, 14518, 12537, 10824, 14509, 14633, 12603, + 14325, 14560, 12694, 11881, 15345, 15215, 12140, 14573, 14656, 13104, 14137, 14680, 14727, 14107, 14586, 14857, + 11186, 15053, 14947, 15277, 13652, 13831, 12764, 14462, 14954, 15013, 14735, 14759, 14923, 8551, 14570, 15358, + 14816, 14734, 14971, 15208, 12952, 12846, 14985, 14813, 8634, 15070, 12693, 14531, 7817, 14175, 15264, 14279, + 6596, 14706, 14619, 14492, 10603, 13054, 13316, 14601, 14544, 12521, 12599, 14599, 14708, 13769, 14922, 14526, + 13172, 13819, 10934, 14303, 14878, 8124, 13354, 14921, 14543, 13798, 15034, 14823, 12955, 14768, 12814, 15046, + 12731, 11502, 15217, 14096, 15056, 13701, 14933, 13839, 13640, 13472, 13642, 15028, 15075, 14471, 14484, 13619, + 14529, 14757, 14998, 13596, 14590, 13665, 15055, 13691, 14136, 14868, 13456, 13010, 13747, 13788, 14739, 14456, + 14842, 15239, 10946, 14313, 15197, 12329, 13187, 15178, 14432, 13369, 14970, 13321, 12521, 14943, 11130, 14829, + 14687, 14478, 14998, 14565, 14567, 13773, 15297, 13552, 14613, 13802, 15311, 10983, 14762, 14291, 8820, 15110, + 15056, 14468, 8369, 14950, 15161, 11667, 14952, 15068, 13913, 12478, 13105, 13826, 12650, 13910, 13450, 13828, + 14882, 14228, 14588, 14683, 15289, 14730, 13633, 14851, 12290, 14166, 10686, 14612, 14558, 14602, 11166, 
11270, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15129, 12633, 12271, 15335, 14478, 13558, 14348, 12826, 11074, 14488, 14704, 8808, 13510, 12700, 15345, 13496, + 15207, 14772, 14952, 13082, 14483, 14744, 13120, 13042, 12760, 13895, 13191, 13038, 12863, 15248, 14447, 15060, + 12681, 9499, 14706, 13362, 14831, 13935, 10062, 14542, 14510, 15345, 13422, 14830, 12740, 12078, 13737, 13762, + 14374, 14772, 14502, 12734, 13597, 14395, 14447, 14932, 15197, 14258, 12867, 15233, 13123, 13901, 14823, 14414, + 12598, 12265, 13458, 13322, 15095, 12678, 14753, 12056, 14876, 14289, 11887, 11132, 14634, 14333, 14876, 14487, + 15255, 13232, 9419, 13113, 15125, 13709, 8430, 14475, 14072, 14976, 13150, 13565, 11062, 15349, 13720, 15089, + 14169, 14668, 14598, 14397, 14420, 14360, 13381, 12958, 13602, 11956, 13638, 15086, 14589, 14067, 11622, 14921, + 13278, 14506, 15108, 11403, 11014, 14973, 12967, 11848, 13624, 14924, 15303, 15065, 11164, 14361, 13992, 14433, + 14085, 13432, 13679, 12375, 14695, 14953, 12370, 12341, 15181, 14370, 13683, 13684, 14325, 13300, 13569, 14682, + 14868, 14656, 14537, 15188, 9057, 13714, 14465, 14510, 12882, 13514, 15251, 14690, 14821, 15016, 14776, 14634, + 14755, 14577, 14514, 14490, 14143, 12098, 15029, 14748, 14516, 14139, 9531, 13087, 15199, 14901, 12515, 15335, + 11315, 15279, 15326, 13345, 13394, 14553, 10464, 13564, 15067, 13190, 15029, 13106, 12338, 15167, 14579, 12368, + 14676, 14490, 14064, 15245, 14118, 14384, 15154, 14798, 15267, 13168, 14834, 14392, 14985, 13895, 13405, 15033, + 14648, 13778, 12922, 9520, 13830, 14078, 13337, 15191, 14099, 14020, 15230, 13925, 14606, 14374, 7937, 15081, + 14374, 15336, 8978, 14746, 14837, 12443, 14773, 11460, 14236, 8503, 14702, 14511, 13591, 13645, 12096, 14453, + 13910, 14809, 9859, 11043, 15091, 12568, 13063, 12664, 12483, 12531, 13754, 15352, 15089, 13728, 11477, 12352, + 12859, 8557, 13822, 13402, 13676, 10691, 15003, 11804, 14072, 11966, 15007, 13977, 14454, 14459, 11890, 14098, + 13689, 14974, 
11621, 11774, 12809, 14080, 14352, 12959, 13544, 14411, 12972, 15266, 14629, 15013, 15166, 14163, + 13829, 14381, 14666, 15249, 14878, 14485, 15023, 11462, 14618, 15282, 9025, 10056, 12482, 14399, 11935, 14529, + 15329, 10444, 10458, 15164, 14283, 15047, 15122, 15061, 15336, 13797, 15325, 14232, 15121, 13841, 14858, 14648, + 15091, 12648, 14668, 15276, 10408, 10465, 15050, 14160, 13669, 14655, 15008, 15149, 15034, 14635, 15302, 13467, + 15279, 15145, 13339, 15047, 8492, 13484, 14358, 14790, 14121, 15256, 14492, 14495, 14379, 13603, 15282, 13101, + 14199, 15169, 14894, 14209, 14965, 15284, 14068, 9544, 13332, 15286, 14710, 11613, 15354, 13708, 13892, 13712, + 13578, 14036, 14619, 14832, 12956, 14467, 13883, 11593, 13838, 15191, 14717, 12866, 13553, 11320, 13374, 13573, + 9403, 15343, 13873, 14781, 9553, 13937, 12206, 15322, 14692, 14836, 12846, 14908, 15352, 14397, 14597, 14143, + 14374, 15102, 13916, 14190, 13450, 15129, 14920, 11822, 15233, 14898, 14593, 12774, 14934, 14347, 14450, 15266, + 15297, 9337, 14273, 15174, 9064, 14082, 15048, 15183, 14733, 12449, 14415, 14596, 13726, 15278, 14017, 14580, + 15231, 13321, 13312, 14997, 13352, 15232, 14729, 15251, 13066, 15096, 13439, 13541, 15199, 13725, 14965, 15104, + 14034, 9564, 12353, 14342, 11883, 13095, 12822, 14708, 15247, 14645, 13155, 14587, 14819, 13913, 14674, 13300, + 8992, 14810, 14241, 14566, 14986, 14578, 15054, 15239, 14988, 14905, 14537, 13648, 14056, 14587, 14932, 12516, + 15270, 15241, 14960, 11623, 12726, 14406, 14502, 13488, 14570, 10964, 13249, 10442, 12306, 13676, 15122, 13435, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14065, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14792, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13601, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14208, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11756, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14946, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 14318, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11157, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12292, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14480, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12208, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11920, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12946, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14558, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14744, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12591, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15067, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14394, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14172, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12605, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14499, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14809, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12525, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13863, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12424, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14831, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15236, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12827, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14985, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13782, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15242, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13631, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15262, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10437, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14845, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
13724, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12354, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13559, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14780, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15227, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14242, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15346, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12389, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15251, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 4533, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12679, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13779, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11953, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13642, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15014, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9458, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14503, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13700, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13921, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14515, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15334, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14963, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13778, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14597, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14753, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14499, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13995, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12477, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13580, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14033, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15062, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13503, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13305, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13398, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14383, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13644, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12744, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14881, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14777, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10585, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12692, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14319, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15163, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14070, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15055, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14930, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15247, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10914, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14450, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13480, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15000, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15323, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14227, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14773, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14973, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12545, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14904, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15003, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14892, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14736, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13097, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15127, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15095, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13293, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15347, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14294, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12041, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13481, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15168, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13302, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14714, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15251, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13897, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14687, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14978, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12990, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14778, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14613, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6439, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13982, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12487, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14793, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13660, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14699, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12814, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13592, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13918, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13652, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13212, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15048, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14635, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14587, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13791, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14928, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14768, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15199, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15027, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14549, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13263, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15165, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14993, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10874, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11939, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14439, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14445, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15124, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15356, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12379, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13445, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12820, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13125, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 14694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11619, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14563, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14851, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9072, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13588, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13374, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10685, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14581, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12774, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14392, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13811, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13815, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14784, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14839, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12029, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14362, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14537, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13695, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14451, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15068, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15245, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14640, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14015, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15117, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13956, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15044, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14952, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14970, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13493, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15299, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 14531, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13929, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14481, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14592, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14656, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14377, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14961, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15317, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15227, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14827, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13759, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12358, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14905, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14642, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14787, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14237, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14005, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13770, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12943, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12976, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14302, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14655, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13275, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11610, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14687, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13896, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14496, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13793, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14384, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13852, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 13732, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13695, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13396, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11654, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14856, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14742, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13681, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11598, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14549, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15065, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13787, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14229, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9833, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13955, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13531, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13912, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14542, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15189, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14830, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13402, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13495, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13538, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15002, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13429, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12975, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13624, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12418, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14493, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12925, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14323, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 14383, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14565, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15275, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12711, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11424, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14569, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14745, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13329, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11945, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14448, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14916, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14590, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12547, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14792, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14919, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14632, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13529, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14307, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15314, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11927, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12913, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14072, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14846, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14363, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13458, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11316, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15006, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13998, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 12457, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14080, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14737, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13997, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13012, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14871, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12206, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14005, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12415, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15078, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13567, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15349, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12954, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13139, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13474, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10893, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10037, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15178, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15217, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14853, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14891, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14071, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12289, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14341, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11465, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12509, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15241, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14062, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
14753, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14075, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12822, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10563, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12400, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14575, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14055, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15011, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10473, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13115, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11919, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14798, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13877, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14093, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12934, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14526, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13663, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14892, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14958, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14436, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13436, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12725, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13754, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11444, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15262, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14926, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14950, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10566, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15117, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12727, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14729, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13756, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15267, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14900, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15250, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14757, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12011, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14624, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14063, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15068, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13835, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11489, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11410, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14745, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14661, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13569, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12763, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14932, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14851, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13511, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13677, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14543, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14605, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15036, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14657, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14617, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14875, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13788, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14421, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15170, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14389, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14577, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15337, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15245, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14764, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13328, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14910, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13327, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13381, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11468, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11271, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12719, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15023, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14764, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14374, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13085, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14873, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13749, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14737, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14765, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14897, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13562, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11925, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14287, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14233, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15094, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14490, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11202, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13738, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13849, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14813, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14512, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14363, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11696, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9268, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14793, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12674, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13757, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14622, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, std::vector({32, 2, 16, 16}), + std::vector({31, 17, 4, 4}), DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, 31 * 17 * 4 * 4 * 2); + for (int i = 0; i < 31 * 17 * 4 * 4; ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferFraczNchw, fp32_1) { + float ret[1 * 1 * 1 * 1] = {0.8329063818189503}; + float data[1 * 1 * 16 * 16] = { + 0.8329063818189503, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + FormatTransferFracZNchw transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NCHW, {1, 1, 16, 16}, {1, 1, 1, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + 
EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_fracz_nhwc_unittest.cc b/tests/ut/ge/common/format_transfer_fracz_nhwc_unittest.cc new file mode 100644 index 00000000..51770ecc --- /dev/null +++ b/tests/ut/ge/common/format_transfer_fracz_nhwc_unittest.cc @@ -0,0 +1,5425 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_fracz_nhwc.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UtestFormatTransferFraczNhwc : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_data_type) { + uint8_t data[16 * 1 * 32 * 16] = {1}; + + TransArgs args{data, FORMAT_FRACTAL_Z, FORMAT_NHWC, {16, 1, 32, 16}, {1, 4, 4, 1}, DT_UNDEFINED}; + TransResult result; + + FormatTransferFracZNhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_src_format_reserved) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNhwc transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_RESERVED, FORMAT_NHWC, {16, 1, 16, 16}, {1, 4, 4, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_dst_format_reserved) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNhwc transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_RESERVED, {16, 1, 16, 16}, {1, 4, 4, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_src_shape) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNhwc transfer; + TransArgs args{reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NHWC, {16, 1, 16}, {1, 4, 4, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_src_shape2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNhwc transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, 
FORMAT_NHWC, {16, -1, 16, 16}, {1, 4, 4, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_dst_shape) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNhwc transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NHWC, {16, 1, 16, 16}, {1, 4, 4}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_dst_shape2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNhwc transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NHWC, {16, 1, 16, 16}, {1, 4, 4, -1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_src_dst_shape_relation1) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNhwc transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NHWC, {16, 1, 16, 16}, {17, 4, 4, 1}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_invalid_src_dst_shape_relation2) { + float data[16 * 1 * 16 * 16] = {1}; + + FormatTransferFracZNhwc transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_NHWC, {16, 1, 16, 16}, {1, 4, 4, 17}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_fp16_success_lt_cube) { + uint16_t data_4d[1 * 1 * 16 * 16] = { + 15108, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[1 * 1 * 1 * 1] = { + 15108, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NHWC, {1, 1, 16, 16}, {1, 1, 1, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZNhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_fp16_success_eq_cube) { + uint16_t data_4d[4 * 1 * 16 * 16] = { + 14422, 14250, 15207, 11368, 13196, 12957, 15106, 15099, 15047, 13771, 13891, 14758, 13298, 13411, 15283, 15351, + 13386, 13586, 12661, 14761, 15025, 10628, 12072, 14528, 14866, 13804, 15034, 14930, 14536, 13776, 11278, 10879, + 12538, 13720, 14793, 8244, 14509, 15201, 14563, 12649, 15320, 14814, 14229, 13811, 13734, 13795, 13354, 12915, + 13498, 14218, 14748, 9860, 13268, 11456, 14650, 14749, 14957, 13538, 14260, 15152, 14451, 14932, 12831, 14326, + 13007, 14492, 14752, 15121, 14798, 14508, 14502, 14809, 13826, 15346, 14598, 13712, 14560, 14605, 14954, 14740, + 13011, 13442, 14945, 13182, 15009, 8704, 12211, 14842, 15205, 12795, 10772, 13892, 12902, 14761, 14601, 12663, + 13702, 14580, 15322, 14745, 14750, 14264, 14433, 13854, 14778, 15011, 14745, 14599, 13430, 9216, 14570, 14514, + 15336, 9439, 15021, 15107, 14006, 12950, 13622, 14707, 14981, 14850, 13337, 13280, 15124, 10206, 
13412, 13832, + 15303, 14455, 15079, 12787, 14613, 14302, 15036, 13536, 15243, 13739, 15259, 15099, 13211, 14770, 15097, 14402, + 15204, 14955, 14621, 12097, 15083, 14119, 14814, 4005, 10998, 14856, 13380, 14647, 14540, 14008, 11346, 14363, + 14313, 15108, 6776, 15180, 14647, 13917, 14629, 13412, 13480, 10439, 14776, 14563, 15328, 14777, 14436, 14902, + 15255, 14241, 13324, 15082, 14738, 15265, 14657, 13571, 15280, 15082, 9338, 12341, 14991, 14847, 14948, 14037, + 13624, 14947, 14968, 12058, 14060, 13434, 14349, 14972, 12716, 14287, 12350, 12619, 15263, 13831, 15160, 15032, + 14591, 12370, 13537, 11343, 13909, 12663, 15078, 14938, 14409, 13852, 12414, 13903, 13825, 13958, 10736, 11772, + 13429, 13236, 15033, 13910, 15008, 14564, 14344, 14546, 15227, 14491, 14858, 14957, 12725, 13536, 13030, 12562, + 15163, 13294, 15229, 15146, 11977, 14509, 15259, 12425, 15334, 13340, 15329, 15071, 14342, 14555, 14473, 15054, + 14551, 14307, 14721, 14255, 13352, 15201, 15191, 13045, 12146, 13419, 14884, 13352, 14843, 14356, 10607, 15253, + 12948, 13749, 13930, 14144, 9963, 15200, 14364, 14952, 14652, 15188, 14500, 14558, 15289, 14978, 10959, 14101, + 15131, 14699, 15179, 14896, 14614, 13448, 14463, 12326, 13099, 13035, 12361, 13939, 15216, 15031, 15104, 15122, + 14754, 14837, 14295, 12738, 13007, 13385, 13712, 13448, 14487, 13904, 14940, 12247, 15013, 13784, 15169, 11543, + 15351, 14657, 15047, 14243, 14234, 14986, 15288, 9009, 14717, 13685, 13416, 12043, 14874, 14362, 12565, 15139, + 15351, 11442, 14669, 12929, 15084, 12574, 12783, 14385, 10838, 14537, 14440, 14713, 15140, 14316, 14694, 11250, + 14399, 14942, 14509, 11151, 15125, 13241, 14844, 5053, 13906, 14498, 10895, 15031, 15220, 15172, 15160, 13782, + 7336, 13280, 15245, 15190, 12154, 13669, 14612, 14388, 9114, 14866, 15180, 12447, 14803, 13802, 13786, 14692, + 13145, 13633, 10303, 14120, 14322, 9452, 13412, 14852, 15342, 12512, 12724, 14482, 14708, 13886, 14371, 13771, + 13399, 14432, 13585, 15028, 14173, 14347, 15077, 
13051, 12138, 14267, 11114, 13958, 14961, 13844, 15272, 14643, + 12614, 13212, 13324, 15077, 14349, 14662, 14273, 10859, 5552, 10874, 13863, 14104, 13495, 14904, 11517, 15108, + 14999, 13078, 15106, 14639, 13345, 6413, 13979, 15205, 14332, 12647, 13386, 15312, 10976, 15089, 14089, 14370, + 8462, 13359, 14322, 14221, 14371, 14449, 13536, 12118, 14288, 13483, 12182, 13621, 15261, 14723, 12623, 13323, + 14817, 14529, 13499, 14350, 11063, 14315, 12718, 14574, 14180, 12505, 13109, 11207, 14801, 13781, 14392, 13549, + 14481, 13731, 15236, 9398, 15358, 12351, 14751, 14509, 11815, 14787, 9669, 14645, 15099, 14490, 13121, 15050, + 15076, 14756, 14958, 4978, 14576, 11508, 14746, 13161, 14408, 10590, 13429, 14945, 13993, 15033, 13183, 13027, + 13040, 15055, 14563, 15154, 14639, 15242, 14828, 12673, 12310, 15149, 12333, 11282, 14004, 15172, 11527, 15094, + 14725, 14941, 14435, 12561, 13832, 13489, 13186, 11553, 14927, 14861, 11684, 14518, 14331, 10694, 12473, 14447, + 14008, 15061, 14954, 15103, 14619, 13134, 14321, 14898, 10583, 15066, 15013, 14557, 14566, 14025, 13341, 14381, + 13587, 11771, 12011, 15320, 13462, 14503, 14944, 12535, 11976, 12949, 11427, 12383, 14405, 13595, 15002, 14362, + 13336, 13825, 11229, 13185, 14932, 13706, 14583, 15300, 15332, 14997, 11169, 15248, 14661, 9558, 13210, 13015, + 15314, 14332, 14428, 15128, 12862, 15058, 14929, 14899, 8721, 14699, 14163, 14774, 14835, 14416, 14252, 12609, + 11329, 13053, 11842, 15323, 12010, 13224, 12629, 15297, 12422, 11133, 9125, 11724, 14585, 14376, 14733, 14556, + 11591, 12995, 14556, 14822, 14914, 15332, 13424, 14846, 13522, 14947, 9886, 15179, 11710, 14524, 12604, 13600, + 14898, 14863, 14901, 9668, 13991, 15090, 14553, 9288, 13662, 12807, 13881, 15143, 14591, 10092, 15358, 9907, + 15349, 14811, 14578, 14389, 15154, 13072, 13805, 13115, 14879, 14902, 14466, 14540, 9926, 14475, 13389, 14468, + 14489, 14673, 14794, 13494, 15219, 15314, 14266, 12104, 14068, 12709, 13135, 15088, 15118, 14229, 15145, 13771, + 14614, 
14426, 14336, 14375, 15264, 13925, 13501, 14613, 14973, 13381, 15025, 14770, 11856, 15035, 13759, 14437, + 14171, 15349, 14474, 14662, 13505, 15329, 15210, 15353, 13535, 12864, 14658, 14968, 9562, 13352, 12356, 13565, + 14481, 15092, 12920, 15290, 14046, 10403, 14299, 13885, 9128, 13798, 14575, 14484, 14728, 14471, 14678, 15356, + 13955, 14939, 15149, 14775, 15303, 14614, 14530, 13890, 14051, 14869, 15253, 12810, 12328, 14590, 13489, 13653, + 14448, 15013, 11047, 13979, 12332, 14777, 14414, 15286, 12226, 14359, 13505, 13476, 13932, 14273, 14620, 14246, + 14987, 13774, 14562, 12418, 15303, 13383, 15006, 11992, 13652, 14342, 15025, 13315, 13500, 12981, 13108, 14252, + 14185, 14384, 14865, 12340, 13978, 14106, 12372, 14367, 10200, 15157, 13088, 14517, 15146, 14143, 14497, 13560, + 12995, 13539, 14724, 14606, 13340, 13378, 13432, 15143, 13249, 13229, 13168, 13910, 14061, 14456, 14896, 12972, + 13595, 13416, 13143, 9473, 14306, 13768, 15134, 13998, 10894, 15136, 12425, 13283, 11400, 13338, 12118, 12982, + 11692, 15215, 14555, 10674, 14734, 12815, 13038, 14365, 7730, 14605, 12123, 14907, 12370, 14712, 14057, 11671, + 14851, 14573, 14218, 11442, 14831, 14930, 13150, 12025, 15170, 7647, 14724, 14542, 14814, 14375, 14807, 13142, + 10157, 14206, 14185, 15359, 15121, 15264, 15117, 12578, 12061, 14273, 15339, 15092, 12803, 13734, 12847, 13867, + 11298, 13600, 13421, 14637, 11295, 15278, 13706, 14380, 11848, 14498, 15301, 15005, 14836, 14136, 14218, 14496, + 12261, 15284, 12586, 12621, 15127, 12804, 13534, 14163, 12599, 12975, 14403, 14132, 13920, 14636, 14395, 14503, + 11160, 15052, 14894, 13730, 13449, 14626, 13776, 13444, 12879, 13214, 13681, 14877, 14295, 14621, 13375, 8695, + 14774, 14179, 14221, 15341, 12290, 14558, 12476, 11462, 13847, 14278, 14174, 14092, 15192, 15254, 15205, 14700, + 14907, 10047, 12606, 12503, 14393, 14854, 14327, 15155, 13000, 10717, 15247, 13657, 13965, 15243, 14989, 13768, + 15198, 11511, 13788, 12911, 13993, 13693, 12862, 14613, 15056, 15244, 
14444, 15174, 15312, 15133, 14859, 14225, + 14558, 14003, 10672, 14388, 14487, 15001, 15022, 14634, 13742, 12429, 13937, 15102, 12090, 15326, 13650, 13590, + 13648, 10701, 14528, 15178, 14926, 14584, 13916, 13144, 10598, 15269, 14247, 12646, 15283, 14202, 14649, 14873, + 15252, 14565, 14099, 14651, 15105, 12658, 14767, 12639, 13930, 13290, 14558, 13149, 10365, 13507, 14337, 14605, + }; + uint16_t data[16 * 2 * 2 * 16] = { + 14422, 14250, 15207, 11368, 13196, 12957, 15106, 15099, 15047, 13771, 13891, 14758, 13298, 13411, 15283, 15351, + 14551, 14307, 14721, 14255, 13352, 15201, 15191, 13045, 12146, 13419, 14884, 13352, 14843, 14356, 10607, 15253, + 13040, 15055, 14563, 15154, 14639, 15242, 14828, 12673, 12310, 15149, 12333, 11282, 14004, 15172, 11527, 15094, + 14987, 13774, 14562, 12418, 15303, 13383, 15006, 11992, 13652, 14342, 15025, 13315, 13500, 12981, 13108, 14252, + 13386, 13586, 12661, 14761, 15025, 10628, 12072, 14528, 14866, 13804, 15034, 14930, 14536, 13776, 11278, 10879, + 12948, 13749, 13930, 14144, 9963, 15200, 14364, 14952, 14652, 15188, 14500, 14558, 15289, 14978, 10959, 14101, + 14725, 14941, 14435, 12561, 13832, 13489, 13186, 11553, 14927, 14861, 11684, 14518, 14331, 10694, 12473, 14447, + 14185, 14384, 14865, 12340, 13978, 14106, 12372, 14367, 10200, 15157, 13088, 14517, 15146, 14143, 14497, 13560, + 12538, 13720, 14793, 8244, 14509, 15201, 14563, 12649, 15320, 14814, 14229, 13811, 13734, 13795, 13354, 12915, + 15131, 14699, 15179, 14896, 14614, 13448, 14463, 12326, 13099, 13035, 12361, 13939, 15216, 15031, 15104, 15122, + 14008, 15061, 14954, 15103, 14619, 13134, 14321, 14898, 10583, 15066, 15013, 14557, 14566, 14025, 13341, 14381, + 12995, 13539, 14724, 14606, 13340, 13378, 13432, 15143, 13249, 13229, 13168, 13910, 14061, 14456, 14896, 12972, + 13498, 14218, 14748, 9860, 13268, 11456, 14650, 14749, 14957, 13538, 14260, 15152, 14451, 14932, 12831, 14326, + 14754, 14837, 14295, 12738, 13007, 13385, 13712, 13448, 14487, 13904, 14940, 12247, 15013, 
13784, 15169, 11543, + 13587, 11771, 12011, 15320, 13462, 14503, 14944, 12535, 11976, 12949, 11427, 12383, 14405, 13595, 15002, 14362, + 13595, 13416, 13143, 9473, 14306, 13768, 15134, 13998, 10894, 15136, 12425, 13283, 11400, 13338, 12118, 12982, + 13007, 14492, 14752, 15121, 14798, 14508, 14502, 14809, 13826, 15346, 14598, 13712, 14560, 14605, 14954, 14740, + 15351, 14657, 15047, 14243, 14234, 14986, 15288, 9009, 14717, 13685, 13416, 12043, 14874, 14362, 12565, 15139, + 13336, 13825, 11229, 13185, 14932, 13706, 14583, 15300, 15332, 14997, 11169, 15248, 14661, 9558, 13210, 13015, + 11692, 15215, 14555, 10674, 14734, 12815, 13038, 14365, 7730, 14605, 12123, 14907, 12370, 14712, 14057, 11671, + 13011, 13442, 14945, 13182, 15009, 8704, 12211, 14842, 15205, 12795, 10772, 13892, 12902, 14761, 14601, 12663, + 15351, 11442, 14669, 12929, 15084, 12574, 12783, 14385, 10838, 14537, 14440, 14713, 15140, 14316, 14694, 11250, + 15314, 14332, 14428, 15128, 12862, 15058, 14929, 14899, 8721, 14699, 14163, 14774, 14835, 14416, 14252, 12609, + 14851, 14573, 14218, 11442, 14831, 14930, 13150, 12025, 15170, 7647, 14724, 14542, 14814, 14375, 14807, 13142, + 13702, 14580, 15322, 14745, 14750, 14264, 14433, 13854, 14778, 15011, 14745, 14599, 13430, 9216, 14570, 14514, + 14399, 14942, 14509, 11151, 15125, 13241, 14844, 5053, 13906, 14498, 10895, 15031, 15220, 15172, 15160, 13782, + 11329, 13053, 11842, 15323, 12010, 13224, 12629, 15297, 12422, 11133, 9125, 11724, 14585, 14376, 14733, 14556, + 10157, 14206, 14185, 15359, 15121, 15264, 15117, 12578, 12061, 14273, 15339, 15092, 12803, 13734, 12847, 13867, + 15336, 9439, 15021, 15107, 14006, 12950, 13622, 14707, 14981, 14850, 13337, 13280, 15124, 10206, 13412, 13832, + 7336, 13280, 15245, 15190, 12154, 13669, 14612, 14388, 9114, 14866, 15180, 12447, 14803, 13802, 13786, 14692, + 11591, 12995, 14556, 14822, 14914, 15332, 13424, 14846, 13522, 14947, 9886, 15179, 11710, 14524, 12604, 13600, + 11298, 13600, 13421, 14637, 11295, 15278, 13706, 
14380, 11848, 14498, 15301, 15005, 14836, 14136, 14218, 14496, + 15303, 14455, 15079, 12787, 14613, 14302, 15036, 13536, 15243, 13739, 15259, 15099, 13211, 14770, 15097, 14402, + 13145, 13633, 10303, 14120, 14322, 9452, 13412, 14852, 15342, 12512, 12724, 14482, 14708, 13886, 14371, 13771, + 14898, 14863, 14901, 9668, 13991, 15090, 14553, 9288, 13662, 12807, 13881, 15143, 14591, 10092, 15358, 9907, + 12261, 15284, 12586, 12621, 15127, 12804, 13534, 14163, 12599, 12975, 14403, 14132, 13920, 14636, 14395, 14503, + 15204, 14955, 14621, 12097, 15083, 14119, 14814, 4005, 10998, 14856, 13380, 14647, 14540, 14008, 11346, 14363, + 13399, 14432, 13585, 15028, 14173, 14347, 15077, 13051, 12138, 14267, 11114, 13958, 14961, 13844, 15272, 14643, + 15349, 14811, 14578, 14389, 15154, 13072, 13805, 13115, 14879, 14902, 14466, 14540, 9926, 14475, 13389, 14468, + 11160, 15052, 14894, 13730, 13449, 14626, 13776, 13444, 12879, 13214, 13681, 14877, 14295, 14621, 13375, 8695, + 14313, 15108, 6776, 15180, 14647, 13917, 14629, 13412, 13480, 10439, 14776, 14563, 15328, 14777, 14436, 14902, + 12614, 13212, 13324, 15077, 14349, 14662, 14273, 10859, 5552, 10874, 13863, 14104, 13495, 14904, 11517, 15108, + 14489, 14673, 14794, 13494, 15219, 15314, 14266, 12104, 14068, 12709, 13135, 15088, 15118, 14229, 15145, 13771, + 14774, 14179, 14221, 15341, 12290, 14558, 12476, 11462, 13847, 14278, 14174, 14092, 15192, 15254, 15205, 14700, + 15255, 14241, 13324, 15082, 14738, 15265, 14657, 13571, 15280, 15082, 9338, 12341, 14991, 14847, 14948, 14037, + 14999, 13078, 15106, 14639, 13345, 6413, 13979, 15205, 14332, 12647, 13386, 15312, 10976, 15089, 14089, 14370, + 14614, 14426, 14336, 14375, 15264, 13925, 13501, 14613, 14973, 13381, 15025, 14770, 11856, 15035, 13759, 14437, + 14907, 10047, 12606, 12503, 14393, 14854, 14327, 15155, 13000, 10717, 15247, 13657, 13965, 15243, 14989, 13768, + 13624, 14947, 14968, 12058, 14060, 13434, 14349, 14972, 12716, 14287, 12350, 12619, 15263, 13831, 15160, 15032, + 8462, 
13359, 14322, 14221, 14371, 14449, 13536, 12118, 14288, 13483, 12182, 13621, 15261, 14723, 12623, 13323, + 14171, 15349, 14474, 14662, 13505, 15329, 15210, 15353, 13535, 12864, 14658, 14968, 9562, 13352, 12356, 13565, + 15198, 11511, 13788, 12911, 13993, 13693, 12862, 14613, 15056, 15244, 14444, 15174, 15312, 15133, 14859, 14225, + 14591, 12370, 13537, 11343, 13909, 12663, 15078, 14938, 14409, 13852, 12414, 13903, 13825, 13958, 10736, 11772, + 14817, 14529, 13499, 14350, 11063, 14315, 12718, 14574, 14180, 12505, 13109, 11207, 14801, 13781, 14392, 13549, + 14481, 15092, 12920, 15290, 14046, 10403, 14299, 13885, 9128, 13798, 14575, 14484, 14728, 14471, 14678, 15356, + 14558, 14003, 10672, 14388, 14487, 15001, 15022, 14634, 13742, 12429, 13937, 15102, 12090, 15326, 13650, 13590, + 13429, 13236, 15033, 13910, 15008, 14564, 14344, 14546, 15227, 14491, 14858, 14957, 12725, 13536, 13030, 12562, + 14481, 13731, 15236, 9398, 15358, 12351, 14751, 14509, 11815, 14787, 9669, 14645, 15099, 14490, 13121, 15050, + 13955, 14939, 15149, 14775, 15303, 14614, 14530, 13890, 14051, 14869, 15253, 12810, 12328, 14590, 13489, 13653, + 13648, 10701, 14528, 15178, 14926, 14584, 13916, 13144, 10598, 15269, 14247, 12646, 15283, 14202, 14649, 14873, + 15163, 13294, 15229, 15146, 11977, 14509, 15259, 12425, 15334, 13340, 15329, 15071, 14342, 14555, 14473, 15054, + 15076, 14756, 14958, 4978, 14576, 11508, 14746, 13161, 14408, 10590, 13429, 14945, 13993, 15033, 13183, 13027, + 14448, 15013, 11047, 13979, 12332, 14777, 14414, 15286, 12226, 14359, 13505, 13476, 13932, 14273, 14620, 14246, + 15252, 14565, 14099, 14651, 15105, 12658, 14767, 12639, 13930, 13290, 14558, 13149, 10365, 13507, 14337, 14605, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NHWC, {4, 1, 16, 16}, {16, 2, 2, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZNhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i 
= 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, UNSUPPORTED); +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_fp16_success_gt_cube) { + uint16_t data_4d[8 * 2 * 16 * 16] = { + 14643, 15084, 14775, 14950, 14594, 14803, 12177, 13120, 15030, 12525, 12640, 14817, 15252, 14692, 15054, 14655, + 13395, 13556, 14508, 12414, 14272, 14148, 15056, 14734, 12544, 14138, 14641, 14793, 8925, 14903, 14744, 15219, + 14346, 12140, 12310, 14303, 12541, 14031, 11419, 14800, 11586, 14817, 14949, 13869, 9911, 14768, 14748, 11469, + 8984, 13553, 15077, 11291, 14419, 14259, 12234, 15036, 14970, 14166, 13225, 15283, 11555, 10732, 14294, 13613, + 13563, 14758, 12310, 13452, 15248, 7913, 14820, 15188, 13607, 15097, 12595, 15041, 13419, 14927, 11680, 13391, + 14730, 15210, 14573, 14986, 14480, 14810, 14421, 14868, 10163, 14768, 10289, 11549, 15206, 13973, 13249, 14465, + 14496, 11267, 13298, 14689, 9725, 13593, 14101, 9950, 14135, 15068, 14225, 12521, 13628, 11982, 13805, 13082, + 15123, 14687, 8903, 14758, 15064, 14312, 14878, 13979, 14309, 13373, 14744, 14816, 15310, 8674, 9634, 12371, + 13822, 13824, 14557, 10587, 14401, 15024, 14156, 14041, 13187, 14616, 15076, 13747, 13466, 13658, 15143, 13651, + 14175, 13921, 14447, 14076, 13601, 15003, 15049, 15102, 13701, 13614, 14557, 14391, 14514, 13575, 15138, 15103, + 15112, 14971, 14817, 14402, 14761, 9003, 14242, 14980, 14496, 14497, 14609, 13874, 8555, 13947, 12563, 10897, + 14602, 14543, 14369, 14684, 15008, 14482, 14680, 14910, 14616, 13475, 15173, 14966, 14472, 14434, 13850, 14037, + 14811, 14348, 15039, 14880, 10391, 12333, 15321, 14740, 15207, 14610, 14648, 12813, 6567, 14928, 14413, 14744, + 11330, 14223, 14667, 13822, 13416, 15092, 15163, 13721, 14985, 9430, 12456, 14628, 15093, 15078, 15046, 15078, + 14990, 14651, 
14006, 13975, 15298, 14288, 14979, 13086, 14670, 13331, 12948, 14906, 15132, 15037, 14229, 14915, + 13694, 14670, 13931, 9961, 15073, 12960, 14769, 14151, 14950, 14695, 12619, 14347, 14390, 11545, 14696, 13679, + 15111, 12417, 14618, 12814, 13727, 14465, 14517, 14346, 14509, 13867, 15316, 13620, 13718, 13418, 13772, 12245, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14386, 13893, 14559, 12220, 11761, 14936, 13865, 14796, 14800, 14307, 14951, 15107, 14857, 14806, 15154, 9412, + 15010, 14921, 13512, 13793, 15309, 13425, 9729, 14167, 13513, 14683, 13483, 14709, 13920, 15026, 11308, 12400, + 15305, 14339, 15090, 15119, 12392, 15007, 15177, 14208, 15306, 13599, 13798, 14864, 11722, 13015, 14789, 14611, + 14106, 12803, 15151, 15303, 14577, 13538, 14339, 13154, 14084, 14672, 14793, 12240, 15070, 15005, 12175, 12888, + 12691, 15117, 13446, 14515, 13913, 15123, 15252, 15002, 11511, 14787, 14912, 14650, 12035, 14548, 14950, 14458, + 14888, 14514, 12791, 12573, 14604, 14992, 13541, 13780, 14397, 7631, 14614, 14986, 11108, 9225, 13665, 11943, + 12347, 13665, 13279, 14423, 12845, 13661, 15214, 15317, 14435, 12791, 13900, 15242, 15298, 13969, 14826, 12584, + 13064, 14236, 15186, 14340, 14893, 10331, 15251, 13083, 13584, 14344, 12569, 15036, 14204, 14777, 13837, 9226, + 14390, 14256, 
9829, 14674, 14495, 11709, 13436, 10770, 11661, 12662, 14120, 13109, 14908, 14266, 15293, 13191, + 15269, 12610, 11706, 15330, 15108, 13080, 13123, 15201, 13707, 10054, 15187, 14830, 9929, 14661, 13177, 14356, + 12342, 14773, 13089, 12318, 13166, 14760, 13345, 14645, 14567, 15115, 13404, 14136, 14565, 13476, 13556, 14580, + 13406, 15357, 14433, 15156, 14827, 13972, 14672, 13399, 12478, 14720, 13335, 14073, 14411, 14526, 12450, 14861, + 14134, 9725, 12565, 14915, 14003, 11550, 14061, 14535, 13853, 12453, 14490, 15016, 15297, 14140, 14761, 14240, + 13114, 14017, 15109, 14027, 14750, 15099, 14659, 15315, 15280, 13607, 15302, 12982, 14741, 13700, 14019, 14737, + 15142, 14341, 15349, 15248, 14658, 14385, 15234, 13992, 12674, 14441, 14450, 13356, 14453, 11286, 14279, 13864, + 13637, 13656, 14231, 15128, 13956, 13701, 14356, 13255, 14956, 14416, 14589, 10372, 13312, 13646, 12487, 14193, + 14248, 12300, 14051, 14149, 12979, 14556, 12855, 13393, 12614, 13063, 14960, 14242, 15221, 14739, 15318, 12088, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14043, 14715, 4461, 12136, 11320, 12141, 13421, 14144, 14654, 14438, 12365, 14905, 14475, 11313, 14675, 14024, + 15086, 14135, 14081, 12712, 13637, 14202, 15194, 10907, 14310, 13866, 12043, 14719, 12523, 12397, 11980, 14952, + 14791, 14589, 
14889, 13021, 12165, 14606, 14915, 14481, 7432, 11318, 12487, 14494, 13898, 12595, 15251, 13996, + 15238, 14606, 13433, 13580, 14933, 15357, 14675, 15004, 15062, 13963, 14702, 15011, 12695, 13389, 12565, 6232, + 14874, 14986, 13113, 12631, 14683, 12689, 13168, 14350, 12712, 14651, 12576, 14268, 14113, 13122, 12348, 14734, + 13589, 11061, 14828, 15344, 15174, 14972, 11092, 13911, 15065, 13101, 9859, 14503, 15312, 15270, 13689, 9178, + 14120, 14380, 13990, 13286, 13716, 15054, 14484, 14296, 14432, 14359, 12895, 14532, 13144, 13461, 14701, 14595, + 10068, 14653, 15162, 13017, 12396, 12356, 13637, 14958, 15264, 13340, 14541, 11381, 13584, 13979, 12390, 13560, + 14273, 12290, 14310, 14260, 14359, 11935, 14459, 15121, 11360, 10629, 15080, 10805, 14465, 15294, 11377, 13617, + 13625, 14975, 13361, 15188, 11507, 13782, 13706, 14910, 15032, 14859, 14381, 14483, 14288, 14813, 15212, 15315, + 15223, 14809, 11125, 14758, 15244, 14876, 14124, 11921, 14989, 15058, 14938, 14835, 13565, 14624, 14678, 15205, + 14567, 13504, 14861, 11531, 14618, 10146, 13621, 14624, 14849, 14853, 11429, 14339, 11584, 14625, 14717, 13815, + 14536, 14510, 13687, 13496, 14323, 14679, 14930, 13994, 14464, 13903, 13502, 15069, 14769, 15098, 8267, 14274, + 15092, 12553, 15171, 15033, 14416, 12944, 13450, 7388, 14432, 13499, 11582, 15087, 14321, 13927, 14848, 12613, + 14363, 14848, 14352, 15196, 13999, 15332, 14209, 14842, 14958, 12534, 14343, 13924, 13708, 13461, 11637, 14547, + 11127, 13365, 9418, 14569, 13315, 14468, 10755, 14684, 12758, 14080, 15053, 12528, 14937, 15230, 13987, 11713, + 14433, 13539, 13780, 7173, 14561, 12659, 12992, 11349, 12457, 14520, 14523, 13782, 12775, 14346, 15013, 13325, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11880, 11753, 6459, 14392, 13877, 15195, 14598, 14550, 13889, 14746, 11107, 13097, 14615, 9419, 13007, 13018, + 14222, 12796, 12392, 14443, 14882, 13277, 8521, 15181, 13088, 14393, 13834, 15332, 13605, 12444, 14993, 9433, + 13463, 10558, 10884, 13659, 13271, 14576, 14577, 14459, 14699, 12142, 15236, 9687, 14845, 13997, 12791, 13754, + 14956, 14343, 13564, 14853, 12144, 14074, 13545, 15078, 10321, 14195, 13709, 12695, 15156, 13280, 15276, 12365, + 15203, 12099, 13895, 13919, 14560, 14204, 13339, 14947, 13279, 14393, 14668, 14655, 14563, 15148, 15102, 15131, + 10412, 14766, 13574, 13560, 13926, 15086, 14189, 15212, 12819, 15044, 14025, 13684, 12454, 15232, 10332, 14054, + 12012, 12532, 13700, 14412, 14968, 13642, 13344, 14660, 15205, 15286, 12482, 14829, 14276, 14337, 12881, 14818, + 14140, 14478, 14003, 12802, 14959, 13420, 15192, 14788, 14340, 15351, 13404, 9944, 14488, 13117, 10405, 15095, + 13808, 12943, 14496, 12012, 15045, 14421, 13980, 14862, 15299, 10098, 15161, 10924, 11461, 14357, 14699, 14415, + 15077, 13517, 12150, 13717, 10732, 5651, 12664, 14255, 12880, 13683, 13378, 14858, 7146, 11804, 13439, 12732, + 13152, 12775, 11870, 9128, 13044, 14974, 15290, 14926, 13213, 13608, 15171, 13437, 14144, 13658, 14529, 12541, + 13217, 14336, 14399, 13413, 13503, 14854, 14557, 13883, 13508, 13327, 14788, 15249, 15292, 14663, 13973, 13029, + 14428, 14215, 12959, 15012, 14580, 14184, 14571, 12924, 14133, 14731, 13555, 14168, 14746, 10015, 15212, 14626, + 14724, 12444, 12317, 12709, 14912, 12731, 13557, 13513, 14632, 14075, 12545, 12588, 
15317, 14740, 12682, 12690, + 14808, 14686, 14691, 13338, 14594, 13349, 14735, 14704, 14174, 9442, 14914, 13437, 14599, 14152, 13606, 12522, + 14940, 13113, 14935, 14237, 13916, 14691, 14536, 15305, 14955, 12251, 15001, 13519, 13402, 14632, 13716, 14866, + 15112, 14792, 14770, 13838, 14739, 14985, 11484, 14655, 12182, 15192, 14306, 11545, 12635, 15232, 13368, 14235, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14911, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14571, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14660, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13027, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13376, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12388, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14953, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13575, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14810, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14546, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14914, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14375, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14571, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13834, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14864, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14566, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14786, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14435, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14856, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14472, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14804, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11788, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11765, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12954, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14575, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12897, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14459, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13661, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14339, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14557, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14197, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14933, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13492, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12510, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14703, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13792, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15041, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15284, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12352, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14824, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13448, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11359, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14515, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14357, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13933, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14652, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12772, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13786, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14769, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10686, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14445, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15254, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[17 * 2 * 2 * 17] = { + 14643, 15084, 14775, 14950, 14594, 14803, 12177, 13120, 15030, 12525, 12640, 14817, 15252, 14692, 15054, 14655, + 14911, 14386, 13893, 14559, 12220, 11761, 
14936, 13865, 14796, 14800, 14307, 14951, 15107, 14857, 14806, 15154, + 9412, 13694, 14043, 14715, 4461, 12136, 11320, 12141, 13421, 14144, 14654, 14438, 12365, 14905, 14475, 11313, + 14675, 14024, 14459, 11880, 11753, 6459, 14392, 13877, 15195, 14598, 14550, 13889, 14746, 11107, 13097, 14615, + 9419, 13007, 13018, 12352, 13395, 13556, 14508, 12414, 14272, 14148, 15056, 14734, 12544, 14138, 14641, 14793, + 8925, 14903, 14744, 15219, 14571, 15010, 14921, 13512, 13793, 15309, 13425, 9729, 14167, 13513, 14683, 13483, + 14709, 13920, 15026, 11308, 12400, 13834, 15086, 14135, 14081, 12712, 13637, 14202, 15194, 10907, 14310, 13866, + 12043, 14719, 12523, 12397, 11980, 14952, 13661, 14222, 12796, 12392, 14443, 14882, 13277, 8521, 15181, 13088, + 14393, 13834, 15332, 13605, 12444, 14993, 9433, 14824, 14346, 12140, 12310, 14303, 12541, 14031, 11419, 14800, + 11586, 14817, 14949, 13869, 9911, 14768, 14748, 11469, 14660, 15305, 14339, 15090, 15119, 12392, 15007, 15177, + 14208, 15306, 13599, 13798, 14864, 11722, 13015, 14789, 14611, 14864, 14791, 14589, 14889, 13021, 12165, 14606, + 14915, 14481, 7432, 11318, 12487, 14494, 13898, 12595, 15251, 13996, 14339, 13463, 10558, 10884, 13659, 13271, + 14576, 14577, 14459, 14699, 12142, 15236, 9687, 14845, 13997, 12791, 13754, 13448, 8984, 13553, 15077, 11291, + 14419, 14259, 12234, 15036, 14970, 14166, 13225, 15283, 11555, 10732, 14294, 13613, 13027, 14106, 12803, 15151, + 15303, 14577, 13538, 14339, 13154, 14084, 14672, 14793, 12240, 15070, 15005, 12175, 12888, 15179, 15238, 14606, + 13433, 13580, 14933, 15357, 14675, 15004, 15062, 13963, 14702, 15011, 12695, 13389, 12565, 6232, 14142, 14956, + 14343, 13564, 14853, 12144, 14074, 13545, 15078, 10321, 14195, 13709, 12695, 15156, 13280, 15276, 12365, 11359, + 13563, 14758, 12310, 13452, 15248, 7913, 14820, 15188, 13607, 15097, 12595, 15041, 13419, 14927, 11680, 13391, + 13376, 12691, 15117, 13446, 14515, 13913, 15123, 15252, 15002, 11511, 14787, 14912, 14650, 12035, 14548, 14950, + 
14458, 14566, 14874, 14986, 13113, 12631, 14683, 12689, 13168, 14350, 12712, 14651, 12576, 14268, 14113, 13122, + 12348, 14734, 14353, 15203, 12099, 13895, 13919, 14560, 14204, 13339, 14947, 13279, 14393, 14668, 14655, 14563, + 15148, 15102, 15131, 14515, 14730, 15210, 14573, 14986, 14480, 14810, 14421, 14868, 10163, 14768, 10289, 11549, + 15206, 13973, 13249, 14465, 12388, 14888, 14514, 12791, 12573, 14604, 14992, 13541, 13780, 14397, 7631, 14614, + 14986, 11108, 9225, 13665, 11943, 14786, 13589, 11061, 14828, 15344, 15174, 14972, 11092, 13911, 15065, 13101, + 9859, 14503, 15312, 15270, 13689, 9178, 14364, 10412, 14766, 13574, 13560, 13926, 15086, 14189, 15212, 12819, + 15044, 14025, 13684, 12454, 15232, 10332, 14054, 15269, 14496, 11267, 13298, 14689, 9725, 13593, 14101, 9950, + 14135, 15068, 14225, 12521, 13628, 11982, 13805, 13082, 15116, 12347, 13665, 13279, 14423, 12845, 13661, 15214, + 15317, 14435, 12791, 13900, 15242, 15298, 13969, 14826, 12584, 14435, 14120, 14380, 13990, 13286, 13716, 15054, + 14484, 14296, 14432, 14359, 12895, 14532, 13144, 13461, 14701, 14595, 14557, 12012, 12532, 13700, 14412, 14968, + 13642, 13344, 14660, 15205, 15286, 12482, 14829, 14276, 14337, 12881, 14818, 14357, 15123, 14687, 8903, 14758, + 15064, 14312, 14878, 13979, 14309, 13373, 14744, 14816, 15310, 8674, 9634, 12371, 14953, 13064, 14236, 15186, + 14340, 14893, 10331, 15251, 13083, 13584, 14344, 12569, 15036, 14204, 14777, 13837, 9226, 14856, 10068, 14653, + 15162, 13017, 12396, 12356, 13637, 14958, 15264, 13340, 14541, 11381, 13584, 13979, 12390, 13560, 15342, 14140, + 14478, 14003, 12802, 14959, 13420, 15192, 14788, 14340, 15351, 13404, 9944, 14488, 13117, 10405, 15095, 15224, + 13822, 13824, 14557, 10587, 14401, 15024, 14156, 14041, 13187, 14616, 15076, 13747, 13466, 13658, 15143, 13651, + 12801, 14390, 14256, 9829, 14674, 14495, 11709, 13436, 10770, 11661, 12662, 14120, 13109, 14908, 14266, 15293, + 13191, 14639, 14273, 12290, 14310, 14260, 14359, 11935, 14459, 15121, 
11360, 10629, 15080, 10805, 14465, 15294, + 11377, 13617, 14197, 13808, 12943, 14496, 12012, 15045, 14421, 13980, 14862, 15299, 10098, 15161, 10924, 11461, + 14357, 14699, 14415, 13933, 14175, 13921, 14447, 14076, 13601, 15003, 15049, 15102, 13701, 13614, 14557, 14391, + 14514, 13575, 15138, 15103, 15198, 15269, 12610, 11706, 15330, 15108, 13080, 13123, 15201, 13707, 10054, 15187, + 14830, 9929, 14661, 13177, 14356, 11209, 13625, 14975, 13361, 15188, 11507, 13782, 13706, 14910, 15032, 14859, + 14381, 14483, 14288, 14813, 15212, 15315, 14118, 15077, 13517, 12150, 13717, 10732, 5651, 12664, 14255, 12880, + 13683, 13378, 14858, 7146, 11804, 13439, 12732, 14652, 15112, 14971, 14817, 14402, 14761, 9003, 14242, 14980, + 14496, 14497, 14609, 13874, 8555, 13947, 12563, 10897, 13575, 12342, 14773, 13089, 12318, 13166, 14760, 13345, + 14645, 14567, 15115, 13404, 14136, 14565, 13476, 13556, 14580, 14472, 15223, 14809, 11125, 14758, 15244, 14876, + 14124, 11921, 14989, 15058, 14938, 14835, 13565, 14624, 14678, 15205, 14933, 13152, 12775, 11870, 9128, 13044, + 14974, 15290, 14926, 13213, 13608, 15171, 13437, 14144, 13658, 14529, 12541, 12772, 14602, 14543, 14369, 14684, + 15008, 14482, 14680, 14910, 14616, 13475, 15173, 14966, 14472, 14434, 13850, 14037, 14810, 13406, 15357, 14433, + 15156, 14827, 13972, 14672, 13399, 12478, 14720, 13335, 14073, 14411, 14526, 12450, 14861, 14804, 14567, 13504, + 14861, 11531, 14618, 10146, 13621, 14624, 14849, 14853, 11429, 14339, 11584, 14625, 14717, 13815, 13492, 13217, + 14336, 14399, 13413, 13503, 14854, 14557, 13883, 13508, 13327, 14788, 15249, 15292, 14663, 13973, 13029, 13786, + 14811, 14348, 15039, 14880, 10391, 12333, 15321, 14740, 15207, 14610, 14648, 12813, 6567, 14928, 14413, 14744, + 14546, 14134, 9725, 12565, 14915, 14003, 11550, 14061, 14535, 13853, 12453, 14490, 15016, 15297, 14140, 14761, + 14240, 11788, 14536, 14510, 13687, 13496, 14323, 14679, 14930, 13994, 14464, 13903, 13502, 15069, 14769, 15098, + 8267, 14274, 12510, 
14428, 14215, 12959, 15012, 14580, 14184, 14571, 12924, 14133, 14731, 13555, 14168, 14746, + 10015, 15212, 14626, 14769, 11330, 14223, 14667, 13822, 13416, 15092, 15163, 13721, 14985, 9430, 12456, 14628, + 15093, 15078, 15046, 15078, 14914, 13114, 14017, 15109, 14027, 14750, 15099, 14659, 15315, 15280, 13607, 15302, + 12982, 14741, 13700, 14019, 14737, 11765, 15092, 12553, 15171, 15033, 14416, 12944, 13450, 7388, 14432, 13499, + 11582, 15087, 14321, 13927, 14848, 12613, 14703, 14724, 12444, 12317, 12709, 14912, 12731, 13557, 13513, 14632, + 14075, 12545, 12588, 15317, 14740, 12682, 12690, 10686, 14990, 14651, 14006, 13975, 15298, 14288, 14979, 13086, + 14670, 13331, 12948, 14906, 15132, 15037, 14229, 14915, 14220, 15142, 14341, 15349, 15248, 14658, 14385, 15234, + 13992, 12674, 14441, 14450, 13356, 14453, 11286, 14279, 13864, 12954, 14363, 14848, 14352, 15196, 13999, 15332, + 14209, 14842, 14958, 12534, 14343, 13924, 13708, 13461, 11637, 14547, 13792, 14808, 14686, 14691, 13338, 14594, + 13349, 14735, 14704, 14174, 9442, 14914, 13437, 14599, 14152, 13606, 12522, 14445, 13694, 14670, 13931, 9961, + 15073, 12960, 14769, 14151, 14950, 14695, 12619, 14347, 14390, 11545, 14696, 13679, 14375, 13637, 13656, 14231, + 15128, 13956, 13701, 14356, 13255, 14956, 14416, 14589, 10372, 13312, 13646, 12487, 14193, 14575, 11127, 13365, + 9418, 14569, 13315, 14468, 10755, 14684, 12758, 14080, 15053, 12528, 14937, 15230, 13987, 11713, 15041, 14940, + 13113, 14935, 14237, 13916, 14691, 14536, 15305, 14955, 12251, 15001, 13519, 13402, 14632, 13716, 14866, 15254, + 15111, 12417, 14618, 12814, 13727, 14465, 14517, 14346, 14509, 13867, 15316, 13620, 13718, 13418, 13772, 12245, + 14571, 14248, 12300, 14051, 14149, 12979, 14556, 12855, 13393, 12614, 13063, 14960, 14242, 15221, 14739, 15318, + 12088, 12897, 14433, 13539, 13780, 7173, 14561, 12659, 12992, 11349, 12457, 14520, 14523, 13782, 12775, 14346, + 15013, 13325, 15284, 15112, 14792, 14770, 13838, 14739, 14985, 11484, 14655, 12182, 
15192, 14306, 11545, 12635, + 15232, 13368, 14235, 11142, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NHWC, {8, 2, 16, 16}, {17, 2, 2, 17}, DT_FLOAT16}; + TransResult result; + + FormatTransferFracZNhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_fp32_success_eq_cube) { + float data_4d[4 * 1 * 16 * 16] = { + 0.484169822867354, 0.8140947249809813, 0.4940455204015697, 0.7135851157134282, 0.8768734157203649, + 0.4184033435957544, 0.44292626120971623, 0.696532137919516, 0.9171321370698801, 0.7151284988967475, + 0.5109347861993496, 0.38142119707414746, 0.6698099769064979, 0.11820154777502989, 0.13012960479351465, + 0.16817089869937873, 0.5302578028349337, 0.7252510992951486, 0.8213662663451495, 0.255865605133453, + 0.5347808590628998, 0.03484423993654684, 0.4053151604997237, 0.1268756305743911, 0.7512214677373925, + 0.7021691682264435, 0.5790723649519143, 0.19175327358031247, 0.5748858471708752, 0.34613167229455966, + 0.9081476470039521, 0.6213341986714184, 0.8738901859075797, 0.4191500219416586, 0.9272377212501333, + 0.6088513158752427, 0.43503086957072457, 0.7568802524830642, 0.6218604254226138, 0.4893048459594117, + 0.020152542775963678, 0.7267242398249997, 0.9274966736416573, 0.49200037295381294, 0.42692100017651613, + 0.7892621068774159, 0.6845597457970014, 0.8711448933002091, 0.9718143171574413, 0.6597628763384622, + 0.34484306915489416, 0.880431818533278, 0.7955875470049112, 0.8757665484415526, 0.9915924406526361, + 0.04436704716518447, 0.02262359380765988, 0.7052472895837604, 0.7057299524522475, 0.3194782791822033, + 0.09871902909610286, 0.364619480384346, 0.06943275085154965, 0.4214138425069305, 0.6434162586445695, + 0.5323110978433342, 0.1248556733436923, 0.15902590556482255, 
0.4712411793898579, 0.7894290004428436, + 0.5032833161606196, 0.527059468896646, 0.9191006481472115, 0.25305549423863427, 0.6340006741168577, + 0.358184464955455, 0.25972912049066155, 0.18926991931768677, 0.27595176819944, 0.8161194869680272, + 0.6487804290051798, 0.17197756477570048, 0.1792393744184949, 0.7262808230390461, 0.5266352244919847, + 0.3774751618211455, 0.14059289117160334, 0.08529920580377037, 0.28344872274215105, 0.9959953800069987, + 0.08464059391988354, 0.6670563867386871, 0.4687389304005585, 0.532305594930434, 0.28410362935440214, + 0.07129098950073931, 0.90130643092616, 0.1863314734620497, 0.7182836382686819, 0.05869761827209363, + 0.43215310837091325, 0.5109770631402548, 0.8507620188714253, 0.9448034463404849, 0.8204290492967814, + 0.30175616425548013, 0.6347738558965675, 0.7115068243493056, 0.9106453434867423, 0.7081724799882917, + 0.9480983751448969, 0.30683290902116644, 0.13870348711282365, 0.882637588475232, 0.8911520456086311, + 0.13836682539246214, 0.9507965084267185, 0.5052382254338335, 0.45961247912692105, 0.979182256053379, + 0.4013111553698335, 0.7788395612306059, 0.8062745586439758, 0.19584512860236825, 0.275650733936018, + 0.23472653953495393, 0.5268011060669119, 0.11615458325117867, 0.37003145722910347, 0.7429488863605441, + 0.7499741271194941, 0.849429488346799, 0.764998070770062, 0.8627353540448067, 0.2639189465037146, + 0.18543192561486244, 0.8406963006607173, 0.29867265092172757, 0.5830874946322163, 0.7266542276089016, + 0.47732859671105865, 0.5276495401100896, 0.0285892476174624, 0.10154908386840933, 0.4088412035484068, + 0.7434536979800712, 0.9525161199007488, 0.07138799479311309, 0.8519420240978587, 0.08747097413510685, + 0.06259133820158325, 0.6102164785932812, 0.5272535740517759, 0.3882790321273053, 0.5219871401239666, + 0.08342830725989958, 0.06992376312633286, 0.45588347082683367, 0.8507181024497145, 0.8168001145505971, + 0.6288218440807817, 0.5627797837213618, 0.8642380932712407, 0.6296224131150504, 0.810297385035068, + 
0.9521660572032933, 0.006989866947720524, 0.15360690769811158, 0.609592365107798, 0.856111276058204, + 0.5569449989810563, 0.6358938242284586, 0.43974619938434945, 0.23369648280955435, 0.17610792852502044, + 0.5393700616427376, 0.14414511952710007, 0.9499336265300194, 0.6444587977800735, 0.7418623715827386, + 0.044802260433499996, 0.850581483363445, 0.05712457779883895, 0.15282854721354788, 0.3482893221081773, + 0.6247260064132429, 0.42099978778043057, 0.7479237929881165, 0.02099064732615097, 0.013098766701113651, + 0.7226056703495463, 0.04820012498442583, 0.30512165089613275, 0.7316809143395435, 0.33837084272328344, + 0.031181701520131222, 0.8998729244301411, 0.13963009532040604, 0.5524300751373528, 0.998751142570437, + 0.8079412199511319, 0.38764559152084754, 0.3210831483519665, 0.7304185865067885, 0.5822602684215884, + 0.6364590811085312, 0.10627778088019713, 0.5399985372878428, 0.3140212188001139, 0.6511943391734281, + 0.7376690823137533, 0.16614528063403933, 0.45753024916507445, 0.6563453150496379, 0.011057777761439902, + 0.0018264730571969645, 0.3140058709073794, 0.8479357203115255, 0.9718814368259422, 0.2938044793936031, + 0.389604645965164, 0.7259980798553759, 0.384597064894432, 0.6461406690633517, 0.28149855317178163, + 0.03265235541107758, 0.7517419899840844, 0.3364005732928873, 0.7329527122940036, 0.43759767242690906, + 0.3172137189567925, 0.2707815810278883, 0.6642248201697422, 0.3972911147544086, 0.009127503808928017, + 0.1234374452587974, 0.39970292448775213, 0.11989802753965029, 0.9927165644356675, 0.8609568773974031, + 0.8160834394485424, 0.8645970999084115, 0.43923002522405574, 0.3760052855173096, 0.08902326927063253, + 0.09948678314542847, 0.3578210615036086, 0.8485940282988038, 0.2516468752579376, 0.7365499301475807, + 0.5612758649671108, 0.7859804827616864, 0.8851363067086282, 0.8935474690220618, 0.5336384648157917, + 0.23839322063516444, 0.8176105543815939, 0.629909392274251, 0.31313221345906006, 0.3221421451570968, + 0.5818901337492731, 
0.9534404285747669, 0.30689058064437647, 0.7213959303267752, 0.6899507455723445, + 0.1145659733805463, 0.36766351628538607, 0.882482457649216, 0.8273160847663066, 0.344437602133445, + 0.6831369390963725, 0.15697402810153005, 0.7568275537113736, 0.3692026509026701, 0.25344774469478004, + 0.24956690401767134, 0.012836876914660733, 0.985850540555467, 0.8626773871697432, 0.5476182090476109, + 0.3623970160812814, 0.9376822616892581, 0.14087495346759915, 0.23523285339278033, 0.037762342255319714, + 0.020689168902525368, 0.6838812404349178, 0.5299351074122371, 0.7796551239198329, 0.46483996491976176, + 0.3896989084777407, 0.0246526241822681, 0.150842690954615, 0.2168947002788526, 0.1149972560028748, + 0.8984092864643223, 0.12025595226036978, 0.39457447774297594, 0.27976746576883327, 0.9120741897014442, + 0.07701027303624552, 0.8145938381183272, 0.988408599764702, 0.9891205523056846, 0.38958399171559976, + 0.19562724561171574, 0.7406668247925765, 0.5176452064093667, 0.1833071142826258, 0.981011435762899, + 0.1038485455897169, 0.6360424773286126, 0.07505098129761023, 0.7110748432628814, 0.23864180641973576, + 0.6311090911472507, 0.2898434465648123, 0.024685643880116404, 0.1206400134468234, 0.9642954826184025, + 0.1307237743057954, 0.3682368837401174, 0.10696899485835165, 0.4381449790035217, 0.8778405603604115, + 0.33026410718319044, 0.43155412970009344, 0.688941263482083, 0.6484615948673739, 0.20103619018128005, + 0.08393097766033852, 0.8913511709861782, 0.39172683394484564, 0.8390068848742098, 0.8242315116115289, + 0.016933814177084616, 0.09858800624814978, 0.4866184190503341, 0.5797421505352837, 0.7275188051657857, + 0.8257101240399874, 0.2805336430088575, 0.5360449509204634, 0.48837931206904084, 0.9312020866617485, + 0.7410346712070657, 0.08381751355840361, 0.622083699685216, 0.5182872441017575, 0.20300436297731594, + 0.8408847205120992, 0.4065634267823589, 0.538625415898359, 0.24508951206516771, 0.6706828695669036, + 0.8841876638401157, 0.14646094171092727, 
0.0873651816168074, 0.3789152991251008, 0.7514417784728014, + 0.36660529419052745, 0.9058668780554411, 0.3905648290701539, 0.46346477300678546, 0.1784939685332635, + 0.5505805956291808, 0.57458139202536, 0.37655520125792563, 0.21289003081542524, 0.41033428588999377, + 0.13922941757955198, 0.7339905698509648, 0.6420365465526778, 0.5935365043786606, 0.031448611228246826, + 0.6446584269225506, 0.3168819180982111, 0.6919296394674304, 0.7611936857915317, 0.6581989409515427, + 0.7119379353798316, 0.19877568188642458, 0.29773337106911635, 0.6353172249870478, 0.7355153876080694, + 0.5965273658488094, 0.9873670681950693, 0.4231813740764955, 0.2367076098094284, 0.7766733432911771, + 0.6429580955121145, 0.8307521364617134, 0.6525257497726437, 0.6857387208240013, 0.6812935881218981, + 0.24823806562055328, 0.23222434233678046, 0.14734449630794566, 0.8593695744180271, 0.9521390092531038, + 0.814592050185302, 0.8530358400139052, 0.7553391685463368, 0.467322607265681, 0.40821268238278274, + 0.15224535394276528, 0.7721886316195505, 0.3188390117882929, 0.18861616707188456, 0.3183899178336651, + 0.14272447128849497, 0.490605601502897, 0.046383765771242036, 0.16415576287753442, 0.22888703447357206, + 0.5375730996026021, 0.535512670941786, 0.5912839261540107, 0.6983894048201051, 0.15323404928308648, + 0.8182668948670956, 0.29446560941893807, 0.418354823269551, 0.21870888711201486, 0.6424499336686369, + 0.3214700322107067, 0.24736247781577525, 0.7305497857337323, 0.568789544871731, 0.6533320204219322, + 0.33898139527730176, 0.16552403626291268, 0.10248361115345117, 0.8892140389602003, 0.9313325327216908, + 0.7919070208756257, 0.0708315749326881, 0.2188073927368014, 0.8127787161494373, 0.6257366952292145, + 0.6924998870427207, 0.26793105051769306, 0.7826490483783863, 0.5966997551066409, 0.2129270109769711, + 0.44769099683243785, 0.7574472416898225, 0.26729283252732317, 0.4140922332840643, 0.32495829469129234, + 0.7880364474443821, 0.6521293980091717, 0.23933614126005887, 
0.40069381935472004, 0.3253434499214978, + 0.43850426563837475, 0.3582655821803802, 0.02315611544237217, 0.7562158058764148, 0.7040259198312322, + 0.5059192662520398, 0.052574245106938755, 0.0044571360420935235, 0.19994806707117685, 0.17143060225012974, + 0.7905787674108862, 0.700764812184617, 0.9178067635156595, 0.007883547902275412, 0.04345877753973626, + 0.40662108491778937, 0.32503174588972183, 0.05007085167672787, 0.9581958519270192, 0.20437076813207167, + 0.2397322863247101, 0.2432309640469006, 0.945083897778037, 0.715659408560469, 0.24955287567344286, + 0.28473276918917545, 0.5837263814586879, 0.2741346897790268, 0.5647425538488864, 0.969909805392653, + 0.7261702056098153, 0.5430436390733616, 0.08333922871976862, 0.5609888136600801, 0.014806506476614079, + 0.9455759599914065, 0.3193070470406175, 0.08542679684744281, 0.6744677936417323, 0.27782578504968625, + 0.29358418856919855, 0.23213589762876796, 0.005758486459847889, 0.36868262474116764, 0.264207113494464, + 0.36157643004033746, 0.9637257888559727, 0.5737186764359195, 0.5315516381845716, 0.5101088363620886, + 0.039248651321625805, 0.9681240024592983, 0.08677429466572284, 0.9518099421221371, 0.7952823450404229, + 0.05713730664487837, 0.17614720504232317, 0.5864853334395049, 0.5355917571532767, 0.14016085457936855, + 0.4699268852262969, 0.588214580416792, 0.7315541026108536, 0.2615939952632047, 0.6940338433274365, + 0.9654764709266905, 0.17552901543503086, 0.2673106011552777, 0.16278327321010144, 0.2829647956459945, + 0.027781103679172303, 0.9715339351098571, 0.5104277134265274, 0.26688023346613854, 0.7675111602635859, + 0.8456916770759129, 0.9418274416132982, 0.4082337424410951, 0.2966721771835694, 0.4702670949658979, + 0.6827917396699957, 0.34049137335485646, 0.979883320082454, 0.9532270119107664, 0.8572037348042103, + 0.848146567657908, 0.431869811030539, 0.7941580229111237, 0.3436486153857896, 0.7273788931852989, + 0.49224680779113494, 0.3862553126741952, 0.9807757900646529, 0.701979947074233, 
0.26056383661375004, + 0.6931835277270919, 0.7067946120442928, 0.4582357436150932, 0.11829934680864151, 0.2095153024553008, + 0.9185955528449318, 0.12823153669882037, 0.7424605800987469, 0.25214577369248303, 0.9208309549123908, + 0.28212583854425577, 0.33137007225108495, 0.24341818673228321, 0.2619469700798778, 0.28262355294589947, + 0.7755838712533507, 0.4053638246615504, 0.7976887925976534, 0.5522240670284019, 0.7227130132090629, + 0.5864070673077637, 0.42782888384518447, 0.28679923903462323, 0.16844780802036896, 0.17193836732536305, + 0.007199948294376979, 0.8432770047003173, 0.5329394715412086, 0.9027093385378767, 0.13464647139460417, + 0.8011185435869719, 0.8299531042565435, 0.16599661763813744, 0.6152441825738123, 0.25151020323522655, + 0.447172104562374, 0.5702866695855857, 0.8192062209036824, 0.23552991431295267, 0.32632467763985884, + 0.038987953903722006, 0.11718840620381377, 0.9992640055987563, 0.5148951028222453, 0.15248694988722633, + 0.6534611001367189, 0.031062641103680733, 0.8694831172503376, 0.24862657671809274, 0.6479564397956218, + 0.504649233907658, 0.07716366084071735, 0.6049908845448526, 0.7137870858666769, 0.7054652650387054, + 0.36350442596954213, 0.8795945111811084, 0.582926532185696, 0.8714877396827005, 0.06711091117831969, + 0.2982765088342778, 0.3329050905695813, 0.4865987400975561, 0.6201284834454466, 0.46275286462889154, + 0.8851839278084489, 0.9999593670233206, 0.2758236652060908, 0.17890347032894782, 0.048086683036078326, + 0.28413000356301654, 0.9536736877854535, 0.44798896242923036, 0.29983211622633354, 0.6784308970797828, + 0.08779676069610531, 0.42626312256206, 0.22394679796804695, 0.9244223195097627, 0.15901668161462468, + 0.7618146378555393, 0.07687698303830115, 0.1929344802840327, 0.7553281474784295, 0.8025581206851257, + 0.14022257845130615, 0.9138712084521406, 0.8040597327540038, 0.6328239965840534, 0.6024965111935174, + 0.5589881068558278, 0.048191039611119035, 0.1404201318896956, 0.40850427818021384, 0.7203622066092176, + 
0.8471988115463831, 0.8860087660548518, 0.45971281262939667, 0.08887747998382489, 0.9740387490405953, + 0.8585000450094692, 0.36058899298739555, 0.4278199731132287, 0.8375237958882933, 0.01417622872482538, + 0.16029654637258495, 0.057127838376448525, 0.35749934689051033, 0.2930410231839118, 0.8409144982684501, + 0.07593293596409612, 0.356635418237763, 0.226719753247275, 0.0007109181986780788, 0.7184665789785081, + 0.395824938261763, 0.4867276192621923, 0.5172670089066486, 0.5841508888388441, 0.08047869657287621, + 0.9482746906157185, 0.41294535716449154, 0.19159501891328978, 0.5578510869508337, 0.29546640911784916, + 0.2869367181145863, 0.7294147056255083, 0.3991485374776169, 0.4445980140617459, 0.6662896463638776, + 0.8614878148940442, 0.1808774427317561, 0.4614411389376568, 0.3303364346947858, 0.8028852020786104, + 0.5169929310162137, 0.14136531759583693, 0.9831280581298246, 0.5630783736198899, 0.8608024828157775, + 0.3931840317246529, 0.022722258330870937, 0.1056061545184902, 0.6060652796018042, 0.3352528944308385, + 0.8977154563912682, 0.7221307220260405, 0.46922215009200297, 0.4677265831501206, 0.49431288865022915, + 0.42276247133689504, 0.6788161518073327, 0.04439101827129144, 0.028811435615006542, 0.6839680845965146, + 0.9986738127631021, 0.07122924252967722, 0.6409050009983894, 0.8555552153820654, 0.9370992032325905, + 0.7376153474252244, 0.05650174272501618, 0.9555164199341558, 0.24096140966133506, 0.15235115309934777, + 0.2227762469886192, 0.7035491425028773, 0.4883693004134255, 0.4991880671239809, 0.2038723768682421, + 0.4424646154063001, 0.42947559846701466, 0.15496718950982546, 0.8613665468468363, 0.6173709310699306, + 0.34240192063131025, 0.17697172987534104, 0.05400248287734222, 0.47400410893135847, 0.5657413124334275, + 0.6886788284227651, 0.693192811671234, 0.5531430026891606, 0.766990407259576, 0.8518985685483231, + 0.5389165861980743, 0.9950513588111255, 0.09523381762884475, 0.7696317754383216, 0.6698100854232568, + 0.43625543466202976, 
0.2330898194698613, 0.4127476057822641, 0.9294766285644805, 0.05680528782969185, + 0.6537538588698784, 0.17396071383121814, 0.9595723514929978, 0.42975991044177986, 0.5431447973683532, + 0.8833668707212631, 0.26133302914884515, 0.14661930610537355, 0.7433132788379365, 0.3574322869140888, + 0.2341799637707056, 0.8349985525202629, 0.8160746720387089, 0.6407640183325787, 0.6197726260454984, + 0.41237945120152153, 0.1830987925080485, 0.7553599629295343, 0.05454691882123808, 0.5562940920005512, + 0.5102400141204675, 0.9541297229195699, 0.5577971859903864, 0.2938597050255226, 0.8403402780212583, + 0.47661930123442686, 0.4103920153350502, 0.3119461029229196, 0.3801380305294956, 0.3434681831027364, + 0.7197973788263885, 0.0987748411433772, 0.01388200527143002, 0.1651316494632148, 0.9141734949661398, + 0.2719071586501257, 0.711375764410416, 0.3627408492437765, 0.18177223898925443, 0.7975651507116626, + 0.6859695133005402, 0.990527721423081, 0.08901845138285569, 0.47695118798375047, 0.4581629061404291, + 0.6647405334731831, 0.4779626404166062, 0.5638043017770987, 0.3701472211861363, 0.3793667615579185, + 0.49023578639191434, 0.5453313612019725, 0.580306136685107, 0.9098682664715276, 0.4207018293353191, + 0.741785389613043, 0.9262602540365329, 0.703428108041273, 0.8044195024538315, 0.27721795146313744, + 0.1630139202426243, 0.34579225120146284, 0.11553068934190114, 0.5928663617496019, 0.8952425964480728, + 0.47925358700494936, 0.345789231337841, 0.3110153026634752, 0.4628526273295195, 0.39546232211152244, + 0.04056519079629961, 0.6268438803068547, 0.8592557500599268, 0.9575425033955798, 0.3502505288978284, + 0.059258039358505155, 0.7518021608110209, 0.8739573910743396, 0.7067417457943123, 0.23881851357439288, + 0.5516104950394077, 0.16949728066552805, 0.17699999941441635, 0.3259215888704792, 0.6829558794210719, + 0.8768342657870796, 0.0828341270227837, 0.7034737450184155, 0.546752909248851, 0.7267979136936736, + 0.03712586684634822, 0.03808220856446631, 0.4344229625584559, 
0.5073943443532393, 0.0876469591985547, + 0.5479537630990768, 0.15022676424664938, 0.7065732472092339, 0.5267637826119574, 0.49239129236901735, + 0.8218857103459851, 0.7979465301023618, 0.029548757715175067, 0.6932694244947931, 0.5595855181231474, + 0.7570733169968579, 0.3926838954245869, 0.06662638382372121, 0.7354175771053119, 0.6689967299755643, + 0.1503851907294197, 0.09334662866451149, 0.6985469006194642, 0.3853073559702226, 0.7598678794046461, + 0.8430367249722724, 0.09609005474058485, 0.3327016836424206, 0.5147633020947202, 0.9849763413326892, + 0.45758533152883063, 0.5410627384028919, 0.8101421663582148, 0.4745789007815937, 0.9695833469428511, + 0.7406298601731718, 0.8928719524735501, 0.07408106888379695, 0.48443913560921603, 0.9875039549398698, + 0.9207282373994886, 0.916412184605058, 0.5325664548137249, 0.5186829523993904, 0.0011137436559524039, + 0.7348700397039633, 0.09290265494838656, 0.23580663015210346, 0.35820294378273676, 0.7639205360566864, + 0.9465492151069357, 0.4252441824179847, 0.5316291043266255, 0.8122536967408113, 0.5789541420307059, + 0.22093739858997807, 0.721478902701215, 0.6885946622229991, 0.17835185715057533, 0.7238629983060336, + 0.20125336494303825, 0.16681217791113911, 0.9318773298015346, 0.1515859497360037, 0.21766180447529104, + 0.4792250665535124, 0.24411300991628737, 0.3217581574119305, 0.8390359459774884, 0.9659682387174011, + 0.40034412355266336, 0.5255065433133791, 0.42837064152809445, 0.7654114974398095, 0.3865006028597384, + 0.24734575434479678, 0.41425008509412264, 0.8361654352683431, 0.4321112716511004, 0.011850429412228025, + 0.4725427310129483, 0.13015593957624672, 0.4613391477382468, 0.6411806951208191, 0.5748219462376235, + 0.7676968075086735, 0.4551543834911985, 0.3981321361133089, 0.5537117150328572, 0.640188645994515, + 0.8733008957299926, 0.7680564850159917, 0.591855214421642, 0.7758621954461007, 0.9883180605472691, + 0.7032276300145796, 0.31585307578954147, 0.28146832444773495, 0.9471559002035407, 
0.44047203430454707, + 0.29854162639920856, 0.07733153530994863, 0.8953385322255972, 0.8718260794429866, 0.8244843036717918, + 0.9473049441286394, 0.13636726569332214, 0.9029170461356006, 0.2711524085545959, 0.008956533117231325, + 0.14719117410301985, 0.7352499716127123, 0.9005647194328514, 0.04239801036045521, 0.32113153439789777, + 0.23008784431942686, 0.25059835793851803, 0.7821196322936473, 0.3571983300993258, 0.2624691465092742, + 0.8018016489757368, 0.5248393284810376, 0.6977900148749919, 0.33093445127674614, 0.7554471103150353, + 0.7756032542183166, 0.006225581322849116, 0.4008868869085592, 0.20812329467775148, 0.17556686644385078, + 0.6096597423735237, 0.6191160871486041, 0.7349303921541335, 0.9192496344735487, 0.24229302674522868, + 0.9899032791595263, 0.29044779138561216, 0.8321620650117503, 0.7598221463622512, 0.4229989353498531, + 0.9387553475507483, 0.9984378083937185, 0.8151195733132465, 0.7295039076200354, 0.0922729891266788, + 0.8668318130738214, 0.23372381782403595, 0.03956728658695974, 0.9606270879918739, 0.8167629519087327, + 0.6627590908369925, 0.3128149140030476, 0.6317394199057031, 0.12021250922401605, 0.42604564092211705, + 0.3977230376064276, 0.022079790505487362, 0.37848317176894164, 0.46852397650558253, 0.20735781040531553, + 0.8524713092661131, 0.44086749842642614, 0.9183713393663233, 0.9450411621274358, 0.697302770534409, + 0.9135605184069232, 0.09015468169209084, 0.704151745426799, 0.5400455593098272, 0.5480655211682695, + 0.30738071301825654, 0.15067120555532854, 0.925682378381145, 0.3371623491173492, 0.9597353973741677, + 0.9125053638178813, 0.5331026395371243, 0.7058339751156828, 0.28253557117321937, 0.7269649285767406, + 0.12353532557783586, 0.4491118104036773, 0.6637148382841026, 0.6351120142249237, 0.864473727452456, + 0.8265584180895602, 0.6156722515329398, 0.8643555219638754, 0.6571916479877694, + }; + float data[2 * 2 * 16 * 16] = { + 0.484169822867354, 0.8140947249809813, 0.4940455204015697, 0.7135851157134282, 
0.8768734157203649, + 0.4184033435957544, 0.44292626120971623, 0.696532137919516, 0.9171321370698801, 0.7151284988967475, + 0.5109347861993496, 0.38142119707414746, 0.6698099769064979, 0.11820154777502989, 0.13012960479351465, + 0.16817089869937873, 0.8176105543815939, 0.629909392274251, 0.31313221345906006, 0.3221421451570968, + 0.5818901337492731, 0.9534404285747669, 0.30689058064437647, 0.7213959303267752, 0.6899507455723445, + 0.1145659733805463, 0.36766351628538607, 0.882482457649216, 0.8273160847663066, 0.344437602133445, + 0.6831369390963725, 0.15697402810153005, 0.5864853334395049, 0.5355917571532767, 0.14016085457936855, + 0.4699268852262969, 0.588214580416792, 0.7315541026108536, 0.2615939952632047, 0.6940338433274365, + 0.9654764709266905, 0.17552901543503086, 0.2673106011552777, 0.16278327321010144, 0.2829647956459945, + 0.027781103679172303, 0.9715339351098571, 0.5104277134265274, 0.2938597050255226, 0.8403402780212583, + 0.47661930123442686, 0.4103920153350502, 0.3119461029229196, 0.3801380305294956, 0.3434681831027364, + 0.7197973788263885, 0.0987748411433772, 0.01388200527143002, 0.1651316494632148, 0.9141734949661398, + 0.2719071586501257, 0.711375764410416, 0.3627408492437765, 0.18177223898925443, 0.5302578028349337, + 0.7252510992951486, 0.8213662663451495, 0.255865605133453, 0.5347808590628998, 0.03484423993654684, + 0.4053151604997237, 0.1268756305743911, 0.7512214677373925, 0.7021691682264435, 0.5790723649519143, + 0.19175327358031247, 0.5748858471708752, 0.34613167229455966, 0.9081476470039521, 0.6213341986714184, + 0.7568275537113736, 0.3692026509026701, 0.25344774469478004, 0.24956690401767134, 0.012836876914660733, + 0.985850540555467, 0.8626773871697432, 0.5476182090476109, 0.3623970160812814, 0.9376822616892581, + 0.14087495346759915, 0.23523285339278033, 0.037762342255319714, 0.020689168902525368, 0.6838812404349178, + 0.5299351074122371, 0.26688023346613854, 0.7675111602635859, 0.8456916770759129, 0.9418274416132982, + 
0.4082337424410951, 0.2966721771835694, 0.4702670949658979, 0.6827917396699957, 0.34049137335485646, + 0.979883320082454, 0.9532270119107664, 0.8572037348042103, 0.848146567657908, 0.431869811030539, + 0.7941580229111237, 0.3436486153857896, 0.7975651507116626, 0.6859695133005402, 0.990527721423081, + 0.08901845138285569, 0.47695118798375047, 0.4581629061404291, 0.6647405334731831, 0.4779626404166062, + 0.5638043017770987, 0.3701472211861363, 0.3793667615579185, 0.49023578639191434, 0.5453313612019725, + 0.580306136685107, 0.9098682664715276, 0.4207018293353191, 0.8738901859075797, 0.4191500219416586, + 0.9272377212501333, 0.6088513158752427, 0.43503086957072457, 0.7568802524830642, 0.6218604254226138, + 0.4893048459594117, 0.020152542775963678, 0.7267242398249997, 0.9274966736416573, 0.49200037295381294, + 0.42692100017651613, 0.7892621068774159, 0.6845597457970014, 0.8711448933002091, 0.7796551239198329, + 0.46483996491976176, 0.3896989084777407, 0.0246526241822681, 0.150842690954615, 0.2168947002788526, + 0.1149972560028748, 0.8984092864643223, 0.12025595226036978, 0.39457447774297594, 0.27976746576883327, + 0.9120741897014442, 0.07701027303624552, 0.8145938381183272, 0.988408599764702, 0.9891205523056846, + 0.7273788931852989, 0.49224680779113494, 0.3862553126741952, 0.9807757900646529, 0.701979947074233, + 0.26056383661375004, 0.6931835277270919, 0.7067946120442928, 0.4582357436150932, 0.11829934680864151, + 0.2095153024553008, 0.9185955528449318, 0.12823153669882037, 0.7424605800987469, 0.25214577369248303, + 0.9208309549123908, 0.741785389613043, 0.9262602540365329, 0.703428108041273, 0.8044195024538315, + 0.27721795146313744, 0.1630139202426243, 0.34579225120146284, 0.11553068934190114, 0.5928663617496019, + 0.8952425964480728, 0.47925358700494936, 0.345789231337841, 0.3110153026634752, 0.4628526273295195, + 0.39546232211152244, 0.04056519079629961, 0.9718143171574413, 0.6597628763384622, 0.34484306915489416, + 0.880431818533278, 0.7955875470049112, 
0.8757665484415526, 0.9915924406526361, 0.04436704716518447, + 0.02262359380765988, 0.7052472895837604, 0.7057299524522475, 0.3194782791822033, 0.09871902909610286, + 0.364619480384346, 0.06943275085154965, 0.4214138425069305, 0.38958399171559976, 0.19562724561171574, + 0.7406668247925765, 0.5176452064093667, 0.1833071142826258, 0.981011435762899, 0.1038485455897169, + 0.6360424773286126, 0.07505098129761023, 0.7110748432628814, 0.23864180641973576, 0.6311090911472507, + 0.2898434465648123, 0.024685643880116404, 0.1206400134468234, 0.9642954826184025, 0.28212583854425577, + 0.33137007225108495, 0.24341818673228321, 0.2619469700798778, 0.28262355294589947, 0.7755838712533507, + 0.4053638246615504, 0.7976887925976534, 0.5522240670284019, 0.7227130132090629, 0.5864070673077637, + 0.42782888384518447, 0.28679923903462323, 0.16844780802036896, 0.17193836732536305, 0.007199948294376979, + 0.6268438803068547, 0.8592557500599268, 0.9575425033955798, 0.3502505288978284, 0.059258039358505155, + 0.7518021608110209, 0.8739573910743396, 0.7067417457943123, 0.23881851357439288, 0.5516104950394077, + 0.16949728066552805, 0.17699999941441635, 0.3259215888704792, 0.6829558794210719, 0.8768342657870796, + 0.0828341270227837, 0.6434162586445695, 0.5323110978433342, 0.1248556733436923, 0.15902590556482255, + 0.4712411793898579, 0.7894290004428436, 0.5032833161606196, 0.527059468896646, 0.9191006481472115, + 0.25305549423863427, 0.6340006741168577, 0.358184464955455, 0.25972912049066155, 0.18926991931768677, + 0.27595176819944, 0.8161194869680272, 0.1307237743057954, 0.3682368837401174, 0.10696899485835165, + 0.4381449790035217, 0.8778405603604115, 0.33026410718319044, 0.43155412970009344, 0.688941263482083, + 0.6484615948673739, 0.20103619018128005, 0.08393097766033852, 0.8913511709861782, 0.39172683394484564, + 0.8390068848742098, 0.8242315116115289, 0.016933814177084616, 0.8432770047003173, 0.5329394715412086, + 0.9027093385378767, 0.13464647139460417, 0.8011185435869719, 
0.8299531042565435, 0.16599661763813744, + 0.6152441825738123, 0.25151020323522655, 0.447172104562374, 0.5702866695855857, 0.8192062209036824, + 0.23552991431295267, 0.32632467763985884, 0.038987953903722006, 0.11718840620381377, 0.7034737450184155, + 0.546752909248851, 0.7267979136936736, 0.03712586684634822, 0.03808220856446631, 0.4344229625584559, + 0.5073943443532393, 0.0876469591985547, 0.5479537630990768, 0.15022676424664938, 0.7065732472092339, + 0.5267637826119574, 0.49239129236901735, 0.8218857103459851, 0.7979465301023618, 0.029548757715175067, + 0.6487804290051798, 0.17197756477570048, 0.1792393744184949, 0.7262808230390461, 0.5266352244919847, + 0.3774751618211455, 0.14059289117160334, 0.08529920580377037, 0.28344872274215105, 0.9959953800069987, + 0.08464059391988354, 0.6670563867386871, 0.4687389304005585, 0.532305594930434, 0.28410362935440214, + 0.07129098950073931, 0.09858800624814978, 0.4866184190503341, 0.5797421505352837, 0.7275188051657857, + 0.8257101240399874, 0.2805336430088575, 0.5360449509204634, 0.48837931206904084, 0.9312020866617485, + 0.7410346712070657, 0.08381751355840361, 0.622083699685216, 0.5182872441017575, 0.20300436297731594, + 0.8408847205120992, 0.4065634267823589, 0.9992640055987563, 0.5148951028222453, 0.15248694988722633, + 0.6534611001367189, 0.031062641103680733, 0.8694831172503376, 0.24862657671809274, 0.6479564397956218, + 0.504649233907658, 0.07716366084071735, 0.6049908845448526, 0.7137870858666769, 0.7054652650387054, + 0.36350442596954213, 0.8795945111811084, 0.582926532185696, 0.6932694244947931, 0.5595855181231474, + 0.7570733169968579, 0.3926838954245869, 0.06662638382372121, 0.7354175771053119, 0.6689967299755643, + 0.1503851907294197, 0.09334662866451149, 0.6985469006194642, 0.3853073559702226, 0.7598678794046461, + 0.8430367249722724, 0.09609005474058485, 0.3327016836424206, 0.5147633020947202, 0.90130643092616, + 0.1863314734620497, 0.7182836382686819, 0.05869761827209363, 0.43215310837091325, 
0.5109770631402548, + 0.8507620188714253, 0.9448034463404849, 0.8204290492967814, 0.30175616425548013, 0.6347738558965675, + 0.7115068243493056, 0.9106453434867423, 0.7081724799882917, 0.9480983751448969, 0.30683290902116644, + 0.538625415898359, 0.24508951206516771, 0.6706828695669036, 0.8841876638401157, 0.14646094171092727, + 0.0873651816168074, 0.3789152991251008, 0.7514417784728014, 0.36660529419052745, 0.9058668780554411, + 0.3905648290701539, 0.46346477300678546, 0.1784939685332635, 0.5505805956291808, 0.57458139202536, + 0.37655520125792563, 0.8714877396827005, 0.06711091117831969, 0.2982765088342778, 0.3329050905695813, + 0.4865987400975561, 0.6201284834454466, 0.46275286462889154, 0.8851839278084489, 0.9999593670233206, + 0.2758236652060908, 0.17890347032894782, 0.048086683036078326, 0.28413000356301654, 0.9536736877854535, + 0.44798896242923036, 0.29983211622633354, 0.9849763413326892, 0.45758533152883063, 0.5410627384028919, + 0.8101421663582148, 0.4745789007815937, 0.9695833469428511, 0.7406298601731718, 0.8928719524735501, + 0.07408106888379695, 0.48443913560921603, 0.9875039549398698, 0.9207282373994886, 0.916412184605058, + 0.5325664548137249, 0.5186829523993904, 0.0011137436559524039, 0.13870348711282365, 0.882637588475232, + 0.8911520456086311, 0.13836682539246214, 0.9507965084267185, 0.5052382254338335, 0.45961247912692105, + 0.979182256053379, 0.4013111553698335, 0.7788395612306059, 0.8062745586439758, 0.19584512860236825, + 0.275650733936018, 0.23472653953495393, 0.5268011060669119, 0.11615458325117867, 0.21289003081542524, + 0.41033428588999377, 0.13922941757955198, 0.7339905698509648, 0.6420365465526778, 0.5935365043786606, + 0.031448611228246826, 0.6446584269225506, 0.3168819180982111, 0.6919296394674304, 0.7611936857915317, + 0.6581989409515427, 0.7119379353798316, 0.19877568188642458, 0.29773337106911635, 0.6353172249870478, + 0.6784308970797828, 0.08779676069610531, 0.42626312256206, 0.22394679796804695, 0.9244223195097627, + 
0.15901668161462468, 0.7618146378555393, 0.07687698303830115, 0.1929344802840327, 0.7553281474784295, + 0.8025581206851257, 0.14022257845130615, 0.9138712084521406, 0.8040597327540038, 0.6328239965840534, + 0.6024965111935174, 0.7348700397039633, 0.09290265494838656, 0.23580663015210346, 0.35820294378273676, + 0.7639205360566864, 0.9465492151069357, 0.4252441824179847, 0.5316291043266255, 0.8122536967408113, + 0.5789541420307059, 0.22093739858997807, 0.721478902701215, 0.6885946622229991, 0.17835185715057533, + 0.7238629983060336, 0.20125336494303825, 0.37003145722910347, 0.7429488863605441, 0.7499741271194941, + 0.849429488346799, 0.764998070770062, 0.8627353540448067, 0.2639189465037146, 0.18543192561486244, + 0.8406963006607173, 0.29867265092172757, 0.5830874946322163, 0.7266542276089016, 0.47732859671105865, + 0.5276495401100896, 0.0285892476174624, 0.10154908386840933, 0.7355153876080694, 0.5965273658488094, + 0.9873670681950693, 0.4231813740764955, 0.2367076098094284, 0.7766733432911771, 0.6429580955121145, + 0.8307521364617134, 0.6525257497726437, 0.6857387208240013, 0.6812935881218981, 0.24823806562055328, + 0.23222434233678046, 0.14734449630794566, 0.8593695744180271, 0.9521390092531038, 0.5589881068558278, + 0.048191039611119035, 0.1404201318896956, 0.40850427818021384, 0.7203622066092176, 0.8471988115463831, + 0.8860087660548518, 0.45971281262939667, 0.08887747998382489, 0.9740387490405953, 0.8585000450094692, + 0.36058899298739555, 0.4278199731132287, 0.8375237958882933, 0.01417622872482538, 0.16029654637258495, + 0.16681217791113911, 0.9318773298015346, 0.1515859497360037, 0.21766180447529104, 0.4792250665535124, + 0.24411300991628737, 0.3217581574119305, 0.8390359459774884, 0.9659682387174011, 0.40034412355266336, + 0.5255065433133791, 0.42837064152809445, 0.7654114974398095, 0.3865006028597384, 0.24734575434479678, + 0.41425008509412264, 0.4088412035484068, 0.7434536979800712, 0.9525161199007488, 0.07138799479311309, + 0.8519420240978587, 
0.08747097413510685, 0.06259133820158325, 0.6102164785932812, 0.5272535740517759, + 0.3882790321273053, 0.5219871401239666, 0.08342830725989958, 0.06992376312633286, 0.45588347082683367, + 0.8507181024497145, 0.8168001145505971, 0.814592050185302, 0.8530358400139052, 0.7553391685463368, + 0.467322607265681, 0.40821268238278274, 0.15224535394276528, 0.7721886316195505, 0.3188390117882929, + 0.18861616707188456, 0.3183899178336651, 0.14272447128849497, 0.490605601502897, 0.046383765771242036, + 0.16415576287753442, 0.22888703447357206, 0.5375730996026021, 0.057127838376448525, 0.35749934689051033, + 0.2930410231839118, 0.8409144982684501, 0.07593293596409612, 0.356635418237763, 0.226719753247275, + 0.0007109181986780788, 0.7184665789785081, 0.395824938261763, 0.4867276192621923, 0.5172670089066486, + 0.5841508888388441, 0.08047869657287621, 0.9482746906157185, 0.41294535716449154, 0.8361654352683431, + 0.4321112716511004, 0.011850429412228025, 0.4725427310129483, 0.13015593957624672, 0.4613391477382468, + 0.6411806951208191, 0.5748219462376235, 0.7676968075086735, 0.4551543834911985, 0.3981321361133089, + 0.5537117150328572, 0.640188645994515, 0.8733008957299926, 0.7680564850159917, 0.591855214421642, + 0.6288218440807817, 0.5627797837213618, 0.8642380932712407, 0.6296224131150504, 0.810297385035068, + 0.9521660572032933, 0.006989866947720524, 0.15360690769811158, 0.609592365107798, 0.856111276058204, + 0.5569449989810563, 0.6358938242284586, 0.43974619938434945, 0.23369648280955435, 0.17610792852502044, + 0.5393700616427376, 0.535512670941786, 0.5912839261540107, 0.6983894048201051, 0.15323404928308648, + 0.8182668948670956, 0.29446560941893807, 0.418354823269551, 0.21870888711201486, 0.6424499336686369, + 0.3214700322107067, 0.24736247781577525, 0.7305497857337323, 0.568789544871731, 0.6533320204219322, + 0.33898139527730176, 0.16552403626291268, 0.19159501891328978, 0.5578510869508337, 0.29546640911784916, + 0.2869367181145863, 0.7294147056255083, 
0.3991485374776169, 0.4445980140617459, 0.6662896463638776, + 0.8614878148940442, 0.1808774427317561, 0.4614411389376568, 0.3303364346947858, 0.8028852020786104, + 0.5169929310162137, 0.14136531759583693, 0.9831280581298246, 0.7758621954461007, 0.9883180605472691, + 0.7032276300145796, 0.31585307578954147, 0.28146832444773495, 0.9471559002035407, 0.44047203430454707, + 0.29854162639920856, 0.07733153530994863, 0.8953385322255972, 0.8718260794429866, 0.8244843036717918, + 0.9473049441286394, 0.13636726569332214, 0.9029170461356006, 0.2711524085545959, 0.14414511952710007, + 0.9499336265300194, 0.6444587977800735, 0.7418623715827386, 0.044802260433499996, 0.850581483363445, + 0.05712457779883895, 0.15282854721354788, 0.3482893221081773, 0.6247260064132429, 0.42099978778043057, + 0.7479237929881165, 0.02099064732615097, 0.013098766701113651, 0.7226056703495463, 0.04820012498442583, + 0.10248361115345117, 0.8892140389602003, 0.9313325327216908, 0.7919070208756257, 0.0708315749326881, + 0.2188073927368014, 0.8127787161494373, 0.6257366952292145, 0.6924998870427207, 0.26793105051769306, + 0.7826490483783863, 0.5966997551066409, 0.2129270109769711, 0.44769099683243785, 0.7574472416898225, + 0.26729283252732317, 0.5630783736198899, 0.8608024828157775, 0.3931840317246529, 0.022722258330870937, + 0.1056061545184902, 0.6060652796018042, 0.3352528944308385, 0.8977154563912682, 0.7221307220260405, + 0.46922215009200297, 0.4677265831501206, 0.49431288865022915, 0.42276247133689504, 0.6788161518073327, + 0.04439101827129144, 0.028811435615006542, 0.008956533117231325, 0.14719117410301985, 0.7352499716127123, + 0.9005647194328514, 0.04239801036045521, 0.32113153439789777, 0.23008784431942686, 0.25059835793851803, + 0.7821196322936473, 0.3571983300993258, 0.2624691465092742, 0.8018016489757368, 0.5248393284810376, + 0.6977900148749919, 0.33093445127674614, 0.7554471103150353, 0.30512165089613275, 0.7316809143395435, + 0.33837084272328344, 0.031181701520131222, 0.8998729244301411, 
0.13963009532040604, 0.5524300751373528, + 0.998751142570437, 0.8079412199511319, 0.38764559152084754, 0.3210831483519665, 0.7304185865067885, + 0.5822602684215884, 0.6364590811085312, 0.10627778088019713, 0.5399985372878428, 0.4140922332840643, + 0.32495829469129234, 0.7880364474443821, 0.6521293980091717, 0.23933614126005887, 0.40069381935472004, + 0.3253434499214978, 0.43850426563837475, 0.3582655821803802, 0.02315611544237217, 0.7562158058764148, + 0.7040259198312322, 0.5059192662520398, 0.052574245106938755, 0.0044571360420935235, 0.19994806707117685, + 0.6839680845965146, 0.9986738127631021, 0.07122924252967722, 0.6409050009983894, 0.8555552153820654, + 0.9370992032325905, 0.7376153474252244, 0.05650174272501618, 0.9555164199341558, 0.24096140966133506, + 0.15235115309934777, 0.2227762469886192, 0.7035491425028773, 0.4883693004134255, 0.4991880671239809, + 0.2038723768682421, 0.7756032542183166, 0.006225581322849116, 0.4008868869085592, 0.20812329467775148, + 0.17556686644385078, 0.6096597423735237, 0.6191160871486041, 0.7349303921541335, 0.9192496344735487, + 0.24229302674522868, 0.9899032791595263, 0.29044779138561216, 0.8321620650117503, 0.7598221463622512, + 0.4229989353498531, 0.9387553475507483, 0.3140212188001139, 0.6511943391734281, 0.7376690823137533, + 0.16614528063403933, 0.45753024916507445, 0.6563453150496379, 0.011057777761439902, 0.0018264730571969645, + 0.3140058709073794, 0.8479357203115255, 0.9718814368259422, 0.2938044793936031, 0.389604645965164, + 0.7259980798553759, 0.384597064894432, 0.6461406690633517, 0.17143060225012974, 0.7905787674108862, + 0.700764812184617, 0.9178067635156595, 0.007883547902275412, 0.04345877753973626, 0.40662108491778937, + 0.32503174588972183, 0.05007085167672787, 0.9581958519270192, 0.20437076813207167, 0.2397322863247101, + 0.2432309640469006, 0.945083897778037, 0.715659408560469, 0.24955287567344286, 0.4424646154063001, + 0.42947559846701466, 0.15496718950982546, 0.8613665468468363, 0.6173709310699306, 
0.34240192063131025, + 0.17697172987534104, 0.05400248287734222, 0.47400410893135847, 0.5657413124334275, 0.6886788284227651, + 0.693192811671234, 0.5531430026891606, 0.766990407259576, 0.8518985685483231, 0.5389165861980743, + 0.9984378083937185, 0.8151195733132465, 0.7295039076200354, 0.0922729891266788, 0.8668318130738214, + 0.23372381782403595, 0.03956728658695974, 0.9606270879918739, 0.8167629519087327, 0.6627590908369925, + 0.3128149140030476, 0.6317394199057031, 0.12021250922401605, 0.42604564092211705, 0.3977230376064276, + 0.022079790505487362, 0.28149855317178163, 0.03265235541107758, 0.7517419899840844, 0.3364005732928873, + 0.7329527122940036, 0.43759767242690906, 0.3172137189567925, 0.2707815810278883, 0.6642248201697422, + 0.3972911147544086, 0.009127503808928017, 0.1234374452587974, 0.39970292448775213, 0.11989802753965029, + 0.9927165644356675, 0.8609568773974031, 0.28473276918917545, 0.5837263814586879, 0.2741346897790268, + 0.5647425538488864, 0.969909805392653, 0.7261702056098153, 0.5430436390733616, 0.08333922871976862, + 0.5609888136600801, 0.014806506476614079, 0.9455759599914065, 0.3193070470406175, 0.08542679684744281, + 0.6744677936417323, 0.27782578504968625, 0.29358418856919855, 0.9950513588111255, 0.09523381762884475, + 0.7696317754383216, 0.6698100854232568, 0.43625543466202976, 0.2330898194698613, 0.4127476057822641, + 0.9294766285644805, 0.05680528782969185, 0.6537538588698784, 0.17396071383121814, 0.9595723514929978, + 0.42975991044177986, 0.5431447973683532, 0.8833668707212631, 0.26133302914884515, 0.37848317176894164, + 0.46852397650558253, 0.20735781040531553, 0.8524713092661131, 0.44086749842642614, 0.9183713393663233, + 0.9450411621274358, 0.697302770534409, 0.9135605184069232, 0.09015468169209084, 0.704151745426799, + 0.5400455593098272, 0.5480655211682695, 0.30738071301825654, 0.15067120555532854, 0.925682378381145, + 0.8160834394485424, 0.8645970999084115, 0.43923002522405574, 0.3760052855173096, 0.08902326927063253, + 
0.09948678314542847, 0.3578210615036086, 0.8485940282988038, 0.2516468752579376, 0.7365499301475807, + 0.5612758649671108, 0.7859804827616864, 0.8851363067086282, 0.8935474690220618, 0.5336384648157917, + 0.23839322063516444, 0.23213589762876796, 0.005758486459847889, 0.36868262474116764, 0.264207113494464, + 0.36157643004033746, 0.9637257888559727, 0.5737186764359195, 0.5315516381845716, 0.5101088363620886, + 0.039248651321625805, 0.9681240024592983, 0.08677429466572284, 0.9518099421221371, 0.7952823450404229, + 0.05713730664487837, 0.17614720504232317, 0.14661930610537355, 0.7433132788379365, 0.3574322869140888, + 0.2341799637707056, 0.8349985525202629, 0.8160746720387089, 0.6407640183325787, 0.6197726260454984, + 0.41237945120152153, 0.1830987925080485, 0.7553599629295343, 0.05454691882123808, 0.5562940920005512, + 0.5102400141204675, 0.9541297229195699, 0.5577971859903864, 0.3371623491173492, 0.9597353973741677, + 0.9125053638178813, 0.5331026395371243, 0.7058339751156828, 0.28253557117321937, 0.7269649285767406, + 0.12353532557783586, 0.4491118104036773, 0.6637148382841026, 0.6351120142249237, 0.864473727452456, + 0.8265584180895602, 0.6156722515329398, 0.8643555219638754, 0.6571916479877694, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NHWC, {4, 1, 16, 16}, {16, 2, 2, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferFracZNhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} + +TEST_F(UtestFormatTransferFraczNhwc, fracz_to_nhwc_fp32_success_gt_cube) { + float data_4d[8 * 2 * 16 * 16] = { + 0.9818316040500025, + 0.6304740371328553, + 0.3403190259672165, + 0.1545772791147686, + 0.636859736696714, + 0.7286646469411707, + 0.6881973306039132, + 0.11522718733202364, + 0.05221067208366703, + 0.760070751319305, + 0.7630787390114239, + 
0.03062661180995363, + 0.6996367230214888, + 0.05874377568360356, + 0.7989654896241913, + 0.48340672056664313, + 0.9824687054977398, + 0.9118200012456436, + 0.5325827377037693, + 0.13215275466376963, + 0.16258783427877654, + 0.8733241462879588, + 0.9811689540387104, + 0.16717227735564522, + 0.17175025957595413, + 0.26751940129597196, + 0.6902723346114396, + 0.9141926645323374, + 0.1602148239484672, + 0.590826374279885, + 0.12879622297601634, + 0.9993413116954417, + 0.108757112003669, + 0.1058705156043852, + 0.29841868434742747, + 0.29680370546430235, + 0.7166628643533716, + 0.7961560236596105, + 0.2998001849925561, + 0.17227883677244848, + 0.273205026266526, + 0.4057715298841855, + 0.9241372689749366, + 0.4818191702106074, + 0.8407488865265121, + 0.37467834871018846, + 0.08393661785563533, + 0.9287360118914758, + 0.9323262118603665, + 0.0959850628867761, + 0.07841190876627901, + 0.7575281447707486, + 0.6451504082047301, + 0.2867531294639032, + 0.6038261174674847, + 0.5722864102604774, + 0.5980542102140242, + 0.29143691325023924, + 0.3323126425519707, + 0.6213156440241487, + 0.2783744506447783, + 0.8192692866948531, + 0.9580914367293034, + 0.8180085498115592, + 0.46737984143039313, + 0.21761303972473267, + 0.010092223694828983, + 0.133924872196312, + 0.6457939399463842, + 0.7108065587126572, + 0.6213477205246348, + 0.03780712693181687, + 0.052964796784310986, + 0.7720495422151494, + 0.07597908210473048, + 0.3880721492592797, + 0.8673754495725557, + 0.2159619821305203, + 0.9349796659144918, + 0.19280128505408822, + 0.19225222099905237, + 0.6381200674681307, + 0.09517206563329228, + 0.7401210217185733, + 0.9213966447997659, + 0.8282252903940375, + 0.25575682793740195, + 0.09419161503451245, + 0.7133067524485386, + 0.30622040856962174, + 0.04567030385976745, + 0.4421966327959601, + 0.9470399466953359, + 0.1863959618388854, + 0.603892794005211, + 0.7305230388181004, + 0.28364151338783206, + 0.7685411345675934, + 0.8988615642504377, + 0.7248529774876415, + 
0.8955713245004737, + 0.9694464156380791, + 0.13627973385112224, + 0.0262054823780018, + 0.9598488199046755, + 0.6637745866350796, + 0.567021314483643, + 0.8028105889988362, + 0.5449983501324986, + 0.7405568508319487, + 0.7479203765092981, + 0.6091595342627724, + 0.3571584723082011, + 0.6634376909205661, + 0.7823687380994333, + 0.46769811417519425, + 0.43313252298354554, + 0.4512165332058773, + 0.7409639493996218, + 0.773542103389995, + 0.03251757301743474, + 0.2918512326156285, + 0.2707389025126231, + 0.48862277656898045, + 0.0677707878489644, + 0.5319713656658794, + 0.19055872328014922, + 0.42372710919648093, + 0.5410233673552419, + 0.7053385617098781, + 0.5796221225781045, + 0.6774744169203547, + 0.38184777780992407, + 0.5921849312009162, + 0.4139673487900193, + 0.6324891962264935, + 0.23998117926531592, + 0.3006459023062501, + 0.7543836662968866, + 0.21903128794922366, + 0.8847078454339562, + 0.43658173987503657, + 0.13725114471518118, + 0.5082197711407339, + 0.18832064432940687, + 0.08574080793259364, + 0.30059358691237237, + 0.6482106495699304, + 0.8511603041679399, + 0.2701156230003453, + 0.21298871203478398, + 0.08203197080945912, + 0.6725944240256472, + 0.3066569123534626, + 0.2662259874220829, + 0.16779728161704843, + 0.7158644073214633, + 0.6878815896858301, + 0.9934704148141994, + 0.40929994195329833, + 0.9879146301551541, + 0.8134508106867051, + 0.4772264929878759, + 0.5239150673556234, + 0.04973375923455958, + 0.9408063913333713, + 0.8933576927435202, + 0.7690497885609424, + 0.3432688849226637, + 0.09841971497506807, + 0.6489987050683524, + 0.4612619245600613, + 0.9668831791357312, + 0.6773541509970112, + 0.8113556563575658, + 0.5103191595379972, + 0.692501163915668, + 0.872476678417899, + 0.39847669533309527, + 0.2550816582357872, + 0.44785761524405, + 0.6631992982892411, + 0.07909667197674031, + 0.15595306847956636, + 0.7549753608310522, + 0.7497451144627425, + 0.961356053996849, + 0.047012242220749845, + 0.39968661191290844, + 0.7900745768809934, + 
0.18988750301939106, + 0.34309148527453104, + 0.8666802227613912, + 0.33819954591027035, + 0.6754386381771887, + 0.012056309491297434, + 0.6529806784933322, + 0.9651442384174174, + 0.9978184657413758, + 0.6258752595314446, + 0.4974480025947464, + 0.28768692957840036, + 0.5851676600752466, + 0.38541216263965494, + 0.5412711123583736, + 0.7940464609305427, + 0.008865031997954298, + 0.05848479024922548, + 0.9396557362265029, + 0.4326393542047332, + 0.2488609657348656, + 0.7562763464489093, + 0.9983141149044218, + 0.008402913032401704, + 0.19571855359179036, + 0.690386550914582, + 0.7536429455538223, + 0.9378744182203695, + 0.7586425829635426, + 0.1445178013804469, + 0.9109541843179556, + 0.07821616551324684, + 0.05078095176454778, + 0.9130796647001046, + 0.20480686637597612, + 0.8200671332173322, + 0.4353078372661333, + 0.9539464470659443, + 0.8303326350536121, + 0.6278771584995406, + 0.2509169090532466, + 0.1774990588847868, + 0.4984180010157796, + 0.37490941253308996, + 0.747904697670194, + 0.4689694228884409, + 0.07865978524122308, + 0.018278256806917637, + 0.7339557318674227, + 0.9194551313394231, + 0.48861250541718937, + 0.01814006325736084, + 0.7541958032692393, + 0.8774612716989119, + 0.3760072307293194, + 0.2342391882453575, + 0.5507128966242177, + 0.8814107840171872, + 0.7629915021375545, + 0.022596785422104193, + 0.4676586074696423, + 0.6323496539184577, + 0.10709853732458496, + 0.24458540292747133, + 0.7432044950079842, + 0.991407564153164, + 0.6266082092973347, + 0.04229595441634182, + 0.8184353596321909, + 0.10225103323158635, + 0.11394336686699535, + 0.4022127256961855, + 0.30838155456109306, + 0.3195787933473151, + 0.11242740506794102, + 0.06655005190369112, + 0.5103072329299937, + 0.22668773651030127, + 0.32922441298388727, + 0.8140348723745937, + 0.5638190653380799, + 0.26541973372425653, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6639543926405075, + 0.590430754187306, + 0.13166255110692238, + 0.46762259840886866, + 0.8254471928133637, + 0.4192809120720761, + 0.9109983519609524, + 0.06282341072241782, + 0.11304156042728886, + 0.5823811143881991, + 0.8401405236826993, + 0.39537016399274805, + 0.3393604217038657, + 0.981346463187018, + 0.8145676445740513, + 0.9617260330265892, + 0.8974954048898902, + 0.9194970967140944, + 0.9244598046892946, + 0.5278043489398178, + 0.6099197828719526, + 
0.6616471969341827, + 0.6838181481733964, + 0.3321388894085553, + 0.32772025405797767, + 0.8045236337225692, + 0.543424952115358, + 0.31439754345644666, + 0.20390144864709325, + 0.7287843878803184, + 0.9530947743998013, + 0.9986403638627129, + 0.3975263188390229, + 0.5781879687545874, + 0.8724409861023287, + 0.8566257438266888, + 0.14606536893076372, + 0.12865091525620642, + 0.8838077200484497, + 0.7193688017246426, + 0.5554742650350404, + 0.1590835864032687, + 0.14658353856064854, + 0.8378474528967791, + 0.48599324242561204, + 0.4618113619505574, + 0.42866889296674937, + 0.5822891945174828, + 0.9030309982011011, + 0.41714977847695556, + 0.46228541811630763, + 0.2749445339474653, + 0.25981719721224195, + 0.8216787813868978, + 0.4538230925345249, + 0.7282865762095903, + 0.8788563382405642, + 0.8826153372351091, + 0.008215547008057156, + 0.7021704582311347, + 0.142877593970687, + 0.03730299658952074, + 0.5109381307125636, + 0.496533373979947, + 0.6221167728428014, + 0.594797090684722, + 0.9484422878814833, + 0.5779176961086859, + 0.05054664123568153, + 0.6904408145042924, + 0.9456781975122573, + 0.07234907191568096, + 0.31454539272595317, + 0.039782261918874906, + 0.9587127521414383, + 0.8805585025554179, + 0.9859544914622989, + 0.6275317338277425, + 0.14352580582038044, + 0.7695326504943156, + 0.41043924117549035, + 0.3199344511919423, + 0.17009843234582034, + 0.19333893045555228, + 0.4065598982414558, + 0.4783366067549203, + 0.2925065077781679, + 0.46612867625915266, + 0.2260401750143446, + 0.9769887894299829, + 0.626959110785719, + 0.9005591753667794, + 0.22730676915421288, + 0.43752553937624405, + 0.20832599816771735, + 0.0648434614140343, + 0.5533790070863268, + 0.08314085235301405, + 0.8425708019179065, + 0.15917383752943826, + 0.6527601974253487, + 0.30126252545266263, + 0.6463253615283967, + 0.4485504420576313, + 0.6669832516919934, + 0.6390350894633332, + 0.5841276848813355, + 0.3633156947810412, + 0.20053851382498278, + 0.2991176134182495, + 
0.2237519088309342, + 0.6924468706663672, + 0.6267863686588406, + 0.6957686899861832, + 0.8481678412966801, + 0.6679132796626048, + 0.2520678664501832, + 0.6859580348999734, + 0.41653832488428466, + 0.3443130260330426, + 0.9265385049178334, + 0.4632590871458663, + 0.2792541905414425, + 0.297175390928865, + 0.9649706249008346, + 0.8868549044180415, + 0.27859114362929416, + 0.7113192182809037, + 0.9797397982051781, + 0.5567150341480799, + 0.7202351101992689, + 0.16413702022000565, + 0.336467018972884, + 0.0377230903593081, + 0.8602687151091007, + 0.11839554304283328, + 0.14208471520735977, + 0.43607071414863874, + 0.8741600257317956, + 0.6640170355939985, + 0.0909679192917131, + 0.9106544964694222, + 0.804698027266483, + 0.35002717921135407, + 0.151688664815809, + 0.7393767419885556, + 0.48217967745270196, + 0.9602943716631533, + 0.30319852768975375, + 0.1330627046343439, + 0.6710020756991474, + 0.27078039927991027, + 0.11186194386988224, + 0.5457171365684865, + 0.7880448237433672, + 0.013805055471389882, + 0.6180892645154643, + 0.48759905827516603, + 0.8233479375602223, + 0.4264784017101182, + 0.9633894232982487, + 0.7448212055191065, + 0.5452953261409613, + 0.4856970915644032, + 0.8693281473365151, + 0.6373330823065603, + 0.5771587194750025, + 0.8691299641815123, + 0.0815851410708418, + 0.6806300549508425, + 0.5075760571421807, + 0.601124222598236, + 0.07050306902810866, + 0.36920524642079033, + 0.18818005490550915, + 0.5583892243115647, + 0.5927420217262831, + 0.7580186574311277, + 0.5033605618537017, + 0.9425017651769168, + 0.48862010484707064, + 0.9853716648830263, + 0.39984394946359525, + 0.5558506900537014, + 0.5908610536703583, + 0.22602671513614803, + 0.8798987778429802, + 0.9531473683222749, + 0.7410953157434534, + 0.30111561488120153, + 0.4403622314059401, + 0.052456464089102095, + 0.01209641118408944, + 0.46704246287932405, + 0.4750820624217653, + 0.10164673751729414, + 0.9796758207356858, + 0.8610487700070613, + 0.6211171296662068, + 0.8731238761599872, 
+ 0.37578834957139673, + 0.1120201496751766, + 0.011314071997954644, + 0.2572189570511383, + 0.879811392407261, + 0.10362272859591204, + 0.01645233098389376, + 0.9500294012864102, + 0.9690251328314643, + 0.42575080842172963, + 0.7001480708771485, + 0.4041297773374436, + 0.4185210345469337, + 0.7190995465953123, + 0.7640566649798914, + 0.23242098306402514, + 0.3542816499880481, + 0.5507254523321877, + 0.3029570242047027, + 0.3038459304238752, + 0.9598648212640765, + 0.7204511737388988, + 0.8790782739473424, + 0.7794923303105117, + 0.8815381229146946, + 0.03744741097255122, + 0.9047169962685846, + 0.4610613790126912, + 0.2475915213540839, + 0.4298710416601963, + 0.8221398481995089, + 0.23343189008359377, + 0.5350763782441681, + 0.14739879873747308, + 0.33280129854717244, + 0.23821919516506462, + 0.7167817307593071, + 0.8773007050805388, + 0.48070282049083113, + 0.08899633176897048, + 0.14443919067113453, + 0.9763728131494807, + 0.13169707338891068, + 0.2574020118799679, + 0.9117198409079358, + 0.4813044045568353, + 0.7148710776112692, + 0.10816472148122425, + 0.4872820160105873, + 0.7843715998368327, + 0.9792048963235088, + 0.5032302898011388, + 0.9133488145295496, + 0.8343572569411505, + 0.45894548309522043, + 0.8041104427062074, + 0.5871631865450887, + 0.056334307939404415, + 0.7231806850118402, + 0.6744439230965042, + 0.8458065219298543, + 0.009223176587899173, + 0.6934220235290146, + 0.7552659426337391, + 0.40417163368066655, + 0.18495513003494457, + 0.716067507969818, + 0.14155481130771042, + 0.029361076807311592, + 0.8830993452371644, + 0.6613103229791982, + 0.9398338818781078, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3578659411897861, + 0.7301301934786895, + 0.8618464466584491, + 0.18816515849292015, + 0.9107401874129222, + 0.03292249492058863, + 0.8173098249178635, + 0.2326748580551784, + 0.8712189622758891, + 0.052152358807047494, + 0.5500602809399827, + 0.29888382736441554, + 0.7171636160760438, + 0.61107113112707, + 0.8742559685716594, + 0.27079826979584654, + 0.034930190616391354, + 0.7646888920837598, + 0.33294464574985105, + 0.18964393968657645, + 0.8359357998538534, + 0.603777432373904, + 0.8080737216440349, + 0.43253091727556037, + 0.29704589713859186, + 0.35101583873151454, + 0.27797579020562, + 0.4409871040404285, + 0.03183653010616194, + 0.6888718475212194, + 0.743930296258262, + 
0.1383195679389676, + 0.7653124697875597, + 0.6197416698501532, + 0.03370111396538, + 0.277730833007543, + 0.47173387429680513, + 0.004031925171442463, + 0.4216810529719548, + 0.1527542910995907, + 0.755050985872739, + 0.5103916874099794, + 0.24663350472458012, + 0.9944239408861326, + 0.8532667918305049, + 0.571258224550583, + 0.8203424472901112, + 0.7752687501935466, + 0.3379267797413644, + 0.04363591901403252, + 0.021159698090064682, + 0.9844201045382279, + 0.9320998367516875, + 0.7508176117887979, + 0.6279835385302436, + 0.8296132817079003, + 0.7742499180798184, + 0.22327265194322599, + 0.05523148177019932, + 0.015431424053453768, + 0.9810406410163607, + 0.6360185797616702, + 0.318578721332871, + 0.8545232136203632, + 0.23240215777624518, + 0.5821386011719336, + 0.40705038971314067, + 0.9726331053853133, + 0.5877629214351668, + 0.737951863797599, + 0.13209112961192793, + 0.32596609082453, + 0.9051706320834314, + 0.006592902179112681, + 0.44539065743524575, + 0.21040963856054318, + 0.02648671883280662, + 0.8307275617617943, + 0.4998882468907887, + 0.553040303946554, + 0.5297816530911937, + 0.5843239411260238, + 0.1571513016873196, + 0.4588380934037194, + 0.8580445067796844, + 0.08760914807929077, + 0.3167980809508252, + 0.5688868537877791, + 0.7432713240307058, + 0.47849574835183317, + 0.08569476671747134, + 0.0028373026780509347, + 0.36146132556611577, + 0.11732007318482318, + 0.8826440135398512, + 0.8057998064675778, + 0.04722817815152447, + 0.5163195461666986, + 0.8923939769755282, + 0.05625559077233866, + 0.061665786518363186, + 0.662644780876765, + 0.6339151490480687, + 0.23437975062774363, + 0.12890375862760606, + 0.3501592943353464, + 0.09947716801883921, + 0.9783908021362502, + 0.5039620019339041, + 0.7894302782128263, + 0.40318009245339925, + 0.010992512436047153, + 0.7885896931523694, + 0.09972075576249273, + 0.327050109765748, + 0.040566933728248045, + 0.5192171211123873, + 0.5010930644256814, + 0.1751335903258584, + 0.973049431223127, + 
0.04605996955300973, + 0.5346739560986009, + 0.8356986203333525, + 0.8197705199993206, + 0.00016846934847547512, + 0.9767516366117579, + 0.7373887650278352, + 0.25343735960629343, + 0.32665192959159395, + 0.9515843270568628, + 0.7912931593367771, + 0.3390543201499241, + 0.39116094675420365, + 0.48473912485681336, + 0.7748044486400519, + 0.9504668800747522, + 0.17379382917448194, + 0.9058195197373183, + 0.2269566175662039, + 0.038537709312595037, + 0.26170965377152067, + 0.39139592915908183, + 0.21106987419176348, + 0.2688423270567659, + 0.0588646031637704, + 0.8142663494405596, + 0.7230712928151145, + 0.10388770611234022, + 0.6320570126609345, + 0.3854592292304997, + 0.8401593200228608, + 0.19857080876585775, + 0.4632224663542698, + 0.8338896779165693, + 0.07498742940886116, + 0.019157468796003774, + 0.1973426137959765, + 0.9481227475490336, + 0.18022422840604635, + 0.8193635052157996, + 0.6971962981473214, + 0.03280146452993471, + 0.6100845348121183, + 0.2564881425807104, + 0.40952730901052836, + 0.11198840547195466, + 0.6384039317824369, + 0.40293878780785075, + 0.022692351439830727, + 0.8875572687105943, + 0.06480422446351664, + 0.10051348811549643, + 0.1980882930823188, + 0.9179898183709072, + 0.6861308896442132, + 0.6956857450336589, + 0.31107695018579007, + 0.7677549648467343, + 0.6201418408382342, + 0.804275349391573, + 0.2870986397529448, + 0.1397119476044384, + 0.8556387210623139, + 0.6062812206466079, + 0.6575142947882678, + 0.45585821923189085, + 0.4099371374021249, + 0.7082884101648349, + 0.8291749729575852, + 0.9185967769978759, + 0.04060621570729972, + 0.4474316536950259, + 0.8363505443285942, + 0.8575757647091935, + 0.039799459941253335, + 0.36286406449207986, + 0.5131647448366778, + 0.39819043236603746, + 0.7618607865355099, + 0.22711968862867105, + 0.9394712456140883, + 0.027435600732236387, + 0.7758712942733171, + 0.2025711015279088, + 0.9991247917543385, + 0.9938131541840006, + 0.186905251806328, + 0.6306921520671528, + 0.9934696708319142, + 
0.4155138570203807, + 0.33151958596483977, + 0.7351239628799405, + 0.20830903644833842, + 0.7740550628440345, + 0.4191234862718004, + 0.23674572501250302, + 0.059571405085674156, + 0.9129267438467386, + 0.06451592284712904, + 0.6857709392141015, + 0.8380269534549667, + 0.39885910326895746, + 0.2834031729279012, + 0.2382256471610743, + 0.9638471775191081, + 0.7215840179080331, + 0.36423940072103955, + 0.17430531770484514, + 0.9415074521796919, + 0.2738258731873858, + 0.9306468185245629, + 0.6656678169751403, + 0.1155683928384511, + 0.8747709248985905, + 0.12635835804712037, + 0.5759875379497646, + 0.5731929978928748, + 0.03741903689544501, + 0.19725909198310998, + 0.7735858565736194, + 0.4790973691825182, + 0.0981833133220551, + 0.8764985001957379, + 0.5258092509418005, + 0.8342042669276518, + 0.8790350110712204, + 0.7094946960609122, + 0.7708450113645778, + 0.4180654412461543, + 0.4242979420279134, + 0.24139470932602636, + 0.2209321570893309, + 0.7846941996259452, + 0.12945719412306944, + 0.7602339455424194, + 0.10743040252161462, + 0.46685068003629737, + 0.046369043986771774, + 0.23308706601773554, + 0.4844512179420487, + 0.9643822582174879, + 0.6006453251508257, + 0.6936888180218606, + 0.3013953093096562, + 0.7354859248567551, + 0.8349944098141118, + 0.4048627428035879, + 0.44505698895658985, + 0.7751604897094069, + 0.8744909179894252, + 0.947828891904564, + 0.8640222948104718, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6314201140353499, + 0.11044529796167624, + 0.5234602021756846, + 0.8383531146832072, + 0.35499006367650765, + 0.9442108337417638, + 0.8956398237978505, + 0.1882592063763192, + 0.37890051336945096, + 0.2616367877576542, + 0.42834342236173995, + 0.963048643409656, + 0.9217911303998747, + 0.3863817294878098, + 0.0974751151813743, + 0.8599500573727576, + 0.057089287186821935, + 0.5533755728797164, + 0.6005915913361521, + 0.1620083752339584, + 0.6061041103116162, + 0.3995882134501906, + 0.7085082598407784, + 0.08407809216757356, + 0.9084207349081772, + 0.7272457718657432, + 0.32992683980137916, + 0.4946824285274082, + 0.615073271111235, + 0.4193961465372614, + 0.30553456868372697, + 0.05060126762779704, + 0.291593602521547, + 0.92010890659423, + 0.983625492452828, + 0.48639081135981677, + 0.4765739226583956, + 0.10661041022380258, + 0.5214502864845213, + 0.8599870683300007, + 0.5179732137433295, + 
0.8809917505983484, + 0.722106976281213, + 0.48823452875607354, + 0.3503839647122472, + 0.9865830045574908, + 0.5426630898906415, + 0.5217709034718941, + 0.10414112024944422, + 0.7758532076224506, + 0.3073683279926598, + 0.8740808571085373, + 0.31398731169122107, + 0.9731775740247134, + 0.006681240523999987, + 0.15988007000514737, + 0.12796151451926918, + 0.34990462264973865, + 0.0656152325665823, + 0.573229266270487, + 0.8187327005316167, + 0.7370367403897583, + 0.5653206456450454, + 0.6242381891062653, + 0.6005553470527086, + 0.5780289963610858, + 0.4615664520082169, + 0.33336403885310373, + 0.3607367809090497, + 0.41916265564486266, + 0.01606174796202109, + 0.14162979911924312, + 0.4134088930503992, + 0.8487134257579578, + 0.5428387502949792, + 0.7715775547811061, + 0.3987807271022865, + 0.9327040075466025, + 0.5991429329697456, + 0.42591643248294897, + 0.9341257420123151, + 0.00901387215143401, + 0.31965995666124947, + 0.30354580893146743, + 0.29702838396393805, + 0.8783416396086332, + 0.661370050417091, + 0.5972415473147473, + 0.7785999094421769, + 0.22056824973746136, + 0.5673911270103246, + 0.43802545724601716, + 0.7305663462744951, + 0.16885785839419054, + 0.5057125448994015, + 0.6601524988598084, + 0.41780992611018475, + 0.3827436867828057, + 0.4994412662997859, + 0.6760343095970682, + 0.804379353505974, + 0.6968983017423745, + 0.7087690507974629, + 0.9060334223047234, + 0.7408399172400371, + 0.4399334354759946, + 0.0505122443751177, + 0.6419455093215115, + 0.6284706946091785, + 0.03490785589787804, + 0.23143346520326058, + 0.918425982705948, + 0.13419172631284015, + 0.19703920828417498, + 0.8385184444257783, + 0.5863278446649178, + 0.8162272321723847, + 0.9352789460968992, + 0.8647561254544748, + 0.15975188650299932, + 0.17836973320343386, + 0.6864848216734166, + 0.36415105850464413, + 0.3426658491754645, + 0.08329749299302491, + 0.42193590764837385, + 0.6642021794485784, + 0.6636584135623489, + 0.6053567411529699, + 0.6988705097367177, + 
0.3729424883568403, + 0.04411243675556986, + 0.534604760793021, + 0.6085219738551051, + 0.12447165936004256, + 0.05883726779432985, + 0.24847343117316678, + 0.5063340116556715, + 0.3976127704496062, + 0.08152081595341609, + 0.3506522585285301, + 0.1683344260481151, + 0.48691479882706157, + 0.8353082899370665, + 0.14637104867208328, + 0.2129098116028093, + 0.5796866670289799, + 0.39100590803988866, + 0.32139056156796886, + 0.15058673720696558, + 0.010737680262784766, + 0.004595895220052548, + 0.5243334524071466, + 0.3062990700506023, + 0.8989948145015754, + 0.3995636846032241, + 0.4560874030151093, + 0.948023137115712, + 0.540281310007275, + 0.26457559429892186, + 0.6515191420301568, + 0.9554532124850424, + 0.20822584693009594, + 0.21886259526121254, + 0.8463453916651477, + 0.7415404364419516, + 0.5419971984385825, + 0.5497563263808113, + 0.980394542822379, + 0.15307532804101953, + 0.572368260863743, + 0.48863254508712584, + 0.6979314145830208, + 0.43581241800260095, + 0.6989017345810631, + 0.8187807739671614, + 0.2575823090229311, + 0.8240801799547735, + 0.1448669779417252, + 0.8116170735906293, + 0.9082959459536531, + 0.8862781397082458, + 0.7324826297013995, + 0.6259636012578446, + 0.028853519416508266, + 0.6228437907861963, + 0.02409626691061728, + 0.9203609494394416, + 0.8461273177832925, + 0.2736512366046855, + 0.04020369061681861, + 0.902228304152258, + 0.550880345014923, + 0.07148117151736633, + 0.41054299237597647, + 0.10848671426422485, + 0.9728118525566118, + 0.20990798653999276, + 0.9308342861844455, + 0.4978127378528848, + 0.7351482579424665, + 0.5623836482085756, + 0.143782791890815, + 0.5898839079534395, + 0.2546773800832157, + 0.666653682817035, + 0.7467649896168816, + 0.7339634345720852, + 0.707682901066885, + 0.9101318378861685, + 0.5123191129010624, + 0.9439951816262205, + 0.5463959087566002, + 0.05760611428924023, + 0.676503376165408, + 0.44429048072068333, + 0.5230209296642828, + 0.3224367091169168, + 0.6913680319012937, + 0.17110739904678152, + 
0.16779286710665142, + 0.6335681699796741, + 0.07931757368130377, + 0.46888858792493004, + 0.18197527553087145, + 0.002638591858257655, + 0.691716324882675, + 0.044337163682884206, + 0.6293554666845848, + 0.40764392827851403, + 0.2538655700672533, + 0.8281469647313634, + 0.023926045054366463, + 0.8167534085038402, + 0.43320133354594026, + 0.24862060630736682, + 0.09125749468359245, + 0.8118122437010404, + 0.9973271765376198, + 0.9865525895778507, + 0.47291321372136974, + 0.8635521722706079, + 0.4701334745256115, + 0.1506952535704139, + 0.9018427816330945, + 0.6748520059749242, + 0.6573750823451866, + 0.8618860620086874, + 0.08827502135749443, + 0.6729164433989506, + 0.21618869286155384, + 0.4287518670302243, + 0.2407327438728406, + 0.6338004043821022, + 0.8515817109182252, + 0.9659962777150152, + 0.6549008736011241, + 0.8479504542038493, + 0.006679071347445054, + 0.2724677376754999, + 0.9915443051333377, + 0.21071715217089027, + 0.8684246639753141, + 0.7856846182004006, + 0.6859853232823806, + 0.4789831650688282, + 0.8185051822940307, + 0.5789517433188925, + 0.6919294885862914, + 0.921789377332735, + 0.41376092831273026, + 0.29579140546489957, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06536693360779877, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8189538792252146, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8485698722432129, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42689303422429525, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7474879973990272, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9950215619461602, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41411358428231926, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.21626088206538852, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22487940449338928, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.4682545943541082, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9510104387444676, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18639355175119265, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8815486106206981, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9534802944405797, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5689890773146097, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3329371174770699, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7461012639404849, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7556533452821178, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6957123659099351, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08242896820190027, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23616744795045597, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.13692726551997658, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16712455310677277, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8035400908448518, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1837892082294046, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05376976135766964, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3992656098634447, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9176895603923744, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19440695649597994, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8475060356712824, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18787115939366794, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9920266776266308, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8961208287376816, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41160144347284433, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17742916588766944, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07851007465436122, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06660419203737089, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07000294703215704, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9664967803907726, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16668958705182269, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.060520301199066595, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7159738475986986, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7139909208243974, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5477377714401691, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8949482680224852, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6066615918823247, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3799019937820075, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.006052425806343575, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7784261014648269, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8334328323308305, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3595682016631978, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9593119247449344, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11585279449665897, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5430944490786588, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4432788242430207, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.44626736830361025, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8072981979896457, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9707561358089939, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.694866090191261, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2577772577688977, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6567808670786782, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10632659645304032, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1875989205574723, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40947541132428245, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05797277783754817, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8163056554998648, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4315493010082393, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.534265983707278, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + float data[17 * 2 * 2 * 17] = { + 0.9818316040500025, 0.6304740371328553, 0.3403190259672165, 0.1545772791147686, 0.636859736696714, + 0.7286646469411707, 0.6881973306039132, 0.11522718733202364, 0.05221067208366703, 0.760070751319305, + 0.7630787390114239, 0.03062661180995363, 0.6996367230214888, 0.05874377568360356, 0.7989654896241913, + 0.48340672056664313, 0.06536693360779877, 0.6639543926405075, 0.590430754187306, 0.13166255110692238, + 0.46762259840886866, 0.8254471928133637, 0.4192809120720761, 0.9109983519609524, 0.06282341072241782, + 0.11304156042728886, 0.5823811143881991, 0.8401405236826993, 0.39537016399274805, 0.3393604217038657, + 0.981346463187018, 0.8145676445740513, 0.9617260330265892, 0.7556533452821178, 0.3578659411897861, + 0.7301301934786895, 0.8618464466584491, 0.18816515849292015, 0.9107401874129222, 0.03292249492058863, + 0.8173098249178635, 0.2326748580551784, 0.8712189622758891, 0.052152358807047494, 0.5500602809399827, + 0.29888382736441554, 0.7171636160760438, 0.61107113112707, 0.8742559685716594, 0.27079826979584654, + 0.17742916588766944, 0.6314201140353499, 0.11044529796167624, 0.5234602021756846, 0.8383531146832072, + 0.35499006367650765, 0.9442108337417638, 0.8956398237978505, 0.1882592063763192, 0.37890051336945096, + 0.2616367877576542, 0.42834342236173995, 0.963048643409656, 0.9217911303998747, 0.3863817294878098, + 0.0974751151813743, 0.8599500573727576, 0.9593119247449344, 0.9824687054977398, 0.9118200012456436, + 0.5325827377037693, 0.13215275466376963, 0.16258783427877654, 0.8733241462879588, 0.9811689540387104, + 0.16717227735564522, 0.17175025957595413, 0.26751940129597196, 0.6902723346114396, 0.9141926645323374, + 0.1602148239484672, 0.590826374279885, 0.12879622297601634, 
0.9993413116954417, 0.8189538792252146, + 0.8974954048898902, 0.9194970967140944, 0.9244598046892946, 0.5278043489398178, 0.6099197828719526, + 0.6616471969341827, 0.6838181481733964, 0.3321388894085553, 0.32772025405797767, 0.8045236337225692, + 0.543424952115358, 0.31439754345644666, 0.20390144864709325, 0.7287843878803184, 0.9530947743998013, + 0.9986403638627129, 0.6957123659099351, 0.034930190616391354, 0.7646888920837598, 0.33294464574985105, + 0.18964393968657645, 0.8359357998538534, 0.603777432373904, 0.8080737216440349, 0.43253091727556037, + 0.29704589713859186, 0.35101583873151454, 0.27797579020562, 0.4409871040404285, 0.03183653010616194, + 0.6888718475212194, 0.743930296258262, 0.1383195679389676, 0.07851007465436122, 0.057089287186821935, + 0.5533755728797164, 0.6005915913361521, 0.1620083752339584, 0.6061041103116162, 0.3995882134501906, + 0.7085082598407784, 0.08407809216757356, 0.9084207349081772, 0.7272457718657432, 0.32992683980137916, + 0.4946824285274082, 0.615073271111235, 0.4193961465372614, 0.30553456868372697, 0.05060126762779704, + 0.11585279449665897, 0.108757112003669, 0.1058705156043852, 0.29841868434742747, 0.29680370546430235, + 0.7166628643533716, 0.7961560236596105, 0.2998001849925561, 0.17227883677244848, 0.273205026266526, + 0.4057715298841855, 0.9241372689749366, 0.4818191702106074, 0.8407488865265121, 0.37467834871018846, + 0.08393661785563533, 0.9287360118914758, 0.8485698722432129, 0.3975263188390229, 0.5781879687545874, + 0.8724409861023287, 0.8566257438266888, 0.14606536893076372, 0.12865091525620642, 0.8838077200484497, + 0.7193688017246426, 0.5554742650350404, 0.1590835864032687, 0.14658353856064854, 0.8378474528967791, + 0.48599324242561204, 0.4618113619505574, 0.42866889296674937, 0.5822891945174828, 0.08242896820190027, + 0.7653124697875597, 0.6197416698501532, 0.03370111396538, 0.277730833007543, 0.47173387429680513, + 0.004031925171442463, 0.4216810529719548, 0.1527542910995907, 0.755050985872739, 0.5103916874099794, 
+ 0.24663350472458012, 0.9944239408861326, 0.8532667918305049, 0.571258224550583, 0.8203424472901112, + 0.7752687501935466, 0.06660419203737089, 0.291593602521547, 0.92010890659423, 0.983625492452828, + 0.48639081135981677, 0.4765739226583956, 0.10661041022380258, 0.5214502864845213, 0.8599870683300007, + 0.5179732137433295, 0.8809917505983484, 0.722106976281213, 0.48823452875607354, 0.3503839647122472, + 0.9865830045574908, 0.5426630898906415, 0.5217709034718941, 0.5430944490786588, 0.9323262118603665, + 0.0959850628867761, 0.07841190876627901, 0.7575281447707486, 0.6451504082047301, 0.2867531294639032, + 0.6038261174674847, 0.5722864102604774, 0.5980542102140242, 0.29143691325023924, 0.3323126425519707, + 0.6213156440241487, 0.2783744506447783, 0.8192692866948531, 0.9580914367293034, 0.8180085498115592, + 0.42689303422429525, 0.9030309982011011, 0.41714977847695556, 0.46228541811630763, 0.2749445339474653, + 0.25981719721224195, 0.8216787813868978, 0.4538230925345249, 0.7282865762095903, 0.8788563382405642, + 0.8826153372351091, 0.008215547008057156, 0.7021704582311347, 0.142877593970687, 0.03730299658952074, + 0.5109381307125636, 0.496533373979947, 0.23616744795045597, 0.3379267797413644, 0.04363591901403252, + 0.021159698090064682, 0.9844201045382279, 0.9320998367516875, 0.7508176117887979, 0.6279835385302436, + 0.8296132817079003, 0.7742499180798184, 0.22327265194322599, 0.05523148177019932, 0.015431424053453768, + 0.9810406410163607, 0.6360185797616702, 0.318578721332871, 0.8545232136203632, 0.07000294703215704, + 0.10414112024944422, 0.7758532076224506, 0.3073683279926598, 0.8740808571085373, 0.31398731169122107, + 0.9731775740247134, 0.006681240523999987, 0.15988007000514737, 0.12796151451926918, 0.34990462264973865, + 0.0656152325665823, 0.573229266270487, 0.8187327005316167, 0.7370367403897583, 0.5653206456450454, + 0.6242381891062653, 0.4432788242430207, 0.46737984143039313, 0.21761303972473267, 0.010092223694828983, + 0.133924872196312, 
0.6457939399463842, 0.7108065587126572, 0.6213477205246348, 0.03780712693181687, + 0.052964796784310986, 0.7720495422151494, 0.07597908210473048, 0.3880721492592797, 0.8673754495725557, + 0.2159619821305203, 0.9349796659144918, 0.19280128505408822, 0.7474879973990272, 0.6221167728428014, + 0.594797090684722, 0.9484422878814833, 0.5779176961086859, 0.05054664123568153, 0.6904408145042924, + 0.9456781975122573, 0.07234907191568096, 0.31454539272595317, 0.039782261918874906, 0.9587127521414383, + 0.8805585025554179, 0.9859544914622989, 0.6275317338277425, 0.14352580582038044, 0.7695326504943156, + 0.13692726551997658, 0.23240215777624518, 0.5821386011719336, 0.40705038971314067, 0.9726331053853133, + 0.5877629214351668, 0.737951863797599, 0.13209112961192793, 0.32596609082453, 0.9051706320834314, + 0.006592902179112681, 0.44539065743524575, 0.21040963856054318, 0.02648671883280662, 0.8307275617617943, + 0.4998882468907887, 0.553040303946554, 0.9664967803907726, 0.6005553470527086, 0.5780289963610858, + 0.4615664520082169, 0.33336403885310373, 0.3607367809090497, 0.41916265564486266, 0.01606174796202109, + 0.14162979911924312, 0.4134088930503992, 0.8487134257579578, 0.5428387502949792, 0.7715775547811061, + 0.3987807271022865, 0.9327040075466025, 0.5991429329697456, 0.42591643248294897, 0.44626736830361025, + 0.19225222099905237, 0.6381200674681307, 0.09517206563329228, 0.7401210217185733, 0.9213966447997659, + 0.8282252903940375, 0.25575682793740195, 0.09419161503451245, 0.7133067524485386, 0.30622040856962174, + 0.04567030385976745, 0.4421966327959601, 0.9470399466953359, 0.1863959618388854, 0.603892794005211, + 0.7305230388181004, 0.9950215619461602, 0.41043924117549035, 0.3199344511919423, 0.17009843234582034, + 0.19333893045555228, 0.4065598982414558, 0.4783366067549203, 0.2925065077781679, 0.46612867625915266, + 0.2260401750143446, 0.9769887894299829, 0.626959110785719, 0.9005591753667794, 0.22730676915421288, + 0.43752553937624405, 0.20832599816771735, 
0.0648434614140343, 0.16712455310677277, 0.5297816530911937, + 0.5843239411260238, 0.1571513016873196, 0.4588380934037194, 0.8580445067796844, 0.08760914807929077, + 0.3167980809508252, 0.5688868537877791, 0.7432713240307058, 0.47849574835183317, 0.08569476671747134, + 0.0028373026780509347, 0.36146132556611577, 0.11732007318482318, 0.8826440135398512, 0.8057998064675778, + 0.16668958705182269, 0.9341257420123151, 0.00901387215143401, 0.31965995666124947, 0.30354580893146743, + 0.29702838396393805, 0.8783416396086332, 0.661370050417091, 0.5972415473147473, 0.7785999094421769, + 0.22056824973746136, 0.5673911270103246, 0.43802545724601716, 0.7305663462744951, 0.16885785839419054, + 0.5057125448994015, 0.6601524988598084, 0.8072981979896457, 0.28364151338783206, 0.7685411345675934, + 0.8988615642504377, 0.7248529774876415, 0.8955713245004737, 0.9694464156380791, 0.13627973385112224, + 0.0262054823780018, 0.9598488199046755, 0.6637745866350796, 0.567021314483643, 0.8028105889988362, + 0.5449983501324986, 0.7405568508319487, 0.7479203765092981, 0.6091595342627724, 0.41411358428231926, + 0.5533790070863268, 0.08314085235301405, 0.8425708019179065, 0.15917383752943826, 0.6527601974253487, + 0.30126252545266263, 0.6463253615283967, 0.4485504420576313, 0.6669832516919934, 0.6390350894633332, + 0.5841276848813355, 0.3633156947810412, 0.20053851382498278, 0.2991176134182495, 0.2237519088309342, + 0.6924468706663672, 0.8035400908448518, 0.04722817815152447, 0.5163195461666986, 0.8923939769755282, + 0.05625559077233866, 0.061665786518363186, 0.662644780876765, 0.6339151490480687, 0.23437975062774363, + 0.12890375862760606, 0.3501592943353464, 0.09947716801883921, 0.9783908021362502, 0.5039620019339041, + 0.7894302782128263, 0.40318009245339925, 0.010992512436047153, 0.060520301199066595, 0.41780992611018475, + 0.3827436867828057, 0.4994412662997859, 0.6760343095970682, 0.804379353505974, 0.6968983017423745, + 0.7087690507974629, 0.9060334223047234, 0.7408399172400371, 
0.4399334354759946, 0.0505122443751177, + 0.6419455093215115, 0.6284706946091785, 0.03490785589787804, 0.23143346520326058, 0.918425982705948, + 0.9707561358089939, 0.3571584723082011, 0.6634376909205661, 0.7823687380994333, 0.46769811417519425, + 0.43313252298354554, 0.4512165332058773, 0.7409639493996218, 0.773542103389995, 0.03251757301743474, + 0.2918512326156285, 0.2707389025126231, 0.48862277656898045, 0.0677707878489644, 0.5319713656658794, + 0.19055872328014922, 0.42372710919648093, 0.21626088206538852, 0.6267863686588406, 0.6957686899861832, + 0.8481678412966801, 0.6679132796626048, 0.2520678664501832, 0.6859580348999734, 0.41653832488428466, + 0.3443130260330426, 0.9265385049178334, 0.4632590871458663, 0.2792541905414425, 0.297175390928865, + 0.9649706249008346, 0.8868549044180415, 0.27859114362929416, 0.7113192182809037, 0.1837892082294046, + 0.7885896931523694, 0.09972075576249273, 0.327050109765748, 0.040566933728248045, 0.5192171211123873, + 0.5010930644256814, 0.1751335903258584, 0.973049431223127, 0.04605996955300973, 0.5346739560986009, + 0.8356986203333525, 0.8197705199993206, 0.00016846934847547512, 0.9767516366117579, 0.7373887650278352, + 0.25343735960629343, 0.7159738475986986, 0.13419172631284015, 0.19703920828417498, 0.8385184444257783, + 0.5863278446649178, 0.8162272321723847, 0.9352789460968992, 0.8647561254544748, 0.15975188650299932, + 0.17836973320343386, 0.6864848216734166, 0.36415105850464413, 0.3426658491754645, 0.08329749299302491, + 0.42193590764837385, 0.6642021794485784, 0.6636584135623489, 0.694866090191261, 0.5410233673552419, + 0.7053385617098781, 0.5796221225781045, 0.6774744169203547, 0.38184777780992407, 0.5921849312009162, + 0.4139673487900193, 0.6324891962264935, 0.23998117926531592, 0.3006459023062501, 0.7543836662968866, + 0.21903128794922366, 0.8847078454339562, 0.43658173987503657, 0.13725114471518118, 0.5082197711407339, + 0.22487940449338928, 0.9797397982051781, 0.5567150341480799, 0.7202351101992689, 
0.16413702022000565, + 0.336467018972884, 0.0377230903593081, 0.8602687151091007, 0.11839554304283328, 0.14208471520735977, + 0.43607071414863874, 0.8741600257317956, 0.6640170355939985, 0.0909679192917131, 0.9106544964694222, + 0.804698027266483, 0.35002717921135407, 0.05376976135766964, 0.32665192959159395, 0.9515843270568628, + 0.7912931593367771, 0.3390543201499241, 0.39116094675420365, 0.48473912485681336, 0.7748044486400519, + 0.9504668800747522, 0.17379382917448194, 0.9058195197373183, 0.2269566175662039, 0.038537709312595037, + 0.26170965377152067, 0.39139592915908183, 0.21106987419176348, 0.2688423270567659, 0.7139909208243974, + 0.6053567411529699, 0.6988705097367177, 0.3729424883568403, 0.04411243675556986, 0.534604760793021, + 0.6085219738551051, 0.12447165936004256, 0.05883726779432985, 0.24847343117316678, 0.5063340116556715, + 0.3976127704496062, 0.08152081595341609, 0.3506522585285301, 0.1683344260481151, 0.48691479882706157, + 0.8353082899370665, 0.2577772577688977, 0.18832064432940687, 0.08574080793259364, 0.30059358691237237, + 0.6482106495699304, 0.8511603041679399, 0.2701156230003453, 0.21298871203478398, 0.08203197080945912, + 0.6725944240256472, 0.3066569123534626, 0.2662259874220829, 0.16779728161704843, 0.7158644073214633, + 0.6878815896858301, 0.9934704148141994, 0.40929994195329833, 0.4682545943541082, 0.151688664815809, + 0.7393767419885556, 0.48217967745270196, 0.9602943716631533, 0.30319852768975375, 0.1330627046343439, + 0.6710020756991474, 0.27078039927991027, 0.11186194386988224, 0.5457171365684865, 0.7880448237433672, + 0.013805055471389882, 0.6180892645154643, 0.48759905827516603, 0.8233479375602223, 0.4264784017101182, + 0.3992656098634447, 0.0588646031637704, 0.8142663494405596, 0.7230712928151145, 0.10388770611234022, + 0.6320570126609345, 0.3854592292304997, 0.8401593200228608, 0.19857080876585775, 0.4632224663542698, + 0.8338896779165693, 0.07498742940886116, 0.019157468796003774, 0.1973426137959765, 0.9481227475490336, + 
0.18022422840604635, 0.8193635052157996, 0.5477377714401691, 0.14637104867208328, 0.2129098116028093, + 0.5796866670289799, 0.39100590803988866, 0.32139056156796886, 0.15058673720696558, 0.010737680262784766, + 0.004595895220052548, 0.5243334524071466, 0.3062990700506023, 0.8989948145015754, 0.3995636846032241, + 0.4560874030151093, 0.948023137115712, 0.540281310007275, 0.26457559429892186, 0.6567808670786782, + 0.9879146301551541, 0.8134508106867051, 0.4772264929878759, 0.5239150673556234, 0.04973375923455958, + 0.9408063913333713, 0.8933576927435202, 0.7690497885609424, 0.3432688849226637, 0.09841971497506807, + 0.6489987050683524, 0.4612619245600613, 0.9668831791357312, 0.6773541509970112, 0.8113556563575658, + 0.5103191595379972, 0.9510104387444676, 0.9633894232982487, 0.7448212055191065, 0.5452953261409613, + 0.4856970915644032, 0.8693281473365151, 0.6373330823065603, 0.5771587194750025, 0.8691299641815123, + 0.0815851410708418, 0.6806300549508425, 0.5075760571421807, 0.601124222598236, 0.07050306902810866, + 0.36920524642079033, 0.18818005490550915, 0.5583892243115647, 0.9176895603923744, 0.6971962981473214, + 0.03280146452993471, 0.6100845348121183, 0.2564881425807104, 0.40952730901052836, 0.11198840547195466, + 0.6384039317824369, 0.40293878780785075, 0.022692351439830727, 0.8875572687105943, 0.06480422446351664, + 0.10051348811549643, 0.1980882930823188, 0.9179898183709072, 0.6861308896442132, 0.6956857450336589, + 0.8949482680224852, 0.6515191420301568, 0.9554532124850424, 0.20822584693009594, 0.21886259526121254, + 0.8463453916651477, 0.7415404364419516, 0.5419971984385825, 0.5497563263808113, 0.980394542822379, + 0.15307532804101953, 0.572368260863743, 0.48863254508712584, 0.6979314145830208, 0.43581241800260095, + 0.6989017345810631, 0.8187807739671614, 0.10632659645304032, 0.692501163915668, 0.872476678417899, + 0.39847669533309527, 0.2550816582357872, 0.44785761524405, 0.6631992982892411, 0.07909667197674031, + 0.15595306847956636, 
0.7549753608310522, 0.7497451144627425, 0.961356053996849, 0.047012242220749845, + 0.39968661191290844, 0.7900745768809934, 0.18988750301939106, 0.34309148527453104, 0.18639355175119265, + 0.5927420217262831, 0.7580186574311277, 0.5033605618537017, 0.9425017651769168, 0.48862010484707064, + 0.9853716648830263, 0.39984394946359525, 0.5558506900537014, 0.5908610536703583, 0.22602671513614803, + 0.8798987778429802, 0.9531473683222749, 0.7410953157434534, 0.30111561488120153, 0.4403622314059401, + 0.052456464089102095, 0.19440695649597994, 0.31107695018579007, 0.7677549648467343, 0.6201418408382342, + 0.804275349391573, 0.2870986397529448, 0.1397119476044384, 0.8556387210623139, 0.6062812206466079, + 0.6575142947882678, 0.45585821923189085, 0.4099371374021249, 0.7082884101648349, 0.8291749729575852, + 0.9185967769978759, 0.04060621570729972, 0.4474316536950259, 0.6066615918823247, 0.2575823090229311, + 0.8240801799547735, 0.1448669779417252, 0.8116170735906293, 0.9082959459536531, 0.8862781397082458, + 0.7324826297013995, 0.6259636012578446, 0.028853519416508266, 0.6228437907861963, 0.02409626691061728, + 0.9203609494394416, 0.8461273177832925, 0.2736512366046855, 0.04020369061681861, 0.902228304152258, + 0.1875989205574723, 0.8666802227613912, 0.33819954591027035, 0.6754386381771887, 0.012056309491297434, + 0.6529806784933322, 0.9651442384174174, 0.9978184657413758, 0.6258752595314446, 0.4974480025947464, + 0.28768692957840036, 0.5851676600752466, 0.38541216263965494, 0.5412711123583736, 0.7940464609305427, + 0.008865031997954298, 0.05848479024922548, 0.8815486106206981, 0.01209641118408944, 0.46704246287932405, + 0.4750820624217653, 0.10164673751729414, 0.9796758207356858, 0.8610487700070613, 0.6211171296662068, + 0.8731238761599872, 0.37578834957139673, 0.1120201496751766, 0.011314071997954644, 0.2572189570511383, + 0.879811392407261, 0.10362272859591204, 0.01645233098389376, 0.9500294012864102, 0.8475060356712824, + 0.8363505443285942, 0.8575757647091935, 
0.039799459941253335, 0.36286406449207986, 0.5131647448366778, + 0.39819043236603746, 0.7618607865355099, 0.22711968862867105, 0.9394712456140883, 0.027435600732236387, + 0.7758712942733171, 0.2025711015279088, 0.9991247917543385, 0.9938131541840006, 0.186905251806328, + 0.6306921520671528, 0.3799019937820075, 0.550880345014923, 0.07148117151736633, 0.41054299237597647, + 0.10848671426422485, 0.9728118525566118, 0.20990798653999276, 0.9308342861844455, 0.4978127378528848, + 0.7351482579424665, 0.5623836482085756, 0.143782791890815, 0.5898839079534395, 0.2546773800832157, + 0.666653682817035, 0.7467649896168816, 0.7339634345720852, 0.40947541132428245, 0.9396557362265029, + 0.4326393542047332, 0.2488609657348656, 0.7562763464489093, 0.9983141149044218, 0.008402913032401704, + 0.19571855359179036, 0.690386550914582, 0.7536429455538223, 0.9378744182203695, 0.7586425829635426, + 0.1445178013804469, 0.9109541843179556, 0.07821616551324684, 0.05078095176454778, 0.9130796647001046, + 0.9534802944405797, 0.9690251328314643, 0.42575080842172963, 0.7001480708771485, 0.4041297773374436, + 0.4185210345469337, 0.7190995465953123, 0.7640566649798914, 0.23242098306402514, 0.3542816499880481, + 0.5507254523321877, 0.3029570242047027, 0.3038459304238752, 0.9598648212640765, 0.7204511737388988, + 0.8790782739473424, 0.7794923303105117, 0.18787115939366794, 0.9934696708319142, 0.4155138570203807, + 0.33151958596483977, 0.7351239628799405, 0.20830903644833842, 0.7740550628440345, 0.4191234862718004, + 0.23674572501250302, 0.059571405085674156, 0.9129267438467386, 0.06451592284712904, 0.6857709392141015, + 0.8380269534549667, 0.39885910326895746, 0.2834031729279012, 0.2382256471610743, 0.006052425806343575, + 0.707682901066885, 0.9101318378861685, 0.5123191129010624, 0.9439951816262205, 0.5463959087566002, + 0.05760611428924023, 0.676503376165408, 0.44429048072068333, 0.5230209296642828, 0.3224367091169168, + 0.6913680319012937, 0.17110739904678152, 0.16779286710665142, 
0.6335681699796741, 0.07931757368130377, + 0.46888858792493004, 0.05797277783754817, 0.20480686637597612, 0.8200671332173322, 0.4353078372661333, + 0.9539464470659443, 0.8303326350536121, 0.6278771584995406, 0.2509169090532466, 0.1774990588847868, + 0.4984180010157796, 0.37490941253308996, 0.747904697670194, 0.4689694228884409, 0.07865978524122308, + 0.018278256806917637, 0.7339557318674227, 0.9194551313394231, 0.5689890773146097, 0.8815381229146946, + 0.03744741097255122, 0.9047169962685846, 0.4610613790126912, 0.2475915213540839, 0.4298710416601963, + 0.8221398481995089, 0.23343189008359377, 0.5350763782441681, 0.14739879873747308, 0.33280129854717244, + 0.23821919516506462, 0.7167817307593071, 0.8773007050805388, 0.48070282049083113, 0.08899633176897048, + 0.9920266776266308, 0.9638471775191081, 0.7215840179080331, 0.36423940072103955, 0.17430531770484514, + 0.9415074521796919, 0.2738258731873858, 0.9306468185245629, 0.6656678169751403, 0.1155683928384511, + 0.8747709248985905, 0.12635835804712037, 0.5759875379497646, 0.5731929978928748, 0.03741903689544501, + 0.19725909198310998, 0.7735858565736194, 0.7784261014648269, 0.18197527553087145, 0.002638591858257655, + 0.691716324882675, 0.044337163682884206, 0.6293554666845848, 0.40764392827851403, 0.2538655700672533, + 0.8281469647313634, 0.023926045054366463, 0.8167534085038402, 0.43320133354594026, 0.24862060630736682, + 0.09125749468359245, 0.8118122437010404, 0.9973271765376198, 0.9865525895778507, 0.8163056554998648, + 0.48861250541718937, 0.01814006325736084, 0.7541958032692393, 0.8774612716989119, 0.3760072307293194, + 0.2342391882453575, 0.5507128966242177, 0.8814107840171872, 0.7629915021375545, 0.022596785422104193, + 0.4676586074696423, 0.6323496539184577, 0.10709853732458496, 0.24458540292747133, 0.7432044950079842, + 0.991407564153164, 0.3329371174770699, 0.14443919067113453, 0.9763728131494807, 0.13169707338891068, + 0.2574020118799679, 0.9117198409079358, 0.4813044045568353, 0.7148710776112692, 
0.10816472148122425, + 0.4872820160105873, 0.7843715998368327, 0.9792048963235088, 0.5032302898011388, 0.9133488145295496, + 0.8343572569411505, 0.45894548309522043, 0.8041104427062074, 0.8961208287376816, 0.4790973691825182, + 0.0981833133220551, 0.8764985001957379, 0.5258092509418005, 0.8342042669276518, 0.8790350110712204, + 0.7094946960609122, 0.7708450113645778, 0.4180654412461543, 0.4242979420279134, 0.24139470932602636, + 0.2209321570893309, 0.7846941996259452, 0.12945719412306944, 0.7602339455424194, 0.10743040252161462, + 0.8334328323308305, 0.47291321372136974, 0.8635521722706079, 0.4701334745256115, 0.1506952535704139, + 0.9018427816330945, 0.6748520059749242, 0.6573750823451866, 0.8618860620086874, 0.08827502135749443, + 0.6729164433989506, 0.21618869286155384, 0.4287518670302243, 0.2407327438728406, 0.6338004043821022, + 0.8515817109182252, 0.9659962777150152, 0.4315493010082393, 0.6266082092973347, 0.04229595441634182, + 0.8184353596321909, 0.10225103323158635, 0.11394336686699535, 0.4022127256961855, 0.30838155456109306, + 0.3195787933473151, 0.11242740506794102, 0.06655005190369112, 0.5103072329299937, 0.22668773651030127, + 0.32922441298388727, 0.8140348723745937, 0.5638190653380799, 0.26541973372425653, 0.7461012639404849, + 0.5871631865450887, 0.056334307939404415, 0.7231806850118402, 0.6744439230965042, 0.8458065219298543, + 0.009223176587899173, 0.6934220235290146, 0.7552659426337391, 0.40417163368066655, 0.18495513003494457, + 0.716067507969818, 0.14155481130771042, 0.029361076807311592, 0.8830993452371644, 0.6613103229791982, + 0.9398338818781078, 0.41160144347284433, 0.46685068003629737, 0.046369043986771774, 0.23308706601773554, + 0.4844512179420487, 0.9643822582174879, 0.6006453251508257, 0.6936888180218606, 0.3013953093096562, + 0.7354859248567551, 0.8349944098141118, 0.4048627428035879, 0.44505698895658985, 0.7751604897094069, + 0.8744909179894252, 0.947828891904564, 0.8640222948104718, 0.3595682016631978, 0.6549008736011241, + 
0.8479504542038493, 0.006679071347445054, 0.2724677376754999, 0.9915443051333377, 0.21071715217089027, + 0.8684246639753141, 0.7856846182004006, 0.6859853232823806, 0.4789831650688282, 0.8185051822940307, + 0.5789517433188925, 0.6919294885862914, 0.921789377332735, 0.41376092831273026, 0.29579140546489957, + 0.534265983707278, + }; + + TransArgs args{ + reinterpret_cast(data_4d), FORMAT_FRACTAL_Z, FORMAT_NHWC, {8, 2, 16, 16}, {17, 2, 2, 17}, DT_FLOAT}; + TransResult result; + + FormatTransferFracZNhwc transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data)); + for (int i = 0; i < sizeof(data) / sizeof(data[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data[i]); + } +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_hwcn_c1hwncoc0_unittest.cc b/tests/ut/ge/common/format_transfer_hwcn_c1hwncoc0_unittest.cc new file mode 100644 index 00000000..16c3ba77 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_hwcn_c1hwncoc0_unittest.cc @@ -0,0 +1,13748 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_hwcn_c1hwncoc0.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UtestFormatTransferHwcnC1hwncoc0 : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_data_type_uint8) { + uint8_t data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, + }; + + TransArgs args{data, FORMAT_HWCN, FORMAT_C1HWNCoC0, {4, 4, 3, 1}, {1, 4, 4, 1, 16, 16}, DT_UINT8}; + TransResult result; + + FormatTransferHwcnC1hwncoc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_data_type_int32) { + int32_t data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, + }; + + FormatTransferHwcnC1hwncoc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_C1HWNCoC0, {4, 4, 3, 1}, {1, 4, 4, 1, 16, 16}, DT_INT32}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_src_format_nchw) { + float data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, + }; + + FormatTransferHwcnC1hwncoc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_C1HWNCoC0, {4, 4, 3, 1}, {1, 4, 4, 1, 16, 16}, DT_FLOAT}; + + TransResult result; + 
EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, UNSUPPORTED); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_dst_format_nc1khkwhwc0) { + float data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, + }; + + FormatTransferHwcnC1hwncoc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_NC1KHKWHWC0, {4, 4, 3, 1}, {1, 4, 4, 1, 16, 16}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_src_shape) { + float data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, + }; + + FormatTransferHwcnC1hwncoc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_NC1KHKWHWC0, {4, 4, 3}, {1, 4, 4, 1, 16, 16}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_src_shape2) { + float data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, + }; + + FormatTransferHwcnC1hwncoc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_C1HWNCoC0, {4, 4}, {1, 4, 4, 1, 16, 16}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), 
PARAM_INVALID); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_src_shape3) { + float data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, + }; + + FormatTransferHwcnC1hwncoc0 transfer; + TransArgs args{reinterpret_cast(data), + FORMAT_HWCN, + FORMAT_NC1KHKWHWC0, + {4, 4, 3, -1}, + {1, 4, 4, 3, 16, 16}, + DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, PARAM_INVALID); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_dst_format) { + float data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, + }; + + FormatTransferHwcnC1hwncoc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_NC1KHKWHWC0, {4, 4, 3, 1}, {1, 1, 4, 4, 16, 16}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_invalid_dst_shape2) { + float data[4 * 4 * 3 * 1] = { + 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 101, 102, 103, 104, + 105, 106, 107, 108, 109, 110, 111, 112, 201, 202, 203, 204, 205, 206, 207, 208, + 209, 210, 211, 212, 301, 302, 303, 304, 305, 306, 307, 308, 309, 310, 311, 312, + }; + + FormatTransferHwcnC1hwncoc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_C1HWNCoC0, {4, 4, 3, 1}, {2, 4, 4, 1, 16, 16}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + 
+TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_fp16_success_lt_cube) { + uint16_t data[1 * 1 * 1 * 1] = { + 15113, + }; + uint16_t data_6d[1 * 1 * 1 * 1 * 16 * 16] = { + 15113, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_C1HWNCoC0, {1, 1, 1, 1}, {1, 1, 1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferHwcnC1hwncoc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_6d)); + for (int i = 0; i < sizeof(data_6d) / sizeof(data_6d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_6d[i]); + } + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, SUCCESS); +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_fp16_success_eq_cube) { + uint16_t data[4 * 4 * 16 * 1] = { + 12645, 14633, 15320, 14433, 15165, 15047, 13830, 14508, 15278, 14154, 13532, 13839, 14970, 14273, 14616, 14711, + 11952, 14887, 14729, 13614, 13478, 14522, 14147, 11144, 15107, 13564, 14352, 14639, 14662, 13086, 13693, 14067, + 15036, 14841, 14326, 14089, 14852, 14623, 15283, 14344, 13962, 15084, 14657, 14154, 14200, 14896, 
14563, 11217, + 13603, 14815, 15010, 11567, 14761, 15134, 15035, 14146, 14499, 14180, 14721, 14891, 14768, 14392, 14785, 15012, + 15007, 12596, 13899, 12321, 14661, 14890, 13671, 12573, 13413, 14209, 15222, 12603, 15129, 14647, 13872, 13590, + 14681, 12216, 13767, 13402, 12884, 14936, 14808, 14355, 14305, 14438, 14119, 14776, 13720, 14046, 14748, 13212, + 14000, 13509, 14440, 14659, 11826, 14333, 8774, 15114, 14939, 13985, 14522, 14855, 14781, 13573, 15106, 13698, + 14476, 15228, 15058, 12869, 12086, 14357, 14130, 13966, 14526, 14678, 15108, 14579, 14976, 13274, 14262, 15120, + 14780, 14257, 15284, 14800, 15091, 15299, 13724, 14127, 15056, 14654, 14881, 13736, 14030, 14227, 14382, 15140, + 14068, 14013, 12639, 14995, 10596, 10501, 15029, 13543, 12682, 14597, 14390, 15301, 14582, 15214, 13571, 14725, + 13897, 13078, 9868, 13569, 15358, 14417, 14445, 15126, 14470, 13145, 14483, 14557, 14139, 14513, 14773, 12032, + 15055, 14267, 13681, 14748, 13844, 13620, 15016, 14635, 14348, 14655, 15240, 15359, 14784, 15093, 14534, 14619, + 11677, 12206, 15270, 13777, 14574, 12283, 11265, 11326, 13932, 15055, 13331, 14461, 14841, 15065, 13824, 15185, + 14438, 10315, 13020, 14978, 13350, 12751, 14639, 12404, 13012, 13042, 13779, 12811, 14088, 13573, 13606, 14758, + 11869, 14837, 14567, 15208, 15274, 14937, 14079, 14031, 15192, 15309, 14776, 14386, 13435, 14708, 11464, 15279, + 13535, 14943, 14386, 14293, 14413, 15271, 14957, 15186, 7837, 14912, 15279, 13994, 13416, 11451, 13456, 12447, + }; + + uint16_t data_6d[1 * 4 * 4 * 1 * 16 * 16] = { + 12645, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14633, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14433, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15165, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15047, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13830, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14508, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 15278, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13532, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13839, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14970, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14273, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14616, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14711, 11952, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14887, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14729, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13614, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14522, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14147, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 11144, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13564, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14352, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14662, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13086, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13693, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14067, 15036, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14841, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14326, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14089, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14852, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14623, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15283, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14344, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 13962, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15084, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14657, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 14896, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14563, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 11217, 13603, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14815, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 11567, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14761, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15035, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14499, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14721, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14891, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14768, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14392, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14785, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15012, 15007, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12596, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13899, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12321, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14661, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14890, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13671, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12573, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15222, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12603, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14647, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13872, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 13590, 14681, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 12216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13767, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13402, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 12884, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14936, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14808, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14305, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14438, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13720, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14046, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14748, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13212, 14000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13509, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14440, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14659, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 11826, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 8774, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 15114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14939, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13985, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14522, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14855, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14781, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13573, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 15106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13698, 14476, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15228, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15058, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12869, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12086, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14357, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14130, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13966, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14526, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14678, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15108, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14579, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14976, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13274, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14262, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15120, 14780, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14257, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15284, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14800, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15091, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15299, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 13724, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14127, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15056, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14654, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14881, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 13736, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14030, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14227, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14382, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15140, 14068, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14013, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12639, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14995, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 10596, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 10501, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15029, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 13543, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12682, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14597, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14390, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15301, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
14582, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15214, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 13571, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14725, 13897, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13078, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9868, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13569, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15358, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14417, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14445, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15126, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14470, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13145, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14483, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14557, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14139, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14513, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14773, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12032, 15055, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14267, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13681, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14748, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13844, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13620, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15016, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14635, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14348, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14655, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15240, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15359, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14784, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15093, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14534, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14619, 11677, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12206, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 15270, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13777, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14574, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12283, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11265, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11326, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13932, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15055, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13331, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14461, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14841, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15065, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13824, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15185, 14438, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10315, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13020, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14978, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13350, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12751, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14639, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12404, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13012, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13042, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13779, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12811, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14088, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13573, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13606, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14758, 11869, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14837, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14567, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15208, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15274, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14937, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14079, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 14031, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15192, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15309, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14776, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14386, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13435, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14708, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11464, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15279, 13535, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14943, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14386, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14293, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14413, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15271, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14957, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15186, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7837, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14912, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15279, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13994, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13416, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11451, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13456, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12447, + }; + + TransArgs args{reinterpret_cast(data), + FORMAT_HWCN, + FORMAT_C1HWNCoC0, + {4, 4, 16, 1}, + {1, 4, 4, 1, 16, 16}, + DT_FLOAT16}; + TransResult result; + + FormatTransferHwcnC1hwncoc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_6d)); + for (int i = 0; i < sizeof(data_6d) / sizeof(data_6d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_6d[i]); + } +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_fp16_success_gt_cube) { + uint16_t data[4 * 4 * 18 * 1] = { + 14565, 14517, 11937, 14184, 12256, 14175, 14079, 10294, 12826, 14537, 14158, 15202, 
15009, 13370, 10297, 13339, + 12829, 13569, 13739, 14959, 15082, 14412, 14675, 14290, 13366, 14483, 15075, 15324, 14819, 13401, 13947, 15193, + 15069, 14371, 14625, 14414, 14604, 14434, 14664, 15124, 15237, 14269, 14180, 14446, 13470, 14892, 15273, 14384, + 14983, 14848, 13982, 14507, 13760, 14659, 13019, 13304, 13484, 13850, 14821, 15158, 14394, 15062, 15360, 13755, + 15137, 15133, 15246, 14083, 14781, 14839, 13960, 14606, 11574, 14966, 15049, 14617, 15210, 13950, 15347, 12890, + 11499, 14547, 15283, 12599, 11355, 14027, 11928, 14581, 12947, 13410, 13533, 15194, 15139, 13626, 14253, 14135, + 13564, 14206, 14806, 14390, 14395, 15223, 14989, 14575, 14499, 11659, 12637, 15203, 15100, 7824, 13215, 13748, + 15033, 9676, 14966, 15319, 9847, 14483, 14010, 14596, 13987, 9443, 13319, 14000, 12776, 15053, 13287, 13881, + 12626, 14531, 15207, 15077, 13117, 14297, 15319, 14457, 14373, 14892, 12128, 15103, 15036, 14850, 14568, 8541, + 14549, 13355, 14697, 14790, 15146, 12319, 14378, 14239, 14715, 12741, 15331, 13285, 15222, 14856, 13820, 14906, + 14414, 12651, 8603, 13362, 14541, 8135, 12912, 14646, 14950, 14595, 13315, 11524, 13995, 15203, 15317, 13332, + 14233, 13959, 15345, 12891, 13423, 14152, 13521, 5104, 14853, 13550, 13039, 12624, 15286, 14205, 7388, 13467, + 14655, 14535, 14807, 14402, 12785, 14614, 11269, 11427, 15094, 15351, 14353, 5359, 13681, 15216, 15229, 7516, + 14872, 14047, 13049, 14044, 13898, 11374, 14806, 13831, 14186, 13970, 15100, 13584, 14987, 14485, 12866, 14090, + 15182, 14753, 15306, 14519, 14418, 10923, 14409, 14982, 15311, 14869, 14684, 14658, 14390, 14884, 14511, 11804, + 14798, 13697, 14483, 12489, 15208, 14126, 14920, 14559, 10435, 9400, 14147, 10617, 13650, 14651, 12556, 13767, + 14448, 15211, 13578, 15011, 11703, 14306, 14676, 15254, 13989, 14771, 13538, 15253, 15192, 14483, 11941, 14819, + 14166, 14892, 14592, 14545, 9841, 12710, 15109, 13382, 13875, 12953, 13008, 14341, 13954, 13552, 14958, 15002, + }; + + uint16_t data_6d[2 * 4 * 4 * 1 * 16 * 
16] = { + 14565, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14517, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 11937, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 12256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14175, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14079, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 10294, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 12826, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14537, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14158, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15202, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15009, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13370, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10297, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13339, + 13739, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14959, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14412, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14675, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14290, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13366, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14483, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15075, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15324, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14819, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13401, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13947, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15193, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15069, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14371, + 14604, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14434, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14664, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15124, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15237, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 14269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14180, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14446, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 13470, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14892, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15273, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14384, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14983, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14848, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13982, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14507, + 13019, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 13484, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13850, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14821, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 15158, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14394, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15062, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15360, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 13755, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15137, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15133, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15246, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14083, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14781, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14839, + 11574, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14966, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15049, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14617, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13950, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 15347, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12890, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 11499, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14547, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15283, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12599, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11355, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14027, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11928, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14581, + 13533, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15194, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15139, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13626, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14135, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13564, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14206, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14806, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14390, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14395, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15223, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14989, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14575, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14499, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11659, + 15100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 7824, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 13215, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13748, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15033, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 9676, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14966, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15319, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 9847, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14483, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14010, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14596, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13987, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9443, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13319, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14000, + 13287, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13881, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 12626, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14531, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 15077, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13117, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14297, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15319, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14457, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14373, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14892, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12128, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15103, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15036, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14850, + 14549, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14697, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14790, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 15146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 12319, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14378, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14239, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14715, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12741, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15331, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13285, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15222, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14856, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13820, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14906, + 8603, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13362, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 8135, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 12912, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14646, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14950, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14595, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 13315, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 11524, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13995, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15203, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15317, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13332, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14233, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13959, + 13423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 13521, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 5104, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14853, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 13550, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13039, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12624, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15286, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14205, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 7388, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13467, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14655, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14535, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14807, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14402, + 11269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 11427, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15094, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 15351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 5359, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13681, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15216, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15229, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, 0, 7516, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14872, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14047, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13049, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14044, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13898, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11374, + 14186, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13970, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 15100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13584, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14485, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 12866, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 14090, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15182, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14753, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15306, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14519, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14418, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10923, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14409, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14982, + 14684, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14658, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14390, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14884, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14511, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 11804, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 14798, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13697, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 14483, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 12489, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15208, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14126, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14920, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14559, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 10435, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 9400, + 13650, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 12556, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 13767, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14448, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 15211, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 13578, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 15011, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 11703, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 14306, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14676, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15254, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13989, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14771, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13538, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15253, + 11941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14819, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 14166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 14892, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 14592, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 14545, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 9841, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 12710, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 15109, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 13382, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13875, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12953, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13008, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 14341, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13954, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13552, + 12829, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13569, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14414, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13760, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14659, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13960, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14606, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12947, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13410, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12637, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 15053, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14568, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 8541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14414, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 12651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15345, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 12891, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12785, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14614, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14806, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 13831, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15311, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14869, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14147, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 10617, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 14483, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14958, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 15002, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{reinterpret_cast(data), + FORMAT_HWCN, + FORMAT_C1HWNCoC0, + {4, 4, 18, 1}, + {2, 4, 4, 1, 16, 16}, + DT_FLOAT16}; + TransResult result; + + FormatTransferHwcnC1hwncoc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_6d)); + for (int i = 0; i < sizeof(data_6d) / sizeof(data_6d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_6d[i]); + } +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_fp32_Success_lt_cube) { + float data[1 * 1 * 1 * 1] = { + 0.029033836332871932, + }; + float data_6d[1 * 1 * 1 * 1 * 16 * 16] = { + 0.029033836332871932, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_C1HWNCoC0, {1, 1, 1, 1}, {1, 1, 1, 1, 16, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferHwcnC1hwncoc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_6d)); + for (int i = 0; i < sizeof(data_6d) / sizeof(data_6d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_6d[i]); + } +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_fp32_success_eq_cube) { + float data[4 * 4 * 16 * 1] = { + 0.9287460024109794, 0.2406040495018288, 0.4469466172807033, 0.18882162922891632, 0.76109939494347, + 0.8840655482887357, 0.9872180535026731, 0.11661622466386445, 0.01235434128678281, 0.6695347367567285, + 0.7330905362956434, 0.9500990356218342, 
0.6457271811424727, 0.6912643273019475, 0.6703753186632049, + 0.5526387021634535, 0.08705872980636764, 0.4385500267268264, 0.5216635886669073, 0.4630255672949294, + 0.8447529479061752, 0.23010207882669031, 0.29600005325577006, 0.02000550424397074, 0.7628668574925462, + 0.5151224887847649, 0.8359422516194682, 0.0915549204131687, 0.28249447760074176, 0.5593289876304682, + 0.7529148849244871, 0.35147879233628254, 0.8522667732376881, 0.6188752944349367, 0.4125801589727227, + 0.5575123955704276, 0.9606574527884366, 0.9277948431850809, 0.939489083802638, 0.24504726014528755, + 0.02999449647781749, 0.30412310024943934, 0.1569667109270132, 0.20531688682553262, 0.511866170077109, + 0.30737250907136127, 0.5262560471173453, 0.051581442743936834, 0.28808257291456085, 0.9790858030799796, + 0.8557661260282156, 0.36218305985767596, 0.23228812370501528, 0.6202104009666045, 0.8867642180949732, + 0.22763923689310117, 0.683892145506164, 0.15600116320152568, 0.8441515372048103, 0.7606861500676428, + 0.7065017767052714, 0.16378490819181213, 0.2436941641993159, 0.24179674334818047, 0.33003082683163454, + 0.5592171765943758, 0.24805778500361175, 0.42431250531190945, 0.9250793225267471, 0.6342480713430114, + 0.5202507413267883, 0.8366451214336564, 0.41107414664728825, 0.8830127077533332, 0.4229721229885849, + 0.3907250704851828, 0.5473355612929265, 0.2579362446014144, 0.29814407939629906, 0.7974491298876576, + 0.11314089555697293, 0.09686198188162443, 0.766867100263332, 0.26442474662174176, 0.2933189470390257, + 0.22549268624349572, 0.4961002633634892, 0.1580013775297051, 0.21598290255717667, 0.4655927560826495, + 0.8277472438455392, 0.41612131449467127, 0.3335383280743127, 0.10846113574338023, 0.6339633064975212, + 0.9151650782934098, 0.21942078738580506, 0.549211704289178, 0.900038868763936, 0.42750089237786226, + 0.24495290801771097, 0.7807783442418869, 0.11582898086933058, 0.273017019466652, 0.6167952968186619, + 0.805968619144528, 0.5592353328539852, 0.17408130039506065, 
0.9140423648414512, 0.397637193906782, + 0.2429466103320661, 0.7576964175710638, 0.27755909265772416, 0.35174929928544163, 0.025116336045789667, + 0.8200439777524748, 0.15720187100200855, 0.8405458279462585, 0.9743776693254922, 0.5093505030290263, + 0.5248426457649971, 0.6903619402857111, 0.9615336231506699, 0.2657925151967536, 0.9335887667671454, + 0.4655908804279567, 0.589653875263392, 0.34991672103804417, 0.8853866814617952, 0.2622863324354565, + 0.5873699968555803, 0.35658721497888446, 0.07549717596969518, 0.30570308676663926, 0.016710650584320863, + 0.060511009580757724, 0.27730366170461584, 0.6467835576800469, 0.78946400211903, 0.12673883796963992, + 0.6211809769142388, 0.4535415813018152, 0.01900632641535227, 0.07429317586616391, 0.08873439700873431, + 0.5183778209898324, 0.07045645847676274, 0.20871881410884752, 0.12099162032633193, 0.06756553561714795, + 0.18622090431815963, 0.2550314449072233, 0.9548117463313748, 0.4963000444548905, 0.9069349499388143, + 0.9577869735201033, 0.4562239464601897, 0.3153033546637033, 0.38483711369107565, 0.30047291174136637, + 0.9659042903615469, 0.08121765524841995, 0.4948994117457166, 0.6527347570733716, 0.4342822182228542, + 0.12011671561156101, 0.16733239805529432, 0.9971875623339772, 0.652088843546415, 0.892783518485851, + 0.1797647996524161, 0.16357215367441935, 0.07445336658438717, 0.7175786171125849, 0.5373097051865485, + 0.7588555623293853, 0.24491199195515445, 0.6006434355933403, 0.2707709862387566, 0.5089645596354377, + 0.10921056750583202, 0.4455355571312122, 0.9457933356842753, 0.12147470400771088, 0.03149362879979012, + 0.48572911640356164, 0.3250179215480892, 0.35311147401543064, 0.2092635837818405, 0.49042076332409634, + 0.40242274607334083, 0.004886615073804279, 0.20426340630702144, 0.1838638359609932, 0.7753027700256112, + 0.35858437729817183, 0.6549264421919153, 0.9518718749485302, 0.7867904941578512, 0.18952975452560894, + 0.5722812243941315, 0.22334762573351363, 0.2530877482635505, 0.630808154618444, 
0.49578220340765145, + 0.015165276035877806, 0.06819096090677312, 0.3485257589215519, 0.19486401838003786, 0.7005237519796033, + 0.9170064488757839, 0.3028155964556515, 0.9707152112263164, 0.8647697904557152, 0.4458911971404024, + 0.08084238914494124, 0.31600901382771396, 0.4219203574297017, 0.3165211497269509, 0.5452055491181059, + 0.9380819907542078, 0.07699622976151643, 0.6722737835384232, 0.5828829255010801, 0.4074030107848773, + 0.12009262426356326, 0.1299589517611489, 0.7879216523704174, 0.6406279562372446, 0.3387666455887235, + 0.25359168817784106, 0.3664733566676899, 0.2961304938638407, 0.884096601158495, 0.594299500006145, + 0.841562685279157, 0.844669921819271, 0.9523065435900523, 0.9996692237492393, 0.8429188149689719, + 0.5180064341547981, 0.35347561778319236, 0.5513515344510534, 0.9818366109994351, 0.7653855239166565, + 0.39074108458136203, 0.6052080786642133, 0.01814107282507771, 0.6614808714120729, 0.5817911146666129, + 0.6468528789424193, 0.5107046740684658, 0.7835858669708228, 0.5639340388879576, 0.18955202007187844, + 0.6703755117543645, + }; + float data_6d[1 * 4 * 4 * 1 * 16 * 16] = { + 0.9287460024109794, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2406040495018288, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4469466172807033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18882162922891632, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.76109939494347, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8840655482887357, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9872180535026731, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11661622466386445, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.01235434128678281, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6695347367567285, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7330905362956434, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9500990356218342, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6457271811424727, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6912643273019475, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6703753186632049, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5526387021634535, + 0.08705872980636764, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4385500267268264, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5216635886669073, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4630255672949294, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8447529479061752, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23010207882669031, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.29600005325577006, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02000550424397074, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7628668574925462, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5151224887847649, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8359422516194682, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0915549204131687, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.28249447760074176, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5593289876304682, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7529148849244871, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35147879233628254, + 0.8522667732376881, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6188752944349367, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4125801589727227, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5575123955704276, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9606574527884366, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9277948431850809, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.939489083802638, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24504726014528755, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02999449647781749, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30412310024943934, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1569667109270132, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20531688682553262, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.511866170077109, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30737250907136127, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5262560471173453, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.051581442743936834, + 0.28808257291456085, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9790858030799796, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8557661260282156, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.36218305985767596, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23228812370501528, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6202104009666045, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8867642180949732, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22763923689310117, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.683892145506164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15600116320152568, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8441515372048103, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7606861500676428, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7065017767052714, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16378490819181213, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2436941641993159, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24179674334818047, + 0.33003082683163454, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5592171765943758, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24805778500361175, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42431250531190945, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9250793225267471, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6342480713430114, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5202507413267883, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8366451214336564, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41107414664728825, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8830127077533332, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4229721229885849, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3907250704851828, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5473355612929265, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2579362446014144, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.29814407939629906, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7974491298876576, + 0.11314089555697293, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.09686198188162443, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.766867100263332, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.26442474662174176, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2933189470390257, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.22549268624349572, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4961002633634892, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1580013775297051, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.21598290255717667, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4655927560826495, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8277472438455392, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41612131449467127, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3335383280743127, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10846113574338023, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6339633064975212, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9151650782934098, + 0.21942078738580506, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.549211704289178, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.900038868763936, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42750089237786226, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24495290801771097, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7807783442418869, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11582898086933058, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.273017019466652, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6167952968186619, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.805968619144528, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5592353328539852, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17408130039506065, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9140423648414512, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.397637193906782, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2429466103320661, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7576964175710638, + 0.27755909265772416, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35174929928544163, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.025116336045789667, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8200439777524748, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15720187100200855, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8405458279462585, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9743776693254922, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5093505030290263, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5248426457649971, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6903619402857111, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9615336231506699, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2657925151967536, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9335887667671454, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4655908804279567, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.589653875263392, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34991672103804417, + 0.8853866814617952, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2622863324354565, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5873699968555803, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35658721497888446, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07549717596969518, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30570308676663926, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.016710650584320863, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.060511009580757724, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.27730366170461584, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6467835576800469, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.78946400211903, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12673883796963992, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6211809769142388, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4535415813018152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.01900632641535227, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07429317586616391, + 0.08873439700873431, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5183778209898324, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07045645847676274, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20871881410884752, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.12099162032633193, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06756553561714795, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18622090431815963, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2550314449072233, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9548117463313748, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4963000444548905, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9069349499388143, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9577869735201033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4562239464601897, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3153033546637033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.38483711369107565, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30047291174136637, + 0.9659042903615469, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08121765524841995, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4948994117457166, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6527347570733716, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4342822182228542, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12011671561156101, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16733239805529432, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9971875623339772, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.652088843546415, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.892783518485851, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1797647996524161, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16357215367441935, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07445336658438717, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7175786171125849, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5373097051865485, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7588555623293853, + 0.24491199195515445, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6006434355933403, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2707709862387566, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5089645596354377, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10921056750583202, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4455355571312122, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9457933356842753, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12147470400771088, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.03149362879979012, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.48572911640356164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3250179215480892, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35311147401543064, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2092635837818405, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49042076332409634, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40242274607334083, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.004886615073804279, + 0.20426340630702144, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1838638359609932, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7753027700256112, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.35858437729817183, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6549264421919153, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9518718749485302, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7867904941578512, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18952975452560894, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5722812243941315, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22334762573351363, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2530877482635505, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.630808154618444, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49578220340765145, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.015165276035877806, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06819096090677312, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3485257589215519, + 0.19486401838003786, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7005237519796033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9170064488757839, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3028155964556515, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9707152112263164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8647697904557152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4458911971404024, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08084238914494124, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.31600901382771396, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4219203574297017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3165211497269509, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5452055491181059, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9380819907542078, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07699622976151643, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6722737835384232, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5828829255010801, + 0.4074030107848773, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12009262426356326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1299589517611489, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7879216523704174, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6406279562372446, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3387666455887235, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.25359168817784106, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3664733566676899, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2961304938638407, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.884096601158495, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.594299500006145, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.841562685279157, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.844669921819271, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9523065435900523, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9996692237492393, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8429188149689719, + 0.5180064341547981, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35347561778319236, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.5513515344510534, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9818366109994351, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7653855239166565, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.39074108458136203, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6052080786642133, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.01814107282507771, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6614808714120729, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5817911146666129, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6468528789424193, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5107046740684658, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7835858669708228, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5639340388879576, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18955202007187844, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6703755117543645, + + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_C1HWNCoC0, {4, 4, 16, 1}, {1, 4, 4, 1, 16, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferHwcnC1hwncoc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, 
result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_6d)); + for (int i = 0; i < sizeof(data_6d) / sizeof(data_6d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_6d[i]); + } +} + +TEST_F(UtestFormatTransferHwcnC1hwncoc0, hwcn_to_6d_fp32_success_gt_cube) { + float data[4 * 4 * 18 * 1] = { + 0.6720256978880832, 0.6298321784394392, 0.8295463903639503, 0.08068822718676427, 0.2763754699096522, + 0.46152014633273986, 0.20230989654712017, 0.8265439766280472, 0.6978840683277164, 0.3947669140970781, + 0.6967164722950044, 0.8077474483635809, 0.19827523419269066, 0.3826597464491931, 0.5075781383319881, + 0.16553192090689484, 0.7574068656303569, 0.8345474929377174, 0.38764251162629126, 0.030725684367002737, + 0.5854377878139135, 0.9635834400583236, 0.49041179401730606, 0.621085945352873, 0.44172994709534286, + 0.5833817236813902, 0.8600448001807334, 0.442614470737162, 0.17070317633972198, 0.9988020807965683, + 0.2240540555141428, 0.6044781869127227, 0.30577703539138534, 0.02297159726606912, 0.6092749558646937, + 0.0981101859240252, 0.902324022353879, 0.5538644692423377, 0.18435254776067966, 0.3895724610639357, + 0.19087655730583952, 0.056732904450757515, 0.25552017902227, 0.06563475489528825, 0.756805330230557, + 0.10559137855731648, 0.8400314490094558, 0.8792165029212121, 0.8836700846101576, 0.6520614003045484, + 0.14016100663096442, 0.581151084050165, 0.23183561828290822, 0.4928795451972683, 0.3855507003349319, + 0.7520089723082347, 0.6856546484511095, 0.9754907615630698, 0.14302334875233447, 0.7040998143942422, + 0.6427806893397862, 0.11796374565827439, 0.5399207555415639, 0.638659628241251, 0.9749369607001012, + 0.3505202643968658, 0.08579439029326386, 0.15728399281291716, 0.08269816307243716, 0.6624434213972792, + 0.8445559494723369, 0.5667390062688675, 0.8329561803018197, 0.032120763663658014, 0.9566737019034673, + 0.24486631938044456, 0.10932492657226744, 0.9610284338669389, 0.41097347240583404, 0.0735833696925311, + 0.3780115946364353, 
0.7357275308740584, 0.9117467893165317, 0.23215135900531814, 0.2552443333765917, + 0.4365079028469411, 0.8294987620958296, 0.34605401854979034, 0.08684759407061038, 0.9827470249624215, + 0.7609918535852507, 0.6057237297402626, 0.29981454756176007, 0.17165620361712652, 0.18458981725403534, + 0.004254743220997059, 0.06033948937486866, 0.7790401829914644, 0.11568714682253833, 0.43683466739272303, + 0.8819350949367237, 0.8005961689683059, 0.08186128360840594, 0.7976491103318911, 0.2240607437129538, + 0.4000304869187492, 0.6501001434308644, 0.4390783631610483, 0.012780956383760311, 0.6224623388838404, + 0.11070174075269568, 0.5259081698751006, 0.3517889145201747, 0.3628829064705983, 0.8437125312778323, + 0.6056651611116248, 0.7177350438363246, 0.6999540632573293, 0.3403428629909401, 0.07348171626497002, + 0.39195415450273896, 0.449983124098813, 0.1982705926999152, 0.3580904389416687, 0.5535182266162149, + 0.14208763968019233, 0.3676453029272587, 0.5707966169974507, 0.506818390006326, 0.1751770315457628, + 0.718498080506697, 0.8079214106434996, 0.8595179550008838, 0.40706953073355634, 0.2043863176329428, + 0.43753654062005165, 0.12359348546795079, 0.8688872460691682, 0.26121855072313027, 0.56509648986008, + 0.550065776630837, 0.8998629077235578, 0.8956981051296642, 0.32727466525204096, 0.5642214251740352, + 0.05231827080996221, 0.8038883833385772, 0.30952956046069646, 0.3598608732338813, 0.6145804575957855, + 0.3320551452477549, 0.9638909795216967, 0.08869226956064591, 0.7530597192935157, 0.31240869693130824, + 0.40612578513052544, 0.0640935173214825, 0.9762953742908935, 0.9345851271589867, 0.013659298980408296, + 0.5560394910334464, 0.4261919078800247, 0.5776657549782201, 0.3210961667727359, 0.8655969484625035, + 0.4905974887025172, 0.020861530507902182, 0.41836177257417506, 0.22840946125158446, 0.999760661824541, + 0.5176796791644477, 0.7590124368612039, 0.29805000994564823, 0.004710630015977935, 0.6177707492426662, + 0.7451246003472498, 0.1523039655234233, 
0.9075278261410844, 0.8948665916989693, 0.4927558911152501, + 0.289575591103169, 0.3070639729185224, 0.9742328874167968, 0.6318899822466207, 0.9217385935243647, + 0.19404567944345485, 0.3139318217228174, 0.8371916762608241, 0.1506427751612257, 0.2829262340657469, + 0.49640016645570884, 0.8637019386815881, 0.8771476858744097, 0.4744543148622167, 0.18561372295225653, + 0.7729955705289158, 0.25548747469984023, 0.8044527167142301, 0.12537024160436816, 0.9736015268504483, + 0.7426156935434283, 0.07981904307037135, 0.2516303608793887, 0.4821737490291119, 0.3919587137996561, + 0.6940336352360019, 0.20204425364829914, 0.061410960956086225, 0.9501863128037674, 0.47696633050116755, + 0.17107444463847665, 0.08457274936327863, 0.06611426945217114, 0.42385232680469254, 0.008000018527419495, + 0.15764366047828826, 0.4515792934324635, 0.3980126878557152, 0.6118931459268907, 0.9536200260871817, + 0.6941502008716358, 0.14593827491155287, 0.653804411206232, 0.17122496175413993, 0.8935085488292329, + 0.4470478148103193, 0.3008635842212459, 0.6544943596338767, 0.00840302099207646, 0.32006536950974807, + 0.6513831279668915, 0.5465500886719159, 0.2280696013454805, 0.5298168210264326, 0.17860722906905457, + 0.6553483167887741, 0.5329073894590799, 0.6082473386866861, 0.14094030378222067, 0.4377704114236626, + 0.8187469149625034, 0.7573680466444704, 0.5892725685846012, 0.8539266550501383, 0.09341956386406303, + 0.6593752073372753, 0.24313527975813853, 0.5784193841594126, 0.17778501843915462, 0.5629482573422732, + 0.9221856932277527, 0.5202041093485866, 0.7087739397180405, 0.8282323774102832, 0.8730568803071196, + 0.41526292072882587, 0.7439760697782625, 0.7918970028413459, 0.4563153895380918, 0.5717854219119896, + 0.01748112771668553, 0.22682216394271615, 0.9958918322474324, 0.48183531723667616, 0.8935003354855042, + 0.40177759725197093, 0.6872178583919453, 0.7159034109642741, 0.8916464595265667, 0.15828089333695605, + 0.29836411859959233, 0.6068625352293453, 0.5925920016853471, 
0.8072024784646464, 0.9109206349529911, + 0.722094298160962, 0.10120402884670032, 0.07658301410630952, 0.11917426063790237, 0.9973204706041966, + 0.7135005026702921, 0.1702573277105751, 0.14969293539722228, 0.7224044611911594, 0.49954902657480205, + 0.8418937883473929, 0.22935018929177786, 0.14044259339375098, + }; + + float data_6d[2 * 4 * 4 * 1 * 16 * 16] = { + 0.6720256978880832, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6298321784394392, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8295463903639503, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08068822718676427, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2763754699096522, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.46152014633273986, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20230989654712017, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8265439766280472, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6978840683277164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3947669140970781, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6967164722950044, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8077474483635809, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19827523419269066, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3826597464491931, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5075781383319881, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16553192090689484, + 0.38764251162629126, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.030725684367002737, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5854377878139135, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9635834400583236, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49041179401730606, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.621085945352873, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.44172994709534286, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5833817236813902, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8600448001807334, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.442614470737162, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17070317633972198, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9988020807965683, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.2240540555141428, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6044781869127227, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30577703539138534, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02297159726606912, + 0.902324022353879, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5538644692423377, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18435254776067966, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3895724610639357, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19087655730583952, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.056732904450757515, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.25552017902227, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06563475489528825, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.756805330230557, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10559137855731648, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8400314490094558, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8792165029212121, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8836700846101576, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6520614003045484, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14016100663096442, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.581151084050165, + 0.3855507003349319, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7520089723082347, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6856546484511095, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9754907615630698, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14302334875233447, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7040998143942422, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6427806893397862, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11796374565827439, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5399207555415639, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.638659628241251, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9749369607001012, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3505202643968658, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08579439029326386, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15728399281291716, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08269816307243716, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6624434213972792, + 0.8329561803018197, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.032120763663658014, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9566737019034673, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24486631938044456, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10932492657226744, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9610284338669389, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41097347240583404, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0735833696925311, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3780115946364353, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7357275308740584, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9117467893165317, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.23215135900531814, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2552443333765917, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4365079028469411, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8294987620958296, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.34605401854979034, + 0.7609918535852507, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6057237297402626, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.29981454756176007, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17165620361712652, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18458981725403534, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.004254743220997059, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06033948937486866, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7790401829914644, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11568714682253833, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.43683466739272303, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8819350949367237, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8005961689683059, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08186128360840594, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7976491103318911, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2240607437129538, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4000304869187492, + 0.012780956383760311, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6224623388838404, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11070174075269568, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5259081698751006, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3517889145201747, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3628829064705983, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8437125312778323, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6056651611116248, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7177350438363246, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6999540632573293, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3403428629909401, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07348171626497002, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.39195415450273896, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.449983124098813, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1982705926999152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3580904389416687, + 0.3676453029272587, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5707966169974507, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.506818390006326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1751770315457628, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.718498080506697, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8079214106434996, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8595179550008838, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40706953073355634, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2043863176329428, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.43753654062005165, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12359348546795079, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8688872460691682, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.26121855072313027, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.56509648986008, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.550065776630837, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8998629077235578, + 0.5642214251740352, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05231827080996221, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8038883833385772, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30952956046069646, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3598608732338813, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6145804575957855, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3320551452477549, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9638909795216967, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08869226956064591, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7530597192935157, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.31240869693130824, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40612578513052544, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0640935173214825, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9762953742908935, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9345851271589867, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.013659298980408296, + 0.5776657549782201, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3210961667727359, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8655969484625035, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4905974887025172, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.020861530507902182, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41836177257417506, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22840946125158446, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.999760661824541, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5176796791644477, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7590124368612039, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.29805000994564823, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.004710630015977935, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6177707492426662, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7451246003472498, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1523039655234233, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9075278261410844, + 0.289575591103169, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3070639729185224, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9742328874167968, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6318899822466207, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9217385935243647, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19404567944345485, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3139318217228174, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8371916762608241, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1506427751612257, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2829262340657469, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49640016645570884, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8637019386815881, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8771476858744097, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4744543148622167, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18561372295225653, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7729955705289158, + 0.12537024160436816, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9736015268504483, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7426156935434283, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07981904307037135, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2516303608793887, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4821737490291119, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3919587137996561, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6940336352360019, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.20204425364829914, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.061410960956086225, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9501863128037674, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.47696633050116755, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17107444463847665, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08457274936327863, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06611426945217114, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42385232680469254, + 0.4515792934324635, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3980126878557152, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6118931459268907, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9536200260871817, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6941502008716358, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14593827491155287, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.653804411206232, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17122496175413993, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8935085488292329, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4470478148103193, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3008635842212459, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6544943596338767, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.00840302099207646, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.32006536950974807, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6513831279668915, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5465500886719159, + 0.17860722906905457, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6553483167887741, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5329073894590799, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6082473386866861, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14094030378222067, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4377704114236626, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8187469149625034, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7573680466444704, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5892725685846012, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8539266550501383, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.09341956386406303, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6593752073372753, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24313527975813853, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5784193841594126, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17778501843915462, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5629482573422732, + 0.7087739397180405, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8282323774102832, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8730568803071196, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41526292072882587, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7439760697782625, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7918970028413459, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4563153895380918, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5717854219119896, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.01748112771668553, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22682216394271615, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9958918322474324, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.48183531723667616, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8935003354855042, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40177759725197093, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6872178583919453, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7159034109642741, + 0.29836411859959233, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6068625352293453, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5925920016853471, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8072024784646464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9109206349529911, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.722094298160962, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10120402884670032, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07658301410630952, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11917426063790237, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9973204706041966, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7135005026702921, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1702573277105751, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14969293539722228, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7224044611911594, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49954902657480205, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8418937883473929, + 0.7574068656303569, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8345474929377174, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6092749558646937, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0981101859240252, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23183561828290822, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4928795451972683, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8445559494723369, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5667390062688675, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08684759407061038, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9827470249624215, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6501001434308644, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4390783631610483, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5535182266162149, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14208763968019233, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8956981051296642, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.32727466525204096, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5560394910334464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4261919078800247, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8948665916989693, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4927558911152501, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.25548747469984023, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8044527167142301, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.008000018527419495, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15764366047828826, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2280696013454805, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5298168210264326, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9221856932277527, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5202041093485866, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8916464595265667, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15828089333695605, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22935018929177786, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14044259339375098, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_C1HWNCoC0, {4, 4, 18, 1}, {2, 4, 4, 1, 16, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferHwcnC1hwncoc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_6d)); + for (int i = 0; i < sizeof(data_6d) / sizeof(data_6d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_6d[i]); + } +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_hwcn_fractalz_unittest.cc b/tests/ut/ge/common/format_transfer_hwcn_fractalz_unittest.cc new file mode 100644 index 00000000..df5afe4d --- /dev/null +++ b/tests/ut/ge/common/format_transfer_hwcn_fractalz_unittest.cc @@ -0,0 +1,34463 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_fractal_z.h" + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class UtestFormatTransferHwcnFz : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferHwcnFz, fp16_1c_1n_pad_cn) { + uint16_t data[8 * 8 * 3 * 1] = { + 13127, 14445, 15133, 14580, 14605, 15056, 15081, 14421, 14987, 12263, 12622, 12833, 14095, 14596, 15333, 14456, + 15074, 14849, 13348, 13798, 12663, 14461, 11588, 14507, 12632, 14783, 14086, 14026, 15276, 15278, 14795, 15200, + 15109, 14483, 12120, 14598, 15255, 12325, 11741, 15188, 14898, 13586, 11772, 14704, 13636, 15059, 14681, 14961, + 15070, 13559, 14070, 11768, 12341, 12688, 13874, 11886, 13396, 13482, 14753, 14270, 15218, 14829, 14957, 14860, + 15101, 14467, 15054, 6483, 13929, 14187, 13979, 14475, 14110, 14652, 15293, 15305, 14856, 14307, 14923, 14941, + 15135, 15188, 14778, 13442, 14724, 11632, 15133, 14435, 15289, 13856, 14529, 8967, 11880, 13731, 15354, 13363, + 12063, 13343, 14769, 15251, 15282, 15002, 14695, 9259, 13478, 14118, 13586, 14224, 13911, 15058, 12668, 13077, + 13212, 14749, 12748, 14577, 13872, 13508, 14117, 13519, 14989, 10590, 14688, 15162, 14365, 12335, 12946, 11510, + 15229, 12313, 14507, 13152, 13320, 14705, 13071, 15181, 8469, 15041, 14998, 14824, 14634, 14866, 10316, 14118, + 14719, 14350, 15129, 15233, 13609, 14561, 13526, 14397, 12084, 14020, 14034, 14577, 15041, 13822, 14535, 15320, + 14194, 15112, 11318, 14200, 14878, 13645, 14536, 11995, 15134, 13599, 14490, 12348, 14143, 13132, 14998, 14342, + 10944, 14585, 14425, 15088, 15120, 14816, 14879, 14120, 11740, 15202, 14342, 15044, 14402, 15057, 14616, 14987, + }; + uint16_t ret[64 * 1 * 16 * 16] = { + 13127, 14445, 15133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14580, 14605, 15056, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15081, 14421, 14987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12263, 12622, 12833, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14095, 14596, 15333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14456, 15074, 14849, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13348, 13798, 12663, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14461, 11588, 14507, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12632, 14783, 14086, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14026, 15276, 15278, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14795, 15200, 15109, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14483, 12120, 14598, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15255, 12325, 11741, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15188, 14898, 13586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11772, 14704, 13636, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15059, 14681, 14961, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15070, 13559, 14070, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11768, 12341, 12688, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13874, 11886, 13396, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13482, 14753, 14270, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15218, 14829, 14957, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14860, 15101, 14467, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15054, 6483, 13929, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14187, 13979, 14475, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14110, 14652, 15293, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15305, 14856, 14307, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14923, 14941, 15135, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15188, 14778, 13442, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14724, 11632, 15133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14435, 15289, 13856, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14529, 8967, 11880, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13731, 15354, 13363, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12063, 13343, 14769, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15251, 15282, 15002, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14695, 9259, 13478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14118, 13586, 14224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13911, 15058, 12668, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13077, 13212, 14749, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12748, 14577, 13872, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13508, 14117, 13519, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14989, 10590, 14688, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15162, 14365, 12335, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12946, 11510, 15229, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12313, 14507, 13152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13320, 14705, 13071, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15181, 8469, 15041, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14998, 14824, 14634, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14866, 10316, 14118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14719, 14350, 15129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15233, 13609, 14561, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13526, 14397, 12084, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14020, 14034, 14577, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15041, 13822, 14535, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 15320, 14194, 15112, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11318, 14200, 14878, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13645, 14536, 11995, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15134, 13599, 14490, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12348, 14143, 13132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 14998, 14342, 10944, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14585, 14425, 15088, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15120, 14816, 14879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14120, 11740, 15202, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14342, 15044, 14402, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15057, 14616, 
14987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, std::vector({8, 8, 3, 1}), + std::vector({64, 1, 16, 16}), DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferHwcnFz, fp16_1) { + uint16_t data[1 * 1 * 1 * 1] = {14912}; + uint16_t ret[1 * 1 * 16 * 16] = { + 14912, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, std::vector({1, 1, 1, 1}), + std::vector({1, 1, 16, 16}), DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferHwcnFz, fp16_3c_3n_pad_cn) { + uint16_t data[4 * 4 * 33 * 47] = { + 15328, 14938, 14411, 14846, 14417, 14366, 15228, 14525, 14482, 14753, 14731, 14550, 14579, 13601, 10362, 13517, + 13611, 14482, 10424, 10756, 15359, 14498, 14957, 14750, 10583, 15023, 14956, 13659, 15051, 14805, 13938, 13872, + 14929, 14065, 15153, 12692, 13693, 11679, 12543, 14618, 15045, 14679, 12916, 8992, 14908, 14038, 14922, 11885, + 14970, 15232, 14781, 14791, 15133, 14452, 15038, 13101, 14875, 15313, 14975, 13626, 10074, 15032, 15222, 14058, + 13781, 14740, 13671, 15291, 14700, 13713, 12328, 14638, 10196, 12913, 11452, 14789, 11801, 14102, 12534, 14992, + 14710, 15188, 12446, 14649, 14908, 15218, 14376, 10757, 11033, 14269, 14338, 14617, 15095, 14593, 14850, 14865, + 14482, 14900, 13625, 14098, 14734, 14934, 14341, 15189, 14424, 13615, 11477, 15097, 14309, 14973, 14727, 15073, + 15086, 15093, 15054, 12005, 14478, 11403, 14808, 14434, 14868, 13759, 13464, 14364, 13029, 13671, 14619, 15116, + 14802, 14814, 14126, 13689, 13929, 14536, 13045, 12019, 14437, 13665, 10001, 11667, 14654, 13491, 12159, 13788, + 15080, 15134, 13697, 14727, 14472, 14824, 15130, 14597, 15183, 12396, 14448, 15005, 15349, 14246, 15341, 14723, + 14160, 8704, 14976, 14845, 12048, 14561, 14033, 14614, 15291, 14722, 11469, 10818, 15142, 14616, 12870, 14911, + 15207, 12766, 13759, 14104, 14451, 
14666, 15234, 13364, 14673, 13457, 13694, 12428, 15282, 14705, 13191, 14968, + 11958, 13858, 11609, 13349, 15340, 15087, 14803, 13234, 15018, 15163, 14602, 14361, 15337, 14988, 14981, 15330, + 13491, 14169, 14520, 14905, 13153, 14529, 15344, 13407, 13460, 13530, 12621, 14652, 14434, 15310, 14142, 13329, + 12334, 13393, 14650, 15121, 12769, 12716, 14320, 11195, 15176, 14444, 14389, 14562, 12982, 13792, 14692, 14734, + 14349, 14302, 12320, 12450, 8949, 14892, 10778, 14669, 14362, 15063, 13948, 14514, 15185, 13562, 15007, 14576, + 14345, 14560, 15198, 15118, 12565, 13008, 13808, 14236, 14483, 14230, 10288, 14901, 14989, 13873, 15036, 15069, + 11557, 14562, 13048, 14347, 15065, 9806, 14201, 14274, 14966, 12311, 14814, 14009, 13705, 15127, 15209, 15168, + 12017, 13101, 14380, 12932, 13612, 15249, 9116, 13829, 13876, 12273, 13846, 14572, 13483, 14052, 14382, 11676, + 14495, 5539, 14569, 14620, 15242, 14746, 14929, 15235, 10329, 14401, 12606, 13374, 14346, 12418, 13986, 15012, + 12319, 13135, 12819, 14886, 11327, 15084, 12258, 13593, 11366, 13442, 14750, 14444, 14695, 15306, 13034, 13700, + 15232, 14466, 13310, 14385, 14230, 15200, 15000, 14282, 14640, 15223, 14631, 14385, 13458, 13633, 13676, 12517, + 14540, 13466, 15203, 13769, 13462, 14640, 14507, 13563, 13745, 9326, 13966, 15007, 13327, 15202, 14515, 11597, + 15065, 14376, 15305, 13339, 9515, 11264, 13656, 14448, 12873, 13580, 9862, 15213, 15341, 10841, 12920, 14559, + 12756, 14871, 13900, 14460, 13240, 14960, 14595, 15187, 14428, 7997, 13668, 14876, 14998, 15095, 15282, 13729, + 14169, 14965, 14135, 15325, 12253, 14119, 14912, 13506, 13323, 11419, 11385, 14933, 14067, 7711, 14604, 15310, + 14649, 15275, 15343, 14240, 13880, 13085, 13849, 12411, 14466, 14574, 10661, 15271, 14164, 14671, 13626, 10301, + 14137, 14506, 13630, 12567, 14687, 15000, 14418, 14932, 15017, 15014, 10361, 13827, 14932, 13751, 13881, 11826, + 14752, 14594, 13344, 13549, 14718, 9259, 14884, 13393, 15283, 14735, 13473, 10855, 13420, 15226, 14436, 
14341, + 8501, 14318, 13623, 14642, 14365, 14749, 13541, 11726, 12804, 15119, 14841, 14801, 11125, 11735, 14885, 13244, + 14658, 14094, 14689, 13258, 14596, 15220, 14018, 13178, 14640, 15075, 15131, 14858, 14447, 14158, 15281, 12354, + 5519, 15231, 14462, 8963, 15189, 12776, 14589, 13071, 14868, 12099, 12825, 11631, 15092, 13212, 13867, 14844, + 15221, 14326, 14476, 13016, 10674, 15219, 14410, 13856, 14774, 14778, 15199, 14345, 14543, 14534, 14837, 14822, + 15079, 15182, 15139, 13982, 15066, 13378, 13413, 13307, 14735, 12957, 14419, 14527, 15281, 13498, 15087, 14996, + 10380, 12041, 11587, 12298, 14557, 15153, 15084, 15154, 15200, 14433, 15112, 14962, 13999, 14106, 14887, 11391, + 14861, 14281, 13891, 8430, 15145, 13106, 14761, 12677, 14606, 13955, 15186, 13751, 14327, 14709, 14391, 12767, + 12855, 14728, 13040, 14384, 14697, 14723, 14461, 12625, 12529, 15177, 15233, 14722, 8474, 14367, 13819, 14501, + 14932, 14619, 13754, 14658, 13107, 15280, 15039, 11767, 14976, 13002, 14933, 12867, 14062, 13631, 14663, 13971, + 14468, 13561, 13455, 8543, 13810, 15342, 14360, 14441, 14838, 15048, 14878, 14443, 14857, 14373, 15020, 13499, + 14365, 15247, 13228, 14303, 14944, 13975, 14275, 15152, 15087, 13356, 15109, 14473, 14347, 14630, 12457, 13549, + 14634, 15147, 14365, 9555, 12571, 13867, 15195, 14815, 13513, 15158, 10945, 14749, 15077, 12691, 13021, 15326, + 13884, 14794, 13470, 15171, 15242, 14587, 14982, 11355, 9528, 14943, 13017, 15320, 14394, 14860, 13822, 14059, + 15121, 10061, 15138, 13438, 13667, 15235, 14445, 13857, 13502, 14086, 14769, 15299, 14573, 14143, 9494, 14821, + 13471, 11343, 15155, 14429, 14894, 13241, 14103, 12444, 14628, 14230, 10974, 13886, 14457, 13068, 13854, 12652, + 15354, 14598, 14411, 12447, 13728, 12896, 12638, 15239, 14363, 15290, 13484, 15053, 14598, 12372, 15069, 14609, + 13676, 14831, 14574, 14926, 15064, 13982, 5890, 10378, 14434, 14484, 11600, 15157, 14620, 12638, 14436, 15113, + 13908, 14611, 15219, 14679, 12990, 12600, 11584, 14557, 13153, 
14920, 13465, 11977, 14109, 14852, 14059, 10677, + 14367, 13759, 15011, 15140, 6747, 11961, 15159, 12210, 14563, 13937, 15084, 14727, 13838, 10296, 13755, 12906, + 10182, 13990, 15101, 14816, 8962, 9712, 14828, 12140, 14969, 14766, 12865, 15164, 15230, 13161, 15090, 12751, + 14343, 14590, 11903, 14647, 14648, 15260, 14551, 13825, 13670, 15179, 14559, 15286, 15291, 14530, 13497, 14539, + 14940, 14028, 14146, 15101, 9149, 14759, 14970, 15314, 9577, 14077, 13954, 14002, 15189, 13118, 14485, 15333, + 14099, 14941, 14362, 14444, 14298, 11843, 14765, 10437, 15007, 14595, 13516, 14601, 13295, 14667, 14513, 14663, + 11916, 14574, 14285, 14326, 15182, 13225, 8788, 14028, 10873, 11633, 15297, 12415, 13705, 14461, 15048, 12092, + 10298, 14507, 12071, 14191, 12564, 9954, 14453, 14630, 14808, 12806, 14882, 15336, 14696, 12691, 14050, 13400, + 12235, 13709, 14747, 13351, 11400, 15204, 13656, 13630, 12578, 15130, 14385, 13870, 15201, 13465, 14341, 14700, + 12900, 14352, 8285, 13410, 13163, 13352, 14079, 14689, 14522, 14404, 10435, 13590, 13646, 14349, 14699, 15333, + 15008, 15098, 15002, 15188, 14717, 14605, 14331, 15111, 14245, 14896, 14538, 12537, 13673, 14790, 15352, 14683, + 12347, 12541, 14526, 15085, 14476, 11847, 14347, 14526, 14372, 14708, 14712, 14394, 14949, 7577, 14376, 14355, + 13973, 14285, 12919, 13885, 14661, 13933, 15295, 14765, 14965, 13957, 11713, 14813, 14625, 13384, 15198, 14475, + 14413, 13898, 15171, 14705, 14245, 15357, 14141, 12551, 13749, 15001, 14889, 14992, 14794, 11280, 13172, 13982, + 13633, 14830, 14768, 14894, 12400, 12916, 14536, 15139, 14106, 12703, 14612, 15016, 14299, 14445, 13090, 14344, + 14849, 14537, 14457, 15054, 15214, 15213, 13468, 14421, 14361, 14760, 15314, 13776, 15001, 12829, 11399, 13402, + 14056, 15027, 14578, 15066, 14444, 15006, 15277, 14374, 15281, 13875, 13100, 15333, 13191, 14640, 15063, 12292, + 13746, 11279, 13429, 14641, 15043, 9211, 14285, 15236, 15245, 13627, 12493, 13789, 12859, 15090, 12935, 14885, + 14695, 14264, 12568, 
15085, 13092, 12458, 9633, 15359, 14079, 14987, 12580, 15020, 14953, 14966, 12812, 14725, + 13984, 14418, 11618, 14770, 14791, 13429, 12487, 14627, 13464, 13739, 14803, 14079, 14043, 15070, 12763, 14482, + 10774, 15083, 13433, 13932, 13778, 14071, 13758, 14003, 15269, 14382, 14970, 14092, 14635, 10773, 14529, 10421, + 12937, 11323, 9346, 11871, 14762, 14755, 13396, 14476, 14498, 13779, 14451, 12915, 13903, 11397, 13473, 14339, + 15308, 13489, 14623, 13634, 15280, 13765, 14485, 14144, 14515, 12653, 13820, 9091, 14787, 13526, 14892, 15067, + 15108, 12740, 15325, 15161, 13636, 15105, 13793, 15309, 14452, 10339, 14171, 15219, 14568, 15311, 14482, 12444, + 13566, 14353, 14724, 10627, 9299, 13633, 14310, 14871, 13848, 14436, 15063, 15202, 7991, 12454, 14569, 14507, + 15242, 14189, 12477, 15026, 14137, 15182, 14153, 14038, 15051, 11818, 12875, 14156, 13379, 14945, 13190, 14808, + 14415, 15151, 14809, 10591, 14841, 14834, 12919, 10564, 15321, 15153, 10768, 15283, 14787, 11381, 15142, 12636, + 14278, 13902, 14619, 13887, 13481, 15204, 14536, 15109, 14946, 15271, 13274, 14811, 15349, 15108, 12929, 11881, + 14529, 12967, 15100, 15003, 14537, 10400, 14440, 13447, 14481, 12957, 13401, 14435, 13712, 14833, 14918, 13252, + 14857, 15325, 15144, 14378, 14786, 14458, 11925, 15151, 3426, 14842, 7203, 14737, 12727, 15277, 13999, 15060, + 12613, 14637, 12327, 13269, 12663, 13062, 14140, 14667, 13629, 13641, 14199, 15080, 15010, 13637, 13656, 3757, + 14733, 14443, 15253, 8282, 11863, 13178, 13365, 14243, 14857, 13115, 15265, 14333, 11340, 15202, 15329, 12137, + 13966, 14850, 14114, 14846, 14007, 14923, 14514, 13397, 14366, 4943, 12956, 15278, 13981, 14636, 14601, 15233, + 12274, 15194, 13160, 14643, 14680, 15144, 14587, 15309, 9768, 14806, 10641, 12679, 15302, 14137, 14722, 15093, + 14614, 14870, 14857, 11802, 14257, 14505, 13398, 8035, 15151, 13480, 14095, 14730, 15172, 14309, 13334, 13364, + 14741, 14339, 14131, 14805, 14514, 14647, 13464, 15359, 11443, 14124, 14157, 14425, 15304, 
14762, 13636, 13812, + 14660, 14316, 15333, 13396, 11953, 12257, 13447, 15213, 14730, 14191, 11549, 13957, 15078, 11271, 15320, 10176, + 13378, 13629, 15265, 13382, 13885, 14098, 15088, 14630, 14493, 15268, 12024, 14613, 14350, 12880, 14964, 12594, + 14255, 10965, 14304, 12734, 11481, 13548, 15316, 15099, 14740, 14700, 14931, 14336, 12994, 14395, 12767, 13798, + 13379, 14176, 14180, 12565, 13530, 12219, 12679, 11785, 12428, 13395, 14468, 14133, 13876, 15355, 14197, 15171, + 15154, 15358, 15138, 14436, 14207, 12712, 15067, 13826, 12652, 14611, 14430, 11583, 14347, 14429, 14011, 15095, + 15354, 15003, 14677, 13321, 14470, 15216, 13638, 12881, 14425, 10354, 6338, 10205, 13917, 10460, 14181, 14584, + 13323, 14203, 15249, 13410, 14810, 12607, 14712, 14769, 13128, 14769, 14636, 14974, 14705, 12447, 14398, 12940, + 13450, 15107, 9804, 13631, 15185, 11205, 15341, 15288, 10657, 14806, 14054, 15223, 14442, 13387, 14476, 10850, + 13432, 14181, 14645, 13613, 14165, 13911, 15209, 15189, 14432, 10986, 14070, 14140, 13961, 12074, 13731, 10999, + 14495, 13646, 15023, 15166, 13668, 15092, 14881, 15109, 12013, 14786, 12482, 12087, 13891, 13961, 14172, 14951, + 15085, 14400, 13606, 14689, 14685, 13922, 13594, 12953, 13033, 14738, 12113, 11725, 14728, 14755, 14883, 15238, + 12512, 13608, 14708, 13372, 14480, 12793, 11609, 14721, 9222, 14650, 13828, 14339, 13536, 10700, 14405, 14915, + 15311, 13223, 14019, 12710, 15004, 14715, 11168, 15113, 15147, 11494, 12979, 12524, 13158, 13748, 13642, 15028, + 12265, 13201, 11141, 9438, 11409, 15285, 14983, 14407, 14718, 15239, 13728, 11817, 13893, 15029, 11413, 13351, + 13986, 13439, 14122, 14488, 13126, 9817, 14702, 12928, 11867, 15012, 15351, 11719, 15344, 13776, 14918, 10806, + 14355, 14376, 13735, 13612, 12496, 10513, 12867, 14254, 14071, 14187, 12730, 15173, 13659, 14865, 14623, 12418, + 14724, 14690, 14341, 14475, 15225, 14804, 12545, 14022, 12342, 12840, 14626, 13414, 14817, 12588, 14545, 14544, + 12547, 11899, 12537, 14767, 14548, 14969, 
13204, 15260, 13512, 8627, 14359, 15207, 12145, 15136, 14440, 13916, + 14080, 14462, 13542, 10293, 14932, 13993, 14092, 14099, 13842, 13845, 5052, 12775, 13855, 15274, 14527, 14843, + 9665, 15153, 15188, 12265, 15155, 14110, 13075, 15212, 12921, 14642, 10697, 14480, 14127, 14649, 13390, 14907, + 13868, 13969, 11931, 15126, 15311, 13027, 14303, 12985, 13572, 498, 12527, 12872, 14379, 15259, 14757, 14945, + 9816, 13608, 14538, 14608, 14804, 15290, 14904, 14392, 12305, 13519, 9416, 14708, 11988, 12452, 14841, 11880, + 12106, 14490, 13671, 14675, 14293, 15333, 14882, 14613, 9608, 13995, 13538, 12628, 13748, 14803, 9283, 14944, + 14585, 12585, 14766, 13789, 14821, 15073, 13625, 14606, 15355, 13806, 15175, 13835, 14670, 12991, 14873, 14002, + 14715, 11513, 14861, 14724, 13830, 12684, 11885, 15037, 14898, 11973, 15328, 14868, 13573, 15052, 14920, 15160, + 15298, 14362, 15283, 14893, 15064, 15166, 15108, 13984, 15304, 14011, 14492, 12612, 14577, 15247, 15136, 12557, + 12905, 15137, 15019, 14613, 15286, 14581, 13884, 14621, 12805, 15025, 15106, 14347, 14319, 12494, 15003, 13124, + 14952, 14933, 10535, 14724, 14912, 13617, 15161, 14537, 14403, 14812, 15154, 14876, 13724, 15064, 14693, 14132, + 14551, 14981, 13451, 11976, 13219, 9232, 13262, 14847, 12554, 13363, 14645, 14003, 14232, 15122, 11353, 14800, + 12677, 10557, 13655, 15038, 14296, 13889, 15096, 11655, 12520, 15187, 12599, 14208, 15019, 13871, 14662, 13943, + 12795, 15039, 14773, 11485, 14972, 14753, 10294, 14736, 14768, 14729, 15331, 14246, 15125, 10746, 14274, 14348, + 15070, 15299, 14139, 14759, 12577, 12493, 15146, 12511, 15061, 12495, 12021, 15080, 14361, 14998, 15299, 15054, + 14580, 7408, 14598, 14129, 14342, 13522, 13996, 14378, 14833, 13709, 13710, 15201, 15034, 15199, 14754, 13409, + 12867, 13169, 14828, 8803, 11960, 14242, 15197, 11932, 13571, 14052, 13306, 14715, 13745, 11785, 13421, 13790, + 14819, 14867, 14859, 14271, 14691, 13445, 12186, 12055, 11389, 15045, 14119, 13511, 14770, 15000, 11689, 15257, + 
12530, 13892, 13170, 14296, 15120, 13926, 12193, 12832, 9025, 14764, 15265, 14661, 15245, 13897, 14275, 10931, + 13113, 15184, 13586, 11466, 15318, 14269, 14690, 5149, 12436, 13106, 14828, 15262, 12699, 14955, 15082, 15104, + 15172, 14752, 12812, 13570, 13162, 14508, 11082, 14163, 14845, 14820, 14936, 12305, 13216, 13406, 14221, 13548, + 14440, 10226, 9488, 9838, 15238, 14924, 14454, 15046, 13153, 14741, 14626, 13455, 14947, 14925, 12582, 14763, + 14836, 9700, 14745, 14491, 11898, 14211, 12619, 14576, 14847, 14656, 14748, 13890, 10555, 14475, 14489, 12429, + 15343, 10916, 14666, 14727, 12780, 14500, 15337, 14823, 15155, 14120, 14440, 13483, 14094, 11327, 15335, 12673, + 15021, 14118, 13918, 10379, 14710, 14454, 13527, 13572, 12609, 14200, 13105, 15323, 14039, 14951, 14598, 13371, + 14633, 14374, 12357, 12201, 14734, 15164, 15306, 12021, 15080, 10924, 11652, 15222, 14430, 11805, 15220, 13709, + 14458, 12016, 13701, 13420, 14201, 12463, 9029, 14078, 9040, 12769, 13812, 14356, 8658, 14469, 14358, 13548, + 14490, 14594, 15231, 14433, 13542, 12670, 14534, 11272, 13446, 14272, 14159, 13235, 14380, 11269, 14903, 13454, + 14379, 13390, 13762, 15181, 14719, 13368, 15137, 12669, 14078, 15110, 11006, 11930, 11557, 15131, 13454, 15239, + 12717, 14611, 12289, 12462, 13696, 14050, 14519, 14897, 12698, 14068, 14487, 15193, 14932, 12511, 13375, 12350, + 15299, 14791, 14919, 13729, 14701, 13904, 14822, 14346, 13678, 14422, 14773, 14314, 14788, 14652, 14672, 14044, + 14340, 14472, 14166, 11631, 14095, 11785, 12046, 15278, 13687, 15191, 15191, 13373, 10353, 13187, 15240, 14735, + 14325, 12704, 14112, 14868, 12553, 14544, 14498, 15143, 14665, 15036, 14099, 14859, 12763, 14402, 12647, 14399, + 15004, 13174, 13971, 10226, 14598, 15031, 14949, 14587, 14776, 8235, 15158, 14804, 14550, 14321, 12303, 11174, + 14272, 13080, 12976, 15021, 15295, 15330, 11772, 14420, 12773, 12819, 14817, 13628, 12954, 15276, 13156, 14950, + 15138, 13886, 14490, 12536, 13755, 15103, 13621, 13937, 14364, 11778, 
9397, 14345, 13748, 13634, 10548, 13273, + 12998, 14371, 15044, 15145, 14502, 14851, 15027, 15359, 14041, 14423, 12747, 13209, 13313, 13971, 15134, 13378, + 14116, 13145, 15011, 14628, 13180, 15182, 15108, 14028, 14450, 11704, 13686, 15337, 14434, 11318, 12760, 13653, + 13199, 13437, 14494, 8395, 13648, 14809, 14383, 14970, 14145, 14484, 14101, 12631, 15184, 14368, 15188, 14370, + 14372, 14027, 13157, 14894, 8898, 13719, 14528, 14183, 13124, 14549, 14872, 14985, 15258, 14534, 14211, 15037, + 14584, 15206, 13687, 14516, 13617, 14392, 14539, 15059, 14090, 13666, 13914, 13904, 15086, 12925, 14213, 15087, + 13013, 14810, 14869, 14619, 15157, 13533, 12998, 12786, 15228, 12670, 13444, 13772, 14217, 15289, 15178, 11647, + 11629, 13062, 14518, 15136, 13665, 15050, 9757, 13584, 14246, 14629, 12899, 15111, 14907, 14672, 15192, 15301, + 14853, 14490, 15332, 14454, 12790, 14661, 8207, 14798, 14169, 14446, 14992, 10693, 3613, 12366, 14190, 14580, + 15064, 13986, 12763, 14404, 14895, 14033, 11156, 14791, 12518, 14524, 14845, 13569, 13836, 14863, 12151, 12229, + 15301, 14968, 13203, 14987, 14818, 15233, 15297, 14797, 14261, 14583, 9065, 12897, 13514, 15344, 14601, 11928, + 13489, 13867, 13474, 14394, 14430, 12523, 13733, 14339, 14793, 13658, 13797, 14756, 14106, 14434, 15049, 12324, + 14621, 12629, 13453, 14587, 12373, 13930, 15351, 14584, 15158, 13699, 14867, 14554, 13811, 12984, 13472, 12637, + 14482, 13355, 14485, 14710, 14947, 15303, 13800, 12155, 14141, 15254, 15205, 14983, 14381, 13257, 15264, 14543, + 13478, 12879, 14703, 14698, 12455, 14425, 13163, 14390, 14881, 15238, 14916, 9858, 9115, 13893, 10893, 12817, + 15349, 10392, 10731, 14641, 14569, 11312, 13620, 15050, 14673, 14757, 10711, 8939, 13692, 14980, 14475, 15326, + 14960, 14835, 14132, 13835, 12654, 14958, 14607, 15109, 15345, 15082, 13662, 14035, 13924, 15304, 11220, 8162, + 12776, 14611, 14084, 15159, 15339, 12674, 15305, 9719, 14987, 15274, 10258, 12486, 14667, 14437, 15187, 14651, + 13615, 13782, 13300, 14522, 
14797, 9176, 15116, 12561, 14407, 12788, 14620, 7387, 10359, 14515, 14171, 14357, + 13612, 12234, 9808, 14587, 12309, 14627, 14993, 12511, 12714, 14810, 10969, 15185, 12884, 14508, 13737, 15335, + 14015, 14100, 14879, 15071, 14124, 14566, 13476, 9858, 10893, 13435, 14716, 14590, 13729, 10660, 13704, 15186, + 14656, 14596, 14091, 14906, 15106, 15088, 12377, 10960, 14697, 14796, 14284, 13532, 14253, 12356, 14842, 14356, + 8275, 14204, 14569, 15200, 14344, 14519, 12744, 13332, 14963, 14676, 13864, 15323, 14380, 14351, 14406, 14347, + 14741, 15058, 11278, 12404, 15001, 14539, 15309, 14558, 14463, 15213, 13855, 15084, 11548, 14188, 13820, 13521, + 12319, 13410, 7033, 14438, 13001, 14922, 14798, 13142, 12939, 14972, 14104, 14270, 14446, 14916, 15060, 15001, + 14632, 15280, 14077, 14397, 11590, 15239, 14642, 13594, 14506, 14520, 13406, 14046, 15082, 12461, 12547, 15083, + 12501, 14819, 13975, 15108, 14458, 12356, 14002, 14930, 14145, 14944, 14220, 15170, 14348, 14473, 13743, 15024, + 14630, 14071, 14358, 12917, 14641, 15142, 14052, 14417, 14649, 10666, 14268, 11000, 12088, 14982, 14744, 14416, + 10726, 15197, 13960, 12364, 14630, 15307, 12448, 14950, 14357, 10877, 15151, 13901, 14117, 12463, 14285, 14664, + 13719, 12984, 15048, 13834, 14808, 13781, 12442, 14937, 11247, 13757, 14288, 14871, 14634, 13673, 15242, 15027, + 15219, 12670, 14494, 14431, 14550, 15115, 14009, 12584, 15061, 15305, 14220, 10852, 12343, 15294, 14318, 14338, + 11116, 13796, 14510, 13641, 14772, 13955, 15046, 15183, 12898, 14519, 14434, 14345, 14752, 14932, 15291, 2974, + 13271, 9376, 13859, 13267, 13699, 14702, 13629, 14944, 14680, 13944, 11108, 14355, 13791, 13264, 15304, 3785, + 15320, 11381, 9227, 15318, 14450, 14852, 13682, 14922, 11509, 15241, 12842, 15097, 13532, 15243, 14119, 12911, + 13616, 14421, 13278, 14714, 14550, 14341, 13854, 14926, 14916, 14981, 13162, 14365, 14779, 13528, 14579, 14812, + 13337, 13422, 14579, 14078, 13677, 14282, 13732, 14662, 14118, 14836, 14637, 8391, 14988, 14852, 
14932, 13580, + 14615, 12908, 14558, 15352, 15026, 14494, 12363, 14334, 15293, 14807, 14724, 12513, 13631, 9963, 14247, 14616, + 15286, 14368, 15193, 14974, 11497, 13759, 14344, 14736, 13487, 12579, 13904, 14922, 12636, 14511, 15253, 12729, + 14875, 15249, 14370, 12239, 13612, 12925, 15133, 11852, 14743, 13894, 13166, 13960, 14679, 14069, 14579, 14405, + 14770, 14841, 14472, 13579, 14816, 15232, 13744, 15139, 11292, 14115, 13987, 15166, 12437, 14459, 14742, 11983, + 14485, 15251, 15086, 15146, 14912, 14037, 12399, 13340, 14509, 11480, 14585, 13171, 13032, 13569, 14686, 14807, + 14917, 12883, 13833, 14717, 15275, 13333, 13913, 14416, 14590, 13339, 14436, 14090, 14520, 15325, 14531, 14070, + 13842, 14886, 14184, 14432, 9893, 14153, 12490, 14787, 14718, 15166, 13048, 14675, 15159, 14298, 14902, 13963, + 13361, 10777, 12279, 12479, 14027, 14369, 12832, 14500, 14265, 13970, 11609, 12540, 12671, 13769, 14877, 13826, + 13347, 14442, 14891, 11363, 14443, 14755, 14817, 13507, 13982, 13923, 14558, 10325, 8425, 13212, 13971, 14275, + 12327, 14386, 14056, 12477, 15151, 14291, 15304, 14364, 15198, 14393, 12144, 14563, 14941, 15311, 14618, 14634, + 15184, 15279, 14173, 12894, 14870, 14665, 14786, 14228, 14354, 13698, 13756, 14679, 13365, 12445, 12422, 15208, + 12858, 12451, 14779, 14530, 11999, 14604, 13924, 15360, 14445, 14816, 13185, 7413, 13932, 12900, 13974, 14688, + 15003, 13970, 8572, 14781, 14811, 14350, 13713, 13737, 14964, 15066, 14368, 13922, 12973, 14534, 9653, 14233, + 14850, 13934, 14428, 12531, 12316, 13517, 13988, 13836, 13870, 14712, 14780, 13931, 14964, 15218, 15246, 13632, + 13577, 12325, 14686, 15033, 14964, 13266, 14954, 15219, 14009, 13321, 14657, 14645, 13562, 9116, 14820, 14654, + 9918, 12569, 14485, 15034, 14525, 14377, 14869, 14658, 14176, 15091, 13894, 13469, 13571, 14767, 15103, 15035, + 15349, 9445, 13669, 15146, 15299, 14159, 14752, 15110, 7374, 15300, 13979, 14870, 12060, 13518, 15251, 15074, + 15159, 14779, 13377, 13336, 15121, 14547, 11057, 12873, 
15355, 14403, 14807, 13505, 14767, 14757, 11807, 14994, + 14090, 13518, 14392, 12942, 15093, 15208, 15241, 12124, 15117, 14729, 14146, 13915, 14365, 12929, 10583, 14241, + 15316, 15027, 14124, 14647, 15083, 13726, 13235, 10923, 13710, 14531, 13856, 13224, 13930, 14439, 15049, 15100, + 14061, 13127, 13701, 13276, 14818, 11741, 11767, 13864, 12816, 13941, 14093, 15272, 13832, 14625, 12667, 13958, + 15088, 14523, 14106, 14980, 14979, 14812, 13001, 13563, 13362, 14478, 13968, 13762, 13065, 13666, 12385, 15225, + 14990, 15069, 13788, 14931, 14990, 14982, 15080, 13066, 14356, 15250, 12394, 9415, 15101, 14437, 12115, 14725, + 13966, 12655, 14756, 13942, 13527, 15210, 13158, 11358, 15083, 14711, 13425, 14754, 15053, 14887, 13100, 14386, + 13541, 14894, 14460, 15120, 15223, 15074, 13198, 15243, 13599, 11903, 7934, 12595, 14638, 11602, 14668, 14892, + 14417, 14393, 14790, 15176, 13826, 11796, 14683, 13867, 13444, 14221, 14265, 14684, 15310, 15339, 13979, 12717, + 14904, 4633, 15277, 13439, 14680, 13364, 14796, 12379, 14457, 14138, 14173, 12748, 13770, 13737, 13078, 13315, + 14398, 12098, 11521, 14340, 12776, 14705, 14786, 13555, 15010, 11710, 14309, 9215, 15043, 14214, 13257, 15350, + 13749, 12918, 11559, 14755, 14932, 9252, 12957, 12887, 14581, 14880, 13546, 13471, 11059, 14433, 12567, 13044, + 13932, 13918, 13364, 14921, 15356, 15204, 14472, 13452, 14267, 11621, 14481, 15266, 11863, 15106, 14641, 12087, + 15029, 15316, 5025, 12417, 14962, 14904, 14881, 14259, 12435, 14454, 14921, 13401, 13977, 14452, 13869, 12463, + 14948, 14305, 10774, 14834, 14446, 13097, 14802, 15271, 15002, 14340, 13711, 14956, 15223, 13701, 12362, 14326, + 15115, 13764, 13503, 12229, 15018, 10921, 8235, 15052, 11313, 12907, 15081, 14480, 11820, 15150, 14358, 14627, + 13801, 9996, 14762, 13344, 14603, 11781, 11493, 15250, 11569, 14273, 15334, 15202, 15184, 14171, 13472, 11875, + 14397, 13510, 14412, 15028, 14612, 15154, 14040, 13644, 12751, 14905, 14553, 15352, 14669, 12550, 14894, 14446, + 15137, 
14634, 10819, 14919, 14510, 13151, 14676, 13553, 13277, 12003, 15189, 15089, 13698, 10037, 14573, 14512, + 13719, 13982, 13692, 10301, 10208, 14867, 14914, 13641, 12559, 13866, 12974, 12769, 14956, 15287, 13900, 15274, + 13997, 14104, 15344, 12753, 14984, 11938, 13942, 14235, 15072, 14544, 14577, 14024, 12708, 15232, 12815, 12270, + 13591, 12788, 14841, 13923, 13555, 14585, 14718, 13081, 14714, 12907, 11222, 13773, 8643, 12170, 13421, 12170, + 13123, 15304, 14122, 8572, 13117, 15089, 15245, 11635, 14824, 15164, 15123, 14131, 14445, 13302, 14686, 14700, + 14853, 13768, 14569, 11826, 15000, 14235, 15315, 13771, 12138, 14387, 15239, 10956, 12887, 14540, 14740, 13710, + 14521, 15275, 15172, 12429, 13785, 15312, 12138, 14885, 14838, 14332, 12080, 12350, 13473, 10843, 3755, 13283, + 13111, 14838, 12837, 9917, 13650, 13103, 14708, 15223, 10917, 11585, 14951, 12963, 10662, 13810, 11632, 14362, + 15024, 15261, 12467, 13980, 14522, 13419, 10795, 14793, 13180, 13520, 13818, 12889, 14897, 11387, 14265, 12468, + 8626, 12023, 14692, 15319, 14192, 15141, 14380, 12884, 14155, 13996, 15117, 15317, 13650, 14953, 13828, 14405, + 13003, 14422, 11807, 12255, 14783, 13551, 14552, 14600, 14233, 12628, 12887, 15305, 15048, 14293, 13846, 14937, + 14443, 15049, 14590, 15266, 13980, 12789, 9570, 13973, 14676, 14354, 12046, 13316, 12467, 13733, 14758, 11878, + 14395, 13409, 14839, 14407, 14394, 12330, 12522, 14558, 13981, 8771, 14260, 10918, 13972, 12752, 14590, 13400, + 13120, 14459, 14561, 14400, 13854, 14053, 13000, 14920, 14451, 15098, 15108, 11959, 14765, 14825, 14999, 13773, + 14871, 14806, 14608, 14733, 12665, 14338, 14889, 15021, 14358, 15192, 14859, 10379, 14642, 14549, 14508, 15228, + 11531, 14355, 11938, 14713, 14153, 15305, 15169, 13324, 14150, 14702, 15256, 15192, 14851, 15333, 14832, 15200, + 13403, 15119, 14530, 11310, 13480, 11961, 13356, 15218, 14430, 14150, 14419, 15054, 14789, 15136, 12019, 14169, + 15249, 14559, 13929, 15099, 13544, 14940, 10311, 11884, 14470, 14962, 
13631, 5417, 15358, 14672, 12522, 14612, + 14462, 13743, 14261, 7412, 14786, 14207, 9344, 15032, 15348, 4724, 14414, 12719, 13610, 14488, 15191, 14868, + 12427, 15270, 14942, 11280, 13756, 15212, 11799, 14328, 13732, 15012, 15028, 14360, 14350, 14762, 15057, 14623, + 15242, 12893, 14854, 12310, 14587, 14545, 15125, 14972, 13429, 14537, 12744, 14728, 15091, 10008, 14766, 15131, + 14907, 12189, 14371, 14656, 15105, 13687, 13083, 12630, 14413, 13370, 14856, 13854, 15344, 14744, 14147, 14097, + 14804, 15345, 14800, 14574, 15064, 14506, 14918, 15359, 13336, 13094, 15100, 11593, 15299, 11918, 12797, 14920, + 13106, 15094, 15192, 13830, 14503, 14457, 10841, 12092, 14286, 13713, 11773, 12732, 13424, 14062, 9994, 14840, + 15057, 15236, 13785, 11509, 14569, 13782, 14380, 14097, 13937, 14624, 15184, 14803, 13446, 14950, 15126, 13215, + 15042, 12414, 14777, 14475, 13746, 14471, 12778, 14880, 13616, 14031, 15212, 14805, 14137, 14090, 15264, 13555, + 15071, 13345, 13388, 14702, 14470, 14415, 15320, 15110, 15083, 14118, 14415, 14746, 14755, 13997, 12705, 12550, + 14193, 14920, 12249, 11241, 14363, 14941, 15263, 14463, 13481, 14941, 11713, 14133, 13150, 11304, 14617, 14063, + 14690, 13444, 13585, 13443, 14941, 11861, 14207, 14851, 14261, 13391, 15288, 10539, 11084, 12976, 13846, 12582, + 10917, 14815, 8870, 13657, 14349, 13084, 15139, 14693, 15168, 14967, 10574, 14651, 14458, 14648, 12569, 14780, + 12479, 14401, 14762, 14416, 15252, 13472, 11780, 14880, 14765, 13751, 12418, 12473, 14578, 15029, 13644, 15139, + 12696, 13192, 14357, 10944, 13476, 13799, 13904, 15078, 15217, 13799, 8761, 14362, 12793, 15244, 14103, 14402, + 14537, 14414, 14178, 15090, 14427, 13567, 11315, 15185, 14775, 15157, 15003, 11639, 15069, 11634, 14390, 10670, + 15338, 12173, 14639, 15288, 14807, 14583, 14585, 13557, 15076, 14671, 13761, 14652, 13321, 14901, 15155, 14620, + 10359, 14774, 14578, 14282, 13183, 14142, 13442, 15230, 12660, 13353, 14543, 12185, 14468, 13469, 14268, 14987, + 13411, 14619, 14367, 
15189, 15045, 14265, 14785, 14379, 11741, 13385, 10862, 14178, 14929, 14516, 13988, 13768, + 15206, 15114, 14865, 14149, 15344, 14485, 10329, 13094, 15235, 13574, 14962, 14440, 11578, 15204, 5794, 14991, + 14566, 12948, 15088, 15223, 14893, 14718, 15266, 14860, 14729, 14739, 12492, 13438, 15285, 13762, 14611, 15022, + 14854, 15222, 14217, 15065, 15181, 14957, 14965, 13366, 13024, 14443, 13870, 13641, 12761, 13090, 14935, 15328, + 14697, 13311, 11217, 13371, 15058, 14246, 9479, 13018, 15033, 15016, 13819, 14908, 14472, 15030, 12496, 13079, + 15257, 14337, 15014, 14877, 12398, 11056, 13797, 13349, 12724, 11979, 14367, 15159, 13543, 15237, 14626, 14487, + 13110, 15021, 14638, 15338, 13871, 12173, 12403, 14467, 15300, 15318, 15175, 12329, 15337, 14218, 15165, 14979, + 13387, 15041, 13089, 14939, 14788, 14363, 9909, 14896, 13574, 13931, 13379, 15028, 10217, 15320, 15302, 14722, + 14353, 13415, 11907, 15089, 14464, 14615, 14209, 13976, 14447, 13543, 13052, 14719, 13662, 13525, 15064, 14969, + 14699, 15261, 14827, 12852, 13076, 14600, 15334, 14933, 14584, 13019, 15112, 11763, 13825, 14604, 14913, 14357, + 14465, 12354, 12724, 14091, 11655, 14505, 12640, 9726, 14529, 13403, 12611, 11723, 14887, 14425, 14499, 13542, + 13642, 15229, 14458, 12308, 15290, 9333, 14704, 13296, 15185, 14513, 11227, 15134, 11643, 10692, 15101, 13825, + 15230, 15031, 14465, 14692, 13526, 14541, 14702, 14536, 15078, 14851, 14351, 14823, 14591, 11249, 14868, 14497, + 13675, 12025, 11761, 14646, 14681, 15196, 12768, 11757, 13711, 14870, 15241, 13429, 15344, 13532, 13730, 14587, + 15232, 12736, 14715, 13076, 14709, 15022, 12289, 11087, 10503, 12821, 12217, 15183, 13424, 14539, 12823, 15294, + 12552, 15007, 14987, 6495, 12393, 12083, 14068, 14439, 13196, 13376, 13388, 13388, 11536, 11827, 11729, 15008, + 15313, 13780, 14959, 14643, 12830, 14129, 14954, 15035, 12840, 15065, 15239, 14790, 14529, 13463, 9928, 15073, + 15089, 12875, 14445, 13767, 14651, 15157, 13641, 14756, 13954, 15289, 14373, 13460, 
13509, 14299, 12410, 11993, + 14882, 15232, 14693, 14011, 14146, 11015, 11152, 15000, 14821, 13548, 14953, 14541, 15009, 14804, 12704, 12643, + 14894, 7405, 13496, 14671, 14975, 14359, 14727, 14976, 15321, 15144, 14178, 14406, 11409, 12577, 12370, 14937, + 13929, 13907, 14198, 14421, 14701, 14970, 10455, 14350, 13507, 11481, 14641, 14923, 14553, 15084, 14559, 15050, + 14771, 14995, 10270, 14429, 14639, 15244, 13645, 15044, 11707, 13541, 12056, 14232, 13460, 6938, 15285, 13559, + 13484, 14539, 15212, 15153, 15277, 13038, 8838, 12035, 14867, 14780, 13754, 14546, 13081, 15188, 13649, 10454, + 13764, 13799, 13385, 14289, 15279, 14309, 13607, 14179, 15062, 13492, 11848, 14978, 15005, 9977, 13434, 13992, + 13139, 14489, 14431, 13638, 12375, 14406, 11568, 15077, 14951, 14767, 14124, 13272, 13491, 14310, 13013, 14563, + 11609, 9908, 10548, 13058, 14599, 14678, 14888, 13971, 14498, 14554, 15214, 11970, 14859, 13490, 14894, 14281, + 13621, 14831, 14925, 13341, 15141, 11824, 10369, 14471, 12886, 14432, 15332, 14472, 15163, 14695, 15139, 14175, + 12636, 11975, 14904, 13456, 15270, 15056, 14911, 15190, 12623, 14665, 14917, 12603, 12560, 14766, 14890, 14716, + 9405, 15002, 15040, 10854, 13490, 14519, 11609, 13428, 11621, 13454, 14776, 12941, 12516, 14566, 13969, 15012, + 14637, 14893, 15238, 13304, 15122, 14823, 11969, 11467, 15255, 12395, 15198, 13730, 10491, 14470, 13914, 15099, + 14970, 14480, 14146, 14054, 14914, 14485, 14149, 13641, 14061, 13624, 15043, 13901, 11132, 11414, 14092, 12971, + 14632, 14750, 14596, 13202, 9683, 13800, 13416, 14690, 15331, 15182, 12674, 14472, 15315, 15223, 12336, 9635, + 14504, 14433, 15349, 14183, 13605, 11405, 15140, 14590, 13497, 13354, 14855, 13672, 15028, 14684, 12738, 14612, + 14806, 14046, 15254, 15283, 14911, 8171, 15199, 10096, 15106, 14766, 14091, 15281, 12465, 13502, 14440, 13734, + 14484, 14720, 13450, 14743, 15067, 10939, 12344, 14604, 14900, 14729, 14391, 13508, 15022, 15226, 12990, 14543, + 14283, 14954, 13324, 12251, 11773, 
13514, 15275, 13668, 14310, 12608, 14663, 14185, 14414, 13978, 14431, 13376, + 15345, 14593, 14947, 11711, 14858, 14500, 15227, 14660, 10547, 15213, 14423, 14056, 15180, 15320, 15077, 11877, + 13955, 12284, 11400, 15283, 14467, 14088, 13717, 12675, 11621, 13346, 11645, 14702, 12883, 14537, 13767, 14506, + 15239, 15142, 8713, 14340, 14218, 15027, 14914, 14455, 14265, 13297, 12500, 15083, 15226, 13676, 13069, 14520, + 14255, 15041, 14517, 14536, 12066, 15122, 10984, 15144, 15179, 12771, 14741, 10144, 14972, 14235, 13625, 13898, + 13152, 15311, 13439, 10976, 15312, 14791, 14292, 15268, 10855, 13865, 13886, 13590, 13236, 14972, 12454, 13240, + 14822, 14894, 15129, 14473, 14609, 14370, 14505, 15032, 15233, 11456, 15043, 15101, 12398, 14587, 15185, 11269, + 14014, 14651, 15181, 14444, 12480, 14360, 12388, 13745, 14244, 14175, 14506, 13044, 14854, 12052, 9630, 14735, + 15082, 13829, 14941, 14509, 13429, 15120, 14465, 14036, 14914, 13310, 15111, 9967, 14776, 15342, 15143, 13153, + 13583, 15018, 14729, 13978, 15250, 9250, 13230, 15016, 14177, 14373, 14795, 15022, 15226, 13438, 9215, 14078, + 14463, 8650, 14977, 14406, 13424, 15116, 13100, 14621, 12223, 14357, 13534, 14136, 13768, 13905, 14774, 9133, + 14596, 14644, 15001, 13240, 12315, 13173, 14713, 13400, 13023, 14834, 13206, 14415, 15272, 14646, 14638, 14393, + 13907, 12640, 14238, 15265, 15151, 14448, 15357, 12978, 11079, 11027, 14160, 14399, 14719, 14986, 14881, 13215, + 14817, 13764, 14784, 12342, 15133, 15335, 15258, 14473, 13482, 14857, 14965, 12658, 14461, 14540, 15304, 14318, + 13513, 8776, 12994, 14880, 15096, 10159, 14040, 14572, 11781, 15307, 12841, 13381, 15099, 15039, 12309, 14776, + 14362, 15289, 12801, 13918, 12468, 12941, 15093, 14832, 13801, 14426, 13946, 15056, 13813, 14528, 10620, 14542, + 14674, 14655, 14415, 14451, 14544, 14723, 13240, 12789, 14688, 13053, 14591, 14620, 15225, 15093, 14935, 15292, + 11487, 12039, 15331, 13519, 14849, 15338, 15153, 14872, 15352, 15184, 12745, 14075, 12686, 14799, 12950, 
12966, + 8495, 14361, 13913, 13440, 14525, 15298, 14824, 14913, 13723, 11193, 15328, 12838, 15259, 14372, 14590, 13496, + 14761, 14039, 13634, 15194, 11187, 11633, 8770, 14324, 9762, 13610, 14345, 15049, 14212, 15286, 13731, 15088, + 3817, 13994, 15009, 14129, 14485, 13999, 14683, 11758, 13842, 14663, 13487, 13994, 12621, 12939, 15258, 13707, + 12321, 15230, 11547, 13612, 13388, 15285, 14186, 14576, 13627, 14854, 12483, 14813, 14603, 14551, 14373, 14991, + 15263, 13199, 14101, 14078, 15180, 13842, 13962, 15057, 13913, 14040, 15081, 15335, 13107, 14189, 12384, 11139, + 14366, 14697, 13702, 14169, 14318, 14223, 13808, 13773, 14349, 12047, 14361, 13564, 12851, 13468, 14412, 15214, + 14282, 14589, 13886, 15270, 13693, 14579, 14734, 10860, 12953, 13554, 14875, 12749, 15217, 14553, 14906, 14585, + 15126, 14460, 13324, 13648, 14692, 13887, 14677, 14483, 11007, 13567, 14681, 14618, 12401, 14467, 14970, 14195, + 13487, 14807, 9627, 13328, 13357, 14800, 15141, 11981, 10067, 14658, 13832, 15216, 14964, 12698, 14694, 15029, + 15063, 13524, 15343, 13655, 13637, 15296, 15075, 14986, 14060, 13574, 13451, 12354, 14493, 14398, 12954, 12334, + 12530, 14912, 14165, 15273, 14538, 15152, 14623, 14097, 12072, 14444, 15222, 13928, 13202, 13444, 14488, 15320, + 11437, 13514, 11181, 13230, 14715, 14598, 14371, 15239, 14779, 13292, 14972, 14618, 15190, 14539, 14494, 14744, + 14916, 15296, 14641, 15172, 14976, 13155, 14680, 13392, 15149, 11382, 14379, 14595, 14941, 15082, 15337, 11327, + 12947, 14391, 14964, 13890, 13595, 14079, 14828, 14714, 11302, 14179, 13408, 15117, 15233, 14717, 12379, 15179, + 14140, 13291, 15320, 15017, 15029, 13369, 12491, 14994, 11792, 13757, 15207, 14999, 12856, 12887, 14375, 13696, + 14607, 14962, 14781, 14679, 13337, 14754, 15109, 12370, 14409, 13924, 13460, 13342, 13027, 15011, 15343, 14931, + 14630, 13245, 15238, 13904, 9957, 15001, 10858, 14223, 13768, 14431, 11240, 14489, 14404, 14634, 12857, 14452, + 13686, 14597, 14281, 14398, 13120, 13542, 14221, 13173, 
14541, 13385, 13418, 14007, 11379, 14645, 14097, 13453, + 12215, 11855, 14645, 15085, 15328, 14463, 12792, 14004, 14253, 13418, 14536, 13875, 14814, 13844, 13369, 12661, + 13918, 12831, 15088, 15343, 14826, 14775, 14244, 15010, 14721, 9243, 15274, 14802, 14360, 14691, 15064, 13945, + 15355, 15004, 14768, 13727, 13504, 10379, 12488, 10012, 15170, 14791, 14245, 12101, 14909, 12690, 14578, 15124, + 13360, 9776, 14578, 14298, 14535, 14123, 14389, 14621, 13790, 14400, 11123, 14957, 12925, 13930, 15258, 15164, + 14492, 14386, 15343, 10995, 11260, 14840, 14168, 13528, 14486, 14001, 13367, 12940, 13423, 14862, 14948, 15187, + 13497, 12663, 14909, 13539, 13928, 15258, 15167, 13812, 14545, 12305, 11726, 14546, 13499, 11149, 10264, 14141, + 14420, 13248, 12309, 15122, 14014, 14545, 13617, 15032, 12573, 14512, 15263, 14755, 14426, 13525, 14383, 8858, + 14884, 15163, 13098, 14543, 15168, 13340, 10208, 12285, 14627, 13056, 14632, 12751, 14515, 14387, 14777, 14382, + 14611, 14645, 13468, 13425, 15228, 13305, 15244, 15177, 13648, 15355, 14398, 12932, 12354, 13055, 15049, 13912, + 13453, 14845, 14789, 14745, 13380, 13400, 15096, 12899, 11722, 14101, 14847, 15126, 15129, 13522, 14571, 12995, + 11828, 14745, 14377, 14698, 13985, 12017, 14445, 13643, 13854, 14442, 14088, 8704, 11671, 14069, 13167, 14475, + 13330, 15345, 13473, 13836, 15235, 13431, 11650, 7913, 14542, 14636, 14481, 13773, 13453, 14825, 15019, 14706, + 13220, 14591, 12997, 14828, 8946, 15090, 14537, 13597, 13494, 14112, 13196, 15089, 12802, 13637, 13924, 14960, + 15005, 13544, 14721, 12549, 12569, 6554, 13793, 14904, 14955, 13769, 13330, 13830, 14929, 9910, 15289, 15037, + 8833, 11863, 13953, 14529, 13492, 14341, 15094, 15036, 14408, 14962, 14910, 14727, 14578, 14538, 14029, 13190, + 13735, 14373, 15157, 14360, 13832, 14475, 12144, 13936, 14708, 15321, 13050, 13671, 14300, 13438, 14599, 14572, + 14498, 15188, 12618, 11308, 13174, 11504, 14472, 15028, 12535, 13262, 14146, 15319, 13489, 13532, 14450, 14350, + 14809, 
15002, 13038, 15130, 13858, 15141, 12671, 14381, 14194, 15198, 14868, 13839, 12684, 14145, 15302, 15317, + 13477, 14647, 15280, 15037, 13391, 14666, 14450, 14547, 13604, 14956, 9733, 15059, 14773, 13635, 13941, 15268, + 14471, 12922, 13326, 14850, 12779, 11928, 14936, 14137, 13735, 14918, 14340, 14567, 13354, 10489, 12761, 14768, + 15071, 13739, 12974, 11394, 11957, 13974, 14788, 14920, 15175, 14557, 14538, 14119, 14869, 13390, 15116, 15317, + 14735, 13518, 12628, 10889, 13715, 9766, 12935, 15084, 6661, 13916, 14464, 13780, 7484, 12608, 13488, 14773, + 14805, 10509, 14712, 14390, 12298, 13684, 14394, 13804, 13474, 10674, 13061, 15109, 14395, 13681, 14662, 14622, + 13119, 14880, 14125, 11239, 15277, 12678, 15293, 7496, 14951, 12308, 13605, 15041, 14157, 14943, 13365, 14349, + 14445, 11426, 11346, 13062, 15307, 15191, 15197, 15009, 14649, 11724, 13945, 14999, 11661, 13397, 14773, 14669, + 8419, 14918, 14737, 13701, 11666, 13172, 14634, 14099, 14769, 14680, 14716, 14562, 12337, 14020, 15032, 14680, + 12802, 14690, 14564, 12852, 14304, 14524, 13582, 9525, 14356, 14540, 13183, 14890, 10673, 13445, 14999, 15237, + 14208, 9247, 13841, 13113, 15065, 15183, 15011, 11454, 10544, 15276, 12948, 14122, 14390, 13752, 14223, 14366, + 15008, 15346, 12446, 14545, 13070, 11385, 12531, 13427, 14552, 14983, 13329, 14614, 14833, 11760, 15128, 14028, + 12172, 12399, 9657, 15150, 13591, 14574, 13650, 14754, 13467, 15312, 14427, 12654, 14525, 13574, 13761, 14554, + 14421, 13392, 13640, 13365, 15181, 14393, 13278, 11598, 13396, 14798, 14430, 15129, 13259, 14458, 14090, 15181, + 14828, 12336, 12226, 14111, 13557, 13809, 14577, 14540, 15297, 15253, 11171, 15002, 12600, 15221, 14844, 13729, + 14568, 13883, 14099, 15214, 12416, 13418, 14600, 12071, 15297, 15222, 14469, 14576, 11190, 13812, 11946, 11166, + 13410, 15231, 14572, 15210, 14147, 14334, 11286, 15291, 14563, 14539, 14985, 14944, 14654, 14488, 13653, 14771, + 13877, 11117, 12986, 11545, 13814, 13713, 15057, 14466, 14961, 13590, 15113, 
13724, 14666, 14380, 14844, 12292, + 13896, 10248, 13821, 12143, 11436, 10135, 15208, 10261, 14939, 15005, 12612, 13639, 15015, 11744, 15344, 14587, + 14835, 14880, 11824, 12585, 13516, 14627, 11482, 15305, 15096, 13177, 15185, 14624, 11934, 15154, 15021, 14578, + 14465, 12955, 15287, 14434, 13255, 15139, 14661, 14812, 13780, 14557, 14894, 13713, 11869, 15117, 11717, 11711, + 14844, 13745, 13629, 13139, 14661, 13135, 14620, 13198, 13770, 13814, 12071, 14624, 14884, 13376, 15066, 14460, + 14420, 14529, 13578, 12885, 14220, 14642, 14633, 13943, 15022, 14830, 15103, 13595, 10314, 13513, 9918, 14562, + 11288, 14674, 15074, 14727, 14874, 12744, 14366, 15128, 14160, 13018, 12519, 14997, 11703, 14506, 14380, 14140, + 12926, 11964, 14445, 15185, 14813, 13768, 12691, 14739, 15002, 14797, 15130, 14474, 12979, 15015, 13391, 13653, + 14953, 14581, 14602, 14850, 13594, 13269, 13790, 11237, 15103, 11329, 11460, 14881, 14516, 14808, 14756, 11748, + 12754, 14621, 14102, 14680, 13561, 12841, 13111, 14308, 12220, 15131, 14433, 15127, 15342, 14740, 11372, 15040, + 10519, 14611, 14971, 15320, 13675, 15306, 12916, 14719, 11340, 13636, 13774, 12911, 15332, 15125, 12525, 14212, + 12302, 12054, 15216, 14150, 14827, 14868, 11987, 14813, 14617, 15149, 13458, 14226, 13823, 15192, 14857, 13677, + 10916, 14635, 15327, 14916, 14579, 15103, 15231, 14297, 14686, 12985, 11593, 15270, 15251, 14225, 15239, 13731, + 12810, 11071, 15288, 10870, 14821, 9481, 14777, 10330, 12410, 14200, 11826, 11899, 14841, 12482, 14496, 14379, + 11799, 15270, 12871, 15355, 11261, 10261, 13787, 12267, 14526, 12996, 13528, 15015, 14627, 14924, 15021, 13318, + 14495, 13610, 12322, 13621, 14046, 14539, 12260, 12909, 14264, 13400, 14539, 13754, 14024, 15112, 13918, 9985, + 15060, 12807, 15094, 14898, 14590, 12432, 12997, 15140, 14726, 14469, 14514, 13675, 14419, 13490, 14403, 13657, + 12771, 14591, 15098, 14925, 14505, 14410, 15132, 13489, 9307, 14227, 12980, 14627, 12911, 12933, 14341, 12405, + 13647, 13802, 14533, 13516, 
14072, 15098, 14824, 12119, 14759, 9628, 6598, 12015, 15116, 14538, 15174, 14842, + 13914, 14516, 9605, 13286, 14496, 14910, 14409, 11458, 14639, 12315, 13484, 13827, 14470, 12899, 10979, 13754, + 14476, 12408, 15181, 14432, 14768, 14920, 13633, 13852, 14421, 14914, 14906, 13147, 15096, 15150, 15152, 15028, + 6794, 13654, 12643, 12321, 9905, 14489, 14357, 14885, 15036, 13573, 12259, 12456, 14923, 11498, 15308, 13551, + 13978, 12599, 15351, 15052, 14180, 15197, 14014, 14878, 14049, 13822, 14705, 14775, 14201, 15334, 14633, 12964, + 12962, 7897, 15184, 13983, 15318, 11702, 14337, 13412, 12711, 14471, 14832, 15359, 15197, 14796, 12316, 15302, + 13740, 12063, 15296, 14585, 14843, 14964, 12029, 14383, 13608, 8781, 13052, 14966, 12324, 12932, 15324, 14967, + 14214, 13798, 14382, 15189, 12594, 12554, 14939, 15168, 15273, 15142, 12918, 12629, 14149, 15012, 12409, 12894, + 15163, 14357, 14761, 14958, 10085, 13324, 15188, 12467, 13891, 14992, 15031, 13331, 15142, 13757, 14636, 11876, + 14191, 14770, 14861, 13555, 14777, 14344, 14672, 14513, 13120, 14607, 14640, 14553, 13472, 14758, 12647, 13923, + 14052, 14243, 14937, 13847, 13861, 11823, 11695, 13203, 12906, 11969, 12299, 14387, 15122, 14377, 15247, 14329, + 15179, 14549, 14852, 13485, 13892, 14604, 14560, 15077, 11859, 14429, 14979, 10955, 12375, 14485, 14533, 13101, + 14164, 14333, 7383, 14849, 15353, 14746, 14890, 15054, 15295, 14577, 13811, 11564, 14366, 15293, 14424, 15303, + 15328, 14477, 13838, 14609, 12787, 13932, 14712, 13406, 15050, 14628, 14697, 12711, 14132, 13454, 15114, 15156, + 15137, 14817, 12991, 15143, 13865, 13805, 14861, 12609, 14865, 11438, 14558, 14802, 14073, 14497, 13962, 14640, + 15063, 14557, 13410, 12978, 14955, 12982, 14892, 15248, 15049, 11916, 12426, 12775, 13426, 15105, 13596, 12409, + 14475, 15132, 13053, 12479, 12819, 14724, 14867, 14892, 9317, 14913, 14892, 15329, 14640, 13908, 15193, 13328, + 14985, 14644, 13335, 13577, 13919, 13746, 14396, 10498, 12372, 12908, 12988, 13431, 14535, 14135, 
13701, 14754, + 14765, 14570, 14539, 13774, 15324, 13426, 13292, 14914, 13577, 13637, 13428, 14128, 9976, 14866, 14930, 13861, + 14428, 13327, 14909, 13774, 14447, 14958, 13881, 15045, 13514, 12467, 11430, 14960, 13830, 14469, 9160, 13402, + 13237, 14822, 12547, 14501, 14812, 14263, 14192, 14340, 14920, 13374, 13985, 15002, 12290, 15074, 11282, 14210, + 13045, 9652, 13519, 14097, 14382, 14857, 13641, 13339, 14851, 14470, 14596, 15089, 15068, 14275, 15029, 13626, + 14313, 14732, 14330, 14840, 14869, 15042, 15359, 13828, 14957, 10747, 11393, 14626, 14958, 14971, 12795, 14462, + 14478, 15085, 13300, 13782, 13474, 12718, 14708, 12598, 10943, 13670, 10273, 10193, 14413, 13218, 12433, 10868, + 15150, 13743, 14465, 15145, 11424, 15037, 14674, 13399, 14428, 15096, 14643, 15169, 13688, 12422, 14491, 15339, + 14671, 12317, 15298, 13859, 14922, 14585, 11895, 15205, 14258, 12942, 12044, 12629, 14229, 13867, 14698, 14405, + 14111, 15079, 14350, 14570, 14236, 14045, 14853, 13816, 13569, 11417, 14682, 14686, 15126, 14807, 13514, 15283, + 13116, 12107, 14730, 13588, 13274, 12731, 15141, 14612, 14251, 14776, 13587, 13939, 12161, 14201, 13413, 13131, + 15246, 14441, 15346, 14145, 14951, 14862, 15250, 14627, 14249, 14738, 12687, 14122, 14822, 15314, 13496, 14510, + 14565, 14878, 13225, 14893, 15183, 13937, 14403, 13500, 14986, 15317, 14760, 14103, 12895, 14930, 15078, 13753, + 14124, 15093, 15249, 11976, 13591, 13916, 14812, 14201, 13992, 14353, 13830, 13626, 13871, 14024, 11873, 13623, + 11449, 14700, 14556, 11802, 13626, 13481, 13221, 14647, 14090, 15002, 14966, 13583, 14601, 15148, 15079, 15037, + 11954, 14752, 14903, 14734, 12907, 13504, 11450, 14789, 14113, 15355, 14460, 13711, 14470, 14410, 14181, 14452, + 15323, 14592, 14637, 13924, 15271, 15031, 11422, 14399, 13652, 13018, 13051, 11521, 13569, 14036, 13408, 15048, + 13770, 12683, 11508, 9071, 13930, 15268, 14469, 14396, 13012, 14069, 13931, 14442, 11565, 14720, 13755, 15219, + 13612, 15037, 14307, 12901, 13864, 13643, 14993, 
15227, 14773, 15141, 14543, 14079, 14649, 14098, 15310, 14400, + 14330, 14863, 14406, 15347, 13911, 11930, 11819, 12848, 14962, 14244, 14879, 13262, 15172, 13690, 12390, 14876, + 12359, 15110, 14789, 13733, 14876, 14359, 14595, 13447, 15333, 14936, 14339, 10111, 15268, 15009, 15011, 10278, + 13452, 13904, 13421, 14772, 14411, 13794, 13788, 15113, 15084, 15024, 14951, 14495, 14659, 15092, 12425, 13341, + 15320, 13790, 14950, 15274, 12351, 14181, 12197, 14654, 14034, 14610, 15270, 14796, 11995, 14107, 13701, 15061, + 10205, 14096, 15289, 15197, 14648, 13577, 14093, 15329, 14844, 12262, 14427, 11863, 14704, 14495, 14734, 12759, + 13452, 14934, 14760, 12641, 15155, 13907, 14837, 14305, 14495, 12408, 14283, 14340, 13664, 9866, 15158, 14291, + 15169, 15091, 13781, 12428, 14032, 12506, 15233, 12734, 14418, 15033, 14669, 12298, 13764, 15004, 14476, 13319, + 15084, 13614, 14770, 13850, 13788, 14838, 13405, 15255, 14525, 11560, 13774, 14954, 13908, 12484, 14708, 14499, + 15199, 14948, 14716, 15185, 11966, 14842, 14513, 14510, 14468, 12328, 12674, 13610, 14738, 14700, 14500, 14277, + 13313, 14398, 13858, 14506, 13850, 11584, 12594, 13851, 14630, 14062, 14018, 13669, 14941, 13781, 15300, 15191, + 14772, 15104, 15035, 15338, 15008, 14614, 14543, 13328, 14821, 12924, 15344, 14532, 15142, 14510, 15191, 13111, + 13573, 13590, 14673, 12400, 13915, 13854, 13317, 13571, 14845, 12566, 14358, 13038, 14110, 9304, 14092, 12519, + 14289, 12262, 12438, 12122, 13098, 13612, 14822, 13069, 14822, 14050, 13855, 14359, 14160, 15265, 12923, 14913, + 11151, 15201, 14240, 13082, 13278, 14682, 13832, 14491, 12068, 15027, 14866, 14748, 14566, 15091, 14629, 14198, + 14924, 14707, 9700, 13169, 13548, 14962, 12925, 15304, 14434, 15260, 14482, 15076, 14117, 14474, 15082, 13718, + 14778, 12745, 14763, 13381, 14455, 13599, 11588, 15331, 13370, 14806, 15309, 14807, 13485, 14301, 14812, 10586, + 14353, 14695, 14348, 13115, 14888, 14716, 13406, 14726, 15319, 13222, 15327, 13839, 14597, 13451, 13720, 15247, + 
12358, 14037, 14426, 13303, 12098, 15358, 13452, 13799, 14465, 14066, 10650, 14804, 13260, 14070, 15076, 12325, + 14328, 14995, 13620, 15259, 15340, 15310, 9312, 14523, 15096, 15053, 13809, 12512, 15065, 11627, 15117, 12522, + 14431, 10027, 14231, 14367, 14089, 15030, 14483, 11701, 13835, 15218, 15172, 14999, 14832, 13646, 14908, 14411, + 15125, 10379, 15037, 14885, 11270, 9134, 9824, 15344, 13043, 14516, 13768, 14476, 8559, 15286, 15087, 15213, + 14672, 15075, 12666, 10676, 15355, 13774, 13435, 14530, 13627, 14749, 13401, 13518, 11381, 13793, 14935, 8904, + 10243, 9176, 14842, 15252, 13053, 14285, 9529, 15303, 14984, 14874, 13944, 10237, 15196, 14042, 13823, 13350, + 14442, 14403, 15310, 12532, 15207, 14266, 15340, 14670, 10898, 13345, 13400, 12358, 13810, 14288, 14859, 14505, + 14571, 14604, 13220, 14948, 14140, 12596, 15356, 13020, 14499, 15048, 15121, 12054, 11454, 14700, 12651, 14703, + 12661, 15355, 14247, 12573, 12596, 13240, 13785, 14718, 11300, 13444, 15276, 13879, 12534, 14985, 12765, 14960, + 14736, 14685, 14585, 12841, 13717, 10772, 13650, 15285, 14884, 11300, 12976, 14562, 14675, 14216, 13567, 14405, + 14758, 14613, 13600, 15285, 14049, 14867, 14524, 10582, 12988, 15131, 14325, 12322, 12949, 14102, 13283, 15242, + 14490, 14568, 13538, 13721, 10286, 13301, 15020, 15204, 11968, 14370, 13572, 15120, 14349, 13684, 13499, 14742, + 13472, 12093, 11672, 14463, 14759, 14017, 14400, 8247, 14262, 15150, 15063, 14861, 14302, 14062, 11706, 15051, + 14754, 11996, 13588, 15129, 13396, 13883, 12678, 14909, 14649, 11943, 4030, 14487, 15261, 15106, 13108, 7562, + 12984, 14053, 14527, 14525, 14525, 13251, 13709, 14303, 15300, 9994, 14307, 13367, 13719, 12782, 14133, 12684, + 13562, 13009, 14390, 15321, 13956, 11325, 12096, 13876, 13131, 13400, 14614, 11919, 14967, 13407, 14822, 13319, + 14965, 14802, 15126, 12750, 14985, 13899, 9493, 14132, 15178, 14442, 13746, 13790, 14203, 14940, 15315, 15231, + 14695, 12423, 11705, 14130, 13549, 15176, 15273, 14992, 15250, 14754, 
13257, 14198, 13825, 14583, 14401, 13376, + 13572, 11194, 12335, 14808, 12890, 11024, 13588, 14441, 14366, 14791, 15061, 13779, 14406, 15304, 6623, 10980, + 15162, 14651, 13872, 12796, 12316, 11521, 15320, 15085, 15109, 10805, 14192, 12222, 14917, 15281, 14840, 15210, + 14257, 14075, 13597, 14111, 13291, 12432, 14899, 15355, 13894, 14036, 14446, 11826, 13375, 14851, 14628, 11404, + 14099, 14639, 15172, 14906, 15232, 14555, 14678, 13799, 15022, 12466, 14182, 14520, 14271, 13455, 13888, 15301, + 14018, 9417, 14702, 12842, 12574, 15038, 14669, 14373, 14732, 15134, 13142, 13324, 15228, 15105, 13906, 12674, + 14373, 14623, 14791, 13938, 14090, 13392, 13249, 10272, 14970, 13180, 14510, 14787, 14814, 13173, 14502, 14411, + 11095, 15031, 12280, 14780, 14463, 14478, 15348, 14705, 11022, 13791, 14064, 14393, 13489, 14918, 14306, 14791, + 15263, 13385, 13622, 13232, 14295, 14601, 14401, 14460, 13934, 14542, 14439, 14465, 14652, 14395, 12461, 14182, + 14547, 13272, 10857, 14708, 13553, 13925, 12092, 15034, 14810, 13595, 9410, 15156, 15187, 14824, 14475, 14377, + 11633, 4104, 14705, 12186, 13551, 13476, 12694, 14281, 14988, 14510, 14692, 14996, 11737, 13507, 13811, 15169, + 14703, 13419, 15261, 14235, 14149, 14346, 15308, 14542, 13031, 14460, 13974, 13908, 10962, 13479, 12066, 15227, + 14939, 13588, 12441, 13222, 14503, 11896, 14776, 12468, 13818, 13589, 14281, 13884, 14997, 14914, 11826, 9633, + 14548, 13694, 13859, 14823, 15313, 14078, 14743, 14287, 14431, 15259, 14629, 15330, 15040, 14384, 13812, 15132, + 14925, 14083, 14239, 15192, 14005, 12194, 13398, 14804, 9298, 15265, 14893, 14374, 15281, 14953, 15332, 14864, + 14983, 12488, 11559, 14497, 13654, 14429, 14236, 14290, 11357, 14511, 12391, 13520, 15093, 11490, 10736, 14727, + 14367, 15283, 15317, 14471, 14497, 14187, 13972, 13512, 13723, 14475, 14715, 10954, 14969, 15342, 12669, 12220, + 15141, 9343, 10273, 15274, 14392, 13049, 14832, 14971, 12643, 14439, 11897, 11601, 14608, 13105, 10804, 15285, + 12609, 14769, 13960, 
15084, 14410, 15224, 13786, 14622, 10726, 12343, 13936, 14286, 14324, 14886, 13936, 14152, + 13796, 14807, 13313, 12176, 15277, 14757, 10752, 13464, 14246, 15306, 15167, 14267, 14842, 14659, 15346, 12125, + 14159, 14386, 15177, 14361, 15000, 14915, 14480, 15066, 14755, 14622, 13384, 14474, 15198, 15239, 14001, 7620, + 14824, 14982, 13347, 12748, 13346, 13113, 12577, 12383, 12737, 15073, 13861, 13131, 14819, 14716, 9327, 14664, + 14052, 14989, 13934, 14496, 15086, 14280, 11713, 13843, 13997, 15163, 15158, 13922, 13903, 15044, 14143, 13136, + 14932, 14701, 15014, 13875, 14694, 13411, 14193, 14593, 13130, 10384, 14502, 15149, 9291, 15258, 12429, 13335, + 13442, 15041, 14712, 14649, 14542, 15129, 14014, 12571, 15201, 13789, 15228, 14056, 12447, 13825, 12019, 15259, + 12655, 10410, 10233, 14710, 14619, 13427, 14742, 14166, 15056, 13480, 14149, 14863, 15134, 11270, 14771, 15076, + 13377, 12658, 12389, 15038, 14394, 14534, 14978, 15169, 15025, 14831, 12229, 14731, 15162, 15238, 14832, 15035, + 15200, 14336, 14142, 15098, 14353, 13200, 12747, 14467, 13891, 14454, 14097, 14343, 12385, 10757, 14044, 15104, + 14162, 15268, 13917, 14949, 12769, 12463, 15035, 14787, 13704, 14011, 12924, 14752, 11144, 12395, 13561, 14592, + 13750, 14101, 14632, 12549, 14222, 13732, 15000, 12743, 12231, 15316, 15258, 14493, 10146, 13715, 13340, 10235, + 11117, 14368, 14735, 12635, 15035, 15252, 14440, 11993, 15147, 14585, 14218, 9986, 15021, 13985, 14851, 14732, + 14627, 13816, 14529, 11322, 13246, 14000, 15081, 12357, 14933, 14096, 14434, 15011, 12677, 15355, 12497, 14063, + 13527, 14628, 14772, 14759, 15343, 12492, 11842, 14900, 15030, 13870, 13786, 14629, 14420, 14354, 14314, 12124, + 13971, 14911, 15299, 14575, 14805, 13950, 9484, 11443, 15032, 15221, 15203, 14261, 13164, 14839, 14570, 14392, + 13112, 14477, 12918, 15079, 14300, 15055, 13797, 11585, 13793, 13904, 15093, 14861, 13054, 13207, 14870, 12947, + 13727, 9166, 12715, 12614, 14582, 14189, 6773, 14726, 14261, 15250, 10369, 15058, 
15213, 15352, 12505, 13487, + 14567, 15051, 15341, 14168, 14627, 14860, 15175, 13274, 12649, 15119, 14616, 14993, 14654, 14618, 13353, 14074, + 15000, 14698, 10450, 12933, 15057, 12957, 13414, 12811, 14927, 12471, 13793, 13557, 14848, 11122, 14089, 14553, + 15022, 14564, 15135, 12648, 12720, 14561, 14954, 12107, 14068, 14150, 15019, 10674, 13952, 14461, 15268, 14513, + 12779, 14716, 12373, 12208, 15197, 8393, 13207, 12041, 14838, 13248, 14835, 15034, 12538, 14528, 12621, 14438, + 11609, 15237, 15047, 14641, 12356, 12483, 15284, 15131, 14767, 14652, 14600, 13293, 14818, 13068, 14415, 15338, + 13435, 14953, 13962, 14495, 12964, 11843, 14879, 13928, 14441, 14196, 13552, 14816, 14794, 13295, 13499, 15213, + 14311, 13625, 14892, 14827, 15173, 15261, 13720, 12985, 14512, 14906, 15329, 14640, 11752, 13534, 14289, 14566, + 12370, 14647, 13440, 14480, 14420, 14841, 11313, 13264, 12765, 14499, 14063, 12019, 14681, 15262, 14952, 14263, + 14501, 14823, 14151, 14686, 7546, 12877, 14687, 13327, 14504, 15097, 15004, 13898, 14554, 12480, 12311, 14473, + 15024, 14711, 14415, 15094, 13189, 13481, 11292, 13980, 13498, 14212, 14743, 12875, 13902, 13650, 12586, 14574, + 13881, 15272, 13333, 15025, 15106, 15130, 14537, 14392, 14403, 13593, 13872, 15018, 14854, 14636, 13988, 14331, + 14757, 11868, 14781, 14061, 15263, 11171, 13860, 15028, 14399, 14956, 14676, 15214, 14157, 12920, 14577, 13517, + 14422, 14676, 14508, 14751, 15067, 15165, 14020, 14630, 15197, 12366, 8838, 12100, 11390, 14805, 13479, 13588, + 14408, 14436, 14417, 14508, 14889, 14493, 14877, 14586, 14294, 14907, 15328, 14143, 11402, 13226, 13038, 15209, + 15081, 12441, 13993, 15225, 15315, 12069, 13800, 14351, 15265, 14287, 14682, 14915, 14786, 14838, 13580, 15230, + 14710, 14875, 14822, 14905, 13643, 10921, 13200, 14915, 15130, 8974, 15149, 13874, 14168, 13106, 12607, 14375, + 13652, 13746, 14514, 14900, 15166, 14219, 14585, 14767, 14019, 13507, 14308, 13462, 13583, 14004, 15163, 12719, + 13626, 14784, 13318, 13866, 12390, 
13372, 12931, 11360, 14988, 13642, 9696, 14452, 12347, 14866, 13741, 12654, + 15023, 14858, 12759, 11781, 13770, 14856, 14468, 14110, 14621, 14381, 14592, 12875, 14471, 11751, 15101, 15344, + 14150, 13596, 13203, 14287, 15219, 14398, 14921, 8120, 15158, 14354, 14393, 13855, 14102, 14664, 14814, 15179, + 14490, 13925, 13790, 14606, 13043, 14696, 14865, 14035, 13514, 14013, 12270, 12058, 14386, 15272, 13953, 11908, + 14817, 12856, 14991, 13685, 6419, 15148, 13670, 15054, 14381, 15110, 14462, 14982, 15222, 15301, 15281, 12385, + 11148, 14596, 13151, 12284, 14201, 14870, 14402, 14746, 14823, 12787, 15289, 15345, 14670, 10777, 15278, 12343, + 14654, 13702, 14254, 13791, 14563, 14189, 13761, 14056, 14498, 15352, 14845, 9951, 13390, 11955, 15242, 14617, + 15010, 14234, 13166, 13706, 14469, 14591, 14423, 9361, 15171, 9565, 15098, 13631, 13705, 15358, 11357, 12408, + 15101, 13908, 15292, 13498, 15107, 14540, 15261, 14002, 15261, 14117, 14318, 15236, 13602, 14511, 14456, 14759, + 7403, 14380, 13409, 14753, 9391, 14905, 15199, 13488, 15109, 12651, 14509, 14810, 14769, 15197, 12165, 9348, + 15074, 13443, 14706, 15118, 15060, 15140, 14671, 13778, 11275, 13944, 13389, 15308, 11364, 15221, 15087, 13823, + 14032, 14957, 10844, 6159, 14342, 9397, 14700, 14471, 14885, 13851, 13983, 15251, 12545, 14071, 14730, 11337, + 14496, 15010, 14521, 14616, 15277, 12589, 15192, 14519, 15294, 14694, 14562, 13379, 14613, 13895, 14664, 13457, + 11692, 15273, 13211, 13862, 13827, 14523, 13934, 10315, 14610, 13404, 14528, 12886, 13306, 11912, 15195, 15312, + 13805, 13690, 14774, 13471, 14357, 11592, 13563, 13571, 14282, 10808, 12747, 13871, 12554, 15127, 15335, 14993, + 14463, 14675, 14470, 11684, 14011, 13719, 14961, 7694, 13471, 14515, 13193, 13901, 15110, 14497, 15283, 15141, + 13671, 15044, 13906, 10249, 15012, 15179, 15079, 14002, 13069, 14992, 14570, 14439, 14466, 11908, 14840, 15272, + 11234, 15093, 10264, 14626, 15340, 13802, 15283, 13944, 14182, 15009, 14949, 14446, 15001, 13344, 14282, 
14473, + 14717, 15224, 13267, 14523, 12060, 13864, 14313, 14365, 15322, 14075, 14448, 12097, 14638, 13533, 13670, 10867, + 14329, 14642, 15054, 15074, 14917, 14458, 14954, 13615, 14178, 14621, 14979, 13987, 15279, 14043, 12249, 10269, + 15288, 15036, 13559, 14472, 14777, 14259, 14983, 15262, 6291, 10846, 13980, 11363, 15134, 14900, 8320, 13942, + 14382, 14474, 12892, 15008, 15258, 15206, 14556, 11201, 14290, 13839, 14502, 15014, 7520, 14973, 14351, 11255, + 13330, 14409, 14888, 14471, 11414, 15264, 11895, 10699, 11460, 14757, 13529, 15270, 13577, 14148, 14082, 14101, + 13162, 15083, 15080, 8740, 15286, 14418, 9612, 14745, 14529, 14824, 14292, 15194, 12466, 14538, 13914, 15021, + 15181, 15092, 14190, 13506, 13426, 14964, 15327, 14548, 14682, 13540, 14685, 15068, 15347, 9109, 12909, 14674, + 15152, 13331, 14758, 14097, 11286, 14957, 11628, 15120, 13882, 13163, 14862, 15221, 8298, 15030, 12898, 12228, + 15029, 14921, 15190, 15015, 15164, 15097, 14334, 13121, 13927, 14752, 11366, 14910, 13787, 14653, 15194, 14918, + 15243, 13221, 14095, 14629, 15155, 14003, 12435, 14455, 14672, 14552, 15317, 14750, 12551, 13149, 11487, 12799, + 13845, 12996, 14465, 10201, 12143, 14265, 12985, 15262, 12493, 14355, 11521, 12863, 15216, 12811, 14299, 10000, + 13395, 14984, 9842, 13695, 14893, 14437, 14409, 13835, 15196, 12313, 12465, 13432, 15321, 12408, 13847, 14713, + 15183, 14737, 14790, 15267, 14105, 12328, 14938, 14479, 12330, 9031, 14917, 14541, 14628, 14614, 13197, 14427, + 13647, 14456, 14854, 14349, 14852, 12773, 14993, 14609, 9750, 14022, 13255, 14575, 14981, 14604, 12963, 15315, + 14947, 14590, 14719, 12444, 14873, 13738, 13127, 15119, 14834, 15188, 14448, 12926, 13866, 13494, 15221, 13439, + 13561, 15182, 15324, 11620, 14471, 9576, 13952, 14961, 13974, 13150, 14167, 14582, 14297, 14247, 14194, 15336, + 15002, 13130, 13842, 10489, 13593, 13909, 10747, 14901, 15264, 15089, 13281, 14351, 13342, 15177, 14102, 13484, + 14719, 13506, 14463, 12005, 15029, 14144, 15344, 10527, 12674, 
14486, 14377, 14990, 8073, 14906, 10168, 14926, + 13368, 12864, 14972, 9529, 14558, 14193, 14057, 14712, 14376, 12870, 11341, 15241, 14252, 14538, 10894, 13535, + 13676, 12831, 15155, 14337, 14937, 14424, 14976, 15312, 11957, 14009, 14031, 13824, 14888, 10645, 14844, 13467, + 13006, 15018, 14091, 14559, 8495, 14378, 15172, 12460, 14943, 11811, 14832, 13641, 14131, 13440, 15102, 15350, + 13937, 13766, 11646, 15084, 14500, 10648, 14074, 14386, 15189, 14945, 12650, 12734, 15181, 14645, 11826, 13315, + 13020, 15253, 12949, 15009, 14654, 13542, 13556, 14360, 14599, 14296, 15098, 14442, 13766, 9901, 13568, 10913, + 14489, 14822, 14801, 13259, 12417, 10012, 13879, 15078, 14358, 14961, 14653, 12299, 11290, 13363, 12634, 13331, + 15082, 15065, 14172, 11387, 14405, 15019, 13212, 14694, 14325, 14729, 13883, 13466, 13884, 14664, 14956, 14340, + 15352, 15188, 15062, 14205, 12747, 12428, 14603, 7582, 13427, 15266, 14492, 11186, 15091, 14531, 14514, 14465, + 14905, 10574, 12575, 14836, 14765, 12462, 13918, 14819, 14459, 13377, 14491, 13275, 15208, 14576, 11289, 14528, + 12646, 15267, 11844, 15278, 14874, 14432, 15103, 15332, 15048, 14188, 12648, 15128, 14800, 14629, 15318, 14745, + 13777, 11902, 14668, 15245, 9422, 14460, 15276, 14079, 13454, 14850, 12793, 14628, 13265, 15342, 13430, 15171, + 14622, 14874, 14638, 14867, 13376, 13574, 12745, 12242, 14893, 15079, 15046, 11750, 15160, 14368, 15337, 14052, + 15047, 14390, 15202, 14728, 12149, 14005, 14515, 10831, 13751, 13761, 14105, 12852, 15278, 13340, 10944, 10805, + 13871, 12374, 14373, 14123, 14846, 14993, 14154, 12343, 14626, 12890, 14707, 13914, 15293, 11653, 8252, 13646, + 15080, 14727, 12242, 12578, 13812, 15291, 14675, 13407, 15200, 13326, 14834, 9253, 14399, 15310, 14517, 15090, + 14783, 12948, 13542, 12343, 11449, 14311, 11431, 14683, 11338, 11436, 15131, 13757, 14493, 13826, 14085, 15266, + 13909, 14276, 14799, 14756, 14599, 14109, 14722, 12436, 14762, 12394, 14812, 14059, 13749, 13720, 15122, 13557, + 15033, 15150, 
12350, 15134, 15114, 15032, 10841, 15155, 14684, 1474, 14957, 14954, 14912, 15298, 14299, 13980, + 14101, 13537, 14731, 10035, 15110, 14366, 12161, 15142, 14837, 15010, 11090, 10395, 11655, 13094, 15308, 15048, + 8217, 14715, 14117, 14435, 14808, 14536, 14216, 7461, 14713, 12708, 8808, 14686, 15096, 13180, 14902, 11743, + 15026, 14217, 14608, 15254, 14472, 12166, 14874, 14069, 15315, 14006, 14041, 15074, 14866, 14133, 14027, 14391, + 15353, 13698, 14941, 14840, 15052, 14603, 12778, 15197, 13379, 12974, 15080, 14813, 15215, 14668, 13482, 14926, + 14853, 14538, 12329, 14368, 13547, 13925, 15355, 11667, 13556, 15151, 10711, 10018, 15043, 14895, 14554, 15180, + 14672, 14945, 15177, 15328, 14239, 15189, 14749, 15223, 10994, 14174, 14802, 13253, 14821, 14206, 8284, 14945, + 14446, 12498, 13891, 9328, 15156, 13375, 13549, 10812, 14031, 14863, 15175, 14655, 14988, 9761, 13007, 15195, + 14527, 10990, 14765, 14620, 13882, 11626, 15223, 11768, 14350, 13395, 14567, 14011, 13971, 14426, 13765, 9068, + 10283, 14819, 14529, 13628, 13277, 14385, 14338, 14635, 14543, 13542, 14326, 14956, 13770, 14885, 15186, 14787, + 14939, 11674, 15034, 13767, 12537, 13959, 14806, 14406, 14366, 12656, 12916, 11349, 14596, 14437, 14966, 14883, + 11658, 14645, 11570, 14416, 7253, 14502, 14770, 14794, 15156, 15002, 14861, 14724, 14865, 13814, 10004, 15255, + 15259, 10166, 14153, 14924, 13511, 12038, 14776, 13108, 9317, 15255, 14533, 14930, 14611, 15164, 14634, 15214, + 13947, 12757, 12084, 13689, 13068, 15352, 14387, 14515, 13646, 14390, 14004, 13050, 13660, 14367, 14679, 15095, + 13224, 13487, 14579, 15192, 13676, 15329, 14827, 14587, 13374, 13962, 13013, 12696, 11077, 14687, 13998, 14934, + 13376, 14742, 14743, 15155, 11355, 13574, 14903, 13439, 14373, 10556, 14997, 11996, 12123, 15251, 12777, 14606, + 14335, 15067, 15103, 15163, 11585, 10885, 14681, 14727, 13479, 15152, 13672, 14893, 13898, 14834, 15182, 12538, + 14435, 8713, 12317, 14366, 14351, 14730, 12811, 12941, 14403, 10131, 12148, 13338, 
14471, 13611, 13819, 14732, + 12821, 14615, 15091, 15085, 13400, 10196, 14402, 14655, 14325, 14868, 14683, 13421, 12839, 15132, 11607, 12828, + 13441, 14109, 10639, 15116, 13867, 14960, 6796, 14730, 7505, 14568, 12880, 8610, 14713, 13061, 15074, 15031, + 14712, 13516, 13710, 13138, 13247, 14457, 13606, 14426, 12876, 13825, 15337, 13796, 14370, 14752, 14873, 14716, + 14518, 14360, 11459, 15339, 13824, 12660, 13321, 14819, 14167, 14301, 14337, 13465, 14185, 14197, 15241, 11575, + 11489, 14855, 13544, 14881, 12384, 14978, 13904, 14881, 15206, 9243, 11911, 14064, 9838, 9713, 14914, 14386, + 10776, 14165, 14561, 9858, 11332, 15013, 13365, 14503, 15343, 14255, 14595, 14809, 13539, 14819, 14872, 13409, + 14929, 14855, 14354, 14194, 11594, 14198, 11637, 14579, 14112, 15179, 11363, 15336, 14675, 14484, 12912, 13255, + 14777, 12899, 13527, 15232, 15063, 13617, 13822, 11478, 11771, 14829, 14900, 14237, 14474, 13869, 15224, 14654, + 12567, 12362, 14651, 14221, 12353, 14513, 14426, 13904, 10652, 15148, 13583, 9897, 14310, 14449, 14951, 14901, + 14399, 14922, 13654, 14446, 14848, 14666, 13576, 12474, 12004, 15090, 15201, 13508, 14739, 14651, 13125, 13434, + 15313, 14632, 14067, 11431, 13439, 14959, 14928, 11569, 12318, 13715, 14111, 9931, 14688, 14445, 14128, 11773, + 15351, 14999, 15241, 15110, 11939, 9972, 14506, 10502, 13247, 14162, 12704, 15210, 12426, 14694, 15061, 14631, + 13762, 15274, 11372, 13921, 13967, 15348, 13532, 9763, 14500, 14282, 14985, 15319, 8583, 15238, 15265, 15147, + 13476, 15185, 15270, 14835, 14830, 14961, 14412, 14308, 15197, 14957, 13235, 13493, 13617, 14188, 14813, 14271, + 15165, 13978, 13371, 15147, 13168, 12971, 13981, 13331, 14024, 13855, 14085, 12501, 13345, 14739, 13013, 14471, + 13673, 13882, 13937, 12925, 13765, 14435, 7458, 14456, 13164, 14668, 14210, 14847, 15322, 14681, 14783, 14285, + 15212, 12969, 14347, 15299, 14053, 14285, 14491, 13204, 15033, 10448, 13444, 14937, 13304, 15238, 14825, 13280, + 14185, 12846, 13273, 13530, 13868, 13927, 
12718, 15267, 14600, 13840, 11296, 13981, 13516, 13693, 12752, 15146, + 12619, 13622, 14694, 15107, 13718, 11766, 11934, 14689, 14722, 15188, 14880, 14454, 14654, 12199, 13849, 14643, + 14252, 14493, 13077, 12424, 14904, 15251, 14792, 13852, 13436, 13817, 14232, 14811, 15322, 13707, 13911, 14679, + 14899, 13506, 14995, 12414, 12454, 14480, 12679, 15010, 14991, 14824, 15106, 14612, 11519, 14017, 14851, 10248, + 14471, 14420, 14662, 13134, 14481, 14493, 14770, 12428, 12931, 14192, 14193, 14340, 13723, 15316, 13681, 15180, + 14850, 14559, 9648, 14762, 13034, 13436, 15161, 14443, 14714, 14574, 14492, 12691, 14963, 12259, 14210, 11975, + 14298, 14557, 9641, 13489, 13904, 15003, 10955, 14866, 13412, 14225, 14417, 14991, 15225, 13544, 14558, 15256, + 14501, 13131, 14720, 14966, 12590, 14279, 14497, 11343, 11777, 14587, 14780, 13791, 14707, 14505, 13555, 12438, + 15088, 12751, 11127, 15249, 13626, 13510, 13756, 12501, 13571, 13569, 12750, 12694, 14803, 14144, 15252, 14337, + 14538, 14575, 15087, 14916, 14388, 14934, 14715, 12776, 14715, 14525, 13373, 13562, 14365, 13452, 12402, 14504, + 11917, 14649, 15127, 14231, 14714, 15155, 12125, 14996, 8266, 14396, 12409, 14740, 14351, 13608, 14406, 14993, + 14097, 10563, 13603, 14730, 14898, 15118, 13589, 14188, 12423, 8215, 15323, 14907, 13977, 15106, 11600, 12757, + 13400, 14366, 15163, 14889, 13368, 14352, 14847, 13737, 14774, 15011, 13760, 14803, 14848, 14607, 13079, 14903, + 14661, 15344, 15260, 13332, 15342, 13253, 15150, 15326, 12987, 13637, 14240, 13595, 13917, 14115, 13969, 14703, + 14884, 15162, 12048, 11699, 14495, 14073, 12622, 14291, 15052, 14310, 10539, 12840, 13223, 14455, 14920, 10762, + 14795, 15342, 12586, 13510, 15297, 14698, 15058, 13501, 12976, 14585, 15108, 15025, 13326, 14889, 15334, 15280, + 15273, 14866, 14995, 13426, 14460, 12733, 15321, 14807, 11147, 13472, 14915, 15074, 14658, 14019, 12228, 14944, + 14352, 15133, 15089, 14667, 15024, 14398, 14315, 14381, 14370, 14214, 14352, 14079, 14650, 14090, 13361, 
14561, + 11459, 14767, 8160, 15320, 12995, 12747, 12183, 10967, 14196, 14576, 14216, 14213, 14574, 13654, 14986, 14920, + 14776, 14281, 13698, 14601, 14752, 14806, 14771, 14998, 14496, 14679, 10476, 14572, 14550, 14228, 14383, 14883, + 13811, 11710, 15328, 14601, 14380, 15043, 13335, 13734, 15281, 13584, 15332, 11630, 14109, 15316, 13580, 14851, + 13293, 15069, 8772, 14099, 15287, 14845, 11274, 14810, 11366, 14507, 15149, 15266, 12289, 14029, 14702, 15046, + 13258, 15263, 13739, 12472, 14467, 14052, 15002, 14935, 13984, 10145, 15110, 14713, 15068, 15269, 13189, 14112, + 14006, 14877, 14572, 14346, 15198, 15050, 7803, 14340, 14477, 14295, 12785, 12826, 13275, 13390, 12037, 14349, + 14876, 13324, 14367, 14722, 14458, 14895, 11771, 15136, 14695, 13481, 15331, 14546, 14466, 14997, 14668, 15317, + 12160, 10725, 12592, 15186, 15030, 14630, 13652, 13716, 14768, 14391, 14388, 14468, 12834, 13164, 12262, 14768, + 14015, 14087, 14520, 14158, 14800, 15164, 14991, 13578, 14478, 15240, 15104, 14935, 14903, 14894, 15144, 14135, + 13004, 14054, 12904, 13275, 13149, 14510, 14372, 14966, 14481, 11881, 14307, 14807, 15296, 15284, 14348, 11829, + 14662, 14823, 14743, 14144, 15288, 13630, 15357, 15177, 14838, 14874, 14128, 13578, 15101, 15029, 14818, 14696, + 13789, 14014, 15055, 14801, 13411, 13339, 14360, 14624, 15323, 14567, 8560, 13323, 11985, 13116, 9374, 14801, + 13756, 12538, 14669, 14192, 14581, 14587, 14490, 13429, 13473, 12889, 13227, 15193, 14810, 12821, 13512, 11340, + 14983, 13896, 12305, 14501, 10696, 14703, 11626, 13983, 15032, 15356, 14084, 11917, 6511, 14749, 15191, 14940, + 12646, 13840, 13987, 14762, 15313, 13853, 12826, 13272, 14802, 15004, 13845, 12511, 12982, 12314, 13938, 12909, + 11723, 13721, 13105, 12523, 13943, 12212, 13440, 13822, 14537, 10486, 14383, 14674, 14277, 15270, 8470, 15118, + 14748, 14664, 13960, 14283, 12838, 14748, 13027, 11282, 15336, 8240, 13979, 14341, 13432, 13995, 14546, 14240, + 11689, 14396, 11455, 14416, 13907, 13128, 11770, 14775, 
14446, 13998, 11363, 14453, 13501, 10955, 12611, 12911, + 14211, 14577, 12633, 13421, 12909, 14499, 14571, 14718, 10893, 14093, 15075, 14649, 13446, 12937, 13137, 13391, + 12725, 14042, 14452, 15212, 14335, 15130, 12466, 14128, 14738, 12944, 14943, 13255, 14279, 14726, 14752, 15240, + 14925, 14552, 13926, 14954, 12078, 14774, 15035, 13417, 14856, 14647, 12736, 15132, 14076, 15021, 12979, 14253, + 14190, 13449, 14692, 12629, 14709, 14560, 14663, 11814, 14583, 13835, 14571, 12170, 13340, 11701, 15003, 14952, + 15139, 14049, 13981, 14668, 14535, 13988, 13674, 14675, 14783, 9339, 15206, 14699, 15154, 15250, 11561, 14321, + 15045, 13476, 14541, 15071, 15089, 13828, 14804, 14624, 14663, 12612, 14568, 14765, 14398, 15082, 11660, 14411, + 13701, 14488, 14855, 15318, 13666, 13459, 14594, 15230, 14443, 14224, 14496, 14717, 14596, 15279, 14949, 13479, + 14392, 14970, 14831, 8860, 13464, 15300, 12799, 15068, 12293, 14405, 14834, 14584, 14118, 12958, 14565, 13815, + 13685, 14560, 13245, 14799, 15282, 13832, 14389, 12345, 5323, 14050, 12837, 14551, 11039, 14832, 14910, 14849, + 15115, 14358, 14990, 14520, 10105, 14766, 14048, 13496, 11704, 14081, 14388, 14917, 14190, 15171, 13245, 13641, + 10001, 13645, 13839, 14335, 12420, 14991, 13911, 15217, 12752, 15011, 10473, 11102, 13178, 14228, 14346, 15022, + 14078, 12116, 12080, 14933, 12332, 14912, 14411, 13456, 13451, 13428, 14716, 15131, 14352, 13228, 13362, 13847, + 13522, 13954, 13598, 15127, 14740, 13959, 14356, 14899, 14153, 15174, 14329, 13906, 12935, 14179, 8209, 14963, + 14724, 15012, 14721, 14429, 8163, 14871, 14755, 13319, 14464, 13924, 14728, 14728, 11852, 14988, 14874, 14068, + 14881, 13231, 15323, 15163, 14993, 14594, 14969, 14567, 13832, 14894, 14715, 13984, 13324, 13839, 14552, 14450, + 13901, 13296, 13348, 11707, 14503, 13608, 13117, 14543, 14811, 13793, 9212, 12427, 14676, 13995, 14602, 14860, + 8923, 15092, 13708, 14915, 15066, 13324, 15256, 13787, 15117, 14910, 14209, 13490, 13645, 15118, 14422, 13245, + 12404, 
9590, 14690, 13029, 14530, 15232, 11978, 13447, 13641, 14603, 14863, 11576, 14296, 12555, 11516, 12819, + 11898, 13952, 15356, 14613, 13780, 14093, 14813, 10126, 12906, 14506, 12807, 14587, 15076, 14630, 14723, 14541, + 14693, 14944, 15156, 13382, 14863, 15288, 13936, 7296, 14518, 14214, 14111, 9640, 15211, 15196, 15114, 11283, + 13364, 10803, 13520, 11627, 14723, 14424, 13429, 14509, 14611, 14820, 12806, 12666, 14821, 12459, 12335, 13598, + 10495, 13334, 11354, 12409, 12987, 15054, 9582, 14840, 13037, 13604, 15036, 9488, 14029, 15154, 15127, 13738, + 13045, 13013, 15115, 10918, 12879, 13817, 14520, 15092, 13683, 12592, 15144, 14668, 14679, 14530, 14886, 11482, + 15149, 14376, 8480, 14266, 12516, 14802, 15111, 12047, 12600, 8677, 14672, 13402, 15177, 14501, 14379, 14869, + 15221, 14572, 14636, 14352, 12983, 11966, 13641, 14927, 15141, 14463, 15141, 14837, 13908, 13977, 13278, 13063, + 14162, 13368, 13722, 15148, 14653, 7561, 14382, 14664, 14699, 12041, 13346, 13952, 14435, 11873, 12596, 14862, + 11107, 14416, 13685, 13194, 11599, 15065, 14679, 15107, 10509, 14556, 13325, 14214, 13743, 14617, 14536, 14749, + 14971, 14173, 14674, 14896, 14943, 14726, 15354, 15065, 14628, 13720, 12221, 14398, 12198, 15348, 14804, 12341, + 14920, 14542, 14401, 12890, 14895, 13148, 14834, 15237, 13178, 4091, 15130, 13745, 14412, 14820, 10495, 15064, + 15166, 14881, 9933, 12174, 11354, 13026, 8418, 14936, 14622, 13829, 14452, 14889, 14744, 14382, 12329, 14743, + 14830, 14588, 14386, 11514, 15306, 15208, 13035, 15167, 13026, 15087, 13292, 12613, 15144, 11851, 14655, 13368, + 14167, 14096, 14259, 14269, 13776, 13653, 14702, 11812, 14562, 14919, 15028, 12432, 14955, 13503, 15082, 12926, + 14497, 14553, 12687, 13182, 13905, 15326, 12753, 14262, 13052, 14591, 14463, 14492, 15110, 14193, 14490, 15072, + 14555, 15119, 14533, 13968, 15103, 11082, 14252, 13840, 14583, 15191, 13801, 10285, 14358, 14042, 13367, 14428, + 15358, 15004, 14565, 8043, 14705, 14555, 13120, 13222, 14429, 6508, 14375, 
13787, 11759, 11098, 14382, 14369, + 15283, 12944, 12356, 15123, 14350, 14891, 9056, 14239, 13661, 13587, 15228, 13417, 15278, 14010, 13658, 12451, + 14768, 14607, 14745, 13670, 14969, 15357, 15083, 14023, 14948, 15239, 14862, 14499, 8271, 14094, 14717, 12443, + 14533, 12189, 13320, 12521, 11682, 14252, 11383, 14362, 15075, 11884, 13655, 14805, 14734, 14701, 14803, 15114, + 12315, 15225, 13875, 15327, 14580, 14379, 14403, 14850, 13739, 15052, 13447, 11702, 15167, 10206, 14411, 14634, + 11331, 9001, 12852, 13339, 15247, 14093, 14024, 14988, 14954, 14293, 13781, 15345, 14237, 13968, 12465, 14174, + 10541, 13873, 14918, 14889, 12125, 15349, 14999, 13885, 15136, 11677, 15290, 13181, 14441, 14316, 15089, 13689, + 15351, 10205, 11249, 15197, 15174, 14178, 12328, 14843, 15353, 14607, 15023, 12497, 15087, 14876, 13455, 14495, + 14577, 11605, 13429, 12728, 14515, 15023, 12546, 12873, 15209, 11956, 13076, 13532, 13875, 15204, 14053, 6427, + 13185, 15144, 15226, 12345, 12801, 12385, 14096, 15065, 15085, 12706, 14622, 14301, 14740, 12523, 13474, 12760, + 13686, 9024, 15067, 14218, 15226, 14650, 15329, 14554, 12739, 11226, 13700, 13413, 12694, 13011, 13949, 15111, + 14819, 13670, 11964, 11510, 12752, 11490, 12476, 14795, 14258, 15083, 14592, 13983, 13820, 15143, 10663, 14440, + 15163, 14813, 15116, 15192, 14905, 13428, 14881, 11622, 14760, 13806, 14280, 14090, 14596, 14862, 14781, 15014, + 14656, 14610, 14722, 12111, 12691, 14445, 15172, 12216, 15307, 12993, 13252, 14893, 13431, 15005, 14888, 13203, + 13016, 14342, 8343, 15048, 13231, 13176, 14871, 15099, 15099, 12387, 14354, 13601, 13934, 14575, 14538, 14359, + 14607, 14953, 15088, 12440, 13121, 14552, 15335, 15053, 12662, 14836, 15069, 14154, 11753, 14151, 15244, 15152, + 14968, 14037, 14709, 15301, 15149, 12502, 14330, 15128, 12372, 14055, 13701, 11965, 15040, 15344, 13255, 13315, + 13508, 15076, 13065, 12329, 13670, 13189, 12575, 11620, 10429, 15155, 13039, 15062, 14265, 13170, 15331, 13642, + 11654, 14386, 14970, 13438, 
13289, 13497, 14216, 14861, 10806, 15168, 14091, 15050, 14500, 13700, 13444, 15279, + 13276, 14652, 14687, 10674, 13563, 14323, 12332, 13729, 15034, 14051, 14172, 13872, 11838, 14287, 13671, 15120, + 13159, 14996, 11651, 14894, 14175, 13996, 13045, 15238, 14848, 10925, 14945, 14621, 15103, 13440, 13622, 12725, + 11975, 13577, 13690, 12144, 14718, 15256, 15022, 11791, 15041, 11332, 13969, 13561, 14590, 14574, 13311, 14442, + 14881, 14976, 15321, 14965, 14055, 14736, 14560, 14967, 14717, 15140, 15219, 13817, 14490, 14884, 13506, 10973, + 14524, 13398, 14798, 13806, 14794, 11556, 13530, 13813, 11445, 13969, 15120, 10913, 13989, 14488, 12726, 14452, + 14851, 14061, 14987, 14450, 15184, 14629, 15009, 13784, 14570, 14701, 13775, 13226, 12717, 13242, 9740, 10490, + 15227, 12898, 14606, 14771, 14565, 14674, 13710, 14713, 11120, 11377, 14549, 14278, 14225, 14825, 13151, 14886, + 14898, 14689, 12315, 13956, 15135, 13317, 14571, 15024, 13956, 12047, 15288, 15047, 14079, 9264, 12682, 14385, + 14574, 14772, 13601, 14097, 15147, 14826, 11547, 12399, 10569, 14289, 15111, 13716, 13462, 9466, 14392, 9643, + 14391, 15070, 14652, 8908, 14253, 13240, 10506, 14233, 13860, 12541, 14855, 14490, 14347, 15291, 15273, 13922, + 14540, 14571, 14786, 15316, 12883, 14763, 10915, 14579, 14420, 15313, 14486, 8208, 13773, 15296, 14511, 12419, + 15196, 14451, 14679, 15060, 12823, 14202, 14225, 15173, 11649, 15302, 14495, 10391, 14768, 13749, 14044, 13812, + 14396, 14623, 12471, 13159, 12272, 14741, 15125, 14575, 15253, 15084, 13948, 14451, 13453, 14795, 14985, 12466, + 11639, 12233, 13323, 14826, 11399, 15082, 13114, 11823, 14990, 14176, 12825, 13192, 12309, 14788, 13536, 14950, + 13414, 13685, 14777, 13562, 14269, 14354, 14724, 14538, 12801, 15182, 14618, 14675, 15075, 14749, 14719, 14196, + 13736, 6179, 15078, 12978, 14400, 12191, 13677, 13368, 14991, 14037, 14288, 14803, 15254, 14349, 15228, 12460, + 13980, 13527, 14753, 15285, 14204, 14602, 14486, 13794, 14743, 14860, 13695, 14828, 13162, 
15048, 14903, 11438, + 14455, 9751, 12395, 12315, 14859, 11555, 9787, 13597, 14062, 13994, 15236, 14662, 13751, 14149, 14794, 15206, + 12913, 15113, 11218, 13558, 12832, 15219, 13970, 15260, 14503, 13626, 12579, 14737, 14438, 13997, 12685, 14974, + 14228, 15153, 14476, 15127, 9421, 14317, 15032, 11860, 7471, 14964, 13304, 13552, 14342, 7818, 13857, 12019, + 15197, 14630, 14862, 14836, 15186, 15095, 8259, 14689, 13510, 13236, 14375, 14551, 13829, 13002, 12231, 13221, + 15286, 14287, 14725, 13053, 15328, 13887, 12562, 15159, 15263, 14643, 13381, 14280, 14871, 13752, 14541, 10184, + 12669, 14714, 15359, 12825, 15030, 15281, 15298, 14001, 15073, 11243, 14791, 14311, 11587, 14101, 14452, 9953, + 15051, 15345, 15208, 14476, 14423, 12953, 13702, 13354, 13002, 13984, 14657, 13791, 14664, 15232, 13894, 14494, + 14294, 14370, 14679, 12839, 14769, 13745, 13895, 14937, 11538, 15267, 15132, 14665, 12493, 14768, 13418, 15188, + 12767, 13634, 14552, 14677, 14368, 13588, 11828, 14726, 14241, 11594, 13501, 12953, 13435, 8533, 14535, 14945, + 11046, 12954, 14183, 14586, 12336, 14737, 14102, 12294, 14186, 14549, 14243, 14284, 15127, 14318, 15258, 14737, + 11522, 11445, 15198, 14920, 13847, 14811, 13626, 13525, 13382, 15031, 14713, 10716, 14169, 12170, 8507, 14497, + 12336, 14620, 15332, 13974, 14560, 15229, 14813, 12320, 14391, 13902, 13739, 13787, 13078, 15051, 14601, 14357, + 15223, 14010, 14298, 14450, 14898, 14211, 15132, 13340, 13392, 14683, 15051, 10263, 15051, 14359, 11264, 10770, + 13710, 13980, 12181, 15285, 11858, 14345, 13284, 14416, 12626, 13731, 13972, 12693, 15140, 14125, 11429, 14612, + 14084, 14935, 14214, 14905, 13910, 15051, 12004, 13729, 14058, 14220, 14792, 14034, 14669, 15022, 13365, 14273, + 9829, 13554, 15026, 14374, 14669, 13939, 15271, 14264, 14844, 14003, 14548, 15218, 14168, 12475, 14837, 14067, + 13670, 11912, 14886, 14520, 12795, 15162, 12839, 14706, 14462, 13463, 13539, 14753, 14172, 11832, 14940, 13955, + 12451, 14866, 14307, 14357, 15115, 13398, 14718, 
13697, 14268, 14539, 15066, 11408, 15231, 15196, 15061, 11543, + 14431, 13701, 14853, 15241, 12685, 14869, 14835, 15299, 13397, 11185, 14563, 15315, 13644, 12498, 14392, 15123, + 14361, 14835, 15092, 13708, 13221, 13707, 13504, 12829, 13721, 13517, 15179, 12659, 14681, 13530, 14916, 14932, + 13932, 13459, 14991, 15017, 13548, 13886, 14931, 11513, 14361, 14823, 14710, 13622, 13827, 14685, 13470, 14850, + 14871, 14850, 11886, 14192, 11050, 14502, 13689, 14495, 14270, 13545, 15292, 14426, 10987, 14849, 14354, 14344, + 12560, 15002, 10623, 13083, 10792, 15072, 13143, 14439, 14785, 15270, 15070, 14594, 14509, 14328, 11407, 14941, + 13827, 15353, 11324, 12424, 13895, 15093, 14736, 14908, 14103, 13168, 13332, 14804, 14635, 9794, 14875, 13788, + 14343, 14687, 11241, 11008, 13439, 14725, 12613, 13005, 13479, 15153, 12943, 14866, 13653, 13122, 13499, 15091, + 15124, 13200, 15322, 14810, 14058, 14834, 14492, 14213, 14345, 13982, 15314, 14126, 14604, 14619, 12985, 13665, + 12495, 14687, 15072, 12010, 14963, 13956, 12711, 15189, 14537, 14932, 13818, 14187, 14177, 14595, 14369, 14253, + 15108, 14510, 14225, 15125, 14779, 14793, 13836, 15163, 15043, 15189, 11375, 14044, 13953, 15265, 13212, 14336, + 14709, 13848, 14365, 14845, 13977, 15094, 14143, 14380, 15243, 14944, 14713, 15278, 14859, 15045, 14285, 14384, + 15282, 14373, 14006, 15078, 13878, 12628, 15323, 13597, 13319, 12588, 14383, 12809, 14871, 13792, 14357, 13634, + 14884, 15035, 11995, 13955, 14362, 14099, 14488, 12771, 15110, 14872, 11615, 12952, 15231, 13283, 14867, 14926, + 11585, 12095, 15332, 12987, 13757, 14398, 14350, 13862, 13854, 14598, 10540, 14836, 14825, 14669, 13754, 10710, + 14914, 13083, 12953, 12806, 14909, 13668, 13411, 11252, 6923, 14608, 14816, 15290, 15348, 15216, 10852, 15094, + 15024, 14783, 13321, 14119, 14359, 14474, 15151, 14948, 13520, 14536, 14429, 14364, 14310, 13467, 14672, 14921, + 12527, 14962, 14900, 11872, 8643, 15338, 14254, 14587, 13869, 14335, 14645, 15192, 12040, 11446, 15087, 14359, + 
12672, 13581, 15242, 15068, 11217, 15020, 13396, 14660, 11955, 13587, 13334, 13368, 12462, 15327, 12118, 11844, + 14387, 14878, 15236, 14689, 14496, 15000, 14125, 13975, 14839, 9148, 14348, 8736, 10093, 13429, 14495, 7058, + 13470, 14502, 12895, 13958, 13506, 14551, 12229, 14013, 15230, 11329, 14240, 14076, 13610, 11983, 15047, 14337, + 14667, 14208, 15259, 14386, 14545, 13736, 14989, 14462, 12035, 12954, 14021, 14964, 15348, 14660, 13381, 13993, + 14202, 13528, 14596, 10305, 15145, 12981, 15342, 11670, 12488, 14434, 13952, 14094, 10302, 15289, 14716, 14870, + 14869, 14977, 12594, 14956, 14635, 15222, 14053, 12727, 15045, 11960, 13881, 12392, 15131, 15161, 15060, 14283, + 14735, 9496, 14459, 12342, 12538, 13773, 12940, 14852, 14399, 14552, 15058, 13133, 14270, 11906, 14486, 14242, + 14588, 14867, 14667, 15333, 13352, 10063, 14621, 14865, 11644, 15255, 13535, 12384, 14144, 13650, 13785, 15123, + 10497, 15008, 15033, 11903, 12024, 13414, 12309, 14892, 11717, 14346, 13576, 12546, 14599, 13322, 13625, 15202, + 14880, 14857, 14444, 12744, 12405, 14162, 14720, 15026, 13898, 14774, 12721, 13712, 9045, 13526, 15106, 15181, + 13840, 14648, 13316, 14344, 13401, 14665, 8432, 14513, 14429, 15118, 15140, 13829, 13255, 14720, 14067, 13982, + 15101, 15062, 14702, 12416, 14706, 13427, 12424, 14995, 13499, 13321, 14885, 14369, 15285, 14792, 14741, 15321, + 14558, 14898, 15079, 13711, 14721, 12255, 15228, 14898, 12537, 13412, 12809, 14755, 14394, 13402, 10463, 14969, + 12746, 14680, 15033, 13315, 14853, 15161, 13295, 7791, 15243, 14985, 15231, 14380, 15104, 13081, 15299, 15181, + 14766, 11635, 13724, 14564, 12349, 12506, 15072, 15035, 10298, 12880, 14891, 14536, 14554, 14970, 15012, 14917, + 15321, 14922, 13598, 12379, 14180, 14939, 12301, 13844, 14240, 13366, 14517, 12588, 13736, 15359, 14905, 14096, + 13494, 13470, 13503, 14344, 12840, 14915, 13589, 14769, 14752, 13459, 12138, 13656, 13841, 15351, 14960, 12231, + 14912, 13065, 15149, 15015, 14623, 15048, 12606, 13338, 12618, 
13930, 15101, 14489, 11568, 13112, 13529, 15054, + 13702, 12695, 14764, 14340, 14151, 14585, 14845, 15079, 14263, 14968, 14855, 15073, 13851, 14384, 13833, 15001, + 13154, 13945, 14816, 11115, 13271, 15195, 14553, 14553, 14494, 13921, 8324, 13945, 14709, 11046, 14398, 14794, + 12979, 14827, 12844, 14197, 15034, 15184, 14104, 14631, 15074, 14630, 14212, 14690, 12306, 15165, 12716, 12321, + 14137, 12042, 11847, 12415, 15108, 13597, 12565, 13114, 13683, 15246, 9390, 11671, 15271, 14671, 15238, 15233, + 14362, 13022, 13547, 8559, 14550, 13601, 14526, 13539, 14928, 15103, 15207, 13842, 13334, 13873, 12179, 15230, + 13806, 14621, 13230, 12642, 14849, 15044, 9667, 12419, 14706, 15040, 15152, 15326, 12692, 15035, 14719, 14828, + 12955, 11375, 15203, 14212, 14796, 12880, 15176, 13693, 12824, 14488, 14402, 15115, 14082, 13999, 10339, 15177, + 14707, 14408, 14477, 14570, 14867, 13056, 15144, 12493, 11950, 13454, 12357, 12178, 11781, 13978, 14047, 13865, + 14974, 14030, 14473, 14854, 12777, 14509, 13637, 14409, 14458, 14643, 14767, 13434, 14631, 10224, 14595, 14733, + 13476, 14935, 13013, 12900, 12387, 13990, 15003, 12917, 14398, 15123, 13501, 11977, 12591, 9193, 12878, 13831, + 14954, 15000, 8449, 13451, 14483, 11876, 14861, 14497, 11945, 15301, 13666, 14579, 15133, 11447, 12854, 14524, + 13442, 12075, 14316, 10544, 9171, 13537, 13383, 12837, 14310, 15194, 15180, 12111, 13259, 13924, 13229, 12461, + 14780, 14432, 13217, 14265, 13815, 14507, 10955, 14375, 14198, 14841, 14372, 12949, 14745, 13749, 9732, 14709, + 13517, 14716, 13654, 14424, 11893, 12995, 14931, 11712, 14436, 14940, 12711, 13052, 14047, 14285, 14654, 15003, + 13991, 14485, 13629, 14554, 14503, 11560, 12858, 14204, 15275, 15243, 11650, 12482, 14917, 14927, 14639, 14420, + 12045, 14419, 14423, 11853, 13820, 11192, 14593, 14358, 14204, 14459, 14713, 15100, 10465, 11451, 15301, 10159, + 12245, 13027, 14390, 15023, 15027, 12295, 13427, 13825, 11441, 14247, 13657, 14896, 14714, 15113, 14767, 14796, + 13311, 15182, 
11421, 14750, 15026, 15109, 15349, 15243, 12703, 11006, 14751, 14606, 11579, 14513, 13937, 12440, + 13107, 14402, 7793, 13477, 14706, 13836, 13428, 13805, 14618, 15206, 11878, 14917, 14579, 13682, 14023, 14268, + 15087, 13941, 14314, 15172, 14997, 14329, 11395, 13257, 11516, 14159, 4295, 13683, 13407, 12786, 12456, 13814, + 15342, 14731, 8661, 14602, 14096, 11800, 14312, 13669, 9468, 15240, 14353, 13610, 15058, 15356, 13576, 14846, + 13377, 15222, 11197, 14438, 15307, 15180, 14708, 10353, 12706, 12494, 10293, 13243, 14805, 14609, 14316, 14493, + 14965, 14288, 15313, 15240, 13598, 12832, 14944, 11803, 11346, 14579, 14869, 7344, 14991, 12711, 14500, 14509, + 12595, 13223, 14911, 13899, 14613, 12770, 13763, 14413, 11259, 15307, 14959, 13233, 14130, 14934, 15208, 10340, + 14180, 13499, 11881, 12612, 15042, 13411, 12433, 15176, 13670, 14041, 13568, 15030, 14543, 14359, 15060, 14476, + 10861, 15258, 14208, 14663, 15228, 14130, 14732, 15087, 13749, 13653, 14554, 12380, 14909, 13749, 14960, 9537, + 13351, 15083, 14386, 15231, 12552, 13396, 15027, 13479, 8028, 14653, 15182, 13519, 14362, 12812, 14955, 13653, + 14661, 15332, 12712, 14713, 14990, 14119, 14387, 9209, 14360, 14424, 12640, 13489, 14849, 14407, 15339, 15018, + 15048, 14824, 10910, 14901, 15181, 14553, 14655, 15295, 13861, 14798, 12365, 14560, 12667, 9847, 14499, 13856, + 11947, 10227, 15117, 14849, 13038, 11765, 14843, 15266, 14239, 14288, 13075, 14509, 14519, 15276, 15303, 11712, + 14621, 13346, 12532, 14347, 14654, 13059, 10712, 14591, 14578, 14907, 14751, 13764, 14347, 13631, 12938, 13487, + 12822, 13523, 12281, 14493, 13421, 9978, 14331, 14394, 14387, 15164, 10586, 14669, 10869, 13955, 14093, 12764, + 12986, 15098, 14908, 13599, 14578, 15284, 10761, 13899, 15253, 14471, 14862, 14851, 10455, 14558, 14728, 14077, + 14385, 14702, 14366, 14525, 14168, 12677, 12514, 15163, 13240, 13026, 15116, 14863, 9407, 15123, 11084, 13503, + 14813, 15313, 14815, 13322, 15118, 15108, 13160, 13835, 15216, 13196, 12710, 15025, 
11941, 13251, 14998, 13969, + 13742, 15134, 12344, 15005, 15243, 14300, 14519, 14375, 11731, 14581, 11457, 14488, 14723, 14295, 14364, 12312, + 13927, 14421, 15121, 9761, 14396, 14324, 13730, 9022, 13909, 12449, 12079, 14495, 13882, 15004, 12254, 15038, + 14873, 13488, 12736, 15306, 13564, 15145, 12477, 12418, 14610, 14986, 14355, 14657, 13573, 14661, 13470, 15084, + 13120, 13627, 7490, 15241, 14113, 8965, 14375, 8997, 13841, 13641, 15247, 14338, 15240, 14535, 14834, 13692, + 14926, 12316, 14693, 15010, 13449, 14831, 14909, 14384, 9802, 12229, 14492, 13741, 14867, 15273, 13782, 14539, + 14374, 15213, 13447, 15137, 15246, 15157, 15187, 12397, 13829, 13865, 14413, 15358, 12294, 15255, 15315, 14667, + 13513, 12674, 13948, 13264, 10190, 14658, 13265, 13508, 12045, 14701, 14403, 13591, 15178, 14521, 12874, 12003, + 15252, 14204, 8557, 14711, 14935, 13494, 14805, 14064, 13489, 14402, 14928, 11804, 12877, 13266, 14604, 14921, + 13736, 14883, 13516, 15069, 15214, 14525, 7198, 14647, 14953, 10711, 14636, 14633, 13612, 14937, 15343, 14594, + 15131, 10826, 13405, 14633, 14267, 13305, 14543, 14845, 13913, 12809, 14546, 14459, 13715, 14828, 13964, 11972, + 14968, 14904, 9234, 14745, 14389, 13530, 14843, 14371, 13931, 13142, 15024, 14400, 14540, 14857, 15056, 13380, + 880, 14803, 14404, 12347, 14119, 13563, 13625, 13936, 14249, 11821, 12538, 14791, 14471, 11185, 15089, 14682, + 14465, 15295, 11597, 14666, 12771, 14543, 14880, 15250, 15108, 12458, 13155, 14295, 15085, 9647, 14842, 13936, + 14372, 14090, 13701, 13488, 12371, 14065, 13429, 10657, 10650, 10308, 11777, 14464, 13978, 13976, 15274, 11277, + 14983, 15184, 14821, 14229, 15049, 10646, 13599, 12538, 14486, 13334, 12119, 14566, 11756, 13019, 14535, 14793, + 13412, 14267, 14835, 14829, 15308, 13510, 15224, 13073, 14978, 13450, 12643, 15322, 14568, 12201, 14075, 15219, + 14703, 13968, 13428, 15351, 12158, 10795, 13375, 14051, 13808, 12884, 14437, 14450, 14623, 15318, 14525, 7434, + 15220, 15209, 12753, 14411, 13851, 14724, 
15219, 13561, 13058, 15252, 15034, 14894, 14833, 14505, 14763, 13712, + 13773, 14113, 14571, 11600, 13994, 7807, 14664, 12304, 5086, 11065, 13331, 13632, 12296, 13322, 13535, 15292, + 15103, 14810, 14676, 14624, 14648, 12069, 15101, 14498, 15360, 12999, 13554, 14930, 14467, 13318, 11669, 12768, + 14098, 15064, 13973, 15000, 10460, 15166, 11296, 15212, 14916, 15327, 15017, 14970, 14921, 14876, 14464, 14988, + 12373, 14246, 10077, 14724, 12613, 13586, 15329, 13662, 15195, 14898, 14292, 13574, 14746, 14665, 12294, 15077, + 11727, 12948, 14158, 13521, 15207, 11768, 12214, 12079, 15038, 14413, 15140, 13392, 14398, 13268, 12162, 13658, + 15094, 13386, 14934, 15280, 13097, 9304, 13702, 14958, 15352, 14035, 14479, 15131, 15251, 14545, 11759, 14832, + 15208, 15199, 14199, 14739, 15083, 15204, 13917, 14581, 15325, 14820, 13659, 14396, 15029, 14485, 13781, 14954, + 15213, 13432, 15119, 14835, 12872, 14201, 14716, 14823, 12047, 15259, 12175, 12689, 13796, 14623, 14867, 15060, + 14561, 15200, 14474, 14127, 13058, 14535, 13954, 15075, 11474, 14081, 14792, 12876, 14750, 13241, 14717, 14990, + 14147, 14705, 14591, 15180, 15231, 14622, 14649, 14053, 15081, 14002, 14851, 12551, 13861, 15117, 14715, 12946, + 14673, 14889, 15295, 14471, 15006, 13502, 15038, 12336, 14045, 12186, 14418, 13400, 14826, 14580, 12041, 14240, + 13911, 14444, 15114, 13549, 14439, 14652, 15332, 12837, 15307, 12499, 15136, 13972, 12730, 13795, 15353, 14783, + 13173, 15101, 15226, 15348, 13085, 14656, 15063, 11424, 13977, 15235, 14047, 10574, 15228, 15286, 13970, 14596, + 10852, 14425, 14982, 13477, 15065, 14286, 15099, 14888, 13736, 15081, 14622, 12561, 14501, 13657, 15181, 14848, + 15113, 12306, 13859, 14493, 14302, 13551, 14372, 14310, 14670, 6216, 14511, 14657, 14244, 14501, 14347, 13457, + 14492, 13553, 13590, 13836, 12292, 12820, 10842, 13807, 14365, 13937, 15192, 14714, 14753, 15137, 12844, 13553, + 15043, 13565, 14541, 14696, 14823, 14511, 13933, 11811, 12759, 12680, 14578, 14913, 14670, 13393, 14023, 
13258, + 13805, 14445, 12028, 14097, 14346, 13445, 15143, 11528, 14929, 14496, 14712, 11425, 14197, 13013, 10638, 11326, + 11308, 14920, 14430, 11635, 15167, 14171, 14834, 15348, 13207, 9504, 15116, 14240, 13842, 15187, 15132, 14485, + 13410, 12942, 14507, 8543, 14199, 9774, 15066, 13098, 14492, 14672, 14908, 13099, 15028, 13534, 14585, 14706, + 15223, 13008, 13789, 15010, 14956, 14233, 13983, 15315, 14863, 14956, 15350, 14588, 15085, 14714, 14674, 9458, + 14642, 14881, 15088, 9357, 14257, 14763, 14705, 13312, 13698, 14963, 15163, 15220, 14806, 14346, 13727, 13889, + 10689, 12664, 14930, 14912, 8596, 14872, 15278, 14387, 15016, 13815, 14056, 11309, 9644, 14490, 14498, 14921, + 13202, 14451, 13850, 15185, 12679, 14354, 14863, 15071, 11032, 15330, 11595, 13390, 13475, 12425, 15150, 14279, + 14438, 15112, 14803, 15166, 15143, 14755, 14690, 14617, 15184, 14051, 11776, 13236, 14377, 13193, 14751, 12698, + 14759, 14579, 12302, 15181, 13412, 14847, 14727, 14016, 14972, 13327, 14282, 14425, 8741, 14663, 14473, 14943, + 14366, 14768, 13971, 12816, 14872, 15099, 12137, 12723, 13270, 11530, 15107, 14929, 13361, 11835, 14916, 12765, + 14773, 14567, 13800, 15109, 14460, 13941, 13968, 15177, 14572, 11630, 15143, 12509, 14862, 14395, 13742, 14524, + 15343, 10864, 14640, 14133, 14215, 14675, 13073, 10577, 13707, 15263, 15245, 13861, 9641, 15250, 11749, 13292, + 13902, 14397, 12649, 12550, 15264, 15061, 14037, 11445, 14200, 13863, 15043, 12513, 14090, 3882, 14895, 14755, + 15174, 15086, 14712, 14732, 13395, 14756, 14404, 15198, 12836, 15049, 14691, 14725, 14830, 12493, 14277, 14800, + 14807, 13843, 14971, 14382, 14564, 15205, 12091, 14901, 12798, 15271, 10194, 14667, 13458, 14953, 14952, 9891, + 14669, 15322, 14703, 11964, 11529, 14111, 12240, 14652, 12379, 15242, 14784, 13969, 15126, 10827, 14524, 14902, + 12341, 15197, 13650, 13779, 13958, 14946, 14706, 13852, 14696, 14674, 15154, 14478, 14933, 15025, 14574, 14759, + 14554, 14982, 12400, 14643, 12949, 14618, 14783, 12651, 13408, 
14907, 14103, 14927, 14126, 12402, 14047, 15195, + 13540, 14390, 12785, 14817, 15308, 14384, 14380, 12221, 14395, 14928, 14309, 13578, 13796, 12463, 14005, 9641, + 14715, 15268, 15056, 14386, 14885, 14750, 14060, 14623, 14508, 14815, 9801, 14027, 11433, 12739, 10557, 12800, + 14433, 14852, 14704, 14892, 14628, 15340, 12282, 13520, 13873, 12393, 13325, 12503, 15184, 14205, 15079, 14984, + 13450, 15289, 13237, 13389, 15322, 13569, 14384, 14176, 13015, 13255, 14883, 14335, 15254, 15045, 14675, 12299, + 13094, 14112, 12166, 13923, 15181, 13800, 14943, 14848, 14423, 14401, 13768, 12604, 13077, 12803, 14934, 13062, + 10860, 14525, 13206, 15269, 14278, 11915, 13000, 11391, 14797, 14669, 14265, 11662, 15198, 13037, 13222, 11781, + 12932, 13814, 14376, 14636, 15172, 14081, 14250, 14034, 15357, 15216, 12081, 15238, 14886, 9857, 15348, 13103, + 12921, 13073, 14376, 14381, 15294, 15185, 10688, 14408, 15206, 14995, 14826, 15340, 14375, 14481, 14528, 14493, + 15011, 11335, 13738, 14107, 11349, 13236, 14710, 15186, 11645, 14722, 14652, 14905, 14930, 15270, 15180, 14434, + 14804, 14864, 14440, 15106, 13356, 14862, 14403, 15240, 14716, 11372, 14538, 14497, 15112, 14425, 10958, 13608, + 15323, 14676, 13419, 11458, 14345, 14748, 13372, 14366, 14571, 14988, 13758, 12608, 13123, 14054, 14814, 13828, + 14988, 15011, 14660, 12063, 10776, 11509, 14915, 14361, 15288, 14078, 13152, 14408, 14737, 15193, 14573, 14627, + 13837, 14454, 13364, 15214, 12836, 11605, 14342, 15279, 15203, 14416, 15158, 14555, 15055, 13609, 13139, 12860, + 14528, 10848, 15006, 14516, 14017, 14017, 14823, 15210, 14890, 15308, 14755, 13615, 10400, 14463, 14805, 14779, + 15177, 14589, 10146, 15298, 10380, 12193, 14772, 14286, 14499, 14493, 15326, 13333, 13515, 13145, 13825, 13869, + 15137, 15044, 13804, 13789, 14022, 13982, 14018, 15061, 12734, 12864, 13218, 9616, 14322, 13482, 14336, 13847, + 12118, 14678, 14517, 14902, 13690, 4852, 14397, 15037, 14417, 14771, 13388, 15051, 7526, 14574, 14849, 14411, + 14831, 14544, 
12455, 10289, 14874, 12193, 8626, 11678, 13467, 15220, 14408, 13040, 14489, 11552, 14941, 14900, + 14474, 15042, 14923, 15007, 15293, 14527, 14485, 14753, 13415, 13889, 14010, 14449, 12564, 13880, 12024, 14011, + 13694, 13060, 13611, 13096, 14643, 15189, 15076, 14798, 14158, 12974, 9319, 14167, 15283, 12904, 15208, 14573, + 14729, 10032, 15091, 12726, 14500, 14747, 12654, 10031, 11988, 15273, 14541, 10397, 13128, 15000, 14360, 14925, + 14935, 14504, 8914, 14953, 15245, 14795, 14973, 12691, 14258, 14525, 12782, 12727, 15256, 14669, 14115, 13292, + 13211, 13253, 15015, 14352, 13753, 11669, 15324, 11830, 15153, 10181, 15170, 14633, 13928, 13024, 14636, 14114, + 14821, 15092, 15297, 14271, 15120, 14289, 14115, 13553, 15138, 15292, 13742, 14841, 13191, 14882, 12468, 14541, + 13301, 14980, 13988, 13430, 13402, 12699, 14279, 15340, 14786, 10166, 15079, 12562, 14993, 13347, 13753, 15302, + 14612, 14498, 14309, 13942, 12199, 14406, 13824, 14680, 13383, 14580, 13645, 14953, 15253, 14920, 12935, 13901, + 14511, 13292, 13173, 15208, 12514, 13166, 14503, 12933, 13382, 13937, 12682, 13888, 14706, 14848, 14685, 14317, + 14546, 13380, 13156, 15112, 14667, 14761, 14911, 14891, 14991, 10993, 14208, 11875, 13556, 14818, 14637, 14022, + 11611, 15313, 14777, 13460, 14460, 15302, 15003, 14768, 8521, 13630, 13562, 13189, 13901, 14962, 13534, 15310, + 13347, 15026, 11846, 14965, 11903, 14796, 14974, 12919, 14293, 13873, 15207, 10570, 15015, 13597, 14383, 14460, + 13381, 13022, 14033, 14862, 14255, 15248, 14857, 13451, 14282, 12529, 14571, 15165, 12540, 13566, 15077, 9961, + 14853, 14497, 13257, 13316, 13619, 13734, 13659, 14422, 12902, 14249, 14391, 14447, 15121, 14542, 13715, 14946, + 13419, 13089, 13280, 10966, 13990, 12318, 15083, 13896, 12601, 15305, 11725, 15215, 14502, 13774, 14106, 14358, + 13812, 14591, 14181, 12608, 14582, 13570, 14970, 14168, 13629, 15278, 13856, 11617, 15146, 14418, 12471, 15345, + 14411, 15266, 15311, 13336, 15253, 14585, 15308, 14547, 15311, 14016, 13676, 
14435, 15110, 14754, 15246, 14956, + 12092, 14864, 13184, 10051, 13287, 14835, 12969, 14504, 11977, 13333, 12168, 12569, 14004, 14463, 15011, 14823, + 12620, 12883, 8910, 14541, 15247, 14698, 14874, 13449, 9619, 15313, 13831, 15038, 13667, 14681, 14080, 14890, + 14634, 12800, 13154, 15027, 13520, 14726, 11355, 10908, 13967, 12917, 14797, 14324, 13784, 12510, 13849, 15119, + 14195, 14448, 14457, 14382, 15196, 12293, 14445, 12725, 13161, 14850, 14241, 14225, 13662, 14483, 14720, 13624, + 14428, 14666, 14336, 13522, 12713, 14915, 13608, 14097, 14259, 15049, 13985, 12506, 12544, 13500, 11304, 14673, + 12686, 12706, 14019, 14476, 15079, 13312, 10565, 13271, 13878, 14974, 12583, 13538, 15002, 10901, 15070, 11478, + 14174, 14705, 13475, 13528, 14304, 10263, 14114, 15353, 14293, 15047, 14844, 15238, 12988, 15026, 11270, 14387, + 13091, 14010, 11729, 14433, 14587, 14670, 14564, 11693, 14150, 13812, 13356, 14732, 14903, 12797, 14549, 14965, + 12485, 14837, 5672, 13255, 14706, 14391, 13230, 13861, 14182, 15031, 13486, 12070, 14616, 15083, 14540, 13510, + 14848, 14132, 14509, 15313, 13109, 14941, 14809, 14254, 14783, 14574, 15335, 14124, 14361, 13153, 15333, 14943, + 14852, 13006, 14618, 13407, 13550, 14695, 13479, 12990, 14717, 13885, 15162, 11892, 14564, 13627, 12813, 15219, + 12673, 14710, 13452, 15328, 12750, 14774, 15275, 14612, 14473, 14814, 13776, 15323, 13624, 14591, 11370, 15179, + 14128, 11663, 15175, 15074, 13387, 11748, 10347, 14740, 14474, 11984, 13989, 14134, 14819, 8596, 14885, 13793, + 13188, 14950, 11691, 14337, 9993, 14534, 12173, 10001, 13769, 14449, 14557, 10255, 12353, 14386, 13791, 14133, + 13554, 14121, 13689, 14393, 14219, 14391, 14285, 12708, 13974, 14540, 13963, 11042, 12815, 12325, 14744, 11303, + 12979, 14960, 14465, 15327, 14910, 10312, 14917, 15076, 11177, 13309, 14405, 14505, 12587, 13546, 14143, 15316, + 14730, 14871, 15281, 14608, 13992, 13498, 14000, 14356, 14458, 15283, 14909, 14646, 14571, 12908, 14949, 10330, + 15136, 11268, 13786, 14741, 
15309, 8246, 12826, 15301, 15310, 12448, 13889, 14234, 12065, 14523, 13145, 13673, + 15256, 14944, 13677, 6822, 14959, 15082, 14336, 14493, 14451, 12639, 14787, 12494, 14901, 14521, 13825, 11704, + 14213, 13468, 8270, 14639, 14922, 15171, 14568, 15200, 13597, 12208, 14591, 12391, 12797, 15199, 15298, 8659, + 14714, 8839, 11921, 11797, 14506, 13753, 12782, 15324, 12624, 12718, 14455, 12916, 9614, 15305, 13374, 14832, + 15307, 14982, 14430, 14505, 11792, 13557, 15199, 12732, 13432, 14064, 14205, 13808, 14860, 14101, 13933, 7197, + 12805, 12505, 14649, 14322, 14316, 15054, 14374, 15046, 11307, 13665, 14401, 13014, 13494, 15247, 14445, 11984, + 14078, 15160, 14260, 14458, 13738, 14724, 11833, 12443, 12350, 14095, 5343, 12100, 15081, 14526, 12888, 10379, + 13401, 14092, 11822, 14844, 14836, 14959, 14181, 13790, 11805, 15124, 15337, 13132, 9982, 14342, 13752, 12307, + 10652, 11203, 14161, 14786, 14118, 12857, 15335, 15023, 14659, 11850, 10724, 13263, 14500, 12197, 14489, 14234, + 13788, 15065, 13942, 13466, 14323, 14011, 13719, 13863, 15017, 12232, 14749, 13858, 15276, 8359, 12184, 14638, + 13660, 14963, 13558, 11283, 13359, 14942, 14448, 13561, 14483, 13871, 13696, 9100, 15004, 11600, 12642, 15261, + 13859, 14581, 14861, 14793, 14115, 11901, 13835, 13676, 10483, 14750, 14994, 14539, 13280, 13375, 14899, 14419, + 13676, 14980, 12456, 13981, 15089, 11301, 14748, 11942, 13684, 6185, 15150, 14427, 13471, 15303, 10470, 14012, + 15258, 12987, 6917, 15298, 15194, 13333, 14596, 15271, 15131, 13806, 15115, 15062, 14827, 11181, 14302, 11782, + 14096, 12776, 13192, 14793, 14437, 15220, 14243, 14740, 13746, 13668, 12316, 15170, 11398, 12412, 9040, 13676, + 14983, 14570, 14909, 11609, 10250, 14823, 14355, 12876, 13950, 12637, 13016, 14788, 14370, 14340, 11202, 14391, + 13266, 15031, 12178, 12253, 15159, 14494, 14725, 13321, 14328, 11125, 15156, 12571, 14003, 15216, 15310, 15187, + 14146, 15225, 8266, 14487, 12409, 14273, 14519, 12647, 14718, 14669, 14625, 13910, 10871, 12862, 14753, 
14375, + 14592, 14986, 14795, 15070, 14761, 11842, 10666, 13408, 12561, 14611, 11591, 14453, 12804, 14974, 12631, 12813, + 14491, 13583, 13412, 11998, 11448, 14676, 12860, 14657, 14935, 15313, 15337, 14822, 14790, 14537, 14696, 12500, + 13675, 14010, 13471, 14686, 11800, 13479, 14551, 14761, 13319, 14573, 13218, 14396, 14003, 14043, 14430, 14866, + 13624, 15055, 13358, 15103, 14453, 14890, 12167, 12834, 12797, 14018, 13943, 13131, 12335, 14619, 14407, 14600, + 15142, 14725, 11499, 15256, 14836, 11228, 13692, 14337, 14884, 15086, 13081, 12194, 14322, 15103, 15264, 14871, + 13424, 11917, 11380, 15175, 13103, 14099, 15343, 13025, 13874, 13802, 13962, 13409, 13946, 15187, 8437, 12102, + 14953, 14030, 15035, 13818, 13724, 14091, 14769, 13482, 11660, 11967, 15217, 15242, 13425, 14281, 15216, 14382, + 14878, 14463, 13902, 13727, 11159, 12688, 15335, 15180, 15302, 12420, 13891, 14315, 13510, 13458, 14515, 14593, + 13886, 13824, 14974, 14789, 14427, 11954, 10134, 14057, 14801, 14887, 14405, 14171, 11949, 13102, 15176, 12355, + 15305, 14833, 12859, 14603, 14927, 14104, 13028, 14811, 12905, 15260, 14375, 15353, 14018, 14769, 10128, 14204, + 14508, 15226, 14024, 13333, 12437, 15134, 13325, 15003, 12449, 14373, 10355, 12404, 12713, 10631, 11343, 13720, + 10657, 14290, 13678, 14385, 13168, 15070, 8302, 13368, 14465, 14909, 13772, 15163, 13598, 14826, 13292, 12067, + 14382, 15340, 14461, 14263, 11577, 15294, 13806, 14755, 12826, 14111, 13314, 14535, 15016, 9231, 14865, 11133, + 14417, 14887, 14963, 14654, 15190, 15176, 14068, 15022, 15049, 13645, 14124, 13048, 12937, 13726, 9006, 13350, + 13574, 13318, 10800, 14568, 14807, 13567, 13574, 14678, 15327, 14060, 13381, 11425, 14955, 15223, 14853, 14428, + 14630, 13131, 14892, 15188, 12671, 13351, 14871, 14353, 15056, 15177, 14337, 14497, 13598, 14726, 10606, 14644, + 14748, 11886, 14555, 12965, 15010, 13970, 14784, 15197, 13202, 14869, 15148, 14409, 14672, 13496, 13752, 14942, + 13458, 14102, 14200, 15300, 14988, 14609, 15250, 15083, 
11487, 12503, 13343, 14111, 14426, 12896, 14429, 9560, + 13788, 13447, 13512, 13751, 13543, 14773, 14691, 13477, 11593, 14825, 15100, 15203, 15349, 15334, 14857, 14657, + 14198, 13410, 12658, 15164, 14180, 12677, 13635, 14318, 14572, 13283, 13554, 14398, 14048, 6437, 14982, 15222, + 15214, 15162, 14434, 15162, 14235, 15145, 14901, 15349, 14038, 14787, 14923, 14504, 13775, 9438, 14946, 14778, + 10676, 9377, 15349, 14206, 13897, 14325, 14464, 11680, 14351, 14377, 13813, 14468, 13986, 13535, 14793, 15130, + 14849, 14042, 13613, 14494, 14648, 14320, 14229, 14964, 15274, 14172, 12292, 13326, 14413, 13076, 15340, 12400, + 11053, 14266, 14727, 14431, 12769, 14903, 14849, 14191, 14981, 13308, 14962, 15114, 14899, 13937, 12023, 15003, + 9772, 14687, 15021, 15181, 13578, 15238, 14054, 14814, 14394, 13346, 13302, 14833, 14069, 13202, 14988, 14478, + 13526, 13118, 15188, 14705, 13823, 14429, 14404, 14858, 14046, 14662, 15193, 13445, 11796, 13439, 14742, 14968, + 15336, 14946, 13344, 15323, 14994, 15187, 14212, 11989, 14454, 14978, 13894, 14485, 14712, 15180, 15276, 14687, + 11613, 9627, 14679, 11569, 14933, 14097, 14759, 14845, 14645, 13316, 12613, 14707, 12999, 13151, 14708, 13225, + 14770, 13740, 15317, 13361, 14594, 14386, 14474, 14952, 9443, 11937, 14579, 15244, 14480, 12106, 15240, 12565, + 15035, 14483, 13555, 14773, 14443, 15288, 14196, 14694, 13034, 15254, 13918, 15074, 13901, 13547, 9518, 10505, + 14162, 14658, 15166, 13917, 13612, 14880, 13214, 15304, 9578, 13895, 13297, 13410, 13358, 10369, 14428, 10695, + 13116, 14979, 14744, 12680, 13791, 9015, 13791, 14167, 14397, 12071, 15179, 14915, 11111, 13133, 14061, 14808, + 14349, 14732, 14254, 13980, 14507, 13467, 14412, 13975, 13032, 15270, 13494, 14868, 15068, 12596, 14584, 15202, + 14914, 14671, 14955, 13702, 11049, 13784, 13199, 14734, 15115, 14844, 13734, 13705, 13832, 14898, 13881, 14538, + 14200, 13854, 13713, 14137, 15346, 14164, 14576, 15059, 11940, 13786, 13362, 15002, 13630, 14805, 12637, 13318, + 13276, 14540, 
13237, 15301, 14751, 11118, 14138, 15192, 14830, 14012, 14776, 15350, 15349, 14295, 12489, 13817, + 14504, 14915, 13794, 14371, 15114, 13460, 14720, 14887, 15314, 13886, 13471, 12350, 9445, 11062, 14049, 15079, + 14346, 14314, 10317, 11669, 12737, 13085, 15204, 14051, 14396, 15059, 14127, 15096, 12787, 13372, 14590, 12794, + 15252, 15175, 14877, 15204, 12431, 14991, 14652, 14231, 13024, 15178, 11131, 14012, 14696, 13319, 14875, 12262, + 14771, 13530, 10580, 14624, 14325, 13931, 14687, 12389, 13502, 14555, 15112, 13883, 13847, 15121, 14765, 13915, + 10781, 14514, 14971, 11934, 13727, 13608, 15260, 14618, 15265, 12409, 13763, 12692, 14274, 14569, 8732, 15221, + 12906, 14001, 14784, 10496, 14761, 14649, 14735, 15278, 13954, 12444, 13669, 14971, 11496, 14815, 15071, 14637, + 13664, 14839, 15078, 14465, 12970, 14889, 13107, 14946, 13596, 13554, 14500, 14444, 10719, 14536, 14800, 13458, + 13915, 12840, 15031, 13493, 14827, 14601, 15308, 13447, 11525, 11630, 13950, 14932, 14831, 12406, 15165, 9297, + 14829, 14628, 14052, 13836, 15188, 14998, 12584, 12968, 14797, 15277, 13325, 14871, 14970, 12657, 14692, 15166, + 12915, 14672, 14150, 13607, 13951, 15011, 13733, 13365, 14746, 14446, 13913, 12215, 15319, 10442, 13491, 14906, + 14346, 13439, 11702, 15066, 13948, 14486, 14679, 13354, 14672, 11985, 12043, 14489, 11430, 13951, 10323, 11893, + 14723, 14676, 14622, 13421, 13436, 14757, 15203, 14337, 12302, 9581, 14921, 14692, 15274, 13891, 14821, 15322, + 14461, 14493, 13847, 15313, 15217, 15129, 15088, 13922, 14712, 15317, 7829, 14670, 14947, 10872, 14368, 10031, + 12712, 13936, 13693, 14483, 15129, 12058, 11728, 12426, 15254, 14637, 15225, 15246, 14796, 11540, 15213, 14585, + 14434, 14761, 12534, 14919, 11925, 13487, 12562, 15300, 14497, 14980, 14466, 11895, 8587, 14649, 14460, 14814, + 11038, 14660, 12064, 13937, 15343, 15110, 12554, 13086, 14826, 14304, 13637, 13344, 14641, 14928, 12931, 12906, + 8239, 15227, 13949, 14565, 10475, 13637, 10304, 14389, 14407, 11840, 9493, 9763, 
15153, 12111, 10545, 15298, + 13706, 14479, 12828, 14466, 11046, 13797, 15230, 15305, 12021, 15080, 12696, 15269, 13965, 10067, 13321, 14140, + 15226, 14871, 11824, 15039, 13560, 14568, 14566, 12897, 14515, 13160, 15166, 15252, 7708, 14745, 12414, 13861, + 14638, 13357, 15174, 14519, 14118, 12576, 13241, 13737, 14565, 15322, 14039, 7341, 15219, 13904, 13420, 12807, + 12354, 13022, 14677, 13898, 15339, 14891, 13552, 14359, 14616, 11954, 15357, 14418, 14598, 13532, 15069, 14594, + 12465, 14704, 14819, 14915, 14375, 12668, 13149, 10448, 13860, 12194, 14546, 15264, 14888, 14654, 14721, 13300, + 12749, 13289, 14396, 14789, 14652, 14592, 15034, 14955, 14776, 12731, 14417, 14649, 13549, 15026, 12786, 13384, + 13025, 15139, 14431, 14913, 14499, 15298, 13662, 14426, 14174, 12561, 14219, 14780, 14370, 15093, 14842, 12735, + 15298, 12188, 14612, 13871, 15343, 13960, 15205, 15359, 12679, 14357, 14084, 15191, 14933, 13159, 14487, 12669, + 13784, 15006, 13379, 13720, 12721, 14744, 15286, 14938, 11558, 14840, 13528, 11570, 14274, 15349, 14444, 14506, + 14258, 15246, 11433, 15172, 14998, 14997, 15146, 13037, 13020, 15327, 14630, 15147, 14332, 13484, 13429, 14604, + 14531, 14836, 13058, 14116, 13678, 11022, 14555, 14888, 15161, 13921, 13533, 12402, 15302, 14775, 14359, 12997, + 13361, 14668, 15103, 14889, 13763, 14618, 15109, 14784, 14783, 14005, 14624, 15009, 14305, 15059, 12652, 9445, + 14930, 13506, 12445, 14723, 14136, 13521, 14416, 15233, 14662, 14879, 13995, 15208, 15038, 14428, 14783, 14673, + 13898, 14067, 12327, 14441, 15007, 14038, 14850, 12598, 13989, 15177, 13737, 15111, 12872, 15329, 14433, 13998, + 13857, 14901, 13928, 11479, 13866, 13212, 14686, 9810, 14385, 14832, 15320, 14453, 14430, 14159, 13192, 14598, + 14799, 15092, 13657, 13367, 14682, 14055, 14689, 14158, 9214, 14880, 13247, 14894, 14066, 15001, 14417, 14974, + 14560, 14567, 15201, 14736, 10737, 14995, 13964, 14403, 14970, 14129, 13543, 13318, 14508, 13618, 15294, 13819, + 15025, 14491, 15021, 12451, 14627, 
13084, 15089, 14661, 13458, 13738, 14940, 15096, 14319, 14685, 14800, 15278, + 13701, 14821, 11137, 14947, 14616, 14971, 15197, 14929, 12929, 14044, 15108, 13652, 14203, 13551, 13170, 14928, + 14932, 13998, 14130, 12621, 10270, 13220, 12906, 13693, 13916, 13498, 13779, 14371, 11469, 11767, 14122, 13811, + 14379, 14644, 9960, 14725, 13833, 11515, 14972, 14356, 14780, 12621, 15149, 12476, 15220, 15085, 12902, 14224, + 14675, 14876, 13152, 15097, 12720, 13541, 10614, 15011, 14478, 14438, 12623, 13604, 15242, 10264, 13826, 14989, + 14482, 15021, 11027, 13520, 14764, 12688, 15033, 11315, 15151, 13642, 13488, 14971, 14443, 13431, 14703, 12289, + 14976, 13670, 11460, 13879, 14681, 13925, 15253, 7895, 13159, 12588, 14612, 13126, 15076, 15088, 14904, 12790, + 15330, 15009, 14875, 15321, 13641, 9955, 13717, 14057, 15316, 13065, 13927, 14612, 14544, 15316, 11625, 11303, + 13348, 14617, 13322, 15251, 13988, 12943, 12502, 9523, 13964, 12841, 13492, 14945, 14867, 10490, 11438, 14806, + 13134, 14788, 15358, 15085, 13520, 14507, 13876, 13797, 12663, 14889, 11728, 15213, 15035, 14629, 15118, 14879, + 14594, 14094, 13888, 14937, 15140, 13934, 13566, 13052, 13537, 14903, 12614, 11953, 15213, 10369, 15295, 11500, + 14154, 15281, 11812, 13993, 15310, 15015, 14700, 13560, 14868, 13416, 13009, 15099, 14901, 12852, 15235, 11620, + 14404, 14013, 11852, 14657, 14376, 14568, 14671, 14609, 12667, 13639, 11616, 14287, 14804, 14716, 13691, 14912, + 14921, 14688, 15305, 15011, 14198, 12777, 15143, 10422, 13466, 15181, 10639, 14007, 13770, 14377, 14530, 14686, + 12909, 14224, 14394, 15141, 14075, 14369, 8326, 13262, 15020, 15192, 11924, 13605, 12621, 10564, 14331, 10286, + 12949, 13433, 14018, 11543, 14698, 13746, 13448, 15264, 14165, 14166, 14659, 12524, 13440, 14825, 14494, 14008, + 14000, 12398, 13773, 14620, 14894, 14424, 11428, 15182, 13703, 14164, 13823, 15151, 14335, 15194, 14367, 10901, + 9805, 10428, 14177, 10714, 14476, 14863, 14967, 15272, 14375, 14606, 12928, 13898, 11080, 14136, 
14254, 14573, + 13370, 14630, 15270, 11198, 14973, 14503, 12412, 13167, 14447, 13395, 14391, 14594, 13495, 14865, 15195, 12424, + 9981, 10423, 15355, 14831, 14936, 14378, 14490, 13464, 15195, 13900, 13687, 14003, 13186, 13703, 14886, 14268, + 14458, 11267, 14799, 12629, 12863, 12483, 14242, 8057, 14828, 15241, 14689, 15238, 11449, 11062, 14994, 11486, + 13478, 12016, 15296, 15077, 13177, 13704, 11037, 14552, 15223, 14145, 13924, 14681, 14457, 15152, 13853, 11016, + 13174, 14765, 14336, 13722, 13823, 12838, 14987, 12764, 14238, 13775, 14990, 14717, 8219, 14989, 11121, 14012, + 11396, 14965, 14735, 14256, 15033, 13589, 15280, 12833, 14994, 14851, 13277, 14497, 13473, 15231, 14383, 14949, + 15350, 13361, 12319, 13484, 14821, 14450, 14650, 14899, 15001, 15158, 11485, 13605, 14585, 14643, 15327, 14934, + 15067, 11704, 15307, 12577, 14817, 15102, 13603, 14867, 14818, 12929, 14182, 14848, 14684, 13566, 14359, 15074, + 14773, 14643, 14696, 12862, 15327, 14949, 13791, 14818, 14809, 13785, 14380, 14282, 15216, 11689, 14571, 14577, + 14231, 13396, 12055, 15136, 15280, 12832, 14919, 13707, 12915, 14989, 10779, 13492, 14606, 6865, 10007, 15023, + 14826, 15104, 13899, 14526, 14555, 9306, 13541, 13895, 14782, 13351, 14083, 11837, 13822, 13129, 15323, 13342, + 12360, 14806, 14358, 14573, 14065, 14820, 13252, 11134, 12518, 14762, 14943, 9948, 14403, 13924, 15119, 10162, + 11379, 13954, 14708, 14365, 12690, 15182, 14019, 13682, 14474, 14727, 13478, 13974, 13910, 11644, 15032, 10753, + 15319, 14792, 14588, 15315, 14567, 12585, 14618, 13097, 13826, 14500, 15240, 14958, 14215, 15302, 11650, 14507, + 14651, 14569, 13737, 11927, 13773, 14456, 13764, 14204, 12989, 13396, 14855, 13362, 14970, 14151, 14376, 15188, + 15187, 13152, 13493, 14392, 14485, 14448, 14028, 13738, 14304, 11211, 15164, 14861, 14710, 13374, 14625, 14777, + 13846, 14924, 13090, 14834, 14416, 12454, 13348, 12435, 10819, 9911, 14262, 13168, 15024, 13781, 14533, 13579, + 15276, 14856, 12296, 14778, 13268, 14498, 15302, 
14020, 15100, 12247, 12745, 12768, 15074, 14260, 15166, 14458, + 14269, 14672, 15039, 15064, 14576, 10376, 11881, 14600, 15059, 11420, 14948, 14889, 15014, 15021, 15318, 13454, + 15066, 14549, 14733, 13318, 12542, 12356, 13533, 13587, 13157, 14782, 13828, 15277, 10519, 14106, 10041, 12777, + 10947, 12057, 13803, 15028, 12863, 14852, 14720, 15318, 14337, 15251, 13447, 13851, 14400, 12780, 14503, 10472, + 14394, 14448, 14901, 15242, 14314, 15252, 14394, 13292, 13965, 13134, 14263, 9111, 13886, 13680, 15166, 14982, + 14923, 15277, 14616, 13378, 14436, 14923, 12988, 14258, 12796, 13693, 14197, 15314, 7981, 14193, 14836, 12421, + 11760, 12758, 13708, 13544, 14816, 14603, 14948, 14458, 10508, 13431, 14370, 14805, 13339, 15225, 10381, 13216, + 13938, 14998, 14448, 15095, 14379, 14783, 13522, 14002, 13620, 15084, 15301, 14104, 15032, 14996, 14247, 14900, + 12772, 14207, 14447, 11399, 14924, 14308, 14431, 13464, 14346, 14518, 14645, 15127, 13356, 13524, 12219, 13869, + 13918, 14739, 14887, 14625, 13940, 15053, 15191, 14470, 12775, 15100, 13716, 9166, 9767, 14201, 13308, 15173, + 15312, 14880, 13464, 14257, 13527, 15262, 13182, 13766, 11438, 13630, 14938, 15162, 13706, 10830, 13966, 9714, + 11097, 14716, 10458, 14619, 15030, 14937, 12031, 13718, 14491, 15169, 14662, 12177, 14922, 8339, 14087, 15090, + 14528, 15314, 14815, 15233, 15076, 13090, 14261, 14277, 14571, 15314, 14739, 15354, 15353, 13372, 14873, 14602, + 13895, 15053, 13248, 14638, 14214, 13855, 12192, 12507, 13543, 13697, 11544, 14920, 15112, 14200, 13516, 12144, + 15239, 13903, 13547, 14456, 12468, 13580, 15289, 13458, 15292, 14805, 13593, 14423, 13710, 13543, 12061, 12625, + 12906, 13573, 10718, 14809, 13948, 14842, 13043, 15169, 15340, 13857, 13453, 14028, 14760, 14381, 14051, 9550, + 14544, 15187, 14645, 12794, 14417, 12533, 10467, 15118, 14292, 14893, 9593, 13365, 14502, 15263, 15234, 14420, + 14391, 14399, 12012, 13560, 15078, 14614, 13382, 14816, 15303, 10508, 14206, 14303, 13624, 14526, 15184, 15152, + 
14359, 15328, 14635, 14636, 12476, 12582, 14186, 13381, 13582, 13810, 13973, 14019, 15089, 14595, 14626, 13388, + 14481, 10368, 11363, 14111, 15301, 12729, 9157, 14846, 14859, 14192, 15259, 14636, 14429, 14847, 12567, 14974, + 14335, 15337, 15173, 13299, 15135, 12669, 12738, 12320, 13877, 14946, 14683, 14893, 14020, 11840, 14603, 14455, + 14808, 14985, 14562, 14370, 15339, 14566, 11245, 15296, 13333, 13140, 14827, 11879, 14681, 14663, 12510, 11654, + 9362, 9295, 13945, 14321, 13573, 15137, 15083, 14655, 10883, 13525, 14937, 15281, 11331, 14372, 14735, 15086, + 14919, 14876, 14835, 14881, 15070, 11249, 13379, 14430, 15241, 14465, 14665, 14651, 14857, 13354, 14467, 13609, + 12161, 14401, 11231, 15188, 13866, 12652, 14034, 13314, 14527, 13564, 14815, 13823, 15291, 14589, 10756, 14463, + 13327, 12435, 13289, 11547, 14431, 14806, 14714, 14403, 14952, 13837, 11629, 14838, 14682, 15328, 14894, 13981, + 12356, 14936, 11792, 14953, 14078, 14419, 13362, 15126, 14469, 14998, 14623, 14514, 15109, 13236, 15338, 14796, + 14363, 14708, 9147, 11325, 15194, 12691, 10756, 14501, 11613, 12245, 14930, 15235, 14471, 14055, 15172, 12666, + 7153, 10281, 13855, 8940, 14581, 15134, 13575, 14845, 14879, 15241, 11553, 10826, 11151, 14341, 13743, 15275, + 14752, 14655, 12928, 14609, 15109, 13908, 13104, 10931, 13958, 15034, 14464, 15331, 15210, 13800, 14004, 14385, + 12807, 14293, 15130, 10026, 14909, 11753, 11855, 15300, 14740, 14448, 14678, 13176, 14957, 13548, 11036, 14859, + 14565, 13886, 14870, 13544, 15224, 14402, 15037, 14452, 11838, 14678, 14704, 14295, 13214, 13330, 14642, 12648, + 13911, 14761, 10635, 15096, 15134, 12806, 14188, 14432, 10088, 14527, 15168, 12972, 14664, 14313, 12680, 15227, + 14772, 13606, 15239, 14536, 14772, 15325, 15105, 15068, 13435, 11663, 15026, 13465, 13606, 15269, 14873, 15053, + 12008, 13898, 14034, 15092, 14171, 15159, 15332, 13073, 9295, 14197, 14690, 14343, 14716, 15228, 12838, 14270, + 15200, 15206, 14750, 10619, 12576, 14435, 13865, 12886, 13831, 
14295, 15354, 14995, 13860, 14437, 14652, 12911, + 15283, 8662, 12582, 13855, 15186, 12736, 14855, 13967, 10824, 13279, 14478, 13146, 15143, 8844, 14834, 14790, + 13972, 14830, 13885, 14425, 12745, 15176, 14986, 13415, 7892, 13794, 13078, 13341, 13023, 14932, 10760, 8266, + 15045, 15152, 12771, 15295, 9744, 10341, 14006, 14437, 14343, 14363, 14124, 14673, 14748, 13543, 13465, 14092, + 12044, 13575, 14664, 14832, 15198, 14892, 14847, 14350, 13744, 13352, 14917, 15142, 14765, 13739, 13588, 14843, + 14348, 14393, 14511, 12085, 15319, 14413, 12514, 15220, 15288, 14673, 13127, 15032, 13887, 13436, 13614, 15181, + 14785, 13520, 13943, 13981, 13826, 15250, 12810, 12411, 12894, 11885, 9542, 15257, 15227, 13838, 15245, 15020, + 11587, 13470, 12911, 13445, 15114, 14592, 14672, 11281, 14569, 15230, 15071, 14755, 13297, 13466, 14854, 14230, + 13670, 14772, 11247, 12024, 12595, 14902, 15343, 14636, 14261, 14210, 12691, 15080, 13908, 14312, 14762, 14703, + 14734, 9539, 13464, 13903, 14240, 13324, 15018, 14419, 14849, 14777, 14846, 14686, 13758, 10796, 14553, 11714, + 14756, 13880, 13960, 14402, 14128, 14811, 15228, 12191, 15075, 13495, 14819, 14925, 13192, 14451, 13274, 15275, + 15323, 12621, 11673, 14821, 14622, 15041, 13047, 15036, 10176, 12348, 14511, 13909, 15167, 14951, 13826, 14718, + 13549, 12403, 12640, 12506, 14518, 14535, 14266, 15051, 11800, 14010, 14587, 10870, 14941, 10327, 15152, 10337, + 13648, 13867, 12902, 14991, 14370, 14629, 15217, 14745, 12295, 12706, 15232, 13442, 8630, 14903, 14491, 13737, + 14907, 15071, 14567, 15258, 15136, 15130, 14600, 13259, 14739, 13730, 15358, 13428, 11908, 13854, 12784, 14754, + 14483, 14775, 14724, 14036, 14346, 14499, 14789, 14905, 14753, 8575, 14506, 13429, 15291, 14809, 14903, 15045, + 14006, 13194, 13465, 12414, 12671, 11273, 10660, 15328, 14814, 14602, 13783, 14049, 12418, 14695, 13027, 14937, + 14681, 13605, 13845, 12089, 14086, 10033, 11669, 14557, 12607, 14998, 15185, 12627, 14276, 15241, 13228, 10358, + 13409, 9151, 14627, 
13049, 12680, 13822, 10741, 15064, 12809, 14981, 15041, 15279, 12708, 14079, 15317, 14414, + 14601, 14557, 14938, 12146, 14416, 13910, 14677, 14086, 13708, 14937, 14725, 14520, 14885, 14890, 13327, 14939, + 14963, 15118, 14841, 14352, 14741, 14069, 10636, 14639, 14809, 14988, 13902, 12430, 13306, 11641, 11612, 14799, + 12951, 14400, 14959, 14454, 12770, 12959, 14284, 15284, 13606, 13653, 14820, 14467, 14290, 14226, 12310, 13683, + 15104, 14696, 14749, 14911, 14585, 14879, 14650, 8843, 15338, 13357, 14350, 14966, 14999, 15086, 11448, 13030, + 10707, 13334, 13901, 13315, 14179, 12371, 14960, 14702, 15324, 14562, 12546, 14920, 11291, 15026, 6149, 14370, + 15228, 10711, 14710, 14877, 13514, 15109, 15216, 13029, 13480, 14920, 14688, 15212, 14357, 14349, 14947, 14249, + 8497, 15122, 15180, 15138, 13895, 14344, 14807, 14991, 14709, 14855, 13392, 14259, 14835, 6690, 13431, 13839, + 11789, 13741, 15206, 14281, 15153, 15120, 12320, 14380, 13310, 15195, 15094, 15211, 15220, 14425, 13408, 14872, + 12535, 13727, 15180, 13777, 12801, 15025, 15174, 13740, 13392, 13897, 15128, 14520, 14304, 9654, 9955, 14451, + 11428, 12840, 10315, 14146, 14733, 12836, 14864, 13536, 15175, 12420, 13734, 13811, 14916, 14507, 13875, 8736, + 14431, 14762, 13539, 15306, 13059, 14962, 15084, 14230, 14483, 14036, 15039, 13860, 14006, 8216, 14567, 13549, + 13548, 12991, 14344, 14565, 15239, 13519, 14479, 14636, 14791, 14912, 13960, 14679, 14260, 12836, 13943, 15042, + 11486, 14872, 14926, 12127, 13175, 11922, 15029, 15315, 14057, 14687, 13569, 15168, 14395, 15256, 11450, 14752, + 13777, 15196, 14837, 14932, 14778, 11765, 12453, 14228, 14908, 12458, 14760, 14680, 13034, 11972, 15139, 15257, + 14269, 14159, 14162, 13521, 11571, 15010, 9925, 13604, 12404, 11356, 14953, 14756, 14494, 12470, 13291, 14781, + 6478, 13846, 14913, 12481, 11316, 13570, 13279, 15226, 14906, 12050, 13830, 14730, 14299, 11877, 14444, 13879, + 13613, 13359, 14773, 14964, 11519, 15068, 15128, 15060, 5487, 13714, 15070, 15086, 15251, 
8328, 10352, 13899, + 14137, 14444, 13781, 13809, 15068, 12856, 10363, 14643, 11092, 13235, 14408, 15189, 13436, 8483, 15118, 11631, + 14484, 14558, 15341, 11924, 14223, 13364, 11941, 14487, 14574, 13544, 15269, 14172, 12445, 14717, 14765, 15188, + 14632, 14452, 15160, 10320, 14388, 14417, 11495, 14184, 11911, 14745, 15301, 13577, 12666, 11649, 13522, 14941, + 15011, 14662, 11687, 14182, 14973, 14634, 13244, 14482, 13686, 14352, 9511, 13928, 9459, 14468, 13595, 14362, + 14885, 15222, 11612, 14355, 13789, 10362, 14809, 15130, 14418, 14073, 8133, 14650, 15351, 15082, 11600, 8960, + 14525, 14472, 14800, 15036, 14701, 14680, 13779, 13824, 14877, 11908, 14028, 12695, 11500, 10412, 14381, 14627, + 14450, 12677, 14311, 14885, 12916, 15104, 12487, 12439, 13459, 13361, 12719, 14605, 14755, 14239, 15210, 13400, + 15005, 13491, 14017, 14910, 12095, 11920, 13151, 12707, 14757, 15261, 11291, 12581, 13505, 14372, 14647, 13066, + 12799, 14427, 14550, 14696, 13655, 13260, 13530, 15185, 12407, 14458, 12390, 13527, 13036, 14617, 14611, 11559, + 14410, 14857, 14775, 13641, 13624, 14412, 14865, 13968, 13746, 13627, 14572, 14630, 11829, 15219, 14124, 13538, + 15276, 14882, 12360, 15234, 13515, 14449, 14715, 9820, 14178, 13067, 14158, 14851, 14978, 13252, 14157, 13308, + 13264, 13517, 12835, 11757, 15177, 14063, 14770, 9894, 14414, 12257, 14771, 14996, 14905, 14641, 11140, 14101, + 13841, 14426, 13337, 13262, 12724, 15197, 14744, 13912, 14542, 13915, 14371, 13028, 14593, 12464, 14931, 14686, + 13783, 12877, 12380, 14588, 14495, 13109, 13321, 10698, 12889, 13653, 13061, 15053, 488, 9678, 13945, 14703, + 13774, 15259, 12509, 14896, 13119, 14735, 13584, 14393, 13623, 15031, 15016, 12353, 14944, 14758, 13365, 15121, + 13193, 14551, 9648, 14846, 14153, 14671, 14010, 14603, 13492, 13525, 14253, 12228, 12502, 14612, 14449, 14926, + 14178, 13068, 14837, 14524, 12536, 12428, 14925, 15098, 15359, 11961, 13542, 14342, 14524, 15031, 15352, 13670, + 14501, 14376, 15260, 14593, 14066, 13406, 15081, 
14808, 9987, 15003, 14663, 13643, 14804, 14320, 14641, 14576, + 14850, 13807, 14561, 15099, 12708, 13120, 15281, 12429, 15201, 14909, 13660, 14617, 13392, 13707, 13736, 14595, + 14730, 15193, 14647, 14851, 14306, 4437, 13684, 14801, 13414, 13800, 14942, 11626, 15027, 15014, 14759, 15271, + 14721, 11971, 5581, 13344, 12900, 15252, 15341, 14347, 15227, 13319, 14392, 13967, 13026, 12426, 14384, 13472, + 9173, 13720, 14111, 15177, 14383, 14892, 12522, 9901, 15057, 14737, 14781, 13600, 14913, 13574, 15043, 14926, + 13861, 15242, 11026, 14996, 12320, 14325, 15356, 9917, 13857, 14219, 13476, 14398, 14590, 14039, 12854, 13711, + 15170, 12973, 14412, 10768, 14300, 13753, 14219, 14079, 15328, 15000, 14818, 14497, 14840, 14394, 15213, 14325, + 13851, 14310, 14347, 15287, 13933, 14077, 14396, 13788, 13656, 13986, 14004, 15126, 12747, 12677, 15192, 12398, + 12627, 14722, 15261, 14701, 8355, 14475, 15013, 15279, 13528, 13382, 14729, 14936, 15074, 13483, 12740, 14492, + 13882, 14551, 14587, 13040, 15084, 10521, 12410, 11083, 13618, 14415, 14896, 14648, 13407, 13812, 15097, 15129, + 14385, 13760, 14657, 13650, 11208, 12503, 11097, 11988, 12389, 13906, 14324, 15152, 14522, 12305, 10373, 11091, + 14829, 11819, 12706, 15106, 14550, 11676, 14762, 14331, 15089, 11112, 10120, 14753, 13219, 15005, 14629, 10780, + 13413, 15264, 11484, 13682, 13479, 12967, 13689, 11954, 15091, 14894, 13495, 14751, 14624, 15346, 14401, 11164, + 14806, 12399, 12768, 15055, 13484, 14617, 14355, 12313, 14424, 14815, 15168, 15237, 13834, 14496, 14066, 13581, + 15092, 12856, 15268, 13934, 14170, 13347, 13423, 13705, 14959, 14171, 11509, 14528, 15050, 12025, 15081, 14177, + 14260, 10394, 13499, 5464, 10903, 13841, 12823, 15100, 12172, 12707, 14231, 14903, 15304, 13841, 15226, 15346, + 14882, 14219, 12501, 14176, 13893, 14385, 14733, 14032, 14848, 15218, 13468, 15351, 13911, 14425, 14866, 13563, + 13922, 14996, 13820, 14191, 15287, 15063, 15084, 14987, 9748, 14674, 14730, 13502, 13519, 15029, 13518, 14541, + 
14601, 14708, 15126, 14378, 14179, 13722, 15064, 14583, 14443, 14890, 15170, 13838, 13155, 14561, 11554, 14720, + 12568, 15073, 14453, 14056, 14708, 13609, 14677, 14871, 13378, 13542, 15022, 15063, 12890, 14535, 15218, 11153, + 15245, 14404, 15130, 15175, 14367, 14561, 15268, 14827, 14891, 13377, 14702, 9482, 12627, 11424, 13997, 13654, + 13474, 14546, 15326, 14689, 15334, 11449, 14726, 14728, 14615, 14704, 14990, 15289, 13560, 14953, 15339, 14374, + 14346, 14457, 14356, 14983, 13669, 12807, 13463, 15216, 14794, 14357, 13135, 15065, 14388, 13539, 14876, 15208, + 14351, 15083, 14169, 15145, 9370, 10725, 14843, 14689, 13840, 13437, 12802, 10625, 13955, 15292, 10263, 14988, + 15104, 13875, 13348, 13537, 14299, 12824, 12922, 13824, 13682, 14753, 15212, 7261, 14531, 9335, 14446, 14923, + 14759, 14691, 14862, 14850, 13353, 15105, 13144, 13270, 15222, 9449, 12951, 13398, 15131, 13806, 12188, 13652, + 12954, 13910, 11741, 14596, 14657, 14754, 15069, 15250, 12754, 13513, 14427, 13553, 13154, 15285, 15214, 13970, + 14923, 9637, 14808, 15327, 14491, 15123, 14394, 15328, 13769, 15173, 14350, 14069, 14513, 14582, 13391, 14647, + 14192, 14890, 13121, 14972, 14489, 13116, 11305, 14292, 13874, 15110, 14338, 14551, 14693, 14728, 14211, 14954, + 13657, 14617, 14788, 14406, 14491, 13349, 14570, 15316, 15083, 14044, 14738, 14472, 14020, 15260, 15077, 14997, + 12785, 12867, 14977, 12968, 13616, 14210, 14708, 12811, 15331, 12493, 14129, 13588, 13360, 13096, 15036, 12903, + 14620, 14195, 15201, 14887, 14937, 14320, 14042, 14670, 14723, 14739, 15356, 14217, 15119, 13064, 13647, 13389, + 15212, 12524, 14960, 13768, 14767, 13757, 15111, 12785, 14118, 15275, 14423, 15354, 14075, 15301, 15057, 14510, + 15265, 14065, 13165, 15340, 15342, 12955, 11728, 14355, 14944, 14571, 15056, 14540, 15116, 10546, 14462, 12304, + 14692, 15130, 14755, 13700, 12518, 10757, 12359, 14116, 13008, 14498, 12678, 14402, 13265, 15143, 14757, 14556, + 13242, 15205, 14376, 13700, 14429, 9053, 14636, 15281, 14612, 
14258, 14490, 15072, 15101, 15194, 15132, 14144, + 14505, 15138, 15131, 13624, 14818, 13617, 13744, 13567, 14475, 13412, 14327, 14468, 14236, 14660, 13455, 13534, + 12920, 15201, 14236, 13974, 14759, 11913, 11899, 15297, 13802, 15270, 13194, 15082, 10277, 13201, 14185, 14895, + 12670, 13918, 15137, 15122, 14366, 12838, 15090, 13389, 15154, 14669, 13184, 12690, 14434, 14992, 13571, 8573, + 15158, 15175, 13621, 15181, 15000, 14453, 15067, 15179, 15148, 14195, 14899, 14473, 14538, 15279, 14915, 13118, + 14374, 15287, 13521, 12019, 14587, 13669, 15329, 14644, 10533, 13570, 14966, 14334, 13555, 14537, 13890, 11772, + 14278, 14334, 15071, 14412, 13232, 15172, 13742, 14349, 13203, 15200, 13991, 13159, 14396, 14511, 13914, 14120, + 14604, 11212, 15002, 14951, 13471, 12093, 15091, 13795, 14343, 13499, 14479, 13200, 15339, 13868, 14514, 15276, + 13549, 11200, 10937, 14992, 14008, 14712, 15246, 13597, 14446, 13004, 12179, 14399, 13260, 14576, 13079, 13198, + 14457, 14649, 15248, 15359, 11742, 14851, 13370, 14785, 14300, 10619, 15018, 14706, 14972, 13099, 15207, 12499, + 15268, 14629, 14564, 13816, 13753, 13507, 10619, 14803, 11740, 14743, 13686, 14298, 14329, 12678, 14499, 12939, + 14724, 13766, 14975, 15013, 14099, 15356, 14710, 14957, 10848, 15044, 14009, 10245, 15312, 12377, 13212, 15116, + 14785, 13033, 14266, 14222, 13888, 13106, 14366, 14604, 12147, 15127, 14874, 14980, 14634, 15357, 15011, 14765, + 11376, 15076, 14683, 14273, 14782, 13726, 12551, 14050, 14425, 13330, 14249, 12829, 12666, 12802, 14544, 13590, + 11015, 13287, 14452, 11412, 14293, 12544, 12373, 7161, 15168, 12029, 13397, 15336, 13704, 14422, 12772, 12952, + 13841, 11557, 12034, 12161, 14399, 14957, 14388, 12955, 15306, 15352, 14447, 12877, 13823, 15333, 15048, 12669, + 12551, 15058, 14994, 15357, 14359, 14312, 14969, 14351, 14527, 12895, 15178, 11467, 15102, 12797, 14111, 14636, + 13783, 9539, 13220, 15268, 12975, 14672, 12408, 11658, 15073, 13693, 14093, 10475, 14501, 12821, 12325, 14112, + 13847, 14735, 
9671, 14514, 11729, 14977, 12408, 14818, 13506, 11228, 14439, 14335, 14490, 12710, 12359, 13309, + 15329, 13388, 13885, 13101, 13624, 15072, 13528, 15201, 12692, 14665, 12972, 14097, 14950, 12766, 12495, 13363, + 13826, 14949, 15252, 11551, 13519, 12951, 6589, 14840, 14630, 13414, 12477, 14981, 15326, 13993, 14407, 14613, + 13907, 14922, 14060, 14998, 13157, 14335, 13848, 14931, 14727, 14162, 14404, 14083, 11279, 14959, 14047, 9035, + 15216, 14569, 13997, 14773, 11712, 14708, 14355, 12276, 13620, 14639, 14663, 14917, 15189, 14401, 13682, 14840, + 11474, 10460, 10872, 13869, 14414, 14625, 13999, 15280, 14529, 15269, 14812, 15231, 14387, 9084, 13614, 14597, + 13976, 14996, 12548, 13622, 14453, 13686, 14472, 14634, 12966, 14442, 14425, 15300, 14188, 8885, 13121, 13973, + 14647, 14743, 13287, 10000, 12286, 14908, 11821, 13498, 14125, 14452, 12146, 14601, 13622, 14673, 14337, 14544, + 14783, 14311, 12716, 14894, 14454, 14836, 14497, 13245, 12322, 13927, 13144, 15118, 13606, 13342, 15008, 13188, + 13771, 15114, 15187, 15122, 7930, 15270, 13913, 14598, 10311, 14538, 14372, 15314, 13842, 14679, 15119, 13137, + 14157, 15326, 13524, 14605, 14732, 13358, 12023, 9651, 15356, 13742, 13606, 13058, 14571, 15139, 13606, 13558, + 14419, 14436, 14333, 14185, 14722, 14838, 15022, 13809, 15337, 14251, 12421, 10515, 11738, 14990, 14546, 14750, + 13837, 15133, 14671, 14943, 14720, 14903, 13155, 12321, 13441, 13233, 15267, 13802, 12803, 15325, 11665, 15156, + 14302, 14966, 15330, 9897, 13195, 15231, 14356, 14568, 13480, 14516, 14269, 14508, 13050, 14123, 11173, 14904, + 13432, 14929, 14482, 12792, 14084, 15258, 14489, 14587, 12760, 13447, 15038, 14837, 12888, 13869, 13497, 12856, + 10325, 15318, 14421, 15134, 13359, 15344, 15246, 13572, 14142, 13486, 14805, 13099, 14142, 14628, 14965, 14880, + 13816, 15264, 10616, 13425, 14751, 15169, 14664, 14581, 15141, 14840, 12979, 14669, 13675, 10827, 15317, 14582, + 8548, 14353, 14827, 14400, 14523, 13113, 12317, 15118, 13940, 15109, 15014, 14548, 
13518, 13558, 15055, 10030, + 13913, 6039, 13832, 14763, 13489, 14714, 14698, 15270, 12745, 13885, 7862, 14786, 12068, 13375, 13349, 14218, + 13737, 15269, 13857, 14733, 15357, 14277, 15152, 12790, 11969, 14360, 15320, 14701, 9810, 14747, 15089, 13404, + 14125, 13067, 13196, 14485, 11637, 13380, 14666, 13208, 14370, 15359, 11496, 13155, 12697, 14841, 14895, 14786, + 12602, 14441, 14605, 15211, 14634, 14377, 13960, 12794, 12773, 14397, 15114, 15273, 14811, 15112, 14441, 12463, + 14871, 15250, 14291, 15143, 14847, 15140, 14389, 13266, 15098, 14272, 14505, 14761, 10450, 14574, 13442, 4731, + 12731, 15249, 14511, 13871, 13399, 14371, 13226, 13796, 13214, 14776, 15102, 10682, 15285, 11951, 14108, 14120, + 14393, 14670, 12520, 12488, 12821, 14478, 11697, 11659, 12309, 15062, 14265, 12642, 14022, 13408, 10861, 14684, + 12901, 14474, 15288, 13453, 15023, 7694, 13426, 13328, 15163, 14159, 11945, 14205, 15090, 14558, 13630, 14486, + 11309, 14553, 15016, 14802, 15271, 15263, 14951, 15328, 13911, 14998, 11542, 12180, 14822, 13322, 15137, 15090, + 9782, 14971, 14741, 14528, 14457, 14645, 11365, 14422, 13488, 13568, 14727, 15117, 14279, 14877, 14837, 14485, + 12349, 14348, 10731, 15212, 14105, 15320, 14552, 12035, 9232, 14954, 15140, 14992, 14590, 12854, 12775, 13903, + 14907, 14795, 13531, 14159, 14366, 15092, 13844, 14225, 12006, 14918, 15226, 13438, 12685, 8376, 12036, 15062, + 15306, 15210, 14792, 14989, 15033, 14562, 14838, 12668, 14894, 14056, 15340, 15032, 10400, 15215, 14179, 14583, + 13473, 14940, 14635, 14718, 14784, 14584, 15345, 14461, 11431, 14659, 15118, 11298, 14514, 13086, 14428, 14834, + 14396, 15087, 14928, 10931, 14640, 14849, 13734, 14536, 15327, 14698, 14471, 15318, 14673, 14555, 14893, 13600, + 14267, 13327, 15121, 14604, 15153, 15126, 14943, 14557, 12463, 15215, 14480, 12027, 15141, 7905, 15284, 14405, + 14792, 14603, 13488, 11833, 11816, 10189, 15162, 15254, 14566, 15246, 15071, 13840, 14887, 14033, 13736, 14898, + 13769, 14469, 13344, 8689, 12225, 11454, 
12240, 14596, 15260, 12922, 13464, 14543, 12451, 13610, 12330, 10511, + 14544, 15073, 11880, 14536, 14545, 13422, 14817, 15020, 14653, 15294, 15092, 15221, 12289, 14547, 13884, 8813, + 14935, 14965, 14611, 14773, 13628, 13852, 13434, 13550, 11992, 10915, 15324, 15011, 13838, 15023, 15093, 14061, + 13531, 15205, 13559, 13963, 13764, 12389, 14758, 13526, 15222, 15340, 13065, 13427, 14689, 13566, 11851, 14426, + 13086, 14347, 14404, 13789, 13908, 12863, 13255, 13264, 14163, 15029, 11294, 14772, 15307, 15119, 13559, 15164, + 14221, 13876, 12933, 15134, 14165, 12726, 14529, 14485, 13804, 15352, 12748, 15050, 15270, 13816, 12864, 15254, + 14407, 14113, 14536, 14817, 11790, 14394, 11227, 15147, 15356, 12007, 14067, 12983, 14810, 14235, 14863, 13646, + 13020, 15262, 15131, 14339, 12319, 14990, 11515, 14812, 13664, 13359, 13931, 13132, 15330, 14122, 13699, 14131, + 15295, 15087, 14512, 15135, 12768, 12064, 15283, 14976, 12577, 12420, 14118, 15331, 14634, 13783, 14758, 15223, + 15169, 10894, 14692, 11596, 13846, 15066, 13842, 11404, 14606, 15316, 13375, 13484, 12568, 15296, 14680, 15241, + 15157, 15182, 14387, 13636, 14862, 12757, 13529, 12663, 15336, 15241, 14726, 14980, 12396, 13679, 15283, 14616, + 14507, 13852, 14693, 14650, 14477, 15265, 14968, 9778, 15215, 13970, 13434, 12491, 14711, 14649, 9522, 12717, + 15061, 11586, 11684, 14161, 15044, 12813, 13018, 12973, 14698, 14990, 12903, 11889, 13669, 10549, 12182, 12524, + 15356, 14661, 15294, 13458, 11028, 14837, 15082, 14945, 14369, 11829, 11404, 14424, 14357, 14435, 12784, 13582, + 14392, 12737, 14204, 15017, 15337, 13358, 15063, 15115, 15184, 11863, 11343, 3128, 14309, 13106, 12329, 13044, + 11142, 15087, 13848, 14761, 13360, 15307, 14869, 11102, 14916, 14512, 12419, 14351, 14296, 14674, 9985, 14253, + 11794, 13592, 14152, 14825, 13907, 14887, 12451, 11991, 15322, 15222, 13334, 13545, 14563, 14821, 14621, 14553, + 15163, 13090, 15208, 13635, 11411, 14925, 13904, 10423, 10794, 14697, 13910, 13520, 14750, 7998, 15131, 
14698, + 14799, 14776, 14565, 14338, 13930, 13450, 13551, 14668, 13106, 11174, 14529, 13591, 13246, 15171, 14535, 14233, + 14656, 14286, 14529, 14293, 14684, 15231, 13815, 15334, 13810, 14066, 15138, 14874, 10611, 13998, 10581, 15205, + 14504, 14140, 15037, 14910, 9814, 12540, 15160, 14513, 15115, 14096, 14595, 14695, 14713, 14655, 14208, 12300, + 12421, 14998, 13709, 10409, 12684, 14475, 8785, 14667, 15312, 15317, 15306, 9687, 13763, 14961, 14691, 13153, + 15229, 13711, 15112, 11353, 13314, 14640, 12347, 14606, 14352, 14785, 14879, 12444, 12834, 12793, 12923, 15225, + 14400, 14903, 13742, 14622, 14893, 13585, 14162, 14063, 15247, 14711, 14844, 12258, 12406, 15305, 14267, 13546, + 14421, 14817, 14910, 15127, 14956, 13677, 13406, 12874, 15244, 12492, 14473, 12906, 13443, 15172, 14467, 13359, + 14653, 15234, 14492, 14976, 15251, 13458, 14668, 14263, 15047, 9996, 8807, 13803, 15269, 14812, 14465, 10338, + 13810, 14352, 13318, 14758, 12139, 15294, 14334, 15332, 11445, 14962, 12190, 13345, 15208, 10431, 14990, 10582, + 14454, 10961, 14265, 11411, 13652, 14561, 14717, 13653, 14414, 14389, 13902, 14676, 12659, 13691, 13024, 11659, + 14118, 12606, 13846, 13877, 14849, 12421, 14844, 11375, 13712, 9539, 14695, 13414, 15205, 14330, 11329, 15268, + 11608, 14673, 14592, 13459, 13619, 14533, 14524, 14751, 14562, 15041, 15052, 13886, 15036, 13753, 15040, 14315, + 13616, 13926, 11838, 14695, 12622, 15199, 14729, 15124, 13905, 12556, 12302, 13702, 14504, 15121, 15147, 15132, + 11057, 15001, 12265, 15056, 13357, 14406, 15328, 11747, 14402, 15263, 14950, 13657, 15152, 14903, 13980, 14895, + 14451, 14209, 14580, 13105, 14989, 10282, 13502, 14611, 9227, 12543, 12396, 14042, 11708, 14465, 11481, 10828, + 15116, 15266, 14614, 14847, 15225, 11416, 13976, 14832, 15066, 10634, 14400, 14411, 15253, 14795, 14474, 11597, + 14523, 14347, 13489, 14907, 14823, 13046, 9837, 15056, 14046, 14050, 14784, 15189, 14434, 13957, 13194, 11468, + 14432, 13786, 14596, 13847, 13824, 12249, 13558, 15309, 
15313, 15358, 15082, 14868, 14993, 10613, 14391, 14508, + 13472, 14670, 14707, 10412, 13884, 11672, 14537, 14041, 14661, 10731, 14382, 14062, 14442, 15274, 13480, 12749, + 9619, 14884, 14629, 13352, 15158, 7724, 13326, 14538, 15304, 13029, 14113, 14694, 15034, 14622, 14014, 15249, + 14685, 15346, 15298, 14876, 12868, 14049, 13960, 14202, 10007, 14594, 14383, 11994, 14337, 14495, 14231, 14574, + 14348, 14949, 15054, 15357, 14580, 12445, 14573, 13879, 14770, 14820, 13708, 13473, 13585, 12446, 15215, 14541, + 13968, 15129, 12878, 11435, 11631, 14981, 14863, 14054, 14114, 14718, 14994, 14867, 14436, 14643, 11366, 13678, + 13235, 13210, 11896, 14642, 15344, 8683, 14325, 12385, 14110, 14970, 14240, 15276, 14695, 14338, 13483, 13374, + 14878, 12209, 13243, 14040, 15172, 12773, 11528, 14091, 13443, 14486, 12590, 14348, 14475, 14122, 14628, 14475, + 15187, 15164, 12662, 14781, 15134, 13784, 13990, 10150, 15124, 14538, 14728, 14988, 11828, 15344, 9906, 15172, + 12308, 11868, 14368, 12772, 14297, 13011, 11320, 12383, 10400, 12966, 13185, 15191, 11635, 14630, 13447, 14743, + 14966, 14019, 13794, 13695, 12787, 14628, 13791, 10818, 11432, 13987, 15078, 12614, 14144, 13423, 14077, 14174, + 12306, 13496, 13728, 14765, 14354, 12372, 13711, 12154, 15181, 14556, 13837, 13969, 13474, 14866, 14609, 14646, + 14357, 14577, 15318, 10482, 13337, 11546, 13747, 11615, 13916, 8656, 13604, 12634, 14888, 14507, 14477, 14492, + 15020, 14021, 14826, 15310, 13569, 13872, 14548, 13504, 15208, 14845, 12387, 11341, 13916, 15020, 14917, 14220, + 14462, 15083, 13563, 14569, 13759, 14879, 15307, 12764, 14985, 10516, 15128, 14572, 15024, 13732, 14101, 13481, + 15213, 13284, 12697, 14845, 14988, 15115, 15061, 14119, 14468, 11859, 14050, 14789, 13665, 14450, 13712, 14397, + 14424, 14742, 14995, 14350, 12994, 13854, 14523, 15042, 14584, 13614, 15061, 13357, 15028, 15357, 14913, 15188, + 14066, 14279, 14756, 15177, 14412, 11419, 13805, 9140, 15256, 10492, 13346, 14137, 14845, 12205, 14422, 14823, + 14643, 
14985, 14954, 15261, 12339, 15181, 14444, 13562, 14284, 14952, 13485, 14872, 11907, 15305, 15091, 13361, + 15352, 15291, 12388, 15301, 13784, 9182, 14371, 14587, 14924, 13885, 12855, 15091, 14299, 14729, 13426, 12777, + 14699, 14588, 14489, 14458, 11590, 14361, 14041, 15147, 14395, 14936, 14644, 14470, 12124, 14157, 15208, 12925, + 11761, 14590, 14959, 13232, 15042, 14609, 14435, 15042, 11910, 14258, 15014, 14591, 11995, 15077, 14653, 15140, + 12902, 14018, 12162, 9546, 11685, 14575, 14727, 13806, 15189, 14865, 12169, 13505, 14961, 11917, 15353, 14272, + 13759, 15002, 14447, 11874, 15284, 14926, 12594, 13358, 14552, 15088, 15290, 12805, 15001, 14606, 15297, 15112, + 14410, 14522, 14546, 14833, 14188, 13987, 11356, 14836, 14356, 14922, 15104, 15329, 12495, 15113, 15235, 13459, + 14860, 15295, 12686, 14088, 13081, 15046, 14847, 15042, 15139, 14176, 12130, 14562, 14446, 15108, 15100, 15057, + 14371, 13822, 10892, 14943, 15219, 15303, 14460, 8730, 13407, 13806, 14375, 10806, 12393, 15089, 15034, 14551, + 13726, 14828, 14196, 14458, 14496, 15020, 13872, 14835, 12001, 12291, 5224, 15095, 15192, 13827, 14058, 13765, + 10880, 14168, 14393, 14712, 15251, 14548, 13486, 15323, 11294, 13879, 14565, 15254, 13910, 13996, 14629, 12812, + 14116, 14227, 14418, 14843, 11772, 14970, 13569, 14285, 11588, 14688, 13946, 14046, 15193, 14470, 11561, 12528, + 13693, 10778, 12521, 14892, 14749, 14678, 14803, 12652, 14890, 15261, 12669, 14452, 12936, 12680, 11828, 12254, + 12125, 15044, 12234, 14026, 12943, 13331, 12118, 13598, 14796, 14346, 15263, 12318, 11116, 13249, 14058, 14546, + 12642, 12629, 14723, 15325, 12817, 13527, 11144, 11501, 10303, 14425, 14763, 13340, 14157, 12445, 14088, 13949, + 12240, 14080, 14540, 15353, 13836, 12957, 14864, 14543, 12547, 15247, 11091, 14694, 12313, 14385, 14521, 15336, + 12590, 10424, 10200, 12606, 14245, 13587, 13240, 14629, 13355, 13996, 12840, 14600, 15277, 11731, 12965, 13884, + 13363, 14426, 15002, 15096, 14562, 14375, 15346, 12390, 13427, 13040, 
14336, 14381, 14858, 14563, 13130, 13684, + 13603, 13668, 14059, 11875, 14453, 14789, 14445, 13499, 12419, 13315, 13775, 14896, 15200, 14355, 9507, 14143, + 13429, 14446, 14843, 12114, 14557, 14052, 15330, 13971, 14655, 14219, 14669, 12726, 9530, 14560, 12534, 14922, + 13454, 12550, 15327, 15127, 14498, 15281, 15358, 15077, 14169, 15221, 12690, 12922, 13980, 14791, 13545, 11650, + 13604, 15125, 14683, 13929, 12527, 14919, 14714, 14130, 13680, 12468, 15111, 15240, 14705, 12248, 15115, 14593, + 15251, 7523, 15344, 14104, 15128, 14411, 9334, 11175, 15247, 14114, 14752, 14382, 14550, 14870, 11999, 11661, + 15005, 13785, 13906, 11559, 12926, 14792, 12342, 14954, 12372, 15329, 14973, 13852, 11919, 15324, 14485, 15325, + 15223, 15237, 14678, 11782, 13574, 15200, 14526, 14710, 10920, 13792, 15308, 14766, 13793, 13495, 15065, 14914, + 13183, 14431, 14264, 15199, 12917, 15155, 14472, 14987, 13278, 12097, 14973, 14709, 14410, 12411, 15169, 14934, + 14776, 13774, 11856, 13471, 14419, 14615, 11475, 11209, 14783, 14430, 13697, 14858, 15120, 14817, 13895, 14366, + 14356, 14713, 14502, 15144, 14634, 14304, 14791, 14511, 14158, 14411, 9654, 14908, 14608, 14066, 13738, 14661, + 14798, 13605, 14107, 14311, 13950, 12235, 14382, 13157, 15106, 13105, 15194, 14157, 14967, 14133, 14373, 14183, + 13824, 13855, 13408, 14877, 15177, 14343, 15218, 15024, 13614, 11802, 14495, 12414, 14696, 11983, 11542, 12244, + 14384, 12474, 15127, 13080, 15321, 14770, 12918, 12297, 14959, 12500, 11447, 14177, 14672, 15099, 12775, 14540, + 14283, 11717, 13171, 14780, 15280, 14855, 12265, 14185, 14542, 14364, 15224, 10281, 15091, 15033, 12108, 15081, + 13397, 9565, 13754, 14267, 13441, 15003, 14502, 12401, 11568, 14955, 15063, 14820, 14188, 14440, 14451, 10311, + 14111, 12791, 13537, 14924, 12819, 12260, 12425, 13714, 13097, 13440, 12879, 14399, 14297, 12826, 14055, 13003, + 13813, 15048, 15087, 14951, 14545, 15103, 14061, 14657, 14010, 14015, 14761, 14812, 11055, 14067, 10060, 14206, + 14775, 13125, 10538, 
14267, 14296, 13554, 13708, 13533, 14935, 14791, 10406, 14418, 14107, 10288, 10558, 15215, + 14529, 14925, 13944, 13867, 15355, 9219, 15236, 14208, 15199, 14758, 14618, 14683, 14289, 14167, 12079, 13336, + 12245, 11701, 14432, 13211, 15313, 13896, 12759, 15175, 9915, 12641, 15168, 14389, 14402, 14854, 11735, 14490, + 14381, 15123, 14559, 14662, 13665, 13840, 13535, 14721, 15079, 14575, 14963, 15254, 14729, 14088, 14752, 15086, + 14194, 14624, 14943, 14077, 12921, 14353, 14560, 10307, 15183, 14564, 15197, 14355, 14669, 14425, 13703, 15316, + 14604, 13204, 14833, 15083, 12929, 11465, 12702, 14823, 14376, 14556, 12402, 14791, 14632, 14447, 14466, 14895, + 14257, 12938, 14416, 13967, 13597, 13827, 12415, 15132, 14755, 14538, 14277, 13756, 14137, 9417, 14786, 14685, + 12904, 14869, 14838, 14490, 13757, 13792, 12172, 14206, 14679, 11719, 15016, 14680, 14811, 14191, 14161, 15218, + 14319, 10342, 15185, 15038, 14415, 14403, 15136, 13858, 13253, 13873, 12498, 14942, 15137, 14171, 14181, 14961, + 11235, 14897, 14988, 13513, 13556, 13678, 15043, 14050, 13796, 14946, 13176, 13445, 14101, 13040, 13095, 14430, + 13680, 13719, 15008, 14825, 14064, 12982, 11072, 15183, 14343, 14987, 14687, 14830, 13627, 11943, 14611, 13803, + 15193, 13728, 14711, 12537, 15097, 11908, 14292, 15167, 15083, 15306, 14348, 14374, 14371, 14573, 14759, 15040, + 14072, 9395, 15028, 14623, 13062, 14932, 12487, 13070, 14951, 13833, 14417, 15282, 12928, 15300, 15307, 6530, + 14693, 14786, 12433, 15230, 10134, 14762, 14823, 15104, 14908, 15305, 14673, 13774, 13363, 13131, 13428, 14372, + 13650, 14777, 12366, 15025, 13366, 11466, 14524, 14840, 14721, 15245, 13469, 15139, 14743, 14509, 14008, 13732, + 14311, 11522, 12520, 15218, 14174, 14425, 14326, 15216, 12578, 15227, 13467, 14996, 14833, 14565, 12594, 15176, + 15169, 15070, 14704, 15170, 13824, 11076, 14890, 14627, 14983, 14693, 12817, 14083, 14794, 14375, 14999, 14611, + 15314, 10252, 13584, 13587, 13970, 14676, 13677, 13523, 13610, 14250, 8164, 15049, 
13088, 13054, 13219, 11860, + 11796, 14283, 14796, 14427, 13868, 14647, 14959, 13748, 12832, 14978, 15189, 14725, 11698, 12275, 14827, 15119, + 14748, 14902, 15221, 15199, 10250, 13016, 13315, 13817, 14427, 12269, 14673, 13744, 13917, 14069, 13800, 13803, + 14983, 14062, 12491, 11819, 13529, 13670, 14367, 14986, 14444, 14790, 14602, 15257, 14899, 14712, 14367, 12742, + 13134, 14697, 14478, 14801, 14547, 12864, 15292, 14942, 14905, 15087, 12861, 14985, 15198, 14866, 13909, 14348, + 14458, 12126, 15279, 14993, 12443, 11882, 14152, 15214, 11490, 11935, 13054, 14632, 13162, 14472, 15200, 9355, + 9112, 12256, 13831, 14021, 12411, 15321, 14924, 15026, 13410, 14383, 8596, 13868, 13773, 15199, 12404, 12350, + 15234, 14432, 11915, 13288, 13848, 15000, 14379, 10558, 13792, 13511, 15358, 14503, 13864, 11564, 15000, 11536, + 14563, 14700, 13868, 14448, 13735, 14414, 15001, 15186, 14845, 11506, 14754, 14929, 11794, 13234, 14566, 14305, + 10508, 15005, 14733, 14647, 12972, 11255, 14088, 15301, 9731, 14361, 14200, 15303, 15281, 14619, 14405, 11437, + 13915, 14682, 14275, 14256, 14459, 15177, 14852, 14999, 14702, 13975, 13517, 13810, 13954, 12766, 13620, 13660, + 14667, 13511, 10081, 14620, 14844, 14351, 12743, 13372, 13787, 15274, 14280, 15211, 13085, 12469, 11697, 12091, + 15284, 13588, 14527, 13760, 14974, 15279, 15035, 14465, 13339, 12871, 14831, 15081, 15133, 13779, 12415, 12632, + 15020, 11200, 13829, 14979, 13799, 14602, 14481, 14689, 14644, 15272, 14690, 14136, 11395, 11837, 14865, 14689, + 14251, 15215, 14740, 15079, 12161, 14806, 13894, 12066, 14401, 14766, 14146, 14497, 15250, 14089, 14353, 15289, + 14628, 12589, 11885, 15235, 12925, 14993, 13626, 9470, 14579, 15181, 12904, 14678, 14991, 15255, 15147, 14004, + 11023, 14151, 9495, 13617, 11629, 15070, 15039, 14533, 13446, 15344, 14561, 8538, 14711, 14732, 13732, 14455, + 14735, 12675, 13627, 15211, 13349, 14582, 14899, 13391, 14491, 14416, 14715, 13336, 15225, 14058, 9023, 13865, + 15217, 14662, 15063, 14723, 13997, 
14173, 15091, 11360, 12371, 12333, 14665, 14451, 14746, 14941, 14814, 14293, + 12795, 11673, 12312, 12763, 15304, 14938, 13726, 14854, 13990, 14314, 15250, 8614, 14682, 12883, 15198, 10247, + 14642, 15081, 14389, 14707, 14863, 13353, 14657, 14463, 13870, 14877, 14714, 13628, 11608, 15261, 13189, 10856, + 14453, 14872, 15092, 14621, 14238, 15298, 14472, 13425, 13324, 15263, 8623, 14405, 13322, 12514, 14486, 15242, + 14357, 15297, 15230, 14578, 14450, 14888, 14598, 14645, 12761, 13102, 15092, 15312, 14430, 12320, 15235, 14728, + 8258, 14171, 15086, 15335, 14893, 14366, 15030, 15165, 15085, 14616, 9070, 11500, 11443, 13628, 4585, 14488, + 14644, 13799, 14540, 12711, 13843, 15187, 14423, 15049, 11485, 12594, 14116, 15091, 14440, 14323, 12476, 13588, + 10737, 12400, 14797, 14817, 15231, 13713, 13427, 14349, 12706, 14806, 11278, 14900, 12293, 10174, 14246, 12848, + 14102, 15176, 9436, 15116, 14769, 14590, 14281, 14258, 13160, 14664, 10172, 14693, 11227, 14678, 14296, 15311, + 14720, 15091, 14435, 15055, 12811, 14643, 14756, 14143, 13881, 13587, 11733, 14984, 14527, 13372, 14892, 12561, + 14482, 13884, 15299, 11248, 12712, 13238, 15241, 15067, 15036, 14522, 14054, 12950, 13732, 14005, 12742, 13505, + 11987, 13178, 14769, 15121, 15121, 15029, 15051, 12636, 13924, 14568, 14827, 14572, 13805, 13030, 15166, 15181, + 12909, 15240, 14544, 14571, 13777, 15230, 14738, 14193, 12364, 14341, 15139, 14620, 14863, 13736, 15186, 14442, + 14506, 13935, 13658, 13540, 14135, 10414, 12410, 14023, 14648, 14387, 14721, 14118, 14812, 14879, 12139, 14484, + 13969, 14216, 14251, 14356, 12644, 14453, 14881, 14994, 12697, 7579, 15028, 14053, 13352, 14941, 12495, 13927, + 14720, 14562, 13932, 13468, 14825, 13952, 14919, 13389, 14941, 14643, 12259, 13110, 15209, 11843, 12369, 13892, + 13189, 14522, 15140, 15054, 6955, 15345, 13866, 15040, 13634, 13993, 14337, 14618, 14563, 11691, 14397, 14574, + 13554, 15285, 14760, 13451, 14018, 10452, 14601, 12131, 14373, 15252, 14781, 12088, 11605, 11699, 15357, 
14371, + 13676, 14899, 13861, 13756, 10380, 14988, 12993, 15309, 15164, 13442, 14556, 11852, 14348, 12145, 13740, 13848, + 14858, 15252, 15243, 14399, 14694, 13032, 13946, 14586, 13950, 13573, 11903, 15326, 14127, 14610, 14859, 15062, + 13795, 13170, 15250, 15132, 13980, 15173, 15349, 12022, 15027, 14527, 14402, 10490, 11969, 6246, 13901, 10311, + 13507, 13513, 14805, 10576, 15016, 14919, 13682, 12267, 15098, 12363, 14151, 13745, 14183, 13676, 12302, 10926, + 9438, 14542, 14343, 15358, 9857, 14891, 14815, 14511, 14156, 14926, 13486, 14452, 12575, 14000, 13749, 14820, + 10798, 14804, 13416, 14981, 15079, 15294, 13792, 12833, 14475, 15133, 12420, 14873, 13691, 14442, 14546, 15017, + 13347, 14913, 13794, 13849, 14891, 11244, 13863, 15051, 13028, 15066, 14889, 14800, 14000, 14976, 7456, 15293, + 13510, 12411, 14618, 14541, 13678, 10897, 14893, 13939, 15122, 14869, 14694, 14227, 14278, 12379, 14785, 13870, + 13356, 15260, 12154, 14667, 14616, 11313, 13822, 13213, 13998, 14422, 15042, 13800, 12761, 15293, 14258, 14733, + 14685, 12741, 14038, 13644, 15269, 14861, 13282, 14666, 14735, 13014, 14897, 13534, 13643, 12925, 14841, 14837, + 14963, 10082, 14906, 14589, 11680, 13500, 14957, 14385, 13476, 11944, 15022, 15328, 13530, 12014, 12981, 14814, + 11986, 14692, 15305, 14431, 14000, 12482, 14759, 14262, 13468, 14766, 12993, 14655, 14618, 13305, 13706, 11920, + 13159, 9970, 15160, 13491, 13174, 14803, 14357, 13170, 11658, 13979, 11752, 15311, 14746, 8899, 15113, 14663, + 14253, 13762, 15035, 14210, 14647, 14616, 15112, 14860, 15040, 14544, 12579, 13056, 14029, 14420, 11825, 14778, + 13967, 13009, 14593, 15060, 14264, 14862, 14478, 13670, 13644, 14756, 15261, 14665, 14288, 13558, 13489, 13436, + 13686, 15091, 13531, 13962, 15007, 13117, 13183, 13625, 14746, 12551, 13844, 15178, 14364, 13914, 14472, 14816, + 15214, 14413, 12445, 13819, 13997, 14136, 14494, 15233, 15049, 9435, 14718, 14500, 13344, 14502, 14982, 12094, + 14740, 14471, 13473, 15092, 10391, 14936, 14376, 15150, 
15042, 15002, 14433, 14511, 12462, 13380, 14999, 12098, + 14470, 14863, 13747, 14144, 15014, 13006, 15247, 7810, 14510, 14346, 13588, 15111, 14093, 13819, 12390, 14707, + 13805, 11610, 15302, 12946, 15177, 12578, 11745, 11867, 13839, 15019, 15249, 14635, 12366, 13436, 14919, 7247, + 15346, 13377, 13718, 13375, 15207, 14101, 13152, 14702, 14939, 15028, 14493, 14213, 13392, 14432, 15275, 12691, + 11547, 15236, 14906, 14758, 14607, 14358, 10504, 14410, 11973, 13555, 14980, 14388, 9035, 13217, 13010, 10637, + 14606, 14149, 14878, 13249, 14498, 13400, 14457, 13512, 13736, 14716, 14244, 13849, 11735, 12843, 14508, 14323, + 14827, 14677, 12475, 15088, 14969, 13729, 14191, 13745, 13789, 11922, 11970, 13596, 13249, 14028, 15261, 15152, + 15251, 12693, 15205, 12962, 15265, 14749, 12900, 13525, 14737, 14754, 15156, 14407, 14153, 14269, 13281, 14774, + 13959, 14244, 14814, 12649, 15130, 11918, 14669, 12575, 14231, 13489, 14826, 13655, 12372, 11431, 14259, 15200, + 15257, 13912, 11523, 13665, 15001, 14750, 12646, 14699, 12536, 14180, 13522, 12376, 14897, 14572, 13935, 15233, + 14812, 10270, 14984, 13371, 14445, 14965, 14655, 14772, 14724, 14697, 13481, 13959, 15242, 14978, 14498, 15331, + 14682, 14923, 11412, 7584, 15019, 12386, 11513, 14619, 14904, 14440, 13956, 15104, 14923, 14388, 11490, 13507, + 14315, 15168, 14733, 14444, 14129, 14903, 12288, 15239, 14985, 14501, 15324, 13331, 14695, 14797, 15190, 13283, + 13331, 15264, 15162, 15196, 14556, 14718, 15045, 9410, 15206, 11612, 12673, 15051, 10622, 14113, 14028, 14999, + 15232, 13953, 14981, 15188, 12950, 12545, 14216, 11573, 14199, 13164, 15139, 15169, 15241, 12437, 14526, 13261, + 12691, 14890, 13716, 13437, 12456, 12957, 11957, 15084, 14454, 10577, 12667, 11406, 14330, 13810, 14721, 12837, + 13636, 15251, 15045, 13013, 14126, 13717, 10524, 14263, 13458, 11829, 5037, 14991, 14510, 15284, 15244, 10896, + 14547, 14851, 13086, 12775, 13755, 14845, 11898, 14646, 13735, 15070, 13546, 11660, 15048, 12705, 14778, 14996, + 14670, 
15031, 15360, 9329, 15106, 14441, 14433, 14951, 14988, 13446, 15187, 13439, 14795, 12692, 15246, 14987, + 15216, 13311, 14740, 14966, 10952, 14152, 15062, 14514, 14427, 15135, 13718, 12833, 10294, 12555, 14320, 13921, + 13071, 11066, 15064, 15186, 14640, 15007, 13620, 14721, 14457, 14646, 14828, 15030, 12496, 15008, 12743, 14727, + 14763, 13165, 15051, 11819, 14187, 13455, 14521, 15145, 13347, 13047, 13710, 13440, 11247, 6536, 14690, 14594, + 15024, 12646, 15288, 12862, 14646, 14414, 12616, 12733, 15050, 15282, 9182, 14796, 14091, 14589, 15066, 13134, + 7373, 9541, 10544, 14476, 13666, 15100, 14442, 13500, 14010, 14511, 13099, 13895, 15220, 14552, 14014, 14772, + 14369, 12924, 14334, 13345, 12638, 14353, 9803, 13634, 13094, 11646, 15172, 14701, 6894, 11650, 13152, 15055, + 14696, 12585, 14312, 12396, 12352, 15355, 14249, 15210, 15294, 12298, 12423, 9397, 13565, 15180, 12313, 14460, + 15225, 14402, 13424, 13864, 14430, 9666, 14947, 12937, 13553, 11419, 15130, 13498, 12895, 11594, 13440, 14945, + 12921, 11823, 14967, 12820, 14397, 13362, 14894, 12326, 15314, 14473, 14709, 15302, 14672, 13499, 12535, 13612, + 12454, 15032, 14790, 15245, 14763, 13858, 14015, 15020, 12905, 13625, 14863, 14526, 12307, 15358, 14574, 14811, + 15264, 13361, 14687, 15206, 11792, 13353, 15109, 14592, 13608, 11991, 14778, 14519, 14687, 10735, 12592, 15129, + 14221, 13525, 13967, 14444, 6415, 13885, 15119, 12683, 15252, 13395, 14821, 14881, 12316, 13708, 15215, 14876, + 13402, 13457, 14060, 13392, 15207, 15030, 14607, 13826, 13630, 13218, 14047, 14024, 14430, 11878, 14734, 14383, + 15047, 14988, 14702, 14740, 13972, 14740, 14943, 14483, 8978, 13601, 12226, 14815, 15126, 11037, 14593, 13444, + 11406, 13908, 14549, 13229, 12130, 13888, 9535, 11493, 12853, 14372, 14443, 15340, 14876, 14813, 13595, 11475, + 14472, 12857, 13588, 10980, 14634, 14576, 14991, 15349, 11651, 14337, 14665, 14737, 15156, 11807, 11741, 14437, + 12903, 13801, 13266, 14785, 15056, 14400, 15092, 13890, 14833, 14835, 14003, 
14685, 15352, 14604, 13575, 12462, + 14355, 13602, 8538, 14945, 14050, 12953, 14219, 14863, 13603, 12865, 13567, 11887, 13396, 15319, 15194, 14852, + 14536, 14936, 9612, 13746, 13993, 13991, 14446, 14947, 15110, 14523, 12920, 14231, 14608, 14368, 10782, 14975, + 14757, 14038, 14818, 15347, 8254, 14706, 12817, 14925, 12729, 14135, 13831, 12480, 14527, 14848, 13379, 14908, + 14043, 14488, 14332, 10550, 14709, 11877, 12829, 9280, 6776, 15268, 14677, 14353, 11286, 12534, 14090, 15226, + 15323, 12786, 13390, 15245, 14419, 13627, 14941, 15226, 13854, 14610, 8666, 12523, 15320, 15162, 15173, 15180, + 13845, 14474, 13978, 14526, 14561, 14843, 15167, 14835, 13874, 12713, 14941, 10472, 15249, 15202, 14787, 12812, + 14359, 12846, 13505, 13665, 12384, 15228, 13337, 13287, 13552, 13500, 14712, 13758, 14950, 15087, 13554, 14864, + 14965, 14978, 14776, 12600, 14634, 14685, 11818, 13502, 15173, 14537, 14730, 9306, 13559, 14590, 14721, 12916, + 9251, 15298, 14773, 14863, 9089, 14620, 13917, 14492, 15215, 12240, 13856, 12531, 14546, 13573, 15185, 13672, + 12178, 14952, 13774, 12134, 14345, 14834, 14954, 13422, 15215, 13909, 13424, 14504, 13729, 13921, 11745, 14678, + 13498, 14345, 13637, 13495, 14795, 7365, 15260, 15191, 14937, 15220, 15280, 13366, 11495, 12171, 13973, 14492, + 13213, 13367, 13024, 13504, 10058, 14584, 15000, 14338, 12321, 13543, 13784, 12567, 9797, 14442, 14461, 15212, + 11220, 12530, 14336, 13015, 13379, 14609, 13786, 13386, 14611, 11689, 15041, 14419, 15094, 14421, 13538, 15338, + 14415, 12741, 14932, 12936, 14801, 15291, 15111, 14808, 11586, 15132, 14658, 14557, 13864, 10442, 11873, 10694, + 14470, 12337, 14977, 13320, 13493, 15224, 14591, 13431, 14898, 12564, 12600, 13655, 15344, 13446, 10367, 13723, + 13837, 14875, 11211, 14537, 12863, 15017, 14408, 10342, 14846, 15080, 13590, 12453, 15124, 15270, 15101, 13224, + 13422, 14348, 11344, 14982, 13922, 11722, 15268, 12400, 12539, 8847, 14357, 15044, 13324, 15344, 10506, 15252, + 15164, 14177, 14996, 14392, 13331, 
15031, 14373, 13033, 15285, 13527, 13896, 14150, 11288, 14562, 12476, 9333, + 14050, 14572, 15300, 13479, 10928, 15190, 14116, 15330, 15168, 14484, 14404, 13847, 13693, 13301, 13079, 14670, + 14683, 12812, 10642, 14010, 13727, 14993, 15233, 13686, 15005, 14707, 14701, 13251, 13048, 13247, 14925, 14738, + 14092, 14262, 14050, 15057, 14281, 13714, 14186, 14923, 13624, 14398, 13876, 12544, 14557, 9926, 14162, 12797, + 14454, 12728, 15338, 15127, 10281, 15276, 10542, 14091, 14789, 14389, 15153, 14655, 14658, 14373, 15074, 10670, + 13986, 13812, 14841, 14691, 14725, 14503, 15067, 15174, 14356, 15062, 14406, 14831, 15064, 14397, 13930, 12408, + 13229, 14450, 11582, 12878, 14547, 14293, 13676, 13588, 15178, 15249, 13768, 13431, 14260, 14550, 12639, 13755, + 14463, 13971, 14497, 12868, 15066, 12363, 14212, 14613, 15129, 11521, 14446, 14070, 11329, 15132, 11906, 14590, + 13733, 10306, 11695, 14412, 12723, 13628, 15201, 14266, 14262, 10804, 11677, 14592, 14370, 14732, 14458, 9051, + 14444, 11351, 15091, 13942, 14282, 14754, 12674, 12365, 15120, 15151, 14697, 14886, 12577, 15093, 13473, 14989, + 15231, 12454, 14252, 12386, 13479, 13479, 15243, 9455, 15217, 15217, 14664, 9015, 14953, 14145, 15190, 14908, + 13498, 15215, 11125, 15069, 14042, 14501, 13671, 14907, 14370, 14509, 14833, 13526, 13823, 14714, 10898, 14392, + 15063, 14244, 15025, 12832, 13839, 14054, 13714, 13553, 14841, 12508, 13486, 12294, 11981, 13344, 12688, 14847, + 14644, 10947, 14769, 14926, 14850, 14657, 9510, 15258, 13654, 15123, 14757, 12764, 13660, 13431, 13829, 14739, + 14696, 14316, 13447, 12819, 13811, 14663, 12700, 8846, 14562, 13627, 12566, 12509, 14256, 12830, 11695, 14486, + 13367, 14777, 15075, 12582, 14174, 14117, 6974, 15284, 12294, 14134, 13907, 14823, 14356, 15149, 14845, 15282, + 11821, 13797, 13831, 15096, 11385, 14150, 13476, 14995, 14408, 14837, 13164, 13560, 15120, 13624, 14786, 14691, + 12070, 14904, 14564, 14619, 15138, 9982, 12457, 13807, 13605, 13532, 12458, 15185, 14069, 8538, 12716, 
12537, + 14399, 14888, 12272, 12245, 13273, 14936, 15353, 13614, 12825, 14104, 13912, 12470, 14435, 14806, 15093, 14566, + 14343, 13746, 14750, 14639, 14374, 14161, 14783, 7229, 8053, 13045, 15248, 14923, 14501, 15341, 15306, 14955, + 14963, 12735, 13954, 14949, 14659, 13900, 13556, 14201, 14449, 15132, 15050, 14130, 11604, 14780, 14882, 13829, + 10299, 14997, 13573, 14930, 15203, 14707, 14470, 14176, 15145, 15137, 14380, 15207, 14344, 11908, 13906, 13689, + 15057, 13896, 13752, 13533, 15295, 13424, 13572, 13988, 15234, 14552, 11397, 10000, 14149, 15052, 14933, 15167, + 11404, 14283, 11518, 10374, 14369, 15207, 13931, 12305, 13420, 13339, 15158, 13564, 14613, 14935, 14462, 15180, + 13073, 13751, 14788, 14754, 12337, 14931, 15333, 13949, 13904, 10053, 11383, 14950, 14702, 15334, 14841, 15231, + 15317, 15123, 10558, 12813, 13652, 10487, 14628, 12879, 14142, 11785, 13986, 14716, 14776, 13341, 15057, 14409, + 15311, 15070, 14258, 12985, 14123, 15165, 8762, 14413, 14575, 14811, 14451, 15335, 10071, 14362, 14422, 12300, + 13534, 12758, 13115, 13599, 15231, 12645, 13362, 14987, 13860, 12636, 14027, 14958, 15290, 15262, 14871, 14343, + 15070, 14135, 13289, 14799, 14364, 13318, 14551, 14334, 14164, 15303, 15355, 13432, 14589, 14095, 12680, 15288, + 12746, 13257, 15309, 15019, 13537, 14849, 13816, 9390, 14992, 11165, 14853, 11120, 15198, 14699, 10638, 14482, + 12592, 12446, 13873, 10198, 13993, 12169, 13688, 10250, 15194, 15318, 14787, 12495, 14614, 14876, 10352, 14573, + 14514, 12691, 15226, 14750, 12163, 15116, 13410, 13366, 14748, 15285, 15007, 10318, 14841, 15304, 12540, 14632, + 15210, 14032, 14574, 14965, 12946, 12404, 13559, 14463, 15052, 13361, 14476, 14064, 14931, 15186, 13085, 14773, + 15248, 12472, 12734, 15257, 14960, 15072, 12646, 14890, 14870, 14672, 15253, 15277, 14743, 14011, 14954, 13138, + 13804, 13753, 15317, 15271, 13790, 12592, 15332, 11425, 15086, 11552, 15318, 15119, 14474, 13797, 14387, 13211, + 13081, 14829, 14086, 10247, 14745, 15083, 15264, 14407, 
15240, 15192, 13795, 13759, 9862, 14725, 11447, 14511, + 15326, 15065, 15312, 12271, 14630, 13321, 11779, 15271, 14921, 15279, 13553, 13977, 14271, 13678, 15034, 14027, + 12327, 14717, 13611, 15016, 15170, 15180, 14988, 13599, 9837, 13464, 13840, 14344, 14768, 15237, 13941, 9843, + 15001, 14689, 14434, 14791, 14833, 14338, 14694, 15327, 15245, 14132, 14852, 14796, 15187, 13826, 11881, 13465, + 14927, 13875, 13642, 15132, 14249, 13032, 13449, 12969, 13083, 14128, 12419, 12197, 11589, 13403, 13130, 14234, + 12872, 14860, 13842, 14835, 14528, 14633, 14473, 14801, 14642, 14904, 14925, 14838, 11272, 14627, 15092, 12713, + 13520, 14725, 14976, 14559, 14657, 14669, 13769, 15296, 15281, 11241, 14912, 14357, 14874, 15275, 13559, 15333, + 12455, 13920, 13976, 12239, 14835, 14442, 14656, 14517, 12459, 13207, 12913, 13168, 12971, 14058, 14440, 13595, + 14579, 13296, 15011, 13415, 14562, 13669, 12725, 14845, 13524, 13320, 15230, 15049, 14974, 13027, 13402, 15034, + 12041, 12493, 14902, 13997, 14602, 14071, 14354, 15111, 12754, 13865, 14510, 13498, 14505, 14668, 14092, 14582, + 14667, 10166, 13082, 14501, 13961, 15020, 13490, 13556, 14571, 14134, 13807, 14819, 13605, 14538, 14549, 12542, + 14878, 14728, 14204, 14075, 14465, 14039, 14543, 13728, 11713, 15340, 15050, 13626, 15239, 14898, 13674, 14959, + 13996, 14561, 11073, 14376, 10670, 13760, 13738, 14923, 12617, 14972, 14823, 12638, 14485, 13996, 14510, 15076, + 14871, 15326, 14346, 14776, 13459, 14644, 14608, 9160, 14462, 14299, 14936, 15098, 15249, 12594, 14989, 11333, + 13930, 14893, 14644, 11010, 13423, 12762, 14508, 13564, 10919, 9744, 14533, 15282, 12966, 14442, 14248, 11966, + 14544, 15330, 14135, 14466, 11191, 13398, 15145, 7513, 13122, 12627, 15202, 15013, 13466, 13953, 14949, 12545, + 14079, 13640, 11926, 15159, 15316, 11273, 14634, 8056, 14571, 15271, 13653, 12713, 14914, 12477, 8216, 13849, + 13352, 8594, 13479, 14845, 12993, 13927, 12971, 9372, 14026, 14486, 12059, 14592, 13147, 13569, 13091, 13705, + 14840, 14862, 
12294, 13342, 13652, 14387, 14722, 13857, 15185, 13794, 13995, 11921, 12783, 14638, 14244, 14493, + 14595, 13628, 14200, 14844, 12203, 12460, 12061, 15309, 14756, 15250, 10910, 12927, 12344, 13749, 14095, 15237, + 14949, 15189, 9116, 14520, 11425, 14311, 14643, 14820, 13725, 14817, 15142, 14322, 14235, 14732, 15230, 13320, + 14232, 12220, 13524, 13508, 14887, 14122, 14452, 13828, 15106, 14550, 14751, 13546, 14943, 13391, 8335, 14874, + 14825, 15051, 12854, 14791, 14962, 12729, 14689, 12664, 14851, 15113, 14721, 13104, 15196, 15340, 14859, 13333, + 13664, 14774, 15166, 13315, 13716, 12465, 15050, 14839, 15337, 15033, 15219, 13263, 14863, 14367, 13403, 14568, + 13419, 14903, 14183, 11807, 14567, 13882, 15329, 14858, 13789, 14910, 13785, 14989, 14484, 14189, 13657, 13414, + 14565, 15303, 11476, 15251, 13769, 14936, 15188, 14607, 14008, 13524, 12408, 13368, 12848, 15076, 12941, 14498, + 13006, 12685, 15211, 12599, 14508, 12453, 14217, 14850, 14043, 15021, 13040, 13784, 12799, 14989, 14373, 14868, + 15289, 14599, 14880, 14788, 15304, 14668, 15088, 11286, 14188, 14064, 14858, 14844, 14960, 9604, 15340, 10247, + 11775, 15282, 15194, 14235, 14722, 12805, 14211, 15135, 13926, 14947, 14748, 14595, 13419, 15232, 13945, 15260, + 12754, 14078, 9682, 15002, 14867, 14583, 14538, 14136, 11795, 14982, 15239, 14766, 14516, 15154, 12441, 15281, + 14224, 14954, 13554, 14102, 14185, 14373, 14697, 15206, 15006, 12928, 15049, 14826, 14908, 14392, 9639, 8660, + 13952, 12544, 15249, 12828, 13726, 14882, 13538, 13990, 11388, 13929, 6739, 14728, 11957, 14734, 12376, 14052, + 12395, 11737, 15118, 14157, 13746, 14853, 15077, 13700, 11773, 14899, 13400, 9448, 12381, 13451, 14464, 14965, + 15063, 14257, 15080, 11966, 7366, 15325, 14975, 15338, 14349, 14678, 14182, 15122, 14543, 15177, 14752, 15312, + 14013, 12934, 14690, 13964, 14605, 14248, 10868, 15112, 14816, 14364, 13234, 11503, 14690, 14845, 14677, 14388, + 15183, 12586, 11864, 14253, 14590, 13955, 14639, 14417, 15318, 14867, 14348, 13571, 
14696, 14561, 15039, 10297, + 15111, 14276, 14113, 15354, 14988, 13391, 15273, 15326, 12575, 12090, 11843, 15352, 14817, 15236, 12395, 15351, + 12605, 15161, 11721, 12692, 13591, 15281, 12443, 14426, 11772, 13851, 14355, 10726, 15323, 15254, 14334, 15310, + 14306, 15233, 13633, 14688, 14781, 13169, 14054, 14846, 13967, 14769, 12972, 14973, 14842, 13048, 15312, 14913, + 15037, 15120, 15230, 11844, 14965, 15326, 14402, 14262, 13732, 14415, 13058, 12454, 13852, 14558, 14987, 15102, + 14037, 14891, 9883, 14691, 15137, 14931, 14776, 14001, 14350, 13612, 15226, 14038, 14591, 12744, 14674, 12673, + 15330, 14513, 10597, 15330, 13147, 10887, 15214, 14576, 10706, 13532, 14960, 11714, 14312, 14270, 14114, 12681, + 14383, 14417, 12336, 12532, 14516, 13116, 15258, 14363, 14925, 14088, 14761, 14939, 13405, 13720, 14497, 10872, + 11921, 12967, 14114, 13572, 13698, 15350, 13717, 14353, 14214, 14287, 13622, 13630, 14398, 15070, 13897, 10540, + 14136, 13485, 13483, 12448, 14762, 15304, 13768, 12014, 14767, 13125, 13896, 14527, 14653, 14506, 13714, 11589, + 15342, 13927, 13267, 13905, 11456, 13774, 14888, 14062, 14003, 11343, 15297, 15263, 14618, 14543, 10524, 13262, + 12175, 15096, 12410, 14789, 15196, 14163, 14095, 14314, 13477, 7411, 15059, 13797, 11515, 13774, 15086, 10699, + 14520, 14991, 14369, 11591, 9291, 11718, 12680, 14114, 14214, 14446, 11315, 12692, 11227, 14702, 15139, 14815, + 14400, 13568, 12727, 15148, 15203, 15178, 12410, 13459, 14026, 15140, 14849, 15052, 10375, 15151, 14597, 15117, + 14783, 14435, 13534, 13704, 13688, 14753, 15105, 13397, 14613, 11410, 13635, 5758, 14934, 14781, 13534, 14560, + 13889, 13880, 14847, 15039, 14764, 14970, 14338, 14031, 15079, 13494, 15340, 15244, 15049, 13811, 14904, 11817, + 14199, 13847, 14599, 14996, 14665, 14686, 14503, 10288, 14204, 14739, 14683, 12994, 10492, 14625, 11604, 15259, + 14375, 14577, 14183, 14842, 14686, 14789, 14523, 13525, 13880, 12609, 14836, 15219, 11991, 14978, 15105, 14023, + 15058, 13521, 15226, 15204, 14064, 
14376, 13586, 11531, 14661, 13652, 10519, 14755, 11836, 14620, 15218, 13958, + 12989, 12829, 13557, 10392, 14191, 13916, 14352, 15299, 13625, 13561, 13223, 14422, 12322, 12026, 14963, 13352, + 12511, 13112, 14479, 15130, 15242, 14463, 12301, 12675, 11290, 14718, 13564, 13279, 12493, 15069, 14611, 9861, + 14057, 14902, 14851, 14617, 13487, 12662, 15065, 14468, 15122, 13677, 14832, 14362, 15112, 14008, 13368, 14359, + 14557, 12605, 14231, 15325, 15100, 14238, 13872, 14602, 15130, 15301, 12158, 15011, 15231, 13467, 14157, 14855, + 11924, 13868, 15133, 13951, 14682, 13823, 14580, 14956, 10694, 14554, 14486, 13070, 11968, 12866, 13687, 14232, + 11072, 13617, 12997, 14892, 14327, 13950, 14609, 15243, 15308, 14690, 15193, 13800, 14202, 15157, 14795, 14988, + 15289, 12914, 12083, 13789, 11450, 13846, 14621, 14738, 14174, 14061, 14761, 9959, 14888, 14619, 13744, 14597, + 13426, 12568, 14503, 15167, 13491, 12789, 14501, 14993, 15135, 15238, 12899, 12935, 14107, 12301, 11993, 13853, + 14876, 14494, 14424, 14347, 14525, 13951, 14404, 14258, 14974, 12348, 15102, 14903, 14703, 14377, 14870, 15322, + 12739, 13497, 14593, 12426, 14269, 15166, 14728, 10574, 12572, 14660, 14567, 14929, 13330, 9836, 12072, 14527, + 13022, 11167, 11077, 15013, 15036, 14382, 15079, 12369, 13610, 10483, 10608, 14960, 14812, 15278, 14605, 15252, + 14054, 13667, 14513, 12845, 15046, 12638, 11349, 14934, 15256, 14431, 15211, 13640, 15234, 14641, 15326, 14609, + 14273, 13504, 12543, 15343, 14368, 14945, 14430, 13519, 14556, 14786, 12600, 14368, 15204, 15270, 11126, 14766, + 11847, 14593, 14189, 12628, 14843, 15137, 15178, 15123, 12934, 14364, 14734, 14266, 14650, 13647, 13476, 14888, + 14824, 12081, 15037, 14215, 15245, 14339, 14687, 14710, 8754, 13893, 13338, 15155, 9929, 10658, 14357, 12815, + 13023, 10557, 12851, 14374, 14093, 15167, 15270, 12773, 13335, 14718, 11237, 14729, 13329, 13444, 13902, 14809, + 12282, 13905, 11499, 13500, 14398, 14899, 13873, 13994, 15191, 14419, 13336, 14505, 14796, 8447, 
13812, 11870, + 15336, 13118, 14737, 15224, 13678, 13421, 12417, 14877, 14856, 13692, 13457, 14992, 14716, 13877, 9291, 14908, + 13741, 13339, 13449, 15017, 11706, 14049, 14224, 14877, 14729, 15143, 14225, 12749, 14723, 12657, 14301, 15343, + 12743, 14778, 14409, 14744, 14902, 13100, 14159, 13389, 13069, 12557, 15046, 12236, 14049, 14047, 15236, 13692, + 12511, 13487, 14919, 15193, 14851, 11878, 14708, 15300, 13993, 12675, 13366, 15037, 14379, 14856, 13703, 15181, + 14088, 14119, 14791, 13020, 15072, 14936, 11780, 14997, 13881, 14034, 15101, 12699, 14706, 15040, 15233, 14568, + 15108, 12038, 15088, 15253, 13273, 14528, 13996, 14825, 5976, 12735, 14899, 14832, 14438, 14691, 14990, 15142, + 14932, 14647, 13674, 13243, 14560, 15074, 14735, 12577, 14009, 14516, 13668, 14752, 12693, 14463, 13849, 15286, + 14428, 14182, 15001, 12621, 15018, 14438, 13721, 14458, 12403, 14580, 14681, 14559, 11668, 14942, 14436, 12180, + 15213, 15358, 15153, 12486, 14546, 13011, 14087, 14864, 15132, 14676, 14564, 12745, 14669, 13184, 15269, 15052, + 12923, 13713, 13857, 14433, 12955, 13287, 14877, 14892, 14661, 15051, 14544, 15112, 10729, 9498, 15089, 6388, + 15257, 14300, 14726, 12454, 14476, 15215, 15094, 14684, 13820, 14335, 13704, 14915, 14479, 14382, 15145, 14552, + 14640, 15122, 14365, 14320, 11483, 15292, 14472, 15169, 12787, 14430, 13898, 13424, 14963, 11054, 15230, 9038, + 14688, 14706, 13740, 14463, 14793, 12599, 10548, 14313, 13019, 14932, 15321, 14826, 13839, 15313, 14907, 15246, + 15334, 14371, 15119, 13413, 11900, 13417, 15279, 14636, 14428, 15254, 13480, 14412, 14290, 15125, 14242, 13107, + 13101, 11794, 15200, 15117, 14457, 15314, 15154, 13505, 14558, 15067, 15092, 14946, 13960, 15155, 14424, 12587, + 13077, 11771, 13783, 11577, 11894, 13280, 15289, 9972, 13410, 14111, 15204, 15196, 14845, 14697, 14846, 14971, + 13382, 15032, 12104, 12175, 12431, 14944, 13079, 14441, 14253, 14939, 15056, 14946, 11638, 15185, 14453, 14231, + 13066, 14592, 14939, 14042, 9099, 13509, 15194, 
15047, 13638, 12861, 14543, 12118, 14981, 12475, 14528, 13596, + 13941, 12826, 14186, 13528, 14204, 12306, 14517, 14942, 13975, 13763, 12536, 11592, 15084, 11293, 14991, 11453, + 14368, 13579, 13739, 14647, 14452, 14270, 14878, 14607, 15321, 14622, 14882, 15139, 13879, 14719, 14947, 14221, + 14467, 14432, 14768, 14846, 13547, 14952, 14257, 13548, 15136, 14922, 14258, 13988, 14170, 12134, 12309, 14794, + 13191, 13817, 14003, 14336, 14408, 14651, 12221, 14649, 14026, 12874, 14764, 13582, 15328, 14983, 14361, 15357, + 14611, 13394, 13592, 14807, 14897, 13455, 14661, 12330, 13763, 15192, 15122, 13599, 14048, 14338, 11801, 14113, + 15134, 15293, 14835, 14701, 12563, 14493, 14047, 14509, 15104, 14544, 13858, 14900, 13770, 13511, 14790, 12921, + 15008, 14386, 15290, 10322, 15094, 14495, 14378, 14020, 14858, 13785, 12133, 12208, 12884, 14817, 15060, 14782, + 13057, 11037, 11545, 13586, 8880, 14112, 12143, 14464, 15169, 15238, 14673, 15025, 13597, 15295, 13716, 14124, + 15258, 15021, 14102, 13440, 14985, 14105, 13936, 13840, 15088, 13399, 14402, 10490, 13196, 8942, 12330, 14065, + 13503, 14600, 15263, 14813, 12437, 14559, 13589, 14403, 15301, 13318, 14526, 15251, 15032, 13567, 8620, 15191, + 13554, 14628, 14516, 12778, 13682, 15335, 15302, 13251, 12875, 14566, 14983, 9683, 14615, 14853, 13339, 14179, + 14885, 14951, 14346, 11563, 14604, 14687, 14766, 13801, 14387, 15063, 14221, 14587, 14503, 14237, 15118, 9359, + 12345, 15234, 15019, 14452, 14732, 10734, 15187, 15061, 13677, 14848, 13364, 7745, 13517, 15205, 11324, 14555, + 14822, 14603, 13206, 15312, 14766, 15321, 14302, 14503, 13929, 13813, 14816, 14718, 12665, 13696, 15350, 13699, + 11598, 15197, 11464, 14379, 12474, 15003, 15287, 15302, 15302, 14356, 15181, 14571, 14959, 7885, 13928, 15118, + 13217, 12759, 15190, 15019, 12480, 13317, 10940, 14166, 15019, 14312, 6630, 15064, 15317, 14588, 15016, 13279, + 14377, 13396, 13638, 14157, 15047, 13732, 15034, 15031, 15200, 13505, 13054, 9905, 12881, 11007, 14831, 13794, + 
15009, 14841, 15321, 12449, 12512, 15283, 11285, 14678, 14715, 9607, 14633, 15124, 15172, 13516, 13942, 12949, + 11990, 15178, 14958, 13399, 14996, 14748, 12881, 14765, 13490, 14310, 14636, 14530, 15322, 13964, 15161, 14707, + 13430, 15154, 14869, 13314, 14952, 14425, 6327, 14455, 12576, 14588, 13287, 15349, 14877, 15352, 14621, 14427, + 11219, 13254, 12857, 14148, 11485, 13363, 13008, 14469, 14857, 14343, 13504, 12956, 9455, 15291, 12692, 14466, + 15026, 14212, 14264, 15338, 12109, 14883, 15043, 13878, 14157, 14844, 14549, 11506, 14404, 14050, 15257, 14496, + 15289, 15246, 14891, 12987, 15251, 14795, 14871, 14350, 14488, 15155, 14164, 12674, 11648, 14386, 14560, 13252, + 11409, 13823, 12933, 14819, 14525, 11526, 14084, 10683, 12571, 14420, 13743, 9563, 14776, 14431, 12328, 14673, + 14912, 15147, 13866, 14683, 15119, 15297, 13990, 14333, 13922, 13579, 14432, 14481, 14705, 14885, 14996, 14381, + 12315, 12527, 14887, 13425, 15114, 14454, 14387, 12648, 13363, 15321, 13836, 14544, 14435, 14598, 14259, 13950, + 15192, 12862, 15271, 13224, 12249, 14383, 11543, 15324, 13380, 11513, 9217, 12328, 14006, 14735, 14865, 13053, + 15243, 14823, 14297, 14915, 15165, 14702, 9366, 12411, 10176, 14309, 15330, 10784, 13792, 11248, 13398, 14760, + 14798, 14994, 13840, 14157, 14617, 15256, 12854, 15010, 13599, 13158, 14837, 14101, 14889, 14128, 13432, 14787, + 14868, 13632, 10812, 14908, 12166, 14053, 13633, 14672, 14503, 14625, 13158, 15180, 14885, 15203, 11298, 14251, + 14654, 13301, 14757, 11126, 14224, 13390, 14511, 15193, 9898, 14138, 14960, 14638, 13922, 14565, 14086, 14188, + 15224, 13809, 14777, 14636, 15219, 14003, 15056, 15217, 15209, 14856, 14702, 13423, 9712, 15180, 14684, 10530, + 14577, 14188, 14337, 14935, 12754, 13796, 8452, 13086, 13067, 14636, 12744, 14285, 11010, 14732, 14437, 14711, + 13912, 14995, 12915, 13382, 15227, 15115, 15243, 13776, 15210, 14303, 14373, 11203, 12987, 14562, 13697, 12014, + 15106, 14818, 10686, 15042, 15095, 11987, 15202, 14031, 13689, 13432, 
11963, 8939, 14570, 14614, 13757, 15032, + 11397, 15289, 14331, 14989, 14577, 14303, 14949, 14610, 15165, 11946, 10360, 14780, 13499, 15049, 14630, 12415, + 14426, 15329, 15216, 13582, 13819, 13493, 13918, 15349, 14180, 14673, 15055, 10834, 14177, 15213, 13542, 10651, + 14641, 14971, 13869, 9558, 14456, 3923, 14793, 14992, 14062, 14109, 14409, 14010, 15029, 14976, 13887, 12561, + 14305, 13974, 13641, 14058, 14910, 11559, 14458, 9806, 15283, 14571, 13846, 14888, 15135, 14611, 14799, 15201, + 13461, 14699, 14600, 13551, 14307, 13877, 14478, 15312, 13374, 14180, 14710, 14339, 13210, 13457, 14506, 14605, + 12863, 14559, 14883, 14734, 15259, 14185, 14703, 14262, 13608, 13825, 11476, 14493, 12793, 14104, 11889, 14546, + 13845, 13355, 14028, 13540, 15236, 14089, 15260, 15018, 15225, 12357, 12428, 14403, 14549, 14823, 13690, 14619, + 10875, 13276, 12100, 13927, 13136, 13474, 15240, 14797, 14527, 14692, 14711, 14656, 13066, 10146, 14738, 14392, + 13022, 14830, 13754, 13540, 14060, 14107, 12909, 10469, 11385, 11339, 15152, 15301, 12082, 14049, 13474, 14806, + }; + uint16_t ret[48 * 3 * 16 * 16] = { + 15328, 11885, 14850, 13491, 15282, 14562, 14814, 13442, 12873, 12411, 13541, 15219, 15145, 8543, 13470, 14598, + 14938, 14970, 14865, 12159, 14705, 12982, 14009, 14750, 13580, 14466, 11726, 14410, 13106, 13810, 15171, 14411, + 14411, 15232, 14482, 13788, 13191, 13792, 13705, 14444, 9862, 14574, 12804, 13856, 14761, 15342, 15242, 12447, + 14846, 14781, 14900, 15080, 14968, 14692, 15127, 14695, 15213, 10661, 15119, 14774, 12677, 14360, 14587, 13728, + 14417, 14791, 13625, 15134, 11958, 14734, 15209, 15306, 15341, 15271, 14841, 14778, 14606, 14441, 14982, 12896, + 14366, 15133, 14098, 13697, 13858, 14349, 15168, 13034, 10841, 14164, 14801, 15199, 13955, 14838, 11355, 12638, + 15228, 14452, 14734, 14727, 11609, 14302, 12017, 13700, 12920, 14671, 11125, 14345, 15186, 15048, 9528, 15239, + 14525, 15038, 14934, 14472, 13349, 12320, 13101, 15232, 14559, 13626, 11735, 14543, 13751, 
14878, 14943, 14363, + 14482, 13101, 14341, 14824, 15340, 12450, 14380, 14466, 12756, 10301, 14885, 14534, 14327, 14443, 13017, 15290, + 14753, 14875, 15189, 15130, 15087, 8949, 12932, 13310, 14871, 14137, 13244, 14837, 14709, 14857, 15320, 13484, + 14731, 15313, 14424, 14597, 14803, 14892, 13612, 14385, 13900, 14506, 14658, 14822, 14391, 14373, 14394, 15053, + 14550, 14975, 13615, 15183, 13234, 10778, 15249, 14230, 14460, 13630, 14094, 15079, 12767, 15020, 14860, 14598, + 14579, 13626, 11477, 12396, 15018, 14669, 9116, 15200, 13240, 12567, 14689, 15182, 12855, 13499, 13822, 12372, + 13601, 10074, 15097, 14448, 15163, 14362, 13829, 15000, 14960, 14687, 13258, 15139, 14728, 14365, 14059, 15069, + 10362, 15032, 14309, 15005, 14602, 15063, 13876, 14282, 14595, 15000, 14596, 13982, 13040, 15247, 15121, 14609, + 13517, 15222, 14973, 15349, 14361, 13948, 12273, 14640, 15187, 14418, 15220, 15066, 14384, 13228, 10061, 13676, + 13611, 14058, 14727, 14246, 15337, 14514, 13846, 15223, 14428, 14932, 14018, 13378, 14697, 14303, 15138, 14831, + 14482, 13781, 15073, 15341, 14988, 15185, 14572, 14631, 7997, 15017, 13178, 13413, 14723, 14944, 13438, 14574, + 10424, 14740, 15086, 14723, 14981, 13562, 13483, 14385, 13668, 15014, 14640, 13307, 14461, 13975, 13667, 14926, + 10756, 13671, 15093, 14160, 15330, 15007, 14052, 13458, 14876, 10361, 15075, 14735, 12625, 14275, 15235, 15064, + 15359, 15291, 15054, 8704, 13491, 14576, 14382, 13633, 14998, 13827, 15131, 12957, 12529, 15152, 14445, 13982, + 14498, 14700, 12005, 14976, 14169, 14345, 11676, 13676, 15095, 14932, 14858, 14419, 15177, 15087, 13857, 5890, + 14957, 13713, 14478, 14845, 14520, 14560, 14495, 12517, 15282, 13751, 14447, 14527, 15233, 13356, 13502, 10378, + 14750, 12328, 11403, 12048, 14905, 15198, 5539, 14540, 13729, 13881, 14158, 15281, 14722, 15109, 14086, 14434, + 10583, 14638, 14808, 14561, 13153, 15118, 14569, 13466, 14169, 11826, 15281, 13498, 8474, 14473, 14769, 14484, + 15023, 10196, 14434, 14033, 14529, 12565, 
14620, 15203, 14965, 14752, 12354, 15087, 14367, 14347, 15299, 11600, + 14956, 12913, 14868, 14614, 15344, 13008, 15242, 13769, 14135, 14594, 5519, 14996, 13819, 14630, 14573, 15157, + 13659, 11452, 13759, 15291, 13407, 13808, 14746, 13462, 15325, 13344, 15231, 10380, 14501, 12457, 14143, 14620, + 15051, 14789, 13464, 14722, 13460, 14236, 14929, 14640, 12253, 13549, 14462, 12041, 14932, 13549, 9494, 12638, + 14805, 11801, 14364, 11469, 13530, 14483, 15235, 14507, 14119, 14718, 8963, 11587, 14619, 14634, 14821, 14436, + 13938, 14102, 13029, 10818, 12621, 14230, 10329, 13563, 14912, 9259, 15189, 12298, 13754, 15147, 13471, 15113, + 13872, 12534, 13671, 15142, 14652, 10288, 14401, 13745, 13506, 14884, 12776, 14557, 14658, 14365, 11343, 13908, + 14929, 14992, 14619, 14616, 14434, 14901, 12606, 9326, 13323, 13393, 14589, 15153, 13107, 9555, 15155, 14611, + 14065, 14710, 15116, 12870, 15310, 14989, 13374, 13966, 11419, 15283, 13071, 15084, 15280, 12571, 14429, 15219, + 15153, 15188, 14802, 14911, 14142, 13873, 14346, 15007, 11385, 14735, 14868, 15154, 15039, 13867, 14894, 14679, + 12692, 12446, 14814, 15207, 13329, 15036, 12418, 13327, 14933, 13473, 12099, 15200, 11767, 15195, 13241, 12990, + 13693, 14649, 14126, 12766, 12334, 15069, 13986, 15202, 14067, 10855, 12825, 14433, 14976, 14815, 14103, 12600, + 11679, 14908, 13689, 13759, 13393, 11557, 15012, 14515, 7711, 13420, 11631, 15112, 13002, 13513, 12444, 11584, + 12543, 15218, 13929, 14104, 14650, 14562, 12319, 11597, 14604, 15226, 15092, 14962, 14933, 15158, 14628, 14557, + 14618, 14376, 14536, 14451, 15121, 13048, 13135, 15065, 15310, 14436, 13212, 13999, 12867, 10945, 14230, 13153, + 15045, 10757, 13045, 14666, 12769, 14347, 12819, 14376, 14649, 14341, 13867, 14106, 14062, 14749, 10974, 14920, + 14679, 11033, 12019, 15234, 12716, 15065, 14886, 15305, 15275, 8501, 14844, 14887, 13631, 15077, 13886, 13465, + 12916, 14269, 14437, 13364, 14320, 9806, 11327, 13339, 15343, 14318, 15221, 11391, 14663, 12691, 14457, 11977, 
+ 8992, 14338, 13665, 14673, 11195, 14201, 15084, 9515, 14240, 13623, 14326, 14861, 13971, 13021, 13068, 14109, + 14908, 14617, 10001, 13457, 15176, 14274, 12258, 11264, 13880, 14642, 14476, 14281, 14468, 15326, 13854, 14852, + 14038, 15095, 11667, 13694, 14444, 14966, 13593, 13656, 13085, 14365, 13016, 13891, 13561, 13884, 12652, 14059, + 14922, 14593, 14654, 12428, 14389, 12311, 11366, 14448, 13849, 14749, 10674, 8430, 13455, 14794, 15354, 10677, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10806, 14440, 15259, 14670, 14347, 12599, 13709, 9025, 15046, 13527, 12670, 14701, 10226, 15044, 14027, 11629, + 14355, 13916, 14757, 12991, 14319, 14208, 13710, 14764, 13153, 13572, 14534, 13904, 14598, 15145, 13157, 13062, + 14376, 14080, 14945, 14873, 12494, 15019, 15201, 15265, 14741, 12609, 11272, 14822, 15031, 14502, 14894, 14518, + 13735, 14462, 9816, 14002, 15003, 13871, 15034, 14661, 14626, 14200, 13446, 14346, 14949, 14851, 8898, 15136, + 13612, 13542, 13608, 14715, 13124, 14662, 15199, 15245, 13455, 13105, 14272, 13678, 14587, 15027, 13719, 13665, + 12496, 10293, 14538, 11513, 14952, 13943, 14754, 13897, 14947, 15323, 14159, 14422, 14776, 15359, 14528, 15050, + 10513, 14932, 14608, 14861, 14933, 12795, 13409, 14275, 14925, 14039, 13235, 14773, 8235, 14041, 14183, 9757, + 12867, 13993, 14804, 14724, 10535, 15039, 12867, 10931, 12582, 14951, 14380, 14314, 15158, 14423, 13124, 13584, + 14254, 14092, 15290, 13830, 14724, 14773, 13169, 13113, 14763, 14598, 11269, 14788, 14804, 12747, 14549, 14246, + 14071, 14099, 14904, 12684, 14912, 11485, 14828, 15184, 14836, 13371, 14903, 14652, 14550, 13209, 14872, 14629, + 14187, 13842, 14392, 11885, 13617, 14972, 8803, 13586, 9700, 14633, 13454, 14672, 14321, 13313, 14985, 12899, + 12730, 13845, 12305, 15037, 15161, 14753, 11960, 11466, 14745, 14374, 14379, 14044, 12303, 13971, 15258, 15111, + 15173, 5052, 13519, 14898, 14537, 10294, 14242, 15318, 14491, 12357, 13390, 14340, 11174, 15134, 14534, 14907, + 13659, 12775, 9416, 
11973, 14403, 14736, 15197, 14269, 11898, 12201, 13762, 14472, 14272, 13378, 14211, 14672, + 14865, 13855, 14708, 15328, 14812, 14768, 11932, 14690, 14211, 14734, 15181, 14166, 13080, 14116, 15037, 15192, + 14623, 15274, 11988, 14868, 15154, 14729, 13571, 5149, 12619, 15164, 14719, 11631, 12976, 13145, 14584, 15301, + 12418, 14527, 12452, 13573, 14876, 15331, 14052, 12436, 14576, 15306, 13368, 14095, 15021, 15011, 15206, 14853, + 14724, 14843, 14841, 15052, 13724, 14246, 13306, 13106, 14847, 12021, 15137, 11785, 15295, 14628, 13687, 14490, + 14690, 9665, 11880, 14920, 15064, 15125, 14715, 14828, 14656, 15080, 12669, 12046, 15330, 13180, 14516, 15332, + 14341, 15153, 12106, 15160, 14693, 10746, 13745, 15262, 14748, 10924, 14078, 15278, 11772, 15182, 13617, 14454, + 14475, 15188, 14490, 15298, 14132, 14274, 11785, 12699, 13890, 11652, 15110, 13687, 14420, 15108, 14392, 12790, + 15225, 12265, 13671, 14362, 14551, 14348, 13421, 14955, 10555, 15222, 11006, 15191, 12773, 14028, 14539, 14661, + 14804, 15155, 14675, 15283, 14981, 15070, 13790, 15082, 14475, 14430, 11930, 15191, 12819, 14450, 15059, 8207, + 12545, 14110, 14293, 14893, 13451, 15299, 14819, 15104, 14489, 11805, 11557, 13373, 14817, 11704, 14090, 14798, + 14022, 13075, 15333, 15064, 11976, 14139, 14867, 15172, 12429, 15220, 15131, 10353, 13628, 13686, 13666, 14169, + 12342, 15212, 14882, 15166, 13219, 14759, 14859, 14752, 15343, 13709, 13454, 13187, 12954, 15337, 13914, 14446, + 12840, 12921, 14613, 15108, 9232, 12577, 14271, 12812, 10916, 14458, 15239, 15240, 15276, 14434, 13904, 14992, + 14626, 14642, 9608, 13984, 13262, 12493, 14691, 13570, 14666, 12016, 12717, 14735, 13156, 11318, 15086, 10693, + 13414, 10697, 13995, 15304, 14847, 15146, 13445, 13162, 14727, 13701, 14611, 14325, 14950, 12760, 12925, 3613, + 14817, 14480, 13538, 14011, 12554, 12511, 12186, 14508, 12780, 13420, 12289, 12704, 15138, 13653, 14213, 12366, + 12588, 14127, 12628, 14492, 13363, 15061, 12055, 11082, 14500, 14201, 12462, 14112, 
13886, 13199, 15087, 14190, + 14545, 14649, 13748, 12612, 14645, 12495, 11389, 14163, 15337, 12463, 13696, 14868, 14490, 13437, 13013, 14580, + 14544, 13390, 14803, 14577, 14003, 12021, 15045, 14845, 14823, 9029, 14050, 12553, 12536, 14494, 14810, 15064, + 12547, 14907, 9283, 15247, 14232, 15080, 14119, 14820, 15155, 14078, 14519, 14544, 13755, 8395, 14869, 13986, + 11899, 13868, 14944, 15136, 15122, 14361, 13511, 14936, 14120, 9040, 14897, 14498, 15103, 13648, 14619, 12763, + 12537, 13969, 14585, 12557, 11353, 14998, 14770, 12305, 14440, 12769, 12698, 15143, 13621, 14809, 15157, 14404, + 14767, 11931, 12585, 12905, 14800, 15299, 15000, 13216, 13483, 13812, 14068, 14665, 13937, 14383, 13533, 14895, + 14548, 15126, 14766, 15137, 12677, 15054, 11689, 13406, 14094, 14356, 14487, 15036, 14364, 14970, 12998, 14033, + 14969, 15311, 13789, 15019, 10557, 14580, 15257, 14221, 11327, 8658, 15193, 14099, 11778, 14145, 12786, 11156, + 13204, 13027, 14821, 14613, 13655, 7408, 12530, 13548, 15335, 14469, 14932, 14859, 9397, 14484, 15228, 14791, + 15260, 14303, 15073, 15286, 15038, 14598, 13892, 14440, 12673, 14358, 12511, 12763, 14345, 14101, 12670, 12518, + 13512, 12985, 13625, 14581, 14296, 14129, 13170, 10226, 15021, 13548, 13375, 14402, 13748, 12631, 13444, 14524, + 8627, 13572, 14606, 13884, 13889, 14342, 14296, 9488, 14118, 14490, 12350, 12647, 13634, 15184, 13772, 14845, + 14359, 498, 15355, 14621, 15096, 13522, 15120, 9838, 13918, 14594, 15299, 14399, 10548, 14368, 14217, 13569, + 15207, 12527, 13806, 12805, 11655, 13996, 13926, 15238, 10379, 15231, 14791, 15004, 13273, 15188, 15289, 13836, + 12145, 12872, 15175, 15025, 12520, 14378, 12193, 14924, 14710, 14433, 14919, 13174, 12998, 14370, 15178, 14863, + 15136, 14379, 13835, 15106, 15187, 14833, 12832, 14454, 14454, 13542, 13729, 13971, 14371, 14372, 11647, 12151, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13100, 13737, 11863, 14480, 15189, 12907, 14838, 12884, 12522, 15305, 14786, 14656, 13785, 14920, 12479, 
10670, + 14386, 13078, 15106, 11820, 15089, 11222, 14332, 14155, 14558, 15169, 14207, 15105, 11509, 12249, 14401, 15338, + 13541, 13315, 14641, 15150, 13698, 13773, 12080, 13996, 13981, 13324, 9344, 13687, 14569, 11241, 14762, 12173, + 14894, 14398, 12087, 14358, 10037, 8643, 12350, 15117, 8771, 14150, 15032, 13083, 13782, 14363, 14416, 14639, + 14460, 12098, 15029, 14627, 14573, 12170, 13473, 15317, 14260, 14702, 15348, 12630, 14380, 14941, 15252, 15288, + 15120, 11521, 15316, 13801, 14512, 13421, 10843, 13650, 10918, 15256, 4724, 14413, 14097, 15263, 13472, 14807, + 15223, 14340, 5025, 9996, 13719, 12170, 3755, 14953, 13972, 15192, 14414, 13370, 13937, 14463, 11780, 14583, + 15074, 12776, 12417, 14762, 13982, 13123, 13283, 13828, 12752, 14851, 12719, 14856, 14624, 13481, 14880, 14585, + 13198, 14705, 14962, 13344, 13692, 15304, 13111, 14405, 14590, 15333, 13610, 13854, 15184, 14941, 14765, 13557, + 15243, 14786, 14904, 14603, 10301, 14122, 14838, 13003, 13400, 14832, 14488, 15344, 14803, 11713, 13751, 15076, + 13599, 13555, 14881, 11781, 10208, 8572, 12837, 14422, 13120, 15200, 15191, 14744, 13446, 14133, 12418, 14671, + 11903, 15010, 14259, 11493, 14867, 13117, 9917, 11807, 14459, 13403, 14868, 14147, 14950, 13150, 12473, 13761, + 7934, 11710, 12435, 15250, 14914, 15089, 13650, 12255, 14561, 15119, 12427, 14097, 15126, 11304, 14578, 14652, + 12595, 14309, 14454, 11569, 13641, 15245, 13103, 14783, 14400, 14530, 15270, 14804, 13215, 14617, 15029, 13321, + 14638, 9215, 14921, 14273, 12559, 11635, 14708, 13551, 13854, 11310, 14942, 15345, 15042, 14063, 13644, 14901, + 11602, 15043, 13401, 15334, 13866, 14824, 15223, 14552, 14053, 13480, 11280, 14800, 12414, 14690, 15139, 15155, + 14668, 14214, 13977, 15202, 12974, 15164, 10917, 14600, 13000, 11961, 13756, 14574, 14777, 13444, 12696, 14620, + 14892, 13257, 14452, 15184, 12769, 15123, 11585, 14233, 14920, 13356, 15212, 15064, 14475, 13585, 13192, 10359, + 14417, 15350, 13869, 14171, 14956, 14131, 14951, 12628, 14451, 
15218, 11799, 14506, 13746, 13443, 14357, 14774, + 14393, 13749, 12463, 13472, 15287, 14445, 12963, 12887, 15098, 14430, 14328, 14918, 14471, 14941, 10944, 14578, + 14790, 12918, 14948, 11875, 13900, 13302, 10662, 15305, 15108, 14150, 13732, 15359, 12778, 11861, 13476, 14282, + 15176, 11559, 14305, 14397, 15274, 14686, 13810, 15048, 11959, 14419, 15012, 13336, 14880, 14207, 13799, 13183, + 13826, 14755, 10774, 13510, 13997, 14700, 11632, 14293, 14765, 15054, 15028, 13094, 13616, 14851, 13904, 14142, + 11796, 14932, 14834, 14412, 14104, 14853, 14362, 13846, 14825, 14789, 14360, 15100, 14031, 14261, 15078, 13442, + 14683, 9252, 14446, 15028, 15344, 13768, 15024, 14937, 14999, 15136, 14350, 11593, 15212, 13391, 15217, 15230, + 13867, 12957, 13097, 14612, 12753, 14569, 15261, 14443, 13773, 12019, 14762, 15299, 14805, 15288, 13799, 12660, + 13444, 12887, 14802, 15154, 14984, 11826, 12467, 15049, 14871, 14169, 15057, 11918, 14137, 10539, 8761, 13353, + 14221, 14581, 15271, 14040, 11938, 15000, 13980, 14590, 14806, 15249, 14623, 12797, 14090, 11084, 14362, 14543, + 14265, 14880, 15002, 13644, 13942, 14235, 14522, 15266, 14608, 14559, 15242, 14920, 15264, 12976, 12793, 12185, + 14684, 13546, 14340, 12751, 14235, 15315, 13419, 13980, 14733, 13929, 12893, 13106, 13555, 13846, 15244, 14468, + 15310, 13471, 13711, 14905, 15072, 13771, 10795, 12789, 12665, 15099, 14854, 15094, 15071, 12582, 14103, 13469, + 15339, 11059, 14956, 14553, 14544, 12138, 14793, 9570, 14338, 13544, 12310, 15192, 13345, 10917, 14402, 14268, + 13979, 14433, 15223, 15352, 14577, 14387, 13180, 13973, 14889, 14940, 14587, 13830, 13388, 14815, 14537, 14987, + 12717, 12567, 13701, 14669, 14024, 15239, 13520, 14676, 15021, 10311, 14545, 14503, 14702, 8870, 14414, 13411, + 14904, 13044, 12362, 12550, 12708, 10956, 13818, 14354, 14358, 11884, 15125, 14457, 14470, 13657, 14178, 14619, + 4633, 13932, 14326, 14894, 15232, 12887, 12889, 12046, 15192, 14470, 14972, 10841, 14415, 14349, 15090, 14367, + 15277, 13918, 
15115, 14446, 12815, 14540, 14897, 13316, 14859, 14962, 13429, 12092, 15320, 13084, 14427, 15189, + 13439, 13364, 13764, 15137, 12270, 14740, 11387, 12467, 10379, 13631, 14537, 14286, 15110, 15139, 13567, 15045, + 14680, 14921, 13503, 14634, 13591, 13710, 14265, 13733, 14642, 5417, 12744, 13713, 15083, 14693, 11315, 14265, + 13364, 15356, 12229, 10819, 12788, 14521, 12468, 14758, 14549, 15358, 14728, 11773, 14118, 15168, 15185, 14785, + 14796, 15204, 15018, 14919, 14841, 15275, 8626, 11878, 14508, 14672, 15091, 12732, 14415, 14967, 14775, 14379, + 12379, 14472, 10921, 14510, 13923, 15172, 12023, 14395, 15228, 12522, 10008, 13424, 14746, 10574, 15157, 11741, + 14457, 13452, 8235, 13151, 13555, 12429, 14692, 13409, 11531, 14612, 14766, 14062, 14755, 14651, 15003, 13385, + 14138, 14267, 15052, 14676, 14585, 13785, 15319, 14839, 14355, 14462, 15131, 9994, 13997, 14458, 11639, 10862, + 14173, 11621, 11313, 13553, 14718, 15312, 14192, 14407, 11938, 13743, 14907, 14840, 12705, 14648, 15069, 14178, + 12748, 14481, 12907, 13277, 13081, 12138, 15141, 14394, 14713, 14261, 12189, 15057, 12550, 12569, 11634, 14929, + 13770, 15266, 15081, 12003, 14714, 14885, 14380, 12330, 14153, 7412, 14371, 15236, 14193, 14780, 14390, 14516, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14528, 15259, 14813, 14875, 13574, 15149, 12370, 12792, 14123, 14014, 14745, 12997, 14373, 13477, 15317, 13365, + 10620, 14372, 14603, 12749, 13451, 11382, 14409, 14004, 14389, 14545, 13380, 14828, 15157, 14647, 14735, 14349, + 14542, 14590, 14551, 15217, 12354, 14379, 13924, 14253, 14621, 13617, 13400, 8946, 14360, 15280, 13518, 14445, + 14674, 13496, 14373, 14553, 14493, 14595, 13460, 13418, 13790, 15032, 15096, 15090, 13832, 15037, 12628, 11426, + 14655, 14761, 14991, 14906, 14398, 14941, 13342, 14536, 14400, 12573, 12899, 14537, 14475, 13391, 10889, 11346, + 14415, 14039, 15263, 14585, 12954, 15082, 13027, 13875, 11123, 14512, 11722, 13597, 12144, 14666, 13715, 13062, + 14451, 13634, 13199, 15126, 
12334, 15337, 15011, 14814, 14957, 15263, 14101, 13494, 13936, 14450, 9766, 15307, + 14544, 15194, 14101, 14460, 12530, 11327, 15343, 13844, 12925, 14755, 14847, 14112, 14708, 14547, 12935, 15191, + 14723, 11187, 14078, 13324, 14912, 12947, 14931, 13369, 13930, 14426, 15126, 13196, 15321, 13604, 15084, 15197, + 13240, 11633, 15180, 13648, 14165, 14391, 14630, 12661, 15258, 13525, 15129, 15089, 13050, 14956, 6661, 15009, + 12789, 8770, 13842, 14692, 15273, 14964, 13245, 13918, 15164, 14383, 13522, 12802, 13671, 9733, 13916, 14649, + 14688, 14324, 13962, 13887, 14538, 13890, 15238, 12831, 14492, 8858, 14571, 13637, 14300, 15059, 14464, 11724, + 13053, 9762, 15057, 14677, 15152, 13595, 13904, 15088, 14386, 14884, 12995, 13924, 13438, 14773, 13780, 13945, + 14591, 13610, 13913, 14483, 14623, 14079, 9957, 15343, 15343, 15163, 11828, 14960, 14599, 13635, 7484, 14999, + 14620, 14345, 14040, 11007, 14097, 14828, 15001, 14826, 10995, 13098, 14745, 15005, 14572, 13941, 12608, 11661, + 15225, 15049, 15081, 13567, 12072, 14714, 10858, 14775, 11260, 14543, 14377, 13544, 14498, 15268, 13488, 13397, + 15093, 14212, 15335, 14681, 14444, 11302, 14223, 14244, 14840, 15168, 14698, 14721, 15188, 14471, 14773, 14773, + 14935, 15286, 13107, 14618, 15222, 14179, 13768, 15010, 14168, 13340, 13985, 12549, 12618, 12922, 14805, 14669, + 15292, 13731, 14189, 12401, 13928, 13408, 14431, 14721, 13528, 10208, 12017, 12569, 11308, 13326, 10509, 8419, + 11487, 15088, 12384, 14467, 13202, 15117, 11240, 9243, 14486, 12285, 14445, 6554, 13174, 14850, 14712, 14918, + 12039, 3817, 11139, 14970, 13444, 15233, 14489, 15274, 14001, 14627, 13643, 13793, 11504, 12779, 14390, 14737, + 15331, 13994, 14366, 14195, 14488, 14717, 14404, 14802, 13367, 13056, 13854, 14904, 14472, 11928, 12298, 13701, + 13519, 15009, 14697, 13487, 15320, 12379, 14634, 14360, 12940, 14632, 14442, 14955, 15028, 14936, 13684, 11666, + 14849, 14129, 13702, 14807, 11437, 15179, 12857, 14691, 13423, 12751, 14088, 13769, 12535, 14137, 
14394, 13172, + 15338, 14485, 14169, 9627, 13514, 14140, 14452, 15064, 14862, 14515, 8704, 13330, 13262, 13735, 13804, 14634, + 15153, 13999, 14318, 13328, 11181, 13291, 13686, 13945, 14948, 14387, 11671, 13830, 14146, 14918, 13474, 14099, + 14872, 14683, 14223, 13357, 13230, 15320, 14597, 15355, 15187, 14777, 14069, 14929, 15319, 14340, 10674, 14769, + 15352, 11758, 13808, 14800, 14715, 15017, 14281, 15004, 13497, 14382, 13167, 9910, 13489, 14567, 13061, 14680, + 15184, 13842, 13773, 15141, 14598, 15029, 14398, 14768, 12663, 14611, 14475, 15289, 13532, 13354, 15109, 14716, + 12745, 14663, 14349, 11981, 14371, 13369, 13120, 13727, 14909, 14645, 13330, 15037, 14450, 10489, 14395, 14562, + 14075, 13487, 12047, 10067, 15239, 12491, 13542, 13504, 13539, 13468, 15345, 8833, 14350, 12761, 13681, 12337, + 12686, 13994, 14361, 14658, 14779, 14994, 14221, 10379, 13928, 13425, 13473, 11863, 14809, 14768, 14662, 14020, + 14799, 12621, 13564, 13832, 13292, 11792, 13173, 12488, 15258, 15228, 13836, 13953, 15002, 15071, 14622, 15032, + 12950, 12939, 12851, 15216, 14972, 13757, 14541, 10012, 15167, 13305, 15235, 14529, 13038, 13739, 13119, 14680, + 12966, 15258, 13468, 14964, 14618, 15207, 13385, 15170, 13812, 15244, 13431, 13492, 15130, 12974, 14880, 12802, + 8495, 13707, 14412, 12698, 15190, 14999, 13418, 14791, 14545, 15177, 11650, 14341, 13858, 11394, 14125, 14690, + 14361, 12321, 15214, 14694, 14539, 12856, 14007, 14245, 12305, 13648, 7913, 15094, 15141, 11957, 11239, 14564, + 13913, 15230, 14282, 15029, 14494, 12887, 11379, 12101, 11726, 15355, 14542, 15036, 12671, 13974, 15277, 12852, + 13440, 11547, 14589, 15063, 14744, 14375, 14645, 14909, 14546, 14398, 14636, 14408, 14381, 14788, 12678, 14304, + 14525, 13612, 13886, 13524, 14916, 13696, 14097, 12690, 13499, 12932, 14481, 14962, 14194, 14920, 15293, 14524, + 15298, 13388, 15270, 15343, 15296, 14607, 13453, 14578, 11149, 12354, 13773, 14910, 15198, 15175, 7496, 13582, + 14824, 15285, 13693, 13655, 14641, 14962, 12215, 
15124, 10264, 13055, 13453, 14727, 14868, 14557, 14951, 9525, + 14913, 14186, 14579, 13637, 15172, 14781, 11855, 13360, 14141, 15049, 14825, 14578, 13839, 14538, 12308, 14356, + 13723, 14576, 14734, 15296, 14976, 14679, 14645, 9776, 14420, 13912, 15019, 14538, 12684, 14119, 13605, 14540, + 11193, 13627, 10860, 15075, 13155, 13337, 15085, 14578, 13248, 13453, 14706, 14029, 14145, 14869, 15041, 13183, + 15328, 14854, 12953, 14986, 14680, 14754, 15328, 14298, 12309, 14845, 13220, 13190, 15302, 13390, 14157, 14890, + 12838, 12483, 13554, 14060, 13392, 15109, 14463, 14535, 15122, 14789, 14591, 13735, 15317, 15116, 14943, 10673, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13688, 13939, 13830, 13018, 14962, 14654, 15233, 11584, 13098, 13381, 13620, 15075, 14571, 14405, 11706, 13407, + 12422, 12161, 13626, 13051, 14244, 14034, 12734, 12594, 13612, 14455, 15259, 12666, 14604, 14758, 15051, 14822, + 14491, 14201, 13871, 11521, 14879, 14610, 14418, 13851, 14822, 13599, 15340, 10676, 13220, 14613, 14754, 13319, + 15339, 13413, 14024, 13569, 13262, 15270, 15033, 14630, 13069, 11588, 15310, 15355, 14948, 13600, 11996, 14965, + 14671, 13131, 11873, 14036, 15172, 14796, 14669, 14062, 14822, 15331, 9312, 13774, 14140, 15285, 13588, 14802, + 12317, 15246, 13623, 13408, 13690, 11995, 12298, 14018, 14050, 13370, 14523, 13435, 12596, 14049, 15129, 15126, + 15298, 14441, 11449, 15048, 12390, 14107, 13764, 13669, 13855, 14806, 15096, 14530, 15356, 14867, 13396, 12750, + 13859, 15346, 14700, 13770, 14876, 13701, 15004, 14941, 14359, 15309, 15053, 13627, 13020, 14524, 13883, 14985, + 14922, 14145, 14556, 12683, 12359, 15061, 14476, 13781, 14160, 14807, 13809, 14749, 14499, 10582, 12678, 13899, + 14585, 14951, 11802, 11508, 15110, 10205, 13319, 15300, 15265, 13485, 12512, 13401, 15048, 12988, 14909, 9493, + 11895, 14862, 13626, 9071, 14789, 14096, 15084, 15191, 12923, 14301, 15065, 13518, 15121, 15131, 14649, 14132, + 15205, 15250, 13481, 13930, 13733, 15289, 13614, 14772, 14913, 
14812, 11627, 11381, 12054, 14325, 11943, 15178, + 14258, 14627, 13221, 15268, 14876, 15197, 14770, 15104, 11151, 10586, 15117, 13793, 11454, 12322, 4030, 14442, + 12942, 14249, 14647, 14469, 14359, 14648, 13850, 15035, 15201, 14353, 12522, 14935, 14700, 12949, 14487, 13746, + 12044, 14738, 14090, 14396, 14595, 13577, 13788, 15338, 14240, 14695, 14431, 8904, 12651, 14102, 15261, 13790, + 12629, 12687, 15002, 13012, 13447, 14093, 14838, 15008, 13082, 14348, 10027, 10243, 14703, 13283, 15106, 14203, + 14229, 14122, 14966, 14069, 15333, 15329, 13405, 14614, 13278, 13115, 14231, 9176, 12661, 15242, 13108, 14940, + 13867, 14822, 13583, 13931, 14936, 14844, 15255, 14543, 14682, 14888, 14367, 14842, 15355, 14490, 7562, 15315, + 14698, 15314, 14601, 14442, 14339, 12262, 14525, 13328, 13832, 14716, 14089, 15252, 14247, 14568, 12984, 15231, + 14405, 13496, 15148, 11565, 10111, 14427, 11560, 14821, 14491, 13406, 15030, 13053, 12573, 13538, 14053, 14695, + 14111, 14510, 15079, 14720, 15268, 11863, 13774, 12924, 12068, 14726, 14483, 14285, 12596, 13721, 14527, 12423, + 15079, 14565, 15037, 13755, 15009, 14704, 14954, 15344, 15027, 15319, 11701, 9529, 13240, 10286, 14525, 11705, + 14350, 14878, 11954, 15219, 15011, 14495, 13908, 14532, 14866, 13222, 13835, 15303, 13785, 13301, 14525, 14130, + 14570, 13225, 14752, 13612, 10278, 14734, 12484, 15142, 14748, 15327, 15218, 14984, 14718, 15020, 13251, 13549, + 14236, 14893, 14903, 15037, 13452, 12759, 14708, 14510, 14566, 13839, 15172, 14874, 11300, 15204, 13709, 15176, + 14045, 15183, 14734, 14307, 13904, 13452, 14499, 15191, 15091, 14597, 14999, 13944, 13444, 11968, 14303, 15273, + 14853, 13937, 12907, 12901, 13421, 14934, 15199, 13111, 14629, 13451, 14832, 10237, 15276, 14370, 15300, 14992, + 13816, 14403, 13504, 13864, 14772, 14760, 14948, 13573, 14198, 13720, 13646, 15196, 13879, 13572, 9994, 15250, + 13569, 13500, 11450, 13643, 14411, 12641, 14716, 13590, 14924, 15247, 14908, 14042, 12534, 15120, 14307, 14754, + 11417, 14986, 
14789, 14993, 13794, 15155, 15185, 14673, 14707, 12358, 14411, 13823, 14985, 14349, 13367, 13257, + 14682, 15317, 14113, 15227, 13788, 13907, 11966, 12400, 9700, 14037, 15125, 13350, 12765, 13684, 13719, 14198, + 14686, 14760, 15355, 14773, 15113, 14837, 14842, 13915, 13169, 14426, 10379, 14442, 14960, 13499, 12782, 13825, + 15126, 14103, 14460, 15141, 15084, 14305, 14513, 13854, 13548, 13303, 15037, 14403, 14736, 14742, 14133, 14583, + 14807, 12895, 13711, 14543, 15024, 14495, 14510, 13317, 14962, 12098, 14885, 15310, 14685, 13472, 12684, 14401, + 13514, 14930, 14470, 14079, 14951, 12408, 14468, 13571, 12925, 15358, 11270, 12532, 14585, 12093, 13562, 13376, + 15283, 15078, 14410, 14649, 14495, 14283, 12328, 14845, 15304, 13452, 9134, 15207, 12841, 11672, 13009, 13572, + 13116, 13753, 14181, 14098, 14659, 14340, 12674, 12566, 14434, 13799, 9824, 14266, 13717, 14463, 14390, 11194, + 12107, 14124, 14452, 15310, 15092, 13664, 13610, 14358, 15260, 14465, 15344, 15340, 10772, 14759, 15321, 12335, + 14730, 15093, 15323, 14400, 12425, 9866, 14738, 13038, 14482, 14066, 13043, 14670, 13650, 14017, 13956, 14808, + 13588, 15249, 14592, 14330, 13341, 15158, 14700, 14110, 15076, 10650, 14516, 10898, 15285, 14400, 11325, 12890, + 13274, 11976, 14637, 14863, 15320, 14291, 14500, 9304, 14117, 14804, 13768, 13345, 14884, 8247, 12096, 11024, + 12731, 13591, 13924, 14406, 13790, 15169, 14277, 14092, 14474, 13260, 14476, 13400, 11300, 14262, 13876, 13588, + 15141, 13916, 15271, 15347, 14950, 15091, 13313, 12519, 15082, 14070, 8559, 12358, 12976, 15150, 13131, 14441, + 14612, 14812, 15031, 13911, 15274, 13781, 14398, 14289, 13718, 15076, 15286, 13810, 14562, 15063, 13400, 14366, + 14251, 14201, 11422, 11930, 12351, 12428, 13858, 12262, 14778, 12325, 15087, 14288, 14675, 14861, 14614, 14791, + 14776, 13992, 14399, 11819, 14181, 14032, 14506, 12438, 12745, 14328, 15213, 14859, 14216, 14302, 11919, 15061, + 13587, 14353, 13652, 12848, 12197, 12506, 13850, 12122, 14763, 14995, 14672, 
14505, 13567, 14062, 14967, 13779, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13874, 14592, 15110, 15171, 13778, 13934, 15179, 14917, 14471, 14758, 12996, 13647, 15336, 10894, 14645, 13884, + 14168, 12875, 14462, 9565, 11275, 10315, 15079, 14458, 11414, 14097, 14465, 14456, 15002, 13535, 11826, 14664, + 13106, 14471, 14982, 15098, 13944, 14610, 14002, 14954, 15264, 11286, 10201, 14854, 13130, 13676, 13315, 14956, + 12607, 11751, 15222, 13631, 13389, 13404, 13069, 13615, 11895, 14957, 12143, 14349, 13842, 12831, 13020, 14340, + 14375, 15101, 15301, 13705, 15308, 14528, 14992, 14178, 10699, 11628, 14265, 14852, 10489, 15155, 15253, 15352, + 13652, 15344, 15281, 15358, 11364, 12886, 14570, 14621, 11460, 15120, 12985, 12773, 13593, 14337, 12949, 15188, + 13746, 14150, 12385, 11357, 15221, 13306, 14439, 14979, 14757, 13882, 15262, 14993, 13909, 14937, 15009, 15062, + 14514, 13596, 11148, 12408, 15087, 11912, 14466, 13987, 13529, 13163, 12493, 14609, 10747, 14424, 14654, 14205, + 14900, 13203, 14596, 15101, 13823, 15195, 11908, 15279, 15270, 14862, 14355, 9750, 14901, 14976, 13542, 12747, + 15166, 14287, 13151, 13908, 14032, 15312, 14840, 14043, 13577, 15221, 11521, 14022, 15264, 15312, 13556, 12428, + 14219, 15219, 12284, 15292, 14957, 13805, 15272, 12249, 14148, 8298, 12863, 13255, 15089, 11957, 14360, 14603, + 14585, 14398, 14201, 13498, 10844, 13690, 11234, 10269, 14082, 15030, 15216, 14575, 13281, 14009, 14599, 7582, + 14767, 14921, 14870, 15107, 6159, 14774, 15093, 15288, 14101, 12898, 12811, 14981, 14351, 14031, 14296, 13427, + 14019, 8120, 14402, 14540, 14342, 13471, 10264, 15036, 13162, 12228, 14299, 14604, 13342, 13824, 15098, 15266, + 13507, 15158, 14746, 15261, 9397, 14357, 14626, 13559, 15083, 15029, 10000, 12963, 15177, 14888, 14442, 14492, + 14308, 14354, 14823, 14002, 14700, 11592, 15340, 14472, 15080, 14921, 13395, 15315, 14102, 10645, 13766, 11186, + 13462, 14393, 12787, 15261, 14471, 13563, 13802, 14777, 8740, 15190, 14984, 14947, 13484, 
14844, 9901, 15091, + 13583, 13855, 15289, 14117, 14885, 13571, 15283, 14259, 15286, 15015, 9842, 14590, 14719, 13467, 13568, 14531, + 14004, 14102, 15345, 14318, 13851, 14282, 13944, 14983, 14418, 15164, 13695, 14719, 13506, 13006, 10913, 14514, + 15163, 14664, 14670, 15236, 13983, 10808, 14182, 15262, 9612, 15097, 14893, 12444, 14463, 15018, 14489, 14465, + 12719, 14814, 10777, 13602, 15251, 12747, 15009, 6291, 14745, 14334, 14437, 14873, 12005, 14091, 14822, 14905, + 13626, 15179, 15278, 14511, 12545, 13871, 14949, 10846, 14529, 13121, 14409, 13738, 15029, 14559, 14801, 10574, + 14784, 14490, 12343, 14456, 14071, 12554, 14446, 13980, 14824, 13927, 13835, 13127, 14144, 8495, 13259, 12575, + 13318, 13925, 14654, 14759, 14730, 15127, 15001, 11363, 14292, 14752, 15196, 15119, 15344, 14378, 12417, 14836, + 13866, 13790, 13702, 7403, 11337, 15335, 13344, 15134, 15194, 11366, 12313, 14834, 10527, 15172, 10012, 14765, + 12390, 14606, 14254, 14380, 14496, 14993, 14282, 14900, 12466, 14910, 12465, 15188, 12674, 12460, 13879, 12462, + 13372, 13043, 13791, 13409, 15010, 14463, 14473, 8320, 14538, 13787, 13432, 14448, 14486, 14943, 15078, 13918, + 12931, 14696, 14563, 14753, 14521, 14675, 14717, 13942, 13914, 14653, 15321, 12926, 14377, 11811, 14358, 14819, + 11360, 14865, 14189, 9391, 14616, 14470, 15224, 14382, 15021, 15194, 12408, 13866, 14990, 14832, 14961, 14459, + 14988, 14035, 13761, 14905, 15277, 11684, 13267, 14474, 15181, 14918, 13847, 13494, 8073, 13641, 14653, 13377, + 13642, 13514, 14056, 15199, 12589, 14011, 14523, 12892, 15092, 15243, 14713, 15221, 14906, 14131, 12299, 14491, + 9696, 14013, 14498, 13488, 15192, 13719, 12060, 15008, 14190, 13221, 15183, 13439, 10168, 13440, 11290, 13275, + 14452, 12270, 15352, 15109, 14519, 14961, 13864, 15258, 13506, 14095, 14737, 13561, 14926, 15102, 13363, 15208, + 12347, 12058, 14845, 12651, 15294, 7694, 14313, 15206, 13426, 14629, 14790, 15182, 13368, 15350, 12634, 14576, + 14866, 14386, 9951, 14509, 14694, 13471, 14365, 
14556, 14964, 15155, 15267, 15324, 12864, 13937, 13331, 11289, + 13741, 15272, 13390, 14810, 14562, 14515, 15322, 11201, 15327, 14003, 14105, 11620, 14972, 13766, 15082, 14528, + 12654, 13953, 11955, 14769, 13379, 13193, 14075, 14290, 14548, 12435, 12328, 14471, 9529, 11646, 15065, 12646, + 15023, 11908, 15242, 15197, 14613, 13901, 14448, 13839, 14682, 14455, 14938, 9576, 14558, 15084, 14172, 15267, + 14858, 14817, 14617, 12165, 13895, 15110, 12097, 14502, 13540, 14672, 14479, 13952, 14193, 14500, 11387, 11844, + 12759, 12856, 15010, 9348, 14664, 14497, 14638, 15014, 14685, 14552, 12330, 14961, 14057, 10648, 14405, 15278, + 11781, 14991, 14234, 15074, 13457, 15283, 13533, 7520, 15068, 15317, 9031, 13974, 14712, 14074, 15019, 14874, + 13770, 13685, 13166, 13443, 11692, 15141, 13670, 14973, 15347, 14750, 14917, 13150, 14376, 14386, 13212, 14432, + 14856, 6419, 13706, 14706, 15273, 13671, 10867, 14351, 9109, 12551, 14541, 14167, 12870, 15189, 14694, 15103, + 14468, 15148, 14469, 15118, 13211, 15044, 14329, 11255, 12909, 13149, 14628, 14582, 11341, 14945, 14325, 15332, + 14110, 13670, 14591, 15060, 13862, 13906, 14642, 13330, 14674, 11487, 14614, 14297, 15241, 12650, 14729, 15048, + 14621, 15054, 14423, 15140, 13827, 10249, 15054, 14409, 15152, 12799, 13197, 14247, 14252, 12734, 13883, 14188, + 14381, 14381, 9361, 14671, 14523, 15012, 15074, 14888, 13331, 13845, 14427, 14194, 14538, 15181, 13466, 12648, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14780, 14396, 12987, 14807, 14771, 14052, 15030, 14144, 12305, 14664, 12725, 14952, 14949, 15171, 12935, 12427, + 13791, 12409, 13637, 11147, 14998, 15002, 14630, 15288, 14501, 13960, 14042, 15139, 13479, 13245, 14179, 14676, + 14707, 14740, 14240, 13472, 14496, 14935, 13652, 13630, 10696, 14283, 14452, 14049, 14392, 13641, 8209, 13995, + 14505, 14351, 13595, 14915, 14679, 13984, 13716, 15357, 14703, 12838, 15212, 13981, 14970, 10001, 14963, 14602, + 13555, 13608, 13917, 15074, 10476, 10145, 14768, 15177, 11626, 14748, 
14335, 14668, 14831, 13645, 14724, 14860, + 12438, 14406, 14115, 14658, 14572, 15110, 14391, 14838, 13983, 13027, 15130, 14535, 8860, 13839, 15012, 8923, + 15088, 14993, 13969, 14019, 14550, 14713, 14388, 14874, 15032, 11282, 12466, 13988, 13464, 14335, 14721, 15092, + 12751, 14097, 14703, 12228, 14228, 15068, 14468, 14128, 15356, 15336, 14128, 13674, 15300, 12420, 14429, 13708, + 11127, 10563, 14884, 14944, 14383, 15269, 12834, 13578, 14084, 8240, 14738, 14675, 12799, 14991, 8163, 14915, + 15249, 13603, 15162, 14352, 14883, 13189, 13164, 15101, 11917, 13979, 12944, 14783, 15068, 13911, 14871, 15066, + 13626, 14730, 12048, 15133, 13811, 14112, 12262, 15029, 6511, 14341, 14943, 9339, 12293, 15217, 14755, 13324, + 13510, 14898, 11699, 15089, 11710, 14006, 14768, 14818, 14749, 13432, 13255, 15206, 14405, 12752, 13319, 15256, + 13756, 15118, 14495, 14667, 15328, 14877, 14015, 14696, 15191, 13995, 14279, 14699, 14834, 15011, 14464, 13787, + 12501, 13589, 14073, 15024, 14601, 14572, 14087, 13789, 14940, 14546, 14726, 15154, 14584, 10473, 13924, 15117, + 13571, 14188, 12622, 14398, 14380, 14346, 14520, 14014, 12646, 14240, 14752, 15250, 14118, 11102, 14728, 14910, + 13569, 12423, 14291, 14315, 15043, 15198, 14158, 15055, 13840, 11689, 15240, 11561, 12958, 13178, 14728, 14209, + 12750, 8215, 15052, 14381, 13335, 15050, 14800, 14801, 13987, 14396, 14925, 14321, 14565, 14228, 11852, 13490, + 12694, 15323, 14310, 14370, 13734, 7803, 15164, 13411, 14762, 11455, 14552, 15045, 13815, 14346, 14988, 13645, + 14803, 14907, 10539, 14214, 15281, 14340, 14991, 13339, 15313, 14416, 13926, 13476, 13685, 15022, 14874, 15118, + 14144, 13977, 12840, 14352, 13584, 14477, 13578, 14360, 13853, 13907, 14954, 14541, 14560, 14078, 14068, 14422, + 15252, 15106, 13223, 14079, 15332, 14295, 14478, 14624, 12826, 13128, 12078, 15071, 13245, 12116, 14881, 13245, + 14337, 11600, 14455, 14650, 11630, 12785, 15240, 15323, 13272, 11770, 14774, 15089, 14799, 12080, 13231, 12404, + 14538, 12757, 14920, 
14090, 14109, 12826, 15104, 14567, 14802, 14775, 15035, 13828, 15282, 14933, 15323, 9590, + 14575, 13400, 10762, 13361, 15316, 13275, 14935, 8560, 15004, 14446, 13417, 14804, 13832, 12332, 15163, 14690, + 15087, 14366, 14795, 14561, 13580, 13390, 14903, 13323, 13845, 13998, 14856, 14624, 14389, 14912, 14993, 13029, + 14916, 15163, 15342, 11459, 14851, 12037, 14894, 11985, 12511, 11363, 14647, 14663, 12345, 14411, 14594, 14530, + 14388, 14889, 12586, 14767, 13293, 14349, 15144, 13116, 12982, 14453, 12736, 12612, 5323, 13456, 14969, 15232, + 14934, 13368, 13510, 8160, 15069, 14876, 14135, 9374, 12314, 13501, 15132, 14568, 14050, 13451, 14567, 11978, + 14715, 14352, 15297, 15320, 8772, 13324, 13004, 14801, 13938, 10955, 14076, 14765, 12837, 13428, 13832, 13447, + 12776, 14847, 14698, 12995, 14099, 14367, 14054, 13756, 12909, 12611, 15021, 14398, 14551, 14716, 14894, 13641, + 14715, 13737, 15058, 12747, 15287, 14722, 12904, 12538, 11723, 12911, 12979, 15082, 11039, 15131, 14715, 14603, + 14525, 14774, 13501, 12183, 14845, 14458, 13275, 14669, 13721, 14211, 14253, 11660, 14832, 14352, 13984, 14863, + 13373, 15011, 12976, 10967, 11274, 14895, 13149, 14192, 13105, 14577, 14190, 14411, 14910, 13228, 13324, 11576, + 13562, 13760, 14585, 14196, 14810, 11771, 14510, 14581, 12523, 12633, 13449, 13701, 14849, 13362, 13839, 14296, + 14365, 14803, 15108, 14576, 11366, 15136, 14372, 14587, 13943, 13421, 14692, 14488, 15115, 13847, 14552, 12555, + 13452, 14848, 15025, 14216, 14507, 14695, 14966, 14490, 12212, 12909, 12629, 14855, 14358, 13522, 14450, 11516, + 12402, 14607, 13326, 14213, 15149, 13481, 14481, 13429, 13440, 14499, 14709, 15318, 14990, 13954, 13901, 12819, + 14504, 13079, 14889, 14574, 15266, 15331, 11881, 13473, 13822, 14571, 14560, 13666, 14520, 13598, 13296, 11898, + 11917, 14903, 15334, 13654, 12289, 14546, 14307, 12889, 14537, 14718, 14663, 13459, 10105, 15127, 13348, 13952, + 14649, 14661, 15280, 14986, 14029, 14466, 14807, 13227, 10486, 10893, 11814, 14594, 
14766, 14740, 11707, 15356, + 15127, 15344, 15273, 14920, 14702, 14997, 15296, 15193, 14383, 14093, 14583, 15230, 14048, 13959, 14503, 14613, + 14231, 15260, 14866, 14776, 15046, 14668, 15284, 14810, 14674, 15075, 13835, 14443, 13496, 14356, 13608, 13780, + 14714, 13332, 14995, 14281, 13258, 15317, 14348, 12821, 14277, 14649, 14571, 14224, 11704, 14899, 13117, 14093, + 15155, 15342, 13426, 13698, 15263, 12160, 11829, 13512, 15270, 13446, 12170, 14496, 14081, 14153, 14543, 14813, + 12125, 13253, 14460, 14601, 13739, 10725, 14662, 11340, 8470, 12937, 13340, 14717, 14388, 15174, 14811, 10126, + 14996, 15150, 12733, 14752, 12472, 12592, 14823, 14983, 15118, 13137, 11701, 14596, 14917, 14329, 13793, 12906, + 8266, 15326, 15321, 14806, 14467, 15186, 14743, 13896, 14748, 13391, 15003, 15279, 14190, 13906, 9212, 14506, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14037, 14503, 15159, 13895, 14811, 11858, 14520, 15092, 15002, 15124, 14336, 14867, 13467, 10093, 14094, 13535, + 14288, 13626, 15263, 14937, 13626, 14345, 12795, 13708, 10623, 13200, 14709, 14926, 14672, 13429, 10302, 12384, + 14803, 12579, 14643, 11538, 13525, 13284, 15162, 13221, 13083, 15322, 13848, 11585, 14921, 14495, 15289, 14144, + 15254, 14737, 13381, 15267, 13382, 14416, 12839, 13707, 10792, 14810, 14365, 12095, 12527, 7058, 14716, 13650, + 14349, 14438, 14280, 15132, 15031, 12626, 14706, 13504, 15072, 14058, 14845, 15332, 14962, 13470, 14870, 13785, + 15228, 13997, 14871, 14665, 14713, 13731, 14462, 12829, 13143, 14834, 13977, 12987, 14900, 14502, 14869, 15123, + 12460, 12685, 13752, 12493, 10716, 13972, 13463, 13721, 14439, 14492, 15094, 13757, 11872, 12895, 14977, 10497, + 13980, 14974, 14541, 14768, 14169, 12693, 13539, 13517, 14785, 14213, 14143, 14398, 8643, 13958, 12594, 15008, + 13527, 14228, 10184, 13418, 12170, 15140, 14753, 15179, 15270, 14345, 14380, 14350, 15338, 13506, 14956, 15033, + 14753, 15153, 12669, 15188, 8507, 14125, 14172, 12659, 15070, 13982, 15243, 13862, 14254, 14551, 
14635, 11903, + 15285, 14476, 14714, 12767, 14497, 11429, 11832, 14681, 14594, 15314, 14944, 13854, 14587, 12229, 15222, 12024, + 14204, 15127, 15359, 13634, 12336, 14612, 14940, 13530, 14509, 14126, 14713, 14598, 13869, 14013, 14053, 13414, + 14602, 9421, 12825, 14552, 14620, 14084, 13955, 14916, 14328, 14604, 15278, 10540, 14335, 15230, 12727, 12309, + 14486, 14317, 15030, 14677, 15332, 14935, 12451, 14932, 11407, 14619, 14859, 14836, 14645, 11329, 15045, 14892, + 13794, 15032, 15281, 14368, 13974, 14214, 14866, 13932, 14941, 12985, 15045, 14825, 15192, 14240, 11960, 11717, + 14743, 11860, 15298, 13588, 14560, 14905, 14307, 13459, 13827, 13665, 14285, 14669, 12040, 14076, 13881, 14346, + 14860, 7471, 14001, 11828, 15229, 13910, 14357, 14991, 15353, 12495, 14384, 13754, 11446, 13610, 12392, 13576, + 13695, 14964, 15073, 14726, 14813, 15051, 15115, 15017, 11324, 14687, 15282, 10710, 15087, 11983, 15131, 12546, + 14828, 13304, 11243, 14241, 12320, 12004, 13398, 13548, 12424, 15072, 14373, 14914, 14359, 15047, 15161, 14599, + 13162, 13552, 14791, 11594, 14391, 13729, 14718, 13886, 13895, 12010, 14006, 13083, 12672, 14337, 15060, 13322, + 15048, 14342, 14311, 13501, 13902, 14058, 13697, 14931, 15093, 14963, 15078, 12953, 13581, 14667, 14283, 13625, + 14903, 7818, 11587, 12953, 13739, 14220, 14268, 11513, 14736, 13956, 13878, 12806, 15242, 14208, 14735, 15202, + 11438, 13857, 14101, 13435, 13787, 14792, 14539, 14361, 14908, 12711, 12628, 14909, 15068, 15259, 9496, 14880, + 14455, 12019, 14452, 8533, 13078, 14034, 15066, 14823, 14103, 15189, 15323, 13668, 11217, 14386, 14459, 14857, + 9751, 15197, 9953, 14535, 15051, 14669, 11408, 14710, 13168, 14537, 13597, 13411, 15020, 14545, 12342, 14444, + 12395, 14630, 15051, 14945, 14601, 15022, 15231, 13622, 13332, 14932, 13319, 11252, 13396, 13736, 12538, 12744, + 12315, 14862, 15345, 11046, 14357, 13365, 15196, 13827, 14804, 13818, 12588, 6923, 14660, 14989, 13773, 12405, + 14859, 14836, 15208, 12954, 15223, 14273, 15061, 
14685, 14635, 14187, 14383, 14608, 11955, 14462, 12940, 14162, + 11555, 15186, 14476, 14183, 14010, 9829, 11543, 13470, 9794, 14177, 12809, 14816, 13587, 12035, 14852, 14720, + 9787, 15095, 14423, 14586, 14298, 13554, 14431, 14850, 14875, 14595, 14871, 15290, 13334, 12954, 14399, 15026, + 13597, 8259, 12953, 12336, 14450, 15026, 13701, 14871, 13788, 14369, 13792, 15348, 13368, 14021, 14552, 13898, + 14062, 14689, 13702, 14737, 14898, 14374, 14853, 14850, 14343, 14253, 14357, 15216, 12462, 14964, 15058, 14774, + 13994, 13510, 13354, 14102, 14211, 14669, 15241, 11886, 14687, 15108, 13634, 10852, 15327, 15348, 13133, 12721, + 15236, 13236, 13002, 12294, 15132, 13939, 12685, 14192, 11241, 14510, 14884, 15094, 12118, 14660, 14270, 13712, + 14662, 14375, 13984, 14186, 13340, 15271, 14869, 11050, 11008, 14225, 15035, 15024, 11844, 13381, 11906, 9045, + 13751, 14551, 14657, 14549, 13392, 14264, 14835, 14502, 13439, 15125, 11995, 14783, 14387, 13993, 14486, 13526, + 14149, 13829, 13791, 14243, 14683, 14844, 15299, 13689, 14725, 14779, 13955, 13321, 14878, 14202, 14242, 15106, + 14794, 13002, 14664, 14284, 15051, 14003, 13397, 14495, 12613, 14793, 14362, 14119, 15236, 13528, 14588, 15181, + 15206, 12231, 15232, 15127, 10263, 14548, 11185, 14270, 13005, 13836, 14099, 14359, 14689, 14596, 14867, 13840, + 12913, 13221, 13894, 14318, 15051, 15218, 14563, 13545, 13479, 15163, 14488, 14474, 14496, 10305, 14667, 14648, + 15113, 15286, 14494, 15258, 14359, 14168, 15315, 15292, 15153, 15043, 12771, 15151, 15000, 15145, 15333, 13316, + 11218, 14287, 14294, 14737, 11264, 12475, 13644, 14426, 12943, 15189, 15110, 14948, 14125, 12981, 13352, 14344, + 13558, 14725, 14370, 11522, 10770, 14837, 12498, 10987, 14866, 11375, 14872, 13520, 13975, 15342, 10063, 13401, + 12832, 13053, 14679, 11445, 13710, 14067, 14392, 14849, 13653, 14044, 11615, 14536, 14839, 11670, 14621, 14665, + 15219, 15328, 12839, 15198, 13980, 13670, 15123, 14354, 13122, 13953, 12952, 14429, 9148, 12488, 14865, 8432, + 
13970, 13887, 14769, 14920, 12181, 11912, 14361, 14344, 13499, 15265, 15231, 14364, 14348, 14434, 11644, 14513, + 15260, 12562, 13745, 13847, 15285, 14886, 14835, 12560, 15091, 13212, 13283, 14310, 8736, 13952, 15255, 14429, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13489, 14371, 13429, 10795, 14648, 13521, 15119, 14889, 10852, 13553, 15132, 14346, 14377, 12509, 14691, 14674, + 14402, 13931, 10657, 13375, 12069, 15207, 14835, 15295, 14425, 15043, 14485, 13727, 13193, 14862, 14725, 15154, + 14928, 13142, 10650, 14051, 15101, 11768, 12872, 14471, 14982, 13565, 13410, 13889, 14751, 14395, 14830, 14478, + 11804, 15024, 10308, 13808, 14498, 12214, 14201, 15006, 13477, 14541, 12942, 10689, 12698, 13742, 12493, 14933, + 12877, 14400, 11777, 12884, 15360, 12079, 14716, 13502, 15065, 14696, 14507, 12664, 14759, 14524, 14277, 15025, + 13266, 14540, 14464, 14437, 12999, 15038, 14823, 15038, 14286, 14823, 8543, 14930, 14579, 15343, 14800, 14574, + 14604, 14857, 13978, 14450, 13554, 14413, 12047, 12336, 15099, 14511, 14199, 14912, 12302, 10864, 14807, 14759, + 14921, 15056, 13976, 14623, 14930, 15140, 15259, 14045, 14888, 13933, 9774, 8596, 15181, 14640, 13843, 14554, + 13736, 13380, 15274, 15318, 14467, 13392, 12175, 12186, 13736, 11811, 15066, 14872, 13412, 14133, 14971, 14982, + 14883, 880, 11277, 14525, 13318, 14398, 12689, 14418, 15081, 12759, 13098, 15278, 14847, 14215, 14382, 12400, + 13516, 14803, 14983, 7434, 11669, 13268, 13796, 13400, 14622, 12680, 14492, 14387, 14727, 14675, 14564, 14643, + 15069, 14404, 15184, 15220, 12768, 12162, 14623, 14826, 12561, 14578, 14672, 15016, 14016, 13073, 15205, 12949, + 15214, 12347, 14821, 15209, 14098, 13658, 14867, 14580, 14501, 14913, 14908, 13815, 14972, 10577, 12091, 14618, + 14525, 14119, 14229, 12753, 15064, 15094, 15060, 12041, 13657, 14670, 13099, 14056, 13327, 13707, 14901, 14783, + 7198, 13563, 15049, 14411, 13973, 13386, 14561, 14240, 15181, 13393, 15028, 11309, 14282, 15263, 12798, 12651, + 14647, 13625, 
10646, 13851, 15000, 14934, 15200, 13911, 14848, 14023, 13534, 9644, 14425, 15245, 15271, 13408, + 14953, 13936, 13599, 14724, 10460, 15280, 14474, 14444, 15113, 13258, 14585, 14490, 8741, 13861, 10194, 14907, + 10711, 14249, 12538, 15219, 15166, 13097, 14127, 15114, 12306, 13805, 14706, 14498, 14663, 9641, 14667, 14103, + 14636, 11821, 14486, 13561, 11296, 9304, 13058, 13549, 13859, 14445, 15223, 14921, 14473, 15250, 13458, 14927, + 14633, 12538, 13334, 13058, 15212, 13702, 14535, 14439, 14493, 12028, 13008, 13202, 14943, 11749, 14953, 14126, + 13612, 14791, 12119, 15252, 14916, 14958, 13954, 14652, 14302, 14097, 13789, 14451, 14366, 13292, 14952, 12402, + 14937, 14471, 14566, 15034, 15327, 15352, 15075, 15332, 13551, 14346, 15010, 13850, 14768, 13902, 9891, 14047, + 15343, 11185, 11756, 14894, 15017, 14035, 11474, 12837, 14372, 13445, 14956, 15185, 13971, 14397, 14669, 15195, + 14594, 15089, 13019, 14833, 14970, 14479, 14081, 15307, 14310, 15143, 14233, 12679, 12816, 12649, 15322, 13540, + 15131, 14682, 14535, 14505, 14921, 15131, 14792, 12499, 14670, 11528, 13983, 14354, 14872, 12550, 14703, 14390, + 10826, 14465, 14793, 14763, 14876, 15251, 12876, 15136, 6216, 14929, 15315, 14863, 15099, 15264, 11964, 12785, + 13405, 15295, 13412, 13712, 14464, 14545, 14750, 13972, 14511, 14496, 14863, 15071, 12137, 15061, 11529, 14817, + 14633, 11597, 14267, 13773, 14988, 11759, 13241, 12730, 14657, 14712, 14956, 11032, 12723, 14037, 14111, 15308, + 14267, 14666, 14835, 14113, 12373, 14832, 14717, 13795, 14244, 11425, 15350, 15330, 13270, 11445, 12240, 14384, + 13305, 12771, 14829, 14571, 14246, 15208, 14990, 15353, 14501, 14197, 14588, 11595, 11530, 14200, 14652, 14380, + 14543, 14543, 15308, 11600, 10077, 15199, 14147, 14783, 14347, 13013, 15085, 13390, 15107, 13863, 12379, 12221, + 14845, 14880, 13510, 13994, 14724, 14199, 14705, 13173, 13457, 10638, 14714, 13475, 14929, 15043, 15242, 14395, + 13913, 15250, 15224, 7807, 12613, 14739, 14591, 15101, 14492, 11326, 14674, 
12425, 13361, 12513, 14784, 14928, + 12809, 15108, 13073, 14664, 13586, 15083, 15180, 15226, 13553, 11308, 9458, 15150, 11835, 14090, 13969, 14309, + 14546, 12458, 14978, 12304, 15329, 15204, 15231, 15348, 13590, 14920, 14642, 14279, 14916, 3882, 15126, 13578, + 14459, 13155, 13450, 5086, 13662, 13917, 14622, 13085, 13836, 14430, 14881, 14438, 12765, 14895, 10827, 13796, + 13715, 14295, 12643, 11065, 15195, 14581, 14649, 14656, 12292, 11635, 15088, 15112, 14773, 14755, 14524, 12463, + 14828, 15085, 15322, 13331, 14898, 15325, 14053, 15063, 12820, 15167, 9357, 14803, 14567, 15174, 14902, 14005, + 13964, 9647, 14568, 13632, 14292, 14820, 15081, 11424, 10842, 14171, 14257, 15166, 13800, 15086, 12341, 9641, + 11972, 14842, 12201, 12296, 13574, 13659, 14002, 13977, 13807, 14834, 14763, 15143, 15109, 14712, 15197, 14715, + 14968, 13936, 14075, 13322, 14746, 14396, 14851, 15235, 14365, 15348, 14705, 14755, 14460, 14732, 13650, 15268, + 14904, 14372, 15219, 13535, 14665, 15029, 12551, 14047, 13937, 13207, 13312, 14690, 13941, 13395, 13779, 15056, + 9234, 14090, 14703, 15292, 12294, 14485, 13861, 10574, 15192, 9504, 13698, 14617, 13968, 14756, 13958, 14386, + 14745, 13701, 13968, 15103, 15077, 13781, 15117, 15228, 14714, 15116, 14963, 15184, 15177, 14404, 14946, 14885, + 14389, 13488, 13428, 14810, 11727, 14954, 14715, 15286, 14753, 14240, 15163, 14051, 14572, 15198, 14706, 14750, + 13530, 12371, 15351, 14676, 12948, 15213, 12946, 13970, 15137, 13842, 15220, 11776, 11630, 12836, 13852, 14060, + 14843, 14065, 12158, 14624, 14158, 13432, 14673, 14596, 12844, 15187, 14806, 13236, 15143, 15049, 14696, 14623, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14356, 14568, 15054, 14118, 14793, 13192, 15225, 13675, 14871, 14515, 10631, 12937, 14409, 13554, 14172, 14046, + 14458, 15200, 14374, 12857, 14115, 14793, 8266, 14010, 13424, 14593, 11343, 13726, 14672, 14398, 12292, 14662, + 15283, 13597, 15046, 15335, 11901, 14437, 14487, 13471, 11917, 13886, 13720, 9006, 13496, 14048, 
13326, 15193, + 14909, 12208, 11307, 15023, 13835, 15220, 12409, 14686, 11380, 13824, 10657, 13350, 13752, 6437, 14413, 13445, + 14646, 14591, 13665, 14659, 13676, 14243, 14273, 11800, 15175, 14974, 14290, 13574, 14942, 14982, 13076, 11796, + 14571, 12391, 14401, 11850, 10483, 14740, 14519, 13479, 13103, 14789, 13678, 13318, 13458, 15222, 15340, 13439, + 12908, 12797, 13014, 10724, 14750, 13746, 12647, 14551, 14099, 14427, 14385, 10800, 14102, 15214, 12400, 14742, + 14949, 15199, 13494, 13263, 14994, 13668, 14718, 14761, 15343, 11954, 13168, 14568, 14200, 15162, 11053, 14968, + 10330, 15298, 15247, 14500, 14539, 12316, 14669, 13319, 13025, 10134, 15070, 14807, 15300, 14434, 14266, 15336, + 15136, 8659, 14445, 12197, 13280, 15170, 14625, 14573, 13874, 14057, 8302, 13567, 14988, 15162, 14727, 14946, + 11268, 14714, 11984, 14489, 13375, 11398, 13910, 13218, 13802, 14801, 13368, 13574, 14609, 14235, 14431, 13344, + 13786, 8839, 14078, 14234, 14899, 12412, 10871, 14396, 13962, 14887, 14465, 14678, 15250, 15145, 12769, 15323, + 14741, 11921, 15160, 13788, 14419, 9040, 12862, 14003, 13409, 14405, 14909, 15327, 15083, 14901, 14903, 14994, + 15309, 11797, 14260, 15065, 13676, 13676, 14753, 14043, 13946, 14171, 13772, 14060, 11487, 15349, 14849, 15187, + 8246, 14506, 14458, 13942, 14980, 14983, 14375, 14430, 15187, 11949, 15163, 13381, 12503, 14038, 14191, 14212, + 12826, 13753, 13738, 13466, 12456, 14570, 14592, 14866, 8437, 13102, 13598, 11425, 13343, 14787, 14981, 11989, + 15301, 12782, 14724, 14323, 13981, 14909, 14986, 13624, 12102, 15176, 14826, 14955, 14111, 14923, 13308, 14454, + 15310, 15324, 11833, 14011, 15089, 11609, 14795, 15055, 14953, 12355, 13292, 15223, 14426, 14504, 14962, 14978, + 12448, 12624, 12443, 13719, 11301, 10250, 15070, 13358, 14030, 15305, 12067, 14853, 12896, 13775, 15114, 13894, + 13889, 12718, 12350, 13863, 14748, 14823, 14761, 15103, 15035, 14833, 14382, 14428, 14429, 9438, 14899, 14485, + 14234, 14455, 14095, 15017, 11942, 14355, 11842, 
14453, 13818, 12859, 15340, 14630, 9560, 14946, 13937, 14712, + 12065, 12916, 5343, 12232, 13684, 12876, 10666, 14890, 13724, 14603, 14461, 13131, 13788, 14778, 12023, 15180, + 14523, 9614, 12100, 14749, 6185, 13950, 13408, 12167, 14091, 14927, 14263, 14892, 13447, 10676, 15003, 15276, + 13145, 15305, 15081, 13858, 15150, 12637, 12561, 12834, 14769, 14104, 11577, 15188, 13512, 9377, 9772, 14687, + 13673, 13374, 14526, 15276, 14427, 13016, 14611, 12797, 13482, 13028, 15294, 12671, 13751, 15349, 14687, 11613, + 15256, 14832, 12888, 8359, 13471, 14788, 11591, 14018, 11660, 14811, 13806, 13351, 13543, 14206, 15021, 9627, + 14944, 15307, 10379, 12184, 15303, 14370, 14453, 13943, 11967, 12905, 14755, 14871, 14773, 13897, 15181, 14679, + 13677, 14982, 13401, 14638, 10470, 14340, 12804, 13131, 15217, 15260, 12826, 14353, 14691, 14325, 13578, 11569, + 6822, 14430, 14092, 13660, 14012, 11202, 14974, 12335, 15242, 14375, 14111, 15056, 13477, 14464, 15238, 14933, + 14959, 14505, 11822, 14963, 15258, 14391, 12631, 14619, 13425, 15353, 13314, 15177, 11593, 11680, 14054, 14097, + 15082, 11792, 14844, 13558, 12987, 13266, 12813, 14407, 14281, 14018, 14535, 14337, 14825, 14351, 14814, 14759, + 14336, 13557, 14836, 11283, 6917, 15031, 14491, 14600, 15216, 14769, 15016, 14497, 15100, 14377, 14394, 14845, + 14493, 15199, 14959, 13359, 15298, 12178, 13583, 15142, 14382, 10128, 9231, 13598, 15203, 13813, 13346, 14645, + 14451, 12732, 14181, 14942, 15194, 12253, 13412, 14725, 14878, 14204, 14865, 14726, 15349, 14468, 13302, 13316, + 12639, 13432, 13790, 14448, 13333, 15159, 11998, 11499, 14463, 14508, 11133, 10606, 15334, 13986, 14833, 12613, + 14787, 14064, 11805, 13561, 14596, 14494, 11448, 15256, 13902, 15226, 14417, 14644, 14857, 13535, 14069, 14707, + 12494, 14205, 15124, 14483, 15271, 14725, 14676, 14836, 13727, 14024, 14887, 14748, 14657, 14793, 13202, 12999, + 14901, 13808, 15337, 13871, 15131, 13321, 12860, 11228, 11159, 13333, 14963, 11886, 14198, 15130, 14988, 13151, + 14521, 
14860, 13132, 13696, 13806, 14328, 14657, 13692, 12688, 12437, 14654, 14555, 13410, 14849, 14478, 14708, + 13825, 14101, 9982, 9100, 15115, 11125, 14935, 14337, 15335, 15134, 15190, 12965, 12658, 14042, 13526, 13225, + 11704, 13933, 14342, 15004, 15062, 15156, 15313, 14884, 15180, 13325, 15176, 15010, 15164, 13613, 13118, 14770, + 14213, 7197, 13752, 11600, 14827, 12571, 15337, 15086, 15302, 15003, 14068, 13970, 14180, 14494, 15188, 13740, + 13468, 12805, 12307, 12642, 11181, 14003, 14822, 13081, 12420, 12449, 15022, 14784, 12677, 14648, 14705, 15317, + 8270, 12505, 10652, 15261, 14302, 15216, 14790, 12194, 13891, 14373, 15049, 15197, 13635, 14320, 13823, 13361, + 14639, 14649, 11203, 13859, 11782, 15310, 14537, 14322, 14315, 10355, 13645, 13202, 14318, 14229, 14429, 14594, + 14922, 14322, 14161, 14581, 14096, 15187, 14696, 15103, 13510, 12404, 14124, 14869, 14572, 14964, 14404, 14386, + 15171, 14316, 14786, 14861, 12776, 14146, 12500, 15264, 13458, 12713, 13048, 15148, 13283, 15274, 14858, 14474, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12502, 15015, 14075, 10714, 14799, 14965, 14773, 13342, 11650, 13781, 10519, 15314, 14645, 15169, 15292, 14816, + 9523, 14700, 14369, 14476, 12629, 14735, 14643, 12360, 14507, 14533, 14106, 7981, 15127, 14662, 14805, 15303, + 13964, 13560, 8326, 14863, 12863, 14256, 14696, 14806, 14651, 13579, 10041, 14193, 13356, 12177, 13593, 10508, + 12841, 14868, 13262, 14967, 12483, 15033, 12862, 14358, 14569, 15276, 12777, 14836, 13524, 14922, 14423, 14206, + 13492, 13416, 15020, 15272, 14242, 13589, 15327, 14573, 13737, 14856, 10947, 12421, 12219, 8339, 13710, 14303, + 14945, 13009, 15192, 14375, 8057, 15280, 14949, 14065, 11927, 12296, 12057, 11760, 13869, 14087, 13543, 13624, + 14867, 15099, 11924, 14606, 14828, 12833, 13791, 14820, 13773, 14778, 13803, 12758, 13918, 15090, 12061, 14526, + 10490, 14901, 13605, 12928, 15241, 14994, 14818, 13252, 14456, 13268, 15028, 13708, 14739, 14528, 12625, 15184, + 11438, 12852, 12621, 13898, 
14689, 14851, 14809, 11134, 13764, 14498, 12863, 13544, 14887, 15314, 12906, 15152, + 14806, 15235, 10564, 11080, 15238, 13277, 13785, 12518, 14204, 15302, 14852, 14816, 14625, 14815, 13573, 14359, + 13134, 11620, 14331, 14136, 11449, 14497, 14380, 14762, 12989, 14020, 14720, 14603, 13940, 15233, 10718, 15328, + 14788, 14404, 10286, 14254, 11062, 13473, 14282, 14943, 13396, 15100, 15318, 14948, 15053, 15076, 14809, 14635, + 15358, 14013, 12949, 14573, 14994, 15231, 15216, 9948, 14855, 12247, 14337, 14458, 15191, 13090, 13948, 14636, + 15085, 11852, 13433, 13370, 11486, 14383, 11689, 14403, 13362, 12745, 15251, 10508, 14470, 14261, 14842, 12476, + 13520, 14657, 14018, 14630, 13478, 14949, 14571, 13924, 14970, 12768, 13447, 13431, 12775, 14277, 13043, 12582, + 14507, 14376, 11543, 15270, 12016, 15350, 14577, 15119, 14151, 15074, 13851, 14370, 15100, 14571, 15169, 14186, + 13876, 14568, 14698, 11198, 15296, 13361, 14231, 10162, 14376, 14260, 14400, 14805, 13716, 15314, 15340, 13381, + 13797, 14671, 13746, 14973, 15077, 12319, 13396, 11379, 15188, 15166, 12780, 13339, 9166, 14739, 13857, 13582, + 12663, 14609, 13448, 14503, 13177, 13484, 12055, 13954, 15187, 14458, 14503, 15225, 9767, 15354, 13453, 13810, + 14889, 12667, 15264, 12412, 13704, 14821, 15136, 14708, 13152, 14269, 10472, 10381, 14201, 15353, 14028, 13973, + 11728, 13639, 14165, 13167, 11037, 14450, 15280, 14365, 13493, 14672, 14394, 13216, 13308, 13372, 14760, 14019, + 15213, 11616, 14166, 14447, 14552, 14650, 12832, 12690, 14392, 15039, 14448, 13938, 15173, 14873, 14381, 15089, + 15035, 14287, 14659, 13395, 15223, 14899, 14919, 15182, 14485, 15064, 14901, 14998, 15312, 14602, 14051, 14595, + 14629, 14804, 12524, 14391, 14145, 15001, 13707, 14019, 14448, 14576, 15242, 14448, 14880, 13895, 9550, 14626, + 15118, 14716, 13440, 14594, 13924, 15158, 12915, 13682, 14028, 10376, 14314, 15095, 13464, 15053, 14544, 13388, + 14879, 13691, 14825, 13495, 14681, 11485, 14989, 14474, 13738, 11881, 15252, 14379, 14257, 
13248, 15187, 14481, + 14594, 14912, 14494, 14865, 14457, 13605, 10779, 14727, 14304, 14600, 14394, 14783, 13527, 14638, 14645, 10368, + 14094, 14921, 14008, 15195, 15152, 14585, 13492, 13478, 11211, 15059, 13292, 13522, 15262, 14214, 12794, 11363, + 13888, 14688, 14000, 12424, 13853, 14643, 14606, 13974, 15164, 11420, 13965, 14002, 13182, 13855, 14417, 14111, + 14937, 15305, 12398, 9981, 11016, 15327, 6865, 13910, 14861, 14948, 13134, 13620, 13766, 12192, 12533, 15301, + 15140, 15011, 13773, 10423, 13174, 14934, 10007, 11644, 14710, 14889, 14263, 15084, 11438, 12507, 10467, 12729, + 13934, 14198, 14620, 15355, 14765, 15067, 15023, 15032, 13374, 15014, 9111, 15301, 13630, 13543, 15118, 9157, + 13566, 12777, 14894, 14831, 14336, 11704, 14826, 10753, 14625, 15021, 13886, 14104, 14938, 13697, 14292, 14846, + 13052, 15143, 14424, 14936, 13722, 15307, 15104, 15319, 14777, 15318, 13680, 15032, 15162, 11544, 14893, 14859, + 13537, 10422, 11428, 14378, 13823, 12577, 13899, 14792, 13846, 13454, 15166, 14996, 13706, 14920, 9593, 14192, + 14903, 13466, 15182, 14490, 12838, 14817, 14526, 14588, 14924, 15066, 14982, 14247, 10830, 15112, 13365, 15259, + 12614, 15181, 13703, 13464, 14987, 15102, 14555, 15315, 13090, 14549, 14923, 14900, 13966, 14200, 14502, 14636, + 11953, 10639, 14164, 15195, 12764, 13603, 9306, 14567, 14834, 14733, 15277, 12772, 9714, 13516, 15263, 14429, + 15213, 14007, 13823, 13900, 14238, 14867, 13541, 12585, 14416, 13318, 14616, 14207, 11097, 12144, 15234, 14847, + 10369, 13770, 15151, 13687, 13775, 14818, 13895, 14618, 12454, 12542, 13378, 14447, 14716, 15239, 14420, 12567, + 15295, 14377, 14335, 14003, 14990, 12929, 14782, 13097, 13348, 12356, 14436, 11399, 10458, 13903, 14391, 14974, + 11500, 14530, 15194, 13186, 14717, 14182, 13351, 13826, 12435, 13533, 14923, 14924, 14619, 13547, 14399, 14335, + 14154, 14686, 14367, 13703, 8219, 14848, 14083, 14500, 10819, 13587, 12988, 14308, 15030, 14456, 12012, 15337, + 15281, 12909, 10901, 14886, 14989, 14684, 
11837, 15240, 9911, 13157, 14258, 14431, 14937, 12468, 13560, 15173, + 11812, 14224, 9805, 14268, 11121, 13566, 13822, 14958, 14262, 14782, 12796, 13464, 12031, 13580, 15078, 13299, + 13993, 14394, 10428, 14458, 14012, 14359, 13129, 14215, 13168, 13828, 13693, 14346, 13718, 15289, 14614, 15135, + 15310, 15141, 14177, 11267, 11396, 15074, 15323, 15302, 15024, 15277, 14197, 14518, 14491, 13458, 13382, 12669, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15068, 14388, 15036, 14550, 13517, 13774, 13670, 14759, 14039, 15074, 14753, 11509, 14674, 14891, 14689, 15069, + 15128, 14417, 14701, 14696, 12835, 15259, 14501, 15271, 12854, 13483, 13219, 14528, 14730, 13377, 13840, 15250, + 15060, 11495, 14680, 13655, 11757, 12509, 14376, 14721, 13711, 12740, 15005, 15050, 13502, 14702, 13437, 12754, + 5487, 14184, 13779, 13260, 15177, 14896, 15260, 11971, 15170, 14492, 14629, 12025, 13519, 9482, 12802, 13513, + 13714, 11911, 13824, 13530, 14063, 13119, 14593, 5581, 12973, 13882, 10780, 15081, 15029, 12627, 10625, 14427, + 15070, 14745, 14877, 15185, 14770, 14735, 14066, 13344, 14412, 14551, 13413, 14177, 13518, 11424, 13955, 13553, + 15086, 15301, 11908, 12407, 9894, 13584, 13406, 12900, 10768, 14587, 15264, 14260, 14541, 13997, 15292, 13154, + 15251, 13577, 14028, 14458, 14414, 14393, 15081, 15252, 14300, 13040, 11484, 10394, 14601, 13654, 10263, 15285, + 8328, 12666, 12695, 12390, 12257, 13623, 14808, 15341, 13753, 15084, 13682, 13499, 14708, 13474, 14988, 15214, + 10352, 11649, 11500, 13527, 14771, 15031, 9987, 14347, 14219, 10521, 13479, 5464, 15126, 14546, 15104, 13970, + 13899, 13522, 10412, 13036, 14996, 15016, 15003, 15227, 14079, 12410, 12967, 10903, 14378, 15326, 13875, 14923, + 14137, 14941, 14381, 14617, 14905, 12353, 14663, 13319, 15328, 11083, 13689, 13841, 14179, 14689, 13348, 9637, + 14444, 15011, 14627, 14611, 14641, 14944, 13643, 14392, 15000, 13618, 11954, 12823, 13722, 15334, 13537, 14808, + 13781, 14662, 14450, 11559, 11140, 14758, 14804, 13967, 14818, 
14415, 15091, 15100, 15064, 11449, 14299, 15327, + 13809, 11687, 12677, 14410, 14101, 13365, 14320, 13026, 14497, 14896, 14894, 12172, 14583, 14726, 12824, 14491, + 15068, 14182, 14311, 14857, 13841, 15121, 14641, 12426, 14840, 14648, 13495, 12707, 14443, 14728, 12922, 15123, + 12856, 14973, 14885, 14775, 14426, 13193, 14576, 14384, 14394, 13407, 14751, 14231, 14890, 14615, 13824, 14394, + 10363, 14634, 12916, 13641, 13337, 14551, 14850, 13472, 15213, 13812, 14624, 14903, 15170, 14704, 13682, 15328, + 14643, 13244, 15104, 13624, 13262, 9648, 13807, 9173, 14325, 15097, 15346, 15304, 13838, 14990, 14753, 13769, + 11092, 14482, 12487, 14412, 12724, 14846, 14561, 13720, 13851, 15129, 14401, 13841, 13155, 15289, 15212, 15173, + 13235, 13686, 12439, 14865, 15197, 14153, 15099, 14111, 14310, 14385, 11164, 15226, 14561, 13560, 7261, 14350, + 14408, 14352, 13459, 13968, 14744, 14671, 12708, 15177, 14347, 13760, 14806, 15346, 11554, 14953, 14531, 14069, + 15189, 9511, 13361, 13746, 13912, 14010, 13120, 14383, 15287, 14657, 12399, 14882, 14720, 15339, 9335, 14513, + 13436, 13928, 12719, 13627, 14542, 14603, 15281, 14892, 13933, 13650, 12768, 14219, 12568, 14374, 14446, 14582, + 8483, 9459, 14605, 14572, 13915, 13492, 12429, 12522, 14077, 11208, 15055, 12501, 15073, 14346, 14923, 13391, + 15118, 14468, 14755, 14630, 14371, 13525, 15201, 9901, 14396, 12503, 13484, 14176, 14453, 14457, 14759, 14647, + 11631, 13595, 14239, 11829, 13028, 14253, 14909, 15057, 13788, 11097, 14617, 13893, 14056, 14356, 14691, 14192, + 14484, 14362, 15210, 15219, 14593, 12228, 13660, 14737, 13656, 11988, 14355, 14385, 14708, 14983, 14862, 14890, + 14558, 14885, 13400, 14124, 12464, 12502, 14617, 14781, 13986, 12389, 12313, 14733, 13609, 13669, 14850, 13121, + 15341, 15222, 15005, 13538, 14931, 14612, 13392, 13600, 14004, 13906, 14424, 14032, 14677, 12807, 13353, 14972, + 11924, 11612, 13491, 15276, 14686, 14449, 13707, 14913, 15126, 14324, 14815, 14848, 14871, 13463, 15105, 14489, + 14223, 14355, 
14017, 14882, 13783, 14926, 13736, 13574, 12747, 15152, 15168, 15218, 13378, 15216, 13144, 13116, + 13364, 13789, 14910, 12360, 12877, 14178, 14595, 15043, 12677, 14522, 15237, 13468, 13542, 14794, 13270, 11305, + 11941, 10362, 12095, 15234, 12380, 13068, 14730, 14926, 15192, 12305, 13834, 15351, 15022, 14357, 15222, 14292, + 14487, 14809, 11920, 13515, 14588, 14837, 15193, 13861, 12398, 10373, 14496, 13911, 15063, 13135, 9449, 13874, + 14574, 15130, 13151, 14449, 14495, 14524, 14647, 15242, 12627, 11091, 14066, 14425, 12890, 15065, 12951, 15110, + 13544, 14418, 12707, 14715, 13109, 12536, 14851, 11026, 14722, 14829, 13581, 14866, 14535, 14388, 13398, 14338, + 15269, 14073, 14757, 9820, 13321, 12428, 14306, 14996, 15261, 11819, 15092, 13563, 15218, 13539, 15131, 14551, + 14172, 8133, 15261, 14178, 10698, 14925, 4437, 12320, 14701, 12706, 12856, 13922, 11153, 14876, 13806, 14693, + 12445, 14650, 11291, 13067, 12889, 15098, 13684, 14325, 8355, 15106, 15268, 14996, 15245, 15208, 12188, 14728, + 14717, 15351, 12581, 14158, 13653, 15359, 14801, 15356, 14475, 14550, 13934, 13820, 14404, 14351, 13652, 14211, + 14765, 15082, 13505, 14851, 13061, 11961, 13414, 9917, 15013, 11676, 14170, 14191, 15130, 15083, 12954, 14954, + 15188, 11600, 14372, 14978, 15053, 13542, 13800, 13857, 15279, 14762, 13347, 15287, 15175, 14169, 13910, 13657, + 14632, 8960, 14647, 13252, 488, 14342, 14942, 14219, 13528, 14331, 13423, 15063, 14367, 15145, 11741, 14617, + 14452, 14525, 13066, 14157, 9678, 14524, 11626, 13476, 13382, 15089, 13705, 15084, 14561, 9370, 14596, 14788, + 15160, 14472, 12799, 13308, 13945, 15031, 15027, 14398, 14729, 11112, 14959, 14987, 15268, 10725, 14657, 14406, + 10320, 14800, 14427, 13264, 14703, 15352, 15014, 14590, 14936, 10120, 14171, 9748, 14827, 14843, 14754, 14491, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15271, 14159, 14928, 14469, 13531, 15254, 14758, 14649, 14309, 13520, 14595, 14711, 11445, 14751, 13502, 12249, + 15263, 14366, 10931, 13344, 15205, 
14407, 15223, 9522, 13106, 14750, 14695, 14844, 14962, 14562, 14611, 13558, + 14951, 15092, 14640, 8689, 13559, 14113, 15169, 12717, 12329, 7998, 14713, 12258, 12190, 15041, 9227, 15309, + 15328, 13844, 14849, 12225, 13963, 14536, 10894, 15061, 13044, 15131, 14655, 12406, 13345, 15052, 12543, 15313, + 13911, 14225, 13734, 11454, 13764, 14817, 14692, 11586, 11142, 14698, 14208, 15305, 15208, 13886, 12396, 15358, + 14998, 12006, 14536, 12240, 12389, 11790, 11596, 11684, 15087, 14799, 12300, 14267, 10431, 15036, 14042, 15082, + 11542, 14918, 15327, 14596, 14758, 14394, 13846, 14161, 13848, 14776, 12421, 13546, 14990, 13753, 11708, 14868, + 12180, 15226, 14698, 15260, 13526, 11227, 15066, 15044, 14761, 14565, 14998, 14421, 10582, 15040, 14465, 14993, + 14822, 13438, 14471, 12922, 15222, 15147, 13842, 12813, 13360, 14338, 13709, 14817, 14454, 14315, 11481, 10613, + 13322, 12685, 15318, 13464, 15340, 15356, 11404, 13018, 15307, 13930, 10409, 14910, 10961, 13616, 10828, 14391, + 15137, 8376, 14673, 14543, 13065, 12007, 14606, 12973, 14869, 13450, 12684, 15127, 14265, 13926, 15116, 14508, + 15090, 12036, 14555, 12451, 13427, 14067, 15316, 14698, 11102, 13551, 14475, 14956, 11411, 11838, 15266, 13472, + 9782, 15062, 14893, 13610, 14689, 12983, 13375, 14990, 14916, 14668, 8785, 13677, 13652, 14695, 14614, 14670, + 14971, 15306, 13600, 12330, 13566, 14810, 13484, 12903, 14512, 13106, 14667, 13406, 14561, 12622, 14847, 14707, + 14741, 15210, 14267, 10511, 11851, 14235, 12568, 11889, 12419, 11174, 15312, 12874, 14717, 15199, 15225, 10412, + 14528, 14792, 13327, 14544, 14426, 14863, 15296, 13669, 14351, 14529, 15317, 15244, 13653, 14729, 11416, 13884, + 14457, 14989, 15121, 15073, 13086, 13646, 14680, 10549, 14296, 13591, 15306, 12492, 14414, 15124, 13976, 11672, + 14645, 15033, 14604, 11880, 14347, 13020, 15241, 12182, 14674, 13246, 9687, 14473, 14389, 13905, 14832, 14537, + 11365, 14562, 15153, 14536, 14404, 15262, 15157, 12524, 9985, 15171, 13763, 12906, 13902, 12556, 15066, 
14041, + 14422, 14838, 15126, 14545, 13789, 15131, 15182, 15356, 14253, 14535, 14961, 13443, 14676, 12302, 10634, 14661, + 13488, 12668, 14943, 13422, 13908, 14339, 14387, 14661, 11794, 14233, 14691, 15172, 12659, 13702, 14400, 10731, + 13568, 14894, 14557, 14817, 12863, 12319, 13636, 15294, 13592, 14656, 13153, 14467, 13691, 14504, 14411, 14382, + 14727, 14056, 12463, 15020, 13255, 14990, 14862, 13458, 14152, 14286, 15229, 13359, 13024, 15121, 15253, 14062, + 15117, 15340, 15215, 14653, 13264, 11515, 12757, 11028, 14825, 14529, 13711, 14653, 11659, 15147, 14795, 14442, + 14279, 15032, 14480, 15294, 14163, 14812, 13529, 14837, 13907, 14293, 15112, 15234, 14118, 15132, 14474, 15274, + 14877, 10400, 12027, 15092, 15029, 13664, 12663, 15082, 14887, 14684, 11353, 14492, 12606, 11057, 11597, 13480, + 14837, 15215, 15141, 15221, 11294, 13359, 15336, 14945, 12451, 15231, 13314, 14976, 13846, 15001, 14523, 12749, + 14485, 14179, 7905, 12289, 14772, 13931, 15241, 14369, 11991, 13815, 14640, 15251, 13877, 12265, 14347, 9619, + 12349, 14583, 15284, 14547, 15307, 13132, 14726, 11829, 15322, 15334, 12347, 13458, 14849, 15056, 13489, 14884, + 14348, 13473, 14405, 13884, 15119, 15330, 14980, 11404, 15222, 13810, 14606, 14668, 12421, 13357, 14907, 14629, + 10731, 14940, 14792, 8813, 13559, 14122, 12396, 14424, 13334, 14066, 14352, 14263, 14844, 14406, 14823, 13352, + 15212, 14635, 14603, 14935, 15164, 13699, 13679, 14357, 13545, 15138, 14785, 15047, 11375, 15328, 13046, 15158, + 14105, 14718, 13488, 14965, 14221, 14131, 15283, 14435, 14563, 14874, 14879, 9996, 13712, 11747, 9837, 7724, + 15320, 14784, 11833, 14611, 13876, 15295, 14616, 12784, 14821, 10611, 12444, 8807, 9539, 14402, 15056, 13326, + 14552, 14584, 11816, 14773, 12933, 15087, 14507, 13582, 14621, 13998, 12834, 13803, 14695, 15263, 14046, 14538, + 12035, 15345, 10189, 13628, 15134, 14512, 13852, 14392, 14553, 10581, 12793, 15269, 13414, 14950, 14050, 15304, + 9232, 14461, 15162, 13852, 14165, 15135, 14693, 12737, 
15163, 15205, 12923, 14812, 15205, 13657, 14784, 13029, + 14954, 11431, 15254, 13434, 12726, 12768, 14650, 14204, 13090, 14504, 15225, 14465, 14330, 15152, 15189, 14113, + 15140, 14659, 14566, 13550, 14529, 12064, 14477, 15017, 15208, 14140, 14400, 10338, 11329, 14903, 14434, 14694, + 14992, 15118, 15246, 11992, 14485, 15283, 15265, 15337, 13635, 15037, 14903, 13810, 15268, 13980, 13957, 15034, + 14590, 11298, 15071, 10915, 13804, 14976, 14968, 13358, 11411, 14910, 13742, 14352, 11608, 14895, 13194, 14622, + 12854, 14514, 13840, 15324, 15352, 12577, 9778, 15063, 14925, 9814, 14622, 13318, 14673, 14451, 11468, 14014, + 12775, 13086, 14887, 15011, 12748, 12420, 15215, 15115, 13904, 12540, 14893, 14758, 14592, 14209, 14432, 15249, + 13903, 14428, 14033, 13838, 15050, 14118, 13970, 15184, 10423, 15160, 13585, 12139, 13459, 14580, 13786, 14685, + 14907, 14834, 13736, 15023, 15270, 15331, 13434, 11863, 10794, 14513, 14162, 15294, 13619, 13105, 14596, 15346, + 14795, 14396, 14898, 15093, 13816, 14634, 12491, 11343, 14697, 15115, 14063, 14334, 14533, 14989, 13847, 15298, + 13531, 15087, 13769, 14061, 12864, 13783, 14711, 3128, 13910, 14096, 15247, 15332, 14524, 10282, 13824, 14876, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14267, 14559, 12938, 11235, 15040, 14008, 13054, 14899, 13868, 14200, 12871, 14579, 11360, 14472, 15187, 12811, + 14296, 14662, 14416, 14897, 14072, 13732, 13219, 14712, 13773, 15303, 14831, 15181, 12371, 13425, 14423, 14643, + 13554, 13665, 13967, 14988, 9395, 14311, 11860, 14367, 15199, 15281, 15081, 12904, 12333, 13324, 15049, 14756, + 13708, 13840, 13597, 13513, 15028, 11522, 11796, 12742, 12404, 14619, 15133, 14678, 14665, 15263, 11485, 14143, + 13533, 13535, 13827, 13556, 14623, 12520, 14283, 13134, 12350, 14405, 13779, 14991, 14451, 8623, 12594, 13881, + 14935, 14721, 12415, 13678, 13062, 15218, 14796, 14697, 15234, 11437, 12415, 15255, 14746, 14405, 14116, 13587, + 14791, 15079, 15132, 15043, 14932, 14174, 14427, 14478, 14432, 13915, 
12632, 15147, 14941, 13322, 15091, 11733, + 10406, 14575, 14755, 14050, 12487, 14425, 13868, 14801, 11915, 14682, 15020, 14004, 14814, 12514, 14440, 14984, + 14418, 14963, 14538, 13796, 13070, 14326, 14647, 14547, 13288, 14275, 11200, 11023, 14293, 14486, 14323, 14527, + 14107, 15254, 14277, 14946, 14951, 15216, 14959, 12864, 13848, 14256, 13829, 14151, 12795, 15242, 12476, 13372, + 10288, 14729, 13756, 13176, 13833, 12578, 13748, 15292, 15000, 14459, 14979, 9495, 11673, 14357, 13588, 14892, + 10558, 14088, 14137, 13445, 14417, 15227, 12832, 14942, 14379, 15177, 13799, 13617, 12312, 15297, 10737, 12561, + 15215, 14752, 9417, 14101, 15282, 13467, 14978, 14905, 10558, 14852, 14602, 11629, 12763, 15230, 12400, 14482, + 14529, 15086, 14786, 13040, 12928, 14996, 15189, 15087, 13792, 14999, 14481, 15070, 15304, 14578, 14797, 13884, + 14925, 14194, 14685, 13095, 15300, 14833, 14725, 12861, 13511, 14702, 14689, 15039, 14938, 14450, 14817, 15299, + 13944, 14624, 12904, 14430, 15307, 14565, 11698, 14985, 15358, 13975, 14644, 14533, 13726, 14888, 15231, 11248, + 13867, 14943, 14869, 13680, 6530, 12594, 12275, 15198, 14503, 13517, 15272, 13446, 14854, 14598, 13713, 12712, + 15355, 14077, 14838, 13719, 14693, 15176, 14827, 14866, 13864, 13810, 14690, 15344, 13990, 14645, 13427, 13238, + 9219, 12921, 14490, 15008, 14786, 15169, 15119, 13909, 11564, 13954, 14136, 14561, 14314, 12761, 14349, 15241, + 15236, 14353, 13757, 14825, 12433, 15070, 14748, 14348, 15000, 12766, 11395, 8538, 15250, 13102, 12706, 15067, + 14208, 14560, 13792, 14064, 15230, 14704, 14902, 14458, 11536, 13620, 11837, 14711, 8614, 15092, 14806, 15036, + 15199, 10307, 12172, 12982, 10134, 15170, 15221, 12126, 14563, 13660, 14865, 14732, 14682, 15312, 11278, 14522, + 14758, 15183, 14206, 11072, 14762, 13824, 15199, 15279, 14700, 14667, 14689, 13732, 12883, 14430, 14900, 14054, + 14618, 14564, 14679, 15183, 14823, 11076, 10250, 14993, 13868, 13511, 14251, 14455, 15198, 12320, 12293, 12950, + 14683, 15197, 11719, 
14343, 15104, 14890, 13016, 12443, 14448, 10081, 15215, 14735, 10247, 15235, 10174, 13732, + 14289, 14355, 15016, 14987, 14908, 14627, 13315, 11882, 13735, 14620, 14740, 12675, 14642, 14728, 14246, 14005, + 14167, 14669, 14680, 14687, 15305, 14983, 13817, 14152, 14414, 14844, 15079, 13627, 15081, 8258, 12848, 12742, + 12079, 14425, 14811, 14830, 14673, 14693, 14427, 15214, 15001, 14351, 12161, 15211, 14389, 14171, 14102, 13505, + 13336, 13703, 14191, 13627, 13774, 12817, 12269, 11490, 15186, 12743, 14806, 13349, 14707, 15086, 15176, 11987, + 12245, 15316, 14161, 11943, 13363, 14083, 14673, 11935, 14845, 13372, 13894, 14582, 14863, 15335, 9436, 13178, + 11701, 14604, 15218, 14611, 13131, 14794, 13744, 13054, 11506, 13787, 12066, 14899, 13353, 14893, 15116, 14769, + 14432, 13204, 14319, 13803, 13428, 14375, 13917, 14632, 14754, 15274, 14401, 13391, 14657, 14366, 14769, 15121, + 13211, 14833, 10342, 15193, 14372, 14999, 14069, 13162, 14929, 14280, 14766, 14491, 14463, 15030, 14590, 15121, + 15313, 15083, 15185, 13728, 13650, 14611, 13800, 14472, 11794, 15211, 14146, 14416, 13870, 15165, 14281, 15029, + 13896, 12929, 15038, 14711, 14777, 15314, 13803, 15200, 13234, 13085, 14497, 14715, 14877, 15085, 14258, 15051, + 12759, 11465, 14415, 12537, 12366, 10252, 14983, 9355, 14566, 12469, 15250, 13336, 14714, 14616, 13160, 12636, + 15175, 12702, 14403, 15097, 15025, 13584, 14062, 9112, 14305, 11697, 14089, 15225, 13628, 9070, 14664, 13924, + 9915, 14823, 15136, 11908, 13366, 13587, 12491, 12256, 10508, 12091, 14353, 14058, 11608, 11500, 10172, 14568, + 12641, 14376, 13858, 14292, 11466, 13970, 11819, 13831, 15005, 15284, 15289, 9023, 15261, 11443, 14693, 14827, + 15168, 14556, 13253, 15167, 14524, 14676, 13529, 14021, 14733, 13588, 14628, 13865, 13189, 13628, 11227, 14572, + 14389, 12402, 13873, 15083, 14840, 13677, 13670, 12411, 14647, 14527, 12589, 15217, 10856, 4585, 14678, 13805, + 14402, 14791, 12498, 15306, 14721, 13523, 14367, 15321, 12972, 13760, 11885, 14662, 14453, 
14488, 14296, 13030, + 14854, 14632, 14942, 14348, 15245, 13610, 14986, 14924, 11255, 14974, 15235, 15063, 14872, 14644, 15311, 15166, + 11735, 14447, 15137, 14374, 13469, 14250, 14444, 15026, 14088, 15279, 12925, 14723, 15092, 13799, 14720, 15181, + 14490, 14466, 14171, 14371, 15139, 8164, 14790, 13410, 15301, 15035, 14993, 13997, 14621, 14540, 15091, 12909, + 14381, 14895, 14181, 14573, 14743, 15049, 14602, 14383, 9731, 14465, 13626, 14173, 14238, 12711, 14435, 15240, + 15123, 14257, 14961, 14759, 14509, 13088, 15257, 8596, 14361, 13339, 9470, 15091, 15298, 13843, 15055, 14544, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14967, 13525, 11406, 12462, 13379, 15202, 14546, 12567, 12600, 13527, 13624, 13588, 12674, 14054, 14174, 12245, + 12820, 13967, 13908, 14355, 14908, 14787, 13573, 9797, 13655, 13896, 14398, 15178, 12365, 13714, 14117, 13273, + 14397, 14444, 14549, 13602, 14043, 12812, 15185, 14442, 15344, 14150, 13876, 15249, 15120, 13553, 6974, 14936, + 13362, 6415, 13229, 8538, 14488, 14359, 13672, 14461, 13446, 11288, 12544, 13768, 15151, 14841, 15284, 15353, + 14894, 13885, 12130, 14945, 14332, 12846, 12178, 15212, 10367, 14562, 14557, 13431, 14697, 12508, 12294, 13614, + 12326, 15119, 13888, 14050, 10550, 13505, 14952, 11220, 13723, 12476, 9926, 14260, 14886, 13486, 14134, 12825, + 15314, 12683, 9535, 12953, 14709, 13665, 13774, 12530, 13837, 9333, 14162, 14550, 12577, 12294, 13907, 14104, + 14473, 15252, 11493, 14219, 11877, 12384, 12134, 14336, 14875, 14050, 12797, 12639, 15093, 11981, 14823, 13912, + 14709, 13395, 12853, 14863, 12829, 15228, 14345, 13015, 11211, 14572, 14454, 13755, 13473, 13344, 14356, 12470, + 15302, 14821, 14372, 13603, 9280, 13337, 14834, 13379, 14537, 15300, 12728, 14463, 14989, 12688, 15149, 14435, + 14672, 14881, 14443, 12865, 6776, 13287, 14954, 14609, 12863, 13479, 15338, 13971, 15231, 14847, 14845, 14806, + 13499, 12316, 15340, 13567, 15268, 13552, 13422, 13786, 15017, 10928, 15127, 14497, 12454, 14644, 15282, 15093, + 
12535, 13708, 14876, 11887, 14677, 13500, 15215, 13386, 14408, 15190, 10281, 12868, 14252, 10947, 11821, 14566, + 13612, 15215, 14813, 13396, 14353, 14712, 13909, 14611, 10342, 14116, 15276, 15066, 12386, 14769, 13797, 14343, + 12454, 14876, 13595, 15319, 11286, 13758, 13424, 11689, 14846, 15330, 10542, 12363, 13479, 14926, 13831, 13746, + 15032, 13402, 11475, 15194, 12534, 14950, 14504, 15041, 15080, 15168, 14091, 14212, 13479, 14850, 15096, 14750, + 14790, 13457, 14472, 14852, 14090, 15087, 13729, 14419, 13590, 14484, 14789, 14613, 15243, 14657, 11385, 14639, + 15245, 14060, 12857, 14536, 15226, 13554, 13921, 15094, 12453, 14404, 14389, 15129, 9455, 9510, 14150, 14374, + 14763, 13392, 13588, 14936, 15323, 14864, 11745, 14421, 15124, 13847, 15153, 11521, 15217, 15258, 13476, 14161, + 13858, 15207, 10980, 9612, 12786, 14965, 14678, 13538, 15270, 13693, 14655, 14446, 15217, 13654, 14995, 14783, + 14015, 15030, 14634, 13746, 13390, 14978, 13498, 15338, 15101, 13301, 14658, 14070, 14664, 15123, 14408, 7229, + 15020, 14607, 14576, 13993, 15245, 14776, 14345, 14415, 13224, 13079, 14373, 11329, 9015, 14757, 14837, 8053, + 12905, 13826, 14991, 13991, 14419, 12600, 13637, 12741, 13422, 14670, 15074, 15132, 14953, 12764, 13164, 13045, + 13625, 13630, 15349, 14446, 13627, 14634, 13495, 14932, 14348, 14683, 10670, 11906, 14145, 13660, 13560, 15248, + 14863, 13218, 11651, 14947, 14941, 14685, 14795, 12936, 11344, 12812, 13986, 14590, 15190, 13431, 15120, 14923, + 14526, 14047, 14337, 15110, 15226, 11818, 7365, 14801, 14982, 10642, 13812, 13733, 14908, 13829, 13624, 14501, + 12307, 14024, 14665, 14523, 13854, 13502, 15260, 15291, 13922, 14010, 14841, 10306, 13498, 14739, 14786, 15341, + 15358, 14430, 14737, 12920, 14610, 15173, 15191, 15111, 11722, 13727, 14691, 11695, 15215, 14696, 14691, 15306, + 14574, 11878, 15156, 14231, 8666, 14537, 14937, 14808, 15268, 14993, 14725, 14412, 11125, 14316, 12070, 14955, + 14811, 14734, 11807, 14608, 12523, 14730, 15220, 11586, 12400, 15233, 
14503, 12723, 15069, 13447, 14904, 14963, + 15264, 14383, 11741, 14368, 15320, 9306, 15280, 15132, 12539, 13686, 15067, 13628, 14042, 12819, 14564, 12735, + 13361, 15047, 14437, 10782, 15162, 13559, 13366, 14658, 8847, 15005, 15174, 15201, 14501, 13811, 14619, 13954, + 14687, 14988, 12903, 14975, 15173, 14590, 11495, 14557, 14357, 14707, 14356, 14266, 13671, 14663, 15138, 14949, + 15206, 14702, 13801, 14757, 15180, 14721, 12171, 13864, 15044, 14701, 15062, 14262, 14907, 12700, 9982, 14659, + 11792, 14740, 13266, 14038, 13845, 12916, 13973, 10442, 13324, 13251, 14406, 10804, 14370, 8846, 12457, 13900, + 13353, 13972, 14785, 14818, 14474, 9251, 14492, 11873, 15344, 13048, 14831, 11677, 14509, 14562, 13807, 13556, + 15109, 14740, 15056, 15347, 13978, 15298, 13213, 10694, 10506, 13247, 15064, 14592, 14833, 13627, 13605, 14201, + 14592, 14943, 14400, 8254, 14526, 14773, 13367, 14470, 15252, 14925, 14397, 14370, 13526, 12566, 13532, 14449, + 13608, 14483, 15092, 14706, 14561, 14863, 13024, 12337, 15164, 14738, 13930, 14732, 13823, 12509, 12458, 15132, + 11991, 8978, 13890, 12817, 14843, 9089, 13504, 14977, 14177, 14092, 12408, 14458, 14714, 14256, 15185, 15050, + 14778, 13601, 14833, 14925, 15167, 14620, 10058, 13320, 14996, 14262, 13229, 9051, 10898, 12830, 14069, 14130, + 14519, 12226, 14835, 12729, 14835, 13917, 14584, 13493, 14392, 14050, 14450, 14444, 14392, 11695, 8538, 11604, + 14687, 14815, 14003, 14135, 13874, 14492, 15000, 15224, 13331, 15057, 11582, 11351, 15063, 14486, 12716, 14780, + 10735, 15126, 14685, 13831, 12713, 15215, 14338, 14591, 15031, 14281, 12878, 15091, 14244, 13367, 12537, 14882, + 12592, 11037, 15352, 12480, 14941, 12240, 12321, 13431, 14373, 13714, 14547, 13942, 15025, 14777, 14399, 13829, + 15129, 14593, 14604, 14527, 10472, 13856, 13543, 14898, 13033, 14186, 14293, 14282, 12832, 15075, 14888, 10299, + 14221, 13444, 13575, 14848, 15249, 12531, 13784, 12564, 15285, 14923, 13676, 14754, 13839, 12582, 12272, 14997, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 14276, 15037, 12681, 13714, 14702, 15049, 14755, 14832, 14690, 14974, 14934, 14687, 13421, 14851, 13243, 13857, + 14113, 15120, 14383, 11589, 15139, 13811, 11836, 14362, 15193, 12348, 15256, 14710, 12417, 11878, 14560, 14433, + 15354, 15230, 14417, 15342, 14815, 14904, 14620, 15112, 13800, 15102, 14431, 8754, 14877, 14708, 15074, 12955, + 14988, 11844, 12336, 13927, 14400, 11817, 15218, 14008, 14202, 14903, 15211, 13893, 14856, 15300, 14735, 13287, + 13391, 14965, 12532, 13267, 13568, 14199, 13958, 13368, 15157, 14703, 13640, 13338, 13692, 13993, 12577, 14877, + 15273, 15326, 14516, 13905, 12727, 13847, 12989, 14359, 14795, 14377, 15234, 15155, 13457, 12675, 14009, 14892, + 15326, 14402, 13116, 11456, 15148, 14599, 12829, 14557, 14988, 14870, 14641, 9929, 14992, 13366, 14516, 14661, + 12575, 14262, 15258, 13774, 15203, 14996, 13557, 12605, 15289, 15322, 15326, 10658, 14716, 15037, 13668, 15051, + 12090, 13732, 14363, 14888, 15178, 14665, 10392, 14231, 12914, 12739, 14609, 14357, 13877, 14379, 14752, 14544, + 11843, 14415, 14925, 14062, 12410, 14686, 14191, 15325, 12083, 13497, 14273, 12815, 9291, 14856, 12693, 15112, + 15352, 13058, 14088, 14003, 13459, 14503, 13916, 15100, 13789, 14593, 13504, 13023, 14908, 13703, 14463, 10729, + 14817, 12454, 14761, 11343, 14026, 10288, 14352, 14238, 11450, 12426, 12543, 10557, 13741, 15181, 13849, 9498, + 15236, 13852, 14939, 15297, 15140, 14204, 15299, 13872, 13846, 14269, 15343, 12851, 13339, 14088, 15286, 15089, + 12395, 14558, 13405, 15263, 14849, 14739, 13625, 14602, 14621, 15166, 14368, 14374, 13449, 14119, 14428, 6388, + 15351, 14987, 13720, 14618, 15052, 14683, 13561, 15130, 14738, 14728, 14945, 14093, 15017, 14791, 14182, 15257, + 12605, 15102, 14497, 14543, 10375, 12994, 13223, 15301, 14174, 10574, 14430, 15167, 11706, 13020, 15001, 14300, + 15161, 14037, 10872, 10524, 15151, 10492, 14422, 12158, 14061, 12572, 13519, 15270, 14049, 15072, 12621, 14726, + 11721, 14891, 11921, 13262, 14597, 14625, 
12322, 15011, 14761, 14660, 14556, 12773, 14224, 14936, 15018, 12454, + 12692, 9883, 12967, 12175, 15117, 11604, 12026, 15231, 9959, 14567, 14786, 13335, 14877, 11780, 14438, 14476, + 13591, 14691, 14114, 15096, 14783, 15259, 14963, 13467, 14888, 14929, 12600, 14718, 14729, 14997, 13721, 15215, + 15281, 15137, 13572, 12410, 14435, 14375, 13352, 14157, 14619, 13330, 14368, 11237, 15143, 13881, 14458, 15094, + 12443, 14931, 13698, 14789, 13534, 14577, 12511, 14855, 13744, 9836, 15204, 14729, 14225, 14034, 12403, 14684, + 14426, 14776, 15350, 15196, 13704, 14183, 13112, 11924, 14597, 12072, 15270, 13329, 12749, 15101, 14580, 13820, + 11772, 14001, 13717, 14163, 13688, 14842, 14479, 13868, 13426, 14527, 11126, 13444, 14723, 12699, 14681, 14335, + 13851, 14350, 14353, 14095, 14753, 14686, 15130, 15133, 12568, 13022, 14766, 13902, 12657, 14706, 14559, 13704, + 14355, 13612, 14214, 14314, 15105, 14789, 15242, 13951, 14503, 11167, 11847, 14809, 14301, 15040, 11668, 14915, + 10726, 15226, 14287, 13477, 13397, 14523, 14463, 14682, 15167, 11077, 14593, 12282, 15343, 15233, 14942, 14479, + 15323, 14038, 13622, 7411, 14613, 13525, 12301, 13823, 13491, 15013, 14189, 13905, 12743, 14568, 14436, 14382, + 15254, 14591, 13630, 15059, 11410, 13880, 12675, 14580, 12789, 15036, 12628, 11499, 14778, 15108, 12180, 15145, + 14334, 12744, 14398, 13797, 13635, 12609, 11290, 14956, 14501, 14382, 14843, 13500, 14409, 12038, 15213, 14552, + 15310, 14674, 15070, 11515, 5758, 14836, 14718, 10694, 14993, 15079, 15137, 14398, 14744, 15088, 15358, 14640, + 14306, 12673, 13897, 13774, 14934, 15219, 13564, 14554, 15135, 12369, 15178, 14899, 14902, 15253, 15153, 15122, + 15233, 15330, 10540, 15086, 14781, 11991, 13279, 14486, 15238, 13610, 15123, 13873, 13100, 13273, 12486, 14365, + 13633, 14513, 14136, 10699, 13534, 14978, 12493, 13070, 12899, 10483, 12934, 13994, 14159, 14528, 14546, 14320, + 14688, 10597, 13485, 14520, 14560, 15105, 15069, 11968, 12935, 10608, 14364, 15191, 13389, 13996, 13011, 
11483, + 14781, 15330, 13483, 14991, 13889, 14023, 14611, 12866, 14107, 14960, 14734, 14419, 13069, 14825, 14087, 15292, + 13169, 13147, 12448, 14369, 13880, 15058, 9861, 13687, 12301, 14812, 14266, 13336, 12557, 5976, 14864, 14472, + 14054, 10887, 14762, 11591, 14847, 13521, 14057, 14232, 11993, 15278, 14650, 14505, 15046, 12735, 15132, 15169, + 14846, 15214, 15304, 9291, 15039, 15226, 14902, 11072, 13853, 14605, 13647, 14796, 12236, 14899, 14676, 12787, + 13967, 14576, 13768, 11718, 14764, 15204, 14851, 13617, 14876, 15252, 13476, 8447, 14049, 14832, 14564, 14430, + 14769, 10706, 12014, 12680, 14970, 14064, 14617, 12997, 14494, 14054, 14888, 13812, 14047, 14438, 12745, 13898, + 12972, 13532, 14767, 14114, 14338, 14376, 13487, 14892, 14424, 13667, 14824, 11870, 15236, 14691, 14669, 13424, + 14973, 14960, 13125, 14214, 14031, 13586, 12662, 14327, 14347, 14513, 12081, 15336, 13692, 14990, 13184, 14963, + 14842, 11714, 13896, 14446, 15079, 11531, 15065, 13950, 14525, 12845, 15037, 13118, 12511, 15142, 15269, 11054, + 13048, 14312, 14527, 11315, 13494, 14661, 14468, 14609, 13951, 15046, 14215, 14737, 13487, 14932, 15052, 15230, + 15312, 14270, 14653, 12692, 15340, 13652, 15122, 15243, 14404, 12638, 15245, 15224, 14919, 14647, 12923, 9038, + 14913, 14114, 14506, 11227, 15244, 10519, 13677, 15308, 14258, 11349, 14339, 13678, 15193, 13674, 13713, 14688, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14367, 14539, 15048, 14349, 14625, 13776, 12580, 13779, 13848, 15109, 14140, 15144, 11953, 12565, 15249, 13646, + 13759, 14940, 12092, 14699, 13384, 15001, 15020, 14451, 14436, 14946, 14667, 14587, 12257, 13530, 13410, 15023, + 15011, 14028, 10298, 15333, 15198, 12829, 14953, 12915, 15063, 15271, 13629, 15309, 13447, 12219, 14810, 15166, + 15140, 14146, 14507, 15008, 14475, 11399, 14966, 13903, 15202, 13274, 13641, 9768, 15213, 12679, 12607, 13668, + 6747, 15101, 12071, 15098, 14413, 13402, 12812, 11397, 7991, 14811, 14199, 14806, 14730, 11785, 14712, 15092, + 11961, 
9149, 14191, 15002, 13898, 14056, 14725, 13473, 12454, 15349, 15080, 10641, 14191, 12428, 14769, 14881, + 15159, 14759, 12564, 15188, 15171, 15027, 13984, 14339, 14569, 15108, 15010, 12679, 11549, 13395, 13128, 15109, + 12210, 14970, 9954, 14717, 14705, 14578, 14418, 15308, 14507, 12929, 13637, 15302, 13957, 14468, 14769, 12013, + 14563, 15314, 14453, 14605, 14245, 15066, 11618, 13489, 15242, 11881, 13656, 14137, 15078, 14133, 14636, 14786, + 13937, 9577, 14630, 14331, 15357, 14444, 14770, 14623, 14189, 14529, 3757, 14722, 11271, 13876, 14974, 12482, + 15084, 14077, 14808, 15111, 14141, 15006, 14791, 13634, 12477, 12967, 14733, 15093, 15320, 15355, 14705, 12087, + 14727, 13954, 12806, 14245, 12551, 15277, 13429, 15280, 15026, 15100, 14443, 14614, 10176, 14197, 12447, 13891, + 13838, 14002, 14882, 14896, 13749, 14374, 12487, 13765, 14137, 15003, 15253, 14870, 13378, 15171, 14398, 13961, + 10296, 15189, 15336, 14538, 15001, 15281, 14627, 14485, 15182, 14537, 8282, 14857, 13629, 15154, 12940, 14172, + 13755, 13118, 14696, 12537, 14889, 13875, 13464, 14144, 14153, 10400, 11863, 11802, 15265, 15358, 13450, 14951, + 12906, 14485, 12691, 13673, 14992, 13100, 13739, 14515, 14038, 14440, 13178, 14257, 13382, 15138, 15107, 15085, + 10182, 15333, 14050, 14790, 14794, 15333, 14803, 12653, 15051, 13447, 13365, 14505, 13885, 14436, 9804, 14400, + 13990, 14099, 13400, 15352, 11280, 13191, 14079, 13820, 11818, 14481, 14243, 13398, 14098, 14207, 13631, 13606, + 15101, 14941, 12235, 14683, 13172, 14640, 14043, 9091, 12875, 12957, 14857, 8035, 15088, 12712, 15185, 14689, + 14816, 14362, 13709, 12347, 13982, 15063, 15070, 14787, 14156, 13401, 13115, 15151, 14630, 15067, 11205, 14685, + 8962, 14444, 14747, 12541, 13633, 12292, 12763, 13526, 13379, 14435, 15265, 13480, 14493, 13826, 15341, 13922, + 9712, 14298, 13351, 14526, 14830, 13746, 14482, 14892, 14945, 13712, 14333, 14095, 15268, 12652, 15288, 13594, + 14828, 11843, 11400, 15085, 14768, 11279, 10774, 15067, 13190, 14833, 11340, 
14730, 12024, 14611, 10657, 12953, + 12140, 14765, 15204, 14476, 14894, 13429, 15083, 15108, 14808, 14918, 15202, 15172, 14613, 14430, 14806, 13033, + 14969, 10437, 13656, 11847, 12400, 14641, 13433, 12740, 14415, 13252, 15329, 14309, 14350, 11583, 14054, 14738, + 14766, 15007, 13630, 14347, 12916, 15043, 13932, 15325, 15151, 14857, 12137, 13334, 12880, 14347, 15223, 12113, + 12865, 14595, 12578, 14526, 14536, 9211, 13778, 15161, 14809, 15325, 13966, 13364, 14964, 14429, 14442, 11725, + 15164, 13516, 15130, 14372, 15139, 14285, 14071, 13636, 10591, 15144, 14850, 14741, 12594, 14011, 13387, 14728, + 15230, 14601, 14385, 14708, 14106, 15236, 13758, 15105, 14841, 14378, 14114, 14339, 14255, 15095, 14476, 14755, + 13161, 13295, 13870, 14712, 12703, 15245, 14003, 13793, 14834, 14786, 14846, 14131, 10965, 15354, 10850, 14883, + 15090, 14667, 15201, 14394, 14612, 13627, 15269, 15309, 12919, 14458, 14007, 14805, 14304, 15003, 13432, 15238, + 12751, 14513, 13465, 14949, 15016, 12493, 14382, 14452, 10564, 11925, 14923, 14514, 12734, 14677, 14181, 12512, + 14343, 14663, 14341, 7577, 14299, 13789, 14970, 10339, 15321, 15151, 14514, 14647, 11481, 13321, 14645, 13608, + 14590, 11916, 14700, 14376, 14445, 12859, 14092, 14171, 15153, 3426, 13397, 13464, 13548, 14470, 13613, 14708, + 11903, 14574, 12900, 14355, 13090, 15090, 14635, 15219, 10768, 14842, 14366, 15359, 15316, 15216, 14165, 13372, + 14647, 14285, 14352, 13973, 14344, 12935, 10773, 14568, 15283, 7203, 4943, 11443, 15099, 13638, 13911, 14480, + 14648, 14326, 8285, 14285, 14849, 14885, 14529, 15311, 14787, 14737, 12956, 14124, 14740, 12881, 15209, 12793, + 15260, 15182, 13410, 12919, 14537, 14695, 10421, 14482, 11381, 12727, 15278, 14157, 14700, 14425, 15189, 11609, + 14551, 13225, 13163, 13885, 14457, 14264, 12937, 12444, 15142, 15277, 13981, 14425, 14931, 10354, 14432, 14721, + 13825, 8788, 13352, 14661, 15054, 12568, 11323, 13566, 12636, 13999, 14636, 15304, 14336, 6338, 10986, 9222, + 13670, 14028, 14079, 13933, 
15214, 15085, 9346, 14353, 14278, 15060, 14601, 14762, 12994, 10205, 14070, 14650, + 15179, 10873, 14689, 15295, 15213, 13092, 11871, 14724, 13902, 12613, 15233, 13636, 14395, 13917, 14140, 13828, + 14559, 11633, 14522, 14765, 13468, 12458, 14762, 10627, 14619, 14637, 12274, 13812, 12767, 10460, 13961, 14339, + 15286, 15297, 14404, 14965, 14421, 9633, 14755, 9299, 13887, 12327, 15194, 14660, 13798, 14181, 12074, 13536, + 15291, 12415, 10435, 13957, 14361, 15359, 13396, 13633, 13481, 13269, 13160, 14316, 13379, 14584, 13731, 10700, + 14530, 13705, 13590, 11713, 14760, 14079, 14476, 14310, 15204, 12663, 14643, 15333, 14176, 13323, 10999, 14405, + 13497, 14461, 13646, 14813, 15314, 14987, 14498, 14871, 14536, 13062, 14680, 13396, 14180, 14203, 14495, 14915, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12229, 13472, 14980, 10359, 13532, 14104, 10666, 15061, 14922, 12363, 15232, 9893, 12477, 8572, 12569, 14090, + 15301, 12637, 14475, 14515, 14253, 14270, 14268, 15305, 11509, 14334, 13744, 14153, 15151, 14781, 14485, 13518, + 14968, 14482, 15326, 14171, 12356, 14446, 11000, 14220, 15241, 15293, 15139, 12490, 14291, 14811, 15034, 14392, + 13203, 13355, 14960, 14357, 14842, 14916, 12088, 10852, 12842, 14807, 11292, 14787, 15304, 14350, 14525, 12942, + 14987, 14485, 14835, 13612, 14356, 15060, 14982, 12343, 15097, 14724, 14115, 14718, 14364, 13713, 14377, 15093, + 14818, 14710, 14132, 12234, 8275, 15001, 14744, 15294, 13532, 12513, 13987, 15166, 15198, 13737, 14869, 15208, + 15233, 14947, 13835, 9808, 14204, 14632, 14416, 14318, 15243, 13631, 15166, 13048, 14393, 14964, 14658, 15241, + 15297, 15303, 12654, 14587, 14569, 15280, 10726, 14338, 14119, 9963, 12437, 14675, 12144, 15066, 14176, 12124, + 14797, 13800, 14958, 12309, 15200, 14077, 15197, 11116, 12911, 14247, 14459, 15159, 14563, 14368, 15091, 15117, + 14261, 12155, 14607, 14627, 14344, 14397, 13960, 13796, 13616, 14616, 14742, 14298, 14941, 13922, 13894, 14729, + 14583, 14141, 15109, 14993, 14519, 11590, 
12364, 14510, 14421, 15286, 11983, 14902, 15311, 12973, 13469, 14146, + 9065, 15254, 15345, 12511, 12744, 15239, 14630, 13641, 13278, 14368, 14485, 13963, 14618, 14534, 13571, 13915, + 12897, 15205, 15082, 12714, 13332, 14642, 15307, 14772, 14714, 15193, 15251, 13361, 14634, 9653, 14767, 14365, + 13514, 14983, 13662, 14810, 14963, 13594, 12448, 13955, 14550, 14974, 15086, 10777, 15184, 14233, 15103, 12929, + 15344, 14381, 14035, 10969, 14676, 14506, 14950, 15046, 14341, 11497, 15146, 12279, 15279, 14850, 15035, 10583, + 14601, 13257, 13924, 15185, 13864, 14520, 14357, 15183, 13854, 13759, 14912, 12479, 14173, 13934, 15349, 14241, + 11928, 15264, 15304, 12884, 15323, 13406, 10877, 12898, 14926, 14344, 14037, 14027, 12894, 14428, 9445, 15316, + 13489, 14543, 11220, 14508, 14380, 14046, 15151, 14519, 14916, 14736, 12399, 14369, 14870, 12531, 13669, 15027, + 13867, 13478, 8162, 13737, 14351, 15082, 13901, 14434, 14981, 13487, 13340, 12832, 14665, 12316, 15146, 14124, + 13474, 12879, 12776, 15335, 14406, 12461, 14117, 14345, 13162, 12579, 14509, 14500, 14786, 13517, 15299, 14647, + 14394, 14703, 14611, 14015, 14347, 12547, 12463, 14752, 14365, 13904, 11480, 14265, 14228, 13988, 14159, 15083, + 14430, 14698, 14084, 14100, 14741, 15083, 14285, 14932, 14779, 14922, 14585, 13970, 14354, 13836, 14752, 13726, + 12523, 12455, 15159, 14879, 15058, 12501, 14664, 15291, 13528, 12636, 13171, 11609, 13698, 13870, 15110, 13235, + 13733, 14425, 15339, 15071, 11278, 14819, 13719, 2974, 14579, 14511, 13032, 12540, 13756, 14712, 7374, 10923, + 14339, 13163, 12674, 14124, 12404, 13975, 12984, 13271, 14812, 15253, 13569, 12671, 14679, 14780, 15300, 13710, + 14793, 14390, 15305, 14566, 15001, 15108, 15048, 9376, 13337, 12729, 14686, 13769, 13365, 13931, 13979, 14531, + 13658, 14881, 9719, 13476, 14539, 14458, 13834, 13859, 13422, 14875, 14807, 14877, 12445, 14964, 14870, 13856, + 13797, 15238, 14987, 9858, 15309, 12356, 14808, 13267, 14579, 15249, 14917, 13826, 12422, 15218, 12060, 13224, 
+ 14756, 14916, 15274, 10893, 14558, 14002, 13781, 13699, 14078, 14370, 12883, 13347, 15208, 15246, 13518, 13930, + 14106, 9858, 10258, 13435, 14463, 14930, 12442, 14702, 13677, 12239, 13833, 14442, 12858, 13632, 15251, 14439, + 14434, 9115, 12486, 14716, 15213, 14145, 14937, 13629, 14282, 13612, 14717, 14891, 12451, 13577, 15074, 15049, + 15049, 13893, 14667, 14590, 13855, 14944, 11247, 14944, 13732, 12925, 15275, 11363, 14779, 12325, 15159, 15100, + 12324, 10893, 14437, 13729, 15084, 14220, 13757, 14680, 14662, 15133, 13333, 14443, 14530, 14686, 14779, 14061, + 14621, 12817, 15187, 10660, 11548, 15170, 14288, 13944, 14118, 11852, 13913, 14755, 11999, 15033, 13377, 13127, + 12629, 15349, 14651, 13704, 14188, 14348, 14871, 11108, 14836, 14743, 14416, 14817, 14604, 14964, 13336, 13701, + 13453, 10392, 13615, 15186, 13820, 14473, 14634, 14355, 14637, 13894, 14590, 13507, 13924, 13266, 15121, 13276, + 14587, 10731, 13782, 14656, 13521, 13743, 13673, 13791, 8391, 13166, 13339, 13982, 15360, 14954, 14547, 14818, + 12373, 14641, 13300, 14596, 12319, 15024, 15242, 13264, 14988, 13960, 14436, 13923, 14445, 15219, 11057, 11741, + 13930, 14569, 14522, 14091, 13410, 14630, 15027, 15304, 14852, 14679, 14090, 14558, 14816, 14009, 12873, 11767, + 15351, 11312, 14797, 14906, 7033, 14071, 15219, 3785, 14932, 14069, 14520, 10325, 13185, 13321, 15355, 13864, + 14584, 13620, 9176, 15106, 14438, 14358, 12670, 15320, 13580, 14579, 15325, 8425, 7413, 14657, 14403, 12816, + 15158, 15050, 15116, 15088, 13001, 12917, 14494, 11381, 14615, 14405, 14531, 13212, 13932, 14645, 14807, 13941, + 13699, 14673, 12561, 12377, 14922, 14641, 14431, 9227, 12908, 14770, 14070, 13971, 12900, 13562, 13505, 14093, + 14867, 14757, 14407, 10960, 14798, 15142, 14550, 15318, 14558, 14841, 13842, 14275, 13974, 9116, 14767, 15272, + 14554, 10711, 12788, 14697, 13142, 14052, 15115, 14450, 15352, 14472, 14886, 12327, 14688, 14820, 14757, 13832, + 13811, 8939, 14620, 14796, 12939, 14417, 14009, 14852, 15026, 13579, 
14184, 14386, 15003, 14654, 11807, 14625, + 12984, 13692, 7387, 14284, 14972, 14649, 12584, 13682, 14494, 14816, 14432, 14056, 13970, 9918, 14994, 12667, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13988, 13090, 15337, 11763, 14351, 13376, 14821, 15044, 11568, 15056, 14914, 15283, 14947, 15041, 14014, 14078, + 13768, 14935, 14218, 13825, 14823, 13388, 13548, 11707, 15077, 14911, 14485, 14911, 11711, 14517, 14651, 14463, + 15206, 15328, 15165, 14604, 14591, 13388, 14953, 13541, 14951, 15190, 14149, 8171, 14858, 14536, 15181, 8650, + 15114, 14697, 14979, 14913, 11249, 11536, 14541, 12056, 14767, 12623, 13641, 15199, 14500, 12066, 14444, 14977, + 14865, 13311, 13387, 14357, 14868, 11827, 15009, 14232, 14124, 14665, 14061, 10096, 15227, 15122, 12480, 14406, + 14149, 11217, 15041, 14465, 14497, 11729, 14804, 13460, 13272, 14917, 13624, 15106, 14660, 10984, 14360, 13424, + 15344, 13371, 13089, 12354, 13675, 15008, 12704, 6938, 13491, 12603, 15043, 14766, 10547, 15144, 12388, 15116, + 14485, 15058, 14939, 12724, 12025, 15313, 12643, 15285, 14310, 12560, 13901, 14091, 15213, 15179, 13745, 13100, + 10329, 14246, 14788, 14091, 11761, 13780, 14894, 13559, 13013, 14766, 11132, 15281, 14423, 12771, 14244, 14621, + 13094, 9479, 14363, 11655, 14646, 14959, 7405, 13484, 14563, 14890, 11414, 12465, 14056, 14741, 14175, 12223, + 15235, 13018, 9909, 14505, 14681, 14643, 13496, 14539, 11609, 14716, 14092, 13502, 15180, 10144, 14506, 14357, + 13574, 15033, 14896, 12640, 15196, 12830, 14671, 15212, 9908, 9405, 12971, 14440, 15320, 14972, 13044, 13534, + 14962, 15016, 13574, 9726, 12768, 14129, 14975, 15153, 10548, 15002, 14632, 13734, 15077, 14235, 14854, 14136, + 14440, 13819, 13931, 14529, 11757, 14954, 14359, 15277, 13058, 15040, 14750, 14484, 11877, 13625, 12052, 13768, + 11578, 14908, 13379, 13403, 13711, 15035, 14727, 13038, 14599, 10854, 14596, 14720, 13955, 13898, 9630, 13905, + 15204, 14472, 15028, 12611, 14870, 12840, 14976, 8838, 14678, 13490, 13202, 13450, 12284, 
13152, 14735, 14774, + 5794, 15030, 10217, 11723, 15241, 15065, 15321, 12035, 14888, 14519, 9683, 14743, 11400, 15311, 15082, 9133, + 14991, 12496, 15320, 14887, 13429, 15239, 15144, 14867, 13971, 11609, 13800, 15067, 15283, 13439, 13829, 14596, + 14566, 13079, 15302, 14425, 15344, 14790, 14178, 14780, 14498, 13428, 13416, 10939, 14467, 10976, 14941, 14644, + 12948, 15257, 14722, 14499, 13532, 14529, 14406, 13754, 14554, 11621, 14690, 12344, 14088, 15312, 14509, 15001, + 15088, 14337, 14353, 13542, 13730, 13463, 11409, 14546, 15214, 13454, 15331, 14604, 13717, 14791, 13429, 13240, + 15223, 15014, 13415, 13642, 14587, 9928, 12577, 13081, 11970, 14776, 15182, 14900, 12675, 14292, 15120, 12315, + 14893, 14877, 11907, 15229, 15232, 15073, 12370, 15188, 14859, 12941, 12674, 14729, 11621, 15268, 14465, 13173, + 14718, 12398, 15089, 14458, 12736, 15089, 14937, 13649, 13490, 12516, 14472, 14391, 13346, 10855, 14036, 14713, + 15266, 11056, 14464, 12308, 14715, 12875, 13929, 10454, 14894, 14566, 15315, 13508, 11645, 13865, 14914, 13400, + 14860, 13797, 14615, 15290, 13076, 14445, 13907, 13764, 14281, 13969, 15223, 15022, 14702, 13886, 13310, 13023, + 14729, 13349, 14209, 9333, 14709, 13767, 14198, 13799, 13621, 15012, 12336, 15226, 12883, 13590, 15111, 14834, + 14739, 12724, 13976, 14704, 15022, 14651, 14421, 13385, 14831, 14637, 9635, 12990, 14537, 13236, 9967, 13206, + 12492, 11979, 14447, 13296, 12289, 15157, 14701, 14289, 14925, 14893, 14504, 14543, 13767, 14972, 14776, 14415, + 13438, 14367, 13543, 15185, 11087, 13641, 14970, 15279, 13341, 15238, 14433, 14283, 14506, 12454, 15342, 15272, + 15285, 15159, 13052, 14513, 10503, 14756, 10455, 14309, 15141, 13304, 15349, 14954, 15239, 13240, 15143, 14646, + 13762, 13543, 14719, 11227, 12821, 13954, 14350, 13607, 11824, 15122, 14183, 13324, 15142, 14822, 13153, 14638, + 14611, 15237, 13662, 15134, 12217, 15289, 13507, 14179, 10369, 14823, 13605, 12251, 8713, 14894, 13583, 14393, + 15022, 14626, 13525, 11643, 15183, 14373, 
11481, 15062, 14471, 11969, 11405, 11773, 14340, 15129, 15018, 13907, + 14854, 14487, 15064, 10692, 13424, 13460, 14641, 13492, 12886, 11467, 15140, 13514, 14218, 14473, 14729, 12640, + 15222, 13110, 14969, 15101, 14539, 13509, 14923, 11848, 14432, 15255, 14590, 15275, 15027, 14609, 13978, 14238, + 14217, 15021, 14699, 13825, 12823, 14299, 14553, 14978, 15332, 12395, 13497, 13668, 14914, 14370, 15250, 15265, + 15065, 14638, 15261, 15230, 15294, 12410, 15084, 15005, 14472, 15198, 13354, 14310, 14455, 14505, 9250, 15151, + 15181, 15338, 14827, 15031, 12552, 11993, 14559, 9977, 15163, 13730, 14855, 12608, 14265, 15032, 13230, 14448, + 14957, 13871, 12852, 14465, 15007, 14882, 15050, 13434, 14695, 10491, 13672, 14663, 13297, 15233, 15016, 15357, + 14965, 12173, 13076, 14692, 14987, 15232, 14771, 13992, 15139, 14470, 15028, 14185, 12500, 11456, 14177, 12978, + 13366, 12403, 14600, 13526, 6495, 14693, 14995, 13139, 14175, 13914, 14684, 14414, 15083, 15043, 14373, 11079, + 13024, 14467, 15334, 14541, 12393, 14011, 10270, 14489, 12636, 15099, 12738, 13978, 15226, 15101, 14795, 11027, + 14443, 15300, 14933, 14702, 12083, 14146, 14429, 14431, 11975, 14970, 14612, 14431, 13676, 12398, 15022, 14160, + 13870, 15318, 14584, 14536, 14068, 11015, 14639, 13638, 14904, 14480, 14806, 13376, 13069, 14587, 15226, 14399, + 13641, 15175, 13019, 15078, 14439, 11152, 15244, 12375, 13456, 14146, 14046, 15345, 14520, 15185, 13438, 14719, + 12761, 12329, 15112, 14851, 13196, 15000, 13645, 14406, 15270, 14054, 15254, 14593, 14255, 11269, 9215, 14986, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13445, 14525, 14576, 12612, 13814, 15002, 14719, 14777, 12432, 14496, 15052, 14382, 14243, 15328, 12409, 14930, + 14999, 13574, 11190, 13639, 12071, 14797, 11340, 10330, 12997, 14910, 14180, 15189, 14937, 14477, 14475, 13861, + 15237, 13761, 13812, 15015, 14624, 15130, 13636, 12410, 15140, 14409, 15197, 12594, 13847, 13838, 15132, 14428, + 14208, 14554, 11946, 11744, 14884, 14474, 13774, 14200, 
14726, 11458, 14014, 12554, 13861, 14609, 13053, 13327, + 9247, 14421, 11166, 15344, 13376, 12979, 12911, 11826, 14469, 14639, 14878, 14939, 11823, 12787, 12479, 14909, + 13841, 13392, 13410, 14587, 15066, 15015, 15332, 11899, 14514, 12315, 14049, 15168, 11695, 13932, 12819, 13774, + 13113, 13640, 15231, 14835, 14460, 13391, 15125, 14841, 13675, 13484, 13822, 15273, 13203, 14712, 14724, 14447, + 15065, 13365, 14572, 14880, 14420, 13653, 12525, 12482, 14419, 13827, 14705, 15142, 12906, 13406, 14867, 14958, + 15183, 15181, 15210, 11824, 14529, 14953, 14212, 14496, 13490, 14470, 14775, 12918, 11969, 15050, 14892, 13881, + 15011, 14393, 14147, 12585, 13578, 14581, 12302, 14379, 14403, 12899, 14201, 12629, 12299, 14628, 9317, 15045, + 11454, 13278, 14334, 13516, 12885, 14602, 12054, 11799, 13657, 10979, 15334, 14149, 14387, 14697, 14913, 13514, + 10544, 11598, 11286, 14627, 14220, 14850, 15216, 15270, 12771, 13754, 14633, 15012, 15122, 12711, 14892, 12467, + 15276, 13396, 15291, 11482, 14642, 13594, 14150, 12871, 14591, 14476, 12964, 12409, 14377, 14132, 15329, 11430, + 12948, 14798, 14563, 15305, 14633, 13269, 14827, 15355, 15098, 12408, 12962, 12894, 15247, 13454, 14640, 14960, + 14122, 14430, 14539, 15096, 13943, 13790, 14868, 11261, 14925, 15181, 7897, 15163, 14329, 15114, 13908, 13830, + 14390, 15129, 14985, 13177, 15022, 11237, 11987, 10261, 14505, 14432, 15184, 14357, 15179, 15156, 15193, 14469, + 13752, 13259, 14944, 15185, 14830, 15103, 14813, 13787, 14410, 14768, 13983, 14761, 14549, 15137, 13328, 9160, + 14223, 14458, 14654, 14624, 15103, 11329, 14617, 12267, 15132, 14920, 15318, 14958, 14852, 14817, 14985, 13402, + 14366, 14090, 14488, 11934, 13595, 11460, 15149, 14526, 13489, 13633, 11702, 10085, 13485, 12991, 14644, 13237, + 15008, 15181, 13653, 15154, 10314, 14881, 13458, 12996, 9307, 13852, 14337, 13324, 13892, 15143, 13335, 14822, + 15346, 14828, 14771, 15021, 13513, 14516, 14226, 13528, 14227, 14421, 13412, 15188, 14604, 13865, 13577, 12547, + 12446, 
12336, 13877, 14578, 9918, 14808, 13823, 15015, 12980, 14914, 12711, 12467, 14560, 13805, 13919, 14501, + 14545, 12226, 11117, 14465, 14562, 14756, 15192, 14627, 14627, 14906, 14471, 13891, 15077, 14861, 13746, 14812, + 13070, 14111, 12986, 12955, 11288, 11748, 14857, 14924, 12911, 13147, 14832, 14992, 11859, 12609, 14396, 14263, + 11385, 13557, 11545, 15287, 14674, 12754, 13677, 15021, 12933, 15096, 15359, 15031, 14429, 14865, 10498, 14192, + 12531, 13809, 13814, 14434, 15074, 14621, 10916, 13318, 14341, 15150, 15197, 13331, 14979, 11438, 12372, 14340, + 13427, 14577, 13713, 13255, 14727, 14102, 14635, 14495, 12405, 15152, 14796, 15142, 10955, 14558, 12908, 14920, + 14552, 14540, 15057, 15139, 14874, 14680, 15327, 13610, 13647, 15028, 12316, 13757, 12375, 14802, 12988, 13374, + 14983, 15297, 14466, 14661, 12744, 13561, 14916, 12322, 13802, 6794, 15302, 14636, 14485, 14073, 13431, 13985, + 13329, 15253, 14961, 14812, 14366, 12841, 14579, 13621, 14533, 13654, 13740, 11876, 14533, 14497, 14535, 15002, + 14614, 11171, 13590, 13780, 15128, 13111, 15103, 14046, 13516, 12643, 12063, 14191, 13101, 13962, 14135, 12290, + 14833, 15002, 15113, 14557, 14160, 14308, 15231, 14539, 14072, 12321, 15296, 14770, 14164, 14640, 13701, 15074, + 11760, 12600, 13724, 14894, 13018, 12220, 14297, 12260, 15098, 9905, 14585, 14861, 14333, 15063, 14754, 11282, + 15128, 15221, 14666, 13713, 12519, 15131, 14686, 12909, 14824, 14489, 14843, 13555, 7383, 14557, 14765, 14210, + 14028, 14844, 14380, 11869, 14997, 14433, 12985, 14264, 12119, 14357, 14964, 14777, 14849, 13410, 14570, 13045, + 12172, 13729, 14844, 15117, 11703, 15127, 11593, 13400, 14759, 14885, 12029, 14344, 15353, 12978, 14539, 9652, + 12399, 14568, 12292, 11717, 14506, 15342, 15270, 14539, 9628, 15036, 14383, 14672, 14746, 14955, 13774, 13519, + 9657, 13883, 13896, 11711, 14380, 14740, 15251, 13754, 6598, 13573, 13608, 14513, 14890, 12982, 15324, 14097, + 15150, 14099, 10248, 14844, 14140, 11372, 14225, 14024, 12015, 12259, 8781, 
13120, 15054, 14892, 13426, 14382, + 13591, 15214, 13821, 13745, 12926, 15040, 15239, 15112, 15116, 12456, 13052, 14607, 15295, 15248, 13292, 14857, + 14574, 12416, 12143, 13629, 11964, 10519, 13731, 13918, 14538, 14923, 14966, 14640, 14577, 15049, 14914, 13641, + 13650, 13418, 11436, 13139, 14445, 14611, 12810, 9985, 15174, 11498, 12324, 14553, 13811, 11916, 13577, 13339, + 14754, 14600, 10135, 14661, 15185, 14971, 11071, 15060, 14842, 15308, 12932, 13472, 11564, 12426, 13637, 14851, + 13467, 12071, 15208, 13135, 14813, 15320, 15288, 12807, 13914, 13551, 15324, 14758, 14366, 12775, 13428, 14470, + 15312, 15297, 10261, 14620, 13768, 13675, 10870, 15094, 14516, 13978, 14967, 12647, 15293, 13426, 14128, 14596, + 14427, 15222, 14939, 13198, 12691, 15306, 14821, 14898, 9605, 12599, 14214, 13923, 14424, 15105, 9976, 15089, + 12654, 14469, 15005, 13770, 14739, 12916, 9481, 14590, 13286, 15351, 13798, 14052, 15303, 13596, 14866, 15068, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14406, 14520, 14064, 14510, 14431, 13512, 10752, 14280, 14619, 14949, 14529, 14477, 15000, 14438, 14289, 13650, + 15304, 14271, 14393, 14692, 15259, 13723, 13464, 11713, 13427, 12769, 11322, 12918, 14698, 11609, 14566, 12586, + 6623, 13455, 13489, 14996, 14629, 14475, 14246, 13843, 14742, 12463, 13246, 15079, 10450, 15237, 12370, 14574, + 10980, 13888, 14918, 11737, 15330, 14715, 15306, 13997, 14166, 15035, 14000, 14300, 12933, 15047, 14647, 13881, + 15162, 15301, 14306, 13507, 15040, 10954, 15167, 15163, 15056, 14787, 15081, 15055, 15057, 14641, 13440, 15272, + 14651, 14018, 14791, 13811, 14384, 14969, 14267, 15158, 13480, 13704, 12357, 13797, 12957, 12356, 14480, 13333, + 13872, 9417, 15263, 15169, 13812, 15342, 14842, 13922, 14149, 14011, 14933, 11585, 13414, 12483, 14420, 15025, + 12796, 14702, 13385, 14703, 15132, 12669, 14659, 13903, 14863, 12924, 14096, 13793, 12811, 15284, 14841, 15106, + 12316, 12842, 13622, 13419, 14925, 12220, 15346, 15044, 15134, 14752, 14434, 13904, 14927, 
15131, 11313, 15130, + 11521, 12574, 13232, 15261, 14083, 15141, 12125, 14143, 11270, 11144, 15011, 15093, 12471, 14767, 13264, 14537, + 15320, 15038, 14295, 14235, 14239, 9343, 14159, 13136, 14771, 12395, 12677, 14861, 13793, 14652, 12765, 14392, + 15085, 14669, 14601, 14149, 15192, 10273, 14386, 14932, 15076, 13561, 15355, 13054, 13557, 14600, 14499, 14403, + 15109, 14373, 14401, 14346, 14005, 15274, 15177, 14701, 13377, 14592, 12497, 13207, 14848, 13293, 14063, 13593, + 10805, 14732, 14460, 15308, 12194, 14392, 14361, 15014, 12658, 13750, 14063, 14870, 11122, 14818, 12019, 13872, + 14192, 15134, 13934, 14542, 13398, 13049, 15000, 13875, 12389, 14101, 13527, 12947, 14089, 13068, 14681, 15018, + 12222, 13142, 14542, 13031, 14804, 14832, 14915, 14694, 15038, 14632, 14628, 13727, 14553, 14415, 15262, 14854, + 14917, 13324, 14439, 14460, 9298, 14971, 14480, 13411, 14394, 12549, 14772, 9166, 15022, 15338, 14952, 14636, + 15281, 15228, 14465, 13974, 15265, 12643, 15066, 14193, 14534, 14222, 14759, 12715, 14564, 13435, 14263, 13988, + 14840, 15105, 14652, 13908, 14893, 14439, 14755, 14593, 14978, 13732, 15343, 12614, 15135, 14953, 14501, 14331, + 15210, 13906, 14395, 10962, 14374, 11897, 14622, 13130, 15169, 15000, 12492, 14582, 12648, 13962, 14823, 14757, + 14257, 12674, 12461, 13479, 15281, 11601, 13384, 10384, 15025, 12743, 11842, 14189, 12720, 14495, 14151, 11868, + 14075, 14373, 14182, 12066, 14953, 14608, 14474, 14502, 14831, 12231, 14900, 6773, 14561, 12964, 14686, 14781, + 13597, 14623, 14547, 15227, 15332, 13105, 15198, 15149, 12229, 15316, 15030, 14726, 14954, 11843, 7546, 14061, + 14111, 14791, 13272, 14939, 14864, 10804, 15239, 9291, 14731, 15258, 13870, 14261, 12107, 14879, 12877, 15263, + 13291, 13938, 10857, 13588, 14983, 15285, 14001, 15258, 15162, 14493, 13786, 15250, 14068, 13928, 14687, 11171, + 12432, 14090, 14708, 12441, 12488, 12609, 7620, 12429, 15238, 10146, 14629, 10369, 14150, 14441, 13327, 13860, + 14899, 13392, 13553, 13222, 11559, 14769, 
14824, 13335, 14832, 13715, 14420, 15058, 15019, 14196, 14504, 15028, + 15355, 13249, 13925, 14503, 14497, 13960, 14982, 13442, 15035, 13340, 14354, 15213, 10674, 13552, 15097, 14399, + 13894, 10272, 12092, 11896, 13654, 15084, 13347, 15041, 15200, 10235, 14314, 15352, 13952, 14816, 15004, 14956, + 14036, 14970, 15034, 14776, 14429, 14410, 12748, 14712, 14336, 11117, 12124, 12505, 14461, 14794, 13898, 14676, + 14446, 13180, 14810, 12468, 14236, 15224, 13346, 14649, 14142, 14368, 13971, 13487, 15268, 13295, 14554, 15214, + 11826, 14510, 13595, 13818, 14290, 13786, 13113, 14542, 15098, 14735, 14911, 14567, 14513, 13499, 12480, 14157, + 13375, 14787, 9410, 13589, 11357, 14622, 12577, 15129, 14353, 12635, 15299, 15051, 12779, 15213, 12311, 12920, + 14851, 14814, 15156, 14281, 14511, 10726, 12383, 14014, 13200, 15035, 14575, 15341, 14716, 14311, 14473, 14577, + 14628, 13173, 15187, 13884, 12391, 12343, 12737, 12571, 12747, 15252, 14805, 14168, 12373, 13625, 15024, 13517, + 11404, 14502, 14824, 14997, 13520, 13936, 15073, 15201, 14467, 14440, 13950, 14627, 12208, 14892, 14711, 14422, + 14099, 14411, 14475, 14914, 15093, 14286, 13861, 13789, 13891, 11993, 9484, 14860, 15197, 14827, 14415, 14676, + 14639, 11095, 14377, 11826, 11490, 14324, 13131, 15228, 14454, 15147, 11443, 15175, 8393, 15173, 15094, 14508, + 15172, 15031, 11633, 9633, 10736, 14886, 14819, 14056, 14097, 14585, 15032, 13274, 13207, 15261, 13189, 14751, + 14906, 12280, 4104, 14548, 14727, 13936, 14716, 12447, 14343, 14218, 15221, 12649, 12041, 13720, 13481, 15067, + 15232, 14780, 14705, 13694, 14367, 14152, 9327, 13825, 12385, 9986, 15203, 15119, 14838, 12985, 11292, 15165, + 14555, 14463, 12186, 13859, 15283, 13796, 14664, 12019, 10757, 15021, 14261, 14616, 13248, 14512, 13980, 14020, + 14678, 14478, 13551, 14823, 15317, 14807, 14052, 15259, 14044, 13985, 13164, 14993, 14835, 14906, 13498, 14630, + 13799, 15348, 13476, 15313, 14471, 13313, 14989, 12655, 15104, 14851, 14839, 14654, 15034, 15329, 14212, 
15197, + 15022, 14705, 12694, 14078, 14497, 12176, 13934, 10410, 14162, 14732, 14570, 14618, 12538, 14640, 14743, 12366, + 12466, 11022, 14281, 14743, 14187, 15277, 14496, 10233, 15268, 14627, 14392, 13353, 14528, 11752, 12875, 8838, + 14182, 13791, 14988, 14287, 13972, 14757, 15086, 14710, 13917, 13816, 13112, 14074, 12621, 13534, 13902, 12100, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15128, 14105, 11436, 14837, 15197, 13549, 13959, 13068, 15163, 10639, 14855, 14777, 13434, 15265, 14681, 14654, + 14800, 12852, 15131, 15010, 13379, 10812, 14806, 15352, 11585, 15116, 13544, 12899, 15313, 15147, 14783, 12199, + 14629, 15278, 13757, 11090, 12974, 14031, 14406, 14387, 10885, 13867, 14881, 13527, 14632, 13476, 14285, 13849, + 15318, 13340, 14493, 10395, 15080, 14863, 14366, 14515, 14681, 14960, 12384, 15232, 14067, 15185, 15212, 14643, + 14745, 10944, 13826, 11655, 14813, 15175, 12656, 13646, 14727, 6796, 14978, 15063, 11431, 15270, 12969, 14252, + 13777, 10805, 14085, 13094, 15215, 14655, 12916, 14390, 13479, 14730, 13904, 13617, 13439, 14835, 14347, 14493, + 11902, 13871, 15266, 15308, 14668, 14988, 11349, 14004, 15152, 7505, 14881, 13822, 14959, 14830, 15299, 13077, + 14668, 12374, 13909, 15048, 13482, 9761, 14596, 13050, 13672, 14568, 15206, 11478, 14928, 14961, 14053, 12424, + 15245, 14373, 14276, 8217, 14926, 13007, 14437, 13660, 14893, 12880, 9243, 11771, 11569, 14412, 14285, 14904, + 9422, 14123, 14799, 14715, 14853, 15195, 14966, 14367, 13898, 8610, 11911, 14829, 12318, 14308, 14491, 15251, + 14460, 14846, 14756, 14117, 14538, 14527, 14883, 14679, 14834, 14713, 14064, 14900, 13715, 15197, 13204, 14792, + 15276, 14993, 14599, 14435, 12329, 10990, 11658, 15095, 15182, 13061, 9838, 14237, 14111, 14957, 15033, 13852, + 14079, 14154, 14109, 14808, 14368, 14765, 14645, 13224, 12538, 15074, 9713, 14474, 9931, 13235, 10448, 13436, + 13454, 12343, 14722, 14536, 13547, 14620, 11570, 13487, 14435, 15031, 14914, 13869, 14688, 13493, 13444, 13817, + 14850, 14626, 
12436, 14216, 13925, 13882, 14416, 14579, 8713, 14712, 14386, 15224, 14445, 13617, 14937, 14232, + 12793, 12890, 14762, 7461, 15355, 11626, 7253, 15192, 12317, 13516, 10776, 14654, 14128, 14188, 13304, 14811, + 14628, 14707, 12394, 14713, 11667, 15223, 14502, 13676, 14366, 13710, 14165, 12567, 11773, 14813, 15238, 15322, + 13265, 13914, 14812, 12708, 13556, 11768, 14770, 15329, 14351, 13138, 14561, 12362, 15351, 14271, 14825, 13707, + 15342, 15293, 14059, 8808, 15151, 14350, 14794, 14827, 14730, 13247, 9858, 14651, 14999, 15165, 13280, 13911, + 13430, 11653, 13749, 14686, 10711, 13395, 15156, 14587, 12811, 14457, 11332, 14221, 15241, 13978, 14185, 14679, + 15171, 8252, 13720, 15096, 10018, 14567, 15002, 13374, 12941, 13606, 15013, 12353, 15110, 13371, 12846, 14899, + 14622, 13646, 15122, 13180, 15043, 14011, 14861, 13962, 14403, 14426, 13365, 14513, 11939, 15147, 13273, 13506, + 14874, 15080, 13557, 14902, 14895, 13971, 14724, 13013, 10131, 12876, 14503, 14426, 9972, 13168, 13530, 14995, + 14638, 14727, 15033, 11743, 14554, 14426, 14865, 12696, 12148, 13825, 15343, 13904, 14506, 12971, 13868, 12414, + 14867, 12242, 15150, 15026, 15180, 13765, 13814, 11077, 13338, 15337, 14255, 10652, 10502, 13981, 13927, 12454, + 13376, 12578, 12350, 14217, 14672, 9068, 10004, 14687, 14471, 13796, 14595, 15148, 13247, 13331, 12718, 14480, + 13574, 13812, 15134, 14608, 14945, 10283, 15255, 13998, 13611, 14370, 14809, 13583, 14162, 14024, 15267, 12679, + 12745, 15291, 15114, 15254, 15177, 14819, 15259, 14934, 13819, 14752, 13539, 9897, 12704, 13855, 14600, 15010, + 12242, 14675, 15032, 14472, 15328, 14529, 10166, 13376, 14732, 14873, 14819, 14310, 15210, 14085, 13840, 14991, + 14893, 13407, 10841, 12166, 14239, 13628, 14153, 14742, 12821, 14716, 14872, 14449, 12426, 12501, 11296, 14824, + 15079, 15200, 15155, 14874, 15189, 13277, 14924, 14743, 14615, 14518, 13409, 14951, 14694, 13345, 13981, 15106, + 15046, 13326, 14684, 14069, 14749, 14385, 13511, 15155, 15091, 14360, 14929, 14901, 
15061, 14739, 13516, 14612, + 11750, 14834, 1474, 15315, 15223, 14338, 12038, 11355, 15085, 11459, 14855, 14399, 14631, 13013, 13693, 11519, + 15160, 9253, 14957, 14006, 10994, 14635, 14776, 13574, 13400, 15339, 14354, 14922, 13762, 14471, 12752, 14017, + 14368, 14399, 14954, 14041, 14174, 14543, 13108, 14903, 10196, 13824, 14194, 13654, 15274, 13673, 15146, 14851, + 15337, 15310, 14912, 15074, 14802, 13542, 9317, 13439, 14402, 12660, 11594, 14446, 11372, 13882, 12619, 10248, + 14052, 14517, 15298, 14866, 13253, 14326, 15255, 14373, 14655, 13321, 14198, 14848, 13921, 13937, 13622, 14471, + 15047, 15090, 14299, 14133, 14821, 14956, 14533, 10556, 14325, 14819, 11637, 14666, 13967, 12925, 14694, 14420, + 14390, 14783, 13980, 14027, 14206, 13770, 14930, 14997, 14868, 14167, 14579, 13576, 15348, 13765, 15107, 14662, + 15202, 12948, 14101, 14391, 8284, 14885, 14611, 11996, 14683, 14301, 14112, 12474, 13532, 14435, 13718, 13134, + 14728, 13542, 13537, 15353, 14945, 15186, 15164, 12123, 13421, 14337, 15179, 12004, 9763, 7458, 11766, 14481, + 12149, 12343, 14731, 13698, 14446, 14787, 14634, 15251, 12839, 13465, 11363, 15090, 14500, 14456, 11934, 14493, + 14005, 11449, 10035, 14941, 12498, 14939, 15214, 12777, 15132, 14185, 15336, 15201, 14282, 13164, 14689, 14770, + 14515, 14311, 15110, 14840, 13891, 11674, 13947, 14606, 11607, 14197, 14675, 13508, 14985, 14668, 14722, 12428, + 10831, 11431, 14366, 15052, 9328, 15034, 12757, 14335, 12828, 15241, 14484, 14739, 15319, 14210, 15188, 12931, + 13751, 14683, 12161, 14603, 15156, 13767, 12084, 15067, 13441, 11575, 12912, 14651, 8583, 14847, 14880, 14192, + 13761, 11338, 15142, 12778, 13375, 12537, 13689, 15103, 14109, 11489, 13255, 13125, 15238, 15322, 14454, 14193, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12807, 13604, 15141, 15065, 13035, 11082, 14969, 13339, 13429, 13670, 13016, 13315, 13671, 14884, 14225, 14490, + 14587, 15036, 14463, 14628, 15167, 14252, 15357, 15247, 12728, 11964, 14342, 13508, 15120, 13506, 
14825, 14347, + 15076, 9488, 15141, 13720, 13026, 13840, 15083, 14093, 14515, 11510, 8343, 15076, 13159, 10973, 13151, 15291, + 14630, 14029, 14837, 12221, 15087, 14583, 14023, 14024, 15023, 12752, 15048, 13065, 14996, 14524, 14886, 15273, + 14723, 15154, 13908, 14398, 13292, 15191, 14948, 14988, 12546, 11490, 13231, 12329, 11651, 13398, 14898, 13922, + 14541, 15127, 13977, 12198, 12613, 13801, 15239, 14954, 12873, 12476, 13176, 13670, 14894, 14798, 14689, 14540, + 14693, 13738, 13278, 15348, 15144, 10285, 14862, 14293, 15209, 14795, 14871, 13189, 14175, 13806, 12315, 14571, + 14944, 13045, 13063, 14804, 11851, 14358, 14499, 13781, 11956, 14258, 15099, 12575, 13996, 14794, 13956, 14786, + 15156, 13013, 14162, 12341, 14655, 14042, 8271, 15345, 13076, 15083, 15099, 11620, 13045, 11556, 15135, 15316, + 13382, 15115, 13368, 14920, 13368, 13367, 14094, 14237, 13532, 14592, 12387, 10429, 15238, 13530, 13317, 12883, + 14863, 10918, 13722, 14542, 14167, 14428, 14717, 13968, 13875, 13983, 14354, 15155, 14848, 13813, 14571, 14763, + 15288, 12879, 15148, 14401, 14096, 15358, 12443, 12465, 15204, 13820, 13601, 13039, 10925, 11445, 15024, 10915, + 13936, 13817, 14653, 12890, 14259, 15004, 14533, 14174, 14053, 15143, 13934, 15062, 14945, 13969, 13956, 14579, + 7296, 14520, 7561, 14895, 14269, 14565, 12189, 10541, 6427, 10663, 14575, 14265, 14621, 15120, 12047, 14420, + 14518, 15092, 14382, 13148, 13776, 8043, 13320, 13873, 13185, 14440, 14538, 13170, 15103, 10913, 15288, 15313, + 14214, 13683, 14664, 14834, 13653, 14705, 12521, 14918, 15144, 15163, 14359, 15331, 13440, 13989, 15047, 14486, + 14111, 12592, 14699, 15237, 14702, 14555, 11682, 14889, 15226, 14813, 14607, 13642, 13622, 14488, 14079, 8208, + 9640, 15144, 12041, 13178, 11812, 13120, 14252, 12125, 12345, 15116, 14953, 11654, 12725, 12726, 9264, 13773, + 15211, 14668, 13346, 4091, 14562, 13222, 11383, 15349, 12801, 15192, 15088, 14386, 11975, 14452, 12682, 15296, + 15196, 14679, 13952, 15130, 14919, 14429, 14362, 14999, 
12385, 14905, 12440, 14970, 13577, 14851, 14385, 14511, + 15114, 14530, 14435, 13745, 15028, 6508, 15075, 13885, 14096, 13428, 13121, 13438, 13690, 14061, 14574, 12419, + 11283, 14886, 11873, 14412, 12432, 14375, 11884, 15136, 15065, 14881, 14552, 13289, 12144, 14987, 14772, 15196, + 13364, 11482, 12596, 14820, 14955, 13787, 13655, 11677, 15085, 11622, 15335, 13497, 14718, 14450, 13601, 14451, + 10803, 15149, 14862, 10495, 13503, 11759, 14805, 15290, 12706, 14760, 15053, 14216, 15256, 15184, 14097, 14679, + 13520, 14376, 11107, 15064, 15082, 11098, 14734, 13181, 14622, 13806, 12662, 14861, 15022, 14629, 15147, 15060, + 11627, 8480, 14416, 15166, 12926, 14382, 14701, 14441, 14301, 14280, 14836, 10806, 11791, 15009, 14826, 12823, + 14723, 14266, 13685, 14881, 14497, 14369, 14803, 14316, 14740, 14090, 15069, 15168, 15041, 13784, 11547, 14202, + 14424, 12516, 13194, 9933, 14553, 15283, 15114, 15089, 12523, 14596, 14154, 14091, 11332, 14570, 12399, 14225, + 13429, 14802, 11599, 12174, 12687, 12944, 12315, 13689, 13474, 14862, 11753, 15050, 13969, 14701, 10569, 15173, + 14509, 15111, 15065, 11354, 13182, 12356, 15225, 15351, 12760, 14781, 14151, 14500, 13561, 13775, 14289, 11649, + 14611, 12047, 14679, 13026, 13905, 15123, 13875, 10205, 13686, 15014, 15244, 13700, 14590, 13226, 15111, 15302, + 14820, 12600, 15107, 8418, 15326, 14350, 15327, 11249, 9024, 14656, 15152, 13444, 14574, 12717, 13716, 14495, + 12806, 8677, 10509, 14936, 12753, 14891, 14580, 15197, 15067, 14610, 14968, 15279, 13311, 13242, 13462, 10391, + 12666, 14672, 14556, 14622, 14262, 9056, 14379, 15174, 14218, 14722, 14037, 13276, 14442, 9740, 9466, 14768, + 14821, 13402, 13325, 13829, 13052, 14239, 14403, 14178, 15226, 12111, 14709, 14652, 14881, 10490, 14392, 13749, + 12459, 15177, 14214, 14452, 14591, 13661, 14850, 12328, 14650, 12691, 15301, 14687, 14976, 15227, 9643, 14044, + 12335, 14501, 13743, 14889, 14463, 13587, 13739, 14843, 15329, 14445, 15149, 10674, 15321, 12898, 14391, 13812, + 13598, 14379, 
14617, 14744, 14492, 15228, 15052, 15353, 14554, 15172, 12502, 13563, 14965, 14606, 15070, 14396, + 10495, 14869, 14536, 14382, 15110, 13417, 13447, 14607, 12739, 12216, 14330, 14323, 14055, 14771, 14652, 14623, + 13334, 15221, 14749, 12329, 14193, 15278, 11702, 15023, 11226, 15307, 15128, 12332, 14736, 14565, 8908, 12471, + 11354, 14572, 14971, 14743, 14490, 14010, 15167, 12497, 13700, 12993, 12372, 13729, 14560, 14674, 14253, 13159, + 12409, 14636, 14173, 14830, 15072, 13658, 10206, 15087, 13413, 13252, 14055, 15034, 14967, 13710, 13240, 12272, + 12987, 14352, 14674, 14588, 14555, 12451, 14411, 14876, 12694, 14893, 13701, 14051, 14717, 14713, 10506, 14741, + 15054, 12983, 14896, 14386, 15119, 14768, 14634, 13455, 13011, 13431, 11965, 14172, 15140, 11120, 14233, 15125, + 9582, 11966, 14943, 11514, 14533, 14607, 11331, 14495, 13949, 15005, 15040, 13872, 15219, 11377, 13860, 14575, + 14840, 13641, 14726, 15306, 13968, 14745, 9001, 14577, 15111, 14888, 15344, 11838, 13817, 14549, 12541, 15253, + 13037, 14927, 15354, 15208, 15103, 13670, 12852, 11605, 14819, 13203, 13255, 14287, 14490, 14278, 14855, 15084, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15118, 15243, 14769, 14553, 13601, 14867, 13451, 13654, 13027, 15087, 14493, 15060, 14407, 14347, 14863, 12079, + 15140, 14985, 14752, 14553, 14526, 13056, 14483, 14424, 14390, 13941, 14965, 14476, 15339, 13631, 9407, 14495, + 13829, 15231, 13459, 14494, 13539, 15144, 11876, 11893, 15023, 14314, 14288, 10861, 15018, 12938, 15123, 13882, + 13255, 14380, 12138, 13921, 14928, 12493, 14861, 12995, 15027, 15172, 15313, 15258, 15048, 13487, 11084, 15004, + 14720, 15104, 13656, 8324, 15103, 11950, 14497, 14931, 12295, 14997, 15240, 14208, 14824, 12822, 13503, 12254, + 14067, 13081, 13841, 13945, 15207, 13454, 11945, 11712, 13427, 14329, 13598, 14663, 10910, 13523, 14813, 15038, + 13982, 15299, 15351, 14709, 13842, 12357, 15301, 14436, 13825, 11395, 12832, 15228, 14901, 12281, 15313, 14873, + 15101, 15181, 14960, 11046, 
13334, 12178, 13666, 14940, 11441, 13257, 14944, 14130, 15181, 14493, 14815, 13488, + 15062, 14766, 12231, 14398, 13873, 11781, 14579, 12711, 14247, 11516, 11803, 14732, 14553, 13421, 13322, 12736, + 14702, 11635, 14912, 14794, 12179, 13978, 15133, 13052, 13657, 14159, 11346, 15087, 14655, 9978, 15118, 15306, + 12416, 13724, 13065, 12979, 15230, 14047, 11447, 14047, 14896, 4295, 14579, 13749, 15295, 14331, 15108, 13564, + 14706, 14564, 15149, 14827, 13806, 13865, 12854, 14285, 14714, 13683, 14869, 13653, 13861, 14394, 13160, 15145, + 13427, 12349, 15015, 12844, 14621, 14974, 14524, 14654, 15113, 13407, 7344, 14554, 14798, 14387, 13835, 12477, + 12424, 12506, 14623, 14197, 13230, 14030, 13442, 15003, 14767, 12786, 14991, 12380, 12365, 15164, 15216, 12418, + 14995, 15072, 15048, 15034, 12642, 14473, 12075, 13991, 14796, 12456, 12711, 14909, 14560, 10586, 13196, 14610, + 13499, 15035, 12606, 15184, 14849, 14854, 14316, 14485, 13311, 13814, 14500, 13749, 12667, 14669, 12710, 14986, + 13321, 10298, 13338, 14104, 15044, 12777, 10544, 13629, 15182, 15342, 14509, 14960, 9847, 10869, 15025, 14355, + 14885, 12880, 12618, 14631, 9667, 14509, 9171, 14554, 11421, 14731, 12595, 9537, 14499, 13955, 11941, 14657, + 14369, 14891, 13930, 15074, 12419, 13637, 13537, 14503, 14750, 8661, 13223, 13351, 13856, 14093, 13251, 13573, + 15285, 14536, 15101, 14630, 14706, 14409, 13383, 11560, 15026, 14602, 14911, 15083, 11947, 12764, 14998, 14661, + 14792, 14554, 14489, 14212, 15040, 14458, 12837, 12858, 15109, 14096, 13899, 14386, 10227, 12986, 13969, 13470, + 14741, 14970, 11568, 14690, 15152, 14643, 14310, 14204, 15349, 11800, 14613, 15231, 15117, 15098, 13742, 15084, + 15321, 15012, 13112, 12306, 15326, 14767, 15194, 15275, 15243, 14312, 12770, 12552, 14849, 14908, 15134, 13120, + 14558, 14917, 13529, 15165, 12692, 13434, 15180, 15243, 12703, 13669, 13763, 13396, 13038, 13599, 12344, 13627, + 14898, 15321, 15054, 12716, 15035, 14631, 12111, 11650, 11006, 9468, 14413, 15027, 11765, 14578, 
15005, 7490, + 15079, 14922, 13702, 12321, 14719, 10224, 13259, 12482, 14751, 15240, 11259, 13479, 14843, 15284, 15243, 15241, + 13711, 13598, 12695, 14137, 14828, 14595, 13924, 14917, 14606, 14353, 15307, 8028, 15266, 10761, 14300, 14113, + 14721, 12379, 14764, 12042, 12955, 14733, 13229, 14927, 11579, 13610, 14959, 14653, 14239, 13899, 14519, 8965, + 12255, 14180, 14340, 11847, 11375, 13476, 12461, 14639, 14513, 15058, 13233, 15182, 14288, 15253, 14375, 14375, + 15228, 14939, 14151, 12415, 15203, 14935, 14780, 14420, 13937, 15356, 14130, 13519, 13075, 14471, 11731, 8997, + 14898, 12301, 14585, 15108, 14212, 13013, 14432, 12045, 12440, 13576, 14934, 14362, 14509, 14862, 14581, 13841, + 12537, 13844, 14845, 13597, 14796, 12900, 13217, 14419, 13107, 14846, 15208, 12812, 14519, 14851, 11457, 13641, + 13412, 14240, 15079, 12565, 12880, 12387, 14265, 14423, 14402, 13377, 10340, 14955, 15276, 10455, 14488, 15247, + 12809, 13366, 14263, 13114, 15176, 13990, 13815, 11853, 7793, 15222, 14180, 13653, 15303, 14558, 14723, 14338, + 14755, 14517, 14968, 13683, 13693, 15003, 14507, 13820, 13477, 11197, 13499, 14661, 11712, 14728, 14295, 15240, + 14394, 12588, 14855, 15246, 12824, 12917, 10955, 11192, 14706, 14438, 11881, 15332, 14621, 14077, 14364, 14535, + 13402, 13736, 15073, 9390, 14488, 14398, 14375, 14593, 13836, 15307, 12612, 12712, 13346, 14385, 12312, 14834, + 10463, 15359, 13851, 11671, 14402, 15123, 14198, 14358, 13428, 15180, 15042, 14713, 12532, 14702, 13927, 13692, + 14969, 14905, 14384, 15271, 15115, 13501, 14841, 14204, 13805, 14708, 13411, 14990, 14347, 14366, 14421, 14926, + 12746, 14096, 13833, 14671, 14082, 11977, 14372, 14459, 14618, 10353, 12433, 14119, 14654, 14525, 15121, 12316, + 14680, 13494, 15001, 15238, 13999, 12591, 12949, 14713, 15206, 12706, 15176, 14387, 13059, 14168, 9761, 14693, + 15033, 13470, 13154, 15233, 10339, 9193, 14745, 15100, 11878, 12494, 13670, 9209, 10712, 12677, 14396, 15010, + 13315, 13503, 13945, 14362, 15177, 12878, 13749, 
10465, 14917, 10293, 14041, 14360, 14591, 12514, 14324, 13449, + 14853, 14344, 14816, 13022, 14707, 13831, 9732, 11451, 14579, 13243, 13568, 14424, 14578, 15163, 13730, 14831, + 15161, 12840, 11115, 13547, 14408, 14954, 14709, 15301, 13682, 14805, 15030, 12640, 14907, 13240, 9022, 14909, + 13295, 14915, 13271, 8559, 14477, 15000, 13517, 10159, 14023, 14609, 14543, 13489, 14751, 13026, 13909, 14384, + 7791, 13589, 15195, 14550, 14570, 8449, 14716, 12245, 14268, 14316, 14359, 14849, 13764, 15116, 12449, 9802, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14508, 14848, 10688, 14748, 14017, 14902, 13611, 13253, 14612, 14022, 15077, 14418, 13667, 12506, 13356, 13885, + 14815, 14423, 14408, 13372, 14017, 13690, 13096, 15015, 14498, 11611, 9961, 12471, 14681, 12544, 14732, 15162, + 9801, 14401, 15206, 14366, 14823, 4852, 14643, 14352, 14309, 15313, 14853, 15345, 14080, 13500, 14903, 11892, + 14027, 13768, 14995, 14571, 15210, 14397, 15189, 13753, 13942, 14777, 14497, 14411, 14890, 11304, 12797, 14564, + 11433, 12604, 14826, 14988, 14890, 15037, 15076, 11669, 12199, 13460, 13257, 15266, 14634, 14673, 14549, 13627, + 12739, 13077, 15340, 13758, 15308, 14417, 14798, 15324, 14406, 14460, 13316, 15311, 12800, 12686, 14965, 12813, + 10557, 12803, 14375, 12608, 14755, 14771, 14158, 11830, 13824, 15302, 13619, 13336, 13154, 12706, 12485, 15219, + 12800, 14934, 14481, 13123, 13615, 13388, 12974, 15153, 14680, 15003, 13734, 15253, 15027, 14019, 14837, 12673, + 14433, 13062, 14528, 14054, 10400, 15051, 9319, 10181, 13383, 14768, 13659, 14585, 13520, 14476, 5672, 14710, + 14852, 10860, 14493, 14814, 14463, 7526, 14167, 15170, 14580, 8521, 14422, 15308, 14726, 15079, 13255, 13452, + 14704, 14525, 15011, 13828, 14805, 14574, 15283, 14633, 13645, 13630, 12902, 14547, 11355, 13312, 14706, 15328, + 14892, 13206, 11335, 14988, 14779, 14849, 12904, 13928, 14953, 13562, 14249, 15311, 10908, 10565, 14391, 12750, + 14628, 15269, 13738, 15011, 15177, 14411, 15208, 13024, 15253, 13189, 
14391, 14016, 13967, 13271, 13230, 14774, + 15340, 14278, 14107, 14660, 14589, 14831, 14573, 14636, 14920, 13901, 14447, 13676, 12917, 13878, 13861, 15275, + 12282, 11915, 11349, 12063, 10146, 14544, 14729, 14114, 12935, 14962, 15121, 14435, 14797, 14974, 14182, 14612, + 13520, 13000, 13236, 10776, 15298, 12455, 10032, 14821, 13901, 13534, 14542, 15110, 14324, 12583, 15031, 14473, + 13873, 11391, 14710, 11509, 10380, 10289, 15091, 15092, 14511, 15310, 13715, 14754, 13784, 13538, 13486, 14814, + 12393, 14797, 15186, 14915, 12193, 14874, 12726, 15297, 13292, 13347, 14946, 15246, 12510, 15002, 12070, 13776, + 13325, 14669, 11645, 14361, 14772, 12193, 14500, 14271, 13173, 15026, 13419, 14956, 13849, 10901, 14616, 15323, + 12503, 14265, 14722, 15288, 14286, 8626, 14747, 15120, 15208, 11846, 13089, 12092, 15119, 15070, 15083, 13624, + 15184, 11662, 14652, 14078, 14499, 11678, 12654, 14289, 12514, 14965, 13280, 14864, 14195, 11478, 14540, 14591, + 14205, 15198, 14905, 13152, 14493, 13467, 10031, 14115, 13166, 11903, 10966, 13184, 14448, 14174, 13510, 11370, + 15079, 13037, 14930, 14408, 15326, 15220, 11988, 13553, 14503, 14796, 13990, 10051, 14457, 14705, 14848, 15179, + 14984, 13222, 15270, 14737, 13333, 14408, 15273, 15138, 12933, 14974, 12318, 13287, 14382, 13475, 14132, 14128, + 13450, 11781, 15180, 15193, 13515, 13040, 14541, 15292, 13382, 12919, 15083, 14835, 15196, 13528, 14509, 11663, + 15289, 12932, 14434, 14573, 13145, 14489, 10397, 13742, 13937, 14293, 13896, 12969, 12293, 14304, 15313, 15175, + 13237, 13814, 14804, 14627, 13825, 11552, 13128, 14841, 12682, 13873, 12601, 14504, 14445, 10263, 13109, 15074, + 13389, 14376, 14864, 13837, 13869, 14941, 15000, 13191, 13888, 15207, 15305, 11977, 12725, 14114, 14941, 13387, + 15322, 14636, 14440, 14454, 15137, 14900, 14360, 14882, 14706, 10570, 11725, 13333, 13161, 15353, 14809, 11748, + 13569, 15172, 15106, 13364, 15044, 14474, 14925, 12468, 14848, 15015, 15215, 12168, 14850, 14293, 14254, 10347, + 14384, 14081, 
13356, 15214, 13804, 15042, 14935, 14541, 14685, 13597, 14502, 12569, 14241, 15047, 14783, 14740, + 14176, 14250, 14862, 12836, 13789, 14923, 14504, 13301, 14317, 14383, 13774, 14004, 14225, 14844, 14574, 14474, + 13015, 14034, 14403, 11605, 14022, 15007, 8914, 14980, 14546, 14460, 14106, 14463, 13662, 15238, 15335, 11984, + 13255, 15357, 15240, 14342, 13982, 15293, 14953, 13988, 13380, 13381, 14358, 15011, 14483, 12988, 14124, 13989, + 14883, 15216, 14716, 15279, 14018, 14527, 15245, 13430, 13156, 13022, 13812, 14823, 14720, 15026, 14361, 14134, + 14335, 12081, 11372, 15203, 15061, 14485, 14795, 13402, 15112, 14033, 14591, 12620, 13624, 11270, 13153, 14819, + 15254, 15238, 14538, 14416, 12734, 14753, 14973, 12699, 14667, 14862, 14181, 12883, 14428, 14387, 15333, 8596, + 15045, 14886, 14497, 15158, 12864, 13415, 12691, 14279, 14761, 14255, 12608, 8910, 14666, 13091, 14943, 14885, + 14675, 9857, 15112, 14555, 13218, 13889, 14258, 15340, 14911, 15248, 14582, 14541, 14336, 14010, 14852, 13793, + 12299, 15348, 14425, 15055, 9616, 14010, 14525, 14786, 14891, 14857, 13570, 15247, 13522, 11729, 13006, 13188, + 13094, 13103, 10958, 13609, 14322, 14449, 12782, 10166, 14991, 13451, 14970, 14698, 12713, 14433, 14618, 14950, + 14112, 12921, 13608, 13139, 13482, 12564, 12727, 15079, 10993, 14282, 14168, 14874, 14915, 14587, 13407, 11691, + 12166, 13073, 15323, 12860, 14336, 13880, 15256, 12562, 14208, 12529, 13629, 13449, 13608, 14670, 13550, 14337, + 13923, 14376, 14676, 14528, 13847, 12024, 14669, 14993, 11875, 14571, 15278, 9619, 14097, 14564, 14695, 9993, + 15181, 14381, 13419, 10848, 12118, 14011, 14115, 13347, 13556, 15165, 13856, 15313, 14259, 11693, 13479, 14534, + 13800, 15294, 11458, 15006, 14678, 13694, 13292, 13753, 14818, 12540, 11617, 13831, 15049, 14150, 12990, 12173, + 14943, 15185, 14345, 14516, 14517, 13060, 13211, 15302, 14637, 13566, 15146, 15038, 13985, 13812, 14717, 10001, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14952, 13791, 14164, 12737, 
11934, 15031, 13439, 12712, 12906, 12414, 14654, 14933, 12402, 13737, 14129, 13916, + 9443, 14167, 14576, 13085, 13727, 13493, 11702, 13936, 8239, 13861, 14721, 13159, 15302, 15111, 13543, 13498, + 11937, 14397, 15059, 15204, 13608, 14827, 15066, 13693, 15227, 14638, 13300, 14487, 14775, 12872, 13318, 13779, + 14579, 12071, 11940, 14051, 15260, 14601, 13948, 14483, 13949, 13357, 12749, 12669, 14359, 15329, 14508, 14371, + 15244, 15179, 13786, 14396, 14618, 15308, 14486, 15129, 14565, 15174, 13289, 13784, 12997, 14433, 13618, 11469, + 14480, 14915, 13362, 15059, 15265, 13447, 14679, 12058, 10475, 14519, 14396, 15006, 13361, 13998, 15294, 11767, + 12106, 11111, 15002, 14127, 12409, 11525, 13354, 11728, 13637, 14118, 14789, 13379, 14668, 13857, 13819, 14122, + 15240, 13133, 13630, 15096, 13763, 11630, 14672, 12426, 10304, 12576, 14652, 13720, 15103, 14901, 15025, 13811, + 12565, 14061, 14805, 12787, 12692, 13950, 11985, 15254, 14389, 13241, 14592, 12721, 14889, 13928, 14491, 14379, + 15035, 14808, 12637, 13372, 14274, 14932, 12043, 14637, 14407, 13737, 15034, 14744, 13763, 11479, 15021, 14644, + 14483, 14349, 13318, 14590, 14569, 14831, 14489, 15225, 11840, 14565, 14955, 15286, 14618, 13866, 12451, 9960, + 13555, 14732, 13276, 12794, 8732, 12406, 11430, 15246, 9493, 15322, 14776, 14938, 15109, 13212, 14627, 14725, + 14773, 14254, 14540, 15252, 15221, 15165, 13951, 14796, 9763, 14039, 12731, 11558, 14784, 14686, 13084, 13833, + 14443, 13980, 13237, 15175, 12906, 9297, 10323, 11540, 15153, 7341, 14417, 14840, 14783, 9810, 15089, 11515, + 15288, 14507, 15301, 14877, 14001, 14829, 11893, 15213, 12111, 15219, 14649, 13528, 14005, 14385, 14661, 14972, + 14196, 13467, 14751, 15204, 14784, 14628, 14723, 14585, 10545, 13904, 13549, 11570, 14624, 14832, 13458, 14356, + 14694, 14412, 11118, 12431, 10496, 14052, 14676, 14434, 15298, 13420, 15026, 14274, 15009, 15320, 13738, 14780, + 13034, 13975, 14138, 14991, 14761, 13836, 14622, 14761, 13706, 12807, 12786, 15349, 14305, 14453, 
14940, 12621, + 15254, 13032, 15192, 14652, 14649, 15188, 13421, 12534, 14479, 12354, 13384, 14444, 15059, 14430, 15096, 15149, + 13918, 15270, 14830, 14231, 14735, 14998, 13436, 14919, 12828, 13022, 13025, 14506, 12652, 14159, 14319, 12476, + 15074, 13494, 14012, 13024, 15278, 12584, 14757, 11925, 14466, 14677, 15139, 14258, 9445, 13192, 14685, 15220, + 13901, 14868, 14776, 15178, 13954, 12968, 15203, 13487, 11046, 13898, 14431, 15246, 14930, 14598, 14800, 15085, + 13547, 15068, 15350, 11131, 12444, 14797, 14337, 12562, 13797, 15339, 14913, 11433, 13506, 14799, 15278, 12902, + 9518, 12596, 15349, 14012, 13669, 15277, 12302, 15300, 15230, 14891, 14499, 15172, 12445, 15092, 13701, 14224, + 10505, 14584, 14295, 14696, 14971, 13325, 9581, 14497, 15305, 13552, 15298, 14998, 14723, 13657, 14821, 14675, + 14162, 15202, 12489, 13319, 11496, 14871, 14921, 14980, 12021, 14359, 13662, 14997, 14136, 13367, 11137, 14876, + 14658, 14914, 13817, 14875, 14815, 14970, 14692, 14466, 15080, 14616, 14426, 15146, 13521, 14682, 14947, 13152, + 15166, 14671, 14504, 12262, 15071, 12657, 15274, 11895, 12696, 11954, 14174, 13037, 14416, 14055, 14616, 15097, + 13917, 14955, 14915, 14771, 14637, 14692, 13891, 8587, 15269, 15357, 12561, 13020, 15233, 14689, 14971, 12720, + 13612, 13702, 13794, 13530, 13664, 15166, 14821, 14649, 13965, 14418, 14219, 15327, 14662, 14158, 15197, 13541, + 14880, 11049, 14371, 10580, 14839, 12915, 15322, 14460, 10067, 14598, 14780, 14630, 14879, 9214, 14929, 10614, + 13214, 13784, 15114, 14624, 15078, 14672, 14461, 14814, 13321, 13532, 14370, 15147, 13995, 14880, 12929, 15011, + 15304, 13199, 13460, 14325, 14465, 14150, 14493, 11038, 14140, 15069, 15093, 14332, 15208, 13247, 14044, 14478, + 9578, 14734, 14720, 13931, 12970, 13607, 13847, 14660, 15226, 14594, 14842, 13484, 15038, 14894, 15108, 14438, + 13895, 15115, 14887, 14687, 14889, 13951, 15313, 12064, 14871, 12465, 12735, 13429, 14428, 14066, 13652, 12623, + 13297, 14844, 15314, 12389, 13107, 15011, 15217, 
13937, 11824, 14704, 15298, 14604, 14783, 15001, 14203, 13604, + 13410, 13734, 13886, 13502, 14946, 13733, 15129, 15343, 15039, 14819, 12188, 14531, 14673, 14417, 13551, 15242, + 13358, 13705, 13471, 14555, 13596, 13365, 15088, 15110, 13560, 14915, 14612, 14836, 13898, 14974, 13170, 10264, + 10369, 13832, 12350, 15112, 13554, 14746, 13922, 12554, 14568, 14375, 13871, 13058, 14067, 14560, 14928, 13826, + 14428, 14898, 9445, 13883, 14500, 14446, 14712, 13086, 14566, 12668, 15343, 14116, 12327, 14567, 14932, 14989, + 10695, 13881, 11062, 13847, 14444, 13913, 15317, 14826, 12897, 13149, 13960, 13678, 14441, 15201, 13998, 14482, + 13116, 14538, 14049, 15121, 10719, 12215, 7829, 14304, 14515, 10448, 15205, 11022, 15007, 14736, 14130, 15021, + 14979, 14200, 15079, 14765, 14536, 15319, 14670, 13637, 13160, 13860, 15359, 14555, 14038, 10737, 12621, 11027, + 14744, 13854, 14346, 13915, 14800, 10442, 14947, 13344, 15166, 12194, 12679, 14888, 14850, 14995, 10270, 13520, + 12680, 13713, 14314, 10781, 13458, 13491, 10872, 14641, 15252, 14546, 14357, 15161, 12598, 13964, 13220, 14764, + 13791, 14137, 10317, 14514, 13915, 14906, 14368, 14928, 7708, 15264, 14084, 13921, 13989, 14403, 12906, 12688, + 9015, 15346, 11669, 14971, 12840, 14346, 10031, 12931, 14745, 14888, 15191, 13533, 15177, 14970, 13693, 15033, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12738, 11249, 14078, 14609, 10635, 15206, 15045, 15181, 14762, 14951, 11908, 12627, 13902, 14562, 13310, 14230, + 12320, 13379, 14419, 15109, 15096, 14750, 15152, 14785, 14703, 13826, 13854, 14276, 12430, 12546, 15195, 14483, + 13877, 14430, 13362, 13908, 15134, 10619, 12771, 13520, 14734, 14718, 12784, 15241, 13306, 14920, 15094, 14036, + 14946, 15241, 15126, 13104, 12806, 12576, 15295, 13943, 9539, 13549, 14754, 13228, 11641, 11291, 15211, 15039, + 14683, 14465, 14469, 10931, 14188, 14435, 9744, 13981, 13464, 12403, 14483, 10358, 11612, 15026, 15220, 13860, + 14893, 14665, 14998, 13958, 14432, 13865, 10341, 13826, 13903, 
12640, 14775, 13409, 14799, 6149, 14425, 14006, + 14020, 14651, 14623, 15034, 10088, 12886, 14006, 15250, 14240, 12506, 14724, 9151, 12951, 14370, 13408, 8216, + 11840, 14857, 14514, 14464, 14527, 13831, 14437, 12810, 13324, 14518, 14036, 14627, 14400, 15228, 14872, 14567, + 14603, 13354, 15109, 15331, 15168, 14295, 14343, 12411, 15018, 14535, 14346, 13049, 14959, 10711, 12535, 13549, + 14455, 14467, 13236, 15210, 12972, 15354, 14363, 12894, 14419, 14266, 14499, 12680, 14454, 14710, 13727, 13548, + 14808, 13609, 15338, 13800, 14664, 14995, 14124, 11885, 14849, 15051, 14789, 13822, 12770, 14877, 15180, 12991, + 14985, 12161, 14796, 14004, 14313, 13860, 14673, 9542, 14777, 11800, 14905, 10741, 12959, 13514, 13777, 14344, + 14562, 14401, 14363, 14385, 12680, 14437, 14748, 15257, 14846, 14010, 14753, 15064, 14284, 15109, 12801, 14565, + 14370, 11231, 14708, 12807, 15227, 14652, 13543, 15227, 14686, 14587, 8575, 12809, 15284, 15216, 15025, 15239, + 15339, 15188, 9147, 14293, 14772, 12911, 13465, 13838, 13758, 10870, 14506, 14981, 13606, 13029, 15174, 13519, + 14566, 13866, 11325, 15130, 13606, 15283, 14092, 15245, 10796, 14941, 13429, 15041, 13653, 13480, 13740, 14479, + 11245, 12652, 15194, 10026, 15239, 8662, 12044, 15020, 14553, 10327, 15291, 15279, 14820, 14920, 13392, 14636, + 15296, 14034, 12691, 14909, 14536, 12582, 13575, 11587, 11714, 15152, 14809, 12708, 14467, 14688, 13897, 14791, + 13333, 13314, 10756, 11753, 14772, 13855, 14664, 13470, 14756, 10337, 14903, 14079, 14290, 15212, 15128, 14912, + 13140, 14527, 14501, 11855, 15325, 15186, 14832, 12911, 13880, 13648, 15045, 15317, 14226, 14357, 14520, 13960, + 14827, 13564, 11613, 15300, 15105, 12736, 15198, 13445, 13960, 13867, 14006, 14414, 12310, 14349, 14304, 14679, + 11879, 14815, 12245, 14740, 15068, 14855, 14892, 15114, 14402, 12902, 13194, 14601, 13683, 14947, 9654, 14260, + 14681, 13823, 14930, 14448, 13435, 13967, 14847, 14592, 14128, 14991, 13465, 14557, 15104, 14249, 9955, 12836, + 14663, 15291, 
15235, 14678, 11663, 10824, 14350, 14672, 14811, 14370, 12414, 14938, 14696, 8497, 14451, 13943, + 12510, 14589, 14471, 13176, 15026, 13279, 13744, 11281, 15228, 14629, 12671, 12146, 14749, 15122, 11428, 15042, + 11654, 10756, 14055, 14957, 13465, 14478, 13352, 14569, 12191, 15217, 11273, 14416, 14911, 15180, 12840, 11486, + 9362, 14463, 15172, 13548, 13606, 13146, 14917, 15230, 15075, 14745, 10660, 13910, 14585, 15138, 10315, 14872, + 9295, 13327, 12666, 11036, 15269, 15143, 15142, 15071, 13495, 12295, 15328, 14677, 14879, 13895, 14146, 14926, + 13945, 12435, 7153, 14859, 14873, 8844, 14765, 14755, 14819, 12706, 14814, 14086, 14650, 14344, 14733, 12127, + 14321, 13289, 10281, 14565, 15053, 14834, 13739, 13297, 14925, 15232, 14602, 13708, 8843, 14807, 12836, 13175, + 13573, 11547, 13855, 13886, 12008, 14790, 13588, 13466, 13192, 13442, 13783, 14937, 15338, 14991, 14864, 11922, + 15137, 14431, 8940, 14870, 13898, 13972, 14843, 14854, 14451, 8630, 14049, 14725, 13357, 14709, 13536, 15029, + 15083, 14806, 14581, 13544, 14034, 14830, 14348, 14230, 13274, 14903, 12418, 14520, 14350, 14855, 15175, 15315, + 14655, 14714, 15134, 15224, 15092, 13885, 14393, 13670, 15275, 14491, 14695, 14885, 14966, 13392, 12420, 14057, + 10883, 14403, 13575, 14402, 14171, 14425, 14511, 14772, 15323, 13737, 13027, 14890, 14999, 14259, 13734, 14687, + 13525, 14952, 14845, 15037, 15159, 12745, 12085, 11247, 12621, 14907, 14937, 13327, 15086, 14835, 13811, 13569, + 14937, 13837, 14879, 14452, 15332, 15176, 15319, 12024, 11673, 15071, 14681, 14939, 11448, 6690, 14916, 15168, + 15281, 11629, 15241, 11838, 13073, 14986, 14413, 12595, 14821, 14567, 13605, 14963, 13030, 13431, 14507, 14395, + 11331, 14838, 11553, 14678, 9295, 13415, 12514, 14902, 14622, 15258, 13845, 15118, 10707, 13839, 13875, 15256, + 14372, 14682, 10826, 14704, 14197, 7892, 15220, 15343, 15041, 15136, 12089, 14841, 13334, 11789, 8736, 11450, + 14735, 15328, 11151, 14295, 14690, 13794, 15288, 14636, 13047, 15130, 14086, 14352, 
13901, 13741, 14431, 14752, + 15086, 14894, 14341, 13214, 14343, 13078, 14673, 14261, 15036, 14600, 10033, 14741, 13315, 15206, 14762, 13777, + 14919, 13981, 13743, 13330, 14716, 13341, 13127, 14210, 10176, 13259, 11669, 14069, 14179, 14281, 13539, 15196, + 14876, 12356, 15275, 14642, 15228, 13023, 15032, 12691, 12348, 14739, 14557, 10636, 12371, 15153, 15306, 14837, + 14835, 14936, 14752, 12648, 12838, 14932, 13887, 15080, 14511, 13730, 12607, 14639, 14960, 15120, 13059, 14932, + 14881, 11792, 14655, 13911, 14270, 10760, 13436, 13908, 13909, 15358, 14998, 14809, 14702, 12320, 14962, 14778, + 15070, 14953, 12928, 14761, 15200, 8266, 13614, 14312, 15167, 13428, 15185, 14988, 15324, 14380, 15084, 11765, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13349, 14767, 13700, 15137, 14334, 14457, 15116, 12772, 12821, 15326, 15231, 13144, 14251, 12760, 15118, 14666, + 14570, 13757, 14429, 15122, 15071, 14649, 14785, 12952, 12325, 13993, 14387, 15118, 12421, 13447, 13940, 13208, + 15316, 15111, 9053, 14366, 14412, 15248, 13033, 13841, 14112, 14407, 9084, 13606, 10515, 15038, 15109, 14370, + 15083, 12785, 14636, 12838, 13232, 15359, 14266, 11557, 13847, 14613, 13614, 13342, 11738, 14837, 15014, 15359, + 14044, 14118, 15281, 15090, 15172, 11742, 14222, 12034, 14735, 13907, 14597, 15008, 14990, 12888, 14548, 11496, + 14738, 15275, 14612, 13389, 13742, 14851, 13888, 12161, 9671, 14922, 13976, 13188, 14546, 13869, 13518, 13155, + 14472, 14423, 14258, 15154, 14349, 13370, 13106, 14399, 14514, 14060, 14996, 13771, 14750, 13497, 13558, 12697, + 14020, 15354, 14490, 14669, 13203, 14785, 14366, 14957, 11729, 14998, 12548, 15114, 13837, 12856, 15055, 14841, + 15260, 14075, 15072, 13184, 15200, 14300, 14604, 14388, 14977, 13157, 13622, 15187, 15133, 10325, 10030, 14895, + 15077, 15301, 15101, 12690, 13991, 10619, 12147, 12955, 12408, 14335, 14453, 15122, 14671, 15318, 13913, 14786, + 14997, 15057, 15194, 14434, 13159, 15018, 15127, 15306, 14818, 13848, 13686, 7930, 14943, 14421, 
6039, 12602, + 12785, 14510, 15132, 14992, 14396, 14706, 14874, 15352, 13506, 14931, 14472, 15270, 14720, 15134, 13832, 14441, + 12867, 15265, 14144, 13571, 14511, 14972, 14980, 14447, 11228, 14727, 14634, 13913, 14903, 13359, 14763, 14605, + 14977, 14065, 14505, 8573, 13914, 13099, 14634, 12877, 14439, 14162, 12966, 14598, 13155, 15344, 13489, 15211, + 12968, 13165, 15138, 15158, 14120, 15207, 15357, 13823, 14335, 14404, 14442, 10311, 12321, 15246, 14714, 14634, + 13616, 15340, 15131, 15175, 14604, 12499, 15011, 15333, 14490, 14083, 14425, 14538, 13441, 13572, 14698, 14377, + 14210, 15342, 13624, 13621, 11212, 15268, 14765, 15048, 12710, 11279, 15300, 14372, 13233, 14142, 15270, 13960, + 14708, 12955, 14818, 15181, 15002, 14629, 11376, 12669, 12359, 14959, 14188, 15314, 15267, 13486, 12745, 12794, + 12811, 11728, 13617, 15000, 14951, 14564, 15076, 12551, 13309, 14047, 8885, 13842, 13802, 14805, 13885, 12773, + 15331, 14355, 13744, 14453, 13471, 13816, 14683, 15058, 15329, 9035, 13121, 14679, 12803, 13099, 7862, 14397, + 12493, 14944, 13567, 15067, 12093, 13753, 14273, 14994, 13388, 15216, 13973, 15119, 15325, 14142, 14786, 15114, + 14129, 14571, 14475, 15179, 15091, 13507, 14782, 15357, 13885, 14569, 14647, 13137, 11665, 14628, 12068, 15273, + 13588, 15056, 13412, 15148, 13795, 10619, 13726, 14359, 13101, 13997, 14743, 14157, 15156, 14965, 13375, 14811, + 13360, 14540, 14327, 14195, 14343, 14803, 12551, 14312, 13624, 14773, 13287, 15326, 14302, 14880, 13349, 15112, + 13096, 15116, 14468, 14899, 13499, 11740, 14050, 14969, 15072, 11712, 10000, 13524, 14966, 13816, 14218, 14441, + 15036, 10546, 14236, 14473, 14479, 14743, 14425, 14351, 13528, 14708, 12286, 14605, 15330, 15264, 13737, 12463, + 12903, 14462, 14660, 14538, 13200, 13686, 13330, 14527, 15201, 14355, 14908, 14732, 9897, 10616, 15269, 14871, + 14620, 12304, 13455, 15279, 15339, 14298, 14249, 12895, 12692, 12276, 11821, 13358, 13195, 13425, 13857, 15250, + 14195, 14692, 13534, 14915, 13868, 14329, 12829, 
15178, 14665, 13620, 13498, 12023, 15231, 14751, 14733, 14291, + 15201, 15130, 12920, 13118, 14514, 12678, 12666, 11467, 12972, 14639, 14125, 9651, 14356, 15169, 15357, 15143, + 14887, 14755, 15201, 14374, 15276, 14499, 12802, 15102, 14097, 14663, 14452, 15356, 14568, 14664, 14277, 14847, + 14937, 13700, 14236, 15287, 13549, 12939, 14544, 12797, 14950, 14917, 12146, 13742, 13480, 14581, 15152, 15140, + 14320, 12518, 13974, 13521, 11200, 14724, 13590, 14111, 12766, 15189, 14601, 13606, 14516, 15141, 12790, 14389, + 14042, 10757, 14759, 12019, 10937, 13766, 11015, 14636, 12495, 14401, 13622, 13058, 14269, 14840, 11969, 13266, + 14670, 12359, 11913, 14587, 14992, 14975, 13287, 13783, 13363, 13682, 14673, 14571, 14508, 12979, 14360, 15098, + 14723, 14116, 11899, 13669, 14008, 15013, 14452, 9539, 13826, 14840, 14337, 15139, 13050, 14669, 15320, 14272, + 14739, 13008, 15297, 15329, 14712, 14099, 11412, 13220, 14949, 11474, 14544, 13606, 14123, 13675, 14701, 14505, + 15356, 14498, 13802, 14644, 15246, 15356, 14293, 15268, 15252, 10460, 14783, 13558, 11173, 10827, 9810, 14761, + 14217, 12678, 15270, 10533, 13597, 14710, 12544, 12975, 11551, 10872, 14311, 14419, 14904, 15317, 14747, 10450, + 15119, 14402, 13194, 13570, 14446, 14957, 12373, 14672, 13519, 13869, 12716, 14436, 13432, 14582, 15089, 14574, + 13064, 13265, 15082, 14966, 13004, 10848, 7161, 12408, 12951, 14414, 14894, 14333, 14929, 8548, 13404, 13442, + 13647, 15143, 10277, 14334, 12179, 15044, 15168, 11658, 6589, 14625, 14454, 14185, 14482, 14353, 14125, 4731, + 13389, 14757, 13201, 13555, 14399, 14009, 12029, 15073, 14840, 13999, 14836, 14722, 12792, 14827, 13067, 12731, + 15212, 14556, 14185, 14537, 13260, 10245, 13397, 13693, 14630, 15280, 14497, 14838, 14084, 14400, 13196, 15249, + 12524, 13242, 14895, 13890, 14576, 15312, 15336, 14093, 13414, 14529, 13245, 15022, 15258, 14523, 14485, 14511, + 14960, 15205, 12670, 11772, 13079, 12377, 13704, 10475, 12477, 15269, 12322, 13809, 14489, 13113, 11637, 13871, + 
13768, 14376, 13918, 14278, 13198, 13212, 14422, 14501, 14981, 14812, 13927, 15337, 14587, 12317, 13380, 13399, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12868, 14642, 14368, 14577, 15213, 14823, 15208, 14606, 12393, 14046, 14763, 13040, 14169, 14954, 11475, 14343, + 14049, 15344, 12772, 15318, 13284, 14643, 12925, 15297, 15089, 15193, 13340, 14336, 15221, 12372, 11209, 15218, + 13960, 8683, 14297, 10482, 12697, 14985, 11761, 15112, 15034, 14470, 14157, 14381, 12690, 15329, 14783, 15024, + 14202, 14325, 13011, 13337, 14845, 14954, 14590, 14410, 14551, 11561, 12445, 14858, 12922, 14973, 14430, 13614, + 10007, 12385, 11320, 11546, 14988, 15261, 14959, 14522, 13726, 12528, 14088, 14563, 13980, 13852, 13697, 11802, + 14594, 14110, 12383, 13747, 15115, 12339, 13232, 14546, 14828, 13693, 13949, 13130, 14791, 11919, 14858, 14495, + 14383, 14970, 10400, 11615, 15061, 15181, 15042, 14833, 14196, 10778, 12240, 13684, 13545, 15324, 15120, 12414, + 11994, 14240, 12966, 13916, 14119, 14444, 14609, 14188, 14458, 12521, 14080, 13603, 11650, 14485, 14817, 14696, + 14337, 15276, 13185, 8656, 14468, 13562, 14435, 13987, 14496, 14892, 14540, 13668, 13604, 15325, 13895, 11983, + 14495, 14695, 15191, 13604, 11859, 14284, 15042, 11356, 15020, 14749, 15353, 14059, 15125, 15223, 14366, 11542, + 14231, 14338, 11635, 12634, 14050, 14952, 11910, 14836, 13872, 14678, 13836, 11875, 14683, 15237, 14356, 12244, + 14574, 13483, 14630, 14888, 14789, 13485, 14258, 14356, 14835, 14803, 12957, 14453, 13929, 14678, 14713, 14384, + 14348, 13374, 13447, 14507, 13665, 14872, 15014, 14922, 12001, 12652, 14864, 14789, 12527, 11782, 14502, 12474, + 14949, 14878, 14743, 14477, 14450, 11907, 14591, 15104, 12291, 14890, 14543, 14445, 14919, 13574, 15144, 15127, + 15054, 12209, 14966, 14492, 13712, 15305, 11995, 15329, 5224, 15261, 12547, 13499, 14714, 15200, 14634, 13080, + 15357, 13243, 14019, 15020, 14397, 15091, 15077, 12495, 15095, 12669, 15247, 12419, 14130, 14526, 14304, 15321, + 14580, 14040, 
13794, 14021, 14424, 13361, 14653, 15113, 15192, 14452, 11091, 13315, 13680, 14710, 14791, 14770, + 12445, 15172, 13695, 14826, 14742, 15352, 15140, 15235, 13827, 12936, 14694, 13775, 12468, 10920, 14511, 12918, + 14573, 12773, 12787, 15310, 14995, 15291, 12902, 13459, 14058, 12680, 12313, 14896, 15111, 13792, 14158, 12297, + 13879, 11528, 14628, 13569, 14350, 12388, 14018, 14860, 13765, 11828, 14385, 15200, 15240, 15308, 14411, 14959, + 14770, 14091, 13791, 13872, 12994, 15301, 12162, 15295, 10880, 12254, 14521, 14355, 14705, 14766, 9654, 12500, + 14820, 13443, 10818, 14548, 13854, 13784, 9546, 12686, 14168, 12125, 15336, 9507, 12248, 13793, 14908, 11447, + 13708, 14486, 11432, 13504, 14523, 9182, 11685, 14088, 14393, 15044, 12590, 14143, 15115, 13495, 14608, 14177, + 13473, 12590, 13987, 15208, 15042, 14371, 14575, 13081, 14712, 12234, 10424, 13429, 14593, 15065, 14066, 14672, + 13585, 14348, 15078, 14845, 14584, 14587, 14727, 15046, 15251, 14026, 10200, 14446, 15251, 14914, 13738, 15099, + 12446, 14475, 12614, 12387, 13614, 14924, 13806, 14847, 14548, 12943, 12606, 14843, 7523, 13183, 14661, 12775, + 15215, 14122, 14144, 11341, 15061, 13885, 15189, 15042, 13486, 13331, 14245, 12114, 15344, 14431, 14798, 14540, + 14541, 14628, 13423, 13916, 13357, 12855, 14865, 15139, 15323, 12118, 13587, 14557, 14104, 14264, 13605, 14283, + 13968, 14475, 14077, 15020, 15028, 15091, 12169, 14176, 11294, 13598, 13240, 14052, 15128, 15199, 14107, 11717, + 15129, 15187, 14174, 14917, 15357, 14299, 13505, 12130, 13879, 14796, 14629, 15330, 14411, 12917, 14311, 13171, + 12878, 15164, 12306, 14220, 14913, 14729, 14961, 14562, 14565, 14346, 13355, 13971, 9334, 15155, 13950, 14780, + 11435, 12662, 13496, 14462, 15188, 13426, 11917, 14446, 15254, 15263, 13996, 14655, 11175, 14472, 12235, 15280, + 11631, 14781, 13728, 15083, 14066, 12777, 15353, 15108, 13910, 12318, 12840, 14219, 15247, 14987, 14382, 14855, + 14981, 15134, 14765, 13563, 14279, 14699, 14272, 15100, 13996, 11116, 14600, 
14669, 14114, 13278, 13157, 12265, + 14863, 13784, 14354, 14569, 14756, 14588, 13759, 15057, 14629, 13249, 15277, 12726, 14752, 12097, 15106, 14185, + 14054, 13990, 12372, 13759, 15177, 14489, 15002, 14371, 12812, 14058, 11731, 9530, 14382, 14973, 13105, 14542, + 14114, 10150, 13711, 14879, 14412, 14458, 14447, 13822, 14116, 14546, 12965, 14560, 14550, 14709, 15194, 14364, + 14718, 15124, 12154, 15307, 11419, 11590, 11874, 10892, 14227, 12642, 13884, 12534, 14870, 14410, 14157, 15224, + 14994, 14538, 15181, 12764, 13805, 14361, 15284, 14943, 14418, 12629, 13363, 14922, 11999, 12411, 14967, 10281, + 14867, 14728, 14556, 14985, 9140, 14041, 14926, 15219, 14843, 14723, 14426, 13454, 11661, 15169, 14133, 15091, + 14436, 14988, 13837, 10516, 15256, 15147, 12594, 15303, 11772, 15325, 15002, 12550, 15005, 14934, 14373, 15033, + 14643, 11828, 13969, 15128, 10492, 14395, 13358, 14460, 14970, 12817, 15096, 15327, 13785, 14776, 14183, 12108, + 11366, 15344, 13474, 14572, 13346, 14936, 14552, 8730, 13569, 13527, 14562, 15127, 13906, 13774, 13824, 15081, + 13678, 9906, 14866, 15024, 14137, 14644, 15088, 13407, 14285, 11144, 14375, 14498, 11559, 11856, 13855, 13397, + 13235, 15172, 14609, 13732, 14845, 14470, 15290, 13806, 11588, 11501, 15346, 15281, 12926, 13471, 13408, 9565, + 13210, 12308, 14646, 14101, 12205, 12124, 12805, 14375, 14688, 10303, 12390, 15358, 14792, 14419, 14877, 13754, + 11896, 11868, 14357, 13481, 14422, 14157, 15001, 10806, 13946, 14425, 13427, 15077, 12342, 14615, 15177, 14267, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14571, 13932, 14899, 13507, 15017, 14258, 13305, 14288, 14511, 14493, 11922, 12536, 15239, 11957, 14441, 14187, + 13777, 13468, 13861, 13513, 13347, 14733, 13706, 13558, 12462, 14213, 11970, 14180, 14985, 15084, 14433, 13455, + 15230, 14825, 13756, 14805, 14913, 14685, 11920, 13489, 13380, 13392, 13596, 13522, 14501, 14454, 14951, 14521, + 14738, 13952, 10380, 10576, 13794, 12741, 13159, 13436, 14999, 14432, 13249, 12376, 15324, 
10577, 14988, 15145, + 14193, 14919, 14988, 15016, 13849, 14038, 9970, 13686, 12098, 15275, 14028, 14897, 13331, 12667, 13446, 13347, + 12364, 13389, 12993, 14919, 14891, 13644, 15160, 15091, 14470, 12691, 15261, 14572, 14695, 11406, 15187, 13047, + 14341, 14941, 15309, 13682, 11244, 15269, 13491, 13531, 14863, 11547, 15152, 13935, 14797, 14330, 13439, 13710, + 15139, 14643, 15164, 12267, 13863, 14861, 13174, 13962, 13747, 15236, 15251, 15233, 15190, 13810, 14795, 13440, + 14620, 12259, 13442, 15098, 15051, 13282, 14803, 15007, 14144, 14906, 12693, 14812, 13283, 14721, 12692, 11247, + 14863, 13110, 14556, 12363, 13028, 14666, 14357, 13117, 15014, 14758, 15205, 10270, 13331, 12837, 15246, 6536, + 13736, 15209, 11852, 14151, 15066, 14735, 13170, 13183, 13006, 14607, 12962, 14984, 15264, 13636, 14987, 14690, + 15186, 11843, 14348, 13745, 14889, 13014, 11658, 13625, 15247, 14358, 15265, 13371, 15162, 15251, 15216, 14594, + 14442, 12369, 12145, 14183, 14800, 14897, 13979, 14746, 7810, 10504, 14749, 14445, 15196, 15045, 13311, 15024, + 14506, 13892, 13740, 13676, 14000, 13534, 11752, 12551, 14510, 14410, 12900, 14965, 14556, 13013, 14740, 12646, + 13935, 13189, 13848, 12302, 14976, 13643, 15311, 13844, 14346, 11973, 13525, 14655, 14718, 14126, 14966, 15288, + 13658, 14522, 14858, 10926, 7456, 12925, 14746, 15178, 13588, 13555, 14737, 14772, 15045, 13717, 10952, 12862, + 13540, 15140, 15252, 9438, 15293, 14841, 8899, 14364, 15111, 14980, 14754, 14724, 9410, 10524, 14152, 14646, + 14135, 15054, 15243, 14542, 13510, 14837, 15113, 13914, 14093, 14388, 15156, 14697, 15206, 14263, 15062, 14414, + 10414, 6955, 14399, 14343, 12411, 14963, 14663, 14472, 13819, 9035, 14407, 13481, 11612, 13458, 14514, 12616, + 12410, 15345, 14694, 15358, 14618, 10082, 14253, 14816, 12390, 13217, 14153, 13959, 12673, 11829, 14427, 12733, + 14023, 13866, 13032, 9857, 14541, 14906, 13762, 15214, 14707, 13010, 14269, 15242, 15051, 5037, 15135, 15050, + 14648, 15040, 13946, 14891, 13678, 14589, 15035, 
14413, 13805, 10637, 13281, 14978, 10622, 14991, 13718, 15282, + 14387, 13634, 14586, 14815, 10897, 11680, 14210, 12445, 11610, 14606, 14774, 14498, 14113, 14510, 12833, 9182, + 14721, 13993, 13950, 14511, 14893, 13500, 14647, 13819, 15302, 14149, 13959, 15331, 14028, 15284, 10294, 14796, + 14118, 14337, 13573, 14156, 13939, 14957, 14616, 13997, 12946, 14878, 14244, 14682, 14999, 15244, 12555, 14091, + 14812, 14618, 11903, 14926, 15122, 14385, 15112, 14136, 15177, 13249, 14814, 14923, 15232, 10896, 14320, 14589, + 14879, 14563, 15326, 13486, 14869, 13476, 14860, 14494, 12578, 14498, 12649, 11412, 13953, 14547, 13921, 15066, + 12139, 11691, 14127, 14452, 14694, 11944, 15040, 15233, 11745, 13400, 15130, 7584, 14981, 14851, 13071, 13134, + 14484, 14397, 14610, 12575, 14227, 15022, 14544, 15049, 11867, 14457, 11918, 15019, 15188, 13086, 11066, 7373, + 13969, 14574, 14859, 14000, 14278, 15328, 12579, 9435, 13839, 13512, 14669, 12386, 12950, 12775, 15064, 9541, + 14216, 13554, 15062, 13749, 12379, 13530, 13056, 14718, 15019, 13736, 12575, 11513, 12545, 13755, 15186, 10544, + 14251, 15285, 13795, 14820, 14785, 12014, 14029, 14500, 15249, 14716, 14231, 14619, 14216, 14845, 14640, 14476, + 14356, 14760, 13170, 10798, 13870, 12981, 14420, 13344, 14635, 14244, 13489, 14904, 11573, 11898, 15007, 13666, + 12644, 13451, 15250, 14804, 13356, 14814, 11825, 14502, 12366, 13849, 14826, 14440, 14199, 14646, 13620, 15100, + 14453, 14018, 15132, 13416, 15260, 11986, 14778, 14982, 13436, 11735, 13655, 13956, 13164, 13735, 14721, 14442, + 14881, 10452, 13980, 14981, 12154, 14692, 13967, 12094, 14919, 12843, 12372, 15104, 15139, 15070, 14457, 13500, + 14994, 14601, 15173, 15079, 14667, 15305, 13009, 14740, 7247, 14508, 11431, 14923, 15169, 13546, 14646, 14010, + 12697, 12131, 15349, 15294, 14616, 14431, 14593, 14471, 15346, 14323, 14259, 14388, 15241, 11660, 14828, 14511, + 7579, 14373, 12022, 13792, 11313, 14000, 15060, 13473, 13377, 14827, 15200, 11490, 12437, 15048, 15030, 13099, + 
15028, 15252, 15027, 12833, 13822, 12482, 14264, 15092, 13718, 14677, 15257, 13507, 14526, 12705, 12496, 13895, + 14053, 14781, 14527, 14475, 13213, 14759, 14862, 10391, 13375, 12475, 13912, 14315, 13261, 14778, 15008, 15220, + 13352, 12088, 14402, 15133, 13998, 14262, 14478, 14936, 15207, 15088, 11523, 15168, 12691, 14996, 12743, 14552, + 14941, 11605, 10490, 12420, 14422, 13468, 13670, 14376, 14101, 14969, 13665, 14733, 14890, 14670, 14727, 14014, + 12495, 11699, 11969, 14873, 15042, 14766, 13644, 15150, 13152, 13729, 15001, 14444, 13716, 15031, 14763, 14772, + 13927, 15357, 6246, 13691, 13800, 12993, 14756, 15042, 14702, 14191, 14750, 14129, 13437, 15360, 13165, 14369, + 14720, 14371, 13901, 14442, 12761, 14655, 15261, 15002, 14939, 13745, 12646, 14903, 12456, 9329, 15051, 12924, + 14562, 13676, 10311, 14546, 15293, 14618, 14665, 14433, 15028, 13789, 14699, 12288, 12957, 15106, 11819, 14334, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13573, 13751, 13534, 14482, 13085, 14725, 15187, 14357, 14510, 14972, 13122, 13857, 14452, 13882, 15304, 14102, + 14930, 14788, 12758, 12592, 14773, 11447, 13826, 14874, 13498, 14823, 12627, 15185, 13828, 15329, 14668, 14185, + 15203, 14754, 13115, 12446, 15248, 14511, 11881, 15275, 14505, 12638, 15202, 13794, 15106, 14858, 15088, 14373, + 14707, 12337, 13599, 13873, 12472, 15326, 13465, 13559, 14668, 14485, 15013, 13995, 14550, 13789, 11286, 14697, + 14470, 14931, 15231, 10198, 12734, 15065, 14927, 15333, 14092, 13996, 13466, 11921, 14751, 14910, 14188, 15206, + 14176, 15333, 12645, 13993, 15257, 15312, 13875, 12455, 14582, 14510, 13953, 12783, 13546, 13785, 14064, 15006, + 15145, 13949, 13362, 12169, 14960, 12271, 13642, 13920, 14667, 15076, 14949, 14638, 14943, 14989, 14858, 12928, + 15137, 13904, 14987, 13688, 15072, 14630, 15132, 13976, 10166, 14871, 12545, 14244, 13391, 14484, 14844, 15049, + 14380, 10053, 13860, 10250, 12646, 13321, 14249, 12239, 13082, 15326, 14079, 14493, 8335, 14189, 14960, 14826, + 15207, 11383, 
12636, 15194, 14890, 11779, 13032, 14835, 14501, 14346, 13640, 14595, 14874, 13657, 9604, 14908, + 14344, 14950, 14027, 15318, 14870, 15271, 13449, 14442, 13961, 14776, 11926, 13628, 14825, 13414, 15340, 14392, + 11908, 14702, 14958, 14787, 14672, 14921, 12969, 14656, 15020, 13459, 15159, 14200, 15051, 14565, 10247, 9639, + 13906, 15334, 15290, 12495, 15253, 15279, 13083, 14517, 13490, 14644, 15316, 14844, 12854, 15303, 11775, 8660, + 13689, 14841, 15262, 14614, 15277, 13553, 14128, 12459, 13556, 14608, 11273, 12203, 14791, 11476, 15282, 13952, + 15057, 15231, 14871, 14876, 14743, 13977, 12419, 13207, 14571, 9160, 14634, 12460, 14962, 15251, 15194, 12544, + 13896, 15317, 14343, 10352, 14011, 14271, 12197, 12913, 14134, 14462, 8056, 12061, 12729, 13769, 14235, 15249, + 13752, 15123, 15070, 14573, 14954, 13678, 11589, 13168, 13807, 14299, 14571, 15309, 14689, 14936, 14722, 12828, + 13533, 10558, 14135, 14514, 13138, 15034, 13403, 12971, 14819, 14936, 15271, 14756, 12664, 15188, 12805, 13726, + 15295, 12813, 13289, 12691, 13804, 14027, 13130, 14058, 13605, 15098, 13653, 15250, 14851, 14607, 14211, 14882, + 13424, 13652, 14799, 15226, 13753, 12327, 14234, 14440, 14538, 15249, 12713, 10910, 15113, 14008, 15135, 13538, + 13572, 10487, 14364, 14750, 15317, 14717, 12872, 13595, 14549, 12594, 14914, 12927, 14721, 13524, 13926, 13990, + 13988, 14628, 13318, 12163, 15271, 13611, 14860, 14579, 12542, 14989, 12477, 12344, 13104, 12408, 14947, 11388, + 15234, 12879, 14551, 15116, 13790, 15016, 13842, 13296, 14878, 11333, 8216, 13749, 15196, 13368, 14748, 13929, + 14552, 14142, 14334, 13410, 12592, 15170, 14835, 15011, 14728, 13930, 13849, 14095, 15340, 12848, 14595, 6739, + 11397, 11785, 14164, 13366, 15332, 15180, 14528, 13415, 14204, 14893, 13352, 15237, 14859, 15076, 13419, 14728, + 10000, 13986, 15303, 14748, 11425, 14988, 14633, 14562, 14075, 14644, 8594, 14949, 13333, 12941, 15232, 11957, + 14149, 14716, 15355, 15285, 15086, 13599, 14473, 13669, 14465, 11010, 13479, 15189, 
13664, 14498, 13945, 14734, + 15052, 14776, 13432, 15007, 11552, 9837, 14801, 12725, 14039, 13423, 14845, 9116, 14774, 13006, 15260, 12376, + 14933, 13341, 14589, 10318, 15318, 13464, 14642, 14845, 14543, 12762, 12993, 14520, 15166, 12685, 12754, 14052, + 15167, 15057, 14095, 14841, 15119, 13840, 14904, 13524, 13728, 14508, 13927, 11425, 13315, 15211, 14078, 12395, + 11404, 14409, 12680, 15304, 14474, 14344, 14925, 13320, 11713, 13564, 12971, 14311, 13716, 12599, 9682, 11737, + 14283, 15311, 15288, 12540, 13797, 14768, 14838, 15230, 15340, 10919, 9372, 14643, 12465, 14508, 15002, 15118, + 11518, 15070, 12746, 14632, 14387, 15237, 11272, 15049, 15050, 9744, 14026, 14820, 15050, 12453, 14867, 14157, + 10374, 14258, 13257, 15210, 13211, 13941, 14627, 14974, 13626, 14533, 14486, 13725, 14839, 14217, 14583, 13746, + 14369, 12985, 15309, 14032, 13081, 9843, 15092, 13027, 15239, 15282, 12059, 14817, 15337, 14850, 14538, 14853, + 15207, 14123, 15019, 14574, 14829, 15001, 12713, 13402, 14898, 12966, 14592, 15142, 15033, 14043, 14136, 15077, + 13931, 15165, 13537, 14965, 14086, 14689, 13520, 15034, 13674, 14442, 13147, 14322, 15219, 15021, 11795, 13700, + 12305, 8762, 14849, 12946, 10247, 14434, 14725, 12041, 14959, 14248, 13569, 14235, 13263, 13040, 14982, 11773, + 13420, 14413, 13816, 12404, 14745, 14791, 14976, 12493, 13996, 11966, 13091, 14732, 14863, 13784, 15239, 14899, + 13339, 14575, 9390, 13559, 15083, 14833, 14559, 14902, 14561, 14544, 13705, 15230, 14367, 12799, 14766, 13400, + 15158, 14811, 14992, 14463, 15264, 14338, 14657, 13997, 11073, 15330, 14840, 13320, 13403, 14989, 14516, 9448, + 13564, 14451, 11165, 15052, 14407, 14694, 14669, 14602, 14376, 14135, 14862, 14232, 14568, 14373, 15154, 12381, + 14613, 15335, 14853, 13361, 15240, 15327, 13769, 14071, 10670, 14466, 12294, 12220, 13419, 14868, 12441, 13451, + 14935, 10071, 11120, 14476, 15192, 15245, 15296, 14354, 13760, 11191, 13342, 13524, 14903, 15289, 15281, 14464, + 14462, 14362, 15198, 14064, 13795, 
14132, 15281, 15111, 13738, 13398, 13652, 13508, 14183, 14599, 14224, 14965, + 15180, 14422, 14699, 14931, 13759, 14852, 11241, 12754, 14923, 15145, 14387, 14887, 11807, 14880, 14954, 15063, + 13073, 12300, 10638, 15186, 9862, 14796, 14912, 13865, 12617, 7513, 14722, 14122, 14567, 14788, 13554, 14257, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14706, 13077, 13596, 12309, 13511, 13196, 14587, 15181, 9607, 14857, 10683, 11543, 14053, 12754, 14989, 13641, + 13740, 11771, 13941, 14794, 14790, 8942, 14503, 14571, 14633, 14343, 12571, 15324, 13633, 13796, 14577, 14058, + 14463, 13783, 12826, 13191, 12921, 12330, 14237, 14959, 15124, 13504, 14420, 13380, 14672, 8452, 14303, 14910, + 14793, 11577, 14186, 13817, 15008, 14065, 15118, 7885, 15172, 12956, 13743, 11513, 14503, 13086, 14949, 11559, + 12599, 11894, 13528, 14003, 14386, 13503, 9359, 13928, 13516, 9455, 9563, 9217, 14625, 13067, 14610, 14458, + 10548, 13280, 14204, 14336, 15290, 14600, 12345, 15118, 13942, 15291, 14776, 12328, 13158, 14636, 15165, 9806, + 14313, 15289, 12306, 14408, 10322, 15263, 15234, 13217, 12949, 12692, 14431, 14006, 15180, 12744, 11946, 15283, + 13019, 9972, 14517, 14651, 15094, 14813, 15019, 12759, 11990, 14466, 12328, 14735, 14885, 14285, 10360, 14571, + 14932, 13410, 14942, 12221, 14495, 12437, 14452, 15190, 15178, 15026, 14673, 14865, 15203, 11010, 14780, 13846, + 15321, 14111, 13975, 14649, 14378, 14559, 14732, 15019, 14958, 14212, 14912, 13053, 11298, 14732, 13499, 14888, + 14826, 15204, 13763, 14026, 14020, 13589, 10734, 12480, 13399, 14264, 15147, 15243, 14251, 14437, 15049, 15135, + 13839, 15196, 12536, 12874, 14858, 14403, 15187, 13317, 14996, 15338, 13866, 14823, 14654, 14711, 14630, 14611, + 15313, 14845, 11592, 14764, 13785, 15301, 15061, 10940, 14748, 12109, 14683, 14297, 13301, 13912, 12415, 14799, + 14907, 14697, 15084, 13582, 12133, 13318, 13677, 14166, 12881, 14883, 15119, 14915, 14757, 14995, 14426, 15201, + 15246, 14846, 11293, 15328, 12208, 14526, 14848, 15019, 
14765, 15043, 15297, 15165, 11126, 12915, 15329, 13461, + 15334, 14971, 14991, 14983, 12884, 15251, 13364, 14312, 13490, 13878, 13990, 14702, 14224, 13382, 15216, 14699, + 14371, 13382, 11453, 14361, 14817, 15032, 7745, 6630, 14310, 14157, 14333, 9366, 13390, 15227, 13582, 14600, + 15119, 15032, 14368, 15357, 15060, 13567, 13517, 15064, 14636, 14844, 13922, 12411, 14511, 15115, 13819, 13551, + 13413, 12104, 13579, 14611, 14782, 8620, 15205, 15317, 14530, 14549, 13579, 10176, 15193, 15243, 13493, 14307, + 11900, 12175, 13739, 13394, 13057, 15191, 11324, 14588, 15322, 11506, 14432, 14309, 9898, 13776, 13918, 13877, + 13417, 12431, 14647, 13592, 11037, 13554, 14555, 15016, 13964, 14404, 14481, 15330, 14138, 15210, 15349, 14478, + 15279, 14944, 14452, 14807, 11545, 14628, 14822, 13279, 15161, 14050, 14705, 10784, 14960, 14303, 14180, 15312, + 14636, 13079, 14270, 14897, 13586, 14516, 14603, 14377, 14707, 15257, 14885, 13792, 14638, 14373, 14673, 13374, + 14428, 14441, 14878, 13455, 8880, 12778, 13206, 13396, 13430, 14496, 14996, 11248, 13922, 11203, 15055, 14180, + 15254, 14253, 14607, 14661, 14112, 13682, 15312, 13638, 15154, 15289, 14381, 13398, 14565, 12987, 10834, 14710, + 13480, 14939, 15321, 12330, 12143, 15335, 14766, 14157, 14869, 15246, 12315, 14760, 14086, 14562, 14177, 14339, + 14412, 15056, 14622, 13763, 14464, 15302, 15321, 15047, 13314, 14891, 12527, 14798, 14188, 13697, 15213, 13210, + 14290, 14946, 14882, 15192, 15169, 13251, 14302, 13732, 14952, 12987, 14887, 14994, 15224, 12014, 13542, 13457, + 15125, 11638, 15139, 15122, 15238, 12875, 14503, 15034, 14425, 15251, 13425, 13840, 13809, 15106, 10651, 14506, + 14242, 15185, 13879, 13599, 14673, 14566, 13929, 15031, 6327, 14795, 15114, 14157, 14777, 14818, 14641, 14605, + 13107, 14453, 14719, 14048, 15025, 14983, 13813, 15200, 14455, 14871, 14454, 14617, 14636, 10686, 14971, 12863, + 13101, 14231, 14947, 14338, 13597, 9683, 14816, 13505, 12576, 14350, 14387, 15256, 15219, 15042, 13869, 14559, + 11794, 
13066, 14221, 11801, 15295, 14615, 14718, 13054, 14588, 14488, 12648, 12854, 14003, 15095, 9558, 14883, + 15200, 14592, 14467, 14113, 13716, 14853, 12665, 9905, 13287, 15155, 13363, 15010, 15056, 11987, 14456, 14734, + 15117, 14939, 14432, 15134, 14124, 13339, 13696, 12881, 15349, 14164, 15321, 13599, 15217, 15202, 3923, 15259, + 14457, 14042, 14768, 15293, 15258, 14179, 15350, 11007, 14877, 12674, 13836, 13158, 15209, 14031, 14793, 14185, + 15314, 9099, 14846, 14835, 15021, 14885, 13699, 14831, 15352, 11648, 14544, 14837, 14856, 13689, 14992, 14703, + 15154, 13509, 13547, 14701, 14102, 14951, 11598, 13794, 14621, 14386, 14435, 14101, 14702, 13432, 14062, 14262, + 13505, 15194, 14952, 12563, 13440, 14346, 15197, 15009, 14427, 14560, 14598, 14889, 13423, 11963, 14109, 13608, + 14558, 15047, 14257, 14493, 14985, 11563, 11464, 14841, 11219, 13252, 14259, 14128, 9712, 8939, 14409, 13825, + 15067, 13638, 13548, 14047, 14105, 14604, 14379, 15321, 13254, 11409, 13950, 13432, 15180, 14570, 14010, 11476, + 15092, 12861, 15136, 14509, 13936, 14687, 12474, 12449, 12857, 13823, 15192, 14787, 14684, 14614, 15029, 14493, + 14946, 14543, 14922, 15104, 13840, 14766, 15003, 12512, 14148, 12933, 12862, 14868, 10530, 13757, 14976, 12793, + 13960, 12118, 14258, 14544, 15088, 13801, 15287, 15283, 11485, 14819, 15271, 13632, 14577, 15032, 13887, 14104, + 15155, 14981, 13988, 13858, 13399, 14387, 15302, 11285, 13363, 14525, 13224, 10812, 14188, 11397, 12561, 11889, + 14424, 12475, 14170, 14900, 14402, 15063, 15302, 14678, 13008, 11526, 12249, 14908, 14337, 15289, 14305, 14546, + 12587, 14528, 12134, 13770, 10490, 14221, 14356, 14715, 14469, 14084, 14383, 12166, 14935, 14331, 13974, 13845, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15311, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13223, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14019, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12710, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15004, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 14715, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11168, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15113, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15147, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11494, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12979, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12524, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13158, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13748, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13642, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15028, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12265, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13201, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11141, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9438, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11409, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15285, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14983, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14718, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15239, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11817, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13893, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15029, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13986, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13439, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14122, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9817, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14702, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12928, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15012, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 15351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11719, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15344, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14918, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13958, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15088, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14523, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14980, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14979, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14812, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13001, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13563, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13362, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13968, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13762, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13065, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12385, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15225, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14990, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15069, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13788, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14931, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14990, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14982, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15080, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13066, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14356, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15250, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12394, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9415, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14437, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 12115, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14725, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13966, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12655, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14756, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13942, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13527, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13158, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11358, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15083, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14711, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14754, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15053, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14887, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14881, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13215, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14817, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13764, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14784, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15335, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15258, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14473, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13482, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14965, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12658, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14461, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14540, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15304, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14318, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 12994, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14880, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10159, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14040, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14572, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11781, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15307, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12841, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13381, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15039, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12309, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14362, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15289, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13918, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12468, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15093, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14832, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14426, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13946, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15056, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13813, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14275, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15029, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13626, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14313, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14732, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14330, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14840, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14869, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15042, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
15359, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13828, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14957, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10747, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14626, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14958, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14971, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12795, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14462, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15085, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13300, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13782, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13474, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12718, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14708, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12598, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10943, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13670, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10273, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12433, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10868, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15150, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13743, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14465, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15145, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11424, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15037, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14674, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13399, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14428, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14643, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
15169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11390, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14805, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13479, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13588, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14408, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14436, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14417, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14508, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14889, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14493, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14877, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14294, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14907, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15328, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14143, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11402, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13226, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13038, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15081, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12441, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13993, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15225, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12069, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13800, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15265, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14287, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14682, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14915, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14786, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14838, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13580, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
15230, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14710, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14875, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14822, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14905, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13643, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10921, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13200, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14915, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15130, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8974, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14340, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13723, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15316, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13681, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14850, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14559, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9648, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14762, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13034, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13436, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14443, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14714, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14574, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14492, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12691, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14963, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12259, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11975, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14557, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9641, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13489, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13904, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15003, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10955, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14866, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13412, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14225, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14417, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14991, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15225, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13544, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14558, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15256, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14501, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13131, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14720, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14966, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12590, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14279, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14497, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11343, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11777, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14587, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13948, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14451, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13453, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14795, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14985, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12466, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12233, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13323, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14826, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11399, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13114, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11823, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14990, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14176, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12825, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12309, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14788, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13536, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14950, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13414, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13685, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14777, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13562, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14354, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14724, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14538, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15182, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14618, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14675, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15075, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14749, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14719, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14196, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13736, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15078, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12978, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14400, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13677, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13368, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14991, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12229, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14492, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13741, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15273, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13782, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14539, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14374, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15213, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13447, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15137, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15157, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12397, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13829, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13865, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15358, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12294, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14667, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13513, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12674, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13948, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13264, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10190, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14658, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13265, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13508, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12045, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14701, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14403, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13591, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15178, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14521, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12874, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12003, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15252, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14204, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8557, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14711, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14935, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13494, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14805, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14064, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13769, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14449, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14557, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14386, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13791, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13554, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14121, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13689, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14219, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14391, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14285, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12708, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13974, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14540, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13963, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11042, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12815, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12325, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14744, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11303, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12979, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14960, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14465, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15327, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14910, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 10312, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14917, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15076, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11177, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13309, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14405, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14505, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12587, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13546, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14143, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15316, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14730, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14871, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15281, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13992, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13498, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15151, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13642, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14971, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14443, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13431, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14703, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12289, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14976, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13670, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11460, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14681, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13925, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 7895, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13159, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 12588, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14612, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15076, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15088, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14904, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12790, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15330, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15009, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14875, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15321, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13641, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9955, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13717, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14057, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15316, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13065, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13927, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14612, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14544, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15316, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11303, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14617, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13322, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15251, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13988, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12943, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12453, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14228, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14908, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12458, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14760, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14680, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13034, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 11972, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15139, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15257, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14159, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14162, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13521, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11571, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9925, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13604, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12404, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11356, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14953, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14756, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14494, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12470, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13291, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14781, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13846, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14913, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12481, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11316, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13570, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13279, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15226, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14906, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12050, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13830, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14730, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14299, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11877, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14444, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13613, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13359, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 14773, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14964, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11519, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14371, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13226, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13796, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14776, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10682, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15285, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11951, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14108, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14670, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12520, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12821, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14478, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11697, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11659, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12309, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15062, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14265, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12642, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14022, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13408, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10861, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14684, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12901, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14474, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15288, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13453, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15023, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 7694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 13426, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13328, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15163, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14159, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11945, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14205, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15090, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14558, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13630, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14486, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11309, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14553, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15016, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14802, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13441, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15003, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14502, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12401, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11568, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14955, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15063, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14820, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14188, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14440, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14451, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10311, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12791, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13537, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14924, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12819, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12260, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13714, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13097, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13440, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 12879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14399, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14297, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12826, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14055, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13003, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13813, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15048, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15087, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14951, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14545, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15103, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14061, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14657, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14015, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14761, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14812, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11055, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14067, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10060, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14206, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14775, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10538, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13345, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12638, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9803, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13634, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13094, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11646, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15172, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14701, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6894, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11650, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
13152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15055, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14696, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12585, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14312, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12396, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12352, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14249, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15294, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12423, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9397, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13565, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12313, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14460, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15225, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14402, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13424, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13864, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14430, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14947, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12937, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13553, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11419, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15130, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13498, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12895, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11594, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13440, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14945, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12921, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11823, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15080, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11966, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 7366, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15325, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14975, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14349, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14678, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14182, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15122, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14543, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15177, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14752, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15312, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14013, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12934, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14690, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13964, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14605, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10868, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15112, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14816, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13234, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11503, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14690, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14845, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14677, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14388, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11864, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14590, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13955, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14417, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15318, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13571, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14696, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14561, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15039, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10297, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14028, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13540, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15236, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14089, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15260, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15018, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15225, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12357, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12428, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14403, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14549, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14823, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13690, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14619, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10875, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13276, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13927, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13136, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13474, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14797, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14527, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14692, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14711, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14656, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13066, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14738, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14392, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13022, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14830, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13754, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13540, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14060, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12909, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10469, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11385, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11339, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15301, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14049, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13474, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14806, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, std::vector({4, 4, 33, 47}), + std::vector({48, 3, 16, 16}), DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferHwcnFz, fp16_2c_1n_no_pad) { + uint16_t data[4 * 4 * 32 * 16] = { + 13806, 13095, 13727, 14808, 13612, 13867, 14515, 12368, 14613, 15295, 15356, 13586, 14432, 14739, 15106, 14484, + 13409, 15214, 15282, 14885, 14896, 14875, 14517, 9257, 14501, 14843, 15065, 14267, 12990, 14554, 15217, 10129, + 14744, 13007, 13407, 11702, 14995, 13373, 
15292, 14921, 14753, 13614, 13575, 13989, 9145, 15049, 14442, 11777, + 14390, 13940, 14822, 15230, 11272, 14838, 10152, 14461, 8859, 14953, 14561, 14063, 11510, 14919, 10480, 15025, + 14666, 14613, 13324, 15090, 13397, 15106, 14520, 15205, 13308, 13386, 14750, 12426, 14589, 14590, 14929, 13459, + 13732, 15318, 14639, 14397, 15097, 13759, 7356, 10931, 14838, 14215, 14685, 13403, 14879, 14796, 13485, 12440, + 14911, 8399, 14502, 14091, 13644, 14027, 15003, 13237, 14734, 11784, 7548, 13579, 13743, 15239, 14600, 14040, + 11878, 15008, 12381, 14722, 13712, 14400, 11938, 9446, 15221, 13834, 15202, 14448, 13116, 14962, 14882, 15277, + 15333, 14080, 14033, 13890, 14402, 14420, 10648, 14716, 12297, 15011, 15221, 14593, 15110, 14956, 15224, 13431, + 15329, 15093, 14320, 12826, 13563, 13251, 14727, 14402, 14915, 12296, 15062, 14372, 15037, 14643, 14562, 7777, + 15094, 15293, 14900, 14372, 13739, 14092, 13876, 15024, 14376, 14041, 12717, 15171, 14981, 14574, 14861, 14026, + 13807, 11517, 14601, 13507, 13076, 15309, 14965, 14132, 14708, 14216, 8873, 13380, 11667, 14413, 13045, 14767, + 9960, 11656, 13814, 10819, 15341, 14816, 13910, 12320, 15036, 12565, 12416, 14423, 14166, 14205, 12163, 14341, + 13669, 14637, 13064, 14411, 14660, 15146, 14497, 11286, 11776, 15303, 13797, 11833, 10348, 14510, 14489, 14951, + 13150, 14682, 14964, 14557, 14655, 14649, 13778, 14853, 13877, 13854, 14537, 14805, 14339, 14604, 12755, 14446, + 14133, 13810, 13342, 14696, 14124, 14333, 14436, 8293, 14242, 9630, 14285, 12482, 14799, 15341, 13986, 15337, + 12834, 14571, 13842, 14693, 13897, 15110, 10902, 14994, 10270, 12038, 12516, 14821, 14603, 13836, 14620, 15187, + 15230, 13949, 13066, 11501, 13365, 14616, 11888, 14767, 14642, 15046, 14931, 15297, 14608, 13967, 13735, 14376, + 15014, 14186, 13073, 10631, 14345, 15195, 12164, 14945, 13149, 12423, 15277, 13360, 14388, 14847, 15293, 13555, + 13723, 14587, 14636, 15192, 9774, 13456, 15051, 13391, 12420, 11845, 12879, 11806, 15106, 14794, 13005, 14234, + 
15335, 14399, 13686, 14887, 15130, 9538, 13607, 14439, 14871, 14433, 14242, 14751, 15034, 14852, 13567, 14611, + 12380, 15250, 14157, 13623, 15341, 14082, 14913, 14760, 14384, 14631, 14203, 14222, 14798, 14377, 14474, 14525, + 14014, 14135, 13244, 15103, 14455, 14541, 13822, 11518, 15225, 13323, 7199, 12220, 14036, 12299, 13621, 15276, + 14854, 13497, 15012, 15247, 13026, 15121, 14665, 12751, 14629, 12436, 15170, 14726, 13929, 14546, 15284, 14887, + 14651, 14146, 14324, 11633, 14114, 13958, 12640, 13035, 13499, 12723, 14832, 14954, 14855, 9684, 9371, 14591, + 14405, 14517, 14740, 13504, 11461, 10991, 10230, 15153, 14064, 13482, 11716, 12868, 13942, 14435, 14670, 15018, + 13692, 14627, 14448, 14203, 13789, 15011, 13862, 15126, 15125, 14149, 12027, 13994, 13656, 14623, 15004, 14164, + 14358, 15296, 12613, 13644, 14040, 12123, 15264, 4184, 14829, 10735, 5753, 14045, 14064, 13309, 14393, 14800, + 14656, 15188, 12738, 14801, 14428, 14264, 14183, 13442, 14254, 15283, 15150, 14656, 11472, 13355, 14786, 14264, + 10697, 13812, 14663, 14509, 13735, 13884, 14858, 14500, 11859, 15211, 15173, 14220, 15151, 13624, 15259, 13578, + 10813, 15186, 14629, 13457, 13525, 14590, 14588, 14935, 14544, 11815, 12337, 13570, 14359, 15195, 11959, 14637, + 14613, 12399, 14412, 13718, 14683, 11112, 13683, 12741, 11727, 14897, 13642, 13944, 15125, 14991, 14713, 14883, + 15260, 12383, 10308, 15299, 13465, 15153, 8844, 14363, 15204, 12358, 15209, 14698, 15277, 15338, 14719, 14466, + 13830, 13521, 14495, 13657, 14403, 13514, 14748, 14443, 14442, 15095, 14440, 14478, 14735, 14740, 13948, 14033, + 13496, 13515, 14764, 13314, 13069, 14956, 14907, 11661, 15225, 14348, 13229, 14677, 13825, 12459, 14140, 15206, + 14526, 12514, 14585, 15163, 14143, 12734, 15296, 14989, 14742, 11718, 12456, 13663, 13440, 6123, 13798, 12376, + 14287, 15120, 13386, 13620, 14396, 14561, 13464, 13720, 13816, 13352, 14067, 12389, 14635, 15136, 12883, 13846, + 13446, 11829, 14056, 14871, 13644, 12598, 14576, 14270, 12771, 14160, 
15202, 11817, 14953, 13198, 14838, 14745, + 14400, 14569, 13546, 15069, 14425, 14027, 15212, 13096, 11350, 14093, 14407, 14022, 14426, 14707, 14780, 14269, + 14434, 14258, 14744, 12609, 14368, 12288, 15173, 12931, 14864, 15146, 14056, 13498, 14283, 15189, 13981, 13311, + 9240, 14248, 12462, 14624, 11895, 15162, 13921, 15299, 14540, 14388, 14987, 12622, 15081, 14175, 14421, 14437, + 14347, 15050, 13519, 14860, 14820, 11834, 14296, 14542, 15017, 12860, 13806, 15257, 14110, 13312, 14515, 15287, + 10303, 14190, 14690, 14665, 12245, 14735, 14956, 14860, 14638, 14899, 14598, 13066, 13694, 14519, 15162, 13602, + 15042, 11100, 14530, 6954, 13390, 11297, 12703, 15167, 15284, 14136, 13286, 13800, 13035, 13776, 15127, 10592, + 14725, 13528, 13359, 13572, 15103, 14288, 10059, 14578, 14485, 13672, 14954, 14692, 15040, 11330, 13005, 14797, + 13972, 14597, 14197, 14820, 14751, 14765, 15323, 13621, 15156, 9580, 12919, 12959, 15172, 14370, 15070, 14296, + 11263, 14768, 12119, 15172, 14817, 14637, 13926, 12512, 14697, 14314, 14373, 13788, 14452, 13998, 12854, 14952, + 15321, 13076, 14493, 9265, 14266, 14904, 9275, 14903, 15200, 14155, 14735, 14128, 13365, 14688, 13540, 13159, + 14934, 14642, 15075, 13506, 15222, 15182, 9578, 14508, 13424, 14929, 13941, 10393, 14705, 10687, 14939, 12726, + 14127, 9095, 13795, 14624, 14041, 11266, 14409, 13380, 15265, 14533, 13036, 13767, 14610, 13996, 10719, 12623, + 14374, 13752, 14071, 15068, 11756, 12401, 14449, 14911, 14489, 11589, 15152, 14054, 13388, 12578, 13978, 14007, + 13835, 15161, 14757, 14376, 13918, 14406, 12660, 14174, 15356, 13764, 13203, 13590, 13898, 15266, 14773, 14327, + 14000, 14794, 15330, 15032, 14612, 7367, 14397, 13772, 15217, 14780, 14851, 14519, 13610, 13687, 15356, 11601, + 12991, 14557, 14392, 8490, 13772, 14966, 15208, 13421, 14624, 9429, 14850, 14421, 15124, 14790, 14048, 13294, + 12655, 15031, 12525, 14565, 14916, 14719, 14572, 15357, 14688, 15198, 13728, 13106, 10922, 14621, 14659, 14507, + 14554, 10413, 9370, 13387, 
14571, 14673, 12877, 15302, 13355, 14581, 15349, 14192, 12655, 15100, 14476, 13602, + 13529, 13247, 14656, 15353, 13731, 15087, 13523, 14183, 13647, 14655, 13542, 15299, 14710, 14827, 10528, 14723, + 14393, 10024, 13015, 14554, 8081, 13621, 14425, 14767, 14150, 12452, 15117, 15084, 14362, 13974, 11640, 14757, + 15288, 12095, 13698, 14972, 15056, 12374, 14507, 13738, 14759, 13795, 7740, 14183, 9493, 14028, 15316, 15130, + 14857, 13965, 14816, 13921, 13745, 15208, 14836, 12133, 14958, 14749, 13934, 15198, 13046, 13631, 14570, 15345, + 14369, 13737, 14895, 14970, 14099, 13311, 13700, 13259, 14632, 11422, 14620, 12998, 14099, 14377, 13737, 14107, + 11938, 13488, 13833, 14546, 15019, 15290, 15004, 15166, 14622, 14622, 15267, 14732, 14052, 13046, 13959, 13768, + 14380, 14868, 13898, 14908, 5924, 15353, 11701, 12646, 11670, 13484, 12968, 15162, 14877, 12961, 14689, 11524, + 14089, 12773, 13822, 13988, 13378, 13969, 15154, 12290, 14565, 14632, 14928, 12580, 14813, 10258, 12158, 13284, + 13061, 14418, 13771, 15288, 11609, 12532, 14655, 15321, 14607, 13132, 11631, 13974, 12917, 14443, 12514, 14617, + 13985, 14789, 15085, 13771, 13121, 14933, 14823, 14682, 14430, 12324, 13499, 14372, 14654, 13760, 14552, 13346, + 13309, 15058, 13974, 10906, 12380, 14630, 14805, 13820, 13998, 13700, 13624, 13707, 15173, 14003, 14339, 13742, + 14670, 14986, 13653, 13379, 13873, 14734, 14389, 13754, 9124, 11640, 14943, 13966, 14878, 13892, 13715, 13351, + 14059, 13037, 14592, 10648, 13028, 12127, 14908, 14807, 14394, 14392, 14217, 12246, 12750, 13119, 14564, 15245, + 15351, 14549, 15202, 14081, 15312, 14752, 13626, 14646, 12548, 13178, 13866, 15188, 13745, 15225, 14078, 14557, + 12826, 15071, 15066, 14983, 14240, 15282, 14682, 13230, 14203, 15012, 14416, 15073, 13928, 14116, 14599, 14647, + 15328, 15296, 14301, 14865, 13744, 11216, 14780, 10023, 14267, 14998, 15047, 14949, 14631, 14926, 13539, 15263, + 15047, 12439, 14566, 14046, 13916, 14668, 13047, 8764, 14373, 13251, 13734, 15291, 12897, 
13931, 13664, 15282, + 15314, 13983, 11826, 14414, 11105, 13273, 14625, 14260, 14726, 14388, 14899, 11318, 14349, 14242, 14831, 13686, + 13342, 14427, 14777, 14708, 14755, 13522, 11429, 15230, 13785, 14603, 12260, 13488, 15048, 13541, 9110, 13764, + 12707, 15122, 15274, 10342, 14712, 10786, 10538, 14183, 15285, 13409, 15305, 13224, 14289, 14591, 14508, 14627, + 13824, 14966, 15345, 12714, 9400, 14793, 14180, 14283, 12357, 13830, 15145, 15291, 14825, 15180, 14399, 11282, + 14590, 12802, 14429, 11624, 13780, 11542, 14717, 13465, 12288, 15063, 11620, 13643, 15032, 13206, 9425, 14349, + 13242, 13172, 13402, 15245, 13069, 14608, 9124, 14233, 14369, 14376, 12624, 14424, 12223, 14903, 13459, 14617, + 14251, 14442, 14783, 13818, 14884, 11731, 14621, 14635, 15182, 14692, 11855, 15252, 13659, 12766, 13559, 12506, + 14667, 12649, 12001, 13820, 14723, 14639, 13368, 14420, 14389, 15202, 13787, 14763, 13390, 12451, 14642, 14609, + 14822, 15129, 15280, 14458, 14867, 14278, 14488, 15121, 14035, 14977, 14043, 14808, 14828, 14395, 13222, 13327, + 10728, 13731, 13935, 15226, 14908, 15135, 13028, 13848, 13779, 14063, 14907, 13060, 14775, 15350, 13534, 15006, + 15024, 14973, 15003, 14295, 13706, 14110, 13736, 15271, 14543, 14011, 14615, 12655, 14722, 13028, 14534, 14489, + 15037, 15264, 12406, 14343, 14217, 13185, 15042, 13631, 10571, 14375, 15326, 12831, 14584, 14655, 10803, 14609, + 13811, 12090, 14831, 14701, 10128, 13042, 14975, 15263, 14811, 14727, 15164, 14893, 15008, 14436, 15197, 15280, + 8743, 11209, 12488, 13807, 12894, 12182, 15086, 14366, 14429, 13417, 11720, 13331, 9330, 13118, 13074, 13090, + 15094, 13641, 14959, 15213, 15013, 15269, 12862, 12976, 15288, 15356, 14795, 15012, 12561, 14759, 15034, 14038, + 9778, 13031, 14808, 14678, 15356, 14094, 12393, 14951, 14752, 13653, 9924, 14883, 14733, 14909, 9676, 14145, + 15359, 10964, 15061, 13199, 14599, 13838, 14088, 15086, 11199, 14787, 13256, 12118, 12185, 14928, 12341, 14888, + 14545, 14423, 13994, 12794, 15248, 13271, 
14320, 14416, 14941, 14685, 15140, 14756, 15094, 14757, 14868, 15304, + 14944, 14320, 11567, 14791, 15264, 15034, 15182, 14503, 13323, 13324, 14999, 14722, 14997, 12679, 14684, 15013, + 14564, 14474, 14261, 15086, 15023, 15228, 14266, 13352, 12831, 14639, 15103, 8938, 14001, 10684, 14068, 13174, + 14999, 10132, 13607, 13709, 12395, 15233, 14381, 14975, 12266, 12536, 14015, 6267, 12300, 14270, 13181, 12819, + 14848, 14634, 13462, 12356, 14361, 15281, 9308, 12407, 14451, 11467, 14665, 12594, 13265, 14448, 12481, 14728, + 14576, 15150, 14705, 15248, 13997, 13729, 13200, 15035, 14567, 11646, 14679, 15199, 12323, 12386, 11540, 12949, + 12735, 14896, 14629, 13832, 14972, 14151, 14776, 12629, 13226, 11525, 14325, 14652, 15305, 14345, 14111, 12583, + 11436, 11131, 14094, 13345, 14573, 14517, 14162, 12211, 14180, 15033, 12107, 14963, 14499, 14567, 14663, 14515, + 14208, 12023, 13363, 13989, 9301, 12282, 15235, 12073, 14008, 10672, 8516, 14669, 14291, 15329, 11501, 14646, + 15250, 14490, 12736, 14391, 12502, 14759, 14883, 14908, 13450, 12981, 15242, 14074, 14886, 15015, 12892, 12287, + 15315, 15306, 13141, 12640, 14110, 14439, 14998, 13098, 15344, 13419, 14353, 14571, 13869, 14523, 15106, 14900, + 7650, 14466, 14998, 13760, 15221, 14103, 15193, 9583, 13653, 14753, 14368, 14824, 12183, 15108, 13009, 14833, + 11544, 14396, 13033, 12363, 13964, 15339, 14118, 14507, 14458, 9970, 14571, 14136, 14167, 14161, 14747, 14367, + 14473, 14949, 14588, 15069, 15169, 14931, 15042, 13244, 13620, 14940, 14716, 12370, 14592, 15256, 15176, 13131, + 12861, 12425, 15104, 12757, 15320, 12552, 14503, 14623, 11175, 14994, 14067, 14113, 12414, 14574, 14220, 13715, + 8668, 12503, 14065, 14773, 14690, 13318, 14681, 14421, 14448, 14085, 12330, 11724, 14657, 14515, 13518, 15306, + 10256, 14230, 14367, 13378, 11179, 15211, 14538, 14355, 15336, 14982, 15106, 14784, 9286, 12571, 15291, 13989, + 14968, 15347, 14449, 13055, 14780, 15066, 13177, 14921, 10121, 14978, 13606, 13808, 15262, 10256, 14921, 13711, + 
13666, 14818, 14741, 14899, 14671, 12992, 14115, 10424, 15316, 14101, 12937, 12902, 13383, 15154, 13193, 13249, + 13830, 12473, 14069, 12857, 14865, 14550, 14061, 14364, 14391, 14082, 11114, 11864, 13627, 15047, 12517, 12101, + 14416, 13567, 14676, 13817, 15350, 11259, 13090, 14283, 14316, 13800, 13907, 14270, 8484, 14951, 14977, 14549, + 14215, 8464, 13369, 15207, 14184, 12419, 13966, 15104, 13700, 11973, 14410, 15196, 14315, 12551, 13923, 12616, + 12194, 12795, 13982, 13941, 14871, 14597, 14685, 14758, 13830, 13317, 14707, 14301, 12967, 12983, 15329, 14714, + 13667, 13431, 14603, 14971, 14698, 14629, 13249, 14944, 12378, 10772, 14658, 11787, 12375, 12629, 14881, 13844, + 14424, 13714, 14615, 14181, 14520, 14386, 14306, 13402, 12004, 13022, 15341, 13101, 14546, 15166, 13331, 12086, + 13832, 14362, 14933, 14773, 14621, 7654, 14906, 14001, 14780, 11189, 13672, 14542, 14725, 14675, 14582, 15240, + 14958, 15084, 14714, 13194, 10711, 14779, 13307, 14489, 10644, 14622, 13923, 14944, 14815, 15229, 15110, 13351, + 14819, 14206, 12392, 14034, 12086, 12334, 13636, 14499, 14374, 14775, 10388, 14592, 15263, 14604, 12000, 15100, + 15277, 15345, 14364, 15348, 12008, 15327, 15336, 12929, 13654, 13554, 9368, 14385, 12476, 14563, 15293, 13711, + 15008, 14957, 14543, 12567, 12705, 14290, 14505, 14852, 14363, 13179, 15039, 14875, 15267, 14115, 15120, 14907, + 15287, 13638, 14907, 15242, 14866, 13320, 15180, 15313, 12481, 14317, 10209, 9322, 14561, 13739, 14981, 13836, + 12877, 15041, 14548, 14898, 13844, 13760, 11632, 14435, 12401, 14846, 14708, 14584, 14384, 14981, 12350, 8065, + 14770, 13863, 14405, 13518, 13138, 10392, 11647, 12903, 14035, 14660, 11324, 13966, 14641, 14639, 13342, 15142, + 13383, 12024, 12628, 12952, 15171, 15294, 15273, 12862, 14859, 12312, 14807, 15177, 12995, 15159, 15024, 14986, + 14448, 13614, 15231, 14404, 12676, 15251, 13482, 13668, 14898, 14941, 11418, 10582, 14568, 14933, 14642, 12728, + 14817, 15064, 14375, 13739, 14769, 14800, 13017, 8404, 14154, 
14151, 13709, 15102, 14219, 14095, 14164, 14356, + 14790, 15122, 13603, 14630, 15081, 6234, 13574, 13672, 13566, 11194, 15308, 13840, 13811, 14604, 15289, 14967, + 15073, 14914, 14382, 12211, 12625, 15009, 13559, 14120, 13599, 14552, 7565, 12879, 13141, 9609, 15299, 15101, + 12549, 14388, 15092, 12394, 14384, 14419, 12981, 14902, 15351, 14487, 14458, 14500, 14431, 11934, 15293, 14636, + 14119, 15186, 14908, 13275, 14106, 15315, 14944, 12423, 14845, 13950, 14839, 15168, 14925, 14149, 14340, 12772, + 7531, 14506, 14357, 12727, 15227, 14830, 14884, 9975, 10239, 13422, 13400, 14334, 9820, 11421, 14492, 14378, + 14490, 14391, 14428, 15068, 14760, 12583, 13472, 13343, 14876, 15322, 15220, 15171, 14094, 11672, 14885, 15299, + 14917, 15313, 14480, 13776, 14902, 14584, 14689, 13032, 10764, 13786, 14932, 14706, 15328, 14286, 14803, 14396, + 15137, 14327, 13572, 14865, 14803, 14696, 13964, 11298, 12704, 14550, 14064, 11001, 15002, 11836, 14785, 11406, + 14942, 14346, 12835, 14943, 13640, 14735, 15016, 15262, 13978, 14891, 13379, 14554, 12950, 13824, 11564, 14949, + 15063, 15245, 14859, 12230, 15169, 13561, 14916, 13872, 14400, 12344, 10524, 14047, 14236, 14192, 15288, 14930, + 12800, 11550, 14806, 11367, 14639, 13894, 14386, 12825, 10757, 13934, 15321, 11438, 14273, 15115, 14508, 14599, + 12535, 15159, 14838, 12997, 14630, 14835, 15179, 15039, 12926, 14369, 12763, 11807, 13075, 15135, 14614, 14965, + 14550, 11270, 15298, 13566, 14843, 14139, 14811, 15337, 13671, 14605, 13306, 13332, 13870, 14734, 13640, 15118, + 12257, 11347, 11858, 14479, 14544, 13673, 14583, 13537, 15231, 13718, 14361, 14920, 14433, 15002, 15211, 14129, + 13453, 13502, 11924, 12717, 15137, 14024, 14426, 12980, 14780, 14303, 15174, 15158, 14269, 14374, 7792, 14717, + 14535, 9855, 15207, 14556, 14406, 14495, 15145, 14658, 14935, 15177, 13782, 14265, 12824, 8563, 14073, 12717, + 13345, 13594, 14659, 9114, 12902, 13318, 14955, 12709, 14510, 11136, 13201, 13580, 14843, 14923, 15038, 15171, + 14564, 14357, 12864, 
14026, 15226, 15007, 14938, 14345, 12731, 13654, 12374, 13722, 14902, 13995, 13852, 15076, + 10660, 15198, 15217, 14911, 14343, 14348, 14428, 13377, 13551, 13633, 13791, 13548, 10796, 13318, 14685, 11495, + 14860, 15087, 15129, 14933, 12814, 14521, 14983, 13291, 14507, 13118, 13684, 15091, 15126, 13549, 15134, 11403, + 15070, 14996, 15234, 13741, 9226, 11271, 14918, 14201, 14552, 13796, 11755, 15139, 15211, 12835, 14442, 14146, + 14282, 7665, 14178, 13042, 14694, 14791, 15016, 14327, 14864, 14066, 14550, 6217, 14446, 13096, 13759, 15302, + 14813, 12401, 14974, 14435, 13636, 12642, 15261, 15161, 14426, 15060, 14280, 15129, 15278, 15180, 15218, 13293, + 15229, 13850, 14984, 11289, 15113, 14198, 13605, 13652, 15268, 13698, 13691, 13581, 14329, 11382, 15358, 11295, + 15181, 15257, 14778, 14311, 13805, 15098, 14663, 14269, 14705, 12986, 14366, 13882, 14571, 13101, 14447, 14550, + 13627, 14625, 14942, 14384, 12838, 13334, 9048, 15029, 14393, 13700, 13703, 15242, 12444, 14616, 13121, 14593, + 14824, 13241, 14911, 14911, 13554, 14799, 14759, 8446, 14086, 14767, 13603, 15136, 13046, 14048, 14462, 11841, + 12739, 15006, 14520, 11084, 10554, 14975, 12745, 13688, 14682, 14906, 14860, 15285, 13912, 14968, 12803, 13739, + 13364, 14643, 15122, 11453, 10925, 14355, 15214, 15145, 15193, 14211, 15247, 14544, 15050, 14720, 13044, 14877, + 12383, 11467, 15122, 14065, 15143, 14668, 14168, 13832, 13329, 14684, 15314, 13389, 14936, 14987, 14041, 13607, + 15206, 14449, 13328, 12937, 14915, 14260, 14964, 14135, 15334, 13518, 13398, 15357, 12295, 12879, 14855, 10682, + 12968, 14178, 13953, 15304, 15235, 15185, 14256, 14706, 14267, 14801, 14482, 14224, 14404, 15326, 14169, 12255, + 13845, 15275, 14475, 14169, 15022, 14384, 15260, 13489, 15009, 14157, 15033, 12798, 14670, 12576, 14802, 15158, + 14052, 15192, 15109, 14007, 14734, 15232, 15101, 11470, 13939, 13468, 13542, 13524, 15242, 12826, 14840, 15080, + 13342, 15327, 12483, 14129, 13671, 15055, 15314, 11696, 14542, 11470, 13993, 13982, 
14851, 11399, 15174, 14479, + 14112, 12515, 12732, 14729, 12577, 13593, 15197, 12711, 11793, 10940, 14714, 12755, 12961, 14839, 14733, 15240, + 15326, 15332, 11469, 15126, 12487, 15295, 15223, 14188, 13699, 15306, 11545, 13444, 15201, 9126, 15045, 15347, + 13519, 9008, 13912, 14939, 14916, 14771, 13639, 11888, 15167, 14528, 15007, 14406, 13605, 14447, 13869, 12728, + 14702, 12337, 12030, 15028, 14176, 15087, 14656, 12422, 14681, 14822, 13338, 13587, 12821, 10611, 13790, 14541, + 15139, 14369, 14093, 15231, 13910, 10903, 14633, 13503, 9978, 15064, 13615, 13769, 15294, 15177, 13505, 12004, + 9794, 14856, 14926, 7941, 13507, 14548, 14089, 15234, 14332, 12310, 15175, 14363, 14875, 12065, 13845, 14519, + 14989, 15315, 14735, 14268, 13368, 14978, 10664, 15159, 14991, 11726, 14266, 13899, 13603, 13089, 14179, 13473, + 13660, 15302, 13580, 11360, 14979, 14486, 14799, 11461, 14439, 11442, 14926, 15156, 14101, 13455, 15316, 12621, + 14308, 14497, 13488, 13687, 13359, 14746, 15150, 11373, 14332, 13611, 14853, 12368, 15189, 14446, 14118, 12987, + 13547, 14539, 14114, 14351, 15101, 13074, 13587, 12512, 13517, 9006, 14637, 14340, 14708, 12369, 14635, 15162, + 9228, 14243, 14706, 13566, 15270, 13365, 15300, 14157, 13747, 14978, 14579, 14472, 12518, 13756, 15293, 13373, + 14086, 15107, 14460, 12114, 14527, 15242, 6978, 12515, 13859, 12820, 13849, 13849, 12797, 8637, 14204, 14697, + 11501, 9308, 15134, 14521, 14266, 12785, 13587, 13447, 13909, 14998, 15290, 14739, 14921, 13936, 15060, 15257, + 13199, 10840, 14221, 14720, 15038, 14533, 14357, 14077, 11344, 12516, 11405, 13507, 14325, 15286, 15192, 14976, + 14066, 14022, 13662, 14657, 15347, 14943, 14240, 15289, 14084, 9475, 15057, 14640, 14916, 14295, 11341, 14507, + 13637, 14135, 14761, 11402, 13441, 13965, 12508, 14480, 14037, 15353, 13340, 12895, 12525, 14772, 13714, 13974, + 14885, 14642, 14637, 15293, 10785, 10642, 13488, 15246, 14858, 15170, 14339, 13520, 14951, 11369, 14526, 11179, + 12466, 15300, 14557, 13686, 13747, 14826, 
14611, 14911, 14567, 14509, 15044, 14872, 13849, 13590, 15070, 14012, + 14980, 11240, 14872, 15051, 15325, 12448, 14448, 10775, 14917, 14147, 14927, 14687, 9830, 15189, 12409, 15115, + 13458, 15056, 13223, 13472, 15061, 14869, 13821, 13067, 13879, 14486, 12163, 14147, 14961, 14082, 14761, 14418, + 14939, 14048, 9090, 14179, 13824, 14544, 14259, 15020, 15041, 15008, 13209, 12596, 14012, 15327, 11377, 14378, + 14661, 14486, 15110, 10552, 15333, 13660, 13390, 14757, 14941, 14792, 14752, 10426, 14834, 10525, 14414, 12497, + 14609, 13776, 12653, 14686, 14441, 12853, 11509, 14207, 13862, 14065, 14766, 14723, 14827, 12366, 13381, 14395, + 13838, 15099, 15235, 13972, 14532, 15143, 13809, 14739, 12697, 14652, 13692, 14734, 14305, 13794, 14331, 10096, + 12560, 13208, 11919, 13150, 15040, 12767, 14712, 14503, 12302, 15273, 10348, 11778, 14530, 14829, 13231, 14397, + 13764, 12737, 15214, 12956, 12790, 15107, 12710, 13622, 14686, 15191, 8974, 8901, 14604, 13517, 12519, 7660, + 14080, 12615, 14537, 14686, 14484, 14986, 14060, 14529, 14419, 12365, 14785, 13154, 14524, 15185, 13907, 10463, + 9674, 14232, 14162, 14282, 14350, 14214, 13876, 7291, 14781, 14826, 15204, 13889, 12672, 13651, 15160, 10888, + 14467, 14566, 15135, 15279, 11570, 13915, 11849, 14896, 12279, 15338, 12676, 13538, 15283, 15210, 14380, 14481, + 15317, 8981, 14024, 11822, 14084, 13564, 14603, 15304, 13438, 13331, 15102, 14831, 15191, 14865, 13992, 15142, + 13088, 14787, 15351, 12659, 15087, 15356, 14949, 15328, 14556, 13551, 14022, 12664, 8387, 14792, 14164, 11896, + 12167, 14836, 14403, 13919, 11485, 14687, 14762, 14320, 12362, 14905, 13773, 14347, 14898, 13939, 14955, 15141, + 13638, 14306, 13379, 10411, 14360, 14590, 15036, 13750, 14112, 13771, 11862, 15337, 13574, 10838, 15218, 11729, + 14851, 12388, 15134, 14422, 15065, 12071, 14394, 14186, 14317, 14420, 14679, 14605, 15323, 14751, 13058, 10505, + 7726, 13122, 14658, 13882, 13903, 12118, 15222, 10237, 13787, 13435, 12454, 15352, 13828, 12546, 14437, 13650, + 
13569, 14860, 15170, 13765, 12346, 15032, 15350, 14736, 15185, 13332, 14956, 13593, 14435, 14764, 13052, 14519, + 14018, 14503, 9486, 11507, 13666, 14993, 11741, 12645, 13800, 14407, 15158, 9707, 14499, 13467, 13907, 14746, + 14392, 14656, 11324, 15249, 15301, 14954, 14010, 15037, 13190, 14681, 15209, 14299, 14049, 15252, 13789, 14672, + 10574, 14653, 11895, 14390, 13443, 14555, 12379, 14440, 14477, 13317, 15033, 13446, 13468, 15087, 12728, 13797, + 14838, 15352, 14248, 14377, 13232, 9848, 14712, 12790, 15300, 14559, 13838, 15023, 11970, 13831, 14692, 13483, + 9700, 11227, 9618, 12026, 10277, 15008, 14624, 12127, 14626, 13616, 14601, 13534, 14163, 15326, 13382, 15143, + 15056, 13131, 14693, 13998, 14768, 14492, 13730, 14610, 13825, 11281, 11817, 14627, 14507, 15081, 14687, 11281, + 15349, 14458, 15267, 13294, 13644, 13327, 13734, 14925, 14974, 14433, 14108, 14502, 13798, 14716, 13870, 14537, + 15249, 12439, 14078, 14500, 11968, 13125, 14763, 13934, 14815, 14647, 13657, 14574, 13642, 10620, 11331, 12178, + 14344, 12075, 13604, 13629, 14495, 15125, 15064, 14623, 15196, 15336, 9752, 14504, 14583, 13033, 13461, 15264, + 14875, 14123, 14915, 13841, 10161, 13399, 14146, 14428, 13326, 13381, 14358, 14160, 13739, 15011, 14515, 11971, + 14217, 15080, 14630, 15085, 13470, 14598, 14901, 13434, 14030, 13639, 15169, 12621, 15187, 15181, 14937, 14154, + 12830, 14970, 15145, 12995, 13979, 14640, 13853, 14843, 12617, 12614, 14433, 13653, 15359, 14858, 13778, 14771, + 12505, 15023, 14909, 14649, 14207, 14764, 11846, 12357, 14566, 13914, 14832, 14754, 13556, 10960, 12775, 15031, + 14619, 15231, 13184, 14876, 14243, 14602, 15114, 13440, 15092, 14997, 13830, 14095, 12906, 13686, 15034, 14472, + 12183, 12877, 14650, 13831, 12088, 14718, 13219, 15084, 15304, 12178, 12316, 14526, 14905, 9571, 15224, 14513, + 10995, 14959, 15218, 14457, 14438, 14942, 12535, 15048, 14834, 14603, 13238, 13351, 14942, 13431, 15000, 13631, + 13510, 14381, 15120, 12470, 12632, 14715, 14425, 13993, 14875, 
13033, 13948, 13702, 13685, 14903, 12291, 14205, + 14519, 15248, 13815, 13203, 15277, 14510, 12082, 15163, 13850, 14631, 13590, 14867, 14363, 14087, 15170, 13387, + 14715, 13363, 14522, 14471, 13898, 14089, 14784, 12113, 15056, 13467, 14827, 9668, 11690, 13916, 14857, 10959, + 15264, 14016, 14320, 14933, 13048, 13717, 14343, 11618, 14563, 15052, 14542, 10817, 13519, 15266, 15356, 15202, + 11896, 15025, 13954, 14868, 12171, 14451, 15285, 13297, 14506, 14999, 9888, 15243, 13872, 14005, 9873, 14435, + 14405, 14551, 14173, 10745, 14532, 12334, 14693, 15217, 14585, 14761, 13665, 15063, 13589, 12738, 13566, 15199, + 12537, 14448, 14931, 13450, 11278, 13428, 14953, 14810, 14008, 14780, 15012, 14864, 15123, 15162, 13241, 14594, + 14631, 14748, 14979, 14764, 14362, 14812, 13591, 15260, 14476, 14850, 14259, 14673, 13588, 13686, 12912, 14589, + 15043, 14571, 12861, 13675, 15140, 15179, 12887, 13893, 13489, 12563, 14706, 13769, 14114, 14768, 15260, 15292, + 13759, 15224, 15006, 12667, 13070, 13980, 12151, 15033, 10084, 15034, 14778, 12201, 11638, 15210, 14166, 13004, + 14362, 15322, 14125, 14567, 9460, 13817, 14324, 12669, 14987, 13141, 14433, 14832, 13052, 12663, 12150, 13937, + 12341, 14653, 13660, 12791, 10882, 12038, 14821, 14242, 12224, 13018, 13949, 14970, 13357, 14500, 14785, 15196, + 12509, 15316, 15126, 15253, 9265, 13512, 14462, 13497, 14756, 14667, 10462, 13951, 14968, 12701, 13316, 13538, + 14744, 12309, 15115, 14530, 14224, 14585, 14521, 14488, 14806, 13741, 10275, 7312, 15201, 13568, 15337, 14904, + 14470, 13496, 14599, 14830, 15190, 14988, 14413, 13789, 14529, 14437, 14429, 11876, 14372, 14536, 15049, 13898, + 13602, 15062, 14691, 13918, 12928, 13022, 15312, 12396, 12246, 14864, 14662, 13570, 13891, 14186, 14746, 14465, + 14484, 14585, 14448, 15211, 14186, 14378, 13479, 12813, 14548, 7606, 13114, 14521, 14673, 13586, 13395, 14333, + 13868, 14744, 14047, 14341, 12840, 15038, 12619, 14995, 13370, 14282, 13480, 10657, 12327, 15055, 15358, 14453, + 12713, 11867, 
14739, 13437, 14926, 12985, 13667, 15212, 12974, 14567, 15328, 14797, 10698, 14259, 14793, 14968, + 9327, 13180, 13004, 11648, 12454, 12971, 14527, 14679, 13450, 14547, 12990, 14360, 14727, 14684, 10982, 14366, + 13682, 15250, 14597, 14564, 14477, 14505, 13543, 15005, 14676, 10469, 13611, 14584, 14953, 14768, 13401, 15273, + 11483, 13338, 11901, 14961, 13797, 14560, 13204, 14721, 14389, 15198, 14378, 15008, 12495, 14767, 9769, 14928, + 13573, 15306, 9589, 14525, 15271, 15108, 13663, 14746, 14697, 13803, 12605, 13027, 15101, 14549, 13048, 14313, + 12059, 15242, 14855, 15337, 15025, 14935, 15134, 13405, 14131, 12513, 15241, 14871, 14351, 14601, 14982, 15135, + 15180, 15095, 15172, 13253, 12041, 13266, 12252, 13532, 14843, 12313, 13678, 12978, 15271, 12043, 14414, 14097, + 14349, 14965, 11744, 13750, 13977, 12267, 12570, 14729, 14288, 15324, 14589, 14824, 13875, 15200, 14465, 13566, + 14355, 13461, 13145, 11319, 14457, 11799, 14723, 14698, 13062, 13499, 14035, 15001, 14439, 14343, 11418, 15168, + 14044, 14972, 14751, 14552, 14498, 14726, 14438, 11072, 13483, 15096, 14373, 14428, 13410, 15128, 15006, 14769, + 14418, 14741, 14392, 14399, 12688, 14464, 10780, 15065, 12176, 12655, 7990, 13994, 14371, 13410, 14548, 11498, + 14791, 15172, 14857, 14664, 11823, 15285, 14471, 12875, 12146, 15265, 15298, 11449, 11053, 15148, 12772, 15048, + 14913, 15006, 14769, 14228, 14584, 12798, 15192, 10774, 14398, 14672, 15232, 12814, 12995, 14442, 14874, 11907, + 14910, 13687, 14127, 13729, 13538, 13689, 11610, 15291, 12771, 14596, 10423, 14414, 15045, 14176, 9344, 14882, + 13791, 14389, 14541, 13741, 14403, 14610, 14162, 14984, 12299, 12372, 11094, 10672, 14679, 14782, 15165, 14361, + 14898, 8269, 12368, 14505, 14856, 12242, 12651, 14028, 14855, 14457, 15338, 13931, 14909, 14698, 15060, 14302, + 15340, 13345, 11257, 15254, 14474, 13009, 14546, 13021, 14055, 13614, 13964, 13546, 13034, 13241, 14891, 14217, + 13388, 13754, 15068, 14360, 7563, 14843, 14422, 15266, 14519, 15195, 12648, 
15057, 13959, 14728, 14679, 12358, + 5035, 14859, 11429, 15119, 12845, 12535, 14652, 14229, 13708, 12963, 14737, 14733, 9197, 14841, 14336, 14665, + 14845, 14799, 12985, 10617, 11083, 14646, 13937, 13496, 12459, 14376, 15258, 12816, 15230, 13260, 11535, 14497, + 14483, 13589, 15296, 12498, 10311, 14524, 12241, 13933, 14122, 14339, 9619, 15319, 15169, 13695, 14755, 13702, + 14507, 11170, 11544, 9659, 12015, 10561, 15208, 15020, 14369, 15241, 13292, 15309, 12952, 13312, 12264, 11397, + 12307, 13055, 13648, 13683, 13231, 13064, 13527, 14458, 13853, 14639, 14567, 15199, 14693, 14877, 15348, 13583, + 12506, 12967, 15284, 13526, 14063, 14675, 14535, 14672, 14097, 15307, 14555, 14720, 14981, 13450, 15224, 14701, + 13395, 15261, 14847, 14914, 14614, 13277, 15049, 15281, 12762, 14170, 13419, 11227, 15337, 15305, 14382, 15074, + 14687, 13663, 15107, 14406, 14555, 14687, 13921, 13272, 13654, 15196, 13112, 12662, 9582, 13803, 14646, 13419, + 12123, 14299, 14913, 14982, 15109, 12358, 14506, 15290, 13669, 13419, 13537, 14001, 15265, 15296, 13829, 13151, + 15223, 12795, 12201, 14725, 14157, 14828, 14484, 14406, 15282, 12288, 14542, 13685, 14636, 13832, 14460, 11817, + 14695, 14018, 14006, 15108, 15345, 15194, 11462, 15031, 11637, 11780, 10945, 14812, 13999, 14774, 12890, 13742, + 12665, 14627, 15150, 14436, 12809, 15049, 14725, 14722, 14870, 15221, 14708, 13562, 15023, 10913, 14669, 10366, + 14136, 14244, 14560, 14201, 13710, 9972, 14834, 14837, 13453, 15244, 15255, 13774, 12110, 14408, 14840, 13593, + 14892, 11787, 13859, 15327, 14171, 14638, 13585, 14476, 13632, 14571, 13722, 15040, 14388, 14643, 14938, 14513, + 10944, 14949, 14191, 13618, 13767, 13922, 15161, 12193, 14664, 14591, 11324, 14418, 13799, 11380, 12788, 14460, + 15017, 13479, 14678, 15350, 14225, 14277, 14553, 11932, 10758, 14753, 14575, 14515, 14663, 14633, 10672, 13909, + 13018, 13944, 14769, 14122, 13416, 13603, 14164, 14956, 15359, 14045, 14576, 14606, 14661, 15316, 15008, 13513, + 14119, 13335, 13362, 11413, 
14476, 15140, 14577, 13736, 14637, 12006, 13385, 11999, 14037, 14777, 15179, 9557, + 13188, 13427, 13890, 14718, 14567, 15327, 14678, 12650, 13846, 14735, 14898, 13388, 12785, 14453, 15195, 12583, + 15001, 15273, 13856, 15103, 13677, 15202, 12473, 14237, 13560, 14321, 15053, 8317, 14473, 14631, 14545, 13503, + 14305, 14987, 14441, 12658, 14664, 14687, 15124, 14184, 15357, 14677, 9813, 14188, 14692, 14302, 14962, 13684, + 14412, 14816, 13925, 14742, 14853, 14577, 9172, 11759, 12303, 14717, 15015, 14948, 15261, 14137, 13946, 12705, + 15169, 13732, 15142, 15235, 13625, 8740, 12042, 15083, 14550, 14060, 14792, 14740, 13754, 14838, 14826, 14249, + 15065, 13803, 15097, 9748, 13725, 14852, 12221, 13749, 14860, 12533, 15025, 14408, 12952, 8727, 14979, 15263, + 15058, 15343, 14553, 14807, 13769, 14569, 12514, 14561, 14486, 12703, 15119, 13363, 15260, 15183, 9571, 12688, + 14475, 12182, 14163, 14240, 14082, 14578, 14560, 13788, 14729, 11953, 13958, 15286, 14092, 13887, 13415, 14963, + 12996, 15055, 14711, 14973, 15017, 14688, 15054, 14956, 14509, 12067, 13975, 14464, 14124, 15023, 12346, 14365, + 13395, 13405, 12392, 14429, 14399, 13595, 11584, 14460, 13765, 15050, 14895, 11738, 14668, 14377, 12476, 8929, + 15019, 14213, 15201, 14896, 14554, 14274, 12616, 14885, 15173, 14894, 12402, 13394, 15226, 13673, 13600, 14243, + 13580, 15174, 15329, 14340, 11962, 13186, 14929, 13558, 11156, 14448, 14223, 15144, 11319, 14042, 14570, 14610, + 13540, 14385, 15268, 13626, 14740, 15298, 13614, 11775, 14565, 13860, 11828, 12431, 13491, 14485, 14760, 14751, + 14261, 15259, 12926, 13296, 13014, 10890, 15114, 10767, 11008, 15328, 15036, 15189, 13800, 13437, 14497, 15275, + 12802, 11591, 12146, 13748, 14949, 14679, 12193, 14376, 13863, 13430, 14947, 14265, 15068, 15343, 12979, 15255, + 15212, 14621, 14478, 14034, 12602, 14292, 14369, 14488, 14602, 13909, 14424, 15064, 11429, 13361, 14972, 14419, + 13942, 14052, 12407, 14512, 4207, 15020, 14663, 11339, 13646, 14732, 14376, 14881, 11224, 15281, 
14886, 12563, + 12193, 14906, 14984, 13938, 15279, 12328, 12226, 14830, 13963, 14612, 11434, 15295, 9824, 13979, 14227, 14334, + 11985, 15212, 12312, 14075, 15245, 13319, 13755, 12320, 15182, 10493, 14053, 15295, 15222, 13398, 15009, 14355, + 13349, 14955, 15254, 10463, 13949, 14970, 15236, 13849, 14596, 14396, 8567, 8557, 15204, 14410, 14465, 10071, + 12216, 13043, 14540, 12279, 14717, 14403, 13930, 13749, 14834, 14656, 13179, 10586, 14138, 11271, 15321, 11845, + 13792, 12672, 13737, 15048, 14470, 12782, 10370, 14732, 12644, 14516, 12156, 13867, 14092, 14742, 12082, 14783, + 12546, 13838, 15053, 13553, 13906, 13790, 13430, 11895, 12698, 15348, 15050, 14358, 15317, 12665, 13755, 13345, + 12799, 15092, 14772, 13642, 15211, 15148, 11596, 13934, 10821, 13028, 14744, 14924, 13427, 15293, 15175, 13287, + 15171, 14974, 13720, 11960, 14141, 14573, 11585, 13336, 14121, 15023, 12569, 14913, 13239, 14005, 15019, 12844, + 15081, 13355, 13947, 14998, 15234, 14650, 13726, 15018, 15150, 14932, 13672, 14909, 11930, 15209, 13660, 14653, + 13644, 13569, 13484, 13661, 15099, 14729, 13398, 12240, 13987, 13686, 13067, 13759, 12380, 14897, 14737, 12940, + 14257, 13891, 14128, 15213, 14797, 14786, 13912, 15128, 14787, 14395, 14951, 14233, 14268, 14679, 14986, 12506, + 14971, 11547, 15161, 13361, 12104, 14920, 14365, 15168, 14652, 14589, 14670, 14395, 14340, 11463, 15281, 15016, + 14445, 13318, 14641, 13990, 14795, 12010, 13075, 13704, 13576, 14152, 13395, 13213, 12915, 15321, 10080, 14119, + 14839, 13526, 13501, 15299, 15239, 12983, 13872, 14927, 12601, 12517, 13687, 15022, 15247, 12018, 14435, 12850, + 13677, 12932, 13984, 15283, 12584, 14927, 15243, 14194, 14027, 10594, 10676, 15180, 14034, 14781, 12848, 15074, + 14876, 14944, 14119, 14897, 12538, 12700, 14489, 11314, 12473, 10144, 14091, 12128, 13044, 15020, 10553, 13385, + 15093, 15177, 12460, 14515, 14574, 15204, 11860, 13998, 14796, 13802, 12486, 9487, 14942, 14487, 14995, 9895, + 13519, 13367, 14643, 14759, 14381, 12842, 14862, 
12019, 14751, 13606, 15287, 13885, 15222, 14643, 11697, 14765, + 13870, 14413, 12419, 15246, 14415, 14411, 14926, 12659, 14308, 15299, 13854, 14817, 12381, 15103, 14987, 15235, + 11474, 13571, 13894, 13407, 13127, 10356, 15061, 15296, 12600, 13557, 15319, 12410, 14652, 14210, 12428, 13596, + 13255, 13937, 12744, 14566, 13836, 15314, 10165, 12450, 13535, 13019, 13995, 14510, 15091, 14707, 12775, 11806, + 14501, 14400, 14395, 12574, 13983, 13723, 14577, 14676, 13511, 13751, 14370, 11431, 14798, 14033, 13693, 14822, + 14972, 13450, 12549, 13653, 14784, 15217, 14965, 11513, 13519, 14176, 11450, 14611, 14649, 13537, 13567, 15011, + 14980, 13973, 10981, 14138, 13491, 13396, 14817, 12553, 15149, 13452, 10324, 11771, 14353, 14685, 14476, 15035, + 12465, 13442, 14264, 11472, 14602, 10401, 15117, 13024, 13562, 11535, 14247, 12564, 12800, 14577, 15090, 9617, + 15266, 15196, 14498, 13728, 14085, 14311, 15211, 13483, 14485, 14130, 15148, 14914, 14684, 15240, 12854, 14554, + 14162, 14889, 13358, 10468, 12148, 15040, 15052, 13928, 14866, 14838, 13200, 14924, 11605, 14460, 14964, 13569, + 15173, 14633, 14448, 13084, 12416, 15339, 14924, 8957, 12562, 14792, 14940, 11950, 14345, 12099, 14360, 13343, + 14199, 15309, 15041, 14379, 15315, 14517, 15318, 12539, 12652, 13903, 15057, 15325, 14764, 11618, 13598, 14449, + 14343, 14449, 13476, 14957, 14378, 14452, 15224, 15234, 14228, 14854, 15117, 14822, 14375, 11383, 14020, 13441, + 14527, 14049, 14235, 14099, 14275, 14012, 13154, 14870, 14223, 12718, 14571, 13827, 15094, 15032, 15253, 12767, + 13565, 15309, 14832, 14570, 14964, 15169, 14621, 14843, 14955, 15261, 15039, 10909, 13591, 13605, 13596, 15068, + 14592, 13526, 14438, 12434, 10400, 10181, 14896, 11461, 15123, 13822, 15082, 15316, 10784, 13972, 14759, 12854, + 10939, 14677, 14723, 13892, 14536, 14047, 11257, 13071, 14159, 12851, 13588, 14519, 14883, 14725, 13687, 14951, + 13855, 14759, 10402, 12680, 14663, 15308, 14641, 11168, 14386, 15195, 14806, 14169, 13636, 9233, 12102, 15026, + 
10734, 13387, 14260, 14757, 14956, 12960, 14907, 13252, 13671, 14360, 14126, 15237, 14691, 14718, 13217, 14340, + 15100, 14830, 10891, 12647, 13725, 11010, 14814, 13004, 14537, 15148, 10887, 14799, 15086, 14934, 15053, 14570, + 14189, 14988, 14291, 13950, 14399, 12497, 12650, 14082, 12557, 14342, 14854, 12919, 14751, 14463, 14475, 13745, + 14207, 15263, 14912, 9448, 13509, 14035, 13295, 12200, 12978, 14807, 15222, 14306, 13775, 14415, 11292, 13997, + 13500, 14504, 14770, 14845, 12781, 14930, 11849, 13670, 15045, 14548, 12643, 14676, 12941, 9292, 10404, 13823, + 11894, 8894, 12214, 14108, 14808, 14569, 15171, 15143, 13212, 14829, 15271, 14240, 15332, 14874, 11384, 15182, + 14165, 13972, 12883, 14894, 13620, 14657, 14829, 13979, 14434, 15325, 10864, 13732, 14011, 14633, 14546, 14014, + 14037, 14625, 14895, 14053, 14931, 14729, 14821, 12297, 13495, 14992, 13442, 13109, 14574, 13666, 14261, 15184, + 12818, 12776, 14074, 13768, 9975, 15341, 13856, 14490, 14662, 14919, 10281, 11770, 13484, 12768, 14895, 9986, + 13540, 10212, 13693, 14511, 14657, 13556, 14667, 13274, 12813, 13596, 15269, 14907, 12962, 12307, 10909, 13013, + 15237, 14441, 15309, 14359, 13981, 14872, 12009, 14646, 12651, 14111, 15054, 13165, 13098, 10968, 14966, 12542, + 13692, 13776, 14400, 13850, 10011, 13903, 14327, 13558, 15231, 10654, 14067, 15189, 14183, 15170, 14563, 13660, + 14825, 10439, 14744, 12709, 15272, 10408, 13187, 14504, 11080, 12883, 14784, 11463, 14411, 11447, 13092, 14001, + 14424, 12834, 14910, 14836, 13709, 14467, 14550, 13603, 8812, 13415, 13600, 14212, 14535, 9536, 14463, 14615, + 14410, 14520, 13979, 14547, 15333, 12697, 10612, 14557, 13546, 14617, 12363, 15056, 14401, 15358, 15226, 9826, + 13868, 14841, 15127, 10861, 14675, 8511, 14137, 12500, 15115, 15072, 12832, 15018, 15173, 12946, 15258, 15100, + 14888, 11392, 14222, 15312, 12136, 12177, 14181, 14720, 14432, 11924, 14201, 10077, 11423, 14561, 14023, 15179, + 14139, 11788, 14668, 15110, 15241, 14816, 14784, 13538, 15073, 13516, 
14642, 13509, 15257, 13711, 14403, 11030, + 14925, 14148, 14713, 14601, 13689, 13582, 14588, 12960, 13905, 14568, 15208, 14869, 14931, 15221, 13580, 13391, + 14095, 15315, 14798, 14807, 13224, 10841, 13381, 14552, 13685, 13568, 10778, 12282, 14479, 15127, 14495, 15268, + 14888, 14385, 15239, 15123, 14757, 14993, 14987, 13012, 12805, 14498, 13786, 14980, 14593, 15012, 14726, 14668, + 12137, 14745, 14530, 15306, 15310, 13810, 12470, 15062, 13894, 12667, 14939, 12975, 12428, 14870, 14364, 15047, + 12688, 14748, 13935, 15104, 13238, 14253, 14579, 14000, 13376, 11524, 14993, 14355, 14186, 14510, 11888, 14021, + 13616, 15352, 14379, 11392, 14734, 14902, 14026, 14771, 12158, 12850, 14711, 14103, 1251, 14993, 14555, 13834, + 15055, 13335, 13838, 14511, 11998, 10932, 14671, 14934, 10764, 15031, 12220, 13824, 14210, 13773, 12262, 13079, + 14053, 13673, 12848, 15074, 13943, 13212, 15216, 14001, 15280, 13412, 13708, 14588, 14612, 12710, 11100, 10269, + 14816, 14737, 14632, 13200, 13347, 13962, 13474, 12255, 11083, 14928, 14071, 14569, 13425, 12327, 13089, 14882, + 13795, 12938, 12100, 13915, 13723, 13745, 13224, 14053, 13846, 14868, 12921, 14754, 14918, 15283, 14959, 13604, + 15167, 13395, 14583, 15035, 14468, 12948, 11657, 10839, 13436, 15082, 14729, 15074, 13034, 14424, 14463, 12026, + 15031, 13490, 13805, 14563, 13594, 14250, 14796, 12437, 15185, 14789, 14559, 13823, 15093, 14516, 14677, 15232, + 13086, 13840, 14741, 10954, 14684, 8326, 14591, 13472, 14917, 13891, 15046, 9344, 12495, 12383, 14516, 12342, + 13906, 14387, 13570, 14716, 15285, 12783, 14397, 13988, 12712, 10267, 13346, 14737, 15064, 14967, 14003, 14528, + 13795, 12581, 12476, 15207, 13907, 12359, 14915, 14843, 15006, 15039, 13740, 12608, 13589, 15235, 14527, 10078, + 14415, 14830, 14430, 12356, 12935, 13679, 14805, 14593, 14854, 13958, 14968, 13009, 13592, 11092, 14931, 14037, + 15339, 12673, 15172, 14927, 14936, 14655, 14253, 15069, 14692, 12564, 14846, 12920, 14819, 13352, 15160, 13087, + 14054, 12202, 14915, 
13486, 12524, 14475, 12204, 14961, 14702, 14748, 10367, 12480, 11528, 9441, 13582, 13976, + 13593, 11897, 14319, 14402, 14785, 14504, 15025, 14823, 14471, 15078, 12637, 14501, 13853, 11146, 14390, 14668, + 14913, 13502, 13909, 13905, 13603, 14379, 14498, 14921, 15164, 15218, 14439, 14401, 14881, 14753, 11015, 11331, + 12042, 15102, 12352, 13400, 14902, 14377, 14398, 14842, 14703, 14531, 11712, 13968, 14980, 13778, 12687, 13973, + 14867, 7852, 15220, 13141, 10438, 11414, 12048, 13084, 15070, 14430, 14625, 13344, 13198, 14491, 14049, 14890, + 14293, 13070, 15012, 13254, 14799, 13814, 14093, 13489, 14204, 15311, 10939, 13353, 11658, 15200, 15305, 9287, + 15283, 12281, 14842, 14753, 15087, 13733, 15177, 13717, 14295, 13866, 15216, 12309, 12603, 13812, 14454, 13443, + 13866, 15020, 12246, 12412, 15322, 14448, 15144, 14637, 15110, 14447, 13078, 14160, 13069, 14572, 12778, 14416, + 14460, 9069, 12766, 15090, 14507, 14380, 10532, 14637, 13602, 14695, 14562, 12225, 15332, 10573, 13944, 13977, + 14126, 14843, 14429, 14767, 14864, 12606, 12406, 14241, 15358, 15185, 14189, 14452, 14726, 14956, 15244, 13950, + 14105, 13694, 14506, 15348, 13524, 13717, 14324, 14492, 15225, 10410, 14360, 15318, 14410, 12739, 12432, 14708, + 14380, 15300, 14944, 14835, 14941, 15003, 14974, 15166, 10073, 15181, 14980, 12523, 14427, 11836, 12802, 13007, + 9611, 13317, 14980, 14874, 14388, 14365, 15333, 14957, 14242, 12196, 15237, 15329, 14881, 15139, 15179, 10190, + 8554, 15324, 10360, 14168, 13813, 14594, 10585, 14732, 15074, 14851, 13990, 14096, 14824, 14687, 14970, 14559, + 13503, 14470, 10057, 13073, 12589, 14724, 14143, 11728, 14465, 11973, 14063, 15251, 15335, 14464, 14688, 13111, + 14451, 14422, 14628, 14798, 14820, 12692, 12484, 10676, 11300, 12987, 14730, 14291, 13963, 13737, 15129, 14706, + 14824, 13379, 13781, 13135, 12695, 14164, 11623, 14263, 13621, 14569, 15311, 13552, 14181, 15227, 8998, 13622, + 14959, 13540, 14751, 14801, 10967, 14460, 15267, 14598, 14051, 12038, 14703, 12468, 
11399, 14901, 13952, 15026, + 14065, 15337, 14856, 14668, 14785, 12600, 14609, 14504, 14438, 7242, 15078, 14176, 13349, 14047, 14642, 14393, + 13666, 15160, 14415, 13101, 14681, 13011, 15264, 14704, 12171, 12527, 12042, 9438, 14195, 14832, 15089, 12847, + 15171, 14535, 13089, 15066, 14340, 15184, 14540, 15318, 13553, 15270, 14673, 12814, 14078, 13859, 8105, 14385, + 13571, 8893, 14781, 13711, 15250, 12804, 14976, 13450, 12738, 14239, 14633, 15020, 12581, 14506, 14479, 10721, + 15328, 14092, 14328, 13899, 14677, 13079, 13032, 14781, 14573, 9585, 15135, 13280, 15224, 14017, 15158, 13851, + 15116, 11671, 13795, 14659, 14727, 15194, 14593, 13862, 14584, 15280, 14652, 10518, 14539, 13233, 14056, 15274, + 10610, 11865, 14293, 12050, 14647, 14766, 15096, 12445, 14661, 14249, 11984, 13570, 14442, 15203, 15158, 13355, + 14541, 15331, 12455, 12782, 10938, 14657, 15320, 10070, 11763, 15272, 11741, 13153, 15280, 12632, 14759, 14371, + 15339, 15005, 14986, 14766, 12421, 12201, 14942, 12707, 13699, 13540, 14824, 12870, 14479, 13655, 14886, 15236, + 13437, 11501, 13388, 14342, 14361, 14970, 13496, 14619, 12593, 14109, 14353, 13103, 15230, 15213, 14368, 14603, + 9224, 11219, 11341, 14894, 12232, 14938, 13409, 14630, 14481, 14432, 13106, 15027, 13681, 15306, 14382, 15198, + 14003, 14469, 14393, 14414, 14415, 13986, 12647, 13348, 12914, 13916, 11882, 7803, 14472, 14959, 14600, 14746, + 15325, 14857, 12707, 14876, 9777, 13410, 14516, 14811, 12850, 14565, 11523, 14895, 12811, 13263, 14650, 11105, + 14233, 14745, 14652, 15207, 13447, 11860, 14860, 13796, 12416, 13524, 14867, 12576, 14922, 13823, 15022, 14585, + 13994, 14540, 14722, 13265, 13559, 13015, 13121, 13522, 12806, 13689, 14953, 14601, 11681, 13950, 9583, 11657, + 14895, 14418, 13545, 13622, 14625, 15153, 10415, 14508, 14541, 14438, 14099, 15170, 12254, 12157, 12717, 10936, + 14458, 11733, 10043, 14872, 11894, 14474, 15073, 14905, 14230, 12543, 14116, 14829, 11882, 14500, 15200, 14863, + 13399, 14899, 14812, 13639, 9028, 14770, 
15108, 14611, 15231, 15079, 15025, 15180, 12187, 14686, 12623, 15036, + 15060, 13926, 13052, 14841, 14769, 12948, 13321, 13669, 13874, 14128, 12586, 14577, 12844, 9482, 13975, 15023, + 14703, 15301, 14781, 14916, 11493, 11569, 15246, 13660, 14781, 10538, 12982, 14181, 14947, 15001, 14773, 14772, + 12968, 14612, 12855, 10480, 10202, 13354, 11704, 14528, 11334, 14280, 14329, 15063, 14459, 13434, 14468, 14534, + 14969, 13216, 11580, 12768, 13288, 13180, 12863, 14141, 11989, 9745, 14770, 12979, 15242, 14051, 14725, 15004, + 14453, 14353, 13818, 13133, 15346, 13725, 15142, 12837, 14972, 14816, 11904, 10652, 14504, 10953, 14628, 14594, + 13421, 14471, 14412, 10626, 14618, 13842, 15052, 15045, 14991, 11193, 13563, 13749, 12621, 15329, 13775, 12825, + 15243, 14551, 15149, 14939, 12163, 10914, 14696, 14448, 14309, 14149, 12410, 14245, 13657, 15165, 13542, 14935, + 14451, 14677, 13395, 14207, 14981, 13812, 12639, 12846, 14726, 13898, 15229, 13642, 11755, 12625, 14653, 11205, + 14413, 13828, 15129, 14874, 14883, 12691, 15308, 13536, 14859, 13508, 14592, 13063, 14892, 11889, 12310, 14860, + 14522, 14650, 14943, 14821, 13748, 13013, 9930, 13639, 12436, 15108, 13059, 12380, 13808, 12430, 14444, 14428, + 9661, 15210, 11113, 13436, 12583, 13719, 14545, 14631, 12623, 12425, 14397, 12134, 12356, 13695, 13039, 14872, + 15100, 14762, 14121, 14655, 12447, 14862, 14817, 12951, 12470, 14955, 14630, 15155, 15083, 14832, 15359, 13624, + 14948, 13583, 13195, 14988, 14549, 12420, 14199, 13681, 11020, 14452, 14495, 14551, 13654, 14805, 13439, 14422, + 8859, 14845, 15045, 13849, 14011, 14390, 13481, 10298, 14739, 13940, 14048, 12937, 15238, 13931, 13598, 15264, + 13935, 14966, 14968, 13013, 14077, 14820, 13355, 14265, 14674, 13525, 14106, 13620, 15212, 14254, 15248, 12642, + 14916, 11957, 9465, 13649, 14284, 14895, 15230, 15345, 14943, 11593, 14197, 12966, 13888, 15331, 14668, 12203, + 11960, 14268, 14645, 14589, 14339, 14086, 14637, 14501, 12803, 14916, 14870, 12192, 15265, 15270, 14795, 
14094, + 14462, 11349, 14881, 13820, 15252, 14696, 14398, 14869, 15269, 11767, 13943, 15178, 14800, 13026, 14493, 14862, + 14558, 14820, 13439, 13976, 14701, 14957, 15054, 13033, 12883, 13629, 12174, 15198, 12472, 13807, 15026, 12576, + 15101, 12403, 11900, 14817, 14717, 14891, 14679, 12999, 14961, 13849, 12895, 14594, 13561, 12704, 14793, 12862, + 14070, 14424, 15125, 15304, 12784, 11613, 14338, 11309, 14465, 11996, 13678, 15172, 15101, 15182, 12233, 15345, + 15307, 14844, 14617, 15206, 14869, 15359, 15018, 12505, 14498, 13704, 8500, 13094, 13966, 15265, 14381, 13385, + 13332, 14755, 15300, 10485, 10250, 15127, 14600, 15214, 14748, 14733, 14486, 13853, 9297, 14949, 12868, 14590, + 10316, 11499, 14627, 14948, 14846, 14588, 15091, 14879, 12592, 14625, 12702, 14555, 7718, 14337, 15182, 14154, + 14389, 13560, 12690, 13057, 14800, 13483, 14599, 12948, 15217, 11807, 13910, 14786, 13595, 12667, 15163, 14767, + 12402, 15318, 14779, 14079, 14028, 15218, 14621, 14570, 14808, 14576, 14722, 14858, 15321, 15157, 14074, 12658, + 14476, 12779, 14894, 15034, 14960, 14869, 11864, 11528, 12515, 9060, 15035, 14681, 14856, 12661, 15193, 14788, + 14129, 14579, 14353, 15328, 14459, 13972, 15309, 13513, 12850, 14943, 13442, 13734, 14830, 14563, 14856, 10762, + 15041, 12809, 14677, 15262, 14925, 14584, 14021, 14097, 13080, 15188, 13881, 14949, 11253, 14288, 14830, 14939, + 14670, 15249, 7098, 14988, 15057, 8563, 13057, 8186, 15214, 14448, 15044, 13591, 13731, 13426, 14859, 14939, + 14790, 12860, 13490, 11709, 13836, 11733, 14698, 15247, 12909, 13361, 14796, 13263, 12592, 14733, 15230, 14991, + 13935, 14531, 14258, 14811, 13382, 11716, 15258, 14716, 15301, 14217, 14353, 13474, 12520, 12599, 14841, 15299, + 11847, 13442, 14048, 12844, 13432, 14694, 12483, 15098, 14646, 14646, 13179, 13262, 13453, 15353, 12288, 13849, + 15327, 13949, 11013, 14970, 13622, 14994, 11990, 15169, 10815, 14977, 13373, 12806, 10468, 13588, 13793, 13404, + 12801, 15128, 13758, 14994, 7276, 14949, 15119, 11587, 
14482, 14964, 14394, 13689, 14966, 14020, 14552, 12516, + 15227, 13976, 14868, 13460, 15125, 14724, 13025, 15233, 15354, 15313, 15085, 14520, 15296, 15078, 14817, 14459, + 10306, 14729, 15262, 11967, 12907, 15217, 13617, 14905, 15228, 15260, 14587, 12315, 11956, 14355, 14448, 14957, + 12960, 14719, 14128, 12981, 13668, 14842, 14357, 13845, 12297, 11414, 14491, 13566, 12514, 12805, 15251, 14470, + 14359, 14748, 13045, 14251, 9435, 15050, 13357, 13429, 14775, 14702, 13388, 14680, 10417, 14583, 14288, 13398, + 13381, 15130, 14832, 12779, 14990, 15042, 12262, 15035, 14779, 14850, 14024, 14824, 14595, 13564, 14028, 13893, + 15007, 15148, 14559, 15209, 14339, 14804, 14656, 14390, 12600, 14346, 13464, 14974, 15286, 13566, 13377, 15017, + 14592, 14084, 14890, 12833, 14110, 14816, 14646, 15326, 15072, 11353, 14797, 14904, 14884, 13024, 14854, 13723, + 14454, 11742, 13164, 14461, 14460, 14611, 14795, 15154, 14100, 14330, 15296, 14785, 12334, 13998, 12730, 14887, + 15267, 13502, 13384, 15198, 14797, 10464, 12319, 14736, 15142, 14323, 12923, 13802, 15242, 15140, 14602, 13816, + 11474, 12620, 15340, 13894, 13333, 15278, 13934, 14750, 15115, 13314, 15303, 14569, 12255, 15282, 13545, 14917, + 13631, 11759, 11846, 13515, 12514, 14884, 13642, 15271, 15149, 14867, 14231, 14701, 12570, 13392, 12362, 12212, + 11282, 14468, 14769, 14125, 15208, 14533, 15214, 12801, 13563, 13520, 13923, 15177, 14869, 12760, 13816, 3789, + 13130, 13554, 13743, 14696, 12551, 11960, 14742, 11598, 13454, 11324, 15245, 14083, 14325, 12322, 14868, 15323, + 15211, 12557, 13093, 14069, 13585, 15342, 14593, 13480, 14731, 13489, 15059, 11290, 11792, 11867, 14346, 13072, + 14051, 13760, 14280, 10438, 14870, 14535, 14751, 14382, 15310, 14845, 14776, 9304, 11614, 15231, 13982, 15186, + 15328, 14286, 13556, 12810, 9470, 11616, 14830, 13598, 14619, 13247, 14617, 14408, 14725, 13389, 14985, 14260, + 15344, 14093, 14874, 13851, 14930, 12528, 14292, 13536, 14543, 14608, 10516, 12268, 15044, 14690, 10273, 13689, + 14003, 
13776, 12759, 14211, 13814, 15245, 14702, 12117, 13043, 15012, 15292, 13442, 12632, 12989, 8287, 13027, + 14402, 14996, 14763, 14980, 11373, 14702, 14279, 14875, 12939, 10500, 14861, 15049, 14412, 14602, 14323, 14080, + 14959, 13741, 10130, 13944, 15252, 14641, 15049, 14536, 15239, 12633, 13865, 14351, 12727, 14994, 14620, 13828, + 14974, 13975, 14742, 15066, 12233, 14540, 14051, 14657, 13748, 12687, 14886, 14738, 14972, 12200, 10676, 13522, + 14588, 14253, 13669, 13384, 13965, 13549, 13046, 11916, 15244, 15054, 12492, 14804, 13828, 13204, 12426, 12530, + 13795, 14818, 10882, 14064, 13388, 8563, 11000, 11600, 13766, 15122, 12037, 15272, 10613, 15268, 14337, 14791, + 14781, 14876, 13899, 12851, 14427, 15072, 15116, 14927, 8464, 12039, 14535, 12311, 13200, 14596, 14006, 12450, + 14054, 14381, 15141, 15237, 11943, 14901, 10376, 14220, 14527, 12843, 12998, 13389, 12022, 14623, 14860, 15156, + 13511, 11781, 10750, 15191, 11754, 13728, 14800, 11718, 13321, 14687, 12554, 15225, 13325, 15190, 14966, 14158, + 14517, 14739, 15309, 14819, 12103, 11453, 11549, 14851, 14064, 14517, 15059, 14448, 11849, 15271, 13443, 9253, + 13415, 14580, 13848, 14746, 10935, 13725, 14331, 13301, 13082, 13927, 12797, 12848, 12736, 13887, 14080, 14090, + 15226, 10051, 12599, 13912, 14033, 15205, 14983, 14213, 9732, 15147, 14566, 14645, 15273, 15194, 13668, 14733, + 14865, 14678, 14318, 13077, 9741, 14649, 15347, 12441, 14984, 14426, 15295, 10535, 14162, 12205, 15351, 12705, + 11667, 14068, 14218, 14695, 14557, 13081, 15045, 14853, 14808, 15165, 14291, 12193, 14785, 15351, 13981, 14812, + 14587, 14667, 14369, 13990, 12921, 13983, 13963, 11990, 15211, 14620, 14215, 13125, 14754, 14799, 14803, 14477, + 12823, 13128, 15014, 14627, 13563, 14193, 14888, 14779, 11236, 15349, 14090, 12381, 12874, 14705, 15296, 14936, + 15123, 14624, 13958, 8712, 15040, 15076, 15146, 12587, 14394, 14790, 13503, 12384, 13643, 12519, 15214, 14536, + 14524, 14321, 14111, 14882, 15196, 11724, 12894, 13641, 14666, 14457, 
15114, 15002, 14584, 15191, 14947, 13689, + 13406, 15161, 13870, 11763, 14967, 13856, 15340, 9569, 13472, 13944, 14690, 13761, 15244, 14902, 14782, 14693, + 12371, 15099, 14406, 14666, 14083, 14437, 14951, 12541, 15174, 14309, 14749, 14890, 10478, 15297, 13906, 14042, + 14212, 14583, 12347, 14068, 15229, 13796, 14463, 12489, 15219, 15071, 14859, 14452, 12310, 13902, 13493, 14175, + 14774, 13478, 13905, 14447, 14949, 12617, 11035, 14202, 14358, 14854, 13778, 13090, 14445, 14923, 14550, 12990, + 15244, 13427, 8807, 12945, 14874, 15158, 11314, 11351, 13063, 13288, 10667, 15194, 15317, 10084, 12837, 13325, + 14710, 15059, 15154, 15079, 14332, 14716, 13719, 14715, 13351, 14928, 15013, 12344, 15107, 15090, 15242, 14034, + 14539, 11738, 14461, 14888, 7751, 12545, 12389, 13837, 11697, 14558, 13120, 12864, 15099, 12064, 14180, 13161, + 11030, 13179, 14349, 15300, 13196, 14968, 14472, 11134, 12392, 15256, 14647, 14900, 14672, 14968, 14412, 15300, + 13002, 11841, 14867, 12343, 14422, 15013, 12850, 15351, 15346, 14527, 13421, 13814, 14393, 15172, 13345, 10050, + 13768, 15348, 13363, 14632, 13651, 13623, 13153, 13405, 11299, 14610, 15244, 11749, 11988, 14067, 11702, 11826, + 13666, 13687, 12396, 12505, 14571, 12838, 14451, 10683, 13777, 6957, 14933, 12792, 14491, 14014, 14664, 15253, + 14973, 14942, 14470, 14740, 14819, 15269, 10010, 14373, 14500, 14860, 13801, 14352, 14469, 15003, 12988, 13136, + 14078, 12301, 14831, 12189, 15055, 14248, 13749, 13466, 14449, 11899, 14948, 12126, 14345, 14903, 14494, 13378, + 14897, 13371, 14355, 12988, 14024, 15055, 13931, 14529, 9399, 11384, 14831, 15146, 15105, 11354, 15231, 10417, + 12024, 11668, 15114, 13172, 14624, 14834, 13858, 14497, 14455, 12455, 14650, 13086, 15089, 15282, 14953, 14031, + 10366, 13710, 14241, 13499, 9993, 14796, 13430, 13867, 14249, 14468, 15092, 14810, 14877, 15165, 15249, 14570, + 10070, 14477, 15352, 14084, 12828, 14972, 12745, 14697, 13839, 14760, 14662, 15155, 14265, 15136, 14527, 9797, + 11930, 14595, 14076, 
13897, 15203, 15095, 15108, 11899, 15153, 15129, 14511, 15062, 14534, 14587, 13863, 14083, + 15323, 13411, 14949, 14780, 14358, 14443, 13710, 15357, 15329, 15329, 14775, 11356, 14892, 14726, 13839, 14721, + 14611, 13918, 12705, 14717, 11181, 14937, 15064, 14421, 9648, 14971, 12434, 11769, 15310, 15184, 14643, 15076, + 14409, 15162, 14073, 13787, 12133, 13780, 14582, 12894, 15053, 13951, 13896, 13548, 14032, 14878, 14111, 14712, + 15062, 15243, 13568, 15269, 10257, 13553, 11662, 12765, 14354, 15003, 15173, 14296, 10719, 14883, 13985, 14682, + 14645, 15323, 15048, 14640, 14107, 9716, 14163, 11814, 15240, 12578, 13917, 14693, 13624, 14661, 14921, 14621, + 14168, 15166, 14004, 12796, 15226, 13088, 9270, 14439, 14953, 14105, 8220, 14277, 13140, 14824, 14733, 13768, + 14159, 15131, 15106, 14040, 14482, 14811, 13335, 15124, 8131, 10032, 14585, 14501, 15215, 14172, 13992, 14608, + 13939, 13765, 14122, 14882, 15137, 12685, 14325, 15002, 14000, 15250, 15087, 14574, 11955, 12616, 13346, 15294, + 13095, 14917, 13608, 12795, 15152, 14877, 13784, 13232, 15241, 10553, 12770, 14654, 13645, 14229, 12389, 13999, + 15048, 13883, 13944, 14403, 12419, 12629, 11719, 14105, 14578, 14947, 15032, 14434, 14497, 13852, 14779, 13590, + 13903, 11106, 15319, 14296, 14632, 15072, 13492, 14068, 13768, 14378, 14869, 10337, 12420, 12626, 14485, 14663, + 13694, 14810, 13628, 14934, 13805, 12610, 13286, 10712, 14045, 15005, 11538, 13888, 13432, 15183, 14470, 13097, + 15198, 15164, 13433, 15301, 13100, 13408, 14500, 13998, 12457, 13867, 14243, 13705, 13220, 14452, 10322, 13968, + 12279, 10184, 12612, 13770, 13637, 14522, 14037, 12492, 14409, 14012, 14862, 13898, 13529, 14737, 14073, 12967, + 14659, 14375, 14459, 14337, 13975, 13659, 14602, 13483, 14402, 14387, 13703, 13516, 13922, 14766, 14459, 15098, + 12615, 14688, 6745, 14745, 14444, 11531, 13090, 13538, 14305, 13625, 14303, 13987, 14678, 13994, 15283, 14393, + 13483, 12570, 14991, 14935, 10944, 14448, 11078, 8691, 13747, 14041, 13943, 13727, 
13403, 14697, 13057, 14069, + 14680, 13410, 12830, 12096, 10827, 13631, 14358, 13895, 14920, 12271, 15110, 13481, 15231, 13953, 11701, 14101, + 14926, 14578, 14171, 14269, 14154, 12276, 13525, 13631, 13931, 14410, 14561, 14014, 14445, 14943, 14876, 12734, + 14748, 14904, 13575, 14466, 15018, 12948, 15115, 13751, 14076, 12735, 14646, 12780, 12808, 15104, 15332, 13819, + 13256, 12498, 12370, 15124, 15098, 13828, 14412, 15001, 13916, 13380, 13909, 8114, 13465, 11397, 15279, 15174, + 14437, 14392, 14605, 12203, 14057, 13612, 14817, 13926, 13048, 14745, 15335, 14824, 14686, 13129, 13959, 14971, + 10483, 12955, 14941, 15201, 14485, 10974, 13614, 14802, 14763, 14562, 10395, 6941, 15341, 15085, 15299, 14932, + 12596, 12283, 14790, 15050, 14814, 15267, 13915, 11926, 13721, 13761, 14866, 14587, 14949, 6621, 14463, 12778, + 15051, 14710, 14974, 13756, 15000, 14944, 14137, 11155, 13714, 14200, 13809, 12403, 13338, 14707, 15252, 14033, + 12960, 13880, 14571, 15277, 14373, 12361, 13442, 13274, 12831, 9107, 14899, 15069, 13952, 10016, 14049, 13729, + 13572, 14005, 15005, 14100, 12705, 14963, 12939, 15139, 14857, 12714, 13232, 14677, 15356, 11264, 14352, 15356, + 13908, 15350, 12875, 13084, 14398, 13413, 15329, 14152, 13459, 15176, 13867, 11113, 15234, 13311, 15178, 14942, + 13527, 15206, 13179, 12627, 13668, 13243, 14779, 13263, 13570, 13760, 15258, 14683, 14561, 13916, 11213, 14221, + 15293, 10661, 14709, 14347, 5150, 15320, 14467, 15189, 14638, 14591, 14966, 9762, 15279, 12722, 13983, 14407, + 14801, 14441, 15287, 14197, 10334, 13418, 14369, 13912, 14789, 10844, 13377, 12132, 14902, 13155, 14232, 13948, + 14762, 14069, 13482, 13603, 14401, 14524, 13736, 14493, 14649, 12765, 12875, 14430, 15177, 7998, 13000, 13551, + 15354, 15185, 11780, 9588, 14309, 15191, 15158, 15326, 12828, 13350, 13562, 12494, 13498, 15069, 14849, 11357, + 12330, 12883, 9154, 13483, 13423, 13148, 12411, 13967, 13652, 14948, 14392, 15147, 15046, 12528, 13801, 13849, + }; + uint16_t ret[32 * 1 * 16 * 16] = { 
+ 13806, 13409, 14744, 14390, 14666, 13732, 14911, 11878, 15333, 15329, 15094, 13807, 9960, 13669, 13150, 14133, + 13095, 15214, 13007, 13940, 14613, 15318, 8399, 15008, 14080, 15093, 15293, 11517, 11656, 14637, 14682, 13810, + 13727, 15282, 13407, 14822, 13324, 14639, 14502, 12381, 14033, 14320, 14900, 14601, 13814, 13064, 14964, 13342, + 14808, 14885, 11702, 15230, 15090, 14397, 14091, 14722, 13890, 12826, 14372, 13507, 10819, 14411, 14557, 14696, + 13612, 14896, 14995, 11272, 13397, 15097, 13644, 13712, 14402, 13563, 13739, 13076, 15341, 14660, 14655, 14124, + 13867, 14875, 13373, 14838, 15106, 13759, 14027, 14400, 14420, 13251, 14092, 15309, 14816, 15146, 14649, 14333, + 14515, 14517, 15292, 10152, 14520, 7356, 15003, 11938, 10648, 14727, 13876, 14965, 13910, 14497, 13778, 14436, + 12368, 9257, 14921, 14461, 15205, 10931, 13237, 9446, 14716, 14402, 15024, 14132, 12320, 11286, 14853, 8293, + 14613, 14501, 14753, 8859, 13308, 14838, 14734, 15221, 12297, 14915, 14376, 14708, 15036, 11776, 13877, 14242, + 15295, 14843, 13614, 14953, 13386, 14215, 11784, 13834, 15011, 12296, 14041, 14216, 12565, 15303, 13854, 9630, + 15356, 15065, 13575, 14561, 14750, 14685, 7548, 15202, 15221, 15062, 12717, 8873, 12416, 13797, 14537, 14285, + 13586, 14267, 13989, 14063, 12426, 13403, 13579, 14448, 14593, 14372, 15171, 13380, 14423, 11833, 14805, 12482, + 14432, 12990, 9145, 11510, 14589, 14879, 13743, 13116, 15110, 15037, 14981, 11667, 14166, 10348, 14339, 14799, + 14739, 14554, 15049, 14919, 14590, 14796, 15239, 14962, 14956, 14643, 14574, 14413, 14205, 14510, 14604, 15341, + 15106, 15217, 14442, 10480, 14929, 13485, 14600, 14882, 15224, 14562, 14861, 13045, 12163, 14489, 12755, 13986, + 14484, 10129, 11777, 15025, 13459, 12440, 14040, 15277, 13431, 7777, 14026, 14767, 14341, 14951, 14446, 15337, + 15260, 13830, 13496, 14526, 14287, 13446, 14400, 14434, 9240, 14347, 10303, 15042, 14725, 13972, 11263, 15321, + 12383, 13521, 13515, 12514, 15120, 11829, 14569, 14258, 14248, 15050, 
14190, 11100, 13528, 14597, 14768, 13076, + 10308, 14495, 14764, 14585, 13386, 14056, 13546, 14744, 12462, 13519, 14690, 14530, 13359, 14197, 12119, 14493, + 15299, 13657, 13314, 15163, 13620, 14871, 15069, 12609, 14624, 14860, 14665, 6954, 13572, 14820, 15172, 9265, + 13465, 14403, 13069, 14143, 14396, 13644, 14425, 14368, 11895, 14820, 12245, 13390, 15103, 14751, 14817, 14266, + 15153, 13514, 14956, 12734, 14561, 12598, 14027, 12288, 15162, 11834, 14735, 11297, 14288, 14765, 14637, 14904, + 8844, 14748, 14907, 15296, 13464, 14576, 15212, 15173, 13921, 14296, 14956, 12703, 10059, 15323, 13926, 9275, + 14363, 14443, 11661, 14989, 13720, 14270, 13096, 12931, 15299, 14542, 14860, 15167, 14578, 13621, 12512, 14903, + 15204, 14442, 15225, 14742, 13816, 12771, 11350, 14864, 14540, 15017, 14638, 15284, 14485, 15156, 14697, 15200, + 12358, 15095, 14348, 11718, 13352, 14160, 14093, 15146, 14388, 12860, 14899, 14136, 13672, 9580, 14314, 14155, + 15209, 14440, 13229, 12456, 14067, 15202, 14407, 14056, 14987, 13806, 14598, 13286, 14954, 12919, 14373, 14735, + 14698, 14478, 14677, 13663, 12389, 11817, 14022, 13498, 12622, 15257, 13066, 13800, 14692, 12959, 13788, 14128, + 15277, 14735, 13825, 13440, 14635, 14953, 14426, 14283, 15081, 14110, 13694, 13035, 15040, 15172, 14452, 13365, + 15338, 14740, 12459, 6123, 15136, 13198, 14707, 15189, 14175, 13312, 14519, 13776, 11330, 14370, 13998, 14688, + 14719, 13948, 14140, 13798, 12883, 14838, 14780, 13981, 14421, 14515, 15162, 15127, 13005, 15070, 12854, 13540, + 14466, 14033, 15206, 12376, 13846, 14745, 14269, 13311, 14437, 15287, 13602, 10592, 14797, 14296, 14952, 13159, + 13061, 13985, 13309, 14670, 14059, 15351, 12826, 15328, 15047, 15314, 13342, 12707, 13824, 14590, 13242, 14251, + 14418, 14789, 15058, 14986, 13037, 14549, 15071, 15296, 12439, 13983, 14427, 15122, 14966, 12802, 13172, 14442, + 13771, 15085, 13974, 13653, 14592, 15202, 15066, 14301, 14566, 11826, 14777, 15274, 15345, 14429, 13402, 14783, + 15288, 13771, 10906, 
13379, 10648, 14081, 14983, 14865, 14046, 14414, 14708, 10342, 12714, 11624, 15245, 13818, + 11609, 13121, 12380, 13873, 13028, 15312, 14240, 13744, 13916, 11105, 14755, 14712, 9400, 13780, 13069, 14884, + 12532, 14933, 14630, 14734, 12127, 14752, 15282, 11216, 14668, 13273, 13522, 10786, 14793, 11542, 14608, 11731, + 14655, 14823, 14805, 14389, 14908, 13626, 14682, 14780, 13047, 14625, 11429, 10538, 14180, 14717, 9124, 14621, + 15321, 14682, 13820, 13754, 14807, 14646, 13230, 10023, 8764, 14260, 15230, 14183, 14283, 13465, 14233, 14635, + 14607, 14430, 13998, 9124, 14394, 12548, 14203, 14267, 14373, 14726, 13785, 15285, 12357, 12288, 14369, 15182, + 13132, 12324, 13700, 11640, 14392, 13178, 15012, 14998, 13251, 14388, 14603, 13409, 13830, 15063, 14376, 14692, + 11631, 13499, 13624, 14943, 14217, 13866, 14416, 15047, 13734, 14899, 12260, 15305, 15145, 11620, 12624, 11855, + 13974, 14372, 13707, 13966, 12246, 15188, 15073, 14949, 15291, 11318, 13488, 13224, 15291, 13643, 14424, 15252, + 12917, 14654, 15173, 14878, 12750, 13745, 13928, 14631, 12897, 14349, 15048, 14289, 14825, 15032, 12223, 13659, + 14443, 13760, 14003, 13892, 13119, 15225, 14116, 14926, 13931, 14242, 13541, 14591, 15180, 13206, 14903, 12766, + 12514, 14552, 14339, 13715, 14564, 14078, 14599, 13539, 13664, 14831, 9110, 14508, 14399, 9425, 13459, 13559, + 14617, 13346, 13742, 13351, 15245, 14557, 14647, 15263, 15282, 13686, 13764, 14627, 11282, 14349, 14617, 12506, + 12735, 11436, 14208, 15250, 15315, 7650, 11544, 14473, 12861, 8668, 10256, 14968, 13666, 13830, 14416, 14215, + 14896, 11131, 12023, 14490, 15306, 14466, 14396, 14949, 12425, 12503, 14230, 15347, 14818, 12473, 13567, 8464, + 14629, 14094, 13363, 12736, 13141, 14998, 13033, 14588, 15104, 14065, 14367, 14449, 14741, 14069, 14676, 13369, + 13832, 13345, 13989, 14391, 12640, 13760, 12363, 15069, 12757, 14773, 13378, 13055, 14899, 12857, 13817, 15207, + 14972, 14573, 9301, 12502, 14110, 15221, 13964, 15169, 15320, 14690, 11179, 14780, 14671, 
14865, 15350, 14184, + 14151, 14517, 12282, 14759, 14439, 14103, 15339, 14931, 12552, 13318, 15211, 15066, 12992, 14550, 11259, 12419, + 14776, 14162, 15235, 14883, 14998, 15193, 14118, 15042, 14503, 14681, 14538, 13177, 14115, 14061, 13090, 13966, + 12629, 12211, 12073, 14908, 13098, 9583, 14507, 13244, 14623, 14421, 14355, 14921, 10424, 14364, 14283, 15104, + 13226, 14180, 14008, 13450, 15344, 13653, 14458, 13620, 11175, 14448, 15336, 10121, 15316, 14391, 14316, 13700, + 11525, 15033, 10672, 12981, 13419, 14753, 9970, 14940, 14994, 14085, 14982, 14978, 14101, 14082, 13800, 11973, + 14325, 12107, 8516, 15242, 14353, 14368, 14571, 14716, 14067, 12330, 15106, 13606, 12937, 11114, 13907, 14410, + 14652, 14963, 14669, 14074, 14571, 14824, 14136, 12370, 14113, 11724, 14784, 13808, 12902, 11864, 14270, 15196, + 15305, 14499, 14291, 14886, 13869, 12183, 14167, 14592, 12414, 14657, 9286, 15262, 13383, 13627, 8484, 14315, + 14345, 14567, 15329, 15015, 14523, 15108, 14161, 15256, 14574, 14515, 12571, 10256, 15154, 15047, 14951, 12551, + 14111, 14663, 11501, 12892, 15106, 13009, 14747, 15176, 14220, 13518, 15291, 14921, 13193, 12517, 14977, 13923, + 12583, 14515, 14646, 12287, 14900, 14833, 14367, 13131, 13715, 15306, 13989, 13711, 13249, 12101, 14549, 12616, + 12549, 14119, 7531, 14490, 14917, 15137, 14942, 15063, 12800, 12535, 14550, 12257, 13453, 14535, 13345, 14564, + 14388, 15186, 14506, 14391, 15313, 14327, 14346, 15245, 11550, 15159, 11270, 11347, 13502, 9855, 13594, 14357, + 15092, 14908, 14357, 14428, 14480, 13572, 12835, 14859, 14806, 14838, 15298, 11858, 11924, 15207, 14659, 12864, + 12394, 13275, 12727, 15068, 13776, 14865, 14943, 12230, 11367, 12997, 13566, 14479, 12717, 14556, 9114, 14026, + 14384, 14106, 15227, 14760, 14902, 14803, 13640, 15169, 14639, 14630, 14843, 14544, 15137, 14406, 12902, 15226, + 14419, 15315, 14830, 12583, 14584, 14696, 14735, 13561, 13894, 14835, 14139, 13673, 14024, 14495, 13318, 15007, + 12981, 14944, 14884, 13472, 14689, 13964, 
15016, 14916, 14386, 15179, 14811, 14583, 14426, 15145, 14955, 14938, + 14902, 12423, 9975, 13343, 13032, 11298, 15262, 13872, 12825, 15039, 15337, 13537, 12980, 14658, 12709, 14345, + 15351, 14845, 10239, 14876, 10764, 12704, 13978, 14400, 10757, 12926, 13671, 15231, 14780, 14935, 14510, 12731, + 14487, 13950, 13422, 15322, 13786, 14550, 14891, 12344, 13934, 14369, 14605, 13718, 14303, 15177, 11136, 13654, + 14458, 14839, 13400, 15220, 14932, 14064, 13379, 10524, 15321, 12763, 13306, 14361, 15174, 13782, 13201, 12374, + 14500, 15168, 14334, 15171, 14706, 11001, 14554, 14047, 11438, 11807, 13332, 14920, 15158, 14265, 13580, 13722, + 14431, 14925, 9820, 14094, 15328, 15002, 12950, 14236, 14273, 13075, 13870, 14433, 14269, 12824, 14843, 14902, + 11934, 14149, 11421, 11672, 14286, 11836, 13824, 14192, 15115, 15135, 14734, 15002, 14374, 8563, 14923, 13995, + 15293, 14340, 14492, 14885, 14803, 14785, 11564, 15288, 14508, 14614, 13640, 15211, 7792, 14073, 15038, 13852, + 14636, 12772, 14378, 15299, 14396, 11406, 14949, 14930, 14599, 14965, 15118, 14129, 14717, 12717, 15171, 15076, + 13342, 14112, 15326, 13519, 14702, 15139, 9794, 14989, 13660, 14308, 13547, 9228, 14086, 11501, 13199, 14066, + 15327, 12515, 15332, 9008, 12337, 14369, 14856, 15315, 15302, 14497, 14539, 14243, 15107, 9308, 10840, 14022, + 12483, 12732, 11469, 13912, 12030, 14093, 14926, 14735, 13580, 13488, 14114, 14706, 14460, 15134, 14221, 13662, + 14129, 14729, 15126, 14939, 15028, 15231, 7941, 14268, 11360, 13687, 14351, 13566, 12114, 14521, 14720, 14657, + 13671, 12577, 12487, 14916, 14176, 13910, 13507, 13368, 14979, 13359, 15101, 15270, 14527, 14266, 15038, 15347, + 15055, 13593, 15295, 14771, 15087, 10903, 14548, 14978, 14486, 14746, 13074, 13365, 15242, 12785, 14533, 14943, + 15314, 15197, 15223, 13639, 14656, 14633, 14089, 10664, 14799, 15150, 13587, 15300, 6978, 13587, 14357, 14240, + 11696, 12711, 14188, 11888, 12422, 13503, 15234, 15159, 11461, 11373, 12512, 14157, 12515, 13447, 14077, 15289, + 
14542, 11793, 13699, 15167, 14681, 9978, 14332, 14991, 14439, 14332, 13517, 13747, 13859, 13909, 11344, 14084, + 11470, 10940, 15306, 14528, 14822, 15064, 12310, 11726, 11442, 13611, 9006, 14978, 12820, 14998, 12516, 9475, + 13993, 14714, 11545, 15007, 13338, 13615, 15175, 14266, 14926, 14853, 14637, 14579, 13849, 15290, 11405, 15057, + 13982, 12755, 13444, 14406, 13587, 13769, 14363, 13899, 15156, 12368, 14340, 14472, 13849, 14739, 13507, 14640, + 14851, 12961, 15201, 13605, 12821, 15294, 14875, 13603, 14101, 15189, 14708, 12518, 12797, 14921, 14325, 14916, + 11399, 14839, 9126, 14447, 10611, 15177, 12065, 13089, 13455, 14446, 12369, 13756, 8637, 13936, 15286, 14295, + 15174, 14733, 15045, 13869, 13790, 13505, 13845, 14179, 15316, 14118, 14635, 15293, 14204, 15060, 15192, 11341, + 14479, 15240, 15347, 12728, 14541, 12004, 14519, 13473, 12621, 12987, 15162, 13373, 14697, 15257, 14976, 14507, + 12167, 13638, 14851, 7726, 13569, 14018, 14392, 10574, 14838, 9700, 15056, 15349, 15249, 14344, 14875, 14217, + 14836, 14306, 12388, 13122, 14860, 14503, 14656, 14653, 15352, 11227, 13131, 14458, 12439, 12075, 14123, 15080, + 14403, 13379, 15134, 14658, 15170, 9486, 11324, 11895, 14248, 9618, 14693, 15267, 14078, 13604, 14915, 14630, + 13919, 10411, 14422, 13882, 13765, 11507, 15249, 14390, 14377, 12026, 13998, 13294, 14500, 13629, 13841, 15085, + 11485, 14360, 15065, 13903, 12346, 13666, 15301, 13443, 13232, 10277, 14768, 13644, 11968, 14495, 10161, 13470, + 14687, 14590, 12071, 12118, 15032, 14993, 14954, 14555, 9848, 15008, 14492, 13327, 13125, 15125, 13399, 14598, + 14762, 15036, 14394, 15222, 15350, 11741, 14010, 12379, 14712, 14624, 13730, 13734, 14763, 15064, 14146, 14901, + 14320, 13750, 14186, 10237, 14736, 12645, 15037, 14440, 12790, 12127, 14610, 14925, 13934, 14623, 14428, 13434, + 12362, 14112, 14317, 13787, 15185, 13800, 13190, 14477, 15300, 14626, 13825, 14974, 14815, 15196, 13326, 14030, + 14905, 13771, 14420, 13435, 13332, 14407, 14681, 13317, 14559, 13616, 
11281, 14433, 14647, 15336, 13381, 13639, + 13773, 11862, 14679, 12454, 14956, 15158, 15209, 15033, 13838, 14601, 11817, 14108, 13657, 9752, 14358, 15169, + 14347, 15337, 14605, 15352, 13593, 9707, 14299, 13446, 15023, 13534, 14627, 14502, 14574, 14504, 14160, 12621, + 14898, 13574, 15323, 13828, 14435, 14499, 14049, 13468, 11970, 14163, 14507, 13798, 13642, 14583, 13739, 15187, + 13939, 10838, 14751, 12546, 14764, 13467, 15252, 15087, 13831, 15326, 15081, 14716, 10620, 13033, 15011, 15181, + 14955, 15218, 13058, 14437, 13052, 13907, 13789, 12728, 14692, 13382, 14687, 13870, 11331, 13461, 14515, 14937, + 15141, 11729, 10505, 13650, 14519, 14746, 14672, 13797, 13483, 15143, 11281, 14537, 12178, 15264, 11971, 14154, + 12341, 12509, 14744, 14470, 13602, 14484, 13868, 12713, 9327, 13682, 11483, 13573, 12059, 15180, 14349, 14355, + 14653, 15316, 12309, 13496, 15062, 14585, 14744, 11867, 13180, 15250, 13338, 15306, 15242, 15095, 14965, 13461, + 13660, 15126, 15115, 14599, 14691, 14448, 14047, 14739, 13004, 14597, 11901, 9589, 14855, 15172, 11744, 13145, + 12791, 15253, 14530, 14830, 13918, 15211, 14341, 13437, 11648, 14564, 14961, 14525, 15337, 13253, 13750, 11319, + 10882, 9265, 14224, 15190, 12928, 14186, 12840, 14926, 12454, 14477, 13797, 15271, 15025, 12041, 13977, 14457, + 12038, 13512, 14585, 14988, 13022, 14378, 15038, 12985, 12971, 14505, 14560, 15108, 14935, 13266, 12267, 11799, + 14821, 14462, 14521, 14413, 15312, 13479, 12619, 13667, 14527, 13543, 13204, 13663, 15134, 12252, 12570, 14723, + 14242, 13497, 14488, 13789, 12396, 12813, 14995, 15212, 14679, 15005, 14721, 14746, 13405, 13532, 14729, 14698, + 12224, 14756, 14806, 14529, 12246, 14548, 13370, 12974, 13450, 14676, 14389, 14697, 14131, 14843, 14288, 13062, + 13018, 14667, 13741, 14437, 14864, 7606, 14282, 14567, 14547, 10469, 15198, 13803, 12513, 12313, 15324, 13499, + 13949, 10462, 10275, 14429, 14662, 13114, 13480, 15328, 12990, 13611, 14378, 12605, 15241, 13678, 14589, 14035, + 14970, 13951, 7312, 
11876, 13570, 14521, 10657, 14797, 14360, 14584, 15008, 13027, 14871, 12978, 14824, 15001, + 13357, 14968, 15201, 14372, 13891, 14673, 12327, 10698, 14727, 14953, 12495, 15101, 14351, 15271, 13875, 14439, + 14500, 12701, 13568, 14536, 14186, 13586, 15055, 14259, 14684, 14768, 14767, 14549, 14601, 12043, 15200, 14343, + 14785, 13316, 15337, 15049, 14746, 13395, 15358, 14793, 10982, 13401, 9769, 13048, 14982, 14414, 14465, 11418, + 15196, 13538, 14904, 13898, 14465, 14333, 14453, 14968, 14366, 15273, 14928, 14313, 15135, 14097, 13566, 15168, + 14687, 12123, 15223, 14695, 12665, 14136, 14892, 10944, 15017, 13018, 14119, 13188, 15001, 14305, 14412, 15169, + 13663, 14299, 12795, 14018, 14627, 14244, 11787, 14949, 13479, 13944, 13335, 13427, 15273, 14987, 14816, 13732, + 15107, 14913, 12201, 14006, 15150, 14560, 13859, 14191, 14678, 14769, 13362, 13890, 13856, 14441, 13925, 15142, + 14406, 14982, 14725, 15108, 14436, 14201, 15327, 13618, 15350, 14122, 11413, 14718, 15103, 12658, 14742, 15235, + 14555, 15109, 14157, 15345, 12809, 13710, 14171, 13767, 14225, 13416, 14476, 14567, 13677, 14664, 14853, 13625, + 14687, 12358, 14828, 15194, 15049, 9972, 14638, 13922, 14277, 13603, 15140, 15327, 15202, 14687, 14577, 8740, + 13921, 14506, 14484, 11462, 14725, 14834, 13585, 15161, 14553, 14164, 14577, 14678, 12473, 15124, 9172, 12042, + 13272, 15290, 14406, 15031, 14722, 14837, 14476, 12193, 11932, 14956, 13736, 12650, 14237, 14184, 11759, 15083, + 13654, 13669, 15282, 11637, 14870, 13453, 13632, 14664, 10758, 15359, 14637, 13846, 13560, 15357, 12303, 14550, + 15196, 13419, 12288, 11780, 15221, 15244, 14571, 14591, 14753, 14045, 12006, 14735, 14321, 14677, 14717, 14060, + 13112, 13537, 14542, 10945, 14708, 15255, 13722, 11324, 14575, 14576, 13385, 14898, 15053, 9813, 15015, 14792, + 12662, 14001, 13685, 14812, 13562, 13774, 15040, 14418, 14515, 14606, 11999, 13388, 8317, 14188, 14948, 14740, + 9582, 15265, 14636, 13999, 15023, 12110, 14388, 13799, 14663, 14661, 14037, 12785, 
14473, 14692, 15261, 13754, + 13803, 15296, 13832, 14774, 10913, 14408, 14643, 11380, 14633, 15316, 14777, 14453, 14631, 14302, 14137, 14838, + 14646, 13829, 14460, 12890, 14669, 14840, 14938, 12788, 10672, 15008, 15179, 15195, 14545, 14962, 13946, 14826, + 13419, 13151, 11817, 13742, 10366, 13593, 14513, 14460, 13909, 13513, 9557, 12583, 13503, 13684, 12705, 14249, + 13792, 12546, 12799, 15171, 15081, 13644, 14257, 14971, 14445, 14839, 13677, 14876, 15093, 13519, 13870, 11474, + 12672, 13838, 15092, 14974, 13355, 13569, 13891, 11547, 13318, 13526, 12932, 14944, 15177, 13367, 14413, 13571, + 13737, 15053, 14772, 13720, 13947, 13484, 14128, 15161, 14641, 13501, 13984, 14119, 12460, 14643, 12419, 13894, + 15048, 13553, 13642, 11960, 14998, 13661, 15213, 13361, 13990, 15299, 15283, 14897, 14515, 14759, 15246, 13407, + 14470, 13906, 15211, 14141, 15234, 15099, 14797, 12104, 14795, 15239, 12584, 12538, 14574, 14381, 14415, 13127, + 12782, 13790, 15148, 14573, 14650, 14729, 14786, 14920, 12010, 12983, 14927, 12700, 15204, 12842, 14411, 10356, + 10370, 13430, 11596, 11585, 13726, 13398, 13912, 14365, 13075, 13872, 15243, 14489, 11860, 14862, 14926, 15061, + 14732, 11895, 13934, 13336, 15018, 12240, 15128, 15168, 13704, 14927, 14194, 11314, 13998, 12019, 12659, 15296, + 12644, 12698, 10821, 14121, 15150, 13987, 14787, 14652, 13576, 12601, 14027, 12473, 14796, 14751, 14308, 12600, + 14516, 15348, 13028, 15023, 14932, 13686, 14395, 14589, 14152, 12517, 10594, 10144, 13802, 13606, 15299, 13557, + 12156, 15050, 14744, 12569, 13672, 13067, 14951, 14670, 13395, 13687, 10676, 14091, 12486, 15287, 13854, 15319, + 13867, 14358, 14924, 14913, 14909, 13759, 14233, 14395, 13213, 15022, 15180, 12128, 9487, 13885, 14817, 12410, + 14092, 15317, 13427, 13239, 11930, 12380, 14268, 14340, 12915, 15247, 14034, 13044, 14942, 15222, 12381, 14652, + 14742, 12665, 15293, 14005, 15209, 14897, 14679, 11463, 15321, 12018, 14781, 15020, 14487, 14643, 15103, 14210, + 12082, 13755, 15175, 15019, 
13660, 14737, 14986, 15281, 10080, 14435, 12848, 10553, 14995, 11697, 14987, 12428, + 14783, 13345, 13287, 12844, 14653, 12940, 12506, 15016, 14119, 12850, 15074, 13385, 9895, 14765, 15235, 13596, + 15100, 14189, 14207, 13500, 11894, 14165, 14037, 12818, 13540, 15237, 13692, 14825, 14424, 14410, 13868, 14888, + 14830, 14988, 15263, 14504, 8894, 13972, 14625, 12776, 10212, 14441, 13776, 10439, 12834, 14520, 14841, 11392, + 10891, 14291, 14912, 14770, 12214, 12883, 14895, 14074, 13693, 15309, 14400, 14744, 14910, 13979, 15127, 14222, + 12647, 13950, 9448, 14845, 14108, 14894, 14053, 13768, 14511, 14359, 13850, 12709, 14836, 14547, 10861, 15312, + 13725, 14399, 13509, 12781, 14808, 13620, 14931, 9975, 14657, 13981, 10011, 15272, 13709, 15333, 14675, 12136, + 11010, 12497, 14035, 14930, 14569, 14657, 14729, 15341, 13556, 14872, 13903, 10408, 14467, 12697, 8511, 12177, + 14814, 12650, 13295, 11849, 15171, 14829, 14821, 13856, 14667, 12009, 14327, 13187, 14550, 10612, 14137, 14181, + 13004, 14082, 12200, 13670, 15143, 13979, 12297, 14490, 13274, 14646, 13558, 14504, 13603, 14557, 12500, 14720, + 14537, 12557, 12978, 15045, 13212, 14434, 13495, 14662, 12813, 12651, 15231, 11080, 8812, 13546, 15115, 14432, + 15148, 14342, 14807, 14548, 14829, 15325, 14992, 14919, 13596, 14111, 10654, 12883, 13415, 14617, 15072, 11924, + 10887, 14854, 15222, 12643, 15271, 10864, 13442, 10281, 15269, 15054, 14067, 14784, 13600, 12363, 12832, 14201, + 14799, 12919, 14306, 14676, 14240, 13732, 13109, 11770, 14907, 13165, 15189, 11463, 14212, 15056, 15018, 10077, + 15086, 14751, 13775, 12941, 15332, 14011, 14574, 13484, 12962, 13098, 14183, 14411, 14535, 14401, 15173, 11423, + 14934, 14463, 14415, 9292, 14874, 14633, 13666, 12768, 12307, 10968, 15170, 11447, 9536, 15358, 12946, 14561, + 15053, 14475, 11292, 10404, 11384, 14546, 14261, 14895, 10909, 14966, 14563, 13092, 14463, 15226, 15258, 14023, + 14570, 13745, 13997, 13823, 15182, 14014, 15184, 9986, 13013, 12542, 13660, 14001, 14615, 9826, 
15100, 15179, + 14415, 15339, 14054, 13593, 14913, 12042, 14867, 14293, 15283, 13866, 14460, 14126, 14105, 14380, 9611, 8554, + 14830, 12673, 12202, 11897, 13502, 15102, 7852, 13070, 12281, 15020, 9069, 14843, 13694, 15300, 13317, 15324, + 14430, 15172, 14915, 14319, 13909, 12352, 15220, 15012, 14842, 12246, 12766, 14429, 14506, 14944, 14980, 10360, + 12356, 14927, 13486, 14402, 13905, 13400, 13141, 13254, 14753, 12412, 15090, 14767, 15348, 14835, 14874, 14168, + 12935, 14936, 12524, 14785, 13603, 14902, 10438, 14799, 15087, 15322, 14507, 14864, 13524, 14941, 14388, 13813, + 13679, 14655, 14475, 14504, 14379, 14377, 11414, 13814, 13733, 14448, 14380, 12606, 13717, 15003, 14365, 14594, + 14805, 14253, 12204, 15025, 14498, 14398, 12048, 14093, 15177, 15144, 10532, 12406, 14324, 14974, 15333, 10585, + 14593, 15069, 14961, 14823, 14921, 14842, 13084, 13489, 13717, 14637, 14637, 14241, 14492, 15166, 14957, 14732, + 14854, 14692, 14702, 14471, 15164, 14703, 15070, 14204, 14295, 15110, 13602, 15358, 15225, 10073, 14242, 15074, + 13958, 12564, 14748, 15078, 15218, 14531, 14430, 15311, 13866, 14447, 14695, 15185, 10410, 15181, 12196, 14851, + 14968, 14846, 10367, 12637, 14439, 11712, 14625, 10939, 15216, 13078, 14562, 14189, 14360, 14980, 15237, 13990, + 13009, 12920, 12480, 14501, 14401, 13968, 13344, 13353, 12309, 14160, 12225, 14452, 15318, 12523, 15329, 14096, + 13592, 14819, 11528, 13853, 14881, 14980, 13198, 11658, 12603, 13069, 15332, 14726, 14410, 14427, 14881, 14824, + 11092, 13352, 9441, 11146, 14753, 13778, 14491, 15200, 13812, 14572, 10573, 14956, 12739, 11836, 15139, 14687, + 14931, 15160, 13582, 14390, 11015, 12687, 14049, 15305, 14454, 12778, 13944, 15244, 12432, 12802, 15179, 14970, + 14037, 13087, 13976, 14668, 11331, 13973, 14890, 9287, 13443, 14416, 13977, 13950, 14708, 13007, 10190, 14559, + 15325, 14233, 13994, 14895, 14458, 13399, 15060, 14703, 12968, 14969, 14453, 13421, 15243, 14451, 14413, 14522, + 14857, 14745, 14540, 14418, 11733, 14899, 13926, 
15301, 14612, 13216, 14353, 14471, 14551, 14677, 13828, 14650, + 12707, 14652, 14722, 13545, 10043, 14812, 13052, 14781, 12855, 11580, 13818, 14412, 15149, 13395, 15129, 14943, + 14876, 15207, 13265, 13622, 14872, 13639, 14841, 14916, 10480, 12768, 13133, 10626, 14939, 14207, 14874, 14821, + 9777, 13447, 13559, 14625, 11894, 9028, 14769, 11493, 10202, 13288, 15346, 14618, 12163, 14981, 14883, 13748, + 13410, 11860, 13015, 15153, 14474, 14770, 12948, 11569, 13354, 13180, 13725, 13842, 10914, 13812, 12691, 13013, + 14516, 14860, 13121, 10415, 15073, 15108, 13321, 15246, 11704, 12863, 15142, 15052, 14696, 12639, 15308, 9930, + 14811, 13796, 13522, 14508, 14905, 14611, 13669, 13660, 14528, 14141, 12837, 15045, 14448, 12846, 13536, 13639, + 12850, 12416, 12806, 14541, 14230, 15231, 13874, 14781, 11334, 11989, 14972, 14991, 14309, 14726, 14859, 12436, + 14565, 13524, 13689, 14438, 12543, 15079, 14128, 10538, 14280, 9745, 14816, 11193, 14149, 13898, 13508, 15108, + 11523, 14867, 14953, 14099, 14116, 15025, 12586, 12982, 14329, 14770, 11904, 13563, 12410, 15229, 14592, 13059, + 14895, 12576, 14601, 15170, 14829, 15180, 14577, 14181, 15063, 12979, 10652, 13749, 14245, 13642, 13063, 12380, + 12811, 14922, 11681, 12254, 11882, 12187, 12844, 14947, 14459, 15242, 14504, 12621, 13657, 11755, 14892, 13808, + 13263, 13823, 13950, 12157, 14500, 14686, 9482, 15001, 13434, 14051, 10953, 15329, 15165, 12625, 11889, 12430, + 14650, 15022, 9583, 12717, 15200, 12623, 13975, 14773, 14468, 14725, 14628, 13775, 13542, 14653, 12310, 14444, + 11105, 14585, 11657, 10936, 14863, 15036, 15023, 14772, 14534, 15004, 14594, 12825, 14935, 11205, 14860, 14428, + 14476, 14129, 15041, 14670, 14790, 13935, 11847, 15327, 12801, 15227, 10306, 12960, 14359, 13381, 15007, 14592, + 12779, 14579, 12809, 15249, 12860, 14531, 13442, 13949, 15128, 13976, 14729, 14719, 14748, 15130, 15148, 14084, + 14894, 14353, 14677, 7098, 13490, 14258, 14048, 11013, 13758, 14868, 15262, 14128, 13045, 14832, 14559, 14890, + 
15034, 15328, 15262, 14988, 11709, 14811, 12844, 14970, 14994, 13460, 11967, 12981, 14251, 12779, 15209, 12833, + 14960, 14459, 14925, 15057, 13836, 13382, 13432, 13622, 7276, 15125, 12907, 13668, 9435, 14990, 14339, 14110, + 14869, 13972, 14584, 8563, 11733, 11716, 14694, 14994, 14949, 14724, 15217, 14842, 15050, 15042, 14804, 14816, + 11864, 15309, 14021, 13057, 14698, 15258, 12483, 11990, 15119, 13025, 13617, 14357, 13357, 12262, 14656, 14646, + 11528, 13513, 14097, 8186, 15247, 14716, 15098, 15169, 11587, 15233, 14905, 13845, 13429, 15035, 14390, 15326, + 12515, 12850, 13080, 15214, 12909, 15301, 14646, 10815, 14482, 15354, 15228, 12297, 14775, 14779, 12600, 15072, + 9060, 14943, 15188, 14448, 13361, 14217, 14646, 14977, 14964, 15313, 15260, 11414, 14702, 14850, 14346, 11353, + 15035, 13442, 13881, 15044, 14796, 14353, 13179, 13373, 14394, 15085, 14587, 14491, 13388, 14024, 13464, 14797, + 14681, 13734, 14949, 13591, 13263, 13474, 13262, 12806, 13689, 14520, 12315, 13566, 14680, 14824, 14974, 14904, + 14856, 14830, 11253, 13731, 12592, 12520, 13453, 10468, 14966, 15296, 11956, 12514, 10417, 14595, 15286, 14884, + 12661, 14563, 14288, 13426, 14733, 12599, 15353, 13588, 14020, 15078, 14355, 12805, 14583, 13564, 13566, 13024, + 15193, 14856, 14830, 14859, 15230, 14841, 12288, 13793, 14552, 14817, 14448, 15251, 14288, 14028, 13377, 14854, + 14788, 10762, 14939, 14939, 14991, 15299, 13849, 13404, 12516, 14459, 14957, 14470, 13398, 13893, 15017, 13723, + 14781, 14054, 13511, 14517, 13415, 15226, 14865, 11667, 14587, 12823, 15123, 14524, 13406, 12371, 14212, 14774, + 14876, 14381, 11781, 14739, 14580, 10051, 14678, 14068, 14667, 13128, 14624, 14321, 15161, 15099, 14583, 13478, + 13899, 15141, 10750, 15309, 13848, 12599, 14318, 14218, 14369, 15014, 13958, 14111, 13870, 14406, 12347, 13905, + 12851, 15237, 15191, 14819, 14746, 13912, 13077, 14695, 13990, 14627, 8712, 14882, 11763, 14666, 14068, 14447, + 14427, 11943, 11754, 12103, 10935, 14033, 9741, 14557, 12921, 
13563, 15040, 15196, 14967, 14083, 15229, 14949, + 15072, 14901, 13728, 11453, 13725, 15205, 14649, 13081, 13983, 14193, 15076, 11724, 13856, 14437, 13796, 12617, + 15116, 10376, 14800, 11549, 14331, 14983, 15347, 15045, 13963, 14888, 15146, 12894, 15340, 14951, 14463, 11035, + 14927, 14220, 11718, 14851, 13301, 14213, 12441, 14853, 11990, 14779, 12587, 13641, 9569, 12541, 12489, 14202, + 8464, 14527, 13321, 14064, 13082, 9732, 14984, 14808, 15211, 11236, 14394, 14666, 13472, 15174, 15219, 14358, + 12039, 12843, 14687, 14517, 13927, 15147, 14426, 15165, 14620, 15349, 14790, 14457, 13944, 14309, 15071, 14854, + 14535, 12998, 12554, 15059, 12797, 14566, 15295, 14291, 14215, 14090, 13503, 15114, 14690, 14749, 14859, 13778, + 12311, 13389, 15225, 14448, 12848, 14645, 10535, 12193, 13125, 12381, 12384, 15002, 13761, 14890, 14452, 13090, + 13200, 12022, 13325, 11849, 12736, 15273, 14162, 14785, 14754, 12874, 13643, 14584, 15244, 10478, 12310, 14445, + 14596, 14623, 15190, 15271, 13887, 15194, 12205, 15351, 14799, 14705, 12519, 15191, 14902, 15297, 13902, 14923, + 14006, 14860, 14966, 13443, 14080, 13668, 15351, 13981, 14803, 15296, 15214, 14947, 14782, 13906, 13493, 14550, + 12450, 15156, 14158, 9253, 14090, 14733, 12705, 14812, 14477, 14936, 14536, 13689, 14693, 14042, 14175, 12990, + 14409, 15062, 14645, 14168, 14159, 13939, 13095, 15048, 13903, 13694, 15198, 12279, 14659, 12615, 13483, 14680, + 15162, 15243, 15323, 15166, 15131, 13765, 14917, 13883, 11106, 14810, 15164, 10184, 14375, 14688, 12570, 13410, + 14073, 13568, 15048, 14004, 15106, 14122, 13608, 13944, 15319, 13628, 13433, 12612, 14459, 6745, 14991, 12830, + 13787, 15269, 14640, 12796, 14040, 14882, 12795, 14403, 14296, 14934, 15301, 13770, 14337, 14745, 14935, 12096, + 12133, 10257, 14107, 15226, 14482, 15137, 15152, 12419, 14632, 13805, 13100, 13637, 13975, 14444, 10944, 10827, + 13780, 13553, 9716, 13088, 14811, 12685, 14877, 12629, 15072, 12610, 13408, 14522, 13659, 11531, 14448, 13631, + 14582, 11662, 
14163, 9270, 13335, 14325, 13784, 11719, 13492, 13286, 14500, 14037, 14602, 13090, 11078, 14358, + 12894, 12765, 11814, 14439, 15124, 15002, 13232, 14105, 14068, 10712, 13998, 12492, 13483, 13538, 8691, 13895, + 15053, 14354, 15240, 14953, 8131, 14000, 15241, 14578, 13768, 14045, 12457, 14409, 14402, 14305, 13747, 14920, + 13951, 15003, 12578, 14105, 10032, 15250, 10553, 14947, 14378, 15005, 13867, 14012, 14387, 13625, 14041, 12271, + 13896, 15173, 13917, 8220, 14585, 15087, 12770, 15032, 14869, 11538, 14243, 14862, 13703, 14303, 13943, 15110, + 13548, 14296, 14693, 14277, 14501, 14574, 14654, 14434, 10337, 13888, 13705, 13898, 13516, 13987, 13727, 13481, + 14032, 10719, 13624, 13140, 15215, 11955, 13645, 14497, 12420, 13432, 13220, 13529, 13922, 14678, 13403, 15231, + 14878, 14883, 14661, 14824, 14172, 12616, 14229, 13852, 12626, 15183, 14452, 14737, 14766, 13994, 14697, 13953, + 14111, 13985, 14921, 14733, 13992, 13346, 12389, 14779, 14485, 14470, 10322, 14073, 14459, 15283, 13057, 11701, + 14712, 14682, 14621, 13768, 14608, 15294, 13999, 13590, 14663, 13097, 13968, 12967, 15098, 14393, 14069, 14101, + 12834, 15230, 15014, 13723, 15335, 12380, 14014, 14854, 14651, 14405, 13692, 14358, 14656, 10697, 10813, 14613, + 14571, 13949, 14186, 14587, 14399, 15250, 14135, 13497, 14146, 14517, 14627, 15296, 15188, 13812, 15186, 12399, + 13842, 13066, 13073, 14636, 13686, 14157, 13244, 15012, 14324, 14740, 14448, 12613, 12738, 14663, 14629, 14412, + 14693, 11501, 10631, 15192, 14887, 13623, 15103, 15247, 11633, 13504, 14203, 13644, 14801, 14509, 13457, 13718, + 13897, 13365, 14345, 9774, 15130, 15341, 14455, 13026, 14114, 11461, 13789, 14040, 14428, 13735, 13525, 14683, + 15110, 14616, 15195, 13456, 9538, 14082, 14541, 15121, 13958, 10991, 15011, 12123, 14264, 13884, 14590, 11112, + 10902, 11888, 12164, 15051, 13607, 14913, 13822, 14665, 12640, 10230, 13862, 15264, 14183, 14858, 14588, 13683, + 14994, 14767, 14945, 13391, 14439, 14760, 11518, 12751, 13035, 15153, 15126, 
4184, 13442, 14500, 14935, 12741, + 10270, 14642, 13149, 12420, 14871, 14384, 15225, 14629, 13499, 14064, 15125, 14829, 14254, 11859, 14544, 11727, + 12038, 15046, 12423, 11845, 14433, 14631, 13323, 12436, 12723, 13482, 14149, 10735, 15283, 15211, 11815, 14897, + 12516, 14931, 15277, 12879, 14242, 14203, 7199, 15170, 14832, 11716, 12027, 5753, 15150, 15173, 12337, 13642, + 14821, 15297, 13360, 11806, 14751, 14222, 12220, 14726, 14954, 12868, 13994, 14045, 14656, 14220, 13570, 13944, + 14603, 14608, 14388, 15106, 15034, 14798, 14036, 13929, 14855, 13942, 13656, 14064, 11472, 15151, 14359, 15125, + 13836, 13967, 14847, 14794, 14852, 14377, 12299, 14546, 9684, 14435, 14623, 13309, 13355, 13624, 15195, 14991, + 14620, 13735, 15293, 13005, 13567, 14474, 13621, 15284, 9371, 14670, 15004, 14393, 14786, 15259, 11959, 14713, + 15187, 14376, 13555, 14234, 14611, 14525, 15276, 14887, 14591, 15018, 14164, 14800, 14264, 13578, 14637, 14883, + 14934, 14127, 14374, 13835, 14000, 12991, 12655, 14554, 13529, 14393, 15288, 14857, 14369, 11938, 14380, 14089, + 14642, 9095, 13752, 15161, 14794, 14557, 15031, 10413, 13247, 10024, 12095, 13965, 13737, 13488, 14868, 12773, + 15075, 13795, 14071, 14757, 15330, 14392, 12525, 9370, 14656, 13015, 13698, 14816, 14895, 13833, 13898, 13822, + 13506, 14624, 15068, 14376, 15032, 8490, 14565, 13387, 15353, 14554, 14972, 13921, 14970, 14546, 14908, 13988, + 15222, 14041, 11756, 13918, 14612, 13772, 14916, 14571, 13731, 8081, 15056, 13745, 14099, 15019, 5924, 13378, + 15182, 11266, 12401, 14406, 7367, 14966, 14719, 14673, 15087, 13621, 12374, 15208, 13311, 15290, 15353, 13969, + 9578, 14409, 14449, 12660, 14397, 15208, 14572, 12877, 13523, 14425, 14507, 14836, 13700, 15004, 11701, 15154, + 14508, 13380, 14911, 14174, 13772, 13421, 15357, 15302, 14183, 14767, 13738, 12133, 13259, 15166, 12646, 12290, + 13424, 15265, 14489, 15356, 15217, 14624, 14688, 13355, 13647, 14150, 14759, 14958, 14632, 14622, 11670, 14565, + 14929, 14533, 11589, 13764, 14780, 
9429, 15198, 14581, 14655, 12452, 13795, 14749, 11422, 14622, 13484, 14632, + 13941, 13036, 15152, 13203, 14851, 14850, 13728, 15349, 13542, 15117, 7740, 13934, 14620, 15267, 12968, 14928, + 10393, 13767, 14054, 13590, 14519, 14421, 13106, 14192, 15299, 15084, 14183, 15198, 12998, 14732, 15162, 12580, + 14705, 14610, 13388, 13898, 13610, 15124, 10922, 12655, 14710, 14362, 9493, 13046, 14099, 14052, 14877, 14813, + 10687, 13996, 12578, 15266, 13687, 14790, 14621, 15100, 14827, 13974, 14028, 13631, 14377, 13046, 12961, 10258, + 14939, 10719, 13978, 14773, 15356, 14048, 14659, 14476, 10528, 11640, 15316, 14570, 13737, 13959, 14689, 12158, + 12726, 12623, 14007, 14327, 11601, 13294, 14507, 13602, 14723, 14757, 15130, 15345, 14107, 13768, 11524, 13284, + 14667, 14822, 10728, 15024, 15037, 13811, 8743, 15094, 9778, 15359, 14545, 14944, 14564, 14999, 14848, 14576, + 12649, 15129, 13731, 14973, 15264, 12090, 11209, 13641, 13031, 10964, 14423, 14320, 14474, 10132, 14634, 15150, + 12001, 15280, 13935, 15003, 12406, 14831, 12488, 14959, 14808, 15061, 13994, 11567, 14261, 13607, 13462, 14705, + 13820, 14458, 15226, 14295, 14343, 14701, 13807, 15213, 14678, 13199, 12794, 14791, 15086, 13709, 12356, 15248, + 14723, 14867, 14908, 13706, 14217, 10128, 12894, 15013, 15356, 14599, 15248, 15264, 15023, 12395, 14361, 13997, + 14639, 14278, 15135, 14110, 13185, 13042, 12182, 15269, 14094, 13838, 13271, 15034, 15228, 15233, 15281, 13729, + 13368, 14488, 13028, 13736, 15042, 14975, 15086, 12862, 12393, 14088, 14320, 15182, 14266, 14381, 9308, 13200, + 14420, 15121, 13848, 15271, 13631, 15263, 14366, 12976, 14951, 15086, 14416, 14503, 13352, 14975, 12407, 15035, + 14389, 14035, 13779, 14543, 10571, 14811, 14429, 15288, 14752, 11199, 14941, 13323, 12831, 12266, 14451, 14567, + 15202, 14977, 14063, 14011, 14375, 14727, 13417, 15356, 13653, 14787, 14685, 13324, 14639, 12536, 11467, 11646, + 13787, 14043, 14907, 14615, 15326, 15164, 11720, 14795, 9924, 13256, 15140, 14999, 15103, 14015, 
14665, 14679, + 14763, 14808, 13060, 12655, 12831, 14893, 13331, 15012, 14883, 12118, 14756, 14722, 8938, 6267, 12594, 15199, + 13390, 14828, 14775, 14722, 14584, 15008, 9330, 12561, 14733, 12185, 15094, 14997, 14001, 12300, 13265, 12323, + 12451, 14395, 15350, 13028, 14655, 14436, 13118, 14759, 14909, 14928, 14757, 12679, 10684, 14270, 14448, 12386, + 14642, 13222, 13534, 14534, 10803, 15197, 13074, 15034, 9676, 12341, 14868, 14684, 14068, 13181, 12481, 11540, + 14609, 13327, 15006, 14489, 14609, 15280, 13090, 14038, 14145, 14888, 15304, 15013, 13174, 12819, 14728, 12949, + 12194, 13667, 14424, 13832, 14958, 14819, 15277, 15008, 15287, 12877, 14770, 13383, 14448, 14817, 14790, 15073, + 12795, 13431, 13714, 14362, 15084, 14206, 15345, 14957, 13638, 15041, 13863, 12024, 13614, 15064, 15122, 14914, + 13982, 14603, 14615, 14933, 14714, 12392, 14364, 14543, 14907, 14548, 14405, 12628, 15231, 14375, 13603, 14382, + 13941, 14971, 14181, 14773, 13194, 14034, 15348, 12567, 15242, 14898, 13518, 12952, 14404, 13739, 14630, 12211, + 14871, 14698, 14520, 14621, 10711, 12086, 12008, 12705, 14866, 13844, 13138, 15171, 12676, 14769, 15081, 12625, + 14597, 14629, 14386, 7654, 14779, 12334, 15327, 14290, 13320, 13760, 10392, 15294, 15251, 14800, 6234, 15009, + 14685, 13249, 14306, 14906, 13307, 13636, 15336, 14505, 15180, 11632, 11647, 15273, 13482, 13017, 13574, 13559, + 14758, 14944, 13402, 14001, 14489, 14499, 12929, 14852, 15313, 14435, 12903, 12862, 13668, 8404, 13672, 14120, + 13830, 12378, 12004, 14780, 10644, 14374, 13654, 14363, 12481, 12401, 14035, 14859, 14898, 14154, 13566, 13599, + 13317, 10772, 13022, 11189, 14622, 14775, 13554, 13179, 14317, 14846, 14660, 12312, 14941, 14151, 11194, 14552, + 14707, 14658, 15341, 13672, 13923, 10388, 9368, 15039, 10209, 14708, 11324, 14807, 11418, 13709, 15308, 7565, + 14301, 11787, 13101, 14542, 14944, 14592, 14385, 14875, 9322, 14584, 13966, 15177, 10582, 15102, 13840, 12879, + 12967, 12375, 14546, 14725, 14815, 15263, 12476, 15267, 
14561, 14384, 14641, 12995, 14568, 14219, 13811, 13141, + 12983, 12629, 15166, 14675, 15229, 14604, 14563, 14115, 13739, 14981, 14639, 15159, 14933, 14095, 14604, 9609, + 15329, 14881, 13331, 14582, 15110, 12000, 15293, 15120, 14981, 12350, 13342, 15024, 14642, 14164, 15289, 15299, + 14714, 13844, 12086, 15240, 13351, 15100, 13711, 14907, 13836, 8065, 15142, 14986, 12728, 14356, 14967, 15101, + 10660, 14860, 15070, 14282, 14813, 15229, 15181, 13627, 14824, 12739, 13364, 12383, 15206, 12968, 13845, 14052, + 15198, 15087, 14996, 7665, 12401, 13850, 15257, 14625, 13241, 15006, 14643, 11467, 14449, 14178, 15275, 15192, + 15217, 15129, 15234, 14178, 14974, 14984, 14778, 14942, 14911, 14520, 15122, 15122, 13328, 13953, 14475, 15109, + 14911, 14933, 13741, 13042, 14435, 11289, 14311, 14384, 14911, 11084, 11453, 14065, 12937, 15304, 14169, 14007, + 14343, 12814, 9226, 14694, 13636, 15113, 13805, 12838, 13554, 10554, 10925, 15143, 14915, 15235, 15022, 14734, + 14348, 14521, 11271, 14791, 12642, 14198, 15098, 13334, 14799, 14975, 14355, 14668, 14260, 15185, 14384, 15232, + 14428, 14983, 14918, 15016, 15261, 13605, 14663, 9048, 14759, 12745, 15214, 14168, 14964, 14256, 15260, 15101, + 13377, 13291, 14201, 14327, 15161, 13652, 14269, 15029, 8446, 13688, 15145, 13832, 14135, 14706, 13489, 11470, + 13551, 14507, 14552, 14864, 14426, 15268, 14705, 14393, 14086, 14682, 15193, 13329, 15334, 14267, 15009, 13939, + 13633, 13118, 13796, 14066, 15060, 13698, 12986, 13700, 14767, 14906, 14211, 14684, 13518, 14801, 14157, 13468, + 13791, 13684, 11755, 14550, 14280, 13691, 14366, 13703, 13603, 14860, 15247, 15314, 13398, 14482, 15033, 13542, + 13548, 15091, 15139, 6217, 15129, 13581, 13882, 15242, 15136, 15285, 14544, 13389, 15357, 14224, 12798, 13524, + 10796, 15126, 15211, 14446, 15278, 14329, 14571, 12444, 13046, 13912, 15050, 14936, 12295, 14404, 14670, 15242, + 13318, 13549, 12835, 13096, 15180, 11382, 13101, 14616, 14048, 14968, 14720, 14987, 12879, 15326, 12576, 12826, + 14685, 
15134, 14442, 13759, 15218, 15358, 14447, 13121, 14462, 12803, 13044, 14041, 14855, 14169, 14802, 14840, + 11495, 11403, 14146, 15302, 13293, 11295, 14550, 14593, 11841, 13739, 14877, 13607, 10682, 12255, 15158, 15080, + 13637, 14885, 12466, 14980, 13458, 14939, 14661, 14609, 13838, 12560, 13764, 14080, 9674, 14467, 15317, 13088, + 14135, 14642, 15300, 11240, 15056, 14048, 14486, 13776, 15099, 13208, 12737, 12615, 14232, 14566, 8981, 14787, + 14761, 14637, 14557, 14872, 13223, 9090, 15110, 12653, 15235, 11919, 15214, 14537, 14162, 15135, 14024, 15351, + 11402, 15293, 13686, 15051, 13472, 14179, 10552, 14686, 13972, 13150, 12956, 14686, 14282, 15279, 11822, 12659, + 13441, 10785, 13747, 15325, 15061, 13824, 15333, 14441, 14532, 15040, 12790, 14484, 14350, 11570, 14084, 15087, + 13965, 10642, 14826, 12448, 14869, 14544, 13660, 12853, 15143, 12767, 15107, 14986, 14214, 13915, 13564, 15356, + 12508, 13488, 14611, 14448, 13821, 14259, 13390, 11509, 13809, 14712, 12710, 14060, 13876, 11849, 14603, 14949, + 14480, 15246, 14911, 10775, 13067, 15020, 14757, 14207, 14739, 14503, 13622, 14529, 7291, 14896, 15304, 15328, + 14037, 14858, 14567, 14917, 13879, 15041, 14941, 13862, 12697, 12302, 14686, 14419, 14781, 12279, 13438, 14556, + 15353, 15170, 14509, 14147, 14486, 15008, 14792, 14065, 14652, 15273, 15191, 12365, 14826, 15338, 13331, 13551, + 13340, 14339, 15044, 14927, 12163, 13209, 14752, 14766, 13692, 10348, 8974, 14785, 15204, 12676, 15102, 14022, + 12895, 13520, 14872, 14687, 14147, 12596, 10426, 14723, 14734, 11778, 8901, 13154, 13889, 13538, 14831, 12664, + 12525, 14951, 13849, 9830, 14961, 14012, 14834, 14827, 14305, 14530, 14604, 14524, 12672, 15283, 15191, 8387, + 14772, 11369, 13590, 15189, 14082, 15327, 10525, 12366, 13794, 14829, 13517, 15185, 13651, 15210, 14865, 14792, + 13714, 14526, 15070, 12409, 14761, 11377, 14414, 13381, 14331, 13231, 12519, 13907, 15160, 14380, 13992, 14164, + 13974, 11179, 14012, 15115, 14418, 14378, 12497, 14395, 10096, 14397, 7660, 
10463, 10888, 14481, 15142, 11896, + 12830, 12505, 14619, 12183, 10995, 13510, 14519, 14715, 15264, 11896, 14405, 12537, 14631, 15043, 13759, 14362, + 14970, 15023, 15231, 12877, 14959, 14381, 15248, 13363, 14016, 15025, 14551, 14448, 14748, 14571, 15224, 15322, + 15145, 14909, 13184, 14650, 15218, 15120, 13815, 14522, 14320, 13954, 14173, 14931, 14979, 12861, 15006, 14125, + 12995, 14649, 14876, 13831, 14457, 12470, 13203, 14471, 14933, 14868, 10745, 13450, 14764, 13675, 12667, 14567, + 13979, 14207, 14243, 12088, 14438, 12632, 15277, 13898, 13048, 12171, 14532, 11278, 14362, 15140, 13070, 9460, + 14640, 14764, 14602, 14718, 14942, 14715, 14510, 14089, 13717, 14451, 12334, 13428, 14812, 15179, 13980, 13817, + 13853, 11846, 15114, 13219, 12535, 14425, 12082, 14784, 14343, 15285, 14693, 14953, 13591, 12887, 12151, 14324, + 14843, 12357, 13440, 15084, 15048, 13993, 15163, 12113, 11618, 13297, 15217, 14810, 15260, 13893, 15033, 12669, + 12617, 14566, 15092, 15304, 14834, 14875, 13850, 15056, 14563, 14506, 14585, 14008, 14476, 13489, 10084, 14987, + 12614, 13914, 14997, 12178, 14603, 13033, 14631, 13467, 15052, 14999, 14761, 14780, 14850, 12563, 15034, 13141, + 14433, 14832, 13830, 12316, 13238, 13948, 13590, 14827, 14542, 9888, 13665, 15012, 14259, 14706, 14778, 14433, + 13653, 14754, 14095, 14526, 13351, 13702, 14867, 9668, 10817, 15243, 15063, 14864, 14673, 13769, 12201, 14832, + 15359, 13556, 12906, 14905, 14942, 13685, 14363, 11690, 13519, 13872, 13589, 15123, 13588, 14114, 11638, 13052, + 14858, 10960, 13686, 9571, 13431, 14903, 14087, 13916, 15266, 14005, 12738, 15162, 13686, 14768, 15210, 12663, + 13778, 12775, 15034, 15224, 15000, 12291, 15170, 14857, 15356, 9873, 13566, 13241, 12912, 15260, 14166, 12150, + 14771, 15031, 14472, 14513, 13631, 14205, 13387, 10959, 15202, 14435, 15199, 14594, 14589, 15292, 13004, 13937, + 14044, 14418, 14791, 14913, 14910, 13791, 14898, 15340, 13388, 5035, 14845, 14483, 14507, 12307, 12506, 13395, + 14972, 14741, 15172, 15006, 
13687, 14389, 8269, 13345, 13754, 14859, 14799, 13589, 11170, 13055, 12967, 15261, + 14751, 14392, 14857, 14769, 14127, 14541, 12368, 11257, 15068, 11429, 12985, 15296, 11544, 13648, 15284, 14847, + 14552, 14399, 14664, 14228, 13729, 13741, 14505, 15254, 14360, 15119, 10617, 12498, 9659, 13683, 13526, 14914, + 14498, 12688, 11823, 14584, 13538, 14403, 14856, 14474, 7563, 12845, 11083, 10311, 12015, 13231, 14063, 14614, + 14726, 14464, 15285, 12798, 13689, 14610, 12242, 13009, 14843, 12535, 14646, 14524, 10561, 13064, 14675, 13277, + 14438, 10780, 14471, 15192, 11610, 14162, 12651, 14546, 14422, 14652, 13937, 12241, 15208, 13527, 14535, 15049, + 11072, 15065, 12875, 10774, 15291, 14984, 14028, 13021, 15266, 14229, 13496, 13933, 15020, 14458, 14672, 15281, + 13483, 12176, 12146, 14398, 12771, 12299, 14855, 14055, 14519, 13708, 12459, 14122, 14369, 13853, 14097, 12762, + 15096, 12655, 15265, 14672, 14596, 12372, 14457, 13614, 15195, 12963, 14376, 14339, 15241, 14639, 15307, 14170, + 14373, 7990, 15298, 15232, 10423, 11094, 15338, 13964, 12648, 14737, 15258, 9619, 13292, 14567, 14555, 13419, + 14428, 13994, 11449, 12814, 14414, 10672, 13931, 13546, 15057, 14733, 12816, 15319, 15309, 15199, 14720, 11227, + 13410, 14371, 11053, 12995, 15045, 14679, 14909, 13034, 13959, 9197, 15230, 15169, 12952, 14693, 14981, 15337, + 15128, 13410, 15148, 14442, 14176, 14782, 14698, 13241, 14728, 14841, 13260, 13695, 13312, 14877, 13450, 15305, + 15006, 14548, 12772, 14874, 9344, 15165, 15060, 14891, 14679, 14336, 11535, 14755, 12264, 15348, 15224, 14382, + 14769, 11498, 15048, 11907, 14882, 14361, 14302, 14217, 12358, 14665, 14497, 13702, 11397, 13583, 14701, 15074, + 15065, 15058, 14475, 12996, 13395, 15019, 13580, 13540, 14261, 12802, 15212, 13942, 12193, 11985, 13349, 12216, + 13803, 15343, 12182, 15055, 13405, 14213, 15174, 14385, 15259, 11591, 14621, 14052, 14906, 15212, 14955, 13043, + 15097, 14553, 14163, 14711, 12392, 15201, 15329, 15268, 12926, 12146, 14478, 12407, 14984, 
12312, 15254, 14540, + 9748, 14807, 14240, 14973, 14429, 14896, 14340, 13626, 13296, 13748, 14034, 14512, 13938, 14075, 10463, 12279, + 13725, 13769, 14082, 15017, 14399, 14554, 11962, 14740, 13014, 14949, 12602, 4207, 15279, 15245, 13949, 14717, + 14852, 14569, 14578, 14688, 13595, 14274, 13186, 15298, 10890, 14679, 14292, 15020, 12328, 13319, 14970, 14403, + 12221, 12514, 14560, 15054, 11584, 12616, 14929, 13614, 15114, 12193, 14369, 14663, 12226, 13755, 15236, 13930, + 13749, 14561, 13788, 14956, 14460, 14885, 13558, 11775, 10767, 14376, 14488, 11339, 14830, 12320, 13849, 13749, + 14860, 14486, 14729, 14509, 13765, 15173, 11156, 14565, 11008, 13863, 14602, 13646, 13963, 15182, 14596, 14834, + 12533, 12703, 11953, 12067, 15050, 14894, 14448, 13860, 15328, 13430, 13909, 14732, 14612, 10493, 14396, 14656, + 15025, 15119, 13958, 13975, 14895, 12402, 14223, 11828, 15036, 14947, 14424, 14376, 11434, 14053, 8567, 13179, + 14408, 13363, 15286, 14464, 11738, 13394, 15144, 12431, 15189, 14265, 15064, 14881, 15295, 15295, 8557, 10586, + 12952, 15260, 14092, 14124, 14668, 15226, 11319, 13491, 13800, 15068, 11429, 11224, 9824, 15222, 15204, 14138, + 8727, 15183, 13887, 15023, 14377, 13673, 14042, 14485, 13437, 15343, 13361, 15281, 13979, 13398, 14410, 11271, + 14979, 9571, 13415, 12346, 12476, 13600, 14570, 14760, 14497, 12979, 14972, 14886, 14227, 15009, 14465, 15321, + 15263, 12688, 14963, 14365, 8929, 14243, 14610, 14751, 15275, 15255, 14419, 12563, 14334, 14355, 10071, 11845, + 13255, 14501, 14972, 14980, 12465, 15266, 14162, 15173, 14199, 14343, 14527, 13565, 14592, 10939, 13855, 10734, + 13937, 14400, 13450, 13973, 13442, 15196, 14889, 14633, 15309, 14449, 14049, 15309, 13526, 14677, 14759, 13387, + 12744, 14395, 12549, 10981, 14264, 14498, 13358, 14448, 15041, 13476, 14235, 14832, 14438, 14723, 10402, 14260, + 14566, 12574, 13653, 14138, 11472, 13728, 10468, 13084, 14379, 14957, 14099, 14570, 12434, 13892, 12680, 14757, + 13836, 13983, 14784, 13491, 14602, 14085, 
12148, 12416, 15315, 14378, 14275, 14964, 10400, 14536, 14663, 14956, + 15314, 13723, 15217, 13396, 10401, 14311, 15040, 15339, 14517, 14452, 14012, 15169, 10181, 14047, 15308, 12960, + 10165, 14577, 14965, 14817, 15117, 15211, 15052, 14924, 15318, 15224, 13154, 14621, 14896, 11257, 14641, 14907, + 12450, 14676, 11513, 12553, 13024, 13483, 13928, 8957, 12539, 15234, 14870, 14843, 11461, 13071, 11168, 13252, + 13535, 13511, 13519, 15149, 13562, 14485, 14866, 12562, 12652, 14228, 14223, 14955, 15123, 14159, 14386, 13671, + 13019, 13751, 14176, 13452, 11535, 14130, 14838, 14792, 13903, 14854, 12718, 15261, 13822, 12851, 15195, 14360, + 13995, 14370, 11450, 10324, 14247, 15148, 13200, 14940, 15057, 15117, 14571, 15039, 15082, 13588, 14806, 14126, + 14510, 11431, 14611, 11771, 12564, 14914, 14924, 11950, 15325, 14822, 13827, 10909, 15316, 14519, 14169, 15237, + 15091, 14798, 14649, 14353, 12800, 14684, 11605, 14345, 14764, 14375, 15094, 13591, 10784, 14883, 13636, 14691, + 14707, 14033, 13537, 14685, 14577, 15240, 14460, 12099, 11618, 11383, 15032, 13605, 13972, 14725, 9233, 14718, + 12775, 13693, 13567, 14476, 15090, 12854, 14964, 14360, 13598, 14020, 15253, 13596, 14759, 13687, 12102, 13217, + 11806, 14822, 15011, 15035, 9617, 14554, 13569, 13343, 14449, 13441, 12767, 15068, 12854, 14951, 15026, 14340, + 14139, 14925, 14095, 14888, 12137, 12688, 13616, 15055, 14053, 14816, 13795, 15167, 15031, 13086, 13906, 13795, + 11788, 14148, 15315, 14385, 14745, 14748, 15352, 13335, 13673, 14737, 12938, 13395, 13490, 13840, 14387, 12581, + 14668, 14713, 14798, 15239, 14530, 13935, 14379, 13838, 12848, 14632, 12100, 14583, 13805, 14741, 13570, 12476, + 15110, 14601, 14807, 15123, 15306, 15104, 11392, 14511, 15074, 13200, 13915, 15035, 14563, 10954, 14716, 15207, + 15241, 13689, 13224, 14757, 15310, 13238, 14734, 11998, 13943, 13347, 13723, 14468, 13594, 14684, 15285, 13907, + 14816, 13582, 10841, 14993, 13810, 14253, 14902, 10932, 13212, 13962, 13745, 12948, 14250, 8326, 12783, 
12359, + 14784, 14588, 13381, 14987, 12470, 14579, 14026, 14671, 15216, 13474, 13224, 11657, 14796, 14591, 14397, 14915, + 13538, 12960, 14552, 13012, 15062, 14000, 14771, 14934, 14001, 12255, 14053, 10839, 12437, 13472, 13988, 14843, + 15073, 13905, 13685, 12805, 13894, 13376, 12158, 10764, 15280, 11083, 13846, 13436, 15185, 14917, 12712, 15006, + 13516, 14568, 13568, 14498, 12667, 11524, 12850, 15031, 13412, 14928, 14868, 15082, 14789, 13891, 10267, 15039, + 14642, 15208, 10778, 13786, 14939, 14993, 14711, 12220, 13708, 14071, 12921, 14729, 14559, 15046, 13346, 13740, + 13509, 14869, 12282, 14980, 12975, 14355, 14103, 13824, 14588, 14569, 14754, 15074, 13823, 9344, 14737, 12608, + 15257, 14931, 14479, 14593, 12428, 14186, 1251, 14210, 14612, 13425, 14918, 13034, 15093, 12495, 15064, 13589, + 13711, 15221, 15127, 15012, 14870, 14510, 14993, 13773, 12710, 12327, 15283, 14424, 14516, 12383, 14967, 15235, + 14403, 13580, 14495, 14726, 14364, 11888, 14555, 12262, 11100, 13089, 14959, 14463, 14677, 14516, 14003, 14527, + 11030, 13391, 15268, 14668, 15047, 14021, 13834, 13079, 10269, 14882, 13604, 12026, 15232, 12342, 14528, 10078, + 13503, 14451, 14824, 14959, 14065, 13666, 15171, 13571, 15328, 15116, 10610, 14541, 15339, 13437, 9224, 14003, + 14470, 14422, 13379, 13540, 15337, 15160, 14535, 8893, 14092, 11671, 11865, 15331, 15005, 11501, 11219, 14469, + 10057, 14628, 13781, 14751, 14856, 14415, 13089, 14781, 14328, 13795, 14293, 12455, 14986, 13388, 11341, 14393, + 13073, 14798, 13135, 14801, 14668, 13101, 15066, 13711, 13899, 14659, 12050, 12782, 14766, 14342, 14894, 14414, + 12589, 14820, 12695, 10967, 14785, 14681, 14340, 15250, 14677, 14727, 14647, 10938, 12421, 14361, 12232, 14415, + 14724, 12692, 14164, 14460, 12600, 13011, 15184, 12804, 13079, 15194, 14766, 14657, 12201, 14970, 14938, 13986, + 14143, 12484, 11623, 15267, 14609, 15264, 14540, 14976, 13032, 14593, 15096, 15320, 14942, 13496, 13409, 12647, + 11728, 10676, 14263, 14598, 14504, 14704, 15318, 13450, 
14781, 13862, 12445, 10070, 12707, 14619, 14630, 13348, + 14465, 11300, 13621, 14051, 14438, 12171, 13553, 12738, 14573, 14584, 14661, 11763, 13699, 12593, 14481, 12914, + 11973, 12987, 14569, 12038, 7242, 12527, 15270, 14239, 9585, 15280, 14249, 15272, 13540, 14109, 14432, 13916, + 14063, 14730, 15311, 14703, 15078, 12042, 14673, 14633, 15135, 14652, 11984, 11741, 14824, 14353, 13106, 11882, + 15251, 14291, 13552, 12468, 14176, 9438, 12814, 15020, 13280, 10518, 13570, 13153, 12870, 13103, 15027, 7803, + 15335, 13963, 14181, 11399, 13349, 14195, 14078, 12581, 15224, 14539, 14442, 15280, 14479, 15230, 13681, 14472, + 14464, 13737, 15227, 14901, 14047, 14832, 13859, 14506, 14017, 13233, 15203, 12632, 13655, 15213, 15306, 14959, + 14688, 15129, 8998, 13952, 14642, 15089, 8105, 14479, 15158, 14056, 15158, 14759, 14886, 14368, 14382, 14600, + 13111, 14706, 13622, 15026, 14393, 12847, 14385, 10721, 13851, 15274, 13355, 14371, 15236, 14603, 15198, 14746, + 9661, 15100, 14948, 8859, 13935, 14916, 11960, 14462, 14558, 15101, 14070, 15307, 13332, 10316, 14389, 12402, + 15210, 14762, 13583, 14845, 14966, 11957, 14268, 11349, 14820, 12403, 14424, 14844, 14755, 11499, 13560, 15318, + 11113, 14121, 13195, 15045, 14968, 9465, 14645, 14881, 13439, 11900, 15125, 14617, 15300, 14627, 12690, 14779, + 13436, 14655, 14988, 13849, 13013, 13649, 14589, 13820, 13976, 14817, 15304, 15206, 10485, 14948, 13057, 14079, + 12583, 12447, 14549, 14011, 14077, 14284, 14339, 15252, 14701, 14717, 12784, 14869, 10250, 14846, 14800, 14028, + 13719, 14862, 12420, 14390, 14820, 14895, 14086, 14696, 14957, 14891, 11613, 15359, 15127, 14588, 13483, 15218, + 14545, 14817, 14199, 13481, 13355, 15230, 14637, 14398, 15054, 14679, 14338, 15018, 14600, 15091, 14599, 14621, + 14631, 12951, 13681, 10298, 14265, 15345, 14501, 14869, 13033, 12999, 11309, 12505, 15214, 14879, 12948, 14570, + 12623, 12470, 11020, 14739, 14674, 14943, 12803, 15269, 12883, 14961, 14465, 14498, 14748, 12592, 15217, 14808, + 12425, 
14955, 14452, 13940, 13525, 11593, 14916, 11767, 13629, 13849, 11996, 13704, 14733, 14625, 11807, 14576, + 14397, 14630, 14495, 14048, 14106, 14197, 14870, 13943, 12174, 12895, 13678, 8500, 14486, 12702, 13910, 14722, + 12134, 15155, 14551, 12937, 13620, 12966, 12192, 15178, 15198, 14594, 15172, 13094, 13853, 14555, 14786, 14858, + 12356, 15083, 13654, 15238, 15212, 13888, 15265, 14800, 12472, 13561, 15101, 13966, 9297, 7718, 13595, 15321, + 13695, 14832, 14805, 13931, 14254, 15331, 15270, 13026, 13807, 12704, 15182, 15265, 14949, 14337, 12667, 15157, + 13039, 15359, 13439, 13598, 15248, 14668, 14795, 14493, 15026, 14793, 12233, 14381, 12868, 15182, 15163, 14074, + 14872, 13624, 14422, 15264, 12642, 12203, 14094, 14862, 12576, 12862, 15345, 13385, 14590, 14154, 14767, 12658, + 14454, 15267, 11474, 13631, 11282, 13130, 15211, 14051, 15328, 15344, 14003, 14402, 14959, 14974, 14588, 13795, + 11742, 13502, 12620, 11759, 14468, 13554, 12557, 13760, 14286, 14093, 13776, 14996, 13741, 13975, 14253, 14818, + 13164, 13384, 15340, 11846, 14769, 13743, 13093, 14280, 13556, 14874, 12759, 14763, 10130, 14742, 13669, 10882, + 14461, 15198, 13894, 13515, 14125, 14696, 14069, 10438, 12810, 13851, 14211, 14980, 13944, 15066, 13384, 14064, + 14460, 14797, 13333, 12514, 15208, 12551, 13585, 14870, 9470, 14930, 13814, 11373, 15252, 12233, 13965, 13388, + 14611, 10464, 15278, 14884, 14533, 11960, 15342, 14535, 11616, 12528, 15245, 14702, 14641, 14540, 13549, 8563, + 14795, 12319, 13934, 13642, 15214, 14742, 14593, 14751, 14830, 14292, 14702, 14279, 15049, 14051, 13046, 11000, + 15154, 14736, 14750, 15271, 12801, 11598, 13480, 14382, 13598, 13536, 12117, 14875, 14536, 14657, 11916, 11600, + 14100, 15142, 15115, 15149, 13563, 13454, 14731, 15310, 14619, 14543, 13043, 12939, 15239, 13748, 15244, 13766, + 14330, 14323, 13314, 14867, 13520, 11324, 13489, 14845, 13247, 14608, 15012, 10500, 12633, 12687, 15054, 15122, + 15296, 12923, 15303, 14231, 13923, 15245, 15059, 14776, 14617, 10516, 
15292, 14861, 13865, 14886, 12492, 12037, + 14785, 13802, 14569, 14701, 15177, 14083, 11290, 9304, 14408, 12268, 13442, 15049, 14351, 14738, 14804, 15272, + 12334, 15242, 12255, 12570, 14869, 14325, 11792, 11614, 14725, 15044, 12632, 14412, 12727, 14972, 13828, 10613, + 13998, 15140, 15282, 13392, 12760, 12322, 11867, 15231, 13389, 14690, 12989, 14602, 14994, 12200, 13204, 15268, + 12730, 14602, 13545, 12362, 13816, 14868, 14346, 13982, 14985, 10273, 8287, 14323, 14620, 10676, 12426, 14337, + 14887, 13816, 14917, 12212, 3789, 15323, 13072, 15186, 14260, 13689, 13027, 14080, 13828, 13522, 12530, 14791, + 15244, 14710, 14539, 11030, 13002, 13768, 13666, 14973, 14078, 14897, 12024, 10366, 10070, 11930, 15323, 14611, + 13427, 15059, 11738, 13179, 11841, 15348, 13687, 14942, 12301, 13371, 11668, 13710, 14477, 14595, 13411, 13918, + 8807, 15154, 14461, 14349, 14867, 13363, 12396, 14470, 14831, 14355, 15114, 14241, 15352, 14076, 14949, 12705, + 12945, 15079, 14888, 15300, 12343, 14632, 12505, 14740, 12189, 12988, 13172, 13499, 14084, 13897, 14780, 14717, + 14874, 14332, 7751, 13196, 14422, 13651, 14571, 14819, 15055, 14024, 14624, 9993, 12828, 15203, 14358, 11181, + 15158, 14716, 12545, 14968, 15013, 13623, 12838, 15269, 14248, 15055, 14834, 14796, 14972, 15095, 14443, 14937, + 11314, 13719, 12389, 14472, 12850, 13153, 14451, 10010, 13749, 13931, 13858, 13430, 12745, 15108, 13710, 15064, + 11351, 14715, 13837, 11134, 15351, 13405, 10683, 14373, 13466, 14529, 14497, 13867, 14697, 11899, 15357, 14421, + 13063, 13351, 11697, 12392, 15346, 11299, 13777, 14500, 14449, 9399, 14455, 14249, 13839, 15153, 15329, 9648, + 13288, 14928, 14558, 15256, 14527, 14610, 6957, 14860, 11899, 11384, 12455, 14468, 14760, 15129, 15329, 14971, + 10667, 15013, 13120, 14647, 13421, 15244, 14933, 13801, 14948, 14831, 14650, 15092, 14662, 14511, 14775, 12434, + 15194, 12344, 12864, 14900, 13814, 11749, 12792, 14352, 12126, 15146, 13086, 14810, 15155, 15062, 11356, 11769, + 15317, 15107, 15099, 
14672, 14393, 11988, 14491, 14469, 14345, 15105, 15089, 14877, 14265, 14534, 14892, 15310, + 10084, 15090, 12064, 14968, 15172, 14067, 14014, 15003, 14903, 11354, 15282, 15165, 15136, 14587, 14726, 15184, + 12837, 15242, 14180, 14412, 13345, 11702, 14664, 12988, 14494, 15231, 14953, 15249, 14527, 13863, 13839, 14643, + 13325, 14034, 13161, 15300, 10050, 11826, 15253, 13136, 13378, 10417, 14031, 14570, 9797, 14083, 14721, 15076, + 14926, 14748, 13256, 14437, 10483, 12596, 15051, 12960, 13572, 13908, 13527, 15293, 14801, 14762, 15354, 12330, + 14578, 14904, 12498, 14392, 12955, 12283, 14710, 13880, 14005, 15350, 15206, 10661, 14441, 14069, 15185, 12883, + 14171, 13575, 12370, 14605, 14941, 14790, 14974, 14571, 15005, 12875, 13179, 14709, 15287, 13482, 11780, 9154, + 14269, 14466, 15124, 12203, 15201, 15050, 13756, 15277, 14100, 13084, 12627, 14347, 14197, 13603, 9588, 13483, + 14154, 15018, 15098, 14057, 14485, 14814, 15000, 14373, 12705, 14398, 13668, 5150, 10334, 14401, 14309, 13423, + 12276, 12948, 13828, 13612, 10974, 15267, 14944, 12361, 14963, 13413, 13243, 15320, 13418, 14524, 15191, 13148, + 13525, 15115, 14412, 14817, 13614, 13915, 14137, 13442, 12939, 15329, 14779, 14467, 14369, 13736, 15158, 12411, + 13631, 13751, 15001, 13926, 14802, 11926, 11155, 13274, 15139, 14152, 13263, 15189, 13912, 14493, 15326, 13967, + 13931, 14076, 13916, 13048, 14763, 13721, 13714, 12831, 14857, 13459, 13570, 14638, 14789, 14649, 12828, 13652, + 14410, 12735, 13380, 14745, 14562, 13761, 14200, 9107, 12714, 15176, 13760, 14591, 10844, 12765, 13350, 14948, + 14561, 14646, 13909, 15335, 10395, 14866, 13809, 14899, 13232, 13867, 15258, 14966, 13377, 12875, 13562, 14392, + 14014, 12780, 8114, 14824, 6941, 14587, 12403, 15069, 14677, 11113, 14683, 9762, 12132, 14430, 12494, 15147, + 14445, 12808, 13465, 14686, 15341, 14949, 13338, 13952, 15356, 15234, 14561, 15279, 14902, 15177, 13498, 15046, + 14943, 15104, 11397, 13129, 15085, 6621, 14707, 10016, 11264, 13311, 13916, 12722, 13155, 
7998, 15069, 12528, + 14876, 15332, 15279, 13959, 15299, 14463, 15252, 14049, 14352, 15178, 11213, 13983, 14232, 13000, 14849, 13801, + 12734, 13819, 15174, 14971, 14932, 12778, 14033, 13729, 15356, 14942, 14221, 14407, 13948, 13551, 11357, 13849, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, std::vector({4, 4, 32, 16}), + std::vector({32, 1, 16, 16}), DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferHwcnFz, uint8_1c_2n_pad_cn) { + uint8_t data[3 * 3 * 30 * 20] = { + 125, 165, 84, 250, 90, 201, 42, 35, 194, 76, 108, 1, 163, 194, 146, 247, 138, 181, 145, 159, 85, 36, 236, + 254, 208, 66, 249, 45, 64, 248, 69, 1, 16, 161, 132, 24, 2, 83, 105, 188, 69, 16, 222, 28, 216, 131, + 23, 103, 108, 233, 74, 151, 110, 194, 179, 67, 219, 178, 42, 220, 166, 204, 119, 208, 224, 81, 67, 189, 123, + 153, 63, 249, 140, 167, 75, 238, 162, 203, 244, 38, 119, 216, 11, 87, 236, 242, 23, 132, 11, 243, 69, 60, + 112, 190, 37, 37, 246, 160, 203, 76, 70, 242, 137, 18, 3, 147, 188, 146, 144, 119, 30, 48, 154, 150, 71, + 89, 7, 195, 59, 102, 157, 63, 45, 113, 219, 13, 231, 120, 243, 115, 26, 100, 12, 74, 10, 111, 17, 113, + 4, 242, 170, 244, 72, 149, 218, 238, 39, 95, 80, 123, 194, 191, 56, 85, 102, 120, 29, 134, 180, 35, 14, + 187, 134, 72, 109, 4, 237, 99, 146, 72, 46, 141, 138, 198, 190, 210, 222, 7, 105, 101, 11, 78, 212, 251, + 21, 97, 48, 90, 107, 103, 182, 247, 161, 151, 169, 97, 103, 106, 34, 220, 225, 56, 31, 16, 58, 83, 32, + 136, 251, 241, 194, 21, 38, 40, 118, 82, 72, 145, 222, 98, 215, 206, 64, 241, 51, 170, 122, 110, 178, 66, + 156, 61, 225, 193, 10, 80, 137, 51, 94, 49, 182, 35, 177, 158, 194, 194, 216, 0, 167, 114, 234, 59, 101, + 81, 176, 45, 
20, 220, 26, 209, 243, 78, 53, 35, 158, 44, 14, 245, 215, 170, 1, 167, 120, 243, 169, 22, + 39, 74, 110, 20, 153, 88, 246, 190, 207, 65, 125, 97, 42, 109, 84, 46, 250, 175, 175, 235, 156, 230, 194, + 206, 117, 218, 64, 210, 249, 11, 110, 27, 92, 190, 34, 76, 48, 41, 32, 73, 19, 234, 30, 243, 91, 9, + 11, 6, 241, 12, 190, 77, 180, 181, 249, 55, 76, 39, 129, 50, 87, 70, 64, 241, 88, 46, 169, 60, 213, + 160, 42, 47, 174, 183, 58, 220, 234, 167, 107, 130, 91, 155, 67, 32, 206, 177, 229, 27, 30, 230, 110, 106, + 102, 251, 86, 74, 191, 44, 194, 28, 23, 95, 227, 89, 18, 106, 65, 172, 134, 136, 118, 4, 107, 84, 225, + 242, 28, 53, 108, 96, 21, 212, 240, 6, 118, 153, 90, 221, 122, 85, 18, 31, 4, 32, 74, 141, 242, 45, + 17, 104, 24, 199, 101, 114, 25, 204, 22, 148, 54, 248, 249, 221, 58, 2, 153, 184, 206, 244, 194, 5, 250, + 45, 98, 113, 5, 37, 222, 82, 199, 161, 106, 178, 251, 141, 214, 183, 4, 62, 36, 208, 34, 32, 86, 141, + 101, 71, 208, 213, 230, 212, 196, 211, 76, 27, 42, 20, 203, 153, 90, 51, 203, 224, 216, 209, 134, 136, 115, + 42, 62, 77, 91, 14, 139, 13, 75, 163, 1, 34, 196, 241, 225, 135, 189, 191, 166, 242, 63, 175, 180, 74, + 202, 79, 187, 2, 220, 136, 54, 24, 97, 204, 28, 89, 165, 167, 32, 96, 124, 244, 205, 108, 185, 6, 16, + 197, 140, 56, 188, 223, 179, 200, 31, 11, 92, 180, 103, 175, 143, 138, 200, 151, 67, 168, 140, 234, 253, 183, + 249, 7, 61, 33, 1, 134, 99, 239, 107, 127, 185, 200, 22, 205, 182, 42, 161, 181, 187, 173, 150, 29, 66, + 147, 116, 19, 132, 33, 0, 217, 88, 116, 30, 185, 223, 233, 186, 72, 195, 170, 95, 234, 139, 5, 215, 32, + 31, 121, 188, 121, 6, 77, 124, 154, 38, 159, 137, 218, 119, 239, 53, 87, 243, 130, 217, 194, 180, 101, 72, + 26, 233, 14, 153, 247, 23, 9, 168, 79, 206, 62, 96, 2, 84, 119, 241, 118, 91, 240, 162, 61, 227, 135, + 98, 223, 112, 86, 67, 244, 181, 187, 113, 125, 240, 45, 62, 186, 247, 19, 20, 241, 185, 165, 25, 84, 68, + 138, 109, 75, 218, 95, 37, 116, 172, 70, 190, 176, 56, 104, 169, 184, 68, 164, 128, 106, 207, 2, 253, 36, 
+ 109, 198, 4, 112, 6, 198, 215, 130, 11, 233, 165, 79, 169, 214, 110, 67, 244, 196, 179, 158, 57, 110, 126, + 136, 58, 119, 106, 124, 8, 63, 42, 57, 12, 183, 69, 75, 168, 228, 8, 204, 112, 200, 57, 186, 124, 148, + 27, 109, 175, 234, 111, 41, 186, 23, 134, 174, 25, 150, 38, 42, 47, 74, 101, 27, 140, 18, 115, 251, 111, + 231, 162, 179, 169, 53, 120, 145, 1, 79, 51, 63, 174, 7, 208, 92, 69, 12, 56, 25, 182, 28, 108, 36, + 109, 190, 107, 1, 103, 130, 86, 41, 236, 6, 97, 41, 110, 78, 78, 17, 149, 66, 189, 175, 148, 50, 74, + 37, 73, 230, 180, 201, 77, 208, 206, 110, 158, 26, 115, 206, 97, 49, 187, 165, 139, 253, 87, 253, 116, 74, + 31, 159, 78, 179, 8, 65, 111, 16, 34, 7, 134, 109, 97, 55, 181, 244, 16, 137, 95, 112, 82, 129, 97, + 47, 215, 64, 246, 95, 98, 254, 160, 237, 71, 76, 49, 223, 47, 74, 45, 162, 197, 156, 34, 19, 222, 58, + 138, 208, 121, 218, 199, 45, 49, 190, 211, 167, 62, 54, 25, 101, 86, 212, 147, 28, 236, 253, 251, 67, 12, + 178, 152, 199, 9, 218, 9, 164, 195, 250, 241, 249, 143, 222, 250, 94, 12, 36, 232, 219, 128, 148, 186, 21, + 100, 50, 32, 205, 89, 252, 201, 99, 153, 71, 70, 227, 199, 189, 139, 20, 100, 88, 12, 70, 90, 212, 70, + 51, 37, 76, 186, 252, 19, 245, 18, 251, 176, 110, 239, 240, 29, 200, 232, 79, 192, 71, 172, 190, 97, 217, + 150, 29, 36, 31, 110, 183, 54, 197, 38, 163, 190, 188, 9, 234, 186, 180, 229, 19, 126, 196, 242, 27, 180, + 198, 206, 197, 1, 136, 210, 245, 185, 251, 135, 65, 150, 194, 190, 174, 41, 107, 151, 70, 225, 21, 227, 4, + 65, 220, 92, 210, 68, 145, 121, 13, 155, 197, 122, 4, 1, 133, 198, 134, 148, 132, 195, 133, 189, 129, 114, + 231, 23, 110, 91, 16, 254, 123, 223, 18, 157, 191, 183, 131, 25, 105, 15, 28, 195, 14, 217, 71, 75, 218, + 145, 35, 206, 193, 178, 240, 73, 219, 30, 135, 144, 177, 32, 30, 252, 82, 110, 188, 21, 196, 172, 180, 52, + 166, 191, 107, 210, 133, 67, 114, 5, 91, 140, 74, 101, 220, 95, 69, 253, 179, 67, 87, 148, 141, 12, 78, + 84, 6, 221, 203, 214, 0, 85, 30, 22, 248, 69, 222, 67, 84, 176, 14, 105, 188, 
77, 75, 152, 188, 88, + 89, 180, 122, 164, 76, 149, 254, 45, 132, 151, 142, 85, 188, 44, 52, 88, 170, 209, 47, 220, 109, 225, 178, + 56, 143, 26, 157, 69, 216, 190, 138, 238, 67, 105, 18, 127, 201, 192, 254, 102, 17, 235, 160, 1, 109, 146, + 51, 209, 65, 233, 39, 106, 215, 128, 189, 113, 89, 73, 86, 116, 32, 149, 97, 45, 116, 49, 46, 227, 67, + 240, 44, 158, 98, 31, 238, 88, 104, 136, 175, 85, 101, 37, 10, 89, 108, 120, 194, 252, 101, 252, 162, 25, + 107, 134, 2, 181, 135, 199, 105, 143, 235, 21, 79, 154, 73, 93, 153, 158, 106, 20, 242, 140, 62, 209, 148, + 128, 52, 189, 216, 143, 29, 135, 120, 170, 227, 88, 209, 143, 73, 44, 87, 227, 162, 79, 48, 45, 246, 171, + 112, 61, 63, 170, 101, 99, 31, 9, 140, 77, 93, 250, 157, 184, 178, 242, 174, 76, 42, 67, 96, 105, 233, + 169, 100, 200, 96, 181, 19, 111, 132, 143, 4, 244, 97, 134, 140, 207, 135, 118, 165, 86, 194, 141, 44, 21, + 232, 169, 87, 235, 231, 146, 196, 225, 37, 47, 24, 215, 3, 200, 216, 63, 29, 89, 117, 90, 205, 129, 128, + 126, 174, 220, 78, 4, 95, 79, 26, 126, 132, 214, 153, 165, 79, 228, 29, 61, 109, 100, 82, 20, 164, 246, + 101, 141, 234, 126, 89, 70, 102, 45, 245, 161, 125, 48, 158, 253, 141, 194, 13, 14, 94, 111, 88, 15, 178, + 119, 4, 52, 99, 78, 143, 165, 140, 11, 222, 72, 3, 103, 226, 62, 177, 39, 76, 237, 110, 32, 172, 215, + 143, 0, 167, 18, 119, 163, 216, 176, 202, 54, 191, 81, 102, 207, 28, 171, 118, 239, 244, 82, 23, 98, 140, + 151, 122, 1, 140, 35, 251, 120, 203, 203, 139, 2, 64, 153, 224, 37, 120, 136, 37, 211, 25, 107, 69, 94, + 190, 141, 48, 57, 145, 251, 132, 245, 22, 36, 219, 207, 130, 85, 42, 219, 114, 181, 223, 66, 247, 65, 224, + 176, 118, 140, 169, 170, 8, 239, 92, 196, 77, 98, 59, 211, 141, 247, 222, 41, 128, 142, 22, 210, 129, 204, + 80, 168, 124, 159, 76, 248, 39, 146, 134, 112, 31, 202, 18, 208, 119, 43, 9, 66, 24, 251, 23, 146, 18, + 241, 61, 218, 169, 186, 196, 19, 152, 115, 33, 188, 171, 178, 115, 240, 82, 70, 251, 112, 188, 33, 48, 245, + 180, 53, 135, 210, 114, 99, 212, 57, 107, 
241, 116, 76, 129, 6, 109, 83, 209, 125, 15, 77, 234, 250, 116, + 175, 191, 234, 59, 141, 51, 58, 48, 46, 154, 128, 19, 135, 112, 109, 227, 227, 125, 201, 32, 52, 150, 158, + 12, 125, 172, 28, 111, 243, 100, 149, 77, 159, 45, 147, 23, 47, 144, 189, 176, 84, 155, 208, 213, 252, 4, + 228, 232, 253, 108, 206, 23, 183, 75, 248, 226, 253, 170, 147, 106, 123, 217, 89, 155, 76, 247, 61, 80, 32, + 47, 197, 72, 10, 223, 14, 222, 40, 82, 16, 137, 228, 214, 213, 213, 158, 125, 159, 154, 148, 250, 149, 24, + 198, 154, 227, 142, 59, 64, 15, 19, 20, 105, 89, 39, 228, 51, 99, 105, 245, 89, 182, 116, 94, 70, 181, + 59, 159, 84, 104, 16, 7, 230, 13, 66, 201, 191, 143, 217, 10, 79, 105, 169, 209, 202, 133, 194, 41, 33, + 183, 33, 10, 154, 8, 212, 141, 215, 192, 22, 100, 125, 44, 211, 82, 23, 228, 158, 240, 100, 70, 254, 25, + 44, 232, 199, 251, 206, 160, 78, 138, 76, 96, 245, 78, 167, 142, 32, 59, 61, 207, 248, 4, 5, 85, 170, + 151, 243, 56, 33, 130, 6, 226, 84, 249, 108, 236, 1, 178, 130, 14, 206, 54, 75, 136, 15, 167, 188, 201, + 59, 213, 147, 110, 64, 89, 31, 248, 87, 8, 113, 19, 59, 67, 8, 93, 10, 66, 185, 95, 0, 35, 181, + 231, 157, 117, 7, 230, 125, 107, 162, 15, 158, 61, 112, 101, 183, 100, 195, 253, 71, 248, 137, 92, 12, 245, + 243, 223, 64, 161, 242, 228, 99, 45, 223, 67, 1, 88, 227, 86, 82, 83, 162, 81, 81, 199, 67, 181, 231, + 126, 222, 22, 8, 33, 236, 65, 46, 209, 227, 67, 32, 170, 60, 188, 128, 126, 102, 42, 66, 49, 133, 26, + 219, 40, 231, 155, 179, 233, 246, 149, 203, 21, 126, 253, 140, 6, 197, 26, 247, 3, 251, 238, 172, 201, 86, + 124, 25, 197, 182, 254, 160, 91, 220, 135, 144, 216, 86, 189, 202, 32, 216, 250, 160, 197, 252, 105, 217, 93, + 224, 229, 147, 244, 30, 13, 82, 95, 240, 28, 89, 58, 181, 52, 108, 218, 129, 15, 156, 159, 138, 57, 175, + 89, 118, 74, 66, 175, 133, 24, 132, 198, 102, 218, 45, 236, 245, 202, 210, 132, 61, 84, 218, 80, 203, 11, + 167, 107, 93, 85, 223, 52, 29, 205, 199, 29, 85, 103, 12, 222, 240, 29, 87, 9, 214, 182, 45, 46, 198, + 214, 18, 65, 
2, 159, 92, 175, 93, 175, 1, 238, 180, 237, 193, 124, 152, 8, 217, 127, 18, 7, 81, 33, + 190, 28, 177, 6, 186, 40, 44, 221, 34, 178, 150, 241, 42, 139, 86, 224, 50, 74, 137, 230, 246, 14, 237, + 11, 90, 25, 108, 184, 73, 93, 159, 111, 73, 5, 126, 62, 39, 200, 188, 249, 195, 30, 47, 225, 147, 254, + 185, 99, 60, 208, 90, 226, 211, 92, 235, 191, 50, 60, 221, 98, 113, 248, 234, 108, 80, 198, 64, 74, 213, + 13, 13, 155, 200, 33, 240, 226, 180, 42, 173, 241, 252, 252, 184, 25, 124, 19, 106, 73, 191, 135, 4, 169, + 141, 167, 105, 143, 151, 211, 49, 117, 200, 160, 105, 178, 130, 102, 157, 51, 82, 54, 13, 204, 176, 247, 133, + 126, 146, 253, 208, 53, 78, 48, 114, 26, 41, 202, 59, 167, 132, 68, 37, 97, 25, 61, 220, 27, 142, 72, + 64, 57, 122, 64, 63, 253, 181, 128, 40, 235, 89, 172, 183, 252, 133, 126, 181, 0, 181, 254, 156, 148, 105, + 192, 250, 100, 31, 57, 169, 64, 174, 161, 137, 95, 243, 158, 64, 192, 220, 132, 32, 62, 15, 226, 238, 81, + 106, 214, 145, 143, 231, 106, 32, 34, 64, 10, 53, 90, 186, 210, 185, 23, 241, 181, 69, 34, 254, 38, 182, + 122, 2, 180, 201, 197, 23, 80, 147, 58, 200, 131, 198, 90, 152, 57, 30, 31, 169, 221, 74, 196, 126, 186, + 9, 254, 243, 70, 243, 90, 223, 127, 93, 34, 76, 106, 227, 10, 150, 180, 134, 71, 53, 142, 56, 199, 231, + 167, 209, 115, 52, 224, 160, 121, 203, 124, 182, 79, 123, 166, 39, 125, 121, 170, 180, 92, 234, 200, 134, 249, + 41, 78, 174, 194, 238, 211, 51, 54, 112, 163, 153, 233, 143, 164, 6, 121, 133, 198, 4, 113, 29, 38, 51, + 142, 245, 145, 68, 27, 102, 146, 44, 119, 18, 169, 165, 241, 177, 250, 135, 81, 127, 171, 76, 121, 76, 60, + 237, 166, 221, 229, 84, 230, 17, 190, 130, 112, 47, 207, 171, 127, 86, 241, 169, 147, 45, 171, 246, 181, 222, + 103, 132, 73, 111, 46, 177, 187, 151, 254, 251, 134, 197, 93, 253, 143, 214, 165, 66, 224, 113, 97, 131, 32, + 222, 64, 181, 168, 193, 225, 12, 242, 89, 199, 139, 166, 236, 195, 135, 33, 174, 58, 98, 23, 86, 32, 130, + 60, 151, 82, 207, 123, 173, 211, 44, 221, 165, 51, 69, 36, 238, 204, 
130, 135, 68, 10, 181, 201, 195, 15, + 72, 148, 119, 74, 114, 115, 102, 164, 185, 101, 107, 185, 221, 251, 67, 143, 208, 27, 80, 198, 95, 33, 189, + 237, 83, 129, 27, 33, 161, 98, 22, 113, 194, 46, 33, 177, 24, 141, 232, 218, 13, 47, 13, 139, 198, 5, + 34, 157, 5, 120, 68, 118, 45, 186, 48, 160, 218, 25, 55, 226, 213, 34, 104, 132, 178, 66, 149, 173, 81, + 154, 100, 170, 126, 10, 251, 65, 132, 130, 132, 207, 117, 26, 234, 13, 56, 181, 29, 191, 7, 29, 3, 91, + 190, 201, 159, 106, 79, 180, 97, 123, 63, 10, 38, 38, 195, 152, 108, 144, 146, 46, 10, 23, 146, 148, 101, + 224, 33, 88, 246, 12, 32, 179, 149, 45, 6, 253, 157, 36, 173, 189, 139, 20, 21, 55, 8, 101, 9, 157, + 137, 237, 154, 249, 90, 24, 119, 186, 81, 211, 251, 225, 21, 143, 99, 184, 222, 224, 113, 93, 142, 29, 89, + 185, 101, 147, 201, 123, 38, 165, 204, 204, 9, 243, 130, 246, 143, 83, 213, 6, 216, 5, 92, 218, 22, 142, + 201, 34, 253, 54, 168, 80, 177, 217, 45, 82, 104, 241, 222, 70, 198, 241, 196, 187, 153, 5, 143, 193, 145, + 135, 11, 47, 50, 146, 186, 246, 86, 218, 253, 47, 14, 168, 50, 91, 82, 235, 127, 174, 75, 137, 63, 236, + 198, 81, 190, 119, 154, 214, 92, 66, 0, 22, 235, 5, 226, 244, 141, 13, 254, 57, 162, 90, 17, 106, 209, + 240, 197, 6, 49, 86, 164, 230, 37, 17, 81, 194, 245, 130, 253, 167, 31, 150, 3, 226, 221, 64, 105, 83, + 247, 134, 96, 132, 221, 10, 161, 215, 151, 154, 194, 46, 71, 44, 250, 29, 228, 218, 48, 125, 133, 12, 70, + 86, 188, 182, 2, 109, 87, 225, 139, 106, 104, 51, 213, 46, 175, 194, 90, 24, 21, 29, 151, 154, 246, 0, + 125, 11, 186, 137, 105, 45, 192, 205, 114, 240, 170, 219, 139, 56, 134, 103, 205, 160, 115, 41, 124, 220, 87, + 177, 161, 242, 250, 220, 239, 114, 59, 171, 9, 56, 124, 31, 108, 184, 80, 135, 144, 222, 144, 26, 10, 205, + 163, 115, 243, 247, 237, 231, 91, 5, 173, 80, 82, 137, 124, 237, 79, 103, 7, 88, 48, 72, 176, 86, 23, + 139, 239, 244, 91, 244, 49, 127, 102, 195, 42, 103, 101, 167, 41, 156, 158, 213, 55, 92, 28, 254, 138, 33, + 35, 114, 83, 200, 249, 61, 25, 138, 
228, 67, 152, 168, 143, 141, 142, 33, 164, 47, 18, 83, 123, 11, 15, + 224, 12, 231, 84, 0, 132, 184, 48, 77, 81, 219, 163, 109, 96, 225, 15, 250, 29, 196, 212, 250, 206, 137, + 191, 127, 142, 104, 168, 146, 103, 5, 175, 148, 181, 23, 142, 47, 64, 133, 238, 50, 167, 189, 10, 235, 253, + 65, 22, 140, 217, 222, 28, 60, 27, 191, 43, 252, 215, 169, 34, 228, 117, 125, 113, 221, 132, 38, 100, 156, + 206, 73, 134, 67, 78, 180, 50, 77, 219, 75, 142, 94, 204, 170, 80, 52, 84, 200, 216, 105, 244, 173, 133, + 217, 151, 120, 78, 215, 188, 2, 241, 180, 214, 122, 91, 47, 150, 181, 132, 16, 59, 98, 27, 143, 3, 172, + 169, 122, 127, 101, 154, 52, 169, 86, 82, 167, 108, 198, 12, 166, 109, 231, 138, 79, 194, 234, 179, 70, 67, + 216, 55, 107, 13, 217, 150, 165, 109, 176, 183, 95, 175, 89, 35, 32, 80, 105, 186, 41, 95, 33, 93, 248, + 211, 127, 207, 171, 218, 203, 158, 26, 4, 205, 213, 110, 72, 68, 93, 248, 122, 7, 205, 47, 224, 248, 214, + 254, 173, 51, 79, 42, 114, 83, 107, 136, 44, 94, 240, 147, 220, 27, 32, 33, 62, 33, 46, 145, 4, 101, + 123, 8, 53, 156, 20, 105, 201, 101, 111, 136, 244, 97, 131, 193, 10, 172, 103, 187, 103, 88, 233, 253, 153, + 57, 67, 206, 188, 95, 227, 150, 9, 29, 17, 149, 77, 13, 138, 107, 38, 122, 246, 208, 199, 50, 12, 117, + 199, 48, 22, 65, 146, 27, 152, 196, 235, 187, 33, 84, 164, 163, 170, 240, 242, 230, 157, 91, 118, 238, 76, + 131, 21, 103, 180, 158, 67, 33, 170, 63, 249, 4, 140, 172, 22, 28, 224, 174, 112, 11, 218, 38, 177, 33, + 142, 193, 226, 245, 92, 104, 13, 239, 212, 34, 131, 179, 172, 11, 119, 119, 233, 92, 107, 108, 35, 81, 248, + 23, 107, 20, 179, 17, 170, 129, 26, 144, 98, 13, 139, 199, 152, 66, 234, 211, 9, 237, 144, 45, 75, 176, + 130, 56, 17, 80, 241, 98, 66, 88, 152, 166, 175, 209, 238, 239, 9, 164, 77, 247, 239, 187, 145, 83, 181, + 146, 61, 117, 56, 229, 237, 69, 28, 60, 107, 190, 165, 222, 215, 32, 129, 202, 17, 159, 93, 168, 127, 105, + 157, 58, 239, 161, 126, 123, 31, 188, 115, 227, 207, 109, 158, 70, 51, 67, 234, 37, 100, 127, 199, 
132, 99, + 73, 117, 182, 153, 15, 94, 168, 63, 136, 91, 224, 221, 3, 154, 216, 38, 121, 229, 135, 220, 32, 105, 168, + 197, 197, 96, 52, 122, 65, 210, 43, 185, 206, 28, 114, 197, 112, 86, 68, 153, 21, 136, 141, 121, 172, 212, + 1, 119, 163, 100, 144, 36, 67, 72, 249, 129, 10, 64, 186, 76, 189, 171, 212, 214, 154, 100, 32, 33, 2, + 138, 46, 85, 204, 55, 57, 176, 188, 112, 184, 104, 230, 65, 87, 8, 123, 234, 138, 225, 213, 131, 181, 189, + 63, 78, 27, 142, 19, 161, 178, 225, 0, 225, 101, 76, 177, 2, 184, 158, 196, 251, 230, 250, 250, 6, 81, + 248, 94, 129, 12, 94, 216, 0, 208, 252, 241, 179, 27, 242, 147, 212, 173, 27, 172, 7, 12, 34, 139, 247, + 240, 248, 25, 129, 254, 51, 216, 143, 78, 150, 235, 212, 193, 218, 169, 199, 226, 92, 61, 165, 247, 1, 73, + 150, 9, 34, 10, 161, 58, 81, 66, 189, 170, 134, 3, 69, 33, 82, 201, 42, 164, 129, 76, 187, 165, 100, + 32, 94, 229, 182, 73, 69, 5, 143, 232, 163, 254, 162, 70, 118, 70, 132, 147, 81, 51, 201, 150, 224, 225, + 8, 89, 136, 221, 253, 194, 47, 143, 216, 39, 54, 24, 61, 21, 130, 24, 235, 121, 135, 72, 160, 64, 169, + 74, 240, 236, 107, 24, 62, 252, 38, 186, 4, 88, 41, 5, 238, 130, 51, 187, 244, 74, 48, 24, 24, 252, + 228, 181, 146, 12, 186, 47, 181, 156, 86, 214, 226, 82, 120, 245, 86, 195, 64, 216, 239, 221, 36, 86, 246, + 226, 222, 42, 251, 31, 186, 217, 24, 167, 53, 65, 39, 9, 61, 247, 235, 198, 56, 149, 87, 215, 185, 172, + 146, 164, 193, 207, 113, 35, 226, 99, 119, 233, 44, 118, 199, 136, 66, 1, 110, 4, 196, 148, 132, 215, 132, + 236, 55, 212, 89, 245, 151, 201, 164, 223, 52, 130, 184, 55, 240, 80, 75, 107, 172, 114, 165, 94, 178, 150, + 197, 8, 195, 45, 244, 127, 169, 250, 173, 144, 30, 151, 141, 132, 99, 194, 234, 140, 34, 3, 224, 226, 179, + 177, 68, 230, 71, 250, 42, 188, 142, 98, 98, 193, 250, 3, 211, 176, 78, 164, 119, 125, 161, 192, 241, 36, + 205, 166, 52, 52, 92, 14, 239, 10, 13, 197, 66, 230, 39, 233, 191, 220, 136, 11, 11, 219, 0, 231, 248, + 183, 174, 73, 120, 6, 26, 254, 85, 217, 208, 250, 195, 109, 55, 
215, 235, 218, 82, 8, 224, 120, 66, 244, + 172, 150, 13, 134, 96, 22, 61, 72, 160, 52, 235, 62, 205, 132, 97, 232, 134, 138, 28, 5, 211, 252, 135, + 35, 87, 5, 138, 144, 190, 12, 148, 147, 39, 89, 39, 221, 147, 238, 253, 21, 157, 216, 55, 53, 177, 83, + 8, 122, 71, 62, 105, 203, 122, 111, 250, 108, 239, 234, 72, 218, 69, 168, 8, 132, 112, 21, 18, 46, 102, + 143, 55, 228, 94, 77, 173, 23, 229, 80, 185, 11, 212, 237, 122, 248, 195, 136, 85, 228, 94, 154, 57, 21, + 115, 94, 10, 250, 22, 69, 207, 97, 62, 243, 102, 210, 162, 96, 90, 197, 105, 73, 191, 90, 32, 166, 246, + 88, 228, 125, 222, 53, 197, 178, 2, 2, 217, 45, 216, 131, 231, 102, 175, 251, 187, 254, 162, 212, 9, 41, + 153, 188, 110, 101, 70, 37, 73, 94, 240, 20, 92, 21, 89, 26, 191, 154, 219, 67, 175, 203, 253, 42, 83, + 112, 158, 58, 114, 238, 154, 217, 14, 212, 112, 168, 138, 200, 18, 71, 155, 139, 69, 137, 64, 47, 210, 248, + 145, 159, 58, 129, 14, 225, 235, 35, 70, 19, 99, 19, 120, 194, 0, 5, 126, 122, 95, 189, 98, 34, 171, + 229, 71, 244, 138, 153, 176, 209, 34, 76, 19, 86, 19, 88, 232, 170, 232, 6, 217, 234, 92, 109, 145, 200, + 70, 222, 171, 30, 237, 221, 42, 224, 102, 121, 115, 197, 17, 20, 92, 100, 171, 41, 190, 236, 122, 202, 95, + 11, 198, 177, 204, 48, 150, 183, 149, 144, 212, 182, 121, 86, 119, 48, 20, 141, 168, 158, 158, 202, 90, 214, + 24, 144, 88, 182, 19, 13, 105, 16, 99, 80, 68, 205, 191, 198, 85, 241, 203, 71, 121, 184, 198, 21, 188, + 189, 171, 61, 131, 75, 185, 26, 156, 228, 222, 207, 3, 232, 202, 8, 229, 31, 200, 52, 61, 243, 248, 219, + 114, 136, 5, 233, 136, 235, 82, 87, 231, 41, 239, 132, 205, 144, 44, 139, 99, 217, 145, 249, 86, 159, 238, + 82, 130, 36, 33, 54, 105, 142, 52, 157, 193, 223, 118, 29, 16, 43, 114, 214, 84, 159, 25, 60, 25, 218, + 196, 15, 22, 220, 162, 138, 174, 96, 188, 49, 172, 204, 106, 140, 91, 199, 17, 242, 145, 96, 179, 34, 189, + 178, 224, 235, 245, 132, 159, 87, 57, 182, 174, 229, 102, 211, 86, 6, 210, 250, 33, 228, 41, 201, 155, 172, + 40, 5, 27, 41, 134, 224, 
232, 97, 87, 219, 28, 122, 219, 12, 224, 66, 11, 161, 22, 106, 130, 222, 38, + 145, 134, 175, 184, 135, 93, 146, 224, 61, 28, 119, 138, 59, 136, 143, 24, 206, 52, 250, 99, 195, 71, 26, + 163, 29, 201, 97, 252, 230, 88, 160, 229, 234, 217, 97, 242, 161, 228, 204, 98, 75, 81, 155, 51, 184, 123, + 50, 26, 120, 50, 222, 50, 23, 79, 179, 183, 175, 182, 227, 27, 164, 181, 158, 126, 4, 229, 252, 206, 81, + 65, 89, 230, 179, 82, 54, 64, 229, 147, 44, 34, 139, 36, 98, 244, 56, 12, 133, 229, 156, 249, 82, 163, + 77, 120, 142, 79, 92, 251, 109, 173, 94, 194, 226, 43, 181, 118, 253, 215, 204, 23, 26, 161, 253, 206, 251, + 159, 163, 46, 246, 114, 201, 243, 66, 20, 251, 91, 9, 188, 25, 39, 82, 22, 146, 26, 232, 222, 252, 81, + 228, 238, 250, 212, 61, 75, 3, 155, 129, 64, 45, 102, 222, 172, 194, 133, 227, 35, 135, 142, 144, 21, 100, + 42, 75, 254, 68, 249, 83, 102, 98, 51, 24, 58, 114, 214, 146, 0, 80, 20, 66, 205, 95, 21, 39, 150, + 87, 65, 221, 153, 44, 223, 195, 180, 90, 63, 232, 177, 27, 52, 122, 95, 251, 235, 4, 209, 253, 109, 154, + 238, 198, 9, 161, 172, 68, 24, 87, 113, 100, 209, 227, 86, 230, 42, 251, 203, 8, 43, 60, 44, 145, 159, + 24, 102, 226, 114, 96, 107, 103, 103, 187, 136, 217, 25, 2, 92, 57, 89, 180, 109, 236, 8, 119, 59, 130, + 182, 143, 1, 238, 71, 164, 78, 17, 139, 31, 32, 203, 201, 223, 89, 148, 23, 129, 222, 19, 243, 139, 10, + 236, 107, 83, 141, 10, 130, 158, 10, 72, 215, 54, 157, 114, 158, 6, 19, 74, 132, 142, 66, 14, 246, 47, + 252, 34, 104, 238, 142, 130, 228, 73, 236, 1, 63, 239, 32, 190, 81, 45, 46, 55, 208, 94, 126, 134, 149, + 64, 230, 6, 104, 39, 121, 183, 110, 29, 229, 42, 80, 138, 200, 64, 100, 184, 103, 11, 190, 51, 221, 201, + 146, 71, 27, 192, 68, 157, 199, 56, 218, 151, 161, 127, 7, 217, 173, 200, 60, 207, 137, 27, 211, 88, 67, + 41, 26, 131, 186, 16, 109, 216, 54, 143, 58, 74, 68, 165, 79, 5, 235, 133, 58, 155, 81, 177, 52, 95, + 50, 174, 62, 11, 234, 204, 224, 230, 251, 151, 144, 15, 215, 60, 95, 41, 21, 168, 239, 21, 251, 21, 60, + 88, 212, 
200, 140, 58, 196, 248, 242, 191, 92, 222, 166, 106, 122, 61, 94, 201, 63, 128, 163, 154, 225, 50, + 29, 26, 81, 138, 177, 56, 180, 54, 188, 139, 92, 141, 111, 251, 60, 14, 108, 218, 225, 37, 193, 54, 158, + 112, 159, 252, 156, 108, 115, 181, 201, 230, 119, 32, 57, 125, 206, 125, 126, 56, 142, 11, 196, 241, 61, 94, + 91, 52, 239, 60, 5, 207, 169, 35, 47, 116, 178, 223, 89, 55, 122, 29, 12, 34, 217, 238, 160, 253, 39, + 158, 75, 214, 216, 127, 52, 129, 233, 31, 145, 121, 196, 85, 243, 2, 155, 38, 31, 75, 11, 46, 65, 151, + 9, 33, 27, 58, 165, 132, 24, 3, 128, 58, 118, 131, 108, 5, 137, 5, 192, 40, 193, 86, 106, 185, 91, + 161, 146, 85, 115, 131, 216, 203, 249, 54, 157, 198, 72, 101, 114, 248, 71, 113, 163, 184, 82, 185, 87, 68, + 179, 222, 59, 75, 212, 183, 170, 248, 236, 227, 41, 248, 115, 99, 30, 202, 68, 217, 180, 50, 65, 168, 23, + 14, 21, 167, 146, 234, 213, 60, 11, 108, 159, 74, 195, 218, 181, 94, 61, 75, 42, 92, 52, 205, 56, 26, + 219, 33, 208, 154, 131, 163, 185, 145, 91, 120, 35, 43, 112, 59, 96, 84, 48, 130, 78, 184, 144, 32, 166, + 68, 172, 148, 196, 41, 12, 205, 85, 252, 66, 29, 56, 159, 81, 88, 177, 91, 86, 238, 39, 12, 129, 6, + 27, 212, 233, 9, 175, 3, 140, 45, 65, 75, 48, 243, 239, 107, 55, 63, 10, 91, 121, 245, 147, 59, 157, + 38, 118, 158, 244, 204, 130, 125, 183, 25, 206, 46, 130, 162, 238, 130, 253, 87, 102, 102, 59, 173, 36, 249, + 216, 46, 75, 185, 60, 149, 188, 93, 98, 6, 161, 102, 213, 69, 1, 72, 215, 75, 189, 6, 164, 201, 217, + 99, 113, 170, 103, 246, 230, 45, 66, 36, 81, 90, 68, 85, 227, 76, 30, 108, 167, 83, 95, 47, 107, 223, + 235, 133, 91, 199, 66, 70, 55, 152, 0, 185, 166, 219, 209, 144, 43, 111, 113, 151, 228, 157, 190, 173, 16, + 53, 79, 102, 171, 3, 44, 119, 151, 165, 163, 97, 34, 250, 159, 159, 161, 238, 38, 205, 137, 3, 169, 7, + 18, 67, 56, 115, 166, 116, 187, 220, 93, 179, 162, 198, 214, 86, 61, 14, 254, 186, 53, 226, 6, 200, 55, + 227, 202, 78, 87, 123, 121, 246, 133, 132, 223, 191, 124, 168, 162, 167, 101, 20, 138, 250, 115, 
87, 151, 127, + 52, 37, 126, 251, 193, 131, 38, 77, 40, 56, 199, 161, 222, 205, 240, 116, 198, 186, 129, 236, 81, 94, 159, + 48, 94, 208, 52, 137, 82, 52, 3, 219, 109, 242, 16, 143, 183, 111, 10, 108, 54, 166, 119, 70, 118, 101, + 206, 136, 209, 181, 2, 253, 149, 166, 139, 232, 71, 157, 220, 89, 222, 71, 44, 177, 194, 14, 55, 166, 71, + 158, 154, 187, 40, 119, 170, 46, 150, 16, 201, 146, 23, 121, 212, 38, 184, 7, 55, 181, 46, 188, 152, 53, + 86, 30, 85, 65, 96, 107, 75, 61, 40, 181, 108, 215, 64, 81, 172, 149, 128, 148, 53, 90, 67, 135, 13, + 192, 198, 235, 39, 202, 62, 94, 184, 95, 176, 25, 23, 121, 15, 182, 182, 169, 67, 153, 39, 89, 50, 144, + 184, 97, 132, 127, 217, 75, 125, 79, 53, 18, 191, 105, 183, 22, 199, 111, 254, 140, 138, 11, 249, 147, 107, + 121, 41, 55, 225, 101, 109, 214, 220, 244, 99, 182, 143, 147, 166, 194, 190, 129, 218, 244, 116, 229, 169, 222, + 190, 78, 42, 201, 16, 4, 165, 23, 253, 254, 229, 238, 3, 201, 133, 104, 213, 17, 129, 215, 58, 76, 193, + 232, 242, 43, 196, 145, 215, 203, 66, 71, 167, 191, 14, 41, 29, 81, 21, 160, 26, 90, 7, 9, 219, 186, + 84, 12, 71, 200, 23, 55, 118, 115, 155, 61, 235, 8, 107, 147, 62, 224, 96, 246, 23, 149, 81, 67, 56, + 21, 12, 248, 158, 25, 7, 16, 112, 127, 82, 31, 184, 243, 207, 151, 47, 93, 69, 242, 208, 192, 52, 250, + 84, 78, 237, 217, 83, 10, 163, 232, 121, 22, 3, 203, 164, 106, 203, 153, 145, 157, 156, 156, 78, 164, 47, + 232, 56, 151, 195, 3, 218, 246, 10, 162, 150, 207, 185, 236, 128, 82, 221, 16, 161, 15, 248, 209, 110, 19, + 135, 50, 22, 190, 96, 222, 24, 144, 133, 127, 174, 130, 13, 15, 78, 115, 233, 62, 111, 161, 30, 200, 26, + 141, 47, 240, 49, 243, 11, 50, 94, 231, 242, 37, 195, 93, 8, 186, 102, 195, 31, 253, 134, 219, 139, 56, + 244, 165, 192, 242, 204, 165, 99, 228, 87, 27, 45, 67, 84, 46, 225, 205, 121, 205, 186, 28, 2, 248, 249, + 90, 113, 214, 183, 84, 160, 148, 42, 183, 146, 189, 191, 81, 153, 150, 25, 44, 87, + }; + uint8_t ret[9 * 2 * 16 * 32] = { + 125, 85, 69, 166, 119, 70, 157, 170, 14, 
11, 225, 215, 182, 243, 153, 117, 91, 88, 206, 18, 118, 25, 5, + 101, 134, 166, 32, 103, 107, 0, 0, 0, 165, 36, 16, 204, 216, 242, 63, 244, 187, 78, 56, 206, 35, 78, + 88, 218, 9, 46, 177, 106, 153, 204, 37, 71, 136, 242, 96, 175, 127, 217, 0, 0, 84, 236, 222, 119, 11, + 137, 45, 72, 134, 212, 31, 64, 177, 53, 246, 64, 11, 169, 229, 65, 90, 22, 222, 208, 115, 63, 124, 143, + 185, 88, 0, 0, 250, 254, 28, 208, 87, 18, 113, 149, 72, 251, 16, 241, 158, 35, 190, 210, 6, 60, 27, + 172, 221, 148, 82, 213, 42, 175, 244, 138, 200, 116, 0, 0, 90, 208, 216, 224, 236, 3, 219, 218, 109, 21, + 58, 51, 194, 158, 207, 249, 241, 213, 30, 134, 122, 54, 199, 230, 62, 180, 205, 200, 22, 30, 0, 0, 201, + 66, 131, 81, 242, 147, 13, 238, 4, 97, 83, 170, 194, 44, 65, 11, 12, 160, 230, 136, 85, 248, 161, 212, + 77, 74, 108, 151, 205, 185, 0, 0, 42, 249, 23, 67, 23, 188, 231, 39, 237, 48, 32, 122, 216, 14, 125, + 110, 190, 42, 110, 118, 18, 249, 106, 196, 91, 202, 185, 67, 182, 223, 0, 0, 35, 45, 103, 189, 132, 146, + 120, 95, 99, 90, 136, 110, 0, 245, 97, 27, 77, 47, 106, 4, 31, 221, 178, 211, 14, 79, 6, 168, 42, + 233, 0, 0, 194, 64, 108, 123, 11, 144, 243, 80, 146, 107, 251, 178, 167, 215, 42, 92, 180, 174, 102, 107, + 4, 58, 251, 76, 139, 187, 16, 140, 161, 186, 0, 0, 76, 248, 233, 153, 243, 119, 115, 123, 72, 103, 241, + 66, 114, 170, 109, 190, 181, 183, 251, 84, 32, 2, 141, 27, 13, 2, 197, 234, 181, 72, 0, 0, 108, 69, + 74, 63, 69, 30, 26, 194, 46, 182, 194, 156, 234, 1, 84, 34, 249, 58, 86, 225, 74, 153, 214, 42, 75, + 220, 140, 253, 187, 195, 0, 0, 1, 1, 151, 249, 60, 48, 100, 191, 141, 247, 21, 61, 59, 167, 46, 76, + 55, 220, 74, 242, 141, 184, 183, 20, 163, 136, 56, 183, 173, 170, 0, 0, 163, 16, 110, 140, 112, 154, 12, + 56, 138, 161, 38, 225, 101, 120, 250, 48, 76, 234, 191, 28, 242, 206, 4, 203, 1, 54, 188, 249, 150, 95, + 0, 0, 194, 161, 194, 167, 190, 150, 74, 85, 198, 151, 40, 193, 81, 243, 175, 41, 39, 167, 44, 53, 45, + 244, 62, 153, 34, 24, 223, 7, 29, 234, 0, 0, 146, 132, 
179, 75, 37, 71, 10, 102, 190, 169, 118, 10, + 176, 169, 175, 32, 129, 107, 194, 108, 17, 194, 36, 90, 196, 97, 179, 61, 66, 139, 0, 0, 247, 24, 67, + 238, 37, 89, 111, 120, 210, 97, 82, 80, 45, 22, 235, 73, 50, 130, 28, 96, 104, 5, 208, 51, 241, 204, + 200, 33, 147, 5, 0, 0, 138, 2, 219, 162, 246, 7, 17, 29, 222, 103, 72, 137, 20, 39, 156, 19, 87, + 91, 23, 21, 24, 250, 34, 203, 225, 28, 31, 1, 116, 215, 0, 0, 181, 83, 178, 203, 160, 195, 113, 134, + 7, 106, 145, 51, 220, 74, 230, 234, 70, 155, 95, 212, 199, 45, 32, 224, 135, 89, 11, 134, 19, 32, 0, + 0, 145, 105, 42, 244, 203, 59, 4, 180, 105, 34, 222, 94, 26, 110, 194, 30, 64, 67, 227, 240, 101, 98, + 86, 216, 189, 165, 92, 99, 132, 31, 0, 0, 159, 188, 220, 38, 76, 102, 242, 35, 101, 220, 98, 49, 209, + 20, 206, 243, 241, 32, 89, 6, 114, 113, 141, 209, 191, 167, 180, 239, 33, 121, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 188, 72, 162, 20, 169, 165, 42, 111, 162, 108, 189, + 187, 97, 71, 49, 9, 100, 90, 192, 186, 150, 155, 254, 206, 52, 148, 105, 44, 105, 128, 0, 0, 121, 26, + 61, 241, 184, 79, 57, 41, 179, 36, 175, 165, 55, 76, 190, 218, 50, 212, 71, 180, 194, 197, 123, 193, 166, + 141, 188, 52, 18, 189, 0, 0, 6, 233, 227, 185, 68, 169, 12, 186, 169, 109, 148, 139, 181, 49, 211, 9, + 32, 70, 172, 229, 190, 122, 223, 178, 191, 12, 77, 88, 127, 113, 0, 0, 77, 14, 135, 165, 164, 214, 183, + 23, 53, 190, 50, 253, 244, 223, 167, 164, 205, 51, 190, 19, 174, 4, 18, 240, 107, 78, 75, 170, 201, 89, + 0, 0, 124, 153, 98, 25, 128, 110, 69, 134, 120, 107, 74, 87, 16, 47, 62, 195, 89, 37, 97, 126, 41, + 1, 157, 73, 210, 84, 152, 209, 192, 73, 0, 0, 154, 247, 223, 84, 106, 67, 75, 174, 145, 1, 37, 253, + 137, 74, 54, 250, 252, 76, 217, 196, 107, 133, 191, 219, 133, 6, 188, 47, 254, 86, 0, 0, 38, 23, 112, + 68, 207, 244, 168, 25, 1, 103, 73, 116, 95, 45, 25, 241, 201, 186, 150, 242, 151, 198, 183, 30, 67, 221, + 88, 220, 102, 116, 0, 0, 159, 9, 86, 138, 2, 196, 228, 150, 79, 130, 230, 74, 112, 162, 101, 249, 99, + 252, 29, 27, 70, 134, 131, 135, 114, 203, 89, 109, 17, 32, 0, 0, 137, 168, 67, 109, 253, 179, 8, 38, + 51, 86, 180, 31, 82, 197, 86, 143, 153, 19, 36, 180, 225, 148, 25, 144, 5, 214, 180, 225, 235, 149, 0, + 0, 218, 79, 244, 75, 36, 158, 204, 42, 63, 41, 201, 159, 129, 156, 212, 222, 71, 245, 31, 198, 21, 132, + 105, 177, 91, 0, 122, 178, 160, 97, 0, 0, 119, 206, 181, 218, 109, 57, 112, 47, 174, 236, 77, 78, 97, + 34, 147, 250, 70, 18, 110, 206, 227, 195, 15, 32, 140, 85, 164, 56, 1, 45, 0, 0, 239, 62, 187, 95, + 198, 110, 200, 74, 7, 6, 208, 179, 47, 19, 28, 94, 227, 251, 183, 197, 4, 133, 28, 30, 74, 30, 76, + 143, 109, 116, 0, 0, 53, 96, 113, 37, 4, 126, 57, 101, 208, 97, 206, 8, 215, 222, 236, 12, 199, 176, + 54, 1, 65, 189, 195, 252, 101, 22, 149, 26, 146, 49, 0, 0, 87, 2, 125, 116, 112, 136, 186, 27, 92, + 41, 110, 65, 64, 58, 253, 36, 189, 
110, 197, 136, 220, 129, 14, 82, 220, 248, 254, 157, 51, 46, 0, 0, + 243, 84, 240, 172, 6, 58, 124, 140, 69, 110, 158, 111, 246, 138, 251, 232, 139, 239, 38, 210, 92, 114, 217, + 110, 95, 69, 45, 69, 209, 227, 0, 0, 130, 119, 45, 70, 198, 119, 148, 18, 12, 78, 26, 16, 95, 208, + 67, 219, 20, 240, 163, 245, 210, 231, 71, 188, 69, 222, 132, 216, 65, 67, 0, 0, 217, 241, 62, 190, 215, + 106, 27, 115, 56, 78, 115, 34, 98, 121, 12, 128, 100, 29, 190, 185, 68, 23, 75, 21, 253, 67, 151, 190, + 233, 240, 0, 0, 194, 118, 186, 176, 130, 124, 109, 251, 25, 17, 206, 7, 254, 218, 178, 148, 88, 200, 188, + 251, 145, 110, 218, 196, 179, 84, 142, 138, 39, 44, 0, 0, 180, 91, 247, 56, 11, 8, 175, 111, 182, 149, + 97, 134, 160, 199, 152, 186, 12, 232, 9, 135, 121, 91, 145, 172, 67, 176, 85, 238, 106, 158, 0, 0, 101, + 240, 19, 104, 233, 63, 234, 231, 28, 66, 49, 109, 237, 45, 199, 21, 70, 79, 234, 65, 13, 16, 35, 180, + 87, 14, 188, 67, 215, 98, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 31, 134, 209, 79, 242, 134, 47, 79, 126, 119, 32, 239, 37, 207, 196, 248, 218, 245, 77, 227, 47, 253, + 40, 59, 159, 41, 240, 59, 178, 8, 0, 0, 238, 2, 148, 48, 174, 140, 24, 26, 89, 4, 172, 244, 120, + 130, 77, 39, 169, 180, 234, 125, 144, 170, 82, 64, 84, 33, 100, 61, 130, 113, 0, 0, 88, 181, 128, 45, + 76, 207, 215, 126, 70, 52, 215, 82, 136, 85, 98, 146, 186, 53, 250, 201, 189, 147, 16, 15, 104, 183, 70, + 207, 14, 19, 0, 0, 104, 135, 52, 246, 42, 135, 3, 132, 102, 99, 143, 23, 37, 42, 59, 134, 196, 135, + 116, 32, 176, 106, 137, 19, 16, 33, 254, 248, 206, 59, 0, 0, 136, 199, 189, 171, 67, 118, 200, 214, 45, + 78, 0, 98, 211, 219, 211, 112, 19, 210, 175, 52, 84, 123, 228, 20, 7, 10, 25, 4, 54, 67, 0, 0, + 175, 105, 216, 112, 96, 165, 216, 153, 245, 143, 167, 140, 25, 114, 141, 31, 152, 114, 191, 150, 155, 217, 214, + 105, 230, 154, 44, 5, 75, 8, 0, 0, 85, 143, 143, 61, 105, 86, 63, 165, 161, 165, 18, 151, 107, 181, + 247, 202, 115, 99, 234, 158, 208, 89, 213, 89, 13, 8, 232, 85, 136, 93, 0, 0, 101, 235, 29, 63, 233, + 194, 29, 79, 125, 140, 119, 122, 69, 223, 222, 18, 33, 212, 59, 12, 213, 155, 213, 39, 66, 212, 199, 170, + 15, 10, 0, 0, 37, 21, 135, 170, 169, 141, 89, 228, 48, 11, 163, 1, 94, 66, 41, 208, 188, 57, 141, + 125, 252, 76, 158, 228, 201, 141, 251, 151, 167, 66, 0, 0, 10, 79, 120, 101, 100, 44, 117, 29, 158, 222, + 216, 140, 190, 247, 128, 119, 171, 107, 51, 172, 4, 247, 125, 51, 191, 215, 206, 243, 188, 185, 0, 0, 89, + 154, 170, 99, 200, 21, 90, 61, 253, 72, 176, 35, 141, 65, 142, 43, 178, 241, 58, 28, 228, 61, 159, 99, + 143, 192, 160, 56, 201, 95, 0, 0, 108, 73, 227, 31, 96, 232, 205, 109, 141, 3, 202, 251, 48, 224, 22, + 9, 115, 116, 48, 111, 232, 80, 154, 105, 217, 22, 78, 33, 59, 0, 0, 0, 120, 93, 88, 9, 181, 169, + 129, 100, 194, 103, 54, 120, 57, 176, 210, 66, 240, 76, 46, 243, 253, 32, 148, 245, 10, 100, 138, 
130, 213, + 35, 0, 0, 194, 153, 209, 140, 19, 87, 128, 82, 13, 226, 191, 203, 145, 118, 129, 24, 82, 129, 154, 100, + 108, 47, 250, 89, 79, 125, 76, 6, 147, 181, 0, 0, 252, 158, 143, 77, 111, 235, 126, 20, 14, 62, 81, + 203, 251, 140, 204, 251, 70, 6, 128, 149, 206, 197, 149, 182, 105, 44, 96, 226, 110, 231, 0, 0, 101, 106, + 73, 93, 132, 231, 174, 164, 94, 177, 102, 139, 132, 169, 80, 23, 251, 109, 19, 77, 23, 72, 24, 116, 169, + 211, 245, 84, 64, 157, 0, 0, 252, 20, 44, 250, 143, 146, 220, 246, 111, 39, 207, 2, 245, 170, 168, 146, + 112, 83, 135, 159, 183, 10, 198, 94, 209, 82, 78, 249, 89, 117, 0, 0, 162, 242, 87, 157, 4, 196, 78, + 101, 88, 76, 28, 64, 22, 8, 124, 18, 188, 209, 112, 45, 75, 223, 154, 70, 202, 23, 167, 108, 31, 7, + 0, 0, 25, 140, 227, 184, 244, 225, 4, 141, 15, 237, 171, 153, 36, 239, 159, 241, 33, 125, 109, 147, 248, + 14, 227, 181, 133, 228, 142, 236, 248, 230, 0, 0, 107, 62, 162, 178, 97, 37, 95, 234, 178, 110, 118, 224, + 219, 92, 76, 61, 48, 15, 227, 23, 226, 222, 142, 59, 194, 158, 32, 1, 87, 125, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 161, 126, 49, 3, 32, 58, 198, 52, 65, + 33, 230, 249, 98, 241, 117, 53, 57, 148, 62, 23, 90, 34, 121, 194, 142, 121, 147, 143, 166, 0, 0, 162, + 242, 222, 133, 251, 216, 181, 102, 29, 2, 190, 246, 195, 113, 252, 200, 78, 122, 105, 15, 241, 152, 76, 203, + 238, 245, 76, 45, 214, 236, 0, 0, 15, 228, 22, 26, 238, 250, 52, 218, 205, 159, 28, 14, 30, 248, 252, + 160, 48, 64, 192, 226, 181, 57, 106, 124, 211, 145, 60, 171, 165, 195, 0, 0, 158, 99, 8, 219, 172, 160, + 108, 45, 199, 92, 177, 237, 47, 234, 184, 105, 114, 63, 250, 238, 69, 30, 227, 182, 51, 68, 237, 246, 66, + 135, 0, 0, 61, 45, 33, 40, 201, 197, 218, 236, 29, 175, 6, 11, 225, 108, 25, 178, 26, 253, 100, 81, + 34, 31, 10, 79, 54, 27, 166, 181, 224, 33, 0, 0, 112, 223, 236, 231, 86, 252, 129, 245, 85, 93, 186, + 90, 147, 80, 124, 130, 41, 181, 31, 106, 254, 169, 150, 123, 112, 102, 221, 222, 113, 174, 0, 0, 101, 67, + 65, 155, 124, 105, 15, 202, 103, 175, 40, 25, 254, 198, 19, 102, 202, 128, 57, 214, 38, 221, 180, 166, 163, + 146, 229, 103, 97, 58, 0, 0, 183, 1, 46, 179, 25, 217, 156, 210, 12, 1, 44, 108, 185, 64, 106, 157, + 59, 40, 169, 145, 182, 74, 134, 39, 153, 44, 84, 132, 131, 98, 0, 0, 100, 88, 209, 233, 197, 93, 159, + 132, 222, 238, 221, 184, 99, 74, 73, 51, 167, 235, 64, 143, 122, 196, 71, 125, 233, 119, 230, 73, 32, 23, + 0, 0, 195, 227, 227, 246, 182, 224, 138, 61, 240, 180, 34, 73, 60, 213, 191, 82, 132, 89, 174, 231, 2, + 126, 53, 121, 143, 18, 17, 111, 222, 86, 0, 0, 253, 86, 67, 149, 254, 229, 57, 84, 29, 237, 178, 93, + 208, 13, 135, 54, 68, 172, 161, 106, 180, 186, 142, 170, 164, 169, 190, 46, 64, 32, 0, 0, 71, 82, 32, + 203, 160, 147, 175, 218, 87, 193, 150, 159, 90, 13, 4, 13, 37, 183, 137, 32, 201, 9, 56, 180, 6, 165, + 130, 177, 181, 130, 0, 0, 248, 
83, 170, 21, 91, 244, 89, 80, 9, 124, 241, 111, 226, 155, 169, 204, 97, + 252, 95, 34, 197, 254, 199, 92, 121, 241, 112, 187, 168, 60, 0, 0, 137, 162, 60, 126, 220, 30, 118, 203, + 214, 152, 42, 73, 211, 200, 141, 176, 25, 133, 243, 64, 23, 243, 231, 234, 133, 177, 47, 151, 193, 151, 0, + 0, 92, 81, 188, 253, 135, 13, 74, 11, 182, 8, 139, 5, 92, 33, 167, 247, 61, 126, 158, 10, 80, 70, + 167, 200, 198, 250, 207, 254, 225, 82, 0, 0, 12, 81, 128, 140, 144, 82, 66, 167, 45, 217, 86, 126, 235, + 240, 105, 133, 220, 181, 64, 53, 147, 243, 209, 134, 4, 135, 171, 251, 12, 207, 0, 0, 245, 199, 126, 6, + 216, 95, 175, 107, 46, 127, 224, 62, 191, 226, 143, 126, 27, 0, 192, 90, 58, 90, 115, 249, 113, 81, 127, + 134, 242, 123, 0, 0, 243, 67, 102, 197, 86, 240, 133, 93, 198, 18, 50, 39, 50, 180, 151, 146, 142, 181, + 220, 186, 200, 223, 52, 41, 29, 127, 86, 197, 89, 173, 0, 0, 223, 181, 42, 26, 189, 28, 24, 85, 214, + 7, 74, 200, 60, 42, 211, 253, 72, 254, 132, 210, 131, 127, 224, 78, 38, 171, 241, 93, 199, 211, 0, 0, + 64, 231, 66, 247, 202, 89, 132, 223, 18, 81, 137, 188, 221, 173, 49, 208, 64, 156, 32, 185, 198, 93, 160, + 174, 51, 76, 169, 253, 139, 44, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 221, 115, 129, 5, 66, 181, 152, 253, 186, 123, 34, 193, 174, 13, 130, 154, 225, 137, 177, 26, 88, + 156, 168, 77, 146, 140, 156, 105, 16, 166, 0, 0, 165, 102, 27, 34, 149, 29, 108, 157, 81, 38, 253, 145, + 75, 254, 253, 194, 139, 105, 161, 10, 48, 158, 143, 81, 103, 217, 206, 244, 59, 109, 0, 0, 51, 164, 33, + 157, 173, 191, 144, 36, 211, 165, 54, 135, 137, 57, 167, 46, 106, 45, 242, 205, 72, 213, 141, 219, 5, 222, + 73, 173, 98, 231, 0, 0, 69, 185, 161, 5, 81, 7, 146, 173, 251, 204, 168, 11, 63, 162, 31, 71, 104, + 192, 250, 163, 176, 55, 142, 163, 175, 28, 134, 133, 27, 138, 0, 0, 36, 101, 98, 120, 154, 29, 46, 189, + 225, 204, 80, 47, 236, 90, 150, 44, 51, 205, 220, 115, 86, 92, 33, 109, 148, 60, 67, 217, 143, 79, 0, + 0, 238, 107, 22, 68, 100, 3, 10, 139, 21, 9, 177, 50, 198, 17, 3, 250, 213, 114, 239, 243, 23, 28, + 164, 96, 181, 27, 78, 151, 3, 194, 0, 0, 204, 185, 113, 118, 170, 91, 23, 20, 143, 243, 217, 146, 81, + 106, 226, 29, 46, 240, 114, 247, 139, 254, 47, 225, 23, 191, 180, 120, 172, 234, 0, 0, 130, 221, 194, 45, + 126, 190, 146, 21, 99, 130, 45, 186, 190, 209, 221, 228, 175, 170, 59, 237, 239, 138, 18, 15, 142, 43, 50, + 78, 169, 179, 0, 0, 135, 251, 46, 186, 10, 201, 148, 55, 184, 246, 82, 246, 119, 240, 64, 218, 194, 219, + 171, 231, 244, 33, 83, 250, 47, 252, 77, 215, 122, 70, 0, 0, 68, 67, 33, 48, 251, 159, 101, 8, 222, + 143, 104, 86, 154, 197, 105, 48, 90, 139, 9, 91, 91, 35, 123, 29, 64, 215, 219, 188, 127, 67, 0, 0, + 10, 143, 177, 160, 65, 106, 224, 101, 224, 83, 241, 218, 214, 6, 83, 125, 24, 56, 56, 5, 244, 114, 11, + 196, 133, 169, 75, 2, 101, 216, 0, 0, 181, 208, 24, 218, 132, 79, 33, 9, 
113, 213, 222, 253, 92, 49, + 247, 133, 21, 134, 124, 173, 49, 83, 15, 212, 238, 34, 142, 241, 154, 55, 0, 0, 201, 27, 141, 25, 130, + 180, 88, 157, 93, 6, 70, 47, 66, 86, 134, 12, 29, 103, 31, 80, 127, 200, 224, 250, 50, 228, 94, 180, + 52, 107, 0, 0, 195, 80, 232, 55, 132, 97, 246, 137, 142, 216, 198, 14, 0, 164, 96, 70, 151, 205, 108, + 82, 102, 249, 12, 206, 167, 117, 204, 214, 169, 13, 0, 0, 15, 198, 218, 226, 207, 123, 12, 237, 29, 5, + 241, 168, 22, 230, 132, 86, 154, 160, 184, 137, 195, 61, 231, 137, 189, 125, 170, 122, 86, 217, 0, 0, 72, + 95, 13, 213, 117, 63, 32, 154, 89, 92, 196, 50, 235, 37, 221, 188, 246, 115, 80, 124, 42, 25, 84, 191, + 10, 113, 80, 91, 82, 150, 0, 0, 148, 33, 47, 34, 26, 10, 179, 249, 185, 218, 187, 91, 5, 17, 10, + 182, 0, 41, 135, 237, 103, 138, 0, 127, 235, 221, 52, 47, 167, 165, 0, 0, 119, 189, 13, 104, 234, 38, + 149, 90, 101, 22, 153, 82, 226, 81, 161, 2, 125, 124, 144, 79, 101, 228, 132, 142, 253, 132, 84, 150, 108, + 109, 0, 0, 74, 237, 139, 132, 13, 38, 45, 24, 147, 142, 5, 235, 244, 194, 215, 109, 11, 220, 222, 103, + 167, 67, 184, 104, 65, 38, 200, 181, 198, 176, 0, 0, 114, 83, 198, 178, 56, 195, 6, 119, 201, 201, 143, + 127, 141, 245, 151, 87, 186, 87, 144, 7, 41, 152, 48, 168, 22, 100, 216, 132, 12, 183, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 95, 26, 42, 8, 253, 208, 240, 172, 34, + 129, 80, 146, 168, 37, 216, 114, 249, 57, 27, 81, 12, 226, 33, 254, 143, 24, 181, 86, 149, 1, 0, 0, + 175, 4, 114, 53, 153, 199, 242, 22, 131, 26, 241, 61, 127, 100, 38, 197, 129, 176, 142, 248, 34, 92, 82, + 162, 216, 62, 146, 246, 87, 110, 0, 0, 89, 205, 83, 156, 57, 50, 230, 28, 179, 144, 98, 117, 105, 127, + 121, 112, 10, 188, 19, 94, 139, 61, 201, 70, 39, 252, 12, 226, 215, 4, 0, 0, 35, 213, 107, 20, 67, + 12, 157, 224, 172, 98, 66, 56, 157, 199, 229, 86, 64, 112, 161, 129, 247, 165, 42, 118, 54, 38, 186, 222, + 185, 196, 0, 0, 32, 110, 136, 105, 206, 117, 91, 174, 11, 13, 88, 229, 58, 132, 135, 68, 186, 184, 178, + 12, 240, 247, 164, 70, 24, 186, 47, 42, 172, 148, 0, 0, 80, 72, 44, 201, 188, 199, 118, 112, 119, 139, + 152, 237, 239, 99, 220, 153, 76, 104, 225, 94, 248, 1, 129, 132, 61, 4, 181, 251, 146, 132, 0, 0, 105, + 68, 94, 101, 95, 48, 238, 11, 119, 199, 166, 69, 161, 73, 32, 21, 189, 230, 0, 216, 25, 73, 76, 147, + 21, 88, 156, 31, 164, 215, 0, 0, 186, 93, 240, 111, 227, 22, 76, 218, 233, 152, 175, 28, 126, 117, 105, + 136, 171, 65, 225, 0, 129, 150, 187, 81, 130, 41, 86, 186, 193, 132, 0, 0, 41, 248, 147, 136, 150, 65, + 131, 38, 92, 66, 209, 60, 123, 182, 168, 141, 212, 87, 101, 208, 254, 9, 165, 51, 24, 5, 214, 217, 207, + 236, 0, 0, 95, 122, 220, 244, 9, 146, 21, 177, 107, 234, 238, 107, 31, 153, 197, 121, 214, 8, 76, 252, + 51, 34, 100, 201, 235, 238, 226, 24, 113, 55, 0, 0, 33, 7, 27, 97, 29, 27, 103, 33, 108, 211, 239, + 190, 188, 15, 197, 172, 154, 
123, 177, 241, 216, 10, 32, 150, 121, 130, 82, 167, 35, 212, 0, 0, 93, 205, + 32, 131, 17, 152, 180, 142, 35, 9, 9, 165, 115, 94, 96, 212, 100, 234, 2, 179, 143, 161, 94, 224, 135, + 51, 120, 53, 226, 89, 0, 0, 248, 47, 33, 193, 149, 196, 158, 193, 81, 237, 164, 222, 227, 168, 52, 1, + 32, 138, 184, 27, 78, 58, 229, 225, 72, 187, 245, 65, 99, 245, 0, 0, 211, 224, 62, 10, 77, 235, 67, + 226, 248, 144, 77, 215, 207, 63, 122, 119, 33, 225, 158, 242, 150, 81, 182, 8, 160, 244, 86, 39, 119, 151, + 0, 0, 127, 248, 33, 172, 13, 187, 33, 245, 23, 45, 247, 32, 109, 136, 65, 163, 2, 213, 196, 147, 235, + 66, 73, 89, 64, 74, 195, 9, 233, 201, 0, 0, 207, 214, 46, 103, 138, 33, 170, 92, 107, 75, 239, 129, + 158, 91, 210, 100, 138, 131, 251, 212, 212, 189, 69, 136, 169, 48, 64, 61, 44, 164, 0, 0, 171, 254, 145, + 187, 107, 84, 63, 104, 20, 176, 187, 202, 70, 224, 43, 144, 46, 181, 230, 173, 193, 170, 5, 221, 74, 24, + 216, 247, 118, 223, 0, 0, 218, 173, 4, 103, 38, 164, 249, 13, 179, 130, 145, 17, 51, 221, 185, 36, 85, + 189, 250, 27, 218, 134, 143, 253, 240, 24, 239, 235, 199, 52, 0, 0, 203, 51, 101, 88, 122, 163, 4, 239, + 17, 56, 83, 159, 67, 3, 206, 67, 204, 63, 250, 172, 169, 3, 232, 194, 236, 252, 221, 198, 136, 130, 0, + 0, 158, 79, 123, 233, 246, 170, 140, 212, 170, 17, 181, 93, 234, 154, 28, 72, 55, 78, 6, 7, 199, 69, + 163, 47, 107, 228, 36, 56, 66, 184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 55, 144, 188, 52, 183, 120, 138, 238, 234, 80, 69, 125, 41, 203, 139, 194, 86, 224, 48, 144, + 21, 52, 139, 29, 49, 87, 41, 145, 195, 75, 0, 0, 240, 30, 142, 92, 174, 66, 28, 253, 72, 185, 207, + 222, 153, 253, 69, 0, 19, 102, 150, 88, 188, 61, 99, 16, 172, 57, 134, 134, 71, 81, 0, 0, 80, 151, + 98, 14, 73, 244, 5, 21, 218, 11, 97, 53, 188, 42, 137, 5, 88, 121, 183, 182, 189, 243, 217, 43, 204, + 182, 224, 175, 26, 155, 0, 0, 75, 141, 98, 239, 120, 172, 211, 157, 69, 212, 62, 197, 110, 83, 64, 126, + 232, 115, 149, 19, 171, 248, 145, 114, 106, 174, 232, 184, 163, 51, 0, 0, 107, 132, 193, 10, 6, 150, 252, + 216, 168, 237, 243, 178, 101, 112, 47, 122, 170, 197, 144, 13, 61, 219, 249, 214, 140, 229, 97, 135, 29, 184, + 0, 0, 172, 99, 250, 13, 26, 13, 135, 55, 8, 122, 102, 2, 70, 158, 210, 95, 232, 17, 212, 105, 131, + 114, 86, 84, 91, 102, 87, 93, 201, 123, 0, 0, 114, 194, 3, 197, 254, 134, 35, 53, 132, 248, 210, 2, + 37, 58, 248, 189, 6, 20, 182, 16, 75, 136, 159, 159, 199, 211, 219, 146, 97, 50, 0, 0, 165, 234, 211, + 66, 85, 96, 87, 177, 112, 195, 162, 217, 73, 114, 145, 98, 217, 92, 121, 99, 185, 5, 238, 25, 17, 86, + 28, 224, 252, 26, 0, 0, 94, 140, 176, 230, 217, 22, 5, 83, 21, 136, 96, 45, 94, 238, 159, 34, 234, + 100, 86, 80, 26, 233, 82, 60, 242, 6, 122, 61, 230, 120, 0, 0, 178, 34, 78, 39, 208, 61, 138, 8, + 18, 85, 90, 216, 240, 154, 58, 171, 92, 171, 119, 68, 156, 136, 130, 25, 145, 210, 219, 28, 88, 
50, 0, + 0, 150, 3, 164, 233, 250, 72, 144, 122, 46, 228, 197, 131, 20, 217, 129, 229, 109, 41, 48, 205, 228, 235, + 36, 218, 96, 250, 12, 119, 160, 222, 0, 0, 197, 224, 119, 191, 195, 160, 190, 71, 102, 94, 105, 231, 92, + 14, 14, 71, 145, 190, 20, 191, 222, 82, 33, 196, 179, 33, 224, 138, 229, 50, 0, 0, 8, 226, 125, 220, + 109, 52, 12, 62, 143, 154, 73, 102, 21, 212, 225, 244, 200, 236, 141, 198, 207, 87, 54, 15, 34, 228, 66, + 59, 234, 23, 0, 0, 195, 179, 161, 136, 55, 235, 148, 105, 55, 57, 191, 175, 89, 112, 235, 138, 70, 122, + 168, 85, 3, 231, 105, 22, 189, 41, 11, 136, 217, 79, 0, 0, 45, 177, 192, 11, 215, 62, 147, 203, 228, + 21, 90, 251, 26, 168, 35, 153, 222, 202, 158, 241, 232, 41, 142, 220, 178, 201, 161, 143, 97, 179, 0, 0, + 244, 68, 241, 11, 235, 205, 39, 122, 94, 115, 32, 187, 191, 138, 70, 176, 171, 95, 158, 203, 202, 239, 52, + 162, 224, 155, 22, 24, 242, 183, 0, 0, 127, 230, 36, 219, 218, 132, 89, 111, 77, 94, 166, 254, 154, 200, + 19, 209, 30, 11, 202, 71, 8, 132, 157, 138, 235, 172, 106, 206, 161, 175, 0, 0, 169, 71, 205, 0, 82, + 97, 39, 250, 173, 10, 246, 162, 219, 18, 99, 34, 237, 198, 90, 121, 229, 205, 193, 174, 245, 40, 130, 52, + 228, 182, 0, 0, 250, 250, 166, 231, 8, 232, 221, 108, 23, 250, 88, 212, 67, 71, 19, 76, 221, 177, 214, + 184, 31, 144, 223, 96, 132, 5, 222, 250, 204, 227, 0, 0, 173, 42, 52, 248, 224, 134, 147, 239, 229, 22, + 228, 9, 175, 155, 120, 19, 42, 204, 24, 198, 200, 44, 118, 188, 159, 27, 38, 99, 98, 27, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 164, 139, 94, 201, 250, 100, 95, 251, + 230, 217, 17, 10, 34, 134, 11, 200, 165, 151, 248, 138, 112, 241, 34, 2, 131, 54, 183, 167, 26, 184, 0, + 0, 181, 36, 194, 243, 212, 42, 21, 235, 42, 25, 139, 130, 104, 149, 190, 60, 79, 144, 242, 177, 159, 61, + 217, 155, 108, 157, 170, 146, 219, 144, 0, 0, 158, 98, 226, 66, 61, 75, 39, 4, 251, 2, 31, 158, 238, + 64, 51, 207, 5, 15, 191, 56, 252, 94, 238, 38, 5, 198, 248, 234, 33, 32, 0, 0, 126, 244, 43, 20, + 75, 254, 150, 209, 203, 92, 32, 10, 142, 230, 221, 137, 235, 215, 92, 180, 156, 91, 160, 31, 137, 72, 236, + 213, 208, 166, 0, 0, 4, 56, 181, 251, 3, 68, 87, 253, 8, 57, 203, 72, 130, 6, 201, 27, 133, 60, + 222, 54, 108, 52, 253, 75, 5, 101, 227, 60, 154, 68, 0, 0, 229, 12, 118, 91, 155, 249, 65, 109, 43, + 89, 201, 215, 228, 104, 146, 211, 58, 95, 166, 188, 115, 239, 39, 11, 192, 114, 41, 11, 131, 172, 0, 0, + 252, 133, 253, 9, 129, 83, 221, 154, 60, 180, 223, 54, 73, 39, 71, 88, 155, 41, 106, 139, 181, 60, 158, + 46, 40, 248, 248, 108, 163, 148, 0, 0, 206, 229, 215, 188, 64, 102, 153, 238, 44, 109, 89, 157, 236, 121, + 27, 67, 81, 21, 122, 92, 201, 5, 75, 65, 193, 71, 115, 159, 185, 196, 0, 0, 81, 156, 204, 25, 45, + 98, 44, 198, 145, 236, 148, 114, 1, 183, 192, 41, 177, 168, 61, 141, 230, 207, 214, 151, 86, 113, 99, 74, + 145, 41, 0, 0, 65, 249, 23, 39, 102, 
51, 223, 9, 159, 8, 23, 158, 63, 110, 68, 26, 52, 239, 94, + 111, 119, 169, 216, 9, 106, 163, 30, 195, 91, 12, 0, 0, 89, 82, 26, 82, 222, 24, 195, 161, 24, 119, + 129, 6, 239, 29, 157, 131, 95, 21, 201, 251, 32, 35, 127, 33, 185, 184, 202, 218, 120, 205, 0, 0, 230, + 163, 161, 22, 172, 58, 180, 172, 102, 59, 222, 19, 32, 229, 199, 186, 50, 251, 63, 60, 57, 47, 52, 27, + 91, 82, 68, 181, 35, 85, 0, 0, 179, 77, 253, 146, 194, 114, 90, 68, 226, 130, 19, 74, 190, 42, 56, + 16, 174, 21, 128, 14, 125, 116, 129, 58, 161, 185, 217, 94, 43, 252, 0, 0, 82, 120, 206, 26, 133, 214, + 63, 24, 114, 182, 243, 132, 81, 80, 218, 109, 62, 60, 163, 108, 206, 178, 233, 165, 146, 87, 180, 61, 112, + 66, 0, 0, 54, 142, 251, 232, 227, 146, 232, 87, 96, 143, 139, 142, 45, 138, 151, 216, 11, 88, 154, 218, + 125, 223, 31, 132, 85, 68, 50, 75, 59, 29, 0, 0, 64, 79, 159, 222, 35, 0, 177, 113, 107, 1, 10, + 66, 46, 200, 161, 54, 234, 212, 225, 225, 126, 89, 145, 24, 115, 179, 65, 42, 96, 56, 0, 0, 229, 92, + 163, 252, 135, 80, 27, 100, 103, 238, 236, 14, 55, 64, 127, 143, 204, 200, 50, 37, 56, 55, 121, 3, 131, + 222, 168, 92, 84, 159, 0, 0, 147, 251, 46, 81, 142, 20, 52, 209, 103, 71, 107, 246, 208, 100, 7, 58, + 224, 140, 29, 193, 142, 122, 196, 128, 216, 59, 23, 52, 48, 81, 0, 0, 44, 109, 246, 228, 144, 66, 122, + 227, 187, 164, 83, 47, 94, 184, 217, 74, 230, 58, 26, 54, 11, 29, 85, 58, 203, 75, 14, 205, 130, 88, + 0, 0, 34, 173, 114, 238, 21, 205, 95, 86, 136, 78, 141, 252, 126, 103, 173, 68, 251, 196, 81, 158, 196, + 12, 243, 118, 249, 212, 21, 56, 78, 177, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 91, 107, 46, 93, 246, 133, 173, 205, 14, 168, 56, 52, 181, 158, 188, 148, 182, 105, 244, + 4, 43, 186, 149, 93, 106, 207, 144, 243, 165, 248, 0, 0, 86, 55, 130, 98, 230, 91, 16, 137, 254, 162, + 199, 3, 2, 154, 152, 53, 182, 183, 99, 165, 196, 84, 81, 69, 203, 185, 133, 11, 192, 249, 0, 0, 238, + 63, 162, 6, 45, 199, 53, 3, 186, 167, 161, 219, 253, 187, 53, 90, 169, 22, 182, 23, 145, 12, 67, 242, + 153, 236, 127, 50, 242, 90, 0, 0, 39, 10, 238, 161, 66, 66, 79, 169, 53, 101, 222, 109, 149, 40, 86, + 67, 67, 199, 143, 253, 215, 71, 56, 208, 145, 128, 174, 94, 204, 113, 0, 0, 12, 91, 130, 102, 36, 70, + 102, 7, 226, 20, 205, 242, 166, 119, 30, 135, 153, 111, 147, 254, 203, 200, 21, 192, 157, 82, 130, 231, 165, + 214, 0, 0, 129, 121, 253, 213, 81, 55, 171, 18, 6, 138, 240, 16, 139, 170, 85, 13, 39, 254, 166, 229, + 66, 23, 12, 52, 156, 221, 13, 242, 99, 183, 0, 0, 6, 245, 87, 69, 90, 152, 3, 67, 200, 250, 116, + 143, 232, 46, 65, 192, 89, 140, 194, 238, 71, 55, 248, 250, 156, 16, 15, 37, 228, 84, 0, 0, 27, 147, + 102, 1, 68, 0, 44, 56, 55, 115, 198, 183, 71, 150, 96, 198, 50, 138, 190, 3, 167, 118, 158, 84, 78, + 161, 78, 195, 87, 160, 0, 0, 212, 59, 102, 72, 85, 185, 119, 115, 227, 87, 186, 111, 157, 16, 107, 235, + 144, 11, 129, 
201, 191, 115, 25, 78, 164, 15, 115, 93, 27, 148, 0, 0, 233, 157, 59, 215, 227, 166, 151, + 166, 202, 151, 129, 10, 220, 201, 75, 39, 184, 249, 218, 133, 14, 155, 7, 237, 47, 248, 233, 8, 45, 42, + 0, 0, 9, 38, 173, 75, 76, 219, 165, 116, 78, 127, 236, 108, 89, 146, 61, 202, 97, 147, 244, 104, 41, + 61, 16, 217, 232, 209, 62, 186, 67, 183, 0, 0, 175, 118, 36, 189, 30, 209, 163, 187, 87, 52, 81, 54, + 222, 23, 40, 62, 132, 107, 116, 213, 29, 235, 112, 83, 56, 110, 111, 102, 84, 146, 0, 0, 3, 158, 249, + 6, 108, 144, 97, 220, 123, 37, 94, 166, 71, 121, 181, 94, 127, 121, 229, 17, 81, 8, 127, 10, 151, 19, + 161, 195, 46, 189, 0, 0, 140, 244, 216, 164, 167, 43, 34, 93, 121, 126, 159, 119, 44, 212, 108, 184, 217, + 41, 169, 129, 21, 107, 82, 163, 195, 135, 30, 31, 225, 191, 0, 0, 45, 204, 46, 201, 83, 111, 250, 179, + 246, 251, 48, 70, 177, 38, 215, 95, 75, 55, 222, 215, 160, 147, 31, 232, 3, 50, 200, 253, 205, 81, 0, + 0, 65, 130, 75, 217, 95, 113, 159, 162, 133, 193, 94, 118, 194, 184, 64, 176, 125, 225, 190, 58, 26, 62, + 184, 121, 218, 22, 26, 134, 121, 153, 0, 0, 75, 125, 185, 99, 47, 151, 159, 198, 132, 131, 208, 101, 14, + 7, 81, 25, 79, 101, 78, 76, 90, 224, 243, 22, 246, 190, 141, 219, 205, 150, 0, 0, 48, 183, 60, 113, + 107, 228, 161, 214, 223, 38, 52, 206, 55, 55, 172, 23, 53, 109, 42, 193, 7, 96, 207, 3, 10, 96, 47, + 139, 186, 25, 0, 0, 243, 25, 149, 170, 223, 157, 238, 86, 191, 77, 137, 136, 166, 181, 149, 121, 18, 214, + 201, 232, 9, 246, 151, 203, 162, 222, 240, 56, 28, 44, 0, 0, 239, 206, 188, 103, 235, 190, 38, 61, 124, + 40, 82, 209, 71, 46, 128, 15, 191, 220, 16, 242, 219, 23, 47, 164, 150, 24, 49, 244, 2, 87, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, std::vector({3, 3, 30, 20}), + std::vector({9, 2, 16, 32}), DT_UINT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferHwcnFz, fp32_2c_2n_pad) { + float data[5 * 5 * 31 * 17] = { + 0.593598886073616, 0.4810232050731812, 0.3478036101156211, 0.762881531801135, 0.028577636455854316, + 0.7156008663579433, 0.18361490618960774, 0.17418305506844411, 0.868328540443803, 0.2454222515962139, + 0.47452523495658727, 0.4970056033827158, 0.44033689982411994, 0.20882891733314446, 0.5732021035913847, + 0.08708056610611004, 0.7955889964539764, 0.7640503360588631, 0.8401136831438388, 0.862314576961328, + 0.4207283912096622, 0.5991317044421851, 0.17899343985762617, 0.32954442583557153, 0.31948736647554354, + 0.8972870220627548, 0.7073536726266456, 0.041438347867159275, 0.22641122187780194, 
0.237252883210339, + 0.3927096579340904, 0.7409622576131739, 0.3051880686555266, 0.24195638565265287, 0.8995848902518164, + 0.044779165115809705, 0.7265453786063859, 0.2647576379692258, 0.21048966111106027, 0.47224562757448907, + 0.42175519674809736, 0.3958647885705613, 0.51509383654657, 0.25006775962896666, 0.5904022355055402, + 0.8591845333660317, 0.7874546149461759, 0.3051683274263698, 0.2290639679002101, 0.05499208294913038, + 0.37232433612404425, 0.6548775890570243, 0.3381492966273564, 0.3990489995709843, 0.9004613845739784, + 0.6694397427653418, 0.4938687334258216, 0.10666682823232998, 0.34196869016830245, 0.4144553904652547, + 0.6739125824939687, 0.6793523782315881, 0.24262210430854425, 0.7407960038253821, 0.6984980658550315, + 0.8325212880903263, 0.027214777055414197, 0.1820340231522195, 0.8412044288191766, 0.5206080076671794, + 0.7933147161011688, 0.7197322198409547, 0.30005986074297075, 0.932070188918579, 0.08987037681628562, + 0.09715095501322835, 0.014577791092747039, 0.6976435631334147, 0.5261717916797551, 0.5281887933678414, + 0.6960407038308487, 0.46410183847788966, 0.9207233447072046, 0.15072384193050814, 0.21530880742845604, + 0.5892340690827029, 0.02004867341974026, 0.09314295500017489, 0.30476350353769943, 0.3681036108249589, + 0.2952207303230576, 0.31866167217169594, 0.4959605824267873, 0.15801448394148743, 0.650924399569012, + 0.3446824272811363, 0.7296872891154248, 0.3605032367839367, 0.9231719122159048, 0.9474146710205339, + 0.5526495368531591, 0.5055310644029261, 0.2332279106020284, 0.025546947958476562, 0.31576728271006205, + 0.39291911010389213, 0.45877831380370426, 0.3996024914761337, 0.12446896049963141, 0.7580487511669882, + 0.22264358815364704, 0.30318909847958275, 0.11980142858465348, 0.8242576076118073, 0.0719144300021557, + 0.7209623238406305, 0.7012091007316089, 0.9837826851193835, 0.9965007859060797, 0.5441348813279586, + 0.7030645898390516, 0.8184720620327086, 0.2233338288842549, 0.9639414367524526, 0.3756977268668765, + 
0.884932034280004, 0.38395229005034515, 0.5450426127551828, 0.7989389203837962, 0.7856508121051086, + 0.4226823294268798, 0.7808140169558024, 0.7000457426533268, 0.17825579738154085, 0.5861264339223254, + 0.568503464864409, 0.3651441508264721, 0.9497262000622213, 0.9558626677938953, 0.9202809420588011, + 0.47126325374736266, 0.3073641724528108, 0.2993922293811473, 0.0893967683976089, 0.35673438486349596, + 0.1319147541989184, 0.14202190413081717, 0.7480685173760554, 0.5866700378904192, 0.4869916440564551, + 0.723003973041874, 0.05516626705471395, 0.2878017615246292, 0.261073786630996, 0.3161574993701024, + 0.14069313873197198, 0.6774249117286338, 0.23368110179707224, 0.13643156766200293, 0.8335936956874417, + 0.9470119344960851, 0.9025375539568234, 0.6957737981454633, 0.2633388550262611, 0.12121181432866401, + 0.5259391748498912, 0.7367290318588684, 0.151081423900993, 0.19939505727525375, 0.14728746460860487, + 0.4595377428024995, 0.5120266707646158, 0.5722042120913776, 0.501716583816315, 0.31836804737899427, + 0.8085562285576157, 0.621389898932532, 0.2485564193119627, 0.5701856047432775, 0.035395566125893674, + 0.0919903179772098, 0.10604432286713128, 0.8404416599464803, 0.12860038413375907, 0.48945980023973557, + 0.009547298039236707, 0.14211637660664622, 0.3135189874348636, 0.5536882843401529, 0.46754492659771874, + 0.6308028672446349, 0.5020266281753762, 0.03779439381497185, 0.3153986073807654, 0.564585511491296, + 0.06173266022631119, 0.6249840540018826, 0.12884164259502529, 0.02454334913294509, 0.2245721137214033, + 0.14344738309947613, 0.7420366081457724, 0.9111708085676391, 0.9152636785078667, 0.6177834911425752, + 0.30990155481123993, 0.5327041615588409, 0.0224183069131898, 0.28135114234088454, 0.2756085535713453, + 0.8808870930320241, 0.03214874806890877, 0.9616911288733874, 0.9375165772182906, 0.42492940458309547, + 0.8469409244508359, 0.06094007599112705, 0.09024703327969741, 0.29337726136130815, 0.4934325829052457, + 0.44454216965617, 
0.5841020456510232, 0.8616061390983086, 0.3262050907222336, 0.9456243710063653, + 0.6348272760218778, 0.9047101702125221, 0.8584106383590273, 0.2861615213186801, 0.45639216527738224, + 0.24131241133396497, 0.23105323339508366, 0.4958456761013731, 0.003179245712851775, 0.5042477018544159, + 0.6313796113621828, 0.7365049039159944, 0.4169155085534574, 0.012700883776225358, 0.512859678623727, + 0.9613778130020714, 0.25700651754797976, 0.1567504548037416, 0.3746771053844279, 0.012422655117338643, + 0.09439971450316509, 0.6505962977309145, 0.07595680959712214, 0.8334136272910464, 0.9004555370037944, + 0.4961499830217023, 0.7243912969474776, 0.45790464283773213, 0.9142629540580506, 0.3704774021439089, + 0.7146020341690771, 0.7727256424739116, 0.0522424158382756, 0.41352647668663955, 0.994663312091293, + 0.5901004198233865, 0.5898241443380592, 0.6431827666852941, 0.05109898111904021, 0.9635626193632769, + 0.08216278989323067, 0.9416523093400732, 0.8897286498937441, 0.5797351253004503, 0.40062092370169566, + 0.9821369815023923, 0.3374031316474292, 0.06174117900982046, 0.7563509513433027, 0.48371927402718495, + 0.983220450609312, 0.994454807209054, 0.9102582312286879, 0.08328294346257348, 0.007585375033768327, + 0.6575385725326951, 0.2990333922479931, 0.13238092838299198, 0.13890804052780736, 0.2823221332770227, + 0.686623785781755, 0.4057627556961917, 0.42384653406104467, 0.5133038946262652, 0.07600151442071679, + 0.8503478834743483, 0.7970120332604801, 0.8285848300454144, 0.6531831607546182, 0.5903464041026849, + 0.2592457574205125, 0.7258673347700841, 0.2983469669100758, 0.23266236713461463, 0.4390761620158786, + 0.20631143030205945, 0.20508752670639585, 0.9130707080458179, 0.21146772543661918, 0.27012612692098714, + 0.4744857756432088, 0.5799424630370211, 0.1376126113738475, 0.4680168030802708, 0.7815153862863152, + 0.8850850711692716, 0.26558269705971305, 0.5971494223497671, 0.384147794314181, 0.0795452234285362, + 0.7093531714206968, 0.47886198625417786, 
0.20361778826617072, 0.39740444258894847, 0.9234363996138104, + 0.5634133269983861, 0.6695106398095443, 0.9678259401549132, 0.48921569619097927, 0.16878593756180382, + 0.3987753044485345, 0.6212764853437488, 0.4613473720661614, 0.9043380304636407, 0.2884677893053279, + 0.5147599026113846, 0.7374140365425506, 0.28763078443712586, 0.2573973326878555, 0.7086671927809276, + 0.07595723211603778, 0.34890599819115753, 0.037769134940686566, 0.3742150351977427, 0.45244961281531826, + 0.13798584146945847, 0.44863707051828616, 0.5283524344565729, 0.27569849682157654, 0.9550553617348797, + 0.9484520007081532, 0.838964090279667, 0.4591114931972913, 0.43631960912100864, 0.8292195420512238, + 0.6340605573999117, 0.22070275324013533, 0.3096210669410986, 0.38977961292050134, 0.8151886861481233, + 0.28137738866555895, 0.4478388053971142, 0.7038072423287693, 0.31318381692816166, 0.610834487901371, + 0.6823965289718269, 0.3159130385611112, 0.2589539532391051, 0.8306603710112038, 0.1054628479784595, + 0.40450476726612095, 0.2460484224594104, 0.09595970560295652, 0.2582160463405221, 0.7184988011100494, + 0.5116668807015653, 0.7607018115481479, 0.8925474177627563, 0.7159753212814014, 0.6827160884365773, + 0.6693740081942847, 0.5280856315219331, 0.10330941923817205, 0.46373756229711927, 0.3872594740253491, + 0.8445326611522498, 0.14549599475849628, 0.08653651334989632, 0.5766853060082714, 0.9283078509113353, + 0.48070313104894336, 0.6025256099554165, 0.84166432004193, 0.14208057166618826, 0.7689346923944991, + 0.908344992627332, 0.8540313293088498, 0.4802425368088651, 0.740852680410049, 0.1822134615284915, + 0.24663453343018316, 0.37378291165720157, 0.15684945965477548, 0.05804942268392754, 0.44335856245670846, + 0.5803649708848722, 0.033872446557603575, 0.3995191212679662, 0.6354374126975576, 0.7819906543590163, + 0.8513425475228935, 0.15039485554444054, 0.7241998216443456, 0.012611507623567442, 0.8886089200288345, + 0.04290730232514961, 0.28502921955235816, 0.4846367468682897, 
0.816067630567994, 0.7160944513737847, + 0.4959392980473104, 0.6384421302257155, 0.27666801046108047, 0.1356957259131223, 0.617109091994503, + 0.385463200160131, 0.08923801278457466, 0.7638681122694169, 0.2198867412877108, 0.26171159497471774, + 0.8059627931405284, 0.9923725484387749, 0.845895757541442, 0.5731426263532864, 0.4434866286315431, + 0.2648151053055381, 0.41340369493789153, 0.9695160611097587, 0.1825589595147502, 0.49079397771001254, + 0.5138271773752828, 0.42214998877379684, 0.0928296071101008, 0.9690908414984437, 0.06901815544986045, + 0.9773473910563464, 0.14014769521659476, 0.9360337353773889, 0.7745197828425653, 0.6819692531358216, + 0.7543123436003192, 0.23971926436094648, 0.8560305536139433, 0.8308672785934286, 0.28014244029495816, + 0.5833463290167716, 0.5572928108735016, 0.008945521950959323, 0.7072934188630184, 0.3525753102840702, + 0.278108811624029, 0.6876632923779983, 0.4904760913221584, 0.030697059613750688, 0.4731276079321931, + 0.2849054475638024, 0.19729253727895313, 0.8462546641488713, 0.12309806832265668, 0.8172004660367465, + 0.9929629430818431, 0.8163055667453062, 0.22548546642264444, 0.40419160833352064, 0.08153264004585492, + 0.8671973547518749, 0.20957326823990885, 0.4999413321151954, 0.8973736477027696, 0.4529885491512896, + 0.2907886653413483, 0.35041928857685234, 0.38834605815962153, 0.22888532622451452, 0.6251885672305091, + 0.7623154408654318, 0.4362680673750108, 0.4146565988318993, 0.19444417411246517, 0.6215354530319409, + 0.6647228862844003, 0.8428100354980886, 0.7470089973668257, 0.4802903549870492, 0.5591859973668659, + 0.5559129808305836, 0.7025569269203741, 0.7836002284511876, 0.9934743712719277, 0.9222273247346949, + 0.28503701748533616, 0.9123727575142104, 0.6251088115596354, 0.16823098693563443, 0.8894523892337659, + 0.049391925066840314, 0.679417441127222, 0.2226676231823268, 0.6570716424674401, 0.1366927927546273, + 0.9690624578776225, 0.7073266911670176, 0.8252555123646345, 0.8178829541384771, 
0.33840274361446054, + 0.7352228624783738, 0.6200707440077958, 0.9932724706350178, 0.6195961287452106, 0.21854723276407206, + 0.9749239371485767, 0.5862612894565067, 0.2581346155941424, 0.20719097668798836, 0.7382101833227492, + 0.22653439920927576, 0.5434581072851903, 0.13676736147335866, 0.1422391059254624, 0.6856913667658722, + 0.6893748618884612, 0.7646787710838534, 0.254864207208919, 0.8787445333365519, 0.05375714705653023, + 0.7231636247301719, 0.027548393248939873, 0.19551397445035223, 0.6879471457464532, 0.8598585098047076, + 0.8804411896920898, 0.4065916996039697, 0.28229441056452487, 0.5281997457506116, 0.8606365671621784, + 0.4268129065749112, 0.6084342402890576, 0.6078868424079517, 0.15043149367674513, 0.8840559717343647, + 0.9489539368183714, 0.8534001857378085, 0.17361779186876702, 0.18580751724856692, 0.396021555811379, + 0.9411698952673958, 0.9321026513955185, 0.38284263217118, 0.18633965320157897, 0.22985224728312492, + 0.8371512079527256, 0.6811002925409044, 0.660624967578473, 0.9271981127440494, 0.5240219285149362, + 0.10086746908945154, 0.16909888137272389, 0.5383303892178303, 0.644594667838801, 0.913108420970242, + 0.8384901380630441, 0.9031659295324586, 0.8535823096989902, 0.22673864302147995, 0.4287946631723204, + 0.9246647932559534, 0.3722858844607305, 0.3504484219036722, 0.01433608289477506, 0.6402133263516303, + 0.10676436042784176, 0.6810179025649159, 0.4062380921612012, 0.6452903410416716, 0.8971474117935213, + 0.15035462916965436, 0.9196381893926627, 0.6848992718751541, 0.1273558615809821, 0.44960734472191477, + 0.9479758145303829, 0.5851055024468935, 0.6855590277237142, 0.7216238316872927, 0.2786133864843655, + 0.8566802266036488, 0.9599617547623329, 0.15230173943272107, 0.9031433757449111, 0.9153274001730106, + 0.3097707842905164, 0.5710761866417872, 0.5477339248308509, 0.7605995653286933, 0.4538819498622414, + 0.19976872998007278, 0.39552906204900906, 0.6515756499374604, 0.0988698661935884, 0.7743622643862038, + 0.9093966330479945, 
0.7891896293617638, 0.11392603808919821, 0.9871063239560357, 0.372098880749428, + 0.7659913170885068, 0.6296617719723965, 0.9029637355277926, 0.8510902073554719, 0.17552800922227452, + 0.893073083777841, 0.11343360867793151, 0.36589357848325355, 0.844271719810783, 0.6333632838090231, + 0.9020463093192808, 0.5216398515484101, 0.7194164479288156, 0.7426342896941175, 0.16830115043628857, + 0.3878043707008575, 0.8780981020855343, 0.3044409796516797, 0.7786489724738195, 0.3276296093741684, + 0.07092157059977744, 0.290802508299598, 0.05713051627690491, 0.8183873298634738, 0.6911733881871093, + 0.560020339438646, 0.012720043129243686, 0.17171637396314954, 0.7841989530099178, 0.993783030927795, + 0.35394453297079664, 0.5861961608942753, 0.03678683986411724, 0.4668032879652023, 0.573478187247429, + 0.9471222788590163, 0.7480681871444872, 0.7396416787629857, 0.6549658560134354, 0.6984077912683978, + 0.7045128593216903, 0.28566015003041256, 0.3691923793036713, 0.11149529395813496, 0.8710249783471142, + 0.5624804007078483, 0.1369135036396909, 0.8107352765897754, 0.5708605510921904, 0.5115453776645127, + 0.5984517263715972, 0.8897067762696691, 0.6252486120890782, 0.011642991275112147, 0.4881684662549931, + 0.6007702163154556, 0.857902842490028, 0.5422221257254408, 0.1897466176882897, 0.11101765420923704, + 0.5088609582876709, 0.5179304301603468, 0.12125585422541552, 0.6531441682016415, 0.7950871930975674, + 0.2570433034275209, 0.5587526903881904, 0.2762685608057043, 0.7214896503568283, 0.8817814404424481, + 0.2650827333618603, 0.5489209619076307, 0.8344830851734352, 0.0576256978132238, 0.6190392398627965, + 0.8007086546196863, 0.4709735503721877, 0.9145095023322514, 0.4356525320772865, 0.6947487876096541, + 0.7173675424075389, 0.007871872407570812, 0.9801897867998693, 0.2583427026511369, 0.5999565198667359, + 0.38027068371765504, 0.5622747890867535, 0.03795957014907747, 0.9526690906876173, 0.9387742137594037, + 0.3479803875633284, 0.39146347056866737, 0.06127421795401411, 
0.0047655945739981664, 0.44050874572789867, + 0.4272591391651567, 0.37892607606439865, 0.9921364124025801, 0.3019412477582947, 0.424597397793149, + 0.411372043297783, 0.5323853017321353, 0.05336046567302655, 0.7998205073676302, 0.13081493418889845, + 0.1639972955750726, 0.28063147367385277, 0.48857187345040054, 0.5254607717287598, 0.49338385751076785, + 0.4149348374024552, 0.9296778777274274, 0.04293797049020576, 0.3762253290720645, 0.6790054168465538, + 0.29633582118536594, 0.8260116609585727, 0.3330923344392942, 0.11445372028009526, 0.5948873602102268, + 0.02017230039529183, 0.4135850771430212, 0.6030846455523572, 0.30695864964688924, 0.5935098145397808, + 0.0019244685659860483, 0.018436125967998374, 0.2672045198455989, 0.4030523140074629, 0.9862500339027105, + 0.017296512356210214, 0.9720962563477026, 0.2735670629256831, 0.5922475003965821, 0.0072773802162633405, + 0.06779950265839463, 0.03230960064967514, 0.2358484942113961, 0.6876803256834041, 0.3191987438682232, + 0.8596804632685916, 0.39694677154398783, 0.6374052786612026, 0.23382229548557953, 0.07811123274017873, + 0.4940467199482641, 0.567714413888491, 0.02151003170761434, 0.76319198104096, 0.9136099549764944, + 0.8782612832407313, 0.762454681523548, 0.8903009009929708, 0.25372493200952595, 0.2016671873781376, + 0.2060757695447304, 0.6943602656551667, 0.9630346192828135, 0.3463652212059517, 0.06829090486182787, + 0.19263564504761732, 0.32315326893263696, 0.9658661703626518, 0.21750950860604246, 0.23125919646370396, + 0.25633240022811465, 0.46934775479275015, 0.056819576144068296, 0.4068334280781687, 0.8166846578861927, + 0.19332921689264948, 0.9236993715891589, 0.749856701753336, 0.9234135213494146, 0.08419942590996321, + 0.47078252814943145, 0.518567994057743, 0.49186591814800074, 0.3853125544148821, 0.2860943499760865, + 0.1788104391696963, 0.4966194118714601, 0.24791065560281356, 0.5034813307343111, 0.49459375430687824, + 0.8634485101791174, 0.6474472493038106, 0.33138854836601195, 0.18275472370055468, 
0.474056182824796, + 0.7864628505798014, 0.3579401402277915, 0.051129959395827096, 0.4372674204257446, 0.13315639773231036, + 0.1560219158981737, 0.27384907486598786, 0.3765067715933418, 0.9615222933019792, 0.5407503001119387, + 0.340729366104226, 0.4144518719787774, 0.901732368492859, 0.16969627035162727, 0.7404811365576643, + 0.10512015499408911, 0.3165620817480945, 0.36544859553311027, 0.18086654491352694, 0.6640764544139515, + 0.0007410697213198203, 0.7644906223723946, 0.048823581035504215, 0.8886550273938056, 0.043843836217736354, + 0.2354975097327563, 0.07623266353861846, 0.19180722872392997, 0.18818185057466297, 0.3426560703580187, + 0.3787333525187946, 0.5622367782465371, 0.17195832267247402, 0.9265538041938892, 0.5269920009830543, + 0.6377375758753615, 0.9400811171049379, 0.09676149403491074, 0.9771764007823234, 0.3145750434837641, + 0.13015741816987858, 0.6745193733917653, 0.18568100609014015, 0.6673808576960704, 0.2129648381099467, + 0.5928034714446895, 0.030404723947015433, 0.11877626040221745, 0.6079204669333379, 0.7903650693158433, + 0.9342000005155301, 0.3650551326938446, 0.783614499834809, 0.42017831903269565, 0.9524412366179466, + 0.31290973574706693, 0.333809199850141, 0.018933245117345843, 0.6148133428760904, 0.39036266224192295, + 0.9737284041729836, 0.5389992304596721, 0.8942289983089009, 0.5566004294762759, 0.5109201940895305, + 0.025302754483026768, 0.9377919655673319, 0.36132098104923716, 0.9595741806623073, 0.24493589653459602, + 0.9227303933000318, 0.5263219620202745, 0.6907490621323701, 0.38435718074696057, 0.44756642341828445, + 0.7874007689029296, 0.2189762181045023, 0.4131365103670408, 0.7000110795177936, 0.23797898188037647, + 0.08080519202078185, 0.02974750198426057, 0.19818767899624412, 0.9637717109507615, 0.08365775403277442, + 0.6168788855026891, 0.015644449998551524, 0.5264467132662293, 0.0012098621523040087, 0.541084535833613, + 0.9463743594644852, 0.930805454547134, 0.975378380949056, 0.7312832780866552, 0.3550107658196838, + 
0.29425404361691787, 0.062121854287130795, 0.5079773285088168, 0.09956280813554375, 0.8440007755486699, + 0.6567797027748701, 0.06705679406513887, 0.6898874527808591, 0.7975501607023373, 0.0032063387045777603, + 0.2911296764898823, 0.7927264988564006, 0.8755307834415003, 0.5315693199453522, 0.32267184760929657, + 0.6605431700617775, 0.9384627533537213, 0.6378800316497563, 0.47381356672041586, 0.790909456101359, + 0.656595738477664, 0.23818473918145966, 0.29221016777839015, 0.39287075117417114, 0.005133070328970812, + 0.5568421534623479, 0.400876142501678, 0.7421432247015287, 0.8990037512449554, 0.2646458559895223, + 0.04800293031999969, 0.07138509896092038, 0.8853431894279555, 0.21639845447255202, 0.7996638799391259, + 0.9763840039816348, 0.7672567448181307, 0.21651043412280802, 0.8337872993833937, 0.6157257035076474, + 0.8937080874572724, 0.9374905850337778, 0.6870152141824932, 0.8660072278268933, 0.7401414123541316, + 0.5845797111419688, 0.48302881332725933, 0.13121791296797802, 0.5944444531568308, 0.6419042996764052, + 0.13765303280451724, 0.47498542817337597, 0.9372405956170919, 0.10968236651551133, 0.7055280441325609, + 0.48485333752760684, 0.2072637613549223, 0.648185474712454, 0.02887522737614967, 0.902584785060954, + 0.36567392305213464, 0.5179793618045656, 0.00903877777354778, 0.43342440313876085, 0.3770263276426552, + 0.3930701950014943, 0.5095849930468996, 0.9548033527505275, 0.17260198610286515, 0.24360648931241047, + 0.9439606846402825, 0.8884260674640121, 0.17747545681899313, 0.18576628852966004, 0.778448416936745, + 0.8275723004502537, 0.2951865658998095, 0.0019250327378541598, 0.31186335072704974, 0.7823740403085887, + 0.5304594782869321, 0.9573357353050627, 0.8381866821048932, 0.36421613226104477, 0.09492979213038455, + 0.5743632736335481, 0.22191012323761294, 0.3512568644747548, 0.9839590462832234, 0.8948480967993708, + 0.3135101746928498, 0.9767482027103868, 0.1791118276074677, 0.47032942155973456, 0.16217350671796626, + 0.1635244015846663, 
0.14719912459303908, 0.8250154137794365, 0.731075283461769, 0.7738689778041125, + 0.3380795915577818, 0.7372403339241641, 0.1931032500556582, 0.1490105454723245, 0.9111359964391323, + 0.6978171328423688, 0.5161504735576508, 0.6511645058069245, 0.23666493426387136, 0.5747003924240637, + 0.011264641299907785, 0.5235039486497978, 0.6469999169636548, 0.9609055044681635, 0.031262751310596304, + 0.2556723867425599, 0.2539080362514551, 0.2686395134421844, 0.516512071087104, 0.17978511180694778, + 0.7538185647608706, 0.026597001614222515, 0.5309184308545543, 0.194678675750529, 0.3779983592021807, + 0.7135820313338745, 0.5043118982317626, 0.28211201923152873, 0.8110395166227209, 0.40824733481821995, + 0.929303161102385, 0.2908893487734502, 0.6266347391907159, 0.6290301935855606, 0.9842265081658566, + 0.5551737030768077, 0.4360415544126278, 0.04107327068170108, 0.22919268514603797, 0.3920484946662971, + 0.49331119239386456, 0.7355779050524593, 0.24086980177336992, 0.5437136134301128, 0.7162861520545488, + 0.594658948063741, 0.5510760695437954, 0.28218816695782356, 0.9790276867210764, 0.34079512381342236, + 0.5220196079295485, 0.4379634875906535, 0.23014987007243093, 0.44263087569464177, 0.4173792121384853, + 0.07904414301138918, 0.29169117724653026, 0.4220809931523001, 0.6774781950754846, 0.8835661186316296, + 0.9212334940526165, 0.7801506618079727, 0.3773422894846701, 0.5310942824706316, 0.3873352350324747, + 0.6253932165124783, 0.8932605268036224, 0.8726478482024886, 0.5373527530812233, 0.8600481727284203, + 0.918042741583884, 0.8601934876537072, 0.5516356151586437, 0.9444287998344876, 0.6819801213461811, + 0.8088443449685825, 0.05115835664365542, 0.8469422160730711, 0.006236962734933682, 0.19137289638190713, + 0.7819346356100608, 0.5810289158555193, 0.09685879870223768, 0.34827871816251865, 0.7704113254262928, + 0.9802408460166704, 0.7277997187186478, 0.15371577651638713, 0.29567384376495076, 0.9126289407364749, + 0.005642345369721635, 0.9293424961549993, 
0.32645681417681105, 0.053840447225318444, 0.2650426442330527, + 0.0305978613140101, 0.7437100087603623, 0.5412269446643696, 0.41325093470060126, 0.9148613148672179, + 0.24479976063472064, 0.47500296037120016, 0.34527673407744874, 0.4829866830212408, 0.9025397313292765, + 0.3050776139625416, 0.06892286908639811, 0.7554552148111475, 0.7008610017036007, 0.19337399216816265, + 0.02249082050834006, 0.11239467057238772, 0.4030442802225406, 0.05094825459719099, 0.7754809831284823, + 0.17556207325455564, 0.40898244015650265, 0.7500176643692748, 0.9355037079911848, 0.9237427047317442, + 0.8920704442811604, 0.9732082132394165, 0.354678537938626, 0.7560346149083839, 0.35966099039389465, + 0.7358904720246477, 0.5841640354989537, 0.6518443760927354, 0.9907180473073051, 0.9187915740246261, + 0.7108254211518128, 0.41748440769112094, 0.37014857151329494, 0.6689073682061573, 0.450663991512848, + 0.6510416381579317, 0.2066282957427087, 0.6430121365978986, 0.36000477729935176, 0.4499959071601498, + 0.2510165158607539, 0.7868512927357355, 0.3866831747831503, 0.380079673287293, 0.9890535380865763, + 0.5777133467824651, 0.6489838592517869, 0.006574273358853078, 0.25462644370917586, 0.4760673738878337, + 0.9843825431171334, 0.9348394082241142, 0.09433957578977692, 0.14343498074223138, 0.9516920897170033, + 0.9876749595481583, 0.7601153094436749, 0.772531913925415, 0.8362993556629968, 0.782851130931092, + 0.13202015978889026, 0.5965715084536747, 0.20165325304560588, 0.06347584144850515, 0.20836881006630548, + 0.4466706415086723, 0.6675845484142084, 0.9932926659177506, 0.2362744726460665, 0.6794270156932601, + 0.6560317360531576, 0.7708246944234937, 0.965360847206718, 0.5348313356556613, 0.17935594816182743, + 0.08564942129829656, 0.3032921336149901, 0.45101045358482617, 0.014066342104010254, 0.6716243495247226, + 0.6080928162965836, 0.2411321322976424, 0.7350768164075168, 0.0932725386912655, 0.521712516718149, + 0.4837670894157401, 0.292596705011257, 0.417528641590168, 
0.7572173324421296, 0.2607618036561612, + 0.4580342027348445, 0.15778303768306967, 0.6312561650854187, 0.9531267944167136, 0.6209213898599184, + 0.4683834965337599, 0.7817248865886222, 0.8892992380996494, 0.9155233698207275, 0.4051975994749256, + 0.018222351978108287, 0.7204773146095411, 0.6034513715339858, 0.11023977756057035, 0.7455190621567462, + 0.05147718869090945, 0.7147111282487313, 0.5696805674941983, 0.9709040013917621, 0.06036967944034932, + 0.014612283150246985, 0.9166767563321628, 0.2625366316770661, 0.40311373770047765, 0.5025532247815195, + 0.21435108488910437, 0.9798243783734578, 0.5739305828722121, 0.28793806316128023, 0.6239167717588799, + 0.37406566441051237, 0.3482774747247622, 0.5190347818005001, 0.076674299170163, 0.5768306843586077, + 0.4976312446082479, 0.9814136675055923, 0.27607345875361455, 0.049565844469375286, 0.10597991544110108, + 0.8995087408588687, 0.73700975037925, 0.08130297073952064, 0.4804941673124482, 0.417216927757345, + 0.5241758527155814, 0.24002476710515663, 0.4166221542178937, 0.6876591523449257, 0.4711505441557061, + 0.8745907195886096, 0.8730974499330287, 0.1817547186604802, 0.8891374629356662, 0.21021471138087588, + 0.17443805597520712, 0.46547835256661063, 0.3235241906767462, 0.5102816111445247, 0.5825211920966039, + 0.053435090850515565, 0.8002659099288251, 0.10400451682164813, 0.5807023201951226, 0.9455658271087966, + 0.5639806740426587, 0.2132238043810304, 0.7610702742279717, 0.6743965324150368, 0.5239713694640612, + 0.11774132570505835, 0.8488730493970424, 0.33783156185567964, 0.20270078809315717, 0.04339066054890195, + 0.11796054635079789, 0.06312378426804188, 0.9535782537637767, 0.8036520213804197, 0.45268897119631035, + 0.8906766847688037, 0.20347606656226802, 0.3492367980703305, 0.8773308474385161, 0.8769839808822592, + 0.8830057082031186, 0.4230471625224689, 0.8709406566142861, 0.8301563006282299, 0.9951573150019876, + 0.3368827880226678, 0.682408636964737, 0.38555476839112013, 0.6751488092558974, 
0.7559655759550528, + 0.22916356934175386, 0.3591292701552792, 0.2990956825544848, 0.22783388070158717, 0.7078139195134496, + 0.18160103390385463, 0.019916212943096423, 0.5329638339053512, 0.8725956603209828, 0.29843172006984897, + 0.04672100038419713, 0.37505222809632976, 0.8509422924475109, 0.41480608989208956, 0.5912455240508804, + 0.8169683736721146, 0.9097232990844312, 0.9370399101958636, 0.0852161993078645, 0.6474144252324772, + 0.31255998356680126, 0.9636162940497957, 0.5056928530189118, 0.6556399000644536, 0.6952383709158715, + 0.7843375333940473, 0.6300582519924616, 0.6304303313175146, 0.1687913588935338, 0.2449052714859109, + 0.5471083528025282, 0.9438500977520248, 0.3578599675086902, 0.3920636261620205, 0.019843076837461893, + 0.4999959067584383, 0.7569017845526714, 0.8360554733297578, 0.23895712015875237, 0.4104198968591438, + 0.3251401894214039, 0.8156395210292493, 0.07391999485436107, 0.6987959773744749, 0.4840095934359627, + 0.2933285665130406, 0.35263305933459177, 0.6190635943050966, 0.6569010518132233, 0.771150181212638, + 0.0570997089403803, 0.7428968030632752, 0.5800125356591301, 0.9452222862822885, 0.520386510520803, + 0.1737095650275139, 0.40774398374507614, 0.4140887295190111, 0.7704869919128293, 0.027908130046646273, + 0.0065240400853051606, 0.4629280567139442, 0.8023452171244287, 0.12023138330979999, 0.46801299531478924, + 0.9154093664629628, 0.3461520909903373, 0.01073694360583044, 0.7062838976395889, 0.4579129461442367, + 0.645914194022499, 0.5729821033339437, 0.26054402962433976, 0.06184592502838726, 0.5837524946826165, + 0.14777052255092293, 0.4359744572472193, 0.10031631416625497, 0.9122617925646811, 0.2994217064336464, + 0.42899264776221846, 0.4530096690445389, 0.6673557183096349, 0.7087447442412279, 0.11647285480259817, + 0.17540724723726264, 0.6006100457854232, 0.2642370778882206, 0.15888900298069597, 0.8309895729247351, + 0.5405616642371476, 0.5993648001223835, 0.9038230251296515, 0.8800114344962787, 0.592294185051166, + 
0.06616210555202529, 0.30311312940693835, 0.9482981928899138, 0.5370158686726986, 0.14399284185201, + 0.19610213711677715, 0.01554626208595622, 0.10303584410804101, 0.2521717031142888, 0.006685486780778516, + 0.32574598871778115, 0.9837084277620012, 0.41974878574508956, 0.36116384616070596, 0.3991402539155233, + 0.2286646319500545, 0.01602436897355808, 0.5287391927451673, 0.5745184698716473, 0.47645362665459823, + 0.3465069310400354, 0.550496768676035, 0.4986516095770759, 0.5430812412465242, 0.6100351917714626, + 0.35477121085778096, 0.3725466178774436, 0.08507026174420751, 0.193883517978652, 0.4410104002330809, + 0.8741825822059256, 0.881951063803435, 0.6386639884633829, 0.8970074838147561, 0.5384981726900777, + 0.08949106458248024, 0.4349572162206694, 0.46553152686085963, 0.7471528576398411, 0.31709197750263796, + 0.8748034467656577, 0.39198790507487, 0.17031012420207237, 0.4006691050007496, 0.033163014661722334, + 0.6040766027779229, 0.36924911372488667, 0.055153633345313424, 0.02541556228975106, 0.5404307400186054, + 0.43397172435813636, 0.399171641866614, 0.8191740074744085, 0.1263032738364399, 0.8296642336644061, + 0.40102385558626363, 0.5951965601721589, 0.4093432836700692, 0.2814589646144072, 0.08611523611718075, + 0.045443420371758525, 0.5582581347178946, 0.39731676228480306, 0.13496719088888554, 0.8600411890422959, + 0.6683435989287582, 0.2107098930167256, 0.3602894011512735, 0.6058611607220571, 0.11343203569471083, + 0.2127506995141395, 0.8692434992199101, 0.5463145684918219, 0.9181883285496246, 0.6434387605847418, + 0.8331504819118772, 0.9673958560175789, 0.757878329504308, 0.6654488293268923, 0.07577365560644655, + 0.3609207908477674, 0.6189323456561677, 0.8442918486378159, 0.9929340835729172, 0.6230236637717608, + 0.03652927766049019, 0.9814545140556741, 0.5331587133291521, 0.7238906179329939, 0.4447087288051237, + 0.2448366349652873, 0.03727156575564883, 0.9330622580837382, 0.3023694760838186, 0.5765749970593094, + 0.9535968460039294, 
0.9734855564766233, 0.7582965585849417, 0.19209697094531197, 0.05726787273700418, + 0.40574433994558745, 0.014695112452643766, 0.8385520205601446, 0.557269086035321, 0.8817092100155618, + 0.045020363985128875, 0.9111472380912586, 0.6319819853679898, 0.6067375130703099, 0.6076216052900394, + 0.667191724733086, 0.6952903994293748, 0.12061729840712532, 0.07905253242440446, 0.00969060226969043, + 0.42740855550738366, 0.6342019010319667, 0.23971233234665323, 0.5930513089318565, 0.04058612627866931, + 0.7571077634496628, 0.0640676066682464, 0.2731657149659644, 0.015540583390663554, 0.20035792611947967, + 0.15784143655681837, 0.20729536710070606, 0.1646603527520325, 0.39891475752344774, 0.8678660083780976, + 0.5402271155578642, 0.6471289691298319, 0.5874445756566115, 0.18041774458082027, 0.08151507299434502, + 0.8013524426937643, 0.15396617414027158, 0.814155374844376, 0.029059099228687102, 0.5615284083328184, + 0.03911471667685218, 0.6627936863882162, 0.8012297575667066, 0.8053762153718919, 0.31779624406102136, + 0.223462212480803, 0.6786754522654187, 0.312658218279, 0.7688862105050804, 0.38868150345157215, + 0.5348036964274306, 0.7069072042307454, 0.17082743622605978, 0.7003557522570247, 0.13093225118285412, + 0.5760921142803725, 0.9671153916215034, 0.37807712808342175, 0.8184223806569545, 0.053081216742696835, + 0.5734467121152836, 0.6737512731711053, 0.561949642025323, 0.2957081174912921, 0.5580150941162714, + 0.29666308013322706, 0.614736218817265, 0.5501148618001405, 0.3313303935318651, 0.9495821689319028, + 0.7949182315946333, 0.6036603406202169, 0.6027473936155499, 0.9206606202169182, 0.9561858317984128, + 0.952254987469469, 0.5158146600263929, 0.7440050954925765, 0.6153748653498825, 0.26582106355477786, + 0.6214091550578642, 0.23869035863926813, 0.7877023614623473, 0.4882819723975579, 0.17390700972882978, + 0.15367640654289394, 0.12608007005283672, 0.29339866794400327, 0.910526857416002, 0.6616242454180209, + 0.9760409262237774, 0.0019720945481300767, 
0.05723820961191328, 0.4114889213972587, 0.1972235766321545, + 0.8835529965166204, 0.8679627416593596, 0.05081926299070605, 0.2118592726441969, 0.7446691814681812, + 0.2627564186983168, 0.3170879649041092, 0.5447613727595725, 0.40765364669080983, 0.5089518255097655, + 0.19878145893575572, 0.23543352363697467, 0.7472987403230176, 0.9994164564500349, 0.6978350374902289, + 0.7908819989051763, 0.4526118083643049, 0.3080128832782695, 0.30709579680472265, 0.37112791219698815, + 0.6638110304342972, 0.07389833653048405, 0.24117138006295658, 0.9252480668701776, 0.7564664711327527, + 0.5067852983679874, 0.7587693474678124, 0.13819457853857942, 0.995358041617133, 0.994960240233519, + 0.4609615217259717, 0.49339281222944265, 0.0056861651420593295, 0.23179919327971843, 0.25797286101779515, + 0.7051697500990436, 0.5115465251175723, 0.7296995118709148, 0.9107857171842855, 0.056356757525981904, + 0.8878633888020097, 0.7723484968464406, 0.6896789478430785, 0.8593104121111546, 0.6720522733281707, + 0.025327923457216905, 0.9904165989973458, 0.6815180647255956, 0.6933566892850808, 0.6504135564088833, + 0.8198885010069196, 0.5757192615432264, 0.8054511844077265, 0.07229128010855934, 0.5770012137700922, + 0.6094761230163389, 0.36677192991507657, 0.8640871995934497, 0.0214610612561158, 0.07881112932203993, + 0.3400577428684012, 0.36899065197734016, 0.14940439314739096, 0.3832271740172364, 0.2081767267510808, + 0.7771852023409581, 0.6804909734267114, 0.9232501934815932, 0.4704824312845679, 0.6745722346987356, + 0.0017828183696398936, 0.12856074181677213, 0.5098792059926268, 0.7123241229125502, 0.2500014917667791, + 0.4076397769682518, 0.38097827414359153, 0.006970056278145842, 0.8660263887990407, 0.023764617083646633, + 0.7473241151810834, 0.3269851554029827, 0.9692484464569302, 0.07396539104530098, 0.9547435369999225, + 0.3016474294838747, 0.05244874557976331, 0.6451322323785469, 0.639705439399999, 0.8196337405122749, + 0.7237366583107295, 0.7572502470296556, 0.0343064883652171, 
0.8378988336265385, 0.5333806120077678, + 0.11377280596671191, 0.5202134761721883, 0.25898452510968284, 0.17291177902784527, 0.7343517613599921, + 0.7208817435941467, 0.029161443719998736, 0.39907752651110673, 0.6440488665011326, 0.0636661659699238, + 0.9122205125664681, 0.1021941265770745, 0.2966015459295771, 0.29095719594187897, 0.46858704645348437, + 0.32742531610387615, 0.21714503148417896, 0.45219446231527505, 0.29574664157577135, 0.5984857063130656, + 0.7900072574432269, 0.8010006110947065, 0.35531936596642566, 0.006381472986381365, 0.6666710167047851, + 0.7284955977411508, 0.027470394942158527, 0.9069129182886623, 0.3436380384031217, 0.4759770541371189, + 0.0072386094663621225, 0.7959385907953724, 0.7394858873181787, 0.06148880936440482, 0.9288561452719108, + 0.9328152285289238, 0.5323594054882397, 0.7708410427932806, 0.9907541002604393, 0.5446432851917544, + 0.14618800867292647, 0.25800870202741777, 0.1612511518198667, 0.9763170688293102, 0.17691215046108066, + 0.5366921468787149, 0.09248317656748295, 0.913382367589849, 0.7629217260543542, 0.077607349740346, + 0.7198842980833533, 0.09117996700414377, 0.05720381657425644, 0.13730247895312242, 0.40695877570745, + 0.3181877995182233, 0.2112028523132563, 0.8928935035205012, 0.27004292282248754, 0.2441608400450831, + 0.6047927667113809, 0.46494213629693026, 0.0494559245826679, 0.7279579107848803, 0.840314656531769, + 0.021731550538059086, 0.9371481647386053, 0.025019417818456402, 0.8180472092656622, 0.8542741668912559, + 0.2810449275030956, 0.11623853276783058, 0.5270115805162756, 0.666923043048465, 0.9069146462791389, + 0.9577650909321496, 0.36105021010769534, 0.15314784624072308, 0.7386792462938379, 0.48920824111905903, + 0.9717753479147442, 0.1594182459232909, 0.5954460556106872, 0.2555411301823566, 0.8070146024325086, + 0.5318088610052745, 0.7742583756744349, 0.8223539945810213, 0.4165719145570902, 0.9841151577651998, + 0.9643265085630942, 0.722971444164863, 0.8091056232921011, 0.24894502552978393, 
0.36808962582443583, + 0.8545082119774614, 0.6614666122549608, 0.16169284447345456, 0.12249962297955819, 0.6213974734429244, + 0.63251335216537, 0.7903884011209051, 0.8730698688662378, 0.004999791113198038, 0.4257745109240265, + 0.29333836892667287, 0.09622906506883855, 0.7283655069758669, 0.520370328160753, 0.757081699795822, + 0.7300780281210185, 0.6661171718445671, 0.8209644286385086, 0.8103468174867872, 0.013241173287716124, + 0.24063391972591164, 0.6617123430936773, 0.042692028989202546, 0.47573374434427096, 0.8751462249032647, + 0.12754350413739257, 0.3576869351255788, 0.44055994570995805, 0.07638206481897325, 0.15582296153556985, + 0.6792760584693588, 0.17195183470195763, 0.8333581842842995, 0.4233335693324414, 0.4944256177074621, + 0.6232253503437171, 0.15288277073162326, 0.7262505388412269, 0.32487643534193433, 0.38086950879660486, + 0.47753412809113893, 0.4283453212466125, 0.047045471026664676, 0.2627754896441308, 0.17394323019087632, + 0.6897350375333718, 0.5355420949574701, 0.35620232843611366, 0.4479690859518102, 0.6074471794458138, + 0.583367308879746, 0.38223296756570335, 0.027278017638104446, 0.7541502332665592, 0.30159860650826975, + 0.5116411866622956, 0.20994124637924272, 0.29139727349758127, 0.7734820246317141, 0.5269192307860279, + 0.8327863161297506, 0.9235585542150339, 0.3897385475903501, 0.9667969872974194, 0.511323762345732, + 0.5225119887418883, 0.9912065523711989, 0.4336894637903631, 0.6006182974561438, 0.2855248901501959, + 0.4811839975540484, 0.5904138065735336, 0.907519623003023, 0.8243746705124056, 0.9700839918998914, + 0.35957815148880246, 0.1548091646365206, 0.29146773952655247, 0.8678718660301749, 0.1041607435740014, + 0.9888230322682372, 0.6441799127660758, 0.8860324292356873, 0.6502853921155594, 0.8557269680861541, + 0.1202626370688562, 0.15130290169888672, 0.8549833357469854, 0.5936980433213721, 0.3769599313688713, + 0.21227445443949655, 0.9399314723212036, 0.5398288735628374, 0.7933940726611575, 0.751503176418468, + 
0.5730339642987752, 0.18349454450476865, 0.5468101590366435, 0.2343122063662042, 0.33539736372103723, + 0.919287478259732, 0.8728653989870712, 0.17320401192837975, 0.3308198119618557, 0.8644909012254995, + 0.5080645529580723, 0.5312085054078496, 0.6941462596409878, 0.26566244195149735, 0.7954101254833267, + 0.2768221019406246, 0.8551416693789439, 0.9581611480678348, 0.10533031658160918, 0.03885033352272249, + 0.9234544523605029, 0.4969487731623575, 0.16182560634661736, 0.8235168961141208, 0.7178280791665211, + 0.5993064227459448, 0.5495448275790107, 0.0967320382345006, 0.16999199281502642, 0.6201627652029469, + 0.3429936953106305, 0.12262456989224724, 0.6232870615447319, 0.4862086377622101, 0.5895692096708252, + 0.7554597615621282, 0.014682307405730732, 0.2809428056808747, 0.8074509397361618, 0.5245350126500983, + 0.15179883706048358, 0.15971519514419752, 0.021043748021450814, 0.578403469379529, 0.6221482410439043, + 0.5149952176895068, 0.41149386439440483, 0.5932536163167526, 0.6095922010805379, 0.9190261695650536, + 0.504213887047256, 0.8677969158124854, 0.5353138598274435, 0.20465394690886418, 0.011121822389134506, + 0.9012316325785938, 0.8355426982863444, 0.47833468342363583, 0.6590677174117043, 0.6609686676614416, + 0.8763824510229986, 0.6177277586281875, 0.10275950645159926, 0.6373979923064175, 0.3688446020245262, + 0.19294719381954262, 0.916040440340509, 0.7320840459449758, 0.3222504692992577, 0.7452389831788838, + 0.3788131596453064, 0.7434425656523862, 0.4196558312617896, 0.18117937324393163, 0.35145457494662735, + 0.7587860532176893, 0.29410116884979864, 0.6454352542040965, 0.2938567940382111, 0.7562357965500408, + 0.9790368418515897, 0.24907151150529083, 0.6444561537012113, 0.7293654667591577, 0.7877079405023975, + 0.38841766861072746, 0.5083511811149798, 0.3156124416610675, 0.8613248812796934, 0.44763440335951254, + 0.08955613627666636, 0.7641168819602082, 0.16714435315916454, 0.8542110965236954, 0.8543685819997999, + 0.4375178515774526, 
0.31275882215981243, 0.6516835996428301, 0.9666838460585926, 0.11230121343037147, + 0.867220820477516, 0.6977073306840095, 0.5060745852275809, 0.9319798398301528, 0.4064017137229581, + 0.2139931416625478, 0.6848259546038911, 0.8302619788711604, 0.4262266284371995, 0.7907007477786779, + 0.9547401805619603, 0.27453798617609904, 0.6245938060894479, 0.9298259173278483, 0.8073846839595531, + 0.9408452347471434, 0.6532127870549267, 0.49450365052053813, 0.5063410127610112, 0.07941173435225413, + 0.23260360704614902, 0.13320891480642583, 0.9643690182833452, 0.634136884885198, 0.7940043532505432, + 0.909720976541194, 0.1804815480459595, 0.011633874310123526, 0.5679052240796093, 0.6269643730746675, + 0.622259966080545, 0.5799333193015206, 0.8297789466341775, 0.5556790470157917, 0.218862273708482, + 0.009101458254194172, 0.6743285109321938, 0.07275696573139923, 0.3913338649487226, 0.2813398172834911, + 0.07621171165711682, 0.020263648949265578, 0.8494627176571065, 0.5906969377218659, 0.5960730297829765, + 0.5308347775052953, 0.5984227615483964, 0.9035452268867655, 0.3836394483807609, 0.07059496307526203, + 0.46853234419715717, 0.3993922950626534, 0.44738956365389115, 0.48631413869148266, 0.4259768623069238, + 0.4314405963376722, 0.2200091918037541, 0.045129807440502656, 0.4960311791166828, 0.5390265154419922, + 0.734746260686458, 0.016845849803279966, 0.5961912380671638, 0.5407077178091976, 0.43681114369227214, + 0.5290638648749636, 0.48193292475815286, 0.031148844729625758, 0.5849483548485327, 0.47410859530905247, + 0.34902586190660034, 0.2663387218795845, 0.6903529909013097, 0.2452186815471069, 0.36993439335820266, + 0.9004107777857757, 0.23432046845050336, 0.4031426695590322, 0.2655051349183011, 0.9880118129969047, + 0.8194489033100127, 0.515668633543935, 0.0026904428700516014, 0.8399828534944508, 0.7758667036392735, + 0.595214084553384, 0.5220207184917355, 0.47798068813174965, 0.3309015007373268, 0.09247118229082008, + 0.5016657872858797, 0.509980783450072, 
0.5228790488626198, 0.10437280174539443, 0.607489141449997, + 0.7139487121754522, 0.5121047559499048, 0.15695083473869154, 0.5881692306130055, 0.6166695075719026, + 0.13033623050267118, 0.039751091994222665, 0.47653740126406163, 0.8242472102134765, 0.20490560382685707, + 0.29131931274653, 0.04806840012500846, 0.6783844288347701, 0.1291853650875049, 0.5665763536450508, + 0.3217630308251994, 0.6184451103881606, 0.710795665586663, 0.9629661365510314, 0.5214199950935625, + 0.8557779203016872, 0.15669056370348267, 0.21910121029494511, 0.13700517916498, 0.7699958834578208, + 0.7668561042298244, 0.7576080703418117, 0.1641298037630171, 0.5594413968754564, 0.858348804452439, + 0.28800923855585947, 0.9925322581480068, 0.005047256658422206, 0.4061196525138171, 0.6358658349540495, + 0.9230163846118975, 0.3786948046673424, 0.7008188743354362, 0.05444941639851175, 0.9034160440190593, + 0.9146615609838002, 0.4190394143016243, 0.18449171784271434, 0.7248338041487655, 0.45313613821292087, + 0.2353724090089958, 0.11891489402509348, 0.24764934052757293, 0.6088937495733816, 0.5134141762543564, + 0.8332033548229756, 0.6959386671209794, 0.2628004326454837, 0.06610917023313079, 0.9713986385950066, + 0.7553807312604052, 0.0879741202387384, 0.5221666087514342, 0.8052979814026987, 0.13378416626157497, + 0.9315663649824534, 0.27428204789992905, 0.593121819009934, 0.3622565646438295, 0.3017243955765979, + 0.09591142920200035, 0.8954195830043161, 0.7832616045733956, 0.7180920680357865, 0.4837569919868624, + 0.368175835596789, 0.9403039744300444, 0.9588321813585654, 0.2822368639498116, 0.9911737028438883, + 0.9614046281485216, 0.7761636171058446, 0.5813252606564113, 0.6661155242248452, 0.5259967311629242, + 0.47833956239785447, 0.270433496957167, 0.1830631816552385, 0.27037860742997977, 0.762511755100596, + 0.8595486107363447, 0.18181760020955995, 0.8950959753024484, 0.8903412227672707, 0.1691260537697311, + 0.7643259894599647, 0.08410308471963279, 0.5664087914586622, 0.3332765676600876, 
0.8863421090178172, + 0.3847001455070639, 0.8750764537865127, 0.505947264305783, 0.49154609127497495, 0.6385396223926642, + 0.901978990855287, 0.6792802741675339, 0.9121208474881856, 0.760787124962656, 0.3431614810578424, + 0.2884081276420578, 0.14634285835163807, 0.14102290528339234, 0.5023236360316383, 0.8831229148663686, + 0.23512264238959424, 0.6310625333575002, 0.46954959123740625, 0.7101686652745309, 0.27142371660682907, + 0.5332831573104402, 0.8090315513219017, 0.6703368413594938, 0.7201723913753442, 0.8735566950883392, + 0.5236314655203572, 0.4041760819803951, 0.7069500713054788, 0.8561418303469486, 0.9329923243619995, + 0.811272836817539, 0.07161247981342123, 0.971952717265702, 0.8492166182228821, 0.6405884041366582, + 0.17691008133532626, 0.5290392503698454, 0.6565845581462822, 0.6005122900535488, 0.5250626326106349, + 0.11552968686895548, 0.004718702350720472, 0.6988326660543989, 0.5956056235878974, 0.47140819251562405, + 0.6536268773776686, 0.14553713210713182, 0.26093787125840806, 0.1885446231083373, 0.0627832682903855, + 0.5677936030686411, 0.8914360620701266, 0.982755699525093, 0.17399413776971306, 0.12976876256885772, + 0.27063867688666976, 0.4214725103000132, 0.20711829491106815, 0.9924200869828986, 0.150176473396466, + 0.5592533233861902, 0.14469769266153687, 0.1287418543090353, 0.6425243181643446, 0.35026847559903596, + 0.689774058650171, 0.891121814925713, 0.5814189775500054, 0.9586353076711583, 0.9648866927602616, + 0.4686652233091897, 0.03753027470915504, 0.3619445957575981, 0.47000259209531847, 0.9556851362045161, + 0.3588782476445602, 0.6653943915151054, 0.572385802750355, 0.19351817215691625, 0.19424319361429854, + 0.46485014922456436, 0.6528423312813869, 0.9343549456866106, 0.9303042643420043, 0.6036647277035097, + 0.584192908495738, 0.3025126385092616, 0.8970822283299967, 0.3356082745827975, 0.9689332972947742, + 0.8899891492988766, 0.7117467980975821, 0.5940481672987985, 0.10617832873429356, 0.4183541795618333, + 0.9383487421690893, 
0.9340322061343359, 0.4202679445135561, 0.8744053849895144, 0.8493978902300945, + 0.8708130092037977, 0.940924954463901, 0.7140047195668955, 0.7316856083401261, 0.330552632864599, + 0.8311107446709194, 0.5224080690615437, 0.501084981517585, 0.2840817322674267, 0.7067469502908176, + 0.8357193202831436, 0.3535463764323008, 0.42735195305626905, 0.5749152014271254, 0.9319016828634743, + 0.9611372962147997, 0.734360899477411, 0.7488549249623585, 0.3306043172406822, 0.49110475345412996, + 0.334849241506592, 0.7569690074558361, 0.36491977602379455, 0.3042065425780318, 0.29874874315702893, + 0.5819808043774747, 0.7150121503830128, 0.41180272052220745, 0.029995160341639293, 0.8104740408863775, + 0.1919609461781211, 0.36793427064342843, 0.03779552657364027, 0.26283632645587507, 0.5958913570123173, + 0.39538371880258527, 0.8537278893431842, 0.6368670810370156, 0.43767169185007815, 0.5761098481511326, + 0.46180432979865615, 0.9920726684155271, 0.8570886811056379, 0.6861942911378779, 0.052912299602330015, + 0.445230038523568, 0.5659224156106615, 0.21076715343534513, 0.9765902909913105, 0.9214713943599442, + 0.02112106940224867, 0.4652083869028213, 0.3016271916299881, 0.9811191815839962, 0.22655336365064327, + 0.8662493188527037, 0.8714223255278624, 0.36391568689202003, 0.18467830413365693, 0.16193594019722202, + 0.9881879624168323, 0.5978441406101815, 0.5886733570058634, 0.41549672072081656, 0.6139370846972869, + 0.6274115409222962, 0.016604320970902586, 0.6297475238314658, 0.7460375000956913, 0.12726232148446426, + 0.4686401842016189, 0.4454075093994012, 0.05474063120169337, 0.44388093220216795, 0.7345031436174964, + 0.7038601512721551, 0.7597102937196527, 0.5025797683160366, 0.9532169134753004, 0.15432945738670745, + 0.14094001799091305, 0.9564139911631641, 0.5142858992076622, 0.19982892789792317, 0.9813451990818274, + 0.20989613661780226, 0.54137930672541, 0.6374132817106841, 0.6128504789331424, 0.5063168135407037, + 0.7075732542593347, 0.7995853299479707, 
0.5033032762679162, 0.8508708667154223, 0.0023469253889222275, + 0.6551933584691292, 0.9628429069083267, 0.07709068370025784, 0.04937142942795214, 0.501734341235258, + 0.4361813940309124, 0.8405678039191875, 0.8303504164119847, 0.7275992206157286, 0.17818477677438638, + 0.41942294098273125, 0.03954457895243124, 0.7307089576656769, 0.22329056586297824, 0.8275198175889643, + 0.19586968898116242, 0.9067422957458163, 0.35634075803523935, 0.4105917748111635, 0.8730576304026549, + 0.7665370882733304, 0.27055565402220516, 0.12978571394064486, 0.2826677055860818, 0.22039962902640042, + 0.38973884756513, 0.7431004950867537, 0.3803470446861149, 0.14814585358898202, 0.2798760208098334, + 0.30915309869971364, 0.660268833742968, 0.1659214622221713, 0.2903558535417806, 0.13398218696009667, + 0.3025715082537356, 0.4621663612674023, 0.6041337467023327, 0.9724620379137456, 0.6221905647538579, + 0.9409704933185207, 0.2833115811726783, 0.9045167578041478, 0.5760900471997474, 0.9719022013449191, + 0.5792598189492231, 0.5467466552441257, 0.25140995301779245, 0.6411448581159954, 0.27451562135352914, + 0.9548391114972958, 0.7381852239091693, 0.3755030344179414, 0.7537944541084961, 0.773950909057429, + 0.6967149418204718, 0.38503186216926355, 0.9827436069597478, 0.09470769004153046, 0.31720626912526484, + 0.9110741697727067, 0.6510814009115845, 0.9201746009793418, 0.45192729685297983, 0.31604985037330313, + 0.8707027432369983, 0.72878744848115, 0.1674506865019706, 0.45124168255471575, 0.12119165773765295, + 0.10933983998602648, 0.1759277354431209, 0.9677755419491193, 0.7543396908321365, 0.45164956426919534, + 0.5963728605327647, 0.26977084004917873, 0.3189883437570836, 0.7109980988942437, 0.6940538896107066, + 0.8618778193602312, 0.18616680988497625, 0.8564126272186688, 0.7797000975797941, 0.37143021038381285, + 0.229715430762102, 0.6027495229517916, 0.7663663401195722, 0.41825418709262685, 0.9270367121338873, + 0.7161191538935998, 0.8189595632288778, 0.18382234387008245, 
0.6628640127730127, 0.3743254528679858, + 0.4294233870750591, 0.718860607198264, 0.7794594699501223, 0.8846596193917995, 0.6164108991590378, + 0.9745909379214955, 0.32485647771425064, 0.0892726774993946, 0.40093259911851264, 0.42254373506295606, + 0.39393028080136006, 0.39651688472188695, 0.549841988115412, 0.048542346438529727, 0.2888982619923376, + 0.47180710264196857, 0.23386093131576902, 0.7328907292330431, 0.28708666052381304, 0.942068575357319, + 0.2528999002942254, 0.5550508497487092, 0.20540926899740464, 0.32415808102642585, 0.8205997779310548, + 0.49427223065769976, 0.10172574748059948, 0.6182015066099117, 0.0168747916309564, 0.8205573048608202, + 0.6319316826284354, 0.9707105907899966, 0.9609815770709899, 0.5465976433635854, 0.6556943612651338, + 0.34489441368775886, 0.8425690301930913, 0.09133822774162237, 0.6387195882728491, 0.944491561830992, + 0.1308772464793011, 0.31896100380010384, 0.8624981667877906, 0.045372401645406346, 0.4416695860497346, + 0.3647535046031083, 0.8898241301253859, 0.45388046690980344, 0.4539194661134508, 0.40014866124862314, + 0.6043331318409235, 0.225597828723479, 0.4351856621734109, 0.44880879282845876, 0.2578390693709248, + 0.23957843245223354, 0.853515288696181, 0.4212805829280021, 0.4476360345947321, 0.957532796167945, + 0.6760695251722348, 0.3486926036671979, 0.9016116830012303, 0.2401099429918745, 0.2686512463810278, + 0.9195041162228266, 0.04725788693025401, 0.2952466483612659, 0.6590764399603523, 0.7819920514006857, + 0.6313868952095871, 0.5364926500722277, 0.9696926718037973, 0.8751826183544384, 0.6186824179206017, + 0.374082748560687, 0.8772222262522045, 0.6879391730366049, 0.14362805694523428, 0.10258900417390948, + 0.030624092999827712, 0.2156870051567451, 0.2926490771238668, 0.5469005206327802, 0.330747347450309, + 0.6355137745938946, 0.4368436482640442, 0.045860876537570094, 0.9147336985253203, 0.25308651212453315, + 0.2033009806567786, 0.35338901995350347, 0.7763577947920451, 0.26157731245166527, 
0.7724883314221036, + 0.27765832037469307, 0.47797931448220154, 0.009621719218537161, 0.4083663877312853, 0.7793867681039637, + 0.5562228661290906, 0.3822967495568692, 0.4971085320115517, 0.08403051155541907, 0.821585949940805, + 0.9483212988792594, 0.16471906339727194, 0.06567953209136246, 0.23541673198341673, 0.6159537228461544, + 0.6586280315124442, 0.624855040940067, 0.01975335660659383, 0.1321824391227766, 0.039616998101646095, + 0.5139780751436436, 0.5502928401223973, 0.5044157520313133, 0.8990362508495071, 0.5854541000025035, + 0.49644032080597733, 0.5813122434394614, 0.9111986590276077, 0.5343968402454335, 0.9235396916652409, + 0.6256679401453966, 0.5131644608523405, 0.2879634268912872, 0.833651860040737, 0.3822293767593232, + 0.23023981839485697, 0.01963008595705018, 0.5607261624052818, 0.889307367733486, 0.8783441042121376, + 0.16987981514433037, 0.7306983499233906, 0.295553109073168, 0.7330893824590752, 0.890747713179764, + 0.8881132017490202, 0.7878211800011237, 0.3325455186050863, 0.7603864278246477, 0.2019933710592563, + 0.03616447612692608, 0.6455001733547748, 0.7481126632793694, 0.15369079903344451, 0.0024937281665405875, + 0.27709067104418017, 0.8846532385677418, 0.24117039327577128, 0.06317144943471131, 0.9444785571164042, + 0.01782358288895658, 0.833015659889326, 0.7860226400883344, 0.9177800436549498, 0.3291753575571019, + 0.6517761560249987, 0.5284468204973548, 0.07208944509965698, 0.4570602303084288, 0.7270712505338848, + 0.45622318075015034, 0.2395434350054132, 0.9075826751030068, 0.6917856339346387, 0.7522929457113635, + 0.7293668496953926, 0.312628165514108, 0.6842945616639666, 0.5786553960093945, 0.2152607906976356, + 0.28125926214922903, 0.01839101463473014, 0.07841619323189297, 0.5016077336291183, 0.9380737405885754, + 0.43171108906984546, 0.9515955951974239, 0.8566606561807202, 0.8888370148444098, 0.9668880541327071, + 0.7126335539983967, 0.2506919945376952, 0.9150342727514221, 0.27972879203442225, 0.21309296620220664, + 
0.8771565592890141, 0.8248181835344995, 0.407903336578901, 0.6261919774559914, 0.15448830170454542, + 0.650342581448249, 0.5783912430117581, 0.20117339528293765, 0.41829198319090544, 0.8893917877678217, + 0.7004838101367853, 0.0225843702280476, 0.49051685902100695, 0.4798787202837653, 0.36166205829511244, + 0.6448123689250198, 0.11273117860564708, 0.9992687859252543, 0.057081308493441174, 0.7669531195629786, + 0.5676699449111512, 0.629246729906571, 0.47706519398982994, 0.19950920991703436, 0.7174508285554029, + 0.5037478800738217, 0.7755401868853358, 0.14211726999494212, 0.9887677367889909, 0.22107299956570137, + 0.1944276201657491, 0.6247319538703652, 0.5328652692956197, 0.369734882137877, 0.7110835428501012, + 0.09684503508446674, 0.8628730794934161, 0.46245179318705176, 0.8592963156656919, 0.05845266770677893, + 0.21861187933028492, 0.35302641969598847, 0.6338801598932757, 0.10160167976213674, 0.7998408852853546, + 0.08502788350323942, 0.8256229897753778, 0.7424312741128043, 0.46458374739653474, 0.6950751911552967, + 0.2868091414051974, 0.7999388623453999, 0.7275464487575685, 0.6067369811297751, 0.5729965596000018, + 0.9416161602857652, 0.9520404706561949, 0.40670596199428977, 0.6682564382381969, 0.6131086295356114, + 0.1641702452017021, 0.8782985930036267, 0.337797267383135, 0.18595174624955346, 0.822385534420009, + 0.6123777706834113, 0.8208428029183531, 0.8401682582137678, 0.4917919572408409, 0.8253970211230612, + 0.3698147244591884, 0.33919865789353376, 0.52090449144065, 0.6661191948804852, 0.5468958323467437, + 0.11255285535319326, 0.7320548544220421, 0.790440054096459, 0.6959106780670404, 0.38521104324865885, + 0.630874201413209, 0.9378512943084845, 0.6693322726615967, 0.4279206617310426, 0.34287107711659814, + 0.6778283103105175, 0.76049658600806, 0.6099227530568464, 0.9534870041388331, 0.8061351183382518, + 0.3135908933117898, 0.577490163908776, 0.12623152983419705, 0.6535524651969317, 0.359584999961559, + 0.17281345409835414, 0.9797764544051829, 
0.9993332907535163, 0.02061427907665525, 0.6306378935655539, + 0.22948063393875073, 0.43910477439374584, 0.10744537245405494, 0.03527823843358757, 0.6022939233400867, + 0.9292656259082479, 0.3063851229351757, 0.6478418605779962, 0.18123581682236667, 0.274580874337351, + 0.8403928580596725, 0.9566116078779778, 0.38385841290136236, 0.18177771286409294, 0.9729106904357516, + 0.12687276948547022, 0.7837037633224293, 0.5999146240368377, 0.5624944055111051, 0.17312317145315292, + 0.26364003985900264, 0.27540497346446446, 0.16183836452092126, 0.3151916842899276, 0.9302003981020814, + 0.7473549812959448, 0.44922175968269584, 0.290759136512585, 0.7630771259013419, 0.203714425415933, + 0.7250341276285767, 0.8486738245641993, 0.5305480649483519, 0.8452980450668409, 0.8625290061446751, + 0.43238121230078363, 0.33179498874334423, 0.17975708729355333, 0.896779247436195, 0.8790904486702958, + 0.2737617742477172, 0.48148756105293444, 0.7653001575845888, 0.26880411882189026, 0.06207883637394862, + 0.9641308083958836, 0.6764368348021307, 0.5360141090381192, 0.7806225775348318, 0.31340956446587886, + 0.8025558162777455, 0.7650160813607398, 0.9577446432151466, 0.6591398196747612, 0.8498265045440788, + 0.7785146687257701, 0.08353643199886873, 0.5673222295197347, 0.028756677806177522, 0.33719498936844294, + 0.16771599840844842, 0.056422690172737267, 0.2539597095707329, 0.9565881351105006, 0.8729920816448556, + 0.7236230725363881, 0.5707265014014935, 0.7478252072768828, 0.22375726216544867, 0.2661133945230454, + 0.9692691635607104, 0.5333215931349258, 0.9874939038858579, 0.9988976692260609, 0.40968039604671935, + 0.3721086015989137, 0.7974920656308787, 0.37204666831048017, 0.6256315495778213, 0.11069342503585078, + 0.10936587034913403, 0.09985314299399961, 0.20784120008496965, 0.22487029006756198, 0.5846555980149157, + 0.1456483078695484, 0.9073858174713302, 0.6869534043366224, 0.37920147657949965, 0.3802741205067254, + 0.01685422866909736, 0.16856581984046648, 0.5099205190232431, 
0.44528751549689904, 0.5255654135159391, + 0.848130865225326, 0.9788332950206527, 0.6266634410552097, 0.4535665518913786, 0.4441711177478822, + 0.7855044221401776, 0.7305688676265056, 0.01523612609598124, 0.15717293533008292, 0.44153821320886544, + 0.0037458575600464172, 0.2458833558774125, 0.7416782150313002, 0.8504337218365385, 0.7103156251011279, + 0.8567806684057309, 0.4638229962277397, 0.2613138334139983, 0.9678808730797632, 0.07456554223190837, + 0.4443354453614965, 0.4533017182212946, 0.30526988294644686, 0.8257452901995608, 0.5419171954630608, + 0.47064337951537916, 0.24381333321285958, 0.7108519713439975, 0.415536874902104, 0.8935974674263123, + 0.8374378192802576, 0.841213225542713, 0.12841947670897202, 0.3681037649382465, 0.1171128433802966, + 0.4420128873271528, 0.4235651883029804, 0.36374188804029195, 0.47935505339565765, 0.809203270306487, + 0.9024914352067135, 0.46073156590275344, 0.965569299717643, 0.1986586815233018, 0.21136017914415994, + 0.9662563155132512, 0.800097586650271, 0.14278879657171628, 0.4161040834839912, 0.8047195534623773, + 0.011471745282361723, 0.31689436936338744, 0.5187960428319506, 0.46711705089950795, 0.14741533853050925, + 0.6192144041944005, 0.3916801847175707, 0.6252993714644912, 0.7748832993402146, 0.11654448019677288, + 0.13539717812752539, 0.25963438580982856, 0.5154284907713945, 0.13022574807305098, 0.30207814005863987, + 0.14681776332937668, 0.05903582087511794, 0.37974178219264954, 0.8941072542226942, 0.04385505716381355, + 0.5966109510098816, 0.1218620957471066, 0.6501414172364085, 0.2903849315904923, 0.2453921143156198, + 0.8353604150821978, 0.9660537711594184, 0.9205659804386037, 0.047165896784789774, 0.16215029165085515, + 0.7630796863199797, 0.13700805781852732, 0.610931945833276, 0.17480740656268967, 0.03739482678389261, + 0.41828651759010016, 0.6606389977925835, 0.2788621905645847, 0.3914488177801225, 0.4441622838740177, + 0.3975522239332363, 0.10151203958308108, 0.2872526558451919, 0.047072223921852285, 
0.8011166160957196, + 0.8947827819397268, 0.5091285345006246, 0.16714647062441812, 0.01161740394936217, 0.8677851927552775, + 0.8683907047008793, 0.5746399597282609, 0.15935332758897336, 0.7419322400225521, 0.8319669455323974, + 0.045404790528550176, 0.580960851572881, 0.07406350954282426, 0.5934457742614856, 0.5009929494303532, + 0.5782843487222157, 0.06197518014204395, 0.6072480115598556, 0.691645674300595, 0.792964306805697, + 0.11130921446911235, 0.22321469973125518, 0.46261195666380783, 0.4167717654818589, 0.4693623661654466, + 0.5216287161587255, 0.0758397585797681, 0.6930235580606103, 0.45153892421127473, 0.1030568165615815, + 0.02635249935934303, 0.11356474525522375, 0.956785065162077, 0.32942240892937913, 0.42051773530954417, + 0.9334847493148096, 0.8856804953699705, 0.07235118401938379, 0.2857655472955716, 0.670721350405012, + 0.6264003127307056, 0.4152414555473498, 0.8251095788437943, 0.2212019110594996, 0.24169548217584247, + 0.26079051122940855, 0.9374776597565552, 0.3681498827701003, 0.36593016645675425, 0.7153695047111417, + 0.8394878414878438, 0.34848903900547945, 0.6205681752907498, 0.24863826138177947, 0.6833346177905405, + 0.46386371877582155, 0.8369413784629011, 0.937907827472411, 0.6884079949405499, 0.9398661337823254, + 0.29900534660025635, 0.7672168466547319, 0.84861326189263, 0.9272944088782501, 0.16223145506409542, + 0.8943722290360993, 0.544476435102529, 0.3986344162704535, 0.736050608422872, 0.44562916758597004, + 0.97874108060887, 0.5220755753422995, 0.812890327006206, 0.5353224311088555, 0.8947180297479725, + 0.27172627613556954, 0.774811246335248, 0.06871267935554981, 0.22994612358090216, 0.8611639658986988, + 0.49144340847385815, 0.44684021708200905, 0.7358215307664409, 0.6264614439246897, 0.3521793773197275, + 0.5259838826599396, 0.3200192486188471, 0.9129249441423458, 0.34740460901493253, 0.9511217924551874, + 0.9789089788711053, 0.6275426783871765, 0.13488158776931714, 0.479911473649974, 0.06899821073598944, + 0.44771198237155385, 
0.005371773064485774, 0.9093452087593679, 0.45180950894789806, 0.29014474004643387, + 0.1803586098433546, 0.6157669370160647, 0.07054336065417866, 0.9992748069994739, 0.783524323965273, + 0.9825232973055474, 0.28226824269892314, 0.9069653620947788, 0.3396173190736195, 0.1590718022985833, + 0.46819822923796306, 0.6808591321990167, 0.6394051607664051, 0.01704922266251019, 0.23920615856461447, + 0.6230663482527815, 0.9563220830896147, 0.17680195243892283, 0.5691268559555982, 0.9763771250226043, + 0.35329027181968886, 0.07411148334266782, 0.8872823693194178, 0.09476251737661834, 0.5814831721205344, + 0.7948067450037246, 0.8028905964766107, 0.028283583878514706, 0.44575700214939706, 0.5526817563552453, + 0.6618782430579868, 0.2965446823628132, 0.8055790600964612, 0.6001601280190024, 0.08176609208335828, + 0.5340875805234683, 0.5769152976852452, 0.3574393966065852, 0.4588611184013076, 0.01647207891502034, + 0.883738856496437, 0.8582517839074131, 0.7949042898543022, 0.2253332552510231, 0.7432375052162117, + 0.1316170583136489, 0.5823003593699922, 0.460407132676545, 0.8791560589333144, 0.11054163355226143, + 0.01601889212357266, 0.5109227998653746, 0.18800160145525535, 0.855181759250229, 0.045639242258114, + 0.5278620767250894, 0.4957053602944951, 0.8188375924804283, 0.13666893116455414, 0.8263163568165413, + 0.665256432668485, 0.490099234358937, 0.38195995456137477, 0.0691827837637794, 0.9134980286002937, + 0.43764211570390577, 0.40472558273938886, 0.20289225585693782, 0.749717932738556, 0.7710480893850639, + 0.043343679624452625, 0.14392151951956644, 0.15256565983791548, 0.1740755627135041, 0.24479870583915198, + 0.7664993083960787, 0.28349613723639744, 0.20727470677195314, 0.8725091905212978, 0.5674839790642817, + 0.02344187504490547, 0.17805704916393394, 0.6878774899923238, 0.6407346870532822, 0.8976835567214221, + 0.8581912242486646, 0.7147420250579847, 0.9289837525489134, 0.9021465575546013, 0.5402298831330304, + 0.9215598527410163, 0.006885348395249791, 
0.6075029851447706, 0.05750017560200649, 0.4012481557474349, + 0.6572730117075433, 0.7534681094535048, 0.8283094023019764, 0.9421112964950215, 0.8767360752235004, + 0.6120206767654984, 0.23173060102779797, 0.536116262473263, 0.8806567275119238, 0.7498948503129963, + 0.28379005896981657, 0.4947139383527708, 0.3625674014885949, 0.3717905704746459, 0.5074769884182634, + 0.864000373530315, 0.15632125319186785, 0.54998292220013, 0.686297717967469, 0.9835994822435665, + 0.013082294064716171, 0.8116285296468564, 0.7096322246251744, 0.010483431427880885, 0.46473715759504186, + 0.7148077654269936, 0.1014775341655918, 0.08295477317269673, 0.9655756730945574, 0.4323217237293163, + 0.04911637174591643, 0.5286660043783241, 0.8876349137207631, 0.42958961007366603, 0.5032347190668405, + 0.105519785610387, 0.9590906670885571, 0.1698198206083581, 0.29779532324535196, 0.00865674447804532, + 0.2515278347754073, 0.35768570963775315, 0.48456086693730116, 0.3742971778877998, 0.5377552035844773, + 0.5659737120801217, 0.8648979402914188, 0.3812265057675689, 0.36821735270883194, 0.5906755245164386, + 0.04374387850635353, 0.10817677930568348, 0.36041651869823677, 0.3683646927712847, 0.7724118357267153, + 0.6664467523488816, 0.48721639857078214, 0.13664647838204846, 0.7583197618393416, 0.7271540958138566, + 0.31653853745025673, 0.956756591780543, 0.08411457415218082, 0.5788203822489756, 0.7460601981458868, + 0.35341666049248954, 0.28532830678016485, 0.9863959952464284, 0.8579613387661763, 0.834219269038311, + 0.2890403053344216, 0.5051995035199153, 0.7685055515515048, 0.6309815139491525, 0.8530634614711372, + 0.775743751009495, 0.5388915563531502, 0.9157578011705529, 0.356946405392665, 0.10878027933422285, + 0.7808485394139456, 0.13126514680785484, 0.33446316025164335, 0.3059552785109415, 0.7269563633639569, + 0.18402623113654548, 0.6287830125010201, 0.1979185986583578, 0.7066606458053339, 0.949924909556279, + 0.40809768012635916, 0.5451451899248743, 0.26360489467441417, 0.27309448219682164, 
0.5294282836269889, + 0.3430964469249539, 0.7091610274133524, 0.38265178931731025, 0.13744051633736876, 0.17882164153575442, + 0.1701658522960623, 0.3754745222006257, 0.8868153174153982, 0.561013533589659, 0.5098172940206056, + 0.32670345047704075, 0.9748323617965556, 0.49207126700248927, 0.20206778135764203, 0.17221568362493245, + 0.9056123072645033, 0.3341362487031918, 0.03971130386484134, 0.36549223445573353, 0.6968418788110982, + 0.33982383443687547, 0.8674420771845556, 0.8788508596307834, 0.787881179725703, 0.49208740489696723, + 0.5306944127049913, 0.37377102582022304, 0.830582896259954, 0.03514812256652411, 0.9166477132996476, + 0.7114870443892225, 0.4020919151946982, 0.33983491209816785, 0.667812647604594, 0.6534612299966921, + 0.24469021663074253, 0.8176952134719885, 0.012212210636933896, 0.9910690864914661, 0.4960632243651838, + 0.42021503058434306, 0.6923896603230232, 0.27032316004138224, 0.5813649475371736, 0.9960335104817426, + 0.02305153696347262, 0.5625129987609392, 0.8274539984340226, 0.5327089071792688, 0.4548763890070494, + 0.3361761444646123, 0.6092817163984496, 0.631252117636725, 0.3695240633707595, 0.6589254168063957, + 0.6487461787840058, 0.0028246048058131468, 0.6514215154032429, 0.6803455462291529, 0.8479013768678957, + 0.5166195591725397, 0.6531188425102978, 0.24290476111330428, 0.7416832678019736, 0.669997195219382, + 0.7043783427919248, 0.8438089402784784, 0.04837772462540435, 0.5456279938271088, 0.5929903865860452, + 0.5102578632878215, 0.45805718423635333, 0.592736484147044, 0.23120266878636297, 0.6141967510191293, + 0.3037063016949344, 0.5825678383174339, 0.006824005446752879, 0.9241692865061705, 0.17651995159467826, + 0.6820458838442376, 0.7344419633906124, 0.2980205213881637, 0.9995401378644633, 0.9698870801469252, + 0.9394513674332534, 0.3531069980525763, 0.09831391345415752, 0.6660241915206475, 0.1908163922568058, + 0.34869256731855436, 0.10839159692699929, 0.37416593433648515, 0.38664004715657774, 0.0031080575293124246, + 
0.5591180817318233, 0.9793977177472067, 0.14221238621459364, 0.8333849128373928, 0.014781443106305248, + 0.8924227685665435, 0.259411785109229, 0.9596273094393966, 0.9078414992111065, 0.18996568867856012, + 0.4346046867850909, 0.5511729553074728, 0.9158968009013373, 0.4384525209987299, 0.5265199448379074, + 0.7144285653376025, 0.9374139853766337, 0.7252170060411147, 0.28619462311729715, 0.12833157798394623, + 0.2518127896573077, 0.4036979926565276, 0.7411899554362333, 0.6391019385586378, 0.03986309368705576, + 0.6668266814522485, 0.26345413390715355, 0.3092124842501628, 0.6572968446380479, 0.4745168442433033, + 0.8418096703331557, 0.4574962331503959, 0.3218325792005262, 0.2011020699485454, 0.345416437315765, + 0.22853652979201688, 0.6798682808003216, 0.9176036602601374, 0.4089171199138254, 0.4826634506645804, + 0.892826659752609, 0.08473543968120212, 0.30202329743218004, 0.08198584774862816, 0.2939756693550043, + 0.5407626160195672, 0.47106772833766475, 0.09277368067405634, 0.11050931708758516, 0.2538377609164546, + 0.849759018857136, 0.3356408944950159, 0.17397597569131207, 0.9160702541588075, 0.040409531695097756, + 0.8757831629583652, 0.41086524086667275, 0.0975643079100762, 0.92198350798494, 0.08192251839319975, + 0.9439347419972917, 0.3966199170077277, 0.5318468028843031, 0.15056332356432933, 0.4378697360013303, + 0.8591156144008775, 0.17110220385347885, 0.8365785682515359, 0.10817173773651945, 0.8358589728687413, + 0.4968272468215459, 0.09740247982122907, 0.9652969723451004, 0.5982548797330604, 0.09283322151259488, + 0.7681033804571532, 0.8637822242725997, 0.7532308700636157, 0.08772881532383214, 0.8553109952295498, + 0.9207489548928733, 0.08933331980231651, 0.4832292917120099, 0.8071002143734636, 0.662962770618595, + 0.3573278220628534, 0.7556874446974421, 0.7561095909230511, 0.8320656428752502, 0.5710383447043011, + 0.7266361258294035, 0.9344730157914692, 0.3475921050297659, 0.7748962801848931, 0.6336519344349677, + 0.30896682477774784, 0.639451746753038, 
0.05655688357918853, 0.5988944537735662, 0.9048793860270714, + 0.8728020487005934, 0.5578297457525212, 0.5114474169991241, 0.5913083995219119, 0.15128113374231256, + 0.14536527022409018, 0.06660506917073905, 0.5899669993632768, 0.9548421435870874, 0.4902370966739692, + 0.5016816804608331, 0.4378864467393364, 0.4244544218434081, 0.7511342907260626, 0.5744539184349109, + 0.6126703393285171, 0.40521711810734307, 0.7838949411667171, 0.9752332441831983, 0.41193049178171437, + 0.7863628062148216, 0.059471198992037344, 0.569627027414568, 0.7365861656463706, 0.45285839554288065, + 0.35087639374767654, 0.9744776715941326, 0.0796699774233578, 0.020661211944400737, 0.5700554925779454, + 0.3453481921299084, 0.30885069322495196, 0.3825944151917511, 0.13824219174802266, 0.17307447566422463, + 0.5844264531050306, 0.6102460788643942, 0.6384107450735123, 0.4236421041964935, 0.5547317469528449, + 0.8615175421123146, 0.23974963440397923, 0.39628485951507675, 0.15221841466534802, 0.5276705844737218, + 0.136141563615936, 0.6784865479125944, 0.8506023929445711, 0.11655444412662697, 0.30777677320995267, + 0.17895768937260814, 0.10210628893434026, 0.15707188357566992, 0.04983401811126997, 0.2007824227043179, + 0.5592786361172313, 0.8336070619958866, 0.5429797006592874, 0.13060086231864543, 0.9953409475457488, + 0.7337865941702455, 0.9903626657274042, 0.927612117234379, 0.8731138711090393, 0.16708201038794612, + 0.48922652293939906, 0.8395849558268025, 0.6869146669603652, 0.23660487981636935, 0.9949116545986527, + 0.5506433099149022, 0.04570924521407671, 0.9003764942475971, 0.027797621438328646, 0.13487664612365857, + 0.38581888902530403, 0.7275677426908201, 0.4121782798774505, 0.39178072032330635, 0.20442325666991146, + 0.7214291877111609, 0.19238904353824715, 0.4775035636116858, 0.7799245364322277, 0.3731454732953863, + 0.6727030989231768, 0.10120733134020765, 0.2024014903308914, 0.32761541003113825, 0.7585651895957122, + 0.7948818869851003, 0.1442756689431154, 0.3805413570308248, 
0.8280856211848014, 0.7701015624130993, + 0.35665820454334773, 0.5252260511892967, 0.44202900613933516, 0.5760276784213428, 0.583230219670361, + 0.4894688038987274, 0.12107035373683772, 0.41699450654228554, 0.4865669590064693, 0.5404370207465021, + 0.08126119245535113, 0.28014271080084074, 0.08655465355152658, 0.22209620032129718, 0.016526796049303738, + 0.4405947919807255, 0.12642370978659012, 0.2656557276470374, 0.4216371765431598, 0.6484872904510793, + 0.053557275591932685, 0.6409267111870179, 0.9511382246852246, 0.572770376231861, 0.5995383240000067, + 0.17006554159583864, 0.4500188567500252, 0.8546083242552374, 0.4647725316995882, 0.20153691718027023, + 0.23326170024172477, 0.01315890383369045, 0.39426325071597734, 0.8998994276512082, 0.3733966066602272, + 0.45397134172942355, 0.32964008708618775, 0.49571074634505363, 0.6386188897861239, 0.2659386416870321, + 0.9466781750610079, 0.5700642762725332, 0.7443127654348148, 0.6788684453993704, 0.003780322488710963, + 0.25299092535262335, 0.2644792383676571, 0.948567811915951, 0.2090744359258352, 0.7290713857990018, + 0.7942608071440376, 0.12091375932760828, 0.769730636537045, 0.7949207193766956, 0.8999935458460929, + 0.4012742330642166, 0.9153266609822232, 0.6397417631028177, 0.7932730618459921, 0.9139261589093255, + 0.6249277454726063, 0.17267839435466326, 0.33575983390283903, 0.7054336307431514, 0.024207678399074783, + 0.7264626456186376, 0.12050534662922807, 0.9331229768733734, 0.8821061132382705, 0.8081998817343296, + 0.9328573201631474, 0.46008178816604084, 0.43351623952469764, 0.2568773496215009, 0.8025163449125888, + 0.110288863283171, 0.32474182772796634, 0.1780398574926888, 0.0800403096430472, 0.011645167911605303, + 0.6338284397777352, 0.8776886621864297, 0.1688681371321733, 0.046517463374585954, 0.9113162153995553, + 0.23703429144125077, 0.7331009654591547, 0.7521447684080009, 0.2935133188087231, 0.8011430676008943, + 0.4577428017678281, 0.6562918869427468, 0.14124453273662463, 0.13781806855612266, 
0.3871463244982992, + 0.0729242172016934, 0.374886080566739, 0.4492047565394687, 0.4212438993966362, 0.02997305868390787, + 0.016675600776181465, 0.04174927429735631, 0.87194850353947, 0.22534998061142142, 0.7568163484479314, + 0.8323620179888489, 0.23162892367450572, 0.8091875430466221, 0.23063207351770032, 0.937901877493705, + 0.39240732928674804, 0.3435443994747063, 0.49071743177539195, 0.30949514952081636, 0.4072598196835142, + 0.6826201904173964, 0.6685230997408758, 0.18617974088731515, 0.2820163582508177, 0.34105140563939007, + 0.874642812610883, 0.9018946510344662, 0.040194152331214594, 0.32325702771141485, 0.839710522303172, + 0.0678988368790151, 0.9442563612532328, 0.2964556483411802, 0.9446871229805314, 0.5847924031680212, + 0.2319048366095512, 0.4000252930567315, 0.36817407810534764, 0.14386721654135726, 0.6320054377004065, + 0.6548633698592232, 0.11336183090378926, 0.12922950357412155, 0.6307902481102885, 0.9345256694809793, + 0.8997198207013857, 0.7320329918401971, 0.36541046796691656, 0.7370442312340523, 0.5076652965322459, + 0.5921997907956869, 0.6689319576118546, 0.8114616712850159, 0.44849335211287467, 0.5664336821543433, + 0.33726991578102317, 0.4235335515063876, 0.28199088711534914, 0.007549178537996859, 0.8008289508513193, + 0.9571256645811363, 0.3377158566131442, 0.24062086517188574, 0.5686277342758833, 0.5857331388221931, + 0.15350519160231624, 0.8969991637787298, 0.11805324612354262, 0.7205200298378246, 0.4047811081188647, + 0.9748712604705357, 0.3794504578971697, 0.045459544182733236, 0.03332454398590523, 0.019225454045004087, + 0.7720219445543702, 0.27922681071674893, 0.10481635547965562, 0.000434512942967924, 0.16949733426377145, + 0.629932924911931, 0.7328482155944245, 0.10799745019824769, 0.14868647571275295, 0.871472641522454, + 0.35976913760282525, 0.741488519913346, 0.7762271183246635, 0.7063705890755443, 0.30317936565978376, + 0.7059192373038433, 0.8486260751618333, 0.7639308603763826, 0.8836081056764479, 0.8298237378304683, + 
0.12813825639970022, 0.7300307837550417, 0.7254559541642349, 0.9095614713886431, 0.17120841579440182, + 0.9814076029319321, 0.5551154847619847, 0.9173017491898853, 0.1344367542682564, 0.3959795782509259, + 0.19978057387940917, 0.5706994113812855, 0.43616496265596105, 0.22151912979593635, 0.49377832061532834, + 0.8488414832309223, 0.02037191464805399, 0.30360945157955954, 0.15516287203321077, 0.18362061085193604, + 0.35363249983392486, 0.24676409317301617, 0.3354075766104766, 0.571417552799163, 0.07769462199392196, + 0.2247221057841431, 0.5622612391668614, 0.7436186401601469, 0.902722478502479, 0.2186860749737276, + 0.9877094584925603, 0.625735250081112, 0.4572072966709049, 0.5659951160507525, 0.823956611686349, + 0.42971780541472393, 0.8535516792370562, 0.9069144222713916, 0.9536150451449644, 0.0695887234362278, + 0.02608483547574192, 0.1489182022720522, 0.8188285363764694, 0.5304962796200833, 0.6232112266277363, + 0.4895440425887444, 0.23880618995087355, 0.7889940006810512, 0.40665768450540174, 0.3352195753342281, + 0.409293758363377, 0.5632009922298158, 0.10315889274994106, 0.5679926252045895, 0.6119322638951157, + 0.24754363198257268, 0.1261863771743933, 0.7314471168807961, 0.8110850180524334, 0.9704421387726528, + 0.13260084780482972, 0.49767739449206605, 0.4878096368906051, 0.7764042173362877, 0.4791843021003044, + 0.9788362114708161, 0.6898573579857727, 0.6239404498635375, 0.7180043873262187, 0.9550871126941947, + 0.05964404645455801, 0.640562112236525, 0.7240785599233878, 0.7570452557227183, 0.6013084103181621, + 0.5681616271802253, 0.9026940823656561, 0.35727714901271534, 0.849442271764126, 0.33296705592292763, + 0.5676124594920723, 0.11417823562383311, 0.8796968875308481, 0.2972409764312337, 0.5358015363976425, + 0.8412976480351794, 0.5134973073303452, 0.4127886609804259, 0.5246978941866212, 0.9383850288709271, + 0.7137980549614695, 0.5955416255379431, 0.5520746535140655, 0.9207688505694681, 0.5990466564146282, + 0.23025025663812915, 0.47418710646330475, 
0.4992654627998311, 0.021032121111034474, 0.6526301414388473, + 0.6600822788051341, 0.4732171393114559, 0.8600931750711821, 0.5050032442107331, 0.6497879508246858, + 0.5733935619533868, 0.9904079475485486, 0.9250061471475929, 0.28504239520266494, 0.29093271089585526, + 0.8618449716250456, 0.818232545165206, 0.36049319993342555, 0.2765980640037464, 0.4197287035227817, + 0.8306184528273366, 0.9949665306252902, 0.7191590173111412, 0.589860982512306, 0.21695635453907858, + 0.2262301279083785, 0.44140532487451567, 0.03206077695045906, 0.3336339705602116, 0.922537120356813, + 0.46093894399498114, 0.09928204197631119, 0.26308236733193624, 0.23446391750979156, 0.7048648588208937, + 0.35352377242553856, 0.5167178904894868, 0.7897481242350088, 0.8850138215960075, 0.10710761533914204, + 0.18946803604705786, 0.2101569785277284, 0.4759973183178495, 0.8804845963062241, 0.01990837637122178, + 0.7218300891499372, 0.8520119657559687, 0.3707218789514428, 0.1148853503802364, 0.4407433063464362, + 0.7735989065236966, 0.13794744673886972, 0.4074685407237545, 0.011546974510698194, 0.7108045687053027, + 0.014763874241696229, 0.19239690076060478, 0.5475395269723, 0.31756020036638155, 0.9073009456383606, + 0.7993988428628335, 0.46123162085806113, 0.822601033805411, 0.5638289750586138, 0.03754648151316775, + 0.09800077183308087, 0.8588179254512341, 0.40742460909475664, 0.6110374641577164, 0.2888762398589546, + 0.38607626685978047, 0.714708236064929, 0.3047383564874385, 0.23499917039751883, 0.5938281014096896, + 0.8018821469715728, 0.3616333490092911, 0.3670814624164852, 0.6721719863283541, 0.8036271862155637, + 0.05075907918137357, 0.6954794891774735, 0.11927063162505802, 0.019774309130496492, 0.15528872006743177, + 0.9128949699826016, 0.5608173312227089, 0.6518200424841971, 0.6112066055221825, 0.7323801755395655, + 0.6617196203460759, 0.6567539339733838, 0.4534779231151662, 0.47164103013604886, 0.8962522595916385, + 0.38120544314893356, 0.5510284103303851, 0.03211268765191422, 
0.2988625100787702, 0.5580381384843661, + 0.8548152965642551, 0.6101948804292825, 0.3852016626508601, 0.2520999804714785, 0.7119080852307973, + 0.8523059012957204, 0.6895588057137034, 0.27116239353946736, 0.02679934240278059, 0.9324830833221264, + 0.5289088129346182, 0.45180535449961234, 0.0285868917052019, 0.7318691639972379, 0.3206632152155907, + 0.7536018978621167, 0.24531749042414175, 0.20503959714688125, 0.4026922867735627, 0.6364470204499091, + 0.1593408222834406, 0.24458207906468254, 0.19611407476860865, 0.36068534617339865, 0.3803087723840173, + 0.6365500133050038, 0.07230266227363613, 0.675003005108912, 0.7083697174259761, 0.6189747999809762, + 0.2239611653225425, 0.01564639261998857, 0.615168393428636, 0.08483244025053827, 0.39011732698405344, + 0.04097588737924973, 0.2147475319145944, 0.42555196411278373, 0.9468980372448998, 0.6746891242818407, + 0.7395443995027574, 0.5054425767829615, 0.1515170512735472, 0.4284208769986033, 0.9922538098241769, + 0.6954699211462542, 0.6422258554477175, 0.9454461048469047, 0.8819780661714863, 0.30915939329329956, + 0.007665503601882118, 0.2933141398479888, 0.8749579356257668, 0.6623650914360045, 0.5764194562940069, + 0.35781621241780504, 0.03250818007567924, 0.7286890321020372, 0.3829623506229185, 0.9216274976985206, + 0.5489875817497797, 0.34231326089884595, 0.8653394665212267, 0.9943813587655478, 0.43681834186533397, + 0.8692538442562803, 0.43158863314991236, 0.09548041944429964, 0.7927036104730493, 0.6115414955966363, + 0.19177177950154034, 0.28052118140355986, 0.58499139075177, 0.582986780449661, 0.9237908156937465, + 0.46035155013734397, 0.4571067809154392, 0.7199984295422818, 0.925970822758673, 0.7614027282578862, + 0.23105835243541117, 0.1428959163707686, 0.6339921472031854, 0.24114847530416583, 0.9755652293874791, + 0.023106982236168583, 0.8836712696950489, 0.027038421058647844, 0.8713871987342261, 0.7772407939204838, + 0.34646306689748596, 0.2905443263753189, 0.5407090607875752, 0.2791811783845979, 
0.7526523910602777, + 0.08764761947537547, 0.24422355910093008, 0.5203970954018187, 0.34319741897303246, 0.20548642647631032, + 0.8782572762871068, 0.15574830215294055, 0.5382255756368998, 0.32497288134413016, 0.8116635556373696, + 0.6372243583386563, 0.4467042048560078, 0.7280374952597318, 0.22160307238834565, 0.2190509610918746, + 0.9459663903369686, 0.6281099149580407, 0.012340116827947334, 0.7763897107852057, 0.8449370661524639, + 0.3727422421907918, 0.10182526861742758, 0.5681193950991523, 0.251066759796305, 0.013569741068614083, + 0.04004311080794554, 0.9316226845042491, 0.8664594133059137, 0.2339354171871474, 0.11150509179526491, + 0.7332171955958712, 0.49434568321462413, 0.7982521258185399, 0.3606415234391479, 0.27220540560301354, + 0.798142081119436, 0.7946629244712562, 0.5252208717494835, 0.1407489319992692, 0.058990671626808266, + 0.34013301543992536, 0.9731289628278863, 0.7705931645860948, 0.6224078624714754, 0.45826390204466283, + 0.9221610180122292, 0.4225145427554562, 0.9198728207304735, 0.8118347080823493, 0.291484360175839, + 0.2865428696480544, 0.09655425275122342, 0.422279320312697, 0.09107401964028494, 0.9845034346001325, + 0.33105487401642286, 0.8655499374511749, 0.8544239070898935, 0.57413683044571, 0.3380575581662082, + 0.5377010359484087, 0.6826530402002303, 0.5875594757324359, 0.2927459748477902, 0.9417966921400932, + 0.3901902896785713, 0.7310284017994487, 0.2838853644639475, 0.9184464023096507, 0.34909499138429856, + 0.07683950365371317, 0.2953760640277079, 0.2993185787300675, 0.12623281415807586, 0.7506250383360479, + 0.18482695590484688, 0.17104463062821762, 0.4279704931302868, 0.0799940492055824, 0.7701252234699344, + 0.8081299620559663, 0.7269755773571035, 0.514467815794025, 0.30809890321987676, 0.4527408474341541, + 0.27541904620018776, 0.19990082861971126, 0.41790227501758537, 0.7814301974934514, 0.5980485697134961, + 0.9179087382817581, 0.9845216591232598, 0.9396507176346909, 0.23020924351443262, 0.7480058599501489, + 
0.0011330448786951441, 0.0046096641455557474, 0.7735665725912586, 0.6432606390369184, 0.4386967187852443, + 0.48677575784463667, 0.5838145872459536, 0.09881771510389414, 0.041301940571428886, 0.5763220059146738, + 0.29897422000844576, 0.8351506236602129, 0.29270115330017554, 0.4449066024306001, 0.46302166471328976, + 0.05370052324325125, 0.7152868385523025, 0.8136843846391464, 0.8210264126117206, 0.21877382547663937, + 0.11208115710885536, 0.7998449847648398, 0.15979457828514754, 0.48688077978015065, 0.46463538069874466, + 0.0615781355518612, 0.501659665927335, 0.4161568492088108, 0.3413108285763172, 0.4843833171679538, + 0.3801591528162508, 0.3823047602372387, 0.7455349344165368, 0.06949361723777969, 0.7628864600491926, + 0.09201575395197314, 0.645571794302345, 0.23280042992790895, 0.26704532460157304, 0.146336915880654, + 0.6253370924215746, 0.9158629477123741, 0.8395269715898029, 0.3681238982329388, 0.3275718595298879, + 0.9853701246412968, 0.8969222326087619, 0.46992130379473795, 0.9603905040096754, 0.9192463862654321, + 0.33466110765337265, 0.7776245457047064, 0.18638887198050236, 0.9690416628930539, 0.3575153823389049, + 0.15478016928451932, 0.05520342499487929, 0.6886604559307861, 0.4241453773399779, 0.05495041303210657, + 0.239337041322513, 0.057319812942231985, 0.10539215761821208, 0.036110763024358006, 0.16866034623074722, + 0.11788876761766842, 0.8283608572981356, 0.8775259545733388, 0.8014154537878861, 0.7419159296260555, + 0.7456187688390713, 0.07930350328202262, 0.3840371891358707, 0.05464653075302317, 0.9006398914089212, + 0.4203484487246204, 0.661881922716427, 0.38475453366815215, 0.7507282551915342, 0.2834354132033904, + 0.9125049537329575, 0.7593610415456928, 0.20340306536127473, 0.29049318421714176, 0.8458231511971182, + 0.8302819774808493, 0.26830464752428185, 0.6910863853575198, 0.9956876194528074, 0.0886959703062784, + 0.8404527350275506, 0.8217884465206378, 0.013166154478791658, 0.6585489583220985, 0.6882838912079647, + 0.48336989237912853, 
0.6386469382259641, 0.5057189401955511, 0.5765054866954764, 0.4149013365809332, + 0.9873636008237946, 0.5684859039812322, 0.9571412460189319, 0.22442915635478777, 0.923493062213706, + 0.2400214084191451, 0.2091332653671102, 0.11112181120107112, 0.1021345195830975, 0.5461268527166426, + 0.4744751575526459, 0.5686887860771009, 0.6178369406531722, 0.25780886098437783, 0.5910888667494147, + 0.4499852761266794, 0.72009293077925, 0.1598971162689926, 0.6479608898741379, 0.22729351466546266, + 0.9735765524556603, 0.13965129561398604, 0.1519678427188984, 0.449518001300781, 0.17229636020605565, + 0.008119853591463388, 0.6335660711568952, 0.8302021828977706, 0.5949531430069489, 0.8317022548599013, + 0.72014480328128, 0.3268334821329065, 0.689614323347893, 0.9101996879909243, 0.39538436184127157, + 0.20698520808473286, 0.21159551944883026, 0.584846911953614, 0.007227759093389796, 0.5453855741313447, + 0.10146617670891556, 0.06756255237648279, 0.24725119102613125, 0.5974449670534565, 0.9929469938227945, + 0.3410887446172445, 0.3111012602083213, 0.40260618408882554, 0.17773481296521387, 0.694135267124367, + 0.1887944424218605, 0.8901939529401085, 0.5601680769016817, 0.40425893993902073, 0.3172253366789366, + 0.8695944550556686, 0.4941301159952066, 0.12062272582698275, 0.20885645611185755, 0.32207082748055327, + 0.6933036021889717, 0.7601945732076757, 0.044697392809522896, 0.6598837277308733, 0.8898081274354233, + 0.5243670287980848, 0.6041089549577633, 0.061088724940269956, 0.1578742270253738, 0.5277619249234302, + 0.43829584348459527, 0.5059700125959298, 0.583329731185928, 0.8454669290340412, 0.17432456966039822, + 0.4726918839808477, 0.22123986869422485, 0.17333437554409659, 0.4728946140815581, 0.6746240565268825, + 0.8044245582700752, 0.8097383632465782, 0.48148138531886664, 0.9322045891553753, 0.8254461306038644, + 0.0873050255447223, 0.02897673501529685, 0.11548158007973064, 0.2706026363671554, 0.6644302721854632, + 0.1629687812514684, 0.33403607376064093, 
0.34762191471018733, 0.6654988331348484, 0.7460070857528022, + 0.7335881364552677, 0.7836905967023933, 0.4606721900167673, 0.06091987428217416, 0.4780587112592941, + 0.8680415692959852, 0.19847971790522323, 0.37273573423992434, 0.8262021081597541, 0.3066359885873061, + 0.5521008370409946, 0.8063472246920956, 0.8710259670292382, 0.5095401920748936, 0.7986167858838316, + 0.0034702802374645403, 0.9269269913872479, 0.6388704889727459, 0.5483650372238402, 0.9408737943208687, + 0.5399616600449335, 0.4916182512866716, 0.1129263390569143, 0.7398024918244763, 0.5852357357645377, + 0.8468437998305749, 0.5726871714367635, 0.9887094110644128, 0.913062446521641, 0.9342095456865167, + 0.07754944950252463, 0.4443004295045597, 0.5906138559947207, 0.2826794284901133, 0.15233585178738585, + 0.9120893867917101, 0.47350179636159306, 0.3045216529564212, 0.6120883067417713, 0.9257015333325467, + 0.17513817551614608, 0.11362529375648744, 0.3042403932758301, 0.006522025987297897, 0.4445141491022333, + 0.5620976940039826, 0.19835802646630218, 0.3896743717075899, 0.6929273414953148, 0.33239649060349685, + 0.3260274482123703, 0.9927516506451334, 0.5543415576823114, 0.03550029963290502, 0.27073100604147626, + 0.7045313348005063, 0.3160127059554184, 0.05092505280837156, 0.15977908322972212, 0.6380049930044317, + 0.8221906592313165, 0.03806040310382919, 0.09747866884483236, 0.027569329714363544, 0.9258101560031783, + 0.3217919500963162, 0.7468745355158487, 0.319328950719343, 0.2737124355041701, 0.26588339010875306, + 0.8021121176970554, 0.04169389103344068, 0.7169158360247077, 0.08718520103601202, 0.4153407892174844, + 0.7466789426089376, 0.039159864911258446, 0.17638146385127207, 0.9538105845281649, 0.3603543821900498, + 0.9839038093322251, 0.3576315841129001, 0.3733472673422079, 0.18646045342165707, 0.26041958721495495, + 0.9413924600940591, 0.846614144738822, 0.38614496242342633, 0.40700338285058846, 0.4196416940209917, + 0.3746838165756712, 0.09579296648497204, 0.4930246833981383, 
0.6236013281003396, 0.9862951952239308, + 0.41479023583927566, 0.36527705139863653, 0.17274593484635548, 0.6472207926778464, 0.7718325188263199, + 0.4728266709022514, 0.058004606645780776, 0.5922518768547997, 0.3353983799725999, 0.4469683001113818, + 0.2727673702796928, 0.5140562631905672, 0.391786671938142, 0.31651144775946993, 0.28056618353981855, + 0.3274459233331143, 0.05996678309162495, 0.6118979335816942, 0.9252405494133178, 0.481769245556417, + 0.544365295209721, 0.0489769186608936, 0.4708545499391589, 0.798263508700279, 0.9997782794709145, + 0.5266610728808607, 0.33248902670941627, 0.27917012326359547, 0.6099656313302542, 0.7157034991892711, + 0.5020612990879311, 0.7529453894692695, 0.5270016494576781, 0.9386818972170392, 0.4289718978976914, + 0.8685730450479259, 0.9423508904927929, 0.8761310051370164, 0.29042002312156945, 0.8435776750579944, + 0.4415854175435997, 0.3280453429268567, 0.32805008435501715, 0.3723289401247849, 0.554257265934591, + 0.669766773875195, 0.04324453367062164, 0.019029489908541586, 0.6993674579793832, 0.11545055159651485, + 0.3042513451812904, 0.931642372603253, 0.5591503900888554, 0.10652631262263978, 0.9573648788529169, + 0.5164997555615614, 0.07890356217660877, 0.9072351602315323, 0.07604347769608344, 0.7003900652644465, + 0.5872592657410375, 0.22839932874366842, 0.7746435316043357, 0.31506504905359833, 0.6339025873724357, + 0.6322458864887919, 0.8448880299332934, 0.9698978138205689, 0.7492527219715809, 0.5173376718101484, + 0.36036090117244535, 0.48805794749519604, 0.1342831717809242, 0.4572433834414129, 0.08109303897767906, + 0.6316112686161911, 0.1928349821289902, 0.6857498932874878, 0.3226998675961492, 0.421896498374021, + 0.6404579579008204, 0.7477498623206709, 0.89765319546332, 0.35588857689161657, 0.7141631816479277, + 0.33307250821193424, 0.3113374834249292, 0.33177305576746974, 0.8141282194617936, 0.4376510140877625, + 0.9521839748237815, 0.41217768775258523, 0.028239838981191756, 0.6883659723332324, 
0.0005544396012938657, + 0.6839395473383374, 0.06250832538222273, 0.5553286359038312, 0.15571888043468973, 0.696364377199371, + 0.9910886532410411, 0.8581814906898508, 0.5953320000760146, 0.12326813056732122, 0.5263917597266183, + 0.4314584744791835, 0.6760205263893488, 0.9805502060045745, 0.7035636148980582, 0.5815304378349276, + 0.5233485245426599, 0.40137429221874177, 0.4792099190040625, 0.09227551877344364, 0.5560880023462415, + 0.9138651825969223, 0.17591985450367564, 0.6701082359825398, 0.5263125206029599, 0.1613353654204155, + 0.16855181451904921, 0.23497771081483432, 0.7623662326579758, 0.03375334847886591, 0.9612302205534416, + 0.9594597891911049, 0.17657763720871733, 0.343891035523577, 0.5409058382220656, 0.027264003005508686, + 0.30051518591984805, 0.19327677378353925, 0.48308256924131066, 0.5329224371023705, 0.6856836014807042, + 0.34532053852101063, 0.544976349363945, 0.9343521824302912, 0.3386744391906218, 0.7050182640777745, + 0.6570906755288711, 0.9728233675392415, 0.8193336605123857, 0.46803897396370153, 0.20081428812230717, + 0.26064327745977856, 0.8704327821118215, 0.3201168864152234, 0.16496333366469929, 0.5375196384703764, + 0.8797460539654156, 0.339102401859497, 0.2913567334971431, 0.08678033642586569, 0.6035609583574784, + 0.035298194962102625, 0.9497078523551619, 0.15212428578974713, 0.46418539444849904, 0.11119042200562312, + 0.7899696721033281, 0.6828857566949451, 0.6237386689865467, 0.7587009748225302, 0.9015715876236244, + 0.27021373646647473, 0.2566137507266304, 0.8727492048177043, 0.9284561220489054, 0.609704956666533, + 0.11143923073027706, 0.2919740932747368, 0.9593713052664207, 0.08706421151979982, 0.4689548882768366, + 0.7440157979638651, 0.450099020479404, 0.19335035809894263, 0.42437938271556663, 0.7731211548208213, + 0.21712976824044317, 0.024687670663182115, 0.8406159199052989, 0.562703423536082, 0.051695957356858835, + 0.6526117391926302, 0.5055139243140477, 0.31123769846007554, 0.07106570646255728, 0.4246315906129178, + 
0.26882139992693865, 0.5169886368595682, 0.4083536079157265, 0.7847214891875106, 0.8283238545828828, + 0.07279876409050867, 0.371751750625257, 0.7429369572881734, 0.5347933116349726, 0.47650715873622274, + 0.5672484307645792, 0.6656825714716701, 0.11046956750839054, 0.12141326751228354, 0.9554925536752018, + 0.92925089506823, 0.5594898611548063, 0.3515377708700119, 0.5413411836147314, 0.8922976074326969, + 0.13988354060716401, 0.8515925083639553, 0.6479305574066975, 0.14222191263329886, 0.17393159484677978, + 0.15660698078116086, 0.7795824294036924, 0.7661008988539012, 0.7876461146220163, 0.2202710861878927, + 0.27959431918054156, 0.8203033059292821, 0.6215820211300894, 0.5789031170672604, 0.27151517003644954, + 0.6906603418178786, 0.4943221721184726, 0.7173841503152356, 0.023808502086472783, 0.7624456490986126, + 0.08703644719297532, 0.9459097689718978, 0.3369634546367667, 0.25872130114257286, 0.45320150271340875, + 0.38622658848873337, 0.04016849947250056, 0.29286431266092783, 0.8437133274751509, 0.22415129384088184, + 0.2547572980121141, 0.3576928660408618, 0.29356840772261694, 0.33334492368819824, 0.6630914536733648, + 0.35087933227820634, 0.9537661948067595, 0.47796310052161595, 0.9083549955168818, 0.36024594603031235, + 0.057817385357112006, 0.9713430412884677, 0.900136510102264, 0.7206965344196591, 0.9247091165490395, + 0.026840409778698948, 0.22712160695682926, 0.48341813866074834, 0.5254416902917948, 0.10626717225760729, + 0.5747932773077489, 0.7623622526539375, 0.8451469833435137, 0.8350934497031639, 0.34562032709873225, + 0.2805892361107015, 0.35612958329179345, 0.555267626168172, 0.7468050858074814, 0.6391395164788279, + 0.9063192828456266, 0.9307473621062423, 0.07541107212581022, 0.8195407479955955, 0.6032033762053018, + 0.20916340967915914, 0.504131622811286, 0.8120520444453977, 0.7617576730955915, 0.5418266749901783, + 0.037292689143810454, 0.8524075649022169, 0.354268574182949, 0.3637421583897654, 0.1743611165086295, + 0.603944892304619, 
0.9009402766942838, 0.4323359407198708, 0.20021436065605147, 0.6560228515508436, + 0.5010804319385781, 0.0511287010591398, 0.19073657771073915, 0.1714095928292927, 0.6531642502559213, + 0.8658555927642224, 0.9302590389088694, 0.05112151871975479, 0.2780018830917972, 0.28516085841456607, + 0.16069473469090123, 0.9892740025423785, 0.8795785103654621, 0.5689480104305605, 0.30681096946499076, + 0.4262876046960352, 0.7072446420453272, 0.28453323125146823, 0.6954354518265083, 0.954168045398346, + 0.253042541373075, 0.8288202709391087, 0.5831778759655847, 0.10389955201508994, 0.37122076741701515, + 0.9658834893244377, 0.5662946488122427, 0.028272242973204853, 0.4411772837741379, 0.12062550495013824, + 0.6073570264978326, 0.8653411787427259, 0.4151393341516898, 0.5808638161379612, 0.6138833085969274, + 0.8977395746882985, 0.5232019771811524, 0.62177229006521, 0.31740316568618476, 0.995086701350316, + 0.6388102279112748, 0.7711053581649382, 0.2625382090756866, 0.647632816673685, 0.6581783282449519, + 0.9590296395284202, 0.5443111982046303, 0.4650596113912554, 0.8033217839455645, 0.6061443765014596, + 0.6114861481980656, 0.7123998075864238, 0.49441569633943294, 0.38497775426200576, 0.14824853791205372, + 0.35778646431847727, 0.975353509593829, 0.6343500156066957, 0.6197857186803352, 0.9794661991030524, + 0.9389635184510942, 0.551370359461987, 0.4078593762317797, 0.19646536493085176, 0.5247929373445805, + 0.9967210254640664, 0.6600717068513265, 0.4532515547842052, 0.5584347001743613, 0.42540865720158294, + 0.41214267127589577, 0.24027570208833993, 0.9454412652249871, 0.5545422420140109, 0.6420106406501698, + 0.6864740877504173, 0.13283260317442536, 0.8351977892958619, 0.8838429412810814, 0.6197226585027977, + 0.03692634038821818, 0.9940592334107295, 0.6504233123061155, 0.36915777147398576, 0.21294932033510716, + 0.8883579550780574, 0.9434669752732507, 0.7871484874698491, 0.6015894352197685, 0.13503309950676634, + 0.08116878671607486, 0.764054517679632, 0.2380319816390839, 
0.2108568990053281, 0.3364838531266461, + 0.11116497983504847, 0.8833894434336861, 0.4600334563063194, 0.958019660901518, 0.9707937123977975, + 0.3963534066253034, 0.6435509378707076, 0.6841863126530163, 0.36245169039605396, 0.37368579647818745, + 0.5354520776982417, 0.3849654845238749, 0.46892815849390534, 0.23618297415897105, 0.10342694193424029, + 0.8130884183622571, 0.3372389590811845, 0.8801979013903768, 0.042984279427440764, 0.7097337090682005, + 0.0814756957093864, 0.09154636360078883, 0.22527340154919095, 0.8839499357077213, 0.9451737260947497, + 0.07343567079567803, 0.23613221737731493, 0.9815290846929139, 0.690482753236433, 0.2759794348497405, + 0.4170426322035621, 0.49404998471487127, 0.2979102609086396, 0.20242075139504323, 0.6308040496182686, + 0.9270879507334671, 0.5193592698834862, 0.09518922592288293, 0.184435600244499, 0.39342780802849675, + 0.4463700453590007, 0.7687300247608775, 0.08593988677945608, 0.1731180592617615, 0.1806928628089064, + 0.6049544192717783, 0.10684971414479749, 0.413820958232727, 0.8031307423689796, 0.15606753675477536, + 0.13627177422745584, 0.48756484632735764, 0.4582096265522544, 0.08831411721637483, 0.7632283101577293, + 0.02797741131713738, 0.029511777050217902, 0.7607992497077846, 0.4836044251840753, 0.6522222003611574, + 0.17085040117637085, 0.9021095268566008, 0.8821334437464305, 0.6079746774750799, 0.21092625798587494, + 0.825916723970538, 0.7752651025076657, 0.4365061658395254, 0.4321089498057512, 0.2630724753590308, + 0.6410293801355194, 0.5652713210043107, 0.9398366430432499, 0.21958050038632437, 0.45247477547826354, + 0.4015590918589206, 0.38331125211299344, 0.45265698803227483, 0.1824777450505426, 0.5395044469777021, + 0.15241090515727973, 0.869737588686086, 0.5997374570749519, 0.1878326123802151, 0.12368498617336277, + 0.5366724972603243, 0.4015713567566792, 0.478066524133525, 0.656482385220244, 0.08128451465557396, + 0.4148539849835965, 0.46830393917091984, 0.48206962292530675, 0.2514376495449505, 
0.7037771193238919, + 0.2049599520138644, 0.9420549010801207, 0.32802440380973774, 0.1168765737873787, 0.6992622579437291, + 0.5748531044001729, 0.9881684742546766, 0.5698486454313082, 0.9592638683873683, 0.12705650699996474, + 0.1821414994404741, 0.3404105387119726, 0.28741899750722677, 0.8248769541178915, 0.9501705620084879, + 0.7462544756466176, 0.30387021154632554, 0.7316891181443752, 0.5467577202065238, 0.8234489468009617, + 0.24052718062954348, 0.6841759063083684, 0.5940203198372817, 0.2313175487855773, 0.4454232374969589, + 0.9572113494446665, 0.23868397496750438, 0.9240312046556932, 0.045157510546772084, 0.12026425575659783, + 0.06320300150779501, 0.43913115040206707, 0.7441211348712762, 0.33642392601573956, 0.4226785113938254, + 0.5263586841009785, 0.4372834941523561, 0.06996075838814153, 0.4557765640424808, 0.5690574636850873, + 0.07104922123194513, 0.22158359078917667, 0.2779275763216398, 0.08544569817220382, 0.3223622692128285, + 0.6085360064683598, 0.2275507094394459, 0.39370086150224026, 0.9893669704468705, 0.5381972925676857, + 0.5208126143641184, 0.21282962530474703, 0.17940411248896482, 0.36826508650075673, 0.016104271412145765, + 0.10807769443396598, 0.6656576506377743, 0.8232047191609424, 0.7898464114044067, 0.800213043207197, + 0.07194530529495269, 0.09976295787416634, 0.9726500093164777, 0.6172403673579926, 0.32877090352709437, + 0.1787383519554343, 0.14272285748915792, 0.7217488225808556, 0.7362919863903333, 0.43304795625468573, + 0.7124964227990281, 0.009694054497233928, 0.3767839449361807, 0.8335823741701612, 0.2520012203321753, + 0.9366182058978678, 0.4258744687145989, 0.1996009431050395, 0.694581010115562, 0.7550021193863347, + 0.09107391429704748, 0.5411736112007386, 0.7593625966063166, 0.7550159639326586, 0.7195045155415846, + 0.36194505140217215, 0.8651320422128966, 0.417142640384067, 0.7001381695568094, 0.9668663378801738, + 0.8801002863569317, 0.6107489839813416, 0.7759858368909576, 0.9758879795439324, 0.026242611743799604, + 
0.8109795801219014, 0.142615405116852, 0.34100339465861207, 0.8362647124554538, 0.7913231043578712, + 0.6255897481619649, 0.8350000536947025, 0.13194363010991073, 0.8855679365824665, 0.7496880659014675, + 0.1974426158414404, 0.8815449332315608, 0.46453609235193016, 0.02909398869545532, 0.8231118518743965, + 0.3158338658241946, 0.015614986936861919, 0.12559306626443711, 0.07350972816646983, 0.7262801950906309, + 0.8214383831452806, 0.9141209729516928, 0.4016534611332959, 0.572991105776365, 0.6361553043914702, + 0.7210276396962081, 0.28316629596911624, 0.9286396438149648, 0.2396905791925743, 0.7309278059084576, + 0.41757380393516597, 0.5600749778204993, 0.3447772771491663, 0.7080571519865836, 0.1862477305996323, + 0.3684919426938241, 0.4078485614096443, 0.9992788103760902, 0.0751126345361981, 0.6876053258257588, + 0.2034030002535041, 0.6248411361407403, 0.24531078815802299, 0.8854601917895091, 0.45957304568000146, + 0.15932971124280404, 0.7210266310300115, 0.4553199856685639, 0.45034256302789444, 0.9746418545233345, + 0.1559101207769158, 0.4710974856960465, 0.6748864174739727, 0.8189753374030909, 0.9700832504990944, + 0.9378619415351117, 0.833357598630905, 0.5370258527629953, 0.44527648107489015, 0.8092866880793879, + 0.08446757247159808, 0.6533677683246052, 0.5709920481858723, 0.9031317162919433, 0.5209286507313109, + 0.35503377813850834, 0.5127947919915399, 0.314034130582058, 0.6799716994923054, 0.12003429826155698, + 0.475973424469875, 0.029878397812445723, 0.6173847610379402, 0.8251970165155197, 0.8469731253688977, + 0.29828458974382177, 0.038674129987527395, 0.6894306836697651, 0.8022877763461331, 0.8973748137717328, + 0.5183593602300609, 0.5203793454636297, 0.8799487648515179, 0.16657995091485, 0.4036231641028515, + 0.964399823242302, 0.8111812248865525, 0.01467926489272231, 0.2306706727643011, 0.3255465227081581, + 0.9081205442121948, 0.740168423589265, 0.5206164155771656, 0.0007816594643210717, 0.683115943322798, + 0.6249174479831543, 0.14409662943894463, 
0.6242011279883033, 0.8581198613701222, 0.6447245787246012, + 0.3183705513105286, 0.4032157204911123, 0.29663092205552966, 0.3947702156702956, 0.047188214883748136, + 0.2719413967526857, 0.7536961258543396, 0.524813765879986, 0.8519165957545065, 0.8459584043208143, + 0.08623103774330587, 0.5041566330672386, 0.8420355495482482, 0.4328374837170327, 0.32569915532223015, + 0.26708860374479326, 0.9409673661998634, 0.2873602299039645, 0.40259979725169626, 0.9192193500044818, + 0.7890490241768431, 0.8643163981007268, 0.8662056209294271, 0.9668877810883133, 0.10241243397612598, + 0.35671955259898025, 0.4073355559796621, 0.709097356688389, 0.8057442223850726, 0.2667058011628922, + 0.18096279993942266, 0.13683175689916338, 0.6220570882647428, 0.8227051604417782, 0.7492839118460441, + 0.15424023139548493, 0.45767420207236686, 0.9934311827779357, 0.35225351006597916, 0.7452851826741589, + 0.6443321962903846, 0.28929999051164557, 0.021296368752716455, 0.2641883880152509, 0.7532401820884128, + 0.3674268356992164, 0.3872399882234452, 0.879930869709762, 0.8549166605018542, 0.7265730836727542, + 0.40789339651046574, 0.7249746280900482, 0.5581752403898145, 0.3622653866890835, 0.8002956394633721, + 0.1738729983399271, 0.4875114572766778, 0.0902889111839571, 0.8932680697470176, 0.04746249838464589, + 0.325118829099289, 0.5700663279080852, 0.39982618378512635, 0.744196651206333, 0.27498739781393966, + 0.772017425704287, 0.6200108675153606, 0.21501592799343128, 0.2614347922741589, 0.1738693992655882, + 0.8834673402962447, 0.8792110943736163, 0.19761474295139136, 0.7002120069630986, 0.1739704138755046, + 0.5912730607588347, 0.45823718255184787, 0.8185858110520337, 0.41396766909159144, 0.8803109226673347, + 0.0759634068093169, 0.2584578614695483, 0.3740116130874709, 0.7365411758755057, 0.4867300821623828, + 0.6330476804291267, 0.027842806433470257, 0.8733502676473507, 0.14153261130904082, 0.11397632556039095, + 0.9528131530354661, 0.22527981430519306, 0.7741138011050145, 
0.7902605589928353, 0.33901726435506596, + 0.5985268666493162, 0.8516311115854588, 0.07764893163844522, 0.374983715147429, 0.9439699149880643, + 0.5761269443602227, 0.5081222037332938, 0.21711521074809348, 0.5670322913623921, 0.7202401713375974, + 0.3179667535946573, 0.8051542386094057, 0.46516466439551873, 0.09842482951586506, 0.1465489941076854, + 0.7183202889810175, 0.4169045010747703, 0.43817156837719795, 0.5658707451344761, 0.9841541220244447, + 0.4521978180500349, 0.3244473068897339, 0.9780906882547484, 0.9587394945314034, 0.9537231450582898, + 0.6140243181385397, 0.2697266620196902, 0.39201854990125595, 0.5231389048691316, 0.8136337980643848, + 0.0850422709244194, 0.07532927925196109, 0.48678126682434475, 0.19813594600550655, 0.2689176471525758, + 0.6058892678764306, 0.886221161708688, 0.5595282565509634, 0.2018225048367598, 0.1337137105244105, + 0.8857683318672712, 0.8054379291434759, 0.6004074484706452, 0.15353250661433127, 0.168690189073049, + 0.16397199559366327, 0.6213452509644681, 0.6693649854283488, 0.736860992143695, 0.6155947439024998, + 0.45084083387040386, 0.07136536459018394, 0.6019964084128929, 0.6919561856409583, 0.10413862217829029, + 0.674313300247574, 0.4266327188633878, 0.8670071039488126, 0.8387597582464331, 0.7822659087928389, + 0.1272752399285857, 0.48134478777549794, 0.7457498243240924, 0.28981426825312817, 0.43548752001979074, + 0.11104971195587443, 0.25973998325194847, 0.4519151499337386, 0.18570873106510322, 0.11303582169867699, + 0.8657498274959576, 0.6641771250120069, 0.7569759738807702, 0.6552797366311471, 0.24926754799169282, + 0.5928000639712934, 0.9667785325782007, 0.8919781153413793, 0.3736505544359441, 0.2417298957019205, + 0.1029787726217739, 0.7693498285943238, 0.4223776492079895, 0.4279699300119745, 0.29918287690703327, + 0.5041061412134665, 0.8629733346170192, 0.8219128899172709, 0.07148858957180404, 0.011821732492649617, + 0.8268517559537538, 0.5105842168972987, 0.33516198589398927, 0.5899298184564171, 0.795154910460929, 
+ 0.7207692014518258, 0.14129066807403834, 0.6298022872720083, 0.8271357939963007, 0.7990800793357093, + 0.6672363925703474, 0.09800901010960317, 0.4079606230176146, 0.6729821956263352, 0.5890368667325236, + 0.44426078653741263, 0.6832153824587172, 0.8270776312910332, 0.0636306990727582, 0.2373804021632232, + 0.16236494524331746, 0.2433854540049678, 0.5862625046816455, 0.736670366105544, 0.5361004674386493, + 0.9151111071436309, 0.16801040188956518, 0.5326758874470737, 0.7057560949622691, 0.8670419050715236, + 0.18457121777842234, 0.826885272168958, 0.14025741669252112, 0.28089396295739555, 0.9103089717088566, + 0.18246126378534233, 0.6979957948358568, 0.9626679561442861, 0.5933988483122893, 0.6454616564670834, + 0.07408469538304985, 0.38637634097511475, 0.37503208580391456, 0.5278925431047583, 0.3704634737497141, + 0.8294992246898419, 0.6122870882835016, 0.5889782624116725, 0.2862319274790316, 0.9663994445996943, + 0.492813912885392, 0.42836291340104327, 0.08388471898169403, 0.9443346878887307, 0.5280640854808734, + 0.220807779759849, 0.100285898322934, 0.15539988881750266, 0.0853182608183699, 0.7012294431180773, + 0.9898740414608184, 0.5241850393949182, 0.37707693171221324, 0.5050074229833029, 0.3997088170199421, + 0.32308222946529697, 0.8391527877967326, 0.16882656155304765, 0.9785787946765795, 0.3455452129171409, + 0.49104873953578954, 0.2073340620595716, 0.30383790045962555, 0.8592208182446371, 0.1614648036783346, + 0.9810050527585037, 0.8315379475578711, 0.9166092655465123, 0.5581198341186189, 0.6699502384969421, + 0.38587182031740364, 0.5218472658141504, 0.2643482802688778, 0.8839022498546698, 0.2801474548420625, + 0.8802332212266368, 0.3983498675776338, 0.4000824583064799, 0.5952104622986186, 0.6633346421171146, + 0.47211041129385045, 0.20850155808931647, 0.8443761891921956, 0.8713750674688749, 0.49066173812254776, + 0.10731210127948998, 0.08158507934625181, 0.3182176014915262, 0.07122834882910778, 0.37011257768537587, + 0.23681376365818507, 
0.9747082071259978, 0.7174845879037445, 0.16115885380524853, 0.4995576112155681, + 0.8906353143652719, 0.6173222623180591, 0.1740617558512425, 0.905106955267205, 0.2851201794415653, + 0.17066866095976363, 0.41643556211146604, 0.4555789707332867, 0.9414116689067613, 0.718355781012543, + 0.5510327662592338, 0.6635670678822588, 0.9369116858322922, 0.6938786814292451, 0.5390875927552676, + 0.17121007923702924, 0.9348229216473823, 0.6618639075280947, 0.9062259888354912, 0.389642200487166, + 0.6781213286269252, 0.5399042952399655, 0.4143368691096895, 0.16066443230022687, 0.893678242874481, + 0.200092780098038, 0.782133463957693, 0.7030453425489953, 0.2907309643846363, 0.5187995753517216, + 0.012585292018807648, 0.6671757942343677, 0.9238135456822758, 0.3390631822151424, 0.42198702169509017, + 0.8998696087333117, 0.8574562396166349, 0.0069245931950348005, 0.061335986073354776, 0.17954298805181967, + 0.5993052769866493, 0.2573906938056304, 0.47394783343728475, 0.5711465941254845, 0.40364621360491193, + 0.25533358268866047, 0.2938105452548483, 0.4286130784144355, 0.8464288874189271, 0.002003939753644879, + 0.26505658560426537, 0.8085584603221163, 0.2521616143418479, 0.6037402270698863, 0.95198283964587, + 0.24280540851751664, 0.5237624049551446, 0.6371201131747636, 0.1851188003035632, 0.9985351294686939, + 0.43341924721023495, 0.2975043024619203, 0.5947376655456239, 0.5937161150633878, 0.5141013083273842, + 0.9266648759519832, 0.19953648296076, 0.9895438423213186, 0.03254451880979237, 0.8063916181716487, + 0.6092400516555709, 0.08096035197485107, 0.4970246247239263, 0.4977056165126006, 0.5644402318460526, + 0.05613790997615564, 0.04856859926822765, 0.30389832264220895, 0.947430126911431, 0.8529307005508067, + 0.16126708154990366, 0.47183605415452234, 0.48372259475122814, 0.2219900256462357, 0.11808993549145452, + 0.54952656795131, 0.044274160853165045, 0.13178543622219463, 0.10223509532411146, 0.9478108326140662, + 0.9813177822011769, 0.38296405375145703, 
0.8775096765310761, 0.3614757631552219, 0.21142710215953953, + 0.40483603273066926, 0.3713334553910622, 0.6066448483664519, 0.26630784526857365, 0.9346027753135807, + 0.38011699449751846, 0.8648343180496709, 0.13696978863016307, 0.48260867475004554, 0.36897387015675365, + 0.3681036227743386, 0.7258879755754656, 0.12368626375567482, 0.9632208582185809, 0.6006673274179652, + 0.6805249467072081, 0.2641383172820556, 0.1849335093823019, 0.268269929364613, 0.7525295228332822, + 0.2356682034874814, 0.9020039580867859, 0.8363943726634937, 0.23376089325780725, 0.7152059978606942, + 0.8118494908235618, 0.522858439197307, 0.8789722467960559, 0.14432628756419108, 0.6831055023883971, + 0.7292560447209321, 0.11948393555196923, 0.7385205102413724, 0.7426624646754313, 0.47034011087882843, + 0.4934353559919167, 0.019385693875372256, 0.1644148700165221, 0.726223076382968, 0.983648127721609, + 0.14978408092510787, 0.0627806211257308, 0.6435646051031143, 0.7068845959118953, 0.22224805706308348, + 0.7025379059773825, 0.7671733937326713, 0.2422455397525054, 0.23900807248025624, 0.7851043245271349, + 0.9815784096070599, 0.5769621451479601, 0.30375770538268587, 0.053292699812170996, 0.7479847297205104, + 0.2656075933607587, 0.4867975298334347, 0.18273049473673608, 0.9771306149446537, 0.32390327309999245, + 0.9364207202505184, 0.9954867156798857, 0.4138903752062618, 0.26602582492560944, 0.40740484055113513, + 0.6980370072228764, 0.6091663927955362, 0.36317671281129504, 0.23178761512677915, 0.5916311699782492, + 0.5222937301706758, 0.38789177968838173, 0.0008073145459028819, 0.44979767898714806, 0.7961270911985423, + 0.37204307676191306, 0.8805650817441983, 0.6797124765132581, 0.9391605889195155, 0.7853430507915093, + 0.40205573121352856, 0.18465490756077885, 0.9453616601757405, 0.5264730740092183, 0.7043416308969769, + 0.7532247997132729, 0.8084522419324485, 0.4214988179069832, 0.6662843687119275, 0.32012453776257244, + 0.3842878917402419, 0.45150510115664044, 0.017029972173568608, 
0.5567832376578825, 0.17592715514802293, + 0.12746371278869018, 0.7823002107182094, 0.07938684264490936, 0.2797387200581921, 0.5282546844623429, + 0.9142030702747967, 0.8667042897168531, 0.12414606504559378, 0.7496928489555684, 0.5975657442726912, + 0.5766856057261079, 0.27276209170071464, 0.5125491665122981, 0.7659760339624151, 0.2608915153611455, + 0.23737775852564869, 0.4223940470468984, 0.14802852633152375, 0.786343036995969, 0.11967164780855066, + 0.7861337723070215, 0.43752732063885147, 0.6768604572094098, 0.18523004129106302, 0.20414845785871094, + 0.5614546034547964, 0.9549593610810609, 0.14922932875338413, 0.11819412731842582, 0.5922593822618438, + 0.2662663597300604, 0.9776200228263446, 0.19329634942663665, 0.7675262420159139, 0.18728423744397238, + 0.9556381028694262, 0.1508380628216479, 0.25100251671942586, 0.6714140435259212, 0.26468793880869024, + 0.012888260450423461, 0.9419551063237229, 0.6647188764302583, 0.4919769298794783, 0.6617511993734612, + 0.7766537494564577, 0.6132936614795171, 0.9520748539513224, 0.4758149512203128, 0.2963528251071592, + 0.8957295163189739, 0.47038734096149415, 0.5347428795661616, 0.7229404776832763, 0.30329048026796657, + 0.13086779950956529, 0.576385998206916, 0.5031192572112638, 0.08546569675744897, 0.6139290139099071, + 0.9532334777361637, 0.0972216513194819, 0.046122603387791616, 0.4109748714090409, 0.3734415097137087, + 0.8488622185109916, 0.9080783886085111, 0.2906220507430505, 0.4456668922096847, 0.10672530411559611, + 0.7219238131733818, 0.7712303177556455, 0.9911404237013794, 0.28595967326832383, 0.4420566293816941, + 0.6702181006929773, 0.1487783459160792, 0.8886058420607406, 0.8167118055325171, 0.6955110843639645, + 0.6656860237845064, 0.7981045764809439, 0.11969361031982428, 0.0982143563938741, 0.510797159559359, + 0.5335903792447684, 0.5893565778166424, 0.9583203097730215, 0.24051468992665348, 0.06835476433229037, + 0.8573444562443651, 0.20345178680680154, 0.6257352813989081, 0.13222658442346147, 
0.00529428268329124, + 0.9724117989276255, 0.014324409285141448, 0.7265996119632014, 0.6739076123487858, 0.6318646676694728, + 0.3559405609299813, 0.4661645469952733, 0.1328989515221285, 0.4729014463192641, 0.49410869144419534, + 0.8143454823684415, 0.6093260298711283, 0.12461622957767593, 0.6159678213721915, 0.4710535583582489, + 0.030609527071711717, 0.3345819402654616, 0.7300781287932298, 0.8624198654862665, 0.6825077720996533, + 0.31626709553647614, 0.03355759344599485, 0.7682065513833087, 0.5806823641299691, 0.11399674026618589, + 0.3622356198928768, 0.6890255321685174, 0.10557057735785325, 0.9198592525270698, 0.7975089868038633, + 0.6340505141934616, 0.5375031945181686, 0.46937569395097956, 0.38945202789643985, 0.36980752793090976, + 0.8780337092812143, 0.9611177839863301, 0.6660564374520576, 0.3676404135378726, 0.051104120594660074, + 0.8057757905385002, 0.25299132502119037, 0.42921784666683027, 0.3340232744157904, 0.1016803260245307, + 0.18722872155163184, 0.5344614555206574, 0.801643045648305, 0.152002398026577, 0.7197274830405708, + 0.04035717869170574, 0.8500618632153892, 0.004777046246910555, 0.865548458155214, 0.2954324387556416, + 0.8186291190531935, 0.32860401997993804, 0.6724341123120896, 0.13174070823702977, 0.9733610815737049, + 0.5184164486823317, 0.37418735343410015, 0.6173879315025659, 0.22785046720433844, 0.5114095646406202, + 0.9649452698790851, 0.2932465915452753, 0.9540857456390377, 0.1419609313043313, 0.6717675252283625, + 0.22011909568754273, 0.022506453413775107, 0.35301334217355207, 0.08547787887044911, 0.26128187412583626, + 0.049425215656540455, 0.07990308912872868, 0.1604461477236364, 0.9396490250068066, 0.009474634287685912, + 0.494175441387835, 0.46291077951839155, 0.15387428898659994, 0.5719043938250659, 0.3323979171193523, + 0.5129930262435822, 0.2801076430455637, 0.016919109647039354, 0.9840595398672755, 0.9612463832766082, + 0.5813372900534557, 0.24430423513000654, 0.7789854000542915, 0.10473015091550497, 0.30758985881518375, + 
0.5345993954931548, 0.35332382901047177, 0.34805445081451014, 0.4093080040507797, 0.5569747742306358, + 0.2178918676147794, 0.8980912228786734, 0.7283075584013111, 0.4942848217319723, 0.676818169427821, + 0.3431240103317088, 0.6416195251069333, 0.26089749754585867, 0.9315098744214174, 0.9814873887157626, + 0.27753530999363785, 0.22006786595743133, 0.8402643515218982, 0.764481319745085, 0.17838081838790454, + 0.601023236000773, 0.5295840130217927, 0.34613239873973844, 0.08639984775299314, 0.2134377378630149, + 0.5179056795620993, 0.026870718289451645, 0.2060279428229742, 0.548040927732119, 0.5019802625919461, + 0.5546848746918437, 0.6627020144126606, 0.7898117029914349, 0.10114668177627817, 0.6979410329890304, + 0.4166445818731944, 0.652158873340379, 0.7661610713157183, 0.013600740936464795, 0.31614555598082206, + 0.40851197800871475, 0.008821865522855132, 0.5910419735717404, 0.8034268043946365, 0.21207086405040854, + 0.3809877878102963, 0.20305500980644842, 0.7195135069803356, 0.13782188018655184, 0.2348142041066399, + 0.34369662341954443, 0.9398953890117043, 0.71255092747532, 0.7603892981238097, 0.4154661310361333, + 0.7936818265729689, 0.33556475347376824, 0.22513057371336387, 0.7856924217542165, 0.840744681055929, + 0.8258693688550751, 0.34880246506606827, 0.8981483330588393, 0.10332679504730224, 0.31159722704092885, + 0.2976526019284299, 0.03615468174509062, 0.20532316160658248, 0.400784203708203, 0.17494363992366446, + 0.30158552547093875, 0.11111896953816602, 0.5861133805705742, 0.46365762672070077, 0.6361320008641982, + 0.7624936786649742, 0.45375575795993883, 0.5997061385711707, 0.8781121016833955, 0.5608055002062007, + 0.48716246828830867, 0.9290986568027505, 0.8587574054485324, 0.601149904423681, 0.6271793648207649, + 0.5409361240644139, 0.620009224601677, 0.25176893165523984, 0.5000574536805042, 0.6615724752761805, + 0.39392743840517497, 0.848525420471788, 0.6442961972740163, 0.4631464430375354, 0.05009411873654168, + 0.5030674592619186, 
0.2743202314238007, 0.4438361539195024, 0.9977365103670578, 0.3038827547616273, + 0.4659087885683486, 0.3496225111697312, 0.6124950484645235, 0.7233257633122523, 0.6946083605747225, + 0.7789394704337231, 0.46178049371317786, 0.24418762833017915, 0.08716326828281562, 0.1764065658110866, + 0.46307556705195085, 0.4406449735157394, 0.059637880370541074, 0.5527294188792345, 0.09425152922270486, + 0.4864487864602529, 0.17550840088332098, 0.12961710680824035, 0.43031709993944645, 0.24148233188681767, + 0.8952159719927171, 0.1841036550906583, 0.7492365344550502, 0.924213144892689, 0.6854244841619022, + 0.1231896654886625, 0.3820534973069034, 0.06484413407338108, 0.551840499307867, 0.022990905350868673, + 0.5189039118565674, 0.8405980043269394, 0.3846280769862147, 0.1715330629962447, 0.9563108415854494, + 0.7723199422937873, 0.14003650594716255, 0.5195940538392563, 0.10303210560868725, 0.08417873734696724, + 0.3817836996299664, 0.5090147988631436, 0.9940291716379933, 0.13312964973882813, 0.5089979163603118, + 0.960133168625349, 0.959198122582794, 0.818366012123155, 0.9740932057522111, 0.42313049554336335, + 0.4809191891431963, 0.7481408864601188, 0.017301735789535755, 0.5428536357062527, 0.8690611790260024, + 0.5258920864696196, 0.10159930549807805, 0.46927976925156, 0.8476122996248373, 0.04834966426457343, + 0.8412849414939327, 0.1746944346828121, 0.5660435675608625, 0.23845110545254367, 0.6384567175385518, + 0.7255390883382695, 0.07974266966580024, 0.7305535096558342, 0.0132440960212582, 0.16469577551754844, + 0.8419344196055067, 0.34661896961474836, 0.6996651054043843, 0.6621177198391502, 0.4192629280369885, + 0.755457134146355, 0.5769018924373179, 0.8341205461615239, 0.5076774552245807, 0.1789304462515391, + 0.845244509611847, 0.44853357944386285, 0.4447206353044483, 0.8164766396288451, 0.06526657927525559, + 0.8172675045941381, 0.19138556592418154, 0.0323631521768829, 0.46389409823111905, 0.20666046496930268, + 0.7922791278723976, 0.35964406815188354, 
0.7194674814307954, 0.11994698491120714, 0.9166528513943647, + 0.011996949145422708, 0.3575166338930088, 0.4039539728113667, 0.03605242057520419, 0.7607555717360293, + 0.7112645368767682, 0.908126605055545, 0.9737513438614959, 0.4372942928153264, 0.1823332191456879, + 0.019006003304453456, 0.3147479107066973, 0.8516564696700352, 0.18421314821010748, 0.1086110445346784, + 0.5783320343627858, 0.024710941581924772, 0.6624638318475884, 0.8536301976490064, 0.1873065783833996, + 0.9634708611467906, 0.6109180570888076, 0.3358098441196017, 0.3525461073448619, 0.6541700786625942, + 0.2352844194906839, 0.09594487577946043, 0.5556225295809254, 0.17151605448400897, 0.404219761336573, + 0.7959004922233122, 0.6628454167968892, 0.4886497962739613, 0.7996737252717977, 0.947529023756998, + 0.4212535595619563, 0.06585920967432546, 0.7240230287880906, 0.11971360870324632, 0.13441459266166333, + 0.2757694827430912, 0.2556645241877844, 0.8461719511206709, 0.024895745445476014, 0.5780879815166985, + 0.41460764203434974, 0.4115867806130522, 0.01725025908093314, 0.3719966887375674, 0.5802713930701716, + 0.9484165615250868, 0.1789127429187054, 0.6746917127146561, 0.5456106202130359, 0.6357428566154322, + 0.5850881474635748, 0.5669110257316874, 0.14839504946155402, 0.6263435263131425, 0.9076996469575036, + 0.24485456927199156, 0.3614606838360451, 0.7962971370012736, 0.23618765974799005, 0.7243107750010274, + 0.3655889364418473, 0.007883261144003617, 0.49510248651461897, 0.1160156279340131, 0.9185115659449252, + 0.3014803867868435, 0.617911280565307, 0.40566051837253236, 0.8418444527724096, 0.013811344218978006, + 0.4912740568461218, 0.9440194005664312, 0.5003802252469102, 0.4301475622444393, 0.5573003675458981, + 0.9283505880881653, 0.6498108571818509, 0.9765437956149597, 0.06837822554482054, 0.9576186243026893, + 0.6067115217790777, 0.2527127048251988, 0.7457747934813597, 0.7613945393112975, 0.3197191445507993, + 0.47678572581786627, 0.5753915719540151, 0.47577803402806107, 
0.9204160518325147, 0.38556377043476064, + 0.3498607729187615, 0.7779829547087856, 0.7217386673623036, 0.8108887589535382, 0.573572154649218, + 0.014850298511975835, 0.7743262564982436, 0.8811359304488259, 0.7882151846722641, 0.15745329975285205, + 0.4917722741095677, 0.3825232454894061, 0.6132926977635724, 0.5458283541194745, 0.4396764417361617, + 0.11253712372167302, 0.9944255015198569, 0.41332238437456326, 0.7642852306299943, 0.947011187251023, + 0.6584280707041268, 0.9705955239223931, 0.22056074585745167, 0.6412557129008742, 0.9183842985462575, + 0.15844810911134333, 0.4229941855249255, 0.46631574234026296, 0.00621939507882463, 0.1950385007214145, + 0.1880793891162995, 0.7713957974702469, 0.5179417894573418, 0.3953955785502524, 0.22396846650767221, + 0.9442464218954505, 0.9427456511753987, 0.478645460257597, 0.3247264555339575, 0.4226414397011886, + 0.44270703115635646, 0.8182457128694718, 0.8699212612034424, 0.9739326404506112, 0.9733543008593656, + 0.02726957728766155, 0.27630544580981553, 0.12430218271549875, 0.3075648700087451, 0.009175972206771466, + 0.8887338132608575, 0.4677720599765097, 0.7096377848472598, 0.25514781837772915, 0.7349367527895383, + 0.8802044770314496, 0.679564526975614, 0.9805983407951738, 0.5746533318307063, 0.3178481938972292, + 0.10285927134768647, 0.04034334305071574, 0.35185204285537897, 0.8338903697005867, 0.47337485215991837, + 0.9977985078200367, 0.8826380338875994, 0.6154761781434394, 0.9945128268846568, 0.5187355449323019, + 0.9954741538769635, 0.051867397643591606, 0.03258376368065685, 0.49141104221819687, 0.42255885556289785, + 0.6602948270135885, 0.6533839627526647, 0.3317974819002759, 0.4750694525995892, 0.8372386603443212, + 0.8733516669036471, 0.7769429519442802, 0.5131899136457916, 0.9186914493171061, 0.6129202656974769, + 0.4795842008468192, 0.06010532154027859, 0.294737816664397, 0.522742012020562, 0.7480182527514979, + 0.8490507175550476, 0.961317798903892, 0.0157807641133888, 0.4719517919129781, 0.2854952552587243, 
+ 0.25206545379617573, 0.3908122153054925, 0.7897478546617399, 0.8687881204806356, 0.2600847389486435, + 0.08231672621371333, 0.8996158217421836, 0.9656769113553033, 0.38695984760128954, 0.5245019326552095, + 0.7868368475303291, 0.9883877958236108, 0.8216632094900955, 0.6873921203738307, 0.5253556552531752, + 0.8012894187738542, 0.6220750104373972, 0.888442717413622, 0.8208059827597131, 0.7756075576254196, + 0.11239428226956549, 0.8446328153518666, 0.9724238495588396, 0.9423008441603514, 0.9479083858491435, + 0.33292400110433795, 0.8522010300319922, 0.19718871739413535, 0.7243083475278599, 0.5392876190319033, + 0.18767082401381263, 0.6052012637119696, 0.5319901699274354, 0.4359310852600138, 0.44239410464979356, + 0.5237736419559433, 0.5992656100976662, 0.8657522574689676, 0.38786819776307235, 0.40328619698366674, + 0.44238976235002736, 0.7208756753497657, 0.34888394788279076, 0.22403409027925325, 0.8972358572047573, + 0.36707639307014706, 0.5424392141638237, 0.383032722416067, 0.3250525214093817, 0.9474584034891509, + 0.3100026229661803, 0.513923963010145, 0.8974220003154247, 0.6638441571382975, 0.8761301480692648, + 0.9453765278793322, 0.546624273026458, 0.85207838750921, 0.5143383921113984, 0.3623639302685099, + 0.13053324444676095, 0.9202768804193477, 0.07632342250334967, 0.6169800846527169, 0.5619003531541614, + 0.4036979011287797, 0.811538638841858, 0.9994953892711391, 0.1727508046090681, 0.957246866903339, + 0.29111048030765574, 0.5740540319046417, 0.17925051541673342, 0.29532833281514037, 0.5750678863635276, + 0.7699362351165019, 0.9466495088395406, 0.29480313505154754, 0.030239207752203567, 0.9962218408618101, + 0.10974153884708437, 0.09722243908500983, 0.5996698961307735, 0.5787808033733464, 0.5217204183697274, + 0.7165588917721792, 0.2711698301456228, 0.634951382971681, 0.8810319218994965, 0.5802832617413877, + 0.9135617876692592, 0.8885909702945289, 0.07029886827552778, 0.4319423085389362, 0.8898903853626756, + 0.5659419880903022, 0.17772700247017748, 
0.02466331055729487, 0.40976022381169763, 0.966139556726745, + 0.8905666938310178, 0.12032617007435498, 0.5586063680569328, 0.8751461193267505, 0.8800836348534977, + 0.0029626166269742793, 0.5955595092378418, 0.4201823500205377, 0.05907783015629442, 0.9747299600526338, + 0.6883157984178091, 0.808682619123844, 0.6759239697737995, 0.5777679635398814, 0.9473648921085042, + 0.8563561916384634, 0.8237877387676724, 0.09383499480582291, 0.8821162255230444, 0.963722603581999, + 0.40533384837008046, 0.7435095283420515, 0.6815137163413589, 0.4035740011091691, 0.9680023044214799, + 0.9006052594187848, 0.7701609780096528, 0.8996958126037286, 0.6742632000806458, 0.07078770013586377, + 0.32783805750246053, 0.3252077641220803, 0.5818546949935752, 0.9986972588534645, 0.41135942899025113, + 0.21557447472923785, 0.1039414625139794, 0.3609153461000294, 0.40970329286922735, 0.5307699973344617, + 0.4564196140309865, 0.4903645772887063, 0.27219908745510735, 0.18851889819465462, 0.3458624205156834, + 0.12862376654059382, 0.7421487287602753, 0.9389952599404652, 0.2242673631578419, 0.8804225912045001, + 0.8713359890054578, 0.049583658631340954, 0.461333142676972, 0.8573883991970475, 0.8273497455134113, + 0.7492467688476916, 0.25678848040690394, 0.37873638846192015, 0.8471733611977219, 0.13202980589322322, + 0.5712988419943197, 0.09333652786390811, 0.49013826107448333, 0.13479719164736526, 0.16427210317156737, + 0.7458477143946026, 0.6717577793330666, 0.053230410263521266, 0.38449156344428725, 0.0471516166478535, + 0.8989913539553348, 0.4722058983651367, 0.07965997745763154, 0.8432035709964336, 0.8324782794950882, + 0.8947212471888726, 0.7469459200927738, 0.17419155384609342, 0.2651924617755289, 0.5589110879508555, + 0.3511385321424483, 0.7514510123953554, 0.7056225704396397, 0.15174566815098, 0.7981908675892514, + 0.9288252857248814, 0.45359975411736064, 0.3699779432576733, 0.7731353896761294, 0.41420774199620525, + 0.02228872104020785, 0.41465227704663743, 0.7653060061362247, 
0.8908977169428762, 0.1011691828617195, + 0.6564807960586904, 0.5139719894186772, 0.33447179866740595, 0.8641131762583587, 0.3731368840868027, + 0.9540592332554206, 0.33284795709302306, 0.3190165049679413, 0.5162097883698685, 0.6872266906081227, + 0.2698316041639177, 0.4267271934768603, 0.9018147501048259, 0.25237951358360167, 0.9673474522520681, + 0.5548373652390023, 0.4626239459993291, 0.641197100041672, 0.7376476490149778, 0.4945822751566302, + 0.45143515414896973, 0.8310063694046905, 0.04538666171000372, 0.03245551600311003, 0.6021067420685309, + 0.22800942678663183, 0.13754835383980213, 0.870025367204846, 0.665096511332824, 0.5613220687909719, + 0.9574402818826881, 0.8563491686752919, 0.9922585649322798, 0.3922046770600376, 0.1833590489222756, + 0.8839103204428493, 0.6555569212336647, 0.15339324608249016, 0.7116679797736236, 0.3922617998854798, + 0.10236728003834406, 0.8265888580963386, 0.3224121774815898, 0.8287424340622042, 0.035070667149747825, + 0.3652575354719756, 0.9267043375497335, 0.8728869219430866, 0.08204271765010362, 0.1492255884234367, + 0.4170688515544877, 0.771956724388477, 0.044597731967288334, 0.7336991267302188, 0.9766499750753773, + 0.12231095605609588, 0.1910086842711467, 0.9484090631319674, 0.34385217603414464, 0.1344870415162165, + 0.507938149476136, 0.48208843113546196, 0.27813541875234515, 0.19791944435791675, 0.23786602820686686, + 0.6903300123835496, 0.5961894060958159, 0.23249911075257257, 0.8138919594830557, 0.3799499388086798, + 0.6819955322248713, 0.36928830941636503, 0.6468658337445161, 0.46488224020181057, 0.08214264728628962, + 0.8453510643507857, 0.9797117889268059, 0.01977220954934067, 0.9637136364467382, 0.23671703482565176, + 0.4056144585417145, 0.14699763121945975, 0.6937513264579159, 0.4207670579418772, 0.9067321563098454, + 0.8212233378681554, 0.1057519884112128, 0.34408552508050194, 0.9183997698385484, 0.22192533997190822, + 0.016071882683825534, 0.9627225687023101, 0.055352349180295524, 0.14405210688930326, 
0.6101861016419559, + 0.8779056770958061, 0.15608329894262518, 0.07432223402143268, 0.4132289395991454, 0.024997189316685375, + 0.15524098270391595, 0.1950971749529864, 0.7309472381294072, 0.23927118600741037, 0.7946732252598024, + 0.35563131604668985, 0.12565766817806967, 0.33186510576034656, 0.1721530585673765, 0.9988001159627045, + 0.7385131268261163, 0.7286680269191083, 0.1791914178802213, 0.33960873273145564, 0.5483552760268539, + 0.6077696769232941, 0.7948503933753279, 0.8774733671820405, 0.18258012906058707, 0.007296422973581018, + 0.7585564189859887, 0.34358668093841116, 0.7721745828024551, 0.5197165392956121, 0.8769738105716012, + 0.4392752247295628, 0.18005492065212647, 0.8930837029323517, 0.9499829868999502, 0.430343617416845, + 0.9175364653410282, 0.20879072691529266, 0.29158269203287435, 0.5707799218976682, 0.9783287496639033, + 0.5311466209625976, 0.7488417315106284, 0.8938172203110816, 0.5706410692793182, 0.08074443608853843, + 0.5256864530162101, 0.4510169902196284, 0.16121811719738177, 0.8830670187416448, 0.6496919403781685, + 0.22093909544907653, 0.5559081870429834, 0.46044429349843363, 0.8842196578677224, 0.1465616718963445, + 0.0757257527786177, 0.7528756877946456, 0.003084178328123688, 0.4691845598153699, 0.31381539717235307, + 0.6058485324201227, 0.19582450893471326, 0.11144612491270367, 0.19374836607412904, 0.6845493929953306, + 0.11223495018555207, 0.8578995942564314, 0.7750005349528613, 0.07940070206811212, 0.7801027584700448, + 0.9981734790961896, 0.09401327085732325, 0.1121342349702219, 0.2600149014638874, 0.6569416484785999, + 0.05991623228278431, 0.4984423392458157, 0.8232624720939001, 0.5869729660297904, 0.33510763771240815, + 0.4371704788769085, 0.8478505827922405, 0.24183704849197063, 0.9668233953149054, 0.9152740638129625, + 0.19571904093210168, 0.9355766790785968, 0.5465778859516064, 0.7933604748121533, 0.511221775098869, + 0.49923463664953527, 0.8532207938684989, 0.4679108016527036, 0.5067772388798092, 0.2704048239326218, + 
0.5869214255077925, 0.0862281846497085, 0.750090992133997, 0.658262704080168, 0.9665796680808748, + 0.8862702309297142, 0.20581820371274773, 0.3920280902337029, 0.11357980729982498, 0.5975259255376344, + 0.639606862462329, 0.8586265768729285, 0.32968624769085, 0.42531727941624065, 0.378062526232407, + 0.6959350290099302, 0.4910887453927587, 0.963798098897601, 0.6205177405438455, 0.8757740083812433, + 0.38749684501963966, 0.9729899977836354, 0.8609736645667648, 0.1680739600864617, 0.9717517000641311, + 0.7480057773178751, 0.6577866749209712, 0.3481223764093462, 0.20281366317848648, 0.34560093168169415, + 0.6732149101529429, 0.34713120650625906, 0.12761583765159645, 0.48130888230598246, 0.6464280402009749, + 0.7896399306253052, 0.4163593492107014, 0.678967054038523, 0.6415583703587278, 0.5807430092405199, + 0.3669995942862181, 0.48521749000567294, 0.8945022467786367, 0.42957355903273287, 0.42634731483149446, + 0.2615944739282523, 0.9347327032185544, 0.4032350825257963, 0.5499019535617281, 0.6428267726285616, + 0.2427520631004768, 0.3529368554982325, 0.4075451710656085, 0.8396811846580285, 0.7664336534771249, + 0.7454445290010497, 0.594683054416274, 0.8059675965895596, 0.1993961285932777, 0.30104246053171047, + 0.5501865630967797, 0.34956514814140416, 0.32513960801078645, 0.9926430128488082, 0.33594598212290083, + 0.6401433234691679, 0.1469284059864766, 0.9890691024614449, 0.8884602325724144, 0.8051917742497957, + 0.03199218208780408, 0.10976367302113321, 0.9832721028893566, 0.23144096553103255, 0.17061123148787627, + 0.974937800310758, 0.7588624495212919, 0.3141678075997135, 0.6558336608388925, 0.45514767882151164, + 0.8920901974702699, 0.8580822574896303, 0.7515705744441694, 0.577880521408325, 0.28879451299186887, + 0.5950573103468585, 0.11336145536164366, 0.7685226209615665, 0.010736198574742062, 0.10124431371778109, + 0.5245043587726054, 0.3042017040162207, 0.48314875152685965, 0.15088638952458933, 0.6257977366948, + 0.051275753149904735, 0.1121297542502595, 
0.7698788603273385, 0.036763797151347144, 0.41758280073753395, + 0.6634871532877683, 0.6278566901741018, 0.2730580685204371, 0.13744393941299682, 0.35356918702679585, + 0.13890633400886465, 0.07202312001767852, 0.5818889522809639, 0.9463835044296236, 0.6500400786927657, + 0.9940516369812838, 0.7438357387487614, 0.4441967202737509, 0.3968813868165332, 0.8057406054378565, + 0.8994298297473973, 0.30088197885039514, 0.20417201544831365, 0.42211518431276895, 0.6181918795148956, + 0.6667015370824502, 0.8053486024168381, 0.1225799065903952, 0.3830785103106693, 0.5353443888120824, + 0.6418683150777982, 0.036328296713261876, 0.9573587267816196, 0.6570380920114276, 0.12440565194231623, + 0.8218695413672762, 0.2046314841929372, 0.35324722386436747, 0.22643922546973982, 0.6320703183436863, + 0.3954084243144338, 0.7100007282107038, 0.7434773142380879, 0.308003659447983, 0.439697264558092, + 0.3622433517310881, 0.7682182667316569, 0.627708127542772, 0.7457734070783953, 0.13412875089129972, + 0.9284364512273396, 0.7579730733820352, 0.6279467335848792, 0.2421788541481572, 0.36266260215119017, + 0.4359939263394089, 0.9972556643064111, 0.314750495280666, 0.9835998703689925, 0.8524702210945999, + 0.8995638750306256, 0.876303279788564, 0.8900418947723354, 0.22939202976517903, 0.239046411083068, + 0.5762408480674619, 0.7721650166811792, 0.033369057715806005, 0.6062104098965748, 0.671267338107112, + 0.32297628636036624, 0.04578604303342626, 0.8001583859890014, 0.2386630271598421, 0.15578714591530785, + 0.07609393858141678, 0.2740311659018093, 0.22642144023181932, 0.8122458206435015, 0.7731145777875027, + 0.9989769175044537, 0.9596549003598565, 0.2894399301850562, 0.15198542143494165, 0.8793630817948156, + 0.6221220329322654, 0.27324957326399, 0.9571097473497495, 0.9769270908981721, 0.7100878664458213, + 0.4016917196825689, 0.2358676922121582, 0.6302803983452228, 0.09865377641884965, 0.9978343851459455, + 0.07292616008073904, 0.17569764521968534, 0.6625558888671477, 0.1527051594775165, 
0.33929059955171625, + 0.6149784479853465, 0.6825778290448736, 0.3340698714928595, 0.9054661714859945, 0.40171262334950775, + 0.7164633241797145, 0.5671398172166763, 0.07579511166480779, 0.09576260123463154, 0.6759715887068972, + 0.5398152288898069, 0.21143275695656438, 0.555423702169805, 0.4203525375788547, 0.9924149224407816, + 0.7675035167597798, 0.537685187349412, 0.46887914202389724, 0.9436490780055035, 0.48434514567299913, + 0.13823837017830531, 0.48573873204238827, 0.4155400582038591, 0.16193809353652022, 0.6794202239012281, + 0.6741790995135312, 0.014961515013560467, 0.17675059360026468, 0.36622747291705593, 0.656344259835644, + 0.1884220582281788, 0.9539616929703971, 0.2611164858601731, 0.7632167601719415, 0.1754475871355382, + 0.31012307192762434, 0.545071189705105, 0.7431348213005746, 0.00912587596133585, 0.7969964217778731, + 0.8577567986605854, 0.293937160110448, 0.09656496544489146, 0.32743209122525563, 0.577227645989316, + 0.6666396237088997, 0.09118951619802484, 0.5294388643160259, 0.017097645012574825, 0.12381517151884414, + 0.40979418450761473, 0.9812751084866524, 0.5414881341152286, 0.2284718083387247, 0.12502942625184044, + 0.20594108776593623, 0.631639294167408, 0.47129295452204933, 0.7808651025074285, 0.3699290701317358, + 0.45312672286877176, 0.7252476462933776, 0.35020160607689066, 0.9178902457292402, 0.09245287089353127, + 0.7139204990603845, 0.454728551120156, 0.7639518329554216, 0.13267916773134902, 0.2120953312712095, + 0.1686663528787331, 0.8153134329144104, 0.518917115536705, 0.8750293600438538, 0.6757945842350678, + 0.0721029214978608, 0.18744536801458878, 0.8670947957967745, 0.849282495062534, 0.4567301306014674, + 0.5238981780488828, 0.6821351017819414, 0.4018119843053126, 0.413248627828918, 0.7908025440454534, + 0.06436912288920948, 0.26848888006345284, 0.126094841221167, 0.23924382207850836, 0.39983250992339414, + 0.7667187930556872, 0.5364152244879188, 0.41098856778442516, 0.6186542712946893, 0.6048377008954706, + 
0.8193512119844159, 0.4305204338004601, 0.7449751169034136, 0.6977153555547937, 0.6957575999209226, + 0.6217465273713485, 0.5380026935228748, 0.7474986794418835, 0.3930622251364809, 0.1432469446192154, + 0.573748001476606, 0.24721081216827134, 0.10781756851762991, 0.6406855656325827, 0.1839760813054665, + 0.3495815321242659, 0.6249594554243484, 0.009699545497271922, 0.38646108435950044, 0.8371165165897874, + 0.17596204498400014, 0.04229303459900191, 0.6303515881266069, 0.8938158886867839, 0.26298163432446064, + 0.9614574845148894, 0.2400691667597833, 0.7536093041951328, 0.01864816011617698, 0.13246167554170774, + 0.2018273294368511, 0.5361103375114744, 0.08692573029619277, 0.28481005373740376, 0.44435298715707083, + 0.6564184302936832, 0.9902826572306924, 0.7858413120004434, 0.8759230636975925, 0.8296063028246792, + 0.7455691493096455, 0.9277084414907217, 0.9154425984205277, 0.5313900533292686, 0.7370258140027841, + 0.6406814198532975, 0.8073872975561917, 0.023207568730379324, 0.6620058452491424, 0.5420101268337991, + 0.20289153185846165, 0.8280169782081197, 0.45530763619178605, 0.6132443775488258, 0.348820222742827, + 0.5174224351082402, 0.6785388089428329, 0.20780047027581727, 0.8930438375858712, 0.005617921107824553, + 0.5406260226685579, 0.23155812789946095, 0.6476761701116637, 0.3782899501219493, 0.1307966296554397, + 0.39091855023851974, 0.1271348470405097, 0.053790479334393404, 0.26006369382804695, 0.9120382286627429, + 0.17072886231286066, 0.31148355389917304, 0.10631712714709618, 0.888166481373541, 0.22411040382431047, + 0.34857984167947764, 0.5463171321848674, 0.16069308320763953, 0.4928769605204446, 0.3370625562457469, + 0.03184601454406577, 0.6662784355811753, 0.9199232679408939, 0.2812214301379906, 0.8742299845363793, + 0.8599917887197637, 0.04208730433509089, 0.08827753761005885, 0.410411604408502, 0.6386629826025146, + 0.15746971328424042, 0.9717958876534641, 0.31479949920723715, 0.873893772619868, 0.2595286121289888, + 0.5461431835754313, 
0.6058791341647569, 0.5093850426700087, 0.13104274843830965, 0.9011284579053315, + 0.150470781259205, 0.26484515335018355, 0.40718331022995524, 0.48719313860545177, 0.09402611096888736, + 0.5076152442754007, 0.6864073255257639, 0.42474377657735574, 0.80840150705639, 0.4760823614461068, + 0.23672039478552742, 0.29160858048524885, 0.05878656285326378, 0.4950043921201229, 0.32648155215582086, + 0.6012357522377618, 0.4618708271641596, 0.9570950973825609, 0.4930252171429771, 0.751274964848956, + 0.07912869529247013, 0.2892724207368875, 0.8505732172593448, 0.046603710954399746, 0.9245550241642134, + 0.21958776843823102, 0.024194292791385963, 0.797363165895259, 0.4528316921428639, 0.2959211584765945, + 0.36632051349153216, 0.7929667163493694, 0.44521063267568084, 0.3280869010730112, 0.5871645220452025, + 0.7158326330683904, 0.7735598456854874, 0.9726968567339525, 0.04919603874684064, 0.5872325753706338, + 0.9876345088852388, 0.3806365155194553, 0.6193685031915793, 0.7420143070225116, 0.562202618245672, + 0.8023095274326023, 0.9967487643007776, 0.27898967652436246, 0.7664835509099146, 0.9686178594829911, + 0.5803647824645309, 0.5031926160684639, 0.00764514433026986, 0.45186742079696307, 0.9157670915387612, + 0.35321306732016455, 0.8964499998251938, 0.012325671564315188, 0.6035352980788345, 0.43399689747143766, + 0.4446055039261313, 0.5097121584670914, 0.26282867980560454, 0.2104064159448541, 0.38303704745723155, + 0.39676654757487173, 0.9742229688886102, 0.34993956600089837, 0.7176936382710163, 0.2979929201955822, + 0.4725155828147207, 0.28115419643965434, 0.4131877565470071, 0.4688113116014013, 0.5909361664042784, + 0.04562057132542663, 0.09872678455409933, 0.07185295134099112, 0.22012386060423395, 0.9753584541931773, + 0.9768690972175536, 0.27748788865199225, 0.042176465806694874, 0.3449947184616311, 0.7731477599561548, + 0.576041010988514, 0.6864502085998221, 0.10091815677278959, 0.7355476225335006, 0.686370260569422, + 0.8619340829976754, 0.7131704036314269, 
0.18721377643411574, 0.7468306648222953, 0.10404537667470015, + 0.427741847515952, 0.14423635170876226, 0.32966528606588774, 0.14377660097161893, 0.31479036407726335, + 0.36068704724699974, 0.6704005066574157, 0.29192119641915015, 0.9499255033265293, 0.6014134099015941, + 0.5354755238359237, 0.4235178921015007, 0.6145050176360808, 0.22528745791283433, 0.9905350305147922, + 0.32984246825773755, 0.9593429312184408, 0.29059723832981676, 0.06224657484427909, 0.7977772973726529, + 0.29855962341757925, 0.2320284226087671, 0.9672702374808932, 0.5791196885058808, 0.38150112973074735, + 0.41361766102309305, 0.5566981436336532, 0.19496873228265454, 0.30629488210624856, 0.5910764087486028, + 0.2306523802949394, 0.6819352587948346, 0.9511523647359975, 0.10661809566538027, 0.6912229538931315, + 0.9738645328742929, 0.9313744835688442, 0.5928784586859759, 0.6414767825989253, 0.22059265816843376, + 0.04065047156083146, 0.5443291403218877, 0.9980170139190858, 0.6772510398983849, 0.0014607562350618108, + 0.9743592357328642, 0.7582699007077365, 0.2695979134294637, 0.5410605083881921, 0.5377612638885569, + 0.9543799601257471, 0.8834152173981732, 0.8558592032759562, 0.6062580307791405, 0.4163638704062511, + 0.7874310238758198, 0.2064766994863031, 0.21234013905236793, 0.5321646176337375, 0.3797017267342948, + 0.679871115170112, 0.3506712010307328, 0.8492326600455272, 0.1263665495056563, 0.2558785219735803, + 0.18186591832575494, 0.44618806495961305, 0.15025985375846052, 0.44451373444644493, 0.14244275517156624, + 0.3559221752992082, 0.20514420449026094, 0.520257318083047, 0.5193802956969984, 0.9124786225470994, + 0.9954799109273688, 0.4054744543314709, 0.7930200796044709, 0.9992531702062878, 0.3962081961552827, + 0.5257109401814164, 0.40822371093087884, 0.7662508446378883, 0.4003884229085751, 0.028659316569985882, + 0.036609769762717126, 0.21683967300735585, 0.27792991861947103, 0.4346122771506937, 0.7339820616943736, + 0.7615273095795183, 0.6021771843103301, 0.5997816710629622, 
0.028431638908734014, 0.9922597856583588, + 0.9528681627023161, 0.37894423281499257, 0.4493634272067669, 0.3019759058567133, 0.10641775735741177, + 0.8596880742042701, 0.6298303242014589, 0.16138159744750147, 0.6829116332585113, 0.03678713836250014, + 0.4590853646362283, 0.1376755638156525, 0.9234659885666138, 0.6776490697581575, 0.06293281774793091, + 0.593402981266393, 0.2400406445818748, 0.46611420429879713, 0.7855880046516998, 0.3170049821664035, + 0.16063248013696507, 0.9333640266540668, 0.5968269344699825, 0.8189250761503922, 0.4372751643058581, + 0.3343576114109177, 0.5166046188614271, 0.13521389331096978, 0.7319332576456725, 0.9582470381904129, + 0.3955084414156508, 0.6732825379248611, 0.8315467114668069, 0.5755872322168831, 0.9085865974550534, + 0.7274610310156122, 0.23637235206387242, 0.35647914417762827, 0.542346588817079, 0.10296370875360383, + 0.26231553103843963, 0.8132512797619142, 0.03160394623479401, 0.9362819528459664, 0.49694180530062304, + 0.6652745217638399, 0.4792290544190678, 0.28144361258422446, 0.2502793215029042, 0.6509973306543374, + 0.8623686485016602, 0.3915995572390577, 0.2573210607901675, 0.15878174849393378, 0.574702660971639, + 0.3247561325091265, 0.4758444799476046, 0.8246202565371312, 0.0024767798928380103, 0.6725517274146398, + 0.33382903536261166, 0.6123887285582649, 0.7551895232268804, 0.0027077614617521117, 0.5518928218581024, + 0.7731069195152767, 0.35762694888935787, 0.261957331822137, 0.1736234087834878, 0.5393796473228052, + 0.26265536728871053, 0.8044681505932153, 0.3742238333521457, 0.09074157735870425, 0.7304208834979972, + 0.2492251010356593, 0.19750323759722288, 0.4406251789902105, 0.3370383139382622, 0.8041402568981643, + 0.4993766685205866, 0.7293176712149692, 0.6603327243779286, 0.5745019381499755, 0.35187136846045264, + 0.9059499024551373, 0.8407881345611861, 0.8826890000928408, 0.14662158982266627, 0.7417154005143491, + 0.7059785417083816, 0.6601787602032277, 0.9231778999317669, 0.9930782010538035, 
0.16950541890782578, + 0.29953669622579693, 0.9896343653184838, 0.24553185853314052, 0.2516361921497805, 0.9484126968310068, + 0.5500268598092636, 0.3350933898253611, 0.43885011041681843, 0.1765038631934982, 0.13418004941384787, + 0.5344879011310795, 0.06264352678520546, 0.17962188510178545, 0.2881220300857651, 0.5321174661068577, + 0.8388562063836482, 0.45018950039403227, 0.7071784147575183, 0.3857731158961153, 0.65299765354335, + 0.5353092818650595, 0.9564058826045736, 0.835846495517248, 0.8351221975443202, 0.34950875318430275, + 0.7944419536149493, 0.03103015289982447, 0.663819901886251, 0.5440610368284272, 0.9077205978060766, + 0.0338599876818938, 0.595317397305231, 0.6611606652562612, 0.7427273415715485, 0.3584586164057667, + 0.6958058152240291, 0.46625913163679544, 0.4694506286495609, 0.16442715310662093, 0.23063444664650545, + 0.726253607959635, 0.1157488629683695, 0.14737706619680302, 0.5460001207003308, 0.04359929903286086, + 0.3291915021643472, 0.9622276310991649, 0.08334607868973498, 0.2700362200516937, 0.016035811126763, + 0.4821196729628865, 0.6980546839225151, 0.7115154385462098, 0.21212410319962938, 0.6082470757397771, + 0.5173747933283613, 0.5696867318583311, 0.781265456010705, 0.6748163507463255, 0.48137101384806746, + 0.9932768562941524, 0.5277212945728229, 0.2205001163545992, 0.7207701056091972, 0.7371879806248237, + 0.6541796200153409, 0.33919996156368915, 0.05473421034406678, 0.1752278601889602, 0.6368814215972155, + 0.000164534861047394, 0.20470292892251996, 0.5238060569581895, 0.5998454094558137, 0.7421671202903873, + 0.6436339012544571, 0.0670996495310957, 0.9558734897603106, 0.2671709633095254, 0.11798569575523787, + 0.15589826132535167, 0.18367828580997114, 0.8394235300059207, 0.8847391883728367, 0.21160386512972973, + 0.7871679264663828, 0.9435575242809139, 0.7466707975584421, 0.6723027744255152, 0.8149792653912143, + 0.08396917536564585, 0.5787572859484466, 0.6304320067885459, 0.34289182715778943, 0.7748490420116133, + 
0.9715179003803728, 0.5867344550617464, 0.02759887320657839, 0.07143391918306774, 0.7282989120635159, + 0.26077007616648473, 0.22188941437196985, 0.29649223403351266, 0.1768241902484493, 0.9884976357478628, + 0.7589120725428872, 0.35921758359392886, 0.7573248201999135, 0.6966976938875553, 0.873833867499568, + 0.9073807882699497, 0.4370284414089439, 0.23416893363822322, 0.8862077713571181, 0.3016046915063275, + 0.05154589310311697, 0.26052790755147626, 0.23821867021640264, 0.3840713652030233, 0.6345272299789647, + 0.47345121553514435, 0.6298412121911315, 0.900720575844496, 0.011912092687478393, 0.5336644761033456, + 0.6053721162111024, 0.45059008597699746, 0.1807973828205287, 0.13735799478263178, 0.9331770402541002, + 0.0970637457215261, 0.975444936989247, 0.6180427614038362, 0.18122171980619062, 0.20862723075994383, + 0.027525887162295315, 0.695265747241561, 0.9030199148449366, 0.09461970007579035, 0.6201823347023111, + 0.9933531360254236, 0.048788337003780824, 0.5231986147949365, 0.9333238141138935, 0.5647530988949201, + 0.11861556036163445, 0.37041975948475947, 0.2187790431541211, 0.261254768861501, 0.7516787812302785, + 0.9779734594463506, 0.4809702583897232, 0.3873988667206474, 0.8506255538967493, 0.7013074099672814, + 0.231629765157495, 0.4555976836579041, 0.040475326908593345, 0.3102456451288894, 0.638188398305703, + 0.8421366296460351, 0.12173473559784498, 0.8983418220037368, 0.9847539005944105, 0.35931904374593415, + 0.021911562364628212, 0.09737285522798322, 0.43807843530802404, 0.17061107408980647, 0.5867379487483775, + 0.4754343141952393, 0.37067124685282593, 0.12404426147727043, 0.8807548223437347, 0.47399156546908194, + 0.08833440184390029, 0.3153827426581478, 0.860474219603467, 0.66355408036683, 0.1593342611284404, + 0.4368471111461758, 0.6908125639590997, 0.003263982697885748, 0.6256196072631419, 0.1140600581933161, + 0.22762281546787988, 0.4103765789506256, 0.8513329942584479, 0.5604797251636211, 0.9236629938531732, + 0.34224444338583404, 
0.5355888331588118, 0.6074226753924673, 0.7354364351652688, 0.8785300123973846, + 0.24110190924457642, 0.09336833738023242, 0.1501650200879202, 0.5121120134513772, 0.1565172192657961, + 0.05983591914896336, 0.04330228211170717, 0.6922677194042267, 0.12698107662785552, 0.6533789404881825, + 0.10908195897233752, 0.8459250046807176, 0.27304686771977627, 0.8596066818181741, 0.7959159479223603, + 0.39534113625649026, 0.6906930408986818, 0.7910867888794373, 0.17681976795940946, 0.6603832025665075, + 0.597343671869012, 0.2685112315326177, 0.7841639500085923, 0.18113056501032787, 0.9403421767616225, + 0.7906408165548021, 0.8356582782867027, 0.499799237651813, 0.24891013330857337, 0.07992982472178756, + 0.24723358723169098, 0.1692755837629465, 0.9846921754986828, 0.8962070864550179, 0.5607092208561264, + 0.27045817075177425, 0.97892692852942, 0.5483165442527448, 0.8990598462078819, 0.30412605513324664, + 0.12520712314657734, 0.29948758411808296, 0.1623872463463406, 0.7346370513184121, 0.6775133211386144, + 0.8016351672800698, 0.6220852542712305, 0.3791245796826538, 0.891670585115688, 0.7971245569704272, + 0.44287051237394937, 0.4860821415211599, 0.8050622278219007, 0.02127158943064844, 0.3438799531113126, + 0.8736066120150979, 0.35649840148156087, 0.8407850164313323, 0.13151854613894598, 0.3640358017665629, + 0.010961027000119916, 0.024259513411488687, 0.060631903018435906, 0.3684750380601215, 0.7992243079493305, + 0.8891398348201501, 0.9637814955920515, 0.46956371532470953, 0.41240675581615427, 0.654629505423387, + 0.26594137235106974, 0.37026334207581413, 0.7427239693241591, 0.6177483599538046, 0.9868086554879645, + 0.05115984888131431, 0.8942355877536444, 0.33330388678435385, 0.33976931127197374, 0.5771828536944154, + 0.7792502126192205, 0.5672366749313292, 0.25879183026854935, 0.19647791864051434, 0.363378905450481, + 0.9658948576059825, 0.07062378885477394, 0.2200002567895657, 0.9802248421139772, 0.9959655436889666, + 0.5331906043627915, 0.06417078323196634, 
0.45894745199736486, 0.5125938554689378, 0.6229310413798937, + 0.4955063400259977, 0.40731510830218043, 0.6314055344124873, 0.3451234524118294, 0.5545160442833293, + 0.5029223668930879, 0.6531886111801544, 0.80863309768153, 0.8518687589766132, 0.06029578912110811, + 0.1235051413745456, 0.39316193762286034, 0.3183233677105958, 0.9343905393022421, 0.12842357138248828, + 0.903019577022815, 0.21893149738952233, 0.6635087904838025, 0.6109464924015234, 0.39547745072376894, + 0.5791385410621663, 0.31951216772681434, 0.483061091846241, 0.492272151464373, 0.13953103728514227, + 0.7725895446104453, 0.6696706423122938, 0.6569464994727782, 0.08972551091130632, 0.5861749878003335, + 0.3823996506188203, 0.7248420238565785, 0.17167317420633488, 0.9326644345799097, 0.9654867491465188, + 0.4443178133761624, 0.8504738579125849, 0.8939248278003772, 0.6198086445408558, 0.10731000340355334, + 0.21814460152768378, 0.18881769981087304, 0.8575217731684741, 0.9917166533547094, 0.08726633991704946, + 0.12996733679253603, 0.16864913810143478, 0.016249785138856665, 0.2171449320788671, 0.1950359811511242, + 0.8712294983819957, 0.534286039933084, 0.4552377705924677, 0.32887788681026, 0.9170206557785323, + 0.3255911164531847, 0.41547291975106804, 0.7727237385452939, 0.27723624069934905, 0.5977344231366636, + 0.4132660887675532, 0.5899134646673063, 0.09979297194337933, 0.9235285343485493, 0.27896355306718956, + 0.3348915748551431, 0.6607320029978295, 0.0007268986007156197, 0.7790600526117349, 0.27112239247136705, + 0.3086242004918396, 0.9751873415114282, 0.6297082439981169, 0.4120377709130624, 0.9577843033845876, + 0.7489479020722961, 0.12599043683129285, 0.8017117398059486, 0.9046364062009363, 0.300127821566353, + 0.14579814195732677, 0.44702736267506293, 0.7574060306303136, 0.7580462709337179, 0.3010163551532452, + 0.6845172321180952, 0.24934497733929906, 0.3601660744118099, 0.566502247106477, 0.6197171841012978, + 0.14174805576327942, 0.07014194111281069, 0.36282036045203625, 
0.44373066112677617, 0.7228517329807524, + 0.5007797075327757, 0.8493606828333236, 0.8911258750838316, 0.8764878012548211, 0.6849806914119086, + 0.2452788750277758, 0.927251356798559, 0.865499919783512, 0.4950344737079855, 0.8131084238398228, + 0.38033978891220743, 0.29800484041688424, 0.8822942518531542, 0.2509808109819922, 0.6285476152245377, + 0.7685749999969874, 0.7637273837304658, 0.0497102556242095, 0.6888539522471323, 0.17258768287486925, + 0.19626533926024559, 0.5339925704970866, 0.2788962693557665, 0.13339155243432343, 0.6039324053500893, + 0.6246298542716482, 0.21248559119648736, 0.15410678425764945, 0.3622848859039649, 0.7121634187898556, + 0.9072766937825038, 0.1588639640657653, 0.2775382059821282, 0.7774055771796783, 0.3489623833616228, + 0.6941675225005275, 0.9633145417809131, 0.5139496066514742, 0.4110075835426359, 0.405888936323456, + 0.944578783619491, 0.9015990859985076, 0.5879574580957551, 0.9883671375462867, 0.9480868768512484, + 0.9890683503368907, 0.3357198602996515, 0.9099871039457682, 0.6839443030276484, 0.5889322436149684, + 0.1417634070574495, 0.7198942633220495, 0.7765384061746137, 0.6417377064056851, 0.3700889428504547, + 0.06122400996978217, 0.02785474448600833, 0.41649016463506894, 0.8110156997513283, 0.24529061544518282, + 0.8052814876414277, 0.46532273293598725, 0.1268744678799768, 0.043996373019142165, 0.894278089343396, + 0.7470134525329287, 0.5068762509464182, 0.45586050155634894, 0.42959315670918397, 0.727065287238925, + 0.07551395330279231, 0.5557708052804753, 0.43800255863043935, 0.07208951817176656, 0.7641237600433918, + 0.7110384695941595, 0.0009200091672624255, 0.8378225473631175, 0.07117254517669624, 0.14657777979090902, + 0.7470113425826548, 0.5211763824702424, 0.9540763819401126, 0.4122274568834915, 0.8990193112029147, + 0.9409287525863534, 0.17490531160119316, 0.16906271815680163, 0.07904856182423159, 0.3369476959186588, + 0.18257076978929387, 0.710828634080661, 0.07825063326064463, 0.5719875784416119, 
0.7271250977358852, + 0.3177325142500902, 0.8675628349173496, 0.05875921853930155, 0.6150928033017015, 0.3511732172866613, + 0.7388856213534555, 0.4376091395515205, 0.08033390077884672, 0.14759984200256093, 0.7808558526217645, + 0.3395259099860961, 0.9054644266803126, 0.7187267383818935, 0.3065191086208827, 0.5944962102464852, + 0.0896119023278863, 0.2059749540727559, 0.5050424716492614, 0.20591371743942044, 0.6604525526891044, + 0.02987565456325536, 0.02855321140679945, 0.9620506763172254, 0.5102357288822965, 0.2569583542974698, + 0.1238064647738526, 0.946518284196775, 0.17423005364125466, 0.44119632832759614, 0.6350454365259237, + 0.94440751923726, 0.8959139854417814, 0.776968771149434, 0.41740130201464565, 0.7429349258289848, + 0.4725000801415684, 0.1210317850371001, 0.9451404892928696, 0.5073284931437386, 0.04772666903015921, + 0.07648629212083402, 0.5985426498583516, 0.7298008310007555, 0.8179512269860251, 0.08062090595086768, + 0.9137158639694065, 0.47043025898856794, 0.37618957576147793, 0.2127189003486457, 0.024496577714573586, + 0.33632746353372833, 0.32194485983105214, 0.02164567763833425, 0.3298107886630791, 0.6728338293514975, + 0.4536554242947598, 0.18663149621345776, 0.03149768492434768, 0.38222887341125134, 0.9103263698932323, + 0.9027418292698526, 0.7916556062564507, 0.49793518737507725, 0.11843117221756805, 0.7696893045286372, + 0.7624791773350115, 0.45306608434565365, 0.31567932516837227, 0.943234850980276, 0.43218969468566937, + 0.3079647519980262, 0.4026272377371113, 0.7852252113479263, 0.502430057215938, 0.8641450883728528, + 0.6760673929447647, 0.9345394974971337, 0.2752681619022208, 0.29768815820418637, 0.3428632219823373, + 0.6873572562937668, 0.3881560685957063, 0.17949527066878967, 0.6199701330612319, 0.1107181089336623, + 0.12649873119156152, 0.8637332297698778, 0.5971146502479803, 0.786863539131931, 0.9802417458748952, + 0.7561873542725256, 0.5091059600388003, 0.3225431747324218, 0.7067758856994609, 0.7205296241406043, + 
0.4031590476800556, 0.04624977552084841, 0.7822016961039511, 0.1575796924171936, 0.9503813536250644, + 0.9398788283717338, 0.015840572108081097, 0.7197987215938938, 0.07206012244231708, 0.34207513405590584, + 0.9199978584243822, 0.9338146077783052, 0.011071645951356013, 0.567428915304961, 0.25029238632650186, + 0.40054663926007217, 0.36058872696147204, 0.3024465142441287, 0.08212159834448995, 0.6155249376345805, + 0.6301024528508102, 0.474548798776512, 0.3531810626781928, 0.05366768618472484, 0.7551061735631569, + 0.3195279939002994, 0.09607581302382451, 0.42358137933910056, 0.5967211795150688, 0.46664389244277993, + 0.8836223147005381, 0.3300353787445781, 0.4643866240014072, 0.899053553348829, 0.11186849603436566, + 0.22603496971075754, 0.7239776333134695, 0.7134784474402105, 0.94591130166259, 0.45657257486841296, + 0.16081403892106438, 0.10097643820377178, 0.8777089494075411, 0.9128937994748378, 0.5725730166380295, + 0.31600580567737446, 0.8302142799050533, 0.7990549208535302, 0.1318895517792641, 0.5939468853519743, + 0.0817973969846979, 0.25156973554043116, 0.25338401962260904, 0.5531487664049857, 0.0892594631034409, + 0.4019714793628233, 0.9965551327953546, 0.6262942611583128, 0.36988263378306185, 0.23436920293275532, + 0.13675540941440623, 0.36838321852597233, 0.4107320740414664, 0.6450559163393974, 0.9134854217228497, + 0.9323584077551262, 0.8305192965778545, 0.9200183752656503, 0.016818545936913787, 0.4116316428681339, + 0.9597946597426368, 0.2141465867102682, 0.33632140703471747, 0.46712296805080367, 0.8264304797926321, + 0.9415527801157786, 0.584980428587302, 0.014191695919459235, 0.2323852198302795, 0.18373902712846812, + 0.837331889710949, 0.1342083680048588, 0.5139781386250649, 0.3827967117242834, 0.962319034121119, + 0.807608959635635, 0.8009857504280683, 0.541286455040231, 0.11647817542008043, 0.9362022531415416, + 0.9151175013096735, 0.6177026683307458, 0.8826871248596089, 0.039485833136840176, 0.959103181681906, + 0.6443905754082856, 
0.8305942204288111, 0.43459168075936105, 0.7868366644111666, 0.9018063340069323, + 0.12175136345256687, 0.9041836779973385, 0.9055577919565967, 0.2796205129365854, 0.5905099554167602, + 0.38924790876033455, 0.9099602507828936, 0.6259199182201985, 0.7570399958640042, 0.06287297702815053, + 0.48034370073467747, 0.9932140140262243, 0.9118380571158589, 0.2956318045568225, 0.055626985098968906, + 0.8111818470196777, 0.7271008256278614, 0.1816589392800083, 0.9863757504785956, 0.08009181918975739, + 0.4693202950802744, 0.6598326337418338, 0.5826409837508134, 0.5016008140296482, 0.38160366861251893, + 0.790376777313576, 0.5426348317590287, 0.697593879920313, 0.2549949211587845, 0.021983493509470264, + 0.1987915128764175, 0.7928763181187088, 0.7418311423405076, 0.6185047950075253, 0.5803937565006845, + 0.6311516923718504, 0.28937847627006597, 0.5028515083451491, 0.3105371511093644, 0.9283482134289395, + 0.8190162002594696, 0.7601255241684549, 0.7433335599752808, 0.7131261575650724, 0.6825729582791454, + 0.674022368679578, 0.2956260756989032, 0.013523555616418714, 0.4487765023838284, 0.8764700563766421, + 0.0476484032810216, 0.3877965225294895, 0.797357691849296, 0.4607721840924206, 0.8763906724879901, + 0.31803431422946005, 0.5219680726522239, 0.8712603368722328, 0.43425223817457936, 0.7372362394726145, + 0.7245741166259649, 0.6484094754404262, 0.5604891200923958, 0.43840441104787087, 0.8945907690567821, + 0.12664530740465962, 0.5024775223316081, 0.787532280726247, 0.3015331581960675, 0.7853396143984568, + 0.8238415522033353, 0.07191737967438394, 0.25137728217148847, 0.02962017379060622, 0.5217246710742118, + 0.8192996661908495, 0.26229409259488545, 0.8391310166185668, 0.6198069204259402, 0.254493711475265, + 0.3106870679813668, 0.32408511872819634, 0.7666764157872629, 0.11059051813128462, 0.3629983726846838, + 0.6269294501961069, 0.5888652810322299, 0.6519169977675151, 0.5860195814930064, 0.18888076989792213, + 0.5201099289766054, 0.7437111526306779, 0.4171106131718373, 
0.6445816128410039, 0.5188602841702555, + 0.08218441353235384, 0.17890783732894966, 0.8375039674227638, 0.8919148231441918, 0.03749457607925066, + 0.53272574072743, 0.9024604948001891, 0.8439689357356778, 0.6372935991352188, 0.47043256415874224, + 0.8369882601581162, 0.9409002150811886, 0.6655243545469586, 0.9475035208800926, 0.1447812284363158, + 0.27839516124991337, 0.9231280004373151, 0.3340682869112773, 0.6707977309743394, 0.14132411999646566, + 0.28306903485535917, 0.29127355162015434, 0.49463601230067555, 0.39565204679363264, 0.19792388051549636, + 0.7407466175440733, 0.19407239246755792, 0.7170882997152261, 0.1692746198648225, 0.6430549416784092, + 0.8777906029435121, 0.07768310383675614, 0.7947788353493423, 0.4317769190956203, 0.30099462912190866, + 0.3258694035022829, 0.9733937497512101, 0.6861918598418274, 0.9692306257117655, 0.6230204922381581, + 0.1980643611303149, 0.8728821095697096, 0.6804596183680885, 0.427220969858229, 0.979960096265418, + 0.047622626900064136, 0.5457543431365761, 0.6382308280225599, 0.010936575444467467, 0.37669601059079194, + 0.7026205031898519, 0.4596162094167482, 0.7042418430043583, 0.5268878833167199, 0.6429701528789875, + 0.04120871299867901, 0.48349969655007063, 0.307542441011294, 0.16196316143368072, 0.029125940310131604, + 0.3278435636252649, 0.7133096555109707, 0.9775043314280928, 0.42170770669129765, 0.2111873196192524, + 0.5730795999651903, 0.5515796606164156, 0.7128707043442232, 0.4441508969089326, 0.748950117702922, + 0.16234761722353475, 0.3045181072487121, 0.3222809731713697, 0.900140237602878, 0.1606513515278285, + 0.37808015088265445, 0.684749637902132, 0.4916870950040342, 0.2678966664417787, 0.9277607818839387, + 0.4672604447984776, 0.6737040560793205, 0.42121178091069, 0.8763990170945234, 0.3058429580053279, + 0.4793061221594075, 0.11704609552430245, 0.5762816965807208, 0.46084564841796605, 0.9011714666852197, + 0.44413510868238426, 0.574644995538095, 0.9505953331433469, 0.12484446516456749, 0.6882052096054266, + 
0.5399003983850295, 0.7774403297375743, 0.2302057369739422, 0.927238430398323, 0.8866575100660742, + 0.3260481261527052, 0.5257155704547867, 0.8520145924738496, 0.019969326620207783, 0.41670339685703073, + 0.3098135229714267, 0.7447872776192568, 0.6614500129363188, 0.426784327857325, 0.4013327602563459, + 0.3156558568570107, 0.9523155071222569, 0.19528591858809097, 0.02946960264933851, 0.6639994155871508, + 0.8744959927312456, 0.01289192548222362, 0.18242806678150358, 0.4666935066409813, 0.4202317548486756, + 0.8793030179641731, 0.497929209315464, 0.2514454163728843, 0.03246644146071498, 0.05883311151911752, + 0.4098408684770811, 0.7489468654728467, 0.08229113599397897, 0.7530995091630748, 0.5044629955092683, + 0.14489629123188397, 0.853382939077274, 0.9317921835308065, 0.555711725100172, 0.17037609108575047, + 0.9077033646641116, 0.3048707373335664, 0.618849805951286, 0.7017986142414105, 0.5268172747845213, + 0.9520801192537708, 0.49112590025669867, 0.20153281170765258, 0.7339595103711778, 0.2782399383077956, + 0.734782434763255, 0.8306116464110791, 0.485828769400106, 0.40685351370098966, 0.7434784791000327, + 0.3513847830683331, 0.13433587170845185, 0.39640867900033416, 0.8260967304171667, 0.8315256975602183, + 0.12786937218928796, 0.624191972477752, 0.8205741291859162, 0.10502839382859208, 0.5181186938865909, + 0.4102430705511876, 0.2575579342406279, 0.970954363863672, 0.839806250816905, 0.671955949035962, + 0.13431375872948292, 0.3786807488775139, 0.569124211044498, 0.9657463286859697, 0.7445101317133044, + 0.8858748287584243, 0.08691404253826274, 0.8911947862124853, 0.3118817932629737, 0.806333878982076, + 0.9056541109575096, 0.4341138565978081, 0.9222304579765279, 0.6124487967585517, 0.9918653076416558, + 0.8864753374707028, 0.26959115008462997, 0.2180597482559874, 0.81799763414866, 0.07454625455008745, + 0.1442506742224563, 0.6895290651859735, 0.9313296043479037, 0.8095014961360036, 0.7137675913289352, + 0.6870755218591845, 0.7928465265572342, 
0.5485944061267944, 0.8272180148834627, 0.42441851490571403, + 0.5615943220625608, 0.9067582004487369, 0.9652589486160503, 0.7797793475959743, 0.777203020857793, + 0.8819122326564898, 0.2113031240763743, 0.9665122126277528, 0.4432714865589692, 0.03728554665895367, + 0.05130889547789719, 0.0610893714097005, 0.7015135298085294, 0.4432930399532825, 0.003367956927173732, + 0.258326945266001, 0.5923724550575528, 0.422634483748746, 0.07614823847734775, 0.6470169433721922, + 0.46350630017658667, 0.8555476109143667, 0.4598645636473221, 0.8129643008737778, 0.028534214701289806, + 0.40622994875853613, 0.8516620871733309, 0.7842835452733545, 0.18546703677910936, 0.8256150246709529, + 0.26895226377879944, 0.31350647270757537, 0.04091342668743836, 0.8771107657248962, 0.594814557350571, + 0.49996519376830717, 0.0021080542474207764, 0.7969671115226824, 0.06050920808752114, 0.5580046059526343, + 0.1542984485530986, 0.04854468263923506, 0.950700299398459, 0.18163358649785932, 0.7524681896441217, + 0.035688612985960266, 0.8979132576047186, 0.5557682276163827, 0.3741191768012868, 0.3252100702505052, + 0.8731747355771637, 0.1421207317433164, 0.5307701726892107, 0.3993092943058364, 0.1734656252349407, + 0.1766779245556892, 0.5387451869200437, 0.708741541127955, 0.1910549620195987, 0.017540874600352363, + 0.5562563221035275, 0.24619102991105324, 0.9950030543314351, 0.9239199878555392, 0.9509391888329551, + 0.43371607935957757, 0.8042401219281345, 0.06590487298099312, 0.40082671558030614, 0.9720391735910291, + 0.1639179624678071, 0.07084751698944058, 0.25561585624339833, 0.5187545953694392, 0.2043955571304693, + 0.25823959884300207, 0.26323703471860227, 0.33695530998898493, 0.4492876180568678, 0.9059783705709346, + 0.5074031767896926, 0.23949784512107586, 0.4090861039604716, 0.8027707198747399, 0.5617987006053388, + 0.7187226196220429, 0.6254985417228794, 0.44788264172372383, 0.574778873547074, 0.14590000076230625, + 0.607028968244777, 0.9314930397649978, 0.8702997215329452, 
0.10084431602237554, 0.6529307993856848, + 0.9090801437399376, 0.2605564372445186, 0.028612331801815993, 0.13196706728579255, 0.02249972770865183, + 0.7755843942616588, 0.9960128168685074, 0.297409848058106, 0.45350404193205596, 0.9690431980322582, + 0.3669961656632227, 0.4373185858815869, 0.23424249313713097, 0.977475328346117, 0.09858678102585494, + 0.5426556964129738, 0.9628235963714177, 0.09316255431007736, 0.5033521116645547, 0.5170391469592438, + 0.42784162421906624, 0.4819691503234925, 0.9368018891260819, 0.833066447260375, 0.8813310463075268, + 0.15230153188408768, 0.21602383479606846, 0.42842407224737655, 0.5948840854878977, 0.8810269861584001, + 0.14002321561255215, 0.1071330426625392, 0.5289024134660352, 0.9993870826771801, 0.03670093058785795, + 0.9093036470374543, 0.033514863634174996, 0.3589096735314393, 0.968736500563332, 0.9287983530571882, + 0.7850029007556029, 0.8149602638141056, 0.13816081706664618, 0.21379952689204018, 0.8828361362660744, + 0.13472502021877197, 0.6329896576402828, 0.44786558864086445, 0.15789743374420173, 0.1709448380594758, + 0.19252572219692232, 0.18036270552792533, 0.11289880306326316, 0.021881784340196164, 0.7343459481719553, + 0.25465576439235804, 0.06474230589395158, 0.8301055699827586, 0.8150946346767723, 0.06496108861311467, + 0.4328476240888399, 0.8410388625230115, 0.33272714568608797, 0.06927435854485209, 0.5244114371406022, + 0.2386755729321265, 0.25437121516465033, 0.08765399997103263, 0.8571227794644961, 0.09269009505777925, + 0.29636262100159927, 0.12040022098514491, 0.9225054791443137, 0.5374934783524302, 0.9565172095539238, + 0.325581240363358, 0.9217879677685186, 0.04683064160348471, 0.22664723777562046, 0.051417608226309475, + 0.4299370825127503, 0.8988396073345346, 0.331828173555746, 0.4477802057415682, 0.0581538942759503, + 0.32732100922546326, 0.7600527093822825, 0.9941005491373017, 0.9382175644638661, 0.23454703097933838, + 0.8360686771555237, 0.4486011954525093, 0.2396109118703933, 0.4148259984511603, 
0.13703382153647203, + 0.014768682305215775, 0.670754377369674, 0.9293271083699988, 0.30460680464896894, 0.3342485620198905, + 0.17038593322334694, 0.5397136324606268, 0.5235215265040551, 0.8559090413529336, 0.6751387265314722, + 0.9282913145034546, 0.8698949436935833, 0.5388192118347033, 0.4430773099812555, 0.21568728303108498, + 0.28014007544696073, 0.2147908428579447, 0.9397282674766603, 0.6986668768727101, 0.8519547074922509, + 0.8900227236978127, 0.5724301560051253, 0.22726472011548227, 0.8731278729063339, 0.6278161252233143, + 0.9906507345625638, 0.07421559133882827, 0.21954056799496124, 0.785302457185616, 0.8034737287747157, + 0.7143378383439901, 0.6410500667624983, 0.7873727703869978, 0.922896477893936, 0.621337010508387, + 0.8449748767018475, 0.03732651517241348, 0.30042935417259764, 0.9867906045964168, 0.20977519205133544, + 0.14950926130842845, 0.6433156024933611, 0.42388941776413813, 0.8407461750844862, 0.28245270264822564, + 0.7574826622334812, 0.34873932176565925, 0.03184765464322359, 0.8572631297299002, 0.3027267705853969, + 0.31705732907093465, 0.02054386896588034, 0.8538437196504572, 0.7683931395074164, 0.4928300458363979, + 0.13607794370091286, 0.8194258357427311, 0.7120256785725886, 0.21195043574798933, 0.9038086329201231, + 0.5923664788186207, 0.36072360134366377, 0.7497512182510989, 0.13611279636760565, 0.953434543863607, + 0.4698316695035778, 0.702907142318454, 0.018111817465786384, 0.5186143747786239, 0.1262756415570917, + 0.3106288762655872, 0.5909147810087665, 0.08583321246739573, 0.13213304451745556, 0.8579793159249135, + 0.4165854014765501, 0.2615606182462341, 0.16362377391955907, 0.2460391785032675, 0.2597621109600533, + 0.9273505810782228, 0.9017206455951964, 0.8408332366645039, 0.28428530243342887, 0.9068827011493362, + 0.8492585928070444, 0.362379647288799, 0.6131228749686409, 0.6560283056307786, 0.4972894478861094, + 0.47882233490179127, 0.8549647720950472, 0.6236830213567182, 0.9050253476055222, 0.46395581980774425, + 
0.05228188387560795, 0.2740075559813191, 0.76312833986336, 0.998201109572808, 0.5279030033611596, + 0.28606412691046723, 0.7534742315481235, 0.6520857901115255, 0.7956635886122484, 0.9650200573520742, + 0.6529826920994365, 0.1530775704879982, 0.7221388509747162, 0.7427662365918138, 0.09747308321433545, + 0.07799659788964741, 0.11616476125468578, 0.8171304163681191, 0.3968356021217494, 0.7627434471753259, + 0.650141913944763, 0.5382042778798547, 0.20753332494455634, 0.5822707320428531, 0.29539406036939275, + 0.5252886457089591, 0.706758145774099, 0.21245689034478643, 0.20225922331967494, 0.31695010910675236, + 0.43008529261945827, 0.38205474507864545, 0.6505674331555952, 0.8334636225036565, 0.36602160070290324, + 0.8441237432776622, 0.35193890623697455, 0.5659586342616123, 0.7725671032679924, 0.2741246975527478, + 0.6921725205176384, 0.7852561605366281, 0.5158219651471875, 0.011243054744094594, 0.41310896576890466, + 0.45648556431344256, 0.7607634306231361, 0.8220103541446414, 0.03583507795950236, 0.8896008472504486, + 0.008490736139454946, 0.8996906260021484, 0.6581482230156697, 0.29949427086355307, 0.7611815140143026, + 0.7896569641883417, 0.8305010025820937, 0.4895358160689116, 0.07845910752333318, 0.764372330238618, + 0.020779029003142324, 0.43120977239632585, 0.45653962008349014, 0.3450016679166775, 0.7353521901879041, + 0.2174802520595922, 0.15316396490313255, 0.5684667853663021, 0.650338274041805, 0.881989424004692, + 0.4228887773715255, 0.7017405512185082, 0.12533753035135697, 0.4324324041965836, 0.38014984844379196, + 0.14910517424215053, 0.6067985070905968, 0.3234336649677997, 0.6362789547433481, 0.06365002436838152, + 0.14656248562479735, 0.5164533032240637, 0.2665684944776263, 0.049817317395853045, 0.9437810089681916, + 0.30306728941495165, 0.9283798786989146, 0.5117140120602129, 0.9134156109484025, 0.2354617936330704, + 0.6717649574105667, 0.5127266828750897, 0.38036721797278994, 0.9536945483930525, 0.8380368930039253, + 0.1919591608372203, 
0.5297658162781413, 0.3930424142667378, 0.3812123715027973, 0.5732887006167159, + 0.15779654764310092, 0.9234508591332773, 0.8306322790133498, 0.26378479527925247, 0.9044077484662371, + 0.9950940798528922, 0.2394109867192058, 0.6916956933545159, 0.6673498470293923, 0.25697106359778543, + 0.6205263496586823, 0.9179728189664107, 0.614734048692408, 0.6027042796449013, 0.6323807244895218, + 0.8624134626120791, 0.35987345097452517, 0.1907794697781754, 0.3028583457674482, 0.0014578989246246676, + 0.12971474106068215, 0.9032407065460043, 0.17193771361794707, 0.8010115925730721, 0.41666776761366175, + 0.002685351317869067, 0.907979042972644, 0.6302723014781895, 0.6575827046029655, 0.6263104226338567, + 0.16092756634517225, 0.14532783350349865, 0.3718310126558474, 0.5572656769856846, 0.3424263786134797, + 0.024270514580325364, 0.9289092284509093, 0.4127028958078305, 0.995172449354327, 0.8289902073924227, + 0.9530126297057642, 0.3415707400145297, 0.6964158642930925, 0.500818324070304, 0.9862346803705233, + 0.37915290474476626, 0.4461255578262746, 0.30114558065352737, 0.5631128515605822, 0.21470083449509225, + 0.8956108702121777, 0.3024882468162112, 0.632879837833451, 0.9685183150957329, 0.9759845634823414, + 0.41423348932439163, 0.004546522645031303, 0.9304927494837946, 0.30929070993956775, 0.7826272454138963, + 0.2680006068166886, 0.04669414966964047, 0.856151707495239, 0.9989049820239663, 0.3111552068824247, + 0.16378082544758465, 0.8915583374517714, 0.7462871960223535, 0.24476108041642786, 0.8919083641817828, + 0.6966619446789848, 0.4448277223227939, 0.47562115955991857, 0.007052684253707531, 0.6771456594244746, + 0.8390026176115344, 0.9389863281069475, 0.5132723536770091, 0.28588637528251637, 0.5294010050193729, + 0.2744233679516902, 0.8507893439535678, 0.5236548930432765, 0.37112112192758095, 0.4775491658987586, + 0.5187137842568543, 0.8193025368566559, 0.45868787962240154, 0.21353220464874112, 0.6568767974937889, + 0.10943401504317929, 0.4259206916176925, 
0.3557227685622363, 0.8753444290282524, 0.10487404904883002, + 0.7253708430697816, 0.38301454550749847, 0.7980044956400881, 0.6048926154492419, 0.1882119860377236, + 0.6922735149400973, 0.014127479673780918, 0.4700278297160906, 0.5434389008606694, 0.3024322195782827, + 0.24458759535876862, 0.45574574846480853, 0.7379460615617371, 0.023690727384484522, 0.6784004257720248, + 0.02360400024908793, 0.15354263266391432, 0.42650678906087014, 0.9732052582093879, 0.7271919013576847, + 0.7704986131316082, 0.6119778443997322, 0.4026952924740155, 0.29202480552983767, 0.9166629184786692, + 0.298992596956283, 0.3402690620046872, 0.628564336914344, 0.08854677037549652, 0.19030981978848005, + 0.1341174625131395, 0.24634964585544095, 0.07502297232413513, 0.9610349748415238, 0.02355636294467045, + 0.1846647883615723, 0.6032240304756097, 0.14366892969004819, 0.4235186782039845, 0.9788918318798452, + 0.13598935992309713, 0.5192993689272892, 0.5701233503562095, 0.5186534947305099, 0.02863088163602967, + 0.07723267818449431, 0.40085772597399394, 0.5404088809849145, 0.7206791384903469, 0.440133833531837, + 0.3288575659426388, 0.6447642606981637, 0.009895965772183879, 0.8596424114409699, 0.8953964701711203, + 0.2665242654042336, 0.18832545730747252, 0.6085402592486129, 0.8883002051270429, 0.9200943671522221, + 0.10221089709393838, 0.4816194160242998, 0.3976746983992273, 0.5840635013822746, 0.12886316469913406, + 0.03848746983483797, 0.8142270861493649, 0.19560645318162806, 0.6319427218288056, 0.9887248725968469, + 0.8380041436818071, 0.8346079391390944, 0.7399352175796418, 0.17103167998576296, 0.4459387493086917, + 0.6139692209707068, 0.024420994499118942, 0.34926705900556854, 0.7159384098666771, 0.4561739680088267, + 0.13350257292998435, 0.9230427461151006, 0.2900684191020383, 0.19770965037169863, 0.7706955893521374, + 0.30810970979128016, 0.4652612968535793, 0.2931118607225961, 0.9373998495596302, 0.11586465570461602, + 0.9771867918938736, 0.6307215397392307, 0.6696122755659888, 
0.7581774483822272, 0.48425250907836204, + 0.669183078789963, 0.2386152622400628, 0.9661396481299975, 0.793054450271054, 0.8711795452250837, + 0.4136561532303761, 0.28436663148462527, 0.19544046811243343, 0.8783330659777079, 0.4309317810807699, + 0.6791337348202509, 0.11247980167197891, 0.07145675603275004, 0.6271055850123975, 0.8009821990819471, + 0.16261670057452993, 0.10096160568696722, 0.7750291408374171, 0.7619767309487686, 0.6234024471842868, + 0.7456791453022824, 0.9034169664629678, 0.38926051257600147, 0.5878183225017242, 0.4762511800403978, + 0.3427472851213783, 0.5229498196524415, 0.19100645840505737, 0.2063826479975016, 0.30986928493275756, + 0.4092137864518218, 0.441074716407614, 0.8839677990379725, 0.9429575143163699, 0.303162155012238, + 0.5253202610513421, 0.9453496962684602, 0.10624948595479444, 0.5621732507602889, 0.6583878187690957, + 0.6215633689577573, 0.562360421384021, 0.14917495864570574, 0.1990541175703977, 0.13673646082849844, + 0.8010236025928041, 0.9182708569476886, 0.3747334543024966, 0.1324946173907643, 0.18859605999193563, + 0.05443102316067028, 0.15378035856449046, 0.20938221090808495, 0.37698919753616655, 0.7564054703944864, + 0.5445364366349917, 0.2513179974053199, 0.9796191453417322, 0.1653088206068929, 0.20037231732659289, + 0.7566771749244087, 0.8140378697236609, 0.2671475283709549, 0.1265945021649606, 0.48145587740765083, + 0.04393292615504507, 0.6088494211643908, 0.5617434638102767, 0.12864982486943544, 0.9043544854206975, + 0.3092177161664381, 0.7558252141582021, 0.5583397485524766, 0.8958915681604015, 0.39166413971568137, + 0.05839221069377043, 0.8252067858503819, 0.367759370534129, 0.8047043908704921, 0.331070513727568, + 0.8427914006624496, 0.4546428420882097, 0.542628260527527, 0.3940549809751147, 0.3078168073980103, + 0.6433838365102315, 0.8654464557062445, 0.31112000075509294, 0.8402252959498852, 0.22090446924316398, + 0.9912339737001905, 0.13002675979418454, 0.6526138412698727, 0.09216879607904038, 0.3568983266626977, + 
0.5241581243146272, 0.2869149872217063, 0.8194309518812257, 0.7346681932862287, 0.3428595859866824, + 0.3309934090725344, 0.4565862839563939, 0.30889149763843304, 0.37657346739120734, 0.9076027077945752, + 0.7298996208197877, 0.6020940366184432, 0.6713317484176837, 0.4777868315595878, 0.07832458625798133, + 0.9734320636176302, 0.82810657231182, 0.07414715861943444, 0.9476078973497248, 0.1538330289760801, + 0.29043280851801845, 0.5925956879200618, 0.34190091925081834, 0.1497710872432816, 0.05043284229565137, + 0.4252195556035926, 0.21628338657472368, 0.43032266953940124, 0.4187764379308838, 0.2134323748105864, + 0.6962346645567948, 0.8310724037921118, 0.2990105838756544, 0.5365570179433197, 0.3060407632888966, + 0.03278489977338639, 0.7896690512910464, 0.7761621858701311, 0.8854051017943605, 0.4825202708883515, + 0.8796702772317825, 0.9478429513901184, 0.9797923215172825, 0.9199145227784984, 0.9811092606310234, + 0.43833619232328425, 0.34955560173990274, 0.9270779744224682, 0.517775519068177, 0.3105554763905335, + 0.10963652896264764, 0.4812600005958907, 0.5294667536357233, 0.2041727064881632, 0.2838990139784022, + 0.6755739304087508, 0.28742329888160345, 0.5675581595326639, 0.25814051157914375, 0.812177371806046, + 0.7126000236614023, 0.7332494031347755, 0.7228397599802612, 0.4337637334792769, 0.8268815635888066, + 0.8412469413209996, 0.1474476927435372, 0.9234764162963465, 0.18798027885314128, 0.2697588639359385, + 0.3698505396815416, 0.3661020382169693, 0.7843390980659217, 0.9461578912277743, 0.714179585607935, + 0.48741192414739276, 0.9831417234799066, 0.20082315422941044, 0.13704347119861737, 0.8512555981805043, + 0.7964846003028568, 0.4091923671806147, 0.31045855241488995, 0.030896355996025582, 0.757015247112456, + 0.07197161050967338, 0.49144132964590803, 0.37924878597108735, 0.9059632653148812, 0.33021794490919254, + 0.5208171862090942, 0.1257182515892451, 0.4419299663780526, 0.8604289208687927, 0.13151079869255744, + 0.47587194131935573, 0.8999039827858538, 
0.9845071663603152, 0.9237927583581761, 0.2386871432866735, + 0.5534649564750587, 0.20577112469977277, 0.08234585936313954, 0.3759674272553455, 0.6874593913390324, + 0.4813103230605512, 0.05287495437156575, 0.42250707783410546, 0.5229590720141394, 0.23903725103598494, + 0.3383166861373713, 0.4504454759605123, 0.1805762819338792, 0.522460710837446, 0.9306900111882482, + 0.04014079975154461, 0.7550358099700754, 0.6437113212818949, 0.9288104329310127, 0.5963936691423887, + 0.19749759108749632, 0.934834484154346, 0.592757344644676, 0.18007685104880522, 0.4897248549079305, + 0.49879746283718907, 0.5181743163633296, 0.6031889666449555, 0.7805639820944615, 0.44942693186825433, + 0.5177390179643111, 0.3503713482912416, 0.8656802141696627, 0.7261771617422799, 0.28516427684518286, + 0.18547117379016476, 0.2345504834009452, 0.659819961882109, 0.1562254714126211, 0.2116248413750591, + 0.7061814496258494, 0.9076819988382887, 0.44361192755265433, 0.4544050759784106, 0.05833381601508547, + 0.7271458786797137, 0.4584620530329657, 0.04557477464432602, 0.5273641821348103, 0.29305104679289384, + 0.9237274728426907, 0.42995883071646424, 0.150731269366838, 0.9148202227696328, 0.8219229104805507, + 0.7902051021070082, 0.736886207394238, 0.8806449743100146, 0.24449817784266992, 0.23503360698645348, + 0.8971672699276627, 0.18894389117369126, 0.45174444962748084, 0.17493126362489997, 0.836352740461245, + 0.084927652377224, 0.5874173473576125, 0.6471629055673879, 0.7636427026808356, 0.44699194592881064, + 0.9027928153471831, 0.646378969429277, 0.13980337040199153, 0.677975367124943, 0.5906936109268534, + 0.6785535977866709, 0.9966525280606527, 0.43725940771686045, 0.6265233510815367, 0.5872789577370551, + 0.4736801913093771, 0.9603892641978854, 0.7043171642877438, 0.8844463117078077, 0.256382311639917, + 0.7146256258613584, 0.4472986849872749, 0.5445726211043236, 0.07015458200730773, 0.9360515843195668, + 0.980163775973384, 0.985178025941136, 0.31340234650410514, 0.07092537391042053, 
0.11350521820787318, + 0.803070845573163, 0.06508056419877961, 0.6849746996140124, 0.09192339619379086, 0.5446352308471222, + 0.194334173882318, 0.6399746259259406, 0.8848739610165371, 0.9057742710619541, 0.6574672138791803, + 0.28265824455266875, 0.4913992277519147, 0.2714428120631547, 0.9029762067351483, 0.404016607921286, + 0.9356512729551854, 0.9551581851145005, 0.6947902026070869, 0.45378359660950796, 0.5546194817466522, + 0.45385085549626847, 0.6891355904241119, 0.4111992242996626, 0.14430766081621926, 0.9150723702793041, + 0.5880079661439431, 0.3510187373660053, 0.8065104111670672, 0.12437922609279806, 0.649702855877862, + 0.9085297893027975, 0.08020124109526339, 0.23960649799812972, 0.9186503036241112, 0.6128251959085931, + 0.03236593895453177, 0.9989092644107732, 0.6139755811995422, 0.531200704449833, 0.28713352268247694, + 0.8119110437630124, 0.35993152453860877, 0.0038976147845962705, 0.8179732055564214, 0.6078572082016351, + 0.7039043930785024, 0.27955619536763154, 0.24943397543505585, 0.4377160763702883, 0.615845449978516, + 0.9926044305540519, 0.22369232034773734, 0.837911674296939, 0.9920312602240504, 0.5302385601739379, + 0.9984833143167338, 0.9100520555294735, 0.4881869363132024, 0.7572016769176416, 0.3525512655168155, + 0.12705607337288172, 0.8929567664411113, 0.5921658366807947, 0.647725265982549, 0.35778319465374697, + 0.5371288877829645, 0.9389809504451423, 0.10127913536672994, 0.7894555792452711, 0.6397745508699528, + 0.6904766711729032, 0.8851484705553312, 0.6595178169483946, 0.6493475195379939, 0.16266496588688184, + 0.587838729413158, 0.29962263955983737, 0.8480951556880845, 0.19518846326105055, 0.958636807198965, + 0.8393291964395642, 0.23316328948597598, 0.7770975444771326, 0.822916000728221, 0.47941556948808584, + 0.6581754842166929, 0.9855034320025721, 0.42924208861371305, 0.5855290138345451, 0.37064042308391554, + 0.004577152771646387, 0.8602928494761776, 0.9168552444961787, 0.9108480369512229, 0.893327036527009, + 0.07129503182075347, 
0.36190677610423394, 0.13006685154737352, 0.22336747934409962, 0.34431254253104426, + 0.28420265690446167, 0.021455180502311988, 0.6181877024917093, 0.4057592785578884, 0.48003550172456666, + 0.2919520303069971, 0.20679666993623924, 0.6798627396673662, 0.15500270093277513, 0.5293676430470518, + 0.9218869284324538, 0.3503742982580723, 0.2859867879819108, 0.03712073690303541, 0.9716344142525796, + 0.26754891235392586, 0.6850147449388799, 0.5257546416193012, 0.32936781677422045, 0.9893830201440166, + 0.7339188689753977, 0.37023246543666033, 0.12021580596319947, 0.5516764367214837, 0.7742462655458693, + 0.5919373805543477, 0.2691200337827875, 0.45951566195383053, 0.016331049390413077, 0.6981087700606399, + 0.2840242998700213, 0.48674075724774, 0.01668891048601473, 0.3752845409527149, 0.27342915444662586, + 0.8528314174742758, 0.5980334785057538, 0.8387153508863529, 0.7713408897778646, 0.1468868545245562, + 0.5788988057292189, 0.7662859317916594, 0.7171159044268629, 0.817754650769796, 0.5482057597951033, + 0.011134810282633523, 0.9962390374902929, 0.5001931582796274, 0.40927892479579764, 0.29105779384028485, + 0.5380834481873809, 0.8967872515365237, 0.8549568232850915, 0.39646962987851675, 0.9263346961295181, + 0.27605583178904336, 0.46945195004362084, 0.42943183722258005, 0.11262561591381115, 0.7716819701640273, + 0.14477018287134902, 0.6463242408694697, 0.19207818907715868, 0.9075050013001706, 0.8815309744840273, + 0.05574673881075609, 0.46733411135466785, 0.6946143181497039, 0.38793879905655004, 0.14244221740052387, + 0.013480534620852858, 0.8156160068760853, 0.13182377894341468, 0.17330112801270003, 0.07297844321589542, + 0.5295509232980138, 0.827583416753828, 0.24020578767267653, 0.8297016951468782, 0.5513762741251969, + 0.8695402220854705, 0.7415436773009056, 0.7966852951467228, 0.5554113959439213, 0.9207630818846864, + 0.33572022350113084, 0.22413092997698647, 0.5038534734022597, 0.15440150898459948, 0.27218206910414566, + 0.30334534668880353, 0.901189057075367, 
0.07523736611730225, 0.5033577300733442, 0.45203924270457674, + 0.9306268162679342, 0.5834937223166203, 0.7530955894999412, 0.05594106545234934, 0.9831695970237342, + 0.217181979191437, 0.16684512190997913, 0.27577889120361854, 0.5119652884890653, 0.804321900975404, + 0.33302707750831784, 0.5427229610650799, 0.713355641430544, 0.4544117040265301, 0.33263741058689944, + 0.798318758900835, 0.6515041772404812, 0.666030485712458, 0.6521824543381127, 0.8872099932516173, + 0.5827879256252866, 0.017851277943374932, 0.7721583926717096, 0.007916818373048695, 0.7513208927532176, + 0.006251438999501757, 0.15470405597679626, 0.2939876897339391, 0.5455649473093593, 0.25033664107375675, + 0.8045557115634316, 0.9331006720580732, 0.9690272763169439, 0.2422570434725364, 0.2815282348852275, + 0.14276309508816698, 0.8556170516642067, 0.978511304137195, 0.28589215577149185, 0.5638496956492505, + 0.1892770281860332, 0.2429746493942362, 0.0025389875820115426, 0.5633913400737991, 0.6142468214359225, + 0.29207927395164146, 0.15793765648113212, 0.8313075178154284, 0.6573958051418146, 0.8134065277629373, + 0.8314979071042918, 0.9047698912977503, 0.4737108529589482, 0.8924584932595006, 0.905213194623962, + 0.21746264351668498, 0.24084112883869346, 0.7548803774502774, 0.6620564587626303, 0.8177084069796109, + 0.4497355161525367, 0.49637305316783686, 0.06031253844750917, 0.32403381469056414, 0.5473538946711503, + 0.37171885576066066, 0.788324525461174, 0.8379583977067151, 0.7914415434285964, 0.7617362100641591, + 0.3571904839204214, 0.5030827068724871, 0.41326739640341403, 0.1494233919463478, 0.9734830518971015, + 0.07844539334766898, 0.6164318883173433, 0.26604132949575376, 0.3713751293399885, 0.4476627640071469, + 0.6634616329456422, 0.19624534318306774, 0.31960565477657243, 0.5067756679199623, 0.5431338293299597, + 0.146136863197592, 0.9490258028228717, 0.23849759863238973, 0.8360576308054636, 0.07430146291190665, + 0.945195923181658, 0.19785676440706323, 0.040920026147275634, 
0.9351549004081587, 0.010145769760756762, + 0.9125721723942728, 0.10150767966133589, 0.35528649705266857, 0.07555803254555427, 0.6987575618880669, + 0.9165373839456068, 0.8050913147835768, 0.407948922917024, 0.813655360422871, 0.3389911551070691, + 0.7002163288508261, 0.9688778866268454, 0.5772244084612589, 0.7805785293121652, 0.8953649896934751, + 0.6409511513911823, 0.8095078510257275, 0.4777207829514538, 0.8451782444520659, 0.335433172985768, + 0.4977205800765071, 0.18074933627396428, 0.011889931891967853, 0.004166929790526908, 0.759591459823289, + 0.5173857851845302, 0.5684503772164061, 0.4252703819216933, 0.7322266370072392, 0.3619909853418556, + 0.11610589530722226, 0.5152027988693856, 0.9510668758737386, 0.6817389961102805, 0.48543136704276146, + 0.09787818155047345, 0.5607323995561343, 0.39469982049571495, 0.5325301308092999, 0.8948081253962866, + 0.8109321524273291, 0.45199903832174704, 0.4820248511800226, 0.6237657682852961, 0.11190767575217597, + 0.41468885862956595, 0.952487495368475, 0.5848071780937557, 0.6219312022306421, 0.6480574573833584, + 0.23261939097520345, 0.9539456368354925, 0.14453844585721976, 0.4358554432686752, 0.16975837956167883, + 0.7503551540536678, 0.2823068929428454, 0.4795388911622631, 0.5318493587525929, 0.23730579538535723, + 0.38065141915924616, 0.39011691639373147, 0.7022920387327858, 0.6125863012073, 0.16019686273882827, + 0.0741103312725464, 0.46118310745733904, 0.2915310883826556, 0.04594719731054464, 0.46898852177815886, + 0.3918139095493063, 0.5683958150409495, 0.0016127262625461602, 0.14430866096415318, 0.6241743150793291, + 0.39611481138107985, 0.7601364214124623, 0.13773906172571104, 0.9226491597075647, 0.1418100423271802, + 0.7045599148410046, 0.8398035524152211, 0.6528042630025218, 0.15778714571313024, 0.994870203207059, + 0.772800369146076, 0.9952882193920738, 0.17219088305195918, 0.49162170622134427, 0.4800398884972531, + 0.8179092277145791, 0.3316607663131258, 0.8185650957620395, 0.29415351559983705, 
0.27665496802354606, + 0.6398419529595498, 0.30257000683340607, 0.10888843854080876, 0.4159560478956378, 0.09773646507064315, + 0.9506250929550092, 0.7165538499603809, 0.8272900417095005, 0.3761976935804109, 0.2894676415521634, + 0.04898031295079941, 0.815948657088464, 0.05740125344040248, 0.8862503765667086, 0.42230782730916483, + 0.7264912638482958, 0.8147927098290846, 0.25758121267255596, 0.21895483008399452, 0.7985490511307461, + 0.5103799906082482, 0.9884547684188146, 0.6686291645888692, 0.484441793375036, 0.8601060687899627, + 0.7384554517702806, 0.14807066769369526, 0.7261359772381609, 0.661126802613848, 0.5817499133519894, + 0.8564728643186336, 0.4635041841819878, 0.05298736734914866, 0.02056027360915802, 0.8716488273558501, + 0.04890808672895364, 0.2585022034479668, 0.5954012126022314, 0.26046382335600893, 0.23759124635806517, + 0.3992076404799272, 0.6422808583364977, 0.6649206411401364, 0.15411604551665736, 0.8190614274777871, + 0.3367949541526595, 0.8803702703035624, 0.6456050961200591, 0.87283709710225, 0.8043049555853818, + 0.3325368498089102, 0.05464647078648066, 0.405743459379887, 0.7246342475822394, 0.30369946870510367, + 0.5913308807974837, 0.4200052326488082, 0.5089633871150249, 0.15832813379306465, 0.2726185383027213, + 0.6691432118075796, 0.22192640914945416, 0.06201838201971843, 0.002263564670649254, 0.0639371951852924, + 0.8100798461788448, 0.7690821617520532, 0.9731670818648352, 0.7396124926918702, 0.7588132781952833, + 0.5362265551089003, 0.31083367094480774, 0.9458162437834565, 0.5368716137709668, 0.08517116370579714, + 0.47942796349580374, 0.5261160028198052, 0.9478110852305597, 0.6267918235164112, 0.34357318498071576, + 0.6090897163978664, 0.6989887666978286, 0.41884882789577915, 0.8729673065090883, 0.7348742173643088, + 0.14282913161490352, 0.5348405659282822, 0.8451824770339866, 0.47043432000714425, 0.5724754026148015, + 0.004845450283341046, 0.4720206792193603, 0.030236385139770006, 0.1985687036209035, 0.4100245842138641, + 
0.840237889049285, 0.8560137977550417, 0.5878198999741003, 0.6759759178415035, 0.11156936792070449, + 0.19867837844226632, 0.4238266789464491, 0.9389462218243471, 0.35982402411540404, 0.9395142817895802, + 0.6088522839551925, 0.495809851872564, 0.08942223322155807, 0.8317660366187062, 0.7806329623166633, + 0.2155657397907439, 0.07307201630214533, 0.28768795460889784, 0.06343261200769512, 0.2071829239320203, + 0.3170780274656366, 0.1506043694932555, 0.9820792310895309, 0.7822160158913193, 0.6423659932917436, + 0.1613397919051106, 0.9266921848571944, 0.6553830922925206, 0.21051657636558396, 0.8249183015059579, + 0.4815123260352835, 0.031719666791598655, 0.967802166668163, 0.9454108618730942, 0.5293521875346701, + 0.09085996512648742, 0.8696086771802893, 0.6558475527340603, 0.8606217054585975, 0.2603260429046589, + 0.19818373225226793, 0.1316304532806627, 0.03325976044014889, 0.888070067786244, 0.2074158254497679, + 0.6008606689278838, 0.38379538997444373, 0.6563307875082035, 0.09297033104482366, 0.844243292293567, + 0.566462013342779, 0.05858721938768008, 0.7301052253226809, 0.7818378247449377, 0.3237691657907772, + 0.5829477442390647, 0.6171896212326372, 0.6296115046604114, 0.03480113021204967, 0.5125896014708659, + 0.37680058524588955, 0.704404013389534, 0.9500305502606794, 0.8212428356808356, 0.7922807731193054, + 0.35535937814698193, 0.9100702344711836, 0.34230593684147415, 0.2418192475838139, 0.705291324266203, + 0.08815248124690245, 0.22547032693757718, 0.07918319472573276, 0.11559808027279872, 0.22451817825232534, + 0.7320609817127802, 0.9612275526159547, 0.4936381757027637, 0.907907636732286, 0.4545769669460069, + 0.2993644032656997, 0.6677929763533068, 0.676986645523438, 0.1288921738833677, 0.7880550451710757, + 0.2796042846713743, 0.5448808055755662, 0.29454967840372437, 0.4260218234263137, 0.29740354682628034, + 0.9178929801080634, 0.22172410316296243, 0.9297353999522445, 0.02219761721791591, 0.41414702570616047, + 0.7923343710183619, 0.8027080274236873, 
0.7870955110858734, 0.2792061593131313, 0.9001299507009777, + 0.9825931410330894, 0.05621048676635931, 0.5512165384711949, 0.4363577734331944, 0.29925251366005157, + 0.5150149640587699, 0.019048352354515852, 0.08842390316318727, 0.9485620997140334, 0.5714603385468893, + 0.11859155915861419, 0.3506808402123225, 0.913130375518895, 0.11039217262761347, 0.31010871275185115, + 0.7566882870404893, 0.5957937772573332, 0.8577310861905787, 0.2904796834316199, 0.7251122716795119, + 0.33767483963446376, 0.0069606300833238155, 0.4492464097619129, 0.1875332631036024, 0.22136822316909943, + 0.6817115912503763, 0.12318018950792442, 0.19987746865401068, 0.583265352991748, 0.9704051612366342, + 0.21003008044359084, 0.24747265432949528, 0.19117735457209262, 0.8192663869408409, 0.026375853087982515, + 0.5942077567376716, 0.41078526776480273, 0.0995417837300564, 0.2738310934479383, 0.7358879237072037, + 0.9271241546664925, 0.398580385827351, 0.8001633754794993, 0.5308150532993356, 0.8262151146797992, + 0.4794877783223914, 0.3797428456978046, 0.05375724388032843, 0.8519440767021866, 0.9534755878483753, + 0.503377712358978, 0.5137412913845685, 0.3298722428252219, 0.02845486763446292, 0.3450197447661091, + 0.7742909572231088, 0.4866561456938192, 0.17862507944582917, 0.7465599603487545, 0.9582392926917492, + 0.3500864866738508, 0.03638992047589751, 0.18111669035507327, 0.6163690387027178, 0.605756280334851, + 0.9292443696626554, 0.4805948261590651, 0.011583319124723213, 0.3992985263770744, 0.43130173101366487, + 0.2393933838154727, 0.988903362370932, 0.8794758096877898, 0.1635519969529431, 0.4163231676661515, + 0.6494780439688286, 0.6569544112621772, 0.9897758780710005, 0.43466661523174577, 0.46965615959224305, + 0.9349245631523007, 0.016688872563245805, 0.014403270787187061, 0.24846060226911404, 0.10417530992527435, + 0.44421497794056375, 0.33404080089968813, 0.29640078573238926, 0.25310573621742527, 0.34116386952303024, + 0.4809968269549316, 0.05837109701188958, 0.3648108851094741, 
0.790803746483735, 0.08009873177326488, + 0.21476822079717706, 0.5248767360171599, 0.11200283200500039, 0.9202741846945232, 0.9186023788857333, + 0.41370936412042725, 0.26075958601276994, 0.5019925089149055, 0.687867618004529, 0.1977493883962561, + 0.33623960671264874, 0.4989861566010343, 0.26770485265829935, 0.5877758570729219, 0.7993433428774431, + 0.10093571811853375, 0.14858267063327268, 0.7100844468838183, 0.5402402874128852, 0.07867695207510406, + 0.4040196288562755, 0.047257770878987415, 0.8021081182848567, 0.505114629154694, 0.3096161674618526, + 0.03314899778447855, 0.3886846431301112, 0.567644579803449, 0.3524273147382102, 0.9442051201853123, + 0.4830346788060862, 0.24769129647869204, 0.8798936216652578, 0.7703383807832973, 0.9800458613977039, + 0.6957212216761687, 0.4169161408929506, 0.9794457364091196, 0.8568535984629932, 0.6811971754501239, + 0.505240951074268, 0.0708902729994767, 0.6061460243750493, 0.8229726969052238, 0.5760713345891831, + 0.624524358049323, 0.4858506581683727, 0.39105487377188475, 0.4875889692209341, 0.7289120198461425, + 0.6082640133635191, 0.9411595898410152, 0.9652384781192399, 0.8526429531296893, 0.2517181927328055, + 0.17933695516621095, 0.09657368137977318, 0.06761349757327517, 0.889875082020315, 0.9399593535167685, + 0.8358901392952233, 0.8209439285022231, 0.09943829854256803, 0.9539657462347683, 0.7647944314432883, + 0.701201744248708, 0.1251101106504363, 0.6142285588840186, 0.07560688438025576, 0.6082559580539005, + 0.7497563646344403, 0.5242846200245552, 0.8951499627990374, 0.752181402082427, 0.8098607095247266, + 0.22347819684613468, 0.03930090354822835, 0.8449563453332865, 0.5497935156523039, 0.9527466632646967, + 0.5481753461253085, 0.8553077558110618, 0.5388908909866726, 0.3447050204468344, 0.2560517454450957, + 0.9038186176416276, 0.5133759451494109, 0.4280029263408006, 0.873343299183039, 0.18983013829236062, + 0.6487657899344234, 0.39108628519373756, 0.07540889607542722, 0.8235203645997633, 0.8348662425894039, + 
0.02899643273268404, 0.00544809409009217, 0.3751025770700822, 0.9023617692649458, 0.09378921599818801, + 0.7132175284472319, 0.7388896970298694, 0.1795439189088558, 0.9608167473851421, 0.8805611540165458, + 0.43995755436873085, 0.7227214436747231, 0.06436096853716844, 0.5277349322269849, 0.5612816137158163, + 0.8561568012120608, 0.2518238429398917, 0.37910883483589186, 0.7771488919052115, 0.28126775498855594, + 0.9742408728498494, 0.743957142985092, 0.7774538361413483, 0.1129593715148074, 0.6988316967657843, + 0.8791349281725823, 0.013985299649761695, 0.6309124138521598, 0.8838217880557527, 0.5633084641714449, + 0.7823892071207204, 0.3101551619842374, 0.49022993419510497, 0.5366202061872963, 0.4399406244482179, + 0.10650595733020196, 0.1651583201417326, 0.4479151314718127, 0.6122425093701338, 0.5315531909228914, + 0.13066677035636798, 0.7318995711720537, 0.219420185722919, 0.696496442824536, 0.8912735960548054, + 0.08912303187405968, 0.37589459000188763, 0.779515032098953, 0.6054987282458344, 0.965064775262806, + 0.07523392353887093, 0.35204784579892134, 0.10498090717712572, 0.39198086935849485, 0.9584446507571274, + 0.7339128685148123, 0.47159805230791974, 0.2957922713360184, 0.3042563102139143, 0.5530765247692794, + 0.9799373637002523, 0.0032622875447775312, 0.3621594415092303, 0.5555575214873543, 0.10449347456200508, + 0.5275911031586072, 0.14998202567570906, 0.9300791172269169, 0.19395370469190631, 0.8616331480731969, + 0.23952151876929173, 0.49896021819806735, 0.6275813002300293, 0.6587609868774141, 0.6403438028196977, + 0.0820986026950482, 0.1345765816114286, 0.9693511475604221, 0.4011192128765082, 0.9797117083332056, + 0.7198610195350436, 0.45110096091671004, 0.6360702166469601, 0.25822527856379607, 0.607954402719795, + 0.5951480848968579, 0.13110556053538458, 0.5077021816524762, 0.8377271318217183, 0.08779812685144617, + 0.5388153245715364, 0.1804017842666683, 0.7403540576604496, 0.6223932235798858, 0.6428224571229607, + 0.36576146081882377, 
0.22022425623659092, 0.4508820428473178, 0.8486601873049032, 0.9124666803031429, + 0.36927808639599113, 0.9632103474324705, 0.38404569548614276, 0.10034476658779246, 0.60869755058736, + 0.44786677473041714, 0.36992152835415826, 0.1842086762144386, 0.34588548944033526, 0.9557114127496255, + 0.6176816874055919, 0.68669070131647, 0.706736815218591, 0.9051641692181785, 0.23696278823477612, + 0.10805433299033618, 0.8533074456545962, 0.9199972386741255, 0.33408535957924057, 0.704079197750175, + 0.7799438825307219, 0.5977249697585852, 0.8797896801779099, 0.3168484839237833, 0.882747049801107, + 0.21672960781100203, 0.22407358055547544, 0.5625454559284596, 0.31020471889324197, 0.013399623754964507, + 0.2971160947783905, 0.057976811544413476, 0.7182263407743993, 0.37190815230706, 0.403479359789107, + 0.10942454796334922, 0.24469626034581193, 0.4865555477115453, 0.22889056858432866, 0.5382319337791864, + 0.2118481651467614, 0.48115942815526014, 0.7456582332218422, 0.8538968920489578, 0.7338933338610225, + 0.09133753474181794, 0.504581938364347, 0.8384199034567146, 0.6349070450329913, 0.5560462071645996, + 0.18125834112146777, 0.9441476571686422, 0.003965465482779873, 0.9815084638103587, 0.7896767366689702, + 0.9470481444126627, 0.05887352703433224, 0.6411261866124442, 0.3766594375574035, 0.7914504269181175, + 0.28809648989381564, 0.5269459109399771, 0.14769806616521086, 0.020110698713186603, 0.2648698266995847, + 0.04177430884627942, 0.7818964760813322, 0.3184815432832945, 0.9635045462053818, 0.05756464076693413, + 0.03919403323811799, 0.7978095277053101, 0.007170022077887639, 0.6873020539797805, 0.2424969414813284, + 0.5882830229331992, 0.1567908855485176, 0.792993145482098, 0.24970078345529045, 0.8905431418544417, + 0.6699851166044413, 0.9626787314778625, 0.10919624802210393, 0.5740374921689396, 0.012419673149375754, + 0.4828152843977518, 0.07773695353473453, 0.6537545048880314, 0.2282323914595461, 0.838512198686958, + 0.40425190233591124, 0.9109797869238139, 
0.820235290661094, 0.4405932288211023, 0.879548978087951, + 0.44853988227914665, 0.016692006261037395, 0.6085530322980023, 0.7427542828773237, 0.7955229561881966, + 0.08627379354160591, 0.962784356962757, 0.1262596202860955, 0.33873544820384693, 0.5299581611504169, + 0.8278282608943434, 0.8076430022111963, 0.5014658659241744, 0.33178489020230684, 0.9693789458544743, + 0.46555199696767424, 0.8481813292051562, 0.7664924889792716, 0.7243577884206216, 0.8076880088445598, + 0.5855809250257206, 0.623126126742494, 0.07422560748655072, 0.21381537448192722, 0.017495564761169424, + 0.8687688643493331, 0.15632277408697337, 0.3918635766170445, 0.6489847040055021, 0.994572923208129, + 0.313861866727421, 0.7444845623669116, 0.749917869618224, 0.29827505803905274, 0.3003876942246271, + 0.5272395236523542, 0.8513576293549324, 0.5411408728584076, 0.13506808373061063, 0.01705635218031354, + 0.05374819852706947, 0.12491484483078552, 0.8508095503062006, 0.6591879565314969, 0.6606705238935365, + 0.12152520929588928, 0.13752013135966323, 0.18632171831484134, 0.07829977853292136, 0.7664259932278479, + 0.9490186547936486, 0.8776331543876847, 0.9799737457979715, 0.8919126661641827, 0.8740529861566809, + 0.3023566935247226, 0.8738711169383052, 0.9741373049624726, 0.7351716281255066, 0.48255377118784715, + 0.3804547055456077, 0.9709112364570469, 0.751241464502428, 0.26844357125940255, 0.11857367654504836, + 0.794333955555089, 0.05041611333230889, 0.6161526215420079, 0.18364842527255254, 0.14093121239946482, + 0.09300692086633133, 0.02918921236160299, 0.5983519381070685, 0.7805780895616434, 0.8029089563219114, + 0.6189587658475317, 0.971417612132888, 0.2727185108649317, 0.6522175816565678, 0.3999524584363361, + 0.4309505278441734, 0.142891006959614, 0.8176832508669591, 0.4227089499034231, 0.8780346550913167, + 0.8646540357859253, 0.9537397211259445, 0.4294211575150887, 0.08996896893533579, 0.7695586705065749, + 0.6240419567737736, 0.17396562036705587, 0.1775659073072101, 0.5284122570418186, 
0.6884729998799559, + 0.3348321863220919, 0.6992611565406978, 0.04718163249168106, 0.5944263580553424, 0.5598049251515739, + 0.8524084412015507, 0.6646037647589217, 0.5027785820097855, 0.7210733143495486, 0.25785061156532996, + 0.4555441274272486, 0.6107703318943722, 0.43285630147506504, 0.7007641906892125, 0.8649037748052864, + 0.2597136347033241, 0.4646194290289517, 0.13629406209727002, 0.6346120355797049, 0.9386268549080705, + 0.7549008274298782, 0.30389993519798797, 0.49869733849852904, 0.3838442393954189, 0.3011961399183939, + 0.40139917767957534, 0.3789946183270362, 0.6323922793742403, 0.06726811304127367, 0.7624895746669883, + 0.7263635274621134, 0.19880387648751718, 0.43739815881984534, 0.5976281005402438, 0.6183067088214383, + 0.9779520667214144, 0.40885237189458035, 0.8790369548509892, 0.12871542796660596, 0.9219741468820662, + 0.6043565015885304, 0.45684686795264784, 0.6660649687230704, 0.9399312650696577, 0.008097938087578749, + 0.8934337536224866, 0.8986092467105691, 0.19995899946194873, 0.6872022270103215, 0.31203971548835785, + 0.6758814437472129, 0.10758483698180288, 0.22310158891971665, 0.6942325344632331, 0.5041435656483805, + 0.20665982722067022, 0.40072474623965604, 0.9550009966879915, 0.6612263502734103, 0.4978490224783404, + 0.4810343310785773, 0.6932761857280684, 0.4895689504002815, 0.23483677982433293, 0.005427612055188824, + 0.6815676745418822, 0.566604553674063, 0.6862144036589076, 0.7944437003572663, 0.16057679483740717, + 0.17551940244712882, 0.1794100433444641, 0.238495398781432, 0.7678747257391761, 0.5510160949532444, + 0.4998035354971454, 0.7072824899487378, 0.5683823481766586, 0.6668866787671196, 0.24048343837374808, + 0.10024863546449725, 0.31474050328612535, 0.779931347174679, 0.06818610175031337, 0.16547277765693413, + 0.5358573613839419, 0.6048124995802331, 0.5552064669244209, 0.867985285351135, 0.20854538624485364, + 0.6152986840655872, 0.34814972919759457, 0.058825788045184346, 0.12488082700520198, 0.9808113538361639, + 
0.6495602054785902, 0.36529185794440455, 0.7210620096750974, 0.8374326345875086, 0.9729791805521728, + 0.783862435254671, 0.7049076469957052, 0.2843055467293566, 0.4945203927895544, 0.13077532375819267, + 0.1366464858210079, 0.644071333405015, 0.6008774479712607, 0.6877935123273717, 0.2969070833015489, + 0.9908553738147915, 0.6585116648659487, 0.9575775147687592, 0.5735394435632827, 0.8182483313055581, + 0.6422632319254711, 0.41026775735347476, 0.25760538754054285, 0.583509324590221, 0.8113238507154572, + 0.33542005253569085, 0.18815480443419874, 0.6200394749183673, 0.4491148800547001, 0.841185882770977, + 0.23250965829296277, 0.12279886582014965, 0.20068655457130835, 0.4212494043943772, 0.08583283499217742, + 0.49650413162267193, 0.6940076024272812, 0.5471260849018768, 0.7138281187921892, 0.5899575854320958, + 0.7210733104422161, 0.2063319659546743, 0.546287050021144, 0.6327808019359032, 0.02867361525533907, + 0.12069216698852614, 0.16542118130211003, 0.8712806889100291, 0.7283916747281782, 0.2879453930664292, + 0.895062747210728, 0.9058494183669441, 0.6706935884079134, 0.19749929980303083, 0.9808938467611115, + 0.2620055568788331, 0.002574441420642004, 0.07405192884464429, 0.7435744252296878, 0.5122087346921007, + 0.14911664776968647, 0.8522445335776246, 0.19905100612586457, 0.7159180058586948, 0.613580711658753, + 0.46671131202148664, 0.5026257446107751, 0.7242337224502916, 0.6912174590902253, 0.6348406258478118, + 0.9448399638616968, 0.05893050227743879, 0.6909833611607202, 0.14239206856562037, 0.1842949006694724, + 0.5133614327054506, 0.22758820854298445, 0.8180314064062282, 0.3128788466690616, 0.18478204458702574, + 0.5997185729662033, 0.8990942977474451, 0.9830332879389645, 0.4130740509237302, 0.030750495424348046, + 0.948443527234965, 0.5787601125844303, 0.8553783507006383, 0.8051418275344663, 0.33175446126929475, + 0.6476843165622203, 0.23974003388816656, 0.44611301952286764, 0.9579825617111165, 0.5357642829744088, + 0.6953856808664445, 0.7495853697483824, 
0.5667886704658744, 0.3483241099217952, 0.9722201734971978, + 0.9708285975698596, 0.37452867433105785, 0.22559894087481702, 0.8258561730373456, 0.5597014267284324, + 0.8045211524591267, 0.9042574654415093, 0.646166940700204, 0.5764130732926929, 0.5068034569981075, + 0.42337179054885354, 0.5964823537522983, 0.9384737226819604, 0.7210569687290942, 0.24609186123070648, + 0.7221340384104827, 0.4142475889769287, 0.420733577649696, 0.9802184395939197, 0.5164072151010167, + 0.4676308844300169, 0.997116862483971, 0.0976271286204834, 0.8055372293474643, 0.1580755667471383, + 0.5979447803210742, 0.4750670489171065, 0.6025050796649264, 0.7891291847465132, 0.6369119292437464, + 0.6037829656071078, 0.5909026583584631, 0.10484548389530757, 0.31247090256745313, 0.12630491706861102, + 0.9031002064855774, 0.48667161559557626, 0.5080047356001118, 0.8652785592649281, 0.9500743828386982, + 0.47244977850074155, 0.891498810950069, 0.2249773165113338, 0.05239100354466675, 0.3829819303230637, + 0.4874784573158589, 0.3563591058307455, 0.07779682924293041, 0.09107277515224033, 0.263378352139333, + 0.17609043183951767, 0.42439785804888075, 0.878651541732694, 0.6332258029656965, 0.7624536314733172, + 0.2913251265219915, 0.28651449249008565, 0.15756240329028193, 0.46747618652544276, 0.08167675274636144, + 0.027900281407346017, 0.2821878871892757, 0.013192744126595879, 0.7543630545915418, 0.8648879701609505, + 0.4049214241167811, 0.48917145843161214, 0.4106997578490129, 0.5748336410743426, 0.8716720482108311, + 0.4043766758724566, 0.10909831959983252, 0.7767648302604372, 0.6179205301392958, 0.741490602640871, + 0.3938791768241672, 0.574227414948527, 0.16963992490375757, 0.510543045066065, 0.42923409869979856, + 0.6345476653797548, 0.22190079316034428, 0.04799377277922434, 0.2458361311174827, 0.6830742240878567, + 0.22482363475774414, 0.8812633149670223, 0.28439192322520535, 0.5616191791668175, 0.8458283466690203, + 0.72951460331087, 0.6906713098954224, 0.5296592740879944, 0.22301051502349756, 
0.4643652325114678, + 0.25749719775433744, 0.20233229554548393, 0.9979989097906906, 0.5149388848594345, 0.2727297777117407, + 0.9636739762600919, 0.22368183845700818, 0.5762363087065184, 0.5941099419362061, 0.47387292362096944, + 0.5446440844981102, 0.6227318078771208, 0.4259352384180458, 0.8538412555889207, 0.57771047892299, + 0.26765059869154484, 0.17093462678905502, 0.28605172752836594, 0.4385920239316041, 0.6999704170484969, + 0.8130735260392431, 0.8899356224922098, 0.7500953768852777, 0.5874936072974537, 0.7118368895885636, + 0.4724784996145066, 0.03811807445144355, 0.8970228942359098, 0.28761212805492, 0.9265218038092575, + 0.8776289324820068, 0.7078111089052069, 0.17342631851333679, 0.6788627185734604, 0.5669030368603662, + 0.4003973704276079, 0.5753283521487378, 0.7595218764311276, 0.3176949625035982, 0.905308573254881, + 0.30617318607861355, 0.9350188604597547, 0.7100306821161757, 0.5720068008791458, 0.6520057308649096, + 0.6394776495986748, 0.41012906214645206, 0.1297733902596525, 0.42889594247845375, 0.8635888930280424, + 0.9385964746829714, 0.3672512384225597, 0.2506190230922314, 0.33106464149625714, 0.4321388280483537, + 0.4669381105981064, 0.7449067981062075, 0.059730287599608634, 0.9764666358419519, 0.4388013631993991, + 0.12519477717707828, 0.6683742157638903, 0.25599177234566173, 0.7352715087345056, 0.709067404410877, + 0.9016536869224709, 0.42277395190103084, 0.3028111665227514, 0.7519063762929882, 0.5069633261666079, + 0.9622190308804511, 0.5930470115119086, 0.06735498269550222, 0.7447112324589688, 0.9336379232070264, + 0.32983641890341964, 0.8729524535192391, 0.3022330081239847, 0.5064836798975156, 0.04706353937462038, + 0.07640065135854157, 0.4744346554487049, 0.03958333067856634, 0.6895033024175298, 0.2856551340865151, + 0.4097484805979642, 0.6783577163383188, 0.47141796362639, 0.7240546198094218, 0.6789184975724107, + 0.5485472792790486, 0.6359704453952328, 0.7202917459840931, 0.9255996758421857, 0.058426799013740593, + 0.4550231290418173, 
0.642949294125344, 0.5018054718510963, 0.695899724502078, 0.8639826581395479, + 0.5067494628471603, 0.8054500133190643, 0.9794209769425968, 0.040168604534327135, 0.9160185538525903, + 0.6857413534461929, 0.5126307580433558, 0.38918268946666923, 0.30352162315122166, 0.6217965345338181, + 0.7687109658657374, 0.023320819100120627, 0.2722628402534911, 0.27311225409169393, 0.9935379884731801, + 0.8281367881685542, 0.10932468007428797, 0.6031099422647556, 0.027244964577549213, 0.6245028232391507, + 0.84315741986657, 0.08697689202300873, 0.27585060358746616, 0.2258122827177883, 0.4548727076836522, + 0.06448215065872298, 0.3865628661413777, 0.2720621278890061, 0.5223414734005248, 0.2422184767640796, + 0.5965922643441786, 0.003270803018935564, 0.9380722821176506, 0.06997528542307763, 0.5908541492862722, + 0.8574369251313739, 0.8260962287189011, 0.14475981348683753, 0.48348432822663956, 0.7815309543802812, + 0.18671199251177661, 0.3095097915865872, 0.7743844640438137, 0.28789240334194677, 0.5837085411066791, + 0.9600701295023808, 0.5078465379243, 0.19823936486566207, 0.9864258682322867, 0.6791293508481343, + 0.8359894327992111, 0.11913712940531351, 0.6263410354284882, 0.004716871371654996, 0.446572437504548, + 0.2955585394413036, 0.5455137753523308, 0.7207797982715984, 0.7613173486872683, 0.32969520613831804, + 0.5778669424499493, 0.3729969224393933, 0.9474000925802235, 0.22838951754131187, 0.24962992680055318, + 0.4638862153474882, 0.3503601039991344, 0.5752986622114722, 0.6167882730013311, 0.016166882278584538, + 0.23464135687384824, 0.44193962693145183, 0.49259932847547583, 0.6728606841137473, 0.6723510835179843, + 0.23968641598432694, 0.8413124272589634, 0.7713216893689864, 0.6679545744751149, 0.9789084492467502, + 0.44120304581892156, 0.03981914599116343, 0.5263735412096952, 0.07466796616592464, 0.750903377517701, + 0.7919220165448883, 0.6061948636042997, 0.7523063018354651, 0.31871189327275284, 0.19139974152818284, + 0.5501756868289217, 0.048812953155671135, 
0.6046961133864972, 0.9675637254398864, 0.09142391873503264, + 0.8492905496067468, 0.9718494376128888, 0.24935278260801408, 0.5781329946966143, 0.09578831470251492, + 0.2649244445114388, 0.4727977239932305, 0.26524972619501475, 0.7197520633562485, 0.9307089856673118, + 0.08263120735369411, 0.0418081163084244, 0.5416256380137555, 0.2711954854642268, 0.4648603192693386, + 0.4296642823689558, 0.9386673359665986, 0.6042109003018462, 0.8505179365039964, 0.9237797038913974, + 0.45917232215821424, 0.7468835807879611, 0.8299970215243967, 0.561676698699639, 0.6246939452512086, + 0.6942936837486009, 0.3660226408737306, 0.8588636547585764, 0.3309972233725321, 0.7934246891556981, + 0.7265886834140778, 0.6063733859123678, 0.16305374590883182, 0.65160327497373, 0.9717609814839947, + 0.9273026558524463, 0.16947941422488189, 0.38825444935967446, 0.11741934332418758, 0.4850737534468673, + 0.24657691834013584, 0.20296720926546707, 0.7309320605550313, 0.6530372015242072, 0.8741716520262102, + 0.08025839620847708, 0.34584725965462937, 0.7955805670006711, 0.42078644642790775, 0.39998991521513605, + 0.35665838137710737, 0.50336575292295, 0.7595919479872886, 0.2824449616754352, 0.6324515216003951, + 0.16345808969078734, 0.03027684420157717, 0.08442011062710586, 0.957022379317305, 0.8378931312416513, + 0.2140839732941855, 0.4806101624542982, 0.7802373818560964, 0.4746754608435698, 0.3121044843517543, + 0.7596216310070045, 0.6556746014980662, 0.21140224285486164, 0.09839547682199568, 0.0018566467706997436, + 0.7207143778823896, 0.9011596052431289, 0.7568308140809707, 0.5522701963038198, 0.8487920871270828, + 0.44367157176038174, 0.738446026608658, 0.017550546988452376, 0.9117137568688497, 0.06414267567084697, + 0.33934002103755456, 0.5064374717148273, 0.3207244161166639, 0.6210150036324743, 0.9079591527256549, + 0.693707227504025, 0.7719109626433589, 0.7680278360521288, 0.8662183970737692, 0.3507695822123934, + 0.19643373348735393, 0.061119682149786914, 0.9290509131265521, 
0.5658144466031382, 0.14085478210518876, + 0.6827657221653612, 0.6244273772369522, 0.766883956427053, 0.16707367572021192, 0.13504707001826888, + 0.317140245800712, 0.44477883959825293, 0.6929387748006614, 0.6320252552761753, 0.42997298385410365, + 0.8635669855843434, 0.4213986122111929, 0.9715678770025232, 0.7307080993083939, 0.8682913713509118, + 0.3966239275247543, 0.9371410372713955, 0.6007715795453101, 0.17870046511907478, 0.18024034058420502, + 0.9385530005700005, 0.5899394263055184, 0.6877738909201744, 0.3236538890036543, 0.9458189029741451, + 0.04111139672265485, 0.12132103538746697, 0.4713601728171791, 0.4348870812442971, 0.3111173348815115, + 0.06292878663146373, 0.6907669380183818, 0.7639682229511765, 0.4638491087524822, 0.5509387234518162, + 0.9132598827329871, 0.10176144774438345, 0.09440756102928749, 0.5318967033074015, 0.4258534032539746, + 0.13168561420275893, 0.3900614127096529, 0.8985328913861919, 0.26078425732107535, 0.6554476034331919, + 0.7364380578048201, 0.23850059824347802, 0.1940136707463439, 0.9399081726198278, 0.3731413305456026, + 0.6200050024275854, 0.8764454243152909, 0.019217404507963276, 0.47131308699474717, 0.7260312952130658, + 0.14224038086716606, 0.9800964298042102, 0.2487697371876182, 0.7189038264488747, 0.692196641702235, + 0.47364679399759535, 0.03822285393933589, 0.26614469169119037, 0.8107377961241657, 0.486657793760217, + 0.40131000501767156, 0.1272870354021688, 0.9480851639351124, 0.956512674285242, 0.8809613690445776, + 0.17709381363370424, 0.2921388229005918, 0.10152655456885329, 0.03194162410505785, 0.7515090285233452, + 0.7749619058772391, 0.45402836747716335, 0.12143177048280573, 0.12727056013524096, 0.46776400749305136, + 0.3175927872524359, 0.3969358894586932, 0.5763608461657702, 0.720361900816688, 0.733576316155986, + 0.8726841640717216, 0.6813377874563836, 0.26713922471794216, 0.7016827480743889, 0.9498448863720076, + 0.8191489329480925, 0.4231750228601412, 0.05730895584711282, 0.9958840211239685, 
0.48771609290246765, + 0.44056423771523756, 0.40124209662947063, 0.256503301091475, 0.3906640598232376, 0.8030455754136687, + 0.9440101228660442, 0.9836125290093509, 0.8694962363845806, 0.5034232171973247, 0.4862165333782803, + 0.2553056615265713, 0.2689948231452096, 0.022289309196923957, 0.3121057542077036, 0.9102195071996985, + 0.6438587518899679, 0.557814948896943, 0.6642041835690415, 0.8460110711659059, 0.5163015460284459, + 0.5834259770537166, 0.9418830529479596, 0.290161098210033, 0.8124404041411033, 0.8340422695855997, + 0.04304175515515862, 0.4322695891256726, 0.6069042523238396, 0.941453488625415, 0.9349234189706646, + 0.004353721633380214, 0.32349769262255756, 0.2431396493138379, 0.8067151331237356, 0.18171601533575454, + 0.5679872590196909, 0.41506962981932083, 0.09187113082663767, 0.8989929180692198, 0.8550326556170208, + 0.1699968325589175, 0.5748168468192442, 0.9389437951885738, 0.12918164394572362, 0.1686711617140798, + 0.6753604881338949, 0.9478446521796615, 0.6521564752588525, 0.8750303866429433, 0.33640814892998294, + 0.604388530115538, 0.2202256545303003, 0.8435394156266646, 0.7338585365932639, 0.3544781787762782, + 0.30735610053872786, 0.6140725225603954, 0.771334221456126, 0.4907016331498991, 0.5499603200345702, + 0.8422978921131469, 0.6661685311187996, 0.6619548994154616, 0.3592043376485212, 0.5193225975882179, + 0.27924929298380563, 0.9400634674863366, 0.5156660460617192, 0.4297324376868892, 0.0011210103673163774, + 0.2582054192433193, 0.04860997495346808, 0.5675171104560979, 0.01712140044412025, 0.15875595220324012, + 0.04144802778650236, 0.600404758018704, 0.7819793497806401, 0.8907430895470431, 0.22341997005710967, + 0.4562030112341884, 0.3531136516142821, 0.8070466841112061, 0.5022225266731181, 0.38887787293854403, + 0.14064482502921705, 0.5185842065917977, 0.8041322758562617, 0.15871714987847363, 0.47001992017242433, + 0.4927436178531379, 0.9584952117184731, 0.30342674898241684, 0.5236455293452065, 0.7764414316998594, + 
0.29247779907894744, 0.29227584659504713, 0.47302482731716133, 0.14965956513627954, 0.16055238943198058, + 0.5649198019986985, 0.1728525607614556, 0.4849528931068312, 0.5248107841744819, 0.4801644237960456, + 0.046872474625495486, 0.3518377628846787, 0.8904114421508117, 0.35643638108953046, 0.3521812465945472, + 0.33995031015275845, 0.6319675690326143, 0.07481288952740617, 0.6233619613125061, 0.007880372901390209, + 0.9399853913901934, 0.5964201273927553, 0.9165452083082222, 0.5937161645890244, 0.12163479442356173, + 0.2788549366800721, 0.9865756294415141, 0.4414223039144495, 0.8251942040669727, 0.4922081343398308, + 0.9749754802453812, 0.3854024543662874, 0.08009376202048057, 0.8118843123314589, 0.29437127175916755, + 0.4014077074488228, 0.19509940039186768, 0.04266632450662222, 0.6884657081858001, 0.1216955570190934, + 0.2170088161938074, 0.7212915682956111, 0.9881238607976323, 0.611560875657677, 0.7904631250863082, + 0.45537467951473143, 0.21666224287466163, 0.9721110527364413, 0.4376174724471875, 0.7288912678361112, + 0.9508033345749596, 0.06824344151584916, 0.550273033113911, 0.020387554598512603, 0.9168226764244878, + 0.22471440081366145, 0.6088658842819133, 0.8710388165651253, 0.3770273640583811, 0.16246150922004476, + 0.4705571741180816, 0.3290719551033835, 0.35410473798331576, 0.7757471803943391, 0.7991735017692486, + 0.7348890041497381, 0.30143420360246465, 0.6455361985461284, 0.097021040254053, 0.8226085774215812, + 0.7110116883294046, 0.8502032341842173, 0.4210994844816299, 0.18885523947221294, 0.6590828390559235, + 0.4527529803827872, 0.010298410014078718, 0.9805065077862268, 0.852587990148116, 0.9960324878359781, + 0.7599150852713965, 0.9321035953213436, 0.2637101194353362, 0.9068595712073455, 0.5678419693502693, + 0.22497660186965074, 0.8902520346324687, 0.9673819798099772, 0.9017682508962988, 0.11639659320694484, + 0.766450901031293, 0.8318572231704785, 0.1773644425367451, 0.7744831630877447, 0.4502440759458981, + 0.7013268962937798, 
0.12202930649205435, 0.17088576088715013, 0.22699054153673903, 0.7352273451062853, + 0.6689329043966824, 0.024531668297302134, 0.8601329218592405, 0.868696823133307, 0.8584200406113726, + 0.41305138487776727, 0.9306820216913246, 0.27261686235292326, 0.13408957060505167, 0.5657951307831707, + 0.5869045983159034, 0.6780982886374738, 0.7342811068643949, 0.669628536173643, 0.7835569922051313, + 0.24168020221120146, 0.14442949114979897, 0.20283738190475853, 0.11431256021175829, 0.9910801315392892, + 0.1967350481110527, 0.8362418179647403, 0.38339887830828967, 0.8614635125793015, 0.6056083292741613, + 0.38207684753401205, 0.41710476538817065, 0.501277291363725, 0.8368299537195543, 0.22990676641394636, + 0.014018152270963258, 0.13094558049833704, 0.0009357061217638574, 0.6612349069631362, 0.8627915200760733, + 0.10775594381167564, 0.9366985674896414, 0.18337636063568263, 0.6636086123083815, 0.8363753069693501, + 0.6875793086930732, 0.32071332891501314, 0.1500957462333432, 0.8543207334047052, 0.5820694298067532, + 0.21761753755245927, 0.8281760236234044, 0.6598206391558846, 0.687401654782377, 0.255397163237027, + 0.7012807952488621, 0.05127407810280038, 0.15864091293387306, 0.5834901527176845, 0.6137858234042942, + 0.29401723523350576, 0.103178529156985, 0.3515749528742018, 0.6241877547339204, 0.04271984432232634, + 0.11406464434568975, 0.4092240044392941, 0.3270514405508459, 0.060225556592617635, 0.7057552168282882, + 0.23344719535643488, 0.6993171085260081, 0.45874165401891887, 0.06517577370764216, 0.8913516995867351, + 0.45896039221433815, 0.2868450626642217, 0.09686345874046931, 0.7299437418119673, 0.6856942690748399, + 0.49476438269489287, 0.5935513640233603, 0.08198852755831187, 0.1288459981759551, 0.3732842093909715, + 0.7528655508311395, 0.713607489819386, 0.5296964882506627, 0.12494729244351699, 0.696321156738471, + 0.6190504641399058, 0.40317087893656034, 0.2701747473631586, 0.7471122286050705, 0.7603673806334562, + 0.6994839976202726, 0.12005739066464527, 
0.5456880065024868, 0.5956666264862392, 0.560670410555911, + 0.6308205249021457, 0.38244687735586547, 0.5798390236388372, 0.5213693323981742, 0.318948793075461, + 0.5796393609535202, 0.1877283865808097, 0.10476454646339173, 0.3123148987334283, 0.49835850286273264, + 0.1612223397871464, 0.35290325114154386, 0.698162849272158, 0.3271068856625651, 0.4189939218626765, + 0.9999481294943786, 0.6992854026370279, 0.22232718325417733, 0.6519686461649451, 0.7598694836489935, + 0.8532133844147979, 0.8924128991910326, 0.4799385251243511, 0.6479251154483289, 0.25822983061761207, + 0.5668466754866145, 0.2774457104756757, 0.670048057645535, 0.39510819066862324, 0.4336362427422892, + 0.9816499048346137, 0.8073160804948453, 0.40424950356651457, 0.12733028899185772, 0.15648637057944303, + 0.607189318001182, 0.9113367066278435, 0.6193082899991398, 0.20118818041350117, 0.2551139670640812, + 0.6752157582517974, 0.2809981456104014, 0.2723253082207009, 0.6973612955359146, 0.7790571224805916, + 0.6557714246999514, 0.25081317657704216, 0.9709161295312517, 0.8732196152737165, 0.8866963202395287, + 0.8610113408389278, 0.8194364731021205, 0.2315030440596627, 0.03958617414076482, 0.745989982334892, + 0.07550555618941879, 0.5677398072305826, 0.3718820255015909, 0.2099853041861377, 0.5911814241408094, + 0.905387881812114, 0.07254401352811757, 0.7389883463939141, 0.504197710602281, 0.08837786759689104, + 0.5306796804351225, 0.7077650227087292, 0.5314878681065682, 0.8507696629028404, 0.40276157954655856, + 0.006559901756916564, 0.08762897819142246, 0.7632801769400355, 0.3381714104955309, 0.20096222721206902, + 0.011511082208297552, 0.925946707835502, 0.46540158302541146, 0.5738049508201617, 0.07740565031301261, + 0.1424626570022215, 0.3765208135061625, 0.8304777862990069, 0.5128203758125954, 0.5355688283316573, + 0.35565284949258635, 0.06493338342490451, 0.9911552954344669, 0.6486470502341128, 0.6023089678029014, + 0.3627191813727323, 0.8699984305763593, 0.8170965724011887, 0.18013841706249067, 
0.9704873465917505, + 0.5140649665871343, 0.26758347449841524, 0.5880085209855035, 0.7892536663225015, 0.12824736732629305, + 0.7397141745161299, 0.46627368885619336, 0.18485765826692202, 0.0049014262712231416, 0.47798726402742664, + 0.37362563245129554, 0.7979464666023278, 0.9350436663065589, 0.5177835305924435, 0.23778709661641662, + 0.888518260101916, 0.6416861527790223, 0.45816388017771026, 0.2518735233657079, 0.9002833017573497, + 0.7030885980016678, 0.12842248682593838, 0.7920348405980873, 0.014086009550658751, 0.5277936124696514, + 0.6436335235485766, 0.9778261246727656, 0.836220066056041, 0.8168725052161434, 0.9746225850522218, + 0.4805929851840014, 0.15130034606916343, 0.6253657130414274, 0.5869953199424293, 0.758657454917902, + 0.7310643707078577, 0.6583087952517872, 0.3425763030596516, 0.17458922534835386, 0.06200536656436273, + 0.7267750797174634, 0.21234811509405438, 0.08107867167844707, 0.4924982960086912, 0.0073470010789585816, + 0.28980368292726444, 0.18329491526835495, 0.12441662244824536, 0.03439152814326396, 0.46305906685057774, + 0.7103201049541732, 0.5407049406961493, 0.018018527214290758, 0.14596636499065718, 0.6492843745216476, + 0.1829083604772751, 0.3847023871151104, 0.4060228161088898, 0.44486980471717374, 0.38512947571694933, + 0.6996946025317136, 0.6894600579493476, 0.5342295965944848, 0.7518163349196545, 0.14855478978398207, + 0.24198279604362372, 0.07733560207119383, 0.1911403914802865, 0.27161104079328147, 0.9378682094828321, + 0.19864863224526874, 0.35495528127620746, 0.5173284858790735, 0.7855879974628446, 0.33907351944620023, + 0.4420088555993379, 0.9932573409164348, 0.9210616999081693, 0.9540824866763601, 0.9508484044180693, + 0.7434719166840087, 0.6596706423435834, 0.3011743735590696, 0.567360506326563, 0.3321079474213857, + 0.6689059786880659, 0.4595389326489865, 0.314127860039213, 0.4805078807408467, 0.4068209458936858, + 0.5223013599705607, 0.2550720616526545, 0.7407319934550769, 0.7967447805389729, 0.6823430925510691, + 
0.7552468913002445, 0.5716213584915647, 0.5156549200575771, 0.47834160529960457, 0.6094938341486238, + 0.901069202114916, 0.6586314128974872, 0.9419975443206123, 0.43255541931369257, 0.10227209094466017, + 0.5808461520431166, 0.46728958831771883, 0.08102478741666885, 0.5043055736649753, 0.9056770777596566, + 0.6564041260401954, 0.2947069472008217, 0.1218480717937318, 0.14453800217114288, 0.6085571101770215, + 0.8278234444819542, 0.1550822852257001, 0.5441341851669468, 0.4504288683830343, 0.8755827786863017, + 0.44171186359714165, 0.8006861810328124, 0.9123791598802871, 0.8088723911637784, 0.0629895995263573, + 0.9082807995805905, 0.24841720228487196, 0.3716076249350193, 0.9089773737811353, 0.9480752351939048, + 0.04129449279972208, 0.5046836252751412, 0.05242947607368609, 0.2466411287826097, 0.899226690820443, + 0.7125475896649786, 0.04941600309030314, 0.6390029982988026, 0.038862349614462444, 0.21512840472728822, + 0.23820369071008063, 0.4051076513692602, 0.8885018808545488, 0.6935139889038988, 0.09194440548804161, + 0.4763838989584793, 0.12134285469482364, 0.22483199177972668, 0.9322732489182661, 0.591355391344712, + 0.6715543806826336, 0.376664032409485, 0.23285571433310281, 0.7715720693355826, 0.4570484962778898, + 0.12673760624583574, 0.8927120581961654, 0.888961973440627, 0.262015468144127, 0.37930479678390594, + 0.04681398059197439, 0.6311100995202091, 0.9425603715679993, 0.03736472759032883, 0.4124355355203435, + 0.6015246661666919, 0.0015469293614804869, 0.746687069224817, 0.5234881668641809, 0.1863916641868243, + 0.4033867415840434, 0.5013945476643936, 0.35818205687111593, 0.03061449889454737, 0.06394030812352636, + 0.995292777311918, 0.9431033187554672, 0.829595968429577, 0.23107213439252527, 0.7245168920699859, + 0.7607346884580436, 0.696759584024281, 0.018196871170177875, 0.26473004238819, 0.3492309604498389, + 0.5983950153539378, 0.007761999004842757, 0.5405433184499941, 0.773664844990564, 0.2156931343209424, + 0.08134038856120718, 
0.07053360339022174, 0.5636837351834537, 0.16255848899020076, 0.746623795157349, + 0.18136930773940896, 0.09970230640959787, 0.340861298506808, 0.7236697245063086, 0.012606241512445271, + 0.2938412398716117, 0.220318338219472, 0.2838181832110501, 0.8034328208413234, 0.8331120872092166, + 0.22777727589303554, 0.2609896110679869, 0.4873099235676889, 0.6875555456325994, 0.16740385943047797, + 0.18515418475740675, 0.16512006570990634, 0.08740074146230492, 0.16386502243358647, 0.4948750330248961, + 0.9746849678643001, 0.537536038118961, 0.05162792596829591, 0.07417656098429448, 0.167001215675458, + 0.7597603673357661, 0.6271730005592198, 0.8052617313084214, 0.6591286446456107, 0.6916369625198514, + 0.5020143492270686, 0.8947296984684618, 0.9765184711496413, 0.5992264677026412, 0.6148428874127354, + 0.5533954908361446, 0.468318084476103, 0.6728711996725182, 0.1854238778696723, 0.820910896749562, + 0.5999956362647416, 0.4572712153063121, 0.3428551679433728, 0.4635708577978108, 0.6944883939621205, + 0.8121544257041327, 0.19831733244167404, 0.8740867060574905, 0.5343816182866131, 0.5778671617664237, + 0.1874726852186367, 0.20287690597809083, 0.7812931577723563, 0.22665941819677315, 0.75842747532928, + 0.2611052306068461, 0.3359045688616409, 0.4134842438849258, 0.23639002123064523, 0.21950026845864, + 0.1960486143679916, 0.3100447514322918, 0.5707346589844915, 0.8848880738267868, 0.9645633866392591, + 0.5174477187518687, 0.011731643492788435, 0.2867817347949472, 0.45352626163514753, 0.768491398480592, + 0.6877009294015113, 0.5162319637902584, 0.592250929448377, 0.9685115104523315, 0.4920293173749266, + 0.18372184100553468, 0.4160122020213902, 0.4971411441348931, 0.11896883403900971, 0.49805096551202743, + 0.39113480730109573, 0.6260269090616651, 0.7245184714278275, 0.7805534329049238, 0.7048456344982128, + 0.6887905041649139, 0.450305157957691, 0.4673083932400628, 0.775916653222131, 0.2755455651159192, + 0.5182796086437944, 0.7332492500560673, 0.2830996458713074, 
0.1249185444634886, 0.49867350073064154, + 0.23359058287458012, 0.5063288373750353, 0.7333156918306313, 0.9872308162057597, 0.9567087751335582, + 0.6238626029432375, 0.8529091528158861, 0.775873496549078, 0.05222681899273829, 0.15696642274242456, + 0.5183258702498142, 0.2013635345447039, 0.4320906653237585, 0.6548403872319425, 0.9752741290647994, + 0.7187840310652783, 0.8604668447860284, 0.7333419713917766, 0.15965344613263188, 0.34853140275602523, + 0.48100610033868474, 0.908588020164127, 0.5659866583594944, 0.20902546052840065, 0.3660097286587095, + 0.14452349400724374, 0.21462036861902123, 0.898856949017913, 0.7218622228378231, 0.028832214357689123, + 0.9201043597026283, 0.030400028413322455, 0.9310528196727699, 0.8208902575891458, 0.46604564295917383, + 0.3854286285632015, 0.7712388239661552, 0.0338122164381961, 0.08554810151272874, 0.7748213339500115, + 0.11673825354403844, 0.5964651150013288, 0.28787058142083055, 0.6433816576984764, 0.09562641538128203, + 0.2619056080889337, 0.7154627008688301, 0.809311589072756, 0.7613942782585106, 0.6084913664261036, + 0.17757915991952855, 0.10989784014797643, 0.17135608227773658, 0.3705276024440368, 0.5208851501218579, + 0.7172114446247627, 0.9710896126111231, 0.12053179006456294, 0.1632797894950372, 0.9770721674672078, + 0.04641675121145594, 0.53396424116567, 0.492943050056759, 0.9973441514052725, 0.7007935998816694, + 0.2250531177096461, 0.5920798904743751, 0.49348435943021485, 0.7560097443477528, 0.36382956801726607, + 0.44374679177671783, 0.834235507515291, 0.08594012448478627, 0.7358028954533745, 0.4083307230276515, + 0.7116409376300524, 0.9905879808866775, 0.1679545781363232, 0.47763464118221644, 0.22528302489661567, + 0.15349150740577944, 0.7918261140909724, 0.37767391404709694, 0.9705519033762046, 0.8181055437991887, + 0.8599529905577464, 0.2592285705025943, 0.19468088381171034, 0.8752550858792478, 0.009688109625222596, + 0.8204279507754189, 0.3774463841283995, 0.08985316900316265, 0.07816150917944442, 
0.734247834802681, + 0.24541800265956126, 0.006531323038135084, 0.9851879595521903, 0.8159557006950702, 0.9152390003119352, + 0.5319809261237174, 0.6386674079101198, 0.10978652525432198, 0.9179144727300402, 0.8774715889203125, + 0.8255571666431932, 0.05295391962667573, 0.8973892624874723, 0.9490014296597458, 0.6649197448226704, + 0.35853226834317553, 0.9736227239838369, 0.8700606433427607, 0.6562432866780602, 0.223534836918463, + 0.7682764354814492, 0.23375654224951914, 0.7225109505681124, 0.3586205987176323, 0.08931339718820286, + 0.8029575172580515, 0.341220677302157, 0.45004851477486796, 0.19176816787796191, 0.4841654440774278, + 0.7818504798571786, 0.39949388610391945, 0.053781455786013566, 0.3583005645554188, 0.2815402664425799, + 0.8412338142278156, 0.37759858105309807, 0.12595533225633904, 0.1748861886824502, 0.6186546287139633, + 0.35904357270685705, 0.5525263377262613, 0.49677853983831355, 0.23355264266240994, 0.4707044916958194, + 0.9288130710432897, 0.7910028595074552, 0.0051187346385730415, 0.8200915707102844, 0.3058997781546249, + 0.37581486293323263, 0.5274618952752863, 0.8110004413668622, 0.1348608300652807, 0.977421171443131, + 0.5433468953173136, 0.830593853692327, 0.10687002394709755, 0.09444338231015492, 0.07102831092627537, + 0.1710117941744025, 0.7442189731558098, 0.029784523574134636, 0.47883485183893115, 0.6646633220873541, + 0.37106207027646454, 0.06514672139074096, 0.659585503702675, 0.927663325913023, 0.13781122858192307, + 0.520663790298233, 0.12920143317961563, 0.04344241624201606, 0.37270789788756975, 0.6372681529743224, + 0.3830671982001401, 0.4461071812170265, 0.6250505625688572, 0.7397426078340625, 0.5758253242449555, + 0.3856025390862867, 0.4137735598138511, 0.40420930428755664, 0.26060035445884067, 0.7420343235449529, + 0.5169426510393555, 0.3993281070498891, 0.7287040444454399, 0.22748076597122735, 0.4878689711986589, + 0.12971670498579768, 0.5258669899624339, 0.25115973275388237, 0.7582204235388904, 0.5149461336994416, + 
0.35418093689606667, 0.6752493177019367, 0.2699628342360272, 0.41521288810838985, 0.3660109544474325, + 0.2784017842747605, 0.398658399440863, 0.6338830924291177, 0.8009780856588588, 0.5254870745041613, + 0.02751377464853988, 0.5137485157047458, 0.05635093449989126, 0.9481787635923713, 0.7658434020939691, + 0.1877115821909051, 0.14743328494735586, 0.07837172073276821, 0.08322655905080645, 0.23840774261453856, + 0.9485276775112947, 0.7789017536242545, 0.8579099495524767, 0.8717756930820605, 0.031157342048780623, + 0.9139856828182921, 0.12292092998392357, 0.06375458623195385, 0.7675919372173945, 0.5365361030960988, + 0.14391606339669494, 0.4484588557780347, 0.18638234818654498, 0.3900350451932797, 0.9426557074878114, + 0.6028459979476734, 0.8400254448572123, 0.06383347748382018, 0.9180294474416062, 0.7245163777745026, + 0.7480308446262529, 0.012144669987189882, 0.4521099723504216, 0.5544369498564855, 0.8231773274592316, + 0.9190230007568897, 0.6525337349854902, 0.9510119665627714, 0.20014804127153818, 0.3859200370116854, + 0.2588643089075897, 0.1756335323717947, 0.18846409007242304, 0.9579330549430756, 0.25233661935958984, + 0.5667477511445961, 0.7398301510718025, 0.9622867843254624, 0.23185007473676156, 0.4595251494850715, + 0.3960953551142714, 0.43457360253037414, 0.5400035025629444, 0.8228755552163319, 0.034111647984197746, + 0.5753024140226165, 0.8812832226256878, 0.5810297256380531, 0.6964273487865437, 0.8389486183710646, + 0.13610373446835167, 0.014507014703228127, 0.7561461053470839, 0.8580605139586261, 0.4057872003782853, + 0.24311846417513328, 0.12235014636787944, 0.6924040606005107, 0.4850671837724576, 0.031999205139682796, + 0.2922088976747118, 0.6796644001097146, 0.13958830301853697, 0.7954785558512478, 0.8774866003322361, + 0.26383559115507305, 0.5379495291266788, 0.37446240424315214, 0.8384998826361951, 0.14130468801994667, + 0.35043118493387726, 0.1436900472845828, 0.7169750793907497, 0.2011742211579901, 0.38334191058524747, + 0.6310449003715752, 
0.6137935659191043, 0.7286938720327818, 0.28325018411534675, 0.8532269774247301, + 0.8102387692509151, 0.6043008283599701, 0.5319147910354022, 0.4617253482606256, 0.29894983471917413, + 0.5935186940246622, 0.8435867151607894, 0.7803241419901414, 0.39023073132066644, 0.9394979804574062, + 0.48305604653941303, 0.0019248664110667058, 0.3398682350233214, 0.08107431172779145, 0.3844349229186699, + 0.955629125734741, 0.4363365772569865, 0.011040969550112134, 0.757456389454082, 0.6847871156339033, + 0.8763783243353462, 0.9135166680238457, 0.7297767288705632, 0.0310795829702315, 0.6407986081788196, + 0.7702178388944013, 0.09981941385728033, 0.24389186626948656, 0.12197397071936245, 0.8753460330091902, + 0.3450239724341805, 0.8989392402055053, 0.9441111778509117, 0.04211963674122654, 0.30862062670183943, + 0.9284329582089073, 0.02804577852166812, 0.3171628414314781, 0.02338424730327704, 0.6689503643937548, + 0.6195686733271077, 0.5571625509127649, 0.22423446031769823, 0.7226987055785326, 0.144518714598037, + 0.6457190482195965, 0.5502682702103808, 0.009934579733899884, 0.7125842495556618, 0.9264256863768446, + 0.08669837782557521, 0.483267713126608, 0.9570046158892522, 0.6827262234688035, 0.3177064607725666, + 0.5342606048752624, 0.6578437913390262, 0.573476375452811, 0.4802672210574963, 0.20901503604330074, + 0.6286822007911628, 0.5382916151261778, 0.4693808992330756, 0.9106270711472852, 0.14177134438156302, + 0.13907702625647633, 0.3839316487237239, 0.7130425224741993, 0.7882822621144745, 0.48491665379357296, + 0.9112446769339284, 0.24428835341327915, 0.5758050577131167, 0.6491211512738853, 0.15555467388946398, + 0.9422095419779251, 0.7568656825101865, 0.9087979167759883, 0.03140839434700338, 0.11171008119720982, + 0.4931329328752394, 0.14503629565026654, 0.5575000629541691, 0.18605786062763552, 0.23187282580666357, + 0.7763822460185802, 0.532108146139854, 0.587551036540856, 0.4540617858222129, 0.5979697912004716, + 0.2918013256298655, 0.6740909740760708, 
0.5404924898922674, 0.09586072145429347, 0.38398412373969115, + 0.4595043833347955, 0.4292459161245784, 0.24052089506544472, 0.8091157214456177, 0.6696978852544698, + 0.24444268470075714, 0.2922725579113702, 0.3154082184386422, 0.40744058245736914, 0.9053444586510178, + 0.6958691394067466, 0.27963327243677416, 0.19715794143763865, 0.11313252776219185, 0.823236629180382, + 0.8180737511241616, 0.730116891377213, 0.8315637907833437, 0.056299034535973846, 0.6802936791167834, + 0.1471385433037462, 0.5528588191379114, 0.3239293148572536, 0.8060069496395249, 0.44506468589215376, + 0.8496904255399494, 0.3769790497267881, 0.006883755979162687, 0.6106253268895397, 0.545665231293884, + 0.06124055493365488, 0.7676220746909378, 0.2599407267954814, 0.7659112506467587, 0.41899772050340534, + 0.1271903755730086, 0.5927297147190784, 0.7471433095253147, 0.9832868555901813, 0.7988031842106109, + 0.12295806351232264, 0.045611517313897565, 0.8114027913584192, 0.7242034278724583, 0.15144437141482503, + 0.8322810042944455, 0.5194096618006525, 0.8132375401805939, 0.0274141036139689, 0.504034436362323, + 0.46470706680037854, 0.6935291025306656, 0.9809251842948876, 0.3980849154805697, 0.6124242848104068, + 0.9863181152580893, 0.003652022852111303, 0.9365518870500562, 0.15438066442253662, 0.5471769081918529, + 0.2812903061812472, 0.023104958478796123, 0.31853670622238384, 0.11238728762609118, 0.101612999560444, + 0.520136700270328, 0.1876699078640358, 0.57321294644147, 0.8699410533227272, 0.14834014530082362, + 0.538300215664714, 0.19116441454410715, 0.45581832000701317, 0.8138434956475238, 0.024415782324894164, + 0.8537698526579839, 0.7408029349281222, 0.6823291641134823, 0.4931097448048505, 0.43375819673047755, + 0.574296549059155, 0.6771957699704143, 0.036304431915407775, 0.8765308167179109, 0.9783992851761845, + 0.14173930063189877, 0.21755471328285014, 0.9406454995717962, 0.7029946293578456, 0.6727005080563465, + 0.972919162385759, 0.1945385298289729, 0.938729607173556, 
0.8204404408038716, 0.7241570324887215, + 0.6093389691188497, 0.7769749190486305, 0.36773379081836155, 0.2552097517079104, 0.9866977606213085, + 0.9784775617705486, 0.4785722609689733, 0.9369544158243106, 0.9815927322079414, 0.0626916070745912, + 0.7565986567258083, 0.36820407862468163, 0.1086965946506333, 0.11174429583831869, 0.327270522888665, + 0.7871467533399917, 0.7454847675821837, 0.722088639725986, 0.507396561923172, 0.6078722520122439, + 0.08992465693006091, 0.13008451536212629, 0.3380751817185115, 0.3446049508473783, 0.7736317862476276, + 0.6877773949865291, 0.211618101048893, 0.9904086637188828, 0.20640602668954966, 0.9588831313251175, + 0.5871133591318946, 0.04596336601225548, 0.22766253876168419, 0.6847299540747964, 0.5858706122464238, + 0.9414938492073547, 0.8896660453633093, 0.7055455062581931, 0.28855640760370027, 0.35211904873741195, + 0.8128776630331138, 0.3857859300048102, 0.9488416128096503, 0.3482608878833303, 0.8282525686938762, + 0.43662712246772906, 0.1716363139292323, 0.23868317345093926, 0.8676543852034392, 0.40786502203700226, + 0.24110777890503943, 0.14767366658383063, 0.2779155043023428, 0.6808477214941833, 0.5697588738868553, + 0.24644954994421764, 0.8654116982179187, 0.07520690791323559, 0.0631161772834603, 0.1640770811468253, + 0.18789638733316583, 0.9018218932878482, 0.8556688317503034, 0.5508840111771555, 0.5872278824545255, + 0.7822457676984101, 0.3813957481391139, 0.40631933125665165, 0.3572665920431616, 0.8029685362464445, + 0.6605779764497641, 0.8928486109614543, 0.45448154337934077, 0.06126178080901623, 0.47004438084872735, + 0.05467044866831938, 0.6634270728456326, 0.7679739170236175, 0.6281868142001242, 0.03890463990576798, + 0.8808759424779828, 0.8302544282964023, 0.7836249474846858, 0.21887572139776024, 0.9562034948827051, + 0.7074810911121381, 0.2628821458950734, 0.5165221094209821, 0.30351336721653455, 0.36408609446632323, + 0.7558828924626627, 0.8276563101881355, 0.9466878427193101, 0.10106646575901224, 
0.7757588121533684, + 0.2742973483446487, 0.3148020044826927, 0.2533111614689332, 0.11049401163104011, 0.6338801305903102, + 0.903673790846596, 0.10081946011992693, 0.05059791791092505, 0.6294333885911099, 0.3547720585568278, + 0.024512389319083394, 0.6047957889090947, 0.5311515707964978, 0.007465862970746584, 0.736728673881844, + 0.8177845817627879, 0.6026618867208542, 0.025003866620137538, 0.5265115907244209, 0.6793473349324505, + 0.4567798938922808, 0.16101063423999107, 0.07458559375159257, 0.0021001596973070358, 0.5862390246005398, + 0.6628868923889102, 0.7442396561325131, 0.4108714827516402, 0.36832557597991455, 0.4141340163589434, + 0.3313321533358278, 0.5939341467630771, 0.2589292492012113, 0.4508125901832236, 0.6336151561500523, + 0.6903816285753669, 0.9153798189195478, 0.7334589450281805, 0.24621998300812165, 0.7373266224637472, + 0.04269045912469449, 0.5586131649489704, 0.2356027147351989, 0.2204489151654253, 0.9283152919733236, + 0.4846454067321725, 0.10088697192558171, 0.4017315490465736, 0.11728051217962265, 0.13054379896506152, + 0.057207028661755976, 0.8883023732291889, 0.3862916648191498, 0.6560344484940595, 0.9959236617176795, + 0.7014206907837336, 0.4833618504781758, 0.6890291362487181, 0.5856590484732683, 0.2974634455637143, + 0.6342040205693581, 0.5473016397262144, 0.6108014254220075, 0.9601689317116118, 0.03241048099028765, + 0.8419508262389035, 0.7692255219956106, 0.7697824483665853, 0.2865969871788444, 0.2269566595333925, + 0.5171224031826136, 0.9817642056448012, 0.4295441275848959, 0.17746716946820995, 0.2902435488798949, + 0.08839512336579591, 0.6947872191577352, 0.4145449729389693, 0.20364928823381578, 0.6669379682928661, + 0.09416816773558967, 0.9439937826771363, 0.9522801801108401, 0.26998546193319994, 0.954009715760126, + 0.39036876458265446, 0.17343696470648462, 0.03158837165736539, 0.46974095338659194, 0.9591916312520437, + 0.4818092894724819, 0.8424654427122448, 0.6177703565165559, 0.5914548268852293, 0.6428945625338994, + 
0.8603162375957233, 0.23271212736880276, 0.24621748273807686, 0.15967824671716957, 0.9286072477312359, + 0.8170397770052048, 0.8398061997821833, 0.2667699626789384, 0.9527107787771562, 0.03911807639696219, + 0.9831132419698814, 0.8364943027951782, 0.9882352652888472, 0.42809371372109317, 0.15820338758268004, + 0.740440678365816, 0.8587144992549819, 0.15776944004020232, 0.4447292831113717, 0.8487430455797811, + 0.4304630478241951, 0.7482870459640396, 0.5325553314478796, 0.3817288785983075, 0.2959863339866108, + 0.3036384254295068, 0.05403551282698826, 0.7819253940893207, 0.27256952898405784, 0.39278557908492906, + 0.07530583895623799, 0.9609867537908252, 0.997318689003282, 0.7618409602286108, 0.40022865462158164, + 0.49922414091174294, 0.8568417232004445, 0.37683154053504464, 0.23935604223587337, 0.30006191815112726, + 0.3165839886769065, 0.9470989225190005, 0.05640277649211545, 0.04938711122995676, 0.6708839571401861, + 0.02615453029466408, 0.11431399169107104, 0.14716117670576156, 0.6595806661994528, 0.5466060092432324, + 0.001085314004465432, 0.6720305901532767, 0.24932378448431813, 0.0745504794710592, 0.08139439884329136, + 0.08959086551999396, 0.5009323921891324, 0.22788738147908705, 0.10305083852026253, 0.8834467563552302, + 0.25143454404244203, 0.110457185881497, 0.7495152135273823, 0.5686608542788599, 0.8229338392992638, + 0.08991798877156842, 0.2410263253220044, 0.9652390943836725, 0.0358022694433745, 0.217839006088278, + 0.3275437407314057, 0.5668190854040339, 0.9203251225795509, 0.8191432733734665, 0.22692614916535414, + 0.6341930008371865, 0.3386783052409815, 0.44704040959411084, 0.346397455595012, 0.19629161605155776, + 0.5569579088209491, 0.4319977818288674, 0.7663196597137937, 0.38955432595792994, 0.9274913629617861, + 0.8071949520180836, 0.5607930712657042, 0.5156488900153687, 0.8923833631763576, 0.75950261595991, + 0.6873981041605213, 0.7516527281314568, 0.07021963704690537, 0.5193847296233982, 0.35233223134043123, + 0.2729020484734431, 
0.005293595469666013, 0.5542922854832517, 0.8451538021327818, 0.7609229643910963, + 0.20322139789881055, 0.19613460443243247, 0.4877221228569868, 0.07799327665391276, 0.4361623085868662, + 0.7848802379167125, 0.028815056694806218, 0.8651508625961432, 0.29092463751168984, 0.1390582423532981, + 0.6993197249789133, 0.14231757389282162, 0.1083863754216795, 0.3625472467980002, 0.14505024216963003, + 0.8200658665410974, 0.7968824639528654, 0.6618530684952463, 0.5160405218237385, 0.028628746088782964, + 0.35371581951523445, 0.5169226303879305, 0.24842576000539407, 0.03269211136047323, 0.05725122672515359, + 0.6077258812403299, 0.5224446580796269, 0.8201113759500455, 0.8591774693421538, 0.1675703588717632, + 0.2751789066131015, 0.019436234481995718, 0.2483777025372399, 0.7982952618845935, 0.5178779242802152, + 0.9385202108571628, 0.20063447638075604, 0.03164807497620137, 0.8822102510382267, 0.8118266315574242, + 0.9370349609091597, 0.9259509251189175, 0.11583949630007695, 0.2768616137303268, 0.0904213059931871, + 0.9915454567591488, 0.477127172211414, 0.21962298792153057, 0.7266879803102998, 0.23243483589723768, + 0.32505132567404005, 0.2655019323791298, 0.21591330787338403, 0.7151286837756502, 0.8777953478453259, + 0.5604171783545818, 0.023477569950988042, 0.06667830388497886, 0.5398821914237821, 0.5757771081366516, + 0.1387848773741488, 0.4833680301438109, 0.400170260600235, 0.8192016456448884, 0.4284601455139285, + 0.8631104395021726, 0.6584446572661563, 0.4503948970340629, 0.9247637083247597, 0.9437563044282649, + 0.5402796186589688, 0.9621385789411242, 0.3536093364032804, 0.7532195123155386, 0.0702541202543765, + 0.4647284718667497, 0.5728555116484396, 0.1675005224937116, 0.6977073213488211, 0.3612465032799286, + 0.37398922212992947, 0.4098425230443963, 0.6287358324055164, 0.025354059183320787, 0.537990984274354, + 0.29853901678616723, 0.41071197599028153, 0.11435729827840702, 0.6806771469477383, 0.33161805144909073, + 0.9620861996697827, 0.4811257193262529, 
0.4749231575081784, 0.49005537358864426, 0.022926946423551198, + 0.0961682978459435, 0.5103674927351255, 0.29262177724592997, 0.2752587337690052, 0.9230332530056905, + 0.5668545411293267, 0.8699690342860524, 0.9387610369102437, 0.43480523057342557, 0.2979595447711083, + 0.0027241803325944725, 0.8341336664732593, 0.32757537002122084, 0.5468523567381457, 0.21439393230772963, + 0.575958911206891, 0.9939529256845033, 0.4317700940133713, 0.08449199698497156, 0.19424683250264851, + 0.05137991928971608, 0.012121750214663773, 0.34743696271912694, 0.8012358284257299, 0.22500284346279054, + 0.6034987756683131, 0.25536210422029126, 0.6944632499552538, 0.5008696086309401, 0.5472339667200282, + 0.6393426833736836, 0.2579398594254134, 0.21173799803936055, 0.9444304308969173, 0.25095830589189205, + 0.6312736667177735, 0.6485093642760426, 0.3202977353363913, 0.266356455942249, 0.6753131459458452, + 0.0521440293335389, 0.16219314369523896, 0.4002295871259327, 0.2878193817104582, 0.11044592193012814, + 0.5812748215288497, 0.08579977985722398, 0.4554551795692734, 0.9708449605395872, 0.9336509959017368, + 0.9037468325261414, 0.35499630352858136, 0.5745873084467717, 0.514629777013557, 0.4310077888023446, + 0.29043706156619076, 0.8759766805769359, 0.1628433377367976, 0.41648593550701585, 0.8385991890888439, + 0.12796506022323428, 0.6108704529131513, 0.7093630319959778, 0.551069523878771, 0.9272572666725544, + 0.12773641650492673, 0.4914023993531246, 0.988261355260223, 0.3858301885620561, 0.534837423261381, + 0.6771547015748663, 0.6025349603863696, 0.5123086994428299, 0.09161809230201745, 0.865339352101915, + 0.13789486515199068, 0.4866706863976924, 0.663569377481684, 0.6664523869186756, 0.6543286590390945, + 0.555256612188501, 0.1729395730341049, 0.260390530729226, 0.2213322737946819, 0.6545503986542881, + 0.5909430311054535, 0.12689603843821762, 0.09216669454504034, 0.8486122112898536, 0.9846536571621723, + 0.4204089270576824, 0.9974542833337509, 0.9504336395764662, 
0.05856864439748655, 0.09607414656750368, + 0.3088302850857947, 0.7465920586351917, 0.4744672919397813, 0.15501583262226715, 0.8237080700233929, + 0.6562703582580695, 0.1094588589546801, 0.6090623335064049, 0.927256786606383, 0.545711638440446, + 0.7008757605588825, 0.4451534491986231, 0.8650744605917171, 0.4184327769855637, 0.4647785411275833, + 0.08515230026036491, 0.9098807629870131, 0.9932342604333486, 0.5906840413129445, 0.11267288744207593, + 0.21141560934429937, 0.6534195100849618, 0.23849340835747435, 0.19583402336156863, 0.8403388840215907, + 0.8551560927095075, 0.6307337374123051, 0.34144938572310357, 0.9657195507769762, 0.9025853847944545, + 0.8046540003081939, 0.39343576785993994, 0.6545922725314345, 0.3894181065887493, 0.07668062061479053, + 0.19314619197103522, 0.8898073123794926, 0.4820019923477086, 0.12335697463010498, 0.518606425913763, + 0.8037571948475324, 0.2203871820169212, 0.07961557346747206, 0.508516256134254, 0.7903889426741931, + 0.7544134932167482, 0.14990663636449686, 0.35572620464071913, 0.9805310612356171, 0.09157054120464558, + 0.5563336454536241, 0.9662444090555127, 0.5090206107819178, 0.8944698597733473, 0.8000772399525441, + 0.04067805058569762, 0.3805312136411677, 0.010863679227310574, 0.6575222861536396, 0.24093071006343825, + 0.12615167232749558, 0.34967340587406814, 0.862788329972486, 0.13587777602603968, 0.6041957173933654, + 0.2741317860937672, 0.5342941821706088, 0.9522304802915943, 0.18781842622570633, 0.6169147657473454, + 0.9595486421618156, 0.08553288558303762, 0.22403659314065727, 0.40039445737826707, 0.21933931192444955, + 0.5854697640452231, 0.5323545554233373, 0.32959624274894794, 0.39460798152765886, 0.7399584483045175, + 0.004125306225145953, 0.9105935327516589, 0.8020795679790991, 0.20726711699880285, 0.2668317830595627, + 0.9172049811077346, 0.8495827601838573, 0.28341632735419997, 0.9952259746480382, 0.23576871224088314, + 0.6806879637359067, 0.6044767139301521, 0.4433775846107185, 0.34513857810497683, 
0.2994534045925553, + 0.05238415088672388, 0.6410694833723776, 0.6399598731768124, 0.03978133776424597, 0.5423330038145646, + 0.5970895839686575, 0.7863807099666166, 0.6333996240209262, 0.5462896632612788, 0.4775576864554053, + 0.7194197421541765, 0.9078034238059605, 0.653273952596768, 0.8403652650017311, 0.7320046609681803, + 0.3808008953187787, 0.678874817396369, 0.05023655136009764, 0.3804745842917795, 0.5191840201594836, + 0.1942054427327966, 0.4917273829488107, 0.9448996174821792, 0.5336360074458083, 0.9129484273936075, + 0.8188182782979259, 0.006171505314833681, 0.6271177954250524, 0.8140091244177107, 0.7725990689030878, + 0.8614352443379194, 0.4288072649460176, 0.022329314776708853, 0.666050636871286, 0.855826010530713, + 0.7869731610351378, 0.7835811772913304, 0.20433634929680133, 0.12489345978692135, 0.37881920204462194, + 0.08789321232933589, 0.7897819250093187, 0.8477882051689373, 0.016645549464185172, 0.41683321529801165, + 0.33832082214605197, 0.6199646304097349, 0.9468701284255318, 0.6916810523743708, 0.04336312618561067, + 0.9733136452622987, 0.8520285074708756, 0.21735661535944628, 0.5239419285502995, 0.29118621976589987, + 0.7762732987948073, 0.8504786503237889, 0.7705821836573169, 0.10990372813502114, 0.24823204807103083, + 0.6874061898061669, 0.34090806337380086, 0.13466120206921828, 0.995040753562894, 0.5058926033714615, + 0.30492567969563034, 0.06847536385247566, 0.297578369183902, 0.7793384593642458, 0.5916989106687826, + 0.08729935221447493, 0.15943979335783653, 0.4203789058169274, 0.27602220588351767, 0.7873128406599462, + 0.03267788753450884, 0.5036901374293759, 0.3744176620304426, 0.2059337366698395, 0.3607317242249408, + 0.06801433930055356, 0.6824701918978613, 0.022092532079795224, 0.00242090469860079, 0.9772791323491653, + 0.8565818459966438, 0.6969635381810998, 0.33317884351237215, 0.44727823322686355, 0.6453848435329949, + 0.3646355881793919, 0.10055533045164433, 0.817290240485568, 0.44222664123746036, 0.6013912203386002, + 
0.3910117929094934, 0.9873771738376842, 0.7251662344448621, 0.8403405291787942, 0.5950131127906029, + 0.5718552844125294, 0.9251672475211994, 0.509054517101543, 0.006074733984967939, 0.018588341924630325, + 0.3271095725395706, 0.7576314882278509, 0.26915238015695464, 0.3637593133907019, 0.7784995456642605, + 0.41919714000481456, 0.06852122837363928, 0.04522492111154375, 0.9943936816201077, 0.5897337159109326, + 0.22798418752271354, 0.48925028748426713, 0.29616250017007784, 0.36240420648471006, 0.7286386898673131, + 0.5547136206013454, 0.07191988097727553, 0.11567880951890352, 0.8415967708712976, 0.6205088908104088, + 0.11669575952651479, 0.8722345350609377, 0.4619459388172661, 0.804942369740075, 0.5557809517139652, + 0.7757179788098014, 0.21746085068525223, 0.7031727612448657, 0.10106531972042998, 0.41312951472912907, + 0.8236253797218036, 0.4505377733927417, 0.9392364939354915, 0.7931287503261454, 0.15233835841563403, + 0.6984026520924237, 0.36137893303767754, 0.015955984928101796, 0.4382947653743352, 0.7427227439555167, + 0.7353732748118352, 0.8993421539040412, 0.6961830888047755, 0.13905241097991194, 0.5113997560843425, + 0.16058764363192768, 0.5854848505264455, 0.3997470625894327, 0.14207229687660716, 0.3229873897230634, + 0.21287197724077345, 0.42609120355968355, 0.8605832873616984, 0.5158185492873943, 0.2247684159340716, + 0.49986924411351596, 0.13343167175978632, 0.42341860591577296, 0.029401543826772736, 0.6675407223512813, + 0.5533450716289082, 0.6765319980777957, 0.5432334575915536, 0.7062191053760678, 0.1571880583935309, + 0.8730320512733531, 0.5817819167440702, 0.6898406824601082, 0.2216501304424735, 0.44493503735111994, + 0.7565297435830589, 0.2787904667860476, 0.004928831866292338, 0.965051216972245, 0.6933195621968967, + 0.9916883444019905, 0.8180467035034698, 0.2820621321784835, 0.10407498213564337, 0.2568365237182382, + 0.5249104021805109, 0.7767961908541196, 0.2613177121138983, 0.9628980545066194, 0.749451549274803, + 0.3753916132921151, 
0.23421897933574898, 0.31298547260191445, 0.23573410648692827, 0.5033280718530574, + 0.9277999828057073, 0.5748949891547089, 0.16295051495838597, 0.8689641412498003, 0.18898102980231846, + 0.3588919574429309, 0.1264678447810591, 0.5834366224090215, 0.33911734894603585, 0.3656694537393448, + 0.32122146810751995, 0.16745988829368452, 0.43693359964853173, 0.06973799643233103, 0.3248355212026095, + 0.14763335781413833, 0.3476104746194667, 0.817602676763262, 0.36248458012369766, 0.856492545062612, + 0.4395026411777847, 0.73397694885687, 0.8391978238311073, 0.36800630745810115, 0.2827826768750704, + 0.5910756652557415, 0.33509839503813366, 0.8171650667891336, 0.8098128260619872, 0.2111290831550624, + 0.34677339899230586, 0.33426392482795697, 0.6071363374334533, 0.7600574087272135, 0.43576959236913915, + 0.8869780897386745, 0.041857536325446976, 0.7285900181449885, 0.186387445554707, 0.6215025200817137, + 0.7253414742410499, 0.020677481127253317, 0.06662986874779053, 0.21984992531434233, 0.6442975824048451, + 0.6832636543607493, 0.45441652471237903, 0.18619063557691684, 0.6461382302653605, 0.7022525663594434, + 0.32288687967206764, 0.0873048863620759, 0.8348281814451475, 0.5293361252902341, 0.014467480647615938, + 0.1501527318621213, 0.5931367881983234, 0.9696726865069838, 0.23107716328730177, 0.017392222492692122, + 0.2788126203480148, 0.9601670708450253, 0.6231690644599602, 0.7434756319353291, 0.7314555384049621, + 0.3692090950638399, 0.4388051532158317, 0.5906438943933574, 0.5261582532434398, 0.6278059405753478, + 0.49753012508749894, 0.37891756327998005, 0.7776016296014324, 0.3825186602895979, 0.8418506103990868, + 0.23722963005376518, 0.24405228072870488, 0.989778527477444, 0.46279376838224684, 0.9278832973815989, + 0.8699992663403898, 0.6005673958299734, 0.5993848146965396, 0.5412279949702886, 0.7878247969127772, + 0.034503088566865014, 0.9225404024690042, 0.1380945147552355, 0.09617948924474273, 0.14054399057748967, + 0.03139448457994143, 0.019264303448693676, 
0.19224920405921286, 0.00788958407872642, 0.5850324340263784, + 0.8253959049385001, 0.9320730659873574, 0.4373846121161843, 0.7709179948508474, 0.609836614120597, + 0.49717153545619164, 0.8711852157801344, 0.9083987854437376, 0.6385629454668169, 0.2706557182733671, + 0.13770027869736168, 0.8134800604631495, 0.6400143268110565, 0.27881771881996487, 0.04912390822078061, + 0.1323452117059134, 0.21486066229408163, 0.5585782183352858, 0.3317528720253524, 0.540278945432416, + 0.36703815753029445, 0.48072500713347677, 0.36571935758307683, 0.7401813472919951, 0.6222244657965631, + 0.09128466624592846, 0.5288814183206524, 0.35443979169533024, 0.31533099998918124, 0.17483739156490963, + 0.4945933666564626, 0.3946644410397515, 0.64422714348506, 0.45858713372065474, 0.36781266380642164, + 0.1971410607971622, 0.5656166767331365, 0.3495356863711895, 0.3172500058453548, 0.9678199781398154, + 0.8573585772674752, 0.30740534181062773, 0.48411915205236533, 0.6159498726293186, 0.06467398382073486, + 0.15667172224247183, 0.8046957090079055, 0.7844166722513558, 0.15689674529769004, 0.6385414766258448, + 0.6804042169539273, 0.5599573171410683, 0.50273969636831, 0.17985583847578201, 0.6437282605817535, + 0.882365910511107, 0.05274652452949369, 0.1695957161260987, 0.47336767956517656, 0.022040856519291552, + 0.6018172384639489, 0.7313807486411453, 0.9778208885928666, 0.8409551009406433, 0.20922542168460456, + 0.8480336202701808, 0.5357538988125191, 0.8463753970421294, 0.22512554832323817, 0.24382546903627433, + 0.08070582084734468, 0.9226499775645752, 0.5979512189284536, 0.2651093874451851, 0.16649530226853937, + 0.7952211380906614, 0.424264937686758, 0.032507854476437226, 0.085542708571817, 0.7584875823907452, + 0.48503745888701366, 0.4273505872199257, 0.6669212686328694, 0.8125779502454057, 0.0019919964858199357, + 0.44318480118885717, 0.6420928202680382, 0.9289275673745219, 0.41973825161374934, 0.44315081480224183, + 0.6259165341074122, 0.13555799414241676, 0.5423805318035991, 
0.96247474990818, 0.6124559517051499, + 0.05067308393557268, 0.46599182061100297, 0.11292872692499234, 0.9985736120943277, 0.5992490278072569, + 0.7238613412713313, 0.4437609858627347, 0.9804525071663491, 0.22677582633383397, 0.4974697714563904, + 0.3354251370554284, 0.06183872571323357, 0.5417410577066006, 0.7499159854776997, 0.9634128837361462, + 0.44705059066223585, 0.7310717053325555, 0.6017551285956273, 0.03021635973579806, 0.8263894348017284, + 0.34184328646679907, 0.6934786900076532, 0.8755539934243413, 0.886259510667948, 0.789738769562747, + 0.3728218841248855, 0.029137928463804474, 0.04965028509717184, 0.5198203037016257, 0.5592408071942575, + 0.5809156370747466, 0.7617801530466735, 0.5858241458997088, 0.2930079141807612, 0.30649055361496447, + 0.9726393850015339, 0.2464121938938435, 0.9196035236558062, 0.7259936900551144, 0.7133232806313118, + 0.5878263829178927, 0.636631938256863, 0.6144656609886606, 0.05715894106848618, 0.005880004361248914, + 0.679707909739096, 0.2186213158503424, 0.6133430992346649, 0.3015874026813069, 0.15621822494181803, + 0.8295640445140852, 0.01852965051811306, 0.828887386739987, 0.15781112803043973, 0.47861782385439244, + 0.9208908003218215, 0.7609839981943451, 0.026333045135565225, 0.8562632247719897, 0.012473466940510325, + 0.3260682569236243, 0.7557722949183431, 0.013596916473211706, 0.939948607735632, 0.409201124797154, + 0.15887422384485994, 0.7837441947981376, 0.8207712664549597, 0.049506506762836966, 0.3419266665341274, + 0.0298939857175895, 0.07531601920177633, 0.9184395417624108, 0.9727353981143915, 0.3014545650991026, + 0.16127113278716054, 0.4742789366783612, 0.8063095982365737, 0.784080478445802, 0.594633797665965, + 0.10736032510658122, 0.3789287686068691, 0.08267573889736679, 0.7498357051690385, 0.0006130839486564454, + 0.15309844048655807, 0.32974467126432716, 0.49930893276022026, 0.7185987311654768, 0.16356812949417132, + 0.7674880641725208, 0.23186902509431684, 0.43171947512796116, 0.3607870091219725, 
0.07861920055464922, + 0.9143405146090642, 0.6939900324209817, 0.8252559453720194, 0.2737528690634361, 0.5352870930660506, + 0.5589018536273382, 0.8080174898444882, 0.27490275717512347, 0.0973435412134741, 0.8965124268468283, + 0.5696507894525211, 0.4932570010023084, 0.1143785860607246, 0.5312925748659688, 0.407633919215049, + 0.6723501416953159, 0.1701140553699696, 0.674761692465671, 0.28224951900771, 0.04937842339832421, + 0.6867630605177261, 0.916829981798807, 0.5374174482281462, 0.5950659436327118, 0.3050912809712565, + 0.37883544150282067, 0.3450343859162607, 0.965860524550409, 0.1409354034143927, 0.6985587446999647, + 0.4150112313344603, 0.1906910269033708, 0.9931025967186558, 0.8919750379869588, 0.660555119999795, + 0.36665573220807535, 0.3995095171574481, 0.4296598365023466, 0.01989849700213986, 0.018052517344653518, + 0.38526569226129925, 0.9800683026870219, 0.506288676207403, 0.8206251879085452, 0.1501477990573663, + 0.6004154451851471, 0.714101616673333, 0.9227664897499039, 0.5429091959929884, 0.1740731716209658, + 0.5469301484084275, 0.8544848155367878, 0.8750486607980661, 0.8985653478197856, 0.3076726891877498, + 0.03434871223448421, 0.08128110336736016, 0.15498478323301157, 0.8018903956433008, 0.2964672109857647, + 0.7035796449406677, 0.6306448814983877, 0.10959617578382852, 0.6458128271903357, 0.7516171473350333, + 0.6806632835380019, 0.11888752281041293, 0.8092421097793012, 0.6126079097409498, 0.2145593785956328, + 0.1481628621945109, 0.40003201860058957, 0.41300132734426387, 0.48601215825839583, 0.32320085891402706, + 0.48974377453984297, 0.31267030148076125, 0.48887291344483097, 0.6875677870984734, 0.2327666378513159, + 0.4859241264443517, 0.4147733820584908, 0.5454363386070727, 0.49640154925412217, 0.5398451098540821, + 0.8323304246448402, 0.4702413124605943, 0.11916838893032078, 0.9009896716229511, 0.19197758667721276, + 0.900836594320766, 0.8106694344455577, 0.9418253844653995, 0.06332430124605981, 0.4908897899926943, + 0.4561920374226772, 
0.5048351151379978, 0.06164780395668834, 0.7012842955947854, 0.9931521625581684, + 0.28616498396404, 0.052591010400378835, 0.32033169405500295, 0.5225735424276101, 0.25127433737067917, + 0.6379917910568372, 0.11096274939206086, 0.5501441660489764, 0.34083688630436615, 0.3668192645944318, + 0.8749029394626905, 0.47745339291693634, 0.5164530650387731, 0.4806060285163555, 0.6315800725152276, + 0.7941266653769916, 0.7405011565816481, 0.9611417137511172, 0.15912725983710152, 0.003506592449767476, + 0.9312822541717654, 0.25159725483599815, 0.12598199516248132, 0.9526797769358988, 0.6572154760204325, + 0.3516367312822609, 0.8341733061763372, 0.41831100857965864, 0.310111039750102, 0.6498028556614855, + 0.7241174439329257, 0.633961532712646, 0.18228233853549336, 0.7801150969303667, 0.4964775618068632, + 0.6590319095994588, 0.7085520684629011, 0.8915409450436893, 0.8810594306794882, 0.2130708592860212, + 0.813453758457884, 0.24253872610367677, 0.1901627970373514, 0.5711005083911994, 0.7586833944574236, + 0.42596987376122886, 0.012908133371798969, 0.7043586011974605, 0.5717352304310166, 0.6103125188885554, + 0.7844733630790597, 0.5104378623474116, 0.5499833175620161, 0.8648788229870941, 0.47991554983998364, + 0.03263047601255409, 0.9766629260153232, 0.8635121646120435, 0.8913036372894022, 0.9125866614696632, + 0.3935725985979368, 0.9378705066825989, 0.6219832762988788, 0.2842726447148409, 0.6731404050429173, + 0.32356752343460526, 0.8907119658093242, 0.05466237135341856, 0.41020829495245903, 0.25440100368942453, + 0.05818847313656628, 0.4910682817255333, 0.5392549185797869, 0.7727254785212105, 0.4642778462807007, + 0.0869156598989933, 0.0029485271725963047, 0.6192632949400628, 0.6178495288086921, 0.27289155576504376, + 0.5500553656898928, 0.08064084523102322, 0.9642408219455398, 0.5836560038308837, 0.04329525589678418, + 0.6101749642453493, 0.6958685259543261, 0.6502409769887356, 0.46612625608306857, 0.08451831720702796, + 0.5650623831556305, 0.9330555317628653, 
0.10349994135135498, 0.8237565815658521, 0.3114302365163585, + 0.29433434410944437, 0.8357408275575645, 0.2734414382926802, 0.007212850502877788, 0.703254095661067, + 0.9049579406346802, 0.8458425288636707, 0.84283034917116, 0.48538651511395514, 0.7161105097622201, + 0.026869232949236377, 0.1101749687207052, 0.5613147142445803, 0.335244686950531, 0.1572960090124308, + 0.23710485350577448, 0.2532346563624003, 0.5526177244498481, 0.6428826439383732, 0.7495770131275985, + 0.4566978242166606, 0.1412060935608218, 0.8497256970190455, 0.0931660900418888, 0.20714652772845288, + 0.13841083344533156, 0.1550589101655272, 0.08416754318344988, 0.6760353133462065, 0.12508051431179867, + 0.9214576170627046, 0.8042768937476864, 0.8697168547501292, 0.31768490225873736, 0.2695787844122769, + 0.970147806556169, 0.010399720490826825, 0.042850504765231445, 0.3288235287475405, 0.4039289383957364, + 0.3280247835683864, 0.6718416492820873, 0.05571950014578253, 0.1908114565908603, 0.45941496350396216, + 0.45465847717072594, 0.3457833275163299, 0.0024900972192264392, 0.5173711364393875, 0.9878949599339439, + 0.6934846133164922, 0.8352518312004765, 0.5840781265260641, 0.4597246380499571, 0.2719917237251668, + + }; + float ret[50 * 2 * 16 * 16] = { + 0.593598886073616, + 0.7640503360588631, + 0.8995848902518164, + 0.6548775890570243, + 0.8412044288191766, + 0.5892340690827029, + 0.2332279106020284, + 0.5441348813279586, + 0.3651441508264721, + 0.261073786630996, + 0.4595377428024995, + 0.3135189874348636, + 0.6177834911425752, + 0.5841020456510232, + 0.012700883776225358, + 0.7146020341690771, + 0.4810232050731812, + 0.8401136831438388, + 0.044779165115809705, + 0.3381492966273564, + 0.5206080076671794, + 0.02004867341974026, + 0.025546947958476562, + 0.7030645898390516, + 0.9497262000622213, + 0.3161574993701024, + 0.5120266707646158, + 0.5536882843401529, + 0.30990155481123993, + 0.8616061390983086, + 0.512859678623727, + 0.7727256424739116, + 0.3478036101156211, + 0.862314576961328, + 
0.7265453786063859, + 0.3990489995709843, + 0.7933147161011688, + 0.09314295500017489, + 0.31576728271006205, + 0.8184720620327086, + 0.9558626677938953, + 0.14069313873197198, + 0.5722042120913776, + 0.46754492659771874, + 0.5327041615588409, + 0.3262050907222336, + 0.9613778130020714, + 0.0522424158382756, + 0.762881531801135, + 0.4207283912096622, + 0.2647576379692258, + 0.9004613845739784, + 0.7197322198409547, + 0.30476350353769943, + 0.39291911010389213, + 0.2233338288842549, + 0.9202809420588011, + 0.6774249117286338, + 0.501716583816315, + 0.6308028672446349, + 0.0224183069131898, + 0.9456243710063653, + 0.25700651754797976, + 0.41352647668663955, + 0.028577636455854316, + 0.5991317044421851, + 0.21048966111106027, + 0.6694397427653418, + 0.30005986074297075, + 0.3681036108249589, + 0.45877831380370426, + 0.9639414367524526, + 0.47126325374736266, + 0.23368110179707224, + 0.31836804737899427, + 0.5020266281753762, + 0.28135114234088454, + 0.6348272760218778, + 0.1567504548037416, + 0.994663312091293, + 0.7156008663579433, + 0.17899343985762617, + 0.47224562757448907, + 0.4938687334258216, + 0.932070188918579, + 0.2952207303230576, + 0.3996024914761337, + 0.3756977268668765, + 0.3073641724528108, + 0.13643156766200293, + 0.8085562285576157, + 0.03779439381497185, + 0.2756085535713453, + 0.9047101702125221, + 0.3746771053844279, + 0.5901004198233865, + 0.18361490618960774, + 0.32954442583557153, + 0.42175519674809736, + 0.10666682823232998, + 0.08987037681628562, + 0.31866167217169594, + 0.12446896049963141, + 0.884932034280004, + 0.2993922293811473, + 0.8335936956874417, + 0.621389898932532, + 0.3153986073807654, + 0.8808870930320241, + 0.8584106383590273, + 0.012422655117338643, + 0.5898241443380592, + 0.17418305506844411, + 0.31948736647554354, + 0.3958647885705613, + 0.34196869016830245, + 0.09715095501322835, + 0.4959605824267873, + 0.7580487511669882, + 0.38395229005034515, + 0.0893967683976089, + 0.9470119344960851, + 0.2485564193119627, + 
0.564585511491296, + 0.03214874806890877, + 0.2861615213186801, + 0.09439971450316509, + 0.6431827666852941, + 0.868328540443803, + 0.8972870220627548, + 0.51509383654657, + 0.4144553904652547, + 0.014577791092747039, + 0.15801448394148743, + 0.22264358815364704, + 0.5450426127551828, + 0.35673438486349596, + 0.9025375539568234, + 0.5701856047432775, + 0.06173266022631119, + 0.9616911288733874, + 0.45639216527738224, + 0.6505962977309145, + 0.05109898111904021, + 0.2454222515962139, + 0.7073536726266456, + 0.25006775962896666, + 0.6739125824939687, + 0.6976435631334147, + 0.650924399569012, + 0.30318909847958275, + 0.7989389203837962, + 0.1319147541989184, + 0.6957737981454633, + 0.035395566125893674, + 0.6249840540018826, + 0.9375165772182906, + 0.24131241133396497, + 0.07595680959712214, + 0.9635626193632769, + 0.47452523495658727, + 0.041438347867159275, + 0.5904022355055402, + 0.6793523782315881, + 0.5261717916797551, + 0.3446824272811363, + 0.11980142858465348, + 0.7856508121051086, + 0.14202190413081717, + 0.2633388550262611, + 0.0919903179772098, + 0.12884164259502529, + 0.42492940458309547, + 0.23105323339508366, + 0.8334136272910464, + 0.08216278989323067, + 0.4970056033827158, + 0.22641122187780194, + 0.8591845333660317, + 0.24262210430854425, + 0.5281887933678414, + 0.7296872891154248, + 0.8242576076118073, + 0.4226823294268798, + 0.7480685173760554, + 0.12121181432866401, + 0.10604432286713128, + 0.02454334913294509, + 0.8469409244508359, + 0.4958456761013731, + 0.9004555370037944, + 0.9416523093400732, + 0.44033689982411994, + 0.237252883210339, + 0.7874546149461759, + 0.7407960038253821, + 0.6960407038308487, + 0.3605032367839367, + 0.0719144300021557, + 0.7808140169558024, + 0.5866700378904192, + 0.5259391748498912, + 0.8404416599464803, + 0.2245721137214033, + 0.06094007599112705, + 0.003179245712851775, + 0.4961499830217023, + 0.8897286498937441, + 0.20882891733314446, + 0.3927096579340904, + 0.3051683274263698, + 0.6984980658550315, + 
0.46410183847788966, + 0.9231719122159048, + 0.7209623238406305, + 0.7000457426533268, + 0.4869916440564551, + 0.7367290318588684, + 0.12860038413375907, + 0.14344738309947613, + 0.09024703327969741, + 0.5042477018544159, + 0.7243912969474776, + 0.5797351253004503, + 0.5732021035913847, + 0.7409622576131739, + 0.2290639679002101, + 0.8325212880903263, + 0.9207233447072046, + 0.9474146710205339, + 0.7012091007316089, + 0.17825579738154085, + 0.723003973041874, + 0.151081423900993, + 0.48945980023973557, + 0.7420366081457724, + 0.29337726136130815, + 0.6313796113621828, + 0.45790464283773213, + 0.40062092370169566, + 0.08708056610611004, + 0.3051880686555266, + 0.05499208294913038, + 0.027214777055414197, + 0.15072384193050814, + 0.5526495368531591, + 0.9837826851193835, + 0.5861264339223254, + 0.05516626705471395, + 0.19939505727525375, + 0.009547298039236707, + 0.9111708085676391, + 0.4934325829052457, + 0.7365049039159944, + 0.9142629540580506, + 0.9821369815023923, + 0.7955889964539764, + 0.24195638565265287, + 0.37232433612404425, + 0.1820340231522195, + 0.21530880742845604, + 0.5055310644029261, + 0.9965007859060797, + 0.568503464864409, + 0.2878017615246292, + 0.14728746460860487, + 0.14211637660664622, + 0.9152636785078667, + 0.44454216965617, + 0.4169155085534574, + 0.3704774021439089, + 0.3374031316474292, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.254864207208919, + 0.8840559717343647, + 0.16909888137272389, + 0.6452903410416716, + 0.3097707842905164, + 0.9029637355277926, + 0.3276296093741684, + 0.7480681871444872, + 0.011642991275112147, + 0.2650827333618603, + 0.03795957014907747, + 0.13081493418889845, + 0.4135850771430212, + 0.6876803256834041, + 0.2060757695447304, + 0.749856701753336, + 0.8787445333365519, + 0.9489539368183714, + 0.5383303892178303, + 0.8971474117935213, + 0.5710761866417872, + 0.8510902073554719, + 0.07092157059977744, + 0.7396416787629857, + 0.4881684662549931, + 0.5489209619076307, + 0.9526690906876173, + 0.1639972955750726, + 0.6030846455523572, + 0.3191987438682232, + 0.6943602656551667, + 0.9234135213494146, + 0.05375714705653023, + 0.8534001857378085, + 0.644594667838801, + 0.15035462916965436, + 0.5477339248308509, + 0.17552800922227452, + 0.290802508299598, + 0.6549658560134354, + 0.6007702163154556, + 0.8344830851734352, + 0.9387742137594037, + 0.28063147367385277, + 
0.30695864964688924, + 0.8596804632685916, + 0.9630346192828135, + 0.08419942590996321, + 0.7231636247301719, + 0.17361779186876702, + 0.913108420970242, + 0.9196381893926627, + 0.7605995653286933, + 0.893073083777841, + 0.05713051627690491, + 0.6984077912683978, + 0.857902842490028, + 0.0576256978132238, + 0.3479803875633284, + 0.48857187345040054, + 0.5935098145397808, + 0.39694677154398783, + 0.3463652212059517, + 0.47078252814943145, + 0.027548393248939873, + 0.18580751724856692, + 0.8384901380630441, + 0.6848992718751541, + 0.4538819498622414, + 0.11343360867793151, + 0.8183873298634738, + 0.7045128593216903, + 0.5422221257254408, + 0.6190392398627965, + 0.39146347056866737, + 0.5254607717287598, + 0.0019244685659860483, + 0.6374052786612026, + 0.06829090486182787, + 0.518567994057743, + 0.19551397445035223, + 0.396021555811379, + 0.9031659295324586, + 0.1273558615809821, + 0.19976872998007278, + 0.36589357848325355, + 0.6911733881871093, + 0.28566015003041256, + 0.1897466176882897, + 0.8007086546196863, + 0.06127421795401411, + 0.49338385751076785, + 0.018436125967998374, + 0.23382229548557953, + 0.19263564504761732, + 0.49186591814800074, + 0.6879471457464532, + 0.9411698952673958, + 0.8535823096989902, + 0.44960734472191477, + 0.39552906204900906, + 0.844271719810783, + 0.560020339438646, + 0.3691923793036713, + 0.11101765420923704, + 0.4709735503721877, + 0.0047655945739981664, + 0.4149348374024552, + 0.2672045198455989, + 0.07811123274017873, + 0.32315326893263696, + 0.3853125544148821, + 0.8598585098047076, + 0.9321026513955185, + 0.22673864302147995, + 0.9479758145303829, + 0.6515756499374604, + 0.6333632838090231, + 0.012720043129243686, + 0.11149529395813496, + 0.5088609582876709, + 0.9145095023322514, + 0.44050874572789867, + 0.9296778777274274, + 0.4030523140074629, + 0.4940467199482641, + 0.9658661703626518, + 0.2860943499760865, + 0.8804411896920898, + 0.38284263217118, + 0.4287946631723204, + 0.5851055024468935, + 0.0988698661935884, + 
0.9020463093192808, + 0.17171637396314954, + 0.8710249783471142, + 0.5179304301603468, + 0.4356525320772865, + 0.4272591391651567, + 0.04293797049020576, + 0.9862500339027105, + 0.567714413888491, + 0.21750950860604246, + 0.1788104391696963, + 0.4065916996039697, + 0.18633965320157897, + 0.9246647932559534, + 0.6855590277237142, + 0.7743622643862038, + 0.5216398515484101, + 0.7841989530099178, + 0.5624804007078483, + 0.12125585422541552, + 0.6947487876096541, + 0.37892607606439865, + 0.3762253290720645, + 0.017296512356210214, + 0.02151003170761434, + 0.23125919646370396, + 0.4966194118714601, + 0.28229441056452487, + 0.22985224728312492, + 0.3722858844607305, + 0.7216238316872927, + 0.9093966330479945, + 0.7194164479288156, + 0.993783030927795, + 0.1369135036396909, + 0.6531441682016415, + 0.7173675424075389, + 0.9921364124025801, + 0.6790054168465538, + 0.9720962563477026, + 0.76319198104096, + 0.25633240022811465, + 0.24791065560281356, + 0.5281997457506116, + 0.8371512079527256, + 0.3504484219036722, + 0.2786133864843655, + 0.7891896293617638, + 0.7426342896941175, + 0.35394453297079664, + 0.8107352765897754, + 0.7950871930975674, + 0.007871872407570812, + 0.3019412477582947, + 0.29633582118536594, + 0.2735670629256831, + 0.9136099549764944, + 0.46934775479275015, + 0.5034813307343111, + 0.8606365671621784, + 0.6811002925409044, + 0.01433608289477506, + 0.8566802266036488, + 0.11392603808919821, + 0.16830115043628857, + 0.5861961608942753, + 0.5708605510921904, + 0.2570433034275209, + 0.9801897867998693, + 0.424597397793149, + 0.8260116609585727, + 0.5922475003965821, + 0.8782612832407313, + 0.056819576144068296, + 0.49459375430687824, + 0.4268129065749112, + 0.660624967578473, + 0.6402133263516303, + 0.9599617547623329, + 0.9871063239560357, + 0.3878043707008575, + 0.03678683986411724, + 0.5115453776645127, + 0.5587526903881904, + 0.2583427026511369, + 0.411372043297783, + 0.3330923344392942, + 0.0072773802162633405, + 0.762454681523548, + 0.4068334280781687, 
+ 0.8634485101791174, + 0.6084342402890576, + 0.9271981127440494, + 0.10676436042784176, + 0.15230173943272107, + 0.372098880749428, + 0.8780981020855343, + 0.4668032879652023, + 0.5984517263715972, + 0.2762685608057043, + 0.5999565198667359, + 0.5323853017321353, + 0.11445372028009526, + 0.06779950265839463, + 0.8903009009929708, + 0.8166846578861927, + 0.6474472493038106, + 0.6078868424079517, + 0.5240219285149362, + 0.6810179025649159, + 0.9031433757449111, + 0.7659913170885068, + 0.3044409796516797, + 0.573478187247429, + 0.8897067762696691, + 0.7214896503568283, + 0.38027068371765504, + 0.05336046567302655, + 0.5948873602102268, + 0.03230960064967514, + 0.25372493200952595, + 0.19332921689264948, + 0.33138854836601195, + 0.15043149367674513, + 0.10086746908945154, + 0.4062380921612012, + 0.9153274001730106, + 0.6296617719723965, + 0.7786489724738195, + 0.9471222788590163, + 0.6252486120890782, + 0.8817814404424481, + 0.5622747890867535, + 0.7998205073676302, + 0.02017230039529183, + 0.2358484942113961, + 0.2016671873781376, + 0.9236993715891589, + 0.18275472370055468, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8835661186316296, + 0.05115835664365542, + 0.053840447225318444, + 0.02249082050834006, + 0.6518443760927354, + 0.9890535380865763, + 0.5965715084536747, + 0.014066342104010254, + 0.4683834965337599, + 0.2625366316770661, + 0.10597991544110108, + 0.46547835256661063, + 0.20270078809315717, + 0.3368827880226678, + 0.8509422924475109, + 0.2449052714859109, + 0.9212334940526165, + 0.8469422160730711, + 0.2650426442330527, + 0.11239467057238772, + 0.9907180473073051, + 0.5777133467824651, + 0.20165325304560588, + 0.6716243495247226, + 0.7817248865886222, + 0.40311373770047765, + 0.8995087408588687, + 0.3235241906767462, + 0.04339066054890195, + 0.682408636964737, + 0.41480608989208956, + 0.5471083528025282, + 0.7801506618079727, + 0.006236962734933682, + 0.0305978613140101, + 0.4030442802225406, + 0.9187915740246261, + 0.6489838592517869, + 0.06347584144850515, + 0.6080928162965836, + 0.8892992380996494, + 0.5025532247815195, + 0.73700975037925, + 0.5102816111445247, + 0.11796054635079789, + 0.38555476839112013, + 0.5912455240508804, + 0.9438500977520248, + 0.3773422894846701, + 0.19137289638190713, + 0.7437100087603623, + 0.05094825459719099, + 0.7108254211518128, + 0.006574273358853078, + 
0.20836881006630548, + 0.2411321322976424, + 0.9155233698207275, + 0.21435108488910437, + 0.08130297073952064, + 0.5825211920966039, + 0.06312378426804188, + 0.6751488092558974, + 0.8169683736721146, + 0.3578599675086902, + 0.5310942824706316, + 0.7819346356100608, + 0.5412269446643696, + 0.7754809831284823, + 0.41748440769112094, + 0.25462644370917586, + 0.4466706415086723, + 0.7350768164075168, + 0.4051975994749256, + 0.9798243783734578, + 0.4804941673124482, + 0.053435090850515565, + 0.9535782537637767, + 0.7559655759550528, + 0.9097232990844312, + 0.3920636261620205, + 0.3873352350324747, + 0.5810289158555193, + 0.41325093470060126, + 0.17556207325455564, + 0.37014857151329494, + 0.4760673738878337, + 0.6675845484142084, + 0.0932725386912655, + 0.018222351978108287, + 0.5739305828722121, + 0.417216927757345, + 0.8002659099288251, + 0.8036520213804197, + 0.22916356934175386, + 0.9370399101958636, + 0.019843076837461893, + 0.6253932165124783, + 0.09685879870223768, + 0.9148613148672179, + 0.40898244015650265, + 0.6689073682061573, + 0.9843825431171334, + 0.9932926659177506, + 0.521712516718149, + 0.7204773146095411, + 0.28793806316128023, + 0.5241758527155814, + 0.10400451682164813, + 0.45268897119631035, + 0.3591292701552792, + 0.0852161993078645, + 0.4999959067584383, + 0.8932605268036224, + 0.34827871816251865, + 0.24479976063472064, + 0.7500176643692748, + 0.450663991512848, + 0.9348394082241142, + 0.2362744726460665, + 0.4837670894157401, + 0.6034513715339858, + 0.6239167717588799, + 0.24002476710515663, + 0.5807023201951226, + 0.8906766847688037, + 0.2990956825544848, + 0.6474144252324772, + 0.7569017845526714, + 0.8726478482024886, + 0.7704113254262928, + 0.47500296037120016, + 0.9355037079911848, + 0.6510416381579317, + 0.09433957578977692, + 0.6794270156932601, + 0.292596705011257, + 0.11023977756057035, + 0.37406566441051237, + 0.4166221542178937, + 0.9455658271087966, + 0.20347606656226802, + 0.22783388070158717, + 0.31255998356680126, + 
0.8360554733297578, + 0.5373527530812233, + 0.9802408460166704, + 0.34527673407744874, + 0.9237427047317442, + 0.2066282957427087, + 0.14343498074223138, + 0.6560317360531576, + 0.417528641590168, + 0.7455190621567462, + 0.3482774747247622, + 0.6876591523449257, + 0.5639806740426587, + 0.3492367980703305, + 0.7078139195134496, + 0.9636162940497957, + 0.23895712015875237, + 0.8600481727284203, + 0.7277997187186478, + 0.4829866830212408, + 0.8920704442811604, + 0.6430121365978986, + 0.9516920897170033, + 0.7708246944234937, + 0.7572173324421296, + 0.05147718869090945, + 0.5190347818005001, + 0.4711505441557061, + 0.2132238043810304, + 0.8773308474385161, + 0.18160103390385463, + 0.5056928530189118, + 0.4104198968591438, + 0.918042741583884, + 0.15371577651638713, + 0.9025397313292765, + 0.9732082132394165, + 0.36000477729935176, + 0.9876749595481583, + 0.965360847206718, + 0.2607618036561612, + 0.7147111282487313, + 0.076674299170163, + 0.8745907195886096, + 0.7610702742279717, + 0.8769839808822592, + 0.019916212943096423, + 0.6556399000644536, + 0.3251401894214039, + 0.8601934876537072, + 0.29567384376495076, + 0.3050776139625416, + 0.354678537938626, + 0.4499959071601498, + 0.7601153094436749, + 0.5348313356556613, + 0.4580342027348445, + 0.5696805674941983, + 0.5768306843586077, + 0.8730974499330287, + 0.6743965324150368, + 0.8830057082031186, + 0.5329638339053512, + 0.6952383709158715, + 0.8156395210292493, + 0.5516356151586437, + 0.9126289407364749, + 0.06892286908639811, + 0.7560346149083839, + 0.2510165158607539, + 0.772531913925415, + 0.17935594816182743, + 0.15778303768306967, + 0.9709040013917621, + 0.4976312446082479, + 0.1817547186604802, + 0.5239713694640612, + 0.4230471625224689, + 0.8725956603209828, + 0.7843375333940473, + 0.07391999485436107, + 0.9444287998344876, + 0.005642345369721635, + 0.7554552148111475, + 0.35966099039389465, + 0.7868512927357355, + 0.8362993556629968, + 0.08564942129829656, + 0.6312561650854187, + 0.06036967944034932, + 
0.9814136675055923, + 0.8891374629356662, + 0.11774132570505835, + 0.8709406566142861, + 0.29843172006984897, + 0.6300582519924616, + 0.6987959773744749, + 0.6819801213461811, + 0.9293424961549993, + 0.7008610017036007, + 0.7358904720246477, + 0.3866831747831503, + 0.782851130931092, + 0.3032921336149901, + 0.9531267944167136, + 0.014612283150246985, + 0.27607345875361455, + 0.21021471138087588, + 0.8488730493970424, + 0.8301563006282299, + 0.04672100038419713, + 0.6304303313175146, + 0.4840095934359627, + 0.8088443449685825, + 0.32645681417681105, + 0.19337399216816265, + 0.5841640354989537, + 0.380079673287293, + 0.13202015978889026, + 0.45101045358482617, + 0.6209213898599184, + 0.9166767563321628, + 0.049565844469375286, + 0.17443805597520712, + 0.33783156185567964, + 0.9951573150019876, + 0.37505222809632976, + 0.1687913588935338, + 0.2933285665130406, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23543352363697467, + 0.995358041617133, + 0.025327923457216905, + 0.14940439314739096, + 0.023764617083646633, + 0.5202134761721883, + 0.29574664157577135, + 0.9328152285289238, + 0.05720381657425644, + 0.8542741668912559, + 0.7742583756744349, + 0.004999791113198038, + 0.12754350413739257, + 0.047045471026664676, + 0.5269192307860279, + 0.1548091646365206, + 0.7472987403230176, + 0.994960240233519, + 0.9904165989973458, + 0.3832271740172364, + 0.7473241151810834, + 0.25898452510968284, + 0.5984857063130656, + 0.5323594054882397, + 0.13730247895312242, + 0.2810449275030956, + 0.8223539945810213, + 0.4257745109240265, + 0.3576869351255788, + 0.2627754896441308, + 0.8327863161297506, + 0.29146773952655247, + 0.9994164564500349, + 0.4609615217259717, + 0.6815180647255956, + 0.2081767267510808, + 0.3269851554029827, + 0.17291177902784527, + 0.7900072574432269, + 0.7708410427932806, + 0.40695877570745, + 0.11623853276783058, + 0.4165719145570902, + 0.29333836892667287, + 0.44055994570995805, + 0.17394323019087632, + 0.9235585542150339, + 0.8678718660301749, + 0.6978350374902289, + 0.49339281222944265, + 0.6933566892850808, + 0.7771852023409581, + 0.9692484464569302, + 0.7343517613599921, + 0.8010006110947065, + 0.9907541002604393, + 0.3181877995182233, + 0.5270115805162756, + 0.9841151577651998, + 0.09622906506883855, + 0.07638206481897325, + 0.6897350375333718, + 0.3897385475903501, + 0.1041607435740014, + 
0.7908819989051763, + 0.0056861651420593295, + 0.6504135564088833, + 0.6804909734267114, + 0.07396539104530098, + 0.7208817435941467, + 0.35531936596642566, + 0.5446432851917544, + 0.2112028523132563, + 0.666923043048465, + 0.9643265085630942, + 0.7283655069758669, + 0.15582296153556985, + 0.5355420949574701, + 0.9667969872974194, + 0.9888230322682372, + 0.4526118083643049, + 0.23179919327971843, + 0.8198885010069196, + 0.9232501934815932, + 0.9547435369999225, + 0.029161443719998736, + 0.006381472986381365, + 0.14618800867292647, + 0.8928935035205012, + 0.9069146462791389, + 0.722971444164863, + 0.520370328160753, + 0.6792760584693588, + 0.35620232843611366, + 0.511323762345732, + 0.6441799127660758, + 0.3080128832782695, + 0.25797286101779515, + 0.5757192615432264, + 0.4704824312845679, + 0.3016474294838747, + 0.39907752651110673, + 0.6666710167047851, + 0.25800870202741777, + 0.27004292282248754, + 0.9577650909321496, + 0.8091056232921011, + 0.757081699795822, + 0.17195183470195763, + 0.4479690859518102, + 0.5225119887418883, + 0.8860324292356873, + 0.30709579680472265, + 0.7051697500990436, + 0.8054511844077265, + 0.6745722346987356, + 0.05244874557976331, + 0.6440488665011326, + 0.7284955977411508, + 0.1612511518198667, + 0.2441608400450831, + 0.36105021010769534, + 0.24894502552978393, + 0.7300780281210185, + 0.8333581842842995, + 0.6074471794458138, + 0.9912065523711989, + 0.6502853921155594, + 0.37112791219698815, + 0.5115465251175723, + 0.07229128010855934, + 0.0017828183696398936, + 0.6451322323785469, + 0.0636661659699238, + 0.027470394942158527, + 0.9763170688293102, + 0.6047927667113809, + 0.15314784624072308, + 0.36808962582443583, + 0.6661171718445671, + 0.4233335693324414, + 0.583367308879746, + 0.4336894637903631, + 0.8557269680861541, + 0.6638110304342972, + 0.7296995118709148, + 0.5770012137700922, + 0.12856074181677213, + 0.639705439399999, + 0.9122205125664681, + 0.9069129182886623, + 0.17691215046108066, + 0.46494213629693026, + 
0.7386792462938379, + 0.8545082119774614, + 0.8209644286385086, + 0.4944256177074621, + 0.38223296756570335, + 0.6006182974561438, + 0.1202626370688562, + 0.07389833653048405, + 0.9107857171842855, + 0.6094761230163389, + 0.5098792059926268, + 0.8196337405122749, + 0.1021941265770745, + 0.3436380384031217, + 0.5366921468787149, + 0.0494559245826679, + 0.48920824111905903, + 0.6614666122549608, + 0.8103468174867872, + 0.6232253503437171, + 0.027278017638104446, + 0.2855248901501959, + 0.15130290169888672, + 0.24117138006295658, + 0.056356757525981904, + 0.36677192991507657, + 0.7123241229125502, + 0.7237366583107295, + 0.2966015459295771, + 0.4759770541371189, + 0.09248317656748295, + 0.7279579107848803, + 0.9717753479147442, + 0.16169284447345456, + 0.013241173287716124, + 0.15288277073162326, + 0.7541502332665592, + 0.4811839975540484, + 0.8549833357469854, + 0.9252480668701776, + 0.8878633888020097, + 0.8640871995934497, + 0.2500014917667791, + 0.7572502470296556, + 0.29095719594187897, + 0.0072386094663621225, + 0.913382367589849, + 0.840314656531769, + 0.1594182459232909, + 0.12249962297955819, + 0.24063391972591164, + 0.7262505388412269, + 0.30159860650826975, + 0.5904138065735336, + 0.5936980433213721, + 0.7564664711327527, + 0.7723484968464406, + 0.0214610612561158, + 0.4076397769682518, + 0.0343064883652171, + 0.46858704645348437, + 0.7959385907953724, + 0.7629217260543542, + 0.021731550538059086, + 0.5954460556106872, + 0.6213974734429244, + 0.6617123430936773, + 0.32487643534193433, + 0.5116411866622956, + 0.907519623003023, + 0.3769599313688713, + 0.5067852983679874, + 0.6896789478430785, + 0.07881112932203993, + 0.38097827414359153, + 0.8378988336265385, + 0.32742531610387615, + 0.7394858873181787, + 0.077607349740346, + 0.9371481647386053, + 0.2555411301823566, + 0.63251335216537, + 0.042692028989202546, + 0.38086950879660486, + 0.20994124637924272, + 0.8243746705124056, + 0.21227445443949655, + 0.7587693474678124, + 0.8593104121111546, + 
0.3400577428684012, + 0.006970056278145842, + 0.5333806120077678, + 0.21714503148417896, + 0.06148880936440482, + 0.7198842980833533, + 0.025019417818456402, + 0.8070146024325086, + 0.7903884011209051, + 0.47573374434427096, + 0.47753412809113893, + 0.29139727349758127, + 0.9700839918998914, + 0.9399314723212036, + 0.13819457853857942, + 0.6720522733281707, + 0.36899065197734016, + 0.8660263887990407, + 0.11377280596671191, + 0.45219446231527505, + 0.9288561452719108, + 0.09117996700414377, + 0.8180472092656622, + 0.5318088610052745, + 0.8730698688662378, + 0.8751462249032647, + 0.4283453212466125, + 0.7734820246317141, + 0.35957815148880246, + 0.5398288735628374, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8052979814026987, + 0.9614046281485216, + 0.5664087914586622, + 0.8831229148663686, + 0.07161247981342123, + 0.1885446231083373, + 0.689774058650171, + 0.9343549456866106, + 0.8493978902300945, + 0.734360899477411, + 0.26283632645587507, + 0.02112106940224867, + 0.6297475238314658, + 0.9813451990818274, + 0.8405678039191875, + 0.2826677055860818, + 0.13378416626157497, + 0.7761636171058446, + 0.3332765676600876, + 0.23512264238959424, + 0.971952717265702, + 0.0627832682903855, + 0.891121814925713, + 0.9303042643420043, + 0.8708130092037977, + 0.7488549249623585, + 0.5958913570123173, + 0.4652083869028213, + 0.7460375000956913, + 0.20989613661780226, + 0.8303504164119847, + 0.22039962902640042, + 0.9315663649824534, + 0.5813252606564113, + 0.8863421090178172, + 0.6310625333575002, + 0.8492166182228821, + 0.5677936030686411, + 0.5814189775500054, + 0.6036647277035097, + 0.940924954463901, + 0.3306043172406822, + 0.39538371880258527, + 0.3016271916299881, + 0.12726232148446426, + 0.54137930672541, + 0.7275992206157286, + 0.38973884756513, + 0.27428204789992905, + 0.6661155242248452, + 0.3847001455070639, + 0.46954959123740625, + 0.6405884041366582, + 0.8914360620701266, + 0.9586353076711583, + 0.584192908495738, + 0.7140047195668955, + 0.49110475345412996, + 0.8537278893431842, + 0.9811191815839962, + 0.4686401842016189, + 0.6374132817106841, + 0.17818477677438638, + 0.7431004950867537, + 0.593121819009934, + 0.5259967311629242, + 0.8750764537865127, + 0.7101686652745309, + 0.17691008133532626, + 0.982755699525093, + 0.9648866927602616, + 0.3025126385092616, + 0.7316856083401261, + 0.334849241506592, + 
0.6368670810370156, + 0.22655336365064327, + 0.4454075093994012, + 0.6128504789331424, + 0.41942294098273125, + 0.3803470446861149, + 0.3622565646438295, + 0.47833956239785447, + 0.505947264305783, + 0.27142371660682907, + 0.5290392503698454, + 0.17399413776971306, + 0.4686652233091897, + 0.8970822283299967, + 0.330552632864599, + 0.7569690074558361, + 0.43767169185007815, + 0.8662493188527037, + 0.05474063120169337, + 0.5063168135407037, + 0.03954457895243124, + 0.14814585358898202, + 0.3017243955765979, + 0.270433496957167, + 0.49154609127497495, + 0.5332831573104402, + 0.6565845581462822, + 0.12976876256885772, + 0.03753027470915504, + 0.3356082745827975, + 0.8311107446709194, + 0.36491977602379455, + 0.5761098481511326, + 0.8714223255278624, + 0.44388093220216795, + 0.7075732542593347, + 0.7307089576656769, + 0.2798760208098334, + 0.09591142920200035, + 0.1830631816552385, + 0.6385396223926642, + 0.8090315513219017, + 0.6005122900535488, + 0.27063867688666976, + 0.3619445957575981, + 0.9689332972947742, + 0.5224080690615437, + 0.3042065425780318, + 0.46180432979865615, + 0.36391568689202003, + 0.7345031436174964, + 0.7995853299479707, + 0.22329056586297824, + 0.30915309869971364, + 0.8954195830043161, + 0.27037860742997977, + 0.901978990855287, + 0.6703368413594938, + 0.5250626326106349, + 0.4214725103000132, + 0.47000259209531847, + 0.8899891492988766, + 0.501084981517585, + 0.29874874315702893, + 0.9920726684155271, + 0.18467830413365693, + 0.7038601512721551, + 0.5033032762679162, + 0.8275198175889643, + 0.660268833742968, + 0.7832616045733956, + 0.762511755100596, + 0.6792802741675339, + 0.7201723913753442, + 0.11552968686895548, + 0.20711829491106815, + 0.9556851362045161, + 0.7117467980975821, + 0.2840817322674267, + 0.5819808043774747, + 0.8570886811056379, + 0.16193594019722202, + 0.7597102937196527, + 0.8508708667154223, + 0.19586968898116242, + 0.1659214622221713, + 0.7180920680357865, + 0.8595486107363447, + 0.9121208474881856, + 0.8735566950883392, 
+ 0.004718702350720472, + 0.9924200869828986, + 0.3588782476445602, + 0.5940481672987985, + 0.7067469502908176, + 0.7150121503830128, + 0.6861942911378779, + 0.9881879624168323, + 0.5025797683160366, + 0.0023469253889222275, + 0.9067422957458163, + 0.2903558535417806, + 0.4837569919868624, + 0.18181760020955995, + 0.760787124962656, + 0.5236314655203572, + 0.6988326660543989, + 0.150176473396466, + 0.6653943915151054, + 0.10617832873429356, + 0.8357193202831436, + 0.41180272052220745, + 0.052912299602330015, + 0.5978441406101815, + 0.9532169134753004, + 0.6551933584691292, + 0.35634075803523935, + 0.13398218696009667, + 0.368175835596789, + 0.8950959753024484, + 0.3431614810578424, + 0.4041760819803951, + 0.5956056235878974, + 0.5592533233861902, + 0.572385802750355, + 0.4183541795618333, + 0.3535463764323008, + 0.029995160341639293, + 0.445230038523568, + 0.5886733570058634, + 0.15432945738670745, + 0.9628429069083267, + 0.4105917748111635, + 0.3025715082537356, + 0.9403039744300444, + 0.8903412227672707, + 0.2884081276420578, + 0.7069500713054788, + 0.47140819251562405, + 0.14469769266153687, + 0.19351817215691625, + 0.9383487421690893, + 0.42735195305626905, + 0.8104740408863775, + 0.5659224156106615, + 0.41549672072081656, + 0.14094001799091305, + 0.07709068370025784, + 0.8730576304026549, + 0.4621663612674023, + 0.9588321813585654, + 0.1691260537697311, + 0.14634285835163807, + 0.8561418303469486, + 0.6536268773776686, + 0.1287418543090353, + 0.19424319361429854, + 0.9340322061343359, + 0.5749152014271254, + 0.1919609461781211, + 0.21076715343534513, + 0.6139370846972869, + 0.9564139911631641, + 0.04937142942795214, + 0.7665370882733304, + 0.6041337467023327, + 0.2822368639498116, + 0.7643259894599647, + 0.14102290528339234, + 0.9329923243619995, + 0.14553713210713182, + 0.6425243181643446, + 0.46485014922456436, + 0.4202679445135561, + 0.9319016828634743, + 0.36793427064342843, + 0.9765902909913105, + 0.6274115409222962, + 0.5142858992076622, + 
0.501734341235258, + 0.27055565402220516, + 0.9724620379137456, + 0.9911737028438883, + 0.08410308471963279, + 0.5023236360316383, + 0.811272836817539, + 0.26093787125840806, + 0.35026847559903596, + 0.6528423312813869, + 0.8744053849895144, + 0.9611372962147997, + 0.03779552657364027, + 0.9214713943599442, + 0.016604320970902586, + 0.19982892789792317, + 0.4361813940309124, + 0.12978571394064486, + 0.6221905647538579, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7004838101367853, + 0.14211726999494212, + 0.7998408852853546, + 0.8782985930036267, + 0.6959106780670404, + 0.17281345409835414, + 0.38385841290136236, + 0.203714425415933, + 0.6764368348021307, + 0.9565881351105006, + 0.10936587034913403, + 0.6266634410552097, + 0.07456554223190837, + 0.4235651883029804, + 0.46711705089950795, + 0.5966109510098816, + 0.0225843702280476, + 0.9887677367889909, + 0.08502788350323942, + 0.337797267383135, + 0.38521104324865885, + 0.9797764544051829, + 0.18177771286409294, + 0.7250341276285767, + 0.5360141090381192, + 0.8729920816448556, + 0.09985314299399961, + 0.4535665518913786, + 0.4443354453614965, + 0.36374188804029195, + 0.14741533853050925, + 0.1218620957471066, + 0.49051685902100695, + 0.22107299956570137, + 0.8256229897753778, + 0.18595174624955346, + 0.630874201413209, + 0.9993332907535163, + 0.9729106904357516, + 0.8486738245641993, + 0.7806225775348318, + 0.7236230725363881, + 0.20784120008496965, + 0.4441711177478822, + 0.4533017182212946, + 0.47935505339565765, + 0.6192144041944005, + 0.6501414172364085, + 0.4798787202837653, + 0.1944276201657491, + 0.7424312741128043, + 0.822385534420009, + 0.9378512943084845, + 0.02061427907665525, + 0.12687276948547022, + 0.5305480649483519, + 0.31340956446587886, + 0.5707265014014935, + 0.22487029006756198, + 0.7855044221401776, + 0.30526988294644686, + 0.809203270306487, + 0.3916801847175707, + 0.2903849315904923, + 0.36166205829511244, + 0.6247319538703652, + 0.46458374739653474, + 0.6123777706834113, + 0.6693322726615967, + 0.6306378935655539, + 0.7837037633224293, + 0.8452980450668409, + 0.8025558162777455, + 0.7478252072768828, + 0.5846555980149157, + 0.7305688676265056, + 0.8257452901995608, + 0.9024914352067135, + 0.6252993714644912, + 0.2453921143156198, + 0.6448123689250198, + 0.5328652692956197, + 0.6950751911552967, + 0.8208428029183531, + 0.4279206617310426, + 
0.22948063393875073, + 0.5999146240368377, + 0.8625290061446751, + 0.7650160813607398, + 0.22375726216544867, + 0.1456483078695484, + 0.01523612609598124, + 0.5419171954630608, + 0.46073156590275344, + 0.7748832993402146, + 0.8353604150821978, + 0.11273117860564708, + 0.369734882137877, + 0.2868091414051974, + 0.8401682582137678, + 0.34287107711659814, + 0.43910477439374584, + 0.5624944055111051, + 0.43238121230078363, + 0.9577446432151466, + 0.2661133945230454, + 0.9073858174713302, + 0.15717293533008292, + 0.47064337951537916, + 0.965569299717643, + 0.11654448019677288, + 0.9660537711594184, + 0.9992687859252543, + 0.7110835428501012, + 0.7999388623453999, + 0.4917919572408409, + 0.6778283103105175, + 0.10744537245405494, + 0.17312317145315292, + 0.33179498874334423, + 0.6591398196747612, + 0.9692691635607104, + 0.6869534043366224, + 0.44153821320886544, + 0.24381333321285958, + 0.1986586815233018, + 0.13539717812752539, + 0.9205659804386037, + 0.057081308493441174, + 0.09684503508446674, + 0.7275464487575685, + 0.8253970211230612, + 0.76049658600806, + 0.03527823843358757, + 0.26364003985900264, + 0.17975708729355333, + 0.8498265045440788, + 0.5333215931349258, + 0.37920147657949965, + 0.0037458575600464172, + 0.7108519713439975, + 0.21136017914415994, + 0.25963438580982856, + 0.047165896784789774, + 0.7669531195629786, + 0.8628730794934161, + 0.6067369811297751, + 0.3698147244591884, + 0.6099227530568464, + 0.6022939233400867, + 0.27540497346446446, + 0.896779247436195, + 0.7785146687257701, + 0.9874939038858579, + 0.3802741205067254, + 0.2458833558774125, + 0.415536874902104, + 0.9662563155132512, + 0.5154284907713945, + 0.16215029165085515, + 0.5676699449111512, + 0.46245179318705176, + 0.5729965596000018, + 0.33919865789353376, + 0.9534870041388331, + 0.9292656259082479, + 0.16183836452092126, + 0.8790904486702958, + 0.08353643199886873, + 0.9988976692260609, + 0.01685422866909736, + 0.7416782150313002, + 0.8935974674263123, + 0.800097586650271, + 
0.13022574807305098, + 0.7630796863199797, + 0.629246729906571, + 0.8592963156656919, + 0.9416161602857652, + 0.52090449144065, + 0.8061351183382518, + 0.3063851229351757, + 0.3151916842899276, + 0.2737617742477172, + 0.5673222295197347, + 0.40968039604671935, + 0.16856581984046648, + 0.8504337218365385, + 0.8374378192802576, + 0.14278879657171628, + 0.30207814005863987, + 0.13700805781852732, + 0.47706519398982994, + 0.05845266770677893, + 0.9520404706561949, + 0.6661191948804852, + 0.3135908933117898, + 0.6478418605779962, + 0.9302003981020814, + 0.48148756105293444, + 0.028756677806177522, + 0.3721086015989137, + 0.5099205190232431, + 0.7103156251011279, + 0.841213225542713, + 0.4161040834839912, + 0.14681776332937668, + 0.610931945833276, + 0.19950920991703436, + 0.21861187933028492, + 0.40670596199428977, + 0.5468958323467437, + 0.577490163908776, + 0.18123581682236667, + 0.7473549812959448, + 0.7653001575845888, + 0.33719498936844294, + 0.7974920656308787, + 0.44528751549689904, + 0.8567806684057309, + 0.12841947670897202, + 0.8047195534623773, + 0.05903582087511794, + 0.17480740656268967, + 0.7174508285554029, + 0.35302641969598847, + 0.6682564382381969, + 0.11255285535319326, + 0.12623152983419705, + 0.274580874337351, + 0.44922175968269584, + 0.26880411882189026, + 0.16771599840844842, + 0.37204666831048017, + 0.5255654135159391, + 0.4638229962277397, + 0.3681037649382465, + 0.011471745282361723, + 0.37974178219264954, + 0.03739482678389261, + 0.5037478800738217, + 0.6338801598932757, + 0.6131086295356114, + 0.7320548544220421, + 0.6535524651969317, + 0.8403928580596725, + 0.290759136512585, + 0.06207883637394862, + 0.056422690172737267, + 0.6256315495778213, + 0.848130865225326, + 0.2613138334139983, + 0.1171128433802966, + 0.31689436936338744, + 0.8941072542226942, + 0.41828651759010016, + 0.7755401868853358, + 0.10160167976213674, + 0.1641702452017021, + 0.790440054096459, + 0.359584999961559, + 0.9566116078779778, + 0.7630771259013419, + 
0.9641308083958836, + 0.2539597095707329, + 0.11069342503585078, + 0.9788332950206527, + 0.9678808730797632, + 0.4420128873271528, + 0.5187960428319506, + 0.04385505716381355, + 0.6606389977925835, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.36041651869823677, + 0.834219269038311, + 0.6287830125010201, + 0.561013533589659, + 0.5306944127049913, + 
0.27032316004138224, + 0.8479013768678957, + 0.5825678383174339, + 0.38664004715657774, + 0.7144285653376025, + 0.3218325792005262, + 0.2538377609164546, + 0.17110220385347885, + 0.8071002143734636, + 0.8728020487005934, + 0.7838949411667171, + 0.3683646927712847, + 0.2890403053344216, + 0.1979185986583578, + 0.5098172940206056, + 0.37377102582022304, + 0.5813649475371736, + 0.5166195591725397, + 0.006824005446752879, + 0.0031080575293124246, + 0.9374139853766337, + 0.2011020699485454, + 0.849759018857136, + 0.8365785682515359, + 0.662962770618595, + 0.5578297457525212, + 0.9752332441831983, + 0.7724118357267153, + 0.5051995035199153, + 0.7066606458053339, + 0.32670345047704075, + 0.830582896259954, + 0.9960335104817426, + 0.6531188425102978, + 0.9241692865061705, + 0.5591180817318233, + 0.7252170060411147, + 0.345416437315765, + 0.3356408944950159, + 0.10817173773651945, + 0.3573278220628534, + 0.5114474169991241, + 0.41193049178171437, + 0.6664467523488816, + 0.7685055515515048, + 0.949924909556279, + 0.9748323617965556, + 0.03514812256652411, + 0.02305153696347262, + 0.24290476111330428, + 0.17651995159467826, + 0.9793977177472067, + 0.28619462311729715, + 0.22853652979201688, + 0.17397597569131207, + 0.8358589728687413, + 0.7556874446974421, + 0.5913083995219119, + 0.7863628062148216, + 0.48721639857078214, + 0.6309815139491525, + 0.40809768012635916, + 0.49207126700248927, + 0.9166477132996476, + 0.5625129987609392, + 0.7416832678019736, + 0.6820458838442376, + 0.14221238621459364, + 0.12833157798394623, + 0.6798682808003216, + 0.9160702541588075, + 0.4968272468215459, + 0.7561095909230511, + 0.15128113374231256, + 0.059471198992037344, + 0.13664647838204846, + 0.8530634614711372, + 0.5451451899248743, + 0.20206778135764203, + 0.7114870443892225, + 0.8274539984340226, + 0.669997195219382, + 0.7344419633906124, + 0.8333849128373928, + 0.2518127896573077, + 0.9176036602601374, + 0.040409531695097756, + 0.09740247982122907, + 0.8320656428752502, + 
0.14536527022409018, + 0.569627027414568, + 0.7583197618393416, + 0.775743751009495, + 0.26360489467441417, + 0.17221568362493245, + 0.4020919151946982, + 0.5327089071792688, + 0.7043783427919248, + 0.2980205213881637, + 0.014781443106305248, + 0.4036979926565276, + 0.4089171199138254, + 0.8757831629583652, + 0.9652969723451004, + 0.5710383447043011, + 0.06660506917073905, + 0.7365861656463706, + 0.7271540958138566, + 0.5388915563531502, + 0.27309448219682164, + 0.9056123072645033, + 0.33983491209816785, + 0.4548763890070494, + 0.8438089402784784, + 0.9995401378644633, + 0.8924227685665435, + 0.7411899554362333, + 0.4826634506645804, + 0.41086524086667275, + 0.5982548797330604, + 0.7266361258294035, + 0.5899669993632768, + 0.45285839554288065, + 0.31653853745025673, + 0.9157578011705529, + 0.5294282836269889, + 0.3341362487031918, + 0.667812647604594, + 0.3361761444646123, + 0.04837772462540435, + 0.9698870801469252, + 0.259411785109229, + 0.6391019385586378, + 0.892826659752609, + 0.0975643079100762, + 0.09283322151259488, + 0.9344730157914692, + 0.9548421435870874, + 0.35087639374767654, + 0.956756591780543, + 0.356946405392665, + 0.3430964469249539, + 0.03971130386484134, + 0.6534612299966921, + 0.6092817163984496, + 0.5456279938271088, + 0.9394513674332534, + 0.9596273094393966, + 0.03986309368705576, + 0.08473543968120212, + 0.92198350798494, + 0.7681033804571532, + 0.3475921050297659, + 0.4902370966739692, + 0.9744776715941326, + 0.08411457415218082, + 0.10878027933422285, + 0.7091610274133524, + 0.36549223445573353, + 0.24469021663074253, + 0.631252117636725, + 0.5929903865860452, + 0.3531069980525763, + 0.9078414992111065, + 0.6668266814522485, + 0.30202329743218004, + 0.08192251839319975, + 0.8637822242725997, + 0.7748962801848931, + 0.5016816804608331, + 0.0796699774233578, + 0.5788203822489756, + 0.7808485394139456, + 0.38265178931731025, + 0.6968418788110982, + 0.8176952134719885, + 0.3695240633707595, + 0.5102578632878215, + 0.09831391345415752, + 
0.18996568867856012, + 0.26345413390715355, + 0.08198584774862816, + 0.9439347419972917, + 0.7532308700636157, + 0.6336519344349677, + 0.4378864467393364, + 0.020661211944400737, + 0.7460601981458868, + 0.13126514680785484, + 0.13744051633736876, + 0.33982383443687547, + 0.012212210636933896, + 0.6589254168063957, + 0.45805718423635333, + 0.6660241915206475, + 0.4346046867850909, + 0.3092124842501628, + 0.2939756693550043, + 0.3966199170077277, + 0.08772881532383214, + 0.30896682477774784, + 0.4244544218434081, + 0.5700554925779454, + 0.35341666049248954, + 0.33446316025164335, + 0.17882164153575442, + 0.8674420771845556, + 0.9910690864914661, + 0.6487461787840058, + 0.592736484147044, + 0.1908163922568058, + 0.5511729553074728, + 0.6572968446380479, + 0.5407626160195672, + 0.5318468028843031, + 0.8553109952295498, + 0.639451746753038, + 0.7511342907260626, + 0.3453481921299084, + 0.28532830678016485, + 0.3059552785109415, + 0.1701658522960623, + 0.8788508596307834, + 0.4960632243651838, + 0.0028246048058131468, + 0.23120266878636297, + 0.34869256731855436, + 0.9158968009013373, + 0.4745168442433033, + 0.47106772833766475, + 0.15056332356432933, + 0.9207489548928733, + 0.05655688357918853, + 0.5744539184349109, + 0.30885069322495196, + 0.9863959952464284, + 0.7269563633639569, + 0.3754745222006257, + 0.787881179725703, + 0.42021503058434306, + 0.6514215154032429, + 0.6141967510191293, + 0.10839159692699929, + 0.4384525209987299, + 0.8418096703331557, + 0.09277368067405634, + 0.4378697360013303, + 0.08933331980231651, + 0.5988944537735662, + 0.6126703393285171, + 0.3825944151917511, + 0.8579613387661763, + 0.18402623113654548, + 0.8868153174153982, + 0.49208740489696723, + 0.6923896603230232, + 0.6803455462291529, + 0.3037063016949344, + 0.37416593433648515, + 0.5265199448379074, + 0.4574962331503959, + 0.11050931708758516, + 0.8591156144008775, + 0.4832292917120099, + 0.9048793860270714, + 0.40521711810734307, + 0.13824219174802266, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17120841579440182, + 0.24676409317301617, + 0.9536150451449644, + 0.24754363198257268, + 0.7240785599233878, + 0.9383850288709271, + 0.9904079475485486, + 0.3336339705602116, + 0.7218300891499372, + 0.822601033805411, + 0.8036271862155637, + 0.5510284103303851, + 0.7318691639972379, + 0.2239611653225425, + 0.9454461048469047, + 
0.43681834186533397, + 0.9814076029319321, + 0.3354075766104766, + 0.0695887234362278, + 0.1261863771743933, + 0.7570452557227183, + 0.7137980549614695, + 0.9250061471475929, + 0.922537120356813, + 0.8520119657559687, + 0.5638289750586138, + 0.05075907918137357, + 0.03211268765191422, + 0.3206632152155907, + 0.01564639261998857, + 0.8819780661714863, + 0.8692538442562803, + 0.5551154847619847, + 0.571417552799163, + 0.02608483547574192, + 0.7314471168807961, + 0.6013084103181621, + 0.5955416255379431, + 0.28504239520266494, + 0.46093894399498114, + 0.3707218789514428, + 0.03754648151316775, + 0.6954794891774735, + 0.2988625100787702, + 0.7536018978621167, + 0.615168393428636, + 0.30915939329329956, + 0.43158863314991236, + 0.9173017491898853, + 0.07769462199392196, + 0.1489182022720522, + 0.8110850180524334, + 0.5681616271802253, + 0.5520746535140655, + 0.29093271089585526, + 0.09928204197631119, + 0.1148853503802364, + 0.09800077183308087, + 0.11927063162505802, + 0.5580381384843661, + 0.24531749042414175, + 0.08483244025053827, + 0.007665503601882118, + 0.09548041944429964, + 0.1344367542682564, + 0.2247221057841431, + 0.8188285363764694, + 0.9704421387726528, + 0.9026940823656561, + 0.9207688505694681, + 0.8618449716250456, + 0.26308236733193624, + 0.4407433063464362, + 0.8588179254512341, + 0.019774309130496492, + 0.8548152965642551, + 0.20503959714688125, + 0.39011732698405344, + 0.2933141398479888, + 0.7927036104730493, + 0.3959795782509259, + 0.5622612391668614, + 0.5304962796200833, + 0.13260084780482972, + 0.35727714901271534, + 0.5990466564146282, + 0.818232545165206, + 0.23446391750979156, + 0.7735989065236966, + 0.40742460909475664, + 0.15528872006743177, + 0.6101948804292825, + 0.4026922867735627, + 0.04097588737924973, + 0.8749579356257668, + 0.6115414955966363, + 0.19978057387940917, + 0.7436186401601469, + 0.6232112266277363, + 0.49767739449206605, + 0.849442271764126, + 0.23025025663812915, + 0.36049319993342555, + 0.7048648588208937, + 
0.13794744673886972, + 0.6110374641577164, + 0.9128949699826016, + 0.3852016626508601, + 0.6364470204499091, + 0.2147475319145944, + 0.6623650914360045, + 0.19177177950154034, + 0.5706994113812855, + 0.902722478502479, + 0.4895440425887444, + 0.4878096368906051, + 0.33296705592292763, + 0.47418710646330475, + 0.2765980640037464, + 0.35352377242553856, + 0.4074685407237545, + 0.2888762398589546, + 0.5608173312227089, + 0.2520999804714785, + 0.1593408222834406, + 0.42555196411278373, + 0.5764194562940069, + 0.28052118140355986, + 0.43616496265596105, + 0.2186860749737276, + 0.23880618995087355, + 0.7764042173362877, + 0.5676124594920723, + 0.4992654627998311, + 0.4197287035227817, + 0.5167178904894868, + 0.011546974510698194, + 0.38607626685978047, + 0.6518200424841971, + 0.7119080852307973, + 0.24458207906468254, + 0.9468980372448998, + 0.35781621241780504, + 0.58499139075177, + 0.22151912979593635, + 0.9877094584925603, + 0.7889940006810512, + 0.4791843021003044, + 0.11417823562383311, + 0.021032121111034474, + 0.8306184528273366, + 0.7897481242350088, + 0.7108045687053027, + 0.714708236064929, + 0.6112066055221825, + 0.8523059012957204, + 0.19611407476860865, + 0.6746891242818407, + 0.03250818007567924, + 0.582986780449661, + 0.49377832061532834, + 0.625735250081112, + 0.40665768450540174, + 0.9788362114708161, + 0.8796968875308481, + 0.6526301414388473, + 0.9949665306252902, + 0.8850138215960075, + 0.014763874241696229, + 0.3047383564874385, + 0.7323801755395655, + 0.6895588057137034, + 0.36068534617339865, + 0.7395443995027574, + 0.7286890321020372, + 0.9237908156937465, + 0.8488414832309223, + 0.4572072966709049, + 0.3352195753342281, + 0.6898573579857727, + 0.2972409764312337, + 0.6600822788051341, + 0.7191590173111412, + 0.10710761533914204, + 0.19239690076060478, + 0.23499917039751883, + 0.6617196203460759, + 0.27116239353946736, + 0.3803087723840173, + 0.5054425767829615, + 0.3829623506229185, + 0.46035155013734397, + 0.02037191464805399, + 
0.5659951160507525, + 0.409293758363377, + 0.6239404498635375, + 0.5358015363976425, + 0.4732171393114559, + 0.589860982512306, + 0.18946803604705786, + 0.5475395269723, + 0.5938281014096896, + 0.6567539339733838, + 0.02679934240278059, + 0.6365500133050038, + 0.1515170512735472, + 0.9216274976985206, + 0.4571067809154392, + 0.30360945157955954, + 0.823956611686349, + 0.5632009922298158, + 0.7180043873262187, + 0.8412976480351794, + 0.8600931750711821, + 0.21695635453907858, + 0.2101569785277284, + 0.31756020036638155, + 0.8018821469715728, + 0.4534779231151662, + 0.9324830833221264, + 0.07230266227363613, + 0.4284208769986033, + 0.5489875817497797, + 0.7199984295422818, + 0.15516287203321077, + 0.42971780541472393, + 0.10315889274994106, + 0.9550871126941947, + 0.5134973073303452, + 0.5050032442107331, + 0.2262301279083785, + 0.4759973183178495, + 0.9073009456383606, + 0.3616333490092911, + 0.47164103013604886, + 0.5289088129346182, + 0.675003005108912, + 0.9922538098241769, + 0.34231326089884595, + 0.925970822758673, + 0.18362061085193604, + 0.8535516792370562, + 0.5679926252045895, + 0.05964404645455801, + 0.4127886609804259, + 0.6497879508246858, + 0.44140532487451567, + 0.8804845963062241, + 0.7993988428628335, + 0.3670814624164852, + 0.8962522595916385, + 0.45180535449961234, + 0.7083697174259761, + 0.6954699211462542, + 0.8653394665212267, + 0.7614027282578862, + 0.35363249983392486, + 0.9069144222713916, + 0.6119322638951157, + 0.640562112236525, + 0.5246978941866212, + 0.5733935619533868, + 0.03206077695045906, + 0.01990837637122178, + 0.46123162085806113, + 0.6721719863283541, + 0.38120544314893356, + 0.0285868917052019, + 0.6189747999809762, + 0.6422258554477175, + 0.9943813587655478, + 0.23105835243541117, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3111012602083213, + 0.6598837277308733, + 0.8044245582700752, + 0.4606721900167673, + 0.9408737943208687, + 0.47350179636159306, + 0.03550029963290502, + 0.8021121176970554, + 0.38614496242342633, + 0.4469683001113818, + 0.33248902670941627, + 0.3723289401247849, + 0.5872592657410375, + 0.6857498932874878, + 0.0005544396012938657, + 0.40137429221874177, + 0.40260618408882554, + 0.8898081274354233, + 0.8097383632465782, + 0.06091987428217416, + 0.5399616600449335, + 0.3045216529564212, + 0.27073100604147626, + 0.04169389103344068, + 0.40700338285058846, + 
0.2727673702796928, + 0.27917012326359547, + 0.554257265934591, + 0.22839932874366842, + 0.3226998675961492, + 0.6839395473383374, + 0.4792099190040625, + 0.17773481296521387, + 0.5243670287980848, + 0.48148138531886664, + 0.4780587112592941, + 0.4916182512866716, + 0.6120883067417713, + 0.7045313348005063, + 0.7169158360247077, + 0.4196416940209917, + 0.5140562631905672, + 0.6099656313302542, + 0.669766773875195, + 0.7746435316043357, + 0.421896498374021, + 0.06250832538222273, + 0.09227551877344364, + 0.694135267124367, + 0.6041089549577633, + 0.9322045891553753, + 0.8680415692959852, + 0.1129263390569143, + 0.9257015333325467, + 0.3160127059554184, + 0.08718520103601202, + 0.3746838165756712, + 0.391786671938142, + 0.7157034991892711, + 0.04324453367062164, + 0.31506504905359833, + 0.6404579579008204, + 0.5553286359038312, + 0.5560880023462415, + 0.1887944424218605, + 0.061088724940269956, + 0.8254461306038644, + 0.19847971790522323, + 0.7398024918244763, + 0.17513817551614608, + 0.05092505280837156, + 0.4153407892174844, + 0.09579296648497204, + 0.31651144775946993, + 0.5020612990879311, + 0.019029489908541586, + 0.6339025873724357, + 0.7477498623206709, + 0.15571888043468973, + 0.9138651825969223, + 0.8901939529401085, + 0.1578742270253738, + 0.0873050255447223, + 0.37273573423992434, + 0.5852357357645377, + 0.11362529375648744, + 0.15977908322972212, + 0.7466789426089376, + 0.4930246833981383, + 0.28056618353981855, + 0.7529453894692695, + 0.6993674579793832, + 0.6322458864887919, + 0.89765319546332, + 0.696364377199371, + 0.17591985450367564, + 0.5601680769016817, + 0.5277619249234302, + 0.02897673501529685, + 0.8262021081597541, + 0.8468437998305749, + 0.3042403932758301, + 0.6380049930044317, + 0.039159864911258446, + 0.6236013281003396, + 0.3274459233331143, + 0.5270016494576781, + 0.11545055159651485, + 0.8448880299332934, + 0.35588857689161657, + 0.9910886532410411, + 0.6701082359825398, + 0.40425893993902073, + 0.43829584348459527, + 
0.11548158007973064, + 0.3066359885873061, + 0.5726871714367635, + 0.006522025987297897, + 0.8221906592313165, + 0.17638146385127207, + 0.9862951952239308, + 0.05996678309162495, + 0.9386818972170392, + 0.3042513451812904, + 0.9698978138205689, + 0.7141631816479277, + 0.8581814906898508, + 0.5263125206029599, + 0.3172253366789366, + 0.5059700125959298, + 0.2706026363671554, + 0.5521008370409946, + 0.9887094110644128, + 0.4445141491022333, + 0.03806040310382919, + 0.9538105845281649, + 0.41479023583927566, + 0.6118979335816942, + 0.4289718978976914, + 0.931642372603253, + 0.7492527219715809, + 0.33307250821193424, + 0.5953320000760146, + 0.1613353654204155, + 0.8695944550556686, + 0.583329731185928, + 0.6644302721854632, + 0.8063472246920956, + 0.913062446521641, + 0.5620976940039826, + 0.09747866884483236, + 0.3603543821900498, + 0.36527705139863653, + 0.9252405494133178, + 0.8685730450479259, + 0.5591503900888554, + 0.5173376718101484, + 0.3113374834249292, + 0.12326813056732122, + 0.16855181451904921, + 0.4941301159952066, + 0.8454669290340412, + 0.1629687812514684, + 0.8710259670292382, + 0.9342095456865167, + 0.19835802646630218, + 0.027569329714363544, + 0.9839038093322251, + 0.17274593484635548, + 0.481769245556417, + 0.9423508904927929, + 0.10652631262263978, + 0.36036090117244535, + 0.33177305576746974, + 0.5263917597266183, + 0.23497771081483432, + 0.12062272582698275, + 0.17432456966039822, + 0.33403607376064093, + 0.5095401920748936, + 0.07754944950252463, + 0.3896743717075899, + 0.9258101560031783, + 0.3576315841129001, + 0.6472207926778464, + 0.544365295209721, + 0.8761310051370164, + 0.9573648788529169, + 0.48805794749519604, + 0.8141282194617936, + 0.4314584744791835, + 0.7623662326579758, + 0.20885645611185755, + 0.4726918839808477, + 0.34762191471018733, + 0.7986167858838316, + 0.4443004295045597, + 0.6929273414953148, + 0.3217919500963162, + 0.3733472673422079, + 0.7718325188263199, + 0.0489769186608936, + 0.29042002312156945, + 
0.5164997555615614, + 0.1342831717809242, + 0.4376510140877625, + 0.6760205263893488, + 0.03375334847886591, + 0.32207082748055327, + 0.22123986869422485, + 0.6654988331348484, + 0.0034702802374645403, + 0.5906138559947207, + 0.33239649060349685, + 0.7468745355158487, + 0.18646045342165707, + 0.4728266709022514, + 0.4708545499391589, + 0.8435776750579944, + 0.07890356217660877, + 0.4572433834414129, + 0.9521839748237815, + 0.9805502060045745, + 0.9612302205534416, + 0.6933036021889717, + 0.17333437554409659, + 0.7460070857528022, + 0.9269269913872479, + 0.2826794284901133, + 0.3260274482123703, + 0.319328950719343, + 0.26041958721495495, + 0.058004606645780776, + 0.798263508700279, + 0.4415854175435997, + 0.9072351602315323, + 0.08109303897767906, + 0.41217768775258523, + 0.7035636148980582, + 0.9594597891911049, + 0.7601945732076757, + 0.4728946140815581, + 0.7335881364552677, + 0.6388704889727459, + 0.15233585178738585, + 0.9927516506451334, + 0.2737124355041701, + 0.9413924600940591, + 0.5922518768547997, + 0.9997782794709145, + 0.3280453429268567, + 0.07604347769608344, + 0.6316112686161911, + 0.028239838981191756, + 0.5815304378349276, + 0.17657763720871733, + 0.044697392809522896, + 0.6746240565268825, + 0.7836905967023933, + 0.5483650372238402, + 0.9120893867917101, + 0.5543415576823114, + 0.26588339010875306, + 0.846614144738822, + 0.3353983799725999, + 0.5266610728808607, + 0.32805008435501715, + 0.7003900652644465, + 0.1928349821289902, + 0.6883659723332324, + 0.5233485245426599, + 0.343891035523577, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2108568990053281, + 0.8130884183622571, + 0.2979102609086396, + 0.15606753675477536, + 0.7752651025076657, + 0.1878326123802151, + 0.5748531044001729, + 0.5940203198372817, + 0.5690574636850873, + 0.6656576506377743, + 0.8335823741701612, + 0.8801002863569317, + 0.46453609235193016, + 0.7309278059084576, + 0.7210266310300115, + 0.9031317162919433, + 0.3364838531266461, + 0.3372389590811845, + 0.20242075139504323, + 0.13627177422745584, + 0.4365061658395254, + 0.12368498617336277, + 0.9881684742546766, + 0.2313175487855773, + 0.07104922123194513, + 0.8232047191609424, + 0.2520012203321753, + 0.6107489839813416, + 0.02909398869545532, + 0.41757380393516597, + 0.4553199856685639, + 0.5209286507313109, + 0.11116497983504847, + 0.8801979013903768, + 0.6308040496182686, + 
0.48756484632735764, + 0.4321089498057512, + 0.5366724972603243, + 0.5698486454313082, + 0.4454232374969589, + 0.22158359078917667, + 0.7898464114044067, + 0.9366182058978678, + 0.7759858368909576, + 0.8231118518743965, + 0.5600749778204993, + 0.45034256302789444, + 0.35503377813850834, + 0.8833894434336861, + 0.042984279427440764, + 0.9270879507334671, + 0.4582096265522544, + 0.2630724753590308, + 0.4015713567566792, + 0.9592638683873683, + 0.9572113494446665, + 0.2779275763216398, + 0.800213043207197, + 0.4258744687145989, + 0.9758879795439324, + 0.3158338658241946, + 0.3447772771491663, + 0.9746418545233345, + 0.5127947919915399, + 0.4600334563063194, + 0.7097337090682005, + 0.5193592698834862, + 0.08831411721637483, + 0.6410293801355194, + 0.478066524133525, + 0.12705650699996474, + 0.23868397496750438, + 0.08544569817220382, + 0.07194530529495269, + 0.1996009431050395, + 0.026242611743799604, + 0.015614986936861919, + 0.7080571519865836, + 0.1559101207769158, + 0.314034130582058, + 0.958019660901518, + 0.0814756957093864, + 0.09518922592288293, + 0.7632283101577293, + 0.5652713210043107, + 0.656482385220244, + 0.1821414994404741, + 0.9240312046556932, + 0.3223622692128285, + 0.09976295787416634, + 0.694581010115562, + 0.8109795801219014, + 0.12559306626443711, + 0.1862477305996323, + 0.4710974856960465, + 0.6799716994923054, + 0.9707937123977975, + 0.09154636360078883, + 0.184435600244499, + 0.02797741131713738, + 0.9398366430432499, + 0.08128451465557396, + 0.3404105387119726, + 0.045157510546772084, + 0.6085360064683598, + 0.9726500093164777, + 0.7550021193863347, + 0.142615405116852, + 0.07350972816646983, + 0.3684919426938241, + 0.6748864174739727, + 0.12003429826155698, + 0.3963534066253034, + 0.22527340154919095, + 0.39342780802849675, + 0.029511777050217902, + 0.21958050038632437, + 0.4148539849835965, + 0.28741899750722677, + 0.12026425575659783, + 0.2275507094394459, + 0.6172403673579926, + 0.09107391429704748, + 0.34100339465861207, + 
0.7262801950906309, + 0.4078485614096443, + 0.8189753374030909, + 0.475973424469875, + 0.6435509378707076, + 0.8839499357077213, + 0.4463700453590007, + 0.7607992497077846, + 0.45247477547826354, + 0.46830393917091984, + 0.8248769541178915, + 0.06320300150779501, + 0.39370086150224026, + 0.32877090352709437, + 0.5411736112007386, + 0.8362647124554538, + 0.8214383831452806, + 0.9992788103760902, + 0.9700832504990944, + 0.029878397812445723, + 0.6841863126530163, + 0.9451737260947497, + 0.7687300247608775, + 0.4836044251840753, + 0.4015590918589206, + 0.48206962292530675, + 0.9501705620084879, + 0.43913115040206707, + 0.9893669704468705, + 0.1787383519554343, + 0.7593625966063166, + 0.7913231043578712, + 0.9141209729516928, + 0.0751126345361981, + 0.9378619415351117, + 0.6173847610379402, + 0.36245169039605396, + 0.07343567079567803, + 0.08593988677945608, + 0.6522222003611574, + 0.38331125211299344, + 0.2514376495449505, + 0.7462544756466176, + 0.7441211348712762, + 0.5381972925676857, + 0.14272285748915792, + 0.7550159639326586, + 0.6255897481619649, + 0.4016534611332959, + 0.6876053258257588, + 0.833357598630905, + 0.8251970165155197, + 0.37368579647818745, + 0.23613221737731493, + 0.1731180592617615, + 0.17085040117637085, + 0.45265698803227483, + 0.7037771193238919, + 0.30387021154632554, + 0.33642392601573956, + 0.5208126143641184, + 0.7217488225808556, + 0.7195045155415846, + 0.8350000536947025, + 0.572991105776365, + 0.2034030002535041, + 0.5370258527629953, + 0.8469731253688977, + 0.5354520776982417, + 0.9815290846929139, + 0.1806928628089064, + 0.9021095268566008, + 0.1824777450505426, + 0.2049599520138644, + 0.7316891181443752, + 0.4226785113938254, + 0.21282962530474703, + 0.7362919863903333, + 0.36194505140217215, + 0.13194363010991073, + 0.6361553043914702, + 0.6248411361407403, + 0.44527648107489015, + 0.29828458974382177, + 0.3849654845238749, + 0.690482753236433, + 0.6049544192717783, + 0.8821334437464305, + 0.5395044469777021, + 0.9420549010801207, 
+ 0.5467577202065238, + 0.5263586841009785, + 0.17940411248896482, + 0.43304795625468573, + 0.8651320422128966, + 0.8855679365824665, + 0.7210276396962081, + 0.24531078815802299, + 0.8092866880793879, + 0.038674129987527395, + 0.46892815849390534, + 0.2759794348497405, + 0.10684971414479749, + 0.6079746774750799, + 0.15241090515727973, + 0.32802440380973774, + 0.8234489468009617, + 0.4372834941523561, + 0.36826508650075673, + 0.7124964227990281, + 0.417142640384067, + 0.7496880659014675, + 0.28316629596911624, + 0.8854601917895091, + 0.08446757247159808, + 0.6894306836697651, + 0.23618297415897105, + 0.4170426322035621, + 0.413820958232727, + 0.21092625798587494, + 0.869737588686086, + 0.1168765737873787, + 0.24052718062954348, + 0.06996075838814153, + 0.016104271412145765, + 0.009694054497233928, + 0.7001381695568094, + 0.1974426158414404, + 0.9286396438149648, + 0.45957304568000146, + 0.6533677683246052, + 0.8022877763461331, + 0.10342694193424029, + 0.49404998471487127, + 0.8031307423689796, + 0.825916723970538, + 0.5997374570749519, + 0.6992622579437291, + 0.6841759063083684, + 0.4557765640424808, + 0.10807769443396598, + 0.3767839449361807, + 0.9668663378801738, + 0.8815449332315608, + 0.2396905791925743, + 0.15932971124280404, + 0.5709920481858723, + 0.8973748137717328, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.492813912885392, + 0.16882656155304765, + 0.2801474548420625, + 0.9747082071259978, + 0.6938786814292451, + 0.012585292018807648, + 0.4286130784144355, + 0.5141013083273842, + 0.47183605415452234, + 0.26630784526857365, + 0.2356682034874814, + 0.1644148700165221, + 0.7479847297205104, + 0.38789177968838173, + 0.6662843687119275, + 0.5766856057261079, + 0.42836291340104327, + 0.9785787946765795, + 0.8802332212266368, + 0.7174845879037445, + 0.5390875927552676, + 0.6671757942343677, + 0.8464288874189271, + 0.9266648759519832, + 0.48372259475122814, + 0.9346027753135807, + 0.9020039580867859, + 0.726223076382968, + 0.2656075933607587, + 0.0008073145459028819, + 0.32012453776257244, + 0.27276209170071464, + 0.08388471898169403, + 0.3455452129171409, + 0.3983498675776338, + 0.16115885380524853, + 0.17121007923702924, + 0.9238135456822758, + 0.002003939753644879, + 0.19953648296076, + 0.2219900256462357, + 0.38011699449751846, + 0.8363943726634937, + 0.983648127721609, + 0.4867975298334347, + 
0.44979767898714806, + 0.3842878917402419, + 0.5125491665122981, + 0.9443346878887307, + 0.49104873953578954, + 0.4000824583064799, + 0.4995576112155681, + 0.9348229216473823, + 0.3390631822151424, + 0.26505658560426537, + 0.9895438423213186, + 0.11808993549145452, + 0.8648343180496709, + 0.23376089325780725, + 0.14978408092510787, + 0.18273049473673608, + 0.7961270911985423, + 0.45150510115664044, + 0.7659760339624151, + 0.5280640854808734, + 0.2073340620595716, + 0.5952104622986186, + 0.8906353143652719, + 0.6618639075280947, + 0.42198702169509017, + 0.8085584603221163, + 0.03254451880979237, + 0.54952656795131, + 0.13696978863016307, + 0.7152059978606942, + 0.0627806211257308, + 0.9771306149446537, + 0.37204307676191306, + 0.017029972173568608, + 0.2608915153611455, + 0.220807779759849, + 0.30383790045962555, + 0.6633346421171146, + 0.6173222623180591, + 0.9062259888354912, + 0.8998696087333117, + 0.2521616143418479, + 0.8063916181716487, + 0.044274160853165045, + 0.48260867475004554, + 0.8118494908235618, + 0.6435646051031143, + 0.32390327309999245, + 0.8805650817441983, + 0.5567832376578825, + 0.23737775852564869, + 0.100285898322934, + 0.8592208182446371, + 0.47211041129385045, + 0.1740617558512425, + 0.389642200487166, + 0.8574562396166349, + 0.6037402270698863, + 0.6092400516555709, + 0.13178543622219463, + 0.36897387015675365, + 0.522858439197307, + 0.7068845959118953, + 0.9364207202505184, + 0.6797124765132581, + 0.17592715514802293, + 0.4223940470468984, + 0.15539988881750266, + 0.1614648036783346, + 0.20850155808931647, + 0.905106955267205, + 0.6781213286269252, + 0.0069245931950348005, + 0.95198283964587, + 0.08096035197485107, + 0.10223509532411146, + 0.3681036227743386, + 0.8789722467960559, + 0.22224805706308348, + 0.9954867156798857, + 0.9391605889195155, + 0.12746371278869018, + 0.14802852633152375, + 0.0853182608183699, + 0.9810050527585037, + 0.8443761891921956, + 0.2851201794415653, + 0.5399042952399655, + 0.061335986073354776, + 
0.24280540851751664, + 0.4970246247239263, + 0.9478108326140662, + 0.7258879755754656, + 0.14432628756419108, + 0.7025379059773825, + 0.4138903752062618, + 0.7853430507915093, + 0.7823002107182094, + 0.786343036995969, + 0.7012294431180773, + 0.8315379475578711, + 0.8713750674688749, + 0.17066866095976363, + 0.4143368691096895, + 0.17954298805181967, + 0.5237624049551446, + 0.4977056165126006, + 0.9813177822011769, + 0.12368626375567482, + 0.6831055023883971, + 0.7671733937326713, + 0.26602582492560944, + 0.40205573121352856, + 0.07938684264490936, + 0.11967164780855066, + 0.9898740414608184, + 0.9166092655465123, + 0.49066173812254776, + 0.41643556211146604, + 0.16066443230022687, + 0.5993052769866493, + 0.6371201131747636, + 0.5644402318460526, + 0.38296405375145703, + 0.9632208582185809, + 0.7292560447209321, + 0.2422455397525054, + 0.40740484055113513, + 0.18465490756077885, + 0.2797387200581921, + 0.7861337723070215, + 0.5241850393949182, + 0.5581198341186189, + 0.10731210127948998, + 0.4555789707332867, + 0.893678242874481, + 0.2573906938056304, + 0.1851188003035632, + 0.05613790997615564, + 0.8775096765310761, + 0.6006673274179652, + 0.11948393555196923, + 0.23900807248025624, + 0.6980370072228764, + 0.9453616601757405, + 0.5282546844623429, + 0.43752732063885147, + 0.37707693171221324, + 0.6699502384969421, + 0.08158507934625181, + 0.9414116689067613, + 0.200092780098038, + 0.47394783343728475, + 0.9985351294686939, + 0.04856859926822765, + 0.3614757631552219, + 0.6805249467072081, + 0.7385205102413724, + 0.7851043245271349, + 0.6091663927955362, + 0.5264730740092183, + 0.9142030702747967, + 0.6768604572094098, + 0.5050074229833029, + 0.38587182031740364, + 0.3182176014915262, + 0.718355781012543, + 0.782133463957693, + 0.5711465941254845, + 0.43341924721023495, + 0.30389832264220895, + 0.21142710215953953, + 0.2641383172820556, + 0.7426624646754313, + 0.9815784096070599, + 0.36317671281129504, + 0.7043416308969769, + 0.8667042897168531, + 
0.18523004129106302, + 0.3997088170199421, + 0.5218472658141504, + 0.07122834882910778, + 0.5510327662592338, + 0.7030453425489953, + 0.40364621360491193, + 0.2975043024619203, + 0.947430126911431, + 0.40483603273066926, + 0.1849335093823019, + 0.47034011087882843, + 0.5769621451479601, + 0.23178761512677915, + 0.7532247997132729, + 0.12414606504559378, + 0.20414845785871094, + 0.32308222946529697, + 0.2643482802688778, + 0.37011257768537587, + 0.6635670678822588, + 0.2907309643846363, + 0.25533358268866047, + 0.5947376655456239, + 0.8529307005508067, + 0.3713334553910622, + 0.268269929364613, + 0.4934353559919167, + 0.30375770538268587, + 0.5916311699782492, + 0.8084522419324485, + 0.7496928489555684, + 0.5614546034547964, + 0.8391527877967326, + 0.8839022498546698, + 0.23681376365818507, + 0.9369116858322922, + 0.5187995753517216, + 0.2938105452548483, + 0.5937161150633878, + 0.16126708154990366, + 0.6066448483664519, + 0.7525295228332822, + 0.019385693875372256, + 0.053292699812170996, + 0.5222937301706758, + 0.4214988179069832, + 0.5975657442726912, + 0.9549593610810609, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6442961972740163, + 0.1764065658110866, + 0.3820534973069034, + 0.13312964973882813, + 0.8412849414939327, + 0.8341205461615239, + 0.9166528513943647, + 0.024710941581924772, + 0.7996737252717977, + 0.9484165615250868, + 0.49510248651461897, + 0.9576186243026893, + 0.7743262564982436, + 0.6412557129008742, + 0.44270703115635646, + 0.9805983407951738, + 0.4631464430375354, + 0.46307556705195085, + 0.06484413407338108, + 0.5089979163603118, + 0.1746944346828121, + 0.5076774552245807, + 0.011996949145422708, + 0.6624638318475884, + 0.947529023756998, + 0.1789127429187054, + 0.1160156279340131, + 0.6067115217790777, + 0.8811359304488259, + 0.9183842985462575, + 0.8182457128694718, + 0.5746533318307063, + 0.05009411873654168, + 0.4406449735157394, + 0.551840499307867, + 0.960133168625349, + 0.5660435675608625, + 0.1789304462515391, + 0.3575166338930088, + 0.8536301976490064, + 0.4212535595619563, + 0.6746917127146561, + 0.9185115659449252, + 0.2527127048251988, + 0.7882151846722641, + 0.15844810911134333, + 0.8699212612034424, + 0.3178481938972292, + 0.5030674592619186, + 0.059637880370541074, + 0.022990905350868673, + 0.959198122582794, + 0.23845110545254367, + 0.845244509611847, + 0.4039539728113667, + 
0.1873065783833996, + 0.06585920967432546, + 0.5456106202130359, + 0.3014803867868435, + 0.7457747934813597, + 0.15745329975285205, + 0.4229941855249255, + 0.9739326404506112, + 0.10285927134768647, + 0.2743202314238007, + 0.5527294188792345, + 0.5189039118565674, + 0.818366012123155, + 0.6384567175385518, + 0.44853357944386285, + 0.03605242057520419, + 0.9634708611467906, + 0.7240230287880906, + 0.6357428566154322, + 0.617911280565307, + 0.7613945393112975, + 0.4917722741095677, + 0.46631574234026296, + 0.9733543008593656, + 0.04034334305071574, + 0.4438361539195024, + 0.09425152922270486, + 0.8405980043269394, + 0.9740932057522111, + 0.7255390883382695, + 0.4447206353044483, + 0.7607555717360293, + 0.6109180570888076, + 0.11971360870324632, + 0.5850881474635748, + 0.40566051837253236, + 0.3197191445507993, + 0.3825232454894061, + 0.00621939507882463, + 0.02726957728766155, + 0.35185204285537897, + 0.9977365103670578, + 0.4864487864602529, + 0.3846280769862147, + 0.42313049554336335, + 0.07974266966580024, + 0.8164766396288451, + 0.7112645368767682, + 0.3358098441196017, + 0.13441459266166333, + 0.5669110257316874, + 0.8418444527724096, + 0.47678572581786627, + 0.6132926977635724, + 0.1950385007214145, + 0.27630544580981553, + 0.8338903697005867, + 0.3038827547616273, + 0.17550840088332098, + 0.1715330629962447, + 0.4809191891431963, + 0.7305535096558342, + 0.06526657927525559, + 0.908126605055545, + 0.3525461073448619, + 0.2757694827430912, + 0.14839504946155402, + 0.013811344218978006, + 0.5753915719540151, + 0.5458283541194745, + 0.1880793891162995, + 0.12430218271549875, + 0.47337485215991837, + 0.4659087885683486, + 0.12961710680824035, + 0.9563108415854494, + 0.7481408864601188, + 0.0132440960212582, + 0.8172675045941381, + 0.9737513438614959, + 0.6541700786625942, + 0.2556645241877844, + 0.6263435263131425, + 0.4912740568461218, + 0.47577803402806107, + 0.4396764417361617, + 0.7713957974702469, + 0.3075648700087451, + 0.9977985078200367, + 
0.3496225111697312, + 0.43031709993944645, + 0.7723199422937873, + 0.017301735789535755, + 0.16469577551754844, + 0.19138556592418154, + 0.4372942928153264, + 0.2352844194906839, + 0.8461719511206709, + 0.9076996469575036, + 0.9440194005664312, + 0.9204160518325147, + 0.11253712372167302, + 0.5179417894573418, + 0.009175972206771466, + 0.8826380338875994, + 0.6124950484645235, + 0.24148233188681767, + 0.14003650594716255, + 0.5428536357062527, + 0.8419344196055067, + 0.0323631521768829, + 0.1823332191456879, + 0.09594487577946043, + 0.024895745445476014, + 0.24485456927199156, + 0.5003802252469102, + 0.38556377043476064, + 0.9944255015198569, + 0.3953955785502524, + 0.8887338132608575, + 0.6154761781434394, + 0.7233257633122523, + 0.8952159719927171, + 0.5195940538392563, + 0.8690611790260024, + 0.34661896961474836, + 0.46389409823111905, + 0.019006003304453456, + 0.5556225295809254, + 0.5780879815166985, + 0.3614606838360451, + 0.4301475622444393, + 0.3498607729187615, + 0.41332238437456326, + 0.22396846650767221, + 0.4677720599765097, + 0.9945128268846568, + 0.6946083605747225, + 0.1841036550906583, + 0.10303210560868725, + 0.5258920864696196, + 0.6996651054043843, + 0.20666046496930268, + 0.3147479107066973, + 0.17151605448400897, + 0.41460764203434974, + 0.7962971370012736, + 0.5573003675458981, + 0.7779829547087856, + 0.7642852306299943, + 0.9442464218954505, + 0.7096377848472598, + 0.5187355449323019, + 0.7789394704337231, + 0.7492365344550502, + 0.08417873734696724, + 0.10159930549807805, + 0.6621177198391502, + 0.7922791278723976, + 0.8516564696700352, + 0.404219761336573, + 0.4115867806130522, + 0.23618765974799005, + 0.9283505880881653, + 0.7217386673623036, + 0.947011187251023, + 0.9427456511753987, + 0.25514781837772915, + 0.9954741538769635, + 0.46178049371317786, + 0.924213144892689, + 0.3817836996299664, + 0.46927976925156, + 0.4192629280369885, + 0.35964406815188354, + 0.18421314821010748, + 0.7959004922233122, + 0.01725025908093314, + 
0.7243107750010274, + 0.6498108571818509, + 0.8108887589535382, + 0.6584280707041268, + 0.478645460257597, + 0.7349367527895383, + 0.051867397643591606, + 0.24418762833017915, + 0.6854244841619022, + 0.5090147988631436, + 0.8476122996248373, + 0.755457134146355, + 0.7194674814307954, + 0.1086110445346784, + 0.6628454167968892, + 0.3719966887375674, + 0.3655889364418473, + 0.9765437956149597, + 0.573572154649218, + 0.9705955239223931, + 0.3247264555339575, + 0.8802044770314496, + 0.03258376368065685, + 0.08716326828281562, + 0.1231896654886625, + 0.9940291716379933, + 0.04834966426457343, + 0.5769018924373179, + 0.11994698491120714, + 0.5783320343627858, + 0.4886497962739613, + 0.5802713930701716, + 0.007883261144003617, + 0.06837822554482054, + 0.014850298511975835, + 0.22056074585745167, + 0.4226414397011886, + 0.679564526975614, + 0.49141104221819687, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3922617998854798, + 0.1910086842711467, + 0.46488224020181057, + 0.016071882683825534, + 0.33186510576034656, + 0.8769738105716012, + 0.4510169902196284, + 0.19374836607412904, + 0.4371704788769085, + 0.750090992133997, + 0.8757740083812433, + 0.4163593492107014, + 0.8396811846580285, + 0.03199218208780408, + 0.7685226209615665, + 0.35356918702679585, + 0.10236728003834406, + 0.9484090631319674, + 0.08214264728628962, + 0.9627225687023101, + 0.1721530585673765, + 0.4392752247295628, + 0.16121811719738177, + 0.6845493929953306, + 0.8478505827922405, + 0.658262704080168, + 0.38749684501963966, + 0.678967054038523, + 0.7664336534771249, + 0.10976367302113321, + 0.010736198574742062, + 0.13890633400886465, + 0.8265888580963386, + 0.34385217603414464, + 0.8453510643507857, + 0.055352349180295524, + 0.9988001159627045, + 0.18005492065212647, + 0.8830670187416448, + 0.11223495018555207, + 0.24183704849197063, + 0.9665796680808748, + 0.9729899977836354, + 0.6415583703587278, + 0.7454445290010497, + 0.9832721028893566, + 0.10124431371778109, + 0.07202312001767852, + 0.3224121774815898, + 0.1344870415162165, + 0.9797117889268059, + 0.14405210688930326, + 0.7385131268261163, + 0.8930837029323517, + 0.6496919403781685, + 0.8578995942564314, + 0.9668233953149054, + 0.8862702309297142, + 0.8609736645667648, + 0.5807430092405199, + 0.594683054416274, + 0.23144096553103255, + 0.5245043587726054, + 0.5818889522809639, + 0.8287424340622042, + 
0.507938149476136, + 0.01977220954934067, + 0.6101861016419559, + 0.7286680269191083, + 0.9499829868999502, + 0.22093909544907653, + 0.7750005349528613, + 0.9152740638129625, + 0.20581820371274773, + 0.1680739600864617, + 0.3669995942862181, + 0.8059675965895596, + 0.17061123148787627, + 0.3042017040162207, + 0.9463835044296236, + 0.035070667149747825, + 0.48208843113546196, + 0.9637136364467382, + 0.8779056770958061, + 0.1791914178802213, + 0.430343617416845, + 0.5559081870429834, + 0.07940070206811212, + 0.19571904093210168, + 0.3920280902337029, + 0.9717517000641311, + 0.48521749000567294, + 0.1993961285932777, + 0.974937800310758, + 0.48314875152685965, + 0.6500400786927657, + 0.3652575354719756, + 0.27813541875234515, + 0.23671703482565176, + 0.15608329894262518, + 0.33960873273145564, + 0.9175364653410282, + 0.46044429349843363, + 0.7801027584700448, + 0.9355766790785968, + 0.11357980729982498, + 0.7480057773178751, + 0.8945022467786367, + 0.30104246053171047, + 0.7588624495212919, + 0.15088638952458933, + 0.9940516369812838, + 0.9267043375497335, + 0.19791944435791675, + 0.4056144585417145, + 0.07432223402143268, + 0.5483552760268539, + 0.20879072691529266, + 0.8842196578677224, + 0.9981734790961896, + 0.5465778859516064, + 0.5975259255376344, + 0.6577866749209712, + 0.42957355903273287, + 0.5501865630967797, + 0.3141678075997135, + 0.6257977366948, + 0.7438357387487614, + 0.8728869219430866, + 0.23786602820686686, + 0.14699763121945975, + 0.4132289395991454, + 0.6077696769232941, + 0.29158269203287435, + 0.1465616718963445, + 0.09401327085732325, + 0.7933604748121533, + 0.639606862462329, + 0.3481223764093462, + 0.42634731483149446, + 0.34956514814140416, + 0.6558336608388925, + 0.051275753149904735, + 0.4441967202737509, + 0.08204271765010362, + 0.6903300123835496, + 0.6937513264579159, + 0.024997189316685375, + 0.7948503933753279, + 0.5707799218976682, + 0.0757257527786177, + 0.1121342349702219, + 0.511221775098869, + 0.8586265768729285, + 
0.20281366317848648, + 0.2615944739282523, + 0.32513960801078645, + 0.45514767882151164, + 0.1121297542502595, + 0.3968813868165332, + 0.1492255884234367, + 0.5961894060958159, + 0.4207670579418772, + 0.15524098270391595, + 0.8774733671820405, + 0.9783287496639033, + 0.7528756877946456, + 0.2600149014638874, + 0.49923463664953527, + 0.32968624769085, + 0.34560093168169415, + 0.9347327032185544, + 0.9926430128488082, + 0.8920901974702699, + 0.7698788603273385, + 0.8057406054378565, + 0.4170688515544877, + 0.23249911075257257, + 0.9067321563098454, + 0.1950971749529864, + 0.18258012906058707, + 0.5311466209625976, + 0.003084178328123688, + 0.6569416484785999, + 0.8532207938684989, + 0.42531727941624065, + 0.6732149101529429, + 0.4032350825257963, + 0.33594598212290083, + 0.8580822574896303, + 0.036763797151347144, + 0.8994298297473973, + 0.771956724388477, + 0.8138919594830557, + 0.8212233378681554, + 0.7309472381294072, + 0.007296422973581018, + 0.7488417315106284, + 0.4691845598153699, + 0.05991623228278431, + 0.4679108016527036, + 0.378062526232407, + 0.34713120650625906, + 0.5499019535617281, + 0.6401433234691679, + 0.7515705744441694, + 0.41758280073753395, + 0.30088197885039514, + 0.044597731967288334, + 0.3799499388086798, + 0.1057519884112128, + 0.23927118600741037, + 0.7585564189859887, + 0.8938172203110816, + 0.31381539717235307, + 0.4984423392458157, + 0.5067772388798092, + 0.6959350290099302, + 0.12761583765159645, + 0.6428267726285616, + 0.1469284059864766, + 0.577880521408325, + 0.6634871532877683, + 0.20417201544831365, + 0.7336991267302188, + 0.6819955322248713, + 0.34408552508050194, + 0.7946732252598024, + 0.34358668093841116, + 0.5706410692793182, + 0.6058485324201227, + 0.8232624720939001, + 0.2704048239326218, + 0.4910887453927587, + 0.48130888230598246, + 0.2427520631004768, + 0.9890691024614449, + 0.28879451299186887, + 0.6278566901741018, + 0.42211518431276895, + 0.9766499750753773, + 0.36928830941636503, + 0.9183997698385484, + 
0.35563131604668985, + 0.7721745828024551, + 0.08074443608853843, + 0.19582450893471326, + 0.5869729660297904, + 0.5869214255077925, + 0.963798098897601, + 0.6464280402009749, + 0.3529368554982325, + 0.8884602325724144, + 0.5950573103468585, + 0.2730580685204371, + 0.6181918795148956, + 0.12231095605609588, + 0.6468658337445161, + 0.22192533997190822, + 0.12565766817806967, + 0.5197165392956121, + 0.5256864530162101, + 0.11144612491270367, + 0.33510763771240815, + 0.0862281846497085, + 0.6205177405438455, + 0.7896399306253052, + 0.4075451710656085, + 0.8051917742497957, + 0.11336145536164366, + 0.13744393941299682, + 0.6667015370824502, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04208730433509089, + 0.48719313860545177, + 0.07912869529247013, + 0.9726968567339525, + 0.9157670915387612, + 0.28115419643965434, + 0.7355476225335006, + 0.5354755238359237, + 0.19496873228265454, + 0.0014607562350618108, + 0.3506712010307328, + 0.9992531702062878, + 0.9528681627023161, + 0.46611420429879713, + 0.9085865974550534, + 0.3915995572390577, + 0.08827753761005885, + 0.09402611096888736, + 0.2892724207368875, + 0.04919603874684064, + 0.35321306732016455, + 0.4131877565470071, + 0.686370260569422, + 0.4235178921015007, + 0.30629488210624856, + 0.9743592357328642, + 0.8492326600455272, + 0.3962081961552827, + 0.37894423281499257, + 0.7855880046516998, + 0.7274610310156122, + 0.2573210607901675, + 0.410411604408502, + 0.5076152442754007, + 0.8505732172593448, + 0.5872325753706338, + 0.8964499998251938, + 0.4688113116014013, + 0.8619340829976754, + 0.6145050176360808, + 0.5910764087486028, + 0.7582699007077365, + 0.1263665495056563, + 0.5257109401814164, + 0.4493634272067669, + 0.3170049821664035, + 0.23637235206387242, + 0.15878174849393378, + 0.6386629826025146, + 0.6864073255257639, + 0.046603710954399746, + 0.9876345088852388, + 0.012325671564315188, + 0.5909361664042784, + 0.7131704036314269, + 0.22528745791283433, + 0.2306523802949394, + 0.2695979134294637, + 0.2558785219735803, + 0.40822371093087884, + 0.3019759058567133, + 0.16063248013696507, + 0.35647914417762827, + 0.574702660971639, + 0.15746971328424042, + 0.42474377657735574, + 0.9245550241642134, + 0.3806365155194553, + 0.6035352980788345, + 0.04562057132542663, + 0.18721377643411574, + 0.9905350305147922, + 0.6819352587948346, + 0.5410605083881921, + 
0.18186591832575494, + 0.7662508446378883, + 0.10641775735741177, + 0.9333640266540668, + 0.542346588817079, + 0.3247561325091265, + 0.9717958876534641, + 0.80840150705639, + 0.21958776843823102, + 0.6193685031915793, + 0.43399689747143766, + 0.09872678455409933, + 0.7468306648222953, + 0.32984246825773755, + 0.9511523647359975, + 0.5377612638885569, + 0.44618806495961305, + 0.4003884229085751, + 0.8596880742042701, + 0.5968269344699825, + 0.10296370875360383, + 0.4758444799476046, + 0.31479949920723715, + 0.4760823614461068, + 0.024194292791385963, + 0.7420143070225116, + 0.4446055039261313, + 0.07185295134099112, + 0.10404537667470015, + 0.9593429312184408, + 0.10661809566538027, + 0.9543799601257471, + 0.15025985375846052, + 0.028659316569985882, + 0.6298303242014589, + 0.8189250761503922, + 0.26231553103843963, + 0.8246202565371312, + 0.873893772619868, + 0.23672039478552742, + 0.797363165895259, + 0.562202618245672, + 0.5097121584670914, + 0.22012386060423395, + 0.427741847515952, + 0.29059723832981676, + 0.6912229538931315, + 0.8834152173981732, + 0.44451373444644493, + 0.036609769762717126, + 0.16138159744750147, + 0.4372751643058581, + 0.8132512797619142, + 0.0024767798928380103, + 0.2595286121289888, + 0.29160858048524885, + 0.4528316921428639, + 0.8023095274326023, + 0.26282867980560454, + 0.9753584541931773, + 0.14423635170876226, + 0.06224657484427909, + 0.9738645328742929, + 0.8558592032759562, + 0.14244275517156624, + 0.21683967300735585, + 0.6829116332585113, + 0.3343576114109177, + 0.03160394623479401, + 0.6725517274146398, + 0.5461431835754313, + 0.05878656285326378, + 0.2959211584765945, + 0.9967487643007776, + 0.2104064159448541, + 0.9768690972175536, + 0.32966528606588774, + 0.7977772973726529, + 0.9313744835688442, + 0.6062580307791405, + 0.3559221752992082, + 0.27792991861947103, + 0.03678713836250014, + 0.5166046188614271, + 0.9362819528459664, + 0.33382903536261166, + 0.6058791341647569, + 0.4950043921201229, + 0.36632051349153216, + 
0.27898967652436246, + 0.38303704745723155, + 0.27748788865199225, + 0.14377660097161893, + 0.29855962341757925, + 0.5928784586859759, + 0.4163638704062511, + 0.20514420449026094, + 0.4346122771506937, + 0.4590853646362283, + 0.13521389331096978, + 0.49694180530062304, + 0.6123887285582649, + 0.5093850426700087, + 0.32648155215582086, + 0.7929667163493694, + 0.7664835509099146, + 0.39676654757487173, + 0.042176465806694874, + 0.31479036407726335, + 0.2320284226087671, + 0.6414767825989253, + 0.7874310238758198, + 0.520257318083047, + 0.7339820616943736, + 0.1376755638156525, + 0.7319332576456725, + 0.6652745217638399, + 0.7551895232268804, + 0.13104274843830965, + 0.6012357522377618, + 0.44521063267568084, + 0.9686178594829911, + 0.9742229688886102, + 0.3449947184616311, + 0.36068704724699974, + 0.9672702374808932, + 0.22059265816843376, + 0.2064766994863031, + 0.5193802956969984, + 0.7615273095795183, + 0.9234659885666138, + 0.9582470381904129, + 0.4792290544190678, + 0.0027077614617521117, + 0.9011284579053315, + 0.4618708271641596, + 0.3280869010730112, + 0.5803647824645309, + 0.34993956600089837, + 0.7731477599561548, + 0.6704005066574157, + 0.5791196885058808, + 0.04065047156083146, + 0.21234013905236793, + 0.9124786225470994, + 0.6021771843103301, + 0.6776490697581575, + 0.3955084414156508, + 0.28144361258422446, + 0.5518928218581024, + 0.150470781259205, + 0.9570950973825609, + 0.5871645220452025, + 0.5031926160684639, + 0.7176936382710163, + 0.576041010988514, + 0.29192119641915015, + 0.38150112973074735, + 0.5443291403218877, + 0.5321646176337375, + 0.9954799109273688, + 0.5997816710629622, + 0.06293281774793091, + 0.6732825379248611, + 0.2502793215029042, + 0.7731069195152767, + 0.26484515335018355, + 0.4930252171429771, + 0.7158326330683904, + 0.00764514433026986, + 0.2979929201955822, + 0.6864502085998221, + 0.9499255033265293, + 0.41361766102309305, + 0.9980170139190858, + 0.3797017267342948, + 0.4054744543314709, + 0.028431638908734014, + 
0.593402981266393, + 0.8315467114668069, + 0.6509973306543374, + 0.35762694888935787, + 0.40718331022995524, + 0.751274964848956, + 0.7735598456854874, + 0.45186742079696307, + 0.4725155828147207, + 0.10091815677278959, + 0.6014134099015941, + 0.5566981436336532, + 0.6772510398983849, + 0.679871115170112, + 0.7930200796044709, + 0.9922597856583588, + 0.2400406445818748, + 0.5755872322168831, + 0.8623686485016602, + 0.261957331822137, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8962070864550179, + 0.44287051237394937, + 0.46956371532470953, + 0.363378905450481, + 0.6531886111801544, + 0.492272151464373, + 0.21814460152768378, + 0.7727237385452939, + 0.9577843033845876, + 0.07014194111281069, + 0.2509808109819922, + 0.9072766937825038, + 0.9099871039457682, + 0.894278089343396, + 0.5211763824702424, + 0.6150928033017015, + 0.5607092208561264, + 0.4860821415211599, + 0.41240675581615427, + 0.9658948576059825, + 0.80863309768153, + 0.13953103728514227, + 0.18881769981087304, + 0.27723624069934905, + 0.7489479020722961, + 0.36282036045203625, + 0.6285476152245377, + 0.1588639640657653, + 0.6839443030276484, + 0.7470134525329287, + 0.9540763819401126, + 0.3511732172866613, + 0.27045817075177425, + 0.8050622278219007, + 0.654629505423387, + 0.07062378885477394, + 0.8518687589766132, + 0.7725895446104453, + 0.8575217731684741, + 0.5977344231366636, + 0.12599043683129285, + 0.44373066112677617, + 0.7685749999969874, + 0.2775382059821282, + 0.5889322436149684, + 0.5068762509464182, + 0.4122274568834915, + 0.7388856213534555, + 0.97892692852942, + 0.02127158943064844, + 0.26594137235106974, + 0.2200002567895657, + 0.06029578912110811, + 0.6696706423122938, + 0.9917166533547094, + 0.4132660887675532, + 0.8017117398059486, + 0.7228517329807524, + 0.7637273837304658, + 0.7774055771796783, + 0.1417634070574495, + 0.45586050155634894, + 0.8990193112029147, + 0.4376091395515205, + 0.5483165442527448, + 0.3438799531113126, + 0.37026334207581413, + 0.9802248421139772, + 0.1235051413745456, + 0.6569464994727782, + 0.08726633991704946, + 0.5899134646673063, + 0.9046364062009363, + 0.5007797075327757, + 0.0497102556242095, + 0.3489623833616228, + 0.7198942633220495, + 0.42959315670918397, + 0.9409287525863534, + 0.08033390077884672, + 0.8990598462078819, + 0.8736066120150979, + 0.7427239693241591, + 0.9959655436889666, + 
0.39316193762286034, + 0.08972551091130632, + 0.12996733679253603, + 0.09979297194337933, + 0.300127821566353, + 0.8493606828333236, + 0.6888539522471323, + 0.6941675225005275, + 0.7765384061746137, + 0.727065287238925, + 0.17490531160119316, + 0.14759984200256093, + 0.30412605513324664, + 0.35649840148156087, + 0.6177483599538046, + 0.5331906043627915, + 0.3183233677105958, + 0.5861749878003335, + 0.16864913810143478, + 0.9235285343485493, + 0.14579814195732677, + 0.8911258750838316, + 0.17258768287486925, + 0.9633145417809131, + 0.6417377064056851, + 0.07551395330279231, + 0.16906271815680163, + 0.7808558526217645, + 0.12520712314657734, + 0.8407850164313323, + 0.9868086554879645, + 0.06417078323196634, + 0.9343905393022421, + 0.3823996506188203, + 0.016249785138856665, + 0.27896355306718956, + 0.44702736267506293, + 0.8764878012548211, + 0.19626533926024559, + 0.5139496066514742, + 0.3700889428504547, + 0.5557708052804753, + 0.07904856182423159, + 0.3395259099860961, + 0.29948758411808296, + 0.13151854613894598, + 0.05115984888131431, + 0.45894745199736486, + 0.12842357138248828, + 0.7248420238565785, + 0.2171449320788671, + 0.3348915748551431, + 0.7574060306303136, + 0.6849806914119086, + 0.5339925704970866, + 0.4110075835426359, + 0.06122400996978217, + 0.43800255863043935, + 0.3369476959186588, + 0.9054644266803126, + 0.1623872463463406, + 0.3640358017665629, + 0.8942355877536444, + 0.5125938554689378, + 0.903019577022815, + 0.17167317420633488, + 0.1950359811511242, + 0.6607320029978295, + 0.7580462709337179, + 0.2452788750277758, + 0.2788962693557665, + 0.405888936323456, + 0.02785474448600833, + 0.07208951817176656, + 0.18257076978929387, + 0.7187267383818935, + 0.7346370513184121, + 0.010961027000119916, + 0.33330388678435385, + 0.6229310413798937, + 0.21893149738952233, + 0.9326644345799097, + 0.8712294983819957, + 0.0007268986007156197, + 0.3010163551532452, + 0.927251356798559, + 0.13339155243432343, + 0.944578783619491, + 0.41649016463506894, + 
0.7641237600433918, + 0.710828634080661, + 0.3065191086208827, + 0.6775133211386144, + 0.024259513411488687, + 0.33976931127197374, + 0.4955063400259977, + 0.6635087904838025, + 0.9654867491465188, + 0.534286039933084, + 0.7790600526117349, + 0.6845172321180952, + 0.865499919783512, + 0.6039324053500893, + 0.9015990859985076, + 0.8110156997513283, + 0.7110384695941595, + 0.07825063326064463, + 0.5944962102464852, + 0.8016351672800698, + 0.060631903018435906, + 0.5771828536944154, + 0.40731510830218043, + 0.6109464924015234, + 0.4443178133761624, + 0.4552377705924677, + 0.27112239247136705, + 0.24934497733929906, + 0.4950344737079855, + 0.6246298542716482, + 0.5879574580957551, + 0.24529061544518282, + 0.0009200091672624255, + 0.5719875784416119, + 0.0896119023278863, + 0.6220852542712305, + 0.3684750380601215, + 0.7792502126192205, + 0.6314055344124873, + 0.39547745072376894, + 0.8504738579125849, + 0.32887788681026, + 0.3086242004918396, + 0.3601660744118099, + 0.8131084238398228, + 0.21248559119648736, + 0.9883671375462867, + 0.8052814876414277, + 0.8378225473631175, + 0.7271250977358852, + 0.2059749540727559, + 0.3791245796826538, + 0.7992243079493305, + 0.5672366749313292, + 0.3451234524118294, + 0.5791385410621663, + 0.8939248278003772, + 0.9170206557785323, + 0.9751873415114282, + 0.566502247106477, + 0.38033978891220743, + 0.15410678425764945, + 0.9480868768512484, + 0.46532273293598725, + 0.07117254517669624, + 0.3177325142500902, + 0.5050424716492614, + 0.891670585115688, + 0.8891398348201501, + 0.25879183026854935, + 0.5545160442833293, + 0.31951216772681434, + 0.6198086445408558, + 0.3255911164531847, + 0.6297082439981169, + 0.6197171841012978, + 0.29800484041688424, + 0.3622848859039649, + 0.9890683503368907, + 0.1268744678799768, + 0.14657777979090902, + 0.8675628349173496, + 0.20591371743942044, + 0.7971245569704272, + 0.9637814955920515, + 0.19647791864051434, + 0.5029223668930879, + 0.483061091846241, + 0.10731000340355334, + 0.41547291975106804, + 
0.4120377709130624, + 0.14174805576327942, + 0.8822942518531542, + 0.7121634187898556, + 0.3357198602996515, + 0.043996373019142165, + 0.7470113425826548, + 0.05875921853930155, + 0.6604525526891044, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3106870679813668, + 0.8375039674227638, + 0.14132411999646566, + 0.9733937497512101, + 0.5268878833167199, + 
0.16234761722353475, + 0.5762816965807208, + 0.41670339685703073, + 0.497929209315464, + 0.7017986142414105, + 0.12786937218928796, + 0.8911947862124853, + 0.7137675913289352, + 0.0610893714097005, + 0.18546703677910936, + 0.035688612985960266, + 0.32408511872819634, + 0.8919148231441918, + 0.28306903485535917, + 0.6861918598418274, + 0.6429701528789875, + 0.3045181072487121, + 0.46084564841796605, + 0.3098135229714267, + 0.2514454163728843, + 0.5268172747845213, + 0.624191972477752, + 0.3118817932629737, + 0.6870755218591845, + 0.7015135298085294, + 0.8256150246709529, + 0.8979132576047186, + 0.7666764157872629, + 0.03749457607925066, + 0.29127355162015434, + 0.9692306257117655, + 0.04120871299867901, + 0.3222809731713697, + 0.9011714666852197, + 0.7447872776192568, + 0.03246644146071498, + 0.9520801192537708, + 0.8205741291859162, + 0.806333878982076, + 0.7928465265572342, + 0.4432930399532825, + 0.26895226377879944, + 0.5557682276163827, + 0.11059051813128462, + 0.53272574072743, + 0.49463601230067555, + 0.6230204922381581, + 0.48349969655007063, + 0.900140237602878, + 0.44413510868238426, + 0.6614500129363188, + 0.05883311151911752, + 0.49112590025669867, + 0.10502839382859208, + 0.9056541109575096, + 0.5485944061267944, + 0.003367956927173732, + 0.31350647270757537, + 0.3741191768012868, + 0.3629983726846838, + 0.9024604948001891, + 0.39565204679363264, + 0.1980643611303149, + 0.307542441011294, + 0.1606513515278285, + 0.574644995538095, + 0.426784327857325, + 0.4098408684770811, + 0.20153281170765258, + 0.5181186938865909, + 0.4341138565978081, + 0.8272180148834627, + 0.258326945266001, + 0.04091342668743836, + 0.3252100702505052, + 0.6269294501961069, + 0.8439689357356778, + 0.19792388051549636, + 0.8728821095697096, + 0.16196316143368072, + 0.37808015088265445, + 0.9505953331433469, + 0.4013327602563459, + 0.7489468654728467, + 0.7339595103711778, + 0.4102430705511876, + 0.9222304579765279, + 0.42441851490571403, + 0.5923724550575528, + 0.8771107657248962, 
+ 0.8731747355771637, + 0.5888652810322299, + 0.6372935991352188, + 0.7407466175440733, + 0.6804596183680885, + 0.029125940310131604, + 0.684749637902132, + 0.12484446516456749, + 0.3156558568570107, + 0.08229113599397897, + 0.2782399383077956, + 0.2575579342406279, + 0.6124487967585517, + 0.5615943220625608, + 0.422634483748746, + 0.594814557350571, + 0.1421207317433164, + 0.6519169977675151, + 0.47043256415874224, + 0.19407239246755792, + 0.427220969858229, + 0.3278435636252649, + 0.4916870950040342, + 0.6882052096054266, + 0.9523155071222569, + 0.7530995091630748, + 0.734782434763255, + 0.970954363863672, + 0.9918653076416558, + 0.9067582004487369, + 0.07614823847734775, + 0.49996519376830717, + 0.5307701726892107, + 0.5860195814930064, + 0.8369882601581162, + 0.7170882997152261, + 0.979960096265418, + 0.7133096555109707, + 0.2678966664417787, + 0.5399003983850295, + 0.19528591858809097, + 0.5044629955092683, + 0.8306116464110791, + 0.839806250816905, + 0.8864753374707028, + 0.9652589486160503, + 0.6470169433721922, + 0.0021080542474207764, + 0.3993092943058364, + 0.18888076989792213, + 0.9409002150811886, + 0.1692746198648225, + 0.047622626900064136, + 0.9775043314280928, + 0.9277607818839387, + 0.7774403297375743, + 0.02946960264933851, + 0.14489629123188397, + 0.485828769400106, + 0.671955949035962, + 0.26959115008462997, + 0.7797793475959743, + 0.46350630017658667, + 0.7969671115226824, + 0.1734656252349407, + 0.5201099289766054, + 0.6655243545469586, + 0.6430549416784092, + 0.5457543431365761, + 0.42170770669129765, + 0.4672604447984776, + 0.2302057369739422, + 0.6639994155871508, + 0.853382939077274, + 0.40685351370098966, + 0.13431375872948292, + 0.2180597482559874, + 0.777203020857793, + 0.8555476109143667, + 0.06050920808752114, + 0.1766779245556892, + 0.7437111526306779, + 0.9475035208800926, + 0.8777906029435121, + 0.6382308280225599, + 0.2111873196192524, + 0.6737040560793205, + 0.927238430398323, + 0.8744959927312456, + 0.9317921835308065, + 
0.7434784791000327, + 0.3786807488775139, + 0.81799763414866, + 0.8819122326564898, + 0.4598645636473221, + 0.5580046059526343, + 0.5387451869200437, + 0.4171106131718373, + 0.1447812284363158, + 0.07768310383675614, + 0.010936575444467467, + 0.5730795999651903, + 0.42121178091069, + 0.8866575100660742, + 0.01289192548222362, + 0.555711725100172, + 0.3513847830683331, + 0.569124211044498, + 0.07454625455008745, + 0.2113031240763743, + 0.8129643008737778, + 0.1542984485530986, + 0.708741541127955, + 0.6445816128410039, + 0.27839516124991337, + 0.7947788353493423, + 0.37669601059079194, + 0.5515796606164156, + 0.8763990170945234, + 0.3260481261527052, + 0.18242806678150358, + 0.17037609108575047, + 0.13433587170845185, + 0.9657463286859697, + 0.1442506742224563, + 0.9665122126277528, + 0.028534214701289806, + 0.04854468263923506, + 0.1910549620195987, + 0.5188602841702555, + 0.9231280004373151, + 0.4317769190956203, + 0.7026205031898519, + 0.7128707043442232, + 0.3058429580053279, + 0.5257155704547867, + 0.4666935066409813, + 0.9077033646641116, + 0.39640867900033416, + 0.7445101317133044, + 0.6895290651859735, + 0.4432714865589692, + 0.40622994875853613, + 0.950700299398459, + 0.017540874600352363, + 0.08218441353235384, + 0.3340682869112773, + 0.30099462912190866, + 0.4596162094167482, + 0.4441508969089326, + 0.4793061221594075, + 0.8520145924738496, + 0.4202317548486756, + 0.3048707373335664, + 0.8260967304171667, + 0.8858748287584243, + 0.9313296043479037, + 0.03728554665895367, + 0.8516620871733309, + 0.18163358649785932, + 0.5562563221035275, + 0.17890783732894966, + 0.6707977309743394, + 0.3258694035022829, + 0.7042418430043583, + 0.748950117702922, + 0.11704609552430245, + 0.019969326620207783, + 0.8793030179641731, + 0.618849805951286, + 0.8315256975602183, + 0.08691404253826274, + 0.8095014961360036, + 0.05130889547789719, + 0.7842835452733545, + 0.7524681896441217, + 0.24619102991105324, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6505674331555952, + 0.8896008472504486, + 0.15316396490313255, + 0.049817317395853045, + 0.15779654764310092, + 0.1907794697781754, + 0.3424263786134797, + 0.3024882468162112, + 0.24476108041642786, + 0.5187137842568543, + 0.4700278297160906, + 0.9166629184786692, + 0.5192993689272892, + 0.8883002051270429, + 0.6139692209707068, + 0.6696122755659888, + 0.8334636225036565, + 
0.008490736139454946, + 0.5684667853663021, + 0.9437810089681916, + 0.9234508591332773, + 0.3028583457674482, + 0.024270514580325364, + 0.632879837833451, + 0.8919083641817828, + 0.8193025368566559, + 0.5434389008606694, + 0.298992596956283, + 0.5701233503562095, + 0.9200943671522221, + 0.024420994499118942, + 0.7581774483822272, + 0.36602160070290324, + 0.8996906260021484, + 0.650338274041805, + 0.30306728941495165, + 0.8306322790133498, + 0.0014578989246246676, + 0.9289092284509093, + 0.9685183150957329, + 0.6966619446789848, + 0.45868787962240154, + 0.3024322195782827, + 0.3402690620046872, + 0.5186534947305099, + 0.10221089709393838, + 0.34926705900556854, + 0.48425250907836204, + 0.8441237432776622, + 0.6581482230156697, + 0.881989424004692, + 0.9283798786989146, + 0.26378479527925247, + 0.12971474106068215, + 0.4127028958078305, + 0.9759845634823414, + 0.4448277223227939, + 0.21353220464874112, + 0.24458759535876862, + 0.628564336914344, + 0.02863088163602967, + 0.4816194160242998, + 0.7159384098666771, + 0.669183078789963, + 0.35193890623697455, + 0.29949427086355307, + 0.4228887773715255, + 0.5117140120602129, + 0.9044077484662371, + 0.9032407065460043, + 0.995172449354327, + 0.41423348932439163, + 0.47562115955991857, + 0.6568767974937889, + 0.45574574846480853, + 0.08854677037549652, + 0.07723267818449431, + 0.3976746983992273, + 0.4561739680088267, + 0.2386152622400628, + 0.5659586342616123, + 0.7611815140143026, + 0.7017405512185082, + 0.9134156109484025, + 0.9950940798528922, + 0.17193771361794707, + 0.8289902073924227, + 0.004546522645031303, + 0.007052684253707531, + 0.10943401504317929, + 0.7379460615617371, + 0.19030981978848005, + 0.40085772597399394, + 0.5840635013822746, + 0.13350257292998435, + 0.9661396481299975, + 0.7725671032679924, + 0.7896569641883417, + 0.12533753035135697, + 0.2354617936330704, + 0.2394109867192058, + 0.8010115925730721, + 0.9530126297057642, + 0.9304927494837946, + 0.6771456594244746, + 0.4259206916176925, + 
0.023690727384484522, + 0.1341174625131395, + 0.5404088809849145, + 0.12886316469913406, + 0.9230427461151006, + 0.793054450271054, + 0.2741246975527478, + 0.8305010025820937, + 0.4324324041965836, + 0.6717649574105667, + 0.6916956933545159, + 0.41666776761366175, + 0.3415707400145297, + 0.30929070993956775, + 0.8390026176115344, + 0.3557227685622363, + 0.6784004257720248, + 0.24634964585544095, + 0.7206791384903469, + 0.03848746983483797, + 0.2900684191020383, + 0.8711795452250837, + 0.6921725205176384, + 0.4895358160689116, + 0.38014984844379196, + 0.5127266828750897, + 0.6673498470293923, + 0.002685351317869067, + 0.6964158642930925, + 0.7826272454138963, + 0.9389863281069475, + 0.8753444290282524, + 0.02360400024908793, + 0.07502297232413513, + 0.440133833531837, + 0.8142270861493649, + 0.19770965037169863, + 0.4136561532303761, + 0.7852561605366281, + 0.07845910752333318, + 0.14910517424215053, + 0.38036721797278994, + 0.25697106359778543, + 0.907979042972644, + 0.500818324070304, + 0.2680006068166886, + 0.5132723536770091, + 0.10487404904883002, + 0.15354263266391432, + 0.9610349748415238, + 0.3288575659426388, + 0.19560645318162806, + 0.7706955893521374, + 0.28436663148462527, + 0.5158219651471875, + 0.764372330238618, + 0.6067985070905968, + 0.9536945483930525, + 0.6205263496586823, + 0.6302723014781895, + 0.9862346803705233, + 0.04669414966964047, + 0.28588637528251637, + 0.7253708430697816, + 0.42650678906087014, + 0.02355636294467045, + 0.6447642606981637, + 0.6319427218288056, + 0.30810970979128016, + 0.19544046811243343, + 0.011243054744094594, + 0.020779029003142324, + 0.3234336649677997, + 0.8380368930039253, + 0.9179728189664107, + 0.6575827046029655, + 0.37915290474476626, + 0.856151707495239, + 0.5294010050193729, + 0.38301454550749847, + 0.9732052582093879, + 0.1846647883615723, + 0.009895965772183879, + 0.9887248725968469, + 0.4652612968535793, + 0.8783330659777079, + 0.41310896576890466, + 0.43120977239632585, + 0.6362789547433481, + 
0.1919591608372203, + 0.614734048692408, + 0.6263104226338567, + 0.4461255578262746, + 0.9989049820239663, + 0.2744233679516902, + 0.7980044956400881, + 0.7271919013576847, + 0.6032240304756097, + 0.8596424114409699, + 0.8380041436818071, + 0.2931118607225961, + 0.4309317810807699, + 0.45648556431344256, + 0.45653962008349014, + 0.06365002436838152, + 0.5297658162781413, + 0.6027042796449013, + 0.16092756634517225, + 0.30114558065352737, + 0.3111552068824247, + 0.8507893439535678, + 0.6048926154492419, + 0.7704986131316082, + 0.14366892969004819, + 0.8953964701711203, + 0.8346079391390944, + 0.9373998495596302, + 0.6791337348202509, + 0.7607634306231361, + 0.3450016679166775, + 0.14656248562479735, + 0.3930424142667378, + 0.6323807244895218, + 0.14532783350349865, + 0.5631128515605822, + 0.16378082544758465, + 0.5236548930432765, + 0.1882119860377236, + 0.6119778443997322, + 0.4235186782039845, + 0.2665242654042336, + 0.7399352175796418, + 0.11586465570461602, + 0.11247980167197891, + 0.8220103541446414, + 0.7353521901879041, + 0.5164533032240637, + 0.3812123715027973, + 0.8624134626120791, + 0.3718310126558474, + 0.21470083449509225, + 0.8915583374517714, + 0.37112112192758095, + 0.6922735149400973, + 0.4026952924740155, + 0.9788918318798452, + 0.18832545730747252, + 0.17103167998576296, + 0.9771867918938736, + 0.07145675603275004, + 0.03583507795950236, + 0.2174802520595922, + 0.2665684944776263, + 0.5732887006167159, + 0.35987345097452517, + 0.5572656769856846, + 0.8956108702121777, + 0.7462871960223535, + 0.4775491658987586, + 0.014127479673780918, + 0.29202480552983767, + 0.13598935992309713, + 0.6085402592486129, + 0.4459387493086917, + 0.6307215397392307, + 0.6271055850123975, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5906936109268534, + 0.985178025941136, + 0.9029762067351483, + 0.9085297893027975, + 0.24943397543505585, + 0.35778319465374697, + 0.23316328948597598, + 0.22336747934409962, + 0.26754891235392586, + 0.01668891048601473, + 0.29105779384028485, + 0.46733411135466785, + 0.5554113959439213, + 0.217181979191437, + 0.7721583926717096, + 0.5638496956492505, + 0.6785535977866709, + 0.31340234650410514, + 0.404016607921286, + 0.08020124109526339, + 0.4377160763702883, + 0.5371288877829645, + 0.7770975444771326, + 0.34431254253104426, + 0.6850147449388799, + 0.3752845409527149, + 0.5380834481873809, + 
0.6946143181497039, + 0.9207630818846864, + 0.16684512190997913, + 0.007916818373048695, + 0.1892770281860332, + 0.9966525280606527, + 0.07092537391042053, + 0.9356512729551854, + 0.23960649799812972, + 0.615845449978516, + 0.9389809504451423, + 0.822916000728221, + 0.28420265690446167, + 0.5257546416193012, + 0.27342915444662586, + 0.8967872515365237, + 0.38793879905655004, + 0.33572022350113084, + 0.27577889120361854, + 0.7513208927532176, + 0.2429746493942362, + 0.43725940771686045, + 0.11350521820787318, + 0.9551581851145005, + 0.9186503036241112, + 0.9926044305540519, + 0.10127913536672994, + 0.47941556948808584, + 0.021455180502311988, + 0.32936781677422045, + 0.8528314174742758, + 0.8549568232850915, + 0.14244221740052387, + 0.22413092997698647, + 0.5119652884890653, + 0.006251438999501757, + 0.0025389875820115426, + 0.6265233510815367, + 0.803070845573163, + 0.6947902026070869, + 0.6128251959085931, + 0.22369232034773734, + 0.7894555792452711, + 0.6581754842166929, + 0.6181877024917093, + 0.9893830201440166, + 0.5980334785057538, + 0.39646962987851675, + 0.013480534620852858, + 0.5038534734022597, + 0.804321900975404, + 0.15470405597679626, + 0.5633913400737991, + 0.5872789577370551, + 0.06508056419877961, + 0.45378359660950796, + 0.03236593895453177, + 0.837911674296939, + 0.6397745508699528, + 0.9855034320025721, + 0.4057592785578884, + 0.7339188689753977, + 0.8387153508863529, + 0.9263346961295181, + 0.8156160068760853, + 0.15440150898459948, + 0.33302707750831784, + 0.2939876897339391, + 0.6142468214359225, + 0.4736801913093771, + 0.6849746996140124, + 0.5546194817466522, + 0.9989092644107732, + 0.9920312602240504, + 0.6904766711729032, + 0.42924208861371305, + 0.48003550172456666, + 0.37023246543666033, + 0.7713408897778646, + 0.27605583178904336, + 0.13182377894341468, + 0.27218206910414566, + 0.5427229610650799, + 0.5455649473093593, + 0.29207927395164146, + 0.9603892641978854, + 0.09192339619379086, + 0.45385085549626847, + 0.6139755811995422, + 
0.5302385601739379, + 0.8851484705553312, + 0.5855290138345451, + 0.2919520303069971, + 0.12021580596319947, + 0.1468868545245562, + 0.46945195004362084, + 0.17330112801270003, + 0.30334534668880353, + 0.713355641430544, + 0.25033664107375675, + 0.15793765648113212, + 0.7043171642877438, + 0.5446352308471222, + 0.6891355904241119, + 0.531200704449833, + 0.9984833143167338, + 0.6595178169483946, + 0.37064042308391554, + 0.20679666993623924, + 0.5516764367214837, + 0.5788988057292189, + 0.42943183722258005, + 0.07297844321589542, + 0.901189057075367, + 0.4544117040265301, + 0.8045557115634316, + 0.8313075178154284, + 0.8844463117078077, + 0.194334173882318, + 0.4111992242996626, + 0.28713352268247694, + 0.9100520555294735, + 0.6493475195379939, + 0.004577152771646387, + 0.6798627396673662, + 0.7742462655458693, + 0.7662859317916594, + 0.11262561591381115, + 0.5295509232980138, + 0.07523736611730225, + 0.33263741058689944, + 0.9331006720580732, + 0.6573958051418146, + 0.256382311639917, + 0.6399746259259406, + 0.14430766081621926, + 0.8119110437630124, + 0.4881869363132024, + 0.16266496588688184, + 0.8602928494761776, + 0.15500270093277513, + 0.5919373805543477, + 0.7171159044268629, + 0.7716819701640273, + 0.827583416753828, + 0.5033577300733442, + 0.798318758900835, + 0.9690272763169439, + 0.8134065277629373, + 0.7146256258613584, + 0.8848739610165371, + 0.9150723702793041, + 0.35993152453860877, + 0.7572016769176416, + 0.587838729413158, + 0.9168552444961787, + 0.5293676430470518, + 0.2691200337827875, + 0.817754650769796, + 0.14477018287134902, + 0.24020578767267653, + 0.45203924270457674, + 0.6515041772404812, + 0.2422570434725364, + 0.8314979071042918, + 0.4472986849872749, + 0.9057742710619541, + 0.5880079661439431, + 0.0038976147845962705, + 0.3525512655168155, + 0.29962263955983737, + 0.9108480369512229, + 0.9218869284324538, + 0.45951566195383053, + 0.5482057597951033, + 0.6463242408694697, + 0.8297016951468782, + 0.9306268162679342, + 0.666030485712458, + 
0.2815282348852275, + 0.9047698912977503, + 0.5445726211043236, + 0.6574672138791803, + 0.3510187373660053, + 0.8179732055564214, + 0.12705607337288172, + 0.8480951556880845, + 0.893327036527009, + 0.3503742982580723, + 0.016331049390413077, + 0.011134810282633523, + 0.19207818907715868, + 0.5513762741251969, + 0.5834937223166203, + 0.6521824543381127, + 0.14276309508816698, + 0.4737108529589482, + 0.07015458200730773, + 0.28265824455266875, + 0.8065104111670672, + 0.6078572082016351, + 0.8929567664411113, + 0.19518846326105055, + 0.07129503182075347, + 0.2859867879819108, + 0.6981087700606399, + 0.9962390374902929, + 0.9075050013001706, + 0.8695402220854705, + 0.7530955894999412, + 0.8872099932516173, + 0.8556170516642067, + 0.8924584932595006, + 0.9360515843195668, + 0.4913992277519147, + 0.12437922609279806, + 0.7039043930785024, + 0.5921658366807947, + 0.958636807198965, + 0.36190677610423394, + 0.03712073690303541, + 0.2840242998700213, + 0.5001931582796274, + 0.8815309744840273, + 0.7415436773009056, + 0.05594106545234934, + 0.5827879256252866, + 0.978511304137195, + 0.905213194623962, + 0.980163775973384, + 0.2714428120631547, + 0.649702855877862, + 0.27955619536763154, + 0.647725265982549, + 0.8393291964395642, + 0.13006685154737352, + 0.9716344142525796, + 0.48674075724774, + 0.40927892479579764, + 0.05574673881075609, + 0.7966852951467228, + 0.9831695970237342, + 0.017851277943374932, + 0.28589215577149185, + 0.21746264351668498, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9266921848571944, + 0.888070067786244, + 0.37680058524588955, + 0.4936381757027637, + 0.41414702570616047, + 0.3506808402123225, + 0.583265352991748, + 0.4794877783223914, + 0.18111669035507327, + 0.46965615959224305, + 0.5248767360171599, + 0.5402402874128852, + 0.6957212216761687, + 0.9652384781192399, + 0.6082559580539005, + 0.5133759451494109, + 0.6553830922925206, + 0.2074158254497679, + 0.704404013389534, + 0.907907636732286, + 0.7923343710183619, + 0.913130375518895, + 0.9704051612366342, + 0.3797428456978046, + 0.6163690387027178, + 0.9349245631523007, + 0.11200283200500039, + 0.07867695207510406, + 0.4169161408929506, + 0.8526429531296893, + 0.7497563646344403, + 0.4280029263408006, + 0.21051657636558396, + 0.6008606689278838, + 0.9500305502606794, + 0.4545769669460069, + 0.8027080274236873, + 0.11039217262761347, + 
0.21003008044359084, + 0.05375724388032843, + 0.605756280334851, + 0.016688872563245805, + 0.9202741846945232, + 0.4040196288562755, + 0.9794457364091196, + 0.2517181927328055, + 0.5242846200245552, + 0.873343299183039, + 0.8249183015059579, + 0.38379538997444373, + 0.8212428356808356, + 0.2993644032656997, + 0.7870955110858734, + 0.31010871275185115, + 0.24747265432949528, + 0.8519440767021866, + 0.9292443696626554, + 0.014403270787187061, + 0.9186023788857333, + 0.047257770878987415, + 0.8568535984629932, + 0.17933695516621095, + 0.8951499627990374, + 0.18983013829236062, + 0.4815123260352835, + 0.6563307875082035, + 0.7922807731193054, + 0.6677929763533068, + 0.2792061593131313, + 0.7566882870404893, + 0.19117735457209262, + 0.9534755878483753, + 0.4805948261590651, + 0.24846060226911404, + 0.41370936412042725, + 0.8021081182848567, + 0.6811971754501239, + 0.09657368137977318, + 0.752181402082427, + 0.6487657899344234, + 0.031719666791598655, + 0.09297033104482366, + 0.35535937814698193, + 0.676986645523438, + 0.9001299507009777, + 0.5957937772573332, + 0.8192663869408409, + 0.503377712358978, + 0.011583319124723213, + 0.10417530992527435, + 0.26075958601276994, + 0.505114629154694, + 0.505240951074268, + 0.06761349757327517, + 0.8098607095247266, + 0.39108628519373756, + 0.967802166668163, + 0.844243292293567, + 0.9100702344711836, + 0.1288921738833677, + 0.9825931410330894, + 0.8577310861905787, + 0.026375853087982515, + 0.5137412913845685, + 0.3992985263770744, + 0.44421497794056375, + 0.5019925089149055, + 0.3096161674618526, + 0.0708902729994767, + 0.889875082020315, + 0.22347819684613468, + 0.07540889607542722, + 0.9454108618730942, + 0.566462013342779, + 0.34230593684147415, + 0.7880550451710757, + 0.05621048676635931, + 0.2904796834316199, + 0.5942077567376716, + 0.3298722428252219, + 0.43130173101366487, + 0.33404080089968813, + 0.687867618004529, + 0.03314899778447855, + 0.6061460243750493, + 0.9399593535167685, + 0.03930090354822835, + 
0.8235203645997633, + 0.5293521875346701, + 0.05858721938768008, + 0.2418192475838139, + 0.2796042846713743, + 0.5512165384711949, + 0.7251122716795119, + 0.41078526776480273, + 0.02845486763446292, + 0.2393933838154727, + 0.29640078573238926, + 0.1977493883962561, + 0.3886846431301112, + 0.8229726969052238, + 0.8358901392952233, + 0.8449563453332865, + 0.8348662425894039, + 0.09085996512648742, + 0.7301052253226809, + 0.705291324266203, + 0.5448808055755662, + 0.4363577734331944, + 0.33767483963446376, + 0.0995417837300564, + 0.3450197447661091, + 0.988903362370932, + 0.25310573621742527, + 0.33623960671264874, + 0.567644579803449, + 0.5760713345891831, + 0.8209439285022231, + 0.5497935156523039, + 0.02899643273268404, + 0.8696086771802893, + 0.7818378247449377, + 0.08815248124690245, + 0.29454967840372437, + 0.29925251366005157, + 0.0069606300833238155, + 0.2738310934479383, + 0.7742909572231088, + 0.8794758096877898, + 0.34116386952303024, + 0.4989861566010343, + 0.3524273147382102, + 0.624524358049323, + 0.09943829854256803, + 0.9527466632646967, + 0.00544809409009217, + 0.6558475527340603, + 0.3237691657907772, + 0.22547032693757718, + 0.4260218234263137, + 0.5150149640587699, + 0.4492464097619129, + 0.7358879237072037, + 0.4866561456938192, + 0.1635519969529431, + 0.4809968269549316, + 0.26770485265829935, + 0.9442051201853123, + 0.4858506581683727, + 0.9539657462347683, + 0.5481753461253085, + 0.3751025770700822, + 0.8606217054585975, + 0.5829477442390647, + 0.07918319472573276, + 0.29740354682628034, + 0.019048352354515852, + 0.1875332631036024, + 0.9271241546664925, + 0.17862507944582917, + 0.4163231676661515, + 0.05837109701188958, + 0.5877758570729219, + 0.4830346788060862, + 0.39105487377188475, + 0.7647944314432883, + 0.8553077558110618, + 0.9023617692649458, + 0.2603260429046589, + 0.6171896212326372, + 0.11559808027279872, + 0.9178929801080634, + 0.08842390316318727, + 0.22136822316909943, + 0.398580385827351, + 0.7465599603487545, + 
0.6494780439688286, + 0.3648108851094741, + 0.7993433428774431, + 0.24769129647869204, + 0.4875889692209341, + 0.701201744248708, + 0.5388908909866726, + 0.09378921599818801, + 0.19818373225226793, + 0.6296115046604114, + 0.22451817825232534, + 0.22172410316296243, + 0.9485620997140334, + 0.6817115912503763, + 0.8001633754794993, + 0.9582392926917492, + 0.6569544112621772, + 0.790803746483735, + 0.10093571811853375, + 0.8798936216652578, + 0.7289120198461425, + 0.1251101106504363, + 0.3447050204468344, + 0.7132175284472319, + 0.1316304532806627, + 0.03480113021204967, + 0.7320609817127802, + 0.9297353999522445, + 0.5714603385468893, + 0.12318018950792442, + 0.5308150532993356, + 0.3500864866738508, + 0.9897758780710005, + 0.08009873177326488, + 0.14858267063327268, + 0.7703383807832973, + 0.6082640133635191, + 0.6142285588840186, + 0.2560517454450957, + 0.7388896970298694, + 0.03325976044014889, + 0.5125896014708659, + 0.9612275526159547, + 0.02219761721791591, + 0.11859155915861419, + 0.19987746865401068, + 0.8262151146797992, + 0.03638992047589751, + 0.43466661523174577, + 0.21476822079717706, + 0.7100844468838183, + 0.9800458613977039, + 0.9411595898410152, + 0.07560688438025576, + 0.9038186176416276, + 0.1795439189088558, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18364842527255254, + 0.8646540357859253, + 0.5027785820097855, + 0.3011961399183939, + 0.45684686795264784, + 0.6612263502734103, + 0.4998035354971454, + 0.058825788045184346, + 0.2969070833015489, + 0.12279886582014965, + 0.7283916747281782, + 0.46671131202148664, + 0.9830332879389645, + 0.9722201734971978, + 0.4142475889769287, + 0.31247090256745313, + 0.14093121239946482, + 0.9537397211259445, + 0.7210733143495486, + 0.40139917767957534, + 0.6660649687230704, + 0.4978490224783404, + 0.7072824899487378, + 0.12488082700520198, + 0.9908553738147915, + 0.20068655457130835, + 0.2879453930664292, + 0.5026257446107751, + 0.4130740509237302, + 0.9708285975698596, + 0.420733577649696, + 0.12630491706861102, + 0.09300692086633133, + 0.4294211575150887, + 0.25785061156532996, + 0.3789946183270362, + 0.9399312650696577, + 0.4810343310785773, + 0.5683823481766586, + 0.9808113538361639, + 0.6585116648659487, + 0.4212494043943772, + 0.895062747210728, + 0.7242337224502916, + 0.030750495424348046, + 0.37452867433105785, + 0.9802184395939197, + 0.9031002064855774, 
+ 0.02918921236160299, + 0.08996896893533579, + 0.4555441274272486, + 0.6323922793742403, + 0.008097938087578749, + 0.6932761857280684, + 0.6668866787671196, + 0.6495602054785902, + 0.9575775147687592, + 0.08583283499217742, + 0.9058494183669441, + 0.6912174590902253, + 0.948443527234965, + 0.22559894087481702, + 0.5164072151010167, + 0.48667161559557626, + 0.5983519381070685, + 0.7695586705065749, + 0.6107703318943722, + 0.06726811304127367, + 0.8934337536224866, + 0.4895689504002815, + 0.24048343837374808, + 0.36529185794440455, + 0.5735394435632827, + 0.49650413162267193, + 0.6706935884079134, + 0.6348406258478118, + 0.5787601125844303, + 0.8258561730373456, + 0.4676308844300169, + 0.5080047356001118, + 0.7805780895616434, + 0.6240419567737736, + 0.43285630147506504, + 0.7624895746669883, + 0.8986092467105691, + 0.23483677982433293, + 0.10024863546449725, + 0.7210620096750974, + 0.8182483313055581, + 0.6940076024272812, + 0.19749929980303083, + 0.9448399638616968, + 0.8553783507006383, + 0.5597014267284324, + 0.997116862483971, + 0.8652785592649281, + 0.8029089563219114, + 0.17396562036705587, + 0.7007641906892125, + 0.7263635274621134, + 0.19995899946194873, + 0.005427612055188824, + 0.31474050328612535, + 0.8374326345875086, + 0.6422632319254711, + 0.5471260849018768, + 0.9808938467611115, + 0.05893050227743879, + 0.8051418275344663, + 0.8045211524591267, + 0.0976271286204834, + 0.9500743828386982, + 0.6189587658475317, + 0.1775659073072101, + 0.8649037748052864, + 0.19880387648751718, + 0.6872022270103215, + 0.6815676745418822, + 0.779931347174679, + 0.9729791805521728, + 0.41026775735347476, + 0.7138281187921892, + 0.2620055568788331, + 0.6909833611607202, + 0.33175446126929475, + 0.9042574654415093, + 0.8055372293474643, + 0.47244977850074155, + 0.971417612132888, + 0.5284122570418186, + 0.2597136347033241, + 0.43739815881984534, + 0.31203971548835785, + 0.566604553674063, + 0.06818610175031337, + 0.783862435254671, + 0.25760538754054285, + 
0.5899575854320958, + 0.002574441420642004, + 0.14239206856562037, + 0.6476843165622203, + 0.646166940700204, + 0.1580755667471383, + 0.891498810950069, + 0.2727185108649317, + 0.6884729998799559, + 0.4646194290289517, + 0.5976281005402438, + 0.6758814437472129, + 0.6862144036589076, + 0.16547277765693413, + 0.7049076469957052, + 0.583509324590221, + 0.7210733104422161, + 0.07405192884464429, + 0.1842949006694724, + 0.23974003388816656, + 0.5764130732926929, + 0.5979447803210742, + 0.2249773165113338, + 0.6522175816565678, + 0.3348321863220919, + 0.13629406209727002, + 0.6183067088214383, + 0.10758483698180288, + 0.7944437003572663, + 0.5358573613839419, + 0.2843055467293566, + 0.8113238507154572, + 0.2063319659546743, + 0.7435744252296878, + 0.5133614327054506, + 0.44611301952286764, + 0.5068034569981075, + 0.4750670489171065, + 0.05239100354466675, + 0.3999524584363361, + 0.6992611565406978, + 0.6346120355797049, + 0.9779520667214144, + 0.22310158891971665, + 0.16057679483740717, + 0.6048124995802331, + 0.4945203927895544, + 0.33542005253569085, + 0.546287050021144, + 0.5122087346921007, + 0.22758820854298445, + 0.9579825617111165, + 0.42337179054885354, + 0.6025050796649264, + 0.3829819303230637, + 0.4309505278441734, + 0.04718163249168106, + 0.9386268549080705, + 0.40885237189458035, + 0.6942325344632331, + 0.17551940244712882, + 0.5552064669244209, + 0.13077532375819267, + 0.18815480443419874, + 0.6327808019359032, + 0.14911664776968647, + 0.8180314064062282, + 0.5357642829744088, + 0.5964823537522983, + 0.7891291847465132, + 0.4874784573158589, + 0.142891006959614, + 0.5944263580553424, + 0.7549008274298782, + 0.8790369548509892, + 0.5041435656483805, + 0.1794100433444641, + 0.867985285351135, + 0.1366464858210079, + 0.6200394749183673, + 0.02867361525533907, + 0.8522445335776246, + 0.3128788466690616, + 0.6953856808664445, + 0.9384737226819604, + 0.6369119292437464, + 0.3563591058307455, + 0.8176832508669591, + 0.5598049251515739, + 0.30389993519798797, + 
0.12871542796660596, + 0.20665982722067022, + 0.238495398781432, + 0.20854538624485364, + 0.644071333405015, + 0.4491148800547001, + 0.12069216698852614, + 0.19905100612586457, + 0.18478204458702574, + 0.7495853697483824, + 0.7210569687290942, + 0.6037829656071078, + 0.07779682924293041, + 0.4227089499034231, + 0.8524084412015507, + 0.49869733849852904, + 0.9219741468820662, + 0.40072474623965604, + 0.7678747257391761, + 0.6152986840655872, + 0.6008774479712607, + 0.841185882770977, + 0.16542118130211003, + 0.7159180058586948, + 0.5997185729662033, + 0.5667886704658744, + 0.24609186123070648, + 0.5909026583584631, + 0.09107277515224033, + 0.8780346550913167, + 0.6646037647589217, + 0.3838442393954189, + 0.6043565015885304, + 0.9550009966879915, + 0.5510160949532444, + 0.34814972919759457, + 0.6877935123273717, + 0.23250965829296277, + 0.8712806889100291, + 0.613580711658753, + 0.8990942977474451, + 0.3483241099217952, + 0.7221340384104827, + 0.10484548389530757, + 0.263378352139333, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4296642823689558, + 0.16305374590883182, + 0.39998991521513605, + 0.6556746014980662, + 0.6210150036324743, + 0.317140245800712, + 0.6877738909201744, + 0.4258534032539746, + 0.9800964298042102, + 0.03194162410505785, + 0.8191489329480925, + 0.022289309196923957, + 0.9349234189706646, + 0.9478446521796615, + 0.3592043376485212, + 0.4562030112341884, + 0.9386673359665986, + 0.65160327497373, + 0.35665838137710737, + 0.21140224285486164, + 0.9079591527256549, + 0.44477883959825293, + 0.3236538890036543, + 0.13168561420275893, + 0.2487697371876182, + 0.7515090285233452, + 0.4231750228601412, + 0.3121057542077036, + 0.004353721633380214, + 0.6521564752588525, + 0.5193225975882179, + 0.3531136516142821, + 0.6042109003018462, + 0.9717609814839947, + 0.50336575292295, + 0.09839547682199568, + 0.693707227504025, + 0.6929387748006614, + 0.9458189029741451, + 0.3900614127096529, + 0.7189038264488747, + 0.7749619058772391, + 0.05730895584711282, + 0.9102195071996985, + 0.32349769262255756, + 0.8750303866429433, + 0.27924929298380563, + 0.8070466841112061, + 0.8505179365039964, + 0.9273026558524463, + 0.7595919479872886, + 0.0018566467706997436, + 0.7719109626433589, + 0.6320252552761753, + 0.04111139672265485, + 0.8985328913861919, + 0.692196641702235, + 0.45402836747716335, + 0.9958840211239685, + 
0.6438587518899679, + 0.2431396493138379, + 0.33640814892998294, + 0.9400634674863366, + 0.5022225266731181, + 0.9237797038913974, + 0.16947941422488189, + 0.2824449616754352, + 0.7207143778823896, + 0.7680278360521288, + 0.42997298385410365, + 0.12132103538746697, + 0.26078425732107535, + 0.47364679399759535, + 0.12143177048280573, + 0.48771609290246765, + 0.557814948896943, + 0.8067151331237356, + 0.604388530115538, + 0.5156660460617192, + 0.38887787293854403, + 0.45917232215821424, + 0.38825444935967446, + 0.6324515216003951, + 0.9011596052431289, + 0.8662183970737692, + 0.8635669855843434, + 0.4713601728171791, + 0.6554476034331919, + 0.03822285393933589, + 0.12727056013524096, + 0.44056423771523756, + 0.6642041835690415, + 0.18171601533575454, + 0.2202256545303003, + 0.4297324376868892, + 0.14064482502921705, + 0.7468835807879611, + 0.11741934332418758, + 0.16345808969078734, + 0.7568308140809707, + 0.3507695822123934, + 0.4213986122111929, + 0.4348870812442971, + 0.7364380578048201, + 0.26614469169119037, + 0.46776400749305136, + 0.40124209662947063, + 0.8460110711659059, + 0.5679872590196909, + 0.8435394156266646, + 0.0011210103673163774, + 0.5185842065917977, + 0.8299970215243967, + 0.4850737534468673, + 0.03027684420157717, + 0.5522701963038198, + 0.19643373348735393, + 0.9715678770025232, + 0.3111173348815115, + 0.23850059824347802, + 0.8107377961241657, + 0.3175927872524359, + 0.256503301091475, + 0.5163015460284459, + 0.41506962981932083, + 0.7338585365932639, + 0.2582054192433193, + 0.8041322758562617, + 0.561676698699639, + 0.24657691834013584, + 0.08442011062710586, + 0.8487920871270828, + 0.061119682149786914, + 0.7307080993083939, + 0.06292878663146373, + 0.1940136707463439, + 0.486657793760217, + 0.3969358894586932, + 0.3906640598232376, + 0.5834259770537166, + 0.09187113082663767, + 0.3544781787762782, + 0.04860997495346808, + 0.15871714987847363, + 0.6246939452512086, + 0.20296720926546707, + 0.957022379317305, + 0.44367157176038174, + 
0.9290509131265521, + 0.8682913713509118, + 0.6907669380183818, + 0.9399081726198278, + 0.40131000501767156, + 0.5763608461657702, + 0.8030455754136687, + 0.9418830529479596, + 0.8989929180692198, + 0.30735610053872786, + 0.5675171104560979, + 0.47001992017242433, + 0.6942936837486009, + 0.7309320605550313, + 0.8378931312416513, + 0.738446026608658, + 0.5658144466031382, + 0.3966239275247543, + 0.7639682229511765, + 0.3731413305456026, + 0.1272870354021688, + 0.720361900816688, + 0.9440101228660442, + 0.290161098210033, + 0.8550326556170208, + 0.6140725225603954, + 0.01712140044412025, + 0.4927436178531379, + 0.3660226408737306, + 0.6530372015242072, + 0.2140839732941855, + 0.017550546988452376, + 0.14085478210518876, + 0.9371410372713955, + 0.4638491087524822, + 0.6200050024275854, + 0.9480851639351124, + 0.733576316155986, + 0.9836125290093509, + 0.8124404041411033, + 0.1699968325589175, + 0.771334221456126, + 0.15875595220324012, + 0.9584952117184731, + 0.8588636547585764, + 0.8741716520262102, + 0.4806101624542982, + 0.9117137568688497, + 0.6827657221653612, + 0.6007715795453101, + 0.5509387234518162, + 0.8764454243152909, + 0.956512674285242, + 0.8726841640717216, + 0.8694962363845806, + 0.8340422695855997, + 0.5748168468192442, + 0.4907016331498991, + 0.04144802778650236, + 0.30342674898241684, + 0.3309972233725321, + 0.08025839620847708, + 0.7802373818560964, + 0.06414267567084697, + 0.6244273772369522, + 0.17870046511907478, + 0.9132598827329871, + 0.019217404507963276, + 0.8809613690445776, + 0.6813377874563836, + 0.5034232171973247, + 0.04304175515515862, + 0.9389437951885738, + 0.5499603200345702, + 0.600404758018704, + 0.5236455293452065, + 0.7934246891556981, + 0.34584725965462937, + 0.4746754608435698, + 0.33934002103755456, + 0.766883956427053, + 0.18024034058420502, + 0.10176144774438345, + 0.47131308699474717, + 0.17709381363370424, + 0.26713922471794216, + 0.4862165333782803, + 0.4322695891256726, + 0.12918164394572362, + 0.8422978921131469, + 
0.7819793497806401, + 0.7764414316998594, + 0.7265886834140778, + 0.7955805670006711, + 0.3121044843517543, + 0.5064374717148273, + 0.16707367572021192, + 0.9385530005700005, + 0.09440756102928749, + 0.7260312952130658, + 0.2921388229005918, + 0.7016827480743889, + 0.2553056615265713, + 0.6069042523238396, + 0.1686711617140798, + 0.6661685311187996, + 0.8907430895470431, + 0.29247779907894744, + 0.6063733859123678, + 0.42078644642790775, + 0.7596216310070045, + 0.3207244161166639, + 0.13504707001826888, + 0.5899394263055184, + 0.5318967033074015, + 0.14224038086716606, + 0.10152655456885329, + 0.9498448863720076, + 0.2689948231452096, + 0.941453488625415, + 0.6753604881338949, + 0.6619548994154616, + 0.22341997005710967, + 0.29227584659504713, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7389883463939141, + 0.07740565031301261, + 0.26758347449841524, + 0.2518735233657079, + 0.7310643707078577, + 0.018018527214290758, + 0.9378682094828321, + 0.4595389326489865, + 0.43255541931369257, + 0.44171186359714165, + 0.6390029982988026, + 0.4570484962778898, + 0.5013945476643936, + 0.773664844990564, + 0.22777727589303554, + 0.8052617313084214, + 0.504197710602281, + 0.1424626570022215, + 0.5880085209855035, + 0.9002833017573497, + 0.6583087952517872, + 0.14596636499065718, + 0.19864863224526874, + 0.314127860039213, + 0.10227209094466017, + 0.8006861810328124, + 0.038862349614462444, + 0.12673760624583574, + 0.35818205687111593, + 0.2156931343209424, + 0.2609896110679869, + 0.6591286446456107, + 0.08837786759689104, + 0.3765208135061625, + 0.7892536663225015, + 0.7030885980016678, + 0.3425763030596516, + 0.6492843745216476, + 0.35495528127620746, + 0.4805078807408467, + 0.5808461520431166, + 0.9123791598802871, + 0.21512840472728822, + 0.8927120581961654, + 0.03061449889454737, + 0.08134038856120718, + 0.4873099235676889, + 0.6916369625198514, + 0.5306796804351225, + 0.8304777862990069, + 0.12824736732629305, + 0.12842248682593838, + 0.17458922534835386, + 0.1829083604772751, + 0.5173284858790735, + 0.4068209458936858, + 0.46728958831771883, + 0.8088723911637784, + 0.23820369071008063, + 0.888961973440627, + 0.06394030812352636, + 0.07053360339022174, + 0.6875555456325994, + 0.5020143492270686, + 0.7077650227087292, + 0.5128203758125954, + 0.7397141745161299, + 0.7920348405980873, + 0.06200536656436273, + 0.3847023871151104, + 
0.7855879974628446, + 0.5223013599705607, + 0.08102478741666885, + 0.0629895995263573, + 0.4051076513692602, + 0.262015468144127, + 0.995292777311918, + 0.5636837351834537, + 0.16740385943047797, + 0.8947296984684618, + 0.5314878681065682, + 0.5355688283316573, + 0.46627368885619336, + 0.014086009550658751, + 0.7267750797174634, + 0.4060228161088898, + 0.33907351944620023, + 0.2550720616526545, + 0.5043055736649753, + 0.9082807995805905, + 0.8885018808545488, + 0.37930479678390594, + 0.9431033187554672, + 0.16255848899020076, + 0.18515418475740675, + 0.9765184711496413, + 0.8507696629028404, + 0.35565284949258635, + 0.18485765826692202, + 0.5277936124696514, + 0.21234811509405438, + 0.44486980471717374, + 0.4420088555993379, + 0.7407319934550769, + 0.9056770777596566, + 0.24841720228487196, + 0.6935139889038988, + 0.04681398059197439, + 0.829595968429577, + 0.746623795157349, + 0.16512006570990634, + 0.5992264677026412, + 0.40276157954655856, + 0.06493338342490451, + 0.0049014262712231416, + 0.6436335235485766, + 0.08107867167844707, + 0.38512947571694933, + 0.9932573409164348, + 0.7967447805389729, + 0.6564041260401954, + 0.3716076249350193, + 0.09194440548804161, + 0.6311100995202091, + 0.23107213439252527, + 0.18136930773940896, + 0.08740074146230492, + 0.6148428874127354, + 0.006559901756916564, + 0.9911552954344669, + 0.47798726402742664, + 0.9778261246727656, + 0.4924982960086912, + 0.6996946025317136, + 0.9210616999081693, + 0.6823430925510691, + 0.2947069472008217, + 0.9089773737811353, + 0.4763838989584793, + 0.9425603715679993, + 0.7245168920699859, + 0.09970230640959787, + 0.16386502243358647, + 0.5533954908361446, + 0.08762897819142246, + 0.6486470502341128, + 0.37362563245129554, + 0.836220066056041, + 0.0073470010789585816, + 0.6894600579493476, + 0.9540824866763601, + 0.7552468913002445, + 0.1218480717937318, + 0.9480752351939048, + 0.12134285469482364, + 0.03736472759032883, + 0.7607346884580436, + 0.340861298506808, + 0.4948750330248961, + 
0.468318084476103, + 0.7632801769400355, + 0.6023089678029014, + 0.7979464666023278, + 0.8168725052161434, + 0.28980368292726444, + 0.5342295965944848, + 0.9508484044180693, + 0.5716213584915647, + 0.14453800217114288, + 0.04129449279972208, + 0.22483199177972668, + 0.4124355355203435, + 0.696759584024281, + 0.7236697245063086, + 0.9746849678643001, + 0.6728711996725182, + 0.3381714104955309, + 0.3627191813727323, + 0.9350436663065589, + 0.9746225850522218, + 0.18329491526835495, + 0.7518163349196545, + 0.7434719166840087, + 0.5156549200575771, + 0.6085571101770215, + 0.5046836252751412, + 0.9322732489182661, + 0.6015246661666919, + 0.018196871170177875, + 0.012606241512445271, + 0.537536038118961, + 0.1854238778696723, + 0.20096222721206902, + 0.8699984305763593, + 0.5177835305924435, + 0.4805929851840014, + 0.12441662244824536, + 0.14855478978398207, + 0.6596706423435834, + 0.47834160529960457, + 0.8278234444819542, + 0.05242947607368609, + 0.591355391344712, + 0.0015469293614804869, + 0.26473004238819, + 0.2938412398716117, + 0.05162792596829591, + 0.820910896749562, + 0.011511082208297552, + 0.8170965724011887, + 0.23778709661641662, + 0.15130034606916343, + 0.03439152814326396, + 0.24198279604362372, + 0.3011743735590696, + 0.6094938341486238, + 0.1550822852257001, + 0.2466411287826097, + 0.6715543806826336, + 0.746687069224817, + 0.3492309604498389, + 0.220318338219472, + 0.07417656098429448, + 0.5999956362647416, + 0.925946707835502, + 0.18013841706249067, + 0.888518260101916, + 0.6253657130414274, + 0.46305906685057774, + 0.07733560207119383, + 0.567360506326563, + 0.901069202114916, + 0.5441341851669468, + 0.899226690820443, + 0.376664032409485, + 0.5234881668641809, + 0.5983950153539378, + 0.2838181832110501, + 0.167001215675458, + 0.4572712153063121, + 0.46540158302541146, + 0.9704873465917505, + 0.6416861527790223, + 0.5869953199424293, + 0.7103201049541732, + 0.1911403914802865, + 0.3321079474213857, + 0.6586314128974872, + 0.4504288683830343, + 
0.7125475896649786, + 0.23285571433310281, + 0.1863916641868243, + 0.007761999004842757, + 0.8034328208413234, + 0.7597603673357661, + 0.3428551679433728, + 0.5738049508201617, + 0.5140649665871343, + 0.45816388017771026, + 0.758657454917902, + 0.5407049406961493, + 0.27161104079328147, + 0.6689059786880659, + 0.9419975443206123, + 0.8755827786863017, + 0.04941600309030314, + 0.7715720693355826, + 0.4033867415840434, + 0.5405433184499941, + 0.8331120872092166, + 0.6271730005592198, + 0.4635708577978108, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7658434020939691, + 0.4484588557780347, + 0.20014804127153818, + 0.5753024140226165, + 0.13958830301853697, + 0.8532269774247301, + 0.4363365772569865, + 0.04211963674122654, + 0.08669837782557521, + 0.7130425224741993, + 0.23187282580666357, + 0.2922725579113702, + 0.8060069496395249, + 0.12295806351232264, + 0.9365518870500562, + 0.024415782324894164, + 0.1877115821909051, + 0.18638234818654498, + 0.3859200370116854, + 0.8812832226256878, + 0.7954785558512478, + 0.8102387692509151, + 0.011040969550112134, + 0.30862062670183943, + 0.483267713126608, + 0.7882822621144745, + 0.7763822460185802, + 0.3154082184386422, + 0.44506468589215376, + 0.045611517313897565, + 0.15438066442253662, + 0.8537698526579839, + 0.14743328494735586, + 0.3900350451932797, + 0.2588643089075897, + 0.5810297256380531, + 0.8774866003322361, + 0.6043008283599701, + 0.757456389454082, + 0.9284329582089073, + 0.9570046158892522, + 0.48491665379357296, + 0.532108146139854, + 0.40744058245736914, + 0.8496904255399494, + 0.8114027913584192, + 0.5471769081918529, + 0.7408029349281222, + 0.07837172073276821, + 0.9426557074878114, + 0.1756335323717947, + 0.6964273487865437, + 0.26383559115507305, + 0.5319147910354022, + 0.6847871156339033, + 0.02804577852166812, + 0.6827262234688035, + 0.9112446769339284, + 0.587551036540856, + 0.9053444586510178, + 0.3769790497267881, + 0.7242034278724583, + 0.2812903061812472, + 0.6823291641134823, + 0.08322655905080645, + 0.6028459979476734, + 0.18846409007242304, + 0.8389486183710646, + 0.5379495291266788, + 0.4617253482606256, + 0.8763783243353462, + 0.3171628414314781, + 0.3177064607725666, + 0.24428835341327915, + 0.4540617858222129, + 0.6958691394067466, + 0.006883755979162687, + 0.15144437141482503, + 0.023104958478796123, + 0.4931097448048505, + 0.23840774261453856, + 
0.8400254448572123, + 0.9579330549430756, + 0.13610373446835167, + 0.37446240424315214, + 0.29894983471917413, + 0.9135166680238457, + 0.02338424730327704, + 0.5342606048752624, + 0.5758050577131167, + 0.5979697912004716, + 0.27963327243677416, + 0.6106253268895397, + 0.8322810042944455, + 0.31853670622238384, + 0.43375819673047755, + 0.9485276775112947, + 0.06383347748382018, + 0.25233661935958984, + 0.014507014703228127, + 0.8384998826361951, + 0.5935186940246622, + 0.7297767288705632, + 0.6689503643937548, + 0.6578437913390262, + 0.6491211512738853, + 0.2918013256298655, + 0.19715794143763865, + 0.545665231293884, + 0.5194096618006525, + 0.11238728762609118, + 0.574296549059155, + 0.7789017536242545, + 0.9180294474416062, + 0.5667477511445961, + 0.7561461053470839, + 0.14130468801994667, + 0.8435867151607894, + 0.0310795829702315, + 0.6195686733271077, + 0.573476375452811, + 0.15555467388946398, + 0.6740909740760708, + 0.11313252776219185, + 0.06124055493365488, + 0.8132375401805939, + 0.101612999560444, + 0.6771957699704143, + 0.8579099495524767, + 0.7245163777745026, + 0.7398301510718025, + 0.8580605139586261, + 0.35043118493387726, + 0.7803241419901414, + 0.6407986081788196, + 0.5571625509127649, + 0.4802672210574963, + 0.9422095419779251, + 0.5404924898922674, + 0.823236629180382, + 0.7676220746909378, + 0.0274141036139689, + 0.520136700270328, + 0.036304431915407775, + 0.8717756930820605, + 0.7480308446262529, + 0.9622867843254624, + 0.4057872003782853, + 0.1436900472845828, + 0.39023073132066644, + 0.7702178388944013, + 0.22423446031769823, + 0.20901503604330074, + 0.7568656825101865, + 0.09586072145429347, + 0.8180737511241616, + 0.2599407267954814, + 0.504034436362323, + 0.1876699078640358, + 0.8765308167179109, + 0.031157342048780623, + 0.012144669987189882, + 0.23185007473676156, + 0.24311846417513328, + 0.7169750793907497, + 0.9394979804574062, + 0.09981941385728033, + 0.7226987055785326, + 0.6286822007911628, + 0.9087979167759883, + 
0.38398412373969115, + 0.730116891377213, + 0.7659112506467587, + 0.46470706680037854, + 0.57321294644147, + 0.9783992851761845, + 0.9139856828182921, + 0.4521099723504216, + 0.4595251494850715, + 0.12235014636787944, + 0.2011742211579901, + 0.48305604653941303, + 0.24389186626948656, + 0.144518714598037, + 0.5382916151261778, + 0.03140839434700338, + 0.4595043833347955, + 0.8315637907833437, + 0.41899772050340534, + 0.6935291025306656, + 0.8699410533227272, + 0.14173930063189877, + 0.12292092998392357, + 0.5544369498564855, + 0.3960953551142714, + 0.6924040606005107, + 0.38334191058524747, + 0.0019248664110667058, + 0.12197397071936245, + 0.6457190482195965, + 0.4693808992330756, + 0.11171008119720982, + 0.4292459161245784, + 0.056299034535973846, + 0.1271903755730086, + 0.9809251842948876, + 0.14834014530082362, + 0.21755471328285014, + 0.06375458623195385, + 0.8231773274592316, + 0.43457360253037414, + 0.4850671837724576, + 0.6310449003715752, + 0.3398682350233214, + 0.8753460330091902, + 0.5502682702103808, + 0.9106270711472852, + 0.4931329328752394, + 0.24052089506544472, + 0.6802936791167834, + 0.5927297147190784, + 0.3980849154805697, + 0.538300215664714, + 0.9406454995717962, + 0.7675919372173945, + 0.9190230007568897, + 0.5400035025629444, + 0.031999205139682796, + 0.6137935659191043, + 0.08107431172779145, + 0.3450239724341805, + 0.009934579733899884, + 0.14177134438156302, + 0.14503629565026654, + 0.8091157214456177, + 0.1471385433037462, + 0.7471433095253147, + 0.6124242848104068, + 0.19116441454410715, + 0.7029946293578456, + 0.5365361030960988, + 0.6525337349854902, + 0.8228755552163319, + 0.2922088976747118, + 0.7286938720327818, + 0.3844349229186699, + 0.8989392402055053, + 0.7125842495556618, + 0.13907702625647633, + 0.5575000629541691, + 0.6696978852544698, + 0.5528588191379114, + 0.9832868555901813, + 0.9863181152580893, + 0.45581832000701317, + 0.6727005080563465, + 0.14391606339669494, + 0.9510119665627714, + 0.034111647984197746, + 
0.6796644001097146, + 0.28325018411534675, + 0.955629125734741, + 0.9441111778509117, + 0.9264256863768446, + 0.3839316487237239, + 0.18605786062763552, + 0.24444268470075714, + 0.3239293148572536, + 0.7988031842106109, + 0.003652022852111303, + 0.8138434956475238, + 0.972919162385759, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.110457185881497, + 
0.346397455595012, + 0.2729020484734431, + 0.1083863754216795, + 0.1675703588717632, + 0.477127172211414, + 0.8192016456448884, + 0.37398922212992947, + 0.29262177724592997, + 0.19424683250264851, + 0.6485093642760426, + 0.514629777013557, + 0.6771547015748663, + 0.09216669454504034, + 0.545711638440446, + 0.6307337374123051, + 0.7495152135273823, + 0.19629161605155776, + 0.005293595469666013, + 0.3625472467980002, + 0.2751789066131015, + 0.21962298792153057, + 0.4284601455139285, + 0.4098425230443963, + 0.2752587337690052, + 0.05137991928971608, + 0.3202977353363913, + 0.4310077888023446, + 0.6025349603863696, + 0.8486122112898536, + 0.7008757605588825, + 0.34144938572310357, + 0.5686608542788599, + 0.5569579088209491, + 0.5542922854832517, + 0.14505024216963003, + 0.019436234481995718, + 0.7266879803102998, + 0.8631104395021726, + 0.6287358324055164, + 0.9230332530056905, + 0.012121750214663773, + 0.266356455942249, + 0.29043706156619076, + 0.5123086994428299, + 0.9846536571621723, + 0.4451534491986231, + 0.9657195507769762, + 0.8229338392992638, + 0.4319977818288674, + 0.8451538021327818, + 0.8200658665410974, + 0.2483777025372399, + 0.23243483589723768, + 0.6584446572661563, + 0.025354059183320787, + 0.5668545411293267, + 0.34743696271912694, + 0.6753131459458452, + 0.8759766805769359, + 0.09161809230201745, + 0.4204089270576824, + 0.8650744605917171, + 0.9025853847944545, + 0.08991798877156842, + 0.7663196597137937, + 0.7609229643910963, + 0.7968824639528654, + 0.7982952618845935, + 0.32505132567404005, + 0.4503948970340629, + 0.537990984274354, + 0.8699690342860524, + 0.8012358284257299, + 0.0521440293335389, + 0.1628433377367976, + 0.865339352101915, + 0.9974542833337509, + 0.4184327769855637, + 0.8046540003081939, + 0.2410263253220044, + 0.38955432595792994, + 0.20322139789881055, + 0.6618530684952463, + 0.5178779242802152, + 0.2655019323791298, + 0.9247637083247597, + 0.29853901678616723, + 0.9387610369102437, + 0.22500284346279054, + 0.16219314369523896, 
+ 0.41648593550701585, + 0.13789486515199068, + 0.9504336395764662, + 0.4647785411275833, + 0.39343576785993994, + 0.9652390943836725, + 0.9274913629617861, + 0.19613460443243247, + 0.5160405218237385, + 0.9385202108571628, + 0.21591330787338403, + 0.9437563044282649, + 0.41071197599028153, + 0.43480523057342557, + 0.6034987756683131, + 0.4002295871259327, + 0.8385991890888439, + 0.4866706863976924, + 0.05856864439748655, + 0.08515230026036491, + 0.6545922725314345, + 0.0358022694433745, + 0.8071949520180836, + 0.4877221228569868, + 0.028628746088782964, + 0.20063447638075604, + 0.7151286837756502, + 0.5402796186589688, + 0.11435729827840702, + 0.2979595447711083, + 0.25536210422029126, + 0.2878193817104582, + 0.12796506022323428, + 0.663569377481684, + 0.09607414656750368, + 0.9098807629870131, + 0.3894181065887493, + 0.217839006088278, + 0.5607930712657042, + 0.07799327665391276, + 0.35371581951523445, + 0.03164807497620137, + 0.8777953478453259, + 0.9621385789411242, + 0.6806771469477383, + 0.0027241803325944725, + 0.6944632499552538, + 0.11044592193012814, + 0.6108704529131513, + 0.6664523869186756, + 0.3088302850857947, + 0.9932342604333486, + 0.07668062061479053, + 0.3275437407314057, + 0.5156488900153687, + 0.4361623085868662, + 0.5169226303879305, + 0.8822102510382267, + 0.5604171783545818, + 0.3536093364032804, + 0.33161805144909073, + 0.8341336664732593, + 0.5008696086309401, + 0.5812748215288497, + 0.7093630319959778, + 0.6543286590390945, + 0.7465920586351917, + 0.5906840413129445, + 0.19314619197103522, + 0.5668190854040339, + 0.8923833631763576, + 0.7848802379167125, + 0.24842576000539407, + 0.8118266315574242, + 0.023477569950988042, + 0.7532195123155386, + 0.9620861996697827, + 0.32757537002122084, + 0.5472339667200282, + 0.08579977985722398, + 0.551069523878771, + 0.555256612188501, + 0.4744672919397813, + 0.11267288744207593, + 0.8898073123794926, + 0.9203251225795509, + 0.75950261595991, + 0.028815056694806218, + 0.03269211136047323, + 
0.9370349609091597, + 0.06667830388497886, + 0.0702541202543765, + 0.4811257193262529, + 0.5468523567381457, + 0.6393426833736836, + 0.4554551795692734, + 0.9272572666725544, + 0.1729395730341049, + 0.15501583262226715, + 0.21141560934429937, + 0.4820019923477086, + 0.8191432733734665, + 0.6873981041605213, + 0.8651508625961432, + 0.05725122672515359, + 0.9259509251189175, + 0.5398821914237821, + 0.4647284718667497, + 0.4749231575081784, + 0.21439393230772963, + 0.2579398594254134, + 0.9708449605395872, + 0.12773641650492673, + 0.260390530729226, + 0.8237080700233929, + 0.6534195100849618, + 0.12335697463010498, + 0.22692614916535414, + 0.7516527281314568, + 0.29092463751168984, + 0.6077258812403299, + 0.11583949630007695, + 0.5757771081366516, + 0.5728555116484396, + 0.49005537358864426, + 0.575958911206891, + 0.21173799803936055, + 0.9336509959017368, + 0.4914023993531246, + 0.2213322737946819, + 0.6562703582580695, + 0.23849340835747435, + 0.518606425913763, + 0.6341930008371865, + 0.07021963704690537, + 0.1390582423532981, + 0.5224446580796269, + 0.2768616137303268, + 0.1387848773741488, + 0.1675005224937116, + 0.022926946423551198, + 0.9939529256845033, + 0.9444304308969173, + 0.9037468325261414, + 0.988261355260223, + 0.6545503986542881, + 0.1094588589546801, + 0.19583402336156863, + 0.8037571948475324, + 0.3386783052409815, + 0.5193847296233982, + 0.6993197249789133, + 0.8201113759500455, + 0.0904213059931871, + 0.4833680301438109, + 0.6977073213488211, + 0.0961682978459435, + 0.4317700940133713, + 0.25095830589189205, + 0.35499630352858136, + 0.3858301885620561, + 0.5909430311054535, + 0.6090623335064049, + 0.8403388840215907, + 0.2203871820169212, + 0.44704040959411084, + 0.35233223134043123, + 0.14231757389282162, + 0.8591774693421538, + 0.9915454567591488, + 0.400170260600235, + 0.3612465032799286, + 0.5103674927351255, + 0.08449199698497156, + 0.6312736667177735, + 0.5745873084467717, + 0.534837423261381, + 0.12689603843821762, + 0.927256786606383, + 
0.8551560927095075, + 0.07961557346747206, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.33911734894603585, + 0.5910756652557415, + 0.06662986874779053, + 0.017392222492692122, + 0.24405228072870488, + 0.00788958407872642, + 0.1323452117059134, + 0.64422714348506, + 0.6385414766258448, + 0.5357538988125191, + 0.8125779502454057, + 0.7238613412713313, + 
0.8755539934243413, + 0.7133232806313118, + 0.7609839981943451, + 0.9727353981143915, + 0.3656694537393448, + 0.33509839503813366, + 0.21984992531434233, + 0.2788126203480148, + 0.989778527477444, + 0.5850324340263784, + 0.21486066229408163, + 0.45858713372065474, + 0.6804042169539273, + 0.8463753970421294, + 0.0019919964858199357, + 0.4437609858627347, + 0.886259510667948, + 0.5878263829178927, + 0.026333045135565225, + 0.3014545650991026, + 0.32122146810751995, + 0.8171650667891336, + 0.6442975824048451, + 0.9601670708450253, + 0.46279376838224684, + 0.8253959049385001, + 0.5585782183352858, + 0.36781266380642164, + 0.5599573171410683, + 0.22512554832323817, + 0.44318480118885717, + 0.9804525071663491, + 0.789738769562747, + 0.636631938256863, + 0.8562632247719897, + 0.16127113278716054, + 0.16745988829368452, + 0.8098128260619872, + 0.6832636543607493, + 0.6231690644599602, + 0.9278832973815989, + 0.9320730659873574, + 0.3317528720253524, + 0.1971410607971622, + 0.50273969636831, + 0.24382546903627433, + 0.6420928202680382, + 0.22677582633383397, + 0.3728218841248855, + 0.6144656609886606, + 0.012473466940510325, + 0.4742789366783612, + 0.43693359964853173, + 0.2111290831550624, + 0.45441652471237903, + 0.7434756319353291, + 0.8699992663403898, + 0.4373846121161843, + 0.540278945432416, + 0.5656166767331365, + 0.17985583847578201, + 0.08070582084734468, + 0.9289275673745219, + 0.4974697714563904, + 0.029137928463804474, + 0.05715894106848618, + 0.3260682569236243, + 0.8063095982365737, + 0.06973799643233103, + 0.34677339899230586, + 0.18619063557691684, + 0.7314555384049621, + 0.6005673958299734, + 0.7709179948508474, + 0.36703815753029445, + 0.3495356863711895, + 0.6437282605817535, + 0.9226499775645752, + 0.41973825161374934, + 0.3354251370554284, + 0.04965028509717184, + 0.005880004361248914, + 0.7557722949183431, + 0.784080478445802, + 0.3248355212026095, + 0.33426392482795697, + 0.6461382302653605, + 0.3692090950638399, + 0.5993848146965396, + 
0.609836614120597, + 0.48072500713347677, + 0.3172500058453548, + 0.882365910511107, + 0.5979512189284536, + 0.44315081480224183, + 0.06183872571323357, + 0.5198203037016257, + 0.679707909739096, + 0.013596916473211706, + 0.594633797665965, + 0.14763335781413833, + 0.6071363374334533, + 0.7022525663594434, + 0.4388051532158317, + 0.5412279949702886, + 0.49717153545619164, + 0.36571935758307683, + 0.9678199781398154, + 0.05274652452949369, + 0.2651093874451851, + 0.6259165341074122, + 0.5417410577066006, + 0.5592408071942575, + 0.2186213158503424, + 0.939948607735632, + 0.10736032510658122, + 0.3476104746194667, + 0.7600574087272135, + 0.32288687967206764, + 0.5906438943933574, + 0.7878247969127772, + 0.8711852157801344, + 0.7401813472919951, + 0.8573585772674752, + 0.1695957161260987, + 0.16649530226853937, + 0.13555799414241676, + 0.7499159854776997, + 0.5809156370747466, + 0.6133430992346649, + 0.409201124797154, + 0.3789287686068691, + 0.817602676763262, + 0.43576959236913915, + 0.0873048863620759, + 0.5261582532434398, + 0.034503088566865014, + 0.9083987854437376, + 0.6222244657965631, + 0.30740534181062773, + 0.47336767956517656, + 0.7952211380906614, + 0.5423805318035991, + 0.9634128837361462, + 0.7617801530466735, + 0.3015874026813069, + 0.15887422384485994, + 0.08267573889736679, + 0.36248458012369766, + 0.8869780897386745, + 0.8348281814451475, + 0.6278059405753478, + 0.9225404024690042, + 0.6385629454668169, + 0.09128466624592846, + 0.48411915205236533, + 0.022040856519291552, + 0.424264937686758, + 0.96247474990818, + 0.44705059066223585, + 0.5858241458997088, + 0.15621822494181803, + 0.7837441947981376, + 0.7498357051690385, + 0.856492545062612, + 0.041857536325446976, + 0.5293361252902341, + 0.49753012508749894, + 0.1380945147552355, + 0.2706557182733671, + 0.5288814183206524, + 0.6159498726293186, + 0.6018172384639489, + 0.032507854476437226, + 0.6124559517051499, + 0.7310717053325555, + 0.2930079141807612, + 0.8295640445140852, + 0.8207712664549597, 
+ 0.0006130839486564454, + 0.4395026411777847, + 0.7285900181449885, + 0.014467480647615938, + 0.37891756327998005, + 0.09617948924474273, + 0.13770027869736168, + 0.35443979169533024, + 0.06467398382073486, + 0.7313807486411453, + 0.085542708571817, + 0.05067308393557268, + 0.6017551285956273, + 0.30649055361496447, + 0.01852965051811306, + 0.049506506762836966, + 0.15309844048655807, + 0.73397694885687, + 0.186387445554707, + 0.1501527318621213, + 0.7776016296014324, + 0.14054399057748967, + 0.8134800604631495, + 0.31533099998918124, + 0.15667172224247183, + 0.9778208885928666, + 0.7584875823907452, + 0.46599182061100297, + 0.03021635973579806, + 0.9726393850015339, + 0.828887386739987, + 0.3419266665341274, + 0.32974467126432716, + 0.8391978238311073, + 0.6215025200817137, + 0.5931367881983234, + 0.3825186602895979, + 0.03139448457994143, + 0.6400143268110565, + 0.17483739156490963, + 0.8046957090079055, + 0.8409551009406433, + 0.48503745888701366, + 0.11292872692499234, + 0.8263894348017284, + 0.2464121938938435, + 0.15781112803043973, + 0.0298939857175895, + 0.49930893276022026, + 0.36800630745810115, + 0.7253414742410499, + 0.9696726865069838, + 0.8418506103990868, + 0.019264303448693676, + 0.27881771881996487, + 0.4945933666564626, + 0.7844166722513558, + 0.20922542168460456, + 0.4273505872199257, + 0.9985736120943277, + 0.34184328646679907, + 0.9196035236558062, + 0.47861782385439244, + 0.07531601920177633, + 0.7185987311654768, + 0.2827826768750704, + 0.020677481127253317, + 0.23107716328730177, + 0.23722963005376518, + 0.19224920405921286, + 0.04912390822078061, + 0.3946644410397515, + 0.15689674529769004, + 0.8480336202701808, + 0.6669212686328694, + 0.5992490278072569, + 0.6934786900076532, + 0.7259936900551144, + 0.9208908003218215, + 0.9184395417624108, + 0.16356812949417132, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06174117900982046, + 0.07600151442071679, + 0.5799424630370211, + 0.48921569619097927, + 0.13798584146945847, + 0.7038072423287693, + 0.6827160884365773, + 0.8540313293088498, + 0.012611507623567442, + 0.8059627931405284, + 0.9360337353773889, + 0.4731276079321931, + 0.35041928857685234, + 0.9934743712719277, + 0.7352228624783738, + 0.0, + 0.7563509513433027, + 0.8503478834743483, + 0.1376126113738475, + 0.16878593756180382, + 0.44863707051828616, + 0.31318381692816166, + 
0.6693740081942847, + 0.4802425368088651, + 0.8886089200288345, + 0.9923725484387749, + 0.7745197828425653, + 0.2849054475638024, + 0.38834605815962153, + 0.9222273247346949, + 0.6200707440077958, + 0.0, + 0.48371927402718495, + 0.7970120332604801, + 0.4680168030802708, + 0.3987753044485345, + 0.5283524344565729, + 0.610834487901371, + 0.5280856315219331, + 0.740852680410049, + 0.04290730232514961, + 0.845895757541442, + 0.6819692531358216, + 0.19729253727895313, + 0.22888532622451452, + 0.28503701748533616, + 0.9932724706350178, + 0.0, + 0.983220450609312, + 0.8285848300454144, + 0.7815153862863152, + 0.6212764853437488, + 0.27569849682157654, + 0.6823965289718269, + 0.10330941923817205, + 0.1822134615284915, + 0.28502921955235816, + 0.5731426263532864, + 0.7543123436003192, + 0.8462546641488713, + 0.6251885672305091, + 0.9123727575142104, + 0.6195961287452106, + 0.0, + 0.994454807209054, + 0.6531831607546182, + 0.8850850711692716, + 0.4613473720661614, + 0.9550553617348797, + 0.3159130385611112, + 0.46373756229711927, + 0.24663453343018316, + 0.4846367468682897, + 0.4434866286315431, + 0.23971926436094648, + 0.12309806832265668, + 0.7623154408654318, + 0.6251088115596354, + 0.21854723276407206, + 0.0, + 0.9102582312286879, + 0.5903464041026849, + 0.26558269705971305, + 0.9043380304636407, + 0.9484520007081532, + 0.2589539532391051, + 0.3872594740253491, + 0.37378291165720157, + 0.816067630567994, + 0.2648151053055381, + 0.8560305536139433, + 0.8172004660367465, + 0.4362680673750108, + 0.16823098693563443, + 0.9749239371485767, + 0.0, + 0.08328294346257348, + 0.2592457574205125, + 0.5971494223497671, + 0.2884677893053279, + 0.838964090279667, + 0.8306603710112038, + 0.8445326611522498, + 0.15684945965477548, + 0.7160944513737847, + 0.41340369493789153, + 0.8308672785934286, + 0.9929629430818431, + 0.4146565988318993, + 0.8894523892337659, + 0.5862612894565067, + 0.0, + 0.007585375033768327, + 0.7258673347700841, + 0.384147794314181, + 0.5147599026113846, + 
0.4591114931972913, + 0.1054628479784595, + 0.14549599475849628, + 0.05804942268392754, + 0.4959392980473104, + 0.9695160611097587, + 0.28014244029495816, + 0.8163055667453062, + 0.19444417411246517, + 0.049391925066840314, + 0.2581346155941424, + 0.0, + 0.6575385725326951, + 0.2983469669100758, + 0.0795452234285362, + 0.7374140365425506, + 0.43631960912100864, + 0.40450476726612095, + 0.08653651334989632, + 0.44335856245670846, + 0.6384421302257155, + 0.1825589595147502, + 0.5833463290167716, + 0.22548546642264444, + 0.6215354530319409, + 0.679417441127222, + 0.20719097668798836, + 0.0, + 0.2990333922479931, + 0.23266236713461463, + 0.7093531714206968, + 0.28763078443712586, + 0.8292195420512238, + 0.2460484224594104, + 0.5766853060082714, + 0.5803649708848722, + 0.27666801046108047, + 0.49079397771001254, + 0.5572928108735016, + 0.40419160833352064, + 0.6647228862844003, + 0.2226676231823268, + 0.7382101833227492, + 0.0, + 0.13238092838299198, + 0.4390761620158786, + 0.47886198625417786, + 0.2573973326878555, + 0.6340605573999117, + 0.09595970560295652, + 0.9283078509113353, + 0.033872446557603575, + 0.1356957259131223, + 0.5138271773752828, + 0.008945521950959323, + 0.08153264004585492, + 0.8428100354980886, + 0.6570716424674401, + 0.22653439920927576, + 0.0, + 0.13890804052780736, + 0.20631143030205945, + 0.20361778826617072, + 0.7086671927809276, + 0.22070275324013533, + 0.2582160463405221, + 0.48070313104894336, + 0.3995191212679662, + 0.617109091994503, + 0.42214998877379684, + 0.7072934188630184, + 0.8671973547518749, + 0.7470089973668257, + 0.1366927927546273, + 0.5434581072851903, + 0.0, + 0.2823221332770227, + 0.20508752670639585, + 0.39740444258894847, + 0.07595723211603778, + 0.3096210669410986, + 0.7184988011100494, + 0.6025256099554165, + 0.6354374126975576, + 0.385463200160131, + 0.0928296071101008, + 0.3525753102840702, + 0.20957326823990885, + 0.4802903549870492, + 0.9690624578776225, + 0.13676736147335866, + 0.0, + 0.686623785781755, + 
0.9130707080458179, + 0.9234363996138104, + 0.34890599819115753, + 0.38977961292050134, + 0.5116668807015653, + 0.84166432004193, + 0.7819906543590163, + 0.08923801278457466, + 0.9690908414984437, + 0.278108811624029, + 0.4999413321151954, + 0.5591859973668659, + 0.7073266911670176, + 0.1422391059254624, + 0.0, + 0.4057627556961917, + 0.21146772543661918, + 0.5634133269983861, + 0.037769134940686566, + 0.8151886861481233, + 0.7607018115481479, + 0.14208057166618826, + 0.8513425475228935, + 0.7638681122694169, + 0.06901815544986045, + 0.6876632923779983, + 0.8973736477027696, + 0.5559129808305836, + 0.8252555123646345, + 0.6856913667658722, + 0.0, + 0.42384653406104467, + 0.27012612692098714, + 0.6695106398095443, + 0.3742150351977427, + 0.28137738866555895, + 0.8925474177627563, + 0.7689346923944991, + 0.15039485554444054, + 0.2198867412877108, + 0.9773473910563464, + 0.4904760913221584, + 0.4529885491512896, + 0.7025569269203741, + 0.8178829541384771, + 0.6893748618884612, + 0.0, + 0.5133038946262652, + 0.4744857756432088, + 0.9678259401549132, + 0.45244961281531826, + 0.4478388053971142, + 0.7159753212814014, + 0.908344992627332, + 0.7241998216443456, + 0.26171159497471774, + 0.14014769521659476, + 0.030697059613750688, + 0.2907886653413483, + 0.7836002284511876, + 0.33840274361446054, + 0.7646787710838534, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.474056182824796, + 0.3165620817480945, + 0.9265538041938892, + 0.9342000005155301, + 0.36132098104923716, + 0.08365775403277442, + 0.06705679406513887, + 0.39287075117417114, + 0.8937080874572724, + 0.648185474712454, + 0.778448416936745, + 0.9767482027103868, + 0.23666493426387136, + 0.7135820313338745, + 0.24086980177336992, + 0.0, + 0.7864628505798014, + 0.36544859553311027, + 0.5269920009830543, + 0.3650551326938446, + 0.9595741806623073, + 0.6168788855026891, + 0.6898874527808591, + 0.005133070328970812, + 0.9374905850337778, + 0.02887522737614967, + 0.8275723004502537, + 0.1791118276074677, + 0.5747003924240637, + 0.5043118982317626, + 0.5437136134301128, + 0.0, + 0.3579401402277915, + 0.18086654491352694, + 0.6377375758753615, + 0.783614499834809, + 0.24493589653459602, + 0.015644449998551524, + 0.7975501607023373, + 0.5568421534623479, + 0.6870152141824932, + 0.902584785060954, + 0.2951865658998095, + 0.47032942155973456, + 0.011264641299907785, + 
0.28211201923152873, + 0.7162861520545488, + 0.0, + 0.051129959395827096, + 0.6640764544139515, + 0.9400811171049379, + 0.42017831903269565, + 0.9227303933000318, + 0.5264467132662293, + 0.0032063387045777603, + 0.400876142501678, + 0.8660072278268933, + 0.36567392305213464, + 0.0019250327378541598, + 0.16217350671796626, + 0.5235039486497978, + 0.8110395166227209, + 0.594658948063741, + 0.0, + 0.4372674204257446, + 0.0007410697213198203, + 0.09676149403491074, + 0.9524412366179466, + 0.5263219620202745, + 0.0012098621523040087, + 0.2911296764898823, + 0.7421432247015287, + 0.7401414123541316, + 0.5179793618045656, + 0.31186335072704974, + 0.1635244015846663, + 0.6469999169636548, + 0.40824733481821995, + 0.5510760695437954, + 0.0, + 0.13315639773231036, + 0.7644906223723946, + 0.9771764007823234, + 0.31290973574706693, + 0.6907490621323701, + 0.541084535833613, + 0.7927264988564006, + 0.8990037512449554, + 0.5845797111419688, + 0.00903877777354778, + 0.7823740403085887, + 0.14719912459303908, + 0.9609055044681635, + 0.929303161102385, + 0.28218816695782356, + 0.0, + 0.1560219158981737, + 0.048823581035504215, + 0.3145750434837641, + 0.333809199850141, + 0.38435718074696057, + 0.9463743594644852, + 0.8755307834415003, + 0.2646458559895223, + 0.48302881332725933, + 0.43342440313876085, + 0.5304594782869321, + 0.8250154137794365, + 0.031262751310596304, + 0.2908893487734502, + 0.9790276867210764, + 0.0, + 0.27384907486598786, + 0.8886550273938056, + 0.13015741816987858, + 0.018933245117345843, + 0.44756642341828445, + 0.930805454547134, + 0.5315693199453522, + 0.04800293031999969, + 0.13121791296797802, + 0.3770263276426552, + 0.9573357353050627, + 0.731075283461769, + 0.2556723867425599, + 0.6266347391907159, + 0.34079512381342236, + 0.0, + 0.3765067715933418, + 0.043843836217736354, + 0.6745193733917653, + 0.6148133428760904, + 0.7874007689029296, + 0.975378380949056, + 0.32267184760929657, + 0.07138509896092038, + 0.5944444531568308, + 0.3930701950014943, + 
0.8381866821048932, + 0.7738689778041125, + 0.2539080362514551, + 0.6290301935855606, + 0.5220196079295485, + 0.0, + 0.9615222933019792, + 0.2354975097327563, + 0.18568100609014015, + 0.39036266224192295, + 0.2189762181045023, + 0.7312832780866552, + 0.6605431700617775, + 0.8853431894279555, + 0.6419042996764052, + 0.5095849930468996, + 0.36421613226104477, + 0.3380795915577818, + 0.2686395134421844, + 0.9842265081658566, + 0.4379634875906535, + 0.0, + 0.5407503001119387, + 0.07623266353861846, + 0.6673808576960704, + 0.9737284041729836, + 0.4131365103670408, + 0.3550107658196838, + 0.9384627533537213, + 0.21639845447255202, + 0.13765303280451724, + 0.9548033527505275, + 0.09492979213038455, + 0.7372403339241641, + 0.516512071087104, + 0.5551737030768077, + 0.23014987007243093, + 0.0, + 0.340729366104226, + 0.19180722872392997, + 0.2129648381099467, + 0.5389992304596721, + 0.7000110795177936, + 0.29425404361691787, + 0.6378800316497563, + 0.7996638799391259, + 0.47498542817337597, + 0.17260198610286515, + 0.5743632736335481, + 0.1931032500556582, + 0.17978511180694778, + 0.4360415544126278, + 0.44263087569464177, + 0.0, + 0.4144518719787774, + 0.18818185057466297, + 0.5928034714446895, + 0.8942289983089009, + 0.23797898188037647, + 0.062121854287130795, + 0.47381356672041586, + 0.9763840039816348, + 0.9372405956170919, + 0.24360648931241047, + 0.22191012323761294, + 0.1490105454723245, + 0.7538185647608706, + 0.04107327068170108, + 0.4173792121384853, + 0.0, + 0.901732368492859, + 0.3426560703580187, + 0.030404723947015433, + 0.5566004294762759, + 0.08080519202078185, + 0.5079773285088168, + 0.790909456101359, + 0.7672567448181307, + 0.10968236651551133, + 0.9439606846402825, + 0.3512568644747548, + 0.9111359964391323, + 0.026597001614222515, + 0.22919268514603797, + 0.07904414301138918, + 0.0, + 0.16969627035162727, + 0.3787333525187946, + 0.11877626040221745, + 0.5109201940895305, + 0.02974750198426057, + 0.09956280813554375, + 0.656595738477664, + 
0.21651043412280802, + 0.7055280441325609, + 0.8884260674640121, + 0.9839590462832234, + 0.6978171328423688, + 0.5309184308545543, + 0.3920484946662971, + 0.29169117724653026, + 0.0, + 0.7404811365576643, + 0.5622367782465371, + 0.6079204669333379, + 0.025302754483026768, + 0.19818767899624412, + 0.8440007755486699, + 0.23818473918145966, + 0.8337872993833937, + 0.48485333752760684, + 0.17747545681899313, + 0.8948480967993708, + 0.5161504735576508, + 0.194678675750529, + 0.49331119239386456, + 0.4220809931523001, + 0.0, + 0.10512015499408911, + 0.17195832267247402, + 0.7903650693158433, + 0.9377919655673319, + 0.9637717109507615, + 0.6567797027748701, + 0.29221016777839015, + 0.6157257035076474, + 0.2072637613549223, + 0.18576628852966004, + 0.3135101746928498, + 0.6511645058069245, + 0.3779983592021807, + 0.7355779050524593, + 0.6774781950754846, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.35263305933459177, + 0.12023138330979999, + 0.42899264776221846, + 0.9482981928899138, + 0.47645362665459823, + 0.4349572162206694, + 0.1263032738364399, + 0.2127506995141395, + 0.5331587133291521, + 0.8817092100155618, + 0.0640676066682464, + 0.029059099228687102, + 0.5760921142803725, + 0.6027473936155499, + 0.6616242454180209, + 0.0, + 0.6190635943050966, + 0.46801299531478924, + 0.4530096690445389, + 0.5370158686726986, + 0.3465069310400354, + 0.46553152686085963, + 0.8296642336644061, + 0.8692434992199101, + 0.7238906179329939, + 0.045020363985128875, + 0.2731657149659644, + 0.5615284083328184, + 0.9671153916215034, + 0.9206606202169182, + 0.9760409262237774, + 0.0, + 0.6569010518132233, + 0.9154093664629628, + 0.6673557183096349, + 0.14399284185201, + 0.550496768676035, + 0.7471528576398411, + 0.40102385558626363, + 0.5463145684918219, + 0.4447087288051237, + 0.9111472380912586, + 0.015540583390663554, + 0.03911471667685218, + 0.37807712808342175, + 0.9561858317984128, + 0.0019720945481300767, + 0.0, + 0.771150181212638, + 0.3461520909903373, + 0.7087447442412279, + 0.19610213711677715, + 0.4986516095770759, + 0.31709197750263796, + 0.5951965601721589, + 0.9181883285496246, + 0.2448366349652873, + 0.6319819853679898, + 0.20035792611947967, + 0.6627936863882162, + 0.8184223806569545, + 0.952254987469469, + 0.05723820961191328, + 0.0, + 0.0570997089403803, + 0.01073694360583044, + 0.11647285480259817, + 
0.01554626208595622, + 0.5430812412465242, + 0.8748034467656577, + 0.4093432836700692, + 0.6434387605847418, + 0.03727156575564883, + 0.6067375130703099, + 0.15784143655681837, + 0.8012297575667066, + 0.053081216742696835, + 0.5158146600263929, + 0.4114889213972587, + 0.0, + 0.7428968030632752, + 0.7062838976395889, + 0.17540724723726264, + 0.10303584410804101, + 0.6100351917714626, + 0.39198790507487, + 0.2814589646144072, + 0.8331504819118772, + 0.9330622580837382, + 0.6076216052900394, + 0.20729536710070606, + 0.8053762153718919, + 0.5734467121152836, + 0.7440050954925765, + 0.1972235766321545, + 0.0, + 0.5800125356591301, + 0.4579129461442367, + 0.6006100457854232, + 0.2521717031142888, + 0.35477121085778096, + 0.17031012420207237, + 0.08611523611718075, + 0.9673958560175789, + 0.3023694760838186, + 0.667191724733086, + 0.1646603527520325, + 0.31779624406102136, + 0.6737512731711053, + 0.6153748653498825, + 0.8835529965166204, + 0.0, + 0.9452222862822885, + 0.645914194022499, + 0.2642370778882206, + 0.006685486780778516, + 0.3725466178774436, + 0.4006691050007496, + 0.045443420371758525, + 0.757878329504308, + 0.5765749970593094, + 0.6952903994293748, + 0.39891475752344774, + 0.223462212480803, + 0.561949642025323, + 0.26582106355477786, + 0.8679627416593596, + 0.0, + 0.520386510520803, + 0.5729821033339437, + 0.15888900298069597, + 0.32574598871778115, + 0.08507026174420751, + 0.033163014661722334, + 0.5582581347178946, + 0.6654488293268923, + 0.9535968460039294, + 0.12061729840712532, + 0.8678660083780976, + 0.6786754522654187, + 0.2957081174912921, + 0.6214091550578642, + 0.05081926299070605, + 0.0, + 0.1737095650275139, + 0.26054402962433976, + 0.8309895729247351, + 0.9837084277620012, + 0.193883517978652, + 0.6040766027779229, + 0.39731676228480306, + 0.07577365560644655, + 0.9734855564766233, + 0.07905253242440446, + 0.5402271155578642, + 0.312658218279, + 0.5580150941162714, + 0.23869035863926813, + 0.2118592726441969, + 0.0, + 0.40774398374507614, + 
0.06184592502838726, + 0.5405616642371476, + 0.41974878574508956, + 0.4410104002330809, + 0.36924911372488667, + 0.13496719088888554, + 0.3609207908477674, + 0.7582965585849417, + 0.00969060226969043, + 0.6471289691298319, + 0.7688862105050804, + 0.29666308013322706, + 0.7877023614623473, + 0.7446691814681812, + 0.0, + 0.4140887295190111, + 0.5837524946826165, + 0.5993648001223835, + 0.36116384616070596, + 0.8741825822059256, + 0.055153633345313424, + 0.8600411890422959, + 0.6189323456561677, + 0.19209697094531197, + 0.42740855550738366, + 0.5874445756566115, + 0.38868150345157215, + 0.614736218817265, + 0.4882819723975579, + 0.2627564186983168, + 0.0, + 0.7704869919128293, + 0.14777052255092293, + 0.9038230251296515, + 0.3991402539155233, + 0.881951063803435, + 0.02541556228975106, + 0.6683435989287582, + 0.8442918486378159, + 0.05726787273700418, + 0.6342019010319667, + 0.18041774458082027, + 0.5348036964274306, + 0.5501148618001405, + 0.17390700972882978, + 0.3170879649041092, + 0.0, + 0.027908130046646273, + 0.4359744572472193, + 0.8800114344962787, + 0.2286646319500545, + 0.6386639884633829, + 0.5404307400186054, + 0.2107098930167256, + 0.9929340835729172, + 0.40574433994558745, + 0.23971233234665323, + 0.08151507299434502, + 0.7069072042307454, + 0.3313303935318651, + 0.15367640654289394, + 0.5447613727595725, + 0.0, + 0.0065240400853051606, + 0.10031631416625497, + 0.592294185051166, + 0.01602436897355808, + 0.8970074838147561, + 0.43397172435813636, + 0.3602894011512735, + 0.6230236637717608, + 0.014695112452643766, + 0.5930513089318565, + 0.8013524426937643, + 0.17082743622605978, + 0.9495821689319028, + 0.12608007005283672, + 0.40765364669080983, + 0.0, + 0.4629280567139442, + 0.9122617925646811, + 0.06616210555202529, + 0.5287391927451673, + 0.5384981726900777, + 0.399171641866614, + 0.6058611607220571, + 0.03652927766049019, + 0.8385520205601446, + 0.04058612627866931, + 0.15396617414027158, + 0.7003557522570247, + 0.7949182315946333, + 
0.29339866794400327, + 0.5089518255097655, + 0.0, + 0.8023452171244287, + 0.2994217064336464, + 0.30311312940693835, + 0.5745184698716473, + 0.08949106458248024, + 0.8191740074744085, + 0.11343203569471083, + 0.9814545140556741, + 0.557269086035321, + 0.7571077634496628, + 0.814155374844376, + 0.13093225118285412, + 0.6036603406202169, + 0.910526857416002, + 0.19878145893575572, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7933940726611575, + 0.2768221019406246, + 0.6232870615447319, + 0.9190261695650536, + 0.916040440340509, + 0.7293654667591577, + 0.867220820477516, + 0.49450365052053813, + 0.218862273708482, + 0.3993922950626534, + 0.5849483548485327, + 0.595214084553384, + 0.47653740126406163, + 0.7699958834578208, + 0.4190394143016243, + 0.0, + 0.751503176418468, + 0.8551416693789439, + 0.4862086377622101, + 0.504213887047256, + 0.7320840459449758, + 0.7877079405023975, + 0.6977073306840095, + 0.5063410127610112, + 0.009101458254194172, + 0.44738956365389115, + 0.47410859530905247, + 0.5220207184917355, + 0.8242472102134765, + 0.7668561042298244, + 0.18449171784271434, + 0.0, + 0.5730339642987752, + 0.9581611480678348, + 0.5895692096708252, + 0.8677969158124854, + 0.3222504692992577, + 0.38841766861072746, + 0.5060745852275809, + 0.07941173435225413, + 0.6743285109321938, + 0.48631413869148266, + 0.34902586190660034, + 0.47798068813174965, + 0.20490560382685707, + 0.7576080703418117, + 0.7248338041487655, + 0.0, + 0.18349454450476865, + 0.10533031658160918, + 0.7554597615621282, + 0.5353138598274435, + 0.7452389831788838, + 0.5083511811149798, + 0.9319798398301528, + 0.23260360704614902, + 0.07275696573139923, + 0.4259768623069238, + 0.2663387218795845, + 0.3309015007373268, + 0.29131931274653, + 0.1641298037630171, + 0.45313613821292087, + 0.0, + 0.5468101590366435, + 0.03885033352272249, + 0.014682307405730732, + 0.20465394690886418, + 0.3788131596453064, + 0.3156124416610675, + 0.4064017137229581, + 0.13320891480642583, + 0.3913338649487226, + 0.4314405963376722, + 0.6903529909013097, + 0.09247118229082008, + 0.04806840012500846, + 0.5594413968754564, + 0.2353724090089958, + 0.0, + 0.2343122063662042, + 0.9234544523605029, + 0.2809428056808747, + 0.011121822389134506, + 0.7434425656523862, + 0.8613248812796934, + 0.2139931416625478, + 0.9643690182833452, + 0.2813398172834911, + 0.2200091918037541, + 
0.2452186815471069, + 0.5016657872858797, + 0.6783844288347701, + 0.858348804452439, + 0.11891489402509348, + 0.0, + 0.33539736372103723, + 0.4969487731623575, + 0.8074509397361618, + 0.9012316325785938, + 0.4196558312617896, + 0.44763440335951254, + 0.6848259546038911, + 0.634136884885198, + 0.07621171165711682, + 0.045129807440502656, + 0.36993439335820266, + 0.509980783450072, + 0.1291853650875049, + 0.28800923855585947, + 0.24764934052757293, + 0.0, + 0.919287478259732, + 0.16182560634661736, + 0.5245350126500983, + 0.8355426982863444, + 0.18117937324393163, + 0.08955613627666636, + 0.8302619788711604, + 0.7940043532505432, + 0.020263648949265578, + 0.4960311791166828, + 0.9004107777857757, + 0.5228790488626198, + 0.5665763536450508, + 0.9925322581480068, + 0.6088937495733816, + 0.0, + 0.8728653989870712, + 0.8235168961141208, + 0.15179883706048358, + 0.47833468342363583, + 0.35145457494662735, + 0.7641168819602082, + 0.4262266284371995, + 0.909720976541194, + 0.8494627176571065, + 0.5390265154419922, + 0.23432046845050336, + 0.10437280174539443, + 0.3217630308251994, + 0.005047256658422206, + 0.5134141762543564, + 0.0, + 0.17320401192837975, + 0.7178280791665211, + 0.15971519514419752, + 0.6590677174117043, + 0.7587860532176893, + 0.16714435315916454, + 0.7907007477786779, + 0.1804815480459595, + 0.5906969377218659, + 0.734746260686458, + 0.4031426695590322, + 0.607489141449997, + 0.6184451103881606, + 0.4061196525138171, + 0.8332033548229756, + 0.0, + 0.3308198119618557, + 0.5993064227459448, + 0.021043748021450814, + 0.6609686676614416, + 0.29410116884979864, + 0.8542110965236954, + 0.9547401805619603, + 0.011633874310123526, + 0.5960730297829765, + 0.016845849803279966, + 0.2655051349183011, + 0.7139487121754522, + 0.710795665586663, + 0.6358658349540495, + 0.6959386671209794, + 0.0, + 0.8644909012254995, + 0.5495448275790107, + 0.578403469379529, + 0.8763824510229986, + 0.6454352542040965, + 0.8543685819997999, + 0.27453798617609904, + 0.5679052240796093, 
+ 0.5308347775052953, + 0.5961912380671638, + 0.9880118129969047, + 0.5121047559499048, + 0.9629661365510314, + 0.9230163846118975, + 0.2628004326454837, + 0.0, + 0.5080645529580723, + 0.0967320382345006, + 0.6221482410439043, + 0.6177277586281875, + 0.2938567940382111, + 0.4375178515774526, + 0.6245938060894479, + 0.6269643730746675, + 0.5984227615483964, + 0.5407077178091976, + 0.8194489033100127, + 0.15695083473869154, + 0.5214199950935625, + 0.3786948046673424, + 0.06610917023313079, + 0.0, + 0.5312085054078496, + 0.16999199281502642, + 0.5149952176895068, + 0.10275950645159926, + 0.7562357965500408, + 0.31275882215981243, + 0.9298259173278483, + 0.622259966080545, + 0.9035452268867655, + 0.43681114369227214, + 0.515668633543935, + 0.5881692306130055, + 0.8557779203016872, + 0.7008188743354362, + 0.9713986385950066, + 0.0, + 0.6941462596409878, + 0.6201627652029469, + 0.41149386439440483, + 0.6373979923064175, + 0.9790368418515897, + 0.6516835996428301, + 0.8073846839595531, + 0.5799333193015206, + 0.3836394483807609, + 0.5290638648749636, + 0.0026904428700516014, + 0.6166695075719026, + 0.15669056370348267, + 0.05444941639851175, + 0.7553807312604052, + 0.0, + 0.26566244195149735, + 0.3429936953106305, + 0.5932536163167526, + 0.3688446020245262, + 0.24907151150529083, + 0.9666838460585926, + 0.9408452347471434, + 0.8297789466341775, + 0.07059496307526203, + 0.48193292475815286, + 0.8399828534944508, + 0.13033623050267118, + 0.21910121029494511, + 0.9034160440190593, + 0.0879741202387384, + 0.0, + 0.7954101254833267, + 0.12262456989224724, + 0.6095922010805379, + 0.19294719381954262, + 0.6444561537012113, + 0.11230121343037147, + 0.6532127870549267, + 0.5556790470157917, + 0.46853234419715717, + 0.031148844729625758, + 0.7758667036392735, + 0.039751091994222665, + 0.13700517916498, + 0.9146615609838002, + 0.5221666087514342, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9409704933185207, + 0.9827436069597478, + 0.45164956426919534, + 0.8189595632288778, + 0.048542346438529727, + 0.6319316826284354, + 0.45388046690980344, + 0.2686512463810278, + 0.2156870051567451, + 0.4083663877312853, + 0.5139780751436436, + 0.5607261624052818, + 0.0024937281665405875, + 0.2395434350054132, + 0.8888370148444098, + 0.0, + 0.2833115811726783, + 0.09470769004153046, + 0.5963728605327647, + 0.18382234387008245, + 
0.2888982619923376, + 0.9707105907899966, + 0.4539194661134508, + 0.9195041162228266, + 0.2926490771238668, + 0.7793867681039637, + 0.5502928401223973, + 0.889307367733486, + 0.27709067104418017, + 0.9075826751030068, + 0.9668880541327071, + 0.0, + 0.9045167578041478, + 0.31720626912526484, + 0.26977084004917873, + 0.6628640127730127, + 0.47180710264196857, + 0.9609815770709899, + 0.40014866124862314, + 0.04725788693025401, + 0.5469005206327802, + 0.5562228661290906, + 0.5044157520313133, + 0.8783441042121376, + 0.8846532385677418, + 0.6917856339346387, + 0.7126335539983967, + 0.0, + 0.5760900471997474, + 0.9110741697727067, + 0.3189883437570836, + 0.3743254528679858, + 0.23386093131576902, + 0.5465976433635854, + 0.6043331318409235, + 0.2952466483612659, + 0.330747347450309, + 0.3822967495568692, + 0.8990362508495071, + 0.16987981514433037, + 0.24117039327577128, + 0.7522929457113635, + 0.2506919945376952, + 0.0, + 0.9719022013449191, + 0.6510814009115845, + 0.7109980988942437, + 0.4294233870750591, + 0.7328907292330431, + 0.6556943612651338, + 0.225597828723479, + 0.6590764399603523, + 0.6355137745938946, + 0.4971085320115517, + 0.5854541000025035, + 0.7306983499233906, + 0.06317144943471131, + 0.7293668496953926, + 0.9150342727514221, + 0.0, + 0.5792598189492231, + 0.9201746009793418, + 0.6940538896107066, + 0.718860607198264, + 0.28708666052381304, + 0.34489441368775886, + 0.4351856621734109, + 0.7819920514006857, + 0.4368436482640442, + 0.08403051155541907, + 0.49644032080597733, + 0.295553109073168, + 0.9444785571164042, + 0.312628165514108, + 0.27972879203442225, + 0.0, + 0.5467466552441257, + 0.45192729685297983, + 0.8618778193602312, + 0.7794594699501223, + 0.942068575357319, + 0.8425690301930913, + 0.44880879282845876, + 0.6313868952095871, + 0.045860876537570094, + 0.821585949940805, + 0.5813122434394614, + 0.7330893824590752, + 0.01782358288895658, + 0.6842945616639666, + 0.21309296620220664, + 0.0, + 0.25140995301779245, + 0.31604985037330313, + 
0.18616680988497625, + 0.8846596193917995, + 0.2528999002942254, + 0.09133822774162237, + 0.2578390693709248, + 0.5364926500722277, + 0.9147336985253203, + 0.9483212988792594, + 0.9111986590276077, + 0.890747713179764, + 0.833015659889326, + 0.5786553960093945, + 0.8771565592890141, + 0.0, + 0.6411448581159954, + 0.8707027432369983, + 0.8564126272186688, + 0.6164108991590378, + 0.5550508497487092, + 0.6387195882728491, + 0.23957843245223354, + 0.9696926718037973, + 0.25308651212453315, + 0.16471906339727194, + 0.5343968402454335, + 0.8881132017490202, + 0.7860226400883344, + 0.2152607906976356, + 0.8248181835344995, + 0.0, + 0.27451562135352914, + 0.72878744848115, + 0.7797000975797941, + 0.9745909379214955, + 0.20540926899740464, + 0.944491561830992, + 0.853515288696181, + 0.8751826183544384, + 0.2033009806567786, + 0.06567953209136246, + 0.9235396916652409, + 0.7878211800011237, + 0.9177800436549498, + 0.28125926214922903, + 0.407903336578901, + 0.0, + 0.9548391114972958, + 0.1674506865019706, + 0.37143021038381285, + 0.32485647771425064, + 0.32415808102642585, + 0.1308772464793011, + 0.4212805829280021, + 0.6186824179206017, + 0.35338901995350347, + 0.23541673198341673, + 0.6256679401453966, + 0.3325455186050863, + 0.3291753575571019, + 0.01839101463473014, + 0.6261919774559914, + 0.0, + 0.7381852239091693, + 0.45124168255471575, + 0.229715430762102, + 0.0892726774993946, + 0.8205997779310548, + 0.31896100380010384, + 0.4476360345947321, + 0.374082748560687, + 0.7763577947920451, + 0.6159537228461544, + 0.5131644608523405, + 0.7603864278246477, + 0.6517761560249987, + 0.07841619323189297, + 0.15448830170454542, + 0.0, + 0.3755030344179414, + 0.12119165773765295, + 0.6027495229517916, + 0.40093259911851264, + 0.49427223065769976, + 0.8624981667877906, + 0.957532796167945, + 0.8772222262522045, + 0.26157731245166527, + 0.6586280315124442, + 0.2879634268912872, + 0.2019933710592563, + 0.5284468204973548, + 0.5016077336291183, + 0.650342581448249, + 0.0, + 
0.7537944541084961, + 0.10933983998602648, + 0.7663663401195722, + 0.42254373506295606, + 0.10172574748059948, + 0.045372401645406346, + 0.6760695251722348, + 0.6879391730366049, + 0.7724883314221036, + 0.624855040940067, + 0.833651860040737, + 0.03616447612692608, + 0.07208944509965698, + 0.9380737405885754, + 0.5783912430117581, + 0.0, + 0.773950909057429, + 0.1759277354431209, + 0.41825418709262685, + 0.39393028080136006, + 0.6182015066099117, + 0.4416695860497346, + 0.3486926036671979, + 0.14362805694523428, + 0.27765832037469307, + 0.01975335660659383, + 0.3822293767593232, + 0.6455001733547748, + 0.4570602303084288, + 0.43171108906984546, + 0.20117339528293765, + 0.0, + 0.6967149418204718, + 0.9677755419491193, + 0.9270367121338873, + 0.39651688472188695, + 0.0168747916309564, + 0.3647535046031083, + 0.9016116830012303, + 0.10258900417390948, + 0.47797931448220154, + 0.1321824391227766, + 0.23023981839485697, + 0.7481126632793694, + 0.7270712505338848, + 0.9515955951974239, + 0.41829198319090544, + 0.0, + 0.38503186216926355, + 0.7543396908321365, + 0.7161191538935998, + 0.549841988115412, + 0.8205573048608202, + 0.8898241301253859, + 0.2401099429918745, + 0.030624092999827712, + 0.009621719218537161, + 0.039616998101646095, + 0.01963008595705018, + 0.15369079903344451, + 0.45622318075015034, + 0.8566606561807202, + 0.8893917877678217, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2788621905645847, + 0.8319669455323974, + 0.0758397585797681, + 0.2212019110594996, + 0.29900534660025635, + 0.06871267935554981, + 0.06899821073598944, + 0.6808591321990167, + 0.44575700214939706, + 0.1316170583136489, + 0.38195995456137477, + 0.5674839790642817, + 0.7534681094535048, + 0.686297717967469, + 0.105519785610387, + 0.0, + 0.3914488177801225, + 0.045404790528550176, + 0.6930235580606103, + 0.24169548217584247, + 0.7672168466547319, + 0.22994612358090216, + 0.44771198237155385, + 0.6394051607664051, + 0.5526817563552453, + 0.5823003593699922, + 0.0691827837637794, + 0.02344187504490547, + 0.8283094023019764, + 0.9835994822435665, + 0.9590906670885571, + 0.0, + 0.4441622838740177, + 0.580960851572881, + 0.45153892421127473, + 0.26079051122940855, + 0.84861326189263, + 0.8611639658986988, + 0.005371773064485774, + 0.01704922266251019, + 0.6618782430579868, + 0.460407132676545, + 0.9134980286002937, + 
0.17805704916393394, + 0.9421112964950215, + 0.013082294064716171, + 0.1698198206083581, + 0.0, + 0.3975522239332363, + 0.07406350954282426, + 0.1030568165615815, + 0.9374776597565552, + 0.9272944088782501, + 0.49144340847385815, + 0.9093452087593679, + 0.23920615856461447, + 0.2965446823628132, + 0.8791560589333144, + 0.43764211570390577, + 0.6878774899923238, + 0.8767360752235004, + 0.8116285296468564, + 0.29779532324535196, + 0.0, + 0.10151203958308108, + 0.5934457742614856, + 0.02635249935934303, + 0.3681498827701003, + 0.16223145506409542, + 0.44684021708200905, + 0.45180950894789806, + 0.6230663482527815, + 0.8055790600964612, + 0.11054163355226143, + 0.40472558273938886, + 0.6407346870532822, + 0.6120206767654984, + 0.7096322246251744, + 0.00865674447804532, + 0.0, + 0.2872526558451919, + 0.5009929494303532, + 0.11356474525522375, + 0.36593016645675425, + 0.8943722290360993, + 0.7358215307664409, + 0.29014474004643387, + 0.9563220830896147, + 0.6001601280190024, + 0.01601889212357266, + 0.20289225585693782, + 0.8976835567214221, + 0.23173060102779797, + 0.010483431427880885, + 0.2515278347754073, + 0.0, + 0.047072223921852285, + 0.5782843487222157, + 0.956785065162077, + 0.7153695047111417, + 0.544476435102529, + 0.6264614439246897, + 0.1803586098433546, + 0.17680195243892283, + 0.08176609208335828, + 0.5109227998653746, + 0.749717932738556, + 0.8581912242486646, + 0.536116262473263, + 0.46473715759504186, + 0.35768570963775315, + 0.0, + 0.8011166160957196, + 0.06197518014204395, + 0.32942240892937913, + 0.8394878414878438, + 0.3986344162704535, + 0.3521793773197275, + 0.6157669370160647, + 0.5691268559555982, + 0.5340875805234683, + 0.18800160145525535, + 0.7710480893850639, + 0.7147420250579847, + 0.8806567275119238, + 0.7148077654269936, + 0.48456086693730116, + 0.0, + 0.8947827819397268, + 0.6072480115598556, + 0.42051773530954417, + 0.34848903900547945, + 0.736050608422872, + 0.5259838826599396, + 0.07054336065417866, + 0.9763771250226043, + 
0.5769152976852452, + 0.855181759250229, + 0.043343679624452625, + 0.9289837525489134, + 0.7498948503129963, + 0.1014775341655918, + 0.3742971778877998, + 0.0, + 0.5091285345006246, + 0.691645674300595, + 0.9334847493148096, + 0.6205681752907498, + 0.44562916758597004, + 0.3200192486188471, + 0.9992748069994739, + 0.35329027181968886, + 0.3574393966065852, + 0.045639242258114, + 0.14392151951956644, + 0.9021465575546013, + 0.28379005896981657, + 0.08295477317269673, + 0.5377552035844773, + 0.0, + 0.16714647062441812, + 0.792964306805697, + 0.8856804953699705, + 0.24863826138177947, + 0.97874108060887, + 0.9129249441423458, + 0.783524323965273, + 0.07411148334266782, + 0.4588611184013076, + 0.5278620767250894, + 0.15256565983791548, + 0.5402298831330304, + 0.4947139383527708, + 0.9655756730945574, + 0.5659737120801217, + 0.0, + 0.01161740394936217, + 0.11130921446911235, + 0.07235118401938379, + 0.6833346177905405, + 0.5220755753422995, + 0.34740460901493253, + 0.9825232973055474, + 0.8872823693194178, + 0.01647207891502034, + 0.4957053602944951, + 0.1740755627135041, + 0.9215598527410163, + 0.3625674014885949, + 0.4323217237293163, + 0.8648979402914188, + 0.0, + 0.8677851927552775, + 0.22321469973125518, + 0.2857655472955716, + 0.46386371877582155, + 0.812890327006206, + 0.9511217924551874, + 0.28226824269892314, + 0.09476251737661834, + 0.883738856496437, + 0.8188375924804283, + 0.24479870583915198, + 0.006885348395249791, + 0.3717905704746459, + 0.04911637174591643, + 0.3812265057675689, + 0.0, + 0.8683907047008793, + 0.46261195666380783, + 0.670721350405012, + 0.8369413784629011, + 0.5353224311088555, + 0.9789089788711053, + 0.9069653620947788, + 0.5814831721205344, + 0.8582517839074131, + 0.13666893116455414, + 0.7664993083960787, + 0.6075029851447706, + 0.5074769884182634, + 0.5286660043783241, + 0.36821735270883194, + 0.0, + 0.5746399597282609, + 0.4167717654818589, + 0.6264003127307056, + 0.937907827472411, + 0.8947180297479725, + 0.6275426783871765, + 
0.3396173190736195, + 0.7948067450037246, + 0.7949042898543022, + 0.8263163568165413, + 0.28349613723639744, + 0.05750017560200649, + 0.864000373530315, + 0.8876349137207631, + 0.5906755245164386, + 0.0, + 0.15935332758897336, + 0.4693623661654466, + 0.4152414555473498, + 0.6884079949405499, + 0.27172627613556954, + 0.13488158776931714, + 0.1590718022985833, + 0.8028905964766107, + 0.2253332552510231, + 0.665256432668485, + 0.20727470677195314, + 0.4012481557474349, + 0.15632125319186785, + 0.42958961007366603, + 0.04374387850635353, + 0.0, + 0.7419322400225521, + 0.5216287161587255, + 0.8251095788437943, + 0.9398661337823254, + 0.774811246335248, + 0.479911473649974, + 0.46819822923796306, + 0.028283583878514706, + 0.7432375052162117, + 0.490099234358937, + 0.8725091905212978, + 0.6572730117075433, + 0.54998292220013, + 0.5032347190668405, + 0.10817677930568348, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17307447566422463, + 0.10210628893434026, + 0.23660487981636935, + 0.6727030989231768, + 0.41699450654228554, + 0.5995383240000067, + 0.5700642762725332, + 0.7932730618459921, + 0.110288863283171, + 0.14124453273662463, + 0.937901877493705, + 0.9442563612532328, + 0.7370442312340523, + 0.15350519160231624, + 0.10799745019824769, + 0.0, + 0.5844264531050306, + 0.15707188357566992, + 0.9949116545986527, + 0.10120733134020765, + 0.4865669590064693, + 0.17006554159583864, + 0.7443127654348148, + 0.9139261589093255, + 0.32474182772796634, + 0.13781806855612266, + 0.39240732928674804, + 0.2964556483411802, + 0.5076652965322459, + 0.8969991637787298, + 0.14868647571275295, + 0.0, + 0.6102460788643942, + 0.04983401811126997, + 0.5506433099149022, + 0.2024014903308914, + 0.5404370207465021, + 0.4500188567500252, + 0.6788684453993704, + 0.6249277454726063, + 0.1780398574926888, + 0.3871463244982992, + 0.3435443994747063, + 0.9446871229805314, + 0.5921997907956869, + 0.11805324612354262, + 0.871472641522454, + 0.0, + 0.6384107450735123, + 0.2007824227043179, + 0.04570924521407671, + 0.32761541003113825, + 0.08126119245535113, + 0.8546083242552374, + 0.003780322488710963, + 0.17267839435466326, + 0.0800403096430472, + 0.0729242172016934, + 0.49071743177539195, + 0.5847924031680212, + 0.6689319576118546, + 0.7205200298378246, + 0.35976913760282525, + 0.0, + 0.4236421041964935, + 0.5592786361172313, + 0.9003764942475971, 
+ 0.7585651895957122, + 0.28014271080084074, + 0.4647725316995882, + 0.25299092535262335, + 0.33575983390283903, + 0.011645167911605303, + 0.374886080566739, + 0.30949514952081636, + 0.2319048366095512, + 0.8114616712850159, + 0.4047811081188647, + 0.741488519913346, + 0.0, + 0.5547317469528449, + 0.8336070619958866, + 0.027797621438328646, + 0.7948818869851003, + 0.08655465355152658, + 0.20153691718027023, + 0.2644792383676571, + 0.7054336307431514, + 0.6338284397777352, + 0.4492047565394687, + 0.4072598196835142, + 0.4000252930567315, + 0.44849335211287467, + 0.9748712604705357, + 0.7762271183246635, + 0.0, + 0.8615175421123146, + 0.5429797006592874, + 0.13487664612365857, + 0.1442756689431154, + 0.22209620032129718, + 0.23326170024172477, + 0.948567811915951, + 0.024207678399074783, + 0.8776886621864297, + 0.4212438993966362, + 0.6826201904173964, + 0.36817407810534764, + 0.5664336821543433, + 0.3794504578971697, + 0.7063705890755443, + 0.0, + 0.23974963440397923, + 0.13060086231864543, + 0.38581888902530403, + 0.3805413570308248, + 0.016526796049303738, + 0.01315890383369045, + 0.2090744359258352, + 0.7264626456186376, + 0.1688681371321733, + 0.02997305868390787, + 0.6685230997408758, + 0.14386721654135726, + 0.33726991578102317, + 0.045459544182733236, + 0.30317936565978376, + 0.0, + 0.39628485951507675, + 0.9953409475457488, + 0.7275677426908201, + 0.8280856211848014, + 0.4405947919807255, + 0.39426325071597734, + 0.7290713857990018, + 0.12050534662922807, + 0.046517463374585954, + 0.016675600776181465, + 0.18617974088731515, + 0.6320054377004065, + 0.4235335515063876, + 0.03332454398590523, + 0.7059192373038433, + 0.0, + 0.15221841466534802, + 0.7337865941702455, + 0.4121782798774505, + 0.7701015624130993, + 0.12642370978659012, + 0.8998994276512082, + 0.7942608071440376, + 0.9331229768733734, + 0.9113162153995553, + 0.04174927429735631, + 0.2820163582508177, + 0.6548633698592232, + 0.28199088711534914, + 0.019225454045004087, + 0.8486260751618333, + 0.0, + 
0.5276705844737218, + 0.9903626657274042, + 0.39178072032330635, + 0.35665820454334773, + 0.2656557276470374, + 0.3733966066602272, + 0.12091375932760828, + 0.8821061132382705, + 0.23703429144125077, + 0.87194850353947, + 0.34105140563939007, + 0.11336183090378926, + 0.007549178537996859, + 0.7720219445543702, + 0.7639308603763826, + 0.0, + 0.136141563615936, + 0.927612117234379, + 0.20442325666991146, + 0.5252260511892967, + 0.4216371765431598, + 0.45397134172942355, + 0.769730636537045, + 0.8081998817343296, + 0.7331009654591547, + 0.22534998061142142, + 0.874642812610883, + 0.12922950357412155, + 0.8008289508513193, + 0.27922681071674893, + 0.8836081056764479, + 0.0, + 0.6784865479125944, + 0.8731138711090393, + 0.7214291877111609, + 0.44202900613933516, + 0.6484872904510793, + 0.32964008708618775, + 0.7949207193766956, + 0.9328573201631474, + 0.7521447684080009, + 0.7568163484479314, + 0.9018946510344662, + 0.6307902481102885, + 0.9571256645811363, + 0.10481635547965562, + 0.8298237378304683, + 0.0, + 0.8506023929445711, + 0.16708201038794612, + 0.19238904353824715, + 0.5760276784213428, + 0.053557275591932685, + 0.49571074634505363, + 0.8999935458460929, + 0.46008178816604084, + 0.2935133188087231, + 0.8323620179888489, + 0.040194152331214594, + 0.9345256694809793, + 0.3377158566131442, + 0.000434512942967924, + 0.12813825639970022, + 0.0, + 0.11655444412662697, + 0.48922652293939906, + 0.4775035636116858, + 0.583230219670361, + 0.6409267111870179, + 0.6386188897861239, + 0.4012742330642166, + 0.43351623952469764, + 0.8011430676008943, + 0.23162892367450572, + 0.32325702771141485, + 0.8997198207013857, + 0.24062086517188574, + 0.16949733426377145, + 0.7300307837550417, + 0.0, + 0.30777677320995267, + 0.8395849558268025, + 0.7799245364322277, + 0.4894688038987274, + 0.9511382246852246, + 0.2659386416870321, + 0.9153266609822232, + 0.2568773496215009, + 0.4577428017678281, + 0.8091875430466221, + 0.839710522303172, + 0.7320329918401971, + 0.5686277342758833, + 
0.629932924911931, + 0.7254559541642349, + 0.0, + 0.17895768937260814, + 0.6869146669603652, + 0.3731454732953863, + 0.12107035373683772, + 0.572770376231861, + 0.9466781750610079, + 0.6397417631028177, + 0.8025163449125888, + 0.6562918869427468, + 0.23063207351770032, + 0.0678988368790151, + 0.36541046796691656, + 0.5857331388221931, + 0.7328482155944245, + 0.9095614713886431, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1428959163707686, + 0.34319741897303246, + 0.3727422421907918, + 0.5252208717494835, + 0.9845034346001325, + 0.2953760640277079, + 0.7814301974934514, + 0.29897422000844576, + 0.4161568492088108, + 0.3275718595298879, + 0.057319812942231985, + 0.7507282551915342, + 0.48336989237912853, + 0.6178369406531722, + 0.8317022548599013, + 0.0, + 0.6339921472031854, + 0.20548642647631032, + 0.10182526861742758, + 0.1407489319992692, + 0.33105487401642286, + 0.2993185787300675, + 0.5980485697134961, + 0.8351506236602129, + 0.3413108285763172, + 0.9853701246412968, + 0.10539215761821208, + 0.2834354132033904, + 0.6386469382259641, + 0.25780886098437783, + 0.72014480328128, + 0.0, + 0.24114847530416583, + 0.8782572762871068, + 0.5681193950991523, + 0.058990671626808266, + 0.8655499374511749, + 0.12623281415807586, + 0.9179087382817581, + 0.29270115330017554, + 0.4843833171679538, + 0.8969222326087619, + 0.036110763024358006, + 0.9125049537329575, + 0.5057189401955511, + 0.5910888667494147, + 0.3268334821329065, + 0.0, + 0.9755652293874791, + 0.15574830215294055, + 0.251066759796305, + 0.34013301543992536, + 0.8544239070898935, + 0.7506250383360479, + 0.9845216591232598, + 0.4449066024306001, + 0.3801591528162508, + 0.46992130379473795, + 0.16866034623074722, + 0.7593610415456928, + 0.5765054866954764, + 0.4499852761266794, + 0.689614323347893, + 0.0, + 0.023106982236168583, + 0.5382255756368998, + 0.013569741068614083, + 0.9731289628278863, + 0.57413683044571, + 0.18482695590484688, + 0.9396507176346909, + 0.46302166471328976, + 0.3823047602372387, + 0.9603905040096754, + 0.11788876761766842, + 0.20340306536127473, + 0.4149013365809332, + 0.72009293077925, + 0.9101996879909243, + 0.0, + 0.8836712696950489, + 0.32497288134413016, + 0.04004311080794554, + 0.7705931645860948, + 0.3380575581662082, + 0.17104463062821762, + 0.23020924351443262, + 0.05370052324325125, + 0.7455349344165368, + 0.9192463862654321, 
+ 0.8283608572981356, + 0.29049318421714176, + 0.9873636008237946, + 0.1598971162689926, + 0.39538436184127157, + 0.0, + 0.027038421058647844, + 0.8116635556373696, + 0.9316226845042491, + 0.6224078624714754, + 0.5377010359484087, + 0.4279704931302868, + 0.7480058599501489, + 0.7152868385523025, + 0.06949361723777969, + 0.33466110765337265, + 0.8775259545733388, + 0.8458231511971182, + 0.5684859039812322, + 0.6479608898741379, + 0.20698520808473286, + 0.0, + 0.8713871987342261, + 0.6372243583386563, + 0.8664594133059137, + 0.45826390204466283, + 0.6826530402002303, + 0.0799940492055824, + 0.0011330448786951441, + 0.8136843846391464, + 0.7628864600491926, + 0.7776245457047064, + 0.8014154537878861, + 0.8302819774808493, + 0.9571412460189319, + 0.22729351466546266, + 0.21159551944883026, + 0.0, + 0.7772407939204838, + 0.4467042048560078, + 0.2339354171871474, + 0.9221610180122292, + 0.5875594757324359, + 0.7701252234699344, + 0.0046096641455557474, + 0.8210264126117206, + 0.09201575395197314, + 0.18638887198050236, + 0.7419159296260555, + 0.26830464752428185, + 0.22442915635478777, + 0.9735765524556603, + 0.584846911953614, + 0.0, + 0.34646306689748596, + 0.7280374952597318, + 0.11150509179526491, + 0.4225145427554562, + 0.2927459748477902, + 0.8081299620559663, + 0.7735665725912586, + 0.21877382547663937, + 0.645571794302345, + 0.9690416628930539, + 0.7456187688390713, + 0.6910863853575198, + 0.923493062213706, + 0.13965129561398604, + 0.007227759093389796, + 0.0, + 0.2905443263753189, + 0.22160307238834565, + 0.7332171955958712, + 0.9198728207304735, + 0.9417966921400932, + 0.7269755773571035, + 0.6432606390369184, + 0.11208115710885536, + 0.23280042992790895, + 0.3575153823389049, + 0.07930350328202262, + 0.9956876194528074, + 0.2400214084191451, + 0.1519678427188984, + 0.5453855741313447, + 0.0, + 0.5407090607875752, + 0.2190509610918746, + 0.49434568321462413, + 0.8118347080823493, + 0.3901902896785713, + 0.514467815794025, + 0.4386967187852443, + 
0.7998449847648398, + 0.26704532460157304, + 0.15478016928451932, + 0.3840371891358707, + 0.0886959703062784, + 0.2091332653671102, + 0.449518001300781, + 0.10146617670891556, + 0.0, + 0.2791811783845979, + 0.9459663903369686, + 0.7982521258185399, + 0.291484360175839, + 0.7310284017994487, + 0.30809890321987676, + 0.48677575784463667, + 0.15979457828514754, + 0.146336915880654, + 0.05520342499487929, + 0.05464653075302317, + 0.8404527350275506, + 0.11112181120107112, + 0.17229636020605565, + 0.06756255237648279, + 0.0, + 0.7526523910602777, + 0.6281099149580407, + 0.3606415234391479, + 0.2865428696480544, + 0.2838853644639475, + 0.4527408474341541, + 0.5838145872459536, + 0.48688077978015065, + 0.6253370924215746, + 0.6886604559307861, + 0.9006398914089212, + 0.8217884465206378, + 0.1021345195830975, + 0.008119853591463388, + 0.24725119102613125, + 0.0, + 0.08764761947537547, + 0.012340116827947334, + 0.27220540560301354, + 0.09655425275122342, + 0.9184464023096507, + 0.27541904620018776, + 0.09881771510389414, + 0.46463538069874466, + 0.9158629477123741, + 0.4241453773399779, + 0.4203484487246204, + 0.013166154478791658, + 0.5461268527166426, + 0.6335660711568952, + 0.5974449670534565, + 0.0, + 0.24422355910093008, + 0.7763897107852057, + 0.798142081119436, + 0.422279320312697, + 0.34909499138429856, + 0.19990082861971126, + 0.041301940571428886, + 0.0615781355518612, + 0.8395269715898029, + 0.05495041303210657, + 0.661881922716427, + 0.6585489583220985, + 0.4744751575526459, + 0.8302021828977706, + 0.9929469938227945, + 0.0, + 0.5203970954018187, + 0.8449370661524639, + 0.7946629244712562, + 0.09107401964028494, + 0.07683950365371317, + 0.41790227501758537, + 0.5763220059146738, + 0.501659665927335, + 0.3681238982329388, + 0.239337041322513, + 0.38475453366815215, + 0.6882838912079647, + 0.5686887860771009, + 0.5949531430069489, + 0.3410887446172445, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5409058382220656, + 0.26064327745977856, + 0.6237386689865467, + 0.7731211548208213, + 0.371751750625257, + 0.14222191263329886, + 0.08703644719297532, + 0.47796310052161595, + 0.34562032709873225, + 0.8524075649022169, + 0.2780018830917972, + 0.9658834893244377, + 0.2625382090756866, + 0.9794661991030524, + 0.13283260317442536, + 0.0, + 0.027264003005508686, + 0.8704327821118215, + 0.7587009748225302, + 
0.21712976824044317, + 0.7429369572881734, + 0.17393159484677978, + 0.9459097689718978, + 0.9083549955168818, + 0.2805892361107015, + 0.354268574182949, + 0.28516085841456607, + 0.5662946488122427, + 0.647632816673685, + 0.9389635184510942, + 0.8351977892958619, + 0.0, + 0.30051518591984805, + 0.3201168864152234, + 0.9015715876236244, + 0.024687670663182115, + 0.5347933116349726, + 0.15660698078116086, + 0.3369634546367667, + 0.36024594603031235, + 0.35612958329179345, + 0.3637421583897654, + 0.16069473469090123, + 0.028272242973204853, + 0.6581783282449519, + 0.551370359461987, + 0.8838429412810814, + 0.0, + 0.19327677378353925, + 0.16496333366469929, + 0.27021373646647473, + 0.8406159199052989, + 0.47650715873622274, + 0.7795824294036924, + 0.25872130114257286, + 0.057817385357112006, + 0.555267626168172, + 0.1743611165086295, + 0.9892740025423785, + 0.4411772837741379, + 0.9590296395284202, + 0.4078593762317797, + 0.6197226585027977, + 0.0, + 0.48308256924131066, + 0.5375196384703764, + 0.2566137507266304, + 0.562703423536082, + 0.5672484307645792, + 0.7661008988539012, + 0.45320150271340875, + 0.9713430412884677, + 0.7468050858074814, + 0.603944892304619, + 0.8795785103654621, + 0.12062550495013824, + 0.5443111982046303, + 0.19646536493085176, + 0.03692634038821818, + 0.0, + 0.5329224371023705, + 0.8797460539654156, + 0.8727492048177043, + 0.051695957356858835, + 0.6656825714716701, + 0.7876461146220163, + 0.38622658848873337, + 0.900136510102264, + 0.6391395164788279, + 0.9009402766942838, + 0.5689480104305605, + 0.6073570264978326, + 0.4650596113912554, + 0.5247929373445805, + 0.9940592334107295, + 0.0, + 0.6856836014807042, + 0.339102401859497, + 0.9284561220489054, + 0.6526117391926302, + 0.11046956750839054, + 0.2202710861878927, + 0.04016849947250056, + 0.7206965344196591, + 0.9063192828456266, + 0.4323359407198708, + 0.30681096946499076, + 0.8653411787427259, + 0.8033217839455645, + 0.9967210254640664, + 0.6504233123061155, + 0.0, + 0.34532053852101063, 
+ 0.2913567334971431, + 0.609704956666533, + 0.5055139243140477, + 0.12141326751228354, + 0.27959431918054156, + 0.29286431266092783, + 0.9247091165490395, + 0.9307473621062423, + 0.20021436065605147, + 0.4262876046960352, + 0.4151393341516898, + 0.6061443765014596, + 0.6600717068513265, + 0.36915777147398576, + 0.0, + 0.544976349363945, + 0.08678033642586569, + 0.11143923073027706, + 0.31123769846007554, + 0.9554925536752018, + 0.8203033059292821, + 0.8437133274751509, + 0.026840409778698948, + 0.07541107212581022, + 0.6560228515508436, + 0.7072446420453272, + 0.5808638161379612, + 0.6114861481980656, + 0.4532515547842052, + 0.21294932033510716, + 0.0, + 0.9343521824302912, + 0.6035609583574784, + 0.2919740932747368, + 0.07106570646255728, + 0.92925089506823, + 0.6215820211300894, + 0.22415129384088184, + 0.22712160695682926, + 0.8195407479955955, + 0.5010804319385781, + 0.28453323125146823, + 0.6138833085969274, + 0.7123998075864238, + 0.5584347001743613, + 0.8883579550780574, + 0.0, + 0.3386744391906218, + 0.035298194962102625, + 0.9593713052664207, + 0.4246315906129178, + 0.5594898611548063, + 0.5789031170672604, + 0.2547572980121141, + 0.48341813866074834, + 0.6032033762053018, + 0.0511287010591398, + 0.6954354518265083, + 0.8977395746882985, + 0.49441569633943294, + 0.42540865720158294, + 0.9434669752732507, + 0.0, + 0.7050182640777745, + 0.9497078523551619, + 0.08706421151979982, + 0.26882139992693865, + 0.3515377708700119, + 0.27151517003644954, + 0.3576928660408618, + 0.5254416902917948, + 0.20916340967915914, + 0.19073657771073915, + 0.954168045398346, + 0.5232019771811524, + 0.38497775426200576, + 0.41214267127589577, + 0.7871484874698491, + 0.0, + 0.6570906755288711, + 0.15212428578974713, + 0.4689548882768366, + 0.5169886368595682, + 0.5413411836147314, + 0.6906603418178786, + 0.29356840772261694, + 0.10626717225760729, + 0.504131622811286, + 0.1714095928292927, + 0.253042541373075, + 0.62177229006521, + 0.14824853791205372, + 0.24027570208833993, + 
0.6015894352197685, + 0.0, + 0.9728233675392415, + 0.46418539444849904, + 0.7440157979638651, + 0.4083536079157265, + 0.8922976074326969, + 0.4943221721184726, + 0.33334492368819824, + 0.5747932773077489, + 0.8120520444453977, + 0.6531642502559213, + 0.8288202709391087, + 0.31740316568618476, + 0.35778646431847727, + 0.9454412652249871, + 0.13503309950676634, + 0.0, + 0.8193336605123857, + 0.11119042200562312, + 0.450099020479404, + 0.7847214891875106, + 0.13988354060716401, + 0.7173841503152356, + 0.6630914536733648, + 0.7623622526539375, + 0.7617576730955915, + 0.8658555927642224, + 0.5831778759655847, + 0.995086701350316, + 0.975353509593829, + 0.5545422420140109, + 0.08116878671607486, + 0.0, + 0.46803897396370153, + 0.7899696721033281, + 0.19335035809894263, + 0.8283238545828828, + 0.8515925083639553, + 0.023808502086472783, + 0.35087933227820634, + 0.8451469833435137, + 0.5418266749901783, + 0.9302590389088694, + 0.10389955201508994, + 0.6388102279112748, + 0.6343500156066957, + 0.6420106406501698, + 0.764054517679632, + 0.0, + 0.20081428812230717, + 0.6828857566949451, + 0.42437938271556663, + 0.07279876409050867, + 0.6479305574066975, + 0.7624456490986126, + 0.9537661948067595, + 0.8350934497031639, + 0.037292689143810454, + 0.05112151871975479, + 0.37122076741701515, + 0.7711053581649382, + 0.6197857186803352, + 0.6864740877504173, + 0.2380319816390839, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5183593602300609, + 0.6242011279883033, + 0.32569915532223015, + 0.13683175689916338, + 0.8549166605018542, + 0.772017425704287, + 0.3740116130874709, + 0.9439699149880643, + 0.3244473068897339, + 0.2018225048367598, + 0.674313300247574, + 0.7569759738807702, + 0.011821732492649617, + 0.6832153824587172, + 0.28089396295739555, + 0.0, + 0.5203793454636297, + 0.8581198613701222, + 0.26708860374479326, + 0.6220570882647428, + 0.7265730836727542, + 0.6200108675153606, + 0.7365411758755057, + 0.5761269443602227, + 0.9780906882547484, + 0.1337137105244105, + 0.4266327188633878, + 0.6552797366311471, + 0.8268517559537538, + 0.8270776312910332, + 0.9103089717088566, + 0.0, + 0.8799487648515179, + 0.6447245787246012, + 0.9409673661998634, + 0.8227051604417782, + 0.40789339651046574, + 0.21501592799343128, + 0.4867300821623828, + 0.5081222037332938, + 0.9587394945314034, + 0.8857683318672712, + 0.8670071039488126, + 
0.24926754799169282, + 0.5105842168972987, + 0.0636306990727582, + 0.18246126378534233, + 0.0, + 0.16657995091485, + 0.3183705513105286, + 0.2873602299039645, + 0.7492839118460441, + 0.7249746280900482, + 0.2614347922741589, + 0.6330476804291267, + 0.21711521074809348, + 0.9537231450582898, + 0.8054379291434759, + 0.8387597582464331, + 0.5928000639712934, + 0.33516198589398927, + 0.2373804021632232, + 0.6979957948358568, + 0.0, + 0.4036231641028515, + 0.4032157204911123, + 0.40259979725169626, + 0.15424023139548493, + 0.5581752403898145, + 0.1738693992655882, + 0.027842806433470257, + 0.5670322913623921, + 0.6140243181385397, + 0.6004074484706452, + 0.7822659087928389, + 0.9667785325782007, + 0.5899298184564171, + 0.16236494524331746, + 0.9626679561442861, + 0.0, + 0.964399823242302, + 0.29663092205552966, + 0.9192193500044818, + 0.45767420207236686, + 0.3622653866890835, + 0.8834673402962447, + 0.8733502676473507, + 0.7202401713375974, + 0.2697266620196902, + 0.15353250661433127, + 0.1272752399285857, + 0.8919781153413793, + 0.795154910460929, + 0.2433854540049678, + 0.5933988483122893, + 0.0, + 0.8111812248865525, + 0.3947702156702956, + 0.7890490241768431, + 0.9934311827779357, + 0.8002956394633721, + 0.8792110943736163, + 0.14153261130904082, + 0.3179667535946573, + 0.39201854990125595, + 0.168690189073049, + 0.48134478777549794, + 0.3736505544359441, + 0.7207692014518258, + 0.5862625046816455, + 0.6454616564670834, + 0.0, + 0.01467926489272231, + 0.047188214883748136, + 0.8643163981007268, + 0.35225351006597916, + 0.1738729983399271, + 0.19761474295139136, + 0.11397632556039095, + 0.8051542386094057, + 0.5231389048691316, + 0.16397199559366327, + 0.7457498243240924, + 0.2417298957019205, + 0.14129066807403834, + 0.736670366105544, + 0.07408469538304985, + 0.0, + 0.2306706727643011, + 0.2719413967526857, + 0.8662056209294271, + 0.7452851826741589, + 0.4875114572766778, + 0.7002120069630986, + 0.9528131530354661, + 0.46516466439551873, + 0.8136337980643848, + 
0.6213452509644681, + 0.28981426825312817, + 0.1029787726217739, + 0.6298022872720083, + 0.5361004674386493, + 0.38637634097511475, + 0.0, + 0.3255465227081581, + 0.7536961258543396, + 0.9668877810883133, + 0.6443321962903846, + 0.0902889111839571, + 0.1739704138755046, + 0.22527981430519306, + 0.09842482951586506, + 0.0850422709244194, + 0.6693649854283488, + 0.43548752001979074, + 0.7693498285943238, + 0.8271357939963007, + 0.9151111071436309, + 0.37503208580391456, + 0.0, + 0.9081205442121948, + 0.524813765879986, + 0.10241243397612598, + 0.28929999051164557, + 0.8932680697470176, + 0.5912730607588347, + 0.7741138011050145, + 0.1465489941076854, + 0.07532927925196109, + 0.736860992143695, + 0.11104971195587443, + 0.4223776492079895, + 0.7990800793357093, + 0.16801040188956518, + 0.5278925431047583, + 0.0, + 0.740168423589265, + 0.8519165957545065, + 0.35671955259898025, + 0.021296368752716455, + 0.04746249838464589, + 0.45823718255184787, + 0.7902605589928353, + 0.7183202889810175, + 0.48678126682434475, + 0.6155947439024998, + 0.25973998325194847, + 0.4279699300119745, + 0.6672363925703474, + 0.5326758874470737, + 0.3704634737497141, + 0.0, + 0.5206164155771656, + 0.8459584043208143, + 0.4073355559796621, + 0.2641883880152509, + 0.325118829099289, + 0.8185858110520337, + 0.33901726435506596, + 0.4169045010747703, + 0.19813594600550655, + 0.45084083387040386, + 0.4519151499337386, + 0.29918287690703327, + 0.09800901010960317, + 0.7057560949622691, + 0.8294992246898419, + 0.0, + 0.0007816594643210717, + 0.08623103774330587, + 0.709097356688389, + 0.7532401820884128, + 0.5700663279080852, + 0.41396766909159144, + 0.5985268666493162, + 0.43817156837719795, + 0.2689176471525758, + 0.07136536459018394, + 0.18570873106510322, + 0.5041061412134665, + 0.4079606230176146, + 0.8670419050715236, + 0.6122870882835016, + 0.0, + 0.683115943322798, + 0.5041566330672386, + 0.8057442223850726, + 0.3674268356992164, + 0.39982618378512635, + 0.8803109226673347, + 
0.8516311115854588, + 0.5658707451344761, + 0.6058892678764306, + 0.6019964084128929, + 0.11303582169867699, + 0.8629733346170192, + 0.6729821956263352, + 0.18457121777842234, + 0.5889782624116725, + 0.0, + 0.6249174479831543, + 0.8420355495482482, + 0.2667058011628922, + 0.3872399882234452, + 0.744196651206333, + 0.0759634068093169, + 0.07764893163844522, + 0.9841541220244447, + 0.886221161708688, + 0.6919561856409583, + 0.8657498274959576, + 0.8219128899172709, + 0.5890368667325236, + 0.826885272168958, + 0.2862319274790316, + 0.0, + 0.14409662943894463, + 0.4328374837170327, + 0.18096279993942266, + 0.879930869709762, + 0.27498739781393966, + 0.2584578614695483, + 0.374983715147429, + 0.4521978180500349, + 0.5595282565509634, + 0.10413862217829029, + 0.6641771250120069, + 0.07148858957180404, + 0.44426078653741263, + 0.14025741669252112, + 0.9663994445996943, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.14922932875338413, + 0.6617511993734612, + 0.0972216513194819, + 0.8167118055325171, + 0.9724117989276255, + 0.7300781287932298, + 0.36980752793090976, + 0.8500618632153892, + 0.1419609313043313, + 0.5129930262435822, + 0.7283075584013111, + 0.2134377378630149, + 0.008821865522855132, + 0.7856924217542165, + 0.7624936786649742, + 0.0, + 0.11819412731842582, + 0.7766537494564577, + 0.046122603387791616, + 0.6955110843639645, + 0.014324409285141448, + 0.8624198654862665, + 0.8780337092812143, + 0.004777046246910555, + 0.6717675252283625, + 0.2801076430455637, + 0.4942848217319723, + 0.5179056795620993, + 0.5910419735717404, + 0.840744681055929, + 0.45375575795993883, + 0.0, + 0.5922593822618438, + 0.6132936614795171, + 0.4109748714090409, + 0.6656860237845064, + 0.7265996119632014, + 0.6825077720996533, + 0.9611177839863301, + 0.865548458155214, + 0.22011909568754273, + 0.016919109647039354, + 0.676818169427821, + 0.026870718289451645, + 0.8034268043946365, + 0.8258693688550751, + 0.5997061385711707, + 0.0, + 0.2662663597300604, + 0.9520748539513224, + 0.3734415097137087, + 0.7981045764809439, + 0.6739076123487858, + 0.31626709553647614, + 0.6660564374520576, + 0.2954324387556416, + 0.022506453413775107, + 0.9840595398672755, + 0.3431240103317088, + 0.2060279428229742, + 0.21207086405040854, + 0.34880246506606827, + 0.8781121016833955, + 0.0, + 0.9776200228263446, + 0.4758149512203128, + 0.8488622185109916, + 
0.11969361031982428, + 0.6318646676694728, + 0.03355759344599485, + 0.3676404135378726, + 0.8186291190531935, + 0.35301334217355207, + 0.9612463832766082, + 0.6416195251069333, + 0.548040927732119, + 0.3809877878102963, + 0.8981483330588393, + 0.5608055002062007, + 0.0, + 0.19329634942663665, + 0.2963528251071592, + 0.9080783886085111, + 0.0982143563938741, + 0.3559405609299813, + 0.7682065513833087, + 0.051104120594660074, + 0.32860401997993804, + 0.08547787887044911, + 0.5813372900534557, + 0.26089749754585867, + 0.5019802625919461, + 0.20305500980644842, + 0.10332679504730224, + 0.48716246828830867, + 0.0, + 0.7675262420159139, + 0.8957295163189739, + 0.2906220507430505, + 0.510797159559359, + 0.4661645469952733, + 0.5806823641299691, + 0.8057757905385002, + 0.6724341123120896, + 0.26128187412583626, + 0.24430423513000654, + 0.9315098744214174, + 0.5546848746918437, + 0.7195135069803356, + 0.31159722704092885, + 0.9290986568027505, + 0.0, + 0.18728423744397238, + 0.47038734096149415, + 0.4456668922096847, + 0.5335903792447684, + 0.1328989515221285, + 0.11399674026618589, + 0.25299132502119037, + 0.13174070823702977, + 0.049425215656540455, + 0.7789854000542915, + 0.9814873887157626, + 0.6627020144126606, + 0.13782188018655184, + 0.2976526019284299, + 0.8587574054485324, + 0.0, + 0.9556381028694262, + 0.5347428795661616, + 0.10672530411559611, + 0.5893565778166424, + 0.4729014463192641, + 0.3622356198928768, + 0.42921784666683027, + 0.9733610815737049, + 0.07990308912872868, + 0.10473015091550497, + 0.27753530999363785, + 0.7898117029914349, + 0.2348142041066399, + 0.03615468174509062, + 0.601149904423681, + 0.0, + 0.1508380628216479, + 0.7229404776832763, + 0.7219238131733818, + 0.9583203097730215, + 0.49410869144419534, + 0.6890255321685174, + 0.3340232744157904, + 0.5184164486823317, + 0.1604461477236364, + 0.30758985881518375, + 0.22006786595743133, + 0.10114668177627817, + 0.34369662341954443, + 0.20532316160658248, + 0.6271793648207649, + 0.0, + 
0.25100251671942586, + 0.30329048026796657, + 0.7712303177556455, + 0.24051468992665348, + 0.8143454823684415, + 0.10557057735785325, + 0.1016803260245307, + 0.37418735343410015, + 0.9396490250068066, + 0.5345993954931548, + 0.8402643515218982, + 0.6979410329890304, + 0.9398953890117043, + 0.400784203708203, + 0.5409361240644139, + 0.0, + 0.6714140435259212, + 0.13086779950956529, + 0.9911404237013794, + 0.06835476433229037, + 0.6093260298711283, + 0.9198592525270698, + 0.18722872155163184, + 0.6173879315025659, + 0.009474634287685912, + 0.35332382901047177, + 0.764481319745085, + 0.4166445818731944, + 0.71255092747532, + 0.17494363992366446, + 0.620009224601677, + 0.0, + 0.26468793880869024, + 0.576385998206916, + 0.28595967326832383, + 0.8573444562443651, + 0.12461622957767593, + 0.7975089868038633, + 0.5344614555206574, + 0.22785046720433844, + 0.494175441387835, + 0.34805445081451014, + 0.17838081838790454, + 0.652158873340379, + 0.7603892981238097, + 0.30158552547093875, + 0.25176893165523984, + 0.0, + 0.012888260450423461, + 0.5031192572112638, + 0.4420566293816941, + 0.20345178680680154, + 0.6159678213721915, + 0.6340505141934616, + 0.801643045648305, + 0.5114095646406202, + 0.46291077951839155, + 0.4093080040507797, + 0.601023236000773, + 0.7661610713157183, + 0.4154661310361333, + 0.11111896953816602, + 0.5000574536805042, + 0.0, + 0.9419551063237229, + 0.08546569675744897, + 0.6702181006929773, + 0.6257352813989081, + 0.4710535583582489, + 0.5375031945181686, + 0.152002398026577, + 0.9649452698790851, + 0.15387428898659994, + 0.5569747742306358, + 0.5295840130217927, + 0.013600740936464795, + 0.7936818265729689, + 0.5861133805705742, + 0.6615724752761805, + 0.0, + 0.6647188764302583, + 0.6139290139099071, + 0.1487783459160792, + 0.13222658442346147, + 0.030609527071711717, + 0.46937569395097956, + 0.7197274830405708, + 0.2932465915452753, + 0.5719043938250659, + 0.2178918676147794, + 0.34613239873973844, + 0.31614555598082206, + 0.33556475347376824, + 
0.46365762672070077, + 0.39392743840517497, + 0.0, + 0.4919769298794783, + 0.9532334777361637, + 0.8886058420607406, + 0.00529428268329124, + 0.3345819402654616, + 0.38945202789643985, + 0.04035717869170574, + 0.9540857456390377, + 0.3323979171193523, + 0.8980912228786734, + 0.08639984775299314, + 0.40851197800871475, + 0.22513057371336387, + 0.6361320008641982, + 0.848525420471788, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42255885556289785, + 0.961317798903892, + 0.6873921203738307, + 0.18767082401381263, + 0.383032722416067, + 0.5619003531541614, + 0.09722243908500983, + 0.40976022381169763, + 0.8563561916384634, + 0.5818546949935752, + 0.8804225912045001, + 0.6717577793330666, + 0.15174566815098, + 0.9540592332554206, + 0.04538666171000372, + 0.0, + 0.6602948270135885, + 0.0157807641133888, + 0.5253556552531752, + 0.6052012637119696, + 0.3250525214093817, + 0.4036979011287797, + 0.5996698961307735, + 0.966139556726745, + 0.8237877387676724, + 0.9986972588534645, + 0.8713359890054578, + 0.053230410263521266, + 0.7981908675892514, + 0.33284795709302306, + 0.03245551600311003, + 0.0, + 0.6533839627526647, + 0.4719517919129781, + 0.8012894187738542, + 0.5319901699274354, + 0.9474584034891509, + 0.811538638841858, + 0.5787808033733464, + 0.8905666938310178, + 0.09383499480582291, + 0.41135942899025113, + 0.049583658631340954, + 0.38449156344428725, + 0.9288252857248814, + 0.3190165049679413, + 0.6021067420685309, + 0.0, + 0.3317974819002759, + 0.2854952552587243, + 0.6220750104373972, + 0.4359310852600138, + 0.3100026229661803, + 0.9994953892711391, + 0.5217204183697274, + 0.12032617007435498, + 0.8821162255230444, + 0.21557447472923785, + 0.461333142676972, + 0.0471516166478535, + 0.45359975411736064, + 0.5162097883698685, + 0.22800942678663183, + 0.0, + 0.4750694525995892, + 0.25206545379617573, + 0.888442717413622, + 0.44239410464979356, + 0.513923963010145, + 0.1727508046090681, + 0.7165588917721792, + 0.5586063680569328, + 0.963722603581999, + 0.1039414625139794, + 0.8573883991970475, + 0.8989913539553348, + 0.3699779432576733, + 0.6872266906081227, + 0.13754835383980213, + 0.0, + 0.8372386603443212, + 0.3908122153054925, + 0.8208059827597131, + 0.5237736419559433, + 0.8974220003154247, + 0.957246866903339, + 0.2711698301456228, + 0.8751461193267505, + 0.40533384837008046, + 0.3609153461000294, + 
0.8273497455134113, + 0.4722058983651367, + 0.7731353896761294, + 0.2698316041639177, + 0.870025367204846, + 0.0, + 0.8733516669036471, + 0.7897478546617399, + 0.7756075576254196, + 0.5992656100976662, + 0.6638441571382975, + 0.29111048030765574, + 0.634951382971681, + 0.8800836348534977, + 0.7435095283420515, + 0.40970329286922735, + 0.7492467688476916, + 0.07965997745763154, + 0.41420774199620525, + 0.4267271934768603, + 0.665096511332824, + 0.0, + 0.7769429519442802, + 0.8687881204806356, + 0.11239428226956549, + 0.8657522574689676, + 0.8761301480692648, + 0.5740540319046417, + 0.8810319218994965, + 0.0029626166269742793, + 0.6815137163413589, + 0.5307699973344617, + 0.25678848040690394, + 0.8432035709964336, + 0.02228872104020785, + 0.9018147501048259, + 0.5613220687909719, + 0.0, + 0.5131899136457916, + 0.2600847389486435, + 0.8446328153518666, + 0.38786819776307235, + 0.9453765278793322, + 0.17925051541673342, + 0.5802832617413877, + 0.5955595092378418, + 0.4035740011091691, + 0.4564196140309865, + 0.37873638846192015, + 0.8324782794950882, + 0.41465227704663743, + 0.25237951358360167, + 0.9574402818826881, + 0.0, + 0.9186914493171061, + 0.08231672621371333, + 0.9724238495588396, + 0.40328619698366674, + 0.546624273026458, + 0.29532833281514037, + 0.9135617876692592, + 0.4201823500205377, + 0.9680023044214799, + 0.4903645772887063, + 0.8471733611977219, + 0.8947212471888726, + 0.7653060061362247, + 0.9673474522520681, + 0.8563491686752919, + 0.0, + 0.6129202656974769, + 0.8996158217421836, + 0.9423008441603514, + 0.44238976235002736, + 0.85207838750921, + 0.5750678863635276, + 0.8885909702945289, + 0.05907783015629442, + 0.9006052594187848, + 0.27219908745510735, + 0.13202980589322322, + 0.7469459200927738, + 0.8908977169428762, + 0.5548373652390023, + 0.9922585649322798, + 0.0, + 0.4795842008468192, + 0.9656769113553033, + 0.9479083858491435, + 0.7208756753497657, + 0.5143383921113984, + 0.7699362351165019, + 0.07029886827552778, + 0.9747299600526338, + 
0.7701609780096528, + 0.18851889819465462, + 0.5712988419943197, + 0.17419155384609342, + 0.1011691828617195, + 0.4626239459993291, + 0.3922046770600376, + 0.0, + 0.06010532154027859, + 0.38695984760128954, + 0.33292400110433795, + 0.34888394788279076, + 0.3623639302685099, + 0.9466495088395406, + 0.4319423085389362, + 0.6883157984178091, + 0.8996958126037286, + 0.3458624205156834, + 0.09333652786390811, + 0.2651924617755289, + 0.6564807960586904, + 0.641197100041672, + 0.1833590489222756, + 0.0, + 0.294737816664397, + 0.5245019326552095, + 0.8522010300319922, + 0.22403409027925325, + 0.13053324444676095, + 0.29480313505154754, + 0.8898903853626756, + 0.808682619123844, + 0.6742632000806458, + 0.12862376654059382, + 0.49013826107448333, + 0.5589110879508555, + 0.5139719894186772, + 0.7376476490149778, + 0.8839103204428493, + 0.0, + 0.522742012020562, + 0.7868368475303291, + 0.19718871739413535, + 0.8972358572047573, + 0.9202768804193477, + 0.030239207752203567, + 0.5659419880903022, + 0.6759239697737995, + 0.07078770013586377, + 0.7421487287602753, + 0.13479719164736526, + 0.3511385321424483, + 0.33447179866740595, + 0.4945822751566302, + 0.6555569212336647, + 0.0, + 0.7480182527514979, + 0.9883877958236108, + 0.7243083475278599, + 0.36707639307014706, + 0.07632342250334967, + 0.9962218408618101, + 0.17772700247017748, + 0.5777679635398814, + 0.32783805750246053, + 0.9389952599404652, + 0.16427210317156737, + 0.7514510123953554, + 0.8641131762583587, + 0.45143515414896973, + 0.15339324608249016, + 0.0, + 0.8490507175550476, + 0.8216632094900955, + 0.5392876190319033, + 0.5424392141638237, + 0.6169800846527169, + 0.10974153884708437, + 0.02466331055729487, + 0.9473648921085042, + 0.3252077641220803, + 0.2242673631578419, + 0.7458477143946026, + 0.7056225704396397, + 0.3731368840868027, + 0.8310063694046905, + 0.7116679797736236, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8053486024168381, + 0.308003659447983, + 0.8995638750306256, + 0.22642144023181932, + 0.9978343851459455, + 0.21143275695656438, + 0.36622747291705593, + 0.6666396237088997, + 0.35020160607689066, + 0.4567301306014674, + 0.4305204338004601, + 0.38646108435950044, + 0.6564184302936832, + 0.45530763619178605, + 0.9120382286627429, + 0.0, + 0.1225799065903952, + 0.439697264558092, + 0.876303279788564, + 0.8122458206435015, + 0.07292616008073904, + 
0.555423702169805, + 0.656344259835644, + 0.09118951619802484, + 0.9178902457292402, + 0.5238981780488828, + 0.7449751169034136, + 0.8371165165897874, + 0.9902826572306924, + 0.6132443775488258, + 0.17072886231286066, + 0.0, + 0.3830785103106693, + 0.3622433517310881, + 0.8900418947723354, + 0.7731145777875027, + 0.17569764521968534, + 0.4203525375788547, + 0.1884220582281788, + 0.5294388643160259, + 0.09245287089353127, + 0.6821351017819414, + 0.6977153555547937, + 0.17596204498400014, + 0.7858413120004434, + 0.348820222742827, + 0.31148355389917304, + 0.0, + 0.5353443888120824, + 0.7682182667316569, + 0.22939202976517903, + 0.9989769175044537, + 0.6625558888671477, + 0.9924149224407816, + 0.9539616929703971, + 0.017097645012574825, + 0.7139204990603845, + 0.4018119843053126, + 0.6957575999209226, + 0.04229303459900191, + 0.8759230636975925, + 0.5174224351082402, + 0.10631712714709618, + 0.0, + 0.6418683150777982, + 0.627708127542772, + 0.239046411083068, + 0.9596549003598565, + 0.1527051594775165, + 0.7675035167597798, + 0.2611164858601731, + 0.12381517151884414, + 0.454728551120156, + 0.413248627828918, + 0.6217465273713485, + 0.6303515881266069, + 0.8296063028246792, + 0.6785388089428329, + 0.888166481373541, + 0.0, + 0.036328296713261876, + 0.7457734070783953, + 0.5762408480674619, + 0.2894399301850562, + 0.33929059955171625, + 0.537685187349412, + 0.7632167601719415, + 0.40979418450761473, + 0.7639518329554216, + 0.7908025440454534, + 0.5380026935228748, + 0.8938158886867839, + 0.7455691493096455, + 0.20780047027581727, + 0.22411040382431047, + 0.0, + 0.9573587267816196, + 0.13412875089129972, + 0.7721650166811792, + 0.15198542143494165, + 0.6149784479853465, + 0.46887914202389724, + 0.1754475871355382, + 0.9812751084866524, + 0.13267916773134902, + 0.06436912288920948, + 0.7474986794418835, + 0.26298163432446064, + 0.9277084414907217, + 0.8930438375858712, + 0.34857984167947764, + 0.0, + 0.6570380920114276, + 0.9284364512273396, + 0.033369057715806005, + 
0.8793630817948156, + 0.6825778290448736, + 0.9436490780055035, + 0.31012307192762434, + 0.5414881341152286, + 0.2120953312712095, + 0.26848888006345284, + 0.3930622251364809, + 0.9614574845148894, + 0.9154425984205277, + 0.005617921107824553, + 0.5463171321848674, + 0.0, + 0.12440565194231623, + 0.7579730733820352, + 0.6062104098965748, + 0.6221220329322654, + 0.3340698714928595, + 0.48434514567299913, + 0.545071189705105, + 0.2284718083387247, + 0.1686663528787331, + 0.126094841221167, + 0.1432469446192154, + 0.2400691667597833, + 0.5313900533292686, + 0.5406260226685579, + 0.16069308320763953, + 0.0, + 0.8218695413672762, + 0.6279467335848792, + 0.671267338107112, + 0.27324957326399, + 0.9054661714859945, + 0.13823837017830531, + 0.7431348213005746, + 0.12502942625184044, + 0.8153134329144104, + 0.23924382207850836, + 0.573748001476606, + 0.7536093041951328, + 0.7370258140027841, + 0.23155812789946095, + 0.4928769605204446, + 0.0, + 0.2046314841929372, + 0.2421788541481572, + 0.32297628636036624, + 0.9571097473497495, + 0.40171262334950775, + 0.48573873204238827, + 0.00912587596133585, + 0.20594108776593623, + 0.518917115536705, + 0.39983250992339414, + 0.24721081216827134, + 0.01864816011617698, + 0.6406814198532975, + 0.6476761701116637, + 0.3370625562457469, + 0.0, + 0.35324722386436747, + 0.36266260215119017, + 0.04578604303342626, + 0.9769270908981721, + 0.7164633241797145, + 0.4155400582038591, + 0.7969964217778731, + 0.631639294167408, + 0.8750293600438538, + 0.7667187930556872, + 0.10781756851762991, + 0.13246167554170774, + 0.8073872975561917, + 0.3782899501219493, + 0.03184601454406577, + 0.0, + 0.22643922546973982, + 0.4359939263394089, + 0.8001583859890014, + 0.7100878664458213, + 0.5671398172166763, + 0.16193809353652022, + 0.8577567986605854, + 0.47129295452204933, + 0.6757945842350678, + 0.5364152244879188, + 0.6406855656325827, + 0.2018273294368511, + 0.023207568730379324, + 0.1307966296554397, + 0.6662784355811753, + 0.0, + 0.6320703183436863, + 
0.9972556643064111, + 0.2386630271598421, + 0.4016917196825689, + 0.07579511166480779, + 0.6794202239012281, + 0.293937160110448, + 0.7808651025074285, + 0.0721029214978608, + 0.41098856778442516, + 0.1839760813054665, + 0.5361103375114744, + 0.6620058452491424, + 0.39091855023851974, + 0.9199232679408939, + 0.0, + 0.3954084243144338, + 0.314750495280666, + 0.15578714591530785, + 0.2358676922121582, + 0.09576260123463154, + 0.6741790995135312, + 0.09656496544489146, + 0.3699290701317358, + 0.18744536801458878, + 0.6186542712946893, + 0.3495815321242659, + 0.08692573029619277, + 0.5420101268337991, + 0.1271348470405097, + 0.2812214301379906, + 0.0, + 0.7100007282107038, + 0.9835998703689925, + 0.07609393858141678, + 0.6302803983452228, + 0.6759715887068972, + 0.014961515013560467, + 0.32743209122525563, + 0.45312672286877176, + 0.8670947957967745, + 0.6048377008954706, + 0.6249594554243484, + 0.28481005373740376, + 0.20289153185846165, + 0.053790479334393404, + 0.8742299845363793, + 0.0, + 0.7434773142380879, + 0.8524702210945999, + 0.2740311659018093, + 0.09865377641884965, + 0.5398152288898069, + 0.17675059360026468, + 0.577227645989316, + 0.7252476462933776, + 0.849282495062534, + 0.8193512119844159, + 0.009699545497271922, + 0.44435298715707083, + 0.8280169782081197, + 0.26006369382804695, + 0.8599917887197637, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1736234087834878, + 0.9059499024551373, + 0.43885011041681843, + 0.34950875318430275, + 0.1157488629683695, + 0.6748163507463255, + 0.6436339012544571, + 0.6304320067885459, + 0.873833867499568, + 0.45059008597699746, + 0.9333238141138935, + 0.8421366296460351, + 0.860474219603467, + 0.8785300123973846, + 0.6906930408986818, + 0.0, + 0.5393796473228052, + 0.8407881345611861, + 0.1765038631934982, + 0.7944419536149493, + 0.14737706619680302, + 0.48137101384806746, + 0.0670996495310957, + 0.34289182715778943, + 0.9073807882699497, + 0.1807973828205287, + 0.5647530988949201, + 0.12173473559784498, + 0.66355408036683, + 0.24110190924457642, + 0.7910867888794373, + 0.0, + 0.26265536728871053, + 0.8826890000928408, + 0.13418004941384787, + 0.03103015289982447, + 0.5460001207003308, + 0.9932768562941524, + 0.9558734897603106, + 0.7748490420116133, + 0.4370284414089439, + 0.13735799478263178, + 0.11861556036163445, + 0.8983418220037368, + 0.1593342611284404, + 
0.09336833738023242, + 0.17681976795940946, + 0.0, + 0.8044681505932153, + 0.14662158982266627, + 0.5344879011310795, + 0.663819901886251, + 0.04359929903286086, + 0.5277212945728229, + 0.2671709633095254, + 0.9715179003803728, + 0.23416893363822322, + 0.9331770402541002, + 0.37041975948475947, + 0.9847539005944105, + 0.4368471111461758, + 0.1501650200879202, + 0.6603832025665075, + 0.0, + 0.3742238333521457, + 0.7417154005143491, + 0.06264352678520546, + 0.5440610368284272, + 0.3291915021643472, + 0.2205001163545992, + 0.11798569575523787, + 0.5867344550617464, + 0.8862077713571181, + 0.0970637457215261, + 0.2187790431541211, + 0.35931904374593415, + 0.6908125639590997, + 0.5121120134513772, + 0.597343671869012, + 0.0, + 0.09074157735870425, + 0.7059785417083816, + 0.17962188510178545, + 0.9077205978060766, + 0.9622276310991649, + 0.7207701056091972, + 0.15589826132535167, + 0.02759887320657839, + 0.3016046915063275, + 0.975444936989247, + 0.261254768861501, + 0.021911562364628212, + 0.003263982697885748, + 0.1565172192657961, + 0.2685112315326177, + 0.0, + 0.7304208834979972, + 0.6601787602032277, + 0.2881220300857651, + 0.0338599876818938, + 0.08334607868973498, + 0.7371879806248237, + 0.18367828580997114, + 0.07143391918306774, + 0.05154589310311697, + 0.6180427614038362, + 0.7516787812302785, + 0.09737285522798322, + 0.6256196072631419, + 0.05983591914896336, + 0.7841639500085923, + 0.0, + 0.2492251010356593, + 0.9231778999317669, + 0.5321174661068577, + 0.595317397305231, + 0.2700362200516937, + 0.6541796200153409, + 0.8394235300059207, + 0.7282989120635159, + 0.26052790755147626, + 0.18122171980619062, + 0.9779734594463506, + 0.43807843530802404, + 0.1140600581933161, + 0.04330228211170717, + 0.18113056501032787, + 0.0, + 0.19750323759722288, + 0.9930782010538035, + 0.8388562063836482, + 0.6611606652562612, + 0.016035811126763, + 0.33919996156368915, + 0.8847391883728367, + 0.26077007616648473, + 0.23821867021640264, + 0.20862723075994383, + 
0.4809702583897232, + 0.17061107408980647, + 0.22762281546787988, + 0.6922677194042267, + 0.9403421767616225, + 0.0, + 0.4406251789902105, + 0.16950541890782578, + 0.45018950039403227, + 0.7427273415715485, + 0.4821196729628865, + 0.05473421034406678, + 0.21160386512972973, + 0.22188941437196985, + 0.3840713652030233, + 0.027525887162295315, + 0.3873988667206474, + 0.5867379487483775, + 0.4103765789506256, + 0.12698107662785552, + 0.7906408165548021, + 0.0, + 0.3370383139382622, + 0.29953669622579693, + 0.7071784147575183, + 0.3584586164057667, + 0.6980546839225151, + 0.1752278601889602, + 0.7871679264663828, + 0.29649223403351266, + 0.6345272299789647, + 0.695265747241561, + 0.8506255538967493, + 0.4754343141952393, + 0.8513329942584479, + 0.6533789404881825, + 0.8356582782867027, + 0.0, + 0.8041402568981643, + 0.9896343653184838, + 0.3857731158961153, + 0.6958058152240291, + 0.7115154385462098, + 0.6368814215972155, + 0.9435575242809139, + 0.1768241902484493, + 0.47345121553514435, + 0.9030199148449366, + 0.7013074099672814, + 0.37067124685282593, + 0.5604797251636211, + 0.10908195897233752, + 0.499799237651813, + 0.0, + 0.4993766685205866, + 0.24553185853314052, + 0.65299765354335, + 0.46625913163679544, + 0.21212410319962938, + 0.000164534861047394, + 0.7466707975584421, + 0.9884976357478628, + 0.6298412121911315, + 0.09461970007579035, + 0.231629765157495, + 0.12404426147727043, + 0.9236629938531732, + 0.8459250046807176, + 0.24891013330857337, + 0.0, + 0.7293176712149692, + 0.2516361921497805, + 0.5353092818650595, + 0.4694506286495609, + 0.6082470757397771, + 0.20470292892251996, + 0.6723027744255152, + 0.7589120725428872, + 0.900720575844496, + 0.6201823347023111, + 0.4555976836579041, + 0.8807548223437347, + 0.34224444338583404, + 0.27304686771977627, + 0.07992982472178756, + 0.0, + 0.6603327243779286, + 0.9484126968310068, + 0.9564058826045736, + 0.16442715310662093, + 0.5173747933283613, + 0.5238060569581895, + 0.8149792653912143, + 0.35921758359392886, 
+ 0.011912092687478393, + 0.9933531360254236, + 0.040475326908593345, + 0.47399156546908194, + 0.5355888331588118, + 0.8596066818181741, + 0.24723358723169098, + 0.0, + 0.5745019381499755, + 0.5500268598092636, + 0.835846495517248, + 0.23063444664650545, + 0.5696867318583311, + 0.5998454094558137, + 0.08396917536564585, + 0.7573248201999135, + 0.5336644761033456, + 0.048788337003780824, + 0.3102456451288894, + 0.08833440184390029, + 0.6074226753924673, + 0.7959159479223603, + 0.1692755837629465, + 0.0, + 0.35187136846045264, + 0.3350933898253611, + 0.8351221975443202, + 0.726253607959635, + 0.781265456010705, + 0.7421671202903873, + 0.5787572859484466, + 0.6966976938875553, + 0.6053721162111024, + 0.5231986147949365, + 0.638188398305703, + 0.3153827426581478, + 0.7354364351652688, + 0.39534113625649026, + 0.9846921754986828, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.02987565456325536, + 0.9451404892928696, + 0.6728338293514975, + 0.4026272377371113, + 0.786863539131931, + 0.9199978584243822, + 0.42358137933910056, + 0.5725730166380295, + 0.36838321852597233, + 0.2323852198302795, + 0.6443905754082856, + 0.9118380571158589, + 0.021983493509470264, + 0.2956260756989032, + 0.43840441104787087, + 0.0, + 0.02855321140679945, + 0.5073284931437386, + 0.4536554242947598, + 0.7852252113479263, + 0.9802417458748952, + 0.9338146077783052, + 0.5967211795150688, + 0.31600580567737446, + 0.4107320740414664, + 0.18373902712846812, + 0.8305942204288111, + 0.2956318045568225, + 0.1987915128764175, + 0.013523555616418714, + 0.8945907690567821, + 0.0, + 0.9620506763172254, + 0.04772666903015921, + 0.18663149621345776, + 0.502430057215938, + 0.7561873542725256, + 0.011071645951356013, + 0.46664389244277993, + 0.8302142799050533, + 0.6450559163393974, + 0.837331889710949, + 0.43459168075936105, + 0.055626985098968906, + 0.7928763181187088, + 0.4487765023838284, + 0.12664530740465962, + 0.0, + 0.5102357288822965, + 0.07648629212083402, + 0.03149768492434768, + 0.8641450883728528, + 0.5091059600388003, + 0.567428915304961, + 0.8836223147005381, + 0.7990549208535302, + 0.9134854217228497, + 0.1342083680048588, + 0.7868366644111666, + 0.8111818470196777, + 0.7418311423405076, + 0.8764700563766421, + 0.5024775223316081, + 0.0, + 0.2569583542974698, + 0.5985426498583516, + 0.38222887341125134, + 0.6760673929447647, + 
0.3225431747324218, + 0.25029238632650186, + 0.3300353787445781, + 0.1318895517792641, + 0.9323584077551262, + 0.5139781386250649, + 0.9018063340069323, + 0.7271008256278614, + 0.6185047950075253, + 0.0476484032810216, + 0.787532280726247, + 0.0, + 0.1238064647738526, + 0.7298008310007555, + 0.9103263698932323, + 0.9345394974971337, + 0.7067758856994609, + 0.40054663926007217, + 0.4643866240014072, + 0.5939468853519743, + 0.8305192965778545, + 0.3827967117242834, + 0.12175136345256687, + 0.1816589392800083, + 0.5803937565006845, + 0.3877965225294895, + 0.3015331581960675, + 0.0, + 0.946518284196775, + 0.8179512269860251, + 0.9027418292698526, + 0.2752681619022208, + 0.7205296241406043, + 0.36058872696147204, + 0.899053553348829, + 0.0817973969846979, + 0.9200183752656503, + 0.962319034121119, + 0.9041836779973385, + 0.9863757504785956, + 0.6311516923718504, + 0.797357691849296, + 0.7853396143984568, + 0.0, + 0.17423005364125466, + 0.08062090595086768, + 0.7916556062564507, + 0.29768815820418637, + 0.4031590476800556, + 0.3024465142441287, + 0.11186849603436566, + 0.25156973554043116, + 0.016818545936913787, + 0.807608959635635, + 0.9055577919565967, + 0.08009181918975739, + 0.28937847627006597, + 0.4607721840924206, + 0.8238415522033353, + 0.0, + 0.44119632832759614, + 0.9137158639694065, + 0.49793518737507725, + 0.3428632219823373, + 0.04624977552084841, + 0.08212159834448995, + 0.22603496971075754, + 0.25338401962260904, + 0.4116316428681339, + 0.8009857504280683, + 0.2796205129365854, + 0.4693202950802744, + 0.5028515083451491, + 0.8763906724879901, + 0.07191737967438394, + 0.0, + 0.6350454365259237, + 0.47043025898856794, + 0.11843117221756805, + 0.6873572562937668, + 0.7822016961039511, + 0.6155249376345805, + 0.7239776333134695, + 0.5531487664049857, + 0.9597946597426368, + 0.541286455040231, + 0.5905099554167602, + 0.6598326337418338, + 0.3105371511093644, + 0.31803431422946005, + 0.25137728217148847, + 0.0, + 0.94440751923726, + 0.37618957576147793, + 
0.7696893045286372, + 0.3881560685957063, + 0.1575796924171936, + 0.6301024528508102, + 0.7134784474402105, + 0.0892594631034409, + 0.2141465867102682, + 0.11647817542008043, + 0.38924790876033455, + 0.5826409837508134, + 0.9283482134289395, + 0.5219680726522239, + 0.02962017379060622, + 0.0, + 0.8959139854417814, + 0.2127189003486457, + 0.7624791773350115, + 0.17949527066878967, + 0.9503813536250644, + 0.474548798776512, + 0.94591130166259, + 0.4019714793628233, + 0.33632140703471747, + 0.9362022531415416, + 0.9099602507828936, + 0.5016008140296482, + 0.8190162002594696, + 0.8712603368722328, + 0.5217246710742118, + 0.0, + 0.776968771149434, + 0.024496577714573586, + 0.45306608434565365, + 0.6199701330612319, + 0.9398788283717338, + 0.3531810626781928, + 0.45657257486841296, + 0.9965551327953546, + 0.46712296805080367, + 0.9151175013096735, + 0.6259199182201985, + 0.38160366861251893, + 0.7601255241684549, + 0.43425223817457936, + 0.8192996661908495, + 0.0, + 0.41740130201464565, + 0.33632746353372833, + 0.31567932516837227, + 0.1107181089336623, + 0.015840572108081097, + 0.05366768618472484, + 0.16081403892106438, + 0.6262942611583128, + 0.8264304797926321, + 0.6177026683307458, + 0.7570399958640042, + 0.790376777313576, + 0.7433335599752808, + 0.7372362394726145, + 0.26229409259488545, + 0.0, + 0.7429349258289848, + 0.32194485983105214, + 0.943234850980276, + 0.12649873119156152, + 0.7197987215938938, + 0.7551061735631569, + 0.10097643820377178, + 0.36988263378306185, + 0.9415527801157786, + 0.8826871248596089, + 0.06287297702815053, + 0.5426348317590287, + 0.7131261575650724, + 0.7245741166259649, + 0.8391310166185668, + 0.0, + 0.4725000801415684, + 0.02164567763833425, + 0.43218969468566937, + 0.8637332297698778, + 0.07206012244231708, + 0.3195279939002994, + 0.8777089494075411, + 0.23436920293275532, + 0.584980428587302, + 0.039485833136840176, + 0.48034370073467747, + 0.697593879920313, + 0.6825729582791454, + 0.6484094754404262, + 0.6198069204259402, + 0.0, 
+ 0.1210317850371001, + 0.3298107886630791, + 0.3079647519980262, + 0.5971146502479803, + 0.34207513405590584, + 0.09607581302382451, + 0.9128937994748378, + 0.13675540941440623, + 0.014191695919459235, + 0.959103181681906, + 0.9932140140262243, + 0.2549949211587845, + 0.674022368679578, + 0.5604891200923958, + 0.254493711475265, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.9950030543314351, + 0.9059783705709346, + 0.2605564372445186, + 0.5033521116645547, + 0.9093036470374543, + 0.11289880306326316, + 0.09269009505777925, + 0.7600527093822825, + 0.8559090413529336, + 0.9906507345625638, + 0.42388941776413813, + 0.9038086329201231, + 0.2615606182462341, + 0.9050253476055222, + 0.07799659788964741, + 0.0, + 0.9239199878555392, + 0.5074031767896926, + 0.028612331801815993, + 0.5170391469592438, + 0.033514863634174996, + 0.021881784340196164, + 0.29636262100159927, + 0.9941005491373017, + 0.6751387265314722, + 0.07421559133882827, + 0.8407461750844862, + 0.5923664788186207, + 0.16362377391955907, + 0.46395581980774425, + 0.11616476125468578, + 0.0, + 0.9509391888329551, + 0.23949784512107586, + 0.13196706728579255, + 0.42784162421906624, + 0.3589096735314393, + 0.7343459481719553, + 0.12040022098514491, + 0.9382175644638661, + 0.9282913145034546, + 0.21954056799496124, + 0.28245270264822564, + 0.36072360134366377, + 0.2460391785032675, + 0.05228188387560795, + 0.8171304163681191, + 0.0, + 0.43371607935957757, + 0.4090861039604716, + 0.02249972770865183, + 0.4819691503234925, + 0.968736500563332, + 0.25465576439235804, + 0.9225054791443137, + 0.23454703097933838, + 0.8698949436935833, + 0.785302457185616, + 0.7574826622334812, + 0.7497512182510989, + 0.2597621109600533, + 0.2740075559813191, + 0.3968356021217494, + 0.0, + 0.8042401219281345, + 0.8027707198747399, + 0.7755843942616588, + 0.9368018891260819, + 0.9287983530571882, + 0.06474230589395158, + 0.5374934783524302, + 0.8360686771555237, + 0.5388192118347033, + 0.8034737287747157, + 0.34873932176565925, + 0.13611279636760565, + 0.9273505810782228, + 0.76312833986336, + 0.7627434471753259, + 0.0, + 0.06590487298099312, + 0.5617987006053388, + 0.9960128168685074, + 0.833066447260375, + 0.7850029007556029, + 0.8301055699827586, + 0.9565172095539238, + 0.4486011954525093, + 0.4430773099812555, + 0.7143378383439901, + 0.03184765464322359, + 0.953434543863607, + 
0.9017206455951964, + 0.998201109572808, + 0.650141913944763, + 0.0, + 0.40082671558030614, + 0.7187226196220429, + 0.297409848058106, + 0.8813310463075268, + 0.8149602638141056, + 0.8150946346767723, + 0.325581240363358, + 0.2396109118703933, + 0.21568728303108498, + 0.6410500667624983, + 0.8572631297299002, + 0.4698316695035778, + 0.8408332366645039, + 0.5279030033611596, + 0.5382042778798547, + 0.0, + 0.9720391735910291, + 0.6254985417228794, + 0.45350404193205596, + 0.15230153188408768, + 0.13816081706664618, + 0.06496108861311467, + 0.9217879677685186, + 0.4148259984511603, + 0.28014007544696073, + 0.7873727703869978, + 0.3027267705853969, + 0.702907142318454, + 0.28428530243342887, + 0.28606412691046723, + 0.20753332494455634, + 0.0, + 0.1639179624678071, + 0.44788264172372383, + 0.9690431980322582, + 0.21602383479606846, + 0.21379952689204018, + 0.4328476240888399, + 0.04683064160348471, + 0.13703382153647203, + 0.2147908428579447, + 0.922896477893936, + 0.31705732907093465, + 0.018111817465786384, + 0.9068827011493362, + 0.7534742315481235, + 0.5822707320428531, + 0.0, + 0.07084751698944058, + 0.574778873547074, + 0.3669961656632227, + 0.42842407224737655, + 0.8828361362660744, + 0.8410388625230115, + 0.22664723777562046, + 0.014768682305215775, + 0.9397282674766603, + 0.621337010508387, + 0.02054386896588034, + 0.5186143747786239, + 0.8492585928070444, + 0.6520857901115255, + 0.29539406036939275, + 0.0, + 0.25561585624339833, + 0.14590000076230625, + 0.4373185858815869, + 0.5948840854878977, + 0.13472502021877197, + 0.33272714568608797, + 0.051417608226309475, + 0.670754377369674, + 0.6986668768727101, + 0.8449748767018475, + 0.8538437196504572, + 0.1262756415570917, + 0.362379647288799, + 0.7956635886122484, + 0.5252886457089591, + 0.0, + 0.5187545953694392, + 0.607028968244777, + 0.23424249313713097, + 0.8810269861584001, + 0.6329896576402828, + 0.06927435854485209, + 0.4299370825127503, + 0.9293271083699988, + 0.8519547074922509, + 0.03732651517241348, 
+ 0.7683931395074164, + 0.3106288762655872, + 0.6131228749686409, + 0.9650200573520742, + 0.706758145774099, + 0.0, + 0.2043955571304693, + 0.9314930397649978, + 0.977475328346117, + 0.14002321561255215, + 0.44786558864086445, + 0.5244114371406022, + 0.8988396073345346, + 0.30460680464896894, + 0.8900227236978127, + 0.30042935417259764, + 0.4928300458363979, + 0.5909147810087665, + 0.6560283056307786, + 0.6529826920994365, + 0.21245689034478643, + 0.0, + 0.25823959884300207, + 0.8702997215329452, + 0.09858678102585494, + 0.1071330426625392, + 0.15789743374420173, + 0.2386755729321265, + 0.331828173555746, + 0.3342485620198905, + 0.5724301560051253, + 0.9867906045964168, + 0.13607794370091286, + 0.08583321246739573, + 0.4972894478861094, + 0.1530775704879982, + 0.20225922331967494, + 0.0, + 0.26323703471860227, + 0.10084431602237554, + 0.5426556964129738, + 0.5289024134660352, + 0.1709448380594758, + 0.25437121516465033, + 0.4477802057415682, + 0.17038593322334694, + 0.22726472011548227, + 0.20977519205133544, + 0.8194258357427311, + 0.13213304451745556, + 0.47882233490179127, + 0.7221388509747162, + 0.31695010910675236, + 0.0, + 0.33695530998898493, + 0.6529307993856848, + 0.9628235963714177, + 0.9993870826771801, + 0.19252572219692232, + 0.08765399997103263, + 0.0581538942759503, + 0.5397136324606268, + 0.8731278729063339, + 0.14950926130842845, + 0.7120256785725886, + 0.8579793159249135, + 0.8549647720950472, + 0.7427662365918138, + 0.43008529261945827, + 0.0, + 0.4492876180568678, + 0.9090801437399376, + 0.09316255431007736, + 0.03670093058785795, + 0.18036270552792533, + 0.8571227794644961, + 0.32732100922546326, + 0.5235215265040551, + 0.6278161252233143, + 0.6433156024933611, + 0.21195043574798933, + 0.4165854014765501, + 0.6236830213567182, + 0.09747308321433545, + 0.38205474507864545, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8009821990819471, + 0.441074716407614, + 0.1324946173907643, + 0.04393292615504507, + 0.542628260527527, + 0.3428595859866824, + 0.5925956879200618, + 0.8854051017943605, + 0.6755739304087508, + 0.7843390980659217, + 0.33021794490919254, + 0.05287495437156575, + 0.18007685104880522, + 0.7061814496258494, + 0.8806449743100146, + 0.0, + 0.16261670057452993, + 0.8839677990379725, + 0.18859605999193563, + 0.6088494211643908, + 0.3940549809751147, + 0.3309934090725344, + 
0.34190091925081834, + 0.4825202708883515, + 0.28742329888160345, + 0.9461578912277743, + 0.5208171862090942, + 0.42250707783410546, + 0.4897248549079305, + 0.9076819988382887, + 0.24449817784266992, + 0.0, + 0.10096160568696722, + 0.9429575143163699, + 0.05443102316067028, + 0.5617434638102767, + 0.3078168073980103, + 0.4565862839563939, + 0.1497710872432816, + 0.8796702772317825, + 0.5675581595326639, + 0.714179585607935, + 0.1257182515892451, + 0.5229590720141394, + 0.49879746283718907, + 0.44361192755265433, + 0.23503360698645348, + 0.0, + 0.7750291408374171, + 0.303162155012238, + 0.15378035856449046, + 0.12864982486943544, + 0.6433838365102315, + 0.30889149763843304, + 0.05043284229565137, + 0.9478429513901184, + 0.25814051157914375, + 0.48741192414739276, + 0.4419299663780526, + 0.23903725103598494, + 0.5181743163633296, + 0.4544050759784106, + 0.8971672699276627, + 0.0, + 0.7619767309487686, + 0.5253202610513421, + 0.20938221090808495, + 0.9043544854206975, + 0.8654464557062445, + 0.37657346739120734, + 0.4252195556035926, + 0.9797923215172825, + 0.812177371806046, + 0.9831417234799066, + 0.8604289208687927, + 0.3383166861373713, + 0.6031889666449555, + 0.05833381601508547, + 0.18894389117369126, + 0.0, + 0.6234024471842868, + 0.9453496962684602, + 0.37698919753616655, + 0.3092177161664381, + 0.31112000075509294, + 0.9076027077945752, + 0.21628338657472368, + 0.9199145227784984, + 0.7126000236614023, + 0.20082315422941044, + 0.13151079869255744, + 0.4504454759605123, + 0.7805639820944615, + 0.7271458786797137, + 0.45174444962748084, + 0.0, + 0.7456791453022824, + 0.10624948595479444, + 0.7564054703944864, + 0.7558252141582021, + 0.8402252959498852, + 0.7298996208197877, + 0.43032266953940124, + 0.9811092606310234, + 0.7332494031347755, + 0.13704347119861737, + 0.47587194131935573, + 0.1805762819338792, + 0.44942693186825433, + 0.4584620530329657, + 0.17493126362489997, + 0.0, + 0.9034169664629678, + 0.5621732507602889, + 0.5445364366349917, + 
0.5583397485524766, + 0.22090446924316398, + 0.6020940366184432, + 0.4187764379308838, + 0.43833619232328425, + 0.7228397599802612, + 0.8512555981805043, + 0.8999039827858538, + 0.522460710837446, + 0.5177390179643111, + 0.04557477464432602, + 0.836352740461245, + 0.0, + 0.38926051257600147, + 0.6583878187690957, + 0.2513179974053199, + 0.8958915681604015, + 0.9912339737001905, + 0.6713317484176837, + 0.2134323748105864, + 0.34955560173990274, + 0.4337637334792769, + 0.7964846003028568, + 0.9845071663603152, + 0.9306900111882482, + 0.3503713482912416, + 0.5273641821348103, + 0.084927652377224, + 0.0, + 0.5878183225017242, + 0.6215633689577573, + 0.9796191453417322, + 0.39166413971568137, + 0.13002675979418454, + 0.4777868315595878, + 0.6962346645567948, + 0.9270779744224682, + 0.8268815635888066, + 0.4091923671806147, + 0.9237927583581761, + 0.04014079975154461, + 0.8656802141696627, + 0.29305104679289384, + 0.5874173473576125, + 0.0, + 0.4762511800403978, + 0.562360421384021, + 0.1653088206068929, + 0.05839221069377043, + 0.6526138412698727, + 0.07832458625798133, + 0.8310724037921118, + 0.517775519068177, + 0.8412469413209996, + 0.31045855241488995, + 0.2386871432866735, + 0.7550358099700754, + 0.7261771617422799, + 0.9237274728426907, + 0.6471629055673879, + 0.0, + 0.3427472851213783, + 0.14917495864570574, + 0.20037231732659289, + 0.8252067858503819, + 0.09216879607904038, + 0.9734320636176302, + 0.2990105838756544, + 0.3105554763905335, + 0.1474476927435372, + 0.030896355996025582, + 0.5534649564750587, + 0.6437113212818949, + 0.28516427684518286, + 0.42995883071646424, + 0.7636427026808356, + 0.0, + 0.5229498196524415, + 0.1990541175703977, + 0.7566771749244087, + 0.367759370534129, + 0.3568983266626977, + 0.82810657231182, + 0.5365570179433197, + 0.10963652896264764, + 0.9234764162963465, + 0.757015247112456, + 0.20577112469977277, + 0.9288104329310127, + 0.18547117379016476, + 0.150731269366838, + 0.44699194592881064, + 0.0, + 0.19100645840505737, + 
0.13673646082849844, + 0.8140378697236609, + 0.8047043908704921, + 0.5241581243146272, + 0.07414715861943444, + 0.3060407632888966, + 0.4812600005958907, + 0.18798027885314128, + 0.07197161050967338, + 0.08234585936313954, + 0.5963936691423887, + 0.2345504834009452, + 0.9148202227696328, + 0.9027928153471831, + 0.0, + 0.2063826479975016, + 0.8010236025928041, + 0.2671475283709549, + 0.331070513727568, + 0.2869149872217063, + 0.9476078973497248, + 0.03278489977338639, + 0.5294667536357233, + 0.2697588639359385, + 0.49144132964590803, + 0.3759674272553455, + 0.19749759108749632, + 0.659819961882109, + 0.8219229104805507, + 0.646378969429277, + 0.0, + 0.30986928493275756, + 0.9182708569476886, + 0.1265945021649606, + 0.8427914006624496, + 0.8194309518812257, + 0.1538330289760801, + 0.7896690512910464, + 0.2041727064881632, + 0.3698505396815416, + 0.37924878597108735, + 0.6874593913390324, + 0.934834484154346, + 0.1562254714126211, + 0.7902051021070082, + 0.13980337040199153, + 0.0, + 0.4092137864518218, + 0.3747334543024966, + 0.48145587740765083, + 0.4546428420882097, + 0.7346681932862287, + 0.29043280851801845, + 0.7761621858701311, + 0.2838990139784022, + 0.3661020382169693, + 0.9059632653148812, + 0.4813103230605512, + 0.592757344644676, + 0.2116248413750591, + 0.736886207394238, + 0.677975367124943, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.24084112883869346, + 0.1494233919463478, + 0.945195923181658, + 0.5772244084612589, + 0.3619909853418556, + 0.952487495368475, + 0.6125863012073, + 0.7045599148410046, + 0.10888843854080876, + 0.7985490511307461, + 0.2585022034479668, + 0.7246342475822394, + 0.5362265551089003, + 0.8451824770339866, + 0.9395142817895802, + 0.0, + 0.7548803774502774, + 0.9734830518971015, + 0.19785676440706323, + 0.7805785293121652, + 0.11610589530722226, + 0.5848071780937557, + 0.16019686273882827, + 0.8398035524152211, + 0.4159560478956378, + 0.5103799906082482, + 0.5954012126022314, + 0.30369946870510367, + 0.31083367094480774, + 0.47043432000714425, + 0.6088522839551925, + 0.0, + 0.6620564587626303, + 0.07844539334766898, + 0.040920026147275634, + 0.8953649896934751, + 0.5152027988693856, + 0.6219312022306421, + 0.0741103312725464, + 0.6528042630025218, + 0.09773646507064315, + 0.9884547684188146, + 0.26046382335600893, + 0.5913308807974837, + 0.9458162437834565, + 
0.5724754026148015, + 0.495809851872564, + 0.0, + 0.8177084069796109, + 0.6164318883173433, + 0.9351549004081587, + 0.6409511513911823, + 0.9510668758737386, + 0.6480574573833584, + 0.46118310745733904, + 0.15778714571313024, + 0.9506250929550092, + 0.6686291645888692, + 0.23759124635806517, + 0.4200052326488082, + 0.5368716137709668, + 0.004845450283341046, + 0.08942223322155807, + 0.0, + 0.4497355161525367, + 0.26604132949575376, + 0.010145769760756762, + 0.8095078510257275, + 0.6817389961102805, + 0.23261939097520345, + 0.2915310883826556, + 0.994870203207059, + 0.7165538499603809, + 0.484441793375036, + 0.3992076404799272, + 0.5089633871150249, + 0.08517116370579714, + 0.4720206792193603, + 0.8317660366187062, + 0.0, + 0.49637305316783686, + 0.3713751293399885, + 0.9125721723942728, + 0.4777207829514538, + 0.48543136704276146, + 0.9539456368354925, + 0.04594719731054464, + 0.772800369146076, + 0.8272900417095005, + 0.8601060687899627, + 0.6422808583364977, + 0.15832813379306465, + 0.47942796349580374, + 0.030236385139770006, + 0.7806329623166633, + 0.0, + 0.06031253844750917, + 0.4476627640071469, + 0.10150767966133589, + 0.8451782444520659, + 0.09787818155047345, + 0.14453844585721976, + 0.46898852177815886, + 0.9952882193920738, + 0.3761976935804109, + 0.7384554517702806, + 0.6649206411401364, + 0.2726185383027213, + 0.5261160028198052, + 0.1985687036209035, + 0.2155657397907439, + 0.0, + 0.32403381469056414, + 0.6634616329456422, + 0.35528649705266857, + 0.335433172985768, + 0.5607323995561343, + 0.4358554432686752, + 0.3918139095493063, + 0.17219088305195918, + 0.2894676415521634, + 0.14807066769369526, + 0.15411604551665736, + 0.6691432118075796, + 0.9478110852305597, + 0.4100245842138641, + 0.07307201630214533, + 0.0, + 0.5473538946711503, + 0.19624534318306774, + 0.07555803254555427, + 0.4977205800765071, + 0.39469982049571495, + 0.16975837956167883, + 0.5683958150409495, + 0.49162170622134427, + 0.04898031295079941, + 0.7261359772381609, + 
0.8190614274777871, + 0.22192640914945416, + 0.6267918235164112, + 0.840237889049285, + 0.28768795460889784, + 0.0, + 0.37171885576066066, + 0.31960565477657243, + 0.6987575618880669, + 0.18074933627396428, + 0.5325301308092999, + 0.7503551540536678, + 0.0016127262625461602, + 0.4800398884972531, + 0.815948657088464, + 0.661126802613848, + 0.3367949541526595, + 0.06201838201971843, + 0.34357318498071576, + 0.8560137977550417, + 0.06343261200769512, + 0.0, + 0.788324525461174, + 0.5067756679199623, + 0.9165373839456068, + 0.011889931891967853, + 0.8948081253962866, + 0.2823068929428454, + 0.14430866096415318, + 0.8179092277145791, + 0.05740125344040248, + 0.5817499133519894, + 0.8803702703035624, + 0.002263564670649254, + 0.6090897163978664, + 0.5878198999741003, + 0.2071829239320203, + 0.0, + 0.8379583977067151, + 0.5431338293299597, + 0.8050913147835768, + 0.004166929790526908, + 0.8109321524273291, + 0.4795388911622631, + 0.6241743150793291, + 0.3316607663131258, + 0.8862503765667086, + 0.8564728643186336, + 0.6456050961200591, + 0.0639371951852924, + 0.6989887666978286, + 0.6759759178415035, + 0.3170780274656366, + 0.0, + 0.7914415434285964, + 0.146136863197592, + 0.407948922917024, + 0.759591459823289, + 0.45199903832174704, + 0.5318493587525929, + 0.39611481138107985, + 0.8185650957620395, + 0.42230782730916483, + 0.4635041841819878, + 0.87283709710225, + 0.8100798461788448, + 0.41884882789577915, + 0.11156936792070449, + 0.1506043694932555, + 0.0, + 0.7617362100641591, + 0.9490258028228717, + 0.813655360422871, + 0.5173857851845302, + 0.4820248511800226, + 0.23730579538535723, + 0.7601364214124623, + 0.29415351559983705, + 0.7264912638482958, + 0.05298736734914866, + 0.8043049555853818, + 0.7690821617520532, + 0.8729673065090883, + 0.19867837844226632, + 0.9820792310895309, + 0.0, + 0.3571904839204214, + 0.23849759863238973, + 0.3389911551070691, + 0.5684503772164061, + 0.6237657682852961, + 0.38065141915924616, + 0.13773906172571104, + 0.27665496802354606, + 
0.8147927098290846, + 0.02056027360915802, + 0.3325368498089102, + 0.9731670818648352, + 0.7348742173643088, + 0.4238266789464491, + 0.7822160158913193, + 0.0, + 0.5030827068724871, + 0.8360576308054636, + 0.7002163288508261, + 0.4252703819216933, + 0.11190767575217597, + 0.39011691639373147, + 0.9226491597075647, + 0.6398419529595498, + 0.25758121267255596, + 0.8716488273558501, + 0.05464647078648066, + 0.7396124926918702, + 0.14282913161490352, + 0.9389462218243471, + 0.6423659932917436, + 0.0, + 0.41326739640341403, + 0.07430146291190665, + 0.9688778866268454, + 0.7322266370072392, + 0.41468885862956595, + 0.7022920387327858, + 0.1418100423271802, + 0.30257000683340607, + 0.21895483008399452, + 0.04890808672895364, + 0.405743459379887, + 0.7588132781952833, + 0.5348405659282822, + 0.35982402411540404, + 0.1613397919051106, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9608167473851421, + 0.8791349281725823, + 0.219420185722919, + 0.5530765247692794, + 0.1345765816114286, + 0.6223932235798858, + 0.6176816874055919, + 0.5625454559284596, + 0.7338933338610225, + 0.5269459109399771, + 0.24970078345529045, + 0.44853988227914665, + 0.7664924889792716, + 0.3003876942246271, + 0.8776331543876847, + 0.0, + 0.8805611540165458, + 0.013985299649761695, + 0.696496442824536, + 0.9799373637002523, + 0.9693511475604221, + 0.6428224571229607, + 0.68669070131647, + 0.31020471889324197, + 0.09133753474181794, + 0.14769806616521086, + 0.8905431418544417, + 0.016692006261037395, + 0.7243577884206216, + 0.5272395236523542, + 0.9799737457979715, + 0.0, + 0.43995755436873085, + 0.6309124138521598, + 0.8912735960548054, + 0.0032622875447775312, + 0.4011192128765082, + 0.36576146081882377, + 0.706736815218591, + 0.013399623754964507, + 0.504581938364347, + 0.020110698713186603, + 0.6699851166044413, + 0.6085530322980023, + 0.8076880088445598, + 0.8513576293549324, + 0.8919126661641827, + 0.0, + 0.7227214436747231, + 0.8838217880557527, + 0.08912303187405968, + 0.3621594415092303, + 0.9797117083332056, + 0.22022425623659092, + 0.9051641692181785, + 0.2971160947783905, + 0.8384199034567146, + 0.2648698266995847, + 0.9626787314778625, + 0.7427542828773237, + 0.5855809250257206, + 0.5411408728584076, + 0.8740529861566809, + 0.0, + 0.06436096853716844, + 0.5633084641714449, + 0.37589459000188763, + 0.5555575214873543, + 0.7198610195350436, 
+ 0.4508820428473178, + 0.23696278823477612, + 0.057976811544413476, + 0.6349070450329913, + 0.04177430884627942, + 0.10919624802210393, + 0.7955229561881966, + 0.623126126742494, + 0.13506808373061063, + 0.3023566935247226, + 0.0, + 0.5277349322269849, + 0.7823892071207204, + 0.779515032098953, + 0.10449347456200508, + 0.45110096091671004, + 0.8486601873049032, + 0.10805433299033618, + 0.7182263407743993, + 0.5560462071645996, + 0.7818964760813322, + 0.5740374921689396, + 0.08627379354160591, + 0.07422560748655072, + 0.01705635218031354, + 0.8738711169383052, + 0.0, + 0.5612816137158163, + 0.3101551619842374, + 0.6054987282458344, + 0.5275911031586072, + 0.6360702166469601, + 0.9124666803031429, + 0.8533074456545962, + 0.37190815230706, + 0.18125834112146777, + 0.3184815432832945, + 0.012419673149375754, + 0.962784356962757, + 0.21381537448192722, + 0.05374819852706947, + 0.9741373049624726, + 0.0, + 0.8561568012120608, + 0.49022993419510497, + 0.965064775262806, + 0.14998202567570906, + 0.25822527856379607, + 0.36927808639599113, + 0.9199972386741255, + 0.403479359789107, + 0.9441476571686422, + 0.9635045462053818, + 0.4828152843977518, + 0.1262596202860955, + 0.017495564761169424, + 0.12491484483078552, + 0.7351716281255066, + 0.0, + 0.2518238429398917, + 0.5366202061872963, + 0.07523392353887093, + 0.9300791172269169, + 0.607954402719795, + 0.9632103474324705, + 0.33408535957924057, + 0.10942454796334922, + 0.003965465482779873, + 0.05756464076693413, + 0.07773695353473453, + 0.33873544820384693, + 0.8687688643493331, + 0.8508095503062006, + 0.48255377118784715, + 0.0, + 0.37910883483589186, + 0.4399406244482179, + 0.35204784579892134, + 0.19395370469190631, + 0.5951480848968579, + 0.38404569548614276, + 0.704079197750175, + 0.24469626034581193, + 0.9815084638103587, + 0.03919403323811799, + 0.6537545048880314, + 0.5299581611504169, + 0.15632277408697337, + 0.6591879565314969, + 0.3804547055456077, + 0.0, + 0.7771488919052115, + 0.10650595733020196, + 
0.10498090717712572, + 0.8616331480731969, + 0.13110556053538458, + 0.10034476658779246, + 0.7799438825307219, + 0.4865555477115453, + 0.7896767366689702, + 0.7978095277053101, + 0.2282323914595461, + 0.8278282608943434, + 0.3918635766170445, + 0.6606705238935365, + 0.9709112364570469, + 0.0, + 0.28126775498855594, + 0.1651583201417326, + 0.39198086935849485, + 0.23952151876929173, + 0.5077021816524762, + 0.60869755058736, + 0.5977249697585852, + 0.22889056858432866, + 0.9470481444126627, + 0.007170022077887639, + 0.838512198686958, + 0.8076430022111963, + 0.6489847040055021, + 0.12152520929588928, + 0.751241464502428, + 0.0, + 0.9742408728498494, + 0.4479151314718127, + 0.9584446507571274, + 0.49896021819806735, + 0.8377271318217183, + 0.44786677473041714, + 0.8797896801779099, + 0.5382319337791864, + 0.05887352703433224, + 0.6873020539797805, + 0.40425190233591124, + 0.5014658659241744, + 0.994572923208129, + 0.13752013135966323, + 0.26844357125940255, + 0.0, + 0.743957142985092, + 0.6122425093701338, + 0.7339128685148123, + 0.6275813002300293, + 0.08779812685144617, + 0.36992152835415826, + 0.3168484839237833, + 0.2118481651467614, + 0.6411261866124442, + 0.2424969414813284, + 0.9109797869238139, + 0.33178489020230684, + 0.313861866727421, + 0.18632171831484134, + 0.11857367654504836, + 0.0, + 0.7774538361413483, + 0.5315531909228914, + 0.47159805230791974, + 0.6587609868774141, + 0.5388153245715364, + 0.1842086762144386, + 0.882747049801107, + 0.48115942815526014, + 0.3766594375574035, + 0.5882830229331992, + 0.820235290661094, + 0.9693789458544743, + 0.7444845623669116, + 0.07829977853292136, + 0.794333955555089, + 0.0, + 0.1129593715148074, + 0.13066677035636798, + 0.2957922713360184, + 0.6403438028196977, + 0.1804017842666683, + 0.34588548944033526, + 0.21672960781100203, + 0.7456582332218422, + 0.7914504269181175, + 0.1567908855485176, + 0.4405932288211023, + 0.46555199696767424, + 0.749917869618224, + 0.7664259932278479, + 0.05041611333230889, + 0.0, + 
0.6988316967657843, + 0.7318995711720537, + 0.3042563102139143, + 0.0820986026950482, + 0.7403540576604496, + 0.9557114127496255, + 0.22407358055547544, + 0.8538968920489578, + 0.28809648989381564, + 0.792993145482098, + 0.879548978087951, + 0.8481813292051562, + 0.29827505803905274, + 0.9490186547936486, + 0.6161526215420079, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.17609043183951767, + 0.4106997578490129, + 0.6830742240878567, + 0.22368183845700818, + 0.5874936072974537, + 0.30617318607861355, + 0.059730287599608634, + 0.9336379232070264, + 0.6359704453952328, + 0.30352162315122166, + 0.06448215065872298, + 0.7743844640438137, + 0.32969520613831804, + 0.8413124272589634, + 0.9675637254398864, + 0.0, + 0.42439785804888075, + 0.5748336410743426, + 0.22482363475774414, + 0.5762363087065184, + 0.7118368895885636, + 0.9350188604597547, + 0.9764666358419519, + 0.32983641890341964, + 0.7202917459840931, + 0.6217965345338181, + 0.3865628661413777, + 0.28789240334194677, + 0.5778669424499493, + 0.7713216893689864, + 0.09142391873503264, + 0.0, + 0.878651541732694, + 0.8716720482108311, + 0.8812633149670223, + 0.5941099419362061, + 0.4724784996145066, + 0.7100306821161757, + 0.4388013631993991, + 0.8729524535192391, + 0.9255996758421857, + 0.7687109658657374, + 0.2720621278890061, + 0.5837085411066791, + 0.3729969224393933, + 0.6679545744751149, + 0.8492905496067468, + 0.0, + 0.6332258029656965, + 0.4043766758724566, + 0.28439192322520535, + 0.47387292362096944, + 0.03811807445144355, + 0.5720068008791458, + 0.12519477717707828, + 0.3022330081239847, + 0.058426799013740593, + 0.023320819100120627, + 0.5223414734005248, + 0.9600701295023808, + 0.9474000925802235, + 0.9789084492467502, + 0.9718494376128888, + 0.0, + 0.7624536314733172, + 0.10909831959983252, + 0.5616191791668175, + 0.5446440844981102, + 0.8970228942359098, + 0.6520057308649096, + 0.6683742157638903, + 0.5064836798975156, + 0.4550231290418173, + 0.2722628402534911, + 0.2422184767640796, + 0.5078465379243, + 0.22838951754131187, + 0.44120304581892156, + 0.24935278260801408, + 0.0, + 0.2913251265219915, + 0.7767648302604372, + 0.8458283466690203, + 0.6227318078771208, + 0.28761212805492, + 0.6394776495986748, + 0.25599177234566173, + 0.04706353937462038, + 0.642949294125344, + 0.27311225409169393, + 0.5965922643441786, + 0.19823936486566207, + 
0.24962992680055318, + 0.03981914599116343, + 0.5781329946966143, + 0.0, + 0.28651449249008565, + 0.6179205301392958, + 0.72951460331087, + 0.4259352384180458, + 0.9265218038092575, + 0.41012906214645206, + 0.7352715087345056, + 0.07640065135854157, + 0.5018054718510963, + 0.9935379884731801, + 0.003270803018935564, + 0.9864258682322867, + 0.4638862153474882, + 0.5263735412096952, + 0.09578831470251492, + 0.0, + 0.15756240329028193, + 0.741490602640871, + 0.6906713098954224, + 0.8538412555889207, + 0.8776289324820068, + 0.1297733902596525, + 0.709067404410877, + 0.4744346554487049, + 0.695899724502078, + 0.8281367881685542, + 0.9380722821176506, + 0.6791293508481343, + 0.3503601039991344, + 0.07466796616592464, + 0.2649244445114388, + 0.0, + 0.46747618652544276, + 0.3938791768241672, + 0.5296592740879944, + 0.57771047892299, + 0.7078111089052069, + 0.42889594247845375, + 0.9016536869224709, + 0.03958333067856634, + 0.8639826581395479, + 0.10932468007428797, + 0.06997528542307763, + 0.8359894327992111, + 0.5752986622114722, + 0.750903377517701, + 0.4727977239932305, + 0.0, + 0.08167675274636144, + 0.574227414948527, + 0.22301051502349756, + 0.26765059869154484, + 0.17342631851333679, + 0.8635888930280424, + 0.42277395190103084, + 0.6895033024175298, + 0.5067494628471603, + 0.6031099422647556, + 0.5908541492862722, + 0.11913712940531351, + 0.6167882730013311, + 0.7919220165448883, + 0.26524972619501475, + 0.0, + 0.027900281407346017, + 0.16963992490375757, + 0.4643652325114678, + 0.17093462678905502, + 0.6788627185734604, + 0.9385964746829714, + 0.3028111665227514, + 0.2856551340865151, + 0.8054500133190643, + 0.027244964577549213, + 0.8574369251313739, + 0.6263410354284882, + 0.016166882278584538, + 0.6061948636042997, + 0.7197520633562485, + 0.0, + 0.2821878871892757, + 0.510543045066065, + 0.25749719775433744, + 0.28605172752836594, + 0.5669030368603662, + 0.3672512384225597, + 0.7519063762929882, + 0.4097484805979642, + 0.9794209769425968, + 0.6245028232391507, + 
0.8260962287189011, + 0.004716871371654996, + 0.23464135687384824, + 0.7523063018354651, + 0.9307089856673118, + 0.0, + 0.013192744126595879, + 0.42923409869979856, + 0.20233229554548393, + 0.4385920239316041, + 0.4003973704276079, + 0.2506190230922314, + 0.5069633261666079, + 0.6783577163383188, + 0.040168604534327135, + 0.84315741986657, + 0.14475981348683753, + 0.446572437504548, + 0.44193962693145183, + 0.31871189327275284, + 0.08263120735369411, + 0.0, + 0.7543630545915418, + 0.6345476653797548, + 0.9979989097906906, + 0.6999704170484969, + 0.5753283521487378, + 0.33106464149625714, + 0.9622190308804511, + 0.47141796362639, + 0.9160185538525903, + 0.08697689202300873, + 0.48348432822663956, + 0.2955585394413036, + 0.49259932847547583, + 0.19139974152818284, + 0.0418081163084244, + 0.0, + 0.8648879701609505, + 0.22190079316034428, + 0.5149388848594345, + 0.8130735260392431, + 0.7595218764311276, + 0.4321388280483537, + 0.5930470115119086, + 0.7240546198094218, + 0.6857413534461929, + 0.27585060358746616, + 0.7815309543802812, + 0.5455137753523308, + 0.6728606841137473, + 0.5501756868289217, + 0.5416256380137555, + 0.0, + 0.4049214241167811, + 0.04799377277922434, + 0.2727297777117407, + 0.8899356224922098, + 0.3176949625035982, + 0.4669381105981064, + 0.06735498269550222, + 0.6789184975724107, + 0.5126307580433558, + 0.2258122827177883, + 0.18671199251177661, + 0.7207797982715984, + 0.6723510835179843, + 0.048812953155671135, + 0.2711954854642268, + 0.0, + 0.48917145843161214, + 0.2458361311174827, + 0.9636739762600919, + 0.7500953768852777, + 0.905308573254881, + 0.7449067981062075, + 0.7447112324589688, + 0.5485472792790486, + 0.38918268946666923, + 0.4548727076836522, + 0.3095097915865872, + 0.7613173486872683, + 0.23968641598432694, + 0.6046961133864972, + 0.4648603192693386, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.47302482731716133, + 0.007880372901390209, + 0.19509940039186768, + 0.020387554598512603, + 0.7110116883294046, + 0.9673819798099772, + 0.8584200406113726, + 0.8362418179647403, + 0.6636086123083815, + 0.29401723523350576, + 0.09686345874046931, + 0.7603673806334562, + 0.35290325114154386, + 0.39510819066862324, + 0.6557714246999514, + 0.0, + 0.14965956513627954, + 0.9399853913901934, + 0.04266632450662222, + 0.9168226764244878, + 0.8502032341842173, + 0.9017682508962988, + 
0.41305138487776727, + 0.38339887830828967, + 0.8363753069693501, + 0.103178529156985, + 0.7299437418119673, + 0.6994839976202726, + 0.698162849272158, + 0.4336362427422892, + 0.25081317657704216, + 0.0, + 0.16055238943198058, + 0.5964201273927553, + 0.6884657081858001, + 0.22471440081366145, + 0.4210994844816299, + 0.11639659320694484, + 0.9306820216913246, + 0.8614635125793015, + 0.6875793086930732, + 0.3515749528742018, + 0.6856942690748399, + 0.12005739066464527, + 0.3271068856625651, + 0.9816499048346137, + 0.9709161295312517, + 0.0, + 0.5649198019986985, + 0.9165452083082222, + 0.1216955570190934, + 0.6088658842819133, + 0.18885523947221294, + 0.766450901031293, + 0.27261686235292326, + 0.6056083292741613, + 0.32071332891501314, + 0.6241877547339204, + 0.49476438269489287, + 0.5456880065024868, + 0.4189939218626765, + 0.8073160804948453, + 0.8732196152737165, + 0.0, + 0.1728525607614556, + 0.5937161645890244, + 0.2170088161938074, + 0.8710388165651253, + 0.6590828390559235, + 0.8318572231704785, + 0.13408957060505167, + 0.38207684753401205, + 0.1500957462333432, + 0.04271984432232634, + 0.5935513640233603, + 0.5956666264862392, + 0.9999481294943786, + 0.40424950356651457, + 0.8866963202395287, + 0.0, + 0.4849528931068312, + 0.12163479442356173, + 0.7212915682956111, + 0.3770273640583811, + 0.4527529803827872, + 0.1773644425367451, + 0.5657951307831707, + 0.41710476538817065, + 0.8543207334047052, + 0.11406464434568975, + 0.08198852755831187, + 0.560670410555911, + 0.6992854026370279, + 0.12733028899185772, + 0.8610113408389278, + 0.0, + 0.5248107841744819, + 0.2788549366800721, + 0.9881238607976323, + 0.16246150922004476, + 0.010298410014078718, + 0.7744831630877447, + 0.5869045983159034, + 0.501277291363725, + 0.5820694298067532, + 0.4092240044392941, + 0.1288459981759551, + 0.6308205249021457, + 0.22232718325417733, + 0.15648637057944303, + 0.8194364731021205, + 0.0, + 0.4801644237960456, + 0.9865756294415141, + 0.611560875657677, + 0.4705571741180816, + 
0.9805065077862268, + 0.4502440759458981, + 0.6780982886374738, + 0.8368299537195543, + 0.21761753755245927, + 0.3270514405508459, + 0.3732842093909715, + 0.38244687735586547, + 0.6519686461649451, + 0.607189318001182, + 0.2315030440596627, + 0.0, + 0.046872474625495486, + 0.4414223039144495, + 0.7904631250863082, + 0.3290719551033835, + 0.852587990148116, + 0.7013268962937798, + 0.7342811068643949, + 0.22990676641394636, + 0.8281760236234044, + 0.060225556592617635, + 0.7528655508311395, + 0.5798390236388372, + 0.7598694836489935, + 0.9113367066278435, + 0.03958617414076482, + 0.0, + 0.3518377628846787, + 0.8251942040669727, + 0.45537467951473143, + 0.35410473798331576, + 0.9960324878359781, + 0.12202930649205435, + 0.669628536173643, + 0.014018152270963258, + 0.6598206391558846, + 0.7057552168282882, + 0.713607489819386, + 0.5213693323981742, + 0.8532133844147979, + 0.6193082899991398, + 0.745989982334892, + 0.0, + 0.8904114421508117, + 0.4922081343398308, + 0.21666224287466163, + 0.7757471803943391, + 0.7599150852713965, + 0.17088576088715013, + 0.7835569922051313, + 0.13094558049833704, + 0.687401654782377, + 0.23344719535643488, + 0.5296964882506627, + 0.318948793075461, + 0.8924128991910326, + 0.20118818041350117, + 0.07550555618941879, + 0.0, + 0.35643638108953046, + 0.9749754802453812, + 0.9721110527364413, + 0.7991735017692486, + 0.9321035953213436, + 0.22699054153673903, + 0.24168020221120146, + 0.0009357061217638574, + 0.255397163237027, + 0.6993171085260081, + 0.12494729244351699, + 0.5796393609535202, + 0.4799385251243511, + 0.2551139670640812, + 0.5677398072305826, + 0.0, + 0.3521812465945472, + 0.3854024543662874, + 0.4376174724471875, + 0.7348890041497381, + 0.2637101194353362, + 0.7352273451062853, + 0.14442949114979897, + 0.6612349069631362, + 0.7012807952488621, + 0.45874165401891887, + 0.696321156738471, + 0.1877283865808097, + 0.6479251154483289, + 0.6752157582517974, + 0.3718820255015909, + 0.0, + 0.33995031015275845, + 0.08009376202048057, + 
0.7288912678361112, + 0.30143420360246465, + 0.9068595712073455, + 0.6689329043966824, + 0.20283738190475853, + 0.8627915200760733, + 0.05127407810280038, + 0.06517577370764216, + 0.6190504641399058, + 0.10476454646339173, + 0.25822983061761207, + 0.2809981456104014, + 0.2099853041861377, + 0.0, + 0.6319675690326143, + 0.8118843123314589, + 0.9508033345749596, + 0.6455361985461284, + 0.5678419693502693, + 0.024531668297302134, + 0.11431256021175829, + 0.10775594381167564, + 0.15864091293387306, + 0.8913516995867351, + 0.40317087893656034, + 0.3123148987334283, + 0.5668466754866145, + 0.2723253082207009, + 0.5911814241408094, + 0.0, + 0.07481288952740617, + 0.29437127175916755, + 0.06824344151584916, + 0.097021040254053, + 0.22497660186965074, + 0.8601329218592405, + 0.9910801315392892, + 0.9366985674896414, + 0.5834901527176845, + 0.45896039221433815, + 0.2701747473631586, + 0.49835850286273264, + 0.2774457104756757, + 0.6973612955359146, + 0.905387881812114, + 0.0, + 0.6233619613125061, + 0.4014077074488228, + 0.550273033113911, + 0.8226085774215812, + 0.8902520346324687, + 0.868696823133307, + 0.1967350481110527, + 0.18337636063568263, + 0.6137858234042942, + 0.2868450626642217, + 0.7471122286050705, + 0.1612223397871464, + 0.670048057645535, + 0.7790571224805916, + 0.07254401352811757, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6944883939621205, + 0.3100447514322918, + 0.11896883403900971, + 0.23359058287458012, + 0.7333419713917766, + 0.46604564295917383, + 0.10989784014797643, + 0.7560097443477528, + 0.8599529905577464, + 0.10978652525432198, + 0.08931339718820286, + 0.5525263377262613, + 0.09444338231015492, + 0.3830671982001401, + 0.25115973275388237, + 0.0, + 0.8121544257041327, + 0.5707346589844915, + 0.49805096551202743, + 0.5063288373750353, + 0.15965344613263188, + 0.3854286285632015, + 0.17135608227773658, + 0.36382956801726607, + 0.2592285705025943, + 0.9179144727300402, + 0.8029575172580515, + 0.49677853983831355, + 0.07102831092627537, + 0.4461071812170265, + 0.7582204235388904, + 0.0, + 0.19831733244167404, + 0.8848880738267868, + 0.39113480730109573, + 0.7333156918306313, + 0.34853140275602523, + 0.7712388239661552, + 0.3705276024440368, + 0.44374679177671783, + 0.19468088381171034, + 0.8774715889203125, + 0.341220677302157, + 0.23355264266240994, + 0.1710117941744025, + 0.6250505625688572, 
+ 0.5149461336994416, + 0.0, + 0.8740867060574905, + 0.9645633866392591, + 0.6260269090616651, + 0.9872308162057597, + 0.48100610033868474, + 0.0338122164381961, + 0.5208851501218579, + 0.834235507515291, + 0.8752550858792478, + 0.8255571666431932, + 0.45004851477486796, + 0.4707044916958194, + 0.7442189731558098, + 0.7397426078340625, + 0.35418093689606667, + 0.0, + 0.5343816182866131, + 0.5174477187518687, + 0.7245184714278275, + 0.9567087751335582, + 0.908588020164127, + 0.08554810151272874, + 0.7172114446247627, + 0.08594012448478627, + 0.009688109625222596, + 0.05295391962667573, + 0.19176816787796191, + 0.9288130710432897, + 0.029784523574134636, + 0.5758253242449555, + 0.6752493177019367, + 0.0, + 0.5778671617664237, + 0.011731643492788435, + 0.7805534329049238, + 0.6238626029432375, + 0.5659866583594944, + 0.7748213339500115, + 0.9710896126111231, + 0.7358028954533745, + 0.8204279507754189, + 0.8973892624874723, + 0.4841654440774278, + 0.7910028595074552, + 0.47883485183893115, + 0.3856025390862867, + 0.2699628342360272, + 0.0, + 0.1874726852186367, + 0.2867817347949472, + 0.7048456344982128, + 0.8529091528158861, + 0.20902546052840065, + 0.11673825354403844, + 0.12053179006456294, + 0.4083307230276515, + 0.3774463841283995, + 0.9490014296597458, + 0.7818504798571786, + 0.0051187346385730415, + 0.6646633220873541, + 0.4137735598138511, + 0.41521288810838985, + 0.0, + 0.20287690597809083, + 0.45352626163514753, + 0.6887905041649139, + 0.775873496549078, + 0.3660097286587095, + 0.5964651150013288, + 0.1632797894950372, + 0.7116409376300524, + 0.08985316900316265, + 0.6649197448226704, + 0.39949388610391945, + 0.8200915707102844, + 0.37106207027646454, + 0.40420930428755664, + 0.3660109544474325, + 0.0, + 0.7812931577723563, + 0.768491398480592, + 0.450305157957691, + 0.05222681899273829, + 0.14452349400724374, + 0.28787058142083055, + 0.9770721674672078, + 0.9905879808866775, + 0.07816150917944442, + 0.35853226834317553, + 0.053781455786013566, + 
0.3058997781546249, + 0.06514672139074096, + 0.26060035445884067, + 0.2784017842747605, + 0.0, + 0.22665941819677315, + 0.6877009294015113, + 0.4673083932400628, + 0.15696642274242456, + 0.21462036861902123, + 0.6433816576984764, + 0.04641675121145594, + 0.1679545781363232, + 0.734247834802681, + 0.9736227239838369, + 0.3583005645554188, + 0.37581486293323263, + 0.659585503702675, + 0.7420343235449529, + 0.398658399440863, + 0.0, + 0.75842747532928, + 0.5162319637902584, + 0.775916653222131, + 0.5183258702498142, + 0.898856949017913, + 0.09562641538128203, + 0.53396424116567, + 0.47763464118221644, + 0.24541800265956126, + 0.8700606433427607, + 0.2815402664425799, + 0.5274618952752863, + 0.927663325913023, + 0.5169426510393555, + 0.6338830924291177, + 0.0, + 0.2611052306068461, + 0.592250929448377, + 0.2755455651159192, + 0.2013635345447039, + 0.7218622228378231, + 0.2619056080889337, + 0.492943050056759, + 0.22528302489661567, + 0.006531323038135084, + 0.6562432866780602, + 0.8412338142278156, + 0.8110004413668622, + 0.13781122858192307, + 0.3993281070498891, + 0.8009780856588588, + 0.0, + 0.3359045688616409, + 0.9685115104523315, + 0.5182796086437944, + 0.4320906653237585, + 0.028832214357689123, + 0.7154627008688301, + 0.9973441514052725, + 0.15349150740577944, + 0.9851879595521903, + 0.223534836918463, + 0.37759858105309807, + 0.1348608300652807, + 0.520663790298233, + 0.7287040444454399, + 0.5254870745041613, + 0.0, + 0.4134842438849258, + 0.4920293173749266, + 0.7332492500560673, + 0.6548403872319425, + 0.9201043597026283, + 0.809311589072756, + 0.7007935998816694, + 0.7918261140909724, + 0.8159557006950702, + 0.7682764354814492, + 0.12595533225633904, + 0.977421171443131, + 0.12920143317961563, + 0.22748076597122735, + 0.02751377464853988, + 0.0, + 0.23639002123064523, + 0.18372184100553468, + 0.2830996458713074, + 0.9752741290647994, + 0.030400028413322455, + 0.7613942782585106, + 0.2250531177096461, + 0.37767391404709694, + 0.9152390003119352, + 
0.23375654224951914, + 0.1748861886824502, + 0.5433468953173136, + 0.04344241624201606, + 0.4878689711986589, + 0.5137485157047458, + 0.0, + 0.21950026845864, + 0.4160122020213902, + 0.1249185444634886, + 0.7187840310652783, + 0.9310528196727699, + 0.6084913664261036, + 0.5920798904743751, + 0.9705519033762046, + 0.5319809261237174, + 0.7225109505681124, + 0.6186546287139633, + 0.830593853692327, + 0.37270789788756975, + 0.12971670498579768, + 0.05635093449989126, + 0.0, + 0.1960486143679916, + 0.4971411441348931, + 0.49867350073064154, + 0.8604668447860284, + 0.8208902575891458, + 0.17757915991952855, + 0.49348435943021485, + 0.8181055437991887, + 0.6386674079101198, + 0.3586205987176323, + 0.35904357270685705, + 0.10687002394709755, + 0.6372681529743224, + 0.5258669899624339, + 0.9481787635923713, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1945385298289729, + 0.11174429583831869, + 0.5871133591318946, + 0.23868317345093926, + 0.5872278824545255, + 0.8302544282964023, + 0.11049401163104011, + 0.4567798938922808, + 0.7334589450281805, + 0.9959236617176795, + 0.9817642056448012, + 0.46974095338659194, + 0.9831132419698814, + 0.7819253940893207, + 0.6708839571401861, + 0.0, + 0.938729607173556, + 0.327270522888665, + 0.04596336601225548, + 0.8676543852034392, + 0.7822457676984101, + 0.7836249474846858, + 0.6338801305903102, + 0.16101063423999107, + 0.24621998300812165, + 0.7014206907837336, + 0.4295441275848959, + 0.9591916312520437, + 0.8364943027951782, + 0.27256952898405784, + 0.02615453029466408, + 0.0, + 0.8204404408038716, + 0.7871467533399917, + 0.22766253876168419, + 0.40786502203700226, + 0.3813957481391139, + 0.21887572139776024, + 0.903673790846596, + 0.07458559375159257, + 0.7373266224637472, + 0.4833618504781758, + 0.17746716946820995, + 0.4818092894724819, + 0.9882352652888472, + 0.39278557908492906, + 0.11431399169107104, + 0.0, + 0.7241570324887215, + 0.7454847675821837, + 0.6847299540747964, + 0.24110777890503943, + 0.40631933125665165, + 0.9562034948827051, + 0.10081946011992693, + 0.0021001596973070358, + 0.04269045912469449, + 0.6890291362487181, + 0.2902435488798949, + 0.8424654427122448, + 0.42809371372109317, + 0.07530583895623799, + 0.14716117670576156, + 0.0, + 0.6093389691188497, + 0.722088639725986, + 0.5858706122464238, + 0.14767366658383063, + 0.3572665920431616, + 
0.7074810911121381, + 0.05059791791092505, + 0.5862390246005398, + 0.5586131649489704, + 0.5856590484732683, + 0.08839512336579591, + 0.6177703565165559, + 0.15820338758268004, + 0.9609867537908252, + 0.6595806661994528, + 0.0, + 0.7769749190486305, + 0.507396561923172, + 0.9414938492073547, + 0.2779155043023428, + 0.8029685362464445, + 0.2628821458950734, + 0.6294333885911099, + 0.6628868923889102, + 0.2356027147351989, + 0.2974634455637143, + 0.6947872191577352, + 0.5914548268852293, + 0.740440678365816, + 0.997318689003282, + 0.5466060092432324, + 0.0, + 0.36773379081836155, + 0.6078722520122439, + 0.8896660453633093, + 0.6808477214941833, + 0.6605779764497641, + 0.5165221094209821, + 0.3547720585568278, + 0.7442396561325131, + 0.2204489151654253, + 0.6342040205693581, + 0.4145449729389693, + 0.6428945625338994, + 0.8587144992549819, + 0.7618409602286108, + 0.001085314004465432, + 0.0, + 0.2552097517079104, + 0.08992465693006091, + 0.7055455062581931, + 0.5697588738868553, + 0.8928486109614543, + 0.30351336721653455, + 0.024512389319083394, + 0.4108714827516402, + 0.9283152919733236, + 0.5473016397262144, + 0.20364928823381578, + 0.8603162375957233, + 0.15776944004020232, + 0.40022865462158164, + 0.6720305901532767, + 0.0, + 0.9866977606213085, + 0.13008451536212629, + 0.28855640760370027, + 0.24644954994421764, + 0.45448154337934077, + 0.36408609446632323, + 0.6047957889090947, + 0.36832557597991455, + 0.4846454067321725, + 0.6108014254220075, + 0.6669379682928661, + 0.23271212736880276, + 0.4447292831113717, + 0.49922414091174294, + 0.24932378448431813, + 0.0, + 0.9784775617705486, + 0.3380751817185115, + 0.35211904873741195, + 0.8654116982179187, + 0.06126178080901623, + 0.7558828924626627, + 0.5311515707964978, + 0.4141340163589434, + 0.10088697192558171, + 0.9601689317116118, + 0.09416816773558967, + 0.24621748273807686, + 0.8487430455797811, + 0.8568417232004445, + 0.0745504794710592, + 0.0, + 0.4785722609689733, + 0.3446049508473783, + 0.8128776630331138, 
+ 0.07520690791323559, + 0.47004438084872735, + 0.8276563101881355, + 0.007465862970746584, + 0.3313321533358278, + 0.4017315490465736, + 0.03241048099028765, + 0.9439937826771363, + 0.15967824671716957, + 0.4304630478241951, + 0.37683154053504464, + 0.08139439884329136, + 0.0, + 0.9369544158243106, + 0.7736317862476276, + 0.3857859300048102, + 0.0631161772834603, + 0.05467044866831938, + 0.9466878427193101, + 0.736728673881844, + 0.5939341467630771, + 0.11728051217962265, + 0.8419508262389035, + 0.9522801801108401, + 0.9286072477312359, + 0.7482870459640396, + 0.23935604223587337, + 0.08959086551999396, + 0.0, + 0.9815927322079414, + 0.6877773949865291, + 0.9488416128096503, + 0.1640770811468253, + 0.6634270728456326, + 0.10106646575901224, + 0.8177845817627879, + 0.2589292492012113, + 0.13054379896506152, + 0.7692255219956106, + 0.26998546193319994, + 0.8170397770052048, + 0.5325553314478796, + 0.30006191815112726, + 0.5009323921891324, + 0.0, + 0.0626916070745912, + 0.211618101048893, + 0.3482608878833303, + 0.18789638733316583, + 0.7679739170236175, + 0.7757588121533684, + 0.6026618867208542, + 0.4508125901832236, + 0.057207028661755976, + 0.7697824483665853, + 0.954009715760126, + 0.8398061997821833, + 0.3817288785983075, + 0.3165839886769065, + 0.22788738147908705, + 0.0, + 0.7565986567258083, + 0.9904086637188828, + 0.8282525686938762, + 0.9018218932878482, + 0.6281868142001242, + 0.2742973483446487, + 0.025003866620137538, + 0.6336151561500523, + 0.8883023732291889, + 0.2865969871788444, + 0.39036876458265446, + 0.2667699626789384, + 0.2959863339866108, + 0.9470989225190005, + 0.10305083852026253, + 0.0, + 0.36820407862468163, + 0.20640602668954966, + 0.43662712246772906, + 0.8556688317503034, + 0.03890463990576798, + 0.3148020044826927, + 0.5265115907244209, + 0.6903816285753669, + 0.3862916648191498, + 0.2269566595333925, + 0.17343696470648462, + 0.9527107787771562, + 0.3036384254295068, + 0.05640277649211545, + 0.8834467563552302, + 0.0, + 
0.1086965946506333, + 0.9588831313251175, + 0.1716363139292323, + 0.5508840111771555, + 0.8808759424779828, + 0.2533111614689332, + 0.6793473349324505, + 0.9153798189195478, + 0.6560344484940595, + 0.5171224031826136, + 0.03158837165736539, + 0.03911807639696219, + 0.05403551282698826, + 0.04938711122995676, + 0.25143454404244203, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.508516256134254, + 0.12615167232749558, + 0.32959624274894794, + 0.2994534045925553, + 0.678874817396369, + 0.666050636871286, + 0.9733136452622987, + 0.297578369183902, + 0.9772791323491653, + 0.9251672475211994, + 0.36240420648471006, + 0.8236253797218036, + 0.3997470625894327, + 0.1571880583935309, + 0.7767961908541196, + 0.0, + 0.7903889426741931, + 0.34967340587406814, + 0.39460798152765886, + 0.05238415088672388, + 0.05023655136009764, + 0.855826010530713, + 0.8520285074708756, + 0.7793384593642458, + 0.8565818459966438, + 0.509054517101543, + 0.7286386898673131, + 0.4505377733927417, + 0.14207229687660716, + 0.8730320512733531, + 0.2613177121138983, + 0.0, + 0.7544134932167482, + 0.862788329972486, + 0.7399584483045175, + 0.6410694833723776, + 0.3804745842917795, + 0.7869731610351378, + 0.21735661535944628, + 0.5916989106687826, + 0.6969635381810998, + 0.006074733984967939, + 0.5547136206013454, + 0.9392364939354915, + 0.3229873897230634, + 0.5817819167440702, + 0.9628980545066194, + 0.0, + 0.14990663636449686, + 0.13587777602603968, + 0.004125306225145953, + 0.6399598731768124, + 0.5191840201594836, + 0.7835811772913304, + 0.5239419285502995, + 0.08729935221447493, + 0.33317884351237215, + 0.018588341924630325, + 0.07191988097727553, + 0.7931287503261454, + 0.21287197724077345, + 0.6898406824601082, + 0.749451549274803, + 0.0, + 0.35572620464071913, + 0.6041957173933654, + 0.9105935327516589, + 0.03978133776424597, + 0.1942054427327966, + 0.20433634929680133, + 0.29118621976589987, + 0.15943979335783653, + 0.44727823322686355, + 0.3271095725395706, + 0.11567880951890352, + 0.15233835841563403, + 0.42609120355968355, + 0.2216501304424735, + 0.3753916132921151, + 0.0, + 0.9805310612356171, + 0.2741317860937672, + 0.8020795679790991, + 0.5423330038145646, + 0.4917273829488107, + 0.12489345978692135, + 0.7762732987948073, + 0.4203789058169274, + 0.6453848435329949, + 0.7576314882278509, + 0.8415967708712976, + 0.6984026520924237, + 
0.8605832873616984, + 0.44493503735111994, + 0.23421897933574898, + 0.0, + 0.09157054120464558, + 0.5342941821706088, + 0.20726711699880285, + 0.5970895839686575, + 0.9448996174821792, + 0.37881920204462194, + 0.8504786503237889, + 0.27602220588351767, + 0.3646355881793919, + 0.26915238015695464, + 0.6205088908104088, + 0.36137893303767754, + 0.5158185492873943, + 0.7565297435830589, + 0.31298547260191445, + 0.0, + 0.5563336454536241, + 0.9522304802915943, + 0.2668317830595627, + 0.7863807099666166, + 0.5336360074458083, + 0.08789321232933589, + 0.7705821836573169, + 0.7873128406599462, + 0.10055533045164433, + 0.3637593133907019, + 0.11669575952651479, + 0.015955984928101796, + 0.2247684159340716, + 0.2787904667860476, + 0.23573410648692827, + 0.0, + 0.9662444090555127, + 0.18781842622570633, + 0.9172049811077346, + 0.6333996240209262, + 0.9129484273936075, + 0.7897819250093187, + 0.10990372813502114, + 0.03267788753450884, + 0.817290240485568, + 0.7784995456642605, + 0.8722345350609377, + 0.4382947653743352, + 0.49986924411351596, + 0.004928831866292338, + 0.5033280718530574, + 0.0, + 0.5090206107819178, + 0.6169147657473454, + 0.8495827601838573, + 0.5462896632612788, + 0.8188182782979259, + 0.8477882051689373, + 0.24823204807103083, + 0.5036901374293759, + 0.44222664123746036, + 0.41919714000481456, + 0.4619459388172661, + 0.7427227439555167, + 0.13343167175978632, + 0.965051216972245, + 0.9277999828057073, + 0.0, + 0.8944698597733473, + 0.9595486421618156, + 0.28341632735419997, + 0.4775576864554053, + 0.006171505314833681, + 0.016645549464185172, + 0.6874061898061669, + 0.3744176620304426, + 0.6013912203386002, + 0.06852122837363928, + 0.804942369740075, + 0.7353732748118352, + 0.42341860591577296, + 0.6933195621968967, + 0.5748949891547089, + 0.0, + 0.8000772399525441, + 0.08553288558303762, + 0.9952259746480382, + 0.7194197421541765, + 0.6271177954250524, + 0.41683321529801165, + 0.34090806337380086, + 0.2059337366698395, + 0.3910117929094934, + 
0.04522492111154375, + 0.5557809517139652, + 0.8993421539040412, + 0.029401543826772736, + 0.9916883444019905, + 0.16295051495838597, + 0.0, + 0.04067805058569762, + 0.22403659314065727, + 0.23576871224088314, + 0.9078034238059605, + 0.8140091244177107, + 0.33832082214605197, + 0.13466120206921828, + 0.3607317242249408, + 0.9873771738376842, + 0.9943936816201077, + 0.7757179788098014, + 0.6961830888047755, + 0.6675407223512813, + 0.8180467035034698, + 0.8689641412498003, + 0.0, + 0.3805312136411677, + 0.40039445737826707, + 0.6806879637359067, + 0.653273952596768, + 0.7725990689030878, + 0.6199646304097349, + 0.995040753562894, + 0.06801433930055356, + 0.7251662344448621, + 0.5897337159109326, + 0.21746085068525223, + 0.13905241097991194, + 0.5533450716289082, + 0.2820621321784835, + 0.18898102980231846, + 0.0, + 0.010863679227310574, + 0.21933931192444955, + 0.6044767139301521, + 0.8403652650017311, + 0.8614352443379194, + 0.9468701284255318, + 0.5058926033714615, + 0.6824701918978613, + 0.8403405291787942, + 0.22798418752271354, + 0.7031727612448657, + 0.5113997560843425, + 0.6765319980777957, + 0.10407498213564337, + 0.3588919574429309, + 0.0, + 0.6575222861536396, + 0.5854697640452231, + 0.4433775846107185, + 0.7320046609681803, + 0.4288072649460176, + 0.6916810523743708, + 0.30492567969563034, + 0.022092532079795224, + 0.5950131127906029, + 0.48925028748426713, + 0.10106531972042998, + 0.16058764363192768, + 0.5432334575915536, + 0.2568365237182382, + 0.1264678447810591, + 0.0, + 0.24093071006343825, + 0.5323545554233373, + 0.34513857810497683, + 0.3808008953187787, + 0.022329314776708853, + 0.04336312618561067, + 0.06847536385247566, + 0.00242090469860079, + 0.5718552844125294, + 0.29616250017007784, + 0.41312951472912907, + 0.5854848505264455, + 0.7062191053760678, + 0.5249104021805109, + 0.5834366224090215, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7674880641725208, + 0.1143785860607246, + 0.6985587446999647, + 0.714101616673333, + 0.6458128271903357, + 0.4859241264443517, + 0.06164780395668834, + 0.6315800725152276, + 0.633961532712646, + 0.5717352304310166, + 0.32356752343460526, + 0.9642408219455398, + 0.703254095661067, + 0.1412060935608218, + 0.3288235287475405, + 0.0, + 0.23186902509431684, + 0.5312925748659688, + 0.4150112313344603, + 0.9227664897499039, + 0.7516171473350333, + 
0.4147733820584908, + 0.7012842955947854, + 0.7941266653769916, + 0.18228233853549336, + 0.6103125188885554, + 0.8907119658093242, + 0.5836560038308837, + 0.9049579406346802, + 0.8497256970190455, + 0.4039289383957364, + 0.0, + 0.43171947512796116, + 0.407633919215049, + 0.1906910269033708, + 0.5429091959929884, + 0.6806632835380019, + 0.5454363386070727, + 0.9931521625581684, + 0.7405011565816481, + 0.7801150969303667, + 0.7844733630790597, + 0.05466237135341856, + 0.04329525589678418, + 0.8458425288636707, + 0.0931660900418888, + 0.3280247835683864, + 0.0, + 0.3607870091219725, + 0.6723501416953159, + 0.9931025967186558, + 0.1740731716209658, + 0.11888752281041293, + 0.49640154925412217, + 0.28616498396404, + 0.9611417137511172, + 0.4964775618068632, + 0.5104378623474116, + 0.41020829495245903, + 0.6101749642453493, + 0.84283034917116, + 0.20714652772845288, + 0.6718416492820873, + 0.0, + 0.07861920055464922, + 0.1701140553699696, + 0.8919750379869588, + 0.5469301484084275, + 0.8092421097793012, + 0.5398451098540821, + 0.052591010400378835, + 0.15912725983710152, + 0.6590319095994588, + 0.5499833175620161, + 0.25440100368942453, + 0.6958685259543261, + 0.48538651511395514, + 0.13841083344533156, + 0.05571950014578253, + 0.0, + 0.9143405146090642, + 0.674761692465671, + 0.660555119999795, + 0.8544848155367878, + 0.6126079097409498, + 0.8323304246448402, + 0.32033169405500295, + 0.003506592449767476, + 0.7085520684629011, + 0.8648788229870941, + 0.05818847313656628, + 0.6502409769887356, + 0.7161105097622201, + 0.1550589101655272, + 0.1908114565908603, + 0.0, + 0.6939900324209817, + 0.28224951900771, + 0.36665573220807535, + 0.8750486607980661, + 0.2145593785956328, + 0.4702413124605943, + 0.5225735424276101, + 0.9312822541717654, + 0.8915409450436893, + 0.47991554983998364, + 0.4910682817255333, + 0.46612625608306857, + 0.026869232949236377, + 0.08416754318344988, + 0.45941496350396216, + 0.0, + 0.8252559453720194, + 0.04937842339832421, + 0.3995095171574481, + 
0.8985653478197856, + 0.1481628621945109, + 0.11916838893032078, + 0.25127433737067917, + 0.25159725483599815, + 0.8810594306794882, + 0.03263047601255409, + 0.5392549185797869, + 0.08451831720702796, + 0.1101749687207052, + 0.6760353133462065, + 0.45465847717072594, + 0.0, + 0.2737528690634361, + 0.6867630605177261, + 0.4296598365023466, + 0.3076726891877498, + 0.40003201860058957, + 0.9009896716229511, + 0.6379917910568372, + 0.12598199516248132, + 0.2130708592860212, + 0.9766629260153232, + 0.7727254785212105, + 0.5650623831556305, + 0.5613147142445803, + 0.12508051431179867, + 0.3457833275163299, + 0.0, + 0.5352870930660506, + 0.916829981798807, + 0.01989849700213986, + 0.03434871223448421, + 0.41300132734426387, + 0.19197758667721276, + 0.11096274939206086, + 0.9526797769358988, + 0.813453758457884, + 0.8635121646120435, + 0.4642778462807007, + 0.9330555317628653, + 0.335244686950531, + 0.9214576170627046, + 0.0024900972192264392, + 0.0, + 0.5589018536273382, + 0.5374174482281462, + 0.018052517344653518, + 0.08128110336736016, + 0.48601215825839583, + 0.900836594320766, + 0.5501441660489764, + 0.6572154760204325, + 0.24253872610367677, + 0.8913036372894022, + 0.0869156598989933, + 0.10349994135135498, + 0.1572960090124308, + 0.8042768937476864, + 0.5173711364393875, + 0.0, + 0.8080174898444882, + 0.5950659436327118, + 0.38526569226129925, + 0.15498478323301157, + 0.32320085891402706, + 0.8106694344455577, + 0.34083688630436615, + 0.3516367312822609, + 0.1901627970373514, + 0.9125866614696632, + 0.0029485271725963047, + 0.8237565815658521, + 0.23710485350577448, + 0.8697168547501292, + 0.9878949599339439, + 0.0, + 0.27490275717512347, + 0.3050912809712565, + 0.9800683026870219, + 0.8018903956433008, + 0.48974377453984297, + 0.9418253844653995, + 0.3668192645944318, + 0.8341733061763372, + 0.5711005083911994, + 0.3935725985979368, + 0.6192632949400628, + 0.3114302365163585, + 0.2532346563624003, + 0.31768490225873736, + 0.6934846133164922, + 0.0, + 
0.0973435412134741, + 0.37883544150282067, + 0.506288676207403, + 0.2964672109857647, + 0.31267030148076125, + 0.06332430124605981, + 0.8749029394626905, + 0.41831100857965864, + 0.7586833944574236, + 0.9378705066825989, + 0.6178495288086921, + 0.29433434410944437, + 0.5526177244498481, + 0.2695787844122769, + 0.8352518312004765, + 0.0, + 0.8965124268468283, + 0.3450343859162607, + 0.8206251879085452, + 0.7035796449406677, + 0.48887291344483097, + 0.4908897899926943, + 0.47745339291693634, + 0.310111039750102, + 0.42596987376122886, + 0.6219832762988788, + 0.27289155576504376, + 0.8357408275575645, + 0.6428826439383732, + 0.970147806556169, + 0.5840781265260641, + 0.0, + 0.5696507894525211, + 0.965860524550409, + 0.1501477990573663, + 0.6306448814983877, + 0.6875677870984734, + 0.4561920374226772, + 0.5164530650387731, + 0.6498028556614855, + 0.012908133371798969, + 0.2842726447148409, + 0.5500553656898928, + 0.2734414382926802, + 0.7495770131275985, + 0.010399720490826825, + 0.4597246380499571, + 0.0, + 0.4932570010023084, + 0.1409354034143927, + 0.6004154451851471, + 0.10959617578382852, + 0.2327666378513159, + 0.5048351151379978, + 0.4806060285163555, + 0.7241174439329257, + 0.7043586011974605, + 0.6731404050429173, + 0.08064084523102322, + 0.007212850502877788, + 0.4566978242166606, + 0.042850504765231445, + 0.2719917237251668, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, std::vector({5, 5, 31, 17}), + std::vector({50, 2, 16, 16}), DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferHwcnFz, build_transfer_fp32) { + float data[5 * 5 * 31 * 17]; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, std::vector({5, 5, 31, 17}), + std::vector({50, 2, 16, 16}), DT_FLOAT}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} + +TEST_F(UtestFormatTransferHwcnFz, build_transfer_fp16) { + uint16_t data[4 * 4 * 3 * 1]; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, 
std::vector({4, 4, 3, 1}), + std::vector({16, 1, 16, 16}), DT_FLOAT16}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} + +TEST_F(UtestFormatTransferHwcnFz, build_transfer_int8) { + int8_t data[4 * 4 * 3 * 1]; + TransArgs args{ + reinterpret_cast(data), FORMAT_HWCN, FORMAT_FRACTAL_Z, std::vector({4, 4, 3, 1}), + std::vector({16, 1, 16, 32}), DT_INT8}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} + +TEST_F(UtestFormatTransferHwcnFz, build_transfer_not_support) { + float data[50 * 2 * 16 * 16]; + TransArgs args{ + reinterpret_cast(data), FORMAT_FRACTAL_Z, FORMAT_RESERVED, std::vector({50, 2, 16, 16}), + std::vector({5, 5, 31, 17}), DT_FLOAT}; + auto transfer = BuildFormatTransfer(args); + EXPECT_EQ(transfer, nullptr); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_nchw_5d_unittest.cc b/tests/ut/ge/common/format_transfer_nchw_5d_unittest.cc new file mode 100644 index 00000000..47207c59 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_nchw_5d_unittest.cc @@ -0,0 +1,634 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h" + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class UtestFormatTransferNchw5d : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferNchw5d, nchw_to_5d_uint8) { + uint8_t data[1 * 3 * 4 * 4] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, + 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, 115, 116, + 201, 202, 203, 204, 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216}; + + uint8_t data_5d[1 * 1 * 4 * 4 * 32] = { + 1, 101, 201, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 2, 102, 202, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 3, 103, 203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 4, 104, 204, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 5, 105, 205, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6, 106, 206, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 7, 107, 207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8, 108, 208, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9, 109, 209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10, 110, 210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11, 111, 211, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12, 112, 212, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13, 113, 213, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
14, 114, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15, 115, 215, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 16, 116, 216, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{data, FORMAT_NCHW, FORMAT_NC1HWC0, {1, 3, 4, 4}, {1, 1, 4, 4, 32}, DT_UINT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, 4 * 4 * 32); + for (int i = 0; i < 4 * 4 * 32; ++i) { + EXPECT_EQ(*(result.data.get() + i), data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNchw5d, nchw_to_5d_uint8_32c) { + uint8_t data[1 * 32 * 4 * 4] = { + 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20, 21, 22, + 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, 44, 45, + 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, 67, 68, + 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, 90, 91, + 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, 113, 114, + 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, 136, 137, + 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, 159, 160, + 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, 182, 183, + 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, 205, 206, + 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, 228, 229, + 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, 251, 252, + 253, 254, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 
14, 15, 16, 17, 18, 19, 20, + 21, 22, 23, 24, 25, 26, 27, 28, 29, 30, 31, 32, 33, 34, 35, 36, 37, 38, 39, 40, 41, 42, 43, + 44, 45, 46, 47, 48, 49, 50, 51, 52, 53, 54, 55, 56, 57, 58, 59, 60, 61, 62, 63, 64, 65, 66, + 67, 68, 69, 70, 71, 72, 73, 74, 75, 76, 77, 78, 79, 80, 81, 82, 83, 84, 85, 86, 87, 88, 89, + 90, 91, 92, 93, 94, 95, 96, 97, 98, 99, 100, 101, 102, 103, 104, 105, 106, 107, 108, 109, 110, 111, 112, + 113, 114, 115, 116, 117, 118, 119, 120, 121, 122, 123, 124, 125, 126, 127, 128, 129, 130, 131, 132, 133, 134, 135, + 136, 137, 138, 139, 140, 141, 142, 143, 144, 145, 146, 147, 148, 149, 150, 151, 152, 153, 154, 155, 156, 157, 158, + 159, 160, 161, 162, 163, 164, 165, 166, 167, 168, 169, 170, 171, 172, 173, 174, 175, 176, 177, 178, 179, 180, 181, + 182, 183, 184, 185, 186, 187, 188, 189, 190, 191, 192, 193, 194, 195, 196, 197, 198, 199, 200, 201, 202, 203, 204, + 205, 206, 207, 208, 209, 210, 211, 212, 213, 214, 215, 216, 217, 218, 219, 220, 221, 222, 223, 224, 225, 226, 227, + 228, 229, 230, 231, 232, 233, 234, 235, 236, 237, 238, 239, 240, 241, 242, 243, 244, 245, 246, 247, 248, 249, 250, + 251, 252, 253, 254, 0, 1, + }; + + uint8_t data_5d[1 * 1 * 4 * 4 * 32] = { + 0, 16, 32, 48, 64, 80, 96, 112, 128, 144, 160, 176, 192, 208, 224, 240, 1, 17, 33, 49, 65, 81, 97, + 113, 129, 145, 161, 177, 193, 209, 225, 241, 1, 17, 33, 49, 65, 81, 97, 113, 129, 145, 161, 177, 193, 209, + 225, 241, 2, 18, 34, 50, 66, 82, 98, 114, 130, 146, 162, 178, 194, 210, 226, 242, 2, 18, 34, 50, 66, + 82, 98, 114, 130, 146, 162, 178, 194, 210, 226, 242, 3, 19, 35, 51, 67, 83, 99, 115, 131, 147, 163, 179, + 195, 211, 227, 243, 3, 19, 35, 51, 67, 83, 99, 115, 131, 147, 163, 179, 195, 211, 227, 243, 4, 20, 36, + 52, 68, 84, 100, 116, 132, 148, 164, 180, 196, 212, 228, 244, 4, 20, 36, 52, 68, 84, 100, 116, 132, 148, + 164, 180, 196, 212, 228, 244, 5, 21, 37, 53, 69, 85, 101, 117, 133, 149, 165, 181, 197, 213, 229, 245, 5, + 21, 37, 53, 69, 85, 101, 117, 133, 149, 165, 181, 197, 213, 
229, 245, 6, 22, 38, 54, 70, 86, 102, 118, + 134, 150, 166, 182, 198, 214, 230, 246, 6, 22, 38, 54, 70, 86, 102, 118, 134, 150, 166, 182, 198, 214, 230, + 246, 7, 23, 39, 55, 71, 87, 103, 119, 135, 151, 167, 183, 199, 215, 231, 247, 7, 23, 39, 55, 71, 87, + 103, 119, 135, 151, 167, 183, 199, 215, 231, 247, 8, 24, 40, 56, 72, 88, 104, 120, 136, 152, 168, 184, 200, + 216, 232, 248, 8, 24, 40, 56, 72, 88, 104, 120, 136, 152, 168, 184, 200, 216, 232, 248, 9, 25, 41, 57, + 73, 89, 105, 121, 137, 153, 169, 185, 201, 217, 233, 249, 9, 25, 41, 57, 73, 89, 105, 121, 137, 153, 169, + 185, 201, 217, 233, 249, 10, 26, 42, 58, 74, 90, 106, 122, 138, 154, 170, 186, 202, 218, 234, 250, 10, 26, + 42, 58, 74, 90, 106, 122, 138, 154, 170, 186, 202, 218, 234, 250, 11, 27, 43, 59, 75, 91, 107, 123, 139, + 155, 171, 187, 203, 219, 235, 251, 11, 27, 43, 59, 75, 91, 107, 123, 139, 155, 171, 187, 203, 219, 235, 251, + 12, 28, 44, 60, 76, 92, 108, 124, 140, 156, 172, 188, 204, 220, 236, 252, 12, 28, 44, 60, 76, 92, 108, + 124, 140, 156, 172, 188, 204, 220, 236, 252, 13, 29, 45, 61, 77, 93, 109, 125, 141, 157, 173, 189, 205, 221, + 237, 253, 13, 29, 45, 61, 77, 93, 109, 125, 141, 157, 173, 189, 205, 221, 237, 253, 14, 30, 46, 62, 78, + 94, 110, 126, 142, 158, 174, 190, 206, 222, 238, 254, 14, 30, 46, 62, 78, 94, 110, 126, 142, 158, 174, 190, + 206, 222, 238, 254, 15, 31, 47, 63, 79, 95, 111, 127, 143, 159, 175, 191, 207, 223, 239, 0, 15, 31, 47, + 63, 79, 95, 111, 127, 143, 159, 175, 191, 207, 223, 239, 0, 16, 32, 48, 64, 80, 96, 112, 128, 144, 160, + 176, 192, 208, 224, 240, 1, + }; + + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{data, FORMAT_NCHW, FORMAT_NC1HWC0, {1, 32, 4, 4}, {1, 1, 4, 4, 32}, DT_UINT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, 4 * 4 * 32); + for (int i = 0; i < 4 * 4 * 32; ++i) { + EXPECT_EQ(*(result.data.get() + i), data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNchw5d, 
nchw_to_5d_fp16_single) { + uint16_t data[1 * 1 * 1 * 1] = {13425}; + uint16_t data_5d[1 * 1 * 1 * 1 * 16] = { + 13425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 1, 1, 1}, {1, 1, 1, 1, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNchw5d, nchw_to_5f_fp16) { + uint16_t data[1 * 16 * 4 * 4] = { + 14862, 8282, 14921, 15123, 15088, 15295, 14780, 15230, 14510, 14734, 12601, 14616, 14632, 14174, 13613, 13120, + 14023, 15213, 14896, 14600, 12353, 15216, 14590, 15069, 15157, 14821, 12343, 14758, 14955, 15183, 13815, 14373, + 14816, 14495, 15259, 14627, 15138, 15254, 12232, 15161, 14759, 14667, 11266, 15067, 15207, 14922, 14349, 13953, + 11390, 14221, 14167, 12936, 15162, 14722, 14709, 14221, 15108, 12783, 14534, 12250, 11988, 11460, 12802, 15234, + 14779, 15299, 13653, 14972, 15290, 15216, 12227, 14997, 12693, 11479, 14482, 14251, 12695, 14856, 14386, 13729, + 11715, 14126, 14609, 11265, 14850, 14427, 14429, 14818, 14636, 13083, 12896, 14158, 15015, 14838, 14675, 13999, + 14638, 14322, 12054, 13908, 14862, 12508, 15354, 11639, 14575, 12017, 12191, 14423, 15123, 14643, 13697, 13505, + 14509, 11435, 13925, 13264, 14131, 14553, 12318, 13424, 13679, 10590, 14423, 13791, 14705, 14625, 13069, 13100, + 14441, 14238, 14673, 13523, 10827, 15158, 13589, 15310, 12659, 12052, 15219, 15224, 14942, 15080, 12385, 14788, + 13503, 15287, 14744, 14115, 14038, 11562, 14488, 11455, 15217, 15280, 14576, 14959, 14763, 14732, 14200, 15024, + 14814, 15273, 10492, 13541, 12047, 15171, 14560, 13646, 11509, 12302, 10054, 14637, 13985, 15128, 13553, 14270, + 13795, 14807, 14718, 13446, 13469, 14775, 15145, 12862, 
10551, 14675, 15120, 12702, 14995, 15241, 14937, 13430, + 13847, 15163, 12709, 8391, 13654, 15161, 14640, 12989, 14286, 14546, 15021, 15359, 13719, 13484, 14979, 15140, + 14745, 14883, 14964, 10700, 15009, 13439, 14740, 15032, 13520, 14962, 14918, 15334, 14939, 14453, 14747, 15104, + 15059, 14912, 14952, 14445, 15189, 15097, 14165, 13677, 13470, 12830, 13288, 15144, 14033, 14983, 14663, 14409, + 13746, 12849, 15255, 11586, 12226, 8566, 12610, 14551, 12282, 13908, 14529, 9603, 14696, 13360, 14843, 14096, + }; + + uint16_t data_5d[1 * 1 * 4 * 4 * 16] = { + 14862, 14023, 14816, 11390, 14779, 11715, 14638, 14509, 14441, 13503, 14814, 13795, 13847, 14745, 15059, 13746, + 8282, 15213, 14495, 14221, 15299, 14126, 14322, 11435, 14238, 15287, 15273, 14807, 15163, 14883, 14912, 12849, + 14921, 14896, 15259, 14167, 13653, 14609, 12054, 13925, 14673, 14744, 10492, 14718, 12709, 14964, 14952, 15255, + 15123, 14600, 14627, 12936, 14972, 11265, 13908, 13264, 13523, 14115, 13541, 13446, 8391, 10700, 14445, 11586, + 15088, 12353, 15138, 15162, 15290, 14850, 14862, 14131, 10827, 14038, 12047, 13469, 13654, 15009, 15189, 12226, + 15295, 15216, 15254, 14722, 15216, 14427, 12508, 14553, 15158, 11562, 15171, 14775, 15161, 13439, 15097, 8566, + 14780, 14590, 12232, 14709, 12227, 14429, 15354, 12318, 13589, 14488, 14560, 15145, 14640, 14740, 14165, 12610, + 15230, 15069, 15161, 14221, 14997, 14818, 11639, 13424, 15310, 11455, 13646, 12862, 12989, 15032, 13677, 14551, + 14510, 15157, 14759, 15108, 12693, 14636, 14575, 13679, 12659, 15217, 11509, 10551, 14286, 13520, 13470, 12282, + 14734, 14821, 14667, 12783, 11479, 13083, 12017, 10590, 12052, 15280, 12302, 14675, 14546, 14962, 12830, 13908, + 12601, 12343, 11266, 14534, 14482, 12896, 12191, 14423, 15219, 14576, 10054, 15120, 15021, 14918, 13288, 14529, + 14616, 14758, 15067, 12250, 14251, 14158, 14423, 13791, 15224, 14959, 14637, 12702, 15359, 15334, 15144, 9603, + 14632, 14955, 15207, 11988, 12695, 15015, 15123, 14705, 14942, 14763, 
13985, 14995, 13719, 14939, 14033, 14696, + 14174, 15183, 14922, 11460, 14856, 14838, 14643, 14625, 15080, 14732, 15128, 15241, 13484, 14453, 14983, 13360, + 13613, 13815, 14349, 12802, 14386, 14675, 13697, 13069, 12385, 14200, 13553, 14937, 14979, 14747, 14663, 14843, + 13120, 14373, 13953, 15234, 13729, 13999, 13505, 13100, 14788, 15024, 14270, 13430, 15140, 15104, 14409, 14096, + }; + + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 16, 4, 4}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNchw5d, nchw_to_5d_fp16_17c) { + uint16_t data[1 * 17 * 4 * 4] = { + 14568, 13864, 14728, 9931, 13909, 15238, 13252, 14643, 14995, 14363, 15298, 14911, 15106, 14522, 12107, 14647, + 14421, 11756, 15305, 15085, 15000, 12103, 13647, 10549, 12539, 7084, 14873, 12182, 9793, 11816, 12592, 15209, + 14797, 13374, 13959, 14748, 14607, 15218, 14843, 14527, 15308, 12950, 14293, 14215, 11630, 13598, 14456, 15114, + 14544, 14495, 14165, 14941, 13415, 13477, 13552, 14460, 14687, 14904, 13047, 14713, 10997, 14531, 13482, 13465, + 13105, 11969, 13934, 15255, 15009, 14363, 12019, 14840, 14654, 15248, 14263, 12221, 14930, 14892, 14755, 14893, + 13370, 11541, 14044, 13546, 12345, 15129, 13836, 13478, 14183, 11753, 13515, 9609, 14946, 14832, 15340, 14925, + 15172, 13723, 14807, 9992, 14431, 13895, 14410, 14703, 12403, 14283, 14198, 15221, 15178, 12603, 15225, 14931, + 14381, 14035, 14168, 15150, 11364, 14014, 14987, 14563, 14639, 15168, 13906, 15010, 12638, 11430, 14599, 12569, + 15270, 12632, 14771, 14654, 12769, 14424, 13967, 10958, 14824, 14818, 14489, 11716, 14766, 13780, 14681, 12727, + 12195, 12958, 13262, 15272, 14173, 12544, 15002, 14177, 12740, 
13238, 9541, 12863, 14440, 14985, 14962, 15193, + 13233, 15051, 14305, 14954, 15184, 15284, 14597, 15050, 7525, 15014, 13617, 15349, 14660, 14740, 12123, 13636, + 15350, 13749, 14341, 14247, 12405, 14554, 13275, 15096, 14283, 12556, 13656, 14575, 13576, 14461, 10863, 15240, + 13571, 15124, 14705, 14786, 14950, 14972, 15273, 14875, 14554, 12828, 13406, 13794, 14684, 10439, 15160, 14501, + 10961, 13845, 11336, 14715, 14596, 14031, 15332, 14397, 14766, 14049, 14513, 15164, 15073, 11816, 13471, 14656, + 10330, 13742, 13744, 14736, 14995, 12372, 13185, 14143, 15104, 14558, 13360, 14754, 12315, 14360, 12378, 13539, + 13865, 14891, 12774, 14596, 14367, 14881, 13735, 13434, 15021, 15137, 13604, 11357, 15039, 14929, 15048, 14598, + 14699, 15057, 13683, 14368, 8654, 11915, 15315, 14933, 14449, 15148, 13736, 14541, 12031, 15255, 14550, 14823, + }; + uint16_t data_5d[1 * 2 * 4 * 4 * 16] = { + 14568, 14421, 14797, 14544, 13105, 13370, 15172, 14381, 15270, 12195, 13233, 15350, 13571, 10961, 10330, 13865, + 13864, 11756, 13374, 14495, 11969, 11541, 13723, 14035, 12632, 12958, 15051, 13749, 15124, 13845, 13742, 14891, + 14728, 15305, 13959, 14165, 13934, 14044, 14807, 14168, 14771, 13262, 14305, 14341, 14705, 11336, 13744, 12774, + 9931, 15085, 14748, 14941, 15255, 13546, 9992, 15150, 14654, 15272, 14954, 14247, 14786, 14715, 14736, 14596, + 13909, 15000, 14607, 13415, 15009, 12345, 14431, 11364, 12769, 14173, 15184, 12405, 14950, 14596, 14995, 14367, + 15238, 12103, 15218, 13477, 14363, 15129, 13895, 14014, 14424, 12544, 15284, 14554, 14972, 14031, 12372, 14881, + 13252, 13647, 14843, 13552, 12019, 13836, 14410, 14987, 13967, 15002, 14597, 13275, 15273, 15332, 13185, 13735, + 14643, 10549, 14527, 14460, 14840, 13478, 14703, 14563, 10958, 14177, 15050, 15096, 14875, 14397, 14143, 13434, + 14995, 12539, 15308, 14687, 14654, 14183, 12403, 14639, 14824, 12740, 7525, 14283, 14554, 14766, 15104, 15021, + 14363, 7084, 12950, 14904, 15248, 11753, 14283, 15168, 14818, 13238, 15014, 12556, 
12828, 14049, 14558, 15137, + 15298, 14873, 14293, 13047, 14263, 13515, 14198, 13906, 14489, 9541, 13617, 13656, 13406, 14513, 13360, 13604, + 14911, 12182, 14215, 14713, 12221, 9609, 15221, 15010, 11716, 12863, 15349, 14575, 13794, 15164, 14754, 11357, + 15106, 9793, 11630, 10997, 14930, 14946, 15178, 12638, 14766, 14440, 14660, 13576, 14684, 15073, 12315, 15039, + 14522, 11816, 13598, 14531, 14892, 14832, 12603, 11430, 13780, 14985, 14740, 14461, 10439, 11816, 14360, 14929, + 12107, 12592, 14456, 13482, 14755, 15340, 15225, 14599, 14681, 14962, 12123, 10863, 15160, 13471, 12378, 15048, + 14647, 15209, 15114, 13465, 14893, 14925, 14931, 12569, 12727, 15193, 13636, 15240, 14501, 14656, 13539, 14598, + 14699, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15057, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13683, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14368, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 8654, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11915, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14933, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14449, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13736, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12031, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15255, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14550, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14823, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 17, 4, 4}, {1, 2, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } 
+} + +TEST_F(UtestFormatTransferNchw5d, nchw_to_5d_float) { + float data[1 * 8 * 4 * 4] = { + 0.5892849569036265, 0.6803315204121938, 0.5177982416755607, 0.12129040093083365, 0.04237103024867994, + 0.7428342506399291, 0.5359802823824235, 0.04499999698629653, 0.7610018014391726, 0.38621323898777005, + 0.4360751167195973, 0.3834964892197801, 0.5888008090373326, 0.09836678109117547, 0.9950749943600654, + 0.9635155267139188, 0.5378606253569519, 0.2383735299723022, 0.5049400994471483, 0.2967628815119744, + 0.9172822428045107, 0.5429433505121389, 0.38499549462545024, 0.23619965842338686, 0.720664799527641, + 0.02313921408863151, 0.6488943229526023, 0.3390551602851408, 0.728417105824467, 0.9053225912171141, + 0.32494694533300583, 0.9119093270624166, 0.9647657094436359, 0.7219930950678662, 0.36167953499559, + 0.5984012357524195, 0.9544874847178995, 0.02306924612189265, 0.8026403495895027, 0.22551907272533667, + 0.36263992795411604, 0.58886941262115, 0.5735986398876265, 0.5252128788659909, 0.0827150730694497, + 0.17498225712307047, 0.4845825388200229, 0.40605108821850533, 0.9274359210940875, 0.7147299778467197, + 0.32288439175726646, 0.4065504767493492, 0.6286803275241362, 0.20374542713340105, 0.7445032000224268, + 0.9674821461856206, 0.909400577299532, 0.40363134678641066, 0.9627522330737276, 0.6933785292758723, + 0.9641353478602301, 0.7754020225695061, 0.620702777688872, 0.11214574817054179, 0.894884208921027, + 0.7101293717077931, 0.36970203638442056, 0.9356214764169016, 0.8339204066613951, 0.516307604153244, + 0.7030058401326411, 0.3991170380257899, 0.691216036157706, 0.7414799310134091, 0.22811510970169568, + 0.5609880702374889, 0.22477373948238633, 0.12881731266251306, 0.4573255943473218, 0.17517491298262455, + 0.4664711535236884, 0.36304572216421005, 0.04147865556156949, 0.321799545851576, 0.3134658252359267, + 0.9168822528697251, 0.2070779910124293, 0.4370570617679451, 0.5458639932730591, 0.34286569365484054, + 0.4939443382175456, 0.3412383781775876, 
0.299800764913397, 0.3458667905629188, 0.5037122283316625, + 0.13792096399324794, 0.5296944120355204, 0.6593970795972891, 0.672741074468844, 0.5297456647001881, + 0.006066715407170187, 0.8287877066716592, 0.18033462634801634, 0.5000576732820233, 0.8853254925542572, + 0.38219052838295775, 0.17776888090118503, 0.2556143927933693, 0.46146366919906867, 0.4037875054768396, + 0.062043324444360226, 0.4479202861693887, 0.25183795798980213, 0.6102048134444441, 0.9471408150891643, + 0.8789211226767781, 0.5987926543415545, 0.08687291331362201, 0.09890376596175199, 0.39921593538893263, + 0.8463226026274682, 0.5365747044508772, 0.762082525622205, 0.6515229727575028, 0.10831064130367352, + 0.4628228725538879, 0.820619798511191, 0.5779888725124475, + }; + float data_5d[1 * 1 * 4 * 4 * 16] = { + 0.5892849569036265, + 0.5378606253569519, + 0.9647657094436359, + 0.9274359210940875, + 0.894884208921027, + 0.4664711535236884, + 0.5296944120355204, + 0.25183795798980213, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6803315204121938, + 0.2383735299723022, + 0.7219930950678662, + 0.7147299778467197, + 0.7101293717077931, + 0.36304572216421005, + 0.6593970795972891, + 0.6102048134444441, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5177982416755607, + 0.5049400994471483, + 0.36167953499559, + 0.32288439175726646, + 0.36970203638442056, + 0.04147865556156949, + 0.672741074468844, + 0.9471408150891643, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.12129040093083365, + 0.2967628815119744, + 0.5984012357524195, + 0.4065504767493492, + 0.9356214764169016, + 0.321799545851576, + 0.5297456647001881, + 0.8789211226767781, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04237103024867994, + 0.9172822428045107, + 0.9544874847178995, + 0.6286803275241362, + 0.8339204066613951, + 0.3134658252359267, + 0.006066715407170187, + 0.5987926543415545, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7428342506399291, + 0.5429433505121389, + 
0.02306924612189265, + 0.20374542713340105, + 0.516307604153244, + 0.9168822528697251, + 0.8287877066716592, + 0.08687291331362201, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5359802823824235, + 0.38499549462545024, + 0.8026403495895027, + 0.7445032000224268, + 0.7030058401326411, + 0.2070779910124293, + 0.18033462634801634, + 0.09890376596175199, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04499999698629653, + 0.23619965842338686, + 0.22551907272533667, + 0.9674821461856206, + 0.3991170380257899, + 0.4370570617679451, + 0.5000576732820233, + 0.39921593538893263, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7610018014391726, + 0.720664799527641, + 0.36263992795411604, + 0.909400577299532, + 0.691216036157706, + 0.5458639932730591, + 0.8853254925542572, + 0.8463226026274682, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.38621323898777005, + 0.02313921408863151, + 0.58886941262115, + 0.40363134678641066, + 0.7414799310134091, + 0.34286569365484054, + 0.38219052838295775, + 0.5365747044508772, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4360751167195973, + 0.6488943229526023, + 0.5735986398876265, + 0.9627522330737276, + 0.22811510970169568, + 0.4939443382175456, + 0.17776888090118503, + 0.762082525622205, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3834964892197801, + 0.3390551602851408, + 0.5252128788659909, + 0.6933785292758723, + 0.5609880702374889, + 0.3412383781775876, + 0.2556143927933693, + 0.6515229727575028, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5888008090373326, + 0.728417105824467, + 0.0827150730694497, + 0.9641353478602301, + 0.22477373948238633, + 0.299800764913397, + 0.46146366919906867, + 0.10831064130367352, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.09836678109117547, + 0.9053225912171141, + 0.17498225712307047, + 0.7754020225695061, + 0.12881731266251306, + 0.3458667905629188, + 0.4037875054768396, + 
0.4628228725538879, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9950749943600654, + 0.32494694533300583, + 0.4845825388200229, + 0.620702777688872, + 0.4573255943473218, + 0.5037122283316625, + 0.062043324444360226, + 0.820619798511191, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9635155267139188, + 0.9119093270624166, + 0.40605108821850533, + 0.11214574817054179, + 0.17517491298262455, + 0.13792096399324794, + 0.4479202861693887, + 0.5779888725124475, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 8, 4, 4}, {1, 1, 4, 4, 16}, DT_FLOAT}; + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNchw5d, invalid_src_shape1) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 0, 4, 4}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_NE(transfer.TransFormat(args, result), SUCCESS); +} + +TEST_F(UtestFormatTransferNchw5d, invalid_src_shape2) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 1, 1, 4, 4}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_NE(transfer.TransFormat(args, result), SUCCESS); +} + +TEST_F(UtestFormatTransferNchw5d, invalid_src_shape3) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, -1, 4, 4}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_NE(transfer.TransFormat(args, result), SUCCESS); +} + 
+TEST_F(UtestFormatTransferNchw5d, invalid_src_data_type) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 1, 1, 4, 4}, {1, 1, 4, 4, 16}, DT_UNDEFINED}; + + TransResult result; + EXPECT_NE(transfer.TransFormat(args, result), SUCCESS); +} + +TEST_F(UtestFormatTransferNchw5d, invalid_dst_shape) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 1, 4, 4}, {1, 2, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_NE(transfer.TransFormat(args, result), SUCCESS); +} + +TEST_F(UtestFormatTransferNchw5d, unsupport_src_format) { + uint16_t data[1 * 4 * 4 * 1] = {0}; + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1 * 4 * 4 * 1}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_NE(transfer.TransFormat(args, result), SUCCESS); +} + +TEST_F(UtestFormatTransferNchw5d, unsupport_dst_format) { + uint16_t data[1 * 1 * 4 * 4] = {0}; + FormatTransferNchwNc1hwc0 transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_RESERVED, {1, 1, 4, 4}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_NE(transfer.TransFormat(args, result), SUCCESS); +} +} // namespace formats +} // namespace ge diff --git a/tests/ut/ge/common/format_transfer_nchw_fractalz_unittest.cc b/tests/ut/ge/common/format_transfer_nchw_fractalz_unittest.cc new file mode 100644 index 00000000..5af488a2 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_nchw_fractalz_unittest.cc @@ -0,0 +1,16876 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "common/formats/format_transfers/format_transfer_fractal_z.h" + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class UtestFormatTransferNchwFz : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferNchwFz, fp16_1) { + uint16_t data[1 * 1 * 1 * 1] = {11306}; + uint16_t ret[1 * 1 * 16 * 16] = { + 11306, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, {1, 1, 1, 1}, {1, 1, 16, 16}, DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); 
++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, fp16_1c_1n) { + uint16_t data[1 * 3 * 8 * 8] = { + 14385, 15300, 11806, 14585, 14045, 12537, 15203, 14080, 14864, 12612, 13383, 13489, 14424, 10836, 13959, 14459, + 14764, 14710, 13755, 11957, 13318, 12080, 15155, 12237, 14494, 10659, 14390, 12442, 13173, 14711, 14915, 15248, + 15269, 14975, 12726, 14530, 15099, 14602, 14428, 15106, 13340, 13799, 10559, 12961, 14665, 9564, 14428, 13065, + 15170, 13707, 15216, 15265, 13985, 14368, 12397, 15359, 14548, 14535, 12129, 15226, 12642, 14219, 13679, 13083, + 14823, 13810, 15008, 12468, 15136, 14161, 13868, 14969, 13424, 12964, 11602, 12928, 13523, 13891, 14212, 15208, + 15084, 14341, 10833, 13157, 14769, 13843, 11183, 13676, 12831, 8752, 13660, 13514, 14994, 13804, 13206, 15278, + 14920, 13567, 13391, 10380, 14690, 14201, 14810, 13383, 14192, 14220, 12880, 15025, 14849, 13480, 14940, 12028, + 15091, 13359, 14533, 14710, 11463, 10727, 14864, 12736, 15222, 14630, 11018, 15249, 14031, 14392, 15176, 13443, + 11050, 14342, 14489, 14586, 15152, 14568, 15176, 15066, 15208, 15182, 14573, 13580, 13150, 14758, 12688, 13040, + 11581, 14375, 11518, 14598, 14283, 14481, 14611, 14060, 14335, 12134, 14652, 12488, 11655, 15057, 14512, 14213, + 15190, 11385, 15129, 14801, 15017, 10859, 14462, 14169, 14090, 14488, 14702, 11157, 15235, 15180, 13368, 14829, + 13507, 13143, 14702, 13675, 14890, 14555, 12709, 12149, 15082, 13998, 13633, 13184, 10695, 12271, 14894, 14352, + }; + uint16_t ret[64 * 1 * 16 * 16] = { + 14385, 14823, 11050, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15300, 13810, 14342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11806, 15008, 14489, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14585, 12468, 14586, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14045, 15136, 15152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12537, 14161, 14568, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15203, 13868, 15176, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14080, 14969, 15066, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14864, 13424, 15208, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12612, 12964, 15182, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13383, 11602, 14573, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13489, 12928, 13580, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14424, 13523, 13150, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10836, 13891, 14758, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13959, 14212, 12688, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14459, 15208, 13040, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14764, 15084, 11581, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14710, 14341, 14375, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13755, 10833, 11518, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11957, 13157, 14598, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13318, 14769, 14283, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12080, 13843, 14481, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15155, 11183, 14611, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12237, 13676, 14060, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14494, 12831, 14335, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10659, 8752, 12134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14390, 13660, 14652, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12442, 13514, 12488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13173, 14994, 11655, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14711, 13804, 15057, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14915, 13206, 14512, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15248, 15278, 14213, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15269, 14920, 15190, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14975, 13567, 11385, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12726, 13391, 15129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14530, 10380, 14801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15099, 14690, 15017, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14602, 14201, 10859, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14428, 14810, 14462, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15106, 13383, 14169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13340, 14192, 14090, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13799, 14220, 14488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10559, 12880, 14702, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12961, 15025, 11157, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14665, 14849, 15235, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9564, 13480, 15180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14428, 14940, 13368, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13065, 12028, 14829, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15170, 15091, 13507, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13707, 13359, 13143, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15216, 14533, 14702, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15265, 14710, 13675, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13985, 11463, 14890, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14368, 10727, 14555, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12397, 14864, 12709, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15359, 12736, 12149, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14548, 15222, 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14535, 14630, 13998, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12129, 11018, 13633, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15226, 15249, 13184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12642, 14031, 10695, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14219, 14392, 12271, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13679, 15176, 14894, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13083, 13443, 14352, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({1, 3, 8, 8}), + std::vector({64, 1, 16, 16}), DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, fp16_2c_1n) { + uint16_t data[1 * 32 * 4 * 4] = { + 14867, 15260, 15277, 15286, 14899, 12865, 14893, 14649, 10910, 13382, 13039, 13155, 11677, 14341, 12719, 14066, + 15330, 11148, 13316, 15053, 13813, 14793, 13857, 14364, 13590, 15008, 14517, 13678, 13611, 14513, 12071, 14396, + 14385, 14656, 9144, 14472, 14339, 14279, 15070, 11123, 12866, 14446, 10596, 15291, 14536, 14799, 13981, 12124, + 15003, 15236, 13999, 14966, 15205, 14453, 14795, 15273, 14513, 15116, 15132, 10846, 10608, 12729, 14019, 14089, + 12935, 13385, 10469, 14478, 11543, 14643, 14739, 14707, 14600, 15115, 14131, 13323, 14824, 14947, 13462, 14821, + 14269, 14590, 12509, 14986, 14304, 14389, 14561, 13291, 14786, 14335, 12607, 13133, 13199, 12042, 15137, 15035, + 9976, 14423, 14578, 13714, 12685, 14748, 13489, 14809, 12972, 13017, 14557, 14367, 14106, 14625, 15324, 15289, + 13500, 7244, 13651, 12858, 15100, 14518, 14778, 13937, 12934, 11625, 14812, 15314, 15210, 13392, 15228, 12955, 
+ 15182, 13785, 14747, 15313, 14196, 14893, 14210, 12367, 15348, 12802, 11302, 12816, 10281, 14608, 14356, 14592, + 14597, 15056, 11908, 10393, 14021, 14200, 14858, 11542, 13815, 13380, 11632, 14235, 14595, 13983, 13165, 13524, + 14831, 14930, 15080, 15240, 14104, 14153, 14658, 15064, 13790, 14858, 14888, 13569, 15105, 13883, 14479, 14919, + 15226, 15066, 14545, 13136, 11783, 14518, 13889, 13842, 14811, 15068, 14498, 14049, 14278, 13784, 13729, 13632, + 12121, 13637, 14136, 10049, 14874, 14644, 13645, 15206, 14168, 12147, 13312, 15110, 14720, 15275, 13455, 13769, + 12059, 14808, 14737, 14668, 11360, 14521, 14342, 15113, 14908, 14828, 14754, 15150, 14534, 15181, 14204, 14936, + 15258, 14938, 14839, 13418, 13695, 12712, 14297, 13589, 13678, 6232, 14396, 15349, 11716, 14090, 14141, 11801, + 14565, 14523, 11302, 14626, 13770, 14526, 12845, 12474, 11505, 15018, 12776, 14233, 13747, 13813, 15055, 14563, + 14861, 14253, 13596, 14372, 12765, 13154, 14334, 14416, 14236, 14729, 14102, 14996, 10304, 13716, 14889, 14045, + 15148, 14892, 13118, 14349, 12497, 15081, 14478, 15104, 14398, 12607, 14523, 11511, 11408, 14245, 11378, 15347, + 14979, 14925, 14236, 15086, 13533, 14479, 13791, 15033, 14398, 14464, 14800, 14286, 13096, 15292, 14462, 14728, + 14985, 13759, 13436, 14925, 15339, 12117, 11228, 15345, 14823, 12612, 15155, 14784, 11800, 13748, 14189, 12536, + 10842, 14601, 12842, 15133, 15036, 14953, 14496, 13868, 15067, 14718, 14608, 14773, 13997, 14732, 12721, 14192, + 14717, 14969, 12499, 14086, 15207, 14876, 12881, 14960, 14144, 15045, 13750, 12622, 14500, 13861, 13938, 15335, + 12653, 14136, 12591, 14012, 12416, 12548, 14632, 14258, 14884, 8308, 15008, 14802, 13959, 13396, 8659, 14457, + 14314, 14137, 13562, 14984, 14809, 13185, 15287, 13397, 10017, 12533, 13505, 13458, 11148, 11516, 15316, 15259, + 13596, 11640, 13858, 14617, 12563, 13554, 13718, 13002, 13906, 13399, 11313, 14422, 14358, 13557, 13219, 15126, + 13125, 13494, 12741, 14341, 11387, 14482, 14415, 15244, 12473, 
14858, 14900, 13949, 13495, 15117, 15016, 14787, + 15207, 15014, 12832, 15025, 14900, 15302, 14782, 15027, 14901, 11874, 14432, 14295, 15075, 14907, 14037, 12862, + 14534, 14763, 9119, 14748, 13454, 14902, 14935, 12974, 14602, 11547, 15130, 15312, 13094, 15076, 14050, 13799, + 15205, 15359, 13509, 14297, 10385, 14708, 10783, 15312, 14019, 14112, 15177, 12018, 14884, 11945, 14913, 14630, + 15005, 12720, 14823, 14931, 11809, 15106, 12931, 13229, 12636, 15295, 12308, 14426, 13399, 11875, 12958, 15166, + 15111, 14207, 14944, 13756, 11322, 13549, 13825, 15005, 14737, 8808, 13885, 14740, 14172, 13638, 14642, 12766, + 15115, 13766, 12234, 14754, 14346, 15234, 12724, 14836, 15351, 13645, 14799, 12345, 14245, 14517, 12593, 14652, + }; + uint16_t ret[16 * 2 * 16 * 16] = { + 14867, 15330, 14385, 15003, 12935, 14269, 9976, 13500, 15182, 14597, 14831, 15226, 12121, 12059, 15258, 14565, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15260, 11148, 14656, 15236, 13385, 14590, 14423, 7244, 13785, 15056, 14930, 15066, 13637, 14808, 14938, 14523, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15277, 13316, 9144, 13999, 10469, 12509, 14578, 13651, 14747, 11908, 15080, 14545, 14136, 14737, 14839, 11302, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15286, 15053, 14472, 14966, 14478, 14986, 13714, 12858, 15313, 10393, 15240, 13136, 10049, 14668, 13418, 14626, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14899, 13813, 14339, 15205, 11543, 14304, 12685, 15100, 14196, 14021, 14104, 11783, 14874, 11360, 13695, 13770, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12865, 14793, 14279, 14453, 14643, 14389, 14748, 14518, 14893, 14200, 14153, 14518, 14644, 14521, 12712, 14526, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14893, 13857, 15070, 14795, 14739, 14561, 
13489, 14778, 14210, 14858, 14658, 13889, 13645, 14342, 14297, 12845, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14649, 14364, 11123, 15273, 14707, 13291, 14809, 13937, 12367, 11542, 15064, 13842, 15206, 15113, 13589, 12474, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10910, 13590, 12866, 14513, 14600, 14786, 12972, 12934, 15348, 13815, 13790, 14811, 14168, 14908, 13678, 11505, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13382, 15008, 14446, 15116, 15115, 14335, 13017, 11625, 12802, 13380, 14858, 15068, 12147, 14828, 6232, 15018, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13039, 14517, 10596, 15132, 14131, 12607, 14557, 14812, 11302, 11632, 14888, 14498, 13312, 14754, 14396, 12776, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13155, 13678, 15291, 10846, 13323, 13133, 14367, 15314, 12816, 14235, 13569, 14049, 15110, 15150, 15349, 14233, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11677, 13611, 14536, 10608, 14824, 13199, 14106, 15210, 10281, 14595, 15105, 14278, 14720, 14534, 11716, 13747, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 14341, 14513, 14799, 12729, 14947, 12042, 14625, 13392, 14608, 13983, 13883, 13784, 15275, 15181, 14090, 13813, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12719, 12071, 13981, 14019, 13462, 15137, 15324, 15228, 14356, 13165, 14479, 13729, 13455, 14204, 14141, 15055, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14066, 14396, 12124, 14089, 14821, 15035, 15289, 12955, 14592, 13524, 14919, 13632, 13769, 14936, 11801, 14563, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14861, 15148, 14979, 14985, 10842, 14717, 12653, 14314, 13596, 13125, 15207, 14534, 15205, 15005, 15111, 15115, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14253, 14892, 14925, 13759, 14601, 14969, 14136, 14137, 11640, 13494, 15014, 14763, 15359, 12720, 14207, 13766, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13596, 13118, 14236, 13436, 12842, 12499, 12591, 13562, 13858, 12741, 12832, 9119, 13509, 14823, 14944, 12234, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14372, 14349, 15086, 14925, 15133, 14086, 14012, 14984, 14617, 14341, 15025, 14748, 14297, 14931, 13756, 14754, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12765, 12497, 13533, 15339, 15036, 15207, 12416, 14809, 12563, 11387, 14900, 13454, 10385, 11809, 11322, 14346, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13154, 15081, 14479, 12117, 14953, 14876, 12548, 13185, 13554, 14482, 15302, 14902, 14708, 15106, 13549, 15234, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14334, 14478, 13791, 11228, 14496, 12881, 14632, 15287, 13718, 14415, 14782, 14935, 10783, 12931, 13825, 12724, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14416, 15104, 15033, 15345, 13868, 14960, 14258, 13397, 13002, 15244, 15027, 12974, 15312, 13229, 15005, 14836, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14236, 14398, 14398, 14823, 15067, 14144, 14884, 10017, 13906, 12473, 14901, 14602, 14019, 12636, 14737, 15351, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14729, 12607, 14464, 12612, 14718, 15045, 8308, 12533, 13399, 14858, 11874, 11547, 14112, 15295, 8808, 13645, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14102, 14523, 14800, 15155, 14608, 13750, 15008, 13505, 11313, 14900, 14432, 15130, 15177, 12308, 13885, 14799, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14996, 11511, 14286, 14784, 14773, 12622, 14802, 13458, 14422, 13949, 14295, 15312, 12018, 14426, 14740, 12345, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10304, 11408, 13096, 11800, 13997, 14500, 13959, 11148, 14358, 13495, 15075, 13094, 14884, 13399, 14172, 14245, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13716, 14245, 15292, 13748, 14732, 13861, 13396, 11516, 13557, 15117, 14907, 15076, 11945, 11875, 13638, 14517, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14889, 11378, 14462, 14189, 12721, 13938, 8659, 15316, 13219, 15016, 14037, 14050, 14913, 12958, 14642, 12593, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14045, 15347, 14728, 12536, 14192, 15335, 14457, 15259, 15126, 14787, 12862, 13799, 14630, 15166, 12766, 14652, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({1, 32, 4, 4}), + std::vector({32, 1, 16, 16}), DT_FLOAT16}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 2); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, fp16_2c_2n_pad) { + uint16_t data[31 * 17 * 4 * 4]{ + 14402, 14379, 14633, 14774, 13826, 14869, 13703, 15068, 13624, 13537, 14521, 15114, 13281, 14976, 12899, 15129, + 12289, 12461, 12096, 10696, 14558, 14464, 14389, 15323, 15219, 13308, 14041, 13518, 14388, 15320, 14145, 12633, + 13852, 14410, 14115, 11497, 14819, 13048, 15357, 14681, 15248, 14640, 13757, 13980, 14653, 14856, 11482, 12271, + 13540, 12429, 13971, 14888, 12988, 15321, 14609, 13548, 14965, 15148, 15160, 12685, 14235, 14747, 14941, 15335, + 14810, 12870, 11429, 14307, 14957, 14165, 13507, 15335, 15060, 15034, 15311, 13682, 14949, 14545, 14311, 14478, + 11027, 15231, 14833, 10577, 14400, 14471, 12798, 15263, 15010, 14107, 10812, 14272, 14756, 14933, 11793, 13558, + 12387, 14480, 9432, 14724, 13686, 15178, 7127, 14452, 13930, 14178, 14911, 13538, 13327, 12942, 15308, 14348, + 11288, 10750, 10395, 14163, 14618, 14458, 11317, 14390, 15194, 14971, 13180, 15247, 14157, 12746, 13356, 12826, + 14169, 15177, 11319, 14063, 13328, 14065, 12473, 15172, 12830, 15062, 14396, 14919, 8095, 14663, 15285, 11074, 
+ 14805, 13884, 14428, 14944, 14431, 13189, 10181, 15146, 14422, 14689, 15068, 14981, 14198, 14383, 14843, 14488, + 15159, 14158, 14843, 14811, 13607, 15082, 13934, 13595, 14830, 14087, 15157, 14820, 12592, 14315, 14161, 14704, + 13632, 13253, 14232, 11742, 14661, 11794, 14768, 10439, 15237, 14869, 12268, 14814, 14315, 15076, 14144, 8808, + 8595, 13506, 12548, 13451, 14936, 13946, 9166, 11487, 13611, 15071, 14916, 12429, 14425, 12981, 14181, 13510, + 13403, 12498, 7919, 15307, 14149, 15057, 14686, 13501, 13620, 13326, 14896, 15300, 14529, 12639, 13929, 12700, + 9001, 13153, 15088, 14621, 12825, 14106, 14612, 15019, 13647, 11647, 15259, 15148, 14431, 14651, 15262, 15345, + 14821, 14708, 14305, 14696, 13775, 12556, 14553, 14596, 11235, 14732, 14839, 14391, 13207, 12301, 15282, 13496, + 14065, 13863, 15014, 14777, 14892, 12814, 13125, 15044, 14655, 15189, 14590, 12415, 13728, 14926, 14932, 14764, + 15355, 12676, 14813, 11644, 15105, 12800, 14405, 10732, 12721, 15127, 14816, 12449, 15309, 13831, 13569, 15207, + 14719, 9604, 14686, 13284, 14833, 14904, 15088, 14678, 15095, 14809, 13398, 15326, 14636, 13248, 14586, 14772, + 14958, 15172, 14342, 14089, 14258, 13405, 14868, 14431, 12673, 14949, 13859, 10562, 14993, 13561, 13484, 14952, + 10920, 13863, 13174, 12359, 13959, 15042, 14786, 13899, 14605, 14381, 15227, 13005, 13322, 14089, 14018, 13082, + 9389, 15312, 14400, 14499, 14433, 14837, 13899, 14042, 14997, 12504, 14596, 10240, 14044, 7133, 14595, 14483, + 13408, 14436, 14498, 13580, 14944, 14805, 14672, 12729, 13016, 13193, 15092, 12977, 15251, 15107, 12157, 14744, + 12314, 13490, 12616, 12784, 14866, 15288, 13778, 15029, 12163, 13002, 14842, 13699, 14487, 15243, 11768, 13120, + 11907, 15083, 9064, 15080, 13562, 14200, 14107, 14491, 13819, 12799, 14530, 12957, 11736, 14283, 14595, 13042, + 14759, 13908, 12837, 12547, 12613, 14559, 14426, 14984, 14467, 14962, 14260, 14309, 14643, 13685, 15219, 12760, + 14419, 12270, 13379, 15254, 14804, 14015, 11742, 14901, 11807, 
13657, 13213, 14718, 14772, 13837, 13311, 13895, + 15124, 12599, 15281, 13663, 13340, 14732, 15192, 13832, 14527, 10077, 15051, 13593, 14514, 13150, 15300, 13191, + 14591, 15223, 14225, 13678, 12922, 13770, 13051, 9329, 12291, 14056, 14141, 13903, 11930, 15097, 14819, 13038, + 13422, 12463, 13027, 13441, 13934, 13548, 15294, 15243, 15243, 12819, 14495, 14501, 14370, 15066, 15228, 12863, + 14465, 14561, 11927, 14072, 15105, 9506, 15066, 12170, 12578, 14915, 14633, 14105, 9144, 15321, 5703, 15248, + 13347, 14493, 14870, 13255, 14797, 13812, 12955, 14658, 15091, 14919, 15262, 13537, 14312, 14754, 15260, 14447, + 14063, 12352, 14120, 15329, 15133, 14514, 14408, 15279, 13726, 12232, 14959, 13399, 14996, 15065, 14424, 15060, + 14792, 12424, 9458, 10585, 14736, 13592, 14694, 14952, 14586, 14830, 12547, 14625, 12400, 14950, 14851, 14374, + 14776, 14537, 14831, 14373, 15333, 14774, 12938, 15357, 13384, 13289, 13207, 15004, 13933, 15145, 13975, 12681, + 14719, 11611, 15038, 14519, 12573, 13596, 13429, 14441, 14376, 14861, 12909, 14600, 14605, 14604, 14023, 9499, + 15329, 12643, 13931, 15027, 12382, 15240, 14812, 13583, 14716, 13851, 7459, 14481, 14132, 9350, 15247, 14706, + 14763, 14922, 13712, 13187, 15063, 14154, 13719, 15282, 12223, 14027, 13892, 11560, 14396, 14595, 11991, 13362, + 13881, 14035, 14883, 14387, 14096, 14763, 14230, 12512, 12235, 14182, 12596, 12340, 12388, 14753, 12462, 14831, + 14609, 11970, 14616, 11512, 14016, 14776, 13673, 14152, 14832, 13716, 12119, 14614, 12102, 11829, 15047, 13935, + 12510, 14910, 14223, 14306, 13757, 11326, 15331, 14992, 12210, 14593, 10983, 14917, 15068, 11989, 14329, 10062, + 11670, 14490, 13614, 13573, 14299, 15072, 15249, 12920, 11776, 10044, 12957, 10587, 13772, 14282, 14918, 14542, + 15296, 14761, 13670, 14483, 12979, 12365, 12350, 11432, 13509, 14614, 14531, 13746, 13114, 15170, 14216, 14510, + 15309, 15061, 14720, 14117, 14479, 14618, 12389, 13789, 15227, 15248, 13544, 14556, 14727, 13365, 14559, 15345, + 14152, 13666, 
13492, 12714, 14525, 12291, 13700, 12309, 14195, 14593, 14509, 11536, 13750, 11849, 11484, 13422, + 14890, 14130, 13209, 14456, 14495, 13529, 15096, 11671, 13535, 4916, 15115, 15142, 15137, 15026, 14921, 14830, + 13556, 15296, 11716, 15265, 14297, 14645, 11518, 15117, 15291, 13955, 13785, 13452, 14107, 15063, 14635, 12740, + 12447, 13639, 14816, 14567, 15025, 14881, 12991, 14427, 15102, 12799, 13379, 12608, 10686, 14496, 14301, 12078, + 13171, 15249, 12861, 14418, 14322, 14303, 13629, 13272, 11448, 11306, 13939, 14624, 13506, 14898, 13155, 13737, + 13672, 8727, 14087, 10373, 13354, 12724, 13212, 11178, 15113, 14428, 14480, 15003, 14924, 13614, 14491, 13762, + 13601, 14831, 10198, 14248, 14119, 13918, 11619, 14970, 14298, 13402, 14792, 15078, 14575, 14180, 14120, 13085, + 14945, 10349, 13979, 15274, 15353, 14721, 13903, 14031, 10803, 14533, 15343, 13146, 14572, 14455, 15118, 14374, + 14355, 14741, 14569, 14673, 15071, 15261, 13330, 14744, 10303, 12502, 9636, 14870, 15097, 11490, 14144, 14772, + 12883, 15080, 15068, 14411, 14392, 13305, 14984, 13344, 13793, 15031, 10720, 14150, 14554, 14996, 12558, 14502, + 14015, 14391, 14409, 12520, 13362, 9595, 13370, 14939, 14396, 15285, 13944, 15225, 15329, 13146, 14257, 12734, + 12895, 14718, 14972, 14462, 15358, 13349, 14435, 15199, 15062, 13372, 11941, 14224, 15000, 15316, 12490, 13597, + 14170, 14301, 13483, 12592, 14349, 15174, 14623, 15014, 15117, 14418, 13620, 15215, 15066, 13678, 13843, 14395, + 14818, 14930, 11160, 14885, 15337, 14179, 14816, 11916, 13010, 14099, 12337, 15046, 11758, 12519, 15011, 14447, + 15274, 14129, 12313, 13104, 15269, 15000, 13469, 14529, 15326, 14479, 14093, 15346, 8366, 12904, 13518, 14932, + 13020, 14326, 13228, 11688, 14668, 14621, 14498, 15187, 9846, 12259, 11449, 14393, 15122, 15215, 15244, 15197, + 14222, 13851, 14602, 14095, 15329, 13367, 14406, 14468, 14612, 11665, 14929, 15237, 13639, 10141, 14844, 14258, + 14246, 15293, 14673, 8674, 12969, 14865, 8639, 14870, 14329, 13712, 13804, 15270, 
11621, 14359, 14244, 12867, + 12003, 14246, 11905, 14517, 12346, 14095, 12049, 13745, 12856, 13578, 14913, 15042, 10547, 12764, 11510, 15233, + 11254, 12144, 15025, 12548, 14223, 12241, 14655, 15204, 15285, 14538, 14288, 14430, 14410, 14952, 14862, 13123, + 14901, 14809, 10021, 14583, 13431, 14631, 11804, 14232, 15201, 13574, 12117, 14683, 14117, 14945, 10261, 13901, + 13659, 12441, 15117, 15064, 14558, 13502, 15118, 12588, 15345, 14768, 13934, 14882, 14278, 14318, 13492, 14823, + 14984, 13698, 15153, 13103, 13445, 14964, 14408, 12287, 15235, 13431, 14156, 13610, 14783, 12545, 8340, 14414, + 14857, 15236, 14503, 12692, 13097, 13652, 14563, 13493, 13275, 14246, 14919, 13941, 14055, 10566, 13511, 14873, + 14532, 15026, 13462, 13427, 15137, 14076, 14352, 14771, 15001, 13558, 15329, 14700, 12150, 15021, 12616, 12598, + 14496, 10637, 15339, 13123, 10664, 12994, 12358, 14219, 14679, 13296, 10598, 12601, 12072, 13737, 14608, 12265, + 13605, 14544, 14120, 10293, 12592, 12775, 15207, 14873, 13208, 14159, 13388, 15178, 14949, 12487, 14084, 13458, + 14803, 14678, 10311, 13432, 14963, 14553, 9121, 14458, 13688, 7712, 14816, 13571, 12702, 14273, 14340, 13322, + 14524, 12307, 15128, 14966, 14986, 14754, 11664, 13407, 14351, 13937, 14286, 14787, 14558, 15306, 13877, 15095, + 10024, 13911, 11470, 12557, 12031, 12972, 9385, 8415, 12979, 15047, 15191, 15318, 13354, 14737, 14281, 12678, + 14259, 15175, 12619, 14568, 14361, 14178, 13938, 14643, 11526, 6603, 13799, 12600, 13587, 14678, 14336, 14753, + 14899, 15000, 13442, 12131, 15034, 14496, 14900, 13754, 14595, 14638, 15118, 13036, 14553, 13369, 10026, 12056, + 10336, 13365, 13318, 13572, 15017, 15292, 13038, 15236, 13734, 14787, 14854, 14689, 11582, 14656, 12765, 14876, + 14532, 15001, 14462, 12341, 14861, 13452, 11285, 15176, 14689, 15185, 10596, 14263, 12985, 14651, 15152, 14289, + 12566, 14693, 14980, 13878, 14407, 15024, 11678, 13704, 12398, 14661, 14308, 15030, 12942, 11771, 13970, 11887, + 14884, 14497, 13346, 13997, 15341, 
14505, 13124, 12266, 14682, 13100, 14981, 13271, 14217, 14753, 14958, 11132, + 10386, 14887, 10339, 14997, 13711, 14176, 14488, 14775, 14396, 14816, 13629, 10435, 11719, 15329, 15249, 14634, + 14316, 12578, 14495, 13166, 14400, 14770, 14949, 14766, 15209, 13365, 14640, 12379, 10286, 11653, 14512, 14333, + 14828, 14367, 13259, 14640, 15313, 12292, 14965, 13609, 11280, 14185, 14988, 12355, 13961, 14861, 13765, 14876, + 14809, 14256, 14816, 14879, 14709, 14789, 15304, 10730, 14779, 14742, 12668, 14546, 14120, 13370, 13705, 14487, + 14208, 12827, 13700, 15161, 15127, 13212, 14851, 15299, 11610, 13495, 14632, 13567, 15011, 14351, 13677, 13749, + 14952, 13042, 12911, 12277, 8940, 14490, 13899, 13740, 13183, 15285, 12333, 14677, 12922, 14876, 14586, 15255, + 14336, 13771, 13893, 10751, 15160, 14727, 12719, 14521, 13543, 13335, 11533, 14471, 15223, 13650, 12913, 13232, + 12187, 13831, 14669, 14927, 10954, 14735, 15145, 15354, 13745, 14104, 13140, 13314, 14233, 14235, 14076, 9419, + 13557, 13797, 15124, 13409, 13437, 14427, 15224, 14516, 15148, 12570, 15177, 15305, 14738, 14345, 15062, 13113, + 15065, 15268, 14457, 14429, 13989, 15345, 12562, 14693, 12792, 14599, 14705, 12584, 14341, 14666, 14704, 15125, + 15203, 15168, 14563, 14919, 14458, 14713, 15133, 10414, 14050, 14137, 13552, 14631, 14852, 11985, 14657, 13709, + 15318, 13592, 13906, 15264, 14346, 14383, 13413, 11588, 15274, 14520, 13724, 11957, 14378, 15343, 13504, 8430, + 12324, 15333, 15166, 14671, 14929, 13684, 15089, 15132, 14644, 11365, 14799, 14793, 13464, 10510, 11789, 14475, + 13915, 13043, 11106, 14425, 10392, 14334, 14601, 13881, 14806, 12667, 13199, 15038, 15071, 14125, 13853, 14072, + 8520, 15348, 14892, 14677, 14398, 12964, 14925, 15354, 15147, 7907, 15279, 8991, 15134, 13014, 12733, 14976, + 13101, 14720, 14597, 13876, 15291, 12167, 14932, 14552, 14733, 14115, 15194, 14965, 13977, 12078, 15155, 13150, + 14928, 14460, 15320, 13591, 12367, 12637, 13333, 14913, 14071, 14945, 13727, 15046, 13730, 14598, 
14505, 13565, + 15148, 14043, 12485, 13313, 14418, 15330, 12989, 14830, 11773, 14848, 13375, 13934, 14450, 14707, 10599, 11062, + 12837, 14486, 14134, 11291, 12551, 14924, 14941, 14537, 11541, 11701, 15246, 13647, 14537, 14807, 12417, 15349, + 14358, 10748, 13921, 14313, 14450, 15036, 11314, 15189, 15325, 15022, 12346, 13532, 11988, 14031, 13781, 13720, + 14794, 14262, 12826, 13999, 14488, 14548, 14486, 14608, 14506, 14795, 13047, 14115, 13453, 14574, 14709, 15089, + 11756, 14985, 13921, 14319, 15095, 15048, 9072, 14221, 14687, 13538, 15146, 15349, 10473, 15117, 14543, 14737, + 13444, 13232, 14758, 14107, 14916, 14465, 13839, 14705, 12335, 14895, 15094, 14380, 14846, 14259, 14560, 14169, + 14120, 14590, 14909, 13828, 14416, 11265, 12928, 14484, 14768, 14556, 13587, 13663, 15224, 12994, 12499, 14668, + 13324, 12376, 14365, 15272, 12284, 9254, 14683, 13013, 14983, 14678, 13719, 13892, 14809, 12920, 12124, 14598, + 15242, 14716, 12603, 14484, 13667, 14577, 13609, 13738, 13610, 14957, 14039, 13726, 14926, 14841, 13801, 14397, + 14846, 14813, 13223, 14103, 13726, 14937, 13802, 13998, 15357, 11519, 14223, 13892, 10310, 12705, 13898, 14420, + 14609, 12310, 13127, 13717, 14808, 15135, 13812, 14919, 13141, 14121, 14334, 15344, 13068, 12426, 14684, 14360, + 14722, 13740, 15191, 14366, 14886, 14963, 9200, 14976, 14685, 14699, 9141, 15272, 14659, 10228, 9283, 13381, + 15019, 14201, 14982, 13275, 12011, 10080, 14860, 15234, 14214, 10350, 13058, 14946, 14381, 12535, 13609, 12958, + 13205, 13379, 12878, 14444, 14410, 9216, 15096, 11936, 12797, 14075, 13097, 15288, 13033, 13332, 13356, 13602, + 13693, 13640, 14189, 14996, 14884, 12579, 12380, 13260, 12342, 13752, 14011, 11327, 13457, 14389, 14633, 11956, + 15019, 13591, 14970, 12885, 14709, 12549, 14703, 13261, 13386, 12190, 14735, 12495, 15043, 14773, 14525, 13638, + 13263, 13190, 15115, 13558, 15335, 14339, 14657, 14576, 14986, 12278, 13165, 14350, 14285, 12520, 14352, 15086, + 13500, 14543, 12713, 14974, 12206, 14805, 12983, 
14725, 15346, 15205, 14608, 15288, 14822, 14113, 14154, 14589, + 11845, 10802, 13664, 14438, 14610, 13225, 14806, 15335, 15291, 13492, 13898, 13236, 14320, 11590, 14939, 14067, + 12709, 15231, 14020, 12789, 14247, 15103, 12310, 15300, 14202, 14934, 14846, 15099, 14747, 13318, 14956, 11622, + 13293, 11537, 14562, 14954, 14466, 15231, 14985, 14602, 15196, 11438, 15077, 13013, 15336, 14054, 15259, 14921, + 14946, 13782, 14515, 15163, 15209, 14635, 13588, 14531, 13896, 15002, 14128, 12954, 13115, 12727, 14605, 14765, + 12727, 10729, 15122, 13587, 15182, 12600, 12573, 13185, 15246, 13212, 14801, 12867, 12970, 14045, 12805, 13278, + 12279, 13332, 15343, 11654, 14639, 14236, 14206, 13996, 14830, 13807, 14569, 14925, 11143, 15238, 12921, 14506, + 13721, 11635, 11395, 13805, 13531, 12448, 13846, 14956, 14969, 14459, 8896, 11699, 15268, 14760, 14092, 15108, + 14261, 13524, 11082, 15267, 14698, 14538, 15246, 13931, 13360, 12631, 11603, 12644, 14511, 14441, 14138, 11403, + 6931, 11244, 14740, 12551, 13701, 14025, 14010, 14093, 14089, 10293, 14566, 11685, 14287, 13800, 14778, 11014, + 14579, 12494, 12097, 15187, 13845, 14585, 14226, 14721, 12898, 13520, 15100, 14909, 14914, 12903, 11760, 14973, + 15140, 13989, 15132, 12325, 11439, 14354, 13598, 14044, 13476, 15127, 14442, 15264, 12641, 14482, 14176, 12967, + 15252, 15284, 14941, 12852, 14604, 14734, 13837, 14652, 11673, 11775, 13576, 14874, 13277, 14181, 12487, 11848, + 14686, 13862, 14863, 14838, 15190, 12476, 14150, 15086, 10807, 15109, 15088, 11388, 14756, 15116, 14835, 13624, + 13994, 14603, 14428, 14401, 9397, 15154, 15255, 12694, 14838, 14629, 12282, 14071, 12932, 14771, 13389, 14924, + 14934, 13622, 12155, 15213, 13368, 14948, 15338, 12117, 13520, 14842, 14433, 12527, 11826, 14710, 15044, 15303, + 14897, 12856, 9555, 15251, 14343, 14140, 14668, 14414, 13466, 14325, 15299, 12382, 14039, 12219, 13351, 15065, + 14748, 15165, 13747, 14928, 14534, 15315, 13606, 14041, 14693, 14465, 13872, 14128, 13819, 14550, 13906, 11164, + 
12729, 13434, 15117, 14501, 14046, 9922, 13786, 15146, 14963, 13558, 13745, 14680, 15342, 15131, 14751, 14361, + 14560, 14572, 9091, 15157, 15277, 15067, 14572, 14382, 11844, 15010, 13441, 11357, 15009, 12956, 11136, 13992, + 14591, 14424, 11442, 10234, 14000, 14953, 11525, 11595, 12761, 13803, 14853, 14657, 11671, 15206, 12328, 14433, + 14318, 15242, 15334, 14105, 13293, 14587, 13374, 13929, 14496, 15010, 13529, 13139, 11919, 14729, 15036, 14897, + 13643, 14869, 14680, 14818, 14977, 13522, 14858, 13828, 14504, 14643, 12404, 15323, 14493, 14452, 14690, 14085, + 15213, 12322, 14657, 12091, 10428, 15237, 15018, 12504, 14724, 10557, 12848, 14513, 15019, 14133, 15177, 13432, + 14626, 14700, 15288, 14329, 14737, 14907, 14807, 15160, 14947, 14667, 14324, 13166, 14550, 14825, 13000, 13679, + 14254, 15336, 14832, 10152, 14760, 13558, 15229, 15236, 13740, 13447, 12418, 15126, 14406, 14782, 13450, 12375, + 14498, 13617, 15327, 14803, 14594, 11370, 15316, 15088, 15234, 14523, 13344, 13490, 14474, 13950, 14235, 14695, + 15019, 10878, 14530, 15328, 13517, 14899, 13853, 13170, 15091, 13688, 13374, 12492, 12501, 14989, 12819, 14953, + 15162, 11790, 14762, 15142, 14970, 14456, 15130, 15254, 7194, 12757, 14484, 14094, 14314, 14822, 14293, 12370, + 14222, 14230, 11369, 10653, 15112, 13350, 14369, 15086, 15065, 14386, 12633, 15206, 14509, 14550, 11417, 12341, + 14382, 13544, 12046, 12672, 13351, 11326, 13994, 11902, 14590, 11731, 14552, 14004, 14271, 14960, 11860, 15181, + 14808, 15139, 14239, 12911, 11824, 14597, 15006, 14226, 14707, 14541, 10802, 14287, 12944, 13546, 13690, 14370, + 15215, 12874, 14444, 14842, 15183, 14251, 14406, 14445, 14749, 14117, 13784, 10214, 7321, 15291, 13293, 13683, + 14977, 14437, 14427, 14444, 13677, 13013, 15073, 15348, 5073, 15042, 12116, 12793, 14959, 15047, 14079, 13684, + 14343, 15094, 13630, 8237, 15109, 14686, 14783, 15218, 14503, 10056, 12288, 13024, 14552, 14721, 14799, 14325, + 14110, 10034, 14440, 14515, 12718, 14832, 14370, 14063, 14474, 
12985, 14760, 14627, 13422, 12796, 15218, 13300, + 14728, 13460, 14185, 12908, 9219, 14892, 15032, 13379, 11454, 13616, 12854, 7760, 14685, 13049, 14458, 13569, + 14801, 12359, 15358, 14428, 15136, 14019, 14014, 13255, 14668, 8243, 11920, 15161, 14722, 13984, 13535, 14682, + 14096, 13631, 15096, 9218, 15347, 13791, 10685, 14481, 13793, 13429, 14307, 15253, 14798, 14351, 13315, 13562, + 14190, 13440, 14596, 14433, 11343, 11530, 4925, 14968, 10188, 15344, 15094, 10408, 15248, 15339, 12326, 14868, + 15069, 13762, 13507, 15114, 9799, 14562, 14985, 14077, 13698, 13323, 11698, 12813, 13906, 13726, 14279, 14656, + 14546, 14905, 12182, 14777, 12893, 12312, 14101, 9896, 14965, 14677, 10655, 12688, 15278, 14888, 14893, 14537, + 14639, 12550, 15128, 14666, 14379, 14723, 13339, 15116, 14368, 14744, 11817, 15255, 13789, 13541, 11920, 15188, + 13854, 14582, 15258, 13371, 15119, 13665, 15075, 14713, 13231, 15155, 11286, 13870, 12703, 14481, 14464, 9057, + 13561, 12519, 13328, 14986, 14656, 14479, 13804, 10682, 12985, 14935, 12912, 13770, 12355, 11449, 9670, 13714, + 12404, 14664, 13382, 11184, 14713, 14093, 14547, 13433, 14669, 14482, 15163, 13946, 14521, 15145, 15145, 14465, + 14199, 14553, 14445, 14102, 14743, 14238, 14464, 14524, 14546, 14885, 14920, 15027, 13515, 15314, 11534, 14510, + 12155, 13654, 15311, 13957, 13445, 14112, 14298, 15358, 13316, 13472, 14672, 11218, 14498, 14862, 12507, 12882, + 13052, 15352, 14266, 15101, 15284, 9318, 13471, 15069, 11003, 9380, 14529, 13474, 9958, 14949, 12900, 13514, + 14615, 15101, 14393, 11326, 14671, 15011, 11166, 15167, 15198, 14745, 13075, 14726, 15012, 15320, 14952, 15251, + 8692, 14574, 14805, 13973, 10449, 13937, 14144, 14675, 10440, 15337, 13138, 14544, 15088, 15218, 15346, 14690, + 14454, 14744, 15317, 14778, 14915, 14942, 13830, 15288, 14237, 13099, 13504, 12516, 14775, 13523, 13570, 14821, + 13955, 12251, 15329, 14560, 11675, 15305, 14830, 14473, 12337, 14818, 14404, 13443, 14876, 13857, 10871, 15016, + 14348, 11255, 14980, 
14038, 13835, 14487, 14440, 9509, 15004, 11235, 13522, 13627, 14482, 14019, 10659, 14776, + 14669, 14438, 13958, 14695, 15073, 13004, 13469, 10959, 13679, 13413, 14504, 14660, 14983, 14322, 12697, 14634, + 11157, 15262, 13184, 14070, 14294, 14928, 14581, 14592, 14384, 12975, 15314, 13474, 13877, 13756, 14657, 11925, + 14725, 10879, 14426, 14095, 13314, 13728, 14996, 11853, 14118, 15112, 14969, 12360, 15083, 14519, 15218, 14755, + 14013, 14460, 14342, 15143, 14800, 14388, 10658, 13742, 15351, 14182, 11547, 14074, 13026, 13801, 14874, 14577, + 10298, 14690, 14592, 13917, 15093, 13848, 15141, 12502, 15011, 12813, 14734, 14088, 15222, 13015, 13795, 14514, + 13418, 14639, 14623, 14689, 13773, 14156, 15300, 14591, 13250, 14332, 14905, 15354, 13686, 10633, 11730, 14490, + 10995, 15314, 14516, 15251, 14372, 14883, 12828, 14779, 14965, 14958, 15107, 13954, 15189, 14755, 14170, 14143, + 13411, 15079, 15332, 14533, 14477, 12481, 11910, 14374, 13119, 13170, 13908, 13999, 10315, 11690, 14592, 12098, + 14635, 15153, 15033, 14213, 14362, 14363, 15233, 12589, 14695, 15205, 9897, 12297, 12789, 14555, 14551, 15029, + 14876, 14607, 14530, 14681, 15249, 14664, 14074, 14430, 14839, 15230, 13781, 12222, 13710, 14487, 15168, 14748, + 14851, 13996, 14147, 13595, 13173, 13576, 14825, 15011, 13717, 11066, 8014, 14771, 14859, 14256, 12243, 14516, + 13306, 14167, 14488, 14483, 14433, 14540, 10296, 13495, 15021, 15054, 14788, 15322, 11598, 13564, 14472, 14139, + 13907, 14901, 15019, 14818, 15222, 14525, 6472, 13969, 14399, 14337, 14205, 13871, 13848, 12702, 13646, 9531, + 14367, 13147, 15210, 14307, 14375, 12863, 13301, 14488, 14391, 12766, 10000, 13605, 14604, 11090, 14970, 13087, + 13818, 14311, 14461, 14756, 12764, 13897, 14411, 14595, 14016, 15359, 12874, 14143, 9806, 15349, 14591, 15199, + 14603, 12178, 14918, 14408, 10838, 14980, 13930, 13054, 15065, 12418, 13865, 7336, 15131, 13077, 14193, 14901, + 13917, 14388, 15027, 14375, 13635, 12519, 13658, 12588, 11456, 14437, 14373, 14738, 
11235, 14269, 15329, 12515, + 13571, 15357, 15086, 14778, 14860, 10133, 13691, 14097, 15173, 14934, 14442, 14951, 14405, 15188, 13948, 15335, + 12292, 10437, 14963, 13320, 12041, 14768, 12774, 14656, 13852, 13624, 11927, 15193, 14651, 15267, 14617, 14867, + 13815, 15084, 14973, 15238, 14606, 14491, 14457, 12787, 15143, 13930, 12877, 13706, 7898, 13954, 14947, 11315, + 14143, 14408, 14397, 15167, 11191, 14279, 13385, 11270, 12510, 14445, 9398, 12842, 14241, 14889, 12265, 15279, + 12729, 14791, 14615, 12734, 13448, 15303, 12395, 11327, 13078, 11758, 15062, 14169, 11721, 14325, 11429, 15326, + 14858, 14912, 14523, 11814, 12967, 14788, 13480, 14427, 13523, 11151, 14598, 13381, 14579, 13875, 12986, 13345, + 15061, 14584, 14954, 12876, 15126, 13938, 15240, 9596, 12696, 15070, 12366, 14768, 13142, 14970, 14938, 13394, + 15024, 13196, 14492, 13531, 14826, 14218, 14183, 13774, 12475, 13375, 13106, 13068, 13406, 13963, 11598, 14553, + 15344, 14388, 14660, 14709, 14981, 13993, 14804, 14524, 15034, 14845, 15232, 13489, 13562, 14442, 14570, 10464, + 14674, 14588, 15128, 11673, 14958, 12747, 13111, 14488, 12986, 14640, 14749, 12370, 14886, 14405, 14835, 13564, + 14804, 14193, 14428, 13621, 15111, 14766, 12161, 15046, 13311, 14535, 14962, 9925, 15058, 13956, 14436, 15067, + 13627, 14915, 13198, 15265, 11041, 14758, 13433, 14663, 15095, 14063, 15242, 14721, 13923, 14661, 14768, 13190, + 13595, 15183, 15025, 14342, 13992, 15134, 13397, 13406, 14617, 12811, 14564, 11967, 12291, 14439, 13390, 15029, + 12462, 14922, 11796, 13895, 11696, 11550, 11153, 14576, 14512, 13791, 15028, 15162, 13886, 14251, 14957, 13106, + 12942, 15118, 13754, 14828, 14786, 13267, 13484, 14093, 14924, 14659, 14269, 14758, 14662, 14473, 13054, 12338, + 14556, 14491, 10344, 15154, 13659, 15176, 13262, 14342, 15033, 14855, 10100, 15157, 14492, 14590, 14693, 15167, + 14479, 13039, 12936, 15105, 15300, 14390, 14721, 15286, 14022, 15269, 15054, 15129, 8785, 15279, 15213, 14579, + 15022, 14974, 13814, 14826, 12961, 
13032, 15101, 14650, 14667, 14759, 13584, 14126, 14705, 15312, 13255, 12368, + 14480, 14845, 13778, 15055, 13481, 15199, 14392, 14377, 13732, 12418, 13193, 10893, 14093, 14900, 14875, 14287, + 14651, 13376, 13428, 14856, 13953, 7701, 12967, 13071, 13972, 14590, 13375, 13267, 13319, 15103, 14467, 14676, + 15247, 14354, 12732, 12222, 14445, 12821, 13096, 14216, 10315, 14903, 14611, 14922, 14371, 15129, 4195, 14490, + 14809, 15131, 14407, 14474, 12547, 14612, 15050, 14870, 13957, 15111, 11912, 14078, 13297, 12912, 12705, 14064, + 14470, 12635, 13246, 14890, 14996, 14240, 14258, 11484, 11762, 12959, 14210, 14992, 14752, 14690, 14166, 15245, + 15184, 14944, 14762, 13533, 13247, 13172, 13298, 11818, 14519, 14816, 14297, 15240, 14933, 14609, 14194, 14118, + 14802, 14679, 10268, 14549, 14583, 14106, 13745, 13472, 12738, 15175, 13091, 14910, 13149, 13013, 15188, 14384, + 12978, 13913, 14164, 15012, 15063, 14524, 13910, 14018, 11713, 13615, 15098, 11827, 14319, 14625, 14352, 15154, + 15009, 15109, 13468, 15206, 15331, 11571, 13163, 14590, 13805, 14527, 14903, 14301, 15048, 14788, 15112, 14798, + 15202, 13552, 14436, 6181, 15349, 14674, 13595, 14654, 13776, 14460, 15187, 14121, 10985, 14695, 13565, 15267, + 14529, 14077, 11718, 15025, 15059, 14203, 15296, 14050, 14075, 13587, 14270, 14654, 15077, 13120, 12581, 13168, + 13084, 15065, 14979, 13511, 10628, 14836, 14917, 13534, 13017, 13475, 13106, 14724, 14658, 14112, 14909, 14834, + 11909, 9046, 14861, 14215, 14658, 14982, 15165, 14611, 13693, 13530, 12456, 11465, 15309, 13991, 12447, 14392, + 12829, 14797, 13928, 14497, 14949, 15277, 13876, 13152, 15306, 13578, 14514, 14836, 15038, 14719, 13551, 14985, + 15295, 15152, 14641, 14852, 13345, 14391, 14900, 14750, 13871, 14655, 10900, 14496, 14519, 13317, 12369, 13895, + 14812, 14474, 14702, 14441, 13287, 14544, 13580, 13006, 13714, 14145, 11718, 13914, 14080, 14953, 12856, 13405, + 14878, 11354, 12458, 15245, 6968, 15275, 13654, 15123, 14722, 13091, 15114, 14521, 13682, 14861, 
13661, 15033, + 15096, 13724, 14597, 14216, 15168, 15191, 13811, 14961, 13695, 14493, 12913, 10037, 14333, 14541, 14488, 14233, + 13653, 14821, 14620, 12786, 13684, 13172, 15324, 13266, 14558, 15283, 13413, 13585, 11960, 12677, 14803, 14648, + 14935, 8686, 14603, 14495, 15181, 8810, 14953, 14259, 10367, 13338, 13196, 13562, 12674, 14062, 12494, 13778, + 13949, 15155, 10232, 13347, 12586, 13700, 15001, 15053, 14406, 15283, 15113, 13179, 14814, 13603, 14553, 12922, + 15068, 14645, 14343, 12467, 14444, 13263, 13261, 13472, 14442, 14787, 9223, 14937, 13905, 11757, 11957, 9520, + 15269, 14280, 15360, 12646, 15134, 10573, 14929, 13898, 14412, 13546, 14720, 14686, 14884, 13703, 15300, 13830, + 13831, 13560, 12329, 15206, 12724, 13434, 10310, 15258, 13807, 13712, 14567, 13899, 12316, 13908, 10307, 14078, + 13386, 14454, 14416, 14573, 13452, 14279, 15148, 15107, 14965, 11718, 14824, 14493, 14700, 12437, 11859, 13337, + 14766, 12429, 8166, 13597, 14371, 12949, 14903, 12516, 12142, 14405, 14460, 13764, 12754, 9314, 13746, 15191, + 14954, 11976, 11019, 12273, 15022, 15185, 14690, 14833, 12032, 13876, 14432, 13087, 14468, 12550, 15120, 14099, + 14239, 7793, 15011, 13346, 12886, 14524, 13676, 14551, 14047, 15129, 15139, 15281, 14064, 8987, 11608, 14020, + 14171, 13349, 14075, 11284, 14345, 12222, 14771, 11824, 14323, 13036, 14575, 13444, 15130, 14382, 14829, 15230, + 11621, 11970, 14114, 14446, 9328, 9011, 12151, 11692, 14402, 11492, 14995, 13940, 11870, 11060, 15028, 13925, + 15245, 11974, 14414, 12268, 14528, 14474, 12725, 10772, 14032, 14549, 14689, 13250, 13111, 15021, 14252, 14606, + 13899, 11055, 14717, 13935, 13394, 14669, 13397, 12845, 15062, 13751, 13067, 14737, 14785, 14353, 12338, 14374, + 15152, 14006, 14340, 15023, 14302, 13270, 9584, 14651, 9547, 11630, 9113, 14233, 13544, 14740, 13494, 7937, + 15235, 13838, 15139, 12175, 15294, 12139, 13397, 13355, 15305, 15334, 15235, 12423, 14927, 14386, 14231, 15081, + 12208, 12354, 14753, 14930, 13302, 15027, 13815, 15317, 
13396, 12925, 14072, 15178, 12934, 15250, 13788, 15094, + 14666, 14891, 15240, 14322, 15292, 14873, 14885, 14194, 14386, 14550, 14282, 13957, 10080, 15262, 12999, 14374, + 13278, 15255, 14814, 12500, 12829, 15351, 14535, 7340, 14355, 13560, 14710, 11705, 14046, 9436, 14335, 15336, + 14746, 13720, 14748, 14429, 15314, 10338, 13590, 12994, 12711, 10352, 15017, 15191, 12328, 14217, 14031, 8978, + 14092, 14502, 14476, 14175, 14816, 11195, 14330, 7635, 12453, 14345, 14294, 13076, 13732, 13844, 12662, 14746, + 13507, 10339, 14488, 14375, 14267, 10468, 13784, 14559, 15122, 12805, 12492, 14108, 14914, 15278, 14483, 14837, + 15111, 14936, 11043, 14225, 12337, 12922, 11299, 14401, 11737, 15336, 13506, 14963, 13920, 13635, 15057, 12443, + 13682, 13696, 14510, 14464, 13664, 12852, 13677, 15101, 9868, 13736, 11041, 14682, 10934, 13995, 13938, 14773, + 14883, 15107, 12898, 10470, 13757, 15329, 13268, 11995, 9404, 14685, 13773, 14005, 15350, 14881, 8440, 11460, + 15186, 11576, 13609, 13521, 15094, 13019, 15053, 14869, 8654, 14177, 13757, 15184, 14796, 15223, 14639, 14236, + 12843, 15216, 12786, 14523, 14433, 15216, 15359, 11906, 13810, 13255, 9798, 14534, 13997, 14603, 14175, 8503, + 14421, 15054, 13935, 13100, 14262, 14025, 13674, 14659, 11623, 14140, 12974, 12839, 14814, 14039, 15089, 14702, + 12372, 14624, 14567, 13429, 15222, 12075, 14833, 14950, 14351, 15219, 14446, 15109, 10346, 14987, 10597, 14511, + 12425, 13412, 12992, 14254, 15285, 14664, 12385, 10345, 14662, 12555, 12195, 14780, 14982, 14338, 13834, 13591, + 13297, 14306, 11696, 12366, 15004, 10601, 15337, 11481, 9434, 14448, 9658, 15302, 12412, 14804, 15083, 13645, + 13970, 10568, 15028, 15032, 14089, 14565, 14208, 11671, 14048, 13703, 13742, 15102, 11890, 15039, 15312, 12096, + 15276, 14587, 14636, 12712, 11922, 14456, 15157, 15243, 14453, 13956, 14931, 14704, 13211, 13950, 14730, 14453, + 11920, 13559, 14499, 15247, 15192, 14549, 14784, 15227, 15195, 14323, 14846, 15217, 14526, 14757, 14421, 14490, + 15006, 15300, 
14751, 14670, 14744, 13911, 14540, 13809, 14929, 15171, 14542, 11798, 7608, 15027, 15270, 13910, + 14257, 14269, 15252, 15326, 14025, 15072, 14971, 15075, 13726, 14701, 14320, 14783, 15091, 14131, 15034, 14809, + 13132, 12607, 14273, 8883, 15242, 14912, 14463, 15209, 13899, 14203, 12161, 14588, 14696, 15027, 10214, 9859, + 14866, 14541, 13307, 13942, 14988, 14966, 14055, 11382, 12514, 13357, 15116, 15198, 13887, 14733, 14149, 11043, + 13398, 14415, 13844, 14724, 13491, 12650, 12399, 15318, 13635, 14545, 12761, 15147, 15345, 12575, 12903, 15091, + 15335, 12965, 11071, 14433, 15083, 14579, 14457, 14605, 13615, 14625, 14822, 12612, 13824, 14899, 13272, 12568, + 15000, 15145, 14553, 12196, 13125, 14821, 14490, 10374, 15060, 13522, 13260, 10312, 14880, 15147, 10594, 13063, + 13704, 10865, 12454, 14995, 11604, 14229, 14423, 14656, 12259, 14356, 15196, 13328, 14774, 14539, 15231, 12664, + 15281, 12531, 11382, 14129, 13901, 14870, 6364, 14784, 14948, 13543, 14983, 14922, 14041, 14019, 13343, 12483, + 13112, 14627, 13440, 15098, 10310, 13792, 13283, 14056, 13234, 11589, 15271, 15029, 13713, 14406, 14936, 12531, + 13769, 14380, 14607, 14725, 10571, 15332, 13591, 13719, 13610, 13550, 12995, 10556, 15009, 11221, 14773, 13754, + 14760, 14476, 15161, 14151, 12334, 14001, 13908, 14638, 14556, 14519, 13760, 14484, 15119, 14913, 15295, 15352, + 13475, 13385, 13849, 14591, 13386, 14882, 13477, 14897, 15331, 14323, 11677, 12727, 14820, 15175, 12305, 15089, + 13698, 12942, 14990, 15325, 10272, 14616, 13892, 13245, 12515, 15206, 15335, 13751, 14655, 15293, 14615, 13728, + 14499, 10869, 13858, 10495, 13301, 15217, 10530, 15223, 15072, 15103, 15026, 14369, 12334, 13944, 13113, 11477, + 15030, 11206, 14570, 12650, 15064, 15235, 14196, 14995, 11554, 14940, 15032, 14926, 14429, 9951, 7949, 12352, + 14342, 14110, 14867, 15110, 11879, 13263, 9198, 14179, 11654, 10586, 14363, 14853, 13663, 15132, 15000, 11202, + 15154, 15039, 14131, 15086, 14576, 13333, 14071, 14903, 14101, 14358, 14872, 
15310, 14259, 14276, 14883, 12859, + 14952, 9443, 15356, 13387, 10482, 13661, 14759, 14925, 13582, 14954, 13753, 13435, 11618, 15309, 14394, 8557, + 9074, 12794, 12642, 13799, 14958, 14636, 15199, 13112, 14605, 13572, 15275, 10187, 12647, 11477, 15307, 13822, + 12046, 14946, 13217, 14354, 11616, 9404, 15175, 15073, 12639, 13673, 14934, 14401, 15121, 13522, 13544, 13402, + 14860, 14995, 11878, 15048, 14554, 15318, 12198, 15073, 14676, 14714, 15101, 15200, 14091, 9043, 14360, 13676, + 14751, 14134, 14944, 14023, 14794, 15017, 12898, 15021, 14604, 7564, 13562, 12906, 14538, 14880, 14421, 10691, + 13603, 14693, 10750, 15143, 13244, 15227, 15348, 12597, 15274, 9717, 14657, 15279, 13483, 14893, 14897, 15003, + 11537, 14666, 14647, 14346, 14444, 14077, 11350, 15350, 14545, 14139, 11250, 15019, 15153, 10032, 14917, 11804, + 14708, 14989, 13837, 14518, 14136, 11937, 10184, 9024, 14700, 12563, 13622, 13355, 15303, 12896, 11127, 14072, + 12316, 11793, 14591, 14516, 14047, 15058, 15169, 14790, 14400, 14969, 15131, 14651, 11594, 12839, 12531, 11966, + 11701, 12480, 14343, 14706, 14854, 8858, 14391, 12986, 9641, 12041, 15102, 13387, 14800, 13480, 14685, 15007, + 14308, 15113, 12894, 14400, 14623, 14339, 14836, 14229, 14999, 15183, 15186, 14433, 14366, 15326, 12592, 13977, + 11710, 13660, 14669, 13687, 15082, 11505, 15240, 14765, 11556, 14301, 14419, 14551, 11562, 8593, 15233, 14454, + 13911, 13025, 14586, 13830, 13854, 14795, 14585, 13021, 15043, 14382, 14856, 13391, 14498, 10118, 15031, 14459, + 15075, 14645, 14970, 14554, 13204, 15063, 11593, 15236, 11083, 14828, 14402, 14118, 15267, 15130, 14239, 11890, + 14568, 14029, 12816, 14444, 14768, 14568, 13293, 12624, 14104, 15130, 11097, 14722, 14114, 14666, 15016, 14098, + 12946, 14780, 13995, 14353, 14714, 14210, 14839, 14827, 14856, 14383, 15183, 14891, 13669, 12011, 13867, 14407, + 12962, 14497, 11248, 15151, 13330, 14573, 14629, 11126, 14386, 14505, 15235, 13990, 12398, 12737, 15320, 13689, + 12315, 15299, 15353, 14423, 12857, 
13913, 11089, 14815, 12375, 14363, 14167, 5843, 14270, 10912, 15334, 14974, + 12828, 15145, 14566, 8487, 14109, 13482, 14511, 13124, 12995, 10760, 9888, 12131, 13029, 14774, 14753, 11621, + 14600, 12294, 14838, 14858, 10505, 8317, 12171, 11843, 13416, 13472, 14476, 15192, 15225, 12419, 12701, 11774, + 15237, 14905, 12767, 13453, 15276, 14624, 14492, 14994, 14927, 12977, 13534, 15148, 14450, 14430, 14847, 12809, + 12665, 14661, 14363, 14810, 13856, 15180, 14863, 10524, 15252, 15338, 12744, 14414, 15071, 14359, 15179, 14080, + 12387, 11443, 12677, 14534, 13383, 15047, 14229, 14922, 15085, 15199, 14564, 14604, 15015, 13123, 14850, 14352, + 15167, 13140, 15029, 13427, 15325, 13804, 14680, 13076, 14730, 14584, 12131, 15079, 13722, 12935, 14042, 12959, + 14544, 14311, 14034, 13325, 14371, 15037, 15245, 13394, 15341, 14846, 15200, 12226, 14481, 15153, 13791, 13544, + 15042, 14003, 10915, 14918, 13744, 14885, 13817, 14614, 14234, 14324, 13018, 7803, 13872, 14394, 14559, 14411, + 12462, 13561, 14965, 15296, 14888, 13830, 14374, 15058, 9350, 12027, 13109, 10866, 11952, 14949, 11986, 12972, + 15284, 15314, 14393, 15080, 13009, 13054, 14489, 14349, 15060, 15346, 13829, 14738, 13001, 14113, 15197, 15266, + 14667, 14892, 13749, 14842, 13484, 15277, 14934, 11430, 14652, 14523, 14142, 14102, 14710, 14684, 14747, 14629, + 14184, 14809, 14888, 14704, 13659, 8931, 12806, 14686, 13643, 13870, 14821, 13970, 14538, 13348, 15275, 15013, + 14449, 13535, 12912, 14026, 14535, 14791, 14636, 14919, 13215, 13064, 14718, 15067, 14735, 15086, 13160, 15166, + 14428, 12842, 14157, 13991, 13033, 14553, 14843, 13633, 10203, 13332, 11350, 14543, 15116, 12414, 14448, 14163, + 14351, 15227, 12477, 10914, 15251, 15165, 12029, 13639, 14742, 14160, 13458, 14071, 12694, 13651, 15170, 13738, + 15348, 14534, 13838, 14816, 13440, 14583, 13870, 14790, 12977, 13485, 14263, 14180, 14141, 13458, 14344, 13829, + 15299, 13468, 12290, 15045, 13479, 13324, 12783, 13423, 15356, 14701, 14819, 15247, 15168, 14803, 
15001, 14381, + 14783, 14777, 12232, 14649, 13830, 13881, 14715, 14598, 14228, 13714, 14899, 11606, 14990, 15243, 14447, 14666, + 12810, 12869, 13419, 14902, 14471, 12596, 13506, 14675, 14781, 13091, 11836, 13890, 13575, 12783, 13626, 15249, + 14127, 11685, 14643, 14866, 15209, 14734, 15174, 13694, 14448, 14463, 15002, 11493, 14597, 11518, 14376, 14878, + 14890, 14879, 14877, 14437, 12633, 14694, 14376, 13787, 13935, 14509, 12015, 13911, 10412, 14356, 13635, 14485, + 14184, 13661, 14879, 14425, 13784, 13115, 13423, 12274, 14956, 14562, 13231, 14480, 13787, 14634, 13367, 15023, + 9201, 14314, 13383, 14023, 12862, 14394, 13738, 14260, 14205, 11792, 13408, 13782, 13336, 14364, 13908, 11462, + 12311, 12576, 14649, 15283, 13615, 9514, 12444, 11583, 14368, 15151, 11930, 13709, 15341, 13764, 14136, 14618, + 15125, 13520, 13527, 14573, 13439, 13875, 14558, 13380, 15054, 15324, 13611, 13827, 14384, 14562, 11662, 15282, + 13550, 15102, 14441, 14357, 11794, 15347, 12865, 13426, 15304, 12958, 14337, 13780, 14827, 13552, 13239, 9025, + 15200, 13322, 15160, 11566, 13377, 12064, 14347, 12401, 14781, 13065, 15196, 11400, 13904, 11792, 15069, 10056, + 13236, 12698, 13934, 15055, 13452, 14342, 15335, 11267, 10657, 15243, 13508, 15072, 14229, 13272, 11325, 12482, + 14286, 14268, 13328, 14607, 15347, 11916, 13690, 15311, 14554, 13239, 12894, 12473, 14331, 14166, 14682, 14399, + 14352, 13505, 14586, 12724, 14386, 13715, 13522, 14742, 13900, 15350, 11714, 14587, 14880, 13093, 9847, 11935, + 14444, 15323, 13407, 12836, 14979, 13902, 14786, 15215, 14452, 12792, 15190, 11537, 14209, 14020, 14223, 14529, + 14558, 14348, 13580, 13413, 13897, 12100, 14362, 13759, 13681, 12425, 11316, 12289, 10221, 14624, 14389, 13849, + 9967, 13954, 13475, 13747, 12796, 13801, 14716, 9595, 13863, 13439, 14151, 13455, 14378, 14673, 14973, 15329, + 14572, 14903, 11751, 14717, 12501, 14774, 12440, 13786, 13466, 15105, 15326, 14526, 15247, 11360, 15085, 10444, + 15254, 13785, 15161, 14463, 13963, 12740, 14663, 
12602, 12423, 15022, 15287, 15094, 13762, 14245, 15009, 10458, + 15045, 14378, 14851, 15013, 13629, 14707, 14844, 14856, 13130, 11235, 14572, 12745, 15124, 14856, 14663, 15164, + 13801, 13589, 14579, 14579, 15051, 13822, 14773, 14785, 15163, 15296, 14340, 9218, 13800, 14758, 10946, 14283, + 14841, 15077, 12361, 12294, 14902, 15298, 9077, 14738, 11731, 13945, 14503, 15088, 14845, 13310, 12595, 15047, + 12947, 14844, 15143, 14879, 15350, 14155, 12870, 14311, 14533, 14790, 14626, 15056, 14774, 14676, 13998, 15122, + 15065, 14877, 11779, 15221, 12400, 14852, 11268, 12707, 14894, 14394, 15076, 15208, 12305, 12681, 9601, 15061, + 14880, 14023, 14347, 14948, 14509, 14043, 13449, 14501, 12325, 13123, 14068, 14648, 15287, 11917, 14156, 15336, + 14239, 14486, 15340, 15299, 13238, 15250, 14771, 14582, 14494, 15251, 12422, 14387, 14592, 15018, 11295, 13797, + 15183, 14448, 15178, 13466, 14809, 13527, 14844, 13607, 14009, 14424, 14483, 14917, 13862, 12183, 14518, 15325, + 12408, 14507, 14467, 10446, 15239, 14589, 13446, 13323, 15151, 13850, 15212, 15032, 9171, 13263, 12537, 14232, + 14605, 13758, 13465, 15047, 15104, 12808, 13647, 14156, 12925, 15040, 13402, 12307, 13419, 13681, 10824, 15121, + 14977, 15168, 8601, 13726, 13226, 14671, 14683, 15115, 14477, 10156, 14674, 9955, 14593, 14529, 14509, 13841, + 12482, 12245, 14510, 13818, 15030, 12719, 13705, 11588, 14835, 14728, 14647, 12872, 15168, 12440, 14633, 14858, + 14857, 14379, 11762, 15354, 12257, 13862, 13822, 15100, 15222, 14653, 14210, 10061, 12761, 15073, 12603, 14648, + 14744, 14242, 14033, 14450, 14353, 15096, 14537, 14608, 14148, 14565, 15006, 14341, 14892, 14063, 14577, 14813, + 11292, 11658, 13531, 12399, 13550, 14930, 15140, 14805, 14077, 13784, 14530, 14633, 14663, 15030, 14325, 15091, + 11937, 14744, 13428, 15264, 10249, 14423, 12749, 15186, 14784, 13583, 15210, 14675, 14776, 11081, 14560, 12648, + 14706, 11331, 13998, 15216, 14165, 13174, 15121, 14098, 13705, 14000, 12721, 14866, 14976, 14591, 12694, 14668, + 
15322, 14951, 11058, 15054, 15016, 14918, 15204, 10408, 13474, 15004, 13927, 15325, 13314, 11958, 11881, 15276, + 13482, 15038, 13442, 14677, 15044, 14393, 13774, 13179, 14610, 15110, 15354, 13700, 15073, 15136, 15345, 10408, + 12420, 14948, 15027, 14375, 15211, 14673, 14670, 13165, 14458, 15241, 12381, 12137, 12674, 13898, 15215, 10465, + 14965, 15088, 11686, 14636, 15160, 13466, 14668, 12900, 14355, 14464, 15243, 14069, 14822, 14443, 12140, 15050, + 12978, 14722, 14933, 13466, 13653, 11384, 14565, 15129, 14702, 12955, 14850, 15208, 14765, 13995, 14573, 14160, + 14939, 15243, 15052, 13989, 12291, 13939, 13758, 14569, 14786, 14795, 15076, 13356, 15289, 14057, 14656, 13669, + 15313, 14382, 15124, 15181, 10955, 11986, 13730, 14500, 12446, 15179, 15261, 13354, 13832, 10504, 13104, 14655, + 14848, 15343, 14766, 14871, 15055, 15010, 14424, 14574, 13750, 15057, 10670, 13682, 14804, 13954, 14137, 15008, + 15123, 14736, 11392, 14873, 8800, 14063, 13741, 14311, 13968, 14548, 13510, 14550, 14411, 13927, 14680, 15149, + 13955, 15104, 15291, 13049, 13594, 13568, 13231, 13574, 13497, 12142, 13559, 9415, 15166, 12744, 14727, 15034, + 15176, 14563, 14762, 15052, 14758, 14347, 13698, 12820, 12855, 14204, 10514, 12219, 14909, 14617, 14107, 14635, + 13256, 14717, 9723, 13506, 15046, 15136, 14174, 13975, 11803, 14339, 14573, 14245, 14581, 14437, 14586, 15302, + 13717, 10431, 14811, 13462, 14590, 14448, 11559, 14707, 14628, 14852, 8620, 14355, 14703, 15311, 14857, 13467, + 12591, 15346, 15062, 13480, 14687, 14993, 13695, 12358, 13082, 15275, 13998, 11465, 15126, 15068, 15337, 11320, + 14557, 15197, 15193, 15236, 13394, 14350, 13599, 14687, 12309, 11507, 10362, 14609, 14984, 15004, 11186, 15279, + 14749, 13735, 10349, 14844, 14336, 14946, 14945, 11370, 14529, 15108, 14784, 14134, 13050, 7602, 15053, 15145, + 8647, 14642, 13312, 13027, 13651, 14852, 10707, 10558, 14802, 13474, 10743, 14551, 15107, 15221, 14947, 13339, + 14672, 14661, 13594, 14467, 15016, 11392, 14937, 15038, 14758, 
13761, 14881, 14889, 14908, 15353, 15277, 15047, + 11742, 11502, 14974, 12319, 15041, 13839, 13208, 14426, 13469, 13802, 13378, 14736, 14379, 14844, 13652, 8492, + 12934, 12512, 15282, 12825, 13545, 11065, 15255, 14599, 14231, 14374, 13660, 13331, 15357, 14487, 13831, 13484, + 12885, 15320, 13357, 10661, 15152, 14588, 14871, 14631, 14884, 14918, 15118, 12648, 14384, 15278, 12764, 14358, + 15266, 15152, 13630, 15270, 13812, 13645, 14720, 12818, 14635, 14610, 14459, 11684, 12170, 14855, 14462, 14790, + 15263, 14818, 14902, 14720, 14193, 15071, 13320, 14799, 14696, 13878, 14676, 14757, 15186, 13187, 14954, 14121, + 13137, 10891, 6814, 14841, 12873, 14341, 10999, 14054, 9437, 15338, 14387, 14961, 13363, 11666, 15013, 15256, + 14374, 14429, 14823, 14546, 12956, 14562, 13556, 11866, 12615, 11663, 14999, 14788, 14094, 12805, 14735, 14492, + 15272, 15334, 12348, 13485, 13936, 14347, 14846, 15039, 8456, 13099, 14541, 15216, 15131, 12928, 14759, 14495, + 15169, 12609, 15254, 15219, 14021, 14474, 10317, 12306, 15133, 13066, 13982, 15263, 14898, 12768, 14923, 14379, + 8550, 13661, 11617, 14136, 14434, 14600, 14804, 10307, 14727, 14252, 14125, 14019, 14342, 14658, 8551, 13603, + 14026, 14380, 14628, 13957, 14928, 14828, 14698, 14684, 14499, 15254, 13918, 11677, 15090, 7348, 14570, 15282, + 14688, 14429, 14255, 15203, 15053, 13689, 15150, 11834, 14248, 14581, 13424, 15117, 15226, 13047, 15358, 13101, + 15111, 12389, 13503, 14669, 14978, 10874, 14451, 14905, 11598, 12711, 12457, 12509, 14958, 13835, 15245, 14512, + 14892, 15235, 14730, 14742, 14763, 15183, 13892, 9403, 11706, 13388, 14744, 15101, 14340, 11651, 14816, 14199, + 12049, 12891, 14654, 14070, 14622, 11950, 12602, 11437, 15003, 13708, 10829, 13684, 14612, 12457, 14734, 15169, + 14532, 12516, 14792, 14410, 14896, 11471, 14749, 14648, 13853, 13940, 15230, 14919, 12815, 14692, 14971, 14894, + 12929, 13803, 13820, 14587, 15301, 12894, 15291, 15074, 14528, 12384, 11625, 14932, 13927, 12098, 15208, 14209, + 13731, 14302, 
14659, 14104, 15050, 15168, 11603, 14955, 13639, 15192, 13706, 12004, 13914, 14889, 12952, 14965, + 15015, 13926, 13421, 13831, 13302, 15281, 14435, 15200, 15201, 14525, 14871, 13786, 14786, 13845, 12846, 15284, + 10406, 14361, 12091, 14027, 11064, 14024, 14509, 15173, 13439, 15316, 15211, 14691, 14459, 14825, 14985, 14068, + 15283, 15141, 12595, 15156, 14745, 13250, 14731, 13605, 10531, 14766, 14743, 7719, 12572, 15299, 14813, 9544, + 15056, 14492, 14134, 14349, 12485, 14944, 15268, 15262, 13316, 12479, 15015, 15137, 12060, 14087, 8634, 13332, + 14663, 14630, 14844, 14418, 13010, 14714, 13587, 14381, 14663, 15227, 13199, 12794, 10287, 12794, 15070, 15286, + 15197, 13694, 13339, 12925, 14215, 13405, 14042, 14504, 14413, 7297, 11460, 13755, 14665, 15135, 12693, 14710, + 14070, 15018, 11345, 14412, 14190, 14627, 14476, 14706, 11985, 12960, 12476, 14670, 13979, 13823, 14531, 11613, + 15157, 13256, 14182, 13995, 14598, 14129, 14818, 14354, 13562, 15157, 13403, 14884, 15070, 14469, 7817, 15354, + 15176, 14412, 13778, 15287, 11413, 13354, 14939, 14743, 12773, 12697, 15277, 6460, 9430, 14422, 14175, 13708, + 14837, 14184, 14931, 13683, 15059, 9382, 14748, 15337, 15208, 13079, 13169, 11961, 13653, 13172, 15264, 13892, + 12923, 14555, 15042, 15192, 14840, 10852, 13206, 13409, 13928, 15121, 15293, 13286, 11688, 13470, 14279, 13712, + 15067, 15251, 15119, 15000, 12990, 11939, 15068, 14642, 14549, 11424, 14080, 15241, 14436, 11489, 14764, 13608, + 13773, 15016, 13958, 14010, 15262, 10867, 14655, 14268, 14146, 12680, 13554, 14853, 13386, 14456, 6596, 13578, + 14392, 14257, 14068, 15253, 13948, 15345, 12468, 14789, 14797, 14262, 12442, 12137, 14209, 14897, 14706, 14036, + 13864, 14495, 15257, 14249, 12620, 11935, 14429, 14944, 9880, 14308, 14985, 13602, 14062, 14513, 14619, 14619, + 13479, 14148, 15093, 14898, 14478, 10035, 14518, 14568, 11792, 10448, 14066, 14396, 14526, 12444, 14492, 14832, + 15246, 14570, 14847, 13838, 13647, 13619, 13369, 12170, 14764, 14260, 15210, 15333, 
13840, 15330, 10603, 12956, + 11879, 13319, 13486, 14861, 12444, 15148, 12260, 13883, 15229, 8657, 14791, 11885, 13974, 13965, 13054, 14467, + 14585, 15169, 13739, 14553, 14592, 11569, 14974, 11447, 14669, 13722, 15323, 15354, 12540, 13733, 13316, 13883, + 13794, 12711, 14538, 13990, 14162, 11920, 15254, 13104, 14314, 14656, 14265, 14870, 14728, 14928, 14601, 11593, + 14499, 14371, 15130, 14401, 14271, 15204, 13466, 10327, 14915, 14369, 14558, 15033, 15181, 13571, 14544, 13838, + 14802, 14993, 14807, 13981, 13440, 14509, 14908, 13366, 14729, 14400, 14069, 15355, 13579, 12734, 12521, 15191, + 12367, 13813, 12499, 13611, 15353, 11898, 13545, 13485, 14855, 14838, 14994, 15008, 15201, 14482, 12599, 14717, + 15129, 12538, 14800, 13579, 13803, 14700, 15052, 15263, 13969, 13624, 15037, 14664, 13679, 14512, 14599, 12866, + 15331, 14997, 10530, 15156, 8540, 11104, 12978, 14420, 11660, 13529, 15105, 15104, 14547, 15152, 14708, 13553, + 14589, 14861, 14715, 13974, 14138, 15085, 14428, 14988, 10718, 8447, 12346, 14911, 10079, 13734, 13769, 11320, + 13571, 15193, 14722, 13554, 14681, 11813, 14879, 14463, 14496, 9413, 14842, 14301, 8373, 12670, 14922, 13374, + 13482, 14736, 14937, 14389, 14784, 13805, 14056, 12294, 14388, 14858, 14255, 11977, 15089, 14767, 14526, 13573, + 14394, 4533, 13305, 15323, 14778, 14439, 15245, 14787, 15065, 14569, 14737, 14393, 13436, 11410, 13328, 14363, + 14457, 14850, 14940, 14737, 13467, 12699, 14791, 14837, 13227, 11964, 12820, 14343, 14652, 12819, 13172, 9403, + 12846, 13029, 15320, 14357, 13517, 14409, 13883, 14633, 14170, 13605, 12654, 12836, 15218, 12825, 13819, 15343, + 14652, 14177, 13399, 14935, 14795, 12223, 8417, 13237, 14723, 12390, 15235, 14766, 13794, 14931, 10934, 13873, + 13441, 15229, 15070, 14470, 14727, 12384, 14086, 14020, 13123, 14508, 14283, 12698, 14695, 13675, 14303, 14781, + 11368, 14818, 14769, 12365, 15021, 13636, 11600, 12941, 10553, 13340, 13623, 14500, 14495, 13686, 14878, 9553, + 15347, 13569, 15077, 13978, 14633, 
14366, 14734, 11567, 14819, 15181, 14493, 14934, 11434, 12069, 8124, 13937, + 12779, 13882, 12688, 12607, 11536, 13286, 14842, 12943, 15063, 15235, 12396, 12836, 13729, 13955, 13354, 12206, + 14722, 14725, 14882, 14331, 13062, 12238, 15056, 12383, 14600, 13919, 13549, 14637, 14457, 15206, 14921, 15322, + 13745, 13966, 13834, 12056, 15182, 13361, 13241, 14595, 14569, 14151, 13894, 14471, 14750, 14125, 14543, 14692, + 14396, 14946, 13685, 14406, 15136, 14855, 11687, 11361, 12020, 14204, 14453, 14192, 13953, 13530, 13798, 14836, + 15323, 13791, 6802, 11662, 13729, 13820, 12201, 14394, 13081, 15075, 14552, 15029, 14900, 14171, 15034, 12846, + 14403, 15108, 13742, 12894, 12616, 12140, 15112, 13062, 15275, 15330, 15102, 14811, 14345, 11805, 14823, 14908, + 10843, 11432, 14331, 14534, 14904, 13632, 14901, 14608, 15235, 13339, 12439, 12884, 14743, 14993, 12955, 15352, + 13657, 14445, 15268, 14367, 15062, 14112, 15191, 13039, 13644, 9388, 12825, 14442, 8968, 14774, 14768, 14397, + 14527, 11595, 15315, 14090, 13574, 14398, 13983, 14528, 13407, 15057, 12694, 12308, 13501, 15087, 12814, 14597, + 14543, 13988, 15102, 14528, 15183, 15269, 14748, 14863, 12307, 14477, 13811, 14963, 14411, 14289, 15046, 14143, + 14413, 15102, 13398, 14488, 14613, 14445, 14640, 14237, 13787, 14745, 13997, 14062, 14203, 14745, 14910, 11696, + 11866, 15283, 14687, 14863, 14575, 14416, 13608, 11278, 12354, 14029, 10065, 11497, 15088, 14288, 12731, 14374, + 14673, 13409, 14984, 13964, 15177, 13336, 14968, 15322, 13318, 14185, 14513, 14934, 13765, 11321, 11502, 15102, + 14595, 12407, 13290, 15341, 14324, 14574, 11884, 13976, 14845, 15295, 14812, 12533, 14305, 14504, 15217, 13916, + 12352, 12135, 13514, 14708, 12463, 15311, 14754, 11609, 12191, 12946, 13473, 14256, 13611, 15098, 14096, 14190, + 14620, 15187, 13110, 15256, 14350, 14875, 15298, 13768, 13135, 12571, 12791, 14962, 11454, 13589, 15056, 13450, + 14372, 15339, 14748, 14637, 13541, 13113, 14973, 14365, 14179, 15094, 14328, 13988, 12431, 13506, 
13701, 15129, + 13934, 14282, 12588, 12783, 14136, 15139, 13208, 11604, 13987, 14492, 15066, 15160, 13916, 15000, 14933, 14920, + 14477, 15035, 14708, 14807, 14595, 14909, 14392, 13060, 12175, 12319, 15048, 15042, 15239, 14225, 13839, 11822, + 11687, 12978, 12295, 12723, 13447, 13747, 12509, 14791, 13546, 12556, 11463, 13813, 15101, 14552, 13640, 15233, + 15349, 14306, 11914, 14545, 13667, 14339, 15216, 15337, 14929, 15210, 14968, 15163, 14880, 10865, 13472, 14898, + 13762, 14404, 14968, 14097, 12860, 14855, 14594, 12749, 12261, 13587, 11341, 14976, 15032, 13645, 13642, 14593, + 12631, 14859, 15328, 14443, 14883, 13676, 12750, 14914, 12478, 14448, 14479, 14372, 14959, 10572, 15028, 12774, + 11892, 14338, 14022, 13578, 8592, 15212, 12442, 15285, 14068, 14693, 15234, 14674, 13428, 14811, 15075, 14934, + 13379, 13535, 14892, 15235, 10946, 14025, 14654, 10445, 14602, 14812, 14784, 14915, 15156, 13932, 14471, 14347, + 13748, 10517, 14949, 14376, 14763, 14557, 8887, 14254, 11931, 14077, 14348, 15000, 12868, 14066, 14484, 14450, + 13053, 15324, 13417, 10492, 15303, 12556, 15126, 14709, 13139, 4628, 12320, 12889, 14880, 14928, 13619, 15266, + 14172, 14669, 14639, 14227, 6439, 15124, 15348, 14005, 14229, 13329, 14464, 14187, 12725, 10842, 13327, 9268, + 14416, 13911, 14573, 14257, 15345, 14405, 15275, 14777, 14906, 14049, 12833, 14454, 14404, 13085, 14529, 15297, + 9122, 14587, 14701, 14517, 13778, 13626, 15196, 13562, 14997, 15012, 15018, 14337, 10812, 13980, 14757, 9337, + 14190, 14338, 14468, 13074, 13625, 11728, 15133, 15016, 14764, 15272, 12601, 12817, 13679, 10430, 14998, 14273, + 13939, 15074, 7623, 13956, 14840, 14451, 14631, 14809, 12458, 14018, 15296, 15282, 15352, 15270, 13596, 15174, + 14895, 13076, 15360, 15012, 9883, 14560, 12924, 14390, 14770, 13920, 14451, 11888, 13889, 13851, 14590, 9064, + 14580, 14566, 14885, 12834, 14114, 14291, 15214, 14800, 12595, 15039, 14974, 14712, 14393, 14104, 13665, 14082, + 13387, 14621, 14298, 14665, 15209, 13838, 15231, 11768, 
14593, 14819, 14719, 12716, 14057, 11861, 15055, 15048, + 14882, 14444, 14867, 15113, 14145, 12243, 12469, 14780, 15092, 15049, 14989, 12968, 14521, 12539, 13691, 15183, + 14436, 14832, 13729, 14075, 15115, 12702, 15163, 10247, 14573, 8219, 14241, 14828, 11904, 13621, 14136, 14733, + 13201, 13622, 12630, 14663, 12188, 14666, 14886, 14129, 12408, 15157, 12347, 11331, 13946, 15241, 14868, 12449, + 15039, 11182, 10966, 11274, 15200, 14894, 13209, 13263, 14569, 14588, 14848, 14553, 13475, 15220, 13456, 14415, + 10837, 14552, 14239, 12808, 13942, 15155, 14857, 15312, 12626, 13018, 10659, 14960, 15140, 9377, 13010, 14596, + 11398, 15308, 15346, 14613, 15005, 13828, 13094, 11437, 13585, 8563, 13709, 11745, 12184, 15000, 13747, 13726, + 15275, 14269, 11740, 13731, 12946, 13482, 14757, 14098, 9730, 14406, 12121, 14426, 12043, 14933, 13788, 15278, + 15245, 14726, 12908, 9269, 14307, 13260, 10118, 9705, 15271, 12439, 15087, 13329, 5618, 11082, 14739, 14017, + 14809, 12637, 14318, 13759, 13028, 15114, 13482, 14547, 14099, 14969, 14941, 15029, 13417, 13061, 14456, 14580, + 14355, 12679, 14383, 14773, 13982, 15356, 14015, 13770, 9833, 15118, 13012, 14753, 13754, 14661, 13381, 14793, + 15359, 13504, 14697, 13818, 14061, 12522, 12457, 14482, 15323, 15124, 15149, 12324, 15095, 14605, 14842, 15231, + 11869, 14826, 13947, 14939, 14396, 12589, 15280, 13844, 14235, 14341, 14270, 15039, 14830, 12090, 15239, 13321, + 10164, 13816, 14003, 13577, 13036, 14429, 15191, 14617, 14347, 15111, 15081, 9540, 14417, 11294, 10946, 13312, + 15226, 13790, 15118, 15076, 14599, 14321, 12659, 11447, 13916, 14712, 14664, 13603, 14349, 14511, 14313, 14997, + 14592, 13480, 15315, 14060, 15145, 11816, 14979, 14689, 13526, 14384, 11446, 13163, 14508, 13747, 15197, 13352, + 13638, 15038, 13408, 14401, 15179, 14720, 10039, 14040, 12626, 15184, 13555, 13497, 12684, 14102, 12329, 15232, + 15138, 15183, 15097, 14793, 11650, 14901, 15302, 9593, 4843, 14832, 13773, 14905, 14265, 14932, 13187, 14729, + 11821, 14511, 
15062, 14857, 14069, 15091, 14040, 13796, 13964, 15038, 9525, 13530, 14503, 15220, 15178, 15251, + 14889, 14781, 13466, 14301, 5960, 14708, 15152, 14328, 14462, 15212, 12308, 14697, 14177, 12548, 14432, 13066, + 14319, 14688, 14631, 13712, 12265, 15102, 14613, 15214, 15147, 13771, 15295, 15327, 14799, 14598, 13369, 15096, + 15280, 10941, 15271, 12022, 6008, 13833, 14368, 15107, 13742, 15150, 14422, 13809, 12752, 13892, 14970, 13439, + 11849, 14618, 14338, 15027, 15274, 14262, 15052, 12752, 14465, 14718, 13152, 14862, 15176, 15322, 13321, 13541, + 14801, 13681, 14774, 14451, 14211, 14833, 7702, 13656, 15343, 15113, 12539, 15121, 11621, 15107, 12521, 15199, + 15155, 15059, 13913, 12693, 14426, 13741, 15188, 13108, 13473, 11440, 15244, 13496, 14074, 11542, 14943, 13725, + 14847, 15338, 15192, 13091, 11462, 14068, 12011, 13414, 14725, 12559, 13480, 13204, 14476, 15160, 11130, 14965, + 12413, 13181, 11323, 14102, 13629, 12569, 15295, 14289, 11858, 15188, 14684, 14345, 13631, 10795, 14829, 15104, + 12605, 13779, 13644, 14973, 12487, 15129, 15117, 12943, 13955, 11945, 13586, 15099, 14161, 14857, 11468, 12674, + 14302, 15359, 15193, 14541, 14997, 14959, 12240, 14371, 13906, 14994, 14812, 11908, 14996, 15081, 14687, 14034, + 14236, 15202, 13226, 14394, 14795, 14941, 13213, 14489, 14243, 12731, 14554, 15058, 14413, 14780, 14478, 9564, + 13488, 14087, 12455, 12319, 14372, 12067, 14653, 14929, 11319, 12423, 14730, 11816, 14905, 13869, 14998, 12353, + 15153, 11400, 14878, 14442, 14979, 13117, 14876, 15131, 12066, 14827, 13256, 14434, 15034, 14676, 14565, 14342, + 14586, 14998, 13991, 13945, 14710, 11035, 14278, 8191, 13485, 10994, 13691, 13870, 15041, 12543, 14567, 11883, + 14315, 10675, 14922, 14174, 13970, 14525, 13345, 14299, 13712, 13780, 12453, 13792, 14168, 15249, 13773, 13095, + 14635, 14181, 9979, 15296, 14242, 14822, 15178, 15217, 15240, 15293, 6673, 13100, 12507, 14737, 15297, 12822, + 13609, 13456, 14498, 13737, 14238, 15154, 15162, 12381, 12487, 15005, 14755, 14035, 
15127, 13867, 13552, 14708, + 14408, 14632, 13985, 12505, 14428, 12193, 15144, 14814, 14136, 10856, 15323, 13718, 14898, 14420, 14613, 15247, + 14435, 15282, 15328, 12070, 12309, 12353, 14372, 12436, 12509, 14405, 14365, 14104, 11142, 14356, 13802, 14645, + 14729, 11306, 13769, 14032, 14079, 12062, 13511, 14757, 14870, 13899, 14017, 10908, 12775, 15208, 15311, 13155, + 13844, 12694, 13469, 15111, 14731, 15253, 11769, 15321, 13362, 14340, 14530, 11752, 14944, 15003, 10983, 14587, + 13668, 14297, 14753, 14664, 13716, 7524, 14226, 13325, 14005, 13822, 13733, 14814, 14429, 13838, 14762, 14819, + 13404, 15198, 13777, 15028, 12461, 11820, 15322, 15271, 15272, 15261, 14433, 15270, 13475, 14914, 14291, 13913, + 14993, 14827, 13021, 15205, 14103, 15269, 14012, 13998, 14363, 14717, 14073, 14354, 14620, 13949, 8820, 14674, + 14513, 15234, 14817, 15261, 14672, 14204, 14255, 14568, 13714, 14426, 14792, 15227, 15249, 15354, 15110, 13300, + 14499, 13342, 12744, 12545, 14793, 12379, 13956, 12976, 13531, 14448, 14871, 14075, 15342, 14987, 11271, 13757, + 14288, 14529, 14427, 10754, 15270, 14779, 15099, 15015, 14422, 14240, 14318, 14864, 14723, 13796, 15056, 8992, + 14638, 14798, 14805, 13607, 14582, 14729, 15249, 13968, 15141, 14409, 15157, 14910, 11573, 15292, 14468, 14810, + 13946, 12579, 15338, 12081, 11006, 14520, 14689, 15105, 14918, 13899, 14565, 14112, 15098, 13661, 8369, 14241, + 15084, 15092, 14357, 14304, 14795, 14892, 12919, 14117, 11091, 15087, 11757, 14770, 12652, 14913, 14950, 14566, + 14046, 14859, 14424, 14120, 10261, 14716, 13908, 14382, 14222, 15056, 11358, 15135, 14089, 14461, 15161, 14986, + 9750, 14419, 14575, 13452, 15022, 11635, 12514, 14825, 14809, 13786, 14243, 15286, 14582, 14694, 11667, 14578, + 14236, 13541, 13141, 14387, 13456, 12328, 13463, 14978, 13447, 14197, 13404, 14790, 14455, 12464, 14952, 15054, + 14230, 15345, 15068, 12740, 10990, 14415, 11246, 15082, 13807, 14851, 14209, 12491, 14365, 13470, 15068, 15239, + 15334, 14380, 15301, 14213, 14975, 
15153, 15298, 13132, 14797, 15074, 14693, 10339, 13275, 14888, 13913, 14988, + 13333, 13978, 13172, 14911, 14182, 14087, 11002, 14496, 13252, 12401, 12953, 10311, 13550, 11147, 12478, 14905, + 13980, 14658, 13389, 10828, 11774, 14244, 14798, 14136, 14985, 15096, 14188, 12666, 15278, 15148, 13105, 14537, + 13381, 15288, 14128, 11950, 14543, 12310, 14681, 12642, 14648, 13457, 14867, 13673, 12935, 12601, 13826, 13648, + 14158, 15008, 13845, 12310, 14296, 11970, 9376, 15169, 14804, 15233, 12148, 13661, 14109, 14848, 12650, 14056, + 13403, 14733, 13948, 14133, 14577, 14913, 15205, 13368, 15332, 14686, 9734, 15197, 14868, 14154, 13910, 14587, + 14360, 15078, 13164, 10991, 12172, 14371, 15054, 13330, 14268, 14366, 10471, 15228, 15056, 10547, 13450, 14932, + 14622, 15102, 6953, 14903, 15197, 14692, 14394, 14832, 12289, 11804, 14921, 13740, 15294, 15327, 13828, 12516, + 14809, 11953, 14881, 14904, 13660, 13445, 14110, 14393, 13912, 14916, 12206, 12822, 11444, 13569, 12719, 14622, + 13975, 14346, 11920, 14665, 14874, 14503, 15204, 10579, 14421, 14755, 11834, 14016, 14365, 14875, 14882, 15270, + 14883, 15077, 14451, 14359, 13630, 12341, 14080, 13145, 14889, 14701, 14153, 14858, 14405, 14520, 14228, 15241, + 14256, 12457, 12594, 14769, 14721, 14057, 10622, 14858, 14216, 14937, 13803, 14433, 11798, 14455, 14588, 14960, + 15203, 14880, 13751, 14239, 14789, 12766, 13402, 13637, 14934, 9540, 12527, 13865, 13938, 13110, 14683, 11623, + 13637, 14468, 14620, 14176, 14983, 14089, 15006, 14285, 14891, 11746, 13412, 14953, 12237, 15166, 15289, 12726, + 13372, 12311, 13028, 14811, 14679, 13462, 15167, 15162, 14272, 14478, 14278, 14904, 15015, 13329, 14730, 14406, + 13785, 14827, 11517, 15169, 8074, 14524, 14702, 13869, 12013, 15083, 14379, 12803, 13686, 15242, 13633, 14502, + 12105, 13801, 14912, 14471, 15054, 10619, 13939, 14862, 14294, 14148, 10916, 15006, 12382, 12645, 14851, 13488, + 15241, 13408, 14340, 14257, 14748, 15141, 14354, 14771, 13501, 14683, 15165, 13634, 14689, 14231, 
12290, 14570, + 13515, 13066, 14374, 14658, 14095, 12732, 13869, 14729, 14046, 11412, 14495, 13596, 13340, 9729, 14166, 10964, + 14955, 9924, 13897, 14671, 12765, 14778, 11458, 11524, 13595, 14585, 13531, 13522, 15273, 13808, 10686, 13249, + 10557, 14922, 14445, 10205, 14911, 12784, 13285, 14575, 12407, 14639, 13432, 11651, 11760, 14604, 14612, 10442, + 13692, 11749, 14212, 11993, 15031, 13937, 14710, 14182, 14815, 13010, 14209, 8251, 15173, 12699, 14558, 12306, + 14690, 15297, 14758, 13955, 12726, 10271, 13428, 14030, 13861, 15113, 15262, 14412, 11573, 14319, 14602, 13676, + 13984, 14236, 15026, 15301, 13938, 13962, 15164, 14736, 14917, 12314, 11705, 15116, 11228, 14690, 11166, 15122, + 12329, 14400, 11242, 15308, 14928, 15253, 14765, 14840, 14472, 14873, 12347, 13971, 14127, 14702, 11270, 13435, + 12525, 13642, 15082, 15003, 14699, 12820, 12100, 14302, 14542, 14338, 14005, 10563, 15262, 12763, 15023, 14169, + }; + + uint16_t ret[32 * 2 * 16 * 16]{ + 14402, 12289, 13852, 13540, 14810, 11027, 12387, 11288, 14169, 14805, 15159, 13632, 8595, 13403, 9001, 14821, + 15355, 14719, 14958, 10920, 9389, 13408, 12314, 11907, 14759, 14419, 15124, 14591, 13422, 14465, 13347, 14063, + 14776, 14719, 15329, 14763, 13881, 14609, 12510, 11670, 15296, 15309, 14152, 14890, 13556, 12447, 13171, 13672, + 14945, 14355, 12883, 14015, 12895, 14170, 14818, 15274, 13020, 14222, 14246, 12003, 11254, 14901, 13659, 14984, + 14532, 14496, 13605, 14803, 14524, 10024, 14259, 14899, 10336, 14532, 12566, 14884, 10386, 14316, 14828, 14809, + 14952, 14336, 12187, 13557, 15065, 15203, 15318, 12324, 13915, 8520, 13101, 14928, 15148, 12837, 14358, 14794, + 13444, 14120, 13324, 15242, 14846, 14609, 14722, 15019, 13205, 13693, 15019, 13263, 13500, 11845, 12709, 13293, + 12727, 12279, 13721, 14261, 6931, 14579, 15140, 15252, 14686, 13994, 14934, 14897, 14748, 12729, 14560, 14591, + 13643, 15213, 14626, 14254, 14498, 15019, 15162, 14222, 14382, 14808, 15215, 14977, 14343, 14110, 14728, 14801, + 14190, 
15069, 14546, 14639, 13854, 13561, 12404, 14199, 12155, 13052, 14615, 8692, 14454, 13955, 14348, 14669, + 14725, 14013, 10298, 13418, 10995, 13411, 14635, 14876, 14851, 13306, 13907, 14367, 13818, 14603, 13917, 13571, + 13815, 14143, 12729, 14858, 15061, 15024, 15344, 14674, 14804, 13627, 13595, 12462, 12942, 14556, 14479, 15022, + 14651, 15247, 14809, 14470, 15184, 14802, 12978, 15009, 15202, 14529, 13084, 11909, 12829, 15295, 14812, 14878, + 13653, 14935, 13949, 15068, 15269, 13831, 13386, 14766, 14954, 14239, 14171, 11621, 15245, 13899, 15152, 15235, + 14666, 13278, 14746, 14092, 13507, 15111, 13682, 14883, 15186, 12843, 14421, 12372, 12425, 13297, 13970, 15276, + 15006, 14257, 13132, 14866, 13398, 15335, 15000, 13704, 15281, 13112, 13769, 14760, 13475, 13698, 14499, 15030, + 15154, 14952, 9074, 12046, 14860, 14751, 13603, 11537, 14708, 12316, 11701, 14308, 11710, 13911, 15075, 14568, + 12962, 12315, 12828, 14600, 15237, 12665, 12387, 15167, 14544, 15042, 12462, 15284, 14667, 14184, 14449, 14428, + 15348, 15299, 14783, 12810, 14127, 14890, 14184, 9201, 12311, 15125, 13550, 15200, 13236, 14286, 14352, 14444, + 9967, 14572, 15254, 15045, 13801, 14841, 12947, 15065, 14880, 14239, 15183, 12408, 14605, 14977, 12482, 14857, + 11292, 11937, 14706, 15322, 13482, 12420, 14965, 12978, 14939, 15313, 14848, 15123, 13955, 15176, 13256, 13717, + 14557, 14749, 8647, 14672, 11742, 12934, 12885, 15266, 15263, 13137, 14374, 15272, 15169, 8550, 14026, 14688, + 14892, 12049, 14532, 12929, 13731, 15015, 10406, 15283, 15056, 14663, 15197, 14070, 15157, 15176, 14837, 12923, + 13773, 14392, 13864, 13479, 15246, 11879, 14585, 13794, 14499, 14802, 12367, 15129, 15331, 14589, 13571, 13482, + 14457, 12846, 14652, 13441, 11368, 15347, 12779, 14722, 13745, 14396, 15323, 14403, 10843, 13657, 14527, 14543, + 11866, 14673, 14595, 12352, 14620, 14372, 13934, 14477, 11687, 15349, 13762, 12631, 11892, 13379, 13748, 13053, + 14416, 9122, 14190, 13939, 14895, 14580, 13387, 14882, 14436, 13201, 
15039, 10837, 11398, 15275, 15245, 14809, + 15359, 11869, 10164, 15226, 14592, 13638, 15138, 11821, 14889, 14319, 15280, 11849, 14801, 15155, 14847, 12413, + 14302, 14236, 13488, 15153, 14586, 14315, 14635, 13609, 14408, 14435, 14729, 13844, 13668, 13404, 14993, 14513, + 14288, 14638, 13946, 15084, 14046, 9750, 14236, 14230, 15334, 13333, 13980, 13381, 14158, 13403, 14360, 14622, + 13975, 14883, 14256, 15203, 13637, 13372, 13785, 12105, 15241, 13515, 14955, 10557, 13692, 14690, 13984, 12329, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14379, 12461, 14410, 12429, 12870, 15231, 14480, 10750, 15177, 13884, 14158, 13253, 13506, 12498, 13153, 14708, + 12676, 9604, 15172, 13863, 15312, 14436, 13490, 15083, 13908, 12270, 12599, 15223, 12463, 14561, 14493, 12352, + 14537, 11611, 12643, 14922, 14035, 11970, 14910, 14490, 14761, 15061, 13666, 14130, 15296, 13639, 15249, 8727, + 10349, 14741, 15080, 14391, 14718, 14301, 14930, 14129, 14326, 13851, 15293, 14246, 12144, 14809, 12441, 13698, + 15026, 10637, 14544, 14678, 12307, 13911, 15175, 15000, 13365, 15001, 14693, 14497, 14887, 12578, 14367, 14256, + 13042, 13771, 13831, 13797, 15268, 15168, 13592, 15333, 13043, 15348, 14720, 14460, 14043, 14486, 10748, 14262, + 13232, 14590, 12376, 14716, 14813, 12310, 13740, 14201, 13379, 13640, 13591, 13190, 14543, 10802, 15231, 11537, + 10729, 13332, 11635, 13524, 11244, 12494, 13989, 15284, 13862, 14603, 13622, 12856, 15165, 13434, 14572, 14424, + 14869, 12322, 14700, 15336, 13617, 10878, 11790, 14230, 13544, 15139, 12874, 14437, 15094, 10034, 13460, 12359, + 13440, 13762, 14905, 12550, 14582, 12519, 14664, 14553, 13654, 15352, 15101, 14574, 14744, 12251, 11255, 14438, + 10879, 14460, 14690, 14639, 15314, 15079, 15153, 14607, 13996, 14167, 14901, 13147, 14311, 12178, 14388, 15357, + 15084, 14408, 14791, 14912, 14584, 13196, 14388, 14588, 14193, 14915, 15183, 14922, 15118, 14491, 13039, 14974, + 13376, 14354, 15131, 12635, 14944, 14679, 13913, 15109, 13552, 14077, 15065, 9046, 
14797, 15152, 14474, 11354, + 14821, 8686, 15155, 14645, 14280, 13560, 14454, 12429, 11976, 7793, 13349, 11970, 11974, 11055, 14006, 13838, + 14891, 15255, 13720, 14502, 10339, 14936, 13696, 15107, 11576, 15216, 15054, 14624, 13412, 14306, 10568, 14587, + 15300, 14269, 12607, 14541, 14415, 12965, 15145, 10865, 12531, 14627, 14380, 14476, 13385, 12942, 10869, 11206, + 15039, 9443, 12794, 14946, 14995, 14134, 14693, 14666, 14989, 11793, 12480, 15113, 13660, 13025, 14645, 14029, + 14497, 15299, 15145, 12294, 14905, 14661, 11443, 13140, 14311, 14003, 13561, 15314, 14892, 14809, 13535, 12842, + 14534, 13468, 14777, 12869, 11685, 14879, 13661, 14314, 12576, 13520, 15102, 13322, 12698, 14268, 13505, 15323, + 13954, 14903, 13785, 14378, 13589, 15077, 14844, 14877, 14023, 14486, 14448, 14507, 13758, 15168, 12245, 14379, + 11658, 14744, 11331, 14951, 15038, 14948, 15088, 14722, 15243, 14382, 15343, 14736, 15104, 14563, 14717, 10431, + 15197, 13735, 14642, 14661, 11502, 12512, 15320, 15152, 14818, 10891, 14429, 15334, 12609, 13661, 14380, 14429, + 15235, 12891, 12516, 13803, 14302, 13926, 14361, 15141, 14492, 14630, 13694, 15018, 13256, 14412, 14184, 14555, + 15016, 14257, 14495, 14148, 14570, 13319, 15169, 12711, 14371, 14993, 13813, 12538, 14997, 14861, 15193, 14736, + 14850, 13029, 14177, 15229, 14818, 13569, 13882, 14725, 13966, 14946, 13791, 15108, 11432, 14445, 11595, 13988, + 15283, 13409, 12407, 12135, 15187, 15339, 14282, 15035, 12978, 14306, 14404, 14859, 14338, 13535, 10517, 15324, + 13911, 14587, 14338, 15074, 13076, 14566, 14621, 14444, 14832, 13622, 11182, 14552, 15308, 14269, 14726, 12637, + 13504, 14826, 13816, 13790, 13480, 15038, 15183, 14511, 14781, 14688, 10941, 14618, 13681, 15059, 15338, 13181, + 15359, 15202, 14087, 11400, 14998, 10675, 14181, 13456, 14632, 15282, 11306, 12694, 14297, 15198, 14827, 15234, + 14529, 14798, 12579, 15092, 14859, 14419, 13541, 15345, 14380, 13978, 14658, 15288, 15008, 14733, 15078, 15102, + 14346, 15077, 12457, 14880, 14468, 
12311, 14827, 13801, 13408, 13066, 9924, 14922, 11749, 15297, 14236, 14400, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14633, 12096, 14115, 13971, 11429, 14833, 9432, 10395, 11319, 14428, 14843, 14232, 12548, 7919, 15088, 14305, + 14813, 14686, 14342, 13174, 14400, 14498, 12616, 9064, 12837, 13379, 15281, 14225, 13027, 11927, 14870, 14120, + 14831, 15038, 13931, 13712, 14883, 14616, 14223, 13614, 13670, 14720, 13492, 13209, 11716, 14816, 12861, 14087, + 13979, 14569, 15068, 14409, 14972, 13483, 11160, 12313, 13228, 14602, 14673, 11905, 15025, 10021, 15117, 15153, + 13462, 15339, 14120, 10311, 15128, 11470, 12619, 13442, 13318, 14462, 14980, 13346, 10339, 14495, 13259, 14816, + 12911, 13893, 14669, 15124, 14457, 14563, 13906, 15166, 11106, 14892, 14597, 15320, 12485, 14134, 13921, 12826, + 14758, 14909, 14365, 12603, 13223, 13127, 15191, 14982, 12878, 14189, 14970, 15115, 12713, 13664, 14020, 14562, + 15122, 15343, 11395, 11082, 14740, 12097, 15132, 14941, 14863, 14428, 12155, 9555, 13747, 15117, 9091, 11442, + 14680, 14657, 15288, 14832, 15327, 14530, 14762, 11369, 12046, 14239, 14444, 14427, 13630, 14440, 14185, 15358, + 14596, 13507, 12182, 15128, 15258, 13328, 13382, 14445, 15311, 14266, 14393, 14805, 15317, 15329, 14980, 13958, + 14426, 14342, 14592, 14623, 14516, 15332, 15033, 14530, 14147, 14488, 15019, 15210, 14461, 14918, 15027, 15086, + 14973, 14397, 14615, 14523, 14954, 14492, 14660, 15128, 14428, 13198, 15025, 11796, 13754, 10344, 12936, 13814, + 13428, 12732, 14407, 13246, 14762, 10268, 14164, 13468, 14436, 11718, 14979, 14861, 13928, 14641, 14702, 12458, + 14620, 14603, 10232, 14343, 15360, 12329, 14416, 8166, 11019, 15011, 14075, 14114, 14414, 14717, 14340, 15139, + 15240, 14814, 14748, 14476, 14488, 11043, 14510, 12898, 13609, 12786, 13935, 14567, 12992, 11696, 15028, 14636, + 14751, 15252, 14273, 13307, 13844, 11071, 14553, 12454, 11382, 13440, 14607, 15161, 13849, 14990, 13858, 14570, + 14131, 15356, 12642, 13217, 11878, 14944, 10750, 
14647, 13837, 14591, 14343, 12894, 14669, 14586, 14970, 12816, + 11248, 15353, 14566, 14838, 12767, 14363, 12677, 15029, 14034, 10915, 14965, 14393, 13749, 14888, 12912, 14157, + 13838, 12290, 12232, 13419, 14643, 14877, 14879, 13383, 14649, 13527, 14441, 15160, 13934, 13328, 14586, 13407, + 13475, 11751, 15161, 14851, 14579, 12361, 15143, 11779, 14347, 15340, 15178, 14467, 13465, 8601, 14510, 11762, + 13531, 13428, 13998, 11058, 13442, 15027, 11686, 14933, 15052, 15124, 14766, 11392, 15291, 14762, 9723, 14811, + 15193, 10349, 13312, 13594, 14974, 15282, 13357, 13630, 14902, 6814, 14823, 12348, 15254, 11617, 14628, 14255, + 14730, 14654, 14792, 13820, 14659, 13421, 12091, 12595, 14134, 14844, 13339, 11345, 14182, 13778, 14931, 15042, + 13958, 14068, 15257, 15093, 14847, 13486, 13739, 14538, 15130, 14807, 12499, 14800, 10530, 14715, 14722, 14937, + 14940, 15320, 13399, 15070, 14769, 15077, 12688, 14882, 13834, 13685, 6802, 13742, 14331, 15268, 15315, 15102, + 14687, 14984, 13290, 13514, 13110, 14748, 12588, 14708, 12295, 11914, 14968, 15328, 14022, 14892, 14949, 13417, + 14573, 14701, 14468, 7623, 15360, 14885, 14298, 14867, 13729, 12630, 10966, 14239, 15346, 11740, 12908, 14318, + 14697, 13947, 14003, 15118, 15315, 13408, 15097, 15062, 13466, 14631, 15271, 14338, 14774, 13913, 15192, 11323, + 15193, 13226, 12455, 14878, 13991, 14922, 9979, 14498, 13985, 15328, 13769, 13469, 14753, 13777, 13021, 14817, + 14427, 14805, 15338, 14357, 14424, 14575, 13141, 15068, 15301, 13172, 13389, 14128, 13845, 13948, 13164, 6953, + 11920, 14451, 12594, 13751, 14620, 13028, 11517, 14912, 14340, 14374, 13897, 14445, 14212, 14758, 15026, 11242, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14774, 10696, 11497, 14888, 14307, 10577, 14724, 14163, 14063, 14944, 14811, 11742, 13451, 15307, 14621, 14696, + 11644, 13284, 14089, 12359, 14499, 13580, 12784, 15080, 12547, 15254, 13663, 13678, 13441, 14072, 13255, 15329, + 14373, 14519, 15027, 13187, 14387, 11512, 14306, 13573, 14483, 
14117, 12714, 14456, 15265, 14567, 14418, 10373, + 15274, 14673, 14411, 12520, 14462, 12592, 14885, 13104, 11688, 14095, 8674, 14517, 12548, 14583, 15064, 13103, + 13427, 13123, 10293, 13432, 14966, 12557, 14568, 12131, 13572, 12341, 13878, 13997, 14997, 13166, 14640, 14879, + 12277, 10751, 14927, 13409, 14429, 14919, 15264, 14671, 14425, 14677, 13876, 13591, 13313, 11291, 14313, 13999, + 14107, 13828, 15272, 14484, 14103, 13717, 14366, 13275, 14444, 14996, 12885, 13558, 14974, 14438, 12789, 14954, + 13587, 11654, 13805, 15267, 12551, 15187, 12325, 12852, 14838, 14401, 15213, 15251, 14928, 14501, 15157, 10234, + 14818, 12091, 14329, 10152, 14803, 15328, 15142, 10653, 12672, 12911, 14842, 14444, 8237, 14515, 12908, 14428, + 14433, 15114, 14777, 14666, 13371, 14986, 11184, 14102, 13957, 15101, 11326, 13973, 14778, 14560, 14038, 14695, + 14095, 15143, 13917, 14689, 15251, 14533, 14213, 14681, 13595, 14483, 14818, 14307, 14756, 14408, 14375, 14778, + 15238, 15167, 12734, 11814, 12876, 13531, 14709, 11673, 13621, 15265, 14342, 13895, 14828, 15154, 15105, 14826, + 14856, 12222, 14474, 14890, 13533, 14549, 15012, 15206, 6181, 15025, 13511, 14215, 14497, 14852, 14441, 15245, + 12786, 14495, 13347, 12467, 12646, 15206, 14573, 13597, 12273, 13346, 11284, 14446, 12268, 13935, 15023, 12175, + 14322, 12500, 14429, 14175, 14375, 14225, 14464, 10470, 13521, 14523, 13100, 13429, 14254, 12366, 15032, 12712, + 14670, 15326, 8883, 13942, 14724, 14433, 12196, 14995, 14129, 15098, 14725, 14151, 14591, 15325, 10495, 12650, + 15086, 13387, 13799, 14354, 15048, 14023, 15143, 14346, 14518, 14516, 14706, 14400, 13687, 13830, 14554, 14444, + 15151, 14423, 8487, 14858, 13453, 14810, 14534, 13427, 13325, 14918, 15296, 15080, 14842, 14704, 14026, 13991, + 14816, 15045, 14649, 14902, 14866, 14437, 14425, 14023, 15283, 14573, 14357, 11566, 15055, 14607, 12724, 12836, + 13747, 14717, 14463, 15013, 14579, 12294, 14879, 15221, 14948, 15299, 13466, 10446, 15047, 13726, 13818, 15354, + 12399, 15264, 
15216, 15054, 14677, 14375, 14636, 13466, 13989, 15181, 14871, 14873, 13049, 15052, 13506, 13462, + 15236, 14844, 13027, 14467, 12319, 12825, 10661, 15270, 14720, 14841, 14546, 13485, 15219, 14136, 13957, 15203, + 14742, 14070, 14410, 14587, 14104, 13831, 14027, 15156, 14349, 14418, 12925, 14412, 13995, 15287, 13683, 15192, + 14010, 15253, 14249, 14898, 13838, 14861, 14553, 13990, 14401, 13981, 13611, 13579, 15156, 13974, 13554, 14389, + 14737, 14357, 14935, 14470, 12365, 13978, 12607, 14331, 12056, 14406, 11662, 12894, 14534, 14367, 14090, 14528, + 14863, 13964, 15341, 14708, 15256, 14637, 12783, 14807, 12723, 14545, 14097, 14443, 13578, 15235, 14376, 10492, + 14257, 14517, 13074, 13956, 15012, 12834, 14665, 15113, 14075, 14663, 11274, 12808, 14613, 13731, 9269, 13759, + 13818, 14939, 13577, 15076, 14060, 14401, 14793, 14857, 14301, 13712, 12022, 15027, 14451, 12693, 13091, 14102, + 14541, 14394, 12319, 14442, 13945, 14174, 15296, 13737, 12505, 12070, 14032, 15111, 14664, 15028, 15205, 15261, + 10754, 13607, 12081, 14304, 14120, 13452, 14387, 12740, 14213, 14911, 10828, 11950, 12310, 14133, 10991, 14903, + 14665, 14359, 14769, 14239, 14176, 14811, 15169, 14471, 14257, 14658, 14671, 10205, 11993, 13955, 15301, 15308, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13826, 14558, 14819, 12988, 14957, 14400, 13686, 14618, 13328, 14431, 13607, 14661, 14936, 14149, 12825, 13775, + 15105, 14833, 14258, 13959, 14433, 14944, 14866, 13562, 12613, 14804, 13340, 12922, 13934, 15105, 14797, 15133, + 15333, 12573, 12382, 15063, 14096, 14016, 13757, 14299, 12979, 14479, 14525, 14495, 14297, 15025, 14322, 13354, + 15353, 15071, 14392, 13362, 15358, 14349, 15337, 15269, 14668, 15329, 12969, 12346, 14223, 13431, 14558, 13445, + 15137, 10664, 12592, 14963, 14986, 12031, 14361, 15034, 15017, 14861, 14407, 15341, 13711, 14400, 15313, 14709, + 8940, 15160, 10954, 13437, 13989, 14458, 14346, 14929, 10392, 14398, 15291, 12367, 14418, 12551, 14450, 14488, + 14916, 14416, 12284, 13667, 
13726, 14808, 14886, 12011, 14410, 14884, 14709, 15335, 12206, 14610, 14247, 14466, + 15182, 14639, 13531, 14698, 13701, 13845, 11439, 14604, 15190, 9397, 13368, 14343, 14534, 14046, 15277, 14000, + 14977, 10428, 14737, 14760, 14594, 13517, 14970, 15112, 13351, 11824, 15183, 13677, 15109, 12718, 9219, 15136, + 11343, 9799, 12893, 14379, 15119, 14656, 14713, 14743, 13445, 15284, 14671, 10449, 14915, 11675, 13835, 15073, + 13314, 14800, 15093, 13773, 14372, 14477, 14362, 15249, 13173, 14433, 15222, 14375, 12764, 10838, 13635, 14860, + 14606, 11191, 13448, 12967, 15126, 14826, 14981, 14958, 15111, 11041, 13992, 11696, 14786, 13659, 15300, 12961, + 13953, 14445, 12547, 14996, 13247, 14583, 15063, 15331, 15349, 15059, 10628, 14658, 14949, 13345, 13287, 6968, + 13684, 15181, 12586, 14444, 15134, 12724, 13452, 14371, 15022, 12886, 14345, 9328, 14528, 13394, 14302, 15294, + 15292, 12829, 15314, 14816, 14267, 12337, 13664, 13757, 15094, 14433, 14262, 15222, 15285, 15004, 14089, 11922, + 14744, 14025, 15242, 14988, 13491, 15083, 13125, 11604, 13901, 10310, 10571, 12334, 13386, 10272, 13301, 15064, + 14576, 10482, 14958, 11616, 14554, 14794, 13244, 14444, 14136, 14047, 14854, 14623, 15082, 13854, 13204, 14768, + 13330, 12857, 14109, 10505, 15276, 13856, 13383, 15325, 14371, 13744, 14888, 13009, 13484, 13659, 14535, 13033, + 13440, 13479, 13830, 14471, 15209, 12633, 13784, 12862, 13615, 13439, 11794, 13377, 13452, 15347, 14386, 14979, + 12796, 12501, 13963, 13629, 15051, 14902, 15350, 12400, 14509, 13238, 14809, 15239, 15104, 13226, 15030, 12257, + 13550, 10249, 14165, 15016, 15044, 15211, 15160, 13653, 12291, 10955, 15055, 8800, 13594, 14758, 15046, 14590, + 13394, 14336, 13651, 15016, 15041, 13545, 15152, 13812, 14193, 12873, 12956, 13936, 14021, 14434, 14928, 15053, + 14763, 14622, 14896, 15301, 15050, 13302, 11064, 14745, 12485, 13010, 14215, 14190, 14598, 11413, 15059, 14840, + 15262, 13948, 12620, 14478, 13647, 12444, 14592, 14162, 14271, 13440, 15353, 13803, 8540, 
14138, 14681, 14784, + 13467, 13517, 14795, 14727, 15021, 14633, 11536, 13062, 15182, 15136, 13729, 12616, 14904, 15062, 13574, 15183, + 14575, 15177, 14324, 12463, 14350, 13541, 14136, 14595, 13447, 13667, 12860, 14883, 8592, 10946, 14763, 15303, + 15345, 13778, 13625, 14840, 9883, 14114, 15209, 14145, 15115, 12188, 15200, 13942, 15005, 12946, 14307, 13028, + 14061, 14396, 13036, 14599, 15145, 15179, 11650, 14069, 5960, 12265, 6008, 15274, 14211, 14426, 11462, 13629, + 14997, 14795, 14372, 14979, 14710, 13970, 14242, 14238, 14428, 12309, 14079, 14731, 13716, 12461, 14103, 14672, + 15270, 14582, 11006, 14795, 10261, 15022, 13456, 10990, 14975, 14182, 11774, 14543, 14296, 14577, 12172, 15197, + 14874, 13630, 14721, 14789, 14983, 14679, 8074, 15054, 14748, 14095, 12765, 14911, 15031, 12726, 13938, 14928, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14869, 14464, 13048, 15321, 14165, 14471, 15178, 14458, 14065, 13189, 15082, 11794, 13946, 15057, 14106, 12556, + 12800, 14904, 13405, 15042, 14837, 14805, 15288, 14200, 14559, 14015, 14732, 13770, 13548, 9506, 13812, 14514, + 14774, 13596, 15240, 14154, 14763, 14776, 11326, 15072, 12365, 14618, 12291, 13529, 14645, 14881, 14303, 12724, + 14721, 15261, 13305, 9595, 13349, 15174, 14179, 15000, 14621, 13367, 14865, 14095, 12241, 14631, 13502, 14964, + 14076, 12994, 12775, 14553, 14754, 12972, 14178, 14496, 15292, 13452, 15024, 14505, 14176, 14770, 12292, 14789, + 14490, 14727, 14735, 14427, 15345, 14713, 14383, 13684, 14334, 12964, 12167, 12637, 15330, 14924, 15036, 14548, + 14465, 11265, 9254, 14577, 14937, 15135, 14963, 10080, 9216, 12579, 12549, 14339, 14805, 13225, 15103, 15231, + 12600, 14236, 12448, 14538, 14025, 14585, 14354, 14734, 12476, 15154, 14948, 14140, 15315, 9922, 15067, 14953, + 13522, 15237, 14907, 13558, 11370, 14899, 14456, 13350, 11326, 14597, 14251, 13013, 14686, 14832, 14892, 14019, + 11530, 14562, 12312, 14723, 13665, 14479, 14093, 14238, 14112, 9318, 15011, 13937, 14942, 15305, 14487, 13004, + 
13728, 14388, 13848, 14156, 14883, 12481, 14363, 14664, 13576, 14540, 14525, 12863, 13897, 14980, 12519, 10133, + 14491, 14279, 15303, 14788, 13938, 14218, 13993, 12747, 14766, 14758, 15134, 11550, 13267, 15176, 14390, 13032, + 7701, 12821, 14612, 14240, 13172, 14106, 14524, 11571, 14674, 14203, 14836, 14982, 15277, 14391, 14544, 15275, + 13172, 8810, 13700, 13263, 10573, 13434, 14279, 12949, 15185, 14524, 12222, 9011, 14474, 14669, 13270, 12139, + 14873, 15351, 10338, 11195, 10468, 12922, 12852, 15329, 13019, 15216, 14025, 12075, 14664, 10601, 14565, 14456, + 13911, 15072, 14912, 14966, 12650, 14579, 14821, 14229, 14870, 13792, 15332, 14001, 14882, 14616, 15217, 15235, + 13333, 13661, 14636, 9404, 15318, 15017, 15227, 14077, 11937, 15058, 8858, 14339, 11505, 14795, 15063, 14568, + 14573, 13913, 13482, 8317, 14624, 15180, 15047, 13804, 15037, 14885, 13830, 13054, 15277, 8931, 14791, 14553, + 14583, 13324, 13881, 12596, 14734, 14694, 13115, 14394, 9514, 13875, 15347, 12064, 14342, 11916, 13715, 13902, + 13801, 14774, 12740, 14707, 13822, 15298, 14155, 14852, 14043, 15250, 13527, 14589, 12808, 14671, 12719, 13862, + 14930, 14423, 13174, 14918, 14393, 14673, 13466, 11384, 13939, 11986, 15010, 14063, 13568, 14347, 15136, 14448, + 14350, 14946, 14852, 11392, 13839, 11065, 14588, 13645, 15071, 14341, 14562, 14347, 14474, 14600, 14828, 13689, + 15183, 11950, 11471, 12894, 15168, 15281, 14024, 13250, 14944, 14714, 13405, 14627, 14129, 13354, 9382, 10852, + 10867, 15345, 11935, 10035, 13619, 15148, 11569, 11920, 15204, 14509, 11898, 14700, 11104, 15085, 11813, 13805, + 12699, 14409, 12223, 12384, 13636, 14366, 13286, 12238, 13361, 14855, 13820, 12140, 13632, 14112, 14398, 15269, + 14416, 13336, 14574, 15311, 14875, 13113, 15139, 14909, 13747, 14339, 14855, 13676, 15212, 14025, 14557, 12556, + 14405, 13626, 11728, 14451, 14560, 14291, 13838, 12243, 12702, 14666, 14894, 15155, 13828, 13482, 13260, 15114, + 12522, 12589, 14429, 14321, 11816, 14720, 14901, 15091, 14708, 15102, 
13833, 14262, 14833, 13741, 14068, 12569, + 14959, 14941, 12067, 13117, 11035, 14525, 14822, 15154, 12193, 12353, 12062, 15253, 7524, 11820, 15269, 14204, + 14779, 14729, 14520, 14892, 14716, 11635, 12328, 14415, 15153, 14087, 14244, 12310, 11970, 14913, 14371, 14692, + 14503, 12341, 14057, 12766, 14089, 13462, 14524, 10619, 15141, 12732, 14778, 12784, 13937, 10271, 13962, 15253, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13703, 14389, 15357, 14609, 13507, 12798, 7127, 11317, 12473, 10181, 13934, 14768, 9166, 14686, 14612, 14553, + 14405, 15088, 14868, 14786, 13899, 14672, 13778, 14107, 14426, 11742, 15192, 13051, 15294, 15066, 12955, 14408, + 12938, 13429, 14812, 13719, 14230, 13673, 15331, 15249, 12350, 12389, 13700, 15096, 11518, 12991, 13629, 13212, + 13903, 13330, 14984, 13370, 14435, 14623, 14816, 13469, 14498, 14406, 8639, 12049, 14655, 11804, 15118, 14408, + 14352, 12358, 15207, 9121, 11664, 9385, 13938, 14900, 13038, 11285, 11678, 13124, 14488, 14949, 14965, 15304, + 13899, 12719, 15145, 15224, 12562, 15133, 13413, 15089, 14601, 14925, 14932, 13333, 12989, 14941, 11314, 14486, + 13839, 12928, 14683, 13609, 13802, 13812, 9200, 14860, 15096, 12380, 14703, 14657, 12983, 14806, 12310, 14985, + 12573, 14206, 13846, 15246, 14010, 14226, 13598, 13837, 14150, 15255, 15338, 14668, 13606, 13786, 14572, 11525, + 14858, 15018, 14807, 15229, 15316, 13853, 15130, 14369, 13994, 15006, 14406, 15073, 14783, 14370, 15032, 14014, + 4925, 14985, 14101, 13339, 15075, 13804, 14547, 14464, 14298, 13471, 11166, 14144, 13830, 14830, 14440, 13469, + 14996, 10658, 15141, 15300, 12828, 11910, 15233, 14074, 14825, 10296, 6472, 13301, 14411, 13930, 13658, 13691, + 14457, 13385, 12395, 13480, 15240, 14183, 14804, 13111, 12161, 13433, 13397, 11153, 13484, 13262, 14721, 15101, + 12967, 13096, 15050, 14258, 13298, 13745, 13910, 13163, 13595, 15296, 14917, 15165, 13876, 14900, 13580, 13654, + 15324, 14953, 15001, 13261, 14929, 10310, 15148, 14903, 14690, 13676, 14771, 12151, 12725, 
13397, 9584, 13397, + 14885, 14535, 13590, 14330, 13784, 11299, 13677, 13268, 15053, 15359, 13674, 14833, 12385, 15337, 14208, 15157, + 14540, 14971, 14463, 14055, 12399, 14457, 14490, 14423, 6364, 13283, 13591, 13908, 13477, 13892, 10530, 14196, + 14071, 14759, 15199, 15175, 12198, 12898, 15348, 11350, 10184, 15169, 14391, 14836, 15240, 14585, 11593, 13293, + 14629, 11089, 14511, 12171, 14492, 14863, 14229, 14680, 15245, 13817, 14374, 14489, 14934, 12806, 14636, 14843, + 13870, 12783, 14715, 13506, 15174, 14376, 13423, 13738, 12444, 14558, 12865, 14347, 15335, 13690, 13522, 14786, + 14716, 12440, 14663, 14844, 14773, 9077, 12870, 11268, 13449, 14771, 14844, 13446, 13647, 14683, 13705, 13822, + 15140, 12749, 15121, 15204, 13774, 14670, 14668, 14565, 13758, 13730, 14424, 13741, 13231, 13698, 14174, 11559, + 13599, 14945, 10707, 14937, 13208, 15255, 14871, 14720, 13320, 10999, 13556, 14846, 10317, 14804, 14698, 15150, + 13892, 12602, 14749, 15291, 11603, 14435, 14509, 14731, 15268, 13587, 14042, 14476, 14818, 14939, 14748, 13206, + 14655, 12468, 14429, 14518, 13369, 12260, 14974, 15254, 13466, 14908, 13545, 15052, 12978, 14428, 14879, 14056, + 14791, 13883, 8417, 14086, 11600, 14734, 14842, 15056, 13241, 11687, 12201, 15112, 14901, 15191, 13983, 14748, + 13608, 14968, 11884, 14754, 15298, 14973, 13208, 14392, 12509, 15216, 14594, 12750, 12442, 14654, 8887, 15126, + 15275, 15196, 15133, 14631, 12924, 15214, 15231, 12469, 15163, 14886, 13209, 14857, 13094, 14757, 10118, 13482, + 12457, 15280, 15191, 12659, 14979, 10039, 15302, 14040, 15152, 14613, 14368, 15052, 7702, 15188, 12011, 15295, + 12240, 13213, 14653, 14876, 14278, 13345, 15178, 15162, 15144, 14372, 13511, 11769, 14226, 15322, 14012, 14255, + 15099, 15249, 14689, 12919, 13908, 12514, 13463, 11246, 15298, 11002, 14798, 14681, 9376, 15205, 15054, 14394, + 15204, 14080, 10622, 13402, 15006, 15167, 14702, 13939, 14354, 13869, 11458, 13285, 14710, 13428, 15164, 14765, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 15068, 15323, 14681, 13548, 15335, 15263, 14452, 14390, 15172, 15146, 13595, 10439, 11487, 13501, 15019, 14596, + 10732, 14678, 14431, 13899, 14042, 12729, 15029, 14491, 14984, 14901, 13832, 9329, 15243, 12170, 14658, 15279, + 15357, 14441, 13583, 15282, 12512, 14152, 14992, 12920, 11432, 13789, 12309, 11671, 15117, 14427, 13272, 11178, + 14031, 14744, 13344, 14939, 15199, 15014, 11916, 14529, 15187, 14468, 14870, 13745, 15204, 14232, 12588, 12287, + 14771, 14219, 14873, 14458, 13407, 8415, 14643, 13754, 15236, 15176, 13704, 12266, 14775, 14766, 13609, 10730, + 13740, 14521, 15354, 14516, 14693, 10414, 11588, 15132, 13881, 15354, 14552, 14913, 14830, 14537, 15189, 14608, + 14705, 14484, 13013, 13738, 13998, 14919, 14976, 15234, 11936, 13260, 13261, 14576, 14725, 15335, 15300, 14602, + 13185, 13996, 14956, 13931, 14093, 14721, 14044, 14652, 15086, 12694, 12117, 14414, 14041, 15146, 14382, 11595, + 13828, 12504, 15160, 15236, 15088, 13170, 15254, 15086, 11902, 14226, 14445, 15348, 15218, 14063, 13379, 13255, + 14968, 14077, 9896, 15116, 14713, 10682, 13433, 14524, 15358, 15069, 15167, 14675, 15288, 14473, 9509, 10959, + 11853, 13742, 12502, 14591, 14779, 14374, 12589, 14430, 15011, 13495, 13969, 14488, 14595, 13054, 12588, 14097, + 12787, 11270, 11327, 14427, 9596, 13774, 14524, 14488, 15046, 14663, 13406, 14576, 14093, 14342, 15286, 14650, + 13071, 14216, 14870, 11484, 11818, 13472, 14018, 14590, 14654, 14050, 13534, 14611, 13152, 14750, 13006, 15123, + 13266, 14259, 15053, 13472, 13898, 15258, 15107, 12516, 14833, 14551, 11824, 11692, 10772, 12845, 14651, 13355, + 14194, 7340, 12994, 7635, 14559, 14401, 15101, 11995, 14869, 11906, 14659, 14950, 10345, 11481, 11671, 15243, + 13809, 15075, 15209, 11382, 15318, 14605, 10374, 14656, 14784, 14056, 13719, 14638, 14897, 13245, 15223, 14995, + 14903, 14925, 13112, 15073, 15073, 15021, 12597, 15350, 9024, 14790, 12986, 14229, 14765, 13021, 15236, 12624, + 11126, 14815, 13124, 11843, 14994, 10524, 14922, 13076, 13394, 
14614, 15058, 14349, 11430, 14686, 14919, 13633, + 14790, 13423, 14598, 14675, 13694, 13787, 12274, 14260, 11583, 13380, 13426, 12401, 11267, 15311, 14742, 15215, + 9595, 13786, 12602, 14856, 14785, 14738, 14311, 12707, 14501, 14582, 13607, 13323, 14156, 15115, 11588, 15100, + 14805, 15186, 14098, 10408, 13179, 13165, 12900, 15129, 14569, 14500, 14574, 14311, 13574, 12820, 13975, 14707, + 14687, 11370, 10558, 15038, 14426, 14599, 14631, 12818, 14799, 14054, 11866, 15039, 12306, 10307, 14684, 11834, + 9403, 11437, 14648, 15074, 14955, 15200, 15173, 13605, 15262, 14381, 14504, 14706, 14354, 14743, 15337, 13409, + 14268, 14789, 14944, 14568, 12170, 13883, 11447, 13104, 10327, 13366, 13485, 15263, 14420, 14988, 14463, 12294, + 14837, 14633, 13237, 14020, 12941, 11567, 12943, 12383, 14595, 11361, 14394, 13062, 14608, 13039, 14528, 14863, + 11278, 15322, 13976, 11609, 13768, 14365, 11604, 13060, 14791, 15337, 12749, 14914, 15285, 10445, 14254, 14709, + 14777, 13562, 15016, 14809, 14390, 14800, 11768, 14780, 10247, 14129, 13263, 15312, 11437, 14098, 9705, 14547, + 14482, 13844, 14617, 11447, 14689, 14040, 9593, 13796, 14328, 15214, 15107, 12752, 13656, 13108, 13414, 14289, + 14371, 14489, 14929, 15131, 8191, 14299, 15217, 12381, 14814, 12436, 14757, 15321, 13325, 15271, 13998, 14568, + 15015, 13968, 15105, 14117, 14382, 14825, 14978, 15082, 13132, 14496, 14136, 12642, 15169, 13368, 13330, 14832, + 10579, 13145, 14858, 13637, 14285, 15162, 13869, 14862, 14771, 14729, 11524, 14575, 14182, 14030, 14736, 14840, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13624, 15219, 15248, 14965, 15060, 15010, 13930, 15194, 12830, 14422, 14830, 15237, 13611, 13620, 13647, 11235, + 12721, 15095, 12673, 14605, 14997, 13016, 12163, 13819, 14467, 11807, 14527, 12291, 15243, 12578, 15091, 13726, + 13384, 14376, 14716, 12223, 12235, 14832, 12210, 11776, 13509, 15227, 14195, 13535, 15291, 15102, 11448, 15113, + 10803, 10303, 13793, 14396, 15062, 15117, 13010, 15326, 9846, 14612, 14329, 
12856, 15285, 15201, 15345, 15235, + 15001, 14679, 13208, 13688, 14351, 12979, 11526, 14595, 13734, 14689, 12398, 14682, 14396, 15209, 11280, 14779, + 13183, 13543, 13745, 15148, 12792, 14050, 15274, 14644, 14806, 15147, 14733, 14071, 11773, 11541, 15325, 14506, + 12335, 14768, 14983, 13610, 15357, 13141, 14685, 14214, 12797, 12342, 13386, 14986, 15346, 15291, 14202, 15196, + 15246, 14830, 14969, 13360, 14089, 12898, 13476, 11673, 10807, 14838, 13520, 13466, 14693, 14963, 11844, 12761, + 14504, 14724, 14947, 13740, 15234, 15091, 7194, 15065, 14590, 14707, 14749, 5073, 14503, 14474, 11454, 14668, + 10188, 13698, 14965, 14368, 13231, 12985, 14669, 14546, 13316, 11003, 15198, 10440, 14237, 12337, 15004, 13679, + 14118, 15351, 15011, 13250, 14965, 13119, 14695, 14839, 13717, 15021, 14399, 14391, 14016, 15065, 11456, 15173, + 15143, 12510, 13078, 13523, 12696, 12475, 15034, 12986, 13311, 15095, 14617, 14512, 14924, 15033, 14022, 14667, + 13972, 10315, 13957, 11762, 14519, 12738, 11713, 13805, 13776, 14075, 13017, 13693, 15306, 13871, 13714, 14722, + 14558, 10367, 14406, 14442, 14412, 13807, 14965, 12142, 12032, 14047, 14323, 14402, 14032, 15062, 9547, 15305, + 14386, 14355, 12711, 12453, 15122, 11737, 9868, 9404, 8654, 13810, 11623, 14351, 14662, 9434, 14048, 14453, + 14929, 13726, 13899, 12514, 13635, 13615, 15060, 12259, 14948, 13234, 13610, 14556, 15331, 12515, 15072, 11554, + 14101, 13582, 14605, 12639, 14676, 14604, 15274, 14545, 14700, 14400, 9641, 14999, 11556, 15043, 11083, 14104, + 14386, 12375, 12995, 13416, 14927, 15252, 15085, 14730, 15341, 14234, 9350, 15060, 14652, 13643, 13215, 10203, + 12977, 15356, 14228, 14781, 14448, 13935, 14956, 14205, 14368, 15054, 15304, 14781, 10657, 14554, 13900, 14452, + 13863, 13466, 12423, 13130, 15163, 11731, 14533, 14894, 12325, 14494, 14009, 15151, 12925, 14477, 14835, 15222, + 14077, 14784, 13705, 13474, 14610, 14458, 14355, 14702, 14786, 12446, 13750, 13968, 13497, 12855, 11803, 14628, + 12309, 14529, 14802, 14758, 
13469, 14231, 14884, 14635, 14696, 9437, 12615, 8456, 15133, 14727, 14499, 14248, + 11706, 15003, 13853, 14528, 13639, 15201, 13439, 10531, 13316, 14663, 14413, 11985, 13562, 12773, 15208, 13928, + 14146, 14797, 9880, 11792, 14764, 15229, 14669, 14314, 14915, 14729, 14855, 13969, 11660, 10718, 14496, 14388, + 13227, 14170, 14723, 13123, 10553, 14819, 15063, 14600, 14569, 12020, 13081, 15275, 15235, 13644, 13407, 12307, + 12354, 13318, 14845, 12191, 13135, 14179, 13987, 12175, 13546, 14929, 12261, 12478, 14068, 14602, 11931, 13139, + 14906, 14997, 14764, 12458, 14770, 12595, 14593, 15092, 14573, 12408, 14569, 12626, 13585, 9730, 15271, 14099, + 15323, 14235, 14347, 13916, 13526, 12626, 4843, 13964, 14462, 15147, 13742, 14465, 15343, 13473, 14725, 11858, + 13906, 14243, 11319, 12066, 13485, 13712, 15240, 12487, 14136, 12509, 14870, 13362, 14005, 15272, 14363, 13714, + 14422, 15141, 14918, 11091, 14222, 14809, 13447, 13807, 14797, 13252, 14985, 14648, 14804, 15332, 14268, 12289, + 14421, 14889, 14216, 14934, 14891, 14272, 12013, 14294, 13501, 14046, 13595, 12407, 14815, 13861, 14917, 14472, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13537, 13308, 14640, 15148, 15034, 14107, 14178, 14971, 15062, 14689, 14087, 14869, 15071, 13326, 11647, 14732, + 15127, 14809, 14949, 14381, 12504, 13193, 13002, 12799, 14962, 13657, 10077, 14056, 12819, 14915, 14919, 12232, + 13289, 14861, 13851, 14027, 14182, 13716, 14593, 10044, 14614, 15248, 14593, 4916, 13955, 12799, 11306, 14428, + 14533, 12502, 15031, 15285, 13372, 14418, 14099, 14479, 12259, 11665, 13712, 13578, 14538, 13574, 14768, 13431, + 13558, 13296, 14159, 7712, 13937, 15047, 6603, 14638, 14787, 15185, 14661, 13100, 14816, 13365, 14185, 14742, + 15285, 13335, 14104, 12570, 14599, 14137, 14520, 11365, 12667, 7907, 14115, 14945, 14848, 11701, 15022, 14795, + 14895, 14556, 14678, 14957, 11519, 14121, 14699, 10350, 14075, 13752, 12190, 12278, 15205, 13492, 14934, 11438, + 13212, 13807, 14459, 12631, 10293, 13520, 15127, 
11775, 15109, 14629, 14842, 14325, 14465, 13558, 15010, 13803, + 14643, 10557, 14667, 13447, 14523, 13688, 12757, 14386, 11731, 14541, 14117, 15042, 10056, 12985, 13616, 8243, + 15344, 13323, 14677, 14744, 15155, 14935, 14482, 14885, 13472, 9380, 14745, 15337, 13099, 14818, 11235, 13413, + 15112, 14182, 12813, 14332, 14958, 13170, 15205, 15230, 11066, 15054, 14337, 12766, 15359, 12418, 14437, 14934, + 13930, 14445, 11758, 11151, 15070, 13375, 14845, 14640, 14535, 14063, 12811, 13791, 14659, 14855, 15269, 14759, + 14590, 14903, 15111, 12959, 14816, 15175, 13615, 14527, 14460, 13587, 13475, 13530, 13578, 14655, 14145, 13091, + 15283, 13338, 15283, 14787, 13546, 13712, 11718, 14405, 13876, 15129, 13036, 11492, 14549, 13751, 11630, 15334, + 14550, 13560, 10352, 14345, 12805, 15336, 13736, 14685, 14177, 13255, 14140, 15219, 12555, 14448, 13703, 13956, + 15171, 14701, 14203, 13357, 14545, 14625, 13522, 14356, 13543, 11589, 13550, 14519, 14323, 15206, 15103, 14940, + 14358, 14954, 13572, 13673, 14714, 7564, 9717, 14139, 12563, 14969, 12041, 15183, 14301, 14382, 14828, 15130, + 14505, 14363, 10760, 13472, 12977, 15338, 15199, 14584, 14846, 14324, 12027, 15346, 14523, 13870, 13064, 13332, + 13485, 14701, 13714, 13091, 14463, 14509, 14562, 11792, 15151, 15324, 12958, 13065, 15243, 13239, 15350, 12792, + 13439, 15105, 15022, 11235, 15296, 13945, 14790, 14394, 13123, 15251, 14424, 13850, 15040, 10156, 14728, 14653, + 13784, 13583, 14000, 15004, 15110, 15241, 14464, 12955, 14795, 15179, 15057, 14548, 12142, 14204, 14339, 14852, + 11507, 15108, 13474, 13761, 13802, 14374, 14918, 14610, 13878, 15338, 11663, 13099, 13066, 14252, 15254, 14581, + 13388, 13708, 13940, 12384, 15192, 14525, 15316, 14766, 12479, 15227, 7297, 12960, 15157, 12697, 13079, 15121, + 12680, 14262, 14308, 10448, 14260, 8657, 13722, 14656, 14369, 14400, 14838, 13624, 13529, 8447, 9413, 14858, + 11964, 13605, 12390, 14508, 13340, 15181, 15235, 13919, 14151, 14204, 15075, 15330, 13339, 9388, 15057, 14477, + 
14029, 14185, 15295, 12946, 12571, 15094, 14492, 12319, 12556, 15210, 13587, 14448, 14693, 14812, 14077, 4628, + 14049, 15012, 15272, 14018, 13920, 15039, 14819, 15049, 8219, 15157, 14588, 13018, 8563, 14406, 12439, 14969, + 15124, 14341, 15111, 14712, 14384, 15184, 14832, 15038, 15212, 13771, 15150, 14718, 15113, 11440, 12559, 15188, + 14994, 12731, 12423, 14827, 10994, 13780, 15293, 15005, 10856, 14405, 13899, 14340, 13822, 15261, 14717, 14426, + 14240, 14409, 13899, 15087, 15056, 13786, 14197, 14851, 15074, 12401, 15096, 13457, 15233, 14686, 14366, 11804, + 14755, 14701, 14937, 9540, 11746, 14478, 15083, 14148, 14683, 11412, 14585, 14639, 13010, 15113, 12314, 14873, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14521, 14041, 13757, 15160, 15311, 10812, 14911, 13180, 14396, 15068, 15157, 12268, 14916, 14896, 15259, 14839, + 14816, 13398, 13859, 15227, 14596, 15092, 14842, 14530, 14260, 13213, 15051, 14141, 14495, 14633, 15262, 14959, + 13207, 12909, 7459, 13892, 12596, 12119, 10983, 12957, 14531, 13544, 14509, 15115, 13785, 13379, 13939, 14480, + 15343, 9636, 10720, 13944, 11941, 13620, 12337, 14093, 11449, 14929, 13804, 14913, 14288, 12117, 13934, 14156, + 15329, 10598, 13388, 14816, 14286, 15191, 13799, 15118, 14854, 10596, 14308, 14981, 13629, 14640, 14988, 12668, + 12333, 11533, 13140, 15177, 14705, 13552, 13724, 14799, 13199, 15279, 15194, 13727, 13375, 15246, 12346, 13047, + 15094, 13587, 13719, 14039, 14223, 14334, 9141, 13058, 13097, 14011, 14735, 13165, 14608, 13898, 14846, 15077, + 14801, 14569, 8896, 11603, 14566, 15100, 14442, 13576, 15088, 12282, 14433, 15299, 13872, 13745, 13441, 14853, + 12404, 12848, 14324, 12418, 13344, 13374, 14484, 12633, 14552, 10802, 13784, 12116, 12288, 14760, 12854, 11920, + 15094, 11698, 10655, 11817, 11286, 12912, 15163, 14920, 14672, 14529, 13075, 13138, 13504, 14404, 13522, 14504, + 14969, 11547, 14734, 14905, 15107, 13908, 9897, 13781, 8014, 14788, 14205, 10000, 12874, 13865, 14373, 14442, + 12877, 9398, 15062, 
14598, 12366, 13106, 15232, 14749, 14962, 15242, 14564, 15028, 14269, 10100, 15054, 13584, + 13375, 14611, 11912, 14210, 14297, 13091, 15098, 14903, 15187, 14270, 13106, 12456, 14514, 10900, 11718, 15114, + 13413, 13196, 15113, 9223, 14720, 14567, 14824, 14460, 14432, 15139, 14575, 14995, 14689, 13067, 9113, 15235, + 14282, 14710, 15017, 14294, 12492, 13506, 11041, 13773, 13757, 9798, 12974, 14446, 12195, 9658, 13742, 14931, + 14542, 14320, 12161, 15116, 12761, 14822, 13260, 15196, 14983, 15271, 12995, 13760, 11677, 15335, 15026, 15032, + 14872, 13753, 15275, 14934, 15101, 13562, 14657, 11250, 13622, 15131, 15102, 15186, 14419, 14856, 14402, 11097, + 15235, 14167, 9888, 14476, 13534, 12744, 14564, 12131, 15200, 13018, 13109, 13829, 14142, 14821, 14718, 11350, + 14263, 14819, 14899, 11836, 15002, 12015, 13231, 13408, 11930, 13611, 14337, 15196, 13508, 12894, 11714, 15190, + 14151, 15326, 15287, 14572, 14340, 14503, 14626, 15076, 14068, 12422, 14483, 15212, 13402, 14674, 14647, 14210, + 14530, 15210, 12721, 13927, 15354, 12381, 15243, 14850, 15076, 15261, 10670, 13510, 13559, 10514, 14573, 8620, + 10362, 14784, 10743, 14881, 13378, 13660, 15118, 14459, 14676, 14387, 14999, 14541, 13982, 14125, 13918, 13424, + 14744, 10829, 15230, 11625, 13706, 14871, 15211, 14743, 15015, 13199, 11460, 12476, 13403, 15277, 13169, 15293, + 13554, 12442, 14985, 14066, 15210, 14791, 15323, 14265, 14558, 14069, 14994, 15037, 15105, 12346, 14842, 14255, + 12820, 12654, 15235, 14283, 13623, 14493, 12396, 13549, 13894, 14453, 14552, 15102, 12439, 12825, 12694, 13811, + 10065, 14513, 14812, 13473, 12791, 14328, 15066, 15048, 11463, 14968, 11341, 14479, 15234, 14784, 14348, 12320, + 12833, 15018, 12601, 15296, 14451, 14974, 14719, 14989, 14241, 12347, 14848, 10659, 13709, 12121, 15087, 14941, + 15149, 14270, 15081, 14664, 11446, 13555, 13773, 9525, 12308, 15295, 14422, 13152, 12539, 15244, 13480, 14684, + 14812, 14554, 14730, 13256, 13691, 12453, 6673, 14755, 15323, 14365, 14017, 14530, 13733, 
14433, 14073, 14792, + 14318, 15157, 14565, 11757, 11358, 14243, 13404, 14209, 14693, 12953, 14188, 14867, 12148, 9734, 10471, 14921, + 11834, 14153, 13803, 12527, 13412, 14278, 14379, 10916, 15165, 14495, 13531, 13432, 14209, 15262, 11705, 12347, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15114, 13518, 13980, 12685, 13682, 14272, 13538, 15247, 14919, 14981, 14820, 14814, 12429, 15300, 15148, 14391, + 12449, 15326, 10562, 13005, 10240, 12977, 13699, 12957, 14309, 14718, 13593, 13903, 14501, 14105, 13537, 13399, + 15004, 14600, 14481, 11560, 12340, 14614, 14917, 10587, 13746, 14556, 11536, 15142, 13452, 12608, 14624, 15003, + 13146, 14870, 14150, 15225, 14224, 15215, 15046, 15346, 14393, 15237, 15270, 15042, 14430, 14683, 14882, 13610, + 14700, 12601, 15178, 13571, 14787, 15318, 12600, 13036, 14689, 14263, 15030, 13271, 10435, 12379, 12355, 14546, + 14677, 14471, 13314, 15305, 12584, 14631, 11957, 14793, 15038, 8991, 14965, 15046, 13934, 13647, 13532, 14115, + 14380, 13663, 13892, 13726, 13892, 15344, 15272, 14946, 15288, 11327, 12495, 14350, 15288, 13236, 15099, 13013, + 12867, 14925, 11699, 12644, 11685, 14909, 15264, 14874, 11388, 14071, 12527, 12382, 14128, 14680, 11357, 14657, + 15323, 14513, 13166, 15126, 13490, 12492, 14094, 15206, 14004, 14287, 10214, 12793, 13024, 14627, 7760, 15161, + 10408, 12813, 12688, 15255, 13870, 13770, 13946, 15027, 11218, 13474, 14726, 14544, 12516, 13443, 13627, 14660, + 12360, 14074, 14088, 15354, 13954, 13999, 12297, 12222, 14771, 15322, 13871, 13605, 14143, 7336, 14738, 14951, + 13706, 12842, 14169, 13381, 14768, 13068, 13489, 12370, 9925, 14721, 11967, 15162, 14758, 15157, 15129, 14126, + 13267, 14922, 14078, 14992, 15240, 14910, 11827, 14301, 14121, 14654, 14724, 11465, 14836, 14496, 13914, 14521, + 13585, 13562, 13179, 14937, 14686, 13899, 14493, 13764, 13087, 15281, 13444, 13940, 13250, 14737, 14233, 12423, + 13957, 11705, 15191, 13076, 14108, 14963, 14682, 14005, 15184, 14534, 12839, 15109, 14780, 15302, 15102, 
14704, + 11798, 14783, 14588, 15198, 15147, 12612, 10312, 13328, 14922, 15029, 10556, 14484, 12727, 13751, 14369, 14926, + 15310, 13435, 10187, 14401, 15200, 12906, 15279, 15019, 13355, 14651, 13387, 14433, 14551, 13391, 14118, 14722, + 13990, 5843, 12131, 15192, 15148, 14414, 14604, 15079, 12226, 7803, 10866, 14738, 14102, 13970, 15067, 14543, + 14180, 15247, 11606, 13890, 11493, 13911, 14480, 13782, 13709, 13827, 13780, 11400, 15072, 12473, 14587, 11537, + 13455, 14526, 15094, 12745, 9218, 15088, 15056, 15208, 14648, 14387, 14917, 15032, 12307, 9955, 12872, 10061, + 14633, 14675, 14866, 15325, 13700, 12137, 14069, 15208, 13356, 13354, 13682, 14550, 9415, 12219, 14245, 14355, + 14609, 14134, 14551, 14889, 14736, 13331, 12648, 11684, 14757, 14961, 14788, 15216, 15263, 14019, 11677, 15117, + 15101, 13684, 14919, 14932, 12004, 13786, 14691, 7719, 15137, 12794, 13755, 14670, 14884, 6460, 11961, 13286, + 14853, 12137, 13602, 14396, 15333, 11885, 15354, 14870, 15033, 15355, 15008, 14664, 15104, 14911, 14301, 11977, + 14343, 12836, 14766, 12698, 14500, 14934, 12836, 14637, 14471, 14192, 15029, 14811, 12884, 14442, 12308, 14963, + 11497, 14934, 12533, 14256, 14962, 13988, 15160, 15042, 13813, 15163, 14976, 14372, 14674, 14915, 15000, 12889, + 14454, 14337, 12817, 15282, 11888, 14712, 12716, 12968, 14828, 11331, 14553, 14960, 11745, 14426, 13329, 15029, + 12324, 15039, 9540, 13603, 13163, 13497, 14905, 13530, 14697, 15327, 13809, 14862, 15121, 13496, 13204, 14345, + 11908, 15058, 11816, 14434, 13870, 13792, 13100, 14035, 13718, 14104, 10908, 11752, 14814, 15270, 14354, 15227, + 14864, 14910, 14112, 14770, 15135, 15286, 14790, 12491, 10339, 10311, 12666, 13673, 13661, 15197, 15228, 13740, + 14016, 14858, 14433, 13865, 14953, 14904, 12803, 15006, 13634, 13596, 13522, 11651, 8251, 14412, 15116, 13971, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13281, 14388, 14653, 14235, 14949, 14756, 13327, 14157, 8095, 14198, 12592, 14315, 14425, 14529, 14431, 13207, + 15309, 
14636, 14993, 13322, 14044, 15251, 14487, 11736, 14643, 14772, 14514, 11930, 14370, 9144, 14312, 14996, + 13933, 14605, 14132, 14396, 12388, 12102, 15068, 13772, 13114, 14727, 13750, 15137, 14107, 10686, 13506, 14924, + 14572, 15097, 14554, 15329, 15000, 15066, 11758, 8366, 15122, 13639, 11621, 10547, 14410, 14117, 14278, 14783, + 12150, 12072, 14949, 12702, 14558, 13354, 13587, 14553, 11582, 12985, 12942, 14217, 11719, 10286, 13961, 14120, + 12922, 15223, 14233, 14738, 14341, 14852, 14378, 13464, 15071, 15134, 13977, 13730, 14450, 14537, 11988, 13453, + 14846, 15224, 14809, 14926, 10310, 13068, 14659, 14381, 13033, 13457, 15043, 14285, 14822, 14320, 14747, 15336, + 12970, 11143, 15268, 14511, 14287, 14914, 12641, 13277, 14756, 12932, 11826, 14039, 13819, 15342, 15009, 11671, + 14493, 15019, 14550, 14406, 14474, 12501, 14314, 14509, 14271, 12944, 7321, 14959, 14552, 13422, 14685, 14722, + 15248, 13906, 15278, 13789, 12703, 12355, 14521, 13515, 14498, 9958, 15012, 15088, 14775, 14876, 14482, 14983, + 15083, 13026, 15222, 13686, 15189, 10315, 12789, 13710, 14859, 11598, 13848, 14604, 9806, 15131, 11235, 14405, + 7898, 14241, 11721, 14579, 13142, 13406, 13562, 14886, 15058, 13923, 12291, 13886, 14662, 14492, 8785, 14705, + 13319, 14371, 13297, 14752, 14933, 13149, 14319, 15048, 10985, 15077, 14658, 15309, 15038, 14519, 14080, 13682, + 11960, 12674, 14814, 13905, 14884, 12316, 14700, 12754, 14468, 14064, 15130, 11870, 13111, 14785, 13544, 14927, + 10080, 14046, 12328, 13732, 14914, 13920, 10934, 15350, 14796, 13997, 14814, 10346, 14982, 12412, 11890, 13211, + 7608, 15091, 14696, 13887, 15345, 13824, 14880, 14774, 14041, 13713, 15009, 15119, 14820, 14655, 12334, 14429, + 14259, 11618, 12647, 15121, 14091, 14538, 13483, 15153, 15303, 11594, 14800, 14366, 11562, 14498, 15267, 14114, + 12398, 14270, 13029, 15225, 14450, 15071, 15015, 13722, 14481, 13872, 11952, 13001, 14710, 14538, 14735, 15116, + 14141, 15168, 14990, 13575, 14597, 10412, 13787, 13336, 15341, 14384, 14827, 
13904, 14229, 14331, 14880, 14209, + 14378, 15247, 13762, 15124, 13800, 14845, 14774, 12305, 15287, 14592, 13862, 9171, 13419, 14593, 15168, 12761, + 14663, 14776, 14976, 13314, 15073, 12674, 14822, 14765, 15289, 13832, 14804, 14411, 15166, 14909, 14581, 14703, + 14984, 13050, 15107, 14908, 14379, 15357, 14384, 12170, 15186, 13363, 14094, 15131, 14898, 14342, 15090, 15226, + 14340, 14612, 12815, 13927, 13914, 14786, 14459, 12572, 12060, 10287, 14665, 13979, 15070, 9430, 13653, 11688, + 13386, 14209, 14062, 14526, 13840, 13974, 12540, 14728, 15181, 13579, 15201, 13679, 14547, 10079, 8373, 15089, + 14652, 15218, 13794, 14695, 14495, 11434, 13729, 14457, 14750, 13953, 14900, 14345, 14743, 8968, 13501, 14411, + 15088, 13765, 14305, 13611, 11454, 12431, 13916, 15239, 15101, 14880, 15032, 14959, 13428, 15156, 12868, 14880, + 14404, 10812, 13679, 15352, 13889, 14393, 14057, 14521, 11904, 13946, 13475, 15140, 12184, 12043, 5618, 13417, + 15095, 14830, 14417, 14349, 14508, 12684, 14265, 14503, 14177, 14799, 12752, 15176, 11621, 14074, 14476, 13631, + 14996, 14413, 14905, 15034, 15041, 14168, 12507, 15127, 14898, 11142, 12775, 14944, 14429, 13475, 14620, 15249, + 14723, 11573, 15098, 12652, 14089, 14582, 14455, 14365, 13275, 13550, 15278, 12935, 14109, 14868, 15056, 15294, + 14365, 14405, 11798, 13938, 12237, 15015, 13686, 12382, 14689, 13340, 15273, 11760, 15173, 11573, 11228, 14127, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14976, 15320, 14856, 14747, 14545, 14933, 12942, 12746, 14663, 14383, 14315, 15076, 12981, 12639, 14651, 12301, + 13831, 13248, 13561, 14089, 7133, 15107, 15243, 14283, 13685, 13837, 13150, 15097, 15066, 15321, 14754, 15065, + 15145, 14604, 9350, 14595, 14753, 11829, 11989, 14282, 15170, 13365, 11849, 15026, 15063, 14496, 14898, 13614, + 14455, 11490, 14996, 13146, 15316, 13678, 12519, 12904, 15215, 10141, 14359, 12764, 14952, 14945, 14318, 12545, + 15021, 13737, 12487, 14273, 15306, 14737, 14678, 13369, 14656, 14651, 11771, 14753, 15329, 
11653, 14861, 13370, + 14876, 13650, 14235, 14345, 14666, 11985, 15343, 10510, 14125, 13014, 12078, 14598, 14707, 14807, 14031, 14574, + 14259, 12994, 12920, 14841, 12705, 12426, 10228, 12535, 13332, 14389, 14773, 12520, 14113, 11590, 13318, 14054, + 14045, 15238, 14760, 14441, 13800, 12903, 14482, 14181, 15116, 14771, 14710, 12219, 14550, 15131, 12956, 15206, + 14452, 14133, 14825, 14782, 13950, 14989, 14822, 14550, 14960, 13546, 15291, 15047, 14721, 12796, 13049, 13984, + 15339, 13726, 14888, 13541, 14481, 11449, 15145, 15314, 14862, 14949, 15320, 15218, 13523, 13857, 14019, 14322, + 14519, 13801, 13015, 10633, 14755, 11690, 14555, 14487, 14256, 13564, 12702, 11090, 15349, 13077, 14269, 15188, + 13954, 14889, 14325, 13875, 14970, 13963, 14442, 14405, 13956, 14661, 14439, 14251, 14473, 14590, 15279, 15312, + 15103, 15129, 12912, 14690, 14609, 13013, 14625, 14788, 14695, 13120, 14112, 13991, 14719, 13317, 14953, 14861, + 12677, 14062, 13603, 11757, 13703, 13908, 12437, 9314, 12550, 8987, 14382, 11060, 15021, 14353, 14740, 14386, + 15262, 9436, 14217, 13844, 15278, 13635, 13995, 14881, 15223, 14603, 14039, 14987, 14338, 14804, 15039, 13950, + 15027, 14131, 15027, 14733, 12575, 14899, 15147, 14539, 14019, 14406, 11221, 14913, 15175, 15293, 13944, 9951, + 14276, 15309, 11477, 13522, 9043, 14880, 14893, 10032, 12896, 12839, 13480, 15326, 8593, 10118, 15130, 14666, + 12737, 10912, 14774, 12419, 14430, 14359, 13123, 12935, 15153, 14394, 14949, 14113, 14684, 13348, 15086, 12414, + 13458, 14803, 15243, 12783, 11518, 14356, 14634, 14364, 13764, 14562, 13552, 11792, 13272, 14166, 13093, 14020, + 14673, 11360, 14245, 14856, 14758, 13310, 14676, 12681, 11917, 15018, 12183, 13263, 13681, 14529, 12440, 15073, + 15030, 11081, 14591, 11958, 15136, 13898, 14443, 13995, 14057, 10504, 13954, 13927, 12744, 14617, 14437, 15311, + 15004, 7602, 15221, 15353, 14844, 14487, 15278, 14855, 13187, 11666, 12805, 12928, 12768, 14658, 7348, 13047, + 11651, 12457, 14692, 12098, 14889, 13845, 
14825, 15299, 14087, 12794, 15135, 13823, 14469, 14422, 13172, 13470, + 14456, 14897, 14513, 12444, 15330, 13965, 13733, 14928, 13571, 12734, 14482, 14512, 15152, 13734, 12670, 14767, + 12819, 12825, 14931, 13675, 13686, 12069, 13955, 15206, 14125, 13530, 14171, 11805, 14993, 14774, 15087, 14289, + 14288, 11321, 14504, 15098, 13589, 13506, 15000, 14225, 14552, 10865, 13645, 10572, 14811, 13932, 14066, 14928, + 13085, 13980, 10430, 15270, 13851, 14104, 11861, 12539, 13621, 15241, 15220, 9377, 15000, 14933, 11082, 13061, + 14605, 12090, 11294, 14511, 13747, 14102, 14932, 15220, 12548, 14598, 13892, 15322, 15107, 11542, 15160, 10795, + 15081, 14780, 13869, 14676, 12543, 15249, 14737, 13867, 14420, 14356, 15208, 15003, 13838, 14914, 13949, 15354, + 13796, 15292, 13661, 14913, 14461, 14694, 12464, 13470, 14888, 11147, 15148, 12601, 14848, 14154, 10547, 15327, + 14875, 14520, 14455, 13110, 15166, 13329, 15242, 12645, 14231, 9729, 13808, 14604, 12699, 14319, 14690, 14702, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12899, 14145, 11482, 14941, 14311, 11793, 15308, 13356, 15285, 14843, 14161, 14144, 14181, 13929, 15262, 15282, + 13569, 14586, 13484, 14018, 14595, 12157, 11768, 14595, 15219, 13311, 15300, 14819, 15228, 5703, 15260, 14424, + 13975, 14023, 15247, 11991, 12462, 15047, 14329, 14918, 14216, 14559, 11484, 14921, 14635, 14301, 13155, 14491, + 15118, 14144, 12558, 14257, 12490, 13843, 15011, 13518, 15244, 14844, 14244, 11510, 14862, 10261, 13492, 8340, + 12616, 14608, 14084, 14340, 13877, 14281, 14336, 10026, 12765, 15152, 13970, 14958, 15249, 14512, 13765, 13705, + 14586, 12913, 14076, 15062, 14704, 14657, 13504, 11789, 13853, 12733, 15155, 14505, 10599, 12417, 13781, 14709, + 14560, 12499, 12124, 13801, 13898, 14684, 9283, 13609, 13356, 14633, 14525, 14352, 14154, 14939, 14956, 15259, + 12805, 12921, 14092, 14138, 14778, 11760, 14176, 12487, 14835, 13389, 15044, 13351, 13906, 14751, 11136, 12328, + 14690, 15177, 13000, 13450, 14235, 12819, 14293, 11417, 
11860, 13690, 13293, 14079, 14799, 15218, 14458, 13535, + 12326, 14279, 14893, 11920, 14464, 9670, 15145, 11534, 12507, 12900, 14952, 15346, 13570, 10871, 10659, 12697, + 15218, 14874, 13795, 11730, 14170, 14592, 14551, 15168, 12243, 14472, 13646, 14970, 14591, 14193, 15329, 13948, + 14947, 12265, 11429, 12986, 14938, 11598, 14570, 14835, 14436, 14768, 13390, 14957, 13054, 14693, 15213, 13255, + 14467, 4195, 12705, 14166, 14194, 15188, 14352, 15112, 13565, 12581, 14909, 12447, 13551, 12369, 12856, 13661, + 14803, 12494, 14553, 11957, 15300, 10307, 11859, 13746, 15120, 11608, 14829, 15028, 14252, 12338, 13494, 14231, + 12999, 14335, 14031, 12662, 14483, 15057, 13938, 8440, 14639, 14175, 15089, 10597, 13834, 15083, 15312, 14730, + 15270, 15034, 10214, 14149, 12903, 13272, 10594, 15231, 13343, 14936, 14773, 15295, 12305, 14615, 13113, 7949, + 14883, 14394, 15307, 13544, 14360, 14421, 14897, 14917, 11127, 12531, 14685, 12592, 15233, 15031, 14239, 15016, + 15320, 15334, 14753, 12701, 14847, 15179, 14850, 14042, 13791, 14559, 11986, 15197, 14747, 15275, 13160, 14448, + 14344, 15001, 14447, 13626, 14376, 13635, 13367, 13908, 14136, 11662, 13239, 15069, 11325, 14682, 9847, 14223, + 14973, 15085, 15009, 14663, 10946, 12595, 13998, 9601, 14156, 11295, 14518, 12537, 10824, 14509, 14633, 12603, + 14325, 14560, 12694, 11881, 15345, 15215, 12140, 14573, 14656, 13104, 14137, 14680, 14727, 14107, 14586, 14857, + 11186, 15053, 14947, 15277, 13652, 13831, 12764, 14462, 14954, 15013, 14735, 14759, 14923, 8551, 14570, 15358, + 14816, 14734, 14971, 15208, 12952, 12846, 14985, 14813, 8634, 15070, 12693, 14531, 7817, 14175, 15264, 14279, + 6596, 14706, 14619, 14492, 10603, 13054, 13316, 14601, 14544, 12521, 12599, 14599, 14708, 13769, 14922, 14526, + 13172, 13819, 10934, 14303, 14878, 8124, 13354, 14921, 14543, 13798, 15034, 14823, 12955, 14768, 12814, 15046, + 12731, 11502, 15217, 14096, 15056, 13701, 14933, 13839, 13640, 13472, 13642, 15028, 15075, 14471, 14484, 13619, + 14529, 14757, 
14998, 13596, 14590, 13665, 15055, 13691, 14136, 14868, 13456, 13010, 13747, 13788, 14739, 14456, + 14842, 15239, 10946, 14313, 15197, 12329, 13187, 15178, 14432, 13369, 14970, 13321, 12521, 14943, 11130, 14829, + 14687, 14478, 14998, 14565, 14567, 13773, 15297, 13552, 14613, 13802, 15311, 10983, 14762, 14291, 8820, 15110, + 15056, 14468, 8369, 14950, 15161, 11667, 14952, 15068, 13913, 12478, 13105, 13826, 12650, 13910, 13450, 13828, + 14882, 14228, 14588, 14683, 15289, 14730, 13633, 14851, 12290, 14166, 10686, 14612, 14558, 14602, 11166, 11270, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15129, 12633, 12271, 15335, 14478, 13558, 14348, 12826, 11074, 14488, 14704, 8808, 13510, 12700, 15345, 13496, + 15207, 14772, 14952, 13082, 14483, 14744, 13120, 13042, 12760, 13895, 13191, 13038, 12863, 15248, 14447, 15060, + 12681, 9499, 14706, 13362, 14831, 13935, 10062, 14542, 14510, 15345, 13422, 14830, 12740, 12078, 13737, 13762, + 14374, 14772, 14502, 12734, 13597, 14395, 14447, 14932, 15197, 14258, 12867, 15233, 13123, 13901, 14823, 14414, + 12598, 12265, 13458, 13322, 15095, 12678, 14753, 12056, 14876, 14289, 11887, 11132, 14634, 14333, 14876, 14487, + 15255, 13232, 9419, 13113, 15125, 13709, 8430, 14475, 14072, 14976, 13150, 13565, 11062, 15349, 13720, 15089, + 14169, 14668, 14598, 14397, 14420, 14360, 13381, 12958, 13602, 11956, 13638, 15086, 14589, 14067, 11622, 14921, + 13278, 14506, 15108, 11403, 11014, 14973, 12967, 11848, 13624, 14924, 15303, 15065, 11164, 14361, 13992, 14433, + 14085, 13432, 13679, 12375, 14695, 14953, 12370, 12341, 15181, 14370, 13683, 13684, 14325, 13300, 13569, 14682, + 14868, 14656, 14537, 15188, 9057, 13714, 14465, 14510, 12882, 13514, 15251, 14690, 14821, 15016, 14776, 14634, + 14755, 14577, 14514, 14490, 14143, 12098, 15029, 14748, 14516, 14139, 9531, 13087, 15199, 14901, 12515, 15335, + 11315, 15279, 15326, 13345, 13394, 14553, 10464, 13564, 15067, 13190, 15029, 13106, 12338, 15167, 14579, 12368, + 14676, 14490, 14064, 15245, 
14118, 14384, 15154, 14798, 15267, 13168, 14834, 14392, 14985, 13895, 13405, 15033, + 14648, 13778, 12922, 9520, 13830, 14078, 13337, 15191, 14099, 14020, 15230, 13925, 14606, 14374, 7937, 15081, + 14374, 15336, 8978, 14746, 14837, 12443, 14773, 11460, 14236, 8503, 14702, 14511, 13591, 13645, 12096, 14453, + 13910, 14809, 9859, 11043, 15091, 12568, 13063, 12664, 12483, 12531, 13754, 15352, 15089, 13728, 11477, 12352, + 12859, 8557, 13822, 13402, 13676, 10691, 15003, 11804, 14072, 11966, 15007, 13977, 14454, 14459, 11890, 14098, + 13689, 14974, 11621, 11774, 12809, 14080, 14352, 12959, 13544, 14411, 12972, 15266, 14629, 15013, 15166, 14163, + 13829, 14381, 14666, 15249, 14878, 14485, 15023, 11462, 14618, 15282, 9025, 10056, 12482, 14399, 11935, 14529, + 15329, 10444, 10458, 15164, 14283, 15047, 15122, 15061, 15336, 13797, 15325, 14232, 15121, 13841, 14858, 14648, + 15091, 12648, 14668, 15276, 10408, 10465, 15050, 14160, 13669, 14655, 15008, 15149, 15034, 14635, 15302, 13467, + 15279, 15145, 13339, 15047, 8492, 13484, 14358, 14790, 14121, 15256, 14492, 14495, 14379, 13603, 15282, 13101, + 14199, 15169, 14894, 14209, 14965, 15284, 14068, 9544, 13332, 15286, 14710, 11613, 15354, 13708, 13892, 13712, + 13578, 14036, 14619, 14832, 12956, 14467, 13883, 11593, 13838, 15191, 14717, 12866, 13553, 11320, 13374, 13573, + 9403, 15343, 13873, 14781, 9553, 13937, 12206, 15322, 14692, 14836, 12846, 14908, 15352, 14397, 14597, 14143, + 14374, 15102, 13916, 14190, 13450, 15129, 14920, 11822, 15233, 14898, 14593, 12774, 14934, 14347, 14450, 15266, + 15297, 9337, 14273, 15174, 9064, 14082, 15048, 15183, 14733, 12449, 14415, 14596, 13726, 15278, 14017, 14580, + 15231, 13321, 13312, 14997, 13352, 15232, 14729, 15251, 13066, 15096, 13439, 13541, 15199, 13725, 14965, 15104, + 14034, 9564, 12353, 14342, 11883, 13095, 12822, 14708, 15247, 14645, 13155, 14587, 14819, 13913, 14674, 13300, + 8992, 14810, 14241, 14566, 14986, 14578, 15054, 15239, 14988, 14905, 14537, 13648, 14056, 14587, 14932, 
12516, + 15270, 15241, 14960, 11623, 12726, 14406, 14502, 13488, 14570, 10964, 13249, 10442, 12306, 13676, 15122, 13435, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14065, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14792, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13601, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14208, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11756, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14946, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14318, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11157, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12292, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14480, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12208, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11920, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12946, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14558, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14744, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12591, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15067, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14394, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14172, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14355, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12605, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14499, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14809, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12525, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13863, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12424, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 14831, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15236, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12827, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14985, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13782, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15242, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13631, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15262, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10437, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14845, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13724, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12354, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13559, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14780, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15227, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14242, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15346, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12389, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15251, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 4533, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12679, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13779, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11953, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13642, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15014, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9458, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14503, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13700, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13921, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14515, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
15334, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14963, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13778, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14597, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14753, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14499, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13995, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12477, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13580, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14033, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15062, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13503, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13305, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13398, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14383, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13644, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12744, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14881, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14777, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10585, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12692, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14319, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15163, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9218, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14070, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15055, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14216, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14930, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15247, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10914, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13413, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14450, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13480, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15323, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14227, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14773, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14973, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12545, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14904, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15003, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14892, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14736, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14119, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13097, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15127, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15095, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13293, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15347, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14294, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12041, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13481, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15168, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13302, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15192, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11879, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14714, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15251, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13897, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14687, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14978, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12990, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14778, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14613, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6439, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13982, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12487, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14793, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13660, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14699, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12814, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13592, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13918, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13652, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13212, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15048, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14635, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14587, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13791, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14928, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14768, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15199, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15027, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14549, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13263, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14210, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15165, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15096, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14993, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10874, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11939, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14439, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14445, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15124, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15356, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12379, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13445, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12820, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13125, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11619, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14563, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14851, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9072, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13588, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13374, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10685, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14581, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12774, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14392, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13811, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13815, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14784, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14839, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12029, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14362, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14537, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13695, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14451, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15068, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15245, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14640, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15348, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14015, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15117, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 13956, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15044, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14952, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14970, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13493, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15299, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14531, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13929, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14481, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14592, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14656, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14377, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14961, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15317, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15227, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14827, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13759, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12358, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14905, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14642, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14787, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14237, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14005, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13770, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12943, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12976, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14302, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14655, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 14586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14298, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13275, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11610, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14687, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13896, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14496, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13793, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14384, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13852, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13732, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13695, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13396, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11654, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14856, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14742, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13681, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11598, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14549, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15065, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13787, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14229, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9833, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13955, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13531, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13912, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14542, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15189, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14830, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13402, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13495, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13538, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, + 15002, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15010, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13429, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12975, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13624, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12418, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14493, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12925, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14323, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14383, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14160, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14565, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15275, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12711, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11424, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14569, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14745, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13329, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11945, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14448, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14916, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14338, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14590, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12547, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14792, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14919, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14632, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14128, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13529, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14307, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15314, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11927, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, + 13193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12913, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14072, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14846, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14363, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13458, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11316, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15006, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13998, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12457, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14080, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14737, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13997, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14464, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13012, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13586, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14871, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12206, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14005, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12415, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14625, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15078, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13941, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13567, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15349, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12954, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13139, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13474, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10893, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10037, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15178, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15217, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14853, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 14891, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14071, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12289, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14341, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11465, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12509, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15241, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14062, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14753, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15099, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14075, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12822, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10563, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12400, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14575, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14055, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15011, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10473, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13115, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11919, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14798, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13877, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14093, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14333, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12934, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14526, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13663, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13669, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14892, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15126, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
14958, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14436, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13436, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12725, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13754, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14161, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11444, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15262, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14926, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14950, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14180, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10566, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15117, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12727, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14729, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14351, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13756, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15267, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14900, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14541, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15250, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14757, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15132, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12011, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13651, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14624, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14063, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15068, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13835, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11489, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11410, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14745, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10842, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 
14661, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14857, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14987, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13569, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12763, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14932, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14851, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13511, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13677, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14543, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14605, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15036, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13315, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14657, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14617, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14875, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14488, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13788, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14421, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15000, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15170, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14389, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14577, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15337, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15245, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14764, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13328, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14910, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13327, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13381, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11468, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11271, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12719, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15023, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14764, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14374, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13085, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14873, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13749, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14737, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14765, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14897, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13562, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11925, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14867, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14287, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14233, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15094, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14490, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11202, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14407, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13738, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13849, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14813, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11320, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14512, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13608, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14363, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11696, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 9268, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14793, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12674, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13757, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14622, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({31, 17, 4, 4}), + std::vector({32, 2, 16, 16}), DT_FLOAT16}; + + TransResult result; + 
EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, 32 * 2 * 16 * 16 * 2); + for (int i = 0; i < 32 * 2 * 16 * 16; ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, int8_1) { + uint8_t data[1 * 1 * 1 * 1] = {70}; + uint8_t ret[1 * 1 * 16 * 32] = { + 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), 
FORMAT_NCHW, FORMAT_FRACTAL_Z, {1, 1, 1, 1}, {1, 1, 16, 32}, DT_INT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, uint8_2c_2n) { + uint8_t data[32 * 64 * 3 * 3] = { + 116, 236, 243, 202, 173, 49, 65, 195, 129, 147, 208, 52, 95, 22, 14, 0, 158, 123, 48, 214, 35, 238, 226, + 165, 2, 152, 179, 65, 128, 243, 151, 3, 125, 102, 201, 0, 40, 143, 164, 240, 58, 38, 208, 47, 98, 100, + 174, 44, 73, 56, 234, 197, 184, 44, 194, 8, 25, 56, 21, 212, 78, 69, 123, 33, 97, 232, 197, 162, 177, + 89, 214, 158, 93, 20, 117, 48, 95, 252, 253, 244, 153, 130, 131, 63, 28, 138, 102, 148, 196, 214, 44, 92, + 184, 205, 227, 94, 54, 120, 212, 108, 3, 188, 164, 193, 192, 65, 178, 15, 251, 111, 186, 189, 219, 125, 210, + 30, 1, 49, 89, 141, 253, 167, 223, 121, 13, 161, 249, 133, 41, 254, 51, 42, 163, 46, 203, 225, 46, 92, + 28, 62, 106, 68, 232, 21, 210, 128, 190, 23, 109, 116, 98, 124, 203, 23, 3, 119, 254, 214, 50, 184, 168, + 223, 163, 208, 45, 106, 247, 202, 25, 100, 152, 80, 226, 149, 178, 136, 37, 28, 199, 53, 144, 19, 232, 201, + 218, 223, 13, 160, 213, 249, 55, 108, 96, 199, 33, 61, 157, 54, 202, 80, 28, 233, 104, 125, 167, 0, 155, + 157, 233, 95, 111, 15, 225, 2, 95, 166, 185, 205, 203, 135, 51, 40, 192, 21, 203, 38, 129, 233, 73, 184, + 145, 206, 45, 126, 167, 170, 77, 202, 98, 180, 59, 231, 130, 187, 242, 199, 40, 223, 159, 220, 206, 147, 87, + 249, 251, 189, 122, 84, 183, 31, 66, 122, 159, 249, 47, 179, 81, 88, 77, 13, 245, 122, 62, 146, 211, 198, + 145, 149, 165, 235, 132, 96, 229, 210, 28, 207, 208, 29, 197, 214, 131, 27, 2, 217, 212, 51, 233, 27, 44, + 226, 210, 139, 68, 24, 186, 9, 53, 114, 44, 173, 13, 141, 85, 60, 15, 156, 78, 197, 64, 183, 204, 217, + 46, 200, 7, 160, 132, 173, 108, 251, 110, 4, 222, 40, 254, 99, 92, 
156, 5, 69, 84, 243, 4, 156, 26, + 20, 124, 0, 251, 212, 241, 151, 153, 48, 193, 249, 70, 48, 32, 214, 101, 242, 38, 21, 248, 57, 124, 161, + 104, 26, 143, 217, 25, 107, 200, 194, 191, 241, 36, 211, 16, 164, 114, 181, 106, 59, 176, 201, 199, 231, 72, + 162, 98, 158, 10, 136, 42, 192, 32, 242, 32, 162, 169, 52, 246, 174, 221, 223, 95, 162, 185, 245, 29, 129, + 140, 187, 239, 30, 193, 130, 119, 2, 39, 251, 227, 103, 94, 253, 151, 71, 208, 251, 220, 56, 74, 79, 23, + 192, 42, 56, 65, 73, 246, 29, 195, 78, 58, 120, 203, 165, 163, 201, 198, 217, 163, 116, 78, 74, 7, 195, + 198, 90, 88, 123, 71, 128, 80, 141, 24, 157, 159, 55, 250, 149, 184, 38, 139, 86, 32, 191, 24, 204, 82, + 116, 195, 146, 206, 141, 103, 221, 44, 76, 230, 13, 110, 0, 184, 186, 48, 130, 232, 34, 190, 119, 48, 167, + 116, 149, 199, 66, 22, 237, 68, 79, 194, 217, 110, 97, 32, 21, 207, 110, 23, 1, 191, 224, 97, 127, 166, + 204, 49, 223, 142, 92, 158, 199, 6, 34, 211, 73, 72, 173, 74, 30, 103, 62, 229, 192, 30, 102, 57, 222, + 212, 118, 132, 212, 190, 126, 181, 149, 158, 223, 29, 132, 167, 156, 48, 85, 246, 217, 88, 124, 131, 159, 237, + 252, 60, 81, 54, 71, 76, 82, 242, 78, 167, 192, 250, 5, 142, 9, 27, 168, 246, 75, 171, 200, 128, 119, + 49, 241, 101, 4, 222, 182, 23, 154, 39, 215, 108, 79, 40, 182, 79, 79, 49, 20, 69, 240, 105, 246, 215, + 111, 36, 108, 192, 69, 223, 179, 205, 100, 136, 13, 160, 93, 91, 132, 98, 39, 69, 246, 141, 237, 135, 55, + 151, 51, 153, 39, 129, 139, 59, 67, 90, 197, 231, 47, 129, 40, 241, 165, 191, 131, 18, 95, 147, 135, 49, + 228, 186, 84, 161, 178, 64, 123, 229, 28, 71, 112, 139, 195, 21, 99, 106, 31, 118, 101, 121, 7, 91, 128, + 136, 124, 147, 8, 241, 227, 130, 39, 151, 123, 234, 239, 240, 232, 211, 110, 176, 32, 91, 153, 230, 99, 53, + 174, 105, 231, 236, 115, 146, 97, 87, 172, 25, 204, 221, 103, 23, 208, 83, 125, 81, 87, 18, 233, 126, 188, + 244, 102, 140, 120, 37, 68, 128, 36, 177, 23, 236, 65, 70, 217, 217, 51, 189, 170, 99, 242, 77, 187, 153, + 85, 70, 111, 176, 94, 108, 
100, 17, 67, 226, 89, 50, 201, 87, 106, 210, 234, 156, 51, 88, 9, 204, 4, + 132, 196, 163, 144, 232, 14, 49, 124, 77, 230, 81, 146, 128, 112, 226, 2, 230, 72, 106, 254, 126, 130, 191, + 2, 104, 232, 136, 73, 202, 174, 183, 32, 174, 27, 185, 186, 88, 173, 165, 62, 214, 23, 166, 35, 27, 9, + 106, 104, 144, 133, 112, 231, 191, 10, 8, 169, 204, 70, 149, 175, 2, 116, 26, 142, 44, 127, 123, 58, 56, + 203, 215, 226, 196, 81, 244, 147, 68, 74, 79, 122, 230, 79, 132, 121, 133, 93, 198, 240, 12, 222, 134, 141, + 84, 46, 2, 164, 39, 173, 159, 219, 24, 200, 0, 78, 41, 113, 82, 168, 214, 112, 237, 253, 149, 163, 119, + 244, 99, 218, 150, 153, 182, 109, 134, 87, 156, 175, 133, 72, 158, 166, 146, 181, 38, 135, 12, 138, 217, 157, + 187, 103, 200, 238, 239, 46, 26, 127, 20, 205, 101, 143, 79, 51, 221, 90, 204, 10, 216, 175, 95, 17, 78, + 63, 152, 111, 108, 168, 118, 150, 208, 194, 183, 23, 73, 36, 48, 11, 183, 126, 169, 222, 112, 189, 185, 157, + 59, 130, 174, 219, 216, 175, 140, 192, 69, 141, 224, 79, 32, 12, 44, 190, 199, 189, 76, 26, 103, 9, 43, + 22, 80, 218, 135, 235, 101, 253, 75, 154, 32, 216, 185, 145, 38, 243, 189, 234, 154, 180, 36, 130, 152, 138, + 22, 236, 147, 71, 135, 205, 76, 118, 25, 3, 178, 252, 222, 207, 131, 207, 181, 249, 50, 232, 51, 111, 213, + 43, 126, 43, 65, 193, 68, 214, 12, 76, 116, 135, 118, 187, 233, 123, 75, 58, 36, 251, 46, 184, 119, 105, + 226, 182, 103, 185, 57, 219, 175, 240, 251, 2, 193, 44, 173, 166, 45, 140, 185, 90, 130, 45, 110, 225, 54, + 3, 94, 175, 208, 154, 150, 146, 12, 37, 89, 253, 71, 171, 191, 30, 139, 121, 94, 51, 16, 110, 100, 72, + 27, 87, 199, 230, 73, 95, 202, 79, 90, 20, 55, 181, 157, 180, 11, 116, 238, 236, 51, 24, 251, 119, 211, + 210, 40, 107, 186, 66, 43, 148, 37, 27, 182, 217, 65, 235, 20, 9, 54, 166, 99, 48, 182, 254, 149, 212, + 188, 55, 124, 50, 163, 177, 2, 109, 98, 246, 194, 16, 250, 203, 181, 0, 31, 183, 151, 222, 168, 145, 44, + 73, 109, 28, 75, 221, 205, 106, 56, 254, 24, 25, 182, 152, 216, 45, 31, 163, 185, 134, 
241, 168, 236, 74, + 212, 152, 172, 4, 206, 72, 252, 213, 156, 241, 153, 204, 205, 95, 116, 212, 237, 132, 125, 29, 166, 127, 144, + 170, 28, 183, 12, 221, 217, 9, 129, 187, 41, 112, 29, 192, 40, 40, 162, 125, 157, 32, 191, 17, 215, 57, + 55, 101, 143, 130, 160, 213, 139, 6, 24, 3, 227, 193, 142, 228, 73, 86, 205, 184, 46, 187, 95, 85, 23, + 34, 136, 76, 246, 225, 13, 26, 138, 191, 143, 183, 98, 178, 195, 172, 92, 115, 244, 239, 6, 113, 101, 74, + 237, 75, 184, 186, 36, 43, 178, 244, 150, 16, 68, 123, 46, 94, 206, 139, 153, 144, 107, 11, 139, 76, 181, + 129, 38, 225, 250, 207, 174, 181, 129, 46, 103, 145, 219, 121, 124, 167, 102, 205, 115, 148, 30, 179, 219, 144, + 209, 99, 40, 186, 92, 214, 17, 184, 36, 0, 154, 188, 100, 131, 223, 47, 110, 31, 2, 58, 210, 212, 139, + 161, 184, 48, 168, 222, 241, 15, 247, 23, 21, 3, 36, 120, 235, 50, 125, 45, 187, 187, 78, 13, 176, 123, + 94, 118, 134, 35, 143, 85, 129, 82, 129, 218, 21, 93, 71, 81, 151, 10, 68, 21, 172, 136, 146, 230, 230, + 206, 144, 158, 204, 207, 101, 65, 30, 29, 33, 246, 131, 143, 254, 12, 74, 29, 193, 251, 185, 118, 107, 143, + 107, 104, 124, 32, 109, 245, 150, 239, 106, 81, 89, 158, 181, 159, 87, 99, 112, 60, 126, 46, 204, 143, 15, + 93, 252, 62, 191, 159, 155, 232, 92, 171, 31, 30, 101, 74, 25, 94, 174, 58, 162, 166, 250, 167, 127, 23, + 203, 87, 250, 145, 60, 12, 169, 50, 174, 96, 87, 21, 114, 37, 50, 73, 131, 198, 129, 154, 187, 127, 191, + 145, 105, 65, 71, 61, 48, 45, 99, 136, 145, 240, 74, 55, 8, 199, 109, 168, 101, 252, 61, 4, 163, 57, + 26, 147, 182, 240, 87, 203, 209, 71, 123, 23, 196, 72, 246, 198, 74, 170, 40, 37, 205, 178, 190, 24, 40, + 25, 44, 224, 206, 62, 57, 27, 75, 188, 212, 246, 245, 240, 86, 65, 56, 160, 89, 169, 246, 227, 72, 3, + 51, 250, 31, 126, 78, 139, 185, 59, 230, 133, 193, 215, 224, 76, 235, 12, 64, 124, 176, 174, 173, 125, 113, + 61, 236, 43, 215, 177, 31, 175, 139, 8, 243, 76, 87, 150, 173, 229, 101, 81, 201, 71, 98, 165, 244, 137, + 89, 155, 18, 183, 59, 104, 216, 215, 13, 83, 
85, 76, 80, 179, 204, 176, 150, 162, 133, 106, 123, 207, 213, + 105, 161, 243, 46, 179, 201, 82, 63, 62, 230, 226, 178, 110, 13, 147, 188, 164, 203, 10, 232, 71, 77, 179, + 39, 94, 100, 194, 81, 48, 216, 190, 242, 244, 60, 116, 150, 69, 101, 156, 231, 91, 100, 243, 236, 106, 65, + 146, 20, 126, 19, 55, 21, 221, 45, 245, 155, 233, 218, 161, 250, 138, 193, 177, 53, 146, 230, 204, 163, 144, + 148, 85, 91, 144, 227, 138, 71, 54, 165, 36, 117, 30, 149, 54, 87, 198, 177, 246, 252, 36, 63, 134, 209, + 144, 190, 111, 24, 145, 44, 158, 1, 117, 224, 223, 155, 147, 74, 10, 174, 98, 244, 29, 9, 138, 141, 93, + 27, 236, 175, 187, 131, 86, 161, 177, 153, 153, 189, 228, 132, 1, 164, 190, 14, 117, 56, 215, 68, 103, 3, + 190, 201, 127, 112, 48, 207, 36, 33, 79, 36, 194, 138, 185, 48, 72, 111, 90, 0, 148, 167, 123, 49, 164, + 86, 178, 40, 178, 66, 2, 197, 196, 108, 105, 118, 191, 205, 33, 172, 208, 162, 74, 174, 47, 166, 87, 205, + 6, 215, 68, 20, 157, 125, 241, 74, 31, 239, 161, 219, 143, 118, 29, 111, 44, 96, 23, 16, 245, 233, 164, + 201, 59, 221, 115, 238, 146, 9, 98, 36, 202, 64, 25, 144, 93, 157, 57, 192, 125, 200, 96, 137, 154, 80, + 112, 117, 254, 246, 159, 98, 180, 45, 101, 145, 189, 128, 199, 216, 176, 194, 238, 33, 4, 203, 226, 140, 50, + 196, 57, 249, 66, 229, 60, 46, 98, 202, 230, 65, 234, 169, 212, 10, 0, 68, 99, 171, 68, 1, 91, 81, + 89, 166, 209, 23, 245, 241, 150, 62, 65, 75, 183, 151, 55, 94, 160, 37, 200, 9, 111, 15, 235, 223, 179, + 200, 120, 53, 90, 145, 106, 183, 195, 37, 209, 44, 237, 77, 223, 144, 232, 86, 233, 73, 224, 60, 16, 234, + 254, 12, 45, 200, 208, 76, 13, 41, 97, 65, 140, 231, 2, 223, 224, 153, 182, 210, 54, 1, 95, 198, 204, + 232, 239, 250, 161, 204, 250, 123, 115, 53, 46, 113, 68, 0, 249, 178, 191, 48, 122, 85, 130, 154, 231, 29, + 245, 184, 184, 237, 225, 89, 22, 168, 105, 85, 162, 18, 100, 242, 39, 175, 8, 75, 202, 77, 210, 20, 124, + 70, 229, 78, 179, 6, 139, 243, 83, 92, 144, 114, 188, 67, 231, 246, 136, 220, 176, 67, 56, 186, 28, 216, + 126, 
233, 104, 197, 178, 77, 204, 220, 81, 29, 124, 198, 158, 123, 175, 190, 219, 57, 47, 209, 162, 224, 216, + 121, 161, 24, 79, 48, 189, 164, 239, 232, 191, 193, 97, 21, 86, 35, 219, 236, 86, 161, 27, 119, 224, 144, + 34, 179, 250, 52, 242, 223, 102, 185, 84, 163, 245, 178, 218, 158, 124, 68, 57, 190, 9, 79, 171, 1, 45, + 223, 100, 151, 249, 205, 37, 235, 224, 135, 81, 119, 209, 183, 68, 158, 179, 30, 121, 202, 74, 206, 115, 250, + 147, 245, 182, 33, 55, 152, 46, 139, 166, 154, 186, 69, 227, 229, 85, 218, 178, 172, 110, 120, 225, 69, 212, + 101, 15, 19, 91, 228, 89, 0, 195, 62, 230, 2, 136, 143, 122, 86, 71, 108, 32, 254, 21, 106, 247, 65, + 195, 45, 225, 248, 105, 30, 225, 127, 227, 41, 232, 27, 254, 162, 104, 227, 97, 187, 227, 144, 18, 225, 6, + 242, 189, 45, 114, 153, 48, 165, 152, 174, 87, 223, 122, 176, 35, 245, 221, 195, 186, 98, 185, 119, 21, 133, + 43, 193, 113, 37, 239, 79, 97, 36, 199, 1, 253, 185, 233, 167, 199, 102, 77, 85, 151, 153, 165, 161, 112, + 246, 77, 232, 210, 82, 119, 164, 121, 235, 251, 192, 137, 194, 45, 79, 117, 8, 20, 9, 4, 71, 26, 168, + 191, 251, 126, 152, 114, 133, 24, 137, 9, 222, 189, 24, 236, 158, 192, 108, 251, 206, 190, 134, 235, 216, 219, + 116, 70, 48, 116, 136, 182, 253, 177, 59, 161, 64, 38, 31, 248, 217, 134, 17, 105, 24, 194, 243, 135, 93, + 56, 148, 203, 7, 242, 53, 28, 171, 254, 213, 38, 29, 22, 153, 156, 127, 216, 225, 28, 16, 40, 186, 146, + 46, 64, 15, 175, 213, 9, 97, 55, 82, 103, 133, 27, 167, 109, 3, 56, 195, 178, 214, 127, 52, 198, 118, + 28, 186, 49, 209, 239, 176, 211, 205, 73, 132, 111, 215, 99, 230, 85, 39, 226, 28, 222, 252, 32, 60, 155, + 92, 1, 26, 70, 157, 224, 186, 183, 247, 142, 11, 127, 254, 65, 86, 197, 98, 251, 17, 241, 99, 215, 159, + 140, 33, 252, 190, 201, 142, 33, 134, 106, 249, 91, 11, 102, 161, 221, 91, 188, 78, 127, 204, 95, 0, 249, + 159, 49, 22, 107, 180, 138, 41, 245, 145, 4, 108, 219, 32, 243, 186, 137, 104, 237, 194, 160, 171, 72, 239, + 72, 176, 10, 41, 151, 234, 74, 7, 161, 145, 203, 72, 42, 
228, 162, 134, 206, 252, 18, 14, 202, 67, 55, + 185, 59, 122, 218, 1, 185, 45, 97, 181, 13, 89, 9, 8, 206, 226, 112, 2, 130, 51, 192, 105, 200, 47, + 79, 235, 146, 78, 205, 116, 21, 113, 186, 227, 32, 172, 168, 229, 74, 115, 136, 212, 218, 198, 17, 15, 238, + 118, 182, 85, 91, 143, 234, 187, 130, 237, 90, 104, 221, 97, 164, 183, 33, 99, 45, 233, 228, 89, 18, 177, + 205, 190, 119, 33, 49, 136, 1, 196, 97, 95, 33, 69, 244, 108, 58, 158, 156, 56, 170, 98, 88, 8, 75, + 186, 157, 240, 231, 145, 204, 142, 122, 160, 167, 144, 172, 251, 214, 22, 225, 8, 89, 243, 153, 204, 128, 193, + 187, 84, 231, 9, 55, 196, 198, 14, 54, 106, 121, 95, 231, 229, 48, 97, 90, 113, 124, 250, 126, 4, 111, + 211, 171, 22, 121, 42, 151, 151, 217, 51, 248, 69, 161, 245, 33, 119, 34, 54, 129, 46, 39, 42, 157, 145, + 219, 125, 194, 201, 169, 64, 106, 64, 17, 101, 138, 89, 148, 63, 2, 20, 202, 24, 109, 42, 236, 57, 95, + 250, 53, 41, 163, 5, 67, 194, 128, 253, 76, 224, 216, 100, 187, 32, 34, 6, 87, 196, 201, 151, 193, 133, + 65, 144, 53, 198, 47, 84, 9, 41, 154, 128, 169, 58, 158, 238, 178, 96, 172, 215, 37, 171, 28, 62, 9, + 107, 198, 21, 208, 157, 31, 243, 115, 240, 49, 172, 114, 235, 12, 23, 197, 231, 8, 54, 14, 16, 114, 108, + 52, 221, 155, 140, 33, 204, 132, 100, 106, 220, 130, 113, 168, 44, 193, 3, 66, 250, 79, 232, 54, 20, 108, + 83, 176, 133, 130, 236, 6, 216, 161, 101, 108, 86, 127, 98, 179, 22, 225, 210, 111, 86, 216, 66, 145, 190, + 248, 114, 117, 124, 154, 34, 133, 155, 1, 203, 23, 180, 84, 225, 140, 7, 25, 63, 64, 180, 190, 110, 215, + 181, 1, 98, 142, 183, 222, 90, 218, 114, 175, 152, 116, 34, 248, 80, 195, 54, 169, 183, 23, 115, 55, 212, + 124, 29, 211, 117, 162, 65, 111, 203, 87, 109, 176, 223, 20, 153, 249, 38, 138, 72, 104, 162, 243, 73, 52, + 116, 114, 12, 82, 184, 247, 70, 82, 196, 219, 143, 17, 200, 219, 79, 95, 18, 154, 225, 125, 86, 201, 73, + 194, 249, 37, 54, 150, 141, 35, 179, 7, 19, 13, 10, 30, 98, 224, 239, 98, 15, 159, 170, 245, 149, 39, + 6, 162, 83, 123, 104, 211, 25, 
245, 249, 153, 175, 254, 181, 254, 167, 53, 86, 110, 111, 204, 5, 230, 230, + 181, 74, 223, 85, 123, 99, 118, 221, 52, 106, 214, 209, 163, 12, 143, 174, 194, 214, 17, 201, 200, 119, 87, + 112, 128, 27, 130, 220, 7, 209, 29, 164, 6, 144, 4, 225, 9, 240, 106, 108, 50, 206, 32, 123, 141, 58, + 79, 173, 180, 225, 51, 179, 177, 248, 44, 44, 55, 172, 146, 176, 166, 26, 114, 231, 79, 113, 56, 175, 149, + 213, 215, 161, 47, 149, 221, 9, 22, 239, 155, 192, 91, 9, 46, 145, 164, 248, 70, 220, 150, 62, 104, 203, + 254, 50, 123, 108, 175, 68, 54, 178, 28, 101, 189, 13, 30, 79, 65, 235, 143, 40, 54, 149, 213, 14, 107, + 103, 11, 130, 24, 174, 129, 128, 96, 137, 210, 189, 138, 222, 19, 200, 17, 188, 150, 16, 67, 28, 232, 240, + 50, 54, 86, 117, 0, 128, 163, 70, 14, 67, 39, 100, 44, 42, 24, 136, 10, 42, 237, 79, 252, 98, 233, + 245, 63, 34, 80, 172, 83, 18, 58, 32, 10, 148, 137, 12, 47, 81, 62, 76, 250, 68, 230, 90, 18, 139, + 185, 158, 7, 236, 225, 54, 50, 237, 53, 85, 48, 29, 175, 235, 76, 199, 127, 67, 123, 180, 193, 49, 218, + 83, 239, 72, 24, 174, 200, 165, 39, 169, 97, 192, 42, 20, 58, 93, 124, 253, 40, 202, 72, 39, 88, 108, + 221, 171, 196, 245, 238, 184, 209, 35, 98, 111, 218, 196, 211, 65, 44, 239, 51, 225, 137, 87, 226, 141, 186, + 225, 68, 163, 33, 152, 77, 37, 120, 236, 102, 134, 30, 252, 170, 51, 10, 36, 122, 214, 51, 162, 159, 178, + 164, 15, 137, 60, 248, 195, 115, 251, 102, 149, 90, 210, 151, 222, 175, 60, 189, 182, 179, 162, 250, 159, 91, + 233, 164, 88, 9, 51, 153, 57, 122, 177, 247, 96, 133, 186, 99, 60, 153, 178, 205, 44, 26, 117, 79, 243, + 217, 184, 3, 253, 108, 9, 218, 195, 9, 104, 247, 68, 58, 60, 253, 98, 44, 94, 31, 58, 132, 244, 180, + 13, 178, 189, 208, 31, 57, 61, 212, 127, 100, 184, 49, 64, 104, 46, 136, 78, 219, 210, 108, 181, 247, 207, + 184, 250, 182, 31, 147, 194, 118, 84, 40, 61, 41, 245, 227, 159, 56, 118, 124, 26, 203, 150, 168, 210, 78, + 105, 133, 191, 185, 170, 82, 81, 84, 179, 112, 155, 131, 238, 172, 99, 253, 201, 168, 47, 92, 148, 216, 
63, + 87, 187, 27, 110, 58, 182, 196, 112, 206, 164, 71, 82, 197, 36, 157, 151, 160, 219, 113, 6, 247, 99, 139, + 79, 187, 245, 125, 36, 132, 181, 72, 120, 34, 72, 51, 33, 143, 135, 149, 30, 83, 155, 151, 177, 102, 3, + 179, 76, 144, 9, 77, 116, 6, 16, 28, 208, 194, 68, 150, 26, 38, 94, 179, 123, 108, 21, 196, 10, 2, + 164, 75, 118, 147, 231, 19, 222, 69, 224, 174, 59, 198, 80, 126, 199, 32, 224, 173, 188, 251, 7, 15, 18, + 115, 208, 228, 128, 160, 107, 115, 218, 195, 80, 217, 192, 87, 209, 182, 215, 90, 65, 52, 248, 68, 167, 18, + 120, 226, 68, 211, 246, 179, 82, 36, 3, 222, 212, 218, 39, 206, 190, 141, 245, 210, 9, 92, 126, 31, 106, + 64, 159, 14, 191, 32, 38, 246, 237, 85, 69, 48, 215, 62, 249, 14, 74, 211, 99, 209, 247, 194, 252, 9, + 210, 84, 110, 98, 169, 176, 15, 98, 110, 19, 114, 118, 160, 193, 14, 169, 249, 63, 240, 210, 170, 211, 10, + 24, 142, 70, 20, 111, 175, 251, 89, 219, 25, 56, 205, 16, 15, 52, 50, 177, 226, 81, 106, 149, 128, 87, + 153, 219, 117, 151, 195, 201, 74, 177, 128, 146, 173, 247, 160, 29, 142, 107, 155, 153, 93, 39, 67, 45, 252, + 143, 208, 48, 201, 199, 91, 183, 6, 247, 21, 57, 191, 19, 173, 212, 20, 5, 101, 147, 72, 190, 160, 132, + 108, 171, 242, 102, 50, 56, 211, 216, 129, 177, 210, 3, 247, 194, 11, 36, 116, 131, 198, 1, 148, 88, 78, + 143, 48, 175, 190, 244, 2, 84, 167, 202, 190, 27, 216, 249, 155, 36, 167, 111, 47, 64, 66, 109, 245, 113, + 228, 80, 212, 228, 60, 100, 98, 4, 176, 250, 96, 7, 40, 67, 152, 209, 226, 69, 61, 16, 71, 178, 52, + 138, 220, 77, 210, 93, 133, 168, 99, 159, 14, 61, 248, 38, 17, 195, 177, 10, 110, 12, 132, 242, 53, 193, + 185, 236, 20, 118, 3, 252, 221, 168, 132, 179, 226, 67, 133, 229, 228, 34, 89, 124, 198, 252, 79, 141, 223, + 7, 199, 201, 126, 100, 25, 27, 125, 47, 252, 79, 55, 164, 21, 176, 179, 44, 131, 234, 57, 59, 113, 179, + 11, 73, 1, 30, 252, 39, 98, 84, 167, 99, 35, 55, 65, 54, 159, 111, 15, 158, 9, 134, 61, 213, 226, + 235, 134, 61, 186, 198, 212, 183, 98, 1, 28, 16, 75, 3, 150, 193, 200, 90, 123, 
69, 104, 212, 113, 170, + 106, 100, 136, 91, 19, 196, 89, 79, 129, 111, 226, 161, 59, 223, 109, 243, 199, 249, 143, 181, 230, 231, 68, + 173, 126, 138, 91, 9, 119, 81, 238, 154, 86, 0, 190, 214, 145, 71, 223, 48, 78, 150, 68, 167, 143, 189, + 87, 216, 98, 52, 94, 109, 45, 181, 2, 170, 102, 248, 95, 116, 98, 99, 91, 147, 242, 153, 121, 164, 76, + 14, 92, 75, 209, 33, 47, 17, 242, 180, 59, 6, 74, 80, 35, 136, 89, 60, 53, 79, 84, 176, 225, 52, + 249, 29, 65, 75, 147, 123, 67, 247, 115, 197, 228, 28, 160, 250, 118, 207, 67, 63, 38, 129, 63, 223, 218, + 238, 148, 9, 137, 189, 182, 14, 103, 225, 86, 21, 92, 57, 100, 197, 220, 250, 185, 41, 173, 73, 4, 9, + 165, 194, 216, 90, 59, 239, 252, 20, 99, 95, 155, 51, 60, 14, 189, 129, 127, 101, 34, 186, 31, 116, 226, + 52, 162, 250, 72, 174, 221, 218, 154, 82, 218, 176, 246, 2, 44, 97, 153, 170, 146, 216, 171, 53, 246, 70, + 248, 163, 241, 83, 9, 204, 185, 180, 40, 9, 184, 54, 198, 211, 168, 202, 63, 13, 244, 223, 216, 155, 105, + 115, 59, 123, 168, 251, 252, 137, 175, 138, 253, 16, 82, 160, 238, 140, 233, 20, 142, 173, 229, 52, 96, 137, + 98, 89, 119, 114, 128, 159, 203, 22, 204, 101, 186, 80, 118, 217, 106, 53, 120, 137, 187, 98, 251, 172, 151, + 208, 34, 230, 254, 10, 95, 210, 130, 54, 211, 243, 21, 19, 188, 30, 113, 194, 16, 201, 101, 166, 79, 192, + 81, 86, 56, 174, 122, 197, 116, 251, 224, 50, 248, 37, 51, 169, 6, 88, 3, 231, 44, 192, 78, 18, 7, + 102, 144, 53, 181, 223, 17, 147, 168, 160, 20, 22, 74, 243, 146, 141, 86, 165, 44, 17, 188, 84, 236, 2, + 110, 45, 154, 134, 152, 158, 190, 71, 130, 77, 89, 49, 213, 65, 135, 171, 185, 176, 173, 181, 169, 248, 23, + 76, 227, 241, 205, 168, 229, 91, 42, 53, 102, 95, 186, 254, 91, 155, 116, 95, 254, 70, 194, 3, 28, 221, + 137, 188, 139, 164, 103, 113, 232, 233, 16, 124, 177, 36, 152, 157, 216, 119, 230, 73, 100, 133, 143, 101, 122, + 145, 0, 56, 2, 129, 62, 103, 134, 201, 145, 187, 189, 90, 180, 159, 0, 106, 154, 130, 121, 228, 27, 43, + 145, 147, 20, 191, 66, 82, 23, 214, 242, 
202, 199, 53, 143, 164, 3, 246, 100, 15, 172, 59, 227, 129, 161, + 115, 97, 239, 104, 85, 201, 167, 96, 12, 58, 102, 213, 104, 191, 64, 211, 59, 112, 35, 120, 83, 143, 240, + 106, 47, 244, 41, 189, 254, 175, 21, 181, 141, 166, 197, 133, 44, 238, 175, 134, 170, 37, 23, 90, 87, 121, + 102, 59, 54, 219, 82, 120, 194, 127, 152, 177, 63, 229, 69, 234, 235, 103, 128, 178, 97, 60, 126, 217, 245, + 246, 242, 220, 69, 167, 98, 235, 121, 76, 42, 148, 206, 199, 26, 133, 143, 56, 253, 200, 204, 154, 36, 158, + 120, 195, 145, 149, 214, 125, 193, 127, 49, 201, 87, 65, 75, 14, 199, 46, 242, 126, 179, 194, 211, 6, 34, + 176, 127, 99, 30, 101, 21, 49, 8, 7, 206, 97, 229, 63, 231, 43, 51, 33, 201, 206, 65, 82, 12, 253, + 176, 200, 182, 153, 48, 55, 80, 113, 245, 207, 237, 151, 6, 41, 135, 253, 74, 208, 170, 229, 36, 92, 26, + 24, 226, 239, 77, 141, 59, 222, 148, 78, 32, 79, 65, 195, 111, 28, 243, 193, 84, 59, 46, 22, 155, 137, + 149, 113, 186, 16, 115, 211, 18, 138, 74, 234, 20, 183, 203, 12, 140, 119, 72, 187, 188, 150, 137, 205, 20, + 233, 82, 86, 216, 127, 103, 145, 197, 57, 239, 27, 228, 69, 20, 175, 193, 229, 232, 68, 112, 120, 141, 6, + 121, 194, 70, 166, 204, 31, 134, 49, 54, 54, 199, 126, 26, 24, 237, 227, 38, 179, 31, 154, 143, 32, 22, + 105, 33, 232, 224, 156, 204, 152, 237, 38, 219, 221, 44, 73, 16, 251, 206, 56, 180, 194, 181, 2, 64, 205, + 163, 162, 194, 230, 64, 3, 6, 9, 25, 16, 115, 195, 158, 109, 78, 159, 49, 167, 150, 159, 57, 9, 144, + 71, 128, 200, 254, 136, 19, 104, 37, 36, 26, 229, 112, 139, 139, 40, 242, 5, 128, 131, 33, 43, 148, 206, + 17, 153, 116, 74, 116, 40, 40, 85, 160, 231, 240, 185, 239, 76, 66, 144, 228, 137, 253, 212, 139, 115, 62, + 137, 203, 90, 207, 53, 226, 141, 148, 198, 212, 183, 165, 42, 208, 37, 17, 71, 183, 116, 186, 198, 249, 121, + 170, 159, 215, 87, 158, 40, 110, 117, 201, 93, 216, 64, 69, 86, 109, 80, 35, 142, 125, 1, 144, 62, 21, + 185, 61, 58, 230, 74, 144, 151, 26, 112, 128, 131, 20, 100, 209, 31, 81, 100, 102, 21, 211, 182, 52, 161, + 
72, 154, 171, 186, 137, 209, 212, 169, 124, 23, 214, 143, 176, 198, 4, 4, 149, 73, 151, 56, 140, 253, 17, + 68, 142, 196, 246, 15, 191, 62, 233, 33, 87, 108, 173, 7, 96, 101, 140, 9, 193, 121, 71, 88, 135, 179, + 200, 10, 60, 161, 186, 167, 235, 98, 185, 174, 229, 16, 127, 216, 104, 159, 72, 154, 238, 36, 153, 132, 240, + 98, 132, 248, 64, 91, 52, 238, 166, 172, 158, 208, 123, 119, 189, 56, 6, 226, 69, 223, 106, 59, 3, 135, + 125, 184, 237, 19, 24, 189, 25, 119, 82, 13, 11, 167, 31, 220, 163, 73, 16, 141, 239, 222, 211, 5, 63, + 237, 182, 138, 199, 24, 168, 44, 125, 177, 164, 95, 185, 68, 177, 206, 37, 177, 61, 95, 97, 181, 128, 158, + 170, 172, 155, 143, 209, 216, 218, 220, 77, 209, 74, 207, 60, 115, 254, 104, 87, 65, 89, 224, 18, 55, 180, + 239, 95, 57, 137, 195, 245, 22, 0, 164, 207, 171, 142, 237, 233, 87, 56, 214, 182, 68, 0, 1, 225, 65, + 41, 240, 124, 68, 40, 245, 57, 237, 45, 177, 221, 137, 78, 11, 176, 84, 25, 251, 203, 161, 228, 26, 138, + 212, 240, 6, 21, 143, 161, 232, 108, 240, 35, 84, 148, 124, 36, 75, 239, 135, 150, 112, 45, 1, 116, 214, + 1, 59, 214, 62, 152, 143, 154, 166, 115, 32, 113, 153, 7, 191, 147, 202, 66, 64, 157, 186, 249, 164, 162, + 170, 226, 180, 234, 104, 134, 59, 168, 92, 52, 90, 106, 76, 239, 231, 14, 237, 15, 131, 81, 92, 9, 95, + 190, 21, 28, 96, 190, 44, 26, 212, 83, 34, 8, 106, 5, 119, 66, 72, 200, 136, 252, 212, 104, 131, 93, + 150, 37, 67, 188, 26, 105, 125, 201, 41, 248, 61, 238, 176, 254, 205, 157, 37, 194, 191, 239, 3, 128, 90, + 221, 250, 223, 180, 111, 131, 174, 22, 235, 151, 229, 108, 1, 42, 55, 58, 118, 166, 28, 157, 107, 140, 243, + 84, 129, 138, 94, 122, 47, 107, 223, 26, 228, 89, 30, 143, 160, 219, 238, 153, 58, 12, 110, 180, 104, 98, + 245, 8, 138, 108, 160, 182, 252, 79, 162, 122, 83, 131, 54, 210, 120, 167, 53, 209, 54, 4, 147, 138, 244, + 169, 238, 169, 25, 2, 230, 73, 200, 163, 52, 86, 40, 25, 238, 244, 33, 116, 148, 173, 147, 135, 39, 75, + 86, 51, 82, 114, 23, 218, 161, 170, 53, 219, 211, 12, 208, 157, 169, 18, 
113, 37, 99, 192, 2, 184, 8, + 171, 164, 92, 160, 112, 234, 8, 213, 83, 45, 99, 160, 60, 243, 9, 72, 62, 9, 20, 130, 217, 47, 82, + 78, 17, 57, 28, 187, 192, 196, 192, 225, 143, 87, 14, 161, 67, 75, 71, 97, 209, 62, 254, 65, 223, 239, + 46, 53, 251, 91, 237, 205, 149, 158, 5, 168, 32, 4, 10, 160, 66, 155, 1, 113, 225, 82, 148, 224, 18, + 216, 244, 134, 107, 47, 218, 142, 52, 125, 203, 81, 169, 79, 11, 33, 252, 90, 152, 47, 59, 121, 182, 150, + 200, 108, 246, 137, 103, 227, 2, 252, 120, 186, 137, 21, 192, 202, 98, 173, 49, 230, 39, 43, 178, 121, 21, + 113, 178, 244, 235, 68, 8, 97, 23, 181, 11, 166, 162, 179, 66, 10, 19, 77, 139, 38, 50, 213, 134, 242, + 151, 37, 116, 112, 185, 132, 118, 239, 18, 9, 180, 117, 229, 66, 118, 228, 109, 45, 64, 67, 177, 112, 173, + 245, 212, 25, 119, 108, 231, 206, 41, 211, 111, 111, 174, 43, 172, 225, 191, 43, 161, 118, 157, 108, 49, 214, + 215, 125, 54, 236, 242, 167, 205, 75, 35, 12, 124, 238, 163, 73, 238, 226, 235, 43, 199, 69, 147, 20, 187, + 157, 210, 250, 178, 43, 92, 20, 125, 51, 151, 183, 94, 85, 14, 23, 221, 244, 230, 22, 75, 194, 198, 213, + 65, 152, 200, 102, 79, 197, 202, 222, 228, 10, 198, 73, 150, 102, 26, 244, 41, 161, 196, 142, 52, 63, 138, + 74, 140, 121, 119, 192, 209, 183, 138, 53, 132, 29, 121, 161, 138, 249, 70, 46, 233, 57, 211, 60, 215, 232, + 54, 249, 118, 230, 200, 77, 233, 5, 115, 145, 203, 66, 248, 227, 171, 86, 125, 133, 25, 19, 147, 181, 243, + 40, 207, 58, 199, 15, 105, 244, 245, 15, 140, 244, 159, 62, 229, 80, 209, 252, 245, 154, 72, 113, 122, 80, + 160, 94, 146, 71, 83, 19, 28, 206, 174, 20, 30, 70, 234, 203, 242, 190, 243, 27, 83, 243, 145, 51, 217, + 70, 36, 72, 139, 82, 41, 94, 29, 68, 46, 68, 161, 76, 56, 170, 37, 20, 20, 21, 24, 219, 173, 28, + 54, 66, 125, 168, 8, 193, 232, 20, 220, 81, 187, 18, 37, 186, 78, 82, 86, 244, 184, 23, 120, 101, 245, + 229, 248, 192, 185, 208, 23, 136, 85, 110, 125, 203, 63, 44, 158, 155, 194, 97, 220, 229, 60, 159, 80, 8, + 175, 90, 93, 227, 69, 113, 139, 93, 220, 169, 
100, 144, 27, 72, 242, 169, 247, 195, 37, 237, 223, 122, 138, + 70, 78, 68, 139, 102, 164, 228, 51, 227, 199, 1, 193, 164, 178, 126, 99, 99, 172, 20, 24, 50, 57, 5, + 69, 113, 39, 109, 77, 210, 232, 70, 80, 122, 168, 156, 238, 118, 88, 234, 190, 130, 95, 94, 195, 161, 197, + 215, 245, 236, 5, 29, 104, 233, 238, 207, 197, 155, 85, 202, 202, 195, 232, 79, 11, 43, 233, 49, 106, 121, + 60, 219, 6, 99, 174, 206, 121, 202, 245, 136, 5, 35, 121, 174, 51, 231, 40, 75, 13, 127, 143, 159, 225, + 158, 19, 66, 191, 193, 104, 96, 37, 128, 106, 92, 68, 1, 210, 198, 37, 78, 121, 197, 160, 72, 95, 109, + 84, 134, 65, 1, 37, 11, 214, 143, 101, 214, 86, 201, 218, 116, 213, 189, 57, 103, 69, 89, 102, 75, 114, + 170, 46, 139, 156, 241, 193, 86, 243, 50, 60, 223, 142, 250, 81, 21, 22, 37, 192, 102, 100, 186, 214, 146, + 147, 87, 230, 180, 77, 70, 12, 6, 150, 198, 11, 47, 149, 126, 132, 57, 204, 254, 230, 141, 246, 14, 51, + 160, 227, 167, 137, 176, 214, 186, 7, 218, 69, 250, 109, 123, 30, 150, 176, 44, 204, 61, 33, 135, 193, 172, + 233, 221, 251, 109, 154, 13, 245, 105, 245, 43, 184, 209, 46, 116, 214, 90, 131, 0, 52, 91, 165, 118, 140, + 243, 118, 81, 240, 88, 34, 72, 157, 177, 138, 101, 16, 45, 16, 98, 222, 7, 173, 80, 236, 106, 201, 80, + 1, 115, 189, 246, 141, 208, 88, 9, 167, 75, 200, 34, 210, 173, 249, 171, 219, 144, 160, 6, 231, 135, 228, + 193, 146, 16, 176, 117, 20, 170, 62, 72, 168, 41, 207, 138, 168, 88, 86, 241, 72, 205, 154, 78, 93, 168, + 229, 29, 166, 232, 149, 66, 130, 154, 236, 240, 91, 88, 155, 196, 139, 203, 76, 181, 105, 49, 67, 3, 127, + 28, 214, 233, 107, 10, 110, 60, 248, 222, 73, 110, 127, 44, 228, 87, 180, 215, 49, 146, 48, 129, 203, 227, + 197, 11, 23, 107, 189, 234, 38, 195, 42, 19, 13, 40, 254, 15, 97, 223, 251, 35, 236, 238, 29, 173, 199, + 181, 47, 185, 71, 125, 198, 121, 25, 211, 172, 75, 18, 225, 164, 161, 65, 198, 247, 125, 226, 249, 154, 48, + 207, 148, 151, 46, 45, 81, 154, 173, 208, 133, 107, 161, 0, 74, 14, 192, 209, 254, 126, 126, 187, 33, 171, + 
210, 18, 227, 72, 174, 250, 23, 70, 71, 150, 152, 193, 43, 113, 150, 68, 247, 169, 113, 248, 210, 120, 118, + 198, 206, 229, 154, 232, 80, 175, 68, 247, 58, 57, 248, 230, 141, 93, 49, 197, 159, 177, 120, 11, 137, 188, + 42, 61, 110, 223, 29, 156, 166, 212, 135, 51, 53, 0, 191, 167, 223, 196, 231, 229, 30, 200, 248, 57, 192, + 87, 49, 172, 185, 90, 48, 123, 222, 244, 169, 22, 198, 196, 239, 95, 160, 196, 19, 127, 223, 224, 72, 24, + 62, 147, 80, 65, 77, 9, 1, 203, 210, 75, 51, 138, 250, 40, 114, 127, 230, 237, 88, 69, 173, 200, 56, + 0, 221, 88, 249, 104, 237, 12, 159, 182, 85, 113, 38, 55, 25, 132, 180, 167, 145, 192, 209, 167, 102, 17, + 21, 40, 23, 244, 10, 21, 73, 207, 43, 2, 159, 127, 0, 51, 70, 238, 78, 32, 238, 212, 154, 47, 143, + 60, 38, 135, 253, 133, 93, 69, 3, 249, 40, 157, 166, 138, 216, 169, 108, 142, 37, 254, 143, 48, 20, 101, + 79, 72, 198, 233, 64, 118, 116, 136, 117, 231, 51, 247, 165, 206, 24, 35, 241, 83, 55, 86, 15, 28, 96, + 40, 71, 171, 92, 129, 77, 9, 133, 159, 213, 173, 220, 104, 221, 247, 97, 208, 18, 163, 182, 199, 52, 81, + 128, 206, 100, 154, 116, 246, 86, 183, 204, 115, 117, 207, 184, 160, 76, 172, 220, 96, 225, 65, 212, 107, 223, + 148, 59, 213, 3, 204, 98, 228, 51, 68, 208, 143, 72, 61, 211, 121, 236, 238, 202, 1, 140, 154, 6, 136, + 129, 118, 57, 51, 133, 123, 225, 211, 11, 243, 103, 170, 230, 131, 222, 160, 49, 66, 251, 246, 92, 69, 126, + 145, 76, 236, 9, 3, 156, 165, 87, 175, 179, 18, 85, 241, 24, 220, 84, 182, 110, 145, 28, 142, 171, 252, + 226, 36, 72, 243, 111, 35, 157, 151, 207, 199, 239, 85, 133, 100, 157, 105, 35, 89, 17, 27, 44, 190, 136, + 19, 70, 122, 180, 163, 47, 99, 123, 95, 2, 138, 103, 21, 190, 31, 198, 125, 40, 32, 148, 36, 139, 122, + 140, 165, 139, 169, 91, 38, 96, 88, 20, 159, 165, 101, 18, 228, 53, 171, 237, 71, 53, 25, 253, 202, 250, + 187, 44, 227, 29, 105, 237, 96, 0, 11, 73, 161, 128, 105, 213, 137, 68, 72, 187, 60, 51, 10, 155, 34, + 115, 94, 246, 81, 212, 135, 156, 164, 43, 152, 220, 84, 168, 94, 151, 156, 
62, 42, 10, 79, 57, 102, 233, + 58, 56, 242, 150, 14, 23, 80, 247, 186, 249, 110, 88, 18, 55, 202, 203, 83, 16, 101, 199, 234, 212, 101, + 123, 240, 49, 204, 227, 45, 45, 175, 74, 196, 34, 96, 8, 163, 248, 84, 164, 19, 73, 24, 111, 92, 216, + 121, 1, 175, 213, 171, 182, 72, 106, 11, 216, 152, 102, 37, 186, 164, 104, 85, 112, 245, 154, 24, 226, 248, + 31, 88, 90, 34, 134, 59, 139, 92, 173, 80, 144, 186, 222, 134, 229, 1, 103, 113, 124, 226, 83, 50, 4, + 226, 206, 159, 207, 137, 139, 136, 14, 105, 131, 148, 85, 46, 197, 146, 248, 156, 221, 145, 252, 36, 67, 159, + 9, 164, 252, 29, 94, 45, 190, 41, 252, 133, 79, 42, 82, 38, 15, 30, 221, 97, 224, 238, 136, 1, 210, + 128, 104, 218, 203, 204, 160, 95, 162, 207, 55, 119, 20, 215, 45, 177, 89, 18, 96, 237, 64, 137, 89, 89, + 130, 175, 188, 62, 20, 237, 104, 243, 145, 187, 87, 59, 242, 147, 178, 109, 65, 211, 100, 177, 110, 144, 175, + 34, 73, 20, 102, 202, 117, 8, 123, 171, 17, 193, 101, 142, 200, 6, 30, 214, 17, 69, 8, 38, 190, 122, + 68, 213, 5, 222, 185, 140, 16, 7, 245, 47, 221, 44, 50, 88, 99, 105, 2, 18, 59, 42, 72, 217, 34, + 34, 169, 76, 35, 148, 143, 57, 165, 150, 98, 141, 236, 176, 197, 125, 58, 15, 154, 221, 143, 233, 246, 62, + 248, 17, 15, 39, 74, 236, 46, 5, 220, 43, 217, 231, 127, 224, 94, 80, 58, 143, 140, 8, 56, 86, 246, + 68, 96, 54, 179, 105, 143, 5, 180, 60, 194, 57, 19, 218, 112, 129, 124, 7, 222, 75, 47, 91, 45, 192, + 190, 251, 88, 4, 23, 156, 113, 170, 99, 49, 110, 241, 65, 100, 51, 66, 3, 108, 106, 60, 121, 122, 71, + 43, 242, 161, 22, 249, 231, 182, 118, 237, 12, 33, 242, 134, 87, 106, 112, 0, 106, 193, 60, 133, 112, 245, + 99, 93, 147, 244, 26, 55, 90, 1, 107, 7, 29, 87, 115, 26, 243, 240, 153, 24, 220, 252, 63, 116, 162, + 76, 11, 18, 140, 173, 31, 51, 6, 197, 219, 240, 239, 21, 20, 143, 95, 32, 153, 210, 58, 127, 248, 194, + 24, 109, 31, 67, 147, 194, 30, 49, 28, 227, 240, 60, 100, 191, 3, 73, 57, 12, 241, 97, 154, 112, 88, + 82, 85, 24, 100, 150, 184, 78, 132, 146, 87, 151, 86, 196, 207, 65, 
24, 88, 117, 110, 192, 203, 194, 8, + 218, 92, 117, 250, 132, 108, 83, 157, 253, 136, 238, 218, 27, 226, 51, 195, 126, 21, 45, 241, 122, 212, 87, + 250, 36, 87, 246, 233, 204, 183, 219, 95, 56, 202, 47, 181, 68, 106, 197, 192, 44, 110, 78, 219, 158, 78, + 75, 98, 53, 11, 127, 2, 220, 40, 49, 64, 104, 70, 239, 92, 9, 134, 115, 135, 166, 235, 104, 130, 106, + 129, 10, 97, 233, 251, 21, 127, 17, 48, 244, 200, 51, 165, 176, 114, 111, 175, 186, 206, 189, 209, 151, 6, + 128, 217, 16, 203, 5, 216, 169, 216, 67, 238, 22, 220, 123, 83, 174, 86, 26, 223, 15, 131, 61, 23, 151, + 50, 108, 240, 129, 229, 176, 153, 135, 129, 106, 10, 115, 204, 162, 186, 43, 102, 241, 107, 195, 221, 55, 254, + 214, 56, 218, 131, 221, 151, 48, 118, 104, 162, 166, 221, 118, 125, 142, 130, 252, 62, 33, 94, 65, 95, 80, + 108, 9, 180, 113, 128, 123, 213, 12, 237, 233, 53, 167, 2, 244, 113, 39, 157, 234, 53, 56, 154, 85, 199, + 143, 84, 216, 89, 163, 16, 179, 166, 152, 196, 210, 20, 138, 148, 109, 187, 179, 111, 136, 82, 135, 112, 97, + 210, 216, 200, 236, 216, 82, 30, 149, 246, 247, 220, 244, 226, 32, 247, 42, 250, 124, 9, 168, 47, 113, 127, + 6, 152, 73, 82, 124, 214, 72, 92, 79, 148, 197, 104, 78, 7, 68, 169, 138, 126, 196, 131, 139, 72, 219, + 205, 49, 92, 231, 32, 119, 93, 14, 218, 120, 222, 68, 141, 29, 23, 185, 210, 225, 141, 65, 157, 231, 245, + 2, 205, 239, 138, 114, 197, 191, 229, 196, 171, 249, 107, 177, 200, 188, 218, 2, 113, 231, 248, 133, 44, 90, + 157, 41, 44, 46, 205, 4, 79, 46, 30, 48, 218, 154, 157, 127, 249, 192, 4, 147, 176, 190, 107, 64, 47, + 6, 75, 30, 73, 90, 244, 200, 170, 21, 152, 37, 236, 108, 66, 184, 191, 97, 210, 3, 148, 101, 141, 189, + 189, 47, 95, 230, 63, 163, 227, 211, 235, 159, 213, 155, 207, 191, 46, 41, 24, 169, 167, 144, 99, 220, 113, + 50, 18, 49, 156, 11, 64, 159, 172, 46, 139, 115, 12, 240, 71, 70, 34, 118, 203, 32, 21, 178, 69, 92, + 37, 160, 11, 167, 38, 115, 196, 122, 62, 126, 92, 244, 122, 178, 18, 81, 248, 53, 208, 248, 21, 9, 177, + 75, 171, 244, 45, 110, 
128, 179, 61, 71, 250, 242, 52, 152, 160, 104, 45, 247, 106, 122, 16, 78, 90, 191, + 180, 103, 178, 118, 223, 94, 122, 188, 36, 114, 129, 81, 104, 122, 165, 180, 120, 15, 60, 164, 127, 237, 118, + 17, 115, 150, 28, 253, 10, 51, 245, 9, 178, 13, 57, 137, 125, 75, 66, 118, 40, 95, 54, 99, 21, 184, + 52, 52, 58, 130, 136, 80, 71, 180, 27, 127, 58, 8, 33, 76, 160, 172, 109, 235, 224, 189, 174, 169, 202, + 14, 51, 168, 62, 168, 130, 223, 248, 136, 92, 22, 26, 144, 211, 212, 120, 13, 218, 199, 159, 57, 216, 218, + 188, 92, 223, 95, 138, 164, 251, 76, 185, 61, 170, 169, 173, 95, 73, 111, 54, 90, 71, 149, 197, 211, 98, + 229, 26, 44, 167, 102, 10, 46, 38, 58, 203, 36, 212, 115, 127, 162, 215, 11, 148, 116, 132, 4, 49, 94, + 31, 84, 156, 173, 250, 84, 2, 74, 157, 55, 50, 40, 72, 56, 92, 234, 129, 56, 189, 205, 32, 3, 129, + 250, 208, 184, 161, 174, 106, 142, 86, 24, 167, 103, 243, 143, 238, 150, 158, 56, 75, 222, 212, 30, 188, 37, + 155, 150, 218, 191, 162, 67, 163, 109, 53, 222, 117, 244, 24, 246, 169, 223, 119, 223, 201, 173, 63, 209, 209, + 51, 191, 246, 183, 12, 218, 60, 179, 195, 104, 199, 132, 131, 203, 82, 26, 217, 204, 215, 97, 131, 20, 63, + 137, 233, 208, 201, 198, 254, 99, 181, 254, 120, 129, 72, 167, 47, 243, 60, 96, 201, 187, 6, 40, 240, 133, + 180, 88, 238, 168, 222, 54, 129, 57, 80, 27, 190, 201, 35, 174, 129, 195, 229, 29, 251, 22, 151, 254, 59, + 30, 84, 39, 155, 150, 146, 133, 54, 54, 123, 116, 132, 218, 195, 83, 51, 171, 210, 193, 254, 235, 154, 64, + 107, 24, 228, 225, 224, 32, 198, 46, 31, 74, 100, 76, 78, 112, 63, 56, 16, 17, 163, 17, 2, 222, 227, + 160, 196, 16, 86, 68, 217, 50, 12, 117, 128, 73, 47, 83, 41, 219, 39, 150, 218, 21, 20, 243, 13, 159, + 175, 16, 183, 103, 149, 83, 16, 112, 117, 251, 56, 0, 248, 237, 14, 124, 232, 90, 63, 82, 139, 4, 125, + 242, 159, 40, 84, 70, 74, 233, 51, 80, 145, 183, 165, 0, 36, 210, 227, 149, 133, 236, 123, 233, 253, 28, + 121, 25, 46, 93, 43, 196, 4, 182, 92, 39, 128, 120, 31, 95, 200, 73, 32, 144, 92, 63, 96, 184, 
67, + 28, 169, 245, 227, 209, 216, 17, 158, 180, 180, 99, 92, 219, 112, 135, 89, 193, 236, 188, 252, 177, 168, 121, + 63, 131, 122, 131, 12, 142, 149, 7, 13, 125, 25, 218, 240, 121, 186, 148, 251, 47, 145, 82, 152, 163, 133, + 133, 80, 130, 134, 170, 212, 218, 223, 134, 106, 245, 73, 244, 92, 100, 52, 96, 225, 27, 154, 81, 65, 215, + 208, 227, 113, 36, 83, 84, 14, 120, 42, 59, 76, 176, 53, 253, 130, 79, 86, 243, 217, 167, 248, 169, 123, + 134, 211, 172, 82, 115, 91, 31, 165, 210, 103, 155, 27, 252, 164, 36, 50, 207, 69, 215, 100, 155, 52, 12, + 242, 5, 103, 201, 225, 97, 115, 20, 160, 5, 218, 202, 201, 169, 216, 22, 100, 246, 196, 166, 82, 42, 11, + 200, 2, 14, 153, 75, 61, 11, 91, 202, 167, 217, 196, 109, 225, 6, 67, 16, 237, 88, 41, 114, 54, 229, + 73, 209, 26, 203, 70, 11, 253, 18, 166, 64, 63, 24, 253, 150, 108, 99, 184, 182, 171, 0, 172, 167, 190, + 51, 157, 207, 182, 155, 122, 21, 16, 5, 252, 111, 232, 185, 45, 243, 98, 54, 174, 86, 65, 63, 129, 234, + 127, 53, 224, 210, 66, 139, 157, 173, 86, 43, 0, 187, 160, 244, 198, 200, 79, 226, 39, 190, 239, 40, 26, + 150, 22, 200, 3, 142, 180, 115, 168, 251, 38, 163, 54, 127, 227, 236, 94, 111, 130, 60, 24, 152, 191, 108, + 33, 32, 132, 188, 26, 18, 135, 224, 233, 187, 166, 33, 5, 105, 161, 5, 97, 61, 56, 152, 184, 136, 70, + 206, 32, 247, 233, 244, 8, 193, 154, 58, 143, 157, 176, 69, 157, 70, 244, 232, 74, 1, 58, 13, 151, 90, + 10, 47, 22, 177, 154, 89, 199, 39, 196, 55, 133, 67, 157, 52, 68, 200, 162, 173, 92, 78, 147, 15, 12, + 172, 218, 1, 7, 234, 169, 225, 0, 212, 164, 148, 145, 25, 186, 170, 54, 47, 238, 101, 163, 61, 201, 173, + 144, 26, 76, 26, 43, 188, 105, 0, 3, 177, 221, 119, 138, 36, 170, 21, 133, 165, 138, 38, 30, 7, 134, + 163, 221, 214, 199, 228, 158, 89, 211, 126, 36, 53, 51, 175, 87, 139, 167, 112, 144, 157, 247, 84, 183, 139, + 236, 206, 147, 161, 177, 57, 79, 239, 173, 91, 252, 138, 90, 87, 3, 154, 51, 96, 141, 177, 47, 197, 44, + 108, 170, 221, 156, 243, 136, 54, 248, 228, 40, 245, 232, 4, 212, 133, 
153, 215, 120, 119, 129, 112, 121, 71, + 31, 75, 173, 144, 127, 172, 1, 93, 131, 159, 117, 166, 231, 179, 34, 172, 31, 78, 167, 193, 122, 119, 184, + 201, 236, 156, 108, 174, 163, 9, 239, 229, 27, 69, 8, 192, 16, 63, 244, 58, 2, 247, 204, 154, 246, 80, + 113, 30, 104, 236, 109, 56, 210, 254, 4, 164, 136, 147, 5, 16, 135, 245, 237, 172, 207, 150, 122, 39, 84, + 244, 216, 74, 171, 9, 244, 235, 118, 211, 169, 24, 22, 241, 173, 127, 224, 143, 158, 102, 94, 37, 64, 23, + 221, 57, 227, 2, 128, 11, 158, 221, 96, 28, 188, 213, 217, 221, 31, 31, 77, 106, 65, 185, 34, 135, 19, + 202, 81, 49, 88, 252, 244, 194, 111, 180, 221, 203, 220, 54, 237, 42, 220, 209, 37, 136, 127, 191, 238, 40, + 152, 152, 9, 112, 45, 146, 79, 196, 243, 72, 54, 129, 193, 167, 194, 142, 29, 162, 107, 254, 147, 111, 14, + 210, 208, 56, 205, 221, 227, 83, 44, 123, 148, 248, 140, 13, 208, 230, 92, 166, 190, 158, 159, 182, 32, 118, + 197, 243, 79, 194, 78, 122, 89, 125, 167, 31, 179, 70, 62, 231, 110, 45, 126, 221, 77, 116, 95, 191, 170, + 205, 218, 156, 146, 113, 96, 115, 88, 122, 243, 236, 228, 139, 191, 175, 189, 254, 57, 210, 192, 244, 127, 239, + 89, 28, 19, 178, 153, 155, 208, 110, 224, 97, 141, 104, 112, 89, 6, 73, 196, 115, 24, 204, 183, 253, 112, + 191, 177, 111, 253, 225, 195, 41, 75, 19, 31, 148, 148, 171, 21, 32, 186, 228, 92, 227, 167, 133, 143, 61, + 215, 215, 218, 215, 211, 143, 6, 21, 166, 161, 24, 113, 37, 169, 18, 79, 54, 114, 24, 54, 227, 151, 210, + 144, 113, 22, 82, 94, 170, 116, 41, 43, 148, 38, 95, 235, 158, 100, 121, 1, 146, 220, 194, 59, 229, 130, + 70, 19, 146, 34, 33, 190, 5, 31, 250, 142, 68, 79, 9, 126, 105, 225, 3, 112, 4, 203, 84, 123, 182, + 189, 82, 127, 207, 247, 106, 211, 130, 222, 241, 105, 216, 75, 25, 147, 121, 206, 124, 146, 35, 134, 128, 191, + 29, 105, 161, 121, 75, 67, 7, 99, 232, 216, 194, 211, 218, 144, 154, 197, 157, 204, 242, 128, 221, 121, 87, + 102, 86, 27, 203, 18, 65, 101, 129, 208, 254, 216, 181, 55, 55, 43, 81, 36, 85, 23, 211, 57, 130, 18, + 78, 103, 
190, 171, 208, 198, 0, 166, 252, 152, 220, 141, 55, 141, 226, 157, 214, 139, 104, 250, 49, 2, 109, + 62, 31, 182, 112, 223, 182, 166, 151, 174, 153, 97, 194, 123, 121, 34, 13, 28, 218, 32, 83, 217, 182, 12, + 160, 95, 85, 169, 216, 128, 188, 251, 203, 222, 223, 99, 241, 243, 173, 106, 216, 158, 134, 69, 157, 71, 217, + 40, 217, 144, 7, 2, 22, 51, 77, 95, 33, 204, 178, 218, 113, 171, 37, 160, 185, 190, 40, 179, 126, 123, + 48, 127, 12, 179, 168, 247, 24, 79, 70, 18, 197, 65, 51, 138, 225, 104, 124, 213, 83, 95, 145, 80, 210, + 250, 41, 101, 82, 211, 99, 118, 53, 87, 98, 254, 215, 75, 194, 93, 96, 180, 51, 142, 109, 152, 42, 199, + 34, 233, 125, 219, 200, 234, 20, 228, 58, 18, 165, 161, 218, 100, 74, 20, 177, 72, 74, 3, 39, 74, 68, + 90, 188, 93, 217, 55, 61, 129, 102, 228, 192, 27, 150, 234, 138, 250, 168, 59, 179, 82, 241, 43, 184, 253, + 164, 121, 247, 78, 134, 104, 162, 222, 27, 84, 81, 135, 100, 238, 168, 50, 42, 207, 136, 206, 216, 115, 220, + 143, 15, 74, 130, 16, 33, 216, 36, 182, 211, 100, 187, 33, 103, 72, 67, 167, 90, 226, 121, 159, 243, 163, + 10, 230, 192, 219, 210, 196, 202, 64, 163, 7, 169, 215, 169, 85, 19, 50, 14, 93, 182, 71, 218, 154, 25, + 134, 248, 196, 77, 156, 154, 155, 113, 199, 76, 39, 162, 180, 130, 105, 249, 33, 155, 8, 172, 200, 64, 198, + 50, 182, 240, 26, 214, 7, 104, 103, 199, 49, 98, 183, 177, 135, 116, 136, 11, 172, 197, 163, 147, 229, 165, + 151, 180, 248, 132, 250, 176, 174, 15, 73, 230, 159, 141, 29, 167, 82, 140, 61, 76, 221, 167, 177, 32, 32, + 104, 5, 94, 201, 36, 154, 123, 19, 99, 63, 226, 105, 108, 35, 215, 33, 155, 159, 181, 182, 10, 105, 112, + 81, 185, 108, 177, 94, 104, 71, 173, 118, 170, 118, 34, 131, 51, 148, 16, 113, 30, 147, 56, 200, 167, 7, + 227, 236, 235, 166, 124, 178, 48, 242, 236, 188, 109, 144, 35, 30, 68, 3, 53, 103, 58, 67, 135, 167, 219, + 138, 45, 205, 104, 9, 196, 53, 24, 16, 180, 191, 2, 208, 76, 57, 60, 24, 197, 54, 190, 31, 91, 23, + 169, 47, 135, 108, 21, 107, 169, 3, 137, 216, 129, 112, 42, 124, 229, 
128, 48, 245, 112, 78, 167, 213, 133, + 229, 9, 98, 156, 243, 184, 54, 251, 37, 45, 64, 33, 189, 137, 49, 108, 241, 51, 169, 72, 123, 202, 89, + 174, 168, 238, 1, 219, 241, 231, 247, 219, 60, 75, 34, 106, 219, 126, 221, 58, 155, 223, 235, 46, 238, 160, + 153, 214, 146, 46, 243, 19, 59, 244, 253, 225, 190, 176, 135, 28, 176, 91, 226, 68, 69, 100, 125, 154, 66, + 98, 230, 206, 199, 119, 229, 176, 218, 176, 32, 8, 72, 19, 151, 21, 167, 122, 10, 51, 227, 98, 34, 214, + 139, 211, 5, 92, 123, 133, 116, 152, 127, 206, 32, 43, 195, 41, 57, 151, 112, 252, 250, 171, 151, 227, 195, + 245, 145, 136, 63, 254, 93, 59, 124, 197, 132, 202, 252, 161, 77, 220, 106, 249, 48, 88, 199, 10, 100, 233, + 204, 210, 105, 196, 210, 240, 133, 83, 112, 17, 33, 53, 211, 183, 204, 105, 192, 51, 27, 9, 93, 249, 156, + 177, 155, 218, 174, 36, 140, 153, 151, 81, 79, 99, 176, 183, 10, 126, 173, 136, 180, 164, 244, 136, 89, 16, + 90, 164, 175, 236, 142, 125, 87, 54, 38, 19, 244, 221, 141, 100, 216, 69, 36, 207, 139, 162, 184, 0, 61, + 67, 223, 231, 144, 138, 59, 121, 21, 145, 134, 109, 120, 18, 114, 242, 85, 185, 34, 241, 104, 230, 219, 47, + 251, 64, 247, 80, 106, 38, 200, 1, 13, 93, 235, 86, 21, 241, 79, 19, 247, 184, 11, 10, 243, 180, 47, + 37, 67, 92, 31, 199, 202, 245, 156, 238, 245, 206, 54, 95, 50, 166, 6, 119, 186, 172, 248, 91, 225, 165, + 51, 52, 35, 214, 88, 81, 44, 251, 18, 203, 50, 242, 89, 196, 220, 137, 60, 154, 41, 249, 104, 115, 99, + 250, 113, 100, 85, 22, 195, 202, 194, 47, 71, 50, 106, 101, 83, 174, 217, 91, 16, 73, 25, 26, 106, 202, + 240, 237, 244, 141, 119, 14, 32, 79, 80, 173, 107, 64, 207, 93, 188, 210, 5, 2, 185, 107, 203, 68, 113, + 78, 167, 222, 67, 220, 244, 111, 152, 131, 69, 182, 20, 249, 222, 148, 175, 102, 16, 70, 236, 68, 1, 160, + 111, 67, 135, 75, 172, 193, 225, 133, 45, 124, 137, 151, 197, 25, 12, 73, 11, 125, 67, 58, 141, 245, 37, + 69, 112, 58, 37, 29, 134, 178, 51, 239, 140, 145, 64, 196, 88, 193, 122, 196, 90, 80, 141, 185, 35, 19, + 55, 68, 104, 204, 160, 
151, 114, 43, 189, 162, 122, 241, 142, 150, 46, 95, 180, 19, 134, 42, 219, 127, 146, + 211, 16, 251, 171, 83, 152, 63, 97, 174, 81, 53, 240, 36, 26, 119, 201, 254, 64, 48, 188, 50, 203, 85, + 179, 172, 172, 242, 21, 66, 53, 155, 191, 157, 12, 106, 231, 130, 1, 246, 203, 116, 134, 234, 222, 77, 102, + 59, 211, 176, 111, 228, 202, 165, 219, 30, 230, 191, 81, 162, 189, 80, 158, 142, 150, 194, 2, 156, 97, 189, + 6, 226, 78, 29, 167, 38, 232, 188, 239, 158, 212, 209, 182, 25, 23, 138, 198, 83, 117, 160, 245, 92, 47, + 164, 16, 163, 186, 194, 137, 203, 99, 144, 185, 233, 113, 99, 120, 23, 53, 221, 245, 179, 213, 209, 119, 99, + 140, 120, 156, 59, 124, 19, 56, 81, 13, 234, 113, 147, 192, 208, 114, 243, 146, 133, 157, 143, 7, 206, 230, + 253, 49, 203, 104, 201, 248, 49, 229, 14, 81, 125, 78, 54, 49, 55, 82, 215, 106, 97, 147, 83, 118, 31, + 142, 19, 189, 206, 18, 142, 73, 56, 39, 41, 223, 179, 43, 239, 168, 153, 200, 54, 39, 113, 108, 2, 27, + 134, 93, 75, 187, 246, 100, 114, 80, 3, 32, 203, 239, 95, 125, 92, 207, 72, 249, 137, 171, 150, 98, 175, + 185, 191, 227, 158, 44, 169, 42, 2, 249, 108, 164, 154, 70, 143, 113, 41, 20, 22, 222, 114, 85, 216, 130, + 112, 183, 130, 187, 93, 114, 224, 175, 93, 137, 85, 253, 85, 9, 20, 84, 248, 120, 185, 210, 149, 5, 189, + 174, 228, 93, 21, 137, 202, 74, 13, 161, 200, 11, 79, 207, 74, 83, 227, 140, 254, 138, 162, 175, 120, 244, + 242, 64, 54, 61, 247, 232, 241, 139, 37, 198, 4, 95, 235, 104, 160, 207, 100, 50, 221, 127, 149, 125, 52, + 212, 100, 94, 20, 50, 167, 228, 52, 47, 56, 251, 133, 229, 41, 16, 152, 149, 199, 108, 239, 248, 136, 115, + 242, 74, 173, 45, 17, 178, 85, 50, 161, 183, 208, 42, 211, 174, 12, 150, 10, 6, 126, 27, 25, 126, 165, + 80, 24, 54, 29, 4, 88, 193, 219, 233, 77, 183, 73, 155, 55, 11, 65, 103, 208, 227, 194, 78, 231, 250, + 254, 30, 93, 238, 178, 164, 167, 124, 62, 206, 154, 36, 213, 5, 149, 247, 122, 3, 164, 217, 112, 68, 111, + 46, 127, 60, 217, 169, 164, 130, 154, 19, 2, 160, 175, 214, 55, 170, 73, 142, 57, 
206, 9, 198, 77, 80, + 158, 142, 179, 10, 203, 74, 228, 104, 75, 149, 177, 105, 30, 60, 248, 131, 59, 80, 177, 213, 24, 238, 148, + 98, 23, 126, 128, 236, 50, 131, 254, 230, 160, 162, 160, 86, 236, 131, 242, 165, 58, 98, 32, 243, 34, 40, + 226, 46, 192, 130, 246, 137, 229, 117, 29, 233, 11, 105, 160, 1, 201, 55, 173, 169, 87, 211, 121, 43, 71, + 123, 243, 47, 121, 242, 211, 194, 194, 176, 7, 153, 52, 221, 246, 152, 206, 252, 155, 239, 216, 236, 53, 156, + 188, 244, 95, 104, 2, 229, 65, 139, 26, 230, 149, 118, 150, 253, 130, 156, 23, 189, 34, 12, 23, 221, 2, + 75, 216, 151, 5, 80, 198, 104, 197, 219, 71, 29, 103, 140, 241, 125, 254, 76, 34, 69, 31, 107, 235, 71, + 195, 144, 150, 14, 217, 163, 45, 41, 19, 171, 204, 149, 171, 85, 124, 57, 119, 96, 235, 2, 242, 254, 53, + 180, 91, 145, 40, 38, 228, 189, 175, 217, 184, 15, 38, 222, 169, 100, 68, 11, 175, 91, 15, 175, 9, 51, + 199, 207, 40, 244, 179, 53, 193, 53, 50, 232, 127, 88, 1, 120, 177, 228, 107, 43, 188, 216, 5, 201, 243, + 29, 190, 140, 10, 178, 30, 221, 169, 190, 204, 204, 56, 213, 5, 83, 147, 97, 61, 7, 151, 182, 224, 142, + 215, 7, 187, 184, 22, 142, 170, 236, 70, 252, 223, 234, 239, 16, 228, 149, 196, 44, 124, 83, 42, 251, 209, + 47, 152, 184, 33, 78, 185, 200, 148, 116, 42, 52, 222, 72, 99, 25, 13, 206, 135, 244, 19, 131, 114, 150, + 178, 103, 162, 76, 183, 89, 221, 252, 139, 165, 80, 250, 201, 190, 181, 189, 249, 224, 170, 100, 184, 242, 215, + 149, 205, 192, 89, 108, 61, 57, 216, 225, 188, 63, 160, 159, 128, 170, 23, 43, 110, 75, 30, 232, 194, 75, + 87, 80, 88, 175, 33, 69, 48, 215, 143, 106, 192, 179, 113, 151, 203, 112, 170, 241, 199, 250, 16, 220, 143, + 179, 200, 224, 83, 226, 43, 254, 151, 105, 170, 215, 175, 84, 99, 32, 220, 181, 61, 236, 108, 234, 70, 181, + 58, 119, 146, 10, 168, 201, 84, 16, 84, 74, 70, 55, 110, 114, 129, 23, 2, 229, 252, 243, 48, 98, 26, + 33, 55, 148, 145, 46, 40, 127, 119, 34, 122, 245, 183, 213, 208, 221, 237, 113, 189, 175, 223, 41, 220, 53, + 129, 148, 98, 9, 161, 63, 71, 
26, 167, 124, 113, 58, 177, 155, 126, 207, 89, 125, 110, 80, 236, 5, 187, + 179, 216, 187, 176, 187, 69, 39, 118, 121, 0, 149, 216, 169, 251, 51, 19, 6, 136, 139, 245, 210, 148, 149, + 52, 30, 94, 229, 144, 85, 206, 117, 56, 232, 252, 176, 149, 148, 25, 6, 70, 142, 118, 190, 180, 15, 7, + 166, 239, 203, 216, 247, 108, 227, 100, 210, 18, 54, 80, 44, 97, 202, 36, 196, 93, 140, 153, 58, 248, 110, + 70, 32, 238, 246, 53, 15, 223, 75, 93, 38, 194, 65, 172, 251, 133, 229, 113, 180, 139, 232, 157, 243, 123, + 140, 154, 112, 98, 26, 17, 155, 20, 199, 222, 183, 106, 214, 175, 167, 148, 158, 131, 155, 11, 225, 126, 55, + 132, 247, 79, 88, 36, 8, 150, 0, 160, 100, 31, 51, 175, 191, 253, 207, 127, 47, 3, 208, 38, 29, 76, + 238, 137, 75, 10, 43, 107, 167, 222, 167, 53, 15, 160, 201, 156, 108, 95, 236, 2, 193, 219, 178, 35, 168, + 48, 144, 68, 19, 102, 235, 220, 17, 59, 237, 57, 112, 222, 150, 149, 68, 11, 157, 240, 7, 123, 235, 164, + 139, 33, 5, 128, 59, 219, 54, 43, 38, 239, 139, 142, 247, 46, 47, 241, 44, 229, 72, 62, 192, 25, 154, + 111, 49, 97, 225, 5, 95, 234, 62, 10, 185, 93, 244, 20, 76, 64, 36, 247, 180, 254, 176, 205, 80, 209, + 156, 206, 26, 6, 34, 100, 57, 66, 245, 236, 149, 66, 224, 182, 91, 132, 173, 129, 180, 235, 13, 33, 107, + 180, 240, 117, 139, 106, 94, 111, 86, 190, 102, 92, 12, 194, 53, 96, 60, 165, 158, 0, 12, 75, 191, 198, + 37, 162, 236, 212, 162, 102, 233, 16, 194, 223, 168, 217, 98, 191, 171, 204, 46, 20, 101, 164, 153, 32, 239, + 85, 118, 2, 162, 200, 204, 28, 113, 111, 160, 97, 252, 33, 213, 185, 105, 73, 191, 48, 182, 92, 146, 101, + 47, 54, 52, 165, 94, 180, 55, 100, 36, 91, 10, 111, 239, 232, 65, 175, 86, 109, 84, 211, 241, 119, 29, + 254, 12, 134, 86, 99, 156, 253, 69, 106, 156, 10, 119, 187, 207, 38, 223, 187, 129, 114, 176, 198, 10, 87, + 211, 168, 44, 170, 148, 110, 40, 211, 65, 46, 215, 200, 160, 165, 225, 225, 60, 99, 145, 86, 201, 225, 126, + 110, 54, 66, 227, 220, 128, 60, 90, 182, 251, 19, 123, 146, 162, 160, 51, 95, 82, 113, 220, 229, 
52, 216, + 169, 132, 89, 215, 4, 51, 224, 244, 5, 141, 165, 25, 192, 204, 18, 239, 122, 239, 47, 46, 248, 30, 143, + 83, 49, 32, 248, 1, 23, 45, 192, 88, 153, 133, 227, 9, 234, 47, 124, 92, 33, 248, 176, 242, 107, 223, + 230, 57, 19, 30, 246, 212, 137, 107, 37, 32, 192, 51, 234, 85, 236, 63, 184, 72, 203, 120, 171, 252, 57, + 76, 72, 248, 58, 198, 157, 213, 106, 73, 236, 3, 152, 60, 59, 27, 79, 92, 127, 84, 4, 90, 102, 196, + 128, 210, 166, 38, 217, 173, 238, 92, 222, 252, 206, 103, 99, 101, 230, 233, 142, 45, 88, 66, 113, 2, 73, + 166, 135, 123, 31, 7, 187, 89, 141, 18, 174, 6, 210, 145, 117, 242, 187, 151, 85, 197, 103, 84, 94, 222, + 124, 199, 29, 84, 88, 243, 178, 74, 28, 97, 191, 120, 6, 34, 164, 138, 92, 221, 219, 167, 147, 82, 192, + 50, 86, 154, 95, 44, 152, 158, 140, 130, 3, 18, 128, 25, 153, 181, 155, 175, 246, 45, 27, 2, 43, 221, + 140, 240, 230, 98, 193, 0, 114, 79, 103, 86, 182, 28, 56, 36, 216, 166, 43, 239, 118, 101, 231, 188, 244, + 68, 129, 191, 121, 242, 228, 89, 145, 242, 213, 28, 100, 75, 118, 23, 175, 189, 90, 69, 11, 243, 20, 97, + 242, 236, 188, 63, 58, 110, 53, 45, 182, 96, 68, 145, 152, 146, 71, 96, 245, 227, 155, 156, 145, 127, 201, + 68, 1, 183, 129, 29, 199, 99, 54, 120, 3, 195, 198, 62, 210, 125, 192, 212, 7, 127, 10, 29, 106, 181, + 65, 64, 71, 62, 105, 50, 42, 129, 33, 95, 68, 16, 73, 155, 12, 131, 126, 108, 0, 13, 247, 87, 75, + 35, 248, 173, 175, 120, 218, 185, 189, 117, 84, 215, 100, 165, 157, 134, 196, 20, 144, 51, 139, 72, 45, 173, + 34, 84, 225, 138, 101, 65, 34, 120, 212, 68, 85, 26, 165, 62, 190, 40, 1, 5, 248, 224, 2, 188, 207, + 185, 226, 164, 96, 228, 179, 90, 170, 219, 108, 174, 146, 128, 137, 191, 76, 112, 97, 203, 6, 174, 241, 227, + 126, 60, 28, 119, 125, 102, 179, 234, 133, 124, 167, 9, 27, 87, 158, 170, 76, 125, 191, 129, 166, 84, 228, + 1, 247, 2, 111, 1, 107, 14, 119, 182, 85, 151, 9, 152, 88, 5, 120, 158, 26, 183, 151, 170, 120, 32, + 23, 19, 158, 124, 181, 125, 175, 176, 31, 135, 162, 4, 116, 101, 43, 84, 54, 52, 
75, 247, 127, 66, 132, + 35, 77, 110, 190, 63, 150, 6, 117, 249, 131, 130, 194, 164, 27, 92, 252, 99, 206, 76, 181, 250, 216, 217, + 65, 187, 253, 207, 130, 193, 69, 12, 3, 147, 80, 46, 73, 251, 179, 100, 16, 30, 88, 59, 114, 20, 160, + 63, 219, 1, 28, 133, 37, 42, 25, 212, 71, 243, 37, 224, 177, 217, 156, 33, 3, 83, 37, 68, 73, 194, + 182, 168, 141, 57, 24, 46, 10, 144, 37, 235, 28, 229, 145, 105, 24, 157, 62, 120, 27, 230, 37, 237, 149, + 204, 66, 32, 230, 75, 111, 219, 61, 85, 102, 176, 232, 247, 11, 80, 56, 66, 187, 115, 161, 170, 210, 25, + 51, 188, 200, 224, 206, 196, 24, 21, 109, 152, 159, 187, 170, 29, 188, 87, 40, 33, 104, 195, 45, 90, 176, + 55, 97, 56, 32, 20, 70, 236, 41, 16, 228, 108, 215, 232, 89, 157, 87, 209, 206, 216, 136, 26, 34, 75, + 49, 115, 194, 71, 213, 186, 48, 193, 227, 253, 146, 169, 53, 52, 134, 195, 43, 10, 47, 34, 132, 212, 145, + 199, 123, 50, 122, 140, 131, 208, 137, 80, 86, 65, 217, 117, 204, 165, 53, 61, 162, 184, 245, 183, 98, 148, + 196, 103, 7, 171, 44, 30, 57, 54, 134, 150, 201, 29, 197, 4, 17, 149, 130, 31, 11, 247, 97, 213, 65, + 145, 26, 4, 179, 187, 36, 171, 40, 164, 192, 77, 40, 75, 151, 205, 26, 31, 242, 208, 210, 147, 238, 223, + 246, 170, 252, 118, 97, 2, 52, 188, 35, 216, 28, 76, 250, 54, 79, 52, 227, 187, 178, 92, 246, 213, 159, + 215, 3, 217, 179, 132, 51, 124, 79, 196, 53, 228, 212, 80, 50, 115, 63, 107, 83, 53, 160, 176, 118, 247, + 126, 163, 98, 4, 26, 167, 127, 18, 157, 35, 107, 173, 64, 186, 201, 251, 64, 219, 189, 52, 76, 71, 47, + 47, 69, 153, 118, 147, 115, 110, 30, 151, 155, 68, 78, 131, 75, 134, 157, 72, 180, 254, 220, 150, 9, 100, + 124, 68, 178, 23, 15, 187, 84, 32, 149, 23, 159, 64, 180, 122, 230, 94, 237, 11, 107, 167, 100, 22, 96, + 25, 145, 68, 47, 142, 71, 8, 20, 194, 35, 165, 229, 47, 133, 87, 156, 54, 127, 175, 62, 246, 61, 216, + 11, 206, 101, 244, 233, 84, 146, 182, 67, 88, 5, 159, 209, 15, 165, 193, 103, 195, 144, 186, 150, 82, 132, + 223, 48, 145, 103, 44, 13, 7, 210, 128, 97, 128, 123, 246, 
157, 59, 31, 246, 85, 85, 242, 78, 81, 92, + 240, 18, 207, 192, 53, 157, 31, 209, 133, 25, 212, 114, 94, 70, 150, 102, 150, 182, 37, 214, 64, 218, 224, + 124, 166, 132, 56, 66, 80, 189, 100, 171, 19, 89, 113, 48, 220, 49, 91, 216, 159, 57, 51, 102, 249, 169, + 207, 29, 253, 208, 198, 110, 101, 158, 190, 87, 100, 213, 35, 55, 205, 100, 157, 137, 77, 128, 254, 61, 184, + 9, 129, 145, 161, 227, 209, 198, 14, 119, 172, 250, 78, 84, 163, 157, 227, 123, 206, 150, 155, 2, 84, 7, + 180, 224, 132, 134, 40, 157, 154, 215, 80, 231, 118, 114, 110, 12, 101, 4, 222, 85, 46, 156, 181, 30, 126, + 136, 5, 91, 185, 93, 166, 33, 38, 44, 225, 144, 21, 30, 244, 172, 72, 229, 52, 35, 74, 162, 46, 61, + 47, 93, 24, 241, 60, 253, 190, 104, 11, 205, 106, 115, 74, 19, 254, 139, 229, 191, 15, 119, 211, 91, 126, + 77, 6, 84, 180, 157, 169, 73, 147, 154, 201, 132, 65, 118, 185, 156, 154, 6, 176, 67, 87, 128, 75, 244, + 78, 181, 43, 29, 72, 49, 217, 137, 132, 158, 11, 179, 103, 170, 92, 179, 231, 61, 209, 191, 83, 138, 66, + 209, 129, 31, 254, 185, 41, 36, 239, 230, 97, 83, 199, 115, 243, 20, 230, 194, 102, 103, 254, 202, 214, 253, + 85, 32, 17, 85, 211, 149, 96, 88, 37, 12, 154, 209, 176, 73, 53, 153, 132, 199, 18, 5, 80, 58, 48, + 139, 85, 212, 192, 138, 209, 88, 62, 250, 242, 46, 230, 90, 196, 36, 23, 214, 55, 157, 249, 220, 121, 162, + 4, 133, 22, 40, 180, 111, 157, 87, 200, 5, 167, 159, 63, 190, 176, 172, 65, 135, 220, 119, 78, 94, 151, + 62, 58, 161, 104, 200, 84, 191, 254, 43, 190, 92, 19, 101, 22, 25, 182, 22, 123, 54, 191, 246, 195, 139, + 246, 173, 150, 56, 147, 222, 149, 83, 181, 209, 199, 27, 117, 95, 151, 163, 167, 253, 64, 187, 240, 145, 193, + 153, 88, 94, 31, 211, 228, 154, 207, 219, 243, 213, 212, 138, 237, 216, 13, 82, 208, 113, 249, 30, 44, 251, + 239, 110, 40, 166, 96, 191, 78, 120, 96, 242, 212, 55, 13, 70, 160, 15, 71, 150, 122, 132, 111, 142, 43, + 97, 93, 41, 120, 222, 41, 171, 64, 178, 2, 37, 79, 77, 82, 108, 106, 181, 223, 57, 203, 248, 232, 138, + 76, 97, 69, 172, 98, 
191, 227, 112, 58, 175, 13, 162, 147, 103, 164, 219, 93, 31, 127, 22, 65, 27, 143, + 142, 157, 106, 249, 239, 4, 145, 135, 174, 181, 132, 184, 125, 117, 193, 87, 198, 219, 241, 13, 136, 149, 11, + 221, 102, 72, 245, 34, 112, 230, 240, 66, 85, 180, 143, 147, 142, 152, 3, 78, 16, 52, 90, 144, 41, 229, + 238, 74, 79, 15, 58, 137, 251, 101, 93, 43, 4, 32, 170, 31, 156, 47, 249, 40, 231, 78, 195, 138, 139, + 138, 249, 119, 183, 118, 197, 167, 248, 229, 118, 251, 199, 203, 180, 209, 17, 227, 166, 215, 214, 20, 213, 254, + 54, 181, 83, 8, 18, 243, 42, 93, 167, 246, 113, 246, 67, 215, 36, 94, 235, 75, 113, 205, 148, 107, 92, + 148, 11, 38, 20, 186, 247, 237, 188, 234, 241, 176, 154, 44, 140, 115, 251, 55, 253, 151, 136, 175, 219, 71, + 250, 17, 248, 129, 47, 208, 68, 191, 130, 22, 57, 192, 226, 88, 253, 248, 48, 253, 47, 198, 234, 78, 210, + 106, 27, 127, 245, 113, 103, 192, 48, 48, 65, 191, 41, 36, 131, 191, 109, 29, 143, 187, 82, 84, 123, 165, + 65, 125, 122, 151, 81, 253, 14, 42, 68, 159, 234, 153, 196, 53, 154, 221, 123, 4, 19, 239, 253, 87, 249, + 123, 219, 67, 124, 19, 196, 88, 197, 37, 82, 115, 48, 72, 68, 19, 41, 118, 182, 21, 36, 55, 173, 157, + 100, 168, 98, 115, 119, 34, 66, 221, 181, 213, 58, 185, 203, 196, 68, 245, 21, 29, 154, 212, 162, 134, 175, + 66, 91, 170, 153, 147, 200, 176, 169, 142, 213, 168, 149, 133, 120, 114, 98, 32, 254, 102, 202, 226, 245, 129, + 72, 219, 153, 49, 172, 81, 43, 73, 184, 234, 8, 251, 121, 54, 16, 165, 241, 132, 50, 201, 223, 249, 150, + 193, 52, 131, 161, 128, 8, 183, 8, 186, 173, 87, 73, 117, 92, 13, 221, 31, 52, 123, 247, 165, 209, 35, + 232, 162, 227, 1, 66, 225, 82, 205, 185, 139, 211, 160, 46, 65, 179, 41, 216, 29, 162, 182, 134, 130, 28, + 65, 22, 80, 163, 137, 237, 217, 46, 251, 119, 30, 1, 96, 8, 225, 36, 143, 226, 41, 211, 159, 234, 152, + 89, 221, 181, 204, 145, 138, 233, 166, 83, 7, 161, 248, 239, 134, 115, 2, 31, 165, 225, 153, 234, 129, 64, + 13, 65, 75, 145, 57, 4, 113, 243, 131, 144, 99, 203, 13, 5, 182, 142, 93, 
117, 30, 171, 55, 109, 56, + 136, 248, 156, 93, 241, 117, 16, 196, 139, 110, 43, 6, 111, 16, 209, 209, 59, 2, 208, 149, 56, 169, 3, + 84, 203, 152, 18, 233, 21, 98, 90, 82, 177, 226, 5, 212, 237, 196, 122, 112, 204, 244, 186, 115, 65, 194, + 112, 55, 141, 7, 147, 176, 164, 102, 138, 53, 233, 37, 46, 143, 142, 87, 208, 205, 35, 154, 187, 117, 81, + 42, 69, 82, 64, 207, 243, 219, 89, 141, 49, 215, 89, 222, 120, 41, 134, 23, 146, 154, 226, 9, 196, 51, + 211, 79, 33, 174, 66, 51, 34, 187, 66, 191, 204, 29, 239, 246, 238, 60, 234, 217, 170, 119, 221, 138, 7, + 222, 88, 122, 195, 111, 68, 209, 22, 202, 243, 105, 96, 148, 12, 187, 33, 30, 140, 244, 52, 204, 177, 179, + 9, 203, 118, 245, 193, 146, 18, 173, 188, 84, 207, 96, 16, 84, 0, 101, 172, 148, 102, 199, 213, 21, 238, + 138, 19, 157, 104, 95, 24, 36, 246, 41, 156, 179, 225, 233, 227, 148, 66, 53, 254, 141, 140, 23, 191, 9, + 49, 10, 67, 55, 57, 227, 213, 212, 98, 67, 207, 234, 36, 73, 180, 166, 94, 230, 175, 166, 58, 196, 234, + 182, 131, 66, 36, 108, 91, 251, 6, 39, 33, 209, 187, 199, 182, 41, 155, 65, 250, 153, 61, 33, 112, 42, + 201, 142, 148, 96, 61, 96, 160, 42, 233, 195, 115, 14, 207, 175, 197, 232, 112, 124, 41, 190, 232, 56, 104, + 164, 75, 68, 249, 144, 184, 237, 56, 129, 87, 124, 236, 63, 101, 132, 47, 148, 252, 78, 121, 0, 7, 81, + 192, 216, 67, 136, 212, 125, 191, 167, 167, 159, 233, 160, 112, 40, 14, 62, 48, 100, 104, 129, 48, 241, 152, + 67, 58, 135, 25, 99, 88, 254, 113, 89, 126, 211, 208, 15, 249, 245, 250, 233, 7, 129, 91, 157, 225, 63, + 137, 248, 196, 66, 120, 69, 73, 203, 209, 87, 163, 120, 111, 85, 68, 18, 47, 187, 148, 151, 226, 41, 99, + 56, 154, 58, 238, 24, 7, 22, 173, 133, 162, 246, 216, 112, 252, 199, 58, 224, 184, 23, 115, 209, 232, 244, + 218, 74, 63, 117, 69, 196, 198, 210, 52, 199, 150, 62, 236, 66, 47, 161, 94, 172, 4, 110, 179, 41, 172, + 139, 72, 246, 177, 237, 253, 8, 50, 135, 20, 93, 25, 21, 101, 160, 145, 159, 74, 99, 69, 132, 248, 46, + 182, 20, 199, 160, 210, 6, 45, 49, 178, 124, 
128, 249, 192, 94, 121, 186, 76, 15, 149, 103, 42, 130, 247, + 242, 3, 97, 171, 32, 157, 56, 112, 202, 116, 124, 175, 49, 49, 13, 146, 94, 109, 61, 14, 186, 161, 145, + 66, 80, 41, 57, 133, 218, 79, 58, 214, 219, 170, 143, 106, 98, 82, 164, 229, 187, 121, 170, 147, 28, 126, + 49, 134, 16, 121, 104, 213, 207, 55, 31, 39, 165, 103, 61, 67, 209, 4, 15, 24, 72, 155, 150, 105, 149, + 141, 253, 245, 124, 137, 126, 219, 28, 27, 131, 66, 242, 215, 90, 51, 159, 134, 136, 223, 66, 241, 136, 2, + 25, 235, 176, 83, 13, 210, 136, 4, 233, 191, 162, 15, 101, 231, 113, 56, 155, 4, 128, 40, 0, 132, 4, + 195, 148, 210, 245, 69, 22, 5, 156, 62, 43, 254, 39, 236, 55, 178, 211, 26, 61, 95, 249, 158, 98, 58, + 126, 178, 45, 20, 4, 153, 216, 85, 218, 125, 2, 219, 11, 33, 202, 145, 46, 192, 201, 249, 197, 22, 231, + 132, 73, 181, 153, 253, 64, 209, 137, 253, 174, 100, 228, 157, 28, 32, 78, 145, 1, 188, 251, 147, 2, 105, + 174, 239, 64, 244, 189, 118, 147, 55, 37, 82, 130, 241, 200, 35, 167, 90, 181, 232, 171, 177, 5, 223, 204, + 175, 130, 121, 27, 202, 124, 20, 15, 18, 233, 247, 32, 8, 218, 100, 128, 235, 202, 196, 0, 6, 202, 97, + 14, 85, 247, 199, 155, 8, 99, 3, 1, 239, 18, 200, 131, 130, 114, 145, 111, 96, 53, 201, 164, 168, 87, + 67, 209, 194, 45, 237, 41, 65, 94, 48, 187, 239, 126, 181, 121, 87, 63, 143, 176, 50, 131, 15, 11, 132, + 242, 29, 126, 198, 51, 16, 79, 89, 207, 228, 220, 76, 97, 8, 237, 197, 192, 205, 89, 201, 224, 143, 44, + 50, 135, 189, 226, 212, 1, 252, 71, 204, 33, 24, 234, 46, 125, 85, 205, 192, 106, 241, 43, 198, 246, 55, + 193, 238, 227, 185, 103, 104, 145, 139, 224, 113, 133, 25, 136, 143, 93, 146, 44, 96, 150, 145, 202, 102, 135, + 32, 215, 241, 150, 139, 34, 202, 208, 10, 22, 82, 249, 79, 99, 189, 174, 240, 36, 254, 8, 142, 86, 148, + 159, 197, 235, 34, 146, 99, 94, 242, 45, 195, 178, 23, 164, 245, 32, 52, 46, 244, 233, 217, 79, 87, 159, + 48, 138, 69, 34, 93, 1, 171, 191, 56, 47, 90, 26, 215, 139, 248, 194, 138, 99, 138, 197, 145, 102, 230, + 180, 96, 115, 32, 
156, 204, 162, 180, 82, 191, 5, 92, 199, 107, 79, 253, 207, 37, 46, 160, 158, 28, 136, + 240, 145, 103, 52, 186, 250, 170, 142, 123, 8, 101, 182, 242, 41, 29, 206, 115, 196, 62, 163, 29, 191, 58, + 19, 157, 99, 239, 228, 138, 33, 233, 177, 189, 137, 60, 6, 228, 52, 139, 213, 125, 36, 47, 165, 175, 168, + 111, 215, 212, 92, 94, 26, 195, 247, 83, 154, 66, 237, 232, 156, 250, 192, 162, 156, 113, 23, 10, 219, 72, + 195, 148, 19, 0, 56, 27, 78, 147, 62, 141, 184, 253, 244, 129, 60, 218, 69, 175, 229, 72, 199, 198, 234, + 37, 137, 38, 20, 153, 46, 216, 145, 51, 164, 141, 43, 219, 11, 151, 40, 242, 225, 116, 227, 110, 192, 132, + 97, 39, 120, 222, 133, 187, 122, 136, 67, 206, 92, 226, 184, 132, 75, 88, 194, 235, 13, 8, 10, 46, 87, + 54, 101, 36, 249, 89, 4, 164, 89, 106, 167, 141, 125, 159, 7, 14, 221, 42, 36, 60, 51, 20, 148, 145, + 2, 80, 243, 99, 60, 73, 220, 146, 217, 130, 167, 230, 235, 170, 49, 11, 169, 39, 214, 242, 44, 202, 32, + 117, 126, 20, 3, 155, 147, 142, 21, 210, 35, 192, 151, 226, 199, 14, 72, 84, 219, 49, 133, 122, 93, 186, + 38, 77, 32, 242, 12, 154, 188, 182, 215, 52, 10, 92, 149, 115, 79, 13, 131, 24, 110, 158, 76, 110, 98, + 126, 85, 241, 130, 27, 71, 40, 60, 112, 18, 201, 63, 215, 235, 42, 190, 155, 35, 204, 114, 138, 199, 175, + 169, 183, 0, 231, 193, 171, 207, 217, 58, 158, 254, 113, 32, 208, 106, 31, 223, 64, 1, 81, 16, 89, 161, + 51, 212, 158, 20, 179, 95, 248, 28, 189, 89, 89, 83, 71, 191, 186, 233, 157, 18, 171, 160, 49, 74, 145, + 155, 194, 202, 195, 86, 45, 60, 183, 156, 22, 145, 188, 182, 202, 176, 160, 78, 138, 124, 119, 25, 252, 79, + 16, 113, 97, 248, 79, 84, 60, 107, 47, 75, 197, 187, 138, 1, 2, 114, 184, 226, 10, 166, 88, 71, 93, + 10, 119, 15, 129, 151, 119, 50, 62, 142, 134, 211, 108, 0, 93, 254, 235, 178, 65, 108, 138, 184, 67, 124, + 250, 149, 45, 228, 168, 14, 237, 39, 193, 119, 35, 14, 144, 88, 225, 208, 72, 170, 182, 197, 14, 133, 117, + 197, 247, 121, 125, 134, 149, 53, 62, 148, 169, 131, 42, 193, 169, 87, 110, 192, 38, 26, 31, 
133, 198, 79, + 94, 42, 233, 51, 133, 184, 155, 47, 136, 103, 15, 252, 19, 115, 97, 16, 122, 24, 56, 50, 170, 153, 22, + 126, 149, 108, 86, 124, 111, 159, 120, 206, 57, 31, 53, 0, 136, 6, 19, 234, 222, 235, 112, 209, 29, 4, + 171, 251, 56, 27, 132, 197, 137, 55, 187, 40, 50, 90, 101, 64, 169, 74, 241, 33, 73, 172, 167, 148, 183, + 59, 141, 84, 163, 215, 19, 211, 188, 108, 218, 145, 42, 240, 207, 73, 47, 104, 146, 68, 165, 127, 3, 119, + 46, 252, 193, 104, 39, 196, 64, 73, 19, 71, 61, 16, 135, 199, 47, 138, 142, 208, 74, 242, 76, 17, 84, + 61, 208, 53, 145, 190, 32, 149, 160, 35, 161, 101, 60, 59, 69, 66, 41, 193, 147, 177, 146, 227, 236, 16, + 17, 185, 104, 17, 192, 167, 82, 6, 172, 5, 87, 16, 76, 21, 102, 144, 164, 183, 69, 37, 120, 208, 20, + 78, 41, 149, 9, 176, 83, 94, 33, 144, 7, 158, 128, 86, 24, 121, 101, 37, 98, 70, 180, 214, 31, 66, + 252, 124, 54, 194, 79, 202, 126, 245, 49, 90, 32, 204, 90, 61, 171, 170, 52, 5, 1, 121, 93, 78, 221, + 134, 75, 114, 232, 176, 168, 182, 123, 210, 230, 9, 208, 225, 121, 103, 121, 58, 151, 19, 127, 231, 81, 71, + 158, 197, 55, 194, 9, 40, 121, 206, 239, 5, 64, 55, 98, 120, 183, 187, 164, 160, 138, 213, 193, 52, 67, + 72, 6, 81, 176, 42, 192, 170, 89, 231, 240, 165, 226, 178, 196, 221, 128, 39, 10, 196, 188, 181, 64, 129, + 46, 121, 5, 14, 74, 4, 164, 168, 130, 22, 174, 162, 172, 121, 155, 180, 101, 84, 54, 94, 34, 71, 87, + 138, 210, 223, 231, 126, 76, 194, 187, 37, 118, 99, 133, 77, 218, 113, 75, 198, 130, 21, 230, 4, 231, 184, + 150, 213, 251, 3, 2, 34, 10, 72, 116, 77, 201, 2, 117, 239, 99, 211, 47, 9, 97, 53, 210, 113, 248, + 218, 109, 246, 119, 254, 94, 11, 30, 121, 169, 29, 127, 62, 74, 12, 0, 117, 104, 107, 86, 241, 180, 154, + 10, 83, 31, 140, 109, 241, 31, 131, 68, 184, 95, 16, 102, 190, 248, 32, 88, 235, 54, 249, 179, 177, 59, + 38, 55, 6, 109, 173, 91, 105, 86, 117, 60, 102, 56, 3, 17, 155, 187, 133, 245, 48, 13, 122, 205, 97, + 207, 11, 221, 5, 128, 21, 253, 132, 51, 183, 15, 201, 179, 106, 206, 196, 1, 68, 89, 
82, 147, 139, 212, + 1, 121, 206, 22, 149, 27, 208, 137, 74, 139, 107, 140, 197, 111, 84, 224, 94, 179, 218, 2, 217, 77, 62, + 54, 71, 145, 245, 195, 141, 31, 127, 210, 44, 167, 202, 229, 0, 167, 215, 102, 24, 244, 189, 40, 5, 182, + 167, 26, 127, 207, 45, 122, 184, 29, 178, 92, 111, 152, 97, 72, 148, 181, 75, 138, 17, 28, 215, 155, 147, + 155, 73, 67, 86, 67, 140, 177, 185, 145, 140, 14, 181, 56, 210, 234, 57, 235, 234, 137, 36, 50, 110, 251, + 117, 211, 97, 138, 174, 189, 133, 26, 216, 155, 108, 209, 249, 52, 9, 50, 112, 150, 164, 235, 105, 144, 115, + 238, 191, 69, 108, 238, 152, 79, 22, 162, 181, 41, 222, 252, 124, 241, 125, 106, 122, 166, 36, 234, 4, 86, + 202, 2, 42, 204, 139, 16, 253, 108, 180, 210, 6, 162, 108, 48, 0, 27, 184, 200, 13, 86, 181, 52, 216, + 55, 118, 124, 228, 38, 6, 252, 20, 148, 79, 130, 179, 208, 83, 8, 8, 108, 223, 240, 101, 135, 97, 94, + 168, 160, 198, 227, 53, 251, 62, 173, 186, 247, 214, 238, 109, 235, 84, 140, 52, 206, 175, 17, 46, 148, 237, + 142, 177, 149, 80, 66, 235, 229, 152, 94, 173, 1, 254, 11, 53, 180, 130, 16, 107, 214, 1, 204, 13, 197, + 123, 6, 19, 85, 204, 25, 38, 220, 231, 213, 95, 251, 123, 172, 239, 164, 19, 130, 162, 189, 117, 214, 47, + 73, 219, 152, 168, 138, 83, 48, 67, 110, 253, 112, 219, 79, 245, 76, 178, 227, 115, 83, 222, 181, 159, 131, + 150, 82, 154, 25, 18, 190, 47, 117, 110, 189, 40, 73, 53, 70, 238, 233, 69, 203, 121, 104, 125, 21, 63, + 69, 217, 80, 36, 164, 190, 165, 138, 48, 20, 188, 82, 38, 144, 214, 226, 253, 10, 229, 49, 221, 205, 141, + 15, 182, 227, 16, 4, 80, 220, 120, 225, 90, 99, 71, 73, 67, 136, 44, 46, 137, 194, 23, 113, 252, 85, + 76, 152, 239, 152, 69, 200, 243, 56, 201, 3, 6, 191, 94, 105, 232, 89, 156, 44, 118, 104, 71, 117, 2, + 43, 219, 142, 79, 229, 88, 4, 110, 170, 139, 162, 94, 223, 60, 53, 224, 131, 108, 228, 63, 251, 156, 235, + 3, 9, 194, 108, 37, 212, 161, 172, 46, 172, 178, 172, 203, 1, 180, 219, 39, 173, 2, 198, 33, 50, 199, + 20, 214, 242, 61, 21, 138, 217, 17, 101, 67, 245, 
215, 16, 91, 8, 25, 222, 222, 82, 38, 113, 232, 125, + 208, 189, 71, 11, 106, 143, 127, 138, 36, 229, 58, 74, 127, 132, 162, 33, 162, 144, 45, 95, 26, 172, 83, + 247, 65, 148, 235, 243, 220, 98, 54, 88, 3, 203, 179, 196, 77, 8, 183, 58, 64, 0, 100, 122, 183, 142, + 154, 133, 62, 105, 240, 213, 172, 60, 73, 59, 109, 64, 81, 43, 81, 238, 251, 164, 171, 103, 32, 62, 50, + 75, 126, 192, 161, 188, 4, 81, 85, 12, 14, 227, 104, 143, 130, 246, 24, 109, 123, 236, 21, 197, 14, 118, + 241, 1, 137, 162, 197, 5, 117, 121, 239, 199, 163, 52, 220, 16, 180, 72, 28, 248, 25, 168, 151, 128, 15, + 235, 249, 124, 35, 90, 24, 91, 123, 49, 4, 249, 191, 32, 197, 102, 189, 55, 241, 78, 68, 67, 213, 14, + 72, 44, 10, 183, 37, 71, 248, 54, 140, 134, 243, 237, 29, 10, 212, 21, 238, 139, 37, 24, 181, 211, 221, + 88, 184, 127, 95, 56, 35, 63, 202, 129, 247, 77, 193, 192, 86, 195, 153, 226, 95, 56, 1, 149, 226, 216, + 249, 137, 129, 225, 175, 157, 122, 206, 110, 164, 241, 54, 17, 137, 56, 28, 144, 179, 80, 19, 39, 59, 6, + 245, 185, 35, 116, 211, 210, 249, 163, 73, 83, 48, 123, 63, 189, 65, 53, 156, 115, 41, 104, 22, 205, 151, + 86, 168, 3, 122, 116, 213, 147, 241, 193, 65, 173, 77, 211, 4, 192, 9, 176, 199, 217, 102, 98, 160, 158, + 57, 14, 17, 95, 190, 68, 204, 133, 15, 188, 131, 204, 109, 104, 110, 50, 41, 89, 155, 213, 98, 248, 55, + 155, 239, 24, 35, 235, 197, 170, 241, 144, 203, 133, 207, 207, 215, 219, 57, 21, 98, 33, 94, 38, 87, 194, + 26, 147, 192, 169, 51, 60, 191, 203, 26, 70, 54, 146, 55, 251, 155, 2, 120, 207, 197, 24, 32, 223, 136, + 50, 150, 162, 233, 157, 60, 226, 82, 162, 134, 129, 48, 90, 241, 253, 101, 230, 160, 94, 156, 138, 39, 221, + 190, 133, 158, 247, 106, 121, 143, 117, 20, 213, 75, 211, 187, 70, 227, 91, 8, 193, 24, 153, 47, 212, 186, + 17, 253, 75, 250, 7, 159, 213, 41, 226, 166, 58, 132, 173, 150, 102, 228, 57, 36, 66, 177, 0, 84, 212, + 114, 111, 15, 215, 223, 10, 195, 79, 227, 6, 66, 30, 35, 29, 18, 160, 150, 230, 159, 7, 204, 243, 142, + 232, 117, 138, 186, 153, 
104, 182, 69, 83, 142, 173, 206, 173, 127, 91, 156, 191, 24, 13, 214, 241, 223, 96, + 222, 16, 179, 106, 152, 140, 102, 75, 91, 242, 117, 154, 206, 106, 127, 173, 203, 206, 10, 209, 146, 136, 67, + 156, 226, 59, 123, 25, 130, 209, 69, 58, 37, 230, 27, 251, 193, 74, 93, 87, 152, 221, 130, 98, 82, 77, + 189, 86, 167, 19, 201, 81, 94, 247, 247, 233, 198, 126, 172, 81, 126, 45, 178, 152, 105, 192, 227, 58, 108, + 117, 151, 9, 220, 80, 134, 233, 214, 104, 136, 153, 198, 30, 233, 33, 229, 200, 62, 248, 18, 9, 73, 60, + 33, 105, 208, 223, 18, 104, 209, 244, 197, 107, 240, 37, 197, 13, 108, 106, 56, 22, 109, 183, 59, 150, 238, + 16, 12, 149, 138, 155, 234, 219, 57, 105, 251, 86, 197, 141, 47, 170, 188, 73, 237, 73, 247, 88, 124, 184, + 67, 57, 218, 237, 174, 252, 0, 236, 31, 248, 225, 54, 222, 85, 59, 11, 64, 32, 142, 131, 44, 33, 55, + 53, 29, 109, 46, 22, 85, 238, 101, 188, 229, 249, 182, 194, 91, 106, 54, 192, 242, 66, 73, 142, 141, 117, + 5, 188, 45, 216, 112, 230, 131, 227, 126, 192, 153, 141, 83, 212, 117, 240, 33, 6, 33, 27, 203, 27, 100, + 18, 95, 150, 208, 161, 166, 30, 110, 197, 14, 94, 113, 52, 162, 159, 42, 136, 179, 152, 34, 78, 28, 170, + 146, 134, 198, 203, 52, 240, 7, 241, 130, 230, 75, 180, 228, 250, 220, 93, 170, 165, 218, 193, 181, 220, 160, + 110, 199, 206, 116, 117, 5, 107, 2, 193, 164, 184, 135, 25, 42, 45, 86, 166, 232, 134, 103, 126, 254, 177, + 63, 138, 79, 184, 208, 55, 15, 187, 49, 37, 122, 209, 3, 26, 107, 80, 207, 115, 137, 89, 26, 251, 138, + 140, 132, 127, 24, 254, 79, 124, 234, 123, 49, 16, 30, 121, 102, 99, 130, 198, 140, 75, 158, 189, 69, 31, + 223, 153, 112, 43, 24, 232, 170, 19, 239, 135, 167, 18, 250, 32, 248, 21, 46, 237, 238, 129, 194, 124, 80, + 237, 178, 186, 248, 22, 193, 145, 170, 154, 120, 185, 199, 88, 133, 238, 3, 20, 123, 21, 82, 144, 202, 66, + 100, 123, 82, 225, 48, 239, 149, 78, 157, 81, 16, 61, 59, 58, 178, 99, 163, 239, 105, 195, 224, 66, 126, + 43, 129, 178, 206, 36, 76, 132, 31, 28, 116, 229, 19, 64, 50, 100, 112, 235, 
162, 101, 68, 66, 51, 236, + 17, 199, 73, 137, 149, 237, 231, 174, 134, 7, 246, 25, 74, 8, 64, 28, 230, 86, 176, 116, 89, 205, 140, + 42, 245, 17, 188, 105, 197, 40, 207, 32, 248, 31, 54, 127, 178, 171, 123, 159, 25, 106, 212, 123, 170, 158, + 220, 191, 112, 202, 225, 39, 133, 30, 92, 50, 151, 46, 147, 169, 129, 179, 72, 186, 250, 243, 3, 122, 16, + 201, 118, 8, 159, 83, 8, 45, 12, 36, 167, 249, 65, 186, 159, 209, 252, 122, 123, 71, 57, 155, 217, 244, + 185, 153, 132, 31, 195, 229, 2, 225, 141, 203, 31, 113, 79, 22, 37, 15, 36, 204, 200, 177, 65, 162, 250, + 235, 128, 134, 42, 252, 129, 253, 81, 222, 230, 237, 156, 213, 39, 186, 251, 86, 231, 87, 203, 16, 134, 98, + 157, 194, 117, 67, 231, 75, 9, 233, 236, 66, 9, 67, 116, 137, 155, 142, 194, 185, 18, 184, 150, 170, 223, + 184, 147, 170, 67, 67, 130, 67, 155, 226, 52, 76, 111, 65, 185, 15, 191, 148, 212, 145, 210, 242, 124, 165, + 42, 242, 137, 225, 110, 68, 113, 197, 213, 193, 176, 80, 12, 5, 223, 197, 227, 118, 85, 35, 144, 102, 71, + 110, 137, 94, 74, 132, 245, 156, 100, 201, 181, 205, 41, 200, 36, 34, 48, 94, 154, 228, 229, 207, 63, 9, + 248, 213, 167, 133, 189, 75, 115, 57, 29, 152, 163, 119, 188, 26, 208, 157, 32, 59, 219, 214, 8, 96, 140, + 51, 16, 251, 170, 23, 30, 26, 0, 164, 153, 45, 63, 112, 150, 121, 254, 112, 250, 168, 124, 114, 70, 50, + 178, 232, 239, 77, 23, 169, 224, 208, 41, 187, 157, 30, 38, 2, 205, 187, 86, 134, 120, 87, 207, 1, 245, + 167, 98, 6, 156, 153, 76, 228, 246, 198, 170, 142, 244, 205, 98, 240, 68, 209, 151, 12, 39, 59, 147, 157, + 155, 111, 144, 172, 213, 135, 159, 183, 32, 48, 141, 8, 126, 235, 16, 17, 95, 250, 47, 28, 24, 56, 211, + 74, 118, 40, 196, 159, 249, 151, 241, 239, 20, 2, 52, 207, 233, 46, 89, 41, 107, 11, 157, 246, 55, 27, + 25, 228, 34, 42, 106, 166, 10, 247, 193, 6, 119, 109, 42, 28, 95, 48, 101, 38, 223, 96, 29, 9, 50, + 197, 233, 191, 188, 93, 181, 36, 55, 1, 22, 205, 120, 220, 72, 26, 175, 22, 187, 240, 24, 58, 194, 102, + 214, 63, 9, 106, 120, 237, 150, 25, 143, 240, 
233, 91, 220, 146, 95, 226, 33, 102, 224, 78, 151, 108, 183, + 225, 156, 181, 54, 178, 203, 177, 20, 208, 63, 57, 120, 48, 59, 19, 171, 224, 73, 139, 42, 206, 65, 95, + 43, 23, 155, 47, 252, 169, 214, 142, 221, 155, 123, 150, 164, 53, 49, 244, 228, 120, 159, 78, 0, 108, 117, + 161, 186, 192, 162, 133, 11, 195, 60, 139, 101, 149, 218, 106, 120, 235, 107, 85, 104, 25, 123, 212, 67, 55, + 191, 7, 127, 142, 223, 222, 200, 218, 5, 227, 237, 5, 90, 13, 219, 141, 11, 222, 150, 202, 169, 193, 105, + 195, 126, 134, 130, 45, 238, 58, 180, 94, 140, 252, 43, 103, 20, 238, 73, 69, 111, 134, 124, 174, 77, 111, + 5, 58, 115, 188, 27, 248, 214, 73, 23, 72, 231, 69, 61, 141, 177, 173, 124, 159, 238, 27, 236, 18, 43, + 236, 138, 116, 16, 42, 19, 34, 69, 94, 128, 210, 146, 249, 181, 157, 209, 171, 140, 158, 93, 183, 89, 145, + 239, 244, 6, 164, 245, 129, 233, 10, 187, 239, 246, 26, 56, 32, 70, 207, 3, 187, 121, 58, 69, 87, 213, + 238, 245, 102, 227, 116, 246, 192, 62, 24, 162, 25, 127, 67, 33, 74, 6, 215, 199, 87, 193, 24, 163, 39, + 48, 51, 159, 217, 27, 220, 53, 199, 170, 116, 25, 145, 223, 137, 249, 245, 2, 225, 116, 94, 20, 252, 189, + 89, 57, 94, 82, 163, 48, 119, 235, 204, 202, 33, 155, 176, 53, 131, 254, 82, 106, 160, 74, 95, 17, 157, + 128, 212, 26, 231, 225, 10, 67, 84, 139, 63, 147, 31, 61, 123, 177, 239, 66, 198, 62, 210, 19, 40, 243, + 145, 160, 245, 251, 243, 52, 134, 49, 111, 29, 135, 90, 166, 157, 89, 222, 44, 126, 29, 139, 196, 73, 13, + 126, 31, 143, 227, 84, 10, 91, 36, 43, 239, 208, 10, 108, 248, 136, 139, 127, 61, 51, 180, 75, 118, 77, + 222, 79, 226, 72, 174, 252, 25, 15, 241, 51, 168, 254, 49, 106, 167, 164, 217, 48, 203, 251, 93, 74, 179, + 51, 235, 229, 77, 242, 79, 195, 193, 207, 89, 121, 201, 179, 61, 182, 116, 188, 4, 210, 249, 56, 178, 95, + 200, 201, 184, 137, 163, 113, 28, 33, 243, 204, 243, 7, 178, 57, 239, 195, 105, 124, 162, 231, 201, 202, 9, + 78, 17, 245, 211, 85, 42, 192, 205, 212, 141, 90, 121, 237, 12, 192, 245, 46, 4, 250, 33, 221, 253, 78, + 
30, 170, 196, 25, 147, 102, 143, 16, 167, 247, 79, 93, 175, 70, 243, 77, 135, 211, 127, 190, 130, 53, 230, + 81, 67, 246, 111, 84, 230, 183, 213, 14, 58, 57, 32, 67, 40, 44, 242, 235, 73, 169, 110, 46, 213, 35, + 33, 21, 62, 137, 58, 230, 227, 166, 55, 28, 158, 222, 106, 163, 64, 132, 55, 132, 244, 59, 131, 24, 160, + 249, 60, 176, 192, 21, 66, 128, 191, 177, 91, 25, 158, 168, 7, 49, 174, 182, 90, 3, 212, 70, 90, 250, + 55, 10, 234, 26, 34, 190, 219, 228, 169, 182, 130, 31, 113, 81, 3, 3, 49, 53, 38, 99, 182, 77, 173, + 148, 110, 174, 180, 180, 240, 16, 253, 223, 173, 23, 155, 29, 71, 44, 188, 227, 11, 233, 14, 58, 193, 60, + 25, 192, 187, 23, 171, 115, 14, 33, 218, 103, 135, 8, 157, 250, 218, 61, 199, 77, 77, 73, 214, 211, 191, + 165, 96, 118, 241, 113, 203, 224, 12, 47, 115, 105, 140, 15, 73, 8, 195, 119, 92, 231, 198, 161, 238, 40, + 89, 108, 30, 52, 222, 146, 189, 8, 238, 191, 243, 156, 214, 245, 107, 198, 132, 53, 7, 115, 169, 130, 67, + 40, 39, 240, 218, 225, 33, 15, 182, 52, 97, 87, 134, 19, 167, 145, 5, 139, 27, 250, 102, 160, 201, 150, + 13, 180, 51, 55, 40, 160, 212, 247, 126, 245, 114, 164, 25, 180, 174, 179, 221, 251, 226, 9, 129, 71, 79, + 140, 79, 73, 224, 78, 205, 15, 131, 174, 211, 154, 173, 171, 254, 10, 224, 77, 145, 212, 247, 132, 99, 4, + 213, 43, 68, 228, 49, 70, 142, 161, 65, 137, 113, 134, 105, 169, 144, 66, 98, 58, 183, 56, 71, 234, 211, + 199, 26, 21, 38, 189, 198, 236, 221, 163, 177, 230, 96, 67, 175, 63, 122, 209, 232, 226, 127, 224, 229, 40, + 205, 120, 235, 82, 54, 106, 179, 246, 66, 170, 139, 241, 87, 185, 12, 38, 222, 138, 181, 150, 7, 82, 16, + 36, 172, 155, 10, 68, 219, 184, 17, 243, 249, 188, 62, 228, 90, 120, 248, 73, 40, 75, 180, 46, 170, 215, + 139, 196, 59, 182, 142, 40, 151, 149, 175, 172, 38, 69, 134, 166, 196, 111, 63, 37, 152, 33, 58, 39, 34, + 63, 238, 202, 220, 4, 54, 63, 168, 144, 76, 213, 111, 185, 235, 132, 165, 122, 133, 119, 132, 238, 94, 154, + 174, 178, 38, 59, 76, 214, 53, 99, 109, 207, 9, 228, 148, 128, 101, 169, 
91, 39, 60, 186, 22, 45, 56, + 22, 125, 211, 81, 14, 191, 181, 122, 76, 63, 196, 181, 128, 61, 115, 34, 155, 122, 121, 127, 247, 247, 177, + 134, 214, 4, 119, 177, 251, 5, 197, 56, 141, 252, 129, 97, 22, 87, 231, 28, 166, 104, 114, 189, 149, 89, + 226, 131, 24, 252, 150, 13, 211, 225, 106, 132, 139, 33, 196, 70, 243, 49, 64, 215, 182, 34, 200, 61, 89, + 160, 131, 2, 66, 81, 162, 174, 176, 190, 202, 236, 50, 184, 66, 136, 210, 15, 178, 12, 251, 96, 129, 208, + 42, 249, 241, 42, 26, 74, 141, 51, 155, 131, 29, 27, 173, 179, 156, 89, 51, 35, 96, 218, 140, 112, 169, + 164, 73, 221, 252, 230, 108, 92, 61, 250, 160, 165, 70, 138, 36, 38, 250, 87, 224, 201, 84, 152, 216, 132, + 215, 163, 137, 105, 81, 82, 196, 239, 14, 222, 234, 74, 233, 142, 194, 12, 163, 214, 184, 251, 162, 124, 212, + 1, 43, 187, 138, 36, 113, 217, 24, 232, 148, 140, 90, 101, 92, 31, 166, 155, 227, 58, 111, 99, 249, 244, + 136, 108, 102, 70, 10, 58, 210, 73, 133, 162, 70, 254, 38, 149, 239, 154, 222, 234, 100, 64, 200, 109, 199, + 27, 77, 141, 239, 122, 212, 165, 213, 216, 158, 192, 83, 25, 186, 239, 4, 156, 69, 104, 4, 81, 226, 148, + 61, 210, 128, 185, 226, 220, 22, 47, 215, 163, 177, 232, 228, 196, 54, 22, 33, 250, 77, 156, 78, 31, 37, + 87, 19, 114, 207, 19, 153, 45, 118, 161, 126, 117, 154, 228, 42, 70, 88, 124, 233, 142, 184, 206, 206, 190, + 151, 60, 24, 46, 185, 176, 202, 180, 87, 251, 208, 13, 9, 84, 105, 190, 186, 37, 174, 35, 2, 198, 135, + 174, 70, 253, 210, 215, 214, 79, 53, 97, 86, 94, 208, 134, 28, 89, 170, 198, 9, 103, 132, 40, 40, 215, + 242, 75, 114, 70, 8, 163, 220, 207, 181, 54, 70, 11, 10, 238, 84, 177, 151, 143, 16, 16, 197, 217, 64, + 155, 151, 197, 247, 121, 179, 235, 51, 28, 173, 99, 202, 85, 197, 116, 89, 55, 196, 223, 144, 99, 128, 1, + 246, 36, 107, 65, 179, 127, 181, 252, 164, 147, 147, 245, 136, 59, 212, 120, 171, 111, 165, 135, 172, 214, 164, + 184, 154, 183, 12, 157, 205, 190, 193, 110, 53, 95, 127, 182, 177, 183, 202, 213, 101, 19, 224, 26, 164, 86, + 211, 139, 148, 93, 
237, 127, 238, 100, 237, 46, 217, 18, 57, 78, 32, 125, 167, 130, 16, 202, 44, 207, 81, + 206, 140, 183, 95, 152, 28, 76, 119, 83, 248, 230, 15, 139, 212, 91, 242, 185, 34, 173, 147, 167, 30, 133, + 110, 173, 57, 220, 202, 220, 135, 196, 163, 47, 174, 5, 151, 232, 119, 28, 37, 36, 136, 150, 91, 205, 191, + 112, 189, 128, 119, 111, 169, 104, 224, 106, 243, 198, 53, 108, 138, 221, 84, 160, 211, 232, 168, 184, 120, 231, + 16, 248, 138, 90, 107, 54, 248, 33, 54, 65, 185, 206, 26, 186, 112, 97, 113, 84, 94, 227, 233, 182, 148, + 51, 4, 14, 198, 28, 243, 4, 33, 81, 5, 146, 103, 248, 225, 50, 247, 219, 79, 248, 74, 53, 176, 201, + 214, 136, 12, 34, 147, 251, 113, 11, 118, 146, 130, 226, 106, 85, 56, 166, 40, 104, 115, 151, 124, 4, 130, + 220, 229, 84, 115, 8, 219, 197, 95, 161, 118, 94, 166, 213, 62, 46, 142, 121, 153, 79, 221, 97, 112, 131, + 66, 174, 18, 33, 154, 221, 45, 46, 105, 249, 159, 135, 107, 186, 135, 15, 184, 84, 213, 37, 33, 64, 223, + 166, 223, 35, 55, 187, 68, 23, 208, 161, 103, 12, 28, 73, 158, 248, 75, 243, 7, 81, 177, 39, 109, 155, + 66, 8, 18, 237, 119, 199, 19, 34, 188, 22, 123, 96, 86, 9, 140, 49, 216, 160, 227, 245, 27, 161, 215, + 112, 59, 149, 141, 56, 228, 41, 146, 190, 107, 146, 147, 243, 39, 228, 229, 49, 22, 175, 128, 185, 201, 62, + 190, 11, 195, 202, 127, 172, 157, 94, 30, 140, 48, 175, 41, 227, 244, 166, 97, 238, 228, 153, 137, 19, 136, + 215, 237, 41, 164, 72, 216, 85, 143, 245, 86, 149, 105, 43, 115, 251, 174, 19, 236, 158, 186, 225, 124, 38, + 167, 215, 39, 58, 218, 248, 22, 92, 251, 240, 175, 221, 132, 116, 54, 201, 106, 172, 41, 61, 221, 83, 52, + 206, 154, 90, 195, 218, 238, 16, 36, 124, 216, 75, 146, 68, 152, 192, 176, 59, 75, 62, 214, 55, 79, 225, + 129, 53, 205, 30, 52, 120, 16, 105, 245, 177, 45, 163, 102, 243, 227, 86, 12, 21, 123, 85, 85, 92, 11, + 25, 155, 198, 106, 199, 108, 35, 103, 223, 5, 153, 54, 149, 42, 152, 151, 141, 45, 79, 115, 168, 158, 74, + 1, 113, 68, 26, 63, 79, 120, 215, 192, 106, 228, 125, 169, 216, 136, 83, 189, 
212, 154, 72, 18, 22, 154, + 197, 61, 18, 129, 223, 110, 74, 237, 179, 38, 146, 135, 19, 25, 197, 220, 20, 202, 26, 239, 1, 153, 101, + 22, 154, 4, 213, 36, 73, 111, 216, 190, 227, 181, 176, 39, 152, 97, 224, 124, 154, 89, 126, 73, 2, 157, + 239, 41, 246, 35, 185, 212, 187, 53, 77, 29, 16, 244, 130, 169, 6, 130, 224, 237, 88, 81, 33, 222, 222, + 186, 237, 216, 63, 1, 71, 130, 182, 137, 24, 13, 92, 109, 86, 119, 179, 129, 182, 198, 229, 35, 250, 69, + 93, 88, 133, 243, 169, 247, 141, 153, 106, 29, 102, 127, 108, 91, 230, 53, 235, 139, 14, 178, 165, 28, 102, + 44, 151, 227, 236, 59, 149, 68, 44, 65, 125, 135, 198, 222, 207, 203, 51, 188, 210, 249, 53, 229, 247, 18, + 119, 111, 232, 120, 17, 82, 22, 54, 196, 160, 182, 196, 26, 29, 186, 46, 22, 99, 220, 253, 191, 148, 140, + 121, 238, 135, 207, 129, 68, 43, 142, 212, 98, 55, 159, 141, 137, 19, 149, 58, 241, 23, 47, 73, 243, 37, + 172, 34, 138, 122, 7, 117, 83, 152, 215, 218, 202, 61, 46, 168, 79, 55, 207, 43, 168, 50, 211, 220, 204, + 194, 75, 179, 24, 132, 85, 190, 185, 123, 116, 76, 69, 228, 203, 88, 42, 241, 107, 103, 117, 222, 62, 95, + 85, 14, 42, 219, 27, 54, 32, 245, 2, 32, 37, 104, 138, 248, 31, 188, 90, 245, 200, 41, 218, 232, 24, + 106, 187, 187, 203, 212, 21, 158, 137, 147, 170, 253, 98, 214, 76, 29, 234, 110, 95, 234, 67, 89, 197, 92, + 148, 230, 171, 161, 4, 13, 188, 37, 92, 24, 244, 137, 146, 89, 116, 72, 63, 177, 59, 141, 86, 228, 222, + 99, 91, 199, 50, 192, 161, 79, 228, 70, 16, 64, 127, 41, 21, 132, 167, 209, 11, 235, 106, 32, 70, 196, + 179, 27, 40, 246, 195, 9, 89, 37, 33, 89, 92, 33, 208, 176, 144, 150, 144, 110, 102, 36, 37, 131, 143, + 32, 62, 226, 116, 4, 106, 6, 100, 156, 56, 202, 231, 83, 138, 22, 42, 147, 111, 229, 209, 99, 55, 44, + 72, 162, 254, 182, 1, 158, 132, 27, 18, 141, 208, 7, 189, 207, 244, 186, 129, 191, 47, 220, 144, 87, 151, + 240, 38, 40, 34, 168, 220, 221, 7, 30, 81, 21, 121, 162, 112, 105, 101, 234, 9, 237, 65, 199, 160, 229, + 227, 197, 217, 177, 12, 52, 20, 48, 159, 79, 239, 
84, 144, 224, 67, 178, 140, 97, 100, 229, 247, 121, 68, + 248, 112, 40, 219, 25, 98, 240, 229, 87, 60, 239, 130, 138, 238, 205, 142, 221, 140, 83, 99, 166, 199, 183, + 135, 4, 36, 130, 66, 81, 171, 219, 227, 67, 237, 147, 50, 49, 223, 65, 82, 147, 9, 26, 157, 130, 112, + 93, 3, 36, 74, 106, 58, 241, 205, 91, 35, 137, 148, 201, 81, 4, 73, 17, 232, 30, 83, 175, 242, 187, + 165, 151, 164, 150, 60, 238, 196, 108, 186, 250, 96, 22, 125, 73, 28, 155, 242, 163, 173, 42, 93, 111, 197, + 18, 191, 19, 199, 180, 250, 86, 132, 243, 244, 237, 143, 144, 157, 29, 41, 182, 239, 1, 251, 152, 124, 186, + 45, 7, 88, 142, 125, 100, 82, 73, 145, 177, 201, 185, 251, 114, 131, 73, 64, 209, 111, 220, 231, 21, 60, + 8, 203, 239, 230, 103, 254, 209, 60, 57, 246, 139, 123, 109, 178, 242, 212, 138, 238, 182, 39, 181, 52, 189, + 188, 240, 49, 250, 189, 219, 184, 27, 17, 41, 253, 216, 94, 134, 98, 205, 168, 84, 105, 92, 27, 136, 181, + 140, 186, 208, 218, 119, 180, 56, 72, 0, 173, 93, 188, 111, 168, 206, 21, 249, 160, 126, 180, 21, 14, 214, + 32, 11, 183, 239, 88, 68, 120, 212, 89, 208, 109, 32, 74, 59, 48, 109, 143, 161, 187, 196, 36, 55, 89, + 111, 132, 124, 59, 112, 141, 91, 97, 254, 12, 208, 246, 190, 196, 83, 244, 223, 191, 66, 64, 236, 42, 205, + 131, 105, 75, 211, 224, 201, 169, 203, 182, 48, 36, 245, 91, 118, 227, 5, 42, 36, 183, 185, 134, 224, 196, + 84, 3, 32, 162, 200, 154, 223, 99, 215, 191, 251, 227, 47, 31, 167, 35, 210, 230, 163, 224, 39, 43, 70, + 165, 213, 168, 193, 40, 122, 199, 16, 228, 11, 233, 169, 2, 112, 135, 207, 206, 158, 24, 71, 134, 246, 207, + 202, 138, 211, 79, 123, 132, 100, 12, 2, 186, 181, 227, 9, 30, 124, 195, 52, 39, 42, 53, 182, 222, 136, + 151, 118, 154, 98, 116, 155, 228, 239, 242, 197, 236, 57, 172, 188, 229, 178, 125, 151, 162, 147, 146, 115, 86, + 118, 92, 248, 57, 31, 181, 113, 185, 36, 133, 239, 143, 77, 72, 148, 122, 9, 0, 45, 83, 29, 86, 143, + 54, 112, 106, 240, 144, 57, 183, 1, 5, 1, 58, 194, 244, 155, 132, 235, 203, 26, 138, 152, 181, 23, 218, + 
244, 219, 215, 234, 43, 228, 245, 27, 201, 87, 237, 187, 15, 163, 189, 3, 209, 86, 247, 252, 58, 106, 145, + 209, 133, 229, 212, 213, 21, 224, 133, 250, 95, 145, 235, 103, 37, 84, 100, 32, 225, 19, 24, 224, 22, 111, + 14, 245, 9, 99, 0, 192, 25, 111, 16, 105, 189, 212, 142, 35, 124, 215, 198, 21, 199, 155, 40, 98, 17, + 105, 119, 236, 71, 84, 50, 189, 57, 115, + }; + uint8_t ret[18 * 2 * 16 * 32] = { + 116, 147, 48, 65, 40, 100, 194, 33, 93, 130, 44, 108, 251, 49, 249, 225, 210, 23, 163, 80, 144, 249, 202, + 157, 185, 38, 167, 187, 87, 122, 245, 235, 60, 192, 171, 182, 79, 111, 136, 246, 129, 40, 49, 28, 118, 8, + 240, 99, 87, 125, 140, 65, 77, 100, 210, 196, 81, 254, 202, 173, 106, 169, 44, 81, 124, 16, 168, 106, 31, + 152, 153, 29, 217, 40, 55, 3, 46, 225, 195, 74, 150, 144, 250, 121, 219, 184, 110, 48, 36, 13, 129, 10, + 144, 246, 185, 245, 24, 147, 141, 177, 14, 127, 138, 123, 197, 208, 215, 161, 16, 146, 157, 112, 145, 4, 229, + 212, 81, 65, 9, 90, 77, 16, 41, 182, 250, 68, 154, 22, 136, 248, 93, 254, 225, 175, 167, 198, 205, 226, + 26, 127, 99, 33, 91, 49, 108, 160, 234, 162, 185, 13, 51, 205, 229, 238, 237, 45, 33, 244, 8, 122, 141, + 224, 6, 153, 111, 123, 12, 87, 164, 50, 225, 146, 175, 22, 248, 123, 13, 213, 128, 17, 54, 39, 79, 83, + 81, 185, 85, 123, 174, 58, 108, 98, 82, 141, 159, 48, 247, 176, 14, 24, 25, 81, 195, 29, 252, 247, 101, + 102, 247, 88, 167, 111, 212, 7, 71, 168, 177, 236, 226, 252, 25, 176, 11, 99, 130, 194, 56, 37, 78, 147, + 86, 45, 89, 181, 229, 155, 137, 124, 100, 129, 180, 43, 242, 15, 104, 104, 143, 21, 134, 54, 229, 126, 235, + 143, 195, 87, 112, 102, 186, 176, 253, 233, 9, 60, 16, 153, 238, 6, 184, 11, 222, 168, 206, 170, 209, 89, + 195, 233, 65, 45, 251, 21, 124, 116, 166, 66, 180, 106, 11, 38, 185, 66, 173, 211, 161, 236, 163, 20, 125, + 244, 200, 73, 52, 183, 70, 249, 203, 19, 105, 80, 160, 20, 83, 82, 56, 28, 220, 244, 185, 44, 200, 6, + 20, 88, 229, 240, 105, 10, 228, 227, 42, 35, 71, 225, 154, 173, 209, 227, 193, 210, 175, 
49, 61, 53, 200, + 48, 95, 62, 75, 88, 104, 25, 84, 57, 80, 203, 240, 34, 24, 182, 164, 31, 80, 124, 137, 197, 159, 252, + 97, 203, 215, 89, 243, 65, 20, 101, 38, 16, 105, 169, 141, 143, 236, 94, 204, 55, 118, 252, 180, 167, 154, + 179, 187, 216, 220, 168, 214, 68, 205, 120, 141, 114, 200, 90, 30, 147, 73, 108, 141, 211, 24, 49, 12, 178, + 196, 81, 174, 59, 54, 210, 225, 78, 222, 12, 150, 183, 0, 139, 233, 227, 25, 128, 63, 216, 135, 63, 125, + 145, 170, 92, 215, 42, 243, 82, 252, 52, 20, 100, 133, 31, 159, 167, 174, 16, 80, 4, 172, 171, 241, 64, + 221, 77, 49, 220, 191, 79, 142, 208, 248, 159, 122, 110, 205, 243, 210, 153, 89, 112, 19, 92, 20, 188, 27, + 241, 104, 168, 143, 211, 226, 210, 85, 25, 199, 155, 26, 177, 229, 15, 61, 94, 105, 10, 71, 16, 236, 109, + 67, 196, 57, 169, 216, 112, 60, 100, 106, 26, 32, 210, 167, 182, 236, 193, 12, 69, 140, 80, 160, 150, 146, + 174, 64, 242, 231, 77, 219, 142, 78, 209, 245, 203, 53, 120, 113, 143, 80, 128, 86, 34, 117, 173, 47, 52, + 236, 65, 156, 216, 29, 31, 163, 124, 180, 184, 91, 179, 120, 243, 190, 61, 184, 239, 251, 148, 206, 162, 250, + 184, 193, 102, 150, 164, 38, 229, 225, 20, 80, 66, 173, 117, 12, 75, 233, 204, 118, 97, 182, 180, 65, 254, + 156, 114, 148, 165, 126, 182, 82, 215, 192, 30, 139, 65, 190, 185, 108, 203, 125, 87, 228, 182, 26, 124, 116, + 66, 117, 99, 253, 46, 114, 42, 156, 168, 28, 230, 111, 80, 51, 152, 104, 20, 89, 75, 64, 189, 91, 29, + 100, 128, 209, 157, 180, 231, 46, 93, 244, 61, 11, 191, 180, 118, 75, 137, 231, 31, 199, 202, 96, 153, 85, + 46, 249, 111, 176, 62, 78, 48, 29, 122, 153, 253, 88, 41, 168, 58, 212, 200, 114, 72, 234, 50, 128, 92, + 35, 185, 29, 163, 96, 234, 166, 31, 75, 203, 55, 16, 209, 203, 63, 209, 187, 238, 112, 232, 210, 94, 246, + 25, 132, 45, 186, 3, 124, 14, 218, 82, 49, 39, 72, 137, 90, 2, 233, 4, 245, 236, 98, 85, 46, 181, + 19, 189, 36, 94, 156, 72, 62, 175, 20, 219, 192, 136, 194, 36, 125, 20, 220, 11, 126, 192, 133, 154, 79, + 126, 18, 204, 193, 208, 161, 189, 18, 195, 
124, 32, 121, 168, 103, 158, 5, 138, 42, 196, 129, 130, 84, 231, + 77, 231, 72, 47, 246, 127, 241, 31, 32, 55, 102, 13, 21, 206, 1, 139, 218, 195, 227, 71, 113, 243, 89, + 219, 162, 63, 212, 180, 20, 67, 82, 106, 132, 83, 88, 64, 105, 81, 62, 85, 109, 137, 52, 151, 91, 189, + 44, 243, 24, 35, 123, 251, 82, 247, 178, 9, 198, 9, 209, 106, 12, 86, 247, 252, 59, 53, 229, 66, 112, + 212, 100, 197, 179, 203, 228, 220, 2, 166, 79, 209, 26, 124, 110, 5, 71, 201, 154, 133, 188, 96, 0, 112, + 239, 30, 207, 228, 68, 111, 141, 28, 249, 46, 25, 6, 223, 93, 72, 102, 143, 102, 54, 48, 65, 142, 252, + 167, 51, 89, 210, 163, 57, 9, 212, 4, 25, 175, 53, 213, 235, 62, 222, 131, 128, 174, 10, 130, 99, 240, + 44, 25, 103, 77, 113, 73, 40, 238, 211, 49, 131, 236, 251, 74, 156, 164, 160, 201, 81, 142, 212, 232, 227, + 70, 38, 109, 213, 156, 128, 232, 78, 45, 88, 60, 208, 35, 214, 89, 242, 54, 208, 243, 18, 96, 27, 41, + 229, 11, 48, 153, 216, 251, 167, 240, 41, 218, 152, 225, 245, 21, 106, 149, 158, 215, 189, 18, 135, 1, 111, + 224, 41, 16, 156, 111, 182, 189, 87, 7, 234, 217, 84, 247, 240, 142, 4, 237, 26, 58, 4, 165, 250, 173, + 180, 157, 186, 145, 209, 230, 109, 52, 27, 168, 208, 188, 236, 208, 214, 128, 143, 174, 8, 97, 20, 131, 92, + 3, 111, 89, 133, 46, 128, 3, 208, 226, 19, 55, 80, 233, 205, 129, 170, 242, 249, 159, 122, 132, 81, 250, + 200, 23, 79, 36, 13, 141, 139, 241, 228, 71, 101, 241, 232, 53, 172, 81, 120, 70, 187, 17, 234, 163, 146, + 126, 174, 165, 104, 204, 127, 244, 50, 250, 145, 56, 163, 172, 204, 166, 9, 162, 101, 227, 187, 13, 172, 237, + 16, 107, 207, 124, 144, 36, 31, 168, 120, 176, 82, 68, 158, 131, 118, 150, 145, 74, 93, 153, 117, 112, 185, + 49, 196, 162, 68, 219, 245, 9, 57, 117, 189, 203, 60, 10, 89, 75, 111, 145, 223, 234, 97, 210, 161, 0, + 231, 168, 182, 217, 56, 213, 28, 213, 109, 118, 73, 28, 70, 254, 215, 134, 188, 22, 219, 171, 74, 134, 59, + 89, 192, 116, 74, 118, 90, 233, 49, 108, 75, 160, 35, 239, 162, 175, 204, 99, 143, 112, 6, 206, 51, 176, + 149, 
239, 70, 108, 30, 14, 96, 188, 86, 100, 252, 18, 62, 158, 48, 180, 200, 93, 221, 111, 36, 245, 14, + 215, 194, 15, 169, 142, 56, 106, 201, 142, 143, 21, 147, 50, 194, 78, 202, 47, 228, 40, 178, 99, 10, 20, + 67, 79, 27, 179, 73, 35, 54, 16, 174, 51, 18, 168, 165, 154, 49, 169, 91, 116, 188, 177, 133, 62, 159, + 145, 202, 172, 85, 191, 240, 181, 170, 219, 69, 217, 121, 56, 145, 65, 128, 21, 137, 198, 17, 33, 193, 161, + 127, 132, 166, 226, 237, 167, 211, 44, 37, 172, 74, 224, 245, 87, 41, 177, 203, 143, 36, 214, 115, 64, 234, + 76, 166, 50, 132, 118, 245, 111, 118, 242, 73, 187, 51, 230, 102, 150, 63, 138, 46, 118, 66, 147, 244, 209, + 94, 30, 243, 41, 170, 54, 81, 184, 208, 158, 34, 231, 170, 86, 29, 91, 49, 110, 87, 197, 19, 236, 125, + 164, 48, 208, 254, 72, 43, 120, 68, 197, 110, 0, 248, 123, 160, 147, 51, 69, 237, 132, 168, 102, 247, 83, + 49, 96, 111, 72, 104, 88, 144, 226, 139, 146, 9, 133, 224, 204, 45, 89, 145, 211, 102, 142, 190, 7, 2, + 76, 236, 233, 46, 80, 162, 254, 104, 62, 113, 2, 85, 166, 179, 200, 244, 47, 72, 169, 49, 222, 65, 197, + 188, 157, 48, 176, 90, 66, 189, 235, 169, 156, 240, 69, 122, 248, 129, 30, 123, 193, 224, 112, 227, 117, 218, + 103, 248, 4, 51, 149, 46, 120, 96, 17, 89, 131, 25, 82, 212, 100, 208, 59, 217, 115, 164, 12, 160, 246, + 153, 75, 117, 193, 163, 63, 113, 164, 207, 9, 173, 23, 96, 106, 88, 54, 238, 196, 29, 56, 140, 182, 89, + 45, 218, 236, 192, 155, 6, 191, 31, 227, 177, 93, 150, 43, 162, 50, 15, 100, 121, 196, 19, 134, 76, 8, + 214, 135, 165, 73, 76, 201, 108, 105, 173, 113, 235, 144, 135, 53, 60, 47, 129, 78, 154, 85, 101, 106, 79, + 5, 222, 20, 68, 225, 73, 112, 145, 141, 151, 46, 211, 81, 48, 21, 130, 102, 30, 150, 29, 182, 92, 99, + 221, 156, 147, 7, 177, 236, 236, 40, 29, 169, 121, 221, 53, 139, 23, 151, 103, 107, 45, 57, 91, 15, 15, + 53, 177, 29, 204, 7, 22, 16, 209, 116, 135, 76, 201, 242, 219, 235, 149, 139, 239, 72, 5, 76, 209, 245, + 129, 139, 194, 191, 16, 46, 2, 252, 92, 55, 175, 12, 10, 176, 110, 225, 110, 
251, 113, 4, 204, 143, 72, + 34, 40, 226, 174, 6, 102, 158, 1, 85, 183, 181, 101, 132, 249, 206, 207, 73, 20, 25, 33, 141, 229, 37, + 219, 56, 188, 159, 195, 70, 157, 49, 218, 100, 216, 253, 213, 254, 198, 227, 224, 118, 156, 166, 172, 47, 205, + 15, 157, 185, 244, 132, 61, 254, 115, 214, 88, 132, 212, 230, 220, 157, 172, 58, 210, 48, 143, 151, 196, 87, + 197, 118, 98, 185, 162, 176, 98, 219, 8, 201, 8, 13, 232, 139, 162, 137, 8, 152, 83, 165, 145, 13, 109, + 196, 59, 152, 137, 87, 148, 24, 252, 244, 52, 172, 177, 21, 248, 49, 76, 97, 175, 186, 79, 164, 134, 165, + 155, 126, 51, 25, 191, 128, 69, 55, 58, 218, 192, 153, 157, 137, 47, 26, 250, 195, 141, 229, 153, 11, 132, + 67, 235, 249, 159, 148, 146, 169, 20, 151, 122, 188, 13, 85, 201, 114, 171, 106, 51, 89, 171, 86, 54, 204, + 93, 182, 121, 197, 64, 213, 192, 221, 46, 22, 54, 126, 218, 184, 116, 9, 119, 62, 180, 131, 88, 6, 56, + 122, 253, 196, 121, 107, 2, 141, 16, 73, 252, 56, 156, 142, 94, 251, 161, 219, 214, 245, 38, 143, 162, 247, + 3, 0, 240, 43, 50, 12, 123, 162, 220, 128, 123, 55, 10, 237, 181, 63, 25, 193, 77, 247, 152, 220, 30, + 73, 244, 56, 149, 197, 88, 0, 11, 29, 249, 73, 230, 117, 18, 14, 152, 52, 250, 160, 193, 232, 184, 3, + 251, 234, 68, 223, 110, 181, 228, 189, 26, 140, 164, 250, 77, 38, 1, 246, 209, 144, 8, 24, 151, 89, 228, + 119, 96, 181, 26, 214, 240, 224, 178, 59, 95, 221, 25, 164, 235, 121, 249, 113, 239, 78, 141, 250, 147, 70, + 230, 14, 73, 137, 106, 24, 191, 182, 234, 31, 182, 16, 188, 192, 135, 73, 203, 8, 89, 191, 225, 64, 2, + 50, 96, 141, 89, 73, 165, 84, 82, 194, 1, 148, 58, 10, 149, 199, 216, 69, 185, 228, 31, 118, 124, 24, + 13, 2, 79, 170, 75, 70, 161, 7, 237, 86, 161, 146, 49, 195, 175, 137, 85, 174, 215, 175, 61, 238, 192, + 129, 177, 123, 199, 42, 74, 192, 212, 129, 19, 153, 216, 124, 246, 244, 56, 229, 1, 207, 151, 30, 9, 177, + 144, 121, 229, 221, 36, 147, 157, 241, 73, 151, 96, 42, 250, 29, 45, 177, 111, 103, 178, 189, 17, 84, 218, + 111, 243, 52, 35, 243, 164, 44, 25, 
232, 117, 63, 184, 188, 186, 141, 41, 92, 190, 119, 45, 149, 232, 108, + 28, 95, 203, 233, 77, 199, 251, 249, 62, 96, 54, 5, 128, 154, 49, 108, 160, 237, 59, 165, 186, 112, 121, + 227, 211, 174, 25, 87, 37, 217, 153, 67, 156, 144, 128, 130, 183, 62, 144, 70, 123, 147, 163, 203, 44, 254, + 185, 4, 205, 127, 129, 125, 143, 193, 95, 26, 92, 75, 68, 11, 174, 167, 209, 0, 2, 222, 235, 123, 129, + 21, 204, 143, 107, 239, 44, 10, 27, 153, 56, 48, 48, 164, 108, 74, 20, 143, 233, 98, 192, 254, 128, 226, + 46, 0, 166, 183, 15, 106, 144, 254, 65, 54, 204, 249, 29, 105, 253, 134, 148, 38, 16, 9, 3, 28, 132, + 222, 157, 65, 159, 106, 78, 107, 32, 72, 7, 206, 122, 9, 105, 21, 115, 182, 104, 228, 136, 58, 186, 167, + 179, 98, 83, 254, 5, 118, 174, 128, 144, 32, 179, 166, 213, 155, 220, 175, 79, 107, 137, 150, 117, 44, 98, + 58, 76, 7, 29, 193, 165, 124, 171, 218, 3, 210, 191, 62, 252, 98, 249, 70, 205, 149, 74, 107, 208, 57, + 72, 56, 11, 143, 190, 64, 60, 67, 52, 159, 110, 118, 133, 141, 125, 44, 1, 55, 211, 201, 122, 169, 7, + 160, 44, 134, 213, 248, 42, 95, 139, 36, 143, 103, 0, 147, 199, 59, 201, 64, 106, 141, 37, 82, 234, 245, + 76, 253, 149, 75, 131, 211, 209, 4, 68, 87, 121, 186, 216, 240, 172, 69, 19, 31, 5, 125, 177, 155, 207, + 18, 22, 56, 240, 221, 161, 161, 75, 1, 32, 157, 104, 239, 162, 213, 118, 228, 212, 111, 157, 167, 238, 157, + 151, 22, 79, 102, 138, 53, 233, 230, 248, 181, 245, 252, 146, 70, 145, 94, 37, 66, 187, 23, 23, 155, 210, + 135, 62, 241, 166, 88, 67, 60, 180, 11, 13, 238, 198, 161, 207, 133, 126, 174, 113, 118, 247, 159, 223, 191, + 57, 222, 196, 80, 138, 173, 12, 180, 94, 233, 186, 16, 204, 8, 92, 106, 85, 90, 186, 83, 136, 248, 164, + 79, 238, 160, 177, 130, 187, 100, 202, 200, 122, 245, 18, 35, 176, 246, 5, 58, 186, 214, 162, 33, 128, 244, + 199, 152, 111, 236, 226, 113, 92, 138, 92, 68, 157, 191, 218, 41, 218, 190, 244, 184, 189, 159, 167, 11, 71, + 92, 62, 53, 195, 84, 116, 254, 32, 63, 160, 128, 21, 149, 237, 125, 80, 133, 93, 31, 184, 158, 193, 
122, + 218, 152, 218, 52, 227, 76, 167, 91, 36, 242, 5, 196, 215, 173, 166, 122, 9, 244, 30, 136, 150, 244, 127, + 221, 28, 65, 252, 237, 40, 243, 162, 205, 13, 32, 125, 126, 156, 228, 244, 208, 73, 177, 148, 167, 72, 217, + 234, 184, 222, 42, 74, 187, 159, 202, 50, 248, 39, 172, 7, 116, 151, 230, 221, 36, 35, 112, 118, 30, 166, + 35, 167, 24, 24, 135, 112, 167, 41, 22, 83, 202, 80, 2, 67, 249, 1, 133, 11, 58, 64, 185, 114, 95, + 16, 53, 188, 66, 1, 59, 230, 194, 167, 25, 47, 144, 245, 59, 192, 206, 213, 50, 131, 226, 233, 87, 242, + 246, 156, 26, 189, 5, 140, 235, 41, 119, 145, 38, 175, 193, 228, 190, 204, 151, 142, 228, 47, 42, 244, 183, + 190, 215, 178, 220, 68, 33, 139, 62, 95, 64, 156, 236, 180, 106, 53, 198, 194, 20, 162, 33, 146, 100, 86, + 134, 119, 198, 40, 225, 54, 19, 220, 51, 18, 83, 45, 120, 1, 164, 146, 174, 179, 170, 247, 151, 151, 125, + 43, 35, 131, 76, 130, 251, 160, 212, 3, 57, 145, 237, 61, 66, 200, 187, 45, 236, 87, 115, 224, 171, 159, + 208, 35, 61, 14, 123, 132, 114, 181, 33, 72, 93, 106, 119, 169, 156, 78, 158, 209, 185, 243, 253, 37, 199, + 192, 90, 121, 87, 65, 161, 106, 65, 187, 81, 53, 249, 37, 182, 115, 203, 134, 169, 32, 153, 251, 223, 183, + 221, 162, 211, 182, 237, 225, 89, 7, 225, 57, 5, 56, 139, 2, 18, 248, 163, 151, 7, 199, 218, 199, 4, + 237, 101, 46, 178, 15, 171, 49, 161, 58, 229, 16, 103, 150, 219, 159, 235, 162, 40, 22, 178, 126, 125, 201, + 253, 99, 60, 165, 195, 192, 148, 184, 72, 46, 151, 97, 206, 13, 89, 7, 145, 217, 39, 3, 226, 93, 182, + 131, 241, 63, 138, 207, 31, 212, 89, 160, 45, 194, 90, 78, 123, 58, 55, 55, 193, 170, 128, 121, 174, 94, + 76, 113, 150, 77, 97, 254, 74, 154, 68, 235, 109, 3, 205, 132, 1, 206, 140, 217, 31, 4, 67, 85, 201, + 44, 79, 223, 156, 172, 39, 242, 215, 113, 127, 33, 65, 203, 100, 213, 81, 75, 14, 236, 197, 16, 15, 49, + 241, 183, 29, 211, 202, 130, 74, 189, 233, 105, 80, 233, 60, 197, 22, 138, 141, 124, 236, 64, 109, 182, 142, + 131, 240, 95, 94, 34, 240, 220, 110, 164, 134, 208, 26, 138, 
123, 113, 197, 137, 205, 229, 75, 208, 51, 153, + 168, 23, 2, 245, 198, 151, 172, 126, 56, 241, 41, 34, 109, 29, 36, 175, 63, 233, 78, 203, 19, 43, 155, + 15, 217, 229, 201, 56, 28, 195, 17, 90, 33, 102, 243, 81, 58, 169, 58, 163, 160, 177, 90, 26, 113, 77, + 253, 227, 187, 8, 214, 224, 195, 108, 243, 106, 215, 66, 184, 129, 51, 51, 221, 70, 152, 196, 12, 43, 140, + 111, 58, 239, 27, 158, 104, 226, 196, 37, 161, 233, 46, 9, 198, 53, 198, 114, 11, 103, 81, 119, 9, 215, + 190, 22, 202, 41, 19, 143, 19, 39, 221, 221, 16, 176, 53, 45, 85, 108, 152, 1, 106, 154, 223, 25, 101, + 190, 154, 35, 130, 202, 209, 158, 244, 240, 81, 237, 12, 224, 68, 87, 140, 130, 50, 130, 205, 17, 164, 22, + 93, 86, 41, 7, 201, 220, 254, 242, 188, 41, 105, 119, 168, 202, 95, 238, 151, 240, 73, 56, 197, 48, 28, + 205, 164, 189, 253, 254, 28, 23, 254, 106, 178, 201, 96, 233, 111, 135, 73, 202, 40, 189, 47, 146, 229, 71, + 142, 119, 39, 20, 192, 93, 135, 67, 191, 84, 139, 7, 130, 110, 105, 204, 18, 68, 217, 85, 226, 51, 232, + 112, 191, 32, 214, 133, 149, 58, 68, 177, 181, 73, 24, 134, 206, 95, 144, 187, 157, 130, 142, 85, 138, 115, + 184, 123, 139, 181, 102, 99, 154, 58, 241, 50, 94, 218, 172, 207, 254, 143, 106, 158, 174, 236, 189, 215, 207, + 72, 86, 105, 174, 157, 118, 164, 36, 125, 246, 199, 140, 98, 68, 209, 151, 235, 183, 232, 12, 140, 1, 250, + 178, 245, 85, 177, 17, 203, 29, 40, 97, 56, 186, 111, 252, 224, 86, 140, 249, 127, 180, 243, 239, 161, 252, + 218, 8, 200, 113, 136, 85, 221, 89, 1, 158, 157, 144, 7, 15, 123, 181, 230, 221, 194, 27, 4, 123, 177, + 26, 215, 192, 150, 68, 65, 103, 210, 16, 0, 42, 233, 32, 250, 236, 175, 49, 39, 253, 196, 196, 222, 9, + 32, 249, 9, 110, 63, 20, 16, 128, 177, 155, 48, 191, 190, 211, 36, 48, 27, 66, 100, 152, 138, 14, 12, + 3, 229, 223, 47, 131, 30, 65, 243, 101, 197, 6, 102, 20, 17, 152, 65, 23, 53, 254, 164, 152, 101, 134, + 106, 20, 53, 227, 167, 211, 47, 166, 23, 120, 235, 246, 42, 200, 214, 14, 20, 182, 212, 4, 142, 108, 71, + 167, 104, 98, 158, 
223, 24, 220, 63, 177, 61, 143, 60, 55, 0, 214, 124, 137, 228, 232, 239, 59, 113, 186, + 134, 231, 179, 134, 239, 109, 25, 174, 108, 205, 226, 210, 183, 75, 197, 26, 74, 132, 57, 200, 227, 243, 15, + 245, 71, 234, 51, 29, 20, 125, 18, 120, 136, 194, 173, 228, 72, 72, 232, 155, 3, 248, 215, 23, 40, 29, + 121, 65, 148, 107, 126, 250, 150, 198, 58, 177, 29, 167, 192, 244, 19, 65, 250, 200, 159, 167, 151, 58, 249, + 101, 227, 163, 216, 11, 112, 34, 222, 50, 14, 156, 252, 42, 136, 95, 89, 175, 87, 177, 117, 6, 68, 47, + 59, 148, 197, 62, 220, 143, 43, 56, 166, 94, 123, 113, 143, 196, 136, 216, 32, 127, 79, 126, 231, 141, 231, + 229, 2, 44, 154, 107, 200, 191, 47, 213, 144, 64, 70, 37, 126, 208, 229, 39, 132, 235, 198, 56, 196, 73, + 20, 83, 14, 242, 145, 236, 43, 95, 67, 180, 236, 131, 240, 163, 223, 96, 113, 176, 248, 31, 50, 5, 218, + 166, 120, 144, 231, 119, 239, 58, 104, 147, 122, 235, 224, 57, 188, 185, 244, 42, 152, 72, 107, 221, 208, 118, + 167, 221, 146, 139, 127, 110, 196, 111, 148, 133, 74, 55, 138, 253, 27, 207, 130, 33, 243, 64, 14, 196, 162, + 200, 104, 136, 180, 159, 167, 154, 215, 81, 170, 147, 124, 30, 219, 16, 197, 108, 42, 213, 249, 195, 174, 240, + 173, 185, 220, 222, 160, 45, 125, 37, 196, 35, 43, 180, 251, 240, 50, 53, 246, 211, 191, 2, 38, 23, 164, + 185, 179, 124, 208, 230, 24, 131, 242, 46, 11, 211, 211, 152, 188, 230, 34, 80, 241, 71, 19, 96, 40, 222, + 9, 53, 107, 140, 56, 182, 170, 149, 152, 52, 19, 89, 181, 149, 35, 17, 11, 5, 142, 192, 234, 36, 206, + 149, 235, 94, 96, 37, 223, 101, 200, 213, 101, 36, 109, 86, 187, 10, 211, 60, 66, 123, 229, 224, 239, 49, + 173, 212, 5, 96, 128, 241, 234, 76, 2, 9, 170, 175, 84, 77, 130, 181, 193, 179, 63, 71, 83, 24, 105, + 149, 85, 187, 224, 170, 90, 41, 209, 194, 124, 19, 57, 198, 55, 184, 119, 206, 134, 110, 30, 38, 229, 24, + 115, 211, 73, 154, 181, 11, 191, 41, 20, 85, 12, 18, 138, 196, 162, 200, 135, 104, 27, 191, 82, 253, 154, + 123, 82, 21, 119, 196, 175, 142, 254, 49, 121, 249, 8, 31, 227, 160, 
134, 217, 36, 221, 161, 153, 4, 182, + 136, 110, 208, 233, 196, 120, 226, 22, 58, 74, 150, 110, 253, 160, 182, 124, 149, 32, 49, 145, 214, 187, 121, + 61, 105, 28, 134, 176, 15, 0, 5, 211, 178, 2, 249, 64, 239, 6, 175, 247, 162, 19, 253, 199, 216, 40, + 39, 92, 8, 4, 14, 2, 130, 214, 155, 199, 186, 215, 24, 130, 215, 199, 217, 223, 158, 83, 49, 60, 79, + 61, 221, 210, 151, 194, 98, 52, 89, 39, 5, 162, 34, 194, 75, 213, 201, 53, 94, 12, 10, 184, 54, 173, + 17, 97, 51, 68, 22, 197, 77, 127, 80, 136, 76, 3, 118, 229, 60, 235, 46, 173, 61, 16, 232, 138, 162, + 148, 179, 122, 172, 238, 126, 227, 21, 5, 180, 235, 4, 78, 37, 10, 221, 129, 209, 93, 86, 198, 192, 134, + 33, 33, 107, 109, 155, 47, 184, 31, 32, 46, 194, 141, 227, 33, 150, 113, 78, 7, 93, 199, 184, 103, 55, + 107, 140, 49, 197, 227, 94, 41, 207, 115, 157, 16, 45, 124, 169, 205, 167, 170, 12, 213, 235, 211, 239, 107, + 42, 42, 9, 55, 22, 9, 91, 151, 177, 171, 23, 123, 241, 48, 77, 179, 178, 33, 105, 245, 121, 221, 143, + 77, 67, 57, 110, 230, 64, 249, 91, 3, 34, 81, 173, 223, 11, 23, 157, 211, 12, 119, 30, 156, 132, 182, + 81, 66, 208, 155, 35, 252, 138, 216, 239, 163, 187, 90, 99, 210, 154, 77, 192, 4, 220, 54, 87, 126, 142, + 185, 84, 135, 97, 9, 70, 10, 12, 177, 199, 140, 112, 107, 175, 127, 227, 136, 245, 236, 58, 132, 83, 36, + 59, 205, 163, 85, 35, 151, 113, 228, 72, 110, 197, 22, 227, 89, 185, 169, 231, 99, 132, 186, 38, 21, 65, + 52, 67, 248, 60, 83, 66, 49, 112, 91, 232, 150, 125, 111, 132, 182, 88, 185, 231, 209, 212, 240, 253, 92, + 180, 206, 173, 22, 226, 3, 58, 56, 21, 162, 95, 138, 227, 193, 219, 167, 51, 62, 109, 214, 247, 136, 218, + 199, 104, 15, 51, 184, 98, 223, 122, 179, 211, 210, 76, 9, 49, 215, 69, 69, 91, 55, 90, 131, 161, 195, + 91, 39, 176, 231, 221, 233, 128, 51, 70, 89, 88, 14, 226, 2, 174, 23, 112, 175, 56, 74, 2, 0, 109, + 25, 241, 72, 116, 170, 41, 32, 160, 228, 23, 191, 244, 186, 46, 76, 129, 205, 40, 188, 210, 15, 125, 118, + 21, 136, 101, 12, 107, 81, 1, 98, 175, 228, 68, 36, 
111, 178, 118, 47, 125, 29, 201, 202, 200, 159, 216, + 50, 202, 99, 23, 55, 223, 195, 86, 45, 231, 95, 123, 191, 184, 162, 59, 105, 7, 22, 186, 55, 195, 49, + 215, 32, 186, 197, 33, 91, 204, 138, 186, 72, 145, 18, 1, 206, 47, 186, 212, 91, 97, 18, 196, 156, 240, + 172, 19, 159, 104, 254, 230, 52, 214, 130, 225, 141, 248, 114, 161, 91, 62, 54, 235, 11, 189, 67, 128, 24, + 245, 10, 68, 225, 235, 218, 169, 40, 245, 211, 212, 92, 38, 14, 210, 19, 240, 111, 15, 87, 128, 153, 201, + 19, 160, 216, 116, 175, 216, 109, 98, 209, 220, 61, 132, 252, 228, 7, 252, 234, 252, 54, 21, 166, 116, 88, + 144, 22, 188, 158, 135, 76, 102, 70, 103, 157, 122, 201, 154, 191, 143, 129, 96, 59, 244, 197, 90, 194, 103, + 242, 148, 204, 125, 199, 100, 52, 169, 149, 196, 173, 88, 235, 159, 132, 208, 106, 189, 163, 237, 164, 95, 209, + 115, 180, 164, 182, 68, 78, 26, 108, 135, 214, 153, 249, 59, 14, 66, 242, 18, 45, 119, 43, 49, 75, 235, + 250, 94, 194, 202, 244, 140, 29, 211, 77, 171, 40, 140, 154, 83, 203, 217, 68, 20, 168, 37, 101, 85, 97, + 249, 193, 168, 205, 149, 196, 127, 222, 49, 107, 254, 173, 25, 198, 151, 161, 187, 23, 68, 206, 57, 120, 156, + 223, 87, 169, 127, 77, 40, 56, 182, 145, 156, 56, 110, 199, 45, 248, 121, 216, 245, 134, 134, 4, 105, 221, + 29, 82, 1, 162, 18, 188, 59, 110, 8, 30, 213, 221, 42, 143, 125, 248, 43, 140, 102, 218, 221, 65, 213, + 39, 84, 210, 82, 82, 247, 6, 148, 196, 32, 29, 245, 196, 113, 46, 157, 64, 170, 97, 95, 155, 99, 159, + 34, 160, 92, 248, 29, 155, 218, 154, 46, 16, 16, 47, 243, 16, 124, 159, 183, 123, 196, 200, 28, 180, 188, + 12, 121, 133, 134, 225, 36, 53, 169, 165, 207, 103, 202, 82, 119, 127, 179, 184, 229, 2, 236, 5, 39, 118, + 143, 227, 213, 34, 194, 220, 152, 54, 254, 227, 230, 197, 31, 77, 113, 191, 239, 224, 115, 253, 171, 143, 3, + 61, 250, 164, 84, 136, 16, 103, 163, 163, 93, 77, 180, 64, 103, 11, 248, 141, 177, 123, 33, 185, 118, 56, + 178, 68, 138, 180, 54, 21, 124, 133, 104, 202, 217, 237, 107, 107, 244, 148, 111, 124, 67, 29, 88, 19, 
189, + 19, 171, 36, 203, 155, 203, 176, 81, 156, 232, 138, 16, 233, 213, 19, 114, 253, 238, 254, 165, 192, 105, 121, + 194, 206, 244, 149, 12, 198, 125, 195, 171, 235, 38, 169, 51, 50, 43, 10, 213, 224, 236, 196, 184, 222, 131, + 221, 189, 205, 168, 59, 157, 128, 247, 25, 62, 247, 26, 66, 13, 111, 60, 162, 168, 164, 204, 185, 47, 91, + 84, 99, 207, 87, 65, 99, 227, 146, 52, 244, 122, 32, 34, 68, 248, 228, 137, 227, 133, 125, 111, 152, 120, + 176, 54, 110, 194, 250, 69, 100, 219, 243, 37, 46, 24, 204, 102, 115, 206, 29, 176, 16, 206, 71, 166, 89, + 51, 110, 205, 9, 172, 150, 40, 12, 126, 44, 52, 241, 74, 91, 147, 6, 43, 179, 83, 36, 230, 32, 154, + 5, 209, 36, 4, 5, 220, 200, 127, 41, 84, 14, 221, 219, 115, 36, 34, 68, 66, 213, 102, 172, 54, 150, + 186, 52, 1, 46, 130, 46, 143, 181, 248, 234, 113, 142, 248, 43, 149, 21, 66, 111, 41, 173, 224, 63, 62, + 179, 8, 145, 20, 128, 103, 157, 13, 66, 219, 121, 104, 67, 149, 27, 136, 83, 101, 132, 156, 26, 45, 219, + 197, 209, 228, 228, 168, 83, 156, 0, 244, 198, 145, 242, 120, 226, 10, 164, 221, 80, 167, 242, 147, 14, 38, + 52, 110, 27, 235, 175, 58, 64, 20, 71, 74, 183, 202, 171, 134, 230, 19, 9, 120, 67, 231, 10, 14, 172, + 71, 187, 198, 251, 2, 210, 11, 0, 83, 95, 249, 91, 155, 207, 183, 89, 149, 111, 62, 210, 220, 44, 152, + 6, 104, 88, 53, 3, 172, 2, 21, 91, 125, 36, 144, 235, 196, 183, 60, 251, 192, 104, 197, 117, 72, 249, + 249, 68, 71, 212, 88, 247, 69, 87, 167, 126, 227, 233, 229, 105, 240, 183, 234, 170, 67, 248, 142, 22, 91, + 117, 126, 6, 208, 52, 28, 241, 170, 206, 135, 126, 15, 80, 132, 16, 213, 118, 74, 200, 63, 57, 32, 251, + 63, 114, 224, 187, 98, 142, 39, 135, 16, 74, 20, 11, 106, 28, 50, 1, 187, 106, 220, 108, 20, 224, 155, + 150, 51, 203, 242, 61, 95, 243, 124, 211, 237, 253, 16, 135, 246, 32, 46, 227, 132, 60, 25, 212, 190, 3, + 148, 173, 233, 171, 250, 191, 47, 92, 52, 214, 139, 34, 162, 136, 42, 131, 96, 230, 36, 132, 14, 214, 138, + 101, 249, 73, 222, 141, 83, 81, 22, 22, 19, 117, 184, 176, 105, 174, 
86, 103, 8, 238, 28, 39, 19, 49, + 59, 146, 128, 172, 244, 215, 86, 158, 218, 116, 52, 124, 75, 30, 102, 92, 103, 141, 68, 125, 18, 74, 220, + 154, 181, 126, 212, 6, 83, 55, 27, 129, 40, 121, 199, 20, 178, 112, 239, 99, 81, 223, 93, 35, 30, 60, + 73, 197, 243, 239, 142, 251, 21, 60, 138, 49, 216, 27, 56, 21, 49, 14, 165, 125, 38, 234, 212, 177, 252, + 102, 94, 192, 125, 223, 42, 106, 116, 50, 202, 37, 223, 33, 125, 225, 40, 145, 180, 159, 84, 81, 198, 28, + 82, 27, 241, 108, 240, 223, 132, 151, 197, 18, 178, 21, 128, 151, 32, 236, 103, 126, 36, 189, 111, 50, 9, + 49, 2, 104, 27, 166, 231, 2, 203, 79, 109, 31, 28, 182, 168, 252, 212, 28, 112, 191, 213, 73, 34, 143, + 239, 36, 94, 181, 46, 115, 186, 100, 212, 247, 45, 134, 93, 146, 65, 74, 104, 89, 117, 244, 187, 132, 103, + 33, 90, 40, 191, 166, 241, 111, 59, 64, 96, 98, 176, 196, 230, 171, 245, 94, 179, 37, 233, 200, 2, 198, + 115, 48, 184, 18, 161, 24, 242, 153, 146, 82, 178, 209, 99, 60, 183, 98, 252, 11, 95, 41, 137, 176, 203, + 14, 185, 226, 79, 227, 218, 143, 164, 177, 97, 56, 231, 251, 13, 170, 211, 167, 181, 106, 17, 220, 9, 58, + 44, 231, 47, 9, 104, 178, 143, 130, 138, 28, 163, 136, 63, 148, 230, 54, 76, 83, 97, 202, 238, 65, 218, + 126, 246, 74, 84, 114, 210, 175, 52, 153, 146, 93, 199, 173, 132, 129, 131, 190, 249, 245, 4, 226, 77, 248, + 242, 221, 34, 199, 79, 57, 39, 159, 19, 79, 251, 3, 53, 74, 84, 190, 171, 227, 95, 194, 113, 216, 145, + 145, 130, 66, 164, 161, 12, 112, 41, 133, 87, 127, 128, 220, 206, 154, 193, 46, 209, 161, 124, 73, 246, 7, + 135, 98, 72, 248, 123, 59, 25, 73, 182, 95, 97, 216, 254, 239, 207, 68, 40, 11, 138, 240, 150, 62, 7, + 164, 168, 237, 10, 151, 9, 64, 108, 172, 214, 35, 43, 178, 85, 198, 222, 41, 121, 121, 60, 233, 86, 207, + 244, 72, 19, 242, 70, 46, 21, 8, 186, 245, 110, 220, 171, 146, 41, 154, 66, 139, 28, 73, 146, 189, 15, + 199, 211, 247, 46, 0, 33, 70, 247, 229, 248, 11, 166, 196, 49, 22, 223, 9, 114, 0, 85, 192, 62, 242, + 88, 234, 45, 84, 1, 152, 154, 59, 229, 226, 
131, 145, 94, 38, 210, 207, 96, 62, 242, 144, 123, 214, 5, + 44, 72, 57, 58, 17, 217, 8, 241, 131, 118, 95, 12, 157, 216, 20, 135, 30, 42, 152, 197, 131, 119, 23, + 2, 171, 231, 205, 127, 47, 21, 210, 230, 207, 220, 172, 118, 11, 244, 21, 251, 150, 195, 64, 31, 17, 86, + 83, 13, 112, 232, 40, 165, 233, 4, 73, 169, 99, 252, 142, 186, 133, 106, 27, 83, 253, 123, 210, 69, 201, + 201, 42, 129, 172, 34, 201, 27, 247, 109, 16, 84, 211, 158, 2, 217, 135, 111, 209, 9, 129, 147, 83, 92, + 243, 179, 116, 96, 175, 89, 97, 24, 225, 21, 61, 39, 129, 168, 121, 81, 206, 33, 72, 10, 7, 182, 156, + 130, 198, 199, 172, 132, 29, 32, 19, 155, 108, 34, 200, 48, 3, 45, 191, 190, 107, 229, 229, 115, 194, 91, + 244, 64, 203, 111, 175, 67, 137, 58, 134, 193, 55, 162, 134, 83, 26, 85, 191, 116, 111, 162, 97, 188, 198, + 163, 113, 209, 56, 243, 49, 148, 230, 58, 130, 160, 43, 194, 252, 95, 118, 23, 104, 254, 144, 204, 2, 228, + 100, 199, 232, 188, 178, 5, 142, 70, 44, 33, 72, 114, 252, 249, 192, 48, 237, 240, 59, 46, 154, 10, 180, + 6, 224, 33, 86, 165, 236, 217, 153, 28, 105, 54, 10, 211, 156, 38, 211, 46, 145, 220, 162, 216, 5, 239, + 248, 84, 85, 224, 179, 191, 126, 124, 191, 1, 88, 32, 31, 52, 190, 164, 216, 12, 16, 1, 37, 68, 10, + 157, 66, 176, 161, 196, 188, 55, 228, 216, 213, 132, 113, 102, 101, 100, 129, 250, 155, 157, 101, 136, 225, 35, + 60, 19, 126, 154, 176, 29, 103, 138, 239, 194, 17, 209, 80, 88, 23, 133, 167, 119, 84, 245, 36, 123, 42, + 123, 67, 48, 55, 66, 245, 91, 168, 202, 81, 16, 193, 173, 123, 66, 65, 28, 251, 226, 204, 239, 129, 243, + 93, 156, 6, 56, 98, 120, 85, 99, 133, 184, 117, 236, 41, 50, 159, 199, 249, 42, 56, 146, 80, 170, 170, + 213, 209, 141, 131, 223, 13, 231, 4, 62, 61, 20, 11, 22, 137, 138, 52, 111, 154, 113, 56, 129, 234, 51, + 225, 222, 184, 46, 89, 42, 243, 230, 44, 142, 72, 77, 10, 158, 71, 42, 169, 158, 1, 179, 191, 145, 156, + 126, 170, 75, 9, 127, 40, 183, 72, 240, 196, 74, 121, 87, 37, 130, 3, 117, 113, 30, 117, 31, 16, 179, + 105, 187, 11, 15, 
82, 27, 84, 54, 44, 120, 46, 239, 191, 71, 4, 224, 9, 178, 198, 138, 8, 208, 229, + 45, 243, 77, 142, 73, 164, 161, 143, 14, 121, 28, 124, 191, 67, 248, 21, 184, 77, 58, 152, 19, 172, 58, + 214, 200, 208, 37, 59, 219, 188, 57, 225, 131, 85, 106, 5, 192, 33, 161, 162, 170, 130, 165, 116, 25, 254, + 187, 207, 127, 30, 193, 85, 132, 36, 9, 29, 59, 170, 112, 70, 208, 86, 6, 244, 59, 159, 17, 118, 2, + 157, 166, 95, 197, 22, 240, 120, 146, 183, 208, 73, 47, 164, 168, 251, 79, 182, 200, 204, 162, 85, 12, 78, + 167, 211, 111, 67, 213, 166, 55, 176, 158, 70, 219, 3, 110, 23, 14, 115, 218, 165, 115, 231, 222, 245, 33, + 200, 174, 210, 249, 29, 218, 108, 38, 215, 222, 184, 36, 92, 244, 133, 234, 239, 25, 226, 47, 33, 114, 154, + 206, 202, 190, 70, 94, 132, 163, 84, 73, 109, 34, 216, 149, 147, 185, 157, 166, 237, 149, 186, 248, 54, 206, + 216, 62, 52, 243, 11, 223, 45, 26, 169, 22, 237, 20, 4, 176, 73, 187, 130, 138, 44, 18, 191, 34, 162, + 160, 48, 140, 40, 130, 166, 171, 65, 3, 137, 83, 238, 28, 18, 244, 1, 125, 114, 60, 57, 238, 250, 94, + 136, 72, 249, 65, 0, 2, 102, 208, 197, 78, 89, 253, 148, 54, 65, 210, 121, 163, 68, 98, 184, 25, 28, + 13, 61, 167, 2, 192, 206, 59, 220, 183, 88, 145, 207, 242, 168, 101, 79, 105, 179, 98, 51, 231, 95, 64, + 99, 136, 123, 91, 115, 23, 188, 177, 170, 176, 201, 204, 124, 230, 232, 185, 35, 191, 116, 215, 122, 98, 183, + 75, 152, 236, 213, 237, 183, 29, 17, 139, 86, 136, 183, 6, 43, 206, 129, 103, 148, 92, 131, 139, 23, 187, + 35, 71, 230, 30, 29, 124, 158, 224, 29, 131, 1, 3, 79, 0, 178, 205, 87, 74, 44, 221, 25, 137, 180, + 194, 57, 65, 68, 241, 160, 200, 209, 73, 208, 223, 204, 53, 122, 237, 100, 64, 194, 53, 156, 46, 103, 214, + 239, 230, 155, 247, 251, 190, 102, 0, 245, 104, 10, 72, 202, 45, 112, 235, 32, 198, 234, 183, 205, 95, 170, + 145, 214, 10, 245, 25, 53, 74, 214, 201, 7, 240, 79, 44, 79, 149, 46, 203, 28, 40, 24, 222, 232, 70, + 10, 34, 137, 90, 50, 199, 239, 192, 72, 184, 44, 39, 31, 237, 211, 110, 118, 170, 251, 50, 219, 
173, 39, + 91, 212, 108, 177, 198, 244, 155, 113, 176, 69, 210, 38, 53, 168, 89, 201, 55, 59, 98, 111, 188, 192, 224, + 231, 181, 243, 236, 71, 185, 241, 186, 3, 232, 119, 0, 187, 121, 82, 3, 115, 58, 35, 189, 44, 121, 152, + 178, 69, 199, 36, 127, 242, 31, 72, 23, 151, 15, 96, 179, 185, 154, 64, 119, 3, 119, 16, 138, 185, 181, + 218, 104, 95, 171, 0, 245, 176, 212, 35, 112, 152, 191, 162, 92, 15, 19, 37, 180, 67, 231, 225, 215, 12, + 199, 43, 14, 213, 228, 161, 119, 161, 215, 5, 125, 58, 159, 113, 28, 190, 36, 68, 24, 193, 78, 229, 125, + 229, 219, 16, 207, 78, 130, 203, 214, 110, 48, 234, 97, 181, 172, 125, 45, 74, 171, 71, 169, 154, 230, 137, + 212, 231, 172, 198, 224, 1, 127, 221, 113, 209, 42, 150, 18, 212, 175, 164, 175, 102, 24, 139, 1, 206, 148, + 252, 45, 15, 128, 55, 237, 20, 147, 175, 171, 17, 222, 50, 217, 165, 15, 15, 231, 56, 107, 221, 125, 80, + 237, 234, 89, 138, 112, 149, 250, 73, 104, 139, 93, 185, 205, 249, 248, 4, 249, 6, 152, 3, 63, 191, 113, + 46, 203, 167, 122, 9, 22, 146, 83, 107, 74, 163, 68, 41, 159, 117, 90, 84, 0, 253, 182, 32, 245, 92, + 177, 149, 148, 80, 245, 154, 84, 130, 134, 103, 215, 225, 169, 11, 112, 1, 172, 236, 69, 204, 56, 135, 244, + 169, 102, 128, 221, 19, 180, 37, 112, 193, 111, 44, 166, 79, 70, 95, 115, 189, 28, 141, 204, 195, 32, 215, + 74, 102, 59, 247, 135, 216, 216, 67, 230, 169, 71, 154, 105, 50, 49, 197, 250, 167, 32, 99, 159, 177, 131, + 167, 242, 53, 205, 2, 31, 169, 128, 9, 99, 47, 16, 141, 207, 68, 152, 102, 135, 151, 141, 178, 122, 68, + 122, 42, 152, 119, 179, 157, 134, 228, 189, 189, 239, 83, 186, 99, 119, 81, 146, 203, 98, 160, 98, 246, 1, + 71, 176, 155, 104, 150, 221, 197, 76, 150, 149, 242, 189, 68, 207, 127, 216, 30, 83, 215, 252, 124, 78, 99, + 150, 139, 224, 89, 144, 57, 7, 219, 47, 111, 185, 254, 34, 182, 107, 190, 158, 212, 98, 32, 113, 73, 52, + 111, 241, 253, 223, 168, 215, 86, 128, 160, 169, 141, 47, 1, 225, 26, 2, 90, 76, 60, 167, 129, 107, 5, + 23, 135, 75, 63, 27, 217, 3, 30, 28, 224, 73, 144, 
62, 32, 232, 170, 24, 87, 97, 108, 136, 186, 56, + 48, 249, 158, 157, 145, 78, 2, 154, 4, 5, 144, 74, 253, 254, 77, 201, 67, 72, 170, 66, 230, 102, 85, + 176, 58, 62, 214, 22, 159, 78, 191, 113, 131, 165, 68, 4, 124, 72, 173, 221, 21, 170, 149, 226, 43, 165, + 52, 87, 247, 225, 179, 65, 119, 41, 145, 134, 64, 131, 117, 93, 111, 169, 90, 69, 68, 56, 162, 23, 69, + 66, 172, 135, 74, 160, 192, 130, 112, 94, 41, 143, 147, 207, 4, 253, 66, 66, 210, 113, 195, 43, 95, 4, + 33, 231, 253, 33, 139, 215, 66, 23, 27, 60, 37, 164, 116, 133, 132, 87, 106, 36, 99, 235, 202, 21, 84, + 32, 92, 76, 40, 190, 183, 254, 81, 95, 186, 155, 22, 245, 52, 114, 208, 231, 121, 187, 6, 165, 188, 4, + 155, 138, 118, 21, 2, 239, 248, 121, 104, 140, 102, 177, 86, 133, 221, 201, 147, 208, 224, 71, 167, 225, 137, + 152, 94, 117, 110, 131, 194, 172, 33, 217, 25, 189, 58, 95, 220, 8, 154, 59, 171, 188, 130, 118, 239, 248, + 35, 32, 213, 54, 238, 127, 193, 37, 221, 201, 81, 108, 104, 62, 223, 197, 150, 57, 73, 218, 54, 44, 238, + 54, 188, 153, 27, 166, 159, 146, 230, 218, 117, 42, 177, 49, 115, 24, 121, 176, 35, 245, 34, 248, 152, 219, + 23, 150, 50, 41, 134, 156, 205, 147, 183, 95, 40, 52, 246, 10, 48, 233, 205, 24, 237, 95, 225, 63, 139, + 252, 53, 254, 93, 195, 116, 201, 243, 231, 42, 192, 30, 247, 127, 84, 40, 35, 55, 132, 192, 168, 90, 228, + 49, 174, 155, 58, 14, 61, 96, 105, 198, 146, 107, 196, 61, 176, 15, 241, 27, 140, 92, 250, 163, 234, 251, + 113, 31, 136, 162, 100, 122, 186, 148, 215, 250, 207, 228, 206, 180, 186, 253, 208, 40, 220, 177, 158, 155, 188, + 160, 141, 243, 201, 94, 97, 41, 105, 225, 22, 201, 154, 75, 214, 120, 227, 25, 5, 79, 63, 216, 154, 179, + 202, 213, 39, 2, 53, 224, 22, 72, 141, 47, 168, 112, 229, 159, 97, 219, 138, 199, 219, 82, 36, 148, 175, + 196, 155, 191, 237, 251, 100, 131, 8, 246, 182, 189, 134, 181, 0, 160, 195, 158, 152, 201, 47, 184, 69, 214, + 244, 196, 120, 178, 30, 13, 46, 232, 124, 168, 100, 199, 160, 157, 0, 95, 21, 45, 231, 206, 31, 77, 149, + 208, 78, 
246, 4, 40, 246, 205, 39, 153, 47, 147, 123, 106, 124, 234, 153, 146, 208, 244, 23, 99, 94, 87, + 4, 77, 72, 136, 186, 27, 10, 26, 226, 230, 246, 151, 221, 216, 74, 156, 132, 12, 192, 215, 6, 205, 76, + 98, 113, 178, 139, 38, 145, 30, 214, 223, 161, 21, 187, 143, 81, 230, 29, 193, 32, 181, 223, 9, 86, 164, + 190, 36, 148, 66, 33, 205, 31, 96, 115, 144, 154, 45, 238, 249, 234, 1, 150, 37, 120, 44, 224, 76, 224, + 232, 46, 85, 225, 242, 38, 243, 28, 127, 64, 133, 127, 176, 85, 92, 142, 17, 201, 161, 249, 145, 237, 41, + 42, 67, 97, 2, 146, 172, 17, 187, 33, 190, 33, 98, 204, 22, 30, 149, 245, 86, 223, 209, 200, 209, 106, + 173, 55, 113, 221, 145, 254, 101, 54, 174, 19, 240, 14, 42, 80, 12, 18, 237, 127, 72, 42, 39, 209, 239, + 206, 106, 85, 99, 98, 160, 211, 89, 177, 117, 247, 67, 183, 20, 171, 210, 1, 2, 36, 228, 250, 61, 93, + 17, 193, 132, 124, 126, 164, 113, 84, 15, 30, 81, 50, 44, 223, 146, 2, 130, 176, 205, 254, 28, 233, 230, + 56, 189, 228, 23, 246, 97, 102, 120, 254, 238, 102, 177, 97, 167, 26, 158, 49, 126, 81, 154, 214, 56, 191, + 101, 200, 174, 238, 91, 189, 135, 82, 141, 199, 68, 128, 220, 87, 57, 142, 1, 57, 84, 240, 84, 45, 143, + 147, 170, 52, 131, 77, 116, 117, 177, 206, 191, 125, 124, 69, 92, 23, 65, 10, 196, 192, 138, 232, 115, 133, + 199, 62, 122, 206, 243, 72, 161, 219, 232, 82, 248, 203, 60, 144, 176, 138, 93, 154, 76, 233, 127, 129, 38, + 223, 47, 75, 226, 81, 14, 210, 150, 113, 232, 141, 188, 135, 229, 185, 196, 72, 203, 230, 88, 38, 167, 10, + 14, 55, 101, 74, 19, 213, 37, 226, 92, 103, 159, 85, 36, 190, 30, 104, 119, 64, 237, 178, 34, 17, 69, + 185, 88, 34, 150, 154, 39, 127, 86, 195, 151, 142, 108, 233, 53, 163, 148, 97, 246, 124, 82, 78, 72, 14, + 210, 239, 107, 133, 79, 192, 75, 37, 148, 163, 46, 50, 139, 32, 38, 178, 177, 151, 133, 51, 24, 100, 17, + 217, 219, 175, 251, 63, 70, 36, 28, 92, 144, 227, 219, 168, 7, 251, 130, 73, 81, 14, 79, 211, 155, 100, + 97, 216, 200, 121, 93, 31, 156, 8, 154, 210, 245, 216, 24, 94, 11, 31, 202, 221, 136, 
45, 167, 14, 123, + 190, 194, 62, 191, 88, 254, 19, 104, 183, 41, 186, 215, 68, 228, 179, 78, 100, 115, 36, 167, 192, 215, 218, + 155, 249, 182, 98, 163, 176, 82, 104, 63, 181, 94, 51, 7, 236, 103, 104, 208, 91, 3, 48, 98, 250, 71, + 73, 119, 93, 113, 131, 16, 75, 197, 245, 51, 196, 104, 241, 219, 63, 201, 172, 12, 234, 202, 80, 6, 158, + 117, 194, 120, 99, 13, 133, 104, 23, 162, 32, 137, 201, 123, 7, 239, 2, 253, 2, 219, 34, 14, 171, 254, + 175, 11, 40, 88, 5, 221, 147, 7, 223, 83, 185, 25, 178, 165, 170, 108, 68, 112, 123, 54, 241, 49, 93, + 176, 100, 91, 180, 102, 0, 162, 191, 239, 111, 191, 165, 239, 119, 69, 187, 44, 200, 201, 60, 51, 132, 165, + 46, 23, 138, 165, 188, 170, 112, 28, 9, 166, 14, 120, 19, 162, 247, 150, 92, 65, 147, 88, 133, 177, 194, + 37, 120, 230, 247, 210, 21, 40, 56, 215, 26, 48, 66, 220, 169, 190, 137, 161, 84, 84, 215, 222, 91, 21, + 162, 190, 139, 6, 132, 87, 49, 92, 209, 97, 103, 211, 73, 48, 250, 55, 40, 63, 94, 254, 103, 191, 65, + 159, 19, 19, 68, 157, 181, 29, 153, 133, 245, 73, 241, 131, 73, 165, 82, 41, 22, 30, 211, 138, 115, 13, + 144, 30, 241, 16, 3, 82, 73, 18, 154, 246, 115, 196, 47, 139, 20, 99, 210, 94, 247, 202, 109, 57, 106, + 28, 55, 15, 245, 242, 241, 136, 56, 148, 254, 249, 153, 202, 132, 174, 233, 213, 212, 237, 10, 78, 218, 137, + 141, 227, 187, 75, 54, 167, 60, 60, 170, 32, 210, 219, 242, 149, 110, 60, 155, 0, 113, 16, 248, 233, 194, + 145, 49, 5, 232, 225, 81, 206, 164, 81, 226, 181, 164, 180, 210, 99, 230, 34, 99, 218, 169, 107, 109, 190, + 59, 117, 245, 5, 179, 139, 137, 94, 145, 202, 90, 194, 69, 105, 2, 170, 108, 108, 203, 50, 17, 222, 71, + 74, 26, 98, 183, 133, 109, 103, 4, 246, 241, 199, 25, 90, 197, 14, 140, 139, 95, 192, 230, 130, 81, 126, + 117, 136, 248, 18, 13, 238, 105, 237, 237, 222, 33, 101, 192, 45, 141, 203, 30, 42, 134, 75, 193, 5, 45, + 63, 37, 137, 254, 102, 80, 144, 156, 48, 213, 163, 214, 30, 121, 178, 187, 120, 153, 98, 157, 32, 250, 196, + 207, 55, 247, 101, 191, 120, 58, 150, 226, 156, 57, 
42, 169, 49, 49, 74, 193, 188, 184, 7, 201, 192, 245, + 170, 79, 190, 230, 44, 33, 28, 244, 21, 7, 250, 169, 53, 180, 29, 193, 33, 199, 118, 140, 161, 189, 198, + 70, 89, 190, 178, 42, 173, 112, 61, 87, 137, 74, 162, 217, 166, 108, 70, 64, 212, 239, 61, 163, 77, 19, + 42, 190, 87, 37, 210, 134, 40, 207, 151, 248, 66, 22, 227, 56, 39, 62, 30, 238, 164, 43, 124, 92, 106, + 90, 146, 55, 16, 86, 155, 153, 115, 79, 136, 197, 38, 26, 36, 152, 157, 77, 237, 42, 162, 208, 220, 220, + 105, 227, 79, 100, 25, 238, 183, 227, 147, 74, 201, 242, 108, 242, 19, 143, 152, 82, 73, 203, 139, 39, 219, + 98, 140, 173, 126, 129, 123, 179, 0, 98, 44, 123, 158, 153, 214, 212, 15, 1, 161, 203, 21, 203, 223, 152, + 53, 213, 54, 155, 166, 203, 126, 130, 147, 66, 13, 165, 29, 167, 75, 222, 182, 215, 100, 69, 39, 129, 135, + 229, 31, 147, 239, 230, 97, 83, 102, 236, 242, 108, 106, 132, 230, 106, 73, 88, 9, 8, 142, 196, 79, 194, + 222, 205, 45, 212, 241, 125, 221, 40, 57, 24, 184, 246, 178, 101, 244, 153, 225, 219, 179, 17, 47, 184, 3, + 78, 85, 151, 206, 33, 251, 109, 159, 155, 138, 161, 190, 201, 194, 167, 2, 172, 6, 239, 23, 238, 93, 80, + 101, 33, 66, 169, 91, 62, 200, 53, 237, 60, 13, 153, 239, 113, 130, 89, 39, 31, 135, 171, 216, 15, 27, + 52, 211, 39, 1, 11, 241, 142, 221, 159, 4, 194, 151, 228, 55, 181, 130, 78, 168, 15, 130, 99, 119, 69, + 88, 142, 225, 98, 39, 249, 110, 85, 163, 119, 29, 108, 180, 172, 56, 9, 164, 50, 189, 149, 129, 200, 50, + 67, 237, 172, 47, 139, 53, 67, 24, 20, 88, 35, 51, 190, 64, 69, 209, 169, 193, 10, 219, 226, 151, 160, + 45, 6, 5, 242, 3, 148, 84, 167, 80, 96, 16, 133, 195, 185, 179, 198, 100, 21, 179, 167, 158, 113, 86, + 248, 192, 17, 141, 110, 77, 173, 168, 91, 221, 16, 73, 2, 90, 27, 214, 100, 239, 213, 83, 175, 175, 59, + 63, 60, 98, 133, 120, 201, 179, 100, 171, 143, 140, 62, 140, 10, 229, 36, 52, 56, 125, 13, 239, 24, 177, + 158, 77, 65, 137, 237, 225, 237, 25, 6, 148, 1, 154, 202, 226, 90, 81, 139, 112, 229, 112, 41, 43, 54, + 238, 147, 20, 221, 152, 
198, 142, 209, 249, 54, 145, 25, 15, 229, 80, 174, 27, 139, 76, 173, 20, 86, 192, + 63, 159, 160, 117, 168, 168, 236, 181, 107, 44, 203, 195, 251, 185, 18, 249, 154, 192, 18, 152, 248, 80, 93, + 42, 51, 30, 90, 239, 24, 210, 237, 249, 55, 102, 79, 23, 202, 123, 196, 73, 171, 186, 248, 173, 113, 207, + 46, 67, 41, 221, 218, 20, 137, 104, 109, 73, 193, 8, 140, 99, 34, 98, 221, 74, 224, 246, 221, 48, 130, + 9, 53, 56, 16, 109, 210, 247, 9, 124, 7, 219, 218, 225, 138, 177, 44, 46, 4, 30, 236, 101, 227, 41, + 18, 115, 21, 115, 18, 75, 254, 54, 171, 228, 76, 2, 50, 39, 16, 56, 82, 74, 210, 121, 39, 92, 209, + 112, 121, 13, 47, 134, 244, 65, 120, 86, 172, 27, 155, 115, 22, 2, 71, 131, 78, 108, 192, 246, 254, 237, + 74, 22, 37, 158, 31, 81, 203, 127, 146, 194, 210, 148, 158, 78, 231, 170, 122, 57, 178, 112, 253, 75, 228, + 218, 90, 192, 82, 134, 238, 220, 182, 90, 219, 169, 154, 113, 33, 240, 183, 147, 174, 140, 5, 226, 182, 104, + 148, 227, 188, 58, 9, 76, 23, 137, 245, 156, 113, 50, 25, 14, 188, 78, 69, 70, 172, 25, 37, 239, 90, + 204, 142, 127, 97, 254, 172, 106, 222, 165, 158, 226, 212, 160, 137, 23, 140, 234, 157, 201, 126, 160, 243, 229, + 55, 243, 153, 216, 229, 130, 75, 71, 69, 217, 85, 53, 217, 175, 244, 1, 201, 169, 97, 187, 234, 42, 200, + 13, 103, 80, 100, 61, 19, 222, 235, 43, 44, 97, 244, 205, 57, 132, 240, 92, 12, 102, 171, 85, 160, 48, + 94, 232, 29, 106, 129, 170, 160, 225, 90, 95, 89, 25, 248, 45, 101, 62, 207, 219, 97, 119, 27, 84, 119, + 158, 158, 4, 127, 6, 252, 187, 80, 59, 37, 217, 182, 235, 27, 75, 11, 25, 109, 33, 32, 232, 34, 193, + 80, 49, 207, 87, 77, 227, 163, 7, 80, 85, 185, 30, 46, 104, 229, 84, 65, 128, 217, 179, 129, 83, 254, + 149, 53, 139, 242, 157, 180, 190, 151, 43, 192, 109, 125, 234, 239, 196, 19, 100, 213, 154, 147, 120, 129, 184, + 132, 161, 117, 209, 205, 216, 80, 1, 159, 233, 2, 65, 99, 171, 117, 209, 84, 177, 203, 47, 58, 216, 209, + 198, 161, 72, 93, 69, 6, 121, 242, 116, 61, 133, 98, 126, 31, 24, 124, 215, 136, 4, 155, 210, 39, 
158, + 216, 145, 73, 100, 177, 125, 92, 232, 219, 147, 69, 38, 43, 110, 122, 88, 101, 141, 51, 73, 49, 117, 35, + 49, 12, 115, 98, 112, 35, 231, 32, 89, 28, 157, 202, 188, 90, 1, 176, 121, 71, 239, 160, 176, 178, 64, + 168, 101, 223, 133, 4, 10, 211, 109, 29, 86, 241, 248, 38, 60, 48, 128, 106, 212, 74, 179, 245, 229, 99, + 23, 200, 232, 43, 139, 228, 37, 1, 199, 101, 222, 11, 127, 172, 54, 58, 62, 64, 32, 81, 24, 1, 163, + 168, 24, 102, 72, 134, 37, 56, 86, 27, 98, 94, 45, 151, 153, 18, 104, 108, 16, 251, 73, 174, 85, 55, + 188, 242, 216, 83, 27, 110, 136, 198, 180, 181, 107, 86, 138, 122, 89, 79, 99, 12, 102, 100, 94, 167, 119, + 8, 26, 254, 232, 157, 87, 76, 240, 155, 48, 47, 159, 233, 27, 193, 38, 188, 220, 194, 25, 33, 181, 120, + 206, 214, 244, 106, 179, 207, 4, 137, 178, 202, 205, 46, 196, 93, 130, 183, 242, 21, 158, 59, 66, 49, 55, + 182, 38, 180, 71, 60, 218, 77, 241, 15, 238, 8, 132, 243, 160, 202, 12, 26, 179, 169, 250, 224, 105, 233, + 124, 24, 155, 102, 254, 200, 165, 4, 210, 177, 156, 153, 70, 151, 251, 174, 215, 28, 215, 181, 143, 75, 8, + 123, 245, 228, 228, 190, 140, 228, 72, 115, 38, 251, 172, 195, 68, 79, 105, 12, 198, 54, 168, 120, 83, 61, + 146, 239, 73, 97, 239, 29, 88, 147, 254, 7, 144, 221, 101, 197, 239, 229, 98, 205, 135, 67, 9, 106, 81, + 187, 186, 163, 199, 144, 124, 73, 64, 239, 123, 181, 184, 205, 186, 93, 180, 197, 27, 53, 156, 7, 40, 4, + 151, 101, 26, 36, 201, 42, 174, 140, 251, 220, 73, 163, 195, 24, 86, 206, 0, 48, 79, 23, 223, 72, 102, + 181, 85, 132, 141, 24, 112, 150, 72, 217, 127, 204, 111, 73, 189, 140, 190, 80, 216, 36, 205, 131, 43, 116, + 251, 57, 166, 54, 37, 94, 230, 157, 119, 37, 166, 87, 93, 31, 166, 60, 37, 191, 136, 101, 240, 246, 24, + 75, 160, 31, 215, 173, 175, 101, 155, 85, 106, 201, 147, 39, 244, 100, 55, 250, 144, 165, 246, 175, 229, 114, + 56, 77, 175, 121, 191, 161, 242, 158, 45, 135, 121, 33, 227, 69, 195, 108, 225, 27, 18, 165, 221, 193, 253, + 153, 119, 79, 191, 222, 190, 8, 231, 95, 126, 151, 34, 125, 
138, 42, 67, 32, 65, 128, 37, 157, 12, 108, + 106, 250, 130, 98, 145, 155, 25, 98, 116, 115, 111, 38, 114, 143, 125, 225, 33, 252, 159, 251, 189, 88, 133, + 117, 218, 98, 178, 184, 108, 194, 56, 105, 112, 47, 58, 36, 139, 120, 83, 9, 150, 10, 69, 224, 228, 192, + 68, 9, 198, 150, 170, 129, 249, 91, 214, 143, 181, 91, 75, 74, 176, 67, 207, 148, 21, 173, 239, 189, 52, + 218, 216, 9, 211, 105, 138, 142, 114, 118, 172, 194, 21, 43, 176, 207, 170, 141, 111, 137, 74, 187, 216, 69, + 141, 49, 38, 232, 44, 2, 6, 159, 128, 229, 33, 40, 66, 137, 212, 116, 158, 86, 21, 92, 26, 72, 37, + 61, 239, 131, 55, 84, 228, 12, 160, 210, 244, 163, 148, 114, 208, 184, 213, 62, 57, 14, 65, 149, 155, 244, + 81, 59, 227, 98, 113, 80, 93, 247, 68, 193, 50, 232, 234, 245, 155, 233, 206, 51, 158, 106, 197, 37, 116, + 114, 50, 192, 180, 149, 14, 7, 44, 251, 209, 165, 72, 222, 115, 17, 43, 32, 253, 138, 20, 136, 241, 171, + 220, 199, 86, 172, 59, 143, 140, 123, 222, 145, 179, 145, 111, 100, 136, 95, 40, 169, 18, 202, 0, 72, 246, + 68, 194, 75, 23, 100, 71, 237, 106, 244, 115, 116, 6, 32, 31, 60, 154, 78, 24, 92, 238, 241, 204, 106, + 75, 64, 166, 251, 176, 6, 67, 223, 129, 171, 242, 16, 94, 165, 17, 178, 95, 136, 76, 202, 136, 218, 95, + 173, 211, 38, 11, 156, 40, 32, 142, 158, 150, 117, 173, 218, 82, 137, 120, 187, 222, 14, 196, 114, 253, 99, + 157, 111, 65, 139, 198, 150, 38, 60, 26, 105, 70, 58, 74, 177, 157, 15, 0, 47, 76, 119, 30, 89, 167, + 206, 252, 177, 136, 215, 37, 151, 41, 1, 146, 79, 84, 211, 121, 105, 194, 128, 65, 43, 78, 152, 104, 223, + 121, 12, 203, 158, 7, 218, 126, 79, 124, 101, 215, 152, 20, 243, 137, 89, 219, 155, 46, 135, 154, 218, 122, + 5, 43, 151, 59, 106, 210, 33, 9, 140, 126, 90, 19, 139, 138, 114, 47, 13, 184, 31, 95, 225, 251, 248, + 55, 142, 41, 39, 246, 125, 175, 249, 22, 187, 85, 5, 13, 140, 54, 95, 149, 228, 152, 74, 208, 27, 88, + 11, 254, 206, 164, 169, 55, 80, 75, 57, 23, 80, 192, 250, 43, 32, 58, 74, 252, 46, 208, 53, 167, 125, + 176, 169, 148, 117, 70, 203, 
80, 58, 223, 229, 154, 183, 11, 8, 253, 238, 53, 192, 92, 19, 51, 171, 213, + 79, 210, 206, 66, 187, 242, 124, 97, 219, 44, 153, 221, 103, 239, 121, 75, 20, 45, 245, 183, 198, 29, 42, + 131, 248, 215, 227, 10, 122, 117, 98, 54, 130, 4, 40, 147, 52, 52, 3, 228, 160, 167, 201, 47, 155, 254, + 15, 122, 96, 194, 127, 244, 209, 82, 210, 246, 207, 114, 190, 54, 147, 95, 193, 219, 208, 166, 13, 142, 64, + 181, 69, 162, 65, 145, 87, 102, 180, 90, 137, 156, 138, 118, 215, 18, 215, 92, 234, 253, 129, 226, 226, 186, + 176, 142, 42, 49, 154, 66, 246, 7, 202, 140, 245, 16, 21, 246, 53, 67, 234, 58, 251, 155, 142, 115, 190, + 184, 132, 192, 159, 104, 99, 249, 228, 147, 147, 90, 130, 247, 0, 8, 114, 67, 187, 50, 51, 8, 44, 204, + 106, 185, 136, 102, 208, 240, 235, 23, 79, 171, 194, 96, 5, 160, 250, 29, 182, 252, 107, 184, 15, 108, 184, + 237, 208, 247, 131, 31, 184, 97, 126, 57, 235, 132, 64, 183, 108, 146, 104, 135, 17, 160, 193, 104, 16, 120, + 94, 101, 0, 182, 178, 138, 86, 56, 110, 26, 112, 69, 222, 234, 253, 27, 118, 130, 101, 251, 84, 142, 173, + 214, 204, 172, 47, 110, 115, 25, 53, 21, 138, 253, 195, 249, 164, 80, 211, 189, 151, 193, 199, 95, 109, 248, + 241, 21, 192, 146, 32, 226, 101, 133, 75, 153, 159, 102, 114, 6, 159, 153, 127, 96, 91, 206, 130, 153, 167, + 129, 193, 238, 100, 81, 105, 36, 50, 236, 134, 86, 188, 127, 170, 30, 72, 8, 65, 155, 2, 15, 128, 237, + 203, 75, 155, 184, 52, 145, 228, 192, 218, 212, 200, 141, 126, 252, 124, 248, 177, 236, 128, 158, 245, 32, 213, + 24, 199, 217, 223, 252, 235, 82, 26, 31, 19, 134, 222, 31, 208, 180, 53, 218, 19, 201, 247, 221, 73, 173, + 132, 142, 66, 26, 230, 127, 106, 12, 36, 249, 75, 142, 166, 34, 144, 133, 59, 148, 45, 122, 155, 4, 129, + 189, 16, 121, 197, 1, 164, 111, 12, 182, 164, 100, 167, 183, 15, 167, 135, 28, 189, 198, 168, 54, 112, 51, + 5, 248, 147, 85, 130, 161, 153, 33, 107, 64, 81, 71, 119, 93, 29, 14, 59, 207, 18, 196, 99, 207, 141, + 243, 152, 207, 179, 69, 222, 32, 188, 187, 253, 67, 13, 116, 99, 16, 235, 
195, 176, 143, 21, 120, 109, 132, + 208, 64, 201, 227, 84, 191, 163, 40, 112, 207, 2, 39, 98, 172, 115, 185, 9, 106, 194, 181, 245, 3, 133, + 145, 24, 192, 124, 105, 214, 44, 114, 78, 160, 254, 156, 153, 242, 143, 211, 199, 192, 221, 187, 227, 56, 246, + 201, 198, 157, 32, 141, 184, 167, 194, 1, 142, 173, 57, 149, 246, 121, 84, 200, 237, 153, 158, 157, 20, 10, + 108, 36, 185, 192, 199, 218, 185, 130, 76, 207, 126, 135, 46, 219, 45, 3, 89, 51, 73, 180, 211, 27, 99, + 99, 252, 30, 250, 12, 50, 145, 145, 252, 87, 198, 40, 188, 89, 126, 224, 125, 139, 81, 18, 76, 123, 82, + 188, 94, 60, 243, 21, 138, 148, 36, 252, 8, 78, 188, 186, 204, 190, 161, 193, 27, 223, 124, 223, 81, 202, + 55, 229, 212, 62, 32, 248, 254, 225, 152, 195, 113, 185, 165, 164, 117, 251, 189, 134, 89, 9, 231, 4, 217, + 54, 194, 89, 236, 194, 34, 144, 169, 171, 31, 23, 52, 220, 79, 236, 179, 190, 1, 63, 142, 34, 55, 203, + 138, 12, 17, 86, 137, 152, 170, 178, 102, 182, 9, 186, 79, 195, 44, 189, 49, 181, 118, 118, 133, 155, 92, + 182, 157, 79, 34, 155, 77, 26, 2, 224, 173, 128, 87, 167, 134, 212, 193, 106, 111, 143, 9, 145, 189, 2, + 147, 209, 80, 225, 247, 67, 9, 92, 73, 252, 129, 162, 176, 171, 204, 168, 115, 253, 173, 128, 217, 151, 211, + 49, 51, 200, 237, 229, 59, 28, 149, 234, 188, 127, 20, 6, 54, 179, 224, 73, 64, 9, 49, 200, 112, 43, + 40, 144, 203, 183, 186, 40, 109, 185, 9, 212, 200, 67, 238, 3, 174, 58, 129, 89, 110, 182, 120, 169, 52, + 173, 23, 157, 8, 83, 9, 28, 161, 223, 158, 1, 134, 169, 121, 2, 173, 178, 8, 220, 195, 139, 164, 57, + 70, 190, 236, 85, 49, 121, 231, 19, 92, 160, 11, 213, 170, 60, 102, 77, 126, 51, 218, 204, 109, 46, 118, + 157, 7, 189, 21, 2, 238, 133, 216, 101, 117, 83, 92, 104, 52, 183, 220, 213, 72, 154, 225, 160, 76, 18, + 28, 35, 157, 19, 2, 32, 91, 228, 250, 11, 187, 81, 96, 57, 47, 156, 51, 43, 12, 193, 26, 26, 162, + 197, 153, 67, 100, 112, 132, 88, 117, 218, 122, 183, 197, 98, 104, 235, 21, 114, 128, 238, 15, 229, 244, 52, + 78, 122, 180, 115, 13, 54, 80, 160, 
14, 92, 199, 138, 95, 98, 58, 148, 173, 72, 3, 86, 56, 218, 244, + 63, 60, 26, 233, 129, 6, 54, 153, 109, 54, 18, 184, 207, 232, 63, 157, 200, 22, 163, 24, 18, 161, 206, + 143, 1, 154, 52, 12, 212, 238, 26, 138, 7, 211, 112, 147, 138, 47, 54, 211, 169, 210, 43, 146, 34, 9, + 123, 130, 206, 161, 211, 221, 101, 81, 103, 220, 250, 182, 34, 160, 222, 134, 2, 113, 123, 70, 213, 82, 75, + 42, 228, 184, 49, 174, 60, 223, 243, 28, 66, 176, 10, 92, 195, 227, 124, 249, 105, 53, 93, 153, 173, 164, + 244, 162, 59, 242, 251, 93, 11, 199, 50, 165, 18, 49, 82, 19, 223, 113, 100, 92, 185, 108, 222, 93, 9, + 189, 161, 254, 61, 235, 125, 52, 149, 173, 42, 25, 193, 65, 30, 154, 217, 164, 170, 158, 149, 216, 43, 88, + 179, 16, 254, 220, 119, 70, 243, 40, 221, 129, 124, 110, 187, 251, 149, 56, 142, 216, 44, 248, 75, 113, 112, + 106, 225, 150, 207, 137, 15, 88, 33, 30, 234, 252, 106, 92, 166, 103, 113, 89, 187, 199, 191, 167, 152, 181, + 140, 86, 118, 242, 118, 97, 182, 227, 129, 62, 106, 129, 126, 173, 100, 253, 47, 140, 204, 148, 134, 31, 179, + 75, 238, 188, 227, 217, 212, 176, 127, 251, 69, 68, 220, 187, 230, 25, 35, 175, 233, 15, 132, 128, 85, 192, + 94, 92, 191, 222, 151, 153, 243, 113, 96, 70, 43, 178, 223, 172, 147, 27, 135, 198, 72, 143, 144, 251, 47, + 249, 251, 214, 243, 36, 148, 241, 151, 47, 88, 5, 115, 164, 87, 69, 215, 226, 51, 238, 222, 243, 244, 193, + 84, 238, 41, 254, 55, 36, 196, 6, 65, 148, 14, 232, 237, 47, 216, 233, 129, 88, 245, 157, 2, 55, 181, + 121, 32, 6, 99, 145, 209, 239, 131, 16, 237, 50, 33, 241, 103, 143, 135, 10, 36, 34, 164, 87, 191, 138, + 115, 92, 158, 170, 206, 202, 79, 47, 226, 129, 0, 67, 39, 72, 121, 42, 133, 155, 16, 149, 31, 112, 197, + 169, 59, 218, 68, 39, 199, 84, 35, 147, 17, 76, 208, 33, 37, 167, 167, 92, 17, 67, 210, 251, 216, 150, + 108, 252, 4, 108, 184, 124, 179, 135, 62, 140, 177, 1, 1, 25, 239, 73, 253, 83, 18, 70, 63, 48, 10, + 153, 137, 241, 19, 210, 65, 86, 65, 217, 190, 104, 55, 144, 98, 169, 55, 223, 82, 230, 158, 211, 47, 213, 
+ 228, 111, 66, 7, 104, 91, 222, 242, 10, 198, 112, 18, 194, 145, 3, 123, 16, 195, 76, 100, 17, 7, 176, + 105, 178, 158, 92, 186, 159, 186, 217, 225, 36, 134, 156, 16, 9, 142, 147, 76, 210, 120, 162, 106, 67, 218, + 11, 134, 43, 174, 214, 173, 138, 210, 93, 129, 70, 238, 162, 87, 27, 137, 189, 204, 106, 231, 61, 40, 49, + 44, 143, 10, 75, 7, 225, 167, 150, 126, 251, 224, 171, 99, 161, 98, 21, 96, 224, 179, 38, 172, 188, 180, + 40, 196, 63, 76, 119, 76, 128, 56, 76, 122, 119, 97, 149, 16, 179, 116, 246, 147, 165, 157, 177, 86, 237, + 130, 95, 139, 30, 196, 37, 128, 53, 184, 248, 97, 4, 146, 74, 251, 56, 220, 118, 79, 154, 186, 223, 33, + 130, 179, 88, 102, 178, 149, 203, 119, 160, 220, 129, 137, 37, 215, 43, 24, 228, 62, 245, 90, 187, 98, 89, + 188, 72, 91, 64, 106, 9, 144, 32, 14, 212, 143, 124, 246, 236, 169, 5, 3, 251, 224, 122, 135, 202, 186, + 42, 116, 188, 86, 36, 0, 240, 244, 23, 27, 209, 229, 235, 224, 25, 215, 119, 131, 226, 44, 197, 132, 99, + 26, 48, 38, 217, 16, 231, 32, 223, 239, 103, 74, 29, 198, 90, 159, 191, 103, 186, 116, 217, 191, 92, 74, + 222, 158, 217, 133, 46, 0, 253, 182, 166, 187, 205, 216, 168, 48, 157, 69, 189, 135, 145, 152, 118, 181, 43, + 118, 184, 175, 140, 94, 253, 16, 95, 11, 210, 182, 48, 112, 62, 101, 167, 169, 73, 105, 240, 61, 203, 74, + 25, 212, 169, 78, 76, 113, 8, 201, 183, 80, 207, 63, 164, 100, 116, 236, 221, 193, 85, 117, 36, 75, 179, + 67, 28, 220, 219, 24, 97, 119, 102, 68, 100, 119, 74, 152, 85, 101, 230, 254, 105, 162, 6, 174, 186, 37, + 233, 161, 121, 8, 126, 24, 235, 243, 55, 229, 111, 51, 129, 201, 148, 57, 128, 6, 53, 58, 28, 243, 197, + 221, 130, 232, 6, 22, 248, 203, 64, 183, 248, 212, 87, 72, 82, 200, 201, 87, 77, 51, 164, 149, 179, 51, + 99, 243, 9, 94, 208, 64, 247, 84, 124, 191, 131, 148, 196, 151, 187, 72, 151, 116, 38, 164, 174, 188, 160, + 209, 18, 61, 183, 200, 100, 226, 181, 119, 71, 87, 170, 242, 33, 35, 52, 115, 63, 137, 57, 4, 20, 127, + 250, 246, 53, 185, 202, 59, 16, 229, 159, 106, 208, 6, 8, 33, 182, 
151, 36, 222, 243, 113, 20, 150, 103, + 175, 121, 54, 31, 156, 16, 205, 25, 167, 254, 139, 148, 85, 228, 90, 165, 198, 110, 80, 61, 95, 83, 136, + 188, 176, 128, 22, 118, 138, 30, 180, 252, 167, 238, 86, 147, 218, 169, 171, 45, 20, 187, 67, 239, 5, 113, + 107, 79, 182, 252, 49, 244, 175, 169, 37, 102, 178, 5, 80, 130, 5, 202, 106, 202, 40, 66, 68, 72, 214, + 189, 46, 223, 100, 70, 132, 160, 69, 61, 154, 116, 140, 177, 173, 246, 40, 159, 212, 93, 169, 79, 231, 55, + 129, 221, 81, 204, 96, 3, 61, 6, 211, 49, 236, 85, 142, 157, 105, 70, 138, 148, 38, 53, 187, 73, 60, + 212, 54, 19, 91, 113, 66, 242, 33, 60, 55, 243, 76, 219, 210, 147, 191, 88, 146, 117, 250, 27, 212, 219, + 192, 53, 70, 104, 127, 111, 217, 22, 131, 176, 45, 152, 90, 188, 120, 150, 57, 99, 71, 172, 51, 22, 159, + 164, 73, 229, 203, 116, 250, 56, 129, 24, 75, 191, 24, 209, 179, 217, 208, 72, 40, 129, 75, 225, 229, 166, + 182, 182, 185, 129, 173, 79, 200, 54, 152, 135, 5, 32, 157, 58, 89, 68, 172, 164, 101, 43, 36, 134, 126, + 144, 161, 90, 197, 248, 143, 18, 144, 148, 220, 33, 126, 182, 222, 124, 121, 218, 121, 129, 36, 190, 141, 49, + 166, 13, 95, 223, 69, 22, 171, 48, 18, 83, 211, 194, 199, 58, 54, 108, 168, 75, 235, 19, 176, 98, 32, + 51, 123, 41, 195, 197, 48, 196, 211, 249, 151, 136, 175, 221, 184, 121, 85, 64, 235, 10, 202, 166, 51, 203, + 229, 215, 189, 179, 108, 114, 207, 191, 164, 114, 114, 20, 174, 200, 138, 247, 104, 52, 47, 199, 45, 211, 126, + 219, 103, 93, 36, 112, 130, 73, 142, 177, 225, 110, 175, 113, 220, 151, 181, 146, 55, 48, 127, 237, 148, 113, + 80, 69, 51, 52, 232, 118, 247, 97, 110, 93, 180, 98, 214, 126, 0, 127, 75, 160, 153, 248, 246, 85, 57, + 73, 127, 38, 99, 2, 141, 151, 29, 120, 147, 158, 155, 240, 182, 101, 228, 23, 242, 96, 155, 29, 210, 181, + 33, 108, 175, 165, 146, 34, 131, 165, 196, 150, 11, 187, 151, 223, 35, 187, 179, 80, 118, 18, 64, 153, 78, + 150, 84, 94, 145, 165, 62, 84, 165, 223, 97, 85, 53, 70, 19, 246, 149, 163, 88, 213, 249, 191, 160, 97, + 2, 57, 98, 103, 
143, 174, 219, 245, 147, 41, 101, 249, 119, 199, 20, 42, 94, 11, 176, 136, 208, 253, 212, + 65, 102, 208, 82, 89, 9, 34, 60, 88, 105, 52, 146, 0, 138, 156, 141, 57, 73, 234, 39, 250, 96, 207, + 56, 56, 148, 67, 160, 48, 254, 250, 28, 105, 37, 232, 27, 8, 202, 3, 111, 194, 126, 15, 79, 197, 135, + 24, 43, 104, 93, 32, 22, 254, 146, 245, 159, 56, 99, 32, 199, 28, 142, 115, 176, 16, 75, 10, 151, 93, + 124, 193, 170, 125, 193, 198, 47, 122, 108, 53, 209, 137, 74, 141, 145, 165, 196, 47, 61, 161, 177, 192, 21, + 20, 144, 98, 215, 26, 111, 28, 140, 234, 117, 155, 164, 238, 124, 86, 180, 200, 228, 208, 97, 173, 52, 149, + 254, 204, 38, 164, 219, 112, 222, 190, 238, 69, 20, 229, 226, 129, 54, 39, 249, 53, 168, 173, 102, 68, 110, + 155, 203, 33, 51, 251, 136, 162, 160, 247, 187, 212, 41, 57, 15, 30, 204, 182, 156, 16, 117, 209, 140, 43, + 250, 124, 170, 20, 82, 61, 224, 132, 112, 199, 246, 116, 197, 171, 220, 50, 250, 83, 159, 244, 141, 204, 42, + 213, 134, 233, 194, 170, 111, 242, 159, 133, 120, 55, 5, 222, 130, 103, 77, 73, 124, 116, 146, 183, 233, 207, + 245, 25, 193, 220, 249, 89, 202, 160, 225, 123, 243, 111, 126, 227, 108, 118, 115, 33, 145, 13, 245, 226, 78, + 254, 4, 65, 58, 38, 67, 229, 246, 222, 155, 62, 46, 151, 111, 238, 213, 132, 214, 101, 22, 63, 121, 177, + 22, 89, 197, 235, 89, 36, 147, 135, 205, 183, 211, 46, 16, 152, 212, 133, 163, 36, 119, 108, 120, 33, 113, + 14, 103, 53, 113, 166, 229, 94, 221, 221, 135, 166, 222, 182, 129, 133, 127, 165, 68, 51, 111, 182, 253, 68, + 19, 172, 218, 168, 132, 203, 95, 2, 245, 203, 214, 197, 37, 63, 199, 127, 32, 89, 150, 62, 214, 89, 161, + 59, 190, 42, 203, 42, 32, 227, 39, 199, 207, 138, 181, 53, 155, 229, 118, 133, 45, 144, 155, 218, 201, 86, + 212, 103, 22, 111, 198, 236, 27, 210, 173, 64, 173, 92, 20, 193, 21, 25, 164, 72, 242, 95, 30, 94, 79, + 195, 217, 88, 55, 24, 221, 48, 149, 110, 224, 158, 30, 212, 223, 88, 93, 2, 78, 149, 109, 146, 103, 101, + 175, 118, 11, 59, 141, 76, 235, 38, 138, 25, 249, 65, 187, 119, 
240, 185, 175, 71, 110, 202, 116, 40, 217, + 182, 60, 191, 74, 127, 50, 131, 65, 74, 4, 209, 170, 44, 246, 246, 139, 235, 61, 243, 71, 59, 179, 213, + 62, 203, 194, 150, 106, 45, 177, 91, 30, 63, 202, 6, 231, 216, 81, 57, 79, 21, 224, 185, 57, 151, 209, + 206, 46, 218, 15, 2, 21, 30, 104, 242, 87, 98, 239, 167, 112, 235, 20, 152, 236, 216, 153, 196, 48, 211, + 248, 46, 169, 63, 95, 253, 87, 198, 158, 62, 115, 231, 155, 113, 54, 216, 225, 114, 23, 180, 222, 80, 124, + 109, 104, 184, 219, 73, 226, 37, 10, 15, 90, 162, 153, 60, 217, 104, 31, 31, 104, 207, 40, 26, 185, 238, + 216, 112, 160, 245, 51, 177, 6, 94, 75, 59, 251, 107, 182, 120, 213, 98, 90, 136, 161, 230, 81, 223, 216, + 102, 153, 47, 136, 249, 197, 38, 189, 100, 9, 99, 101, 72, 2, 246, 180, 63, 123, 82, 52, 203, 53, 34, + 34, 7, 201, 153, 6, 92, 148, 193, 186, 183, 137, 145, 193, 194, 199, 154, 204, 251, 163, 16, 150, 136, 139, + 206, 160, 137, 207, 42, 249, 117, 35, 58, 190, 34, 252, 26, 254, 90, 235, 166, 94, 143, 104, 79, 53, 169, + 40, 135, 161, 18, 164, 99, 130, 192, 75, 46, 168, 225, 47, 11, 150, 120, 230, 235, 90, 100, 237, 164, 126, + 69, 122, 95, 29, 202, 121, 245, 75, 191, 1, 95, 143, 57, 139, 142, 186, 12, 57, 227, 250, 33, 13, 214, + 243, 138, 80, 141, 23, 127, 154, 69, 108, 72, 51, 86, 77, 247, 128, 115, 225, 204, 211, 136, 11, 66, 9, + 241, 171, 151, 35, 122, 103, 36, 96, 171, 44, 161, 51, 135, 179, 218, 45, 170, 3, 161, 242, 133, 90, 240, + 11, 240, 58, 194, 3, 82, 87, 110, 132, 226, 87, 95, 44, 11, 239, 130, 17, 175, 16, 220, 61, 153, 110, + 160, 191, 36, 15, 28, 137, 21, 180, 109, 168, 26, 57, 251, 111, 26, 36, 132, 84, 92, 250, 167, 222, 162, + 246, 209, 195, 204, 201, 167, 240, 57, 61, 6, 73, 64, 171, 155, 45, 234, 86, 226, 3, 127, 191, 224, 97, + 247, 176, 13, 199, 200, 218, 148, 163, 188, 170, 163, 36, 157, 177, 87, 44, 228, 6, 79, 113, 38, 194, 190, + 105, 189, 241, 146, 75, 144, 87, 208, 85, 171, 55, 2, 151, 28, 85, 99, 157, 51, 37, 127, 197, 95, 99, + 93, 34, 18, 251, 241, 238, 34, 
46, 59, 91, 230, 8, 227, 133, 57, 245, 132, 88, 210, 183, 156, 81, 180, + 236, 141, 0, 21, 185, 247, 86, 243, 245, 6, 52, 50, 14, 106, 206, 43, 2, 80, 72, 227, 154, 85, 224, + 84, 228, 11, 162, 232, 160, 212, 56, 108, 17, 174, 165, 233, 208, 238, 213, 68, 154, 142, 179, 105, 188, 75, + 33, 151, 143, 105, 61, 10, 110, 98, 119, 113, 98, 58, 236, 39, 19, 30, 252, 190, 108, 202, 70, 38, 139, + 26, 175, 55, 160, 47, 10, 201, 133, 176, 212, 236, 76, 236, 84, 217, 101, 73, 18, 85, 84, 6, 82, 140, + 175, 230, 28, 231, 89, 175, 236, 68, 156, 199, 125, 65, 95, 0, 120, 157, 169, 132, 208, 53, 103, 201, 247, + 36, 205, 246, 216, 178, 132, 50, 247, 157, 219, 118, 131, 9, 32, 237, 68, 229, 246, 146, 193, 48, 128, 242, + 157, 150, 101, 195, 83, 167, 94, 212, 30, 78, 15, 93, 37, 203, 191, 164, 142, 181, 241, 34, 142, 229, 93, + 40, 183, 203, 213, 93, 235, 38, 154, 175, 68, 248, 237, 194, 138, 205, 64, 222, 196, 187, 234, 122, 96, 204, + 18, 101, 19, 179, 140, 227, 180, 182, 33, 153, 61, 175, 104, 129, 252, 136, 112, 241, 113, 233, 32, 174, 82, + 171, 202, 218, 97, 1, 96, 45, 181, 11, 89, 192, 189, 234, 198, 145, 146, 215, 82, 8, 99, 32, 48, 47, + 138, 156, 107, 136, 123, 196, 160, 113, 197, 166, 119, 254, 250, 119, 182, 134, 169, 79, 136, 24, 86, 0, 29, + 55, 241, 84, 42, 127, 64, 138, 208, 101, 146, 167, 102, 78, 7, 70, 102, 127, 152, 215, 177, 57, 211, 108, + 235, 152, 241, 202, 210, 13, 38, 83, 94, 186, 206, 80, 11, 13, 220, 19, 152, 219, 181, 47, 233, 217, 188, + 49, 95, 225, 17, 59, 163, 156, 3, 77, 98, 204, 50, 239, 133, 94, 60, 155, 50, 134, 94, 106, 70, 186, + 226, 36, 215, 35, 243, 69, 191, 179, 154, 146, 75, 24, 32, 80, 154, 123, 225, 59, 66, 31, 235, 73, 25, + 89, 40, 123, 191, 151, 243, 8, 209, 185, 203, 200, 252, 39, 98, 236, 185, 67, 65, 124, 78, 11, 235, 191, + 227, 150, 45, 20, 111, 23, 159, 16, 249, 89, 10, 3, 102, 127, 24, 53, 245, 57, 33, 74, 10, 177, 145, + 29, 29, 84, 248, 77, 169, 15, 5, 180, 114, 9, 205, 10, 213, 137, 183, 189, 175, 40, 66, 138, 10, 228, + 
170, 149, 63, 202, 111, 238, 53, 169, 125, 196, 127, 251, 87, 226, 217, 51, 55, 107, 245, 172, 190, 202, 139, + 217, 202, 28, 91, 110, 47, 136, 111, 138, 231, 54, 84, 198, 248, 176, 11, 40, 84, 166, 97, 45, 15, 223, + 222, 137, 182, 243, 108, 28, 44, 188, 232, 196, 191, 43, 149, 34, 202, 50, 85, 88, 85, 32, 200, 212, 76, + 92, 92, 177, 50, 41, 70, 37, 144, 226, 32, 208, 187, 112, 196, 205, 182, 36, 162, 47, 43, 16, 206, 211, + 227, 182, 228, 178, 92, 239, 83, 57, 132, 244, 87, 247, 213, 37, 111, 16, 21, 71, 2, 139, 13, 183, 108, + 156, 124, 249, 248, 107, 114, 162, 32, 162, 193, 253, 23, 78, 163, 123, 250, 204, 44, 130, 199, 97, 97, 199, + 103, 118, 29, 124, 198, 164, 41, 163, 134, 181, 200, 143, 95, 150, 183, 130, 224, 26, 101, 243, 22, 3, 50, + 193, 233, 105, 251, 90, 208, 171, 100, 79, 238, 107, 65, 254, 126, 159, 25, 23, 174, 198, 71, 55, 163, 71, + 40, 224, 245, 227, 185, 12, 236, 76, 98, 104, 204, 105, 230, 10, 81, 69, 65, 245, 53, 144, 149, 134, 77, + 139, 246, 126, 29, 47, 48, 86, 144, 84, 190, 249, 183, 115, 139, 178, 19, 136, 106, 225, 227, 189, 223, 185, + 79, 199, 246, 251, 9, 114, 158, 219, 204, 198, 97, 171, 69, 39, 64, 2, 250, 76, 196, 47, 238, 9, 240, + 8, 140, 168, 20, 161, 210, 117, 180, 190, 90, 195, 29, 176, 162, 247, 79, 194, 141, 120, 36, 137, 210, 250, + 57, 153, 184, 247, 58, 57, 46, 184, 61, 203, 170, 172, 63, 206, 219, 125, 33, 102, 16, 179, 118, 198, 7, + 115, 215, 226, 226, 1, 123, 91, 59, 231, 238, 48, 98, 248, 121, 17, 89, 29, 228, 129, 182, 197, 165, 95, + 34, 174, 44, 70, 40, 13, 168, 160, 96, 22, 120, 230, 176, 206, 206, 48, 41, 26, 78, 84, 16, 203, 205, + 197, 229, 70, 126, 143, 152, 206, 162, 115, 159, 19, 40, 17, 231, 253, 53, 208, 121, 201, 142, 230, 21, 8, + 212, 105, 205, 221, 151, 28, 122, 160, 98, 162, 209, 25, 25, 39, 170, 113, 92, 160, 217, 196, 71, 53, 32, + 82, 218, 33, 200, 186, 39, 68, 93, 144, 223, 228, 99, 113, 168, 94, 104, 195, 60, 136, 13, 193, 210, 109, + 101, 103, 156, 250, 214, 6, 204, 167, 109, 135, 245, 90, 
118, 101, 236, 208, 244, 0, 47, 3, 142, 198, 247, + 15, 9, 97, 206, 117, 65, 98, 121, 129, 243, 251, 3, 24, 252, 207, 89, 180, 21, 139, 88, 237, 227, 128, + 10, 156, 105, 112, 192, 99, 108, 22, 134, 112, 1, 153, 18, 239, 127, 30, 73, 85, 151, 192, 108, 51, 250, + 56, 110, 127, 92, 106, 48, 186, 203, 123, 23, 135, 128, 104, 180, 114, 60, 253, 125, 184, 27, 235, 62, 144, + 216, 76, 54, 44, 212, 4, 2, 234, 208, 103, 212, 67, 169, 51, 104, 215, 198, 47, 133, 80, 11, 67, 209, + 63, 0, 122, 243, 127, 43, 39, 142, 227, 108, 233, 61, 233, 69, 151, 39, 162, 1, 145, 61, 105, 21, 221, + 53, 247, 57, 3, 108, 40, 21, 54, 22, 95, 59, 5, 225, 82, 105, 35, 67, 154, 102, 254, 23, 208, 141, + 109, 174, 218, 169, 241, 71, 77, 160, 12, 65, 145, 118, 96, 233, 165, 37, 51, 1, 106, 238, 244, 226, 206, + 72, 98, 116, 151, 145, 202, 199, 240, 204, 177, 79, 164, 142, 100, 61, 145, 34, 80, 21, 180, 156, 119, 35, + 242, 81, 97, 18, 239, 27, 3, 249, 158, 70, 216, 175, 248, 93, 79, 175, 241, 207, 100, 251, 239, 178, 12, + 80, 77, 227, 178, 5, 111, 19, 57, 10, 30, 63, 30, 69, 203, 179, 170, 236, 168, 114, 26, 34, 189, 9, + 177, 5, 118, 6, 94, 176, 180, 227, 36, 32, 194, 232, 17, 167, 132, 100, 3, 43, 156, 227, 242, 137, 63, + 72, 3, 4, 173, 230, 166, 174, 197, 88, 34, 192, 130, 246, 98, 56, 188, 145, 189, 188, 145, 145, 99, 192, + 64, 68, 13, 218, 134, 53, 212, 137, 61, 7, 29, 97, 171, 26, 170, 28, 92, 51, 115, 126, 35, 189, 147, + 75, 100, 149, 11, 47, 47, 61, 182, 103, 145, 123, 78, 31, 102, 22, 139, 181, 253, 31, 138, 44, 120, 71, + 41, 79, 248, 227, 219, 157, 132, 13, 112, 152, 238, 43, 231, 118, 180, 254, 167, 75, 20, 44, 219, 191, 48, + 196, 112, 53, 35, 207, 120, 51, 66, 217, 195, 148, 177, 173, 172, 157, 225, 23, 213, 166, 131, 209, 61, 96, + 197, 164, 87, 78, 212, 40, 152, 89, 7, 78, 239, 130, 177, 124, 100, 14, 239, 53, 237, 121, 132, 207, 205, + 226, 46, 246, 139, 44, 241, 249, 142, 94, 52, 138, 90, 197, 204, 79, 240, 8, 62, 78, 97, 187, 88, 50, + 235, 149, 35, 197, 149, 87, 94, 103, 
56, 124, 136, 4, 187, 33, 163, 240, 3, 73, 142, 53, 60, 227, 82, + 144, 41, 158, 180, 24, 207, 97, 155, 185, 235, 97, 209, 105, 79, 125, 2, 6, 86, 6, 8, 168, 247, 175, + 66, 53, 197, 231, 130, 168, 79, 159, 117, 69, 80, 82, 221, 56, 175, 137, 6, 73, 115, 122, 211, 160, 133, + 41, 24, 207, 38, 191, 2, 150, 129, 156, 121, 227, 17, 166, 66, 223, 29, 142, 83, 24, 106, 206, 136, 158, + 232, 248, 237, 120, 21, 48, 58, 126, 28, 162, 137, 74, 205, 207, 159, 112, 46, 3, 45, 252, 153, 31, 177, + 129, 186, 157, 66, 18, 67, 185, 165, 0, 195, 107, 7, 237, 202, 238, 238, 5, 72, 238, 42, 181, 145, 187, + 187, 227, 67, 163, 199, 2, 94, 155, 95, 67, 239, 160, 135, 139, 10, 136, 222, 130, 182, 139, 51, 164, 129, + 15, 224, 43, 113, 56, 198, 63, 205, 170, 181, 68, 90, 215, 175, 37, 220, 185, 94, 99, 91, 211, 181, 247, + 5, 231, 131, 64, 28, 196, 65, 136, 214, 193, 213, 148, 18, 44, 76, 242, 173, 174, 150, 169, 221, 16, 65, + 94, 28, 225, 201, 118, 104, 115, 213, 112, 46, 184, 35, 186, 24, 198, 169, 91, 102, 65, 210, 120, 26, 148, + 142, 58, 138, 61, 211, 190, 42, 14, 37, 41, 21, 29, 148, 24, 59, 192, 21, 196, 33, 110, 116, 11, 109, + 196, 141, 83, 131, 48, 183, 200, 31, 70, 228, 158, 79, 9, 222, 239, 125, 248, 143, 29, 183, 235, 219, 237, + 252, 21, 84, 14, 105, 199, 84, 217, 68, 141, 204, 251, 5, 0, 70, 57, 200, 181, 98, 162, 185, 130, 151, + 192, 58, 116, 71, 149, 82, 76, 232, 66, 32, 127, 6, 62, 132, 132, 131, 240, 39, 113, 119, 87, 38, 238, + 79, 17, 208, 126, 174, 79, 103, 253, 189, 236, 178, 232, 68, 123, 226, 2, 130, 154, 191, 72, 90, 236, 186, + 235, 149, 46, 155, 94, 203, 96, 129, 61, 8, 57, 123, 37, 206, 240, 72, 59, 64, 43, 87, 165, 216, 176, + 161, 226, 232, 48, 101, 146, 155, 146, 227, 54, 209, 210, 243, 136, 233, 124, 209, 189, 35, 34, 163, 9, 205, + 68, 250, 166, 172, 91, 143, 247, 127, 97, 45, 122, 119, 97, 102, 77, 192, 4, 133, 192, 116, 128, 14, 90, + 22, 161, 42, 106, 20, 53, 224, 201, 84, 178, 107, 49, 54, 33, 44, 108, 101, 111, 124, 84, 110, 218, 54, + 211, 223, 
243, 70, 95, 249, 186, 236, 122, 60, 151, 159, 122, 178, 3, 68, 132, 61, 136, 250, 41, 150, 82, + 99, 87, 164, 113, 36, 143, 3, 28, 123, 147, 80, 15, 218, 90, 68, 235, 28, 69, 19, 223, 68, 154, 78, + 52, 95, 164, 242, 60, 65, 28, 63, 14, 220, 194, 155, 186, 221, 97, 248, 9, 244, 251, 238, 137, 204, 137, + 254, 127, 97, 65, 55, 135, 24, 32, 59, 115, 12, 20, 57, 232, 166, 26, 32, 237, 56, 194, 195, 57, 104, + 242, 153, 240, 212, 226, 37, 170, 93, 125, 74, 28, 106, 104, 125, 157, 250, 229, 157, 47, 219, 245, 122, 54, + 2, 238, 75, 53, 37, 160, 60, 47, 192, 97, 251, 4, 148, 142, 252, 108, 137, 43, 8, 227, 27, 122, 51, + 99, 39, 156, 195, 233, 232, 219, 5, 127, 104, 198, 84, 214, 69, 241, 81, 146, 150, 254, 137, 123, 193, 105, + 131, 81, 16, 106, 88, 10, 51, 143, 249, 37, 233, 165, 28, 133, 208, 100, 207, 212, 228, 236, 118, 103, 246, + 156, 220, 226, 199, 17, 163, 190, 122, 20, 71, 29, 105, 155, 164, 143, 129, 190, 49, 106, 249, 87, 245, 107, + 24, 140, 21, 248, 49, 57, 24, 86, 203, 83, 195, 36, 202, 78, 2, 9, 129, 244, 206, 5, 83, 151, 129, + 179, 45, 103, 129, 164, 10, 75, 52, 127, 224, 168, 211, 218, 185, 90, 167, 115, 49, 74, 129, 184, 243, 30, + 163, 223, 191, 199, 97, 254, 243, 180, 27, 91, 16, 26, 24, 172, 21, 98, 53, 0, 190, 180, 236, 33, 187, + 56, 244, 157, 90, 196, 173, 7, 25, 201, 0, 133, 214, 51, 84, 79, 154, 170, 245, 166, 114, 82, 235, 229, + 31, 3, 127, 216, 134, 7, 197, 86, 216, 211, 198, 226, 62, 153, 32, 216, 243, 217, 95, 185, 179, 51, 80, + 53, 180, 125, 161, 45, 169, 219, 219, 160, 253, 68, 199, 19, 34, 152, 112, 136, 252, 10, 133, 105, 155, 99, + 244, 125, 216, 67, 134, 241, 106, 241, 47, 238, 186, 214, 89, 125, 147, 142, 168, 134, 32, 137, 44, 143, 130, + 93, 120, 21, 207, 120, 139, 100, 94, 133, 248, 85, 150, 24, 183, 194, 164, 149, 46, 2, 206, 203, 60, 160, + 232, 48, 112, 200, 215, 108, 201, 129, 33, 122, 175, 161, 155, 187, 121, 136, 229, 149, 15, 100, 196, 238, 65, + 157, 155, 148, 247, 31, 208, 107, 108, 9, 107, 107, 184, 248, 152, 90, 
238, 233, 135, 6, 103, 243, 164, 50, + 3, 45, 193, 36, 244, 242, 90, 63, 152, 127, 54, 212, 71, 16, 247, 185, 196, 52, 145, 80, 162, 171, 197, + 213, 40, 31, 252, 76, 246, 124, 63, 163, 107, 52, 115, 134, 124, 23, 107, 142, 133, 216, 67, 195, 103, 246, + 81, 209, 150, 25, 246, 209, 64, 211, 237, 251, 96, 150, 120, 77, 232, 112, 93, 106, 184, 136, 230, 3, 74, + 4, 78, 197, 209, 54, 246, 113, 186, 140, 71, 130, 253, 122, 55, 233, 154, 243, 41, 211, 191, 170, 111, 12, + 179, 188, 148, 104, 233, 191, 212, 94, 66, 187, 33, 160, 232, 75, 124, 121, 125, 14, 67, 126, 129, 145, 64, + 241, 5, 20, 128, 85, 18, 201, 41, 87, 242, 228, 89, 212, 125, 55, 224, 96, 150, 79, 86, 242, 46, 69, + 26, 145, 162, 253, 145, 101, 163, 138, 248, 138, 71, 62, 178, 45, 14, 14, 53, 110, 42, 15, 50, 111, 6, + 171, 40, 73, 215, 207, 119, 19, 208, 145, 59, 236, 6, 164, 149, 128, 214, 244, 45, 72, 147, 145, 234, 138, + 249, 144, 22, 106, 42, 162, 181, 252, 8, 160, 214, 17, 235, 180, 123, 213, 162, 138, 245, 131, 110, 203, 36, + 38, 205, 1, 157, 56, 245, 83, 41, 116, 4, 158, 15, 89, 35, 207, 87, 203, 120, 162, 48, 138, 143, 91, + 253, 58, 177, 10, 18, 232, 142, 13, 152, 106, 67, 189, 170, 21, 178, 185, 82, 239, 178, 43, 116, 101, 149, + 8, 140, 32, 25, 202, 147, 122, 12, 122, 132, 113, 65, 253, 251, 194, 9, 184, 130, 15, 42, 108, 60, 85, + 127, 5, 169, 58, 73, 58, 231, 27, 19, 157, 239, 239, 121, 116, 33, 39, 170, 225, 82, 176, 17, 84, 66, + 245, 90, 196, 91, 139, 79, 67, 52, 27, 55, 25, 71, 131, 77, 68, 134, 71, 236, 122, 120, 139, 150, 219, + 120, 139, 172, 152, 4, 235, 154, 109, 39, 81, 128, 247, 197, 28, 24, 155, 173, 223, 179, 59, 164, 110, 101, + 93, 57, 207, 119, 185, 57, 5, 91, 104, 84, 248, 185, 227, 243, 50, 214, 146, 115, 8, 62, 131, 105, 84, + 55, 237, 13, 229, 247, 230, 44, 125, 249, 17, 29, 140, 212, 241, 122, 46, 220, 185, 241, 42, 104, 218, 158, + 234, 230, 244, 141, 161, 132, 179, 89, 102, 4, 183, 32, 36, 91, 244, 105, 36, 185, 154, 167, 165, 11, 24, + 123, 30, 136, 242, 151, 57, 
77, 86, 1, 203, 215, 187, 58, 224, 100, 245, 189, 155, 50, 212, 24, 85, 217, + 110, 69, 251, 48, 124, 194, 106, 158, 169, 245, 119, 71, 42, 120, 78, 128, 184, 116, 230, 34, 22, 21, 166, + 34, 229, 212, 167, 159, 12, 173, 82, 244, 156, 135, 239, 51, 78, 194, 169, 219, 32, 9, 75, 234, 147, 252, + 51, 214, 75, 182, 193, 45, 150, 30, 27, 20, 51, 66, 20, 212, 204, 232, 174, 87, 87, 154, 48, 199, 26, + 23, 205, 62, 86, 3, 230, 124, 215, 150, 244, 215, 150, 243, 178, 71, 216, 156, 20, 233, 230, 138, 87, 144, + 20, 83, 220, 104, 198, 162, 164, 219, 179, 245, 79, 37, 158, 147, 154, 110, 228, 122, 65, 227, 187, 114, 176, + 21, 36, 77, 232, 137, 71, 24, 108, 70, 193, 54, 113, 121, 245, 157, 64, 202, 41, 216, 151, 9, 96, 198, + 172, 14, 204, 193, 83, 108, 86, 154, 225, 215, 114, 169, 117, 20, 73, 82, 18, 37, 225, 102, 214, 248, 222, + 91, 177, 205, 253, 58, 244, 212, 78, 182, 245, 168, 81, 253, 187, 71, 6, 132, 135, 179, 208, 108, 231, 126, + 18, 195, 65, 211, 134, 16, 104, 196, 109, 173, 86, 150, 94, 116, 76, 180, 53, 75, 160, 223, 103, 250, 216, + 51, 31, 218, 153, 163, 184, 223, 252, 140, 98, 101, 187, 10, 99, 229, 82, 80, 253, 226, 79, 46, 211, 140, + 233, 239, 68, 204, 24, 22, 38, 180, 230, 158, 9, 37, 5, 116, 185, 139, 141, 17, 159, 216, 1, 144, 96, + 5, 131, 201, 37, 223, 108, 107, 107, 238, 8, 83, 4, 230, 244, 86, 219, 99, 112, 243, 82, 225, 209, 91, + 10, 224, 52, 90, 246, 21, 178, 97, 69, 72, 138, 227, 172, 109, 238, 161, 238, 79, 6, 35, 143, 96, 37, + 134, 86, 89, 193, 21, 147, 198, 230, 176, 30, 172, 245, 0, 240, 45, 201, 9, 21, 70, 60, 40, 254, 64, + 206, 96, 159, 18, 154, 184, 107, 51, 238, 57, 170, 92, 165, 84, 36, 239, 27, 47, 31, 140, 159, 53, 105, + 213, 34, 43, 5, 124, 251, 110, 60, 231, 106, 99, 7, 220, 173, 20, 194, 28, 12, 100, 196, 194, 157, 126, + 87, 47, 219, 220, 134, 10, 200, 189, 216, 174, 50, 106, 61, 247, 178, 81, 127, 51, 66, 52, 58, 189, 130, + 212, 188, 61, 71, 102, 127, 94, 157, 56, 161, 143, 188, 109, 119, 246, 132, 131, 99, 60, 88, 190, 202, 
237, + 203, 253, 167, 16, 54, 224, 187, 239, 115, 94, 32, 166, 152, 8, 70, 10, 55, 92, 234, 186, 173, 3, 165, + 199, 175, 183, 239, 51, 221, 232, 161, 24, 94, 158, 130, 250, 112, 207, 75, 128, 99, 157, 27, 181, 57, 0, + 157, 31, 97, 83, 128, 173, 40, 33, 190, 168, 138, 210, 87, 51, 219, 218, 64, 72, 241, 126, 153, 225, 69, + 119, 151, 214, 127, 252, 63, 161, 100, 83, 192, 218, 176, 136, 87, 69, 223, 109, 104, 38, 79, 37, 245, 172, + 88, 196, 78, 83, 73, 153, 93, 203, 171, 169, 113, 112, 137, 185, 137, 74, 244, 37, 50, 20, 229, 136, 50, + 10, 54, 73, 78, 167, 247, 127, 160, 9, 74, 248, 159, 194, 215, 170, 224, 175, 234, 84, 23, 55, 245, 223, + 63, 126, 179, 0, 139, 144, 148, 7, 210, 93, 246, 172, 243, 20, 158, 79, 51, 38, 167, 95, 234, 223, 37, + 72, 58, 60, 102, 92, 142, 123, 210, 84, 178, 138, 86, 18, 27, 0, 216, 68, 213, 69, 58, 146, 201, 120, + 7, 62, 73, 87, 189, 20, 134, 199, 86, 184, 44, 4, 65, 164, 242, 118, 250, 213, 79, 107, 98, 173, 76, + 110, 157, 68, 159, 167, 71, 87, 11, 88, 144, 44, 157, 92, 133, 182, 182, 173, 199, 187, 228, 216, 239, 242, + 122, 222, 82, 138, 58, 31, 249, 125, 149, 240, 78, 79, 32, 195, 167, 17, 181, 113, 205, 247, 115, 250, 22, + 47, 112, 141, 37, 187, 219, 134, 79, 204, 119, 68, 187, 9, 84, 102, 95, 227, 9, 98, 230, 36, 199, 112, + 42, 112, 68, 236, 0, 191, 62, 58, 211, 91, 1, 244, 200, 223, 15, 235, 247, 200, 164, 65, 63, 29, 220, + 201, 1, 85, 193, 113, 150, 139, 99, 148, 45, 244, 34, 215, 102, 180, 207, 103, 182, 29, 124, 79, 1, 93, + 142, 65, 228, 144, 133, 62, 192, 233, 252, 170, 159, 19, 251, 50, 172, 19, 73, 46, 71, 74, 190, 69, 16, + 172, 183, 9, 86, 31, 189, 122, 148, 155, 140, 137, 174, 52, 115, 162, 122, 204, 108, 52, 20, 108, 198, 238, + 46, 229, 130, 6, 95, 189, 83, 76, 150, 189, 121, 164, 144, 141, 149, 122, 28, 185, 48, 104, 213, 192, 57, + 188, 155, 235, 215, 194, 26, 207, 233, 90, 39, 117, 8, 75, 132, 0, 195, 160, 117, 173, 214, 140, 127, 156, + 69, 19, 46, 186, 199, 144, 149, 99, 129, 229, 68, 237, 64, 42, 248, 
106, 225, 169, 16, 36, 123, 31, 79, + 162, 81, 86, 117, 67, 150, 67, 191, 242, 117, 139, 104, 142, 90, 193, 180, 69, 115, 69, 236, 34, 209, 244, + 246, 58, 246, 74, 48, 116, 116, 163, 53, 157, 139, 198, 251, 166, 73, 36, 127, 226, 40, 97, 250, 40, 180, + 79, 174, 145, 228, 105, 234, 221, 209, 235, 241, 7, 184, 248, 196, 38, 33, 54, 132, 174, 207, 60, 14, 61, + 177, 56, 166, 252, 151, 99, 144, 127, 212, 184, 53, 19, 237, 78, 81, 83, 34, 220, 151, 205, 224, 160, 138, + 206, 233, 4, 247, 136, 130, 151, 219, 46, 66, 249, 213, 187, 216, 92, 35, 141, 53, 151, 135, 53, 82, 186, + 121, 98, 23, 7, 168, 204, 123, 107, 219, 138, 232, 137, 110, 171, 137, 86, 79, 167, 27, 92, 36, 106, 239, + 74, 55, 97, 223, 75, 245, 134, 223, 35, 213, 233, 71, 132, 124, 151, 197, 162, 31, 72, 143, 5, 26, 234, + 15, 106, 133, 32, 9, 212, 40, 189, 51, 186, 60, 46, 4, 84, 212, 32, 161, 191, 59, 10, 52, 29, 2, + 208, 56, 203, 74, 80, 38, 195, 13, 190, 237, 207, 204, 211, 192, 190, 156, 237, 222, 159, 168, 99, 175, 12, + 46, 221, 63, 183, 222, 216, 12, 43, 154, 154, 71, 222, 111, 12, 58, 103, 44, 110, 146, 139, 87, 55, 24, + 43, 9, 188, 143, 92, 58, 250, 21, 187, 45, 109, 147, 196, 178, 57, 65, 51, 133, 176, 177, 173, 137, 13, + 162, 46, 110, 77, 190, 231, 126, 218, 204, 71, 198, 190, 124, 92, 176, 197, 158, 224, 239, 236, 250, 178, 171, + 235, 179, 245, 186, 120, 89, 86, 195, 41, 227, 153, 35, 133, 199, 85, 210, 194, 26, 137, 251, 48, 187, 106, + 124, 42, 33, 145, 17, 24, 163, 100, 193, 41, 172, 21, 114, 16, 132, 3, 176, 86, 216, 34, 140, 181, 175, + 183, 162, 153, 52, 196, 154, 54, 68, 134, 51, 195, 175, 233, 247, 44, 108, 60, 180, 127, 219, 31, 227, 210, + 84, 201, 27, 82, 247, 181, 149, 76, 194, 21, 19, 199, 115, 80, 52, 246, 61, 75, 212, 89, 243, 126, 0, + 68, 109, 98, 14, 59, 79, 147, 250, 218, 225, 185, 90, 60, 116, 154, 170, 241, 54, 216, 137, 233, 89, 186, + 98, 95, 30, 63, 12, 113, 74, 239, 65, 22, 18, 119, 82, 27, 112, 31, 237, 105, 219, 194, 64, 109, 144, + 36, 128, 74, 239, 115, 148, 
71, 215, 64, 144, 151, 190, 119, 93, 41, 194, 180, 1, 140, 223, 153, 138, 131, + 147, 73, 33, 51, 211, 192, 234, 9, 78, 143, 62, 237, 160, 18, 125, 152, 137, 192, 121, 23, 113, 242, 70, + 199, 20, 77, 118, 197, 207, 11, 99, 121, 159, 37, 78, 65, 201, 102, 86, 22, 87, 11, 141, 214, 150, 233, + 43, 52, 88, 16, 80, 167, 73, 238, 38, 157, 143, 118, 24, 40, 213, 163, 116, 160, 223, 68, 202, 51, 230, + 69, 87, 182, 72, 85, 44, 99, 198, 165, 165, 25, 237, 137, 115, 152, 180, 7, 88, 241, 121, 182, 112, 93, + 29, 252, 31, 143, 24, 227, 241, 150, 207, 8, 253, 21, 246, 181, 158, 40, 115, 97, 51, 209, 169, 86, 108, + 10, 71, 106, 118, 104, 237, 245, 118, 58, 8, 174, 223, 120, 92, 170, 149, 10, 162, 31, 55, 189, 174, 238, + 37, 53, 223, 183, 131, 20, 181, 96, 238, 201, 167, 88, 70, 150, 190, 5, 174, 210, 160, 40, 168, 111, 132, + 33, 184, 193, 244, 47, 133, 78, 169, 170, 144, 177, 138, 228, 87, 139, 173, 96, 156, 4, 24, 54, 170, 100, + 70, 142, 4, 247, 25, 191, 232, 204, 203, 55, 130, 166, 214, 182, 194, 217, 188, 106, 217, 204, 40, 247, 225, + 250, 98, 142, 200, 100, 33, 123, 231, 221, 214, 190, 100, 229, 21, 139, 206, 250, 254, 77, 233, 112, 51, 174, + 183, 89, 54, 36, 231, 120, 230, 200, 19, 67, 206, 248, 81, 220, 54, 118, 56, 200, 75, 239, 150, 42, 41, + 183, 85, 210, 202, 83, 242, 198, 221, 50, 41, 115, 161, 6, 29, 155, 231, 124, 122, 60, 175, 198, 228, 131, + 128, 75, 143, 241, 83, 84, 70, 16, 2, 148, 183, 41, 71, 207, 216, 149, 245, 85, 25, 166, 18, 140, 53, + 251, 123, 199, 131, 88, 175, 29, 222, 236, 47, 230, 32, 203, 198, 59, 196, 222, 45, 31, 145, 94, 74, 92, + 154, 128, 2, 114, 166, 129, 28, 11, 110, 71, 68, 3, 127, 105, 155, 75, 117, 144, 195, 123, 65, 245, 30, + 17, 145, 192, 208, 97, 54, 159, 196, 83, 4, 64, 71, 30, 72, 178, 64, 100, 8, 156, 206, 5, 186, 13, + 59, 240, 25, 37, 22, 150, 27, 240, 154, 13, 110, 212, 132, 41, 108, 76, 175, 127, 239, 117, 11, 66, 16, + 15, 170, 138, 248, 227, 83, 246, 148, 237, 251, 17, 57, 198, 204, 7, 46, 117, 89, 23, 33, 29, 221, 
209, + 33, 203, 207, 199, 24, 148, 49, 67, 175, 108, 182, 42, 233, 124, 249, 63, 7, 167, 48, 135, 208, 157, 188, + 189, 35, 204, 18, 202, 199, 131, 168, 94, 143, 126, 76, 224, 252, 205, 238, 133, 145, 34, 189, 159, 195, 233, + 93, 139, 230, 82, 37, 52, 242, 191, 119, 84, 2, 10, 134, 108, 168, 88, 117, 148, 38, 51, 19, 153, 120, + 234, 56, 90, 167, 211, 47, 252, 61, 242, 32, 66, 17, 5, 69, 176, 24, 66, 40, 184, 181, 73, 14, 36, + 189, 9, 238, 181, 166, 139, 48, 216, 148, 223, 227, 109, 148, 152, 16, 19, 251, 117, 48, 178, 82, 40, 104, + 190, 214, 15, 226, 206, 144, 35, 123, 22, 147, 9, 14, 131, 213, 197, 219, 26, 70, 197, 157, 241, 221, 20, + 193, 250, 173, 84, 79, 150, 138, 206, 241, 102, 173, 226, 31, 239, 237, 248, 88, 202, 78, 163, 178, 19, 66, + 231, 28, 245, 31, 212, 39, 129, 201, 167, 71, 195, 22, 250, 222, 231, 67, 116, 170, 155, 148, 137, 161, 101, + 25, 223, 13, 105, 94, 111, 188, 61, 18, 69, 171, 6, 26, 69, 192, 6, 51, 25, 94, 48, 131, 128, 63, + 62, 243, 157, 13, 43, 61, 72, 39, 87, 102, 160, 174, 140, 211, 212, 49, 169, 211, 163, 232, 82, 87, 82, + 17, 73, 59, 69, 58, 63, 165, 178, 9, 186, 191, 115, 134, 141, 104, 150, 197, 202, 99, 181, 120, 154, 95, + 224, 127, 32, 206, 248, 173, 202, 232, 191, 106, 211, 90, 26, 182, 33, 219, 12, 226, 124, 197, 142, 174, 159, + 37, 68, 63, 109, 250, 153, 235, 227, 198, 229, 22, 46, 238, 55, 47, 117, 79, 194, 116, 103, 27, 248, 24, + 147, 95, 161, 146, 228, 228, 209, 40, 33, 37, 6, 88, 59, 89, 254, 191, 211, 91, 224, 99, 210, 168, 169, + 134, 100, 195, 118, 236, 147, 181, 148, 54, 1, 138, 43, 163, 145, 250, 225, 99, 142, 98, 57, 233, 9, 15, + 200, 222, 243, 241, 214, 104, 241, 176, 136, 246, 129, 39, 251, 65, 165, 7, 141, 139, 146, 110, 119, 68, 110, + 49, 73, 30, 126, 48, 252, 134, 219, 214, 218, 133, 138, 26, 90, 152, 23, 112, 175, 44, 22, 32, 180, 135, + 207, 213, 76, 36, 185, 173, 225, 12, 121, 199, 181, 251, 148, 54, 55, 15, 171, 162, 145, 114, 127, 99, 168, + 182, 72, 190, 27, 56, 250, 193, 174, 31, 229, 89, 83, 
133, 179, 13, 179, 242, 91, 19, 161, 163, 54, 177, + 111, 70, 144, 67, 178, 123, 216, 232, 86, 52, 218, 1, 224, 30, 182, 69, 225, 0, 71, 45, 232, 144, 48, + 245, 43, 1, 151, 82, 45, 168, 9, 206, 116, 84, 121, 250, 151, 119, 219, 101, 109, 5, 187, 133, 154, 215, + 208, 235, 114, 100, 66, 133, 127, 66, 133, 7, 1, 152, 23, 65, 249, 116, 219, 225, 150, 163, 30, 162, 115, + 60, 164, 96, 26, 9, 253, 13, 100, 210, 147, 159, 78, 179, 168, 110, 197, 99, 72, 30, 144, 68, 196, 222, + 32, 208, 217, 248, 179, 186, 3, 113, 79, 199, 138, 190, 167, 45, 99, 92, 6, 84, 123, 118, 238, 86, 41, + 59, 14, 226, 82, 146, 83, 198, 155, 175, 20, 119, 80, 251, 210, 101, 231, 253, 245, 208, 77, 195, 155, 138, + 72, 86, 228, 120, 134, 227, 33, 221, 181, 3, 78, 71, 26, 131, 116, 76, 62, 198, 183, 87, 69, 62, 26, + 44, 66, 150, 248, 191, 111, 42, 243, 26, 58, 108, 54, 138, 200, 116, 82, 12, 2, 8, 72, 17, 87, 254, + 205, 66, 216, 203, 47, 103, 202, 21, 181, 139, 169, 78, 1, 24, 210, 88, 215, 197, 43, 174, 174, 225, 128, + 121, 1, 218, 75, 243, 37, 230, 47, 246, 186, 176, 221, 184, 91, 34, 98, 1, 75, 207, 78, 135, 166, 48, + 116, 35, 71, 173, 182, 246, 76, 148, 208, 1, 133, 131, 126, 175, 110, 243, 133, 190, 123, 125, 139, 101, 253, + 96, 68, 94, 220, 60, 222, 4, 65, 122, 118, 0, 147, 87, 63, 51, 95, 109, 240, 97, 184, 65, 218, 136, + 45, 233, 68, 78, 49, 135, 233, 165, 151, 216, 26, 240, 115, 250, 122, 223, 122, 118, 9, 40, 130, 33, 169, + 248, 13, 223, 169, 197, 46, 215, 84, 50, 205, 106, 150, 155, 222, 201, 12, 203, 63, 254, 201, 168, 35, 217, + 41, 11, 108, 51, 252, 86, 66, 244, 26, 251, 130, 188, 5, 136, 154, 232, 22, 67, 147, 225, 54, 26, 221, + 38, 158, 139, 236, 91, 141, 243, 212, 113, 227, 116, 121, 19, 68, 203, 106, 147, 29, 216, 242, 18, 55, 18, + 252, 139, 112, 123, 182, 251, 216, 144, 178, 179, 24, 104, 41, 254, 109, 234, 74, 189, 202, 247, 58, 146, 176, + 125, 176, 167, 211, 32, 171, 93, 220, 204, 17, 27, 36, 10, 16, 38, 207, 144, 18, 219, 1, 247, 92, 54, + 91, 44, 137, 49, 31, 39, 
54, 187, 95, 98, 2, 20, 130, 253, 149, 74, 227, 64, 4, 127, 167, 16, 242, + 183, 126, 4, 55, 250, 62, 3, 217, 214, 77, 104, 59, 170, 87, 106, 199, 226, 99, 181, 84, 229, 145, 213, + 220, 26, 89, 187, 216, 210, 206, 6, 239, 54, 153, 15, 133, 140, 222, 155, 36, 191, 76, 167, 2, 124, 57, + 192, 120, 157, 27, 128, 252, 88, 7, 117, 222, 28, 221, 95, 25, 43, 79, 43, 191, 100, 243, 53, 96, 1, + 195, 10, 50, 12, 35, 84, 51, 43, 50, 217, 183, 57, 149, 26, 77, 210, 2, 79, 215, 53, 53, 26, 186, + 47, 151, 180, 23, 180, 22, 20, 54, 101, 159, 150, 7, 31, 18, 212, 214, 123, 56, 117, 145, 207, 82, 40, + 55, 111, 171, 106, 97, 13, 22, 4, 193, 221, 85, 52, 58, 31, 139, 229, 166, 8, 67, 107, 188, 55, 248, + 192, 234, 244, 147, 143, 81, 141, 146, 174, 239, 138, 22, 30, 118, 96, 213, 36, 66, 10, 207, 166, 91, 41, + 201, 195, 41, 144, 101, 81, 167, 100, 25, 15, 225, 251, 118, 167, 175, 233, 196, 155, 130, 87, 48, 176, 198, + 97, 143, 71, 192, 227, 25, 202, 202, 174, 197, 178, 217, 1, 248, 180, 191, 46, 186, 41, 58, 25, 60, 114, + 119, 211, 138, 14, 225, 197, 169, 26, 133, 115, 22, 206, 222, 27, 101, 148, 188, 104, 193, 16, 76, 149, 41, + 185, 87, 37, 83, 121, 252, 5, 29, 75, 67, 181, 50, 133, 50, 191, 41, 36, 16, 0, 55, 79, 240, 53, + 235, 237, 94, 107, 85, 123, 214, 67, 227, 154, 73, 125, 165, 226, 182, 216, 110, 179, 116, 63, 205, 241, 176, + 17, 204, 98, 170, 57, 147, 54, 24, 60, 253, 190, 213, 24, 7, 150, 212, 227, 230, 186, 173, 223, 75, 203, + 59, 223, 135, 238, 22, 133, 66, 157, 239, 206, 64, 51, 174, 230, 17, 54, 123, 133, 179, 118, 249, 57, 229, + 37, 235, 230, 87, 231, 137, 223, 226, 212, 225, 186, 149, 123, 222, 219, 195, 140, 134, 27, 141, 43, 94, 140, + 164, 56, 87, 62, 215, 159, 145, 20, 119, 254, 212, 147, 210, 52, 89, 126, 239, 51, 174, 240, 134, 160, 212, + 179, 79, 154, 247, 70, 144, 199, 177, 226, 54, 185, 16, 243, 40, 182, 134, 39, 168, 122, 38, 228, 22, 181, + 34, 214, 252, 114, 13, 247, 85, 128, 252, 171, 183, 127, 26, 238, 125, 140, 230, 147, 220, 119, 112, 243, 
232, + 107, 186, 148, 81, 79, 34, 106, 4, 95, 121, 18, 135, 33, 23, 1, 86, 69, 106, 139, 236, 222, 247, 54, + 22, 135, 159, 73, 83, 55, 75, 76, 117, 54, 31, 106, 170, 234, 4, 89, 222, 70, 11, 246, 208, 131, 100, + 68, 48, 111, 12, 66, 224, 118, 196, 215, 230, 193, 2, 246, 12, 52, 154, 57, 146, 113, 122, 112, 58, 152, + 228, 189, 209, 95, 19, 0, 35, 17, 115, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({32, 64, 3, 3}), + std::vector({18, 2, 16, 32}), DT_INT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, uint8_2c_2n_pad) { + uint8_t data[30 * 40 * 3 * 3] = { + 240, 195, 239, 13, 48, 181, 136, 177, 217, 211, 45, 33, 95, 18, 221, 5, 112, 37, 76, 243, 215, 65, 119, + 101, 119, 96, 145, 44, 108, 73, 72, 170, 49, 80, 168, 160, 3, 74, 18, 163, 51, 140, 154, 65, 185, 198, + 253, 26, 28, 246, 116, 4, 84, 189, 126, 186, 154, 217, 252, 13, 241, 5, 159, 60, 6, 53, 68, 48, 221, + 70, 126, 26, 124, 115, 220, 31, 249, 248, 159, 102, 106, 45, 25, 228, 90, 136, 121, 227, 156, 138, 242, 199, + 109, 114, 156, 205, 2, 175, 109, 246, 9, 64, 236, 157, 207, 238, 133, 0, 105, 146, 3, 33, 210, 222, 104, + 130, 151, 69, 30, 247, 210, 220, 163, 242, 245, 61, 129, 225, 135, 76, 33, 213, 90, 182, 151, 230, 40, 99, + 143, 131, 141, 84, 135, 232, 218, 249, 72, 181, 244, 133, 111, 221, 89, 122, 192, 211, 176, 223, 135, 231, 171, + 181, 49, 134, 90, 83, 212, 129, 89, 41, 85, 224, 87, 45, 45, 54, 132, 60, 132, 52, 200, 202, 197, 61, + 220, 26, 9, 114, 217, 134, 201, 240, 217, 48, 213, 57, 212, 92, 254, 217, 229, 151, 151, 249, 37, 178, 61, + 171, 196, 252, 37, 139, 69, 67, 126, 143, 166, 136, 138, 43, 186, 92, 199, 162, 47, 50, 151, 40, 240, 35, + 2, 38, 0, 
204, 153, 244, 230, 105, 168, 124, 232, 187, 108, 189, 19, 166, 149, 201, 99, 235, 71, 147, 241, + 214, 70, 12, 232, 86, 141, 39, 140, 155, 179, 86, 161, 67, 139, 213, 223, 212, 136, 117, 164, 28, 118, 17, + 56, 211, 240, 221, 22, 11, 254, 170, 144, 16, 238, 94, 19, 246, 179, 133, 205, 16, 65, 91, 72, 146, 132, + 22, 88, 228, 212, 26, 177, 129, 65, 223, 46, 96, 152, 92, 246, 207, 126, 18, 201, 192, 9, 6, 209, 117, + 200, 161, 222, 82, 216, 213, 33, 18, 172, 249, 167, 192, 47, 254, 50, 162, 34, 129, 241, 25, 28, 254, 241, + 100, 17, 240, 172, 97, 97, 129, 102, 78, 133, 67, 191, 99, 160, 209, 34, 10, 209, 242, 109, 135, 31, 243, + 244, 3, 209, 71, 186, 200, 245, 123, 157, 152, 160, 89, 30, 252, 101, 118, 175, 112, 6, 87, 144, 21, 196, + 217, 54, 4, 160, 85, 194, 31, 188, 242, 180, 144, 96, 172, 214, 251, 39, 151, 249, 146, 178, 122, 8, 166, + 8, 198, 34, 171, 154, 248, 28, 95, 78, 106, 26, 224, 175, 45, 241, 41, 6, 16, 96, 19, 120, 91, 21, + 153, 208, 34, 205, 154, 14, 215, 194, 170, 16, 176, 141, 104, 115, 81, 46, 212, 245, 120, 135, 83, 121, 84, + 71, 159, 240, 156, 44, 37, 161, 183, 233, 66, 156, 203, 54, 4, 111, 62, 100, 171, 209, 127, 87, 118, 8, + 233, 177, 120, 126, 224, 141, 148, 184, 16, 60, 111, 66, 232, 80, 165, 40, 130, 54, 206, 171, 249, 112, 236, + 216, 117, 166, 211, 42, 71, 68, 168, 197, 52, 99, 151, 231, 208, 231, 37, 38, 55, 164, 214, 202, 13, 212, + 18, 16, 89, 103, 113, 109, 2, 53, 103, 86, 128, 60, 58, 235, 40, 49, 118, 197, 182, 118, 3, 8, 75, + 97, 114, 69, 110, 74, 126, 211, 32, 76, 82, 49, 213, 153, 133, 218, 237, 17, 87, 130, 94, 206, 152, 116, + 252, 221, 52, 57, 225, 168, 203, 245, 85, 174, 250, 17, 114, 223, 66, 22, 229, 2, 157, 55, 12, 131, 16, + 213, 240, 129, 150, 234, 118, 78, 52, 199, 222, 102, 59, 226, 251, 68, 204, 188, 203, 221, 51, 172, 147, 49, + 169, 143, 5, 94, 26, 205, 155, 192, 126, 228, 26, 181, 107, 64, 7, 132, 88, 79, 5, 165, 182, 99, 106, + 39, 104, 2, 82, 141, 246, 136, 56, 229, 248, 42, 30, 238, 194, 15, 154, 20, 3, 8, 
231, 119, 235, 179, + 204, 175, 46, 64, 109, 214, 79, 82, 150, 188, 18, 96, 231, 191, 189, 46, 136, 178, 11, 77, 151, 165, 111, + 29, 143, 65, 254, 80, 128, 43, 10, 62, 162, 120, 99, 199, 246, 245, 101, 36, 222, 191, 220, 13, 164, 168, + 195, 141, 20, 229, 23, 240, 195, 200, 52, 83, 225, 85, 56, 53, 211, 121, 119, 123, 224, 225, 196, 63, 12, + 17, 11, 32, 210, 86, 119, 26, 19, 13, 60, 76, 196, 89, 7, 55, 167, 118, 105, 90, 120, 177, 6, 223, + 31, 127, 128, 234, 203, 157, 237, 208, 117, 112, 250, 47, 168, 28, 197, 193, 109, 214, 201, 41, 248, 191, 227, + 124, 163, 71, 13, 5, 125, 50, 184, 120, 226, 171, 114, 95, 29, 207, 194, 111, 232, 157, 80, 214, 8, 206, + 104, 63, 198, 115, 178, 229, 52, 201, 203, 173, 84, 58, 185, 69, 75, 118, 162, 92, 2, 56, 212, 108, 18, + 105, 200, 156, 122, 161, 14, 222, 67, 188, 231, 85, 9, 22, 111, 250, 156, 235, 120, 236, 48, 5, 208, 236, + 150, 84, 224, 67, 81, 166, 110, 4, 194, 70, 196, 200, 160, 172, 177, 8, 31, 42, 198, 62, 30, 230, 7, + 50, 6, 252, 59, 57, 205, 214, 85, 80, 157, 75, 129, 210, 213, 142, 220, 133, 235, 97, 113, 195, 149, 2, + 73, 25, 105, 159, 238, 200, 77, 99, 73, 0, 143, 4, 6, 158, 134, 238, 244, 145, 96, 188, 192, 53, 31, + 189, 178, 181, 31, 241, 218, 112, 155, 125, 92, 146, 179, 119, 191, 248, 210, 223, 123, 116, 171, 57, 199, 204, + 75, 221, 234, 33, 170, 44, 162, 6, 212, 236, 217, 108, 251, 140, 23, 124, 196, 209, 181, 39, 221, 242, 23, + 238, 203, 48, 58, 44, 86, 56, 116, 57, 54, 7, 70, 190, 130, 79, 254, 41, 216, 103, 32, 50, 237, 215, + 93, 252, 40, 219, 253, 18, 123, 185, 113, 42, 53, 97, 246, 241, 63, 89, 162, 248, 114, 63, 182, 233, 175, + 186, 235, 245, 171, 135, 86, 191, 30, 229, 101, 200, 77, 76, 88, 70, 225, 222, 68, 148, 48, 142, 48, 251, + 20, 26, 197, 20, 236, 199, 16, 54, 247, 115, 52, 6, 158, 30, 132, 93, 76, 62, 220, 65, 216, 175, 35, + 47, 151, 152, 3, 58, 149, 20, 223, 163, 44, 202, 37, 65, 54, 11, 21, 134, 49, 183, 35, 72, 92, 65, + 55, 213, 36, 112, 3, 188, 62, 68, 128, 31, 196, 78, 249, 63, 
108, 59, 157, 142, 248, 6, 45, 183, 177, + 216, 136, 223, 111, 162, 222, 194, 33, 129, 144, 115, 146, 170, 55, 136, 141, 29, 61, 63, 33, 35, 153, 196, + 150, 173, 80, 84, 207, 174, 40, 137, 82, 171, 68, 106, 184, 191, 90, 210, 253, 148, 102, 77, 175, 158, 194, + 6, 134, 215, 225, 49, 253, 226, 179, 44, 176, 137, 78, 143, 72, 96, 109, 7, 68, 188, 118, 176, 108, 7, + 34, 17, 76, 254, 168, 27, 88, 148, 184, 11, 42, 242, 140, 2, 249, 240, 210, 135, 213, 12, 230, 118, 212, + 31, 239, 234, 133, 104, 72, 171, 111, 18, 169, 120, 27, 119, 165, 204, 90, 83, 54, 196, 229, 37, 52, 19, + 132, 1, 121, 156, 139, 180, 102, 11, 159, 66, 222, 242, 246, 114, 38, 118, 179, 71, 112, 144, 108, 181, 188, + 62, 181, 108, 150, 224, 101, 97, 22, 131, 174, 116, 148, 175, 187, 53, 40, 74, 204, 97, 87, 130, 177, 194, + 137, 94, 57, 79, 225, 38, 222, 152, 199, 132, 208, 25, 31, 222, 130, 242, 20, 25, 212, 135, 234, 71, 116, + 193, 200, 120, 122, 124, 168, 230, 20, 213, 137, 63, 4, 6, 90, 116, 123, 182, 4, 20, 177, 62, 42, 242, + 197, 184, 89, 117, 192, 49, 236, 241, 231, 233, 69, 218, 144, 239, 236, 202, 75, 135, 141, 59, 249, 196, 37, + 246, 185, 181, 51, 42, 62, 56, 62, 193, 47, 210, 26, 95, 84, 151, 236, 60, 119, 66, 252, 75, 231, 96, + 16, 226, 106, 184, 8, 64, 174, 61, 208, 114, 43, 87, 71, 23, 95, 116, 100, 6, 187, 5, 189, 191, 36, + 28, 97, 10, 32, 236, 35, 150, 171, 184, 94, 121, 57, 176, 100, 174, 42, 180, 47, 0, 132, 146, 150, 172, + 84, 83, 6, 223, 109, 86, 87, 181, 113, 107, 54, 36, 53, 1, 130, 214, 19, 122, 30, 191, 183, 151, 252, + 48, 123, 66, 114, 198, 178, 199, 107, 188, 191, 22, 187, 146, 183, 93, 74, 99, 74, 16, 47, 110, 219, 65, + 57, 201, 94, 129, 208, 162, 111, 98, 24, 103, 215, 20, 176, 41, 220, 250, 55, 61, 4, 111, 121, 66, 190, + 127, 170, 175, 237, 59, 115, 136, 21, 87, 26, 53, 168, 247, 55, 10, 135, 31, 97, 204, 171, 151, 127, 206, + 122, 96, 225, 131, 155, 178, 105, 149, 203, 169, 59, 129, 5, 63, 136, 114, 92, 62, 9, 172, 230, 86, 213, + 168, 251, 213, 87, 136, 231, 
240, 168, 98, 182, 228, 103, 21, 9, 208, 241, 203, 144, 168, 75, 34, 26, 189, + 220, 175, 187, 179, 217, 107, 177, 29, 235, 185, 210, 177, 155, 151, 114, 3, 163, 27, 123, 155, 207, 204, 36, + 237, 211, 57, 246, 157, 251, 231, 251, 59, 90, 235, 189, 78, 141, 221, 108, 132, 143, 218, 50, 180, 157, 97, + 119, 213, 240, 86, 51, 32, 1, 215, 47, 116, 103, 76, 238, 44, 235, 54, 72, 228, 156, 194, 29, 197, 169, + 149, 219, 29, 146, 144, 136, 102, 26, 168, 20, 217, 22, 47, 207, 244, 250, 107, 232, 169, 209, 3, 30, 240, + 90, 14, 42, 106, 170, 194, 212, 24, 170, 154, 91, 23, 244, 56, 108, 159, 156, 210, 97, 181, 151, 31, 87, + 250, 26, 107, 225, 253, 154, 225, 83, 72, 206, 139, 241, 207, 94, 17, 227, 150, 236, 10, 238, 26, 123, 108, + 133, 200, 204, 247, 175, 184, 15, 42, 87, 92, 9, 210, 9, 77, 200, 189, 6, 204, 41, 82, 110, 24, 63, + 152, 224, 35, 108, 124, 6, 108, 226, 144, 179, 37, 156, 128, 206, 73, 175, 136, 38, 238, 133, 196, 188, 123, + 123, 17, 50, 168, 66, 49, 32, 80, 190, 164, 109, 226, 148, 32, 131, 215, 190, 190, 189, 40, 206, 227, 79, + 190, 242, 17, 234, 1, 7, 161, 237, 63, 24, 67, 150, 161, 192, 18, 253, 0, 222, 114, 159, 22, 61, 17, + 70, 114, 58, 180, 220, 169, 48, 186, 82, 242, 213, 76, 149, 99, 170, 104, 252, 123, 242, 218, 125, 68, 166, + 60, 85, 49, 239, 169, 145, 210, 69, 165, 235, 216, 45, 201, 106, 124, 192, 143, 236, 45, 229, 177, 31, 165, + 76, 147, 21, 0, 52, 98, 195, 109, 192, 94, 66, 223, 47, 8, 146, 81, 41, 127, 159, 46, 111, 67, 94, + 55, 205, 220, 59, 160, 216, 198, 79, 47, 34, 152, 188, 47, 161, 61, 160, 206, 98, 80, 195, 122, 99, 165, + 86, 130, 166, 90, 201, 40, 7, 177, 52, 118, 203, 225, 158, 151, 189, 102, 98, 82, 198, 121, 20, 125, 142, + 113, 190, 123, 13, 228, 206, 124, 58, 108, 26, 90, 18, 240, 78, 161, 172, 172, 21, 66, 54, 122, 146, 96, + 46, 109, 152, 28, 26, 138, 10, 247, 15, 16, 147, 116, 208, 220, 46, 139, 27, 142, 86, 240, 230, 32, 71, + 68, 232, 254, 223, 94, 221, 29, 248, 198, 72, 5, 212, 154, 15, 46, 29, 207, 76, 69, 146, 
190, 176, 251, + 123, 126, 232, 134, 46, 29, 23, 236, 22, 240, 76, 46, 194, 27, 237, 179, 234, 211, 194, 164, 230, 237, 14, + 46, 248, 82, 61, 86, 64, 44, 66, 190, 69, 79, 111, 96, 152, 150, 234, 77, 237, 186, 216, 9, 110, 158, + 250, 169, 188, 140, 212, 28, 31, 95, 99, 193, 37, 125, 71, 95, 40, 14, 155, 153, 207, 131, 165, 42, 96, + 108, 127, 30, 217, 98, 181, 26, 172, 223, 174, 165, 81, 68, 233, 65, 192, 178, 78, 143, 170, 68, 12, 61, + 6, 64, 142, 173, 78, 94, 191, 89, 97, 112, 90, 200, 212, 193, 199, 15, 179, 119, 136, 205, 114, 234, 220, + 155, 152, 173, 159, 129, 62, 108, 39, 94, 209, 20, 131, 124, 66, 167, 162, 141, 223, 106, 58, 103, 210, 171, + 153, 222, 157, 11, 81, 172, 224, 25, 242, 144, 84, 45, 40, 225, 192, 13, 18, 157, 250, 89, 166, 71, 130, + 3, 98, 148, 66, 41, 81, 116, 9, 80, 9, 19, 205, 44, 228, 183, 146, 68, 57, 104, 122, 218, 139, 58, + 46, 128, 243, 19, 4, 47, 227, 12, 43, 96, 93, 149, 44, 89, 204, 187, 58, 58, 223, 154, 97, 146, 56, + 221, 118, 86, 169, 253, 48, 136, 40, 219, 142, 185, 14, 77, 168, 78, 187, 202, 124, 245, 250, 108, 12, 241, + 136, 136, 175, 243, 117, 217, 4, 11, 115, 21, 14, 221, 185, 250, 125, 245, 13, 71, 212, 32, 230, 8, 94, + 247, 132, 197, 18, 206, 178, 131, 38, 93, 22, 230, 174, 112, 144, 10, 88, 75, 132, 77, 23, 252, 22, 18, + 101, 130, 88, 180, 18, 244, 253, 145, 67, 206, 30, 109, 141, 2, 11, 64, 15, 74, 16, 201, 114, 40, 17, + 225, 146, 231, 35, 9, 55, 49, 143, 181, 104, 102, 88, 171, 38, 68, 4, 91, 225, 232, 186, 100, 210, 41, + 59, 54, 30, 67, 129, 3, 163, 125, 218, 188, 203, 40, 210, 7, 34, 250, 245, 10, 245, 139, 106, 3, 237, + 202, 72, 61, 143, 79, 213, 17, 53, 73, 203, 129, 231, 229, 133, 216, 186, 188, 251, 187, 244, 58, 136, 116, + 1, 178, 142, 172, 20, 233, 134, 247, 86, 186, 59, 161, 77, 187, 86, 45, 177, 241, 41, 114, 35, 156, 173, + 70, 92, 178, 198, 138, 91, 29, 206, 80, 26, 61, 8, 88, 240, 237, 88, 209, 57, 191, 78, 203, 42, 12, + 154, 105, 165, 52, 52, 29, 70, 172, 52, 210, 154, 190, 205, 26, 123, 223, 
66, 108, 4, 45, 191, 162, 58, + 142, 205, 254, 64, 205, 84, 235, 41, 50, 18, 77, 97, 86, 52, 189, 80, 198, 254, 78, 26, 131, 142, 69, + 210, 45, 123, 5, 162, 7, 208, 32, 46, 123, 37, 195, 30, 60, 160, 223, 165, 98, 110, 34, 41, 44, 166, + 156, 204, 121, 75, 120, 24, 193, 5, 31, 125, 203, 39, 229, 11, 192, 90, 243, 142, 135, 199, 165, 143, 140, + 37, 133, 30, 65, 18, 88, 11, 190, 181, 126, 158, 2, 174, 225, 221, 151, 73, 64, 216, 212, 28, 40, 131, + 85, 236, 20, 143, 125, 245, 239, 86, 178, 236, 179, 9, 160, 49, 231, 97, 93, 190, 28, 31, 91, 70, 39, + 64, 96, 28, 218, 154, 62, 214, 84, 19, 153, 2, 193, 100, 23, 185, 182, 102, 66, 206, 222, 149, 247, 247, + 19, 193, 64, 56, 68, 149, 175, 139, 30, 221, 10, 242, 35, 166, 113, 63, 206, 186, 163, 217, 25, 17, 217, + 3, 89, 113, 182, 117, 105, 159, 112, 62, 140, 65, 244, 120, 118, 88, 175, 157, 195, 140, 143, 107, 33, 198, + 11, 8, 241, 59, 225, 190, 135, 228, 19, 34, 228, 125, 196, 107, 50, 245, 204, 73, 9, 81, 126, 199, 86, + 88, 178, 220, 53, 87, 74, 24, 188, 156, 104, 122, 20, 111, 16, 102, 212, 169, 10, 172, 50, 167, 89, 28, + 87, 180, 47, 247, 113, 94, 186, 96, 99, 16, 231, 7, 228, 103, 49, 45, 27, 55, 123, 212, 130, 212, 38, + 70, 118, 57, 205, 7, 156, 146, 79, 35, 57, 9, 182, 32, 214, 32, 23, 85, 154, 3, 61, 60, 240, 96, + 105, 153, 63, 3, 229, 235, 129, 194, 129, 233, 71, 122, 8, 195, 137, 39, 158, 86, 2, 147, 77, 28, 41, + 234, 144, 180, 213, 36, 254, 157, 160, 80, 99, 125, 205, 57, 170, 136, 228, 54, 58, 185, 181, 121, 42, 229, + 155, 233, 247, 66, 20, 91, 34, 212, 197, 99, 157, 10, 45, 249, 81, 250, 32, 192, 232, 167, 102, 205, 211, + 168, 240, 103, 117, 91, 82, 184, 28, 85, 221, 91, 167, 233, 46, 254, 95, 182, 186, 219, 28, 102, 154, 227, + 65, 35, 63, 59, 133, 173, 145, 133, 192, 208, 126, 248, 179, 120, 137, 177, 241, 57, 0, 154, 142, 48, 73, + 69, 166, 192, 96, 175, 162, 14, 253, 31, 67, 71, 150, 49, 24, 85, 93, 156, 20, 243, 141, 127, 185, 56, + 71, 60, 137, 29, 51, 87, 39, 138, 130, 32, 229, 70, 180, 35, 
252, 134, 167, 121, 33, 246, 56, 143, 42, + 245, 44, 77, 235, 181, 13, 15, 81, 212, 64, 75, 109, 25, 84, 134, 38, 31, 52, 168, 205, 52, 152, 157, + 231, 198, 161, 117, 102, 15, 247, 59, 213, 48, 57, 29, 170, 58, 211, 174, 40, 54, 239, 188, 1, 240, 174, + 197, 70, 149, 134, 94, 48, 49, 251, 144, 51, 153, 185, 169, 222, 68, 237, 130, 187, 13, 74, 224, 43, 17, + 124, 133, 125, 87, 192, 247, 186, 158, 221, 54, 223, 233, 245, 249, 169, 204, 100, 206, 17, 127, 167, 139, 140, + 82, 248, 223, 92, 193, 158, 120, 240, 89, 195, 139, 218, 248, 248, 228, 2, 21, 184, 193, 120, 233, 42, 37, + 209, 20, 81, 131, 72, 196, 124, 21, 216, 160, 168, 250, 208, 109, 225, 126, 148, 204, 106, 101, 13, 235, 239, + 156, 211, 4, 47, 216, 254, 113, 157, 186, 126, 253, 5, 230, 4, 67, 223, 76, 193, 198, 164, 5, 113, 20, + 197, 160, 29, 119, 72, 15, 221, 150, 159, 179, 113, 65, 121, 46, 156, 230, 111, 230, 187, 2, 246, 77, 240, + 10, 146, 68, 199, 198, 38, 120, 201, 4, 27, 103, 236, 116, 13, 224, 57, 126, 138, 232, 206, 227, 131, 251, + 34, 229, 4, 253, 96, 79, 10, 72, 73, 50, 20, 245, 69, 231, 134, 7, 48, 207, 41, 138, 241, 158, 80, + 160, 55, 140, 3, 201, 105, 12, 182, 49, 154, 91, 5, 22, 233, 92, 231, 40, 47, 215, 16, 197, 82, 175, + 105, 76, 229, 30, 9, 88, 29, 203, 193, 199, 129, 14, 239, 168, 129, 192, 158, 100, 248, 88, 64, 139, 177, + 123, 30, 132, 170, 32, 165, 55, 215, 127, 246, 204, 110, 177, 37, 131, 221, 68, 208, 217, 31, 248, 14, 227, + 97, 123, 100, 196, 119, 41, 206, 110, 224, 192, 165, 235, 142, 116, 241, 98, 222, 69, 246, 89, 108, 113, 229, + 35, 69, 70, 21, 129, 130, 30, 36, 27, 143, 247, 243, 63, 185, 206, 229, 204, 171, 201, 45, 60, 93, 227, + 113, 243, 113, 226, 165, 144, 231, 81, 137, 46, 3, 61, 22, 129, 12, 63, 63, 253, 130, 235, 29, 161, 75, + 178, 214, 152, 203, 80, 181, 193, 146, 141, 80, 98, 130, 194, 93, 118, 134, 207, 69, 146, 74, 248, 46, 125, + 39, 253, 253, 64, 30, 172, 73, 199, 1, 228, 64, 221, 111, 166, 64, 184, 121, 248, 72, 12, 118, 237, 122, + 126, 218, 20, 
246, 213, 246, 51, 59, 228, 77, 171, 156, 237, 247, 238, 60, 27, 162, 153, 219, 15, 253, 209, + 48, 179, 202, 191, 62, 100, 219, 193, 243, 246, 178, 43, 99, 30, 238, 122, 134, 153, 118, 67, 238, 243, 112, + 28, 241, 194, 249, 147, 235, 24, 182, 127, 2, 142, 70, 181, 247, 162, 32, 158, 53, 214, 251, 197, 85, 70, + 88, 54, 128, 90, 159, 13, 135, 244, 10, 30, 31, 165, 27, 70, 105, 104, 53, 183, 35, 243, 122, 152, 216, + 98, 141, 196, 169, 222, 223, 143, 245, 210, 212, 35, 171, 140, 66, 198, 20, 245, 70, 140, 190, 106, 7, 183, + 199, 38, 46, 85, 52, 23, 1, 199, 239, 241, 202, 96, 140, 236, 154, 182, 50, 82, 211, 211, 104, 217, 217, + 252, 94, 204, 239, 98, 64, 210, 33, 201, 226, 208, 151, 112, 46, 90, 212, 22, 53, 212, 221, 34, 66, 177, + 204, 205, 99, 8, 79, 134, 141, 236, 157, 183, 192, 123, 212, 234, 39, 208, 105, 185, 215, 235, 34, 95, 158, + 54, 94, 46, 42, 172, 47, 34, 233, 238, 195, 45, 6, 166, 27, 11, 22, 243, 126, 9, 239, 62, 238, 84, + 181, 224, 223, 232, 227, 60, 46, 242, 67, 92, 68, 205, 62, 210, 199, 29, 25, 205, 190, 30, 57, 112, 112, + 183, 114, 223, 105, 213, 118, 113, 86, 152, 121, 22, 91, 167, 37, 135, 221, 124, 192, 73, 45, 213, 8, 37, + 51, 152, 18, 17, 114, 73, 63, 78, 65, 8, 136, 151, 20, 33, 181, 61, 223, 9, 99, 138, 166, 114, 150, + 30, 99, 143, 155, 198, 203, 142, 218, 152, 238, 15, 57, 164, 25, 179, 61, 172, 16, 243, 27, 17, 139, 31, + 9, 237, 128, 224, 247, 112, 165, 178, 203, 102, 129, 31, 38, 62, 242, 121, 166, 241, 241, 172, 26, 222, 91, + 101, 15, 11, 115, 121, 20, 34, 38, 24, 206, 238, 215, 104, 221, 214, 81, 84, 96, 26, 225, 20, 152, 149, + 50, 251, 152, 62, 69, 94, 18, 174, 220, 172, 25, 206, 162, 201, 55, 150, 108, 123, 160, 81, 167, 168, 46, + 118, 243, 12, 15, 218, 125, 152, 216, 79, 175, 79, 65, 114, 244, 175, 127, 163, 90, 202, 68, 127, 80, 135, + 206, 9, 237, 117, 228, 244, 163, 26, 211, 25, 123, 107, 96, 61, 74, 132, 202, 180, 55, 73, 196, 65, 252, + 62, 60, 99, 218, 17, 223, 118, 132, 74, 75, 88, 139, 48, 125, 60, 14, 10, 45, 
24, 95, 129, 159, 2, + 90, 174, 204, 18, 184, 240, 56, 210, 34, 93, 191, 143, 165, 38, 139, 23, 36, 49, 140, 181, 252, 148, 245, + 137, 240, 153, 31, 62, 111, 22, 14, 156, 200, 97, 175, 157, 253, 104, 178, 161, 184, 192, 97, 73, 11, 38, + 39, 197, 162, 144, 107, 164, 27, 253, 194, 42, 149, 138, 40, 194, 166, 138, 28, 100, 16, 97, 229, 112, 70, + 163, 229, 69, 164, 33, 72, 49, 232, 172, 207, 80, 23, 150, 241, 25, 216, 249, 120, 70, 236, 203, 81, 29, + 173, 31, 160, 34, 234, 133, 144, 187, 36, 160, 100, 56, 179, 51, 25, 128, 162, 63, 129, 184, 172, 224, 113, + 209, 223, 158, 190, 123, 123, 4, 137, 127, 164, 184, 26, 17, 106, 41, 29, 55, 232, 240, 88, 197, 89, 13, + 75, 254, 163, 201, 61, 135, 179, 184, 69, 41, 70, 136, 104, 211, 82, 123, 70, 188, 51, 247, 246, 5, 163, + 125, 107, 81, 125, 150, 156, 7, 22, 48, 156, 108, 5, 252, 181, 48, 48, 230, 253, 62, 88, 151, 228, 94, + 91, 178, 159, 35, 30, 30, 206, 218, 92, 182, 14, 88, 97, 40, 45, 57, 234, 154, 38, 62, 193, 7, 244, + 111, 43, 140, 96, 210, 130, 189, 189, 42, 35, 92, 57, 202, 236, 67, 254, 243, 189, 129, 135, 129, 171, 124, + 206, 41, 178, 62, 101, 166, 215, 146, 15, 44, 138, 145, 20, 170, 200, 240, 72, 193, 71, 154, 249, 68, 117, + 178, 66, 206, 113, 58, 124, 154, 172, 140, 32, 56, 75, 81, 235, 141, 252, 107, 124, 143, 148, 100, 61, 12, + 79, 133, 129, 77, 0, 142, 187, 251, 16, 57, 251, 105, 141, 91, 77, 89, 132, 141, 114, 198, 168, 17, 244, + 194, 60, 36, 93, 107, 146, 177, 122, 77, 15, 100, 184, 200, 127, 45, 209, 142, 121, 65, 211, 126, 179, 161, + 51, 105, 28, 126, 14, 186, 54, 131, 107, 80, 199, 232, 215, 178, 36, 253, 42, 36, 60, 106, 140, 114, 126, + 198, 248, 147, 44, 5, 78, 21, 34, 241, 61, 223, 92, 31, 249, 239, 242, 84, 80, 26, 9, 80, 36, 81, + 3, 77, 10, 233, 35, 65, 79, 125, 81, 136, 35, 10, 248, 165, 65, 91, 23, 76, 168, 206, 179, 35, 186, + 179, 61, 128, 46, 8, 158, 38, 152, 208, 239, 57, 64, 194, 155, 187, 12, 96, 78, 177, 232, 75, 21, 98, + 115, 69, 191, 240, 82, 9, 185, 129, 227, 115, 132, 
141, 169, 254, 116, 152, 38, 64, 175, 148, 219, 210, 61, + 248, 109, 167, 13, 148, 159, 78, 16, 59, 102, 29, 8, 220, 113, 161, 204, 150, 4, 25, 53, 58, 22, 158, + 90, 207, 155, 240, 61, 109, 101, 213, 213, 168, 38, 137, 91, 125, 68, 133, 103, 102, 176, 95, 60, 142, 121, + 159, 177, 148, 2, 65, 249, 36, 215, 171, 89, 142, 238, 125, 78, 201, 1, 175, 50, 120, 129, 177, 216, 76, + 201, 234, 41, 159, 149, 53, 206, 125, 4, 97, 13, 33, 181, 232, 235, 234, 220, 14, 165, 76, 15, 125, 140, + 246, 6, 104, 191, 155, 58, 247, 90, 106, 57, 247, 30, 60, 150, 248, 124, 122, 147, 174, 155, 170, 155, 192, + 50, 108, 43, 189, 209, 138, 188, 171, 223, 32, 17, 134, 231, 205, 205, 136, 199, 225, 220, 183, 6, 161, 53, + 107, 141, 58, 35, 1, 166, 92, 25, 41, 88, 16, 182, 59, 0, 99, 73, 100, 238, 225, 32, 38, 113, 171, + 239, 144, 236, 229, 176, 139, 46, 165, 235, 5, 192, 78, 202, 63, 131, 223, 101, 18, 128, 22, 103, 126, 148, + 114, 251, 86, 166, 194, 150, 14, 2, 68, 67, 177, 190, 203, 10, 75, 52, 151, 4, 36, 77, 72, 229, 87, + 137, 203, 51, 54, 34, 72, 164, 121, 30, 146, 127, 63, 8, 100, 119, 33, 71, 91, 81, 80, 16, 17, 17, + 106, 73, 195, 162, 110, 184, 65, 37, 180, 208, 171, 195, 28, 204, 60, 199, 21, 10, 241, 156, 172, 179, 71, + 229, 180, 235, 238, 143, 150, 93, 164, 219, 230, 84, 46, 190, 123, 251, 237, 154, 88, 196, 147, 132, 74, 113, + 239, 40, 185, 184, 154, 76, 31, 48, 234, 57, 69, 147, 158, 173, 54, 49, 26, 168, 209, 205, 109, 112, 184, + 104, 30, 115, 28, 43, 87, 2, 218, 171, 103, 87, 113, 211, 98, 229, 55, 118, 169, 139, 82, 215, 249, 67, + 222, 122, 121, 110, 196, 231, 201, 125, 239, 59, 139, 79, 157, 162, 93, 121, 154, 92, 28, 232, 200, 35, 44, + 38, 114, 44, 126, 72, 68, 7, 79, 44, 91, 215, 6, 139, 16, 109, 240, 13, 227, 167, 241, 49, 75, 58, + 120, 64, 48, 38, 159, 128, 136, 55, 223, 187, 233, 75, 46, 167, 101, 213, 132, 23, 245, 41, 160, 146, 223, + 44, 253, 28, 214, 82, 220, 205, 118, 95, 113, 206, 0, 111, 6, 3, 74, 238, 217, 37, 164, 76, 246, 45, + 0, 229, 233, 70, 
196, 60, 96, 46, 196, 7, 251, 156, 77, 124, 6, 80, 157, 158, 64, 6, 223, 205, 254, + 124, 37, 0, 247, 150, 212, 205, 8, 109, 99, 164, 146, 238, 76, 94, 131, 156, 195, 93, 247, 101, 125, 171, + 145, 87, 130, 213, 227, 6, 207, 151, 84, 9, 60, 15, 141, 55, 30, 14, 173, 26, 115, 247, 169, 239, 3, + 231, 17, 15, 217, 124, 139, 213, 139, 8, 39, 210, 203, 231, 126, 175, 46, 182, 54, 241, 244, 98, 183, 77, + 194, 138, 72, 221, 201, 77, 219, 115, 10, 252, 62, 152, 20, 16, 101, 26, 82, 86, 87, 86, 58, 104, 7, + 116, 102, 6, 216, 87, 204, 132, 192, 199, 86, 11, 111, 77, 86, 130, 89, 15, 31, 179, 107, 44, 100, 180, + 207, 30, 16, 214, 39, 130, 154, 201, 197, 220, 10, 177, 182, 233, 162, 146, 189, 108, 109, 35, 113, 96, 105, + 101, 32, 24, 18, 203, 101, 130, 40, 100, 38, 128, 180, 56, 114, 70, 30, 15, 160, 11, 33, 164, 192, 1, + 157, 234, 104, 202, 202, 172, 69, 221, 252, 203, 75, 95, 5, 99, 230, 11, 220, 44, 113, 227, 224, 234, 69, + 168, 8, 208, 93, 229, 254, 216, 154, 186, 40, 79, 94, 224, 233, 55, 78, 108, 217, 154, 4, 203, 164, 204, + 73, 159, 56, 185, 159, 128, 28, 115, 97, 244, 32, 131, 76, 19, 32, 209, 163, 148, 196, 8, 241, 46, 197, + 92, 70, 103, 212, 233, 31, 79, 44, 160, 96, 53, 97, 208, 120, 81, 251, 253, 90, 47, 102, 103, 174, 91, + 113, 130, 56, 211, 172, 220, 81, 126, 111, 84, 75, 8, 208, 134, 144, 165, 203, 151, 134, 238, 137, 10, 177, + 107, 121, 245, 60, 0, 33, 234, 190, 171, 56, 25, 22, 124, 148, 32, 210, 144, 210, 142, 31, 119, 253, 109, + 129, 103, 17, 103, 194, 140, 36, 123, 47, 180, 220, 5, 64, 232, 251, 5, 15, 87, 45, 87, 40, 45, 161, + 71, 149, 30, 158, 11, 204, 167, 214, 152, 120, 32, 170, 122, 53, 135, 217, 152, 118, 137, 9, 55, 208, 113, + 205, 238, 9, 23, 74, 141, 225, 180, 213, 184, 222, 218, 171, 103, 0, 195, 25, 65, 161, 162, 25, 87, 78, + 202, 12, 37, 13, 58, 172, 94, 132, 77, 134, 228, 117, 45, 183, 45, 81, 80, 151, 58, 173, 225, 42, 6, + 31, 64, 4, 194, 131, 175, 129, 204, 102, 105, 115, 106, 239, 132, 225, 22, 213, 163, 137, 161, 193, 76, 
126, + 52, 94, 190, 239, 34, 246, 161, 43, 163, 250, 214, 124, 235, 209, 191, 147, 53, 136, 181, 47, 134, 31, 181, + 32, 24, 211, 158, 83, 254, 122, 64, 252, 245, 236, 0, 211, 73, 191, 45, 175, 239, 137, 43, 62, 228, 108, + 204, 172, 2, 164, 150, 162, 170, 202, 39, 10, 101, 19, 185, 189, 30, 181, 3, 238, 187, 164, 194, 106, 87, + 169, 121, 146, 40, 29, 239, 58, 246, 238, 231, 99, 52, 249, 92, 54, 193, 46, 13, 250, 82, 126, 76, 252, + 3, 185, 69, 74, 42, 150, 74, 36, 51, 88, 113, 136, 231, 101, 106, 234, 163, 43, 10, 172, 26, 222, 59, + 27, 212, 74, 92, 148, 14, 40, 36, 37, 249, 221, 111, 148, 79, 44, 168, 196, 174, 248, 80, 151, 153, 73, + 200, 193, 210, 245, 97, 19, 203, 149, 58, 216, 191, 164, 167, 106, 64, 82, 216, 151, 153, 194, 143, 187, 133, + 117, 252, 33, 23, 203, 28, 167, 30, 214, 167, 204, 18, 79, 112, 216, 231, 189, 219, 118, 194, 115, 126, 51, + 177, 55, 212, 122, 52, 215, 149, 9, 61, 56, 186, 53, 249, 6, 199, 43, 40, 91, 44, 94, 191, 245, 14, + 215, 197, 46, 179, 226, 10, 235, 230, 99, 251, 164, 222, 25, 2, 97, 208, 115, 77, 174, 251, 199, 10, 136, + 72, 137, 48, 113, 3, 96, 247, 57, 0, 211, 168, 254, 201, 24, 94, 226, 205, 44, 71, 177, 227, 248, 242, + 90, 81, 242, 157, 77, 4, 46, 127, 249, 40, 47, 44, 115, 85, 120, 147, 135, 160, 42, 192, 25, 17, 166, + 50, 188, 205, 215, 16, 200, 132, 36, 113, 157, 30, 147, 72, 41, 213, 229, 19, 250, 218, 105, 227, 218, 241, + 35, 95, 246, 41, 126, 96, 60, 62, 95, 63, 230, 21, 101, 230, 66, 46, 117, 173, 114, 195, 148, 23, 122, + 116, 55, 102, 14, 162, 203, 202, 250, 118, 169, 237, 169, 95, 207, 134, 235, 97, 194, 185, 192, 153, 189, 247, + 90, 33, 231, 119, 3, 149, 32, 62, 72, 71, 80, 189, 100, 29, 226, 87, 109, 54, 84, 155, 19, 133, 13, + 144, 62, 237, 236, 30, 78, 164, 25, 24, 89, 63, 174, 207, 198, 58, 208, 111, 93, 192, 109, 252, 201, 145, + 155, 123, 132, 172, 123, 185, 46, 100, 183, 63, 205, 59, 183, 224, 27, 216, 9, 111, 186, 90, 24, 70, 234, + 47, 181, 201, 189, 176, 87, 71, 227, 52, 55, 101, 55, 25, 240, 
120, 9, 207, 73, 172, 49, 201, 251, 194, + 249, 197, 232, 95, 224, 240, 245, 216, 160, 67, 157, 27, 104, 251, 182, 132, 48, 6, 150, 228, 219, 162, 78, + 57, 12, 126, 161, 183, 77, 210, 76, 22, 153, 44, 143, 252, 6, 119, 179, 24, 196, 70, 195, 247, 125, 202, + 171, 100, 100, 155, 117, 19, 15, 132, 64, 62, 176, 38, 225, 63, 128, 244, 216, 76, 229, 243, 44, 100, 247, + 227, 44, 175, 47, 251, 3, 240, 78, 209, 126, 56, 152, 94, 10, 149, 137, 99, 97, 35, 152, 244, 18, 119, + 227, 93, 198, 119, 203, 224, 30, 157, 24, 178, 6, 55, 108, 3, 105, 30, 95, 80, 89, 136, 125, 68, 233, + 215, 159, 68, 73, 3, 62, 46, 200, 130, 24, 70, 10, 254, 218, 141, 132, 8, 4, 94, 232, 226, 223, 190, + 164, 6, 254, 94, 110, 72, 221, 37, 135, 172, 67, 108, 194, 105, 146, 81, 47, 166, 11, 33, 190, 131, 218, + 69, 242, 178, 5, 55, 195, 1, 38, 85, 61, 110, 239, 93, 227, 178, 184, 63, 168, 192, 53, 78, 93, 159, + 174, 222, 123, 111, 156, 101, 150, 172, 225, 86, 60, 79, 234, 78, 144, 122, 74, 195, 208, 46, 234, 189, 130, + 51, 37, 242, 104, 153, 193, 243, 71, 172, 14, 93, 241, 243, 117, 66, 199, 56, 223, 25, 125, 44, 112, 63, + 50, 142, 241, 62, 206, 35, 198, 31, 73, 44, 75, 231, 108, 124, 80, 94, 70, 157, 225, 203, 31, 75, 112, + 82, 228, 104, 93, 195, 131, 221, 168, 10, 38, 240, 173, 64, 186, 195, 217, 12, 137, 6, 233, 242, 104, 190, + 114, 243, 52, 17, 40, 234, 157, 201, 94, 147, 46, 237, 58, 64, 52, 226, 228, 228, 203, 161, 229, 59, 225, + 200, 120, 200, 195, 209, 77, 122, 143, 112, 133, 51, 210, 95, 159, 51, 248, 128, 150, 165, 187, 40, 198, 188, + 188, 160, 100, 137, 140, 80, 189, 10, 4, 59, 78, 218, 242, 107, 230, 69, 247, 167, 222, 57, 58, 120, 41, + 25, 33, 141, 188, 184, 7, 236, 75, 221, 86, 108, 246, 146, 54, 15, 10, 168, 183, 232, 199, 129, 167, 93, + 96, 67, 168, 232, 162, 145, 157, 165, 142, 100, 21, 83, 50, 225, 100, 180, 121, 52, 254, 61, 180, 136, 87, + 161, 140, 138, 191, 134, 140, 105, 23, 105, 132, 122, 176, 213, 12, 40, 186, 34, 182, 242, 175, 45, 54, 74, + 230, 218, 64, 136, 165, 
249, 84, 191, 143, 172, 174, 138, 194, 96, 20, 146, 206, 232, 120, 137, 93, 112, 18, + 28, 71, 106, 30, 174, 205, 220, 96, 144, 120, 17, 169, 245, 82, 20, 35, 155, 243, 65, 194, 168, 59, 136, + 32, 10, 171, 207, 80, 117, 221, 90, 87, 197, 242, 243, 5, 14, 171, 93, 14, 232, 166, 172, 122, 57, 38, + 140, 243, 199, 26, 41, 123, 53, 31, 51, 233, 73, 246, 59, 33, 108, 197, 178, 229, 252, 92, 187, 148, 96, + 2, 194, 78, 66, 245, 203, 204, 95, 252, 244, 208, 81, 189, 13, 189, 35, 73, 2, 77, 240, 173, 174, 73, + 197, 204, 162, 148, 62, 35, 2, 66, 189, 188, 142, 117, 59, 203, 37, 160, 30, 238, 225, 220, 159, 12, 249, + 117, 95, 94, 52, 191, 177, 32, 95, 160, 201, 51, 164, 136, 252, 73, 202, 8, 232, 235, 135, 101, 51, 35, + 230, 61, 44, 69, 205, 181, 32, 97, 243, 242, 118, 68, 231, 118, 40, 210, 241, 163, 201, 179, 81, 10, 213, + 229, 78, 119, 221, 74, 33, 178, 214, 80, 221, 121, 150, 126, 157, 19, 165, 6, 53, 17, 134, 209, 166, 85, + 241, 185, 94, 126, 52, 200, 110, 21, 52, 58, 128, 113, 186, 9, 8, 198, 76, 215, 55, 33, 213, 170, 163, + 247, 226, 81, 71, 164, 173, 124, 101, 163, 88, 222, 13, 252, 141, 69, 135, 183, 100, 223, 71, 96, 131, 145, + 61, 150, 47, 42, 141, 224, 175, 214, 163, 70, 222, 20, 233, 193, 186, 242, 106, 80, 108, 226, 92, 131, 52, + 133, 206, 149, 86, 20, 218, 43, 196, 30, 168, 141, 245, 225, 129, 62, 37, 207, 164, 254, 93, 17, 16, 214, + 12, 110, 186, 238, 186, 10, 68, 104, 159, 87, 2, 2, 131, 210, 107, 89, 137, 19, 36, 7, 114, 139, 98, + 221, 90, 133, 236, 115, 233, 215, 246, 110, 180, 164, 114, 224, 166, 95, 77, 219, 195, 50, 35, 122, 181, 119, + 107, 150, 45, 98, 126, 186, 70, 83, 145, 144, 171, 251, 251, 188, 183, 151, 115, 59, 133, 210, 141, 16, 164, + 198, 78, 30, 110, 154, 139, 65, 253, 164, 21, 226, 241, 83, 228, 174, 141, 237, 199, 111, 5, 220, 131, 55, + 222, 145, 231, 183, 197, 150, 181, 69, 156, 241, 124, 244, 11, 200, 253, 105, 196, 108, 135, 20, 237, 182, 223, + 35, 128, 14, 241, 42, 96, 152, 181, 89, 93, 141, 242, 167, 220, 225, 44, 58, 
120, 30, 189, 91, 245, 244, + 94, 94, 189, 223, 92, 149, 17, 6, 111, 10, 214, 210, 180, 23, 142, 143, 8, 159, 144, 50, 100, 78, 2, + 11, 128, 173, 160, 239, 30, 92, 76, 165, 81, 180, 54, 119, 83, 220, 204, 47, 122, 253, 249, 235, 99, 146, + 127, 237, 185, 20, 30, 3, 128, 146, 126, 52, 52, 31, 251, 103, 247, 185, 42, 243, 208, 134, 191, 119, 226, + 141, 139, 5, 80, 107, 50, 131, 247, 8, 157, 251, 151, 96, 246, 111, 239, 158, 211, 28, 97, 76, 125, 214, + 161, 10, 2, 171, 74, 140, 188, 232, 189, 10, 77, 113, 45, 253, 94, 206, 222, 153, 141, 2, 206, 180, 104, + 166, 207, 0, 192, 149, 8, 46, 104, 174, 171, 39, 152, 176, 192, 132, 6, 2, 44, 70, 241, 32, 176, 172, + 49, 72, 79, 171, 62, 1, 107, 77, 42, 217, 53, 96, 145, 193, 61, 207, 15, 242, 154, 131, 252, 197, 128, + 64, 157, 254, 250, 160, 224, 157, 197, 18, 224, 67, 246, 2, 23, 35, 90, 186, 182, 148, 177, 42, 118, 35, + 146, 104, 27, 233, 207, 140, 224, 103, 180, 166, 49, 226, 75, 87, 84, 165, 160, 122, 116, 182, 134, 49, 250, + 41, 177, 165, 146, 50, 44, 173, 229, 49, 195, 30, 234, 183, 118, 236, 63, 201, 71, 154, 86, 87, 15, 34, + 22, 192, 87, 90, 39, 120, 166, 59, 176, 192, 245, 203, 178, 184, 153, 245, 90, 91, 27, 215, 31, 48, 210, + 125, 252, 30, 164, 229, 137, 43, 214, 244, 226, 214, 77, 157, 217, 239, 237, 205, 146, 37, 11, 68, 179, 93, + 1, 199, 22, 185, 177, 128, 110, 101, 96, 5, 4, 245, 71, 70, 164, 8, 189, 136, 43, 201, 156, 174, 153, + 22, 254, 250, 10, 186, 37, 148, 16, 209, 145, 95, 246, 43, 154, 48, 131, 149, 2, 158, 149, 164, 79, 245, + 64, 180, 78, 99, 113, 20, 10, 236, 249, 99, 69, 93, 78, 194, 217, 87, 156, 20, 251, 126, 77, 139, 28, + 80, 175, 42, 16, 200, 17, 197, 174, 233, 218, 201, 245, 31, 55, 85, 213, 136, 62, 163, 223, 153, 224, 59, + 97, 59, 25, 85, 153, 128, 251, 118, 39, 227, 31, 28, 214, 228, 109, 253, 139, 163, 224, 119, 223, 253, 152, + 214, 65, 73, 88, 15, 16, 73, 82, 210, 141, 147, 167, 236, 88, 211, 26, 210, 16, 218, 84, 134, 35, 208, + 22, 195, 32, 160, 112, 27, 189, 83, 166, 201, 
201, 205, 114, 49, 18, 163, 5, 40, 12, 198, 151, 245, 33, + 161, 170, 10, 206, 12, 48, 248, 244, 96, 47, 90, 37, 71, 93, 76, 47, 188, 101, 243, 62, 212, 66, 8, + 160, 30, 176, 254, 222, 118, 69, 236, 51, 146, 51, 148, 12, 48, 201, 19, 84, 206, 37, 165, 252, 56, 23, + 122, 124, 223, 150, 107, 192, 67, 148, 71, 29, 125, 185, 213, 151, 214, 127, 140, 225, 140, 92, 35, 152, 220, + 93, 1, 172, 112, 205, 184, 110, 1, 114, 17, 20, 36, 240, 120, 75, 228, 142, 90, 31, 147, 195, 186, 42, + 95, 74, 188, 26, 3, 196, 53, 217, 126, 18, 100, 96, 68, 66, 145, 249, 48, 117, 248, 21, 130, 8, 12, + 9, 1, 136, 206, 140, 232, 31, 226, 163, 114, 239, 7, 24, 204, 188, 32, 41, 157, 33, 99, 240, 249, 11, + 23, 107, 175, 224, 154, 5, 182, 50, 74, 185, 34, 241, 196, 220, 168, 38, 40, 94, 107, 63, 125, 218, 244, + 43, 176, 5, 93, 144, 82, 64, 83, 59, 160, 104, 6, 179, 38, 106, 163, 164, 188, 102, 17, 183, 199, 152, + 28, 198, 147, 113, 80, 188, 242, 211, 146, 18, 238, 154, 43, 172, 86, 170, 7, 12, 66, 60, 10, 170, 138, + 95, 18, 119, 122, 251, 58, 119, 221, 46, 6, 202, 24, 26, 162, 97, 0, 155, 9, 163, 232, 14, 254, 69, + 241, 156, 101, 19, 153, 87, 156, 56, 30, 43, 21, 158, 207, 77, 208, 220, 23, 133, 11, 229, 205, 59, 80, + 149, 88, 222, 191, 86, 40, 160, 69, 203, 20, 240, 229, 125, 173, 133, 167, 48, 173, 128, 101, 23, 227, 40, + 240, 74, 39, 164, 238, 10, 112, 220, 163, 128, 12, 211, 5, 6, 44, 14, 62, 231, 70, 41, 52, 242, 88, + 40, 192, 63, 27, 109, 70, 124, 240, 237, 192, 223, 202, 105, 77, 189, 43, 70, 109, 171, 221, 64, 196, 218, + 47, 204, 214, 217, 162, 172, 230, 44, 240, 216, 253, 41, 117, 138, 161, 156, 205, 32, 229, 86, 157, 86, 121, + 145, 91, 40, 41, 47, 65, 101, 199, 111, 225, 82, 238, 150, 3, 11, 65, 137, 203, 122, 156, 174, 125, 73, + 20, 104, 164, 157, 227, 44, 69, 231, 72, 146, 175, 45, 198, 167, 105, 228, 240, 236, 222, 250, 151, 211, 28, + 48, 171, 88, 16, 55, 5, 117, 251, 58, 66, 0, 90, 173, 243, 119, 217, 22, 101, 117, 75, 238, 10, 16, + 78, 209, 84, 50, 1, 194, 54, 
177, 187, 226, 182, 20, 53, 126, 233, 167, 14, 219, 88, 28, 99, 99, 92, + 77, 27, 196, 240, 115, 180, 216, 141, 25, 201, 119, 213, 27, 163, 14, 159, 163, 47, 183, 77, 216, 99, 110, + 118, 244, 15, 159, 119, 99, 114, 197, 151, 165, 66, 250, 216, 149, 184, 42, 17, 167, 195, 166, 143, 60, 87, + 95, 81, 250, 47, 189, 149, 144, 8, 40, 233, 252, 129, 55, 145, 132, 193, 168, 253, 77, 21, 69, 131, 33, + 143, 179, 178, 100, 69, 99, 177, 161, 189, 221, 191, 216, 23, 16, 109, 110, 46, 107, 253, 90, 88, 216, 90, + 206, 48, 234, 146, 60, 181, 96, 49, 149, 105, 201, 5, 174, 92, 27, 166, 115, 16, 135, 153, 117, 133, 181, + 170, 106, 179, 106, 65, 226, 79, 253, 237, 110, 247, 215, 26, 6, 131, 47, 74, 193, 231, 115, 252, 165, 249, + 92, 160, 40, 161, 45, 241, 59, 224, 233, 154, 211, 77, 211, 96, 15, 135, 193, 238, 52, 196, 76, 139, 131, + 136, 254, 70, 211, 195, 228, 188, 12, 174, 110, 56, 220, 178, 140, 180, 152, 57, 185, 75, 165, 158, 155, 37, + 50, 27, 173, 34, 119, 209, 183, 252, 142, 34, 135, 148, 95, 85, 127, 75, 79, 76, 172, 247, 160, 196, 157, + 103, 163, 69, 47, 169, 159, 229, 58, 92, 240, 9, 43, 134, 98, 137, 121, 100, 40, 178, 57, 190, 140, 199, + 39, 104, 218, 18, 40, 117, 193, 74, 206, 101, 66, 224, 33, 138, 248, 4, 129, 49, 19, 113, 20, 166, 122, + 158, 58, 50, 17, 136, 175, 217, 51, 35, 66, 196, 1, 113, 159, 157, 33, 195, 207, 206, 182, 143, 29, 193, + 177, 243, 251, 109, 104, 119, 19, 47, 148, 219, 186, 51, 98, 210, 164, 92, 131, 194, 38, 52, 117, 12, 147, + 164, 40, 93, 247, 77, 147, 68, 245, 151, 46, 20, 55, 136, 72, 12, 182, 38, 146, 102, 103, 40, 41, 76, + 133, 104, 78, 96, 71, 131, 113, 89, 217, 90, 46, 52, 29, 195, 199, 5, 196, 233, 16, 58, 249, 226, 83, + 119, 97, 140, 101, 151, 177, 137, 158, 249, 56, 84, 81, 24, 101, 17, 4, 214, 148, 12, 185, 54, 144, 121, + 44, 98, 240, 144, 219, 192, 7, 9, 138, 31, 148, 48, 77, 151, 253, 213, 200, 146, 207, 230, 108, 158, 245, + 72, 61, 23, 243, 217, 2, 217, 164, 136, 96, 144, 116, 91, 43, 23, 163, 0, 248, 119, 25, 68, 
162, 167, + 205, 39, 37, 124, 171, 71, 109, 43, 134, 224, 97, 244, 82, 69, 38, 215, 134, 111, 56, 145, 22, 236, 146, + 23, 182, 172, 143, 4, 61, 85, 199, 236, 174, 109, 79, 215, 149, 108, 117, 250, 206, 79, 238, 100, 218, 21, + 207, 5, 1, 212, 233, 118, 222, 100, 77, 171, 61, 80, 164, 228, 177, 69, 52, 238, 249, 227, 19, 217, 174, + 156, 185, 55, 109, 203, 221, 148, 197, 186, 89, 207, 120, 134, 187, 180, 78, 173, 126, 22, 46, 138, 167, 228, + 211, 191, 1, 85, 63, 156, 61, 48, 198, 42, 158, 223, 172, 9, 240, 176, 194, 43, 26, 88, 152, 228, 241, + 12, 126, 237, 235, 224, 120, 144, 113, 135, 44, 142, 77, 129, 191, 125, 137, 44, 59, 39, 78, 168, 82, 107, + 6, 230, 50, 69, 43, 171, 102, 135, 220, 50, 39, 253, 90, 70, 85, 242, 243, 254, 217, 222, 51, 87, 70, + 65, 46, 165, 32, 233, 249, 251, 130, 217, 93, 94, 228, 175, 185, 253, 192, 177, 220, 85, 186, 79, 185, 55, + 42, 253, 70, 129, 103, 40, 174, 89, 56, 209, 42, 169, 21, 45, 126, 86, 123, 163, 125, 151, 179, 186, 22, + 140, 152, 30, 125, 132, 17, 13, 22, 32, 119, 76, 174, 137, 186, 168, 21, 57, 168, 91, 95, 52, 78, 195, + 26, 109, 13, 145, 107, 44, 143, 133, 180, 241, 187, 237, 139, 136, 20, 94, 129, 211, 145, 164, 165, 239, 250, + 195, 195, 208, 139, 169, 110, 73, 32, 205, 151, 124, 101, 186, 222, 88, 248, 231, 64, 61, 232, 225, 5, 99, + 61, 79, 160, 161, 170, 68, 244, 7, 68, 115, 230, 115, 48, 76, 18, 167, 106, 195, 81, 90, 86, 164, 207, + 120, 155, 101, 238, 186, 11, 106, 26, 235, 233, 13, 190, 56, 35, 59, 82, 112, 98, 174, 221, 184, 230, 132, + 131, 173, 123, 50, 45, 190, 69, 129, 220, 6, 99, 15, 63, 246, 178, 31, 107, 14, 28, 30, 19, 138, 239, + 43, 49, 73, 248, 220, 37, 177, 26, 231, 97, 224, 102, 57, 203, 190, 140, 225, 60, 27, 215, 162, 116, 187, + 164, 152, 126, 8, 171, 167, 206, 127, 17, 208, 41, 127, 232, 157, 154, 20, 216, 146, 108, 15, 224, 58, 7, + 77, 10, 20, 223, 182, 144, 159, 44, 108, 85, 91, 124, 71, 240, 21, 18, 13, 157, 172, 147, 206, 237, 89, + 205, 28, 57, 101, 114, 220, 188, 214, 147, 104, 76, 
96, 138, 49, 241, 202, 29, 78, 97, 2, 92, 132, 126, + 175, 143, 92, 159, 243, 98, 168, 82, 236, 210, 162, 58, 70, 120, 240, 117, 46, 173, 82, 192, 179, 209, 14, + 217, 58, 227, 65, 185, 208, 103, 110, 138, 118, 235, 128, 100, 201, 15, 222, 123, 206, 204, 33, 162, 136, 168, + 34, 159, 18, 15, 43, 168, 234, 196, 187, 80, 136, 217, 224, 153, 216, 189, 124, 225, 91, 185, 62, 248, 127, + 144, 234, 9, 18, 233, 176, 10, 12, 60, 123, 176, 83, 109, 194, 74, 159, 70, 69, 37, 101, 209, 235, 163, + 185, 125, 166, 25, 182, 114, 26, 3, 173, 73, 95, 7, 2, 165, 75, 127, 29, 61, 181, 66, 124, 62, 23, + 169, 195, 75, 80, 102, 57, 208, 156, 205, 120, 4, 248, 161, 130, 118, 226, 33, 139, 176, 227, 236, 147, 180, + 74, 222, 51, 78, 134, 252, 102, 46, 128, 158, 219, 71, 178, 51, 34, 215, 44, 166, 156, 35, 100, 34, 241, + 162, 2, 14, 155, 7, 173, 220, 58, 39, 69, 242, 184, 21, 227, 236, 174, 57, 36, 53, 133, 20, 198, 184, + 118, 82, 121, 212, 88, 185, 129, 193, 251, 84, 8, 188, 193, 54, 107, 149, 109, 130, 110, 151, 71, 238, 225, + 2, 196, 44, 211, 108, 249, 136, 187, 206, 88, 241, 45, 48, 188, 113, 133, 172, 75, 245, 90, 245, 107, 79, + 12, 112, 108, 96, 188, 61, 233, 75, 55, 175, 253, 210, 28, 145, 125, 244, 77, 116, 130, 84, 208, 190, 221, + 92, 171, 217, 197, 10, 28, 54, 214, 153, 39, 114, 128, 57, 110, 149, 162, 227, 141, 228, 186, 103, 10, 198, + 74, 254, 107, 98, 214, 18, 87, 27, 129, 5, 244, 156, 152, 146, 108, 196, 179, 191, 239, 85, 199, 214, 194, + 164, 216, 6, 150, 238, 92, 154, 232, 164, 61, 155, 118, 174, 113, 189, 63, 135, 125, 24, 229, 119, 200, 7, + 41, 62, 63, 113, 160, 69, 147, 155, 103, 80, 27, 183, 228, 178, 62, 94, 16, 126, 238, 223, 44, 127, 8, + 47, 77, 237, 93, 170, 12, 163, 119, 154, 221, 100, 10, 94, 247, 9, 155, 185, 213, 218, 23, 42, 118, 119, + 159, 177, 11, 10, 111, 215, 170, 80, 51, 167, 127, 108, 87, 194, 207, 133, 168, 233, 4, 166, 175, 57, 78, + 65, 241, 6, 246, 77, 51, 195, 218, 147, 208, 33, 42, 146, 25, 86, 233, 201, 53, 74, 98, 88, 99, 162, + 170, 
31, 72, 224, 126, 253, 129, 143, 136, 193, 17, 160, 133, 73, 192, 215, 62, 129, 212, 103, 3, 215, 92, + 98, 162, 130, 0, 182, 132, 14, 100, 172, 111, 66, 2, 218, 84, 253, 221, 53, 181, 86, 140, 79, 44, 8, + 146, 116, 94, 159, 135, 247, 64, 161, 236, 192, 239, 167, 77, 104, 137, 42, 135, 212, 111, 188, 132, 231, 104, + 3, 26, 107, 168, 172, 197, 101, 65, 51, 237, 88, 49, 154, 47, 195, 76, 136, 126, 109, 166, 50, 222, 150, + 102, 76, 63, 5, 253, 21, 137, 177, 77, 62, 96, 236, 182, 236, 119, 110, 166, 143, 58, 82, 218, 35, 68, + 224, 22, 48, 5, 32, 88, 98, 120, 93, 172, 138, 129, 148, 161, 25, 238, 254, 85, 217, 130, 235, 49, 122, + 145, 94, 166, 114, 141, 163, 154, 172, 180, 162, 82, 248, 223, 187, 63, 167, 106, 30, 218, 77, 184, 97, 42, + 120, 38, 141, 101, 100, 177, 176, 29, 147, 250, 84, 227, 241, 240, 220, 139, 217, 13, 110, 131, 133, 239, 88, + 209, 108, 176, 3, 184, 15, 179, 20, 234, 60, 163, 222, 69, 212, 229, 205, 254, 237, 45, 8, 58, 152, 230, + 20, 204, 188, 206, 79, 160, 35, 178, 88, 75, 246, 136, 215, 233, 239, 44, 214, 224, 219, 109, 79, 12, 117, + 170, 2, 234, 9, 212, 187, 94, 57, 81, 35, 155, 29, 106, 117, 171, 71, 156, 30, 212, 49, 169, 42, 33, + 85, 115, 9, 54, 161, 186, 55, 76, 93, 95, 191, 245, 206, 46, 215, 206, 250, 70, 92, 111, 199, 62, 251, + 72, 147, 64, 52, 152, 121, 16, 228, 108, 78, 215, 195, 33, 25, 218, 63, 31, 1, 164, 193, 28, 32, 39, + 171, 87, 40, 136, 234, 85, 170, 125, 16, 45, 33, 84, 90, 151, 21, 135, 204, 108, 203, 17, 67, 249, 29, + 10, 32, 8, 132, 55, 41, 215, 50, 53, 173, 138, 154, 70, 30, 186, 34, 163, 31, 1, 182, 47, 247, 74, + 12, 210, 242, 66, 47, 150, 106, 14, 17, 236, 127, 84, 220, 192, 242, 118, 56, 197, 219, 93, 188, 70, 123, + 252, 8, 63, 162, 63, 146, 111, 230, 242, 8, 141, 213, 192, 83, 25, 81, 174, 127, 209, 31, 155, 194, 90, + 237, 121, 129, 222, 232, 2, 240, 185, 38, 0, 85, 243, 247, 184, 132, 148, 200, 143, 55, 82, 205, 233, 234, + 138, 242, 29, 168, 83, 4, 65, 170, 155, 205, 96, 37, 93, 176, 133, 142, 221, 203, 
162, 71, 20, 207, 60, + 71, 104, 220, 16, 197, 251, 74, 177, 35, 190, 219, 226, 22, 28, 100, 159, 128, 94, 244, 172, 101, 38, 17, + 107, 205, 104, 186, 67, 44, 218, 50, 144, 122, 63, 135, 24, 63, 77, 214, 183, 199, 173, 163, 27, 65, 183, + 203, 103, 21, 5, 51, 227, 77, 130, 146, 226, 36, 191, 247, 59, 126, 87, 236, 146, 67, 13, 242, 57, 82, + 59, 159, 236, 202, 212, 195, 211, 119, 171, 63, 121, 37, 44, 96, 36, 88, 211, 232, 67, 202, 113, 42, 25, + 32, 216, 99, 133, 76, 166, 115, 108, 130, 155, 227, 211, 99, 64, 228, 81, 155, 113, 220, 62, 122, 100, 21, + 252, 235, 244, 154, 23, 96, 77, 233, 56, 243, 240, 69, 57, 102, 15, 181, 40, 243, 30, 121, 119, 105, 90, + 210, 231, 247, 197, 7, 164, 8, 117, 28, 231, 186, 111, 57, 163, 46, 216, 103, 36, 79, 19, 49, 40, 119, + 68, 56, 157, 46, 53, 7, 51, 27, 33, 25, 192, 176, 43, 225, 141, 36, 73, 147, 27, 13, 174, 76, 72, + 80, 65, 215, 124, 10, 104, 11, 189, 215, 116, 168, 133, 31, 50, 136, 167, 88, 253, 24, 137, 94, 108, 75, + 227, 99, 2, 203, 202, 182, 244, 96, 163, 63, 42, 147, 91, 162, 15, 252, 144, 32, 110, 4, 128, 34, 185, + 38, 151, 1, 142, 81, 80, 180, 126, 30, 208, 56, 92, 105, 49, 38, 65, 246, 159, 18, 29, 155, 114, 86, + 108, 91, 144, 143, 107, 181, 125, 97, 99, 127, 179, 250, 181, 27, 218, 37, 132, 106, 177, 124, 59, 101, 77, + 127, 8, 189, 117, 67, 108, 221, 73, 170, 197, 183, 188, 175, 151, 116, 24, 242, 93, 147, 44, 208, 100, 168, + 50, 132, 21, 100, 50, 177, 13, 37, 204, 50, 247, 153, 245, 172, 225, 52, 95, 112, 142, 218, 58, 74, 192, + 1, 84, 213, 150, 40, 246, 38, 250, 35, 71, 127, 121, 14, 180, 90, 12, 111, 196, 120, 115, 156, 158, 110, + 218, 177, 223, 0, 237, 180, 37, 233, 246, 73, 166, 154, 166, 99, 218, 73, 178, 20, 194, 135, 84, 116, 157, + 41, 137, 159, 52, 249, 25, 51, 220, 9, 170, 74, 230, 240, 171, 95, 28, 201, 75, 173, 193, 12, 76, 9, + 30, 203, 250, 138, 126, 52, 160, 186, 118, 4, 250, 31, 97, 217, 219, 128, 60, 227, 83, 208, 160, 214, 61, + 90, 12, 228, 76, 64, 105, 176, 242, 176, 13, 239, 174, 
28, 98, 128, 35, 246, 51, 191, 97, 56, 182, 196, + 77, 209, 84, 167, 89, 5, 102, 249, 211, 9, 42, 80, 27, 90, 184, 42, 223, 120, 214, 63, 216, 250, 196, + 144, 140, 166, 96, 53, 50, 208, 195, 52, 99, 157, 139, 200, 240, 196, 83, 119, 69, 142, 116, 116, 168, 120, + 168, 70, 124, 230, 39, 203, 65, 21, 100, 85, 233, 21, 147, 26, 135, 136, 164, 99, 165, 204, 110, 242, 200, + 234, 125, 125, 78, 173, 33, 50, 134, 251, 44, 31, 164, 106, 159, 211, 185, 240, 249, 31, 121, 85, 194, 102, + 121, 130, 250, 229, 203, 61, 33, 9, 47, 230, 207, 63, 200, 24, 216, 119, 137, 85, 232, 93, 207, 178, 228, + 165, 198, 201, 45, 145, 50, 92, 57, 47, 193, 221, 17, 124, 81, 229, 102, 52, 107, 247, 118, 17, 93, 75, + 66, 224, 214, 131, 110, 49, 161, 12, 206, 237, 116, 196, 161, 201, 130, 31, 158, 138, 192, 250, 177, 3, 5, + 84, 188, 51, 174, 17, 226, 167, 133, 62, 58, 192, 120, 196, 54, 114, 139, 106, 132, 230, 24, 182, 121, 29, + 44, 64, 82, 161, 227, 112, 177, 76, 109, 243, 248, 74, 230, 39, 78, 170, 214, 54, 92, 86, 249, 103, 190, + 70, 101, 30, 155, 144, 58, 167, 21, 103, 174, 248, 148, 236, 154, 107, 181, 22, 13, 101, 204, 111, 135, 23, + 42, 172, 15, 156, 102, 41, 176, 212, 192, 128, 189, 161, 41, 131, 33, 240, 106, 69, 105, 159, 124, 223, 90, + 145, 86, 21, 20, 57, 89, 46, 133, 15, 65, 65, 209, 167, 21, 153, 77, 125, 145, 227, 58, 24, 52, 206, + 118, 12, 205, 215, 218, 22, 19, 227, 74, 232, 136, 92, 87, 119, 0, 121, 12, 126, 248, 114, 244, 94, 118, + 135, 197, 54, 70, 205, 45, 106, 28, 42, 107, 105, 238, 28, 147, 128, 158, 198, 110, 17, 93, 7, 228, 8, + 43, 213, 134, 41, 84, 33, 221, 246, 203, 87, 39, 197, 7, 210, 4, 168, 42, 192, 209, 71, 103, 91, 213, + 181, 32, 27, 20, 191, 13, 115, 29, 55, 19, 186, 93, 169, 137, 247, 127, 161, 166, 177, 124, 191, 189, 68, + 173, 172, 196, 28, 33, 139, 153, 148, 69, 121, 156, 66, 113, 78, 242, 178, 59, 170, 239, 112, 54, 2, 196, + 107, 110, 208, 162, 71, 135, 126, 253, 35, 102, 41, 148, 172, 21, 170, 47, 184, 66, 224, 197, 207, 193, 146, + 248, 43, 
5, 235, 232, 198, 66, 130, 196, 81, 133, 138, 81, 222, 204, 248, 59, 214, 203, 97, 147, 20, 159, + 10, 130, 54, 61, 32, 245, 208, 246, 87, 235, 12, 230, 104, 219, 121, 234, 253, 45, 112, 234, 219, 242, 172, + 146, 200, 219, 59, 199, 233, 202, 234, 159, 126, 8, 14, 188, 23, 130, 254, 114, 12, 86, 47, 184, 231, 136, + 230, 50, 83, 234, 152, 221, 145, 203, 252, 171, 190, 63, 199, 2, 68, 169, 247, 9, 105, 61, 26, 209, 9, + 65, 173, 251, 109, 36, 163, 26, 70, 97, 132, 7, 16, 153, 246, 189, 166, 247, 252, 79, 68, 229, 82, 191, + 160, 134, 214, 208, 29, 39, 26, 204, 211, 102, 207, 171, 5, 178, 88, 220, 107, 165, 136, 147, 248, 93, 130, + 61, 10, 132, 88, 130, 77, 161, 230, 237, 138, 125, 129, 154, 97, 113, 181, 61, 71, 223, 148, 233, 136, 126, + 152, 116, 67, 149, 82, 67, 200, 6, 20, 171, 12, 225, 173, 123, 143, 165, 90, 18, 226, 41, 55, 188, 147, + 230, 20, 131, 245, 108, 124, 149, 111, 35, 95, 95, 200, 182, 72, 8, 119, 93, 46, 133, 50, 241, 147, 175, + 193, 225, 85, 236, 239, 111, 55, 41, 187, 33, 148, 247, 194, 61, 79, 92, 82, 167, 244, 46, 254, 219, 28, + 19, 135, 98, 29, 10, 247, 213, 43, 99, 130, 221, 56, 89, 226, 118, 180, 139, 31, 213, 39, 72, 112, 208, + 180, 79, 35, 107, 164, 14, 245, 201, 78, 221, 111, 155, 58, 220, 68, 31, 27, 244, 79, 240, 196, 137, 60, + 139, 43, 171, 49, 50, 20, 227, 166, 119, 24, 139, 193, 115, 53, 72, 90, 1, 220, 237, 199, 78, 201, 249, + 27, 161, 195, 179, 215, 130, 183, 129, 69, 176, 49, 186, 145, 20, 210, 111, 7, 215, 12, 217, 69, 76, 177, + 104, 115, 253, 73, 67, 40, 110, 98, 80, 254, 184, 120, 250, 97, 250, 101, 214, 65, 59, 210, 251, 139, 210, + 104, 52, 29, 36, 48, 120, 224, 147, 235, 157, 253, 104, 249, 178, 31, 15, 9, 232, 128, 88, 38, 59, 156, + 64, 125, 128, 141, 106, 238, 30, 31, 149, 56, 135, 42, 198, 148, 220, 251, 4, 5, 224, 28, 212, 54, 47, + 212, 69, 3, 41, 244, 131, 80, 180, 1, 44, 211, 190, 170, 127, 208, 221, 21, 187, 178, 192, 51, 26, 203, + 80, 213, 153, 127, 245, 245, 106, 60, 84, 162, 159, 208, 213, 124, 135, 127, 
69, 211, 44, 78, 179, 127, 196, + 244, 237, 237, 138, 34, 32, 123, 62, 228, 119, 146, 87, 167, 23, 147, 13, 115, 112, 141, 222, 100, 95, 143, + 50, 120, 65, 186, 70, 61, 203, 82, 216, 109, 211, 225, 63, 174, 247, 130, 10, 7, 214, 146, 58, 94, 195, + 173, 71, 185, 15, 83, 251, 15, 50, 194, 2, 217, 59, 229, 231, 11, 47, 85, 124, 112, 60, 152, 217, 214, + 133, 99, 10, 129, 211, 182, 17, 47, 248, 225, 103, 230, 188, 145, 199, 178, 102, 38, 142, 89, 17, 245, 99, + 22, 242, 37, 84, 242, 152, 108, 96, 95, 181, 229, 65, 58, 77, 114, 238, 65, 206, 71, 243, 94, 208, 251, + 6, 120, 124, 164, 234, 158, 123, 104, 214, 125, 150, 230, 183, 12, 215, 91, 247, 118, 195, 204, 106, 233, 215, + 197, 93, 226, 41, 220, 91, 106, 38, 188, 46, 75, 9, 111, 49, 40, 252, 21, 196, 160, 247, 3, 12, 142, + 49, 229, 44, 220, 2, 8, 195, 75, 66, 30, 181, 106, 177, 61, 58, 7, 100, 226, 60, 144, 226, 175, 253, + 17, 90, 75, 102, 157, 178, 183, 161, 220, 121, 72, 192, 96, 205, 27, 171, 128, 114, 253, 177, 38, 120, 54, + 194, 230, 47, 106, 3, 109, 157, 79, 204, 200, 201, 73, 130, 178, 148, 78, 218, 46, 210, 56, 232, 142, 101, + 75, 132, 142, 165, 241, 77, 28, 162, 69, 72, 58, 116, 98, 79, 118, 66, 253, 117, 100, 177, 252, 46, 249, + 49, 79, 108, 102, 204, 171, 101, 13, 179, 40, 218, 242, 173, 155, 221, 113, 61, 39, 124, 54, 64, 153, 163, + 225, 89, 62, 10, 250, 104, 171, 133, 171, 125, 95, 170, 114, 250, 10, 112, 47, 146, 197, 125, 192, 16, 88, + 187, 230, 241, 4, 69, 243, 4, 97, 195, 77, 197, 197, 25, 136, 111, 57, 96, 11, 123, 159, 108, 9, 191, + 178, 35, 164, 121, 122, 174, 74, 228, 250, 19, 224, 238, 184, 71, 133, 78, 145, 108, 165, 147, 111, 141, 234, + 101, 156, 208, 115, 96, 98, 252, 129, 33, 219, 169, 177, 180, 29, 208, 175, 114, 142, 119, 118, 98, 190, 250, + 163, 235, 127, 155, 38, 61, 104, 72, 70, 111, 114, 3, 59, 172, 120, 50, 249, 212, 43, 157, 136, 93, 166, + 208, 130, 136, 125, 184, 96, 188, 159, 220, 193, 9, 232, 37, 171, 105, 10, 71, 102, 45, 67, 26, 12, 183, + 122, 243, 88, 60, 248, 
78, 4, 220, 167, 244, 123, 174, 53, 238, 208, 81, 161, 100, 32, 204, 70, 73, 173, + 184, 63, 161, 32, 84, 106, 222, 208, 126, 210, 147, 117, 77, 215, 74, 186, 118, 52, 9, 5, 171, 16, 252, + 241, 91, 168, 127, 121, 246, 221, 13, 12, 202, 167, 143, 232, 167, 134, 106, 67, 31, 186, 198, 134, 59, 39, + 220, 107, 96, 104, 187, 89, 201, 185, 19, 119, 11, 73, 21, 233, 101, 60, 116, 140, 175, 17, 46, 248, 151, + 149, 26, 38, 114, 189, 220, 86, 183, 222, 117, 62, 65, 114, 220, 237, 216, 219, 27, 15, 172, 110, 237, 130, + 117, 80, 166, 135, 187, 99, 226, 59, 9, 140, 137, 164, 167, 241, 172, 236, 216, 149, 74, 177, 185, 125, 190, + 241, 33, 6, 23, 53, 231, 77, 63, 32, 35, 235, 12, 120, 228, 152, 4, 226, 229, 190, 176, 181, 207, 45, + 58, 171, 25, 25, 178, 222, 23, 145, 38, 108, 122, 139, 209, 154, 249, 237, 97, 253, 234, 13, 131, 158, 6, + 63, 32, 254, 93, 82, 8, 8, 2, 138, 164, 92, 95, 48, 3, 95, 105, 126, 167, 123, 225, 224, 170, 233, + 198, 52, 122, 175, 173, 172, 6, 91, 248, 179, 92, 148, 177, 123, 148, 91, 98, 191, 205, 182, 187, 146, 191, + 16, 85, 162, 86, 39, 136, 226, 251, 10, 70, 1, 183, 139, 140, 23, 182, 185, 14, 164, 232, 191, 15, 213, + 128, 185, 215, 148, 46, 154, 47, 237, 220, 249, 168, 226, 105, + }; + uint8_t ret[18 * 2 * 16 * 32] = { + 240, 211, 76, 44, 3, 198, 126, 60, 124, 45, 242, 246, 105, 69, 129, 230, 218, 122, 49, 224, 200, 134, 254, + 171, 166, 50, 153, 189, 241, 155, 136, 221, 34, 3, 160, 87, 194, 251, 8, 106, 96, 154, 115, 84, 233, 171, + 126, 232, 112, 168, 38, 89, 60, 3, 211, 237, 221, 250, 55, 118, 68, 169, 228, 5, 200, 119, 32, 196, 177, + 237, 193, 163, 171, 80, 229, 75, 105, 231, 236, 81, 172, 7, 80, 235, 159, 6, 53, 155, 223, 234, 108, 221, + 56, 254, 252, 53, 65, 128, 142, 111, 170, 153, 137, 253, 215, 78, 176, 88, 240, 239, 120, 229, 180, 38, 62, + 174, 97, 225, 222, 116, 213, 4, 117, 144, 196, 62, 60, 106, 93, 57, 103, 4, 59, 55, 206, 203, 62, 87, + 21, 26, 29, 163, 57, 189, 180, 1, 54, 219, 217, 209, 194, 108, 250, 206, 10, 175, 77, 63, 
144, 38, 210, + 192, 147, 66, 46, 216, 61, 86, 118, 198, 228, 78, 96, 15, 142, 223, 154, 176, 236, 234, 82, 111, 9, 31, + 14, 127, 165, 170, 94, 199, 155, 209, 146, 40, 202, 175, 221, 230, 131, 88, 130, 30, 201, 55, 68, 59, 188, + 245, 79, 133, 116, 86, 241, 198, 88, 42, 172, 66, 254, 97, 131, 208, 223, 204, 23, 247, 30, 186, 182, 120, + 33, 228, 204, 220, 20, 167, 186, 45, 118, 9, 61, 235, 137, 234, 99, 185, 20, 249, 211, 85, 186, 59, 179, + 48, 253, 156, 48, 68, 124, 54, 17, 193, 248, 37, 216, 204, 47, 230, 113, 150, 111, 68, 236, 227, 10, 7, + 55, 91, 16, 88, 129, 123, 246, 217, 119, 116, 229, 27, 118, 51, 60, 179, 178, 67, 235, 162, 88, 30, 35, + 222, 66, 183, 239, 82, 239, 112, 66, 236, 105, 46, 6, 62, 46, 29, 114, 22, 45, 73, 181, 30, 162, 168, + 216, 163, 237, 107, 196, 118, 14, 174, 191, 181, 111, 104, 39, 42, 16, 33, 241, 29, 36, 63, 190, 17, 89, + 184, 70, 81, 5, 151, 206, 57, 0, 91, 244, 77, 121, 126, 215, 114, 34, 84, 10, 10, 179, 38, 12, 69, + 132, 148, 159, 161, 90, 168, 176, 65, 78, 76, 4, 14, 191, 60, 155, 171, 77, 72, 119, 106, 208, 241, 143, + 123, 113, 234, 168, 28, 211, 249, 125, 154, 44, 6, 49, 136, 213, 253, 206, 164, 60, 6, 124, 99, 93, 227, + 55, 3, 177, 113, 130, 30, 234, 75, 227, 254, 55, 73, 244, 196, 233, 120, 91, 111, 151, 60, 124, 253, 123, + 15, 30, 170, 55, 225, 195, 12, 228, 173, 175, 225, 40, 249, 76, 36, 163, 74, 111, 151, 203, 82, 252, 204, + 194, 215, 199, 215, 251, 174, 3, 24, 242, 249, 160, 215, 72, 218, 62, 117, 102, 169, 153, 32, 172, 224, 251, + 78, 22, 196, 155, 225, 100, 78, 99, 198, 55, 125, 46, 132, 6, 67, 33, 195, 178, 174, 86, 208, 153, 117, + 63, 73, 157, 93, 64, 104, 108, 199, 145, 100, 161, 132, 242, 165, 96, 18, 144, 243, 207, 5, 57, 31, 178, + 78, 81, 173, 2, 160, 95, 51, 135, 181, 40, 229, 221, 17, 52, 9, 186, 2, 114, 215, 77, 150, 171, 210, + 139, 174, 222, 241, 135, 42, 220, 244, 111, 159, 160, 119, 99, 146, 42, 5, 151, 76, 188, 206, 207, 39, 241, + 1, 71, 90, 178, 48, 214, 205, 22, 245, 156, 148, 131, 180, 
69, 126, 17, 85, 97, 227, 224, 15, 88, 208, + 166, 40, 206, 71, 66, 236, 84, 223, 185, 35, 185, 107, 144, 38, 152, 146, 12, 122, 26, 254, 56, 23, 222, + 229, 23, 112, 14, 192, 223, 221, 172, 161, 145, 225, 122, 227, 167, 28, 58, 101, 50, 53, 179, 191, 90, 181, + 27, 170, 110, 231, 45, 96, 131, 174, 185, 34, 95, 196, 58, 100, 218, 224, 20, 217, 33, 243, 186, 52, 147, + 12, 133, 90, 16, 151, 134, 172, 79, 100, 222, 69, 185, 207, 46, 156, 240, 12, 44, 39, 43, 70, 70, 217, + 220, 129, 21, 186, 22, 57, 13, 237, 165, 73, 248, 79, 230, 90, 17, 146, 223, 71, 237, 214, 29, 92, 58, + 179, 103, 222, 159, 136, 185, 176, 74, 185, 73, 181, 102, 130, 180, 128, 166, 155, 21, 198, 193, 109, 44, 45, + 164, 61, 24, 160, 178, 8, 154, 213, 10, 87, 57, 218, 201, 72, 160, 3, 14, 221, 116, 239, 188, 197, 195, + 102, 62, 58, 32, 161, 122, 180, 30, 101, 71, 115, 191, 111, 121, 218, 171, 45, 203, 55, 30, 74, 17, 197, + 162, 192, 194, 185, 200, 29, 37, 20, 74, 159, 205, 63, 163, 227, 126, 59, 63, 67, 27, 73, 215, 133, 94, + 244, 252, 151, 56, 29, 181, 218, 127, 197, 147, 50, 172, 192, 35, 196, 0, 166, 116, 220, 201, 250, 31, 160, + 176, 35, 209, 42, 102, 47, 85, 45, 124, 93, 12, 158, 51, 120, 182, 177, 170, 101, 248, 204, 41, 33, 145, + 65, 227, 218, 119, 118, 42, 110, 41, 7, 91, 29, 161, 196, 121, 146, 126, 86, 152, 2, 9, 97, 252, 208, + 5, 93, 230, 61, 67, 225, 55, 149, 119, 225, 148, 46, 247, 118, 180, 221, 79, 50, 53, 249, 69, 215, 80, + 221, 213, 159, 78, 32, 147, 50, 109, 214, 83, 231, 214, 248, 38, 84, 58, 208, 104, 247, 226, 9, 3, 195, + 7, 90, 72, 177, 109, 148, 75, 72, 9, 228, 145, 208, 177, 98, 104, 50, 130, 9, 67, 78, 208, 184, 210, + 9, 121, 167, 39, 19, 140, 114, 114, 237, 59, 216, 6, 12, 181, 23, 237, 32, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 195, 45, 243, 108, 74, 253, 186, 6, 115, 25, 199, + 9, 146, 30, 225, 40, 249, 
192, 134, 87, 202, 201, 217, 196, 136, 151, 244, 19, 214, 179, 117, 22, 10, 209, + 89, 144, 31, 39, 198, 26, 19, 14, 81, 71, 66, 209, 224, 80, 236, 197, 55, 103, 58, 8, 32, 17, 52, + 17, 12, 78, 204, 143, 26, 165, 52, 123, 210, 89, 6, 208, 109, 71, 114, 214, 52, 118, 200, 85, 48, 166, + 177, 50, 157, 97, 238, 158, 31, 125, 123, 33, 251, 242, 116, 41, 40, 97, 55, 31, 248, 162, 55, 196, 82, + 148, 225, 143, 108, 148, 210, 234, 27, 37, 102, 118, 181, 116, 87, 38, 130, 193, 137, 20, 192, 239, 37, 193, + 119, 184, 74, 201, 215, 111, 115, 10, 122, 169, 9, 136, 9, 189, 235, 27, 246, 78, 157, 215, 72, 29, 22, + 3, 212, 159, 26, 139, 238, 184, 200, 152, 179, 238, 69, 143, 21, 223, 111, 198, 160, 130, 203, 121, 206, 161, + 46, 16, 86, 94, 15, 251, 22, 211, 61, 96, 110, 95, 155, 30, 81, 68, 191, 15, 152, 20, 56, 219, 124, + 243, 185, 8, 38, 75, 88, 109, 114, 49, 4, 54, 203, 139, 213, 216, 1, 186, 41, 138, 240, 12, 52, 108, + 64, 86, 142, 32, 165, 121, 185, 19, 221, 163, 117, 118, 198, 19, 73, 53, 111, 89, 96, 27, 57, 182, 60, + 129, 39, 144, 125, 181, 91, 81, 168, 221, 219, 133, 120, 73, 31, 20, 49, 237, 133, 223, 127, 158, 228, 209, + 160, 106, 216, 4, 20, 159, 230, 199, 116, 131, 72, 48, 140, 5, 197, 29, 192, 30, 204, 31, 41, 241, 35, + 143, 237, 59, 27, 202, 43, 238, 24, 32, 54, 31, 243, 223, 198, 199, 241, 211, 98, 46, 177, 157, 185, 42, + 166, 238, 242, 25, 223, 91, 213, 63, 61, 99, 201, 46, 79, 90, 117, 96, 65, 132, 10, 204, 143, 252, 22, + 178, 197, 149, 97, 72, 25, 173, 160, 129, 123, 106, 13, 69, 188, 125, 252, 228, 218, 234, 142, 77, 194, 15, + 65, 14, 178, 126, 241, 80, 233, 248, 35, 152, 96, 191, 141, 219, 78, 204, 207, 38, 95, 249, 201, 201, 97, + 165, 155, 150, 192, 223, 72, 164, 33, 73, 171, 156, 150, 251, 239, 57, 209, 43, 98, 67, 239, 92, 126, 139, + 75, 55, 132, 28, 0, 76, 96, 80, 37, 164, 247, 6, 30, 231, 182, 96, 40, 15, 104, 95, 224, 216, 78, + 159, 32, 8, 31, 81, 113, 84, 134, 0, 148, 109, 47, 87, 158, 122, 208, 180, 25, 37, 117, 225, 129, 22, + 29, 92, 
252, 51, 43, 92, 148, 153, 149, 216, 33, 18, 115, 149, 43, 197, 164, 251, 96, 94, 90, 40, 42, + 16, 41, 241, 95, 173, 14, 95, 189, 62, 49, 240, 182, 57, 153, 70, 117, 63, 247, 209, 97, 119, 108, 68, + 200, 8, 254, 108, 190, 1, 184, 222, 60, 46, 193, 66, 50, 44, 225, 195, 186, 190, 246, 129, 157, 180, 140, + 122, 175, 249, 20, 28, 120, 65, 80, 14, 38, 51, 229, 66, 189, 174, 66, 30, 94, 164, 101, 32, 210, 78, + 121, 134, 200, 8, 238, 131, 139, 246, 219, 45, 251, 141, 65, 141, 145, 124, 20, 96, 225, 94, 10, 144, 239, + 83, 146, 126, 243, 80, 96, 125, 232, 222, 0, 152, 32, 107, 154, 39, 184, 210, 244, 146, 185, 71, 174, 16, + 149, 78, 93, 77, 197, 213, 59, 31, 119, 16, 211, 22, 201, 12, 12, 93, 8, 51, 206, 150, 213, 152, 34, + 63, 82, 106, 28, 18, 66, 251, 162, 69, 30, 133, 191, 125, 227, 220, 62, 63, 202, 64, 230, 156, 91, 82, + 156, 44, 105, 48, 66, 117, 1, 126, 178, 216, 88, 96, 166, 106, 247, 115, 241, 15, 136, 110, 75, 119, 85, + 157, 92, 40, 18, 33, 166, 51, 195, 251, 51, 117, 68, 182, 104, 46, 58, 177, 111, 143, 215, 218, 100, 52, + 55, 120, 138, 61, 176, 126, 142, 78, 171, 85, 65, 93, 85, 103, 45, 22, 32, 168, 145, 139, 239, 32, 231, + 160, 115, 86, 208, 108, 182, 240, 89, 147, 78, 159, 70, 209, 110, 123, 18, 217, 62, 10, 159, 125, 95, 66, + 57, 118, 74, 158, 156, 7, 227, 184, 251, 130, 211, 48, 216, 155, 229, 69, 62, 47, 221, 218, 111, 194, 78, + 147, 53, 224, 133, 215, 100, 53, 94, 167, 132, 101, 76, 76, 96, 82, 88, 25, 145, 162, 218, 100, 156, 9, + 245, 199, 16, 63, 87, 33, 17, 41, 186, 12, 236, 219, 63, 83, 90, 38, 143, 168, 93, 207, 177, 128, 104, + 135, 27, 77, 87, 159, 121, 202, 33, 147, 124, 31, 108, 96, 144, 1, 92, 155, 125, 37, 8, 183, 44, 177, + 225, 1, 71, 120, 237, 99, 157, 9, 75, 138, 97, 214, 242, 246, 84, 80, 121, 230, 232, 145, 81, 75, 206, + 138, 174, 196, 121, 76, 214, 30, 148, 111, 176, 240, 86, 65, 58, 22, 0, 135, 107, 17, 84, 210, 213, 55, + 166, 28, 234, 200, 8, 47, 221, 68, 65, 132, 79, 29, 178, 130, 237, 71, 149, 173, 188, 111, 93, 85, 
247, + 254, 213, 180, 79, 111, 240, 20, 72, 27, 176, 12, 180, 21, 153, 208, 179, 123, 13, 120, 211, 146, 251, 11, + 133, 225, 142, 242, 77, 251, 214, 118, 41, 111, 12, 75, 100, 75, 192, 38, 157, 78, 132, 58, 191, 250, 108, + 115, 180, 190, 72, 249, 136, 232, 26, 4, 81, 63, 147, 5, 246, 134, 220, 119, 175, 189, 220, 130, 9, 149, + 23, 120, 207, 145, 97, 254, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 239, 33, 215, 73, 18, 26, 154, 53, 220, 228, 109, 64, 3, 247, 135, 99, 72, 211, 90, 45, 197, 240, + 229, 252, 138, 40, 230, 166, 70, 86, 164, 11, 209, 71, 30, 21, 188, 151, 34, 224, 120, 215, 46, 159, 156, + 127, 141, 165, 216, 52, 164, 113, 235, 75, 76, 87, 57, 114, 131, 52, 188, 5, 181, 182, 83, 224, 86, 7, + 223, 117, 214, 13, 95, 8, 201, 162, 156, 9, 5, 110, 8, 6, 75, 113, 200, 134, 189, 92, 116, 170, 140, + 23, 57, 216, 219, 246, 213, 196, 6, 222, 136, 150, 171, 102, 49, 72, 7, 184, 135, 133, 119, 52, 11, 179, + 108, 148, 130, 222, 242, 200, 63, 177, 49, 236, 246, 47, 66, 8, 99, 94, 20, 121, 136, 135, 96, 59, 172, + 231, 208, 220, 185, 123, 157, 141, 97, 47, 228, 146, 47, 30, 24, 156, 107, 241, 26, 15, 189, 224, 37, 133, + 165, 236, 0, 47, 67, 79, 206, 166, 225, 20, 124, 172, 109, 147, 240, 221, 46, 123, 240, 194, 86, 152, 158, + 99, 153, 217, 68, 12, 89, 179, 173, 131, 221, 142, 245, 117, 250, 94, 93, 132, 180, 141, 40, 143, 91, 30, + 40, 106, 17, 186, 178, 59, 114, 91, 237, 154, 210, 4, 205, 52, 69, 46, 98, 75, 182, 193, 10, 217, 105, + 88, 11, 34, 9, 87, 16, 28, 99, 55, 205, 32, 240, 194, 158, 180, 205, 121, 34, 250, 240, 91, 28, 173, + 137, 69, 67, 243, 251, 130, 125, 233, 167, 120, 2, 20, 168, 101, 254, 67, 197, 179, 187, 198, 13, 251, 73, + 207, 3, 22, 82, 203, 158, 132, 110, 248, 206, 98, 69, 247, 122, 228, 162, 191, 99, 243, 182, 158, 128, 165, + 122, 143, 20, 38, 202, 211, 64, 90, 204, 183, 215, 172, 27, 
84, 67, 205, 105, 167, 8, 78, 223, 143, 55, + 118, 175, 202, 228, 61, 252, 74, 45, 18, 165, 148, 14, 161, 162, 138, 229, 49, 216, 31, 100, 184, 123, 41, + 75, 41, 51, 150, 181, 94, 92, 154, 187, 89, 60, 100, 211, 186, 36, 198, 61, 26, 35, 165, 186, 208, 78, + 240, 169, 210, 16, 150, 155, 137, 60, 36, 1, 234, 13, 76, 58, 248, 50, 32, 229, 121, 71, 195, 195, 172, + 93, 237, 40, 69, 205, 87, 229, 222, 59, 28, 72, 16, 58, 223, 23, 214, 111, 246, 46, 157, 0, 146, 101, + 207, 14, 17, 233, 105, 100, 160, 202, 5, 234, 154, 108, 56, 131, 241, 79, 251, 130, 75, 238, 33, 32, 129, + 180, 45, 11, 53, 113, 213, 65, 13, 45, 42, 204, 213, 239, 54, 3, 88, 10, 148, 79, 73, 58, 151, 23, + 79, 126, 9, 40, 46, 222, 199, 247, 226, 81, 47, 192, 200, 213, 35, 63, 114, 162, 207, 247, 72, 201, 245, + 132, 12, 44, 195, 19, 128, 227, 126, 35, 203, 3, 233, 130, 4, 94, 194, 131, 38, 63, 123, 79, 234, 243, + 199, 142, 75, 203, 131, 195, 114, 146, 167, 165, 121, 138, 176, 45, 84, 146, 71, 17, 194, 117, 171, 140, 233, + 252, 245, 13, 73, 189, 238, 52, 136, 51, 97, 241, 119, 150, 209, 110, 198, 186, 210, 98, 110, 195, 98, 251, + 16, 253, 237, 231, 244, 237, 152, 44, 94, 214, 50, 30, 220, 127, 52, 208, 107, 246, 214, 189, 153, 192, 176, + 176, 77, 86, 120, 153, 125, 226, 37, 177, 70, 153, 209, 2, 99, 78, 139, 174, 136, 25, 28, 223, 73, 26, + 195, 201, 198, 48, 76, 160, 146, 37, 107, 151, 220, 241, 125, 64, 163, 198, 238, 60, 58, 97, 241, 43, 11, + 86, 173, 40, 163, 231, 27, 105, 196, 44, 205, 40, 238, 174, 69, 228, 171, 0, 75, 194, 233, 100, 23, 216, + 49, 115, 179, 215, 252, 59, 135, 254, 56, 165, 209, 127, 103, 240, 178, 40, 138, 122, 35, 207, 109, 98, 12, + 245, 38, 78, 52, 249, 137, 56, 4, 149, 21, 77, 238, 109, 134, 167, 48, 194, 237, 77, 168, 102, 242, 46, + 94, 186, 40, 126, 140, 119, 91, 107, 136, 250, 205, 64, 161, 48, 164, 41, 15, 144, 21, 205, 104, 97, 243, + 120, 14, 138, 206, 15, 224, 248, 12, 70, 166, 7, 124, 208, 226, 222, 219, 35, 173, 236, 118, 84, 110, 108, + 188, 6, 118, 119, 147, 
94, 77, 100, 23, 215, 207, 65, 208, 74, 126, 73, 92, 172, 181, 159, 77, 231, 65, + 136, 63, 236, 218, 98, 238, 94, 82, 77, 177, 30, 54, 206, 62, 228, 31, 40, 84, 67, 215, 34, 210, 127, + 93, 146, 25, 237, 0, 55, 83, 176, 60, 35, 94, 186, 24, 65, 130, 236, 236, 37, 113, 25, 27, 10, 50, + 75, 163, 32, 142, 105, 114, 97, 132, 189, 188, 208, 13, 52, 84, 127, 115, 180, 218, 41, 170, 173, 126, 217, + 61, 176, 51, 167, 27, 130, 207, 93, 50, 229, 66, 237, 192, 17, 54, 29, 109, 54, 155, 236, 135, 212, 106, + 21, 209, 24, 19, 121, 197, 105, 93, 33, 4, 181, 19, 177, 33, 253, 219, 14, 184, 145, 169, 173, 7, 68, + 39, 88, 61, 138, 223, 82, 123, 147, 35, 46, 236, 194, 219, 43, 139, 35, 155, 196, 227, 90, 161, 49, 217, + 1, 187, 127, 213, 127, 62, 115, 65, 225, 58, 15, 47, 99, 103, 89, 152, 114, 6, 125, 195, 220, 49, 142, + 66, 226, 102, 96, 120, 79, 218, 142, 116, 178, 19, 165, 96, 29, 250, 70, 212, 125, 37, 12, 220, 161, 161, + 117, 171, 221, 106, 107, 11, 17, 220, 237, 117, 140, 74, 53, 228, 45, 38, 253, 93, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 95, 65, 72, 163, 28, 217, 68, 31, 90, + 114, 236, 33, 210, 76, 143, 181, 176, 83, 45, 61, 217, 151, 37, 43, 240, 105, 149, 12, 161, 28, 254, 242, + 186, 252, 196, 242, 249, 171, 175, 91, 194, 212, 240, 203, 87, 148, 40, 117, 99, 214, 109, 40, 97, 82, 130, + 225, 223, 16, 199, 203, 94, 107, 99, 225, 225, 119, 55, 31, 112, 201, 5, 29, 206, 203, 92, 122, 22, 208, + 4, 31, 252, 129, 195, 77, 238, 178, 146, 171, 44, 23, 238, 54, 103, 253, 241, 36, 78, 45, 194, 141, 173, + 68, 77, 253, 96, 34, 11, 213, 104, 165, 19, 159, 71, 150, 175, 177, 152, 20, 120, 4, 62, 236, 202, 185, + 210, 252, 64, 74, 129, 176, 66, 21, 31, 225, 129, 230, 240, 241, 175, 210, 155, 251, 221, 119, 116, 156, 144, + 207, 240, 170, 210, 225, 207, 123, 42, 6, 35, 156, 196, 235, 45, 52, 8, 94, 47, 98, 90, 158, 
125, 58, + 172, 152, 116, 230, 29, 29, 126, 76, 164, 64, 150, 250, 193, 207, 98, 233, 61, 97, 119, 159, 124, 118, 185, + 250, 217, 125, 247, 22, 77, 18, 2, 17, 181, 225, 67, 210, 3, 53, 188, 142, 161, 35, 29, 88, 105, 154, + 45, 84, 189, 210, 123, 110, 120, 102, 64, 242, 25, 159, 175, 8, 228, 81, 74, 102, 87, 16, 123, 7, 214, + 96, 129, 86, 213, 57, 42, 212, 32, 103, 167, 102, 145, 177, 166, 71, 141, 144, 187, 87, 245, 139, 240, 21, + 81, 250, 13, 113, 223, 160, 113, 2, 38, 224, 34, 50, 41, 201, 233, 175, 193, 100, 170, 177, 14, 110, 222, + 70, 243, 126, 77, 153, 62, 30, 112, 127, 53, 90, 27, 152, 245, 245, 46, 96, 104, 210, 212, 205, 192, 235, + 47, 11, 181, 92, 190, 213, 37, 37, 65, 9, 155, 150, 243, 79, 68, 244, 74, 62, 75, 24, 184, 38, 245, + 156, 184, 144, 40, 112, 232, 249, 160, 56, 172, 4, 29, 254, 70, 247, 156, 48, 91, 182, 38, 251, 132, 36, + 184, 126, 54, 253, 248, 223, 9, 65, 65, 179, 239, 177, 82, 254, 61, 59, 4, 240, 91, 142, 215, 175, 41, + 33, 15, 247, 124, 108, 17, 87, 30, 91, 162, 28, 179, 164, 154, 185, 147, 109, 2, 55, 122, 139, 232, 68, + 109, 120, 187, 245, 82, 6, 45, 196, 158, 247, 238, 125, 151, 173, 15, 162, 101, 38, 11, 202, 99, 69, 186, + 217, 185, 76, 46, 44, 253, 56, 8, 137, 234, 210, 103, 220, 87, 204, 135, 205, 184, 161, 58, 183, 6, 102, + 163, 58, 193, 185, 113, 172, 14, 44, 200, 216, 153, 203, 112, 51, 61, 91, 179, 25, 10, 57, 205, 242, 44, + 25, 132, 229, 95, 230, 195, 203, 134, 90, 71, 251, 216, 48, 126, 143, 247, 15, 244, 44, 56, 152, 224, 105, + 215, 24, 94, 110, 105, 218, 85, 168, 111, 234, 189, 71, 56, 241, 231, 31, 221, 217, 243, 54, 93, 142, 52, + 191, 213, 54, 191, 206, 106, 169, 168, 221, 93, 243, 73, 92, 203, 189, 197, 188, 225, 191, 252, 35, 243, 163, + 221, 126, 166, 21, 76, 10, 107, 221, 180, 50, 126, 188, 164, 164, 199, 183, 11, 182, 181, 58, 189, 210, 100, + 92, 204, 237, 52, 134, 50, 111, 161, 10, 141, 149, 192, 172, 42, 87, 166, 245, 252, 214, 11, 128, 164, 22, + 145, 158, 113, 194, 28, 233, 62, 85, 214, 253, 82, 
210, 32, 205, 151, 248, 47, 30, 51, 165, 192, 214, 93, + 196, 218, 83, 164, 147, 154, 10, 119, 0, 156, 21, 229, 40, 133, 240, 128, 70, 109, 77, 218, 240, 32, 41, + 150, 125, 231, 240, 88, 90, 238, 54, 167, 69, 16, 90, 149, 16, 106, 26, 165, 224, 193, 70, 220, 158, 183, + 75, 163, 9, 57, 117, 248, 158, 66, 206, 104, 210, 147, 151, 146, 96, 29, 226, 158, 145, 61, 108, 207, 171, + 249, 203, 187, 228, 198, 43, 235, 129, 82, 135, 243, 165, 228, 79, 174, 86, 152, 76, 95, 44, 20, 195, 151, + 61, 170, 76, 207, 127, 224, 159, 18, 28, 76, 2, 98, 240, 217, 118, 204, 43, 153, 127, 60, 69, 25, 2, + 62, 156, 33, 51, 71, 100, 220, 174, 82, 8, 151, 249, 113, 150, 174, 200, 155, 16, 237, 10, 42, 170, 133, + 241, 33, 98, 253, 192, 98, 111, 86, 135, 104, 104, 51, 126, 5, 182, 35, 120, 254, 166, 248, 184, 176, 212, + 161, 46, 251, 108, 1, 136, 90, 249, 50, 163, 242, 84, 188, 111, 81, 121, 85, 82, 4, 133, 71, 190, 244, + 67, 63, 183, 146, 146, 202, 44, 42, 192, 13, 104, 136, 227, 63, 110, 81, 49, 86, 99, 106, 117, 175, 100, + 37, 95, 213, 121, 156, 37, 73, 137, 74, 193, 52, 219, 90, 13, 191, 89, 90, 250, 63, 207, 92, 102, 224, + 116, 250, 226, 114, 44, 243, 92, 144, 154, 23, 192, 69, 20, 167, 52, 227, 12, 54, 238, 7, 221, 168, 32, + 186, 124, 139, 45, 59, 188, 231, 203, 247, 251, 16, 229, 26, 220, 10, 125, 148, 67, 143, 230, 95, 133, 239, + 61, 28, 99, 31, 107, 58, 137, 166, 1, 195, 186, 69, 44, 178, 245, 124, 196, 228, 112, 186, 63, 94, 50, + 85, 10, 230, 17, 108, 238, 120, 150, 204, 91, 40, 49, 30, 60, 157, 205, 54, 204, 46, 165, 98, 35, 224, + 147, 98, 208, 163, 111, 43, 184, 171, 183, 167, 100, 32, 77, 16, 13, 67, 96, 73, 46, 86, 216, 80, 137, + 177, 231, 152, 58, 108, 234, 82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 48, 18, 119, 170, 51, 246, 252, 48, 249, 136, 156, 157, 210, 220, 33, 131, 244, 223, 212, 54, 220, + 48, 151, 
139, 186, 35, 168, 201, 232, 67, 118, 170, 109, 200, 101, 217, 180, 146, 154, 45, 21, 170, 245, 156, + 54, 118, 184, 130, 166, 151, 202, 2, 49, 114, 49, 94, 168, 66, 213, 222, 221, 26, 64, 106, 85, 196, 26, + 167, 127, 250, 41, 125, 207, 104, 173, 2, 161, 111, 236, 194, 42, 59, 210, 149, 99, 244, 181, 179, 57, 162, + 124, 203, 7, 32, 18, 63, 112, 249, 183, 33, 29, 80, 106, 175, 226, 109, 17, 42, 12, 72, 204, 132, 66, + 112, 224, 187, 194, 199, 25, 122, 6, 42, 241, 75, 181, 26, 75, 174, 16, 208, 41, 190, 87, 97, 131, 5, + 86, 168, 203, 187, 177, 207, 231, 108, 213, 103, 194, 136, 244, 90, 154, 97, 253, 94, 108, 87, 204, 108, 128, + 188, 216, 229, 98, 146, 55, 34, 80, 201, 151, 142, 108, 21, 28, 208, 32, 248, 207, 232, 46, 230, 44, 234, + 169, 37, 131, 181, 65, 6, 112, 136, 129, 66, 86, 14, 108, 4, 245, 132, 230, 23, 244, 11, 225, 104, 232, + 129, 7, 237, 73, 251, 172, 77, 156, 206, 209, 165, 190, 191, 235, 80, 45, 37, 34, 24, 66, 56, 35, 17, + 112, 157, 241, 125, 126, 24, 212, 180, 231, 212, 156, 32, 105, 233, 2, 36, 170, 229, 197, 192, 117, 233, 154, + 133, 241, 192, 150, 127, 51, 13, 192, 249, 140, 89, 184, 131, 208, 235, 157, 76, 29, 65, 246, 120, 57, 229, + 20, 138, 105, 92, 105, 199, 248, 32, 37, 227, 224, 69, 21, 63, 218, 171, 219, 100, 238, 28, 2, 214, 159, + 70, 216, 210, 70, 85, 140, 217, 33, 22, 99, 123, 34, 34, 22, 224, 68, 30, 118, 135, 51, 8, 99, 198, + 108, 12, 65, 127, 163, 132, 60, 88, 95, 240, 139, 137, 200, 192, 107, 194, 70, 172, 120, 34, 179, 224, 137, + 55, 163, 136, 246, 7, 48, 178, 14, 62, 16, 141, 93, 200, 179, 131, 42, 147, 92, 80, 79, 91, 61, 57, + 232, 9, 116, 248, 102, 25, 61, 125, 121, 171, 50, 159, 181, 125, 90, 122, 43, 134, 137, 146, 81, 110, 204, + 71, 219, 88, 184, 158, 112, 218, 118, 121, 79, 200, 7, 240, 64, 233, 41, 220, 3, 0, 7, 64, 150, 76, + 171, 84, 26, 217, 146, 32, 128, 33, 172, 230, 168, 40, 154, 159, 19, 197, 160, 90, 211, 208, 10, 190, 144, + 17, 5, 40, 167, 217, 238, 222, 162, 172, 45, 31, 105, 137, 246, 46, 69, 136, 
26, 40, 168, 193, 191, 194, + 28, 216, 177, 56, 44, 226, 2, 136, 0, 44, 157, 115, 17, 36, 19, 246, 21, 148, 202, 235, 33, 80, 194, + 160, 6, 161, 252, 125, 132, 216, 175, 152, 244, 30, 30, 159, 70, 232, 72, 146, 69, 61, 192, 156, 78, 130, + 172, 223, 62, 108, 75, 168, 12, 52, 15, 96, 100, 254, 134, 12, 74, 143, 232, 30, 245, 59, 90, 14, 199, + 246, 187, 204, 35, 204, 142, 220, 177, 73, 230, 242, 201, 74, 157, 85, 52, 215, 68, 89, 90, 164, 35, 186, + 183, 198, 21, 111, 197, 200, 223, 89, 120, 223, 180, 78, 76, 47, 185, 31, 191, 131, 239, 10, 77, 2, 8, + 132, 49, 217, 15, 59, 90, 30, 77, 68, 110, 8, 254, 95, 149, 20, 217, 80, 218, 163, 153, 228, 152, 210, + 16, 160, 114, 245, 244, 188, 176, 148, 252, 67, 127, 1, 220, 244, 59, 188, 113, 43, 170, 221, 155, 101, 158, + 205, 160, 167, 74, 12, 41, 70, 189, 47, 216, 229, 47, 3, 73, 72, 236, 16, 173, 10, 177, 14, 99, 109, + 206, 105, 135, 65, 6, 249, 233, 238, 211, 178, 155, 252, 79, 69, 43, 190, 193, 4, 58, 196, 182, 119, 164, + 164, 46, 102, 71, 195, 83, 249, 22, 85, 117, 5, 61, 227, 221, 180, 211, 42, 26, 224, 191, 107, 220, 254, + 32, 175, 185, 89, 123, 30, 174, 52, 143, 94, 195, 124, 232, 68, 18, 120, 232, 58, 44, 13, 57, 96, 92, + 168, 117, 58, 235, 33, 168, 216, 144, 123, 37, 182, 165, 23, 205, 139, 78, 178, 34, 58, 57, 121, 188, 71, + 136, 133, 238, 113, 7, 103, 126, 93, 94, 118, 80, 168, 6, 42, 88, 129, 215, 162, 66, 140, 247, 137, 3, + 237, 109, 253, 236, 68, 93, 85, 114, 223, 97, 29, 49, 186, 215, 72, 78, 164, 234, 151, 29, 53, 31, 66, + 220, 70, 230, 174, 129, 243, 205, 65, 142, 104, 219, 172, 44, 77, 203, 226, 67, 212, 96, 25, 176, 174, 11, + 167, 99, 42, 4, 80, 38, 108, 127, 177, 67, 151, 168, 204, 112, 150, 14, 158, 233, 178, 159, 230, 12, 160, + 128, 12, 239, 97, 5, 184, 229, 200, 178, 57, 52, 214, 196, 177, 167, 139, 64, 248, 86, 58, 107, 42, 128, + 105, 57, 21, 206, 74, 126, 70, 28, 228, 246, 42, 27, 93, 191, 153, 112, 199, 23, 136, 252, 9, 109, 153, + 82, 204, 107, 132, 129, 233, 200, 165, 20, 95, 50, 
111, 79, 19, 130, 213, 164, 220, 60, 119, 220, 179, 145, + 76, 211, 192, 245, 135, 244, 119, 141, 70, 174, 195, 194, 124, 129, 188, 245, 96, 65, 124, 230, 106, 106, 252, + 229, 181, 144, 178, 27, 194, 200, 210, 241, 79, 164, 238, 111, 252, 175, 235, 114, 157, 96, 105, 122, 244, 32, + 84, 215, 252, 12, 31, 104, 21, 248, 183, 219, 166, 164, 185, 77, 4, 171, 122, 13, 8, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 181, 221, 101, 49, 140, 116, 13, 221, 248, + 121, 205, 207, 222, 163, 213, 141, 133, 135, 129, 132, 26, 213, 249, 69, 92, 2, 124, 99, 86, 139, 17, 144, + 135, 245, 118, 54, 144, 178, 248, 241, 153, 16, 120, 44, 4, 8, 16, 54, 211, 231, 13, 53, 118, 69, 213, + 206, 203, 22, 240, 102, 51, 205, 7, 39, 56, 63, 19, 118, 128, 47, 248, 50, 194, 63, 84, 56, 14, 250, + 150, 70, 198, 57, 213, 2, 73, 145, 31, 119, 199, 6, 196, 48, 70, 50, 123, 89, 3, 63, 177, 129, 61, + 84, 184, 158, 179, 7, 76, 242, 230, 171, 90, 1, 222, 144, 101, 53, 137, 132, 212, 124, 90, 242, 231, 135, + 51, 95, 231, 61, 47, 162, 220, 127, 26, 204, 155, 63, 213, 98, 144, 179, 155, 204, 251, 132, 240, 76, 29, + 102, 250, 14, 91, 181, 154, 17, 133, 92, 41, 124, 206, 123, 45, 177, 195, 81, 205, 152, 195, 40, 189, 113, + 26, 66, 26, 220, 71, 198, 76, 134, 194, 237, 66, 77, 188, 125, 165, 26, 192, 64, 90, 205, 62, 167, 169, + 77, 12, 11, 13, 197, 174, 252, 253, 64, 146, 102, 186, 3, 34, 202, 203, 187, 20, 187, 173, 80, 57, 52, + 205, 162, 41, 198, 123, 195, 41, 193, 206, 68, 166, 217, 62, 195, 59, 196, 199, 188, 169, 47, 7, 130, 146, + 23, 153, 71, 147, 254, 136, 155, 99, 232, 91, 46, 227, 192, 57, 96, 49, 185, 153, 74, 247, 169, 82, 195, + 193, 72, 109, 239, 186, 193, 119, 121, 77, 201, 126, 4, 245, 241, 12, 231, 76, 129, 88, 165, 131, 97, 192, + 246, 129, 185, 20, 156, 15, 219, 122, 241, 142, 251, 13, 105, 98, 212, 140, 52, 236, 217, 201, 53, 8, 212, 
+ 95, 233, 243, 223, 205, 57, 113, 221, 152, 136, 138, 203, 123, 15, 114, 80, 26, 202, 99, 139, 129, 56, 23, + 240, 97, 97, 164, 166, 163, 207, 70, 234, 51, 113, 127, 232, 201, 104, 5, 22, 230, 159, 88, 193, 57, 114, + 107, 127, 161, 107, 36, 44, 31, 36, 125, 23, 128, 64, 75, 185, 152, 109, 29, 53, 109, 68, 159, 89, 120, + 149, 232, 140, 106, 147, 189, 231, 203, 127, 80, 184, 60, 229, 230, 196, 154, 173, 184, 171, 169, 110, 157, 35, + 79, 13, 48, 75, 160, 205, 74, 229, 251, 6, 212, 94, 145, 9, 115, 124, 189, 24, 180, 164, 69, 11, 8, + 79, 4, 128, 32, 92, 96, 47, 172, 134, 177, 171, 210, 103, 64, 45, 214, 152, 9, 218, 25, 94, 81, 64, + 115, 161, 238, 13, 74, 231, 222, 36, 196, 210, 164, 143, 167, 231, 55, 186, 94, 10, 97, 72, 211, 71, 77, + 85, 166, 113, 250, 41, 101, 23, 250, 97, 231, 189, 249, 67, 150, 183, 6, 202, 64, 76, 47, 94, 18, 157, + 95, 68, 10, 226, 221, 81, 242, 110, 53, 101, 144, 51, 14, 25, 206, 124, 112, 10, 137, 17, 10, 67, 21, + 61, 140, 40, 230, 172, 120, 174, 82, 136, 87, 232, 26, 59, 148, 95, 73, 162, 117, 159, 32, 202, 61, 118, + 179, 33, 19, 241, 58, 55, 104, 137, 133, 114, 122, 70, 151, 78, 226, 5, 150, 253, 35, 93, 30, 92, 23, + 2, 165, 122, 20, 251, 119, 247, 158, 2, 113, 206, 46, 6, 72, 53, 34, 176, 91, 164, 157, 179, 101, 189, + 250, 246, 164, 10, 87, 175, 201, 223, 128, 109, 214, 141, 218, 112, 49, 33, 96, 101, 254, 12, 56, 148, 140, + 172, 168, 43, 160, 102, 80, 172, 138, 46, 9, 19, 207, 59, 69, 48, 39, 211, 52, 124, 43, 204, 253, 86, + 65, 11, 20, 146, 222, 55, 243, 16, 187, 219, 177, 110, 48, 201, 153, 226, 131, 92, 154, 52, 195, 140, 37, + 142, 76, 47, 134, 140, 74, 129, 50, 1, 143, 19, 92, 40, 20, 103, 131, 199, 119, 56, 236, 199, 250, 1, + 80, 19, 148, 78, 191, 158, 88, 120, 125, 6, 50, 217, 233, 185, 55, 56, 163, 125, 137, 78, 133, 129, 208, + 101, 225, 244, 167, 155, 157, 7, 108, 157, 101, 138, 132, 82, 46, 227, 128, 162, 234, 189, 234, 176, 101, 114, + 75, 169, 120, 176, 134, 51, 241, 39, 36, 212, 193, 238, 187, 172, 92, 189, 41, 
80, 238, 170, 247, 119, 51, + 233, 246, 146, 99, 143, 62, 130, 2, 79, 64, 42, 26, 88, 166, 21, 119, 224, 172, 217, 141, 187, 42, 147, + 169, 55, 206, 147, 215, 193, 85, 21, 10, 173, 1, 47, 192, 123, 242, 127, 222, 247, 233, 170, 221, 220, 226, + 101, 218, 214, 103, 36, 13, 195, 36, 32, 43, 76, 189, 88, 2, 147, 128, 180, 65, 91, 179, 124, 108, 116, + 50, 50, 142, 40, 180, 110, 246, 20, 52, 240, 76, 186, 60, 228, 174, 56, 102, 42, 203, 24, 228, 47, 107, + 131, 161, 3, 133, 106, 82, 74, 249, 167, 181, 172, 189, 159, 89, 153, 118, 232, 248, 205, 147, 8, 203, 192, + 20, 169, 189, 148, 234, 233, 130, 230, 171, 105, 36, 246, 191, 211, 165, 88, 154, 136, 6, 90, 131, 200, 241, + 55, 92, 135, 221, 39, 14, 68, 139, 24, 237, 215, 20, 177, 190, 51, 106, 127, 237, 146, 222, 61, 247, 173, + 2, 112, 211, 145, 99, 95, 206, 164, 183, 233, 38, 21, 44, 106, 226, 183, 171, 230, 201, 56, 77, 118, 121, + 184, 141, 129, 114, 127, 3, 136, 188, 10, 243, 123, 204, 106, 74, 241, 202, 186, 187, 233, 151, 222, 27, 135, + 167, 125, 63, 226, 25, 139, 131, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 136, 5, 119, 80, 154, 4, 241, 70, 159, 227, 2, 238, 104, 242, 90, 84, 111, 231, 89, 60, + 9, 57, 37, 67, 199, 38, 232, 235, 141, 213, 56, 16, 31, 123, 175, 4, 96, 122, 28, 41, 208, 176, 135, + 37, 111, 233, 60, 206, 42, 208, 212, 103, 197, 110, 153, 152, 245, 229, 129, 59, 172, 155, 132, 104, 53, 12, + 13, 105, 234, 168, 191, 184, 111, 198, 58, 212, 222, 156, 84, 196, 62, 205, 142, 73, 0, 96, 241, 191, 204, + 212, 209, 58, 190, 237, 185, 162, 188, 108, 216, 144, 63, 207, 191, 194, 44, 68, 254, 140, 118, 111, 83, 121, + 242, 108, 97, 40, 94, 208, 135, 168, 116, 197, 233, 141, 42, 84, 96, 208, 110, 111, 250, 170, 53, 171, 178, + 136, 168, 182, 168, 217, 151, 36, 59, 143, 86, 238, 197, 26, 107, 42, 23, 151, 225, 227, 200, 9, 82, 6, + 73, 123, 201, 31, 
109, 41, 220, 188, 122, 7, 102, 190, 90, 54, 138, 46, 68, 72, 69, 46, 27, 14, 190, + 237, 140, 71, 42, 172, 178, 142, 200, 114, 108, 162, 253, 168, 241, 115, 71, 18, 112, 22, 145, 15, 231, 88, + 100, 163, 250, 72, 129, 244, 233, 86, 70, 26, 191, 52, 26, 58, 50, 254, 5, 30, 44, 5, 222, 149, 113, + 3, 140, 140, 225, 107, 86, 156, 10, 247, 228, 212, 79, 85, 63, 122, 77, 157, 228, 233, 157, 167, 82, 254, + 65, 208, 0, 175, 24, 56, 185, 224, 186, 204, 248, 139, 120, 196, 225, 156, 126, 198, 72, 46, 240, 4, 138, + 253, 69, 158, 182, 40, 229, 14, 64, 55, 221, 123, 165, 89, 130, 206, 246, 237, 253, 193, 134, 194, 70, 197, + 135, 104, 141, 35, 190, 23, 154, 252, 226, 212, 79, 234, 158, 238, 126, 232, 62, 112, 86, 124, 18, 151, 166, + 142, 160, 218, 244, 135, 211, 180, 218, 48, 159, 210, 36, 153, 175, 73, 27, 138, 229, 80, 236, 133, 25, 209, + 164, 240, 61, 211, 163, 48, 253, 35, 97, 7, 251, 198, 146, 45, 51, 80, 60, 5, 249, 81, 81, 76, 46, + 194, 21, 129, 38, 167, 8, 58, 101, 133, 177, 142, 129, 53, 235, 246, 57, 174, 209, 205, 51, 63, 16, 65, + 199, 180, 84, 147, 76, 54, 104, 103, 139, 196, 162, 44, 44, 227, 38, 46, 146, 118, 238, 233, 156, 223, 205, + 131, 87, 60, 247, 139, 108, 18, 56, 192, 221, 220, 208, 94, 203, 28, 209, 70, 53, 102, 220, 144, 107, 56, + 142, 194, 232, 161, 152, 118, 23, 171, 87, 132, 80, 4, 106, 193, 231, 250, 42, 101, 59, 37, 174, 245, 167, + 187, 30, 189, 212, 53, 191, 235, 208, 137, 168, 177, 4, 120, 50, 157, 218, 126, 230, 122, 118, 194, 119, 100, + 197, 157, 228, 77, 119, 171, 62, 229, 251, 10, 119, 24, 80, 73, 254, 223, 37, 47, 178, 239, 78, 150, 122, + 37, 93, 125, 35, 80, 82, 38, 6, 40, 168, 168, 83, 180, 105, 186, 218, 174, 137, 205, 20, 32, 197, 166, + 41, 33, 96, 252, 2, 148, 59, 12, 95, 8, 44, 68, 81, 178, 165, 185, 128, 33, 159, 19, 236, 224, 181, + 83, 115, 30, 241, 220, 181, 105, 128, 141, 189, 149, 142, 11, 81, 253, 30, 103, 226, 8, 211, 171, 45, 180, + 104, 2, 79, 96, 22, 192, 27, 229, 217, 93, 96, 136, 10, 43, 79, 236, 156, 42, 245, 
153, 251, 253, 65, + 147, 84, 27, 18, 161, 47, 243, 222, 48, 23, 71, 225, 112, 38, 176, 104, 17, 188, 86, 95, 6, 163, 153, + 77, 80, 203, 173, 164, 5, 242, 240, 70, 214, 41, 157, 101, 65, 104, 175, 250, 5, 119, 78, 226, 88, 161, + 46, 234, 5, 117, 79, 47, 160, 211, 196, 228, 180, 50, 34, 172, 169, 98, 199, 206, 49, 17, 113, 29, 47, + 131, 93, 55, 40, 113, 5, 97, 84, 146, 236, 206, 212, 164, 217, 197, 173, 1, 223, 152, 144, 137, 230, 39, + 222, 249, 253, 42, 209, 125, 132, 186, 195, 180, 211, 139, 186, 5, 7, 106, 101, 154, 77, 85, 172, 114, 49, + 126, 236, 173, 65, 100, 136, 196, 124, 9, 83, 209, 26, 127, 195, 4, 227, 252, 34, 162, 69, 53, 88, 54, + 225, 206, 75, 154, 63, 62, 27, 223, 12, 9, 159, 167, 4, 77, 25, 162, 136, 129, 0, 218, 44, 161, 135, + 107, 49, 50, 137, 110, 22, 138, 130, 163, 63, 120, 250, 42, 76, 250, 64, 195, 28, 170, 135, 32, 138, 182, + 150, 242, 252, 8, 209, 232, 184, 234, 155, 203, 16, 22, 38, 50, 183, 21, 191, 242, 211, 88, 216, 225, 72, + 215, 253, 203, 91, 34, 126, 246, 144, 250, 59, 221, 24, 132, 247, 218, 246, 90, 218, 73, 194, 249, 171, 9, + 118, 227, 76, 28, 182, 249, 223, 61, 216, 165, 193, 247, 110, 201, 5, 62, 132, 161, 230, 103, 21, 22, 15, + 161, 124, 46, 77, 12, 136, 114, 45, 128, 43, 87, 209, 191, 137, 68, 69, 219, 202, 254, 50, 190, 61, 163, + 189, 160, 102, 136, 130, 97, 126, 20, 18, 245, 182, 147, 41, 82, 98, 56, 72, 245, 31, 43, 139, 199, 130, + 210, 104, 170, 26, 60, 69, 237, 87, 100, 203, 130, 71, 217, 60, 182, 199, 22, 181, 71, 234, 12, 215, 188, + 196, 220, 177, 175, 161, 128, 47, 73, 232, 28, 66, 122, 71, 234, 33, 142, 155, 59, 93, 159, 71, 88, 174, + 70, 222, 186, 91, 167, 198, 89, 101, 149, 117, 15, 187, 241, 190, 32, 229, 25, 209, 158, 2, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 177, 112, 96, 168, 65, 84, 5, 126, + 102, 156, 175, 133, 130, 245, 182, 135, 221, 171, 
41, 132, 114, 212, 178, 126, 162, 0, 187, 71, 39, 223, 211, + 238, 243, 157, 112, 160, 172, 8, 95, 6, 34, 141, 83, 161, 62, 177, 111, 171, 71, 231, 18, 86, 182, 74, + 133, 116, 85, 2, 150, 226, 147, 192, 88, 2, 211, 17, 60, 90, 203, 28, 227, 120, 232, 115, 185, 108, 67, + 235, 224, 200, 30, 214, 220, 25, 143, 188, 218, 248, 75, 236, 181, 44, 130, 215, 113, 248, 62, 59, 136, 115, + 33, 174, 90, 6, 176, 188, 168, 2, 212, 18, 54, 156, 246, 181, 22, 74, 57, 25, 234, 230, 123, 184, 69, + 59, 62, 151, 16, 114, 219, 98, 55, 175, 168, 151, 105, 114, 251, 228, 75, 107, 114, 237, 90, 218, 51, 44, + 169, 168, 232, 106, 244, 31, 83, 150, 204, 210, 110, 108, 175, 17, 106, 165, 192, 127, 59, 47, 99, 177, 98, + 123, 18, 122, 10, 139, 232, 5, 146, 29, 237, 46, 69, 186, 212, 95, 96, 223, 78, 173, 212, 234, 39, 141, + 48, 78, 136, 21, 212, 206, 144, 18, 67, 74, 35, 171, 210, 125, 245, 61, 231, 58, 134, 45, 92, 61, 78, + 29, 123, 142, 18, 78, 162, 60, 166, 31, 149, 175, 63, 89, 65, 143, 190, 50, 88, 104, 172, 113, 103, 38, + 35, 154, 3, 8, 28, 160, 54, 247, 10, 102, 184, 95, 35, 126, 154, 162, 85, 71, 169, 43, 158, 100, 223, + 218, 233, 124, 126, 211, 253, 164, 15, 156, 10, 27, 232, 96, 231, 80, 49, 47, 30, 239, 139, 215, 68, 100, + 235, 108, 30, 229, 213, 247, 209, 243, 153, 249, 181, 85, 244, 53, 196, 171, 106, 1, 182, 94, 208, 221, 134, + 39, 54, 195, 9, 227, 210, 112, 152, 192, 17, 20, 114, 218, 81, 125, 175, 206, 25, 55, 17, 125, 2, 34, + 49, 31, 157, 11, 253, 28, 69, 23, 203, 144, 128, 223, 184, 88, 135, 82, 125, 156, 62, 30, 40, 244, 105, + 168, 177, 209, 105, 199, 106, 78, 239, 3, 136, 168, 8, 155, 98, 227, 64, 13, 220, 22, 213, 103, 148, 238, + 177, 206, 234, 6, 247, 155, 138, 205, 54, 8, 17, 37, 21, 235, 46, 132, 31, 49, 30, 87, 82, 231, 93, + 38, 91, 167, 159, 167, 223, 95, 217, 70, 77, 205, 8, 156, 130, 15, 169, 213, 109, 203, 114, 1, 252, 44, + 93, 224, 164, 115, 163, 103, 97, 103, 81, 165, 121, 25, 31, 140, 251, 71, 120, 137, 74, 103, 78, 77, 151, + 194, 239, 76, 99, 
82, 150, 106, 27, 249, 248, 97, 106, 133, 214, 219, 122, 249, 245, 230, 115, 48, 254, 227, + 46, 147, 188, 30, 105, 96, 66, 116, 169, 185, 3, 29, 232, 27, 219, 210, 179, 100, 176, 243, 3, 149, 227, + 178, 89, 3, 218, 190, 135, 166, 5, 93, 93, 172, 74, 242, 241, 44, 198, 94, 228, 240, 233, 234, 183, 232, + 50, 136, 23, 34, 64, 138, 93, 220, 35, 10, 242, 172, 123, 108, 2, 244, 77, 62, 203, 249, 160, 232, 69, + 231, 10, 214, 6, 94, 113, 213, 87, 36, 115, 166, 119, 145, 59, 110, 83, 131, 69, 196, 14, 242, 91, 17, + 143, 128, 180, 249, 3, 247, 141, 157, 28, 74, 253, 104, 174, 44, 171, 145, 192, 245, 215, 137, 239, 1, 5, + 43, 186, 154, 245, 249, 20, 16, 31, 224, 118, 139, 73, 167, 134, 189, 163, 170, 90, 62, 118, 201, 122, 29, + 140, 205, 40, 5, 6, 183, 242, 170, 18, 202, 232, 87, 208, 149, 20, 128, 238, 6, 88, 237, 109, 217, 117, + 86, 199, 137, 164, 45, 151, 117, 217, 209, 182, 28, 189, 107, 146, 174, 133, 253, 74, 40, 77, 76, 188, 152, + 27, 135, 247, 159, 137, 39, 101, 19, 136, 159, 193, 148, 194, 247, 136, 41, 89, 196, 140, 81, 23, 174, 79, + 233, 228, 174, 186, 126, 85, 172, 228, 113, 44, 50, 253, 51, 251, 192, 253, 42, 151, 17, 168, 26, 241, 145, + 169, 222, 99, 68, 195, 238, 20, 10, 91, 147, 220, 241, 175, 210, 82, 185, 201, 168, 187, 225, 18, 109, 235, + 3, 29, 75, 248, 236, 102, 215, 2, 242, 133, 185, 107, 2, 88, 245, 232, 135, 63, 183, 44, 163, 155, 177, + 127, 166, 51, 86, 170, 193, 212, 182, 84, 8, 236, 212, 168, 154, 222, 177, 166, 48, 129, 235, 154, 167, 38, + 84, 33, 93, 70, 52, 33, 32, 125, 204, 8, 154, 47, 106, 118, 8, 141, 31, 2, 132, 138, 205, 162, 197, + 28, 17, 144, 199, 5, 247, 57, 119, 211, 99, 141, 80, 116, 24, 202, 162, 185, 30, 159, 143, 181, 101, 73, + 242, 21, 153, 58, 38, 12, 177, 166, 135, 25, 95, 30, 4, 83, 64, 98, 196, 211, 120, 33, 119, 198, 221, + 118, 49, 130, 84, 58, 230, 227, 39, 190, 103, 13, 156, 41, 223, 133, 125, 205, 92, 244, 106, 158, 213, 39, + 71, 13, 247, 173, 121, 242, 234, 114, 83, 63, 26, 26, 166, 134, 207, 147, 77, 113, 
152, 171, 226, 108, 72, + 175, 187, 167, 29, 89, 112, 201, 27, 171, 193, 78, 183, 111, 115, 127, 203, 84, 211, 138, 167, 95, 82, 10, + 185, 59, 152, 17, 178, 242, 229, 243, 158, 215, 197, 46, 160, 2, 61, 253, 220, 114, 106, 130, 142, 162, 253, + 174, 133, 101, 219, 119, 38, 172, 166, 220, 102, 60, 53, 73, 208, 118, 168, 143, 134, 201, 60, 26, 62, 172, + 99, 172, 241, 35, 190, 178, 154, 6, 138, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 217, 37, 145, 160, 185, 189, 159, 26, 106, 138, 109, 0, 151, 61, 151, 232, 89, 181, 85, + 52, 217, 92, 61, 143, 47, 204, 108, 147, 140, 212, 240, 94, 244, 152, 6, 85, 214, 166, 78, 16, 205, 104, + 121, 183, 100, 120, 66, 249, 68, 37, 16, 128, 118, 126, 218, 252, 174, 157, 234, 251, 49, 126, 79, 82, 121, + 11, 76, 120, 157, 197, 124, 226, 157, 178, 69, 18, 188, 120, 67, 160, 230, 85, 133, 105, 4, 192, 112, 210, + 221, 217, 39, 86, 79, 93, 42, 114, 68, 157, 223, 146, 35, 40, 210, 134, 137, 118, 27, 249, 31, 169, 196, + 139, 114, 188, 131, 204, 79, 31, 71, 20, 182, 89, 218, 249, 56, 236, 226, 43, 65, 24, 61, 237, 247, 127, + 149, 92, 213, 103, 34, 177, 3, 211, 235, 50, 32, 235, 149, 20, 169, 170, 56, 87, 72, 236, 247, 9, 24, + 226, 136, 50, 124, 76, 94, 159, 160, 161, 165, 52, 82, 13, 240, 146, 247, 27, 254, 212, 190, 23, 179, 248, + 79, 216, 28, 40, 108, 174, 143, 78, 193, 220, 94, 223, 136, 187, 136, 14, 32, 178, 10, 101, 206, 16, 9, + 38, 41, 218, 10, 143, 229, 136, 247, 177, 178, 8, 203, 70, 223, 205, 77, 26, 7, 160, 156, 125, 247, 139, + 206, 113, 244, 107, 135, 245, 178, 122, 50, 94, 49, 70, 57, 3, 229, 195, 41, 80, 58, 66, 45, 205, 28, + 182, 63, 248, 142, 14, 93, 60, 222, 17, 221, 206, 92, 248, 42, 21, 148, 4, 5, 5, 221, 230, 146, 103, + 206, 79, 134, 160, 154, 215, 9, 168, 177, 127, 208, 196, 142, 113, 36, 204, 246, 238, 48, 246, 118, 147, 247, + 70, 10, 183, 169, 140, 7, 
199, 50, 204, 151, 34, 141, 208, 94, 45, 239, 60, 199, 183, 121, 73, 114, 33, + 150, 152, 167, 152, 127, 9, 123, 73, 223, 60, 90, 93, 140, 62, 253, 38, 194, 100, 164, 150, 81, 187, 162, + 158, 26, 197, 179, 123, 107, 108, 88, 30, 45, 111, 141, 17, 122, 142, 28, 232, 140, 21, 242, 77, 35, 206, + 158, 187, 115, 115, 175, 148, 113, 158, 213, 102, 2, 125, 216, 125, 220, 104, 30, 170, 188, 136, 34, 100, 17, + 180, 10, 238, 190, 74, 48, 26, 115, 113, 215, 201, 121, 114, 215, 241, 128, 101, 44, 113, 37, 196, 124, 254, + 109, 195, 213, 141, 239, 139, 35, 101, 70, 157, 203, 113, 229, 233, 204, 97, 148, 212, 208, 174, 126, 203, 245, + 22, 119, 36, 5, 149, 32, 9, 141, 0, 202, 134, 58, 131, 132, 126, 52, 126, 74, 234, 212, 221, 80, 19, + 64, 117, 167, 118, 52, 6, 14, 99, 77, 113, 201, 248, 127, 135, 205, 147, 227, 60, 46, 55, 237, 192, 149, + 226, 95, 104, 162, 76, 24, 100, 38, 44, 240, 137, 93, 6, 136, 62, 141, 164, 172, 11, 55, 227, 159, 225, + 195, 104, 243, 112, 31, 70, 104, 173, 242, 157, 232, 162, 225, 87, 105, 182, 136, 194, 112, 96, 155, 171, 243, + 122, 53, 197, 194, 208, 240, 35, 37, 117, 201, 235, 205, 118, 213, 80, 53, 126, 186, 170, 2, 7, 233, 95, + 107, 144, 133, 154, 228, 55, 156, 108, 241, 167, 245, 6, 8, 173, 54, 235, 128, 185, 139, 251, 97, 140, 94, + 166, 171, 70, 62, 193, 87, 203, 31, 43, 237, 199, 4, 201, 37, 48, 64, 99, 251, 200, 55, 59, 39, 163, + 88, 236, 35, 83, 5, 10, 37, 212, 69, 19, 124, 125, 92, 184, 94, 93, 179, 199, 211, 7, 119, 24, 14, + 156, 220, 88, 240, 101, 10, 44, 40, 192, 171, 162, 138, 121, 111, 203, 157, 198, 211, 251, 22, 84, 20, 99, + 221, 253, 60, 92, 181, 237, 193, 161, 211, 139, 12, 57, 173, 148, 160, 229, 121, 104, 66, 113, 175, 157, 177, + 219, 38, 77, 72, 76, 217, 233, 101, 24, 182, 109, 238, 118, 177, 156, 89, 22, 63, 9, 241, 135, 59, 69, + 90, 87, 130, 177, 70, 169, 179, 13, 21, 109, 187, 164, 110, 88, 61, 115, 81, 186, 216, 20, 124, 206, 188, + 202, 143, 162, 192, 208, 15, 34, 80, 91, 233, 194, 163, 173, 61, 80, 161, 147, 46, 
44, 14, 184, 20, 129, + 149, 196, 241, 90, 164, 125, 113, 228, 127, 119, 185, 11, 108, 175, 195, 233, 31, 17, 103, 132, 253, 146, 192, + 111, 172, 47, 150, 77, 143, 5, 148, 49, 172, 106, 141, 227, 85, 95, 92, 152, 25, 39, 16, 108, 132, 70, + 247, 14, 56, 63, 213, 155, 240, 148, 242, 96, 71, 251, 100, 107, 122, 173, 51, 59, 82, 171, 232, 133, 36, + 65, 168, 137, 182, 15, 38, 208, 18, 107, 27, 77, 170, 93, 100, 245, 74, 250, 111, 223, 154, 84, 51, 28, + 203, 250, 208, 105, 128, 77, 9, 214, 9, 137, 201, 17, 17, 161, 31, 188, 192, 24, 112, 78, 70, 174, 101, + 102, 131, 90, 15, 145, 215, 87, 94, 28, 198, 134, 197, 103, 115, 127, 172, 156, 172, 159, 12, 234, 199, 209, + 70, 247, 214, 171, 248, 161, 181, 116, 12, 41, 124, 8, 193, 33, 244, 10, 226, 208, 78, 244, 49, 115, 201, + 129, 7, 253, 208, 80, 162, 44, 34, 23, 143, 216, 7, 15, 229, 217, 47, 102, 37, 65, 94, 123, 91, 93, + 75, 247, 8, 58, 17, 121, 253, 3, 178, 101, 69, 117, 74, 78, 156, 169, 118, 61, 120, 208, 193, 45, 248, + 238, 173, 126, 52, 127, 232, 59, 185, 116, 38, 65, 110, 226, 236, 33, 235, 176, 222, 249, 63, 164, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 146, 65, 18, 222, 192, 28, + 129, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 141, 194, 179, 150, 178, 254, 199, 164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 86, 70, 20, 115, 220, 58, 54, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 87, 189, 150, + 42, 83, 54, 191, 178, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 168, 148, 227, 237, 0, 58, 76, 125, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 81, 225, 130, 80, 57, 19, 44, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 203, 199, 88, 221, 85, 236, 28, 154, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 137, 70, 56, 15, 38, 198, 57, 188, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 171, 226, 22, 161, + 146, 207, 253, 221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 238, 243, 247, 62, 91, 24, 96, 62, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 92, 135, 166, 200, 178, 32, 143, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 199, 58, 182, 38, 46, 223, 251, 177, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 8, 54, 221, 20, 104, 192, 15, 16, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 52, 250, 181, 83, 73, + 108, 39, 238, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 87, 62, 63, 109, 185, 27, 47, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 201, 228, 200, 210, 40, 189, 69, 33, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 163, + 163, 100, 42, 233, 131, 196, 207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 61, 64, 224, 148, 207, 87, 250, 49, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 228, 74, 100, 21, 232, + 188, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 99, 141, 163, 15, 250, 143, 144, 193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 101, 121, 138, 146, 243, 91, 162, 43, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 59, + 131, 6, 28, 220, 203, 187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 245, 233, 244, 171, 114, 186, 18, 108, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 241, 239, 20, 254, 188, 136, 79, + 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 76, 64, 21, 56, 243, 197, 57, 40, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 63, 50, 196, 168, 85, 165, 173, 159, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 66, 54, 126, + 47, 43, 133, 97, 245, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 73, 250, 139, 147, 9, 128, 42, 212, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 204, 155, 163, 171, 146, 4, 25, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 92, 225, 172, 148, 16, 70, 164, 46, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 246, 132, 223, 201, 82, 47, 254, 102, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 246, 15, 204, 188, 11, 80, 246, 168, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 182, 191, 225, 26, 52, 65, 149, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 71, 191, 171, 180, 6, 36, 183, 199, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 66, 32, 79, 63, 222, + 180, 149, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 58, 172, 192, 3, 9, 104, 4, 89, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 165, 11, 151, 236, 179, 31, 62, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 29, + 180, 143, 81, 31, 161, 29, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 201, 165, 129, 75, 141, 69, 64, 111, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 27, 112, 242, 101, 206, + 26, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 140, 57, 129, 215, 240, 66, 56, 148, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
+ 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 225, 35, 59, 113, 165, 101, 86, 190, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 241, + 201, 16, 7, 199, 31, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 94, 214, 47, 254, 191, 204, 10, 187, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 109, 237, 174, 252, 46, 216, 181, + 101, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 94, 228, 195, 95, 198, 10, 247, 141, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 247, 88, 223, 141, 193, 52, 30, 164, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 207, 157, 67, + 177, 140, 84, 41, 195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 1, 142, 188, 96, 130, 31, 32, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 92, 25, 47, 159, 216, 60, 8, 168, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 17, 44, 31, 207, 217, 43, 167, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 106, 82, 173, 99, 30, 37, 190, 164, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 75, 77, 217, + 128, 103, 87, 196, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 240, 88, 234, 237, 206, 215, 12, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 166, 228, 252, 243, 30, 7, 163, 119, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 216, 208, 83, 70, 233, 204, 33, 211, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 113, 2, 253, 184, 5, 138, 147, 208, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 67, 97, 210, 235, 232, + 141, 198, 54, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 177, 171, 221, 225, 125, 197, 69, 136, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 95, 224, 6, 91, 85, 1, 232, 154, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 179, 22, 46, 192, 216, 254, + 241, 78, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 136, 154, 175, 18, 77, 128, 245, 195, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 233, 30, 222, 197, 6, 216, 20, 21, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 23, 36, + 184, 47, 223, 53, 151, 107, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 49, 131, 190, 24, 114, 220, 99, 166, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 103, 224, 13, 98, 19, 122, 47, + 204, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 229, 143, 190, 73, 20, 9, 91, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 51, 35, 42, 212, 52, 117, 170, 240, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 45, 144, 12, + 178, 80, 146, 30, 166, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 57, 17, 165, 121, 15, 238, 225, 94, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, 202, 171, 146, 72, 206, 75, 100, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 220, 1, 0, 171, 235, 18, 166, 203, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 210, 244, 77, 101, 116, 86, 179, 39, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 190, 124, 134, 122, + 45, 172, 101, 164, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 54, 236, 207, 201, 100, 9, 201, 55, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 147, 203, 209, 159, 188, 4, 167, 188, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 226, 222, 71, 224, 186, 133, 168, 254, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 15, 254, 246, 42, 224, 165, 177, 30, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 114, 90, 26, 68, 8, + 226, 41, 175, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 77, 201, 183, 119, 149, 87, 40, 253, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 4, 98, 148, 230, 2, 23, 205, 224, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 26, + 112, 123, 15, 19, 177, 140, 152, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 79, 55, 116, 197, 57, 10, 27, 179, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 220, 209, 60, 45, 79, 233, + 117, 81, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 115, 81, 235, 240, 121, 164, 46, 68, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 250, 195, 119, 124, 21, 110, 50, 185, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 78, 196, + 35, 66, 235, 81, 20, 246, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 40, 250, 104, 157, 128, 106, 148, 47, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 252, 101, 113, 89, 95, 125, 243, + 111, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 48, 170, 91, 98, 162, 183, 191, 47, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 133, 88, 96, 9, 213, 50, 100, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 
20, 46, 96, 151, 43, 101, 141, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 175, 229, 68, 20, 158, 175, 223, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 95, 28, 94, 0, 109, 1, 252, 188, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 32, 215, 242, 67, + 159, 169, 170, 60, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 210, 25, 18, 148, 205, 218, 227, 187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 140, 181, 64, 143, 160, 70, 84, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 87, 252, 245, 64, 168, 102, 58, 174, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 60, 231, 63, 214, 98, 74, 172, 64, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 164, 139, 178, 166, 11, + 215, 20, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 210, 236, 124, 15, 193, 113, 81, 61, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 183, 166, 99, 239, 5, 128, 194, 10, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 203, + 98, 219, 26, 102, 11, 107, 130, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 239, 235, 31, 64, 175, 2, 19, 194, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 84, 30, 198, 145, 183, 111, + 189, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 46, 161, 77, 51, 188, 59, 222, 184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 81, 13, 96, 175, 242, 206, 141, 93, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 242, 250, + 2, 118, 103, 160, 165, 234, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 17, 31, 3, 66, 12, 163, 157, 224, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 27, 
119, 77, 99, 184, 95, 233, + 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 214, 240, 48, 108, 217, 163, 39, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 235, 98, 50, 63, 138, 26, 225, 126, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 12, 175, 130, + 10, 110, 198, 129, 191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 139, 108, 163, 8, 160, 239, 170, 35, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 108, 155, 244, 69, 119, 8, 216, 56, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 196, 52, 69, 230, 147, 242, 134, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 242, 107, 102, 224, 232, 222, 159, 87, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 101, 52, 253, + 88, 238, 220, 212, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 46, 13, 61, 62, 170, 192, 4, 57, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 3, 233, 248, 191, 86, 139, 15, 237, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 205, 228, 152, 6, 33, + 162, 17, 67, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 229, 3, 64, 231, 165, 10, 36, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 186, 101, 148, 236, 30, 35, 163, 49, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 116, + 97, 121, 132, 86, 130, 48, 191, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 80, 190, 17, 150, 22, 48, 104, 85, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 171, 242, 157, 66, 44, 139, 
+ 12, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 192, 37, 126, 216, 125, 49, 39, 19, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 39, 134, 44, 75, 205, 15, 211, 197, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 93, 81, + 63, 152, 130, 248, 73, 184, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 25, 31, 203, 241, 115, 104, 152, 174, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 130, 67, 206, 44, 71, 58, 235, + 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 6, 92, 73, 144, 192, 22, 150, 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 231, 183, 115, 82, 6, 111, 44, 154, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 34, 209, 181, + 252, 239, 164, 185, 106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 155, 78, 58, 155, 63, 186, 176, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 237, 229, 122, 248, 160, 78, 57, 7, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 71, 252, 131, 214, 106, 149, 245, 17, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 154, 160, 23, 35, 180, 122, 146, 183, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 20, 147, 196, 145, + 9, 114, 33, 154, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 196, 213, 216, 114, 42, 81, 252, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 148, 144, 77, 158, 164, 0, 37, 244, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 233, 174, 45, 246, 239, 231, 60, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 112, 253, 84, 28, 149, 74, 5, 239, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 217, 176, 222, 58, 35, + 
44, 2, 155, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 130, 113, 154, 57, 105, 117, 103, 157, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 144, 99, 142, 39, 26, 200, 251, 249, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 178, + 110, 41, 197, 198, 204, 10, 235, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 98, 214, 29, 104, 38, 30, 251, 69, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 249, 179, 39, 10, 114, 16, + 97, 96, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 95, 198, 179, 205, 39, 140, 213, 220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 16, 212, 92, 209, 18, 34, 240, 191, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 248, 8, 109, 191, 111, 62, 222, + 229, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 235, 200, 48, 199, 132, 47, 44, 183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 100, 10, 57, 146, 87, 214, 123, 22, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 190, 190, 234, + 161, 61, 186, 252, 49, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 153, 144, 250, 41, 228, 58, 43, 58, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 90, 133, 158, 212, 245, 231, 64, 153, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 138, 167, 77, 109, 52, 247, 174, 70, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 227, 137, 253, 203, 194, 46, 199, 121, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 179, 9, 102, 241, + 121, 221, 149, 220, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 189, 254, 41, 138, 154, 124, 141, 79, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 161, 25, 100, 236, 78, 103, 14, 52, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 126, 77, 10, 86, 216, 77, 100, 201, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 246, 191, 32, 245, 137, 150, 189, 87, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 19, 164, 208, 123, 205, + 90, 87, 120, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 58, 59, 143, 128, 100, 218, 58, 236, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 164, 141, 145, 163, 80, 86, 225, 16, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 131, + 224, 35, 146, 166, 116, 50, 118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 36, 195, 53, 249, 1, 239, 99, 5, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 27, 99, 197, 17, 250, + 129, 69, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 12, 219, 151, 245, 136, 248, 124, 82, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 221, 190, 178, 43, 97, 27, 171, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 108, 210, + 208, 54, 162, 254, 244, 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 13, 3, 69, 152, 178, 214, 234, 29, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 155, 220, 23, 102, 90, 28, 36, + 46, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 140, 157, 116, 203, 135, 234, 44, 31, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 59, 208, 148, 207, 66, 248, 130, 12, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 80, 65, 36, + 249, 59, 31, 4, 3, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 49, 40, 124, 250, 250, 88, 195, 11, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 105, 52, 92, 182, 136, 23, 128, 249, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 65, 26, 246, 117, + 172, 129, 172, 99, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 42, 231, 214, 189, 29, 162, 191, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 245, 77, 142, 16, 93, 151, 202, 35, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6, 32, 176, 150, 181, 19, 66, 187, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 164, 189, 1, 192, 17, 82, 123, 239, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 222, 84, 89, 81, 183, + 46, 96, 223, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 243, 30, 2, 28, 239, 97, 96, 2, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 130, 121, 235, 25, 152, 59, 40, 149, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 113, + 46, 130, 80, 93, 125, 1, 248, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 61, 237, 129, 172, 20, 214, 50, 172, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 189, 243, 178, 145, 249, 154, + 252, 133, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 53, 41, 238, 229, 202, 126, 2, 151, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 175, 194, 252, 87, 87, 86, 180, 197, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 161, 147, + 24, 236, 43, 162, 30, 169, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 133, 25, 111, 132, 59, 24, 71, 9, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 64, 225, 112, 150, 137, 242, 120, + 75, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 173, 69, 61, 70, 108, 20, 129, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 252, 157, 90, 104, 49, 182, 44, 236, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 240, 186, 217, + 48, 136, 7, 240, 182, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 115, 163, 110, 151, 167, 47, 55, 131, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 185, 192, 253, 72, 96, 119, 171, 69, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 190, 184, 69, 31, 49, 224, 215, 167, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 96, 28, 190, 214, 227, 107, 156, 199, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 110, 184, 212, 230, + 88, 224, 9, 106, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 227, 62, 96, 15, 210, 231, 79, 53, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 166, 139, 116, 65, 136, 125, 31, 121, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 170, 162, 172, 193, 130, 59, 54, 230, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 254, 59, 48, 178, 156, 149, 5, 41, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 79, 218, 54, 104, 10, + 187, 77, 123, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 126, 122, 148, 187, 226, 182, 185, 168, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 91, 177, 207, 200, 249, 241, 97, 160, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 30, 119, 79, 46, 143, 120, + 220, 240, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 171, 76, 48, 54, 76, 152, 37, 72, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 187, 236, 100, 172, 113, 122, 114, 146, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 109, 40, + 7, 18, 70, 242, 242, 169, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 157, 45, 166, 116, 146, 128, 93, 154, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 142, 65, 174, 40, 86, 93, 28, + 193, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 32, 33, 181, 84, 157, 213, 54, 134, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 243, 3, 235, 181, 118, 39, 228, 72, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 172, 128, 31, + 26, 34, 81, 251, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 42, 189, 62, 20, 68, 172, 107, 129, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 107, 88, 225, 176, 63, 148, 68, 4, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 46, 138, 62, 86, 204, 130, 207, 220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 43, 53, 211, 0, 62, 170, 181, 121, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 13, 24, 93, 172, + 183, 70, 227, 207, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 52, 200, 133, 165, 140, 107, 41, 221, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 124, 135, 150, 222, 226, 218, 62, 12, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 197, 197, 186, 27, 226, 134, 173, 63, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 120, 42, 126, 117, 206, 24, 249, 50, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 180, 14, 118, 165, 195, + 189, 145, 33, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 54, 7, 213, 61, 144, 25, 71, 38, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 56, 230, 129, 107, 73, 102, 162, 206, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 188, + 145, 221, 153, 141, 98, 152, 214, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 131, 15, 229, 20, 75, 219, 212, 117, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 211, 122, 77, 181, 231, 186, + 19, 7, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 96, 200, 168, 21, 164, 125, 164, 85, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 239, 71, 21, 146, 196, 214, 61, 104, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 184, 210, + 120, 31, 64, 56, 224, 244, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 108, 242, 64, 171, 112, 230, 197, 159, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 167, 175, 177, 146, 251, 185, 215, + 226, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 72, 129, 126, + 161, 167, 25, 97, 209, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 238, 235, 82, 136, 65, 99, 13, 195, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 135, 88, 251, 247, 62, 3, 65, 92, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 5, 35, 174, 84, 107, 30, 198, 183, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 226, 206, 161, 253, 114, 213, 218, 145, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 11, 40, 71, 9, + 68, 243, 149, 97, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 135, 18, 225, 131, 178, 190, 218, 100, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 229, 246, 13, 134, 231, 48, 239, 94, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 113, 61, 29, 193, 134, 253, 64, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 16, 224, 38, 222, 38, 84, 152, 206, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 35, 129, 101, 170, 117, + 140, 124, 77, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 141, 16, 32, 139, 131, 114, 67, 36, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 182, 72, 152, 58, 132, 89, 30, 10, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 163, + 136, 158, 211, 228, 202, 3, 146, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 144, 89, 192, 123, 224, 234, 52, 73, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 226, 120, 51, 187, 80, 230, + 25, 86, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 101, 183, 47, 20, 92, 43, 37, 110, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 128, 18, 182, 233, 75, 49, 229, 201, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 75, 95, + 18, 248, 140, 204, 11, 74, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 216, 159, 244, 66, 166, 149, 132, 143, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 144, 9, 200, 23, 116, 68, 109, + 215, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 35, 132, 220, 14, 248, 57, 116, 127, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 61, 125, 92, 39, 228, 214, 146, 194, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 133, 179, 205, + 204, 246, 109, 187, 171, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 99, 100, 233, 40, 247, 111, 49, 51, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 53, 240, 120, 100, 99, 78, 106, 194, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 112, 135, 170, 248, 81, 203, 32, 219, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 120, 251, 224, 15, 125, 135, 28, 131, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 102, 173, 153, 133, + 47, 241, 197, 108, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 123, 173, 123, 191, 10, 14, 148, 105, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({30, 40, 3, 3}), + std::vector({18, 2, 16, 32}), DT_INT8}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0])); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, fp32_1) { + float data[1 * 1 * 1 * 1] = {0.8329063818189503}; + float ret[1 * 1 * 16 * 16] = { + 0.8329063818189503, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, {1, 1, 1, 1}, {1, 1, 16, 16}, DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, 
fp32_3c_3n_pad) { + float data[37 * 45 * 2 * 2] = { + 0.44166954813792036, 0.49872269429620975, 0.6271994044987186, 0.9001409152216447, + 0.04307375857392848, 0.582114074620672, 0.9264698414266803, 0.5865297769079123, + 0.3427145650542528, 0.8794132005881704, 0.36259376947960476, 0.25924453383855506, + 0.0003788955781413028, 0.8791269471098638, 0.3569063921265889, 0.8148236302016606, + 0.6810816515514638, 0.03226249236680412, 0.26536135121543347, 0.5210914867897581, + 0.8503025686377245, 0.19053767468224259, 0.7644677558027794, 0.6664800591976118, + 0.30431620883041643, 0.8857867809462828, 0.23010486455028845, 0.8012012723630623, + 0.15785038224109538, 0.46640704279272016, 0.25505471899614596, 0.6507807730728263, + 0.9269203960072342, 0.5013249220480327, 0.3667980786598808, 0.01847183664685148, + 0.7312365836651692, 0.4914649117424874, 0.7019553951808579, 0.12714437420022462, + 0.8294678547271637, 0.8851055753508216, 0.6437687138614585, 0.11525453037463174, + 0.4275215513314521, 0.5253699854838281, 0.45462695932935426, 0.15457705654247023, + 0.6685263590871087, 0.4521564166563363, 0.09208630834065346, 0.7304539916759515, + 0.5688754184736142, 0.49111394776088735, 0.1674994497775757, 0.5538550137177134, + 0.7348172902756986, 0.18706261818854597, 0.7985811459510032, 0.6884899594599051, + 0.44002668384141674, 0.8632130905792896, 0.004235301770942557, 0.06305399665355571, + 0.6433914392340844, 0.10406700769064525, 0.9722547340793554, 0.3549045467615085, + 0.6550667959796783, 0.5888129420262596, 0.42311798749778873, 0.04239916419002787, + 0.35252017540885294, 0.5309221355543069, 0.8259993625275874, 0.5620492807776227, + 0.14651025251043837, 0.004952319375278513, 0.14431184294517263, 0.7297384973354955, + 0.1318827621416322, 0.9661794953197592, 0.7372411999886771, 0.09268565975236731, + 0.09152723949279828, 0.20428426118359488, 0.45457639494703495, 0.20630548950635297, + 0.3474262545596789, 0.24309042232942835, 0.8770054067746237, 0.7095356570435677, + 
0.4862410339098905, 0.6278417354346337, 0.6430646798804541, 0.8930377673795764, + 0.45401082863472053, 0.3013071693491415, 0.03826965244178693, 0.4579675275762498, + 0.17396993513666825, 0.03348953173020319, 0.7763284429361277, 0.19413855245370026, + 0.5088777106591748, 0.07560931807279525, 0.4804645674817959, 0.8748843605341408, + 0.9572339540509394, 0.8332301944444822, 0.5180034759098409, 0.3538955519023814, + 0.9399808405489267, 0.9276909634857712, 0.3788159574747495, 0.1229623458205813, + 0.5833197346884914, 0.6926715865432231, 0.5734171907621304, 0.46491295105148867, + 0.9124637727960749, 0.5602121372899481, 0.1826212314925999, 0.027880553346125603, + 0.8156926920482467, 0.03454352265127836, 0.162202255176494, 0.7883915454558059, + 0.9028294403098109, 0.6344004898947448, 0.9575352194569359, 0.39648975671140774, + 0.4971422391169885, 0.15316020761695914, 0.9024464867117323, 0.7124643155516597, + 0.25139569740384937, 0.7456387094968034, 0.18019086659616945, 0.4915702302822955, + 0.9526629990842792, 0.46986842960309116, 0.09967012130313302, 0.6857942034808334, + 0.8496943481229121, 0.6274897156346233, 0.3363965003078747, 0.22013224995653757, + 0.1265589331571274, 0.775581456829888, 0.7183751205972345, 0.6943696066496853, + 0.7907764661094938, 0.07556252308338662, 0.025797182427518717, 0.04913005830458905, + 0.17996515611350639, 0.5230143227286745, 0.34349775412299366, 0.4438111512341175, + 0.40041638314818695, 0.33159472695777015, 0.22531896687963515, 0.1788476661859162, + 0.5080397153321874, 0.10285375137375108, 0.5460089461559253, 0.5525118585986091, + 0.3211194239648827, 0.13669157841416257, 0.3133672315446907, 0.735254559436984, + 0.7870851762060899, 0.7703373552655466, 0.8045227957165217, 0.08660049895372113, + 0.7921313766981629, 0.1403236625530464, 0.555876019856778, 0.986325683928554, + 0.5269286567983883, 0.8117705343353353, 0.4813280272416872, 0.10194740570021243, + 0.7225038524751558, 0.23809778498286072, 0.7469415625364333, 0.996297059126408, + 
0.5611298409256944, 0.2348375918144413, 0.5327804681949095, 0.5595128219901133, + 0.5853334434070222, 0.08945118660744811, 0.7939122988767758, 0.1281311616830373, + 0.026684856314108663, 0.33002824843625456, 0.20081913102711146, 0.15734836104585115, + 0.688117042002729, 0.4952272679127343, 0.16342026476030647, 0.15455979948213694, + 0.8444262077285541, 0.5541139404295038, 0.7410735160226759, 0.6843733820866527, + 0.06350491342649123, 0.05860482596495664, 0.979769354784465, 0.9695538998205576, + 0.7798902407824239, 0.14254936200789714, 0.46985773018681976, 0.6511886618287761, + 0.3156636155985213, 0.05546611943559421, 0.6863635798404532, 0.454276249403717, + 0.4734006703854433, 0.16655392496411603, 0.8474697511604699, 0.4331065163054624, + 0.15931023211489348, 0.8411555966233066, 0.3287981027308836, 0.43000518947591215, + 0.034307016619573694, 0.1047216771050905, 0.7814064498207857, 0.783754833871294, + 0.772280025098758, 0.2522112326848579, 0.7651022871471064, 0.8983537696675657, + 0.569287257671341, 0.9917311118591357, 0.40748074874761653, 0.004781603549286251, + 0.8592478084009422, 0.8999214821027939, 0.6201674240879183, 0.8634877656131325, + 0.30911684722630806, 0.8462638251914404, 0.2740165716458879, 0.3496459203733686, + 0.5289569682951596, 0.7289914599662678, 0.8627263150228915, 0.010395869176374584, + 0.10831430505089235, 0.6643425015917764, 0.6702440817105968, 0.10686317273713519, + 0.6318991209235206, 0.697819238351, 0.20039769284162146, 0.31608992920712375, + 0.9279919033196171, 0.3078318765135186, 0.9156578210903792, 0.6729912075120715, + 0.5330292052871144, 0.15053669568104944, 0.0026964571220174927, 0.6762396591853704, + 0.25366556394137707, 0.8184493205572059, 0.13402745339052125, 0.1474086507975666, + 0.2660516092693641, 0.4518123963691467, 0.8410724469405078, 0.0726699229940776, + 0.5356748729714257, 0.0031109001593860874, 0.5129095474419372, 0.5923168490645327, + 0.0171953887490085, 0.2881222840576103, 0.2620231768659732, 0.1956312970045544, + 
0.02117671429493495, 0.3978287780669312, 0.35291484742075163, 0.23798182084711117, + 0.9154277554227607, 0.3573124066140412, 0.7016550104498763, 0.4597870400283238, + 0.3473046036825943, 0.45444970553182373, 0.13303487770402356, 0.39278746229491135, + 0.7093801271897968, 0.367087024099215, 0.9834248626333502, 0.40653157438392773, + 0.22814820706964323, 0.3999771465739842, 0.723579466970588, 0.28409364950416216, + 0.4162558246910435, 0.26789393968780595, 0.7884278688266475, 0.16941774500541196, + 0.4132792226194869, 0.4117545169358421, 0.6913332103347046, 0.004844335655207477, + 0.31176712205095924, 0.9783955783157174, 0.22245899232364708, 0.7607516373080648, + 0.6699076118671365, 0.6300809314927382, 0.6516946836403165, 0.3444010702742777, + 0.4628674105896252, 0.6519430323222988, 0.3257349583819974, 0.17963125887882414, + 0.002706941176983668, 0.03426279779790331, 0.4883787842182088, 0.4425233931083449, + 0.5799461493120972, 0.6708757959742054, 0.8674769281721243, 0.262003243682855, + 0.4372276660366662, 0.5433833288253564, 0.6065403092061109, 0.14683051730486973, + 0.8587857879261196, 0.7770725423138007, 0.9067975316888497, 0.4423921837812813, + 0.5051884365664271, 0.4152897566207333, 0.693100473115213, 0.9583377108542843, + 0.27759836862359344, 0.016186137478753526, 0.23247304472981967, 0.537577568521976, + 0.8297372643744151, 0.5329910099563924, 0.4605515038321585, 0.8671075025029406, + 0.9339605564631311, 0.4340343338394457, 0.8348635465517218, 0.707075923751942, + 0.7150532682074798, 0.49634335609052604, 0.4848361702081525, 0.6118093255122149, + 0.4359487952693004, 0.5647952717516906, 0.7842184667774055, 0.6474975088736912, + 0.6475379020612276, 0.5846928020032719, 0.696575510235226, 0.37832932314324796, + 0.34867474286113354, 0.17704671513095693, 0.2593928030932182, 0.857698230887831, + 0.1256434578956731, 0.37949319710948726, 0.17402554677973225, 0.5457870596634349, + 0.14465673405698776, 0.8125590098154514, 0.8364261161659661, 0.8394235886532205, + 
0.0663454514818631, 0.4967104355941421, 0.41482687096275417, 0.15349812178484878, + 0.6621924036536312, 0.7563120977819371, 0.8641238867913393, 0.7626846294714452, + 0.7621883076372917, 0.9444805267350456, 0.33714991558453766, 0.9116434249533474, + 0.9062449282680001, 0.9285531098995821, 0.8518751815185701, 0.9707013478619836, + 0.9536722219436357, 0.04085651969533721, 0.9357470753188956, 0.21205999749128934, + 0.8565925393476546, 0.10968185151529919, 0.056254828649715205, 0.5477583970869144, + 0.3527447679275013, 0.31835984148358976, 0.6189044426563634, 0.9992452614602896, + 0.8334993727563206, 0.46420790874197915, 0.994783618719053, 0.7043524832141461, + 0.7163184369676103, 0.0923742336495238, 0.7131063882750018, 0.07218330203025192, + 0.23591825350189433, 0.5824677076820898, 0.9502632407373436, 0.4681651394763202, + 0.7469435868881419, 0.7378166679174171, 0.11088424948614228, 0.9958496145910709, + 0.9181068856174649, 0.9465586483627629, 0.8604926504358261, 0.6994947622330466, + 0.924934395004348, 0.8078161053529695, 0.017435962284417794, 0.0805697263118933, + 0.7501721622884192, 0.813980330473732, 0.4523711551825791, 0.6886465523640434, + 0.009746449705100857, 0.6040989949225825, 0.14407762016335435, 0.334479527356387, + 0.7800240186513618, 0.6965216391465563, 0.1003697520538046, 0.9901326646819454, + 0.05160876225891253, 0.9153801935758921, 0.9649955048434758, 0.9576290979616602, + 0.6248224457369207, 0.6432716251247464, 0.4352728743722325, 0.7542209866552148, + 0.6376445054012511, 0.4805965127216455, 0.5434097746661704, 0.15509383431110246, + 0.6150499103914511, 0.9899861715766959, 0.7859145329474153, 0.3319379098474641, + 0.4828663703220699, 0.9603634971811542, 0.11523232295259123, 0.6241128331334818, + 0.8440888299533157, 0.2639315049303803, 0.11831249430020163, 0.32307593675839086, + 0.8801793594318112, 0.6064808139059639, 0.2708796186083072, 0.6508322056689247, + 0.7222913721031597, 0.09423448438874371, 0.9407948562702801, 0.20749537504490367, + 
0.3882390324665115, 0.8586235386400609, 0.8951425783429127, 0.41937200490620796, + 0.502743076476809, 0.1885721968432893, 0.21779656404027659, 0.05632227344272123, + 0.8753508332079064, 0.14195638196987703, 0.08338242265746565, 0.2527475219333315, + 0.9637412051951595, 0.3872374905527389, 0.3434274206390554, 0.4768301709526289, + 0.6216711520273386, 0.9577720736573577, 0.8366095208978671, 0.09873370493806377, + 0.34507354975285465, 0.16348883143568127, 0.7368307465766999, 0.12368561854617122, + 0.5022409721697791, 0.4207392526250259, 0.4318603731716837, 0.6901418695378564, + 0.9466003601217432, 0.015732027662631376, 0.5008617386373394, 0.31131274888539573, + 0.414922388594683, 0.33241342024540144, 0.5551877728925136, 0.9611960250938479, + 0.5635060389002671, 0.3154844227181034, 0.9944328676646609, 0.04326840036964252, + 0.5930052832199914, 0.37649958979097575, 0.5705390234345237, 0.07163674495064343, + 0.6600253634230627, 0.3750038272037318, 0.5659354751375515, 0.06254107456875724, + 0.10335697783639897, 0.16652286466225585, 0.5707184258103133, 0.3374169863902369, + 0.08312090864037991, 0.7458939503901071, 0.6279588295081213, 0.6360675805312543, + 0.3309539816379585, 0.5609499046967278, 0.16279115803896915, 0.39117111907339097, + 0.05504760299836842, 0.07926737892234881, 0.8266429406491902, 0.6446835949605887, + 0.41451837109645195, 0.697016300370185, 0.5765498555193996, 0.21891209807097622, + 0.2878984260468489, 0.4070582529802277, 0.587274570394774, 0.7433717546734127, + 0.6794459570364636, 0.5285685024299306, 0.7022610940802061, 0.3630343787104311, + 0.26492426370897504, 0.7019263232239848, 0.3564208328852714, 0.5995993663092729, + 0.3069506582096254, 0.5054043188417666, 0.5605623483071223, 0.8240599479452418, + 0.08312792795924828, 0.5954726203941156, 0.8711410580944787, 0.07513190384100732, + 0.20245674686239068, 0.6009488186681777, 0.7542068310615544, 0.2535561324617509, + 0.9651150999001393, 0.37806608862254276, 0.12580690683106277, 0.259274604322687, + 
0.16538573936154377, 0.5362359184350861, 0.19511467633329538, 0.4765660481609173, + 0.20497483692388607, 0.661172161312716, 0.7739464169701958, 0.2578199680747182, + 0.566960698002111, 0.45468429642073815, 0.5473052414387926, 0.3305419735734779, + 0.15402104039426656, 0.5079696901117611, 0.0028599333556914575, 0.005070967981792185, + 0.4137445986366697, 0.1731747849592724, 0.7095330866753558, 0.3830626973957306, + 0.7243057299151163, 0.12104134786710796, 0.6053678545479122, 0.5910679449252563, + 0.75433224500989, 0.4887731715287613, 0.6213530721931816, 0.48092522218469214, + 0.08834338294524535, 0.5044126229512266, 0.9592522642800507, 0.5299158564080676, + 0.03213652586063531, 0.96934027201795, 0.9008270961457054, 0.07922505757588172, + 0.30171861955480994, 0.731220848725321, 0.31492945179529197, 0.7349473013224028, + 0.7564403400075214, 0.8112472481796852, 0.9281003329677461, 0.06879913829175632, + 0.8645557850702533, 0.9247754790061143, 0.9662156590336831, 0.6091930957781132, + 0.8562204252001854, 0.13149979362482211, 0.4892800652685698, 0.48397322622183747, + 0.8664271685603836, 0.5334820731544022, 0.24187432915651907, 0.3905432650552372, + 0.2899826696811185, 0.6619315315715587, 0.5516319990282166, 0.02001009486312677, + 0.7758553067161579, 0.9409356740169719, 0.01805150820824497, 0.6889770658457663, + 0.40290983069009967, 0.7877526647569125, 0.40773854718661073, 0.19665293993866206, + 0.7951695298519721, 0.057098343065618895, 0.937762045700286, 0.3847005267950533, + 0.6329097221142049, 0.20756942710750204, 0.6325266311382628, 0.2602679913341992, + 0.7494229815234873, 0.20173899516231752, 0.5274875157434945, 0.8761433546392916, + 0.9733924403534888, 0.12726293739895855, 0.8461514960825782, 0.904478428722847, + 0.7412173002569975, 0.5368463241107692, 0.37295302932506535, 0.12409919407314007, + 0.13664668096168653, 0.9332231179113105, 0.4204274634950068, 0.3662717495548532, + 0.9620734470397198, 0.03866993296032162, 0.41398087251310967, 0.42887924711912384, + 
0.05120384888603313, 0.8198124151484139, 0.5936560909932644, 0.9306970131403783, + 0.6701268592511975, 0.45814066000975373, 0.537111364957012, 0.009393561114221516, + 0.7492150910884211, 0.6828762184607317, 0.49279801971206494, 0.9373830941283787, + 0.9261616816162551, 0.5556499402574135, 0.14406009508002438, 0.08685296828169053, + 0.6862725096583036, 0.7826235255629705, 0.012583857244834684, 0.27231098100936457, + 0.7606300137316441, 0.04737771542343672, 0.10895765819373193, 0.16422688852354173, + 0.5411934864586017, 0.3503974356532773, 0.04167792834859119, 0.8668225584467041, + 0.2447863998800348, 0.6173105668319053, 0.13127552364632467, 0.26679638314132337, + 0.0016782306935890778, 0.8199324263342636, 0.46313043324559466, 0.8352296795606325, + 0.7724977730522188, 0.7900572729527513, 0.5674340921173507, 0.3304657489407983, + 0.8746275884659321, 0.695079405824329, 0.9404644310352864, 0.17774490462565573, + 0.7195237832360659, 0.30363932311499975, 0.1427392014487997, 0.49986747926626385, + 0.4898245492370237, 0.8876889206430193, 0.46994161724285666, 0.45225373046417006, + 0.27623319940397784, 0.3433174065200666, 0.5495665705069847, 0.27826615999689375, + 0.47360004601500694, 0.7693887217890181, 0.9827985717552564, 0.01591156332579713, + 0.5438492509363436, 0.6645417117507658, 0.11068898807947902, 0.21304380031505676, + 0.3094509109895641, 0.5911643553246058, 0.08898689748408184, 0.44032115426769214, + 0.6766005056045522, 0.7803167409842258, 0.7548778490392412, 0.8750640139081357, + 0.6807570960866145, 0.18918313024237488, 0.9360678606466022, 0.23840089872753056, + 0.7179374654029288, 0.8968132928802025, 0.7954700851158711, 0.13354192470167037, + 0.787922270567647, 0.06493513860209299, 0.32331047016203274, 0.2548021628164966, + 0.3442212152056977, 0.45470399424076535, 0.706540304326524, 0.48944233335635956, + 0.61916128232351, 0.6621672645635656, 0.009466677125219869, 0.6578077020064123, + 0.47370621362547916, 0.6284468697795359, 0.1556312453987625, 
0.3777112139212466, + 0.7626685229206793, 0.37841172644923027, 0.9211139965639348, 0.7495032615284326, + 0.1908610884109866, 0.3187093411707905, 0.951326141526167, 0.26602024491194587, + 0.7230969298199336, 0.8258839233627036, 0.7195749102963507, 0.5156705142078448, + 0.9676336916897936, 0.9822019927635267, 0.6893248992970754, 0.8394237537024324, + 0.5048744155547631, 0.5600171127897822, 0.24237549807347103, 0.5553441766038674, + 0.400032358315232, 0.5924254897959692, 0.49049019093169655, 0.3272464145611804, + 0.32823470490794115, 0.0779056577088989, 0.11535306930465383, 0.6070551704766106, + 0.008550701575643194, 0.25382552216958476, 0.14683034622018776, 0.05928383409938465, + 0.3364376724973619, 0.4384069180466962, 0.7960198012575255, 0.7381262016399551, + 0.0830295155089209, 0.3392559277958307, 0.3748747053041538, 0.7202487172523507, + 0.477717208357743, 0.4245028182503814, 0.6780337701235462, 0.8309182605925579, + 0.4036066726063734, 0.06490556159285155, 0.22827893829056423, 0.7470260145990597, + 0.582776239092556, 0.40212915296179563, 0.6589680457817595, 0.39532018866107776, + 0.578728908737299, 0.23662457127647374, 0.7164080215580085, 0.41021816655867793, + 0.7085848276965371, 0.3545785551821127, 0.06358559421596888, 0.2826249834817621, + 0.710667307687193, 0.29150677713279993, 0.0887098458979334, 0.5774041753088472, + 0.7219420919268968, 0.9655106101896986, 0.7819344803079296, 0.6747135253892305, + 0.44224492508612845, 0.5787672470313454, 0.9462145861556999, 0.44401252969036187, + 0.7379121647334217, 0.6185705931616747, 0.570946712866895, 0.35721118540607444, + 0.024735761139530354, 0.8865093123778779, 0.7230320518146561, 0.1025627032248585, + 0.8091634338643698, 0.10902301152380622, 0.40574258116180184, 0.46883064076847114, + 0.9029713299396926, 0.1866901581159216, 0.6776005091994876, 0.7640363576349184, + 0.12340368237851163, 0.6487556337689282, 0.12011975313798184, 0.5684341524056692, + 0.9924917935318855, 0.7478475099481988, 0.7002597682484383, 
0.31922441664723433, + 0.5886250331319656, 0.2971874200121487, 0.05983248281789433, 0.21195130191334866, + 0.43259929603315683, 0.3657964532681872, 0.422025842666363, 0.2455538391481168, + 0.9358736726716493, 0.3511809417479921, 0.7929841784064777, 0.8209365678775756, + 0.08533557521396007, 0.7086506127043893, 0.738759429426645, 0.5134276556344041, + 0.16958103095811305, 0.4422196693468696, 0.451177364786558, 0.41319424117196857, + 0.5624756994733194, 0.14495163003479516, 0.10920802220731785, 0.9895448340230715, + 0.029588915602866472, 0.6016932960700164, 0.27150542484518414, 0.7471631357567835, + 0.17621943714203347, 0.13522066374694341, 0.42766542351716375, 0.5207122924470425, + 0.15742462677687685, 0.42783133243893656, 0.4784696148715032, 0.6938545381255015, + 0.26684421786346324, 0.233620214954685, 0.09536808129336949, 0.5862312824353137, + 0.8456571389418028, 0.9613973085600989, 0.5705370484057619, 0.2315644238782848, + 0.5099827307022872, 0.7717408628492098, 0.3128486066217375, 0.10336561119676724, + 0.8280148670220571, 0.8277068369585787, 0.789384620312103, 0.9730381350553119, + 0.007897168430538004, 0.9852937629591335, 0.45570885553393825, 0.7606555946172777, + 0.913569293118159, 0.2385371173197105, 0.6015102886725543, 0.3384380355432103, + 0.25306680573262974, 0.6025304361192038, 0.247968859398907, 0.7385416103790785, + 0.819983055785511, 0.0778803181290173, 0.9517353479323505, 0.4213994102939299, + 0.40700176557932166, 0.3429557305469143, 0.29826401213617126, 0.23194476754070237, + 0.21390073450029923, 0.30921486213973715, 0.6956611010430033, 0.10064886309635879, + 0.8306959283917633, 0.4457466644086183, 0.5600876670999226, 0.21876683797406515, + 0.607170300883798, 0.6927939274746441, 0.07592251569758046, 0.48122637945466284, + 0.19311250465001595, 0.6884518017317282, 0.6725623115052313, 0.6094713205837278, + 0.7183240748928417, 0.28013330071727127, 0.10528876606606097, 0.8956291727875784, + 0.7322513474423443, 0.4965665845905123, 0.3759712418524386, 
0.7615850714624053, + 0.908938917129835, 0.6540522027619009, 0.40034313112058606, 0.8450595534897567, + 0.5169567047446194, 0.1332349737030577, 0.12194193098553696, 0.3724483030206014, + 0.47185004668127173, 0.7702062881160354, 0.8953755998792168, 0.3009690454643301, + 0.1929840130822802, 0.27350356357418815, 0.8347215213785666, 0.7431351863889715, + 0.40232408439438583, 0.5831316710851149, 0.9473576096207942, 0.954941151802188, + 0.774601228625806, 0.5427467349016213, 0.9149318015445747, 0.8038592659328729, + 0.8372199679898146, 0.784435735068533, 0.35012721512924905, 0.25098714374175435, + 0.4810893945908544, 0.4154629814539512, 0.9300156325395408, 0.17563310137879862, + 0.4434739696926412, 0.7535583729153315, 0.953785725974651, 0.7213564489816591, + 0.15899647174144238, 0.12978357993597545, 0.3262803245833792, 0.4139762649613933, + 0.3445008973870618, 0.8425734197751842, 0.5277846351757475, 0.7065114394592031, + 0.32729312211881234, 0.5894793485408748, 0.10049187173673446, 0.9563454091674533, + 0.3855457512311107, 0.9793338044263413, 0.16558401729427663, 0.7860014417175775, + 0.4066017207846868, 0.45884461472099725, 0.2222196136400102, 0.442118114505634, + 0.20349673349829012, 0.9445531111032253, 0.2818945178574618, 0.1782282767489215, + 0.3588002237590776, 0.2232301590771144, 0.25311476538632394, 0.09334735841774455, + 0.4046449179723879, 0.13011810353508024, 0.597960827441359, 0.44099353489775484, + 0.9927600319614182, 0.2766332087457337, 0.555975280005527, 0.19602419854598585, + 0.45652972921713997, 0.47556683616513107, 0.08550185347264039, 0.9702684508620554, + 0.5754205597341694, 0.5141225724888612, 0.74890154080135, 0.7539077128632379, + 0.8254422585423511, 0.9011355810005489, 0.9799252557200413, 0.9640998959870103, + 0.6569047664343146, 0.5878134223271019, 0.7130005751400752, 0.8003192989370567, + 0.7010039097601395, 0.42234901143062353, 0.18015322299830117, 0.01740875604679193, + 0.9750708781143511, 0.4572479693783218, 0.9855372047495685, 
0.09206529945564446, + 0.5711497560606417, 0.1679726709377053, 0.06773568484484738, 0.8218672059955199, + 0.8273339336262673, 0.40583985447908166, 0.5582481349367698, 0.7412209222435967, + 0.13102642027744393, 0.23709012984483846, 0.8322065900045672, 0.875257888019629, + 0.08295895288025279, 0.36616289330911533, 0.25819671005181166, 0.9026395858304028, + 0.16791466338389482, 0.8241732346126419, 0.42316840271800416, 0.2688902357901004, + 0.8848628835533121, 0.6387651318446043, 0.9820265716705749, 0.05083812913513497, + 0.6018123471620269, 0.6071974229361227, 0.7003539588024469, 0.3849577416732348, + 0.7875744952742323, 0.8458393838748731, 0.39131470600778995, 0.9147679061819083, + 0.058148911362952194, 0.09715518717781602, 0.25125539353796833, 0.8183006868514918, + 0.3976588665651378, 0.21645747068138055, 0.813199176397541, 0.3227045959121022, + 0.7626679577973312, 0.7732748600158968, 0.8381580698250548, 0.01411165406485304, + 0.6970250251456936, 0.20856953801272204, 0.5004754874071722, 0.9887008685761358, + 0.4381810472866039, 0.43833444890909934, 0.760506513615787, 0.6517394121082511, + 0.1921241856189122, 0.661218606505018, 0.46426532888903915, 0.74762730164112, + 0.6450987513912385, 0.3650460027735124, 0.9020054771131834, 0.6023359473085469, + 0.8073753891922654, 0.49842897975867795, 0.8026081030266163, 0.46297101516119865, + 0.5694205222123573, 0.07133473852731675, 0.7996957890195311, 0.6952813429840727, + 0.03589630770849228, 0.17441351939559846, 0.08112678281565666, 0.46539398553912337, + 0.7715926856540837, 0.2400442117752748, 0.14807847772129135, 0.9588236144252194, + 0.11882941999956287, 0.5870045264346687, 0.8397711336669689, 0.0047686709774116265, + 0.6424258749408532, 0.1938389638426371, 0.23535604849535918, 0.6153562699998499, + 0.5674651194005915, 0.9574547370135406, 0.6673877311358172, 0.6024659570158596, + 0.2537965751303326, 0.1998890286210746, 0.25374747576575163, 0.5957357095829988, + 0.9785797706775553, 0.48860432385099484, 0.46100643186058265, 
0.8119996911862927, + 0.1719107077878761, 0.8907528336068216, 0.3064627895723101, 0.28039932458444805, + 0.05436789890625704, 0.12179038600806547, 0.6320797033253904, 0.6407735963299092, + 0.012252317615219721, 0.46897400918318044, 0.11368618234391159, 0.8637411051962457, + 0.5218886566614853, 0.23894283106963043, 0.33839301759424467, 0.5789495887799048, + 0.871828373326038, 0.18575252270884057, 0.522102717777887, 0.18368331103064894, + 0.27945906904933726, 0.2062812534890599, 0.795569442779689, 0.4265078800776473, + 0.8822423084388226, 0.08253037402894248, 0.8381685559588181, 0.8429147192278797, + 0.30113632953712954, 0.5744481648284241, 0.3360448377607559, 0.8045207074115083, + 0.7623491632691546, 0.9667479058542807, 0.15696279503668642, 0.5614334497181991, + 0.05299196333039602, 0.3310241654523084, 0.5251342679049629, 0.33947354089270965, + 0.6927360832902495, 0.8055656752518913, 0.03839127781368057, 0.810362056015736, + 0.169516639560239, 0.025971266365874546, 0.8323452802944274, 0.6772678374369094, + 0.8799304214533643, 0.7868922706469863, 0.06384231674359508, 0.6239133178324836, + 0.860071919420882, 0.6024072287820333, 0.8654302932957053, 0.014289087987596427, + 0.6491800288758339, 0.9934575064042653, 0.39828502257716103, 0.7711828371272454, + 0.04276236562186597, 0.0878523368193097, 0.016295220243004782, 0.707555000570396, + 0.5349001531767665, 0.7926619647815201, 0.22239473484973193, 0.5834271207574656, + 0.23911530279950155, 0.05494639736246243, 0.653771450432396, 0.6266097818002464, + 0.8730322260986225, 0.17815376014498752, 0.5902376063535052, 0.9856015341087936, + 0.9372545272341452, 0.054655650777180154, 0.3782380387852313, 0.2741353451838012, + 0.7026312965246855, 0.7877406082349225, 0.31811860326851227, 0.3609108843642973, + 0.38263604482201086, 0.13812506406463143, 0.35809649216351, 0.6408490345865535, + 0.22657303478615087, 0.7139319442180692, 0.08665946538448721, 0.6371169542426284, + 0.8133846139493341, 0.4935473822248203, 0.48500998781391647, 
0.41259481555123245, + 0.18761402739010324, 0.03671626111478854, 0.8837320406702113, 0.7495789807241621, + 0.9629831550673493, 0.8481745584611498, 0.5997382279267112, 0.37690314873798403, + 0.9034095322749641, 0.26635700483893976, 0.4704888516653011, 0.3129033899445508, + 0.9471372719265045, 0.7124521090845021, 0.49048384871218476, 0.616269064611748, + 0.697194390936586, 0.24682345519427384, 0.4220967517637866, 0.20094037871951687, + 0.20927555469706427, 0.8576108449387947, 0.5066938318558056, 0.8503982592577363, + 0.631045959341615, 0.7301205029513615, 0.7900745500343653, 0.06399312445029615, + 0.8352082664190045, 0.5624961408696214, 0.1455459751211956, 0.18576472656679532, + 0.9278384321220222, 0.3638274678244726, 0.8140580775258335, 0.59663474685793, + 0.2611229933525716, 0.7582957722499413, 0.213412326850846, 0.7330349137078572, + 0.5587010898818803, 0.9648517530634904, 0.928886818960128, 0.07614109805077285, + 0.6296359067003408, 0.7842912136945769, 0.18699379337075595, 0.6893430524803945, + 0.2963695031241057, 0.16475486574151976, 0.021096952333120034, 0.1787552300041606, + 0.681887814087412, 0.06253482108181252, 0.5533515389185333, 0.5766319811068539, + 0.3879511258310916, 0.5821652454564654, 0.5127156866627538, 0.4351858925138925, + 0.5336050341777332, 0.5153993893640534, 0.8868906748414581, 0.8541960713608484, + 0.9031292676920084, 0.9930529756601338, 0.2624862020804677, 0.6845743356987847, + 0.5059431795785202, 0.6114781272159479, 0.4929603053577416, 0.4187164547736638, + 0.17423229087168046, 0.4654345764701384, 0.5106915402640104, 0.3761111779991463, + 0.3270362471197956, 0.8315518503970734, 0.07320996584499662, 0.2939829973136848, + 0.30069138960926156, 0.1816124283177819, 0.605345886684561, 0.9364108699088778, + 0.25238316568772057, 0.9389082148050869, 0.11821753312055572, 0.08402075728466218, + 0.9774163423334867, 0.020019685511636465, 0.574877924233263, 0.019342737435628088, + 0.25098369099747053, 0.8939129390903313, 0.7063330653180013, 
0.17401950134326727, + 0.16817433532530202, 0.8808350209113772, 0.8648914451473718, 0.41970430629069067, + 0.5928064631074025, 0.6348000014542999, 0.08049058971369172, 0.49230924700923095, + 0.43992063804300274, 0.8614758746615542, 0.5890699627810956, 0.10086556748929099, + 0.24580837966662072, 0.9640011403006244, 0.288377453440137, 0.8827004022267325, + 0.4315900007688692, 0.008783581447682987, 0.7947850520195106, 0.7483468764437824, + 0.2781528168084697, 0.9288190943293071, 0.3447082711544185, 0.38620073700337143, + 0.8317453227483677, 0.4446885718480822, 0.6240667946178164, 0.37797738720053087, + 0.1541487121198002, 0.439776809837106, 0.5579835891339692, 0.0039039967843818024, + 0.5351445193071775, 0.4363519321144812, 0.6194437558335423, 0.3294361257279106, + 0.16805310823628328, 0.9103789325896244, 0.9854382252869921, 0.7944980236743932, + 0.23222514200325173, 0.5190534738497645, 0.3768858668878222, 0.06925009366749613, + 0.5693991316492101, 0.6140094816193452, 0.16708280972702905, 0.5910717076565115, + 0.24899869036799982, 0.011095671877870528, 0.7387365281384822, 0.9584144280651584, + 0.41368177795272276, 0.6312106881696702, 0.19772963976896507, 0.0556885292285445, + 0.6979127425560141, 0.9831761079816763, 0.581929629084124, 0.32668620024644646, + 0.16810257168867537, 0.7158554332563897, 0.9817353315293328, 0.5016887910216669, + 0.5985773887665021, 0.021962320541030644, 0.39844810901548067, 0.8172857931535926, + 0.24433885971836533, 0.38399155630054016, 0.4650670553447489, 0.8523303083930169, + 0.2832773508785057, 0.5186096104322861, 0.26427236635894436, 0.1748323344137528, + 0.532977441185836, 0.923008426280978, 0.08812045183547601, 0.06166596692673654, + 0.2733232728942776, 0.5952220500968215, 0.796791418146551, 0.49012709572096813, + 0.4847106505484732, 0.410225899424006, 0.10014225972601243, 0.7681777068057823, + 0.8115591540415253, 0.8141727646152921, 0.269675657615838, 0.6515965875593094, + 0.3350712240949053, 0.5273810538588848, 0.9902861843315299, 
0.12442146199879256, + 0.27269060427074066, 0.33735224674100495, 0.5785406650526382, 0.31283091439122546, + 0.10583596581040589, 0.9363565147406563, 0.25987680147606096, 0.6902954653367727, + 0.7135945681401519, 0.12032639174445858, 0.11208451234437578, 0.19616941210980066, + 0.972627074755428, 0.9229605513358294, 0.9504013942535778, 0.5000302685434677, + 0.7800778214445974, 0.4972430475561045, 0.029689325117214893, 0.5645688414899266, + 0.42937813886846965, 0.7357423355385769, 0.14269330674477565, 0.4429705595848673, + 0.8613658262003889, 0.10944536582441444, 0.2288025284434444, 0.8841636807579821, + 0.22431946306168216, 0.65505768427964, 0.4914181370527274, 0.3496221859162666, + 0.668298649743206, 0.6039442327642132, 0.14626715991443162, 0.13036177793631887, + 0.012905395010521903, 0.5929261786522119, 0.28915355169129864, 0.519673841594743, + 0.05592958934392844, 0.0455377907149731, 0.6754514526812337, 0.5105353617414455, + 0.23803073721653156, 0.1458609199361791, 0.5435592006192091, 0.427382026594161, + 0.18595355696473537, 0.05220103595566228, 0.2889961282841257, 0.7957096966420499, + 0.2810237028905399, 0.8742692458643788, 0.2124467533663147, 0.1992501781501017, + 0.2122143260877204, 0.07507888946422059, 0.5603908414206202, 0.9445726730451817, + 0.7072101010898707, 0.37106245061371024, 0.28508198430675924, 0.2815387642858218, + 0.010168003939234116, 0.5195080171512712, 0.8642117767779967, 0.07368717030750993, + 0.019740389923905477, 0.0359408498089947, 0.6532349923807944, 0.2960764970057521, + 0.11529082336301166, 0.8247921933566784, 0.8080703198107446, 0.39804958307957616, + 0.5583325097350079, 0.8999715657284064, 0.6370943517000477, 0.9766636317988647, + 0.9994327185328193, 0.7616392658568134, 0.4856446029264464, 0.7746406322185208, + 0.03158139006288707, 0.862768640500308, 0.7557112544188301, 0.5084392052197624, + 0.03427162352223179, 0.23107854839233022, 0.4222671575947191, 0.7723231639424878, + 0.259919792776066, 0.08909349239383202, 0.6267643983700978, 
0.8710401942042832, + 0.3010250397730413, 0.49458619902185674, 0.6919945293540982, 0.8406723232447347, + 0.663725098001257, 0.11452111618409067, 0.7073304799817682, 0.6282391813889078, + 0.3606841864519844, 0.16206856922740787, 0.7643325796461904, 0.6874609795594397, + 0.8337340277312517, 0.12857309417449803, 0.34789775961730773, 0.7132116289858705, + 0.6004482465661388, 0.645697906745114, 0.6381857931732241, 0.11338595486989123, + 0.08254798062262159, 0.4207049632070897, 0.8727142646928097, 0.1195203055576024, + 0.09380464219909246, 0.4984885576072223, 0.21354622545459123, 0.8591837615980576, + 0.6726058515992647, 0.17480397715616847, 0.30858868692760055, 0.13597907371215368, + 0.278959676723728, 0.35618548221763335, 0.14562590576418122, 0.4929481696106627, + 0.6484793579991979, 0.6992506441369746, 0.8714526578489289, 0.3560493124141312, + 0.10913957063722657, 0.37425380379677675, 0.9333605272737515, 0.6514762560115345, + 0.8578164200417109, 0.44165841273706696, 0.9183801009283663, 0.3714117830684973, + 0.19026696817368605, 0.969406466716224, 0.12930580122610302, 0.6830488413835156, + 0.9459250279725616, 0.6328953923302919, 0.4473295083809634, 0.23076786696884677, + 0.10015031570808497, 0.45892816046120366, 0.5887594878988505, 0.792561679646134, + 0.2193431311103755, 0.2222194741741711, 0.9277886820893544, 0.6963772913133708, + 0.4259324011431741, 0.7514158601755893, 0.3936631317439634, 0.5339925643976342, + 0.21053773161209688, 0.5203726441177824, 0.5915564957840513, 0.7127491862155224, + 0.656322051845015, 0.9638643380912487, 0.14745185728442556, 0.5454644937790126, + 0.9485023202812123, 0.2731920221576536, 0.8049424148953641, 0.6409805946568784, + 0.6617916539246315, 0.5758478057821936, 0.0689186247870377, 0.19609627983460365, + 0.027904052322429873, 0.8685305061338993, 0.7327797374967434, 0.7572855661122344, + 0.5813700180834056, 0.5588261706383468, 0.19690193497758146, 0.4734047935958343, + 0.6408191510994479, 0.16212931835656053, 0.6366510877209287, 
0.32627877277786266, + 0.191804049698732, 0.28379442460103144, 0.6757576073478814, 0.802735306284264, + 0.028060584971021485, 0.5577167002583155, 0.407526432784978, 0.17953761400030488, + 0.26787711957452087, 0.06798458106150163, 0.16803295403221097, 0.4804772443645222, + 0.32616109171787244, 0.22940508873533094, 0.3093169471793096, 0.7437986993864869, + 0.5781403361257853, 0.006891759256384522, 0.7401817315715277, 0.9882059071733065, + 0.9781873751378118, 0.2967676554861042, 0.322614103267186, 0.8237014357474968, + 0.17557832067528545, 0.5076993696919473, 0.28636228761624405, 0.04166200857800617, + 0.830646905238987, 0.906979449165617, 0.5020274680301321, 0.4091437172119238, + 0.519311073178922, 0.34139000007089304, 0.5101273913427341, 0.1671980582532291, + 0.6352033805958868, 0.3858228882139979, 0.8997556485816697, 0.7659388029109063, + 0.43175058985170955, 0.9809740312733957, 0.5159544794242966, 0.9681310945597621, + 0.9376269937186791, 0.32794365251447477, 0.984522184165227, 0.9425758016852048, + 0.4503447656080847, 0.7200928548635808, 0.0293003059492154, 0.38651052646664463, + 0.681030236919638, 0.3595991288776009, 0.62016196145894, 0.5738062381035053, + 0.8929318853789425, 0.48580662317154866, 0.32211266084413215, 0.12451899941332001, + 0.3817625796466221, 0.2765279008133119, 0.546719886164097, 0.28904881937638316, + 0.734123357922829, 0.33563160802544967, 0.20842943221456256, 0.6971909761832441, + 0.4033149911360091, 0.7784796018200869, 0.9061874532836226, 0.8370316872556588, + 0.7154437097072017, 0.6219379622279917, 0.8777690582448747, 0.7726145455962234, + 0.13861357120150752, 0.6654929365238098, 0.6368058868907747, 0.525155037090338, + 0.0023343769506486423, 0.4540192161362343, 0.6507662246190344, 0.009716816982416776, + 0.13711512819967986, 0.33992845007081796, 0.15345347510183127, 0.05192140908550569, + 0.2957711730928547, 0.4212345455217853, 0.7749110376247555, 0.07326393435548606, + 0.3253552896489281, 0.276044932762166, 0.41224337860789595, 
0.7600420478108455, + 0.4499757181719678, 0.7173753181254552, 0.2098301063757333, 0.6166986558502394, + 0.7357228983798126, 0.13016995246210672, 0.4319279083166, 0.7868311391347459, + 0.5365813555938099, 0.6165320425741847, 0.9274165022614098, 0.9382866278342784, + 0.0572127779279733, 0.08372422708454275, 0.13912602770390325, 0.6738599587826435, + 0.7003932155869389, 0.941759385202786, 0.8026557702636293, 0.5453900732133369, + 0.7793242306671802, 0.27514289050092255, 0.5382934819345343, 0.6624806268145025, + 0.370638885605293, 0.6604933337480409, 0.4652929495558714, 0.3473317428074729, + 0.5116149836200966, 0.9900794948287656, 0.15466507254220296, 0.04796607144898102, + 0.3397189277587196, 0.5104443559308209, 0.2790408680916321, 0.9099045001606086, + 0.8212190704639679, 0.4845250590888124, 0.17152349761300256, 0.179919222005401, + 0.11150334158527875, 0.8187285818968446, 0.7718562879851862, 0.8550061953477406, + 0.7305138532746923, 0.9509580948227806, 0.8936381148405804, 0.8759590315861163, + 0.2727836214524296, 0.3644730358176105, 0.8987700812096371, 0.8351616261830698, + 0.945530909565055, 0.3712339168363077, 0.132036940677129, 0.9369055311754738, + 0.395929659086812, 0.74045599723979, 0.5404435908625388, 0.9064736984103624, + 0.9336479618615219, 0.5922985090428279, 0.576224812661493, 0.40828432780579427, + 0.44221417113155803, 0.6319840300789652, 0.4238874670468027, 0.37551972431879266, + 0.12520262415258965, 0.8316273685487323, 0.038694641494613546, 0.44664343184477073, + 0.8188999439090872, 0.6767084591069911, 0.06878788637726574, 0.18306034449525166, + 0.14389440924597607, 0.9590755728283509, 0.3636745686855236, 0.6713070612868395, + 0.6829770401574513, 0.22094235352733438, 0.6609331889501101, 0.23787631227068995, + 0.7812033889387705, 0.6061793647994644, 0.030289721923193125, 0.595247587049231, + 0.3085742104930158, 0.7741510761759416, 0.9813593314280998, 0.9871256155661056, + 0.3520710838734862, 0.6750463138001177, 0.9354610230433348, 0.3825050661148899, + 
0.5386917374649499, 0.9632557086420768, 0.41990836330752934, 0.6236977207204356, + 0.5519388322597185, 0.6833883761281654, 0.24345781896307728, 0.06922833268086881, + 0.9009351749099564, 0.887404179202379, 0.45884111957755847, 0.5475916746402403, + 0.4849237975588041, 0.9980910880274598, 0.5969558549361805, 0.3468700887024071, + 0.5182911869169183, 0.896320857203976, 0.9089245518914847, 0.8140694112554991, + 0.760752897661161, 0.593626642998047, 0.20670107887611633, 0.4029758014205066, + 0.5475862574697538, 0.46786432051257343, 0.14894613475586727, 0.34607283321092097, + 0.5842294775781987, 0.8730977124086077, 0.8989648541115576, 0.8920140006638025, + 0.44129255310450777, 0.8348467421556682, 0.9605502557204222, 0.29242431793941304, + 0.8854429411392765, 0.24116239287396757, 0.5540717585868633, 0.14052633498314282, + 0.6155214832882219, 0.22248818636897771, 0.4737583328127568, 0.9087875882434279, + 0.4239186860099896, 0.8809431675645322, 0.3858028250476381, 0.58520385044777, + 0.22353403393361526, 0.7023477947564686, 0.8346688971742567, 0.5222944939144947, + 0.14268540196846513, 0.8234458079618402, 0.27389193775556775, 0.6702771905253612, + 0.5612854911884845, 0.8612749329294441, 0.531185454084861, 0.43617748526952704, + 0.7033592775001094, 0.602504311567513, 0.9992345722091545, 0.4263759489804464, + 0.3351952827021891, 0.6222899392720013, 0.28135438212227215, 0.017206386497560455, + 0.1103368324210493, 0.3647916138732764, 0.6710495450666522, 0.6471540522859932, + 0.44812538223049314, 0.14447081742686496, 0.9859224068537418, 0.9985507040936809, + 0.37825878406407887, 0.7676925097477939, 0.8514815765491701, 0.23446778303374283, + 0.8915142705304654, 0.4986916333216087, 0.12509308062307067, 0.2023839895593661, + 0.7056909237155317, 0.3298520806253834, 0.6039071591953129, 0.761411741009969, + 0.8248734577725525, 0.9552536701328393, 0.7741094568407183, 0.8950888981899489, + 0.09598686033164405, 0.49353454736416136, 0.7798495783901541, 0.12273466267227784, + 
0.23882056218299386, 0.402443583906819, 0.8139544901179179, 0.06548282997775123, + 0.06047649342653272, 0.5489854213472108, 0.22565665321872452, 0.08216940800935102, + 0.2652383530543201, 0.25075175660923765, 0.8527785683376323, 0.664875993452441, + 0.006373500277563449, 0.9330092058196776, 0.9695659711377269, 0.1570662755318215, + 0.22675142853651498, 0.497544798370694, 0.7997058513802497, 0.17986995583107723, + 0.442851282694645, 0.829771043276624, 0.950290015238654, 0.7220585583107574, + 0.09886892756427157, 0.05251801777970955, 0.784499662508879, 0.05451321672212228, + 0.4488549555810257, 0.8666027173973242, 0.6300750191552654, 0.8075070107999621, + 0.6990517334835473, 0.9982308029748193, 0.19883797269254577, 0.1556828023863619, + 0.7223576223600511, 0.7591543951250316, 0.25990371634646814, 0.7817632442147195, + 0.5252788150978276, 0.714477114198495, 0.7621681433020496, 0.6999382073221959, + 0.4540932758947144, 0.6684515677638289, 0.1014634415593828, 0.6403751364474352, + 0.4462684781017259, 0.0829806623502698, 0.025527963323986658, 0.46212838809238854, + 0.5728559597473211, 0.2047334636595548, 0.4553628308622195, 0.48200265351224136, + 0.3015784488467229, 0.9461600418468807, 0.8534481279309463, 0.67016651074133, + 0.9355769105316407, 0.4682533427834763, 0.9793389401609446, 0.8239336437639129, + 0.23625649983123986, 0.536843311798013, 0.3215160628847986, 0.5194279318176563, + 0.597712965388332, 0.7827145768809525, 0.2977442893652992, 0.7620000860398798, + 0.022599972586581085, 0.30134252307872467, 0.03952269136874165, 0.7397278979498215, + 0.3244491415465972, 0.9180118819776794, 0.6685305974181874, 0.03516125275990889, + 0.24643915603026256, 0.7431383231541865, 0.18366361857795044, 0.7362856375269557, + 0.31838977486203324, 0.6723426691220966, 0.930499158170201, 0.4564609978477814, + 0.9045433296007812, 0.8982688827991663, 0.8559229897542748, 0.7748833918242423, + 0.6094306396092523, 0.6478684954660008, 0.7349685198119638, 0.364539006117635, + 
0.14612234864189266, 0.12641906108110756, 0.4753492843112488, 0.26825983484391713, + 0.7334476518610263, 0.09134521905986925, 0.36112127032602126, 0.15130254844414925, + 0.07205098934580179, 0.28381329612633655, 0.2365354746108721, 0.09318818848840993, + 0.004229192177879315, 0.1930090834260455, 0.2190266020986179, 0.1468868187819261, + 0.13242039824566865, 0.4607618490816854, 0.17662830150082642, 0.3803179201680549, + 0.43600799558928105, 0.3625025924122818, 0.5126810000248355, 0.7447286870072714, + 0.2833559721230442, 0.8500462364151191, 0.8371293691255902, 0.9814045266281899, + 0.6138540572625962, 0.18157836931719495, 0.040987564538892296, 0.2123411569068986, + 0.19334672174794099, 0.4692359578344284, 0.4346117425487437, 0.8980796038235784, + 0.04231863715020834, 0.1747959107332444, 0.49679201146401764, 0.8284930549614943, + 0.11220635744045326, 0.7183973210709489, 0.057440853028103755, 0.6218946843063636, + 0.7214468264472101, 0.8710347273056426, 0.33573322529858285, 0.960736461286523, + 0.5622081704839755, 0.30167003628114264, 0.9845185840592812, 0.2730377868620113, + 0.6634910867705797, 0.4563097949816506, 0.7078373084009667, 0.09488395898595936, + 0.09836050813793273, 0.9789140328583384, 0.9637999037579471, 0.8776791738282163, + 0.3875575452673514, 0.38213564508486353, 0.3127145397701139, 0.58527906318683, + 0.8093268893471381, 0.3617103895265228, 0.918158030578588, 0.7968145645973814, + 0.3159833958779399, 0.3245621121742297, 0.26557436108642674, 0.5104941582045647, + 0.55067197453573, 0.6139989296367647, 0.9758998325022192, 0.7522666298277298, + 0.7038601487512629, 0.0020986655433156143, 0.07612762173113752, 0.08362759408882559, + 0.0613040332999264, 0.4062327662230819, 0.688327210890938, 0.8557411533812304, + 0.753513873349543, 0.274952160983969, 0.5407046834914535, 0.2793056447170922, + 0.8627899449341406, 0.9139886721176192, 0.4445640999574022, 0.4633285206679253, + 0.1802476863007061, 0.4878442763468034, 0.8108683448099567, 0.9643091594761276, + 
0.8504141880279061, 0.5973156196301829, 0.10710219601041682, 0.9110558977005889, + 0.7009377291414854, 0.05565172733164114, 0.058195869330314176, 0.5420193242776757, + 0.3796763024251937, 0.6861232291730925, 0.5935524452091785, 0.44543313970081433, + 0.5478722646376566, 0.9452575141946173, 0.03291427279147585, 0.17924898345020468, + 0.5811068774631647, 0.7966701764321158, 0.09016513696916051, 0.6772952323869773, + 0.7925331594034736, 0.8925428237773564, 0.318868168359166, 0.8405436244742135, + 0.6705470057399852, 0.10787488154007552, 0.8797955425965943, 0.3130894042876252, + 0.9060653010521963, 0.7848586244744863, 0.582385835985094, 0.8442578893248381, + 0.15847714555109604, 0.8905292176439835, 0.8686944692306185, 0.5456512903405899, + 0.8868353388238326, 0.09777312604959598, 0.8513225487330578, 0.5395215170961729, + 0.03592347358785197, 0.41780143169262884, 0.5005659651891566, 0.11458446830716273, + 0.9013490640593242, 0.8108750745228607, 0.6658416034368901, 0.03550382138189767, + 0.7955080468613454, 0.49074806404789173, 0.32244334157112176, 0.7129285539645298, + 0.24835609047844476, 0.7787010450829042, 0.36212066315728597, 0.14344840215568566, + 0.6298165964297995, 0.3035818248313433, 0.3527667540127555, 0.5082813878704179, + 0.45440333291296253, 0.035987373886854135, 0.8367793554124504, 0.0419617551564988, + 0.36393362969608567, 0.7475710332070825, 0.36764156869564213, 0.3835614408814402, + 0.36842665507300465, 0.30066812912879526, 0.38846961600980434, 0.11428781977710878, + 0.26031028350961527, 0.40234329774399946, 0.046132326698889226, 0.0452867595679628, + 0.07011624926846793, 0.8010030565116638, 0.7512526313549056, 0.9284619966644981, + 0.5350311265495459, 0.32518309466866335, 0.5601195449139258, 0.19894460327367158, + 0.6243858594133542, 0.03983578333531368, 0.05122474970940183, 0.9280031635913709, + 0.7067149810394754, 0.45228235722609234, 0.16822883346753625, 0.5200338062131293, + 0.6678342036887911, 0.1861385580786682, 0.9596920794353617, 
0.81052624710307, + 0.6712474137254042, 0.6069705396669021, 0.7767494969653084, 0.14304873364579263, + 0.4979582256219468, 0.5815015641098671, 0.5457546026465194, 0.005727322269841517, + 0.27958034946510657, 0.8995535859052592, 0.27189693152003713, 0.17142315267359254, + 0.2574542451128564, 0.42110875297256545, 0.878717595023631, 0.6951807639506289, + 0.4947666449554591, 0.2962370195584294, 0.8589217946561216, 0.7256239457381716, + 0.1978514852323624, 0.8018327481906669, 0.22184349273821347, 0.8643444118470223, + 0.9545558551298996, 0.3492931800958463, 0.6551625977875849, 0.45268270020125323, + 0.3121787209947099, 0.4164095227466944, 0.24409790964275402, 0.21965790317696698, + 0.769399110742128, 0.9446505009893393, 0.27570706849680027, 0.9633913739468847, + 0.6346338307826896, 0.43836899170920274, 0.051588066178941405, 0.770307488387535, + 0.8833285321845498, 0.5567360567278871, 0.6219512912973157, 0.07819396829537384, + 0.8553090945117494, 0.3970203638415486, 0.6937187942519144, 0.3419170401726893, + 0.883339911974656, 0.9504705519586321, 0.4311868670829897, 0.9232864381280271, + 0.7305548788281594, 0.4922856506076626, 0.5561215944229493, 0.05074709438073732, + 0.9190448522966589, 0.8088420808436142, 0.31256177473587843, 0.7936540086227151, + 0.8693770767639527, 0.9619144887305694, 0.978651615216966, 0.36436755061851733, + 0.8288294895188997, 0.7777930820721777, 0.8946810827124051, 0.47197955436349515, + 0.3887930423762146, 0.33298158894732954, 0.9354645652293422, 0.8143908442536172, + 0.8916778022598548, 0.15309944701863953, 0.2650408402857629, 0.007508044926387014, + 0.7364672905106205, 0.26617107014592056, 0.832894569943264, 0.9039477664948096, + 0.861102983571584, 0.7912348133811137, 0.29766363866724543, 0.07244549693017344, + 0.8992433213098389, 0.7268708609307327, 0.226855949120919, 0.5319344152241747, + 0.054564473707647054, 0.809261064948484, 0.1014456601962076, 0.28852750174838293, + 0.5738149238294871, 0.8872866854523105, 0.2601556489387933, 
0.6324182595073818, + 0.5683638015103154, 0.02018482219592832, 0.3010360152061219, 0.15635428641897176, + 0.16487468189882515, 0.29897151490209883, 0.018224387802540765, 0.1242259888131334, + 0.43433613414947125, 0.11491027929144892, 0.7707048469309895, 0.7994271132337173, + 0.8664382140081228, 0.8738777322540163, 0.03142514858464274, 0.9945458087889819, + 0.20521923743173853, 0.4654944007098949, 0.7360410001930832, 0.9379171637481046, + 0.3904584310354958, 0.21347021766015817, 0.6190058192823232, 0.9273493620959045, + 0.7414925329383845, 0.4164411116066573, 0.4570556373127622, 0.4333063185731828, + 0.6537863956684857, 0.6527169584935041, 0.9792244869160934, 0.6996379153387984, + 0.9720388804607526, 0.4830745490684596, 0.45835491583760357, 0.7079899694928319, + 0.936535094920315, 0.9512534436676542, 0.7036667791104366, 0.9728903216842792, + 0.5667766638280655, 0.48791595428289924, 0.28581060443827866, 0.16265723058631343, + 0.33669842214183554, 0.7135619420704647, 0.3761201560049453, 0.924803685970466, + 0.41010538704340793, 0.5524938574075309, 0.975685212653098, 0.7610010380071708, + 0.28568444272968585, 0.958729870250965, 0.4980645442650463, 0.7024364697783744, + 0.4870415699569436, 0.5214041559816355, 0.6734493625924612, 0.9399399075876578, + 0.23907152639385632, 0.8379933121471851, 0.6252954247712945, 0.6426285641997748, + 0.24555236706882477, 0.6399515009589466, 0.1272208931602823, 0.46552705882830514, + 0.9447497417407644, 0.9417570081495491, 0.9970259571499339, 0.5956141876118459, + 0.3528056732024888, 0.7486729433719952, 0.9138639235533571, 0.5856653282262468, + 0.09717383952404945, 0.9259502582636769, 0.0965135679589354, 0.6550633767510058, + 0.9760558830495002, 0.615510122410444, 0.8977589395129851, 0.2975978126673411, + 0.4998781501659897, 0.5401325906954734, 0.02664326742275369, 0.870535910030066, + 0.2528620255074816, 0.17560152588047784, 0.9592522085272938, 0.9984446661641264, + 0.03567485596575537, 0.8238758737847581, 0.07818213996663659, 
0.4643445196348496, + 0.9537700664670703, 0.3670756983039326, 0.8911505143281897, 0.1589740124489375, + 0.5760018279420768, 0.814241717243615, 0.35590494005804263, 0.00925076115001855, + 0.04790704979554017, 0.7340221787959715, 0.279800658060547, 0.2983934978565449, + 0.6322434199877983, 0.3037610031627833, 0.7551312547840624, 0.4484841735323615, + 0.8942679752954235, 0.7129053560923528, 0.2890183006417396, 0.2950992812860116, + 0.9727894445055597, 0.8764347534123248, 0.6345511402536168, 0.08561520607852902, + 0.8545833443376987, 0.1674059683880449, 0.30079162478787524, 0.9573718467240949, + 0.26636026749680763, 0.7124177034785016, 0.05912231718880834, 0.4050705097577936, + 0.7770974489369057, 0.005785789929338647, 0.8417731633748113, 0.5855428220264268, + 0.49674859879415734, 0.8221104632533346, 0.39288776609135, 0.8586793317556853, + 0.33195680582551257, 0.08105297984834636, 0.6261815972960728, 0.06433609328327128, + 0.9277739924308167, 0.4218572159516276, 0.7696783995161812, 0.30453393314569266, + 0.13449689316900093, 0.5370213639233761, 0.640929037412604, 0.3768789866658515, + 0.4083446003487765, 0.8613249076952216, 0.8425614658767965, 0.7020301805546415, + 0.0584370707338272, 0.3889095341657427, 0.1007880926069531, 0.7299760925374299, + 0.8897188381580371, 0.6862441092537032, 0.3622365281632627, 0.3191318156508369, + 0.7547797430485684, 0.5347038131477231, 0.44440002307385307, 0.03869674812232948, + 0.01446253897646943, 0.807892405254043, 0.286719791491863, 0.533244763296928, + 0.7100634864804531, 0.7871630304590124, 0.42635583548795175, 0.9779561321754502, + 0.7654704964607266, 0.3364174458754926, 0.31073788930195057, 0.5489726539284406, + 0.010761984238630062, 0.6215498191132648, 0.3321898011383212, 0.9666290368590116, + 0.3252380926358641, 0.7045495218288327, 0.560079705221792, 0.9688966463245884, + 0.8950788090502504, 0.27335000291484834, 0.8559747839394626, 0.17172656866770752, + 0.26951424043797545, 0.725281506679176, 0.8889930150039421, 
0.440593430038282, + 0.848653116387211, 0.14776900150358052, 0.47324717287077855, 0.14637116481000023, + 0.35996257216503225, 0.35920833759659765, 0.6108707859368635, 0.03351924429384756, + 0.9323214628354488, 0.8022032069624152, 0.9792891472495763, 0.5553889126965885, + 0.7127093833905638, 0.5776509330399803, 0.1926645155438872, 0.5239003123361083, + 0.35916944559858066, 0.49078774391835744, 0.876138912755312, 0.8926530934408122, + 0.9092281325802658, 0.08619761260863779, 0.23205434258038304, 0.25063605435069614, + 0.390473329024966, 0.32465703784722955, 0.34298536304191996, 0.43217555660058715, + 0.2048913611172284, 0.26914563151248994, 0.2524219206316275, 0.7185713229842058, + 0.6032064629516926, 0.9124211400435175, 0.10813637741392035, 0.7006386155756965, + 0.33693023432030134, 0.9070264363179478, 0.5483862070260394, 0.11504504617892097, + 0.31796009445608775, 0.317909310877886, 0.6081590947379143, 0.4316269756672091, + 0.08855832445432443, 0.7903135621863384, 0.035156722930959616, 0.8645577271992757, + 0.9894948762233142, 0.1774327895895852, 0.6081815537879649, 0.7038539277833612, + 0.027487575615617987, 0.2540576913143622, 0.7806027882930798, 0.7797552909813606, + 0.6174670223554523, 0.8646407561360216, 0.5520593628359234, 0.004565601876026615, + 0.3101202128602528, 0.43793557141718775, 0.06238122197854312, 0.27592458230289285, + 0.9862775077129775, 0.8466814834992581, 0.38086561284804366, 0.7198637264056693, + 0.34481376426119703, 0.6199688962923056, 0.1909190141632633, 0.8289151445592633, + 0.7702092625272077, 0.04908539286403257, 0.9926833355210261, 0.9193893465376856, + 0.7257972247400568, 0.5463019695739538, 0.7216834531887901, 0.41545939498895923, + 0.11106509791158592, 0.22999442096319045, 0.9273421929135721, 0.64013024315279, + 0.8719001559272971, 0.5146267845560454, 0.5706449986578908, 0.5805262276387257, + 0.2267037598095707, 0.8662577405125906, 0.200718101519632, 0.6019549687057907, + 0.20385235971626592, 0.14771967906797856, 0.2317842424120221, 
0.7113435306823725, + 0.8918402070558002, 0.32172287586571136, 0.04119440154705489, 0.08998785345906324, + 0.9562853568683328, 0.6559043666999361, 0.8157136551158818, 0.7248132747696855, + 0.8868802152991563, 0.848884328881342, 0.7857540774441798, 0.14546661772775016, + 0.8113760628121882, 0.807728386901432, 0.7395193237968447, 0.8359841835683716, + 0.9224309984907021, 0.9911706160219387, 0.6192001754131568, 0.09344926218037752, + 0.04722639726342004, 0.07992173661068314, 0.6578504208437965, 0.8800423179471679, + 0.059726423138840135, 0.9694280244294227, 0.5764504744616069, 0.05144135982164122, + 0.5479354046874065, 0.02889768818970684, 0.6432009536405543, 0.8808812858948356, + 0.22280443617301204, 0.7127219895499965, 0.5375897917229031, 0.0201951582000508, + 0.5829377027889625, 0.5854290994795582, 0.3899136253964405, 0.8399049891572149, + 0.8504005276155674, 0.2721210357328915, 0.8290486564347775, 0.8708337415915963, + 0.3195723972838831, 0.33132431719139965, 0.36583899344380266, 0.04194791258295083, + 0.9846397041389282, 0.16925419183163382, 0.7238813973149291, 0.4001504433387769, + 0.8495502703659478, 0.9009893108772054, 0.01830445639651257, 0.8541233122216242, + 0.26697551023026644, 0.5565960048843075, 0.7002144852795528, 0.5248208257814692, + 0.49765875114991065, 0.6624849749394808, 0.3652150369479038, 0.16188666413474806, + 0.3062500953952072, 0.5524930250075998, 0.97359340280292, 0.07411414640722169, + 0.5809387335917323, 0.27342017827751264, 0.4849150842836918, 0.7737440468468048, + 0.9466877478491078, 0.7722345572678084, 0.20060407324732488, 0.4486373798668981, + 0.757331305379941, 0.30705374619745074, 0.5409100577430763, 0.9110811311444915, + 0.8244639953917496, 0.5672238036229547, 0.515411326175804, 0.5190662840193289, + 0.07299856859652876, 0.38140722833030105, 0.5795672856802954, 0.3599725854585929, + 0.17109013553251362, 0.3851687562497098, 0.48405983008931086, 0.33725842726843935, + 0.3074624940578241, 0.35373383802076097, 0.5352760101101476, 
0.2889296828360126, + 0.9958333257414079, 0.5164869321661556, 0.2698893322659366, 0.19488775802398262, + 0.5326369225951026, 0.8339411705264502, 0.16808333494994143, 0.9438232137216197, + 0.5566640629948447, 0.7515703678851053, 0.2444487767339869, 0.8949432397744512, + 0.12602651306236645, 0.7871480663922484, 0.05753327809315223, 0.5668689532623353, + 0.5887313184100555, 0.2298736613451513, 0.5867521885451676, 0.548598546660434, + 0.01523090789645698, 0.4701470304663081, 0.5775017376630199, 0.48756685748376827, + 0.3147734409308881, 0.021678855314892664, 0.053754360443974525, 0.7242109678490023, + 0.2404170314202997, 0.3541465214566609, 0.7245527085112154, 0.20196992270647895, + 0.48276661713378266, 0.8423699137816405, 0.6135807459491986, 0.27089202311006577, + 0.9045883890693331, 0.13173432325663237, 0.8891003037916227, 0.14391300865964207, + 0.08289766492862771, 0.4201898408129492, 0.5292724555441057, 0.7173029869175258, + 0.32357976777098096, 0.5922910739183277, 0.5815901891218167, 0.9575040336586064, + 0.5748977713903817, 0.8798345270536851, 0.8379657066532696, 0.4051269363467702, + 0.4313243700192768, 0.4934981641716867, 0.7021655831082331, 0.8510506712793462, + 0.5299278341835632, 0.765892319072405, 0.7223081516058559, 0.546885228820698, + 0.12396149370749077, 0.6464860423784723, 0.12599255658672592, 0.3516963133474692, + 0.5880342745534667, 0.11529352943973847, 0.8755661438776697, 0.10119332381459378, + 0.10118683966689535, 0.12062854727193406, 0.9677641683884044, 0.656326693640479, + 0.49890114789962525, 0.19390438497201268, 0.8108445804174672, 0.8152173803214833, + 0.718291481901579, 0.1057925602768447, 0.8151810052734241, 0.9190827414501545, + 0.8938760378348415, 0.8241050495606304, 0.3356615399744517, 0.8514008098368976, + 0.42924566164108335, 0.7022990870281988, 0.5316738334471584, 0.23288979349343641, + 0.8811760165188481, 0.9491090086077753, 0.89577953942293, 0.6278897311955806, + 0.561473256620475, 0.7034513234813158, 0.7469504654154129, 
0.4922106813726256, + 0.4989187770879301, 0.29674259955470694, 0.225738129481033, 0.4991536919836277, + 0.9293088540312703, 0.5370774466098068, 0.37251075503579856, 0.03843047857801063, + 0.09858858986546393, 0.03515555478428156, 0.6391549669948947, 0.14344720716149884, + 0.6910656946088193, 0.15740678233637861, 0.8616029497466651, 0.22969136337129137, + 0.7885874790016763, 0.8525622204293992, 0.08726532548644406, 0.9985465341275686, + 0.7985096920305207, 0.6659502505164258, 0.5944652923538913, 0.10928878512026263, + 0.678079864969368, 0.604618648423304, 0.1302996670222204, 0.5696787960710844, + 0.39633746081961074, 0.7041993592685627, 0.12316380287841522, 0.32882359926662175, + 0.2072971613344654, 0.5566158744713994, 0.40493651037057943, 0.7929501398959433, + 0.5901326115297979, 0.047576709711365406, 0.8856657465078077, 0.8225722396802971, + 0.8426425122639908, 0.8941846866358626, 0.32075505134921123, 0.42929053260312844, + 0.3856582602659743, 0.9827938783891512, 0.4509837435107429, 0.03915174257397358, + 0.44380854692095506, 0.5609853149756677, 0.4342614730549491, 0.8409214208439791, + 0.9045313460305532, 0.7694287789611061, 0.9586179312430214, 0.855362907376121, + 0.6940373189900426, 0.6144659800321695, 0.44511842080107333, 0.7811234343528172, + 0.8193949525915959, 0.5703880594458475, 0.38594474204811746, 0.2154516583014615, + 0.028520547773044358, 0.1533497112842136, 0.5828882934061577, 0.8454636784026129, + 0.36678312156491055, 0.2083770661280342, 0.04781641145757887, 0.8730097140319055, + 0.8909521808800771, 0.555109704836016, 0.6811577022134, 0.13826779263457267, + 0.9722809503007704, 0.27413066594238844, 0.5123002430720334, 0.0010361464179842184, + 0.07045861648088714, 0.5992704839597666, 0.9962176932704665, 0.8816311942309901, + 0.18185485779144916, 0.48964837864793054, 0.6173367715582871, 0.2467417039039631, + 0.6359426110544535, 0.3022131562112462, 0.8196523230775039, 0.7669653158026208, + 0.07695595315492987, 0.23745404594214325, 0.5425007475032013, 
0.40996037913859373, + 0.7094361454198964, 0.7772185454991528, 0.011979435913138103, 0.6988583536513918, + 0.4736419191168978, 0.6358493825352493, 0.024380356224368516, 0.12801599854757018, + 0.41032082421966476, 0.7156910728714808, 0.870127908452522, 0.5966822109848381, + 0.13721517015234475, 0.3984640894255359, 0.5474178270290164, 0.44152496670677854, + 0.447404596866686, 0.18178206411155562, 0.5568564850622483, 0.20722987924067904, + 0.6961261151735172, 0.07004766534908935, 0.5178030369849991, 0.4280383074500259, + 0.21885105613278566, 0.1582955600967466, 0.742696754130134, 0.9625433879714335, + 0.707380541532476, 0.9626789038341915, 0.19643036097098832, 0.5802636137500992, + 0.393478497342046, 0.9680733166314949, 0.3611473080795088, 0.6500141101336993, + 0.9575727383521713, 0.08301829371483349, 0.0631262724273397, 0.9626245320972233, + 0.1976347555048752, 0.16605123225763596, 0.40629016682259855, 0.7806732313352842, + 0.4146430735788973, 0.2764933417293113, 0.1726823974227566, 0.4591571139835079, + 0.8880482788199178, 0.12602928850293937, 0.9528158071580496, 0.6624687288781167, + 0.8159711813982033, 0.5387478147352246, 0.18300334312104516, 0.9897944310582785, + 0.6313798437728453, 0.29888838191879286, 0.9784399038252511, 0.5849294505595107, + 0.41093772589171873, 0.27724972070226594, 0.2668295505422189, 0.1174341003943632, + 0.05858570452655276, 0.2394356523139669, 0.772147412106295, 0.1483117351921458, + 0.1293641519054759, 0.6625011421786597, 0.7545760550593441, 0.39907388271523314, + 0.3292684365493326, 0.3330661038455013, 0.7769330360979695, 0.9753998403865101, + 0.035707234893787576, 0.9089806530976261, 0.45503741607689285, 0.7607893071236965, + 0.35554802446569644, 0.5515119825278225, 0.16144162949480823, 0.504732153888512, + 0.769594023018115, 0.23739180193147158, 0.5900123628051581, 0.8716719462302307, + 0.5139857863805445, 0.20521929415520634, 0.403173516885026, 0.400963205809142, + 0.8626316765120458, 0.4705239970701276, 0.060888960732690234, 
0.7614813122713756, + 0.4344249885448844, 0.915127818763805, 0.12181775260503214, 0.4655365269027145, + 0.012343562523475349, 0.7848445325193564, 0.1766394352099505, 0.689627215529766, + 0.31208784521225474, 0.5627020082946157, 0.22995887459909736, 0.731174423115421, + 0.8330568524945078, 0.7562221220956725, 0.9851244781137406, 0.3981959712991786, + 0.5939773543813534, 0.6435105549771692, 0.14619542363008597, 0.0999707518171149, + 0.5812904983178214, 0.8008654263958069, 0.32444881651601176, 0.4328274211419346, + 0.4486951999414156, 0.5986581218020003, 0.6872986723632716, 0.14494924409225407, + 0.5354134508654219, 0.36576071478173067, 0.8975010605688444, 0.6254361911688172, + 0.6705077244265885, 0.2562398730395865, 0.5671990831930617, 0.876385728944829, + 0.9763969186458143, 0.36246354873249964, 0.3634433479264154, 0.6777545700206987, + 0.2472233004105332, 0.57458997757583, 0.7436775231215327, 0.1553217842080471, + 0.9638701931246583, 0.9426845283898099, 0.7434848024062098, 0.22368740675030951, + 0.14249000382914023, 0.862050219278911, 0.5147096038401864, 0.7931309397755107, + 0.31912428779353497, 0.36932700877708713, 0.9848888323071824, 0.18970443535638315, + 0.6719019531087078, 0.49902775344818495, 0.2505481000055251, 0.8066597174480088, + 0.5311292590414224, 0.39630241481634976, 0.4570871122929343, 0.15948804559775043, + 0.8081945502973661, 0.8368875390734017, 0.20628653393155671, 0.5345566558753493, + 0.78532008901544, 0.7659443701665629, 0.6176443866924944, 0.6706241949437718, + 0.6863865358520691, 0.8186816060201058, 0.9758959273814721, 0.4253965654778582, + 0.14733375429237705, 0.7275775375167961, 0.5157813708573159, 0.6042056357653907, + 0.11755778045555298, 0.9579735434380794, 0.4888692205662686, 0.9386558175482362, + 0.8577019249822763, 0.26528228209323423, 0.13056368281859565, 0.04978158075450534, + 0.23899999012425677, 0.04736433270210616, 0.5256251663097435, 0.07252453735723718, + 0.8665051233500843, 0.6597264366536395, 0.9680383380692306, 
0.9685072910064021, + 0.45430003925302787, 0.7703059996095263, 0.1079616327897569, 0.06865822627865392, + 0.7331914607979834, 0.7121606308464292, 0.5765115714130643, 0.008106935491648204, + 0.31691419742449944, 0.05996696984481786, 0.644462481066819, 0.9614725321034653, + 0.8736851324955441, 0.08250454135914254, 0.5030377077189374, 0.24085777706908618, + 0.7134339083680625, 0.5922821695162334, 0.9743894944757384, 0.21488767782879636, + 0.8467228842279854, 0.34865131186235476, 0.7059235640075332, 0.7692543465199125, + 0.43729266244792664, 0.34603953537333054, 0.5351118887689688, 0.4639950294070895, + 0.3852594548928723, 0.07398139176729679, 0.9544718066910667, 0.514451306246599, + 0.5916828188750132, 0.21025394979175327, 0.13424586807762695, 0.5513326533813504, + 0.051566895205039875, 0.30782334734783556, 0.5993209583752188, 0.7879729738722937, + 0.900597830211724, 0.6434653905314204, 0.3928325099757637, 0.6936962962279827, + 0.6604009077531166, 0.22844806125400163, 0.5239453650001472, 0.5409883209998311, + 0.8807339873509351, 0.7415080554984124, 0.24317424944249855, 0.27798079948879384, + 0.4229217022397571, 0.8559868536396876, 0.21449895830650934, 0.6543419128317919, + 0.15200410242542117, 0.5930870212329282, 0.3951155156990216, 0.8384964273110884, + 0.8280423790819811, 0.2211604234743123, 0.7511204184605516, 0.6675725081700755, + 0.8146211139127217, 0.9569626246585295, 0.5185407384009807, 0.4556515419547643, + 0.06920421228769436, 0.23056200224511736, 0.28540090097678017, 0.812140934841021, + 0.18856450195470287, 0.9289903667797225, 0.588325769065312, 0.09568520131138514, + 0.5283057698851015, 0.43605903789099487, 0.08714739390351978, 0.03374397970894971, + 0.6167212152293503, 0.17815895542809823, 0.5788944733124046, 0.051407548448625606, + 0.659349537938685, 0.8104777122229284, 0.6388229052185809, 0.5893865460846739, + 0.3134218255519302, 0.28794655637789435, 0.4997638431761786, 0.49218368533386314, + 0.5958855850198255, 0.7730501815828105, 0.5677915595670125, 
0.7102620128442536, + 0.795355593755201, 0.5304215864652778, 0.6007612981607046, 0.9964337241373918, + 0.04351814150754807, 0.859361547710027, 0.41180095629592506, 0.6054977227959006, + 0.5866072454867128, 0.13983456519802473, 0.8659759378768227, 0.29051862563698805, + 0.26343832558409075, 0.16197860637739092, 0.9385441276971631, 0.31651565460141484, + 0.6362832492521777, 0.18650540334871102, 0.9810254140496176, 0.7282131704311045, + 0.4958468018799851, 0.9941421106245526, 0.6587638922156728, 0.5569046271695639, + 0.7969549518529243, 0.8654073421607111, 0.5040850318176497, 0.44984242432994825, + 0.7458936917504971, 0.6175053442233922, 0.7960517210096801, 0.025015869854195794, + 0.9676019966726607, 0.588365381568232, 0.5250283553384886, 0.13774560092976795, + 0.9510753823859966, 0.4825976142091274, 0.5502413961528074, 0.34969604518797115, + 0.6381584251543052, 0.17256052285558898, 0.14274899546640085, 0.3376280114986824, + 0.6808481211959632, 0.07069119860168616, 0.09998284318299033, 0.49127392307108864, + 0.5513156555424487, 0.3500335429995659, 0.3808124149471467, 0.04375119314433118, + 0.8460504066041273, 0.3946303413259684, 0.8173797644017552, 0.9844668600283752, + 0.32224124117380737, 0.4758662954891658, 0.13292420533133154, 0.1017861810025199, + 0.5930781163842266, 0.2082632032935472, 0.722210711587862, 0.40508563896563055, + 0.7784204262529493, 0.440664359561293, 0.369853370947289, 0.5597944153914333, + 0.5652031321749275, 0.1688371354557342, 0.4182962896780704, 0.8966351519859024, + 0.6794981633453622, 0.5296655557006779, 0.21280269232667204, 0.7847779274864025, + 0.9809704455279764, 0.7987372472449284, 0.7112465191480444, 0.6961150269222923, + 0.17461496906612384, 0.06943405301490102, 0.30541058974295543, 0.5583473113468189, + 0.3615283940617009, 0.2600843124658757, 0.29700994575964157, 0.9357815521539812, + 0.061756698896572315, 0.20877051593259888, 0.7949246739176529, 0.6852917915022754, + 0.7157303886625387, 0.9773429172090287, 0.34932643592138557, 
0.011744764559683873, + 0.03251072985680692, 0.6507961599249666, 0.2601759378482653, 0.42300361665841013, + 0.8602417860380033, 0.6348440808406297, 0.7347076837691057, 0.43775003852117744, + 0.6263447861764662, 0.5838493291165655, 0.9618529234108794, 0.5682592697896438, + 0.6266511263055534, 0.7206985650577848, 0.42070429337161164, 0.7399576524389204, + 0.5690971290567406, 0.5696443190794005, 0.7227703906619335, 0.5564650905392832, + 0.9608495657627738, 0.13683722511085716, 0.08205956135202597, 0.9218913687490342, + 0.20280112950334284, 0.5969060813848549, 0.9647634803065985, 0.27433934963735596, + 0.7334357422608602, 0.31420056369876215, 0.8277581819576262, 0.9390761218132709, + 0.14251466106556665, 0.8590310734180775, 0.24512256520401055, 0.6520723361229258, + 0.4076471661522455, 0.5731621743608273, 0.9211348965335291, 0.20356613213748154, + 0.3684670420234416, 0.34743467608485556, 0.748695853751142, 0.057615305671817274, + 0.5781258293215672, 0.030543557978518043, 0.11209775151323809, 0.0994312759988597, + 0.04659791676493952, 0.35200700997732937, 0.41012369780046687, 0.4689953509309186, + 0.17839282318085214, 0.5697285748534576, 0.8959122922810832, 0.6779321483921179, + 0.6852369818961417, 0.5350753365446518, 0.4437020404684159, 0.5445916111630644, + 0.6054274864314704, 0.18579591525695616, 0.47447040056318446, 0.9458461906458582, + 0.32878826341723344, 0.8284004108458354, 0.6662203740276853, 0.508175561611127, + 0.9375112350615182, 0.2321432832357444, 0.34618157780424086, 0.3724033120797884, + 0.8396196888695882, 0.0799880572850793, 0.9732658202373514, 0.36690946169539695, + 0.6394379580497745, 0.6928827958842657, 0.03757092900977144, 0.004387011060732138, + 0.0796032255162884, 0.44396984422383845, 0.8445924318357904, 0.6314258068212949, + 0.588812344881673, 0.8927740237003481, 0.3662948146257604, 0.9570099552743379, + 0.41401959397296284, 0.1320144216140361, 0.40626273590335205, 0.3461337237948321, + 0.5556985856249116, 0.16045118115995427, 
0.29408513702046013, 0.5440246588610647, + 0.18461015355342003, 0.420793808578927, 0.11571328907545575, 0.14527355682694953, + 0.14895278335119166, 0.8927726806463218, 0.8049582900341045, 0.880194178403426, + 0.9957740401218327, 0.5944620926560128, 0.43267727424022207, 0.6229934798296954, + 0.22030501982536088, 0.8045161489708144, 0.1005692764268149, 0.455459077907966, + 0.09928405607108193, 0.31172266984748465, 0.8993031203771943, 0.1593029445026095, + 0.06857933036817898, 0.6509540424445969, 0.1919213643996598, 0.1738087164796367, + 0.1206316939342732, 0.7774855787144023, 0.5439038319947664, 0.27896968325464333, + 0.7973051833978605, 0.6585354603911842, 0.8720779268326746, 0.3752299520426411, + 0.5865667338642133, 0.13914992367874413, 0.17923028843505817, 0.46120181717572295, + 0.3938140382551396, 0.980156513711137, 0.7675623476766362, 0.19405609689297976, + 0.8241719118420732, 0.5934758689472284, 0.8839095607517173, 0.45576380758062385, + 0.5794838465309735, 0.11628253151165036, 0.4186563263256182, 0.7110368611535203, + 0.8553211995952452, 0.22034396006938395, 0.3121230306158105, 0.22662115720322595, + 0.677171111538811, 0.13015280874295532, 0.535275615152003, 0.5178449803466542, + 0.324650597972588, 0.10098664892923037, 0.7252525301135283, 0.3208270048875955, + 0.6997094148654934, 0.9005159499778512, 0.33241033387049146, 0.5190935034239609, + 0.4167490782080582, 0.5737086149859272, 0.2123118892973238, 0.2679545742682611, + 0.6471209587294987, 0.9691156062096731, 0.5801491657766561, 0.814946369371183, + 0.5548737401045137, 0.5469416306435377, 0.23699892219432583, 0.9324774732562575, + 0.06048542911246013, 0.47020867155366575, 0.2085028857251099, 0.7298753469933401, + 0.8309196548596574, 0.42240535722306805, 0.29402948730856315, 0.6346342692413988, + 0.9243017825628977, 0.08499227558313571, 0.9837858944776281, 0.1760671731053659, + 0.9064636859735287, 0.6331418825289039, 0.34745566339751055, 0.9640939007362479, + 0.9490721267620666, 0.9347360070905624, 
0.4151243598338422, 0.3955888208375058, + 0.430812881616372, 0.6348976620077825, 0.7167775533081169, 0.4146559915334438, + 0.614182548788152, 0.5631504935893676, 0.7966145593092426, 0.3278317905925373, + 0.14623692229559904, 0.06063010064983543, 0.694448199105987, 0.18649442045727138, + 0.012595988509650713, 0.8720065584474361, 0.7614144337726138, 0.006814463507152313, + 0.6674826329280994, 0.5869005789165606, 0.9903885909151705, 0.5632164114405475, + 0.40790482633815195, 0.05536062358065652, 0.5775975651510218, 0.48520771759928183, + 0.6263689845732856, 0.9565566182748682, 0.8704274130188115, 0.05496322287847344, + 0.4064095914119452, 0.128903241480822, 0.1892356912182035, 0.7512425089428771, + 0.11906305856131416, 0.556962084054479, 0.4156505651397996, 0.31736022940430886, + 0.8976206070137216, 0.05129350460083426, 0.5373857954714363, 0.03714957827615872, + 0.8124436215340494, 0.8996525331608716, 0.27815717521683414, 0.8032977983368944, + 0.8537996311175677, 0.230160033291952, 0.09293798060999447, 0.06202615284815882, + 0.2067447079667145, 0.47583578687062644, 0.3445313192923223, 0.7530004434640041, + 0.0909812300785392, 0.13162050801298497, 0.4371364326534972, 0.5585411456791424, + 0.2238610543093844, 0.45516098988920173, 0.19657515412167492, 0.5852281615561222, + 0.07047721629284709, 0.10374045896560435, 0.9508249260246321, 0.7401832333063805, + 0.7470910253148448, 0.2534718720856247, 0.9466442771157112, 0.024517148299622593, + 0.6025971528796166, 0.3968872002939713, 0.39107825438522936, 0.5135009293866668, + 0.021030409166508224, 0.6063409719952474, 0.5284090958939895, 0.5638275762353786, + 0.18461931399314524, 0.8549538743737869, 0.8238837861448407, 0.34121674305483796, + 0.3641448130766566, 0.5640376621397918, 0.5672563134411254, 0.042953925668501536, + 0.5669892130656131, 0.6545201832704236, 0.38310105477510015, 0.577267126203172, + 0.2820866870118134, 0.9615084053713764, 0.579352698268399, 0.6333561693526448, + 0.8335676482744249, 0.7530193432282711, 
0.6600387450544518, 0.8599755969767562, + 0.34543273093526583, 0.2841846870971455, 0.28521639274999944, 0.29537849420346163, + 0.8192913481262087, 0.5452944586279989, 0.04879320097341899, 0.7011141727180478, + 0.5627658694618872, 0.8890828766201677, 0.8330231236827359, 0.9901599728272302, + 0.03720439262504094, 0.20440866713311123, 0.9544829899721675, 0.9480208996192407, + 0.039150272346090365, 0.13193363658731838, 0.9659970555474423, 0.1267239716403714, + 0.8178301165327282, 0.5767215111131037, 0.7257066141579142, 0.5343485705027584, + 0.2696432587134189, 0.026763297351841908, 0.8344834971740566, 0.7027221365496716, + 0.1797149601274559, 0.35475264814583496, 0.817944058314642, 0.813386721351238, + 0.28155814287589287, 0.3805004952056973, 0.9672199042874626, 0.9775449069116537, + 0.01838790297751669, 0.07053369998779246, 0.5776047344329789, 0.01625405735856278, + 0.03628830850140874, 0.4703763966578943, 0.5225526608011035, 0.7704773585665169, + 0.7297664742286003, 0.34129700429264276, 0.2775833166469701, 0.35388799583931574, + 0.465699276350639, 0.2913280236202286, 0.34420970493726366, 0.05277337934540127, + 0.3324413960990795, 0.7632903133232857, 0.8845174827064491, 0.7793845483130121, + 0.9241365174521774, 0.8938490366343718, 0.2678709205600661, 0.7538732789929227, + 0.5486511567142761, 0.11630889165366465, 0.825109994146468, 0.856401401840712, + 0.2693290503424638, 0.8114227679822276, 0.07276034077451898, 0.01869364272728491, + 0.5436266166530527, 0.7975074584119255, 0.6610151872850507, 0.27260275080692653, + 0.7015751622010856, 0.9920692633016523, 0.5085963961567416, 0.08934219264407073, + 0.7135552553835582, 0.6718768917934348, 0.8315261586654303, 0.29540170492511797, + 0.4633764093942768, 0.10453278612480843, 0.6144591454614012, 0.9775926129646434, + 0.33920962189339976, 0.35423235286058363, 0.7593754667534023, 0.3935042844297054, + 0.039072280263130366, 0.23515421673357584, 0.8612284843099522, 0.4345452316753744, + 0.6109935880071121, 0.3070337942708744, 
0.2753640407643436, 0.5111567482341669, + 0.2211496572505689, 0.7684245826279257, 0.370462210862769, 0.8635851819841119, + 0.5135084081689768, 0.5015297465607211, 0.6295371600946423, 0.3137503923916056, + 0.6366565981344982, 0.3030689614036959, 0.7929661179719301, 0.19676978689923852, + 0.8926860112953223, 0.9178982395043443, 0.4146380475031288, 0.643191537180066, + 0.4866725536965475, 0.5731088131761239, 0.03257524157655578, 0.26896171031070615, + 0.8546348010088943, 0.19910715547620972, 0.8893690695560387, 0.1164281869501701, + 0.8006972484793788, 0.7132181800300946, 0.6908108374584946, 0.45188225231868817, + 0.39519584508777406, 0.11715390167581274, 0.07050998532346375, 0.45693372825553413, + 0.6023625015869961, 0.026426762268978332, 0.1681389943452659, 0.6108962390341512, + 0.8375514354388983, 0.20085757234860424, 0.05827528052767261, 0.2792682956674706, + 0.6271271283375135, 0.5788714350564417, 0.729333466096751, 0.25586841768146906, + 0.9946118467644914, 0.3202541028214144, 0.9597762730437452, 0.14259184482750353, + 0.048276625663025374, 0.1460224467061415, 0.7458310071884376, 0.38339651651364326, + 0.6244360562062728, 0.9515190150924367, 0.26287934274233804, 0.9987310614908452, + 0.02808578252898497, 0.0569353211582152, 0.47415694806644093, 0.5449807794953476, + 0.5407749359650981, 0.3515314920161333, 0.34640001815941146, 0.07881192295903594, + 0.7781647617051484, 0.35676569235164524, 0.7023358229078066, 0.9483831182535897, + 0.5811313582631528, 0.505605757572102, 0.8473361393609283, 0.21319307369621376, + 0.21411974298465775, 0.945476563456652, 0.6126762294667626, 0.11720185464148059, + 0.7130392679493621, 0.8476556833084478, 0.37631638863992944, 0.7756308156116596, + 0.39310637299648354, 0.061229464662047595, 0.16368446893613675, 0.2158686580047242, + 0.9116144210876892, 0.599857653264359, 0.5264218851922361, 0.547081626234461, + 0.325914680331864, 0.13728507973746584, 0.030096923595554048, 0.26390780312110174, + 0.42486333837046897, 0.46988108079381585, 
0.5915576382845237, 0.5411165882068674, + 0.5336076594319062, 0.06296655967577103, 0.49065759755420035, 0.607719227939013, + 0.5815544881512839, 0.8321724893781268, 0.7912234792148146, 0.2332011526848804, + 0.513465401582817, 0.07378259213776417, 0.32554264546976464, 0.5447343172887128, + 0.2970074797047717, 0.8489115892806867, 0.9426212005671426, 0.07952327191536368, + 0.5628189429557754, 0.7170003596034836, 0.765715613567543, 0.4411174472652958, + 0.01654347333164996, 0.21119053907216268, 0.5192670301771658, 0.3148716111237674, + 0.9646905210187212, 0.29124624040531644, 0.6416502806324441, 0.7875168188595044, + 0.49182438345843726, 0.5780436355940655, 0.8336392939516185, 0.16989662241359915, + 0.5381777458264859, 0.030295171103866037, 0.19859545581269888, 0.9139199107827592, + 0.1675003191883312, 0.5434264223032743, 0.37718400889257186, 0.6192796992539356, + 0.6402289173493387, 0.24136524837202822, 0.9090248533823663, 0.9688653860787863, + 0.45009441972100284, 0.09328520423793907, 0.39444406283236844, 0.4461730241783517, + 0.9688943292920577, 0.044636563349069736, 0.5412696334756325, 0.6948083289983689, + 0.2570145166706659, 0.3422258350596028, 0.501338373359947, 0.4873854940051615, + 0.13709496786369568, 0.46135308232837946, 0.982145175958047, 0.22745726092172325, + 0.85145265846938, 0.515594098997283, 0.3761394351925581, 0.3119254306654259, + 0.7729232411310919, 0.9078460719582939, 0.22546253478374623, 0.9447652033155128, + 0.8731464949208254, 0.23871860078453422, 0.4919408448445003, 0.28371518130064755, + 0.024115387667105193, 0.4240023592836627, 0.6006686918123524, 0.19786604994819934, + 0.6128468699489975, 0.60237221178917, 0.0952593786220981, 0.89677104028514, + 0.2817485754408312, 0.6581233231670309, 0.33101610509813706, 0.21442717821636081, + 0.6360312019413297, 0.9113905198333937, 0.5195053545351246, 0.6146547774998682, + 0.2806115333164484, 0.13852856951174664, 0.9104774606499484, 0.6932891745832127, + 0.13559815914215723, 0.2978732910764659, 
0.11947568274871168, 0.9867231873796841, + 0.29708925648248996, 0.45664211087209905, 0.5285378601866524, 0.9347571758164502, + 0.23749674860750714, 0.5147194765248094, 0.6463423379438457, 0.4191811738465673, + 0.9531510658051373, 0.2000383243200442, 0.4250601254725521, 0.22121624623532998, + 0.08930342925650314, 0.04324023024297452, 0.5024941113568316, 0.6413528774235441, + 0.9750260903279553, 0.6154567994157862, 0.18696399952309095, 0.5849597483193713, + 0.8725354279992786, 0.008404015663836195, 0.04615838794682936, 0.8209556290552137, + 0.8827598459818592, 0.4194872746811864, 0.8880658733183382, 0.024419570367295362, + 0.7479312187131603, 0.8129038460181042, 0.15036697652738618, 0.6935256982364931, + 0.802523221667957, 0.5145741172686937, 0.7558458207788462, 0.8429562131817895, + 0.8206176821124108, 0.07774599796362314, 0.870235287488823, 0.580060907882237, + 0.02712811450046093, 0.9695286154991549, 0.957082206284556, 0.1019783887398118, + 0.19654287383785907, 0.7559596393922553, 0.29500827673499486, 0.6057417497936475, + 0.7690014384711187, 0.03783596820202395, 0.7655606435850764, 0.6584741002630332, + 0.653988722895146, 0.6362040862554912, 0.15956475228326839, 0.14414056722005097, + 0.8211059790657037, 0.44519279183167915, 0.39728758722369306, 0.3628847725566814, + 0.2829610769020825, 0.9704803492013365, 0.7556880097328045, 0.5240237532459261, + 0.6083513342712102, 0.08543011106131027, 0.4607323172600133, 0.484035525255997, + 0.9326237772248412, 0.8638676274584424, 0.8744459402120672, 0.7529067981012579, + 0.9015193522982415, 0.5549844899419032, 0.6453033634579781, 0.26109749279767136, + 0.7441776576916009, 0.6606252077689398, 0.2349676152742367, 0.9618875480154634, + 0.43027356530349137, 0.7425801495285858, 0.8000350586905398, 0.22924239007051617, + 0.4527258130955085, 0.7304169585063964, 0.7441806246347922, 0.872896888725154, + 0.07759684593654426, 0.293120563204055, 0.7368193973641121, 0.28979064651987896, + 0.6068352310915744, 0.9052346146862206, 
0.3589933612170658, 0.47019487763795464, + 0.48517140331348496, 0.29002488813304284, 0.45099315710518617, 0.996564713235235, + 0.8950800466856947, 0.37022589398674954, 0.9461444427630092, 0.33781122895621085, + 0.3041109497708152, 0.393759015701048, 0.6472384754560551, 0.6074433297575362, + 0.087748834915378, 0.8874811238627718, 0.524433094278233, 0.20970260338326985, + 0.21442939598927924, 0.34952998752542574, 0.5899814017695107, 0.08678385139936784, + 0.08551845053736373, 0.9012960505708011, 0.06513774844706433, 0.34200269240889725, + 0.791371294081719, 0.7678518215703581, 0.9127451928528002, 0.8039019392545506, + 0.7175561976760566, 0.9348031950275889, 0.9261129417306103, 0.4235371140929114, + 0.8380256862590599, 0.10462064876744948, 0.35966354689091795, 0.8742732682454764, + 0.6804242407960092, 0.45118951390128537, 0.4175742258200632, 0.16456896829187628, + 0.5396189290921029, 0.5280023188975842, 0.6019050340328873, 0.47765523750150707, + 0.7491886955916292, 0.9027131717120288, 0.11667331090191357, 0.8505665358297997, + 0.8086334698637788, 0.9899944776583948, 0.4049651667708708, 0.981731629952774, + 0.8762270921954172, 0.17895797806876146, 0.9975736301511068, 0.2711659248267585, + 0.6995240149645893, 0.9312680122335908, 0.5456108748099171, 0.42721422903484363, + 0.46220715504000853, 0.24491840962442002, 0.17397294688760923, 0.6129679632372846, + 0.282350262690521, 0.9346017695584203, 0.14489390075010034, 0.14974179960184286, + 0.6293698598145906, 0.9330556213809605, 0.32705180690965974, 0.874154146012628, + 0.4115499740473926, 0.5535089600812554, 0.9736432359868827, 0.44784152963522306, + 0.1761471859768603, 0.5473422345347194, 0.8849543522240473, 0.15023716773938034, + 0.3722192354344217, 0.07290743393768884, 0.359774089632495, 0.5452314004224904, + 0.5918743512036624, 0.229297142855464, 0.716316751235886, 0.547789079393371, + 0.7341188406002573, 0.9375970601021524, 0.5349125973375516, 0.6016739747363528, + 0.3630271132032489, 0.31504347814602884, 
0.7901791886647448, 0.5400323245956503, + 0.6447061852981948, 0.6806309139961257, 0.7353865468591724, 0.08400663102441097, + 0.9905774657159216, 0.8454801632200767, 0.849196582241191, 0.3071538341129647, + 0.8429459656420545, 0.7207016952763173, 0.22626456195845024, 0.37268659400138693, + 0.9537404308197139, 0.19918346205071835, 0.6096444010079363, 0.8384369271676938, + 0.7144223871918833, 0.5085192114177742, 0.7523672046025225, 0.6830314446497705, + 0.012128031490853175, 0.8628636554695384, 0.26327447590871433, 0.18174508318770222, + 0.5954922430965913, 0.22702975872293218, 0.6196353695853406, 0.5906582460778282, + 0.7629357610192029, 0.11839267824439936, 0.4289706005759374, 0.9732459440261039, + 0.2090960151666983, 0.7780294935086977, 0.5751637742742446, 0.09911408617631379, + 0.8564239824947635, 0.3181179934392875, 0.08271754504478757, 0.8263888793866928, + 0.3202475285082964, 0.5592272906622197, 0.4804425705084825, 0.9806324406157072, + 0.07966272764529769, 0.05294524800781131, 0.46638741921372073, 0.7439120782649592, + 0.14190231445855683, 0.17928597963762782, 0.09322183544323615, 0.845006133082171, + 0.26357746512963576, 0.45416482348464005, 0.5509813517740249, 0.7556611985947994, + 0.8399809744246433, 0.2068832666783309, 0.7758468545529311, 0.44913030612332017, + 0.5887376478221283, 0.9001925703979451, 0.8445601426248558, 0.7659448285551973, + 0.7172580648739464, 0.44793138533515275, 0.9374971635840088, 0.8162190222096968, + 0.4225061729280529, 0.6109366179635362, 0.5287909468719879, 0.9715341162192149, + 0.9902679103585281, 0.634231132912984, 0.830540674286405, 0.551750060242085, + 0.6114803581586474, 0.5281101122601308, 0.849043187276534, 0.9791742146760088, + 0.5265289626344798, 0.393573450923832, 0.9673393972808528, 0.386355572841914, + 0.04401588847007154, 0.43945288936608173, 0.29435396767907773, 0.8381283866121838, + 0.3268399051793768, 0.9226653935940995, 0.43163699721420534, 0.5238379831649844, + 0.30224367983829126, 0.7781666574435526, 
0.4549582101124804, 0.9929054038872701, + 0.004288882276485162, 0.7049376325216603, 0.0827635441904947, 0.7799395425765457, + 0.47652878007750155, 0.4414719831985249, 0.13217799620307835, 0.08902190649392094, + 0.4783795641262657, 0.279166156457582, 0.7109164753626097, 0.6809158871817893, + 0.6280790804360852, 0.0889457054781051, 0.9817317067119571, 0.6535451186876484, + 0.1939527534182891, 0.6408559181231186, 0.8993866462438552, 0.9150451910013864, + 0.7707936015605422, 0.9469322510206291, 0.7824096376890705, 0.9140153867152234, + 0.9897121559582179, 0.51539501082556, 0.0011796245137059014, 0.4494420410671016, + 0.5906906861197991, 0.8649291733453168, 0.5319086037684029, 0.0679451586804235, + 0.3881176084508102, 0.1922418341303912, 0.26340352924063204, 0.8596461225762598, + 0.0007355823247353621, 0.8752414013019265, 0.6672961252734662, 0.039522828359158435, + 0.020702880321358808, 0.265436687436848, 0.590333629644554, 0.6909471239397792, + 0.4915783671918532, 0.6131539097918961, 0.905176968193664, 0.3148528326259237, + 0.04267572489930438, 0.6880047232097563, 0.7485084759165672, 0.8369291406936449, + 0.8045255396796128, 0.5171223455893764, 0.25610198554541663, 0.00362118422663682, + 0.4993102314543969, 0.6820213443757897, 0.691216985231457, 0.0990361687096799, + 0.33646793001018027, 0.3056480346725515, 0.601899774477644, 0.48099216441977055, + 0.6800428994742934, 0.8982633876716558, 0.819540584848403, 0.12138344156765979, + 0.21239711733448496, 0.9638087043651087, 0.18142656632995713, 0.25491275752343345, + 0.2357106932568147, 0.1956339938954209, 0.8002730458640235, 0.254942686250915, + 0.39721084817295127, 0.21485355994864874, 0.021310042854654987, 0.07812066448341792, + 0.5166113367963899, 0.794076674190759, 0.5963198231631923, 0.8047033786386484, + 0.7502377990481346, 0.2080826921184018, 0.5919196788009434, 0.6109955940896713, + 0.31526198663695637, 0.3683360333359762, 0.30209545853939945, 0.988162111664907, + 0.19430450443341651, 0.39939768922278784, 
0.4144727964246955, 0.5162248705844749, + 0.03096668203774733, 0.3095931761518177, 0.8102379734730197, 0.6836604803201668, + 0.04353890026348073, 0.1092986198414877, 0.015041937914722636, 0.8572107201204873, + 0.26153720130032976, 0.9961909608289251, 0.8013947696150248, 0.11317991219242696, + 0.40797695847745563, 0.6660756565564173, 0.48239064242241414, 0.5167955852515312, + 0.4822308233919498, 0.24097806011824785, 0.8825847692270286, 0.10665185258114718, + 0.38838712661483255, 0.8595103986396891, 0.11598518144530445, 0.8556421983765271, + 0.5445750385171892, 0.875127329739873, 0.8815327699603377, 0.277041972171043, + 0.9092292850900755, 0.5020356367656407, 0.21266400093800553, 0.4930072932661348, + 0.23512773599142145, 0.4486892179131988, 0.47805508860243007, 0.38919236787781086, + 0.48956487345180444, 0.08912107945254233, 0.23114484364594667, 0.015254189018723996, + 0.5681523943997663, 0.3121044213611983, 0.8815887369367482, 0.905484789793142, + 0.5840009839976608, 0.6488582756150842, 0.25876476828962425, 0.5804004334936455, + 0.04662179178947112, 0.938482109018782, 0.8361392890402467, 0.12761095105809406, + 0.10083204328914475, 0.7540725301210417, 0.6480884237735768, 0.6538982229537511, + 0.46857648337816005, 0.9401349838335883, 0.3593697618964248, 0.6074744845978282, + 0.47382496562043686, 0.2775519370237508, 0.8331013083828452, 0.46898817485214184, + 0.6463445496807434, 0.966442183365752, 0.6245737425004668, 0.7430145537005464, + 0.30482798537045297, 0.44154315419470036, 0.5001138883464654, 0.7898224137342151, + 0.13402539318584494, 0.7451690610060407, 0.615421486656359, 0.340859836784284, + 0.16369729624741114, 0.5355128768810619, 0.3567908820070378, 0.39211755611624555, + 0.3427152872307945, 0.42016133957466684, 0.20324097308101197, 0.01703805972089767, + 0.20090861388795567, 0.6545758662007065, 0.05200813788991243, 0.6669179967390473, + 0.5079904446023673, 0.989527953023055, 0.46801780194695985, 0.195173647475112, + 0.47906548235497515, 0.7082184256293507, 
0.06364213960029319, 0.28923800680533573, + 0.6117359347358423, 0.8516911565016979, 0.1894664348617915, 0.8736712480631994, + 0.12079294015895137, 0.8660292443231999, 0.6279422152184789, 0.452933474066471, + 0.9174891392441704, 0.30321767658689824, 0.5512595717894255, 0.006626749152742861, + 0.612832676356461, 0.3585285687861417, 0.06886444137408543, 0.48595346851736576, + 0.033297932400682906, 0.613830088510247, 0.16109537177974032, 0.7903878201980087, + 0.6374448390850164, 0.19989772662744087, 0.43826070345924906, 0.2441306221792372, + 0.48660432290592537, 0.5527258098947379, 0.04187067657234822, 0.9982730406611128, + 0.5680257018541957, 0.6636809855961555, 0.9069448023247579, 0.7838792766781194, + 0.9795395758881432, 0.48128037298133386, 0.6967780641770658, 0.6243411434087454, + 0.3357148930170015, 0.2211533966817012, 0.4290583801583937, 0.6049181159420812, + 0.8737095409169909, 0.38951846802701395, 0.7497984680392851, 0.12023460660763075, + 0.061453224110397, 0.36297157175788575, 0.15245974144609353, 0.46733867494658143, + 0.0994720709557756, 0.217220651771879, 0.4075132436283174, 0.23804095597848396, + 0.1538321778128191, 0.5488322861187763, 0.2511958010276626, 0.8788252153941557, + 0.8896657737511348, 0.882703222618888, 0.449404196322713, 0.7628286860975334, + 0.19034307027869501, 0.04107581127707327, 0.29864527488946346, 0.6735571268889189, + 0.1250423323446953, 0.7412994205474549, 0.6169430754024365, 0.6448719194070088, + 0.1548444787183586, 0.19350060967828553, 0.9358757098887812, 0.4955865997185239, + 0.3814606552422225, 0.808333767071158, 0.8893725860482015, 0.8314930952847817, + 0.07658829765350039, 0.6786665191857298, 0.26344055729456783, 0.8999781696551226, + 0.9179029339706499, 0.6880179017224024, 0.7358005345075653, 0.6218672247952459, + 0.8428904602092945, 0.6897088506075467, 0.0866478002745189, 0.11183195906536336, + 0.5449751961879864, 0.4012465546597901, 0.676011607792312, 0.850620088685216, + 0.501694517133544, 0.07345426384317011, 
0.3758616640096889, 0.02763808067347462, + 0.40158697805251353, 0.567046588734965, 0.9071177213106677, 0.049560996017135595, + 0.9233781540238465, 0.9343530203413852, 0.8324523892626204, 0.9847360784391932, + 0.1669275054638537, 0.7198408269058651, 0.9243233456765426, 0.41842963766630525, + 0.5759673452028913, 0.4814933614651903, 0.9567610563439569, 0.1481173710821374, + 0.17682380650592577, 0.31843559452506287, 0.28199231210183096, 0.12598660187017097, + 0.5034892572147942, 0.008544760721867162, 0.03968535501964232, 0.17516276537023667, + 0.1284048790556197, 0.6620679875836152, 0.4694901436008866, 0.5677078083328646, + 0.6230604090828477, 0.2366338664190193, 0.8496179713167248, 0.8758161161150434, + 0.8500698921130023, 0.6799320154096153, 0.6636303105817718, 0.7451459004062713, + 0.28453104021235043, 0.35065822545560266, 0.5416492892029748, 0.8593863759099308, + 0.8881521277185567, 0.7937573790108394, 0.8453045417134741, 0.3883041142578194, + 0.9692550442113321, 0.12918713202051024, 0.30833376851262206, 0.11265001374990269, + 0.7948178629174499, 0.5761841396692509, 0.3812462766983682, 0.05326094048995844, + 0.36760773563514226, 0.2874727759508864, 0.295796112519496, 0.689370023219251, + 0.08073182604307583, 0.37365358264706827, 0.4714510710728065, 0.06014774622321528, + 0.4411476996440761, 0.6698791584453614, 0.6611891433295813, 0.937500159444114, + 0.5656213072285704, 0.6030269190651167, 0.8420212601559625, 0.1727409835801198, + 0.9239053840748805, 0.6264028839976385, 0.1125688968219265, 0.8856214515269026, + 0.6804999389955505, 0.9488939629715639, 0.8247076523657833, 0.05553512743333855, + 0.053925699492599954, 0.4302664849326089, 0.0696503695580536, 0.21439679717817517, + 0.34025394693542277, 0.3133696396655272, 0.6802742636853687, 0.4185547030247506, + 0.06998706760566176, 0.8776100604509095, 0.7288722782291346, 0.4309589285259948, + 0.5528713455540756, 0.6191524737760634, 0.3729964152126811, 0.9066385895738821, + 0.5014765148599959, 0.40436343425321375, 
0.7374995575362278, 0.42290278924861513, + 0.6411610195119898, 0.7580275705851004, 0.710099317310232, 0.0870942910442053, + 0.3638831089125175, 0.05701653428777742, 0.7946002564847682, 0.1871708694411338, + 0.2905866558114406, 0.07110586834065757, 0.2943823407634739, 0.9305040573092062, + 0.43102053974232446, 0.6453588397582007, 0.9684289895706213, 0.9062083865833443, + 0.4015208835080437, 0.9966162959923662, 0.8004890102333629, 0.8951728190855966, + 0.9160701780405514, 0.7894368924910821, 0.3527011486497985, 0.7980826966664449, + 0.7837797805894802, 0.7272415025291709, 0.3986484072479929, 0.8153028072478584, + 0.35996974719030417, 0.2380432713874716, 0.6137178816521027, 0.18295428850134254, + 0.16332747875259546, 0.7627714963554943, 0.36081637436434355, 0.038634595801635085, + 0.2745764856472338, 0.28487742636995594, 0.4007736877599293, 0.9727084118593076, + 0.23999807956723496, 0.9170802733402591, 0.09920283754378034, 0.9893485212910471, + 0.03421609814847737, 0.4444204195274494, 0.6880354334301659, 0.48428703032736375, + 0.13553395643062238, 0.5372505007965466, 0.8986798972269063, 0.7690651523551951, + 0.5517692241934921, 0.28290864093703405, 0.0015032880172971552, 0.9359138477250851, + 0.7204026770607993, 0.38645633201505325, 0.2376676101711943, 0.9556098816891438, + 0.13424177315622177, 0.4338850730466851, 0.0690403785370941, 0.08735238503978138, + 0.7567112331923415, 0.14440394738526774, 0.6527257331853219, 0.1242308926220479, + 0.7286767416433939, 0.0890399517207251, 0.5903750585147725, 0.17699289618163005, + 0.3598248690063782, 0.865819520516121, 0.7091455312100029, 0.6485904419878334, + 0.23522826265925234, 0.9437174255221553, 0.3166064470610629, 0.9788418660783021, + 0.8560531605505547, 0.8504193180920397, 0.5325384340410123, 0.41313591818283957, + 0.5831059281510506, 0.8294585761928512, 0.20741293321957932, 0.09988887916101519, + 0.23946260660347973, 0.12800711707912016, 0.6239021034830816, 0.31862184638679025, + 0.14718936588589016, 0.16928816244678513, 
0.050226448976075866, 0.4433539696020661, + 0.6199389106897404, 0.33866454242652777, 0.6137436325748963, 0.35937501847926134, + 0.16838743255887156, 0.7254951183639806, 0.8850251571579181, 0.1918514711628594, + 0.5295718237230015, 0.14671945003793585, 0.48066177631705087, 0.37035578582658235, + 0.5846797157143064, 0.16618786674908292, 0.36188906275157695, 0.7247960213601661, + 0.868211673042633, 0.5918788857097884, 0.8090970392521835, 0.9790561943658818, + 0.9927261947936933, 0.9184163766576937, 0.318113901899028, 0.7084949251000522, + 0.2070098721381619, 0.01346563799883671, 0.5923439222949263, 0.9094366901577481, + 0.32952001942733844, 0.5743066236028878, 0.9735467777955552, 0.9773620150843084, + 0.9668444780911369, 0.40230004346630344, 0.17633148979714908, 0.9675001165668295, + 0.014592186594883372, 0.33863190854568737, 0.7958151580013121, 0.29882394915065713, + 0.7970903609024033, 0.8416256823246926, 0.8423420925123177, 0.057603330487783655, + 0.6990625811630484, 0.6788666546060725, 0.6243703185804795, 0.030916772578805318, + 0.38875997029835085, 0.5803240401185477, 0.5443761380310224, 0.08176930652062564, + 0.4744144231510765, 0.23906897088801837, 0.8320573496340441, 0.7235053476252652, + 0.28516057090759916, 0.09180866317157044, 0.6324771301851917, 0.5148431564320813, + 0.7523563795311982, 0.3735235411243021, 0.5527453149583392, 0.4983451256248954, + 0.34902274864989435, 0.08084404411482948, 0.28735858320033536, 0.3108519426219638, + 0.349056714320124, 0.7427424387949301, 0.6460043375052836, 0.13396562723950822, + 0.15101337524356917, 0.10703884431285349, 0.5396786187168046, 0.9704225749913137, + 0.29385606796325336, 0.7982807236158526, 0.3511098267640179, 0.007449232298065933, + 0.835806317299432, 0.5237447600093503, 0.8549346101991091, 0.9040002690066152, + 0.5476702736550557, 0.7470797251487761, 0.9082524283124408, 0.6377573558862272, + 0.42369296852175853, 0.5577037735732207, 0.9067300774741744, 0.6880619742970875, + 0.5425734096471484, 
0.6867628611394454, 0.08351643147420151, 0.668961930750152, + 0.24280807058329779, 0.7649560915172015, 0.16144846442234084, 0.17310502209771605, + 0.4383165705549248, 0.6682421320361475, 0.48999489592626033, 0.7961411197051519, + 0.9095037508670331, 0.7825755929597615, 0.08596354949952578, 0.901917100305027, + 0.5382484337009568, 0.9974337219833577, 0.748430085445022, 0.745405318697651, + 0.13723130196844224, 0.7353331412328397, 0.3691116329275165, 0.7145975387584649, + 0.5863919467486739, 0.6657199695285304, 0.7312355234132424, 0.2748776970483794, + 0.052880078363425786, 0.697430771327525, 0.7192038723646083, 0.3531872322991578, + 0.41960430507865654, 0.2304286782953665, 0.02287711607706544, 0.3075833904179307, + 0.1428952851614408, 0.6510234164698306, 0.45028070729135317, 0.9453700208245632, + 0.5199495851750379, 0.19054826493688826, 0.4507140915853943, 0.010116874920188312, + 0.7659184089081081, 0.15592544057670454, 0.13710764777100182, 0.06636870486528612, + 0.17191101025077882, 0.3221659858895043, 0.531273604554789, 0.6835349127313585, + 0.1697347789346958, 0.44298787436309517, 0.017852449875078724, 0.28338196568753127, + 0.6683029282409125, 0.5497033719539377, 0.614652574359509, 0.8471188669185117, + 0.9831938421999552, 0.7392202753264133, 0.1992681227467077, 0.9076291876910116, + 0.4451456859197379, 0.011000978283891794, 0.6228072794433337, 0.26922267345500395, + 0.7266432465779755, 0.8725746577827116, 0.40128124175915414, 0.25715084258330834, + 0.44807905220602384, 0.9876208353612252, 0.002601644060992614, 0.607481791499339, + 0.7528609041430132, 0.13942112970434362, 0.11469248538140198, 0.4895428273516049, + 0.8275864135606552, 0.34581419624547305, 0.9228706367423446, 0.08468055059005386, + 0.5101244552930857, 0.21949694167652645, 0.9906377110794229, 0.3096520314924064, + 0.27122063745708336, 0.604122968851207, 0.15580034363219175, 0.9103822694962878, + 0.03574147149101781, 0.2061007124517743, 0.5072352672997331, 0.9494258655814259, + 0.8499077698724496, 
0.1385071273901718, 0.08170679154114213, 0.47317632686727784, + 0.8807748825925836, 0.8298728324755968, 0.4603442506838389, 0.8304178063026396, + 0.7886325920294049, 0.6759092662972309, 0.7881844576908136, 0.3136948972078951, + 0.8012737103550386, 0.905634666350063, 0.8082140678481798, 0.32981627099159916, + 0.7910561730198871, 0.05696448262288256, 0.7486267738234921, 0.8910770335402092, + 0.3916767189067516, 0.910430797275563, 0.5506223034344521, 0.42086218568564404, + 0.5267924268123125, 0.5706995377457933, 0.7722370731192654, 0.6326600040373183, + 0.8340027909506321, 0.49962084029010345, 0.5309999940369844, 0.6653463095609469, + 0.15880158201628114, 0.745543311950532, 0.7656170698330114, 0.6698822741348551, + 0.9376868182706565, 0.8710406030686705, 0.9957700224788494, 0.9871476009164123, + 0.5864283826398795, 0.1828880096128641, 0.038625545305283615, 0.08656246050258021, + 0.6800592569128557, 0.9731615456854407, 0.08252555817173335, 0.8947087689465619, + 0.7412593163159412, 0.8616255758256796, 0.31976943409621605, 0.8853890548636507, + 0.17006482976879866, 0.7292040027342938, 0.03169430900162673, 0.9491472012593519, + 0.8586946843619252, 0.06443623437120938, 0.2562763333098427, 0.025245535252461315, + 0.13123540752479168, 0.014650719297637727, 0.8725613653621531, 0.008958190938421096, + 0.3117760303045205, 0.0023928806710279416, 0.7042335434214668, 0.6077543376374114, + 0.6139508800308294, 0.9201137965676772, 0.6525440055951811, 0.9708595550566951, + 0.30435289793546216, 0.09116908423654368, 0.6929224266276129, 0.7905682080952495, + 0.08404659566815154, 0.7557580083041496, 0.28844753281858804, 0.9586640865972693, + 0.22222097585761458, 0.5795308220058911, 0.7806366279639314, 0.6005370348365431, + 0.0015237707795864308, 0.12695290247632174, 0.25818605129567584, 0.4767305061194619, + 0.3978426903430806, 0.4952549426166929, 0.4048224938846018, 0.883892968606259, + 0.8695091638733617, 0.3712576724571395, 0.07310318446907105, 0.2471136247363015, + 
0.22670254802389178, 0.37338233575983093, 0.6653606494847659, 0.8358558420852564, + 0.1482114518437857, 0.08192562682462345, 0.9699511775738996, 0.21107983263156915, + 0.7148266840817413, 0.39971196876569703, 0.14645477418550845, 0.39437355153445386, + 0.25108769965985933, 0.7919030429487443, 0.18051200067902895, 0.6528285379439994, + 0.19463811780745865, 0.06734173833331702, 0.8189946011240749, 0.2834143949921576, + 0.7271201732957122, 0.9642131061140009, 0.5931680981398827, 0.07116121978438295, + 0.7798701128439969, 0.32781286572822055, 0.0001339564577385044, 0.6982041247808188, + 0.2038333656681255, 0.6016180691187267, 0.34986642839308224, 0.5893495870264561, + 0.6254032365377729, 0.8090685559709809, 0.8402892529324945, 0.5687294018914093, + 0.3719664997535329, 0.898423599030536, 0.4346250906943161, 0.6954711403640765, + 0.9670054673080518, 0.3434738983803315, 0.44914804644415895, 0.524281107962152, + 0.7432068048717004, 0.870267945384438, 0.9276441176340413, 0.7689156228195979, + 0.20360965733129965, 0.5875569789292655, 0.021600086135395657, 0.14398116608924172, + 0.6189165170600164, 0.7252181742077203, 0.5065743878658895, 0.1838694153262853, + 0.06457774532971128, 0.12412193354249013, 0.20980292857085636, 0.9047881036208002, + 0.792982579227081, 0.7214819825256202, 0.43038454843277996, 0.2832664339236689, + 0.31548782008661846, 0.9426069421287532, 0.7377473322168556, 0.4604413304932282, + 0.628167105720581, 0.51567110124762, 0.005431104253906249, 0.34340596987332195, + 0.6793466167682524, 0.6386304774589313, 0.7900904352035243, 0.01911226653419673, + 0.0895209062591108, 0.7716196409485585, 0.7429370395139598, 0.08520634556918005, + 0.6947548366683828, 0.8558180066852747, 0.4540191348782765, 0.5350098723295885, + 0.6825116312510786, 0.8820703201230866, 0.12621244898798223, 0.6023575103762875, + 0.1157549769762759, 0.5432530971490609, 0.33102117051726354, 0.8012606335666543, + 0.5222363614986275, 0.663775478005426, 0.46537921758000333, 0.28605538656110285, + 
0.03885659544932951, 0.6782255587798305, 0.6699753630372093, 0.15558345100761328, + 0.7116301840554686, 0.9884407811311055, 0.15878750297138589, 0.9169576290879906, + 0.7665984065521804, 0.1188899682580612, 0.9095366579822816, 0.19532396997443402, + 0.11195658661875607, 0.3540280296665226, 0.47524855998266613, 0.8399571940157706, + 0.4185786899570766, 0.4212442791621328, 0.02223162793846467, 0.5793190667733108, + 0.6920977075784367, 0.8649172089925998, 0.374142174565405, 0.7775297119886831, + 0.15370854231932873, 0.5653777920240877, 0.1751056586499734, 0.6892136528524128, + 0.13699526170593668, 0.5440573619342335, 0.8008753629673093, 0.4472363008894372, + 0.05578681209307079, 0.4652136646511056, 0.16708452139561303, 0.3439303028422843, + 0.07422690201088744, 0.5739953113633971, 0.36249124205890704, 0.9171198919956922, + 0.2279937210166575, 0.5596283473438739, 0.8920135815455199, 0.14729560858669288, + 0.3544316121765825, 0.3951815138963941, 0.6756043429257418, 0.9418046210444497, + 0.7766360959320119, 0.6687018774505851, 0.6835551622967121, 0.9887118139060092, + 0.9149272641717396, 0.4154900690837027, 0.2762814304064456, 0.006630810823948785, + 0.15122974808610623, 0.7763738023541471, 0.1700272713088029, 0.81354031666843, + 0.7539081143367642, 0.642136271180784, 0.812454514555075, 0.5610282674887278, + 0.8591507258913956, 0.7457054997666415, 0.12778556068659352, 0.44356475648922544, + 0.26176370341499045, 0.016516224441699867, 0.004290147632306729, 0.22640873029678465, + 0.8030692034976082, 0.7787840467849276, 0.7522582602457509, 0.9781522750589372, + 0.052396846089859395, 0.6132548117789737, 0.6814887851217211, 0.5175932336804455, + 0.47984941465482556, 0.11079171938354215, 0.7432631433721439, 0.22121136255998752, + 0.5386906475225569, 0.8626464558003436, 0.12535879515294623, 0.9574237873040147, + 0.09079089536411455, 0.9238030472592919, 0.05338739789362279, 0.19071532383830148, + 0.4176874531365481, 0.8510792555474312, 0.1998489418720122, 0.29424230083772496, + 
0.7132953021430298, 0.7742960489247359, 0.10088672097030649, 0.9261083529237409, + 0.17093217963883267, 0.9403297298385496, 0.6746710308795896, 0.983407244107186, + 0.4221013638216137, 0.27167111450692505, 0.049172909242495955, 0.1622160528562574, + 0.692333500659214, 0.00818209815950166, 0.569748779117895, 0.8067889121058818, + 0.7391359265468849, 0.4423727321579124, 0.76696934036463, 0.9919048137847639, + 0.8018051903260712, 0.6345547259024037, 0.7756573486476269, 0.02088948322027495, + 0.16340824954415878, 0.40323492894481483, 0.13729143154864343, 0.2937015191114739, + 0.08114169273771643, 0.3518930356651795, 0.5998634113225406, 0.2449234353989107, + 0.6924003214194676, 0.5931727223556779, 0.9081168257657946, 0.9694363332002457, + 0.8602903226402437, 0.9898386264051025, 0.43820226344802526, 0.9087551244254385, + 0.493965658081453, 0.7966536624574603, 0.8313631300978163, 0.14059446799981923, + 0.3352309849008611, 0.7428677830283745, 0.32673470604586485, 0.0015735886915587693, + 0.46757265635379996, 0.2138363691349463, 0.43296311239924923, 0.7317692186088581, + 0.07414355427874642, 0.08932984065363281, 0.9392436863889443, 0.48295076903212164, + 0.13899943685549387, 0.16514324684232518, 0.616695195171261, 0.31002441467910913, + 0.14422148404619684, 0.7333037631806374, 0.5177949375916849, 0.0824649941636647, + 0.8371351659817691, 0.33191978612564443, 0.9596635276378405, 0.7046041585575201, + 0.49864815408400187, 0.4646981921749078, 0.8785710988719285, 0.527831762451573, + 0.4902896429397935, 0.36391024347264567, 0.996486603690265, 0.9518638382568707, + 0.2994909011386164, 0.48921922440986787, 0.3565529887370077, 0.2866294218148713, + 0.5674345631675577, 0.48873929834171614, 0.4605530036142562, 0.07230353404555179, + 0.051599524714266765, 0.9664901957757331, 0.48834702786568074, 0.2342083606660702, + 0.026270039923457777, 0.8768090525941173, 0.01654939205628081, 0.5897469401483456, + 0.34645994834486626, 0.5848969230113864, 0.7881781753155032, 0.8899225313024621, + 
0.0964183648380339, 0.21440454008976384, 0.00013193256029708778, 0.5976814114008757, + 0.644736271722694, 0.0027275917696421015, 0.5528191969971925, 0.7274468157465319, + 0.9843436463998434, 0.5555683665825039, 0.6951002588148754, 0.7716788530687794, + 0.8315882467372318, 0.929133022911197, 0.3486617738803698, 0.34592367022246595, + 0.34431798045467543, 0.6702829463721361, 0.6667106969199146, 0.24659976811942497, + 0.4136374217717127, 0.4212731966436015, 0.674218280106361, 0.5948167582886236, + 0.547264756294244, 0.3283094476143993, 0.4657810913492053, 0.14488651124192264, + 0.7130734181857544, 0.41498780863726514, 0.018406521313439672, 0.9881738830545875, + 0.44580163338492973, 0.06247448195301408, 0.6497472819602108, 0.02115682842914146, + 0.8590384683777281, 0.008661591989204798, 0.9453222808947551, 0.7072050004713644, + 0.05673244008761047, 0.5792345974589062, 0.32962531044070564, 0.6864175824810845, + 0.7274039173683481, 0.5181493646433266, 0.9658579518167977, 0.075332154556627, + 0.4065671697105727, 0.3866859080877916, 0.145445126824565, 0.6967670826620894, + 0.12103753859719812, 0.629868050919494, 0.9271635466079461, 0.4536130608013994, + 0.5045690870746585, 0.9739522384651846, 0.5440922365642499, 0.6188408694546577, + 0.5211757773614668, 0.8319533982633284, 0.029721693547344552, 0.9604273409511243, + 0.23437496173428507, 0.3477466676347637, 0.597922345332943, 0.47815815485181856, + 0.7368831630732353, 0.3383020992232656, 0.3049446033995109, 0.7829458489686137, + 0.7443859269387701, 0.4676897387824628, 0.45089746231236205, 0.48634563252579743, + 0.43886509007321595, 0.3947924938391364, 0.4223228602753234, 0.22455979049072372, + 0.7411963911096001, 0.6531321271651636, 0.849541091982193, 0.6314408696681745, + 0.27526014098701246, 0.5163793649942291, 0.8377949603520214, 0.1649897575332031, + 0.6596009893800752, 0.5483961205070903, 0.5204105335695532, 0.18271970882890398, + 0.593266163389924, 0.9258372807099275, 0.625197860183363, 0.7813363229835547, + 
0.11341374440845742, 0.024033085179401614, 0.37906946531813646, 0.03488542896200164, + 0.5047926406532258, 0.2729116625646527, 0.7250428392459488, 0.3842268914915319, + 0.31309142847148685, 0.21660729878285445, 0.6887239468148271, 0.004993540892198722, + 0.8458079254507886, 0.007368060080645722, 0.29894534701711073, 0.35312852934434835, + 0.20376818749236536, 0.7691293902017794, 0.13815532984971746, 0.1590089534532222, + 0.27737585101899687, 0.7055049235505297, 0.06542810541646804, 0.3855205550869971, + 0.6856840631479204, 0.8951889418641149, 0.33644299008693435, 0.23681088643373305, + 0.5119092163008693, 0.7074313623854898, 0.7407172820834985, 0.3590709425989018, + 0.9387528311714048, 0.5895482317715295, 0.38387599952929596, 0.9919373370584181, + 0.29271557312564467, 0.373487073585656, 0.4219037928478685, 0.8953112122854749, + 0.8509307252596261, 0.5925508056840836, 0.1289443517795088, 0.07532606031712319, + 0.2308098835082638, 0.554857705945244, 0.08496171090310822, 0.20259384294125882, + 0.18452835134222978, 0.15133118597324935, 0.4615594571811862, 0.9545205119859813, + 0.1846637695507004, 0.4620868083094941, 0.15237791846133564, 0.36948936251579545, + 0.5020829010793352, 0.7396696193495315, 0.6656169224973635, 0.37584820125948326, + 0.9273750665079447, 0.6420980277396114, 0.17534283002199857, 0.2425308785977046, + 0.9452127092615741, 0.577537497174393, 0.012030155110893004, 0.08334220637055301, + 0.9281465196476314, 0.941555330011809, 0.9348840551629855, 0.34249336138921815, + 0.9949287309299863, 0.29662734413677916, 0.2045755711917514, 0.03613182421648209, + 0.7596144976266055, 0.6442765506012742, 0.6710668991638102, 0.3356617354905028, + 0.7700854185870692, 0.21901564190685519, 0.12342865273395087, 0.34909342847846403, + 0.44742946293077634, 0.6709957763246563, 0.8333403232406411, 0.12226959596809617, + 0.23531339704765664, 0.3776299334060649, 0.7112286512960347, 0.0872677185676285, + 0.4788652169760982, 0.45309615028691297, 0.6255379373550257, 
0.37637909221467925, + 0.08750845964819831, 0.21244737992271556, 0.06998754557037834, 0.6268807304639307, + 0.37399361800120046, 0.11416674094184265, 0.11364741736519457, 0.5279495187043634, + 0.5766536754312145, 0.9034483267479185, 0.24633712987428003, 0.7259486518800289, + 0.933527479901645, 0.9933610208307955, 0.061068546275511726, 0.8681116504148529, + 0.5994630901841026, 0.07906453952726389, 0.5518616008772085, 0.7481793929795738, + 0.09753080681374215, 0.10130409555855568, 0.47255442367894573, 0.9946720429087478, + 0.4472145097516641, 0.1541469842620875, 0.14066143536276432, 0.5057842367270858, + 0.07385962488321485, 0.8200936176927385, 0.35575680975752033, 0.6799967869166224, + 0.530580447843163, 0.9164102954086099, 0.07987411488916707, 0.32697912481835567, + 0.7142357931484125, 0.44315651854783367, 0.31684164402249493, 0.07729703025529, + 0.4612525787168896, 0.06885258484274792, 0.907818098736975, 0.1014003315694808, + 0.5859366834617029, 0.03390395009051328, 0.91074170468797, 0.14357735877077438, + 0.9957309791929156, 0.1278846862048807, 0.9257745495908779, 0.19195787139286924, + 0.4266346219445917, 0.8074318862062672, 0.6065417996780311, 0.9012510881659451, + 0.34325433046211107, 0.6648750725774587, 0.7959044902153384, 0.6844996555399977, + 0.9957444290037094, 0.03011965948308104, 0.4408257612405182, 0.83692227956657, + 0.5766995410946815, 0.9655287650938614, 0.2532218946914593, 0.2907661740996005, + 0.9752590042731862, 0.4167376881927827, 0.8269854681749177, 0.09215798301937239, + 0.2742805049916154, 0.8743817055687811, 0.8722717625070588, 0.32506670179785224, + 0.5150168900942156, 0.6510099739682977, 0.040155452864908514, 0.2335972074267122, + 0.28812723959723785, 0.738200001141994, 0.5939837161093867, 0.30882776750968677, + 0.15230798169019144, 0.7352625559274293, 0.7139002575508098, 0.37565771873524767, + 0.10293621921355078, 0.6534316283814973, 0.9637975373382056, 0.10469474885196783, + 0.5347245432817731, 0.834718811852973, 0.8625546835517777, 
0.0003268391492184941, + 0.6060281282344564, 0.8915741267568583, 0.8073983445855917, 0.04842219651420632, + 0.021613747077693368, 0.23626495887230559, 0.1738206792279524, 0.6401177465010348, + 0.4981891667545588, 0.6485747214226244, 0.27665739967875624, 0.14871710449862985, + 0.8595117812472408, 0.9449725835841281, 0.38085535043140495, 0.21138852532645513, + 0.26797423259163267, 0.5533296139613121, 0.3035697369031277, 0.8434926396965746, + 0.20041139333024605, 0.9575121386036994, 0.917020126076206, 0.11292881370902896, + 0.9648337969983836, 0.009960802635459176, 0.687371128410807, 0.18588957838068365, + 0.7414231355356855, 0.3193247267560779, 0.4369072621226425, 0.561626742191324, + 0.45875196081660574, 0.6625804085861026, 0.41563738874349976, 0.839247078088345, + 0.08571882623271232, 0.3156077430137063, 0.9650227216326556, 0.016254225904769548, + 0.3113913346904329, 0.9093079524937882, 0.49554172171114463, 0.8601646035374751, + 0.07162315074120229, 0.5091554009703184, 0.36049533818472823, 0.5971042984870208, + 0.80119063473808, 0.5659226876983238, 0.5581306868486844, 0.7916233499070181, + 0.07630922466197532, 0.36947705214830495, 0.35216206701708885, 0.10652446899275125, + 0.04622563404650104, 0.5753114757729788, 0.8760942758774782, 0.4938524246376115, + 0.5766685406757934, 0.9337930231018294, 0.16918589950893104, 0.40120537322978933, + 0.10254445580952054, 0.2808710536582856, 0.6246760601088422, 0.21909074827124253, + 0.1322123648239686, 0.6304228676507453, 0.9484961691400952, 0.7165679364688612, + 0.8946207565900517, 0.7993276100288724, 0.7179814738422576, 0.11877658899063726, + 0.9612697646847594, 0.635287952403161, 0.20317788640515677, 0.3802264537724305, + 0.2502756790363073, 0.7474654651506052, 0.47999845173062605, 0.7735392931984596, + 0.2259206770930715, 0.7641578054457718, 0.5521146150579526, 0.17749717425748612, + 0.9363413586939628, 0.5434848899640735, 0.5594807159304569, 0.6439461486641193, + 0.5660142730972817, 0.573777693667837, 0.8753717104681724, 
0.4243043288295506, + 0.5840800293060847, 0.07788813501479297, 0.4737119498621364, 0.14788865372908921, + 0.5157214734477279, 0.4981923297675872, 0.8005997339055952, 0.08325099351220544, + 0.7125177892193846, 0.6245684114795713, 0.32665093012336877, 0.2070690221517849, + 0.48911365188670475, 0.6886527250995123, 0.3060637553208424, 0.18555582781884994, + 0.732940451760642, 0.29001954013611997, 0.5193570151615291, 0.20599926727585816, + 0.3606855509794995, 0.6844722986646388, 0.9232446034747206, 0.3276077257142528, + 0.9606126648852817, 0.9962670097577428, 0.5839195666831178, 0.5564294484289228, + 0.44390846989682864, 0.3045264506253025, 0.6563318467068981, 0.6798824857098574, + 0.9863983509977264, 0.06073493704660615, 0.8987445685891152, 0.47072885962143984, + 0.42783276875038223, 0.7016114226163853, 0.4107474748269918, 0.9725248849400044, + 0.5487260411718942, 0.3602459848801891, 0.2048243212640377, 0.7048913158329146, + 0.09978268079067831, 0.38870605625308163, 0.08986458471739434, 0.17329341074020088, + 0.7588457975246111, 0.46957188509803294, 0.6454315630015993, 0.6497784298780183, + 0.37096835863377275, 0.788456049657768, 0.18238737115851422, 0.02302855400784809, + 0.09361584807709855, 0.19042719802989005, 0.5660847647213608, 0.40977346500091627, + 0.9379522071320472, 0.7213162333057185, 0.23647181209233414, 0.0033656616046955357, + 0.4502405572571988, 0.3981406588868234, 0.23528616394420376, 0.8505326587792429, + 0.6983507476945467, 0.6564206332115747, 0.2253368547532426, 0.027073706575102885, + 0.7617142278287264, 0.20819468049475387, 0.0849225570877471, 0.6464851412960609, + 0.30698793237345046, 0.8477836932596925, 0.7758283820044125, 0.5333516351133905, + 0.9547284565002674, 0.09509547838645183, 0.9349731153279355, 0.33145884219003663, + 0.025276347946489475, 0.6678093876719788, 0.8024795080366857, 0.2006497010404803, + 0.5559753686724215, 0.693490335470632, 0.17331488709064247, 0.26543984340233817, + 0.810107763246411, 0.19946017744264954, 
0.34062414334743607, 0.2538621594146887, + 0.8976140916619194, 0.8179136289285175, 0.13997599867166644, 0.4526263232933132, + 0.9099088899621858, 0.9180265225007459, 0.26900148113273137, 0.20434295051741924, + 0.9254333505897326, 0.689089443312313, 0.07692988071785556, 0.8613476323833271, + 0.8659284862740101, 0.6153050099838057, 0.6822108692118121, 0.09370227858004332, + 0.13605600718391264, 0.6490578286423443, 0.6814686551352754, 0.5047745431177096, + 0.4585919553715657, 0.20144146814319286, 0.08025259641589777, 0.07794727167664028, + 0.2675986122499858, 0.5631800635784436, 0.9908708420189721, 0.27306994272222207, + 0.5816013229059506, 0.36701970427583996, 0.8920860583334906, 0.43146305906240956, + 0.06430051522916791, 0.6002227281987669, 0.2174747196456609, 0.007307980851831486, + 0.016468331482508036, 0.7162567662580243, 0.9386166310013702, 0.8562247091342103, + 0.46461879531982275, 0.725948397033208, 0.8558348561789517, 0.8213089085731266, + 0.6474912610414628, 0.331760712038768, 0.5033407160815184, 0.6191148446878322, + 0.2279905381598354, 0.09077987241513097, 0.09855296979839656, 0.869400313638827, + 0.4443723711865347, 0.22512659912010913, 0.055731805531847534, 0.042596364904384565, + 0.8387878531620769, 0.19952423249150786, 0.06301955233274859, 0.6295236223148944, + 0.22796433090951584, 0.0786425178763317, 0.5275102254528009, 0.07818271657348519, + 0.8671531578576058, 0.022463564129366786, 0.5895022924397852, 0.2877821098352795, + 0.732509828973493, 0.38476040175712745, 0.029079675892063728, 0.5680441746255821, + 0.21041446332525948, 0.9855856274400493, 0.3805577084950771, 0.20433285491935527, + 0.4389103799693328, 0.744899902121528, 0.1598673420261677, 0.4380217994839748, + 0.1612383216056471, 0.173239268213766, 0.4776894730936536, 0.04295133613711055, + 0.0337971095486701, 0.5330575305803935, 0.0575630317703546, 0.3625065647667213, + 0.8376028929591584, 0.5749958693067055, 0.781576393369037, 0.7139774732191775, + 0.2509516687552569, 0.9600576022820434, 
0.5726308089540263, 0.12526434319385582, + 0.04372958504123803, 0.6276034654102257, 0.18968396369879814, 0.8401067133435376, + 0.10829543771883066, 0.3257483104941373, 0.007120204066152103, 0.3146537754974823, + 0.47136564347535725, 0.3325103931498934, 0.16565592032403087, 0.6113580359248376, + 0.040901099660420215, 0.19007604447889503, 0.734501660443507, 0.4602909570938073, + 0.6829838329748097, 0.36203910540376316, 0.5053944623476256, 0.5664242575184198, + 0.9008772611237756, 0.8185885262141374, 0.6101537318103396, 0.47271779898380517, + 0.7995038877033812, 0.915654612345703, 0.6526619761340613, 0.605301324702423, + 0.18432817691314374, 0.31047977619783196, 0.45012993309019456, 0.3848070657480769, + 0.588212831802506, 0.05952501130191601, 0.3089435114570902, 0.9335631889232083, + 0.49146042948979607, 0.4596513558251957, 0.6334332878893612, 0.6481079104876946, + 0.4169034492348723, 0.28099470671200755, 0.5176086077744201, 0.5758801513570434, + 0.8251238156587757, 0.547101947791375, 0.5146178202604017, 0.28038677022510106, + 0.9274832343240935, 0.18488093539725048, 0.028105336399675274, 0.5074527727239252, + 0.46903404196108633, 0.01007876648848316, 0.6082886334835492, 0.6532932019441385, + 0.013322708717175336, 0.20502802392466968, 0.8797583778901342, 0.7702188786437175, + 0.2070786155168104, 0.38895566725539643, 0.7826099427413439, 0.5325397998121318, + 0.43039832506201603, 0.11956790739406642, 0.48374117189420707, 0.521196161101503, + 0.741509414519302, 0.9750196788370971, 0.9612235203749636, 0.7587722758456197, + 0.8559660620247886, 0.9138992273970941, 0.6114331959310799, 0.8117369559273934, + 0.901719577933688, 0.701400171576896, 0.7385315905991914, 0.6577139777741439, + 0.3573211113918865, 0.5981766645443906, 0.2135052254575136, 0.21447273042019654, + 0.463114727921643, 0.38252018001231747, 0.05858394796197941, 0.4541717159198426, + 0.25795093077506215, 0.550178943853917, 0.6074798782765946, 0.8936860806925331, + 0.17742943571758174, 0.28711809648492037, 
0.8706352730090806, 0.03259955820080618, + 0.5341624433983146, 0.5365854451854938, 0.9459290988119022, 0.10479310791009855, + 0.8134479602019148, 0.8463616333227205, 0.4712719019432351, 0.5187574249202905, + 0.2798036370823188, 0.40017328106577943, 0.24201990492354541, 0.1501803335775994, + 0.15302358389850723, 0.5068089873783856, 0.9535736234373514, 0.03288622417870046, + 0.7015671129360214, 0.9026674864626972, 0.42083707496232403, 0.8890961090665441, + 0.22196306938755506, 0.09519957018152558, 0.6740314203846373, 0.5748014478855076, + 0.33144124755948756, 0.6838473126915998, 0.16971209630966588, 0.09583671032660623, + 0.9482134549763758, 0.7231698372680777, 0.5742303699985246, 0.4930437622198026, + 0.5150647663319219, 0.2295847263058195, 0.7327366937410855, 0.4321090411928221, + 0.9758773717822556, 0.12583285304938707, 0.9405644946850603, 0.9104346576565615, + 0.6468008220481677, 0.23005234330930902, 0.5786096097706357, 0.37325720086992775, + 0.7084352601695846, 0.5728703312238945, 0.49658644476946334, 0.7113884002808946, + 0.4826248690253867, 0.1160630670680961, 0.21483279615304118, 0.06376699481848302, + 0.8179452815539806, 0.9239224610358371, 0.8266983414259692, 0.15005107932116457, + 0.3032117946809746, 0.6996004199523914, 0.7590468492897986, 0.656564365356466, + 0.39423528116383466, 0.7010507713651717, 0.10724705111767208, 0.6022885366497458, + 0.4104758009549806, 0.44599435904728324, 0.8813171861249125, 0.41912787326407697, + 0.34666030266179226, 0.7546111016734821, 0.502072873305139, 0.8113953100511652, + 0.3906585059360066, 0.6880327753691288, 0.2009483429728417, 0.5686489068644626, + 0.7035529411451102, 0.7542045323372278, 0.3898342926612306, 0.9761232177864375, + 0.7574158067044097, 0.8779834335120824, 0.6709364736649338, 0.9011184378252315, + 0.3147458744641488, 0.32507247452038013, 0.21425964411179554, 0.0035184977431740316, + 0.5228165812896587, 0.6755876929931922, 0.013567251954202941, 0.5085004166721712, + 0.6878583580235702, 0.3195537542321295, 
0.04895580614551076, 0.5853264099637752, + 0.12258934342127947, 0.4775424364827874, 0.7723295089612472, 0.7305713048870448, + 0.5905926752348803, 0.5872388885915949, 0.8401306748988329, 0.5427167619605937, + 0.666300626716617, 0.06371688631748307, 0.4790432612213785, 0.6839227253018579, + 0.8418026794499541, 0.7063680359352544, 0.23878150132656706, 0.4304997879237987, + 0.8499306791035737, 0.29550272530719146, 0.3780748358941388, 0.8872528400552816, + 0.6841236146016769, 0.0020789323725591835, 0.45270398360611497, 0.5738622605190704, + 0.9363900750810993, 0.9539075814199297, 0.9055972717405635, 0.9464556721808023, + 0.003059722706603596, 0.9088009828885675, 0.05899686570080087, 0.4366875371600666, + 0.9352359607368278, 0.31573543162442885, 0.3944099276425578, 0.7015320430192149, + 0.6695945776146562, 0.9019690368286237, 0.3285027647930605, 0.8928303952679375, + 0.913586580025072, 0.7711039763337023, 0.4499075842270144, 0.7637018341856634, + 0.6111123296509879, 0.19600971504978393, 0.6082558785681481, 0.41831753821220163, + 0.8826003107426394, 0.9151416538581988, 0.40042855168713165, 0.0451842472192171, + 0.1626758120278794, 0.9218579593517987, 0.2880362662259337, 0.47078269324762245, + 0.006281199967602347, 0.06203144365918323, 0.464908376651873, 0.35672164906963466, + 0.3356831627161171, 0.3378509869446701, 0.6693174034963342, 0.6319239460366327, + 0.8927467860136303, 0.3377276388792566, 0.24874242176210337, 0.06528998092347693, + 0.6098478623660899, 0.9269997080150415, 0.06831183061275226, 0.0902546632754927, + 0.5024041499549409, 0.8318595488436714, 0.646045682925065, 0.7027263070461592, + 0.742962892612677, 0.9990016616215829, 0.8323934392975538, 0.5156229639556337, + 0.34240764189813633, 0.01018111257064902, 0.325841488450341, 0.7653123303419905, + 0.48255831405792426, 0.41020656510564624, 0.4635758551378758, 0.05726505208284005, + 0.25578448367957807, 0.09492924494978128, 0.06394320130790931, 0.6342781661686804, + 0.7097760726965076, 0.3960640490955174, 
0.877809327547959, 0.4531178897882824, + 0.46644259092740326, 0.8117347678607901, 0.3427040946174452, 0.5142562136725705, + 0.13916666260320254, 0.04435940978161934, 0.8406961237189915, 0.39114820121480665, + 0.4408456081272869, 0.3205847200219718, 0.8756087310309373, 0.171645453520327, + 0.910102549296364, 0.6238037512072698, 0.029741299187516335, 0.7228381796204829, + 0.32908522462614864, 0.3399981999708901, 0.27400439074289173, 0.15975239670324493, + 0.7534795450583789, 0.24410352857670758, 0.33709400123157174, 0.04731184462084859, + 0.8656602443354487, 0.6815260099217533, 0.36896055753764456, 0.07231256295730748, + 0.0591357015193974, 0.12834271889295057, 0.3416505672313318, 0.11875518231351734, + 0.5583924605457291, 0.0922367588428229, 0.8844414238178754, 0.807731536615229, + 0.11486528401217866, 0.8027483983155068, 0.6892704939498873, 0.24324504191164564, + 0.029317726775774045, 0.8906513622394374, 0.1599387511718089, 0.22348145034258027, + 0.5760452900832792, 0.5053747555115768, 0.8081628359731009, 0.05495708052495485, + 0.19972365072425602, 0.8865711504423313, 0.45601578393234043, 0.22715775439997743, + 0.13639701690136963, 0.13074521458852162, 0.41645063387674763, 0.37411652073382473, + 0.23636013008377532, 0.5754967858487199, 0.5317760097917982, 0.33104631196750023, + 0.0676935603745904, 0.8212256748704595, 0.7174770235880121, 0.7165737612100622, + 0.104081210292845, 0.5961594358188973, 0.4321230913297708, 0.6275614631863294, + 0.4848511405386148, 0.46430577758997216, 0.8825441202244202, 0.03322084887204235, + 0.3801453729548483, 0.1899194173679094, 0.9437978650654052, 0.44531451278182943, + 0.372630596108276, 0.06371590044276054, 0.09787722944911825, 0.29430493226113874, + 0.5356991409607268, 0.7705639919914617, 0.21232357140065194, 0.31106785774321555, + 0.6820961506731384, 0.957730133774548, 0.34012235977773053, 0.0675415631504287, + 0.1565073488126688, 0.7886807849944825, 0.9900840629248212, 0.9575282351682902, + 0.3062168476208318, 
0.2886123701852217, 0.24011612257882264, 0.8171361730176914, + 0.8958103050507237, 0.3665257490567677, 0.7787425841783737, 0.29569941905984587, + 0.43592786657050697, 0.5628183789478507, 0.7113229421471596, 0.9714979482732561, + 0.4765382140075889, 0.5891715591205987, 0.0759999733174358, 0.022121573521780014, + 0.903078700838711, 0.8233797845130467, 0.6785454298758874, 0.05194316512115826, + 0.08288887197735462, 0.4809360048939878, 0.9711262672525652, 0.20209246322013696, + 0.9519999167321322, 0.7516036852281149, 0.9736571444542115, 0.16537133710213492, + 0.8017609547533799, 0.5649474437186026, 0.7820837349162352, 0.28892218100986955, + 0.03133991734693464, 0.6679540112646597, 0.5553167231350766, 0.4073987002455086, + 0.7531857690931871, 0.7323110796643038, 0.33287804522112296, 0.22583624442185124, + 0.9485199964845112, 0.11134130023878241, 0.4153982870283548, 0.4276875414048068, + 0.7213349087514701, 0.11379866842504482, 0.12614724822596002, 0.06225620312733837, + 0.3937176366300814, 0.24364953515824506, 0.3618538737408433, 0.10884143194195617, + 0.6133014838464537, 0.4629829242910801, 0.6656460153219789, 0.8805827759846833, + 0.34101710495674475, 0.3185008302555752, 0.07673435985825394, 0.7037242334774125, + 0.7648799012279782, 0.9070898275381692, 0.5876958465509375, 0.47375185092126915, + 0.8902007858767228, 0.502844154363563, 0.7448320939806813, 0.8501745456167581, + 0.8935245733024005, 0.7382651793776293, 0.7274920981681308, 0.5095615409262099, + 0.7978402677683297, 0.08507104920222786, 0.7882503449093154, 0.540424323998919, + 0.4638154062447596, 0.6967584577394271, 0.7718584339838213, 0.7036420763180282, + 0.9920023369578655, 0.1626157031905784, 0.1879874152763492, 0.8367024870181061, + 0.5753073865142466, 0.2731946312315383, 0.9294333046187256, 0.09287858863873744, + 0.6595044621120822, 0.9008403185949457, 0.20786883493318187, 0.13258159063259523, + 0.7783462524811412, 0.7329574528681749, 0.7219916821139297, 0.20879233222808669, + 0.14162103978361207, 
0.09297428175477396, 0.7852178163677799, 0.17669553664960846, + 0.42606061654683003, 0.0988697016095238, 0.7423310716265287, 0.7706897142413555, + 0.956820524525592, 0.33067537767204735, 0.21422306772962296, 0.33216610135018865, + 0.1111316210642761, 0.5997345439993701, 0.6393804988926635, 0.2765656144117612, + 0.6708104219834241, 0.86391015018573, 0.7744971238505398, 0.48899200161796097, + 0.13619621302166018, 0.29435177123326306, 0.9078925058895949, 0.6463544185250047, + 0.7494713932486778, 0.5470125738318735, 0.5728205367620257, 0.8270334409153715, + 0.9242535222947666, 0.6825990509125524, 0.7077311778702258, 0.3589889486151604, + 0.18067353205227754, 0.9767502068161129, 0.726321672577723, 0.3022359169863216, + 0.6010132403575262, 0.8094647300020432, 0.6274185140387438, 0.7225457890577125, + 0.5123020631706507, 0.7360544610333721, 0.04991151977269326, 0.11861724973155252, + 0.7180768962977342, 0.6993491069744604, 0.20752145445827375, 0.25486009434188805, + 0.7745188169649472, 0.9912106807596729, 0.7691429853682675, 0.6439346189860077, + 0.29666959040787966, 0.6028685029322579, 0.4444771245783087, 0.6261859121821446, + 0.1259749476534906, 0.7569815139783284, 0.2962562997117554, 0.488959383491686, + 0.271256956068024, 0.07675788501729641, 0.8295977414831182, 0.3950592813215933, + 0.6790598322975793, 0.6956192514654892, 0.7904946977502278, 0.41827724692101365, + 0.5550279501948888, 0.4236268651524372, 0.6927207321705072, 0.7050301842540171, + 0.9784250783596109, 0.1791098728938777, 0.9037082239728184, 0.6680257397608798, + 0.6181733895012386, 0.9768899151738116, 0.36591940378229215, 0.7917677710764943, + 0.6855235813536104, 0.11817692281697534, 0.5958469577757173, 0.4180598864261298, + 0.3243110718981157, 0.3340673734155253, 0.7347639530264786, 0.16055839250136206, + 0.047348497698460834, 0.8461192832483528, 0.14688850811807308, 0.6482638093997666, + 0.9359298653699178, 0.34861246847020433, 0.48995371218602024, 0.7550817845977703, + 0.6253788774380002, 
0.11886112198051313, 0.4433574711886632, 0.10231297925313387, + 0.4384192218904619, 0.15254393787596654, 0.3654366130729987, 0.45658498806957304, + 0.6799206841724489, 0.3085611929502874, 0.13538510942791504, 0.8698764611728227, + 0.4979653420419564, 0.6722203964971958, 0.9736290728260159, 0.19496280224412987, + 0.2737807064227379, 0.43429560942721956, 0.5320074370128091, 0.06431308014841242, + 0.11027508726731905, 0.10014426838099222, 0.9768402373848661, 0.46262421018595534, + 0.6681367091263978, 0.5843286448732778, 0.003340011729282777, 0.917116733526388, + 0.6254777774407944, 0.805507149335438, 0.25485049814910954, 0.6074322969727793, + 0.9570816574590979, 0.09488811649209461, 0.9616899014222248, 0.2749836649916848, + 0.29566276965432947, 0.9899876239296427, 0.5531300702888099, 0.4116406163541566, + 0.6073046163939841, 0.624643603018433, 0.7638593932921186, 0.28094692890467177, + 0.7128700882110096, 0.2712462923681047, 0.0852762078132896, 0.0690072434400375, + 0.017776703949417105, 0.6851989638705765, 0.7624777353969142, 0.787277349822785, + 0.07172018629417398, 0.7787853590627548, 0.9836349783165318, 0.32026971014453487, + 0.924179019144257, 0.736600285799896, 0.03713771538942745, 0.08660784852691517, + 0.7096184306730414, 0.7410242950461109, 0.4842287818860809, 0.2697873517464343, + 0.4436211973899019, 0.4878197424582644, 0.24075603648316157, 0.18865175603891926, + 0.5375324673870242, 0.2839228931507771, 0.19569196084837825, 0.47206686055672953, + 0.8219209587605331, 0.9492040626586575, 0.6931808170186252, 0.4589125247032527, + 0.827853591371546, 0.7477086831460902, 0.21700506140528608, 0.7947524532739587, + 0.5318478831934534, 0.32555547869575097, 0.6893896815081554, 0.3228066353105682, + 0.35837606313310655, 0.23314083931931007, 0.00952379354506816, 0.19095091260177122, + 0.22809543099015217, 0.20962234549652414, 0.2523152860228668, 0.486891604204169, + 0.5763119663993084, 0.5703518327793337, 0.3665206678698878, 0.3632330056344869, + 0.07687814467551612, 
0.6400953176490832, 0.012516153921648066, 0.236036799000914, + 0.5861698922650569, 0.8983366995534606, 0.6627902465865023, 0.6136357462984242, + 0.5418414851841203, 0.3132129404132644, 0.744033594830627, 0.1825280923918009, + 0.616729039220696, 0.5289028283915365, 0.7033184833395857, 0.2740336473069692, + 0.3321167015642672, 0.9865543017341086, 0.724972374599111, 0.034852437614189435, + 0.5343913748679306, 0.8626066171498894, 0.7597109118121218, 0.6418977943500506, + 0.7337808754574986, 0.07682530861893255, 0.22150043622534699, 0.48429246869840337, + 0.30560063539625915, 0.8139630415752064, 0.27975012292456725, 0.724704693314636, + 0.37139642305203324, 0.007763740667271146, 0.18499135023809266, 0.3961306001119218, + 0.628548218002559, 0.5288142314425128, 0.3554849946464518, 0.5616620349658505, + 0.32830648200561185, 0.22566670335113082, 0.8473785421742102, 0.21727130167468123, + 0.23435990576456311, 0.6739400681093219, 0.2934946434738166, 0.8230007661343022, + 0.17160412640720546, 0.35769135402718577, 0.6954493581241974, 0.8446019888042419, + 0.76013735636343, 0.3763213330060545, 0.5632440844712139, 0.9343257976154357, + 0.3126255526093422, 0.36712698444156466, 0.14005366473312275, 0.5246546893862399, + 0.9368125904658339, 0.04843081129539095, 0.7961455420379222, 0.7935260270960133, + 0.8734833369971956, 0.04855264414267868, 0.5017781627450661, 0.016760347244534568, + 0.5742354592389959, 0.9976507654359112, 0.6888137930576982, 0.22273010730669784, + 0.204120776576738, 0.8499862146778021, 0.17124829055279, 0.2027178455204759, + 0.14961315161516786, 0.7544933808870451, 0.798701266698071, 0.6959365333165284, + 0.14434125334857317, 0.2661975925923973, 0.012314172722164174, 0.009471587956916161, + 0.8081118002658578, 0.14052983633970928, 0.19687118859820252, 0.771326960302206, + 0.03237656736639172, 0.8473517558150588, 0.5450156268002251, 0.05542478733911549, + 0.1564603243589875, 0.6566154879746668, 0.25376535758604135, 0.5303798598438999, + 0.31264498734644, 
0.5736733907960259, 0.8196039266327361, 0.9243478023239993, + 0.11681693385836267, 0.13224303783410207, 0.07673644112481748, 0.9157746455423513, + 0.8924788517647907, 0.00284422317906996, 0.038404070927022915, 0.9089597246423947, + 0.13223604641985065, 0.2812911240373971, 0.3026869432546221, 0.6436484690375646, + 0.05830065809028828, 0.6075703479468125, 0.7365910391279963, 0.8405802319103391, + 0.38634707027084914, 0.8552734666502619, 0.8001203658725758, 0.9183664358639222, + 0.6717965582344371, 0.5929569960222573, 0.49899978983174464, 0.3673185177987697, + 0.5009190144526422, 0.8021053799221894, 0.2514131947780418, 0.04806005257298929, + 0.22983732809212554, 0.7056693614564971, 0.2636519327124738, 0.5232326285822311, + 0.46834953269732604, 0.9601364320359428, 0.048492850918106956, 0.6907705338964202, + 0.7651798724852019, 0.7646570697173272, 0.5841845941410169, 0.5902910320222682, + 0.09641796710044437, 0.9135113823517769, 0.23534542047896145, 0.5558980740927516, + 0.14663784632374333, 0.7524574409843053, 0.46406416378122606, 0.42781006148574074, + 0.8989298096211726, 0.17234469344542547, 0.793374772700839, 0.6663822417341555, + 0.4137577738364733, 0.1330020884590426, 0.7278348420486463, 0.8215177886959044, + 0.9607601703032548, 0.4528633040623917, 0.9969676684454453, 0.3361841355888917, + 0.6622852506097389, 0.40192801927627464, 0.9015255728644763, 0.00884867316800575, + 0.22136038766575694, 0.8259122020932622, 0.982551039451459, 0.45589283035368555, + 0.7476810544799087, 0.9238806426533371, 0.9692803131091698, 0.2786213639921723, + 0.9635762228637361, 0.5373055715245347, 0.019463114454430497, 0.7802024752893354, + 0.5844765756448353, 0.11242963321975741, 0.29117704631643815, 0.350252671514489, + 0.11336523688838418, 0.8650129592033415, 0.7648227326425444, 0.7445158871377848, + 0.046783497096835824, 0.4205912874083033, 0.7666125077528164, 0.08539071131417797, + 0.66210713196685, 0.9560690387702061, 0.7196789870310156, 0.2322940592138497, + 0.013573658063372185, 
0.33664094049883364, 0.5955894400444404, 0.9289823831286147, + 0.387397645799443, 0.2972438859705323, 0.7097065976975364, 0.3604319636351504, + 0.31743393853659885, 0.7069855690798323, 0.8546331547971184, 0.6020435366907627, + 0.8457843240657554, 0.18149085368254592, 0.7442773208504156, 0.5940991221745717, + 0.5672761478259544, 0.9278500717736248, 0.13564538441165264, 0.07182425738119547, + 0.3226398402673879, 0.3007483665542521, 0.6899930729840695, 0.8325697634031315, + 0.15658154982220907, 0.7417099410885516, 0.23813288318261894, 0.8159064950858222, + 0.5418069263098081, 0.46159191931394117, 0.4993290281902606, 0.4962530904242528, + 0.6306682140142076, 0.9039894572473723, 0.6544468464027663, 0.13500364941078247, + 0.8181802480722219, 0.48828290307141, 0.8386904056608249, 0.4540997335994863, + 0.251592078147698, 0.5933033667502858, 0.779332408059024, 0.5326084004962612, + 0.006033516504653336, 0.9275750302448893, 0.10602515821631009, 0.13545141577260267, + }; + float ret[12 * 3 * 16 * 16] = { + 0.44166954813792036, + 0.04307375857392848, + 0.3427145650542528, + 0.0003788955781413028, + 0.6810816515514638, + 0.8503025686377245, + 0.30431620883041643, + 0.15785038224109538, + 0.9269203960072342, + 0.7312365836651692, + 0.8294678547271637, + 0.4275215513314521, + 0.6685263590871087, + 0.5688754184736142, + 0.7348172902756986, + 0.44002668384141674, + 0.5269286567983883, + 0.7225038524751558, + 0.5611298409256944, + 0.5853334434070222, + 0.026684856314108663, + 0.688117042002729, + 0.8444262077285541, + 0.06350491342649123, + 0.7798902407824239, + 0.3156636155985213, + 0.4734006703854433, + 0.15931023211489348, + 0.034307016619573694, + 0.772280025098758, + 0.569287257671341, + 0.8592478084009422, + 0.4359487952693004, + 0.6475379020612276, + 0.34867474286113354, + 0.1256434578956731, + 0.14465673405698776, + 0.0663454514818631, + 0.6621924036536312, + 0.7621883076372917, + 0.9062449282680001, + 0.9536722219436357, + 0.8565925393476546, + 0.3527447679275013, + 
0.8334993727563206, + 0.7163184369676103, + 0.23591825350189433, + 0.7469435868881419, + 0.41451837109645195, + 0.2878984260468489, + 0.6794459570364636, + 0.26492426370897504, + 0.3069506582096254, + 0.08312792795924828, + 0.20245674686239068, + 0.9651150999001393, + 0.16538573936154377, + 0.20497483692388607, + 0.566960698002111, + 0.15402104039426656, + 0.4137445986366697, + 0.7243057299151163, + 0.75433224500989, + 0.08834338294524535, + 0.27623319940397784, + 0.47360004601500694, + 0.5438492509363436, + 0.3094509109895641, + 0.6766005056045522, + 0.6807570960866145, + 0.7179374654029288, + 0.787922270567647, + 0.3442212152056977, + 0.61916128232351, + 0.47370621362547916, + 0.7626685229206793, + 0.1908610884109866, + 0.7230969298199336, + 0.9676336916897936, + 0.5048744155547631, + 0.8456571389418028, + 0.5099827307022872, + 0.8280148670220571, + 0.007897168430538004, + 0.913569293118159, + 0.25306680573262974, + 0.819983055785511, + 0.40700176557932166, + 0.21390073450029923, + 0.8306959283917633, + 0.607170300883798, + 0.19311250465001595, + 0.7183240748928417, + 0.7322513474423443, + 0.908938917129835, + 0.5169567047446194, + 0.7875744952742323, + 0.058148911362952194, + 0.3976588665651378, + 0.7626679577973312, + 0.6970250251456936, + 0.4381810472866039, + 0.1921241856189122, + 0.6450987513912385, + 0.8073753891922654, + 0.5694205222123573, + 0.03589630770849228, + 0.7715926856540837, + 0.11882941999956287, + 0.6424258749408532, + 0.5674651194005915, + 0.2537965751303326, + 0.697194390936586, + 0.20927555469706427, + 0.631045959341615, + 0.8352082664190045, + 0.9278384321220222, + 0.2611229933525716, + 0.5587010898818803, + 0.6296359067003408, + 0.2963695031241057, + 0.681887814087412, + 0.3879511258310916, + 0.5336050341777332, + 0.9031292676920084, + 0.5059431795785202, + 0.17423229087168046, + 0.3270362471197956, + 0.10583596581040589, + 0.7135945681401519, + 0.972627074755428, + 0.7800778214445974, + 0.42937813886846965, + 0.8613658262003889, + 
0.22431946306168216, + 0.668298649743206, + 0.012905395010521903, + 0.05592958934392844, + 0.23803073721653156, + 0.18595355696473537, + 0.2810237028905399, + 0.2122143260877204, + 0.7072101010898707, + 0.010168003939234116, + 0.5813700180834056, + 0.6408191510994479, + 0.191804049698732, + 0.028060584971021485, + 0.26787711957452087, + 0.32616109171787244, + 0.5781403361257853, + 0.9781873751378118, + 0.17557832067528545, + 0.830646905238987, + 0.519311073178922, + 0.6352033805958868, + 0.43175058985170955, + 0.9376269937186791, + 0.4503447656080847, + 0.681030236919638, + 0.14389440924597607, + 0.6829770401574513, + 0.7812033889387705, + 0.3085742104930158, + 0.3520710838734862, + 0.5386917374649499, + 0.5519388322597185, + 0.9009351749099564, + 0.4849237975588041, + 0.5182911869169183, + 0.760752897661161, + 0.5475862574697538, + 0.5842294775781987, + 0.44129255310450777, + 0.8854429411392765, + 0.6155214832882219, + 0.23625649983123986, + 0.597712965388332, + 0.022599972586581085, + 0.3244491415465972, + 0.24643915603026256, + 0.31838977486203324, + 0.9045433296007812, + 0.6094306396092523, + 0.14612234864189266, + 0.7334476518610263, + 0.07205098934580179, + 0.004229192177879315, + 0.13242039824566865, + 0.43600799558928105, + 0.2833559721230442, + 0.6138540572625962, + 0.24835609047844476, + 0.6298165964297995, + 0.45440333291296253, + 0.36393362969608567, + 0.36842665507300465, + 0.26031028350961527, + 0.07011624926846793, + 0.5350311265495459, + 0.6243858594133542, + 0.7067149810394754, + 0.6678342036887911, + 0.6712474137254042, + 0.4979582256219468, + 0.27958034946510657, + 0.2574542451128564, + 0.4947666449554591, + 0.5667766638280655, + 0.33669842214183554, + 0.41010538704340793, + 0.28568444272968585, + 0.4870415699569436, + 0.23907152639385632, + 0.24555236706882477, + 0.9447497417407644, + 0.3528056732024888, + 0.09717383952404945, + 0.9760558830495002, + 0.4998781501659897, + 0.2528620255074816, + 0.03567485596575537, + 0.9537700664670703, + 
0.5760018279420768, + 0.2048913611172284, + 0.6032064629516926, + 0.33693023432030134, + 0.31796009445608775, + 0.08855832445432443, + 0.9894948762233142, + 0.027487575615617987, + 0.6174670223554523, + 0.3101202128602528, + 0.9862775077129775, + 0.34481376426119703, + 0.7702092625272077, + 0.7257972247400568, + 0.11106509791158592, + 0.8719001559272971, + 0.2267037598095707, + 0.5887313184100555, + 0.01523090789645698, + 0.3147734409308881, + 0.2404170314202997, + 0.48276661713378266, + 0.9045883890693331, + 0.08289766492862771, + 0.32357976777098096, + 0.5748977713903817, + 0.4313243700192768, + 0.5299278341835632, + 0.12396149370749077, + 0.5880342745534667, + 0.10118683966689535, + 0.49890114789962525, + 0.718291481901579, + 0.4736419191168978, + 0.41032082421966476, + 0.13721517015234475, + 0.447404596866686, + 0.6961261151735172, + 0.21885105613278566, + 0.707380541532476, + 0.393478497342046, + 0.9575727383521713, + 0.1976347555048752, + 0.4146430735788973, + 0.8880482788199178, + 0.8159711813982033, + 0.6313798437728453, + 0.41093772589171873, + 0.05858570452655276, + 0.23899999012425677, + 0.8665051233500843, + 0.45430003925302787, + 0.7331914607979834, + 0.31691419742449944, + 0.8736851324955441, + 0.7134339083680625, + 0.8467228842279854, + 0.43729266244792664, + 0.3852594548928723, + 0.5916828188750132, + 0.051566895205039875, + 0.900597830211724, + 0.6604009077531166, + 0.8807339873509351, + 0.4229217022397571, + 0.9809704455279764, + 0.17461496906612384, + 0.3615283940617009, + 0.061756698896572315, + 0.7157303886625387, + 0.03251072985680692, + 0.8602417860380033, + 0.6263447861764662, + 0.6266511263055534, + 0.5690971290567406, + 0.9608495657627738, + 0.20280112950334284, + 0.7334357422608602, + 0.14251466106556665, + 0.4076471661522455, + 0.3684670420234416, + 0.4167490782080582, + 0.6471209587294987, + 0.5548737401045137, + 0.06048542911246013, + 0.8309196548596574, + 0.9243017825628977, + 0.9064636859735287, + 0.9490721267620666, + 
0.430812881616372, + 0.614182548788152, + 0.14623692229559904, + 0.012595988509650713, + 0.6674826329280994, + 0.40790482633815195, + 0.6263689845732856, + 0.4064095914119452, + 0.3324413960990795, + 0.9241365174521774, + 0.5486511567142761, + 0.2693290503424638, + 0.5436266166530527, + 0.7015751622010856, + 0.7135552553835582, + 0.4633764093942768, + 0.33920962189339976, + 0.039072280263130366, + 0.6109935880071121, + 0.2211496572505689, + 0.5135084081689768, + 0.6366565981344982, + 0.8926860112953223, + 0.4866725536965475, + 0.6402289173493387, + 0.45009441972100284, + 0.9688943292920577, + 0.2570145166706659, + 0.13709496786369568, + 0.85145265846938, + 0.7729232411310919, + 0.8731464949208254, + 0.024115387667105193, + 0.6128468699489975, + 0.2817485754408312, + 0.6360312019413297, + 0.2806115333164484, + 0.13559815914215723, + 0.29708925648248996, + 0.23749674860750714, + 0.7175561976760566, + 0.8380256862590599, + 0.6804242407960092, + 0.5396189290921029, + 0.7491886955916292, + 0.8086334698637788, + 0.8762270921954172, + 0.6995240149645893, + 0.46220715504000853, + 0.282350262690521, + 0.6293698598145906, + 0.4115499740473926, + 0.1761471859768603, + 0.3722192354344217, + 0.5918743512036624, + 0.7341188406002573, + 0.1939527534182891, + 0.7707936015605422, + 0.9897121559582179, + 0.5906906861197991, + 0.3881176084508102, + 0.0007355823247353621, + 0.020702880321358808, + 0.4915783671918532, + 0.04267572489930438, + 0.8045255396796128, + 0.4993102314543969, + 0.33646793001018027, + 0.6800428994742934, + 0.21239711733448496, + 0.2357106932568147, + 0.39721084817295127, + 0.12079294015895137, + 0.9174891392441704, + 0.612832676356461, + 0.033297932400682906, + 0.6374448390850164, + 0.48660432290592537, + 0.5680257018541957, + 0.9795395758881432, + 0.3357148930170015, + 0.8737095409169909, + 0.061453224110397, + 0.0994720709557756, + 0.1538321778128191, + 0.8896657737511348, + 0.19034307027869501, + 0.1250423323446953, + 0.5528713455540756, + 0.5014765148599959, 
+ 0.6411610195119898, + 0.3638831089125175, + 0.2905866558114406, + 0.43102053974232446, + 0.4015208835080437, + 0.9160701780405514, + 0.7837797805894802, + 0.35996974719030417, + 0.16332747875259546, + 0.2745764856472338, + 0.23999807956723496, + 0.03421609814847737, + 0.13553395643062238, + 0.5517692241934921, + 0.29385606796325336, + 0.835806317299432, + 0.5476702736550557, + 0.42369296852175853, + 0.5425734096471484, + 0.24280807058329779, + 0.4383165705549248, + 0.9095037508670331, + 0.5382484337009568, + 0.13723130196844224, + 0.5863919467486739, + 0.052880078363425786, + 0.41960430507865654, + 0.1428952851614408, + 0.5199495851750379, + 0.7659184089081081, + 0.6139508800308294, + 0.30435289793546216, + 0.08404659566815154, + 0.22222097585761458, + 0.0015237707795864308, + 0.3978426903430806, + 0.8695091638733617, + 0.22670254802389178, + 0.1482114518437857, + 0.7148266840817413, + 0.25108769965985933, + 0.19463811780745865, + 0.7271201732957122, + 0.7798701128439969, + 0.2038333656681255, + 0.6254032365377729, + 0.15122974808610623, + 0.7539081143367642, + 0.8591507258913956, + 0.26176370341499045, + 0.8030692034976082, + 0.052396846089859395, + 0.47984941465482556, + 0.5386906475225569, + 0.09079089536411455, + 0.4176874531365481, + 0.7132953021430298, + 0.17093217963883267, + 0.4221013638216137, + 0.692333500659214, + 0.7391359265468849, + 0.8018051903260712, + 0.7274039173683481, + 0.4065671697105727, + 0.12103753859719812, + 0.5045690870746585, + 0.5211757773614668, + 0.23437496173428507, + 0.7368831630732353, + 0.7443859269387701, + 0.43886509007321595, + 0.7411963911096001, + 0.27526014098701246, + 0.6596009893800752, + 0.593266163389924, + 0.11341374440845742, + 0.5047926406532258, + 0.31309142847148685, + 0.530580447843163, + 0.7142357931484125, + 0.4612525787168896, + 0.5859366834617029, + 0.9957309791929156, + 0.4266346219445917, + 0.34325433046211107, + 0.9957444290037094, + 0.5766995410946815, + 0.9752590042731862, + 0.2742805049916154, + 
0.5150168900942156, + 0.28812723959723785, + 0.15230798169019144, + 0.10293621921355078, + 0.5347245432817731, + 0.3606855509794995, + 0.9606126648852817, + 0.44390846989682864, + 0.9863983509977264, + 0.42783276875038223, + 0.5487260411718942, + 0.09978268079067831, + 0.7588457975246111, + 0.37096835863377275, + 0.09361584807709855, + 0.9379522071320472, + 0.4502405572571988, + 0.6983507476945467, + 0.7617142278287264, + 0.30698793237345046, + 0.9547284565002674, + 0.47136564347535725, + 0.040901099660420215, + 0.6829838329748097, + 0.9008772611237756, + 0.7995038877033812, + 0.18432817691314374, + 0.588212831802506, + 0.49146042948979607, + 0.4169034492348723, + 0.8251238156587757, + 0.9274832343240935, + 0.46903404196108633, + 0.013322708717175336, + 0.2070786155168104, + 0.43039832506201603, + 0.741509414519302, + 0.6878583580235702, + 0.12258934342127947, + 0.5905926752348803, + 0.666300626716617, + 0.8418026794499541, + 0.8499306791035737, + 0.6841236146016769, + 0.9363900750810993, + 0.003059722706603596, + 0.9352359607368278, + 0.6695945776146562, + 0.913586580025072, + 0.6111123296509879, + 0.8826003107426394, + 0.1626758120278794, + 0.006281199967602347, + 0.5356991409607268, + 0.6820961506731384, + 0.1565073488126688, + 0.3062168476208318, + 0.8958103050507237, + 0.43592786657050697, + 0.4765382140075889, + 0.903078700838711, + 0.08288887197735462, + 0.9519999167321322, + 0.8017609547533799, + 0.03133991734693464, + 0.7531857690931871, + 0.9485199964845112, + 0.7213349087514701, + 0.3937176366300814, + 0.9784250783596109, + 0.6181733895012386, + 0.6855235813536104, + 0.3243110718981157, + 0.047348497698460834, + 0.9359298653699178, + 0.6253788774380002, + 0.4384192218904619, + 0.6799206841724489, + 0.4979653420419564, + 0.2737807064227379, + 0.11027508726731905, + 0.6681367091263978, + 0.6254777774407944, + 0.9570816574590979, + 0.29566276965432947, + 0.9368125904658339, + 0.8734833369971956, + 0.5742354592389959, + 0.204120776576738, + 
0.14961315161516786, + 0.14434125334857317, + 0.8081118002658578, + 0.03237656736639172, + 0.1564603243589875, + 0.31264498734644, + 0.11681693385836267, + 0.8924788517647907, + 0.13223604641985065, + 0.05830065809028828, + 0.38634707027084914, + 0.6717965582344371, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.49872269429620975, + 0.582114074620672, + 0.8794132005881704, + 0.8791269471098638, + 0.03226249236680412, + 0.19053767468224259, + 0.8857867809462828, + 0.46640704279272016, + 0.5013249220480327, + 0.4914649117424874, + 0.8851055753508216, + 0.5253699854838281, + 0.4521564166563363, + 0.49111394776088735, + 0.18706261818854597, + 0.8632130905792896, + 0.8117705343353353, + 0.23809778498286072, + 0.2348375918144413, + 0.08945118660744811, + 0.33002824843625456, + 0.4952272679127343, + 
0.5541139404295038, + 0.05860482596495664, + 0.14254936200789714, + 0.05546611943559421, + 0.16655392496411603, + 0.8411555966233066, + 0.1047216771050905, + 0.2522112326848579, + 0.9917311118591357, + 0.8999214821027939, + 0.5647952717516906, + 0.5846928020032719, + 0.17704671513095693, + 0.37949319710948726, + 0.8125590098154514, + 0.4967104355941421, + 0.7563120977819371, + 0.9444805267350456, + 0.9285531098995821, + 0.04085651969533721, + 0.10968185151529919, + 0.31835984148358976, + 0.46420790874197915, + 0.0923742336495238, + 0.5824677076820898, + 0.7378166679174171, + 0.697016300370185, + 0.4070582529802277, + 0.5285685024299306, + 0.7019263232239848, + 0.5054043188417666, + 0.5954726203941156, + 0.6009488186681777, + 0.37806608862254276, + 0.5362359184350861, + 0.661172161312716, + 0.45468429642073815, + 0.5079696901117611, + 0.1731747849592724, + 0.12104134786710796, + 0.4887731715287613, + 0.5044126229512266, + 0.3433174065200666, + 0.7693887217890181, + 0.6645417117507658, + 0.5911643553246058, + 0.7803167409842258, + 0.18918313024237488, + 0.8968132928802025, + 0.06493513860209299, + 0.45470399424076535, + 0.6621672645635656, + 0.6284468697795359, + 0.37841172644923027, + 0.3187093411707905, + 0.8258839233627036, + 0.9822019927635267, + 0.5600171127897822, + 0.9613973085600989, + 0.7717408628492098, + 0.8277068369585787, + 0.9852937629591335, + 0.2385371173197105, + 0.6025304361192038, + 0.0778803181290173, + 0.3429557305469143, + 0.30921486213973715, + 0.4457466644086183, + 0.6927939274746441, + 0.6884518017317282, + 0.28013330071727127, + 0.4965665845905123, + 0.6540522027619009, + 0.1332349737030577, + 0.8458393838748731, + 0.09715518717781602, + 0.21645747068138055, + 0.7732748600158968, + 0.20856953801272204, + 0.43833444890909934, + 0.661218606505018, + 0.3650460027735124, + 0.49842897975867795, + 0.07133473852731675, + 0.17441351939559846, + 0.2400442117752748, + 0.5870045264346687, + 0.1938389638426371, + 0.9574547370135406, + 
0.1998890286210746, + 0.24682345519427384, + 0.8576108449387947, + 0.7301205029513615, + 0.5624961408696214, + 0.3638274678244726, + 0.7582957722499413, + 0.9648517530634904, + 0.7842912136945769, + 0.16475486574151976, + 0.06253482108181252, + 0.5821652454564654, + 0.5153993893640534, + 0.9930529756601338, + 0.6114781272159479, + 0.4654345764701384, + 0.8315518503970734, + 0.9363565147406563, + 0.12032639174445858, + 0.9229605513358294, + 0.4972430475561045, + 0.7357423355385769, + 0.10944536582441444, + 0.65505768427964, + 0.6039442327642132, + 0.5929261786522119, + 0.0455377907149731, + 0.1458609199361791, + 0.05220103595566228, + 0.8742692458643788, + 0.07507888946422059, + 0.37106245061371024, + 0.5195080171512712, + 0.5588261706383468, + 0.16212931835656053, + 0.28379442460103144, + 0.5577167002583155, + 0.06798458106150163, + 0.22940508873533094, + 0.006891759256384522, + 0.2967676554861042, + 0.5076993696919473, + 0.906979449165617, + 0.34139000007089304, + 0.3858228882139979, + 0.9809740312733957, + 0.32794365251447477, + 0.7200928548635808, + 0.3595991288776009, + 0.9590755728283509, + 0.22094235352733438, + 0.6061793647994644, + 0.7741510761759416, + 0.6750463138001177, + 0.9632557086420768, + 0.6833883761281654, + 0.887404179202379, + 0.9980910880274598, + 0.896320857203976, + 0.593626642998047, + 0.46786432051257343, + 0.8730977124086077, + 0.8348467421556682, + 0.24116239287396757, + 0.22248818636897771, + 0.536843311798013, + 0.7827145768809525, + 0.30134252307872467, + 0.9180118819776794, + 0.7431383231541865, + 0.6723426691220966, + 0.8982688827991663, + 0.6478684954660008, + 0.12641906108110756, + 0.09134521905986925, + 0.28381329612633655, + 0.1930090834260455, + 0.4607618490816854, + 0.3625025924122818, + 0.8500462364151191, + 0.18157836931719495, + 0.7787010450829042, + 0.3035818248313433, + 0.035987373886854135, + 0.7475710332070825, + 0.30066812912879526, + 0.40234329774399946, + 0.8010030565116638, + 0.32518309466866335, + 
0.03983578333531368, + 0.45228235722609234, + 0.1861385580786682, + 0.6069705396669021, + 0.5815015641098671, + 0.8995535859052592, + 0.42110875297256545, + 0.2962370195584294, + 0.48791595428289924, + 0.7135619420704647, + 0.5524938574075309, + 0.958729870250965, + 0.5214041559816355, + 0.8379933121471851, + 0.6399515009589466, + 0.9417570081495491, + 0.7486729433719952, + 0.9259502582636769, + 0.615510122410444, + 0.5401325906954734, + 0.17560152588047784, + 0.8238758737847581, + 0.3670756983039326, + 0.814241717243615, + 0.26914563151248994, + 0.9124211400435175, + 0.9070264363179478, + 0.317909310877886, + 0.7903135621863384, + 0.1774327895895852, + 0.2540576913143622, + 0.8646407561360216, + 0.43793557141718775, + 0.8466814834992581, + 0.6199688962923056, + 0.04908539286403257, + 0.5463019695739538, + 0.22999442096319045, + 0.5146267845560454, + 0.8662577405125906, + 0.2298736613451513, + 0.4701470304663081, + 0.021678855314892664, + 0.3541465214566609, + 0.8423699137816405, + 0.13173432325663237, + 0.4201898408129492, + 0.5922910739183277, + 0.8798345270536851, + 0.4934981641716867, + 0.765892319072405, + 0.6464860423784723, + 0.11529352943973847, + 0.12062854727193406, + 0.19390438497201268, + 0.1057925602768447, + 0.6358493825352493, + 0.7156910728714808, + 0.3984640894255359, + 0.18178206411155562, + 0.07004766534908935, + 0.1582955600967466, + 0.9626789038341915, + 0.9680733166314949, + 0.08301829371483349, + 0.16605123225763596, + 0.2764933417293113, + 0.12602928850293937, + 0.5387478147352246, + 0.29888838191879286, + 0.27724972070226594, + 0.2394356523139669, + 0.04736433270210616, + 0.6597264366536395, + 0.7703059996095263, + 0.7121606308464292, + 0.05996696984481786, + 0.08250454135914254, + 0.5922821695162334, + 0.34865131186235476, + 0.34603953537333054, + 0.07398139176729679, + 0.21025394979175327, + 0.30782334734783556, + 0.6434653905314204, + 0.22844806125400163, + 0.7415080554984124, + 0.8559868536396876, + 0.7987372472449284, + 
0.06943405301490102, + 0.2600843124658757, + 0.20877051593259888, + 0.9773429172090287, + 0.6507961599249666, + 0.6348440808406297, + 0.5838493291165655, + 0.7206985650577848, + 0.5696443190794005, + 0.13683722511085716, + 0.5969060813848549, + 0.31420056369876215, + 0.8590310734180775, + 0.5731621743608273, + 0.34743467608485556, + 0.5737086149859272, + 0.9691156062096731, + 0.5469416306435377, + 0.47020867155366575, + 0.42240535722306805, + 0.08499227558313571, + 0.6331418825289039, + 0.9347360070905624, + 0.6348976620077825, + 0.5631504935893676, + 0.06063010064983543, + 0.8720065584474361, + 0.5869005789165606, + 0.05536062358065652, + 0.9565566182748682, + 0.128903241480822, + 0.7632903133232857, + 0.8938490366343718, + 0.11630889165366465, + 0.8114227679822276, + 0.7975074584119255, + 0.9920692633016523, + 0.6718768917934348, + 0.10453278612480843, + 0.35423235286058363, + 0.23515421673357584, + 0.3070337942708744, + 0.7684245826279257, + 0.5015297465607211, + 0.3030689614036959, + 0.9178982395043443, + 0.5731088131761239, + 0.24136524837202822, + 0.09328520423793907, + 0.044636563349069736, + 0.3422258350596028, + 0.46135308232837946, + 0.515594098997283, + 0.9078460719582939, + 0.23871860078453422, + 0.4240023592836627, + 0.60237221178917, + 0.6581233231670309, + 0.9113905198333937, + 0.13852856951174664, + 0.2978732910764659, + 0.45664211087209905, + 0.5147194765248094, + 0.9348031950275889, + 0.10462064876744948, + 0.45118951390128537, + 0.5280023188975842, + 0.9027131717120288, + 0.9899944776583948, + 0.17895797806876146, + 0.9312680122335908, + 0.24491840962442002, + 0.9346017695584203, + 0.9330556213809605, + 0.5535089600812554, + 0.5473422345347194, + 0.07290743393768884, + 0.229297142855464, + 0.9375970601021524, + 0.6408559181231186, + 0.9469322510206291, + 0.51539501082556, + 0.8649291733453168, + 0.1922418341303912, + 0.8752414013019265, + 0.265436687436848, + 0.6131539097918961, + 0.6880047232097563, + 0.5171223455893764, + 0.6820213443757897, + 
0.3056480346725515, + 0.8982633876716558, + 0.9638087043651087, + 0.1956339938954209, + 0.21485355994864874, + 0.8660292443231999, + 0.30321767658689824, + 0.3585285687861417, + 0.613830088510247, + 0.19989772662744087, + 0.5527258098947379, + 0.6636809855961555, + 0.48128037298133386, + 0.2211533966817012, + 0.38951846802701395, + 0.36297157175788575, + 0.217220651771879, + 0.5488322861187763, + 0.882703222618888, + 0.04107581127707327, + 0.7412994205474549, + 0.6191524737760634, + 0.40436343425321375, + 0.7580275705851004, + 0.05701653428777742, + 0.07110586834065757, + 0.6453588397582007, + 0.9966162959923662, + 0.7894368924910821, + 0.7272415025291709, + 0.2380432713874716, + 0.7627714963554943, + 0.28487742636995594, + 0.9170802733402591, + 0.4444204195274494, + 0.5372505007965466, + 0.28290864093703405, + 0.7982807236158526, + 0.5237447600093503, + 0.7470797251487761, + 0.5577037735732207, + 0.6867628611394454, + 0.7649560915172015, + 0.6682421320361475, + 0.7825755929597615, + 0.9974337219833577, + 0.7353331412328397, + 0.6657199695285304, + 0.697430771327525, + 0.2304286782953665, + 0.6510234164698306, + 0.19054826493688826, + 0.15592544057670454, + 0.9201137965676772, + 0.09116908423654368, + 0.7557580083041496, + 0.5795308220058911, + 0.12695290247632174, + 0.4952549426166929, + 0.3712576724571395, + 0.37338233575983093, + 0.08192562682462345, + 0.39971196876569703, + 0.7919030429487443, + 0.06734173833331702, + 0.9642131061140009, + 0.32781286572822055, + 0.6016180691187267, + 0.8090685559709809, + 0.7763738023541471, + 0.642136271180784, + 0.7457054997666415, + 0.016516224441699867, + 0.7787840467849276, + 0.6132548117789737, + 0.11079171938354215, + 0.8626464558003436, + 0.9238030472592919, + 0.8510792555474312, + 0.7742960489247359, + 0.9403297298385496, + 0.27167111450692505, + 0.00818209815950166, + 0.4423727321579124, + 0.6345547259024037, + 0.5181493646433266, + 0.3866859080877916, + 0.629868050919494, + 0.9739522384651846, + 0.8319533982633284, + 
0.3477466676347637, + 0.3383020992232656, + 0.4676897387824628, + 0.3947924938391364, + 0.6531321271651636, + 0.5163793649942291, + 0.5483961205070903, + 0.9258372807099275, + 0.024033085179401614, + 0.2729116625646527, + 0.21660729878285445, + 0.9164102954086099, + 0.44315651854783367, + 0.06885258484274792, + 0.03390395009051328, + 0.1278846862048807, + 0.8074318862062672, + 0.6648750725774587, + 0.03011965948308104, + 0.9655287650938614, + 0.4167376881927827, + 0.8743817055687811, + 0.6510099739682977, + 0.738200001141994, + 0.7352625559274293, + 0.6534316283814973, + 0.834718811852973, + 0.6844722986646388, + 0.9962670097577428, + 0.3045264506253025, + 0.06073493704660615, + 0.7016114226163853, + 0.3602459848801891, + 0.38870605625308163, + 0.46957188509803294, + 0.788456049657768, + 0.19042719802989005, + 0.7213162333057185, + 0.3981406588868234, + 0.6564206332115747, + 0.20819468049475387, + 0.8477836932596925, + 0.09509547838645183, + 0.3325103931498934, + 0.19007604447889503, + 0.36203910540376316, + 0.8185885262141374, + 0.915654612345703, + 0.31047977619783196, + 0.05952501130191601, + 0.4596513558251957, + 0.28099470671200755, + 0.547101947791375, + 0.18488093539725048, + 0.01007876648848316, + 0.20502802392466968, + 0.38895566725539643, + 0.11956790739406642, + 0.9750196788370971, + 0.3195537542321295, + 0.4775424364827874, + 0.5872388885915949, + 0.06371688631748307, + 0.7063680359352544, + 0.29550272530719146, + 0.0020789323725591835, + 0.9539075814199297, + 0.9088009828885675, + 0.31573543162442885, + 0.9019690368286237, + 0.7711039763337023, + 0.19600971504978393, + 0.9151416538581988, + 0.9218579593517987, + 0.06203144365918323, + 0.7705639919914617, + 0.957730133774548, + 0.7886807849944825, + 0.2886123701852217, + 0.3665257490567677, + 0.5628183789478507, + 0.5891715591205987, + 0.8233797845130467, + 0.4809360048939878, + 0.7516036852281149, + 0.5649474437186026, + 0.6679540112646597, + 0.7323110796643038, + 0.11134130023878241, + 
0.11379866842504482, + 0.24364953515824506, + 0.1791098728938777, + 0.9768899151738116, + 0.11817692281697534, + 0.3340673734155253, + 0.8461192832483528, + 0.34861246847020433, + 0.11886112198051313, + 0.15254393787596654, + 0.3085611929502874, + 0.6722203964971958, + 0.43429560942721956, + 0.10014426838099222, + 0.5843286448732778, + 0.805507149335438, + 0.09488811649209461, + 0.9899876239296427, + 0.04843081129539095, + 0.04855264414267868, + 0.9976507654359112, + 0.8499862146778021, + 0.7544933808870451, + 0.2661975925923973, + 0.14052983633970928, + 0.8473517558150588, + 0.6566154879746668, + 0.5736733907960259, + 0.13224303783410207, + 0.00284422317906996, + 0.2812911240373971, + 0.6075703479468125, + 0.8552734666502619, + 0.5929569960222573, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.6271994044987186, + 0.9264698414266803, + 0.36259376947960476, + 0.3569063921265889, + 0.26536135121543347, + 0.7644677558027794, + 0.23010486455028845, + 0.25505471899614596, + 0.3667980786598808, + 0.7019553951808579, + 0.6437687138614585, + 0.45462695932935426, + 0.09208630834065346, + 0.1674994497775757, + 0.7985811459510032, + 0.004235301770942557, + 0.4813280272416872, + 0.7469415625364333, + 0.5327804681949095, + 0.7939122988767758, + 0.20081913102711146, + 0.16342026476030647, + 0.7410735160226759, + 0.979769354784465, + 0.46985773018681976, + 0.6863635798404532, + 0.8474697511604699, + 0.3287981027308836, + 0.7814064498207857, + 0.7651022871471064, + 0.40748074874761653, + 0.6201674240879183, + 0.7842184667774055, + 0.696575510235226, + 0.2593928030932182, + 0.17402554677973225, + 0.8364261161659661, + 0.41482687096275417, + 0.8641238867913393, + 0.33714991558453766, + 0.8518751815185701, + 0.9357470753188956, + 0.056254828649715205, + 0.6189044426563634, + 0.994783618719053, + 0.7131063882750018, + 0.9502632407373436, + 0.11088424948614228, + 0.5765498555193996, + 0.587274570394774, + 0.7022610940802061, + 0.3564208328852714, + 0.5605623483071223, + 0.8711410580944787, + 0.7542068310615544, + 0.12580690683106277, + 0.19511467633329538, + 0.7739464169701958, + 0.5473052414387926, + 0.0028599333556914575, + 0.7095330866753558, + 0.6053678545479122, + 0.6213530721931816, + 0.9592522642800507, + 0.5495665705069847, + 0.9827985717552564, + 0.11068898807947902, + 0.08898689748408184, + 0.7548778490392412, + 0.9360678606466022, + 0.7954700851158711, + 0.32331047016203274, + 0.706540304326524, + 0.009466677125219869, + 0.1556312453987625, + 0.9211139965639348, + 0.951326141526167, + 0.7195749102963507, + 0.6893248992970754, + 0.24237549807347103, + 0.5705370484057619, + 0.3128486066217375, + 0.789384620312103, + 0.45570885553393825, + 0.6015102886725543, + 0.247968859398907, + 0.9517353479323505, + 0.29826401213617126, + 0.6956611010430033, + 
0.5600876670999226, + 0.07592251569758046, + 0.6725623115052313, + 0.10528876606606097, + 0.3759712418524386, + 0.40034313112058606, + 0.12194193098553696, + 0.39131470600778995, + 0.25125539353796833, + 0.813199176397541, + 0.8381580698250548, + 0.5004754874071722, + 0.760506513615787, + 0.46426532888903915, + 0.9020054771131834, + 0.8026081030266163, + 0.7996957890195311, + 0.08112678281565666, + 0.14807847772129135, + 0.8397711336669689, + 0.23535604849535918, + 0.6673877311358172, + 0.25374747576575163, + 0.4220967517637866, + 0.5066938318558056, + 0.7900745500343653, + 0.1455459751211956, + 0.8140580775258335, + 0.213412326850846, + 0.928886818960128, + 0.18699379337075595, + 0.021096952333120034, + 0.5533515389185333, + 0.5127156866627538, + 0.8868906748414581, + 0.2624862020804677, + 0.4929603053577416, + 0.5106915402640104, + 0.07320996584499662, + 0.25987680147606096, + 0.11208451234437578, + 0.9504013942535778, + 0.029689325117214893, + 0.14269330674477565, + 0.2288025284434444, + 0.4914181370527274, + 0.14626715991443162, + 0.28915355169129864, + 0.6754514526812337, + 0.5435592006192091, + 0.2889961282841257, + 0.2124467533663147, + 0.5603908414206202, + 0.28508198430675924, + 0.8642117767779967, + 0.19690193497758146, + 0.6366510877209287, + 0.6757576073478814, + 0.407526432784978, + 0.16803295403221097, + 0.3093169471793096, + 0.7401817315715277, + 0.322614103267186, + 0.28636228761624405, + 0.5020274680301321, + 0.5101273913427341, + 0.8997556485816697, + 0.5159544794242966, + 0.984522184165227, + 0.0293003059492154, + 0.62016196145894, + 0.3636745686855236, + 0.6609331889501101, + 0.030289721923193125, + 0.9813593314280998, + 0.9354610230433348, + 0.41990836330752934, + 0.24345781896307728, + 0.45884111957755847, + 0.5969558549361805, + 0.9089245518914847, + 0.20670107887611633, + 0.14894613475586727, + 0.8989648541115576, + 0.9605502557204222, + 0.5540717585868633, + 0.4737583328127568, + 0.3215160628847986, + 0.2977442893652992, + 
0.03952269136874165, + 0.6685305974181874, + 0.18366361857795044, + 0.930499158170201, + 0.8559229897542748, + 0.7349685198119638, + 0.4753492843112488, + 0.36112127032602126, + 0.2365354746108721, + 0.2190266020986179, + 0.17662830150082642, + 0.5126810000248355, + 0.8371293691255902, + 0.040987564538892296, + 0.36212066315728597, + 0.3527667540127555, + 0.8367793554124504, + 0.36764156869564213, + 0.38846961600980434, + 0.046132326698889226, + 0.7512526313549056, + 0.5601195449139258, + 0.05122474970940183, + 0.16822883346753625, + 0.9596920794353617, + 0.7767494969653084, + 0.5457546026465194, + 0.27189693152003713, + 0.878717595023631, + 0.8589217946561216, + 0.28581060443827866, + 0.3761201560049453, + 0.975685212653098, + 0.4980645442650463, + 0.6734493625924612, + 0.6252954247712945, + 0.1272208931602823, + 0.9970259571499339, + 0.9138639235533571, + 0.0965135679589354, + 0.8977589395129851, + 0.02664326742275369, + 0.9592522085272938, + 0.07818213996663659, + 0.8911505143281897, + 0.35590494005804263, + 0.2524219206316275, + 0.10813637741392035, + 0.5483862070260394, + 0.6081590947379143, + 0.035156722930959616, + 0.6081815537879649, + 0.7806027882930798, + 0.5520593628359234, + 0.06238122197854312, + 0.38086561284804366, + 0.1909190141632633, + 0.9926833355210261, + 0.7216834531887901, + 0.9273421929135721, + 0.5706449986578908, + 0.200718101519632, + 0.5867521885451676, + 0.5775017376630199, + 0.053754360443974525, + 0.7245527085112154, + 0.6135807459491986, + 0.8891003037916227, + 0.5292724555441057, + 0.5815901891218167, + 0.8379657066532696, + 0.7021655831082331, + 0.7223081516058559, + 0.12599255658672592, + 0.8755661438776697, + 0.9677641683884044, + 0.8108445804174672, + 0.8151810052734241, + 0.024380356224368516, + 0.870127908452522, + 0.5474178270290164, + 0.5568564850622483, + 0.5178030369849991, + 0.742696754130134, + 0.19643036097098832, + 0.3611473080795088, + 0.0631262724273397, + 0.40629016682259855, + 0.1726823974227566, + 
0.9528158071580496, + 0.18300334312104516, + 0.9784399038252511, + 0.2668295505422189, + 0.772147412106295, + 0.5256251663097435, + 0.9680383380692306, + 0.1079616327897569, + 0.5765115714130643, + 0.644462481066819, + 0.5030377077189374, + 0.9743894944757384, + 0.7059235640075332, + 0.5351118887689688, + 0.9544718066910667, + 0.13424586807762695, + 0.5993209583752188, + 0.3928325099757637, + 0.5239453650001472, + 0.24317424944249855, + 0.21449895830650934, + 0.7112465191480444, + 0.30541058974295543, + 0.29700994575964157, + 0.7949246739176529, + 0.34932643592138557, + 0.2601759378482653, + 0.7347076837691057, + 0.9618529234108794, + 0.42070429337161164, + 0.7227703906619335, + 0.08205956135202597, + 0.9647634803065985, + 0.8277581819576262, + 0.24512256520401055, + 0.9211348965335291, + 0.748695853751142, + 0.2123118892973238, + 0.5801491657766561, + 0.23699892219432583, + 0.2085028857251099, + 0.29402948730856315, + 0.9837858944776281, + 0.34745566339751055, + 0.4151243598338422, + 0.7167775533081169, + 0.7966145593092426, + 0.694448199105987, + 0.7614144337726138, + 0.9903885909151705, + 0.5775975651510218, + 0.8704274130188115, + 0.1892356912182035, + 0.8845174827064491, + 0.2678709205600661, + 0.825109994146468, + 0.07276034077451898, + 0.6610151872850507, + 0.5085963961567416, + 0.8315261586654303, + 0.6144591454614012, + 0.7593754667534023, + 0.8612284843099522, + 0.2753640407643436, + 0.370462210862769, + 0.6295371600946423, + 0.7929661179719301, + 0.4146380475031288, + 0.03257524157655578, + 0.9090248533823663, + 0.39444406283236844, + 0.5412696334756325, + 0.501338373359947, + 0.982145175958047, + 0.3761394351925581, + 0.22546253478374623, + 0.4919408448445003, + 0.6006686918123524, + 0.0952593786220981, + 0.33101610509813706, + 0.5195053545351246, + 0.9104774606499484, + 0.11947568274871168, + 0.5285378601866524, + 0.6463423379438457, + 0.9261129417306103, + 0.35966354689091795, + 0.4175742258200632, + 0.6019050340328873, + 0.11667331090191357, + 
0.4049651667708708, + 0.9975736301511068, + 0.5456108748099171, + 0.17397294688760923, + 0.14489390075010034, + 0.32705180690965974, + 0.9736432359868827, + 0.8849543522240473, + 0.359774089632495, + 0.716316751235886, + 0.5349125973375516, + 0.8993866462438552, + 0.7824096376890705, + 0.0011796245137059014, + 0.5319086037684029, + 0.26340352924063204, + 0.6672961252734662, + 0.590333629644554, + 0.905176968193664, + 0.7485084759165672, + 0.25610198554541663, + 0.691216985231457, + 0.601899774477644, + 0.819540584848403, + 0.18142656632995713, + 0.8002730458640235, + 0.021310042854654987, + 0.6279422152184789, + 0.5512595717894255, + 0.06886444137408543, + 0.16109537177974032, + 0.43826070345924906, + 0.04187067657234822, + 0.9069448023247579, + 0.6967780641770658, + 0.4290583801583937, + 0.7497984680392851, + 0.15245974144609353, + 0.4075132436283174, + 0.2511958010276626, + 0.449404196322713, + 0.29864527488946346, + 0.6169430754024365, + 0.3729964152126811, + 0.7374995575362278, + 0.710099317310232, + 0.7946002564847682, + 0.2943823407634739, + 0.9684289895706213, + 0.8004890102333629, + 0.3527011486497985, + 0.3986484072479929, + 0.6137178816521027, + 0.36081637436434355, + 0.4007736877599293, + 0.09920283754378034, + 0.6880354334301659, + 0.8986798972269063, + 0.0015032880172971552, + 0.3511098267640179, + 0.8549346101991091, + 0.9082524283124408, + 0.9067300774741744, + 0.08351643147420151, + 0.16144846442234084, + 0.48999489592626033, + 0.08596354949952578, + 0.748430085445022, + 0.3691116329275165, + 0.7312355234132424, + 0.7192038723646083, + 0.02287711607706544, + 0.45028070729135317, + 0.4507140915853943, + 0.13710764777100182, + 0.6525440055951811, + 0.6929224266276129, + 0.28844753281858804, + 0.7806366279639314, + 0.25818605129567584, + 0.4048224938846018, + 0.07310318446907105, + 0.6653606494847659, + 0.9699511775738996, + 0.14645477418550845, + 0.18051200067902895, + 0.8189946011240749, + 0.5931680981398827, + 0.0001339564577385044, + 
0.34986642839308224, + 0.8402892529324945, + 0.1700272713088029, + 0.812454514555075, + 0.12778556068659352, + 0.004290147632306729, + 0.7522582602457509, + 0.6814887851217211, + 0.7432631433721439, + 0.12535879515294623, + 0.05338739789362279, + 0.1998489418720122, + 0.10088672097030649, + 0.6746710308795896, + 0.049172909242495955, + 0.569748779117895, + 0.76696934036463, + 0.7756573486476269, + 0.9658579518167977, + 0.145445126824565, + 0.9271635466079461, + 0.5440922365642499, + 0.029721693547344552, + 0.597922345332943, + 0.3049446033995109, + 0.45089746231236205, + 0.4223228602753234, + 0.849541091982193, + 0.8377949603520214, + 0.5204105335695532, + 0.625197860183363, + 0.37906946531813646, + 0.7250428392459488, + 0.6887239468148271, + 0.07987411488916707, + 0.31684164402249493, + 0.907818098736975, + 0.91074170468797, + 0.9257745495908779, + 0.6065417996780311, + 0.7959044902153384, + 0.4408257612405182, + 0.2532218946914593, + 0.8269854681749177, + 0.8722717625070588, + 0.040155452864908514, + 0.5939837161093867, + 0.7139002575508098, + 0.9637975373382056, + 0.8625546835517777, + 0.9232446034747206, + 0.5839195666831178, + 0.6563318467068981, + 0.8987445685891152, + 0.4107474748269918, + 0.2048243212640377, + 0.08986458471739434, + 0.6454315630015993, + 0.18238737115851422, + 0.5660847647213608, + 0.23647181209233414, + 0.23528616394420376, + 0.2253368547532426, + 0.0849225570877471, + 0.7758283820044125, + 0.9349731153279355, + 0.16565592032403087, + 0.734501660443507, + 0.5053944623476256, + 0.6101537318103396, + 0.6526619761340613, + 0.45012993309019456, + 0.3089435114570902, + 0.6334332878893612, + 0.5176086077744201, + 0.5146178202604017, + 0.028105336399675274, + 0.6082886334835492, + 0.8797583778901342, + 0.7826099427413439, + 0.48374117189420707, + 0.9612235203749636, + 0.04895580614551076, + 0.7723295089612472, + 0.8401306748988329, + 0.4790432612213785, + 0.23878150132656706, + 0.3780748358941388, + 0.45270398360611497, + 0.9055972717405635, + 
0.05899686570080087, + 0.3944099276425578, + 0.3285027647930605, + 0.4499075842270144, + 0.6082558785681481, + 0.40042855168713165, + 0.2880362662259337, + 0.464908376651873, + 0.21232357140065194, + 0.34012235977773053, + 0.9900840629248212, + 0.24011612257882264, + 0.7787425841783737, + 0.7113229421471596, + 0.0759999733174358, + 0.6785454298758874, + 0.9711262672525652, + 0.9736571444542115, + 0.7820837349162352, + 0.5553167231350766, + 0.33287804522112296, + 0.4153982870283548, + 0.12614724822596002, + 0.3618538737408433, + 0.9037082239728184, + 0.36591940378229215, + 0.5958469577757173, + 0.7347639530264786, + 0.14688850811807308, + 0.48995371218602024, + 0.4433574711886632, + 0.3654366130729987, + 0.13538510942791504, + 0.9736290728260159, + 0.5320074370128091, + 0.9768402373848661, + 0.003340011729282777, + 0.25485049814910954, + 0.9616899014222248, + 0.5531300702888099, + 0.7961455420379222, + 0.5017781627450661, + 0.6888137930576982, + 0.17124829055279, + 0.798701266698071, + 0.012314172722164174, + 0.19687118859820252, + 0.5450156268002251, + 0.25376535758604135, + 0.8196039266327361, + 0.07673644112481748, + 0.038404070927022915, + 0.3026869432546221, + 0.7365910391279963, + 0.8001203658725758, + 0.49899978983174464, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9001409152216447, + 0.5865297769079123, + 0.25924453383855506, + 0.8148236302016606, + 0.5210914867897581, + 0.6664800591976118, + 0.8012012723630623, + 0.6507807730728263, + 0.01847183664685148, + 0.12714437420022462, + 0.11525453037463174, + 0.15457705654247023, + 0.7304539916759515, + 0.5538550137177134, + 0.6884899594599051, + 0.06305399665355571, + 0.10194740570021243, + 0.996297059126408, + 0.5595128219901133, + 0.1281311616830373, + 0.15734836104585115, + 0.15455979948213694, + 0.6843733820866527, + 0.9695538998205576, + 0.6511886618287761, + 0.454276249403717, + 0.4331065163054624, + 0.43000518947591215, + 0.783754833871294, + 0.8983537696675657, + 0.004781603549286251, + 0.8634877656131325, + 0.6474975088736912, + 0.37832932314324796, + 0.857698230887831, + 0.5457870596634349, + 0.8394235886532205, + 0.15349812178484878, + 0.7626846294714452, + 0.9116434249533474, + 0.9707013478619836, + 0.21205999749128934, + 0.5477583970869144, + 0.9992452614602896, + 0.7043524832141461, + 0.07218330203025192, + 0.4681651394763202, + 0.9958496145910709, + 0.21891209807097622, + 0.7433717546734127, + 0.3630343787104311, + 0.5995993663092729, + 0.8240599479452418, + 0.07513190384100732, + 0.2535561324617509, + 0.259274604322687, + 0.4765660481609173, + 0.2578199680747182, + 0.3305419735734779, + 0.005070967981792185, + 0.3830626973957306, + 0.5910679449252563, + 0.48092522218469214, + 0.5299158564080676, + 0.27826615999689375, + 0.01591156332579713, + 0.21304380031505676, + 0.44032115426769214, + 
0.8750640139081357, + 0.23840089872753056, + 0.13354192470167037, + 0.2548021628164966, + 0.48944233335635956, + 0.6578077020064123, + 0.3777112139212466, + 0.7495032615284326, + 0.26602024491194587, + 0.5156705142078448, + 0.8394237537024324, + 0.5553441766038674, + 0.2315644238782848, + 0.10336561119676724, + 0.9730381350553119, + 0.7606555946172777, + 0.3384380355432103, + 0.7385416103790785, + 0.4213994102939299, + 0.23194476754070237, + 0.10064886309635879, + 0.21876683797406515, + 0.48122637945466284, + 0.6094713205837278, + 0.8956291727875784, + 0.7615850714624053, + 0.8450595534897567, + 0.3724483030206014, + 0.9147679061819083, + 0.8183006868514918, + 0.3227045959121022, + 0.01411165406485304, + 0.9887008685761358, + 0.6517394121082511, + 0.74762730164112, + 0.6023359473085469, + 0.46297101516119865, + 0.6952813429840727, + 0.46539398553912337, + 0.9588236144252194, + 0.0047686709774116265, + 0.6153562699998499, + 0.6024659570158596, + 0.5957357095829988, + 0.20094037871951687, + 0.8503982592577363, + 0.06399312445029615, + 0.18576472656679532, + 0.59663474685793, + 0.7330349137078572, + 0.07614109805077285, + 0.6893430524803945, + 0.1787552300041606, + 0.5766319811068539, + 0.4351858925138925, + 0.8541960713608484, + 0.6845743356987847, + 0.4187164547736638, + 0.3761111779991463, + 0.2939829973136848, + 0.6902954653367727, + 0.19616941210980066, + 0.5000302685434677, + 0.5645688414899266, + 0.4429705595848673, + 0.8841636807579821, + 0.3496221859162666, + 0.13036177793631887, + 0.519673841594743, + 0.5105353617414455, + 0.427382026594161, + 0.7957096966420499, + 0.1992501781501017, + 0.9445726730451817, + 0.2815387642858218, + 0.07368717030750993, + 0.4734047935958343, + 0.32627877277786266, + 0.802735306284264, + 0.17953761400030488, + 0.4804772443645222, + 0.7437986993864869, + 0.9882059071733065, + 0.8237014357474968, + 0.04166200857800617, + 0.4091437172119238, + 0.1671980582532291, + 0.7659388029109063, + 0.9681310945597621, + 0.9425758016852048, + 
0.38651052646664463, + 0.5738062381035053, + 0.6713070612868395, + 0.23787631227068995, + 0.595247587049231, + 0.9871256155661056, + 0.3825050661148899, + 0.6236977207204356, + 0.06922833268086881, + 0.5475916746402403, + 0.3468700887024071, + 0.8140694112554991, + 0.4029758014205066, + 0.34607283321092097, + 0.8920140006638025, + 0.29242431793941304, + 0.14052633498314282, + 0.9087875882434279, + 0.5194279318176563, + 0.7620000860398798, + 0.7397278979498215, + 0.03516125275990889, + 0.7362856375269557, + 0.4564609978477814, + 0.7748833918242423, + 0.364539006117635, + 0.26825983484391713, + 0.15130254844414925, + 0.09318818848840993, + 0.1468868187819261, + 0.3803179201680549, + 0.7447286870072714, + 0.9814045266281899, + 0.2123411569068986, + 0.14344840215568566, + 0.5082813878704179, + 0.0419617551564988, + 0.3835614408814402, + 0.11428781977710878, + 0.0452867595679628, + 0.9284619966644981, + 0.19894460327367158, + 0.9280031635913709, + 0.5200338062131293, + 0.81052624710307, + 0.14304873364579263, + 0.005727322269841517, + 0.17142315267359254, + 0.6951807639506289, + 0.7256239457381716, + 0.16265723058631343, + 0.924803685970466, + 0.7610010380071708, + 0.7024364697783744, + 0.9399399075876578, + 0.6426285641997748, + 0.46552705882830514, + 0.5956141876118459, + 0.5856653282262468, + 0.6550633767510058, + 0.2975978126673411, + 0.870535910030066, + 0.9984446661641264, + 0.4643445196348496, + 0.1589740124489375, + 0.00925076115001855, + 0.7185713229842058, + 0.7006386155756965, + 0.11504504617892097, + 0.4316269756672091, + 0.8645577271992757, + 0.7038539277833612, + 0.7797552909813606, + 0.004565601876026615, + 0.27592458230289285, + 0.7198637264056693, + 0.8289151445592633, + 0.9193893465376856, + 0.41545939498895923, + 0.64013024315279, + 0.5805262276387257, + 0.6019549687057907, + 0.548598546660434, + 0.48756685748376827, + 0.7242109678490023, + 0.20196992270647895, + 0.27089202311006577, + 0.14391300865964207, + 0.7173029869175258, + 0.9575040336586064, + 
0.4051269363467702, + 0.8510506712793462, + 0.546885228820698, + 0.3516963133474692, + 0.10119332381459378, + 0.656326693640479, + 0.8152173803214833, + 0.9190827414501545, + 0.12801599854757018, + 0.5966822109848381, + 0.44152496670677854, + 0.20722987924067904, + 0.4280383074500259, + 0.9625433879714335, + 0.5802636137500992, + 0.6500141101336993, + 0.9626245320972233, + 0.7806732313352842, + 0.4591571139835079, + 0.6624687288781167, + 0.9897944310582785, + 0.5849294505595107, + 0.1174341003943632, + 0.1483117351921458, + 0.07252453735723718, + 0.9685072910064021, + 0.06865822627865392, + 0.008106935491648204, + 0.9614725321034653, + 0.24085777706908618, + 0.21488767782879636, + 0.7692543465199125, + 0.4639950294070895, + 0.514451306246599, + 0.5513326533813504, + 0.7879729738722937, + 0.6936962962279827, + 0.5409883209998311, + 0.27798079948879384, + 0.6543419128317919, + 0.6961150269222923, + 0.5583473113468189, + 0.9357815521539812, + 0.6852917915022754, + 0.011744764559683873, + 0.42300361665841013, + 0.43775003852117744, + 0.5682592697896438, + 0.7399576524389204, + 0.5564650905392832, + 0.9218913687490342, + 0.27433934963735596, + 0.9390761218132709, + 0.6520723361229258, + 0.20356613213748154, + 0.057615305671817274, + 0.2679545742682611, + 0.814946369371183, + 0.9324774732562575, + 0.7298753469933401, + 0.6346342692413988, + 0.1760671731053659, + 0.9640939007362479, + 0.3955888208375058, + 0.4146559915334438, + 0.3278317905925373, + 0.18649442045727138, + 0.006814463507152313, + 0.5632164114405475, + 0.48520771759928183, + 0.05496322287847344, + 0.7512425089428771, + 0.7793845483130121, + 0.7538732789929227, + 0.856401401840712, + 0.01869364272728491, + 0.27260275080692653, + 0.08934219264407073, + 0.29540170492511797, + 0.9775926129646434, + 0.3935042844297054, + 0.4345452316753744, + 0.5111567482341669, + 0.8635851819841119, + 0.3137503923916056, + 0.19676978689923852, + 0.643191537180066, + 0.26896171031070615, + 0.9688653860787863, + 
0.4461730241783517, + 0.6948083289983689, + 0.4873854940051615, + 0.22745726092172325, + 0.3119254306654259, + 0.9447652033155128, + 0.28371518130064755, + 0.19786604994819934, + 0.89677104028514, + 0.21442717821636081, + 0.6146547774998682, + 0.6932891745832127, + 0.9867231873796841, + 0.9347571758164502, + 0.4191811738465673, + 0.4235371140929114, + 0.8742732682454764, + 0.16456896829187628, + 0.47765523750150707, + 0.8505665358297997, + 0.981731629952774, + 0.2711659248267585, + 0.42721422903484363, + 0.6129679632372846, + 0.14974179960184286, + 0.874154146012628, + 0.44784152963522306, + 0.15023716773938034, + 0.5452314004224904, + 0.547789079393371, + 0.6016739747363528, + 0.9150451910013864, + 0.9140153867152234, + 0.4494420410671016, + 0.0679451586804235, + 0.8596461225762598, + 0.039522828359158435, + 0.6909471239397792, + 0.3148528326259237, + 0.8369291406936449, + 0.00362118422663682, + 0.0990361687096799, + 0.48099216441977055, + 0.12138344156765979, + 0.25491275752343345, + 0.254942686250915, + 0.07812066448341792, + 0.452933474066471, + 0.006626749152742861, + 0.48595346851736576, + 0.7903878201980087, + 0.2441306221792372, + 0.9982730406611128, + 0.7838792766781194, + 0.6243411434087454, + 0.6049181159420812, + 0.12023460660763075, + 0.46733867494658143, + 0.23804095597848396, + 0.8788252153941557, + 0.7628286860975334, + 0.6735571268889189, + 0.6448719194070088, + 0.9066385895738821, + 0.42290278924861513, + 0.0870942910442053, + 0.1871708694411338, + 0.9305040573092062, + 0.9062083865833443, + 0.8951728190855966, + 0.7980826966664449, + 0.8153028072478584, + 0.18295428850134254, + 0.038634595801635085, + 0.9727084118593076, + 0.9893485212910471, + 0.48428703032736375, + 0.7690651523551951, + 0.9359138477250851, + 0.007449232298065933, + 0.9040002690066152, + 0.6377573558862272, + 0.6880619742970875, + 0.668961930750152, + 0.17310502209771605, + 0.7961411197051519, + 0.901917100305027, + 0.745405318697651, + 0.7145975387584649, + 0.2748776970483794, 
+ 0.3531872322991578, + 0.3075833904179307, + 0.9453700208245632, + 0.010116874920188312, + 0.06636870486528612, + 0.9708595550566951, + 0.7905682080952495, + 0.9586640865972693, + 0.6005370348365431, + 0.4767305061194619, + 0.883892968606259, + 0.2471136247363015, + 0.8358558420852564, + 0.21107983263156915, + 0.39437355153445386, + 0.6528285379439994, + 0.2834143949921576, + 0.07116121978438295, + 0.6982041247808188, + 0.5893495870264561, + 0.5687294018914093, + 0.81354031666843, + 0.5610282674887278, + 0.44356475648922544, + 0.22640873029678465, + 0.9781522750589372, + 0.5175932336804455, + 0.22121136255998752, + 0.9574237873040147, + 0.19071532383830148, + 0.29424230083772496, + 0.9261083529237409, + 0.983407244107186, + 0.1622160528562574, + 0.8067889121058818, + 0.9919048137847639, + 0.02088948322027495, + 0.075332154556627, + 0.6967670826620894, + 0.4536130608013994, + 0.6188408694546577, + 0.9604273409511243, + 0.47815815485181856, + 0.7829458489686137, + 0.48634563252579743, + 0.22455979049072372, + 0.6314408696681745, + 0.1649897575332031, + 0.18271970882890398, + 0.7813363229835547, + 0.03488542896200164, + 0.3842268914915319, + 0.004993540892198722, + 0.32697912481835567, + 0.07729703025529, + 0.1014003315694808, + 0.14357735877077438, + 0.19195787139286924, + 0.9012510881659451, + 0.6844996555399977, + 0.83692227956657, + 0.2907661740996005, + 0.09215798301937239, + 0.32506670179785224, + 0.2335972074267122, + 0.30882776750968677, + 0.37565771873524767, + 0.10469474885196783, + 0.0003268391492184941, + 0.3276077257142528, + 0.5564294484289228, + 0.6798824857098574, + 0.47072885962143984, + 0.9725248849400044, + 0.7048913158329146, + 0.17329341074020088, + 0.6497784298780183, + 0.02302855400784809, + 0.40977346500091627, + 0.0033656616046955357, + 0.8505326587792429, + 0.027073706575102885, + 0.6464851412960609, + 0.5333516351133905, + 0.33145884219003663, + 0.6113580359248376, + 0.4602909570938073, + 0.5664242575184198, + 0.47271779898380517, + 
0.605301324702423, + 0.3848070657480769, + 0.9335631889232083, + 0.6481079104876946, + 0.5758801513570434, + 0.28038677022510106, + 0.5074527727239252, + 0.6532932019441385, + 0.7702188786437175, + 0.5325397998121318, + 0.521196161101503, + 0.7587722758456197, + 0.5853264099637752, + 0.7305713048870448, + 0.5427167619605937, + 0.6839227253018579, + 0.4304997879237987, + 0.8872528400552816, + 0.5738622605190704, + 0.9464556721808023, + 0.4366875371600666, + 0.7015320430192149, + 0.8928303952679375, + 0.7637018341856634, + 0.41831753821220163, + 0.0451842472192171, + 0.47078269324762245, + 0.35672164906963466, + 0.31106785774321555, + 0.0675415631504287, + 0.9575282351682902, + 0.8171361730176914, + 0.29569941905984587, + 0.9714979482732561, + 0.022121573521780014, + 0.05194316512115826, + 0.20209246322013696, + 0.16537133710213492, + 0.28892218100986955, + 0.4073987002455086, + 0.22583624442185124, + 0.4276875414048068, + 0.06225620312733837, + 0.10884143194195617, + 0.6680257397608798, + 0.7917677710764943, + 0.4180598864261298, + 0.16055839250136206, + 0.6482638093997666, + 0.7550817845977703, + 0.10231297925313387, + 0.45658498806957304, + 0.8698764611728227, + 0.19496280224412987, + 0.06431308014841242, + 0.46262421018595534, + 0.917116733526388, + 0.6074322969727793, + 0.2749836649916848, + 0.4116406163541566, + 0.7935260270960133, + 0.016760347244534568, + 0.22273010730669784, + 0.2027178455204759, + 0.6959365333165284, + 0.009471587956916161, + 0.771326960302206, + 0.05542478733911549, + 0.5303798598438999, + 0.9243478023239993, + 0.9157746455423513, + 0.9089597246423947, + 0.6436484690375646, + 0.8405802319103391, + 0.9183664358639222, + 0.3673185177987697, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6433914392340844, + 0.6550667959796783, + 0.35252017540885294, + 0.14651025251043837, + 0.1318827621416322, + 0.09152723949279828, + 0.3474262545596789, + 0.4862410339098905, + 0.45401082863472053, + 0.17396993513666825, + 0.5088777106591748, + 0.9572339540509394, + 0.9399808405489267, + 0.5833197346884914, + 0.9124637727960749, + 0.8156926920482467, + 0.30911684722630806, + 0.5289569682951596, + 0.10831430505089235, + 0.6318991209235206, + 0.9279919033196171, + 0.5330292052871144, + 0.25366556394137707, + 0.2660516092693641, + 0.5356748729714257, + 0.0171953887490085, + 0.02117671429493495, + 0.9154277554227607, + 0.3473046036825943, + 0.7093801271897968, + 0.22814820706964323, + 0.4162558246910435, + 0.9181068856174649, + 0.924934395004348, + 0.7501721622884192, + 0.009746449705100857, + 0.7800240186513618, + 0.05160876225891253, + 0.6248224457369207, + 0.6376445054012511, + 0.6150499103914511, + 0.4828663703220699, + 0.8440888299533157, + 0.8801793594318112, + 0.7222913721031597, + 0.3882390324665115, + 0.502743076476809, + 0.8753508332079064, + 
0.03213652586063531, + 0.30171861955480994, + 0.7564403400075214, + 0.8645557850702533, + 0.8562204252001854, + 0.8664271685603836, + 0.2899826696811185, + 0.7758553067161579, + 0.40290983069009967, + 0.7951695298519721, + 0.6329097221142049, + 0.7494229815234873, + 0.9733924403534888, + 0.7412173002569975, + 0.13664668096168653, + 0.9620734470397198, + 0.400032358315232, + 0.32823470490794115, + 0.008550701575643194, + 0.3364376724973619, + 0.0830295155089209, + 0.477717208357743, + 0.4036066726063734, + 0.582776239092556, + 0.578728908737299, + 0.7085848276965371, + 0.710667307687193, + 0.7219420919268968, + 0.44224492508612845, + 0.7379121647334217, + 0.024735761139530354, + 0.8091634338643698, + 0.47185004668127173, + 0.1929840130822802, + 0.40232408439438583, + 0.774601228625806, + 0.8372199679898146, + 0.4810893945908544, + 0.4434739696926412, + 0.15899647174144238, + 0.3445008973870618, + 0.32729312211881234, + 0.3855457512311107, + 0.4066017207846868, + 0.20349673349829012, + 0.3588002237590776, + 0.4046449179723879, + 0.9927600319614182, + 0.9785797706775553, + 0.1719107077878761, + 0.05436789890625704, + 0.012252317615219721, + 0.5218886566614853, + 0.871828373326038, + 0.27945906904933726, + 0.8822423084388226, + 0.30113632953712954, + 0.7623491632691546, + 0.05299196333039602, + 0.6927360832902495, + 0.169516639560239, + 0.8799304214533643, + 0.860071919420882, + 0.6491800288758339, + 0.30069138960926156, + 0.25238316568772057, + 0.9774163423334867, + 0.25098369099747053, + 0.16817433532530202, + 0.5928064631074025, + 0.43992063804300274, + 0.24580837966662072, + 0.4315900007688692, + 0.2781528168084697, + 0.8317453227483677, + 0.1541487121198002, + 0.5351445193071775, + 0.16805310823628328, + 0.23222514200325173, + 0.5693991316492101, + 0.019740389923905477, + 0.11529082336301166, + 0.5583325097350079, + 0.9994327185328193, + 0.03158139006288707, + 0.03427162352223179, + 0.259919792776066, + 0.3010250397730413, + 0.663725098001257, + 
0.3606841864519844, + 0.8337340277312517, + 0.6004482465661388, + 0.08254798062262159, + 0.09380464219909246, + 0.6726058515992647, + 0.278959676723728, + 0.8929318853789425, + 0.3817625796466221, + 0.734123357922829, + 0.4033149911360091, + 0.7154437097072017, + 0.13861357120150752, + 0.0023343769506486423, + 0.13711512819967986, + 0.2957711730928547, + 0.3253552896489281, + 0.4499757181719678, + 0.7357228983798126, + 0.5365813555938099, + 0.0572127779279733, + 0.7003932155869389, + 0.7793242306671802, + 0.4239186860099896, + 0.22353403393361526, + 0.14268540196846513, + 0.5612854911884845, + 0.7033592775001094, + 0.3351952827021891, + 0.1103368324210493, + 0.44812538223049314, + 0.37825878406407887, + 0.8915142705304654, + 0.7056909237155317, + 0.8248734577725525, + 0.09598686033164405, + 0.23882056218299386, + 0.06047649342653272, + 0.2652383530543201, + 0.19334672174794099, + 0.04231863715020834, + 0.11220635744045326, + 0.7214468264472101, + 0.5622081704839755, + 0.6634910867705797, + 0.09836050813793273, + 0.3875575452673514, + 0.8093268893471381, + 0.3159833958779399, + 0.55067197453573, + 0.7038601487512629, + 0.0613040332999264, + 0.753513873349543, + 0.8627899449341406, + 0.1802476863007061, + 0.1978514852323624, + 0.9545558551298996, + 0.3121787209947099, + 0.769399110742128, + 0.6346338307826896, + 0.8833285321845498, + 0.8553090945117494, + 0.883339911974656, + 0.7305548788281594, + 0.9190448522966589, + 0.8693770767639527, + 0.8288294895188997, + 0.3887930423762146, + 0.8916778022598548, + 0.7364672905106205, + 0.861102983571584, + 0.04790704979554017, + 0.6322434199877983, + 0.8942679752954235, + 0.9727894445055597, + 0.8545833443376987, + 0.26636026749680763, + 0.7770974489369057, + 0.49674859879415734, + 0.33195680582551257, + 0.9277739924308167, + 0.13449689316900093, + 0.4083446003487765, + 0.0584370707338272, + 0.8897188381580371, + 0.7547797430485684, + 0.01446253897646943, + 0.20385235971626592, + 0.8918402070558002, + 0.9562853568683328, + 
0.8868802152991563, + 0.8113760628121882, + 0.9224309984907021, + 0.04722639726342004, + 0.059726423138840135, + 0.5479354046874065, + 0.22280443617301204, + 0.5829377027889625, + 0.8504005276155674, + 0.3195723972838831, + 0.9846397041389282, + 0.8495502703659478, + 0.26697551023026644, + 0.8938760378348415, + 0.42924566164108335, + 0.8811760165188481, + 0.561473256620475, + 0.4989187770879301, + 0.9293088540312703, + 0.09858858986546393, + 0.6910656946088193, + 0.7885874790016763, + 0.7985096920305207, + 0.678079864969368, + 0.39633746081961074, + 0.2072971613344654, + 0.5901326115297979, + 0.8426425122639908, + 0.3856582602659743, + 0.1293641519054759, + 0.3292684365493326, + 0.035707234893787576, + 0.35554802446569644, + 0.769594023018115, + 0.5139857863805445, + 0.8626316765120458, + 0.4344249885448844, + 0.012343562523475349, + 0.31208784521225474, + 0.8330568524945078, + 0.5939773543813534, + 0.5812904983178214, + 0.4486951999414156, + 0.5354134508654219, + 0.6705077244265885, + 0.15200410242542117, + 0.8280423790819811, + 0.8146211139127217, + 0.06920421228769436, + 0.18856450195470287, + 0.5283057698851015, + 0.6167212152293503, + 0.659349537938685, + 0.3134218255519302, + 0.5958855850198255, + 0.795355593755201, + 0.04351814150754807, + 0.5866072454867128, + 0.26343832558409075, + 0.6362832492521777, + 0.4958468018799851, + 0.5781258293215672, + 0.04659791676493952, + 0.17839282318085214, + 0.6852369818961417, + 0.6054274864314704, + 0.32878826341723344, + 0.9375112350615182, + 0.8396196888695882, + 0.6394379580497745, + 0.0796032255162884, + 0.588812344881673, + 0.41401959397296284, + 0.5556985856249116, + 0.18461015355342003, + 0.14895278335119166, + 0.9957740401218327, + 0.11906305856131416, + 0.8976206070137216, + 0.8124436215340494, + 0.8537996311175677, + 0.2067447079667145, + 0.0909812300785392, + 0.2238610543093844, + 0.07047721629284709, + 0.7470910253148448, + 0.6025971528796166, + 0.021030409166508224, + 0.18461931399314524, + 
0.3641448130766566, + 0.5669892130656131, + 0.2820866870118134, + 0.8335676482744249, + 0.8546348010088943, + 0.8006972484793788, + 0.39519584508777406, + 0.6023625015869961, + 0.8375514354388983, + 0.6271271283375135, + 0.9946118467644914, + 0.048276625663025374, + 0.6244360562062728, + 0.02808578252898497, + 0.5407749359650981, + 0.7781647617051484, + 0.5811313582631528, + 0.21411974298465775, + 0.7130392679493621, + 0.39310637299648354, + 0.9531510658051373, + 0.08930342925650314, + 0.9750260903279553, + 0.8725354279992786, + 0.8827598459818592, + 0.7479312187131603, + 0.802523221667957, + 0.8206176821124108, + 0.02712811450046093, + 0.19654287383785907, + 0.7690014384711187, + 0.653988722895146, + 0.8211059790657037, + 0.2829610769020825, + 0.6083513342712102, + 0.9326237772248412, + 0.3630271132032489, + 0.6447061852981948, + 0.9905774657159216, + 0.8429459656420545, + 0.9537404308197139, + 0.7144223871918833, + 0.012128031490853175, + 0.5954922430965913, + 0.7629357610192029, + 0.2090960151666983, + 0.8564239824947635, + 0.3202475285082964, + 0.07966272764529769, + 0.14190231445855683, + 0.26357746512963576, + 0.8399809744246433, + 0.5166113367963899, + 0.7502377990481346, + 0.31526198663695637, + 0.19430450443341651, + 0.03096668203774733, + 0.04353890026348073, + 0.26153720130032976, + 0.40797695847745563, + 0.4822308233919498, + 0.38838712661483255, + 0.5445750385171892, + 0.9092292850900755, + 0.23512773599142145, + 0.48956487345180444, + 0.5681523943997663, + 0.5840009839976608, + 0.1548444787183586, + 0.3814606552422225, + 0.07658829765350039, + 0.9179029339706499, + 0.8428904602092945, + 0.5449751961879864, + 0.501694517133544, + 0.40158697805251353, + 0.9233781540238465, + 0.1669275054638537, + 0.5759673452028913, + 0.17682380650592577, + 0.5034892572147942, + 0.1284048790556197, + 0.6230604090828477, + 0.8500698921130023, + 0.7204026770607993, + 0.13424177315622177, + 0.7567112331923415, + 0.7286767416433939, + 0.3598248690063782, + 
0.23522826265925234, + 0.8560531605505547, + 0.5831059281510506, + 0.23946260660347973, + 0.14718936588589016, + 0.6199389106897404, + 0.16838743255887156, + 0.5295718237230015, + 0.5846797157143064, + 0.868211673042633, + 0.9927261947936933, + 0.17191101025077882, + 0.1697347789346958, + 0.6683029282409125, + 0.9831938421999552, + 0.4451456859197379, + 0.7266432465779755, + 0.44807905220602384, + 0.7528609041430132, + 0.8275864135606552, + 0.5101244552930857, + 0.27122063745708336, + 0.03574147149101781, + 0.8499077698724496, + 0.8807748825925836, + 0.7886325920294049, + 0.8012737103550386, + 0.3719664997535329, + 0.9670054673080518, + 0.7432068048717004, + 0.20360965733129965, + 0.6189165170600164, + 0.06457774532971128, + 0.792982579227081, + 0.31548782008661846, + 0.628167105720581, + 0.6793466167682524, + 0.0895209062591108, + 0.6947548366683828, + 0.6825116312510786, + 0.1157549769762759, + 0.5222363614986275, + 0.03885659544932951, + 0.16340824954415878, + 0.08114169273771643, + 0.6924003214194676, + 0.8602903226402437, + 0.493965658081453, + 0.3352309849008611, + 0.46757265635379996, + 0.07414355427874642, + 0.13899943685549387, + 0.14422148404619684, + 0.8371351659817691, + 0.49864815408400187, + 0.4902896429397935, + 0.2994909011386164, + 0.5674345631675577, + 0.051599524714266765, + 0.8458079254507886, + 0.20376818749236536, + 0.27737585101899687, + 0.6856840631479204, + 0.5119092163008693, + 0.9387528311714048, + 0.29271557312564467, + 0.8509307252596261, + 0.2308098835082638, + 0.18452835134222978, + 0.1846637695507004, + 0.5020829010793352, + 0.9273750665079447, + 0.9452127092615741, + 0.9281465196476314, + 0.9949287309299863, + 0.6060281282344564, + 0.021613747077693368, + 0.4981891667545588, + 0.8595117812472408, + 0.26797423259163267, + 0.20041139333024605, + 0.9648337969983836, + 0.7414231355356855, + 0.45875196081660574, + 0.08571882623271232, + 0.3113913346904329, + 0.07162315074120229, + 0.80119063473808, + 0.07630922466197532, + 
0.04622563404650104, + 0.5766685406757934, + 0.025276347946489475, + 0.5559753686724215, + 0.810107763246411, + 0.8976140916619194, + 0.9099088899621858, + 0.9254333505897326, + 0.8659284862740101, + 0.13605600718391264, + 0.4585919553715657, + 0.2675986122499858, + 0.5816013229059506, + 0.06430051522916791, + 0.016468331482508036, + 0.46461879531982275, + 0.6474912610414628, + 0.2279905381598354, + 0.8559660620247886, + 0.901719577933688, + 0.3573211113918865, + 0.463114727921643, + 0.25795093077506215, + 0.17742943571758174, + 0.5341624433983146, + 0.8134479602019148, + 0.2798036370823188, + 0.15302358389850723, + 0.7015671129360214, + 0.22196306938755506, + 0.33144124755948756, + 0.9482134549763758, + 0.5150647663319219, + 0.9758773717822556, + 0.3356831627161171, + 0.8927467860136303, + 0.6098478623660899, + 0.5024041499549409, + 0.742962892612677, + 0.34240764189813633, + 0.48255831405792426, + 0.25578448367957807, + 0.7097760726965076, + 0.46644259092740326, + 0.13916666260320254, + 0.4408456081272869, + 0.910102549296364, + 0.32908522462614864, + 0.7534795450583789, + 0.8656602443354487, + 0.6133014838464537, + 0.34101710495674475, + 0.7648799012279782, + 0.8902007858767228, + 0.8935245733024005, + 0.7978402677683297, + 0.4638154062447596, + 0.9920023369578655, + 0.5753073865142466, + 0.6595044621120822, + 0.7783462524811412, + 0.14162103978361207, + 0.42606061654683003, + 0.956820524525592, + 0.1111316210642761, + 0.6708104219834241, + 0.6073046163939841, + 0.7128700882110096, + 0.017776703949417105, + 0.07172018629417398, + 0.924179019144257, + 0.7096184306730414, + 0.4436211973899019, + 0.5375324673870242, + 0.8219209587605331, + 0.827853591371546, + 0.5318478831934534, + 0.35837606313310655, + 0.22809543099015217, + 0.5763119663993084, + 0.07687814467551612, + 0.5861698922650569, + 0.5009190144526422, + 0.22983732809212554, + 0.46834953269732604, + 0.7651798724852019, + 0.09641796710044437, + 0.14663784632374333, + 0.8989298096211726, + 
0.4137577738364733, + 0.9607601703032548, + 0.6622852506097389, + 0.22136038766575694, + 0.7476810544799087, + 0.9635762228637361, + 0.5844765756448353, + 0.11336523688838418, + 0.046783497096835824, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10406700769064525, + 0.5888129420262596, + 0.5309221355543069, + 0.004952319375278513, + 0.9661794953197592, + 0.20428426118359488, + 0.24309042232942835, + 0.6278417354346337, + 0.3013071693491415, + 0.03348953173020319, + 0.07560931807279525, + 0.8332301944444822, + 0.9276909634857712, + 0.6926715865432231, + 0.5602121372899481, + 0.03454352265127836, + 0.8462638251914404, + 0.7289914599662678, + 0.6643425015917764, + 0.697819238351, + 0.3078318765135186, + 0.15053669568104944, + 0.8184493205572059, + 0.4518123963691467, + 0.0031109001593860874, + 
0.2881222840576103, + 0.3978287780669312, + 0.3573124066140412, + 0.45444970553182373, + 0.367087024099215, + 0.3999771465739842, + 0.26789393968780595, + 0.9465586483627629, + 0.8078161053529695, + 0.813980330473732, + 0.6040989949225825, + 0.6965216391465563, + 0.9153801935758921, + 0.6432716251247464, + 0.4805965127216455, + 0.9899861715766959, + 0.9603634971811542, + 0.2639315049303803, + 0.6064808139059639, + 0.09423448438874371, + 0.8586235386400609, + 0.1885721968432893, + 0.14195638196987703, + 0.96934027201795, + 0.731220848725321, + 0.8112472481796852, + 0.9247754790061143, + 0.13149979362482211, + 0.5334820731544022, + 0.6619315315715587, + 0.9409356740169719, + 0.7877526647569125, + 0.057098343065618895, + 0.20756942710750204, + 0.20173899516231752, + 0.12726293739895855, + 0.5368463241107692, + 0.9332231179113105, + 0.03866993296032162, + 0.5924254897959692, + 0.0779056577088989, + 0.25382552216958476, + 0.4384069180466962, + 0.3392559277958307, + 0.4245028182503814, + 0.06490556159285155, + 0.40212915296179563, + 0.23662457127647374, + 0.3545785551821127, + 0.29150677713279993, + 0.9655106101896986, + 0.5787672470313454, + 0.6185705931616747, + 0.8865093123778779, + 0.10902301152380622, + 0.7702062881160354, + 0.27350356357418815, + 0.5831316710851149, + 0.5427467349016213, + 0.784435735068533, + 0.4154629814539512, + 0.7535583729153315, + 0.12978357993597545, + 0.8425734197751842, + 0.5894793485408748, + 0.9793338044263413, + 0.45884461472099725, + 0.9445531111032253, + 0.2232301590771144, + 0.13011810353508024, + 0.2766332087457337, + 0.48860432385099484, + 0.8907528336068216, + 0.12179038600806547, + 0.46897400918318044, + 0.23894283106963043, + 0.18575252270884057, + 0.2062812534890599, + 0.08253037402894248, + 0.5744481648284241, + 0.9667479058542807, + 0.3310241654523084, + 0.8055656752518913, + 0.025971266365874546, + 0.7868922706469863, + 0.6024072287820333, + 0.9934575064042653, + 0.1816124283177819, + 0.9389082148050869, + 
0.020019685511636465, + 0.8939129390903313, + 0.8808350209113772, + 0.6348000014542999, + 0.8614758746615542, + 0.9640011403006244, + 0.008783581447682987, + 0.9288190943293071, + 0.4446885718480822, + 0.439776809837106, + 0.4363519321144812, + 0.9103789325896244, + 0.5190534738497645, + 0.6140094816193452, + 0.0359408498089947, + 0.8247921933566784, + 0.8999715657284064, + 0.7616392658568134, + 0.862768640500308, + 0.23107854839233022, + 0.08909349239383202, + 0.49458619902185674, + 0.11452111618409067, + 0.16206856922740787, + 0.12857309417449803, + 0.645697906745114, + 0.4207049632070897, + 0.4984885576072223, + 0.17480397715616847, + 0.35618548221763335, + 0.48580662317154866, + 0.2765279008133119, + 0.33563160802544967, + 0.7784796018200869, + 0.6219379622279917, + 0.6654929365238098, + 0.4540192161362343, + 0.33992845007081796, + 0.4212345455217853, + 0.276044932762166, + 0.7173753181254552, + 0.13016995246210672, + 0.6165320425741847, + 0.08372422708454275, + 0.941759385202786, + 0.27514289050092255, + 0.8809431675645322, + 0.7023477947564686, + 0.8234458079618402, + 0.8612749329294441, + 0.602504311567513, + 0.6222899392720013, + 0.3647916138732764, + 0.14447081742686496, + 0.7676925097477939, + 0.4986916333216087, + 0.3298520806253834, + 0.9552536701328393, + 0.49353454736416136, + 0.402443583906819, + 0.5489854213472108, + 0.25075175660923765, + 0.4692359578344284, + 0.1747959107332444, + 0.7183973210709489, + 0.8710347273056426, + 0.30167003628114264, + 0.4563097949816506, + 0.9789140328583384, + 0.38213564508486353, + 0.3617103895265228, + 0.3245621121742297, + 0.6139989296367647, + 0.0020986655433156143, + 0.4062327662230819, + 0.274952160983969, + 0.9139886721176192, + 0.4878442763468034, + 0.8018327481906669, + 0.3492931800958463, + 0.4164095227466944, + 0.9446505009893393, + 0.43836899170920274, + 0.5567360567278871, + 0.3970203638415486, + 0.9504705519586321, + 0.4922856506076626, + 0.8088420808436142, + 0.9619144887305694, + 0.7777930820721777, + 
0.33298158894732954, + 0.15309944701863953, + 0.26617107014592056, + 0.7912348133811137, + 0.7340221787959715, + 0.3037610031627833, + 0.7129053560923528, + 0.8764347534123248, + 0.1674059683880449, + 0.7124177034785016, + 0.005785789929338647, + 0.8221104632533346, + 0.08105297984834636, + 0.4218572159516276, + 0.5370213639233761, + 0.8613249076952216, + 0.3889095341657427, + 0.6862441092537032, + 0.5347038131477231, + 0.807892405254043, + 0.14771967906797856, + 0.32172287586571136, + 0.6559043666999361, + 0.848884328881342, + 0.807728386901432, + 0.9911706160219387, + 0.07992173661068314, + 0.9694280244294227, + 0.02889768818970684, + 0.7127219895499965, + 0.5854290994795582, + 0.2721210357328915, + 0.33132431719139965, + 0.16925419183163382, + 0.9009893108772054, + 0.5565960048843075, + 0.8241050495606304, + 0.7022990870281988, + 0.9491090086077753, + 0.7034513234813158, + 0.29674259955470694, + 0.5370774466098068, + 0.03515555478428156, + 0.15740678233637861, + 0.8525622204293992, + 0.6659502505164258, + 0.604618648423304, + 0.7041993592685627, + 0.5566158744713994, + 0.047576709711365406, + 0.8941846866358626, + 0.9827938783891512, + 0.6625011421786597, + 0.3330661038455013, + 0.9089806530976261, + 0.5515119825278225, + 0.23739180193147158, + 0.20521929415520634, + 0.4705239970701276, + 0.915127818763805, + 0.7848445325193564, + 0.5627020082946157, + 0.7562221220956725, + 0.6435105549771692, + 0.8008654263958069, + 0.5986581218020003, + 0.36576071478173067, + 0.2562398730395865, + 0.5930870212329282, + 0.2211604234743123, + 0.9569626246585295, + 0.23056200224511736, + 0.9289903667797225, + 0.43605903789099487, + 0.17815895542809823, + 0.8104777122229284, + 0.28794655637789435, + 0.7730501815828105, + 0.5304215864652778, + 0.859361547710027, + 0.13983456519802473, + 0.16197860637739092, + 0.18650540334871102, + 0.9941421106245526, + 0.030543557978518043, + 0.35200700997732937, + 0.5697285748534576, + 0.5350753365446518, + 0.18579591525695616, + 
0.8284004108458354, + 0.2321432832357444, + 0.0799880572850793, + 0.6928827958842657, + 0.44396984422383845, + 0.8927740237003481, + 0.1320144216140361, + 0.16045118115995427, + 0.420793808578927, + 0.8927726806463218, + 0.5944620926560128, + 0.556962084054479, + 0.05129350460083426, + 0.8996525331608716, + 0.230160033291952, + 0.47583578687062644, + 0.13162050801298497, + 0.45516098988920173, + 0.10374045896560435, + 0.2534718720856247, + 0.3968872002939713, + 0.6063409719952474, + 0.8549538743737869, + 0.5640376621397918, + 0.6545201832704236, + 0.9615084053713764, + 0.7530193432282711, + 0.19910715547620972, + 0.7132181800300946, + 0.11715390167581274, + 0.026426762268978332, + 0.20085757234860424, + 0.5788714350564417, + 0.3202541028214144, + 0.1460224467061415, + 0.9515190150924367, + 0.0569353211582152, + 0.3515314920161333, + 0.35676569235164524, + 0.505605757572102, + 0.945476563456652, + 0.8476556833084478, + 0.061229464662047595, + 0.2000383243200442, + 0.04324023024297452, + 0.6154567994157862, + 0.008404015663836195, + 0.4194872746811864, + 0.8129038460181042, + 0.5145741172686937, + 0.07774599796362314, + 0.9695286154991549, + 0.7559596393922553, + 0.03783596820202395, + 0.6362040862554912, + 0.44519279183167915, + 0.9704803492013365, + 0.08543011106131027, + 0.8638676274584424, + 0.31504347814602884, + 0.6806309139961257, + 0.8454801632200767, + 0.7207016952763173, + 0.19918346205071835, + 0.5085192114177742, + 0.8628636554695384, + 0.22702975872293218, + 0.11839267824439936, + 0.7780294935086977, + 0.3181179934392875, + 0.5592272906622197, + 0.05294524800781131, + 0.17928597963762782, + 0.45416482348464005, + 0.2068832666783309, + 0.794076674190759, + 0.2080826921184018, + 0.3683360333359762, + 0.39939768922278784, + 0.3095931761518177, + 0.1092986198414877, + 0.9961909608289251, + 0.6660756565564173, + 0.24097806011824785, + 0.8595103986396891, + 0.875127329739873, + 0.5020356367656407, + 0.4486892179131988, + 0.08912107945254233, + 
0.3121044213611983, + 0.6488582756150842, + 0.19350060967828553, + 0.808333767071158, + 0.6786665191857298, + 0.6880179017224024, + 0.6897088506075467, + 0.4012465546597901, + 0.07345426384317011, + 0.567046588734965, + 0.9343530203413852, + 0.7198408269058651, + 0.4814933614651903, + 0.31843559452506287, + 0.008544760721867162, + 0.6620679875836152, + 0.2366338664190193, + 0.6799320154096153, + 0.38645633201505325, + 0.4338850730466851, + 0.14440394738526774, + 0.0890399517207251, + 0.865819520516121, + 0.9437174255221553, + 0.8504193180920397, + 0.8294585761928512, + 0.12800711707912016, + 0.16928816244678513, + 0.33866454242652777, + 0.7254951183639806, + 0.14671945003793585, + 0.16618786674908292, + 0.5918788857097884, + 0.9184163766576937, + 0.3221659858895043, + 0.44298787436309517, + 0.5497033719539377, + 0.7392202753264133, + 0.011000978283891794, + 0.8725746577827116, + 0.9876208353612252, + 0.13942112970434362, + 0.34581419624547305, + 0.21949694167652645, + 0.604122968851207, + 0.2061007124517743, + 0.1385071273901718, + 0.8298728324755968, + 0.6759092662972309, + 0.905634666350063, + 0.898423599030536, + 0.3434738983803315, + 0.870267945384438, + 0.5875569789292655, + 0.7252181742077203, + 0.12412193354249013, + 0.7214819825256202, + 0.9426069421287532, + 0.51567110124762, + 0.6386304774589313, + 0.7716196409485585, + 0.8558180066852747, + 0.8820703201230866, + 0.5432530971490609, + 0.663775478005426, + 0.6782255587798305, + 0.40323492894481483, + 0.3518930356651795, + 0.5931727223556779, + 0.9898386264051025, + 0.7966536624574603, + 0.7428677830283745, + 0.2138363691349463, + 0.08932984065363281, + 0.16514324684232518, + 0.7333037631806374, + 0.33191978612564443, + 0.4646981921749078, + 0.36391024347264567, + 0.48921922440986787, + 0.48873929834171614, + 0.9664901957757331, + 0.007368060080645722, + 0.7691293902017794, + 0.7055049235505297, + 0.8951889418641149, + 0.7074313623854898, + 0.5895482317715295, + 0.373487073585656, + 0.5925508056840836, + 
0.554857705945244, + 0.15133118597324935, + 0.4620868083094941, + 0.7396696193495315, + 0.6420980277396114, + 0.577537497174393, + 0.941555330011809, + 0.29662734413677916, + 0.8915741267568583, + 0.23626495887230559, + 0.6485747214226244, + 0.9449725835841281, + 0.5533296139613121, + 0.9575121386036994, + 0.009960802635459176, + 0.3193247267560779, + 0.6625804085861026, + 0.3156077430137063, + 0.9093079524937882, + 0.5091554009703184, + 0.5659226876983238, + 0.36947705214830495, + 0.5753114757729788, + 0.9337930231018294, + 0.6678093876719788, + 0.693490335470632, + 0.19946017744264954, + 0.8179136289285175, + 0.9180265225007459, + 0.689089443312313, + 0.6153050099838057, + 0.6490578286423443, + 0.20144146814319286, + 0.5631800635784436, + 0.36701970427583996, + 0.6002227281987669, + 0.7162567662580243, + 0.725948397033208, + 0.331760712038768, + 0.09077987241513097, + 0.9138992273970941, + 0.701400171576896, + 0.5981766645443906, + 0.38252018001231747, + 0.550178943853917, + 0.28711809648492037, + 0.5365854451854938, + 0.8463616333227205, + 0.40017328106577943, + 0.5068089873783856, + 0.9026674864626972, + 0.09519957018152558, + 0.6838473126915998, + 0.7231698372680777, + 0.2295847263058195, + 0.12583285304938707, + 0.3378509869446701, + 0.3377276388792566, + 0.9269997080150415, + 0.8318595488436714, + 0.9990016616215829, + 0.01018111257064902, + 0.41020656510564624, + 0.09492924494978128, + 0.3960640490955174, + 0.8117347678607901, + 0.04435940978161934, + 0.3205847200219718, + 0.6238037512072698, + 0.3399981999708901, + 0.24410352857670758, + 0.6815260099217533, + 0.4629829242910801, + 0.3185008302555752, + 0.9070898275381692, + 0.502844154363563, + 0.7382651793776293, + 0.08507104920222786, + 0.6967584577394271, + 0.1626157031905784, + 0.2731946312315383, + 0.9008403185949457, + 0.7329574528681749, + 0.09297428175477396, + 0.0988697016095238, + 0.33067537767204735, + 0.5997345439993701, + 0.86391015018573, + 0.624643603018433, + 0.2712462923681047, + 
0.6851989638705765, + 0.7787853590627548, + 0.736600285799896, + 0.7410242950461109, + 0.4878197424582644, + 0.2839228931507771, + 0.9492040626586575, + 0.7477086831460902, + 0.32555547869575097, + 0.23314083931931007, + 0.20962234549652414, + 0.5703518327793337, + 0.6400953176490832, + 0.8983366995534606, + 0.8021053799221894, + 0.7056693614564971, + 0.9601364320359428, + 0.7646570697173272, + 0.9135113823517769, + 0.7524574409843053, + 0.17234469344542547, + 0.1330020884590426, + 0.4528633040623917, + 0.40192801927627464, + 0.8259122020932622, + 0.9238806426533371, + 0.5373055715245347, + 0.11242963321975741, + 0.8650129592033415, + 0.4205912874083033, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9722547340793554, + 0.42311798749778873, + 0.8259993625275874, + 0.14431184294517263, + 
0.7372411999886771, + 0.45457639494703495, + 0.8770054067746237, + 0.6430646798804541, + 0.03826965244178693, + 0.7763284429361277, + 0.4804645674817959, + 0.5180034759098409, + 0.3788159574747495, + 0.5734171907621304, + 0.1826212314925999, + 0.162202255176494, + 0.2740165716458879, + 0.8627263150228915, + 0.6702440817105968, + 0.20039769284162146, + 0.9156578210903792, + 0.0026964571220174927, + 0.13402745339052125, + 0.8410724469405078, + 0.5129095474419372, + 0.2620231768659732, + 0.35291484742075163, + 0.7016550104498763, + 0.13303487770402356, + 0.9834248626333502, + 0.723579466970588, + 0.7884278688266475, + 0.8604926504358261, + 0.017435962284417794, + 0.4523711551825791, + 0.14407762016335435, + 0.1003697520538046, + 0.9649955048434758, + 0.4352728743722325, + 0.5434097746661704, + 0.7859145329474153, + 0.11523232295259123, + 0.11831249430020163, + 0.2708796186083072, + 0.9407948562702801, + 0.8951425783429127, + 0.21779656404027659, + 0.08338242265746565, + 0.9008270961457054, + 0.31492945179529197, + 0.9281003329677461, + 0.9662156590336831, + 0.4892800652685698, + 0.24187432915651907, + 0.5516319990282166, + 0.01805150820824497, + 0.40773854718661073, + 0.937762045700286, + 0.6325266311382628, + 0.5274875157434945, + 0.8461514960825782, + 0.37295302932506535, + 0.4204274634950068, + 0.41398087251310967, + 0.49049019093169655, + 0.11535306930465383, + 0.14683034622018776, + 0.7960198012575255, + 0.3748747053041538, + 0.6780337701235462, + 0.22827893829056423, + 0.6589680457817595, + 0.7164080215580085, + 0.06358559421596888, + 0.0887098458979334, + 0.7819344803079296, + 0.9462145861556999, + 0.570946712866895, + 0.7230320518146561, + 0.40574258116180184, + 0.8953755998792168, + 0.8347215213785666, + 0.9473576096207942, + 0.9149318015445747, + 0.35012721512924905, + 0.9300156325395408, + 0.953785725974651, + 0.3262803245833792, + 0.5277846351757475, + 0.10049187173673446, + 0.16558401729427663, + 0.2222196136400102, + 0.2818945178574618, + 
0.25311476538632394, + 0.597960827441359, + 0.555975280005527, + 0.46100643186058265, + 0.3064627895723101, + 0.6320797033253904, + 0.11368618234391159, + 0.33839301759424467, + 0.522102717777887, + 0.795569442779689, + 0.8381685559588181, + 0.3360448377607559, + 0.15696279503668642, + 0.5251342679049629, + 0.03839127781368057, + 0.8323452802944274, + 0.06384231674359508, + 0.8654302932957053, + 0.39828502257716103, + 0.605345886684561, + 0.11821753312055572, + 0.574877924233263, + 0.7063330653180013, + 0.8648914451473718, + 0.08049058971369172, + 0.5890699627810956, + 0.288377453440137, + 0.7947850520195106, + 0.3447082711544185, + 0.6240667946178164, + 0.5579835891339692, + 0.6194437558335423, + 0.9854382252869921, + 0.3768858668878222, + 0.16708280972702905, + 0.6532349923807944, + 0.8080703198107446, + 0.6370943517000477, + 0.4856446029264464, + 0.7557112544188301, + 0.4222671575947191, + 0.6267643983700978, + 0.6919945293540982, + 0.7073304799817682, + 0.7643325796461904, + 0.34789775961730773, + 0.6381857931732241, + 0.8727142646928097, + 0.21354622545459123, + 0.30858868692760055, + 0.14562590576418122, + 0.32211266084413215, + 0.546719886164097, + 0.20842943221456256, + 0.9061874532836226, + 0.8777690582448747, + 0.6368058868907747, + 0.6507662246190344, + 0.15345347510183127, + 0.7749110376247555, + 0.41224337860789595, + 0.2098301063757333, + 0.4319279083166, + 0.9274165022614098, + 0.13912602770390325, + 0.8026557702636293, + 0.5382934819345343, + 0.3858028250476381, + 0.8346688971742567, + 0.27389193775556775, + 0.531185454084861, + 0.9992345722091545, + 0.28135438212227215, + 0.6710495450666522, + 0.9859224068537418, + 0.8514815765491701, + 0.12509308062307067, + 0.6039071591953129, + 0.7741094568407183, + 0.7798495783901541, + 0.8139544901179179, + 0.22565665321872452, + 0.8527785683376323, + 0.4346117425487437, + 0.49679201146401764, + 0.057440853028103755, + 0.33573322529858285, + 0.9845185840592812, + 0.7078373084009667, + 0.9637999037579471, + 
0.3127145397701139, + 0.918158030578588, + 0.26557436108642674, + 0.9758998325022192, + 0.07612762173113752, + 0.688327210890938, + 0.5407046834914535, + 0.4445640999574022, + 0.8108683448099567, + 0.22184349273821347, + 0.6551625977875849, + 0.24409790964275402, + 0.27570706849680027, + 0.051588066178941405, + 0.6219512912973157, + 0.6937187942519144, + 0.4311868670829897, + 0.5561215944229493, + 0.31256177473587843, + 0.978651615216966, + 0.8946810827124051, + 0.9354645652293422, + 0.2650408402857629, + 0.832894569943264, + 0.29766363866724543, + 0.279800658060547, + 0.7551312547840624, + 0.2890183006417396, + 0.6345511402536168, + 0.30079162478787524, + 0.05912231718880834, + 0.8417731633748113, + 0.39288776609135, + 0.6261815972960728, + 0.7696783995161812, + 0.640929037412604, + 0.8425614658767965, + 0.1007880926069531, + 0.3622365281632627, + 0.44440002307385307, + 0.286719791491863, + 0.2317842424120221, + 0.04119440154705489, + 0.8157136551158818, + 0.7857540774441798, + 0.7395193237968447, + 0.6192001754131568, + 0.6578504208437965, + 0.5764504744616069, + 0.6432009536405543, + 0.5375897917229031, + 0.3899136253964405, + 0.8290486564347775, + 0.36583899344380266, + 0.7238813973149291, + 0.01830445639651257, + 0.7002144852795528, + 0.3356615399744517, + 0.5316738334471584, + 0.89577953942293, + 0.7469504654154129, + 0.225738129481033, + 0.37251075503579856, + 0.6391549669948947, + 0.8616029497466651, + 0.08726532548644406, + 0.5944652923538913, + 0.1302996670222204, + 0.12316380287841522, + 0.40493651037057943, + 0.8856657465078077, + 0.32075505134921123, + 0.4509837435107429, + 0.7545760550593441, + 0.7769330360979695, + 0.45503741607689285, + 0.16144162949480823, + 0.5900123628051581, + 0.403173516885026, + 0.060888960732690234, + 0.12181775260503214, + 0.1766394352099505, + 0.22995887459909736, + 0.9851244781137406, + 0.14619542363008597, + 0.32444881651601176, + 0.6872986723632716, + 0.8975010605688444, + 0.5671990831930617, + 0.3951155156990216, + 
0.7511204184605516, + 0.5185407384009807, + 0.28540090097678017, + 0.588325769065312, + 0.08714739390351978, + 0.5788944733124046, + 0.6388229052185809, + 0.4997638431761786, + 0.5677915595670125, + 0.6007612981607046, + 0.41180095629592506, + 0.8659759378768227, + 0.9385441276971631, + 0.9810254140496176, + 0.6587638922156728, + 0.11209775151323809, + 0.41012369780046687, + 0.8959122922810832, + 0.4437020404684159, + 0.47447040056318446, + 0.6662203740276853, + 0.34618157780424086, + 0.9732658202373514, + 0.03757092900977144, + 0.8445924318357904, + 0.3662948146257604, + 0.40626273590335205, + 0.29408513702046013, + 0.11571328907545575, + 0.8049582900341045, + 0.43267727424022207, + 0.4156505651397996, + 0.5373857954714363, + 0.27815717521683414, + 0.09293798060999447, + 0.3445313192923223, + 0.4371364326534972, + 0.19657515412167492, + 0.9508249260246321, + 0.9466442771157112, + 0.39107825438522936, + 0.5284090958939895, + 0.8238837861448407, + 0.5672563134411254, + 0.38310105477510015, + 0.579352698268399, + 0.6600387450544518, + 0.8893690695560387, + 0.6908108374584946, + 0.07050998532346375, + 0.1681389943452659, + 0.05827528052767261, + 0.729333466096751, + 0.9597762730437452, + 0.7458310071884376, + 0.26287934274233804, + 0.47415694806644093, + 0.34640001815941146, + 0.7023358229078066, + 0.8473361393609283, + 0.6126762294667626, + 0.37631638863992944, + 0.16368446893613675, + 0.4250601254725521, + 0.5024941113568316, + 0.18696399952309095, + 0.04615838794682936, + 0.8880658733183382, + 0.15036697652738618, + 0.7558458207788462, + 0.870235287488823, + 0.957082206284556, + 0.29500827673499486, + 0.7655606435850764, + 0.15956475228326839, + 0.39728758722369306, + 0.7556880097328045, + 0.4607323172600133, + 0.8744459402120672, + 0.7901791886647448, + 0.7353865468591724, + 0.849196582241191, + 0.22626456195845024, + 0.6096444010079363, + 0.7523672046025225, + 0.26327447590871433, + 0.6196353695853406, + 0.4289706005759374, + 0.5751637742742446, + 
0.08271754504478757, + 0.4804425705084825, + 0.46638741921372073, + 0.09322183544323615, + 0.5509813517740249, + 0.7758468545529311, + 0.5963198231631923, + 0.5919196788009434, + 0.30209545853939945, + 0.4144727964246955, + 0.8102379734730197, + 0.015041937914722636, + 0.8013947696150248, + 0.48239064242241414, + 0.8825847692270286, + 0.11598518144530445, + 0.8815327699603377, + 0.21266400093800553, + 0.47805508860243007, + 0.23114484364594667, + 0.8815887369367482, + 0.25876476828962425, + 0.9358757098887812, + 0.8893725860482015, + 0.26344055729456783, + 0.7358005345075653, + 0.0866478002745189, + 0.676011607792312, + 0.3758616640096889, + 0.9071177213106677, + 0.8324523892626204, + 0.9243233456765426, + 0.9567610563439569, + 0.28199231210183096, + 0.03968535501964232, + 0.4694901436008866, + 0.8496179713167248, + 0.6636303105817718, + 0.2376676101711943, + 0.0690403785370941, + 0.6527257331853219, + 0.5903750585147725, + 0.7091455312100029, + 0.3166064470610629, + 0.5325384340410123, + 0.20741293321957932, + 0.6239021034830816, + 0.050226448976075866, + 0.6137436325748963, + 0.8850251571579181, + 0.48066177631705087, + 0.36188906275157695, + 0.8090970392521835, + 0.318113901899028, + 0.531273604554789, + 0.017852449875078724, + 0.614652574359509, + 0.1992681227467077, + 0.6228072794433337, + 0.40128124175915414, + 0.002601644060992614, + 0.11469248538140198, + 0.9228706367423446, + 0.9906377110794229, + 0.15580034363219175, + 0.5072352672997331, + 0.08170679154114213, + 0.4603442506838389, + 0.7881844576908136, + 0.8082140678481798, + 0.4346250906943161, + 0.44914804644415895, + 0.9276441176340413, + 0.021600086135395657, + 0.5065743878658895, + 0.20980292857085636, + 0.43038454843277996, + 0.7377473322168556, + 0.005431104253906249, + 0.7900904352035243, + 0.7429370395139598, + 0.4540191348782765, + 0.12621244898798223, + 0.33102117051726354, + 0.46537921758000333, + 0.6699753630372093, + 0.13729143154864343, + 0.5998634113225406, + 0.9081168257657946, + 
0.43820226344802526, + 0.8313631300978163, + 0.32673470604586485, + 0.43296311239924923, + 0.9392436863889443, + 0.616695195171261, + 0.5177949375916849, + 0.9596635276378405, + 0.8785710988719285, + 0.996486603690265, + 0.3565529887370077, + 0.4605530036142562, + 0.48834702786568074, + 0.29894534701711073, + 0.13815532984971746, + 0.06542810541646804, + 0.33644299008693435, + 0.7407172820834985, + 0.38387599952929596, + 0.4219037928478685, + 0.1289443517795088, + 0.08496171090310822, + 0.4615594571811862, + 0.15237791846133564, + 0.6656169224973635, + 0.17534283002199857, + 0.012030155110893004, + 0.9348840551629855, + 0.2045755711917514, + 0.8073983445855917, + 0.1738206792279524, + 0.27665739967875624, + 0.38085535043140495, + 0.3035697369031277, + 0.917020126076206, + 0.687371128410807, + 0.4369072621226425, + 0.41563738874349976, + 0.9650227216326556, + 0.49554172171114463, + 0.36049533818472823, + 0.5581306868486844, + 0.35216206701708885, + 0.8760942758774782, + 0.16918589950893104, + 0.8024795080366857, + 0.17331488709064247, + 0.34062414334743607, + 0.13997599867166644, + 0.26900148113273137, + 0.07692988071785556, + 0.6822108692118121, + 0.6814686551352754, + 0.08025259641589777, + 0.9908708420189721, + 0.8920860583334906, + 0.2174747196456609, + 0.9386166310013702, + 0.8558348561789517, + 0.5033407160815184, + 0.09855296979839656, + 0.6114331959310799, + 0.7385315905991914, + 0.2135052254575136, + 0.05858394796197941, + 0.6074798782765946, + 0.8706352730090806, + 0.9459290988119022, + 0.4712719019432351, + 0.24201990492354541, + 0.9535736234373514, + 0.42083707496232403, + 0.6740314203846373, + 0.16971209630966588, + 0.5742303699985246, + 0.7327366937410855, + 0.9405644946850603, + 0.6693174034963342, + 0.24874242176210337, + 0.06831183061275226, + 0.646045682925065, + 0.8323934392975538, + 0.325841488450341, + 0.4635758551378758, + 0.06394320130790931, + 0.877809327547959, + 0.3427040946174452, + 0.8406961237189915, + 0.8756087310309373, + 
0.029741299187516335, + 0.27400439074289173, + 0.33709400123157174, + 0.36896055753764456, + 0.6656460153219789, + 0.07673435985825394, + 0.5876958465509375, + 0.7448320939806813, + 0.7274920981681308, + 0.7882503449093154, + 0.7718584339838213, + 0.1879874152763492, + 0.9294333046187256, + 0.20786883493318187, + 0.7219916821139297, + 0.7852178163677799, + 0.7423310716265287, + 0.21422306772962296, + 0.6393804988926635, + 0.7744971238505398, + 0.7638593932921186, + 0.0852762078132896, + 0.7624777353969142, + 0.9836349783165318, + 0.03713771538942745, + 0.4842287818860809, + 0.24075603648316157, + 0.19569196084837825, + 0.6931808170186252, + 0.21700506140528608, + 0.6893896815081554, + 0.00952379354506816, + 0.2523152860228668, + 0.3665206678698878, + 0.012516153921648066, + 0.6627902465865023, + 0.2514131947780418, + 0.2636519327124738, + 0.048492850918106956, + 0.5841845941410169, + 0.23534542047896145, + 0.46406416378122606, + 0.793374772700839, + 0.7278348420486463, + 0.9969676684454453, + 0.9015255728644763, + 0.982551039451459, + 0.9692803131091698, + 0.019463114454430497, + 0.29117704631643815, + 0.7648227326425444, + 0.7666125077528164, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3549045467615085, + 0.04239916419002787, + 0.5620492807776227, + 0.7297384973354955, + 0.09268565975236731, + 0.20630548950635297, + 0.7095356570435677, + 0.8930377673795764, + 0.4579675275762498, + 0.19413855245370026, + 0.8748843605341408, + 0.3538955519023814, + 0.1229623458205813, + 0.46491295105148867, + 0.027880553346125603, + 0.7883915454558059, + 0.3496459203733686, + 0.010395869176374584, + 0.10686317273713519, + 0.31608992920712375, + 0.6729912075120715, + 0.6762396591853704, + 0.1474086507975666, + 0.0726699229940776, + 0.5923168490645327, + 0.1956312970045544, + 0.23798182084711117, + 0.4597870400283238, + 0.39278746229491135, + 0.40653157438392773, + 0.28409364950416216, + 0.16941774500541196, + 0.6994947622330466, + 0.0805697263118933, + 0.6886465523640434, + 0.334479527356387, + 0.9901326646819454, + 0.9576290979616602, + 0.7542209866552148, + 0.15509383431110246, + 0.3319379098474641, + 0.6241128331334818, + 0.32307593675839086, + 0.6508322056689247, + 0.20749537504490367, + 0.41937200490620796, + 0.05632227344272123, + 0.2527475219333315, + 0.07922505757588172, + 0.7349473013224028, + 0.06879913829175632, + 0.6091930957781132, + 0.48397322622183747, + 0.3905432650552372, + 0.02001009486312677, + 0.6889770658457663, + 0.19665293993866206, + 0.3847005267950533, + 0.2602679913341992, + 0.8761433546392916, + 0.904478428722847, + 0.12409919407314007, + 0.3662717495548532, + 0.42887924711912384, + 0.3272464145611804, + 0.6070551704766106, + 0.05928383409938465, + 0.7381262016399551, + 0.7202487172523507, + 0.8309182605925579, + 0.7470260145990597, + 
0.39532018866107776, + 0.41021816655867793, + 0.2826249834817621, + 0.5774041753088472, + 0.6747135253892305, + 0.44401252969036187, + 0.35721118540607444, + 0.1025627032248585, + 0.46883064076847114, + 0.3009690454643301, + 0.7431351863889715, + 0.954941151802188, + 0.8038592659328729, + 0.25098714374175435, + 0.17563310137879862, + 0.7213564489816591, + 0.4139762649613933, + 0.7065114394592031, + 0.9563454091674533, + 0.7860014417175775, + 0.442118114505634, + 0.1782282767489215, + 0.09334735841774455, + 0.44099353489775484, + 0.19602419854598585, + 0.8119996911862927, + 0.28039932458444805, + 0.6407735963299092, + 0.8637411051962457, + 0.5789495887799048, + 0.18368331103064894, + 0.4265078800776473, + 0.8429147192278797, + 0.8045207074115083, + 0.5614334497181991, + 0.33947354089270965, + 0.810362056015736, + 0.6772678374369094, + 0.6239133178324836, + 0.014289087987596427, + 0.7711828371272454, + 0.9364108699088778, + 0.08402075728466218, + 0.019342737435628088, + 0.17401950134326727, + 0.41970430629069067, + 0.49230924700923095, + 0.10086556748929099, + 0.8827004022267325, + 0.7483468764437824, + 0.38620073700337143, + 0.37797738720053087, + 0.0039039967843818024, + 0.3294361257279106, + 0.7944980236743932, + 0.06925009366749613, + 0.5910717076565115, + 0.2960764970057521, + 0.39804958307957616, + 0.9766636317988647, + 0.7746406322185208, + 0.5084392052197624, + 0.7723231639424878, + 0.8710401942042832, + 0.8406723232447347, + 0.6282391813889078, + 0.6874609795594397, + 0.7132116289858705, + 0.11338595486989123, + 0.1195203055576024, + 0.8591837615980576, + 0.13597907371215368, + 0.4929481696106627, + 0.12451899941332001, + 0.28904881937638316, + 0.6971909761832441, + 0.8370316872556588, + 0.7726145455962234, + 0.525155037090338, + 0.009716816982416776, + 0.05192140908550569, + 0.07326393435548606, + 0.7600420478108455, + 0.6166986558502394, + 0.7868311391347459, + 0.9382866278342784, + 0.6738599587826435, + 0.5453900732133369, + 0.6624806268145025, + 
0.58520385044777, + 0.5222944939144947, + 0.6702771905253612, + 0.43617748526952704, + 0.4263759489804464, + 0.017206386497560455, + 0.6471540522859932, + 0.9985507040936809, + 0.23446778303374283, + 0.2023839895593661, + 0.761411741009969, + 0.8950888981899489, + 0.12273466267227784, + 0.06548282997775123, + 0.08216940800935102, + 0.664875993452441, + 0.8980796038235784, + 0.8284930549614943, + 0.6218946843063636, + 0.960736461286523, + 0.2730377868620113, + 0.09488395898595936, + 0.8776791738282163, + 0.58527906318683, + 0.7968145645973814, + 0.5104941582045647, + 0.7522666298277298, + 0.08362759408882559, + 0.8557411533812304, + 0.2793056447170922, + 0.4633285206679253, + 0.9643091594761276, + 0.8643444118470223, + 0.45268270020125323, + 0.21965790317696698, + 0.9633913739468847, + 0.770307488387535, + 0.07819396829537384, + 0.3419170401726893, + 0.9232864381280271, + 0.05074709438073732, + 0.7936540086227151, + 0.36436755061851733, + 0.47197955436349515, + 0.8143908442536172, + 0.007508044926387014, + 0.9039477664948096, + 0.07244549693017344, + 0.2983934978565449, + 0.4484841735323615, + 0.2950992812860116, + 0.08561520607852902, + 0.9573718467240949, + 0.4050705097577936, + 0.5855428220264268, + 0.8586793317556853, + 0.06433609328327128, + 0.30453393314569266, + 0.3768789866658515, + 0.7020301805546415, + 0.7299760925374299, + 0.3191318156508369, + 0.03869674812232948, + 0.533244763296928, + 0.7113435306823725, + 0.08998785345906324, + 0.7248132747696855, + 0.14546661772775016, + 0.8359841835683716, + 0.09344926218037752, + 0.8800423179471679, + 0.05144135982164122, + 0.8808812858948356, + 0.0201951582000508, + 0.8399049891572149, + 0.8708337415915963, + 0.04194791258295083, + 0.4001504433387769, + 0.8541233122216242, + 0.5248208257814692, + 0.8514008098368976, + 0.23288979349343641, + 0.6278897311955806, + 0.4922106813726256, + 0.4991536919836277, + 0.03843047857801063, + 0.14344720716149884, + 0.22969136337129137, + 0.9985465341275686, + 
0.10928878512026263, + 0.5696787960710844, + 0.32882359926662175, + 0.7929501398959433, + 0.8225722396802971, + 0.42929053260312844, + 0.03915174257397358, + 0.39907388271523314, + 0.9753998403865101, + 0.7607893071236965, + 0.504732153888512, + 0.8716719462302307, + 0.400963205809142, + 0.7614813122713756, + 0.4655365269027145, + 0.689627215529766, + 0.731174423115421, + 0.3981959712991786, + 0.0999707518171149, + 0.4328274211419346, + 0.14494924409225407, + 0.6254361911688172, + 0.876385728944829, + 0.8384964273110884, + 0.6675725081700755, + 0.4556515419547643, + 0.812140934841021, + 0.09568520131138514, + 0.03374397970894971, + 0.051407548448625606, + 0.5893865460846739, + 0.49218368533386314, + 0.7102620128442536, + 0.9964337241373918, + 0.6054977227959006, + 0.29051862563698805, + 0.31651565460141484, + 0.7282131704311045, + 0.5569046271695639, + 0.0994312759988597, + 0.4689953509309186, + 0.6779321483921179, + 0.5445916111630644, + 0.9458461906458582, + 0.508175561611127, + 0.3724033120797884, + 0.36690946169539695, + 0.004387011060732138, + 0.6314258068212949, + 0.9570099552743379, + 0.3461337237948321, + 0.5440246588610647, + 0.14527355682694953, + 0.880194178403426, + 0.6229934798296954, + 0.31736022940430886, + 0.03714957827615872, + 0.8032977983368944, + 0.06202615284815882, + 0.7530004434640041, + 0.5585411456791424, + 0.5852281615561222, + 0.7401832333063805, + 0.024517148299622593, + 0.5135009293866668, + 0.5638275762353786, + 0.34121674305483796, + 0.042953925668501536, + 0.577267126203172, + 0.6333561693526448, + 0.8599755969767562, + 0.1164281869501701, + 0.45188225231868817, + 0.45693372825553413, + 0.6108962390341512, + 0.2792682956674706, + 0.25586841768146906, + 0.14259184482750353, + 0.38339651651364326, + 0.9987310614908452, + 0.5449807794953476, + 0.07881192295903594, + 0.9483831182535897, + 0.21319307369621376, + 0.11720185464148059, + 0.7756308156116596, + 0.2158686580047242, + 0.22121624623532998, + 0.6413528774235441, + 
0.5849597483193713, + 0.8209556290552137, + 0.024419570367295362, + 0.6935256982364931, + 0.8429562131817895, + 0.580060907882237, + 0.1019783887398118, + 0.6057417497936475, + 0.6584741002630332, + 0.14414056722005097, + 0.3628847725566814, + 0.5240237532459261, + 0.484035525255997, + 0.7529067981012579, + 0.5400323245956503, + 0.08400663102441097, + 0.3071538341129647, + 0.37268659400138693, + 0.8384369271676938, + 0.6830314446497705, + 0.18174508318770222, + 0.5906582460778282, + 0.9732459440261039, + 0.09911408617631379, + 0.8263888793866928, + 0.9806324406157072, + 0.7439120782649592, + 0.845006133082171, + 0.7556611985947994, + 0.44913030612332017, + 0.8047033786386484, + 0.6109955940896713, + 0.988162111664907, + 0.5162248705844749, + 0.6836604803201668, + 0.8572107201204873, + 0.11317991219242696, + 0.5167955852515312, + 0.10665185258114718, + 0.8556421983765271, + 0.277041972171043, + 0.4930072932661348, + 0.38919236787781086, + 0.015254189018723996, + 0.905484789793142, + 0.5804004334936455, + 0.4955865997185239, + 0.8314930952847817, + 0.8999781696551226, + 0.6218672247952459, + 0.11183195906536336, + 0.850620088685216, + 0.02763808067347462, + 0.049560996017135595, + 0.9847360784391932, + 0.41842963766630525, + 0.1481173710821374, + 0.12598660187017097, + 0.17516276537023667, + 0.5677078083328646, + 0.8758161161150434, + 0.7451459004062713, + 0.9556098816891438, + 0.08735238503978138, + 0.1242308926220479, + 0.17699289618163005, + 0.6485904419878334, + 0.9788418660783021, + 0.41313591818283957, + 0.09988887916101519, + 0.31862184638679025, + 0.4433539696020661, + 0.35937501847926134, + 0.1918514711628594, + 0.37035578582658235, + 0.7247960213601661, + 0.9790561943658818, + 0.7084949251000522, + 0.6835349127313585, + 0.28338196568753127, + 0.8471188669185117, + 0.9076291876910116, + 0.26922267345500395, + 0.25715084258330834, + 0.607481791499339, + 0.4895428273516049, + 0.08468055059005386, + 0.3096520314924064, + 0.9103822694962878, + 
0.9494258655814259, + 0.47317632686727784, + 0.8304178063026396, + 0.3136948972078951, + 0.32981627099159916, + 0.6954711403640765, + 0.524281107962152, + 0.7689156228195979, + 0.14398116608924172, + 0.1838694153262853, + 0.9047881036208002, + 0.2832664339236689, + 0.4604413304932282, + 0.34340596987332195, + 0.01911226653419673, + 0.08520634556918005, + 0.5350098723295885, + 0.6023575103762875, + 0.8012606335666543, + 0.28605538656110285, + 0.15558345100761328, + 0.2937015191114739, + 0.2449234353989107, + 0.9694363332002457, + 0.9087551244254385, + 0.14059446799981923, + 0.0015735886915587693, + 0.7317692186088581, + 0.48295076903212164, + 0.31002441467910913, + 0.0824649941636647, + 0.7046041585575201, + 0.527831762451573, + 0.9518638382568707, + 0.2866294218148713, + 0.07230353404555179, + 0.2342083606660702, + 0.35312852934434835, + 0.1590089534532222, + 0.3855205550869971, + 0.23681088643373305, + 0.3590709425989018, + 0.9919373370584181, + 0.8953112122854749, + 0.07532606031712319, + 0.20259384294125882, + 0.9545205119859813, + 0.36948936251579545, + 0.37584820125948326, + 0.2425308785977046, + 0.08334220637055301, + 0.34249336138921815, + 0.03613182421648209, + 0.04842219651420632, + 0.6401177465010348, + 0.14871710449862985, + 0.21138852532645513, + 0.8434926396965746, + 0.11292881370902896, + 0.18588957838068365, + 0.561626742191324, + 0.839247078088345, + 0.016254225904769548, + 0.8601646035374751, + 0.5971042984870208, + 0.7916233499070181, + 0.10652446899275125, + 0.4938524246376115, + 0.40120537322978933, + 0.2006497010404803, + 0.26543984340233817, + 0.2538621594146887, + 0.4526263232933132, + 0.20434295051741924, + 0.8613476323833271, + 0.09370227858004332, + 0.5047745431177096, + 0.07794727167664028, + 0.27306994272222207, + 0.43146305906240956, + 0.007307980851831486, + 0.8562247091342103, + 0.8213089085731266, + 0.6191148446878322, + 0.869400313638827, + 0.8117369559273934, + 0.6577139777741439, + 0.21447273042019654, + 0.4541717159198426, + 
0.8936860806925331, + 0.03259955820080618, + 0.10479310791009855, + 0.5187574249202905, + 0.1501803335775994, + 0.03288622417870046, + 0.8890961090665441, + 0.5748014478855076, + 0.09583671032660623, + 0.4930437622198026, + 0.4321090411928221, + 0.9104346576565615, + 0.6319239460366327, + 0.06528998092347693, + 0.0902546632754927, + 0.7027263070461592, + 0.5156229639556337, + 0.7653123303419905, + 0.05726505208284005, + 0.6342781661686804, + 0.4531178897882824, + 0.5142562136725705, + 0.39114820121480665, + 0.171645453520327, + 0.7228381796204829, + 0.15975239670324493, + 0.04731184462084859, + 0.07231256295730748, + 0.8805827759846833, + 0.7037242334774125, + 0.47375185092126915, + 0.8501745456167581, + 0.5095615409262099, + 0.540424323998919, + 0.7036420763180282, + 0.8367024870181061, + 0.09287858863873744, + 0.13258159063259523, + 0.20879233222808669, + 0.17669553664960846, + 0.7706897142413555, + 0.33216610135018865, + 0.2765656144117612, + 0.48899200161796097, + 0.28094692890467177, + 0.0690072434400375, + 0.787277349822785, + 0.32026971014453487, + 0.08660784852691517, + 0.2697873517464343, + 0.18865175603891926, + 0.47206686055672953, + 0.4589125247032527, + 0.7947524532739587, + 0.3228066353105682, + 0.19095091260177122, + 0.486891604204169, + 0.3632330056344869, + 0.236036799000914, + 0.6136357462984242, + 0.04806005257298929, + 0.5232326285822311, + 0.6907705338964202, + 0.5902910320222682, + 0.5558980740927516, + 0.42781006148574074, + 0.6663822417341555, + 0.8215177886959044, + 0.3361841355888917, + 0.00884867316800575, + 0.45589283035368555, + 0.2786213639921723, + 0.7802024752893354, + 0.350252671514489, + 0.7445158871377848, + 0.08539071131417797, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9028294403098109, + 0.4971422391169885, + 0.25139569740384937, + 0.9526629990842792, + 0.8496943481229121, + 0.1265589331571274, + 0.7907764661094938, + 0.17996515611350639, + 0.40041638314818695, + 0.5080397153321874, + 0.3211194239648827, + 0.7870851762060899, + 0.7921313766981629, + 0.0, + 0.0, + 0.0, + 0.4132792226194869, + 0.31176712205095924, + 0.6699076118671365, + 0.4628674105896252, + 0.002706941176983668, + 0.5799461493120972, + 0.4372276660366662, + 0.8587857879261196, + 0.5051884365664271, + 0.27759836862359344, + 0.8297372643744151, + 0.9339605564631311, + 0.7150532682074798, + 0.0, + 0.0, + 0.0, + 0.9637412051951595, + 0.6216711520273386, + 0.34507354975285465, + 0.5022409721697791, + 0.9466003601217432, + 0.414922388594683, + 0.5635060389002671, + 0.5930052832199914, + 0.6600253634230627, + 0.10335697783639897, + 0.08312090864037991, + 0.3309539816379585, + 0.05504760299836842, + 0.0, + 0.0, + 0.0, + 0.05120384888603313, + 0.6701268592511975, + 0.7492150910884211, + 0.9261616816162551, + 0.6862725096583036, + 0.7606300137316441, + 
0.5411934864586017, + 0.2447863998800348, + 0.0016782306935890778, + 0.7724977730522188, + 0.8746275884659321, + 0.7195237832360659, + 0.4898245492370237, + 0.0, + 0.0, + 0.0, + 0.9029713299396926, + 0.12340368237851163, + 0.9924917935318855, + 0.5886250331319656, + 0.43259929603315683, + 0.9358736726716493, + 0.08533557521396007, + 0.16958103095811305, + 0.5624756994733194, + 0.029588915602866472, + 0.17621943714203347, + 0.15742462677687685, + 0.26684421786346324, + 0.0, + 0.0, + 0.0, + 0.45652972921713997, + 0.5754205597341694, + 0.8254422585423511, + 0.6569047664343146, + 0.7010039097601395, + 0.9750708781143511, + 0.5711497560606417, + 0.8273339336262673, + 0.13102642027744393, + 0.08295895288025279, + 0.16791466338389482, + 0.8848628835533121, + 0.6018123471620269, + 0.0, + 0.0, + 0.0, + 0.04276236562186597, + 0.5349001531767665, + 0.23911530279950155, + 0.8730322260986225, + 0.9372545272341452, + 0.7026312965246855, + 0.38263604482201086, + 0.22657303478615087, + 0.8133846139493341, + 0.18761402739010324, + 0.9629831550673493, + 0.9034095322749641, + 0.9471372719265045, + 0.0, + 0.0, + 0.0, + 0.24899869036799982, + 0.41368177795272276, + 0.6979127425560141, + 0.16810257168867537, + 0.5985773887665021, + 0.24433885971836533, + 0.2832773508785057, + 0.532977441185836, + 0.2733232728942776, + 0.4847106505484732, + 0.8115591540415253, + 0.3350712240949053, + 0.27269060427074066, + 0.0, + 0.0, + 0.0, + 0.6484793579991979, + 0.10913957063722657, + 0.8578164200417109, + 0.19026696817368605, + 0.9459250279725616, + 0.10015031570808497, + 0.2193431311103755, + 0.4259324011431741, + 0.21053773161209688, + 0.656322051845015, + 0.9485023202812123, + 0.6617916539246315, + 0.027904052322429873, + 0.0, + 0.0, + 0.0, + 0.370638885605293, + 0.5116149836200966, + 0.3397189277587196, + 0.8212190704639679, + 0.11150334158527875, + 0.7305138532746923, + 0.2727836214524296, + 0.945530909565055, + 0.395929659086812, + 0.9336479618615219, + 0.44221417113155803, + 
0.12520262415258965, + 0.8188999439090872, + 0.0, + 0.0, + 0.0, + 0.006373500277563449, + 0.22675142853651498, + 0.442851282694645, + 0.09886892756427157, + 0.4488549555810257, + 0.6990517334835473, + 0.7223576223600511, + 0.5252788150978276, + 0.4540932758947144, + 0.4462684781017259, + 0.5728559597473211, + 0.3015784488467229, + 0.9355769105316407, + 0.0, + 0.0, + 0.0, + 0.8504141880279061, + 0.7009377291414854, + 0.3796763024251937, + 0.5478722646376566, + 0.5811068774631647, + 0.7925331594034736, + 0.6705470057399852, + 0.9060653010521963, + 0.15847714555109604, + 0.8868353388238326, + 0.03592347358785197, + 0.9013490640593242, + 0.7955080468613454, + 0.0, + 0.0, + 0.0, + 0.8992433213098389, + 0.054564473707647054, + 0.5738149238294871, + 0.5683638015103154, + 0.16487468189882515, + 0.43433613414947125, + 0.8664382140081228, + 0.20521923743173853, + 0.3904584310354958, + 0.7414925329383845, + 0.6537863956684857, + 0.9720388804607526, + 0.936535094920315, + 0.0, + 0.0, + 0.0, + 0.7100634864804531, + 0.7654704964607266, + 0.010761984238630062, + 0.3252380926358641, + 0.8950788090502504, + 0.26951424043797545, + 0.848653116387211, + 0.35996257216503225, + 0.9323214628354488, + 0.7127093833905638, + 0.35916944559858066, + 0.9092281325802658, + 0.390473329024966, + 0.0, + 0.0, + 0.0, + 0.49765875114991065, + 0.3062500953952072, + 0.5809387335917323, + 0.9466877478491078, + 0.757331305379941, + 0.8244639953917496, + 0.07299856859652876, + 0.17109013553251362, + 0.3074624940578241, + 0.9958333257414079, + 0.5326369225951026, + 0.5566640629948447, + 0.12602651306236645, + 0.0, + 0.0, + 0.0, + 0.44380854692095506, + 0.9045313460305532, + 0.6940373189900426, + 0.8193949525915959, + 0.028520547773044358, + 0.36678312156491055, + 0.8909521808800771, + 0.9722809503007704, + 0.07045861648088714, + 0.18185485779144916, + 0.6359426110544535, + 0.07695595315492987, + 0.7094361454198964, + 0.0, + 0.0, + 0.0, + 0.9763969186458143, + 0.2472233004105332, + 0.9638701931246583, + 
0.14249000382914023, + 0.31912428779353497, + 0.6719019531087078, + 0.5311292590414224, + 0.8081945502973661, + 0.78532008901544, + 0.6863865358520691, + 0.14733375429237705, + 0.11755778045555298, + 0.8577019249822763, + 0.0, + 0.0, + 0.0, + 0.7969549518529243, + 0.7458936917504971, + 0.9676019966726607, + 0.9510753823859966, + 0.6381584251543052, + 0.6808481211959632, + 0.5513156555424487, + 0.8460504066041273, + 0.32224124117380737, + 0.5930781163842266, + 0.7784204262529493, + 0.5652031321749275, + 0.6794981633453622, + 0.0, + 0.0, + 0.0, + 0.22030501982536088, + 0.09928405607108193, + 0.06857933036817898, + 0.1206316939342732, + 0.7973051833978605, + 0.5865667338642133, + 0.3938140382551396, + 0.8241719118420732, + 0.5794838465309735, + 0.8553211995952452, + 0.677171111538811, + 0.324650597972588, + 0.6997094148654934, + 0.0, + 0.0, + 0.0, + 0.34543273093526583, + 0.8192913481262087, + 0.5627658694618872, + 0.03720439262504094, + 0.039150272346090365, + 0.8178301165327282, + 0.2696432587134189, + 0.1797149601274559, + 0.28155814287589287, + 0.01838790297751669, + 0.03628830850140874, + 0.7297664742286003, + 0.465699276350639, + 0.0, + 0.0, + 0.0, + 0.9116144210876892, + 0.325914680331864, + 0.42486333837046897, + 0.5336076594319062, + 0.5815544881512839, + 0.513465401582817, + 0.2970074797047717, + 0.5628189429557754, + 0.01654347333164996, + 0.9646905210187212, + 0.49182438345843726, + 0.5381777458264859, + 0.1675003191883312, + 0.0, + 0.0, + 0.0, + 0.9015193522982415, + 0.7441776576916009, + 0.43027356530349137, + 0.4527258130955085, + 0.07759684593654426, + 0.6068352310915744, + 0.48517140331348496, + 0.8950800466856947, + 0.3041109497708152, + 0.087748834915378, + 0.21442939598927924, + 0.08551845053736373, + 0.791371294081719, + 0.0, + 0.0, + 0.0, + 0.5887376478221283, + 0.7172580648739464, + 0.4225061729280529, + 0.9902679103585281, + 0.6114803581586474, + 0.5265289626344798, + 0.04401588847007154, + 0.3268399051793768, + 0.30224367983829126, + 
0.004288882276485162, + 0.47652878007750155, + 0.4783795641262657, + 0.6280790804360852, + 0.0, + 0.0, + 0.0, + 0.04662179178947112, + 0.10083204328914475, + 0.46857648337816005, + 0.47382496562043686, + 0.6463445496807434, + 0.30482798537045297, + 0.13402539318584494, + 0.16369729624741114, + 0.3427152872307945, + 0.20090861388795567, + 0.5079904446023673, + 0.47906548235497515, + 0.6117359347358423, + 0.0, + 0.0, + 0.0, + 0.28453104021235043, + 0.8881521277185567, + 0.9692550442113321, + 0.7948178629174499, + 0.36760773563514226, + 0.08073182604307583, + 0.4411476996440761, + 0.5656213072285704, + 0.9239053840748805, + 0.6804999389955505, + 0.053925699492599954, + 0.34025394693542277, + 0.06998706760566176, + 0.0, + 0.0, + 0.0, + 0.2070098721381619, + 0.32952001942733844, + 0.9668444780911369, + 0.014592186594883372, + 0.7970903609024033, + 0.6990625811630484, + 0.38875997029835085, + 0.4744144231510765, + 0.28516057090759916, + 0.7523563795311982, + 0.34902274864989435, + 0.349056714320124, + 0.15101337524356917, + 0.0, + 0.0, + 0.0, + 0.7910561730198871, + 0.3916767189067516, + 0.5267924268123125, + 0.8340027909506321, + 0.15880158201628114, + 0.9376868182706565, + 0.5864283826398795, + 0.6800592569128557, + 0.7412593163159412, + 0.17006482976879866, + 0.8586946843619252, + 0.13123540752479168, + 0.3117760303045205, + 0.0, + 0.0, + 0.0, + 0.7116301840554686, + 0.7665984065521804, + 0.11195658661875607, + 0.4185786899570766, + 0.6920977075784367, + 0.15370854231932873, + 0.13699526170593668, + 0.05578681209307079, + 0.07422690201088744, + 0.2279937210166575, + 0.3544316121765825, + 0.7766360959320119, + 0.9149272641717396, + 0.0, + 0.0, + 0.0, + 0.026270039923457777, + 0.34645994834486626, + 0.0964183648380339, + 0.644736271722694, + 0.9843436463998434, + 0.8315882467372318, + 0.34431798045467543, + 0.4136374217717127, + 0.547264756294244, + 0.7130734181857544, + 0.44580163338492973, + 0.8590384683777281, + 0.05673244008761047, + 0.0, + 0.0, + 0.0, + 
0.7596144976266055, + 0.7700854185870692, + 0.44742946293077634, + 0.23531339704765664, + 0.4788652169760982, + 0.08750845964819831, + 0.37399361800120046, + 0.5766536754312145, + 0.933527479901645, + 0.5994630901841026, + 0.09753080681374215, + 0.4472145097516641, + 0.07385962488321485, + 0.0, + 0.0, + 0.0, + 0.10254445580952054, + 0.1322123648239686, + 0.8946207565900517, + 0.9612697646847594, + 0.2502756790363073, + 0.2259206770930715, + 0.9363413586939628, + 0.5660142730972817, + 0.5840800293060847, + 0.5157214734477279, + 0.7125177892193846, + 0.48911365188670475, + 0.732940451760642, + 0.0, + 0.0, + 0.0, + 0.4443723711865347, + 0.8387878531620769, + 0.22796433090951584, + 0.8671531578576058, + 0.732509828973493, + 0.21041446332525948, + 0.4389103799693328, + 0.1612383216056471, + 0.0337971095486701, + 0.8376028929591584, + 0.2509516687552569, + 0.04372958504123803, + 0.10829543771883066, + 0.0, + 0.0, + 0.0, + 0.6468008220481677, + 0.7084352601695846, + 0.4826248690253867, + 0.8179452815539806, + 0.3032117946809746, + 0.39423528116383466, + 0.4104758009549806, + 0.34666030266179226, + 0.3906585059360066, + 0.7035529411451102, + 0.7574158067044097, + 0.3147458744641488, + 0.5228165812896587, + 0.0, + 0.0, + 0.0, + 0.0591357015193974, + 0.5583924605457291, + 0.11486528401217866, + 0.029317726775774045, + 0.5760452900832792, + 0.19972365072425602, + 0.13639701690136963, + 0.23636013008377532, + 0.0676935603745904, + 0.104081210292845, + 0.4848511405386148, + 0.3801453729548483, + 0.372630596108276, + 0.0, + 0.0, + 0.0, + 0.13619621302166018, + 0.7494713932486778, + 0.9242535222947666, + 0.18067353205227754, + 0.6010132403575262, + 0.5123020631706507, + 0.7180768962977342, + 0.7745188169649472, + 0.29666959040787966, + 0.1259749476534906, + 0.271256956068024, + 0.6790598322975793, + 0.5550279501948888, + 0.0, + 0.0, + 0.0, + 0.5418414851841203, + 0.616729039220696, + 0.3321167015642672, + 0.5343913748679306, + 0.7337808754574986, + 0.30560063539625915, + 
0.37139642305203324, + 0.628548218002559, + 0.32830648200561185, + 0.23435990576456311, + 0.17160412640720546, + 0.76013735636343, + 0.3126255526093422, + 0.0, + 0.0, + 0.0, + 0.66210713196685, + 0.013573658063372185, + 0.387397645799443, + 0.31743393853659885, + 0.8457843240657554, + 0.5672761478259544, + 0.3226398402673879, + 0.15658154982220907, + 0.5418069263098081, + 0.6306682140142076, + 0.8181802480722219, + 0.251592078147698, + 0.006033516504653336, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6344004898947448, + 0.15316020761695914, + 0.7456387094968034, + 0.46986842960309116, + 0.6274897156346233, + 0.775581456829888, + 0.07556252308338662, + 0.5230143227286745, + 0.33159472695777015, + 0.10285375137375108, + 0.13669157841416257, + 0.7703373552655466, + 
0.1403236625530464, + 0.0, + 0.0, + 0.0, + 0.4117545169358421, + 0.9783955783157174, + 0.6300809314927382, + 0.6519430323222988, + 0.03426279779790331, + 0.6708757959742054, + 0.5433833288253564, + 0.7770725423138007, + 0.4152897566207333, + 0.016186137478753526, + 0.5329910099563924, + 0.4340343338394457, + 0.49634335609052604, + 0.0, + 0.0, + 0.0, + 0.3872374905527389, + 0.9577720736573577, + 0.16348883143568127, + 0.4207392526250259, + 0.015732027662631376, + 0.33241342024540144, + 0.3154844227181034, + 0.37649958979097575, + 0.3750038272037318, + 0.16652286466225585, + 0.7458939503901071, + 0.5609499046967278, + 0.07926737892234881, + 0.0, + 0.0, + 0.0, + 0.8198124151484139, + 0.45814066000975373, + 0.6828762184607317, + 0.5556499402574135, + 0.7826235255629705, + 0.04737771542343672, + 0.3503974356532773, + 0.6173105668319053, + 0.8199324263342636, + 0.7900572729527513, + 0.695079405824329, + 0.30363932311499975, + 0.8876889206430193, + 0.0, + 0.0, + 0.0, + 0.1866901581159216, + 0.6487556337689282, + 0.7478475099481988, + 0.2971874200121487, + 0.3657964532681872, + 0.3511809417479921, + 0.7086506127043893, + 0.4422196693468696, + 0.14495163003479516, + 0.6016932960700164, + 0.13522066374694341, + 0.42783133243893656, + 0.233620214954685, + 0.0, + 0.0, + 0.0, + 0.47556683616513107, + 0.5141225724888612, + 0.9011355810005489, + 0.5878134223271019, + 0.42234901143062353, + 0.4572479693783218, + 0.1679726709377053, + 0.40583985447908166, + 0.23709012984483846, + 0.36616289330911533, + 0.8241732346126419, + 0.6387651318446043, + 0.6071974229361227, + 0.0, + 0.0, + 0.0, + 0.0878523368193097, + 0.7926619647815201, + 0.05494639736246243, + 0.17815376014498752, + 0.054655650777180154, + 0.7877406082349225, + 0.13812506406463143, + 0.7139319442180692, + 0.4935473822248203, + 0.03671626111478854, + 0.8481745584611498, + 0.26635700483893976, + 0.7124521090845021, + 0.0, + 0.0, + 0.0, + 0.011095671877870528, + 0.6312106881696702, + 0.9831761079816763, + 0.7158554332563897, 
+ 0.021962320541030644, + 0.38399155630054016, + 0.5186096104322861, + 0.923008426280978, + 0.5952220500968215, + 0.410225899424006, + 0.8141727646152921, + 0.5273810538588848, + 0.33735224674100495, + 0.0, + 0.0, + 0.0, + 0.6992506441369746, + 0.37425380379677675, + 0.44165841273706696, + 0.969406466716224, + 0.6328953923302919, + 0.45892816046120366, + 0.2222194741741711, + 0.7514158601755893, + 0.5203726441177824, + 0.9638643380912487, + 0.2731920221576536, + 0.5758478057821936, + 0.8685305061338993, + 0.0, + 0.0, + 0.0, + 0.6604933337480409, + 0.9900794948287656, + 0.5104443559308209, + 0.4845250590888124, + 0.8187285818968446, + 0.9509580948227806, + 0.3644730358176105, + 0.3712339168363077, + 0.74045599723979, + 0.5922985090428279, + 0.6319840300789652, + 0.8316273685487323, + 0.6767084591069911, + 0.0, + 0.0, + 0.0, + 0.9330092058196776, + 0.497544798370694, + 0.829771043276624, + 0.05251801777970955, + 0.8666027173973242, + 0.9982308029748193, + 0.7591543951250316, + 0.714477114198495, + 0.6684515677638289, + 0.0829806623502698, + 0.2047334636595548, + 0.9461600418468807, + 0.4682533427834763, + 0.0, + 0.0, + 0.0, + 0.5973156196301829, + 0.05565172733164114, + 0.6861232291730925, + 0.9452575141946173, + 0.7966701764321158, + 0.8925428237773564, + 0.10787488154007552, + 0.7848586244744863, + 0.8905292176439835, + 0.09777312604959598, + 0.41780143169262884, + 0.8108750745228607, + 0.49074806404789173, + 0.0, + 0.0, + 0.0, + 0.7268708609307327, + 0.809261064948484, + 0.8872866854523105, + 0.02018482219592832, + 0.29897151490209883, + 0.11491027929144892, + 0.8738777322540163, + 0.4654944007098949, + 0.21347021766015817, + 0.4164411116066573, + 0.6527169584935041, + 0.4830745490684596, + 0.9512534436676542, + 0.0, + 0.0, + 0.0, + 0.7871630304590124, + 0.3364174458754926, + 0.6215498191132648, + 0.7045495218288327, + 0.27335000291484834, + 0.725281506679176, + 0.14776900150358052, + 0.35920833759659765, + 0.8022032069624152, + 0.5776509330399803, + 
0.49078774391835744, + 0.08619761260863779, + 0.32465703784722955, + 0.0, + 0.0, + 0.0, + 0.6624849749394808, + 0.5524930250075998, + 0.27342017827751264, + 0.7722345572678084, + 0.30705374619745074, + 0.5672238036229547, + 0.38140722833030105, + 0.3851687562497098, + 0.35373383802076097, + 0.5164869321661556, + 0.8339411705264502, + 0.7515703678851053, + 0.7871480663922484, + 0.0, + 0.0, + 0.0, + 0.5609853149756677, + 0.7694287789611061, + 0.6144659800321695, + 0.5703880594458475, + 0.1533497112842136, + 0.2083770661280342, + 0.555109704836016, + 0.27413066594238844, + 0.5992704839597666, + 0.48964837864793054, + 0.3022131562112462, + 0.23745404594214325, + 0.7772185454991528, + 0.0, + 0.0, + 0.0, + 0.36246354873249964, + 0.57458997757583, + 0.9426845283898099, + 0.862050219278911, + 0.36932700877708713, + 0.49902775344818495, + 0.39630241481634976, + 0.8368875390734017, + 0.7659443701665629, + 0.8186816060201058, + 0.7275775375167961, + 0.9579735434380794, + 0.26528228209323423, + 0.0, + 0.0, + 0.0, + 0.8654073421607111, + 0.6175053442233922, + 0.588365381568232, + 0.4825976142091274, + 0.17256052285558898, + 0.07069119860168616, + 0.3500335429995659, + 0.3946303413259684, + 0.4758662954891658, + 0.2082632032935472, + 0.440664359561293, + 0.1688371354557342, + 0.5296655557006779, + 0.0, + 0.0, + 0.0, + 0.8045161489708144, + 0.31172266984748465, + 0.6509540424445969, + 0.7774855787144023, + 0.6585354603911842, + 0.13914992367874413, + 0.980156513711137, + 0.5934758689472284, + 0.11628253151165036, + 0.22034396006938395, + 0.13015280874295532, + 0.10098664892923037, + 0.9005159499778512, + 0.0, + 0.0, + 0.0, + 0.2841846870971455, + 0.5452944586279989, + 0.8890828766201677, + 0.20440866713311123, + 0.13193363658731838, + 0.5767215111131037, + 0.026763297351841908, + 0.35475264814583496, + 0.3805004952056973, + 0.07053369998779246, + 0.4703763966578943, + 0.34129700429264276, + 0.2913280236202286, + 0.0, + 0.0, + 0.0, + 0.599857653264359, + 0.13728507973746584, + 
0.46988108079381585, + 0.06296655967577103, + 0.8321724893781268, + 0.07378259213776417, + 0.8489115892806867, + 0.7170003596034836, + 0.21119053907216268, + 0.29124624040531644, + 0.5780436355940655, + 0.030295171103866037, + 0.5434264223032743, + 0.0, + 0.0, + 0.0, + 0.5549844899419032, + 0.6606252077689398, + 0.7425801495285858, + 0.7304169585063964, + 0.293120563204055, + 0.9052346146862206, + 0.29002488813304284, + 0.37022589398674954, + 0.393759015701048, + 0.8874811238627718, + 0.34952998752542574, + 0.9012960505708011, + 0.7678518215703581, + 0.0, + 0.0, + 0.0, + 0.9001925703979451, + 0.44793138533515275, + 0.6109366179635362, + 0.634231132912984, + 0.5281101122601308, + 0.393573450923832, + 0.43945288936608173, + 0.9226653935940995, + 0.7781666574435526, + 0.7049376325216603, + 0.4414719831985249, + 0.279166156457582, + 0.0889457054781051, + 0.0, + 0.0, + 0.0, + 0.938482109018782, + 0.7540725301210417, + 0.9401349838335883, + 0.2775519370237508, + 0.966442183365752, + 0.44154315419470036, + 0.7451690610060407, + 0.5355128768810619, + 0.42016133957466684, + 0.6545758662007065, + 0.989527953023055, + 0.7082184256293507, + 0.8516911565016979, + 0.0, + 0.0, + 0.0, + 0.35065822545560266, + 0.7937573790108394, + 0.12918713202051024, + 0.5761841396692509, + 0.2874727759508864, + 0.37365358264706827, + 0.6698791584453614, + 0.6030269190651167, + 0.6264028839976385, + 0.9488939629715639, + 0.4302664849326089, + 0.3133696396655272, + 0.8776100604509095, + 0.0, + 0.0, + 0.0, + 0.01346563799883671, + 0.5743066236028878, + 0.40230004346630344, + 0.33863190854568737, + 0.8416256823246926, + 0.6788666546060725, + 0.5803240401185477, + 0.23906897088801837, + 0.09180866317157044, + 0.3735235411243021, + 0.08084404411482948, + 0.7427424387949301, + 0.10703884431285349, + 0.0, + 0.0, + 0.0, + 0.05696448262288256, + 0.910430797275563, + 0.5706995377457933, + 0.49962084029010345, + 0.745543311950532, + 0.8710406030686705, + 0.1828880096128641, + 0.9731615456854407, + 
0.8616255758256796, + 0.7292040027342938, + 0.06443623437120938, + 0.014650719297637727, + 0.0023928806710279416, + 0.0, + 0.0, + 0.0, + 0.9884407811311055, + 0.1188899682580612, + 0.3540280296665226, + 0.4212442791621328, + 0.8649172089925998, + 0.5653777920240877, + 0.5440573619342335, + 0.4652136646511056, + 0.5739953113633971, + 0.5596283473438739, + 0.3951815138963941, + 0.6687018774505851, + 0.4154900690837027, + 0.0, + 0.0, + 0.0, + 0.8768090525941173, + 0.5848969230113864, + 0.21440454008976384, + 0.0027275917696421015, + 0.5555683665825039, + 0.929133022911197, + 0.6702829463721361, + 0.4212731966436015, + 0.3283094476143993, + 0.41498780863726514, + 0.06247448195301408, + 0.008661591989204798, + 0.5792345974589062, + 0.0, + 0.0, + 0.0, + 0.6442765506012742, + 0.21901564190685519, + 0.6709957763246563, + 0.3776299334060649, + 0.45309615028691297, + 0.21244737992271556, + 0.11416674094184265, + 0.9034483267479185, + 0.9933610208307955, + 0.07906453952726389, + 0.10130409555855568, + 0.1541469842620875, + 0.8200936176927385, + 0.0, + 0.0, + 0.0, + 0.2808710536582856, + 0.6304228676507453, + 0.7993276100288724, + 0.635287952403161, + 0.7474654651506052, + 0.7641578054457718, + 0.5434848899640735, + 0.573777693667837, + 0.07788813501479297, + 0.4981923297675872, + 0.6245684114795713, + 0.6886527250995123, + 0.29001954013611997, + 0.0, + 0.0, + 0.0, + 0.22512659912010913, + 0.19952423249150786, + 0.0786425178763317, + 0.022463564129366786, + 0.38476040175712745, + 0.9855856274400493, + 0.744899902121528, + 0.173239268213766, + 0.5330575305803935, + 0.5749958693067055, + 0.9600576022820434, + 0.6276034654102257, + 0.3257483104941373, + 0.0, + 0.0, + 0.0, + 0.23005234330930902, + 0.5728703312238945, + 0.1160630670680961, + 0.9239224610358371, + 0.6996004199523914, + 0.7010507713651717, + 0.44599435904728324, + 0.7546111016734821, + 0.6880327753691288, + 0.7542045323372278, + 0.8779834335120824, + 0.32507247452038013, + 0.6755876929931922, + 0.0, + 0.0, + 0.0, + 
0.12834271889295057, + 0.0922367588428229, + 0.8027483983155068, + 0.8906513622394374, + 0.5053747555115768, + 0.8865711504423313, + 0.13074521458852162, + 0.5754967858487199, + 0.8212256748704595, + 0.5961594358188973, + 0.46430577758997216, + 0.1899194173679094, + 0.06371590044276054, + 0.0, + 0.0, + 0.0, + 0.29435177123326306, + 0.5470125738318735, + 0.6825990509125524, + 0.9767502068161129, + 0.8094647300020432, + 0.7360544610333721, + 0.6993491069744604, + 0.9912106807596729, + 0.6028685029322579, + 0.7569815139783284, + 0.07675788501729641, + 0.6956192514654892, + 0.4236268651524372, + 0.0, + 0.0, + 0.0, + 0.3132129404132644, + 0.5289028283915365, + 0.9865543017341086, + 0.8626066171498894, + 0.07682530861893255, + 0.8139630415752064, + 0.007763740667271146, + 0.5288142314425128, + 0.22566670335113082, + 0.6739400681093219, + 0.35769135402718577, + 0.3763213330060545, + 0.36712698444156466, + 0.0, + 0.0, + 0.0, + 0.9560690387702061, + 0.33664094049883364, + 0.2972438859705323, + 0.7069855690798323, + 0.18149085368254592, + 0.9278500717736248, + 0.3007483665542521, + 0.7417099410885516, + 0.46159191931394117, + 0.9039894572473723, + 0.48828290307141, + 0.5933033667502858, + 0.9275750302448893, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9575352194569359, + 0.9024464867117323, + 0.18019086659616945, + 0.09967012130313302, + 0.3363965003078747, + 0.7183751205972345, + 0.025797182427518717, + 0.34349775412299366, + 0.22531896687963515, + 0.5460089461559253, + 0.3133672315446907, + 0.8045227957165217, + 0.555876019856778, + 0.0, + 0.0, + 0.0, + 0.6913332103347046, + 0.22245899232364708, + 0.6516946836403165, + 0.3257349583819974, + 0.4883787842182088, + 0.8674769281721243, + 0.6065403092061109, + 0.9067975316888497, + 0.693100473115213, + 0.23247304472981967, + 0.4605515038321585, + 0.8348635465517218, + 0.4848361702081525, + 0.0, + 0.0, + 0.0, + 0.3434274206390554, + 0.8366095208978671, + 0.7368307465766999, + 0.4318603731716837, + 0.5008617386373394, + 0.5551877728925136, + 0.9944328676646609, + 0.5705390234345237, + 0.5659354751375515, + 0.5707184258103133, + 0.6279588295081213, + 0.16279115803896915, + 0.8266429406491902, + 0.0, + 0.0, + 0.0, + 0.5936560909932644, + 0.537111364957012, + 0.49279801971206494, + 0.14406009508002438, + 0.012583857244834684, + 0.10895765819373193, + 0.04167792834859119, + 0.13127552364632467, + 0.46313043324559466, + 0.5674340921173507, + 0.9404644310352864, + 0.1427392014487997, + 0.46994161724285666, + 0.0, + 0.0, + 0.0, + 0.6776005091994876, + 0.12011975313798184, + 0.7002597682484383, + 0.05983248281789433, + 0.422025842666363, + 0.7929841784064777, + 0.738759429426645, + 0.451177364786558, + 0.10920802220731785, + 0.27150542484518414, + 0.42766542351716375, + 0.4784696148715032, + 
0.09536808129336949, + 0.0, + 0.0, + 0.0, + 0.08550185347264039, + 0.74890154080135, + 0.9799252557200413, + 0.7130005751400752, + 0.18015322299830117, + 0.9855372047495685, + 0.06773568484484738, + 0.5582481349367698, + 0.8322065900045672, + 0.25819671005181166, + 0.42316840271800416, + 0.9820265716705749, + 0.7003539588024469, + 0.0, + 0.0, + 0.0, + 0.016295220243004782, + 0.22239473484973193, + 0.653771450432396, + 0.5902376063535052, + 0.3782380387852313, + 0.31811860326851227, + 0.35809649216351, + 0.08665946538448721, + 0.48500998781391647, + 0.8837320406702113, + 0.5997382279267112, + 0.4704888516653011, + 0.49048384871218476, + 0.0, + 0.0, + 0.0, + 0.7387365281384822, + 0.19772963976896507, + 0.581929629084124, + 0.9817353315293328, + 0.39844810901548067, + 0.4650670553447489, + 0.26427236635894436, + 0.08812045183547601, + 0.796791418146551, + 0.10014225972601243, + 0.269675657615838, + 0.9902861843315299, + 0.5785406650526382, + 0.0, + 0.0, + 0.0, + 0.8714526578489289, + 0.9333605272737515, + 0.9183801009283663, + 0.12930580122610302, + 0.4473295083809634, + 0.5887594878988505, + 0.9277886820893544, + 0.3936631317439634, + 0.5915564957840513, + 0.14745185728442556, + 0.8049424148953641, + 0.0689186247870377, + 0.7327797374967434, + 0.0, + 0.0, + 0.0, + 0.4652929495558714, + 0.15466507254220296, + 0.2790408680916321, + 0.17152349761300256, + 0.7718562879851862, + 0.8936381148405804, + 0.8987700812096371, + 0.132036940677129, + 0.5404435908625388, + 0.576224812661493, + 0.4238874670468027, + 0.038694641494613546, + 0.06878788637726574, + 0.0, + 0.0, + 0.0, + 0.9695659711377269, + 0.7997058513802497, + 0.950290015238654, + 0.784499662508879, + 0.6300750191552654, + 0.19883797269254577, + 0.25990371634646814, + 0.7621681433020496, + 0.1014634415593828, + 0.025527963323986658, + 0.4553628308622195, + 0.8534481279309463, + 0.9793389401609446, + 0.0, + 0.0, + 0.0, + 0.10710219601041682, + 0.058195869330314176, + 0.5935524452091785, + 0.03291427279147585, + 
0.09016513696916051, + 0.318868168359166, + 0.8797955425965943, + 0.582385835985094, + 0.8686944692306185, + 0.8513225487330578, + 0.5005659651891566, + 0.6658416034368901, + 0.32244334157112176, + 0.0, + 0.0, + 0.0, + 0.226855949120919, + 0.1014456601962076, + 0.2601556489387933, + 0.3010360152061219, + 0.018224387802540765, + 0.7707048469309895, + 0.03142514858464274, + 0.7360410001930832, + 0.6190058192823232, + 0.4570556373127622, + 0.9792244869160934, + 0.45835491583760357, + 0.7036667791104366, + 0.0, + 0.0, + 0.0, + 0.42635583548795175, + 0.31073788930195057, + 0.3321898011383212, + 0.560079705221792, + 0.8559747839394626, + 0.8889930150039421, + 0.47324717287077855, + 0.6108707859368635, + 0.9792891472495763, + 0.1926645155438872, + 0.876138912755312, + 0.23205434258038304, + 0.34298536304191996, + 0.0, + 0.0, + 0.0, + 0.3652150369479038, + 0.97359340280292, + 0.4849150842836918, + 0.20060407324732488, + 0.5409100577430763, + 0.515411326175804, + 0.5795672856802954, + 0.48405983008931086, + 0.5352760101101476, + 0.2698893322659366, + 0.16808333494994143, + 0.2444487767339869, + 0.05753327809315223, + 0.0, + 0.0, + 0.0, + 0.4342614730549491, + 0.9586179312430214, + 0.44511842080107333, + 0.38594474204811746, + 0.5828882934061577, + 0.04781641145757887, + 0.6811577022134, + 0.5123002430720334, + 0.9962176932704665, + 0.6173367715582871, + 0.8196523230775039, + 0.5425007475032013, + 0.011979435913138103, + 0.0, + 0.0, + 0.0, + 0.3634433479264154, + 0.7436775231215327, + 0.7434848024062098, + 0.5147096038401864, + 0.9848888323071824, + 0.2505481000055251, + 0.4570871122929343, + 0.20628653393155671, + 0.6176443866924944, + 0.9758959273814721, + 0.5157813708573159, + 0.4888692205662686, + 0.13056368281859565, + 0.0, + 0.0, + 0.0, + 0.5040850318176497, + 0.7960517210096801, + 0.5250283553384886, + 0.5502413961528074, + 0.14274899546640085, + 0.09998284318299033, + 0.3808124149471467, + 0.8173797644017552, + 0.13292420533133154, + 0.722210711587862, + 
0.369853370947289, + 0.4182962896780704, + 0.21280269232667204, + 0.0, + 0.0, + 0.0, + 0.1005692764268149, + 0.8993031203771943, + 0.1919213643996598, + 0.5439038319947664, + 0.8720779268326746, + 0.17923028843505817, + 0.7675623476766362, + 0.8839095607517173, + 0.4186563263256182, + 0.3121230306158105, + 0.535275615152003, + 0.7252525301135283, + 0.33241033387049146, + 0.0, + 0.0, + 0.0, + 0.28521639274999944, + 0.04879320097341899, + 0.8330231236827359, + 0.9544829899721675, + 0.9659970555474423, + 0.7257066141579142, + 0.8344834971740566, + 0.817944058314642, + 0.9672199042874626, + 0.5776047344329789, + 0.5225526608011035, + 0.2775833166469701, + 0.34420970493726366, + 0.0, + 0.0, + 0.0, + 0.5264218851922361, + 0.030096923595554048, + 0.5915576382845237, + 0.49065759755420035, + 0.7912234792148146, + 0.32554264546976464, + 0.9426212005671426, + 0.765715613567543, + 0.5192670301771658, + 0.6416502806324441, + 0.8336392939516185, + 0.19859545581269888, + 0.37718400889257186, + 0.0, + 0.0, + 0.0, + 0.6453033634579781, + 0.2349676152742367, + 0.8000350586905398, + 0.7441806246347922, + 0.7368193973641121, + 0.3589933612170658, + 0.45099315710518617, + 0.9461444427630092, + 0.6472384754560551, + 0.524433094278233, + 0.5899814017695107, + 0.06513774844706433, + 0.9127451928528002, + 0.0, + 0.0, + 0.0, + 0.8445601426248558, + 0.9374971635840088, + 0.5287909468719879, + 0.830540674286405, + 0.849043187276534, + 0.9673393972808528, + 0.29435396767907773, + 0.43163699721420534, + 0.4549582101124804, + 0.0827635441904947, + 0.13217799620307835, + 0.7109164753626097, + 0.9817317067119571, + 0.0, + 0.0, + 0.0, + 0.8361392890402467, + 0.6480884237735768, + 0.3593697618964248, + 0.8331013083828452, + 0.6245737425004668, + 0.5001138883464654, + 0.615421486656359, + 0.3567908820070378, + 0.20324097308101197, + 0.05200813788991243, + 0.46801780194695985, + 0.06364213960029319, + 0.1894664348617915, + 0.0, + 0.0, + 0.0, + 0.5416492892029748, + 0.8453045417134741, + 
0.30833376851262206, + 0.3812462766983682, + 0.295796112519496, + 0.4714510710728065, + 0.6611891433295813, + 0.8420212601559625, + 0.1125688968219265, + 0.8247076523657833, + 0.0696503695580536, + 0.6802742636853687, + 0.7288722782291346, + 0.0, + 0.0, + 0.0, + 0.5923439222949263, + 0.9735467777955552, + 0.17633148979714908, + 0.7958151580013121, + 0.8423420925123177, + 0.6243703185804795, + 0.5443761380310224, + 0.8320573496340441, + 0.6324771301851917, + 0.5527453149583392, + 0.28735858320033536, + 0.6460043375052836, + 0.5396786187168046, + 0.0, + 0.0, + 0.0, + 0.7486267738234921, + 0.5506223034344521, + 0.7722370731192654, + 0.5309999940369844, + 0.7656170698330114, + 0.9957700224788494, + 0.038625545305283615, + 0.08252555817173335, + 0.31976943409621605, + 0.03169430900162673, + 0.2562763333098427, + 0.8725613653621531, + 0.7042335434214668, + 0.0, + 0.0, + 0.0, + 0.15878750297138589, + 0.9095366579822816, + 0.47524855998266613, + 0.02223162793846467, + 0.374142174565405, + 0.1751056586499734, + 0.8008753629673093, + 0.16708452139561303, + 0.36249124205890704, + 0.8920135815455199, + 0.6756043429257418, + 0.6835551622967121, + 0.2762814304064456, + 0.0, + 0.0, + 0.0, + 0.01654939205628081, + 0.7881781753155032, + 0.00013193256029708778, + 0.5528191969971925, + 0.6951002588148754, + 0.3486617738803698, + 0.6667106969199146, + 0.674218280106361, + 0.4657810913492053, + 0.018406521313439672, + 0.6497472819602108, + 0.9453222808947551, + 0.32962531044070564, + 0.0, + 0.0, + 0.0, + 0.6710668991638102, + 0.12342865273395087, + 0.8333403232406411, + 0.7112286512960347, + 0.6255379373550257, + 0.06998754557037834, + 0.11364741736519457, + 0.24633712987428003, + 0.061068546275511726, + 0.5518616008772085, + 0.47255442367894573, + 0.14066143536276432, + 0.35575680975752033, + 0.0, + 0.0, + 0.0, + 0.6246760601088422, + 0.9484961691400952, + 0.7179814738422576, + 0.20317788640515677, + 0.47999845173062605, + 0.5521146150579526, + 0.5594807159304569, + 
0.8753717104681724, + 0.4737119498621364, + 0.8005997339055952, + 0.32665093012336877, + 0.3060637553208424, + 0.5193570151615291, + 0.0, + 0.0, + 0.0, + 0.055731805531847534, + 0.06301955233274859, + 0.5275102254528009, + 0.5895022924397852, + 0.029079675892063728, + 0.3805577084950771, + 0.1598673420261677, + 0.4776894730936536, + 0.0575630317703546, + 0.781576393369037, + 0.5726308089540263, + 0.18968396369879814, + 0.007120204066152103, + 0.0, + 0.0, + 0.0, + 0.5786096097706357, + 0.49658644476946334, + 0.21483279615304118, + 0.8266983414259692, + 0.7590468492897986, + 0.10724705111767208, + 0.8813171861249125, + 0.502072873305139, + 0.2009483429728417, + 0.3898342926612306, + 0.6709364736649338, + 0.21425964411179554, + 0.013567251954202941, + 0.0, + 0.0, + 0.0, + 0.3416505672313318, + 0.8844414238178754, + 0.6892704939498873, + 0.1599387511718089, + 0.8081628359731009, + 0.45601578393234043, + 0.41645063387674763, + 0.5317760097917982, + 0.7174770235880121, + 0.4321230913297708, + 0.8825441202244202, + 0.9437978650654052, + 0.09787722944911825, + 0.0, + 0.0, + 0.0, + 0.9078925058895949, + 0.5728205367620257, + 0.7077311778702258, + 0.726321672577723, + 0.6274185140387438, + 0.04991151977269326, + 0.20752145445827375, + 0.7691429853682675, + 0.4444771245783087, + 0.2962562997117554, + 0.8295977414831182, + 0.7904946977502278, + 0.6927207321705072, + 0.0, + 0.0, + 0.0, + 0.744033594830627, + 0.7033184833395857, + 0.724972374599111, + 0.7597109118121218, + 0.22150043622534699, + 0.27975012292456725, + 0.18499135023809266, + 0.3554849946464518, + 0.8473785421742102, + 0.2934946434738166, + 0.6954493581241974, + 0.5632440844712139, + 0.14005366473312275, + 0.0, + 0.0, + 0.0, + 0.7196789870310156, + 0.5955894400444404, + 0.7097065976975364, + 0.8546331547971184, + 0.7442773208504156, + 0.13564538441165264, + 0.6899930729840695, + 0.23813288318261894, + 0.4993290281902606, + 0.6544468464027663, + 0.8386904056608249, + 0.779332408059024, + 0.10602515821631009, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.39648975671140774, + 0.7124643155516597, + 0.4915702302822955, + 0.6857942034808334, + 0.22013224995653757, + 0.6943696066496853, + 0.04913005830458905, + 0.4438111512341175, + 0.1788476661859162, + 0.5525118585986091, + 0.735254559436984, + 0.08660049895372113, + 0.986325683928554, + 0.0, + 0.0, + 0.0, + 0.004844335655207477, + 0.7607516373080648, + 0.3444010702742777, + 0.17963125887882414, + 0.4425233931083449, + 0.262003243682855, + 0.14683051730486973, + 0.4423921837812813, + 0.9583377108542843, + 0.537577568521976, + 0.8671075025029406, + 0.707075923751942, + 0.6118093255122149, + 0.0, + 0.0, + 0.0, + 0.4768301709526289, + 0.09873370493806377, + 0.12368561854617122, + 0.6901418695378564, + 0.31131274888539573, + 0.9611960250938479, + 
0.04326840036964252, + 0.07163674495064343, + 0.06254107456875724, + 0.3374169863902369, + 0.6360675805312543, + 0.39117111907339097, + 0.6446835949605887, + 0.0, + 0.0, + 0.0, + 0.9306970131403783, + 0.009393561114221516, + 0.9373830941283787, + 0.08685296828169053, + 0.27231098100936457, + 0.16422688852354173, + 0.8668225584467041, + 0.26679638314132337, + 0.8352296795606325, + 0.3304657489407983, + 0.17774490462565573, + 0.49986747926626385, + 0.45225373046417006, + 0.0, + 0.0, + 0.0, + 0.7640363576349184, + 0.5684341524056692, + 0.31922441664723433, + 0.21195130191334866, + 0.2455538391481168, + 0.8209365678775756, + 0.5134276556344041, + 0.41319424117196857, + 0.9895448340230715, + 0.7471631357567835, + 0.5207122924470425, + 0.6938545381255015, + 0.5862312824353137, + 0.0, + 0.0, + 0.0, + 0.9702684508620554, + 0.7539077128632379, + 0.9640998959870103, + 0.8003192989370567, + 0.01740875604679193, + 0.09206529945564446, + 0.8218672059955199, + 0.7412209222435967, + 0.875257888019629, + 0.9026395858304028, + 0.2688902357901004, + 0.05083812913513497, + 0.3849577416732348, + 0.0, + 0.0, + 0.0, + 0.707555000570396, + 0.5834271207574656, + 0.6266097818002464, + 0.9856015341087936, + 0.2741353451838012, + 0.3609108843642973, + 0.6408490345865535, + 0.6371169542426284, + 0.41259481555123245, + 0.7495789807241621, + 0.37690314873798403, + 0.3129033899445508, + 0.616269064611748, + 0.0, + 0.0, + 0.0, + 0.9584144280651584, + 0.0556885292285445, + 0.32668620024644646, + 0.5016887910216669, + 0.8172857931535926, + 0.8523303083930169, + 0.1748323344137528, + 0.06166596692673654, + 0.49012709572096813, + 0.7681777068057823, + 0.6515965875593094, + 0.12442146199879256, + 0.31283091439122546, + 0.0, + 0.0, + 0.0, + 0.3560493124141312, + 0.6514762560115345, + 0.3714117830684973, + 0.6830488413835156, + 0.23076786696884677, + 0.792561679646134, + 0.6963772913133708, + 0.5339925643976342, + 0.7127491862155224, + 0.5454644937790126, + 0.6409805946568784, + 0.19609627983460365, + 
0.7572855661122344, + 0.0, + 0.0, + 0.0, + 0.3473317428074729, + 0.04796607144898102, + 0.9099045001606086, + 0.179919222005401, + 0.8550061953477406, + 0.8759590315861163, + 0.8351616261830698, + 0.9369055311754738, + 0.9064736984103624, + 0.40828432780579427, + 0.37551972431879266, + 0.44664343184477073, + 0.18306034449525166, + 0.0, + 0.0, + 0.0, + 0.1570662755318215, + 0.17986995583107723, + 0.7220585583107574, + 0.05451321672212228, + 0.8075070107999621, + 0.1556828023863619, + 0.7817632442147195, + 0.6999382073221959, + 0.6403751364474352, + 0.46212838809238854, + 0.48200265351224136, + 0.67016651074133, + 0.8239336437639129, + 0.0, + 0.0, + 0.0, + 0.9110558977005889, + 0.5420193242776757, + 0.44543313970081433, + 0.17924898345020468, + 0.6772952323869773, + 0.8405436244742135, + 0.3130894042876252, + 0.8442578893248381, + 0.5456512903405899, + 0.5395215170961729, + 0.11458446830716273, + 0.03550382138189767, + 0.7129285539645298, + 0.0, + 0.0, + 0.0, + 0.5319344152241747, + 0.28852750174838293, + 0.6324182595073818, + 0.15635428641897176, + 0.1242259888131334, + 0.7994271132337173, + 0.9945458087889819, + 0.9379171637481046, + 0.9273493620959045, + 0.4333063185731828, + 0.6996379153387984, + 0.7079899694928319, + 0.9728903216842792, + 0.0, + 0.0, + 0.0, + 0.9779561321754502, + 0.5489726539284406, + 0.9666290368590116, + 0.9688966463245884, + 0.17172656866770752, + 0.440593430038282, + 0.14637116481000023, + 0.03351924429384756, + 0.5553889126965885, + 0.5239003123361083, + 0.8926530934408122, + 0.25063605435069614, + 0.43217555660058715, + 0.0, + 0.0, + 0.0, + 0.16188666413474806, + 0.07411414640722169, + 0.7737440468468048, + 0.4486373798668981, + 0.9110811311444915, + 0.5190662840193289, + 0.3599725854585929, + 0.33725842726843935, + 0.2889296828360126, + 0.19488775802398262, + 0.9438232137216197, + 0.8949432397744512, + 0.5668689532623353, + 0.0, + 0.0, + 0.0, + 0.8409214208439791, + 0.855362907376121, + 0.7811234343528172, + 0.2154516583014615, + 
0.8454636784026129, + 0.8730097140319055, + 0.13826779263457267, + 0.0010361464179842184, + 0.8816311942309901, + 0.2467417039039631, + 0.7669653158026208, + 0.40996037913859373, + 0.6988583536513918, + 0.0, + 0.0, + 0.0, + 0.6777545700206987, + 0.1553217842080471, + 0.22368740675030951, + 0.7931309397755107, + 0.18970443535638315, + 0.8066597174480088, + 0.15948804559775043, + 0.5345566558753493, + 0.6706241949437718, + 0.4253965654778582, + 0.6042056357653907, + 0.9386558175482362, + 0.04978158075450534, + 0.0, + 0.0, + 0.0, + 0.44984242432994825, + 0.025015869854195794, + 0.13774560092976795, + 0.34969604518797115, + 0.3376280114986824, + 0.49127392307108864, + 0.04375119314433118, + 0.9844668600283752, + 0.1017861810025199, + 0.40508563896563055, + 0.5597944153914333, + 0.8966351519859024, + 0.7847779274864025, + 0.0, + 0.0, + 0.0, + 0.455459077907966, + 0.1593029445026095, + 0.1738087164796367, + 0.27896968325464333, + 0.3752299520426411, + 0.46120181717572295, + 0.19405609689297976, + 0.45576380758062385, + 0.7110368611535203, + 0.22662115720322595, + 0.5178449803466542, + 0.3208270048875955, + 0.5190935034239609, + 0.0, + 0.0, + 0.0, + 0.29537849420346163, + 0.7011141727180478, + 0.9901599728272302, + 0.9480208996192407, + 0.1267239716403714, + 0.5343485705027584, + 0.7027221365496716, + 0.813386721351238, + 0.9775449069116537, + 0.01625405735856278, + 0.7704773585665169, + 0.35388799583931574, + 0.05277337934540127, + 0.0, + 0.0, + 0.0, + 0.547081626234461, + 0.26390780312110174, + 0.5411165882068674, + 0.607719227939013, + 0.2332011526848804, + 0.5447343172887128, + 0.07952327191536368, + 0.4411174472652958, + 0.3148716111237674, + 0.7875168188595044, + 0.16989662241359915, + 0.9139199107827592, + 0.6192796992539356, + 0.0, + 0.0, + 0.0, + 0.26109749279767136, + 0.9618875480154634, + 0.22924239007051617, + 0.872896888725154, + 0.28979064651987896, + 0.47019487763795464, + 0.996564713235235, + 0.33781122895621085, + 0.6074433297575362, + 
0.20970260338326985, + 0.08678385139936784, + 0.34200269240889725, + 0.8039019392545506, + 0.0, + 0.0, + 0.0, + 0.7659448285551973, + 0.8162190222096968, + 0.9715341162192149, + 0.551750060242085, + 0.9791742146760088, + 0.386355572841914, + 0.8381283866121838, + 0.5238379831649844, + 0.9929054038872701, + 0.7799395425765457, + 0.08902190649392094, + 0.6809158871817893, + 0.6535451186876484, + 0.0, + 0.0, + 0.0, + 0.12761095105809406, + 0.6538982229537511, + 0.6074744845978282, + 0.46898817485214184, + 0.7430145537005464, + 0.7898224137342151, + 0.340859836784284, + 0.39211755611624555, + 0.01703805972089767, + 0.6669179967390473, + 0.195173647475112, + 0.28923800680533573, + 0.8736712480631994, + 0.0, + 0.0, + 0.0, + 0.8593863759099308, + 0.3883041142578194, + 0.11265001374990269, + 0.05326094048995844, + 0.689370023219251, + 0.06014774622321528, + 0.937500159444114, + 0.1727409835801198, + 0.8856214515269026, + 0.05553512743333855, + 0.21439679717817517, + 0.4185547030247506, + 0.4309589285259948, + 0.0, + 0.0, + 0.0, + 0.9094366901577481, + 0.9773620150843084, + 0.9675001165668295, + 0.29882394915065713, + 0.057603330487783655, + 0.030916772578805318, + 0.08176930652062564, + 0.7235053476252652, + 0.5148431564320813, + 0.4983451256248954, + 0.3108519426219638, + 0.13396562723950822, + 0.9704225749913137, + 0.0, + 0.0, + 0.0, + 0.8910770335402092, + 0.42086218568564404, + 0.6326600040373183, + 0.6653463095609469, + 0.6698822741348551, + 0.9871476009164123, + 0.08656246050258021, + 0.8947087689465619, + 0.8853890548636507, + 0.9491472012593519, + 0.025245535252461315, + 0.008958190938421096, + 0.6077543376374114, + 0.0, + 0.0, + 0.0, + 0.9169576290879906, + 0.19532396997443402, + 0.8399571940157706, + 0.5793190667733108, + 0.7775297119886831, + 0.6892136528524128, + 0.4472363008894372, + 0.3439303028422843, + 0.9171198919956922, + 0.14729560858669288, + 0.9418046210444497, + 0.9887118139060092, + 0.006630810823948785, + 0.0, + 0.0, + 0.0, + 0.5897469401483456, + 
0.8899225313024621, + 0.5976814114008757, + 0.7274468157465319, + 0.7716788530687794, + 0.34592367022246595, + 0.24659976811942497, + 0.5948167582886236, + 0.14488651124192264, + 0.9881738830545875, + 0.02115682842914146, + 0.7072050004713644, + 0.6864175824810845, + 0.0, + 0.0, + 0.0, + 0.3356617354905028, + 0.34909342847846403, + 0.12226959596809617, + 0.0872677185676285, + 0.37637909221467925, + 0.6268807304639307, + 0.5279495187043634, + 0.7259486518800289, + 0.8681116504148529, + 0.7481793929795738, + 0.9946720429087478, + 0.5057842367270858, + 0.6799967869166224, + 0.0, + 0.0, + 0.0, + 0.21909074827124253, + 0.7165679364688612, + 0.11877658899063726, + 0.3802264537724305, + 0.7735392931984596, + 0.17749717425748612, + 0.6439461486641193, + 0.4243043288295506, + 0.14788865372908921, + 0.08325099351220544, + 0.2070690221517849, + 0.18555582781884994, + 0.20599926727585816, + 0.0, + 0.0, + 0.0, + 0.042596364904384565, + 0.6295236223148944, + 0.07818271657348519, + 0.2877821098352795, + 0.5680441746255821, + 0.20433285491935527, + 0.4380217994839748, + 0.04295133613711055, + 0.3625065647667213, + 0.7139774732191775, + 0.12526434319385582, + 0.8401067133435376, + 0.3146537754974823, + 0.0, + 0.0, + 0.0, + 0.37325720086992775, + 0.7113884002808946, + 0.06376699481848302, + 0.15005107932116457, + 0.656564365356466, + 0.6022885366497458, + 0.41912787326407697, + 0.8113953100511652, + 0.5686489068644626, + 0.9761232177864375, + 0.9011184378252315, + 0.0035184977431740316, + 0.5085004166721712, + 0.0, + 0.0, + 0.0, + 0.11875518231351734, + 0.807731536615229, + 0.24324504191164564, + 0.22348145034258027, + 0.05495708052495485, + 0.22715775439997743, + 0.37411652073382473, + 0.33104631196750023, + 0.7165737612100622, + 0.6275614631863294, + 0.03322084887204235, + 0.44531451278182943, + 0.29430493226113874, + 0.0, + 0.0, + 0.0, + 0.6463544185250047, + 0.8270334409153715, + 0.3589889486151604, + 0.3022359169863216, + 0.7225457890577125, + 0.11861724973155252, + 
0.25486009434188805, + 0.6439346189860077, + 0.6261859121821446, + 0.488959383491686, + 0.3950592813215933, + 0.41827724692101365, + 0.7050301842540171, + 0.0, + 0.0, + 0.0, + 0.1825280923918009, + 0.2740336473069692, + 0.034852437614189435, + 0.6418977943500506, + 0.48429246869840337, + 0.724704693314636, + 0.3961306001119218, + 0.5616620349658505, + 0.21727130167468123, + 0.8230007661343022, + 0.8446019888042419, + 0.9343257976154357, + 0.5246546893862399, + 0.0, + 0.0, + 0.0, + 0.2322940592138497, + 0.9289823831286147, + 0.3604319636351504, + 0.6020435366907627, + 0.5940991221745717, + 0.07182425738119547, + 0.8325697634031315, + 0.8159064950858222, + 0.4962530904242528, + 0.13500364941078247, + 0.4540997335994863, + 0.5326084004962612, + 0.13545141577260267, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + }; + + FormatTransferFractalZ transfer; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({37, 45, 2, 2}), + std::vector({12, 3, 16, 16}), DT_FLOAT}; + + TransResult result; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret) / sizeof(ret[0]) * 4); + for (int i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTransferNchwFz, build_transfer_fp32) { + float data[17 * 31 * 5 * 5]; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({17, 31, 5, 5}), + std::vector({50, 2, 16, 16}), DT_FLOAT}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} + +TEST_F(UtestFormatTransferNchwFz, build_transfer_fp16) { + uint16_t data[1 * 1 * 5 * 5]; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({1, 1, 5, 5}), + std::vector({25, 1, 16, 16}), DT_FLOAT16}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} + +TEST_F(UtestFormatTransferNchwFz, build_transfer_uint8) { + uint8_t data[64 * 64 * 2 * 2]; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_FRACTAL_Z, std::vector({64, 64, 2, 2}), + std::vector({8, 4, 16, 32}), DT_UINT8}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_nhwc_5d_unittest.cc b/tests/ut/ge/common/format_transfer_nhwc_5d_unittest.cc new file mode 100644 index 00000000..8d1ff256 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_nhwc_5d_unittest.cc @@ -0,0 +1,750 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/fp16_t.h" + +namespace ge { +namespace formats { +class UtestFormatTransferNhwc5d : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferNhwc5d, nhwc_to_5d_uint8) { + uint8_t data[1 * 4 * 4 * 3] = { + 2, 6, 1, 6, 11, 12, 30, 24, 4, 28, 22, 25, 20, 5, 18, 15, 23, 27, 1, 25, 26, 24, 11, 8, + 21, 15, 6, 5, 23, 17, 11, 18, 21, 24, 14, 20, 19, 12, 23, 16, 3, 9, 10, 3, 15, 31, 18, 9, + }; + + uint8_t data_5d[1 * 1 * 4 * 4 * 32] = { + 2, 6, 1, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 6, 11, 12, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 30, 24, 4, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 28, 22, 25, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 20, 5, 18, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15, 23, 27, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 1, 25, 26, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 24, 11, 8, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 21, 15, 6, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 5, 23, 17, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11, 18, 21, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 24, 14, 20, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 19, 12, 23, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 16, 3, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10, 3, 15, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 31, 18, 9, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{data, FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 3}, {1, 1, 4, 4, 32}, DT_UINT8}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNhwc5d, nhwc_to_5d_uint8_32c) { + uint8_t data[1 * 4 * 4 * 32] = { + 21, 14, 25, 9, 25, 3, 8, 21, 13, 29, 15, 6, 9, 30, 2, 9, 10, 25, 10, 3, 10, 21, 28, 31, 5, 6, 9, + 18, 5, 16, 28, 21, 3, 1, 25, 5, 20, 25, 2, 21, 11, 19, 12, 7, 25, 6, 26, 17, 2, 31, 26, 23, 28, 23, + 4, 12, 0, 6, 16, 29, 18, 2, 27, 21, 14, 15, 27, 14, 30, 4, 28, 26, 6, 21, 8, 28, 4, 20, 20, 14, 14, + 21, 15, 14, 2, 0, 3, 21, 22, 28, 23, 2, 23, 23, 17, 6, 17, 5, 28, 23, 6, 2, 0, 26, 3, 24, 30, 9, + 23, 24, 5, 12, 27, 14, 5, 30, 20, 20, 23, 12, 6, 24, 11, 16, 13, 19, 18, 16, 12, 5, 23, 15, 29, 31, 2, + 17, 6, 28, 27, 9, 18, 7, 19, 10, 28, 14, 28, 16, 17, 8, 18, 7, 16, 0, 24, 26, 9, 10, 8, 26, 23, 4, + 21, 21, 13, 9, 24, 13, 0, 6, 10, 3, 14, 21, 22, 28, 4, 13, 20, 20, 10, 17, 24, 31, 6, 14, 21, 29, 31, + 0, 5, 28, 0, 2, 18, 25, 0, 28, 14, 27, 0, 25, 20, 15, 9, 9, 24, 22, 0, 4, 24, 28, 17, 14, 
27, 14, + 30, 10, 8, 30, 29, 23, 11, 9, 23, 17, 7, 31, 22, 12, 6, 9, 10, 15, 12, 14, 11, 12, 19, 19, 29, 9, 21, + 12, 1, 27, 29, 8, 11, 3, 7, 18, 13, 15, 23, 5, 9, 6, 8, 31, 20, 12, 14, 26, 31, 16, 18, 5, 15, 1, + 27, 8, 21, 15, 13, 13, 5, 11, 18, 9, 25, 31, 17, 23, 5, 10, 9, 22, 19, 14, 13, 27, 11, 20, 1, 16, 2, + 12, 2, 11, 28, 20, 9, 2, 31, 9, 7, 3, 16, 26, 7, 21, 11, 24, 25, 5, 7, 28, 19, 6, 6, 4, 0, 6, + 28, 6, 20, 10, 12, 16, 7, 3, 3, 17, 1, 11, 21, 5, 14, 18, 17, 17, 2, 21, 15, 20, 0, 15, 13, 29, 21, + 11, 15, 25, 25, 25, 9, 21, 25, 15, 19, 1, 31, 21, 18, 18, 14, 5, 25, 26, 2, 17, 15, 8, 27, 15, 20, 11, + 30, 27, 1, 3, 0, 22, 8, 11, 1, 24, 29, 28, 24, 31, 15, 31, 8, 15, 8, 0, 0, 20, 19, 21, 21, 20, 25, + 31, 24, 20, 3, 3, 4, 14, 15, 4, 12, 26, 22, 13, 10, 13, 22, 9, 14, 12, 7, 13, 19, 3, 23, 24, 3, 15, + 23, 25, 16, 5, 30, 21, 10, 28, 7, 9, 18, 9, 3, 12, 25, 9, 18, 31, 15, 11, 1, 11, 10, 8, 28, 4, 19, + 27, 22, 17, 10, 23, 25, 2, 19, 16, 2, 19, 28, 25, 24, 2, 2, 4, 17, 4, 12, 26, 4, 25, 31, 27, 31, 14, + 17, 13, 24, 5, 10, 10, 17, 26, 16, 15, 25, 18, 15, 10, 22, 13, 30, 16, 23, 10, 23, 24, 10, 14, 5, 17, + }; + + uint8_t data_5d[1 * 1 * 4 * 4 * 32] = { + 21, 14, 25, 9, 25, 3, 8, 21, 13, 29, 15, 6, 9, 30, 2, 9, 10, 25, 10, 3, 10, 21, 28, 31, 5, 6, 9, + 18, 5, 16, 28, 21, 3, 1, 25, 5, 20, 25, 2, 21, 11, 19, 12, 7, 25, 6, 26, 17, 2, 31, 26, 23, 28, 23, + 4, 12, 0, 6, 16, 29, 18, 2, 27, 21, 14, 15, 27, 14, 30, 4, 28, 26, 6, 21, 8, 28, 4, 20, 20, 14, 14, + 21, 15, 14, 2, 0, 3, 21, 22, 28, 23, 2, 23, 23, 17, 6, 17, 5, 28, 23, 6, 2, 0, 26, 3, 24, 30, 9, + 23, 24, 5, 12, 27, 14, 5, 30, 20, 20, 23, 12, 6, 24, 11, 16, 13, 19, 18, 16, 12, 5, 23, 15, 29, 31, 2, + 17, 6, 28, 27, 9, 18, 7, 19, 10, 28, 14, 28, 16, 17, 8, 18, 7, 16, 0, 24, 26, 9, 10, 8, 26, 23, 4, + 21, 21, 13, 9, 24, 13, 0, 6, 10, 3, 14, 21, 22, 28, 4, 13, 20, 20, 10, 17, 24, 31, 6, 14, 21, 29, 31, + 0, 5, 28, 0, 2, 18, 25, 0, 28, 14, 27, 0, 25, 20, 15, 9, 9, 24, 22, 0, 4, 24, 28, 17, 14, 27, 14, + 30, 10, 
8, 30, 29, 23, 11, 9, 23, 17, 7, 31, 22, 12, 6, 9, 10, 15, 12, 14, 11, 12, 19, 19, 29, 9, 21, + 12, 1, 27, 29, 8, 11, 3, 7, 18, 13, 15, 23, 5, 9, 6, 8, 31, 20, 12, 14, 26, 31, 16, 18, 5, 15, 1, + 27, 8, 21, 15, 13, 13, 5, 11, 18, 9, 25, 31, 17, 23, 5, 10, 9, 22, 19, 14, 13, 27, 11, 20, 1, 16, 2, + 12, 2, 11, 28, 20, 9, 2, 31, 9, 7, 3, 16, 26, 7, 21, 11, 24, 25, 5, 7, 28, 19, 6, 6, 4, 0, 6, + 28, 6, 20, 10, 12, 16, 7, 3, 3, 17, 1, 11, 21, 5, 14, 18, 17, 17, 2, 21, 15, 20, 0, 15, 13, 29, 21, + 11, 15, 25, 25, 25, 9, 21, 25, 15, 19, 1, 31, 21, 18, 18, 14, 5, 25, 26, 2, 17, 15, 8, 27, 15, 20, 11, + 30, 27, 1, 3, 0, 22, 8, 11, 1, 24, 29, 28, 24, 31, 15, 31, 8, 15, 8, 0, 0, 20, 19, 21, 21, 20, 25, + 31, 24, 20, 3, 3, 4, 14, 15, 4, 12, 26, 22, 13, 10, 13, 22, 9, 14, 12, 7, 13, 19, 3, 23, 24, 3, 15, + 23, 25, 16, 5, 30, 21, 10, 28, 7, 9, 18, 9, 3, 12, 25, 9, 18, 31, 15, 11, 1, 11, 10, 8, 28, 4, 19, + 27, 22, 17, 10, 23, 25, 2, 19, 16, 2, 19, 28, 25, 24, 2, 2, 4, 17, 4, 12, 26, 4, 25, 31, 27, 31, 14, + 17, 13, 24, 5, 10, 10, 17, 26, 16, 15, 25, 18, 15, 10, 22, 13, 30, 16, 23, 10, 23, 24, 10, 14, 5, 17, + }; + + TransArgs args{data, FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 32}, {1, 1, 4, 4, 32}, DT_UINT8}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNhwc5d, nhwc_to_5d_fp16_single) { + uint16_t data[1 * 1 * 1 * 1] = {13425}; + uint16_t data_5d[1 * 1 * 1 * 1 * 16] = { + 13425, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1, 1, 1, 1}, {1, 1, 1, 1, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, 
sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNhwc5d, nhwc_to_5d_fp16) { + uint16_t data[1 * 4 * 4 * 16] = { + 15272, 12501, 13940, 10024, 13356, 13068, 12088, 13733, 15257, 14104, 11089, 15298, 10597, 14359, 14402, 14748, + 14596, 14063, 14674, 13393, 12937, 13466, 14313, 13295, 15000, 15167, 15311, 13122, 10691, 15165, 14621, 14000, + 13584, 14715, 15105, 14479, 14007, 9846, 14325, 12765, 13343, 13988, 10021, 14598, 14623, 15077, 15204, 12528, + 12024, 14236, 14857, 13009, 15216, 12916, 12754, 14807, 15174, 15075, 12998, 13834, 15174, 13674, 15251, 12683, + 13116, 14819, 11956, 14416, 14717, 14954, 15267, 15143, 15292, 9704, 14781, 14965, 14808, 15008, 11416, 15074, + 14168, 14417, 13441, 10673, 14945, 15114, 15358, 15116, 11950, 12057, 15321, 14973, 14950, 13984, 14900, 11361, + 10161, 14742, 13366, 13683, 13439, 12865, 10623, 14720, 14545, 13063, 10190, 12474, 9850, 15088, 15228, 14195, + 13428, 12443, 14719, 14816, 13231, 12818, 13667, 9680, 14814, 13924, 12757, 15178, 13444, 13673, 14405, 12711, + 15279, 14207, 9089, 13774, 13008, 14685, 13887, 15293, 13983, 14590, 15232, 15285, 15071, 14974, 15257, 13900, + 14907, 15269, 10955, 13635, 15132, 15026, 14218, 14498, 15235, 11243, 14704, 11563, 14394, 6840, 13619, 14655, + 12830, 14094, 12487, 13016, 13128, 15082, 6517, 14170, 14713, 14208, 13583, 12831, 15064, 13157, 13761, 14456, + 14905, 14798, 11391, 14668, 13906, 11053, 12381, 15210, 13567, 15159, 15270, 15073, 13887, 11861, 14615, 12627, + 15209, 14630, 13394, 14228, 14184, 13719, 14805, 13748, 14215, 13234, 13053, 14651, 14753, 14560, 12289, 14957, + 12826, 14788, 15236, 14249, 15211, 14329, 14830, 14793, 13202, 14635, 14489, 14664, 10751, 10992, 13459, 13658, + 14947, 14484, 15045, 14431, 14644, 13939, 14088, 14092, 14765, 14096, 14696, 13201, 15162, 14751, 14119, 13506, + 14659, 15355, 14904, 13374, 15048, 15188, 
14733, 14307, 12518, 12511, 15187, 11018, 13072, 15023, 11355, 14216, + }; + + uint16_t data_5d[1 * 1 * 4 * 4 * 16] = { + 15272, 12501, 13940, 10024, 13356, 13068, 12088, 13733, 15257, 14104, 11089, 15298, 10597, 14359, 14402, 14748, + 14596, 14063, 14674, 13393, 12937, 13466, 14313, 13295, 15000, 15167, 15311, 13122, 10691, 15165, 14621, 14000, + 13584, 14715, 15105, 14479, 14007, 9846, 14325, 12765, 13343, 13988, 10021, 14598, 14623, 15077, 15204, 12528, + 12024, 14236, 14857, 13009, 15216, 12916, 12754, 14807, 15174, 15075, 12998, 13834, 15174, 13674, 15251, 12683, + 13116, 14819, 11956, 14416, 14717, 14954, 15267, 15143, 15292, 9704, 14781, 14965, 14808, 15008, 11416, 15074, + 14168, 14417, 13441, 10673, 14945, 15114, 15358, 15116, 11950, 12057, 15321, 14973, 14950, 13984, 14900, 11361, + 10161, 14742, 13366, 13683, 13439, 12865, 10623, 14720, 14545, 13063, 10190, 12474, 9850, 15088, 15228, 14195, + 13428, 12443, 14719, 14816, 13231, 12818, 13667, 9680, 14814, 13924, 12757, 15178, 13444, 13673, 14405, 12711, + 15279, 14207, 9089, 13774, 13008, 14685, 13887, 15293, 13983, 14590, 15232, 15285, 15071, 14974, 15257, 13900, + 14907, 15269, 10955, 13635, 15132, 15026, 14218, 14498, 15235, 11243, 14704, 11563, 14394, 6840, 13619, 14655, + 12830, 14094, 12487, 13016, 13128, 15082, 6517, 14170, 14713, 14208, 13583, 12831, 15064, 13157, 13761, 14456, + 14905, 14798, 11391, 14668, 13906, 11053, 12381, 15210, 13567, 15159, 15270, 15073, 13887, 11861, 14615, 12627, + 15209, 14630, 13394, 14228, 14184, 13719, 14805, 13748, 14215, 13234, 13053, 14651, 14753, 14560, 12289, 14957, + 12826, 14788, 15236, 14249, 15211, 14329, 14830, 14793, 13202, 14635, 14489, 14664, 10751, 10992, 13459, 13658, + 14947, 14484, 15045, 14431, 14644, 13939, 14088, 14092, 14765, 14096, 14696, 13201, 15162, 14751, 14119, 13506, + 14659, 15355, 14904, 13374, 15048, 15188, 14733, 14307, 12518, 12511, 15187, 11018, 13072, 15023, 11355, 14216, + }; + + TransArgs args{ + reinterpret_cast(data), 
FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 16}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNhwc5d, nhwc_to_5d_fp16_17c) { + uint16_t data[1 * 4 * 4 * 17] = { + 13688, 13163, 15170, 12549, 12876, 15228, 14672, 14988, 13134, 14510, 9810, 15108, 14863, 13964, 13563, 12807, + 13855, 13713, 14783, 14854, 13049, 11513, 15025, 14681, 13663, 13722, 14976, 15068, 14816, 13831, 13642, 15235, + 13133, 14666, 14169, 11361, 14948, 14421, 14255, 14285, 15057, 11992, 14788, 13201, 15119, 14856, 14793, 12473, + 15215, 14789, 14728, 14311, 12980, 10457, 10908, 12896, 14619, 15085, 12907, 11787, 13731, 13228, 15259, 14651, + 13829, 12858, 14998, 13957, 15122, 13691, 13185, 11770, 13198, 12714, 15199, 13931, 9780, 14569, 15353, 14807, + 14196, 14570, 14753, 14042, 14983, 12953, 14874, 15089, 13383, 13795, 12129, 14556, 13240, 14965, 13331, 11285, + 15188, 15110, 14909, 14485, 14336, 13854, 13931, 15042, 13277, 14478, 12890, 14101, 14639, 14380, 14453, 15222, + 13431, 13650, 14330, 15188, 15118, 13645, 13878, 8366, 15192, 9233, 13516, 14812, 15289, 14551, 14772, 10564, + 13672, 14892, 11295, 15075, 15080, 14504, 11827, 15286, 13510, 13808, 10051, 12669, 15104, 14790, 15166, 12735, + 13908, 15202, 15103, 13600, 10602, 14248, 14810, 14408, 14317, 11545, 14353, 12064, 14610, 14610, 14557, 14438, + 11772, 14486, 14024, 15136, 11316, 15070, 14996, 13987, 12120, 14548, 13976, 13462, 13614, 14785, 12854, 11411, + 14179, 13944, 12796, 13491, 13781, 14549, 13506, 13369, 14249, 14973, 12837, 14507, 13462, 14467, 14856, 14851, + 14744, 15209, 12085, 13802, 13234, 15098, 12964, 14569, 15206, 11899, 11525, 15082, 14794, 14982, 14552, 15075, + 14741, 14730, 15013, 13483, 14997, 13342, 
11416, 12620, 15125, 13969, 13737, 14041, 11219, 15355, 11361, 12378, + 13717, 13299, 14856, 12113, 15291, 12946, 15038, 14854, 13568, 13635, 14525, 12769, 13663, 13672, 14164, 14763, + 14661, 12871, 10567, 13383, 14679, 13331, 15247, 14464, 14642, 13930, 15218, 15040, 14614, 10533, 13511, 13577, + 12696, 11780, 11060, 15069, 11544, 15333, 15295, 14505, 11951, 12790, 14115, 14978, 12227, 14806, 15238, 14393, + }; + uint16_t data_5d[1 * 2 * 4 * 4 * 16] = { + 13688, 13163, 15170, 12549, 12876, 15228, 14672, 14988, 13134, 14510, 9810, 15108, 14863, 13964, 13563, 12807, + 13713, 14783, 14854, 13049, 11513, 15025, 14681, 13663, 13722, 14976, 15068, 14816, 13831, 13642, 15235, 13133, + 14169, 11361, 14948, 14421, 14255, 14285, 15057, 11992, 14788, 13201, 15119, 14856, 14793, 12473, 15215, 14789, + 14311, 12980, 10457, 10908, 12896, 14619, 15085, 12907, 11787, 13731, 13228, 15259, 14651, 13829, 12858, 14998, + 15122, 13691, 13185, 11770, 13198, 12714, 15199, 13931, 9780, 14569, 15353, 14807, 14196, 14570, 14753, 14042, + 12953, 14874, 15089, 13383, 13795, 12129, 14556, 13240, 14965, 13331, 11285, 15188, 15110, 14909, 14485, 14336, + 13931, 15042, 13277, 14478, 12890, 14101, 14639, 14380, 14453, 15222, 13431, 13650, 14330, 15188, 15118, 13645, + 8366, 15192, 9233, 13516, 14812, 15289, 14551, 14772, 10564, 13672, 14892, 11295, 15075, 15080, 14504, 11827, + 13510, 13808, 10051, 12669, 15104, 14790, 15166, 12735, 13908, 15202, 15103, 13600, 10602, 14248, 14810, 14408, + 11545, 14353, 12064, 14610, 14610, 14557, 14438, 11772, 14486, 14024, 15136, 11316, 15070, 14996, 13987, 12120, + 13976, 13462, 13614, 14785, 12854, 11411, 14179, 13944, 12796, 13491, 13781, 14549, 13506, 13369, 14249, 14973, + 14507, 13462, 14467, 14856, 14851, 14744, 15209, 12085, 13802, 13234, 15098, 12964, 14569, 15206, 11899, 11525, + 14794, 14982, 14552, 15075, 14741, 14730, 15013, 13483, 14997, 13342, 11416, 12620, 15125, 13969, 13737, 14041, + 15355, 11361, 12378, 13717, 13299, 14856, 12113, 15291, 
12946, 15038, 14854, 13568, 13635, 14525, 12769, 13663, + 14164, 14763, 14661, 12871, 10567, 13383, 14679, 13331, 15247, 14464, 14642, 13930, 15218, 15040, 14614, 10533, + 13577, 12696, 11780, 11060, 15069, 11544, 15333, 15295, 14505, 11951, 12790, 14115, 14978, 12227, 14806, 15238, + 13855, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14666, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14728, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13957, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14983, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13854, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13878, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15286, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14317, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14548, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12837, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15082, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11219, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13672, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13511, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14393, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 17}, {1, 2, 4, 4, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNhwc5d, nhwc_to_5d_float) { + float data[1 * 4 * 4 * 8] = { + 0.14395748132615127, 0.7088975015001723, 0.33509522683279236, 0.519409599070846, 0.6877815703183492, + 0.11202024034801872, 0.006276379581528935, 0.3691877050360296, 0.8286682894080306, 0.8577776441477236, + 0.8620479285149965, 0.4785995315212451, 0.44290479161201646, 0.9298880355625483, 0.2079760936942212, + 0.7584010867023864, 
0.04130212504113229, 0.6886546611913413, 0.8905605080864101, 0.44890343542909616, + 0.033926825396621396, 0.5658782347882929, 0.0154316787055232, 0.6715229410700493, 0.15104032692043634, + 0.7987494899164227, 0.4107814620344469, 0.2739026396531693, 0.78314190163481, 0.5572462878749022, + 0.49687645697979144, 0.2610836567590066, 0.662984178264575, 0.8272197084584877, 0.951921638821051, + 0.22020310043156965, 0.7970234862803476, 0.786770080635131, 0.2782844900661975, 0.2214492309528462, + 0.05973945191243013, 0.7787265114728507, 0.5885108317539937, 0.6350434845578384, 0.03432358265902924, + 0.13814464833713236, 0.47716000964132366, 0.2172979558296817, 0.28184686482223664, 0.023912988786341294, + 0.564989222222373, 0.6350727555041364, 0.17406682486362202, 0.5782687973077343, 0.7691109852619834, + 0.6283233021413348, 0.7351740165991039, 0.3215521304014334, 0.6774009330532079, 0.33739099291474717, + 0.9568828717913317, 0.00406630044661338, 0.022773887476273513, 0.0062475315550286625, 0.11386475535418572, + 0.31803152343086083, 0.5060149804451273, 0.9748224337922627, 0.08021564523597269, 0.36955307731376397, + 0.45745050755121797, 0.6991568446588684, 0.48452855411290163, 0.1687682253709446, 0.8171081226272253, + 0.5722562245860371, 0.38631439575235693, 0.4152775169941805, 0.5471240543016923, 0.47255359909361083, + 0.19979061254107167, 0.6128813529241708, 0.23241802167600212, 0.6598280464895825, 0.39993127352459, + 0.6179092276151944, 0.3842495249973191, 0.07172557002264568, 0.5232161572150006, 0.33507445318217577, + 0.6669179668737779, 0.5710568144146737, 0.09743181036899662, 0.1960181228757637, 0.024614338703409122, + 0.04305198418453349, 0.8272287766449594, 0.3104293133165287, 0.295404336140902, 0.869972288744926, + 0.6598182869917978, 0.1256465164983911, 0.6611169004945606, 0.887335228528663, 0.30319799367763645, + 0.10221678669180034, 0.822023968653782, 0.7054515545991238, 0.7026671130911287, 0.6583675813685899, + 0.14794276026959252, 0.12379423708188408, 
0.010717044340432524, 0.3335554745873852, 0.6960727743111309, + 0.835599614916433, 0.6695589997837782, 0.928169629281005, 0.2751019740519224, 0.09543122169280194, + 0.5117813618227156, 0.33444700996623, 0.5634565397240759, 0.5205229823558587, 0.7650601279838857, + 0.517468037811738, 0.5880785947369374, 0.2177496979194814, + }; + float data_5d[1 * 1 * 4 * 4 * 16] = { + 0.14395748132615127, + 0.7088975015001723, + 0.33509522683279236, + 0.519409599070846, + 0.6877815703183492, + 0.11202024034801872, + 0.006276379581528935, + 0.3691877050360296, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8286682894080306, + 0.8577776441477236, + 0.8620479285149965, + 0.4785995315212451, + 0.44290479161201646, + 0.9298880355625483, + 0.2079760936942212, + 0.7584010867023864, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.04130212504113229, + 0.6886546611913413, + 0.8905605080864101, + 0.44890343542909616, + 0.033926825396621396, + 0.5658782347882929, + 0.0154316787055232, + 0.6715229410700493, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.15104032692043634, + 0.7987494899164227, + 0.4107814620344469, + 0.2739026396531693, + 0.78314190163481, + 0.5572462878749022, + 0.49687645697979144, + 0.2610836567590066, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.662984178264575, + 0.8272197084584877, + 0.951921638821051, + 0.22020310043156965, + 0.7970234862803476, + 0.786770080635131, + 0.2782844900661975, + 0.2214492309528462, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05973945191243013, + 0.7787265114728507, + 0.5885108317539937, + 0.6350434845578384, + 0.03432358265902924, + 0.13814464833713236, + 0.47716000964132366, + 0.2172979558296817, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.28184686482223664, + 0.023912988786341294, + 0.564989222222373, + 0.6350727555041364, + 0.17406682486362202, + 0.5782687973077343, + 0.7691109852619834, + 0.6283233021413348, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.7351740165991039, + 0.3215521304014334, + 0.6774009330532079, + 0.33739099291474717, + 0.9568828717913317, + 0.00406630044661338, + 0.022773887476273513, + 0.0062475315550286625, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11386475535418572, + 0.31803152343086083, + 0.5060149804451273, + 0.9748224337922627, + 0.08021564523597269, + 0.36955307731376397, + 0.45745050755121797, + 0.6991568446588684, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.48452855411290163, + 0.1687682253709446, + 0.8171081226272253, + 0.5722562245860371, + 0.38631439575235693, + 0.4152775169941805, + 0.5471240543016923, + 0.47255359909361083, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19979061254107167, + 0.6128813529241708, + 0.23241802167600212, + 0.6598280464895825, + 0.39993127352459, + 0.6179092276151944, + 0.3842495249973191, + 0.07172557002264568, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5232161572150006, + 0.33507445318217577, + 0.6669179668737779, + 0.5710568144146737, + 0.09743181036899662, + 0.1960181228757637, + 0.024614338703409122, + 0.04305198418453349, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8272287766449594, + 0.3104293133165287, + 0.295404336140902, + 0.869972288744926, + 0.6598182869917978, + 0.1256465164983911, + 0.6611169004945606, + 0.887335228528663, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.30319799367763645, + 0.10221678669180034, + 0.822023968653782, + 0.7054515545991238, + 0.7026671130911287, + 0.6583675813685899, + 0.14794276026959252, + 0.12379423708188408, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.010717044340432524, + 0.3335554745873852, + 0.6960727743111309, + 0.835599614916433, + 0.6695589997837782, + 0.928169629281005, + 0.2751019740519224, + 0.09543122169280194, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5117813618227156, + 0.33444700996623, + 0.5634565397240759, + 0.5205229823558587, + 0.7650601279838857, + 
0.517468037811738, + 0.5880785947369374, + 0.2177496979194814, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 8}, {1, 1, 4, 4, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } +} + +TEST_F(UtestFormatTransferNhwc5d, nhwc_to_5d_float2) { + float data[1 * 4 * 4 * 16] = { + 0.4564786036237277, 0.9979708631827585, 0.3433449574519194, 0.8327170836954324, 0.7102312568870411, + 0.4620266296757265, 0.731409804774576, 0.6657620022925489, 0.9035335884051056, 0.9985587438582897, + 0.9914301800355473, 0.7964115237958334, 0.8584244270496925, 0.1378300681142447, 0.9146423401653642, + 0.5838660267355298, 0.08206209108887697, 0.5978772929814459, 0.8606477151292675, 0.9621549085527097, + 0.9903169628823607, 0.4413502878053672, 0.3419319864126722, 0.2055590347352939, 0.6958247381061989, + 0.31025181192729134, 0.8754696913785533, 0.16342168578297833, 0.48930764038405494, 0.4313174744262651, + 0.9702299296220505, 0.8608633387702506, 0.32414390364635537, 0.9499311305911101, 0.16162894064122346, + 0.7903673191014232, 0.9747281169251742, 0.19047786660677712, 0.5261388562183582, 0.08832658004278726, + 0.5026034609888064, 0.6012786562129157, 0.22023272766428525, 0.020620813194720755, 0.05621537431872736, + 0.9065752732717621, 0.10241901312343715, 0.7468164462101752, 0.6025800857266902, 0.8563704540567573, + 0.06781353150900471, 0.07449933352495186, 0.6462834271091005, 0.6443763466531751, 0.23443689347408292, + 0.9007764794631198, 0.2654578696798139, 0.34714459047552515, 0.9442670098376124, 0.6551617300899828, + 0.18577821984901555, 0.6554056318808226, 0.9256976155576719, 0.5652951773970069, 0.9036782198563219, + 
0.4044957431656302, 0.7720355215505535, 0.9615844951249943, 0.9583990983695638, 0.6734156011463083, + 0.07600462174220979, 0.6483143942496717, 0.7721284471233, 0.8844698574851142, 0.36446559980424187, + 0.2020712252813367, 0.5532440010466321, 0.774082923402127, 0.15753243710488063, 0.7494487845016365, + 0.9777126596615223, 0.5977366091149126, 0.6118678708384551, 0.5776021887764191, 0.7052275323758874, + 0.8643536179940521, 0.7351423461892231, 0.7472712957396752, 0.8320997849162026, 0.6893202120458873, + 0.6061132885338082, 0.7922244558608248, 0.0694183157723195, 0.46637306821083124, 0.24761782321363746, + 0.841220929773547, 0.19251486945606122, 0.38619121136754864, 0.7777480845728865, 0.5153172040040658, + 0.3221852259862178, 0.4422626646996969, 0.37297798349444533, 0.28636326416033275, 0.872322772220719, + 0.36684031320178934, 0.19151281097764805, 0.011093279580771997, 0.4565429665149845, 0.33857376446626597, + 0.3893706571125203, 0.09763853688754631, 0.7857370586828217, 0.8196400350274258, 0.6233161446810015, + 0.7290476585781567, 0.32302816180960103, 0.6465065602583131, 0.7039055178319722, 0.737269028941926, + 0.4423557810299573, 0.2936287529863649, 0.17383709070689168, 0.07431649542543162, 0.3395265133684905, + 0.6974196233605686, 0.029369533449977392, 0.8462674586537903, 0.1862823571798382, 0.7055362309875918, + 0.17654051410721183, 0.6964155103280214, 0.8712303990874309, 0.8568724052794551, 0.12588302504561832, + 0.7811576698822645, 0.4767670325567058, 0.6159667262200001, 0.05490717393005784, 0.6057928538737336, + 0.8546815077994012, 0.35062523335992557, 0.12129862795339719, 0.26548933441616984, 0.7385743748060322, + 0.40555441839717865, 0.9982561364349957, 0.3188263567371774, 0.9424891762411117, 0.6987901399365776, + 0.20540961831616766, 0.02167040277535437, 0.3185967031398763, 0.019782643683121437, 0.8438032683953371, + 0.31134590764527503, 0.4314797108424774, 0.8627081052949851, 0.8372865189589538, 0.06727616337496733, + 0.3842235730532829, 
0.5620567599469514, 0.4872978433211791, 0.6475139880108287, 0.415262171705042, + 0.16946425200394755, 0.37100485677595374, 0.5675133342481045, 0.39388891080376875, 0.11961877709354252, + 0.6050560699851201, 0.6113694663807391, 0.7148151069557451, 0.3227410199494769, 0.47070000531233336, + 0.6184325422121354, 0.9366505496068971, 0.044456665466675815, 0.6052379671242412, 0.0566508245272247, + 0.293920203298791, 0.13061352402232362, 0.11203690960569768, 0.8258966037746421, 0.2946765132214273, + 0.8466489444530028, 0.7451823806430503, 0.15781221269704837, 0.4021853233314594, 0.664241200519387, + 0.7091761574997846, 0.7412572946122742, 0.12408378577361201, 0.6351997345547254, 0.49923734082324256, + 0.7532652811776691, 0.4831886413654197, 0.8560909415971528, 0.5359252954295073, 0.27940116425452954, + 0.697831712634371, 0.6651941742331731, 0.38894273544941194, 0.6511702859506332, 0.24281567468797605, + 0.28501013033328104, 0.4337819166171949, 0.5961407181707284, 0.5496662828770404, 0.41579648284745674, + 0.8751091806383153, 0.20914271806373042, 0.5223981065749688, 0.36853485871088787, 0.6950926163206822, + 0.8626788290432134, 0.280786741801478, 0.9968042088585791, 0.047574943064607855, 0.3553705848433375, + 0.8500631428597956, 0.7900997773399084, 0.08208295892311868, 0.5799468056312337, 0.8678034851684076, + 0.6620161902490288, 0.10118441445571336, 0.5346674104167647, 0.46546006518032723, 0.7854958793456643, + 0.41889328134628867, 0.4788457107828109, 0.5693837686243997, 0.03982329678460883, 0.5036591611514133, + 0.8634869905751454, 0.36418146420377306, 0.3560197611754259, 0.4237274215048007, 0.9272113781908002, + 0.4733639065953018, 0.11277189215022076, 0.46944385729018046, 0.5240510466702447, 0.3809929220315893, + 0.9044904830984387, 0.630375764858229, 0.40395182381843286, 0.39508838980681005, 0.762649660569511, + 0.7194310117846976, 0.992973488796045, 0.22591279583891666, 0.4331673497772569, 0.6014661361937058, + 0.7590490257651524, + }; + float data_5d[1 * 1 * 4 * 4 * 
16] = { + 0.4564786036237277, 0.9979708631827585, 0.3433449574519194, 0.8327170836954324, 0.7102312568870411, + 0.4620266296757265, 0.731409804774576, 0.6657620022925489, 0.9035335884051056, 0.9985587438582897, + 0.9914301800355473, 0.7964115237958334, 0.8584244270496925, 0.1378300681142447, 0.9146423401653642, + 0.5838660267355298, 0.08206209108887697, 0.5978772929814459, 0.8606477151292675, 0.9621549085527097, + 0.9903169628823607, 0.4413502878053672, 0.3419319864126722, 0.2055590347352939, 0.6958247381061989, + 0.31025181192729134, 0.8754696913785533, 0.16342168578297833, 0.48930764038405494, 0.4313174744262651, + 0.9702299296220505, 0.8608633387702506, 0.32414390364635537, 0.9499311305911101, 0.16162894064122346, + 0.7903673191014232, 0.9747281169251742, 0.19047786660677712, 0.5261388562183582, 0.08832658004278726, + 0.5026034609888064, 0.6012786562129157, 0.22023272766428525, 0.020620813194720755, 0.05621537431872736, + 0.9065752732717621, 0.10241901312343715, 0.7468164462101752, 0.6025800857266902, 0.8563704540567573, + 0.06781353150900471, 0.07449933352495186, 0.6462834271091005, 0.6443763466531751, 0.23443689347408292, + 0.9007764794631198, 0.2654578696798139, 0.34714459047552515, 0.9442670098376124, 0.6551617300899828, + 0.18577821984901555, 0.6554056318808226, 0.9256976155576719, 0.5652951773970069, 0.9036782198563219, + 0.4044957431656302, 0.7720355215505535, 0.9615844951249943, 0.9583990983695638, 0.6734156011463083, + 0.07600462174220979, 0.6483143942496717, 0.7721284471233, 0.8844698574851142, 0.36446559980424187, + 0.2020712252813367, 0.5532440010466321, 0.774082923402127, 0.15753243710488063, 0.7494487845016365, + 0.9777126596615223, 0.5977366091149126, 0.6118678708384551, 0.5776021887764191, 0.7052275323758874, + 0.8643536179940521, 0.7351423461892231, 0.7472712957396752, 0.8320997849162026, 0.6893202120458873, + 0.6061132885338082, 0.7922244558608248, 0.0694183157723195, 0.46637306821083124, 0.24761782321363746, + 0.841220929773547, 
0.19251486945606122, 0.38619121136754864, 0.7777480845728865, 0.5153172040040658, + 0.3221852259862178, 0.4422626646996969, 0.37297798349444533, 0.28636326416033275, 0.872322772220719, + 0.36684031320178934, 0.19151281097764805, 0.011093279580771997, 0.4565429665149845, 0.33857376446626597, + 0.3893706571125203, 0.09763853688754631, 0.7857370586828217, 0.8196400350274258, 0.6233161446810015, + 0.7290476585781567, 0.32302816180960103, 0.6465065602583131, 0.7039055178319722, 0.737269028941926, + 0.4423557810299573, 0.2936287529863649, 0.17383709070689168, 0.07431649542543162, 0.3395265133684905, + 0.6974196233605686, 0.029369533449977392, 0.8462674586537903, 0.1862823571798382, 0.7055362309875918, + 0.17654051410721183, 0.6964155103280214, 0.8712303990874309, 0.8568724052794551, 0.12588302504561832, + 0.7811576698822645, 0.4767670325567058, 0.6159667262200001, 0.05490717393005784, 0.6057928538737336, + 0.8546815077994012, 0.35062523335992557, 0.12129862795339719, 0.26548933441616984, 0.7385743748060322, + 0.40555441839717865, 0.9982561364349957, 0.3188263567371774, 0.9424891762411117, 0.6987901399365776, + 0.20540961831616766, 0.02167040277535437, 0.3185967031398763, 0.019782643683121437, 0.8438032683953371, + 0.31134590764527503, 0.4314797108424774, 0.8627081052949851, 0.8372865189589538, 0.06727616337496733, + 0.3842235730532829, 0.5620567599469514, 0.4872978433211791, 0.6475139880108287, 0.415262171705042, + 0.16946425200394755, 0.37100485677595374, 0.5675133342481045, 0.39388891080376875, 0.11961877709354252, + 0.6050560699851201, 0.6113694663807391, 0.7148151069557451, 0.3227410199494769, 0.47070000531233336, + 0.6184325422121354, 0.9366505496068971, 0.044456665466675815, 0.6052379671242412, 0.0566508245272247, + 0.293920203298791, 0.13061352402232362, 0.11203690960569768, 0.8258966037746421, 0.2946765132214273, + 0.8466489444530028, 0.7451823806430503, 0.15781221269704837, 0.4021853233314594, 0.664241200519387, + 0.7091761574997846, 0.7412572946122742, 
0.12408378577361201, 0.6351997345547254, 0.49923734082324256, + 0.7532652811776691, 0.4831886413654197, 0.8560909415971528, 0.5359252954295073, 0.27940116425452954, + 0.697831712634371, 0.6651941742331731, 0.38894273544941194, 0.6511702859506332, 0.24281567468797605, + 0.28501013033328104, 0.4337819166171949, 0.5961407181707284, 0.5496662828770404, 0.41579648284745674, + 0.8751091806383153, 0.20914271806373042, 0.5223981065749688, 0.36853485871088787, 0.6950926163206822, + 0.8626788290432134, 0.280786741801478, 0.9968042088585791, 0.047574943064607855, 0.3553705848433375, + 0.8500631428597956, 0.7900997773399084, 0.08208295892311868, 0.5799468056312337, 0.8678034851684076, + 0.6620161902490288, 0.10118441445571336, 0.5346674104167647, 0.46546006518032723, 0.7854958793456643, + 0.41889328134628867, 0.4788457107828109, 0.5693837686243997, 0.03982329678460883, 0.5036591611514133, + 0.8634869905751454, 0.36418146420377306, 0.3560197611754259, 0.4237274215048007, 0.9272113781908002, + 0.4733639065953018, 0.11277189215022076, 0.46944385729018046, 0.5240510466702447, 0.3809929220315893, + 0.9044904830984387, 0.630375764858229, 0.40395182381843286, 0.39508838980681005, 0.762649660569511, + 0.7194310117846976, 0.992973488796045, 0.22591279583891666, 0.4331673497772569, 0.6014661361937058, + 0.7590490257651524, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 16}, {1, 1, 4, 4, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_5d)); + for (int i = 0; i < sizeof(data_5d) / sizeof(data_5d[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], data_5d[i]); + } + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, SUCCESS); +} + +TEST_F(UtestFormatTransferNhwc5d, invalid_src_shape1) { + uint16_t data[1 * 4 * 
4 * 1] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNhwc5d, invalid_src_shape2) { + uint16_t data[1 * 4 * 4 * 1] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 0}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNhwc5d, invalid_src_format) { + uint16_t data[1 * 4 * 4 * 1] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NCHW, FORMAT_NC1HWC0, {1, 4, 4, 1}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, UNSUPPORTED); +} + +TEST_F(UtestFormatTransferNhwc5d, invalid_dst_shape2) { + uint16_t data[1 * 4 * 4 * 1] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 1}, {1, 2, 4, 4, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNhwc5d, invalid_src_data_type) { + uint16_t data[1 * 4 * 4 * 1] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_NC1HWC0, {1, 4, 4, 1}, {1, 1, 4, 4, 16}, DT_UNDEFINED}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} + +TEST_F(UtestFormatTransferNhwc5d, 
unsupport_dst_format) { + uint16_t data[1 * 4 * 4 * 1] = {0}; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_RESERVED, {1, 4, 4, 1}, {1, 1, 4, 4, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferNhwcNc1hwc0 transfer; + EXPECT_EQ(transfer.TransFormat(args, result), PARAM_INVALID); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_nhwc_fractalz_unittest.cc b/tests/ut/ge/common/format_transfer_nhwc_fractalz_unittest.cc new file mode 100644 index 00000000..424285a1 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_nhwc_fractalz_unittest.cc @@ -0,0 +1,5354 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_fractal_z.h" + +#include "common/formats/format_transfers/format_transfer.h" + +namespace ge { +namespace formats { +class UtestFormatTransferNhwcFz : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransferNhwcFz, nhwc_to_fracz_fp16_success_lt_cube) { + uint16_t data_4d[1 * 1 * 16 * 16] = { + 15108, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[1 * 1 * 1 * 1] = { + 15108, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_Z, {1, 1, 1, 1}, {1, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferFractalZ transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_4d)); + for (int i = 0; i < sizeof(data_4d) / sizeof(data_4d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_4d[i]); + } +} + +TEST_F(UtestFormatTransferNhwcFz, nhwc_to_fracz_fp16_success_eq_cube) { + uint16_t data_4d[4 * 1 * 16 * 16] = { + 14422, 14250, 15207, 11368, 13196, 12957, 15106, 15099, 15047, 13771, 13891, 14758, 13298, 13411, 15283, 15351, + 13386, 13586, 12661, 14761, 15025, 10628, 12072, 14528, 14866, 13804, 15034, 
14930, 14536, 13776, 11278, 10879, + 12538, 13720, 14793, 8244, 14509, 15201, 14563, 12649, 15320, 14814, 14229, 13811, 13734, 13795, 13354, 12915, + 13498, 14218, 14748, 9860, 13268, 11456, 14650, 14749, 14957, 13538, 14260, 15152, 14451, 14932, 12831, 14326, + 13007, 14492, 14752, 15121, 14798, 14508, 14502, 14809, 13826, 15346, 14598, 13712, 14560, 14605, 14954, 14740, + 13011, 13442, 14945, 13182, 15009, 8704, 12211, 14842, 15205, 12795, 10772, 13892, 12902, 14761, 14601, 12663, + 13702, 14580, 15322, 14745, 14750, 14264, 14433, 13854, 14778, 15011, 14745, 14599, 13430, 9216, 14570, 14514, + 15336, 9439, 15021, 15107, 14006, 12950, 13622, 14707, 14981, 14850, 13337, 13280, 15124, 10206, 13412, 13832, + 15303, 14455, 15079, 12787, 14613, 14302, 15036, 13536, 15243, 13739, 15259, 15099, 13211, 14770, 15097, 14402, + 15204, 14955, 14621, 12097, 15083, 14119, 14814, 4005, 10998, 14856, 13380, 14647, 14540, 14008, 11346, 14363, + 14313, 15108, 6776, 15180, 14647, 13917, 14629, 13412, 13480, 10439, 14776, 14563, 15328, 14777, 14436, 14902, + 15255, 14241, 13324, 15082, 14738, 15265, 14657, 13571, 15280, 15082, 9338, 12341, 14991, 14847, 14948, 14037, + 13624, 14947, 14968, 12058, 14060, 13434, 14349, 14972, 12716, 14287, 12350, 12619, 15263, 13831, 15160, 15032, + 14591, 12370, 13537, 11343, 13909, 12663, 15078, 14938, 14409, 13852, 12414, 13903, 13825, 13958, 10736, 11772, + 13429, 13236, 15033, 13910, 15008, 14564, 14344, 14546, 15227, 14491, 14858, 14957, 12725, 13536, 13030, 12562, + 15163, 13294, 15229, 15146, 11977, 14509, 15259, 12425, 15334, 13340, 15329, 15071, 14342, 14555, 14473, 15054, + 14551, 14307, 14721, 14255, 13352, 15201, 15191, 13045, 12146, 13419, 14884, 13352, 14843, 14356, 10607, 15253, + 12948, 13749, 13930, 14144, 9963, 15200, 14364, 14952, 14652, 15188, 14500, 14558, 15289, 14978, 10959, 14101, + 15131, 14699, 15179, 14896, 14614, 13448, 14463, 12326, 13099, 13035, 12361, 13939, 15216, 15031, 15104, 15122, + 14754, 14837, 14295, 12738, 
13007, 13385, 13712, 13448, 14487, 13904, 14940, 12247, 15013, 13784, 15169, 11543, + 15351, 14657, 15047, 14243, 14234, 14986, 15288, 9009, 14717, 13685, 13416, 12043, 14874, 14362, 12565, 15139, + 15351, 11442, 14669, 12929, 15084, 12574, 12783, 14385, 10838, 14537, 14440, 14713, 15140, 14316, 14694, 11250, + 14399, 14942, 14509, 11151, 15125, 13241, 14844, 5053, 13906, 14498, 10895, 15031, 15220, 15172, 15160, 13782, + 7336, 13280, 15245, 15190, 12154, 13669, 14612, 14388, 9114, 14866, 15180, 12447, 14803, 13802, 13786, 14692, + 13145, 13633, 10303, 14120, 14322, 9452, 13412, 14852, 15342, 12512, 12724, 14482, 14708, 13886, 14371, 13771, + 13399, 14432, 13585, 15028, 14173, 14347, 15077, 13051, 12138, 14267, 11114, 13958, 14961, 13844, 15272, 14643, + 12614, 13212, 13324, 15077, 14349, 14662, 14273, 10859, 5552, 10874, 13863, 14104, 13495, 14904, 11517, 15108, + 14999, 13078, 15106, 14639, 13345, 6413, 13979, 15205, 14332, 12647, 13386, 15312, 10976, 15089, 14089, 14370, + 8462, 13359, 14322, 14221, 14371, 14449, 13536, 12118, 14288, 13483, 12182, 13621, 15261, 14723, 12623, 13323, + 14817, 14529, 13499, 14350, 11063, 14315, 12718, 14574, 14180, 12505, 13109, 11207, 14801, 13781, 14392, 13549, + 14481, 13731, 15236, 9398, 15358, 12351, 14751, 14509, 11815, 14787, 9669, 14645, 15099, 14490, 13121, 15050, + 15076, 14756, 14958, 4978, 14576, 11508, 14746, 13161, 14408, 10590, 13429, 14945, 13993, 15033, 13183, 13027, + 13040, 15055, 14563, 15154, 14639, 15242, 14828, 12673, 12310, 15149, 12333, 11282, 14004, 15172, 11527, 15094, + 14725, 14941, 14435, 12561, 13832, 13489, 13186, 11553, 14927, 14861, 11684, 14518, 14331, 10694, 12473, 14447, + 14008, 15061, 14954, 15103, 14619, 13134, 14321, 14898, 10583, 15066, 15013, 14557, 14566, 14025, 13341, 14381, + 13587, 11771, 12011, 15320, 13462, 14503, 14944, 12535, 11976, 12949, 11427, 12383, 14405, 13595, 15002, 14362, + 13336, 13825, 11229, 13185, 14932, 13706, 14583, 15300, 15332, 14997, 11169, 15248, 14661, 9558, 
13210, 13015, + 15314, 14332, 14428, 15128, 12862, 15058, 14929, 14899, 8721, 14699, 14163, 14774, 14835, 14416, 14252, 12609, + 11329, 13053, 11842, 15323, 12010, 13224, 12629, 15297, 12422, 11133, 9125, 11724, 14585, 14376, 14733, 14556, + 11591, 12995, 14556, 14822, 14914, 15332, 13424, 14846, 13522, 14947, 9886, 15179, 11710, 14524, 12604, 13600, + 14898, 14863, 14901, 9668, 13991, 15090, 14553, 9288, 13662, 12807, 13881, 15143, 14591, 10092, 15358, 9907, + 15349, 14811, 14578, 14389, 15154, 13072, 13805, 13115, 14879, 14902, 14466, 14540, 9926, 14475, 13389, 14468, + 14489, 14673, 14794, 13494, 15219, 15314, 14266, 12104, 14068, 12709, 13135, 15088, 15118, 14229, 15145, 13771, + 14614, 14426, 14336, 14375, 15264, 13925, 13501, 14613, 14973, 13381, 15025, 14770, 11856, 15035, 13759, 14437, + 14171, 15349, 14474, 14662, 13505, 15329, 15210, 15353, 13535, 12864, 14658, 14968, 9562, 13352, 12356, 13565, + 14481, 15092, 12920, 15290, 14046, 10403, 14299, 13885, 9128, 13798, 14575, 14484, 14728, 14471, 14678, 15356, + 13955, 14939, 15149, 14775, 15303, 14614, 14530, 13890, 14051, 14869, 15253, 12810, 12328, 14590, 13489, 13653, + 14448, 15013, 11047, 13979, 12332, 14777, 14414, 15286, 12226, 14359, 13505, 13476, 13932, 14273, 14620, 14246, + 14987, 13774, 14562, 12418, 15303, 13383, 15006, 11992, 13652, 14342, 15025, 13315, 13500, 12981, 13108, 14252, + 14185, 14384, 14865, 12340, 13978, 14106, 12372, 14367, 10200, 15157, 13088, 14517, 15146, 14143, 14497, 13560, + 12995, 13539, 14724, 14606, 13340, 13378, 13432, 15143, 13249, 13229, 13168, 13910, 14061, 14456, 14896, 12972, + 13595, 13416, 13143, 9473, 14306, 13768, 15134, 13998, 10894, 15136, 12425, 13283, 11400, 13338, 12118, 12982, + 11692, 15215, 14555, 10674, 14734, 12815, 13038, 14365, 7730, 14605, 12123, 14907, 12370, 14712, 14057, 11671, + 14851, 14573, 14218, 11442, 14831, 14930, 13150, 12025, 15170, 7647, 14724, 14542, 14814, 14375, 14807, 13142, + 10157, 14206, 14185, 15359, 15121, 15264, 15117, 12578, 
12061, 14273, 15339, 15092, 12803, 13734, 12847, 13867, + 11298, 13600, 13421, 14637, 11295, 15278, 13706, 14380, 11848, 14498, 15301, 15005, 14836, 14136, 14218, 14496, + 12261, 15284, 12586, 12621, 15127, 12804, 13534, 14163, 12599, 12975, 14403, 14132, 13920, 14636, 14395, 14503, + 11160, 15052, 14894, 13730, 13449, 14626, 13776, 13444, 12879, 13214, 13681, 14877, 14295, 14621, 13375, 8695, + 14774, 14179, 14221, 15341, 12290, 14558, 12476, 11462, 13847, 14278, 14174, 14092, 15192, 15254, 15205, 14700, + 14907, 10047, 12606, 12503, 14393, 14854, 14327, 15155, 13000, 10717, 15247, 13657, 13965, 15243, 14989, 13768, + 15198, 11511, 13788, 12911, 13993, 13693, 12862, 14613, 15056, 15244, 14444, 15174, 15312, 15133, 14859, 14225, + 14558, 14003, 10672, 14388, 14487, 15001, 15022, 14634, 13742, 12429, 13937, 15102, 12090, 15326, 13650, 13590, + 13648, 10701, 14528, 15178, 14926, 14584, 13916, 13144, 10598, 15269, 14247, 12646, 15283, 14202, 14649, 14873, + 15252, 14565, 14099, 14651, 15105, 12658, 14767, 12639, 13930, 13290, 14558, 13149, 10365, 13507, 14337, 14605, + }; + uint16_t data[16 * 2 * 2 * 16] = { + 14422, 14250, 15207, 11368, 13196, 12957, 15106, 15099, 15047, 13771, 13891, 14758, 13298, 13411, 15283, 15351, + 14551, 14307, 14721, 14255, 13352, 15201, 15191, 13045, 12146, 13419, 14884, 13352, 14843, 14356, 10607, 15253, + 13040, 15055, 14563, 15154, 14639, 15242, 14828, 12673, 12310, 15149, 12333, 11282, 14004, 15172, 11527, 15094, + 14987, 13774, 14562, 12418, 15303, 13383, 15006, 11992, 13652, 14342, 15025, 13315, 13500, 12981, 13108, 14252, + 13386, 13586, 12661, 14761, 15025, 10628, 12072, 14528, 14866, 13804, 15034, 14930, 14536, 13776, 11278, 10879, + 12948, 13749, 13930, 14144, 9963, 15200, 14364, 14952, 14652, 15188, 14500, 14558, 15289, 14978, 10959, 14101, + 14725, 14941, 14435, 12561, 13832, 13489, 13186, 11553, 14927, 14861, 11684, 14518, 14331, 10694, 12473, 14447, + 14185, 14384, 14865, 12340, 13978, 14106, 12372, 14367, 10200, 15157, 13088, 
14517, 15146, 14143, 14497, 13560, + 12538, 13720, 14793, 8244, 14509, 15201, 14563, 12649, 15320, 14814, 14229, 13811, 13734, 13795, 13354, 12915, + 15131, 14699, 15179, 14896, 14614, 13448, 14463, 12326, 13099, 13035, 12361, 13939, 15216, 15031, 15104, 15122, + 14008, 15061, 14954, 15103, 14619, 13134, 14321, 14898, 10583, 15066, 15013, 14557, 14566, 14025, 13341, 14381, + 12995, 13539, 14724, 14606, 13340, 13378, 13432, 15143, 13249, 13229, 13168, 13910, 14061, 14456, 14896, 12972, + 13498, 14218, 14748, 9860, 13268, 11456, 14650, 14749, 14957, 13538, 14260, 15152, 14451, 14932, 12831, 14326, + 14754, 14837, 14295, 12738, 13007, 13385, 13712, 13448, 14487, 13904, 14940, 12247, 15013, 13784, 15169, 11543, + 13587, 11771, 12011, 15320, 13462, 14503, 14944, 12535, 11976, 12949, 11427, 12383, 14405, 13595, 15002, 14362, + 13595, 13416, 13143, 9473, 14306, 13768, 15134, 13998, 10894, 15136, 12425, 13283, 11400, 13338, 12118, 12982, + 13007, 14492, 14752, 15121, 14798, 14508, 14502, 14809, 13826, 15346, 14598, 13712, 14560, 14605, 14954, 14740, + 15351, 14657, 15047, 14243, 14234, 14986, 15288, 9009, 14717, 13685, 13416, 12043, 14874, 14362, 12565, 15139, + 13336, 13825, 11229, 13185, 14932, 13706, 14583, 15300, 15332, 14997, 11169, 15248, 14661, 9558, 13210, 13015, + 11692, 15215, 14555, 10674, 14734, 12815, 13038, 14365, 7730, 14605, 12123, 14907, 12370, 14712, 14057, 11671, + 13011, 13442, 14945, 13182, 15009, 8704, 12211, 14842, 15205, 12795, 10772, 13892, 12902, 14761, 14601, 12663, + 15351, 11442, 14669, 12929, 15084, 12574, 12783, 14385, 10838, 14537, 14440, 14713, 15140, 14316, 14694, 11250, + 15314, 14332, 14428, 15128, 12862, 15058, 14929, 14899, 8721, 14699, 14163, 14774, 14835, 14416, 14252, 12609, + 14851, 14573, 14218, 11442, 14831, 14930, 13150, 12025, 15170, 7647, 14724, 14542, 14814, 14375, 14807, 13142, + 13702, 14580, 15322, 14745, 14750, 14264, 14433, 13854, 14778, 15011, 14745, 14599, 13430, 9216, 14570, 14514, + 14399, 14942, 14509, 11151, 15125, 
13241, 14844, 5053, 13906, 14498, 10895, 15031, 15220, 15172, 15160, 13782, + 11329, 13053, 11842, 15323, 12010, 13224, 12629, 15297, 12422, 11133, 9125, 11724, 14585, 14376, 14733, 14556, + 10157, 14206, 14185, 15359, 15121, 15264, 15117, 12578, 12061, 14273, 15339, 15092, 12803, 13734, 12847, 13867, + 15336, 9439, 15021, 15107, 14006, 12950, 13622, 14707, 14981, 14850, 13337, 13280, 15124, 10206, 13412, 13832, + 7336, 13280, 15245, 15190, 12154, 13669, 14612, 14388, 9114, 14866, 15180, 12447, 14803, 13802, 13786, 14692, + 11591, 12995, 14556, 14822, 14914, 15332, 13424, 14846, 13522, 14947, 9886, 15179, 11710, 14524, 12604, 13600, + 11298, 13600, 13421, 14637, 11295, 15278, 13706, 14380, 11848, 14498, 15301, 15005, 14836, 14136, 14218, 14496, + 15303, 14455, 15079, 12787, 14613, 14302, 15036, 13536, 15243, 13739, 15259, 15099, 13211, 14770, 15097, 14402, + 13145, 13633, 10303, 14120, 14322, 9452, 13412, 14852, 15342, 12512, 12724, 14482, 14708, 13886, 14371, 13771, + 14898, 14863, 14901, 9668, 13991, 15090, 14553, 9288, 13662, 12807, 13881, 15143, 14591, 10092, 15358, 9907, + 12261, 15284, 12586, 12621, 15127, 12804, 13534, 14163, 12599, 12975, 14403, 14132, 13920, 14636, 14395, 14503, + 15204, 14955, 14621, 12097, 15083, 14119, 14814, 4005, 10998, 14856, 13380, 14647, 14540, 14008, 11346, 14363, + 13399, 14432, 13585, 15028, 14173, 14347, 15077, 13051, 12138, 14267, 11114, 13958, 14961, 13844, 15272, 14643, + 15349, 14811, 14578, 14389, 15154, 13072, 13805, 13115, 14879, 14902, 14466, 14540, 9926, 14475, 13389, 14468, + 11160, 15052, 14894, 13730, 13449, 14626, 13776, 13444, 12879, 13214, 13681, 14877, 14295, 14621, 13375, 8695, + 14313, 15108, 6776, 15180, 14647, 13917, 14629, 13412, 13480, 10439, 14776, 14563, 15328, 14777, 14436, 14902, + 12614, 13212, 13324, 15077, 14349, 14662, 14273, 10859, 5552, 10874, 13863, 14104, 13495, 14904, 11517, 15108, + 14489, 14673, 14794, 13494, 15219, 15314, 14266, 12104, 14068, 12709, 13135, 15088, 15118, 14229, 15145, 13771, 
+ 14774, 14179, 14221, 15341, 12290, 14558, 12476, 11462, 13847, 14278, 14174, 14092, 15192, 15254, 15205, 14700, + 15255, 14241, 13324, 15082, 14738, 15265, 14657, 13571, 15280, 15082, 9338, 12341, 14991, 14847, 14948, 14037, + 14999, 13078, 15106, 14639, 13345, 6413, 13979, 15205, 14332, 12647, 13386, 15312, 10976, 15089, 14089, 14370, + 14614, 14426, 14336, 14375, 15264, 13925, 13501, 14613, 14973, 13381, 15025, 14770, 11856, 15035, 13759, 14437, + 14907, 10047, 12606, 12503, 14393, 14854, 14327, 15155, 13000, 10717, 15247, 13657, 13965, 15243, 14989, 13768, + 13624, 14947, 14968, 12058, 14060, 13434, 14349, 14972, 12716, 14287, 12350, 12619, 15263, 13831, 15160, 15032, + 8462, 13359, 14322, 14221, 14371, 14449, 13536, 12118, 14288, 13483, 12182, 13621, 15261, 14723, 12623, 13323, + 14171, 15349, 14474, 14662, 13505, 15329, 15210, 15353, 13535, 12864, 14658, 14968, 9562, 13352, 12356, 13565, + 15198, 11511, 13788, 12911, 13993, 13693, 12862, 14613, 15056, 15244, 14444, 15174, 15312, 15133, 14859, 14225, + 14591, 12370, 13537, 11343, 13909, 12663, 15078, 14938, 14409, 13852, 12414, 13903, 13825, 13958, 10736, 11772, + 14817, 14529, 13499, 14350, 11063, 14315, 12718, 14574, 14180, 12505, 13109, 11207, 14801, 13781, 14392, 13549, + 14481, 15092, 12920, 15290, 14046, 10403, 14299, 13885, 9128, 13798, 14575, 14484, 14728, 14471, 14678, 15356, + 14558, 14003, 10672, 14388, 14487, 15001, 15022, 14634, 13742, 12429, 13937, 15102, 12090, 15326, 13650, 13590, + 13429, 13236, 15033, 13910, 15008, 14564, 14344, 14546, 15227, 14491, 14858, 14957, 12725, 13536, 13030, 12562, + 14481, 13731, 15236, 9398, 15358, 12351, 14751, 14509, 11815, 14787, 9669, 14645, 15099, 14490, 13121, 15050, + 13955, 14939, 15149, 14775, 15303, 14614, 14530, 13890, 14051, 14869, 15253, 12810, 12328, 14590, 13489, 13653, + 13648, 10701, 14528, 15178, 14926, 14584, 13916, 13144, 10598, 15269, 14247, 12646, 15283, 14202, 14649, 14873, + 15163, 13294, 15229, 15146, 11977, 14509, 15259, 12425, 15334, 
13340, 15329, 15071, 14342, 14555, 14473, 15054, + 15076, 14756, 14958, 4978, 14576, 11508, 14746, 13161, 14408, 10590, 13429, 14945, 13993, 15033, 13183, 13027, + 14448, 15013, 11047, 13979, 12332, 14777, 14414, 15286, 12226, 14359, 13505, 13476, 13932, 14273, 14620, 14246, + 15252, 14565, 14099, 14651, 15105, 12658, 14767, 12639, 13930, 13290, 14558, 13149, 10365, 13507, 14337, 14605, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_Z, {16, 2, 2, 16}, {4, 1, 16, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferFractalZ transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_4d)); + for (int i = 0; i < sizeof(data_4d) / sizeof(data_4d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_4d[i]); + } + Status status = + transfer.TransShape(args.src_format, args.src_shape, args.src_data_type, args.dst_format, args.dst_shape); + EXPECT_EQ(status, SUCCESS); +} + +TEST_F(UtestFormatTransferNhwcFz, nhwc_to_fracz_fp16_success_gt_cube) { + uint16_t data_4d[8 * 2 * 16 * 16] = { + 14643, 15084, 14775, 14950, 14594, 14803, 12177, 13120, 15030, 12525, 12640, 14817, 15252, 14692, 15054, 14655, + 13395, 13556, 14508, 12414, 14272, 14148, 15056, 14734, 12544, 14138, 14641, 14793, 8925, 14903, 14744, 15219, + 14346, 12140, 12310, 14303, 12541, 14031, 11419, 14800, 11586, 14817, 14949, 13869, 9911, 14768, 14748, 11469, + 8984, 13553, 15077, 11291, 14419, 14259, 12234, 15036, 14970, 14166, 13225, 15283, 11555, 10732, 14294, 13613, + 13563, 14758, 12310, 13452, 15248, 7913, 14820, 15188, 13607, 15097, 12595, 15041, 13419, 14927, 11680, 13391, + 14730, 15210, 14573, 14986, 14480, 14810, 14421, 14868, 10163, 14768, 10289, 11549, 15206, 13973, 13249, 14465, + 14496, 11267, 13298, 14689, 9725, 13593, 14101, 9950, 14135, 15068, 14225, 12521, 13628, 11982, 13805, 13082, + 15123, 14687, 8903, 14758, 15064, 14312, 14878, 13979, 14309, 13373, 14744, 14816, 15310, 8674, 9634, 12371, + 
13822, 13824, 14557, 10587, 14401, 15024, 14156, 14041, 13187, 14616, 15076, 13747, 13466, 13658, 15143, 13651, + 14175, 13921, 14447, 14076, 13601, 15003, 15049, 15102, 13701, 13614, 14557, 14391, 14514, 13575, 15138, 15103, + 15112, 14971, 14817, 14402, 14761, 9003, 14242, 14980, 14496, 14497, 14609, 13874, 8555, 13947, 12563, 10897, + 14602, 14543, 14369, 14684, 15008, 14482, 14680, 14910, 14616, 13475, 15173, 14966, 14472, 14434, 13850, 14037, + 14811, 14348, 15039, 14880, 10391, 12333, 15321, 14740, 15207, 14610, 14648, 12813, 6567, 14928, 14413, 14744, + 11330, 14223, 14667, 13822, 13416, 15092, 15163, 13721, 14985, 9430, 12456, 14628, 15093, 15078, 15046, 15078, + 14990, 14651, 14006, 13975, 15298, 14288, 14979, 13086, 14670, 13331, 12948, 14906, 15132, 15037, 14229, 14915, + 13694, 14670, 13931, 9961, 15073, 12960, 14769, 14151, 14950, 14695, 12619, 14347, 14390, 11545, 14696, 13679, + 15111, 12417, 14618, 12814, 13727, 14465, 14517, 14346, 14509, 13867, 15316, 13620, 13718, 13418, 13772, 12245, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14386, 13893, 14559, 12220, 11761, 14936, 13865, 14796, 14800, 14307, 14951, 15107, 14857, 14806, 15154, 9412, + 15010, 14921, 13512, 13793, 15309, 13425, 9729, 14167, 13513, 14683, 13483, 14709, 13920, 15026, 11308, 12400, + 
15305, 14339, 15090, 15119, 12392, 15007, 15177, 14208, 15306, 13599, 13798, 14864, 11722, 13015, 14789, 14611, + 14106, 12803, 15151, 15303, 14577, 13538, 14339, 13154, 14084, 14672, 14793, 12240, 15070, 15005, 12175, 12888, + 12691, 15117, 13446, 14515, 13913, 15123, 15252, 15002, 11511, 14787, 14912, 14650, 12035, 14548, 14950, 14458, + 14888, 14514, 12791, 12573, 14604, 14992, 13541, 13780, 14397, 7631, 14614, 14986, 11108, 9225, 13665, 11943, + 12347, 13665, 13279, 14423, 12845, 13661, 15214, 15317, 14435, 12791, 13900, 15242, 15298, 13969, 14826, 12584, + 13064, 14236, 15186, 14340, 14893, 10331, 15251, 13083, 13584, 14344, 12569, 15036, 14204, 14777, 13837, 9226, + 14390, 14256, 9829, 14674, 14495, 11709, 13436, 10770, 11661, 12662, 14120, 13109, 14908, 14266, 15293, 13191, + 15269, 12610, 11706, 15330, 15108, 13080, 13123, 15201, 13707, 10054, 15187, 14830, 9929, 14661, 13177, 14356, + 12342, 14773, 13089, 12318, 13166, 14760, 13345, 14645, 14567, 15115, 13404, 14136, 14565, 13476, 13556, 14580, + 13406, 15357, 14433, 15156, 14827, 13972, 14672, 13399, 12478, 14720, 13335, 14073, 14411, 14526, 12450, 14861, + 14134, 9725, 12565, 14915, 14003, 11550, 14061, 14535, 13853, 12453, 14490, 15016, 15297, 14140, 14761, 14240, + 13114, 14017, 15109, 14027, 14750, 15099, 14659, 15315, 15280, 13607, 15302, 12982, 14741, 13700, 14019, 14737, + 15142, 14341, 15349, 15248, 14658, 14385, 15234, 13992, 12674, 14441, 14450, 13356, 14453, 11286, 14279, 13864, + 13637, 13656, 14231, 15128, 13956, 13701, 14356, 13255, 14956, 14416, 14589, 10372, 13312, 13646, 12487, 14193, + 14248, 12300, 14051, 14149, 12979, 14556, 12855, 13393, 12614, 13063, 14960, 14242, 15221, 14739, 15318, 12088, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14043, 14715, 4461, 12136, 11320, 12141, 13421, 14144, 14654, 14438, 12365, 14905, 14475, 11313, 14675, 14024, + 15086, 14135, 14081, 12712, 13637, 14202, 15194, 10907, 14310, 13866, 12043, 14719, 12523, 12397, 11980, 14952, + 14791, 14589, 14889, 13021, 12165, 14606, 14915, 14481, 7432, 11318, 12487, 14494, 13898, 12595, 15251, 13996, + 15238, 14606, 13433, 13580, 14933, 15357, 14675, 15004, 15062, 13963, 14702, 15011, 12695, 13389, 12565, 6232, + 14874, 14986, 13113, 12631, 14683, 12689, 13168, 14350, 12712, 14651, 12576, 14268, 14113, 13122, 12348, 14734, + 13589, 11061, 14828, 15344, 15174, 14972, 11092, 13911, 15065, 13101, 9859, 14503, 15312, 15270, 13689, 9178, + 14120, 14380, 13990, 13286, 13716, 15054, 14484, 14296, 14432, 14359, 12895, 14532, 13144, 13461, 14701, 14595, + 10068, 14653, 15162, 13017, 12396, 12356, 13637, 14958, 15264, 13340, 14541, 11381, 13584, 13979, 12390, 13560, + 14273, 12290, 14310, 14260, 14359, 11935, 14459, 15121, 11360, 10629, 15080, 10805, 14465, 15294, 11377, 13617, + 13625, 14975, 13361, 15188, 11507, 13782, 13706, 14910, 15032, 14859, 14381, 14483, 14288, 14813, 15212, 15315, + 15223, 14809, 11125, 14758, 15244, 14876, 14124, 11921, 14989, 15058, 14938, 14835, 13565, 14624, 14678, 15205, + 14567, 13504, 14861, 11531, 14618, 10146, 13621, 14624, 14849, 14853, 11429, 14339, 11584, 14625, 14717, 13815, + 14536, 14510, 13687, 13496, 14323, 14679, 14930, 13994, 14464, 13903, 13502, 15069, 14769, 15098, 8267, 14274, + 15092, 12553, 15171, 15033, 14416, 12944, 13450, 7388, 14432, 13499, 
11582, 15087, 14321, 13927, 14848, 12613, + 14363, 14848, 14352, 15196, 13999, 15332, 14209, 14842, 14958, 12534, 14343, 13924, 13708, 13461, 11637, 14547, + 11127, 13365, 9418, 14569, 13315, 14468, 10755, 14684, 12758, 14080, 15053, 12528, 14937, 15230, 13987, 11713, + 14433, 13539, 13780, 7173, 14561, 12659, 12992, 11349, 12457, 14520, 14523, 13782, 12775, 14346, 15013, 13325, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11880, 11753, 6459, 14392, 13877, 15195, 14598, 14550, 13889, 14746, 11107, 13097, 14615, 9419, 13007, 13018, + 14222, 12796, 12392, 14443, 14882, 13277, 8521, 15181, 13088, 14393, 13834, 15332, 13605, 12444, 14993, 9433, + 13463, 10558, 10884, 13659, 13271, 14576, 14577, 14459, 14699, 12142, 15236, 9687, 14845, 13997, 12791, 13754, + 14956, 14343, 13564, 14853, 12144, 14074, 13545, 15078, 10321, 14195, 13709, 12695, 15156, 13280, 15276, 12365, + 15203, 12099, 13895, 13919, 14560, 14204, 13339, 14947, 13279, 14393, 14668, 14655, 14563, 15148, 15102, 15131, + 10412, 14766, 13574, 13560, 13926, 15086, 14189, 15212, 12819, 15044, 14025, 13684, 12454, 15232, 10332, 14054, + 12012, 12532, 13700, 14412, 14968, 13642, 13344, 14660, 15205, 15286, 12482, 14829, 14276, 14337, 12881, 14818, + 14140, 14478, 14003, 12802, 14959, 13420, 15192, 14788, 14340, 15351, 
13404, 9944, 14488, 13117, 10405, 15095, + 13808, 12943, 14496, 12012, 15045, 14421, 13980, 14862, 15299, 10098, 15161, 10924, 11461, 14357, 14699, 14415, + 15077, 13517, 12150, 13717, 10732, 5651, 12664, 14255, 12880, 13683, 13378, 14858, 7146, 11804, 13439, 12732, + 13152, 12775, 11870, 9128, 13044, 14974, 15290, 14926, 13213, 13608, 15171, 13437, 14144, 13658, 14529, 12541, + 13217, 14336, 14399, 13413, 13503, 14854, 14557, 13883, 13508, 13327, 14788, 15249, 15292, 14663, 13973, 13029, + 14428, 14215, 12959, 15012, 14580, 14184, 14571, 12924, 14133, 14731, 13555, 14168, 14746, 10015, 15212, 14626, + 14724, 12444, 12317, 12709, 14912, 12731, 13557, 13513, 14632, 14075, 12545, 12588, 15317, 14740, 12682, 12690, + 14808, 14686, 14691, 13338, 14594, 13349, 14735, 14704, 14174, 9442, 14914, 13437, 14599, 14152, 13606, 12522, + 14940, 13113, 14935, 14237, 13916, 14691, 14536, 15305, 14955, 12251, 15001, 13519, 13402, 14632, 13716, 14866, + 15112, 14792, 14770, 13838, 14739, 14985, 11484, 14655, 12182, 15192, 14306, 11545, 12635, 15232, 13368, 14235, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14911, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14571, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14660, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13027, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13376, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12388, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15116, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14953, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12801, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15198, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13575, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14810, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14546, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14914, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14220, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14375, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14571, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13694, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13834, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14864, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15179, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14566, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14786, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14435, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14856, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14639, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11209, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, + 14472, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14804, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11788, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11765, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12954, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14575, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12897, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14459, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13661, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14339, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14353, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14364, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14557, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15342, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14197, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14118, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14933, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13492, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12510, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14703, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13792, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15041, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0, 0, 0, 0, 0, 0, + 15284, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12352, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14824, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13448, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11359, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14515, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15269, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14357, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15224, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13933, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14652, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 12772, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 13786, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14769, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 10686, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 14445, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 15254, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 11142, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 
0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, + }; + uint16_t data[17 * 2 * 2 * 17] = { + 14643, 15084, 14775, 14950, 14594, 14803, 12177, 13120, 15030, 12525, 12640, 14817, 15252, 14692, 15054, 14655, + 14911, 14386, 13893, 14559, 12220, 11761, 14936, 13865, 14796, 14800, 14307, 14951, 15107, 14857, 14806, 15154, + 9412, 13694, 14043, 14715, 4461, 12136, 11320, 12141, 13421, 14144, 14654, 14438, 12365, 14905, 14475, 11313, + 14675, 14024, 14459, 11880, 11753, 6459, 14392, 13877, 15195, 14598, 14550, 13889, 14746, 11107, 13097, 14615, + 9419, 13007, 13018, 12352, 13395, 13556, 14508, 12414, 14272, 14148, 15056, 14734, 12544, 14138, 14641, 14793, + 8925, 14903, 14744, 15219, 14571, 15010, 14921, 13512, 13793, 15309, 13425, 9729, 14167, 13513, 14683, 13483, + 14709, 13920, 15026, 11308, 12400, 13834, 15086, 14135, 14081, 12712, 13637, 14202, 15194, 10907, 14310, 13866, + 12043, 14719, 12523, 12397, 11980, 14952, 13661, 14222, 12796, 12392, 14443, 14882, 13277, 8521, 15181, 13088, + 14393, 13834, 15332, 13605, 12444, 14993, 9433, 14824, 14346, 12140, 12310, 14303, 12541, 14031, 11419, 14800, + 11586, 14817, 14949, 13869, 9911, 14768, 14748, 11469, 14660, 15305, 14339, 15090, 15119, 12392, 15007, 15177, + 14208, 15306, 13599, 13798, 14864, 11722, 13015, 14789, 14611, 14864, 14791, 14589, 14889, 13021, 12165, 14606, + 14915, 14481, 7432, 11318, 12487, 14494, 13898, 12595, 15251, 13996, 14339, 13463, 10558, 10884, 13659, 13271, + 14576, 14577, 14459, 14699, 12142, 15236, 9687, 14845, 13997, 12791, 13754, 13448, 8984, 13553, 15077, 
11291, + 14419, 14259, 12234, 15036, 14970, 14166, 13225, 15283, 11555, 10732, 14294, 13613, 13027, 14106, 12803, 15151, + 15303, 14577, 13538, 14339, 13154, 14084, 14672, 14793, 12240, 15070, 15005, 12175, 12888, 15179, 15238, 14606, + 13433, 13580, 14933, 15357, 14675, 15004, 15062, 13963, 14702, 15011, 12695, 13389, 12565, 6232, 14142, 14956, + 14343, 13564, 14853, 12144, 14074, 13545, 15078, 10321, 14195, 13709, 12695, 15156, 13280, 15276, 12365, 11359, + 13563, 14758, 12310, 13452, 15248, 7913, 14820, 15188, 13607, 15097, 12595, 15041, 13419, 14927, 11680, 13391, + 13376, 12691, 15117, 13446, 14515, 13913, 15123, 15252, 15002, 11511, 14787, 14912, 14650, 12035, 14548, 14950, + 14458, 14566, 14874, 14986, 13113, 12631, 14683, 12689, 13168, 14350, 12712, 14651, 12576, 14268, 14113, 13122, + 12348, 14734, 14353, 15203, 12099, 13895, 13919, 14560, 14204, 13339, 14947, 13279, 14393, 14668, 14655, 14563, + 15148, 15102, 15131, 14515, 14730, 15210, 14573, 14986, 14480, 14810, 14421, 14868, 10163, 14768, 10289, 11549, + 15206, 13973, 13249, 14465, 12388, 14888, 14514, 12791, 12573, 14604, 14992, 13541, 13780, 14397, 7631, 14614, + 14986, 11108, 9225, 13665, 11943, 14786, 13589, 11061, 14828, 15344, 15174, 14972, 11092, 13911, 15065, 13101, + 9859, 14503, 15312, 15270, 13689, 9178, 14364, 10412, 14766, 13574, 13560, 13926, 15086, 14189, 15212, 12819, + 15044, 14025, 13684, 12454, 15232, 10332, 14054, 15269, 14496, 11267, 13298, 14689, 9725, 13593, 14101, 9950, + 14135, 15068, 14225, 12521, 13628, 11982, 13805, 13082, 15116, 12347, 13665, 13279, 14423, 12845, 13661, 15214, + 15317, 14435, 12791, 13900, 15242, 15298, 13969, 14826, 12584, 14435, 14120, 14380, 13990, 13286, 13716, 15054, + 14484, 14296, 14432, 14359, 12895, 14532, 13144, 13461, 14701, 14595, 14557, 12012, 12532, 13700, 14412, 14968, + 13642, 13344, 14660, 15205, 15286, 12482, 14829, 14276, 14337, 12881, 14818, 14357, 15123, 14687, 8903, 14758, + 15064, 14312, 14878, 13979, 14309, 13373, 14744, 14816, 
15310, 8674, 9634, 12371, 14953, 13064, 14236, 15186, + 14340, 14893, 10331, 15251, 13083, 13584, 14344, 12569, 15036, 14204, 14777, 13837, 9226, 14856, 10068, 14653, + 15162, 13017, 12396, 12356, 13637, 14958, 15264, 13340, 14541, 11381, 13584, 13979, 12390, 13560, 15342, 14140, + 14478, 14003, 12802, 14959, 13420, 15192, 14788, 14340, 15351, 13404, 9944, 14488, 13117, 10405, 15095, 15224, + 13822, 13824, 14557, 10587, 14401, 15024, 14156, 14041, 13187, 14616, 15076, 13747, 13466, 13658, 15143, 13651, + 12801, 14390, 14256, 9829, 14674, 14495, 11709, 13436, 10770, 11661, 12662, 14120, 13109, 14908, 14266, 15293, + 13191, 14639, 14273, 12290, 14310, 14260, 14359, 11935, 14459, 15121, 11360, 10629, 15080, 10805, 14465, 15294, + 11377, 13617, 14197, 13808, 12943, 14496, 12012, 15045, 14421, 13980, 14862, 15299, 10098, 15161, 10924, 11461, + 14357, 14699, 14415, 13933, 14175, 13921, 14447, 14076, 13601, 15003, 15049, 15102, 13701, 13614, 14557, 14391, + 14514, 13575, 15138, 15103, 15198, 15269, 12610, 11706, 15330, 15108, 13080, 13123, 15201, 13707, 10054, 15187, + 14830, 9929, 14661, 13177, 14356, 11209, 13625, 14975, 13361, 15188, 11507, 13782, 13706, 14910, 15032, 14859, + 14381, 14483, 14288, 14813, 15212, 15315, 14118, 15077, 13517, 12150, 13717, 10732, 5651, 12664, 14255, 12880, + 13683, 13378, 14858, 7146, 11804, 13439, 12732, 14652, 15112, 14971, 14817, 14402, 14761, 9003, 14242, 14980, + 14496, 14497, 14609, 13874, 8555, 13947, 12563, 10897, 13575, 12342, 14773, 13089, 12318, 13166, 14760, 13345, + 14645, 14567, 15115, 13404, 14136, 14565, 13476, 13556, 14580, 14472, 15223, 14809, 11125, 14758, 15244, 14876, + 14124, 11921, 14989, 15058, 14938, 14835, 13565, 14624, 14678, 15205, 14933, 13152, 12775, 11870, 9128, 13044, + 14974, 15290, 14926, 13213, 13608, 15171, 13437, 14144, 13658, 14529, 12541, 12772, 14602, 14543, 14369, 14684, + 15008, 14482, 14680, 14910, 14616, 13475, 15173, 14966, 14472, 14434, 13850, 14037, 14810, 13406, 15357, 14433, + 15156, 14827, 
13972, 14672, 13399, 12478, 14720, 13335, 14073, 14411, 14526, 12450, 14861, 14804, 14567, 13504, + 14861, 11531, 14618, 10146, 13621, 14624, 14849, 14853, 11429, 14339, 11584, 14625, 14717, 13815, 13492, 13217, + 14336, 14399, 13413, 13503, 14854, 14557, 13883, 13508, 13327, 14788, 15249, 15292, 14663, 13973, 13029, 13786, + 14811, 14348, 15039, 14880, 10391, 12333, 15321, 14740, 15207, 14610, 14648, 12813, 6567, 14928, 14413, 14744, + 14546, 14134, 9725, 12565, 14915, 14003, 11550, 14061, 14535, 13853, 12453, 14490, 15016, 15297, 14140, 14761, + 14240, 11788, 14536, 14510, 13687, 13496, 14323, 14679, 14930, 13994, 14464, 13903, 13502, 15069, 14769, 15098, + 8267, 14274, 12510, 14428, 14215, 12959, 15012, 14580, 14184, 14571, 12924, 14133, 14731, 13555, 14168, 14746, + 10015, 15212, 14626, 14769, 11330, 14223, 14667, 13822, 13416, 15092, 15163, 13721, 14985, 9430, 12456, 14628, + 15093, 15078, 15046, 15078, 14914, 13114, 14017, 15109, 14027, 14750, 15099, 14659, 15315, 15280, 13607, 15302, + 12982, 14741, 13700, 14019, 14737, 11765, 15092, 12553, 15171, 15033, 14416, 12944, 13450, 7388, 14432, 13499, + 11582, 15087, 14321, 13927, 14848, 12613, 14703, 14724, 12444, 12317, 12709, 14912, 12731, 13557, 13513, 14632, + 14075, 12545, 12588, 15317, 14740, 12682, 12690, 10686, 14990, 14651, 14006, 13975, 15298, 14288, 14979, 13086, + 14670, 13331, 12948, 14906, 15132, 15037, 14229, 14915, 14220, 15142, 14341, 15349, 15248, 14658, 14385, 15234, + 13992, 12674, 14441, 14450, 13356, 14453, 11286, 14279, 13864, 12954, 14363, 14848, 14352, 15196, 13999, 15332, + 14209, 14842, 14958, 12534, 14343, 13924, 13708, 13461, 11637, 14547, 13792, 14808, 14686, 14691, 13338, 14594, + 13349, 14735, 14704, 14174, 9442, 14914, 13437, 14599, 14152, 13606, 12522, 14445, 13694, 14670, 13931, 9961, + 15073, 12960, 14769, 14151, 14950, 14695, 12619, 14347, 14390, 11545, 14696, 13679, 14375, 13637, 13656, 14231, + 15128, 13956, 13701, 14356, 13255, 14956, 14416, 14589, 10372, 13312, 13646, 
12487, 14193, 14575, 11127, 13365, + 9418, 14569, 13315, 14468, 10755, 14684, 12758, 14080, 15053, 12528, 14937, 15230, 13987, 11713, 15041, 14940, + 13113, 14935, 14237, 13916, 14691, 14536, 15305, 14955, 12251, 15001, 13519, 13402, 14632, 13716, 14866, 15254, + 15111, 12417, 14618, 12814, 13727, 14465, 14517, 14346, 14509, 13867, 15316, 13620, 13718, 13418, 13772, 12245, + 14571, 14248, 12300, 14051, 14149, 12979, 14556, 12855, 13393, 12614, 13063, 14960, 14242, 15221, 14739, 15318, + 12088, 12897, 14433, 13539, 13780, 7173, 14561, 12659, 12992, 11349, 12457, 14520, 14523, 13782, 12775, 14346, + 15013, 13325, 15284, 15112, 14792, 14770, 13838, 14739, 14985, 11484, 14655, 12182, 15192, 14306, 11545, 12635, + 15232, 13368, 14235, 11142, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_Z, {17, 2, 2, 17}, {8, 2, 16, 16}, DT_FLOAT16}; + TransResult result; + + FormatTransferFractalZ transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_4d)); + for (int i = 0; i < sizeof(data_4d) / sizeof(data_4d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_4d[i]); + } +} + +TEST_F(UtestFormatTransferNhwcFz, nhwc_to_fracz_fp32_success_eq_cube) { + float data_4d[4 * 1 * 16 * 16] = { + 0.484169822867354, 0.8140947249809813, 0.4940455204015697, 0.7135851157134282, 0.8768734157203649, + 0.4184033435957544, 0.44292626120971623, 0.696532137919516, 0.9171321370698801, 0.7151284988967475, + 0.5109347861993496, 0.38142119707414746, 0.6698099769064979, 0.11820154777502989, 0.13012960479351465, + 0.16817089869937873, 0.5302578028349337, 0.7252510992951486, 0.8213662663451495, 0.255865605133453, + 0.5347808590628998, 0.03484423993654684, 0.4053151604997237, 0.1268756305743911, 0.7512214677373925, + 0.7021691682264435, 0.5790723649519143, 0.19175327358031247, 0.5748858471708752, 0.34613167229455966, + 0.9081476470039521, 0.6213341986714184, 0.8738901859075797, 0.4191500219416586, 
0.9272377212501333, + 0.6088513158752427, 0.43503086957072457, 0.7568802524830642, 0.6218604254226138, 0.4893048459594117, + 0.020152542775963678, 0.7267242398249997, 0.9274966736416573, 0.49200037295381294, 0.42692100017651613, + 0.7892621068774159, 0.6845597457970014, 0.8711448933002091, 0.9718143171574413, 0.6597628763384622, + 0.34484306915489416, 0.880431818533278, 0.7955875470049112, 0.8757665484415526, 0.9915924406526361, + 0.04436704716518447, 0.02262359380765988, 0.7052472895837604, 0.7057299524522475, 0.3194782791822033, + 0.09871902909610286, 0.364619480384346, 0.06943275085154965, 0.4214138425069305, 0.6434162586445695, + 0.5323110978433342, 0.1248556733436923, 0.15902590556482255, 0.4712411793898579, 0.7894290004428436, + 0.5032833161606196, 0.527059468896646, 0.9191006481472115, 0.25305549423863427, 0.6340006741168577, + 0.358184464955455, 0.25972912049066155, 0.18926991931768677, 0.27595176819944, 0.8161194869680272, + 0.6487804290051798, 0.17197756477570048, 0.1792393744184949, 0.7262808230390461, 0.5266352244919847, + 0.3774751618211455, 0.14059289117160334, 0.08529920580377037, 0.28344872274215105, 0.9959953800069987, + 0.08464059391988354, 0.6670563867386871, 0.4687389304005585, 0.532305594930434, 0.28410362935440214, + 0.07129098950073931, 0.90130643092616, 0.1863314734620497, 0.7182836382686819, 0.05869761827209363, + 0.43215310837091325, 0.5109770631402548, 0.8507620188714253, 0.9448034463404849, 0.8204290492967814, + 0.30175616425548013, 0.6347738558965675, 0.7115068243493056, 0.9106453434867423, 0.7081724799882917, + 0.9480983751448969, 0.30683290902116644, 0.13870348711282365, 0.882637588475232, 0.8911520456086311, + 0.13836682539246214, 0.9507965084267185, 0.5052382254338335, 0.45961247912692105, 0.979182256053379, + 0.4013111553698335, 0.7788395612306059, 0.8062745586439758, 0.19584512860236825, 0.275650733936018, + 0.23472653953495393, 0.5268011060669119, 0.11615458325117867, 0.37003145722910347, 0.7429488863605441, + 0.7499741271194941, 
0.849429488346799, 0.764998070770062, 0.8627353540448067, 0.2639189465037146, + 0.18543192561486244, 0.8406963006607173, 0.29867265092172757, 0.5830874946322163, 0.7266542276089016, + 0.47732859671105865, 0.5276495401100896, 0.0285892476174624, 0.10154908386840933, 0.4088412035484068, + 0.7434536979800712, 0.9525161199007488, 0.07138799479311309, 0.8519420240978587, 0.08747097413510685, + 0.06259133820158325, 0.6102164785932812, 0.5272535740517759, 0.3882790321273053, 0.5219871401239666, + 0.08342830725989958, 0.06992376312633286, 0.45588347082683367, 0.8507181024497145, 0.8168001145505971, + 0.6288218440807817, 0.5627797837213618, 0.8642380932712407, 0.6296224131150504, 0.810297385035068, + 0.9521660572032933, 0.006989866947720524, 0.15360690769811158, 0.609592365107798, 0.856111276058204, + 0.5569449989810563, 0.6358938242284586, 0.43974619938434945, 0.23369648280955435, 0.17610792852502044, + 0.5393700616427376, 0.14414511952710007, 0.9499336265300194, 0.6444587977800735, 0.7418623715827386, + 0.044802260433499996, 0.850581483363445, 0.05712457779883895, 0.15282854721354788, 0.3482893221081773, + 0.6247260064132429, 0.42099978778043057, 0.7479237929881165, 0.02099064732615097, 0.013098766701113651, + 0.7226056703495463, 0.04820012498442583, 0.30512165089613275, 0.7316809143395435, 0.33837084272328344, + 0.031181701520131222, 0.8998729244301411, 0.13963009532040604, 0.5524300751373528, 0.998751142570437, + 0.8079412199511319, 0.38764559152084754, 0.3210831483519665, 0.7304185865067885, 0.5822602684215884, + 0.6364590811085312, 0.10627778088019713, 0.5399985372878428, 0.3140212188001139, 0.6511943391734281, + 0.7376690823137533, 0.16614528063403933, 0.45753024916507445, 0.6563453150496379, 0.011057777761439902, + 0.0018264730571969645, 0.3140058709073794, 0.8479357203115255, 0.9718814368259422, 0.2938044793936031, + 0.389604645965164, 0.7259980798553759, 0.384597064894432, 0.6461406690633517, 0.28149855317178163, + 0.03265235541107758, 0.7517419899840844, 
0.3364005732928873, 0.7329527122940036, 0.43759767242690906, + 0.3172137189567925, 0.2707815810278883, 0.6642248201697422, 0.3972911147544086, 0.009127503808928017, + 0.1234374452587974, 0.39970292448775213, 0.11989802753965029, 0.9927165644356675, 0.8609568773974031, + 0.8160834394485424, 0.8645970999084115, 0.43923002522405574, 0.3760052855173096, 0.08902326927063253, + 0.09948678314542847, 0.3578210615036086, 0.8485940282988038, 0.2516468752579376, 0.7365499301475807, + 0.5612758649671108, 0.7859804827616864, 0.8851363067086282, 0.8935474690220618, 0.5336384648157917, + 0.23839322063516444, 0.8176105543815939, 0.629909392274251, 0.31313221345906006, 0.3221421451570968, + 0.5818901337492731, 0.9534404285747669, 0.30689058064437647, 0.7213959303267752, 0.6899507455723445, + 0.1145659733805463, 0.36766351628538607, 0.882482457649216, 0.8273160847663066, 0.344437602133445, + 0.6831369390963725, 0.15697402810153005, 0.7568275537113736, 0.3692026509026701, 0.25344774469478004, + 0.24956690401767134, 0.012836876914660733, 0.985850540555467, 0.8626773871697432, 0.5476182090476109, + 0.3623970160812814, 0.9376822616892581, 0.14087495346759915, 0.23523285339278033, 0.037762342255319714, + 0.020689168902525368, 0.6838812404349178, 0.5299351074122371, 0.7796551239198329, 0.46483996491976176, + 0.3896989084777407, 0.0246526241822681, 0.150842690954615, 0.2168947002788526, 0.1149972560028748, + 0.8984092864643223, 0.12025595226036978, 0.39457447774297594, 0.27976746576883327, 0.9120741897014442, + 0.07701027303624552, 0.8145938381183272, 0.988408599764702, 0.9891205523056846, 0.38958399171559976, + 0.19562724561171574, 0.7406668247925765, 0.5176452064093667, 0.1833071142826258, 0.981011435762899, + 0.1038485455897169, 0.6360424773286126, 0.07505098129761023, 0.7110748432628814, 0.23864180641973576, + 0.6311090911472507, 0.2898434465648123, 0.024685643880116404, 0.1206400134468234, 0.9642954826184025, + 0.1307237743057954, 0.3682368837401174, 0.10696899485835165, 
0.4381449790035217, 0.8778405603604115, + 0.33026410718319044, 0.43155412970009344, 0.688941263482083, 0.6484615948673739, 0.20103619018128005, + 0.08393097766033852, 0.8913511709861782, 0.39172683394484564, 0.8390068848742098, 0.8242315116115289, + 0.016933814177084616, 0.09858800624814978, 0.4866184190503341, 0.5797421505352837, 0.7275188051657857, + 0.8257101240399874, 0.2805336430088575, 0.5360449509204634, 0.48837931206904084, 0.9312020866617485, + 0.7410346712070657, 0.08381751355840361, 0.622083699685216, 0.5182872441017575, 0.20300436297731594, + 0.8408847205120992, 0.4065634267823589, 0.538625415898359, 0.24508951206516771, 0.6706828695669036, + 0.8841876638401157, 0.14646094171092727, 0.0873651816168074, 0.3789152991251008, 0.7514417784728014, + 0.36660529419052745, 0.9058668780554411, 0.3905648290701539, 0.46346477300678546, 0.1784939685332635, + 0.5505805956291808, 0.57458139202536, 0.37655520125792563, 0.21289003081542524, 0.41033428588999377, + 0.13922941757955198, 0.7339905698509648, 0.6420365465526778, 0.5935365043786606, 0.031448611228246826, + 0.6446584269225506, 0.3168819180982111, 0.6919296394674304, 0.7611936857915317, 0.6581989409515427, + 0.7119379353798316, 0.19877568188642458, 0.29773337106911635, 0.6353172249870478, 0.7355153876080694, + 0.5965273658488094, 0.9873670681950693, 0.4231813740764955, 0.2367076098094284, 0.7766733432911771, + 0.6429580955121145, 0.8307521364617134, 0.6525257497726437, 0.6857387208240013, 0.6812935881218981, + 0.24823806562055328, 0.23222434233678046, 0.14734449630794566, 0.8593695744180271, 0.9521390092531038, + 0.814592050185302, 0.8530358400139052, 0.7553391685463368, 0.467322607265681, 0.40821268238278274, + 0.15224535394276528, 0.7721886316195505, 0.3188390117882929, 0.18861616707188456, 0.3183899178336651, + 0.14272447128849497, 0.490605601502897, 0.046383765771242036, 0.16415576287753442, 0.22888703447357206, + 0.5375730996026021, 0.535512670941786, 0.5912839261540107, 0.6983894048201051, 
0.15323404928308648, + 0.8182668948670956, 0.29446560941893807, 0.418354823269551, 0.21870888711201486, 0.6424499336686369, + 0.3214700322107067, 0.24736247781577525, 0.7305497857337323, 0.568789544871731, 0.6533320204219322, + 0.33898139527730176, 0.16552403626291268, 0.10248361115345117, 0.8892140389602003, 0.9313325327216908, + 0.7919070208756257, 0.0708315749326881, 0.2188073927368014, 0.8127787161494373, 0.6257366952292145, + 0.6924998870427207, 0.26793105051769306, 0.7826490483783863, 0.5966997551066409, 0.2129270109769711, + 0.44769099683243785, 0.7574472416898225, 0.26729283252732317, 0.4140922332840643, 0.32495829469129234, + 0.7880364474443821, 0.6521293980091717, 0.23933614126005887, 0.40069381935472004, 0.3253434499214978, + 0.43850426563837475, 0.3582655821803802, 0.02315611544237217, 0.7562158058764148, 0.7040259198312322, + 0.5059192662520398, 0.052574245106938755, 0.0044571360420935235, 0.19994806707117685, 0.17143060225012974, + 0.7905787674108862, 0.700764812184617, 0.9178067635156595, 0.007883547902275412, 0.04345877753973626, + 0.40662108491778937, 0.32503174588972183, 0.05007085167672787, 0.9581958519270192, 0.20437076813207167, + 0.2397322863247101, 0.2432309640469006, 0.945083897778037, 0.715659408560469, 0.24955287567344286, + 0.28473276918917545, 0.5837263814586879, 0.2741346897790268, 0.5647425538488864, 0.969909805392653, + 0.7261702056098153, 0.5430436390733616, 0.08333922871976862, 0.5609888136600801, 0.014806506476614079, + 0.9455759599914065, 0.3193070470406175, 0.08542679684744281, 0.6744677936417323, 0.27782578504968625, + 0.29358418856919855, 0.23213589762876796, 0.005758486459847889, 0.36868262474116764, 0.264207113494464, + 0.36157643004033746, 0.9637257888559727, 0.5737186764359195, 0.5315516381845716, 0.5101088363620886, + 0.039248651321625805, 0.9681240024592983, 0.08677429466572284, 0.9518099421221371, 0.7952823450404229, + 0.05713730664487837, 0.17614720504232317, 0.5864853334395049, 0.5355917571532767, 0.14016085457936855, 
+ 0.4699268852262969, 0.588214580416792, 0.7315541026108536, 0.2615939952632047, 0.6940338433274365, + 0.9654764709266905, 0.17552901543503086, 0.2673106011552777, 0.16278327321010144, 0.2829647956459945, + 0.027781103679172303, 0.9715339351098571, 0.5104277134265274, 0.26688023346613854, 0.7675111602635859, + 0.8456916770759129, 0.9418274416132982, 0.4082337424410951, 0.2966721771835694, 0.4702670949658979, + 0.6827917396699957, 0.34049137335485646, 0.979883320082454, 0.9532270119107664, 0.8572037348042103, + 0.848146567657908, 0.431869811030539, 0.7941580229111237, 0.3436486153857896, 0.7273788931852989, + 0.49224680779113494, 0.3862553126741952, 0.9807757900646529, 0.701979947074233, 0.26056383661375004, + 0.6931835277270919, 0.7067946120442928, 0.4582357436150932, 0.11829934680864151, 0.2095153024553008, + 0.9185955528449318, 0.12823153669882037, 0.7424605800987469, 0.25214577369248303, 0.9208309549123908, + 0.28212583854425577, 0.33137007225108495, 0.24341818673228321, 0.2619469700798778, 0.28262355294589947, + 0.7755838712533507, 0.4053638246615504, 0.7976887925976534, 0.5522240670284019, 0.7227130132090629, + 0.5864070673077637, 0.42782888384518447, 0.28679923903462323, 0.16844780802036896, 0.17193836732536305, + 0.007199948294376979, 0.8432770047003173, 0.5329394715412086, 0.9027093385378767, 0.13464647139460417, + 0.8011185435869719, 0.8299531042565435, 0.16599661763813744, 0.6152441825738123, 0.25151020323522655, + 0.447172104562374, 0.5702866695855857, 0.8192062209036824, 0.23552991431295267, 0.32632467763985884, + 0.038987953903722006, 0.11718840620381377, 0.9992640055987563, 0.5148951028222453, 0.15248694988722633, + 0.6534611001367189, 0.031062641103680733, 0.8694831172503376, 0.24862657671809274, 0.6479564397956218, + 0.504649233907658, 0.07716366084071735, 0.6049908845448526, 0.7137870858666769, 0.7054652650387054, + 0.36350442596954213, 0.8795945111811084, 0.582926532185696, 0.8714877396827005, 0.06711091117831969, + 0.2982765088342778, 
0.3329050905695813, 0.4865987400975561, 0.6201284834454466, 0.46275286462889154, + 0.8851839278084489, 0.9999593670233206, 0.2758236652060908, 0.17890347032894782, 0.048086683036078326, + 0.28413000356301654, 0.9536736877854535, 0.44798896242923036, 0.29983211622633354, 0.6784308970797828, + 0.08779676069610531, 0.42626312256206, 0.22394679796804695, 0.9244223195097627, 0.15901668161462468, + 0.7618146378555393, 0.07687698303830115, 0.1929344802840327, 0.7553281474784295, 0.8025581206851257, + 0.14022257845130615, 0.9138712084521406, 0.8040597327540038, 0.6328239965840534, 0.6024965111935174, + 0.5589881068558278, 0.048191039611119035, 0.1404201318896956, 0.40850427818021384, 0.7203622066092176, + 0.8471988115463831, 0.8860087660548518, 0.45971281262939667, 0.08887747998382489, 0.9740387490405953, + 0.8585000450094692, 0.36058899298739555, 0.4278199731132287, 0.8375237958882933, 0.01417622872482538, + 0.16029654637258495, 0.057127838376448525, 0.35749934689051033, 0.2930410231839118, 0.8409144982684501, + 0.07593293596409612, 0.356635418237763, 0.226719753247275, 0.0007109181986780788, 0.7184665789785081, + 0.395824938261763, 0.4867276192621923, 0.5172670089066486, 0.5841508888388441, 0.08047869657287621, + 0.9482746906157185, 0.41294535716449154, 0.19159501891328978, 0.5578510869508337, 0.29546640911784916, + 0.2869367181145863, 0.7294147056255083, 0.3991485374776169, 0.4445980140617459, 0.6662896463638776, + 0.8614878148940442, 0.1808774427317561, 0.4614411389376568, 0.3303364346947858, 0.8028852020786104, + 0.5169929310162137, 0.14136531759583693, 0.9831280581298246, 0.5630783736198899, 0.8608024828157775, + 0.3931840317246529, 0.022722258330870937, 0.1056061545184902, 0.6060652796018042, 0.3352528944308385, + 0.8977154563912682, 0.7221307220260405, 0.46922215009200297, 0.4677265831501206, 0.49431288865022915, + 0.42276247133689504, 0.6788161518073327, 0.04439101827129144, 0.028811435615006542, 0.6839680845965146, + 0.9986738127631021, 0.07122924252967722, 
0.6409050009983894, 0.8555552153820654, 0.9370992032325905, + 0.7376153474252244, 0.05650174272501618, 0.9555164199341558, 0.24096140966133506, 0.15235115309934777, + 0.2227762469886192, 0.7035491425028773, 0.4883693004134255, 0.4991880671239809, 0.2038723768682421, + 0.4424646154063001, 0.42947559846701466, 0.15496718950982546, 0.8613665468468363, 0.6173709310699306, + 0.34240192063131025, 0.17697172987534104, 0.05400248287734222, 0.47400410893135847, 0.5657413124334275, + 0.6886788284227651, 0.693192811671234, 0.5531430026891606, 0.766990407259576, 0.8518985685483231, + 0.5389165861980743, 0.9950513588111255, 0.09523381762884475, 0.7696317754383216, 0.6698100854232568, + 0.43625543466202976, 0.2330898194698613, 0.4127476057822641, 0.9294766285644805, 0.05680528782969185, + 0.6537538588698784, 0.17396071383121814, 0.9595723514929978, 0.42975991044177986, 0.5431447973683532, + 0.8833668707212631, 0.26133302914884515, 0.14661930610537355, 0.7433132788379365, 0.3574322869140888, + 0.2341799637707056, 0.8349985525202629, 0.8160746720387089, 0.6407640183325787, 0.6197726260454984, + 0.41237945120152153, 0.1830987925080485, 0.7553599629295343, 0.05454691882123808, 0.5562940920005512, + 0.5102400141204675, 0.9541297229195699, 0.5577971859903864, 0.2938597050255226, 0.8403402780212583, + 0.47661930123442686, 0.4103920153350502, 0.3119461029229196, 0.3801380305294956, 0.3434681831027364, + 0.7197973788263885, 0.0987748411433772, 0.01388200527143002, 0.1651316494632148, 0.9141734949661398, + 0.2719071586501257, 0.711375764410416, 0.3627408492437765, 0.18177223898925443, 0.7975651507116626, + 0.6859695133005402, 0.990527721423081, 0.08901845138285569, 0.47695118798375047, 0.4581629061404291, + 0.6647405334731831, 0.4779626404166062, 0.5638043017770987, 0.3701472211861363, 0.3793667615579185, + 0.49023578639191434, 0.5453313612019725, 0.580306136685107, 0.9098682664715276, 0.4207018293353191, + 0.741785389613043, 0.9262602540365329, 0.703428108041273, 0.8044195024538315, 
0.27721795146313744, + 0.1630139202426243, 0.34579225120146284, 0.11553068934190114, 0.5928663617496019, 0.8952425964480728, + 0.47925358700494936, 0.345789231337841, 0.3110153026634752, 0.4628526273295195, 0.39546232211152244, + 0.04056519079629961, 0.6268438803068547, 0.8592557500599268, 0.9575425033955798, 0.3502505288978284, + 0.059258039358505155, 0.7518021608110209, 0.8739573910743396, 0.7067417457943123, 0.23881851357439288, + 0.5516104950394077, 0.16949728066552805, 0.17699999941441635, 0.3259215888704792, 0.6829558794210719, + 0.8768342657870796, 0.0828341270227837, 0.7034737450184155, 0.546752909248851, 0.7267979136936736, + 0.03712586684634822, 0.03808220856446631, 0.4344229625584559, 0.5073943443532393, 0.0876469591985547, + 0.5479537630990768, 0.15022676424664938, 0.7065732472092339, 0.5267637826119574, 0.49239129236901735, + 0.8218857103459851, 0.7979465301023618, 0.029548757715175067, 0.6932694244947931, 0.5595855181231474, + 0.7570733169968579, 0.3926838954245869, 0.06662638382372121, 0.7354175771053119, 0.6689967299755643, + 0.1503851907294197, 0.09334662866451149, 0.6985469006194642, 0.3853073559702226, 0.7598678794046461, + 0.8430367249722724, 0.09609005474058485, 0.3327016836424206, 0.5147633020947202, 0.9849763413326892, + 0.45758533152883063, 0.5410627384028919, 0.8101421663582148, 0.4745789007815937, 0.9695833469428511, + 0.7406298601731718, 0.8928719524735501, 0.07408106888379695, 0.48443913560921603, 0.9875039549398698, + 0.9207282373994886, 0.916412184605058, 0.5325664548137249, 0.5186829523993904, 0.0011137436559524039, + 0.7348700397039633, 0.09290265494838656, 0.23580663015210346, 0.35820294378273676, 0.7639205360566864, + 0.9465492151069357, 0.4252441824179847, 0.5316291043266255, 0.8122536967408113, 0.5789541420307059, + 0.22093739858997807, 0.721478902701215, 0.6885946622229991, 0.17835185715057533, 0.7238629983060336, + 0.20125336494303825, 0.16681217791113911, 0.9318773298015346, 0.1515859497360037, 0.21766180447529104, + 
0.4792250665535124, 0.24411300991628737, 0.3217581574119305, 0.8390359459774884, 0.9659682387174011, + 0.40034412355266336, 0.5255065433133791, 0.42837064152809445, 0.7654114974398095, 0.3865006028597384, + 0.24734575434479678, 0.41425008509412264, 0.8361654352683431, 0.4321112716511004, 0.011850429412228025, + 0.4725427310129483, 0.13015593957624672, 0.4613391477382468, 0.6411806951208191, 0.5748219462376235, + 0.7676968075086735, 0.4551543834911985, 0.3981321361133089, 0.5537117150328572, 0.640188645994515, + 0.8733008957299926, 0.7680564850159917, 0.591855214421642, 0.7758621954461007, 0.9883180605472691, + 0.7032276300145796, 0.31585307578954147, 0.28146832444773495, 0.9471559002035407, 0.44047203430454707, + 0.29854162639920856, 0.07733153530994863, 0.8953385322255972, 0.8718260794429866, 0.8244843036717918, + 0.9473049441286394, 0.13636726569332214, 0.9029170461356006, 0.2711524085545959, 0.008956533117231325, + 0.14719117410301985, 0.7352499716127123, 0.9005647194328514, 0.04239801036045521, 0.32113153439789777, + 0.23008784431942686, 0.25059835793851803, 0.7821196322936473, 0.3571983300993258, 0.2624691465092742, + 0.8018016489757368, 0.5248393284810376, 0.6977900148749919, 0.33093445127674614, 0.7554471103150353, + 0.7756032542183166, 0.006225581322849116, 0.4008868869085592, 0.20812329467775148, 0.17556686644385078, + 0.6096597423735237, 0.6191160871486041, 0.7349303921541335, 0.9192496344735487, 0.24229302674522868, + 0.9899032791595263, 0.29044779138561216, 0.8321620650117503, 0.7598221463622512, 0.4229989353498531, + 0.9387553475507483, 0.9984378083937185, 0.8151195733132465, 0.7295039076200354, 0.0922729891266788, + 0.8668318130738214, 0.23372381782403595, 0.03956728658695974, 0.9606270879918739, 0.8167629519087327, + 0.6627590908369925, 0.3128149140030476, 0.6317394199057031, 0.12021250922401605, 0.42604564092211705, + 0.3977230376064276, 0.022079790505487362, 0.37848317176894164, 0.46852397650558253, 0.20735781040531553, + 0.8524713092661131, 
0.44086749842642614, 0.9183713393663233, 0.9450411621274358, 0.697302770534409, + 0.9135605184069232, 0.09015468169209084, 0.704151745426799, 0.5400455593098272, 0.5480655211682695, + 0.30738071301825654, 0.15067120555532854, 0.925682378381145, 0.3371623491173492, 0.9597353973741677, + 0.9125053638178813, 0.5331026395371243, 0.7058339751156828, 0.28253557117321937, 0.7269649285767406, + 0.12353532557783586, 0.4491118104036773, 0.6637148382841026, 0.6351120142249237, 0.864473727452456, + 0.8265584180895602, 0.6156722515329398, 0.8643555219638754, 0.6571916479877694, + }; + float data[2 * 2 * 16 * 16] = { + 0.484169822867354, 0.8140947249809813, 0.4940455204015697, 0.7135851157134282, 0.8768734157203649, + 0.4184033435957544, 0.44292626120971623, 0.696532137919516, 0.9171321370698801, 0.7151284988967475, + 0.5109347861993496, 0.38142119707414746, 0.6698099769064979, 0.11820154777502989, 0.13012960479351465, + 0.16817089869937873, 0.8176105543815939, 0.629909392274251, 0.31313221345906006, 0.3221421451570968, + 0.5818901337492731, 0.9534404285747669, 0.30689058064437647, 0.7213959303267752, 0.6899507455723445, + 0.1145659733805463, 0.36766351628538607, 0.882482457649216, 0.8273160847663066, 0.344437602133445, + 0.6831369390963725, 0.15697402810153005, 0.5864853334395049, 0.5355917571532767, 0.14016085457936855, + 0.4699268852262969, 0.588214580416792, 0.7315541026108536, 0.2615939952632047, 0.6940338433274365, + 0.9654764709266905, 0.17552901543503086, 0.2673106011552777, 0.16278327321010144, 0.2829647956459945, + 0.027781103679172303, 0.9715339351098571, 0.5104277134265274, 0.2938597050255226, 0.8403402780212583, + 0.47661930123442686, 0.4103920153350502, 0.3119461029229196, 0.3801380305294956, 0.3434681831027364, + 0.7197973788263885, 0.0987748411433772, 0.01388200527143002, 0.1651316494632148, 0.9141734949661398, + 0.2719071586501257, 0.711375764410416, 0.3627408492437765, 0.18177223898925443, 0.5302578028349337, + 0.7252510992951486, 0.8213662663451495, 
0.255865605133453, 0.5347808590628998, 0.03484423993654684, + 0.4053151604997237, 0.1268756305743911, 0.7512214677373925, 0.7021691682264435, 0.5790723649519143, + 0.19175327358031247, 0.5748858471708752, 0.34613167229455966, 0.9081476470039521, 0.6213341986714184, + 0.7568275537113736, 0.3692026509026701, 0.25344774469478004, 0.24956690401767134, 0.012836876914660733, + 0.985850540555467, 0.8626773871697432, 0.5476182090476109, 0.3623970160812814, 0.9376822616892581, + 0.14087495346759915, 0.23523285339278033, 0.037762342255319714, 0.020689168902525368, 0.6838812404349178, + 0.5299351074122371, 0.26688023346613854, 0.7675111602635859, 0.8456916770759129, 0.9418274416132982, + 0.4082337424410951, 0.2966721771835694, 0.4702670949658979, 0.6827917396699957, 0.34049137335485646, + 0.979883320082454, 0.9532270119107664, 0.8572037348042103, 0.848146567657908, 0.431869811030539, + 0.7941580229111237, 0.3436486153857896, 0.7975651507116626, 0.6859695133005402, 0.990527721423081, + 0.08901845138285569, 0.47695118798375047, 0.4581629061404291, 0.6647405334731831, 0.4779626404166062, + 0.5638043017770987, 0.3701472211861363, 0.3793667615579185, 0.49023578639191434, 0.5453313612019725, + 0.580306136685107, 0.9098682664715276, 0.4207018293353191, 0.8738901859075797, 0.4191500219416586, + 0.9272377212501333, 0.6088513158752427, 0.43503086957072457, 0.7568802524830642, 0.6218604254226138, + 0.4893048459594117, 0.020152542775963678, 0.7267242398249997, 0.9274966736416573, 0.49200037295381294, + 0.42692100017651613, 0.7892621068774159, 0.6845597457970014, 0.8711448933002091, 0.7796551239198329, + 0.46483996491976176, 0.3896989084777407, 0.0246526241822681, 0.150842690954615, 0.2168947002788526, + 0.1149972560028748, 0.8984092864643223, 0.12025595226036978, 0.39457447774297594, 0.27976746576883327, + 0.9120741897014442, 0.07701027303624552, 0.8145938381183272, 0.988408599764702, 0.9891205523056846, + 0.7273788931852989, 0.49224680779113494, 0.3862553126741952, 0.9807757900646529, 
0.701979947074233, + 0.26056383661375004, 0.6931835277270919, 0.7067946120442928, 0.4582357436150932, 0.11829934680864151, + 0.2095153024553008, 0.9185955528449318, 0.12823153669882037, 0.7424605800987469, 0.25214577369248303, + 0.9208309549123908, 0.741785389613043, 0.9262602540365329, 0.703428108041273, 0.8044195024538315, + 0.27721795146313744, 0.1630139202426243, 0.34579225120146284, 0.11553068934190114, 0.5928663617496019, + 0.8952425964480728, 0.47925358700494936, 0.345789231337841, 0.3110153026634752, 0.4628526273295195, + 0.39546232211152244, 0.04056519079629961, 0.9718143171574413, 0.6597628763384622, 0.34484306915489416, + 0.880431818533278, 0.7955875470049112, 0.8757665484415526, 0.9915924406526361, 0.04436704716518447, + 0.02262359380765988, 0.7052472895837604, 0.7057299524522475, 0.3194782791822033, 0.09871902909610286, + 0.364619480384346, 0.06943275085154965, 0.4214138425069305, 0.38958399171559976, 0.19562724561171574, + 0.7406668247925765, 0.5176452064093667, 0.1833071142826258, 0.981011435762899, 0.1038485455897169, + 0.6360424773286126, 0.07505098129761023, 0.7110748432628814, 0.23864180641973576, 0.6311090911472507, + 0.2898434465648123, 0.024685643880116404, 0.1206400134468234, 0.9642954826184025, 0.28212583854425577, + 0.33137007225108495, 0.24341818673228321, 0.2619469700798778, 0.28262355294589947, 0.7755838712533507, + 0.4053638246615504, 0.7976887925976534, 0.5522240670284019, 0.7227130132090629, 0.5864070673077637, + 0.42782888384518447, 0.28679923903462323, 0.16844780802036896, 0.17193836732536305, 0.007199948294376979, + 0.6268438803068547, 0.8592557500599268, 0.9575425033955798, 0.3502505288978284, 0.059258039358505155, + 0.7518021608110209, 0.8739573910743396, 0.7067417457943123, 0.23881851357439288, 0.5516104950394077, + 0.16949728066552805, 0.17699999941441635, 0.3259215888704792, 0.6829558794210719, 0.8768342657870796, + 0.0828341270227837, 0.6434162586445695, 0.5323110978433342, 0.1248556733436923, 0.15902590556482255, + 
0.4712411793898579, 0.7894290004428436, 0.5032833161606196, 0.527059468896646, 0.9191006481472115, + 0.25305549423863427, 0.6340006741168577, 0.358184464955455, 0.25972912049066155, 0.18926991931768677, + 0.27595176819944, 0.8161194869680272, 0.1307237743057954, 0.3682368837401174, 0.10696899485835165, + 0.4381449790035217, 0.8778405603604115, 0.33026410718319044, 0.43155412970009344, 0.688941263482083, + 0.6484615948673739, 0.20103619018128005, 0.08393097766033852, 0.8913511709861782, 0.39172683394484564, + 0.8390068848742098, 0.8242315116115289, 0.016933814177084616, 0.8432770047003173, 0.5329394715412086, + 0.9027093385378767, 0.13464647139460417, 0.8011185435869719, 0.8299531042565435, 0.16599661763813744, + 0.6152441825738123, 0.25151020323522655, 0.447172104562374, 0.5702866695855857, 0.8192062209036824, + 0.23552991431295267, 0.32632467763985884, 0.038987953903722006, 0.11718840620381377, 0.7034737450184155, + 0.546752909248851, 0.7267979136936736, 0.03712586684634822, 0.03808220856446631, 0.4344229625584559, + 0.5073943443532393, 0.0876469591985547, 0.5479537630990768, 0.15022676424664938, 0.7065732472092339, + 0.5267637826119574, 0.49239129236901735, 0.8218857103459851, 0.7979465301023618, 0.029548757715175067, + 0.6487804290051798, 0.17197756477570048, 0.1792393744184949, 0.7262808230390461, 0.5266352244919847, + 0.3774751618211455, 0.14059289117160334, 0.08529920580377037, 0.28344872274215105, 0.9959953800069987, + 0.08464059391988354, 0.6670563867386871, 0.4687389304005585, 0.532305594930434, 0.28410362935440214, + 0.07129098950073931, 0.09858800624814978, 0.4866184190503341, 0.5797421505352837, 0.7275188051657857, + 0.8257101240399874, 0.2805336430088575, 0.5360449509204634, 0.48837931206904084, 0.9312020866617485, + 0.7410346712070657, 0.08381751355840361, 0.622083699685216, 0.5182872441017575, 0.20300436297731594, + 0.8408847205120992, 0.4065634267823589, 0.9992640055987563, 0.5148951028222453, 0.15248694988722633, + 0.6534611001367189, 
0.031062641103680733, 0.8694831172503376, 0.24862657671809274, 0.6479564397956218, + 0.504649233907658, 0.07716366084071735, 0.6049908845448526, 0.7137870858666769, 0.7054652650387054, + 0.36350442596954213, 0.8795945111811084, 0.582926532185696, 0.6932694244947931, 0.5595855181231474, + 0.7570733169968579, 0.3926838954245869, 0.06662638382372121, 0.7354175771053119, 0.6689967299755643, + 0.1503851907294197, 0.09334662866451149, 0.6985469006194642, 0.3853073559702226, 0.7598678794046461, + 0.8430367249722724, 0.09609005474058485, 0.3327016836424206, 0.5147633020947202, 0.90130643092616, + 0.1863314734620497, 0.7182836382686819, 0.05869761827209363, 0.43215310837091325, 0.5109770631402548, + 0.8507620188714253, 0.9448034463404849, 0.8204290492967814, 0.30175616425548013, 0.6347738558965675, + 0.7115068243493056, 0.9106453434867423, 0.7081724799882917, 0.9480983751448969, 0.30683290902116644, + 0.538625415898359, 0.24508951206516771, 0.6706828695669036, 0.8841876638401157, 0.14646094171092727, + 0.0873651816168074, 0.3789152991251008, 0.7514417784728014, 0.36660529419052745, 0.9058668780554411, + 0.3905648290701539, 0.46346477300678546, 0.1784939685332635, 0.5505805956291808, 0.57458139202536, + 0.37655520125792563, 0.8714877396827005, 0.06711091117831969, 0.2982765088342778, 0.3329050905695813, + 0.4865987400975561, 0.6201284834454466, 0.46275286462889154, 0.8851839278084489, 0.9999593670233206, + 0.2758236652060908, 0.17890347032894782, 0.048086683036078326, 0.28413000356301654, 0.9536736877854535, + 0.44798896242923036, 0.29983211622633354, 0.9849763413326892, 0.45758533152883063, 0.5410627384028919, + 0.8101421663582148, 0.4745789007815937, 0.9695833469428511, 0.7406298601731718, 0.8928719524735501, + 0.07408106888379695, 0.48443913560921603, 0.9875039549398698, 0.9207282373994886, 0.916412184605058, + 0.5325664548137249, 0.5186829523993904, 0.0011137436559524039, 0.13870348711282365, 0.882637588475232, + 0.8911520456086311, 0.13836682539246214, 
0.9507965084267185, 0.5052382254338335, 0.45961247912692105, + 0.979182256053379, 0.4013111553698335, 0.7788395612306059, 0.8062745586439758, 0.19584512860236825, + 0.275650733936018, 0.23472653953495393, 0.5268011060669119, 0.11615458325117867, 0.21289003081542524, + 0.41033428588999377, 0.13922941757955198, 0.7339905698509648, 0.6420365465526778, 0.5935365043786606, + 0.031448611228246826, 0.6446584269225506, 0.3168819180982111, 0.6919296394674304, 0.7611936857915317, + 0.6581989409515427, 0.7119379353798316, 0.19877568188642458, 0.29773337106911635, 0.6353172249870478, + 0.6784308970797828, 0.08779676069610531, 0.42626312256206, 0.22394679796804695, 0.9244223195097627, + 0.15901668161462468, 0.7618146378555393, 0.07687698303830115, 0.1929344802840327, 0.7553281474784295, + 0.8025581206851257, 0.14022257845130615, 0.9138712084521406, 0.8040597327540038, 0.6328239965840534, + 0.6024965111935174, 0.7348700397039633, 0.09290265494838656, 0.23580663015210346, 0.35820294378273676, + 0.7639205360566864, 0.9465492151069357, 0.4252441824179847, 0.5316291043266255, 0.8122536967408113, + 0.5789541420307059, 0.22093739858997807, 0.721478902701215, 0.6885946622229991, 0.17835185715057533, + 0.7238629983060336, 0.20125336494303825, 0.37003145722910347, 0.7429488863605441, 0.7499741271194941, + 0.849429488346799, 0.764998070770062, 0.8627353540448067, 0.2639189465037146, 0.18543192561486244, + 0.8406963006607173, 0.29867265092172757, 0.5830874946322163, 0.7266542276089016, 0.47732859671105865, + 0.5276495401100896, 0.0285892476174624, 0.10154908386840933, 0.7355153876080694, 0.5965273658488094, + 0.9873670681950693, 0.4231813740764955, 0.2367076098094284, 0.7766733432911771, 0.6429580955121145, + 0.8307521364617134, 0.6525257497726437, 0.6857387208240013, 0.6812935881218981, 0.24823806562055328, + 0.23222434233678046, 0.14734449630794566, 0.8593695744180271, 0.9521390092531038, 0.5589881068558278, + 0.048191039611119035, 0.1404201318896956, 0.40850427818021384, 
0.7203622066092176, 0.8471988115463831, + 0.8860087660548518, 0.45971281262939667, 0.08887747998382489, 0.9740387490405953, 0.8585000450094692, + 0.36058899298739555, 0.4278199731132287, 0.8375237958882933, 0.01417622872482538, 0.16029654637258495, + 0.16681217791113911, 0.9318773298015346, 0.1515859497360037, 0.21766180447529104, 0.4792250665535124, + 0.24411300991628737, 0.3217581574119305, 0.8390359459774884, 0.9659682387174011, 0.40034412355266336, + 0.5255065433133791, 0.42837064152809445, 0.7654114974398095, 0.3865006028597384, 0.24734575434479678, + 0.41425008509412264, 0.4088412035484068, 0.7434536979800712, 0.9525161199007488, 0.07138799479311309, + 0.8519420240978587, 0.08747097413510685, 0.06259133820158325, 0.6102164785932812, 0.5272535740517759, + 0.3882790321273053, 0.5219871401239666, 0.08342830725989958, 0.06992376312633286, 0.45588347082683367, + 0.8507181024497145, 0.8168001145505971, 0.814592050185302, 0.8530358400139052, 0.7553391685463368, + 0.467322607265681, 0.40821268238278274, 0.15224535394276528, 0.7721886316195505, 0.3188390117882929, + 0.18861616707188456, 0.3183899178336651, 0.14272447128849497, 0.490605601502897, 0.046383765771242036, + 0.16415576287753442, 0.22888703447357206, 0.5375730996026021, 0.057127838376448525, 0.35749934689051033, + 0.2930410231839118, 0.8409144982684501, 0.07593293596409612, 0.356635418237763, 0.226719753247275, + 0.0007109181986780788, 0.7184665789785081, 0.395824938261763, 0.4867276192621923, 0.5172670089066486, + 0.5841508888388441, 0.08047869657287621, 0.9482746906157185, 0.41294535716449154, 0.8361654352683431, + 0.4321112716511004, 0.011850429412228025, 0.4725427310129483, 0.13015593957624672, 0.4613391477382468, + 0.6411806951208191, 0.5748219462376235, 0.7676968075086735, 0.4551543834911985, 0.3981321361133089, + 0.5537117150328572, 0.640188645994515, 0.8733008957299926, 0.7680564850159917, 0.591855214421642, + 0.6288218440807817, 0.5627797837213618, 0.8642380932712407, 0.6296224131150504, 
0.810297385035068, + 0.9521660572032933, 0.006989866947720524, 0.15360690769811158, 0.609592365107798, 0.856111276058204, + 0.5569449989810563, 0.6358938242284586, 0.43974619938434945, 0.23369648280955435, 0.17610792852502044, + 0.5393700616427376, 0.535512670941786, 0.5912839261540107, 0.6983894048201051, 0.15323404928308648, + 0.8182668948670956, 0.29446560941893807, 0.418354823269551, 0.21870888711201486, 0.6424499336686369, + 0.3214700322107067, 0.24736247781577525, 0.7305497857337323, 0.568789544871731, 0.6533320204219322, + 0.33898139527730176, 0.16552403626291268, 0.19159501891328978, 0.5578510869508337, 0.29546640911784916, + 0.2869367181145863, 0.7294147056255083, 0.3991485374776169, 0.4445980140617459, 0.6662896463638776, + 0.8614878148940442, 0.1808774427317561, 0.4614411389376568, 0.3303364346947858, 0.8028852020786104, + 0.5169929310162137, 0.14136531759583693, 0.9831280581298246, 0.7758621954461007, 0.9883180605472691, + 0.7032276300145796, 0.31585307578954147, 0.28146832444773495, 0.9471559002035407, 0.44047203430454707, + 0.29854162639920856, 0.07733153530994863, 0.8953385322255972, 0.8718260794429866, 0.8244843036717918, + 0.9473049441286394, 0.13636726569332214, 0.9029170461356006, 0.2711524085545959, 0.14414511952710007, + 0.9499336265300194, 0.6444587977800735, 0.7418623715827386, 0.044802260433499996, 0.850581483363445, + 0.05712457779883895, 0.15282854721354788, 0.3482893221081773, 0.6247260064132429, 0.42099978778043057, + 0.7479237929881165, 0.02099064732615097, 0.013098766701113651, 0.7226056703495463, 0.04820012498442583, + 0.10248361115345117, 0.8892140389602003, 0.9313325327216908, 0.7919070208756257, 0.0708315749326881, + 0.2188073927368014, 0.8127787161494373, 0.6257366952292145, 0.6924998870427207, 0.26793105051769306, + 0.7826490483783863, 0.5966997551066409, 0.2129270109769711, 0.44769099683243785, 0.7574472416898225, + 0.26729283252732317, 0.5630783736198899, 0.8608024828157775, 0.3931840317246529, 0.022722258330870937, + 
0.1056061545184902, 0.6060652796018042, 0.3352528944308385, 0.8977154563912682, 0.7221307220260405, + 0.46922215009200297, 0.4677265831501206, 0.49431288865022915, 0.42276247133689504, 0.6788161518073327, + 0.04439101827129144, 0.028811435615006542, 0.008956533117231325, 0.14719117410301985, 0.7352499716127123, + 0.9005647194328514, 0.04239801036045521, 0.32113153439789777, 0.23008784431942686, 0.25059835793851803, + 0.7821196322936473, 0.3571983300993258, 0.2624691465092742, 0.8018016489757368, 0.5248393284810376, + 0.6977900148749919, 0.33093445127674614, 0.7554471103150353, 0.30512165089613275, 0.7316809143395435, + 0.33837084272328344, 0.031181701520131222, 0.8998729244301411, 0.13963009532040604, 0.5524300751373528, + 0.998751142570437, 0.8079412199511319, 0.38764559152084754, 0.3210831483519665, 0.7304185865067885, + 0.5822602684215884, 0.6364590811085312, 0.10627778088019713, 0.5399985372878428, 0.4140922332840643, + 0.32495829469129234, 0.7880364474443821, 0.6521293980091717, 0.23933614126005887, 0.40069381935472004, + 0.3253434499214978, 0.43850426563837475, 0.3582655821803802, 0.02315611544237217, 0.7562158058764148, + 0.7040259198312322, 0.5059192662520398, 0.052574245106938755, 0.0044571360420935235, 0.19994806707117685, + 0.6839680845965146, 0.9986738127631021, 0.07122924252967722, 0.6409050009983894, 0.8555552153820654, + 0.9370992032325905, 0.7376153474252244, 0.05650174272501618, 0.9555164199341558, 0.24096140966133506, + 0.15235115309934777, 0.2227762469886192, 0.7035491425028773, 0.4883693004134255, 0.4991880671239809, + 0.2038723768682421, 0.7756032542183166, 0.006225581322849116, 0.4008868869085592, 0.20812329467775148, + 0.17556686644385078, 0.6096597423735237, 0.6191160871486041, 0.7349303921541335, 0.9192496344735487, + 0.24229302674522868, 0.9899032791595263, 0.29044779138561216, 0.8321620650117503, 0.7598221463622512, + 0.4229989353498531, 0.9387553475507483, 0.3140212188001139, 0.6511943391734281, 0.7376690823137533, + 0.16614528063403933, 
0.45753024916507445, 0.6563453150496379, 0.011057777761439902, 0.0018264730571969645, + 0.3140058709073794, 0.8479357203115255, 0.9718814368259422, 0.2938044793936031, 0.389604645965164, + 0.7259980798553759, 0.384597064894432, 0.6461406690633517, 0.17143060225012974, 0.7905787674108862, + 0.700764812184617, 0.9178067635156595, 0.007883547902275412, 0.04345877753973626, 0.40662108491778937, + 0.32503174588972183, 0.05007085167672787, 0.9581958519270192, 0.20437076813207167, 0.2397322863247101, + 0.2432309640469006, 0.945083897778037, 0.715659408560469, 0.24955287567344286, 0.4424646154063001, + 0.42947559846701466, 0.15496718950982546, 0.8613665468468363, 0.6173709310699306, 0.34240192063131025, + 0.17697172987534104, 0.05400248287734222, 0.47400410893135847, 0.5657413124334275, 0.6886788284227651, + 0.693192811671234, 0.5531430026891606, 0.766990407259576, 0.8518985685483231, 0.5389165861980743, + 0.9984378083937185, 0.8151195733132465, 0.7295039076200354, 0.0922729891266788, 0.8668318130738214, + 0.23372381782403595, 0.03956728658695974, 0.9606270879918739, 0.8167629519087327, 0.6627590908369925, + 0.3128149140030476, 0.6317394199057031, 0.12021250922401605, 0.42604564092211705, 0.3977230376064276, + 0.022079790505487362, 0.28149855317178163, 0.03265235541107758, 0.7517419899840844, 0.3364005732928873, + 0.7329527122940036, 0.43759767242690906, 0.3172137189567925, 0.2707815810278883, 0.6642248201697422, + 0.3972911147544086, 0.009127503808928017, 0.1234374452587974, 0.39970292448775213, 0.11989802753965029, + 0.9927165644356675, 0.8609568773974031, 0.28473276918917545, 0.5837263814586879, 0.2741346897790268, + 0.5647425538488864, 0.969909805392653, 0.7261702056098153, 0.5430436390733616, 0.08333922871976862, + 0.5609888136600801, 0.014806506476614079, 0.9455759599914065, 0.3193070470406175, 0.08542679684744281, + 0.6744677936417323, 0.27782578504968625, 0.29358418856919855, 0.9950513588111255, 0.09523381762884475, + 0.7696317754383216, 0.6698100854232568, 
0.43625543466202976, 0.2330898194698613, 0.4127476057822641, + 0.9294766285644805, 0.05680528782969185, 0.6537538588698784, 0.17396071383121814, 0.9595723514929978, + 0.42975991044177986, 0.5431447973683532, 0.8833668707212631, 0.26133302914884515, 0.37848317176894164, + 0.46852397650558253, 0.20735781040531553, 0.8524713092661131, 0.44086749842642614, 0.9183713393663233, + 0.9450411621274358, 0.697302770534409, 0.9135605184069232, 0.09015468169209084, 0.704151745426799, + 0.5400455593098272, 0.5480655211682695, 0.30738071301825654, 0.15067120555532854, 0.925682378381145, + 0.8160834394485424, 0.8645970999084115, 0.43923002522405574, 0.3760052855173096, 0.08902326927063253, + 0.09948678314542847, 0.3578210615036086, 0.8485940282988038, 0.2516468752579376, 0.7365499301475807, + 0.5612758649671108, 0.7859804827616864, 0.8851363067086282, 0.8935474690220618, 0.5336384648157917, + 0.23839322063516444, 0.23213589762876796, 0.005758486459847889, 0.36868262474116764, 0.264207113494464, + 0.36157643004033746, 0.9637257888559727, 0.5737186764359195, 0.5315516381845716, 0.5101088363620886, + 0.039248651321625805, 0.9681240024592983, 0.08677429466572284, 0.9518099421221371, 0.7952823450404229, + 0.05713730664487837, 0.17614720504232317, 0.14661930610537355, 0.7433132788379365, 0.3574322869140888, + 0.2341799637707056, 0.8349985525202629, 0.8160746720387089, 0.6407640183325787, 0.6197726260454984, + 0.41237945120152153, 0.1830987925080485, 0.7553599629295343, 0.05454691882123808, 0.5562940920005512, + 0.5102400141204675, 0.9541297229195699, 0.5577971859903864, 0.3371623491173492, 0.9597353973741677, + 0.9125053638178813, 0.5331026395371243, 0.7058339751156828, 0.28253557117321937, 0.7269649285767406, + 0.12353532557783586, 0.4491118104036773, 0.6637148382841026, 0.6351120142249237, 0.864473727452456, + 0.8265584180895602, 0.6156722515329398, 0.8643555219638754, 0.6571916479877694, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_Z, {16, 2, 2, 16}, 
{4, 1, 16, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferFractalZ transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_4d)); + for (int i = 0; i < sizeof(data_4d) / sizeof(data_4d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_4d[i]); + } +} + +TEST_F(UtestFormatTransferNhwcFz, nhwc_to_fracz_fp32_success_gt_cube) { + float data_4d[8 * 2 * 16 * 16] = { + 0.9818316040500025, + 0.6304740371328553, + 0.3403190259672165, + 0.1545772791147686, + 0.636859736696714, + 0.7286646469411707, + 0.6881973306039132, + 0.11522718733202364, + 0.05221067208366703, + 0.760070751319305, + 0.7630787390114239, + 0.03062661180995363, + 0.6996367230214888, + 0.05874377568360356, + 0.7989654896241913, + 0.48340672056664313, + 0.9824687054977398, + 0.9118200012456436, + 0.5325827377037693, + 0.13215275466376963, + 0.16258783427877654, + 0.8733241462879588, + 0.9811689540387104, + 0.16717227735564522, + 0.17175025957595413, + 0.26751940129597196, + 0.6902723346114396, + 0.9141926645323374, + 0.1602148239484672, + 0.590826374279885, + 0.12879622297601634, + 0.9993413116954417, + 0.108757112003669, + 0.1058705156043852, + 0.29841868434742747, + 0.29680370546430235, + 0.7166628643533716, + 0.7961560236596105, + 0.2998001849925561, + 0.17227883677244848, + 0.273205026266526, + 0.4057715298841855, + 0.9241372689749366, + 0.4818191702106074, + 0.8407488865265121, + 0.37467834871018846, + 0.08393661785563533, + 0.9287360118914758, + 0.9323262118603665, + 0.0959850628867761, + 0.07841190876627901, + 0.7575281447707486, + 0.6451504082047301, + 0.2867531294639032, + 0.6038261174674847, + 0.5722864102604774, + 0.5980542102140242, + 0.29143691325023924, + 0.3323126425519707, + 0.6213156440241487, + 0.2783744506447783, + 0.8192692866948531, + 0.9580914367293034, + 0.8180085498115592, + 0.46737984143039313, + 0.21761303972473267, + 0.010092223694828983, + 0.133924872196312, + 0.6457939399463842, + 0.7108065587126572, + 
0.6213477205246348, + 0.03780712693181687, + 0.052964796784310986, + 0.7720495422151494, + 0.07597908210473048, + 0.3880721492592797, + 0.8673754495725557, + 0.2159619821305203, + 0.9349796659144918, + 0.19280128505408822, + 0.19225222099905237, + 0.6381200674681307, + 0.09517206563329228, + 0.7401210217185733, + 0.9213966447997659, + 0.8282252903940375, + 0.25575682793740195, + 0.09419161503451245, + 0.7133067524485386, + 0.30622040856962174, + 0.04567030385976745, + 0.4421966327959601, + 0.9470399466953359, + 0.1863959618388854, + 0.603892794005211, + 0.7305230388181004, + 0.28364151338783206, + 0.7685411345675934, + 0.8988615642504377, + 0.7248529774876415, + 0.8955713245004737, + 0.9694464156380791, + 0.13627973385112224, + 0.0262054823780018, + 0.9598488199046755, + 0.6637745866350796, + 0.567021314483643, + 0.8028105889988362, + 0.5449983501324986, + 0.7405568508319487, + 0.7479203765092981, + 0.6091595342627724, + 0.3571584723082011, + 0.6634376909205661, + 0.7823687380994333, + 0.46769811417519425, + 0.43313252298354554, + 0.4512165332058773, + 0.7409639493996218, + 0.773542103389995, + 0.03251757301743474, + 0.2918512326156285, + 0.2707389025126231, + 0.48862277656898045, + 0.0677707878489644, + 0.5319713656658794, + 0.19055872328014922, + 0.42372710919648093, + 0.5410233673552419, + 0.7053385617098781, + 0.5796221225781045, + 0.6774744169203547, + 0.38184777780992407, + 0.5921849312009162, + 0.4139673487900193, + 0.6324891962264935, + 0.23998117926531592, + 0.3006459023062501, + 0.7543836662968866, + 0.21903128794922366, + 0.8847078454339562, + 0.43658173987503657, + 0.13725114471518118, + 0.5082197711407339, + 0.18832064432940687, + 0.08574080793259364, + 0.30059358691237237, + 0.6482106495699304, + 0.8511603041679399, + 0.2701156230003453, + 0.21298871203478398, + 0.08203197080945912, + 0.6725944240256472, + 0.3066569123534626, + 0.2662259874220829, + 0.16779728161704843, + 0.7158644073214633, + 0.6878815896858301, + 0.9934704148141994, + 
0.40929994195329833, + 0.9879146301551541, + 0.8134508106867051, + 0.4772264929878759, + 0.5239150673556234, + 0.04973375923455958, + 0.9408063913333713, + 0.8933576927435202, + 0.7690497885609424, + 0.3432688849226637, + 0.09841971497506807, + 0.6489987050683524, + 0.4612619245600613, + 0.9668831791357312, + 0.6773541509970112, + 0.8113556563575658, + 0.5103191595379972, + 0.692501163915668, + 0.872476678417899, + 0.39847669533309527, + 0.2550816582357872, + 0.44785761524405, + 0.6631992982892411, + 0.07909667197674031, + 0.15595306847956636, + 0.7549753608310522, + 0.7497451144627425, + 0.961356053996849, + 0.047012242220749845, + 0.39968661191290844, + 0.7900745768809934, + 0.18988750301939106, + 0.34309148527453104, + 0.8666802227613912, + 0.33819954591027035, + 0.6754386381771887, + 0.012056309491297434, + 0.6529806784933322, + 0.9651442384174174, + 0.9978184657413758, + 0.6258752595314446, + 0.4974480025947464, + 0.28768692957840036, + 0.5851676600752466, + 0.38541216263965494, + 0.5412711123583736, + 0.7940464609305427, + 0.008865031997954298, + 0.05848479024922548, + 0.9396557362265029, + 0.4326393542047332, + 0.2488609657348656, + 0.7562763464489093, + 0.9983141149044218, + 0.008402913032401704, + 0.19571855359179036, + 0.690386550914582, + 0.7536429455538223, + 0.9378744182203695, + 0.7586425829635426, + 0.1445178013804469, + 0.9109541843179556, + 0.07821616551324684, + 0.05078095176454778, + 0.9130796647001046, + 0.20480686637597612, + 0.8200671332173322, + 0.4353078372661333, + 0.9539464470659443, + 0.8303326350536121, + 0.6278771584995406, + 0.2509169090532466, + 0.1774990588847868, + 0.4984180010157796, + 0.37490941253308996, + 0.747904697670194, + 0.4689694228884409, + 0.07865978524122308, + 0.018278256806917637, + 0.7339557318674227, + 0.9194551313394231, + 0.48861250541718937, + 0.01814006325736084, + 0.7541958032692393, + 0.8774612716989119, + 0.3760072307293194, + 0.2342391882453575, + 0.5507128966242177, + 0.8814107840171872, + 
0.7629915021375545, + 0.022596785422104193, + 0.4676586074696423, + 0.6323496539184577, + 0.10709853732458496, + 0.24458540292747133, + 0.7432044950079842, + 0.991407564153164, + 0.6266082092973347, + 0.04229595441634182, + 0.8184353596321909, + 0.10225103323158635, + 0.11394336686699535, + 0.4022127256961855, + 0.30838155456109306, + 0.3195787933473151, + 0.11242740506794102, + 0.06655005190369112, + 0.5103072329299937, + 0.22668773651030127, + 0.32922441298388727, + 0.8140348723745937, + 0.5638190653380799, + 0.26541973372425653, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6639543926405075, + 0.590430754187306, + 0.13166255110692238, + 0.46762259840886866, + 0.8254471928133637, + 0.4192809120720761, + 0.9109983519609524, + 0.06282341072241782, + 0.11304156042728886, + 0.5823811143881991, + 0.8401405236826993, + 0.39537016399274805, + 0.3393604217038657, + 0.981346463187018, + 0.8145676445740513, + 0.9617260330265892, + 0.8974954048898902, + 0.9194970967140944, + 0.9244598046892946, + 0.5278043489398178, + 0.6099197828719526, + 0.6616471969341827, + 0.6838181481733964, + 0.3321388894085553, + 0.32772025405797767, + 0.8045236337225692, + 0.543424952115358, + 0.31439754345644666, + 0.20390144864709325, + 0.7287843878803184, + 0.9530947743998013, + 0.9986403638627129, + 0.3975263188390229, + 0.5781879687545874, + 0.8724409861023287, + 0.8566257438266888, + 0.14606536893076372, + 0.12865091525620642, + 0.8838077200484497, + 0.7193688017246426, + 0.5554742650350404, + 0.1590835864032687, + 0.14658353856064854, + 0.8378474528967791, + 0.48599324242561204, + 0.4618113619505574, + 0.42866889296674937, + 0.5822891945174828, + 0.9030309982011011, + 0.41714977847695556, + 0.46228541811630763, + 0.2749445339474653, + 0.25981719721224195, + 0.8216787813868978, + 0.4538230925345249, + 0.7282865762095903, + 0.8788563382405642, + 0.8826153372351091, + 0.008215547008057156, + 0.7021704582311347, + 0.142877593970687, + 0.03730299658952074, + 0.5109381307125636, + 0.496533373979947, + 0.6221167728428014, + 0.594797090684722, + 0.9484422878814833, + 0.5779176961086859, + 0.05054664123568153, + 0.6904408145042924, + 0.9456781975122573, + 0.07234907191568096, + 0.31454539272595317, + 0.039782261918874906, + 0.9587127521414383, + 0.8805585025554179, + 0.9859544914622989, + 0.6275317338277425, + 0.14352580582038044, + 
0.7695326504943156, + 0.41043924117549035, + 0.3199344511919423, + 0.17009843234582034, + 0.19333893045555228, + 0.4065598982414558, + 0.4783366067549203, + 0.2925065077781679, + 0.46612867625915266, + 0.2260401750143446, + 0.9769887894299829, + 0.626959110785719, + 0.9005591753667794, + 0.22730676915421288, + 0.43752553937624405, + 0.20832599816771735, + 0.0648434614140343, + 0.5533790070863268, + 0.08314085235301405, + 0.8425708019179065, + 0.15917383752943826, + 0.6527601974253487, + 0.30126252545266263, + 0.6463253615283967, + 0.4485504420576313, + 0.6669832516919934, + 0.6390350894633332, + 0.5841276848813355, + 0.3633156947810412, + 0.20053851382498278, + 0.2991176134182495, + 0.2237519088309342, + 0.6924468706663672, + 0.6267863686588406, + 0.6957686899861832, + 0.8481678412966801, + 0.6679132796626048, + 0.2520678664501832, + 0.6859580348999734, + 0.41653832488428466, + 0.3443130260330426, + 0.9265385049178334, + 0.4632590871458663, + 0.2792541905414425, + 0.297175390928865, + 0.9649706249008346, + 0.8868549044180415, + 0.27859114362929416, + 0.7113192182809037, + 0.9797397982051781, + 0.5567150341480799, + 0.7202351101992689, + 0.16413702022000565, + 0.336467018972884, + 0.0377230903593081, + 0.8602687151091007, + 0.11839554304283328, + 0.14208471520735977, + 0.43607071414863874, + 0.8741600257317956, + 0.6640170355939985, + 0.0909679192917131, + 0.9106544964694222, + 0.804698027266483, + 0.35002717921135407, + 0.151688664815809, + 0.7393767419885556, + 0.48217967745270196, + 0.9602943716631533, + 0.30319852768975375, + 0.1330627046343439, + 0.6710020756991474, + 0.27078039927991027, + 0.11186194386988224, + 0.5457171365684865, + 0.7880448237433672, + 0.013805055471389882, + 0.6180892645154643, + 0.48759905827516603, + 0.8233479375602223, + 0.4264784017101182, + 0.9633894232982487, + 0.7448212055191065, + 0.5452953261409613, + 0.4856970915644032, + 0.8693281473365151, + 0.6373330823065603, + 0.5771587194750025, + 0.8691299641815123, + 0.0815851410708418, + 
0.6806300549508425, + 0.5075760571421807, + 0.601124222598236, + 0.07050306902810866, + 0.36920524642079033, + 0.18818005490550915, + 0.5583892243115647, + 0.5927420217262831, + 0.7580186574311277, + 0.5033605618537017, + 0.9425017651769168, + 0.48862010484707064, + 0.9853716648830263, + 0.39984394946359525, + 0.5558506900537014, + 0.5908610536703583, + 0.22602671513614803, + 0.8798987778429802, + 0.9531473683222749, + 0.7410953157434534, + 0.30111561488120153, + 0.4403622314059401, + 0.052456464089102095, + 0.01209641118408944, + 0.46704246287932405, + 0.4750820624217653, + 0.10164673751729414, + 0.9796758207356858, + 0.8610487700070613, + 0.6211171296662068, + 0.8731238761599872, + 0.37578834957139673, + 0.1120201496751766, + 0.011314071997954644, + 0.2572189570511383, + 0.879811392407261, + 0.10362272859591204, + 0.01645233098389376, + 0.9500294012864102, + 0.9690251328314643, + 0.42575080842172963, + 0.7001480708771485, + 0.4041297773374436, + 0.4185210345469337, + 0.7190995465953123, + 0.7640566649798914, + 0.23242098306402514, + 0.3542816499880481, + 0.5507254523321877, + 0.3029570242047027, + 0.3038459304238752, + 0.9598648212640765, + 0.7204511737388988, + 0.8790782739473424, + 0.7794923303105117, + 0.8815381229146946, + 0.03744741097255122, + 0.9047169962685846, + 0.4610613790126912, + 0.2475915213540839, + 0.4298710416601963, + 0.8221398481995089, + 0.23343189008359377, + 0.5350763782441681, + 0.14739879873747308, + 0.33280129854717244, + 0.23821919516506462, + 0.7167817307593071, + 0.8773007050805388, + 0.48070282049083113, + 0.08899633176897048, + 0.14443919067113453, + 0.9763728131494807, + 0.13169707338891068, + 0.2574020118799679, + 0.9117198409079358, + 0.4813044045568353, + 0.7148710776112692, + 0.10816472148122425, + 0.4872820160105873, + 0.7843715998368327, + 0.9792048963235088, + 0.5032302898011388, + 0.9133488145295496, + 0.8343572569411505, + 0.45894548309522043, + 0.8041104427062074, + 0.5871631865450887, + 0.056334307939404415, + 
0.7231806850118402, + 0.6744439230965042, + 0.8458065219298543, + 0.009223176587899173, + 0.6934220235290146, + 0.7552659426337391, + 0.40417163368066655, + 0.18495513003494457, + 0.716067507969818, + 0.14155481130771042, + 0.029361076807311592, + 0.8830993452371644, + 0.6613103229791982, + 0.9398338818781078, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.3578659411897861, + 0.7301301934786895, + 0.8618464466584491, + 0.18816515849292015, + 0.9107401874129222, + 0.03292249492058863, + 0.8173098249178635, + 0.2326748580551784, + 0.8712189622758891, + 0.052152358807047494, + 0.5500602809399827, + 0.29888382736441554, + 0.7171636160760438, + 0.61107113112707, + 0.8742559685716594, + 0.27079826979584654, + 0.034930190616391354, + 0.7646888920837598, + 0.33294464574985105, + 0.18964393968657645, + 0.8359357998538534, + 0.603777432373904, + 0.8080737216440349, + 0.43253091727556037, + 0.29704589713859186, + 0.35101583873151454, + 0.27797579020562, + 0.4409871040404285, + 0.03183653010616194, + 0.6888718475212194, + 0.743930296258262, + 0.1383195679389676, + 0.7653124697875597, + 0.6197416698501532, + 0.03370111396538, + 0.277730833007543, + 0.47173387429680513, + 0.004031925171442463, + 0.4216810529719548, + 0.1527542910995907, + 0.755050985872739, + 0.5103916874099794, + 0.24663350472458012, + 0.9944239408861326, + 0.8532667918305049, + 0.571258224550583, + 0.8203424472901112, + 0.7752687501935466, + 0.3379267797413644, + 0.04363591901403252, + 0.021159698090064682, + 0.9844201045382279, + 0.9320998367516875, + 0.7508176117887979, + 0.6279835385302436, + 0.8296132817079003, + 0.7742499180798184, + 0.22327265194322599, + 0.05523148177019932, + 0.015431424053453768, + 0.9810406410163607, + 0.6360185797616702, + 0.318578721332871, + 0.8545232136203632, + 0.23240215777624518, + 0.5821386011719336, + 0.40705038971314067, + 0.9726331053853133, + 0.5877629214351668, + 0.737951863797599, + 0.13209112961192793, + 0.32596609082453, + 0.9051706320834314, + 0.006592902179112681, + 0.44539065743524575, + 0.21040963856054318, + 0.02648671883280662, + 0.8307275617617943, + 0.4998882468907887, + 0.553040303946554, + 0.5297816530911937, + 0.5843239411260238, + 0.1571513016873196, + 0.4588380934037194, + 0.8580445067796844, + 0.08760914807929077, + 0.3167980809508252, + 0.5688868537877791, + 0.7432713240307058, + 0.47849574835183317, + 
0.08569476671747134, + 0.0028373026780509347, + 0.36146132556611577, + 0.11732007318482318, + 0.8826440135398512, + 0.8057998064675778, + 0.04722817815152447, + 0.5163195461666986, + 0.8923939769755282, + 0.05625559077233866, + 0.061665786518363186, + 0.662644780876765, + 0.6339151490480687, + 0.23437975062774363, + 0.12890375862760606, + 0.3501592943353464, + 0.09947716801883921, + 0.9783908021362502, + 0.5039620019339041, + 0.7894302782128263, + 0.40318009245339925, + 0.010992512436047153, + 0.7885896931523694, + 0.09972075576249273, + 0.327050109765748, + 0.040566933728248045, + 0.5192171211123873, + 0.5010930644256814, + 0.1751335903258584, + 0.973049431223127, + 0.04605996955300973, + 0.5346739560986009, + 0.8356986203333525, + 0.8197705199993206, + 0.00016846934847547512, + 0.9767516366117579, + 0.7373887650278352, + 0.25343735960629343, + 0.32665192959159395, + 0.9515843270568628, + 0.7912931593367771, + 0.3390543201499241, + 0.39116094675420365, + 0.48473912485681336, + 0.7748044486400519, + 0.9504668800747522, + 0.17379382917448194, + 0.9058195197373183, + 0.2269566175662039, + 0.038537709312595037, + 0.26170965377152067, + 0.39139592915908183, + 0.21106987419176348, + 0.2688423270567659, + 0.0588646031637704, + 0.8142663494405596, + 0.7230712928151145, + 0.10388770611234022, + 0.6320570126609345, + 0.3854592292304997, + 0.8401593200228608, + 0.19857080876585775, + 0.4632224663542698, + 0.8338896779165693, + 0.07498742940886116, + 0.019157468796003774, + 0.1973426137959765, + 0.9481227475490336, + 0.18022422840604635, + 0.8193635052157996, + 0.6971962981473214, + 0.03280146452993471, + 0.6100845348121183, + 0.2564881425807104, + 0.40952730901052836, + 0.11198840547195466, + 0.6384039317824369, + 0.40293878780785075, + 0.022692351439830727, + 0.8875572687105943, + 0.06480422446351664, + 0.10051348811549643, + 0.1980882930823188, + 0.9179898183709072, + 0.6861308896442132, + 0.6956857450336589, + 0.31107695018579007, + 0.7677549648467343, + 
0.6201418408382342, + 0.804275349391573, + 0.2870986397529448, + 0.1397119476044384, + 0.8556387210623139, + 0.6062812206466079, + 0.6575142947882678, + 0.45585821923189085, + 0.4099371374021249, + 0.7082884101648349, + 0.8291749729575852, + 0.9185967769978759, + 0.04060621570729972, + 0.4474316536950259, + 0.8363505443285942, + 0.8575757647091935, + 0.039799459941253335, + 0.36286406449207986, + 0.5131647448366778, + 0.39819043236603746, + 0.7618607865355099, + 0.22711968862867105, + 0.9394712456140883, + 0.027435600732236387, + 0.7758712942733171, + 0.2025711015279088, + 0.9991247917543385, + 0.9938131541840006, + 0.186905251806328, + 0.6306921520671528, + 0.9934696708319142, + 0.4155138570203807, + 0.33151958596483977, + 0.7351239628799405, + 0.20830903644833842, + 0.7740550628440345, + 0.4191234862718004, + 0.23674572501250302, + 0.059571405085674156, + 0.9129267438467386, + 0.06451592284712904, + 0.6857709392141015, + 0.8380269534549667, + 0.39885910326895746, + 0.2834031729279012, + 0.2382256471610743, + 0.9638471775191081, + 0.7215840179080331, + 0.36423940072103955, + 0.17430531770484514, + 0.9415074521796919, + 0.2738258731873858, + 0.9306468185245629, + 0.6656678169751403, + 0.1155683928384511, + 0.8747709248985905, + 0.12635835804712037, + 0.5759875379497646, + 0.5731929978928748, + 0.03741903689544501, + 0.19725909198310998, + 0.7735858565736194, + 0.4790973691825182, + 0.0981833133220551, + 0.8764985001957379, + 0.5258092509418005, + 0.8342042669276518, + 0.8790350110712204, + 0.7094946960609122, + 0.7708450113645778, + 0.4180654412461543, + 0.4242979420279134, + 0.24139470932602636, + 0.2209321570893309, + 0.7846941996259452, + 0.12945719412306944, + 0.7602339455424194, + 0.10743040252161462, + 0.46685068003629737, + 0.046369043986771774, + 0.23308706601773554, + 0.4844512179420487, + 0.9643822582174879, + 0.6006453251508257, + 0.6936888180218606, + 0.3013953093096562, + 0.7354859248567551, + 0.8349944098141118, + 0.4048627428035879, + 
0.44505698895658985, + 0.7751604897094069, + 0.8744909179894252, + 0.947828891904564, + 0.8640222948104718, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6314201140353499, + 0.11044529796167624, + 0.5234602021756846, + 0.8383531146832072, + 0.35499006367650765, + 0.9442108337417638, + 0.8956398237978505, + 0.1882592063763192, + 0.37890051336945096, + 
0.2616367877576542, + 0.42834342236173995, + 0.963048643409656, + 0.9217911303998747, + 0.3863817294878098, + 0.0974751151813743, + 0.8599500573727576, + 0.057089287186821935, + 0.5533755728797164, + 0.6005915913361521, + 0.1620083752339584, + 0.6061041103116162, + 0.3995882134501906, + 0.7085082598407784, + 0.08407809216757356, + 0.9084207349081772, + 0.7272457718657432, + 0.32992683980137916, + 0.4946824285274082, + 0.615073271111235, + 0.4193961465372614, + 0.30553456868372697, + 0.05060126762779704, + 0.291593602521547, + 0.92010890659423, + 0.983625492452828, + 0.48639081135981677, + 0.4765739226583956, + 0.10661041022380258, + 0.5214502864845213, + 0.8599870683300007, + 0.5179732137433295, + 0.8809917505983484, + 0.722106976281213, + 0.48823452875607354, + 0.3503839647122472, + 0.9865830045574908, + 0.5426630898906415, + 0.5217709034718941, + 0.10414112024944422, + 0.7758532076224506, + 0.3073683279926598, + 0.8740808571085373, + 0.31398731169122107, + 0.9731775740247134, + 0.006681240523999987, + 0.15988007000514737, + 0.12796151451926918, + 0.34990462264973865, + 0.0656152325665823, + 0.573229266270487, + 0.8187327005316167, + 0.7370367403897583, + 0.5653206456450454, + 0.6242381891062653, + 0.6005553470527086, + 0.5780289963610858, + 0.4615664520082169, + 0.33336403885310373, + 0.3607367809090497, + 0.41916265564486266, + 0.01606174796202109, + 0.14162979911924312, + 0.4134088930503992, + 0.8487134257579578, + 0.5428387502949792, + 0.7715775547811061, + 0.3987807271022865, + 0.9327040075466025, + 0.5991429329697456, + 0.42591643248294897, + 0.9341257420123151, + 0.00901387215143401, + 0.31965995666124947, + 0.30354580893146743, + 0.29702838396393805, + 0.8783416396086332, + 0.661370050417091, + 0.5972415473147473, + 0.7785999094421769, + 0.22056824973746136, + 0.5673911270103246, + 0.43802545724601716, + 0.7305663462744951, + 0.16885785839419054, + 0.5057125448994015, + 0.6601524988598084, + 0.41780992611018475, + 0.3827436867828057, + 0.4994412662997859, 
+ 0.6760343095970682, + 0.804379353505974, + 0.6968983017423745, + 0.7087690507974629, + 0.9060334223047234, + 0.7408399172400371, + 0.4399334354759946, + 0.0505122443751177, + 0.6419455093215115, + 0.6284706946091785, + 0.03490785589787804, + 0.23143346520326058, + 0.918425982705948, + 0.13419172631284015, + 0.19703920828417498, + 0.8385184444257783, + 0.5863278446649178, + 0.8162272321723847, + 0.9352789460968992, + 0.8647561254544748, + 0.15975188650299932, + 0.17836973320343386, + 0.6864848216734166, + 0.36415105850464413, + 0.3426658491754645, + 0.08329749299302491, + 0.42193590764837385, + 0.6642021794485784, + 0.6636584135623489, + 0.6053567411529699, + 0.6988705097367177, + 0.3729424883568403, + 0.04411243675556986, + 0.534604760793021, + 0.6085219738551051, + 0.12447165936004256, + 0.05883726779432985, + 0.24847343117316678, + 0.5063340116556715, + 0.3976127704496062, + 0.08152081595341609, + 0.3506522585285301, + 0.1683344260481151, + 0.48691479882706157, + 0.8353082899370665, + 0.14637104867208328, + 0.2129098116028093, + 0.5796866670289799, + 0.39100590803988866, + 0.32139056156796886, + 0.15058673720696558, + 0.010737680262784766, + 0.004595895220052548, + 0.5243334524071466, + 0.3062990700506023, + 0.8989948145015754, + 0.3995636846032241, + 0.4560874030151093, + 0.948023137115712, + 0.540281310007275, + 0.26457559429892186, + 0.6515191420301568, + 0.9554532124850424, + 0.20822584693009594, + 0.21886259526121254, + 0.8463453916651477, + 0.7415404364419516, + 0.5419971984385825, + 0.5497563263808113, + 0.980394542822379, + 0.15307532804101953, + 0.572368260863743, + 0.48863254508712584, + 0.6979314145830208, + 0.43581241800260095, + 0.6989017345810631, + 0.8187807739671614, + 0.2575823090229311, + 0.8240801799547735, + 0.1448669779417252, + 0.8116170735906293, + 0.9082959459536531, + 0.8862781397082458, + 0.7324826297013995, + 0.6259636012578446, + 0.028853519416508266, + 0.6228437907861963, + 0.02409626691061728, + 0.9203609494394416, + 
0.8461273177832925, + 0.2736512366046855, + 0.04020369061681861, + 0.902228304152258, + 0.550880345014923, + 0.07148117151736633, + 0.41054299237597647, + 0.10848671426422485, + 0.9728118525566118, + 0.20990798653999276, + 0.9308342861844455, + 0.4978127378528848, + 0.7351482579424665, + 0.5623836482085756, + 0.143782791890815, + 0.5898839079534395, + 0.2546773800832157, + 0.666653682817035, + 0.7467649896168816, + 0.7339634345720852, + 0.707682901066885, + 0.9101318378861685, + 0.5123191129010624, + 0.9439951816262205, + 0.5463959087566002, + 0.05760611428924023, + 0.676503376165408, + 0.44429048072068333, + 0.5230209296642828, + 0.3224367091169168, + 0.6913680319012937, + 0.17110739904678152, + 0.16779286710665142, + 0.6335681699796741, + 0.07931757368130377, + 0.46888858792493004, + 0.18197527553087145, + 0.002638591858257655, + 0.691716324882675, + 0.044337163682884206, + 0.6293554666845848, + 0.40764392827851403, + 0.2538655700672533, + 0.8281469647313634, + 0.023926045054366463, + 0.8167534085038402, + 0.43320133354594026, + 0.24862060630736682, + 0.09125749468359245, + 0.8118122437010404, + 0.9973271765376198, + 0.9865525895778507, + 0.47291321372136974, + 0.8635521722706079, + 0.4701334745256115, + 0.1506952535704139, + 0.9018427816330945, + 0.6748520059749242, + 0.6573750823451866, + 0.8618860620086874, + 0.08827502135749443, + 0.6729164433989506, + 0.21618869286155384, + 0.4287518670302243, + 0.2407327438728406, + 0.6338004043821022, + 0.8515817109182252, + 0.9659962777150152, + 0.6549008736011241, + 0.8479504542038493, + 0.006679071347445054, + 0.2724677376754999, + 0.9915443051333377, + 0.21071715217089027, + 0.8684246639753141, + 0.7856846182004006, + 0.6859853232823806, + 0.4789831650688282, + 0.8185051822940307, + 0.5789517433188925, + 0.6919294885862914, + 0.921789377332735, + 0.41376092831273026, + 0.29579140546489957, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06536693360779877, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8189538792252146, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8485698722432129, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.42689303422429525, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, 
+ 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7474879973990272, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9950215619461602, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41411358428231926, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.21626088206538852, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.22487940449338928, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4682545943541082, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9510104387444676, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18639355175119265, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8815486106206981, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9534802944405797, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5689890773146097, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3329371174770699, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7461012639404849, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7556533452821178, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6957123659099351, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.08242896820190027, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.23616744795045597, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.13692726551997658, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16712455310677277, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8035400908448518, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1837892082294046, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05376976135766964, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3992656098634447, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9176895603923744, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.19440695649597994, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8475060356712824, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.18787115939366794, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9920266776266308, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8961208287376816, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.41160144347284433, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.17742916588766944, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07851007465436122, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.06660419203737089, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.07000294703215704, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9664967803907726, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.16668958705182269, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.060520301199066595, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7159738475986986, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7139909208243974, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5477377714401691, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8949482680224852, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6066615918823247, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3799019937820075, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.006052425806343575, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.7784261014648269, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8334328323308305, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.3595682016631978, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9593119247449344, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.11585279449665897, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.5430944490786588, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4432788242430207, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.44626736830361025, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8072981979896457, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.9707561358089939, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.694866090191261, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.2577772577688977, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.6567808670786782, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.10632659645304032, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.1875989205574723, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.40947541132428245, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.05797277783754817, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.8163056554998648, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.4315493010082393, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.534265983707278, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 
0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + 0.0, + }; + float data[17 * 2 * 2 * 17] = { + 0.9818316040500025, 0.6304740371328553, 0.3403190259672165, 0.1545772791147686, 0.636859736696714, + 0.7286646469411707, 0.6881973306039132, 0.11522718733202364, 0.05221067208366703, 0.760070751319305, + 0.7630787390114239, 0.03062661180995363, 0.6996367230214888, 0.05874377568360356, 0.7989654896241913, + 0.48340672056664313, 0.06536693360779877, 0.6639543926405075, 0.590430754187306, 0.13166255110692238, + 0.46762259840886866, 0.8254471928133637, 0.4192809120720761, 0.9109983519609524, 0.06282341072241782, + 0.11304156042728886, 0.5823811143881991, 0.8401405236826993, 0.39537016399274805, 0.3393604217038657, + 0.981346463187018, 0.8145676445740513, 0.9617260330265892, 0.7556533452821178, 0.3578659411897861, + 0.7301301934786895, 0.8618464466584491, 0.18816515849292015, 0.9107401874129222, 0.03292249492058863, + 0.8173098249178635, 0.2326748580551784, 0.8712189622758891, 0.052152358807047494, 0.5500602809399827, + 0.29888382736441554, 0.7171636160760438, 0.61107113112707, 0.8742559685716594, 
0.27079826979584654, + 0.17742916588766944, 0.6314201140353499, 0.11044529796167624, 0.5234602021756846, 0.8383531146832072, + 0.35499006367650765, 0.9442108337417638, 0.8956398237978505, 0.1882592063763192, 0.37890051336945096, + 0.2616367877576542, 0.42834342236173995, 0.963048643409656, 0.9217911303998747, 0.3863817294878098, + 0.0974751151813743, 0.8599500573727576, 0.9593119247449344, 0.9824687054977398, 0.9118200012456436, + 0.5325827377037693, 0.13215275466376963, 0.16258783427877654, 0.8733241462879588, 0.9811689540387104, + 0.16717227735564522, 0.17175025957595413, 0.26751940129597196, 0.6902723346114396, 0.9141926645323374, + 0.1602148239484672, 0.590826374279885, 0.12879622297601634, 0.9993413116954417, 0.8189538792252146, + 0.8974954048898902, 0.9194970967140944, 0.9244598046892946, 0.5278043489398178, 0.6099197828719526, + 0.6616471969341827, 0.6838181481733964, 0.3321388894085553, 0.32772025405797767, 0.8045236337225692, + 0.543424952115358, 0.31439754345644666, 0.20390144864709325, 0.7287843878803184, 0.9530947743998013, + 0.9986403638627129, 0.6957123659099351, 0.034930190616391354, 0.7646888920837598, 0.33294464574985105, + 0.18964393968657645, 0.8359357998538534, 0.603777432373904, 0.8080737216440349, 0.43253091727556037, + 0.29704589713859186, 0.35101583873151454, 0.27797579020562, 0.4409871040404285, 0.03183653010616194, + 0.6888718475212194, 0.743930296258262, 0.1383195679389676, 0.07851007465436122, 0.057089287186821935, + 0.5533755728797164, 0.6005915913361521, 0.1620083752339584, 0.6061041103116162, 0.3995882134501906, + 0.7085082598407784, 0.08407809216757356, 0.9084207349081772, 0.7272457718657432, 0.32992683980137916, + 0.4946824285274082, 0.615073271111235, 0.4193961465372614, 0.30553456868372697, 0.05060126762779704, + 0.11585279449665897, 0.108757112003669, 0.1058705156043852, 0.29841868434742747, 0.29680370546430235, + 0.7166628643533716, 0.7961560236596105, 0.2998001849925561, 0.17227883677244848, 0.273205026266526, + 
0.4057715298841855, 0.9241372689749366, 0.4818191702106074, 0.8407488865265121, 0.37467834871018846, + 0.08393661785563533, 0.9287360118914758, 0.8485698722432129, 0.3975263188390229, 0.5781879687545874, + 0.8724409861023287, 0.8566257438266888, 0.14606536893076372, 0.12865091525620642, 0.8838077200484497, + 0.7193688017246426, 0.5554742650350404, 0.1590835864032687, 0.14658353856064854, 0.8378474528967791, + 0.48599324242561204, 0.4618113619505574, 0.42866889296674937, 0.5822891945174828, 0.08242896820190027, + 0.7653124697875597, 0.6197416698501532, 0.03370111396538, 0.277730833007543, 0.47173387429680513, + 0.004031925171442463, 0.4216810529719548, 0.1527542910995907, 0.755050985872739, 0.5103916874099794, + 0.24663350472458012, 0.9944239408861326, 0.8532667918305049, 0.571258224550583, 0.8203424472901112, + 0.7752687501935466, 0.06660419203737089, 0.291593602521547, 0.92010890659423, 0.983625492452828, + 0.48639081135981677, 0.4765739226583956, 0.10661041022380258, 0.5214502864845213, 0.8599870683300007, + 0.5179732137433295, 0.8809917505983484, 0.722106976281213, 0.48823452875607354, 0.3503839647122472, + 0.9865830045574908, 0.5426630898906415, 0.5217709034718941, 0.5430944490786588, 0.9323262118603665, + 0.0959850628867761, 0.07841190876627901, 0.7575281447707486, 0.6451504082047301, 0.2867531294639032, + 0.6038261174674847, 0.5722864102604774, 0.5980542102140242, 0.29143691325023924, 0.3323126425519707, + 0.6213156440241487, 0.2783744506447783, 0.8192692866948531, 0.9580914367293034, 0.8180085498115592, + 0.42689303422429525, 0.9030309982011011, 0.41714977847695556, 0.46228541811630763, 0.2749445339474653, + 0.25981719721224195, 0.8216787813868978, 0.4538230925345249, 0.7282865762095903, 0.8788563382405642, + 0.8826153372351091, 0.008215547008057156, 0.7021704582311347, 0.142877593970687, 0.03730299658952074, + 0.5109381307125636, 0.496533373979947, 0.23616744795045597, 0.3379267797413644, 0.04363591901403252, + 0.021159698090064682, 0.9844201045382279, 
0.9320998367516875, 0.7508176117887979, 0.6279835385302436, + 0.8296132817079003, 0.7742499180798184, 0.22327265194322599, 0.05523148177019932, 0.015431424053453768, + 0.9810406410163607, 0.6360185797616702, 0.318578721332871, 0.8545232136203632, 0.07000294703215704, + 0.10414112024944422, 0.7758532076224506, 0.3073683279926598, 0.8740808571085373, 0.31398731169122107, + 0.9731775740247134, 0.006681240523999987, 0.15988007000514737, 0.12796151451926918, 0.34990462264973865, + 0.0656152325665823, 0.573229266270487, 0.8187327005316167, 0.7370367403897583, 0.5653206456450454, + 0.6242381891062653, 0.4432788242430207, 0.46737984143039313, 0.21761303972473267, 0.010092223694828983, + 0.133924872196312, 0.6457939399463842, 0.7108065587126572, 0.6213477205246348, 0.03780712693181687, + 0.052964796784310986, 0.7720495422151494, 0.07597908210473048, 0.3880721492592797, 0.8673754495725557, + 0.2159619821305203, 0.9349796659144918, 0.19280128505408822, 0.7474879973990272, 0.6221167728428014, + 0.594797090684722, 0.9484422878814833, 0.5779176961086859, 0.05054664123568153, 0.6904408145042924, + 0.9456781975122573, 0.07234907191568096, 0.31454539272595317, 0.039782261918874906, 0.9587127521414383, + 0.8805585025554179, 0.9859544914622989, 0.6275317338277425, 0.14352580582038044, 0.7695326504943156, + 0.13692726551997658, 0.23240215777624518, 0.5821386011719336, 0.40705038971314067, 0.9726331053853133, + 0.5877629214351668, 0.737951863797599, 0.13209112961192793, 0.32596609082453, 0.9051706320834314, + 0.006592902179112681, 0.44539065743524575, 0.21040963856054318, 0.02648671883280662, 0.8307275617617943, + 0.4998882468907887, 0.553040303946554, 0.9664967803907726, 0.6005553470527086, 0.5780289963610858, + 0.4615664520082169, 0.33336403885310373, 0.3607367809090497, 0.41916265564486266, 0.01606174796202109, + 0.14162979911924312, 0.4134088930503992, 0.8487134257579578, 0.5428387502949792, 0.7715775547811061, + 0.3987807271022865, 0.9327040075466025, 0.5991429329697456, 
0.42591643248294897, 0.44626736830361025, + 0.19225222099905237, 0.6381200674681307, 0.09517206563329228, 0.7401210217185733, 0.9213966447997659, + 0.8282252903940375, 0.25575682793740195, 0.09419161503451245, 0.7133067524485386, 0.30622040856962174, + 0.04567030385976745, 0.4421966327959601, 0.9470399466953359, 0.1863959618388854, 0.603892794005211, + 0.7305230388181004, 0.9950215619461602, 0.41043924117549035, 0.3199344511919423, 0.17009843234582034, + 0.19333893045555228, 0.4065598982414558, 0.4783366067549203, 0.2925065077781679, 0.46612867625915266, + 0.2260401750143446, 0.9769887894299829, 0.626959110785719, 0.9005591753667794, 0.22730676915421288, + 0.43752553937624405, 0.20832599816771735, 0.0648434614140343, 0.16712455310677277, 0.5297816530911937, + 0.5843239411260238, 0.1571513016873196, 0.4588380934037194, 0.8580445067796844, 0.08760914807929077, + 0.3167980809508252, 0.5688868537877791, 0.7432713240307058, 0.47849574835183317, 0.08569476671747134, + 0.0028373026780509347, 0.36146132556611577, 0.11732007318482318, 0.8826440135398512, 0.8057998064675778, + 0.16668958705182269, 0.9341257420123151, 0.00901387215143401, 0.31965995666124947, 0.30354580893146743, + 0.29702838396393805, 0.8783416396086332, 0.661370050417091, 0.5972415473147473, 0.7785999094421769, + 0.22056824973746136, 0.5673911270103246, 0.43802545724601716, 0.7305663462744951, 0.16885785839419054, + 0.5057125448994015, 0.6601524988598084, 0.8072981979896457, 0.28364151338783206, 0.7685411345675934, + 0.8988615642504377, 0.7248529774876415, 0.8955713245004737, 0.9694464156380791, 0.13627973385112224, + 0.0262054823780018, 0.9598488199046755, 0.6637745866350796, 0.567021314483643, 0.8028105889988362, + 0.5449983501324986, 0.7405568508319487, 0.7479203765092981, 0.6091595342627724, 0.41411358428231926, + 0.5533790070863268, 0.08314085235301405, 0.8425708019179065, 0.15917383752943826, 0.6527601974253487, + 0.30126252545266263, 0.6463253615283967, 0.4485504420576313, 0.6669832516919934, 
0.6390350894633332, + 0.5841276848813355, 0.3633156947810412, 0.20053851382498278, 0.2991176134182495, 0.2237519088309342, + 0.6924468706663672, 0.8035400908448518, 0.04722817815152447, 0.5163195461666986, 0.8923939769755282, + 0.05625559077233866, 0.061665786518363186, 0.662644780876765, 0.6339151490480687, 0.23437975062774363, + 0.12890375862760606, 0.3501592943353464, 0.09947716801883921, 0.9783908021362502, 0.5039620019339041, + 0.7894302782128263, 0.40318009245339925, 0.010992512436047153, 0.060520301199066595, 0.41780992611018475, + 0.3827436867828057, 0.4994412662997859, 0.6760343095970682, 0.804379353505974, 0.6968983017423745, + 0.7087690507974629, 0.9060334223047234, 0.7408399172400371, 0.4399334354759946, 0.0505122443751177, + 0.6419455093215115, 0.6284706946091785, 0.03490785589787804, 0.23143346520326058, 0.918425982705948, + 0.9707561358089939, 0.3571584723082011, 0.6634376909205661, 0.7823687380994333, 0.46769811417519425, + 0.43313252298354554, 0.4512165332058773, 0.7409639493996218, 0.773542103389995, 0.03251757301743474, + 0.2918512326156285, 0.2707389025126231, 0.48862277656898045, 0.0677707878489644, 0.5319713656658794, + 0.19055872328014922, 0.42372710919648093, 0.21626088206538852, 0.6267863686588406, 0.6957686899861832, + 0.8481678412966801, 0.6679132796626048, 0.2520678664501832, 0.6859580348999734, 0.41653832488428466, + 0.3443130260330426, 0.9265385049178334, 0.4632590871458663, 0.2792541905414425, 0.297175390928865, + 0.9649706249008346, 0.8868549044180415, 0.27859114362929416, 0.7113192182809037, 0.1837892082294046, + 0.7885896931523694, 0.09972075576249273, 0.327050109765748, 0.040566933728248045, 0.5192171211123873, + 0.5010930644256814, 0.1751335903258584, 0.973049431223127, 0.04605996955300973, 0.5346739560986009, + 0.8356986203333525, 0.8197705199993206, 0.00016846934847547512, 0.9767516366117579, 0.7373887650278352, + 0.25343735960629343, 0.7159738475986986, 0.13419172631284015, 0.19703920828417498, 0.8385184444257783, + 
0.5863278446649178, 0.8162272321723847, 0.9352789460968992, 0.8647561254544748, 0.15975188650299932, + 0.17836973320343386, 0.6864848216734166, 0.36415105850464413, 0.3426658491754645, 0.08329749299302491, + 0.42193590764837385, 0.6642021794485784, 0.6636584135623489, 0.694866090191261, 0.5410233673552419, + 0.7053385617098781, 0.5796221225781045, 0.6774744169203547, 0.38184777780992407, 0.5921849312009162, + 0.4139673487900193, 0.6324891962264935, 0.23998117926531592, 0.3006459023062501, 0.7543836662968866, + 0.21903128794922366, 0.8847078454339562, 0.43658173987503657, 0.13725114471518118, 0.5082197711407339, + 0.22487940449338928, 0.9797397982051781, 0.5567150341480799, 0.7202351101992689, 0.16413702022000565, + 0.336467018972884, 0.0377230903593081, 0.8602687151091007, 0.11839554304283328, 0.14208471520735977, + 0.43607071414863874, 0.8741600257317956, 0.6640170355939985, 0.0909679192917131, 0.9106544964694222, + 0.804698027266483, 0.35002717921135407, 0.05376976135766964, 0.32665192959159395, 0.9515843270568628, + 0.7912931593367771, 0.3390543201499241, 0.39116094675420365, 0.48473912485681336, 0.7748044486400519, + 0.9504668800747522, 0.17379382917448194, 0.9058195197373183, 0.2269566175662039, 0.038537709312595037, + 0.26170965377152067, 0.39139592915908183, 0.21106987419176348, 0.2688423270567659, 0.7139909208243974, + 0.6053567411529699, 0.6988705097367177, 0.3729424883568403, 0.04411243675556986, 0.534604760793021, + 0.6085219738551051, 0.12447165936004256, 0.05883726779432985, 0.24847343117316678, 0.5063340116556715, + 0.3976127704496062, 0.08152081595341609, 0.3506522585285301, 0.1683344260481151, 0.48691479882706157, + 0.8353082899370665, 0.2577772577688977, 0.18832064432940687, 0.08574080793259364, 0.30059358691237237, + 0.6482106495699304, 0.8511603041679399, 0.2701156230003453, 0.21298871203478398, 0.08203197080945912, + 0.6725944240256472, 0.3066569123534626, 0.2662259874220829, 0.16779728161704843, 0.7158644073214633, + 0.6878815896858301, 
0.9934704148141994, 0.40929994195329833, 0.4682545943541082, 0.151688664815809, + 0.7393767419885556, 0.48217967745270196, 0.9602943716631533, 0.30319852768975375, 0.1330627046343439, + 0.6710020756991474, 0.27078039927991027, 0.11186194386988224, 0.5457171365684865, 0.7880448237433672, + 0.013805055471389882, 0.6180892645154643, 0.48759905827516603, 0.8233479375602223, 0.4264784017101182, + 0.3992656098634447, 0.0588646031637704, 0.8142663494405596, 0.7230712928151145, 0.10388770611234022, + 0.6320570126609345, 0.3854592292304997, 0.8401593200228608, 0.19857080876585775, 0.4632224663542698, + 0.8338896779165693, 0.07498742940886116, 0.019157468796003774, 0.1973426137959765, 0.9481227475490336, + 0.18022422840604635, 0.8193635052157996, 0.5477377714401691, 0.14637104867208328, 0.2129098116028093, + 0.5796866670289799, 0.39100590803988866, 0.32139056156796886, 0.15058673720696558, 0.010737680262784766, + 0.004595895220052548, 0.5243334524071466, 0.3062990700506023, 0.8989948145015754, 0.3995636846032241, + 0.4560874030151093, 0.948023137115712, 0.540281310007275, 0.26457559429892186, 0.6567808670786782, + 0.9879146301551541, 0.8134508106867051, 0.4772264929878759, 0.5239150673556234, 0.04973375923455958, + 0.9408063913333713, 0.8933576927435202, 0.7690497885609424, 0.3432688849226637, 0.09841971497506807, + 0.6489987050683524, 0.4612619245600613, 0.9668831791357312, 0.6773541509970112, 0.8113556563575658, + 0.5103191595379972, 0.9510104387444676, 0.9633894232982487, 0.7448212055191065, 0.5452953261409613, + 0.4856970915644032, 0.8693281473365151, 0.6373330823065603, 0.5771587194750025, 0.8691299641815123, + 0.0815851410708418, 0.6806300549508425, 0.5075760571421807, 0.601124222598236, 0.07050306902810866, + 0.36920524642079033, 0.18818005490550915, 0.5583892243115647, 0.9176895603923744, 0.6971962981473214, + 0.03280146452993471, 0.6100845348121183, 0.2564881425807104, 0.40952730901052836, 0.11198840547195466, + 0.6384039317824369, 0.40293878780785075, 
0.022692351439830727, 0.8875572687105943, 0.06480422446351664, + 0.10051348811549643, 0.1980882930823188, 0.9179898183709072, 0.6861308896442132, 0.6956857450336589, + 0.8949482680224852, 0.6515191420301568, 0.9554532124850424, 0.20822584693009594, 0.21886259526121254, + 0.8463453916651477, 0.7415404364419516, 0.5419971984385825, 0.5497563263808113, 0.980394542822379, + 0.15307532804101953, 0.572368260863743, 0.48863254508712584, 0.6979314145830208, 0.43581241800260095, + 0.6989017345810631, 0.8187807739671614, 0.10632659645304032, 0.692501163915668, 0.872476678417899, + 0.39847669533309527, 0.2550816582357872, 0.44785761524405, 0.6631992982892411, 0.07909667197674031, + 0.15595306847956636, 0.7549753608310522, 0.7497451144627425, 0.961356053996849, 0.047012242220749845, + 0.39968661191290844, 0.7900745768809934, 0.18988750301939106, 0.34309148527453104, 0.18639355175119265, + 0.5927420217262831, 0.7580186574311277, 0.5033605618537017, 0.9425017651769168, 0.48862010484707064, + 0.9853716648830263, 0.39984394946359525, 0.5558506900537014, 0.5908610536703583, 0.22602671513614803, + 0.8798987778429802, 0.9531473683222749, 0.7410953157434534, 0.30111561488120153, 0.4403622314059401, + 0.052456464089102095, 0.19440695649597994, 0.31107695018579007, 0.7677549648467343, 0.6201418408382342, + 0.804275349391573, 0.2870986397529448, 0.1397119476044384, 0.8556387210623139, 0.6062812206466079, + 0.6575142947882678, 0.45585821923189085, 0.4099371374021249, 0.7082884101648349, 0.8291749729575852, + 0.9185967769978759, 0.04060621570729972, 0.4474316536950259, 0.6066615918823247, 0.2575823090229311, + 0.8240801799547735, 0.1448669779417252, 0.8116170735906293, 0.9082959459536531, 0.8862781397082458, + 0.7324826297013995, 0.6259636012578446, 0.028853519416508266, 0.6228437907861963, 0.02409626691061728, + 0.9203609494394416, 0.8461273177832925, 0.2736512366046855, 0.04020369061681861, 0.902228304152258, + 0.1875989205574723, 0.8666802227613912, 0.33819954591027035, 
0.6754386381771887, 0.012056309491297434, + 0.6529806784933322, 0.9651442384174174, 0.9978184657413758, 0.6258752595314446, 0.4974480025947464, + 0.28768692957840036, 0.5851676600752466, 0.38541216263965494, 0.5412711123583736, 0.7940464609305427, + 0.008865031997954298, 0.05848479024922548, 0.8815486106206981, 0.01209641118408944, 0.46704246287932405, + 0.4750820624217653, 0.10164673751729414, 0.9796758207356858, 0.8610487700070613, 0.6211171296662068, + 0.8731238761599872, 0.37578834957139673, 0.1120201496751766, 0.011314071997954644, 0.2572189570511383, + 0.879811392407261, 0.10362272859591204, 0.01645233098389376, 0.9500294012864102, 0.8475060356712824, + 0.8363505443285942, 0.8575757647091935, 0.039799459941253335, 0.36286406449207986, 0.5131647448366778, + 0.39819043236603746, 0.7618607865355099, 0.22711968862867105, 0.9394712456140883, 0.027435600732236387, + 0.7758712942733171, 0.2025711015279088, 0.9991247917543385, 0.9938131541840006, 0.186905251806328, + 0.6306921520671528, 0.3799019937820075, 0.550880345014923, 0.07148117151736633, 0.41054299237597647, + 0.10848671426422485, 0.9728118525566118, 0.20990798653999276, 0.9308342861844455, 0.4978127378528848, + 0.7351482579424665, 0.5623836482085756, 0.143782791890815, 0.5898839079534395, 0.2546773800832157, + 0.666653682817035, 0.7467649896168816, 0.7339634345720852, 0.40947541132428245, 0.9396557362265029, + 0.4326393542047332, 0.2488609657348656, 0.7562763464489093, 0.9983141149044218, 0.008402913032401704, + 0.19571855359179036, 0.690386550914582, 0.7536429455538223, 0.9378744182203695, 0.7586425829635426, + 0.1445178013804469, 0.9109541843179556, 0.07821616551324684, 0.05078095176454778, 0.9130796647001046, + 0.9534802944405797, 0.9690251328314643, 0.42575080842172963, 0.7001480708771485, 0.4041297773374436, + 0.4185210345469337, 0.7190995465953123, 0.7640566649798914, 0.23242098306402514, 0.3542816499880481, + 0.5507254523321877, 0.3029570242047027, 0.3038459304238752, 0.9598648212640765, 
0.7204511737388988, + 0.8790782739473424, 0.7794923303105117, 0.18787115939366794, 0.9934696708319142, 0.4155138570203807, + 0.33151958596483977, 0.7351239628799405, 0.20830903644833842, 0.7740550628440345, 0.4191234862718004, + 0.23674572501250302, 0.059571405085674156, 0.9129267438467386, 0.06451592284712904, 0.6857709392141015, + 0.8380269534549667, 0.39885910326895746, 0.2834031729279012, 0.2382256471610743, 0.006052425806343575, + 0.707682901066885, 0.9101318378861685, 0.5123191129010624, 0.9439951816262205, 0.5463959087566002, + 0.05760611428924023, 0.676503376165408, 0.44429048072068333, 0.5230209296642828, 0.3224367091169168, + 0.6913680319012937, 0.17110739904678152, 0.16779286710665142, 0.6335681699796741, 0.07931757368130377, + 0.46888858792493004, 0.05797277783754817, 0.20480686637597612, 0.8200671332173322, 0.4353078372661333, + 0.9539464470659443, 0.8303326350536121, 0.6278771584995406, 0.2509169090532466, 0.1774990588847868, + 0.4984180010157796, 0.37490941253308996, 0.747904697670194, 0.4689694228884409, 0.07865978524122308, + 0.018278256806917637, 0.7339557318674227, 0.9194551313394231, 0.5689890773146097, 0.8815381229146946, + 0.03744741097255122, 0.9047169962685846, 0.4610613790126912, 0.2475915213540839, 0.4298710416601963, + 0.8221398481995089, 0.23343189008359377, 0.5350763782441681, 0.14739879873747308, 0.33280129854717244, + 0.23821919516506462, 0.7167817307593071, 0.8773007050805388, 0.48070282049083113, 0.08899633176897048, + 0.9920266776266308, 0.9638471775191081, 0.7215840179080331, 0.36423940072103955, 0.17430531770484514, + 0.9415074521796919, 0.2738258731873858, 0.9306468185245629, 0.6656678169751403, 0.1155683928384511, + 0.8747709248985905, 0.12635835804712037, 0.5759875379497646, 0.5731929978928748, 0.03741903689544501, + 0.19725909198310998, 0.7735858565736194, 0.7784261014648269, 0.18197527553087145, 0.002638591858257655, + 0.691716324882675, 0.044337163682884206, 0.6293554666845848, 0.40764392827851403, 0.2538655700672533, + 
0.8281469647313634, 0.023926045054366463, 0.8167534085038402, 0.43320133354594026, 0.24862060630736682, + 0.09125749468359245, 0.8118122437010404, 0.9973271765376198, 0.9865525895778507, 0.8163056554998648, + 0.48861250541718937, 0.01814006325736084, 0.7541958032692393, 0.8774612716989119, 0.3760072307293194, + 0.2342391882453575, 0.5507128966242177, 0.8814107840171872, 0.7629915021375545, 0.022596785422104193, + 0.4676586074696423, 0.6323496539184577, 0.10709853732458496, 0.24458540292747133, 0.7432044950079842, + 0.991407564153164, 0.3329371174770699, 0.14443919067113453, 0.9763728131494807, 0.13169707338891068, + 0.2574020118799679, 0.9117198409079358, 0.4813044045568353, 0.7148710776112692, 0.10816472148122425, + 0.4872820160105873, 0.7843715998368327, 0.9792048963235088, 0.5032302898011388, 0.9133488145295496, + 0.8343572569411505, 0.45894548309522043, 0.8041104427062074, 0.8961208287376816, 0.4790973691825182, + 0.0981833133220551, 0.8764985001957379, 0.5258092509418005, 0.8342042669276518, 0.8790350110712204, + 0.7094946960609122, 0.7708450113645778, 0.4180654412461543, 0.4242979420279134, 0.24139470932602636, + 0.2209321570893309, 0.7846941996259452, 0.12945719412306944, 0.7602339455424194, 0.10743040252161462, + 0.8334328323308305, 0.47291321372136974, 0.8635521722706079, 0.4701334745256115, 0.1506952535704139, + 0.9018427816330945, 0.6748520059749242, 0.6573750823451866, 0.8618860620086874, 0.08827502135749443, + 0.6729164433989506, 0.21618869286155384, 0.4287518670302243, 0.2407327438728406, 0.6338004043821022, + 0.8515817109182252, 0.9659962777150152, 0.4315493010082393, 0.6266082092973347, 0.04229595441634182, + 0.8184353596321909, 0.10225103323158635, 0.11394336686699535, 0.4022127256961855, 0.30838155456109306, + 0.3195787933473151, 0.11242740506794102, 0.06655005190369112, 0.5103072329299937, 0.22668773651030127, + 0.32922441298388727, 0.8140348723745937, 0.5638190653380799, 0.26541973372425653, 0.7461012639404849, + 0.5871631865450887, 
0.056334307939404415, 0.7231806850118402, 0.6744439230965042, 0.8458065219298543, + 0.009223176587899173, 0.6934220235290146, 0.7552659426337391, 0.40417163368066655, 0.18495513003494457, + 0.716067507969818, 0.14155481130771042, 0.029361076807311592, 0.8830993452371644, 0.6613103229791982, + 0.9398338818781078, 0.41160144347284433, 0.46685068003629737, 0.046369043986771774, 0.23308706601773554, + 0.4844512179420487, 0.9643822582174879, 0.6006453251508257, 0.6936888180218606, 0.3013953093096562, + 0.7354859248567551, 0.8349944098141118, 0.4048627428035879, 0.44505698895658985, 0.7751604897094069, + 0.8744909179894252, 0.947828891904564, 0.8640222948104718, 0.3595682016631978, 0.6549008736011241, + 0.8479504542038493, 0.006679071347445054, 0.2724677376754999, 0.9915443051333377, 0.21071715217089027, + 0.8684246639753141, 0.7856846182004006, 0.6859853232823806, 0.4789831650688282, 0.8185051822940307, + 0.5789517433188925, 0.6919294885862914, 0.921789377332735, 0.41376092831273026, 0.29579140546489957, + 0.534265983707278, + }; + + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_Z, {17, 2, 2, 17}, {8, 2, 16, 16}, DT_FLOAT}; + TransResult result; + + FormatTransferFractalZ transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(data_4d)); + for (int i = 0; i < sizeof(data_4d) / sizeof(data_4d[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], data_4d[i]); + } +} + +TEST_F(UtestFormatTransferNhwcFz, build_transfer_fp32) { + float data[17 * 5 * 5 * 31]; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_Z, std::vector({17, 5, 5, 31}), + std::vector({50, 2, 16, 16}), DT_FLOAT}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} + +TEST_F(UtestFormatTransferNhwcFz, build_transfer_fp16) { + uint16_t data[1 * 5 * 5 * 1]; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_Z, std::vector({1, 5, 5, 1}), + std::vector({25, 1, 
16, 16}), DT_FLOAT16}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} + +TEST_F(UtestFormatTransferNhwcFz, build_transfer_uint8) { + uint8_t data[64 * 2 * 2 * 64]; + TransArgs args{ + reinterpret_cast(data), FORMAT_NHWC, FORMAT_FRACTAL_Z, std::vector({64, 2, 2, 64}), + std::vector({8, 4, 16, 32}), DT_UINT8}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_transpose_unittest.cc b/tests/ut/ge/common/format_transfer_transpose_unittest.cc new file mode 100644 index 00000000..935797b2 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_transpose_unittest.cc @@ -0,0 +1,4656 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_transpose.h" + +namespace ge { +namespace formats { +class UtestFormatTranspose : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTranspose, one) { + uint8_t data[1] = {100}; + uint8_t ret[1] = {100}; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(data, std::vector({1, 1, 1, 1}), std::vector({1, 1, 1, 1}), + DT_UINT8, std::vector({3, 2, 1, 0}), result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((result.data.get())[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, no_trans) { + uint8_t data[5] = {1, 2, 3, 4, 5}; + uint8_t ret[5] = {1, 2, 3, 4, 5}; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(data, std::vector({1, 1, 1, 5}), std::vector({1, 1, 1, 5}), + DT_UINT8, std::vector({2, 1, 0, 3}), result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((result.data.get())[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, param_invalid) { + uint8_t data[5] = {1, 2, 3, 4, 5}; + + TransResult result; + EXPECT_NE(TransposeWithShapeCheck(nullptr, std::vector({1, 1, 1, 5}), std::vector({1, 1, 1, 5}), + DT_UINT8, std::vector({2, 1, 0, 3}), result), + SUCCESS); + EXPECT_NE(TransposeWithShapeCheck(data, std::vector({}), std::vector({1, 1, 1, 5}), DT_UINT8, + std::vector({2, 1, 0, 3}), result), + SUCCESS); + EXPECT_NE(TransposeWithShapeCheck(data, std::vector({1, 1, 1, 5}), std::vector({1, 1, 1, 5}), + DT_UINT8, std::vector({2, 1, 0}), result), + SUCCESS); + EXPECT_NE(TransposeWithShapeCheck(data, std::vector({1, 1, 1, 5}), std::vector({1, 1, 1, 5}), + DT_UINT8, std::vector({}), result), + SUCCESS); + EXPECT_NE(TransposeWithShapeCheck(data, std::vector({1, 1, 1, 5}), std::vector({1, 1, 1, 5}), + DT_UINT8, std::vector({2, 1, 0, 0}), result), + SUCCESS); + 
EXPECT_NE(TransposeWithShapeCheck(data, std::vector({1, 1, 1, 5}), std::vector({1, 1, 1, 5}), + DT_UNDEFINED, std::vector({2, 1, 0, 3}), result), + SUCCESS); + EXPECT_NE(TransposeWithShapeCheck(data, std::vector({1, 1, 1, 5}), std::vector({1, 1, 1, 5}), + DT_UINT8, std::vector({2, 1, 0, 3, 4}), result), + SUCCESS); + EXPECT_NE(TransposeWithShapeCheck(data, std::vector({1, 1, 1, 5}), std::vector({1, 1, 1, 5}), + DT_UINT8, std::vector({3, 1, 0, 2}), result), + SUCCESS); +} + +TEST_F(UtestFormatTranspose, transpose_with_shape_check_2d) { + uint8_t data[4] = {1, 2, 3, 4}; + uint8_t ret[4] = {1, 3, 2, 4}; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(data, std::vector({2, 2}), std::vector({2, 2}), DT_UINT8, + std::vector({1, 0}), result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((result.data.get())[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nchw_to_hwcn1) { + uint16_t data[1 * 3 * 4 * 4] = { + 15322, 14946, 12601, 14058, 12641, 14470, 14686, 15052, 11964, 14846, 13154, 13571, 14947, 12467, 12786, 14238, + 15334, 14814, 13608, 12163, 14169, 15274, 14780, 15303, 14986, 14841, 14290, 13785, 13878, 11576, 14411, 14522, + 14394, 13508, 13021, 14691, 13263, 15145, 14724, 15167, 14523, 13334, 14834, 13844, 9902, 14984, 15051, 14511, + }; + uint16_t ret[4 * 4 * 3 * 1] = { + 15322, 15334, 14394, 14946, 14814, 13508, 12601, 13608, 13021, 14058, 12163, 14691, 12641, 14169, 13263, 14470, + 15274, 15145, 14686, 14780, 14724, 15052, 15303, 15167, 11964, 14986, 14523, 14846, 14841, 13334, 13154, 14290, + 14834, 13571, 13785, 13844, 14947, 13878, 9902, 12467, 11576, 14984, 12786, 14411, 15051, 14238, 14522, 14511, + }; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(reinterpret_cast(data), std::vector({1, 3, 4, 4}), + std::vector({4, 4, 3, 1}), DT_FLOAT16, std::vector({2, 3, 1, 0}), + result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; 
i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nchw_to_hwcn2) { + uint16_t data[8 * 3 * 16 * 16] = { + 15218, 13789, 13063, 9806, 14988, 15221, 11097, 15162, 11381, 15333, 14921, 14612, 14806, 13951, 13796, 9764, + 12642, 12389, 15154, 14243, 14542, 13551, 14819, 14219, 12500, 11825, 13389, 13967, 14601, 12846, 12777, 14399, + 13604, 15360, 14275, 1482, 12466, 12181, 15129, 13778, 15327, 14392, 14481, 14440, 14443, 13718, 15180, 14864, + 14155, 14405, 13414, 13070, 15013, 15134, 12188, 12467, 14942, 14851, 13225, 14767, 14392, 15052, 13812, 15322, + 15092, 13656, 15121, 14915, 14587, 15050, 13050, 14725, 14548, 15140, 14582, 15256, 14727, 14590, 14849, 13786, + 11356, 14817, 12373, 12298, 13860, 11082, 14150, 13574, 14752, 14076, 14821, 14959, 14625, 15264, 14614, 14827, + 14355, 10344, 14213, 14615, 15061, 12439, 14468, 13154, 15284, 14383, 14890, 14412, 14149, 14472, 14926, 13993, + 14641, 15171, 14781, 13145, 12947, 14460, 15079, 14334, 12615, 14999, 15186, 15196, 14353, 14469, 14463, 13002, + 13222, 12367, 15230, 14639, 11884, 13924, 14306, 10514, 14267, 14709, 13312, 14045, 14767, 15236, 13136, 13564, + 15090, 13879, 14777, 13060, 14849, 13626, 14772, 12754, 13192, 11059, 14809, 13715, 14743, 15326, 11682, 14940, + 15001, 15259, 12495, 13345, 14374, 14623, 12469, 14286, 15152, 14539, 13554, 12103, 14891, 15214, 14792, 14589, + 13333, 14774, 13236, 15267, 12822, 15313, 13400, 10680, 15178, 11675, 14763, 14768, 13488, 12296, 15327, 13629, + 10754, 15079, 15134, 13082, 13724, 12625, 15072, 12016, 12175, 8693, 12380, 15244, 14810, 13580, 14749, 14387, + 14789, 13201, 15287, 14421, 11550, 14539, 15190, 15051, 15165, 14662, 15348, 13068, 15344, 14915, 14271, 15164, + 15139, 14424, 14958, 14993, 14808, 13020, 13805, 14455, 14005, 11593, 14578, 13430, 13973, 14623, 13813, 14042, + 15117, 14152, 14845, 13981, 13084, 15173, 15197, 12494, 15290, 15310, 15113, 15059, 14753, 
13861, 14350, 9261, + 14570, 15273, 10601, 14614, 13511, 12111, 10253, 14059, 13561, 13971, 14712, 13934, 13660, 15269, 14971, 12354, + 14996, 14591, 13842, 12773, 13814, 12790, 14484, 12382, 14624, 14191, 12378, 15116, 12684, 12997, 14967, 14958, + 13047, 15269, 11493, 14822, 13442, 14588, 15007, 15143, 14340, 15358, 15114, 14642, 14519, 14391, 13350, 14960, + 14955, 15137, 12575, 15041, 14078, 11693, 13219, 14677, 12349, 14568, 13739, 12587, 15351, 13895, 13355, 14526, + 14910, 14515, 14676, 15074, 13644, 13561, 12228, 13685, 12776, 14880, 14721, 14385, 13613, 14790, 14501, 15228, + 14632, 12788, 11308, 8196, 12790, 11970, 14845, 14754, 14978, 13231, 13764, 14244, 14707, 14632, 9242, 14691, + 14346, 13486, 14071, 14448, 15003, 12118, 14713, 14558, 14760, 14422, 13875, 13085, 14393, 14779, 13814, 13111, + 13383, 14018, 14014, 14839, 14210, 12437, 15339, 13604, 13255, 14629, 15189, 11812, 15144, 14950, 13682, 12963, + 14100, 14212, 14821, 14517, 15276, 14529, 8493, 13500, 10752, 14522, 14509, 13676, 14529, 12640, 11053, 15280, + 14044, 13446, 11286, 11612, 14305, 14579, 13706, 12916, 14970, 13922, 15135, 15114, 13592, 15021, 14016, 15263, + 14702, 15000, 13327, 14491, 11318, 14949, 14963, 12681, 14150, 9512, 11371, 14225, 12227, 14448, 13459, 14305, + 15239, 14422, 14733, 13827, 14706, 14982, 12596, 14136, 9851, 9769, 14905, 15025, 14162, 15093, 14252, 12626, + 12374, 12762, 12035, 13241, 10954, 14646, 13949, 14180, 14235, 14558, 10275, 11083, 14919, 13859, 13848, 13996, + 14710, 13592, 12164, 14703, 13770, 14721, 14737, 10447, 15344, 14407, 14465, 12551, 12164, 14097, 14774, 11433, + 15191, 13576, 14643, 14531, 14848, 14582, 15327, 12373, 13330, 14428, 13499, 14613, 15328, 14293, 12528, 12854, + 15050, 11194, 13966, 10159, 12044, 15323, 12442, 14291, 13730, 7621, 12430, 14331, 15017, 14669, 14306, 14172, + 13537, 13048, 14247, 14831, 14781, 14660, 14549, 15142, 14835, 14988, 12002, 14360, 13185, 15315, 14304, 14346, + 13827, 14474, 14714, 10817, 13536, 13742, 
11544, 13650, 13566, 11179, 14200, 9798, 14579, 14496, 14902, 13323, + 14848, 13776, 14646, 13786, 14569, 12899, 14372, 13270, 14343, 13424, 15342, 14625, 11393, 15071, 14705, 14128, + 14430, 14405, 15129, 14692, 12506, 15086, 15131, 9296, 14861, 13993, 12487, 10378, 7262, 14524, 14602, 12789, + 15325, 14425, 13418, 15162, 14152, 14702, 15297, 15187, 13744, 15250, 14461, 14875, 10122, 14458, 14063, 12743, + 14546, 14711, 14665, 14763, 14571, 12480, 14459, 15059, 12350, 14258, 15143, 14633, 14291, 14381, 13334, 13746, + 11936, 13767, 14704, 14724, 15269, 10572, 14916, 14240, 14564, 14939, 13403, 14049, 14328, 12835, 14571, 13628, + 15218, 14857, 14581, 14943, 15309, 14376, 14452, 14178, 14727, 14594, 15309, 14408, 15190, 15155, 13353, 14081, + 14178, 14753, 12683, 14510, 14358, 8334, 15131, 13286, 14329, 11491, 12389, 14982, 14191, 11493, 14472, 12930, + 14930, 14031, 14943, 14279, 14742, 13768, 12287, 14877, 13965, 13996, 14883, 14902, 11537, 14967, 11473, 15198, + 14982, 13857, 14537, 14448, 14696, 14388, 14096, 14829, 15314, 12586, 14195, 14287, 13738, 14967, 14392, 15152, + 14879, 15328, 14645, 14385, 14702, 14024, 14833, 15136, 13371, 13783, 13702, 13579, 12598, 15077, 13490, 10935, + 15127, 11977, 14303, 12646, 12844, 14046, 8742, 14972, 14982, 14420, 14442, 15101, 14515, 14755, 11503, 14877, + 13653, 14387, 14611, 14332, 15150, 13412, 14739, 13091, 8129, 13834, 14871, 15002, 12871, 13629, 13654, 15171, + 15009, 15085, 12316, 11567, 15009, 15261, 13573, 15056, 14732, 13588, 14379, 15171, 15325, 13221, 11819, 12691, + 14416, 7682, 14939, 12788, 15052, 14503, 11688, 14527, 11339, 15313, 15150, 14100, 13396, 14594, 14431, 15039, + 12000, 14817, 13488, 15202, 14710, 14741, 14217, 14517, 13382, 14079, 12370, 14990, 14584, 12725, 14595, 12642, + 11533, 14510, 9512, 14471, 14828, 12574, 14521, 14982, 13539, 14860, 14720, 10552, 14985, 13661, 11420, 13696, + 14362, 10515, 14653, 13420, 13574, 13842, 14508, 14830, 14449, 11494, 14313, 14673, 14209, 14817, 14760, 15325, 
+ 12427, 14847, 14085, 13609, 10869, 14273, 14995, 14660, 11322, 11275, 13323, 14472, 14533, 13474, 14696, 14739, + 14767, 8285, 14735, 14583, 13389, 13920, 14861, 14999, 14418, 14663, 13542, 10913, 14829, 14093, 15194, 13299, + 14175, 14737, 13490, 15007, 13549, 14065, 14150, 9607, 12753, 14712, 14758, 12337, 13471, 13781, 15016, 12606, + 14779, 15001, 13475, 14449, 14971, 12807, 13895, 14198, 14598, 14393, 13989, 15225, 15306, 14117, 15039, 13918, + 14849, 12393, 14612, 13905, 14883, 15059, 13914, 14043, 14936, 14430, 13070, 12897, 13972, 14337, 13769, 14968, + 14557, 14746, 13365, 13025, 15023, 15299, 14620, 13806, 14778, 15358, 14191, 12524, 12255, 14752, 15312, 9470, + 12437, 15227, 14637, 15304, 14797, 12762, 10875, 12814, 3788, 15006, 14751, 14499, 14294, 14850, 14893, 14214, + 15286, 13012, 14822, 12537, 11468, 14371, 14215, 14813, 15033, 15196, 14925, 14348, 14536, 12966, 14970, 11085, + 13579, 14872, 14248, 11043, 13828, 14114, 14287, 14391, 13944, 15329, 15003, 11492, 14154, 13456, 12964, 12496, + 15261, 14923, 15336, 15037, 13390, 14493, 14713, 15019, 14293, 13816, 12662, 15160, 14489, 13974, 15213, 14889, + 15212, 15267, 14752, 8384, 14623, 14903, 11864, 9326, 15255, 11005, 13914, 14549, 14398, 15244, 13693, 14341, + 11535, 14654, 14674, 15241, 15328, 12293, 13321, 15081, 12861, 13159, 14620, 13644, 14188, 14633, 14903, 13592, + 15103, 14350, 15345, 14846, 14388, 13738, 10832, 14052, 14990, 12614, 14796, 13244, 12419, 14204, 12856, 10574, + 14500, 14512, 13559, 15188, 15274, 13894, 13515, 13458, 12779, 15261, 14628, 15334, 8955, 13353, 15231, 14660, + 11967, 4435, 14773, 15006, 14794, 14499, 14999, 11392, 13341, 11410, 14180, 14814, 13278, 11612, 14759, 15079, + 13932, 15020, 14475, 14692, 14334, 15157, 13972, 12724, 13396, 14572, 13554, 15306, 9348, 12110, 14500, 14187, + 14349, 15306, 11988, 15276, 13176, 13510, 15352, 11060, 10274, 14055, 13916, 13974, 13812, 14888, 9789, 14606, + 14849, 14668, 14740, 12950, 14676, 10339, 13928, 6121, 13608, 14449, 
13549, 14252, 14362, 14584, 15153, 15264, + 14961, 14122, 14968, 14814, 14811, 9819, 14972, 13767, 14648, 13956, 14341, 13975, 13825, 13647, 13696, 15314, + 13906, 14318, 10364, 12538, 14474, 12996, 15339, 8103, 14442, 15331, 14930, 14339, 13716, 14063, 14031, 14113, + 15074, 13536, 14418, 12597, 14481, 14542, 15172, 14849, 14469, 14431, 14729, 13943, 15047, 14663, 15267, 13504, + 13551, 14845, 11375, 12531, 14638, 14169, 14607, 15284, 13715, 13134, 14272, 10766, 11235, 11853, 15015, 15210, + 13829, 15151, 13477, 10944, 13181, 12564, 12647, 14486, 13412, 15301, 12634, 14011, 11887, 14983, 14634, 14411, + 10804, 9485, 12962, 11214, 12328, 12395, 13989, 14528, 13806, 12067, 15158, 13410, 15067, 14638, 14541, 14087, + 12958, 13592, 14412, 14995, 14641, 11765, 15090, 14705, 15000, 12734, 15234, 14372, 14295, 13277, 14770, 14771, + 12810, 14520, 11019, 15314, 13267, 14179, 12742, 13361, 14904, 12140, 11992, 12478, 14525, 14825, 11815, 15216, + 13662, 14363, 14692, 14344, 12561, 14433, 14937, 13507, 15345, 6760, 15265, 13347, 15342, 15000, 12328, 14138, + 15350, 14572, 14830, 14685, 14397, 13494, 15258, 14661, 11364, 15343, 11424, 14350, 15078, 14725, 10612, 15315, + 12576, 15192, 14077, 13441, 11546, 14829, 12268, 12293, 14655, 14450, 13528, 13954, 15103, 14345, 14934, 13875, + 12185, 14507, 12663, 12896, 15354, 14390, 14705, 13549, 12529, 13556, 12874, 13536, 13400, 14685, 14849, 14374, + 14298, 13022, 14550, 11880, 14537, 14675, 14003, 15281, 15122, 14851, 15039, 15298, 14211, 13042, 14503, 14517, + 13683, 14763, 11346, 15044, 15321, 15096, 15189, 14985, 15021, 15196, 14337, 14357, 14564, 14943, 14373, 13649, + 14416, 14748, 14960, 14525, 13729, 13920, 14933, 14221, 13133, 15164, 9397, 15266, 14966, 14091, 14537, 13835, + 12753, 14873, 12268, 12311, 13190, 14888, 13878, 12792, 14889, 11654, 13777, 14329, 14540, 14158, 12539, 14376, + 14553, 14089, 13427, 14262, 14407, 14149, 14517, 13011, 14917, 13779, 14755, 13513, 14555, 14474, 13286, 14699, + 15067, 14412, 14982, 
11359, 15004, 14990, 15069, 14919, 14950, 13937, 14195, 12140, 14201, 14802, 13937, 15146, + 15181, 11793, 14301, 14790, 13138, 14787, 14514, 15099, 13834, 13912, 15017, 14093, 12065, 12842, 13406, 14385, + 15135, 15214, 14418, 11339, 12475, 13842, 12559, 13595, 14595, 14670, 13792, 15253, 11269, 14291, 15079, 13200, + 12335, 12378, 14434, 11908, 13681, 14793, 15127, 14356, 12978, 14811, 11775, 13919, 12760, 14758, 13115, 13031, + 14185, 14786, 14407, 12650, 13962, 14642, 15049, 13677, 15335, 14354, 14537, 14108, 14673, 14002, 14507, 13544, + 15122, 13339, 15089, 14106, 11694, 14445, 12387, 12979, 14687, 15034, 13409, 14313, 15168, 15124, 10487, 13561, + 14101, 12167, 14054, 13066, 14116, 14450, 14392, 13553, 12829, 15132, 14744, 13027, 14425, 14314, 14351, 14538, + 13429, 15317, 13803, 15163, 13876, 11961, 13437, 14771, 15096, 14590, 14350, 13138, 12992, 11998, 14850, 14797, + 15226, 15234, 11873, 14678, 12445, 13654, 15166, 10400, 13354, 11459, 11870, 14241, 12585, 12391, 11801, 13821, + 15332, 15356, 15000, 14560, 14702, 12324, 11848, 15176, 15082, 14412, 13784, 14528, 14376, 14842, 15115, 13823, + 14363, 11269, 13465, 13087, 13715, 12974, 14681, 15022, 12342, 14265, 14674, 12826, 14340, 14520, 13047, 15316, + 14807, 14481, 12598, 14704, 13351, 15082, 15067, 13866, 13518, 14341, 14837, 15240, 14617, 14434, 10382, 14981, + 12757, 15213, 15012, 14836, 15098, 14446, 14205, 11360, 14867, 11183, 14925, 14973, 15132, 14662, 15281, 14472, + 13549, 13569, 14746, 14360, 13577, 13462, 13398, 13902, 15106, 12754, 15259, 13547, 13611, 14983, 14772, 14599, + 12614, 14450, 10473, 13737, 14772, 13969, 11955, 10848, 14957, 14402, 11446, 14605, 14938, 12756, 15299, 14719, + 12453, 14875, 12038, 14527, 12941, 11553, 13448, 13231, 14153, 14431, 13622, 14480, 14111, 12829, 10315, 14477, + 14403, 13575, 14268, 9819, 12755, 10403, 13660, 13787, 13443, 13699, 13385, 8281, 14241, 13847, 13816, 14800, + 13570, 14818, 13494, 14393, 14849, 14506, 14615, 13788, 13944, 13122, 15143, 15217, 
15164, 12921, 12855, 13307, + 15167, 14993, 12959, 14562, 13566, 13639, 13129, 15091, 14359, 13464, 15020, 13249, 13602, 15278, 15101, 14380, + 11149, 14829, 13966, 13610, 14757, 13690, 15147, 14562, 9557, 14820, 10297, 15225, 12498, 12835, 13894, 13180, + 14180, 14472, 15092, 15186, 14861, 15154, 15050, 14575, 14916, 11435, 11264, 12924, 14926, 11680, 12350, 14206, + 11983, 12585, 13831, 14440, 9680, 14165, 14189, 15262, 12423, 14919, 15232, 12348, 14704, 11942, 13874, 14876, + 14497, 13414, 15086, 14382, 14736, 13597, 15042, 14514, 13887, 12503, 14151, 10874, 12706, 14781, 11952, 15081, + 13753, 14921, 10081, 14791, 14602, 14387, 14731, 14813, 14692, 15260, 12198, 15102, 7559, 15310, 15000, 12493, + 13863, 14925, 15108, 15296, 15195, 13642, 10878, 14602, 12378, 13938, 12365, 11940, 12834, 14280, 10956, 13874, + 12301, 13047, 15057, 11779, 15144, 13292, 15326, 14378, 15042, 12083, 14036, 14905, 14105, 11852, 14379, 13007, + 15076, 11791, 14982, 14432, 13345, 12955, 12652, 12331, 13562, 13186, 14600, 15088, 14488, 11367, 9824, 14634, + 10852, 8895, 14501, 14407, 12460, 11978, 12420, 13791, 14591, 13477, 12615, 13527, 13335, 12086, 12608, 14836, + 15036, 12800, 12850, 10882, 14328, 14603, 14703, 14957, 15217, 14357, 13375, 15151, 12154, 13324, 14318, 13470, + 13695, 12912, 14830, 15347, 10255, 11560, 14822, 11273, 13541, 13047, 12132, 11038, 14862, 13650, 15021, 13383, + 14704, 15064, 12557, 14741, 15103, 15065, 13970, 13449, 15065, 14729, 13885, 14012, 11767, 12651, 13736, 11480, + 10809, 14768, 12047, 10174, 14746, 13892, 14563, 13479, 13101, 14285, 11775, 14068, 13810, 12422, 13599, 12848, + 10933, 13937, 14770, 14699, 6755, 14867, 12798, 14866, 13186, 14845, 14471, 14563, 13181, 15190, 14848, 15338, + 13435, 15224, 13233, 14862, 15286, 14406, 14487, 11236, 14493, 12916, 15208, 15183, 14474, 14285, 14420, 13579, + 13761, 13549, 14512, 14650, 12536, 14683, 13692, 15078, 14256, 15032, 15239, 12488, 14582, 14558, 13495, 15244, + 12941, 14475, 14822, 13426, 14937, 
15092, 14967, 13737, 11368, 8816, 12385, 13206, 13019, 14724, 13788, 15160, + 14782, 13039, 14092, 9339, 14015, 14925, 14904, 14032, 15160, 15310, 9672, 14639, 15305, 13544, 12614, 14706, + 15357, 14737, 15114, 15205, 14450, 14460, 14702, 14517, 14558, 14919, 12885, 14779, 9461, 15319, 12568, 13550, + 13249, 15267, 14166, 12426, 14439, 14854, 15035, 13305, 12616, 14024, 14951, 14754, 13931, 13462, 14956, 13139, + 13726, 12398, 15305, 15256, 13080, 14628, 13610, 14813, 14644, 14243, 10331, 12727, 13859, 14606, 13554, 15157, + 14667, 13664, 14833, 14667, 13740, 12873, 15189, 14387, 14935, 13890, 13907, 15342, 12773, 13886, 15152, 14829, + 14052, 15030, 14446, 13166, 11313, 13975, 12073, 15246, 13404, 13810, 14752, 13630, 12629, 14754, 14912, 15334, + 15248, 13617, 14580, 13750, 15094, 11908, 13645, 12972, 15324, 13209, 14508, 14948, 14917, 10841, 13988, 14567, + 15046, 14293, 14564, 15061, 13722, 14086, 13305, 12296, 15074, 11627, 13281, 14549, 15352, 15279, 10810, 12869, + 13563, 13654, 14388, 14615, 13898, 8926, 12892, 15250, 13918, 11639, 14022, 14410, 14616, 15294, 12059, 14953, + 14264, 12695, 14934, 12955, 12851, 12398, 14708, 14724, 15006, 14703, 13545, 14384, 12363, 14777, 13486, 14228, + 10036, 15317, 14048, 14216, 12377, 14131, 14999, 13452, 14206, 14608, 14568, 14954, 15056, 14978, 14941, 13611, + 14465, 10974, 11154, 13454, 13849, 15128, 13232, 13263, 13805, 12720, 14753, 15302, 14813, 13988, 15097, 14569, + 14419, 14515, 13400, 14627, 15072, 14987, 14911, 12918, 15047, 14054, 14510, 12601, 13932, 13963, 14796, 14262, + 14543, 14954, 13863, 15314, 14891, 15044, 15000, 10362, 13999, 14362, 14886, 15260, 13733, 14715, 13677, 14179, + 13461, 15171, 13255, 12089, 13883, 14715, 13940, 12857, 15187, 14448, 11068, 12947, 14766, 14592, 14554, 14713, + 12549, 14724, 13530, 14049, 13640, 14975, 15169, 14740, 14781, 14982, 13547, 14360, 14404, 14653, 12007, 14521, + 15066, 13764, 14341, 14599, 12674, 13646, 15312, 14299, 13950, 11480, 14820, 14926, 11365, 14436, 
13748, 14521, + 14343, 12673, 15240, 14511, 14531, 14921, 11856, 10672, 14513, 11422, 14267, 14624, 14096, 14264, 14773, 12577, + 14849, 14396, 13814, 14431, 14493, 14454, 15094, 14543, 13719, 14999, 14623, 14355, 14927, 13783, 11995, 13354, + 13517, 15226, 12717, 13305, 11503, 13058, 14477, 14143, 14676, 14797, 15093, 15280, 13727, 14812, 12558, 15080, + 13425, 15139, 9968, 14118, 15199, 14935, 15126, 12173, 12127, 14356, 11130, 14839, 14669, 12827, 13560, 14363, + 13291, 12197, 14606, 13961, 12407, 13838, 13609, 10933, 14797, 14693, 14894, 12819, 15181, 14266, 12483, 14778, + 13803, 15112, 13295, 13649, 15304, 14515, 14509, 13828, 13939, 12574, 14691, 11065, 15267, 13477, 15307, 12992, + 14477, 15151, 15050, 14785, 15320, 13844, 14343, 12466, 13747, 11589, 15058, 14497, 12988, 15079, 11984, 13057, + 9645, 14557, 12935, 12120, 12978, 13611, 15133, 15224, 14521, 12592, 11947, 11948, 13705, 13194, 15241, 14754, + 14016, 15197, 14378, 13162, 15305, 14933, 14654, 14238, 12331, 12860, 14858, 14483, 14264, 14769, 13967, 15119, + 13986, 14493, 14432, 13412, 14617, 11662, 15196, 13119, 14498, 13529, 15286, 15321, 11370, 14532, 15227, 13392, + 14097, 14637, 15208, 14817, 12945, 14095, 14846, 12191, 10899, 15228, 14225, 13875, 15100, 10730, 10770, 13969, + 12594, 13401, 14523, 12975, 15331, 12924, 13689, 12641, 15094, 13801, 13510, 14202, 14975, 14560, 15164, 13450, + 13354, 14846, 14516, 11089, 7334, 14847, 15098, 15210, 14816, 13192, 11889, 13900, 14064, 13592, 13239, 15359, + 12686, 13006, 14305, 15181, 15206, 14598, 14896, 13860, 11996, 14719, 13409, 13244, 13598, 9775, 13135, 15343, + 15321, 9551, 15133, 13243, 14185, 14265, 15254, 14978, 12747, 14091, 11499, 13492, 14328, 10964, 14113, 14227, + 15138, 11925, 12754, 3873, 15047, 13736, 12350, 11932, 11774, 13963, 13048, 14789, 14451, 12479, 6610, 15154, + 14789, 13080, 12579, 12129, 14038, 14687, 14069, 14857, 15255, 14685, 13454, 14171, 14347, 14007, 14237, 14108, + 14757, 14436, 13444, 13824, 15123, 13518, 15019, 
13228, 11665, 15206, 15109, 13958, 14803, 13359, 14668, 14707, + 15034, 14941, 14339, 10120, 10777, 14487, 14812, 15274, 15104, 12058, 13964, 14993, 14475, 15261, 13037, 14607, + 15140, 14810, 14837, 13388, 14050, 14805, 12317, 14722, 15357, 14709, 15157, 13555, 15254, 12942, 14840, 14920, + 14759, 11817, 13541, 14296, 15199, 14513, 13584, 14362, 12359, 14684, 11194, 14668, 12307, 13418, 11673, 15172, + 15145, 13068, 15264, 13351, 15274, 14581, 14805, 11564, 14730, 14527, 13492, 14449, 13701, 13942, 13481, 11407, + 12956, 11925, 15310, 14960, 12303, 14335, 15054, 14477, 11355, 14784, 14305, 11761, 12263, 10673, 14744, 15214, + 13724, 13674, 14046, 15184, 13808, 12558, 14618, 14923, 14989, 13734, 14786, 13873, 14346, 12346, 15316, 15288, + 14388, 14784, 13808, 14683, 10925, 11547, 15095, 13958, 12414, 14936, 15114, 14561, 15177, 14050, 15211, 13883, + 11972, 13692, 12668, 15067, 14932, 13989, 13546, 15029, 15103, 13987, 13977, 14628, 15011, 13745, 14388, 11339, + 14239, 14823, 12055, 14385, 13505, 15069, 12890, 10847, 11435, 13030, 14225, 14338, 15158, 14754, 14950, 14383, + 14993, 13348, 14562, 11122, 14869, 14116, 14956, 10588, 15139, 14888, 15055, 13713, 14514, 12954, 13654, 14003, + 15148, 15120, 14750, 12962, 13575, 14642, 12932, 13262, 7181, 15037, 11619, 15023, 15027, 13120, 14309, 14964, + 13036, 14128, 13901, 14874, 11956, 14804, 14831, 13670, 15066, 13214, 14680, 14584, 12729, 15291, 13680, 13619, + 14114, 13117, 14525, 13116, 13796, 12870, 14526, 12798, 15348, 13716, 14632, 13739, 12766, 12730, 13864, 13454, + 14219, 15095, 14938, 12190, 14505, 12490, 15038, 12884, 14871, 14775, 14986, 14809, 14545, 13664, 14730, 15043, + 15191, 14277, 14629, 14008, 12450, 13827, 15191, 13931, 12636, 15311, 14901, 15084, 14400, 13334, 12484, 12524, + 12832, 14005, 14077, 12812, 13717, 12661, 14230, 14921, 14051, 14707, 15135, 14413, 12917, 13698, 11471, 10725, + 9615, 15022, 14500, 13365, 10421, 12536, 15188, 14197, 14375, 14979, 14090, 11338, 14550, 14559, 14190, 14574, 
+ 13334, 14657, 14981, 14456, 14528, 13325, 13570, 14126, 15008, 14939, 12215, 15002, 14911, 14418, 14687, 14352, + 15172, 14618, 14715, 14737, 15282, 11515, 14128, 14516, 10892, 14651, 14423, 14152, 12103, 14863, 14561, 14580, + 11807, 10206, 14894, 15341, 15113, 14223, 15313, 13059, 14871, 14364, 14857, 13826, 13969, 14695, 14633, 12700, + 14038, 14457, 12936, 15023, 12474, 12787, 14232, 12032, 14352, 12403, 14806, 14551, 8254, 14803, 15345, 15280, + 14507, 13762, 14437, 14300, 15086, 11288, 15235, 13770, 12362, 15320, 13930, 12560, 13735, 14187, 14893, 14958, + 15265, 13859, 14610, 13824, 12803, 13576, 14577, 13724, 11331, 13781, 14589, 14848, 14746, 10967, 14184, 13019, + 12384, 13919, 14315, 12791, 15187, 14963, 14643, 15157, 14968, 15017, 15013, 14565, 13542, 15084, 12332, 14269, + 12474, 14828, 14313, 11836, 12849, 13760, 12379, 14538, 13680, 14697, 14845, 14326, 12735, 13501, 12418, 15339, + 12468, 13051, 15024, 14892, 14594, 12898, 13940, 15119, 15303, 15293, 14412, 13927, 15358, 13362, 14022, 13883, + 14468, 15259, 10314, 14310, 15146, 12298, 15256, 14378, 13156, 15276, 15095, 14001, 12988, 15194, 15009, 13693, + 14962, 14035, 13504, 14565, 12695, 14892, 12042, 13467, 15086, 14038, 11729, 13315, 14456, 12959, 12862, 15089, + 14346, 12992, 14969, 13334, 13768, 14851, 13396, 12098, 14503, 12604, 15336, 14453, 13555, 10722, 13244, 13445, + 13920, 13408, 15168, 13464, 14124, 14604, 13870, 11290, 15340, 14406, 13358, 15271, 14384, 15092, 14459, 14571, + 14709, 14353, 12653, 14996, 14869, 12916, 14074, 13844, 14472, 14444, 14147, 12691, 15213, 9991, 13987, 14988, + 13418, 14775, 14085, 12471, 14547, 14435, 14419, 14479, 14598, 14469, 14816, 14536, 13478, 14464, 14050, 15085, + 14479, 13123, 14959, 14630, 15040, 13672, 13936, 13157, 14915, 14868, 8012, 14958, 14921, 15230, 14213, 13767, + 15009, 14914, 12673, 15290, 14073, 13147, 14785, 14703, 15058, 14363, 14600, 13354, 13537, 14100, 14118, 14160, + 14982, 15317, 14821, 13578, 14833, 14695, 10473, 15351, 14057, 
14503, 13681, 14499, 14926, 14095, 14415, 14327, + 13850, 13011, 14528, 13618, 13647, 14029, 15139, 15048, 13493, 15079, 12291, 11978, 12003, 14520, 15266, 13406, + 10919, 12541, 14721, 6213, 13008, 13471, 15245, 14307, 12178, 13278, 15199, 13721, 14651, 11570, 10743, 14344, + 9351, 10886, 15040, 8793, 15014, 14733, 12420, 13812, 15314, 10682, 13903, 15294, 13948, 15268, 14805, 12245, + 11423, 15186, 14234, 11252, 13994, 13337, 15320, 14119, 13125, 14434, 14755, 12922, 13051, 12661, 14851, 14886, + 14444, 14780, 14636, 14928, 14993, 14768, 15256, 14787, 14566, 11765, 13914, 15150, 14661, 14130, 14170, 12957, + 12306, 14572, 13578, 14560, 15185, 14704, 15304, 14897, 14641, 15101, 15017, 14766, 15304, 11597, 14300, 12834, + 15025, 14367, 14320, 15194, 14342, 14914, 12288, 12544, 14335, 14707, 13957, 13680, 14040, 14595, 14003, 15131, + 14777, 14791, 15079, 13713, 14837, 14701, 15304, 14702, 12108, 13631, 14949, 14856, 14770, 14970, 13955, 14024, + 12838, 12643, 13572, 14734, 13467, 14907, 15024, 11855, 14593, 15059, 9452, 15033, 12884, 15115, 13212, 14524, + 8330, 14731, 14806, 13861, 14660, 13219, 14480, 15086, 13647, 15324, 10735, 12495, 14559, 14964, 11286, 14908, + 10278, 15282, 14825, 14374, 13156, 14627, 14667, 14242, 12257, 13120, 11541, 14376, 10429, 12219, 13593, 14905, + 14880, 15176, 11704, 15114, 14461, 14812, 14059, 14091, 14481, 13238, 11473, 11426, 15012, 14112, 13947, 14790, + 13613, 13668, 12480, 14141, 13363, 15227, 12453, 15264, 12544, 15067, 14511, 14920, 15039, 14657, 14855, 15110, + 13843, 12336, 13502, 14413, 14783, 12502, 14967, 14345, 14804, 15272, 14285, 15285, 14375, 14453, 15343, 14431, + 13657, 14740, 14844, 14972, 12547, 14985, 14362, 13985, 12274, 14815, 12668, 11610, 14759, 12743, 14528, 12898, + 14993, 14861, 14661, 12400, 14833, 13927, 15202, 14251, 14858, 12946, 13324, 15233, 13136, 11571, 12885, 15172, + 9778, 14909, 15357, 14910, 14510, 14446, 14588, 14553, 13194, 13477, 14801, 14488, 14507, 14881, 6980, 15179, + 15317, 14907, 
13446, 14528, 14198, 13955, 14794, 12750, 14453, 15345, 14373, 14580, 14225, 13514, 14549, 15043, + 15292, 13611, 14208, 15058, 14236, 13492, 14752, 14762, 12984, 11133, 14288, 14261, 14909, 14264, 13359, 12165, + 14727, 14579, 12582, 13073, 14274, 14592, 15255, 10553, 15096, 15039, 15197, 13512, 12693, 14844, 10294, 14836, + 14422, 14668, 14058, 12529, 12108, 14673, 14248, 14641, 13283, 14494, 13047, 14026, 15132, 15173, 13523, 13486, + 14669, 9654, 14658, 13230, 14698, 14627, 13102, 12101, 15346, 14893, 13753, 14560, 13343, 12247, 14695, 12735, + 14622, 13441, 14692, 11659, 14188, 14914, 12839, 12889, 11337, 14973, 13190, 15100, 14512, 14047, 12658, 14248, + 14552, 14434, 15184, 13185, 14058, 12366, 14823, 15135, 15063, 15074, 14102, 14646, 15142, 14577, 13980, 10700, + 14058, 14421, 14322, 14257, 15340, 13598, 15053, 15353, 14877, 13795, 13438, 14101, 15112, 13784, 13512, 14845, + 14597, 14686, 13792, 13354, 14345, 15149, 14013, 14798, 14836, 13547, 14391, 11226, 14620, 14128, 8600, 13745, + 10650, 14735, 14656, 12519, 13206, 14149, 14049, 12914, 14021, 14062, 12689, 14512, 10419, 13706, 13723, 14920, + 14766, 15034, 14887, 12259, 12655, 15095, 14544, 15192, 13533, 14381, 13394, 14325, 15048, 13821, 13666, 14317, + 14777, 14724, 14020, 11402, 14342, 14717, 12257, 12215, 14255, 12738, 14213, 14556, 14527, 14075, 15163, 11574, + 15261, 13724, 12727, 14674, 15195, 13147, 15036, 15071, 14971, 14566, 15164, 13735, 14733, 14695, 13403, 13867, + 12648, 14275, 13749, 14945, 15214, 14233, 8642, 13952, 14929, 14409, 14274, 13879, 9378, 14429, 15062, 15340, + 12419, 11760, 13704, 13560, 13889, 14461, 14580, 14429, 15254, 14635, 12906, 13736, 13277, 12039, 14374, 13760, + 14528, 12951, 13428, 12562, 13234, 14335, 14952, 14908, 14962, 12488, 14453, 13639, 12989, 14435, 13996, 14183, + 12502, 15288, 14232, 15041, 15246, 13288, 12936, 15321, 14738, 14925, 14310, 11959, 13860, 14198, 14055, 14318, + 14787, 13512, 13965, 8042, 15030, 9184, 14467, 15078, 11620, 10594, 14474, 
14681, 12986, 14248, 14275, 14052, + 14986, 12078, 13190, 15182, 14424, 14081, 13355, 14583, 14672, 14336, 15065, 14958, 13588, 14626, 14626, 14724, + 14094, 15209, 13943, 9600, 15263, 15227, 14223, 14747, 13210, 14912, 14563, 10713, 14061, 11812, 13908, 12306, + 14418, 15311, 13827, 12251, 13555, 13818, 13320, 14512, 14610, 14641, 13744, 14562, 14738, 12917, 12750, 10776, + 13335, 14306, 13834, 14762, 13481, 14799, 14599, 10128, 15349, 13570, 15279, 13927, 13434, 14618, 11660, 14290, + 15116, 11570, 12278, 13500, 15314, 13394, 15267, 14464, 14531, 14954, 14923, 13644, 14268, 14618, 11847, 12437, + 15177, 15176, 14966, 14698, 10899, 15257, 12724, 13493, 14985, 14931, 13532, 13565, 14786, 14010, 14408, 13635, + 14507, 14435, 13148, 11691, 12045, 15249, 13324, 14388, 15201, 14404, 11615, 14654, 10615, 11218, 12317, 14885, + 14677, 15129, 14463, 12614, 14815, 15046, 14393, 15163, 14271, 13957, 14957, 14697, 14209, 14757, 11000, 15283, + 12334, 14884, 14570, 13276, 15243, 14108, 9724, 12914, 14727, 10978, 15159, 15097, 14190, 15312, 11475, 13625, + 15173, 14999, 14433, 14192, 15153, 14488, 14109, 14879, 15339, 11292, 14980, 15138, 15273, 14843, 14968, 14920, + 13560, 14850, 15275, 14804, 12301, 15170, 11419, 14042, 11293, 13892, 14103, 15306, 13769, 12441, 15190, 15186, + 13384, 12808, 11371, 15342, 13814, 14588, 14972, 14868, 14753, 12697, 15187, 13871, 13549, 14736, 11961, 14156, + 14581, 14336, 14643, 14517, 13993, 14439, 14956, 14568, 15327, 11686, 11731, 12019, 8432, 14345, 9738, 13893, + 13819, 14253, 11351, 11585, 11172, 14405, 14727, 14847, 14921, 12027, 12296, 15173, 12582, 13787, 12812, 13969, + 13831, 14353, 13354, 15075, 14875, 14975, 15017, 13472, 14404, 14249, 10728, 14048, 14371, 14350, 14149, 9056, + 15264, 14431, 15066, 13852, 14604, 12457, 12733, 14755, 13486, 13945, 13586, 14673, 13291, 14990, 15345, 15133, + 11274, 5298, 14538, 7809, 14909, 13909, 14245, 13367, 15161, 15267, 14853, 14668, 15166, 12708, 6841, 14367, + 13406, 15227, 14611, 14463, 
15285, 13009, 12641, 15203, 14952, 14170, 14893, 15332, 14808, 14928, 8828, 13370, + 14353, 9471, 14879, 13680, 12029, 13759, 11798, 15281, 13665, 13695, 15053, 15144, 11506, 15360, 13149, 13788, + 12565, 14364, 15162, 13179, 13781, 12637, 12878, 14301, 15152, 14471, 13554, 13816, 15245, 11918, 12444, 14729, + 13444, 14412, 14748, 15278, 14395, 14470, 15149, 14514, 14821, 13434, 13693, 14435, 13402, 14310, 15016, 10116, + 12859, 14433, 15208, 13362, 14734, 13919, 15235, 11982, 13999, 14235, 13441, 14338, 14298, 11710, 13711, 10520, + 15359, 13622, 15107, 10751, 15083, 12412, 15300, 14151, 14199, 14345, 13464, 13433, 11815, 13502, 15317, 15064, + 12604, 3463, 14300, 14415, 13232, 12790, 14926, 14338, 14702, 14100, 14679, 14403, 14776, 15315, 14699, 13691, + 13933, 13894, 12123, 15106, 12939, 12459, 13511, 13956, 12596, 14864, 13427, 10252, 14459, 14673, 13595, 13307, + 12870, 11893, 13185, 14965, 14629, 13386, 14473, 5911, 14997, 13340, 13948, 14240, 12318, 14964, 8944, 10144, + 15037, 13518, 14970, 15181, 15023, 14379, 14413, 15288, 15102, 13385, 13776, 14374, 14145, 15133, 13331, 13700, + 14668, 14365, 13477, 12767, 15165, 15359, 13335, 12995, 13801, 14599, 12980, 12834, 14886, 15341, 14866, 14769, + 14990, 15270, 10061, 14613, 14247, 14439, 11215, 12383, 15069, 12985, 15161, 15317, 13110, 15066, 15014, 14623, + 13951, 12326, 13540, 15000, 14808, 14754, 15108, 15081, 15105, 12056, 13153, 10839, 14351, 15336, 13849, 12937, + 13491, 14573, 15292, 14995, 14783, 15083, 15199, 13129, 13649, 15065, 15191, 13407, 14522, 13768, 15185, 14915, + 14114, 15216, 10627, 14860, 14427, 15048, 14640, 14423, 13988, 12462, 11858, 14583, 13965, 14271, 14372, 11429, + 13747, 12993, 14680, 13477, 13327, 15005, 13688, 14123, 14923, 14148, 13400, 10973, 15350, 11896, 11537, 14870, + 13092, 15127, 13849, 14717, 14999, 14780, 11476, 14881, 11205, 14072, 14463, 13066, 8487, 14861, 15294, 15066, + 12609, 14076, 14513, 9345, 9259, 15016, 9944, 14546, 10877, 11405, 14338, 14860, 14503, 14342, 
12323, 14900, + 11873, 9967, 14211, 13594, 12061, 13727, 14286, 13564, 13649, 14378, 13026, 13793, 13369, 14360, 14666, 14407, + 15331, 14338, 15203, 14709, 13857, 15018, 14949, 14739, 14855, 14548, 11317, 14884, 13394, 14256, 14814, 12852, + 13930, 12997, 12682, 15303, 15313, 12775, 15204, 13995, 12211, 14890, 14110, 12662, 15172, 14836, 14740, 15269, + 13967, 14349, 15225, 14640, 14615, 13811, 14248, 14586, 14199, 15312, 12735, 14640, 13790, 11272, 15344, 14621, + 14608, 14802, 15003, 10266, 14296, 9500, 13383, 13953, 14877, 13651, 13840, 15110, 15032, 14695, 15236, 14370, + 15128, 14654, 14963, 14052, 13931, 14486, 15173, 13545, 13254, 13505, 12941, 12752, 14366, 15287, 12375, 13317, + 14035, 13733, 11644, 13874, 15214, 14604, 14374, 15271, 14303, 14186, 13261, 14448, 14722, 14697, 15200, 14822, + 15177, 14634, 11510, 15063, 13414, 14598, 13546, 14370, 12670, 11670, 12596, 12886, 14645, 15103, 14666, 14158, + 14441, 14176, 12283, 15338, 13723, 14997, 11677, 15221, 14799, 13601, 14806, 13039, 13856, 14066, 11499, 14808, + 15162, 12171, 14364, 14610, 13827, 14609, 15131, 13963, 12645, 12388, 14282, 13610, 15287, 12249, 13493, 14893, + 12784, 13312, 13323, 15118, 14592, 12914, 14677, 14463, 14080, 9575, 15189, 15050, 13630, 15194, 15152, 14190, + 15010, 13790, 11101, 15050, 9382, 13766, 14614, 11843, 10357, 15121, 14363, 15024, 11649, 14837, 13573, 13417, + 14678, 14697, 13620, 13494, 14008, 12382, 14931, 14526, 15024, 10531, 12578, 14999, 14688, 14988, 13990, 14695, + 15023, 10403, 13875, 13942, 15091, 14980, 14388, 11504, 14582, 13769, 14716, 11785, 12997, 14014, 15188, 14361, + 14273, 14409, 7729, 14733, 12450, 12628, 12432, 14651, 14676, 13389, 12659, 14798, 13592, 14970, 15056, 13942, + 11800, 11301, 13461, 12508, 14831, 14112, 14934, 15106, 14996, 14810, 13201, 13448, 14198, 14902, 10136, 15184, + 13244, 14972, 12665, 13937, 13947, 12821, 13638, 14208, 11331, 12622, 14734, 14825, 10915, 14744, 13737, 14355, + 15173, 11336, 14402, 13431, 12443, 14728, 11977, 
15316, 14614, 12748, 13321, 15262, 13551, 15133, 14462, 15002, + 14747, 13611, 13042, 14538, 13974, 14781, 14363, 15003, 14375, 15181, 9321, 12026, 14085, 15286, 13447, 13874, + 15197, 14873, 11636, 15152, 14220, 15088, 14562, 8319, 13527, 13143, 12333, 15025, 14200, 7386, 15127, 14002, + 15130, 15040, 15110, 14152, 14592, 12306, 11117, 10956, 11308, 13804, 14272, 15225, 13817, 13673, 12988, 15145, + 15230, 13812, 14046, 12758, 13959, 14512, 14775, 15063, 14497, 14679, 11830, 13636, 11293, 13255, 10004, 14486, + 13220, 14711, 14924, 15043, 13579, 15296, 14716, 12300, 14404, 14494, 12125, 13755, 14675, 14738, 13735, 12059, + 14365, 13542, 15057, 11654, 15042, 12108, 11294, 14065, 14713, 13973, 13038, 13832, 14226, 15061, 15314, 12933, + 14392, 14482, 12975, 14081, 13749, 15178, 14077, 11318, 15253, 14473, 10576, 13982, 13447, 13369, 14980, 12925, + 15007, 14229, 14870, 11437, 15036, 10133, 15355, 10676, 13193, 13460, 11757, 14618, 15245, 14130, 12940, 12682, + 14568, 12988, 14988, 14834, 15040, 6859, 14969, 13710, 13276, 15295, 14045, 14643, 15119, 14030, 14419, 14796, + 13871, 13523, 15087, 14435, 14791, 13931, 14435, 11915, 11540, 14382, 12476, 15243, 15032, 15146, 13698, 12939, + 14635, 15311, 13406, 15073, 13591, 14331, 14409, 12316, 14905, 13886, 13896, 9780, 14564, 15196, 13183, 14094, + 15351, 14608, 13488, 15242, 11923, 15098, 14717, 12353, 14921, 8391, 15137, 15016, 13869, 14768, 9070, 9468, + 14704, 13499, 14340, 14399, 15153, 14457, 12856, 15002, 14575, 15300, 13278, 14089, 14208, 11821, 9884, 14798, + 12265, 13619, 14544, 14349, 15037, 15300, 15181, 15144, 14794, 12552, 14188, 14683, 13929, 14884, 14893, 13930, + 15181, 15120, 12783, 12751, 15117, 15030, 12678, 13724, 14812, 13078, 15161, 13756, 14738, 13928, 12667, 12744, + 14341, 14734, 15027, 12444, 13480, 11411, 14606, 8850, 13804, 14920, 15225, 14578, 14709, 12427, 9550, 14292, + 14815, 13430, 7984, 15116, 14345, 14420, 10854, 15130, 14761, 7493, 14692, 11789, 15282, 14598, 13571, 13384, + 13347, 
10487, 14251, 13003, 13086, 15247, 13405, 15270, 15011, 14692, 15319, 14717, 12816, 13346, 14571, 15191, + 15328, 15005, 14003, 15235, 15156, 12527, 14719, 15221, 13280, 14704, 13151, 14133, 14185, 12821, 15261, 12364, + 13471, 15163, 14766, 13435, 15180, 11697, 14882, 15146, 15189, 14255, 14553, 10959, 11501, 13420, 14612, 12845, + 14645, 12955, 14762, 15018, 15275, 15123, 14427, 14345, 11940, 13840, 14644, 14429, 14748, 14883, 11869, 15129, + 12859, 13106, 13287, 14515, 13052, 15351, 14896, 14387, 7659, 14432, 14390, 14761, 14148, 13759, 15124, 15098, + 14583, 14450, 14415, 11894, 12305, 13454, 15309, 14256, 14742, 14394, 14444, 13531, 12184, 14542, 12247, 13452, + 13691, 14885, 11229, 13735, 15209, 14649, 13608, 14492, 14464, 13767, 14584, 13750, 14206, 12337, 10246, 12768, + 13544, 15151, 15056, 15203, 12786, 14411, 14230, 14637, 14983, 13307, 15151, 15301, 15236, 11333, 14971, 15147, + 15341, 12940, 15056, 14806, 14776, 13062, 14751, 15103, 11685, 14269, 15071, 15200, 13980, 12782, 15099, 13870, + 15108, 11671, 14815, 10845, 14341, 14987, 14131, 14740, 15118, 14677, 15050, 10537, 13065, 13681, 15191, 13329, + 15288, 14810, 15150, 14376, 13496, 15158, 14679, 9719, 14461, 13751, 12515, 14614, 14784, 14524, 14561, 13210, + 11423, 13598, 12837, 14953, 12813, 14541, 12952, 15272, 12314, 12508, 14576, 10167, 14289, 12777, 15032, 14690, + 11190, 14924, 14753, 14481, 15280, 15158, 14538, 14988, 14413, 13634, 15095, 14711, 13064, 14701, 15091, 14629, + 12820, 12348, 12769, 9412, 13498, 13272, 14303, 13326, 12813, 14723, 14621, 15013, 14669, 14032, 14840, 15187, + 14509, 9608, 11418, 15359, 12654, 14170, 13775, 15262, 13300, 14955, 14094, 15068, 15192, 11902, 15304, 14672, + 14678, 15239, 15291, 11994, 13998, 14588, 14403, 13584, 9800, 14566, 15081, 15205, 13739, 11780, 14499, 15107, + 13557, 8078, 13356, 14825, 14055, 11887, 12696, 10346, 14774, 14767, 14670, 14724, 15208, 14489, 13620, 13561, + 12825, 14804, 13142, 15239, 14432, 14430, 15100, 12811, 14641, 14262, 
14482, 14535, 15119, 12861, 14437, 15337, + 15113, 15094, 15271, 14767, 13761, 15008, 13398, 14406, 14873, 15089, 14814, 14390, 13930, 14530, 11085, 13302, + 14334, 14263, 15120, 14371, 15220, 13865, 13436, 14158, 11361, 15166, 14638, 15025, 15318, 15108, 14395, 15159, + 11392, 15063, 15195, 12784, 13446, 12542, 14382, 14585, 15347, 14075, 15037, 15354, 14354, 8965, 10321, 11294, + 14355, 7897, 15016, 14919, 13248, 13559, 14902, 12035, 13180, 13637, 11185, 15265, 13974, 11685, 10559, 14762, + 13023, 15022, 14438, 13522, 14823, 13712, 14517, 13085, 14153, 14639, 14881, 13127, 14644, 13699, 14995, 13195, + 12010, 14306, 14834, 14341, 15287, 14707, 14704, 13345, 13015, 9909, 15337, 14020, 15156, 11968, 14436, 14519, + 10085, 11721, 13093, 14349, 15137, 13732, 14606, 13149, 15103, 13724, 15329, 14980, 12200, 13435, 13544, 15236, + 15129, 14940, 14020, 15266, 14598, 14553, 15119, 14959, 15193, 14209, 13286, 14481, 15104, 14666, 15247, 13656, + 14029, 13671, 14472, 14088, 12661, 15033, 14479, 15158, 14926, 14067, 14431, 14510, 14213, 10423, 12290, 14124, + 13771, 12993, 14274, 14882, 13904, 14825, 12590, 14363, 15173, 15024, 14689, 15201, 12799, 12581, 11910, 12041, + 12413, 12043, 15002, 15185, 15277, 13604, 12226, 13595, 15057, 14034, 15045, 14697, 11716, 15244, 14280, 15078, + 14906, 13513, 15240, 13580, 9321, 14993, 13161, 11324, 14676, 13584, 14727, 14796, 12385, 15108, 9792, 13514, + 15196, 14627, 14192, 12219, 15223, 13367, 15244, 13489, 14191, 14201, 14505, 15031, 14884, 14722, 13538, 8450, + 14227, 13098, 14707, 13463, 13248, 14008, 14613, 14001, 14589, 13967, 13640, 13344, 13927, 14364, 14436, 12350, + 14587, 14752, 15168, 14002, 14780, 14740, 13114, 14654, 13595, 13782, 11757, 14742, 10344, 13009, 14981, 14552, + 13893, 14579, 14388, 14455, 15317, 11047, 8780, 12909, 14503, 14526, 14122, 14813, 14672, 14346, 14385, 11685, + 15238, 14748, 12760, 14868, 12645, 13702, 13361, 14171, 15355, 14163, 13625, 14869, 13061, 14757, 12315, 12576, + 13520, 14671, 7508, 
14481, 14720, 13861, 14001, 14394, 15070, 7713, 14049, 13871, 14661, 15206, 13872, 13570, + 15096, 14326, 14542, 14938, 13706, 14763, 13985, 15327, 15094, 13508, 14182, 13645, 14161, 4383, 14644, 12542, + 14513, 14543, 13444, 13721, 14462, 15073, 14350, 15124, 14545, 11041, 13935, 14845, 11770, 12553, 14604, 13580, + 14343, 14625, 14557, 14833, 13000, 14155, 14389, 12561, 14349, 15101, 14737, 10255, 14944, 14892, 14670, 2232, + 14889, 14835, 14009, 15273, 13103, 14382, 15147, 14519, 14936, 14032, 14003, 15210, 14550, 14523, 14839, 14893, + 14766, 13478, 12661, 15335, 8021, 14297, 10344, 10285, 14271, 14062, 13358, 14273, 13252, 14759, 11409, 14572, + 15235, 14200, 13630, 14782, 13523, 15120, 14672, 15050, 9742, 14868, 14365, 13785, 13874, 13291, 12751, 13469, + 14434, 12939, 14338, 14145, 12845, 14902, 13516, 15277, 13071, 14471, 13834, 13963, 13736, 14655, 14811, 12686, + 14710, 15033, 8349, 13943, 14176, 14575, 15249, 14713, 14097, 14548, 13546, 14274, 12352, 11948, 14930, 15001, + 12253, 10201, 15350, 10813, 13725, 12857, 15214, 15143, 14347, 14819, 14198, 10498, 12048, 11027, 13844, 14649, + 14636, 13847, 13239, 12471, 15155, 8549, 13449, 15019, 12616, 14065, 14869, 14512, 12852, 14485, 14322, 14013, + 14518, 14965, 13465, 12930, 14763, 14349, 14754, 14549, 14808, 13041, 12226, 14933, 13436, 13774, 14400, 14924, + 13695, 11877, 14601, 15146, 14120, 13561, 13714, 14984, 14278, 13772, 15169, 11031, 15241, 13154, 14205, 15006, + 14824, 14123, 14830, 14090, 14550, 12732, 12341, 14401, 15045, 15163, 14753, 14983, 15344, 14520, 14509, 14447, + 13592, 14904, 13538, 14480, 14724, 14965, 15360, 13579, 10575, 14523, 12729, 14512, 15016, 11102, 14400, 14489, + 14347, 15126, 15155, 14346, 13018, 13258, 13450, 15063, 14280, 15183, 13856, 14490, 13522, 14596, 13422, 14867, + 15251, 13171, 11683, 14805, 14969, 13319, 14948, 14742, 14958, 14396, 14982, 15171, 13541, 13659, 12500, 15231, + 12638, 14163, 15340, 15208, 13191, 15255, 14144, 9530, 11872, 14454, 14589, 15155, 13483, 
14490, 14996, 14230, + 15274, 14457, 13963, 12362, 15107, 12713, 14007, 10500, 14413, 14043, 9338, 14526, 13463, 14565, 14756, 11904, + 13696, 13693, 12241, 13223, 14407, 13386, 12724, 15155, 14920, 13962, 14717, 11634, 12880, 15160, 15169, 14293, + 8156, 14105, 14541, 13926, 15043, 14492, 14534, 14132, 14157, 13910, 14401, 15176, 14680, 14561, 12791, 14759, + 14602, 15107, 14764, 14560, 14810, 15270, 15343, 15243, 12927, 13736, 14123, 14245, 11980, 11029, 12833, 14994, + 10386, 14840, 11634, 14103, 11899, 13417, 13614, 13277, 15184, 13904, 13702, 14158, 11270, 14291, 11789, 13538, + 12101, 15081, 14930, 14529, 11035, 15048, 15278, 15306, 12646, 14936, 14827, 12972, 15251, 15308, 14616, 15251, + 14713, 12537, 13337, 11895, 15220, 14931, 15147, 14719, 14453, 14146, 14821, 15267, 12124, 13721, 14293, 13917, + 10766, 13873, 14937, 14519, 14467, 14827, 13313, 14249, 13492, 15042, 14694, 15180, 14526, 15351, 14368, 15207, + 14145, 13564, 14676, 13211, 12484, 15066, 14791, 15270, 13457, 13080, 14377, 15243, 14937, 13190, 14399, 13989, + 13401, 15339, 15119, 13560, 5320, 13866, 15077, 14716, 12468, 13400, 14784, 15170, 7929, 15272, 14279, 15082, + 13636, 13111, 14351, 14844, 10603, 13524, 15184, 13382, 14961, 15276, 14878, 12396, 13999, 13919, 15244, 13794, + 14452, 15234, 14629, 13727, 14734, 14269, 15035, 13347, 14374, 6474, 14268, 13970, 12928, 13934, 15145, 13028, + 15166, 14517, 12997, 13460, 13886, 14586, 14570, 14850, 14298, 13117, 14339, 14037, 15032, 15086, 14547, 15328, + 13619, 15212, 15018, 12562, 14034, 15204, 14849, 11125, 14345, 14819, 14442, 13955, 14813, 14440, 14388, 14915, + 13931, 11836, 14358, 13945, 11286, 14821, 13685, 15173, 14907, 12319, 15003, 14916, 13710, 13001, 14452, 15357, + 10458, 15175, 13364, 13066, 13544, 13965, 15172, 13894, 13822, 13999, 13317, 15246, 14439, 13769, 13162, 13929, + 14197, 14221, 14267, 15104, 14051, 14385, 12467, 13062, 14211, 13401, 14403, 13916, 14897, 14340, 14533, 13414, + 11692, 11573, 14369, 13053, 12228, 14285, 
15088, 14747, 14259, 10285, 14469, 14601, 13911, 14725, 14254, 14062, + 14557, 15222, 12314, 10570, 12399, 14066, 11366, 13560, 13664, 13481, 14949, 15180, 14063, 12109, 14947, 14733, + 13114, 14747, 12657, 14870, 13751, 14524, 12440, 15226, 13397, 14985, 13611, 13734, 11876, 13606, 14705, 13763, + 14046, 13856, 12965, 15320, 13556, 12985, 14126, 14429, 11914, 14094, 14810, 14479, 15152, 11322, 15193, 15314, + 13969, 14807, 10339, 14658, 14467, 14852, 14893, 15045, 14753, 12692, 14880, 13819, 14487, 11560, 14831, 13334, + 13921, 14386, 14451, 13081, 11163, 14774, 15292, 14345, 9352, 11786, 15002, 14526, 15236, 14807, 14055, 14525, + 14612, 14843, 10387, 13825, 13245, 15198, 13844, 14945, 9262, 15064, 11901, 14543, 13928, 14456, 14732, 14512, + 11881, 14126, 12565, 15311, 13871, 14886, 14648, 10251, 12139, 14471, 15136, 15181, 14061, 15144, 10329, 15286, + 9320, 12559, 13120, 7376, 15002, 14406, 12635, 9936, 15023, 14286, 13169, 12831, 10280, 8443, 12690, 14727, + 13580, 11936, 14578, 13241, 12235, 15066, 14056, 14869, 11079, 13376, 14772, 13761, 14332, 14193, 14730, 14555, + 12284, 14192, 12558, 12286, 14365, 14669, 13385, 13065, 15203, 14062, 10917, 14832, 14169, 15036, 14762, 14590, + 12606, 15305, 12166, 13768, 13982, 14372, 13964, 13587, 13364, 14632, 14652, 15033, 13618, 14445, 14667, 15097, + 15262, 13214, 14377, 12749, 12324, 12743, 13977, 14033, 14955, 11165, 13611, 13665, 15159, 10704, 10892, 14253, + 8830, 11904, 12804, 13374, 13882, 11712, 13391, 13716, 9564, 11900, 13449, 15090, 14662, 12934, 14385, 12376, + 15342, 15128, 15011, 9948, 14725, 15073, 10810, 15318, 14478, 15169, 10680, 15167, 14705, 15314, 11605, 13945, + 14169, 15276, 14462, 14660, 10582, 13592, 15220, 14719, 14729, 12083, 14998, 14074, 14526, 12458, 14019, 13318, + 13656, 15005, 10013, 15172, 15004, 15182, 12050, 14433, 15073, 11794, 13657, 15263, 14688, 13235, 11053, 15198, + }; + uint16_t ret[16 * 16 * 3 * 8] = { + 15218, 12000, 14363, 13803, 13850, 13819, 14568, 14889, 14570, 14500, 
15076, 15145, 9778, 13951, 13544, 8156, + 13537, 12185, 15248, 11807, 14528, 12784, 12010, 14557, 13789, 14817, 11269, 15112, 13011, 14253, 12988, 14835, + 15273, 14512, 11791, 13068, 14909, 12326, 15151, 14105, 13048, 14507, 13617, 10206, 12951, 13312, 14306, 15222, + 13063, 13488, 13465, 13295, 14528, 11351, 14988, 14009, 10601, 13559, 14982, 15264, 15357, 13540, 15056, 14541, + 14247, 12663, 14580, 14894, 13428, 13323, 14834, 12314, 9806, 15202, 13087, 13649, 13618, 11585, 14834, 15273, + 14614, 15188, 14432, 13351, 14910, 15000, 15203, 13926, 14831, 12896, 13750, 15341, 12562, 15118, 14341, 10570, + 14988, 14710, 13715, 15304, 13647, 11172, 15040, 13103, 13511, 15274, 13345, 15274, 14510, 14808, 12786, 15043, + 14781, 15354, 15094, 15113, 13234, 14592, 15287, 12399, 15221, 14741, 12974, 14515, 14029, 14405, 6859, 14382, + 12111, 13894, 12955, 14581, 14446, 14754, 14411, 14492, 14660, 14390, 11908, 14223, 14335, 12914, 14707, 14066, + 11097, 14217, 14681, 14509, 15139, 14727, 14969, 15147, 10253, 13515, 12652, 14805, 14588, 15108, 14230, 14534, + 14549, 14705, 13645, 15313, 14952, 14677, 14704, 11366, 15162, 14517, 15022, 13828, 15048, 14847, 13710, 14519, + 14059, 13458, 12331, 11564, 14553, 15081, 14637, 14132, 15142, 13549, 12972, 13059, 14908, 14463, 13345, 13560, + 11381, 13382, 12342, 13939, 13493, 14921, 13276, 14936, 13561, 12779, 13562, 14730, 13194, 15105, 14983, 14157, + 14835, 12529, 15324, 14871, 14962, 14080, 13015, 13664, 15333, 14079, 14265, 12574, 15079, 12027, 15295, 14032, + 13971, 15261, 13186, 14527, 13477, 12056, 13307, 13910, 14988, 13556, 13209, 14364, 12488, 9575, 9909, 13481, + 14921, 12370, 14674, 14691, 12291, 12296, 14045, 14003, 14712, 14628, 14600, 13492, 14801, 13153, 15151, 14401, + 12002, 12874, 14508, 14857, 14453, 15189, 15337, 14949, 14612, 14990, 12826, 11065, 11978, 15173, 14643, 15210, + 13934, 15334, 15088, 14449, 14488, 10839, 15301, 15176, 14360, 13536, 14948, 13826, 13639, 15050, 14020, 15180, + 14806, 14584, 14340, 
15267, 12003, 12582, 15119, 14550, 13660, 8955, 14488, 13701, 14507, 14351, 15236, 14680, + 13185, 13400, 14917, 13969, 12989, 13630, 15156, 14063, 13951, 12725, 14520, 13477, 14520, 13787, 14030, 14523, + 15269, 13353, 11367, 13942, 14881, 15336, 11333, 14561, 15315, 14685, 10841, 14695, 14435, 15194, 11968, 12109, + 13796, 14595, 13047, 15307, 15266, 12812, 14419, 14839, 14971, 15231, 9824, 13481, 6980, 13849, 14971, 12791, + 14304, 14849, 13988, 14633, 13996, 15152, 14436, 14947, 9764, 12642, 15316, 12992, 13406, 13969, 14796, 14893, + 12354, 14660, 14634, 11407, 15179, 12937, 15147, 14759, 14346, 14374, 14567, 12700, 14183, 14190, 14519, 14733, + 12642, 11533, 14807, 14477, 10919, 13831, 13871, 14766, 14996, 11967, 10852, 12956, 15317, 13491, 15341, 14602, + 13827, 14298, 15046, 14038, 12502, 15010, 10085, 13114, 12389, 14510, 14481, 15151, 12541, 14353, 13523, 13478, + 14591, 4435, 8895, 11925, 14907, 14573, 12940, 15107, 14474, 13022, 14293, 14457, 15288, 13790, 11721, 14747, + 15154, 9512, 12598, 15050, 14721, 13354, 15087, 12661, 13842, 14773, 14501, 15310, 13446, 15292, 15056, 14764, + 14714, 14550, 14564, 12936, 14232, 11101, 13093, 12657, 14243, 14471, 14704, 14785, 6213, 15075, 14435, 15335, + 12773, 15006, 14407, 14960, 14528, 14995, 14806, 14560, 10817, 11880, 15061, 15023, 15041, 15050, 14349, 14870, + 14542, 14828, 13351, 15320, 13008, 14875, 14791, 8021, 13814, 14794, 12460, 12303, 14198, 14783, 14776, 14810, + 13536, 14537, 13722, 12474, 15246, 9382, 15137, 13751, 13551, 12574, 15082, 13844, 13471, 14975, 13931, 14297, + 12790, 14499, 11978, 14335, 13955, 15083, 13062, 15270, 13742, 14675, 14086, 12787, 13288, 13766, 13732, 14524, + 14819, 14521, 15067, 14343, 15245, 15017, 14435, 10344, 14484, 14999, 12420, 15054, 14794, 15199, 14751, 15343, + 11544, 14003, 13305, 14232, 12936, 14614, 14606, 12440, 14219, 14982, 13866, 12466, 14307, 13472, 11915, 10285, + 12382, 11392, 13791, 14477, 12750, 13129, 15103, 15243, 13650, 15281, 12296, 12032, 15321, 
11843, 13149, 15226, + 12500, 13539, 13518, 13747, 12178, 14404, 11540, 14271, 14624, 13341, 14591, 11355, 14453, 13649, 11685, 12927, + 13566, 15122, 15074, 14352, 14738, 10357, 15103, 13397, 11825, 14860, 14341, 11589, 13278, 14249, 14382, 14062, + 14191, 11410, 13477, 14784, 15345, 15065, 14269, 13736, 11179, 14851, 11627, 12403, 14925, 15121, 13724, 14985, + 13389, 14720, 14837, 15058, 15199, 10728, 12476, 13358, 12378, 14180, 12615, 14305, 14373, 15191, 15071, 14123, + 14200, 15039, 13281, 14806, 14310, 14363, 15329, 13611, 13967, 10552, 15240, 14497, 13721, 14048, 15243, 14273, + 15116, 14814, 13527, 11761, 14580, 13407, 15200, 14245, 9798, 15298, 14549, 14551, 11959, 15024, 14980, 13734, + 14601, 14985, 14617, 12988, 14651, 14371, 15032, 13252, 12684, 13278, 13335, 12263, 14225, 14522, 13980, 11980, + 14579, 14211, 15352, 8254, 13860, 11649, 12200, 11876, 12846, 13661, 14434, 15079, 11570, 14350, 15146, 14759, + 12997, 11612, 12086, 10673, 13514, 13768, 12782, 11029, 14496, 13042, 15279, 14803, 14198, 14837, 13435, 13606, + 12777, 11420, 10382, 11984, 10743, 14149, 13698, 11409, 14967, 14759, 12608, 14744, 14549, 15185, 15099, 12833, + 14902, 14503, 10810, 15345, 14055, 13573, 13544, 14705, 14399, 13696, 14981, 13057, 14344, 9056, 12939, 14572, + 14958, 15079, 14836, 15214, 15043, 14915, 13870, 14994, 13323, 14517, 12869, 15280, 14318, 13417, 15236, 13763, + 13604, 14362, 12757, 9645, 9351, 15264, 14635, 15235, 13047, 13932, 15036, 13724, 15292, 14114, 15108, 10386, + 14848, 13683, 13563, 14507, 14787, 14678, 15129, 14046, 15360, 10515, 15213, 14557, 10886, 14431, 15311, 14200, + 15269, 15020, 12800, 13674, 13611, 15216, 11671, 14840, 13776, 14763, 13654, 13762, 13512, 14697, 14940, 13856, + 14275, 14653, 15012, 12935, 15040, 15066, 13406, 13630, 11493, 14475, 12850, 14046, 14208, 10627, 14815, 11634, + 14646, 11346, 14388, 14437, 13965, 13620, 14020, 12965, 1482, 13420, 14836, 12120, 8793, 13852, 15073, 14782, + 14822, 14692, 10882, 15184, 15058, 14860, 
10845, 14103, 13786, 15044, 14615, 14300, 8042, 13494, 15266, 15320, + 12466, 13574, 15098, 12978, 15014, 14604, 13591, 13523, 13442, 14334, 14328, 13808, 14236, 14427, 14341, 11899, + 14569, 15321, 13898, 15086, 15030, 14008, 14598, 13556, 12181, 13842, 14446, 13611, 14733, 12457, 14331, 15120, + 14588, 15157, 14603, 12558, 13492, 15048, 14987, 13417, 12899, 15096, 8926, 11288, 9184, 12382, 14553, 12985, + 15129, 14508, 14205, 15133, 12420, 12733, 14409, 14672, 15007, 13972, 14703, 14618, 14752, 14640, 14131, 13614, + 14372, 15189, 12892, 15235, 14467, 14931, 15119, 14126, 13778, 14830, 11360, 15224, 13812, 14755, 12316, 15050, + 15143, 12724, 14957, 14923, 14762, 14423, 14740, 13277, 13270, 14985, 15250, 13770, 15078, 14526, 14959, 14429, + 15327, 14449, 14867, 14521, 15314, 13486, 14905, 9742, 14340, 13396, 15217, 14989, 12984, 13988, 15118, 15184, + 14343, 15021, 13918, 12362, 11620, 15024, 15193, 11914, 14392, 11494, 11183, 12592, 10682, 13945, 13886, 14868, + 15358, 14572, 14357, 13734, 11133, 12462, 14677, 13904, 13424, 15196, 11639, 15320, 10594, 10531, 14209, 14094, + 14481, 14313, 14925, 11947, 13903, 13586, 13896, 14365, 15114, 13554, 13375, 14786, 14288, 11858, 15050, 13702, + 15342, 14337, 14022, 13930, 14474, 12578, 13286, 14810, 14440, 14673, 14973, 11948, 15294, 14673, 9780, 13785, + 14642, 15306, 15151, 13873, 14261, 14583, 10537, 14158, 14625, 14357, 14410, 12560, 14681, 14999, 14481, 14479, + 14443, 14209, 15132, 13705, 13948, 13291, 14564, 13874, 14519, 9348, 12154, 14346, 14909, 13965, 13065, 11270, + 11393, 14564, 14616, 13735, 12986, 14688, 15104, 15152, 13718, 14817, 14662, 13194, 15268, 14990, 15196, 13291, + 14391, 12110, 13324, 12346, 14264, 14271, 13681, 14291, 15071, 14943, 15294, 14187, 14248, 14988, 14666, 11322, + 15180, 14760, 15281, 15241, 14805, 15345, 13183, 12751, 13350, 14500, 14318, 15316, 13359, 14372, 15191, 11789, + 14705, 14373, 12059, 14893, 14275, 13990, 15247, 15193, 14864, 15325, 14472, 14754, 12245, 15133, 14094, 
13469, + 14960, 14187, 13470, 15288, 12165, 11429, 13329, 13538, 14128, 13649, 14953, 14958, 14052, 14695, 13656, 15314, + 14155, 12427, 13549, 14016, 11423, 11274, 15351, 14434, 14955, 14349, 13695, 14388, 14727, 13747, 15288, 12101, + 14430, 14416, 14264, 15265, 14986, 15023, 14029, 13969, 14405, 14847, 13569, 15197, 15186, 5298, 14608, 12939, + 15137, 15306, 12912, 14784, 14579, 12993, 14810, 15081, 14405, 14748, 12695, 13859, 12078, 10403, 13671, 14807, + 13414, 14085, 14746, 14378, 14234, 14538, 13488, 14338, 12575, 11988, 14830, 13808, 12582, 14680, 15150, 14930, + 15129, 14960, 14934, 14610, 13190, 13875, 14472, 10339, 13070, 13609, 14360, 13162, 11252, 7809, 15242, 14145, + 15041, 15276, 15347, 14683, 13073, 13477, 14376, 14529, 14692, 14525, 12955, 13824, 15182, 13942, 14088, 14658, + 15013, 10869, 13577, 15305, 13994, 14909, 11923, 12845, 14078, 13176, 10255, 10925, 14274, 13327, 13496, 11035, + 12506, 13729, 12851, 12803, 14424, 15091, 12661, 14467, 15134, 14273, 13462, 14933, 13337, 13909, 15098, 14902, + 11693, 13510, 11560, 11547, 14592, 15005, 15158, 15048, 15086, 13920, 12398, 13576, 14081, 14980, 15033, 14852, + 12188, 14995, 13398, 14654, 15320, 14245, 14717, 13516, 13219, 15352, 14822, 15095, 15255, 13688, 14679, 15278, + 15131, 14933, 14708, 14577, 13355, 14388, 14479, 14893, 12467, 14660, 13902, 14238, 14119, 13367, 12353, 15277, + 14677, 11060, 11273, 13958, 10553, 14123, 9719, 15306, 9296, 14221, 14724, 13724, 14583, 11504, 15158, 15045, + 14942, 11322, 15106, 12331, 13125, 15161, 14921, 13071, 12349, 10274, 13541, 12414, 15096, 14923, 14461, 12646, + 14861, 13133, 15006, 11331, 14672, 14582, 14926, 14753, 14851, 11275, 12754, 12860, 14434, 15267, 8391, 14471, + 14568, 14055, 13047, 14936, 15039, 14148, 13751, 14936, 13993, 15164, 14703, 13781, 14336, 13769, 14067, 12692, + 13225, 13323, 15259, 14858, 14755, 14853, 15137, 13834, 13739, 13916, 12132, 15114, 15197, 13400, 12515, 14827, + 12487, 9397, 13545, 14589, 15065, 14716, 14431, 14880, 
14767, 14472, 13547, 14483, 12922, 14668, 15016, 13963, + 12587, 13974, 11038, 14561, 13512, 10973, 14614, 12972, 10378, 15266, 14384, 14848, 14958, 11785, 14510, 13819, + 14392, 14533, 13611, 14264, 13051, 15166, 13869, 13736, 15351, 13812, 14862, 15177, 12693, 15350, 14784, 15251, + 7262, 14966, 12363, 14746, 13588, 12997, 14213, 14487, 15052, 13474, 14983, 14769, 12661, 12708, 14768, 14655, + 13895, 14888, 13650, 14050, 14844, 11896, 14524, 15308, 14524, 14091, 14777, 10967, 14626, 14014, 10423, 11560, + 13812, 14696, 14772, 13967, 14851, 6841, 9070, 14811, 13355, 9789, 15021, 15211, 10294, 11537, 14561, 14616, + 14602, 14537, 13486, 14184, 14626, 15188, 12290, 14831, 15322, 14739, 14599, 15119, 14886, 14367, 9468, 12686, + 14526, 14606, 13383, 13883, 14836, 14870, 13210, 15251, 12789, 13835, 14228, 13019, 14724, 14361, 14124, 13334, + 15092, 14767, 12614, 13986, 14444, 13406, 14704, 14710, 14910, 14849, 14704, 11972, 14422, 13092, 11423, 14713, + 15325, 12753, 10036, 12384, 14094, 14273, 13771, 13921, 13656, 8285, 14450, 14493, 14780, 15227, 13499, 15033, + 14515, 14668, 15064, 13692, 14668, 15127, 13598, 12537, 14425, 14873, 15317, 13919, 15209, 14409, 12993, 14386, + 15121, 14735, 10473, 14432, 14636, 14611, 14340, 8349, 14676, 14740, 12557, 12668, 14058, 13849, 12837, 13337, + 13418, 12268, 14048, 14315, 13943, 7729, 14274, 14451, 14915, 14583, 13737, 13412, 14928, 14463, 14399, 13943, + 15074, 12950, 14741, 15067, 12529, 14717, 14953, 11895, 15162, 12311, 14216, 12791, 9600, 14733, 14882, 13081, + 14587, 13389, 14772, 14617, 14993, 15285, 15153, 14176, 13644, 14676, 15103, 14932, 12108, 14999, 12813, 15220, + 14152, 13190, 12377, 15187, 15263, 12450, 13904, 11163, 15050, 13920, 13969, 11662, 14768, 13009, 14457, 14575, + 13561, 10339, 15065, 13989, 14673, 14780, 14541, 14931, 14702, 14888, 14131, 14963, 15227, 12628, 14825, 14774, + 13050, 14861, 11955, 15196, 15256, 12641, 12856, 15249, 12228, 13928, 13970, 13546, 14248, 11476, 12952, 15147, + 15297, 
13878, 14999, 14643, 14223, 12432, 12590, 15292, 14725, 14999, 10848, 13119, 14787, 15203, 15002, 14713, + 13685, 6121, 13449, 15029, 14641, 14881, 15272, 14719, 15187, 12792, 13452, 15157, 14747, 14651, 14363, 14345, + 14548, 14418, 14957, 14498, 14566, 14952, 14575, 14097, 12776, 13608, 15065, 15103, 13283, 11205, 12314, 14453, + 13744, 14889, 14206, 14968, 13210, 14676, 15173, 9352, 15140, 14663, 14402, 13529, 11765, 14170, 15300, 14548, + 14880, 14449, 14729, 13987, 14494, 14072, 12508, 14146, 15250, 11654, 14608, 15017, 14912, 13389, 15024, 11786, + 14582, 13542, 11446, 15286, 13914, 14893, 13278, 13546, 14721, 13549, 13885, 13977, 13047, 14463, 14576, 14821, + 14461, 13777, 14568, 15013, 14563, 12659, 14689, 15002, 15256, 10913, 14605, 15321, 15150, 15332, 14089, 14274, + 14385, 14252, 14012, 14628, 14026, 13066, 10167, 15267, 14875, 14329, 14954, 14565, 10713, 14798, 15201, 14526, + 14727, 14829, 14938, 11370, 14661, 14808, 14208, 12352, 13613, 14362, 11767, 15011, 15132, 8487, 14289, 12124, + 10122, 14540, 15056, 13542, 14061, 13592, 12799, 15236, 14590, 14093, 12756, 14532, 14130, 14928, 11821, 11948, + 14790, 14584, 12651, 13745, 15173, 14861, 12777, 13721, 14458, 14158, 14978, 15084, 11812, 14970, 12581, 14807, + 14849, 15194, 15299, 15227, 14170, 8828, 9884, 14930, 14501, 15153, 13736, 14388, 13523, 15294, 15032, 14293, + 14063, 12539, 14941, 12332, 13908, 15056, 11910, 14055, 13786, 13299, 14719, 13392, 12957, 13370, 14798, 15001, + 15228, 15264, 11480, 11339, 13486, 15066, 14690, 13917, 12743, 14376, 13611, 14269, 12306, 13942, 12041, 14525, + 11356, 14175, 12453, 14097, 12306, 14353, 12265, 12253, 14632, 14961, 10809, 14239, 14669, 12609, 11190, 10766, + 14546, 14553, 14465, 12474, 14418, 11800, 12413, 14612, 14817, 14737, 14875, 14637, 14572, 9471, 13619, 10201, + 12788, 14122, 14768, 14823, 9654, 14076, 14924, 13873, 14711, 14089, 10974, 14828, 15311, 11301, 12043, 14843, + 12373, 13490, 12038, 15208, 13578, 14879, 14544, 15350, 11308, 14968, 
12047, 12055, 14658, 14513, 14753, 14937, + 14665, 13427, 11154, 14313, 13827, 13461, 15002, 10387, 12298, 15007, 14527, 14817, 14560, 13680, 14349, 10813, + 8196, 14814, 10174, 14385, 13230, 9345, 14481, 14519, 14763, 14262, 13454, 11836, 12251, 12508, 15185, 13825, + 13860, 13549, 12941, 12945, 15185, 12029, 15037, 13725, 12790, 14811, 14746, 13505, 14698, 9259, 15280, 14467, + 14571, 14407, 13849, 12849, 13555, 14831, 15277, 13245, 11082, 14065, 11553, 14095, 14704, 13759, 15300, 12857, + 11970, 9819, 13892, 15069, 14627, 15016, 15158, 14827, 12480, 14149, 15128, 13760, 13818, 14112, 13604, 15198, + 14150, 14150, 13448, 14846, 15304, 11798, 15181, 15214, 14845, 14972, 14563, 12890, 13102, 9944, 14538, 13313, + 14459, 14517, 13232, 12379, 13320, 14934, 12226, 13844, 13574, 9607, 13231, 12191, 14897, 15281, 15144, 15143, + 14754, 13767, 13479, 10847, 12101, 14546, 14988, 14249, 15059, 13011, 13263, 14538, 14512, 15106, 13595, 14945, + 14752, 12753, 14153, 10899, 14641, 13665, 14794, 14347, 14978, 14648, 13101, 11435, 15346, 10877, 14413, 13492, + 12350, 14917, 13805, 13680, 14610, 14996, 15057, 9262, 14076, 14712, 14431, 15228, 15101, 13695, 12552, 14819, + 13231, 13956, 14285, 13030, 14893, 11405, 13634, 15042, 14258, 13779, 12720, 14697, 14641, 14810, 14034, 15064, + 14821, 14758, 13622, 14225, 15017, 15053, 14188, 14198, 13764, 14341, 11775, 14225, 13753, 14338, 15095, 14694, + 15143, 14755, 14753, 14845, 13744, 13201, 15045, 11901, 14959, 12337, 14480, 13875, 14766, 15144, 14683, 10498, + 14244, 13975, 14068, 14338, 14560, 14860, 14711, 15180, 14633, 13513, 15302, 14326, 14562, 13448, 14697, 14543, + 14625, 13471, 14111, 15100, 15304, 11506, 13929, 12048, 14707, 13825, 13810, 15158, 13343, 14503, 13064, 14526, + 14291, 14555, 14813, 12735, 14738, 14198, 11716, 13928, 15264, 13781, 12829, 10730, 11597, 15360, 14884, 11027, + 14632, 13647, 12422, 14754, 12247, 14342, 14701, 15351, 14381, 14474, 13988, 13501, 12917, 14902, 15244, 14456, + 14614, 15016, 10315, 
10770, 14300, 13149, 14893, 13844, 9242, 13696, 13599, 14950, 14695, 12323, 15091, 14368, + 13334, 13286, 15097, 12418, 12750, 10136, 14280, 14732, 14827, 12606, 14477, 13969, 12834, 13788, 13930, 14649, + 14691, 15314, 12848, 14383, 12735, 14900, 14629, 15207, 13746, 14699, 14569, 15339, 10776, 15184, 15078, 14512, + 14355, 14779, 14403, 12594, 15025, 12565, 15181, 14636, 14346, 13906, 10933, 14993, 14622, 11873, 12820, 14145, + 11936, 15067, 14419, 12468, 13335, 13244, 14906, 11881, 10344, 15001, 13575, 13401, 14367, 14364, 15120, 13847, + 13486, 14318, 13937, 13348, 13441, 9967, 12348, 13564, 13767, 14412, 14515, 13051, 14306, 14972, 13513, 14126, + 14213, 13475, 14268, 14523, 14320, 15162, 12783, 13239, 14071, 10364, 14770, 14562, 14692, 14211, 12769, 14676, + 14704, 14982, 13400, 15024, 13834, 12665, 15240, 12565, 14615, 14449, 9819, 12975, 15194, 13179, 12751, 12471, + 14448, 12538, 14699, 11122, 11659, 13594, 9412, 13211, 14724, 11359, 14627, 14892, 14762, 13937, 13580, 15311, + 15061, 14971, 12755, 15331, 14342, 13781, 15117, 15155, 15003, 14474, 6755, 14869, 14188, 12061, 13498, 12484, + 15269, 15004, 15072, 14594, 13481, 13947, 9321, 13871, 12439, 12807, 10403, 12924, 14914, 12637, 15030, 8549, + 12118, 12996, 14867, 14116, 14914, 13727, 13272, 15066, 10572, 14990, 14987, 12898, 14799, 12821, 14993, 14886, + 14468, 13895, 13660, 13689, 12288, 12878, 12678, 13449, 14713, 15339, 12798, 14956, 12839, 14286, 14303, 14791, + 14916, 15069, 14911, 13940, 14599, 13638, 13161, 14648, 13154, 14198, 13787, 12641, 12544, 14301, 13724, 15019, + 14558, 8103, 14866, 10588, 12889, 13564, 13326, 15270, 14240, 14919, 12918, 15119, 10128, 14208, 11324, 10251, + 15284, 14598, 13443, 15094, 14335, 15152, 14812, 12616, 14760, 14442, 13186, 15139, 11337, 13649, 12813, 13457, + 14564, 14950, 15047, 15303, 15349, 11331, 14676, 12139, 14383, 14393, 13699, 13801, 14707, 14471, 13078, 14065, + 14422, 15331, 14845, 14888, 14973, 14378, 14723, 13080, 14939, 13937, 14054, 15293, 13570, 
12622, 13584, 14471, + 14890, 13989, 13385, 13510, 13957, 13554, 15161, 14869, 13875, 14930, 14471, 15055, 13190, 13026, 14621, 14377, + 13403, 14195, 14510, 14412, 15279, 14734, 14727, 15136, 14412, 15225, 8281, 14202, 13680, 13816, 13756, 14512, + 13085, 14339, 14563, 13713, 15100, 13793, 15013, 15243, 14049, 12140, 12601, 13927, 13927, 14825, 14796, 15181, + 14149, 15306, 14241, 14975, 14040, 15245, 14738, 12852, 14393, 13716, 13181, 14514, 14512, 13369, 14669, 14937, + 14328, 14201, 13932, 15358, 13434, 10915, 12385, 14061, 14472, 14117, 13847, 14560, 14595, 11918, 13928, 14485, + 14779, 14063, 15190, 12954, 14047, 14360, 14032, 13190, 12835, 14802, 13963, 13362, 14618, 14744, 15108, 15144, + 14926, 15039, 13816, 15164, 14003, 12444, 12667, 14322, 13814, 14031, 14848, 13654, 12658, 14666, 14840, 14399, + 14571, 13937, 14796, 14022, 11660, 13737, 9792, 10329, 13993, 13918, 14800, 13450, 15131, 14729, 12744, 14013, + 13111, 14113, 15338, 14003, 14248, 14407, 15187, 13989, 13628, 15146, 14262, 13883, 14290, 14355, 13514, 15286, + 14641, 14849, 13570, 13354, 14777, 13444, 14341, 14518, 13383, 15074, 13435, 15148, 14552, 15331, 14509, 13401, + 15218, 15181, 14543, 14468, 15116, 15173, 15196, 9320, 15171, 12393, 14818, 14846, 14791, 14412, 14734, 14965, + 14018, 13536, 15224, 15120, 14434, 14338, 9608, 15339, 14857, 11793, 14954, 15259, 11570, 11336, 14627, 12559, + 14781, 14612, 13494, 14516, 15079, 14748, 15027, 13465, 14014, 14418, 13233, 14750, 15184, 15203, 11418, 15119, + 14581, 14301, 13863, 10314, 12278, 14402, 14192, 13120, 13145, 13905, 14393, 11089, 13713, 15278, 12444, 12930, + 14839, 12597, 14862, 12962, 13185, 14709, 15359, 13560, 14943, 14790, 15314, 14310, 13500, 13431, 12219, 7376, + 12947, 14883, 14849, 7334, 14837, 14395, 13480, 14763, 14210, 14481, 15286, 13575, 14058, 13857, 12654, 5320, + 15309, 13138, 14891, 15146, 15314, 12443, 15223, 15002, 14460, 15059, 14506, 14847, 14701, 14470, 11411, 14349, + 12437, 14542, 14406, 14642, 12366, 15018, 
14170, 13866, 14376, 14787, 15044, 12298, 13394, 14728, 13367, 14406, + 15079, 13914, 14615, 15098, 15304, 15149, 14606, 14754, 15339, 15172, 14487, 12932, 14823, 14949, 13775, 15077, + 14452, 14514, 15000, 15256, 15267, 11977, 15244, 12635, 14334, 14043, 13788, 15210, 14702, 14514, 8850, 14549, + 13604, 14849, 11236, 13262, 15135, 14739, 15262, 14716, 14178, 15099, 10362, 14378, 14464, 15316, 13489, 9936, + 12615, 14936, 13944, 14816, 12108, 14821, 13804, 14808, 13255, 14469, 14493, 7181, 15063, 14855, 13300, 12468, + 14727, 13834, 13999, 13156, 14531, 14614, 14191, 15023, 14999, 14430, 13122, 13192, 13631, 13434, 14920, 13041, + 14629, 14431, 12916, 15037, 15074, 14548, 14955, 13400, 14594, 13912, 14362, 15276, 14954, 12748, 14201, 14286, + 15186, 13070, 15143, 11889, 14949, 13693, 15225, 12226, 15189, 14729, 15208, 11619, 14102, 11317, 14094, 14784, + 15309, 15017, 14886, 15095, 14923, 13321, 14505, 13169, 15196, 12897, 15217, 13900, 14856, 14435, 14578, 14933, + 11812, 13943, 15183, 15023, 14646, 14884, 15068, 15170, 14408, 14093, 15260, 14001, 13644, 15262, 15031, 12831, + 14353, 13972, 15164, 14064, 14770, 13402, 14709, 13436, 15144, 15047, 14474, 15027, 15142, 13394, 15192, 7929, + 15190, 12065, 13733, 12988, 14268, 13551, 14884, 10280, 14469, 14337, 12921, 13592, 14970, 14310, 12427, 13774, + 14950, 14663, 14285, 13120, 14577, 14256, 11902, 15272, 15155, 12842, 14715, 15194, 14618, 15133, 14722, 8443, + 14463, 13769, 12855, 13239, 13955, 15016, 9550, 14400, 13682, 15267, 14420, 14309, 13980, 14814, 15304, 14279, + 13353, 13406, 13677, 15009, 11847, 14462, 13538, 12690, 13002, 14968, 13307, 15359, 14024, 10116, 14292, 14924, + 12963, 13504, 13579, 14964, 10700, 12852, 14672, 15082, 14081, 14385, 14179, 13693, 12437, 15002, 8450, 14727, + 13222, 14557, 15167, 12686, 12838, 12859, 14815, 13695, 14100, 13551, 13761, 13036, 14058, 13930, 14678, 13636, + 14178, 15135, 13461, 14962, 15177, 14747, 14227, 13580, 12367, 14746, 14993, 13006, 12643, 14433, 13430, 
11877, + 14212, 14845, 13549, 14128, 14421, 12997, 15239, 13111, 14753, 15214, 15171, 14035, 15176, 13611, 13098, 11936, + 15230, 13365, 12959, 14305, 13572, 15208, 7984, 14601, 14821, 11375, 14512, 13901, 14322, 12682, 15291, 14351, + 12683, 14418, 13255, 13504, 14966, 13042, 14707, 14578, 14639, 13025, 14562, 15181, 14734, 13362, 15116, 15146, + 14517, 12531, 14650, 14874, 14257, 15303, 11994, 14844, 14510, 11339, 12089, 14565, 14698, 14538, 13463, 13241, + 11884, 15023, 13566, 15206, 13467, 14734, 14345, 14120, 15276, 14638, 12536, 11956, 15340, 15313, 13998, 10603, + 14358, 12475, 13883, 12695, 10899, 13974, 13248, 12235, 13924, 15299, 13639, 14598, 14907, 13919, 14420, 13561, + 14529, 14169, 14683, 14804, 13598, 12775, 14588, 13524, 8334, 13842, 14715, 14892, 15257, 14781, 14008, 15066, + 14306, 14620, 13129, 14896, 15024, 15235, 10854, 13714, 8493, 14607, 13692, 14831, 15053, 15204, 14403, 15184, + 15131, 12559, 13940, 12042, 12724, 14363, 14613, 14056, 10514, 13806, 15091, 13860, 11855, 11982, 15130, 14984, + 13500, 15284, 15078, 13670, 15353, 13995, 13584, 13382, 13286, 13595, 12857, 13467, 13493, 15003, 14001, 14869, + 14267, 14778, 14359, 11996, 14593, 13999, 14761, 14278, 10752, 13715, 14256, 15066, 14877, 12211, 9800, 14961, + 14329, 14595, 15187, 15086, 14985, 14375, 14589, 11079, 14709, 15358, 13464, 14719, 15059, 14235, 7493, 13772, + 14522, 13134, 15032, 13214, 13795, 14890, 14566, 15276, 11491, 14670, 14448, 14038, 14931, 15181, 13967, 13376, + 13312, 14191, 15020, 13409, 9452, 13441, 14692, 15169, 14509, 14272, 15239, 14680, 13438, 14110, 15081, 14878, + 12389, 13792, 11068, 11729, 13532, 9321, 13640, 14772, 14045, 12524, 13249, 13244, 15033, 14338, 11789, 11031, + 13676, 10766, 12488, 14584, 14101, 12662, 15205, 12396, 14982, 15253, 12947, 13315, 13565, 12026, 13344, 13761, + 14767, 12255, 13602, 13598, 12884, 14298, 15282, 15241, 14529, 11235, 14582, 12729, 15112, 15172, 13739, 13999, + 14191, 11269, 14766, 14456, 14786, 14085, 13927, 14332, 
15236, 14752, 15278, 9775, 15115, 11710, 14598, 13154, + 12640, 11853, 14558, 15291, 13784, 14836, 11780, 13919, 11493, 14291, 14592, 12959, 14010, 15286, 14364, 14193, + 13136, 15312, 15101, 13135, 13212, 13711, 13571, 14205, 11053, 15015, 13495, 13680, 13512, 14740, 14499, 15244, + 14472, 15079, 14554, 12862, 14408, 13447, 14436, 14730, 13564, 9470, 14380, 15343, 14524, 10520, 13384, 15006, + 15280, 15210, 15244, 13619, 14845, 15269, 15107, 13794, 12930, 13200, 14713, 15089, 13635, 13874, 12350, 14555, + 15090, 12437, 11149, 15321, 8330, 15359, 13347, 14824, 14044, 13829, 12941, 14114, 14597, 13967, 13557, 14452, + 14930, 12335, 12549, 14346, 14507, 15197, 14587, 12284, 13879, 15227, 14829, 9551, 14731, 13622, 10487, 14123, + 13446, 15151, 14475, 13117, 14686, 14349, 8078, 15234, 14031, 12378, 14724, 12992, 14435, 14873, 14752, 14192, + 14777, 14637, 13966, 15133, 14806, 15107, 14251, 14830, 11286, 13477, 14822, 14525, 13792, 15225, 13356, 14629, + 14943, 14434, 13530, 14969, 13148, 11636, 15168, 12558, 13060, 15304, 13610, 13243, 13861, 10751, 13003, 14090, + 11612, 10944, 13426, 13116, 13354, 14640, 14825, 13727, 14279, 11908, 14049, 13334, 11691, 15152, 14002, 12286, + 14849, 14797, 14757, 14185, 14660, 15083, 13086, 14550, 14305, 13181, 14937, 13796, 14345, 14615, 14055, 14734, + 14742, 13681, 13640, 13768, 12045, 14220, 14780, 14365, 13626, 12762, 13690, 14265, 13219, 12412, 15247, 12732, + 14579, 12564, 15092, 12870, 15149, 13811, 11887, 14269, 13768, 14793, 14975, 14851, 15249, 15088, 14740, 14669, + 14772, 10875, 15147, 15254, 14480, 15300, 13405, 12341, 13706, 12647, 14967, 14526, 14013, 14248, 12696, 15035, + 12287, 15127, 15169, 13396, 13324, 14562, 13114, 13385, 12754, 12814, 14562, 14978, 15086, 14151, 15270, 14401, + 12916, 14486, 13737, 12798, 14798, 14586, 10346, 13347, 14877, 14356, 14740, 12098, 14388, 8319, 14654, 13065, + 13192, 3788, 9557, 12747, 13647, 14199, 15011, 15045, 14970, 13412, 11368, 15348, 14836, 14199, 14774, 14374, + 13965, 
12978, 14781, 14503, 15201, 13527, 13595, 15203, 11059, 15006, 14820, 14091, 15324, 14345, 14692, 15163, + 13922, 15301, 8816, 13716, 13547, 15312, 14767, 6474, 13996, 14811, 14982, 12604, 14404, 13143, 13782, 14062, + 14809, 14751, 10297, 11499, 10735, 13464, 15319, 14753, 15135, 12634, 12385, 14632, 14391, 12735, 14670, 14268, + 14883, 11775, 13547, 15336, 11615, 12333, 11757, 10917, 13715, 14499, 15225, 13492, 12495, 13433, 14717, 14983, + 15114, 14011, 13206, 13739, 11226, 14640, 14724, 13970, 14902, 13919, 14360, 14453, 14654, 15025, 14742, 14832, + 14743, 14294, 12498, 14328, 14559, 11815, 12816, 15344, 13592, 11887, 13019, 12766, 14620, 13790, 15208, 12928, + 11537, 12760, 14404, 13555, 10615, 14200, 10344, 14169, 15326, 14850, 12835, 10964, 14964, 13502, 13346, 14520, + 15021, 14983, 14724, 12730, 14128, 11272, 14489, 13934, 14967, 14758, 14653, 10722, 11218, 7386, 13009, 15036, + 11682, 14893, 13894, 14113, 11286, 15317, 14571, 14509, 14016, 14634, 13788, 13864, 8600, 15344, 13620, 15145, + 11473, 13115, 12007, 13244, 12317, 15127, 14981, 14762, 14940, 14214, 13180, 14227, 14908, 15064, 15191, 14447, + 15263, 14411, 15160, 13454, 13745, 14621, 13561, 13028, 15198, 13031, 14521, 13445, 14885, 14002, 14552, 14590, + 15001, 15286, 14180, 15138, 10278, 12604, 15328, 13592, 14702, 10804, 14782, 14219, 10650, 14608, 12825, 15166, + 14982, 14185, 15066, 13920, 14677, 15130, 13893, 12606, 15259, 13012, 14472, 11925, 15282, 3463, 15005, 14904, + 15000, 9485, 13039, 15095, 14735, 14802, 14804, 14517, 13857, 14786, 13764, 13408, 15129, 15040, 14579, 15305, + 12495, 14822, 15092, 12754, 14825, 14300, 14003, 13538, 13327, 12962, 14092, 14938, 14656, 15003, 13142, 12997, + 14537, 14407, 14341, 15168, 14463, 15110, 14388, 12166, 13345, 12537, 15186, 3873, 14374, 14415, 15235, 14480, + 14491, 11214, 9339, 12190, 12519, 10266, 15239, 13460, 14448, 12650, 14599, 13464, 12614, 14152, 14455, 13768, + 14374, 11468, 14861, 15047, 13156, 13232, 15156, 14724, 11318, 12328, 14015, 
14505, 13206, 14296, 14432, 13886, + 14696, 13962, 12674, 14124, 14815, 14592, 15317, 13982, 14623, 14371, 15154, 13736, 14627, 12790, 12527, 14965, + 14949, 12395, 14925, 12490, 14149, 9500, 14430, 14586, 14388, 14642, 13646, 14604, 15046, 12306, 11047, 14372, + 12469, 14215, 15050, 12350, 14667, 14926, 14719, 15360, 14963, 13989, 14904, 15038, 14049, 13383, 15100, 14570, + 14096, 15049, 15312, 13870, 14393, 11117, 8780, 13964, 14286, 14813, 14575, 11932, 14242, 14338, 15221, 13579, + 12681, 14528, 14032, 12884, 12914, 13953, 12811, 14850, 14829, 13677, 14299, 11290, 15163, 10956, 12909, 13587, + 15152, 15033, 14916, 11774, 12257, 14702, 13280, 10575, 14150, 13806, 15160, 14871, 14021, 14877, 14641, 14298, + 15314, 15335, 13950, 15340, 14271, 11308, 14503, 13364, 14539, 15196, 11435, 13963, 13120, 14100, 14704, 14523, + 9512, 12067, 15310, 14775, 14062, 13651, 14262, 13117, 12586, 14354, 11480, 14406, 13957, 13804, 14526, 14632, + 13554, 14925, 11264, 13048, 11541, 14679, 13151, 12729, 11371, 15158, 9672, 14986, 12689, 13840, 14482, 14339, + 14195, 14537, 14820, 13358, 14957, 14272, 14122, 14652, 12103, 14348, 12924, 14789, 14376, 14403, 14133, 14512, + 14225, 13410, 14639, 14809, 14512, 15110, 14535, 14037, 14287, 14108, 14926, 15271, 14697, 15225, 14813, 15033, + 14891, 14536, 14926, 14451, 10429, 14776, 14185, 15016, 12227, 15067, 15305, 14545, 10419, 15032, 15119, 15032, + 13738, 14673, 11365, 14384, 14209, 13817, 14672, 13618, 15214, 12966, 11680, 12479, 12219, 15315, 12821, 11102, + 14448, 14638, 13544, 13664, 13706, 14695, 12861, 15086, 14967, 14002, 14436, 15092, 14757, 13673, 14346, 14445, + 14792, 14970, 12350, 6610, 13593, 14699, 15261, 14400, 13459, 14541, 12614, 14730, 13723, 15236, 14437, 14547, + 14392, 14507, 13748, 14459, 11000, 12988, 14385, 14667, 14589, 11085, 14206, 15154, 14905, 13691, 12364, 14489, + 14305, 14087, 14706, 15043, 14920, 14370, 15337, 15328, 15152, 13544, 14521, 14571, 15283, 15145, 11685, 15097, + 13333, 13579, 11983, 14789, 
14880, 13933, 13471, 14347, 15239, 12958, 15357, 15191, 14766, 15128, 15113, 13619, + 14879, 15122, 14343, 14709, 12334, 15230, 15238, 15262, 14774, 14872, 12585, 13080, 15176, 13894, 15163, 15126, + 14422, 13592, 14737, 14277, 15034, 14654, 15094, 15212, 15328, 13339, 12673, 14353, 14884, 13812, 14748, 13214, + 13236, 14248, 13831, 12579, 11704, 12123, 14766, 15155, 14733, 14412, 15114, 14629, 14887, 14963, 15271, 15018, + 14645, 15089, 15240, 12653, 14570, 14046, 12760, 14377, 15267, 11043, 14440, 12129, 15114, 15106, 13435, 14346, + 13827, 14995, 15205, 14008, 12259, 14052, 14767, 12562, 14385, 14106, 14511, 14996, 13276, 12758, 14868, 12749, + 12822, 13828, 9680, 14038, 14461, 12939, 15180, 13018, 14706, 14641, 14450, 12450, 12655, 13931, 13761, 14034, + 14702, 11694, 14531, 14869, 15243, 13959, 12645, 12324, 15313, 14114, 14165, 14687, 14812, 12459, 11697, 13258, + 14982, 11765, 14460, 13827, 15095, 14486, 15008, 15204, 14024, 14445, 14921, 12916, 14108, 14512, 13702, 12743, + 13400, 14287, 14189, 14069, 14059, 13511, 14882, 13450, 12596, 15090, 14702, 15191, 14544, 15173, 13398, 14849, + 14833, 12387, 11856, 14074, 9724, 14775, 13361, 13977, 10680, 14391, 15262, 14857, 14091, 13956, 15146, 15063, + 14136, 14705, 14517, 13931, 15192, 13545, 14406, 11125, 15136, 12979, 10672, 13844, 12914, 15063, 14171, 14033, + 15178, 13944, 12423, 15255, 14481, 12596, 15189, 14280, 9851, 15000, 14558, 12636, 13533, 13254, 14873, 14345, + 13371, 14687, 14513, 14472, 14727, 14497, 15355, 14955, 11675, 15329, 14919, 14685, 13238, 14864, 14255, 15183, + 9769, 12734, 14919, 15311, 14381, 13505, 15089, 14819, 13783, 15034, 11422, 14444, 10978, 14679, 14163, 11165, + 14763, 15003, 15232, 13454, 11473, 13427, 14553, 13856, 14905, 15234, 12885, 14901, 13394, 12941, 14814, 14442, + 13702, 13409, 14267, 14147, 15159, 11830, 13625, 13611, 14768, 11492, 12348, 14171, 11426, 10252, 10959, 14490, + 15025, 14372, 14779, 15084, 14325, 12752, 14390, 13955, 13579, 14313, 14624, 12691, 15097, 
13636, 14869, 13665, + 13488, 14154, 14704, 14347, 15012, 14459, 11501, 13522, 14162, 14295, 9461, 14400, 15048, 14366, 13930, 14813, + 12598, 15168, 14096, 15213, 14190, 11293, 13061, 15159, 12296, 13456, 11942, 14007, 14112, 14673, 13420, 14596, + 15093, 13277, 15319, 13334, 13821, 15287, 14530, 14440, 15077, 15124, 14264, 9991, 15312, 13255, 14757, 10704, + 15327, 12964, 13874, 14237, 13947, 13595, 14612, 13422, 14252, 14770, 12568, 12484, 13666, 12375, 11085, 14388, + 13490, 10487, 14773, 13987, 11475, 10004, 12315, 10892, 13629, 12496, 14876, 14108, 14790, 13307, 12845, 14867, + 12626, 14771, 13550, 12524, 14317, 13317, 13302, 14915, 10935, 13561, 12577, 14988, 13625, 14486, 12576, 14253, + 10754, 15261, 14497, 14757, 13613, 12870, 14645, 15251, 12374, 12810, 13249, 12832, 14777, 14035, 14334, 13931, + 15127, 14101, 14849, 13418, 15173, 13220, 13520, 8830, 15079, 14923, 13414, 14436, 13668, 11893, 12955, 13171, + 12762, 14520, 15267, 14005, 14724, 13733, 14263, 11836, 11977, 12167, 14396, 14775, 14999, 14711, 14671, 11904, + 15134, 15336, 15086, 13444, 12480, 13185, 14762, 11683, 12035, 11019, 14166, 14077, 14020, 11644, 15120, 14358, + 14303, 14054, 13814, 14085, 14433, 14924, 7508, 12804, 13082, 15037, 14382, 13824, 14141, 14965, 15018, 14805, + 13241, 15314, 12426, 12812, 11402, 13874, 14371, 13945, 12646, 13066, 14431, 12471, 14192, 15043, 14481, 13374, + 13724, 13390, 14736, 15123, 13363, 14629, 15275, 14969, 10954, 13267, 14439, 13717, 14342, 15214, 15220, 11286, + 12844, 14116, 14493, 14547, 15153, 13579, 14720, 13882, 12625, 14493, 13597, 13518, 15227, 13386, 15123, 13319, + 14646, 14179, 14854, 12661, 14717, 14604, 13865, 14821, 14046, 14450, 14454, 14435, 14488, 15296, 13861, 11712, + 15072, 14713, 15042, 15019, 12453, 14473, 14427, 14948, 13949, 12742, 15035, 14230, 12257, 14374, 13436, 13685, + 8742, 14392, 15094, 14419, 14109, 14716, 14001, 13391, 12016, 15019, 14514, 13228, 15264, 5911, 14345, 14742, + 14180, 13361, 13305, 14921, 12215, 15271, 
14158, 15173, 14972, 13553, 14543, 14479, 14879, 12300, 14394, 13716, + 12175, 14293, 13887, 11665, 12544, 14997, 11940, 14958, 14235, 14904, 12616, 14051, 14255, 14303, 11361, 14907, + 14982, 12829, 13719, 14598, 15339, 14404, 15070, 9564, 8693, 13816, 12503, 15206, 15067, 13340, 13840, 14396, + 14558, 12140, 14024, 14707, 12738, 14186, 15166, 12319, 14420, 15132, 14999, 14469, 11292, 14494, 7713, 11900, + 12380, 12662, 14151, 15109, 14511, 13948, 14644, 14982, 10275, 11992, 14951, 15135, 14213, 13261, 14638, 15003, + 14442, 14744, 14623, 14816, 14980, 12125, 14049, 13449, 15244, 15160, 10874, 13958, 14920, 14240, 14429, 15171, + 11083, 12478, 14754, 14413, 14556, 14448, 15025, 14916, 15101, 13027, 14355, 14536, 15138, 13755, 13871, 15090, + 14810, 14489, 12706, 14803, 15039, 12318, 14748, 13541, 14919, 14525, 13931, 12917, 14527, 14722, 15318, 13710, + 14515, 14425, 14927, 13478, 15273, 14675, 14661, 14662, 13580, 13974, 14781, 13359, 14657, 14964, 14883, 13659, + 13859, 14825, 13462, 13698, 14075, 14697, 15108, 13001, 14755, 14314, 13783, 14464, 14843, 14738, 15206, 12934, + 14749, 15213, 11952, 14668, 14855, 8944, 11869, 12500, 13848, 11815, 14956, 11471, 15163, 15200, 14395, 14452, + 11503, 14351, 11995, 14050, 14968, 13735, 13872, 14385, 14387, 14889, 15081, 14707, 15110, 10144, 15129, 15231, + 13996, 15216, 13139, 10725, 11574, 14822, 15159, 15357, 14877, 14538, 13354, 15085, 14920, 12059, 13570, 12376, + 14789, 15212, 13753, 15034, 13843, 15037, 12859, 12638, 14710, 13662, 13726, 9615, 15261, 15177, 11392, 10458, + 13653, 13429, 13517, 14479, 13560, 14365, 15096, 15342, 13201, 15267, 14921, 14941, 12336, 13518, 13106, 14163, + 13592, 14363, 12398, 15022, 13724, 14634, 15063, 15175, 14387, 15317, 15226, 13123, 14850, 13542, 14326, 15128, + 15287, 14752, 10081, 14339, 13502, 14970, 13287, 15340, 12164, 14692, 15305, 14500, 12727, 11510, 15195, 13364, + 14611, 13803, 12717, 14959, 15275, 15057, 14542, 15011, 14421, 8384, 14791, 10120, 14413, 15181, 14515, 
15208, + 14703, 14344, 15256, 13365, 14674, 15063, 12784, 13066, 14332, 15163, 13305, 14630, 14804, 11654, 14938, 9948, + 11550, 14623, 14602, 10777, 14783, 15023, 13052, 13191, 13770, 12561, 13080, 10421, 15195, 13414, 13446, 13544, + 15150, 13876, 11503, 15040, 12301, 15042, 13706, 14725, 14539, 14903, 14387, 14487, 12502, 14379, 15351, 15255, + 14721, 14433, 14628, 12536, 13147, 14598, 12542, 13965, 13412, 11961, 13058, 13672, 15170, 12108, 14763, 15073, + 15190, 11864, 14731, 14812, 14967, 14413, 14896, 14144, 14737, 14937, 13610, 15188, 15036, 13546, 14382, 15172, + 14739, 13437, 14477, 13936, 11419, 11294, 13985, 10810, 15051, 9326, 14813, 15274, 14345, 15288, 14387, 9530, + 10447, 13507, 14813, 14197, 15071, 14370, 14585, 13894, 13091, 14771, 14143, 13157, 14042, 14065, 15327, 15318, + 15165, 15255, 14692, 15104, 14804, 15102, 7659, 11872, 15344, 15345, 14644, 14375, 14971, 12670, 15347, 13822, + 8129, 15096, 14676, 14915, 11293, 14713, 15094, 14478, 14662, 11005, 15260, 12058, 15272, 13385, 14432, 14454, + 14407, 6760, 14243, 14979, 14566, 11670, 14075, 13999, 13834, 14590, 14797, 14868, 13892, 13973, 13508, 15169, + 15348, 13914, 12198, 13964, 14285, 13776, 14390, 14589, 14465, 15265, 10331, 14090, 15164, 12596, 15037, 13317, + 14871, 14350, 15093, 8012, 14103, 13038, 14182, 10680, 13068, 14549, 15102, 14993, 15285, 14374, 14761, 15155, + 12551, 13347, 12727, 11338, 13735, 12886, 15354, 15246, 15002, 13138, 15280, 14958, 15306, 13832, 13645, 15167, + 15344, 14398, 7559, 14475, 14375, 14145, 14148, 13483, 12164, 15342, 13859, 14550, 14733, 14645, 14354, 14439, + 12871, 12992, 13727, 14921, 13769, 14226, 14161, 14705, 14915, 15244, 15310, 15261, 14453, 15133, 13759, 14490, + 14097, 15000, 14606, 14559, 14695, 15103, 8965, 13769, 13629, 11998, 14812, 15230, 12441, 15061, 4383, 15314, + 14271, 13693, 15000, 13037, 15343, 13331, 15124, 14996, 14774, 12328, 13554, 14190, 13403, 14666, 10321, 13162, + 13654, 14850, 12558, 14213, 15190, 15314, 14644, 11605, 15164, 
14341, 12493, 14607, 14431, 13700, 15098, 14230, + 11433, 14138, 15157, 14574, 13867, 14158, 11294, 13929, 15171, 14797, 15080, 13767, 15186, 12933, 12542, 13945, + 15139, 11535, 13863, 15140, 13657, 14668, 14583, 15274, 15191, 15350, 14667, 13334, 12648, 14441, 14355, 14197, + 15009, 15226, 13425, 15009, 13384, 14392, 14513, 14169, 14424, 14654, 14925, 14810, 14740, 14365, 14450, 14457, + 13576, 14572, 13664, 14657, 14275, 14176, 7897, 14221, 15085, 15234, 15139, 14914, 12808, 14482, 14543, 15276, + 14958, 14674, 15108, 14837, 14844, 13477, 14415, 13963, 14643, 14830, 14833, 14981, 13749, 12283, 15016, 14267, + 12316, 11873, 9968, 12673, 11371, 12975, 13444, 14462, 14993, 15241, 15296, 13388, 14972, 12767, 11894, 12362, + 14531, 14685, 14667, 14456, 14945, 15338, 14919, 15104, 11567, 14678, 14118, 15290, 15342, 14081, 13721, 14660, + 14808, 15328, 15195, 14050, 12547, 15165, 12305, 15107, 14848, 14397, 13740, 14528, 15214, 13723, 13248, 14051, + 15009, 12445, 15199, 14073, 13814, 13749, 14462, 10582, 13020, 12293, 13642, 14805, 14985, 15359, 13454, 12713, + 14582, 13494, 12873, 13325, 14233, 14997, 13559, 14385, 15261, 13654, 14935, 13147, 14588, 15178, 15073, 13592, + 13805, 13321, 10878, 12317, 14362, 13335, 15309, 14007, 15327, 15258, 15189, 13570, 8642, 11677, 14902, 12467, + 13573, 15166, 15126, 14785, 14972, 14077, 14350, 15220, 14455, 15081, 14602, 14722, 13985, 12995, 14256, 10500, + 12373, 14661, 14387, 14126, 13952, 15221, 12035, 13062, 15056, 10400, 12173, 14703, 14868, 11318, 15124, 14719, + 14005, 12861, 12378, 15357, 12274, 13801, 14742, 14413, 13330, 11364, 14935, 15008, 14929, 14799, 13180, 14211, + 14732, 13354, 12127, 15058, 14753, 15253, 14545, 14729, 11593, 13159, 13938, 14709, 14815, 14599, 14394, 14043, + 14428, 15343, 13890, 14939, 14409, 13601, 13637, 13401, 13588, 11459, 14356, 14363, 12697, 14473, 11041, 12083, + 14578, 14620, 12365, 15157, 12668, 12980, 14444, 9338, 13499, 11424, 13907, 12215, 14274, 14806, 11185, 14403, + 14379, 11870, 
11130, 14600, 15187, 10576, 13935, 14998, 13430, 13644, 11940, 13555, 11610, 12834, 13531, 14526, + 14613, 14350, 15342, 15002, 13879, 13039, 15265, 13916, 15171, 14241, 14839, 13354, 13871, 13982, 14845, 14074, + 13973, 14188, 12834, 15254, 14759, 14886, 12184, 13463, 15328, 15078, 12773, 14911, 9378, 13856, 13974, 14897, + 15325, 12585, 14669, 13537, 13549, 13447, 11770, 14526, 14623, 14633, 14280, 12942, 12743, 15341, 14542, 14565, + 14293, 14725, 13886, 14418, 14429, 14066, 11685, 14340, 13221, 12391, 12827, 14100, 14736, 13369, 12553, 12458, + 13813, 14903, 10956, 14840, 14528, 14866, 12247, 14756, 12528, 10612, 15152, 14687, 15062, 11499, 10559, 14533, + 11819, 11801, 13560, 14118, 11961, 14980, 14604, 14019, 14042, 13592, 13874, 14920, 12898, 14769, 13452, 11904, + 12854, 15315, 14829, 14352, 15340, 14808, 14762, 13414, 12691, 13821, 14363, 14160, 14156, 12925, 13580, 13318, + 15117, 15103, 12301, 14759, 14993, 14990, 13691, 13696, 15050, 12576, 14052, 15172, 12419, 15162, 13023, 11692, + 14416, 15332, 13291, 14982, 14581, 15007, 14343, 13656, 14152, 14350, 13047, 11817, 14861, 15270, 14885, 13693, + 11194, 15192, 15030, 14618, 11760, 12171, 15022, 11573, 7682, 15356, 12197, 15317, 14336, 14229, 14625, 15005, + 14845, 15345, 15057, 13541, 14661, 10061, 11229, 12241, 13966, 14077, 14446, 14715, 13704, 14364, 14438, 14369, + 14939, 15000, 14606, 14821, 14643, 14870, 14557, 10013, 13981, 14846, 11779, 14296, 12400, 14613, 13735, 13223, + 10159, 13441, 13166, 14737, 13560, 14610, 13522, 13053, 12788, 14560, 13961, 13578, 14517, 11437, 14833, 15172, + 13084, 14388, 15144, 15199, 14833, 14247, 15209, 14407, 12044, 11546, 11313, 15282, 13889, 13827, 14823, 12228, + 15052, 14702, 12407, 14833, 13993, 15036, 13000, 15004, 15173, 13738, 13292, 14513, 13927, 14439, 14649, 13386, + 15323, 14829, 13975, 11515, 14461, 14609, 13712, 14285, 14503, 12324, 13838, 14695, 14439, 10133, 14155, 15182, + 15197, 10832, 15326, 13584, 15202, 11215, 13608, 12724, 12442, 12268, 12073, 
14128, 14580, 15131, 14517, 15088, + 11688, 11848, 13609, 10473, 14956, 15355, 14389, 12050, 12494, 14052, 14378, 14362, 14251, 12383, 14492, 15155, + 14291, 12293, 15246, 14516, 14429, 13963, 13085, 14747, 14527, 15176, 10933, 15351, 14568, 10676, 12561, 14433, + 15290, 14990, 15042, 12359, 14858, 15069, 14464, 14920, 13730, 14655, 13404, 10892, 15254, 12645, 14153, 14259, + 11339, 15082, 14797, 14057, 15327, 13193, 14349, 15073, 15310, 12614, 12083, 14684, 12946, 12985, 13767, 13962, + 7621, 14450, 13810, 14651, 14635, 12388, 14639, 10285, 15313, 14412, 14693, 14503, 11686, 13460, 15101, 11794, + 15113, 14796, 14036, 11194, 13324, 15161, 14584, 14717, 12430, 13528, 14752, 14423, 12906, 14282, 14881, 14469, + 15150, 13784, 14894, 13681, 11731, 11757, 14737, 13657, 15059, 13244, 14905, 14668, 15233, 15317, 13750, 11634, + 14331, 13954, 13630, 14152, 13736, 13610, 13127, 14601, 14100, 14528, 12819, 14499, 12019, 14618, 10255, 15263, + 14753, 12419, 14105, 12307, 13136, 13110, 14206, 12880, 15017, 15103, 12629, 12103, 13277, 15287, 14644, 13911, + 13396, 14376, 15181, 14926, 8432, 15245, 14944, 14688, 13861, 14204, 11852, 13418, 11571, 15066, 12337, 15160, + 14669, 14345, 14754, 14863, 12039, 12249, 13699, 14725, 14594, 14842, 14266, 14095, 14345, 14130, 14892, 13235, + 14350, 12856, 14379, 11673, 12885, 15014, 10246, 15169, 14306, 14934, 14912, 14561, 14374, 13493, 14995, 14254, + 14431, 15115, 12483, 14415, 9738, 12940, 14670, 11053, 9261, 10574, 13007, 15172, 15172, 14623, 12768, 14293, + 14172, 13875, 15334, 14580, 13760, 14893, 13195, 14062, 15039, 13823, 14778, 14327, 13893, 12682, 2232, 15198, + }; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(reinterpret_cast(data), std::vector({8, 3, 16, 16}), + std::vector({16, 16, 3, 8}), DT_FLOAT16, + std::vector({2, 3, 1, 0}), result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], 
ret[i]); + } +} + +TEST_F(UtestFormatTranspose, hwcn_to_nchw1) { + uint16_t data[8 * 8 * 1 * 1] = { + 12208, 14583, 13592, 12554, 13844, 14952, 13550, 13912, 12895, 14609, 13628, 14807, 13682, 8931, 14783, 15331, + 13308, 14319, 13440, 14485, 15059, 9591, 14072, 12425, 11684, 13490, 12707, 15081, 14622, 15357, 14149, 14457, + 15011, 15043, 13933, 14880, 12999, 14028, 14471, 15004, 9253, 14649, 12929, 14265, 10738, 8334, 11893, 13852, + 14621, 15000, 13395, 15062, 14872, 14369, 15272, 14260, 12609, 15326, 14455, 13991, 14012, 14669, 15110, 12708, + }; + uint16_t ret[1 * 1 * 8 * 8] = { + 12208, 14583, 13592, 12554, 13844, 14952, 13550, 13912, 12895, 14609, 13628, 14807, 13682, 8931, 14783, 15331, + 13308, 14319, 13440, 14485, 15059, 9591, 14072, 12425, 11684, 13490, 12707, 15081, 14622, 15357, 14149, 14457, + 15011, 15043, 13933, 14880, 12999, 14028, 14471, 15004, 9253, 14649, 12929, 14265, 10738, 8334, 11893, 13852, + 14621, 15000, 13395, 15062, 14872, 14369, 15272, 14260, 12609, 15326, 14455, 13991, 14012, 14669, 15110, 12708, + }; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(reinterpret_cast(data), std::vector({8, 8, 1, 1}), + std::vector({1, 1, 8, 8}), DT_FLOAT16, std::vector({3, 2, 0, 1}), + result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, hwcn_to_nchw2) { + uint16_t data[4 * 4 * 3 * 16] = { + 12206, 15252, 13115, 14551, 14648, 14331, 14547, 14493, 15331, 14787, 13916, 13683, 13639, 14658, 13168, 13867, + 15344, 14769, 11735, 14827, 11799, 14241, 13577, 13575, 13692, 14962, 12658, 13574, 14902, 14145, 7260, 12731, + 11408, 15249, 15005, 14593, 14345, 11556, 14944, 12381, 14931, 12928, 13439, 15081, 14916, 12555, 14132, 15127, + 14518, 15027, 11342, 13075, 15300, 14569, 13861, 15295, 13685, 14481, 13679, 14997, 15217, 12472, 13102, 13861, + 14582, 12594, 15219, 13730, 14243, 
14966, 15296, 11656, 12353, 15221, 13763, 11059, 14747, 14431, 15281, 15235, + 13495, 14435, 13960, 14883, 12593, 12703, 12977, 12798, 14912, 13359, 15310, 11484, 9325, 14662, 15035, 15149, + 15212, 13904, 13065, 14156, 14951, 12352, 14899, 13666, 10334, 12081, 14557, 14707, 15348, 10316, 13515, 10960, + 14978, 14715, 6436, 14506, 14127, 15167, 14712, 15016, 12241, 14464, 14839, 14696, 14961, 14089, 14863, 14278, + 7571, 14403, 14152, 14773, 14591, 14633, 14603, 14616, 12746, 15220, 13835, 14451, 14613, 14520, 15271, 14043, + 14874, 14337, 10594, 13156, 14346, 14762, 13793, 14521, 11469, 11686, 14168, 14274, 14848, 13413, 14627, 15323, + 10132, 10983, 11165, 14733, 13978, 14598, 15114, 14608, 14132, 13145, 13837, 11092, 10036, 15051, 13517, 14497, + 15057, 14136, 15166, 15132, 14350, 14478, 14065, 14028, 15070, 13898, 15257, 12292, 13666, 13896, 14723, 15254, + 12933, 13444, 15216, 15000, 11997, 14840, 15167, 14240, 11907, 12932, 9154, 14593, 15147, 14779, 14203, 12811, + 13948, 13791, 10662, 14352, 12429, 13710, 14748, 14300, 14831, 13140, 13839, 13813, 11961, 15318, 14536, 8683, + 14951, 13481, 14009, 11298, 14538, 15313, 14625, 13450, 14782, 13429, 12462, 13913, 13976, 13812, 15037, 14354, + 13690, 13927, 14747, 14392, 14526, 15059, 13637, 15164, 13527, 13434, 15306, 12664, 14668, 15072, 14216, 13813, + 15217, 14980, 13342, 11405, 14030, 12222, 13681, 14135, 12413, 13891, 10703, 14576, 15105, 15005, 15026, 14929, + 10239, 14260, 14900, 14587, 13563, 14644, 14823, 13589, 13685, 15235, 14477, 14623, 14938, 13067, 14160, 14611, + 11279, 14070, 14572, 13528, 14678, 14340, 14645, 15018, 15221, 15250, 14497, 10306, 13701, 14622, 14397, 13593, + 13391, 14954, 15042, 14097, 13225, 13382, 14385, 14779, 13280, 14509, 14870, 15158, 13003, 14188, 12173, 15064, + 14106, 14853, 13813, 12196, 15086, 6765, 14511, 12297, 14482, 15344, 13705, 13905, 14782, 15071, 13085, 14698, + 12004, 14351, 10879, 15172, 14439, 12797, 14288, 14771, 13457, 14878, 10827, 15075, 15017, 13578, 
14290, 14126, + 13426, 12512, 13487, 15008, 13848, 13495, 11581, 13424, 10513, 13823, 14494, 13993, 13684, 15115, 15088, 12791, + 15360, 15015, 8406, 14809, 12276, 12274, 13489, 14998, 10335, 14889, 14803, 13224, 13536, 12404, 14424, 11213, + 15157, 14739, 12099, 15027, 14989, 12158, 14607, 10455, 14856, 15038, 14580, 12491, 14339, 15192, 14607, 14462, + 13799, 13832, 14513, 13889, 14693, 11150, 15086, 13549, 14363, 14829, 14626, 14953, 11531, 13649, 10310, 12240, + 15212, 13940, 11392, 13551, 11780, 13611, 14171, 14478, 15107, 14519, 15184, 14398, 14267, 13960, 11746, 14939, + 14185, 15022, 15053, 13762, 14435, 15065, 13523, 9470, 13809, 15196, 15229, 13988, 13501, 14448, 15124, 12087, + 15300, 14205, 12480, 15345, 14788, 14223, 11611, 13596, 14903, 11919, 13534, 13161, 13832, 15353, 14978, 14903, + 14410, 14415, 13090, 14363, 6160, 10966, 15249, 15268, 14483, 13302, 13538, 13705, 15167, 13341, 13709, 14738, + 10866, 14805, 14677, 12874, 15177, 10810, 13384, 13555, 14705, 15268, 14834, 14844, 13639, 12453, 15012, 14508, + 14897, 13627, 14656, 14783, 14377, 15047, 15134, 14725, 15038, 14015, 14824, 13431, 15062, 15124, 11154, 13814, + 7605, 12835, 12239, 14365, 15076, 15133, 14621, 14480, 14101, 15265, 12292, 14628, 11876, 14320, 14225, 14177, + 14800, 14643, 13435, 13410, 10145, 15234, 11691, 12565, 14255, 14868, 13245, 14949, 14416, 14645, 15261, 14210, + 15060, 15184, 10396, 13901, 14392, 14835, 14241, 14281, 11744, 14559, 14236, 13846, 13497, 13032, 14583, 9447, + 14494, 15196, 13685, 14705, 12661, 14857, 14981, 15331, 15298, 15078, 13418, 12069, 13039, 14718, 14923, 12300, + 13110, 12442, 15076, 14083, 13887, 14665, 14378, 12215, 14512, 14963, 13900, 14863, 14382, 9322, 14497, 13468, + 14544, 12335, 14297, 13821, 14734, 9506, 13633, 14530, 14809, 12961, 14581, 14600, 14916, 15076, 10203, 14486, + 14752, 13722, 14469, 12696, 12560, 14403, 12743, 11937, 13255, 14475, 15212, 14673, 14454, 14550, 13758, 11741, + 10383, 13722, 14579, 15032, 12904, 14360, 14926, 
15141, 13955, 13031, 13524, 15201, 13712, 12978, 13662, 12256, + 12998, 12263, 15114, 11278, 11997, 14528, 13760, 14618, 6477, 13601, 14591, 12414, 11659, 14744, 13576, 13765, + 14081, 15269, 15016, 13540, 14590, 13578, 13526, 9811, 14859, 14122, 15222, 14965, 12828, 15267, 12766, 14260, + 14634, 13610, 15201, 14891, 15051, 13857, 14211, 13871, 13557, 12209, 14773, 14605, 15329, 15074, 13888, 15033, + 13706, 15241, 15281, 14806, 14353, 14217, 15132, 14041, 14921, 14763, 13738, 14526, 13445, 14405, 14971, 14432, + 11517, 14883, 15354, 14434, 13586, 14360, 14611, 14971, 15327, 14782, 14440, 10158, 12160, 13486, 13841, 12795, + 15006, 9795, 13712, 15263, 14896, 13801, 13765, 12379, 12541, 12256, 11983, 14998, 13342, 14099, 12299, 15326, + 14029, 9905, 15106, 12900, 13846, 13807, 14898, 7140, 14001, 13386, 5532, 13747, 10703, 14840, 12719, 13926, + 14440, 14937, 14260, 14670, 9617, 14905, 14194, 15101, 14392, 14461, 13383, 14808, 14814, 12682, 14588, 12954, + }; + uint16_t ret[16 * 3 * 4 * 4] = { + 12206, 14518, 15212, 14874, 12933, 13690, 11279, 12004, 15157, 14185, 10866, 14800, 13110, 10383, 14634, 15006, + 15344, 14582, 14978, 10132, 13948, 15217, 13391, 13426, 13799, 15300, 14897, 15060, 14544, 12998, 13706, 14029, + 11408, 13495, 7571, 15057, 14951, 10239, 14106, 15360, 15212, 14410, 7605, 14494, 14752, 14081, 11517, 14440, + 15252, 15027, 13904, 14337, 13444, 13927, 14070, 14351, 14739, 15022, 14805, 14643, 12442, 13722, 13610, 9795, + 14769, 12594, 14715, 10983, 13791, 14980, 14954, 12512, 13832, 14205, 13627, 15184, 12335, 12263, 15241, 9905, + 15249, 14435, 14403, 14136, 13481, 14260, 14853, 15015, 13940, 14415, 12835, 15196, 13722, 15269, 14883, 14937, + 13115, 11342, 13065, 10594, 15216, 14747, 14572, 10879, 12099, 15053, 14677, 13435, 15076, 14579, 15201, 13712, + 11735, 15219, 6436, 11165, 10662, 13342, 15042, 13487, 14513, 12480, 14656, 10396, 14297, 15114, 15281, 15106, + 15005, 13960, 14152, 15166, 14009, 14900, 13813, 8406, 11392, 13090, 12239, 13685, 
14469, 15016, 15354, 14260, + 14551, 13075, 14156, 13156, 15000, 14392, 13528, 15172, 15027, 13762, 12874, 13410, 14083, 15032, 14891, 15263, + 14827, 13730, 14506, 14733, 14352, 11405, 14097, 15008, 13889, 15345, 14783, 13901, 13821, 11278, 14806, 12900, + 14593, 14883, 14773, 15132, 11298, 14587, 12196, 14809, 13551, 14363, 14365, 14705, 12696, 13540, 14434, 14670, + 14648, 15300, 14951, 14346, 11997, 14526, 14678, 14439, 14989, 14435, 15177, 10145, 13887, 12904, 15051, 14896, + 11799, 14243, 14127, 13978, 12429, 14030, 13225, 13848, 14693, 14788, 14377, 14392, 14734, 11997, 14353, 13846, + 14345, 12593, 14591, 14350, 14538, 13563, 15086, 12276, 11780, 6160, 15076, 12661, 12560, 14590, 13586, 9617, + 14331, 14569, 12352, 14762, 14840, 15059, 14340, 12797, 12158, 15065, 10810, 15234, 14665, 14360, 13857, 13801, + 14241, 14966, 15167, 14598, 13710, 12222, 13382, 13495, 11150, 14223, 15047, 14835, 9506, 14528, 14217, 13807, + 11556, 12703, 14633, 14478, 15313, 14644, 6765, 12274, 13611, 10966, 15133, 14857, 14403, 13578, 14360, 14905, + 14547, 13861, 14899, 13793, 15167, 13637, 14645, 14288, 14607, 13523, 13384, 11691, 14378, 14926, 14211, 13765, + 13577, 15296, 14712, 15114, 14748, 13681, 14385, 11581, 15086, 11611, 15134, 14241, 13633, 13760, 15132, 14898, + 14944, 12977, 14603, 14065, 14625, 14823, 14511, 13489, 14171, 15249, 14621, 14981, 12743, 13526, 14611, 14194, + 14493, 15295, 13666, 14521, 14240, 15164, 15018, 14771, 10455, 9470, 13555, 12565, 12215, 15141, 13871, 12379, + 13575, 11656, 15016, 14608, 14300, 14135, 14779, 13424, 13549, 13596, 14725, 14281, 14530, 14618, 14041, 7140, + 12381, 12798, 14616, 14028, 13450, 13589, 12297, 14998, 14478, 15268, 14480, 15331, 11937, 9811, 14971, 15101, + 15331, 13685, 10334, 11469, 11907, 13527, 15221, 13457, 14856, 13809, 14705, 14255, 14512, 13955, 13557, 12541, + 13692, 12353, 12241, 14132, 14831, 12413, 13280, 10513, 14363, 14903, 15038, 11744, 14809, 6477, 14921, 14001, + 14931, 14912, 12746, 15070, 14782, 
13685, 14482, 10335, 15107, 14483, 14101, 15298, 13255, 14859, 15327, 14392, + 14787, 14481, 12081, 11686, 12932, 13434, 15250, 14878, 15038, 15196, 15268, 14868, 14963, 13031, 12209, 12256, + 14962, 15221, 14464, 13145, 13140, 13891, 14509, 13823, 14829, 11919, 14015, 14559, 12961, 13601, 14763, 13386, + 12928, 13359, 15220, 13898, 13429, 15235, 15344, 14889, 14519, 13302, 15265, 15078, 14475, 14122, 14782, 14461, + 13916, 13679, 14557, 14168, 9154, 15306, 14497, 10827, 14580, 15229, 14834, 13245, 13900, 13524, 14773, 11983, + 12658, 13763, 14839, 13837, 13839, 10703, 14870, 14494, 14626, 13534, 14824, 14236, 14581, 14591, 13738, 5532, + 13439, 15310, 13835, 15257, 12462, 14477, 13705, 14803, 15184, 13538, 12292, 13418, 15212, 15222, 14440, 13383, + 13683, 14997, 14707, 14274, 14593, 12664, 10306, 15075, 12491, 13988, 14844, 14949, 14863, 15201, 14605, 14998, + 13574, 11059, 14696, 11092, 13813, 14576, 15158, 13993, 14953, 13161, 13431, 13846, 14600, 12414, 14526, 13747, + 15081, 11484, 14451, 12292, 13913, 14623, 13905, 13224, 14398, 13705, 14628, 12069, 14673, 14965, 10158, 14808, + 13639, 15217, 15348, 14848, 15147, 14668, 13701, 15017, 14339, 13501, 13639, 14416, 14382, 13712, 15329, 13342, + 14902, 14747, 14961, 10036, 11961, 15105, 13003, 13684, 11531, 13832, 15062, 13497, 14916, 11659, 13445, 10703, + 14916, 9325, 14613, 13666, 13976, 14938, 14782, 13536, 14267, 15167, 11876, 13039, 14454, 12828, 12160, 14814, + 14658, 12472, 10316, 13413, 14779, 15072, 14622, 13578, 15192, 14448, 12453, 14645, 9322, 12978, 15074, 14099, + 14145, 14431, 14089, 15051, 15318, 15005, 14188, 15115, 13649, 15353, 15124, 13032, 15076, 14744, 14405, 14840, + 12555, 14662, 14520, 13896, 13812, 13067, 15071, 12404, 13960, 13341, 14320, 14718, 14550, 15267, 13486, 12682, + 13168, 13102, 13515, 14627, 14203, 14216, 14397, 14290, 14607, 15124, 15012, 15261, 14497, 13662, 13888, 12299, + 7260, 15281, 14863, 13517, 14536, 15026, 12173, 15088, 10310, 14978, 11154, 14583, 10203, 13576, 
14971, 12719, + 14132, 15035, 15271, 14723, 15037, 14160, 13085, 14424, 11746, 13709, 14225, 14923, 13758, 12766, 13841, 14588, + 13867, 13861, 10960, 15323, 12811, 13813, 13593, 14126, 14462, 12087, 14508, 14210, 13468, 12256, 15033, 15326, + 12731, 15235, 14278, 14497, 8683, 14929, 15064, 12791, 12240, 14903, 13814, 9447, 14486, 13765, 14432, 13926, + 15127, 15149, 14043, 15254, 14354, 14611, 14698, 11213, 14939, 14738, 14177, 12300, 11741, 14260, 12795, 12954, + }; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(reinterpret_cast(data), std::vector({4, 4, 3, 16}), + std::vector({16, 3, 4, 4}), DT_FLOAT16, std::vector({3, 2, 0, 1}), + result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nchw_to_nwhc1) { + uint16_t data[1 * 3 * 16 * 16] = { + 15138, 15285, 15305, 14188, 14917, 14539, 13026, 15186, 14901, 14198, 15327, 12601, 12723, 12843, 13527, 5636, + 14323, 13311, 13260, 11315, 13447, 10891, 11350, 14395, 13040, 14110, 13739, 15303, 13228, 14478, 13412, 13350, + 12715, 11890, 14308, 12381, 13344, 12792, 13412, 13072, 13564, 14773, 11703, 14846, 14889, 14628, 10420, 15130, + 14524, 14282, 14103, 11586, 14670, 14065, 15015, 15314, 13847, 14456, 14438, 14569, 10655, 14982, 15302, 14388, + 13753, 14070, 15263, 13548, 15159, 14710, 15006, 13474, 13868, 14463, 12932, 14461, 15219, 15322, 13726, 14618, + 14644, 14765, 15153, 15289, 14789, 14931, 14729, 8423, 15072, 14658, 13417, 14713, 12172, 15191, 14796, 11733, + 11645, 14482, 15039, 13645, 14881, 13766, 13106, 14162, 14938, 9462, 14857, 15231, 14844, 14331, 14264, 14431, + 14910, 12552, 14977, 14634, 12400, 13414, 13240, 14303, 14162, 13199, 13692, 14083, 15018, 14261, 14550, 14533, + 14562, 15003, 15357, 14524, 14448, 10108, 13554, 14737, 15118, 14498, 13551, 13798, 10768, 14727, 14935, 9055, + 14652, 14040, 14486, 14690, 14505, 
13849, 13645, 12774, 14157, 13842, 11143, 14098, 14237, 15297, 12973, 11624, + 11638, 14438, 15311, 14540, 13562, 12432, 13749, 13942, 15214, 10796, 14266, 13503, 15133, 14871, 14906, 14956, + 13467, 15002, 15153, 14589, 12496, 14271, 13305, 14049, 13902, 13873, 14607, 15283, 12517, 9796, 15036, 13485, + 14708, 14903, 15090, 14669, 14003, 12108, 13723, 14447, 12820, 11243, 15183, 14337, 14389, 14789, 13653, 14352, + 14970, 13438, 14715, 14878, 14552, 15008, 15178, 14564, 15046, 14876, 15234, 14751, 15163, 15061, 13584, 15141, + 14528, 14429, 15071, 14646, 14826, 11412, 11456, 12875, 11324, 13219, 12527, 14367, 14979, 14001, 14876, 15043, + 15184, 13589, 14900, 13338, 15193, 13246, 15165, 15225, 14651, 11430, 12811, 14703, 14388, 4920, 11728, 12464, + 14827, 15161, 15265, 11542, 15203, 14118, 12868, 14101, 15081, 14444, 13457, 15292, 14643, 12311, 15122, 14496, + 13753, 10580, 9081, 14957, 12218, 14076, 14672, 14374, 14267, 13916, 13408, 13744, 13593, 14391, 7869, 14992, + 14724, 14638, 12330, 14376, 13012, 12311, 12541, 13213, 14633, 13677, 14809, 14919, 15044, 15272, 9451, 15025, + 14030, 9949, 13823, 14900, 14840, 15274, 14211, 14907, 14618, 15290, 11872, 9312, 14709, 14956, 15154, 14851, + 13245, 13366, 15084, 7584, 13450, 15090, 14693, 14654, 14966, 8900, 13422, 11507, 13915, 10770, 11675, 6726, + 13729, 14998, 15296, 15351, 15097, 14974, 9528, 11730, 14167, 13765, 14614, 14994, 14907, 12989, 13613, 14979, + 12930, 13858, 15303, 13404, 9581, 14992, 15357, 13692, 14429, 11887, 12971, 13835, 13819, 14731, 12948, 15063, + 14112, 11764, 13620, 12351, 10466, 12549, 8672, 13141, 11323, 14581, 14933, 14758, 15206, 14021, 14883, 13857, + 14826, 14588, 15041, 13433, 12975, 14468, 13352, 14764, 14713, 15350, 12713, 14057, 14346, 13777, 13015, 11552, + 13561, 13992, 12382, 14644, 14435, 13880, 15333, 13984, 13788, 14346, 11520, 13742, 13489, 14098, 15274, 12027, + 12995, 13948, 13876, 14489, 14379, 13277, 14114, 10457, 13874, 15243, 14798, 11909, 13930, 13134, 8864, 
12406, + 15070, 12818, 12502, 12561, 14452, 11308, 13577, 14513, 14202, 14455, 15316, 12912, 12803, 14019, 12189, 14691, + 14648, 14457, 12294, 14927, 10971, 14063, 15217, 12306, 14971, 15293, 13632, 10881, 14851, 14571, 15251, 14949, + 13783, 15295, 14708, 14861, 14110, 14351, 14333, 13210, 15261, 14314, 12303, 11509, 14255, 15288, 13999, 15208, + 13459, 14501, 15233, 10148, 13325, 14074, 13976, 15120, 15118, 14316, 12014, 14093, 13702, 15084, 14158, 15210, + 14780, 11558, 10534, 9818, 13670, 14125, 14616, 14560, 13503, 15093, 14205, 14968, 14380, 15076, 14417, 12445, + 12431, 14715, 8483, 13720, 14171, 15358, 13989, 13519, 14839, 12038, 11355, 9632, 10150, 13373, 10819, 15203, + 9960, 14945, 12132, 13744, 10536, 14865, 14718, 15239, 14284, 15331, 14372, 14648, 10139, 13853, 14461, 11171, + 9982, 14388, 9448, 14142, 9383, 14864, 13758, 14933, 8104, 14475, 13137, 14221, 15080, 10739, 12488, 11478, + 14895, 14847, 14162, 15263, 14936, 15048, 14861, 13531, 13583, 15197, 13411, 15098, 13613, 14513, 14270, 13267, + 12768, 13394, 14788, 13782, 13516, 13881, 12439, 14436, 14447, 14864, 15183, 14718, 13652, 13242, 15342, 14715, + 13339, 13949, 14665, 13904, 12527, 11550, 14727, 11488, 15340, 15349, 14345, 12392, 14643, 13791, 14593, 12762, + 14572, 13616, 12307, 13741, 12268, 15279, 14870, 14339, 13547, 14763, 13540, 15158, 14977, 13205, 11910, 13228, + 11630, 14865, 12982, 14472, 14709, 14615, 14520, 14774, 15194, 14246, 14191, 14683, 15115, 13045, 9622, 13950, + 14857, 14621, 15343, 12632, 14779, 12501, 14581, 14525, 15111, 14843, 14338, 13195, 9861, 12156, 13970, 15242, + 14546, 15343, 14627, 14330, 14915, 15331, 15332, 13649, 14953, 12655, 13963, 13207, 11045, 14401, 14848, 12672, + 13035, 13377, 14387, 14437, 13794, 14952, 14409, 14466, 8825, 14561, 14439, 11931, 14741, 13847, 14369, 14019, + 14731, 14888, 13990, 14955, 14382, 13361, 9983, 14355, 15058, 14606, 15012, 14740, 13073, 14253, 14626, 14394, + 15294, 12535, 14181, 14740, 14612, 15082, 13503, 12274, 15341, 
13605, 10329, 15089, 13168, 14830, 14055, 15000, + 14952, 12984, 14684, 12872, 14472, 14946, 13419, 13916, 13005, 14075, 12791, 14654, 15327, 13627, 14549, 14920, + 15259, 12502, 13364, 14853, 15049, 14448, 14724, 12881, 14794, 14494, 13425, 15086, 14338, 14190, 14361, 12458, + 14420, 10571, 14813, 13330, 14491, 15131, 14097, 15183, 15062, 14564, 14374, 15320, 15242, 14943, 13706, 14496, + }; + uint16_t ret[1 * 16 * 16 * 3] = { + 15138, 14827, 12431, 15285, 15161, 14715, 15305, 15265, 8483, 14188, 11542, 13720, 14917, 15203, 14171, 14539, + 14118, 15358, 13026, 12868, 13989, 15186, 14101, 13519, 14901, 15081, 14839, 14198, 14444, 12038, 15327, 13457, + 11355, 12601, 15292, 9632, 12723, 14643, 10150, 12843, 12311, 13373, 13527, 15122, 10819, 5636, 14496, 15203, + 14323, 13753, 9960, 13311, 10580, 14945, 13260, 9081, 12132, 11315, 14957, 13744, 13447, 12218, 10536, 10891, + 14076, 14865, 11350, 14672, 14718, 14395, 14374, 15239, 13040, 14267, 14284, 14110, 13916, 15331, 13739, 13408, + 14372, 15303, 13744, 14648, 13228, 13593, 10139, 14478, 14391, 13853, 13412, 7869, 14461, 13350, 14992, 11171, + 12715, 14724, 9982, 11890, 14638, 14388, 14308, 12330, 9448, 12381, 14376, 14142, 13344, 13012, 9383, 12792, + 12311, 14864, 13412, 12541, 13758, 13072, 13213, 14933, 13564, 14633, 8104, 14773, 13677, 14475, 11703, 14809, + 13137, 14846, 14919, 14221, 14889, 15044, 15080, 14628, 15272, 10739, 10420, 9451, 12488, 15130, 15025, 11478, + 14524, 14030, 14895, 14282, 9949, 14847, 14103, 13823, 14162, 11586, 14900, 15263, 14670, 14840, 14936, 14065, + 15274, 15048, 15015, 14211, 14861, 15314, 14907, 13531, 13847, 14618, 13583, 14456, 15290, 15197, 14438, 11872, + 13411, 14569, 9312, 15098, 10655, 14709, 13613, 14982, 14956, 14513, 15302, 15154, 14270, 14388, 14851, 13267, + 13753, 13245, 12768, 14070, 13366, 13394, 15263, 15084, 14788, 13548, 7584, 13782, 15159, 13450, 13516, 14710, + 15090, 13881, 15006, 14693, 12439, 13474, 14654, 14436, 13868, 14966, 14447, 14463, 8900, 14864, 
12932, 13422, + 15183, 14461, 11507, 14718, 15219, 13915, 13652, 15322, 10770, 13242, 13726, 11675, 15342, 14618, 6726, 14715, + 14644, 13729, 13339, 14765, 14998, 13949, 15153, 15296, 14665, 15289, 15351, 13904, 14789, 15097, 12527, 14931, + 14974, 11550, 14729, 9528, 14727, 8423, 11730, 11488, 15072, 14167, 15340, 14658, 13765, 15349, 13417, 14614, + 14345, 14713, 14994, 12392, 12172, 14907, 14643, 15191, 12989, 13791, 14796, 13613, 14593, 11733, 14979, 12762, + 11645, 12930, 14572, 14482, 13858, 13616, 15039, 15303, 12307, 13645, 13404, 13741, 14881, 9581, 12268, 13766, + 14992, 15279, 13106, 15357, 14870, 14162, 13692, 14339, 14938, 14429, 13547, 9462, 11887, 14763, 14857, 12971, + 13540, 15231, 13835, 15158, 14844, 13819, 14977, 14331, 14731, 13205, 14264, 12948, 11910, 14431, 15063, 13228, + 14910, 14112, 11630, 12552, 11764, 14865, 14977, 13620, 12982, 14634, 12351, 14472, 12400, 10466, 14709, 13414, + 12549, 14615, 13240, 8672, 14520, 14303, 13141, 14774, 14162, 11323, 15194, 13199, 14581, 14246, 13692, 14933, + 14191, 14083, 14758, 14683, 15018, 15206, 15115, 14261, 14021, 13045, 14550, 14883, 9622, 14533, 13857, 13950, + 14562, 14826, 14857, 15003, 14588, 14621, 15357, 15041, 15343, 14524, 13433, 12632, 14448, 12975, 14779, 10108, + 14468, 12501, 13554, 13352, 14581, 14737, 14764, 14525, 15118, 14713, 15111, 14498, 15350, 14843, 13551, 12713, + 14338, 13798, 14057, 13195, 10768, 14346, 9861, 14727, 13777, 12156, 14935, 13015, 13970, 9055, 11552, 15242, + 14652, 13561, 14546, 14040, 13992, 15343, 14486, 12382, 14627, 14690, 14644, 14330, 14505, 14435, 14915, 13849, + 13880, 15331, 13645, 15333, 15332, 12774, 13984, 13649, 14157, 13788, 14953, 13842, 14346, 12655, 11143, 11520, + 13963, 14098, 13742, 13207, 14237, 13489, 11045, 15297, 14098, 14401, 12973, 15274, 14848, 11624, 12027, 12672, + 11638, 12995, 13035, 14438, 13948, 13377, 15311, 13876, 14387, 14540, 14489, 14437, 13562, 14379, 13794, 12432, + 13277, 14952, 13749, 14114, 14409, 13942, 10457, 
14466, 15214, 13874, 8825, 10796, 15243, 14561, 14266, 14798, + 14439, 13503, 11909, 11931, 15133, 13930, 14741, 14871, 13134, 13847, 14906, 8864, 14369, 14956, 12406, 14019, + 13467, 15070, 14731, 15002, 12818, 14888, 15153, 12502, 13990, 14589, 12561, 14955, 12496, 14452, 14382, 14271, + 11308, 13361, 13305, 13577, 9983, 14049, 14513, 14355, 13902, 14202, 15058, 13873, 14455, 14606, 14607, 15316, + 15012, 15283, 12912, 14740, 12517, 12803, 13073, 9796, 14019, 14253, 15036, 12189, 14626, 13485, 14691, 14394, + 14708, 14648, 15294, 14903, 14457, 12535, 15090, 12294, 14181, 14669, 14927, 14740, 14003, 10971, 14612, 12108, + 14063, 15082, 13723, 15217, 13503, 14447, 12306, 12274, 12820, 14971, 15341, 11243, 15293, 13605, 15183, 13632, + 10329, 14337, 10881, 15089, 14389, 14851, 13168, 14789, 14571, 14830, 13653, 15251, 14055, 14352, 14949, 15000, + 14970, 13783, 14952, 13438, 15295, 12984, 14715, 14708, 14684, 14878, 14861, 12872, 14552, 14110, 14472, 15008, + 14351, 14946, 15178, 14333, 13419, 14564, 13210, 13916, 15046, 15261, 13005, 14876, 14314, 14075, 15234, 12303, + 12791, 14751, 11509, 14654, 15163, 14255, 15327, 15061, 15288, 13627, 13584, 13999, 14549, 15141, 15208, 14920, + 14528, 13459, 15259, 14429, 14501, 12502, 15071, 15233, 13364, 14646, 10148, 14853, 14826, 13325, 15049, 11412, + 14074, 14448, 11456, 13976, 14724, 12875, 15120, 12881, 11324, 15118, 14794, 13219, 14316, 14494, 12527, 12014, + 13425, 14367, 14093, 15086, 14979, 13702, 14338, 14001, 15084, 14190, 14876, 14158, 14361, 15043, 15210, 12458, + 15184, 14780, 14420, 13589, 11558, 10571, 14900, 10534, 14813, 13338, 9818, 13330, 15193, 13670, 14491, 13246, + 14125, 15131, 15165, 14616, 14097, 15225, 14560, 15183, 14651, 13503, 15062, 11430, 15093, 14564, 12811, 14205, + 14374, 14703, 14968, 15320, 14388, 14380, 15242, 4920, 15076, 14943, 11728, 14417, 13706, 12464, 12445, 14496, + }; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(reinterpret_cast(data), std::vector({1, 3, 16, 16}), 
+ std::vector({1, 16, 16, 3}), DT_FLOAT16, + std::vector({0, 2, 3, 1}), result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nchw_to_nwhc2) { + float data[8 * 1 * 24 * 24] = { + 0.5062321930371455, 0.32798915578871457, 0.5961646765278394, 0.8617070783970479, + 0.5119373011648897, 0.048024524076240915, 0.23214736441672879, 0.022538554986683446, + 0.6778444052900103, 0.15032959905073096, 0.5500250141965424, 0.17113763529559678, + 0.656904861049231, 0.19596667507728938, 0.368244176122847, 0.8691059710553981, + 0.8464938643825788, 0.4477211400459462, 0.5681620999676735, 0.5442100896841062, + 0.45368229567875673, 0.32546382920612726, 0.18083321574168632, 0.1750322980444311, + 0.6315210221145376, 0.291527227581442, 0.05240995759120004, 0.24235809909562644, + 0.5248510042885183, 0.8444330870217652, 0.06953102068444816, 0.5862038697632712, + 0.8055402660428673, 0.32368384028646024, 0.8548542644341387, 0.21770747056725892, + 0.3311304238509687, 0.2913804661242798, 0.5967050302041854, 0.6869036787651093, + 0.6298993086675535, 0.5342681029051217, 0.3675795216292139, 0.1750638889243652, + 0.1964814553063049, 0.6605207614470802, 0.7789210272606384, 0.7780470338595302, + 0.7160454591867403, 0.6301114551776474, 0.10752566733413593, 0.673119869792655, + 0.9373805013424287, 0.9178365192390866, 0.6325325250871165, 0.4247316161331426, + 0.3801992968018262, 0.8945772418220316, 0.7875543669721298, 0.8539699115784273, + 0.2777908194509292, 0.05650104741824036, 0.831507030885315, 0.585983808212166, + 0.9408480852535616, 0.949221458223217, 0.6494317804282785, 0.22667549478928184, + 0.9867749341169173, 0.3449374672076423, 0.009603964019212485, 0.2966442120522559, + 0.9140488516957659, 0.15654966719968966, 0.5479895146896403, 0.27964020188818406, + 0.5875175857991517, 0.7674359344871401, 0.2662078550688938, 
0.601592864568559, + 0.6298513101287228, 0.0007956317020597625, 0.447670613189198, 0.30214703733479853, + 0.7095682335989801, 0.19776718502242907, 0.42373907375609765, 0.9964465820586225, + 0.5657844698280747, 0.9185125060473368, 0.48818414990794645, 0.697598606946429, + 0.23424318111262687, 0.011707822252632338, 0.7337824717352953, 0.5484898408514123, + 0.9929544517415723, 0.6792158760625665, 0.7816028696554761, 0.2405083444025402, + 0.9948803917035984, 0.49246398759410004, 0.5942569141797983, 0.7385481148993642, + 0.4338463026994005, 0.5692156943634276, 0.5157300177718895, 0.9788902748230957, + 0.06944950117329118, 0.7440570791479629, 0.20611744118485498, 0.829837881152689, + 0.8641965625879906, 0.3754093562128916, 0.5140923803643517, 0.01318160757340514, + 0.38912810288926725, 0.7269493917324703, 0.9667767963135668, 0.5146438339988334, + 0.1746242957861096, 0.22260117687528302, 0.5147147658513315, 0.5133063050966878, + 0.6037766881886195, 0.3032343397374424, 0.1558403376681612, 0.19823611829999466, + 0.24791261176807733, 0.06511786233551131, 0.3331160374011095, 0.8764778347789234, + 0.8987904993981825, 0.8850320999146167, 0.0028995817761239273, 0.6114011938310822, + 0.12891905917964652, 0.08841529382295288, 0.8913889051181442, 0.4231141187500551, + 0.5682157758555647, 0.407222042568696, 0.3964456887714989, 0.9083944497303571, + 0.06771895423049012, 0.4659372804769496, 0.12192491449923182, 0.5590082975575247, + 0.7224902246551163, 0.39659001433585184, 0.9959804799025049, 0.945510847494734, + 0.5069062072334803, 0.18366862681431562, 0.6477785312377683, 0.5047962857729253, + 0.12702932873653094, 0.5937694806711893, 0.41217223605167486, 0.586260286286227, + 0.3920397226267349, 0.3009089562278783, 0.11545124344136348, 0.5638317723887502, + 0.03331899478754563, 0.9197389199405746, 0.025063552422119995, 0.08206586723651477, + 0.561096691084764, 0.32084876537534424, 0.9737123506773547, 0.5557153417628414, + 0.33548532198689474, 0.32221438944414504, 0.7714228952988429, 
0.227449953513027, + 0.505078000677675, 0.8363531865621979, 0.9104996077916944, 0.7467391082732583, + 0.6486863954134673, 0.7840496913389229, 0.8405166909749249, 0.8967567419124038, + 0.013132746284066021, 0.3492828320186896, 0.2785912741702441, 0.505561232182855, + 0.4916777884425205, 0.6784144755559384, 0.5602904610155944, 0.2556291571530711, + 0.5058833835796643, 0.8065225079160115, 0.4146453573569093, 0.6165567410355937, + 0.3812072863001894, 0.07838633522280025, 0.24554517496324435, 0.06732991743024797, + 0.6172542861800441, 0.012086029165487733, 0.06664288136003349, 0.7043239742078187, + 0.9123664013856383, 0.5335504894308023, 0.08698653277464463, 0.8290196080564468, + 0.8834656717225267, 0.7939247877872073, 0.8884737365869878, 0.4219642380552622, + 0.3534603068640917, 0.8494470631397504, 0.867447552664446, 0.7042030090952853, + 0.7030967670548717, 0.3633266332102917, 0.7912923465426464, 0.2846016100557003, + 0.7669181884111332, 0.1336003148255347, 0.8551305090127473, 0.6448324422438613, + 0.5692776375393788, 0.6125267654644435, 0.2860188283121068, 0.49101502666954544, + 0.7802334407422508, 0.621595999177262, 0.5264555414863354, 0.06256813978886833, + 0.5777567913293464, 0.2185899920497919, 0.5499062346038069, 0.09727935441077395, + 0.9154994096446494, 0.5410406629503918, 0.6478454956274339, 0.41913577704769356, + 0.892374352838081, 0.5878433926092157, 0.9247681810220608, 0.7574161009131323, + 0.40387819954296333, 0.9955865183599879, 0.6498057102561908, 0.23720161272280726, + 0.864962875216823, 0.9744249317817247, 0.1185707073275365, 0.05405958997809279, + 0.21350815438098536, 0.3987264207477995, 0.1901865924242161, 0.8986424402957333, + 0.42756234012515826, 0.5624195702180508, 0.9532024381859805, 0.9331967234220007, + 0.8326262505350779, 0.22204330543949213, 0.5683570413885555, 0.08080650468291284, + 0.7404946579897478, 0.31863905911250545, 0.6545183726954837, 0.9473683813047713, + 0.8142567915681516, 0.14552167710658281, 0.41939814504849826, 
0.7794508592612299, + 0.16005597528354443, 0.06867044382609655, 0.4633637274837288, 0.9395113600474436, + 0.002398516729492317, 0.9331948896762047, 0.333821020943754, 0.0870242810975258, + 0.04707664181499216, 0.890444754645273, 0.990579393015404, 0.38653468246552036, + 0.8350018469307008, 0.14186236169840283, 0.5069963680419538, 0.02949034420068819, + 0.9045393529914117, 0.6776465708902875, 0.5951058678362742, 0.16017357369287422, + 0.06829755903301449, 0.22104222238956384, 0.5781531354968936, 0.07078049523276742, + 0.7611775873389636, 0.2506638620506646, 0.23424450010646203, 0.28042226030744977, + 0.4967031301230611, 0.030155802498213036, 0.8417135739779523, 0.4403787816993532, + 0.3194871756596648, 0.024408552447829357, 0.2059121031814014, 0.3304385738990716, + 0.5797427697765347, 0.1488101415168943, 0.07609937614156859, 0.5251133458576794, + 0.38618038104356356, 0.19176610098489089, 0.9987200933842041, 0.3747523857547992, + 0.7577922282660177, 0.13581872016346808, 0.41112542292513443, 0.3692939492355587, + 0.9088817938289541, 0.7068660497692852, 0.06394517786397358, 0.1083252676531502, + 0.1808100870922431, 0.5183031556872817, 0.35323756937224593, 0.5759670034306098, + 0.12944299733834008, 0.9448739674356532, 0.8143588597980085, 0.754802608217618, + 0.9916004739452933, 0.5266178440345204, 0.3778661306592547, 0.6762089130655262, + 0.6609729192353024, 0.657458949700941, 0.6666661408928232, 0.5718295386522686, + 0.815902395364949, 0.27572360372175064, 0.8380770893404555, 0.4430394275199342, + 0.9035539285337746, 0.04305770944290821, 0.0951941872821771, 0.9609676535237013, + 0.7882514248825381, 0.19512411644583338, 0.4947849919452858, 0.6098697295924015, + 0.8027845561907938, 0.3133522370325581, 0.4510194566927449, 0.8428718966430689, + 0.011938006691672332, 0.588867003240751, 0.6690157767048753, 0.4177212705525565, + 0.24674398716514967, 0.7417180449599303, 0.1631995066554942, 0.20262693904300233, + 0.2889357295845064, 0.25740925871269715, 0.6363896486385333, 
0.6631190151834848, + 0.20898391694798513, 0.4655689891239263, 0.223154251464165, 0.6477665820136516, + 0.4813056620640841, 0.8843717058980306, 0.6943907285971115, 0.7994117260598272, + 0.1978499897603565, 0.17401412776352376, 0.47091902939013897, 0.3008107310922036, + 0.125356561257992, 0.3362214717360684, 0.008551259225046404, 0.39550964281789236, + 0.8838122008258186, 0.33856044053751155, 0.45407795863420086, 0.4242202694122168, + 0.5423685678418622, 0.558757767842896, 0.09995937937290533, 0.3246315609823286, + 0.6142558040977343, 0.9805749745475485, 0.4657660405624161, 0.9279995363145068, + 0.6559071400579842, 0.2666869937974796, 0.9408718356383589, 0.3545865805029773, + 0.45232948292420505, 0.038505613707505826, 0.00019750497073367512, 0.8737664246958026, + 0.43338860317926897, 0.9723377963347053, 0.6733478271906433, 0.8521676579248147, + 0.7992002524821615, 0.7134241267734911, 0.998215305453406, 0.13173821749133452, + 0.6230927805896207, 0.16597961720329535, 0.5667854438476946, 0.5600991731548618, + 0.7391743889484617, 0.8706462935030473, 0.6023748393452749, 0.3939117403888631, + 0.0179925828091827, 0.4629518715296074, 0.19687388319715216, 0.9137799213875131, + 0.1811662494483287, 0.7822154412029618, 0.23591318340225398, 0.0073897600933453544, + 0.7601835361487541, 0.04661639551634078, 0.47412956237438675, 0.8348234008595401, + 0.9694614871742564, 0.0021255061100916572, 0.3294672766798996, 0.35964304759653043, + 0.6252163536877482, 0.7291002031297731, 0.23755815280234072, 0.9573553281440201, + 0.35208871653768337, 0.12429876607920087, 0.4238129988973137, 0.42030861325609814, + 0.7668992868698534, 0.38672467591564863, 0.6167188625787685, 0.671182673030282, + 0.665041879014701, 0.5979076099092636, 0.08434169920676948, 0.05820304772467022, + 0.6223318283529414, 0.9319341880648322, 0.1563432403380529, 0.9138723236340262, + 0.7996797512923042, 0.4900864446713954, 0.017416089288305514, 0.49040996322083463, + 0.721019678050312, 0.5566481270821583, 
0.9257258344448913, 0.030456107481957595, + 0.9633633518879525, 0.025420652239322328, 0.7106922196820691, 0.02782056144663425, + 0.5784283306561472, 0.7642952738653159, 0.49634054661506544, 0.3124518393246003, + 0.2427587464131412, 0.6298184514564478, 0.7423785073124218, 0.37271871104187015, + 0.2722751335358491, 0.2666776013490191, 0.5891324834913104, 0.6876742719251587, + 0.7955883463128232, 0.6707296977799054, 0.9496618100588802, 0.7868528046712148, + 0.7701224409391159, 0.7508487281409736, 0.013211049081081838, 0.10798949667513813, + 0.8090270633430225, 0.048012855163304335, 0.09028922573374432, 0.035025503671681, + 0.1919225111223416, 0.5650389680077842, 0.38354264470450605, 0.046771281245718055, + 0.862924116881804, 0.9649825973770343, 0.5043943142222184, 0.8715482370863882, + 0.26597899853303353, 0.8270579811608609, 0.1142848962533316, 0.4224047306507669, + 0.1012610270560822, 0.8223568939055197, 0.7167013789334914, 0.9315114766412789, + 0.13538563056707842, 0.1971618987154049, 0.1346229767296998, 0.7413487911805234, + 0.8335779236442179, 0.5523076600264788, 0.17805699078916626, 0.9749964559656057, + 0.6548616850045488, 0.6408638652632012, 0.05243142084756114, 0.41338426132755013, + 0.6577911569645122, 0.5218993909847612, 0.8215042477853458, 0.26000525222244186, + 0.97323011169599, 0.17753901458016086, 0.5995954962734206, 0.7072822896238021, + 0.26560859373579837, 0.25711211095662156, 0.07508148094639788, 0.4665311792834703, + 0.8382346611355168, 0.912159129225236, 0.18294092199868883, 0.5446560489946072, + 0.7656095966972155, 0.7678502522889049, 0.015170391334308642, 0.29784029213417207, + 0.8387694383482851, 0.46689527632178685, 0.36712996103456175, 0.019326900402604652, + 0.3426577641914963, 0.4488032775872126, 0.12123730305165703, 0.16833207718529186, + 0.0009628642460063341, 0.961297092575098, 0.6197070762250381, 0.9422212994559002, + 0.46566024250211124, 0.765393556661448, 0.6389778787947291, 0.4565768368332409, + 0.9670253688068792, 
0.21973766319640908, 0.1670312116243018, 0.8981162893436722, + 0.839205738663824, 0.7745179886765066, 0.30909292667742505, 0.04511559705068158, + 0.9825690351992592, 0.004746150572906838, 0.5123475897899985, 0.80432432962385, + 0.26105396874260267, 0.16816967722507736, 0.35652723083069127, 0.7015377869256973, + 0.07821081337514191, 0.18523471563017568, 0.8343612489445518, 0.7959091927132941, + 0.7188927625558327, 0.9287626078448632, 0.09973920754860399, 0.3463778089006021, + 0.33659796153984745, 0.921845854796007, 0.00950593702181235, 0.06469285813398706, + 0.25338043680754496, 0.5238467611159003, 0.3167593672722743, 0.09329128287229482, + 0.6484704059568994, 0.9061324833635789, 0.48944359249477554, 0.8886033139636679, + 0.5775004603975735, 0.7994597749363578, 0.2685524226371466, 0.47423484982081954, + 0.9478611351812041, 0.24263144009029924, 0.453565431301858, 0.42525493803037606, + 0.11490633583693022, 0.20757224400351415, 0.5868995409837178, 0.5205919982656838, + 0.34503754603792125, 0.5987197103905514, 0.6868505593330014, 0.7638415147641093, + 0.8267185942940226, 0.6625091189429029, 0.6693741441502425, 0.38345424336010436, + 0.7081765066435062, 0.42905141236087785, 0.2328912829363069, 0.46170680700262057, + 0.439085657841746, 0.48283110128008133, 0.6895936395080633, 0.915300650877659, + 0.5065137439640514, 0.009253996333368542, 0.17912608340116165, 0.9877014563961273, + 0.2077031505895378, 0.8852031617482035, 0.39883058097563073, 0.9280883776056293, + 0.5468439956477785, 0.8348508115176584, 0.9623588674800001, 0.10797271631363137, + 0.11652847794724375, 0.4006687049764782, 0.855832471124214, 0.8786977408679026, + 0.7141267893561709, 0.9917530089388696, 0.5096096656188075, 0.16953365739403836, + 0.7251961536843469, 0.8943385078565467, 0.8112754308812744, 0.9761274106045587, + 0.9326604989238073, 0.9903928487769879, 0.269577210979857, 0.8189637058521028, + 0.7381833350806415, 0.28154334423945915, 0.1250465350737835, 0.3968102520265465, + 0.9604970122135507, 
0.9722187833692912, 0.4243054458397719, 0.9403312613720402, + 0.8703241311060701, 0.6089211633348707, 0.5345658877440552, 0.5130240560137153, + 0.5368930457625992, 0.06336696382328744, 0.045978147806837844, 0.21270102297516136, + 0.5183775131533372, 0.4575562226466785, 0.6969774129134045, 0.49689162515596763, + 0.3039279916586811, 0.9104938758536323, 0.842826717308411, 0.9030531490528709, + 0.5622318797083762, 0.8880003119744561, 0.22917815472979386, 0.8454761943337168, + 0.5490186483045347, 0.8903499500434628, 0.19690296474232793, 0.4942586480451958, + 0.6208256248541093, 0.793673761384259, 0.15704525492115684, 0.8064819707072164, + 0.3612275689614989, 0.6409665812254784, 0.718535811027167, 0.011627527251532266, + 0.18251899551078254, 0.8816150389346538, 0.5141424262186775, 0.5585514761068199, + 0.08751818562303404, 0.2032257845367399, 0.30428349184187875, 0.3562439026805052, + 0.9902869789914716, 0.617485694423711, 0.23085218840071986, 0.9200353993391799, + 0.1438119306579585, 0.9637545013683527, 0.06926888721997271, 0.45809005417603854, + 0.7797894826426551, 0.033071563699438644, 0.5346070887767089, 0.3785868582170747, + 0.7792362852031324, 0.44049360572247687, 0.5655966414697406, 0.49836455834115634, + 0.41982706944388515, 0.9142509416402275, 0.8055104920505853, 0.7882368863968197, + 0.7861541561206552, 0.8750002361453983, 0.3141872249221326, 0.6297863342928883, + 0.13752402310502843, 0.8408271137857017, 0.20203166178667864, 0.6132166149173786, + 0.00565266221263494, 0.4346366940226415, 0.5750892505021086, 0.7484967824901719, + 0.554368174682661, 0.6462518960809419, 0.7575408699684905, 0.8304301645939067, + 0.14686776200689766, 0.01775895453516141, 0.20381742085491494, 0.5374100389215499, + 0.3543295568618047, 0.881281278910346, 0.7387949907094862, 0.6532668366398822, + 0.35871434741569874, 0.34577990378538315, 0.7511426552642608, 0.1749342021093535, + 0.44179651111839446, 0.3183243633827524, 0.8952139313460276, 0.550987565451777, + 0.11245280703451577, 
0.29603527735935475, 0.0024376097979692535, 0.5580957824575782, + 0.6785730314640288, 0.6989604199143635, 0.9280856338597502, 0.8528138190991756, + 0.8155971681047525, 0.19627454847903825, 0.23483652834817348, 0.9749127497086097, + 0.9952437850116509, 0.39823054819356907, 0.3902382135861282, 0.9436877000327134, + 0.7855667825786122, 0.8211945319467171, 0.9350703325513966, 0.022807010966559904, + 0.5701088951610942, 0.21262277387562822, 0.182285558875648, 0.5457926090191436, + 0.0022151330954638393, 0.9380139047727055, 0.8430496283743121, 0.1022224243661769, + 0.29636066130596317, 0.9479549160834284, 0.8250427156664355, 0.25458526111733715, + 0.6582997945231689, 0.24489199861326638, 0.7480060827586592, 0.16758586360820604, + 0.675393772428663, 0.08917020068830872, 0.49411868825207117, 0.9293064155184508, + 0.04710616624830932, 0.9032842176572544, 0.7887575986187616, 0.45685702914654747, + 0.4652494433853783, 0.08590753836423537, 0.8677926147593354, 0.9757594778331132, + 0.03326488446691034, 0.09080413307553958, 0.2677322617144291, 0.003004139162062991, + 0.37075805819564844, 0.5619563194408237, 0.5326269575702928, 0.34340862047732745, + 0.9016417148983238, 0.1816640209011503, 0.3079844132700572, 0.697831111602062, + 0.15550348544202108, 0.08709128824924162, 0.7318882367213184, 0.5226189451379162, + 0.4714561263097441, 0.6536425776379847, 0.8780547121445959, 0.4605063736250262, + 0.1108839197488849, 0.49048534621825135, 0.348779380113877, 0.3573496061128525, + 0.6477277145926619, 0.3267103713187982, 0.154258739206506, 0.09146321915634115, + 0.15653234956704554, 0.2659173882939163, 0.9017235470997731, 0.39463656456034923, + 0.156590494640086, 0.8173011759177684, 0.4863177935656502, 0.889074583881877, + 0.1961541858280207, 0.4073608554088233, 0.17383269122504952, 0.49264426743742795, + 0.28034389601998155, 0.5593685058504049, 0.8717081492192968, 0.8138787395058248, + 0.6849150775885576, 0.7294561911157406, 0.6007567942294963, 0.4566856125549694, + 0.7762810018455528, 
0.037485912765460316, 0.317156754519343, 0.8194347358709463, + 0.8742357018784637, 0.45420248053764034, 0.545021334035525, 0.40484355148999873, + 0.9642956470123379, 0.9521463750801464, 0.009809236244757491, 0.5632225263832438, + 0.827726924457394, 0.19127589974101955, 0.8270672169675024, 0.9389636338572646, + 0.03389523795263183, 0.8566288277688866, 0.41601508004675314, 0.4533324145773725, + 0.3986860505212597, 0.7125644912081883, 0.4991806916020406, 0.6155778733311165, + 0.9130384484529627, 0.5487870717147846, 0.7780523937434449, 0.28193646085785884, + 0.23331327786702805, 0.9816900774238352, 0.1350024308188581, 0.780189727376689, + 0.9156908233955465, 0.009622250928327358, 0.14554767074354535, 0.6099444642853934, + 0.49974097393273886, 0.7069659077805105, 0.7816183976265796, 0.6893721747092452, + 0.39131354032672916, 0.37859141906212357, 0.5906888260321975, 0.6601801424705058, + 0.17536941466724143, 0.2319276355700719, 0.7489062937368479, 0.25317403533372795, + 0.13515277633234812, 0.9347494213774822, 0.9266229609375172, 0.9943887686013935, + 0.05448279130963951, 0.2941520004646705, 0.8162845180903855, 0.9728186588423753, + 0.04784001124036252, 0.22656891211505314, 0.8623002348807659, 0.5872338171414982, + 0.8172407294972533, 0.8935080553239964, 0.9015605149533703, 0.41310471749037525, + 0.9472794835647472, 0.6520347680487271, 0.31311178341617885, 0.3881266733594382, + 0.5940963916604006, 0.34213553626717264, 0.7731917933130059, 0.5966087262785921, + 0.41236805548735, 0.38464411318442915, 0.948890845138434, 0.6579713101413165, + 0.0864903742655957, 0.9024542523251091, 0.5321617601250037, 0.4679140323179162, + 0.545330586994838, 0.45181337751924944, 0.46408729390763515, 0.6739482447724412, + 0.0051732115035305615, 0.19981216404826385, 0.5290130517806954, 0.7192318331618469, + 0.7212983477553033, 0.3213459838833309, 0.8810398893805294, 0.24407230150381054, + 0.7705033913302648, 0.8348143443275565, 0.9798327350813848, 0.7153785787888585, + 0.17227162583571132, 
0.013486902192362704, 0.05279184836340933, 0.4048402752271233, + 0.6541987863733343, 0.9572730371926592, 0.25874032756911736, 0.1708286437732166, + 0.7458853835638297, 0.8957748918471427, 0.8504052252091574, 0.8440260407324176, + 0.09998791094771531, 0.5981663701129188, 0.9909795971630094, 0.7599927671359444, + 0.5930460739990847, 0.778268386360668, 0.6314661520179152, 0.5758158555084719, + 0.1933560284115361, 0.04267202229386946, 0.5808063777329563, 0.2846012429745495, + 0.8127455357361613, 0.8547361041799995, 0.4236263477471254, 0.5126017051751544, + 0.486864207649195, 0.675148578661926, 0.521602594310036, 0.1714438367929747, + 0.987577581508166, 0.6240301905818986, 0.3972660860873791, 0.21466797526556947, + 0.163736157424353, 0.16097272448599187, 0.9867228726368442, 0.44686891216524294, + 0.8529040770303731, 0.8427084849973342, 0.5248669060092062, 0.6332209039546157, + 0.3381060884944972, 0.6410528682127311, 0.5184896117965878, 0.0775072340265841, + 0.20847724173835658, 0.8421532756187703, 0.5690884663921209, 0.16660278165823805, + 0.7114283732757188, 0.12485689980485426, 0.4778653717597874, 0.8936810245378618, + 0.25572041201895246, 0.6479725646536676, 0.0521651501340884, 0.2899282678297366, + 0.1567842561578523, 0.2618292683133343, 0.48167163493515797, 0.8678615958116621, + 0.2825109203364219, 0.029167488033803868, 0.30085749125988204, 0.1430984046038365, + 0.8105162116377423, 0.3240150277783528, 0.4227679344777656, 0.6462688810750794, + 0.7669111613819273, 0.3591939105703398, 0.5406014889841781, 0.9133014898668583, + 0.23078416856951423, 0.7282831287496329, 0.7696332086423125, 0.036116417171488946, + 0.7188034785773543, 0.46641137151322454, 0.13517594491126694, 0.023029089551247717, + 0.3240281523339036, 0.28219840623687475, 0.6176277759655265, 0.822929704075167, + 0.9746339984503541, 0.09356935168701108, 0.9358670947921622, 0.5150705189935418, + 0.08698190944361806, 0.5067019194005934, 0.4607138934794833, 0.5903911985128997, + 0.04743558869608677, 
0.0816011762024833, 0.18434623268203787, 0.7063485147604806, + 0.16133266570652083, 0.7545927345078352, 0.9883244202548223, 0.1932863168822514, + 0.18928374882276033, 0.8001091255310002, 0.7155860340551159, 0.3789846917866153, + 0.9611113142900227, 0.879396081488193, 0.5388680749811205, 0.8361159884411873, + 0.866329387811841, 0.2730682799221704, 0.3194169335393262, 0.9423646280256972, + 0.24435425023073687, 0.7470170905071631, 0.7738507071603729, 0.3986280845457605, + 0.030480186468068826, 0.610744914811935, 0.2678410545126796, 0.11563606352242406, + 0.35435733790292934, 7.629086834592957e-05, 0.31506645823114665, 0.52596582150875, + 0.4947711935372616, 0.2900674918365881, 0.9559480836459326, 0.8641971546827826, + 0.7759990209000613, 0.25189193242507424, 0.7764929030552792, 0.6740451053768206, + 0.9442503568797899, 0.27015556612912295, 0.4722170294917363, 0.6923440714494277, + 0.4625235039406205, 0.40653633557229074, 0.8614408947586382, 0.8005107123593145, + 0.38952152310149324, 0.5138242206735628, 0.3034512156454581, 0.5075533946811591, + 0.6409178909749544, 0.7867224176037622, 0.5704724276154304, 0.558636617357991, + 0.2794783538323291, 0.29715752029602904, 0.5187519349640142, 0.9873759371136224, + 0.9258075437182876, 0.21802053184517123, 0.43578239292876575, 0.5802450337786992, + 0.23230142264547604, 0.9663956282775855, 0.3046095724793134, 0.7813239552985765, + 0.7139029823908456, 0.11346499503069729, 0.17481934399946641, 0.030608752823210073, + 0.7198536329641824, 0.5851914340312228, 0.10405825121810075, 0.4328732174062543, + 0.05984690834909834, 0.059751406592598366, 0.7099125485628539, 0.2327422314064267, + 0.22293859772007485, 0.366334373824144, 0.6749561293186479, 0.8275468647809106, + 0.4836623121233168, 0.5182234230734467, 0.17814665168896882, 0.767156836968558, + 0.9957233089372526, 0.020767935241929103, 0.9075908646866189, 0.910011621335872, + 0.7026196573836658, 0.935762210311436, 0.7711845413595912, 0.46739814450201234, + 0.6994581401527824, 
0.5616043805626976, 0.11408951973501213, 0.6998783598361171, + 0.929936385814316, 0.665750680755247, 0.6799953175502683, 0.020929310077360097, + 0.9533380343595762, 0.19059543824646374, 0.09924735025128095, 0.4388686123460397, + 0.7029603377770199, 0.0283956241097576, 0.08107860028919389, 0.5042102876279084, + 0.2165582424681859, 0.061309458649308146, 0.5425731942471365, 0.6896535206703505, + 0.7865332719261746, 0.21144979197123848, 0.044240593795785155, 0.7172785881835658, + 0.8087701489136178, 0.12632062877512984, 0.3135882124553674, 0.7030096426830668, + 0.32427739389406196, 0.26177958169560045, 0.8364626116621018, 0.045119232141371146, + 0.15965420000508967, 0.9353298867548688, 0.20383187633308142, 0.9738936519827217, + 0.40298551153980444, 0.7975560547282584, 0.18171915196160293, 0.7518553129466249, + 0.3343056185111044, 0.4899186327978281, 0.9156154657107153, 0.9113609778955688, + 0.9693327204195537, 0.2857523592884468, 0.05130307975288906, 0.71266558707065, + 0.562649641637568, 0.8915442240259507, 0.164649064386867, 0.027705380608350705, + 0.5407021702897924, 0.7889782067611623, 0.5259864378330708, 0.6845669681814901, + 0.6376220227714763, 0.8514856160558509, 0.6510253682186862, 0.5170756234714353, + 0.7067784474209631, 0.9994574991301162, 0.6757298661011472, 0.7466542251346405, + 0.31532433459065423, 0.2932461574194528, 0.6129823292461669, 0.24096303449059953, + 0.7484722467815119, 0.2738466277749775, 0.6041763997009431, 0.9490113319971468, + 0.16474717260481941, 0.6344892971888295, 0.1605409102690507, 0.36949116587874564, + 0.9062309293016031, 0.5676917708189916, 0.6579501227105841, 0.1556984298808446, + 0.10526105347022718, 0.0432597501143015, 0.12340518927265864, 0.24480904517286162, + 0.9559497778713313, 0.4650345002210139, 0.5570696004138982, 0.8604647108283786, + 0.08281791268299465, 0.6909086243849096, 0.6361793771611001, 0.18333854334943112, + 0.09914338253655386, 0.42672616390085993, 0.029455181622296656, 0.8806910005582591, + 0.501593639037123, 
0.5384016604659735, 0.048500484858614024, 0.33157374470623313, + 0.7819900133063813, 0.8036696202277274, 0.8870946895919508, 0.6443897992391144, + 0.487593702664578, 0.47427283897570727, 0.45319916886863576, 0.34084491767048486, + 0.4097600967808508, 0.6369527481950622, 0.988582322369492, 0.7429270774935287, + 0.2755575157901178, 0.5337860064252411, 0.08527367015946075, 0.2868764543425195, + 0.2818610831832793, 0.09656564597512085, 0.16326420842851086, 0.078524756766712, + 0.692034898414409, 0.02929249805951828, 0.04078870767209608, 0.68618278656732, + 0.4164950388301115, 0.15439502423081763, 0.2608044219197406, 0.9485716022280892, + 0.1489511084132873, 0.5371867964769852, 0.2679471165751426, 0.26879005137532985, + 0.6385648413295864, 0.6934480563650385, 0.5889259374697515, 0.9444658629484035, + 0.5739860439847068, 0.4642829135480352, 0.5485487373822201, 0.055600233329058835, + 0.9503995048048212, 0.03693809906500234, 0.29259340143434953, 0.3763819233620549, + 0.054765734537462474, 0.6690946345171489, 0.22901017308395843, 0.3851663832175244, + 0.9320166382726104, 0.40543494470602504, 0.7132939144212584, 0.438468625983355, + 0.40289388689969197, 0.1080205205554402, 0.36245057716210094, 0.36015390409792614, + 0.4083542707925487, 0.6457341101335805, 0.5280984956406288, 0.27661051569717643, + 0.15463113535167072, 0.6339351618412589, 0.25386082756030703, 0.3773882514499456, + 0.31448240237275205, 0.5379416898801859, 0.5758417787990292, 0.4931403175245719, + 0.19895720697450392, 0.9106510540309283, 0.7050910060490698, 0.45995862112822117, + 0.4978773762163059, 0.4305593369082561, 0.5163888955360598, 0.5881241308899362, + 0.7443503376805837, 0.043441356160601075, 0.7620942885064099, 0.25391141995016, + 0.10404423400504292, 0.32906430947818444, 0.7134706951754703, 0.6142847460386224, + 0.8025796936730528, 0.732389054290974, 0.41473925626260966, 0.6923681125801747, + 0.7145973260035485, 0.6951361442463575, 0.3418245407724094, 0.011957990757860193, + 0.10193439601418242, 
0.7414001168798778, 0.4136476884443896, 0.690481795572699, + 0.9133795729869558, 0.8514948184579377, 0.6159790140475269, 0.35329256022365085, + 0.885435642977142, 0.9162085873379183, 0.8567070326140995, 0.25296933567683344, + 0.35424346143915453, 0.5002598794309785, 0.8659747694089831, 0.061456731329130654, + 0.18850808844299283, 0.041641953786621944, 0.4423078043675779, 0.19516736871253626, + 0.7748134390819658, 0.15669760064831195, 0.9117988105447139, 0.17529069687430643, + 0.21239361243420785, 0.5598792774908463, 0.019901661165872664, 0.7807569139436707, + 0.3951855412813505, 0.04765166480638605, 0.1252275191352593, 0.7764889850899379, + 0.9903254692712204, 0.4160674609967715, 0.6575123873469116, 0.38692550571427453, + 0.9421299229690594, 0.13425578776714753, 0.6592775206040412, 0.6054569516862356, + 0.04995466341550436, 0.31838895529498445, 0.5594098049245086, 0.5431828600421421, + 0.9655536479587977, 0.9443783667221035, 0.52733461507154, 0.23509187824030708, + 0.8359559719413645, 0.40800777231855934, 0.8192441772939613, 0.4585598560257491, + 0.5789392096126688, 0.73483080985693, 0.3857779429185293, 0.32684564099220315, + 0.4342809981627095, 0.14049178871154044, 0.3249377070613626, 0.9649364229542966, + 0.546159980425236, 0.7495624435760676, 0.46499080808841164, 0.9595431299879549, + 0.5511422996681221, 0.006284943257787723, 0.9579373386678119, 0.8627631305545066, + 0.42931315127083947, 0.7117063444496837, 0.3529688849071867, 0.6581981238160464, + 0.8607842753730445, 0.1035946540281355, 0.33884413032963134, 0.6507898433398873, + 0.5959572838859108, 0.8452792802539334, 0.3526427347097323, 0.2554030188129234, + 0.3190780690938113, 0.5359166867354336, 0.899592971396041, 0.9121275113754743, + 0.18881523918281573, 0.16581486849400462, 0.1864109651954301, 0.028607524048106736, + 0.13031259865406175, 0.6048366380665815, 0.5957170162769209, 0.7081464546990173, + 0.8903217482370849, 0.09598610365359428, 0.519585064267526, 0.09657738036679875, + 0.3475479951857743, 
0.4963494792070573, 0.5292251865940365, 0.029110455518635447, + 0.13835821078098043, 0.658289086360752, 0.20830808279559043, 0.0736493897230931, + 0.8598564030205227, 0.8590472853784719, 0.9610234425963425, 0.48137361683309265, + 0.17898859647327336, 0.5440140974851478, 0.22730089140323917, 0.018558462503961426, + 0.4863017370860887, 0.9382132934461014, 0.19297219357216033, 0.9940483705415563, + 0.7732193700811595, 0.4881593204224687, 0.21141437672188212, 0.4440192193919973, + 0.14557017262370142, 0.7702782436739344, 0.6601890426095425, 0.23468998861913448, + 0.7782546277335423, 0.4448974128041292, 0.10784083571982617, 0.14854640192380364, + 0.41589035421442533, 0.3197984326244462, 0.7245022420692844, 0.21576829484306959, + 0.10265703279256322, 0.45756321284804535, 0.9403807825834055, 0.5526588429924927, + 0.6064363553897896, 0.44265137283869593, 0.569212185802443, 0.7912368277559028, + 0.8887896757133944, 0.9701565615505346, 0.6977265802358966, 0.536834680339334, + 0.053981826061055393, 0.8756559562878227, 0.19057490879409256, 0.3320634721865183, + 0.4459034795768957, 0.19803414491520221, 0.41464347378057187, 0.4296586033181208, + 0.3521609076476333, 0.7204422760571739, 0.538750650251617, 0.9405722533748734, + 0.6186423782405404, 0.6233966614987376, 0.318102330519885, 0.8543971214766016, + 0.6515592077046153, 0.65087215946289, 0.07339238123019876, 0.4727928631310542, + 0.020176923010824965, 0.8738978388137459, 0.400978242333368, 0.7942854137885756, + 0.6403430773994766, 0.5661281788485651, 0.9813443426749566, 0.5483196198615674, + 0.06707226963185708, 0.7333912975866296, 0.7783524129943937, 0.5686775672271452, + 0.4256925483989712, 0.5147086104638293, 0.21476000417538343, 0.916855433021808, + 0.3714678841508565, 0.21945150784688328, 0.5895851611906975, 0.1444626597607006, + 0.738918199088585, 0.4481544686964083, 0.9856359446672089, 0.29954706102055195, + 0.611250956442058, 0.9430562414796368, 0.7229305445999924, 0.1858020448235872, + 0.6618648057683056, 
0.8030276227107247, 0.8628099361519953, 0.9052610609469864, + 0.48514713211278393, 0.21873607262787087, 0.5236259810661064, 0.230898613761495, + 0.17879765773860556, 0.6151719520382292, 0.8151124499805635, 0.5781982220346872, + 0.3922148718803763, 0.04935560570672859, 0.6166915363174584, 0.5081051021238706, + 0.7010454655614318, 0.4156377880919485, 0.3492037798091502, 0.602200690923089, + 0.09046365783660171, 0.16223935565771608, 0.5214901402508125, 0.5044965309511683, + 0.8718711479270838, 0.9893060211426482, 0.4959762454399149, 0.6638722685693331, + 0.27567906452334956, 0.9612735276985851, 0.017683914342381035, 0.6880525776298796, + 0.26384910388692295, 0.4717037005472474, 0.04709738998294177, 0.9206613460317158, + 0.30609401558422245, 0.650794876466132, 0.9549873305910817, 0.8260862345328126, + 0.4305511457337493, 0.15775833413756224, 0.5698073932330375, 0.6732764670891707, + 0.9398683076373318, 0.8884952039532361, 0.9747580378552119, 0.7012280138803675, + 0.8531212614847642, 0.5588857939967947, 0.4865193397093649, 0.35605598711197395, + 0.23192417196470105, 0.8943987506597288, 0.4581750581036229, 0.8316761648384743, + 0.1956329153934292, 0.7862726579345383, 0.2825441517137014, 0.12262868378623026, + 0.8276433833405171, 0.6205529971010263, 0.32692718075055793, 0.8228061122431176, + 0.01971216456364311, 0.34028396625042623, 0.9809049097581001, 0.42615866343689, + 0.31026080743670426, 0.655869132710115, 0.21848187364501515, 0.7168610879867828, + 0.4594451911566828, 0.665814039596452, 0.36315476515717227, 0.5792757848529761, + 0.2194177814751842, 0.04385373639898016, 0.518617925385258, 0.3231278017612045, + 0.2594374757740957, 0.29246587337951047, 0.9954689561632492, 0.3501425234728255, + 0.6709665721562044, 0.16735352771914958, 0.8260129706431494, 0.40920719564891483, + 0.6726330850570945, 0.9363557645462725, 0.6556123113787984, 0.24270806527297228, + 0.7235976249239356, 0.24447580020214432, 0.1570820781313833, 0.8148349956024216, + 0.6933255834175878, 
0.3001748159747639, 0.4345877719588055, 0.5503760162209426, + 0.25779097166992126, 0.4929286821874297, 0.9452724276975171, 0.665277564567275, + 0.8981405030205403, 0.0670257099481898, 0.6939751122857064, 0.28055550151293496, + 0.1332388904235442, 0.5375935529800575, 0.7796575064872702, 0.6544817490995954, + 0.07015083766352537, 0.5144850339572572, 0.8525538781907521, 0.9739838860835902, + 0.4968967376586929, 0.4127145314686359, 0.27933761555501435, 0.3672016010943906, + 0.6624024674092706, 0.24544598528646022, 0.730314983254911, 0.17701101837412136, + 0.4254799754930676, 0.35423136895259566, 0.5768227897299716, 0.5744645070463099, + 0.6089490929009442, 0.8468421104716097, 0.062493117646656215, 0.9433883841846586, + 0.3224314773749847, 0.3879437547436835, 0.06542481862518479, 0.4524283508742747, + 0.855669739307951, 0.9355414144830774, 0.016953557859817336, 0.08887746141991093, + 0.3077384846166704, 0.20142444310220264, 0.23430527951848246, 0.26726979711914867, + 0.7538977184667861, 0.14055565974103956, 0.12019543811452327, 0.8032361878205014, + 0.7275211502307414, 0.5391190118588421, 0.39040871413175204, 0.9111147281454772, + 0.9493616269229143, 0.7539309432514376, 0.32713630350951395, 0.748126071640409, + 0.7235166954765612, 0.5451528985198291, 0.08939363370630093, 0.9726577168929407, + 0.5056002065235468, 0.8720214092930499, 0.14252130964260157, 0.8017993349366525, + 0.8789412016297585, 0.19733101052115432, 0.155381302468528, 0.8861006212175135, + 0.28308405796973546, 0.3421889072421824, 0.2741378883081277, 0.562728159108378, + 0.7383010713186272, 0.2940145578132707, 0.28188592835883397, 0.876776834120555, + 0.4094498174275354, 0.2609720457658792, 0.9077216796152148, 0.9167908537532, + 0.6309256990750524, 0.2932255295296887, 0.6187393252348707, 0.02419640268937162, + 0.7615549945291642, 0.40152068077174974, 0.636001364738158, 0.31693811983415865, + 0.17459370162390564, 0.09635478622334415, 0.773110596039915, 0.36730671076204746, + 0.4443424697638383, 
0.6269672542570564, 0.18052959491911025, 0.8763251766396633, + 0.9298965932343471, 0.3114788775550925, 0.5129502631275407, 0.2671988790638957, + 0.03395469873611412, 0.20677057622909345, 0.029755677202100816, 0.4219143640858508, + 0.27766396280399275, 0.7626861520170851, 0.2804449959173896, 0.41126630075958903, + 0.6130098684306445, 0.7749747562665911, 0.6306743873349789, 0.17398026115826748, + 0.8664778902230784, 0.13631390978684288, 0.126146245275511, 0.14392278284610127, + 0.4957647401686809, 0.49393608417974944, 0.7608848150197474, 0.7708537446533661, + 0.17041707735566713, 0.5292679237649189, 0.12997774787294147, 0.5833293771008747, + 0.22158461844207578, 0.38454048634657145, 0.25170772654694007, 0.18384322498407457, + 0.9106010886281795, 0.09659133036787504, 0.04798999449920649, 0.5971959180210877, + 0.33920293291125825, 0.0026978486282175362, 0.40823027702490455, 0.7474551858830002, + 0.8447835860688525, 0.999476073467762, 0.11292905422267163, 0.2891202214831097, + 0.6782488374010278, 0.5640985110366258, 0.8986345118209681, 0.8303221656535216, + 0.3838014827262134, 0.6954806707019082, 0.04745548087955098, 0.19020545853032067, + 0.12142096074765196, 0.8083835608993636, 0.2703724701412815, 0.04308441751967196, + 0.8978514331468422, 0.9227768394778075, 0.054487218354160816, 0.8881844540343677, + 0.05812500740473081, 0.43395580484342355, 0.9128284726568942, 0.46153699068964205, + 0.9329113950684834, 0.5436808169668677, 0.8551864909610408, 0.35430629048202467, + 0.20331160404943038, 0.010592767504785994, 0.768145204266474, 0.7716453181066588, + 0.07399950484290962, 0.35304343137569727, 0.7125013832160177, 0.32282759187212584, + 0.5098121496598533, 0.9429961075410074, 0.23690140194649634, 0.28545879601150004, + 0.36072796490221837, 0.08273436245662757, 0.13321861653992195, 0.45428395959680057, + 0.3292870423168869, 0.3717537796542958, 0.9151157324904851, 0.032565267234247575, + 0.5663024224678861, 0.21258936026884934, 0.018792364759155578, 0.33061571155470315, + 
0.4408215380032614, 0.6373830559219769, 0.409083548130421, 0.5719023860863325, + 0.5966825436453085, 0.3384202473006439, 0.0652522862056627, 0.7184518347094753, + 0.7333243428502271, 0.3397689359164483, 0.39030612456817504, 0.49021356874309363, + 0.07567478335266509, 0.8152990173046628, 0.865796090334987, 0.9772443897479798, + 0.8237383881843146, 0.6529526140669909, 0.642639906551107, 0.48148740771686727, + 0.32824558349693844, 0.21997446692237632, 0.2826342393226089, 0.6925547552930775, + 0.8127702758298999, 0.8449833779720203, 0.04123879385713569, 0.15065632287258246, + 0.08510557609762459, 0.5458280878881934, 0.8586504699178478, 0.9911190250770345, + 0.5763284424876813, 0.7391889450985766, 0.40039453665752356, 0.6580974766512633, + 0.9713635948599508, 0.9633153015071205, 0.9892725316197725, 0.7655195394029977, + 0.2626076882068743, 0.5724930703594869, 0.8770329572359922, 0.9241957706353401, + 0.5311898231459584, 0.9787947183754235, 0.29236240390648527, 0.1370530031021152, + 0.21557458708432398, 0.21930911835253963, 0.8715098439481456, 0.18225413327885254, + 0.1273980429476339, 0.781536231097221, 0.7399707114697983, 0.03176008087944826, + 0.07484892991644743, 0.9938729205296434, 0.7208500631972949, 0.7020940151356819, + 0.649863542644937, 0.07171846155065342, 0.24072337013910783, 0.3613692423637672, + 0.28109344159579286, 0.6906532840886619, 0.2697954961712017, 0.28147888050363834, + 0.8129444048447061, 0.6482743700303343, 0.567516198772247, 0.3377204549692112, + 0.5318035966189143, 0.13996490823612162, 0.28411043736720165, 0.7086066320571008, + 0.7364523892364573, 0.24583114874653533, 0.2718401559841159, 0.4699898162815598, + 0.8368333921696967, 0.6039469513318906, 0.6881629540763242, 0.10914136009539432, + 0.03794547642282364, 0.16071126773904743, 0.6071822780850678, 0.999449454081704, + 0.38785614009596714, 0.2305249584020561, 0.9381100298775975, 0.6723682337201471, + 0.0270853082856215, 0.38452454801976443, 0.8104401851265016, 0.4148616154771626, + 
0.25348586490031955, 0.9559803040006986, 0.23669298529343408, 0.8683776502568926, + 0.6818154399643029, 0.386930406487093, 0.4831551067581342, 0.762948076915824, + 0.3080418124016162, 0.5783325642474083, 0.02031458126010055, 0.07023369879228158, + 0.2510418307035861, 0.06557828934649368, 0.20332294284183228, 0.6988094459473019, + 0.40342220501450765, 0.9809970493544985, 0.26572276292341057, 0.5928075161063842, + 0.8549134765023864, 0.567763428815418, 0.5470926603460404, 0.9274902125377089, + 0.4928070771606695, 0.526894280369654, 0.6376977924010896, 0.22799338981773765, + 0.666609321330193, 0.30868196403098336, 0.14279972266571528, 0.5062364564484477, + 0.9157139519719435, 0.4438514752002488, 0.0967753755965477, 0.8364362062093621, + 0.7507227281441472, 0.034410730069850626, 0.6782441014462247, 0.48150125869929983, + 0.8610360109688682, 0.040318507891833, 0.981454376414118, 0.9489949777983132, + 0.8363894005173848, 0.7935877513663323, 0.7161463843240741, 0.005320974874661322, + 0.7219264019524386, 0.5939046135812794, 0.7367415608933358, 0.4901397664186017, + 0.7455950792620178, 0.38453281111378745, 0.11932732877577257, 0.23846310575164364, + 0.5070859241209125, 0.4714858979832832, 0.9511226798432605, 0.816798240831224, + 0.9906645769037737, 0.11961893403587176, 0.31535187276502774, 0.6103025507014492, + 0.780168389505919, 0.4873081795149584, 0.5385034675060101, 0.979880443701223, + 0.38006057763820444, 0.8983108410336961, 0.7260784501222746, 0.08445070147685141, + 0.19108936840472623, 0.8684070135879295, 0.29639093380738957, 0.24831227601153794, + 0.49319075696101833, 0.6248867669334034, 0.043314312738550464, 0.6751994351532093, + 0.9839650562784863, 0.23577156317354986, 0.655407771653181, 0.9998838732393714, + 0.914266783053472, 0.9781753528714074, 0.002991513868391582, 0.27941324936645506, + 0.8503441860013552, 0.47965377849050583, 0.6425737018053364, 0.6917759376427142, + 0.5997255515022856, 0.060241281568058636, 0.5393389196251592, 0.18985718347501923, + 
0.9351558163273623, 0.05023015623962879, 0.6468904895820736, 0.014573021284780774, + 0.07314826533484264, 0.7385634948320089, 0.4736186028473338, 0.961769707187561, + 0.6466132639765876, 0.6540721879369729, 0.5897323507539489, 0.24160977712950327, + 0.34607925280409346, 0.6030672545910204, 0.09219015818821708, 0.2189044326893821, + 0.12227548581400349, 0.8947981854220011, 0.252625479705346, 0.9946789181499702, + 0.16317149618234628, 0.8889540539831564, 0.11335746065295138, 0.3021348506513677, + 0.32426537462416727, 0.3221710260970897, 0.7955224454834644, 0.31995046679141825, + 0.4603696522905393, 0.15274124315980264, 0.5767785824549683, 0.9089273609938713, + 0.014181800749509588, 0.8968517292934882, 0.8576432177022097, 0.41392758451611766, + 0.49075724214154415, 0.6724006689201423, 0.7419828793396049, 0.5918792807869941, + 0.4964571450177757, 0.8462797472729263, 0.24074647479855604, 0.17389013966384548, + 0.4336537753610863, 0.20804076413234251, 0.8346292608572363, 0.16330788557412612, + 0.6470926565729509, 0.24909881173874848, 0.7800536906520463, 0.9027679093109797, + 0.46621683989976836, 0.8825818086255208, 0.6483997410357604, 0.07090902131552845, + 0.4615378723949781, 0.3984200929917433, 0.793556355780704, 0.18864072780222685, + 0.6683790309702944, 0.8281722909526522, 0.8584671461088224, 0.7674560890352315, + 0.30241765553139677, 0.41475831093913884, 0.22776361413963964, 0.38489260664050606, + 0.42790348692893054, 0.4855741541423584, 0.7700059150649414, 0.4463345406584335, + 0.3754021383352054, 0.554149430056228, 0.7229379198353453, 0.3532397254539792, + 0.3768025729772698, 0.668903032222659, 0.9064030808327798, 0.435833055858148, + 0.8685532076495873, 0.2886386669233906, 0.43587518450474105, 0.694872592542479, + 0.1796499204429559, 0.22480783387822612, 0.23350737891309348, 0.8144091649840206, + 0.44616818509133616, 0.22082898026287756, 0.17930805292422558, 0.15071070399700426, + 0.3155340984080618, 0.6760742950253298, 0.2621675138999626, 0.17823328215968037, + 
0.08015587675406999, 0.9763732534524071, 0.8110620473055706, 0.9566564231509402, + 0.25767909123698307, 0.3601800348476929, 0.950425899846338, 0.174356764476159, + 0.08077041781361871, 0.7687961673279716, 0.4354185072613621, 0.8310249565932173, + 0.3721057577278879, 0.5340391905817158, 0.4872996797842588, 0.761276430509516, + 0.7341317116279491, 0.11585158022036635, 0.8117034975015432, 0.04017399270252342, + 0.5161920405254127, 0.02912026146204605, 0.283469044387504, 0.6056600705599993, + 0.5892643002783994, 0.19327527302460534, 0.6201029277090222, 0.9439856016164112, + 0.8563446020541817, 0.6525564675585972, 0.9151952958944338, 0.582373416388159, + 0.7669560791324367, 0.02468294355083911, 0.32790419872418497, 0.16828688898958633, + 0.9785136875320333, 0.7559030686500858, 0.4880628030762244, 0.49742040693088596, + 0.3807874726702045, 0.2518072818614463, 0.28293848736377514, 0.052065141479679444, + 0.12673432223072, 0.00493210913236386, 0.0659657665976604, 0.7546657122157793, + 0.5142072130637574, 0.47101495152740935, 0.12423809933676722, 0.631621048904926, + 0.9375168340975689, 0.9710427645428577, 0.6951321450506236, 0.12975755461724825, + 0.9371463814511896, 0.06329947410562009, 0.17250706916082792, 0.27504520679952815, + 0.7997764878570511, 0.6280084931708173, 0.8790770172906017, 0.8699573351880034, + 0.47162638306287774, 0.429977964766746, 0.6852470236642084, 0.02825069851350992, + 0.7187392702298809, 0.6897047244877237, 0.21155052406886465, 0.5782009379374243, + 0.844124004652653, 0.13477494670163825, 0.39460860995867053, 0.21886878537466137, + 0.9331765626546623, 0.39975766464681506, 0.05306072541805107, 0.5198958252400147, + 0.30504398126008225, 0.7433307539428954, 0.12287466682408421, 0.9670010933795353, + 0.747858359423017, 0.17207327768824898, 0.2545220160759658, 0.5911442854526012, + 0.785125289716672, 0.34374725201679623, 0.1881988882326393, 0.2864207541695756, + 0.022946369182973436, 0.035928263856476184, 0.8248370296597001, 0.0326694849755228, + 
0.7293502533474547, 0.6077561758020177, 0.5308356230207454, 0.03316711866227229, + 0.7568490275843757, 0.520780966891579, 0.23241392652065784, 0.6897984319358339, + 0.6291991128180034, 0.4441797944319068, 0.5568587932114791, 0.4498088314464638, + 0.7010019040522237, 0.6507673483154275, 0.45381116863257953, 0.8343724626292929, + 0.9726485983607335, 0.08883727023216881, 0.3240589354871075, 0.17091823673070605, + 0.7065195713066943, 0.8019676727987474, 0.5441090274902036, 0.7569920288292978, + 0.7284967317947634, 0.41634232641384894, 0.07570625995129976, 0.4839470331704978, + 0.22728929068210257, 0.6017907078082997, 0.2752057089278115, 0.6197057838032689, + 0.5062528432426374, 0.9578668123268466, 0.17145202413217242, 0.24939262342805169, + 0.6288714928454562, 0.5306228139658058, 0.5871282516007716, 0.5544974175100911, + 0.6268496169607662, 0.8447978213259328, 0.5990456111341405, 0.4975881427045391, + 0.9759159347677031, 0.6791081992495608, 0.5316632896786118, 0.731025524465835, + 0.37977185358977994, 0.18206518830018836, 0.655684273695134, 0.7157391210381715, + 0.9914665753650298, 0.5048593361026893, 0.9551772829563094, 0.7745323297735499, + 0.1264456909491325, 0.3699896710346614, 0.6285338457271957, 0.514443036953066, + 0.9899529710790698, 0.7325623262164351, 0.42250238115582217, 0.9471771192019562, + 0.5681364044488664, 0.6486542055885673, 0.13720737835505248, 0.6846959458412011, + 0.4521690685914095, 0.9371433944439886, 0.9945327264012567, 0.33107065688983917, + 0.9343690829724055, 0.6856832306125681, 0.799031808457499, 0.7380137292376373, + 0.9746119191774075, 0.6114354110845129, 0.0891424280914559, 0.5091757110322869, + 0.603309404646988, 0.4295246842942808, 0.722036085094832, 0.5625133815588351, + 0.13050048794622393, 0.9927393684728438, 0.45025947906275665, 0.3250450093807251, + 0.09561872500769664, 0.8633353598344091, 0.40442616095861705, 0.6049206328617713, + 0.8428239809093349, 0.3359551331198569, 0.22248396807251225, 0.5502353681183261, + 
0.40267811045207713, 0.5815240359991557, 0.3014924083991003, 0.4250990948690391, + 0.08277471680497728, 0.16446000783135628, 0.7582322318598479, 0.4513912032123134, + 0.21989757035321167, 0.6951066958420687, 0.4703863012569903, 0.6716716338779879, + 0.5407943154328158, 0.4271833226075261, 0.08610893472179548, 0.693695330552891, + 0.31314361600309726, 0.5535216118808298, 0.5990984962870414, 0.5131726892092602, + 0.806119912485667, 0.37726715649026266, 0.09688997160729618, 0.18191235600914468, + 0.7408756680625669, 0.067357737461333, 0.34430792748579353, 0.35918926626247394, + 0.136124078264096, 0.3541995171494954, 0.06349779403656919, 0.3754301718183216, + 0.1727189346883753, 0.7580345332123546, 0.5946766069056495, 0.12308175309808889, + 0.5448895422266883, 0.2753416276479139, 0.23336713618230265, 0.4308822662286048, + 0.5621756856088449, 0.7611032921388645, 0.349812128693896, 0.3881534377705992, + 0.2835050132379042, 0.036894377980221216, 0.9307542758686738, 0.35984184065576474, + 0.057717777604340714, 0.16357715519612248, 0.5474191675603088, 0.17209729645140837, + 0.1035218074281723, 0.550510065989391, 0.41363664298505376, 0.2424585443425049, + 0.12297541084632557, 0.9285138515876707, 0.6290588065058829, 0.8255063271949105, + 0.20874626048136935, 0.9395041387697237, 0.04600701773713345, 0.24156959559907465, + 0.37041024160368163, 0.7264228362952664, 0.04321551557401371, 0.8319348475055413, + 0.5128934010550922, 0.9925867471857552, 0.9216267877935361, 0.24718436038946778, + 0.4698226024943565, 0.5439280672034829, 0.598067075383266, 0.28494763010177293, + 0.03773308148441756, 0.14574269431127063, 0.6053555487266239, 0.8475034881533703, + 0.2020262485026808, 0.7042307474819337, 0.5005531552761207, 0.8192645819547374, + 0.20759838646694628, 0.42254359806485775, 0.49037819615284917, 0.19201492423287725, + 0.37500924763986043, 0.3021041849823324, 0.3213504994388482, 0.46774067505236416, + 0.0026173221305848537, 0.2236980382939876, 0.8209726800498244, 0.1341877362757261, 
+ 0.7037171416664391, 0.756226471861304, 0.9793647620935093, 0.44373960477005947, + 0.7700194618684637, 0.5673608450062326, 0.013838598953302195, 0.6574138322136758, + 0.7782853882176008, 0.9813441063355254, 0.6393022773267258, 0.9384754208419224, + 0.9544023739700562, 0.8689363282216476, 0.25686033022026544, 0.5677299374542079, + 0.20226249527907136, 0.7413309609345924, 0.3902110116265247, 0.817381412418007, + 0.67832670080788, 0.46537137842295107, 0.6072049619454954, 0.931659554219823, + 0.7232985553535521, 0.5613079712934398, 0.9529395011786647, 0.3233140991562946, + 0.8867061849259075, 0.3102047303338008, 0.5569391637391777, 0.7072786765330655, + 0.05248664008431159, 0.9215047227212699, 0.32001852496573835, 0.2947421881907316, + 0.16826022413555786, 0.9470234077238253, 0.02390800364358514, 0.921284435754136, + 0.5062140210208729, 0.38265197505015147, 0.1803325603165793, 0.35597726956395437, + 0.7943681735300347, 0.41606259121453004, 0.9499169772053542, 0.022111673063570425, + 0.5235643488450099, 0.13857697122597168, 0.4254536166360948, 0.22832181147804687, + 0.23456948978616432, 0.9860684004280239, 0.8756978874984395, 0.03544764170358694, + 0.08973630772908203, 0.5083562496701145, 0.011202382317988091, 0.8990670746555705, + 0.9537673849619107, 0.7085713197473092, 0.986232021543572, 0.5081578876729579, + 0.6945190695598703, 0.9024711459516134, 0.4208577171692106, 0.766413072935944, + 0.48028012847993773, 0.22874311332279218, 0.2133704688789767, 0.5125128278114942, + 0.8880847121160246, 0.3606612472771661, 0.34170378950422586, 0.39477693551558457, + 0.6340623996399476, 0.6953872129694927, 0.7092038405181214, 0.9624278741295362, + 0.3905353641961543, 0.10208035797721937, 0.6101978379539461, 0.7583740478481424, + 0.12625795541839047, 0.6348773324268129, 0.8871458755396873, 0.01670678244223589, + 0.4146830711435052, 0.5372807865088944, 0.12542194922035377, 0.6512231246433252, + 0.4980918754933916, 0.2091095413841736, 0.21206766259675913, 0.031624355402636306, + 
0.49247318203534907, 0.8251416658391146, 0.2265518914158131, 0.45579558773586915, + 0.2793801346253608, 0.48011367142312156, 0.19316663611067186, 0.9630653586008612, + 0.2844483346903066, 0.7428791983417421, 0.20350354070834398, 0.2553320192818015, + 0.8212809248620727, 0.7661178866721406, 0.765443338343973, 0.5677574750675933, + 0.05410405845144206, 0.8535598075847425, 0.5267124302899265, 0.16596308978355268, + 0.5165349840530941, 0.6009663705061661, 0.8001224351986231, 0.12433512035597516, + 0.7328391838711578, 0.46843650518351376, 0.19453361700593552, 0.2457914503921359, + 0.9715041128172724, 0.5035253179717091, 0.2738750246354873, 0.76571572353285, + 0.8322725912841784, 0.9742207856586259, 0.17221294952368482, 0.30429255859284565, + 0.150446075776693, 0.5774457449870724, 0.055204948247565166, 0.9049180192781775, + 0.09219720725874558, 0.7249660525087812, 0.7634298317772668, 0.09532513565669298, + 0.6037902965957798, 0.775876091952754, 0.30048105187387053, 0.5321075407103473, + 0.9676971504359092, 0.26881827714334805, 0.3270462715063217, 0.14109347688658214, + 0.718905496165389, 0.5178340264469525, 0.4641920385691337, 0.6886634266980226, + 0.7541743566585278, 0.9338177907400674, 0.6377610366495927, 0.07859012814204014, + 0.047881143812792626, 0.509518876595083, 0.4662566613906933, 0.044753641991725934, + 0.2888474238293234, 0.4300753033759601, 0.24090396899360778, 0.4880819616569849, + 0.49344032440942565, 0.3267337118213137, 0.30739545858667916, 0.8347976589605131, + 0.8468181237471225, 0.9397711276395985, 0.05076516895725314, 0.35528884635673386, + 0.47869109342428395, 0.6485992654884545, 0.13077849395002672, 0.7561835005330849, + 0.13019119770401266, 0.522947346864396, 0.71681653486775, 0.9545807279091522, + 0.048471853442748514, 0.7656959974208565, 0.33260665541814094, 0.3107698976359098, + 0.784262134457234, 0.2390556069831512, 0.2171033013333129, 0.22106855321775898, + 0.1777305668557082, 0.7010763743538238, 0.34687353348311567, 0.6713227825218957, + 
0.31741624899077014, 0.27699680164190077, 0.24966398933329337, 0.4378580357548568, + 0.8220315880834599, 0.8780303052900865, 0.3282223224417613, 0.5304855331661286, + 0.8759301198567931, 0.8022054018812916, 0.8087106870029261, 0.39915235222002377, + 0.811956074326436, 0.9536455298780298, 0.421213924992475, 0.43164313734964477, + 0.5227138249336246, 0.7090043439645167, 0.2998320373339187, 0.5545990176184411, + 0.3997327842492463, 0.7316793591805311, 0.8754030075046323, 0.843831916727075, + 0.742684393168072, 0.17453735067792653, 0.8625298941919014, 0.11381870844975783, + 0.27979009889454887, 0.5768907698535688, 0.06594773733933168, 0.9579455432204982, + 0.5946935153059155, 0.45202323444648607, 0.9019434787093433, 0.7818194846873125, + 0.3967066863484422, 0.8228406568388705, 0.09721161204039697, 0.4801522356674264, + 0.01959637598493358, 0.9850457198522312, 0.24050822306737707, 0.18371526827375517, + 0.5845485582312591, 0.17652807791034186, 0.8647449178268475, 0.2895166509368404, + 0.2419901155988491, 0.6941088664970383, 0.1560864519399333, 0.635427742150778, + 0.5816818559836522, 0.9849155684106518, 0.025588074983513787, 0.49459827852911853, + 0.05425377571263956, 0.4743775865820451, 0.7564149515738253, 0.16276984906703595, + 0.7687548469520425, 0.41373225024064564, 0.4544064319037999, 0.41801259667280766, + 0.6314198197027366, 0.20323482183642694, 0.34349885200542063, 0.27104141183367414, + 0.7982388531148257, 0.9953984175532257, 0.8327122421658457, 0.4281616517848509, + 0.9687894499866775, 0.5466038572944231, 0.22487868502002484, 0.8384645649234528, + 0.36865165146819756, 0.9336942046854281, 0.6160039963189174, 0.651923536201123, + 0.14310507886655377, 0.06001830530380037, 0.22971825195338103, 0.9060489022166647, + 0.17227243918298796, 0.03562839494317671, 0.9833405699591752, 0.879047113237965, + 0.06427639038238131, 0.11198359585969286, 0.5120162273713391, 0.8195681998043876, + 0.518411923936739, 0.5964168071235564, 0.8644480695528365, 0.9742302432739681, + 
0.596784065205653, 0.6009605776303654, 0.9172413061542587, 0.9722770947384021, + 0.8230312406014156, 0.6689544008223667, 0.06070494793081682, 0.3249888584728904, + 0.623899766373403, 0.6526210484092227, 0.6766952396244709, 0.34652804154497674, + 0.4071283107450684, 0.8222639692618214, 0.011132519537103813, 0.5760595421942754, + 0.28287338543402074, 0.37983591169858555, 0.40870027326127367, 0.07771932097463397, + 0.03700348128441577, 0.34853902862782316, 0.34976756257395014, 0.20268620904747214, + 0.17602558997716566, 0.07375838827968006, 0.8258050102203596, 0.22552025356880057, + 0.6174348425138062, 0.8743948521904985, 0.04826767861785841, 0.8881260245700738, + 0.20517630380817353, 0.7158063809593872, 0.6424390262914653, 0.28579474166696406, + 0.6943570209638391, 0.8915403913726628, 0.18101264400038986, 0.43672810024025943, + 0.9720888013306086, 0.9163586355462026, 0.8554811772547043, 0.8390860580206726, + 0.9764990516164289, 0.623084563941527, 0.7274814276805354, 0.593214063381061, + 0.1366543769893125, 0.5942756436334763, 0.940654759078095, 0.8976841341685899, + 0.9170727635406218, 0.5512259884901765, 0.8732383847949075, 0.15876424149214485, + 0.983846243401889, 0.06524064955606268, 0.22095794008768244, 0.29289061978348774, + 0.01906759179346751, 0.6027549581512781, 0.06775862309828506, 0.25143958689042967, + 0.5669673318847419, 0.28294105817696213, 0.3174541416076303, 0.33060755385741214, + 0.4008286112428354, 0.36612160617925404, 0.3324377788357381, 0.6469852128501971, + 0.12960876007392885, 0.5573253264003803, 0.18164164552223339, 0.05488525449383397, + 0.8333901979014496, 0.7013270063862299, 0.00973447081459422, 0.44646013225218206, + 0.04509404363374836, 0.6007283625082954, 0.5760011451082034, 0.44293294050290455, + 0.7335522619452794, 0.27903604275828786, 0.1476826817000655, 0.32432344497821075, + 0.6870931336062818, 0.00857861060126519, 0.7820956055279831, 0.4726044283862554, + 0.5732177277699113, 0.14034121755523665, 0.41804284566429306, 
0.3953383120567484, + 0.803883857137373, 0.8793775032337189, 0.7863521774621048, 0.9210368915228765, + 0.527325528345828, 0.2898124695522769, 0.31385222532735757, 0.6472638868328983, + 0.5235968800554752, 0.08089383474428724, 0.6501478763743553, 0.1226793956172183, + 0.7544937807956091, 0.1011571047349803, 0.8036833193003035, 0.3947568249380681, + 0.7113878526534941, 0.4334600130160059, 0.995734610188258, 0.08750876548564512, + 0.41692095049348066, 0.4161099210925021, 0.558301508648263, 0.5234324181480597, + 0.6021309454264144, 0.11982213855056323, 0.652984817383927, 0.31994110490998406, + 0.44025076342842107, 0.9018474001363644, 0.1780521010787487, 0.0928203326209236, + 0.5689257589296584, 0.9307858048545634, 0.5019191507483846, 0.0732359171678838, + 0.7472470560306381, 0.7606290137163079, 0.1759363749688474, 0.9865090966718135, + 0.14162530231861792, 0.2257810200578727, 0.8625577589305529, 0.6182840554609289, + 0.7627928373929174, 0.7138267678661641, 0.6271079408184336, 0.26542849852208483, + 0.5438712954597966, 0.6534648091675265, 0.9366397928644904, 0.24959632558445233, + 0.6955069355781494, 0.08747607765195109, 0.4496760092668539, 0.6515312368984656, + 0.36913050836766426, 0.1884121970188235, 0.35484115555318807, 0.35296784884342236, + 0.25069533860349336, 0.9058873429939744, 0.7443484994879821, 0.750346435642241, + 0.7617724756232275, 0.2922607375715792, 0.5466903040314682, 0.8097090004421225, + 0.35407885946174, 0.6348456786908329, 0.4096851448355948, 0.6754014404335078, + 0.8487343259562462, 0.6503887202649459, 0.023939621711200898, 0.0697668884428575, + 0.1932051338234947, 0.07098041071523209, 0.8895455416639768, 0.5970662891404159, + 0.9672124324991637, 0.25710752222232724, 0.2512360193659343, 0.9118588060460109, + 0.12641331224828478, 0.2591745695762785, 0.5354205089164524, 0.9697500651356975, + 0.7039769817809971, 0.9674853015743734, 0.7581464456430004, 0.009967363889854308, + 0.9554403083896627, 0.6514960891704691, 0.9819054955872719, 
0.1655400926361219, + 0.6854831617362053, 0.6538109602793701, 0.29197658934366655, 0.7026261377883648, + 0.3361358393099737, 0.593058167944492, 0.3193604213031376, 0.9112331297151058, + 0.7565016778289977, 0.8148016159609519, 0.12956549959331298, 0.8840025203699886, + 0.15092088574069407, 0.20505821088972076, 0.2243079482977779, 0.8559123643624608, + 0.3470255353137828, 0.1316340364530686, 0.6434241700287737, 0.6784281820296103, + 0.20732777369293587, 0.6251808986824415, 0.6394504726870864, 0.999032394532801, + 0.7786813383845239, 0.8636701630404309, 0.0023017909877571796, 0.696449724735671, + 0.10128652819511497, 0.4003153911577081, 0.2469767716265694, 0.8768832326648365, + 0.34089952102337384, 0.11367436364895223, 0.607022940049033, 0.310423907918116, + 0.17692819881508148, 0.893849330155664, 0.4880236414430278, 0.9956660538325679, + 0.6061924103235181, 0.43946977537006016, 0.014921239872288439, 0.463398031987058, + 0.8435515709522923, 0.05922628363397964, 0.9644192036892971, 0.09735680952505932, + 0.338804944299988, 0.122083944069586, 0.5384101447575116, 0.894293825409865, + 0.7951670891521336, 0.3730764160164859, 0.20353695631133162, 0.7561530473263479, + 0.6832856935883096, 0.600988913103398, 0.35514255942741124, 0.8499665948777161, + 0.5804484005287095, 0.7105133890231672, 0.5155572582331448, 0.9580874216612449, + 0.42311501062470824, 0.13594740755997503, 0.5577114428718954, 0.02444976711511415, + 0.6073524946467592, 0.42490323282605924, 0.9560741306479802, 0.17062467593248265, + 0.8032635864461389, 0.4236136037107485, 0.46938391908058286, 0.5712751546570827, + 0.6589700800129294, 0.07547209392481724, 0.3993441495689085, 0.3406978944629723, + 0.7797752606137046, 0.7855007898427816, 0.4117897980533102, 0.6899309686736047, + 0.6962784108609973, 0.026169671983859666, 0.40177616240464586, 0.940360830179024, + 0.7321905293315297, 0.4795568133323319, 0.1987575279476046, 0.6127453916604444, + 0.757610880372048, 0.8914703620980189, 0.10720148213654712, 
0.7929011279820072, + 0.6530747492772068, 0.308240250783111, 0.3398854214559405, 0.17318325577009575, + 0.3507555425234006, 0.6115615288020042, 0.6172965474103689, 0.773900176617242, + 0.6826457756065216, 0.6383381900788276, 0.7095117173673956, 0.13989700600187094, + 0.8519068750537195, 0.2514355559461625, 0.44387467049936746, 0.08873576599160304, + 0.6307491605599468, 0.3192212749113975, 0.19105550091405066, 0.5681426912540237, + 0.4852586570071583, 0.6901510334227848, 0.14730516213349298, 0.7377793699206823, + 0.07770239636167253, 0.6353314323740415, 0.7769348461239013, 0.03146407552911468, + 0.881921416148157, 0.5196241210509628, 0.18644631162996206, 0.1800152993000519, + 0.8469011263430949, 0.2044030747533443, 0.6714715606818085, 0.2652006622562567, + 0.16588414879773827, 0.004974429525489654, 0.9421216848976225, 0.8680591840359811, + 0.47536229896418614, 0.9087132127558714, 0.36269836666060895, 0.08994196144024535, + 0.46652965119134227, 0.7635088426479022, 0.7580543424043561, 0.2814576427741803, + 0.703103211229103, 0.030089454512598146, 0.431288495782855, 0.2822540862742956, + 0.7883604392754977, 0.40414295830619484, 0.466555918451075, 0.4449073577848274, + 0.3738360598670297, 0.10771135525719733, 0.11598141083836666, 0.608694960356133, + 0.8076241092099635, 0.4556595939890059, 0.5354984218212357, 0.04529301994047097, + 0.2178671393843703, 0.6882823913952348, 0.865528980527933, 0.19956656357560643, + 0.22844588988154046, 0.46974197301841936, 0.5309853116091245, 0.6839795443783202, + 0.29595752432872535, 0.4532475799120991, 0.9284317401382437, 0.8788823041707505, + 0.2166810902369931, 0.5487203138636405, 0.05600987779157096, 0.3084775342972511, + 0.5393125299204842, 0.37240064437816334, 0.3468973654158175, 0.9350645534385649, + 0.24039614056758862, 0.7402803066693978, 0.08306562891693459, 0.28044191179432376, + 0.8657987335237997, 0.9934355754376296, 0.1270705175053224, 0.6460880715079992, + 0.9198821934593109, 0.6070568194283551, 0.6464985488505831, 
0.15197537639099556, + 0.2664357790754843, 0.9231485651377281, 0.5340418003367865, 0.9957229597354874, + 0.8319214218168568, 0.5422138048023872, 0.9699107535181725, 0.4752429518518142, + 0.05408065715452448, 0.412116834384297, 0.6195568540366302, 0.881986809712991, + 0.15550908602876878, 0.7044936352812271, 0.09920603364957725, 0.7458878830244886, + 0.6374540922492773, 0.41588511532180117, 0.3429122583047075, 0.48188295150668603, + 0.45953260031670007, 0.47601109541209097, 0.6130040691925399, 0.37050149227938656, + 0.6783163427743601, 0.997835766092076, 0.3119262852356456, 0.41049362892379715, + 0.2871220882198977, 0.08075775985242728, 0.646921181653617, 0.8765279657302582, + 0.23805860015263158, 0.712917806978227, 0.7988705300739493, 0.8923433911728404, + 0.42345599054528416, 0.18133583214691362, 0.29445384375862804, 0.877237778478381, + 0.4062427754927561, 0.39792110919390933, 0.5595455188757487, 0.9451098578780788, + 0.39714743857270696, 0.1710267066560992, 0.26824919626807686, 0.9956553743600529, + 0.9283196879255161, 0.07718521227499053, 0.4612312284809391, 0.09342894139926139, + 0.6584945035502988, 0.7224986788775738, 0.980272950501206, 0.7600669820648661, + 0.6208086249350163, 0.6086644042782733, 0.48308153203364257, 0.660326199954179, + 0.9276878850610732, 0.8633830105539054, 0.7962187452157085, 0.9194747290923341, + 0.16231806206359722, 0.9499107963424789, 0.06252948055080076, 0.886862482313068, + 0.5294739865073963, 0.7551343176832722, 0.3967203263856607, 0.3205093213479737, + 0.11273108728145032, 0.5400172007465541, 0.5463584941018298, 0.40902420190783784, + 0.34753173597357556, 0.36190947502133863, 0.8449872186646363, 0.7122210627434624, + 0.9946162402041455, 0.9132488211351393, 0.876847448826782, 0.7803556977166688, + 0.9356233374490821, 0.3252320637795516, 0.7962997602423394, 0.3947709697277736, + 0.27607928050158137, 0.9617201147257819, 0.6892430274739247, 0.5706639923472847, + 0.8567674143010484, 0.759254344754482, 0.4640661001028902, 
0.849388861265472, + 0.915484880536155, 0.6803950008264036, 0.2233794778069803, 0.9322812689951643, + 0.6255594311375979, 0.8293069490130799, 0.5363731200101739, 0.22248705406746683, + 0.03548460537717968, 0.5681856389695299, 0.22852476978776393, 0.25863174388684307, + 0.8597279049431693, 0.7032097750280445, 0.49225726322638097, 0.4614451564658706, + 0.40804943937105376, 0.7560063316576442, 0.2990920309244123, 0.36319226695497175, + 0.1658941783846568, 0.9423775873373108, 0.33575270283571046, 0.18716742670642605, + 0.42701655041668696, 0.0062520852671191784, 0.9121308724576324, 0.5179750346926351, + 0.13397622219055638, 0.41617067939349794, 0.22602489432339978, 0.8549501299106076, + 0.34313173874115677, 0.27630233769563395, 0.6506926207027762, 0.2534847525768975, + 0.9379397622811656, 0.24893248547242908, 0.25003116137255343, 0.24185977400785785, + 0.035595161608137915, 0.43731034171619265, 0.9865984103159812, 0.6692875029221403, + 0.39193623341329087, 0.8149634125086003, 0.3685145118514238, 0.3321064154658263, + 0.8821130091244994, 0.5126819098142537, 0.7521217015064541, 0.2490810176728513, + 0.7473789792510476, 0.1377191248691576, 0.8731112905545552, 0.37167076667157495, + 0.2018551847032306, 0.6391343316493395, 0.6498829810818338, 0.5095016824529556, + 0.12411368836434056, 0.6857165595366345, 0.728389597919631, 0.8384343918540044, + 0.13893416384908752, 0.051156043086339476, 0.3945522835366221, 0.7021774315180461, + 0.9664029679637711, 0.8937346328630393, 0.457178494165455, 0.21522711616193102, + 0.6336352979366736, 0.26726767288419706, 0.13628950637506665, 0.8252202343876291, + 0.8953549328626846, 0.1406626559991494, 0.9727894305213565, 0.13233188045808586, + 0.38048680651099376, 0.8619913912534997, 0.2680556820317197, 0.26289459991228925, + 0.7922081914968131, 0.8526785089625418, 0.23177886240082834, 0.5635282170289971, + 0.2942218688725433, 0.9319828382613095, 0.36720463922998925, 0.98388126028702, + 0.5648451215100978, 0.9973939542919464, 0.9896063503276095, 
0.6678758243457174, + 0.30974219900139965, 0.6138943763484822, 0.7659247094960486, 0.7327510716461801, + 0.4580874332818764, 0.7148855948300462, 0.3015210526811154, 0.3193783762708463, + 0.5729194299390246, 0.7009199288239003, 0.1498401214370051, 0.8024617709837892, + 0.6546582644003875, 0.6992573312662181, 0.6233980510220369, 0.2816126619003816, + 0.9722556402827515, 0.8089546292787112, 0.8228667190501502, 0.3965111409530997, + 0.21891778563745656, 0.13706461948658555, 0.4930115559366377, 0.08061879901104274, + 0.9723893757331308, 0.09685347646885045, 0.35444636370166327, 0.054297663446587596, + 0.2747322224417924, 0.24392971152181964, 0.18653707247554163, 0.6059851654730029, + 0.6831785302075528, 0.36331807826529416, 0.911774786154115, 0.24997575428215468, + 0.6786094473828951, 0.6104874988450785, 0.3031168390839635, 0.410700724383396, + 0.5441503499168883, 0.6307108716552837, 0.7790601468219183, 0.697505254353997, + 0.9685361200100565, 0.2817021433162583, 0.11693715636983493, 0.5124421281724756, + 0.5381636229756999, 0.2826808604940013, 0.06584110328759119, 0.5204186195482896, + 0.3163423828270894, 0.3494352418870309, 0.875103991061865, 0.008370670187697438, + 0.042207344779274125, 0.06946350667391499, 0.7652448085238555, 0.8849312566549212, + 0.21916084245243994, 0.12139915967065251, 0.6864097172319, 0.9174864218121146, + 0.707314017888608, 0.46782580051186906, 0.449272860553954, 0.6218540956306559, + 0.4832808027986517, 0.6734352802484965, 0.09801798837998055, 0.7656392694188594, + 0.5803233074672121, 0.016925723804065762, 0.4280916359177751, 0.759272753065939, + 0.3546974115594703, 0.9730373850561599, 0.2938207533589179, 0.4717204342156366, + 0.760910396938359, 0.2651220337342728, 0.5942567905334719, 0.6481821197668466, + 0.2895714547071302, 0.26823150371057913, 0.5449562468235388, 0.520914828762417, + 0.5824006220574875, 0.6943794810741687, 0.5902701522923899, 0.28597743896497085, + 0.6906386637349428, 0.41787531512303433, 0.38227786476971226, 
0.5300921568839477, + 0.2857412596573464, 0.6242493428037001, 0.15299829554197697, 0.8562897909007144, + 0.16292018821779197, 0.5840137481432885, 0.0402405580781926, 0.23787132538690514, + 0.02776599953972303, 0.11692608239388758, 0.288342139829152, 0.6706689963111256, + 0.2839947727752087, 0.7475480285762276, 0.03851000884593336, 0.10153799408455466, + 0.14982167863718432, 0.3834776944894259, 0.1975377435113611, 0.6118508790784729, + 0.9680924500451281, 0.46467210253689606, 0.09010588274027209, 0.6136328993397414, + 0.25988950283817835, 0.38455137915690174, 0.9271848868044517, 0.22389518132293573, + 0.04571433156010063, 0.4132690224056872, 0.8095602899860317, 0.2004302755831857, + 0.8245073000661697, 0.4025263860553472, 0.4360528711995313, 0.7644179539602122, + 0.5282084404936749, 0.7836857694796493, 0.5285920916618999, 0.32404378742603523, + 0.07969461980030712, 0.43825279850385856, 0.3910540496216194, 0.18601781392486605, + 0.23184017385286504, 0.2023338017648867, 0.7205492578820715, 0.46163607994538014, + 0.10564143426524919, 0.8560466828351337, 0.18314823447283957, 0.8790698403280611, + 0.7235404635750523, 0.5700427190537816, 0.663288944624978, 0.20529307657969187, + 0.025872315445884086, 0.26585233191055013, 0.49916586962692366, 0.7745665262881396, + 0.9627762879628154, 0.2063350190884421, 0.6834666446671213, 0.47480586450535844, + 0.28448716214382685, 0.18825684592312497, 0.8325226059057012, 0.6286561310639678, + 0.1087577996666701, 0.6207378740918625, 0.4104288522350794, 0.4108005415293736, + 0.9696660955122183, 0.11915598038634478, 0.8193173491910225, 0.2746599332463444, + 0.6098656218024554, 0.9292887066073673, 0.6142569905538732, 0.05766198666724043, + 0.2891271226686991, 0.7332286872225152, 0.8408570347410926, 0.12660806202788155, + 0.5489109956266367, 0.07599553849100638, 0.9426092890379096, 0.24765591006824883, + 0.08682472760964643, 0.3047969726136577, 0.1583511693504679, 0.010906917670314709, + 0.25896069702163316, 0.944522556205986, 
0.06234151012262623, 0.07594537970249937, + 0.16105869616486723, 0.45356629802200643, 0.6374632663710944, 0.3001914280312634, + 0.3597821367694215, 0.8925621967245283, 0.5858933471277528, 0.07241246235730059, + 0.5227573031421745, 0.826769449555715, 0.6226000232785439, 0.5007372389529805, + 0.6254709820295706, 0.923561320563299, 0.5797200818087839, 0.11245274382307002, + 0.8040764735441351, 0.03714647611264532, 0.05933763519542601, 0.6553880446377699, + 0.9868084819068811, 0.846601969915844, 0.9912595685257292, 0.5411967110606196, + 0.3194458019544081, 0.28503696973115, 0.940558947933651, 0.015463034936463949, + 0.9542894277531666, 0.02249221602169349, 0.3513696995287743, 0.3965583453713156, + 0.6480682314401847, 0.04238822866653824, 0.09266875642374328, 0.7134561008482744, + 0.7814274812633255, 0.09770666780296278, 0.8599989901279588, 0.971347798540542, + 0.5212471557348936, 0.31747991512880813, 0.1450807372463252, 0.65117620072487, + 0.9083013925754693, 0.4222592379453515, 0.6905458283758098, 0.01975954713367367, + 0.3018825082636548, 0.3989451876225656, 0.033855521484832285, 0.25926813158625406, + 0.5400023479033437, 0.7352796709581246, 0.2863491137150781, 0.3993791960463924, + 0.26115064828523893, 0.3261317553000693, 0.41279615227638355, 0.09280822785115939, + 0.39962864076829185, 0.0691091881928031, 0.1099761175200129, 0.37020732327571115, + 0.47322203903962057, 0.6020481442203578, 0.2913197256907183, 0.8480503658447193, + 0.8369212226762754, 0.2159265533102135, 0.2040596933892561, 0.06270244678302228, + 0.30482629522564386, 0.30300441826102287, 0.10545107458851533, 0.9884500735278379, + 0.8224121625418398, 0.9606029349645083, 0.8137241016430465, 0.12256305862616801, + 0.3994648845872275, 0.13611055611209455, 0.3738234989638656, 0.42328184892804965, + 0.9544277910360667, 0.8602725531268285, 0.708119222125779, 0.5581734247006436, + 0.6308586413059186, 0.060826386546066535, 0.621321063968908, 0.8521662217677738, + 0.4581651056279984, 0.29113686416864226, 
0.36072087688611665, 0.04325286466960776, + 0.5830232688027613, 0.36606945453901496, 0.31953469940448864, 0.4914081079298772, + 0.6417067214650192, 0.8435793516150908, 0.41203235803708327, 0.4704391766670645, + 0.7394040219384242, 0.13060880814567233, 0.17040717390092686, 0.07174560642273808, + 0.6625377359225677, 0.6025236807306683, 0.8800347546510215, 0.8617598458907858, + 0.8344530020929027, 0.34794343617049683, 0.012617908061536953, 0.24343754932350425, + 0.5029522917900464, 0.5709632194339286, 0.13859388237834847, 0.6036266706759434, + 0.6385922161244988, 0.08416762540551215, 0.9756385479332002, 0.02840231297593976, + 0.18962846029883196, 0.7663379017587776, 0.33597121615285286, 0.21157564433012366, + 0.681081775586533, 0.9639938724568192, 0.04278878227784799, 0.10497675111970428, + 0.2044066673074052, 0.7811112368170888, 0.7592405724946069, 0.9022518225287105, + 0.8772158453470777, 0.6456587085569585, 0.7197790789091134, 0.3039208514116106, + 0.5019971797043902, 0.8450484251992039, 0.31052008957605826, 0.9093877796634839, + 0.5703807373291514, 0.8825570205508473, 0.9302426324219213, 0.5410206187846938, + 0.02754354153544869, 0.3058589283856108, 0.9262969126813804, 0.8178829845848371, + 0.47457588199265455, 0.5186260420566258, 0.24145499687714422, 0.03303822455616745, + 0.3348919328070018, 0.3556239273348505, 0.7599882333173726, 0.31110488751399223, + 0.3117440864080102, 0.16944818831864428, 0.6417638090268055, 0.23236349580680038, + 0.34587506485912056, 0.971536990155157, 0.3940709548855814, 0.318828670016601, + 0.0648933258955775, 0.6832368804711757, 0.7001761872175356, 0.8929530106971526, + 0.6065172313437823, 0.7909968577662695, 0.11683051975031034, 0.7070988588976426, + 0.8471864001611398, 0.11578590196345606, 0.3780528252661348, 0.9625045931257172, + 0.638938863378801, 0.8233951066669498, 0.8385351425218545, 0.7824515461131216, + 0.5263191463937491, 0.8028713395585221, 0.5625443352568897, 0.9872417926766145, + 0.3681408361637485, 0.8846904840847047, 
0.26192715803387, 0.7632130339005606, + 0.6292201295843374, 0.8353086549751415, 0.43072615273592507, 0.6299852117578107, + 0.07623559606686936, 0.9200085390206806, 0.7842562196034487, 0.1959979896446249, + 0.016376131571457675, 0.28265288649826603, 0.6861428381734898, 0.7076968226853629, + 0.7119703873874031, 0.5306741901401327, 0.4220208590103546, 0.7947033412160596, + 0.5151485053643317, 0.3972058702127157, 0.05462822465266903, 0.9877359986860426, + 0.8798662117666064, 0.5505568449033837, 0.40553513014245557, 0.5858695579179326, + 0.08895043292653859, 0.6099264183012664, 0.008441314642171793, 0.9760493177766492, + 0.9613678249547101, 0.9441676049064263, 0.8214246003169379, 0.15584451755630535, + 0.48076835271245866, 0.6035024949020215, 0.37612498047889287, 0.5799200401584894, + 0.302084172353971, 0.9265083065612753, 0.29098565888689487, 0.24602687393655698, + 0.17426263173097645, 0.7501661287022459, 0.41162147864955323, 0.3868646027379613, + 0.1256639886534533, 0.3813662077911856, 0.9778206599687802, 0.45455850729819236, + 0.1053539762985487, 0.3862350707804897, 0.9920370708143132, 0.9459078484251585, + 0.4436734813563835, 0.8830642404597325, 0.10166965940626216, 0.4025768580854986, + 0.7772630634910311, 0.12323142023199984, 0.45824203964740673, 0.23243528127384294, + 0.6578826604427969, 0.3765290956320032, 0.1199617226242986, 0.22675296032919734, + 0.5968539305969898, 0.4005983511468866, 0.715752364949822, 0.3128140640026855, + 0.4494720523772502, 0.419991467540507, 0.2731538167729105, 0.502086594317894, + 0.3094183489498403, 0.7552780010454833, 0.8805577295149887, 0.902189146147546, + 0.742954772014594, 0.9781813303477034, 0.6363166483167275, 0.3725438257403427, + 0.5120983390279673, 0.9872115141031088, 0.790564840505096, 0.16139999953510809, + 0.5981465436960732, 0.48790510653037156, 0.22935582142949718, 0.07371614277372229, + 0.19509813591282787, 0.1840965142883928, 0.20431153425451687, 0.4969302291390366, + 0.959140169330124, 0.2683435899702894, 
0.9031893362312581, 0.3687029098463134, + 0.03620141355461626, 0.38881221139540645, 0.6753349209801678, 0.41152602910201375, + 0.0052301830419607676, 0.2863106714133702, 0.9992699446336895, 0.5659708014679586, + 0.8456850748575278, 0.7421248690482918, 0.5586035143499297, 0.8794148465102803, + 0.45707397600309463, 0.5541658388396462, 0.16386096814897566, 0.6592858371803196, + 0.6564806733580074, 0.8010196200791545, 0.6279639188224753, 0.2031856641673151, + 0.6458557703550399, 0.9151545829474347, 0.8769786758299422, 0.6799180659849733, + 0.6904471817906085, 0.23228805280446052, 0.6000224856943689, 0.3033046816911986, + 0.715991163634202, 0.32302123066508803, 0.5859542166483501, 0.5390683057467652, + 0.6875983398917556, 0.37324507167224097, 0.8686317365735977, 0.5818381906408095, + 0.7710230038037504, 0.8764752631082748, 0.515242166767882, 0.12610688056788477, + 0.3145630456262617, 0.13012984233586966, 0.7414249298066473, 0.815210289604652, + 0.90905210927758, 0.40423585066392786, 0.142438698726981, 0.59167598593775, + 0.6689854076381082, 0.3206711453776283, 0.7272396974165417, 0.8345578978997291, + 0.07672547710487265, 0.6434193235607697, 0.3087203445341079, 0.651369014143892, + 0.46951534551298835, 0.30502054553689417, 0.8437561055455536, 0.7924896011797614, + 0.3664547599672989, 0.8317041294688604, 0.9665712425143648, 0.0528305184676503, + 0.42771418724822874, 0.5206491406449003, 0.12193005095900578, 0.4697342463508719, + 0.09725307745032208, 0.35966530333572944, 0.7915871678529378, 0.21241115265506227, + 0.5366875174680859, 0.25023860092547445, 0.421005615630762, 0.4256991973898384, + 0.6986680430026889, 0.4710757414192256, 0.9811204098637973, 0.2583935201252616, + 0.9095401812853682, 0.6937367869104578, 0.37541234022892944, 0.12262670586598434, + 0.994332061313041, 0.7990247671002593, 0.10245145295586389, 0.08550967457652403, + 0.10042039847600326, 0.40650226458438943, 0.3226490169437305, 0.7383812349137626, + 0.5422736648704087, 0.2049997570656822, 
0.9508231734138662, 0.5776571135926977, + 0.9805809124988424, 0.5965730283331564, 0.18099114349409262, 0.5201657020789454, + 0.3114135281665332, 0.7884053455933022, 0.21162100753327995, 0.6096628526066536, + 0.7786818515772684, 0.6306164268422206, 0.6526082550782316, 0.023776793523503148, + 0.727388103720703, 0.9474420794921771, 0.003255341526218092, 0.21311983991759342, + 0.2854201373436912, 0.025128788509482858, 0.3997306435338569, 0.12942599790128517, + 0.4193927666504138, 0.13846417764974062, 0.9002683982969045, 0.4192267650843825, + 0.10688797317660048, 0.8968779315227644, 0.09357801381969166, 0.593435488229436, + 0.1615617507037027, 0.3969104868778146, 0.2058168097911255, 0.8300275718594782, + 0.7133536924772892, 0.8099949020241295, 0.5971023657788384, 0.5047962655945608, + 0.4948308954480757, 0.7002644892137885, 0.6456106033965258, 0.3756926613839967, + 0.4182245496265845, 0.37524631154075816, 0.4190639562169405, 0.7083014940847334, + 0.4869688244503334, 0.1829384027232731, 0.05683992393624493, 0.6721339534854102, + 0.5140657078896878, 0.0009040149477490544, 0.7280619391959026, 0.3712765579804925, + 0.3807863333053648, 0.06030370411166375, 0.7489312687602148, 0.13619860548171914, + 0.30491461196827496, 0.15709812233775566, 0.7427612714485209, 0.4200114750902272, + 0.47452414939913423, 0.42721054046010754, 0.512269991709128, 0.7839968756026318, + 0.5189604172493596, 0.9678649031766825, 0.7694430117038082, 0.9661851659130114, + 0.11724320877673322, 0.16555987123424643, 0.7659982845287457, 0.8397851933122642, + 0.8308141524759082, 0.974514522674073, 0.5888823552385681, 0.6015497511834978, + 0.33766940452199956, 0.5172169007025784, 0.1151684664937851, 0.09917792697238204, + 0.5060481494154199, 0.6207034439235293, 0.3015730812304874, 0.7350401881896099, + 0.8745082574081339, 0.12569113600408566, 0.33003765531415297, 0.9538674588418777, + 0.16196141155248933, 0.9713885889181488, 0.8649598840900669, 0.8189622870992674, + 0.5981536968459639, 0.3375690293755511, 
0.15780833091803836, 0.4688922374638951, + 0.6809653067294745, 0.8099489346942302, 0.9397818624567011, 0.2208160269515801, + 0.13731709291694183, 0.03399745247583141, 0.007770520790769564, 0.0136079750537067, + 0.08667409124482761, 0.5404703786188497, 0.5275884784108503, 0.6220797998515979, + 0.8949268584833554, 0.06220240112376141, 0.4547565499702113, 0.5616755944201894, + 0.456214811192033, 0.14549587585697776, 0.15705333126784715, 0.8513521876352311, + 0.8555707570987535, 0.6777777182584442, 0.5753792445417174, 0.8092710122108979, + 0.43039805717631474, 0.10813170001182448, 0.4061835414413786, 0.7620290676832067, + 0.9710955949401937, 0.848538152922535, 0.4388449636077072, 0.07822048979791274, + 0.007685888257720652, 0.032468281577470615, 0.04676620436150869, 0.7547309136235593, + 0.8557161437216616, 0.2578097946169817, 0.4414396192494072, 0.40608624202104315, + 0.14065329873717702, 0.19644810328555762, 0.7695470456140437, 0.13664420438195413, + 0.9931986787283871, 0.9715427572974837, 0.9915401647749037, 0.45657463234448803, + 0.26859395150418963, 0.28062277894225207, 0.7828418607966642, 0.8739985683364804, + 0.08575203567051937, 0.41551736379629955, 0.6065216383819657, 0.06172263793491739, + 0.17010506879235443, 0.8864487706687101, 0.6171103631418245, 0.7628600111470007, + 0.210788208367501, 0.5956951636240592, 0.5256402517039437, 0.04064796753551947, + 0.6579871047194946, 0.3968511365349989, 0.6603727903769271, 0.006700832888891561, + 0.5821682715045241, 0.22546133794562584, 0.734471981011275, 0.5390135191250676, + 0.5775081355839604, 0.38847698857575796, 0.8453847336741982, 0.4658590048775938, + 0.941477806709561, 0.2975637512420789, 0.5945939092960826, 0.4070850205548726, + 0.8249067365327899, 0.365635140634358, 0.1403827626894042, 0.45392877256083153, + 0.9623768954201665, 0.24757332720679925, 0.5574796800533819, 0.736642339096794, + 0.72489895136655, 0.7478504406583955, 0.4001746826211111, 0.28816400083168947, + 0.18556235721551018, 0.23141442594983586, 
0.3790518688428455, 0.9732520483918374, + 0.25647077127159446, 0.2224949006171092, 0.2831329493595981, 0.011121202826457122, + 0.6181425590910106, 0.43467899935186194, 0.9187154389042755, 0.17237356356419375, + 0.18633908703966862, 0.4458063841849417, 0.3912464854881712, 0.1799738696676394, + 0.425829975954919, 0.939064828155414, 0.13128887350533702, 0.9652928261801049, + 0.3915637445603185, 0.3649603875072934, 0.7271590688509747, 0.13534767083588983, + 0.08817532104193404, 0.9364372076271191, 0.18814363443177118, 0.727429761686807, + 0.9932277290945939, 0.8204972337366694, 0.6334548375315132, 0.628648558317215, + 0.6263936559908078, 0.7832969276419149, 0.8028167340995543, 0.045674747125095005, + 0.7943958091580005, 0.30107941329116483, 0.12030684478676545, 0.8668632578967099, + 0.3626401220761749, 0.07416698248426812, 0.5871969658905604, 0.6456015981458945, + 0.813032797809856, 0.6463180884774861, 0.7425872273440208, 0.8178583750516089, + 0.547200975814759, 0.6297972581993809, 0.6253475045992721, 0.8544965868004621, + 0.040157587010636986, 0.022012759569749574, 0.5641656956003043, 0.8060041308551817, + 0.15623669049953204, 0.7650688260591991, 0.9561747603815457, 0.9936242403254489, + 0.17832816620466918, 0.5425655964854472, 0.006858244863806817, 0.6331805004320914, + 0.4377186829104204, 0.9432754160853529, 0.8283466290492493, 0.1611242045057537, + 0.7127304224124086, 0.2656178802989031, 0.10526616849523729, 0.003916817073540968, + 0.36758966750883704, 0.16178653008055832, 0.205410845759337, 0.34398519008443884, + 0.03173678051946971, 0.9685580908102037, 0.11752290040258584, 0.20097463649952418, + 0.01699043671132916, 0.8259350267817422, 0.4270421810657322, 0.002046184804222584, + 0.2893358209362099, 0.4032794151614779, 0.1551112824129457, 0.8879493926574799, + 0.8828050518790767, 0.3910640017637924, 0.28207238476046215, 0.1453512595691372, + 0.6329104504723897, 0.9356594893434306, 0.8830201378973107, 0.32787900254361746, + 0.7524218897758113, 0.3021357790059974, 
0.18880418288444234, 0.8252838539655625, + 0.5677734267472772, 0.29468100398338315, 0.6644797868172837, 0.2345284390740826, + 0.7833541222696632, 0.4622428710965072, 0.8180356085413665, 0.12176541112976969, + 0.584843100502246, 0.2076456133040948, 0.43119125370999967, 0.028992135873369018, + 0.8390508629446168, 0.30494489048984996, 0.6280335683239568, 0.05307911693569578, + 0.8573102056913785, 0.6401741471794711, 0.27379230359133766, 0.645161117163579, + 0.9104097310863962, 0.3315222698376239, 0.5311818367945779, 0.8652131787064083, + 0.05266485162702483, 0.9801456049196392, 0.8704025678050438, 0.4548102891048551, + 0.7721429272543505, 0.3241649064877178, 0.31469187001564736, 0.26334638977412717, + 0.03186660049441992, 0.8468763130656728, 0.19742761055665636, 0.34570779894698056, + 0.6323455327655063, 0.6875167796567199, 0.12259887929079005, 0.0363323122402297, + 0.939543188238221, 0.5489445325725613, 0.3518013616113064, 0.14453563213451215, + 0.5645100414748986, 0.2060199151787191, 0.28922738636034717, 0.5886677026646538, + 0.6672156439868581, 0.8099904763954191, 0.9395220274019341, 0.31919505562810946, + 0.5667026842767869, 0.19415684121677623, 0.4903622602229071, 0.5434191206814979, + 0.39852215448310746, 0.7065311902984609, 0.6081023293754982, 0.3592801531792621, + 0.12116546645226811, 0.5914790317147564, 0.26251860772831725, 0.14464917158931845, + 0.5297961258930557, 0.6478751446227324, 0.3604381026258262, 0.4107232060499867, + 0.3953008268223248, 0.5323642799386871, 0.9563146192752506, 0.7587602314961697, + 0.00027251187538357424, 0.121153316398938, 0.9594212052512119, 0.22759321662521337, + 0.4565702003992079, 0.6702201922004807, 0.36616708105055973, 0.991695561201666, + 0.6951684959011308, 0.6190500620335893, 0.21584536965123813, 0.9312075516725733, + 0.15601909764491273, 0.5179008075031754, 0.8255063490752199, 0.13494994051878462, + 0.9642310090290442, 0.8272333261653693, 0.6366021917962937, 0.6272395345063634, + 0.3602636410876908, 0.940511749873045, 
0.0009212294184705039, 0.4377848361798714, + 0.02176286603887012, 0.7769672962061562, 0.782387956693346, 0.46341576437396803, + 0.656171249198167, 0.885971474612103, 0.7242765021053905, 0.08290761159813875, + 0.06457500605283284, 0.15185945222172026, 0.08498716445225962, 0.14438083935346202, + 0.04711043670053583, 0.6778996368590091, 0.12432456118167912, 0.7563736054661448, + 0.873973499785114, 0.1555684810832395, 0.34224996347013026, 0.36172841570411396, + 0.972676126220914, 0.49547019565160233, 0.4529244105540434, 0.9962839176328748, + 0.800125119953785, 0.1738920376037768, 0.18284623538536637, 0.7190213390838295, + 0.06399111697888138, 0.43131888124822604, 0.3345685233817419, 0.6918408961440536, + 0.09191401030276525, 0.6094467690319858, 0.3155152765543553, 0.8511304133976706, + 0.3775476493148492, 0.37718085648958466, 0.5018890912409433, 0.2716755222971863, + 0.9214684748828923, 0.8989643462692504, 0.4649826286370933, 0.11209393856729333, + 0.22266178357086241, 0.7288050076897057, 0.44885240894498246, 0.036118231365422915, + 0.8853099398791242, 0.30371015324266315, 0.6325498537175843, 0.9268946284372944, + 0.574772224567346, 0.9628607169992635, 0.8551770482833655, 0.008154746481042974, + 0.7077596087419412, 0.7718971969298235, 0.17830719702486064, 0.8533683250297552, + 0.30414568345346915, 0.07114972057433411, 0.2633680005183603, 0.23447101218581512, + 0.6559142128080728, 0.3462490453925803, 0.2572834998233635, 0.934642244682551, + 0.19259723052883282, 0.25112809867144714, 0.719306929764853, 0.046652277704817946, + 0.8048231508977957, 0.4035945094394259, 0.754843016228095, 0.44265447223390053, + 0.24163832945686836, 0.3508978652304121, 0.9487337341165799, 0.19954028994243, + 0.7753665058367634, 0.1261038361438086, 0.83914023410398, 0.40628145534635696, + 0.8904822604676276, 0.2052272748293289, 0.46178436950453083, 0.591897140583733, + 0.03771394287310881, 0.34487766607126213, 0.41897458442963253, 0.9070545816314942, + 0.5885735157093085, 0.030665400969038248, 
0.5685906497268208, 0.25636706329293135, + 0.16236531729561277, 0.3672592843529242, 0.26167612287900055, 0.9198553184427375, + 0.5501237881758122, 0.9076478851904309, 0.7175677146126508, 0.6348659851182928, + 0.738882198702744, 0.053165947486076504, 0.07685485583976492, 0.4170544035053654, + 0.7530172238532147, 0.3824441052690454, 0.13183659456583174, 0.6946075275422765, + 0.5961802083489254, 0.0692154179454898, 0.28199758977763356, 0.31683855437220854, + 0.538331444827026, 0.8005022089671784, 0.6995404503460174, 0.2876084865438928, + 0.6803895260833673, 0.9614047085146076, 0.4217842196495911, 0.7451619152592427, + 0.022636969868852996, 0.09071007475742177, 0.8257945290556595, 0.8672837704089812, + 0.9135872070709495, 0.4681264936047449, 0.5697319106939637, 0.6892971196577914, + 0.8251242176940218, 0.24268165740736847, 0.08918571417302146, 0.40770710749949846, + 0.00887880468457436, 0.22222523143189055, 0.08802297896712996, 0.04229681129423801, + 0.27951120411954833, 0.3764968275482389, 0.7686805463597162, 0.4401306364653953, + 0.4573791871848498, 0.9550081226207489, 0.7591288480403849, 0.4915536774744538, + 0.205604153132955, 0.134868882715783, 0.48918377446252714, 0.7090594014668168, + 0.6843626683545188, 0.08242553746318204, 0.4642178367053419, 0.23937677032495597, + 0.7309833110928308, 0.6956405719072711, 0.8535413315180679, 0.5446728940069856, + 0.08533491880607769, 0.3944004625683343, 0.7994991032172084, 0.2164477184644662, + 0.7135783586336566, 0.4884831950340768, 0.08373053917017537, 0.5917610478476284, + 0.6674922873440412, 0.35014975370696755, 0.49517124851287797, 0.6613573248279636, + 0.3802171804990466, 0.11391385038906288, 0.8544333398683165, 0.8983031966127826, + 0.6680778169363261, 0.23296680235267564, 0.7980421992414213, 0.513600541684651, + 0.525730009757563, 0.37865528098107526, 0.7443857543743437, 0.32898043690749845, + 0.9267339237232587, 0.3916316623498167, 0.21583244809116986, 0.6108933063764448, + 0.12190382848315895, 0.05795075615419576, 
0.18585339493384467, 0.4265454993211716, + 0.7841012401338366, 0.33272396587502584, 0.8534312979592312, 0.6009275803559111, + 0.23578725703009518, 0.8324782123117312, 0.7826363192089726, 0.36174336024410503, + 0.46230743039848843, 0.47376876037061144, 0.3678668010995072, 0.4017431618223575, + 0.07840386621932616, 0.9279892070050552, 0.42793000217781685, 0.9587557377816119, + 0.5263942942488995, 0.22350792178613854, 0.052445741452695316, 0.9908397138466685, + 0.2500980404942075, 0.6829265211588706, 0.11852451787498941, 0.3013589198082067, + 0.8329930182716929, 0.760481752519026, 0.9505592437525293, 0.08324982179541662, + 0.8163606057049256, 0.4311119312995815, 0.0023911672020161534, 0.5746291987093409, + 0.22315561753963054, 0.5084757985483405, 0.9426895664145185, 0.3709217647432501, + 0.5826098985446154, 0.23198610325080737, 0.943977046006374, 0.47921249737199123, + 0.4412200057857809, 0.9476458707861092, 0.045738384280730515, 0.7440122966208216, + 0.5587358512482475, 0.6867640944542505, 0.5441339218722309, 0.22356366387145854, + 0.7911120734356646, 0.3726953927021709, 0.018267589919367833, 0.04969670279331795, + 0.30194420494378316, 0.7617935659252226, 0.03786146595146933, 0.8032907384522256, + 0.5148928437192715, 0.026497693642896003, 0.7562626268164586, 0.24084937566731512, + 0.9900162673871896, 0.5770596352891295, 0.37977057043713136, 0.8054207224094382, + 0.6449681468487811, 0.47661518558967464, 0.6054630496227391, 0.8585937854748453, + 0.010922473849501069, 0.8801276229682918, 0.3842900661568762, 0.302059066968726, + 0.5282769570892245, 0.35696922171147905, 0.45964440944787543, 0.43105541008311343, + 0.266541068882056, 0.1773350480081508, 0.625605513254196, 0.07627923477584031, + 0.35297661206462905, 0.06399992814421374, 0.8012573029771279, 0.5988537160717968, + 0.775179820309573, 0.7868373451818418, 0.5288757403368023, 0.6927594213461519, + 0.9076439953515621, 0.7746089955402077, 0.47985422368325525, 0.5547764500676566, + 0.7754494052083825, 
0.7423050553637645, 0.8712909935000541, 0.4295708781702815, + 0.9914003267663137, 0.5892289698919039, 0.07275209414974826, 0.26120918587482855, + 0.1569674079759794, 0.7865373097173526, 0.4572516078945914, 0.6816153429914849, + 0.4239542214959864, 0.891339465192573, 0.9220303240191849, 0.8704793332059096, + 0.8311150077207858, 0.6768975025540995, 0.40411864967994826, 0.9459818123797102, + 0.8453923354042879, 0.05854081503333863, 0.1558983828448729, 0.5223476813940962, + 0.802904691532934, 0.3459328047581237, 0.23543281492526158, 0.17112892107419742, + 0.09439383621100295, 0.015444498324765465, 0.9349297310812379, 0.3205826664760323, + 0.7639851607294579, 0.904151529210235, 0.43898243616850585, 0.8224805696304519, + 0.6096093745024677, 0.07622355975099515, 0.6860819926917638, 0.32974242461211856, + 0.2632632314535682, 0.7772794526938429, 0.20396662442736302, 0.05717543744910236, + 0.9521053962218727, 0.819668916988034, 0.009412243942937826, 0.674833232923555, + 0.5595359880184179, 0.08426763215952948, 0.1845440135798554, 0.17549086921303902, + 0.5671045939097512, 0.6947564656008464, 0.935433974251328, 0.06876677844106338, + 0.716142997612416, 0.8335624602169247, 0.751900542744193, 0.21008292822787822, + 0.5314087389791223, 0.9539426568404354, 0.49432460498496733, 0.7662805473894664, + 0.12067300248409352, 0.9490060225911943, 0.38129757160028743, 0.3304637622494121, + 0.6923925118449249, 0.6763837818953047, 0.31376603445089246, 0.5500136363506121, + 0.9695662573635561, 0.7969621201873063, 0.6622009734643651, 0.9208935437033601, + 0.6345354077323916, 0.5539869805621622, 0.8271605574226225, 0.8381378777363601, + 0.6482903130871641, 0.9121341296999985, 0.01877471354184357, 0.04333833299240708, + 0.29663301105482776, 0.7826167619606251, 0.6260000064979128, 0.022931176394607178, + 0.4702176111712024, 0.34464613773622854, 0.5622233704239853, 0.9308045897435863, + 0.17052869748875965, 0.5380218062477551, 0.8575389690788897, 0.16184626778171896, + 0.19261265882286527, 
0.415940048096498, 0.10152831619720848, 0.2557631049165596, + 0.5248283755391164, 0.5727453970128159, 0.8788143445646042, 0.2959781503580521, + 0.43549633487840866, 0.3609061940005751, 0.1909449912841873, 0.5392619410749471, + 0.9567071103280261, 0.30935584991961085, 0.6105278507130479, 0.11064377140205917, + 0.31109553996540673, 0.04125403009092443, 0.34873047660954803, 0.16387727336452274, + 0.4483096419197691, 0.35998169886965325, 0.8856564503011035, 0.6129385575064529, + }; + float ret[8 * 24 * 24 * 1] = { + 0.5062321930371455, 0.32798915578871457, 0.5961646765278394, 0.8617070783970479, + 0.5119373011648897, 0.048024524076240915, 0.23214736441672879, 0.022538554986683446, + 0.6778444052900103, 0.15032959905073096, 0.5500250141965424, 0.17113763529559678, + 0.656904861049231, 0.19596667507728938, 0.368244176122847, 0.8691059710553981, + 0.8464938643825788, 0.4477211400459462, 0.5681620999676735, 0.5442100896841062, + 0.45368229567875673, 0.32546382920612726, 0.18083321574168632, 0.1750322980444311, + 0.6315210221145376, 0.291527227581442, 0.05240995759120004, 0.24235809909562644, + 0.5248510042885183, 0.8444330870217652, 0.06953102068444816, 0.5862038697632712, + 0.8055402660428673, 0.32368384028646024, 0.8548542644341387, 0.21770747056725892, + 0.3311304238509687, 0.2913804661242798, 0.5967050302041854, 0.6869036787651093, + 0.6298993086675535, 0.5342681029051217, 0.3675795216292139, 0.1750638889243652, + 0.1964814553063049, 0.6605207614470802, 0.7789210272606384, 0.7780470338595302, + 0.7160454591867403, 0.6301114551776474, 0.10752566733413593, 0.673119869792655, + 0.9373805013424287, 0.9178365192390866, 0.6325325250871165, 0.4247316161331426, + 0.3801992968018262, 0.8945772418220316, 0.7875543669721298, 0.8539699115784273, + 0.2777908194509292, 0.05650104741824036, 0.831507030885315, 0.585983808212166, + 0.9408480852535616, 0.949221458223217, 0.6494317804282785, 0.22667549478928184, + 0.9867749341169173, 0.3449374672076423, 0.009603964019212485, 
0.2966442120522559, + 0.9140488516957659, 0.15654966719968966, 0.5479895146896403, 0.27964020188818406, + 0.5875175857991517, 0.7674359344871401, 0.2662078550688938, 0.601592864568559, + 0.6298513101287228, 0.0007956317020597625, 0.447670613189198, 0.30214703733479853, + 0.7095682335989801, 0.19776718502242907, 0.42373907375609765, 0.9964465820586225, + 0.5657844698280747, 0.9185125060473368, 0.48818414990794645, 0.697598606946429, + 0.23424318111262687, 0.011707822252632338, 0.7337824717352953, 0.5484898408514123, + 0.9929544517415723, 0.6792158760625665, 0.7816028696554761, 0.2405083444025402, + 0.9948803917035984, 0.49246398759410004, 0.5942569141797983, 0.7385481148993642, + 0.4338463026994005, 0.5692156943634276, 0.5157300177718895, 0.9788902748230957, + 0.06944950117329118, 0.7440570791479629, 0.20611744118485498, 0.829837881152689, + 0.8641965625879906, 0.3754093562128916, 0.5140923803643517, 0.01318160757340514, + 0.38912810288926725, 0.7269493917324703, 0.9667767963135668, 0.5146438339988334, + 0.1746242957861096, 0.22260117687528302, 0.5147147658513315, 0.5133063050966878, + 0.6037766881886195, 0.3032343397374424, 0.1558403376681612, 0.19823611829999466, + 0.24791261176807733, 0.06511786233551131, 0.3331160374011095, 0.8764778347789234, + 0.8987904993981825, 0.8850320999146167, 0.0028995817761239273, 0.6114011938310822, + 0.12891905917964652, 0.08841529382295288, 0.8913889051181442, 0.4231141187500551, + 0.5682157758555647, 0.407222042568696, 0.3964456887714989, 0.9083944497303571, + 0.06771895423049012, 0.4659372804769496, 0.12192491449923182, 0.5590082975575247, + 0.7224902246551163, 0.39659001433585184, 0.9959804799025049, 0.945510847494734, + 0.5069062072334803, 0.18366862681431562, 0.6477785312377683, 0.5047962857729253, + 0.12702932873653094, 0.5937694806711893, 0.41217223605167486, 0.586260286286227, + 0.3920397226267349, 0.3009089562278783, 0.11545124344136348, 0.5638317723887502, + 0.03331899478754563, 0.9197389199405746, 0.025063552422119995, 
0.08206586723651477, + 0.561096691084764, 0.32084876537534424, 0.9737123506773547, 0.5557153417628414, + 0.33548532198689474, 0.32221438944414504, 0.7714228952988429, 0.227449953513027, + 0.505078000677675, 0.8363531865621979, 0.9104996077916944, 0.7467391082732583, + 0.6486863954134673, 0.7840496913389229, 0.8405166909749249, 0.8967567419124038, + 0.013132746284066021, 0.3492828320186896, 0.2785912741702441, 0.505561232182855, + 0.4916777884425205, 0.6784144755559384, 0.5602904610155944, 0.2556291571530711, + 0.5058833835796643, 0.8065225079160115, 0.4146453573569093, 0.6165567410355937, + 0.3812072863001894, 0.07838633522280025, 0.24554517496324435, 0.06732991743024797, + 0.6172542861800441, 0.012086029165487733, 0.06664288136003349, 0.7043239742078187, + 0.9123664013856383, 0.5335504894308023, 0.08698653277464463, 0.8290196080564468, + 0.8834656717225267, 0.7939247877872073, 0.8884737365869878, 0.4219642380552622, + 0.3534603068640917, 0.8494470631397504, 0.867447552664446, 0.7042030090952853, + 0.7030967670548717, 0.3633266332102917, 0.7912923465426464, 0.2846016100557003, + 0.7669181884111332, 0.1336003148255347, 0.8551305090127473, 0.6448324422438613, + 0.5692776375393788, 0.6125267654644435, 0.2860188283121068, 0.49101502666954544, + 0.7802334407422508, 0.621595999177262, 0.5264555414863354, 0.06256813978886833, + 0.5777567913293464, 0.2185899920497919, 0.5499062346038069, 0.09727935441077395, + 0.9154994096446494, 0.5410406629503918, 0.6478454956274339, 0.41913577704769356, + 0.892374352838081, 0.5878433926092157, 0.9247681810220608, 0.7574161009131323, + 0.40387819954296333, 0.9955865183599879, 0.6498057102561908, 0.23720161272280726, + 0.864962875216823, 0.9744249317817247, 0.1185707073275365, 0.05405958997809279, + 0.21350815438098536, 0.3987264207477995, 0.1901865924242161, 0.8986424402957333, + 0.42756234012515826, 0.5624195702180508, 0.9532024381859805, 0.9331967234220007, + 0.8326262505350779, 0.22204330543949213, 0.5683570413885555, 
0.08080650468291284, + 0.7404946579897478, 0.31863905911250545, 0.6545183726954837, 0.9473683813047713, + 0.8142567915681516, 0.14552167710658281, 0.41939814504849826, 0.7794508592612299, + 0.16005597528354443, 0.06867044382609655, 0.4633637274837288, 0.9395113600474436, + 0.002398516729492317, 0.9331948896762047, 0.333821020943754, 0.0870242810975258, + 0.04707664181499216, 0.890444754645273, 0.990579393015404, 0.38653468246552036, + 0.8350018469307008, 0.14186236169840283, 0.5069963680419538, 0.02949034420068819, + 0.9045393529914117, 0.6776465708902875, 0.5951058678362742, 0.16017357369287422, + 0.06829755903301449, 0.22104222238956384, 0.5781531354968936, 0.07078049523276742, + 0.7611775873389636, 0.2506638620506646, 0.23424450010646203, 0.28042226030744977, + 0.4967031301230611, 0.030155802498213036, 0.8417135739779523, 0.4403787816993532, + 0.3194871756596648, 0.024408552447829357, 0.2059121031814014, 0.3304385738990716, + 0.5797427697765347, 0.1488101415168943, 0.07609937614156859, 0.5251133458576794, + 0.38618038104356356, 0.19176610098489089, 0.9987200933842041, 0.3747523857547992, + 0.7577922282660177, 0.13581872016346808, 0.41112542292513443, 0.3692939492355587, + 0.9088817938289541, 0.7068660497692852, 0.06394517786397358, 0.1083252676531502, + 0.1808100870922431, 0.5183031556872817, 0.35323756937224593, 0.5759670034306098, + 0.12944299733834008, 0.9448739674356532, 0.8143588597980085, 0.754802608217618, + 0.9916004739452933, 0.5266178440345204, 0.3778661306592547, 0.6762089130655262, + 0.6609729192353024, 0.657458949700941, 0.6666661408928232, 0.5718295386522686, + 0.815902395364949, 0.27572360372175064, 0.8380770893404555, 0.4430394275199342, + 0.9035539285337746, 0.04305770944290821, 0.0951941872821771, 0.9609676535237013, + 0.7882514248825381, 0.19512411644583338, 0.4947849919452858, 0.6098697295924015, + 0.8027845561907938, 0.3133522370325581, 0.4510194566927449, 0.8428718966430689, + 0.011938006691672332, 0.588867003240751, 0.6690157767048753, 
0.4177212705525565, + 0.24674398716514967, 0.7417180449599303, 0.1631995066554942, 0.20262693904300233, + 0.2889357295845064, 0.25740925871269715, 0.6363896486385333, 0.6631190151834848, + 0.20898391694798513, 0.4655689891239263, 0.223154251464165, 0.6477665820136516, + 0.4813056620640841, 0.8843717058980306, 0.6943907285971115, 0.7994117260598272, + 0.1978499897603565, 0.17401412776352376, 0.47091902939013897, 0.3008107310922036, + 0.125356561257992, 0.3362214717360684, 0.008551259225046404, 0.39550964281789236, + 0.8838122008258186, 0.33856044053751155, 0.45407795863420086, 0.4242202694122168, + 0.5423685678418622, 0.558757767842896, 0.09995937937290533, 0.3246315609823286, + 0.6142558040977343, 0.9805749745475485, 0.4657660405624161, 0.9279995363145068, + 0.6559071400579842, 0.2666869937974796, 0.9408718356383589, 0.3545865805029773, + 0.45232948292420505, 0.038505613707505826, 0.00019750497073367512, 0.8737664246958026, + 0.43338860317926897, 0.9723377963347053, 0.6733478271906433, 0.8521676579248147, + 0.7992002524821615, 0.7134241267734911, 0.998215305453406, 0.13173821749133452, + 0.6230927805896207, 0.16597961720329535, 0.5667854438476946, 0.5600991731548618, + 0.7391743889484617, 0.8706462935030473, 0.6023748393452749, 0.3939117403888631, + 0.0179925828091827, 0.4629518715296074, 0.19687388319715216, 0.9137799213875131, + 0.1811662494483287, 0.7822154412029618, 0.23591318340225398, 0.0073897600933453544, + 0.7601835361487541, 0.04661639551634078, 0.47412956237438675, 0.8348234008595401, + 0.9694614871742564, 0.0021255061100916572, 0.3294672766798996, 0.35964304759653043, + 0.6252163536877482, 0.7291002031297731, 0.23755815280234072, 0.9573553281440201, + 0.35208871653768337, 0.12429876607920087, 0.4238129988973137, 0.42030861325609814, + 0.7668992868698534, 0.38672467591564863, 0.6167188625787685, 0.671182673030282, + 0.665041879014701, 0.5979076099092636, 0.08434169920676948, 0.05820304772467022, + 0.6223318283529414, 0.9319341880648322, 
0.1563432403380529, 0.9138723236340262, + 0.7996797512923042, 0.4900864446713954, 0.017416089288305514, 0.49040996322083463, + 0.721019678050312, 0.5566481270821583, 0.9257258344448913, 0.030456107481957595, + 0.9633633518879525, 0.025420652239322328, 0.7106922196820691, 0.02782056144663425, + 0.5784283306561472, 0.7642952738653159, 0.49634054661506544, 0.3124518393246003, + 0.2427587464131412, 0.6298184514564478, 0.7423785073124218, 0.37271871104187015, + 0.2722751335358491, 0.2666776013490191, 0.5891324834913104, 0.6876742719251587, + 0.7955883463128232, 0.6707296977799054, 0.9496618100588802, 0.7868528046712148, + 0.7701224409391159, 0.7508487281409736, 0.013211049081081838, 0.10798949667513813, + 0.8090270633430225, 0.048012855163304335, 0.09028922573374432, 0.035025503671681, + 0.1919225111223416, 0.5650389680077842, 0.38354264470450605, 0.046771281245718055, + 0.862924116881804, 0.9649825973770343, 0.5043943142222184, 0.8715482370863882, + 0.26597899853303353, 0.8270579811608609, 0.1142848962533316, 0.4224047306507669, + 0.1012610270560822, 0.8223568939055197, 0.7167013789334914, 0.9315114766412789, + 0.13538563056707842, 0.1971618987154049, 0.1346229767296998, 0.7413487911805234, + 0.8335779236442179, 0.5523076600264788, 0.17805699078916626, 0.9749964559656057, + 0.6548616850045488, 0.6408638652632012, 0.05243142084756114, 0.41338426132755013, + 0.6577911569645122, 0.5218993909847612, 0.8215042477853458, 0.26000525222244186, + 0.97323011169599, 0.17753901458016086, 0.5995954962734206, 0.7072822896238021, + 0.26560859373579837, 0.25711211095662156, 0.07508148094639788, 0.4665311792834703, + 0.8382346611355168, 0.912159129225236, 0.18294092199868883, 0.5446560489946072, + 0.7656095966972155, 0.7678502522889049, 0.015170391334308642, 0.29784029213417207, + 0.8387694383482851, 0.46689527632178685, 0.36712996103456175, 0.019326900402604652, + 0.3426577641914963, 0.4488032775872126, 0.12123730305165703, 0.16833207718529186, + 0.0009628642460063341, 
0.961297092575098, 0.6197070762250381, 0.9422212994559002, + 0.46566024250211124, 0.765393556661448, 0.6389778787947291, 0.4565768368332409, + 0.9670253688068792, 0.21973766319640908, 0.1670312116243018, 0.8981162893436722, + 0.839205738663824, 0.7745179886765066, 0.30909292667742505, 0.04511559705068158, + 0.9825690351992592, 0.004746150572906838, 0.5123475897899985, 0.80432432962385, + 0.26105396874260267, 0.16816967722507736, 0.35652723083069127, 0.7015377869256973, + 0.07821081337514191, 0.18523471563017568, 0.8343612489445518, 0.7959091927132941, + 0.7188927625558327, 0.9287626078448632, 0.09973920754860399, 0.3463778089006021, + 0.33659796153984745, 0.921845854796007, 0.00950593702181235, 0.06469285813398706, + 0.25338043680754496, 0.5238467611159003, 0.3167593672722743, 0.09329128287229482, + 0.6484704059568994, 0.9061324833635789, 0.48944359249477554, 0.8886033139636679, + 0.5775004603975735, 0.7994597749363578, 0.2685524226371466, 0.47423484982081954, + 0.9478611351812041, 0.24263144009029924, 0.453565431301858, 0.42525493803037606, + 0.11490633583693022, 0.20757224400351415, 0.5868995409837178, 0.5205919982656838, + 0.34503754603792125, 0.5987197103905514, 0.6868505593330014, 0.7638415147641093, + 0.8267185942940226, 0.6625091189429029, 0.6693741441502425, 0.38345424336010436, + 0.7081765066435062, 0.42905141236087785, 0.2328912829363069, 0.46170680700262057, + 0.439085657841746, 0.48283110128008133, 0.6895936395080633, 0.915300650877659, + 0.5065137439640514, 0.009253996333368542, 0.17912608340116165, 0.9877014563961273, + 0.2077031505895378, 0.8852031617482035, 0.39883058097563073, 0.9280883776056293, + 0.5468439956477785, 0.8348508115176584, 0.9623588674800001, 0.10797271631363137, + 0.11652847794724375, 0.4006687049764782, 0.855832471124214, 0.8786977408679026, + 0.7141267893561709, 0.9917530089388696, 0.5096096656188075, 0.16953365739403836, + 0.7251961536843469, 0.8943385078565467, 0.8112754308812744, 0.9761274106045587, + 0.9326604989238073, 
0.9903928487769879, 0.269577210979857, 0.8189637058521028, + 0.7381833350806415, 0.28154334423945915, 0.1250465350737835, 0.3968102520265465, + 0.9604970122135507, 0.9722187833692912, 0.4243054458397719, 0.9403312613720402, + 0.8703241311060701, 0.6089211633348707, 0.5345658877440552, 0.5130240560137153, + 0.5368930457625992, 0.06336696382328744, 0.045978147806837844, 0.21270102297516136, + 0.5183775131533372, 0.4575562226466785, 0.6969774129134045, 0.49689162515596763, + 0.3039279916586811, 0.9104938758536323, 0.842826717308411, 0.9030531490528709, + 0.5622318797083762, 0.8880003119744561, 0.22917815472979386, 0.8454761943337168, + 0.5490186483045347, 0.8903499500434628, 0.19690296474232793, 0.4942586480451958, + 0.6208256248541093, 0.793673761384259, 0.15704525492115684, 0.8064819707072164, + 0.3612275689614989, 0.6409665812254784, 0.718535811027167, 0.011627527251532266, + 0.18251899551078254, 0.8816150389346538, 0.5141424262186775, 0.5585514761068199, + 0.08751818562303404, 0.2032257845367399, 0.30428349184187875, 0.3562439026805052, + 0.9902869789914716, 0.617485694423711, 0.23085218840071986, 0.9200353993391799, + 0.1438119306579585, 0.9637545013683527, 0.06926888721997271, 0.45809005417603854, + 0.7797894826426551, 0.033071563699438644, 0.5346070887767089, 0.3785868582170747, + 0.7792362852031324, 0.44049360572247687, 0.5655966414697406, 0.49836455834115634, + 0.41982706944388515, 0.9142509416402275, 0.8055104920505853, 0.7882368863968197, + 0.7861541561206552, 0.8750002361453983, 0.3141872249221326, 0.6297863342928883, + 0.13752402310502843, 0.8408271137857017, 0.20203166178667864, 0.6132166149173786, + 0.00565266221263494, 0.4346366940226415, 0.5750892505021086, 0.7484967824901719, + 0.554368174682661, 0.6462518960809419, 0.7575408699684905, 0.8304301645939067, + 0.14686776200689766, 0.01775895453516141, 0.20381742085491494, 0.5374100389215499, + 0.3543295568618047, 0.881281278910346, 0.7387949907094862, 0.6532668366398822, + 0.35871434741569874, 
0.34577990378538315, 0.7511426552642608, 0.1749342021093535, + 0.44179651111839446, 0.3183243633827524, 0.8952139313460276, 0.550987565451777, + 0.11245280703451577, 0.29603527735935475, 0.0024376097979692535, 0.5580957824575782, + 0.6785730314640288, 0.6989604199143635, 0.9280856338597502, 0.8528138190991756, + 0.8155971681047525, 0.19627454847903825, 0.23483652834817348, 0.9749127497086097, + 0.9952437850116509, 0.39823054819356907, 0.3902382135861282, 0.9436877000327134, + 0.7855667825786122, 0.8211945319467171, 0.9350703325513966, 0.022807010966559904, + 0.5701088951610942, 0.21262277387562822, 0.182285558875648, 0.5457926090191436, + 0.0022151330954638393, 0.9380139047727055, 0.8430496283743121, 0.1022224243661769, + 0.29636066130596317, 0.9479549160834284, 0.8250427156664355, 0.25458526111733715, + 0.6582997945231689, 0.24489199861326638, 0.7480060827586592, 0.16758586360820604, + 0.675393772428663, 0.08917020068830872, 0.49411868825207117, 0.9293064155184508, + 0.04710616624830932, 0.9032842176572544, 0.7887575986187616, 0.45685702914654747, + 0.4652494433853783, 0.08590753836423537, 0.8677926147593354, 0.9757594778331132, + 0.03326488446691034, 0.09080413307553958, 0.2677322617144291, 0.003004139162062991, + 0.37075805819564844, 0.5619563194408237, 0.5326269575702928, 0.34340862047732745, + 0.9016417148983238, 0.1816640209011503, 0.3079844132700572, 0.697831111602062, + 0.15550348544202108, 0.08709128824924162, 0.7318882367213184, 0.5226189451379162, + 0.4714561263097441, 0.6536425776379847, 0.8780547121445959, 0.4605063736250262, + 0.1108839197488849, 0.49048534621825135, 0.348779380113877, 0.3573496061128525, + 0.6477277145926619, 0.3267103713187982, 0.154258739206506, 0.09146321915634115, + 0.15653234956704554, 0.2659173882939163, 0.9017235470997731, 0.39463656456034923, + 0.156590494640086, 0.8173011759177684, 0.4863177935656502, 0.889074583881877, + 0.1961541858280207, 0.4073608554088233, 0.17383269122504952, 0.49264426743742795, + 0.28034389601998155, 
0.5593685058504049, 0.8717081492192968, 0.8138787395058248, + 0.6849150775885576, 0.7294561911157406, 0.6007567942294963, 0.4566856125549694, + 0.7762810018455528, 0.037485912765460316, 0.317156754519343, 0.8194347358709463, + 0.8742357018784637, 0.45420248053764034, 0.545021334035525, 0.40484355148999873, + 0.9642956470123379, 0.9521463750801464, 0.009809236244757491, 0.5632225263832438, + 0.827726924457394, 0.19127589974101955, 0.8270672169675024, 0.9389636338572646, + 0.03389523795263183, 0.8566288277688866, 0.41601508004675314, 0.4533324145773725, + 0.3986860505212597, 0.7125644912081883, 0.4991806916020406, 0.6155778733311165, + 0.9130384484529627, 0.5487870717147846, 0.7780523937434449, 0.28193646085785884, + 0.23331327786702805, 0.9816900774238352, 0.1350024308188581, 0.780189727376689, + 0.9156908233955465, 0.009622250928327358, 0.14554767074354535, 0.6099444642853934, + 0.49974097393273886, 0.7069659077805105, 0.7816183976265796, 0.6893721747092452, + 0.39131354032672916, 0.37859141906212357, 0.5906888260321975, 0.6601801424705058, + 0.17536941466724143, 0.2319276355700719, 0.7489062937368479, 0.25317403533372795, + 0.13515277633234812, 0.9347494213774822, 0.9266229609375172, 0.9943887686013935, + 0.05448279130963951, 0.2941520004646705, 0.8162845180903855, 0.9728186588423753, + 0.04784001124036252, 0.22656891211505314, 0.8623002348807659, 0.5872338171414982, + 0.8172407294972533, 0.8935080553239964, 0.9015605149533703, 0.41310471749037525, + 0.9472794835647472, 0.6520347680487271, 0.31311178341617885, 0.3881266733594382, + 0.5940963916604006, 0.34213553626717264, 0.7731917933130059, 0.5966087262785921, + 0.41236805548735, 0.38464411318442915, 0.948890845138434, 0.6579713101413165, + 0.0864903742655957, 0.9024542523251091, 0.5321617601250037, 0.4679140323179162, + 0.545330586994838, 0.45181337751924944, 0.46408729390763515, 0.6739482447724412, + 0.0051732115035305615, 0.19981216404826385, 0.5290130517806954, 0.7192318331618469, + 0.7212983477553033, 
0.3213459838833309, 0.8810398893805294, 0.24407230150381054, + 0.7705033913302648, 0.8348143443275565, 0.9798327350813848, 0.7153785787888585, + 0.17227162583571132, 0.013486902192362704, 0.05279184836340933, 0.4048402752271233, + 0.6541987863733343, 0.9572730371926592, 0.25874032756911736, 0.1708286437732166, + 0.7458853835638297, 0.8957748918471427, 0.8504052252091574, 0.8440260407324176, + 0.09998791094771531, 0.5981663701129188, 0.9909795971630094, 0.7599927671359444, + 0.5930460739990847, 0.778268386360668, 0.6314661520179152, 0.5758158555084719, + 0.1933560284115361, 0.04267202229386946, 0.5808063777329563, 0.2846012429745495, + 0.8127455357361613, 0.8547361041799995, 0.4236263477471254, 0.5126017051751544, + 0.486864207649195, 0.675148578661926, 0.521602594310036, 0.1714438367929747, + 0.987577581508166, 0.6240301905818986, 0.3972660860873791, 0.21466797526556947, + 0.163736157424353, 0.16097272448599187, 0.9867228726368442, 0.44686891216524294, + 0.8529040770303731, 0.8427084849973342, 0.5248669060092062, 0.6332209039546157, + 0.3381060884944972, 0.6410528682127311, 0.5184896117965878, 0.0775072340265841, + 0.20847724173835658, 0.8421532756187703, 0.5690884663921209, 0.16660278165823805, + 0.7114283732757188, 0.12485689980485426, 0.4778653717597874, 0.8936810245378618, + 0.25572041201895246, 0.6479725646536676, 0.0521651501340884, 0.2899282678297366, + 0.1567842561578523, 0.2618292683133343, 0.48167163493515797, 0.8678615958116621, + 0.2825109203364219, 0.029167488033803868, 0.30085749125988204, 0.1430984046038365, + 0.8105162116377423, 0.3240150277783528, 0.4227679344777656, 0.6462688810750794, + 0.7669111613819273, 0.3591939105703398, 0.5406014889841781, 0.9133014898668583, + 0.23078416856951423, 0.7282831287496329, 0.7696332086423125, 0.036116417171488946, + 0.7188034785773543, 0.46641137151322454, 0.13517594491126694, 0.023029089551247717, + 0.3240281523339036, 0.28219840623687475, 0.6176277759655265, 0.822929704075167, + 0.9746339984503541, 
0.09356935168701108, 0.9358670947921622, 0.5150705189935418, + 0.08698190944361806, 0.5067019194005934, 0.4607138934794833, 0.5903911985128997, + 0.04743558869608677, 0.0816011762024833, 0.18434623268203787, 0.7063485147604806, + 0.16133266570652083, 0.7545927345078352, 0.9883244202548223, 0.1932863168822514, + 0.18928374882276033, 0.8001091255310002, 0.7155860340551159, 0.3789846917866153, + 0.9611113142900227, 0.879396081488193, 0.5388680749811205, 0.8361159884411873, + 0.866329387811841, 0.2730682799221704, 0.3194169335393262, 0.9423646280256972, + 0.24435425023073687, 0.7470170905071631, 0.7738507071603729, 0.3986280845457605, + 0.030480186468068826, 0.610744914811935, 0.2678410545126796, 0.11563606352242406, + 0.35435733790292934, 7.629086834592957e-05, 0.31506645823114665, 0.52596582150875, + 0.4947711935372616, 0.2900674918365881, 0.9559480836459326, 0.8641971546827826, + 0.7759990209000613, 0.25189193242507424, 0.7764929030552792, 0.6740451053768206, + 0.9442503568797899, 0.27015556612912295, 0.4722170294917363, 0.6923440714494277, + 0.4625235039406205, 0.40653633557229074, 0.8614408947586382, 0.8005107123593145, + 0.38952152310149324, 0.5138242206735628, 0.3034512156454581, 0.5075533946811591, + 0.6409178909749544, 0.7867224176037622, 0.5704724276154304, 0.558636617357991, + 0.2794783538323291, 0.29715752029602904, 0.5187519349640142, 0.9873759371136224, + 0.9258075437182876, 0.21802053184517123, 0.43578239292876575, 0.5802450337786992, + 0.23230142264547604, 0.9663956282775855, 0.3046095724793134, 0.7813239552985765, + 0.7139029823908456, 0.11346499503069729, 0.17481934399946641, 0.030608752823210073, + 0.7198536329641824, 0.5851914340312228, 0.10405825121810075, 0.4328732174062543, + 0.05984690834909834, 0.059751406592598366, 0.7099125485628539, 0.2327422314064267, + 0.22293859772007485, 0.366334373824144, 0.6749561293186479, 0.8275468647809106, + 0.4836623121233168, 0.5182234230734467, 0.17814665168896882, 0.767156836968558, + 0.9957233089372526, 
0.020767935241929103, 0.9075908646866189, 0.910011621335872, + 0.7026196573836658, 0.935762210311436, 0.7711845413595912, 0.46739814450201234, + 0.6994581401527824, 0.5616043805626976, 0.11408951973501213, 0.6998783598361171, + 0.929936385814316, 0.665750680755247, 0.6799953175502683, 0.020929310077360097, + 0.9533380343595762, 0.19059543824646374, 0.09924735025128095, 0.4388686123460397, + 0.7029603377770199, 0.0283956241097576, 0.08107860028919389, 0.5042102876279084, + 0.2165582424681859, 0.061309458649308146, 0.5425731942471365, 0.6896535206703505, + 0.7865332719261746, 0.21144979197123848, 0.044240593795785155, 0.7172785881835658, + 0.8087701489136178, 0.12632062877512984, 0.3135882124553674, 0.7030096426830668, + 0.32427739389406196, 0.26177958169560045, 0.8364626116621018, 0.045119232141371146, + 0.15965420000508967, 0.9353298867548688, 0.20383187633308142, 0.9738936519827217, + 0.40298551153980444, 0.7975560547282584, 0.18171915196160293, 0.7518553129466249, + 0.3343056185111044, 0.4899186327978281, 0.9156154657107153, 0.9113609778955688, + 0.9693327204195537, 0.2857523592884468, 0.05130307975288906, 0.71266558707065, + 0.562649641637568, 0.8915442240259507, 0.164649064386867, 0.027705380608350705, + 0.5407021702897924, 0.7889782067611623, 0.5259864378330708, 0.6845669681814901, + 0.6376220227714763, 0.8514856160558509, 0.6510253682186862, 0.5170756234714353, + 0.7067784474209631, 0.9994574991301162, 0.6757298661011472, 0.7466542251346405, + 0.31532433459065423, 0.2932461574194528, 0.6129823292461669, 0.24096303449059953, + 0.7484722467815119, 0.2738466277749775, 0.6041763997009431, 0.9490113319971468, + 0.16474717260481941, 0.6344892971888295, 0.1605409102690507, 0.36949116587874564, + 0.9062309293016031, 0.5676917708189916, 0.6579501227105841, 0.1556984298808446, + 0.10526105347022718, 0.0432597501143015, 0.12340518927265864, 0.24480904517286162, + 0.9559497778713313, 0.4650345002210139, 0.5570696004138982, 0.8604647108283786, + 0.08281791268299465, 
0.6909086243849096, 0.6361793771611001, 0.18333854334943112, + 0.09914338253655386, 0.42672616390085993, 0.029455181622296656, 0.8806910005582591, + 0.501593639037123, 0.5384016604659735, 0.048500484858614024, 0.33157374470623313, + 0.7819900133063813, 0.8036696202277274, 0.8870946895919508, 0.6443897992391144, + 0.487593702664578, 0.47427283897570727, 0.45319916886863576, 0.34084491767048486, + 0.4097600967808508, 0.6369527481950622, 0.988582322369492, 0.7429270774935287, + 0.2755575157901178, 0.5337860064252411, 0.08527367015946075, 0.2868764543425195, + 0.2818610831832793, 0.09656564597512085, 0.16326420842851086, 0.078524756766712, + 0.692034898414409, 0.02929249805951828, 0.04078870767209608, 0.68618278656732, + 0.4164950388301115, 0.15439502423081763, 0.2608044219197406, 0.9485716022280892, + 0.1489511084132873, 0.5371867964769852, 0.2679471165751426, 0.26879005137532985, + 0.6385648413295864, 0.6934480563650385, 0.5889259374697515, 0.9444658629484035, + 0.5739860439847068, 0.4642829135480352, 0.5485487373822201, 0.055600233329058835, + 0.9503995048048212, 0.03693809906500234, 0.29259340143434953, 0.3763819233620549, + 0.054765734537462474, 0.6690946345171489, 0.22901017308395843, 0.3851663832175244, + 0.9320166382726104, 0.40543494470602504, 0.7132939144212584, 0.438468625983355, + 0.40289388689969197, 0.1080205205554402, 0.36245057716210094, 0.36015390409792614, + 0.4083542707925487, 0.6457341101335805, 0.5280984956406288, 0.27661051569717643, + 0.15463113535167072, 0.6339351618412589, 0.25386082756030703, 0.3773882514499456, + 0.31448240237275205, 0.5379416898801859, 0.5758417787990292, 0.4931403175245719, + 0.19895720697450392, 0.9106510540309283, 0.7050910060490698, 0.45995862112822117, + 0.4978773762163059, 0.4305593369082561, 0.5163888955360598, 0.5881241308899362, + 0.7443503376805837, 0.043441356160601075, 0.7620942885064099, 0.25391141995016, + 0.10404423400504292, 0.32906430947818444, 0.7134706951754703, 0.6142847460386224, + 0.8025796936730528, 
0.732389054290974, 0.41473925626260966, 0.6923681125801747, + 0.7145973260035485, 0.6951361442463575, 0.3418245407724094, 0.011957990757860193, + 0.10193439601418242, 0.7414001168798778, 0.4136476884443896, 0.690481795572699, + 0.9133795729869558, 0.8514948184579377, 0.6159790140475269, 0.35329256022365085, + 0.885435642977142, 0.9162085873379183, 0.8567070326140995, 0.25296933567683344, + 0.35424346143915453, 0.5002598794309785, 0.8659747694089831, 0.061456731329130654, + 0.18850808844299283, 0.041641953786621944, 0.4423078043675779, 0.19516736871253626, + 0.7748134390819658, 0.15669760064831195, 0.9117988105447139, 0.17529069687430643, + 0.21239361243420785, 0.5598792774908463, 0.019901661165872664, 0.7807569139436707, + 0.3951855412813505, 0.04765166480638605, 0.1252275191352593, 0.7764889850899379, + 0.9903254692712204, 0.4160674609967715, 0.6575123873469116, 0.38692550571427453, + 0.9421299229690594, 0.13425578776714753, 0.6592775206040412, 0.6054569516862356, + 0.04995466341550436, 0.31838895529498445, 0.5594098049245086, 0.5431828600421421, + 0.9655536479587977, 0.9443783667221035, 0.52733461507154, 0.23509187824030708, + 0.8359559719413645, 0.40800777231855934, 0.8192441772939613, 0.4585598560257491, + 0.5789392096126688, 0.73483080985693, 0.3857779429185293, 0.32684564099220315, + 0.4342809981627095, 0.14049178871154044, 0.3249377070613626, 0.9649364229542966, + 0.546159980425236, 0.7495624435760676, 0.46499080808841164, 0.9595431299879549, + 0.5511422996681221, 0.006284943257787723, 0.9579373386678119, 0.8627631305545066, + 0.42931315127083947, 0.7117063444496837, 0.3529688849071867, 0.6581981238160464, + 0.8607842753730445, 0.1035946540281355, 0.33884413032963134, 0.6507898433398873, + 0.5959572838859108, 0.8452792802539334, 0.3526427347097323, 0.2554030188129234, + 0.3190780690938113, 0.5359166867354336, 0.899592971396041, 0.9121275113754743, + 0.18881523918281573, 0.16581486849400462, 0.1864109651954301, 0.028607524048106736, + 0.13031259865406175, 
0.6048366380665815, 0.5957170162769209, 0.7081464546990173, + 0.8903217482370849, 0.09598610365359428, 0.519585064267526, 0.09657738036679875, + 0.3475479951857743, 0.4963494792070573, 0.5292251865940365, 0.029110455518635447, + 0.13835821078098043, 0.658289086360752, 0.20830808279559043, 0.0736493897230931, + 0.8598564030205227, 0.8590472853784719, 0.9610234425963425, 0.48137361683309265, + 0.17898859647327336, 0.5440140974851478, 0.22730089140323917, 0.018558462503961426, + 0.4863017370860887, 0.9382132934461014, 0.19297219357216033, 0.9940483705415563, + 0.7732193700811595, 0.4881593204224687, 0.21141437672188212, 0.4440192193919973, + 0.14557017262370142, 0.7702782436739344, 0.6601890426095425, 0.23468998861913448, + 0.7782546277335423, 0.4448974128041292, 0.10784083571982617, 0.14854640192380364, + 0.41589035421442533, 0.3197984326244462, 0.7245022420692844, 0.21576829484306959, + 0.10265703279256322, 0.45756321284804535, 0.9403807825834055, 0.5526588429924927, + 0.6064363553897896, 0.44265137283869593, 0.569212185802443, 0.7912368277559028, + 0.8887896757133944, 0.9701565615505346, 0.6977265802358966, 0.536834680339334, + 0.053981826061055393, 0.8756559562878227, 0.19057490879409256, 0.3320634721865183, + 0.4459034795768957, 0.19803414491520221, 0.41464347378057187, 0.4296586033181208, + 0.3521609076476333, 0.7204422760571739, 0.538750650251617, 0.9405722533748734, + 0.6186423782405404, 0.6233966614987376, 0.318102330519885, 0.8543971214766016, + 0.6515592077046153, 0.65087215946289, 0.07339238123019876, 0.4727928631310542, + 0.020176923010824965, 0.8738978388137459, 0.400978242333368, 0.7942854137885756, + 0.6403430773994766, 0.5661281788485651, 0.9813443426749566, 0.5483196198615674, + 0.06707226963185708, 0.7333912975866296, 0.7783524129943937, 0.5686775672271452, + 0.4256925483989712, 0.5147086104638293, 0.21476000417538343, 0.916855433021808, + 0.3714678841508565, 0.21945150784688328, 0.5895851611906975, 0.1444626597607006, + 0.738918199088585, 
0.4481544686964083, 0.9856359446672089, 0.29954706102055195, + 0.611250956442058, 0.9430562414796368, 0.7229305445999924, 0.1858020448235872, + 0.6618648057683056, 0.8030276227107247, 0.8628099361519953, 0.9052610609469864, + 0.48514713211278393, 0.21873607262787087, 0.5236259810661064, 0.230898613761495, + 0.17879765773860556, 0.6151719520382292, 0.8151124499805635, 0.5781982220346872, + 0.3922148718803763, 0.04935560570672859, 0.6166915363174584, 0.5081051021238706, + 0.7010454655614318, 0.4156377880919485, 0.3492037798091502, 0.602200690923089, + 0.09046365783660171, 0.16223935565771608, 0.5214901402508125, 0.5044965309511683, + 0.8718711479270838, 0.9893060211426482, 0.4959762454399149, 0.6638722685693331, + 0.27567906452334956, 0.9612735276985851, 0.017683914342381035, 0.6880525776298796, + 0.26384910388692295, 0.4717037005472474, 0.04709738998294177, 0.9206613460317158, + 0.30609401558422245, 0.650794876466132, 0.9549873305910817, 0.8260862345328126, + 0.4305511457337493, 0.15775833413756224, 0.5698073932330375, 0.6732764670891707, + 0.9398683076373318, 0.8884952039532361, 0.9747580378552119, 0.7012280138803675, + 0.8531212614847642, 0.5588857939967947, 0.4865193397093649, 0.35605598711197395, + 0.23192417196470105, 0.8943987506597288, 0.4581750581036229, 0.8316761648384743, + 0.1956329153934292, 0.7862726579345383, 0.2825441517137014, 0.12262868378623026, + 0.8276433833405171, 0.6205529971010263, 0.32692718075055793, 0.8228061122431176, + 0.01971216456364311, 0.34028396625042623, 0.9809049097581001, 0.42615866343689, + 0.31026080743670426, 0.655869132710115, 0.21848187364501515, 0.7168610879867828, + 0.4594451911566828, 0.665814039596452, 0.36315476515717227, 0.5792757848529761, + 0.2194177814751842, 0.04385373639898016, 0.518617925385258, 0.3231278017612045, + 0.2594374757740957, 0.29246587337951047, 0.9954689561632492, 0.3501425234728255, + 0.6709665721562044, 0.16735352771914958, 0.8260129706431494, 0.40920719564891483, + 0.6726330850570945, 
0.9363557645462725, 0.6556123113787984, 0.24270806527297228, + 0.7235976249239356, 0.24447580020214432, 0.1570820781313833, 0.8148349956024216, + 0.6933255834175878, 0.3001748159747639, 0.4345877719588055, 0.5503760162209426, + 0.25779097166992126, 0.4929286821874297, 0.9452724276975171, 0.665277564567275, + 0.8981405030205403, 0.0670257099481898, 0.6939751122857064, 0.28055550151293496, + 0.1332388904235442, 0.5375935529800575, 0.7796575064872702, 0.6544817490995954, + 0.07015083766352537, 0.5144850339572572, 0.8525538781907521, 0.9739838860835902, + 0.4968967376586929, 0.4127145314686359, 0.27933761555501435, 0.3672016010943906, + 0.6624024674092706, 0.24544598528646022, 0.730314983254911, 0.17701101837412136, + 0.4254799754930676, 0.35423136895259566, 0.5768227897299716, 0.5744645070463099, + 0.6089490929009442, 0.8468421104716097, 0.062493117646656215, 0.9433883841846586, + 0.3224314773749847, 0.3879437547436835, 0.06542481862518479, 0.4524283508742747, + 0.855669739307951, 0.9355414144830774, 0.016953557859817336, 0.08887746141991093, + 0.3077384846166704, 0.20142444310220264, 0.23430527951848246, 0.26726979711914867, + 0.7538977184667861, 0.14055565974103956, 0.12019543811452327, 0.8032361878205014, + 0.7275211502307414, 0.5391190118588421, 0.39040871413175204, 0.9111147281454772, + 0.9493616269229143, 0.7539309432514376, 0.32713630350951395, 0.748126071640409, + 0.7235166954765612, 0.5451528985198291, 0.08939363370630093, 0.9726577168929407, + 0.5056002065235468, 0.8720214092930499, 0.14252130964260157, 0.8017993349366525, + 0.8789412016297585, 0.19733101052115432, 0.155381302468528, 0.8861006212175135, + 0.28308405796973546, 0.3421889072421824, 0.2741378883081277, 0.562728159108378, + 0.7383010713186272, 0.2940145578132707, 0.28188592835883397, 0.876776834120555, + 0.4094498174275354, 0.2609720457658792, 0.9077216796152148, 0.9167908537532, + 0.6309256990750524, 0.2932255295296887, 0.6187393252348707, 0.02419640268937162, + 0.7615549945291642, 
0.40152068077174974, 0.636001364738158, 0.31693811983415865, + 0.17459370162390564, 0.09635478622334415, 0.773110596039915, 0.36730671076204746, + 0.4443424697638383, 0.6269672542570564, 0.18052959491911025, 0.8763251766396633, + 0.9298965932343471, 0.3114788775550925, 0.5129502631275407, 0.2671988790638957, + 0.03395469873611412, 0.20677057622909345, 0.029755677202100816, 0.4219143640858508, + 0.27766396280399275, 0.7626861520170851, 0.2804449959173896, 0.41126630075958903, + 0.6130098684306445, 0.7749747562665911, 0.6306743873349789, 0.17398026115826748, + 0.8664778902230784, 0.13631390978684288, 0.126146245275511, 0.14392278284610127, + 0.4957647401686809, 0.49393608417974944, 0.7608848150197474, 0.7708537446533661, + 0.17041707735566713, 0.5292679237649189, 0.12997774787294147, 0.5833293771008747, + 0.22158461844207578, 0.38454048634657145, 0.25170772654694007, 0.18384322498407457, + 0.9106010886281795, 0.09659133036787504, 0.04798999449920649, 0.5971959180210877, + 0.33920293291125825, 0.0026978486282175362, 0.40823027702490455, 0.7474551858830002, + 0.8447835860688525, 0.999476073467762, 0.11292905422267163, 0.2891202214831097, + 0.6782488374010278, 0.5640985110366258, 0.8986345118209681, 0.8303221656535216, + 0.3838014827262134, 0.6954806707019082, 0.04745548087955098, 0.19020545853032067, + 0.12142096074765196, 0.8083835608993636, 0.2703724701412815, 0.04308441751967196, + 0.8978514331468422, 0.9227768394778075, 0.054487218354160816, 0.8881844540343677, + 0.05812500740473081, 0.43395580484342355, 0.9128284726568942, 0.46153699068964205, + 0.9329113950684834, 0.5436808169668677, 0.8551864909610408, 0.35430629048202467, + 0.20331160404943038, 0.010592767504785994, 0.768145204266474, 0.7716453181066588, + 0.07399950484290962, 0.35304343137569727, 0.7125013832160177, 0.32282759187212584, + 0.5098121496598533, 0.9429961075410074, 0.23690140194649634, 0.28545879601150004, + 0.36072796490221837, 0.08273436245662757, 0.13321861653992195, 0.45428395959680057, + 
0.3292870423168869, 0.3717537796542958, 0.9151157324904851, 0.032565267234247575, + 0.5663024224678861, 0.21258936026884934, 0.018792364759155578, 0.33061571155470315, + 0.4408215380032614, 0.6373830559219769, 0.409083548130421, 0.5719023860863325, + 0.5966825436453085, 0.3384202473006439, 0.0652522862056627, 0.7184518347094753, + 0.7333243428502271, 0.3397689359164483, 0.39030612456817504, 0.49021356874309363, + 0.07567478335266509, 0.8152990173046628, 0.865796090334987, 0.9772443897479798, + 0.8237383881843146, 0.6529526140669909, 0.642639906551107, 0.48148740771686727, + 0.32824558349693844, 0.21997446692237632, 0.2826342393226089, 0.6925547552930775, + 0.8127702758298999, 0.8449833779720203, 0.04123879385713569, 0.15065632287258246, + 0.08510557609762459, 0.5458280878881934, 0.8586504699178478, 0.9911190250770345, + 0.5763284424876813, 0.7391889450985766, 0.40039453665752356, 0.6580974766512633, + 0.9713635948599508, 0.9633153015071205, 0.9892725316197725, 0.7655195394029977, + 0.2626076882068743, 0.5724930703594869, 0.8770329572359922, 0.9241957706353401, + 0.5311898231459584, 0.9787947183754235, 0.29236240390648527, 0.1370530031021152, + 0.21557458708432398, 0.21930911835253963, 0.8715098439481456, 0.18225413327885254, + 0.1273980429476339, 0.781536231097221, 0.7399707114697983, 0.03176008087944826, + 0.07484892991644743, 0.9938729205296434, 0.7208500631972949, 0.7020940151356819, + 0.649863542644937, 0.07171846155065342, 0.24072337013910783, 0.3613692423637672, + 0.28109344159579286, 0.6906532840886619, 0.2697954961712017, 0.28147888050363834, + 0.8129444048447061, 0.6482743700303343, 0.567516198772247, 0.3377204549692112, + 0.5318035966189143, 0.13996490823612162, 0.28411043736720165, 0.7086066320571008, + 0.7364523892364573, 0.24583114874653533, 0.2718401559841159, 0.4699898162815598, + 0.8368333921696967, 0.6039469513318906, 0.6881629540763242, 0.10914136009539432, + 0.03794547642282364, 0.16071126773904743, 0.6071822780850678, 0.999449454081704, + 
0.38785614009596714, 0.2305249584020561, 0.9381100298775975, 0.6723682337201471, + 0.0270853082856215, 0.38452454801976443, 0.8104401851265016, 0.4148616154771626, + 0.25348586490031955, 0.9559803040006986, 0.23669298529343408, 0.8683776502568926, + 0.6818154399643029, 0.386930406487093, 0.4831551067581342, 0.762948076915824, + 0.3080418124016162, 0.5783325642474083, 0.02031458126010055, 0.07023369879228158, + 0.2510418307035861, 0.06557828934649368, 0.20332294284183228, 0.6988094459473019, + 0.40342220501450765, 0.9809970493544985, 0.26572276292341057, 0.5928075161063842, + 0.8549134765023864, 0.567763428815418, 0.5470926603460404, 0.9274902125377089, + 0.4928070771606695, 0.526894280369654, 0.6376977924010896, 0.22799338981773765, + 0.666609321330193, 0.30868196403098336, 0.14279972266571528, 0.5062364564484477, + 0.9157139519719435, 0.4438514752002488, 0.0967753755965477, 0.8364362062093621, + 0.7507227281441472, 0.034410730069850626, 0.6782441014462247, 0.48150125869929983, + 0.8610360109688682, 0.040318507891833, 0.981454376414118, 0.9489949777983132, + 0.8363894005173848, 0.7935877513663323, 0.7161463843240741, 0.005320974874661322, + 0.7219264019524386, 0.5939046135812794, 0.7367415608933358, 0.4901397664186017, + 0.7455950792620178, 0.38453281111378745, 0.11932732877577257, 0.23846310575164364, + 0.5070859241209125, 0.4714858979832832, 0.9511226798432605, 0.816798240831224, + 0.9906645769037737, 0.11961893403587176, 0.31535187276502774, 0.6103025507014492, + 0.780168389505919, 0.4873081795149584, 0.5385034675060101, 0.979880443701223, + 0.38006057763820444, 0.8983108410336961, 0.7260784501222746, 0.08445070147685141, + 0.19108936840472623, 0.8684070135879295, 0.29639093380738957, 0.24831227601153794, + 0.49319075696101833, 0.6248867669334034, 0.043314312738550464, 0.6751994351532093, + 0.9839650562784863, 0.23577156317354986, 0.655407771653181, 0.9998838732393714, + 0.914266783053472, 0.9781753528714074, 0.002991513868391582, 0.27941324936645506, + 
0.8503441860013552, 0.47965377849050583, 0.6425737018053364, 0.6917759376427142, + 0.5997255515022856, 0.060241281568058636, 0.5393389196251592, 0.18985718347501923, + 0.9351558163273623, 0.05023015623962879, 0.6468904895820736, 0.014573021284780774, + 0.07314826533484264, 0.7385634948320089, 0.4736186028473338, 0.961769707187561, + 0.6466132639765876, 0.6540721879369729, 0.5897323507539489, 0.24160977712950327, + 0.34607925280409346, 0.6030672545910204, 0.09219015818821708, 0.2189044326893821, + 0.12227548581400349, 0.8947981854220011, 0.252625479705346, 0.9946789181499702, + 0.16317149618234628, 0.8889540539831564, 0.11335746065295138, 0.3021348506513677, + 0.32426537462416727, 0.3221710260970897, 0.7955224454834644, 0.31995046679141825, + 0.4603696522905393, 0.15274124315980264, 0.5767785824549683, 0.9089273609938713, + 0.014181800749509588, 0.8968517292934882, 0.8576432177022097, 0.41392758451611766, + 0.49075724214154415, 0.6724006689201423, 0.7419828793396049, 0.5918792807869941, + 0.4964571450177757, 0.8462797472729263, 0.24074647479855604, 0.17389013966384548, + 0.4336537753610863, 0.20804076413234251, 0.8346292608572363, 0.16330788557412612, + 0.6470926565729509, 0.24909881173874848, 0.7800536906520463, 0.9027679093109797, + 0.46621683989976836, 0.8825818086255208, 0.6483997410357604, 0.07090902131552845, + 0.4615378723949781, 0.3984200929917433, 0.793556355780704, 0.18864072780222685, + 0.6683790309702944, 0.8281722909526522, 0.8584671461088224, 0.7674560890352315, + 0.30241765553139677, 0.41475831093913884, 0.22776361413963964, 0.38489260664050606, + 0.42790348692893054, 0.4855741541423584, 0.7700059150649414, 0.4463345406584335, + 0.3754021383352054, 0.554149430056228, 0.7229379198353453, 0.3532397254539792, + 0.3768025729772698, 0.668903032222659, 0.9064030808327798, 0.435833055858148, + 0.8685532076495873, 0.2886386669233906, 0.43587518450474105, 0.694872592542479, + 0.1796499204429559, 0.22480783387822612, 0.23350737891309348, 0.8144091649840206, + 
0.44616818509133616, 0.22082898026287756, 0.17930805292422558, 0.15071070399700426, + 0.3155340984080618, 0.6760742950253298, 0.2621675138999626, 0.17823328215968037, + 0.08015587675406999, 0.9763732534524071, 0.8110620473055706, 0.9566564231509402, + 0.25767909123698307, 0.3601800348476929, 0.950425899846338, 0.174356764476159, + 0.08077041781361871, 0.7687961673279716, 0.4354185072613621, 0.8310249565932173, + 0.3721057577278879, 0.5340391905817158, 0.4872996797842588, 0.761276430509516, + 0.7341317116279491, 0.11585158022036635, 0.8117034975015432, 0.04017399270252342, + 0.5161920405254127, 0.02912026146204605, 0.283469044387504, 0.6056600705599993, + 0.5892643002783994, 0.19327527302460534, 0.6201029277090222, 0.9439856016164112, + 0.8563446020541817, 0.6525564675585972, 0.9151952958944338, 0.582373416388159, + 0.7669560791324367, 0.02468294355083911, 0.32790419872418497, 0.16828688898958633, + 0.9785136875320333, 0.7559030686500858, 0.4880628030762244, 0.49742040693088596, + 0.3807874726702045, 0.2518072818614463, 0.28293848736377514, 0.052065141479679444, + 0.12673432223072, 0.00493210913236386, 0.0659657665976604, 0.7546657122157793, + 0.5142072130637574, 0.47101495152740935, 0.12423809933676722, 0.631621048904926, + 0.9375168340975689, 0.9710427645428577, 0.6951321450506236, 0.12975755461724825, + 0.9371463814511896, 0.06329947410562009, 0.17250706916082792, 0.27504520679952815, + 0.7997764878570511, 0.6280084931708173, 0.8790770172906017, 0.8699573351880034, + 0.47162638306287774, 0.429977964766746, 0.6852470236642084, 0.02825069851350992, + 0.7187392702298809, 0.6897047244877237, 0.21155052406886465, 0.5782009379374243, + 0.844124004652653, 0.13477494670163825, 0.39460860995867053, 0.21886878537466137, + 0.9331765626546623, 0.39975766464681506, 0.05306072541805107, 0.5198958252400147, + 0.30504398126008225, 0.7433307539428954, 0.12287466682408421, 0.9670010933795353, + 0.747858359423017, 0.17207327768824898, 0.2545220160759658, 0.5911442854526012, + 
0.785125289716672, 0.34374725201679623, 0.1881988882326393, 0.2864207541695756, + 0.022946369182973436, 0.035928263856476184, 0.8248370296597001, 0.0326694849755228, + 0.7293502533474547, 0.6077561758020177, 0.5308356230207454, 0.03316711866227229, + 0.7568490275843757, 0.520780966891579, 0.23241392652065784, 0.6897984319358339, + 0.6291991128180034, 0.4441797944319068, 0.5568587932114791, 0.4498088314464638, + 0.7010019040522237, 0.6507673483154275, 0.45381116863257953, 0.8343724626292929, + 0.9726485983607335, 0.08883727023216881, 0.3240589354871075, 0.17091823673070605, + 0.7065195713066943, 0.8019676727987474, 0.5441090274902036, 0.7569920288292978, + 0.7284967317947634, 0.41634232641384894, 0.07570625995129976, 0.4839470331704978, + 0.22728929068210257, 0.6017907078082997, 0.2752057089278115, 0.6197057838032689, + 0.5062528432426374, 0.9578668123268466, 0.17145202413217242, 0.24939262342805169, + 0.6288714928454562, 0.5306228139658058, 0.5871282516007716, 0.5544974175100911, + 0.6268496169607662, 0.8447978213259328, 0.5990456111341405, 0.4975881427045391, + 0.9759159347677031, 0.6791081992495608, 0.5316632896786118, 0.731025524465835, + 0.37977185358977994, 0.18206518830018836, 0.655684273695134, 0.7157391210381715, + 0.9914665753650298, 0.5048593361026893, 0.9551772829563094, 0.7745323297735499, + 0.1264456909491325, 0.3699896710346614, 0.6285338457271957, 0.514443036953066, + 0.9899529710790698, 0.7325623262164351, 0.42250238115582217, 0.9471771192019562, + 0.5681364044488664, 0.6486542055885673, 0.13720737835505248, 0.6846959458412011, + 0.4521690685914095, 0.9371433944439886, 0.9945327264012567, 0.33107065688983917, + 0.9343690829724055, 0.6856832306125681, 0.799031808457499, 0.7380137292376373, + 0.9746119191774075, 0.6114354110845129, 0.0891424280914559, 0.5091757110322869, + 0.603309404646988, 0.4295246842942808, 0.722036085094832, 0.5625133815588351, + 0.13050048794622393, 0.9927393684728438, 0.45025947906275665, 0.3250450093807251, + 
0.09561872500769664, 0.8633353598344091, 0.40442616095861705, 0.6049206328617713, + 0.8428239809093349, 0.3359551331198569, 0.22248396807251225, 0.5502353681183261, + 0.40267811045207713, 0.5815240359991557, 0.3014924083991003, 0.4250990948690391, + 0.08277471680497728, 0.16446000783135628, 0.7582322318598479, 0.4513912032123134, + 0.21989757035321167, 0.6951066958420687, 0.4703863012569903, 0.6716716338779879, + 0.5407943154328158, 0.4271833226075261, 0.08610893472179548, 0.693695330552891, + 0.31314361600309726, 0.5535216118808298, 0.5990984962870414, 0.5131726892092602, + 0.806119912485667, 0.37726715649026266, 0.09688997160729618, 0.18191235600914468, + 0.7408756680625669, 0.067357737461333, 0.34430792748579353, 0.35918926626247394, + 0.136124078264096, 0.3541995171494954, 0.06349779403656919, 0.3754301718183216, + 0.1727189346883753, 0.7580345332123546, 0.5946766069056495, 0.12308175309808889, + 0.5448895422266883, 0.2753416276479139, 0.23336713618230265, 0.4308822662286048, + 0.5621756856088449, 0.7611032921388645, 0.349812128693896, 0.3881534377705992, + 0.2835050132379042, 0.036894377980221216, 0.9307542758686738, 0.35984184065576474, + 0.057717777604340714, 0.16357715519612248, 0.5474191675603088, 0.17209729645140837, + 0.1035218074281723, 0.550510065989391, 0.41363664298505376, 0.2424585443425049, + 0.12297541084632557, 0.9285138515876707, 0.6290588065058829, 0.8255063271949105, + 0.20874626048136935, 0.9395041387697237, 0.04600701773713345, 0.24156959559907465, + 0.37041024160368163, 0.7264228362952664, 0.04321551557401371, 0.8319348475055413, + 0.5128934010550922, 0.9925867471857552, 0.9216267877935361, 0.24718436038946778, + 0.4698226024943565, 0.5439280672034829, 0.598067075383266, 0.28494763010177293, + 0.03773308148441756, 0.14574269431127063, 0.6053555487266239, 0.8475034881533703, + 0.2020262485026808, 0.7042307474819337, 0.5005531552761207, 0.8192645819547374, + 0.20759838646694628, 0.42254359806485775, 0.49037819615284917, 0.19201492423287725, + 
0.37500924763986043, 0.3021041849823324, 0.3213504994388482, 0.46774067505236416, + 0.0026173221305848537, 0.2236980382939876, 0.8209726800498244, 0.1341877362757261, + 0.7037171416664391, 0.756226471861304, 0.9793647620935093, 0.44373960477005947, + 0.7700194618684637, 0.5673608450062326, 0.013838598953302195, 0.6574138322136758, + 0.7782853882176008, 0.9813441063355254, 0.6393022773267258, 0.9384754208419224, + 0.9544023739700562, 0.8689363282216476, 0.25686033022026544, 0.5677299374542079, + 0.20226249527907136, 0.7413309609345924, 0.3902110116265247, 0.817381412418007, + 0.67832670080788, 0.46537137842295107, 0.6072049619454954, 0.931659554219823, + 0.7232985553535521, 0.5613079712934398, 0.9529395011786647, 0.3233140991562946, + 0.8867061849259075, 0.3102047303338008, 0.5569391637391777, 0.7072786765330655, + 0.05248664008431159, 0.9215047227212699, 0.32001852496573835, 0.2947421881907316, + 0.16826022413555786, 0.9470234077238253, 0.02390800364358514, 0.921284435754136, + 0.5062140210208729, 0.38265197505015147, 0.1803325603165793, 0.35597726956395437, + 0.7943681735300347, 0.41606259121453004, 0.9499169772053542, 0.022111673063570425, + 0.5235643488450099, 0.13857697122597168, 0.4254536166360948, 0.22832181147804687, + 0.23456948978616432, 0.9860684004280239, 0.8756978874984395, 0.03544764170358694, + 0.08973630772908203, 0.5083562496701145, 0.011202382317988091, 0.8990670746555705, + 0.9537673849619107, 0.7085713197473092, 0.986232021543572, 0.5081578876729579, + 0.6945190695598703, 0.9024711459516134, 0.4208577171692106, 0.766413072935944, + 0.48028012847993773, 0.22874311332279218, 0.2133704688789767, 0.5125128278114942, + 0.8880847121160246, 0.3606612472771661, 0.34170378950422586, 0.39477693551558457, + 0.6340623996399476, 0.6953872129694927, 0.7092038405181214, 0.9624278741295362, + 0.3905353641961543, 0.10208035797721937, 0.6101978379539461, 0.7583740478481424, + 0.12625795541839047, 0.6348773324268129, 0.8871458755396873, 0.01670678244223589, + 
0.4146830711435052, 0.5372807865088944, 0.12542194922035377, 0.6512231246433252, + 0.4980918754933916, 0.2091095413841736, 0.21206766259675913, 0.031624355402636306, + 0.49247318203534907, 0.8251416658391146, 0.2265518914158131, 0.45579558773586915, + 0.2793801346253608, 0.48011367142312156, 0.19316663611067186, 0.9630653586008612, + 0.2844483346903066, 0.7428791983417421, 0.20350354070834398, 0.2553320192818015, + 0.8212809248620727, 0.7661178866721406, 0.765443338343973, 0.5677574750675933, + 0.05410405845144206, 0.8535598075847425, 0.5267124302899265, 0.16596308978355268, + 0.5165349840530941, 0.6009663705061661, 0.8001224351986231, 0.12433512035597516, + 0.7328391838711578, 0.46843650518351376, 0.19453361700593552, 0.2457914503921359, + 0.9715041128172724, 0.5035253179717091, 0.2738750246354873, 0.76571572353285, + 0.8322725912841784, 0.9742207856586259, 0.17221294952368482, 0.30429255859284565, + 0.150446075776693, 0.5774457449870724, 0.055204948247565166, 0.9049180192781775, + 0.09219720725874558, 0.7249660525087812, 0.7634298317772668, 0.09532513565669298, + 0.6037902965957798, 0.775876091952754, 0.30048105187387053, 0.5321075407103473, + 0.9676971504359092, 0.26881827714334805, 0.3270462715063217, 0.14109347688658214, + 0.718905496165389, 0.5178340264469525, 0.4641920385691337, 0.6886634266980226, + 0.7541743566585278, 0.9338177907400674, 0.6377610366495927, 0.07859012814204014, + 0.047881143812792626, 0.509518876595083, 0.4662566613906933, 0.044753641991725934, + 0.2888474238293234, 0.4300753033759601, 0.24090396899360778, 0.4880819616569849, + 0.49344032440942565, 0.3267337118213137, 0.30739545858667916, 0.8347976589605131, + 0.8468181237471225, 0.9397711276395985, 0.05076516895725314, 0.35528884635673386, + 0.47869109342428395, 0.6485992654884545, 0.13077849395002672, 0.7561835005330849, + 0.13019119770401266, 0.522947346864396, 0.71681653486775, 0.9545807279091522, + 0.048471853442748514, 0.7656959974208565, 0.33260665541814094, 0.3107698976359098, + 
0.784262134457234, 0.2390556069831512, 0.2171033013333129, 0.22106855321775898, + 0.1777305668557082, 0.7010763743538238, 0.34687353348311567, 0.6713227825218957, + 0.31741624899077014, 0.27699680164190077, 0.24966398933329337, 0.4378580357548568, + 0.8220315880834599, 0.8780303052900865, 0.3282223224417613, 0.5304855331661286, + 0.8759301198567931, 0.8022054018812916, 0.8087106870029261, 0.39915235222002377, + 0.811956074326436, 0.9536455298780298, 0.421213924992475, 0.43164313734964477, + 0.5227138249336246, 0.7090043439645167, 0.2998320373339187, 0.5545990176184411, + 0.3997327842492463, 0.7316793591805311, 0.8754030075046323, 0.843831916727075, + 0.742684393168072, 0.17453735067792653, 0.8625298941919014, 0.11381870844975783, + 0.27979009889454887, 0.5768907698535688, 0.06594773733933168, 0.9579455432204982, + 0.5946935153059155, 0.45202323444648607, 0.9019434787093433, 0.7818194846873125, + 0.3967066863484422, 0.8228406568388705, 0.09721161204039697, 0.4801522356674264, + 0.01959637598493358, 0.9850457198522312, 0.24050822306737707, 0.18371526827375517, + 0.5845485582312591, 0.17652807791034186, 0.8647449178268475, 0.2895166509368404, + 0.2419901155988491, 0.6941088664970383, 0.1560864519399333, 0.635427742150778, + 0.5816818559836522, 0.9849155684106518, 0.025588074983513787, 0.49459827852911853, + 0.05425377571263956, 0.4743775865820451, 0.7564149515738253, 0.16276984906703595, + 0.7687548469520425, 0.41373225024064564, 0.4544064319037999, 0.41801259667280766, + 0.6314198197027366, 0.20323482183642694, 0.34349885200542063, 0.27104141183367414, + 0.7982388531148257, 0.9953984175532257, 0.8327122421658457, 0.4281616517848509, + 0.9687894499866775, 0.5466038572944231, 0.22487868502002484, 0.8384645649234528, + 0.36865165146819756, 0.9336942046854281, 0.6160039963189174, 0.651923536201123, + 0.14310507886655377, 0.06001830530380037, 0.22971825195338103, 0.9060489022166647, + 0.17227243918298796, 0.03562839494317671, 0.9833405699591752, 0.879047113237965, + 
0.06427639038238131, 0.11198359585969286, 0.5120162273713391, 0.8195681998043876, + 0.518411923936739, 0.5964168071235564, 0.8644480695528365, 0.9742302432739681, + 0.596784065205653, 0.6009605776303654, 0.9172413061542587, 0.9722770947384021, + 0.8230312406014156, 0.6689544008223667, 0.06070494793081682, 0.3249888584728904, + 0.623899766373403, 0.6526210484092227, 0.6766952396244709, 0.34652804154497674, + 0.4071283107450684, 0.8222639692618214, 0.011132519537103813, 0.5760595421942754, + 0.28287338543402074, 0.37983591169858555, 0.40870027326127367, 0.07771932097463397, + 0.03700348128441577, 0.34853902862782316, 0.34976756257395014, 0.20268620904747214, + 0.17602558997716566, 0.07375838827968006, 0.8258050102203596, 0.22552025356880057, + 0.6174348425138062, 0.8743948521904985, 0.04826767861785841, 0.8881260245700738, + 0.20517630380817353, 0.7158063809593872, 0.6424390262914653, 0.28579474166696406, + 0.6943570209638391, 0.8915403913726628, 0.18101264400038986, 0.43672810024025943, + 0.9720888013306086, 0.9163586355462026, 0.8554811772547043, 0.8390860580206726, + 0.9764990516164289, 0.623084563941527, 0.7274814276805354, 0.593214063381061, + 0.1366543769893125, 0.5942756436334763, 0.940654759078095, 0.8976841341685899, + 0.9170727635406218, 0.5512259884901765, 0.8732383847949075, 0.15876424149214485, + 0.983846243401889, 0.06524064955606268, 0.22095794008768244, 0.29289061978348774, + 0.01906759179346751, 0.6027549581512781, 0.06775862309828506, 0.25143958689042967, + 0.5669673318847419, 0.28294105817696213, 0.3174541416076303, 0.33060755385741214, + 0.4008286112428354, 0.36612160617925404, 0.3324377788357381, 0.6469852128501971, + 0.12960876007392885, 0.5573253264003803, 0.18164164552223339, 0.05488525449383397, + 0.8333901979014496, 0.7013270063862299, 0.00973447081459422, 0.44646013225218206, + 0.04509404363374836, 0.6007283625082954, 0.5760011451082034, 0.44293294050290455, + 0.7335522619452794, 0.27903604275828786, 0.1476826817000655, 0.32432344497821075, 
+ 0.6870931336062818, 0.00857861060126519, 0.7820956055279831, 0.4726044283862554, + 0.5732177277699113, 0.14034121755523665, 0.41804284566429306, 0.3953383120567484, + 0.803883857137373, 0.8793775032337189, 0.7863521774621048, 0.9210368915228765, + 0.527325528345828, 0.2898124695522769, 0.31385222532735757, 0.6472638868328983, + 0.5235968800554752, 0.08089383474428724, 0.6501478763743553, 0.1226793956172183, + 0.7544937807956091, 0.1011571047349803, 0.8036833193003035, 0.3947568249380681, + 0.7113878526534941, 0.4334600130160059, 0.995734610188258, 0.08750876548564512, + 0.41692095049348066, 0.4161099210925021, 0.558301508648263, 0.5234324181480597, + 0.6021309454264144, 0.11982213855056323, 0.652984817383927, 0.31994110490998406, + 0.44025076342842107, 0.9018474001363644, 0.1780521010787487, 0.0928203326209236, + 0.5689257589296584, 0.9307858048545634, 0.5019191507483846, 0.0732359171678838, + 0.7472470560306381, 0.7606290137163079, 0.1759363749688474, 0.9865090966718135, + 0.14162530231861792, 0.2257810200578727, 0.8625577589305529, 0.6182840554609289, + 0.7627928373929174, 0.7138267678661641, 0.6271079408184336, 0.26542849852208483, + 0.5438712954597966, 0.6534648091675265, 0.9366397928644904, 0.24959632558445233, + 0.6955069355781494, 0.08747607765195109, 0.4496760092668539, 0.6515312368984656, + 0.36913050836766426, 0.1884121970188235, 0.35484115555318807, 0.35296784884342236, + 0.25069533860349336, 0.9058873429939744, 0.7443484994879821, 0.750346435642241, + 0.7617724756232275, 0.2922607375715792, 0.5466903040314682, 0.8097090004421225, + 0.35407885946174, 0.6348456786908329, 0.4096851448355948, 0.6754014404335078, + 0.8487343259562462, 0.6503887202649459, 0.023939621711200898, 0.0697668884428575, + 0.1932051338234947, 0.07098041071523209, 0.8895455416639768, 0.5970662891404159, + 0.9672124324991637, 0.25710752222232724, 0.2512360193659343, 0.9118588060460109, + 0.12641331224828478, 0.2591745695762785, 0.5354205089164524, 0.9697500651356975, + 
0.7039769817809971, 0.9674853015743734, 0.7581464456430004, 0.009967363889854308, + 0.9554403083896627, 0.6514960891704691, 0.9819054955872719, 0.1655400926361219, + 0.6854831617362053, 0.6538109602793701, 0.29197658934366655, 0.7026261377883648, + 0.3361358393099737, 0.593058167944492, 0.3193604213031376, 0.9112331297151058, + 0.7565016778289977, 0.8148016159609519, 0.12956549959331298, 0.8840025203699886, + 0.15092088574069407, 0.20505821088972076, 0.2243079482977779, 0.8559123643624608, + 0.3470255353137828, 0.1316340364530686, 0.6434241700287737, 0.6784281820296103, + 0.20732777369293587, 0.6251808986824415, 0.6394504726870864, 0.999032394532801, + 0.7786813383845239, 0.8636701630404309, 0.0023017909877571796, 0.696449724735671, + 0.10128652819511497, 0.4003153911577081, 0.2469767716265694, 0.8768832326648365, + 0.34089952102337384, 0.11367436364895223, 0.607022940049033, 0.310423907918116, + 0.17692819881508148, 0.893849330155664, 0.4880236414430278, 0.9956660538325679, + 0.6061924103235181, 0.43946977537006016, 0.014921239872288439, 0.463398031987058, + 0.8435515709522923, 0.05922628363397964, 0.9644192036892971, 0.09735680952505932, + 0.338804944299988, 0.122083944069586, 0.5384101447575116, 0.894293825409865, + 0.7951670891521336, 0.3730764160164859, 0.20353695631133162, 0.7561530473263479, + 0.6832856935883096, 0.600988913103398, 0.35514255942741124, 0.8499665948777161, + 0.5804484005287095, 0.7105133890231672, 0.5155572582331448, 0.9580874216612449, + 0.42311501062470824, 0.13594740755997503, 0.5577114428718954, 0.02444976711511415, + 0.6073524946467592, 0.42490323282605924, 0.9560741306479802, 0.17062467593248265, + 0.8032635864461389, 0.4236136037107485, 0.46938391908058286, 0.5712751546570827, + 0.6589700800129294, 0.07547209392481724, 0.3993441495689085, 0.3406978944629723, + 0.7797752606137046, 0.7855007898427816, 0.4117897980533102, 0.6899309686736047, + 0.6962784108609973, 0.026169671983859666, 0.40177616240464586, 0.940360830179024, + 
0.7321905293315297, 0.4795568133323319, 0.1987575279476046, 0.6127453916604444, + 0.757610880372048, 0.8914703620980189, 0.10720148213654712, 0.7929011279820072, + 0.6530747492772068, 0.308240250783111, 0.3398854214559405, 0.17318325577009575, + 0.3507555425234006, 0.6115615288020042, 0.6172965474103689, 0.773900176617242, + 0.6826457756065216, 0.6383381900788276, 0.7095117173673956, 0.13989700600187094, + 0.8519068750537195, 0.2514355559461625, 0.44387467049936746, 0.08873576599160304, + 0.6307491605599468, 0.3192212749113975, 0.19105550091405066, 0.5681426912540237, + 0.4852586570071583, 0.6901510334227848, 0.14730516213349298, 0.7377793699206823, + 0.07770239636167253, 0.6353314323740415, 0.7769348461239013, 0.03146407552911468, + 0.881921416148157, 0.5196241210509628, 0.18644631162996206, 0.1800152993000519, + 0.8469011263430949, 0.2044030747533443, 0.6714715606818085, 0.2652006622562567, + 0.16588414879773827, 0.004974429525489654, 0.9421216848976225, 0.8680591840359811, + 0.47536229896418614, 0.9087132127558714, 0.36269836666060895, 0.08994196144024535, + 0.46652965119134227, 0.7635088426479022, 0.7580543424043561, 0.2814576427741803, + 0.703103211229103, 0.030089454512598146, 0.431288495782855, 0.2822540862742956, + 0.7883604392754977, 0.40414295830619484, 0.466555918451075, 0.4449073577848274, + 0.3738360598670297, 0.10771135525719733, 0.11598141083836666, 0.608694960356133, + 0.8076241092099635, 0.4556595939890059, 0.5354984218212357, 0.04529301994047097, + 0.2178671393843703, 0.6882823913952348, 0.865528980527933, 0.19956656357560643, + 0.22844588988154046, 0.46974197301841936, 0.5309853116091245, 0.6839795443783202, + 0.29595752432872535, 0.4532475799120991, 0.9284317401382437, 0.8788823041707505, + 0.2166810902369931, 0.5487203138636405, 0.05600987779157096, 0.3084775342972511, + 0.5393125299204842, 0.37240064437816334, 0.3468973654158175, 0.9350645534385649, + 0.24039614056758862, 0.7402803066693978, 0.08306562891693459, 0.28044191179432376, + 
0.8657987335237997, 0.9934355754376296, 0.1270705175053224, 0.6460880715079992, + 0.9198821934593109, 0.6070568194283551, 0.6464985488505831, 0.15197537639099556, + 0.2664357790754843, 0.9231485651377281, 0.5340418003367865, 0.9957229597354874, + 0.8319214218168568, 0.5422138048023872, 0.9699107535181725, 0.4752429518518142, + 0.05408065715452448, 0.412116834384297, 0.6195568540366302, 0.881986809712991, + 0.15550908602876878, 0.7044936352812271, 0.09920603364957725, 0.7458878830244886, + 0.6374540922492773, 0.41588511532180117, 0.3429122583047075, 0.48188295150668603, + 0.45953260031670007, 0.47601109541209097, 0.6130040691925399, 0.37050149227938656, + 0.6783163427743601, 0.997835766092076, 0.3119262852356456, 0.41049362892379715, + 0.2871220882198977, 0.08075775985242728, 0.646921181653617, 0.8765279657302582, + 0.23805860015263158, 0.712917806978227, 0.7988705300739493, 0.8923433911728404, + 0.42345599054528416, 0.18133583214691362, 0.29445384375862804, 0.877237778478381, + 0.4062427754927561, 0.39792110919390933, 0.5595455188757487, 0.9451098578780788, + 0.39714743857270696, 0.1710267066560992, 0.26824919626807686, 0.9956553743600529, + 0.9283196879255161, 0.07718521227499053, 0.4612312284809391, 0.09342894139926139, + 0.6584945035502988, 0.7224986788775738, 0.980272950501206, 0.7600669820648661, + 0.6208086249350163, 0.6086644042782733, 0.48308153203364257, 0.660326199954179, + 0.9276878850610732, 0.8633830105539054, 0.7962187452157085, 0.9194747290923341, + 0.16231806206359722, 0.9499107963424789, 0.06252948055080076, 0.886862482313068, + 0.5294739865073963, 0.7551343176832722, 0.3967203263856607, 0.3205093213479737, + 0.11273108728145032, 0.5400172007465541, 0.5463584941018298, 0.40902420190783784, + 0.34753173597357556, 0.36190947502133863, 0.8449872186646363, 0.7122210627434624, + 0.9946162402041455, 0.9132488211351393, 0.876847448826782, 0.7803556977166688, + 0.9356233374490821, 0.3252320637795516, 0.7962997602423394, 0.3947709697277736, + 
0.27607928050158137, 0.9617201147257819, 0.6892430274739247, 0.5706639923472847, + 0.8567674143010484, 0.759254344754482, 0.4640661001028902, 0.849388861265472, + 0.915484880536155, 0.6803950008264036, 0.2233794778069803, 0.9322812689951643, + 0.6255594311375979, 0.8293069490130799, 0.5363731200101739, 0.22248705406746683, + 0.03548460537717968, 0.5681856389695299, 0.22852476978776393, 0.25863174388684307, + 0.8597279049431693, 0.7032097750280445, 0.49225726322638097, 0.4614451564658706, + 0.40804943937105376, 0.7560063316576442, 0.2990920309244123, 0.36319226695497175, + 0.1658941783846568, 0.9423775873373108, 0.33575270283571046, 0.18716742670642605, + 0.42701655041668696, 0.0062520852671191784, 0.9121308724576324, 0.5179750346926351, + 0.13397622219055638, 0.41617067939349794, 0.22602489432339978, 0.8549501299106076, + 0.34313173874115677, 0.27630233769563395, 0.6506926207027762, 0.2534847525768975, + 0.9379397622811656, 0.24893248547242908, 0.25003116137255343, 0.24185977400785785, + 0.035595161608137915, 0.43731034171619265, 0.9865984103159812, 0.6692875029221403, + 0.39193623341329087, 0.8149634125086003, 0.3685145118514238, 0.3321064154658263, + 0.8821130091244994, 0.5126819098142537, 0.7521217015064541, 0.2490810176728513, + 0.7473789792510476, 0.1377191248691576, 0.8731112905545552, 0.37167076667157495, + 0.2018551847032306, 0.6391343316493395, 0.6498829810818338, 0.5095016824529556, + 0.12411368836434056, 0.6857165595366345, 0.728389597919631, 0.8384343918540044, + 0.13893416384908752, 0.051156043086339476, 0.3945522835366221, 0.7021774315180461, + 0.9664029679637711, 0.8937346328630393, 0.457178494165455, 0.21522711616193102, + 0.6336352979366736, 0.26726767288419706, 0.13628950637506665, 0.8252202343876291, + 0.8953549328626846, 0.1406626559991494, 0.9727894305213565, 0.13233188045808586, + 0.38048680651099376, 0.8619913912534997, 0.2680556820317197, 0.26289459991228925, + 0.7922081914968131, 0.8526785089625418, 0.23177886240082834, 0.5635282170289971, 
+ 0.2942218688725433, 0.9319828382613095, 0.36720463922998925, 0.98388126028702, + 0.5648451215100978, 0.9973939542919464, 0.9896063503276095, 0.6678758243457174, + 0.30974219900139965, 0.6138943763484822, 0.7659247094960486, 0.7327510716461801, + 0.4580874332818764, 0.7148855948300462, 0.3015210526811154, 0.3193783762708463, + 0.5729194299390246, 0.7009199288239003, 0.1498401214370051, 0.8024617709837892, + 0.6546582644003875, 0.6992573312662181, 0.6233980510220369, 0.2816126619003816, + 0.9722556402827515, 0.8089546292787112, 0.8228667190501502, 0.3965111409530997, + 0.21891778563745656, 0.13706461948658555, 0.4930115559366377, 0.08061879901104274, + 0.9723893757331308, 0.09685347646885045, 0.35444636370166327, 0.054297663446587596, + 0.2747322224417924, 0.24392971152181964, 0.18653707247554163, 0.6059851654730029, + 0.6831785302075528, 0.36331807826529416, 0.911774786154115, 0.24997575428215468, + 0.6786094473828951, 0.6104874988450785, 0.3031168390839635, 0.410700724383396, + 0.5441503499168883, 0.6307108716552837, 0.7790601468219183, 0.697505254353997, + 0.9685361200100565, 0.2817021433162583, 0.11693715636983493, 0.5124421281724756, + 0.5381636229756999, 0.2826808604940013, 0.06584110328759119, 0.5204186195482896, + 0.3163423828270894, 0.3494352418870309, 0.875103991061865, 0.008370670187697438, + 0.042207344779274125, 0.06946350667391499, 0.7652448085238555, 0.8849312566549212, + 0.21916084245243994, 0.12139915967065251, 0.6864097172319, 0.9174864218121146, + 0.707314017888608, 0.46782580051186906, 0.449272860553954, 0.6218540956306559, + 0.4832808027986517, 0.6734352802484965, 0.09801798837998055, 0.7656392694188594, + 0.5803233074672121, 0.016925723804065762, 0.4280916359177751, 0.759272753065939, + 0.3546974115594703, 0.9730373850561599, 0.2938207533589179, 0.4717204342156366, + 0.760910396938359, 0.2651220337342728, 0.5942567905334719, 0.6481821197668466, + 0.2895714547071302, 0.26823150371057913, 0.5449562468235388, 0.520914828762417, + 
0.5824006220574875, 0.6943794810741687, 0.5902701522923899, 0.28597743896497085, + 0.6906386637349428, 0.41787531512303433, 0.38227786476971226, 0.5300921568839477, + 0.2857412596573464, 0.6242493428037001, 0.15299829554197697, 0.8562897909007144, + 0.16292018821779197, 0.5840137481432885, 0.0402405580781926, 0.23787132538690514, + 0.02776599953972303, 0.11692608239388758, 0.288342139829152, 0.6706689963111256, + 0.2839947727752087, 0.7475480285762276, 0.03851000884593336, 0.10153799408455466, + 0.14982167863718432, 0.3834776944894259, 0.1975377435113611, 0.6118508790784729, + 0.9680924500451281, 0.46467210253689606, 0.09010588274027209, 0.6136328993397414, + 0.25988950283817835, 0.38455137915690174, 0.9271848868044517, 0.22389518132293573, + 0.04571433156010063, 0.4132690224056872, 0.8095602899860317, 0.2004302755831857, + 0.8245073000661697, 0.4025263860553472, 0.4360528711995313, 0.7644179539602122, + 0.5282084404936749, 0.7836857694796493, 0.5285920916618999, 0.32404378742603523, + 0.07969461980030712, 0.43825279850385856, 0.3910540496216194, 0.18601781392486605, + 0.23184017385286504, 0.2023338017648867, 0.7205492578820715, 0.46163607994538014, + 0.10564143426524919, 0.8560466828351337, 0.18314823447283957, 0.8790698403280611, + 0.7235404635750523, 0.5700427190537816, 0.663288944624978, 0.20529307657969187, + 0.025872315445884086, 0.26585233191055013, 0.49916586962692366, 0.7745665262881396, + 0.9627762879628154, 0.2063350190884421, 0.6834666446671213, 0.47480586450535844, + 0.28448716214382685, 0.18825684592312497, 0.8325226059057012, 0.6286561310639678, + 0.1087577996666701, 0.6207378740918625, 0.4104288522350794, 0.4108005415293736, + 0.9696660955122183, 0.11915598038634478, 0.8193173491910225, 0.2746599332463444, + 0.6098656218024554, 0.9292887066073673, 0.6142569905538732, 0.05766198666724043, + 0.2891271226686991, 0.7332286872225152, 0.8408570347410926, 0.12660806202788155, + 0.5489109956266367, 0.07599553849100638, 0.9426092890379096, 
0.24765591006824883, + 0.08682472760964643, 0.3047969726136577, 0.1583511693504679, 0.010906917670314709, + 0.25896069702163316, 0.944522556205986, 0.06234151012262623, 0.07594537970249937, + 0.16105869616486723, 0.45356629802200643, 0.6374632663710944, 0.3001914280312634, + 0.3597821367694215, 0.8925621967245283, 0.5858933471277528, 0.07241246235730059, + 0.5227573031421745, 0.826769449555715, 0.6226000232785439, 0.5007372389529805, + 0.6254709820295706, 0.923561320563299, 0.5797200818087839, 0.11245274382307002, + 0.8040764735441351, 0.03714647611264532, 0.05933763519542601, 0.6553880446377699, + 0.9868084819068811, 0.846601969915844, 0.9912595685257292, 0.5411967110606196, + 0.3194458019544081, 0.28503696973115, 0.940558947933651, 0.015463034936463949, + 0.9542894277531666, 0.02249221602169349, 0.3513696995287743, 0.3965583453713156, + 0.6480682314401847, 0.04238822866653824, 0.09266875642374328, 0.7134561008482744, + 0.7814274812633255, 0.09770666780296278, 0.8599989901279588, 0.971347798540542, + 0.5212471557348936, 0.31747991512880813, 0.1450807372463252, 0.65117620072487, + 0.9083013925754693, 0.4222592379453515, 0.6905458283758098, 0.01975954713367367, + 0.3018825082636548, 0.3989451876225656, 0.033855521484832285, 0.25926813158625406, + 0.5400023479033437, 0.7352796709581246, 0.2863491137150781, 0.3993791960463924, + 0.26115064828523893, 0.3261317553000693, 0.41279615227638355, 0.09280822785115939, + 0.39962864076829185, 0.0691091881928031, 0.1099761175200129, 0.37020732327571115, + 0.47322203903962057, 0.6020481442203578, 0.2913197256907183, 0.8480503658447193, + 0.8369212226762754, 0.2159265533102135, 0.2040596933892561, 0.06270244678302228, + 0.30482629522564386, 0.30300441826102287, 0.10545107458851533, 0.9884500735278379, + 0.8224121625418398, 0.9606029349645083, 0.8137241016430465, 0.12256305862616801, + 0.3994648845872275, 0.13611055611209455, 0.3738234989638656, 0.42328184892804965, + 0.9544277910360667, 0.8602725531268285, 0.708119222125779, 
0.5581734247006436, + 0.6308586413059186, 0.060826386546066535, 0.621321063968908, 0.8521662217677738, + 0.4581651056279984, 0.29113686416864226, 0.36072087688611665, 0.04325286466960776, + 0.5830232688027613, 0.36606945453901496, 0.31953469940448864, 0.4914081079298772, + 0.6417067214650192, 0.8435793516150908, 0.41203235803708327, 0.4704391766670645, + 0.7394040219384242, 0.13060880814567233, 0.17040717390092686, 0.07174560642273808, + 0.6625377359225677, 0.6025236807306683, 0.8800347546510215, 0.8617598458907858, + 0.8344530020929027, 0.34794343617049683, 0.012617908061536953, 0.24343754932350425, + 0.5029522917900464, 0.5709632194339286, 0.13859388237834847, 0.6036266706759434, + 0.6385922161244988, 0.08416762540551215, 0.9756385479332002, 0.02840231297593976, + 0.18962846029883196, 0.7663379017587776, 0.33597121615285286, 0.21157564433012366, + 0.681081775586533, 0.9639938724568192, 0.04278878227784799, 0.10497675111970428, + 0.2044066673074052, 0.7811112368170888, 0.7592405724946069, 0.9022518225287105, + 0.8772158453470777, 0.6456587085569585, 0.7197790789091134, 0.3039208514116106, + 0.5019971797043902, 0.8450484251992039, 0.31052008957605826, 0.9093877796634839, + 0.5703807373291514, 0.8825570205508473, 0.9302426324219213, 0.5410206187846938, + 0.02754354153544869, 0.3058589283856108, 0.9262969126813804, 0.8178829845848371, + 0.47457588199265455, 0.5186260420566258, 0.24145499687714422, 0.03303822455616745, + 0.3348919328070018, 0.3556239273348505, 0.7599882333173726, 0.31110488751399223, + 0.3117440864080102, 0.16944818831864428, 0.6417638090268055, 0.23236349580680038, + 0.34587506485912056, 0.971536990155157, 0.3940709548855814, 0.318828670016601, + 0.0648933258955775, 0.6832368804711757, 0.7001761872175356, 0.8929530106971526, + 0.6065172313437823, 0.7909968577662695, 0.11683051975031034, 0.7070988588976426, + 0.8471864001611398, 0.11578590196345606, 0.3780528252661348, 0.9625045931257172, + 0.638938863378801, 0.8233951066669498, 0.8385351425218545, 
0.7824515461131216, + 0.5263191463937491, 0.8028713395585221, 0.5625443352568897, 0.9872417926766145, + 0.3681408361637485, 0.8846904840847047, 0.26192715803387, 0.7632130339005606, + 0.6292201295843374, 0.8353086549751415, 0.43072615273592507, 0.6299852117578107, + 0.07623559606686936, 0.9200085390206806, 0.7842562196034487, 0.1959979896446249, + 0.016376131571457675, 0.28265288649826603, 0.6861428381734898, 0.7076968226853629, + 0.7119703873874031, 0.5306741901401327, 0.4220208590103546, 0.7947033412160596, + 0.5151485053643317, 0.3972058702127157, 0.05462822465266903, 0.9877359986860426, + 0.8798662117666064, 0.5505568449033837, 0.40553513014245557, 0.5858695579179326, + 0.08895043292653859, 0.6099264183012664, 0.008441314642171793, 0.9760493177766492, + 0.9613678249547101, 0.9441676049064263, 0.8214246003169379, 0.15584451755630535, + 0.48076835271245866, 0.6035024949020215, 0.37612498047889287, 0.5799200401584894, + 0.302084172353971, 0.9265083065612753, 0.29098565888689487, 0.24602687393655698, + 0.17426263173097645, 0.7501661287022459, 0.41162147864955323, 0.3868646027379613, + 0.1256639886534533, 0.3813662077911856, 0.9778206599687802, 0.45455850729819236, + 0.1053539762985487, 0.3862350707804897, 0.9920370708143132, 0.9459078484251585, + 0.4436734813563835, 0.8830642404597325, 0.10166965940626216, 0.4025768580854986, + 0.7772630634910311, 0.12323142023199984, 0.45824203964740673, 0.23243528127384294, + 0.6578826604427969, 0.3765290956320032, 0.1199617226242986, 0.22675296032919734, + 0.5968539305969898, 0.4005983511468866, 0.715752364949822, 0.3128140640026855, + 0.4494720523772502, 0.419991467540507, 0.2731538167729105, 0.502086594317894, + 0.3094183489498403, 0.7552780010454833, 0.8805577295149887, 0.902189146147546, + 0.742954772014594, 0.9781813303477034, 0.6363166483167275, 0.3725438257403427, + 0.5120983390279673, 0.9872115141031088, 0.790564840505096, 0.16139999953510809, + 0.5981465436960732, 0.48790510653037156, 0.22935582142949718, 
0.07371614277372229, + 0.19509813591282787, 0.1840965142883928, 0.20431153425451687, 0.4969302291390366, + 0.959140169330124, 0.2683435899702894, 0.9031893362312581, 0.3687029098463134, + 0.03620141355461626, 0.38881221139540645, 0.6753349209801678, 0.41152602910201375, + 0.0052301830419607676, 0.2863106714133702, 0.9992699446336895, 0.5659708014679586, + 0.8456850748575278, 0.7421248690482918, 0.5586035143499297, 0.8794148465102803, + 0.45707397600309463, 0.5541658388396462, 0.16386096814897566, 0.6592858371803196, + 0.6564806733580074, 0.8010196200791545, 0.6279639188224753, 0.2031856641673151, + 0.6458557703550399, 0.9151545829474347, 0.8769786758299422, 0.6799180659849733, + 0.6904471817906085, 0.23228805280446052, 0.6000224856943689, 0.3033046816911986, + 0.715991163634202, 0.32302123066508803, 0.5859542166483501, 0.5390683057467652, + 0.6875983398917556, 0.37324507167224097, 0.8686317365735977, 0.5818381906408095, + 0.7710230038037504, 0.8764752631082748, 0.515242166767882, 0.12610688056788477, + 0.3145630456262617, 0.13012984233586966, 0.7414249298066473, 0.815210289604652, + 0.90905210927758, 0.40423585066392786, 0.142438698726981, 0.59167598593775, + 0.6689854076381082, 0.3206711453776283, 0.7272396974165417, 0.8345578978997291, + 0.07672547710487265, 0.6434193235607697, 0.3087203445341079, 0.651369014143892, + 0.46951534551298835, 0.30502054553689417, 0.8437561055455536, 0.7924896011797614, + 0.3664547599672989, 0.8317041294688604, 0.9665712425143648, 0.0528305184676503, + 0.42771418724822874, 0.5206491406449003, 0.12193005095900578, 0.4697342463508719, + 0.09725307745032208, 0.35966530333572944, 0.7915871678529378, 0.21241115265506227, + 0.5366875174680859, 0.25023860092547445, 0.421005615630762, 0.4256991973898384, + 0.6986680430026889, 0.4710757414192256, 0.9811204098637973, 0.2583935201252616, + 0.9095401812853682, 0.6937367869104578, 0.37541234022892944, 0.12262670586598434, + 0.994332061313041, 0.7990247671002593, 0.10245145295586389, 
0.08550967457652403, + 0.10042039847600326, 0.40650226458438943, 0.3226490169437305, 0.7383812349137626, + 0.5422736648704087, 0.2049997570656822, 0.9508231734138662, 0.5776571135926977, + 0.9805809124988424, 0.5965730283331564, 0.18099114349409262, 0.5201657020789454, + 0.3114135281665332, 0.7884053455933022, 0.21162100753327995, 0.6096628526066536, + 0.7786818515772684, 0.6306164268422206, 0.6526082550782316, 0.023776793523503148, + 0.727388103720703, 0.9474420794921771, 0.003255341526218092, 0.21311983991759342, + 0.2854201373436912, 0.025128788509482858, 0.3997306435338569, 0.12942599790128517, + 0.4193927666504138, 0.13846417764974062, 0.9002683982969045, 0.4192267650843825, + 0.10688797317660048, 0.8968779315227644, 0.09357801381969166, 0.593435488229436, + 0.1615617507037027, 0.3969104868778146, 0.2058168097911255, 0.8300275718594782, + 0.7133536924772892, 0.8099949020241295, 0.5971023657788384, 0.5047962655945608, + 0.4948308954480757, 0.7002644892137885, 0.6456106033965258, 0.3756926613839967, + 0.4182245496265845, 0.37524631154075816, 0.4190639562169405, 0.7083014940847334, + 0.4869688244503334, 0.1829384027232731, 0.05683992393624493, 0.6721339534854102, + 0.5140657078896878, 0.0009040149477490544, 0.7280619391959026, 0.3712765579804925, + 0.3807863333053648, 0.06030370411166375, 0.7489312687602148, 0.13619860548171914, + 0.30491461196827496, 0.15709812233775566, 0.7427612714485209, 0.4200114750902272, + 0.47452414939913423, 0.42721054046010754, 0.512269991709128, 0.7839968756026318, + 0.5189604172493596, 0.9678649031766825, 0.7694430117038082, 0.9661851659130114, + 0.11724320877673322, 0.16555987123424643, 0.7659982845287457, 0.8397851933122642, + 0.8308141524759082, 0.974514522674073, 0.5888823552385681, 0.6015497511834978, + 0.33766940452199956, 0.5172169007025784, 0.1151684664937851, 0.09917792697238204, + 0.5060481494154199, 0.6207034439235293, 0.3015730812304874, 0.7350401881896099, + 0.8745082574081339, 0.12569113600408566, 0.33003765531415297, 
0.9538674588418777, + 0.16196141155248933, 0.9713885889181488, 0.8649598840900669, 0.8189622870992674, + 0.5981536968459639, 0.3375690293755511, 0.15780833091803836, 0.4688922374638951, + 0.6809653067294745, 0.8099489346942302, 0.9397818624567011, 0.2208160269515801, + 0.13731709291694183, 0.03399745247583141, 0.007770520790769564, 0.0136079750537067, + 0.08667409124482761, 0.5404703786188497, 0.5275884784108503, 0.6220797998515979, + 0.8949268584833554, 0.06220240112376141, 0.4547565499702113, 0.5616755944201894, + 0.456214811192033, 0.14549587585697776, 0.15705333126784715, 0.8513521876352311, + 0.8555707570987535, 0.6777777182584442, 0.5753792445417174, 0.8092710122108979, + 0.43039805717631474, 0.10813170001182448, 0.4061835414413786, 0.7620290676832067, + 0.9710955949401937, 0.848538152922535, 0.4388449636077072, 0.07822048979791274, + 0.007685888257720652, 0.032468281577470615, 0.04676620436150869, 0.7547309136235593, + 0.8557161437216616, 0.2578097946169817, 0.4414396192494072, 0.40608624202104315, + 0.14065329873717702, 0.19644810328555762, 0.7695470456140437, 0.13664420438195413, + 0.9931986787283871, 0.9715427572974837, 0.9915401647749037, 0.45657463234448803, + 0.26859395150418963, 0.28062277894225207, 0.7828418607966642, 0.8739985683364804, + 0.08575203567051937, 0.41551736379629955, 0.6065216383819657, 0.06172263793491739, + 0.17010506879235443, 0.8864487706687101, 0.6171103631418245, 0.7628600111470007, + 0.210788208367501, 0.5956951636240592, 0.5256402517039437, 0.04064796753551947, + 0.6579871047194946, 0.3968511365349989, 0.6603727903769271, 0.006700832888891561, + 0.5821682715045241, 0.22546133794562584, 0.734471981011275, 0.5390135191250676, + 0.5775081355839604, 0.38847698857575796, 0.8453847336741982, 0.4658590048775938, + 0.941477806709561, 0.2975637512420789, 0.5945939092960826, 0.4070850205548726, + 0.8249067365327899, 0.365635140634358, 0.1403827626894042, 0.45392877256083153, + 0.9623768954201665, 0.24757332720679925, 0.5574796800533819, 
0.736642339096794, + 0.72489895136655, 0.7478504406583955, 0.4001746826211111, 0.28816400083168947, + 0.18556235721551018, 0.23141442594983586, 0.3790518688428455, 0.9732520483918374, + 0.25647077127159446, 0.2224949006171092, 0.2831329493595981, 0.011121202826457122, + 0.6181425590910106, 0.43467899935186194, 0.9187154389042755, 0.17237356356419375, + 0.18633908703966862, 0.4458063841849417, 0.3912464854881712, 0.1799738696676394, + 0.425829975954919, 0.939064828155414, 0.13128887350533702, 0.9652928261801049, + 0.3915637445603185, 0.3649603875072934, 0.7271590688509747, 0.13534767083588983, + 0.08817532104193404, 0.9364372076271191, 0.18814363443177118, 0.727429761686807, + 0.9932277290945939, 0.8204972337366694, 0.6334548375315132, 0.628648558317215, + 0.6263936559908078, 0.7832969276419149, 0.8028167340995543, 0.045674747125095005, + 0.7943958091580005, 0.30107941329116483, 0.12030684478676545, 0.8668632578967099, + 0.3626401220761749, 0.07416698248426812, 0.5871969658905604, 0.6456015981458945, + 0.813032797809856, 0.6463180884774861, 0.7425872273440208, 0.8178583750516089, + 0.547200975814759, 0.6297972581993809, 0.6253475045992721, 0.8544965868004621, + 0.040157587010636986, 0.022012759569749574, 0.5641656956003043, 0.8060041308551817, + 0.15623669049953204, 0.7650688260591991, 0.9561747603815457, 0.9936242403254489, + 0.17832816620466918, 0.5425655964854472, 0.006858244863806817, 0.6331805004320914, + 0.4377186829104204, 0.9432754160853529, 0.8283466290492493, 0.1611242045057537, + 0.7127304224124086, 0.2656178802989031, 0.10526616849523729, 0.003916817073540968, + 0.36758966750883704, 0.16178653008055832, 0.205410845759337, 0.34398519008443884, + 0.03173678051946971, 0.9685580908102037, 0.11752290040258584, 0.20097463649952418, + 0.01699043671132916, 0.8259350267817422, 0.4270421810657322, 0.002046184804222584, + 0.2893358209362099, 0.4032794151614779, 0.1551112824129457, 0.8879493926574799, + 0.8828050518790767, 0.3910640017637924, 0.28207238476046215, 
0.1453512595691372, + 0.6329104504723897, 0.9356594893434306, 0.8830201378973107, 0.32787900254361746, + 0.7524218897758113, 0.3021357790059974, 0.18880418288444234, 0.8252838539655625, + 0.5677734267472772, 0.29468100398338315, 0.6644797868172837, 0.2345284390740826, + 0.7833541222696632, 0.4622428710965072, 0.8180356085413665, 0.12176541112976969, + 0.584843100502246, 0.2076456133040948, 0.43119125370999967, 0.028992135873369018, + 0.8390508629446168, 0.30494489048984996, 0.6280335683239568, 0.05307911693569578, + 0.8573102056913785, 0.6401741471794711, 0.27379230359133766, 0.645161117163579, + 0.9104097310863962, 0.3315222698376239, 0.5311818367945779, 0.8652131787064083, + 0.05266485162702483, 0.9801456049196392, 0.8704025678050438, 0.4548102891048551, + 0.7721429272543505, 0.3241649064877178, 0.31469187001564736, 0.26334638977412717, + 0.03186660049441992, 0.8468763130656728, 0.19742761055665636, 0.34570779894698056, + 0.6323455327655063, 0.6875167796567199, 0.12259887929079005, 0.0363323122402297, + 0.939543188238221, 0.5489445325725613, 0.3518013616113064, 0.14453563213451215, + 0.5645100414748986, 0.2060199151787191, 0.28922738636034717, 0.5886677026646538, + 0.6672156439868581, 0.8099904763954191, 0.9395220274019341, 0.31919505562810946, + 0.5667026842767869, 0.19415684121677623, 0.4903622602229071, 0.5434191206814979, + 0.39852215448310746, 0.7065311902984609, 0.6081023293754982, 0.3592801531792621, + 0.12116546645226811, 0.5914790317147564, 0.26251860772831725, 0.14464917158931845, + 0.5297961258930557, 0.6478751446227324, 0.3604381026258262, 0.4107232060499867, + 0.3953008268223248, 0.5323642799386871, 0.9563146192752506, 0.7587602314961697, + 0.00027251187538357424, 0.121153316398938, 0.9594212052512119, 0.22759321662521337, + 0.4565702003992079, 0.6702201922004807, 0.36616708105055973, 0.991695561201666, + 0.6951684959011308, 0.6190500620335893, 0.21584536965123813, 0.9312075516725733, + 0.15601909764491273, 0.5179008075031754, 0.8255063490752199, 
0.13494994051878462, + 0.9642310090290442, 0.8272333261653693, 0.6366021917962937, 0.6272395345063634, + 0.3602636410876908, 0.940511749873045, 0.0009212294184705039, 0.4377848361798714, + 0.02176286603887012, 0.7769672962061562, 0.782387956693346, 0.46341576437396803, + 0.656171249198167, 0.885971474612103, 0.7242765021053905, 0.08290761159813875, + 0.06457500605283284, 0.15185945222172026, 0.08498716445225962, 0.14438083935346202, + 0.04711043670053583, 0.6778996368590091, 0.12432456118167912, 0.7563736054661448, + 0.873973499785114, 0.1555684810832395, 0.34224996347013026, 0.36172841570411396, + 0.972676126220914, 0.49547019565160233, 0.4529244105540434, 0.9962839176328748, + 0.800125119953785, 0.1738920376037768, 0.18284623538536637, 0.7190213390838295, + 0.06399111697888138, 0.43131888124822604, 0.3345685233817419, 0.6918408961440536, + 0.09191401030276525, 0.6094467690319858, 0.3155152765543553, 0.8511304133976706, + 0.3775476493148492, 0.37718085648958466, 0.5018890912409433, 0.2716755222971863, + 0.9214684748828923, 0.8989643462692504, 0.4649826286370933, 0.11209393856729333, + 0.22266178357086241, 0.7288050076897057, 0.44885240894498246, 0.036118231365422915, + 0.8853099398791242, 0.30371015324266315, 0.6325498537175843, 0.9268946284372944, + 0.574772224567346, 0.9628607169992635, 0.8551770482833655, 0.008154746481042974, + 0.7077596087419412, 0.7718971969298235, 0.17830719702486064, 0.8533683250297552, + 0.30414568345346915, 0.07114972057433411, 0.2633680005183603, 0.23447101218581512, + 0.6559142128080728, 0.3462490453925803, 0.2572834998233635, 0.934642244682551, + 0.19259723052883282, 0.25112809867144714, 0.719306929764853, 0.046652277704817946, + 0.8048231508977957, 0.4035945094394259, 0.754843016228095, 0.44265447223390053, + 0.24163832945686836, 0.3508978652304121, 0.9487337341165799, 0.19954028994243, + 0.7753665058367634, 0.1261038361438086, 0.83914023410398, 0.40628145534635696, + 0.8904822604676276, 0.2052272748293289, 0.46178436950453083, 
0.591897140583733, + 0.03771394287310881, 0.34487766607126213, 0.41897458442963253, 0.9070545816314942, + 0.5885735157093085, 0.030665400969038248, 0.5685906497268208, 0.25636706329293135, + 0.16236531729561277, 0.3672592843529242, 0.26167612287900055, 0.9198553184427375, + 0.5501237881758122, 0.9076478851904309, 0.7175677146126508, 0.6348659851182928, + 0.738882198702744, 0.053165947486076504, 0.07685485583976492, 0.4170544035053654, + 0.7530172238532147, 0.3824441052690454, 0.13183659456583174, 0.6946075275422765, + 0.5961802083489254, 0.0692154179454898, 0.28199758977763356, 0.31683855437220854, + 0.538331444827026, 0.8005022089671784, 0.6995404503460174, 0.2876084865438928, + 0.6803895260833673, 0.9614047085146076, 0.4217842196495911, 0.7451619152592427, + 0.022636969868852996, 0.09071007475742177, 0.8257945290556595, 0.8672837704089812, + 0.9135872070709495, 0.4681264936047449, 0.5697319106939637, 0.6892971196577914, + 0.8251242176940218, 0.24268165740736847, 0.08918571417302146, 0.40770710749949846, + 0.00887880468457436, 0.22222523143189055, 0.08802297896712996, 0.04229681129423801, + 0.27951120411954833, 0.3764968275482389, 0.7686805463597162, 0.4401306364653953, + 0.4573791871848498, 0.9550081226207489, 0.7591288480403849, 0.4915536774744538, + 0.205604153132955, 0.134868882715783, 0.48918377446252714, 0.7090594014668168, + 0.6843626683545188, 0.08242553746318204, 0.4642178367053419, 0.23937677032495597, + 0.7309833110928308, 0.6956405719072711, 0.8535413315180679, 0.5446728940069856, + 0.08533491880607769, 0.3944004625683343, 0.7994991032172084, 0.2164477184644662, + 0.7135783586336566, 0.4884831950340768, 0.08373053917017537, 0.5917610478476284, + 0.6674922873440412, 0.35014975370696755, 0.49517124851287797, 0.6613573248279636, + 0.3802171804990466, 0.11391385038906288, 0.8544333398683165, 0.8983031966127826, + 0.6680778169363261, 0.23296680235267564, 0.7980421992414213, 0.513600541684651, + 0.525730009757563, 0.37865528098107526, 0.7443857543743437, 
0.32898043690749845, + 0.9267339237232587, 0.3916316623498167, 0.21583244809116986, 0.6108933063764448, + 0.12190382848315895, 0.05795075615419576, 0.18585339493384467, 0.4265454993211716, + 0.7841012401338366, 0.33272396587502584, 0.8534312979592312, 0.6009275803559111, + 0.23578725703009518, 0.8324782123117312, 0.7826363192089726, 0.36174336024410503, + 0.46230743039848843, 0.47376876037061144, 0.3678668010995072, 0.4017431618223575, + 0.07840386621932616, 0.9279892070050552, 0.42793000217781685, 0.9587557377816119, + 0.5263942942488995, 0.22350792178613854, 0.052445741452695316, 0.9908397138466685, + 0.2500980404942075, 0.6829265211588706, 0.11852451787498941, 0.3013589198082067, + 0.8329930182716929, 0.760481752519026, 0.9505592437525293, 0.08324982179541662, + 0.8163606057049256, 0.4311119312995815, 0.0023911672020161534, 0.5746291987093409, + 0.22315561753963054, 0.5084757985483405, 0.9426895664145185, 0.3709217647432501, + 0.5826098985446154, 0.23198610325080737, 0.943977046006374, 0.47921249737199123, + 0.4412200057857809, 0.9476458707861092, 0.045738384280730515, 0.7440122966208216, + 0.5587358512482475, 0.6867640944542505, 0.5441339218722309, 0.22356366387145854, + 0.7911120734356646, 0.3726953927021709, 0.018267589919367833, 0.04969670279331795, + 0.30194420494378316, 0.7617935659252226, 0.03786146595146933, 0.8032907384522256, + 0.5148928437192715, 0.026497693642896003, 0.7562626268164586, 0.24084937566731512, + 0.9900162673871896, 0.5770596352891295, 0.37977057043713136, 0.8054207224094382, + 0.6449681468487811, 0.47661518558967464, 0.6054630496227391, 0.8585937854748453, + 0.010922473849501069, 0.8801276229682918, 0.3842900661568762, 0.302059066968726, + 0.5282769570892245, 0.35696922171147905, 0.45964440944787543, 0.43105541008311343, + 0.266541068882056, 0.1773350480081508, 0.625605513254196, 0.07627923477584031, + 0.35297661206462905, 0.06399992814421374, 0.8012573029771279, 0.5988537160717968, + 0.775179820309573, 0.7868373451818418, 
0.5288757403368023, 0.6927594213461519, + 0.9076439953515621, 0.7746089955402077, 0.47985422368325525, 0.5547764500676566, + 0.7754494052083825, 0.7423050553637645, 0.8712909935000541, 0.4295708781702815, + 0.9914003267663137, 0.5892289698919039, 0.07275209414974826, 0.26120918587482855, + 0.1569674079759794, 0.7865373097173526, 0.4572516078945914, 0.6816153429914849, + 0.4239542214959864, 0.891339465192573, 0.9220303240191849, 0.8704793332059096, + 0.8311150077207858, 0.6768975025540995, 0.40411864967994826, 0.9459818123797102, + 0.8453923354042879, 0.05854081503333863, 0.1558983828448729, 0.5223476813940962, + 0.802904691532934, 0.3459328047581237, 0.23543281492526158, 0.17112892107419742, + 0.09439383621100295, 0.015444498324765465, 0.9349297310812379, 0.3205826664760323, + 0.7639851607294579, 0.904151529210235, 0.43898243616850585, 0.8224805696304519, + 0.6096093745024677, 0.07622355975099515, 0.6860819926917638, 0.32974242461211856, + 0.2632632314535682, 0.7772794526938429, 0.20396662442736302, 0.05717543744910236, + 0.9521053962218727, 0.819668916988034, 0.009412243942937826, 0.674833232923555, + 0.5595359880184179, 0.08426763215952948, 0.1845440135798554, 0.17549086921303902, + 0.5671045939097512, 0.6947564656008464, 0.935433974251328, 0.06876677844106338, + 0.716142997612416, 0.8335624602169247, 0.751900542744193, 0.21008292822787822, + 0.5314087389791223, 0.9539426568404354, 0.49432460498496733, 0.7662805473894664, + 0.12067300248409352, 0.9490060225911943, 0.38129757160028743, 0.3304637622494121, + 0.6923925118449249, 0.6763837818953047, 0.31376603445089246, 0.5500136363506121, + 0.9695662573635561, 0.7969621201873063, 0.6622009734643651, 0.9208935437033601, + 0.6345354077323916, 0.5539869805621622, 0.8271605574226225, 0.8381378777363601, + 0.6482903130871641, 0.9121341296999985, 0.01877471354184357, 0.04333833299240708, + 0.29663301105482776, 0.7826167619606251, 0.6260000064979128, 0.022931176394607178, + 0.4702176111712024, 0.34464613773622854, 
0.5622233704239853, 0.9308045897435863, + 0.17052869748875965, 0.5380218062477551, 0.8575389690788897, 0.16184626778171896, + 0.19261265882286527, 0.415940048096498, 0.10152831619720848, 0.2557631049165596, + 0.5248283755391164, 0.5727453970128159, 0.8788143445646042, 0.2959781503580521, + 0.43549633487840866, 0.3609061940005751, 0.1909449912841873, 0.5392619410749471, + 0.9567071103280261, 0.30935584991961085, 0.6105278507130479, 0.11064377140205917, + 0.31109553996540673, 0.04125403009092443, 0.34873047660954803, 0.16387727336452274, + 0.4483096419197691, 0.35998169886965325, 0.8856564503011035, 0.6129385575064529, + }; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(reinterpret_cast(data), std::vector({8, 1, 24, 24}), + std::vector({8, 24, 24, 1}), DT_FLOAT, std::vector({0, 2, 3, 1}), + result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nwhc_to_nchw1) { + float data[1 * 24 * 24 * 3] = { + 0.5576938179613676, 0.5480679233387061, 0.6417021568083997, 0.6750876516427856, 0.24353641279477833, + 0.9823224495525934, 0.7731501711752171, 0.6953374325865181, 0.3602570492037227, 0.7433939974308816, + 0.7004059077803506, 0.9726701877796005, 0.914153253113105, 0.19895076559790037, 0.11256718369098428, + 0.0022265480498459, 0.012614468166029846, 0.42981846729932316, 0.19866318622083912, 0.6931676652271119, + 0.5259354601865902, 0.8774716463189989, 0.04933882811306345, 0.7554362854591897, 0.24900653579384002, + 0.6817184766496525, 0.46022153322521797, 0.5520332209722566, 0.8967786967807705, 0.4793136280134549, + 0.9912034603665761, 0.0370223139896404, 0.10844142009323199, 0.3323270294507361, 0.27005041666217366, + 0.5502761733796465, 0.32836141278496667, 0.0603131267983219, 0.8811862678452823, 0.9715790422861825, + 0.20690528700779764, 0.24009629432278679, 0.7242783251562759, 
0.5567967676701338, 0.9493545988254306, + 0.5527640012764481, 0.20463828478253465, 0.7964892755562489, 0.2724333255839113, 0.14616326216863784, + 0.4662575194407572, 0.1527199695829402, 0.6296157294792657, 0.5936738217081192, 0.44418427285801776, + 0.9640954753646813, 0.16592882093823258, 0.990476417594261, 0.36846030912030836, 0.9859866019657224, + 0.14510035718537184, 0.002330958564625618, 0.2358787856060547, 0.13460757957409675, 0.5270410644078244, + 0.5608551370694833, 0.6744658506273948, 0.9281730412702204, 0.945964541591334, 0.5451854837499628, + 0.45519656900706884, 0.5600342383454875, 0.6893843112155923, 0.05767366375949612, 0.720030390653406, + 0.2673573323323931, 0.19679167663135078, 0.20677275937071815, 0.7533764068311143, 0.45322589140773506, + 0.16132030555888355, 0.4715876236843328, 0.37355093745443213, 0.23481442646444717, 0.0964560448883105, + 0.4316713413341995, 0.1486933723029148, 0.10510210443177692, 0.8613778389131638, 0.07823117805129631, + 0.3254970321441475, 0.36363172551401024, 0.2085993840241619, 0.46320953694522016, 0.041909481052292086, + 0.10858147254244044, 0.8749740701787951, 0.40803137171006676, 0.6945817896285724, 0.15502984464465686, + 0.1644490965265104, 0.42563070985356066, 0.6654145558749625, 0.013508862338429739, 0.04004884332870884, + 0.9014031191103209, 0.18087791778059537, 0.00303398548561018, 0.0038954249311695666, 0.4808668404807881, + 0.8542934622625173, 0.7367959934022067, 0.9896217208245889, 0.25149635186390307, 0.9050016678033207, + 0.17031133805671994, 0.2494715702411735, 0.5680930437476771, 0.954549933820125, 0.2557558871357666, + 0.41318707539729127, 0.21830098176483825, 0.9861938251287953, 0.4672252846267282, 0.32074855054918405, + 0.9196828767636545, 0.4672430576408313, 0.7055647713037171, 0.38979676119223083, 0.0358453349442871, + 0.4017341341400964, 0.383631632999867, 0.7047848728340145, 0.06834239147352672, 0.7616502164965523, + 0.4107587048469111, 0.06595343518235453, 0.5989834812104214, 0.8862218980474931, 
0.7927013837463739, + 0.6918854518083776, 0.5545361856803096, 0.736351436409856, 0.5911325727260477, 0.8370808709961333, + 0.6542416800517766, 0.2613787103362436, 0.9904920761291204, 0.2611136415462978, 0.6065738141875372, + 0.31842130537927016, 0.8052245691122532, 0.9653573346034848, 0.21230284391317644, 0.5036101372743131, + 0.5147461408512055, 0.5839095046253248, 0.09263478211668819, 0.47813464137618733, 0.21555704875753656, + 0.2557611178903839, 0.6801750264462293, 0.887653344662633, 0.7150756721261634, 0.3791725115370653, + 0.33115085237976816, 0.7396670803132176, 0.2212103957921122, 0.3443790134865867, 0.7249200847189967, + 0.198746899771007, 0.827802415010975, 0.7983836884890896, 0.36473450549974684, 0.5798915139606355, + 0.26459354102142374, 0.32253490241826444, 0.7454675023371802, 0.14864548914507048, 0.7436637136812738, + 0.7875162903072089, 0.5586611197141035, 0.13444458615243549, 0.0584828568322322, 0.910350130922468, + 0.6934462857903217, 0.7681269695003293, 0.05593434871311043, 0.5944713140884937, 0.2688403266948515, + 0.8149779553107497, 0.4721117833920081, 0.7872139137118884, 0.4767667472242454, 0.49288902255267475, + 0.535368023415492, 0.285442788189455, 0.41371401125823193, 0.6359195295816026, 0.40006884264320053, + 0.0396525318325186, 0.1128901838996228, 0.18928550101259878, 0.41446455995710885, 0.950038806411715, + 0.8969656410445062, 0.4738685767876225, 0.27339773093230824, 0.7896722423559953, 0.11421088543024749, + 0.01579675654776469, 0.6525673934788851, 0.10070832493884896, 0.20296075269570268, 0.9034835925846796, + 0.10616464455460517, 0.8467913390425336, 0.570442903736869, 0.4167239255685742, 0.41551022895908507, + 0.41536471357181215, 0.994852989759743, 0.2513109849484241, 0.10265360872088514, 0.23008445309632586, + 0.9572558290726614, 0.633573216040501, 0.35510350164702775, 0.07832911776016815, 0.554940923788402, + 0.8425244929657464, 0.7547507557346254, 0.7603504216202375, 0.32454656934246384, 0.12844765027072702, + 0.2815142314209388, 
0.02176898509566194, 0.11939825073119481, 0.32091865605809244, 0.3157593965935449, + 0.6690397046010088, 0.48941224655962345, 0.623726143557632, 0.1575709812498055, 0.22003985504243984, + 0.3229516294381015, 0.7095236946648049, 0.13607798635337598, 0.44804658784405627, 0.6310339935297161, + 0.37901062340884994, 0.08598374871008696, 0.6158424915733098, 0.8706905255134766, 0.5386230791292398, + 0.7144996557898061, 0.6356807028139803, 0.4425710876561455, 0.5365983278275497, 0.418779162764653, + 0.9469773008495318, 0.30927187206231654, 0.40936893971458166, 0.548309347248721, 0.8179140740843444, + 0.25667308138726475, 0.49060334690872587, 0.19326779070356015, 0.8626424645173111, 0.7063301594750016, + 0.5119758149283705, 0.8168071724420188, 0.16641329612366873, 0.06754426402085834, 0.40923910723158874, + 0.816499405124871, 0.9634459187238493, 0.8487566728412825, 0.3896457253625296, 0.3301225172106783, + 0.8825770435506887, 0.7402659538376386, 0.26080864555541483, 0.6978130505246456, 0.9847530894291007, + 0.8441762152997584, 0.5256740994874012, 0.4843213425499143, 0.7017503043482971, 0.3591905906139944, + 0.43038245869640657, 0.11348934492273088, 0.5799067477434993, 0.9152388228522966, 0.7600253102481189, + 0.9185955898312771, 0.4871844797098087, 0.008110304658435208, 0.2737326463684574, 0.41937549531706264, + 0.3112662550079637, 0.9097323699454616, 0.7956152302108189, 0.509438821155469, 0.3930555738378564, + 0.04276396841566121, 0.7814793117071854, 0.47911845356231575, 0.9389220865118408, 0.20120685769092328, + 0.7290407441224809, 0.49700650040477534, 0.7567614845781191, 0.46055879064643657, 0.2777255383285019, + 0.6165739435500294, 0.03207769742241595, 0.8953375189380939, 0.6671961409330499, 0.7975276475662959, + 0.30314186832239975, 0.9596376673343043, 0.640093909231198, 0.8436821711164175, 0.1237345548042279, + 0.022886213132301436, 0.250299242916484, 0.7843568303497188, 0.8609937497701943, 0.29284076270596604, + 0.7675030315279653, 0.5631041599341441, 
0.24860916694926627, 0.5560400086719663, 0.679972623497357, + 0.6297084257593848, 0.9726350531669181, 0.48577991928492015, 0.36414041461821367, 0.7074466158829527, + 0.30538790875902155, 0.8636898203206251, 0.14009878314486368, 0.41736161076242584, 0.03884685622262596, + 0.6049713928898043, 0.14830629853298138, 0.174040945005705, 0.8394226322869928, 0.23500812773962576, + 0.9027325229904037, 0.07210232833626673, 0.524592542808368, 0.22636793436272418, 0.6058030611127116, + 0.27437111512336143, 0.39057502635271857, 0.3942672946450676, 0.05965904887727835, 0.837149862010498, + 0.46762342221081143, 0.2918591238325783, 0.6197874282482766, 0.606961856564349, 0.25598434684729043, + 0.04034986432833665, 0.6853285787984453, 0.6924920794474219, 0.6199159287396855, 0.5294984766990256, + 0.2497349859377368, 0.8618246322637451, 0.57362010476332, 0.8363347417707052, 0.5663522588730695, + 0.7372789472300072, 0.5282798641981886, 0.19114958847316454, 0.768101200276183, 0.7312494341931193, + 0.9037344787676701, 0.6008521026900275, 0.17757135544241376, 0.697998863172609, 0.270660842170115, + 0.3173567907124467, 0.4408583750989511, 0.9688837432017031, 0.15325240685171326, 0.5873937362047855, + 0.6060192520674768, 0.662708767649447, 0.7479938702505384, 0.036559310251870425, 0.2781731367872762, + 0.5903087296332238, 0.44416396951615966, 0.6364481666190819, 0.0599973686234887, 0.6462328722469546, + 0.2903981867328158, 0.11453030427680466, 0.04898283124047065, 0.6496930178456091, 0.0889054813776653, + 0.6316790255001938, 0.039653072192095906, 0.7935221546263311, 0.22023940332382175, 0.4279605117814289, + 0.20525060803459405, 0.5404833527145714, 0.23717483185086208, 0.9786164402703464, 0.5525056845431044, + 0.5850515639649501, 0.13534554243929542, 0.8898566020334531, 0.9272006593578849, 0.33271341280193, + 0.39999980363972587, 0.6694541951302534, 0.29583524445415277, 0.8389631975404395, 0.17883121234879118, + 0.5208034566041221, 0.8778591814372076, 0.6436522945258664, 0.715961659170718, 
0.5162066124785467, + 0.7792030175008168, 0.03980357054734318, 0.28297437515915647, 0.8949085776808361, 0.3025012559871788, + 0.18238528044315205, 0.2496632683186638, 0.43986870620151064, 0.005381997132809846, 0.832457996422202, + 0.3070560150214996, 0.46999899129919254, 0.9674353985309306, 0.3528169416286786, 0.13825446763275795, + 0.8528802875571208, 0.9265141773481097, 0.49222794645391554, 0.816763289811324, 0.7501567103388216, + 0.7030820831074535, 0.14688252055455986, 0.08674132803750234, 0.6804400231686772, 0.5642551096559059, + 0.36407252083885433, 0.8965979777482974, 0.26530479151162056, 0.08833198768361261, 0.694946176773454, + 0.5049743798224671, 0.35322988319400805, 0.5085146662370216, 0.3715996766715721, 0.7636110816368085, + 0.8240197376801905, 0.21947402581862918, 0.877615468813198, 0.31776581529233927, 0.32254310459801105, + 0.8381086608218498, 0.5732121905660629, 0.685007228645308, 0.1264748334029474, 0.01390285801777269, + 0.7470850821296687, 0.7567472028783842, 0.8757581539790499, 0.8160227628454733, 0.31610283349055857, + 0.907322093268178, 0.9137489054962102, 0.10241018294853332, 0.6978352458800421, 0.8560251137039097, + 0.3903715982953918, 0.6894629329539623, 0.024534003121607695, 0.768703697365061, 0.8407068458923248, + 0.04521871276101308, 0.3911478980175912, 0.6048366748080617, 0.02367129352716124, 0.6373792187400823, + 0.05367367526069333, 0.5783021171133504, 0.9085193553173185, 0.3077714610041279, 0.9139449448889625, + 0.8484878574968941, 0.6279264755906374, 0.265504442325993, 0.055705717555123546, 0.41911626457549866, + 0.8182890630399161, 0.1856657129414182, 0.605908791928396, 0.5049329705377718, 0.14795677979058397, + 0.11473188943088652, 0.6800245200790865, 0.0851766335876345, 0.4066331998413033, 0.6083213289049307, + 0.7408546264277205, 0.5917865120906529, 0.1811262015095133, 0.1515139082850422, 0.18245356961058734, + 0.3202506649996555, 0.8347462304916916, 0.5391565304461164, 0.06139823543315803, 0.5556014140682733, + 
0.5624966145847786, 0.7975693287711333, 0.4231662930615506, 0.7504475253633486, 0.3783321068312443, + 0.4156346278851234, 0.7615701321627198, 0.4215509767310476, 0.6195024953733143, 0.31028122589925966, + 0.4817147366535751, 0.6450608352745005, 0.848323852071799, 0.14335792395875457, 0.5794559042167301, + 0.4694763662956777, 0.569959697175572, 0.44398307932725645, 0.7516506165708773, 0.8326804416539675, + 0.3386896839514363, 0.4838291003934557, 0.7881931631320024, 0.9947910463626235, 0.9379908782009793, + 0.6250101717036214, 0.4295650024457074, 0.11489890753723331, 0.8150438598132201, 0.46668569716119923, + 0.10687123028889634, 0.8322242767985054, 0.1683570545191131, 0.3005593571845695, 0.5141109538083758, + 0.38481488230328686, 0.2790369765995646, 0.101622534940505, 0.21198524988541256, 0.5618268632620098, + 0.6020576478139832, 0.6861363838530093, 0.2506744442166098, 0.862875834352914, 0.515617605529789, + 0.5774223878377291, 0.44412999428852895, 0.7871540883141954, 0.039557168583882696, 0.09303719406445532, + 0.6186809320638805, 0.47547710684047406, 0.6365615446012882, 0.3635186528606992, 0.5404640956394502, + 0.391697345346507, 0.24250401036270308, 0.5173064771408595, 0.6276038960866358, 0.48087219344337306, + 0.8595156094358232, 0.8384240955628381, 0.8664804169889542, 0.10311625824407467, 0.15117205862777972, + 0.2842565683987759, 0.4313108920421431, 0.6859166896968107, 0.8708403014819337, 0.9238740143377883, + 0.46096307835937356, 0.5259228242606849, 0.20687524999638973, 0.7628475477918988, 0.2355911329326772, + 0.15255012458685524, 0.010532812497153099, 0.7287022892124747, 0.5427457659655633, 0.39110174343301596, + 0.5628813409165464, 0.8640947828253543, 0.18687012766308642, 0.2208727363386015, 0.4023460735613871, + 0.9953633859063187, 0.9843585772801463, 0.8152799497172286, 0.7336318791938153, 0.8585384885791164, + 0.22226454850151056, 0.7412558552103334, 0.09585638221818615, 0.09327791456333101, 0.21084765923063864, + 0.700121735075444, 0.540143349882634, 
0.4907451177552751, 0.9457904755937173, 0.5557027424836565, + 0.40601299165301985, 0.8741342123700807, 0.6043822127088522, 0.38880973678461517, 0.2913965400017039, + 0.635045386209623, 0.5584246149451094, 0.3590258586858043, 0.11895408520648643, 0.10807602415016127, + 0.9324394591487412, 0.21412622156242278, 0.18275262543142878, 0.2785720549608761, 0.4168915541552838, + 0.17402817591505615, 0.5955109224024612, 0.6860410428994667, 0.2015937407328915, 0.6093048651358479, + 0.8362971024617537, 0.8849118337827289, 0.5579648244813307, 0.1604197275319652, 0.377331907017168, + 0.48923170302655916, 0.0015534694918136749, 0.3377638457595308, 0.040295444090547194, 0.16683864032852347, + 0.5086224074379112, 0.301895126385361, 0.0380164358652878, 0.7761068172424201, 0.7161338480188987, + 0.03511408488081447, 0.8458824121077834, 0.6212148954455755, 0.4807109372544841, 0.5825144300241644, + 0.3252230173406172, 0.15151664214217997, 0.31379789941978886, 0.5859846921828898, 0.10548618728899939, + 0.7799327059130073, 0.5017178396499485, 0.6302059258053215, 0.6462918881194284, 0.037315649813791096, + 0.4131988922704686, 0.5496040687131041, 0.5815118195729374, 0.9333959432119138, 0.5606280619195357, + 0.10529898355514655, 0.40858542505531903, 0.2622984173231212, 0.14560109732536652, 0.09631357136150354, + 0.9533428464170677, 0.7821128845752052, 0.087564812497272, 0.6927678723431243, 0.3908157894176354, + 0.12769479893109448, 0.28670323747922943, 0.32053314608704864, 0.5944289290157524, 0.918328531851499, + 0.7690452018739381, 0.1504256727793163, 0.9181276962383033, 0.8676929204814569, 0.8245058563795197, + 0.22471326407596626, 0.020209696387830434, 0.28398136463137413, 0.8742061750012948, 0.7080830488792054, + 0.9427807014726228, 0.035691735149414594, 0.30867899872761073, 0.8079236541236925, 0.9830142838917029, + 0.14378068100085928, 0.585802992322683, 0.04618299224281264, 0.906567586147243, 0.6431029480572278, + 0.978991611793116, 0.3927665003335742, 0.9852044970284112, 
0.90920549543672, 0.9916129484423926, + 0.33240595005615725, 0.9641936713436415, 0.5415271388101568, 0.386704376120808, 0.352173417181341, + 0.12689054054302085, 0.05460816935049695, 0.02749063973513044, 0.15320500124098402, 0.05589652047614602, + 0.671264386437005, 0.690019196582955, 0.9792882505670737, 0.32074385906006564, 0.6785067283721109, + 0.5650652839468214, 0.3323412691373995, 0.33346421693887085, 0.4522524488689421, 0.6222518184003937, + 0.2796375762118375, 0.08701083613219196, 0.10327769425645661, 0.42199350041600436, 0.716345677249626, + 0.7478402622044948, 0.2699935286819868, 0.5839245986158135, 0.141175087073799, 0.315394010904003, + 0.40122325862538133, 0.13567435950327245, 0.6302297688265931, 0.13032245945931542, 0.06388400985569753, + 0.06165112274849838, 0.00256671520725571, 0.12022737044113196, 0.7570811347298088, 0.5869283108488538, + 0.04865807065818539, 0.5279310747451385, 0.5873285471779122, 0.9346626555052675, 0.9355999029242515, + 0.8488560464150674, 0.25035821705527694, 0.005502195322430903, 0.8061332855050097, 0.263615164003856, + 0.15228560281857328, 0.33938585999470916, 0.9103450312680861, 0.0024754880413346836, 0.6884758620551574, + 0.0822476396562889, 0.76205750363049, 0.8172347973442694, 0.9214016455934766, 0.06309530860411938, + 0.6477500880815694, 0.5471141631488152, 0.34430528503769187, 0.675451984773968, 0.050092295886368765, + 0.8108396869466088, 0.5223003594325687, 0.12916099885366905, 0.5944367076434868, 0.15458609355946118, + 0.09350772631506876, 0.6759427595358818, 0.43259107705555877, 0.4128426881984989, 0.873314370429706, + 0.28851810307018777, 0.03426701049550318, 0.2835014145918603, 0.3838462692063309, 0.1114557011437497, + 0.0647251610020978, 0.4657950297766186, 0.9034110226366754, 0.3257198707861738, 0.22707077383985486, + 0.6238078513067472, 0.1795620365862599, 0.8502192712624803, 0.6339993330568727, 0.07541607354608881, + 0.3677973310632141, 0.1017850925706153, 0.7292249395583527, 0.13403602141551874, 
0.7574287993118725, + 0.9902209094287545, 0.9530360146704632, 0.4855538696652342, 0.08944953657777999, 0.9826440640203283, + 0.0993780068507144, 0.3673644605674675, 0.42089152977923683, 0.7840823982240984, 0.1642628862370007, + 0.42180479050443176, 0.9062905450373338, 0.11299431248688241, 0.15720226657930703, 0.5188575412994682, + 0.4695447110816021, 0.1891201711835082, 0.07931477242460494, 0.2869951115264081, 0.5353614977912917, + 0.3302454137917453, 0.7582745052506809, 0.6288357150342787, 0.9683115424644188, 0.47703270425834443, + 0.8147188655365647, 0.24089419521054534, 0.42683269753999564, 0.09049937046097911, 0.5221400082332656, + 0.25422858972984796, 0.4428874994360409, 0.6263972136839191, 0.025660370575602554, 0.9941624285613735, + 0.45655675103301285, 0.7868622372963548, 0.47490144756023966, 0.03623782209981852, 0.23858764239770813, + 0.792905409090189, 0.10528149339125581, 0.6938787595875271, 0.6373109153860876, 0.2369904707552496, + 0.2535497506022084, 0.362784258536637, 0.8871970275709483, 0.5371714185957712, 0.18612006988393281, + 0.3105201870093366, 0.6431145666956772, 0.43510833638726243, 0.8650306308730747, 0.6647676364679233, + 0.3671635077914902, 0.26975405692122023, 0.5513817267954401, 0.11409342083819618, 0.7401906890372566, + 0.4543637222307828, 0.11234212817124256, 0.015236741198945358, 0.2718008029139152, 0.24775037353256546, + 0.6932025352071033, 0.35994464028232376, 0.6212388268713533, 0.2353715196443774, 0.7056842331140273, + 0.6281062349165449, 0.8990410572378255, 0.6150478247355882, 0.14983193641298098, 0.13175976405917256, + 0.052748374640018136, 0.6762103626227616, 0.8299117018175071, 0.8940709299691674, 0.10448467671854955, + 0.5854392560620701, 0.9869207117898028, 0.598250699186468, 0.09941022297908908, 0.6428387188696808, + 0.6938074993804616, 0.49345606955707433, 0.10564621295462462, 0.8730247668913396, 0.5693111552840876, + 0.6451107754618787, 0.8268479077193474, 0.6109414753717066, 0.5642582138462809, 0.9972427087729138, + 
0.6210232613493811, 0.5919936865559303, 0.5191046008923272, 0.12187375071541129, 0.23663160371727765, + 0.8911388789395779, 0.09170073062022499, 0.9151169937896009, 0.509264551627256, 0.6775185133293696, + 0.5807847949216597, 0.5049738483313311, 0.8926164238380273, 0.9055894805021459, 0.3087999310101207, + 0.11153600376557016, 0.5216542563502468, 0.7752137734199523, 0.36336189612943315, 0.578729717420777, + 0.155536036963988, 0.28575020651201066, 0.734142200046549, 0.8352195411494564, 0.3006161153901252, + 0.3385247520939818, 0.629649706648176, 0.4751978516601386, 0.046652306728973136, 0.1939043713632571, + 0.25762053965235965, 0.5827420694536557, 0.7669274996476307, 0.8259088244580408, 0.7131797928316381, + 0.11693237140089863, 0.2996039384723006, 0.8060861134326347, 0.9811994408643644, 0.9168704216696336, + 0.8332240607025703, 0.8081707614206302, 0.8741744876565002, 0.06749958945821999, 0.1746768750613229, + 0.4619743507528482, 0.516280600505945, 0.11924956852409285, 0.7190526080623696, 0.7492979599727628, + 0.6107457676123809, 0.43764245520627065, 0.39770626841941925, 0.5994539427198272, 0.029650825870803477, + 0.43677939717333114, 0.7269927062916417, 0.9399851529255545, 0.4170048577307417, 0.21157476325939661, + 0.1719914610443789, 0.4824712574759782, 0.7019666424206221, 0.29969992238435406, 0.5524848644926086, + 0.5389999399640987, 0.13512016024776252, 0.9193314618277667, 0.9060064439328889, 0.6519346860670783, + 0.6803898392289517, 0.4356093165139081, 0.04216471446744485, 0.38598035373059003, 0.8690026759179446, + 0.529699047649506, 0.9906048951292297, 0.8222557998599189, 0.8936486293424404, 0.8370374083834665, + 0.07222599597929102, 0.5614875873690469, 0.6866355448825368, 0.9189440327960383, 0.38854122489677667, + 0.7472313769107196, 0.009679487835292266, 0.42965353004741547, 0.4961903042881499, 0.5197248295836395, + 0.20866020350533698, 0.971688128137546, 0.3812919886435633, 0.8442419283811183, 0.5035567916576319, + 0.5842646327860408, 0.751957287605275, 
0.12934923846644109, 0.9048431033119261, 0.6247835602850056, + 0.6663491618485525, 0.6191447480411455, 0.38868231376485674, 0.16429514721037275, 0.047048187901140404, + 0.1347824186806431, 0.004461437904420995, 0.9762230649248507, 0.6635426573687336, 0.4755094707753683, + 0.08415024415060357, 0.6146381733545787, 0.3487710412736914, 0.15657338996981707, 0.4914780532252889, + 0.7186224430800245, 0.47278786793745786, 0.22586533734423897, 0.1388136048065316, 0.9421248651798148, + 0.18687021665791048, 0.886751975525458, 0.27210373932980336, 0.1871193534007316, 0.16936019822848425, + 0.9304125472360207, 0.46427170052831335, 0.6327522727545662, 0.07545431142174563, 0.2887757304460784, + 0.635380555069413, 0.04655276242142847, 0.29264159982594307, 0.40152844863600323, 0.5698289275272337, + 0.48230928722272504, 0.5114169171284336, 0.22789767714751497, 0.3825012877262921, 0.5185614672521711, + 0.1737430953954605, 0.20581048451737827, 0.04940680888298521, 0.058372511339505606, 0.36888096201496545, + 0.5927527420448586, 0.6294706430315368, 0.31310297106149565, 0.004081843984338196, 0.07647535710909648, + 0.9227331395115886, 0.611581049519875, 0.22245198307387348, 0.14118035675934504, 0.5753891435829265, + 0.6009183620680273, 0.8821080388976726, 0.3574326544426977, 0.3343173459330723, 0.07400286558248481, + 0.6079510499897496, 0.6421025402503626, 0.6417623640115212, 0.16449092617603212, 0.8897818639890097, + 0.5170022343266591, 0.6995314134978298, 0.6474260353677674, 0.8420817289489861, 0.004239835106883305, + 0.4813679241351504, 0.2507963913182081, 0.35085826191749636, 0.3824802249210911, 0.7812620902445477, + 0.39165826007683324, 0.7824337622838438, 0.902646230736842, 0.7876851397804997, 0.8744568184404363, + 0.776047650573657, 0.006211069452425777, 0.26783566243254986, 0.8952462478818862, 0.2791760055342227, + 0.7023106099713998, 0.7907303599044537, 0.9213991389869884, 0.17502498094734908, 0.0031263566165733447, + 0.8730810656506978, 0.8814801958973274, 0.9809862858948138, 
0.9524559675550403, 0.6161602839573905, + 0.81280099967171, 0.290153972227604, 0.14335035008289465, 0.8477577457444683, 0.8401166936915387, + 0.5212668248749791, 0.02104693531456414, 0.7383245499273487, 0.5626987498664584, 0.2934473998659016, + 0.3456826354918213, 0.22702830817160635, 0.4496243188766037, 0.4910238089090576, 0.9793781225504641, + 0.9468223697800644, 0.3401409033329521, 0.6180799786358538, 0.863014788820724, 0.13095056557012497, + 0.5857524793318057, 0.20029206183457626, 0.0361080988081901, 0.5129980956789748, 0.17455369794424835, + 0.6798675384017852, 0.04555314684145728, 0.1592728573611274, 0.9027129404222184, 0.5417022462380706, + 0.36209650006181326, 0.6357024603609229, 0.6289268057180017, 0.19553033285442378, 0.45043719562592965, + 0.8266341879328025, 0.7773840612765499, 0.8308068439506016, 0.4304725604114389, 0.41676684693732735, + 0.8708314311735844, 0.8248490675287319, 0.26264441069811695, 0.9044186668920613, 0.5936822156957996, + 0.7875285367610492, 0.3118444321158038, 0.6916715068300263, 0.5669435387631938, 0.6193299528144401, + 0.35712243943419775, 0.9175778213344772, 0.343460209344983, 0.6460542393416723, 0.21516198411188503, + 0.45642068377669887, 0.2987386214304467, 0.0632550323284714, 0.7362366025637085, 0.6128438567584318, + 0.9428771776851117, 0.17489798955430746, 0.6625086065781743, 0.2172456296265367, 0.4475923979474332, + 0.5564922845944792, 0.28320517322282546, 0.49267771485941037, 0.6547427411553769, 0.9242596499269207, + 0.9147805216639984, 0.6073412491734623, 0.46989203803910085, 0.45935706630078055, 0.19798791257453774, + 0.9334249248647313, 0.8744326138824626, 0.6205838180552334, 0.7762732211789131, 0.928459089113231, + 0.937820769140132, 0.013582125340719675, 0.5324907160051711, 0.02684611862059849, 0.06927193578744395, + 0.6400214688609211, 0.5133721282618873, 0.07999318330104577, 0.2577710810650832, 0.817321363351518, + 0.8076114189907397, 0.324657289350905, 0.9650672686251389, 0.2729254863821494, 0.7809831977836019, + 
0.1652336449783368, 0.13662799169926676, 0.6776495828489617, 0.14255174957497696, 0.38418911218942264, + 0.361423843762243, 0.8685882991583032, 0.192680742555284, 0.3148856602740503, 0.748113225863308, + 0.8618407210129345, 0.7156839650988885, 0.9623403051807798, 0.9973527869739185, 0.008585147934917492, + 0.946331588194702, 0.5553431375586435, 0.02054704844964983, 0.842338238512127, 0.6252142939582864, + 0.0454440429846138, 0.8768779524997832, 0.05197495573171229, 0.467402461137189, 0.7245722869024117, + 0.8526963916761623, 0.3482114761714936, 0.5870305114470717, 0.6134256688006303, 0.2274168614700276, + 0.9080883353774347, 0.6004815985037668, 0.3675988930092098, 0.824331351985735, 0.4949438124009987, + 0.8227638227658204, 0.8099433384130386, 0.7102282370301588, 0.4735140918259736, 0.7338703370134286, + 0.8445820971086851, 0.19596949116497853, 0.7028907486937389, 0.005417090289743465, 0.2777603577620631, + 0.603675472091728, 0.328955924989748, 0.9341776586261046, 0.5540450376230617, 0.9936257238147864, + 0.4102405081961351, 0.7671813626238805, 0.3691590461742723, 0.02728682877510813, 0.4823869343924113, + 0.788192482427858, 0.22180088690053323, 0.03448562234999297, 0.3719357169116606, 0.4969098885447305, + 0.191655313797513, 0.5804718234186538, 0.8900318941903801, 0.6934508297023688, 0.5530225699005005, + 0.30074428571738887, 0.8415057046819492, 0.17183516891041328, 0.12675873590477205, 0.2840572816367717, + 0.9269887520833086, 0.22488977278831046, 0.3168242242390731, 0.9847372594336158, 0.6509731962068283, + 0.5073316119069321, 0.12487505196657323, 0.22154354697564693, 0.7685722942349587, 0.2743762334211701, + 0.6865202518278289, 0.8318371362076777, 0.5386258032132064, 0.8605771528753087, 0.4179331943064153, + 0.567405809525627, 0.3562212006461868, 0.7963357296035821, 0.4994531897202181, 0.7866783613717578, + 0.9008311272506405, 0.8699430893250892, 0.9993156197161146, 0.9139351509015179, 0.431449717755505, + 0.620176535824029, 0.8539376483870202, 
0.07848072387288219, 0.01424037752463747, 0.24407675438189358, + 0.701333216078581, 0.12046807952838845, 0.8390349031208393, 0.7077691706016829, 0.14105948480485575, + 0.3113502459956934, 0.5657395772346601, 0.8130737272213242, 0.3365673473812547, 0.7796612467421044, + 0.23107384517188134, 0.8118009929054556, 0.16788633369095085, 0.030079422556517188, 0.5371500828826153, + 0.6015148664464552, 0.17149243221501909, 0.1172621795708465, 0.7925641613400864, 0.2696618250878239, + 0.34008210185151533, 0.5207478736401517, 0.09029226432107296, 0.6243414247626625, 0.8086350261894591, + 0.9280525217627306, 0.8179986367464573, 0.22809792345840785, 0.174299626331055, 0.18547930426962, + 0.5314239340501304, 0.601922588468467, 0.2659164721740783, 0.6546297288104892, 0.7750427002091234, + 0.6832042772679686, 0.4156617408655624, 0.7108870437063272, 0.4922211900243272, 0.6602291458395536, + 0.9340984899103667, 0.26225209481552736, 0.5787861259275977, 0.4188843096595267, 0.5075709771319182, + 0.32679093039873, 0.319215520593643, 0.9015861727324869, 0.9903680740595437, 0.11771029618746809, + 0.0698828908110074, 0.5844288716113109, 0.8979402481813988, 0.17347031758337805, 0.3335669457212541, + 0.7666755194391025, 0.13900424509002107, 0.18184438650179768, 0.474170125390853, 0.6494817087100319, + 0.49904484954494976, 0.13960662583364958, 0.17721720406416797, 0.49090390100838, 0.8076048584231867, + 0.5180775508098545, 0.2747889640775497, 0.7643973635630646, 0.998600064745631, 0.05432415993063511, + 0.14612254019625004, 0.05100113670893769, 0.6348270472795142, 0.9276971598715721, 0.02581820063068263, + 0.7618910314479553, 0.08292626610364884, 0.0015068117399120728, 0.5450893147590388, 0.6305267716579811, + 0.49595914894179594, 0.6042571272764584, 0.14456224796821027, 0.9701432397722076, 0.7003300711950627, + 0.493939632833688, 0.9677948626091621, 0.9564276322323998, 0.24714075783963407, 0.9473621800880258, + 0.7162882508139052, 0.637129169365288, 0.7739890486016565, 0.6364250055940857, 
0.34416412881956693, + 0.4607199148631299, 0.1827420856730403, 0.384871223091256, 0.42115413371097565, 0.22520478035347657, + 0.4221255541659472, 0.47553531617318, 0.5826635954679447, 0.2970822199027767, 0.3303683669471392, + 0.44776861852667227, 0.7290686471896674, 0.41721974940422435, 0.47684193819117915, 0.7622610881055619, + 0.4701127290373278, 0.7535603587898286, 0.24234999643629496, 0.6912929504723353, 0.6020583704478337, + 0.7827833151078375, 0.6394518382053578, 0.03227298489054431, 0.47079183685813464, 0.25761725598378504, + 0.2989311962894181, 0.9136482203752961, 0.6441555783810878, 0.9584970068604538, 0.7768386939495876, + 0.3621519413519899, 0.3271443319211841, 0.8984271382188533, 0.003205103140524912, 0.6145214518321328, + 0.7831497277215794, 0.6446271971021358, 0.35438910375402444, 0.8603641619194752, 0.1708378830206404, + 0.7967789884708172, 0.4086200513012481, 0.6969884754450677, 0.3870316925454582, 0.5322382329384319, + 0.0096584224912859, 0.6264169770789575, 0.2921659601352481, 0.6253221565828221, 0.9117837886557485, + 0.20374405516041583, 0.21591547095868324, 0.5470831816133089, 0.6910280220095179, 0.8633431978353517, + 0.6046816337812168, 0.7350636029248344, 0.4660768528067444, 0.4298312288933326, 0.03120480264321246, + 0.04434142372395833, 0.5185989584440972, 0.9056713041598138, 0.8104628144429474, 0.19619711194429745, + 0.2120133204613942, 0.006407291414015304, 0.2585861442212085, 0.9676421774705622, 0.35999532618759145, + 0.19113832261170938, 0.33548288170860974, 0.2585580653888524, 0.8554931389682613, 0.6391815211464841, + 0.4800788135670906, 0.3940763810605721, 0.31535986613369604, 0.8209588135749953, 0.4541325511456713, + 0.5877570824102291, 0.7583566943672284, 0.6029788742971568, 0.6861092075074696, 0.46621208455465135, + 0.18007423591415328, 0.528332340935112, 0.1138522150371325, 0.07879987486346618, 0.7269711145951828, + 0.5582429424002487, 0.8617872480152714, 0.20812038948111622, 0.7402118148501086, 0.8454488591069442, + 
0.4223837383275706, 0.23416208241975545, 0.4220834167811641, 0.4631539491432527, 0.1626564116660122, + 0.3309452324727121, 0.8213528389917474, 0.34861525565456, 0.8734450834953609, 0.8880552830046495, + 0.619340243121358, 0.5775673004205525, 0.19101754008288407, 0.3544830751061945, 0.3013493746590046, + 0.9129590937779266, 0.9012416695964349, 0.6803789596610658, 0.19564066112257705, 0.03260488368221248, + 0.8705799211122627, 0.8409450026881247, 0.34475518131557215, 0.03784801519303338, 0.9751661267040307, + 0.43034168879885626, 0.11065161544340107, 0.7276057777519807, 0.004982163159524933, 0.6945447838872708, + 0.7273041470712368, 0.9593936841500735, 0.13379258768168645, 0.3777965634633952, 0.2703595868217632, + 0.15506449640296105, 0.20424268820820335, 0.3527948681199745, 0.8002681114870956, 0.7594567227703592, + 0.946495649399147, 0.28728350198169084, 0.5580175944700493, 0.7032420767984198, 0.08092465521124148, + 0.32656469883293826, 0.7174885959886983, 0.871252990017252, 0.41670888180343424, 0.6083365236824875, + 0.7249396470594075, 0.5620465315393466, 0.5157197115732972, 0.5506549211709697, 0.3285477028094975, + 0.7093308180251068, 0.659084127611101, 0.02765869743583449, 0.6218822201313294, 0.39482091817932796, + 0.626135560747538, 0.2580755081198327, 0.3478174802894556, 0.9676938413436679, 0.33238224766067537, + 0.3310147497770647, 0.9559943893515941, 0.4911376916833988, 0.09063791913423813, 0.4046315875494847, + 0.4740020520121634, 0.4765906670364052, 0.08583441419976279, 0.8472274380619484, 0.552660546415093, + 0.004701142838580807, 0.8830999516501286, 0.21208354271106145, 0.16396793443292523, 0.7933072283261428, + 0.8362655604438428, 0.002905946258930814, 0.6696363008399273, 0.4184101631532844, 0.7544300764519166, + 0.4629514408891108, 0.8815767929529509, 0.14872496584233852, 0.38953018339451484, 0.6031094795257194, + 0.7357426137848664, 0.6960142716191104, 0.5308984163386145, 0.6471850332368065, 0.9507310652965659, + 0.05355118172872486, 
0.26789766611378607, 0.81171534461288, 0.8815252676442482, 0.27322340111095766, + 0.5694545400487966, 0.4643747260984441, 0.2680931795929846, 0.9639847915873286, 0.12682309670693648, + 0.8177762874567295, 0.6777803851961555, 0.7292121888029454, 0.8736686253239911, 0.09298362953922867, + 0.41648039805853876, 0.8251362617469858, 0.9705641885675158, 0.2011963854694191, 0.9577690476905739, + 0.22843672315476948, 0.020419675906707013, 0.7581006474779918, 0.38291815926073636, 0.9924212687610415, + 0.6853565742888263, 0.7213851262299584, 0.6432205505596658, 0.3880096040198705, 0.9856208118658878, + 0.47945352909793304, 0.6672084718234337, 0.34523037644096277, 0.42050931713665096, 0.034634868583634404, + 0.9342795633486977, 0.2875754593792146, 0.6066253338972054, 0.7808305389779022, 0.5612945690875106, + 0.9404065215610417, 0.9401144197667547, 0.2894685372960125, 0.17461530457647234, 0.04754095571588024, + 0.6773364043368078, 0.5802981115586667, 0.6317070842942244, 0.1407365728127774, 0.9850309591715055, + 0.7809932815700706, 0.9836260301540849, 0.12108199859136526, 0.16366744058478355, 0.7850185780027568, + 0.18388083399956057, 0.9126319991757875, 0.43542132106992804, 0.5242994382487212, 0.7587538803727455, + 0.8773437341277317, 0.9349653031183464, 0.09854330443257797, 0.8274753008171292, 0.9395017553746543, + 0.3411167233785305, 0.9825311160775828, 0.7548311572520328, 0.3414971623264297, 0.8882855662786443, + 0.7757145759635072, 0.23480704090360538, 0.7555530210610234, 0.2594007449598288, 0.3004489790483701, + 0.1633986233662451, 0.9677764682764135, 0.9554587458033513, 0.7961431105300174, 0.6288482538745904, + 0.8884132965339431, 0.5852057892875403, 0.07372112834870348, 0.7220664089605319, 0.6151123645576676, + 0.2773844267431448, 0.25458076239534255, 0.4992589489046285, 0.8491493815037809, 0.024866663757468488, + 0.0424358193013229, 0.3993980794861991, 0.7933963563504444, 0.9012822919659152, 0.9135968462592506, + 0.4006494422735588, 0.4075832830891558, 
0.2701036343384797, 0.20306804721545846, 0.23404002509717414, + 0.43759924003124595, 0.6264063678412422, 0.3721469206496021, 0.5207484113258347, 0.9755662926676001, + 0.718375881245492, 0.7969089682462961, 0.8501140321994687, 0.9364329737732714, 0.1425059391434702, + 0.9597826876409328, 0.15670568562557385, 0.5817575280658813, 0.9063728174897496, 0.8546284305750449, + 0.35439249722790844, 0.18820064869908193, 0.7522727281834883, 0.3261970280116724, 0.9167584194413457, + 0.10593348360373511, 0.3683029453317247, 0.9309643913035914, 0.45258186077884677, 0.5803936269369658, + 0.40568625284440596, 0.4828478564591584, 0.4590798443606058, 0.5651911451209986, 0.3015797182224651, + 0.7979313236417561, 0.08060630938246094, 0.9517418500418697, 0.3021183772119437, 0.9187005402936496, + 0.6840531382369449, 0.5180709751097267, 0.6876828482380173, 0.3888003563218816, 0.2653720936625277, + 0.7571624379645097, 0.7403022510887606, 0.06001106938898426, + }; + float ret[1 * 3 * 24 * 24] = { + 0.5576938179613676, 0.6750876516427856, 0.7731501711752171, 0.7433939974308816, 0.914153253113105, + 0.0022265480498459, 0.19866318622083912, 0.8774716463189989, 0.24900653579384002, 0.5520332209722566, + 0.9912034603665761, 0.3323270294507361, 0.32836141278496667, 0.9715790422861825, 0.7242783251562759, + 0.5527640012764481, 0.2724333255839113, 0.1527199695829402, 0.44418427285801776, 0.990476417594261, + 0.14510035718537184, 0.13460757957409675, 0.6744658506273948, 0.5451854837499628, 0.6893843112155923, + 0.2673573323323931, 0.7533764068311143, 0.4715876236843328, 0.0964560448883105, 0.10510210443177692, + 0.3254970321441475, 0.46320953694522016, 0.8749740701787951, 0.15502984464465686, 0.6654145558749625, + 0.9014031191103209, 0.0038954249311695666, 0.7367959934022067, 0.9050016678033207, 0.5680930437476771, + 0.41318707539729127, 0.4672252846267282, 0.4672430576408313, 0.0358453349442871, 0.7047848728340145, + 0.4107587048469111, 0.8862218980474931, 0.5545361856803096, 0.8370808709961333, 
0.9904920761291204, + 0.31842130537927016, 0.21230284391317644, 0.5839095046253248, 0.21555704875753656, 0.887653344662633, + 0.33115085237976816, 0.3443790134865867, 0.827802415010975, 0.5798915139606355, 0.7454675023371802, + 0.7875162903072089, 0.0584828568322322, 0.7681269695003293, 0.2688403266948515, 0.7872139137118884, + 0.535368023415492, 0.6359195295816026, 0.1128901838996228, 0.950038806411715, 0.27339773093230824, + 0.01579675654776469, 0.20296075269570268, 0.8467913390425336, 0.41551022895908507, 0.2513109849484241, + 0.9572558290726614, 0.07832911776016815, 0.7547507557346254, 0.12844765027072702, 0.11939825073119481, + 0.6690397046010088, 0.1575709812498055, 0.7095236946648049, 0.6310339935297161, 0.6158424915733098, + 0.7144996557898061, 0.5365983278275497, 0.30927187206231654, 0.8179140740843444, 0.19326779070356015, + 0.5119758149283705, 0.06754426402085834, 0.9634459187238493, 0.3301225172106783, 0.26080864555541483, + 0.8441762152997584, 0.7017503043482971, 0.11348934492273088, 0.7600253102481189, 0.008110304658435208, + 0.3112662550079637, 0.509438821155469, 0.7814793117071854, 0.20120685769092328, 0.7567614845781191, + 0.6165739435500294, 0.6671961409330499, 0.9596376673343043, 0.1237345548042279, 0.7843568303497188, + 0.7675030315279653, 0.5560400086719663, 0.9726350531669181, 0.7074466158829527, 0.14009878314486368, + 0.6049713928898043, 0.8394226322869928, 0.07210232833626673, 0.6058030611127116, 0.3942672946450676, + 0.46762342221081143, 0.606961856564349, 0.6853285787984453, 0.5294984766990256, 0.57362010476332, + 0.7372789472300072, 0.768101200276183, 0.6008521026900275, 0.270660842170115, 0.9688837432017031, + 0.6060192520674768, 0.036559310251870425, 0.44416396951615966, 0.6462328722469546, 0.04898283124047065, + 0.6316790255001938, 0.22023940332382175, 0.5404833527145714, 0.5525056845431044, 0.8898566020334531, + 0.39999980363972587, 0.8389631975404395, 0.8778591814372076, 0.5162066124785467, 0.28297437515915647, + 0.18238528044315205, 
0.005381997132809846, 0.46999899129919254, 0.13825446763275795, 0.49222794645391554, + 0.7030820831074535, 0.6804400231686772, 0.8965979777482974, 0.694946176773454, 0.5085146662370216, + 0.8240197376801905, 0.31776581529233927, 0.5732121905660629, 0.01390285801777269, 0.8757581539790499, + 0.907322093268178, 0.6978352458800421, 0.6894629329539623, 0.8407068458923248, 0.6048366748080617, + 0.05367367526069333, 0.3077714610041279, 0.6279264755906374, 0.41911626457549866, 0.605908791928396, + 0.11473188943088652, 0.4066331998413033, 0.5917865120906529, 0.18245356961058734, 0.5391565304461164, + 0.5624966145847786, 0.7504475253633486, 0.7615701321627198, 0.31028122589925966, 0.848323852071799, + 0.4694763662956777, 0.7516506165708773, 0.4838291003934557, 0.9379908782009793, 0.11489890753723331, + 0.10687123028889634, 0.3005593571845695, 0.2790369765995646, 0.5618268632620098, 0.2506744442166098, + 0.5774223878377291, 0.039557168583882696, 0.47547710684047406, 0.5404640956394502, 0.5173064771408595, + 0.8595156094358232, 0.10311625824407467, 0.4313108920421431, 0.9238740143377883, 0.20687524999638973, + 0.15255012458685524, 0.5427457659655633, 0.8640947828253543, 0.4023460735613871, 0.8152799497172286, + 0.22226454850151056, 0.09327791456333101, 0.540143349882634, 0.5557027424836565, 0.6043822127088522, + 0.635045386209623, 0.11895408520648643, 0.21412622156242278, 0.4168915541552838, 0.6860410428994667, + 0.8362971024617537, 0.1604197275319652, 0.0015534694918136749, 0.16683864032852347, 0.0380164358652878, + 0.03511408488081447, 0.4807109372544841, 0.15151664214217997, 0.10548618728899939, 0.6302059258053215, + 0.4131988922704686, 0.9333959432119138, 0.40858542505531903, 0.09631357136150354, 0.087564812497272, + 0.12769479893109448, 0.5944289290157524, 0.1504256727793163, 0.8245058563795197, 0.28398136463137413, + 0.9427807014726228, 0.8079236541236925, 0.585802992322683, 0.6431029480572278, 0.9852044970284112, + 0.33240595005615725, 0.386704376120808, 
0.05460816935049695, 0.05589652047614602, 0.9792882505670737, + 0.5650652839468214, 0.4522524488689421, 0.08701083613219196, 0.716345677249626, 0.5839245986158135, + 0.40122325862538133, 0.13032245945931542, 0.00256671520725571, 0.5869283108488538, 0.5873285471779122, + 0.8488560464150674, 0.8061332855050097, 0.33938585999470916, 0.6884758620551574, 0.8172347973442694, + 0.6477500880815694, 0.675451984773968, 0.5223003594325687, 0.15458609355946118, 0.43259107705555877, + 0.28851810307018777, 0.3838462692063309, 0.4657950297766186, 0.22707077383985486, 0.8502192712624803, + 0.3677973310632141, 0.13403602141551874, 0.9530360146704632, 0.9826440640203283, 0.42089152977923683, + 0.42180479050443176, 0.15720226657930703, 0.1891201711835082, 0.5353614977912917, 0.6288357150342787, + 0.8147188655365647, 0.09049937046097911, 0.4428874994360409, 0.9941624285613735, 0.47490144756023966, + 0.792905409090189, 0.6373109153860876, 0.362784258536637, 0.18612006988393281, 0.43510833638726243, + 0.3671635077914902, 0.11409342083819618, 0.11234212817124256, 0.24775037353256546, 0.6212388268713533, + 0.6281062349165449, 0.14983193641298098, 0.6762103626227616, 0.10448467671854955, 0.598250699186468, + 0.6938074993804616, 0.8730247668913396, 0.8268479077193474, 0.9972427087729138, 0.5191046008923272, + 0.8911388789395779, 0.509264551627256, 0.5049738483313311, 0.3087999310101207, 0.7752137734199523, + 0.155536036963988, 0.8352195411494564, 0.629649706648176, 0.1939043713632571, 0.7669274996476307, + 0.11693237140089863, 0.9811994408643644, 0.8081707614206302, 0.1746768750613229, 0.11924956852409285, + 0.6107457676123809, 0.5994539427198272, 0.7269927062916417, 0.21157476325939661, 0.7019666424206221, + 0.5389999399640987, 0.9060064439328889, 0.4356093165139081, 0.8690026759179446, 0.8222557998599189, + 0.07222599597929102, 0.9189440327960383, 0.009679487835292266, 0.5197248295836395, 0.3812919886435633, + 0.5842646327860408, 0.9048431033119261, 0.6191447480411455, 
0.047048187901140404, 0.9762230649248507, + 0.08415024415060357, 0.15657338996981707, 0.47278786793745786, 0.9421248651798148, 0.27210373932980336, + 0.9304125472360207, 0.07545431142174563, 0.04655276242142847, 0.5698289275272337, 0.22789767714751497, + 0.1737430953954605, 0.058372511339505606, 0.6294706430315368, 0.07647535710909648, 0.22245198307387348, + 0.6009183620680273, 0.3343173459330723, 0.6421025402503626, 0.8897818639890097, 0.6474260353677674, + 0.4813679241351504, 0.3824802249210911, 0.7824337622838438, 0.8744568184404363, 0.26783566243254986, + 0.7023106099713998, 0.17502498094734908, 0.8814801958973274, 0.6161602839573905, 0.14335035008289465, + 0.5212668248749791, 0.5626987498664584, 0.22702830817160635, 0.9793781225504641, 0.6180799786358538, + 0.5857524793318057, 0.5129980956789748, 0.04555314684145728, 0.5417022462380706, 0.6289268057180017, + 0.8266341879328025, 0.4304725604114389, 0.8248490675287319, 0.5936822156957996, 0.6916715068300263, + 0.35712243943419775, 0.6460542393416723, 0.2987386214304467, 0.6128438567584318, 0.6625086065781743, + 0.5564922845944792, 0.6547427411553769, 0.6073412491734623, 0.19798791257453774, 0.6205838180552334, + 0.937820769140132, 0.02684611862059849, 0.5133721282618873, 0.817321363351518, 0.9650672686251389, + 0.1652336449783368, 0.14255174957497696, 0.8685882991583032, 0.748113225863308, 0.9623403051807798, + 0.946331588194702, 0.842338238512127, 0.8768779524997832, 0.7245722869024117, 0.5870305114470717, + 0.9080883353774347, 0.824331351985735, 0.8099433384130386, 0.7338703370134286, 0.7028907486937389, + 0.603675472091728, 0.5540450376230617, 0.7671813626238805, 0.4823869343924113, 0.03448562234999297, + 0.191655313797513, 0.6934508297023688, 0.8415057046819492, 0.2840572816367717, 0.3168242242390731, + 0.5073316119069321, 0.7685722942349587, 0.8318371362076777, 0.4179331943064153, 0.7963357296035821, + 0.9008311272506405, 0.9139351509015179, 0.8539376483870202, 0.24407675438189358, 0.8390349031208393, + 
0.3113502459956934, 0.3365673473812547, 0.8118009929054556, 0.5371500828826153, 0.1172621795708465, + 0.34008210185151533, 0.6243414247626625, 0.8179986367464573, 0.18547930426962, 0.2659164721740783, + 0.6832042772679686, 0.4922211900243272, 0.26225209481552736, 0.5075709771319182, 0.9015861727324869, + 0.0698828908110074, 0.17347031758337805, 0.13900424509002107, 0.6494817087100319, 0.17721720406416797, + 0.5180775508098545, 0.998600064745631, 0.05100113670893769, 0.02581820063068263, 0.0015068117399120728, + 0.49595914894179594, 0.9701432397722076, 0.9677948626091621, 0.9473621800880258, 0.7739890486016565, + 0.4607199148631299, 0.42115413371097565, 0.47553531617318, 0.3303683669471392, 0.41721974940422435, + 0.4701127290373278, 0.6912929504723353, 0.6394518382053578, 0.25761725598378504, 0.6441555783810878, + 0.3621519413519899, 0.003205103140524912, 0.6446271971021358, 0.1708378830206404, 0.6969884754450677, + 0.0096584224912859, 0.6253221565828221, 0.21591547095868324, 0.8633431978353517, 0.4660768528067444, + 0.04434142372395833, 0.8104628144429474, 0.006407291414015304, 0.35999532618759145, 0.2585580653888524, + 0.4800788135670906, 0.8209588135749953, 0.7583566943672284, 0.46621208455465135, 0.1138522150371325, + 0.5582429424002487, 0.7402118148501086, 0.23416208241975545, 0.1626564116660122, 0.34861525565456, + 0.619340243121358, 0.3544830751061945, 0.9012416695964349, 0.03260488368221248, 0.34475518131557215, + 0.43034168879885626, 0.004982163159524933, 0.9593936841500735, 0.2703595868217632, 0.3527948681199745, + 0.946495649399147, 0.7032420767984198, 0.7174885959886983, 0.6083365236824875, 0.5157197115732972, + 0.7093308180251068, 0.6218822201313294, 0.2580755081198327, 0.33238224766067537, 0.4911376916833988, + 0.4740020520121634, 0.8472274380619484, 0.8830999516501286, 0.7933072283261428, 0.6696363008399273, + 0.4629514408891108, 0.38953018339451484, 0.6960142716191104, 0.9507310652965659, 0.81171534461288, + 0.5694545400487966, 0.9639847915873286, 
0.6777803851961555, 0.09298362953922867, 0.9705641885675158, + 0.22843672315476948, 0.38291815926073636, 0.7213851262299584, 0.9856208118658878, 0.34523037644096277, + 0.9342795633486977, 0.7808305389779022, 0.9401144197667547, 0.04754095571588024, 0.6317070842942244, + 0.7809932815700706, 0.16366744058478355, 0.9126319991757875, 0.7587538803727455, 0.09854330443257797, + 0.3411167233785305, 0.3414971623264297, 0.23480704090360538, 0.3004489790483701, 0.9554587458033513, + 0.8884132965339431, 0.7220664089605319, 0.25458076239534255, 0.024866663757468488, 0.7933963563504444, + 0.4006494422735588, 0.20306804721545846, 0.6264063678412422, 0.9755662926676001, 0.8501140321994687, + 0.9597826876409328, 0.9063728174897496, 0.18820064869908193, 0.9167584194413457, 0.9309643913035914, + 0.40568625284440596, 0.5651911451209986, 0.08060630938246094, 0.9187005402936496, 0.6876828482380173, + 0.7571624379645097, 0.5480679233387061, 0.24353641279477833, 0.6953374325865181, 0.7004059077803506, + 0.19895076559790037, 0.012614468166029846, 0.6931676652271119, 0.04933882811306345, 0.6817184766496525, + 0.8967786967807705, 0.0370223139896404, 0.27005041666217366, 0.0603131267983219, 0.20690528700779764, + 0.5567967676701338, 0.20463828478253465, 0.14616326216863784, 0.6296157294792657, 0.9640954753646813, + 0.36846030912030836, 0.002330958564625618, 0.5270410644078244, 0.9281730412702204, 0.45519656900706884, + 0.05767366375949612, 0.19679167663135078, 0.45322589140773506, 0.37355093745443213, 0.4316713413341995, + 0.8613778389131638, 0.36363172551401024, 0.041909481052292086, 0.40803137171006676, 0.1644490965265104, + 0.013508862338429739, 0.18087791778059537, 0.4808668404807881, 0.9896217208245889, 0.17031133805671994, + 0.954549933820125, 0.21830098176483825, 0.32074855054918405, 0.7055647713037171, 0.4017341341400964, + 0.06834239147352672, 0.06595343518235453, 0.7927013837463739, 0.736351436409856, 0.6542416800517766, + 0.2611136415462978, 0.8052245691122532, 0.5036101372743131, 
0.09263478211668819, 0.2557611178903839, + 0.7150756721261634, 0.7396670803132176, 0.7249200847189967, 0.7983836884890896, 0.26459354102142374, + 0.14864548914507048, 0.5586611197141035, 0.910350130922468, 0.05593434871311043, 0.8149779553107497, + 0.4767667472242454, 0.285442788189455, 0.40006884264320053, 0.18928550101259878, 0.8969656410445062, + 0.7896722423559953, 0.6525673934788851, 0.9034835925846796, 0.570442903736869, 0.41536471357181215, + 0.10265360872088514, 0.633573216040501, 0.554940923788402, 0.7603504216202375, 0.2815142314209388, + 0.32091865605809244, 0.48941224655962345, 0.22003985504243984, 0.13607798635337598, 0.37901062340884994, + 0.8706905255134766, 0.6356807028139803, 0.418779162764653, 0.40936893971458166, 0.25667308138726475, + 0.8626424645173111, 0.8168071724420188, 0.40923910723158874, 0.8487566728412825, 0.8825770435506887, + 0.6978130505246456, 0.5256740994874012, 0.3591905906139944, 0.5799067477434993, 0.9185955898312771, + 0.2737326463684574, 0.9097323699454616, 0.3930555738378564, 0.47911845356231575, 0.7290407441224809, + 0.46055879064643657, 0.03207769742241595, 0.7975276475662959, 0.640093909231198, 0.022886213132301436, + 0.8609937497701943, 0.5631041599341441, 0.679972623497357, 0.48577991928492015, 0.30538790875902155, + 0.41736161076242584, 0.14830629853298138, 0.23500812773962576, 0.524592542808368, 0.27437111512336143, + 0.05965904887727835, 0.2918591238325783, 0.25598434684729043, 0.6924920794474219, 0.2497349859377368, + 0.8363347417707052, 0.5282798641981886, 0.7312494341931193, 0.17757135544241376, 0.3173567907124467, + 0.15325240685171326, 0.662708767649447, 0.2781731367872762, 0.6364481666190819, 0.2903981867328158, + 0.6496930178456091, 0.039653072192095906, 0.4279605117814289, 0.23717483185086208, 0.5850515639649501, + 0.9272006593578849, 0.6694541951302534, 0.17883121234879118, 0.6436522945258664, 0.7792030175008168, + 0.8949085776808361, 0.2496632683186638, 0.832457996422202, 0.9674353985309306, 
0.8528802875571208, + 0.816763289811324, 0.14688252055455986, 0.5642551096559059, 0.26530479151162056, 0.5049743798224671, + 0.3715996766715721, 0.21947402581862918, 0.32254310459801105, 0.685007228645308, 0.7470850821296687, + 0.8160227628454733, 0.9137489054962102, 0.8560251137039097, 0.024534003121607695, 0.04521871276101308, + 0.02367129352716124, 0.5783021171133504, 0.9139449448889625, 0.265504442325993, 0.8182890630399161, + 0.5049329705377718, 0.6800245200790865, 0.6083213289049307, 0.1811262015095133, 0.3202506649996555, + 0.06139823543315803, 0.7975693287711333, 0.3783321068312443, 0.4215509767310476, 0.4817147366535751, + 0.14335792395875457, 0.569959697175572, 0.8326804416539675, 0.7881931631320024, 0.6250101717036214, + 0.8150438598132201, 0.8322242767985054, 0.5141109538083758, 0.101622534940505, 0.6020576478139832, + 0.862875834352914, 0.44412999428852895, 0.09303719406445532, 0.6365615446012882, 0.391697345346507, + 0.6276038960866358, 0.8384240955628381, 0.15117205862777972, 0.6859166896968107, 0.46096307835937356, + 0.7628475477918988, 0.010532812497153099, 0.39110174343301596, 0.18687012766308642, 0.9953633859063187, + 0.7336318791938153, 0.7412558552103334, 0.21084765923063864, 0.4907451177552751, 0.40601299165301985, + 0.38880973678461517, 0.5584246149451094, 0.10807602415016127, 0.18275262543142878, 0.17402817591505615, + 0.2015937407328915, 0.8849118337827289, 0.377331907017168, 0.3377638457595308, 0.5086224074379112, + 0.7761068172424201, 0.8458824121077834, 0.5825144300241644, 0.31379789941978886, 0.7799327059130073, + 0.6462918881194284, 0.5496040687131041, 0.5606280619195357, 0.2622984173231212, 0.9533428464170677, + 0.6927678723431243, 0.28670323747922943, 0.918328531851499, 0.9181276962383033, 0.22471326407596626, + 0.8742061750012948, 0.035691735149414594, 0.9830142838917029, 0.04618299224281264, 0.978991611793116, + 0.90920549543672, 0.9641936713436415, 0.352173417181341, 0.02749063973513044, 0.671264386437005, + 0.32074385906006564, 
0.3323412691373995, 0.6222518184003937, 0.10327769425645661, 0.7478402622044948, + 0.141175087073799, 0.13567435950327245, 0.06388400985569753, 0.12022737044113196, 0.04865807065818539, + 0.9346626555052675, 0.25035821705527694, 0.263615164003856, 0.9103450312680861, 0.0822476396562889, + 0.9214016455934766, 0.5471141631488152, 0.050092295886368765, 0.12916099885366905, 0.09350772631506876, + 0.4128426881984989, 0.03426701049550318, 0.1114557011437497, 0.9034110226366754, 0.6238078513067472, + 0.6339993330568727, 0.1017850925706153, 0.7574287993118725, 0.4855538696652342, 0.0993780068507144, + 0.7840823982240984, 0.9062905450373338, 0.5188575412994682, 0.07931477242460494, 0.3302454137917453, + 0.9683115424644188, 0.24089419521054534, 0.5221400082332656, 0.6263972136839191, 0.45655675103301285, + 0.03623782209981852, 0.10528149339125581, 0.2369904707552496, 0.8871970275709483, 0.3105201870093366, + 0.8650306308730747, 0.26975405692122023, 0.7401906890372566, 0.015236741198945358, 0.6932025352071033, + 0.2353715196443774, 0.8990410572378255, 0.13175976405917256, 0.8299117018175071, 0.5854392560620701, + 0.09941022297908908, 0.49345606955707433, 0.5693111552840876, 0.6109414753717066, 0.6210232613493811, + 0.12187375071541129, 0.09170073062022499, 0.6775185133293696, 0.8926164238380273, 0.11153600376557016, + 0.36336189612943315, 0.28575020651201066, 0.3006161153901252, 0.4751978516601386, 0.25762053965235965, + 0.8259088244580408, 0.2996039384723006, 0.9168704216696336, 0.8741744876565002, 0.4619743507528482, + 0.7190526080623696, 0.43764245520627065, 0.029650825870803477, 0.9399851529255545, 0.1719914610443789, + 0.29969992238435406, 0.13512016024776252, 0.6519346860670783, 0.04216471446744485, 0.529699047649506, + 0.8936486293424404, 0.5614875873690469, 0.38854122489677667, 0.42965353004741547, 0.20866020350533698, + 0.8442419283811183, 0.751957287605275, 0.6247835602850056, 0.38868231376485674, 0.1347824186806431, + 0.6635426573687336, 0.6146381733545787, 
0.4914780532252889, 0.22586533734423897, 0.18687021665791048, + 0.1871193534007316, 0.46427170052831335, 0.2887757304460784, 0.29264159982594307, 0.48230928722272504, + 0.3825012877262921, 0.20581048451737827, 0.36888096201496545, 0.31310297106149565, 0.9227331395115886, + 0.14118035675934504, 0.8821080388976726, 0.07400286558248481, 0.6417623640115212, 0.5170022343266591, + 0.8420817289489861, 0.2507963913182081, 0.7812620902445477, 0.902646230736842, 0.776047650573657, + 0.8952462478818862, 0.7907303599044537, 0.0031263566165733447, 0.9809862858948138, 0.81280099967171, + 0.8477577457444683, 0.02104693531456414, 0.2934473998659016, 0.4496243188766037, 0.9468223697800644, + 0.863014788820724, 0.20029206183457626, 0.17455369794424835, 0.1592728573611274, 0.36209650006181326, + 0.19553033285442378, 0.7773840612765499, 0.41676684693732735, 0.26264441069811695, 0.7875285367610492, + 0.5669435387631938, 0.9175778213344772, 0.21516198411188503, 0.0632550323284714, 0.9428771776851117, + 0.2172456296265367, 0.28320517322282546, 0.9242596499269207, 0.46989203803910085, 0.9334249248647313, + 0.7762732211789131, 0.013582125340719675, 0.06927193578744395, 0.07999318330104577, 0.8076114189907397, + 0.2729254863821494, 0.13662799169926676, 0.38418911218942264, 0.192680742555284, 0.8618407210129345, + 0.9973527869739185, 0.5553431375586435, 0.6252142939582864, 0.05197495573171229, 0.8526963916761623, + 0.6134256688006303, 0.6004815985037668, 0.4949438124009987, 0.7102282370301588, 0.8445820971086851, + 0.005417090289743465, 0.328955924989748, 0.9936257238147864, 0.3691590461742723, 0.788192482427858, + 0.3719357169116606, 0.5804718234186538, 0.5530225699005005, 0.17183516891041328, 0.9269887520833086, + 0.9847372594336158, 0.12487505196657323, 0.2743762334211701, 0.5386258032132064, 0.567405809525627, + 0.4994531897202181, 0.8699430893250892, 0.431449717755505, 0.07848072387288219, 0.701333216078581, + 0.7077691706016829, 0.5657395772346601, 0.7796612467421044, 
0.16788633369095085, 0.6015148664464552, + 0.7925641613400864, 0.5207478736401517, 0.8086350261894591, 0.22809792345840785, 0.5314239340501304, + 0.6546297288104892, 0.4156617408655624, 0.6602291458395536, 0.5787861259275977, 0.32679093039873, + 0.9903680740595437, 0.5844288716113109, 0.3335669457212541, 0.18184438650179768, 0.49904484954494976, + 0.49090390100838, 0.2747889640775497, 0.05432415993063511, 0.6348270472795142, 0.7618910314479553, + 0.5450893147590388, 0.6042571272764584, 0.7003300711950627, 0.9564276322323998, 0.7162882508139052, + 0.6364250055940857, 0.1827420856730403, 0.22520478035347657, 0.5826635954679447, 0.44776861852667227, + 0.47684193819117915, 0.7535603587898286, 0.6020583704478337, 0.03227298489054431, 0.2989311962894181, + 0.9584970068604538, 0.3271443319211841, 0.6145214518321328, 0.35438910375402444, 0.7967789884708172, + 0.3870316925454582, 0.6264169770789575, 0.9117837886557485, 0.5470831816133089, 0.6046816337812168, + 0.4298312288933326, 0.5185989584440972, 0.19619711194429745, 0.2585861442212085, 0.19113832261170938, + 0.8554931389682613, 0.3940763810605721, 0.4541325511456713, 0.6029788742971568, 0.18007423591415328, + 0.07879987486346618, 0.8617872480152714, 0.8454488591069442, 0.4220834167811641, 0.3309452324727121, + 0.8734450834953609, 0.5775673004205525, 0.3013493746590046, 0.6803789596610658, 0.8705799211122627, + 0.03784801519303338, 0.11065161544340107, 0.6945447838872708, 0.13379258768168645, 0.15506449640296105, + 0.8002681114870956, 0.28728350198169084, 0.08092465521124148, 0.871252990017252, 0.7249396470594075, + 0.5506549211709697, 0.659084127611101, 0.39482091817932796, 0.3478174802894556, 0.3310147497770647, + 0.09063791913423813, 0.4765906670364052, 0.552660546415093, 0.21208354271106145, 0.8362655604438428, + 0.4184101631532844, 0.8815767929529509, 0.6031094795257194, 0.5308984163386145, 0.05355118172872486, + 0.8815252676442482, 0.4643747260984441, 0.12682309670693648, 0.7292121888029454, 0.41648039805853876, + 
0.2011963854694191, 0.020419675906707013, 0.9924212687610415, 0.6432205505596658, 0.47945352909793304, + 0.42050931713665096, 0.2875754593792146, 0.5612945690875106, 0.2894685372960125, 0.6773364043368078, + 0.1407365728127774, 0.9836260301540849, 0.7850185780027568, 0.43542132106992804, 0.8773437341277317, + 0.8274753008171292, 0.9825311160775828, 0.8882855662786443, 0.7555530210610234, 0.1633986233662451, + 0.7961431105300174, 0.5852057892875403, 0.6151123645576676, 0.4992589489046285, 0.0424358193013229, + 0.9012822919659152, 0.4075832830891558, 0.23404002509717414, 0.3721469206496021, 0.718375881245492, + 0.9364329737732714, 0.15670568562557385, 0.8546284305750449, 0.7522727281834883, 0.10593348360373511, + 0.45258186077884677, 0.4828478564591584, 0.3015797182224651, 0.9517418500418697, 0.6840531382369449, + 0.3888003563218816, 0.7403022510887606, 0.6417021568083997, 0.9823224495525934, 0.3602570492037227, + 0.9726701877796005, 0.11256718369098428, 0.42981846729932316, 0.5259354601865902, 0.7554362854591897, + 0.46022153322521797, 0.4793136280134549, 0.10844142009323199, 0.5502761733796465, 0.8811862678452823, + 0.24009629432278679, 0.9493545988254306, 0.7964892755562489, 0.4662575194407572, 0.5936738217081192, + 0.16592882093823258, 0.9859866019657224, 0.2358787856060547, 0.5608551370694833, 0.945964541591334, + 0.5600342383454875, 0.720030390653406, 0.20677275937071815, 0.16132030555888355, 0.23481442646444717, + 0.1486933723029148, 0.07823117805129631, 0.2085993840241619, 0.10858147254244044, 0.6945817896285724, + 0.42563070985356066, 0.04004884332870884, 0.00303398548561018, 0.8542934622625173, 0.25149635186390307, + 0.2494715702411735, 0.2557558871357666, 0.9861938251287953, 0.9196828767636545, 0.38979676119223083, + 0.383631632999867, 0.7616502164965523, 0.5989834812104214, 0.6918854518083776, 0.5911325727260477, + 0.2613787103362436, 0.6065738141875372, 0.9653573346034848, 0.5147461408512055, 0.47813464137618733, + 0.6801750264462293, 0.3791725115370653, 
0.2212103957921122, 0.198746899771007, 0.36473450549974684, + 0.32253490241826444, 0.7436637136812738, 0.13444458615243549, 0.6934462857903217, 0.5944713140884937, + 0.4721117833920081, 0.49288902255267475, 0.41371401125823193, 0.0396525318325186, 0.41446455995710885, + 0.4738685767876225, 0.11421088543024749, 0.10070832493884896, 0.10616464455460517, 0.4167239255685742, + 0.994852989759743, 0.23008445309632586, 0.35510350164702775, 0.8425244929657464, 0.32454656934246384, + 0.02176898509566194, 0.3157593965935449, 0.623726143557632, 0.3229516294381015, 0.44804658784405627, + 0.08598374871008696, 0.5386230791292398, 0.4425710876561455, 0.9469773008495318, 0.548309347248721, + 0.49060334690872587, 0.7063301594750016, 0.16641329612366873, 0.816499405124871, 0.3896457253625296, + 0.7402659538376386, 0.9847530894291007, 0.4843213425499143, 0.43038245869640657, 0.9152388228522966, + 0.4871844797098087, 0.41937549531706264, 0.7956152302108189, 0.04276396841566121, 0.9389220865118408, + 0.49700650040477534, 0.2777255383285019, 0.8953375189380939, 0.30314186832239975, 0.8436821711164175, + 0.250299242916484, 0.29284076270596604, 0.24860916694926627, 0.6297084257593848, 0.36414041461821367, + 0.8636898203206251, 0.03884685622262596, 0.174040945005705, 0.9027325229904037, 0.22636793436272418, + 0.39057502635271857, 0.837149862010498, 0.6197874282482766, 0.04034986432833665, 0.6199159287396855, + 0.8618246322637451, 0.5663522588730695, 0.19114958847316454, 0.9037344787676701, 0.697998863172609, + 0.4408583750989511, 0.5873937362047855, 0.7479938702505384, 0.5903087296332238, 0.0599973686234887, + 0.11453030427680466, 0.0889054813776653, 0.7935221546263311, 0.20525060803459405, 0.9786164402703464, + 0.13534554243929542, 0.33271341280193, 0.29583524445415277, 0.5208034566041221, 0.715961659170718, + 0.03980357054734318, 0.3025012559871788, 0.43986870620151064, 0.3070560150214996, 0.3528169416286786, + 0.9265141773481097, 0.7501567103388216, 0.08674132803750234, 
0.36407252083885433, 0.08833198768361261, + 0.35322988319400805, 0.7636110816368085, 0.877615468813198, 0.8381086608218498, 0.1264748334029474, + 0.7567472028783842, 0.31610283349055857, 0.10241018294853332, 0.3903715982953918, 0.768703697365061, + 0.3911478980175912, 0.6373792187400823, 0.9085193553173185, 0.8484878574968941, 0.055705717555123546, + 0.1856657129414182, 0.14795677979058397, 0.0851766335876345, 0.7408546264277205, 0.1515139082850422, + 0.8347462304916916, 0.5556014140682733, 0.4231662930615506, 0.4156346278851234, 0.6195024953733143, + 0.6450608352745005, 0.5794559042167301, 0.44398307932725645, 0.3386896839514363, 0.9947910463626235, + 0.4295650024457074, 0.46668569716119923, 0.1683570545191131, 0.38481488230328686, 0.21198524988541256, + 0.6861363838530093, 0.515617605529789, 0.7871540883141954, 0.6186809320638805, 0.3635186528606992, + 0.24250401036270308, 0.48087219344337306, 0.8664804169889542, 0.2842565683987759, 0.8708403014819337, + 0.5259228242606849, 0.2355911329326772, 0.7287022892124747, 0.5628813409165464, 0.2208727363386015, + 0.9843585772801463, 0.8585384885791164, 0.09585638221818615, 0.700121735075444, 0.9457904755937173, + 0.8741342123700807, 0.2913965400017039, 0.3590258586858043, 0.9324394591487412, 0.2785720549608761, + 0.5955109224024612, 0.6093048651358479, 0.5579648244813307, 0.48923170302655916, 0.040295444090547194, + 0.301895126385361, 0.7161338480188987, 0.6212148954455755, 0.3252230173406172, 0.5859846921828898, + 0.5017178396499485, 0.037315649813791096, 0.5815118195729374, 0.10529898355514655, 0.14560109732536652, + 0.7821128845752052, 0.3908157894176354, 0.32053314608704864, 0.7690452018739381, 0.8676929204814569, + 0.020209696387830434, 0.7080830488792054, 0.30867899872761073, 0.14378068100085928, 0.906567586147243, + 0.3927665003335742, 0.9916129484423926, 0.5415271388101568, 0.12689054054302085, 0.15320500124098402, + 0.690019196582955, 0.6785067283721109, 0.33346421693887085, 0.2796375762118375, 
0.42199350041600436, + 0.2699935286819868, 0.315394010904003, 0.6302297688265931, 0.06165112274849838, 0.7570811347298088, + 0.5279310747451385, 0.9355999029242515, 0.005502195322430903, 0.15228560281857328, 0.0024754880413346836, + 0.76205750363049, 0.06309530860411938, 0.34430528503769187, 0.8108396869466088, 0.5944367076434868, + 0.6759427595358818, 0.873314370429706, 0.2835014145918603, 0.0647251610020978, 0.3257198707861738, + 0.1795620365862599, 0.07541607354608881, 0.7292249395583527, 0.9902209094287545, 0.08944953657777999, + 0.3673644605674675, 0.1642628862370007, 0.11299431248688241, 0.4695447110816021, 0.2869951115264081, + 0.7582745052506809, 0.47703270425834443, 0.42683269753999564, 0.25422858972984796, 0.025660370575602554, + 0.7868622372963548, 0.23858764239770813, 0.6938787595875271, 0.2535497506022084, 0.5371714185957712, + 0.6431145666956772, 0.6647676364679233, 0.5513817267954401, 0.4543637222307828, 0.2718008029139152, + 0.35994464028232376, 0.7056842331140273, 0.6150478247355882, 0.052748374640018136, 0.8940709299691674, + 0.9869207117898028, 0.6428387188696808, 0.10564621295462462, 0.6451107754618787, 0.5642582138462809, + 0.5919936865559303, 0.23663160371727765, 0.9151169937896009, 0.5807847949216597, 0.9055894805021459, + 0.5216542563502468, 0.578729717420777, 0.734142200046549, 0.3385247520939818, 0.046652306728973136, + 0.5827420694536557, 0.7131797928316381, 0.8060861134326347, 0.8332240607025703, 0.06749958945821999, + 0.516280600505945, 0.7492979599727628, 0.39770626841941925, 0.43677939717333114, 0.4170048577307417, + 0.4824712574759782, 0.5524848644926086, 0.9193314618277667, 0.6803898392289517, 0.38598035373059003, + 0.9906048951292297, 0.8370374083834665, 0.6866355448825368, 0.7472313769107196, 0.4961903042881499, + 0.971688128137546, 0.5035567916576319, 0.12934923846644109, 0.6663491618485525, 0.16429514721037275, + 0.004461437904420995, 0.4755094707753683, 0.3487710412736914, 0.7186224430800245, 0.1388136048065316, + 
0.886751975525458, 0.16936019822848425, 0.6327522727545662, 0.635380555069413, 0.40152844863600323, + 0.5114169171284336, 0.5185614672521711, 0.04940680888298521, 0.5927527420448586, 0.004081843984338196, + 0.611581049519875, 0.5753891435829265, 0.3574326544426977, 0.6079510499897496, 0.16449092617603212, + 0.6995314134978298, 0.004239835106883305, 0.35085826191749636, 0.39165826007683324, 0.7876851397804997, + 0.006211069452425777, 0.2791760055342227, 0.9213991389869884, 0.8730810656506978, 0.9524559675550403, + 0.290153972227604, 0.8401166936915387, 0.7383245499273487, 0.3456826354918213, 0.4910238089090576, + 0.3401409033329521, 0.13095056557012497, 0.0361080988081901, 0.6798675384017852, 0.9027129404222184, + 0.6357024603609229, 0.45043719562592965, 0.8308068439506016, 0.8708314311735844, 0.9044186668920613, + 0.3118444321158038, 0.6193299528144401, 0.343460209344983, 0.45642068377669887, 0.7362366025637085, + 0.17489798955430746, 0.4475923979474332, 0.49267771485941037, 0.9147805216639984, 0.45935706630078055, + 0.8744326138824626, 0.928459089113231, 0.5324907160051711, 0.6400214688609211, 0.2577710810650832, + 0.324657289350905, 0.7809831977836019, 0.6776495828489617, 0.361423843762243, 0.3148856602740503, + 0.7156839650988885, 0.008585147934917492, 0.02054704844964983, 0.0454440429846138, 0.467402461137189, + 0.3482114761714936, 0.2274168614700276, 0.3675988930092098, 0.8227638227658204, 0.4735140918259736, + 0.19596949116497853, 0.2777603577620631, 0.9341776586261046, 0.4102405081961351, 0.02728682877510813, + 0.22180088690053323, 0.4969098885447305, 0.8900318941903801, 0.30074428571738887, 0.12675873590477205, + 0.22488977278831046, 0.6509731962068283, 0.22154354697564693, 0.6865202518278289, 0.8605771528753087, + 0.3562212006461868, 0.7866783613717578, 0.9993156197161146, 0.620176535824029, 0.01424037752463747, + 0.12046807952838845, 0.14105948480485575, 0.8130737272213242, 0.23107384517188134, 0.030079422556517188, + 0.17149243221501909, 
0.2696618250878239, 0.09029226432107296, 0.9280525217627306, 0.174299626331055, + 0.601922588468467, 0.7750427002091234, 0.7108870437063272, 0.9340984899103667, 0.4188843096595267, + 0.319215520593643, 0.11771029618746809, 0.8979402481813988, 0.7666755194391025, 0.474170125390853, + 0.13960662583364958, 0.8076048584231867, 0.7643973635630646, 0.14612254019625004, 0.9276971598715721, + 0.08292626610364884, 0.6305267716579811, 0.14456224796821027, 0.493939632833688, 0.24714075783963407, + 0.637129169365288, 0.34416412881956693, 0.384871223091256, 0.4221255541659472, 0.2970822199027767, + 0.7290686471896674, 0.7622610881055619, 0.24234999643629496, 0.7827833151078375, 0.47079183685813464, + 0.9136482203752961, 0.7768386939495876, 0.8984271382188533, 0.7831497277215794, 0.8603641619194752, + 0.4086200513012481, 0.5322382329384319, 0.2921659601352481, 0.20374405516041583, 0.6910280220095179, + 0.7350636029248344, 0.03120480264321246, 0.9056713041598138, 0.2120133204613942, 0.9676421774705622, + 0.33548288170860974, 0.6391815211464841, 0.31535986613369604, 0.5877570824102291, 0.6861092075074696, + 0.528332340935112, 0.7269711145951828, 0.20812038948111622, 0.4223837383275706, 0.4631539491432527, + 0.8213528389917474, 0.8880552830046495, 0.19101754008288407, 0.9129590937779266, 0.19564066112257705, + 0.8409450026881247, 0.9751661267040307, 0.7276057777519807, 0.7273041470712368, 0.3777965634633952, + 0.20424268820820335, 0.7594567227703592, 0.5580175944700493, 0.32656469883293826, 0.41670888180343424, + 0.5620465315393466, 0.3285477028094975, 0.02765869743583449, 0.626135560747538, 0.9676938413436679, + 0.9559943893515941, 0.4046315875494847, 0.08583441419976279, 0.004701142838580807, 0.16396793443292523, + 0.002905946258930814, 0.7544300764519166, 0.14872496584233852, 0.7357426137848664, 0.6471850332368065, + 0.26789766611378607, 0.27322340111095766, 0.2680931795929846, 0.8177762874567295, 0.8736686253239911, + 0.8251362617469858, 0.9577690476905739, 0.7581006474779918, 
0.6853565742888263, 0.3880096040198705, + 0.6672084718234337, 0.034634868583634404, 0.6066253338972054, 0.9404065215610417, 0.17461530457647234, + 0.5802981115586667, 0.9850309591715055, 0.12108199859136526, 0.18388083399956057, 0.5242994382487212, + 0.9349653031183464, 0.9395017553746543, 0.7548311572520328, 0.7757145759635072, 0.2594007449598288, + 0.9677764682764135, 0.6288482538745904, 0.07372112834870348, 0.2773844267431448, 0.8491493815037809, + 0.3993980794861991, 0.9135968462592506, 0.2701036343384797, 0.43759924003124595, 0.5207484113258347, + 0.7969089682462961, 0.1425059391434702, 0.5817575280658813, 0.35439249722790844, 0.3261970280116724, + 0.3683029453317247, 0.5803936269369658, 0.4590798443606058, 0.7979313236417561, 0.3021183772119437, + 0.5180709751097267, 0.2653720936625277, 0.06001106938898426, + }; + + TransResult result; + EXPECT_EQ(TransposeWithShapeCheck(reinterpret_cast(data), std::vector({1, 24, 24, 3}), + std::vector({1, 3, 24, 24}), DT_FLOAT, std::vector({0, 3, 1, 2}), + result), + SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_FLOAT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, trans_shape) { + FormatTransferTranspose transfer; + std::vector dst_shape; + EXPECT_EQ(transfer.TransShape(FORMAT_NCHW, std::vector({1, 3, 8, 8}), DT_FLOAT16, FORMAT_HWCN, dst_shape), + SUCCESS); + EXPECT_EQ(dst_shape, std::vector({8, 8, 3, 1})); + + EXPECT_EQ(transfer.TransShape(FORMAT_NCHW, std::vector({1, 3, 8, 8}), DT_FLOAT16, FORMAT_NHWC, dst_shape), + SUCCESS); + EXPECT_EQ(dst_shape, std::vector({1, 8, 8, 3})); + + EXPECT_EQ(transfer.TransShape(FORMAT_NHWC, std::vector({1, 8, 8, 3}), DT_FLOAT16, FORMAT_NCHW, dst_shape), + SUCCESS); + EXPECT_EQ(dst_shape, std::vector({1, 3, 8, 8})); + + EXPECT_EQ(transfer.TransShape(FORMAT_HWCN, std::vector({8, 8, 3, 1}), DT_FLOAT16, FORMAT_NCHW, dst_shape), + SUCCESS); + EXPECT_EQ(dst_shape, 
std::vector({1, 3, 8, 8})); +} + +TEST_F(UtestFormatTranspose, nchw_to_chwn1) { + uint16_t data[1 * 2 * 3 * 4] = { + 14329, 11393, 10909, 14508, 14671, 14284, 15230, 13538, 12337, 14968, 12427, 12814, + 13675, 15281, 14619, 14878, 14470, 14688, 14055, 14566, 12998, 11163, 13835, 9363, + }; + uint16_t ret[2 * 3 * 4 * 1] = { + 14329, 11393, 10909, 14508, 14671, 14284, 15230, 13538, 12337, 14968, 12427, 12814, + 13675, 15281, 14619, 14878, 14470, 14688, 14055, 14566, 12998, 11163, 13835, 9363, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_NCHW, FORMAT_CHWN, {1, 2, 3, 4}, {2, 3, 4, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nchw_to_chwn2) { + uint16_t data[2 * 3 * 4 * 5] = { + 14828, 14590, 13310, 14873, 14460, 14266, 14191, 15059, 14631, 14744, 8712, 9060, 10329, 14793, 14813, + 12083, 14752, 14771, 12768, 14540, 13767, 14935, 15248, 15328, 14863, 14383, 12246, 13844, 14675, 12828, + 13122, 15284, 14630, 14721, 13883, 13363, 11811, 9642, 14012, 13396, 15086, 14222, 14797, 14173, 14397, + 15042, 11974, 15166, 13588, 15095, 15230, 12827, 15275, 12324, 12618, 14631, 13488, 14433, 14346, 11441, + 14553, 13612, 13193, 12393, 15356, 12178, 13389, 15035, 12536, 14468, 11337, 13481, 12476, 12398, 12752, + 11443, 15122, 15330, 10727, 10380, 12948, 13559, 13328, 14691, 11713, 13918, 13916, 13506, 13322, 11775, + 14914, 13585, 14353, 12896, 8004, 14401, 10588, 14515, 11702, 14488, 13873, 14808, 13674, 13734, 15352, + 14930, 14605, 14998, 14980, 14631, 13056, 15090, 14404, 15268, 12694, 14921, 15298, 14651, 15286, 15220, + }; + uint16_t ret[3 * 4 * 5 * 2] = { + 14828, 14553, 14590, 13612, 13310, 13193, 14873, 12393, 14460, 15356, 14266, 12178, 14191, 13389, 15059, + 15035, 
14631, 12536, 14744, 14468, 8712, 11337, 9060, 13481, 10329, 12476, 14793, 12398, 14813, 12752, + 12083, 11443, 14752, 15122, 14771, 15330, 12768, 10727, 14540, 10380, 13767, 12948, 14935, 13559, 15248, + 13328, 15328, 14691, 14863, 11713, 14383, 13918, 12246, 13916, 13844, 13506, 14675, 13322, 12828, 11775, + 13122, 14914, 15284, 13585, 14630, 14353, 14721, 12896, 13883, 8004, 13363, 14401, 11811, 10588, 9642, + 14515, 14012, 11702, 13396, 14488, 15086, 13873, 14222, 14808, 14797, 13674, 14173, 13734, 14397, 15352, + 15042, 14930, 11974, 14605, 15166, 14998, 13588, 14980, 15095, 14631, 15230, 13056, 12827, 15090, 15275, + 14404, 12324, 15268, 12618, 12694, 14631, 14921, 13488, 15298, 14433, 14651, 14346, 15286, 11441, 15220, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_NCHW, FORMAT_CHWN, {2, 3, 4, 5}, {3, 4, 5, 2}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, chwn_to_nchw1) { + uint16_t ret[1 * 2 * 3 * 4] = { + 14329, 11393, 10909, 14508, 14671, 14284, 15230, 13538, 12337, 14968, 12427, 12814, + 13675, 15281, 14619, 14878, 14470, 14688, 14055, 14566, 12998, 11163, 13835, 9363, + }; + uint16_t data[2 * 3 * 4 * 1] = { + 14329, 11393, 10909, 14508, 14671, 14284, 15230, 13538, 12337, 14968, 12427, 12814, + 13675, 15281, 14619, 14878, 14470, 14688, 14055, 14566, 12998, 11163, 13835, 9363, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_CHWN, FORMAT_NCHW, {2, 3, 4, 1}, {1, 2, 3, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + 
EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, chwn_to_nchw2) { + uint16_t ret[2 * 3 * 4 * 5] = { + 14828, 14590, 13310, 14873, 14460, 14266, 14191, 15059, 14631, 14744, 8712, 9060, 10329, 14793, 14813, + 12083, 14752, 14771, 12768, 14540, 13767, 14935, 15248, 15328, 14863, 14383, 12246, 13844, 14675, 12828, + 13122, 15284, 14630, 14721, 13883, 13363, 11811, 9642, 14012, 13396, 15086, 14222, 14797, 14173, 14397, + 15042, 11974, 15166, 13588, 15095, 15230, 12827, 15275, 12324, 12618, 14631, 13488, 14433, 14346, 11441, + 14553, 13612, 13193, 12393, 15356, 12178, 13389, 15035, 12536, 14468, 11337, 13481, 12476, 12398, 12752, + 11443, 15122, 15330, 10727, 10380, 12948, 13559, 13328, 14691, 11713, 13918, 13916, 13506, 13322, 11775, + 14914, 13585, 14353, 12896, 8004, 14401, 10588, 14515, 11702, 14488, 13873, 14808, 13674, 13734, 15352, + 14930, 14605, 14998, 14980, 14631, 13056, 15090, 14404, 15268, 12694, 14921, 15298, 14651, 15286, 15220, + }; + uint16_t data[3 * 4 * 5 * 2] = { + 14828, 14553, 14590, 13612, 13310, 13193, 14873, 12393, 14460, 15356, 14266, 12178, 14191, 13389, 15059, + 15035, 14631, 12536, 14744, 14468, 8712, 11337, 9060, 13481, 10329, 12476, 14793, 12398, 14813, 12752, + 12083, 11443, 14752, 15122, 14771, 15330, 12768, 10727, 14540, 10380, 13767, 12948, 14935, 13559, 15248, + 13328, 15328, 14691, 14863, 11713, 14383, 13918, 12246, 13916, 13844, 13506, 14675, 13322, 12828, 11775, + 13122, 14914, 15284, 13585, 14630, 14353, 14721, 12896, 13883, 8004, 13363, 14401, 11811, 10588, 9642, + 14515, 14012, 11702, 13396, 14488, 15086, 13873, 14222, 14808, 14797, 13674, 14173, 13734, 14397, 15352, + 15042, 14930, 11974, 14605, 15166, 14998, 13588, 14980, 15095, 14631, 15230, 13056, 12827, 15090, 15275, + 14404, 12324, 15268, 12618, 12694, 14631, 14921, 13488, 15298, 14433, 14651, 14346, 15286, 11441, 15220, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_CHWN, FORMAT_NCHW, {3, 4, 5, 2}, {2, 3, 4, 5}, 
DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nhwc_to_chwn1) { + uint16_t data[1 * 2 * 3 * 4] = { + 15156, 14765, 15193, 12386, 15207, 14866, 12565, 14373, 14437, 14176, 14992, 12660, + 14101, 15184, 14197, 13624, 14646, 15009, 15165, 14585, 14831, 14233, 13940, 15290, + }; + uint16_t ret[4 * 2 * 3 * 1] = { + 15156, 15207, 14437, 14101, 14646, 14831, 14765, 14866, 14176, 15184, 15009, 14233, + 15193, 12565, 14992, 14197, 15165, 13940, 12386, 14373, 12660, 13624, 14585, 15290, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_NHWC, FORMAT_CHWN, {1, 2, 3, 4}, {4, 2, 3, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nhwc_to_chwn2) { + uint16_t data[2 * 3 * 4 * 5] = { + 12684, 13426, 14435, 12149, 14707, 13284, 13922, 13767, 14770, 15231, 13815, 15355, 14172, 13694, 15160, + 14684, 15030, 13894, 14877, 14792, 14940, 15102, 9406, 14297, 11410, 10120, 14727, 15269, 9594, 14439, + 11287, 15342, 15302, 9009, 14128, 11963, 14935, 13439, 15290, 14492, 14781, 14814, 15034, 13992, 14962, + 10638, 14344, 15162, 13625, 14435, 14596, 14294, 14798, 14402, 12369, 14539, 12314, 14760, 11785, 15006, + 14488, 15211, 14801, 14529, 14751, 14758, 14626, 12523, 14740, 14917, 12841, 15125, 14354, 12682, 13958, + 13454, 11957, 15264, 8764, 13459, 12437, 14388, 14768, 13619, 14159, 15110, 14878, 14464, 14564, 12856, + 14755, 12487, 13430, 14506, 14335, 14380, 13689, 14393, 15072, 14684, 14925, 13423, 14413, 
14998, 13522, + 14881, 15081, 7247, 13016, 13873, 12762, 13382, 13563, 14333, 15270, 15006, 15300, 13663, 13677, 13900, + }; + uint16_t ret[5 * 3 * 4 * 2] = { + 12684, 14488, 13284, 14758, 13815, 12841, 14684, 13454, 14940, 12437, 10120, 15110, 11287, 14755, 11963, + 14380, 14781, 14925, 10638, 14881, 14596, 12762, 14539, 15006, 13426, 15211, 13922, 14626, 15355, 15125, + 15030, 11957, 15102, 14388, 14727, 14878, 15342, 12487, 14935, 13689, 14814, 13423, 14344, 15081, 14294, + 13382, 12314, 15300, 14435, 14801, 13767, 12523, 14172, 14354, 13894, 15264, 9406, 14768, 15269, 14464, + 15302, 13430, 13439, 14393, 15034, 14413, 15162, 7247, 14798, 13563, 14760, 13663, 12149, 14529, 14770, + 14740, 13694, 12682, 14877, 8764, 14297, 13619, 9594, 14564, 9009, 14506, 15290, 15072, 13992, 14998, + 13625, 13016, 14402, 14333, 11785, 13677, 14707, 14751, 15231, 14917, 15160, 13958, 14792, 13459, 11410, + 14159, 14439, 12856, 14128, 14335, 14492, 14684, 14962, 13522, 14435, 13873, 12369, 15270, 15006, 13900, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_NHWC, FORMAT_CHWN, {2, 3, 4, 5}, {5, 3, 4, 2}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, chwn_to_nhwc1) { + uint16_t ret[1 * 2 * 3 * 4] = { + 15156, 14765, 15193, 12386, 15207, 14866, 12565, 14373, 14437, 14176, 14992, 12660, + 14101, 15184, 14197, 13624, 14646, 15009, 15165, 14585, 14831, 14233, 13940, 15290, + }; + uint16_t data[4 * 2 * 3 * 1] = { + 15156, 15207, 14437, 14101, 14646, 14831, 14765, 14866, 14176, 15184, 15009, 14233, + 15193, 12565, 14992, 14197, 15165, 13940, 12386, 14373, 12660, 13624, 14585, 15290, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_CHWN, FORMAT_NHWC, {4, 2, 3, 1}, {1, 2, 3, 4}, DT_FLOAT16}; 
+ TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, chwn_to_nhwc2) { + uint16_t ret[2 * 3 * 4 * 5] = { + 12684, 13426, 14435, 12149, 14707, 13284, 13922, 13767, 14770, 15231, 13815, 15355, 14172, 13694, 15160, + 14684, 15030, 13894, 14877, 14792, 14940, 15102, 9406, 14297, 11410, 10120, 14727, 15269, 9594, 14439, + 11287, 15342, 15302, 9009, 14128, 11963, 14935, 13439, 15290, 14492, 14781, 14814, 15034, 13992, 14962, + 10638, 14344, 15162, 13625, 14435, 14596, 14294, 14798, 14402, 12369, 14539, 12314, 14760, 11785, 15006, + 14488, 15211, 14801, 14529, 14751, 14758, 14626, 12523, 14740, 14917, 12841, 15125, 14354, 12682, 13958, + 13454, 11957, 15264, 8764, 13459, 12437, 14388, 14768, 13619, 14159, 15110, 14878, 14464, 14564, 12856, + 14755, 12487, 13430, 14506, 14335, 14380, 13689, 14393, 15072, 14684, 14925, 13423, 14413, 14998, 13522, + 14881, 15081, 7247, 13016, 13873, 12762, 13382, 13563, 14333, 15270, 15006, 15300, 13663, 13677, 13900, + }; + uint16_t data[5 * 3 * 4 * 2] = { + 12684, 14488, 13284, 14758, 13815, 12841, 14684, 13454, 14940, 12437, 10120, 15110, 11287, 14755, 11963, + 14380, 14781, 14925, 10638, 14881, 14596, 12762, 14539, 15006, 13426, 15211, 13922, 14626, 15355, 15125, + 15030, 11957, 15102, 14388, 14727, 14878, 15342, 12487, 14935, 13689, 14814, 13423, 14344, 15081, 14294, + 13382, 12314, 15300, 14435, 14801, 13767, 12523, 14172, 14354, 13894, 15264, 9406, 14768, 15269, 14464, + 15302, 13430, 13439, 14393, 15034, 14413, 15162, 7247, 14798, 13563, 14760, 13663, 12149, 14529, 14770, + 14740, 13694, 12682, 14877, 8764, 14297, 13619, 9594, 14564, 9009, 14506, 15290, 15072, 13992, 14998, + 13625, 13016, 14402, 14333, 11785, 13677, 14707, 14751, 15231, 14917, 15160, 13958, 14792, 
13459, 11410, + 14159, 14439, 12856, 14128, 14335, 14492, 14684, 14962, 13522, 14435, 13873, 12369, 15270, 15006, 13900, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_CHWN, FORMAT_NHWC, {5, 3, 4, 2}, {2, 3, 4, 5}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nhwc_to_hwcn1) { + uint16_t data[1 * 2 * 3 * 4] = { + 14129, 12008, 13612, 14950, 9964, 14974, 14727, 13558, 14770, 13280, 13988, 14765, + 12016, 12426, 14025, 13745, 13664, 14682, 14137, 11786, 13039, 15002, 11979, 14393, + }; + uint16_t ret[2 * 3 * 4 * 1] = { + 14129, 12008, 13612, 14950, 9964, 14974, 14727, 13558, 14770, 13280, 13988, 14765, + 12016, 12426, 14025, 13745, 13664, 14682, 14137, 11786, 13039, 15002, 11979, 14393, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_NHWC, FORMAT_HWCN, {1, 2, 3, 4}, {2, 3, 4, 1}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, nhwc_to_hwcn2) { + uint16_t data[2 * 3 * 4 * 5] = { + 15241, 14379, 14509, 15065, 14406, 15216, 13592, 15337, 14061, 14560, 13329, 13347, 14049, 13721, 14899, + 13968, 12837, 14084, 14590, 14354, 14536, 13206, 14320, 14102, 13426, 14722, 13367, 14296, 10820, 15236, + 13542, 15195, 13589, 14402, 14992, 14552, 13629, 14822, 10726, 14672, 13646, 14451, 11311, 14799, 15108, + 14615, 14204, 12904, 14085, 14527, 15208, 13901, 14160, 15021, 15228, 14349, 12507, 11221, 14349, 15351, + 15275, 14430, 14909, 14487, 14214, 14199, 11764, 14547, 14863, 14847, 14610, 14552, 
13374, 15204, 13327, + 15292, 14080, 15144, 11463, 13983, 14836, 14657, 14479, 13186, 11808, 14743, 14113, 14734, 14672, 14588, + 14590, 13482, 13693, 11461, 14844, 13050, 14797, 13567, 14446, 13603, 14551, 14501, 14589, 14529, 15077, + 13922, 14478, 14975, 14988, 14516, 14389, 15255, 13558, 14055, 12882, 15062, 15016, 11621, 15223, 15042, + }; + uint16_t ret[3 * 4 * 5 * 2] = { + 15241, 15275, 14379, 14430, 14509, 14909, 15065, 14487, 14406, 14214, 15216, 14199, 13592, 11764, 15337, + 14547, 14061, 14863, 14560, 14847, 13329, 14610, 13347, 14552, 14049, 13374, 13721, 15204, 14899, 13327, + 13968, 15292, 12837, 14080, 14084, 15144, 14590, 11463, 14354, 13983, 14536, 14836, 13206, 14657, 14320, + 14479, 14102, 13186, 13426, 11808, 14722, 14743, 13367, 14113, 14296, 14734, 10820, 14672, 15236, 14588, + 13542, 14590, 15195, 13482, 13589, 13693, 14402, 11461, 14992, 14844, 14552, 13050, 13629, 14797, 14822, + 13567, 10726, 14446, 14672, 13603, 13646, 14551, 14451, 14501, 11311, 14589, 14799, 14529, 15108, 15077, + 14615, 13922, 14204, 14478, 12904, 14975, 14085, 14988, 14527, 14516, 15208, 14389, 13901, 15255, 14160, + 13558, 15021, 14055, 15228, 12882, 14349, 15062, 12507, 15016, 11221, 11621, 14349, 15223, 15351, 15042, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_NHWC, FORMAT_HWCN, {2, 3, 4, 5}, {3, 4, 5, 2}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, hwcn_to_nhwc1) { + uint16_t ret[1 * 2 * 3 * 4] = { + 14129, 12008, 13612, 14950, 9964, 14974, 14727, 13558, 14770, 13280, 13988, 14765, + 12016, 12426, 14025, 13745, 13664, 14682, 14137, 11786, 13039, 15002, 11979, 14393, + }; + uint16_t data[2 * 3 * 4 * 1] = { + 14129, 12008, 13612, 14950, 9964, 14974, 14727, 13558, 
14770, 13280, 13988, 14765, + 12016, 12426, 14025, 13745, 13664, 14682, 14137, 11786, 13039, 15002, 11979, 14393, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_HWCN, FORMAT_NHWC, {2, 3, 4, 1}, {1, 2, 3, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, hwcn_to_nhwc2) { + uint16_t ret[2 * 3 * 4 * 5] = { + 15241, 14379, 14509, 15065, 14406, 15216, 13592, 15337, 14061, 14560, 13329, 13347, 14049, 13721, 14899, + 13968, 12837, 14084, 14590, 14354, 14536, 13206, 14320, 14102, 13426, 14722, 13367, 14296, 10820, 15236, + 13542, 15195, 13589, 14402, 14992, 14552, 13629, 14822, 10726, 14672, 13646, 14451, 11311, 14799, 15108, + 14615, 14204, 12904, 14085, 14527, 15208, 13901, 14160, 15021, 15228, 14349, 12507, 11221, 14349, 15351, + 15275, 14430, 14909, 14487, 14214, 14199, 11764, 14547, 14863, 14847, 14610, 14552, 13374, 15204, 13327, + 15292, 14080, 15144, 11463, 13983, 14836, 14657, 14479, 13186, 11808, 14743, 14113, 14734, 14672, 14588, + 14590, 13482, 13693, 11461, 14844, 13050, 14797, 13567, 14446, 13603, 14551, 14501, 14589, 14529, 15077, + 13922, 14478, 14975, 14988, 14516, 14389, 15255, 13558, 14055, 12882, 15062, 15016, 11621, 15223, 15042, + }; + uint16_t data[3 * 4 * 5 * 2] = { + 15241, 15275, 14379, 14430, 14509, 14909, 15065, 14487, 14406, 14214, 15216, 14199, 13592, 11764, 15337, + 14547, 14061, 14863, 14560, 14847, 13329, 14610, 13347, 14552, 14049, 13374, 13721, 15204, 14899, 13327, + 13968, 15292, 12837, 14080, 14084, 15144, 14590, 11463, 14354, 13983, 14536, 14836, 13206, 14657, 14320, + 14479, 14102, 13186, 13426, 11808, 14722, 14743, 13367, 14113, 14296, 14734, 10820, 14672, 15236, 14588, + 13542, 14590, 15195, 13482, 13589, 13693, 14402, 11461, 14992, 
14844, 14552, 13050, 13629, 14797, 14822, + 13567, 10726, 14446, 14672, 13603, 13646, 14551, 14451, 14501, 11311, 14589, 14799, 14529, 15108, 15077, + 14615, 13922, 14204, 14478, 12904, 14975, 14085, 14988, 14527, 14516, 15208, 14389, 13901, 15255, 14160, + 13558, 15021, 14055, 15228, 12882, 14349, 15062, 12507, 15016, 11221, 11621, 14349, 15223, 15351, 15042, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_HWCN, FORMAT_NHWC, {3, 4, 5, 2}, {2, 3, 4, 5}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, hwcn_to_chwn1) { + uint16_t data[1 * 2 * 3 * 4] = { + 14583, 12849, 14184, 14611, 12516, 11629, 15235, 13769, 9859, 15209, 14862, 15129, + 14201, 10199, 14281, 15009, 14606, 13650, 14919, 14789, 14460, 13307, 13646, 14611, + }; + uint16_t ret[3 * 1 * 2 * 4] = { + 14583, 12849, 14184, 14611, 14201, 10199, 14281, 15009, 12516, 11629, 15235, 13769, + 14606, 13650, 14919, 14789, 9859, 15209, 14862, 15129, 14460, 13307, 13646, 14611, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_HWCN, FORMAT_CHWN, {1, 2, 3, 4}, {3, 1, 2, 4}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, hwcn_to_chwn2) { + uint16_t data[2 * 3 * 4 * 5] = { + 11800, 12670, 15017, 13966, 12622, 14634, 14363, 14746, 13889, 12607, 9805, 14984, 9119, 13275, 14114, + 15158, 14442, 15308, 12364, 14944, 14014, 15035, 14983, 14868, 13560, 15094, 14419, 13245, 13675, 14802, + 13906, 9343, 14854, 13488, 14350, 13566, 15225, 14936, 
11090, 14666, 14580, 14419, 13987, 13333, 14821, + 15338, 15153, 12083, 12206, 13802, 14941, 14897, 15181, 14527, 14508, 14883, 14709, 8970, 15133, 13956, + 10066, 11934, 13896, 14886, 13739, 10711, 14594, 12352, 14841, 14405, 15351, 13700, 13904, 14990, 13186, + 14602, 14762, 14686, 12950, 15127, 14630, 13246, 9233, 13646, 14467, 12789, 13639, 12463, 11667, 14927, + 14596, 13614, 13617, 11596, 15260, 14454, 13549, 13174, 14261, 13739, 8588, 14189, 13479, 11268, 14322, + 14807, 15008, 13276, 12450, 12053, 13016, 14735, 13952, 13959, 13576, 14340, 14408, 13392, 14753, 13954, + }; + uint16_t ret[4 * 2 * 3 * 5] = { + 11800, 12670, 15017, 13966, 12622, 14014, 15035, 14983, 14868, 13560, 14580, 14419, 13987, 13333, 14821, + 10066, 11934, 13896, 14886, 13739, 14630, 13246, 9233, 13646, 14467, 8588, 14189, 13479, 11268, 14322, + 14634, 14363, 14746, 13889, 12607, 15094, 14419, 13245, 13675, 14802, 15338, 15153, 12083, 12206, 13802, + 10711, 14594, 12352, 14841, 14405, 12789, 13639, 12463, 11667, 14927, 14807, 15008, 13276, 12450, 12053, + 9805, 14984, 9119, 13275, 14114, 13906, 9343, 14854, 13488, 14350, 14941, 14897, 15181, 14527, 14508, + 15351, 13700, 13904, 14990, 13186, 14596, 13614, 13617, 11596, 15260, 13016, 14735, 13952, 13959, 13576, + 15158, 14442, 15308, 12364, 14944, 13566, 15225, 14936, 11090, 14666, 14883, 14709, 8970, 15133, 13956, + 14602, 14762, 14686, 12950, 15127, 14454, 13549, 13174, 14261, 13739, 14340, 14408, 13392, 14753, 13954, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_HWCN, FORMAT_CHWN, {2, 3, 4, 5}, {4, 2, 3, 5}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, chwn_to_hwcn1) { + uint16_t ret[1 * 2 * 3 * 4] = { + 14583, 12849, 14184, 14611, 12516, 
11629, 15235, 13769, 9859, 15209, 14862, 15129, + 14201, 10199, 14281, 15009, 14606, 13650, 14919, 14789, 14460, 13307, 13646, 14611, + }; + uint16_t data[3 * 1 * 2 * 4] = { + 14583, 12849, 14184, 14611, 14201, 10199, 14281, 15009, 12516, 11629, 15235, 13769, + 14606, 13650, 14919, 14789, 9859, 15209, 14862, 15129, 14460, 13307, 13646, 14611, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_CHWN, FORMAT_HWCN, {3, 1, 2, 4}, {1, 2, 3, 4}, DT_FLOAT16}; + TransResult result; + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} + +TEST_F(UtestFormatTranspose, chwn_to_hwcn2) { + uint16_t ret[2 * 3 * 4 * 5] = { + 11800, 12670, 15017, 13966, 12622, 14634, 14363, 14746, 13889, 12607, 9805, 14984, 9119, 13275, 14114, + 15158, 14442, 15308, 12364, 14944, 14014, 15035, 14983, 14868, 13560, 15094, 14419, 13245, 13675, 14802, + 13906, 9343, 14854, 13488, 14350, 13566, 15225, 14936, 11090, 14666, 14580, 14419, 13987, 13333, 14821, + 15338, 15153, 12083, 12206, 13802, 14941, 14897, 15181, 14527, 14508, 14883, 14709, 8970, 15133, 13956, + 10066, 11934, 13896, 14886, 13739, 10711, 14594, 12352, 14841, 14405, 15351, 13700, 13904, 14990, 13186, + 14602, 14762, 14686, 12950, 15127, 14630, 13246, 9233, 13646, 14467, 12789, 13639, 12463, 11667, 14927, + 14596, 13614, 13617, 11596, 15260, 14454, 13549, 13174, 14261, 13739, 8588, 14189, 13479, 11268, 14322, + 14807, 15008, 13276, 12450, 12053, 13016, 14735, 13952, 13959, 13576, 14340, 14408, 13392, 14753, 13954, + }; + uint16_t data[4 * 2 * 3 * 5] = { + 11800, 12670, 15017, 13966, 12622, 14014, 15035, 14983, 14868, 13560, 14580, 14419, 13987, 13333, 14821, + 10066, 11934, 13896, 14886, 13739, 14630, 13246, 9233, 13646, 14467, 8588, 14189, 13479, 11268, 14322, + 14634, 14363, 14746, 13889, 12607, 15094, 14419, 13245, 
13675, 14802, 15338, 15153, 12083, 12206, 13802, + 10711, 14594, 12352, 14841, 14405, 12789, 13639, 12463, 11667, 14927, 14807, 15008, 13276, 12450, 12053, + 9805, 14984, 9119, 13275, 14114, 13906, 9343, 14854, 13488, 14350, 14941, 14897, 15181, 14527, 14508, + 15351, 13700, 13904, 14990, 13186, 14596, 13614, 13617, 11596, 15260, 13016, 14735, 13952, 13959, 13576, + 15158, 14442, 15308, 12364, 14944, 13566, 15225, 14936, 11090, 14666, 14883, 14709, 8970, 15133, 13956, + 14602, 14762, 14686, 12950, 15127, 14454, 13549, 13174, 14261, 13739, 14340, 14408, 13392, 14753, 13954, + }; + + TransArgs args{reinterpret_cast(data), FORMAT_CHWN, FORMAT_HWCN, {4, 2, 3, 5}, {2, 3, 4, 5}, DT_FLOAT16}; + TransResult result; + + FormatTransferTranspose transfer; + EXPECT_EQ(transfer.TransFormat(args, result), SUCCESS); + EXPECT_EQ(result.length, sizeof(ret)); + for (size_t i = 0; i < sizeof(ret) / sizeof(ret[0]); ++i) { + EXPECT_EQ((reinterpret_cast(result.data.get()))[i], ret[i]); + } +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/format_transfer_unittest.cc b/tests/ut/ge/common/format_transfer_unittest.cc new file mode 100644 index 00000000..bf82a4b7 --- /dev/null +++ b/tests/ut/ge/common/format_transfer_unittest.cc @@ -0,0 +1,81 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h" + +#include "common/formats/format_transfers/format_transfer.h" +#include "common/formats/utils/formats_trans_utils.h" + +namespace ge { +namespace formats { + +class UtestFormatTransfer : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestFormatTransfer, build_transfer_success) { + uint8_t data[1 * 3 * 224 * 224 * 2]; + TransArgs args{data, FORMAT_NCHW, FORMAT_NC1HWC0, {1, 3, 224, 224}, {1, 1, 224, 224, 16}, DT_FLOAT16}; + auto transfer = BuildFormatTransfer(args); + EXPECT_NE(transfer, nullptr); +} + +TEST_F(UtestFormatTransfer, build_unsupported_transfer) { + uint8_t data[1 * 3 * 224 * 224 * 2]; + TransArgs args1{data, FORMAT_RESERVED, FORMAT_NCHW, {1, 1, 224, 224, 16}, {1, 3, 224, 224}, DT_FLOAT16}; + auto transfer1 = BuildFormatTransfer(args1); + EXPECT_EQ(transfer1, nullptr); + + TransArgs args2{data, FORMAT_NCHW, FORMAT_RESERVED, {1, 3, 224, 224}, {1, 1, 224, 224, 16}, DT_FLOAT16}; + auto transfer2 = BuildFormatTransfer(args2); + EXPECT_EQ(transfer2, nullptr); +} + +TEST_F(UtestFormatTransfer, get_size_by_data_type) { + EXPECT_EQ(GetSizeByDataType(DT_FLOAT), 4); + EXPECT_EQ(GetSizeByDataType(DT_FLOAT16), 2); + EXPECT_EQ(GetSizeByDataType(DT_INT8), 1); + EXPECT_EQ(GetSizeByDataType(DT_INT16), 2); + EXPECT_EQ(GetSizeByDataType(DT_UINT16), 2); + EXPECT_EQ(GetSizeByDataType(DT_UINT8), 1); + EXPECT_EQ(GetSizeByDataType(DT_INT32), 4); + EXPECT_EQ(GetSizeByDataType(DT_INT64), 8); + EXPECT_EQ(GetSizeByDataType(DT_UINT32), 4); + EXPECT_EQ(GetSizeByDataType(DT_UINT64), 8); + EXPECT_EQ(GetSizeByDataType(DT_BOOL), 1); + EXPECT_EQ(GetSizeByDataType(DT_DOUBLE), 8); + EXPECT_EQ(GetSizeByDataType(DT_STRING), -1); + EXPECT_EQ(GetSizeByDataType(DT_DUAL_SUB_INT8), 1); + EXPECT_EQ(GetSizeByDataType(DT_DUAL_SUB_UINT8), 1); + EXPECT_EQ(GetSizeByDataType(DT_COMPLEX64), 8); + EXPECT_EQ(GetSizeByDataType(DT_COMPLEX128), 16); + 
EXPECT_EQ(GetSizeByDataType(DT_QINT8), 1); + EXPECT_EQ(GetSizeByDataType(DT_QINT16), 2); + EXPECT_EQ(GetSizeByDataType(DT_QINT32), 4); + EXPECT_EQ(GetSizeByDataType(DT_QUINT8), 1); + EXPECT_EQ(GetSizeByDataType(DT_QUINT16), 2); + EXPECT_EQ(GetSizeByDataType(DT_RESOURCE), -1); + EXPECT_EQ(GetSizeByDataType(DT_STRING_REF), -1); + EXPECT_EQ(GetSizeByDataType(DT_DUAL), 5); + EXPECT_EQ(GetSizeByDataType(DT_UNDEFINED), -1); + EXPECT_EQ(DT_UNDEFINED, 26); +} +} // namespace formats +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/common/ge_format_util_unittest.cc b/tests/ut/ge/common/ge_format_util_unittest.cc new file mode 100644 index 00000000..1f066aa0 --- /dev/null +++ b/tests/ut/ge/common/ge_format_util_unittest.cc @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/ge_format_util.h" + +#include "common/formats/formats.h" + +namespace ge { +TEST(UtestGeFormatUtilTest, test_trans_shape_failure) { + Shape shape({1, 3, 224, 224}); + TensorDesc src_desc(shape, FORMAT_ND, DT_FLOAT16); + std::vector dst_shape; + EXPECT_NE(GeFormatUtil::TransShape(src_desc, FORMAT_RESERVED, dst_shape), SUCCESS); +} + +TEST(UtestGeFormatUtilTest, test_trans_shape_success) { + Shape shape({1, 3, 224, 224}); + TensorDesc src_desc(shape, FORMAT_NCHW, DT_FLOAT16); + std::vector dst_shape; + std::vector expected_shape{1, 1, 224, 224, 16}; + EXPECT_EQ(GeFormatUtil::TransShape(src_desc, FORMAT_NC1HWC0, dst_shape), SUCCESS); + EXPECT_EQ(dst_shape, expected_shape); +} +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/engine_manager/src.json b/tests/ut/ge/engine_manager/src.json new file mode 100755 index 00000000..e35f5d45 --- /dev/null +++ b/tests/ut/ge/engine_manager/src.json @@ -0,0 +1,20 @@ +{ + "schedule_units" : [ { + "id" : "TS_1", + "name" : "1980_hwts", + "ex_attrs" : "", + "cal_engines" : [ + { + "id" : "DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + }, + {"id" : "DNN_V100", "name" : "AICORE", "independent" : false, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_AICPU", "name" : "AICPU", "independent" : false, "attch" : true, "skip_assign_stream" : false}, + {"id" : "DNN_HCCL", "name" : "HCCL", "independent" : true, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_RTS", "name" : "RTS", "independent" : false, "attch" : true, "skip_assign_stream" : false} + ] + } ] +} diff --git a/tests/ut/ge/engine_manager/test1.json b/tests/ut/ge/engine_manager/test1.json new file mode 100755 index 00000000..2745f222 --- /dev/null +++ b/tests/ut/ge/engine_manager/test1.json @@ -0,0 +1,20 @@ +{ + "schedule" : [ { + "id" : "TS_1", + "name" : "1980_hwts", + "ex_attrs" : "", + "cal_engines" : [ + { + "id" : 
"DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + }, + {"id" : "DNN_V100", "name" : "AICORE", "independent" : false, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_AICPU", "name" : "AICPU", "independent" : false, "attch" : true, "skip_assign_stream" : false}, + {"id" : "DNN_HCCL", "name" : "HCCL", "independent" : true, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_RTS", "name" : "RTS", "independent" : false, "attch" : true, "skip_assign_stream" : false} + ] + } ] +} diff --git a/tests/ut/ge/engine_manager/test2.json b/tests/ut/ge/engine_manager/test2.json new file mode 100755 index 00000000..bc0c69a1 --- /dev/null +++ b/tests/ut/ge/engine_manager/test2.json @@ -0,0 +1,20 @@ +{ + "schedule_units" : [ { + "id" : "TS_1", + "name" : "1980_hwts", + "ex_attrs" : "", + "engines" : [ + { + "id" : "DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + }, + {"id" : "DNN_V100", "name" : "AICORE", "independent" : false, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_AICPU", "name" : "AICPU", "independent" : false, "attch" : true, "skip_assign_stream" : false}, + {"id" : "DNN_HCCL", "name" : "HCCL", "independent" : true, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_RTS", "name" : "RTS", "independent" : false, "attch" : true, "skip_assign_stream" : false} + ] + } ] +} diff --git a/tests/ut/ge/engine_manager/test3.json b/tests/ut/ge/engine_manager/test3.json new file mode 100755 index 00000000..407b26a1 --- /dev/null +++ b/tests/ut/ge/engine_manager/test3.json @@ -0,0 +1,20 @@ +{ + "schedule_units" : [ { + "id" : "", + "name" : "1980_hwts", + "ex_attrs" : "", + "cal_engines" : [ + { + "id" : "DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + }, + {"id" : "DNN_V100", "name" : "AICORE", "independent" : false, 
"attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_AICPU", "name" : "AICPU", "independent" : false, "attch" : true, "skip_assign_stream" : false}, + {"id" : "DNN_HCCL", "name" : "HCCL", "independent" : true, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_RTS", "name" : "RTS", "independent" : false, "attch" : true, "skip_assign_stream" : false} + ] + } ] +} diff --git a/tests/ut/ge/engine_manager/test4.json b/tests/ut/ge/engine_manager/test4.json new file mode 100755 index 00000000..0ed9c4a2 --- /dev/null +++ b/tests/ut/ge/engine_manager/test4.json @@ -0,0 +1,34 @@ +{ + "schedule_units" : [ + { + "id" : "TS_1", + "name" : "1980_hwts", + "ex_attrs" : "", + "cal_engines" : [ + { + "id" : "DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + }, + {"id" : "DNN_V100", "name" : "AICORE", "independent" : false, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_AICPU", "name" : "AICPU", "independent" : false, "attch" : true, "skip_assign_stream" : false}, + {"id" : "DNN_HCCL", "name" : "HCCL", "independent" : true, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_RTS", "name" : "RTS", "independent" : false, "attch" : true, "skip_assign_stream" : false} + ] + }, + { + "id" : "TS_2", + "name" : "1980_hwts", + "ex_attrs" : "", + "cal_engines" : [ { + "id" : "DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + } ] + } + ] +} diff --git a/tests/ut/ge/engine_manager/test5.json b/tests/ut/ge/engine_manager/test5.json new file mode 100755 index 00000000..ee74eaf6 --- /dev/null +++ b/tests/ut/ge/engine_manager/test5.json @@ -0,0 +1,27 @@ +{ + "schedule_units" : [ { + "id" : "TS_1", + "name" : "1980_hwts", + "ex_attrs" : "", + "cal_engines" : [ + { + "id" : "DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + }, + { + "id" : 
"DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + }, + {"id" : "DNN_V100", "name" : "AICORE", "independent" : false, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_AICPU", "name" : "AICPU", "independent" : false, "attch" : true, "skip_assign_stream" : false}, + {"id" : "DNN_HCCL", "name" : "HCCL", "independent" : true, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_RTS", "name" : "RTS", "independent" : false, "attch" : true, "skip_assign_stream" : false} + ] + } ] +} diff --git a/tests/ut/ge/engine_manager/test6.json b/tests/ut/ge/engine_manager/test6.json new file mode 100755 index 00000000..a28b0db3 --- /dev/null +++ b/tests/ut/ge/engine_manager/test6.json @@ -0,0 +1,20 @@ +{ + "schedule_units" : [ { + "id" : "TS_1", + "name" : "1980_hwts", + "ex_attrs" : "", + "cal_engines" : [ + { + "id" : "DNN_VM_GE_LOCAL", + "name" : "GE_LOCAL", + "independent" : false, + "attch" : false, + "skip_assign_stream" : true + }, + {"id" : "DNN_V100", "name" : "AICORE", "independent" : false, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_AICPU", "independent" : false, "attch" : true, "skip_assign_stream" : false}, + {"id" : "DNN_HCCL", "name" : "HCCL", "independent" : true, "attch" : false, "skip_assign_stream" : false}, + {"id" : "DNN_VM_RTS", "name" : "RTS", "independent" : false, "attch" : true, "skip_assign_stream" : false} + ] + } ] +} diff --git a/tests/ut/ge/gen_node.h b/tests/ut/ge/gen_node.h new file mode 100644 index 00000000..628ae55f --- /dev/null +++ b/tests/ut/ge/gen_node.h @@ -0,0 +1,57 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef UT_GE_Gen_Node_H_ +#define UT_GE_Gen_Node_H_ + +#include + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/optimize/common/params.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "register/op_registry.h" + +static ge::NodePtr GenNodeFromOpDesc(ge::OpDescPtr op_desc); + +static ge::NodePtr GenNodeFromOpDesc(ge::OpDescPtr op_desc) { + if (!op_desc) { + return nullptr; + } + static auto g = std::make_shared("g"); + return g->AddNode(std::move(op_desc)); +} + +static void AddInputDesc(ge::OpDescPtr op_desc, int num) { + for (int i = 0; i < num; ++i) { + ge::GeTensorDesc tensor; + tensor.SetFormat(ge::FORMAT_NCHW); + tensor.SetShape(ge::GeShape({1, 1, 1, 1})); + tensor.SetDataType(ge::DT_FLOAT); + ge::TensorUtils::SetRealDimCnt(tensor, 4); + op_desc->AddInputDesc(tensor); + } +} + +#endif // UT_GE_Gen_Node_H_ diff --git a/tests/ut/ge/graph/build/logical_stream_allocator_unittest.cc b/tests/ut/ge/graph/build/logical_stream_allocator_unittest.cc new file mode 100644 index 00000000..823b2c60 --- /dev/null +++ b/tests/ut/ge/graph/build/logical_stream_allocator_unittest.cc @@ -0,0 +1,869 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use 
this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include + +#define protected public +#define private public +#include "graph/manager/graph_manager_utils.h" +#undef protected +#undef private + +#include "graph/build/logical_stream_allocator.h" + +#include "common/types.h" +#include "common/util.h" + +#include "graph/compute_graph.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" + +using namespace std; + +namespace ge { +class UtestLogicalStreamAllocator : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + static SubGraphInfoPtr BuildSubGraph(ComputeGraphPtr compute_graph, const string &engine_name, + const string &stream_label = "") { + SubGraphInfoPtr subgraph = make_shared(); + subgraph->SetSubGraph(compute_graph); + subgraph->SetEngineName(engine_name); + subgraph->SetStreamLabel(stream_label); + return subgraph; + } + + NodePtr AddPlaceHolder(ComputeGraphPtr compute_graph, const string &name) { + OpDescPtr op_desc = std::make_shared(name, "PlaceHolder"); + op_desc->AddInputDesc(GeTensorDesc()); + op_desc->AddOutputDesc(GeTensorDesc()); + NodePtr node = compute_graph->AddNode(op_desc); + node->SetOwnerComputeGraph(compute_graph); + return node; + } + + NodePtr AddEnd(ComputeGraphPtr compute_graph, const string &name) { + OpDescPtr op_desc = std::make_shared(name, "End"); + op_desc->AddInputDesc(GeTensorDesc()); + op_desc->AddOutputDesc(GeTensorDesc()); + NodePtr node = compute_graph->AddNode(op_desc); + node->SetOwnerComputeGraph(compute_graph); + return 
node; + } + + void AddPlaceHolderAndEnd(SubGraphInfoPtr subgraph, int in_num, int out_num) { + ComputeGraphPtr compute_graph = subgraph->GetSubGraph(); + + std::unordered_map pld_2_end_map; + if (in_num == 1) { + NodePtr node = AddPlaceHolder(compute_graph, "placeholder"); + pld_2_end_map.emplace(node, nullptr); + } else { + for (int i = 0; i < in_num; i++) { + NodePtr node = AddPlaceHolder(compute_graph, "placeholder" + to_string(i + 1)); + pld_2_end_map.emplace(node, nullptr); + } + } + subgraph->SetPld2EndMap(pld_2_end_map); + + std::unordered_map end_2_pld_map; + if (out_num == 1) { + NodePtr node = AddEnd(compute_graph, "end"); + end_2_pld_map.emplace(node, nullptr); + } else { + for (int i = 0; i < out_num; i++) { + NodePtr node = AddEnd(compute_graph, "end" + to_string(i + 1)); + end_2_pld_map.emplace(node, nullptr); + } + } + + subgraph->SetEnd2PldMap(end_2_pld_map); + } + + SubGraphInfoPtr CreateDataSubgraph(const string &name = "data") { + ComputeGraphPtr compute_graph = make_shared(name); + OpDescPtr op_desc = std::make_shared("data", "Data"); + op_desc->AddOutputDesc(GeTensorDesc()); + compute_graph->AddNode(op_desc); + + SubGraphInfoPtr subgraph = BuildSubGraph(compute_graph, "ge_local", ""); + AddPlaceHolderAndEnd(subgraph, 0, 1); + return subgraph; + } + + SubGraphInfoPtr CreateConstSubgraph(const string &name = "const") { + ComputeGraphPtr compute_graph = make_shared(name); + OpDescPtr op_desc = std::make_shared("constant", "Constant"); + op_desc->AddOutputDesc(GeTensorDesc()); + compute_graph->AddNode(op_desc); + + SubGraphInfoPtr subgraph = BuildSubGraph(compute_graph, "ge_local", ""); + AddPlaceHolderAndEnd(subgraph, 0, 1); + return subgraph; + } + + SubGraphInfoPtr CreateSubgraphWithNodeName(const string &graph_name, const string &node_name, const string &engine, + const string &stream_label = "", int in_num = 1, int out_num = 1) { + ComputeGraphPtr compute_graph = make_shared(graph_name); + OpDescPtr op_desc = std::make_shared(node_name, 
"Relu"); + op_desc->AddInputDesc(GeTensorDesc()); + op_desc->AddOutputDesc(GeTensorDesc()); + compute_graph->AddNode(op_desc); + + SubGraphInfoPtr subgraph = BuildSubGraph(compute_graph, engine, stream_label); + AddPlaceHolderAndEnd(subgraph, in_num, out_num); + + return subgraph; + } + + SubGraphInfoPtr CreateSubgraphWithName(const string &name, const string &engine, const string &stream_label = "", + int in_num = 1, int out_num = 1) { + ComputeGraphPtr compute_graph = make_shared(name); + OpDescPtr op_desc = std::make_shared("relu", "Relu"); + op_desc->AddInputDesc(GeTensorDesc()); + op_desc->AddOutputDesc(GeTensorDesc()); + compute_graph->AddNode(op_desc); + + SubGraphInfoPtr subgraph = BuildSubGraph(compute_graph, engine, stream_label); + AddPlaceHolderAndEnd(subgraph, in_num, out_num); + + return subgraph; + } + + SubGraphInfoPtr CreateSubgraph(const string &engine, const string &stream_label = "", int in_num = 1, + int out_num = 1) { + return CreateSubgraphWithName("graph", engine, stream_label, in_num, out_num); + } + + void LinkSubGraph(SubGraphInfoPtr subgraph1, const string &end_name, SubGraphInfoPtr subgraph2, + const string &placeholder_name) { + NodePtr end_node = subgraph1->GetSubGraph()->FindNode(end_name); + assert(end_node != nullptr); + + NodePtr placeholder_node = subgraph2->GetSubGraph()->FindNode(placeholder_name); + assert(placeholder_node != nullptr); + + NodePtr const_node = subgraph1->GetSubGraph()->FindNode("constant"); + if (const_node != nullptr) { + AttrUtils::SetStr(placeholder_node->GetOpDesc(), "parentOpType", "Constant"); + } else { + AttrUtils::SetStr(placeholder_node->GetOpDesc(), "parentOpType", "xxx"); + } + + subgraph1->end_to_pld_[end_node] = placeholder_node; + subgraph2->pld_to_end_[placeholder_node] = end_node; + } + + int64_t GetStream(SubGraphInfoPtr subgraph) { + int64_t stream_id = kInvalidStream; + ComputeGraphPtr compute_graph = subgraph->GetSubGraph(); + for (NodePtr node : compute_graph->GetDirectNode()) { + if 
(stream_id == kInvalidStream) { + stream_id = node->GetOpDesc()->GetStreamId(); + } else { + assert(stream_id == node->GetOpDesc()->GetStreamId()); + } + } + + return stream_id; + } + + bool ExpectStreamEq(SubGraphInfoPtr subgraph, int64_t expect) { return GetStream(subgraph) == expect; } + + bool ExpectStreamNe(SubGraphInfoPtr subgraph, int64_t expect) { return GetStream(subgraph) != expect; } + Status AssignLogicalStreams(vector subgraphs, vector &confs, + std::map &max_parallel_num, ComputeGraphPtr &whole_graph) { + SchedulerConf scheduler_conf; + if (confs.empty()) { + for (const auto &subgraph : subgraphs) { + EngineConfPtr conf = make_shared(); + conf->id = subgraph->GetEngineName(); + if (conf->id == "ge_local") { + conf->skip_assign_stream = true; + conf->attach = true; + } + scheduler_conf.cal_engines[conf->id] = conf; + } + } else { + for (auto &conf : confs) { + scheduler_conf.cal_engines[conf->id] = conf; + } + } + + for (const auto &item : scheduler_conf.cal_engines) { + EngineConfPtr conf = item.second; + conf->scheduler_id = "scheduler"; + } + + map scheduler_confs; + scheduler_confs["scheduler"] = scheduler_conf; + LogicalStreamAllocator allocator(scheduler_confs, max_parallel_num); + int64_t stream_num = 0; + return allocator.Assign(whole_graph, subgraphs, stream_num); + } + + Status AssignLogicalStreams(vector subgraphs, std::map &max_parallel_num, + vector &confs) { + ComputeGraphPtr whole_graph = make_shared("whole_graph"); + return AssignLogicalStreams(subgraphs, confs, max_parallel_num, whole_graph); + } + + Status AssignLogicalStreams(vector subgraphs, + vector confs = vector()) { + std::map max_parallel_num; + return AssignLogicalStreams(subgraphs, max_parallel_num, confs); + } + + Status AssignLogicalStreams(vector subgraphs, std::map &max_parallel_num) { + vector confs; + return AssignLogicalStreams(subgraphs, max_parallel_num, confs); + } + + /// typical case + /// Subgraph3_1 Subgraph3_2 + /// (GenMask1,cpu) (GenMask2,cpu) + /// | | + 
/// Subgraph1 -> Subgraph2 -> Subgraph8 -> Subgraph10 + /// (GetNext,cpu) (DoMask,core) (AllReduce1,hccl) (Apply1,core) + /// | | + /// Subgraph4 -> Subgraph5 -> Subgraph6 -> Subgraph7 -> Subgraph9 + /// (cpu) (core) (core) (AllReduce2,hccl) (Apply2,core) + void TestAll(int parallel_num) { + auto const1 = CreateConstSubgraph(); + auto const2 = CreateConstSubgraph(); + auto get_next = CreateSubgraphWithName("get_next", "aicpu", "get_next", 0, 1); + auto genmask1 = CreateSubgraphWithName("genmask1", "aicpu", "", 1, 1); + auto genmask2 = CreateSubgraphWithName("genmask2", "aicpu", "", 1, 1); + auto domask = CreateSubgraphWithName("domask", "aicore", "", 3, 2); + auto subgraph4 = CreateSubgraphWithName("subgraph4", "aicpu", "", 1, 1); + auto subgraph5 = CreateSubgraphWithName("subgraph5", "aicore", "", 1, 1); + auto subgraph6 = CreateSubgraphWithName("subgraph6", "aicore", "", 1, 1); + auto allreduce1 = CreateSubgraphWithName("allreduce1", "hccl", "", 1, 2); + auto allreduce2 = CreateSubgraphWithName("allreduce2", "hccl", "", 2, 1); + auto apply1 = CreateSubgraphWithName("apply1", "aicore", "", 1, 1); + auto apply2 = CreateSubgraphWithName("apply2", "aicore", "", 1, 1); + + LinkSubGraph(const1, "end", genmask1, "placeholder"); + LinkSubGraph(const2, "end", genmask2, "placeholder"); + LinkSubGraph(get_next, "end", domask, "placeholder1"); + LinkSubGraph(genmask1, "end", domask, "placeholder2"); + LinkSubGraph(genmask2, "end", domask, "placeholder3"); + LinkSubGraph(domask, "end1", subgraph4, "placeholder"); + LinkSubGraph(domask, "end2", allreduce1, "placeholder"); + LinkSubGraph(subgraph4, "end", subgraph5, "placeholder"); + LinkSubGraph(subgraph5, "end", subgraph6, "placeholder"); + LinkSubGraph(subgraph6, "end", allreduce2, "placeholder1"); + LinkSubGraph(allreduce1, "end1", allreduce2, "placeholder2"); + LinkSubGraph(allreduce1, "end2", apply1, "placeholder"); + LinkSubGraph(allreduce2, "end", apply2, "placeholder"); + + EngineConfPtr conf1 = make_shared(); + 
conf1->id = "ge_local"; + conf1->skip_assign_stream = true; + conf1->attach = true; + EngineConfPtr conf2 = make_shared(); + conf2->id = "aicore"; + EngineConfPtr conf3 = make_shared(); + conf3->id = "aicpu"; + conf3->attach = true; + EngineConfPtr conf4 = make_shared(); + conf4->id = "hccl"; + conf4->independent = true; + vector confs = {conf1, conf2, conf3, conf4}; + + std::map max_parallel_num; + max_parallel_num["aicore"] = parallel_num; + max_parallel_num["aicpu"] = parallel_num; + + Status status = AssignLogicalStreams({const1, const2, get_next, genmask1, genmask2, domask, subgraph4, subgraph5, + subgraph6, allreduce1, allreduce2, apply1, apply2}, + max_parallel_num, confs); + EXPECT_EQ(status, ge::SUCCESS); + + EXPECT_EQ(GetStream(get_next), 0); + EXPECT_EQ(GetStream(allreduce1), 1); + EXPECT_EQ(GetStream(allreduce2), 1); + + EXPECT_EQ(GetStream(subgraph4), GetStream(subgraph5)); + EXPECT_EQ(GetStream(subgraph5), GetStream(subgraph6)); + + EXPECT_NE(GetStream(get_next), GetStream(subgraph4)); + EXPECT_NE(GetStream(genmask2), GetStream(subgraph4)); + + if (parallel_num == 1) { + EXPECT_EQ(GetStream(apply1), GetStream(apply2)); + } else { + EXPECT_NE(GetStream(apply1), GetStream(apply2)); + } + } + + /// Set one graph: + /// stream id: 1 1 1 + /// B --> C(AllReduce) --- D + /// / + /// stream id: 0 A + /// \ + /// E --> F(AllReduce) --- G + /// stream id: 2 2 2 + /// + void make_graph_with_allreduce(ge::ComputeGraphPtr graph) { + ge::OpDescPtr op_a = make_shared("A", DATA); + auto desc_temp_ptr = make_shared(); + auto desc_temp = *desc_temp_ptr; + op_a->AddInputDesc(desc_temp); + op_a->AddOutputDesc(desc_temp); + + ge::OpDescPtr op_b = make_shared("B", "testa"); + op_b->AddInputDesc(desc_temp); + op_b->AddOutputDesc(desc_temp); + + ge::OpDescPtr op_c = make_shared("C", "HcomAllReduce"); + op_c->AddInputDesc(desc_temp); + op_c->AddOutputDesc(desc_temp); + + ge::OpDescPtr op_d = make_shared("D", "testa"); + op_d->AddInputDesc(desc_temp); + 
op_d->AddOutputDesc(desc_temp); + + ge::OpDescPtr op_e = make_shared("E", "testa"); + op_e->AddInputDesc(desc_temp); + op_e->AddOutputDesc(desc_temp); + + ge::OpDescPtr op_f = make_shared("F", "HcomAllReduce"); + op_f->AddInputDesc(desc_temp); + op_f->AddOutputDesc(desc_temp); + + ge::OpDescPtr op_g = make_shared("G", "testa"); + op_g->AddInputDesc(desc_temp); + op_g->AddOutputDesc(desc_temp); + + // add node + ge::NodePtr node_a = graph->AddNode(op_a); + ge::NodePtr node_b = graph->AddNode(op_b); + ge::NodePtr node_c = graph->AddNode(op_c); + ge::NodePtr node_d = graph->AddNode(op_d); + ge::NodePtr node_e = graph->AddNode(op_e); + ge::NodePtr node_f = graph->AddNode(op_f); + ge::NodePtr node_g = graph->AddNode(op_g); + + // add edge + ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_e->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(0), node_c->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_c->GetOutDataAnchor(0), node_d->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_e->GetOutDataAnchor(0), node_f->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_f->GetOutDataAnchor(0), node_g->GetInDataAnchor(0)); + + // add stream id + node_a->GetOpDesc()->SetStreamId(0); + node_b->GetOpDesc()->SetStreamId(1); + node_c->GetOpDesc()->SetStreamId(1); + node_d->GetOpDesc()->SetStreamId(1); + node_e->GetOpDesc()->SetStreamId(2); + node_f->GetOpDesc()->SetStreamId(2); + node_g->GetOpDesc()->SetStreamId(2); + } +}; + +// case of single subgraph (without streamlabel) +TEST_F(UtestLogicalStreamAllocator, test_single_subgraph) { + SubGraphInfoPtr subgraph = CreateSubgraph("engine1", ""); + Status status = AssignLogicalStreams({subgraph}); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph), 0); +} + +// case of single subgraph (with streamlabel) +TEST_F(UtestLogicalStreamAllocator, test_single_subgraph_with_label) { + SubGraphInfoPtr subgraph = 
// if the subgraphs run on the same engine, they reuse one stream
TEST_F(UtestLogicalStreamAllocator, test_same_engine) {
  SubGraphInfoPtr data = CreateDataSubgraph();
  SubGraphInfoPtr subgraph1 = CreateSubgraphWithName("graph1", "engine1", "");
  SubGraphInfoPtr subgraph2 = CreateSubgraphWithName("graph2", "engine1", "");
  SubGraphInfoPtr subgraph3 = CreateSubgraphWithName("graph3", "engine1", "");
  SubGraphInfoPtr subgraph4 = CreateSubgraphWithName("graph4", "engine1", "");
  LinkSubGraph(data, "end", subgraph1, "placeholder");
  LinkSubGraph(subgraph1, "end", subgraph2, "placeholder");
  LinkSubGraph(subgraph2, "end", subgraph3, "placeholder");
  LinkSubGraph(subgraph3, "end", subgraph4, "placeholder");

  std::map<std::string, int> max_parallel_num;
  max_parallel_num["engine1"] = 100;

  Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4}, max_parallel_num);
  EXPECT_EQ(status, ge::SUCCESS);
  EXPECT_EQ(GetStream(subgraph1), 0);
  EXPECT_EQ(GetStream(subgraph2), 0);
  EXPECT_EQ(GetStream(subgraph3), 0);
  EXPECT_EQ(GetStream(subgraph4), 0);
}

// subgraphs on different engines in different control units do not reuse streams
TEST_F(UtestLogicalStreamAllocator, test_diff_engine) {
  SubGraphInfoPtr data = CreateDataSubgraph();
  SubGraphInfoPtr subgraph1 = CreateSubgraph("engine1", "");
  SubGraphInfoPtr subgraph2 = CreateSubgraph("engine2", "");
  SubGraphInfoPtr subgraph3 = CreateSubgraph("engine3", "");
  LinkSubGraph(data, "end", subgraph1, "placeholder");
  LinkSubGraph(subgraph1, "end", subgraph2, "placeholder");
  LinkSubGraph(subgraph2, "end", subgraph3, "placeholder");

  Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3});
  EXPECT_EQ(status, ge::SUCCESS);
  EXPECT_EQ(GetStream(subgraph1), 0);
  EXPECT_EQ(GetStream(subgraph2), 1);
  EXPECT_EQ(GetStream(subgraph3), 2);
}

// subgraphs on different engines in the same control unit (attach) reuse the stream
TEST_F(UtestLogicalStreamAllocator, test_engine_attach) {
  SubGraphInfoPtr data = CreateDataSubgraph();
  SubGraphInfoPtr subgraph1 = CreateSubgraphWithName("graph1", "engine1", "");
  SubGraphInfoPtr subgraph2 = CreateSubgraphWithName("graph2", "engine2", "");
  SubGraphInfoPtr subgraph3 = CreateSubgraphWithName("graph3", "engine3", "");
  SubGraphInfoPtr subgraph4 = CreateSubgraphWithName("graph4", "engine4", "");
  LinkSubGraph(data, "end", subgraph1, "placeholder");
  LinkSubGraph(subgraph1, "end", subgraph2, "placeholder");
  LinkSubGraph(subgraph2, "end", subgraph3, "placeholder");
  LinkSubGraph(subgraph3, "end", subgraph4, "placeholder");

  EngineConfPtr conf1 = make_shared<EngineConf>();
  conf1->id = subgraph1->GetEngineName();
  EngineConfPtr conf2 = make_shared<EngineConf>();
  conf2->id = subgraph2->GetEngineName();
  conf2->attach = true;
  EngineConfPtr conf3 = make_shared<EngineConf>();
  conf3->id = subgraph3->GetEngineName();
  conf3->attach = true;
  EngineConfPtr conf4 = make_shared<EngineConf>();
  conf4->id = subgraph4->GetEngineName();

  Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4}, {conf1, conf2, conf3, conf4});
  EXPECT_EQ(status, ge::SUCCESS);
  EXPECT_EQ(GetStream(subgraph1), 0);
  EXPECT_EQ(GetStream(subgraph2), 0);
  EXPECT_EQ(GetStream(subgraph3), 0);
  EXPECT_EQ(GetStream(subgraph4), 1);
}

// if an engine sets skip_assign_stream, its subgraph keeps the default stream
// and is not counted when numbering the others
TEST_F(UtestLogicalStreamAllocator, test_skip_assign_stream) {
  SubGraphInfoPtr data = CreateDataSubgraph();
  SubGraphInfoPtr subgraph1 = CreateSubgraph("engine1", "");
  SubGraphInfoPtr subgraph2 = CreateSubgraph("engine2", "");
  SubGraphInfoPtr subgraph3 = CreateSubgraph("engine3", "");
  SubGraphInfoPtr subgraph4 = CreateSubgraph("engine4", "");
  LinkSubGraph(data, "end", subgraph1, "placeholder");
  LinkSubGraph(subgraph1, "end", subgraph2, "placeholder");
  LinkSubGraph(subgraph2, "end", subgraph3, "placeholder");
  LinkSubGraph(subgraph3, "end", subgraph4, "placeholder");

  EngineConfPtr conf1 = make_shared<EngineConf>();
  conf1->id = subgraph1->GetEngineName();
  EngineConfPtr conf2 = make_shared<EngineConf>();
  conf2->id = subgraph2->GetEngineName();
  EngineConfPtr conf3 = make_shared<EngineConf>();
  conf3->id = subgraph3->GetEngineName();
  conf3->skip_assign_stream = true;
  conf3->attach = true;
  EngineConfPtr conf4 = make_shared<EngineConf>();
  conf4->id = subgraph4->GetEngineName();

  Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4}, {conf1, conf2, conf3, conf4});
  EXPECT_EQ(status, ge::SUCCESS);
  EXPECT_EQ(GetStream(subgraph1), 0);
  EXPECT_EQ(GetStream(subgraph2), 1);
  EXPECT_EQ(GetStream(subgraph4), 2);
}
subgraph2, "placeholder"); + LinkSubGraph(subgraph2, "end", subgraph3, "placeholder"); + LinkSubGraph(subgraph3, "end", subgraph4, "placeholder"); + + EngineConfPtr conf1 = make_shared(); + conf1->id = subgraph1->GetEngineName(); + EngineConfPtr conf2 = make_shared(); + conf2->id = subgraph2->GetEngineName(); + EngineConfPtr conf3 = make_shared(); + conf3->id = subgraph3->GetEngineName(); + conf3->skip_assign_stream = true; + conf3->attach = true; + EngineConfPtr conf4 = make_shared(); + conf4->id = subgraph4->GetEngineName(); + + Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4}, {conf1, conf2, conf3, conf4}); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 0); + EXPECT_EQ(GetStream(subgraph2), 1); + EXPECT_EQ(GetStream(subgraph4), 2); +} + +// if stream id of same label is different, then different label with different stream id +TEST_F(UtestLogicalStreamAllocator, test_stream_label) { + SubGraphInfoPtr data = CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraph("engine1", "label1"); + SubGraphInfoPtr subgraph2 = CreateSubgraph("engine2", "label2"); + SubGraphInfoPtr subgraph3 = CreateSubgraph("engine3", "label1"); + SubGraphInfoPtr subgraph4 = CreateSubgraph("engine4", "label2"); + SubGraphInfoPtr subgraph5 = CreateSubgraph("engine5", "label2"); + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(subgraph1, "end", subgraph2, "placeholder"); + LinkSubGraph(subgraph2, "end", subgraph3, "placeholder"); + LinkSubGraph(subgraph3, "end", subgraph4, "placeholder"); + LinkSubGraph(subgraph4, "end", subgraph5, "placeholder"); + + Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4, subgraph5}); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 0); + EXPECT_EQ(GetStream(subgraph2), 1); + EXPECT_EQ(GetStream(subgraph3), 0); + EXPECT_EQ(GetStream(subgraph4), 1); + EXPECT_EQ(GetStream(subgraph5), 1); +} + +TEST_F(UtestLogicalStreamAllocator, 
test_label_not_reusable) { + SubGraphInfoPtr data = CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraphWithName("graph1", "engine1", "label1"); + SubGraphInfoPtr subgraph2 = CreateSubgraphWithName("graph2", "engine1", "label1"); + SubGraphInfoPtr subgraph3 = CreateSubgraphWithName("graph3", "engine1", ""); + SubGraphInfoPtr subgraph4 = CreateSubgraphWithName("graph4", "engine1", ""); + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(subgraph1, "end", subgraph2, "placeholder"); + LinkSubGraph(subgraph2, "end", subgraph3, "placeholder"); + LinkSubGraph(subgraph3, "end", subgraph4, "placeholder"); + + Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4}); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 0); + EXPECT_EQ(GetStream(subgraph2), 0); + EXPECT_EQ(GetStream(subgraph3), 1); + EXPECT_EQ(GetStream(subgraph4), 1); +} +/// data +/// | | +/// sub1 sub2 +/// \ / +/// sub3 +TEST_F(UtestLogicalStreamAllocator, test_label_not_reusable2) { + SubGraphInfoPtr data = CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraphWithName("graph1", "engine1", "label1"); + SubGraphInfoPtr subgraph2 = CreateSubgraphWithName("graph2", "engine1", "label2"); + SubGraphInfoPtr subgraph3 = CreateSubgraphWithName("graph3", "engine2", "", 2, 1); + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(data, "end", subgraph2, "placeholder"); + LinkSubGraph(subgraph1, "end", subgraph3, "placeholder1"); + LinkSubGraph(subgraph2, "end", subgraph3, "placeholder2"); + + EngineConfPtr conf1 = make_shared(); + conf1->id = subgraph1->GetEngineName(); + EngineConfPtr conf2 = make_shared(); + conf2->id = subgraph3->GetEngineName(); + conf2->attach = true; + Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3}, {conf1, conf2}); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 0); + EXPECT_EQ(GetStream(subgraph2), 1); + EXPECT_EQ(GetStream(subgraph3), 2); +} + 
+/// case of multi-output, then unuse stream +/// sub1 +/// / | \ +/// sub2 sub3 sub4 +TEST_F(UtestLogicalStreamAllocator, test_multiOut_new_stream) { + SubGraphInfoPtr data = CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraph("engine1", "", 1, 3); + SubGraphInfoPtr subgraph2 = CreateSubgraph("engine1"); + SubGraphInfoPtr subgraph3 = CreateSubgraph("engine1"); + SubGraphInfoPtr subgraph4 = CreateSubgraph("engine1"); + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(subgraph1, "end1", subgraph2, "placeholder"); + LinkSubGraph(subgraph1, "end2", subgraph3, "placeholder"); + LinkSubGraph(subgraph1, "end3", subgraph4, "placeholder"); + + std::map max_parallel_num; + max_parallel_num["engine1"] = 100; + Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4}, max_parallel_num); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 0); + EXPECT_EQ(GetStream(subgraph2), 1); + EXPECT_EQ(GetStream(subgraph3), 2); + EXPECT_EQ(GetStream(subgraph4), 3); +} + +/// if paralle id 1, then use stream +/// sub1 +/// / | | \ +/// sub2 sub3 sub4 sub5 +TEST_F(UtestLogicalStreamAllocator, test_parallel_one) { + SubGraphInfoPtr data = CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraph("engine1", "", 1, 4); + SubGraphInfoPtr subgraph2 = CreateSubgraph("engine1"); + SubGraphInfoPtr subgraph3 = CreateSubgraph("engine2"); + SubGraphInfoPtr subgraph4 = CreateSubgraph("engine1"); + SubGraphInfoPtr subgraph5 = CreateSubgraph("engine2"); + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(subgraph1, "end1", subgraph2, "placeholder"); + LinkSubGraph(subgraph1, "end2", subgraph3, "placeholder"); + LinkSubGraph(subgraph1, "end3", subgraph4, "placeholder"); + LinkSubGraph(subgraph1, "end4", subgraph5, "placeholder"); + + std::map max_parallel_num; + max_parallel_num["engine1"] = 1; + max_parallel_num["engine2"] = 1; + Status status = AssignLogicalStreams({subgraph1, subgraph2, subgraph3, 
subgraph4, subgraph5}, max_parallel_num); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 0); + EXPECT_EQ(GetStream(subgraph2), 0); + EXPECT_EQ(GetStream(subgraph3), 1); + EXPECT_EQ(GetStream(subgraph4), 0); + EXPECT_EQ(GetStream(subgraph5), 1); +} + +/// if the param of engine independent is true, then set independent stream +/// sub1 +/// / | | \ +/// sub2 sub3 sub4 sub5 +TEST_F(UtestLogicalStreamAllocator, test_independent) { + SubGraphInfoPtr data = CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraph("engine1", "", 1, 4); + SubGraphInfoPtr subgraph2 = CreateSubgraph("engine1"); + SubGraphInfoPtr subgraph3 = CreateSubgraph("engine2"); + SubGraphInfoPtr subgraph4 = CreateSubgraph("engine1"); + SubGraphInfoPtr subgraph5 = CreateSubgraph("engine2"); + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(subgraph1, "end1", subgraph2, "placeholder"); + LinkSubGraph(subgraph1, "end2", subgraph3, "placeholder"); + LinkSubGraph(subgraph1, "end3", subgraph4, "placeholder"); + LinkSubGraph(subgraph1, "end4", subgraph5, "placeholder"); + + std::map max_parallel_num; + max_parallel_num["engine1"] = 100; + max_parallel_num["engine2"] = 100; + + EngineConfPtr conf1 = make_shared(); + conf1->id = "engine1"; + conf1->independent = true; + EngineConfPtr conf2 = make_shared(); + conf2->id = "engine2"; + conf2->independent = true; + vector confs = {conf1, conf2}; + + Status status = + AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4, subgraph5}, max_parallel_num, confs); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 0); + EXPECT_EQ(GetStream(subgraph2), 0); + EXPECT_EQ(GetStream(subgraph3), 1); + EXPECT_EQ(GetStream(subgraph4), 0); + EXPECT_EQ(GetStream(subgraph5), 1); +} + +/// set stream based on stream label, and then based on independent +/// sub1 +/// / | | \ +/// sub2 sub3 sub4 sub5 +TEST_F(UtestLogicalStreamAllocator, test_independent_switch_label) { + SubGraphInfoPtr data = 
CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraphWithName("sub1", "engine0", "", 1, 4); + SubGraphInfoPtr subgraph2 = CreateSubgraphWithName("sub2", "engine1", "label1"); + SubGraphInfoPtr subgraph3 = CreateSubgraphWithName("sub3", "engine2", "label1"); + SubGraphInfoPtr subgraph4 = CreateSubgraphWithName("sub4", "engine1", "label2"); + SubGraphInfoPtr subgraph5 = CreateSubgraphWithName("sub5", "engine2", "label2"); + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(subgraph1, "end1", subgraph2, "placeholder"); + LinkSubGraph(subgraph1, "end2", subgraph3, "placeholder"); + LinkSubGraph(subgraph1, "end3", subgraph4, "placeholder"); + LinkSubGraph(subgraph1, "end4", subgraph5, "placeholder"); + + std::map max_parallel_num; + max_parallel_num["engine0"] = 1; + max_parallel_num["engine1"] = 100; + max_parallel_num["engine2"] = 100; + + EngineConfPtr conf1 = make_shared(); + conf1->id = "engine0"; + conf1->independent = false; + EngineConfPtr conf2 = make_shared(); + conf2->id = "engine1"; + conf2->independent = false; + EngineConfPtr conf3 = make_shared(); + conf3->id = "engine2"; + conf3->independent = true; + vector confs = {conf1, conf2, conf3}; + + Status status = + AssignLogicalStreams({subgraph1, subgraph2, subgraph3, subgraph4, subgraph5}, max_parallel_num, confs); + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 4); + EXPECT_EQ(GetStream(subgraph2), 0); + EXPECT_EQ(GetStream(subgraph3), 2); + EXPECT_EQ(GetStream(subgraph4), 1); + EXPECT_EQ(GetStream(subgraph5), 3); +} + +/// subgraph without input of locate stream +/// data genmask1 +/// | / +/// domask1 genmask2 +/// | / +/// domask2 genmask3 +/// | / +/// domask3 +TEST_F(UtestLogicalStreamAllocator, test_no_input) { + auto data = CreateDataSubgraph(); + auto genmask1 = CreateSubgraphWithName("genmask1", "engine1", "", 0, 1); + auto domask1 = CreateSubgraphWithName("domask1", "engine1", "", 2, 1); + auto genmask2 = CreateSubgraphWithName("genmask2", 
"engine1", "", 0, 1); + auto domask2 = CreateSubgraphWithName("domask2", "engine1", "", 2, 1); + auto genmask3 = CreateSubgraphWithName("genmask3", "engine1", "", 0, 1); + auto domask3 = CreateSubgraphWithName("domask3", "engine1", "", 2, 1); + + LinkSubGraph(data, "end", domask1, "placeholder1"); + LinkSubGraph(genmask1, "end", domask1, "placeholder2"); + LinkSubGraph(domask1, "end", domask2, "placeholder1"); + LinkSubGraph(genmask2, "end", domask2, "placeholder2"); + LinkSubGraph(domask2, "end", domask3, "placeholder1"); + LinkSubGraph(genmask3, "end", domask3, "placeholder2"); + + Status status = AssignLogicalStreams({data, genmask1, domask1, genmask2, domask2, genmask3, domask3}); + EXPECT_EQ(status, ge::SUCCESS); + + EXPECT_EQ(GetStream(genmask1), 0); + EXPECT_EQ(GetStream(genmask2), 0); + EXPECT_EQ(GetStream(genmask3), 0); + EXPECT_EQ(GetStream(domask1), 0); + EXPECT_EQ(GetStream(domask2), 0); + EXPECT_EQ(GetStream(domask3), 0); +} + +/// subgraph with const input locate stream +/// data genmask1 - const1 +/// | / +/// domask1 genmask2 - const2 +/// | / +/// domask2 genmask3 - const3 +/// | / +/// domask3 +TEST_F(UtestLogicalStreamAllocator, test_const_input) { + auto data = CreateDataSubgraph(); + auto const1 = CreateConstSubgraph(); + auto const2 = CreateConstSubgraph(); + auto const3 = CreateConstSubgraph(); + auto genmask1 = CreateSubgraphWithName("genmask1", "engine1", "", 1, 1); + auto domask1 = CreateSubgraphWithName("domask1", "engine1", "", 2, 1); + auto genmask2 = CreateSubgraphWithName("genmask2", "engine1", "", 1, 1); + auto domask2 = CreateSubgraphWithName("domask2", "engine1", "", 2, 1); + auto genmask3 = CreateSubgraphWithName("genmask3", "engine1", "", 1, 1); + auto domask3 = CreateSubgraphWithName("domask3", "engine1", "", 2, 1); + + LinkSubGraph(const1, "end", genmask1, "placeholder"); + LinkSubGraph(const2, "end", genmask2, "placeholder"); + LinkSubGraph(const3, "end", genmask3, "placeholder"); + LinkSubGraph(data, "end", domask1, 
"placeholder1"); + LinkSubGraph(genmask1, "end", domask1, "placeholder2"); + LinkSubGraph(domask1, "end", domask2, "placeholder1"); + LinkSubGraph(genmask2, "end", domask2, "placeholder2"); + LinkSubGraph(domask2, "end", domask3, "placeholder1"); + LinkSubGraph(genmask3, "end", domask3, "placeholder2"); + + Status status = + AssignLogicalStreams({data, const1, const2, const3, genmask1, domask1, genmask2, domask2, genmask3, domask3}); + EXPECT_EQ(status, ge::SUCCESS); + + EXPECT_EQ(GetStream(genmask1), 0); + EXPECT_EQ(GetStream(genmask2), 0); + EXPECT_EQ(GetStream(genmask3), 0); + EXPECT_EQ(GetStream(domask1), 0); + EXPECT_EQ(GetStream(domask2), 0); + EXPECT_EQ(GetStream(domask3), 0); +} + +TEST_F(UtestLogicalStreamAllocator, TestAllParallelNum1) { TestAll(1); } + +TEST_F(UtestLogicalStreamAllocator, TestAllParallelNum2) { TestAll(2); } + +TEST_F(UtestLogicalStreamAllocator, TestReusableSubgraphNotAssignedStream) { + SubGraphInfoPtr data = CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraphWithName("graph1", "engine1", ""); + SubGraphInfoPtr subgraph2 = CreateSubgraphWithName("graph2", "engine1", ""); + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(subgraph1, "end", subgraph2, "placeholder"); + + Status status = AssignLogicalStreams({data, subgraph2, subgraph1}); + EXPECT_EQ(status, ge::SUCCESS); +} +/// Optimize for case like: +/// NodeA(stream1) -> Const(stream2) -> NodeB(stream1) +/// To case: +/// NodeA(stream1) -> Const(stream1) -> NodeB(stream1) +/// Which could reduce event number (Const could be other type which belong to skipped engine subgraph) + +/// data +/// | +/// subgraph1(label) +/// | +/// const2 +/// | +/// subgrah3(label) +TEST_F(UtestLogicalStreamAllocator, test_reassign_stream) { + SubGraphInfoPtr data = CreateDataSubgraph(); + SubGraphInfoPtr subgraph1 = CreateSubgraphWithNodeName("subgraph1", "relu1", "engine1", "label"); + SubGraphInfoPtr const2 = CreateSubgraphWithNodeName("const2", "const2", "ge_local"); 
+ SubGraphInfoPtr subgraph3 = CreateSubgraphWithNodeName("subgrah3", "relu3", "engine1", "label"); + + LinkSubGraph(data, "end", subgraph1, "placeholder"); + LinkSubGraph(subgraph1, "end", const2, "placeholder"); + LinkSubGraph(const2, "end", subgraph3, "placeholder"); + + EngineConfPtr conf1 = make_shared(); + conf1->id = subgraph1->GetEngineName(); + EngineConfPtr conf2 = make_shared(); + conf2->id = const2->GetEngineName(); + conf2->skip_assign_stream = true; + EngineConfPtr conf3 = make_shared(); + conf3->id = subgraph3->GetEngineName(); + + auto node1 = subgraph1->GetSubGraph()->FindNode("relu1"); + auto node2 = const2->GetSubGraph()->FindNode("const2"); + auto node3 = subgraph3->GetSubGraph()->FindNode("relu3"); + ComputeGraphPtr whole_graph = make_shared("whole_graph"); + auto node1_1 = whole_graph->AddNode(node1->GetOpDesc()); + auto node1_2 = whole_graph->AddNode(node2->GetOpDesc()); + auto node1_3 = whole_graph->AddNode(node3->GetOpDesc()); + GraphUtils::AddEdge(node1_1->GetOutControlAnchor(), node1_2->GetInControlAnchor()); + GraphUtils::AddEdge(node1_2->GetOutDataAnchor(0), node1_3->GetInDataAnchor(0)); + GraphUtils::AddEdge(node1->GetOutControlAnchor(), node2->GetInControlAnchor()); + + std::map max_parallel_num; + vector subgraphs = {subgraph1, const2, subgraph3}; + vector confs = {conf1, conf2, conf3}; + Status status = AssignLogicalStreams(subgraphs, confs, max_parallel_num, whole_graph); + + EXPECT_EQ(status, ge::SUCCESS); + EXPECT_EQ(GetStream(subgraph1), 0); + EXPECT_EQ(node2->GetOpDesc()->GetStreamId(), 0); + EXPECT_EQ(GetStream(subgraph3), 0); +} + +TEST_F(UtestLogicalStreamAllocator, test_all_reduce_parallel_pass) { + graphStatus ret = GRAPH_SUCCESS; + + ge::ComputeGraphPtr graph = make_shared(""); + graph->SetName("TestAllReduceParallelPass"); + make_graph_with_allreduce(graph); + + std::map max_parallel_num; + LogicalStreamPass::Context context; + context.next_stream = 3; + context.hcom_parallel = true; + vector subgraphs; + 
LogicalStreamPassPtr allreduce_pass = std::make_shared(); + ret = allreduce_pass->Run(graph, subgraphs, context); + + EXPECT_EQ(ret, SUCCESS); +} + +} // namespace ge diff --git a/tests/ut/ge/graph/build/mem_assigner_unittest.cc b/tests/ut/ge/graph/build/mem_assigner_unittest.cc new file mode 100644 index 00000000..903ac9ad --- /dev/null +++ b/tests/ut/ge/graph/build/mem_assigner_unittest.cc @@ -0,0 +1,184 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/node_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "omg/omg_inner_types.h" + +#define protected public +#define private public +#include "graph/build/memory/binary_block_mem_assigner.h" +#include "graph/build/memory/hybrid_mem_assigner.h" +#include "graph/build/memory/max_block_mem_assigner.h" +#undef protected +#undef private + +using namespace std; +using namespace testing; +using namespace ge; +using domi::GetContext; + +class UtestMemoryAssignerTest : public testing::Test { + public: + ge::OpDescPtr createOpWithWsSize(const string &name, int64_t wsByte, const string &type = "some") { + ge::OpDescPtr op_def = make_shared(name, type); + auto desc_temp_ptr = make_shared(); + auto desc_temp = *desc_temp_ptr; + + TensorUtils::SetSize(desc_temp, 1024); + op_def->AddInputDesc(desc_temp); + op_def->AddOutputDesc(desc_temp); + + std::vector workspace_bytes; + workspace_bytes.push_back(wsByte); + op_def->SetWorkspaceBytes(workspace_bytes); + return op_def; + } + void make_graph(ge::ComputeGraphPtr graph) { + ge::OpDescPtr op_def_a = createOpWithWsSize("A", 6000); + op_def_a->SetStreamId(0); + ge::OpDescPtr op_def_b = createOpWithWsSize("B", 120000); + op_def_b->SetStreamId(0); + ge::OpDescPtr op_def_c = createOpWithWsSize("C", 16000); + op_def_c->SetStreamId(1); + ge::OpDescPtr op_def_d = createOpWithWsSize("D", 24000); + op_def_d->SetStreamId(2); + ge::OpDescPtr op_def_e = createOpWithWsSize("E", 24000); + op_def_e->SetStreamId(3); + ge::OpDescPtr op_def_f = createOpWithWsSize("F", 30000); + op_def_f->SetStreamId(2); + ge::OpDescPtr op_def_g = createOpWithWsSize("G", 32000); + op_def_g->SetStreamId(3); + ge::OpDescPtr op_def_h = createOpWithWsSize("H", 48000); + op_def_h->SetStreamId(2); + ge::OpDescPtr op_def_i = 
createOpWithWsSize("I", 60000); + op_def_i->SetStreamId(2); + ge::OpDescPtr op_def_j = createOpWithWsSize("J", 256000, NETOUTPUT); + op_def_j->SetStreamId(3); + + // add node + ge::NodePtr node_a = graph->AddNode(op_def_a); + ge::NodePtr node_b = graph->AddNode(op_def_b); + ge::NodePtr node_c = graph->AddNode(op_def_c); + ge::NodePtr node_d = graph->AddNode(op_def_d); + ge::NodePtr node_e = graph->AddNode(op_def_e); + ge::NodePtr node_f = graph->AddNode(op_def_f); + ge::NodePtr node_g = graph->AddNode(op_def_g); + ge::NodePtr node_h = graph->AddNode(op_def_h); + ge::NodePtr node_i = graph->AddNode(op_def_i); + ge::NodePtr node_j = graph->AddNode(op_def_j); + + // add edge + ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_c->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(0), node_d->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_b->GetOutDataAnchor(0), node_e->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_c->GetOutDataAnchor(0), node_g->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_d->GetOutDataAnchor(0), node_f->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_e->GetOutDataAnchor(0), node_g->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(node_f->GetOutDataAnchor(0), node_h->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_g->GetOutDataAnchor(0), node_j->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_h->GetOutDataAnchor(0), node_i->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_i->GetOutDataAnchor(0), node_j->GetInDataAnchor(1)); + + GetContext().out_nodes_map["H"] = {0}; + GetContext().out_nodes_map["I"] = {0}; + GetContext().out_nodes_map["J"] = {0}; + graph->TopologicalSorting(); + } + + void make_reuse_graph(ge::ComputeGraphPtr graph) { + ge::OpDescPtr op_def_a = createOpWithWsSize("A", 6000); + ge::OpDescPtr op_def_b = createOpWithWsSize("B", 120000); + + ge::OpDescPtr op_def_c = make_shared("C", "Some"); + auto 
desc_input_ptr = make_shared(); + auto desc_input = *desc_input_ptr; + + TensorUtils::SetSize(desc_input, 1024); + op_def_c->AddInputDesc(desc_input); + + auto desc_output_ptr = make_shared(); + auto desc_output = *desc_output_ptr; + TensorUtils::SetSize(desc_output, 6500); + ge::TensorUtils::SetReuseInput(desc_output, true); + ge::TensorUtils::SetReuseInputIndex(desc_output, 0); + op_def_c->AddOutputDesc(desc_output); + + ge::OpDescPtr op_def_d = make_shared("D", "CONSTANT"); + + ge::NodePtr node_a = graph->AddNode(op_def_a); + ge::NodePtr node_b = graph->AddNode(op_def_b); + ge::NodePtr node_c = graph->AddNode(op_def_c); + ge::NodePtr node_d = graph->AddNode(op_def_d); + + ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_c->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_d->GetInDataAnchor(0)); + GetContext().out_nodes_map["B"] = {0}; + GetContext().out_nodes_map["C"] = {0}; + graph->TopologicalSorting(); + } + + protected: + void SetUp() {} + + void TearDown() { GetContext().out_nodes_map.clear(); } +}; + +TEST_F(UtestMemoryAssignerTest, MemoryBlock_Resize_RealSizeList_is_empty) { + ge::ComputeGraphPtr graph = make_shared(""); + ge::OpDescPtr op_def_a = createOpWithWsSize("A", 6000); + ge::NodePtr node_a = graph->AddNode(op_def_a); + MemoryBlock* memory_block = new MemoryBlock(0); + memory_block->Init(1, kOutput, node_a, 0); + memory_block->real_size_list_.clear(); + memory_block->Resize(); + + EXPECT_EQ(memory_block->Size(), 0); + + delete memory_block; +} + +namespace ge { + +class MockBlockMemAssigner : public BlockMemAssigner { + public: + explicit MockBlockMemAssigner(ge::ComputeGraphPtr compute_graph) : BlockMemAssigner(compute_graph){}; + + virtual ~MockBlockMemAssigner(){}; + + Status GetMemoryRanges(std::vector &ranges) override { return FAILED; } + +}; +} // namespace ge + +// when check GetMemoryRanges return fail, Assign 
return fail +TEST_F(UtestMemoryAssignerTest, Mock_block_mem_assigner_failed) { + ge::ComputeGraphPtr graph = make_shared(""); + make_graph(graph); + MockBlockMemAssigner mock_assigner(graph); + + EXPECT_EQ(mock_assigner.Assign(), FAILED); +} diff --git a/tests/ut/ge/graph/ge_executor_unittest.cc b/tests/ut/ge/graph/ge_executor_unittest.cc new file mode 100644 index 00000000..7f5388ad --- /dev/null +++ b/tests/ut/ge/graph/ge_executor_unittest.cc @@ -0,0 +1,87 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "common/util.h" +#include "runtime/mem.h" +#include "common/util.h" +#include "omg/omg_inner_types.h" + +#define private public +#define protected public +#include "executor/ge_executor.h" + +#include "common/auth/file_saver.h" +#include "common/debug/log.h" +#include "common/properties_manager.h" +#include "common/types.h" +#include "graph/load/graph_loader.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/davinci_model_parser.h" +#include "graph/load/new_model_manager/model_manager.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/utils/graph_utils.h" +#include "proto/ge_ir.pb.h" +#undef private +#undef protected + +using namespace std; +using namespace ge; + +class UtestGeExecutor : public testing::Test { + protected: + static void InitModelDefault(ge::Model &model) { + ge::AttrUtils::SetInt(&model, ATTR_MODEL_MEMORY_SIZE, 0); + ge::AttrUtils::SetInt(&model, ATTR_MODEL_WEIGHT_SIZE, 0); + ge::AttrUtils::SetInt(&model, ATTR_MODEL_STREAM_NUM, 0); + ge::AttrUtils::SetInt(&model, ATTR_MODEL_EVENT_NUM, 0); + ge::AttrUtils::SetStr(&model, ATTR_MODEL_TARGET_TYPE, "MINI"); // domi::MINI + + auto compute_graph = std::make_shared("graph"); + auto graph = ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model.SetGraph(graph); + } + + void SetUp() { + unsetenv("FMK_SYSMODE"); + unsetenv("FMK_DUMP_PATH"); + unsetenv("FMK_USE_FUSION"); + unsetenv("DAVINCI_TIMESTAT_ENABLE"); + } +}; + +TEST_F(UtestGeExecutor, fail_UnloadModel_model_manager_stop_unload_error) { + uint32_t model_id = 1; + ge::GeExecutor ge_executor; + ge_executor.is_init_ = true; + ge::Status ret = ge_executor.UnloadModel(model_id); + EXPECT_EQ(ge::PARAM_INVALID, ret); + + ge_executor.is_init_ = false; + ret = ge_executor.UnloadModel(model_id); + EXPECT_EQ(ge::GE_EXEC_NOT_INIT, ret); +} + +TEST_F(UtestGeExecutor, 
fail_CommandHandle_model_manager_HandleCommand_error) { + ge::Command cmd; + ge::GeExecutor ge_executor; + ge::Status ret = ge_executor.CommandHandle(cmd); + EXPECT_EQ(ge::PARAM_INVALID, ret); +} diff --git a/tests/ut/ge/graph/graph_load_unittest.cc b/tests/ut/ge/graph/graph_load_unittest.cc new file mode 100644 index 00000000..e516ec7a --- /dev/null +++ b/tests/ut/ge/graph/graph_load_unittest.cc @@ -0,0 +1,96 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "common/helper/model_helper.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/load/new_model_manager/davinci_model_parser.h" +#include "graph/op_desc.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/op_desc_utils.h" + +#define protected public +#define private public +#include "graph/load/graph_loader.h" + +#include "framework/common/ge_inner_error_codes.h" +#include "graph/load/new_model_manager/model_manager.h" +#include "graph/manager/graph_manager_utils.h" +#include "model/ge_model.h" +#undef private +#undef protected + +using namespace testing; +namespace ge { + +class UtestGraphGraphLoad : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphGraphLoad, load_graph_param_invalid1) { + std::shared_ptr graph_run_listener = nullptr; + SubGraphInfo sub_graph1; + ge::SubGraphInfoPtr sub_graph_ptr1 = std::make_shared(sub_graph1); + ModelIdInfo model_Id_info; + model_Id_info.model_id = 1; + + GeModelPtr ge_model_ptr = std::make_shared(); + sub_graph_ptr1->SetGeModelPtr(ge_model_ptr); + + std::vector input_flag; + input_flag.push_back(false); + sub_graph_ptr1->SetInputFlag(input_flag); + + ge::GraphLoader graph_load; + EXPECT_EQ(GE_GRAPH_PARAM_NULLPTR, graph_load.LoadGraph(sub_graph_ptr1->ge_model_ptr_, graph_run_listener, model_Id_info)); + sub_graph_ptr1->SetModelIdInfo(model_Id_info); +} + +TEST_F(UtestGraphGraphLoad, load_graph_param_invalid2) { + std::mutex sync_run_mutex; + std::condition_variable condition; + std::shared_ptr listener = std::make_shared(); + listener->mutex_ = &sync_run_mutex; + listener->condition_ = &condition; + + SubGraphInfo sub_graph1; + ge::SubGraphInfoPtr sub_graph_ptr1 = std::make_shared(sub_graph1); + ModelIdInfo model_Id_info; + model_Id_info.model_id = 1; + + GeModelPtr ge_model_ptr = std::make_shared(); + 
sub_graph_ptr1->SetGeModelPtr(ge_model_ptr); + + std::vector input_flag; + input_flag.push_back(false); + sub_graph_ptr1->SetInputFlag(input_flag); + + ge::GraphLoader graph_load; + EXPECT_EQ(GE_GRAPH_PARAM_NULLPTR, graph_load.LoadGraph(sub_graph_ptr1->ge_model_ptr_, listener, model_Id_info)); + sub_graph_ptr1->SetModelIdInfo(model_Id_info); +} +} // namespace ge diff --git a/tests/ut/ge/graph/load/data_dumper_unittest.cc b/tests/ut/ge/graph/load/data_dumper_unittest.cc new file mode 100644 index 00000000..d94d21ec --- /dev/null +++ b/tests/ut/ge/graph/load/data_dumper_unittest.cc @@ -0,0 +1,63 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define private public +#define protected public +#include "graph/load/new_model_manager/data_dumper.h" +#include "graph/load/new_model_manager/davinci_model.h" +#undef private +#undef protected + +using namespace std; + +namespace ge { +class UtestDataDumper : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +std::vector stub_get_output_addrs(const RuntimeParam &model_param, ConstOpDescPtr op_desc) { + std::vector res; + res.emplace_back(reinterpret_cast(23333)); + return res; +} + +TEST_F(UtestDataDumper, LoadDumpInfo_no_output_addrs_fail) { + DataDumper data_dumper; + data_dumper.SetModelName("test"); + data_dumper.SetModelId(2333); + data_dumper.SetMemory(std::move(RuntimeParam{})); + std::shared_ptr op_desc_1(new OpDesc()); + op_desc_1->AddOutputDesc("test", GeTensorDesc()); + data_dumper.SaveDumpTask(0, op_desc_1, 0); + + Status ret = data_dumper.LoadDumpInfo(); + EXPECT_EQ(ret, PARAM_INVALID); +} + +TEST_F(UtestDataDumper, UnloadDumpInfo_success) { + DataDumper data_dumper; + data_dumper.SetModelName("test"); + data_dumper.SetModelId(2333); + + Status ret = data_dumper.UnloadDumpInfo(); + EXPECT_EQ(ret, SUCCESS); +} +} // namespace ge diff --git a/tests/ut/ge/graph/load/end_graph_task_unittest.cc b/tests/ut/ge/graph/load/end_graph_task_unittest.cc new file mode 100644 index 00000000..dedb2dac --- /dev/null +++ b/tests/ut/ge/graph/load/end_graph_task_unittest.cc @@ -0,0 +1,55 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define private public +#define protected public +#include "graph/load/new_model_manager/task_info/end_graph_task_info.h" +#include "graph/load/new_model_manager/davinci_model.h" +#undef private +#undef protected + +using namespace std; + +namespace ge { +class UtestEndGraphTask : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +// test Init_EndGraphTaskInfo_failed +TEST_F(UtestEndGraphTask, init_end_graph_task_info) { + domi::TaskDef task_def; + EndGraphTaskInfo task_info; + EXPECT_EQ(task_info.Init(task_def, nullptr), PARAM_INVALID); + + DavinciModel model(0, nullptr); + task_def.set_stream_id(0); + EXPECT_EQ(task_info.Init(task_def, &model), FAILED); + + model.stream_list_.push_back((void *)0x12345); + EXPECT_EQ(task_info.Init(task_def, &model), SUCCESS); + model.stream_list_.clear(); +} + +TEST_F(UtestEndGraphTask, distribute_success) { + EndGraphTaskInfo task_info; + EXPECT_EQ(task_info.Distribute(), SUCCESS); +} +} // namespace ge diff --git a/tests/ut/ge/graph/load/new_model_manager_data_inputer_unittest.cc b/tests/ut/ge/graph/load/new_model_manager_data_inputer_unittest.cc new file mode 100644 index 00000000..56e673f7 --- /dev/null +++ b/tests/ut/ge/graph/load/new_model_manager_data_inputer_unittest.cc @@ -0,0 +1,64 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + + +#include + +#include "graph/load/new_model_manager/data_inputer.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/types.h" +#include "new_op_test_utils.h" + +using namespace std; +using namespace testing; + +namespace ge { + +class UtestModelManagerDataInputer : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +/// InputDataWrapper +/// constructor +/// GetInput +TEST_F(UtestModelManagerDataInputer, inputdatawrapper_construct) { + InputDataWrapper *input_data_wrapper = new InputDataWrapper(); + + input_data_wrapper->GetInput(); + + delete input_data_wrapper; +} + +/// InputDataWrapper +/// Init func with correct input +TEST_F(UtestModelManagerDataInputer, success_inputdatawrapper_init) { + InputDataWrapper *input_data_wrapper = new InputDataWrapper(); + ge::InputData input_data; + ge::OutputData output_data; + Status ret = input_data_wrapper->Init(input_data, output_data); + + EXPECT_EQ(ret, SUCCESS); + + delete input_data_wrapper; + input_data_wrapper = NULL; +} + +} // namespace ge diff --git a/tests/ut/ge/graph/load/new_model_manager_davinci_model_unittest.cc b/tests/ut/ge/graph/load/new_model_manager_davinci_model_unittest.cc new file mode 100644 index 00000000..c8c69689 --- /dev/null +++ b/tests/ut/ge/graph/load/new_model_manager_davinci_model_unittest.cc @@ -0,0 +1,1441 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/types.h" + +#define private public +#define protected public +#include "graph/compute_graph.h" +#include "graph/utils/graph_utils.h" +#include "graph/model_serialize.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/model_output.h" +#include "common/properties_manager.h" +#include "common/op/ge_op_utils.h" +#include +#include "runtime/dev.h" +#include "runtime/kernel.h" +#include "cce/fwk_adpt_struct.h" +#include "graph/load/new_model_manager/task_info/task_info_factory.h" +#include "graph/load/new_model_manager/task_info/task_info.h" +#include "graph/load/new_model_manager/task_info/stream_active_task_info.h" +#include "graph/load/new_model_manager/task_info/stream_switch_task_info.h" +#include "graph/load/new_model_manager/task_info/profiler_trace_task_info.h" +#include "graph/load/new_model_manager/task_info/memcpy_async_task_info.h" +#include "graph/load/new_model_manager/task_info/label_goto_task_info.h" +#include "graph/load/new_model_manager/task_info/label_set_task_info.h" +#include "graph/load/new_model_manager/task_info/kernel_ex_task_info.h" +#include "graph/load/new_model_manager/task_info/kernel_task_info.h" +#include "graph/load/new_model_manager/task_info/hccl_task_info.h" +#include "graph/load/new_model_manager/task_info/fusion_start_task_info.h" +#include "graph/load/new_model_manager/task_info/fusion_stop_task_info.h" +#include 
"graph/load/new_model_manager/task_info/event_record_task_info.h" +#include "graph/load/new_model_manager/task_info/event_wait_task_info.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/load/new_model_manager/model_manager.h" +#undef private +#undef protected + +#include "new_op_test_utils.h" +#include "graph/debug/ge_attr_define.h" + +using namespace std; +using namespace testing; +using domi::EventExDef; +using domi::KernelContext; +using domi::KernelDef; +using domi::LogTimeStampDef; +using domi::ModelTaskDef; +using domi::StreamActiveDef; +using domi::TaskDef; + +namespace ge { +class UtestModelManagerDavinciModel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +class DModelListener : public ge::ModelListener { + public: + DModelListener(){}; + uint32_t OnComputeDone(uint32_t model_id, uint32_t data_index, uint32_t resultCode) { + GELOGI("In Call back. OnComputeDone"); + return 0; + } +}; + +shared_ptr g_label_call_back(new DModelListener()); + +static ge::OpDescPtr CreateOpDesc(string name = "", string type = "") { + auto op_desc = std::make_shared(name, type); + op_desc->SetStreamId(0); + op_desc->SetId(0); + + ge::AttrUtils::SetFloat(op_desc, ge::ATTR_NAME_ALPHA, 0); + ge::AttrUtils::SetFloat(op_desc, ge::ATTR_NAME_BETA, 0); + + op_desc->SetWorkspace({}); + ; + op_desc->SetWorkspaceBytes({}); + op_desc->SetInputOffset({}); + op_desc->SetOutputOffset({}); + + ge::AttrUtils::SetListStr(op_desc, ge::ATTR_NAME_WEIGHT_NAME, {}); + ge::AttrUtils::SetInt(op_desc, ge::POOLING_ATTR_MODE, 0); + ge::AttrUtils::SetInt(op_desc, ge::POOLING_ATTR_PAD_MODE, 0); + ge::AttrUtils::SetInt(op_desc, ge::POOLING_ATTR_DATA_MODE, 0); + ge::AttrUtils::SetInt(op_desc, ge::POOLING_ATTR_CEIL_MODE, 0); + ge::AttrUtils::SetInt(op_desc, ge::POOLING_ATTR_NAN_OPT, 0); + ge::AttrUtils::SetListInt(op_desc, ge::POOLING_ATTR_WINDOW, {}); + ge::AttrUtils::SetListInt(op_desc, ge::POOLING_ATTR_PAD, {}); + ge::AttrUtils::SetListInt(op_desc, 
ge::POOLING_ATTR_STRIDE, {}); + ge::AttrUtils::SetListInt(op_desc, ge::ATTR_NAME_ACTIVE_STREAM_LIST, {1, 1}); + ge::AttrUtils::SetInt(op_desc, ge::ATTR_NAME_STREAM_SWITCH_COND, 0); + ge::AttrUtils::SetInt(op_desc, ge::ATTR_NAME_FRAMEWORK_FWK_TYPE, FMK_TYPE_T); + return op_desc; +} + +// tset failed_rt_free_host +TEST_F(UtestModelManagerDavinciModel, failed_rt_free_host) { + DavinciModel model(0, g_label_call_back); + + OutputData output_data; + + auto op_desc = CreateOpDesc("Pooling", "Pooling"); + op_desc->SetOutputOffset({1}); + op_desc->SetInputOffset({1}); + + { + ge::GeTensorDesc in_desc(ge::GeShape({1, 1, 1, 1})); + ge::TensorUtils::SetSize(in_desc, 16); + ge::TensorUtils::SetOutputTensor(in_desc, false); + ge::TensorUtils::SetInputTensor(in_desc, true); + op_desc->AddInputDesc(in_desc); + } + + { + ge::GeTensorDesc out_desc(ge::GeShape({1, 1, 1, 1})); + ge::TensorUtils::SetSize(out_desc, 16); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetInputTensor(out_desc, false); + op_desc->AddOutputDesc(out_desc); + } + ge::AttrUtils::SetInt(op_desc, ge::POOLING_ATTR_PAD_MODE, cce::CC_PADDING_DIRECTASSIGN); + ge::AttrUtils::SetListInt(op_desc, ge::POOLING_ATTR_PAD, vector({1, 1, 1, 1})); + ge::AttrUtils::SetListInt(op_desc, ge::POOLING_ATTR_WINDOW, vector({1, 1})); + ge::AttrUtils::SetListInt(op_desc, ge::POOLING_ATTR_STRIDE, vector({1, 1})); + + auto compute_graph = make_shared("g"); + auto node = compute_graph->AddNode(op_desc); + + OmeTestOpUtils::InitModel(model); + + model.data_op_list_.push_back(op_desc); + + EXPECT_EQ(ge::INTERNAL_ERROR, model.ReturnResult(1, 1, false, false, &output_data)); +} + +// test modeldef_fail +TEST_F(UtestModelManagerDavinciModel, contruct_modeldef_createfail) { + DavinciModel model(0, g_label_call_back); + + OmeTestOpUtils::InitModel(model); + + auto op_desc = CreateOpDesc("Pooling", "Pooling"); + op_desc->SetOutputOffset({1}); + op_desc->SetInputOffset({1}); + + { + ge::GeTensorDesc in_desc(ge::GeShape({1, 
1, 1, 1})); + ge::TensorUtils::SetSize(in_desc, 16); + ge::TensorUtils::SetOutputTensor(in_desc, false); + ge::TensorUtils::SetInputTensor(in_desc, true); + op_desc->AddInputDesc(in_desc); + } + + { + ge::GeTensorDesc out_desc(ge::GeShape({1, 1, 1, 1})); + ge::TensorUtils::SetSize(out_desc, 16); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetInputTensor(out_desc, false); + op_desc->AddOutputDesc(out_desc); + } + ge::AttrUtils::SetInt(op_desc, ge::POOLING_ATTR_PAD_MODE, cce::CC_PADDING_DIRECTASSIGN); + ge::AttrUtils::SetListInt(op_desc, ge::POOLING_ATTR_PAD, vector({1, 1, 1, 1})); + ge::AttrUtils::SetListInt(op_desc, ge::POOLING_ATTR_WINDOW, vector({1, 1})); + ge::AttrUtils::SetListInt(op_desc, ge::POOLING_ATTR_STRIDE, vector({1, 1})); + + // EXPECT_EQ(ge::SUCCESS, model.Init()); + + model.GetEventList(); +} + +// test CopyInputDataToModel +TEST_F(UtestModelManagerDavinciModel, copy_input_data_to_model_fail) { + DavinciModel model(0, g_label_call_back); + + ge::InputData input_data; + ge::DataBuffer data_buffer; + data_buffer.data = new char[16]; + data_buffer.length = 16; + input_data.index = 0; + input_data.model_id = 1; + input_data.blobs.push_back(data_buffer); + + model.op_list_.clear(); + // EXPECT_EQ(ge::PARAM_INVALID, model.CopyInputDataToModel(input_data.blobs, 0)); + + delete[](char *) data_buffer.data; +} + +// test StreamNum +TEST_F(UtestModelManagerDavinciModel, streamnum_success) { + DavinciModel *model = new DavinciModel(0, g_label_call_back); + + OmeTestOpUtils::InitModel(*model); + // EXPECT_EQ(ge::SUCCESS, model->Init()); + + EXPECT_EQ(0, model->StreamNum()); + EXPECT_EQ(ge::INTERNAL_ERROR, model->ModelRunStart()); + + EXPECT_EQ(ge::SUCCESS, model->ModelRunStop()); + + delete model; +} + +// test EventNum +TEST_F(UtestModelManagerDavinciModel, eventnum_success) { + DavinciModel *model = new DavinciModel(0, g_label_call_back); + + OmeTestOpUtils::InitModel(*model); + + // EXPECT_EQ(ge::SUCCESS, model->Init()); + + 
EXPECT_EQ(0, model->EventNum()); + EXPECT_EQ(ge::INTERNAL_ERROR, model->ModelRunStart()); + + EXPECT_EQ(ge::SUCCESS, model->ModelRunStop()); + + delete model; +} + +TEST_F(UtestModelManagerDavinciModel, handlelist_success) { + DavinciModel *model = new DavinciModel(0, g_label_call_back); + + OmeTestOpUtils::InitModel(*model); + + // EXPECT_EQ(ge::SUCCESS, model->Init()); + + EXPECT_EQ(ge::INTERNAL_ERROR, model->ModelRunStart()); + + EXPECT_EQ(ge::SUCCESS, model->ModelRunStop()); + + delete model; +} + +// test GetEventList +TEST_F(UtestModelManagerDavinciModel, eventlist_success) { + DavinciModel *model = new DavinciModel(0, g_label_call_back); + + OmeTestOpUtils::InitModel(*model); + + // EXPECT_EQ(ge::SUCCESS, model->Init()); + + EXPECT_EQ(true, model->GetEventList().empty()); + EXPECT_EQ(ge::INTERNAL_ERROR, model->ModelRunStart()); + + EXPECT_EQ(ge::SUCCESS, model->ModelRunStop()); + + delete model; +} + +// test rtMalloc +TEST_F(UtestModelManagerDavinciModel, failed_reset_device) { + DavinciModel model(0, g_label_call_back); + ge::OutputData output_data; + ge::DataBuffer buf_data; + rtMalloc(&buf_data.data, 128, RT_MEMORY_HBM); + buf_data.length = 128; + output_data.blobs.push_back(buf_data); + EXPECT_EQ(ge::INTERNAL_ERROR, model.ReturnResult(1, 1, true, false, &output_data)); + rtFree(buf_data.data); +} + +// test priority +TEST_F(UtestModelManagerDavinciModel, init_not_support_priority) { + int32_t priority = 8; + DavinciModel model(priority, g_label_call_back); + // EXPECT_EQ(ge::PARAM_INVALID, model.Init()); +} + +// test GetInputOutputDescInfo +TEST_F(UtestModelManagerDavinciModel, success_GetInputOutputDescInfo_without_netoutput) { + DavinciModel model(0, g_label_call_back); + + auto op_desc = CreateOpDesc("Data", "Data"); + op_desc->SetOutputOffset({1}); + op_desc->SetInputOffset({1}); + op_desc->SetStreamId(0); + + { + ge::GeTensorDesc in_desc(ge::GeShape({1, 1, 10, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(in_desc, 
false); + ge::TensorUtils::SetInputTensor(in_desc, true); + op_desc->AddInputDesc(in_desc); + } + + { + ge::GeTensorDesc out_desc(ge::GeShape({1, 1, 10, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetInputTensor(out_desc, false); + op_desc->AddOutputDesc(out_desc); + } + + op_desc->SetSrcName({"Pooling1", "Pooling0"}); + op_desc->SetSrcIndex({0, 1}); + + auto compute_graph = make_shared("g"); + auto node = compute_graph->AddNode(op_desc); + + model.data_op_list_.push_back(op_desc); + model.output_size_list_.push_back(32); + + model.op_list_[0] = op_desc; + + model.output_op_list_.push_back(op_desc); + + vector input_shapes; + vector output_shapes; + EXPECT_EQ(ge::SUCCESS, model.GetInputOutputDescInfo(input_shapes, output_shapes)); +} + +TEST_F(UtestModelManagerDavinciModel, CopyTensorFromSrcVarNode_input_is_nullptr) { + NodePtr src_node = nullptr; + NodePtr dst_node = nullptr; + DavinciModel model(0, g_label_call_back); + Status ret = model.CopyTensorFromSrcVarNode(src_node, dst_node); + EXPECT_EQ(FAILED, ret); +} + +TEST_F(UtestModelManagerDavinciModel, CopyTensorFromSrcVarNode_success) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + OpDescPtr op_desc_ptr = make_shared("Cast", "Cast"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT16); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + NodePtr src_node = graph->AddNode(op_desc_ptr); + NodePtr dst_node = graph->AddNode(op_desc_ptr); + DavinciModel model(0, g_label_call_back); + Status ret = model.CopyTensorFromSrcVarNode(src_node, dst_node); + // EXPECT_EQ(SUCCESS, ret); +} + +TEST_F(UtestModelManagerDavinciModel, CopyVarData_graph_is_nullptr) { + ge::ComputeGraphPtr graph = nullptr; + DavinciModel model(0, g_label_call_back); + Status ret = model.CopyVarData(graph); + 
EXPECT_EQ(FAILED, ret); +} + +TEST_F(UtestModelManagerDavinciModel, copy_var_data_success) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + OpDescPtr op_desc_ptr = make_shared("Variable", "Variable"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT16); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT16); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + NodePtr src_node = graph->AddNode(op_desc_ptr); + (void)ge::AttrUtils::SetStr(src_node->GetOpDesc(), "_copy_from_var_node", "abc"); + (void)ge::AttrUtils::SetBool(src_node->GetOpDesc(), "_copy_value", false); + + DavinciModel model(0, g_label_call_back); + Status ret = model.CopyVarData(graph); + // EXPECT_EQ(SUCCESS, ret); +} + +TEST_F(UtestModelManagerDavinciModel, get_input_output_desc_info_without_data_op_list) { + DavinciModel model(0, g_label_call_back); + vector input_list; + vector output_list; + Status ret = model.GetInputOutputDescInfo(input_list, output_list); + EXPECT_EQ(SUCCESS, ret); +} + +// test GetInputOutputDescInfo +TEST_F(UtestModelManagerDavinciModel, success_get_input_output_descInfo_with_net_output) { + DavinciModel model(0, g_label_call_back); + + auto op_desc = CreateOpDesc("Data", "Data"); + op_desc->SetOutputOffset({1}); + op_desc->SetInputOffset({1}); + op_desc->SetStreamId(0); + + { + ge::GeTensorDesc in_desc(ge::GeShape({1, 1, 10, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(in_desc, false); + ge::TensorUtils::SetInputTensor(in_desc, true); + op_desc->AddInputDesc(in_desc); + } + + { + ge::GeTensorDesc out_desc(ge::GeShape({1, 1, 10, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetInputTensor(out_desc, false); + op_desc->AddOutputDesc(out_desc); + } + op_desc->SetSrcName({"Pooling1", "Pooling0"}); + 
op_desc->SetSrcIndex({0, 1}); + + auto compute_graph = make_shared("g"); + auto data_node = compute_graph->AddNode(op_desc); + + model.data_op_list_.push_back(op_desc); + + op_desc->SetType("NetOutput"); + + auto no_node = compute_graph->AddNode(op_desc); + + model.op_list_[0] = op_desc; + + model.output_op_list_.push_back(op_desc); + model.output_size_list_.push_back(32); + + vector input_shapes; + vector output_shapes; + EXPECT_EQ(ge::SUCCESS, model.GetInputOutputDescInfo(input_shapes, output_shapes)); +} + +TEST_F(UtestModelManagerDavinciModel, success_get_input_output_desc_info_for_zero_copy_with_net_output) { + DavinciModel model(0, g_label_call_back); + + auto op_desc = CreateOpDesc("Data", "Data"); + op_desc->SetOutputOffset({1}); + op_desc->SetInputOffset({1}); + op_desc->SetStreamId(0); + + { + ge::GeTensorDesc in_desc(ge::GeShape({1, 1, 10, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(in_desc, false); + ge::TensorUtils::SetInputTensor(in_desc, true); + op_desc->AddInputDesc(in_desc); + } + + { + ge::GeTensorDesc out_desc(ge::GeShape({1, 1, 10, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetInputTensor(out_desc, false); + op_desc->AddOutputDesc(out_desc); + } + + op_desc->SetSrcName({"Pooling1", "Pooling0"}); + op_desc->SetSrcIndex({0, 1}); + + auto compute_graph = make_shared("g"); + auto data_node = compute_graph->AddNode(op_desc); + + model.data_op_list_.push_back(op_desc); + + op_desc->SetType("NetOutput"); + + auto net_out_node = compute_graph->AddNode(op_desc); + model.op_list_[0] = op_desc; + + model.output_op_list_.push_back(op_desc); + model.output_size_list_.push_back(32); + model.output_memory_size_list_.push_back(64); + + vector input_shapes; + vector output_shapes; + EXPECT_EQ(ge::SUCCESS, model.GetInputOutputDescInfoForZeroCopy(input_shapes, output_shapes)); +} + 
+TEST_F(UtestModelManagerDavinciModel, success_get_input_output_desc_info_dim_size_not4) { + DavinciModel model(0, g_label_call_back); + + auto op_desc = CreateOpDesc("Data", "Data"); + op_desc->SetOutputOffset({1}); + op_desc->SetInputOffset({1}); + op_desc->SetStreamId(0); + + { + ge::GeTensorDesc in_desc(ge::GeShape({1, 1, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(in_desc, false); + ge::TensorUtils::SetInputTensor(in_desc, true); + op_desc->AddInputDesc(in_desc); + } + + { + ge::GeTensorDesc out_desc(ge::GeShape({1, 1, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetInputTensor(out_desc, false); + op_desc->AddOutputDesc(out_desc); + } + + op_desc->SetSrcName({"Pooling1", "Pooling0"}); + op_desc->SetSrcIndex({0, 1}); + + auto compute_graph = make_shared("g"); + auto data_node = compute_graph->AddNode(op_desc); + + model.data_op_list_.push_back(op_desc); + + op_desc->SetType("NetOutput"); + + auto net_out_node = compute_graph->AddNode(op_desc); + model.op_list_[0] = op_desc; + + model.output_op_list_.push_back(op_desc); + model.output_size_list_.push_back(32); + + vector input_shapes; + vector output_shapes; + EXPECT_EQ(ge::SUCCESS, model.GetInputOutputDescInfo(input_shapes, output_shapes)); +} + +// test GetLabelList +TEST_F(UtestModelManagerDavinciModel, get_label_list_success) { + DavinciModel model(0, g_label_call_back); + OmeTestOpUtils::InitModel(model); + vector label_list; + model.label_list_ = label_list; + EXPECT_EQ(label_list, model.GetLabelList()); +} + +// test GetInputListSize +TEST_F(UtestModelManagerDavinciModel, get_label_list_size_success) { + DavinciModel model(0, g_label_call_back); + OmeTestOpUtils::InitModel(model); + vector data_op_list; + data_op_list.push_back(std::make_shared()); + model.data_op_list_ = data_op_list; +} + +// test GetFlowctrlOpList +TEST_F(UtestModelManagerDavinciModel, 
get_flow_ctrl_op_list_success) { + DavinciModel model(0, g_label_call_back); + OmeTestOpUtils::InitModel(model); + std::map flowctrl_op_index_internal_map; + flowctrl_op_index_internal_map.insert(pair(1, 1)); + model.flowctrl_op_index_internal_map_ = flowctrl_op_index_internal_map; + // EXPECT_EQ(flowctrl_op_index_internal_map_, model.GetFlowctrlOpList()); +} + +// test SetFlowctrlOpList +TEST_F(UtestModelManagerDavinciModel, get_flow_ctrl_index_success) { + DavinciModel model(0, g_label_call_back); + OmeTestOpUtils::InitModel(model); + EXPECT_EQ(0, model.GetFlowctrlIndex(0)); + EXPECT_EQ(1, model.GetFlowctrlIndex(0)); + EXPECT_EQ(0, model.GetFlowctrlIndex(1)); + EXPECT_EQ(1, model.GetFlowctrlIndex(1)); + EXPECT_EQ(2, model.GetFlowctrlIndex(0)); +} + +// test GetRegisterStub +TEST_F(UtestModelManagerDavinciModel, success_get_register_stub) { + DavinciModel model(0, g_label_call_back); + OmeTestOpUtils::InitModel(model); + std::string binfile = "tvmbin"; + string ret = model.GetRegisterStub(binfile); + EXPECT_EQ("tvmbin", ret); + model.tvm_bin_kernel_.insert("tvmbin"); + ret = model.GetRegisterStub(binfile); + EXPECT_EQ("tvmbin", ret); +} + +// test InitTbeHandle +TEST_F(UtestModelManagerDavinciModel, success_init_tbe_handle) { + DavinciModel model(0, g_label_call_back); + OmeTestOpUtils::InitModel(model); + std::shared_ptr op_desc = std::make_shared(); + Status ret = model.InitTbeHandle(op_desc); + EXPECT_EQ(ge::INTERNAL_ERROR, ret); +} + +// test InitTVMTask failed +TEST_F(UtestModelManagerDavinciModel, init_tvm_task_failed1) { + DavinciModel model(0, g_label_call_back); + uint16_t offset = 0; + TaskDef *task_def = new TaskDef(); + KernelDef *kernel_def = task_def->mutable_kernel(); + map op_list; + model.op_list_ = op_list; + + KernelTaskInfo *kernel_task_info = new KernelTaskInfo(); + Status ret = kernel_task_info->InitTVMTask(&model, offset, kernel_def[0]); + EXPECT_EQ(INTERNAL_ERROR, ret); + task_def->clear_kernel(); + delete kernel_task_info; + delete 
task_def; +} + +TEST_F(UtestModelManagerDavinciModel, kernel_taskInfo_init_cce_task_failed1) { + DavinciModel model(0, g_label_call_back); + + TaskDef *task_def = new TaskDef(); + KernelTaskInfo *kernel_task_info = new KernelTaskInfo(); + KernelDef *kernel_def = task_def->mutable_kernel(); + Status ret = kernel_task_info->InitCceTask(&model, kernel_def[0]); + EXPECT_EQ(ge::INTERNAL_ERROR, ret); + task_def->clear_kernel(); + delete kernel_task_info; + delete task_def; +} + +// test SetContext success +TEST_F(UtestModelManagerDavinciModel, success_kernel_taskInfo_init_set_context) { + DavinciModel model(0, g_label_call_back); + + TaskDef *task_def = new TaskDef(); + KernelTaskInfo *kernel_task_info = new KernelTaskInfo(); + KernelDef *kernel_def = task_def->mutable_kernel(); + KernelContext *context = kernel_def->mutable_context(); + context->set_op_id(1); + context->set_kernel_func_id(1); + context->set_is_flowtable(true); + context->set_args_count(1); + context->set_args_offset("args111111", 10); + + Status ret = kernel_task_info->SetContext(kernel_def[0]); + EXPECT_EQ(ge::SUCCESS, ret); + + ret = kernel_task_info->Release(); + EXPECT_EQ(ge::SUCCESS, ret); + kernel_def->clear_context(); + task_def->clear_kernel(); + delete kernel_task_info; + delete task_def; +} + +// test SetContext failed +TEST_F(UtestModelManagerDavinciModel, kernel_taskInfo_init_set_context_failed1) { + DavinciModel model(0, g_label_call_back); + + TaskDef *task_def = new TaskDef(); + KernelTaskInfo *kernel_task_info = new KernelTaskInfo(); + KernelDef *kernel_def = task_def->mutable_kernel(); + KernelContext *context = kernel_def->mutable_context(); + context->set_op_id(1); + context->set_kernel_func_id(1); + context->set_is_flowtable(true); + context->set_args_count(0); + Status ret = kernel_task_info->SetContext(kernel_def[0]); + EXPECT_EQ(ge::INTERNAL_ERROR, ret); + + kernel_def->clear_context(); + task_def->clear_kernel(); + delete kernel_task_info; + delete task_def; +} + 
+TEST_F(UtestModelManagerDavinciModel, kernel_taskInfo_init_set_context_failed2) { + DavinciModel model(0, g_label_call_back); + + TaskDef *task_def = new TaskDef(); + KernelTaskInfo *kernel_task_info = new KernelTaskInfo(); + KernelDef *kernel_def = task_def->mutable_kernel(); + KernelContext *context = kernel_def->mutable_context(); + context->set_op_id(1); + context->set_kernel_func_id(1); + context->set_is_flowtable(true); + context->set_args_count(5); + context->set_args_offset("\0\0"); // args_offset = 0 + + Status ret = kernel_task_info->SetContext(kernel_def[0]); + EXPECT_EQ(ge::PARAM_INVALID, ret); + + kernel_def->clear_context(); + task_def->clear_kernel(); + delete kernel_task_info; + delete task_def; +} + +// test success DistributeDumpTask +TEST_F(UtestModelManagerDavinciModel, success_distribute_dump_task) { + DavinciModel model(0, g_label_call_back); + TaskDef *task_def = new TaskDef(); + KernelTaskInfo *kernel_task_info = new KernelTaskInfo(); + KernelDef *kernel_def = task_def->mutable_kernel(); + + kernel_def->set_stub_func("kerneltaskinfo"); + kernel_def->set_block_dim(10); + kernel_def->set_args("args111111", 10); + kernel_def->set_args_size(10); + rtSmDesc_t l2CtrlInfo; + l2CtrlInfo.data[0].L2_mirror_addr = 1024; + kernel_def->set_sm_desc((void *)&l2CtrlInfo, sizeof(rtSmDesc_t)); + + // for SetStream + rtStream_t stream = nullptr; + rtStreamCreate(&stream, 0); + std::vector stream_list; + stream_list.push_back(stream); + Status ret = kernel_task_info->SetStream(0, stream_list); + EXPECT_EQ(SUCCESS, ret); + + ret = kernel_task_info->Release(); + EXPECT_EQ(SUCCESS, ret); + rtStreamDestroy(stream); + task_def->clear_kernel(); + delete kernel_task_info; + delete task_def; +} + +// test success GetTaskID +TEST_F(UtestModelManagerDavinciModel, success_get_task_id) { + ModelTaskDef *model_task_def = new ModelTaskDef(); + TaskDef *task = model_task_def->add_task(); + task->set_type(RT_MODEL_TASK_KERNEL); + TaskInfoPtr task_info = 
TaskInfoFactory::Instance().Create(static_cast(task->type())); + + KernelTaskInfo *kernel_task_info = new KernelTaskInfo(); + uint32_t ret = task_info->GetTaskID(); + EXPECT_EQ(0, ret); + ret = kernel_task_info->GetTaskID(); + EXPECT_EQ(0, ret); + HcclTaskInfo *hccl_task_info = new HcclTaskInfo(); + ret = hccl_task_info->GetTaskID(); + EXPECT_EQ(0, ret); + + delete hccl_task_info; + delete kernel_task_info; + delete model_task_def; +} + +// test StoreInputOutputTensor success +TEST_F(UtestModelManagerDavinciModel, success_store_input_output_tensor) { + DavinciModel model(0, g_label_call_back); + TaskDef *task_def = new TaskDef(); + KernelTaskInfo *kernel_task_info = new KernelTaskInfo(); + + std::vector input_data_addrs; + std::vector output_data_addrs; + std::vector<::tagCcAICPUTensor> input_descs; + std::vector<::tagCcAICPUTensor> output_descs; + + int test = 1; + int *addr = &test; + void *input; + void *output; + input = addr; + output = addr; + input_data_addrs.push_back(&input); + output_data_addrs.push_back(output); + + tagCcAICPUTensor input_desc; + tagCcAICPUTensor output_desc; + input_descs.push_back(input_desc); + output_descs.push_back(output_desc); + + Status ret = kernel_task_info->StoreInputOutputTensor(input_data_addrs, output_data_addrs, input_descs, output_descs); + EXPECT_EQ(SUCCESS, ret); + ret = kernel_task_info->Release(); + EXPECT_EQ(SUCCESS, ret); + delete kernel_task_info; + delete task_def; +} + +// test init EventRecordTaskInfo +TEST_F(UtestModelManagerDavinciModel, success_event_record_task_init) { + DavinciModel *model1 = nullptr; + TaskDef *task_def1 = new TaskDef(); + EventRecordTaskInfo *eventRecordTaskInfo1 = new EventRecordTaskInfo(); + Status ret1 = eventRecordTaskInfo1->Init(task_def1[0], model1); + EXPECT_EQ(PARAM_INVALID, ret1); + + delete eventRecordTaskInfo1; + delete task_def1; + delete model1; + DavinciModel model(0, g_label_call_back); + + ModelTaskDef *model_task_info = new ModelTaskDef(); + TaskDef *task = 
model_task_info->add_task(); + task->set_type(RT_MODEL_TASK_EVENT_RECORD); + TaskInfoPtr task_info = TaskInfoFactory::Instance().Create(static_cast(task->type())); + + task->stream_id_ = 0; + rtStream_t rt_stream; + rtStreamCreate(&rt_stream, 1); + vector stream_list; + stream_list.push_back(rt_stream); + model.stream_list_ = stream_list; + + task->set_event_id(1); + model.runtime_param_.event_num = 1; + Status ret = task_info->Init(task[0], &model); + EXPECT_EQ(ge::INTERNAL_ERROR, ret); + + model.runtime_param_.event_num = 2; + rtEvent_t event1; + rtEvent_t event2; + rtEventCreate(&event1); + rtEventCreate(&event2); + model.event_list_.push_back(event1); + model.event_list_.push_back(event2); + + EventExDef *event_ex_def = task->mutable_event_ex(); + event_ex_def->set_event_type(1); + + ret = task_info->Init(task[0], &model); + EXPECT_EQ(SUCCESS, ret); + + task->clear_event_ex(); + task_info->Release(); + delete model_task_info; +} + +// test init EventWaitTaskInfo +TEST_F(UtestModelManagerDavinciModel, success_event_wait_task_init) { + DavinciModel *model1 = nullptr; + TaskDef *task_def1 = new TaskDef(); + EventWaitTaskInfo *event_wait_task_info1 = new EventWaitTaskInfo(); + Status ret1 = event_wait_task_info1->Init(task_def1[0], model1); + EXPECT_EQ(PARAM_INVALID, ret1); + + delete event_wait_task_info1; + delete task_def1; + delete model1; + DavinciModel model(0, g_label_call_back); + + ModelTaskDef *model_task_info = new ModelTaskDef(); + TaskDef *task = model_task_info->add_task(); + task->set_type(RT_MODEL_TASK_EVENT_WAIT); + TaskInfoPtr task_info = TaskInfoFactory::Instance().Create(static_cast(task->type())); + + task->stream_id_ = 0; + rtStream_t rt_stream; + rtStreamCreate(&rt_stream, 1); + vector stream_list; + stream_list.push_back(rt_stream); + model.stream_list_ = stream_list; + + task->set_event_id(1); + model.runtime_param_.event_num = 1; + Status ret = task_info->Init(task[0], &model); + EXPECT_EQ(ge::INTERNAL_ERROR, ret); + + 
model.runtime_param_.event_num = 2; + rtEvent_t event1; + rtEvent_t event2; + rtEventCreate(&event1); + rtEventCreate(&event2); + model.event_list_.push_back(event1); + model.event_list_.push_back(event2); + + EventExDef *event_ex_def = task->mutable_event_ex(); + event_ex_def->set_event_type(1); + + ret = task_info->Init(task[0], &model); + EXPECT_EQ(SUCCESS, ret); + + task->clear_event_ex(); + task_info->Release(); + delete model_task_info; +} + +// test fusion_start_task Init +TEST_F(UtestModelManagerDavinciModel, success_fusion_start_task_init) { + DavinciModel *model1 = nullptr; + TaskDef *task_def1 = new TaskDef(); + FusionStartTaskInfo *fusion_start_task_info1 = new FusionStartTaskInfo(); + Status ret1 = fusion_start_task_info1->Init(task_def1[0], model1); + EXPECT_EQ(PARAM_INVALID, ret1); + + delete fusion_start_task_info1; + delete task_def1; + delete model1; + DavinciModel model(0, g_label_call_back); + TaskDef *task_def = new TaskDef(); + FusionStartTaskInfo *fusion_start_task_info = new FusionStartTaskInfo(); + task_def->set_stream_id(0); + rtStream_t stream; + rtStreamCreate(&stream, 0); + model.stream_list_.push_back(stream); + + Status ret = fusion_start_task_info->Init(task_def[0], &model); + EXPECT_EQ(SUCCESS, ret); + delete fusion_start_task_info; + delete task_def; +} + +// test fusion_end_task Init +TEST_F(UtestModelManagerDavinciModel, success_fusion_end_task_rinit) { + DavinciModel *model1 = nullptr; + TaskDef *task_def1 = new TaskDef(); + FusionStopTaskInfo *fusion_stop_task_info1 = new FusionStopTaskInfo(); + Status ret1 = fusion_stop_task_info1->Init(task_def1[0], model1); + EXPECT_EQ(PARAM_INVALID, ret1); + + delete fusion_stop_task_info1; + delete task_def1; + delete model1; + DavinciModel model(0, g_label_call_back); + TaskDef *task_def = new TaskDef(); + FusionStopTaskInfo *fusion_stop_task_info = new FusionStopTaskInfo(); + task_def->set_stream_id(0); + rtStream_t stream; + rtStreamCreate(&stream, 0); + 
model.stream_list_.push_back(stream); + + Status ret = fusion_stop_task_info->Init(task_def[0], &model); + EXPECT_EQ(SUCCESS, ret); + delete fusion_stop_task_info; + delete task_def; +} + +// test kernel_ex_task_Release +TEST_F(UtestModelManagerDavinciModel, success_kernel_ex_task_release) { + KernelExTaskInfo *kernel_ex_task_info = new KernelExTaskInfo(); + Status ret = kernel_ex_task_info->Release(); + EXPECT_EQ(SUCCESS, ret); + + delete kernel_ex_task_info; +} + +// test hccl_Distribute +TEST_F(UtestModelManagerDavinciModel, success_Distribute7) { + DavinciModel model(0, g_label_call_back); + + ModelTaskDef *model_task_def = new ModelTaskDef(); + TaskDef *task7 = model_task_def->add_task(); + task7->set_type(RT_MODEL_TASK_HCCL); + TaskInfoPtr task_info7 = TaskInfoFactory::Instance().Create(static_cast(task7->type())); + Status ret = task_info7->Init(task7[0], &model); + EXPECT_EQ(FAILED, ret); + + std::vector task_list; + task_list.push_back(task_info7); + model.task_list_ = task_list; + + task_info7->Release(); + delete model_task_def; +} + +// test hccl_GetPrivateDefByTaskDef +TEST_F(UtestModelManagerDavinciModel, success_hccl_get_private_def_by_task_def) { + DavinciModel model(0, g_label_call_back); + + ModelTaskDef *model_task_def = new ModelTaskDef(); + TaskDef *task7 = model_task_def->add_task(); + task7->set_type(RT_MODEL_TASK_HCCL); + // for SetStream + rtStream_t stream = nullptr; + rtStreamCreate(&stream, 0); + model.stream_list_.push_back(stream); + // for GetPrivateDefByTaskDef + task7->set_ops_kernel_store_ptr(10); + std::string value = "hccl_task"; + task7->set_private_def(value); + + TaskInfoPtr task_info7 = TaskInfoFactory::Instance().Create(static_cast(task7->type())); + // for Distribute + Status ret = task_info7->Init(task7[0], &model); + EXPECT_EQ(ge::PARAM_INVALID, ret); + + task_info7->Release(); + delete model_task_def; +} + +// test hccl_task_TransToGETaskInfo +TEST_F(UtestModelManagerDavinciModel, success_hccl_trans_to_ge_task_info) { + 
DavinciModel model(0, g_label_call_back); + + ModelTaskDef *model_task_def = new ModelTaskDef(); + TaskDef *task7 = model_task_def->add_task(); + // for type + task7->set_type(RT_MODEL_TASK_HCCL); + TaskInfoPtr task_info7 = TaskInfoFactory::Instance().Create(static_cast(task7->type())); + + GETaskInfo ge_task; + HcclTaskInfo *hccl_task_info = new HcclTaskInfo(); + hccl_task_info->TransToGETaskInfo(ge_task); + + delete hccl_task_info; + delete model_task_def; +} + +// test stream_active_task Init +TEST_F(UtestModelManagerDavinciModel, success_stream_active_task_init) { + DavinciModel *model1 = nullptr; + TaskDef *task_def1 = new TaskDef(); + StreamActiveTaskInfo *stream_active_task_info1 = new StreamActiveTaskInfo(); + Status ret1 = stream_active_task_info1->Init(task_def1[0], model1); + EXPECT_EQ(PARAM_INVALID, ret1); + delete stream_active_task_info1; + delete task_def1; + delete model1; + + DavinciModel model(0, g_label_call_back); + TaskDef *task_def = new TaskDef(); + task_def->set_stream_id(0); + rtStream_t stream1, stream2; + rtStreamCreate(&stream1, 0); + rtStreamCreate(&stream2, 0); + model.stream_list_.push_back(stream1); + + StreamActiveTaskInfo *stream_active_task_info = new StreamActiveTaskInfo(); + + StreamActiveDef *stream_active_def = task_def->mutable_stream_active(); + stream_active_def->set_op_index(0); + stream_active_def->set_active_stream_id(0); + + std::map flowctrl; + flowctrl.insert(pair(1, 1)); + model.flowctrl_op_index_internal_map_ = flowctrl; + + auto opDef = CreateOpDesc("", ""); + model.op_list_[0] = opDef; + + Status ret = stream_active_task_info->Init(task_def[0], &model); + EXPECT_EQ(ge::INTERNAL_ERROR, ret); // line 51 + + model.stream_list_.push_back(stream2); + ret = stream_active_task_info->Init(task_def[0], &model); + EXPECT_EQ(SUCCESS, ret); + + task_def->clear_stream_active(); + delete stream_active_task_info; + delete task_def; +} + +// test label_set_task Init +TEST_F(UtestModelManagerDavinciModel, 
success_label_set_task_init) { + DavinciModel *model1 = nullptr; + TaskDef *task_def1 = new TaskDef(); + LabelSetTaskInfo *label_set_task_info1 = new LabelSetTaskInfo(); + Status ret1 = label_set_task_info1->Init(task_def1[0], model1); + EXPECT_EQ(PARAM_INVALID, ret1); + delete label_set_task_info1; + delete task_def1; + delete model1; + + DavinciModel model(0, g_label_call_back); + TaskDef *task_def = new TaskDef(); + LabelSetTaskInfo *label_set_task_info = new LabelSetTaskInfo(); + task_def->set_stream_id(0); + rtStream_t stream; + rtStreamCreate(&stream, 0); + model.stream_list_.push_back(stream); + + task_def->set_label_id(1); + model.runtime_param_.batch_num = 0; + Status ret = label_set_task_info->Init(task_def[0], &model); + EXPECT_EQ(PARAM_INVALID, ret); + + task_def->clear_label_id(); + task_def->set_label_id(0); + model.runtime_param_.batch_num = 1; + rtLabel_t label; + rtLabelCreate(&label); + model.label_list_.push_back(label); + + ret = label_set_task_info->Init(task_def[0], &model); + EXPECT_EQ(SUCCESS, ret); + delete label_set_task_info; + delete task_def; +} + +// test label_goto_task init +TEST_F(UtestModelManagerDavinciModel, success_label_goto_task_init) { + DavinciModel model(0, g_label_call_back); + TaskDef *task_def = new TaskDef(); + LabelGotoTaskInfo *label_goto_task_info = new LabelGotoTaskInfo(); + task_def->set_stream_id(0); + + rtStream_t stream; + rtStreamCreate(&stream, 0); + model.stream_list_.push_back(stream); + + rtLabel_t label; + rtLabelCreate(&label); + model.label_list_.push_back(label); + + Status ret = label_goto_task_info->Init(task_def[0], &model); + EXPECT_EQ(SUCCESS, ret); + + delete label_goto_task_info; + delete task_def; +} + +// test profiler_trace_task init +TEST_F(UtestModelManagerDavinciModel, success_profiler_trace_task_init) { + DavinciModel *model1 = nullptr; + TaskDef *task_def1 = new TaskDef(); + ProfilerTraceTaskInfo *profiler_trace_task_info1 = new ProfilerTraceTaskInfo(); + Status ret1 = 
profiler_trace_task_info1->Init(task_def1[0], model1); + EXPECT_EQ(PARAM_INVALID, ret1); + + delete profiler_trace_task_info1; + delete task_def1; + delete model1; + DavinciModel model(0, g_label_call_back); + TaskDef *task_def = new TaskDef(); + task_def->set_stream_id(0); + rtStream_t stream; + rtStreamCreate(&stream, 0); + model.stream_list_.push_back(stream); + LogTimeStampDef *logTimeStampDef = task_def->mutable_log_timestamp(); + logTimeStampDef->set_logid(1); + logTimeStampDef->set_notify(1); + logTimeStampDef->set_flat(1); + ProfilerTraceTaskInfo *profiler_trace_task_info = new ProfilerTraceTaskInfo(); + Status ret = profiler_trace_task_info->Init(task_def[0], &model); + EXPECT_EQ(SUCCESS, ret); + + task_def->clear_log_timestamp(); + delete profiler_trace_task_info; + delete task_def; +} + +TEST_F(UtestModelManagerDavinciModel, profiling_model_success) { + rtStream_t stream = nullptr; + rtStreamCreate(&stream, 0); + + DavinciModel model(0, g_label_call_back); + model.model_id_ = 1; + model.name_ = "test"; + model.version_ = 0x01; + + model.stream_list_.push_back(stream); + + ge::ModelData data; + rtMallocHost(&data.model_data, 128); + data.model_len = 128; + + ModelDef *model_def = new ModelDef(); + auto op_def = CreateOpDesc("", "Data"); + op_def->SetInputOffset({1}); + op_def->SetOutputOffset({100}); + + ge::GeTensorDesc descin(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT); + ge::TensorUtils::SetSize(descin, 4); + op_def->AddInputDesc(descin); + ge::GeTensorDesc desc_out(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetSize(desc_out, 32); + op_def->AddInputDesc(desc_out); + op_def->SetId(0); + + model.data_op_list_.push_back(op_def); + model.op_list_[0] = op_def; + + auto opdef1 = CreateOpDesc("", "Relu"); + opdef1->SetInputOffset({1}); + opdef1->SetOutputOffset({100}); + + ge::GeTensorDesc desc_in1(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT); + ge::TensorUtils::SetSize(desc_in1, 4); + 
opdef1->AddInputDesc(desc_in1); + ge::GeTensorDesc desc_out1(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetSize(desc_out1, 32); + opdef1->AddInputDesc(desc_out1); + op_def->SetId(1); + + model.op_list_[1] = opdef1; + + auto opdef2 = CreateOpDesc("", "Relu"); + opdef2->SetInputOffset({1}); + opdef2->SetOutputOffset({100}); + + ge::GeTensorDesc desc_in2(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT); + ge::TensorUtils::SetSize(desc_in2, 4); + opdef2->AddInputDesc(desc_in2); + ge::GeTensorDesc desc_out2(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetSize(desc_out2, 32); + opdef2->AddInputDesc(desc_out2); + op_def->SetId(2); + + model.op_list_[2] = opdef2; + + auto opdef3 = CreateOpDesc("", "Relu"); + opdef3->SetInputOffset({1}); + opdef3->SetOutputOffset({100}); + + ge::GeTensorDesc desc_in3(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT); + ge::TensorUtils::SetSize(desc_in3, 4); + opdef3->AddInputDesc(desc_in3); + ge::GeTensorDesc desc_out3(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetSize(desc_out3, 32); + opdef3->AddInputDesc(desc_out3); + op_def->SetId(3); + + model.op_list_[3] = opdef3; + + auto opdef4 = CreateOpDesc("", "Relu"); + opdef4->SetInputOffset({1}); + opdef4->SetOutputOffset({100}); + + ge::GeTensorDesc desc_in4(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT); + ge::TensorUtils::SetSize(desc_in4, 4); + opdef4->AddInputDesc(desc_in4); + ge::GeTensorDesc desc_out4(ge::GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_FLOAT16); + ge::TensorUtils::SetSize(desc_out4, 32); + opdef4->AddInputDesc(desc_out4); + op_def->SetId(4); + + model.op_list_[4] = opdef4; + + ge::InputData input_data; + ge::DataBuffer data_buffer; + data_buffer.data = new char[4]; + data_buffer.length = 4; + input_data.index = 0; + input_data.model_id = 1; + input_data.blobs.push_back(data_buffer); + // model.SinkModelProfile(&model); + + 
rtFreeHost(data.model_data); + // delete stream; + delete[](char *) data_buffer.data; + delete model_def; +} + +TEST_F(UtestModelManagerDavinciModel, success_output_list_0) { + DavinciModel model(0, g_label_call_back); + + uint32_t version = 0; + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + Status ret = VarManager::Instance(session_id)->Init(version, session_id, device_id, job_id); + EXPECT_EQ(ret, ge::SUCCESS); + + ret = model.ReturnNoOutput(1, 1); + EXPECT_EQ(ret, ge::SUCCESS); + + VarManagerPool::Instance().Destroy(); +} + +// test dyncbatch_distributeTask_SUCCESS +TEST_F(UtestModelManagerDavinciModel, dyncbatch_distribute_task_success) { + DavinciModel model(0, g_label_call_back); + + rtStream_t stream = nullptr; + rtStreamCreate(&stream, 0); + + rtLabel_t label = nullptr; + rtLabelCreate(&label); + model.label_list_.push_back(label); + rtLabelCreate(&label); + model.label_list_.push_back(label); + rtLabelCreate(&label); + model.label_list_.push_back(label); + + rtLabelDestroy(label); + rtStreamDestroy(stream); +} + +// test GetOutputDescInfo +TEST_F(UtestModelManagerDavinciModel, success_get_output_desc_info_with_netoutput) { + setenv("GE_TRAIN", "1", true); + DavinciModel model(0, g_label_call_back); + + auto op_desc = CreateOpDesc("Data", "Data"); + op_desc->SetOutputOffset({1}); + op_desc->SetInputOffset({1}); + op_desc->SetStreamId(0); + + { + ge::GeTensorDesc in_desc(ge::GeShape({1, 1, 10, 10}), ge::FORMAT_FRACTAL_Z, ge::DT_FLOAT16); + ge::TensorUtils::SetOutputTensor(in_desc, false); + ge::TensorUtils::SetInputTensor(in_desc, true); + op_desc->AddInputDesc(in_desc); + } + + { + ge::GeTensorDesc out_desc(ge::GeShape({1, 1, 10, 10}), ge::FORMAT_NCHW, ge::DT_FLOAT); + ge::TensorUtils::SetOutputTensor(out_desc, true); + ge::TensorUtils::SetInputTensor(out_desc, false); + op_desc->AddOutputDesc(out_desc); + } + + op_desc->SetSrcName({"Pooling1", "Pooling0"}); + op_desc->SetSrcIndex({0, 1}); + + auto compute_graph = 
make_shared("g"); + + op_desc->SetType("NetOutput"); + + auto net_out_node = compute_graph->AddNode(op_desc); + model.op_list_[0] = op_desc; + + model.output_op_list_.push_back(op_desc); + model.output_size_list_.push_back(32); + model.output_memory_size_list_.push_back(64); + + vector output_shapes; + vector formats; + EXPECT_EQ(ge::SUCCESS, model.GetOutputDescInfo(output_shapes, formats)); + + setenv("GE_TRAIN", "0", true); +} + +TEST_F(UtestModelManagerDavinciModel, device_runtime_success_Run) { + rtStream_t stream = nullptr; + rtStreamCreate(&stream, 0); + + DavinciModel model(0, g_label_call_back); + + model.stream_list_.push_back(stream); + auto model_def = make_shared(); + + auto op_def = CreateOpDesc("", "Data"); + + auto compute_graph = make_shared("g"); + compute_graph->AddNode(op_def); + + model_def->SetGraph(ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph)); + + model.data_op_list_.push_back(op_def); + + model.data_inputer_ = new DataInputer(); + + model.ModelRunStart(); + + OutputData output_data; + ge::InputData input_data; + + ge::DataBuffer data_buffer; + data_buffer.data = new char[16]; + data_buffer.length = 16; + + input_data.index = 0; + input_data.model_id = 1; + input_data.blobs.push_back(data_buffer); + + model.ModelRunStop(); + + delete[](char *) data_buffer.data; +} + +TEST_F(UtestModelManagerDavinciModel, run_failed) { + rtStream_t stream = nullptr; + rtStreamCreate(&stream, 0); + + DavinciModel model(0, g_label_call_back); + + model.stream_list_.push_back(stream); + auto model_def = make_shared(); + + auto op_def = CreateOpDesc("", "Data"); + + auto compute_graph = make_shared("g"); + compute_graph->AddNode(op_def); + + model_def->SetGraph(ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph)); + + model.data_op_list_.push_back(op_def); + + model.data_inputer_ = new DataInputer(); + + model.ModelRunStart(); + + OutputData output_data; + ge::InputData input_data; + + ge::DataBuffer data_buffer; + data_buffer.data = new 
char[16]; + data_buffer.length = 16; + + input_data.index = 0; + input_data.model_id = 1; + input_data.blobs.push_back(data_buffer); + + model.ModelRunStop(); + delete[](char *) data_buffer.data; +} + +TEST_F(UtestModelManagerDavinciModel, run_failed01) { + rtStream_t stream = nullptr; + rtStreamCreate(&stream, 0); + + DavinciModel model(0, g_label_call_back); + + model.stream_list_.push_back(stream); + auto model_def = make_shared(); + + auto op_def = CreateOpDesc("", "Data"); + + auto compute_graph = make_shared("g"); + compute_graph->AddNode(op_def); + + model_def->SetGraph(ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph)); + + model.data_op_list_.push_back(op_def); + + model.data_inputer_ = nullptr; + model.ModelRunStart(); + + model.ModelRunStop(); +} + +TEST_F(UtestModelManagerDavinciModel, init_tbe_handle_fe_registered) { + DavinciModel::tvm_bin_kernel_.clear(); + DavinciModel model(0, g_label_call_back); + OpDescPtr op_desc = CreateOpDesc("MatMul", "MatMul"); + + std::vector kernelBin; + TBEKernelPtr tbe_kernel = std::make_shared("name/MatMul", std::move(kernelBin)); + op_desc->SetExtAttr(ge::OP_EXTATTR_NAME_TBE_KERNEL, tbe_kernel); + + std::string kernel_name("kernel/MatMul"); + AttrUtils::SetStr(op_desc, op_desc->GetName() + "_kernelname", kernel_name); + + EXPECT_EQ(model.InitTbeHandle(op_desc), SUCCESS); + EXPECT_EQ(model.InitTbeHandle(op_desc), SUCCESS); + + EXPECT_EQ(model.used_tbe_handle_map_.size(), 0); + DavinciModel::tvm_bin_kernel_.clear(); +} + +TEST_F(UtestModelManagerDavinciModel, init_tbe_handle_ge_registered) { + DavinciModel::tvm_bin_kernel_.clear(); + DavinciModel model(0, g_label_call_back); + OpDescPtr op_desc = CreateOpDesc("MatMul", "MatMul"); + + std::vector kernelBin; + TBEKernelPtr tbe_kernel = std::make_shared("name/MatMul", std::move(kernelBin)); + op_desc->SetExtAttr(ge::OP_EXTATTR_NAME_TBE_KERNEL, tbe_kernel); + + std::string kernel_name("kernel/MatMul"); + AttrUtils::SetStr(op_desc, op_desc->GetName() + "_kernelname", 
kernel_name); + + string session_graph_id; + AttrUtils::GetStr(op_desc, ATTR_NAME_SESSION_GRAPH_ID, session_graph_id); + const char *bin_file_key = DavinciModel::GetRegisterStub(op_desc->GetName(), session_graph_id); + model.used_tbe_handle_map_[bin_file_key] = 1; // test first register. + + EXPECT_EQ(model.InitTbeHandle(op_desc), SUCCESS); + EXPECT_EQ(model.InitTbeHandle(op_desc), SUCCESS); + + EXPECT_EQ(model.used_tbe_handle_map_.size(), 1); + + auto it = model.used_tbe_handle_map_.find(bin_file_key); + EXPECT_NE(it, model.used_tbe_handle_map_.end()); + EXPECT_EQ(it->second, 3); + DavinciModel::tvm_bin_kernel_.clear(); +} +} // namespace ge diff --git a/tests/ut/ge/graph/load/new_model_manager_event_manager_unittest.cc b/tests/ut/ge/graph/load/new_model_manager_event_manager_unittest.cc new file mode 100644 index 00000000..ee708501 --- /dev/null +++ b/tests/ut/ge/graph/load/new_model_manager_event_manager_unittest.cc @@ -0,0 +1,117 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/types.h" + +#define private public +#include "graph/manager/model_manager/event_manager.h" +#undef private + +using namespace ge; +using namespace std; +using namespace testing; + +class UtestModelManagerEventManager : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +// test repeat initialize +TEST_F(UtestModelManagerEventManager, repeat_initialization) { + ge::EventManager event_manager; + size_t event_num = 1; + event_manager.Init(event_num); + Status ret = event_manager.Init(event_num); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestModelManagerEventManager, call_event_record_normal) { + ge::EventManager event_manager; + size_t event_num = 1; + Status ret = event_manager.Init(event_num); + EXPECT_EQ(SUCCESS, ret); + EXPECT_NE(event_manager.event_list_.size(), 0); + + ret = event_manager.EventRecord(0, NULL); + EXPECT_EQ(SUCCESS, ret); +} + +// test load EventRecore when uninited +TEST_F(UtestModelManagerEventManager, call_event_record_while_uninited) { + ge::EventManager event_manager; + Status ret = event_manager.EventRecord(1, NULL); + EXPECT_EQ(ge::INTERNAL_ERROR, ret); +} + +// test with invalid param when load EventRecord +TEST_F(UtestModelManagerEventManager, call_event_record_with_invalid_param) { + ge::EventManager event_manager; + Status ret = event_manager.Init(1); + EXPECT_EQ(SUCCESS, ret); + ret = event_manager.EventRecord(1, NULL); + EXPECT_EQ(ge::PARAM_INVALID, ret); +} + +// test load EventElapsedTime when uninited +TEST_F(UtestModelManagerEventManager, call_event_elapsed_time_while_uninited) { + ge::EventManager event_manager; + float time = .0f; + Status ret = event_manager.EventElapsedTime(1, 2, time); + EXPECT_EQ(ge::INTERNAL_ERROR, ret); +} + +// test with invalid param when load EventElapsedTime +TEST_F(UtestModelManagerEventManager, call_event_elapsed_time_with_invalid_param) { + ge::EventManager 
*event_manager = new ge::EventManager; + size_t event_num = 2; + Status ret = event_manager->Init(event_num); + EXPECT_EQ(SUCCESS, ret); + float time = .0f; + + // normal load + ret = event_manager->EventElapsedTime(0, 1, time); + EXPECT_EQ(SUCCESS, ret); + + // startevent_idx overstep boundary + ret = event_manager->EventElapsedTime(2, 1, time); + EXPECT_EQ(ge::PARAM_INVALID, ret); + + // stopevent_idx overstep boundary + ret = event_manager->EventElapsedTime(1, 2, time); + EXPECT_EQ(ge::PARAM_INVALID, ret); + + // startevent_idx > stopevent_idx + ret = event_manager->EventElapsedTime(1, 0, time); + EXPECT_EQ(ge::PARAM_INVALID, ret); + + delete event_manager; +} +TEST_F(UtestModelManagerEventManager, call_get_event) { + ge::EventManager event_manager; + size_t event_num = 1; + event_manager.Init(event_num); + rtEvent_t event = nullptr; + Status ret = event_manager.GetEvent(2, event); + EXPECT_EQ(ge::PARAM_INVALID, ret); + ret = event_manager.GetEvent(0, event); + EXPECT_EQ(SUCCESS, ret); +} diff --git a/tests/ut/ge/graph/load/new_model_manager_model_manager_unittest.cc b/tests/ut/ge/graph/load/new_model_manager_model_manager_unittest.cc new file mode 100644 index 00000000..b6174793 --- /dev/null +++ b/tests/ut/ge/graph/load/new_model_manager_model_manager_unittest.cc @@ -0,0 +1,444 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include +#include "common/debug/log.h" +#include "common/model_parser/base.h" +#include "common/properties_manager.h" +#include "common/types.h" +#include "common/l2_cache_optimize.h" + +#define private public +#define protected public +#include "graph/load/new_model_manager/model_manager.h" + +#include "common/helper/om_file_helper.h" +#include "common/op/ge_op_utils.h" +#include "graph/load/graph_loader.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/davinci_model_parser.h" +#include "new_op_test_utils.h" +#undef private +#undef protected + +using namespace std; +using namespace testing; + +namespace ge { + +const static std::string ENC_KEY = "0123456789abcdef0123456789abcdef0123456789abcdef0123456789abcdef"; + +class UtestModelManagerModelManager : public testing::Test { + protected: + static Status LoadStub(const uint8_t *data, size_t len, ge::Model &model) { + InitModelDefault(model); + return ge::SUCCESS; + } + + static void InitModelDefault(ge::Model &model) { + ge::AttrUtils::SetInt(&model, ge::ATTR_MODEL_MEMORY_SIZE, 0); + ge::AttrUtils::SetInt(&model, ge::ATTR_MODEL_WEIGHT_SIZE, 0); + ge::AttrUtils::SetInt(&model, ge::ATTR_MODEL_STREAM_NUM, 0); + ge::AttrUtils::SetInt(&model, ge::ATTR_MODEL_EVENT_NUM, 0); + ge::AttrUtils::SetStr(&model, ge::ATTR_MODEL_TARGET_TYPE, "MINI"); // domi::MINI + + auto computeGraph = std::make_shared("graph"); + auto graph = ge::GraphUtils::CreateGraphFromComputeGraph(computeGraph); + model.SetGraph(graph); + } + + void SetUp() {} + + void TearDown() {} + + void GenUnencryptModelData(ge::ModelData &data) { + const int model_len = 10; + data.key; + data.model_len = sizeof(ModelFileHeader) + model_len; + data.model_data = new uint8_t[data.model_len]; + memset((uint8_t *)data.model_data + sizeof(ModelFileHeader), 10, model_len); + + ModelFileHeader *header = (ModelFileHeader *)data.model_data; + header->magic = MODEL_FILE_MAGIC_NUM; + header->version = 
MODEL_VERSION; + header->is_encrypt = ModelEncryptType::UNENCRYPTED; + header->length = model_len; + header->is_checksum = ModelCheckType::CHECK; + } + + void GenEncryptModelData(ge::ModelData &data) { + const int model_len = 10; + data.key = ENC_KEY; + data.model_data = new uint8_t[data.model_len]; + uint8_t data_ori[model_len]; + memset(data_ori, 10, model_len); + uint32_t out_len; + ModelFileHeader *header = (ModelFileHeader *)data.model_data; + header->magic = MODEL_FILE_MAGIC_NUM; + header->version = MODEL_VERSION; + header->is_encrypt = ModelEncryptType::ENCRYPTED; + header->length = 10; // encrypt_len; + } + + void LoadStandardModelData(ge::ModelData &data) { + static const std::string STANDARD_MODEL_DATA_PATH = + "llt/framework/domi/ut/ome/test/data/standard_partition_model.txt"; + ge::proto::ModelDef model_def; + ReadProtoFromText(STANDARD_MODEL_DATA_PATH.c_str(), &model_def); + + data.model_len = model_def.ByteSizeLong(); + data.model_data = new uint8_t[data.model_len]; + model_def.SerializePartialToArray(data.model_data, data.model_len); + } +}; + +class DModelListener : public ge::ModelListener { + public: + DModelListener(){}; + uint32_t OnComputeDone(uint32_t model_id, uint32_t data_index, uint32_t resultCode) { return 0; } +}; + +shared_ptr UTEST_CALL_BACK_FUN(new DModelListener()); + +TEST_F(UtestModelManagerModelManager, case_load_incorrect_param) { + ModelManager mm; + uint32_t model_id = 0; + ge::ModelData model; + EXPECT_EQ(ge::FAILED, mm.LoadModelOffline(model_id, model, nullptr, nullptr)); + ge::ModelData data; + // Load allow listener is null + EXPECT_EQ(ge::FAILED, mm.LoadModelOffline(model_id, data, nullptr, nullptr)); +} + +TEST_F(UtestModelManagerModelManager, case_load_model_len_too_short) { + ModelManager mm; + ge::ModelData data; + data.model_len = 10; + uint32_t model_id = 1; + EXPECT_EQ(ge::FAILED, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN, nullptr)); +} + +TEST_F(UtestModelManagerModelManager, 
case_load_model_len_not_match) { + ModelManager mm; + ge::ModelData data; + GenUnencryptModelData(data); + data.model_len = sizeof(ModelFileHeader) + 1; + uint32_t model_id = 1; + EXPECT_EQ(ge::FAILED, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN, nullptr)); + delete[](uint8_t *) data.model_data; +} + +TEST_F(UtestModelManagerModelManager, case_load_model_encypt_not_match) { + ModelManager mm; + ge::ModelData data; + GenUnencryptModelData(data); + data.key = ENC_KEY; + uint32_t model_id = 1; + EXPECT_EQ(ge::PARAM_INVALID, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN, nullptr)); + delete[](uint8_t *) data.model_data; +} + +#if 0 +TEST_F(UtestModelManagerModelManager, case_load_model_signature_failed) +{ + ModelManager mm; + ge::ModelData data; + GenUnencryptModelData(data); + + uint32_t model_id = 1; + MOCKER(&WBDecryptor::CheckSignature).stubs().will(returnValue(false)); + EXPECT_EQ(ge::PARAM_INVALID, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN)); + delete[](uint8_t*)data.model_data; +} +#endif + +TEST_F(UtestModelManagerModelManager, case_load_model_encypt_type_unsupported) { + ModelManager mm; + ge::ModelData data; + GenUnencryptModelData(data); + ModelFileHeader *header = (ModelFileHeader *)data.model_data; + header->is_encrypt = 255; + uint32_t model_id = 1; + EXPECT_EQ(ge::FAILED, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN, nullptr)); + delete[](uint8_t *) data.model_data; +} + +#if 0 +TEST_F(UtestModelManagerModelManager, case_load_model_header_len_failed) +{ + ModelManager mm; + ge::ModelData data; + GenEncryptModelData(data); + ModelFileHeader *header = (ModelFileHeader*)data.model_data; + data.model_len -= header->length; + header->length = 0; + uint32_t model_id = 1; + EXPECT_EQ(ge::PARAM_INVALID, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN)); + delete[](uint8_t*)data.model_data; +} +#endif + +#if 0 +TEST_F(UtestModelManagerModelManager, case_load_success) +{ + const char* model_file = 
"bin/llt/framework/domi/ut/omg/data/leakyrelu.dav"; + const char* json_file = "test.json"; + const char* key = "bin/llt/framework/domi/ut/omg/data/leakyrelu.dav.PASSCODE"; + + ge::ModelData model; + Status ret = ModelParserBase::LoadFromFile(model_file, key, 0, &model); + EXPECT_EQ(ge::SUCCESS, ret); + + ModelManager mm; + uint32_t model_id = 1; + ret = mm.LoadModelOffline(model_id, model, UTEST_CALL_BACK_FUN); + EXPECT_EQ(ge::SUCCESS, ret); + + if (model.model_data) + delete[](uint8_t*)model.model_data; +} +#endif + +#if 0 +TEST_F(UtestModelManagerModelManager, case_load_encrypt_model_signature_failed) +{ + ModelManager mm; + ge::ModelData data; + GenEncryptModelData(data); + uint32_t model_id = 1; + data.key; + EXPECT_EQ(ge::PARAM_INVALID, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN)); + delete[](uint8_t*)data.model_data; +} + +TEST_F(UtestModelManagerModelManager, case_load_encrypt_model_invalid_key_len) +{ + ModelManager mm; + ge::ModelData data; + GenEncryptModelData(data); + data.key = "0123456789abcdef0123456789ABCDEF0123456789ABCDEF0123456789ABCDEF0"; + uint32_t model_id = 1; + EXPECT_EQ(ge::PARAM_INVALID, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN)); + delete[](uint8_t*)data.model_data; +} + +TEST_F(UtestModelManagerModelManager, case_load_encrypt_model_invalid_key_char) +{ + ModelManager mm; + ge::ModelData data; + GenEncryptModelData(data); + data.key = "0123456789abcdef0123456789ABCDEF0123456789ABCDEF0123456789ABCDEG"; + uint32_t model_id = 1; + EXPECT_EQ(ge::PARAM_INVALID, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN)); + delete[](uint8_t*)data.model_data; +} + +TEST_F(UtestModelManagerModelManager, case_load_encrypt_model_load_failed) +{ + ModelManager mm; + ge::ModelData data; + GenEncryptModelData(data); + uint32_t model_id = 1; + EXPECT_EQ(ge::INTERNAL_ERROR, mm.LoadModelOffline(model_id, data, UTEST_CALL_BACK_FUN)); + delete[](uint8_t*)data.model_data; +} +#endif + +shared_ptr LabelCallBack(new 
DModelListener()); + +// test HandleCommand +TEST_F(UtestModelManagerModelManager, command_success1) { + ModelManager manager; + ge::Command cmd; + + cmd.cmd_type = "INFERENCE"; + EXPECT_EQ(ge::PARAM_INVALID, manager.HandleCommand(cmd)); + + cmd.cmd_type = "NOT SUPPORT"; + EXPECT_EQ(ge::PARAM_INVALID, manager.HandleCommand(cmd)); +} + +TEST_F(UtestModelManagerModelManager, command_success2) { + ModelManager manager; + ge::Command cmd; + + cmd.cmd_type = "dump"; + cmd.cmd_params.push_back("status"); + cmd.cmd_params.push_back("on"); + cmd.cmd_params.push_back("model_name"); + cmd.cmd_params.push_back("test_model"); + cmd.cmd_params.push_back("path"); + cmd.cmd_params.push_back("/test"); + cmd.cmd_params.push_back("layer"); + cmd.cmd_params.push_back("layer1"); + + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); +} + +// test profile +TEST_F(UtestModelManagerModelManager, command_profile_success) { + ModelManager manager; + ge::Command cmd; + cmd.cmd_type = "profile"; + + cmd.cmd_params.push_back("ome"); + cmd.cmd_params.push_back("on"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + bool ome_profile_on = PropertiesManager::Instance().GetPropertyValue(OME_PROFILE) == "1"; + EXPECT_EQ(true, ome_profile_on); + + cmd.cmd_params.clear(); + cmd.cmd_params.push_back("ome"); + cmd.cmd_params.push_back("off"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + ome_profile_on = PropertiesManager::Instance().GetPropertyValue(OME_PROFILE) == "1"; + EXPECT_FALSE(ome_profile_on); + + cmd.cmd_params.clear(); + cmd.cmd_params.push_back("cce"); + cmd.cmd_params.push_back("on"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + bool cce_profile_on = PropertiesManager::Instance().GetPropertyValue(CCE_PROFILE) == "1"; + EXPECT_EQ(true, cce_profile_on); + + cmd.cmd_params.clear(); + cmd.cmd_params.push_back("cce"); + cmd.cmd_params.push_back("off"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + cce_profile_on = 
PropertiesManager::Instance().GetPropertyValue(CCE_PROFILE) == "1"; + EXPECT_FALSE(cce_profile_on); + + cmd.cmd_params.clear(); + cmd.cmd_params.push_back("runtime"); + cmd.cmd_params.push_back("on"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + bool rts_profile_on = PropertiesManager::Instance().GetPropertyValue(RTS_PROFILE) == "1"; + EXPECT_EQ(true, rts_profile_on); + + cmd.cmd_params.clear(); + cmd.cmd_params.push_back("runtime"); + cmd.cmd_params.push_back("off"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + rts_profile_on = PropertiesManager::Instance().GetPropertyValue(RTS_PROFILE) == "1"; + EXPECT_FALSE(rts_profile_on); + + cmd.cmd_params.clear(); + cmd.cmd_params.push_back("profiler_jobctx"); + cmd.cmd_params.push_back("jobctx"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + EXPECT_EQ("jobctx", PropertiesManager::Instance().GetPropertyValue(PROFILER_JOBCTX)); + + cmd.cmd_params.clear(); + cmd.cmd_params.push_back("profiler_target_path"); + cmd.cmd_params.push_back("/test/target"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + EXPECT_EQ("/test/target", PropertiesManager::Instance().GetPropertyValue(PROFILER_TARGET_PATH)); + + cmd.cmd_params.clear(); + cmd.cmd_params.push_back("RTS_PATH"); + cmd.cmd_params.push_back("/test/rts_path"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); + EXPECT_EQ("/test/rts_path", PropertiesManager::Instance().GetPropertyValue(RTS_PROFILE_PATH)); +} + +// test acl profiling +TEST_F(UtestModelManagerModelManager, command_profiling) { + ModelManager manager; + ge::Command cmd; + cmd.cmd_type = "profiling"; + + cmd.cmd_params.push_back("config"); + cmd.cmd_params.push_back("on"); + EXPECT_EQ(ge::SUCCESS, manager.HandleCommand(cmd)); +} + +TEST_F(UtestModelManagerModelManager, command_profile_failed) { + ModelManager manager; + ge::Command cmd; + cmd.cmd_type = "profile"; + + cmd.cmd_params.push_back("ome"); + + EXPECT_EQ(ge::PARAM_INVALID, manager.HandleCommand(cmd)); +} + +// test 
Start +TEST_F(UtestModelManagerModelManager, start_fail) { + ModelManager manager; + manager.model_map_[2] = nullptr; + EXPECT_EQ(ge::PARAM_INVALID, manager.Start(2)); +} + +// test GetMaxUsedMemory +TEST_F(UtestModelManagerModelManager, get_max_used_memory_fail) { + ModelManager manager; + uint64_t max_size = 0; + manager.model_map_[2] = nullptr; + EXPECT_EQ(ge::PARAM_INVALID, manager.GetMaxUsedMemory(2, max_size)); +} + +// test GetInputOutputDescInfo +TEST_F(UtestModelManagerModelManager, get_input_output_desc_info_fail) { + ModelManager manager; + manager.model_map_[2] = nullptr; + vector input_shape; + vector output_shape; + EXPECT_EQ(ge::PARAM_INVALID, manager.GetInputOutputDescInfo(2, input_shape, output_shape)); +} + +// test GetInputOutputDescInfo fail +TEST_F(UtestModelManagerModelManager, get_input_output_desc_info_zero_copy_fail) { + ModelManager manager; + manager.model_map_[2] = nullptr; + vector input_shape; + vector output_shape; + EXPECT_EQ(ge::PARAM_INVALID, manager.GetInputOutputDescInfoForZeroCopy(2, input_shape, output_shape)); +} + +// test Stop +TEST_F(UtestModelManagerModelManager, stop_fail) { + ModelManager manager; + manager.model_map_[2] = nullptr; + EXPECT_EQ(ge::PARAM_INVALID, manager.Stop(2)); +} + +// build input_data +TEST_F(UtestModelManagerModelManager, check_data_len_success) { + shared_ptr g_label_call_back(new DModelListener()); + DavinciModel model(0, g_label_call_back); + ModelManager model_manager; + ge::InputData input_data; + ge::DataBuffer data_buffer; + data_buffer.data = new char[51200]; + data_buffer.length = 51200; + input_data.index = 0; + input_data.model_id = 1; + input_data.blobs.push_back(data_buffer); + delete[](char *) data_buffer.data; +} + +// test LoadModeldef +TEST_F(UtestModelManagerModelManager, destroy_aicpu_session) { + ModelManager manager; + manager.DestroyAicpuSession(0); + + manager.sess_ids_.insert(0); + manager.DestroyAicpuSession(0); +} + +} // namespace ge diff --git 
a/tests/ut/ge/graph/load/new_model_manager_task_build_unittest.cc b/tests/ut/ge/graph/load/new_model_manager_task_build_unittest.cc new file mode 100644 index 00000000..620fac09 --- /dev/null +++ b/tests/ut/ge/graph/load/new_model_manager_task_build_unittest.cc @@ -0,0 +1,115 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/types.h" +#include "new_op_test_utils.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/attr_utils.h" +#include "graph/detail/model_serialize_imp.h" +#include "proto/ge_ir.pb.h" + +#define private public +#define protected public +#include "graph/compute_graph.h" +#include "graph/utils/graph_utils.h" +#include "graph/model_serialize.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "common/properties_manager.h" +#include "common/op/ge_op_utils.h" +#include +#include "runtime/dev.h" +#include "runtime/kernel.h" +#include "cce/fwk_adpt_struct.h" +#undef private +#undef protected + +using namespace std; +using namespace testing; + +namespace ge { +class UtestModelManagerTaskBuilder : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + /// data weight + /// | | | | + /// |-conv-| | | + /// | | | + /// conv2d | + /// | | + /// |-resApply + + void BuildGraph(ComputeGraphPtr graph) { + OpDescPtr data = 
std::make_shared("DATA1", "data"); + OpDescPtr weight = std::make_shared("WEIGHT", "weight"); + OpDescPtr conv_op = std::make_shared("conv", "conv"); + OpDescPtr conv_2D = std::make_shared("conv_2D", "conv2d"); + OpDescPtr res_apply_op = std::make_shared("res_apply_op", "resapply"); + // add descriptor + vector dim(4, 4); + GeShape shape(dim); + GeTensorDesc out_desc(shape); + int32_t blockSize = 4096; + + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 1); + data->AddOutputDesc(out_desc); + + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 2); + weight->AddOutputDesc(out_desc); + + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 1); + conv_op->AddInputDesc(out_desc); + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 2); + conv_op->AddInputDesc(out_desc); + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 3); + conv_op->AddOutputDesc(out_desc); + + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 3); + conv_2D->AddInputDesc(out_desc); + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 2); + conv_2D->AddInputDesc(out_desc); + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 4); + conv_2D->AddOutputDesc(out_desc); + + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 4); + res_apply_op->AddInputDesc(out_desc); + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 1); + res_apply_op->AddInputDesc(out_desc); + ge::TensorUtils::SetDataOffset(out_desc, blockSize * 5); + res_apply_op->AddOutputDesc(out_desc); + + NodePtr data_node = graph->AddNode(data); + NodePtr weigth_node = graph->AddNode(weight); + NodePtr conv_node = graph->AddNode(conv_op); + NodePtr conv_2D_node = graph->AddNode(conv_2D); + NodePtr res_node = graph->AddNode(res_apply_op); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), conv_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(weigth_node->GetOutDataAnchor(0), conv_node->GetInDataAnchor(1)); + GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), conv_2D_node->GetInDataAnchor(0)); + 
GraphUtils::AddEdge(weigth_node->GetOutDataAnchor(0), conv_2D_node->GetInDataAnchor(1)); + GraphUtils::AddEdge(conv_2D_node->GetOutDataAnchor(0), res_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(weigth_node->GetOutDataAnchor(0), res_node->GetInDataAnchor(1)); + return; + } +}; +} // namespace ge diff --git a/tests/ut/ge/graph/load/new_op_test_utils.h b/tests/ut/ge/graph/load/new_op_test_utils.h new file mode 100644 index 00000000..5e1e2ec1 --- /dev/null +++ b/tests/ut/ge/graph/load/new_op_test_utils.h @@ -0,0 +1,441 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef OME_REBUILD_OME_OP_TEST_UTILS_H +#define OME_REBUILD_OME_OP_TEST_UTILS_H + +#include +#include +#include + +#include "common/fmk_types.h" +#include "common/helper/model_helper.h" +#include "common/op/attr_value_util.h" +#include "common/properties_manager.h" +#include "common/types.h" +#include "executor/ge_executor.h" +#include "graph/buffer.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/ge_attr_value.h" +#include "graph/model_serialize.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "proto/ge_ir.pb.h" + +#define protected public +#define private public +#include "graph/compute_graph.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/node.h" +#include "graph/op_desc.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#undef protected +#undef private + +using namespace ge; + +class GlobalModelData { + public: + GlobalModelData() {} + + ~GlobalModelData() { + if (data_.model_data != nullptr) { + delete[](uint8_t *) data_.model_data; + data_.model_data = nullptr; + } + } + + ge::ModelData data_; +}; + +static GlobalModelData g_model_data; + +class OmeTestOpUtils { + public: + static void InitModel(std::shared_ptr davinciModel) { InitModel(*davinciModel); } + static ge::NodePtr GenNodeFromOpDesc(ge::OpDescPtr op_desc) { + if (!op_desc) { + return nullptr; + } + + // return std::make_shared(op_desc, nullptr); + auto g = std::make_shared("g"); + return g->AddNode(std::move(op_desc)); + } + + static void AddInputOutputToTaskModel(std::shared_ptr model, + std::shared_ptr model_task_def) { + uint32_t stream_num111 = model_task_def->stream_num(); + uint32_t weights_num = model_task_def->weight_size(); + uint32_t mem_num = model_task_def->memory_size(); + + int64_t memory_size = 0; + int64_t 
weight_size = 0; + (void)ge::AttrUtils::GetInt(model.get(), ATTR_MODEL_MEMORY_SIZE, memory_size); + (void)ge::AttrUtils::GetInt(model.get(), ATTR_MODEL_WEIGHT_SIZE, weight_size); + // Save memory_size/weight_size/stream_num/event_num to proto + model_task_def->set_memory_size(memory_size); + model_task_def->set_weight_size(weight_size); + int64_t stream_num = 0; + (void)ge::AttrUtils::GetInt(model.get(), ATTR_MODEL_STREAM_NUM, stream_num); + model_task_def->set_stream_num(stream_num); + + ge::ComputeGraphPtr graph = ge::GraphUtils::GetComputeGraph(model->GetGraph()); + vector op_desc_ptrs; + for (const auto &node_ptr : graph->GetAllNodes()) { + if (node_ptr->GetType() == DATA_TYPE || node_ptr->GetType() == ANN_DATA_TYPE) { + op_desc_ptrs.push_back(node_ptr->GetOpDesc()); + continue; + } + + for (auto tensor_desc : node_ptr->GetOpDesc()->GetAllOutputsDescPtr()) { + bool is_output = false; + ge::TensorUtils::GetOutputTensor(*tensor_desc, is_output); + if (is_output) { + // output Op and add to array + op_desc_ptrs.push_back(node_ptr->GetOpDesc()); + break; + } + } + } + + // save multi OpDescPtr to attr + ge::ModelSerialize model_serialize; + for (auto op_desc_ptr : op_desc_ptrs) { + ge::Buffer buffer = model_serialize.SerializeOpDesc(op_desc_ptr); + model_task_def->add_op(string(reinterpret_cast(buffer.GetData()), buffer.GetSize())); + } + + int64_t run_mode = -1; + for (auto node_ptr : graph->GetAllNodes()) { + // TE CUSTOM op need to init + if (ge::AttrUtils::GetInt(node_ptr->GetOpDesc(), ATTR_NAME_IMPLY_TYPE, run_mode) && + run_mode != (uint32_t)domi::ImplyType::BUILDIN && run_mode != (uint32_t)domi::ImplyType::INVALID) { + (*(model_task_def->mutable_attr()))["contain_custom"] = "1"; + break; + } + } + } + + static void LoadStandardModelDataLocal(ge::ModelData &data) { + static const std::string STANDARD_MODEL_DATA_PATH = + "llt/framework/domi/ut/ome/test/data/standard_partition_model.txt"; + ge::proto::ModelDef model_def; + 
ReadProtoFromText(STANDARD_MODEL_DATA_PATH.c_str(), &model_def); + + data.model_len = model_def.ByteSizeLong(); + data.model_data = new uint8_t[data.model_len]; + model_def.SerializePartialToArray(data.model_data, data.model_len); + } + static void InitModel(ge::DavinciModel &davinciModel) { + ge::ModelData data; + LoadStandardModelDataLocal(data); + std::shared_ptr model_ = std::make_shared(); + ge::Model::Load((uint8_t *)data.model_data, data.model_len, *model_); + + GeModelPtr ge_model; + ModelHelper::TransModelToGeModel(model_, ge_model); + davinciModel.Assign(ge_model); + + if (data.model_data != nullptr) { + delete[](uint8_t *) data.model_data; + } + } + + static void InitEmptyModel(ge::DavinciModel &davinciModel) { + auto model = std::make_shared(); + ge::AttrUtils::SetInt(model, ATTR_MODEL_MEMORY_SIZE, 81000000); + ge::AttrUtils::SetInt(model, ATTR_MODEL_WEIGHT_SIZE, 4100000); + ge::AttrUtils::SetInt(model, ATTR_MODEL_STREAM_NUM, 1); + ge::AttrUtils::SetInt(model, ATTR_MODEL_EVENT_NUM, 1); + ge::AttrUtils::SetInt(model, MODEL_ATTR_TASK_GEN_BASE_ADDR, 0x123); + ge::AttrUtils::SetInt(model, MODEL_ATTR_TASK_GEN_WEIGHT_ADDR, 0x456); + ge::AttrUtils::SetInt(model, ATTR_MODEL_BATCH_NUM, 1); + + // ge::AttrUtils::SetStr(model, ATTR_MODEL_TARGET_TYPE, "MINI"); // domi::MINI + + auto compute_graph = std::make_shared("graph"); + ge::GeAttrValue::BYTES buffer(4100000, 0); + ge::AttrUtils::SetBytes(compute_graph, "weights_data", buffer); + auto graph = ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph); + model->SetGraph(graph); + + GeModelPtr ge_model; + ModelHelper::TransModelToGeModel(model, ge_model); + + davinciModel.Assign(ge_model); + } + + static void InitModelWithoutMem(ge::DavinciModel &davinciModel) { InitModel(davinciModel); } + + static Status ModelLoadStub(const uint8_t *data, size_t len, ge::Model &model) { + auto compute_graph = std::make_shared("graph"); + auto graph = ge::GraphUtils::CreateGraphFromComputeGraph(compute_graph); + 
model.SetGraph(graph); + return SUCCESS; + } + static void InitDefaultTensorDesc(ge::GeTensorDesc &tensor_desc) {} + static void AddInputDesc(ge::OpDescPtr op_desc, vector shape, ge::Format format, ge::DataType dataType, + int64_t dataSize = 0) { + ge::GeTensorDesc tensor_desc(ge::GeShape(shape), format, dataType); + InitDefaultTensorDesc(tensor_desc); + ge::TensorUtils::SetSize(tensor_desc, dataSize); + op_desc->AddInputDesc(tensor_desc); + } + static void AddOutputDesc(ge::OpDescPtr op_desc, vector shape, ge::Format format, ge::DataType dataType, + int64_t dataSize = 0) { + ge::GeTensorDesc tensor_desc(ge::GeShape(shape), format, dataType); + InitDefaultTensorDesc(tensor_desc); + ge::TensorUtils::SetSize(tensor_desc, dataSize); + op_desc->AddOutputDesc(tensor_desc); + } + static void AddWeight(ge::NodePtr node_ptr, uint8_t *data, size_t dataLen, vector shape = {}, + ge::Format format = ge::FORMAT_NCHW, ge::DataType dataType = ge::DT_FLOAT) { + ge::GeTensorDesc tensor_desc(ge::GeShape(shape), format, dataType); + + vector weigths = ge::OpDescUtils::MutableWeights(node_ptr); + weigths.push_back(std::make_shared(tensor_desc, data, dataLen)); + ge::OpDescUtils::SetWeights(node_ptr, weigths); + } + static ge::OpDescPtr CreateOpDesc() { + auto op_desc = std::make_shared(); + return op_desc; + } +}; + +class OmeTestOpDescBuilder { + public: + OmeTestOpDescBuilder(ge::OpDescPtr orgOpDesc = nullptr) : orgOpDesc_(orgOpDesc) { + if (orgOpDesc_) { + streamId_ = orgOpDesc_->GetStreamId(); + } + } + + OmeTestOpDescBuilder &SetStreamId(int64_t streamId) { + streamId_ = streamId; + return *this; + } + OmeTestOpDescBuilder &SetWorkspace(vector workspace) { + workspace_ = workspace; + return *this; + } + OmeTestOpDescBuilder &SetWorkspaceBytes(vector workspaceBytes) { + workspaceBytes_ = workspaceBytes; + return *this; + } + OmeTestOpDescBuilder &SetType(const string &type) { + type_ = type; + return *this; + } + OmeTestOpDescBuilder &SetName(const string &name) { + name_ = name; 
+ return *this; + } + OmeTestOpDescBuilder &SetInputs(vector inputs) { + inputsDataOffeset_ = inputs; + return *this; + } + OmeTestOpDescBuilder &AddInput(int64_t input) { + inputsDataOffeset_.push_back(input); + return *this; + } + OmeTestOpDescBuilder &SetOutputs(vector outputs) { + outputsDataOffeset_ = outputs; + return *this; + } + OmeTestOpDescBuilder &AddOutput(int64_t output) { + outputsDataOffeset_.push_back(output); + return *this; + } + + OmeTestOpDescBuilder &SetEventId(int64_t eventId) { + eventId_ = eventId; + return *this; + } + + OmeTestOpDescBuilder &Setscopeid(int64_t scopeid) { + scopeid_ = scopeid; + return *this; + } + + ge::GeTensorDesc &AddInputDesc(vector shape, ge::Format format, ge::DataType dataType, + int64_t dataSize = 0) { + ge::GeTensorDesc tensor_desc(ge::GeShape(shape), format, dataType); + OmeTestOpUtils::InitDefaultTensorDesc(tensor_desc); + ge::TensorUtils::SetSize(tensor_desc, dataSize); + inputTensorDescs.push_back(tensor_desc); + return inputTensorDescs.back(); + } + ge::GeTensorDesc &AddInputDesc(vector shape, ge::Format format, ge::DataType dataType, int64_t realdimcnt, + int64_t dataSize) { + ge::GeTensorDesc tensor_desc(ge::GeShape(shape), format, dataType); + OmeTestOpUtils::InitDefaultTensorDesc(tensor_desc); + ge::TensorUtils::SetSize(tensor_desc, dataSize); + ge::TensorUtils::SetRealDimCnt(tensor_desc, realdimcnt); + inputTensorDescs.push_back(tensor_desc); + return inputTensorDescs.back(); + } + + ge::GeTensorDesc &AddOutputDesc(vector shape, ge::Format format, ge::DataType dataType, + int64_t dataSize = 0) { + ge::GeTensorDesc tensor_desc(ge::GeShape(shape), format, dataType); + OmeTestOpUtils::InitDefaultTensorDesc(tensor_desc); + ge::TensorUtils::SetSize(tensor_desc, dataSize); + outputTensorDescs.push_back(tensor_desc); + return outputTensorDescs.back(); + } + + ge::GeTensorDesc &AddOutputDesc(vector shape, ge::Format format, ge::DataType dataType, int64_t realdimcnt, + int64_t dataSize) { + ge::GeTensorDesc 
tensor_desc(ge::GeShape(shape), format, dataType); + OmeTestOpUtils::InitDefaultTensorDesc(tensor_desc); + ge::TensorUtils::SetSize(tensor_desc, dataSize); + ge::TensorUtils::SetRealDimCnt(tensor_desc, realdimcnt); + outputTensorDescs.push_back(tensor_desc); + return outputTensorDescs.back(); + } + + ge::GeTensorPtr AddWeight(uint8_t *data, size_t dataLen, vector shape = {}, + ge::Format format = ge::FORMAT_NCHW, ge::DataType dataType = ge::DT_FLOAT) { + ge::GeTensorDesc tensor_desc(ge::GeShape(shape), format, dataType); + + weights_.emplace_back(std::make_shared(tensor_desc, data, dataLen)); + return weights_.back(); + } + ge::NodePtr Finish() { + ge::OpDescPtr op_desc; + if (orgOpDesc_) { + op_desc = orgOpDesc_; + } else { + op_desc = OmeTestOpUtils::CreateOpDesc(); // std::make_shared(name_, type_); + } + if (!type_.empty()) { + op_desc->SetType(type_); + } + if (!name_.empty()) { + op_desc->SetName(name_); + } + + op_desc->SetStreamId(streamId_); + ge::AttrUtils::SetInt(op_desc, "id", 1); + + if (eventId_ != -1) { + ge::AttrUtils::SetInt(op_desc, SEND_ATTR_EVENT_ID, eventId_); + } + + if (scopeid_ != -1) { + ge::AttrUtils::SetInt(op_desc, "fusion_scope", scopeid_); + } + // ge::AttrUtils::SetInt(op_desc, ATTR_NAME_STREAM_ID, streamId_); + // if(!inputsDataOffeset_.empty()) + { + vector inputs; + inputs = op_desc->GetInputOffset(); + inputs.insert(inputs.end(), inputsDataOffeset_.begin(), inputsDataOffeset_.end()); + + op_desc->SetInputOffset(inputs); + } + // if(!outputsDataOffeset_.empty()) + { + vector outputs; + outputs = op_desc->GetOutputOffset(); + outputs.insert(outputs.end(), outputsDataOffeset_.begin(), outputsDataOffeset_.end()); + + op_desc->SetOutputOffset(outputs); + } + // if(!workspace_.empty()) + { + vector workspace = op_desc->GetWorkspace(); + workspace.insert(workspace.end(), workspace_.begin(), workspace_.end()); + + op_desc->SetWorkspace(workspace); + } + // if(!workspaceBytes_.empty()) + { + vector workspaceBytes; + workspaceBytes = 
op_desc->GetWorkspaceBytes(); + workspaceBytes.insert(workspaceBytes.end(), workspaceBytes_.begin(), workspaceBytes_.end()); + + op_desc->SetWorkspaceBytes(workspaceBytes); + } + for (auto &tensor_desc : inputTensorDescs) { + op_desc->AddInputDesc(tensor_desc); + } + for (auto &tensor_desc : outputTensorDescs) { + op_desc->AddOutputDesc(tensor_desc); + } + + static std::shared_ptr graph; + // clear graph + graph = std::make_shared("g"); + + ge::NodePtr node_op = graph->AddNode(op_desc); + // for(int i=0; i < inputTensorDescs.size(); i++) + for (int i = 0; i < op_desc->GetInputsSize(); i++) { + ge::OpDescPtr src_op_desc = std::make_shared(); + + ge::GeTensorDesc src_out_desc; + src_op_desc->AddOutputDesc(src_out_desc); + + ge::NodePtr src_node = graph->AddNode(src_op_desc); + if (nullptr == src_node) { + GELOGE(ge::FAILED, "Finish: nullptr == src_node"); + } + Status res = ge::GraphUtils::AddEdge(src_node->GetOutDataAnchor(0), node_op->GetInDataAnchor(i)); + if (SUCCESS != res) { + GELOGE(ge::FAILED, "Finish: GraphUtils::AddEdge failed"); + } + // ge::NodePtr src_node = node->GetOwnerComputeGraph()->AddNodeFront(src_op_desc); + // node->AddLinkFrom(src_node); + } + + { + vector weights; + weights = ge::OpDescUtils::MutableWeights(node_op); + weights.insert(weights.end(), weights_.begin(), weights_.end()); + + ge::OpDescUtils::SetWeights(node_op, weights); + } + + *this = OmeTestOpDescBuilder(op_desc); // clear up + + return node_op; + } + + private: + ge::OpDescPtr orgOpDesc_; + int64_t streamId_ = 0; + string type_; + string name_; + vector inputsDataOffeset_; // input + vector outputsDataOffeset_; // output + vector inputTensorDescs; + vector outputTensorDescs; + vector workspace_; + vector workspaceBytes_; + vector weights_; + int64_t eventId_ = -1; + int64_t scopeid_ = -1; + + // std::shared_ptr graph_; +}; + +#endif // OME_REBUILD_OME_OP_TEST_UTILS_H diff --git a/tests/ut/ge/graph/load/output_net_output_unittest.cc 
b/tests/ut/ge/graph/load/output_net_output_unittest.cc new file mode 100644 index 00000000..7897378e --- /dev/null +++ b/tests/ut/ge/graph/load/output_net_output_unittest.cc @@ -0,0 +1,302 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include "securec.h" + +#define protected public +#define private public +#include "common/debug/memory_dumper.h" +#include "common/op/ge_op_utils.h" +#include "graph/load/new_model_manager/davinci_model.h" +#include "graph/load/new_model_manager/model_output.h" +#include "graph/load/new_model_manager/model_utils.h" +#include "graph/load/output/output.h" +#include "graph/manager/graph_var_manager.h" +#include "new_op_test_utils.h" +#include "proto/om.pb.h" + +using namespace std; + +namespace ge { +class UtestNetOutput : public testing::Test { + protected: + void TearDown() {} + shared_ptr GenOpdef(OpDescPtr &op_desc, int flag) { + shared_ptr builder = make_shared(op_desc); + builder->SetStreamId(0); + builder->AddInput(1); + builder->SetType("NetOutput"); + + if (flag == 1) { + auto input_desc_1 = builder->AddInputDesc({1, 1, 10, 10}, FORMAT_NCHW, DT_FLOAT16); + } + auto input_desc_1 = builder->AddInputDesc({1, 1, 10, 10}, FORMAT_NCHW, DT_FLOAT16); + + if (flag == 2) { + auto input_desc_2 = builder->AddInputDesc({1, 1, 10, 10}, FORMAT_NCHW, DT_FLOAT16); + } + if (flag == 3) { + builder->AddInput(10); + } + + return builder; + } + 
shared_ptr GenOpdef2(OpDescPtr &op_desc) { + shared_ptr builder = make_shared(op_desc); + builder->SetStreamId(0); + builder->SetType("NetOutput"); + builder->AddInput(10); + + auto input_desc_1 = builder->AddInputDesc({64, 32, 5, 5}, FORMAT_FRACTAL_Z, DT_FLOAT); + + builder->AddInput(1000000); + auto input_desc_2 = builder->AddInputDesc({1, 10, 10, 1}, FORMAT_NHWC, DT_FLOAT); + + builder->AddOutput(2000000); + auto output_desc_1 = builder->AddOutputDesc({64, 32, 5, 5}, FORMAT_NCHW, DT_FLOAT); + + builder->AddOutput(2100000); + output_desc_1 = builder->AddOutputDesc({1, 10, 10, 1}, FORMAT_NHWC, DT_FLOAT); + + return builder; + } + + public: + shared_ptr dav_model_; +}; + +TEST_F(UtestNetOutput, test_get_input_size) { + shared_ptr custom_op_desc = make_shared(); + OmeTestOpDescBuilder builder(custom_op_desc); + builder.SetName("netoutput"); + builder.SetStreamId(0); + builder.SetType("NetOutput"); + + auto input_desc_1 = builder.AddInputDesc({1, 1, 1, 1}, FORMAT_FRACTAL_Z, DT_FLOAT); + builder.AddInput(1); + auto output_desc = builder.AddOutputDesc({1, 1, 1, 1}, FORMAT_NCHW, DT_FLOAT); + builder.AddOutput(1); + builder.Finish(); + + vector v_output_size = ModelUtils::GetInputSize(custom_op_desc); + EXPECT_EQ(v_output_size.size(), 1); +} + +// test ModelUtils::IsOutput +TEST_F(UtestNetOutput, success_is_output) { + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + OmeTestOpDescBuilder builder(op_desc); + builder.SetType("NetOutput"); + vector outputs_desc; + std::shared_ptr desc = std::make_shared(); + outputs_desc.push_back(desc); + op_desc->outputs_desc_ = outputs_desc; + bool ret = model_utils->IsOutput(op_desc); + EXPECT_EQ(false, ret); + + delete model_utils; +} + +// test ModelUtils::IsOutput +TEST_F(UtestNetOutput, true_is_output) { + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + OmeTestOpDescBuilder builder(op_desc); + builder.SetType("NetOutput"); + vector 
outputs_desc; + std::shared_ptr desc = std::make_shared(); + outputs_desc.push_back(desc); + op_desc->outputs_desc_ = outputs_desc; + ge::TensorUtils::SetOutputTensor(*(outputs_desc[0].get()), true); + bool ret = model_utils->IsOutput(op_desc); + EXPECT_EQ(true, ret); + + delete model_utils; +} + +// test ModelUtils::IsInputTensorNeedTrans +TEST_F(UtestNetOutput, success_is_output_tensor_need_trans) { + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + OmeTestOpDescBuilder builder(op_desc); + builder.SetType("NetOutput"); + size_t tensor_index = 1; + vector outputs_desc; + std::shared_ptr desc = std::make_shared(); + outputs_desc.push_back(desc); + op_desc->outputs_desc_ = outputs_desc; + op_desc->inputs_desc_ = outputs_desc; + + bool ret = model_utils->IsInputTensorNeedTrans(op_desc, tensor_index); + EXPECT_EQ(false, ret); + + delete model_utils; +} + +// test ModelUtils::GetOutputSize +TEST_F(UtestNetOutput, success_get_output_size) { + vector v_output_size; + + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + vector outputs_desc; + std::shared_ptr desc = std::make_shared(); + outputs_desc.push_back(desc); + op_desc->outputs_desc_ = outputs_desc; + EXPECT_EQ(v_output_size, model_utils->GetOutputSize(op_desc)); + + vector output = {1}; + op_desc->SetOutputOffset(output); + uint32_t tensor_size = 0; + v_output_size.push_back(tensor_size); + EXPECT_EQ(v_output_size, model_utils->GetOutputSize(op_desc)); + delete model_utils; +} + +// test ModelUtils::GetWorkspaceSize +TEST_F(UtestNetOutput, success_get_workspace_size) { + vector v_workspace_size; + + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + vector workspace = {1}; + op_desc->SetWorkspace(workspace); + EXPECT_EQ(v_workspace_size, model_utils->GetWorkspaceSize(op_desc)); + + op_desc->SetWorkspaceBytes(workspace); + v_workspace_size.push_back(1); + EXPECT_EQ(v_workspace_size, 
model_utils->GetWorkspaceSize(op_desc)); + delete model_utils; +} + +// test ModelUtils::GetWeightSize +TEST_F(UtestNetOutput, success_get_weight_size) { + vector v_weight_size; + + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + op_desc->SetType("Const"); + EXPECT_EQ(v_weight_size, model_utils->GetWeightSize(op_desc)); + + op_desc->SetType("NetOutput"); + vector inputs_desc; + std::shared_ptr desc = std::make_shared(); + inputs_desc.push_back(desc); + op_desc->inputs_desc_ = inputs_desc; + + vector is_input_const = {true}; + op_desc->SetIsInputConst(is_input_const); + v_weight_size.push_back(0); + EXPECT_EQ(v_weight_size, model_utils->GetWeightSize(op_desc)); + + delete model_utils; +} + +// test ModelUtils::GetWeights +TEST_F(UtestNetOutput, success_get_weights) { + vector v_weights; + + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + op_desc->SetType("Const"); + EXPECT_EQ(v_weights, model_utils->GetWeights(op_desc)); + + op_desc->SetType("NetOutput"); + vector inputs_desc; + std::shared_ptr desc = std::make_shared(); + inputs_desc.push_back(desc); + op_desc->inputs_desc_ = inputs_desc; + + vector is_input_const = {true}; + op_desc->SetIsInputConst(is_input_const); + GeTensorDesc tensor_desc; + EXPECT_EQ(v_weights, model_utils->GetWeights(op_desc)); + + delete model_utils; +} + +// test ModelUtils::GetInputDescs +TEST_F(UtestNetOutput, success_get_input_descs) { + vector<::opTensor_t> v_input_descs; + vector<::tagCcAICPUTensor> ret; + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + ret = model_utils->GetInputDescs(op_desc); + EXPECT_EQ(v_input_descs.size(), ret.size()); + + vector inputs_desc; + std::shared_ptr desc = std::make_shared(); + inputs_desc.push_back(desc); + op_desc->inputs_desc_ = inputs_desc; + vector is_input_const = {false}; + op_desc->SetIsInputConst(is_input_const); + + opTensor_t tmp; + tmp.format = 
OP_TENSOR_FORMAT_NC1HWC0; + tmp.dim_cnt = 0; + tmp.data_type = OP_DATA_FLOAT; + v_input_descs.push_back(tmp); + ret = model_utils->GetInputDescs(op_desc); + EXPECT_EQ(v_input_descs.size(), ret.size()); + + delete model_utils; +} + +// test ModelUtils::GetOutputDescs +TEST_F(UtestNetOutput, success_get_output_descs) { + vector<::opTensor_t> v_output_descs; + vector<::tagCcAICPUTensor> ret; + ModelUtils *model_utils = new ModelUtils(); + std::shared_ptr op_desc = std::make_shared(); + ret = model_utils->GetOutputDescs(op_desc); + EXPECT_EQ(v_output_descs.size(), ret.size()); + + vector outputs_desc; + std::shared_ptr desc = std::make_shared(); + outputs_desc.push_back(desc); + op_desc->outputs_desc_ = outputs_desc; + + opTensor_t tmp; + tmp.format = OP_TENSOR_FORMAT_NC1HWC0; + tmp.dim_cnt = 0; + tmp.data_type = OP_DATA_FLOAT; + v_output_descs.push_back(tmp); + ret = model_utils->GetOutputDescs(op_desc); + EXPECT_EQ(v_output_descs.size(), ret.size()); + + delete model_utils; +} + +// test Output::GetOutputData +TEST_F(UtestNetOutput, success_get_output_data) { + Output *output = new Output(nullptr, nullptr); + output->v_input_data_addr_.push_back((void *)1); + output->v_input_size_.push_back(1); + output->input_num_ = 1; + + vector v_data_addr; + vector v_data_size; + output->GetOutputData(v_data_addr, v_data_size); + + EXPECT_EQ(output->v_input_data_addr_, v_data_addr); + EXPECT_EQ(output->v_input_size_, v_data_size); + delete output; +} +} // namespace ge diff --git a/tests/ut/ge/graph/load/tbe_handle_store_unittest.cc b/tests/ut/ge/graph/load/tbe_handle_store_unittest.cc new file mode 100644 index 00000000..a98e14c6 --- /dev/null +++ b/tests/ut/ge/graph/load/tbe_handle_store_unittest.cc @@ -0,0 +1,137 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/load/new_model_manager/tbe_handle_store.h" +#include "runtime/kernel.h" +#undef protected +#undef private + +namespace ge { +class UtestTBEHandleStore : public testing::Test { + protected: + void SetUp() { + TBEHandleStore &kernel_store = TBEHandleStore::GetInstance(); + kernel_store.kernels_.clear(); + } + + void TearDown() { + TBEHandleStore &kernel_store = TBEHandleStore::GetInstance(); + kernel_store.kernels_.clear(); + } +}; + +TEST_F(UtestTBEHandleStore, test_store_tbe_handle) { + TBEHandleStore &kernel_store = TBEHandleStore::GetInstance(); + + // not in store, can`t find. + void *handle = nullptr; + std::string tbe_name0("tbe_kernel_key0"); + EXPECT_FALSE(kernel_store.FindTBEHandle(tbe_name0, handle)); + EXPECT_EQ(handle, nullptr); + + // store first, size is 1, num is 1. + std::string tbe_name1("tbe_kernel_key1"); + void *tbe_handle1 = (void *)0x12345678; + std::shared_ptr tbe_kernel = std::shared_ptr(); + kernel_store.StoreTBEHandle(tbe_name1, tbe_handle1, tbe_kernel); + EXPECT_EQ(kernel_store.kernels_.size(), 1); + + EXPECT_TRUE(kernel_store.FindTBEHandle(tbe_name1, handle)); + EXPECT_EQ(handle, tbe_handle1); + + auto it = kernel_store.kernels_.find(tbe_name1); + EXPECT_NE(it, kernel_store.kernels_.end()); + TbeHandleInfo &info1 = it->second; + EXPECT_EQ(info1.handle(), tbe_handle1); + EXPECT_EQ(info1.used_num(), 1); + + // store second, size is 1, num is 2. 
+ kernel_store.StoreTBEHandle(tbe_name1, tbe_handle1, tbe_kernel); + EXPECT_EQ(kernel_store.kernels_.size(), 1); + + EXPECT_TRUE(kernel_store.FindTBEHandle(tbe_name1, handle)); + EXPECT_EQ(handle, tbe_handle1); + + it = kernel_store.kernels_.find(tbe_name1); + EXPECT_NE(it, kernel_store.kernels_.end()); + TbeHandleInfo &info2 = it->second; + EXPECT_EQ(info2.handle(), tbe_handle1); + EXPECT_EQ(info2.used_num(), 2); + + // store other, size is 2, num is 2, num is 1. + std::string tbe_name2("tbe_kernel_key2"); + void *tbe_handle2 = (void *)0x22345678; + kernel_store.StoreTBEHandle(tbe_name2, tbe_handle2, tbe_kernel); + EXPECT_EQ(kernel_store.kernels_.size(), 2); + + EXPECT_TRUE(kernel_store.FindTBEHandle(tbe_name2, handle)); + EXPECT_EQ(handle, tbe_handle2); + EXPECT_TRUE(kernel_store.FindTBEHandle(tbe_name1, handle)); + EXPECT_EQ(handle, tbe_handle1); + + it = kernel_store.kernels_.find(tbe_name1); + EXPECT_NE(it, kernel_store.kernels_.end()); + TbeHandleInfo &info3 = it->second; + EXPECT_EQ(info3.handle(), tbe_handle1); + EXPECT_EQ(info3.used_num(), 2); + + it = kernel_store.kernels_.find(tbe_name2); + EXPECT_NE(it, kernel_store.kernels_.end()); + TbeHandleInfo &info4 = it->second; + EXPECT_EQ(info4.handle(), tbe_handle2); + EXPECT_EQ(info4.used_num(), 1); + + // For Refer + kernel_store.ReferTBEHandle(tbe_name0); + EXPECT_EQ(kernel_store.kernels_.size(), 2); + + kernel_store.ReferTBEHandle(tbe_name1); + EXPECT_EQ(kernel_store.kernels_.size(), 2); + + // For Erase. 
+ std::map names0 = {{tbe_name0, 1}}; + kernel_store.EraseTBEHandle(names0); + EXPECT_EQ(kernel_store.kernels_.size(), 2); + + std::map names1 = {{tbe_name1, 1}}; + kernel_store.EraseTBEHandle(names1); + EXPECT_EQ(kernel_store.kernels_.size(), 2); + + std::map names2 = {{tbe_name1, 2}, {tbe_name2, 1}}; + kernel_store.EraseTBEHandle(names2); + EXPECT_EQ(kernel_store.kernels_.size(), 0); +} + +TEST_F(UtestTBEHandleStore, test_tbe_handle_info) { + void *tbe_handle = (void *)0x12345678; + std::shared_ptr tbe_kernel = std::shared_ptr(); + TbeHandleInfo info(tbe_handle, tbe_kernel); + EXPECT_EQ(info.used_num(), 0); + + info.used_dec(); + EXPECT_EQ(info.used_num(), 0); + + info.used_inc(std::numeric_limits::max()); + EXPECT_EQ(info.used_num(), std::numeric_limits::max()); + + info.used_inc(); + EXPECT_EQ(info.used_num(), std::numeric_limits::max()); +} +} // namespace ge diff --git a/tests/ut/ge/graph/ops_stub.h b/tests/ut/ge/graph/ops_stub.h new file mode 100644 index 00000000..2a71d80a --- /dev/null +++ b/tests/ut/ge/graph/ops_stub.h @@ -0,0 +1,154 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef MAIN_OPS_STUB_H +#define MAIN_OPS_STUB_H + +#include "external/graph/operator_reg.h" + +// for ir +namespace ge { +// Data +REG_OP(Data) + .INPUT(data, TensorType::ALL()) + .OUTPUT(out, TensorType::ALL()) + .ATTR(index, Int, 0) + .OP_END_FACTORY_REG(Data) + + // Softmax + REG_OP(Softmax) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) + .ATTR(axis, Int, 0) // which mean compute which dims + .ATTR(algo, Int, 1) // 1 means using "subtract max from every point to avoid overflow", + /// 0 means using "ubtract max from every point to avoid overflow" + /// 2 means using "perform the Log softmax operation to avoid overflow" + /// now is only support 1 + .ATTR(alpha, Float, 1) + .ATTR(beta, Float, 0) + .OP_END_FACTORY_REG(Softmax) + + // Flatten + REG_OP(Flatten) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .OP_END_FACTORY_REG(Flatten) + + REG_OP(Square) + .INPUT(x, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(Square) + + REG_OP(ReadVariable) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .OP_END_FACTORY_REG(ReadVariable) + + REG_OP(Activation) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + /// 0:sigmod, 1:relu, 2:tanh, 3:clipped ReLU, 4:Elu, + /// 5:leaky relu, 6:abs, 7:relu1, 8:softsign, 9:softplus + .ATTR(mode, Int, 1) + .ATTR(coef, Float, 0) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0) + .OP_END_FACTORY_REG(Activation) + + REG_OP(Add) + .INPUT(x1, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16})) // "First operand." + .INPUT(x2, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16})) // "Second operand." 
+ // "Result, has same element type as two inputs" + .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16})) + .ATTR(mode, Int, 0) // mode=0, infer mode=1, train + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .ATTR(is_input_const, ListBool, {false, false}) + .ATTR(T, Int, 0) + .OP_END_FACTORY_REG(Add) + + REG_OP(Variable) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .ATTR(index, Int, 0) + .ATTR(value, Tensor, Tensor()) + .OP_END_FACTORY_REG(Variable) + + REG_OP(Summary) + .INPUT(x, TensorType::ALL()) + .OP_END_FACTORY_REG(Summary) + + REG_OP(Const) + .OUTPUT(y, TensorType::ALL()) // TensorType({DT_FLOAT, DT_INT8, DT_INT32, DT_BOOL}) + .ATTR(value, Tensor, Tensor()) // This is the value of the const op + .ATTR(dtype, Int, 0) + .OP_END_FACTORY_REG(Const) + + REG_OP(HcomBroadcast) + .DYNAMIC_INPUT(x, TensorType::ALL()) + .DYNAMIC_OUTPUT(y, TensorType::ALL()) + .REQUIRED_ATTR(root_rank, Int) + .REQUIRED_ATTR(group, String) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(HcomBroadcast) + + REG_OP(Assign) + .INPUT(resource, TensorType::ALL()) + .INPUT(value, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .OP_END_FACTORY_REG(Assign) REG_OP(Sqrt) + .INPUT(x, TensorType{(DT_FLOAT.DT_FLOAT16)}) + .OUTPUT(y, TensorType{(DT_FLOAT, DT_FLOAT16)}) + .ATTR(T, Int, 1) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(Sqrt) + + REG_OP(Save) + .DYNAMIC_INPUT(tensors, TensorType + : ALL()) + .OP_END_FACTORY_REG(Save) + + REG_OP(PReLU) + .INPUT(x, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(channel_shared, Bool, false) + .ATTR(nan_opt, Int, 0) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(PReLU) REG_OP(Acosh) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(Acosh) + + REG_OP(GuaranteeConst) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, 
DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, + DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, + DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OP_END_FACTORY_REG(GuaranteeConst) + + IMPLEMT_INFERFUNC(GuaranteeConst, GuaranteeConstInfer) { + TensorDesc tensorDesc = op.GetInputDesc("x"); + (void)op.UpdateOutputDesc("y", tensorDesc); + return GRAPH_SUCCESS; +} +INFER_FUNC_REG(GuaranteeConst, GuaranteeConstInfer); +} // namespace ge +#endif // MAIN_OPS_STUB_H diff --git a/tests/ut/ge/graph/passes/addn_pass_unittest.cc b/tests/ut/ge/graph/passes/addn_pass_unittest.cc new file mode 100644 index 00000000..e31030b2 --- /dev/null +++ b/tests/ut/ge/graph/passes/addn_pass_unittest.cc @@ -0,0 +1,236 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "graph/passes/addn_pass.h" + +namespace ge { +namespace { + +GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, Format format = FORMAT_NCHW, + DataType data_type = DT_FLOAT) { + GeShape ge_shape{vector(shape)}; + GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; +} + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + + NodeBuilder &AddInputDesc(std::initializer_list shape = {1, 1, 224, 224}, Format format = FORMAT_NCHW, + DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + NodeBuilder &AddOutputDesc(std::initializer_list shape = {1, 1, 224, 224}, Format format = FORMAT_NCHW, + DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + NodeBuilder &AddOutputDesc(GeTensorDescPtr tensor_desc) { + op_desc_->AddOutputDesc(tensor_desc->Clone()); + return *this; + } + + NodePtr Build(const ComputeGraphPtr &graph) { + NodePtr node = graph->AddNode(op_desc_); + return node; + } + + private: + OpDescPtr op_desc_; +}; + +} // namespace + +TEST(UtestGraphPassesAddnPass, null_pass) { + ComputeGraphPtr graph = std::make_shared("test"); + GEPass pass(graph); + AddNPass *addn_pass = nullptr; + NamesToPass names_to_pass; + names_to_pass.emplace_back("Test", addn_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); +} + +TEST(UtestGraphPassesAddnPass, null_graph) { + ComputeGraphPtr graph = nullptr; + GEPass pass(graph); + AddNPass addn_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("Test", nullptr); + EXPECT_EQ(pass.Run(names_to_pass), INTERNAL_ERROR); +} + +TEST(UtestGraphPassesAddnPass, 
empty_pass) { + ComputeGraphPtr graph = std::make_shared("test"); + GEPass pass(graph); + AddNPass addn_pass; + NamesToPass names_to_pass; + EXPECT_EQ(pass.Run(names_to_pass), INTERNAL_ERROR); +} + +/// | +/// AddN +/// | +TEST(UtestGraphPassesAddnPass, single_addn_node) { + ComputeGraphPtr graph = std::make_shared("test"); + GeTensorDescPtr general_ge_tensor_desc = std::make_shared(); + + NodePtr add_n_node = NodeBuilder("add_n_node", ADDN).Build(graph); + + GEPass pass(graph); + AddNPass addn_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("Test", &addn_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + EXPECT_EQ(graph->GetDirectNodesSize(), 1); + EXPECT_TRUE(add_n_node->GetInDataNodes().empty()); + EXPECT_TRUE(add_n_node->GetOutDataNodes().empty()); +} + +/// Op1 +/// | +/// AddN +/// | +TEST(UtestGraphPassesAddnPass, no_output) { + ComputeGraphPtr graph = std::make_shared("test"); + GeTensorDescPtr general_ge_tensor_desc = std::make_shared(); + + NodePtr node = NodeBuilder("node", RELU).AddInputDesc({1, 1, 224, 224}).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + NodePtr add_n_node = NodeBuilder("add_n_node", ADDN).AddInputDesc({1, 1, 224, 224}).Build(graph); + + GraphUtils::AddEdge(node->GetOutDataAnchor(0), add_n_node->GetInDataAnchor(0)); + GEPass pass(graph); + AddNPass addn_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("Test", &addn_pass); + EXPECT_EQ(pass.Run(names_to_pass), INTERNAL_ERROR); + + EXPECT_FALSE(add_n_node->GetInDataNodes().empty()); + EXPECT_TRUE(add_n_node->GetOutDataNodes().empty()); + EXPECT_FALSE(node->GetOutDataNodes().empty()); +} + +/// | +/// AddN +/// | +/// Op +TEST(UtestGraphPassesAddnPass, no_input) { + ComputeGraphPtr graph = std::make_shared("test"); + GeTensorDescPtr general_ge_tensor_desc = std::make_shared(); + + NodePtr add_n_node = NodeBuilder("add_n_node", ADDN).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + NodePtr node = NodeBuilder("node2", RELU).AddInputDesc({1, 1, 224, 
224}).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + GraphUtils::AddEdge(add_n_node->GetOutDataAnchor(0), node->GetInDataAnchor(0)); + + GEPass pass(graph); + AddNPass addn_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("Test", &addn_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + EXPECT_EQ(graph->GetDirectNodesSize(), 2); + EXPECT_TRUE(add_n_node->GetInDataNodes().empty()); + EXPECT_EQ(node->GetInDataNodes().at(0)->GetName(), add_n_node->GetName()); +} + +/// Op1 +/// | +/// AddN +/// | +/// Op2 +TEST(UtestGraphPassesAddnPass, single_input_remove_addn_success) { + ComputeGraphPtr graph = std::make_shared("test"); + GeTensorDescPtr general_ge_tensor_desc = std::make_shared(); + + NodePtr node1 = + NodeBuilder("node1", CONSTANTOP).AddInputDesc({1, 1, 224, 224}).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + NodePtr add_n_node = + NodeBuilder("add_n_node", ADDN).AddInputDesc({1, 1, 224, 224}).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + NodePtr node2 = + NodeBuilder("node2", RELU).AddInputDesc({1, 1, 224, 224}).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), add_n_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(add_n_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + + EXPECT_EQ(graph->GetDirectNodesSize(), 3); + + GEPass pass(graph); + + AddNPass addn_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("Test", &addn_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + EXPECT_EQ(node1->GetOutDataNodes().at(0)->GetName(), node2->GetName()); + EXPECT_EQ(node2->GetInDataNodes().at(0)->GetName(), node1->GetName()); + EXPECT_TRUE(add_n_node->GetOutDataNodes().empty()); + EXPECT_TRUE(add_n_node->GetInDataNodes().empty()); +} + +/// Op1 Op2 +/// \ / +/// AddN +/// | +/// Op3 +TEST(UtestGraphPassesAddnPass, multiple_inputs_do_not_remove) { + ComputeGraphPtr graph = std::make_shared("test"); + GeTensorDescPtr general_ge_tensor_desc = std::make_shared(); + + NodePtr 
node1 = + NodeBuilder("node1", CONSTANTOP).AddInputDesc({1, 1, 224, 224}).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + NodePtr node2 = + NodeBuilder("node2", CONSTANTOP).AddInputDesc({1, 1, 224, 224}).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + NodePtr add_n_node = NodeBuilder("add_n_node", ADDN) + .AddInputDesc({1, 1, 224, 224}) + .AddInputDesc({1, 1, 224, 224}) + .AddOutputDesc({1, 1, 224, 224}) + .Build(graph); + + NodePtr node3 = + NodeBuilder("node3", RELU).AddInputDesc({1, 1, 224, 224}).AddOutputDesc({1, 1, 224, 224}).Build(graph); + + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), add_n_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(node2->GetOutDataAnchor(0), add_n_node->GetInDataAnchor(1)); + GraphUtils::AddEdge(add_n_node->GetOutDataAnchor(0), node3->GetInDataAnchor(0)); + + EXPECT_EQ(graph->GetDirectNodesSize(), 4); + GEPass pass(graph); + AddNPass addn_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("Test", &addn_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetDirectNodesSize(), 4); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/assert_pass_unittest.cc b/tests/ut/ge/graph/passes/assert_pass_unittest.cc new file mode 100644 index 00000000..4aa133d3 --- /dev/null +++ b/tests/ut/ge/graph/passes/assert_pass_unittest.cc @@ -0,0 +1,178 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/assert_pass.h" + +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace testing; +namespace ge { +class UtestGraphPassesAssertPass : public Test { + protected: + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc; + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(op_desc); + return node; + } +}; + +/// D E +/// | \ | \ +/// F C G +/// : | : +/// H A I +/// : +/// B +TEST_F(UtestGraphPassesAssertPass, assert_pass_test1) { + ComputeGraphPtr graph = std::make_shared("test1"); + NodePtr node_a = AddNode(graph, "A", ge::ASSERT, 1, 0); + NodePtr node_b = AddNode(graph, "B", "B", 1, 1); + NodePtr node_c = AddNode(graph, "C", "C", 2, 1); + NodePtr node_d = AddNode(graph, "D", "D", 1, 2); + NodePtr node_e = AddNode(graph, "E", "E", 1, 2); + NodePtr node_f = AddNode(graph, "F", "F", 1, 1); + NodePtr node_g = AddNode(graph, "G", "G", 1, 1); + NodePtr node_h = AddNode(graph, "H", "H", 1, 1); + NodePtr node_i = AddNode(graph, "I", "I", 1, 1); + GraphUtils::AddEdge(node_a->GetOutControlAnchor(), node_b->GetInControlAnchor()); + GraphUtils::AddEdge(node_c->GetOutDataAnchor(0), node_a->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_c->GetOutControlAnchor(), node_h->GetInControlAnchor()); + 
GraphUtils::AddEdge(node_c->GetOutControlAnchor(), node_i->GetInControlAnchor()); + GraphUtils::AddEdge(node_d->GetOutDataAnchor(0), node_c->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_d->GetOutDataAnchor(1), node_f->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_e->GetOutDataAnchor(0), node_c->GetInDataAnchor(1)); + GraphUtils::AddEdge(node_e->GetOutDataAnchor(1), node_g->GetInDataAnchor(0)); + + AssertPass assert_pass; + Status status = assert_pass.Run(node_a); + EXPECT_EQ(SUCCESS, status); + + EXPECT_EQ(node_d->GetOutControlNodes().size(), 3); + EXPECT_EQ(node_e->GetOutControlNodes().size(), 3); + EXPECT_EQ(node_h->GetInControlNodes().size(), 2); + EXPECT_EQ(node_b->GetInControlNodes().size(), 2); + EXPECT_EQ(node_i->GetInControlNodes().size(), 2); + + EXPECT_EQ(graph->FindNode("A"), nullptr); + EXPECT_EQ(graph->FindNode("C"), nullptr); +} + +/// G E +/// | \ : +/// C G D +/// : | : +/// A F +/// : +/// B +TEST_F(UtestGraphPassesAssertPass, assert_pass_test2) { + ComputeGraphPtr graph = std::make_shared("test2"); + NodePtr node_a = AddNode(graph, "A", ge::ASSERT, 1, 0); + NodePtr node_b = AddNode(graph, "B", "B", 1, 1); + NodePtr node_c = AddNode(graph, "C", "C", 1, 1); + NodePtr node_d = AddNode(graph, "D", "D", 1, 1); + NodePtr node_e = AddNode(graph, "E", "E", 1, 1); + NodePtr node_f = AddNode(graph, "F", "F", 1, 1); + NodePtr node_g = AddNode(graph, "G", "G", 1, 2); + NodePtr node_h = AddNode(graph, "H", "H", 1, 1); + GraphUtils::AddEdge(node_a->GetOutControlAnchor(), node_b->GetInControlAnchor()); + GraphUtils::AddEdge(node_c->GetOutControlAnchor(), node_a->GetInControlAnchor()); + GraphUtils::AddEdge(node_d->GetOutDataAnchor(0), node_a->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_d->GetOutControlAnchor(), node_f->GetInControlAnchor()); + GraphUtils::AddEdge(node_g->GetOutDataAnchor(0), node_d->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_g->GetOutDataAnchor(1), node_h->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_e->GetOutControlAnchor(), 
node_d->GetInControlAnchor()); + + AssertPass assert_pass; + Status status = assert_pass.Run(node_a); + EXPECT_EQ(SUCCESS, status); + + EXPECT_EQ(node_g->GetOutControlNodes().size(), 2); + EXPECT_EQ(node_c->GetOutControlAnchor()->GetPeerInControlAnchors().size(), 1); + EXPECT_EQ(node_c->GetOutControlAnchor()->GetPeerInControlAnchors().at(0), node_b->GetInControlAnchor()); + + EXPECT_EQ(node_e->GetOutControlNodes().size(), 2); + + EXPECT_EQ(graph->FindNode("A"), nullptr); + EXPECT_EQ(graph->FindNode("D"), nullptr); +} + +/// E F +/// | \ | \ +/// H C -> D G +/// \ | : +/// A I +/// : +/// B +TEST_F(UtestGraphPassesAssertPass, assert_pass_test3) { + ComputeGraphPtr graph = std::make_shared("test1"); + NodePtr node_a = AddNode(graph, "A", ge::ASSERT, 2, 0); + NodePtr node_b = AddNode(graph, "B", "B", 1, 1); + NodePtr node_c = AddNode(graph, "C", "C", 1, 2); + NodePtr node_d = AddNode(graph, "D", "D", 2, 1); + NodePtr node_e = AddNode(graph, "E", "E", 1, 2); + NodePtr node_f = AddNode(graph, "F", "F", 1, 2); + NodePtr node_g = AddNode(graph, "G", "G", 1, 1); + NodePtr node_h = AddNode(graph, "H", "H", 1, 1); + NodePtr node_i = AddNode(graph, "I", "I", 1, 1); + GraphUtils::AddEdge(node_a->GetOutControlAnchor(), node_b->GetInControlAnchor()); + GraphUtils::AddEdge(node_c->GetOutDataAnchor(0), node_a->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_c->GetOutDataAnchor(1), node_d->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_d->GetOutDataAnchor(0), node_a->GetInDataAnchor(1)); + GraphUtils::AddEdge(node_d->GetOutControlAnchor(), node_i->GetInControlAnchor()); + GraphUtils::AddEdge(node_e->GetOutDataAnchor(0), node_c->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_e->GetOutDataAnchor(1), node_h->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_f->GetOutDataAnchor(0), node_d->GetInDataAnchor(1)); + GraphUtils::AddEdge(node_f->GetOutDataAnchor(1), node_g->GetInDataAnchor(0)); + + AssertPass assert_pass; + Status status = assert_pass.Run(node_a); + EXPECT_EQ(SUCCESS, 
status); + + EXPECT_EQ(node_e->GetOutControlNodes().size(), 2); + EXPECT_EQ(node_f->GetOutControlNodes().size(), 2); + EXPECT_EQ(node_b->GetInControlNodes().size(), 2); + EXPECT_EQ(node_i->GetInControlNodes().size(), 2); + + EXPECT_EQ(graph->FindNode("A"), nullptr); + EXPECT_EQ(graph->FindNode("C"), nullptr); + EXPECT_EQ(graph->FindNode("D"), nullptr); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/base_pass_unittest.cc b/tests/ut/ge/graph/passes/base_pass_unittest.cc new file mode 100644 index 00000000..b2767687 --- /dev/null +++ b/tests/ut/ge/graph/passes/base_pass_unittest.cc @@ -0,0 +1,442 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include + +#include "gtest/gtest.h" + +#define protected public +#include "graph/passes/base_pass.h" +#undef protected + +#include "external/graph/ge_error_codes.h" +#include "framework/common/ge_inner_error_codes.h" +#include "framework/common/types.h" +#include "graph/node.h" +#include "graph/utils/graph_utils.h" +#include "graph_builder_utils.h" + +template class std::unordered_set; + +namespace ge { +class UtestTestPass : public BaseNodePass { + public: + UtestTestPass() = default; + UtestTestPass(bool dead_loop) : dead_loop_(dead_loop), run_times_(0) {} + + Status Run(NodePtr &node) override { + ++run_times_; + iter_nodes_.push_back(node); + auto iter = names_to_add_del_.find(node->GetName()); + if (iter != names_to_add_del_.end()) { + for (const auto &node_name : iter->second) { + auto del_node = node->GetOwnerComputeGraph()->FindNode(node_name); + GraphUtils::IsolateNode(del_node, {0}); + AddNodeDeleted(del_node.get()); + } + } + iter = names_to_add_repass_.find(node->GetName()); + if (iter != names_to_add_repass_.end()) { + auto all_nodes = node->GetOwnerComputeGraph()->GetAllNodes(); + for (const auto &node_name : iter->second) { + for (auto &node_re_pass : all_nodes) { + if (node_re_pass->GetName() == node_name) { + AddRePassNode(node_re_pass); + break; + } + } + } + if (!dead_loop_) { + names_to_add_repass_.erase(iter); + } + } + return SUCCESS; + } + void clear() { iter_nodes_.clear(); } + std::vector GetIterNodes() { return iter_nodes_; } + + void AddRePassNodeName(const std::string &iter_node, const std::string &re_pass_node) { + names_to_add_repass_[iter_node].insert(re_pass_node); + } + void AddDelNodeName(const std::string &iter_node, const std::string &del_node) { + names_to_add_del_[iter_node].insert(del_node); + } + unsigned int GetRunTimes() { return run_times_; } + + private: + std::vector iter_nodes_; + std::map> names_to_add_del_; + std::map> names_to_add_repass_; + bool dead_loop_; + unsigned 
int run_times_; +}; + +class TestDelPass : public BaseNodePass { + public: + Status Run(NodePtr &node) override { return SUCCESS; } +}; + +class UTESTGraphPassesBasePass : public testing::Test { + protected: + UTESTGraphPassesBasePass() { + auto p1 = new UtestTestPass; + names_to_pass_.push_back(std::make_pair("test1", p1)); + } + void SetUp() override { + for (auto &name_to_pass : names_to_pass_) { + dynamic_cast(name_to_pass.second)->clear(); + } + } + ~UTESTGraphPassesBasePass() override { + for (auto &name_to_pass : names_to_pass_) { + delete name_to_pass.second; + } + } + NamesToPass names_to_pass_; +}; + +/// reshape1 +/// | +/// add1 +/// / \ +/// | | +/// data1 const1 +ComputeGraphPtr BuildGraph1() { + auto builder = ut::GraphBuilder("g1"); + auto data = builder.AddNode("data1", DATA, 0, 1); + auto a1 = builder.AddNode("add1", ADD, 2, 1); + auto c1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto r1 = builder.AddNode("reshape1", RESHAPE, 1, 1); + + builder.AddDataEdge(data, 0, a1, 0); + builder.AddDataEdge(c1, 0, a1, 1); + builder.AddDataEdge(a1, 0, r1, 0); + + return builder.GetGraph(); +} + +/// sum1 +/// / \ +/// / \ +/// / \ +/// reshape1 addn1 +/// | c | +/// add1 <--- shape1 +/// / \ | +/// | | | +/// data1 const1 const2 +ComputeGraphPtr BuildGraph2() { + auto builder = ut::GraphBuilder("g1"); + auto data1 = builder.AddNode("data1", DATA, 0, 1); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto add1 = builder.AddNode("add1", ADD, 2, 1); + auto shape1 = builder.AddNode("shape1", SHAPE, 1, 1); + auto reshape1 = builder.AddNode("reshape1", RESHAPE, 1, 1); + auto addn1 = builder.AddNode("addn1", ADDN, 1, 1); + auto sum1 = builder.AddNode("sum1", SUM, 2, 1); + + builder.AddDataEdge(data1, 0, add1, 0); + builder.AddDataEdge(const1, 0, add1, 1); + builder.AddDataEdge(const2, 0, shape1, 0); + builder.AddControlEdge(shape1, add1); + builder.AddDataEdge(add1, 0, reshape1, 0); + 
builder.AddDataEdge(shape1, 0, addn1, 0); + builder.AddDataEdge(reshape1, 0, sum1, 0); + builder.AddDataEdge(addn1, 0, sum1, 1); + + return builder.GetGraph(); +} + +/// rnextiteration +/// | | +/// merge +/// | +/// data1 +ComputeGraphPtr BuildGraph3() { + auto builder = ut::GraphBuilder("g1"); + auto data1 = builder.AddNode("data1", DATA, 0, 1); + auto merge1 = builder.AddNode("merge1", MERGE, 2, 1); + auto next1 = builder.AddNode("next1", NEXTITERATION, 1, 1); + + builder.AddDataEdge(data1, 0, merge1, 0); + builder.AddDataEdge(merge1, 0, next1, 0); + builder.AddDataEdge(next1, 0, merge1, 1); + builder.AddControlEdge(merge1, next1); + builder.AddControlEdge(next1, merge1); + + return builder.GetGraph(); +} + +void CheckIterOrder(UtestTestPass *pass, std::vector> &nodes_layers) { + std::unordered_set layer_nodes; + size_t layer_index = 0; + for (const auto &node : pass->GetIterNodes()) { + layer_nodes.insert(node->GetName()); + EXPECT_LT(layer_index, nodes_layers.size()); + if (layer_nodes == nodes_layers[layer_index]) { + layer_index++; + layer_nodes.clear(); + } + } + EXPECT_EQ(layer_index, nodes_layers.size()); +} + +/// Op1 +/// | +/// Merge +/// / \ +/// Op2 Op3 +TEST_F(UTESTGraphPassesBasePass, del_isolate_fail) { + auto builder = ut::GraphBuilder("g1"); + auto merge_node = builder.AddNode("Merge", MERGE, 1, 1); + auto node1 = builder.AddNode("Op1", RELU, 1, 1); + auto node2 = builder.AddNode("Op2", CONVOLUTION, 1, 1); + auto node3 = builder.AddNode("Op3", CONVOLUTION, 1, 1); + + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), merge_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node3->GetInDataAnchor(0)); + + EXPECT_EQ(node1->GetOutDataNodes().size(), 1); + + TestDelPass del_pass; + auto ret = del_pass.IsolateAndDeleteNode(merge_node, {0, -1}); + EXPECT_EQ(ret, FAILED); + + OpDescPtr op_desc = std::make_shared("merge", MERGE); + NodePtr node 
= shared_ptr(new (std::nothrow) Node(op_desc, nullptr)); + ret = del_pass.IsolateAndDeleteNode(node, {0, -1}); + EXPECT_EQ(ret, FAILED); +} + +/// Op1 +/// | +/// Merge +/// / \ +/// Op2 Op3 +TEST_F(UTESTGraphPassesBasePass, del_isolate_success) { + auto builder = ut::GraphBuilder("g1"); + auto merge_node = builder.AddNode("Merge", MERGE, 1, 2); + auto node1 = builder.AddNode("Op1", RELU, 1, 1); + auto node2 = builder.AddNode("Op2", CONVOLUTION, 1, 1); + auto node3 = builder.AddNode("Op3", CONVOLUTION, 1, 1); + + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), merge_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node3->GetInDataAnchor(0)); + + EXPECT_EQ(node1->GetOutDataNodes().size(), 1); + + TestDelPass del_pass; + auto ret = del_pass.IsolateAndDeleteNode(merge_node, {0, -1}); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UTESTGraphPassesBasePass, data_graph) { + auto graph = BuildGraph1(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass_), SUCCESS); + auto *pass = dynamic_cast(names_to_pass_[0].second); + + EXPECT_EQ(pass->GetIterNodes().size(), 4); + std::vector> layers; + layers.push_back({"data1", "const1"}); + layers.push_back({"add1"}); + layers.push_back({"reshape1"}); + CheckIterOrder(pass, layers); +} + +TEST_F(UTESTGraphPassesBasePass, graph_with_control_link) { + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass_), SUCCESS); + auto *pass = dynamic_cast(names_to_pass_[0].second); + + EXPECT_EQ(pass->GetIterNodes().size(), 8); + EXPECT_EQ(pass->GetIterNodes().at(3)->GetName(), "shape1"); + + std::vector> layers; + layers.push_back({"data1", "const1", "const2"}); + layers.push_back({"shape1"}); + layers.push_back({"add1", "addn1", "reshape1"}); + layers.push_back({"sum1"}); + CheckIterOrder(pass, layers); +} + +TEST_F(UTESTGraphPassesBasePass, re_pass_after) { + NamesToPass 
names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddRePassNodeName("add1", "sum1"); + test_pass.AddRePassNodeName("shape1", "sum1"); + test_pass.AddRePassNodeName("shape1", "add1"); + test_pass.AddRePassNodeName("data1", "add1"); + + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetIterNodes().size(), 8); +} + +TEST_F(UTESTGraphPassesBasePass, re_pass_before) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddRePassNodeName("add1", "data1"); + + auto graph = BuildGraph1(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetIterNodes().size(), 5); + EXPECT_EQ(test_pass.GetIterNodes().at(2)->GetName(), "add1"); + EXPECT_EQ(test_pass.GetIterNodes().at(3)->GetName(), "reshape1"); + EXPECT_EQ(test_pass.GetIterNodes().at(4)->GetName(), "data1"); +} + +TEST_F(UTESTGraphPassesBasePass, re_pass_before_multi_times) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddRePassNodeName("add1", "data1"); + test_pass.AddRePassNodeName("add1", "const1"); + test_pass.AddRePassNodeName("reshape1", "data1"); + + auto graph = BuildGraph1(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetIterNodes().size(), 6); + EXPECT_EQ(test_pass.GetIterNodes().at(2)->GetName(), "add1"); + EXPECT_EQ(test_pass.GetIterNodes().at(3)->GetName(), "reshape1"); +} + +TEST_F(UTESTGraphPassesBasePass, del_after) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddDelNodeName("add1", "sum1"); + + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); 
+ EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetIterNodes().size(), 7); +} + +TEST_F(UTESTGraphPassesBasePass, del_after_multiple) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddDelNodeName("add1", "sum1"); + test_pass.AddDelNodeName("add1", "reshape1"); + + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetIterNodes().size(), 6); +} + +TEST_F(UTESTGraphPassesBasePass, del_after_break_link) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddDelNodeName("shape1", "add1"); + test_pass.AddDelNodeName("shape1", "addn1"); + test_pass.AddRePassNodeName("shape1", "shape1"); + test_pass.AddRePassNodeName("shape1", "reshape1"); + test_pass.AddRePassNodeName("shape1", "sum1"); + + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetIterNodes().size(), 7); +} + +TEST_F(UTESTGraphPassesBasePass, del_self_and_after) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddDelNodeName("shape1", "add1"); + test_pass.AddDelNodeName("shape1", "addn1"); + + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetIterNodes().size(), 4); +} + +TEST_F(UTESTGraphPassesBasePass, del_before) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddDelNodeName("reshape1", "add1"); + test_pass.AddDelNodeName("sum1", "addn1"); + + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + 
EXPECT_EQ(test_pass.GetIterNodes().size(), 8); +} + +TEST_F(UTESTGraphPassesBasePass, re_pass_and_del) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddRePassNodeName("add1", "sum1"); + test_pass.AddDelNodeName("reshape1", "sum1"); + + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetIterNodes().size(), 7); +} + +TEST_F(UTESTGraphPassesBasePass, dead_loop) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(true); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + test_pass.AddRePassNodeName("add1", "sum1"); + test_pass.AddRePassNodeName("sum1", "add1"); + + auto graph = BuildGraph2(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(test_pass.GetRunTimes(), 1007); +} + +TEST_F(UTESTGraphPassesBasePass, while_loop) { + NamesToPass names_to_pass; + auto test_pass = UtestTestPass(true); + names_to_pass.push_back(std::make_pair("test", &test_pass)); + + auto graph = BuildGraph3(); + auto ge_pass = GEPass(graph); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/constant_folding_pass_unittest.cc b/tests/ut/ge/graph/passes/constant_folding_pass_unittest.cc new file mode 100644 index 00000000..96788b53 --- /dev/null +++ b/tests/ut/ge/graph/passes/constant_folding_pass_unittest.cc @@ -0,0 +1,777 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/constant_folding_pass.h" + +#include +#include +#include + +#include "common/types.h" +#include "ge/common/ge/ge_util.h" +#include "graph/passes/base_pass.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph_builder_utils.h" +#include "inc/kernel.h" +#include "inc/kernel_factory.h" + +namespace ge { +const char *AddYesDim = "AddYesDim"; +const char *AddNYes = "AddNYes"; +const char *AddNNo = "AddNNo"; +const char *AddYes = "AddYes"; +const char *HuberLossYes = "HuberLossYes"; +const char *ShapeNo = "ShapeNo"; +const char *DataNo = "dataNo"; +const char *WrongYes = "WrongYes"; +const char *WrongYes1 = "WrongYes1"; +const char *WrongYes2 = "WrongYes2"; +const char *WrongYes3 = "WrongYes3"; + +class TestAddNKernel : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override { + auto output = std::make_shared(); + std::vector data{1, 2, 3}; + std::vector shape{3}; + output->MutableTensorDesc().SetShape(GeShape(shape)); + output->SetData(data); + output->MutableTensorDesc().SetDataType(DT_UINT8); + v_output.push_back(output); + return SUCCESS; + } +}; +REGISTER_KERNEL(AddNYes, TestAddNKernel); + +class TestHuberLossKernel : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override { + auto output1 = std::make_shared(); + std::vector data{1, 2, 3, 4, 5}; + std::vector shape{5}; + output1->MutableTensorDesc().SetShape(GeShape(shape)); + 
output1->SetData(data); + output1->MutableTensorDesc().SetDataType(DT_UINT8); + v_output.push_back(output1); + + auto output2 = std::make_shared(); + std::vector data2{1, 2, 3, 4, 5, 6}; + std::vector shape2{2, 3}; + output2->MutableTensorDesc().SetShape(GeShape(shape2)); + output2->SetData(data2); + output2->MutableTensorDesc().SetDataType(DT_UINT8); + v_output.push_back(output2); + + return SUCCESS; + } +}; +REGISTER_KERNEL(HuberLossYes, TestHuberLossKernel); + +class TestAddKernel : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override { + auto output = std::make_shared(); + std::vector data{1, 2, 3, 4, 5}; + std::vector shape{5}; + output->MutableTensorDesc().SetShape(GeShape(shape)); + output->SetData(data); + output->MutableTensorDesc().SetDataType(DT_UINT8); + v_output.push_back(output); + return SUCCESS; + } +}; +REGISTER_KERNEL(AddYes, TestAddKernel); + +class TestAddDimKernel : public Kernel { + public: + Status Compute(const ge::NodePtr &node, std::vector &v_output) { + auto output = std::make_shared(); + std::vector data{1, 2, 3, 4, 5}; + std::vector shape{5}; + output->MutableTensorDesc().SetShape(GeShape(shape)); + output->SetData(data); + output->MutableTensorDesc().SetDataType(DT_UINT8); + v_output.push_back(output); + return SUCCESS; + } +}; +REGISTER_KERNEL(AddYesDim, TestAddDimKernel); + +class TestWrongKernel : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override { + // for test: output weights is null + v_output.push_back(nullptr); + return SUCCESS; + } +}; +REGISTER_KERNEL(WrongYes, TestWrongKernel); + +class TestWrongKernel1 : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override { + // for test: no output weights + return SUCCESS; + } +}; +REGISTER_KERNEL(WrongYes1, TestWrongKernel1); + +class 
TestWrongKernel2 : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override { + auto output1 = std::make_shared(); + std::vector data{1, 2, 3, 4, 5}; + std::vector shape{5}; + output1->MutableTensorDesc().SetShape(GeShape(shape)); + output1->SetData(data); + output1->MutableTensorDesc().SetDataType(DT_UINT8); + v_output.push_back(output1); + // for test: output weights < output size + return SUCCESS; + } +}; +REGISTER_KERNEL(WrongYes2, TestWrongKernel2); + +class TestWrongKernel3 : public Kernel { + public: + Status Compute(const ge::OpDescPtr op_desc_ptr, const std::vector &input, + std::vector &v_output) override { + // for test: return NOT_CHANGED + return NOT_CHANGED; + } +}; +REGISTER_KERNEL(WrongYes3, TestWrongKernel3); + +class UtestGraphPassesConstantFoldingPass : public testing::Test { + protected: + UtestGraphPassesConstantFoldingPass() = default; +}; + +namespace { + +/// netoutput1 +/// | +/// shapeNo1 +/// | +/// addnYes1 +/// / \ +/// / \ +/// const1 const2 +ComputeGraphPtr BuildGraph1() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNYes, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeNo1 +/// | +/// addnYes1 shapeNo2 +/// / \ / +/// / \ / +/// const1 const2 +ComputeGraphPtr BuildGraph2() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto addn1 = 
builder.AddNode("addn1", AddNYes, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto shape2 = builder.AddNode("shape2", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", DataNo, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddDataEdge(const2, 0, shape2, 0); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeNo1 +/// | c +/// addnYes1 <----- dataNo1 +/// / \ +/// / \ +/// const1 const2 +ComputeGraphPtr BuildGraph3() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto data1 = builder.AddNode("data1", DataNo, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNYes, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddControlEdge(data1, addn1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeNo1 +/// | c +/// addnYes1 <--------- +/// / \ \ +/// / \ c \ +/// const1 const2 <----- dataNo1 +ComputeGraphPtr BuildGraph4() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto data1 = builder.AddNode("data1", DataNo, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNYes, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddControlEdge(data1, const2); + 
builder.AddControlEdge(data1, addn1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeNo1 +/// | c +/// addnYes1 <----- dataNo1 +/// / \ +/// / \ c +/// const1 const2 <----- dataNo2 +ComputeGraphPtr BuildGraph5() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto data1 = builder.AddNode("data1", DataNo, 0, 1); + auto data2 = builder.AddNode("data2", DataNo, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNYes, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddControlEdge(data2, const2); + builder.AddControlEdge(data1, addn1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeNo1 +/// | +/// addYes1 <---- const3 +/// | +/// addnYes1 <- +/// / \ \ +/// / \ \ +/// const1 const2 const4 +ComputeGraphPtr BuildGraph6() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto const3 = builder.AddNode("const3", CONSTANT, 0, 1); + auto const4 = builder.AddNode("const4", CONSTANT, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNYes, 3, 1); + auto add1 = builder.AddNode("add1", AddYes, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddDataEdge(const4, 0, addn1, 2); + builder.AddDataEdge(addn1, 0, add1, 0); + builder.AddDataEdge(const3, 0, add1, 1); + 
builder.AddDataEdge(add1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// / \ +/// shapeNo1 ShpaeNo2 +/// \ / +/// huberLoss1 +/// / | \ +/// / | \ +/// const1 const2 const3 +ComputeGraphPtr BuildGraph7() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto const3 = builder.AddNode("const3", CONSTANT, 0, 1); + auto huberLoss1 = builder.AddNode("huberLoss1", HuberLossYes, 3, 2); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto shape2 = builder.AddNode("shape2", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, huberLoss1, 0); + builder.AddDataEdge(const2, 0, huberLoss1, 1); + builder.AddDataEdge(const3, 0, huberLoss1, 2); + builder.AddDataEdge(huberLoss1, 0, shape1, 0); + builder.AddDataEdge(huberLoss1, 1, shape2, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + builder.AddDataEdge(shape2, 1, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeNo1 +/// | +/// addnNo1 +/// / \ +/// / \ +/// const1 const2 +ComputeGraphPtr BuildGraph8() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNNo, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeNo1 +/// | +/// addnYes1 +/// / \ +/// / \ +/// const1 data1 +ComputeGraphPtr BuildGraph9() { + auto builder = ut::GraphBuilder("test"); + auto 
const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto data1 = builder.AddNode("data1", DataNo, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNYes, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(data1, 0, addn1, 1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// / \ +/// addDim sqrt1 +/// \ / +/// switch1 +/// / \ +/// / \ +/// const1 const2 +ComputeGraphPtr BuildGraph10() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto switchNode1 = builder.AddNode("switch1", SWITCH, 2, 2); + auto sqrt1 = builder.AddNode("sqrt1", RSQRT, 1, 1); + auto add1 = builder.AddNode("addDim", AddYesDim, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, switchNode1, 0); + builder.AddDataEdge(const2, 0, switchNode1, 1); + builder.AddDataEdge(switchNode1, 0, add1, 0); + builder.AddDataEdge(switchNode1, 1, sqrt1, 0); + builder.AddDataEdge(add1, 0, netoutput1, 0); + builder.AddDataEdge(sqrt1, 0, netoutput1, 1); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// FRAMEWORKOP +/// | +/// const1 +ComputeGraphPtr BuildWrongGraph1() { + auto builder = ut::GraphBuilder("test"); + auto const_op = builder.AddNode("const1", CONSTANT, 0, 1); + auto op = builder.AddNode("fmk_op", FRAMEWORKOP, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + builder.AddDataEdge(const_op, 0, op, 0); + builder.AddDataEdge(op, 0, netoutput1, 0); + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// WrongYes +/// | +/// const1 +ComputeGraphPtr BuildWrongGraph2() { + auto builder = ut::GraphBuilder("test"); + auto const_op = 
builder.AddNode("const1", CONSTANT, 0, 1); + auto op = builder.AddNode("wrong", WrongYes, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + builder.AddDataEdge(const_op, 0, op, 0); + builder.AddDataEdge(op, 0, netoutput1, 0); + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// WrongYes1 +/// | +/// const1 +ComputeGraphPtr BuildWrongGraph3() { + auto builder = ut::GraphBuilder("test"); + auto const_op = builder.AddNode("const1", CONSTANT, 0, 1); + auto op = builder.AddNode("wrong1", WrongYes1, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + builder.AddDataEdge(const_op, 0, op, 0); + builder.AddDataEdge(op, 0, netoutput1, 0); + return builder.GetGraph(); +} + +/// netoutput1 WrongYes1 +/// | / +/// WrongYes2 +/// / +/// const1 +ComputeGraphPtr BuildWrongGraph4() { + auto builder = ut::GraphBuilder("test"); + auto const_op_1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto op = builder.AddNode("wrong2", WrongYes2, 1, 2); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + auto wrong_op = builder.AddNode("WrongYes1", WrongYes1, 1, 0); + builder.AddDataEdge(const_op_1, 0, op, 0); + builder.AddDataEdge(op, 0, netoutput1, 0); + builder.AddDataEdge(op, 1, wrong_op, 0); + return builder.GetGraph(); +} + +/// CONVOLUTION +/// | +/// WrongYes2 WrongYes1 +/// / +/// const1 +ComputeGraphPtr BuildWrongGraph5() { + auto builder = ut::GraphBuilder("test"); + auto const_op_1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto op = builder.AddNode("wrong2", WrongYes2, 1, 1); + auto conv = builder.AddNode("conv", CONVOLUTION, 1, 0); + auto wrong_op = builder.AddNode("WrongYes1", WrongYes1, 1, 0); + builder.AddDataEdge(const_op_1, 0, op, 0); + builder.AddDataEdge(op, 0, conv, 0); + return builder.GetGraph(); +} + +/// CONVOLUTION +/// | +/// WrongYes3 +/// / +/// const1 +ComputeGraphPtr BuildWrongGraph6() { + auto builder = ut::GraphBuilder("test"); + auto const_op_1 = builder.AddNode("const1", 
CONSTANT, 0, 1); + auto op = builder.AddNode("wrong3", WrongYes3, 1, 1); + auto conv = builder.AddNode("conv", CONVOLUTION, 1, 0); + builder.AddDataEdge(const_op_1, 0, op, 0); + builder.AddDataEdge(op, 0, conv, 0); + return builder.GetGraph(); +} +} // namespace + +TEST_F(UtestGraphPassesConstantFoldingPass, folding_addn) { + auto graph = BuildGraph1(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 3); + auto shape1 = graph->FindNode("shape1"); + EXPECT_NE(shape1, nullptr); + EXPECT_EQ(shape1->GetInNodes().size(), 1); + + auto folded_const = shape1->GetInDataNodes().at(0); + EXPECT_EQ(folded_const->GetType(), CONSTANT); + auto tensor = folded_const->GetOpDesc()->GetOutputDesc(0); + EXPECT_EQ(tensor.GetDataType(), DT_UINT8); + EXPECT_EQ(tensor.GetShape().GetDims(), std::vector({3})); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, folding_without_one_const) { + auto graph = BuildGraph2(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 5); + EXPECT_EQ(graph->FindNode("addn1"), nullptr); + EXPECT_EQ(graph->FindNode("const1"), nullptr); + + auto const2 = graph->FindNode("const2"); + EXPECT_NE(const2, nullptr); + EXPECT_EQ(const2->GetOutDataNodes().size(), 1); + EXPECT_EQ(const2->GetOutDataNodes().at(0)->GetName(), "shape2"); + + auto shape1 = graph->FindNode("shape1"); + EXPECT_NE(shape1, nullptr); + EXPECT_EQ(shape1->GetInDataNodes().size(), 1); + EXPECT_EQ(shape1->GetInDataNodes().at(0)->GetType(), CONSTANT); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, folding_with_const_control_edges) { + 
auto graph = BuildGraph5(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 5); + auto shape1 = graph->FindNode("shape1"); + EXPECT_NE(shape1, nullptr); + EXPECT_EQ(shape1->GetInNodes().size(), 1); + EXPECT_EQ(shape1->GetInControlNodes().size(), 0); + EXPECT_EQ(shape1->GetInDataNodes().at(0)->GetType(), CONSTANT); + std::unordered_set node_names; + for (auto node : shape1->GetInControlNodes()) { + node_names.insert(node->GetName()); + } + EXPECT_EQ(node_names, std::unordered_set()); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, continues_fold) { + auto graph = BuildGraph6(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 3); + auto shape1 = graph->FindNode("shape1"); + EXPECT_NE(shape1, nullptr); + EXPECT_EQ(shape1->GetInNodes().size(), 1); + + auto folded_const = shape1->GetInDataNodes().at(0); + EXPECT_EQ(folded_const->GetType(), CONSTANT); + auto tensor = folded_const->GetOpDesc()->GetOutputDesc(0); + EXPECT_EQ(tensor.GetDataType(), DT_UINT8); + EXPECT_EQ(tensor.GetShape().GetDims(), std::vector({5})); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, multiple_output) { + auto graph = BuildGraph7(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + EXPECT_EQ(graph->GetAllNodes().size(), 5); + + auto shape1 = graph->FindNode("shape1"); + EXPECT_NE(shape1, nullptr); + EXPECT_EQ(shape1->GetInNodes().size(), 1); + auto folded_const = shape1->GetInDataNodes().at(0); + 
EXPECT_EQ(folded_const->GetType(), CONSTANT); + auto tensor = folded_const->GetOpDesc()->GetOutputDesc(0); + EXPECT_EQ(tensor.GetDataType(), DT_UINT8); + EXPECT_EQ(tensor.GetShape().GetDims(), std::vector({5})); + + auto shape2 = graph->FindNode("shape2"); + EXPECT_NE(shape2, nullptr); + EXPECT_EQ(shape2->GetInNodes().size(), 1); + auto folded_const2 = shape2->GetInDataNodes().at(0); + EXPECT_EQ(folded_const2->GetType(), CONSTANT); + auto tensor2 = folded_const2->GetOpDesc()->GetOutputDesc(0); + EXPECT_EQ(tensor2.GetDataType(), DT_UINT8); + EXPECT_EQ(tensor2.GetShape().GetDims(), std::vector({2, 3})); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, not_change1) { + auto graph = BuildGraph8(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + EXPECT_EQ(graph->GetAllNodes().size(), 5); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, not_change2) { + auto graph = BuildGraph9(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 5); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, folding_size) { + auto graph = BuildGraph10(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new DimensionComputePass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 7); + + auto switchnode = graph->FindNode("switch1"); + EXPECT_NE(switchnode, nullptr); + EXPECT_EQ(switchnode->GetOutDataNodes().size(), 2); + EXPECT_EQ(switchnode->GetOutDataNodes().at(0)->GetName(), "addDim_ctrl_identity_0"); + + for 
(auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, unlikely1) { + auto graph = BuildWrongGraph1(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, unlikely2) { + auto graph = BuildWrongGraph2(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), INTERNAL_ERROR); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesConstantFoldingPass, unlikely3) { + auto graph = BuildWrongGraph3(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), INTERNAL_ERROR); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} +TEST_F(UtestGraphPassesConstantFoldingPass, unlikely4) { + auto graph = BuildWrongGraph4(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), INTERNAL_ERROR); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} +TEST_F(UtestGraphPassesConstantFoldingPass, unlikely5) { + auto graph = BuildWrongGraph5(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} +TEST_F(UtestGraphPassesConstantFoldingPass, unlikely6) { + auto graph = BuildWrongGraph6(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ConstantFoldingPass}); + GEPass pass(graph); + 
EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/dimension_adjust_pass_unittest.cc b/tests/ut/ge/graph/passes/dimension_adjust_pass_unittest.cc new file mode 100644 index 00000000..4e02af70 --- /dev/null +++ b/tests/ut/ge/graph/passes/dimension_adjust_pass_unittest.cc @@ -0,0 +1,149 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/dimension_adjust_pass.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace std; +using namespace testing; + +namespace ge { + +class UtestGraphPassesDimensionAdjustPass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesDimensionAdjustPass, succ) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + vector data_vec = {1, 2, 3}; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_INT32); + GeTensorPtr data_tensor = make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = make_shared("dim", CONSTANTOP); + vector dim_value_vec = {0}; + GeTensorDesc dim_tensor_desc(ge::GeShape(), FORMAT_NCHW, DT_INT32); + GeTensorPtr dim_tensor = + make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), dim_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddims_op_desc = std::make_shared("Expanddims", 
EXPANDDIMS); + vector expanddims_vec = {1, 1, 2, 3}; + GeTensorDesc expanddims_tensor_desc(ge::GeShape(expanddims_vec), FORMAT_NCHW, DT_INT32); + GeTensorPtr expanddims_tensor = make_shared(expanddims_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(expanddims_op_desc, expanddims_tensor); + expanddims_op_desc->AddInputDesc(data_tensor_desc); + expanddims_op_desc->AddInputDesc(dim_tensor_desc); + expanddims_op_desc->AddOutputDesc(expanddims_tensor_desc); + NodePtr op_node = graph->AddNode(expanddims_op_desc); + op_node->Init(); + + // add output node + OpDescPtr netoutput_op_desc = std::make_shared("NetOutput", "NetOutput"); + netoutput_op_desc->AddInputDesc(expanddims_tensor_desc); + NodePtr netoutput_node = graph->AddNode(netoutput_op_desc); + netoutput_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + GraphUtils::AddEdge(op_node->GetOutDataAnchor(0), netoutput_node->GetInDataAnchor(0)); + + std::shared_ptr pass = make_shared(); + ge::Status ret = pass->Run(op_node); + EXPECT_EQ(SUCCESS, ret); +} + +TEST_F(UtestGraphPassesDimensionAdjustPass, input_node_is_nullptr) { + std::shared_ptr pass = make_shared(); + ge::NodePtr node = nullptr; + ge::Status ret = pass->Run(node); + EXPECT_EQ(PARAM_INVALID, ret); +} + +TEST_F(UtestGraphPassesDimensionAdjustPass, node_op_desc_is_nullptr) { + NodePtr op_node = make_shared(nullptr, nullptr); + + std::shared_ptr pass = make_shared(); + ge::Status ret = pass->Run(op_node); + EXPECT_EQ(PARAM_INVALID, ret); +} + +TEST_F(UtestGraphPassesDimensionAdjustPass, node_get_original_type_failed) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", FRAMEWORKOP); + NodePtr op_node = make_shared(expanddim_op_desc, graph); + + std::shared_ptr pass = 
make_shared(); + ge::Status ret = pass->Run(op_node); + // EXPECT_EQ(ge::SUCCESS, ret); +} + +TEST_F(UtestGraphPassesDimensionAdjustPass, node_not_register_op) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", FRAMEWORKOP); + AttrUtils::SetStr(expanddim_op_desc, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, "expanddims_fake"); + NodePtr op_node = make_shared(expanddim_op_desc, graph); + + std::shared_ptr pass = make_shared(); + ge::Status ret = pass->Run(op_node); + EXPECT_EQ(SUCCESS, ret); +} + +TEST_F(UtestGraphPassesDimensionAdjustPass, node_compute_failed) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", EXPANDDIMS); + NodePtr op_node = make_shared(expanddim_op_desc, graph); + + std::shared_ptr pass = make_shared(); + ge::Status ret = pass->Run(op_node); + EXPECT_EQ(SUCCESS, ret); +} + +} // namespace ge diff --git a/tests/ut/ge/graph/passes/dimension_compute_pass_unittest.cc b/tests/ut/ge/graph/passes/dimension_compute_pass_unittest.cc new file mode 100644 index 00000000..d2d736b6 --- /dev/null +++ b/tests/ut/ge/graph/passes/dimension_compute_pass_unittest.cc @@ -0,0 +1,171 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/dimension_compute_pass.h" + +#include +#include +#include + +#include "common/types.h" +#include "graph/passes/base_pass.h" +#include "graph_builder_utils.h" +#include "inc/kernel.h" +#include "inc/kernel_factory.h" + +namespace ge { +namespace { +const char *AddNYes = "AddNYes"; +const char *AddNNo = "AddNNo"; +const char *HuberLossYes = "HuberLossYes"; +const char *ShapeNo = "ShapeNo"; +const char *ShapeYes = "ShapeYes"; +const char *DataNo = "dataNo"; +} // namespace + +class UtestShapeYesKernel : public Kernel { + public: + Status Compute(const NodePtr &node, std::vector &v_output) override { + auto output = std::make_shared(); + std::vector data{1, 2, 3}; + std::vector shape{3}; + output->MutableTensorDesc().SetShape(GeShape(shape)); + output->SetData(data); + output->MutableTensorDesc().SetDataType(DT_UINT8); + v_output.push_back(output); + return SUCCESS; + } +}; +REGISTER_KERNEL(ShapeYes, UtestShapeYesKernel); + +class UtestGraphPassesDimensionAdjustPass : public testing::Test { + protected: + UtestGraphPassesDimensionAdjustPass() = default; +}; + +namespace { + +/// netoutput1 +/// | +/// shapeNo1 +/// | +/// addnNo1 +/// / \ +/// / \ +/// const1 const2 +ComputeGraphPtr BuildGraph8() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNNo, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeNo1 +/// | +/// addnYes1 +/// / \ +/// / \ +///const1 data1 +ComputeGraphPtr BuildGraph9() { + auto builder = ut::GraphBuilder("test"); + auto const1 = 
builder.AddNode("const1", CONSTANT, 0, 1); + auto data1 = builder.AddNode("data1", DataNo, 0, 1); + auto addn1 = builder.AddNode("addn1", AddNYes, 2, 1); + auto shape1 = builder.AddNode("shape1", ShapeNo, 1, 1); + auto netoutput1 = builder.AddNode("netoutput", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(data1, 0, addn1, 1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// shapeYes1 +/// | +/// addnNo1 +ComputeGraphPtr BuildGraph1() { + auto builder = ut::GraphBuilder("test"); + auto addnNo1 = builder.AddNode("addnNo1", AddNNo, 2, 1); + auto shapeYes1 = builder.AddNode("shapeYes1", ShapeYes, 1, 1); + auto netoutput1 = builder.AddNode("netoutput1", NETOUTPUT, 1, 0); + + builder.AddDataEdge(addnNo1, 0, shapeYes1, 0); + builder.AddDataEdge(shapeYes1, 0, netoutput1, 0); + + return builder.GetGraph(); +} +} // namespace + +TEST_F(UtestGraphPassesDimensionAdjustPass, not_changed_no_kernel) { + auto graph = BuildGraph8(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new DimensionComputePass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + EXPECT_EQ(graph->GetAllNodes().size(), 5); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesDimensionAdjustPass, not_changed_no_compute_kernel) { + auto graph = BuildGraph9(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new DimensionComputePass}); + + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + EXPECT_EQ(graph->GetAllNodes().size(), 5); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} + +TEST_F(UtestGraphPassesDimensionAdjustPass, success) { + auto graph = BuildGraph1(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new DimensionComputePass}); + + GEPass pass(graph); + 
EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + EXPECT_EQ(graph->GetAllNodes().size(), 2); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/dropout_pass_unittest.cc b/tests/ut/ge/graph/passes/dropout_pass_unittest.cc new file mode 100644 index 00000000..46a57640 --- /dev/null +++ b/tests/ut/ge/graph/passes/dropout_pass_unittest.cc @@ -0,0 +1,108 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/dropout_pass.h" + +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace testing; +namespace ge { +class UtestGraphPassesDropoutPass : public Test { + protected: + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc; + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(op_desc); + return node; + } +}; +TEST_F(UtestGraphPassesDropoutPass, dropout_remove_succ) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr dropout_node = AddNode(graph, "dropout", DROPOUT); + NodePtr reduce_min_node = AddNode(graph, "reduceMin", REDUCEMIN); + NodePtr reduce_max_node = AddNode(graph, "reduceMax", REDUCEMAX); + + GraphUtils::AddEdge(reduce_max_node->GetOutDataAnchor(0), dropout_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dropout_node->GetOutDataAnchor(0), reduce_min_node->GetInDataAnchor(0)); + vector is_input_const_vec = {true}; + reduce_min_node->GetOpDesc()->SetIsInputConst(is_input_const_vec); + + DropOutPass drop_out_pass; + Status status = drop_out_pass.Run(dropout_node); + EXPECT_EQ(SUCCESS, status); + is_input_const_vec = reduce_min_node->GetOpDesc()->GetIsInputConst(); + EXPECT_EQ(is_input_const_vec[0], true); + NodePtr found_node = graph->FindNode("dropout"); + EXPECT_EQ(nullptr, 
found_node); + + NodePtr node = std::make_shared(); + status = drop_out_pass.Run(node); + EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesDropoutPass, dropout_remove_fail1) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr dropout_node = AddNode(graph, "dropout", DROPOUT, 0, 1); + NodePtr reduce_min_node = AddNode(graph, "reduceMin", REDUCEMIN); + GraphUtils::AddEdge(dropout_node->GetOutDataAnchor(0), reduce_min_node->GetInDataAnchor(0)); + + DropOutPass drop_out_pass; + Status status = drop_out_pass.Run(dropout_node); + EXPECT_EQ(FAILED, status); +} + +TEST_F(UtestGraphPassesDropoutPass, dropout_square) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr dropout_node = AddNode(graph, "dropout", DROPOUT); + NodePtr square_node = AddNode(graph, "square", SQUARE); + NodePtr softplus_node = AddNode(graph, "softplus", SOFTPLUS); + NodePtr const_node = AddNode(graph, "const", CONSTANT); + + GraphUtils::AddEdge(square_node->GetOutControlAnchor(), dropout_node->GetInControlAnchor()); + GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), dropout_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dropout_node->GetOutDataAnchor(0), softplus_node->GetInDataAnchor(0)); + + DropOutPass drop_out_pass; + Status status = drop_out_pass.Run(dropout_node); + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(square_node->GetOutControlAnchor()->GetPeerInControlAnchors().at(0), softplus_node->GetInControlAnchor()); + EXPECT_EQ(const_node->GetOutDataAnchor(0)->GetPeerInDataAnchors().at(0), softplus_node->GetInDataAnchor(0)); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/enter_pass_unittest.cc b/tests/ut/ge/graph/passes/enter_pass_unittest.cc new file mode 100644 index 00000000..e0df5d0a --- /dev/null +++ b/tests/ut/ge/graph/passes/enter_pass_unittest.cc @@ -0,0 +1,87 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define private public +#include "graph/passes/enter_pass.h" +#include "common/ge_inner_error_codes.h" +#include "inc/pass_manager.h" +#include "utils/graph_utils.h" +#undef private + +namespace ge { +namespace { + +class UtestGraphPassesEnterPass : public testing::Test { + protected: + void BuildGraph() { + // Tensor + GeTensorDesc bool_tensor_desc(GeShape(), ge::FORMAT_NCHW, ge::DT_BOOL); + GeTensorDesc scalar_tensor_desc(GeShape(), ge::FORMAT_NCHW, ge::DT_FLOAT); + + // const + auto const_op_desc = std::make_shared("a", "Constant"); + const_op_desc->AddOutputDesc(scalar_tensor_desc); + auto const_node_ = graph_->AddNode(const_op_desc); + + // enter + auto enter_op_desc = std::make_shared("Enter", "Enter"); + enter_op_desc->AddInputDesc(scalar_tensor_desc); + enter_op_desc->AddOutputDesc(scalar_tensor_desc); + enter_node_ = graph_->AddNode(enter_op_desc); + (void)GraphUtils::AddEdge(const_node_->GetOutDataAnchor(0), enter_node_->GetInDataAnchor(0)); + + // less + auto x_op_desc = std::make_shared("x", VARIABLEV2); + x_op_desc->AddOutputDesc(scalar_tensor_desc); + auto x_node = graph_->AddNode(x_op_desc); + auto y_op_desc = std::make_shared("y", VARIABLEV2); + y_op_desc->AddOutputDesc(scalar_tensor_desc); + auto y_node = graph_->AddNode(y_op_desc); + + auto less_op_desc = std::make_shared("Less", "Less"); + less_op_desc->AddInputDesc(scalar_tensor_desc); + less_op_desc->AddInputDesc(scalar_tensor_desc); + less_op_desc->AddOutputDesc(bool_tensor_desc); + auto less_node = 
graph_->AddNode(less_op_desc); + (void)GraphUtils::AddEdge(x_node->GetOutDataAnchor(0), less_node->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(y_node->GetOutDataAnchor(0), less_node->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(enter_node_->GetOutControlAnchor(), less_node->GetInControlAnchor()); + } + + ComputeGraphPtr graph_; + EnterPass pass_; + NodePtr enter_node_; +}; +} // namespace + +TEST_F(UtestGraphPassesEnterPass, null_input) { + NodePtr node = nullptr; + EXPECT_EQ(pass_.Run(node), PARAM_INVALID); +} + +TEST_F(UtestGraphPassesEnterPass, run_success) { + graph_ = std::make_shared("UTEST_graph_passes_enter_pass_run_success"); + BuildGraph(); + EXPECT_NE(enter_node_, nullptr); + + EXPECT_EQ(pass_.Run(enter_node_), SUCCESS); + EXPECT_EQ(enter_node_->GetOutControlAnchor()->GetPeerAnchors().empty(), true); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/flow_ctrl_pass_unittest.cc b/tests/ut/ge/graph/passes/flow_ctrl_pass_unittest.cc new file mode 100644 index 00000000..24274f41 --- /dev/null +++ b/tests/ut/ge/graph/passes/flow_ctrl_pass_unittest.cc @@ -0,0 +1,431 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/manager/graph_var_manager.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "inc/pass_manager.h" + +#define private public +#include "graph/passes/flow_ctrl_pass.h" +#undef private + +namespace ge { +class UtestGraphPassesFlowCtrlPass : public testing::Test { + protected: + void SetUp() { + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->Init(session_version, session_id, device_id, job_id)); + } + + void TearDown() { VarManagerPool::Instance().Destroy(); } + + public: + /// Set up a graph with the following network structure + /// IteratorGetNext + /// | + /// MemcpyAsync + /// | + /// A + /// | + /// NetOutput + void MakeGraph(ge::ComputeGraphPtr &graph) { + auto desc_ptr = make_shared(); + auto desc = *desc_ptr; + + ge::OpDescPtr op_desc_get_next = make_shared("IteratorGetNext", FRAMEWORKOP); + + op_desc_get_next->AddOutputDesc(desc); + + ge::OpDescPtr op_desc_memcpy = make_shared("MemcpyAsync", MEMCPYASYNC); + op_desc_memcpy->AddInputDesc(desc); + op_desc_memcpy->AddOutputDesc(desc); + ge::AttrUtils::SetBool(op_desc_memcpy, ATTR_NAME_STREAM_CYCLE_EVENT_FLAG, true); + + ge::OpDescPtr op_desc_a = make_shared("A", RESOURCEAPPLYMOMENTUM); + op_desc_a->AddInputDesc(desc); + op_desc_a->AddOutputDesc(desc); + + ge::OpDescPtr op_desc_gatherv2 = make_shared("GatherV2", GATHERV2); + op_desc_gatherv2->AddInputDesc(desc); + op_desc_gatherv2->AddOutputDesc(desc); + + ge::OpDescPtr op_desc_global_step = make_shared("global_step", VARIABLE); + op_desc_global_step->AddOutputDesc(desc); + + ge::OpDescPtr op_desc_netout = make_shared("NetOutput", NETOUTPUT); + ge::AttrUtils::SetInt(op_desc_netout, ATTR_NAME_TRUE_BRANCH_STREAM, TRUE_STREAM_ID); + op_desc_netout->AddInputDesc(desc); + op_desc_netout->AddInputDesc(desc); + + // add node 
+ ge::NodePtr get_next_node = graph->AddNode(op_desc_get_next); + ge::NodePtr memcpy_node = graph->AddNode(op_desc_memcpy); + ge::NodePtr node_a = graph->AddNode(op_desc_a); + ge::NodePtr global_step = graph->AddNode(op_desc_global_step); + ge::NodePtr gatherv2 = graph->AddNode(op_desc_gatherv2); + ge::NodePtr netoutput = graph->AddNode(op_desc_netout); + + // add edge + ge::GraphUtils::AddEdge(get_next_node->GetOutDataAnchor(0), memcpy_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(memcpy_node->GetOutDataAnchor(0), node_a->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), netoutput->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(gatherv2->GetOutDataAnchor(0), netoutput->GetInDataAnchor(1)); + + ge::GraphUtils::AddEdge(global_step->GetOutDataAnchor(0), gatherv2->GetInDataAnchor(0)); + } + + void AddSessionVariables(void) { + static std::set var_list = { + NODE_NAME_FLOWCTRL_LOOP_PER_ITER, + NODE_NAME_FLOWCTRL_LOOP_COND, + NODE_NAME_FLOWCTRL_LOOP_INCREMENT, + NODE_NAME_FLOWCTRL_LOOP_RESETVALUE, + NODE_NAME_GLOBAL_STEP, + }; + + uint8_t *dev_ptr = nullptr; + ge::GeTensorDesc tensor_desc(ge::GeShape({1}), ge::FORMAT_NHWC, ge::DT_UINT64); + for (std::string var_name : var_list) { + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->SetVarAddr(var_name, tensor_desc, dev_ptr, RT_MEMORY_HBM)); + } + } +}; + +TEST_F(UtestGraphPassesFlowCtrlPass, flow_ctrl_pass_success_test) { + ge::ComputeGraphPtr graph = make_shared("FlowCtrlPassSuccess"); + graph->SetNeedIteration(true); + + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + + AddSessionVariables(); + FlowCtrlPass flow_ctrl_pass; + Status ret = flow_ctrl_pass.Run(graph); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(16, graph->GetDirectNodesSize()); + + int stream_switch_cnt = 0; + int stream_activeCnt = 0; + for (ge::NodePtr node : graph->GetDirectNode()) { + if (node->GetOpDesc()->GetType() == STREAMSWITCH) { + stream_switch_cnt++; + } else if (node->GetOpDesc()->GetType() == 
STREAMACTIVE) { + stream_activeCnt++; + } + } + EXPECT_EQ(stream_switch_cnt, 2); + EXPECT_EQ(stream_activeCnt, 2); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, flow_ctrl_pass_success_var_node_add_before) { + ge::ComputeGraphPtr graph = make_shared("FlowCtrlPassSuccess"); + graph->SetNeedIteration(true); + + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + + AddSessionVariables(); + FlowCtrlPass flow_ctrl_pass; + + NodePtr loop_cond_node = flow_ctrl_pass.AddVariableNode(graph, NODE_NAME_FLOWCTRL_LOOP_COND); + EXPECT_NE(loop_cond_node, nullptr); + NodePtr loop_increment_node = flow_ctrl_pass.AddVariableNode(graph, NODE_NAME_FLOWCTRL_LOOP_INCREMENT); + EXPECT_NE(loop_increment_node, nullptr); + NodePtr loop_reset_node = flow_ctrl_pass.AddVariableNode(graph, NODE_NAME_FLOWCTRL_LOOP_RESETVALUE); + EXPECT_NE(loop_reset_node, nullptr); + NodePtr iter_per_loop_node = flow_ctrl_pass.AddVariableNode(graph, NODE_NAME_FLOWCTRL_LOOP_PER_ITER); + EXPECT_NE(iter_per_loop_node, nullptr); + Status ret = flow_ctrl_pass.Run(graph); + EXPECT_EQ(ret, ge::SUCCESS); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, flow_ctrl_pass_not_train) { + ge::ComputeGraphPtr graph = make_shared("TestNotChange"); + graph->SetNeedIteration(false); + + FlowCtrlPass flow_ctrl_pass; + Status ret = flow_ctrl_pass.Run(graph); + EXPECT_EQ(ret, NOT_CHANGED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, add_fpbp_iterator_ctrl_without_var) { + ge::ComputeGraphPtr graph = make_shared("TestNotChange"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + + // must have NODE_NAME_FLOWCTRL_LOOP_PER_ITER + ge::GeTensorDesc tensor_desc(ge::GeShape({1}), ge::FORMAT_NHWC, ge::DT_UINT64); + uint8_t *dev_ptr = nullptr; + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->SetVarAddr(NODE_NAME_FLOWCTRL_LOOP_PER_ITER, tensor_desc, + dev_ptr, RT_MEMORY_HBM)); + // not add var + FlowCtrlPass flow_ctrl_pass; + Status ret = flow_ctrl_pass.Run(graph); + EXPECT_NE(ret, 
ge::SUCCESS); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, run_add_special_node_iterator_ctrl_no_inanchor) { + ge::ComputeGraphPtr graph = make_shared("Test_WITHOUT_LOOP_PER_ITER"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + + AddSessionVariables(); + FlowCtrlPass flow_ctrl_pass; + NodePtr getnext_node = graph->FindNode("IteratorGetNext"); + NodePtr memcpy_node = graph->FindNode("MemcpyAsync"); + GraphUtils::RemoveEdge(getnext_node->GetOutDataAnchor(0), memcpy_node->GetInDataAnchor(0)); + Status ret = flow_ctrl_pass.Run(graph); + EXPECT_NE(ret, ge::SUCCESS); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, add_fpbp_iterator_ctrl_without_loop_cond) { + ge::ComputeGraphPtr graph = make_shared("Test_WITHOUT_LOOP_COND"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + std::set var_list = { + NODE_NAME_FLOWCTRL_LOOP_PER_ITER, + NODE_NAME_FLOWCTRL_LOOP_INCREMENT, + NODE_NAME_FLOWCTRL_LOOP_RESETVALUE, + NODE_NAME_GLOBAL_STEP, + }; + // must have NODE_NAME_FLOWCTRL_LOOP_PER_ITER + ge::GeTensorDesc tensor_desc(ge::GeShape({1}), ge::FORMAT_NHWC, ge::DT_UINT64); + uint8_t *dev_ptr = nullptr; + for (std::string var_name : var_list) { + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->SetVarAddr(var_name, tensor_desc, dev_ptr, RT_MEMORY_HBM)); + } + // not add var + FlowCtrlPass flow_ctrl_pass; + NodePtr pre_node = graph->FindNode("NetOutput"); + Status ret = flow_ctrl_pass.AddFpBpIteratorCtrl(graph, pre_node); + EXPECT_EQ(ret, FAILED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, add_fpbp_iterator_ctrl_without_loop_increment) { + ge::ComputeGraphPtr graph = make_shared("Test_WITHOUT_LOOP_INCREMENT"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + std::set var_list = { + NODE_NAME_FLOWCTRL_LOOP_PER_ITER, + NODE_NAME_FLOWCTRL_LOOP_COND, + NODE_NAME_FLOWCTRL_LOOP_RESETVALUE, + NODE_NAME_GLOBAL_STEP, + }; + // must have 
NODE_NAME_FLOWCTRL_LOOP_PER_ITER + ge::GeTensorDesc tensor_desc(ge::GeShape({1}), ge::FORMAT_NHWC, ge::DT_UINT64); + uint8_t *dev_ptr = nullptr; + for (std::string var_name : var_list) { + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->SetVarAddr(var_name, tensor_desc, dev_ptr, RT_MEMORY_HBM)); + } + // not add var + FlowCtrlPass flow_ctrl_pass; + NodePtr pre_node = graph->FindNode("NetOutput"); + Status ret = flow_ctrl_pass.AddFpBpIteratorCtrl(graph, pre_node); + EXPECT_EQ(ret, FAILED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, add_fpbp_iterator_ctrl_without_loop_reset_value) { + ge::ComputeGraphPtr graph = make_shared("Test_WITHOUT_LOOP_RESETVALUE"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + std::set var_list = { + NODE_NAME_FLOWCTRL_LOOP_PER_ITER, + NODE_NAME_FLOWCTRL_LOOP_COND, + NODE_NAME_FLOWCTRL_LOOP_INCREMENT, + NODE_NAME_GLOBAL_STEP, + }; + // must have NODE_NAME_FLOWCTRL_LOOP_PER_ITER + ge::GeTensorDesc tensor_desc(ge::GeShape({1}), ge::FORMAT_NHWC, ge::DT_UINT64); + uint8_t *dev_ptr = nullptr; + for (std::string var_name : var_list) { + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->SetVarAddr(var_name, tensor_desc, dev_ptr, RT_MEMORY_HBM)); + } + // not add var + FlowCtrlPass flow_ctrl_pass; + NodePtr pre_node = graph->FindNode("NetOutput"); + Status ret = flow_ctrl_pass.AddFpBpIteratorCtrl(graph, pre_node); + EXPECT_EQ(ret, FAILED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, add_fpbp_iterator_ctrl_without_loop_ref_iter) { + ge::ComputeGraphPtr graph = make_shared("Test_WITHOUT_LOOP_PER_ITER"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + std::set var_list = { + NODE_NAME_FLOWCTRL_LOOP_COND, + NODE_NAME_FLOWCTRL_LOOP_INCREMENT, + NODE_NAME_FLOWCTRL_LOOP_RESETVALUE, + NODE_NAME_GLOBAL_STEP, + }; + // must have NODE_NAME_FLOWCTRL_LOOP_PER_ITER + ge::GeTensorDesc tensor_desc(ge::GeShape({1}), ge::FORMAT_NHWC, ge::DT_UINT64); + uint8_t 
*dev_ptr = nullptr; + for (std::string var_name : var_list) { + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->SetVarAddr(var_name, tensor_desc, dev_ptr, RT_MEMORY_HBM)); + } + FlowCtrlPass flow_ctrl_pass; + NodePtr pre_node = graph->FindNode("NetOutput"); + Status ret = flow_ctrl_pass.AddFpBpIteratorCtrl(graph, pre_node); + EXPECT_EQ(ret, FAILED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, add_special_node_iterator_ctrl_without_loop_cond) { + ge::ComputeGraphPtr graph = make_shared("Test_WITHOUT_LOOP_COND"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + std::set var_list = { + NODE_NAME_FLOWCTRL_LOOP_PER_ITER, + NODE_NAME_FLOWCTRL_LOOP_INCREMENT, + NODE_NAME_FLOWCTRL_LOOP_RESETVALUE, + NODE_NAME_GLOBAL_STEP, + }; + // must have NODE_NAME_FLOWCTRL_LOOP_PER_ITER + ge::GeTensorDesc tensor_desc(ge::GeShape({1}), ge::FORMAT_NHWC, ge::DT_UINT64); + uint8_t *dev_ptr = nullptr; + for (std::string var_name : var_list) { + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->SetVarAddr(var_name, tensor_desc, dev_ptr, RT_MEMORY_HBM)); + } + + FlowCtrlPass flow_ctrl_pass; + NodePtr iter_per_loop_node = flow_ctrl_pass.AddVariableNode(graph, NODE_NAME_FLOWCTRL_LOOP_PER_ITER); + EXPECT_NE(iter_per_loop_node, nullptr); + NodePtr memcpy_node = graph->FindNode("MemcpyAsync"); + Status ret = flow_ctrl_pass.AddSpecialNodeIteratorCtrl(graph, memcpy_node); + EXPECT_EQ(ret, FAILED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, add_special_node_iterator_ctrl_without_loop_ref_iter) { + ge::ComputeGraphPtr graph = make_shared("Test_WITHOUT_LOOP_PER_ITER"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + std::set var_list = { + NODE_NAME_FLOWCTRL_LOOP_COND, + NODE_NAME_FLOWCTRL_LOOP_INCREMENT, + NODE_NAME_FLOWCTRL_LOOP_RESETVALUE, + NODE_NAME_GLOBAL_STEP, + }; + ge::GeTensorDesc tensor_desc(ge::GeShape({1}), ge::FORMAT_NHWC, ge::DT_UINT64); + uint8_t *dev_ptr = nullptr; + for 
(std::string var_name : var_list) { + EXPECT_EQ(SUCCESS, ge::VarManager::Instance(0)->SetVarAddr(var_name, tensor_desc, dev_ptr, RT_MEMORY_HBM)); + } + + FlowCtrlPass flow_ctrl_pass; + NodePtr loop_cond_node = flow_ctrl_pass.AddVariableNode(graph, NODE_NAME_FLOWCTRL_LOOP_COND); + EXPECT_NE(loop_cond_node, nullptr); + NodePtr memcpy_node = graph->FindNode("MemcpyAsync"); + Status ret = flow_ctrl_pass.AddSpecialNodeIteratorCtrl(graph, memcpy_node); + EXPECT_EQ(ret, FAILED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, add_special_node_iterator_ctrl_no_inchor) { + ge::ComputeGraphPtr graph = make_shared("Test_WITHOUT_LOOP_PER_ITER"); + graph->SetNeedIteration(true); + // Create graph + MakeGraph(graph); + graph->TopologicalSorting(); + + FlowCtrlPass flow_ctrl_pass; + NodePtr getnext_node = graph->FindNode("IteratorGetNext"); + NodePtr memcpy_node = graph->FindNode("MemcpyAsync"); + GraphUtils::RemoveEdge(getnext_node->GetOutDataAnchor(0), memcpy_node->GetInDataAnchor(0)); + Status ret = flow_ctrl_pass.AddSpecialNodeIteratorCtrl(graph, memcpy_node); + EXPECT_EQ(ret, FAILED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, insert_assign_op_success) { + ge::ComputeGraphPtr graph = make_shared("Test_InsertAssignOp"); + + FlowCtrlPass flow_ctrl_pass; + GeTensorDesc tmp_geT_tensor_desc; + NodePtr ref_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {tmp_geT_tensor_desc}); + NodePtr value_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {tmp_geT_tensor_desc}); + NodePtr add_node = flow_ctrl_pass.InsertAssignOp(graph, ASSIGNADD, "add_node", ref_node, value_node); + EXPECT_NE(add_node, nullptr); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, insert_assign_op_ref_node_no_outanchor) { + ge::ComputeGraphPtr graph = make_shared("Test_InsertAssignOp"); + + FlowCtrlPass flow_ctrl_pass; + GeTensorDesc tmp_geT_tensor_desc; + NodePtr ref_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {}); + NodePtr value_node = flow_ctrl_pass.InsertOp(graph, 
VARIABLE, "ref_node", {}, {tmp_geT_tensor_desc}); + NodePtr add_node = flow_ctrl_pass.InsertAssignOp(graph, ASSIGNADD, "add_node", ref_node, value_node); + EXPECT_EQ(add_node, nullptr); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, insert_assign_op_value_node_no_outanchor) { + ge::ComputeGraphPtr graph = make_shared("Test_InsertAssignOp"); + + FlowCtrlPass flow_ctrl_pass; + GeTensorDesc tmp_geT_tensor_desc; + NodePtr ref_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {tmp_geT_tensor_desc}); + NodePtr value_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {}); + NodePtr add_node = flow_ctrl_pass.InsertAssignOp(graph, ASSIGNADD, "add_node", ref_node, value_node); + EXPECT_EQ(add_node, nullptr); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, create_iter_ctrl_false_branch_insert_assign_op_failed) { + ge::ComputeGraphPtr graph = make_shared("Test_CreateIterCtrlFalseBranch_InsertAssignOp_FAILED"); + + FlowCtrlPass flow_ctrl_pass; + GeTensorDesc tmp_geT_tensor_desc; + NodePtr ref_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {tmp_geT_tensor_desc}); + NodePtr value_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {}); + NodePtr switch_node = flow_ctrl_pass.InsertOp(graph, STREAMSWITCH, "switch_node", {}, {}); + Status ret = flow_ctrl_pass.CreateIterCtrlFalseBranch(graph, ref_node, value_node, switch_node); + EXPECT_EQ(ret, FAILED); +} + +TEST_F(UtestGraphPassesFlowCtrlPass, create_iter_ctrl_true_branch_insert_assign_op_failed) { + ge::ComputeGraphPtr graph = make_shared("CreateIterCtrlTrueBranch_InsertAssignOp_FAILED"); + + FlowCtrlPass flow_ctrl_pass; + GeTensorDesc tmp_geT_tensor_desc; + NodePtr ref_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {tmp_geT_tensor_desc}); + NodePtr value_node = flow_ctrl_pass.InsertOp(graph, VARIABLE, "ref_node", {}, {}); + NodePtr switch_node = flow_ctrl_pass.InsertOp(graph, STREAMSWITCH, "switch_node", {}, {}); + Status ret = 
flow_ctrl_pass.CreateIterCtrlTrueBranch(graph, ref_node, value_node, switch_node); + EXPECT_EQ(ret, FAILED); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/folding_kernel/add_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/add_kernel_unittest.cc new file mode 100644 index 00000000..90e7aec4 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/add_kernel_unittest.cc @@ -0,0 +1,166 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/add_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/passes/constant_folding_pass.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestFoldingKernelAddKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestFoldingKernelAddKernel, AddOptimizeInitSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Add", ADD); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, static_cast(DT_INT32)); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1, 2, 3, 4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector v_output; + + shared_ptr kernel = KernelFactory::Instance().Create(ADD); + Status status = kernel->Compute(op_desc_ptr, input, v_output); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestFoldingKernelAddKernel, AddOptimizerInt32Scalar) { + OpDescPtr op_desc_ptr = std::make_shared("Add", ADD); + vector is_input_const_vec = { + true, + true, + }; + 
op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {1}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector v_output; + + shared_ptr kernel = KernelFactory::Instance().Create(ADD); + Status status = kernel->Compute(op_desc_ptr, input, v_output); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestFoldingKernelAddKernel, AddOptimizerFloatSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Add", ADD); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1; + vector data_vec_1 = {1.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector v_output; + + shared_ptr kernel = KernelFactory::Instance().Create(ADD); + Status status = kernel->Compute(op_desc_ptr, input, v_output); + + EXPECT_EQ(NOT_CHANGED, status); +} + +// optimize op of slice success +TEST_F(UtestFoldingKernelAddKernel, 
OptimizeOpOfSliceSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Add", ADD); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UNDEFINED); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1, 2, 3, 4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector v_output; + + shared_ptr kernel = KernelFactory::Instance().Create(ADD); + Status status = kernel->Compute(op_desc_ptr, input, v_output); + + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/broadcast_args_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/broadcast_args_kernel_unittest.cc new file mode 100644 index 00000000..0e04aea7 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/broadcast_args_kernel_unittest.cc @@ -0,0 +1,482 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/broadcast_args_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "folding_kernel_unittest_utils.h" +#include "framework/common/ge_inner_error_codes.h" +#include "ge/ge_api.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/operator.h" +#include "graph/passes/constant_folding_pass.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; +using namespace cce; +using namespace ge::test; + +#define TEST_OPERATOR(op_, input_shapes, output_shapes) \ + { \ + auto op = op_; \ + for (auto input_pair : input_shapes) { \ + SetInputShape(op, input_pair.first, input_pair.second); \ + SetInputDataType(op, input_pair.first); \ + } \ + op.InferShapeAndType(); \ + for (auto output_pair : output_shapes) CheckOutputShape(op, output_pair.first, output_pair.second); \ + } +#define LOOP_VEC(v) for (size_t i = 0; i < v.size(); i++) + +class UtestBroadCastArgsKernel : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { + pass_ = new ConstantFoldingPass(); + graph_ = std::make_shared("default"); + op_desc_ptr_ = std::make_shared("BroadcastArgs", BROADCASTARGS); + node_ = std::make_shared(op_desc_ptr_, graph_); + } + void destory() { + delete pass_; + pass_ = NULL; + } + + protected: + void SetInputShape(Operator op, string name, vector shape) { + TensorDesc tensor_desc = op.GetInputDesc(name); + tensor_desc.SetShape(ge::Shape(shape)); + op.UpdateInputDesc(name, tensor_desc); + } + + void 
SetInputDataType(Operator op, string name) { + TensorDesc tensor_desc = op.GetInputDesc(name); + tensor_desc.SetDataType(DT_INT32); + op.UpdateInputDesc(name, tensor_desc); + } + + void CheckOutputShape(Operator op, string name, vector shape) { + ge::Shape s = op.GetOutputDesc(name).GetShape(); + EXPECT_EQ(s.GetDims().size(), shape.size()); + LOOP_VEC(shape) EXPECT_EQ(s.GetDim(i), shape[i]); + } + + void InitNodeSuccessSame(ge::ComputeGraphPtr graph) { + ge::OpDescPtr op_desc = std::make_shared("broadcast_args", BROADCASTARGS); + op_desc->AddOutputDesc(ge::GeTensorDesc()); + + vector is_input_const = {true, true}; + op_desc->SetIsInputConst(is_input_const); + + vector dims_vec_0 = {1, 2, 3, 4}; + vector data_vec_0 = {1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4}; + ge::GeTensorDesc tensor_desc_0(ge::GeShape(dims_vec_0), ge::FORMAT_NCHW, ge::DT_INT32); + ge::GeTensorPtr tensor_0 = std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), + data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {1, 2, 3, 4}; + vector data_vec_1 = {1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4}; + ge::GeTensorDesc tensor_desc_1(ge::GeShape(dims_vec_1), ge::FORMAT_NCHW, ge::DT_INT32); + ge::GeTensorPtr tensor_1 = std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), + data_vec_1.size() * sizeof(int32_t)); + + vector weights = {tensor_0, tensor_1}; + + ge::NodePtr node = graph->AddNode(op_desc); + + ge::OpDescUtils::SetWeights(node, weights); + + ge::OpDescPtr op_desc1 = std::make_shared(); + op_desc1->AddInputDesc(ge::GeTensorDesc()); + vector is_input_const1 = {false}; + op_desc1->SetIsInputConst(is_input_const1); + ge::NodePtr node1 = graph->AddNode(op_desc1); + + ge::GraphUtils::AddEdge(node->GetOutDataAnchor(0), node1->GetInDataAnchor(0)); + } + + void InitNodeSuccessNotSame(ge::ComputeGraphPtr graph) { + ge::OpDescPtr op_desc = std::make_shared("broadcast_args", BROADCASTARGS); + 
op_desc->AddOutputDesc(ge::GeTensorDesc()); + + vector is_input_const = {true, true}; + op_desc->SetIsInputConst(is_input_const); + + vector dims_vec_0 = {1, 2, 3, 4}; + vector data_vec_0 = {1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4}; + ge::GeTensorDesc tensor_desc_0(ge::GeShape(dims_vec_0), ge::FORMAT_NCHW, ge::DT_INT32); + ge::GeTensorPtr tensor_0 = std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), + data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {0, 9, 5, 6}; + ge::GeTensorDesc tensor_desc_1(ge::GeShape(dims_vec_1), ge::FORMAT_NCHW, ge::DT_INT32); + ge::GeTensorPtr tensor_1 = std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), + data_vec_1.size() * sizeof(int32_t)); + + vector weights = {tensor_0, tensor_1}; + + ge::NodePtr node = graph->AddNode(op_desc); + + ge::OpDescUtils::SetWeights(node, weights); + + ge::OpDescPtr op_desc1 = std::make_shared(); + op_desc1->AddInputDesc(ge::GeTensorDesc()); + vector is_input_const1 = {false}; + op_desc1->SetIsInputConst(is_input_const1); + ge::NodePtr node1 = graph->AddNode(op_desc1); + + ge::GraphUtils::AddEdge(node->GetOutDataAnchor(0), node1->GetInDataAnchor(0)); + } + + void InitNodeFailed(ge::ComputeGraphPtr graph) { + ge::OpDescPtr op_desc = std::make_shared("broadcast_gradient_args", BROADCASTGRADIENTARGS); + op_desc->AddOutputDesc(ge::GeTensorDesc()); + + vector is_input_const = {true, true}; + op_desc->SetIsInputConst(is_input_const); + + vector dims_vec_0 = {1, 2, 3, 4}; + vector data_vec_0 = {1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4, 1, 2, 3, 4}; + ge::GeTensorDesc tensor_desc_0(ge::GeShape(dims_vec_0), ge::FORMAT_NCHW, ge::DT_INT32); + ge::GeTensorPtr tensor_0 = std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), + data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {1, 2}; + vector data_vec_1 = {0, 9}; + ge::GeTensorDesc tensor_desc_1(ge::GeShape(dims_vec_1), ge::FORMAT_NCHW, 
ge::DT_INT32); + ge::GeTensorPtr tensor_1 = std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), + data_vec_1.size() * sizeof(int32_t)); + + vector weights = {tensor_0, tensor_1}; + + ge::NodePtr node = graph->AddNode(op_desc); + + ge::OpDescUtils::SetWeights(node, weights); + + ge::OpDescPtr op_desc1 = std::make_shared(); + op_desc1->AddInputDesc(ge::GeTensorDesc()); + vector is_input_const1 = {false}; + op_desc1->SetIsInputConst(is_input_const1); + ge::NodePtr node1 = graph->AddNode(op_desc1); + + ge::GraphUtils::AddEdge(node->GetOutDataAnchor(0), node1->GetInDataAnchor(0)); + } + ConstantFoldingPass *pass_; + + ge::ComputeGraphPtr graph_; + OpDescPtr op_desc_ptr_; + NodePtr node_; +}; + +TEST_F(UtestBroadCastArgsKernel, BroadCastArgsSuccessSame) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + InitNodeSuccessSame(graph); + for (auto node : graph->GetAllNodes()) { + if (node->GetOpDesc()->GetType() == BROADCASTARGS) { + Status ret = pass_->Run(node); + EXPECT_EQ(ge::PARAM_INVALID, ret); + } + } +} + +TEST_F(UtestBroadCastArgsKernel, BroadCastArgsSuccessNotSame) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + InitNodeSuccessSame(graph); + for (auto node : graph->GetAllNodes()) { + if (node->GetOpDesc()->GetType() == BROADCASTARGS) { + Status ret = pass_->Run(node); + EXPECT_EQ(ge::PARAM_INVALID, ret); + } + } +} + +TEST_F(UtestBroadCastArgsKernel, BroadCastArgsFailed) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + InitNodeFailed(graph); + for (auto n : graph->GetAllNodes()) { + if (n->GetOpDesc()->GetType() == BROADCASTARGS) { + Status ret = pass_->Run(n); + EXPECT_EQ(ge::PARAM_INVALID, ret); + } + } +} + +TEST_F(UtestBroadCastArgsKernel, SizeCheckFail) { + vector dims_vec_0 = {8, 2}; + vector data_vec_0 = {2, 1, 4, 1, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 
sizeof(int64_t)); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + vector input = {tensor_0}; + + std::vector v_output; + auto kernel_ptr = KernelFactory::Instance().Create(BROADCASTARGS); + if (kernel_ptr != nullptr) { + Status status = kernel_ptr->Compute(op_desc_ptr_, input, v_output); + EXPECT_EQ(NOT_CHANGED, status); + } +} + +TEST_F(UtestBroadCastArgsKernel, UnknowShapeFail) { + vector dims_vec_0 = {-1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {-1}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddInputDesc(tensor_desc_1); + + GeTensorDesc tensor_desc_out_1(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out_1); + + vector data_vec_0 = {2, 1, 4, 1, 2}; + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + + vector data_vec_1 = {2, 2, 1, 3, 1}; + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + + vector input = {tensor_0, tensor_1}; + + std::vector v_output; + + auto kernel_ptr = KernelFactory::Instance().Create(BROADCASTARGS); + if (kernel_ptr != nullptr) { + Status status = kernel_ptr->Compute(op_desc_ptr_, input, v_output); + EXPECT_EQ(ge::PARAM_INVALID, status); + } +} + +TEST_F(UtestBroadCastArgsKernel, CheckOutputNormal) { + string op_type = BROADCASTARGS; + vector> i_shape_dims({ + {2}, + {5}, + }); + vector> i_data({ + {1, 1}, + {2, 2, 1, 3, 1}, + }); + + vector> o_shape_dims({ + {5}, + }); + vector> o_data({ + {2, 2, 1, 3, 1}, + }); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, i_shape_dims, i_data, DT_INT64, o_shape_dims, + o_data, DT_INT64); + EXPECT_EQ(result, true); +} + 
+TEST_F(UtestBroadCastArgsKernel, CheckOutputNormalInt32) { + string op_type = BROADCASTARGS; + vector> i_shape_dims({ + {2}, + {5}, + }); + vector> i_data({ + {1, 1}, + {2, 2, 1, 3, 1}, + }); + + vector> o_shape_dims({ + {5}, + }); + vector> o_data({ + {2, 2, 1, 3, 1}, + }); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, i_shape_dims, i_data, DT_INT32, o_shape_dims, + o_data, DT_INT32); + EXPECT_EQ(result, true); +} + +TEST_F(UtestBroadCastArgsKernel, CheckOutputInputsSame) { + string op_type = BROADCASTARGS; + vector> i_shape_dims({ + {5}, + {5}, + }); + vector> i_data({ + {2, 2, 1, 3, 1}, + {2, 2, 1, 3, 1}, + }); + + vector> o_shape_dims({ + {5}, + }); + vector> o_data({ + {2, 2, 1, 3, 1}, + }); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, i_shape_dims, i_data, DT_INT64, o_shape_dims, + o_data, DT_INT64); + EXPECT_EQ(result, true); +} + +TEST_F(UtestBroadCastArgsKernel, CheckOutputInputsOneScalar) { + string op_type = BROADCASTARGS; + vector> i_shape_dims({ + {1}, + {3}, + }); + vector> i_data({ + {5}, + {2, 3, 5}, + }); + + vector> o_shape_dims({ + {3}, + }); + vector> o_data({ + {2, 3, 5}, + }); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, i_shape_dims, i_data, DT_INT64, o_shape_dims, + o_data, DT_INT64); + EXPECT_EQ(result, true); +} +TEST_F(UtestBroadCastArgsKernel, CheckOutputInputsBothScalar) { + string op_type = BROADCASTARGS; + vector> i_shape_dims({ + {1}, + {1}, + }); + vector> i_data({ + {4}, + {1}, + }); + + vector> o_shape_dims({ + {1}, + }); + vector> o_data({ + {4}, + }); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, i_shape_dims, i_data, DT_INT64, o_shape_dims, + o_data, DT_INT64); + EXPECT_EQ(result, true); +} + +TEST_F(UtestBroadCastArgsKernel, GetShapeDataFromConstTensorFail) { + OpDescPtr op_desc_ptr = std::make_shared("BroadcastArgs", BROADCASTARGS); + vector is_input_const_vec = { + true, + true, + }; + 
op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {}; + vector data_vec_0 = {}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + op_desc_ptr->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {5}; + vector data_vec_1 = {0, 1, 4, 1, 2}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddOutputDesc(GeTensorDesc()); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + std::shared_ptr kernel = ge::KernelFactory::Instance().Create(BROADCASTARGS); + + auto kernel_ptr = KernelFactory::Instance().Create(BROADCASTARGS); + if (kernel_ptr != nullptr) { + Status status = kernel_ptr->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); + } +} + +TEST_F(UtestBroadCastArgsKernel, GenerateBcastInfoFailed) { + OpDescPtr op_desc_ptr = std::make_shared("BroadcastArgs", BROADCASTARGS); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {5}; + vector data_vec_0 = {-1, 0, 4, 1, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + op_desc_ptr->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {5}; + vector data_vec_1 = {1, 1, 4, 1, 2}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t 
*)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddOutputDesc(GeTensorDesc()); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + std::shared_ptr kernel = ge::KernelFactory::Instance().Create(BROADCASTARGS); + if (kernel != nullptr) { + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); + } +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel_unittest.cc new file mode 100644 index 00000000..904e5cb7 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/broadcast_gradient_args_kernel_unittest.cc @@ -0,0 +1,298 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/broadcast_gradient_args_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "folding_kernel_unittest_utils.h" +#include "framework/common/ge_inner_error_codes.h" +#include "ge/ge_api.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/operator.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; +using namespace cce; + +class UtestBroadcastGradientArgsKernel : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { + pass_ = new ConstantFoldingPass(); + graph_ = std::make_shared("default"); + op_desc_ptr_ = std::make_shared("broadcast_gradient_args", BROADCASTGRADIENTARGS); + node_ = std::make_shared(op_desc_ptr_, graph_); + kernel_ = KernelFactory::Instance().Create(BROADCASTGRADIENTARGS); + } + void destory() { + delete pass_; + pass_ = NULL; + } + + protected: + ConstantFoldingPass *pass_; + + ge::ComputeGraphPtr graph_; + OpDescPtr op_desc_ptr_; + NodePtr node_; + shared_ptr kernel_; +}; + +TEST_F(UtestBroadcastGradientArgsKernel, SizeCheckFail) { + vector dims_vec_0 = {8, 2}; + vector data_vec_0 = {2, 1, 4, 1, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), 
FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + vector input = {tensor_0}; + + std::vector outputs; + Status status = kernel_->Compute(op_desc_ptr_, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} +TEST_F(UtestBroadcastGradientArgsKernel, CheckOutputNormal) { + string op_type = BROADCASTGRADIENTARGS; + vector> input_shape_dims({ + {4}, + {3}, + }); + vector> input_data({ + {2, 1, 5, 3}, + {4, 5, 1}, + }); + + vector> output_shape_dims({ + {1}, + {2}, + }); + vector> output_data({{1}, {0, 3}}); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT64, + output_shape_dims, output_data, DT_INT64); + EXPECT_EQ(result, true); +} +TEST_F(UtestBroadcastGradientArgsKernel, CheckOutputNormalInt32) { + string op_type = BROADCASTGRADIENTARGS; + vector> input_shape_dims({ + {4}, + {3}, + }); + vector> input_data({ + {2, 1, 5, 3}, + {4, 5, 1}, + }); + + vector> output_shape_dims({ + {1}, + {2}, + }); + vector> output_data({{1}, {0, 3}}); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT32, + output_shape_dims, output_data, DT_INT32); + EXPECT_EQ(result, true); +} + +TEST_F(UtestBroadcastGradientArgsKernel, CheckOutputInputsSame) { + string op_type = BROADCASTGRADIENTARGS; + vector> input_shape_dims({ + {4}, + {4}, + }); + vector> input_data({ + {2, 1, 5, 3}, + {2, 1, 5, 3}, + }); + + vector> output_shape_dims({ + {1}, + {1}, + }); + vector> output_data({{1}, {1}}); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT64, + output_shape_dims, output_data, DT_INT64); + EXPECT_EQ(result, true); +} + +TEST_F(UtestBroadcastGradientArgsKernel, CheckOutputInputsSameEmptyOut) { + string op_type = BROADCASTGRADIENTARGS; + vector> input_shape_dims({ + {4}, + {4}, + }); + vector> input_data({ + {2, 3, 5, 3}, + {2, 3, 5, 3}, + }); + + vector> output_shape_dims({ + {}, + {}, + }); + 
vector> output_data({{}, {}}); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT64, + output_shape_dims, output_data, DT_INT64); + EXPECT_EQ(result, true); +} + +TEST_F(UtestBroadcastGradientArgsKernel, CheckOutputInputsOneScalar) { + string op_type = BROADCASTGRADIENTARGS; + vector> input_shape_dims({ + {4}, + {1}, + }); + vector> input_data({ + {2, 1, 5, 3}, + {1}, + }); + + vector> output_shape_dims({ + {1}, + {4}, + }); + vector> output_data({{1}, {0, 1, 2, 3}}); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT64, + output_shape_dims, output_data, DT_INT64); + EXPECT_EQ(result, true); +} +TEST_F(UtestBroadcastGradientArgsKernel, CheckOutputInputsBothScalar) { + string op_type = BROADCASTGRADIENTARGS; + vector> input_shape_dims({ + {1}, + {1}, + }); + vector> input_data({ + {3}, + {1}, + }); + + vector> output_shape_dims({ + {}, + {1}, + }); + vector> output_data({{}, {0}}); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT64, + output_shape_dims, output_data, DT_INT64); + EXPECT_EQ(result, true); +} + +TEST_F(UtestBroadcastGradientArgsKernel, GetShapeDataFromConstTensorFail) { + OpDescPtr op_desc_ptr = std::make_shared("BroadcastGradientArgs", BROADCASTGRADIENTARGS); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {}; + vector data_vec_0 = {}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + op_desc_ptr->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {5}; + vector data_vec_1 = {0, 1, 4, 1, 2}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + 
ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddOutputDesc(GeTensorDesc()); + op_desc_ptr->AddOutputDesc(GeTensorDesc()); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(BROADCASTGRADIENTARGS); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestBroadcastGradientArgsKernel, GenerateBcastInfoFail) { + OpDescPtr op_desc_ptr = std::make_shared("BroadcastGradientArgs", BROADCASTGRADIENTARGS); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {5}; + vector data_vec_0 = {-1, 0, 4, 1, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + op_desc_ptr->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {5}; + vector data_vec_1 = {1, 1, 4, 1, 2}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddOutputDesc(GeTensorDesc()); + op_desc_ptr->AddOutputDesc(GeTensorDesc()); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(BROADCASTGRADIENTARGS); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::PARAM_INVALID, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/cast_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/cast_kernel_unittest.cc new file mode 100644 
index 00000000..2a07236e --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/cast_kernel_unittest.cc @@ -0,0 +1,219 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/cast_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; +using ge::SHAPE; + +class UtestGraphPassesFoldingKernelCastKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelCastKernel, ComputeParamInvalid1) { + OpDescPtr op_desc_ptr = std::make_shared("Cast", "Cast"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = 
{1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = {}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CAST); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelCastKernel, ComputeParamInvalid2) { + OpDescPtr op_desc_ptr = nullptr; + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CAST); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelCastKernel, ComputeSuccessFloatToFloat16) { + OpDescPtr op_desc_ptr = std::make_shared("Cast", "Cast"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT16); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CAST); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelCastKernel, ComputeSuccessInt64ToInt32) { + 
OpDescPtr op_desc_ptr = std::make_shared("Cast", "Cast"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT64); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CAST); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelCastKernel, ComputeFloatToFloat16Fail) { + OpDescPtr op_desc_ptr = std::make_shared("Cast", "Cast"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT16); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CAST); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelCastKernel, ComputeFloatToFloadt16Fail2) { + OpDescPtr op_desc_ptr = std::make_shared("Cast", "Cast"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT16); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, 
DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int8_t)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CAST); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelCastKernel, ComputeNotSupport) { + OpDescPtr op_desc_ptr = std::make_shared("Cast", "Cast"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT8); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CAST); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelCastKernel, ComputeShapeEmptySuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Cast", "Cast"); + GeTensorDesc dims_tensor_desc(GeShape(), FORMAT_NCHW, DT_FLOAT16); + GeTensorDesc dims_tensor_desc_in(GeShape(), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + 
ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CAST); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/concat_offset_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/concat_offset_kernel_unittest.cc new file mode 100644 index 00000000..3328af90 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/concat_offset_kernel_unittest.cc @@ -0,0 +1,115 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/concat_offset_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelConcatOffsetKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelConcatOffsetKernel, CheckAttrFail) { + OpDescPtr op_desc_ptr = std::make_shared("ConcatOffset", "ConcatOffset"); + + vector input = {}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CONCATOFFSET); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelConcatOffsetKernel, CheckInputSize) { + OpDescPtr op_desc_ptr = std::make_shared("ConcatOffset", "ConcatOffset"); + AttrUtils::SetInt(op_desc_ptr, "N", 2); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + 
std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CONCATOFFSET); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelConcatOffsetKernel, ComputeSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("ConcatOffset", "ConcatOffset"); + (void)AttrUtils::SetInt(op_desc_ptr, "N", 3); + GeTensorDesc dims_tensor_desc(GeShape({0, 0, 0, 0}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + + vector dims_vec_0 = {0}; + vector data_vec_0 = {0, 0, 0, 0}; + vector data_vec_1 = {1, 1, 1, 1}; + vector data_vec_2 = {1, 0, 0, 0}; + GeTensorDesc tensor_desc_0(GeShape({0}), FORMAT_ND, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_dim = + std::make_shared(tensor_desc_0, (uint8_t *)dims_vec_0.data(), dims_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_dim, tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CONCATOFFSET); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/concat_v2_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/concat_v2_kernel_unittest.cc new file mode 
100644 index 00000000..f10bdea4 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/concat_v2_kernel_unittest.cc @@ -0,0 +1,290 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/concat_v2_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelConcatV2Kernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelConcatV2Kernel, CheckParam) { + OpDescPtr op_desc_ptr = nullptr; + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, 
(uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CONCATV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelConcatV2Kernel, CheckInputSize) { + OpDescPtr op_desc_ptr = std::make_shared("Concat", "Concat"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CONCATV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelConcatV2Kernel, Check1) { + OpDescPtr op_desc_ptr = std::make_shared("Concat", "Concat"); + GeTensorDesc dims_tensor_desc(GeShape({0, 0, 0, 0}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + + vector dims_vec_0 = {0, 0, 0, 0}; + vector data_vec_0 = {0, 0, 0, 0}; + vector data_vec_1 = {1, 1, 1, 1}; + vector data_vec_2 = {0, 0, 0, 0}; + GeTensorDesc tensor_desc_0(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + GeTensorDesc tensor_desc_1(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + GeTensorDesc tensor_desc_2(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = 
+ std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(CONCATV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelConcatV2Kernel, CheckInt32Success) { + OpDescPtr op_desc_ptr = std::make_shared("ConcatV2", "ConcatV2"); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2 = {1}; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(CONCATV2); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + + GeTensorPtr out = 
outputs[0]; + vector data_y = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + EXPECT_EQ(out->GetData().size(), 48); + size_t one_size = sizeof(int32_t); + size_t out_nums = out->GetData().size() / one_size; + for (size_t i = 0; i < out_nums; i++) { + int32_t *one_val = (int32_t *)(out->GetData().data() + i * one_size); + EXPECT_EQ(data_y[i], *one_val); + } +} + +TEST_F(UtestGraphPassesFoldingKernelConcatV2Kernel, CheckInt32Success1) { + OpDescPtr op_desc_ptr = std::make_shared("ConcatV2", CONCATV2); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {1}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(CONCATV2); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + + GeTensorPtr out = outputs[0]; + vector data_y = {1, 2, 3, 7, 8, 9, 4, 5, 6, 10, 11, 12}; + EXPECT_EQ(out->GetData().size(), 48); + size_t one_size = sizeof(int32_t); + size_t out_nums = out->GetData().size() / one_size; + for (size_t i = 0; i < out_nums; i++) { + int32_t 
*one_val = (int32_t *)(out->GetData().data() + i * one_size); + EXPECT_EQ(data_y[i], *one_val); + } +} + +TEST_F(UtestGraphPassesFoldingKernelConcatV2Kernel, CheckFloatSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("ConcatV2", CONCATV2); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1.12, 2.12, 3.12, 4.12, 5.12, 6.12}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {7.12, 8.12, 9.12, 10.12, 11.12, 12.13}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(CONCATV2); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 48); // 12*4 + + GeTensorPtr out = outputs[0]; + vector data_y = {1.12, 2.12, 3.12, 4.12, 5.12, 6.12, 7.12, 8.12, 9.12, 10.12, 11.12, 12.13}; + EXPECT_EQ(out->GetData().size(), 48); + size_t one_size = sizeof(float); + size_t out_nums = out->GetData().size() / one_size; + for (size_t i = 0; i < out_nums; i++) { + float *one_val = (float *)(out->GetData().data() + i * one_size); + EXPECT_EQ(data_y[i], *one_val); + } +} + 
+TEST_F(UtestGraphPassesFoldingKernelConcatV2Kernel, CheckNotChange) { + OpDescPtr op_desc_ptr = std::make_shared("ConcatV2", CONCATV2); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1.12, 2.12, 3.12, 4.12, 5.12, 6.12}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + ConstGeTensorPtr tensor_6 = nullptr; + vector input_less = {tensor_0, tensor_2}; + vector input_diff = {tensor_0, tensor_1, tensor_2}; + vector input_null = {tensor_0, tensor_6, tensor_2}; + vector input_not_support = {tensor_0, tensor_0, tensor_2}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(CONCATV2); + Status status = kernel->Compute(op_desc_ptr, input_diff, outputs); + EXPECT_EQ(status, NOT_CHANGED); + EXPECT_EQ(outputs.size(), 0); + + status = kernel->Compute(op_desc_ptr, input_less, outputs); + EXPECT_EQ(status, NOT_CHANGED); + + status = kernel->Compute(op_desc_ptr, input_null, outputs); + EXPECT_EQ(status, NOT_CHANGED); + + status = kernel->Compute(op_desc_ptr, input_not_support, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} diff --git 
a/tests/ut/ge/graph/passes/folding_kernel/dynamic_stitch_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/dynamic_stitch_kernel_unittest.cc new file mode 100644 index 00000000..7b51b893 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/dynamic_stitch_kernel_unittest.cc @@ -0,0 +1,321 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/dynamic_stitch_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelDynamicStitchKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelDynamicStitchKernel, IndiceFloatSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("dynamicstitch", "DynamicStitch"); + 
AttrUtils::SetInt(op_desc_ptr, "DynamicStitchN_", (int64_t)2); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {0, 1, 2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2}; + vector data_vec_1 = {5, 4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2 = {4}; + vector data_vec_2 = {4, 3, 5, 6}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(float)); + + vector dims_vec_3 = {2}; + vector data_vec_3 = {7, 8}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(float)); + + op_desc_ptr->AddInputDesc(tensor_desc_0); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddInputDesc(tensor_desc_2); + op_desc_ptr->AddInputDesc(tensor_desc_3); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(DYNAMICSTITCH); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, ge::SUCCESS); + float *output_data = const_cast(reinterpret_cast(outputs[0]->GetData().data())); + EXPECT_FLOAT_EQ(output_data[0], 4); + EXPECT_FLOAT_EQ(output_data[1], 3); + EXPECT_FLOAT_EQ(output_data[2], 5); + EXPECT_FLOAT_EQ(output_data[3], 6); + EXPECT_FLOAT_EQ(output_data[4], 8); + EXPECT_FLOAT_EQ(output_data[5], 7); +} + 
+TEST_F(UtestGraphPassesFoldingKernelDynamicStitchKernel, ScalerIndiceDoubleSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("dynamicstitch", "DynamicStitch"); + AttrUtils::SetInt(op_desc_ptr, "DynamicStitchN_", (int64_t)2); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {0, 1, 2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {}; + vector data_vec_1 = {4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2 = {4}; + vector data_vec_2 = {4, 3, 5, 6}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(double)); + + vector dims_vec_3 = {}; + vector data_vec_3 = {1}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(double)); + + op_desc_ptr->AddInputDesc(tensor_desc_0); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddInputDesc(tensor_desc_2); + op_desc_ptr->AddInputDesc(tensor_desc_3); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(DYNAMICSTITCH); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, ge::SUCCESS); + double *output_data = const_cast(reinterpret_cast(outputs[0]->GetData().data())); + EXPECT_DOUBLE_EQ(output_data[0], 4); + EXPECT_DOUBLE_EQ(output_data[1], 3); + 
EXPECT_DOUBLE_EQ(output_data[2], 5); + EXPECT_DOUBLE_EQ(output_data[3], 6); + EXPECT_DOUBLE_EQ(output_data[4], 1); +} + +TEST_F(UtestGraphPassesFoldingKernelDynamicStitchKernel, UnsupportedDataType) { + OpDescPtr op_desc_ptr = std::make_shared("dynamicstitch", "DynamicStitch"); + AttrUtils::SetInt(op_desc_ptr, "DynamicStitchN_", (int64_t)2); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {0, 1, 2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {}; + vector data_vec_1 = {4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2 = {4}; + vector data_vec_2 = {"4", "3", "5", "6"}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_STRING); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(string)); + + vector dims_vec_3 = {}; + vector data_vec_3 = {"1"}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_STRING); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(string)); + + op_desc_ptr->AddInputDesc(tensor_desc_0); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddInputDesc(tensor_desc_2); + op_desc_ptr->AddInputDesc(tensor_desc_3); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(DYNAMICSTITCH); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} + 
+TEST_F(UtestGraphPassesFoldingKernelDynamicStitchKernel, ValidateParamFail) { + // op desc ptr is null + vector empty_input; + vector empty_output; + OpDescPtr op_desc_ptr = nullptr; + shared_ptr kernel = KernelFactory::Instance().Create(DYNAMICSTITCH); + Status status = kernel->Compute(nullptr, empty_input, empty_output); + EXPECT_EQ(status, ge::PARAM_INVALID); + // input is empty + op_desc_ptr = std::make_shared("dynamicstitch", "DynamicStitch"); + status = kernel->Compute(op_desc_ptr, empty_input, empty_output); + EXPECT_EQ(status, NOT_CHANGED); + + // attr N is not exist + vector dims_vec_0 = {4}; + vector data_vec_0 = {0, 1, 2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_2 = {4}; + vector data_vec_2 = {4, 3, 5, 6}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + status = kernel->Compute(op_desc_ptr, input, empty_output); + EXPECT_EQ(status, NOT_CHANGED); + + AttrUtils::SetInt(op_desc_ptr, "DynamicStitchN_", (int64_t)4); + status = kernel->Compute(op_desc_ptr, input, empty_output); + EXPECT_EQ(status, NOT_CHANGED); +} + +TEST_F(UtestGraphPassesFoldingKernelDynamicStitchKernel, RepeatedIndiceInt32Success) { + OpDescPtr op_desc_ptr = std::make_shared("dynamicstitch", "DynamicStitch"); + AttrUtils::SetInt(op_desc_ptr, "DynamicStitchN_", (int64_t)2); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {0, 1, 2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 
data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2}; + vector data_vec_1 = {1, 2}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2 = {4}; + vector data_vec_2 = {4, 3, 5, 6}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector dims_vec_3 = {2}; + vector data_vec_3 = {7, 8}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + op_desc_ptr->AddInputDesc(tensor_desc_0); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddInputDesc(tensor_desc_2); + op_desc_ptr->AddInputDesc(tensor_desc_3); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(DYNAMICSTITCH); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, ge::SUCCESS); + int32_t *output_data = const_cast(reinterpret_cast(outputs[0]->GetData().data())); + EXPECT_EQ(output_data[0], 4); + EXPECT_EQ(output_data[1], 7); + EXPECT_EQ(output_data[2], 8); + EXPECT_EQ(output_data[3], 6); +} + +TEST_F(UtestGraphPassesFoldingKernelDynamicStitchKernel, RepeatedIndiceInt64Success) { + OpDescPtr op_desc_ptr = std::make_shared("dynamicstitch", "DynamicStitch"); + AttrUtils::SetInt(op_desc_ptr, "DynamicStitchN_", (int64_t)2); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {0, 1, 2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + 
std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2}; + vector data_vec_1 = {1, 2}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2 = {4}; + vector data_vec_2 = {4, 3, 5, 6}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT64); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int64_t)); + + vector dims_vec_3 = {2}; + vector data_vec_3 = {7, 8}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT64); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int64_t)); + + op_desc_ptr->AddInputDesc(tensor_desc_0); + op_desc_ptr->AddInputDesc(tensor_desc_1); + op_desc_ptr->AddInputDesc(tensor_desc_2); + op_desc_ptr->AddInputDesc(tensor_desc_3); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(DYNAMICSTITCH); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, ge::SUCCESS); + int64_t *output_data = const_cast(reinterpret_cast(outputs[0]->GetData().data())); + EXPECT_EQ(output_data[0], 4); + EXPECT_EQ(output_data[1], 7); + EXPECT_EQ(output_data[2], 8); + EXPECT_EQ(output_data[3], 6); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/empty_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/empty_kernel_unittest.cc new file mode 100644 index 00000000..bb33582f --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/empty_kernel_unittest.cc @@ -0,0 +1,280 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file 
except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/empty_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "folding_kernel_unittest_utils.h" +#include "framework/common/ge_inner_error_codes.h" +#include "ge/ge_api.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/operator.h" +#include "graph/passes/constant_folding_pass.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; +using namespace cce; +using namespace ge::test; + +class UtestEmptyKernel : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { + pass_ = new ConstantFoldingPass(); + graph_ = std::make_shared("default"); + op_desc_ptr_ = std::make_shared("Empty", EMPTY); + node_ = std::make_shared(op_desc_ptr_, graph_); + } + void destory() { + delete pass_; + pass_ = NULL; + } + + protected: + template + bool TestOtherDataType(DataType in_type, DataType out_type) { + string op_type = EMPTY; + vector> input_shape_dims({ + {5}, + }); + vector> input_data({ + {2, 2, 1, 3, 1}, 
+ }); + + vector> output_shape_dims({ + {2, 2, 1, 3, 1}, + }); + vector> output_data({ + {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + }); + + return ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, in_type, + output_shape_dims, output_data, out_type); + } + ConstantFoldingPass *pass_; + + ge::ComputeGraphPtr graph_; + OpDescPtr op_desc_ptr_; + NodePtr node_; +}; + +TEST_F(UtestEmptyKernel, ShapeDimCheckFail) { + vector dims_vec_0 = {8, 2}; + vector data_vec_0 = {2, 1, 4, 1, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + vector input = {tensor_0}; + + std::vector outputs; + auto kernel_ptr = KernelFactory::Instance().Create(EMPTY); + if (kernel_ptr != nullptr) { + Status status = kernel_ptr->Compute(op_desc_ptr_, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); + } +} + +TEST_F(UtestEmptyKernel, ShapeDataTypeCheclFail) { + vector dims_vec_0 = {5}; + vector data_vec_0 = {2, 1, 4, 1, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + vector input = {tensor_0}; + + std::vector outputs; + auto kernel_ptr = KernelFactory::Instance().Create(EMPTY); + if (kernel_ptr != nullptr) { + Status status = kernel_ptr->Compute(op_desc_ptr_, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); + } +} + +TEST_F(UtestEmptyKernel, DtypeCheckFail) { + vector dims_vec_0 = {5}; + vector data_vec_0 = {2, 1, 4, 1, 2}; + 
GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_QINT8); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + vector input = {tensor_0}; + + std::vector outputs; + auto kernel_ptr = KernelFactory::Instance().Create(EMPTY); + if (kernel_ptr != nullptr) { + Status status = kernel_ptr->Compute(op_desc_ptr_, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); + } +} + +TEST_F(UtestEmptyKernel, SizeCheckFail) { + vector dims_vec_0 = {-1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {-1}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddInputDesc(tensor_desc_1); + + GeTensorDesc tensor_desc_out_1(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out_1); + + vector data_vec_0 = {2, 1, 4, 1, 2}; + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + + vector data_vec_1 = {2, 2, 1, 3, 1}; + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + + vector input = {tensor_0, tensor_1}; + + std::vector outputs; + + auto kernel_ptr = KernelFactory::Instance().Create(EMPTY); + if (kernel_ptr != nullptr) { + Status status = kernel_ptr->Compute(op_desc_ptr_, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); + } +} + +TEST_F(UtestEmptyKernel, CheckOutputNormalIn64Out64) { + string op_type = EMPTY; + vector> input_shape_dims({ + {5}, + }); + vector> input_data({ + {2, 2, 1, 3, 1}, + }); + + vector> output_shape_dims({ + {2, 2, 1, 3, 1}, + }); + vector> output_data({ + {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 
0}, + }); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT64, + output_shape_dims, output_data, DT_INT64); + EXPECT_EQ(result, true); +} +TEST_F(UtestEmptyKernel, CheckOutputNormalIn32Out64) { + string op_type = EMPTY; + vector> input_shape_dims({ + {5}, + }); + vector> input_data({ + {2, 2, 1, 3, 1}, + }); + + vector> output_shape_dims({ + {2, 2, 1, 3, 1}, + }); + vector> output_data({ + {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + }); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT32, + output_shape_dims, output_data, DT_INT64); + EXPECT_EQ(result, true); +} + +TEST_F(UtestEmptyKernel, CheckOutputNormalIn64Out32) { + string op_type = EMPTY; + vector> input_shape_dims({ + {5}, + }); + vector> input_data({ + {2, 2, 1, 3, 1}, + }); + + vector> output_shape_dims({ + {2, 2, 1, 3, 1}, + }); + vector> output_data({ + {0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0}, + }); + + bool result = ge::test::ConstFoldingKernelCheckShapeAndOutput(op_type, input_shape_dims, input_data, DT_INT64, + output_shape_dims, output_data, DT_INT32); + EXPECT_EQ(result, true); +} + +TEST_F(UtestEmptyKernel, CheckOutputNormalOtherType) { + bool result = false; + +#define TESTBYTYPE(data_type, T) \ + result = TestOtherDataType(DT_INT32, data_type); \ + EXPECT_EQ(result, true); + + TESTBYTYPE(DT_FLOAT, float) + TESTBYTYPE(DT_INT8, int8_t) + TESTBYTYPE(DT_INT16, int16_t) + TESTBYTYPE(DT_UINT16, uint16_t) + TESTBYTYPE(DT_UINT8, uint8_t) + TESTBYTYPE(DT_INT32, int32_t) + TESTBYTYPE(DT_INT64, int64_t) + TESTBYTYPE(DT_UINT32, uint32_t) + TESTBYTYPE(DT_UINT64, uint64_t) + TESTBYTYPE(DT_BOOL, bool) + TESTBYTYPE(DT_DOUBLE, double) +#undef TESTBYTYPE +} \ No newline at end of file diff --git a/tests/ut/ge/graph/passes/folding_kernel/expanddims_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/expanddims_kernel_unittest.cc new file mode 100644 index 00000000..cc6170c7 --- /dev/null +++ 
b/tests/ut/ge/graph/passes/folding_kernel/expanddims_kernel_unittest.cc @@ -0,0 +1,459 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/expanddims_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelExpandDimsKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + template + void TestExpandDims(vector &data_vec, vector &dim_value_vec, vector &result) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, data_type); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * 
sizeof(inner_data_type)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + GeTensorDesc dim_tensor_desc(ge::GeShape(), FORMAT_NCHW, dim_type); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(inner_dim_type)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", EXPANDDIMS); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(EXPANDDIMS); + Status status = kernel->Compute(op_node); + EXPECT_EQ(ge::SUCCESS, status); + } + + template + void TestInvalidExpandDims(vector &data_vec, vector &dim_value_vec, + vector &result) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), format, data_type); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(inner_data_type)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + 
NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + GeTensorDesc dim_tensor_desc(ge::GeShape(), format, dim_type); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(inner_dim_type)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", EXPANDDIMS); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(EXPANDDIMS); + Status status = kernel->Compute(op_node); + EXPECT_NE(ge::SUCCESS, status); + + vector input = {data_tensor}; + vector outputs; + status = kernel->Compute(op_node->GetOpDesc(), input, outputs); + EXPECT_EQ(NOT_CHANGED, status); + } +}; + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int8Int32Case0) { + vector data_vec = {2, 3}; + vector dim_value_vec = {0}; + vector result = {1, 2, 3}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int8Int32Case1) { + vector data_vec = {2, 3}; + vector dim_value_vec = {1}; + vector result = {2, 1, 3}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int8Int32Case2) { + vector data_vec = {2, 3}; + vector dim_value_vec = {2}; + vector result = {2, 3, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + 
+TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int8Int32NegativeCase1) { + vector data_vec = {2, 3}; + vector dim_value_vec = {-3}; + vector result = {1, 2, 3}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int8Int32NegativeCase2) { + vector data_vec = {2, 3}; + vector dim_value_vec = {-2}; + vector result = {2, 1, 3}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int8Int32NegativeCase3) { + vector data_vec = {2, 3}; + vector dim_value_vec = {-1}; + vector result = {2, 3, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int16Int32) { + vector data_vec = {3}; + vector dim_value_vec = {-1}; + vector result = {3, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int32Int32) { + vector data_vec = {3, 3, 3, 5, 6}; + vector dim_value_vec = {3}; + vector result = {3, 3, 3, 1, 5, 6}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Int64Int32) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {7}; + vector result = {6, 1, 12, 3, 4, 56, 7, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Uint8Int32) { + vector data_vec = {2, 3}; + vector dim_value_vec = {-1}; + vector result = {2, 3, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Uint16Int32) { + vector data_vec = {3}; + vector dim_value_vec = {-1}; + vector result = {3, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Uint32Int32) { + vector data_vec = {3, 3, 3, 5, 6}; + vector dim_value_vec = {3}; + vector result = {3, 3, 3, 1, 5, 6}; + TestExpandDims(data_vec, dim_value_vec, 
result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Uint64Int32) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {7}; + vector result = {6, 1, 12, 3, 4, 56, 7, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, Fp16Int32) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {7}; + vector result = {6, 1, 12, 3, 4, 56, 7, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, FloatInt32) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {7}; + vector result = {1, 1, 1, 11, 1, 1, 1, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, DoubleInt32) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {7}; + vector result = {1, 1, 1, 11, 1, 1, 1, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, DoubleInt64) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {7}; + vector result = {1, 1, 1, 11, 1, 1, 1, 1}; + TestExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, BoolInt64) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {7}; + vector result = {1, 1, 1, 11, 1, 1, 1, 1}; + + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_BOOL); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(uint8_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + 
data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + GeTensorDesc dim_tensor_desc(ge::GeShape(), FORMAT_NCHW, DT_INT64); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(int64_t)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", EXPANDDIMS); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(EXPANDDIMS); + Status status = kernel->Compute(op_node); + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, InvalidFormat) { + vector data_vec = {2, 3, 4}; + vector dim_value_vec = {0}; + vector result = {1, 2, 3, 4}; + + TestInvalidExpandDims(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, NodeIsNull) { + NodePtr op_node = nullptr; + shared_ptr kernel = KernelFactory::Instance().Create(EXPANDDIMS); + Status status = kernel->Compute(op_node); + EXPECT_NE(domi::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, InvalidInputNodeSize) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {7}; + vector result = {1, 1, 1, 11, 1, 1, 1, 1}; + + ge::ComputeGraphPtr graph = std::make_shared("default"); + ge::OpDescPtr data_op_desc = 
std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_BOOL); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(uint8_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + GeTensorDesc dim_tensor_desc(ge::GeShape(), FORMAT_NCHW, DT_INT64); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(int64_t)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", EXPANDDIMS); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + + shared_ptr kernel = KernelFactory::Instance().Create(EXPANDDIMS); + Status status = kernel->Compute(op_node); + EXPECT_NE(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, DimNodeNotContainWeight) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {7}; + vector result = {1, 1, 1, 11, 1, 1, 1, 1}; + + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t 
&data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_INT32); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", "dim"); + GeTensorDesc dim_tensor_desc(ge::GeShape(), FORMAT_NCHW, DT_INT32); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", EXPANDDIMS); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(EXPANDDIMS); + Status status = kernel->Compute(op_node); + EXPECT_NE(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, FoldingInt64Success) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {7}; + vector result = {1, 1, 1, 11, 1, 1, 1, 1}; + + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector 
data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_BOOL); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(uint8_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + GeTensorDesc dim_tensor_desc(ge::GeShape(), FORMAT_NCHW, DT_INT64); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(int64_t)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Expanddims", EXPANDDIMS); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + expanddim_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(EXPANDDIMS); + + vector input = {data_tensor, dim_tensor}; + vector outputs; + Status status = kernel->Compute(op_node->GetOpDesc(), input, outputs); + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelExpandDimsKernel, OpdescInvalidCauseFailure) { + ge::OpDescPtr null_op_desc = nullptr; + vector input = {}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(EXPANDDIMS); + Status status = kernel->Compute(null_op_desc, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} diff --git 
a/tests/ut/ge/graph/passes/folding_kernel/fill_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/fill_kernel_unittest.cc new file mode 100644 index 00000000..8aee0e5a --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/fill_kernel_unittest.cc @@ -0,0 +1,343 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/fill_kernel.h" + +#include "common/debug/log.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; +using ge::SUCCESS; + +class UtestGraphPassesFoldingKernelFillKernel : public testing::Test { + protected: + void SetUp() { + graph = std::make_shared("default"); + op_desc_ptr = std::make_shared("Fill", FILL); + node = std::make_shared(op_desc_ptr, graph); + kernel = KernelFactory::Instance().Create(FILL); + } + + void TearDown() {} + + template + void TestShape2And3(DataType type, DataType dim_type = DT_INT32) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {2, 3}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, dim_type); + GeTensorPtr dim_tensor = std::make_shared(dims_tensor_desc, 
(uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(DimType)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, type); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(T)); + OpDescUtils::SetWeights(op_value, value_tensor); + + op_desc_ptr->AddInputDesc(dims_tensor_desc); + op_desc_ptr->AddInputDesc(value_tensor_desc); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetDataType(), type); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 2); + for (int i = 0; i < 2 * 3; i++) { + uint8_t *ptr = (uint8_t *)outputs[0]->GetData().data(); + EXPECT_TRUE(memcmp(ptr + i * sizeof(T), (void *)&data_vec[0], sizeof(T)) == 0); + } + } + + ge::ComputeGraphPtr graph; + OpDescPtr op_desc_ptr; + NodePtr node; + shared_ptr kernel; +}; + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillFloatShape2And3) { TestShape2And3(DT_FLOAT); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, fill_int8_shape_2_3) { TestShape2And3(DT_INT8); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillInt16Shape2And3) { TestShape2And3(DT_INT16); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillInt32Shape2And3) { TestShape2And3(DT_INT32); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillInt64Shape2And3) { TestShape2And3(DT_INT64); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillUint8Shape2And3) { TestShape2And3(DT_UINT8); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillUint16Shape2And3) { TestShape2And3(DT_UINT16); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillUint32Shape2And3) { TestShape2And3(DT_UINT32); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, 
FillUint64Shape2And3) { TestShape2And3(DT_UINT64); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillDoubleShape2And3) { TestShape2And3(DT_DOUBLE); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillFloat16Shape2And3) { TestShape2And3(DT_FLOAT16); } + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillBoolShape2And3) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {2, 3}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT32); + GeTensorPtr dim_tensor = std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, DT_BOOL); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(bool)); + OpDescUtils::SetWeights(op_value, value_tensor); + + op_desc_ptr->AddInputDesc(dims_tensor_desc); + op_desc_ptr->AddInputDesc(value_tensor_desc); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetDataType(), DT_BOOL); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 2); + for (int i = 0; i < 2 * 3; i++) { + uint8_t *ptr = (uint8_t *)outputs[0]->GetData().data(); + EXPECT_TRUE(memcmp(ptr + i * sizeof(bool), (void *)&data_vec[0], sizeof(bool)) == 0); + } +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillDimTypeInt64FloatShape2And3) { + TestShape2And3(DT_FLOAT, DT_INT64); +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillInputNumNotEqual2) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {2, 3}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT32); + 
GeTensorPtr dim_tensor = std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, DT_BOOL); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(bool)); + OpDescUtils::SetWeights(op_value, value_tensor); + + std::vector input = {dim_tensor, value_tensor, dim_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillInputValueNotScalar) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {2, 3}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT32); + GeTensorPtr dim_tensor = std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_dims_vec = {2}; + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(data_dims_vec), FORMAT_NCHW, DT_BOOL); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(bool)); + OpDescUtils::SetWeights(op_value, value_tensor); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillInputDimNotInt32Int64) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {2, 3}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT16); + GeTensorPtr dim_tensor = 
std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int16_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, DT_BOOL); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(bool)); + OpDescUtils::SetWeights(op_value, value_tensor); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillDimsHaveNegativeNumber) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {-2, 3}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT32); + GeTensorPtr dim_tensor = std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, DT_BOOL); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(bool)); + OpDescUtils::SetWeights(op_value, value_tensor); + + op_desc_ptr->AddInputDesc(dims_tensor_desc); + op_desc_ptr->AddInputDesc(value_tensor_desc); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillDataTypeNotSupport) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {2, 3}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT32); + GeTensorPtr dim_tensor = 
std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, DT_DUAL); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(bool)); + OpDescUtils::SetWeights(op_value, value_tensor); + + op_desc_ptr->AddInputDesc(dims_tensor_desc); + op_desc_ptr->AddInputDesc(value_tensor_desc); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillDimsTypeNotSupport) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {2, 3}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT8); + GeTensorPtr dim_tensor = std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int8_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, DT_DUAL); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(bool)); + OpDescUtils::SetWeights(op_value, value_tensor); + + op_desc_ptr->AddInputDesc(dims_tensor_desc); + op_desc_ptr->AddInputDesc(value_tensor_desc); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillDimsOverflow) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {9223372036854775807, 2}; + 
GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT64); + GeTensorPtr dim_tensor = std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int64_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, DT_BOOL); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(bool)); + OpDescUtils::SetWeights(op_value, value_tensor); + + op_desc_ptr->AddInputDesc(dims_tensor_desc); + op_desc_ptr->AddInputDesc(value_tensor_desc); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelFillKernel, FillDimsMulDataTypeOverflow) { + ge::OpDescPtr op_dims = std::make_shared(); + vector dims_vec = {2}; + vector dims_value_vec = {9223372036854775807, 1}; + GeTensorDesc dims_tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT64); + GeTensorPtr dim_tensor = std::make_shared(dims_tensor_desc, (uint8_t *)dims_value_vec.data(), + dims_value_vec.size() * sizeof(int64_t)); + OpDescUtils::SetWeights(op_dims, dim_tensor); + + ge::OpDescPtr op_value = std::make_shared(); + vector data_vec = {1}; + GeTensorDesc value_tensor_desc(GeShape(), FORMAT_NCHW, DT_INT32); + GeTensorPtr value_tensor = + std::make_shared(value_tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(int32_t)); + OpDescUtils::SetWeights(op_value, value_tensor); + + op_desc_ptr->AddInputDesc(dims_tensor_desc); + op_desc_ptr->AddInputDesc(value_tensor_desc); + + std::vector input = {dim_tensor, value_tensor}; + std::vector outputs; + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(PARAM_INVALID, status); +} diff --git 
a/tests/ut/ge/graph/passes/folding_kernel/floordiv_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/floordiv_kernel_unittest.cc new file mode 100644 index 00000000..7a04e912 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/floordiv_kernel_unittest.cc @@ -0,0 +1,444 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/floordiv_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassedFoldingKernelFloorDivKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, Int32VectorVectorSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 10, 7}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + 
std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {2, 9, 9, 9, 9, 9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + int32_t *out_data = (int32_t *)outputs[0]->GetData().data(); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(out_data[0], 0); + EXPECT_EQ(out_data[1], 0); + EXPECT_EQ(out_data[2], 3); + EXPECT_EQ(out_data[3], 1); + EXPECT_EQ(out_data[4], 1); + EXPECT_EQ(out_data[5], 0); + EXPECT_EQ(outputs[0]->GetData().size(), 24); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDims().size(), 2); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(0), 2); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(1), 3); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, Int32ScaleVectorSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 10, 7}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {}; + vector data_vec_1 = {5}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_1, tensor_0}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + int32_t *out_data = 
(int32_t *)outputs[0]->GetData().data(); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(out_data[0], 5); + EXPECT_EQ(out_data[1], 0); + EXPECT_EQ(out_data[2], 0); + EXPECT_EQ(out_data[3], 0); + EXPECT_EQ(out_data[4], 0); + EXPECT_EQ(out_data[5], 0); + EXPECT_EQ(outputs[0]->GetData().size(), 24); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDims().size(), 2); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(0), 2); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(1), 3); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, Int32VectorScaleSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 10, 7}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {}; + vector data_vec_1 = {-9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + int32_t *out_data = (int32_t *)outputs[0]->GetData().data(); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(out_data[0], -1); + EXPECT_EQ(out_data[1], -1); + EXPECT_EQ(out_data[2], -4); + EXPECT_EQ(out_data[3], -1); + EXPECT_EQ(out_data[4], -2); + EXPECT_EQ(out_data[5], -1); + EXPECT_EQ(outputs[0]->GetData().size(), 24); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDims().size(), 2); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(0), 2); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(1), 3); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, Int32ScaleScaleSuccess) { + 
OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {}; + vector data_vec_0 = {-9}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {}; + vector data_vec_1 = {-5}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + int32_t *out_data = (int32_t *)outputs[0]->GetData().data(); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(out_data[0], 1); + EXPECT_EQ(outputs[0]->GetData().size(), 4); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDims().size(), 0); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, FloatVectorVectorSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, -10, -7}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {2, -9.9, -9.9, -9.9, -9.9, -9.9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + float *out_data = 
const_cast(reinterpret_cast(outputs[0]->GetData().GetData())); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetData().size(), 24); + EXPECT_EQ(out_data[0], 0); + EXPECT_EQ(out_data[1], -1); + EXPECT_EQ(out_data[2], -4); + EXPECT_EQ(out_data[3], -1); + EXPECT_EQ(out_data[4], 1); + EXPECT_EQ(out_data[5], 0); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDims().size(), 2); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(0), 2); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(1), 3); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, InvalidInputSizeFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 10, 7}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {6}; + vector data_vec_1 = {2, 9, 9, 9, 9, 9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, InvalidDimSizeFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 10, 7}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {6}; + vector data_vec_1 = {2, 9, 9, 9, 9, 9}; + GeTensorDesc 
tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, InvalidDimFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 10, 7}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 1}; + vector data_vec_1 = {2, 9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, EmptyDataFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2}; + vector data_vec_0 = {}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2}; + vector data_vec_1 = {2, 9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * 
sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, UnmatchedDataTypeFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2}; + vector data_vec_0 = {3, 36}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1 = {2}; + vector data_vec_1 = {2, 9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, InvalidDataTypeFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2}; + vector data_vec_0 = {3, 36}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(double)); + + vector dims_vec_1 = {2}; + vector data_vec_1 = {2, 9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = 
kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, ZeroVectorVectorFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 5, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {2, 9, 9, 9, 0, 9}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, ZeroVectorScaleFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 5, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {}; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, 
ZeroScaleVectorFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 6, 32, 9, 0, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {}; + vector data_vec_1 = {6}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_1, tensor_0}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassedFoldingKernelFloorDivKernel, ZeroScaleScaleFailed) { + OpDescPtr op_desc_ptr = std::make_shared("floor_div", FLOORDIV); + + vector dims_vec_0 = {}; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {}; + vector data_vec_1 = {6}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_1, tensor_0}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(FLOORDIV); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/floormod_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/floormod_kernel_unittest.cc new file mode 100644 index 00000000..e0499ffb --- /dev/null +++ 
b/tests/ut/ge/graph/passes/folding_kernel/floormod_kernel_unittest.cc @@ -0,0 +1,199 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "framework/common/ge_inner_error_codes.h" + +#define protected public +#define private public +#include "graph/passes/folding_kernel/floormod_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelFloormodKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelFloormodKernel, FloormodOptimizerInitSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("floormod", FLOORMOD); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {4, 4, 4, -4, -4, -4}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 
sizeof(int32_t)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {1, -3, 3, -3, 3, -2}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(FLOORMOD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + + GeTensorPtr out = outputs[0]; + vector data_y = {0, -2, 1, -1, 2, 0}; + EXPECT_EQ(out->GetData().size(), 24); + size_t one_size = sizeof(int32_t); + size_t out_nums = out->GetData().size() / one_size; + for (size_t i = 0; i < out_nums; i++) { + int32_t *one_val = (int32_t *)(out->GetData().data() + i * one_size); + EXPECT_EQ(data_y[i], *one_val); + } +} + +TEST_F(UtestGraphPassesFoldingKernelFloormodKernel, FloormodOptimizerErrtypeFail) { + OpDescPtr op_desc_ptr = std::make_shared("floormod", FLOORMOD); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UNDEFINED); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1, 2, 3, 4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(FLOORMOD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} + 
+TEST_F(UtestGraphPassesFoldingKernelFloormodKernel, FloormodOptimizerDifferentType) { + OpDescPtr op_desc_ptr = std::make_shared("floormod", FLOORMOD); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(FLOORMOD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} + +TEST_F(UtestGraphPassesFoldingKernelFloormodKernel, FloormodNull) { + vector dims_vec_0; + vector data_vec_0; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + OpDescPtr op_desc_ptr = nullptr; + shared_ptr kernel = ge::KernelFactory::Instance().Create(FLOORMOD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, PARAM_INVALID); + + op_desc_ptr = std::make_shared("floormod", FLOORMOD); 
+ vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); + + vector dims_vec_2; + vector data_vec_2; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + input.push_back(tensor_2); + + status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} + +TEST_F(UtestGraphPassesFoldingKernelFloormodKernel, FloormodZero) { + OpDescPtr op_desc_ptr = std::make_shared("floormod", FLOORMOD); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {4, 4, 4, -4, -4, -4}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {0, -3, 3, -3, 3, -2}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(FLOORMOD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/folding_kernel_unittest_utils.h b/tests/ut/ge/graph/passes/folding_kernel/folding_kernel_unittest_utils.h new file mode 100644 index 00000000..1be12b30 --- /dev/null +++ 
b/tests/ut/ge/graph/passes/folding_kernel/folding_kernel_unittest_utils.h @@ -0,0 +1,115 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _GRAPH_PASSES_FOLDING_KERNEL_UNITTEST_UTILS_H_ +#define _GRAPH_PASSES_FOLDING_KERNEL_UNITTEST_UTILS_H_ + +#include +#include +#include +#include + +#include "common/debug/log.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/operator.h" +#include "graph/passes/constant_folding_pass.h" +#include "graph/passes/folding_kernel/broadcast_args_kernel.h" +#include "inc/kernel_factory.h" +#include "shape_refiner.h" + +namespace ge { +namespace test { + +/// construct input with i_shape_dims and i_data +/// The kernel function depends on the output shape and type, +/// but the unit test cannot load the libopsproto.so, +/// that registers the operator's infer-shape function, +/// so you need to set the output shape and type yourself. 
+template +static bool ConstructOpDesc(const vector> &i_shape_dims, const vector> &o_shape_dims, + const vector> &i_data, DataType in_dt, DataType out_dt, + vector &inputs, shared_ptr &op_desc_ptr) { + for (size_t i = 0; i < i_shape_dims.size(); i++) { + auto dims_vec = i_shape_dims.at(i); + GeTensorDesc tensor_desc(GeShape(dims_vec), FORMAT_NCHW, in_dt); + auto data_vec = i_data.at(i); + ConstGeTensorPtr tensor = + make_shared(tensor_desc, (uint8_t *)data_vec.data(), data_vec.size() * sizeof(T)); + inputs.push_back(tensor); + + op_desc_ptr->AddInputDesc(tensor_desc); + } + + for (size_t i = 0; i < o_shape_dims.size(); i++) { + op_desc_ptr->AddOutputDesc(GeTensorDesc(GeShape(o_shape_dims[i]), FORMAT_NCHW, out_dt)); + } + return true; +} + +template +static bool ConstFoldingKernelCheckShapeAndOutput(string &op_type, const vector> &i_shape_dims, + const vector> &i_data, DataType in_dt, + const vector> &o_shape_dims, + const vector> &o_data, DataType out_dt) { +#ifndef CHECK_NOT_EQ_RETURN_FALSE +#define CHECK_NOT_EQ_RETURN_FALSE(lhd, rhd) \ + if (lhd != rhd) { \ + return false; \ + } + vector inputs; + auto op_desc_ptr = make_shared(op_type, op_type); + // construct input with i_shape_dims and i_data + int64_t err_flag = ConstructOpDesc(i_shape_dims, o_shape_dims, i_data, in_dt, out_dt, inputs, op_desc_ptr); + CHECK_NOT_EQ_RETURN_FALSE(err_flag, true); + + // call kernel's compute func + auto kernel = KernelFactory::Instance().Create(op_type); + if (kernel == nullptr) { + return false; + } + + std::vector v_output; + Status status = kernel->Compute(op_desc_ptr, inputs, v_output); + CHECK_NOT_EQ_RETURN_FALSE(ge::SUCCESS, status); + + // check output data + CHECK_NOT_EQ_RETURN_FALSE(o_data.size(), v_output.size()); + for (size_t i = 0; i < v_output.size(); i++) { + CHECK_NOT_EQ_RETURN_FALSE(o_data[i].size() * sizeof(OT), v_output[i]->GetData().GetSize()); + GeTensorPtr output = v_output.at(i); + OT *out_data = const_cast(reinterpret_cast(output->GetData().data())); + 
for (size_t j = 0; j < o_data[i].size(); j++) { + CHECK_NOT_EQ_RETURN_FALSE(o_data[i][j], out_data[j]); + } + } + + // check data type + CHECK_NOT_EQ_RETURN_FALSE(out_dt, v_output[0]->GetTensorDesc().GetDataType()); + + // check shape + CHECK_NOT_EQ_RETURN_FALSE(o_shape_dims.size(), v_output.size()); + for (size_t i = 0; i < v_output.size(); i++) { + auto out_dims = v_output[i]->GetTensorDesc().GetShape().GetDims(); + CHECK_NOT_EQ_RETURN_FALSE(out_dims, o_shape_dims[i]); + } +#endif + return true; +} + +} // namespace test +} // namespace ge +#endif //_GRAPH_PASSES_FOLDING_KERNEL_UNITTEST_UTILS_H_ diff --git a/tests/ut/ge/graph/passes/folding_kernel/gather_v2_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/gather_v2_kernel_unittest.cc new file mode 100644 index 00000000..abc680e5 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/gather_v2_kernel_unittest.cc @@ -0,0 +1,987 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/gather_v2_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelGatherV2Kernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT32Axis0VersionA) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + 
std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int32_t *data_buf = (int32_t *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT32Axis0VersionB) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {2, 2}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 
sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int32_t *data_buf = (int32_t *)tensor_out->GetData().data(); + vector expect_out = {3, 3}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT64Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT64); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT64); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT64); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {2, 2}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT64); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT64); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT64); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t 
*)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int64_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int64_t *data_buf = (int64_t *)tensor_out->GetData().data(); + vector expect_out = {3, 3}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} + +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT32Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {2, 3, 3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19}; // 2*3*3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc 
tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2, 3, 3}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int32_t *data_buf = (int32_t *)tensor_out->GetData().data(); + vector expect_out = {11, 12, 13, 14, 15, 16, 17, 18, 19, 11, 12, 13, 14, 15, 16, 17, 18, 19}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT32Axis0And1) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {2, 3, 3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19}; // 2*3*3 + vector data_vec_1 = {1, 0}; + vector 
axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2, 3, 3}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. 
check result + GeTensorPtr tensor_out = outputs[0]; + int32_t *data_buf = (int32_t *)tensor_out->GetData().data(); + vector expect_out = {11, 12, 13, 14, 15, 16, 17, 18, 19, 1, 2, 3, 4, 5, 6, 7, 8, 9}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} + +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT32Axis1) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {2, 3, 3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19}; // 2*3*3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {1}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. 
check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2, 2, 3}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int32_t *data_buf = (int32_t *)tensor_out->GetData().data(); + vector expect_out = {4, 5, 6, 4, 5, 6, 14, 15, 16, 14, 15, 16}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT32Axis2) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {2, 3, 3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19}; // 2*3*3 + vector data_vec_1 = {0, 0}; + vector axis_vec = {2}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = 
KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2, 3, 2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int32_t *data_buf = (int32_t *)tensor_out->GetData().data(); + vector expect_out = {1, 1, 4, 4, 7, 7, 11, 11, 14, 14, 17, 17}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT32Axis3) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {2, 2, 3, 3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19}; // 2*2*3*3 + vector data_vec_1 = {0, 1}; + vector axis_vec = {3}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + 
ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2, 2, 3, 2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int32_t *data_buf = (int32_t *)tensor_out->GetData().data(); + vector expect_out = {1, 2, 4, 5, 7, 8, 11, 12, 14, 15, 17, 18, 1, 2, 4, 5, 7, 8, 11, 12, 14, 15, 17, 18}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} + +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT8Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT8); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT8); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int8_t)); + 
ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int8_t *data_buf = (int8_t *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, INT16Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT16); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT16); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 
data_vec_0.size() * sizeof(int16_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + int16_t *data_buf = (int16_t *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, UINT8Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_UINT8); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_UINT8); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + 
std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint8_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + uint8_t *data_buf = (uint8_t *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, UINT16Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_UINT16); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_UINT16); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, 
DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint16_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + uint16_t *data_buf = (uint16_t *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, UINT32Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_UINT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_UINT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + 
GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + uint32_t *data_buf = (uint32_t *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, UINT64Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_UINT64); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_UINT64); + GeTensorDesc 
tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint64_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. 
check result + GeTensorPtr tensor_out = outputs[0]; + uint64_t *data_buf = (uint64_t *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + EXPECT_EQ(*(data_buf + i), expect_out[i]); + } +} + +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, DoubleAxis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_DOUBLE); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_DOUBLE); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(double)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. 
check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + double *data_buf = (double *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + double diff = *(data_buf + i) - expect_out[i]; + bool is_same = fabs(diff) < 0.0001 ? true : false; + EXPECT_EQ(is_same, true); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, Float16Axis0) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_FLOAT16); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_FLOAT16); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status 
status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::SUCCESS, status); + /// check result + /// 1. check shape + EXPECT_EQ(outputs.size(), 1); + vector expect_output_shape({2}); + auto real_output_shape = outputs[0]->GetTensorDesc().GetShape().GetDims(); + bool is_same_shape = (real_output_shape == expect_output_shape); + EXPECT_EQ(is_same_shape, true); + // 2. check result + GeTensorPtr tensor_out = outputs[0]; + fp16_t *data_buf = (fp16_t *)tensor_out->GetData().data(); + vector expect_out = {2, 2}; + for (int i = 0; i < expect_out.size(); i++) { + double diff = (double)*(data_buf + i) - (double)expect_out[i]; + bool is_same = fabs(diff) < 0.0001 ? true : false; + EXPECT_EQ(is_same, true); + } +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, AbnormalTestDatatypeNotSupport) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_FLOAT); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_FLOAT); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * 
sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_NE(ge::SUCCESS, status); +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, AbnormalTestIndicesOverOne) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2, 2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1, 1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_NE(ge::SUCCESS, status); +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, AbnormalTestAxisOverThree) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {2, 2, 3, 3}; + vector indices_shape = 
{2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19, + 1, 2, 3, 4, 5, 6, 7, 8, 9, 11, 12, 13, 14, 15, 16, 17, 18, 19}; // 2*2*3*3 + vector data_vec_1 = {0, 1}; + vector axis_vec = {4}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_NE(ge::SUCCESS, status); +} +TEST_F(UtestGraphPassesFoldingKernelGatherV2Kernel, AbnormalTest) { + OpDescPtr op_desc_ptr = std::make_shared("GatherV2", "GatherV2"); + + vector x_shape = {3}; + vector indices_shape = {2}; + GeTensorDesc tensor_desc_x(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_indices(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_axis(GeShape(), FORMAT_NHWC, DT_INT32); + + op_desc_ptr->AddInputDesc(0, tensor_desc_x); + op_desc_ptr->AddInputDesc(1, tensor_desc_indices); + op_desc_ptr->AddInputDesc(2, 
tensor_desc_axis); + + vector data_vec_0 = {1, 2, 3}; // 3 + vector data_vec_1 = {1, 1, 1, 1}; + vector axis_vec = {0}; + GeTensorDesc tensor_desc_0(GeShape(x_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_1(GeShape(indices_shape), FORMAT_NHWC, DT_INT32); + GeTensorDesc tensor_desc_2(GeShape(), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + { + shared_ptr kernel = KernelFactory::Instance().Create(GATHERV2); + ge::Status status = kernel->Compute(nullptr, input, outputs); + EXPECT_NE(ge::SUCCESS, status); + vector input_1 = {tensor_0, tensor_1, tensor_2, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_1, outputs); + EXPECT_NE(ge::SUCCESS, status); + vector input_2 = {nullptr, nullptr, nullptr}; + status = kernel->Compute(op_desc_ptr, input_2, outputs); + EXPECT_NE(ge::SUCCESS, status); + ConstGeTensorPtr tensor_11 = std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), 0); + vector input_11 = {tensor_0, tensor_11, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_11, outputs); + EXPECT_NE(ge::SUCCESS, status); + + GeTensorDesc tensor_desc_3(GeShape({1, 2, 3, 4}), FORMAT_NHWC, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + vector input_3 = {tensor_0, tensor_1, tensor_2, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_3, outputs); + EXPECT_NE(ge::SUCCESS, status); + GeTensorDesc tensor_desc_indices_1(GeShape(indices_shape), FORMAT_NHWC, DT_UINT32); + ConstGeTensorPtr tensor_22 = 
std::make_shared(tensor_desc_indices_1, (uint8_t *)data_vec_1.data(), + data_vec_1.size() * sizeof(int32_t)); + vector input_4 = {tensor_0, tensor_22, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_4, outputs); + EXPECT_NE(ge::SUCCESS, status); + GeTensorDesc tensor_desc_axis_1(GeShape(indices_shape), FORMAT_NHWC, DT_UINT32); + ConstGeTensorPtr tensor_33 = + std::make_shared(tensor_desc_axis_1, (uint8_t *)axis_vec.data(), axis_vec.size() * sizeof(int32_t)); + vector input_5 = {tensor_0, tensor_33, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_5, outputs); + EXPECT_NE(ge::SUCCESS, status); + vector data_vec_2 = {5, 1, 1, 1}; + ConstGeTensorPtr tensor_44 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + vector input_6 = {tensor_0, tensor_44, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_6, outputs); + EXPECT_NE(ge::SUCCESS, status); + vector data_vec_3 = {5, 1, 1, 1}; + GeTensorDesc tensor_desc_55(GeShape(indices_shape), FORMAT_NHWC, DT_INT64); + ConstGeTensorPtr tensor_55 = + std::make_shared(tensor_desc_55, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int64_t)); + vector input_7 = {tensor_0, tensor_55, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_7, outputs); + EXPECT_NE(ge::SUCCESS, status); + } +} \ No newline at end of file diff --git a/tests/ut/ge/graph/passes/folding_kernel/greater_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/greater_kernel_unittest.cc new file mode 100644 index 00000000..b4948c9c --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/greater_kernel_unittest.cc @@ -0,0 +1,504 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/greater_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelGreaterKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerIntSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = 
+ std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + GeTensorPtr out = outputs[0]; + vector data_y = {0, 1, 0, 0, 0, 0, 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, + 1, 1, 1, 1, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, + 0, 0, 0, 0, 0, 0, 1, 1, 0, 1, 0, 0, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1, 1}; + for (size_t i = 0; i < out->GetData().size(); i++) { + EXPECT_EQ(data_y[i], *((bool *)(out->GetData().data()) + i)); + } + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterScalarSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector data_vec_1 = {2}; + GeTensorDesc tensor_desc_1(GeShape(), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + GeTensorPtr out = outputs[0]; + vector data_y = {0}; + for (uint64_t i = 0; i < out->GetData().size(); i++) { + EXPECT_EQ(data_y[i], *((bool *)(out->GetData().data()) + i)); + } + EXPECT_EQ(SUCCESS, status); +} + 
+TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerInt8Success) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT8); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT8); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int8_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT8); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int8_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerInt16Success) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT16); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int16_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, 
DT_INT16); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int16_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerInt64Success) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT64); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT64); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int64_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT64); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerUint8Success) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UINT8); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; 
+ GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT8); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint8_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT8); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint8_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerUint16Success) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UINT16); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint16_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT16); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint16_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, 
GreaterOptimizerUint32Success) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UINT32); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint32_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerUint64Success) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UINT64); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT64); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint64_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT64); + ConstGeTensorPtr tensor_1 = + 
std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint64_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerDifferentType) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerBoolSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_BOOL); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {1, 0, 0, 0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_BOOL); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 
sizeof(uint8_t)); + + vector dims_vec_1; + vector data_vec_1 = {1}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_BOOL); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint8_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerFloat16Success) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT16); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1; + vector data_vec_1 = {1.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerFloatSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {1.0, 2.0, 3.0, 4.0}; + 
GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1; + vector data_vec_1 = {1.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerDoubleSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_DOUBLE); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(double)); + + vector dims_vec_1; + vector data_vec_1 = {1.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(double)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelGreaterKernel, GreaterOptimizerErrorTypeFail) { + OpDescPtr op_desc_ptr = std::make_shared("Greater", GREATER); + vector is_input_const_vec = { + true, + true, + }; + 
op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UNDEFINED); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1, 2, 3, 4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(GREATER); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/maximum_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/maximum_kernel_unittest.cc new file mode 100644 index 00000000..ef3373e7 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/maximum_kernel_unittest.cc @@ -0,0 +1,473 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "framework/common/ge_inner_error_codes.h" + +#define protected public +#define private public +#include "graph/passes/folding_kernel/maximum_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelMaximumKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerIntSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 2, 3, 5, 2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + + GeTensorPtr out = outputs[0]; + vector data_y = {1, 2, 3, 5, 5, 6}; + EXPECT_EQ(out->GetData().size(), 24); + size_t one_size = sizeof(int32_t); + size_t out_nums = out->GetData().size() / one_size; + 
for (size_t i = 0; i < out_nums; i++) { + int32_t *one_val = (int32_t *)(out->GetData().data() + i * one_size); + EXPECT_EQ(data_y[i], *one_val); + } +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerIntSuccess2) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0 = {1, 3}; + vector data_vec_0 = {1, 2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {2, 3}; + vector data_vec_1 = {1, 2, 3, 4, 5, 6}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + + GeTensorPtr out = outputs[0]; + vector data_y = {1, 2, 3, 4, 5, 6}; + EXPECT_EQ(out->GetData().size(), 24); + size_t one_size = sizeof(int32_t); + size_t out_nums = out->GetData().size() / one_size; + for (size_t i = 0; i < out_nums; i++) { + int32_t *one_val = (int32_t *)(out->GetData().data() + i * one_size); + EXPECT_EQ(data_y[i], *one_val); + } +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumScalarSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(), FORMAT_NCHW, DT_INT32); + 
ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector data_vec_1 = {2}; + GeTensorDesc tensor_desc_1(GeShape(), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + + GeTensorPtr out = outputs[0]; + EXPECT_EQ(out->GetData().size(), 4); + vector data_y = {2}; + size_t one_size = sizeof(int32_t); + size_t out_nums = out->GetData().size() / one_size; + for (size_t i = 0; i < out_nums; i++) { + int32_t *one_val = (int32_t *)(out->GetData().data() + i * one_size); + EXPECT_EQ(data_y[i], *one_val); + } +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerInt8Success) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT8); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT8); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int8_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 20, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT8); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int8_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + 
Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 96); // 2*2*4*3*2 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerInt16Success) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT16); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int16_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 20, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT16); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int16_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 192); // 2*2*4*3*2*2 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerInt64Success) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT64); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT64); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 
data_vec_0.size() * sizeof(int64_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 20, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT64); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int64_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetData().size(), 768); // 2*2*4*3*2*8 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerUint8Success) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UINT8); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT8); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint8_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 20, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT8); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint8_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 96); // 2*2*4*3*2 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerUint16Success) { + OpDescPtr op_desc_ptr = 
std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UINT16); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint16_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 20, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT16); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint16_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 192); // 2*2*4*3*2*2 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerUint32Success) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UINT32); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint32_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 20, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT32); + ConstGeTensorPtr tensor_1 = + 
std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 384); // 2*2*4*3*2*4 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerUint64Success) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UINT64); + + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT64); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint64_t)); + + vector dims_vec_1 = {2, 2, 1, 3, 1}; + vector data_vec_1 = {1, 2, 3, 4, 5, 20, 7, 8, 9, 10, 11, 12}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT64); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint64_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 768); // 2*2*4*3*2*8 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerFloat16Success) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT16); + + vector dims_vec_0 = {4}; + vector 
data_vec_0 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1; + vector data_vec_1 = {1.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 8); // 4*2 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerFloatSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1; + vector data_vec_1 = {1.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 16); // 4*4 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerDoubleSuccess) { + 
OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_DOUBLE); + + vector dims_vec_0 = {4}; + vector data_vec_0 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(double)); + + vector dims_vec_1; + vector data_vec_1 = {1.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(double)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, SUCCESS); + EXPECT_EQ(outputs[0]->GetData().size(), 32); // 4*8 +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerErrtypeFail) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UNDEFINED); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1, 2, 3, 4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = 
ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} + +TEST_F(UtestGraphPassesFoldingKernelMaximumKernel, MaximumOptimizerDifferentType) { + OpDescPtr op_desc_ptr = std::make_shared("Maximum", MAXIMUM); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1.0, 2.0, 3.0, 4.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MAXIMUM); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/mul_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/mul_kernel_unittest.cc new file mode 100644 index 00000000..8c13c58d --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/mul_kernel_unittest.cc @@ -0,0 +1,223 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "framework/common/ge_inner_error_codes.h" + +#define protected public +#define private public +#include "graph/passes/folding_kernel/mul_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelMulKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelMulKernel, Int32Success) { + OpDescPtr op_desc_ptr = std::make_shared("Mul", "Mul"); + + vector dims_vec_0; + vector data_vec_0 = {3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {5}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(MUL); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + int32_t 
*out_data = (int32_t *)outputs[0]->GetData().data(); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(out_data[0], 15); +} + +TEST_F(UtestGraphPassesFoldingKernelMulKernel, DoubleNotchanged) { + OpDescPtr op_desc_ptr = std::make_shared("Mul", "Mul"); + + vector dims_vec_0; + vector data_vec_0 = {3.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(double)); + + vector dims_vec_1; + vector data_vec_1 = {5.0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(double)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(MUL); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelMulKernel, MulOverflow) { + OpDescPtr op_desc_ptr = std::make_shared("Mul", "Mul"); + + vector dims_vec_0; + vector data_vec_0 = {99999}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {21476}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(MUL); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelMulKernel, Int32OneDSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Mul", "Mul"); + + vector 
dims_vec_0 = {2}; + vector data_vec_0 = {2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + vector dims_vec_1 = {2}; + vector data_vec_1 = {5, 6}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(MUL); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelMulKernel, Uint32OneDSuccess) { + OpDescPtr op_desc_ptr = nullptr; + + vector dims_vec_0 = {2}; + vector data_vec_0 = {2, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UINT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(uint32_t)); + vector dims_vec_1 = {2}; + vector data_vec_1 = {5, 6}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UINT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(uint32_t)); + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(MUL); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); + + op_desc_ptr = std::make_shared("Mul", "Mul"); + status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelMulKernel, Uint32OneDInputEmpty) { + OpDescPtr op_desc_ptr = std::make_shared("Mul", "Mul"); + vector input = {}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(MUL); + Status status = 
kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelMulKernel, MulOptimizerErrtypeFail) { + OpDescPtr op_desc_ptr = std::make_shared("Mul", "Mul"); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UNDEFINED); + + vector dims_vec_0; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {4}; + vector data_vec_1 = {1, 2, 3, 4}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = ge::KernelFactory::Instance().Create(MUL); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(status, NOT_CHANGED); + + vector dims_vec_2 = {4}; + vector data_vec_2 = {1, 2, 3, 4}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input_other = {tensor_0, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_other, outputs); + EXPECT_EQ(status, NOT_CHANGED); + + vector dims_vec_3 = {4}; + vector data_vec_3 = {}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + vector input_other3 = {tensor_0, tensor_3}; + status = kernel->Compute(op_desc_ptr, input_other3, outputs); + EXPECT_EQ(status, NOT_CHANGED); +} diff --git 
a/tests/ut/ge/graph/passes/folding_kernel/pack_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/pack_kernel_unittest.cc new file mode 100644 index 00000000..787e772a --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/pack_kernel_unittest.cc @@ -0,0 +1,383 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/pack_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelPackKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +// optimize op of pack success +TEST_F(UtestGraphPassesFoldingKernelPackKernel, Test1) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = 
{true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + + vector dims_vec_0; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector dims_vec_3; + vector data_vec_3 = {0}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); +} + +TEST_F(UtestGraphPassesFoldingKernelPackKernel, Test2) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, (int64_t)4); + + vector dims_vec_0; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + 
ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector dims_vec_3; + vector data_vec_3 = {0}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); +} + +TEST_F(UtestGraphPassesFoldingKernelPackKernel, Test3) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, (int64_t)4); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + vector dims_vec_0; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector 
dims_vec_3; + vector data_vec_3 = {0}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); +} + +TEST_F(UtestGraphPassesFoldingKernelPackKernel, Test4) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, (int64_t)4); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_AXIS, (int64_t)0); + + vector dims_vec_0; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); +} + +TEST_F(UtestGraphPassesFoldingKernelPackKernel, Test5) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = 
{true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, (int64_t)4); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_AXIS, (int64_t)1); + + vector dims_vec_0; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector dims_vec_3; + vector data_vec_3 = {0}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); +} + +TEST_F(UtestGraphPassesFoldingKernelPackKernel, PackOptimizerSuccessInt) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, (int64_t)4); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_AXIS, (int64_t)0); + + vector dims_vec_0; + vector data_vec_0 = {0}; + 
GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector dims_vec_3; + vector data_vec_3 = {0}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); +} + +TEST_F(UtestGraphPassesFoldingKernelPackKernel, PackOptimizerSuccessInt1) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = {true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + + AttrUtils::SetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, (int64_t)2); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_AXIS, (int64_t)0); + + vector dims_vec_0; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + 
ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + GeTensorDesc dims_tensor_desc(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); +} + +TEST_F(UtestGraphPassesFoldingKernelPackKernel, PackOptimizerSuccessFloat) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, (int64_t)4); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_AXIS, (int64_t)0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT); + + vector dims_vec_0; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(float)); + + vector dims_vec_3; + vector data_vec_3 = {0}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + 
shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelPackKernel, PackOptimizerFailedErrtype) { + OpDescPtr op_desc_ptr = std::make_shared("pack", "Pack"); + vector is_input_const_vec = {true, true, true, true}; + op_desc_ptr->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_desc_ptr, PACK_ATTR_NAME_NUM, (int64_t)4); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_AXIS, (int64_t)0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_UNDEFINED); + + vector dims_vec_0; + vector data_vec_0 = {0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {0}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector dims_vec_3; + vector data_vec_3 = {0}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_UNDEFINED); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(PACK); + Status status = kernel->Compute(op_desc_ptr, input, outputs); +} \ No newline at end of file diff --git a/tests/ut/ge/graph/passes/folding_kernel/permute_kernel_unittest.cc 
b/tests/ut/ge/graph/passes/folding_kernel/permute_kernel_unittest.cc new file mode 100644 index 00000000..919490c0 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/permute_kernel_unittest.cc @@ -0,0 +1,236 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/permute_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph/utils/type_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelPermuteKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelPermuteKernel, ComputeNchwToNhwc) { + const std::string ATTR_ORDER = "order"; + const std::string ATTR_PERM = "perm"; + OpDescPtr op_desc_ptr = 
std::make_shared("Transpose", "Transpose"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NHWC, DT_FLOAT); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + vector perm_list{0, 3, 1, 2}; + AttrUtils::SetListInt(op_desc_ptr, ATTR_ORDER, perm_list); + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSPOSE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelPermuteKernel, ComputeTransaxises) { + const std::string ATTR_ORDER = "order"; + const std::string ATTR_PERM = "perm"; + OpDescPtr op_desc_ptr = std::make_shared("Transpose", "Transpose"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + vector perm_list{0, 3, 1, 2}; + AttrUtils::SetListInt(op_desc_ptr, ATTR_PERM, perm_list); + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSPOSE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::SUCCESS, status); +} + 
+TEST_F(UtestGraphPassesFoldingKernelPermuteKernel, GetPermlistFailed) { + OpDescPtr op_desc_ptr = std::make_shared("Transpose", "Transpose"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSPOSE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelPermuteKernel, ComputeNotSupportInconsistentDatatype) { + const std::string ATTR_PERM = "perm"; + OpDescPtr op_desc_ptr = std::make_shared("Transpose", "Transpose"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT16); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + vector perm_list{0, 3, 1, 2}; + AttrUtils::SetListInt(op_desc_ptr, ATTR_PERM, perm_list); + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSPOSE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelPermuteKernel, 
ComputeNotSupportEmptyShape) { + const std::string ATTR_ORDER = "order"; + OpDescPtr op_desc_ptr = std::make_shared("Transpose", "Transpose"); + + GeTensorDesc dims_tensor_desc(GeShape(), FORMAT_ND, DT_FLOAT); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + vector perm_list{0, 3, 1, 2}; + AttrUtils::SetListInt(op_desc_ptr, ATTR_ORDER, perm_list); + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSPOSE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelPermuteKernel, ComputeParamInvalid1) { + OpDescPtr op_desc_ptr = std::make_shared("Transpose", "Transpose"); + auto tmp = op_desc_ptr->GetOutputDesc(0); + tmp.SetFormat(FORMAT_NHWC); + tmp.SetDataType(DT_FLOAT16); + tmp.SetShape(GeShape({1, 1, 1, 1})); + + op_desc_ptr->UpdateOutputDesc(0, tmp); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = {}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSPOSE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelPermuteKernel, ComputeParamInvalid2) { + OpDescPtr op_desc_ptr = nullptr; + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc 
tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSPOSE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelPermuteKernel, ComputeParamInvalid3) { + OpDescPtr op_desc_ptr = std::make_shared("Transpose", "Transpose"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NHWC, DT_FLOAT); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = {tensor_0}; + vector input2 = {}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSPOSE); + ge::Status status = kernel->Compute(op_desc_ptr, input2, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/range_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/range_kernel_unittest.cc new file mode 100644 index 00000000..6cdb226e --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/range_kernel_unittest.cc @@ -0,0 +1,362 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "framework/common/ge_inner_error_codes.h" + +#define protected public +#define private public +#include "graph/passes/folding_kernel/range_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelRangeKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelRangeKernel, Int32Success) { + OpDescPtr op_desc_ptr = std::make_shared("Range", RANGE); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + int32_t start = 1, limit = 20, delta = 2; + + vector dims_vec_0; + vector data_vec_0 = {start}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {limit}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * 
sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {delta}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RANGE); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 1); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(0), 10); + + int32_t *val = (int32_t *)outputs[0]->GetData().data(); + for (int64_t i = 0; i < outputs[0]->GetTensorDesc().GetShape().GetDim(0); ++i) { + EXPECT_EQ(val[i], start + delta * i); + } +} + +TEST_F(UtestGraphPassesFoldingKernelRangeKernel, FloatSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Range", RANGE); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT); + + float start = 0.0, limit = 10.0, delta = 1.0; + + vector dims_vec_0; + vector data_vec_0 = {start}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1; + vector data_vec_1 = {limit}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector dims_vec_2; + vector data_vec_2 = {delta}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RANGE); + Status 
status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 1); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(0), 10); + + float *val = (float *)outputs[0]->GetData().data(); + for (int64_t i = 0; i < outputs[0]->GetTensorDesc().GetShape().GetDim(0); ++i) { + EXPECT_EQ(val[i], start + delta * (float)i); + } +} + +TEST_F(UtestGraphPassesFoldingKernelRangeKernel, FloatSuccess1) { + OpDescPtr op_desc_ptr = std::make_shared("Range", RANGE); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_FLOAT); + + float start = 0.0, limit = 2.0, delta = 1.2345678; + + vector dims_vec_0; + vector data_vec_0 = {start}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector dims_vec_1; + vector data_vec_1 = {limit}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(float)); + + vector dims_vec_2; + vector data_vec_2 = {delta}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RANGE); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 1); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(0), 2); + + float *val = (float *)outputs[0]->GetData().data(); + for (int64_t i = 0; i < outputs[0]->GetTensorDesc().GetShape().GetDim(0); ++i) { + EXPECT_EQ(val[i], start + delta * (float)i); + } +} + 
+TEST_F(UtestGraphPassesFoldingKernelRangeKernel, DataTypeNotSupport) { + OpDescPtr op_desc_ptr = nullptr; + + int8_t start = 1, limit = 1, delta = 1; + + vector dims_vec_0; + vector data_vec_0 = {start}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT8); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int8_t)); + + vector dims_vec_1; + vector data_vec_1 = {limit}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT8); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int8_t)); + + vector dims_vec_2; + vector data_vec_2 = {delta}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT8); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int8_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RANGE); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(PARAM_INVALID, status); + + op_desc_ptr = std::make_shared("Range", RANGE); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT8); + status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelRangeKernel, DimSizeIsZero) { + OpDescPtr op_desc_ptr = std::make_shared("Range", RANGE); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + int32_t start = 1, limit = 1, delta = 1; + + vector dims_vec_0; + vector data_vec_0 = {start}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {limit}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + 
ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {delta}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RANGE); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 0); +} + +TEST_F(UtestGraphPassesFoldingKernelRangeKernel, ErrorTest) { + OpDescPtr op_desc_ptr = std::make_shared("Range", RANGE); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + int32_t start = 1, limit = 1; + + vector dims_vec_0; + vector data_vec_0 = {start}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {limit}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {1}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_UINT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(uint32_t)); + + vector dims_vec_2_zero; + vector data_vec_2_zero = {}; + GeTensorDesc tensor_desc_2_zero(GeShape(dims_vec_2_zero), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2_zero = std::make_shared(tensor_desc_2_zero, (uint8_t *)data_vec_2_zero.data(), + data_vec_2_zero.size() * sizeof(int32_t)); + + vector 
input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RANGE); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); + + vector input_has_zero = {tensor_0, tensor_1, tensor_2_zero}; + status = kernel->Compute(op_desc_ptr, input_has_zero, outputs); + EXPECT_EQ(NOT_CHANGED, status); + + vector input_has_diff = {tensor_0, tensor_1, tensor_2}; + status = kernel->Compute(op_desc_ptr, input_has_diff, outputs); + EXPECT_EQ(NOT_CHANGED, status); + + vector dims_vec_3 = {2, 3}; + vector data_vec_3 = {1, -3, 3, -3, 3, -2}; + GeTensorDesc tensor_desc_3(GeShape(dims_vec_3), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_3, (uint8_t *)data_vec_3.data(), data_vec_3.size() * sizeof(int32_t)); + + vector input_not_scalar = {tensor_0, tensor_1, tensor_3}; + status = kernel->Compute(op_desc_ptr, input_not_scalar, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelRangeKernel, NegativeSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("Range", RANGE); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + int32_t start = -20, limit = 0, delta = 3; + + vector dims_vec_0; + vector data_vec_0 = {start}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {limit}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {delta}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * 
sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RANGE); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 1); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDim(0), 7); +} + +TEST_F(UtestGraphPassesFoldingKernelRangeKernel, RangeError) { + OpDescPtr op_desc_ptr = std::make_shared("Range", RANGE); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, (int64_t)DT_INT32); + + int32_t start = 10, limit = 1, delta = 1; + + vector dims_vec_0; + vector data_vec_0 = {start}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1; + vector data_vec_1 = {limit}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector dims_vec_2; + vector data_vec_2 = {delta}; + GeTensorDesc tensor_desc_2(GeShape(dims_vec_2), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_2, (uint8_t *)data_vec_2.data(), data_vec_2.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_2}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RANGE); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(PARAM_INVALID, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/rank_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/rank_kernel_unittest.cc new file mode 100644 index 00000000..390a2c77 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/rank_kernel_unittest.cc @@ -0,0 +1,108 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/rank_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; +using ge::SUCCESS; + +class UtestGraphPassesFoldingKernelRankKernel : public testing::Test { + protected: + void SetUp() { + graph = std::make_shared("default"); + op_desc_ptr = std::make_shared("Rank", RANK); + node = std::make_shared(op_desc_ptr, graph); + kernel = KernelFactory::Instance().Create(RANK); + } + + void TearDown() {} + + ge::ComputeGraphPtr graph; + OpDescPtr op_desc_ptr; + NodePtr node; + shared_ptr kernel; +}; + +TEST_F(UtestGraphPassesFoldingKernelRankKernel, RankIsOne) { + vector dims_vec_0 = {4}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + GeTensorDesc out_tensor_desc_0(GeShape(), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(tensor_desc_0); + 
op_desc_ptr->AddOutputDesc(out_tensor_desc_0); + std::vector v_output; + Status status = kernel->Compute(node, v_output); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(v_output[0]->GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(v_output[0]->GetTensorDesc().GetShape().GetDimNum(), 0); + EXPECT_EQ(*(int32_t *)(v_output[0]->GetData().data()), 1); +} + +TEST_F(UtestGraphPassesFoldingKernelRankKernel, RankIsThree) { + vector dims_vec_0 = {4, 2, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + GeTensorDesc out_tensor_desc_0(GeShape(), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(tensor_desc_0); + op_desc_ptr->AddOutputDesc(out_tensor_desc_0); + std::vector v_output; + Status status = kernel->Compute(node, v_output); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(v_output[0]->GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(v_output[0]->GetTensorDesc().GetShape().GetDimNum(), 0); + EXPECT_EQ(*(int32_t *)(v_output[0]->GetData().data()), 3); +} + +TEST_F(UtestGraphPassesFoldingKernelRankKernel, InvalidCaseInputSizeIsZero) { + std::vector v_output; + Status status = kernel->Compute(node, v_output); + + EXPECT_NE(SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelRankKernel, InvalidCaseInputSizeIsMoreThanOne) { + vector dims_vec_0 = {4, 2, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(tensor_desc_0); + op_desc_ptr->AddInputDesc(tensor_desc_0); + + std::vector v_output; + Status status = kernel->Compute(node, v_output); + + EXPECT_NE(SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/reduce_prod_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/reduce_prod_kernel_unittest.cc new file mode 100644 index 00000000..2c27e642 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/reduce_prod_kernel_unittest.cc @@ -0,0 +1,195 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/reduce_prod_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/passes/folding_kernel/concat_v2_kernel.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelReduceProdKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelReduceProdKernel, Int32Success) { + OpDescPtr op_desc_ptr = std::make_shared("ReduceProd", REDUCEPROD); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 2, 5, 7, 3, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {1}; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), 
data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REDUCEPROD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + int32_t *out_data = (int32_t *)outputs[0]->GetData().data(); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(out_data[0], 7); + EXPECT_EQ(out_data[1], 6); + EXPECT_EQ(out_data[2], 5); + EXPECT_EQ(outputs[0]->GetData().size(), 12); +} + +TEST_F(UtestGraphPassesFoldingKernelReduceProdKernel, DoubleNotChanged) { + OpDescPtr op_desc_ptr = std::make_shared("ReduceProd", REDUCEPROD); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 2, 5, 7, 3, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {1}; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REDUCEPROD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReduceProdKernel, InvalidInputSizeNotChanged) { + OpDescPtr op_desc_ptr = std::make_shared("ReduceProd", REDUCEPROD); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 2, 5, 7, 3, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {1}; + vector data_vec_1 = {0}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_1 = + 
std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1, tensor_1}; + vector input2 = {tensor_0}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(REDUCEPROD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + Status status2 = kernel->Compute(op_desc_ptr, input2, outputs); + + EXPECT_EQ(NOT_CHANGED, status); + EXPECT_EQ(NOT_CHANGED, status2); +} + +TEST_F(UtestGraphPassesFoldingKernelReduceProdKernel, InvalidAxisDimNotChanged) { + OpDescPtr op_desc_ptr = std::make_shared("ReduceProd", REDUCEPROD); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 2, 5, 7, 3, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {1, 1, 2}; + vector data_vec_1 = {0, 1}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(REDUCEPROD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReduceProdKernel, InvalidAxisNotChanged) { + OpDescPtr op_desc_ptr = std::make_shared("ReduceProd", REDUCEPROD); + + vector dims_vec_0 = {2, 3}; + vector data_vec_0 = {1, 2, 5, 7, 3, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {1}; + vector data_vec_1 = {3}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr 
tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(REDUCEPROD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReduceProdKernel, OverflowDataNotChanged) { + OpDescPtr op_desc_ptr = std::make_shared("ReduceProd", REDUCEPROD); + + vector dims_vec_0 = {2, 3, 1}; + vector data_vec_0 = {2, 2, 2, 214748, 214748, 214748}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(int32_t)); + + vector dims_vec_1 = {1}; + vector data_vec_1 = {1}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_DOUBLE); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_1, (uint8_t *)data_vec_1.data(), data_vec_1.size() * sizeof(int32_t)); + + vector input = {tensor_0, tensor_1}; + vector outputs; + shared_ptr kernel = KernelFactory::Instance().Create(REDUCEPROD); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/reformat_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/reformat_kernel_unittest.cc new file mode 100644 index 00000000..dd5f1e77 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/reformat_kernel_unittest.cc @@ -0,0 +1,202 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "graph/passes/folding_kernel/reformat_kernel.h" + +#include "common/debug/log.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelReformatKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelReformatKernel, ComputeSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("ReFormat", "ReFormat"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REFORMAT); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReformatKernel, 
EmptyInput) { + OpDescPtr op_desc_ptr = std::make_shared("ReFormat", "ReFormat"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REFORMAT); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReformatKernel, InputNullptr) { + OpDescPtr op_desc_ptr = std::make_shared("ReFormat", "ReFormat"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REFORMAT); + ge::Status status = kernel->Compute(nullptr, input, outputs); + + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReformatKernel, InvalidInputsize) { + OpDescPtr op_desc_ptr = std::make_shared("ReFormat", "ReFormat"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc 
dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REFORMAT); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReformatKernel, MismatchShape) { + OpDescPtr op_desc_ptr = std::make_shared("ReFormat", "ReFormat"); + + GeTensorDesc dims_tensor_desc(GeShape({1}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REFORMAT); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReformatKernel, MismatchDtype) { + OpDescPtr op_desc_ptr = std::make_shared("ReFormat", "ReFormat"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_ND, DT_FLOAT16); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 
1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REFORMAT); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReformatKernel, MismatchDataSize) { + OpDescPtr op_desc_ptr = std::make_shared("ReFormat", "ReFormat"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 2, 3, 4}), FORMAT_ND, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 2, 3, 4}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(REFORMAT); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/reshape_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/reshape_kernel_unittest.cc new file mode 100644 index 00000000..8f786629 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/reshape_kernel_unittest.cc @@ -0,0 +1,385 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/reshape_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelReshapeKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + template + void TestReshape(vector &data_vec, vector &dim_value_vec, vector &result) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, data_type); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(inner_data_type)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + vector dim_vec; + 
dim_vec.push_back(dim_value_vec.size()); + GeTensorDesc dim_tensor_desc(ge::GeShape(dim_vec), FORMAT_NCHW, dim_type); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(inner_dim_type)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Reshape", RESHAPE); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(RESHAPE); + Status status = kernel->Compute(op_node); + EXPECT_EQ(ge::SUCCESS, status); + } + + template + void TestInvalidReshape(vector &data_vec, vector &dim_value_vec, vector &result) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), format, data_type); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(inner_data_type)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + vector dim_vec; + dim_vec.push_back(dim_value_vec.size()); + GeTensorDesc 
dim_tensor_desc(ge::GeShape(dim_vec), format, dim_type); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(inner_dim_type)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Reshape", RESHAPE); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(RESHAPE); + Status status = kernel->Compute(op_node); + EXPECT_NE(ge::SUCCESS, status); + + vector input = {data_tensor}; + vector outputs; + status = kernel->Compute(op_node->GetOpDesc(), input, outputs); + EXPECT_EQ(NOT_CHANGED, status); + } +}; + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Int8Int32) { + vector data_vec = {2, 3}; + vector dim_value_vec = {3, 2}; + vector result = {3, 2}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Int16Int32) { + vector data_vec = {3, 3}; + vector dim_value_vec = {9}; + vector result = {9}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Int32Int32) { + vector data_vec = {3, 3, 3, 5, 6}; + vector dim_value_vec = {9, 90}; + vector reuslt = {9, 90}; + TestReshape(data_vec, dim_value_vec, reuslt); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Int64Int32) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {12, 6, 3 * 4 * 56 * 7}; + vector result = {12, 6, 3 * 4 * 56 * 7}; + TestReshape(data_vec, 
dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Uint8Int32) { + vector data_vec = {2, 3}; + vector dim_value_vec = {-1}; + vector result = {6}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Uint16Int32) { + vector data_vec = {3}; + vector dim_value_vec = {-1}; + vector result = {3}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Uint32Int32) { + vector data_vec = {3, 3, 3, 5, 6}; + vector dim_value_vec = {3, -1}; + vector result = {3, 3 * 3 * 5 * 6}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Uint64Int32) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {6, 12, 3, 4, 7, -1}; + vector result = {6, 12, 3, 4, 7, 56}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, Fp16Int32) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {-1}; + vector result = {6 * 12 * 3 * 4 * 56 * 7 * 1}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, FloatInt32) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {-1}; + vector result = {11}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, DoubleInt32) { + vector data_vec = {7, 7, 7, 12, 2}; + vector dim_value_vec = {7, 12, 2, 7, 7}; + vector result = {7, 12, 2, 7, 7}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, DoubleInt64) { + vector data_vec = {3, 4, 2, 2, 8}; + vector dim_value_vec = {12, -1}; + vector result = {12, 32}; + TestReshape(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, BoolInt64) { + vector data_vec = {3, 4, 2, 2, 8}; + vector dim_value_vec = {12, -1}; + vector result = {12, 32}; + + 
ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_BOOL); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(uint8_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + vector dim_vec; + dim_vec.push_back(dim_value_vec.size()); + GeTensorDesc dim_tensor_desc(ge::GeShape(dim_vec), FORMAT_NCHW, DT_INT64); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(int64_t)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Reshape", RESHAPE); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(RESHAPE); + Status status = kernel->Compute(op_node); + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, InvalidFormat) { + vector data_vec = {2, 3}; + vector dim_value_vec = {-1}; + vector result = {0}; + + TestInvalidReshape(data_vec, 
dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, NodeIsNull) { + NodePtr op_node = nullptr; + shared_ptr kernel = KernelFactory::Instance().Create(RESHAPE); + Status status = kernel->Compute(op_node); + EXPECT_NE(domi::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, InvalidInputNodeSize) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {7}; + vector result = {1, 1, 1, 11, 1, 1, 1, 1}; + + ge::ComputeGraphPtr graph = std::make_shared("default"); + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_BOOL); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(uint8_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + GeTensorDesc dim_tensor_desc(ge::GeShape(), FORMAT_NCHW, DT_INT64); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(int64_t)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Reshape", RESHAPE); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + + 
shared_ptr kernel = KernelFactory::Instance().Create(RESHAPE); + Status status = kernel->Compute(op_node); + EXPECT_NE(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, FoldingInt64Success) { + vector data_vec = {3, 4, 2, 2, 8}; + vector dim_value_vec = {12, -1}; + vector result = {12, 32}; + + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_BOOL); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(uint8_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add dim node + ge::OpDescPtr dim_op_desc = std::make_shared("dim", CONSTANTOP); + vector dim_vec; + dim_vec.push_back(dim_value_vec.size()); + GeTensorDesc dim_tensor_desc(ge::GeShape(dim_vec), FORMAT_NCHW, DT_INT64); + GeTensorPtr dim_tensor = std::make_shared(dim_tensor_desc, (uint8_t *)dim_value_vec.data(), + dim_value_vec.size() * sizeof(int64_t)); + OpDescUtils::SetWeights(dim_op_desc, dim_tensor); + dim_op_desc->AddOutputDesc(dim_tensor_desc); + NodePtr dim_node = graph->AddNode(dim_op_desc); + dim_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Reshape", RESHAPE); + expanddim_op_desc->AddInputDesc(data_tensor_desc); + expanddim_op_desc->AddInputDesc(dim_tensor_desc); + expanddim_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + 
GraphUtils::AddEdge(dim_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(1)); + + shared_ptr kernel = KernelFactory::Instance().Create(RESHAPE); + vector input = {data_tensor, dim_tensor}; + vector outputs; + Status status = kernel->Compute(op_node->GetOpDesc(), input, outputs); + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelReshapeKernel, OpdescIsNullFailed) { + shared_ptr kernel = KernelFactory::Instance().Create(RESHAPE); + ge::OpDescPtr null_op_desc = nullptr; + vector input = {}; + vector outputs; + Status status = kernel->Compute(null_op_desc, input, outputs); + EXPECT_EQ(PARAM_INVALID, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/rsqrt_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/rsqrt_kernel_unittest.cc new file mode 100644 index 00000000..3f13d911 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/rsqrt_kernel_unittest.cc @@ -0,0 +1,91 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/rsqrt_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestFoldingKernelRsqrtKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +// optimize op of sqrt success +TEST_F(UtestFoldingKernelRsqrtKernel, RsqrtOptimizerSuccess) { + OpDescPtr op_desc_ptr = std::make_shared("RSQRT", RSQRT); + + vector dims_vec_0 = {3, 2}; + vector data_vec_0 = {4.0, 16.0, 25.0, 100.0, 400.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RSQRT); + Status status = kernel->Compute(op_desc_ptr, input, outputs); + + float *outdata = (float *)outputs[0]->GetData().data(); + + EXPECT_EQ(SUCCESS, status); + EXPECT_FLOAT_EQ(outdata[0], 0.5); + EXPECT_FLOAT_EQ(outdata[1], 0.25); + EXPECT_FLOAT_EQ(outdata[2], 0.2); + EXPECT_FLOAT_EQ(outdata[3], 0.1); + EXPECT_FLOAT_EQ(outdata[4], 0.05); +} + +// optimize op of sqrt fail(include 0) +TEST_F(UtestFoldingKernelRsqrtKernel, RsqrtOptimizerHasZero) { + OpDescPtr op_desc_ptr = std::make_shared("RSQRT", RSQRT); + + vector dims_vec_0 = {2}; + vector data_vec_0 = {4.0, 0.0}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + 
std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(RSQRT); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/shape_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/shape_kernel_unittest.cc new file mode 100644 index 00000000..5dcb4cbf --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/shape_kernel_unittest.cc @@ -0,0 +1,166 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/shape_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelShapeKernel : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { + pass_ = new ge::DimensionComputePass(); + graph_ = std::make_shared("default"); + op_desc_ptr_ = std::make_shared("Shape", SHAPE); + node_ = std::make_shared(op_desc_ptr_, graph_); + kernel_ = KernelFactory::Instance().Create(SHAPE); + } + + void destory() { + delete pass_; + pass_ = NULL; + } + + protected: + ge::DimensionComputePass *pass_; + ge::ComputeGraphPtr graph_; + OpDescPtr op_desc_ptr_; + NodePtr node_; + shared_ptr kernel_; + + NodePtr init_node(ComputeGraphPtr graph) { + // middle + OpDescPtr op_def = std::make_shared("op_def", SHAPE); + OpDescPtr in_op_def = std::make_shared("op_def_in", "test"); + OpDescPtr out_op_def = std::make_shared("op_def_in", "test"); + // input tensor + vector dims = {11, 16, 10, 12}; + ge::GeShape shape_desc(dims); + GeTensorDesc tensor_desc(shape_desc); + (void)TensorUtils::SetRealDimCnt(tensor_desc, dims.size()); + op_def->AddInputDesc(tensor_desc); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT32); + op_def->AddOutputDesc(tensor_desc_out); + // first + in_op_def->AddOutputDesc(tensor_desc); + + // add attr of out_node + vector is_input_const(3, false); + is_input_const[0] = true; + 
out_op_def->SetIsInputConst(is_input_const); + out_op_def->AddInputDesc(tensor_desc); + out_op_def->AddInputDesc(tensor_desc); + + // Add node + NodePtr in_node = graph->AddNode(in_op_def); + NodePtr node = graph->AddNode(op_def); + NodePtr out_node = graph->AddNode(out_op_def); + + // Add edge + GraphUtils::AddEdge(in_node->GetOutDataAnchor(0), node->GetInDataAnchor(0)); + GraphUtils::AddEdge(node->GetOutDataAnchor(0), out_node->GetInDataAnchor(0)); + + return node; + } +}; + +TEST_F(UtestGraphPassesFoldingKernelShapeKernel, ShapeOptimizerSuccess) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = init_node(graph); + NodePtr out_node = node->GetOutDataNodes().at(0); + + ge::Status ret = pass_->Run(node); + EXPECT_EQ(ge::SUCCESS, ret); + + vector out_weights = OpDescUtils::GetWeights(out_node); + if (out_weights.size() > 1) { + int32_t dim = *(int32_t *)out_weights[1]->GetData().data(); + EXPECT_EQ(11, dim); + } +} +TEST_F(UtestGraphPassesFoldingKernelShapeKernel, ShapeDataInt32) { + vector dims_vec_0 = {8, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + (void)TensorUtils::SetRealDimCnt(tensor_desc_0, dims_vec_0.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT32); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(ge::SUCCESS, status); + if (status == ge::SUCCESS) { + EXPECT_EQ(outputs[0]->GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 1); + EXPECT_EQ(outputs[0]->GetData().size(), sizeof(int32_t) * dims_vec_0.size()); + EXPECT_EQ(*(int32_t *)(outputs[0]->GetData().data()), 8); + } +} + +TEST_F(UtestGraphPassesFoldingKernelShapeKernel, ShapeDataInt64) { + vector dims_vec_0 = {8, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + TensorUtils::SetRealDimCnt(tensor_desc_0, 
dims_vec_0.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(ge::SUCCESS, status); + if (status == ge::SUCCESS) { + EXPECT_EQ(outputs[0]->GetTensorDesc().GetDataType(), DT_INT64); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 1); + EXPECT_EQ(outputs[0]->GetData().size(), sizeof(int64_t) * dims_vec_0.size()); + EXPECT_EQ(*(int64_t *)(outputs[0]->GetData().data()), 8); + } +} + +TEST_F(UtestGraphPassesFoldingKernelShapeKernel, ShapeInputSizeFail) { + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/shape_n_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/shape_n_kernel_unittest.cc new file mode 100644 index 00000000..672b3f34 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/shape_n_kernel_unittest.cc @@ -0,0 +1,182 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/shape_n_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelShapeNkernel : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { + pass_ = new ge::DimensionComputePass(); + graph_ = std::make_shared("default"); + op_desc_ptr_ = std::make_shared("ShapeN", SHAPEN); + node_ = std::make_shared(op_desc_ptr_, graph_); + kernel_ = KernelFactory::Instance().Create(SHAPEN); + } + + void destory() { + delete pass_; + pass_ = NULL; + } + + protected: + ge::DimensionComputePass *pass_; + ge::ComputeGraphPtr graph_; + OpDescPtr op_desc_ptr_; + NodePtr node_; + shared_ptr kernel_; + + NodePtr init_node(ComputeGraphPtr graph) { + // middle + OpDescPtr op_def = std::make_shared("op_def", SHAPEN); + OpDescPtr in_op_def = std::make_shared("op_def_in", "test"); + OpDescPtr out_op_def = std::make_shared("op_def_in", "test"); + // input tensor + vector dims = {11, 16, 10, 12}; + ge::GeShape shape_desc(dims); + GeTensorDesc tensor_desc(shape_desc); + (void)TensorUtils::SetRealDimCnt(tensor_desc, dims.size()); + op_def->AddInputDesc(tensor_desc); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT32); + op_def->AddOutputDesc(tensor_desc_out); + // first + in_op_def->AddOutputDesc(tensor_desc); + + // add attr of out_node + vector is_input_const(3, false); + is_input_const[0] = true; + out_op_def->SetIsInputConst(is_input_const); + 
out_op_def->AddInputDesc(tensor_desc); + out_op_def->AddInputDesc(tensor_desc); + + // Add node + NodePtr in_node = graph->AddNode(in_op_def); + NodePtr node = graph->AddNode(op_def); + NodePtr out_node = graph->AddNode(out_op_def); + + // Add edge + GraphUtils::AddEdge(in_node->GetOutDataAnchor(0), node->GetInDataAnchor(0)); + GraphUtils::AddEdge(node->GetOutDataAnchor(0), out_node->GetInDataAnchor(0)); + + return node; + } +}; +TEST_F(UtestGraphPassesFoldingKernelShapeNkernel, SizeOptimizerSuccess) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = init_node(graph); + NodePtr out_node = node->GetOutDataNodes().at(0); + + ge::Status ret = pass_->Run(node); + EXPECT_EQ(ge::SUCCESS, ret); + + vector out_weights = OpDescUtils::GetWeights(out_node); + if (out_weights.size() > 1) { + int32_t size = *(int32_t *)out_weights[1]->GetData().data(); + EXPECT_EQ(21120, size); + } +} + +TEST_F(UtestGraphPassesFoldingKernelShapeNkernel, ShapeNDataInt32) { + vector dims_vec_0 = {8, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + (void)TensorUtils::SetRealDimCnt(tensor_desc_0, dims_vec_0.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {3, 4, 5}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + (void)TensorUtils::SetRealDimCnt(tensor_desc_1, dims_vec_1.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_1); + + GeTensorDesc tensor_desc_out_0(GeShape(), FORMAT_NCHW, DT_INT32); + op_desc_ptr_->AddOutputDesc(tensor_desc_out_0); + + GeTensorDesc tensor_desc_out_1(GeShape(), FORMAT_NCHW, DT_INT32); + op_desc_ptr_->AddOutputDesc(tensor_desc_out_1); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(ge::SUCCESS, status); + if (status == ge::SUCCESS) { + EXPECT_EQ(outputs[1]->GetTensorDesc().GetDataType(), DT_INT32); + EXPECT_EQ(outputs[1]->GetTensorDesc().GetShape().GetDimNum(), 1); + EXPECT_EQ(outputs[1]->GetData().size(), sizeof(int32_t) * 
dims_vec_1.size()); + EXPECT_EQ(*(int32_t *)(outputs[1]->GetData().data()), 3); + } +} + +TEST_F(UtestGraphPassesFoldingKernelShapeNkernel, ShapeNDataInt64) { + vector dims_vec_0 = {8, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + TensorUtils::SetRealDimCnt(tensor_desc_0, dims_vec_0.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(ge::SUCCESS, status); + if (status == ge::SUCCESS) { + EXPECT_EQ(outputs[0]->GetTensorDesc().GetDataType(), DT_INT64); + EXPECT_EQ(outputs[0]->GetTensorDesc().GetShape().GetDimNum(), 1); + EXPECT_EQ(outputs[0]->GetData().size(), sizeof(int64_t) * dims_vec_0.size()); + EXPECT_EQ(*(int64_t *)(outputs[0]->GetData().data()), 8); + } +} + +TEST_F(UtestGraphPassesFoldingKernelShapeNkernel, ShapeN2In1Out) { + vector dims_vec_0 = {8, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + TensorUtils::SetRealDimCnt(tensor_desc_0, dims_vec_0.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {3, 4, 5}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + (void)TensorUtils::SetRealDimCnt(tensor_desc_1, dims_vec_1.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_1); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/size_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/size_kernel_unittest.cc new file mode 100644 index 00000000..f1a117f2 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/size_kernel_unittest.cc @@ -0,0 +1,171 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + 
* + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/size_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelSizeKernel : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { + pass_ = new ::ge::DimensionComputePass(); + graph_ = std::make_shared("default"); + op_desc_ptr_ = make_shared("Size", SIZE); + node_ = std::make_shared(op_desc_ptr_, graph_); + kernel_ = KernelFactory::Instance().Create(SIZE); + } + + void destory() { + delete pass_; + pass_ = NULL; + } + + protected: + ge::DimensionComputePass *pass_; + ge::ComputeGraphPtr graph_; + OpDescPtr op_desc_ptr_; + NodePtr node_; + shared_ptr kernel_; + + NodePtr init_node(ComputeGraphPtr graph) { + // middle + OpDescPtr op_def = std::make_shared("op_def", SIZE); + OpDescPtr in_op_def = std::make_shared("op_def_in", "test"); + OpDescPtr out_op_def = 
std::make_shared("op_def_in", "test"); + // input tensor + vector dims = {11, 16, 10, 12}; + ge::GeShape shape_desc(dims); + GeTensorDesc tensor_desc(shape_desc); + (void)TensorUtils::SetRealDimCnt(tensor_desc, dims.size()); + op_def->AddInputDesc(tensor_desc); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT32); + op_def->AddOutputDesc(tensor_desc_out); + // first + in_op_def->AddOutputDesc(tensor_desc); + + // add attr of out_node + vector is_input_const(3, false); + is_input_const[0] = true; + out_op_def->SetIsInputConst(is_input_const); + out_op_def->AddInputDesc(tensor_desc); + out_op_def->AddInputDesc(tensor_desc); + + // Add node + NodePtr in_node = graph->AddNode(in_op_def); + NodePtr node = graph->AddNode(op_def); + NodePtr out_node = graph->AddNode(out_op_def); + + // Add edge + GraphUtils::AddEdge(in_node->GetOutDataAnchor(0), node->GetInDataAnchor(0)); + GraphUtils::AddEdge(node->GetOutDataAnchor(0), out_node->GetInDataAnchor(0)); + + return node; + } +}; + +TEST_F(UtestGraphPassesFoldingKernelSizeKernel, SizeOptimizerSuccess) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = init_node(graph); + NodePtr out_node = node->GetOutDataNodes().at(0); + + ge::Status ret = pass_->Run(node); + EXPECT_EQ(ge::SUCCESS, ret); + + vector out_weights = OpDescUtils::GetWeights(out_node); + if (out_weights.size() > 1) { + int32_t size = *(int32_t *)out_weights[1]->GetData().data(); + EXPECT_EQ(4, size); + } +} + +TEST_F(UtestGraphPassesFoldingKernelSizeKernel, Size2InputFail) { + vector dims_vec_0 = {8, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + TensorUtils::SetRealDimCnt(tensor_desc_0, dims_vec_0.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {3, 4, 5}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + (void)TensorUtils::SetRealDimCnt(tensor_desc_1, dims_vec_1.size()); + op_desc_ptr_->AddInputDesc(tensor_desc_1); + + GeTensorDesc 
tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelSizeKernel, SizeInputNotSetDataTypeFail) { + vector dims_vec_0 = {8, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelSizeKernel, SizeInputInt64Overflow) { + vector dims_vec_0 = {8, INT64_MAX}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(ge::INTERNAL_ERROR, status); +} + +TEST_F(UtestGraphPassesFoldingKernelSizeKernel, OpdescIsNullptr) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + OpDescPtr op_desc = nullptr; + NodePtr op_node = graph->AddNode(op_desc); + std::vector outputs; + Status status = kernel_->Compute(node_, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} \ No newline at end of file diff --git a/tests/ut/ge/graph/passes/folding_kernel/slice_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/slice_kernel_unittest.cc new file mode 100644 index 00000000..a2d3c3b4 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/slice_kernel_unittest.cc @@ -0,0 +1,388 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/slice_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/types.h" +#include "gen_node.h" +#include "graph/op_desc.h" +#include "graph/passes/constant_folding_pass.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/op_desc_utils.h" +#undef protected +#undef private + +using namespace ge; +using namespace testing; + +class UtestFoldingSliceKernel : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { pass_ = new ::ge::ConstantFoldingPass(); } + + void destory() { + delete pass_; + pass_ = NULL; + } + + protected: + ::ge::ConstantFoldingPass *pass_; + + NodePtr initNode_int32(ComputeGraphPtr graph) { + OpDescPtr op_def = std::make_shared("op_def", "Slice"); + auto node_temp = GenNodeFromOpDesc(op_def); + + vector is_input_const(4, true); + op_def->SetIsInputConst(is_input_const); + AttrUtils::SetInt(op_def, ATTR_NAME_T, DT_INT32); + // Add weights + vector op_weights = OpDescUtils::MutableWeights(node_temp); + ge::GeShape op_shape({4}); + GeTensorDesc op_desc(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value[4]; + value[0] = 1; + value[1] = 2; + value[2] = 3; + value[3] = 4; + GeTensorPtr weight0 = std::make_shared(op_desc, (uint8_t *)value, 4 * sizeof(int32_t)); + int32_t value1 = 0; + GeTensorPtr weight1 = std::make_shared(op_desc, (uint8_t *)&value1, sizeof(int32_t)); + int32_t value2 = 2; + GeTensorPtr 
weight2 = std::make_shared(op_desc, (uint8_t *)&value2, sizeof(int32_t)); + + op_weights.push_back(weight0); + op_weights.push_back(weight1); + op_weights.push_back(weight2); + + OpDescUtils::SetWeights(node_temp, op_weights); + NodePtr node = graph->AddNode(node_temp); + return node; + } + + NodePtr initNode_float(ComputeGraphPtr graph) { + OpDescPtr op_def = std::make_shared("op_def", "Slice"); + auto node_tmp = GenNodeFromOpDesc(op_def); + + vector is_input_const(4, true); + op_def->SetIsInputConst(is_input_const); + AttrUtils::SetInt(op_def, ATTR_NAME_T, DT_FLOAT); + + // Add weights + vector op_weights = OpDescUtils::MutableWeights(node_tmp); + ge::GeShape op_shape({4}); + GeTensorDesc op_desc(op_shape); + float value[4]; + value[0] = 1.0; + value[1] = 2.0; + value[2] = 3.0; + value[3] = 4.0; + GeTensorPtr weight0 = std::make_shared(op_desc, (uint8_t *)value, 4 * sizeof(float)); + + GeTensorDesc op_desc_1(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value1 = 0; + GeTensorPtr weight1 = std::make_shared(op_desc_1, (uint8_t *)&value1, sizeof(int32_t)); + int32_t value2 = 2; + GeTensorPtr weight2 = std::make_shared(op_desc_1, (uint8_t *)&value2, sizeof(int32_t)); + + op_weights.push_back(weight0); + op_weights.push_back(weight1); + op_weights.push_back(weight2); + OpDescUtils::SetWeights(node_tmp, op_weights); + NodePtr node = graph->AddNode(node_tmp); + return node; + } + + NodePtr initNode_errtype(ComputeGraphPtr graph) { + OpDescPtr op_def = std::make_shared("op_def", "Slice"); + auto node_tmp = GenNodeFromOpDesc(op_def); + + vector is_input_const(4, true); + op_def->SetIsInputConst(is_input_const); + AttrUtils::SetInt(op_def, ATTR_NAME_T, DT_UNDEFINED); + + // Add weights + vector op_weights = OpDescUtils::MutableWeights(node_tmp); + ge::GeShape op_shape({4}); + GeTensorDesc op_desc(op_shape, FORMAT_NCHW, DT_UNDEFINED); + int32_t value[4]; + value[0] = 1; + value[1] = 2; + value[2] = 3; + value[3] = 4; + GeTensorPtr weight0 = std::make_shared(op_desc, (uint8_t 
*)value, 4 * sizeof(int32_t)); + + GeTensorDesc op_desc_1(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value1 = 0; + GeTensorPtr weight1 = std::make_shared(op_desc_1, (uint8_t *)&value1, sizeof(int32_t)); + int32_t value2 = 2; + GeTensorPtr weight2 = std::make_shared(op_desc_1, (uint8_t *)&value2, sizeof(int32_t)); + + op_weights.push_back(weight0); + op_weights.push_back(weight1); + op_weights.push_back(weight2); + OpDescUtils::SetWeights(node_tmp, op_weights); + NodePtr node = graph->AddNode(node_tmp); + return node; + } + + NodePtr initNode_muldims(ComputeGraphPtr graph) { + OpDescPtr op_def = std::make_shared("op_def", "Slice"); + auto node_tmp = GenNodeFromOpDesc(op_def); + + vector is_input_const(4, true); + op_def->SetIsInputConst(is_input_const); + AttrUtils::SetInt(op_def, ATTR_NAME_T, DT_INT32); + + // Add weights + vector op_weights = OpDescUtils::MutableWeights(node_tmp); + vector dims(4, 2); + ge::GeShape op_shape(dims); + GeTensorDesc op_desc(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value[2][2][2][2] = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + + GeTensorPtr weight0 = std::make_shared(op_desc, (uint8_t *)value, 16 * sizeof(int32_t)); + + GeTensorDesc op_desc_1(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value1[4] = {1, 1, 1, 1}; + GeTensorPtr weight1 = std::make_shared(op_desc_1, (uint8_t *)value1, 4 * sizeof(int32_t)); + int32_t value2[4] = {1, 1, 1, 1}; + GeTensorPtr weight2 = std::make_shared(op_desc_1, (uint8_t *)value2, 4 * sizeof(int32_t)); + + op_weights.push_back(weight0); + op_weights.push_back(weight1); + op_weights.push_back(weight2); + OpDescUtils::SetWeights(node_tmp, op_weights); + NodePtr node = graph->AddNode(node_tmp); + return node; + } + + NodePtr initNode_3muldims(ComputeGraphPtr graph) { + OpDescPtr op_def = std::make_shared("op_def", "Slice"); + auto node_tmp = GenNodeFromOpDesc(op_def); + + vector is_input_const(4, true); + op_def->SetIsInputConst(is_input_const); + AttrUtils::SetInt(op_def, ATTR_NAME_T, 
DT_INT32); + + // Add weights + vector op_weights = OpDescUtils::MutableWeights(node_tmp); + vector dims(3, 2); + ge::GeShape op_shape(dims); + GeTensorDesc op_desc(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value[2][2][2] = {1, 2, 3, 4, 5, 6, 7, 8}; + + GeTensorPtr weight0 = std::make_shared(op_desc, (uint8_t *)value, 8 * sizeof(int32_t)); + + GeTensorDesc op_desc_1(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value1[3] = {1, 1, 1}; + GeTensorPtr weight1 = std::make_shared(op_desc_1, (uint8_t *)value1, 3 * sizeof(int32_t)); + int32_t value2[3] = {1, 1, 1}; + GeTensorPtr weight2 = std::make_shared(op_desc_1, (uint8_t *)value2, 3 * sizeof(int32_t)); + + op_weights.push_back(weight0); + op_weights.push_back(weight1); + op_weights.push_back(weight2); + OpDescUtils::SetWeights(node_tmp, op_weights); + NodePtr node = graph->AddNode(node_tmp); + return node; + } + + NodePtr initNode_2muldims(ComputeGraphPtr graph) { + OpDescPtr op_def = std::make_shared("op_def", "Slice"); + auto node_tmp = GenNodeFromOpDesc(op_def); + + vector is_input_const(4, true); + op_def->SetIsInputConst(is_input_const); + AttrUtils::SetInt(op_def, ATTR_NAME_T, DT_INT32); + + // Add weights + vector op_weights = OpDescUtils::MutableWeights(node_tmp); + vector dims(2, 2); + ge::GeShape op_shape(dims); + GeTensorDesc op_desc(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value[2][2] = {1, 2, 3, 4}; + + GeTensorPtr weight0 = std::make_shared(op_desc, (uint8_t *)value, 4 * sizeof(int32_t)); + + GeTensorDesc op_desc_1(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value1[2] = {1, 1}; + GeTensorPtr weight1 = std::make_shared(op_desc_1, (uint8_t *)value1, 2 * sizeof(int32_t)); + int32_t value2[2] = {1, -1}; + GeTensorPtr weight2 = std::make_shared(op_desc_1, (uint8_t *)value2, 2 * sizeof(int32_t)); + + op_weights.push_back(weight0); + op_weights.push_back(weight1); + op_weights.push_back(weight2); + OpDescUtils::SetWeights(node_tmp, op_weights); + NodePtr node = graph->AddNode(node_tmp); + return node; + } + + 
NodePtr initNode_1muldims(ComputeGraphPtr graph) { + OpDescPtr op_def = std::make_shared("op_def", "Slice"); + auto node_tmp = GenNodeFromOpDesc(op_def); + + vector is_input_const(4, true); + op_def->SetIsInputConst(is_input_const); + AttrUtils::SetInt(op_def, ATTR_NAME_T, DT_INT32); + + // Add weights + vector op_weights = OpDescUtils::MutableWeights(node_tmp); + ge::GeShape op_shape({2}); + GeTensorDesc op_desc(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value[2] = { + 1, + 2, + }; + + GeTensorPtr weight0 = std::make_shared(op_desc, (uint8_t *)value, 2 * sizeof(int32_t)); + + GeTensorDesc op_desc_1(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value1[1] = {1}; + GeTensorPtr weight1 = std::make_shared(op_desc_1, (uint8_t *)value1, 1 * sizeof(int32_t)); + int32_t value2[1] = {1}; + GeTensorPtr weight2 = std::make_shared(op_desc_1, (uint8_t *)value2, 1 * sizeof(int32_t)); + + op_weights.push_back(weight0); + op_weights.push_back(weight1); + op_weights.push_back(weight2); + OpDescUtils::SetWeights(node_tmp, op_weights); + NodePtr node = graph->AddNode(node_tmp); + return node; + } + + NodePtr initNode_size_not_equal_fail(ComputeGraphPtr graph) { + OpDescPtr op_def = std::make_shared("op_def", "Slice"); + auto node_tmp = GenNodeFromOpDesc(op_def); + OpDescPtr child_opdef = std::make_shared("child_opdef", "test"); + child_opdef->SetIsInputConst({false}); + + vector is_input_const(3, true); + op_def->SetIsInputConst(is_input_const); + AttrUtils::SetInt(op_def, ATTR_NAME_T, DT_INT32); + + // Add weights + vector op_weights = OpDescUtils::MutableWeights(node_tmp); + ge::GeShape op_shape({3}); + GeTensorDesc op_desc(op_shape, FORMAT_NCHW, DT_INT32); + int32_t value[3] = {1, 2, 3}; + + GeTensorPtr weight0 = std::make_shared(op_desc, (uint8_t *)value, 3 * sizeof(int32_t)); + + GeTensorDesc op_desc_1(op_shape, FORMAT_NCHW, DT_INT32); + int value1 = 0; + GeTensorPtr weight1 = std::make_shared(op_desc_1, (uint8_t *)&value1, 1 * sizeof(int32_t)); + int32_t value2[2] = {0, 1}; + 
GeTensorPtr weight2 = std::make_shared(op_desc_1, (uint8_t *)value2, 2 * sizeof(int32_t)); + + op_weights.push_back(weight0); + op_weights.push_back(weight1); + op_weights.push_back(weight2); + OpDescUtils::SetWeights(node_tmp, op_weights); + NodePtr node = graph->AddNode(node_tmp); + NodePtr child_node = graph->AddNode(child_opdef); + return node; + } +}; + +/// test func:SliceKernel::Compute +/// case:optimize op of int +/// result: optimize op of slice success +TEST_F(UtestFoldingSliceKernel, SliceOptimizerIntSuccess) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = initNode_int32(graph); + Status ret = pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} + +/// test func:SliceKernel::Compute +/// case:optimize op of float +/// result: optimize op of slice success +TEST_F(UtestFoldingSliceKernel, SliceOptimizerFloatSuccess) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = initNode_float(graph); + Status ret = pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} + +/// test func:SliceKernel::Compute +/// case:optimize op of initNode_errtype +/// result: optimize op of slice success +TEST_F(UtestFoldingSliceKernel, SliceOptimizerErrtypeSuccess) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = initNode_errtype(graph); + Status ret = pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} + +/// test func:SliceKernel::Compute +/// case:optimize op of initNode_muldims +/// result: optimize op of slice success +TEST_F(UtestFoldingSliceKernel, SliceOptimizerIntMulDims) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = initNode_muldims(graph); + Status ret = pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} + +/// test func:SliceKernel::Compute +/// case:optimize op of initNode_3muldims +/// result: optimize op of slice success +TEST_F(UtestFoldingSliceKernel, SliceOptimizerInt3MulDims) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = initNode_3muldims(graph); + Status ret = 
pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} + +/// test func:SliceKernel::Compute +/// case:optimize op of initNode_2muldims +/// result: optimize op of slice success +TEST_F(UtestFoldingSliceKernel, SliceOptimizerInt2MulDims) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = initNode_2muldims(graph); + Status ret = pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} +/// test func:SliceKernel::Compute +/// case:optimize op of initNode_1muldims +/// result: optimize op of slice success +TEST_F(UtestFoldingSliceKernel, SliceOptimizerInt1MulDims) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = initNode_1muldims(graph); + Status ret = pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} + +/// test func:SliceKernel::Compute +/// case:optimize op of initNode_size_not_equal_fail +/// result: optimize op of slice success +TEST_F(UtestFoldingSliceKernel, SliceOptimizerSizeNotEqual) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = initNode_size_not_equal_fail(graph); + Status ret = pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/squeeze_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/squeeze_kernel_unittest.cc new file mode 100644 index 00000000..160c69a0 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/squeeze_kernel_unittest.cc @@ -0,0 +1,338 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/squeeze_kernel.h" + +#include "../graph_builder_utils.h" +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelSqueenzeKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + template + void TestSqueeze(vector &data_vec, vector &dim_value_vec, vector &result) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, data_type); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(inner_data_type)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add squeeze node + OpDescPtr expanddim_op_desc = std::make_shared("Squeeze", SQUEEZE); + if (!dim_value_vec.empty()) { + AttrUtils::SetListInt(expanddim_op_desc, SQUEEZE_ATTR_AXIS, dim_value_vec); + } + expanddim_op_desc->AddInputDesc(data_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + 
GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + + shared_ptr kernel = KernelFactory::Instance().Create(SQUEEZE); + Status status = kernel->Compute(op_node); + EXPECT_EQ(ge::SUCCESS, status); + } + + template + void TestInvalidSqueeze(vector &data_vec, vector &dim_value_vec, vector &result) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), format, data_type); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(inner_data_type)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add squeeze node + OpDescPtr expanddim_op_desc = std::make_shared("Squeeze", SQUEEZE); + if (!dim_value_vec.empty()) { + AttrUtils::SetListInt(expanddim_op_desc, SQUEEZE_ATTR_AXIS, dim_value_vec); + } + expanddim_op_desc->AddInputDesc(data_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + + shared_ptr kernel = KernelFactory::Instance().Create(SQUEEZE); + Status status = kernel->Compute(op_node); + EXPECT_NE(ge::SUCCESS, status); + } +}; +namespace { + +/// netoutput1 +/// | +/// Squeeze +/// | +/// const1 +ComputeGraphPtr BuildGraph() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto squeeze = builder.AddNode("squeeze1", SQUEEZE, 1, 1); + auto netoutput1 = builder.AddNode("netoutput1", NETOUTPUT, 1, 0); + + builder.AddDataEdge(const1, 0, squeeze, 0); + 
builder.AddDataEdge(squeeze, 0, netoutput1, 0); + + return builder.GetGraph(); +} +} // namespace + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int8Case0) { + vector data_vec = {1, 1, 1, 2, 3}; + vector dim_value_vec = {0}; + vector result = {1, 1, 2, 3}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int8Case1) { + vector data_vec = {1, 1, 1, 2, 3}; + vector dim_value_vec = {0, 1}; + vector result = {1, 2, 3}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int8Case2) { + vector data_vec = {1, 1, 1, 2, 3}; + vector dim_value_vec = {0, 1, 2}; + vector result = {2, 3}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int8NegativeCase1) { + vector data_vec = {1, 1, 1, 2, 3}; + vector dim_value_vec = {-5}; + vector result = {1, 1, 2, 3}; + + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int8NegativeCase2) { + vector data_vec = {1, 1, 1, 2, 3}; + vector dim_value_vec = {-5, -4}; + vector result = {1, 2, 3}; + + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int8NegativeCase3) { + vector data_vec = {1, 1, 1, 2, 3}; + vector dim_value_vec = {-5, -4, -3}; + vector result = {2, 3}; + + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int16) { + vector data_vec = {1, 1, 2}; + vector dim_value_vec = {-3}; + vector result = {1, 2}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int32) { + vector data_vec = {3, 3, 3, 1, 6}; + vector dim_value_vec = {3}; + vector result = {3, 3, 3, 6}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Int64) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {1}; + 
vector result = {6, 12, 3, 4, 56, 7}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Uint8) { + vector data_vec = {2, 1}; + vector dim_value_vec = {1}; + vector result = {2}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Uint16) { + vector data_vec = {1, 3}; + vector dim_value_vec = {0}; + vector result = {3}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Uint32) { + vector data_vec = {3, 3, 3, 5, 1}; + vector dim_value_vec = {4}; + vector result = {3, 3, 3, 5}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Uint64) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {1}; + vector result = {6, 12, 3, 4, 56, 7}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Fp16) { + vector data_vec = {6, 1, 12, 3, 4, 56, 7}; + vector dim_value_vec = {1}; + vector result = {6, 12, 3, 4, 56, 7}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Float) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {0, 1, 2, 4, 5, 6}; + vector result = {11}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, Double) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {0, 1, 2, 4, 5, 6}; + vector result = {11}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, NodeIsNull) { + NodePtr op_node = nullptr; + shared_ptr kernel = KernelFactory::Instance().Create(SQUEEZE); + Status status = kernel->Compute(op_node); + EXPECT_NE(domi::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, BoolInt64) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector 
dim_value_vec = {0, 1, 2, 4, 5, 6}; + vector result = {11}; + + ge::ComputeGraphPtr graph = std::make_shared("default"); + ge::OpDescPtr data_op_desc = std::make_shared("data", CONSTANTOP); + int64_t dims_size = 1; + for_each(data_vec.begin(), data_vec.end(), [&](int64_t &data) { dims_size *= data; }); + vector data_value_vec(dims_size, 1); + GeTensorDesc data_tensor_desc(GeShape(data_vec), FORMAT_NCHW, DT_BOOL); + GeTensorPtr data_tensor = std::make_shared(data_tensor_desc, (uint8_t *)data_value_vec.data(), + data_value_vec.size() * sizeof(uint8_t)); + OpDescUtils::SetWeights(data_op_desc, data_tensor); + data_op_desc->AddOutputDesc(data_tensor_desc); + NodePtr data_node = graph->AddNode(data_op_desc); + data_node->Init(); + + // add expanddims node + OpDescPtr expanddim_op_desc = std::make_shared("Squeeze", SQUEEZE); + if (!dim_value_vec.empty()) { + AttrUtils::SetListInt(expanddim_op_desc, SQUEEZE_ATTR_AXIS, dim_value_vec); + } + expanddim_op_desc->AddInputDesc(data_tensor_desc); + NodePtr op_node = graph->AddNode(expanddim_op_desc); + op_node->Init(); + + // add edge + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), op_node->GetInDataAnchor(0)); + + shared_ptr kernel = KernelFactory::Instance().Create(SQUEEZE); + Status status = kernel->Compute(op_node); + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, DoubleNotAttr) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec; + vector result = {11}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, DoubleContainSameDims) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {0, 1, 0}; + vector result = {1, 11, 1, 1, 1}; + TestSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, DoubleContainSameDimsInvalidFormat) { + vector data_vec = {1, 1, 1, 11, 1, 1, 1}; + vector dim_value_vec = {0, 1, 0}; + vector result = {1, 11, 1, 1, 1}; + 
TestInvalidSqueeze(data_vec, dim_value_vec, result); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, ConstFoldingSuccess) { + auto graph = BuildGraph(); + std::vector v_output; + std::vector inputs; + ConstGeTensorPtr data_tensor = std::make_shared(); + + inputs.push_back(data_tensor); + shared_ptr kernel = KernelFactory::Instance().Create(SQUEEZE); + Status status = kernel->Compute(graph->FindNode("squeeze1")->GetOpDesc(), inputs, v_output); + EXPECT_EQ(ge::SUCCESS, status); + EXPECT_EQ(1, v_output.size()); +} + +TEST_F(UtestGraphPassesFoldingKernelSqueenzeKernel, ConstFoldingUnsuccess) { + auto graph = BuildGraph(); + std::vector v_output; + std::vector inputs; + + shared_ptr kernel = KernelFactory::Instance().Create(SQUEEZE); + Status status = kernel->Compute(graph->FindNode("squeeze1")->GetOpDesc(), inputs, v_output); + EXPECT_EQ(NOT_CHANGED, status); + status = kernel->Compute(nullptr, inputs, v_output); + EXPECT_EQ(PARAM_INVALID, status); + + std::vector inputs_invalid; + inputs_invalid.push_back(nullptr); + status = kernel->Compute(graph->FindNode("squeeze1")->GetOpDesc(), inputs_invalid, v_output); + EXPECT_EQ(PARAM_INVALID, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/ssd_prior_box_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/ssd_prior_box_kernel_unittest.cc new file mode 100644 index 00000000..8fe77b93 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/ssd_prior_box_kernel_unittest.cc @@ -0,0 +1,421 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/ssd_prior_box_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelSsdPriorboxKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +/// convolution data +/// | / +/// ssdpriorbox +/// \ +/// reshape +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, 
ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + ge::OpDescPtr op_desc_; +}; +void make_graph_ssd(ComputeGraphPtr &graph, vector temp_aspect_ratios, vector max_size, + vector min_size, vector variances, bool flip) { + NodePtr data_node = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + NodePtr conv_node = NodeBuilder("Conv2D", CONV2D).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + NodePtr ssd_priorbox_node = NodeBuilder("SSDPriorBox", SSDPRIORBOX) + .AddInputDesc({10, 10, 10, 10}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + NodePtr reshape_node = + NodeBuilder("reshape", RESHAPE).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), conv_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), ssd_priorbox_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(ssd_priorbox_node->GetOutDataAnchor(0), reshape_node->GetInDataAnchor(0)); + + auto ssdPriorbox_op = ssd_priorbox_node->GetOpDesc(); + AttrUtils::SetFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_OFFSET, 0.5); + AttrUtils::SetListFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_ASPECT_RATIO, temp_aspect_ratios); + AttrUtils::SetListFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_MAX_SIZE, max_size); + AttrUtils::SetListFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_MIN_SIZE, min_size); + AttrUtils::SetBool(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_CLIP, true); + AttrUtils::SetBool(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_FLIP, flip); + AttrUtils::SetInt(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_IMG_H, 100); + AttrUtils::SetInt(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_IMG_W, 100); + 
AttrUtils::SetFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_STEP_H, 0); + AttrUtils::SetFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_STEP_W, 0); + AttrUtils::SetListFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_VARIANCE, variances); + AttrUtils::SetInt(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_VARIANCE_NUM, 1); +} + +void make_graph_ssd_for_failed(ComputeGraphPtr &graph, vector temp_aspect_ratios, vector max_size, + vector min_size, vector variances, bool flip, bool clip) { + NodePtr data_node = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + NodePtr conv_node = NodeBuilder("Conv2D", CONV2D).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + NodePtr ssd_priorbox_node = NodeBuilder("SSDPriorBox", SSDPRIORBOX) + .AddInputDesc({10, 10, 10, 10}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + NodePtr reshape_node = NodeBuilder("reshape", RESHAPE) + .AddInputDesc({10, 10, 10, 10}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), conv_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), ssd_priorbox_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(ssd_priorbox_node->GetOutDataAnchor(0), reshape_node->GetInDataAnchor(0)); + + auto ssdPriorbox_op = ssd_priorbox_node->GetOpDesc(); + + AttrUtils::SetFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_OFFSET, 0.5); + if (temp_aspect_ratios.size() != 0) { + AttrUtils::SetListFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_ASPECT_RATIO, temp_aspect_ratios); + } + if (max_size.size() != 0) { + AttrUtils::SetListFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_MAX_SIZE, max_size); + } + if (min_size.size() != 0) { + AttrUtils::SetListFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_MIN_SIZE, min_size); + } + if (clip) { + AttrUtils::SetBool(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_CLIP, true); + } + AttrUtils::SetBool(ssdPriorbox_op, 
SSD_PRIOR_BOX_ATTR_FLIP, flip); + AttrUtils::SetInt(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_IMG_H, 100); + AttrUtils::SetInt(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_IMG_W, 100); + AttrUtils::SetFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_STEP_H, 0); + AttrUtils::SetFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_STEP_W, 0); + if (variances.size() != 0) { + AttrUtils::SetListFloat(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_VARIANCE, variances); + } + AttrUtils::SetInt(ssdPriorbox_op, SSD_PRIOR_BOX_ATTR_VARIANCE_NUM, 1); +} +TEST_F(UtestGraphPassesFoldingKernelSsdPriorboxKernel, ComputeFailed) { + ComputeGraphPtr compute_graph1 = std::make_shared("test"); + make_graph_ssd_for_failed(compute_graph1, {}, {9}, {4}, {0.1}, true, true); + NodePtr ssd_priorbox_node1 = compute_graph1->FindNode("SSDPriorBox"); + DimensionComputePass pass; + ge::Status ret = pass.Run(ssd_priorbox_node1); + EXPECT_EQ(PARAM_INVALID, ret); + ComputeGraphPtr compute_graph2 = std::make_shared("test"); + make_graph_ssd_for_failed(compute_graph2, {1}, {}, {4}, {0.1}, true, true); + NodePtr ssd_priorbox_node2 = compute_graph2->FindNode("SSDPriorBox"); + ret = pass.Run(ssd_priorbox_node2); + EXPECT_EQ(PARAM_INVALID, ret); + + ComputeGraphPtr compute_graph3 = std::make_shared("test"); + make_graph_ssd_for_failed(compute_graph3, {1}, {9}, {}, {0.1}, true, true); + NodePtr ssd_priorbox_node3 = compute_graph3->FindNode("SSDPriorBox"); + ret = pass.Run(ssd_priorbox_node3); + EXPECT_EQ(PARAM_INVALID, ret); + + ComputeGraphPtr compute_graph4 = std::make_shared("test"); + make_graph_ssd_for_failed(compute_graph4, {1}, {9}, {4}, {}, true, true); + NodePtr ssd_priorbox_node4 = compute_graph4->FindNode("SSDPriorBox"); + ret = pass.Run(ssd_priorbox_node4); + EXPECT_EQ(PARAM_INVALID, ret); + + ComputeGraphPtr compute_graph5 = std::make_shared("test"); + make_graph_ssd_for_failed(compute_graph5, {1}, {9}, {4}, {}, true, false); + NodePtr ssd_priorbox_node5 = compute_graph5->FindNode("SSDPriorBox"); + ret = pass.Run(ssd_priorbox_node5); + 
EXPECT_EQ(PARAM_INVALID, ret); +} +TEST_F(UtestGraphPassesFoldingKernelSsdPriorboxKernel, ComputeSuccess) { + ComputeGraphPtr compute_graph = std::make_shared("test"); + make_graph_ssd(compute_graph, {1}, {9}, {4}, {0.1}, true); + + NodePtr ssd_priorbox_node = compute_graph->FindNode("SSDPriorBox"); + DimensionComputePass pass; + ge::Status ret = pass.Run(ssd_priorbox_node); + EXPECT_EQ(SUCCESS, ret); + NodePtr reshape_node = compute_graph->FindNode("reshape"); + vector out_weights = OpDescUtils::GetWeights(reshape_node); + + const float eps = 1e-6; + if (out_weights.size() >= 1) { + int32_t dim_size = out_weights[0]->GetTensorDesc().GetShape().GetDim(2); + EXPECT_EQ(10 * 10 * 2 * 4, dim_size); + const float *top_data = (const float *)out_weights[0]->GetData().data(); + /// pick a few generated priors and compare against the expected number. + /// first prior + EXPECT_NEAR(top_data[0], 0.03, eps); + EXPECT_NEAR(top_data[1], 0.03, eps); + EXPECT_NEAR(top_data[2], 0.07, eps); + EXPECT_NEAR(top_data[3], 0.07, eps); + // second prior + EXPECT_NEAR(top_data[4], 0.02, eps); + EXPECT_NEAR(top_data[5], 0.02, eps); + EXPECT_NEAR(top_data[6], 0.08, eps); + EXPECT_NEAR(top_data[7], 0.08, eps); + // prior in the 5-th row and 5-th col + EXPECT_NEAR(top_data[4 * 10 * 2 * 4 + 4 * 2 * 4], 0.43, eps); + EXPECT_NEAR(top_data[4 * 10 * 2 * 4 + 4 * 2 * 4 + 1], 0.43, eps); + EXPECT_NEAR(top_data[4 * 10 * 2 * 4 + 4 * 2 * 4 + 2], 0.47, eps); + EXPECT_NEAR(top_data[4 * 10 * 2 * 4 + 4 * 2 * 4 + 3], 0.47, eps); + + // check variance + top_data += dim_size; + for (int d = 0; d < dim_size; ++d) { + EXPECT_NEAR(top_data[d], 0.1, eps); + } + } +} +TEST_F(UtestGraphPassesFoldingKernelSsdPriorboxKernel, AspectRatioNoflipSuccess) { + ComputeGraphPtr compute_graph = std::make_shared("test"); + make_graph_ssd(compute_graph, {1, 2}, {9}, {4}, {0.1}, false); + + NodePtr ssd_priorbox_node = compute_graph->FindNode("SSDPriorBox"); + DimensionComputePass pass; + ge::Status ret = 
pass.Run(ssd_priorbox_node); + EXPECT_EQ(SUCCESS, ret); + NodePtr reshape_node = compute_graph->FindNode("reshape"); + vector out_weights = OpDescUtils::GetWeights(reshape_node); + + const float eps = 1e-6; + if (out_weights.size() >= 1) { + int32_t dim_size = out_weights[0]->GetTensorDesc().GetShape().GetDim(2); + EXPECT_EQ(10 * 10 * 3 * 4, dim_size); + const float *top_data = (const float *)out_weights[0]->GetData().data(); + /// pick a few generated priors and compare against the expected number. + /// first prior + EXPECT_NEAR(top_data[0], 0.03, eps); + EXPECT_NEAR(top_data[1], 0.03, eps); + EXPECT_NEAR(top_data[2], 0.07, eps); + EXPECT_NEAR(top_data[3], 0.07, eps); + // second prior + EXPECT_NEAR(top_data[4], 0.02, eps); + EXPECT_NEAR(top_data[5], 0.02, eps); + EXPECT_NEAR(top_data[6], 0.08, eps); + EXPECT_NEAR(top_data[7], 0.08, eps); + // third prior + EXPECT_NEAR(top_data[8], 0.05 - 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[9], 0.05 - 0.01 * sqrt(2.), eps); + EXPECT_NEAR(top_data[10], 0.05 + 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[11], 0.05 + 0.01 * sqrt(2.), eps); + // prior in the 5-th row and 5-th col + EXPECT_NEAR(top_data[4 * 10 * 3 * 4 + 4 * 3 * 4], 0.43, eps); + EXPECT_NEAR(top_data[4 * 10 * 3 * 4 + 4 * 3 * 4 + 1], 0.43, eps); + EXPECT_NEAR(top_data[4 * 10 * 3 * 4 + 4 * 3 * 4 + 2], 0.47, eps); + EXPECT_NEAR(top_data[4 * 10 * 3 * 4 + 4 * 3 * 4 + 3], 0.47, eps); + // prior with ratio 1:2 in the 5-th row and 5-th col + EXPECT_NEAR(top_data[4 * 10 * 3 * 4 + 4 * 3 * 4 + 8], 0.45 - 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[4 * 10 * 3 * 4 + 4 * 3 * 4 + 9], 0.45 - 0.01 * sqrt(2.), eps); + EXPECT_NEAR(top_data[4 * 10 * 3 * 4 + 4 * 3 * 4 + 10], 0.45 + 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[4 * 10 * 3 * 4 + 4 * 3 * 4 + 11], 0.45 + 0.01 * sqrt(2.), eps); + // check variance + top_data += dim_size; + for (int d = 0; d < dim_size; ++d) { + EXPECT_NEAR(top_data[d], 0.1, eps); + } + } +} +TEST_F(UtestGraphPassesFoldingKernelSsdPriorboxKernel, 
AspectratioMultiSizeSuccess) { + ComputeGraphPtr compute_graph = std::make_shared("test"); + make_graph_ssd(compute_graph, {1, 2, 0.5}, {9, 18}, {4, 8}, {0.1}, true); + + NodePtr ssd_priorbox_node = compute_graph->FindNode("SSDPriorBox"); + DimensionComputePass pass; + ge::Status ret = pass.Run(ssd_priorbox_node); + EXPECT_EQ(SUCCESS, ret); + NodePtr reshape_node = compute_graph->FindNode("reshape"); + vector out_weights = OpDescUtils::GetWeights(reshape_node); + + const float eps = 1e-6; + if (out_weights.size() >= 1) { + int32_t dim_size = out_weights[0]->GetTensorDesc().GetShape().GetDim(2); + EXPECT_EQ(10 * 10 * 8 * 4, dim_size); + const float *top_data = (const float *)out_weights[0]->GetData().data(); + + /// pick a few generated priors and compare against the expected number. + /// first prior + EXPECT_NEAR(top_data[0], 0.03, eps); + EXPECT_NEAR(top_data[1], 0.03, eps); + EXPECT_NEAR(top_data[2], 0.07, eps); + EXPECT_NEAR(top_data[3], 0.07, eps); + // second prior + EXPECT_NEAR(top_data[4], 0.02, eps); + EXPECT_NEAR(top_data[5], 0.02, eps); + EXPECT_NEAR(top_data[6], 0.08, eps); + EXPECT_NEAR(top_data[7], 0.08, eps); + // third prior + EXPECT_NEAR(top_data[8], 0.05 - 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[9], 0.05 - 0.01 * sqrt(2.), eps); + EXPECT_NEAR(top_data[10], 0.05 + 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[11], 0.05 + 0.01 * sqrt(2.), eps); + // forth prior + EXPECT_NEAR(top_data[12], 0.05 - 0.01 * sqrt(2.), eps); + EXPECT_NEAR(top_data[13], 0.05 - 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[14], 0.05 + 0.01 * sqrt(2.), eps); + EXPECT_NEAR(top_data[15], 0.05 + 0.02 * sqrt(2.), eps); + // fifth prior + EXPECT_NEAR(top_data[16], 0.01, eps); + EXPECT_NEAR(top_data[17], 0.01, eps); + EXPECT_NEAR(top_data[18], 0.09, eps); + EXPECT_NEAR(top_data[19], 0.09, eps); + // sixth prior + EXPECT_NEAR(top_data[20], 0.00, eps); + EXPECT_NEAR(top_data[21], 0.00, eps); + EXPECT_NEAR(top_data[22], 0.11, eps); + EXPECT_NEAR(top_data[23], 0.11, eps); + // 
seventh prior + EXPECT_NEAR(top_data[24], 0.00, eps); + EXPECT_NEAR(top_data[25], 0.05 - 0.04 / sqrt(2.), eps); + EXPECT_NEAR(top_data[26], 0.05 + 0.04 * sqrt(2.), eps); + EXPECT_NEAR(top_data[27], 0.05 + 0.04 / sqrt(2.), eps); + // forth prior + EXPECT_NEAR(top_data[28], 0.05 - 0.04 / sqrt(2.), eps); + EXPECT_NEAR(top_data[29], 0.00, eps); + EXPECT_NEAR(top_data[30], 0.05 + 0.04 / sqrt(2.), eps); + EXPECT_NEAR(top_data[31], 0.05 + 0.04 * sqrt(2.), eps); + // prior in the 5-th row and 5-th col + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4], 0.43, eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 1], 0.43, eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 2], 0.47, eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 3], 0.47, eps); + // prior with ratio 1:2 in the 5-th row and 5-th col + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 8], 0.45 - 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 9], 0.45 - 0.01 * sqrt(2.), eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 10], 0.45 + 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 11], 0.45 + 0.01 * sqrt(2.), eps); + // prior with ratio 2:1 in the 5-th row and 5-th col + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 12], 0.45 - 0.01 * sqrt(2.), eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 13], 0.45 - 0.02 * sqrt(2.), eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 14], 0.45 + 0.01 * sqrt(2.), eps); + EXPECT_NEAR(top_data[8 * 10 * 4 * 4 + 8 * 4 * 4 + 15], 0.45 + 0.02 * sqrt(2.), eps); + + // check variance + top_data += dim_size; + for (int d = 0; d < dim_size; ++d) { + EXPECT_NEAR(top_data[d], 0.1, eps); + } + } +} +TEST_F(UtestGraphPassesFoldingKernelSsdPriorboxKernel, MultiVarianceSuccess) { + ComputeGraphPtr compute_graph = std::make_shared("test"); + make_graph_ssd(compute_graph, {1}, {9}, {4}, {0.1, 0.2, 0.3, 0.4}, true); + + NodePtr ssd_priorbox_node = compute_graph->FindNode("SSDPriorBox"); + 
DimensionComputePass pass; + ge::Status ret = pass.Run(ssd_priorbox_node); + EXPECT_EQ(SUCCESS, ret); + NodePtr reshape_node = compute_graph->FindNode("reshape"); + vector out_weights = OpDescUtils::GetWeights(reshape_node); + + const float eps = 1e-6; + if (out_weights.size() >= 1) { + int32_t dim_size = out_weights[0]->GetTensorDesc().GetShape().GetDim(2); + EXPECT_EQ(10 * 10 * 2 * 4, dim_size); + const float *top_data = (const float *)out_weights[0]->GetData().data(); + EXPECT_NEAR(top_data[0], 0.03, eps); + EXPECT_NEAR(top_data[1], 0.03, eps); + EXPECT_NEAR(top_data[2], 0.07, eps); + EXPECT_NEAR(top_data[3], 0.07, eps); + // second prior + EXPECT_NEAR(top_data[4], 0.02, eps); + EXPECT_NEAR(top_data[5], 0.02, eps); + EXPECT_NEAR(top_data[6], 0.08, eps); + EXPECT_NEAR(top_data[7], 0.08, eps); + // prior in the 5-th row and 5-th col + EXPECT_NEAR(top_data[4 * 10 * 2 * 4 + 4 * 2 * 4], 0.43, eps); + EXPECT_NEAR(top_data[4 * 10 * 2 * 4 + 4 * 2 * 4 + 1], 0.43, eps); + EXPECT_NEAR(top_data[4 * 10 * 2 * 4 + 4 * 2 * 4 + 2], 0.47, eps); + EXPECT_NEAR(top_data[4 * 10 * 2 * 4 + 4 * 2 * 4 + 3], 0.47, eps); + + // check variance + top_data += dim_size; + for (int d = 0; d < dim_size; ++d) { + EXPECT_NEAR(top_data[d], 0.1 * (d % 4 + 1), eps); + } + } +} + +TEST_F(UtestGraphPassesFoldingKernelSsdPriorboxKernel, AllSuccess) { + int num_priors = 0; + int dim_size = 0; + SsdPriorboxKernel kernal; + ge::Status ret = kernal.GetNumPriorAndDimSize(2, 2, 2, 2, 2, num_priors, dim_size); + EXPECT_EQ(SUCCESS, ret); +} + +TEST_F(UtestGraphPassesFoldingKernelSsdPriorboxKernel, ParamInvalid) { + int num_priors = 0; + int dim_size = 0; + SsdPriorboxKernel kernal; + ge::Status ret = kernal.GetNumPriorAndDimSize(2 * 1024 * 1024 * 1024, 2, 1, 1, 1, num_priors, dim_size); + EXPECT_EQ(PARAM_INVALID, ret); + + ret = kernal.GetNumPriorAndDimSize(4 * 1024 * 1024 * 1024 - 1, 1, 1, 1, 1, num_priors, dim_size); + EXPECT_EQ(PARAM_INVALID, ret); + + ret = kernal.GetNumPriorAndDimSize(2 * 1024 * 1024 * 
1024 - 1, 1, 1, 1, 1, num_priors, dim_size); + EXPECT_EQ(PARAM_INVALID, ret); + + ret = kernal.GetNumPriorAndDimSize(1, 1, 1, 1 * 1024 * 1024 * 1024, 2, num_priors, dim_size); + EXPECT_EQ(PARAM_INVALID, ret); + + ret = kernal.GetNumPriorAndDimSize(1, 1, 1, 1024 * 1024 * 1024, 1, num_priors, dim_size); + EXPECT_EQ(PARAM_INVALID, ret); + + ret = kernal.GetNumPriorAndDimSize(1, 1, 1, 1024 * 1024 * 1024 - 1, 1, num_priors, dim_size); + EXPECT_EQ(PARAM_INVALID, ret); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/strided_slice_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/strided_slice_kernel_unittest.cc new file mode 100644 index 00000000..a2bb92f2 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/strided_slice_kernel_unittest.cc @@ -0,0 +1,604 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/strided_slice_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelStridedSliceKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, CheckInputSize) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test2) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + 
op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test3) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + 
ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test4) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + 
+TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test5) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test6) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + + GeTensorDesc 
dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test7) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 
1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test8) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + 
std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test9) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = 
nullptr; + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test10) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 1); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 1); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = nullptr; + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test11) { + OpDescPtr 
op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, DT_FLOAT16); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT16); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test12) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, 
STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 1); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test13) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 1); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc 
dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test14) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 1); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, 
dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test15) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 
data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test16) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t 
*)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + // EXPECT_EQ(PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelStridedSliceKernel, Test17) { + OpDescPtr op_desc_ptr = std::make_shared("StridedSlice", "StridedSlice"); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_BEGIN_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_END_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_ELLIPSIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_NEW_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, STRIDE_SLICE_ATTR_SHRINK_AXIS_MASK, 0); + AttrUtils::SetInt(op_desc_ptr, ATTR_NAME_T, 0); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_INT32); + op_desc_ptr->AddInputDesc(0, dims_tensor_desc); + op_desc_ptr->AddInputDesc(1, dims_tensor_desc); + op_desc_ptr->AddInputDesc(2, dims_tensor_desc); + op_desc_ptr->AddInputDesc(3, dims_tensor_desc); + + vector dims_vec_0 = {10, 10, 10, 10}; + vector data_vec_0 = {3, 3, 3, 3}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_1 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_2 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + ConstGeTensorPtr tensor_3 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * sizeof(float)); + + vector input = {tensor_0, tensor_1, tensor_2, 
tensor_3}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(STRIDEDSLICE); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/sub_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/sub_kernel_unittest.cc new file mode 100644 index 00000000..dd1a3067 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/sub_kernel_unittest.cc @@ -0,0 +1,246 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/folding_kernel/sub_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestFoldingKernelSubKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +template +void AssembleInput(vector &input, const GeTensorDesc &sub_desc, const T &sub1, const T &sub2) { + input.clear(); + T sub_0_value[1] = {sub1}; + ConstGeTensorPtr sub_0 = std::make_shared(sub_desc, (uint8_t *)sub_0_value, 1 * sizeof(T)); + + T sub_1_value[1] = {sub2}; + ConstGeTensorPtr sub_1 = std::make_shared(sub_desc, (uint8_t *)sub_1_value, 1 * sizeof(T)); + + input.push_back(sub_0); + input.push_back(sub_1); +} + +/// test func:SubKernel::Compute +/// case:optimize op of SUB +/// result: success +TEST_F(UtestFoldingKernelSubKernel, ComSuccessFloat) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_FLOAT); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (float)5, (float)3); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} +TEST_F(UtestFoldingKernelSubKernel, ComSuccessInt16) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_INT16); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (int16_t)5, (int16_t)3); + test_op->AddOutputDesc(sub_desc); + Status status = 
kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestFoldingKernelSubKernel, ComSuccessDouble) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_DOUBLE); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (double)5, (double)3); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestFoldingKernelSubKernel, ComSuccessInt8) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_INT8); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (int8_t)5, (int8_t)3); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestFoldingKernelSubKernel, ComSuccessUint8) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_UINT8); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (uint8_t)5, (uint8_t)3); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestFoldingKernelSubKernel, ComSuccessUint16) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_UINT16); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (uint16_t)5, (uint16_t)3); + test_op->AddOutputDesc(sub_desc); + 
Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestFoldingKernelSubKernel, ComSuccessInt32) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_INT32); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (int32_t)5, (int32_t)3); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} +TEST_F(UtestFoldingKernelSubKernel, ComSuccessInt64) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_INT64); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (int64_t)5, (int64_t)3); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestFoldingKernelSubKernel, ComSuccessUint32) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_UINT32); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (uint32_t)5, (uint32_t)3); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestFoldingKernelSubKernel, ComSuccessUint64) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_UINT64); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (uint64_t)5, (uint64_t)3); + 
test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestFoldingKernelSubKernel, StringComFailNotChange) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({1}); + GeTensorDesc sub_desc(sub_shape, ge::FORMAT_NCHW, DT_STRING); + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + + // float + AssembleInput(input, sub_desc, (int64_t)5, (int64_t)3); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + EXPECT_EQ(NOT_CHANGED, status); +} + +/// test func:SubKernel::Compute +/// case:optimize op of SUB +/// result: failed +TEST_F(UtestFoldingKernelSubKernel, ComFailed) { + OpDescPtr test_op = std::make_shared("test", "Test"); + vector input; + vector v_output; + ge::GeShape sub_shape({0}); + GeTensorDesc sub_desc(sub_shape); + float sub_0_value[1] = {3.0}; + ConstGeTensorPtr sub_0 = std::make_shared(sub_desc, (uint8_t *)sub_0_value, 1 * sizeof(float)); + + ge::GeShape sub_shape2({1}); + GeTensorDesc sub_desc2(sub_shape2); + float sub_1_value[1] = {5.0}; + ConstGeTensorPtr sub_1 = std::make_shared(sub_desc2, (uint8_t *)sub_1_value, 1 * sizeof(float)); + + input.push_back(sub_0); + input.push_back(sub_1); + + shared_ptr kernel = KernelFactory::Instance().Create(SUB); + test_op->AddOutputDesc(sub_desc); + Status status = kernel->Compute(test_op, input, v_output); + + EXPECT_EQ(SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/folding_kernel/transdata_kernel_unittest.cc b/tests/ut/ge/graph/passes/folding_kernel/transdata_kernel_unittest.cc new file mode 100644 index 00000000..341c9e82 --- /dev/null +++ b/tests/ut/ge/graph/passes/folding_kernel/transdata_kernel_unittest.cc @@ -0,0 +1,224 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in 
compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "graph/passes/folding_kernel/transdata_kernel.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/fp16_t.h" +#include "common/ge_inner_error_codes.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/passes/dimension_compute_pass.h" +#include "graph/passes/folding_kernel/kernel_utils.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" + +using namespace testing; +using namespace ge; + +class UtestGraphPassesFoldingKernelTransdataKernel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +/// test func:TransDataKernel::Compute +/// case:optimize op of TransData +/// result: success +TEST_F(UtestGraphPassesFoldingKernelTransdataKernel, ComputeParamInvalid1) { + OpDescPtr op_desc_ptr = std::make_shared("Transdata", "Transdata"); + auto tmp = op_desc_ptr->GetOutputDesc(0); + tmp.SetFormat(FORMAT_FRACTAL_Z); + tmp.SetDataType(DT_FLOAT16); + tmp.SetShape(GeShape({1, 1, 1, 1})); + + op_desc_ptr->UpdateOutputDesc(0, tmp); + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = 
{}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSDATA); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(1343225857, status); +} + +TEST_F(UtestGraphPassesFoldingKernelTransdataKernel, ComputeParamInvalid2) { + OpDescPtr op_desc_ptr = nullptr; + + vector dims_vec_0; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSDATA); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelTransdataKernel, ComputeParamInvalid3) { + OpDescPtr op_desc_ptr = std::make_shared("Transdata", "Transdata"); + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NHWC, DT_FLOAT); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT16); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), data_vec_0.size() * 2); + + vector input = {tensor_0}; + vector input2 = {}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSDATA); + ge::Status status = kernel->Compute(op_desc_ptr, input2, outputs); + EXPECT_EQ(ge::PARAM_INVALID, status); +} + +TEST_F(UtestGraphPassesFoldingKernelTransdataKernel, Compute1) { + OpDescPtr op_desc_ptr = std::make_shared("Transdata", "Transdata"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 16, 16}), FORMAT_FRACTAL_Z, DT_FLOAT); + GeTensorDesc dims_tensor_desc_in(GeShape({16, 16, 
1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + + vector dims_vec_0 = {16, 16, 1, 1}; + vector data_vec_0 = {16, 16, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSDATA); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelTransdataKernel, Compute2) { + OpDescPtr op_desc_ptr = std::make_shared("Transdata", "Transdata"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NHWC, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NHWC, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSDATA); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(ge::SUCCESS, status); +} + +TEST_F(UtestGraphPassesFoldingKernelTransdataKernel, Compute3) { + OpDescPtr op_desc_ptr = std::make_shared("Transdata", "Transdata"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NHWC, DT_FLOAT); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NHWC, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc 
tensor_desc_0(GeShape(dims_vec_0), FORMAT_NHWC, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSDATA); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelTransdataKernel, Compute4) { + OpDescPtr op_desc_ptr = std::make_shared("Transdata", "Transdata"); + + GeTensorDesc dims_tensor_desc(GeShape({1, 1, 1, 1}), FORMAT_NCHW, DT_FLOAT16); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NHWC, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NHWC, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + shared_ptr kernel = KernelFactory::Instance().Create(TRANSDATA); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesFoldingKernelTransdataKernel, Compute5) { + OpDescPtr op_desc_ptr = std::make_shared("Transdata", "Transdata"); + + GeTensorDesc dims_tensor_desc(GeShape(), FORMAT_NCHW, DT_FLOAT16); + op_desc_ptr->AddOutputDesc(dims_tensor_desc); + GeTensorDesc dims_tensor_desc_in(GeShape({1, 1, 1, 1}), FORMAT_NHWC, DT_FLOAT); + op_desc_ptr->AddInputDesc(dims_tensor_desc_in); + + vector dims_vec_0 = {1, 1, 1, 1}; + vector data_vec_0 = {1, 1, 1, 1}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NHWC, DT_FLOAT); + ConstGeTensorPtr tensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)data_vec_0.data(), 1 * sizeof(float)); + + vector input = {tensor_0}; + vector outputs; + + 
shared_ptr kernel = KernelFactory::Instance().Create(TRANSDATA); + ge::Status status = kernel->Compute(op_desc_ptr, input, outputs); + + EXPECT_EQ(NOT_CHANGED, status); +} diff --git a/tests/ut/ge/graph/passes/get_original_format_pass_unittest.cc b/tests/ut/ge/graph/passes/get_original_format_pass_unittest.cc new file mode 100644 index 00000000..042db2e8 --- /dev/null +++ b/tests/ut/ge/graph/passes/get_original_format_pass_unittest.cc @@ -0,0 +1,241 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/get_original_format_pass.h" + +#include + +#include "omg/omg_inner_types.h" +#include "utils/op_desc_utils.h" + +using namespace ge; +using domi::GetContext; +using domi::DOMI_TENSOR_NCHW; + +class UtestGraphPassesGetOriginalFormatPass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + /// Set up a graph with the following network structure(A) + /// _ A _ + /// 1 2 + /// | | + /// B C + /// | + /// D + /// | + /// E + void make_graph(ComputeGraphPtr graph) { + OpDescPtr op_def_a = std::make_shared("A", "Data"); + OpDescPtr op_def_b = std::make_shared("B", "testh"); + OpDescPtr op_def_c = std::make_shared("C", "testi"); + OpDescPtr op_def_d = std::make_shared("D", "Permute"); + OpDescPtr op_def_e = std::make_shared("E", "testg"); + + vector dims(4, 1); + ge::GeShape shape(dims); + GeTensorDesc desc_anchor(shape); + + op_def_a->AddInputDesc(desc_anchor); + op_def_a->AddOutputDesc(desc_anchor); + op_def_a->AddOutputDesc(desc_anchor); + + op_def_b->AddInputDesc(desc_anchor); + + op_def_c->AddInputDesc(desc_anchor); + op_def_c->AddOutputDesc(desc_anchor); + + op_def_d->AddInputDesc(desc_anchor); + + GetContext().format = DOMI_TENSOR_NCHW; + + vector permute = {0U, 2U, 3U, 1U}; + AttrUtils::SetListInt(op_def_d, PERMUTE_ATTR_ORDER, permute); + + // Add node + NodePtr node_a = graph->AddNode(op_def_a); + NodePtr node_b = graph->AddNode(op_def_b); + NodePtr node_c = graph->AddNode(op_def_c); + NodePtr node_d = graph->AddNode(op_def_d); + NodePtr node_e = graph->AddNode(op_def_e); + + // Add edge + GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_a->GetOutDataAnchor(1), node_c->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_c->GetOutDataAnchor(1), node_d->GetInDataAnchor(0)); + } + + /// Set up a graph with the following network structure(A) + /// _ A _ + /// 1 2 + /// | | + /// B C + /// | | + /// D E + /// | + /// D + /// + void 
make_invalid_graph(ComputeGraphPtr graph) { + OpDescPtr op_def_a = std::make_shared("A", "Data"); + OpDescPtr op_def_b = std::make_shared("B", "testh"); + OpDescPtr op_def_c = std::make_shared("C", "Permute"); + OpDescPtr op_def_d = std::make_shared("D", "testd"); + OpDescPtr op_def_e = std::make_shared("E", "testg"); + + vector dims(4, 1); + ge::GeShape shape(dims); + GeTensorDesc desc_anchor(shape); + + op_def_a->AddInputDesc(desc_anchor); + op_def_a->AddOutputDesc(desc_anchor); + op_def_a->AddOutputDesc(desc_anchor); + + op_def_b->AddInputDesc(desc_anchor); + op_def_b->AddOutputDesc(desc_anchor); + + op_def_c->AddInputDesc(desc_anchor); + op_def_c->AddOutputDesc(desc_anchor); + + op_def_d->AddInputDesc(desc_anchor); + + GetContext().format = DOMI_TENSOR_NCHW; + + vector permute = {0U, 2U, 3U, 1U}; + AttrUtils::SetListInt(op_def_d, PERMUTE_ATTR_ORDER, permute); + + // Add node + NodePtr node_a = graph->AddNode(op_def_a); + NodePtr node_b = graph->AddNode(op_def_b); + NodePtr node_c = graph->AddNode(op_def_c); + NodePtr node_d = graph->AddNode(op_def_d); + NodePtr node_e = graph->AddNode(op_def_e); + + // Add edge + GraphUtils::AddEdge(node_a->GetOutDataAnchor(0), node_b->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_a->GetOutDataAnchor(1), node_c->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_c->GetOutDataAnchor(1), node_d->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_b->GetOutDataAnchor(0), node_d->GetInDataAnchor(0)); + } + + void CreateBiasaddNode(ComputeGraphPtr graph, int32_t flag) { + // Create Biasadd Node + OpDescPtr bias_op = std::make_shared("biasadd", BIASADD); + + vector dim(1, 4); + GeShape shape(dim); + GeTensorDesc out_desc(shape); + GeTensorPtr bias = std::make_shared(out_desc); + + // Create convolution node + OpDescPtr conv_op = std::make_shared("conv", MATMUL); + if (flag == 1) { + conv_op->SetType(CONVOLUTION); + } + // Create mul - Node + OpDescPtr mul_op = std::make_shared("mul", MUL); + + // add descriptor + vector dims(4, 1); + 
GeShape shapes(dims); + GeTensorDesc desc_anchor(shapes); + + conv_op->AddOutputDesc(desc_anchor); + bias_op->AddInputDesc(desc_anchor); + bias_op->AddInputDesc(desc_anchor); + bias_op->AddOutputDesc(desc_anchor); + mul_op->AddInputDesc(desc_anchor); + + NodePtr bias_node = graph->AddNode(bias_op); + OpDescUtils::SetWeights(bias_node, {bias}); + + NodePtr conv_node = graph->AddNode(conv_op); + NodePtr conv_node2 = graph->AddNode(conv_op); + NodePtr mul_node = graph->AddNode(mul_op); + + GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), bias_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(conv_node2->GetOutDataAnchor(0), bias_node->GetInDataAnchor(1)); + GraphUtils::AddEdge(bias_node->GetOutDataAnchor(0), mul_node->GetInDataAnchor(0)); + } +}; + +TEST_F(UtestGraphPassesGetOriginalFormatPass, no_transpose_success) { + ComputeGraphPtr graph = std::make_shared("test"); + make_graph(graph); + + ge::GetOriginalFormatPass get_format_pass; + Status status = get_format_pass.Run(graph); + EXPECT_EQ(SUCCESS, status); + + int32_t ori_format = 0; + for (NodePtr n : graph->GetDirectNode()) { + if ("Permute" == n->GetOpDesc()->GetType()) { + AttrUtils::GetInt(n->GetOpDesc(), ATTR_NAME_FORMAT, ori_format); + EXPECT_EQ(ori_format, 1); + } + if ("testg" == n->GetOpDesc()->GetType()) { + AttrUtils::GetInt(n->GetOpDesc(), ATTR_NAME_FORMAT, ori_format); + EXPECT_EQ(ori_format, 1); + } + if ("testh" == n->GetOpDesc()->GetType()) { + AttrUtils::GetInt(n->GetOpDesc(), ATTR_NAME_FORMAT, ori_format); + EXPECT_EQ(ori_format, 0); + } + } +} + +TEST_F(UtestGraphPassesGetOriginalFormatPass, infered_format_need_to_reset_success) { + ComputeGraphPtr graph = std::make_shared("test"); + make_graph(graph); + int32_t ori_format = 1; + for (NodePtr n : graph->GetDirectNode()) { + if ("testh" == n->GetOpDesc()->GetType()) { + AttrUtils::SetInt(n->GetOpDesc(), ATTR_NAME_FORMAT, ori_format); + } + if ("Permute" == n->GetOpDesc()->GetType()) { + vector permute = {0U, 3U, 1U, 2U}; + 
AttrUtils::SetListInt(n->GetOpDesc(), PERMUTE_ATTR_ORDER, permute); + } + } + + ge::GetOriginalFormatPass get_format_pass; + Status status = get_format_pass.Run(graph); + EXPECT_EQ(SUCCESS, status); + + for (NodePtr n : graph->GetDirectNode()) { + if ("Permute" == n->GetOpDesc()->GetType()) { + AttrUtils::GetInt(n->GetOpDesc(), ATTR_NAME_FORMAT, ori_format); + EXPECT_EQ(ori_format, 0); + } + if ("testg" == n->GetOpDesc()->GetType()) { + AttrUtils::GetInt(n->GetOpDesc(), ATTR_NAME_FORMAT, ori_format); + EXPECT_EQ(ori_format, 0); + } + if ("testh" == n->GetOpDesc()->GetType()) { + AttrUtils::GetInt(n->GetOpDesc(), ATTR_NAME_FORMAT, ori_format); + EXPECT_EQ(ori_format, 1); + } + } +} + +TEST_F(UtestGraphPassesGetOriginalFormatPass, infered_format_need_to_reset_success2) { + ComputeGraphPtr graph = std::make_shared("test"); + CreateBiasaddNode(graph, 1); + + ge::GetOriginalFormatPass get_format_pass; + Status status = get_format_pass.Run(graph); + EXPECT_EQ(SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/graph_builder_utils.cc b/tests/ut/ge/graph/passes/graph_builder_utils.cc new file mode 100644 index 00000000..9904e731 --- /dev/null +++ b/tests/ut/ge/graph/passes/graph_builder_utils.cc @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph_builder_utils.h" + +#include "graph/utils/graph_utils.h" + +namespace ge { +namespace ut { +NodePtr GraphBuilder::AddNode(const std::string &name, const std::string &type, int in_cnt, int out_cnt, Format format, + DataType data_type, std::vector shape) { + auto tensor_desc = std::make_shared(); + tensor_desc->SetShape(GeShape(std::move(shape))); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + + auto op_desc = std::make_shared(name, type); + for (int i = 0; i < in_cnt; ++i) { + op_desc->AddInputDesc(tensor_desc->Clone()); + } + for (int i = 0; i < out_cnt; ++i) { + op_desc->AddOutputDesc(tensor_desc->Clone()); + } + + return graph_->AddNode(op_desc); +} +void GraphBuilder::AddDataEdge(NodePtr &src_node, int src_idx, NodePtr &dst_node, int dst_idx) { + GraphUtils::AddEdge(src_node->GetOutDataAnchor(src_idx), dst_node->GetInDataAnchor(dst_idx)); +} +void GraphBuilder::AddControlEdge(NodePtr &src_node, NodePtr &dst_node) { + GraphUtils::AddEdge(src_node->GetOutControlAnchor(), dst_node->GetInControlAnchor()); +} + +} // namespace ut +} // namespace ge diff --git a/tests/ut/ge/graph/passes/graph_builder_utils.h b/tests/ut/ge/graph/passes/graph_builder_utils.h new file mode 100644 index 00000000..d024beb4 --- /dev/null +++ b/tests/ut/ge/graph/passes/graph_builder_utils.h @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef MAIN_LLT_FRAMEWORK_DOMI_UT_GE_TEST_GRAPH_PASSES_GRAPH_BUILDER_UTILS_H_ +#define MAIN_LLT_FRAMEWORK_DOMI_UT_GE_TEST_GRAPH_PASSES_GRAPH_BUILDER_UTILS_H_ + +#include +#include + +#include "graph/compute_graph.h" +#include "graph/graph.h" +#include "graph/node.h" + +namespace ge { +namespace ut { +class GraphBuilder { + public: + explicit GraphBuilder(const std::string &name) { graph_ = std::make_shared(name); } + NodePtr AddNode(const std::string &name, const std::string &type, int in_cnt, int out_cnt, + Format format = FORMAT_NCHW, DataType data_type = DT_FLOAT, + std::vector shape = {1, 1, 224, 224}); + void AddDataEdge(NodePtr &src_node, int src_idx, NodePtr &dst_node, int dst_idx); + void AddControlEdge(NodePtr &src_node, NodePtr &dst_node); + ComputeGraphPtr GetGraph() { + graph_->TopologicalSorting(); + return graph_; + } + + private: + ComputeGraphPtr graph_; +}; +} // namespace ut +} // namespace ge + +#endif // MAIN_LLT_FRAMEWORK_DOMI_UT_GE_TEST_GRAPH_PASSES_GRAPH_BUILDER_UTILS_H_ diff --git a/tests/ut/ge/graph/passes/guarantee_const_pass_unittest.cc b/tests/ut/ge/graph/passes/guarantee_const_pass_unittest.cc new file mode 100644 index 00000000..eaad3df7 --- /dev/null +++ b/tests/ut/ge/graph/passes/guarantee_const_pass_unittest.cc @@ -0,0 +1,221 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#define protected public +#define private public +#include "graph/passes/guarantee_const_pass.h" + +#include "../ops_stub.h" +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; +using namespace std; + +// To check whether the shape of output is correct or not +#define TEST_OPERATOR(op_, input_shapes, output_shapes) \ + { \ + auto op = op_; \ + for (auto input_pair : input_shapes) SetInputShape(op, input_pair.first, input_pair.second); \ + op.InferShapeAndType(); \ + for (auto output_pair : output_shapes) CheckOutputShape(op, output_pair.first, output_pair.second); \ + } + +#define LOOP_VEC(v) for (unsigned i = 0; i < v.size(); i++) + +class UtestGraphPassesGuaranteeConstPass : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { guarantee_const_op_remove_pass_ = new ::ge::GuaranteeConstPass(); } + + void destory() { + delete guarantee_const_op_remove_pass_; + guarantee_const_op_remove_pass_ = NULL; + } + + protected: + ge::GuaranteeConstPass *guarantee_const_op_remove_pass_; + + void SetInputShape(Operator op, string name, vector shape) { + TensorDesc td = op.GetInputDesc(name); + td.SetShape(ge::Shape(shape)); + op.UpdateInputDesc(name, td); + } + + void CheckOutputShape(Operator op, string name, vector shape) { + ge::Shape s = op.GetOutputDesc(name).GetShape(); + EXPECT_EQ(s.GetDims().size(), shape.size()); + LOOP_VEC(shape) EXPECT_EQ(s.GetDim(i), shape[i]); + } + + /// Init the node which will be passed in graph, isMultiInput represents whether using more than + /// one data anchor or not. 
+ NodePtr init_node(ComputeGraphPtr graph, vector dims_vec, vector data_vec, bool isMultiInput, + string type) { + // middle + OpDescPtr op_def = std::make_shared("op_def", type); + OpDescPtr in_op_def = std::make_shared("op_def_in", "test"); + OpDescPtr out_op_def = std::make_shared("op_def_out", "test"); + OpDescPtr another_in_op_def = std::make_shared("another_op_def_in", "test"); + + // whether using another input data anchor or not + if (isMultiInput) { + vector is_input_const_vec = {true, true}; + op_def->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_def, ge::ATTR_NAME_T, (int64_t)DT_INT32); + } + + // input tensor; + GeTensorDesc tensor_desc(GeShape(dims_vec), FORMAT_NCHW, DT_INT32); + ge::ConstGeTensorPtr const_tensor = + std::make_shared(tensor_desc, (uint8_t *)&data_vec[0], data_vec.size() * sizeof(int32_t)); + ge::AttrUtils::SetTensor(in_op_def, ge::ATTR_NAME_WEIGHTS, const_tensor); + op_def->AddInputDesc(tensor_desc); + + // whether using another input data anchor or not + if (isMultiInput) { + vector dims_vec_another = {6}; + vector data_vec_another = {1, 2, 3, 4, 5, 6}; + GeTensorDesc another_tensor_desc(GeShape(dims_vec_another), FORMAT_NCHW, DT_INT32); + ge::ConstGeTensorPtr const_tensor_another = std::make_shared( + another_tensor_desc, (uint8_t *)&data_vec_another[0], data_vec_another.size() * sizeof(int32_t)); + ge::AttrUtils::SetTensor(another_in_op_def, ge::ATTR_NAME_WEIGHTS, const_tensor_another); + op_def->AddInputDesc(another_tensor_desc); + another_in_op_def->AddOutputDesc(another_tensor_desc); + out_op_def->AddInputDesc(another_tensor_desc); + } + + GeTensorDesc tensor_desc_out(GeShape(dims_vec), FORMAT_NCHW, DT_INT32); + op_def->AddOutputDesc(tensor_desc_out); + in_op_def->AddOutputDesc(tensor_desc); + + // add attr of out_node + vector is_output_const(3, false); + is_output_const[0] = true; + out_op_def->SetIsInputConst(is_output_const); + out_op_def->AddInputDesc(tensor_desc); + + // Add node + NodePtr in_node = 
graph->AddNode(in_op_def); + NodePtr node = graph->AddNode(op_def); + NodePtr out_node = graph->AddNode(out_op_def); + + // Add edge + GraphUtils::AddEdge(in_node->GetOutDataAnchor(0), node->GetInDataAnchor(0)); + GraphUtils::AddEdge(node->GetOutDataAnchor(0), out_node->GetInDataAnchor(0)); + + // when need multi input nodes (which to verify the isolate node function) + if (isMultiInput) { + NodePtr another_in_node = graph->AddNode(another_in_op_def); + GraphUtils::AddEdge(another_in_node->GetOutDataAnchor(0), node->GetInDataAnchor(1)); + } + + return node; + } +}; + +TEST_F(UtestGraphPassesGuaranteeConstPass, not_changed) { + // the original type of op is not guarantee_const + string type = SIZE; + // input tensor + vector dims_vec = {6}; + vector data_vec = {1, 2, 3, 4, 5, 6}; + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = init_node(graph, dims_vec, data_vec, false, type); + ge::Status ret = guarantee_const_op_remove_pass_->Run(node); + EXPECT_EQ(SUCCESS, ret); +} + +TEST_F(UtestGraphPassesGuaranteeConstPass, get_origenal_type_fail) { + string type = GUARANTEECONST; + // input tensor + vector dims_vec = {6}; + vector data_vec = {1, 2, 3, 4, 5, 6}; + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = init_node(graph, dims_vec, data_vec, false, type); + // change the type + string type2 = "FrameworkOp"; + node->GetOpDesc()->SetType(type2); + ge::Status ret = guarantee_const_op_remove_pass_->Run(node); + // EXPECT_EQ(ge::SUCCESS, ret); +} + +TEST_F(UtestGraphPassesGuaranteeConstPass, int32_success_6) { + // input tensor + string type = GUARANTEECONST; + vector dims_vec = {6}; + vector data_vec = {1, 2, 3, 4, 5, 6}; + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = init_node(graph, dims_vec, data_vec, false, type); + // when input tensor is [1, 2, 3, 4, 5, 6], return success + ge::Status output = guarantee_const_op_remove_pass_->Run(node); + EXPECT_EQ(ge::SUCCESS, output); +} + 
+TEST_F(UtestGraphPassesGuaranteeConstPass, int32_success_2_3) { + // input tensor + string type = GUARANTEECONST; + vector dims_vec = {2, 3}; + vector data_vec = {1, 2, 3, 4, 5, 6}; + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = init_node(graph, dims_vec, data_vec, false, type); + // when input tensor is [[1, 2, 3], [4, 5, 6]], return success + ge::Status output = guarantee_const_op_remove_pass_->Run(node); + EXPECT_EQ(ge::SUCCESS, output); +} + +TEST_F(UtestGraphPassesGuaranteeConstPass, isolate_node_failed) { + // input tensor + string type = GUARANTEECONST; + vector dims_vec = {2, 3}; + vector data_vec = {1, 2, 3, 4, 5, 6}; + ComputeGraphPtr graph = std::make_shared("test"); + // add another input node + NodePtr node = init_node(graph, dims_vec, data_vec, true, type); + // when there are more than one input anchors, return failed + ge::Status output = guarantee_const_op_remove_pass_->Run(node); + EXPECT_EQ(ge::PARAM_INVALID, output); +} + +// IR test, the shape and data type of input should be equal to the shape and data type of output +TEST_F(UtestGraphPassesGuaranteeConstPass, ir_infer_shape) { + auto input = unordered_map>({ + {"x", {3, 5, 3, 4}}, + }); + auto output = unordered_map>({ + {"y", {3, 5, 3, 4}}, + }); + auto guaranteeConst = op::GuaranteeConst("guaranteeconst"); + + TEST_OPERATOR(guaranteeConst, input, output); +} diff --git a/tests/ut/ge/graph/passes/identity_pass_unittest.cc b/tests/ut/ge/graph/passes/identity_pass_unittest.cc new file mode 100644 index 00000000..eabc3b49 --- /dev/null +++ b/tests/ut/ge/graph/passes/identity_pass_unittest.cc @@ -0,0 +1,201 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/identity_pass.h" + +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/passes/base_pass.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph_builder_utils.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestIdentityPass : public Test { + protected: + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc; + OpDescPtr opdesc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + opdesc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + opdesc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(opdesc); + return node; + } +}; + +/// merge1 +/// | +/// identity1 +/// | \c +/// var1 var2 +static ComputeGraphPtr BuildGraph1() { + ge::ut::GraphBuilder builder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNode("var2", "Variable", 0, 1); + auto identity1 = builder.AddNode("identity1", "Identity", 1, 1); + auto merge1 = builder.AddNode("merge1", "Merge", 1, 1); + + 
builder.AddDataEdge(var1, 0, identity1, 0); + builder.AddControlEdge(var2, identity1); + builder.AddDataEdge(identity1, 0, merge1, 0); + return builder.GetGraph(); +} + +/// addn1 +/// |c +/// identity1 +/// | +/// switch1 +/// | +/// var1 +static ComputeGraphPtr BuildGraph2() { + ge::ut::GraphBuilder builder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto switch1 = builder.AddNode("switch1", "Switch", 2, 2); + auto identity1 = builder.AddNode("identity1", "Identity", 1, 1); + auto addn1 = builder.AddNode("addn1", "AddN", 1, 1); + + builder.AddDataEdge(var1, 0, switch1, 0); + builder.AddDataEdge(switch1, 0, identity1, 0); + builder.AddControlEdge(identity1, addn1); + return builder.GetGraph(); +} + +/// addn1 +/// | +/// identity1 +/// | +/// switch1 +/// | +/// var1 +static ComputeGraphPtr BuildGraph3() { + ge::ut::GraphBuilder builder("g3"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto switch1 = builder.AddNode("switch1", "Switch", 2, 2); + auto identity1 = builder.AddNode("identity1", "Identity", 1, 1); + auto addn1 = builder.AddNode("addn1", "AddN", 1, 1); + + builder.AddDataEdge(var1, 0, switch1, 0); + builder.AddDataEdge(switch1, 0, identity1, 0); + builder.AddDataEdge(identity1, 0, addn1, 0); + return builder.GetGraph(); +} + +TEST_F(UtestIdentityPass, succ) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = AddNode(graph, "Identity", IDENTITY); + NodePtr reduce_min_node = AddNode(graph, "reduceMin", REDUCEMIN); + + GraphUtils::AddEdge(node->GetOutDataAnchor(0), reduce_min_node->GetInDataAnchor(0)); + + IdentityPass pass(true); + Status status = pass.Run(node); + EXPECT_EQ(status, SUCCESS); + NodePtr found_node = graph->FindNode("Identity"); + EXPECT_EQ(found_node, nullptr); + + status = pass.Run(reduce_min_node); + EXPECT_EQ(status, SUCCESS); + + string type2 = "FrameworkOp"; + node->GetOpDesc()->SetType(type2); + status = pass.Run(node); + // EXPECT_EQ(ge::SUCCESS, status); + + NodePtr node_err 
= AddNode(graph, "Identity", IDENTITY, 1, 2); + status = pass.Run(node_err); + EXPECT_EQ(status, ge::PARAM_INVALID); +} + +TEST_F(UtestIdentityPass, skip_merge) { + auto graph = BuildGraph1(); + ge::GEPass pass(graph); + + ge::NamesToPass names_to_pass; + IdentityPass identity_pass(false); + names_to_pass.emplace_back("IdentityPass", &identity_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + auto identity1 = graph->FindNode("identity1"); + EXPECT_NE(identity1, nullptr); + EXPECT_EQ(identity1->GetOutNodes().size(), 1); + EXPECT_EQ(identity1->GetOutDataNodes().at(0)->GetName(), "merge1"); + EXPECT_EQ(identity1->GetInNodes().size(), 2); + + names_to_pass.clear(); + IdentityPass force_pass(true); + names_to_pass.emplace_back("ForceIdentityPass", &force_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + identity1 = graph->FindNode("identity1"); + EXPECT_EQ(identity1, nullptr); +} + +TEST_F(UtestIdentityPass, skip_switch) { + auto graph = BuildGraph2(); + ge::GEPass pass(graph); + + ge::NamesToPass names_to_pass; + IdentityPass identity_pass(false); + names_to_pass.emplace_back("IdentityPass", &identity_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + auto identity1 = graph->FindNode("identity1"); + EXPECT_NE(identity1, nullptr); + EXPECT_EQ(identity1->GetInNodes().size(), 1); + EXPECT_EQ(identity1->GetInDataNodes().at(0)->GetName(), "switch1"); + + names_to_pass.clear(); + IdentityPass force_pass(true); + names_to_pass.emplace_back("ForceIdentityPass", &force_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + identity1 = graph->FindNode("identity1"); + EXPECT_EQ(identity1, nullptr); +} + +TEST_F(UtestIdentityPass, norm_after_switch) { + auto graph = BuildGraph3(); + ge::GEPass pass(graph); + + ge::NamesToPass names_to_pass; + IdentityPass identity_pass(false); + names_to_pass.emplace_back("IdentityPass", &identity_pass); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + auto identity1 = graph->FindNode("identity1"); + EXPECT_EQ(identity1, 
nullptr); + auto switch1 = graph->FindNode("switch1"); + EXPECT_EQ(switch1->GetOutNodes().size(), 1); + EXPECT_EQ(switch1->GetOutDataNodes().at(0)->GetName(), "addn1"); +} \ No newline at end of file diff --git a/tests/ut/ge/graph/passes/infershape_pass_unittest.cc b/tests/ut/ge/graph/passes/infershape_pass_unittest.cc new file mode 100644 index 00000000..8fa5b34e --- /dev/null +++ b/tests/ut/ge/graph/passes/infershape_pass_unittest.cc @@ -0,0 +1,55 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/infershape_pass.h" + +#include "graph/compute_graph.h" +#include "graph/node.h" +#include "graph/operator.h" +#include "graph/operator_factory.h" +#include "graph/operator_reg.h" +#include "graph_builder_utils.h" +#undef protected +#undef private + +using namespace std; +using namespace testing; +using namespace ge; +namespace ge { +class UtestGraphInfershapePass : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +TEST_F(UtestGraphInfershapePass, infershape_pass_failed) { + GeTensorDesc ge_tensor_desc(GeShape({-2, 2, 3, 4}), ge::FORMAT_NCHW, DT_FLOAT16); + string type = "AddN"; + auto addn_op_desc = std::make_shared("AddN", type); + addn_op_desc->AddInputDesc(ge_tensor_desc); + addn_op_desc->AddOutputDesc(ge_tensor_desc); + auto graph = std::make_shared("test"); + auto addn_node = std::make_shared(addn_op_desc, graph); + addn_node->Init(); + + InferShapePass infershape_pass; + EXPECT_EQ(infershape_pass.Run(addn_node), GE_GRAPH_INFERSHAPE_FAILED); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/merge_pass_unittest.cc b/tests/ut/ge/graph/passes/merge_pass_unittest.cc new file mode 100644 index 00000000..75fdb21b --- /dev/null +++ b/tests/ut/ge/graph/passes/merge_pass_unittest.cc @@ -0,0 +1,403 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include + +#define private public +#include "graph/passes/merge_pass.h" + +#include "common/ge_inner_error_codes.h" +#include "inc/pass_manager.h" +#undef private + +namespace ge { +namespace { + +class UtestGraphPassesMergePass : public testing::Test { + protected: + UtestGraphPassesMergePass() { + graph_ = std::make_shared("test"); + vector shape_vec{1, 1, 224, 224}; + GeShape shape = GeShape(shape_vec); + default_tensor_desc_ = std::make_shared(); + default_tensor_desc_->SetShape(shape); + default_tensor_desc_->SetFormat(FORMAT_NCHW); + default_tensor_desc_->SetDataType(DT_FLOAT); + } + + NodePtr NewNode(const std::string &name, const std::string &type, int input_cnt, int output_cnt) { + OpDescPtr op_desc = std::make_shared(name, type); + for (int i = 0; i < input_cnt; ++i) { + op_desc->AddInputDesc(default_tensor_desc_->Clone()); + } + + for (int i = 0; i < output_cnt; ++i) { + op_desc->AddOutputDesc(default_tensor_desc_->Clone()); + } + + NodePtr node = graph_->AddNode(op_desc); + if (type == CONSTANT) { + int32_t weight[] = {1}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_INT32); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + vector tensor_vec = {tensor}; + OpDescUtils::SetWeights(node, tensor_vec); + } + + return node; + } + + ComputeGraphPtr graph_; + GeTensorDescPtr default_tensor_desc_; + MergePass pass_; +}; + +} // namespace + +TEST_F(UtestGraphPassesMergePass, null_input) { + NodePtr node = nullptr; + auto ret = pass_.Run(node); + EXPECT_EQ(ret, PARAM_INVALID); +} + +TEST_F(UtestGraphPassesMergePass, filter_non_merge_node) { + auto node = NewNode("Op1", CONSTANT, 0, 1); + auto ret = pass_.Run(node); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestGraphPassesMergePass, invalid_merge_node) { + auto merge_node = NewNode("Merge", MERGE, 2, 0); + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, PARAM_INVALID); +} + +/// Op1 Op2 +/// \ / +/// \ / +/// Merge +/// | 
+/// | +/// NetOutput +TEST_F(UtestGraphPassesMergePass, multiple_inputs) { + auto node1 = NewNode("Op1", CONSTANT, 0, 1); + auto node2 = NewNode("Op2", CONSTANT, 0, 1); + auto merge_node = NewNode("Merge", MERGE, 2, 2); + auto net_output_node = NewNode("NetOutput", NETOUTPUT, 1, 1); + + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), merge_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(node2->GetOutDataAnchor(0), merge_node->GetInDataAnchor(1)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), net_output_node->GetInDataAnchor(0)); + + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, SUCCESS); +} + +/// Merge +/// | \ +/// | \ +/// Op1 Op2 Merge2 +/// \ | | +/// \ | Op3 +/// \ | / +/// NetOutput +TEST_F(UtestGraphPassesMergePass, empty_input_cut_branch_meet_net_output_with_data_anchor) { + auto merge_node = NewNode("Merge", MERGE, 1, 1); + auto merge_node2 = NewNode("Merge2", MERGE, 1, 1); + auto node1 = NewNode("Op1", CONSTANT, 0, 1); + auto node2 = NewNode("Op2", RELU, 1, 1); + auto node3 = NewNode("Op3", RELU, 1, 1); + auto net_output = NewNode("NetOutput", NETOUTPUT, 3, 3); + + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), merge_node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), net_output->GetInDataAnchor(0)); + GraphUtils::AddEdge(node2->GetOutDataAnchor(0), net_output->GetInDataAnchor(1)); + GraphUtils::AddEdge(merge_node2->GetOutDataAnchor(0), node3->GetInDataAnchor(0)); + GraphUtils::AddEdge(node3->GetOutDataAnchor(0), net_output->GetInDataAnchor(2)); + + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, INTERNAL_ERROR); +} + +/// Merge +/// | \ +/// | \ +/// Op1 Op2 Merge2 +/// \ | | \ +/// \ | Op3 +/// \ | : +/// NetOutput +TEST_F(UtestGraphPassesMergePass, empty_input_cut_branch_meet_net_output_with_control_anchor) { + auto merge_node = NewNode("Merge", MERGE, 1, 2); + auto merge_node2 = NewNode("Merge2", MERGE, 1, 2); + auto 
node1 = NewNode("Op1", CONSTANT, 0, 1); + auto node2 = NewNode("Op2", RELU, 1, 1); + auto node3 = NewNode("Op3", RELU, 1, 1); + auto net_output = NewNode("NetOutput", NETOUTPUT, 3, 3); + + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), merge_node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), net_output->GetInDataAnchor(0)); + GraphUtils::AddEdge(node2->GetOutDataAnchor(0), net_output->GetInControlAnchor()); + GraphUtils::AddEdge(merge_node2->GetOutDataAnchor(0), node3->GetInDataAnchor(0)); + GraphUtils::AddEdge(node3->GetOutDataAnchor(0), net_output->GetInControlAnchor()); + + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestGraphPassesMergePass, empty_input_cut_branch) { + /// Merge + /// | \ + /// | \ + /// Op1 Op2 Merge2 + /// \ | | + /// \ | Op3 + /// \ | / + /// Merge3 + + auto merge_node = NewNode("Merge", MERGE, 1, 2); + auto merge_node2 = NewNode("Merge2", MERGE, 1, 2); + auto node1 = NewNode("Op1", CONSTANT, 0, 1); + auto node2 = NewNode("Op2", RELU, 1, 1); + auto node3 = NewNode("Op3", RELU, 1, 1); + auto merge_node3 = NewNode("Merge3", MERGE, 3, 3); + + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), merge_node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), merge_node3->GetInDataAnchor(0)); + GraphUtils::AddEdge(node2->GetOutDataAnchor(0), merge_node3->GetInDataAnchor(1)); + GraphUtils::AddEdge(merge_node2->GetOutDataAnchor(0), node3->GetInDataAnchor(0)); + GraphUtils::AddEdge(node3->GetOutDataAnchor(0), merge_node3->GetInDataAnchor(2)); + + /// Merge + /// | + /// | + /// Op1 Op2 Merge2 + /// \ | + /// \ Op3 + /// \ / + /// Merge3 + + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(merge_node3->GetInDataNodes().size(), 2); + EXPECT_EQ(merge_node2->GetInDataNodes().size(), 
0); + EXPECT_EQ(node2->GetOutDataNodes().size(), 0); + EXPECT_EQ(merge_node3->GetInDataNodes().at(0)->GetName(), "Op1"); + EXPECT_EQ(merge_node3->GetInDataNodes().at(1)->GetName(), "Op3"); + + /// Merge + /// | + /// | + /// Op1 Op2 Merge2 + /// \ | + /// \ Op3 + /// \ + /// Merge3 + + ret = pass_.Run(merge_node2); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(merge_node3->GetInDataNodes().size(), 1); + EXPECT_EQ(merge_node3->GetInDataNodes().at(0)->GetName(), "Op1"); + EXPECT_EQ(node3->GetOutDataNodes().size(), 0); +} + +TEST_F(UtestGraphPassesMergePass, single_non_const_input) { + /// Op1 + /// | + /// Merge + /// / \ + /// Op2 Op3 + auto merge_node = NewNode("Merge", MERGE, 1, 2); + auto node1 = NewNode("Op1", RELU, 1, 1); + auto node2 = NewNode("Op2", CONVOLUTION, 1, 1); + auto node3 = NewNode("Op3", CONVOLUTION, 1, 1); + + GraphUtils::AddEdge(node1->GetOutDataAnchor(0), merge_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node3->GetInDataAnchor(0)); + + EXPECT_EQ(node1->GetOutDataNodes().size(), 1); + + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, SUCCESS); + + EXPECT_EQ(graph_->GetDirectNodesSize(), 3); + EXPECT_EQ(graph_->FindNode("Merge"), nullptr); + EXPECT_EQ(node1->GetOutDataNodes().size(), 2); + EXPECT_EQ(node2->GetInDataNodes().size(), 1); + EXPECT_EQ(node3->GetInDataNodes().size(), 1); + EXPECT_EQ(node2->GetInDataAnchor(0)->GetPeerOutAnchor(), node1->GetOutDataAnchor(0)); + EXPECT_EQ(node3->GetInDataAnchor(0)->GetPeerOutAnchor(), node1->GetOutDataAnchor(0)); +} + +TEST_F(UtestGraphPassesMergePass, single_const_input) { + /// Const + /// | + /// Merge Pass Const + /// / \ ===> / \ + /// Op1 Op2 Op1 Op2 + auto merge_node = NewNode("Merge", MERGE, 1, 2); + auto const_node = NewNode("Const", CONSTANT, 1, 1); + auto node1 = NewNode("Op1", ADDN, 1, 1); + auto node2 = NewNode("Op2", ADDN, 1, 1); + + 
node1->GetOpDesc()->SetIsInputConst({false}); + node2->GetOpDesc()->SetIsInputConst({false}); + GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), merge_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node1->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, SUCCESS); + + EXPECT_EQ(graph_->GetDirectNodesSize(), 3); + EXPECT_EQ(graph_->FindNode("Merge").get(), nullptr); + EXPECT_EQ(node1->GetInDataNodes().size(), 1); + EXPECT_EQ(node2->GetInDataNodes().size(), 1); + EXPECT_EQ(node1->GetOpDesc()->GetIsInputConst().at(0), false); + EXPECT_EQ(node2->GetOpDesc()->GetIsInputConst().at(0), false); +} + +TEST_F(UtestGraphPassesMergePass, single_const_input_value_index_two_out_nodes) { + /// Const + /// | + /// Merge Pass Const + /// / | ===> / \(control anchor) + /// Op1 | \ Op1 Constant + /// Op2 Op3 | + /// / \ + /// Op2 Op3 + auto merge_node = NewNode("Merge", MERGE, 1, 2); + auto const_node = NewNode("Const", CONSTANT, 1, 1); + auto node1 = NewNode("Op1", ADDN, 1, 1); + auto node2 = NewNode("Op2", ADDN, 1, 1); + auto node3 = NewNode("Op3", ADDN, 1, 1); + + node1->GetOpDesc()->SetIsInputConst({false}); + node2->GetOpDesc()->SetIsInputConst({false}); + GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), merge_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node1->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(1), node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(1), node3->GetInDataAnchor(0)); + + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, SUCCESS); + + EXPECT_EQ(graph_->GetDirectNodesSize(), 5); + EXPECT_EQ(graph_->FindNode("Merge").get(), nullptr); + EXPECT_EQ(node1->GetInDataNodes().size(), 1); + EXPECT_EQ(node2->GetInDataNodes().size(), 1); + EXPECT_EQ(node1->GetOpDesc()->GetIsInputConst().at(0), false); + 
EXPECT_EQ(node2->GetOpDesc()->GetIsInputConst().at(0), false); + + NodePtr node_test = graph_->FindNode("Merge_value_index"); + EXPECT_NE(node_test.get(), nullptr); + EXPECT_EQ(node_test->GetOutDataNodes().size(), 2); + EXPECT_EQ(node_test->GetAllOutDataAnchors().size(), 1); + EXPECT_EQ(node_test->GetInDataNodes().size(), 0); + EXPECT_EQ(node2->GetInDataNodes().at(0)->GetInControlAnchor()->GetPeerOutControlAnchors().at(0), + const_node->GetOutControlAnchor()); + EXPECT_EQ(node3->GetInDataNodes().at(0)->GetInControlAnchor()->GetPeerOutControlAnchors().at(0), + const_node->GetOutControlAnchor()); + EXPECT_EQ(node2->GetInDataNodes().at(0), node_test); + EXPECT_EQ(node3->GetInDataNodes().at(0), node_test); +} + +TEST_F(UtestGraphPassesMergePass, single_const_input_value_index_two_out_nodes1) { + /// Const + /// | + /// Merge Pass Const + /// / | ===> / \(control anchor) + /// Op1 | \ Op1 Constant + /// Op2 Op3 | + /// / \ + /// Op2 Op3 + auto merge_node = NewNode("Merge", MERGE, 1, 2); + auto const_node = NewNode("Const", CONSTANT, 1, 1); + auto node1 = NewNode("Op1", ADDN, 1, 1); + auto node2 = NewNode("Op2", ADDN, 1, 1); + auto node3 = NewNode("Op3", ADDN, 1, 1); + + node1->GetOpDesc()->SetIsInputConst({false}); + node2->GetOpDesc()->SetIsInputConst({false}); + GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), merge_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node1->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(1), node2->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(1), node3->GetInDataAnchor(0)); + + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestGraphPassesMergePass, const_with_control_input) { + /// Switch + /// | + /// Identity + /// . + /// . 
+ /// C + /// | + /// Merge + /// / \ + /// Op1 Op2 + auto switch_node = NewNode("Switch", SWITCH, 1, 2); + auto identity_node = NewNode("Identity", SWITCH, 1, 1); + auto const_node = NewNode("Const", CONSTANT, 1, 1); + auto merge_node = NewNode("Merge", MERGE, 1, 2); + auto node1 = NewNode("Op1", ADDN, 1, 1); + auto node2 = NewNode("Op2", ADDN, 1, 1); + + node1->GetOpDesc()->SetIsInputConst({false}); + node2->GetOpDesc()->SetIsInputConst({false}); + GraphUtils::AddEdge(switch_node->GetOutDataAnchor(0), identity_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(identity_node->GetOutControlAnchor(), const_node->GetInControlAnchor()); + GraphUtils::AddEdge(identity_node->GetOutDataAnchor(0), const_node->GetInControlAnchor()); + GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), merge_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node1->GetInDataAnchor(0)); + GraphUtils::AddEdge(merge_node->GetOutDataAnchor(0), node2->GetInDataAnchor(0)); + + /// Switch + /// | + /// Identity + /// . + /// . 
+ /// C + /// / \ + /// Op1 Op2 + auto ret = pass_.Run(merge_node); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(graph_->GetDirectNodesSize(), 5); + EXPECT_EQ(graph_->FindNode("Merge").get(), nullptr); + EXPECT_EQ(node1->GetInDataNodes().size(), 1); + EXPECT_EQ(node2->GetInDataNodes().size(), 1); + EXPECT_EQ(node1->GetOpDesc()->GetIsInputConst().at(0), false); + EXPECT_EQ(node2->GetOpDesc()->GetIsInputConst().at(0), false); + EXPECT_EQ(node1->GetInDataNodes().at(0)->GetInControlAnchor()->GetPeerOutDataAnchors().at(0), + identity_node->GetOutDataAnchor(0)); + EXPECT_EQ(node1->GetInDataNodes().at(0)->GetInControlAnchor()->GetPeerOutControlAnchors().at(0), + identity_node->GetOutControlAnchor()); + EXPECT_EQ(node2->GetInDataNodes().at(0)->GetInControlAnchor()->GetPeerOutDataAnchors().at(0), + identity_node->GetOutDataAnchor(0)); + EXPECT_EQ(node2->GetInDataNodes().at(0)->GetInControlAnchor()->GetPeerOutControlAnchors().at(0), + identity_node->GetOutControlAnchor()); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/net_output_pass_unittest.cc b/tests/ut/ge/graph/passes/net_output_pass_unittest.cc new file mode 100755 index 00000000..2655a403 --- /dev/null +++ b/tests/ut/ge/graph/passes/net_output_pass_unittest.cc @@ -0,0 +1,866 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/net_output_pass.h" + +#include + +#include "common/ge_inner_error_codes.h" +#include "common/types.h" +#include "ge/ge_api.h" +#include "graph/compute_graph.h" +#include "graph/debug/graph_debug.h" +#include "graph/manager/graph_manager.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/operator_reg.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/pass_manager.h" +#include "init/gelib.h" +#include "opskernel_manager/ops_kernel_manager.h" + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestGraphPassesNetOutputPass : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +ge::ComputeGraphPtr BuildClearWeightGraph(void) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + ge::OpDescPtr cast_op = std::make_shared(); + cast_op->SetType(CAST); + cast_op->SetName("Cast1"); + cast_op->AddInputDesc(ge::GeTensorDesc()); + cast_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr cast_node = graph->AddNode(cast_op); + + ge::OpDescPtr const_op = std::make_shared(); + const_op->SetType(CONSTANT); + const_op->SetName("Const1"); + const_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr const_node = graph->AddNode(const_op); + + ge::GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), cast_node->GetInDataAnchor(0)); + + return graph; +} + +ge::ComputeGraphPtr build_graph(bool with_leaf_node = false) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + ge::OpDescPtr data_op = std::make_shared(); + data_op->SetType(DATA); + data_op->SetName("Data1"); + data_op->AddInputDesc(ge::GeTensorDesc()); + data_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr data1 = graph->AddNode(data_op); + + ge::OpDescPtr relu_op1 = std::make_shared(); + relu_op1->SetType(ACTIVATION); + relu_op1->SetName("Relu1"); + relu_op1->AddInputDesc(ge::GeTensorDesc()); + relu_op1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr relu1 = graph->AddNode(relu_op1); + + 
ge::OpDescPtr relu_op2 = std::make_shared(); + relu_op2->SetType(RELU); + relu_op2->SetName("Relu2"); + relu_op2->AddInputDesc(ge::GeTensorDesc()); + relu_op2->AddOutputDesc(ge::GeTensorDesc()); + relu_op2->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr relu2 = graph->AddNode(relu_op2); + + ge::OpDescPtr relu_op3 = std::make_shared(); + relu_op3->SetType(ACTIVATION); + relu_op3->SetName("Relu3"); + relu_op3->AddInputDesc(ge::GeTensorDesc()); + relu_op3->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr relu3; + if (with_leaf_node == true) { + relu3 = graph->AddNode(relu_op3); + } + + ge::OpDescPtr mul_op = std::make_shared(); + mul_op->SetType(MUL); + mul_op->SetName("Mul"); + mul_op->AddInputDesc(ge::GeTensorDesc()); + mul_op->AddInputDesc(ge::GeTensorDesc()); + mul_op->AddOutputDesc(ge::GeTensorDesc()); + mul_op->AddOutputDesc(ge::GeTensorDesc()); + mul_op->AddOutputDesc(ge::GeTensorDesc()); + mul_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr mul = graph->AddNode(mul_op); + + ge::OpDescPtr mul_op1 = std::make_shared(); + mul_op1->SetType(MUL); + mul_op1->SetName("Mul1"); + mul_op1->AddInputDesc(ge::GeTensorDesc()); + mul_op1->AddInputDesc(ge::GeTensorDesc()); + mul_op1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr mul1 = graph->AddNode(mul_op1); + + ge::OpDescPtr mul_op2 = std::make_shared(); + mul_op2->SetType(MUL); + mul_op2->SetName("Mul2"); + mul_op2->AddInputDesc(ge::GeTensorDesc()); + mul_op2->AddInputDesc(ge::GeTensorDesc()); + mul_op2->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr mul2 = graph->AddNode(mul_op2); + + ge::OpDescPtr fc_op = std::make_shared(); + fc_op->SetType(FULL_CONNECTION); + fc_op->SetName("FullConnection"); + fc_op->AddInputDesc(ge::GeTensorDesc()); + fc_op->AddOutputDesc(ge::GeTensorDesc()); + fc_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr fc = graph->AddNode(fc_op); + + ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(0), relu1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(relu1->GetOutDataAnchor(0), 
fc->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(fc->GetOutDataAnchor(0), relu2->GetInDataAnchor(0)); + if (with_leaf_node == true) { + ge::GraphUtils::AddEdge(fc->GetOutDataAnchor(1), relu3->GetInDataAnchor(0)); + } + ge::GraphUtils::AddEdge(relu2->GetOutDataAnchor(0), mul->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(relu2->GetOutDataAnchor(1), mul->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(0), mul1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(1), mul1->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(2), mul2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(mul->GetOutDataAnchor(3), mul2->GetInDataAnchor(1)); + + return graph; +} +TEST_F(UtestGraphPassesNetOutputPass, add_ctrl_edge_for_netout_from_leaf_success) { + ge::ComputeGraphPtr compute_graph = build_graph(true); + + // construct targets + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + ge::NodePtr relu3 = compute_graph->FindNode("Relu3"); + std::vector> output_nodes = {{relu3, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// when output and targets set conflicts each other , output set is prio + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 1); + + std::vector expect_input_data_result{"Relu3"}; + for (auto node : net_out_node->GetInDataNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_input_data_result.begin(), expect_input_data_result.end(), name); + if (iter != expect_input_data_result.end()) { + 
expect_input_data_result.erase(iter); + } + } + input_data_node_num = expect_input_data_result.size(); + EXPECT_EQ(input_data_node_num, 0); + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 2); + + std::vector expect_result{"Mul1", "Mul2"}; + for (auto node : net_out_node->GetInControlNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_result.begin(), expect_result.end(), name); + if (iter != expect_result.end()) { + expect_result.erase(iter); + } + } + control_node_num = expect_result.size(); + EXPECT_EQ(control_node_num, 0); +} +TEST_F(UtestGraphPassesNetOutputPass, only_target_node_success) { + ge::ComputeGraphPtr compute_graph = build_graph(); + // construct targets + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector target_nodes = {mul1, mul2}; + compute_graph->SetGraphTargetNodesInfo(target_nodes); + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 0); + + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 2); + + std::vector expect_result{"Mul1", "Mul2"}; + for (auto node : net_out_node->GetInControlNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_result.begin(), expect_result.end(), name); + if (iter != expect_result.end()) { + expect_result.erase(iter); + } + } + control_node_num = expect_result.size(); + EXPECT_EQ(control_node_num, 0); +} +TEST_F(UtestGraphPassesNetOutputPass, 
targets_with_retval_success) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + // Imitate the output node of _Retval issued + ge::OpDescPtr retval_node_desc1 = std::make_shared("reval_node1", FRAMEWORKOP); + retval_node_desc1->AddInputDesc(ge::GeTensorDesc()); + (void)ge::AttrUtils::SetStr(retval_node_desc1, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, "_Retval"); + (void)ge::AttrUtils::SetInt(retval_node_desc1, RETVAL_ATTR_NAME_INDEX, 0); + ge::NodePtr retval_node1 = compute_graph->AddNode(retval_node_desc1); + EXPECT_NE(retval_node1, nullptr); + + ge::OpDescPtr retval_node_desc2 = std::make_shared("reval_node2", FRAMEWORKOP); + retval_node_desc2->AddInputDesc(ge::GeTensorDesc()); + (void)ge::AttrUtils::SetStr(retval_node_desc2, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, "_Retval"); + (void)ge::AttrUtils::SetInt(retval_node_desc2, RETVAL_ATTR_NAME_INDEX, 1); + ge::NodePtr retval_node2 = compute_graph->AddNode(retval_node_desc2); + EXPECT_NE(retval_node2, nullptr); + // construct targets + std::vector target_nodes = {retval_node1, retval_node2}; + compute_graph->SetGraphTargetNodesInfo(target_nodes); + + for (NodePtr node : compute_graph->GetDirectNode()) { + if (node->GetName() == "Mul1") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), retval_node1->GetInDataAnchor(0)); + } else if (node->GetName() == "Mul2") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), retval_node2->GetInDataAnchor(0)); + } + } + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 0); + + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); 
+ EXPECT_EQ(control_node_num, 2); + + std::vector expect_result{"Mul1", "Mul2"}; + for (auto node : net_out_node->GetInControlNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_result.begin(), expect_result.end(), name); + if (iter != expect_result.end()) { + expect_result.erase(iter); + } + } + control_node_num = expect_result.size(); + EXPECT_EQ(control_node_num, 0); + // Check the deletion of _Retval node + retval_node1 = compute_graph->FindNode("reval_node1"); + EXPECT_EQ(retval_node1, nullptr); + retval_node2 = compute_graph->FindNode("reval_node2"); + EXPECT_EQ(retval_node2, nullptr); +} + +TEST_F(UtestGraphPassesNetOutputPass, output_node_and_target_node_no_duplicate_success) { + ge::ComputeGraphPtr compute_graph = build_graph(true); + + // construct targets + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector target_nodes = {mul1, mul2}; + compute_graph->SetGraphTargetNodesInfo(target_nodes); + ge::NodePtr relu3 = compute_graph->FindNode("Relu3"); + std::vector> output_nodes = {{relu3, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// when output and targets set conflicts each other , output set is prio + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 1); + + std::vector expect_input_data_result{"Relu3"}; + for (auto node : net_out_node->GetInDataNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_input_data_result.begin(), expect_input_data_result.end(), name); + if (iter != expect_input_data_result.end()) { + 
expect_input_data_result.erase(iter); + } + } + input_data_node_num = expect_input_data_result.size(); + EXPECT_EQ(input_data_node_num, 0); + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 2); + + std::vector expect_result{"Mul1", "Mul2"}; + for (auto node : net_out_node->GetInControlNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_result.begin(), expect_result.end(), name); + if (iter != expect_result.end()) { + expect_result.erase(iter); + } + } + control_node_num = expect_result.size(); + EXPECT_EQ(control_node_num, 0); +} +TEST_F(UtestGraphPassesNetOutputPass, output_node_and_target_node_duplicate_success) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + // construct targets + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector target_nodes = {mul2}; + compute_graph->SetGraphTargetNodesInfo(target_nodes); + std::vector> output_nodes = {{mul1, 0}, {mul2, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 2); + + std::vector expect_input_data_result{"Mul1"}; + for (auto node : net_out_node->GetInDataNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_input_data_result.begin(), expect_input_data_result.end(), name); + if (iter != expect_input_data_result.end()) { + expect_input_data_result.erase(iter); + } + } + input_data_node_num = expect_input_data_result.size(); + 
EXPECT_EQ(input_data_node_num, 0); + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 0); +} + +TEST_F(UtestGraphPassesNetOutputPass, net_output_node_and_target_node_success) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + ge::OpDescPtr netout = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + netout->AddInputDesc(ge::GeTensorDesc()); + netout->AddInputDesc(ge::GeTensorDesc()); + netout->AddOutputDesc(ge::GeTensorDesc()); + netout->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr netout_node = compute_graph->AddNode(netout); + EXPECT_NE(netout_node, nullptr); + + for (NodePtr node : compute_graph->GetDirectNode()) { + if (node->GetName() == "Mul1") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), netout_node->GetInDataAnchor(0)); + } else if (node->GetName() == "Mul2") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), netout_node->GetInDataAnchor(1)); + } + } + // construct targets + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector target_nodes = {mul2}; + compute_graph->SetGraphTargetNodesInfo(target_nodes); + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 1); + + std::vector expect_input_data_result{"Mul1"}; + for (auto node : net_out_node->GetInDataNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_input_data_result.begin(), expect_input_data_result.end(), name); + if (iter != expect_input_data_result.end()) { + expect_input_data_result.erase(iter); + } + } + input_data_node_num = expect_input_data_result.size(); 
+ EXPECT_EQ(input_data_node_num, 0); + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 1); + std::vector expect_control_data_result{"Mul2"}; + for (auto node : net_out_node->GetInControlNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_control_data_result.begin(), expect_control_data_result.end(), name); + if (iter != expect_control_data_result.end()) { + expect_control_data_result.erase(iter); + } + } + control_node_num = expect_control_data_result.size(); + EXPECT_EQ(control_node_num, 0); +} +/// graph have netoutput node.User set outputnodes and target nodes at the same time.output nodes +/// include one common node with target nodes. +/// Notice: output nodes set is more prio +TEST_F(UtestGraphPassesNetOutputPass, net_output_node_and_output_nodes_and_target_node_success_1) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + ge::OpDescPtr netout = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + netout->AddInputDesc(ge::GeTensorDesc()); + netout->AddInputDesc(ge::GeTensorDesc()); + netout->AddOutputDesc(ge::GeTensorDesc()); + netout->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr netout_node = compute_graph->AddNode(netout); + EXPECT_NE(netout_node, nullptr); + + for (NodePtr node : compute_graph->GetDirectNode()) { + if (node->GetName() == "Mul1") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), netout_node->GetInDataAnchor(0)); + } else if (node->GetName() == "Mul2") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), netout_node->GetInDataAnchor(1)); + } + } + // construct targets + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector target_nodes = {mul2}; + compute_graph->SetGraphTargetNodesInfo(target_nodes); + std::vector> output_nodes = {{mul1, 0}, {mul2, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + ge::PassManager pass_managers; + pass_managers.AddPass(new 
(std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 2); + + std::vector expect_input_data_result{"Mul1", "Mul2"}; + for (auto node : net_out_node->GetInDataNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_input_data_result.begin(), expect_input_data_result.end(), name); + if (iter != expect_input_data_result.end()) { + expect_input_data_result.erase(iter); + } + } + input_data_node_num = expect_input_data_result.size(); + EXPECT_EQ(input_data_node_num, 0); + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 0); +} +/// graph have netoutput node.User set outputnodes and target nodes at the same time.output nodes +/// include one common node with target nodes. 
+/// Notice: output nodes set is more prio +TEST_F(UtestGraphPassesNetOutputPass, net_output_node_and_output_nodes_and_target_node_success_2) { + ge::ComputeGraphPtr compute_graph = build_graph(true); + + ge::OpDescPtr netout = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + netout->AddInputDesc(ge::GeTensorDesc()); + netout->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr netout_node = compute_graph->AddNode(netout); + EXPECT_NE(netout_node, nullptr); + + for (const auto &node : compute_graph->GetDirectNode()) { + if (node->GetName() == "Mul1") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), netout_node->GetInDataAnchor(0)); + } + if (node->GetName() == "Mul2") { + GraphUtils::AddEdge(node->GetOutControlAnchor(), netout_node->GetInControlAnchor()); + } + if (node->GetName() == "Relu3") { + GraphUtils::AddEdge(node->GetOutControlAnchor(), netout_node->GetInControlAnchor()); + } + } + // construct targets + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector target_nodes = {mul2}; + compute_graph->SetGraphTargetNodesInfo(target_nodes); + std::vector> output_nodes = {{mul1, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 1); + + std::vector expect_input_data_result{"Mul1"}; + for (const auto &node : net_out_node->GetInDataNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_input_data_result.begin(), expect_input_data_result.end(), name); + if (iter != expect_input_data_result.end()) { + 
expect_input_data_result.erase(iter); + } + } + input_data_node_num = expect_input_data_result.size(); + EXPECT_EQ(input_data_node_num, 0); + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 2); + std::vector expect_control_data_result{"Mul2", "Relu3"}; + for (const auto &node : net_out_node->GetInControlNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_control_data_result.begin(), expect_control_data_result.end(), name); + if (iter != expect_control_data_result.end()) { + expect_control_data_result.erase(iter); + } + } + control_node_num = expect_control_data_result.size(); + EXPECT_EQ(control_node_num, 0); +} +/// graph have netoutput node.User set outputnodes and target nodes at the same time.output nodes +/// include one common node with target nodes. +/// Notice: output nodes set is more prio +TEST_F(UtestGraphPassesNetOutputPass, net_output_node_and_output_nodes_and_target_node_success_3) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + ge::OpDescPtr netout = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + netout->AddInputDesc(ge::GeTensorDesc()); + netout->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr netout_node = compute_graph->AddNode(netout); + EXPECT_NE(netout_node, nullptr); + + for (const auto &node : compute_graph->GetDirectNode()) { + if (node->GetName() == "Mul1") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), netout_node->GetInDataAnchor(0)); + } + if (node->GetName() == "Mul2") { + GraphUtils::AddEdge(node->GetOutControlAnchor(), netout_node->GetInControlAnchor()); + GraphUtils::AddEdge(node->GetOutDataAnchor(0), netout_node->GetInControlAnchor()); + } + } + // construct targets + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector target_nodes = {mul2}; + compute_graph->SetGraphTargetNodesInfo(target_nodes); + std::vector> output_nodes = {{mul1, 0}}; + 
compute_graph->SetGraphOutNodesInfo(output_nodes); + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + // check contain netoutput + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + /// check input data node of netoutput + /// Check data input + int input_data_node_num = net_out_node->GetInDataNodes().size(); + EXPECT_EQ(input_data_node_num, 1); + + std::vector expect_input_data_result{"Mul1"}; + for (const auto &node : net_out_node->GetInDataNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_input_data_result.begin(), expect_input_data_result.end(), name); + if (iter != expect_input_data_result.end()) { + expect_input_data_result.erase(iter); + } + } + input_data_node_num = expect_input_data_result.size(); + EXPECT_EQ(input_data_node_num, 0); + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 1); + std::vector expect_control_data_result{"Mul2"}; + for (const auto &node : net_out_node->GetInControlNodes()) { + auto name = node->GetName(); + auto iter = std::find(expect_control_data_result.begin(), expect_control_data_result.end(), name); + if (iter != expect_control_data_result.end()) { + expect_control_data_result.erase(iter); + } + } + control_node_num = expect_control_data_result.size(); + EXPECT_EQ(control_node_num, 0); +} +TEST_F(UtestGraphPassesNetOutputPass, no_output_no_target_no_retval_success) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + // Construct specified output + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector> output_nodes = {{mul1, 0}, {mul2, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); 
+ Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); +} + +TEST_F(UtestGraphPassesNetOutputPass, user_out_node_success) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + // Construct specified output + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector> output_nodes = {{mul1, 0}, {mul2, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + + // Check data input + string str; + for (ge::NodePtr input_data_node : net_out_node->GetInDataNodes()) { + str += input_data_node->GetName() + ";"; + } + EXPECT_EQ(str, "Mul1;Mul2;"); + + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + + EXPECT_EQ(control_node_num, 0); +} + +TEST_F(UtestGraphPassesNetOutputPass, retval_node_for_out_success) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + // Imitate the output node of _Retval issued + ge::OpDescPtr retval_node_desc1 = std::make_shared("reval_node1", FRAMEWORKOP); + retval_node_desc1->AddInputDesc(ge::GeTensorDesc()); + (void)ge::AttrUtils::SetStr(retval_node_desc1, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, "_Retval"); + (void)ge::AttrUtils::SetInt(retval_node_desc1, RETVAL_ATTR_NAME_INDEX, 0); + ge::NodePtr retval_node1 = compute_graph->AddNode(retval_node_desc1); + EXPECT_NE(retval_node1, nullptr); + + ge::OpDescPtr retval_node_desc2 = std::make_shared("reval_node2", FRAMEWORKOP); + retval_node_desc2->AddInputDesc(ge::GeTensorDesc()); + (void)ge::AttrUtils::SetStr(retval_node_desc2, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, "_Retval"); + (void)ge::AttrUtils::SetInt(retval_node_desc2, RETVAL_ATTR_NAME_INDEX, 1); + ge::NodePtr retval_node2 = 
compute_graph->AddNode(retval_node_desc2); + EXPECT_NE(retval_node2, nullptr); + + for (NodePtr node : compute_graph->GetDirectNode()) { + if (node->GetName() == "Mul1") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), retval_node1->GetInDataAnchor(0)); + } else if (node->GetName() == "Mul2") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), retval_node2->GetInDataAnchor(0)); + } + } + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + + // Check data input + string str; + for (ge::NodePtr input_data_node : net_out_node->GetInDataNodes()) { + str += input_data_node->GetName() + ";"; + } + EXPECT_EQ(str, "Mul1;Mul2;"); + + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 0); + + // Check the deletion of _Retval node + retval_node1 = compute_graph->FindNode("reval_node1"); + EXPECT_EQ(retval_node1, nullptr); + retval_node2 = compute_graph->FindNode("reval_node2"); + EXPECT_EQ(retval_node2, nullptr); +} + +TEST_F(UtestGraphPassesNetOutputPass, check_order_and_const_flag_success) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + ge::OpDescPtr const_node_desc = std::make_shared("const_output", CONSTANT); + const_node_desc->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr const_node = compute_graph->AddNode(const_node_desc); + EXPECT_NE(const_node, nullptr); + NodePtr mul1 = compute_graph->FindNode("Mul1"); + EXPECT_NE(mul1, nullptr); + GraphUtils::AddEdge(mul1->GetOutControlAnchor(), const_node->GetInControlAnchor()); + + // Construct specified output + std::vector> output_nodes = {{const_node, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + + ge::OpDescPtr retval_node_desc2 = std::make_shared("reval_node2", FRAMEWORKOP); + 
retval_node_desc2->AddInputDesc(ge::GeTensorDesc()); + (void)ge::AttrUtils::SetStr(retval_node_desc2, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, "_Retval"); + (void)ge::AttrUtils::SetInt(retval_node_desc2, RETVAL_ATTR_NAME_INDEX, 0); + ge::NodePtr retval_node2 = compute_graph->AddNode(retval_node_desc2); + EXPECT_NE(retval_node2, nullptr); + NodePtr mul2 = compute_graph->FindNode("Mul2"); + EXPECT_NE(mul2, nullptr); + GraphUtils::AddEdge(mul2->GetOutDataAnchor(0), retval_node2->GetInDataAnchor(0)); + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_NE(net_out_node, nullptr); + + // Check data input + string str; + for (ge::NodePtr input_data_node : net_out_node->GetInDataNodes()) { + str += input_data_node->GetName() + ";"; + } + EXPECT_EQ(str, "const_output;Mul2;"); + + // Check control input + int control_node_num = net_out_node->GetInControlNodes().size(); + EXPECT_EQ(control_node_num, 0); + + // Check is_input_const flag + std::vector is_input_const = net_out_node->GetOpDesc()->GetIsInputConst(); + EXPECT_EQ(is_input_const.size(), 2); + EXPECT_EQ(is_input_const[0], true); + EXPECT_EQ(is_input_const[1], false); + + // Check the deletion of _Retval node + retval_node2 = compute_graph->FindNode("reval_node2"); + EXPECT_EQ(retval_node2, nullptr); +} + +TEST_F(UtestGraphPassesNetOutputPass, out_node_check_fail) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + // Construct specified output + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector> output_nodes_invalid_name = {{nullptr, 0}, {mul2, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes_invalid_name); + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = 
pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::INTERNAL_ERROR); + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_EQ(net_out_node, nullptr); + + std::vector> output_nodes_invalid_index = {{mul1, 0}, {mul2, 100}}; + compute_graph->SetGraphOutNodesInfo(output_nodes_invalid_index); + + status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::INTERNAL_ERROR); + net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_EQ(net_out_node, nullptr); +} + +TEST_F(UtestGraphPassesNetOutputPass, retval_node_check_fail) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + // Imitate the output node of _Retval issued + ge::OpDescPtr retval_node_desc1 = std::make_shared("reval_node1", FRAMEWORKOP); + retval_node_desc1->AddInputDesc(ge::GeTensorDesc()); + (void)ge::AttrUtils::SetStr(retval_node_desc1, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, "_Retval"); + (void)ge::AttrUtils::SetInt(retval_node_desc1, RETVAL_ATTR_NAME_INDEX, 0); + ge::NodePtr retval_node1 = compute_graph->AddNode(retval_node_desc1); + EXPECT_NE(retval_node1, nullptr); + + ge::OpDescPtr retval_node_desc2 = std::make_shared("reval_node2", FRAMEWORKOP); + retval_node_desc2->AddInputDesc(ge::GeTensorDesc()); + (void)ge::AttrUtils::SetStr(retval_node_desc2, ATTR_NAME_FRAMEWORK_ORIGINAL_TYPE, "_Retval"); + (void)ge::AttrUtils::SetInt(retval_node_desc2, RETVAL_ATTR_NAME_INDEX, 0); + ge::NodePtr retval_node2 = compute_graph->AddNode(retval_node_desc2); + EXPECT_NE(retval_node2, nullptr); + + for (NodePtr node : compute_graph->GetDirectNode()) { + if (node->GetName() == "Mul1") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), retval_node1->GetInDataAnchor(0)); + } else if (node->GetName() == "Mul2") { + GraphUtils::AddEdge(node->GetOutDataAnchor(0), retval_node2->GetInDataAnchor(0)); + } + } + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, 
ge::INTERNAL_ERROR); + NodePtr net_out_node = compute_graph->FindNode(NODE_NAME_NET_OUTPUT); + EXPECT_EQ(net_out_node, nullptr); +} + +TEST_F(UtestGraphPassesNetOutputPass, out_node_update_desc_check_fail) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + ge::OpDescPtr netout = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + ge::NodePtr netout_node = compute_graph->AddNode(netout); + EXPECT_NE(netout_node, nullptr); + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::INTERNAL_ERROR); +} + +TEST_F(UtestGraphPassesNetOutputPass, out_node_remove_check_fail) { + ge::ComputeGraphPtr compute_graph = build_graph(); + + // Construct specified output + ge::NodePtr mul1 = compute_graph->FindNode("Mul1"); + ge::NodePtr mul2 = compute_graph->FindNode("Mul2"); + std::vector> output_nodes = {{mul1, 0}, {mul2, 0}}; + compute_graph->SetGraphOutNodesInfo(output_nodes); + // compute_graph->RemoveNode(mul1); + mul1->GetInDataAnchor(0)->UnlinkAll(); + mul1->GetInDataAnchor(1)->UnlinkAll(); + GraphUtils::RemoveNodeWithoutRelink(compute_graph, mul1); + mul1 = compute_graph->FindNode("Mul1"); + EXPECT_EQ(mul1, nullptr); + + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) NetOutputPass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); +} + +TEST_F(UtestGraphPassesNetOutputPass, clear_weight) { + ge::ComputeGraphPtr compute_graph = BuildClearWeightGraph(); + auto cast = compute_graph->FindNode("Cast1"); + Status ret = ge::OpDescUtils::ClearWeights(cast); + EXPECT_EQ(ge::SUCCESS, ret); +} diff --git a/tests/ut/ge/graph/passes/no_reshape_op_remove_pass_unittest.cc b/tests/ut/ge/graph/passes/no_reshape_op_remove_pass_unittest.cc new file mode 100644 index 00000000..847daa94 --- /dev/null +++ b/tests/ut/ge/graph/passes/no_reshape_op_remove_pass_unittest.cc @@ -0,0 +1,204 @@ +/** + * Copyright 
2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/no_reshape_op_remove_pass.h" + +#include + +#include "common/ge_inner_error_codes.h" +#include "graph/compute_graph.h" +#include "graph/debug/graph_debug.h" +#include "graph/manager/graph_manager.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/op_desc.h" +#include "graph/operator_reg.h" +#include "graph/utils/op_desc_utils.h" +#include "graph_builder_utils.h" +#include "inc/pass_manager.h" +#include "opskernel_manager/ops_kernel_manager.h" + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestGraphNoReshapeOpRemovePass : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + 
ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + ge::OpDescPtr op_desc_; +}; + +/// data->expanddim->reshape1->reshape2->reshape3->squeeze->reshape4->sinh +/// / +/// const +void make_graph(ComputeGraphPtr &graph) { + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr node_expanddim_1 = NodeBuilder("ExpandDim", EXPANDDIMS) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + ge::NodePtr node_reshape_1 = NodeBuilder("Reshape_1", RESHAPE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 1, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + ge::NodePtr node_reshape_2 = NodeBuilder("Reshape_2", RESHAPE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 1, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + ge::NodePtr node_reshape_3 = NodeBuilder("Reshape_3", RESHAPE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + ge::NodePtr node_squeeze_1 = NodeBuilder("Squeeze", SQUEEZE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 1, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + ge::NodePtr node_const = + NodeBuilder("const", CONSTANT).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr node_reshape_4 = NodeBuilder("Reshape_4", RESHAPE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + 
ge::NodePtr node_sinh_1 = NodeBuilder("sinh", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 1, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_expanddim_1->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_expanddim_1->GetOutDataAnchor(0), node_reshape_1->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_reshape_1->GetOutDataAnchor(0), node_reshape_2->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_reshape_2->GetOutDataAnchor(0), node_reshape_3->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_reshape_3->GetOutDataAnchor(0), node_squeeze_1->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_squeeze_1->GetOutDataAnchor(0), node_reshape_4->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_const->GetOutDataAnchor(0), node_reshape_4->GetInDataAnchor(1)); + GraphUtils::AddEdge(node_reshape_4->GetOutDataAnchor(0), node_sinh_1->GetInDataAnchor(0)); +} + +// reshape->permute->transdata->correlation +void make_graph_for_sfc(ComputeGraphPtr &graph) { + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // reshape1 + ge::NodePtr node_reshape_1 = NodeBuilder("Reshape_3", RESHAPE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 1, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + // permute + ge::NodePtr node_permute_1 = NodeBuilder("permute", PERMUTE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + // transdata + ge::NodePtr node_transdata_1 = NodeBuilder("transdata", TRANSDATA) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + // transdata + ge::NodePtr node_correlation_1 = NodeBuilder("correlation", CORRELATION) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + 
.Build(graph); + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_reshape_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_reshape_1->GetOutDataAnchor(0), node_permute_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_permute_1->GetOutDataAnchor(0), node_transdata_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transdata_1->GetOutDataAnchor(0), node_correlation_1->GetInDataAnchor(0)); +} + +TEST_F(UtestGraphNoReshapeOpRemovePass, node_to_be_delete_success) { + ge::ComputeGraphPtr compute_graph = std::make_shared("test"); + make_graph(compute_graph); + + NoReshapeOpRemovePass noreshapepass; + ge::NodePtr expandDim1 = compute_graph->FindNode("ExpandDim"); + Status status = noreshapepass.Run(expandDim1); + EXPECT_EQ(status, ge::SUCCESS); + expandDim1 = compute_graph->FindNode("ExpandDim"); + EXPECT_EQ(expandDim1, nullptr); + + ge::NodePtr reshape1 = compute_graph->FindNode("Reshape_1"); + status = noreshapepass.Run(reshape1); + EXPECT_EQ(status, ge::SUCCESS); + reshape1 = compute_graph->FindNode("Reshape_1"); + EXPECT_EQ(reshape1, nullptr); + + ge::NodePtr reshape2 = compute_graph->FindNode("Reshape_2"); + EXPECT_EQ(reshape2, nullptr); + + ge::NodePtr reshape3 = compute_graph->FindNode("Reshape_3"); + EXPECT_EQ(reshape3, nullptr); + + ge::NodePtr reshape4 = compute_graph->FindNode("Reshape_4"); + status = noreshapepass.Run(reshape4); + EXPECT_EQ(status, ge::SUCCESS); + reshape4 = compute_graph->FindNode("Reshape_4"); + EXPECT_EQ(reshape4, nullptr); + + ge::NodePtr const1 = compute_graph->FindNode("const"); + auto output_size = const1->GetOutDataNodes().size(); + EXPECT_EQ(output_size, 0); + ge::NodePtr sinh1 = compute_graph->FindNode("sinh"); + auto input_size = sinh1->GetInDataNodes().size(); + EXPECT_EQ(input_size, 1); +} +TEST_F(UtestGraphNoReshapeOpRemovePass, reshape_for_sfc_net_success) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + make_graph_for_sfc(graph); + NoReshapeOpRemovePass noreshapepass; + + 
NodePtr reshape_node = graph->FindNode("Reshape_3"); + noreshapepass.Run(reshape_node); + NodePtr permute_node = graph->FindNode("permute"); + bool flag = false; + AttrUtils::GetBool(permute_node->GetOpDesc(), "reshape_correlation", flag); + EXPECT_EQ(flag, true); +} diff --git a/tests/ut/ge/graph/passes/no_use_reshape_remove_pass_unittest.cc b/tests/ut/ge/graph/passes/no_use_reshape_remove_pass_unittest.cc new file mode 100644 index 00000000..babeaa56 --- /dev/null +++ b/tests/ut/ge/graph/passes/no_use_reshape_remove_pass_unittest.cc @@ -0,0 +1,132 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/no_use_reshape_remove_pass.h" + +#include + +#include "common/ge_inner_error_codes.h" +#include "graph/compute_graph.h" +#include "graph/debug/graph_debug.h" +#include "graph/manager/graph_manager.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/op_desc.h" +#include "graph/operator_reg.h" +#include "graph/utils/op_desc_utils.h" +#include "graph_builder_utils.h" +#include "inc/pass_manager.h" +#include "opskernel_manager/ops_kernel_manager.h" + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestGraphNoUseReshapeRemovePass : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + ge::OpDescPtr op_desc_; +}; + +/// data->expanddim->reshape1->squeeze->reshape4->sinh +/// / +/// const +void make_reshape_graph(ComputeGraphPtr &graph) { + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 
2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr node_expanddim_1 = NodeBuilder("ExpandDim", EXPANDDIMS) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + ge::NodePtr node_reshape_1 = NodeBuilder("Reshape_1", RESHAPE) + .AddInputDesc({2, 1, 2, 2}, FORMAT_ND, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_ND, DT_FLOAT16) + .Build(graph); + + ge::NodePtr node_squeeze_1 = NodeBuilder("Squeeze", SQUEEZE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 1, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + ge::NodePtr node_const = + NodeBuilder("const", CONSTANT).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr node_reshape_4 = NodeBuilder("Reshape_4", RESHAPE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + ge::NodePtr node_sinh_1 = NodeBuilder("sinh", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 1, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_expanddim_1->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_expanddim_1->GetOutDataAnchor(0), node_reshape_1->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_reshape_1->GetOutDataAnchor(0), node_squeeze_1->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_squeeze_1->GetOutDataAnchor(0), node_reshape_4->GetInDataAnchor(0)); + GraphUtils::AddEdge(node_const->GetOutDataAnchor(0), node_reshape_4->GetInDataAnchor(1)); + GraphUtils::AddEdge(node_reshape_4->GetOutDataAnchor(0), node_sinh_1->GetInDataAnchor(0)); +} + +TEST_F(UtestGraphNoUseReshapeRemovePass, node_to_be_delete_success) { + ge::ComputeGraphPtr compute_graph = std::make_shared("test"); + make_reshape_graph(compute_graph); + + // normal case + NoUseReshapeRemovePass no_use_reshape_pass; + ge::NodePtr 
reshape_node = compute_graph->FindNode("Reshape_4"); + Status status = no_use_reshape_pass.Run(reshape_node); + EXPECT_EQ(status, ge::SUCCESS); + + // not reshape node case + ge::NodePtr squeeze_node = compute_graph->FindNode("Squeeze"); + status = no_use_reshape_pass.Run(squeeze_node); + EXPECT_EQ(status, ge::SUCCESS); + + // ND + ge::NodePtr reshape_node2 = compute_graph->FindNode("Reshape_1"); + status = no_use_reshape_pass.Run(reshape_node2); + EXPECT_EQ(status, ge::SUCCESS); +} diff --git a/tests/ut/ge/graph/passes/pass_manager_unittest.cc b/tests/ut/ge/graph/passes/pass_manager_unittest.cc new file mode 100644 index 00000000..0763a61b --- /dev/null +++ b/tests/ut/ge/graph/passes/pass_manager_unittest.cc @@ -0,0 +1,105 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "inc/pass_manager.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/attr_value_util.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/types.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class SuccessGraphPass : public GraphPass { + Status Run(ComputeGraphPtr graph) { return SUCCESS; } +}; + +class NotChangedGraphPass : public GraphPass { + Status Run(ComputeGraphPtr graph) { return NOT_CHANGED; } +}; + +class ErrorGraphPass : public GraphPass { + Status Run(ComputeGraphPtr graph) { return FAILED; } +}; + +class UtestGraphPassesPassManagerPass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +NodePtr AddNode(ComputeGraphPtr graph) { + GeTensorDesc tensor_desc(GeShape({1}), FORMAT_NHWC, DT_INT32); + OpDescPtr opdesc = make_shared("test", "Add"); + opdesc->AddInputDesc(tensor_desc); + opdesc->AddOutputDesc(tensor_desc); + NodePtr node = graph->AddNode(opdesc); + return node; +} + +ComputeGraphPtr CreatePadGraph() { + ComputeGraphPtr graph = std::make_shared("test"); + return graph; +} + +TEST_F(UtestGraphPassesPassManagerPass, all_pass_success) { + PassManager manager; + manager.AddPass(new SuccessGraphPass); + EXPECT_EQ(manager.GraphPasses().size(), 1); + + ComputeGraphPtr graph = CreatePadGraph(); + Status status = manager.Run(graph); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesPassManagerPass, graph_pass_success) { + ComputeGraphPtr graph = CreatePadGraph(); + SuccessGraphPass pass; + vector passes = {&pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(SUCCESS, status); +} + 
+TEST_F(UtestGraphPassesPassManagerPass, graph_pass_not_changed) { + ComputeGraphPtr graph = CreatePadGraph(); + NotChangedGraphPass pass; + vector passes = {&pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(NOT_CHANGED, status); +} + +TEST_F(UtestGraphPassesPassManagerPass, graph_pass_error) { + ComputeGraphPtr graph = CreatePadGraph(); + ErrorGraphPass pass; + vector passes = {&pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(FAILED, status); +} diff --git a/tests/ut/ge/graph/passes/pass_utils_unittest.cc b/tests/ut/ge/graph/passes/pass_utils_unittest.cc new file mode 100644 index 00000000..a151cc7a --- /dev/null +++ b/tests/ut/ge/graph/passes/pass_utils_unittest.cc @@ -0,0 +1,177 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/pass_utils.h" + +#include +#include + +#include "common/types.h" +#include "graph/types.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph_builder_utils.h" +#include "inc/kernel.h" +#include "inc/kernel_factory.h" + +using namespace ge; + +class UtestGraphPassesPassUtils : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + + ge::OpDescPtr op_desc_; +}; + +TEST_F(UtestGraphPassesPassUtils, set_out_node_weight) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + // data + ge::NodePtr node_data = NodeBuilder("data", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + // const + ge::NodePtr node_const = + NodeBuilder("const", CONSTANT).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + // relu + ge::NodePtr node_relu = NodeBuilder("node_relu1", RELU) + 
.AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + // sinh + ge::NodePtr node_sinh = NodeBuilder("node_sinh", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + // relu + ge::NodePtr node_relu2 = NodeBuilder("node_relu2", RELU) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + // sinh + ge::NodePtr node_sinh2 = NodeBuilder("node_sinh2", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutControlAnchor(), node_const->GetInControlAnchor()); + ge::GraphUtils::AddEdge(node_const->GetOutDataAnchor(0), node_relu->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_relu->GetOutDataAnchor(0), node_sinh->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_relu->GetOutDataAnchor(0), node_relu2->GetInControlAnchor()); + ge::GraphUtils::AddEdge(node_relu2->GetOutDataAnchor(0), node_sinh2->GetInDataAnchor(0)); + + for (auto node : graph->GetDirectNode()) { + if (node->GetType() == CONSTANT) { + int32_t weight[] = {1}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_INT32); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + vector tensor_vec = {tensor}; + OpDescUtils::SetWeights(node, tensor_vec); + } + if (!node->GetOutDataNodes().empty()) { + auto out_data_anchor = node->GetOutDataNodes().at(0)->GetOutDataAnchor(0); + Status status = PassUtils::SetOutNodeWeight(out_data_anchor, node); + EXPECT_EQ(SUCCESS, status); + } + } +} + +// only some failure castes for coverage check +TEST_F(UtestGraphPassesPassUtils, is_constant_null) { + ge::NodePtr node = nullptr; + bool ret = PassUtils::IsConstant(node); + EXPECT_EQ(false, ret); +} + +TEST_F(UtestGraphPassesPassUtils, 
get_in_data_node_fail) { + ge::NodePtr node = nullptr; + NodePtr in_data_node = PassUtils::GetInDataNode(node, 0); + EXPECT_EQ(nullptr, in_data_node); + + ge::ComputeGraphPtr graph = std::make_shared("test"); + // relu + ge::NodePtr node_relu = NodeBuilder("relu", RELU) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr data_node = PassUtils::GetInDataNode(node_relu, 1); + EXPECT_EQ(nullptr, data_node); +} + +TEST_F(UtestGraphPassesPassUtils, get_unique_in_data_anchor_index_failed) { + int invalid_index = -1; + ge::NodePtr node = nullptr; + int status = PassUtils::GetUniqueInDataAnchorIndex(node); + EXPECT_EQ(invalid_index, status); + + ge::ComputeGraphPtr graph = std::make_shared("test"); + // relu + ge::NodePtr node_relu = NodeBuilder("relu", RELU) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + int ret = PassUtils::GetUniqueInDataAnchorIndex(node_relu); + EXPECT_EQ(invalid_index, ret); +} + +TEST_F(UtestGraphPassesPassUtils, unlink_node_with_ctrl_copy_fail) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + // relu + ge::NodePtr node_relu = NodeBuilder("relu", RELU) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + Status status = PassUtils::UnlinkNodeWithControlCopy(node_relu, 1); + EXPECT_EQ(ge::SUCCESS, status); + Status ret = PassUtils::UnlinkNodeWithControlCopy(node_relu, 0); + EXPECT_EQ(ge::FAILED, ret); +} + +TEST_F(UtestGraphPassesPassUtils, null_input) { + std::vector deleted_nodes; + std::vector end_nodes; + EXPECT_NE(PassUtils::RemoveInactiveBranchToMerge(nullptr, deleted_nodes, end_nodes), 0); +} diff --git a/tests/ut/ge/graph/passes/permute_pass_unittest.cc b/tests/ut/ge/graph/passes/permute_pass_unittest.cc new file mode 100644 index 00000000..996c6bd2 --- /dev/null +++ 
b/tests/ut/ge/graph/passes/permute_pass_unittest.cc @@ -0,0 +1,73 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/permute_pass.h" + +#include +#include + +using namespace ge; + +class UtestGraphPassesPermutePass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc(GeShape({1}), FORMAT_NHWC, DT_INT32); + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(op_desc); + return node; + } + + ComputeGraphPtr CreatePadGraph() { + ComputeGraphPtr graph = std::make_shared("test"); + + NodePtr data_node = AddNode(graph, "data_op", DATA); + + NodePtr transpose_node = AddNode(graph, "transpose1", PERMUTE); + vector order_list = {0, 3, 1, 2}; + AttrUtils::SetListInt(transpose_node->GetOpDesc(), PERMUTE_ATTR_ORDER, order_list); + AttrUtils::SetInt(transpose_node->GetOpDesc(), ATTR_NAME_FORMAT, (int64_t)DT_INT32); + + NodePtr conv_node = AddNode(graph, "conv1", CONVOLUTION); + NodePtr conv2_node = AddNode(graph, "conv2", CONVOLUTION); + + 
GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), transpose_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(transpose_node->GetOutDataAnchor(0), conv_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), conv2_node->GetInDataAnchor(0)); + + return graph; + } +}; + +TEST_F(UtestGraphPassesPermutePass, transpose_and_conv3) { + ComputeGraphPtr graph = CreatePadGraph(); + + ge::PermutePass permute_pass; + Status status = permute_pass.Run(graph); + + EXPECT_EQ(SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/placeholder_with_default_pass_unittest.cc b/tests/ut/ge/graph/passes/placeholder_with_default_pass_unittest.cc new file mode 100644 index 00000000..b837bf25 --- /dev/null +++ b/tests/ut/ge/graph/passes/placeholder_with_default_pass_unittest.cc @@ -0,0 +1,79 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/placeholder_with_default_pass.h" + +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestPlaceholderWithDefaultPass : public Test { + protected: + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc; + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(op_desc); + return node; + } +}; + +TEST_F(UtestPlaceholderWithDefaultPass, succ) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = AddNode(graph, "PlaceholderWithDefault", PLACEHOLDERWITHDEFAULT); + NodePtr reduce_min_node = AddNode(graph, "reduceMin", REDUCEMIN); + + GraphUtils::AddEdge(node->GetOutDataAnchor(0), reduce_min_node->GetInDataAnchor(0)); + + PlaceholderWithDefaultPass pass; + Status status = pass.Run(node); + EXPECT_EQ(status, SUCCESS); + NodePtr found_node = graph->FindNode("PlaceholderWithDefault"); + EXPECT_EQ(found_node, nullptr); + + status = pass.Run(reduce_min_node); + EXPECT_EQ(status, SUCCESS); + + string type2 = "FrameworkOp"; + node->GetOpDesc()->SetType(type2); + pass.Run(node); + // EXPECT_EQ(ge::SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/prevent_gradient_pass_unittest.cc 
b/tests/ut/ge/graph/passes/prevent_gradient_pass_unittest.cc new file mode 100644 index 00000000..39a6cb6a --- /dev/null +++ b/tests/ut/ge/graph/passes/prevent_gradient_pass_unittest.cc @@ -0,0 +1,79 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/prevent_gradient_pass.h" + +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestPreventGradientPass : public Test { + protected: + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc; + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(op_desc); + return node; + } +}; + +TEST_F(UtestPreventGradientPass, succ) { + 
ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = AddNode(graph, "PreventGradient", PREVENTGRADIENT); + NodePtr reduce_min_node = AddNode(graph, "reduceMin", REDUCEMIN); + + GraphUtils::AddEdge(node->GetOutDataAnchor(0), reduce_min_node->GetInDataAnchor(0)); + + PreventGradientPass pass; + Status status = pass.Run(node); + EXPECT_EQ(status, SUCCESS); + NodePtr found_node = graph->FindNode("PreventGradient"); + EXPECT_EQ(found_node, nullptr); + + status = pass.Run(reduce_min_node); + EXPECT_EQ(status, SUCCESS); + + string type2 = "FrameworkOp"; + node->GetOpDesc()->SetType(type2); + status = pass.Run(node); + // EXPECT_EQ(ge::SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/print_op_pass_unittest.cc b/tests/ut/ge/graph/passes/print_op_pass_unittest.cc new file mode 100644 index 00000000..65f7d37c --- /dev/null +++ b/tests/ut/ge/graph/passes/print_op_pass_unittest.cc @@ -0,0 +1,81 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/print_op_pass.h" + +#include + +#include "omg/omg_inner_types.h" +#include "utils/op_desc_utils.h" + +using domi::GetContext; + +namespace ge { +class UtestGraphPassesPrintOpPass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + public: + void make_graph(ComputeGraphPtr graph, bool match = true, int flag = 0) { + auto data = std::make_shared("Data", DATA); + GeTensorDesc tensor_desc_data(GeShape({1, 1, 1, 1})); + data->AddInputDesc(tensor_desc_data); + data->AddOutputDesc(tensor_desc_data); + auto data_node = graph->AddNode(data); + + auto data1 = std::make_shared("Data", DATA); + data1->AddInputDesc(tensor_desc_data); + data1->AddOutputDesc(tensor_desc_data); + auto data_node1 = graph->AddNode(data1); + + auto print_desc = std::make_shared("Print", "Print"); + print_desc->AddInputDesc(tensor_desc_data); + print_desc->AddInputDesc(tensor_desc_data); + print_desc->AddOutputDesc(tensor_desc_data); + auto print_node = graph->AddNode(print_desc); + + auto ret_val_desc = std::make_shared("RetVal", "RetVal"); + ret_val_desc->AddInputDesc(tensor_desc_data); + ret_val_desc->AddOutputDesc(tensor_desc_data); + auto ret_val_node = graph->AddNode(ret_val_desc); + + auto ret = GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), print_node->GetInDataAnchor(0)); + ret = GraphUtils::AddEdge(data_node1->GetOutDataAnchor(0), print_node->GetInDataAnchor(1)); + ret = GraphUtils::AddEdge(print_node->GetOutDataAnchor(0), ret_val_node->GetInDataAnchor(0)); + } +}; + +TEST_F(UtestGraphPassesPrintOpPass, apply_success) { + GetContext().out_nodes_map.clear(); + ComputeGraphPtr graph = std::make_shared("test_graph"); + make_graph(graph); + ge::PrintOpPass apply_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("Test", &apply_pass); + GEPass pass(graph); + Status status = pass.Run(names_to_pass); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesPrintOpPass, param_invalid) { + ge::NodePtr node = nullptr; + 
ge::PrintOpPass apply_pass; + Status status = apply_pass.Run(node); + EXPECT_EQ(ge::PARAM_INVALID, status); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/prune_pass_unittest.cc b/tests/ut/ge/graph/passes/prune_pass_unittest.cc new file mode 100644 index 00000000..f29b67da --- /dev/null +++ b/tests/ut/ge/graph/passes/prune_pass_unittest.cc @@ -0,0 +1,490 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include +#include + +#include "omg/omg_inner_types.h" + +#define protected public +#define private public +#include "graph/passes/prune_pass.h" + +#include "anchor.h" +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/attr_value.h" +#include "graph/debug/ge_attr_define.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; +using namespace std; + +class UtestGraphPassesPrunePass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +// case1:no net_out_put_node +TEST_F(UtestGraphPassesPrunePass, no_net_out_put_node) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr reverse_op = std::make_shared(); + reverse_op->SetType(REVERSE); + reverse_op->SetName("Reverse"); + 
reverse_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node = graph->AddNode(reverse_op); + + ge::OpDescPtr floor_op = std::make_shared(); + floor_op->SetType(FLOOR); + floor_op->SetName("Floor"); + floor_op->AddInputDesc(ge::GeTensorDesc()); + floor_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node = graph->AddNode(floor_op); + + ge::GraphUtils::AddEdge(reverse_node->GetOutDataAnchor(0), floor_node->GetInDataAnchor(0)); + + uint64_t size_ori = graph->GetDirectNode().size(); + PrunePass prune_pass; + vector passes = {&prune_pass}; + Status status = PassManager::Run(graph, passes); + + EXPECT_EQ(ge::SUCCESS, status); + + uint64_t size = graph->GetDirectNode().size(); + EXPECT_EQ(size, size_ori); +} +// case2: one net path with one bypass branch +TEST_F(UtestGraphPassesPrunePass, has_net_out_put_node_with_only_one_path) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr reverse_op = std::make_shared(); + reverse_op->SetType(REVERSE); + reverse_op->SetName("Reverse"); + reverse_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node = graph->AddNode(reverse_op); + + ge::OpDescPtr floor_op = std::make_shared(); + floor_op->SetType(FLOOR); + floor_op->SetName("Floor"); + floor_op->AddInputDesc(ge::GeTensorDesc()); + floor_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node = graph->AddNode(floor_op); + + ge::OpDescPtr net_output_op = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + net_output_op->AddInputDesc(ge::GeTensorDesc()); + net_output_op->AddOutputDesc(ge::GeTensorDesc()); + ge::AttrUtils::SetBool(net_output_op, "identity_add_netoutput", true); + ge::NodePtr netoutput_node = graph->AddNode(net_output_op); + + ge::OpDescPtr reverse_op1 = std::make_shared(); + reverse_op->SetType(REVERSE); + reverse_op->SetName("Reverse1"); + reverse_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node1 = graph->AddNode(reverse_op1); + + 
ge::GraphUtils::AddEdge(reverse_node->GetOutDataAnchor(0), floor_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(floor_node->GetOutDataAnchor(0), netoutput_node->GetInDataAnchor(0)); + + uint64_t size_ori = graph->GetDirectNode().size(); + PrunePass prune_pass; + vector passes = {&prune_pass}; + Status status = PassManager::Run(graph, passes); + + uint64_t size = graph->GetDirectNode().size(); + int diff = size_ori - size; + EXPECT_EQ(ge::SUCCESS, status); + EXPECT_EQ(diff, 1); +} +// case3: one net path with one bypass branch +TEST_F(UtestGraphPassesPrunePass, has_net_out_put_node_with_one_valid_path_and_one_bypass_path) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + // valid path construct (reverse->floor->net_out) + ge::OpDescPtr reverse_op = std::make_shared(); + reverse_op->SetType(REVERSE); + reverse_op->SetName("Reverse"); + reverse_op->AddOutputDesc(ge::GeTensorDesc()); + reverse_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node = graph->AddNode(reverse_op); + + ge::OpDescPtr floor_op = std::make_shared(); + floor_op->SetType(FLOOR); + floor_op->SetName("Floor"); + floor_op->AddInputDesc(ge::GeTensorDesc()); + floor_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node = graph->AddNode(floor_op); + + ge::OpDescPtr net_output_op = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + net_output_op->AddInputDesc(ge::GeTensorDesc()); + net_output_op->AddOutputDesc(ge::GeTensorDesc()); + ge::AttrUtils::SetBool(net_output_op, "identity_add_netoutput", true); + ge::NodePtr netoutput_node = graph->AddNode(net_output_op); + + ge::GraphUtils::AddEdge(reverse_node->GetOutDataAnchor(0), floor_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(floor_node->GetOutDataAnchor(0), netoutput_node->GetInDataAnchor(0)); + + // incvalid path construct (reverse->floor1->floor2) + ge::OpDescPtr floor_op1 = std::make_shared(); + floor_op1->SetType(FLOOR); + floor_op1->SetName("Floor1"); + 
floor_op1->AddInputDesc(ge::GeTensorDesc()); + floor_op1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node1 = graph->AddNode(floor_op1); + + ge::OpDescPtr floor_op2 = std::make_shared(); + floor_op2->SetType(FLOOR); + floor_op2->SetName("Floor2"); + floor_op2->AddInputDesc(ge::GeTensorDesc()); + floor_op2->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node2 = graph->AddNode(floor_op2); + // isolated node + ge::OpDescPtr floor_op3 = std::make_shared(); + floor_op3->SetType(FLOOR); + floor_op3->SetName("Floor3"); + floor_op3->AddInputDesc(ge::GeTensorDesc()); + floor_op3->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node3 = graph->AddNode(floor_op3); + + ge::GraphUtils::AddEdge(reverse_node->GetOutDataAnchor(1), floor_node1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(floor_node1->GetOutDataAnchor(0), floor_node2->GetInDataAnchor(0)); + + uint64_t size_ori = graph->GetDirectNode().size(); + PrunePass prune_pass; + vector passes = {&prune_pass}; +} + +// case 4: multi net path with one common netout(1:multi:1) +TEST_F(UtestGraphPassesPrunePass, has_net_out_put_node_with_multi_path) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op = std::make_shared(); + data_op->SetType(DATA); + data_op->SetName("data"); + data_op->AddOutputDesc(ge::GeTensorDesc()); + data_op->AddOutputDesc(ge::GeTensorDesc()); + data_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr data_node = graph->AddNode(data_op); + + ge::OpDescPtr reverse_op1 = std::make_shared(); + reverse_op1->SetType(REVERSE); + reverse_op1->SetName("Reverse1"); + reverse_op1->AddInputDesc(ge::GeTensorDesc()); + reverse_op1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node1 = graph->AddNode(reverse_op1); + + ge::OpDescPtr floor_op1 = std::make_shared(); + floor_op1->SetType(FLOOR); + floor_op1->SetName("Floor1"); + floor_op1->AddInputDesc(ge::GeTensorDesc()); + floor_op1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node1 = 
graph->AddNode(floor_op1); + + ge::OpDescPtr reverse_op2 = std::make_shared(); + reverse_op2->SetType(REVERSE); + reverse_op2->SetName("Reverse2"); + reverse_op2->AddInputDesc(ge::GeTensorDesc()); + reverse_op2->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node2 = graph->AddNode(reverse_op2); + + ge::OpDescPtr floor_op2 = std::make_shared(); + floor_op2->SetType(FLOOR); + floor_op2->SetName("Floor2"); + floor_op2->AddInputDesc(ge::GeTensorDesc()); + floor_op2->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node2 = graph->AddNode(floor_op2); + + ge::OpDescPtr reverse_op3 = std::make_shared(); + reverse_op3->SetType(REVERSE); + reverse_op3->SetName("Reverse3"); + reverse_op3->AddInputDesc(ge::GeTensorDesc()); + reverse_op3->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node3 = graph->AddNode(reverse_op3); + + ge::OpDescPtr floor_op3 = std::make_shared(); + floor_op3->SetType(FLOOR); + floor_op3->SetName("Floor3"); + floor_op3->AddInputDesc(ge::GeTensorDesc()); + floor_op3->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node3 = graph->AddNode(floor_op3); + + ge::OpDescPtr net_output_op = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + net_output_op->AddInputDesc(ge::GeTensorDesc()); + net_output_op->AddInputDesc(ge::GeTensorDesc()); + net_output_op->AddInputDesc(ge::GeTensorDesc()); + net_output_op->AddOutputDesc(ge::GeTensorDesc()); + ge::AttrUtils::SetBool(net_output_op, "identity_add_netoutput", true); + ge::NodePtr netoutput_node = graph->AddNode(net_output_op); + + ge::GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), reverse_node1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(data_node->GetOutDataAnchor(1), reverse_node2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(data_node->GetOutDataAnchor(2), reverse_node3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(reverse_node1->GetOutDataAnchor(0), floor_node1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(floor_node1->GetOutDataAnchor(0), 
netoutput_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(reverse_node2->GetOutDataAnchor(0), floor_node2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(floor_node2->GetOutDataAnchor(0), netoutput_node->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(reverse_node3->GetOutDataAnchor(0), floor_node3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(floor_node3->GetOutDataAnchor(0), netoutput_node->GetInDataAnchor(2)); + + uint64_t size_ori = graph->GetDirectNode().size(); + + PrunePass prune_pass; + vector passes = {&prune_pass}; + Status status = PassManager::Run(graph, passes); + + uint64_t size_after_proc = graph->GetDirectNode().size(); + EXPECT_EQ(size_ori, size_after_proc); +} +// case 5: circle,diamand style +TEST_F(UtestGraphPassesPrunePass, multi_net_out_put_node_with_circle_net) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op = std::make_shared(); + data_op->SetType(DATA); + data_op->SetName("data"); + data_op->AddOutputDesc(ge::GeTensorDesc()); + data_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr data_node = graph->AddNode(data_op); + + ge::OpDescPtr op_1 = std::make_shared(); + op_1->SetType(REVERSE); + op_1->SetName("Reverse1"); + op_1->AddInputDesc(ge::GeTensorDesc()); + op_1->AddOutputDesc(ge::GeTensorDesc()); + op_1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_1 = graph->AddNode(op_1); + + ge::OpDescPtr op_2 = std::make_shared(); + op_2->SetType(REVERSE); + op_2->SetName("Reverse2"); + op_2->AddInputDesc(ge::GeTensorDesc()); + op_2->AddInputDesc(ge::GeTensorDesc()); + op_2->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_2 = graph->AddNode(op_2); + + ge::OpDescPtr op_3 = std::make_shared(); + op_3->SetType(REVERSE); + op_3->SetName("Reverse3"); + op_3->AddInputDesc(ge::GeTensorDesc()); + op_3->AddInputDesc(ge::GeTensorDesc()); + op_3->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_3 = graph->AddNode(op_3); + + ge::OpDescPtr op_4 = std::make_shared(); + op_4->SetType(REVERSE); + 
op_4->SetName("Reverse4"); + op_4->AddInputDesc(ge::GeTensorDesc()); + op_4->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_4 = graph->AddNode(op_4); + + ge::OpDescPtr op_5 = std::make_shared(); + op_5->SetType(REVERSE); + op_5->SetName("Reverse5"); + op_5->AddInputDesc(ge::GeTensorDesc()); + op_5->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_5 = graph->AddNode(op_5); + + ge::OpDescPtr net_output_op = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + net_output_op->AddInputDesc(ge::GeTensorDesc()); + net_output_op->AddOutputDesc(ge::GeTensorDesc()); + ge::AttrUtils::SetBool(net_output_op, "identity_add_netoutput", true); + ge::NodePtr netoutput_node = graph->AddNode(net_output_op); + + ge::GraphUtils::AddEdge(node_1->GetOutDataAnchor(0), netoutput_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_2->GetOutDataAnchor(0), node_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_3->GetOutDataAnchor(0), node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4->GetOutDataAnchor(0), node_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_1->GetOutDataAnchor(1), node_4->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), node_2->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(data_node->GetOutDataAnchor(1), node_5->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_5->GetOutDataAnchor(0), node_3->GetInDataAnchor(1)); + + uint64_t size_ori = graph->GetDirectNode().size(); + + PrunePass prune_pass; + vector passes = {&prune_pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(ge::SUCCESS, status); + uint64_t size_after_proc = graph->GetDirectNode().size(); + EXPECT_EQ(size_ori, size_after_proc); +} + +// case 6: two mix circle and multi path,diamand style +TEST_F(UtestGraphPassesPrunePass, mix_two_circle_net) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr data_op = std::make_shared(); + data_op->SetType(DATA); + data_op->SetName("data"); + 
data_op->AddOutputDesc(ge::GeTensorDesc()); + data_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr data_node = graph->AddNode(data_op); + + ge::OpDescPtr op_1 = std::make_shared(); + op_1->SetType(REVERSE); + op_1->SetName("Reverse1"); + op_1->AddInputDesc(ge::GeTensorDesc()); + op_1->AddInputDesc(ge::GeTensorDesc()); + op_1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_1 = graph->AddNode(op_1); + + ge::OpDescPtr op_2 = std::make_shared(); + op_2->SetType(REVERSE); + op_2->SetName("Reverse2"); + op_2->AddInputDesc(ge::GeTensorDesc()); + op_2->AddOutputDesc(ge::GeTensorDesc()); + op_2->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_2 = graph->AddNode(op_2); + + ge::OpDescPtr op_3 = std::make_shared(); + op_3->SetType(REVERSE); + op_3->SetName("Reverse3"); + op_3->AddInputDesc(ge::GeTensorDesc()); + op_3->AddInputDesc(ge::GeTensorDesc()); + op_3->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_3 = graph->AddNode(op_3); + + ge::OpDescPtr op_4 = std::make_shared(); + op_4->SetType(REVERSE); + op_4->SetName("Reverse4"); + op_4->AddInputDesc(ge::GeTensorDesc()); + op_4->AddInputDesc(ge::GeTensorDesc()); + op_4->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_4 = graph->AddNode(op_4); + + ge::OpDescPtr op_5 = std::make_shared(); + op_5->SetType(REVERSE); + op_5->SetName("Reverse5"); + op_5->AddInputDesc(ge::GeTensorDesc()); + op_5->AddOutputDesc(ge::GeTensorDesc()); + op_5->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_5 = graph->AddNode(op_5); + + ge::OpDescPtr net_output_op = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + net_output_op->AddInputDesc(ge::GeTensorDesc()); + net_output_op->AddOutputDesc(ge::GeTensorDesc()); + ge::AttrUtils::SetBool(net_output_op, "identity_add_netoutput", true); + ge::NodePtr netoutput_node = graph->AddNode(net_output_op); + + ge::GraphUtils::AddEdge(node_1->GetOutDataAnchor(0), netoutput_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_2->GetOutDataAnchor(0), 
node_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_5->GetOutDataAnchor(0), node_1->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(node_4->GetOutDataAnchor(0), node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_2->GetOutDataAnchor(1), node_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_5->GetOutDataAnchor(1), node_3->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(node_3->GetOutDataAnchor(0), node_4->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), node_4->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(node_4->GetOutDataAnchor(1), node_5->GetInDataAnchor(0)); + // construct two isolated node + ge::OpDescPtr op_6 = std::make_shared(); + op_6->SetType(REVERSE); + op_6->SetName("Reverse"); + op_6->AddInputDesc(ge::GeTensorDesc()); + op_6->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_6 = graph->AddNode(op_6); + + ge::OpDescPtr op_7 = std::make_shared(); + op_7->SetType(REVERSE); + op_7->SetName("Reverse"); + op_7->AddInputDesc(ge::GeTensorDesc()); + op_7->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr node_7 = graph->AddNode(op_7); + + uint64_t size_ori = graph->GetDirectNode().size(); + + PrunePass prune_pass; + vector passes = {&prune_pass}; +} +// case7: one net path with two DATA node +TEST_F(UtestGraphPassesPrunePass, has_net_out_put_node_with_two_isolate_data_node) { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + ge::OpDescPtr reverse_op = std::make_shared(); + reverse_op->SetType(REVERSE); + reverse_op->SetName("Reverse"); + reverse_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node = graph->AddNode(reverse_op); + + ge::OpDescPtr floor_op = std::make_shared(); + floor_op->SetType(FLOOR); + floor_op->SetName("Floor"); + floor_op->AddInputDesc(ge::GeTensorDesc()); + floor_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr floor_node = graph->AddNode(floor_op); + + ge::OpDescPtr net_output_op = std::make_shared(NODE_NAME_NET_OUTPUT, NETOUTPUT); + 
net_output_op->AddInputDesc(ge::GeTensorDesc()); + net_output_op->AddOutputDesc(ge::GeTensorDesc()); + ge::AttrUtils::SetBool(net_output_op, "identity_add_netoutput", true); + ge::NodePtr netoutput_node = graph->AddNode(net_output_op); + // construct one isolated DATA node (to be deleted) + ge::OpDescPtr reverse_op_1 = std::make_shared(); + reverse_op_1->SetType(REVERSE); + reverse_op_1->SetName("Reverse1"); + reverse_op_1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr reverse_node_1 = graph->AddNode(reverse_op_1); + + ge::GraphUtils::AddEdge(reverse_node->GetOutDataAnchor(0), floor_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(floor_node->GetOutDataAnchor(0), netoutput_node->GetInDataAnchor(0)); + // construct two isolated DATA nodes(to be not deleted) + ge::OpDescPtr data_op_1 = std::make_shared(); + data_op_1->SetType(DATA); + data_op_1->SetName("data"); + data_op_1->AddOutputDesc(ge::GeTensorDesc()); + data_op_1->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr data_node_1 = graph->AddNode(data_op_1); + + ge::OpDescPtr data_op_2 = std::make_shared(); + data_op_2->SetType(DATA); + data_op_2->SetName("data1"); + data_op_2->AddOutputDesc(ge::GeTensorDesc()); + data_op_2->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr data_node = graph->AddNode(data_op_2); + + uint64_t size_ori = graph->GetDirectNode().size(); + PrunePass prune_pass; + vector passes = {&prune_pass}; + Status status = PassManager::Run(graph, passes); + + uint64_t size = graph->GetDirectNode().size(); + EXPECT_EQ(ge::SUCCESS, status); + EXPECT_EQ(size_ori, (size + 1)); + + // it should check net_out_put's input data node and input control node + auto control_vec = netoutput_node->GetInControlNodes(); + EXPECT_EQ(control_vec.size(), 2); + // check control_vec contains only data node + for (auto node : control_vec) { + bool result = (node->GetName() == "data" || node->GetName() == "data1") ? 
true : false; + EXPECT_EQ(result, true); + } + + auto data_vec = netoutput_node->GetInDataNodes(); + EXPECT_EQ(data_vec.size(), 1); + // check data_vec contains only Floor node + for (auto node : data_vec) { + bool result = (node->GetName() == "Floor") ? true : false; + EXPECT_EQ(result, true); + } +} diff --git a/tests/ut/ge/graph/passes/reshape_remove_pass_unittest.cc b/tests/ut/ge/graph/passes/reshape_remove_pass_unittest.cc new file mode 100644 index 00000000..12d35e1f --- /dev/null +++ b/tests/ut/ge/graph/passes/reshape_remove_pass_unittest.cc @@ -0,0 +1,183 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/reshape_remove_pass.h" + +#include +#include +#include + +#include "graph_builder_utils.h" + +namespace ge { +class UtestReshapeRemovePass : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +namespace { +/// netoutput1 +/// | +/// transdata1 +/// | +/// reshape1 +/// | \ +/// var1 const1 +ut::GraphBuilder Graph1Builder() { + ut::GraphBuilder builder = ut::GraphBuilder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + ; + auto const1 = builder.AddNode("const1", "Const", 0, 1); + auto reshape1 = builder.AddNode("reshape1", "Reshape", 2, 1); + auto transdata1 = builder.AddNode("transdata1", "Transdata", 1, 1); + auto netoutput1 = builder.AddNode("netoutput1", "Netoutput", 1, 0); + + builder.AddDataEdge(var1, 0, reshape1, 0); + builder.AddDataEdge(const1, 0, reshape1, 1); + builder.AddDataEdge(reshape1, 0, transdata1, 0); + builder.AddDataEdge(transdata1, 0, netoutput1, 0); + + return builder; +} + +/// netoutput1 +/// | \ +///transdata1 \ +/// | \ +/// reshape1 reshape2 +/// | \ / \ +/// var1 const1 var2 +ut::GraphBuilder Graph2Builder() { + ut::GraphBuilder builder = ut::GraphBuilder("g2"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto const1 = builder.AddNode("const1", "Const", 0, 1); + auto var2 = builder.AddNode("var2", "Variable", 0, 1); + auto reshape1 = builder.AddNode("reshape1", "Reshape", 2, 1); + auto reshape2 = builder.AddNode("reshape2", "Reshape", 2, 1); + auto transdata1 = builder.AddNode("transdata1", "Transdata", 1, 1); + auto netoutput1 = builder.AddNode("netoutput1", "Netoutput", 2, 0); + + builder.AddDataEdge(var1, 0, reshape1, 0); + builder.AddDataEdge(const1, 0, reshape1, 1); + builder.AddDataEdge(var2, 0, reshape2, 0); + builder.AddDataEdge(const1, 0, reshape2, 1); + builder.AddDataEdge(reshape1, 0, transdata1, 0); + builder.AddDataEdge(reshape2, 0, netoutput1, 1); + builder.AddDataEdge(transdata1, 0, netoutput1, 0); + + return builder; +} + +/// 
netoutput1 +/// | \ +///transdata1 \ +/// | \ +/// reshape1 transdata2 +/// | \ / +/// var1 const1 +ut::GraphBuilder Graph3Builder() { + ut::GraphBuilder builder = ut::GraphBuilder("g2"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto const1 = builder.AddNode("const1", "Const", 0, 1); + auto reshape1 = builder.AddNode("reshape1", "Reshape", 2, 1); + auto transdata2 = builder.AddNode("transdata2", "Transdata", 1, 1); + auto transdata1 = builder.AddNode("transdata1", "Transdata", 1, 1); + auto netoutput1 = builder.AddNode("netoutput1", "Netoutput", 2, 0); + + builder.AddDataEdge(var1, 0, reshape1, 0); + builder.AddDataEdge(const1, 0, reshape1, 1); + builder.AddDataEdge(const1, 0, transdata2, 0); + builder.AddDataEdge(reshape1, 0, transdata1, 0); + builder.AddDataEdge(transdata2, 0, netoutput1, 1); + builder.AddDataEdge(transdata1, 0, netoutput1, 0); + + return builder; +} + +} // namespace + +TEST_F(UtestReshapeRemovePass, reshape_remove_with_const) { + auto builder = Graph1Builder(); + auto graph = builder.GetGraph(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ReshapeRemovePass}); + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } + + EXPECT_EQ(graph->FindNode("reshape1"), nullptr); + auto const1 = graph->FindNode("const1"); + EXPECT_TRUE(const1->GetOutNodes().empty()); + EXPECT_TRUE(const1->GetInNodes().empty()); + auto var1 = graph->FindNode("var1"); + EXPECT_EQ(var1->GetOutNodes().size(), 1); + EXPECT_EQ(var1->GetOutDataNodes().at(0)->GetName(), "transdata1"); +} + +TEST_F(UtestReshapeRemovePass, reshape_remove_without_const_two_reshape) { + auto builder = Graph2Builder(); + auto graph = builder.GetGraph(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ReshapeRemovePass}); + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + for (auto &name_to_pass : names_to_pass) { + delete 
name_to_pass.second; + } + + EXPECT_EQ(graph->FindNode("reshape1"), nullptr); + auto const1 = graph->FindNode("const1"); + EXPECT_TRUE(const1->GetOutNodes().empty()); + EXPECT_TRUE(const1->GetInNodes().empty()); + auto var1 = graph->FindNode("var1"); + EXPECT_EQ(var1->GetOutNodes().size(), 1); + EXPECT_EQ(var1->GetOutDataNodes().at(0)->GetName(), "transdata1"); + auto netoutput1 = graph->FindNode("netoutput1"); + EXPECT_EQ(netoutput1->GetInNodes().size(), 2); + std::set names; + for (auto node : netoutput1->GetInNodes()) { + names.insert(node->GetName()); + } + EXPECT_EQ(names, std::set({"var2", "transdata1"})); +} + +TEST_F(UtestReshapeRemovePass, reshape_remove_without_const) { + auto builder = Graph3Builder(); + auto graph = builder.GetGraph(); + NamesToPass names_to_pass; + names_to_pass.push_back({"Test", new ReshapeRemovePass}); + GEPass pass(graph); + EXPECT_EQ(pass.Run(names_to_pass), SUCCESS); + + for (auto &name_to_pass : names_to_pass) { + delete name_to_pass.second; + } + + EXPECT_EQ(graph->FindNode("reshape1"), nullptr); + auto const1 = graph->FindNode("const1"); + auto var1 = graph->FindNode("var1"); + EXPECT_EQ(var1->GetOutNodes().size(), 1); + EXPECT_EQ(var1->GetOutDataNodes().at(0)->GetName(), "transdata1"); + EXPECT_NE(const1, nullptr); + EXPECT_EQ(const1->GetOutNodes().size(), 1); + // EXPECT_EQ(const1->GetOutDataNodes().at(0)->GetName(), "transdata2"); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/resource_pair_control_pass_unittest.cc b/tests/ut/ge/graph/passes/resource_pair_control_pass_unittest.cc new file mode 100644 index 00000000..e246c673 --- /dev/null +++ b/tests/ut/ge/graph/passes/resource_pair_control_pass_unittest.cc @@ -0,0 +1,94 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/resource_pair_add_control_pass.h" + +#include +#include +#include + +#include "graph_builder_utils.h" +#include "graph/passes/resource_pair_remove_control_pass.h" +#include "inc/pass_manager.h" +#include "framework/common/ge_inner_error_codes.h" + +namespace ge { +class UtestResourcePairControlPass : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +namespace { +/// netoutput1 +/// | \ +/// StackPush StackPop +/// | | +/// var1 const1 +ut::GraphBuilder Graph1Builder() { + ut::GraphBuilder builder = ut::GraphBuilder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1);; + auto const1 = builder.AddNode("const1", "Const", 0, 1); + auto stackpush1 = builder.AddNode("stackpush1", "StackPush", 1, 1); + auto stackpop1 = builder.AddNode("stackpop1", "StackPop", 1, 1); + auto netoutput1 = builder.AddNode("netoutput1", "Netoutput", 2, 0); + + builder.AddDataEdge(var1, 0, stackpush1, 0); + builder.AddDataEdge(const1, 0, stackpop1, 0); + builder.AddDataEdge(stackpush1, 0, netoutput1, 0); + builder.AddDataEdge(stackpop1, 0, netoutput1, 1); + + return builder; +} +} + +TEST_F(UtestResourcePairControlPass, resource_pair_control) { + auto builder = Graph1Builder(); + auto graph = builder.GetGraph(); + + auto stackpush0 = graph->FindNode("stackpush1"); + EXPECT_EQ(stackpush0->GetOutNodes().size(), 1); + EXPECT_EQ(stackpush0->GetOutControlNodes().size(), 0); + + auto stackpop0 = graph->FindNode("stackpop1"); + EXPECT_EQ(stackpop0->GetInNodes().size(), 1); + 
EXPECT_EQ(stackpop0->GetInControlNodes().size(), 0); + + ResourcePairAddControlPass add_pass; + vector passes = {&add_pass}; + EXPECT_EQ(PassManager::Run(graph, passes), SUCCESS); + + auto stackpush1 = graph->FindNode("stackpush1"); + EXPECT_EQ(stackpush1->GetOutNodes().size(), 2); + EXPECT_EQ(stackpush1->GetOutControlNodes().at(0)->GetName(), "stackpop1"); + + auto stackpop1 = graph->FindNode("stackpop1"); + EXPECT_EQ(stackpop1->GetInNodes().size(), 2); + EXPECT_EQ(stackpop1->GetInControlNodes().at(0)->GetName(), "stackpush1"); + + ResourcePairRemoveControlPass remove_pass; + passes = {&remove_pass}; + EXPECT_EQ(PassManager::Run(graph, passes), SUCCESS); + + auto stackpush2 = graph->FindNode("stackpush1"); + EXPECT_EQ(stackpush2->GetOutNodes().size(), 1); + EXPECT_EQ(stackpush2->GetOutControlNodes().size(), 0); + + auto stackpop2 = graph->FindNode("stackpop1"); + EXPECT_EQ(stackpop2->GetInNodes().size(), 1); + EXPECT_EQ(stackpop2->GetInControlNodes().size(), 0); +} +} diff --git a/tests/ut/ge/graph/passes/save_pass_unittest.cc b/tests/ut/ge/graph/passes/save_pass_unittest.cc new file mode 100644 index 00000000..b6806682 --- /dev/null +++ b/tests/ut/ge/graph/passes/save_pass_unittest.cc @@ -0,0 +1,76 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/save_pass.h" + +#include + +#include "common/ge_inner_error_codes.h" +#include "ge/ge_api.h" +#include "graph/compute_graph.h" +#include "graph/debug/graph_debug.h" +#include "graph/manager/graph_manager.h" +#include "graph/manager/graph_manager_utils.h" +#include "graph/operator_reg.h" +#include "graph/utils/op_desc_utils.h" +#include "inc/pass_manager.h" +#include "init/gelib.h" +#include "opskernel_manager/ops_kernel_manager.h" + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestGraphPassesSavePass : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +ge::ComputeGraphPtr CreateSaveGraph() { + ge::ComputeGraphPtr graph = std::make_shared("default"); + + // variable1 + ge::OpDescPtr variable_op = std::make_shared(); + variable_op->SetType("Variable"); + variable_op->SetName("Variable1"); + variable_op->AddInputDesc(ge::GeTensorDesc()); + variable_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr variable_node = graph->AddNode(variable_op); + // save1 + ge::OpDescPtr save_op = std::make_shared(); + save_op->SetType("Save"); + save_op->SetName("Save1"); + save_op->AddInputDesc(ge::GeTensorDesc()); + save_op->AddOutputDesc(ge::GeTensorDesc()); + ge::NodePtr save_node = graph->AddNode(save_op); + + vector targets{save_node}; + graph->SetGraphTargetNodesInfo(targets); + + // add edge + ge::GraphUtils::AddEdge(variable_node->GetOutDataAnchor(0), save_node->GetInDataAnchor(0)); + + return graph; +} + +TEST_F(UtestGraphPassesSavePass, cover_run_success) { + ge::ComputeGraphPtr compute_graph = CreateSaveGraph(); + ge::PassManager pass_managers; + pass_managers.AddPass(new (std::nothrow) SavePass); + Status status = pass_managers.Run(compute_graph); + EXPECT_EQ(status, ge::SUCCESS); +} diff --git a/tests/ut/ge/graph/passes/shape_operate_op_remove_pass_unittest.cc b/tests/ut/ge/graph/passes/shape_operate_op_remove_pass_unittest.cc new file mode 100644 index 00000000..ade56b52 --- 
/dev/null +++ b/tests/ut/ge/graph/passes/shape_operate_op_remove_pass_unittest.cc @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/shape_operate_op_remove_pass.h" + +#include + +using namespace ge; + +class UtestGraphPassesShapeOperateOpRemovePass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 2, + int32_t out_anchors_num = 2) { + GeTensorDesc tensor_desc(GeShape({1}), FORMAT_NHWC, DT_INT32); + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(op_desc); + return node; + } +}; + +TEST_F(UtestGraphPassesShapeOperateOpRemovePass, squeeze_and_squeeze) { + ComputeGraphPtr graph = std::make_shared("test"); + + NodePtr transpose_node = AddNode(graph, "transpose1", PERMUTE); + NodePtr squeeze_node = AddNode(graph, "squeeze1", SQUEEZE); + + GraphUtils::AddEdge(transpose_node->GetOutDataAnchor(0), squeeze_node->GetInDataAnchor(0)); + + ge::ShapeOperateOpRemovePass shape_operate_op_pass; + Status status = shape_operate_op_pass.Run(graph); + EXPECT_EQ(SUCCESS, status); + NodePtr found_node = 
graph->FindNode("transpose1"); + EXPECT_EQ(transpose_node, found_node); +} diff --git a/tests/ut/ge/graph/passes/snapshot_pass_unittest.cc b/tests/ut/ge/graph/passes/snapshot_pass_unittest.cc new file mode 100644 index 00000000..42b2c6ad --- /dev/null +++ b/tests/ut/ge/graph/passes/snapshot_pass_unittest.cc @@ -0,0 +1,82 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/snapshot_pass.h" + +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace testing; +using namespace std; +using namespace ge; + +class UtestSnapshotPass : public Test { + protected: + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc; + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + 
NodePtr node = graph->AddNode(op_desc); + return node; + } +}; + +TEST_F(UtestSnapshotPass, succ) { + ComputeGraphPtr graph = std::make_shared("test"); + + NodePtr prevent_grad = AddNode(graph, "PreventGradient", PREVENTGRADIENT); + NodePtr snapshot = AddNode(graph, "Snapshot", SNAPSHOT); + NodePtr constant = AddNode(graph, "Const", CONSTANT); + GraphUtils::AddEdge(prevent_grad->GetOutDataAnchor(0), snapshot->GetInDataAnchor(0)); + GraphUtils::AddEdge(snapshot->GetOutDataAnchor(0), constant->GetInDataAnchor(0)); + + SnapshotPass pass; + Status status = pass.Run(snapshot); + EXPECT_EQ(status, SUCCESS); + + NodePtr found_node = graph->FindNode("Snapshot"); + EXPECT_EQ(found_node, nullptr); + + status = pass.Run(prevent_grad); + EXPECT_EQ(status, SUCCESS); + + string type2 = "FrameworkOp"; + snapshot->GetOpDesc()->SetType(type2); + status = pass.Run(snapshot); + // EXPECT_EQ(ge::SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/stop_gradient_pass_unittest.cc b/tests/ut/ge/graph/passes/stop_gradient_pass_unittest.cc new file mode 100644 index 00000000..120a8753 --- /dev/null +++ b/tests/ut/ge/graph/passes/stop_gradient_pass_unittest.cc @@ -0,0 +1,208 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#define protected public +#define private public +#include "graph/passes/stop_gradient_pass.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/types.h" +#include "external/graph/operator_reg.h" +#include "framework/common/ge_inner_error_codes.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/operator.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/kernel_factory.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +// for ir +REG_OP(StopGradient) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, + DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, + DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OP_END_FACTORY_REG(StopGradient) + + IMPLEMT_INFERFUNC(StopGradient, StopGradientInfer) { + TensorDesc input_desc = op.GetInputDesc("x"); + (void)op.UpdateOutputDesc("y", input_desc); + return GRAPH_SUCCESS; +} + +INFER_FUNC_REG(StopGradient, StopGradientInfer); + +#define TEST_OPERATOR(op_, input_shapes, output_shapes) \ + { \ + auto op = op_; \ + for (auto input_pair : input_shapes) SetInputShape(op, input_pair.first, input_pair.second); \ + op.InferShapeAndType(); \ + for (auto output_pair : output_shapes) CheckOutputShape(op, output_pair.first, output_pair.second); \ + } +#define LOOP_VEC(v) for (unsigned i = 0; i < v.size(); i++) + +class UtestGraphPassesStopGradientPass : public testing::Test { + protected: + void SetUp() { init(); } + + void TearDown() { destory(); } + + private: + void init() { + pass_ = new ::ge::StopGradientPass(); + + graph_ = std::make_shared("default"); + op_desc_ptr_ = std::make_shared("stop_gradient", STOPGRADIENT); + node_ = 
std::make_shared(op_desc_ptr_, graph_); + kernel_ = KernelFactory::Instance().Create(STOPGRADIENT); + } + + void destory() { + delete pass_; + pass_ = NULL; + } + + protected: + ge::StopGradientPass *pass_; + ge::ComputeGraphPtr graph_; + OpDescPtr op_desc_ptr_; + NodePtr node_; + shared_ptr kernel_; + + void SetInputShape(Operator op, string name, vector shape) { + TensorDesc td = op.GetInputDesc(name); + td.SetShape(ge::Shape(shape)); + op.UpdateInputDesc(name, td); + } + + void CheckOutputShape(Operator op, string name, vector shape) { + ge::Shape s = op.GetOutputDesc(name).GetShape(); + EXPECT_EQ(s.GetDims().size(), shape.size()); + LOOP_VEC(shape) EXPECT_EQ(s.GetDim(i), shape[i]); + } + + NodePtr init_node(ComputeGraphPtr graph, string &type) { + // middle + OpDescPtr op_def = std::make_shared("op_def", type); + OpDescPtr in_op_def_0 = std::make_shared("op_def_in", "test"); + OpDescPtr out_op_def = std::make_shared("op_def_in", "test"); + + // in_op_def_0 + vector dims_vec_0 = {2, 1, 4, 1, 2}; + vector data_vec_0 = {1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_INT32); + (void)TensorUtils::SetRealDimCnt(tensor_desc_0, dims_vec_0.size()); + ge::ConstGeTensorPtr constTensor_0 = + std::make_shared(tensor_desc_0, (uint8_t *)&data_vec_0[0], data_vec_0.size() * sizeof(int32_t)); + ge::AttrUtils::SetTensor(in_op_def_0, ge::ATTR_NAME_WEIGHTS, constTensor_0); + vector dims = {2, 2, 4, 3, 2}; + ge::GeShape shape_desc(dims); + GeTensorDesc tensor_desc(shape_desc); + in_op_def_0->AddOutputDesc(tensor_desc); + in_op_def_0->SetType("Constant"); + + // op_def + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT32); + op_def->AddInputDesc(tensor_desc_0); + op_def->AddOutputDesc(tensor_desc_out); + vector is_input_const_vec = { + true, + }; + op_def->SetIsInputConst(is_input_const_vec); + AttrUtils::SetInt(op_def, ge::ATTR_NAME_T, (int64_t)DT_INT32); + + // add attr of out_node + vector 
is_input_const(1); + is_input_const[0] = true; + out_op_def->SetIsInputConst(is_input_const); + out_op_def->AddInputDesc(tensor_desc_0); + + // Add node + NodePtr in_node_0 = graph->AddNode(in_op_def_0); + NodePtr node = graph->AddNode(op_def); + NodePtr out_node = graph->AddNode(out_op_def); + + // Add edge + GraphUtils::AddEdge(in_node_0->GetOutDataAnchor(0), node->GetInDataAnchor(0)); + GraphUtils::AddEdge(node->GetOutDataAnchor(0), out_node->GetInDataAnchor(0)); + + return node; + } +}; + +TEST_F(UtestGraphPassesStopGradientPass, success) { + ComputeGraphPtr graph = std::make_shared("test"); + string type = STOPGRADIENT; + NodePtr node = init_node(graph, type); + ge::Status ret = pass_->Run(node); + EXPECT_EQ(ge::SUCCESS, ret); +} + +TEST_F(UtestGraphPassesStopGradientPass, not_changed) { + ComputeGraphPtr graph = std::make_shared("test"); + string type = SIZE; + NodePtr node = init_node(graph, type); + ge::Status ret = pass_->Run(node); + EXPECT_EQ(ge::SUCCESS, ret); +} + +TEST_F(UtestGraphPassesStopGradientPass, get_origenal_type_fail) { + ComputeGraphPtr graph = std::make_shared("test"); + string type = STOPGRADIENT; + NodePtr node = init_node(graph, type); + string type2 = "FrameworkOp"; + node->GetOpDesc()->SetType(type2); + ge::Status ret = pass_->Run(node); + // EXPECT_EQ(ge::SUCCESS, ret); +} +TEST_F(UtestGraphPassesStopGradientPass, size_check_fail) { + vector dims_vec_0 = {8, 2}; + GeTensorDesc tensor_desc_0(GeShape(dims_vec_0), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddInputDesc(tensor_desc_0); + + vector dims_vec_1 = {3, 4, 5}; + GeTensorDesc tensor_desc_1(GeShape(dims_vec_1), FORMAT_NCHW, DT_FLOAT); + op_desc_ptr_->AddInputDesc(tensor_desc_1); + + GeTensorDesc tensor_desc_out(GeShape(), FORMAT_NCHW, DT_INT64); + op_desc_ptr_->AddOutputDesc(tensor_desc_out); + + ge::Status ret = pass_->Run(node_); + EXPECT_EQ(ge::FAILED, ret); +} + +TEST_F(UtestGraphPassesStopGradientPass, ir_infer_shape) { + auto i = std::unordered_map>({ + {"x", {2, 1, 5, 3}}, + 
}); + auto o = std::unordered_map>({ + {"y", {2, 1, 5, 3}}, + }); + + auto test_op = op::StopGradient("test_op"); + + TEST_OPERATOR(test_op, i, o); +} diff --git a/tests/ut/ge/graph/passes/switch_logic_remove_pass_unittest.cc b/tests/ut/ge/graph/passes/switch_logic_remove_pass_unittest.cc new file mode 100644 index 00000000..dcad318c --- /dev/null +++ b/tests/ut/ge/graph/passes/switch_logic_remove_pass_unittest.cc @@ -0,0 +1,244 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/switch_logic_remove_pass.h" + +#include + +#include "graph/passes/base_pass.h" +#include "graph/passes/merge_pass.h" +#include "graph/passes/prune_pass.h" +#include "graph_builder_utils.h" + +namespace ge { +class UtestSwitchLogicRemovePass : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +namespace { +/// netoutput1 +/// | +/// add1 +/// / \T +/// var3 swtich2 +/// T/ | +/// switch1 | +/// / \ | +/// var1 var2 +ComputeGraphPtr BuildGraph1() { + auto builder = ut::GraphBuilder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNode("var2", "Variable", 0, 1); + auto switch1 = builder.AddNode("switch1", "RefSwitch", 2, 2); + auto var3 = builder.AddNode("var3", "Variable", 0, 1); + auto switch2 = builder.AddNode("switch2", "Switch", 2, 2); + auto add1 = builder.AddNode("add1", "Add", 2, 1); + auto netoutput1 = builder.AddNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(var1, 0, switch1, 0); + builder.AddDataEdge(var2, 0, switch1, 1); + builder.AddDataEdge(var2, 0, switch2, 1); + builder.AddDataEdge(switch1, 1, switch2, 0); + builder.AddDataEdge(var3, 0, add1, 0); + builder.AddDataEdge(switch2, 1, add1, 1); + builder.AddDataEdge(add1, 0, netoutput1, 0); + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// merge1 +/// / \ +/// / add1 +/// / F| \ +/// addn1 swtich2 var3 +/// \F T/ | +/// switch1 | +/// / \ | +/// var1 var2 +ComputeGraphPtr BuildGraph2() { + auto builder = ut::GraphBuilder("g2"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNode("var2", "Variable", 0, 1); + auto switch1 = builder.AddNode("switch1", "Switch", 2, 2); + auto addn1 = builder.AddNode("addn1", "AddN", 1, 1); + auto switch2 = builder.AddNode("switch2", "Switch", 2, 2); + auto var3 = builder.AddNode("var3", "Variable", 0, 1); + auto add1 = builder.AddNode("add1", "Add", 2, 1); + auto merge1 = builder.AddNode("merge1", "Merge", 2, 2); + 
auto netoutput1 = builder.AddNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(var1, 0, switch1, 0); + builder.AddDataEdge(var2, 0, switch1, 1); + builder.AddDataEdge(var2, 0, switch2, 1); + builder.AddDataEdge(switch1, 0, addn1, 0); + builder.AddDataEdge(switch1, 1, switch2, 0); + builder.AddDataEdge(addn1, 0, merge1, 0); + builder.AddDataEdge(switch2, 0, add1, 1); + builder.AddDataEdge(var3, 0, add1, 0); + builder.AddDataEdge(add1, 0, merge1, 0); + builder.AddDataEdge(merge1, 0, netoutput1, 0); + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// add1 +/// / \T +/// var3 swtich2 +/// T/ \ +/// switch1 \ +/// / \ \ +/// var1 var2 var4 +ComputeGraphPtr BuildGraph3() { + auto builder = ut::GraphBuilder("g3"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNode("var2", "Variable", 0, 1); + auto var4 = builder.AddNode("var4", "Variable", 0, 1); + auto switch1 = builder.AddNode("switch1", "Switch", 2, 2); + auto var3 = builder.AddNode("var3", "Variable", 0, 1); + auto switch2 = builder.AddNode("switch2", "Switch", 2, 2); + auto add1 = builder.AddNode("add1", "Add", 2, 1); + auto netoutput1 = builder.AddNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(var1, 0, switch1, 0); + builder.AddDataEdge(var2, 0, switch1, 1); + builder.AddDataEdge(var4, 0, switch2, 1); + builder.AddDataEdge(switch1, 1, switch2, 0); + builder.AddDataEdge(var3, 0, add1, 0); + builder.AddDataEdge(switch2, 1, add1, 1); + builder.AddDataEdge(add1, 0, netoutput1, 0); + return builder.GetGraph(); +} + +/// netoutput1 +/// | +/// merge1 +/// / \ +/// add1 addn1 +/// / \T F/ +/// var3 swtich2 +/// T/ | +/// switch1 | +/// / \ | +/// var1 var2 +ComputeGraphPtr BuildGraph5() { + auto builder = ut::GraphBuilder("g5"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNode("var2", "Variable", 0, 1); + auto switch1 = builder.AddNode("switch1", "Switch", 2, 2); + auto var3 = builder.AddNode("var3", "Variable", 
0, 1); + auto switch2 = builder.AddNode("switch2", "Switch", 2, 2); + auto add1 = builder.AddNode("add1", "Add", 2, 1); + auto addn1 = builder.AddNode("addn1", "AddN", 1, 1); + auto merge1 = builder.AddNode("merge1", "Merge", 2, 2); + auto netoutput1 = builder.AddNode("netoutput1", "NetOutput", 1, 0); + + builder.AddDataEdge(var1, 0, switch1, 0); + builder.AddDataEdge(var2, 0, switch1, 1); + builder.AddDataEdge(var2, 0, switch2, 1); + builder.AddDataEdge(switch1, 1, switch2, 0); + builder.AddDataEdge(var3, 0, add1, 0); + builder.AddDataEdge(switch2, 1, add1, 1); + builder.AddDataEdge(switch2, 0, addn1, 0); + builder.AddDataEdge(add1, 0, merge1, 0); + builder.AddDataEdge(addn1, 0, merge1, 1); + builder.AddDataEdge(merge1, 0, netoutput1, 0); + return builder.GetGraph(); +} + +} // namespace + +TEST_F(UtestSwitchLogicRemovePass, remove_same_true) { + SwitchLogicRemovePass pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("SwitchLogicRemovePass", &pass); + + auto graph = BuildGraph1(); + GEPass ge_pass(graph); + + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + auto switch2 = graph->FindNode("switch2"); + EXPECT_EQ(switch2, nullptr); + auto switch1 = graph->FindNode("switch1"); + EXPECT_EQ(switch1->GetOutNodes().size(), 1); + EXPECT_EQ(switch1->GetOutDataNodes().at(0)->GetName(), "add1"); +} + +TEST_F(UtestSwitchLogicRemovePass, remove_different) { + SwitchLogicRemovePass pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("SwitchLogicRemovePass", &pass); + + auto graph = BuildGraph2(); + GEPass ge_pass(graph); + + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->FindNode("switch2"), nullptr); + auto add1 = graph->FindNode("add1"); + EXPECT_EQ(add1->GetOutNodes().size(), 0); + auto switch1 = graph->FindNode("switch1"); + EXPECT_EQ(switch1->GetOutNodes().size(), 2); + EXPECT_EQ(switch1->GetOutDataNodes().at(0)->GetName(), "addn1"); + auto merge1 = graph->FindNode("merge1"); + EXPECT_EQ(merge1->GetInNodes().size(), 1); + 
EXPECT_EQ(merge1->GetInDataNodes().at(0)->GetName(), "addn1"); +} + +TEST_F(UtestSwitchLogicRemovePass, no_need_to_optimize) { + SwitchLogicRemovePass pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("SwitchLogicRemovePass", &pass); + + auto graph = BuildGraph3(); + GEPass ge_pass(graph); + + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + auto switch2 = graph->FindNode("switch2"); + EXPECT_NE(switch2, nullptr); + auto switch1 = graph->FindNode("switch1"); + EXPECT_EQ(switch1->GetOutNodes().size(), 1); + EXPECT_EQ(switch1->GetOutDataNodes().at(0)->GetName(), "switch2"); + EXPECT_EQ(switch2->GetOutNodes().size(), 1); + EXPECT_EQ(switch2->GetOutDataNodes().at(0)->GetName(), "add1"); +} + +TEST_F(UtestSwitchLogicRemovePass, both_true_and_false) { + SwitchLogicRemovePass pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("SwitchLogicRemovePass", &pass); + + auto graph = BuildGraph5(); + GEPass ge_pass(graph); + + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + auto switch2 = graph->FindNode("switch2"); + EXPECT_EQ(switch2, nullptr); + auto switch1 = graph->FindNode("switch1"); + EXPECT_EQ(switch1->GetOutNodes().size(), 2); + EXPECT_EQ(switch1->GetOutDataNodes().size(), 1); + EXPECT_EQ(switch1->GetOutDataNodes().at(0)->GetName(), "add1"); + auto addn1 = graph->FindNode("addn1"); + EXPECT_EQ(addn1->GetInDataNodes().size(), 0); + EXPECT_EQ(addn1->GetInNodes().size(), 2); + EXPECT_EQ(addn1->GetOutNodes().size(), 0); + auto merge1 = graph->FindNode("merge1"); + EXPECT_EQ(merge1->GetInNodes().size(), 1); + EXPECT_EQ(merge1->GetInDataNodes().at(0)->GetName(), "add1"); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/switch_op_pass_unittest.cc b/tests/ut/ge/graph/passes/switch_op_pass_unittest.cc new file mode 100644 index 00000000..26b1be81 --- /dev/null +++ b/tests/ut/ge/graph/passes/switch_op_pass_unittest.cc @@ -0,0 +1,457 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "omg/omg_inner_types.h" +#define protected public +#define private public +#include "graph/passes/switch_op_pass.h" + +#include "common/debug/log.h" +#include "common/debug/memory_dumper.h" +#include "common/op/attr_value_util.h" +#include "common/types.h" +#include "graph/debug/ge_attr_define.h" +#include "graph/graph.h" +#include "graph/passes/control_op_attr_pass.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace testing; +using namespace ge; + +class UtestGraphPassesSwitchOpPass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + public: + void make_graph(ComputeGraphPtr graph, bool match = true) { + GeTensorDesc bool_tensor_desc(GeShape(), ge::FORMAT_NCHW, ge::DT_BOOL); + GeTensorDesc int_tensor_desc(GeShape(), ge::FORMAT_NCHW, ge::DT_INT32); + GeTensorDesc scalar_tensor_desc(GeShape(), ge::FORMAT_NCHW, ge::DT_FLOAT); + + auto xOpDef = std::make_shared("x", VARIABLEV2); + xOpDef->AddOutputDesc(scalar_tensor_desc); + auto xNode = graph->AddNode(xOpDef); + + auto yOpDef = std::make_shared("y", VARIABLEV2); + yOpDef->AddOutputDesc(scalar_tensor_desc); + auto yNode = graph->AddNode(yOpDef); + + auto zOpDef = std::make_shared("z", VARIABLEV2); + zOpDef->AddOutputDesc(scalar_tensor_desc); + auto zNode = graph->AddNode(zOpDef); + + auto condOpDef = std::make_shared("Less", "Less"); + condOpDef->AddInputDesc(scalar_tensor_desc); + 
condOpDef->AddInputDesc(scalar_tensor_desc); + condOpDef->AddOutputDesc(bool_tensor_desc); + auto condNode = graph->AddNode(condOpDef); + + auto switch_op_def1 = std::make_shared("Add/Switch", SWITCH); + switch_op_def1->AddInputDesc(scalar_tensor_desc); + switch_op_def1->AddInputDesc(bool_tensor_desc); + switch_op_def1->AddOutputDesc(scalar_tensor_desc); + switch_op_def1->AddOutputDesc(scalar_tensor_desc); + auto switch_node1 = graph->AddNode(switch_op_def1); + + auto switch_op_def2 = std::make_shared("Add/Switch_1", SWITCH); + switch_op_def2->AddInputDesc(scalar_tensor_desc); + switch_op_def2->AddInputDesc(bool_tensor_desc); + switch_op_def2->AddOutputDesc(scalar_tensor_desc); + switch_op_def2->AddOutputDesc(scalar_tensor_desc); + auto switch_node2 = graph->AddNode(switch_op_def2); + + auto switch_op_def3 = std::make_shared("Square/Switch", SWITCH); + switch_op_def3->AddInputDesc(scalar_tensor_desc); + switch_op_def3->AddInputDesc(bool_tensor_desc); + switch_op_def3->AddOutputDesc(scalar_tensor_desc); + switch_op_def3->AddOutputDesc(scalar_tensor_desc); + auto switch_node3 = graph->AddNode(switch_op_def3); + + auto addOpDef = std::make_shared("Add", "ADD"); + addOpDef->AddInputDesc(scalar_tensor_desc); + addOpDef->AddInputDesc(scalar_tensor_desc); + addOpDef->AddOutputDesc(scalar_tensor_desc); + auto addNode = graph->AddNode(addOpDef); + + auto mergeOpDef = std::make_shared("Merge", "Merge"); + mergeOpDef->AddInputDesc(scalar_tensor_desc); + mergeOpDef->AddInputDesc(scalar_tensor_desc); + mergeOpDef->AddOutputDesc(scalar_tensor_desc); + mergeOpDef->AddOutputDesc(int_tensor_desc); + auto mergeNode = graph->AddNode(mergeOpDef); + + auto output_op_def = std::make_shared("NetOutput", "NetOutput"); + output_op_def->AddInputDesc(scalar_tensor_desc); + output_op_def->AddOutputDesc(scalar_tensor_desc); + auto output_node = graph->AddNode(output_op_def); + + (void)GraphUtils::AddEdge(xNode->GetOutDataAnchor(0), condNode->GetInDataAnchor(0)); + 
(void)GraphUtils::AddEdge(yNode->GetOutDataAnchor(0), condNode->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(xNode->GetOutDataAnchor(0), switch_node1->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(condNode->GetOutDataAnchor(0), switch_node1->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(yNode->GetOutDataAnchor(0), switch_node2->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(condNode->GetOutDataAnchor(0), switch_node2->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(zNode->GetOutDataAnchor(0), switch_node3->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(condNode->GetOutDataAnchor(0), switch_node3->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(switch_node1->GetOutDataAnchor(1), addNode->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(switch_node2->GetOutDataAnchor(1), addNode->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(addNode->GetOutDataAnchor(0), mergeNode->GetInDataAnchor(1)); + (void)GraphUtils::AddEdge(switch_node3->GetOutDataAnchor(0), mergeNode->GetInDataAnchor(0)); + + (void)GraphUtils::AddEdge(mergeNode->GetOutDataAnchor(0), output_node->GetInDataAnchor(0)); + } + + void make_graph_const(ComputeGraphPtr graph, bool match = true) { + // resnet50 PolynomialDecay + GeTensorDesc scalar_tensor_desc(GeShape({1, 1, 1, 1})); + GeTensorDesc bool_tensor_desc(GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_BOOL); + GeTensorDesc int_tensor_desc(GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_INT32); + + auto xOpDef = std::make_shared("x", VARIABLEV2); + xOpDef->AddOutputDesc(scalar_tensor_desc); + auto xNode = graph->AddNode(xOpDef); + + auto yOpDef = std::make_shared("y", "Const"); + yOpDef->AddOutputDesc(scalar_tensor_desc); + auto yNode = graph->AddNode(yOpDef); + + auto zOpDef = std::make_shared("z", VARIABLEV2); + zOpDef->AddOutputDesc(scalar_tensor_desc); + auto zNode = graph->AddNode(zOpDef); + + auto constOpDef = std::make_shared("Const", "Const"); + constOpDef->AddOutputDesc(scalar_tensor_desc); + auto constNode = 
graph->AddNode(constOpDef); + + auto condOpDef = std::make_shared("Equal", "Equal"); + condOpDef->AddInputDesc(scalar_tensor_desc); + condOpDef->AddInputDesc(scalar_tensor_desc); + condOpDef->AddOutputDesc(bool_tensor_desc); + auto condNode = graph->AddNode(condOpDef); + + auto identityOpDef = std::make_shared("identity", "Identity"); + identityOpDef->AddInputDesc(bool_tensor_desc); + identityOpDef->AddOutputDesc(bool_tensor_desc); + auto identityNode = graph->AddNode(identityOpDef); + + auto switch_op_def1 = std::make_shared("Switch", SWITCH); + switch_op_def1->AddInputDesc(bool_tensor_desc); + switch_op_def1->AddInputDesc(bool_tensor_desc); + switch_op_def1->AddOutputDesc(bool_tensor_desc); + switch_op_def1->AddOutputDesc(bool_tensor_desc); + auto switch_node1 = graph->AddNode(switch_op_def1); + + auto tIdentityOpDef = std::make_shared("switch_t", "Identity"); + tIdentityOpDef->AddInputDesc(scalar_tensor_desc); + tIdentityOpDef->AddOutputDesc(scalar_tensor_desc); + auto tIdentityNode = graph->AddNode(tIdentityOpDef); + + auto fIdentityOpDef = std::make_shared("switch_f", "Identity"); + fIdentityOpDef->AddInputDesc(scalar_tensor_desc); + fIdentityOpDef->AddOutputDesc(scalar_tensor_desc); + auto fIdentityNode = graph->AddNode(fIdentityOpDef); + + auto switch_op_def2 = std::make_shared("Switch_1", SWITCH); + switch_op_def2->AddInputDesc(scalar_tensor_desc); + switch_op_def2->AddInputDesc(bool_tensor_desc); + switch_op_def2->AddOutputDesc(scalar_tensor_desc); + switch_op_def2->AddOutputDesc(scalar_tensor_desc); + auto switch_node2 = graph->AddNode(switch_op_def2); + + auto mulOpDef = std::make_shared("truediv", "Mul"); + mulOpDef->AddInputDesc(scalar_tensor_desc); + mulOpDef->AddInputDesc(scalar_tensor_desc); + mulOpDef->AddOutputDesc(scalar_tensor_desc); + auto mulNode = graph->AddNode(mulOpDef); + + auto ceilOpDef = std::make_shared("Ceil", "Ceil"); + ceilOpDef->AddInputDesc(scalar_tensor_desc); + ceilOpDef->AddOutputDesc(scalar_tensor_desc); + auto ceilNode = 
graph->AddNode(ceilOpDef); + + auto mergeOpDef = std::make_shared("Merge", "Merge"); + mergeOpDef->AddInputDesc(scalar_tensor_desc); + mergeOpDef->AddInputDesc(scalar_tensor_desc); + mergeOpDef->AddOutputDesc(scalar_tensor_desc); + mergeOpDef->AddOutputDesc(int_tensor_desc); + auto mergeNode = graph->AddNode(mergeOpDef); + + auto output_op_def = std::make_shared("NetOutput", "NetOutput"); + output_op_def->AddInputDesc(scalar_tensor_desc); + output_op_def->AddOutputDesc(scalar_tensor_desc); + auto output_node = graph->AddNode(output_op_def); + + (void)GraphUtils::AddEdge(xNode->GetOutDataAnchor(0), condNode->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(yNode->GetOutDataAnchor(0), condNode->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(condNode->GetOutDataAnchor(0), identityNode->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(identityNode->GetOutDataAnchor(0), switch_node1->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(identityNode->GetOutDataAnchor(0), switch_node1->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(switch_node1->GetOutDataAnchor(0), fIdentityNode->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(switch_node1->GetOutDataAnchor(1), tIdentityNode->GetInDataAnchor(0)); + + (void)GraphUtils::AddEdge(fIdentityNode->GetOutControlAnchor(), zNode->GetInControlAnchor()); + (void)GraphUtils::AddEdge(tIdentityNode->GetOutControlAnchor(), constNode->GetInControlAnchor()); + + (void)GraphUtils::AddEdge(xNode->GetOutDataAnchor(0), switch_node2->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(identityNode->GetOutDataAnchor(0), switch_node2->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(zNode->GetOutDataAnchor(0), mulNode->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(switch_node2->GetOutDataAnchor(0), mulNode->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(mulNode->GetOutDataAnchor(0), ceilNode->GetInDataAnchor(0)); + + (void)GraphUtils::AddEdge(constNode->GetOutDataAnchor(0), mergeNode->GetInDataAnchor(1)); + 
(void)GraphUtils::AddEdge(ceilNode->GetOutDataAnchor(0), mergeNode->GetInDataAnchor(0)); + + (void)GraphUtils::AddEdge(mergeNode->GetOutDataAnchor(0), output_node->GetInDataAnchor(0)); + } + + void make_graph_cyclic_dependence(ComputeGraphPtr graph, bool match = true) { + GeTensorDesc scalar_tensor_desc(GeShape({1, 1, 1, 1})); + GeTensorDesc bool_tensor_desc(GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_BOOL); + GeTensorDesc int_tensor_desc(GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_INT32); + + auto xOpDef = std::make_shared("x", VARIABLEV2); + xOpDef->AddOutputDesc(scalar_tensor_desc); + auto xNode = graph->AddNode(xOpDef); + + auto yOpDef = std::make_shared("y", VARIABLEV2); + yOpDef->AddOutputDesc(scalar_tensor_desc); + auto yNode = graph->AddNode(yOpDef); + + auto zOpDef = std::make_shared("z", VARIABLEV2); + zOpDef->AddOutputDesc(scalar_tensor_desc); + auto zNode = graph->AddNode(zOpDef); + + auto condOpDef = std::make_shared("Less", "Less"); + condOpDef->AddInputDesc(scalar_tensor_desc); + condOpDef->AddInputDesc(scalar_tensor_desc); + condOpDef->AddOutputDesc(bool_tensor_desc); + auto condNode = graph->AddNode(condOpDef); + + auto switch_op_def1 = std::make_shared("Switch_f_1", SWITCH); + switch_op_def1->AddInputDesc(scalar_tensor_desc); + switch_op_def1->AddInputDesc(bool_tensor_desc); + switch_op_def1->AddOutputDesc(scalar_tensor_desc); + switch_op_def1->AddOutputDesc(scalar_tensor_desc); + auto switch_node1 = graph->AddNode(switch_op_def1); + + auto switch_op_def2 = std::make_shared("Switch_t_1", SWITCH); + switch_op_def2->AddInputDesc(scalar_tensor_desc); + switch_op_def2->AddInputDesc(bool_tensor_desc); + switch_op_def2->AddOutputDesc(scalar_tensor_desc); + switch_op_def2->AddOutputDesc(scalar_tensor_desc); + auto switch_node2 = graph->AddNode(switch_op_def2); + + auto switch_op_def3 = std::make_shared("Switch_f_2", SWITCH); + switch_op_def3->AddInputDesc(scalar_tensor_desc); + switch_op_def3->AddInputDesc(bool_tensor_desc); + 
switch_op_def3->AddOutputDesc(scalar_tensor_desc); + switch_op_def3->AddOutputDesc(scalar_tensor_desc); + auto switch_node3 = graph->AddNode(switch_op_def3); + + auto switch_op_def4 = std::make_shared("Switch_t_2", SWITCH); + switch_op_def4->AddInputDesc(scalar_tensor_desc); + switch_op_def4->AddInputDesc(bool_tensor_desc); + switch_op_def4->AddOutputDesc(scalar_tensor_desc); + switch_op_def4->AddOutputDesc(scalar_tensor_desc); + auto switch_node4 = graph->AddNode(switch_op_def4); + + auto squareOpDef1 = std::make_shared("Square1", "Square"); + squareOpDef1->AddInputDesc(scalar_tensor_desc); + squareOpDef1->AddOutputDesc(scalar_tensor_desc); + auto squareNode1 = graph->AddNode(squareOpDef1); + + auto squareOpDef2 = std::make_shared("Square2", "Square"); + squareOpDef2->AddInputDesc(scalar_tensor_desc); + squareOpDef2->AddOutputDesc(scalar_tensor_desc); + auto squareNode2 = graph->AddNode(squareOpDef2); + + auto squareOpDef3 = std::make_shared("Square3", "Square"); + squareOpDef3->AddInputDesc(scalar_tensor_desc); + squareOpDef3->AddOutputDesc(scalar_tensor_desc); + auto squareNode3 = graph->AddNode(squareOpDef3); + + auto squareOpDef4 = std::make_shared("Square4", "Square"); + squareOpDef4->AddInputDesc(scalar_tensor_desc); + squareOpDef4->AddOutputDesc(scalar_tensor_desc); + auto squareNode4 = graph->AddNode(squareOpDef4); + + auto merge_op_def1 = std::make_shared("Merge1", "Merge"); + merge_op_def1->AddInputDesc(scalar_tensor_desc); + merge_op_def1->AddInputDesc(scalar_tensor_desc); + merge_op_def1->AddOutputDesc(scalar_tensor_desc); + merge_op_def1->AddOutputDesc(int_tensor_desc); + auto merge_node1 = graph->AddNode(merge_op_def1); + + auto merge_op_def2 = std::make_shared("Merge2", "Merge"); + merge_op_def2->AddInputDesc(scalar_tensor_desc); + merge_op_def2->AddInputDesc(scalar_tensor_desc); + merge_op_def2->AddOutputDesc(scalar_tensor_desc); + merge_op_def2->AddOutputDesc(int_tensor_desc); + auto merge_node2 = graph->AddNode(merge_op_def2); + + auto 
output_op_def = std::make_shared("NetOutput", "NetOutput"); + output_op_def->AddInputDesc(scalar_tensor_desc); + output_op_def->AddOutputDesc(scalar_tensor_desc); + auto output_node = graph->AddNode(output_op_def); + + (void)GraphUtils::AddEdge(xNode->GetOutDataAnchor(0), condNode->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(yNode->GetOutDataAnchor(0), condNode->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(zNode->GetOutDataAnchor(0), switch_node1->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(condNode->GetOutDataAnchor(0), switch_node1->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(zNode->GetOutDataAnchor(0), switch_node2->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(condNode->GetOutDataAnchor(0), switch_node2->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(switch_node1->GetOutDataAnchor(0), squareNode1->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(switch_node2->GetOutDataAnchor(1), squareNode2->GetInDataAnchor(0)); + + (void)GraphUtils::AddEdge(squareNode1->GetOutDataAnchor(0), merge_node1->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(squareNode2->GetOutDataAnchor(0), merge_node1->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(merge_node1->GetOutDataAnchor(0), switch_node3->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(condNode->GetOutDataAnchor(0), switch_node3->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(zNode->GetOutDataAnchor(0), switch_node4->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(condNode->GetOutDataAnchor(0), switch_node4->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(switch_node3->GetOutDataAnchor(0), squareNode3->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(switch_node4->GetOutDataAnchor(1), squareNode4->GetInDataAnchor(0)); + + (void)GraphUtils::AddEdge(squareNode3->GetOutDataAnchor(0), merge_node2->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(squareNode4->GetOutDataAnchor(0), merge_node2->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(merge_node2->GetOutDataAnchor(0), 
output_node->GetInDataAnchor(0)); + } + + void make_graph_case(ComputeGraphPtr graph, bool match = true) { + GeTensorDesc scalar_tensor_desc(GeShape({1, 1, 1, 1})); + GeTensorDesc bool_tensor_desc(GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_BOOL); + GeTensorDesc int_tensor_desc(GeShape({1, 1, 1, 1}), ge::FORMAT_NCHW, ge::DT_INT32); + + auto xOpDef = std::make_shared("x", VARIABLEV2); + xOpDef->AddOutputDesc(scalar_tensor_desc); + auto xNode = graph->AddNode(xOpDef); + + auto yOpDef = std::make_shared("y", VARIABLEV2); + yOpDef->AddOutputDesc(scalar_tensor_desc); + auto yNode = graph->AddNode(yOpDef); + + auto zOpDef = std::make_shared("z", VARIABLEV2); + zOpDef->AddOutputDesc(scalar_tensor_desc); + auto zNode = graph->AddNode(zOpDef); + + auto greater_op_def = std::make_shared("Greater", "Greater"); + greater_op_def->AddInputDesc(scalar_tensor_desc); + greater_op_def->AddInputDesc(scalar_tensor_desc); + greater_op_def->AddOutputDesc(bool_tensor_desc); + auto greaterNode = graph->AddNode(greater_op_def); + + auto less_op_def = std::make_shared("Less", "Less"); + less_op_def->AddInputDesc(scalar_tensor_desc); + less_op_def->AddInputDesc(scalar_tensor_desc); + less_op_def->AddOutputDesc(bool_tensor_desc); + auto less_node = graph->AddNode(less_op_def); + + auto switch_op_def1 = std::make_shared("greater/Switch_t", SWITCH); + switch_op_def1->AddInputDesc(bool_tensor_desc); + switch_op_def1->AddInputDesc(bool_tensor_desc); + switch_op_def1->AddOutputDesc(bool_tensor_desc); + switch_op_def1->AddOutputDesc(bool_tensor_desc); + auto switch_node1 = graph->AddNode(switch_op_def1); + + auto switch_op_def2 = std::make_shared("greater/Switch_f", SWITCH); + switch_op_def2->AddInputDesc(scalar_tensor_desc); + switch_op_def2->AddInputDesc(bool_tensor_desc); + switch_op_def2->AddOutputDesc(scalar_tensor_desc); + switch_op_def2->AddOutputDesc(scalar_tensor_desc); + auto switch_node2 = graph->AddNode(switch_op_def2); + + auto switch_op_def3 = std::make_shared("less/Switch_t", 
SWITCH); + switch_op_def3->AddInputDesc(scalar_tensor_desc); + switch_op_def3->AddInputDesc(bool_tensor_desc); + switch_op_def3->AddOutputDesc(scalar_tensor_desc); + switch_op_def3->AddOutputDesc(scalar_tensor_desc); + auto switch_node3 = graph->AddNode(switch_op_def3); + + auto switch_op_def4 = std::make_shared("less/Switch_f", SWITCH); + switch_op_def4->AddInputDesc(scalar_tensor_desc); + switch_op_def4->AddInputDesc(bool_tensor_desc); + switch_op_def4->AddOutputDesc(scalar_tensor_desc); + switch_op_def4->AddOutputDesc(scalar_tensor_desc); + auto switch_node4 = graph->AddNode(switch_op_def4); + + auto merge_op_def1 = std::make_shared("Merge1", "Merge"); + merge_op_def1->AddInputDesc(scalar_tensor_desc); + merge_op_def1->AddInputDesc(scalar_tensor_desc); + merge_op_def1->AddOutputDesc(scalar_tensor_desc); + merge_op_def1->AddOutputDesc(int_tensor_desc); + auto merge_node1 = graph->AddNode(merge_op_def1); + + auto merge_op_def2 = std::make_shared("Merge2", "Merge"); + merge_op_def2->AddInputDesc(scalar_tensor_desc); + merge_op_def2->AddInputDesc(scalar_tensor_desc); + merge_op_def2->AddOutputDesc(scalar_tensor_desc); + merge_op_def2->AddOutputDesc(int_tensor_desc); + auto merge_node2 = graph->AddNode(merge_op_def2); + + auto output_op_def = std::make_shared("NetOutput", "NetOutput"); + output_op_def->AddInputDesc(scalar_tensor_desc); + output_op_def->AddOutputDesc(scalar_tensor_desc); + auto output_node = graph->AddNode(output_op_def); + + (void)GraphUtils::AddEdge(xNode->GetOutDataAnchor(0), greaterNode->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(yNode->GetOutDataAnchor(0), greaterNode->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(xNode->GetOutDataAnchor(0), less_node->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(yNode->GetOutDataAnchor(0), less_node->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(xNode->GetOutDataAnchor(0), switch_node1->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(greaterNode->GetOutDataAnchor(0), 
switch_node1->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(less_node->GetOutDataAnchor(0), switch_node2->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(greaterNode->GetOutDataAnchor(0), switch_node2->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(yNode->GetOutDataAnchor(0), switch_node3->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(switch_node2->GetOutDataAnchor(0), switch_node3->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(zNode->GetOutDataAnchor(0), switch_node4->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(switch_node2->GetOutDataAnchor(0), switch_node4->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(switch_node3->GetOutDataAnchor(1), merge_node1->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(switch_node4->GetOutDataAnchor(0), merge_node1->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(switch_node1->GetOutDataAnchor(1), merge_node2->GetInDataAnchor(0)); + (void)GraphUtils::AddEdge(merge_node1->GetOutDataAnchor(0), merge_node2->GetInDataAnchor(1)); + + (void)GraphUtils::AddEdge(merge_node2->GetOutDataAnchor(0), output_node->GetInDataAnchor(0)); + } +}; diff --git a/tests/ut/ge/graph/passes/switch_pass_unittest.cc b/tests/ut/ge/graph/passes/switch_pass_unittest.cc new file mode 100644 index 00000000..0d78fd6d --- /dev/null +++ b/tests/ut/ge/graph/passes/switch_pass_unittest.cc @@ -0,0 +1,423 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include + +#define private public +#include "graph/passes/switch_pass.h" + +#include "common/ge_inner_error_codes.h" +#include "inc/pass_manager.h" +#include "utils/graph_utils.h" +#undef private + +namespace ge { +namespace { + +class UtestGraphPassesSwitchPass : public testing::Test { + protected: + UtestGraphPassesSwitchPass() { + graph_ = std::make_shared("test"); + vector shape_vec{1, 1, 1, 1}; + GeShape shape = GeShape(shape_vec); + default_tensor_desc_ = std::make_shared(); + default_tensor_desc_->SetShape(shape); + default_tensor_desc_->SetFormat(FORMAT_NCHW); + default_tensor_desc_->SetDataType(DT_FLOAT); + } + + NodePtr NewNode(const std::string &name, const std::string &type, int input_cnt, int output_cnt) { + OpDescPtr op_desc = std::make_shared(name, type); + for (int i = 0; i < input_cnt; ++i) { + op_desc->AddInputDesc(default_tensor_desc_->Clone()); + } + + for (int i = 0; i < output_cnt; ++i) { + op_desc->AddOutputDesc(default_tensor_desc_->Clone()); + } + + NodePtr node = graph_->AddNode(op_desc); + (void)node->SetOwnerComputeGraph(graph_); + return node; + } + + void BuildDefaultGraph(bool is_input_const, const bool *pred_value = nullptr) { + /// input pred + /// \ / + /// Switch + /// | | + /// F T + /// | | + /// Merge + /// + bool is_pred_const = pred_value != nullptr; + if (is_pred_const) { + pred_node_ = NewNode("pred", CONSTANT, 0, 1); + int32_t weight[] = {static_cast(*pred_value)}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_INT32); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + OpDescUtils::SetWeights(pred_node_, {tensor}); + } else { + pred_node_ = NewNode("pred", GREATER, 2, 1); + } + + if (is_input_const) { + int32_t weight[] = {1}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_INT32); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + input_node_ = NewNode("input", CONSTANT, 0, 1); + 
OpDescUtils::SetWeights(input_node_, {tensor}); + } else { + input_node_ = NewNode("input", RELU, 0, 1); + } + + switch_node_ = NewNode("switch", SWITCH, 2, 2); + output_false_node_ = NewNode("false_output", RELU, 1, 1); + output_true_node_ = NewNode("true_output", RELU, 1, 1); + merge_node_ = NewNode("merge", MERGE, 2, 1); + + switch_node_->GetOpDesc()->SetIsInputConst({false, is_pred_const}); + + GraphUtils::AddEdge(input_node_->GetOutDataAnchor(0), switch_node_->GetInDataAnchor(0)); + GraphUtils::AddEdge(pred_node_->GetOutDataAnchor(0), switch_node_->GetInDataAnchor(1)); + GraphUtils::AddEdge(switch_node_->GetOutDataAnchor(0), output_false_node_->GetInDataAnchor(0)); + GraphUtils::AddEdge(switch_node_->GetOutDataAnchor(1), output_true_node_->GetInDataAnchor(0)); + GraphUtils::AddEdge(output_false_node_->GetOutDataAnchor(0), merge_node_->GetInDataAnchor(0)); + GraphUtils::AddEdge(output_true_node_->GetOutDataAnchor(0), merge_node_->GetInDataAnchor(1)); + + output_false_node_->GetOpDesc()->SetIsInputConst({false}); + output_true_node_->GetOpDesc()->SetIsInputConst({false}); + } + + void TestPickOutput(bool expect_output) { + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(graph_->GetDirectNodesSize(), 5); // has two isolate nodes + EXPECT_EQ(merge_node_->GetInDataNodes().size(), 1); + if (expect_output) { + EXPECT_EQ(merge_node_->GetInDataAnchor(0)->GetPeerOutAnchor().get(), nullptr); + EXPECT_EQ(merge_node_->GetInDataAnchor(1)->GetPeerOutAnchor(), output_true_node_->GetOutDataAnchor(0)); + EXPECT_EQ(output_true_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), input_node_->GetOutDataAnchor(0)); + } else { + EXPECT_EQ(merge_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), output_false_node_->GetOutDataAnchor(0)); + EXPECT_EQ(merge_node_->GetInDataAnchor(1)->GetPeerOutAnchor().get(), nullptr); + EXPECT_EQ(output_false_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), input_node_->GetOutDataAnchor(0)); + } + } + + ComputeGraphPtr graph_; + 
GeTensorDescPtr default_tensor_desc_; + SwitchPass pass_; + NodePtr pred_node_; + NodePtr input_node_; + NodePtr switch_node_; + NodePtr output_false_node_; + NodePtr output_true_node_; + NodePtr merge_node_; +}; + +} // namespace + +TEST_F(UtestGraphPassesSwitchPass, null_input) { + NodePtr node = nullptr; + auto ret = pass_.Run(node); + EXPECT_EQ(ret, PARAM_INVALID); +} + +TEST_F(UtestGraphPassesSwitchPass, null_pred) { + BuildDefaultGraph(false); + switch_node_->GetInDataAnchor(1)->UnlinkAll(); + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestGraphPassesSwitchPass, null_data) { + BuildDefaultGraph(false); + switch_node_->GetInDataAnchor(0)->UnlinkAll(); + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestGraphPassesSwitchPass, unsupported_node_type) { + auto node = NewNode("Op1", CONSTANT, 0, 1); + auto ret = pass_.Run(node); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestGraphPassesSwitchPass, empty_output) { + BuildDefaultGraph(false); + switch_node_->GetOutDataAnchor(0)->UnlinkAll(); + switch_node_->GetOutDataAnchor(1)->UnlinkAll(); + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestGraphPassesSwitchPass, non_const_pred) { + BuildDefaultGraph(false); + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); +} + +TEST_F(UtestGraphPassesSwitchPass, pick_output_false) { + bool pred_value = false; + BuildDefaultGraph(false, &pred_value); + TestPickOutput(false); +} + +TEST_F(UtestGraphPassesSwitchPass, pick_output_false_float) { + bool pred_value = false; + BuildDefaultGraph(false, &pred_value); + + float weight[] = {0.0f}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_FLOAT); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + OpDescUtils::SetWeights(pred_node_, {tensor}); + + TestPickOutput(false); +} + +TEST_F(UtestGraphPassesSwitchPass, pick_output_false_bool) { + bool pred_value = false; + 
BuildDefaultGraph(false, &pred_value); + + bool weight[] = {false}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_BOOL); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + OpDescUtils::SetWeights(pred_node_, {tensor}); + + TestPickOutput(false); +} + +TEST_F(UtestGraphPassesSwitchPass, pick_output_false_u16) { + bool pred_value = false; + BuildDefaultGraph(false, &pred_value); + + uint16_t weight[] = {0}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_UINT16); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + OpDescUtils::SetWeights(pred_node_, {tensor}); + + TestPickOutput(false); +} + +TEST_F(UtestGraphPassesSwitchPass, pick_output_true) { + bool pred_value = true; + BuildDefaultGraph(false, &pred_value); + TestPickOutput(true); +} + +TEST_F(UtestGraphPassesSwitchPass, pick_output_true_double) { + bool pred_value = true; + BuildDefaultGraph(false, &pred_value); + double weight[] = {1.0}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_DOUBLE); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + OpDescUtils::SetWeights(pred_node_, {tensor}); + + TestPickOutput(true); +} + +TEST_F(UtestGraphPassesSwitchPass, pick_output_true_int64) { + bool pred_value = true; + BuildDefaultGraph(false, &pred_value); + int64_t weight[] = {1L}; + GeTensorDesc weight_desc(GeShape({1}), FORMAT_NHWC, DT_INT64); + GeTensorPtr tensor = std::make_shared(weight_desc, (uint8_t *)weight, sizeof(weight)); + OpDescUtils::SetWeights(pred_node_, {tensor}); + + TestPickOutput(true); +} + +TEST_F(UtestGraphPassesSwitchPass, inactive_output_not_exists) { + /// input pred(false) + /// \ / + /// Switch + /// | + /// F + /// | + /// Merge + bool pred_value = false; + BuildDefaultGraph(false, &pred_value); + output_true_node_->GetOutDataAnchor(0)->UnlinkAll(); + GraphUtils::RemoveNodeWithoutRelink(graph_, output_true_node_); + 
switch_node_->GetOutDataAnchor(1)->UnlinkAll(); + // switch_node_->outDataAnchors_.pop_back(); + + /// input + /// | + /// F + /// | + /// Merge + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(graph_->GetDirectNodesSize(), 4); + EXPECT_EQ(merge_node_->GetInDataNodes().size(), 1); + EXPECT_EQ(merge_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), output_false_node_->GetOutDataAnchor(0)); + EXPECT_EQ(merge_node_->GetInDataAnchor(1)->GetPeerOutAnchor().get(), nullptr); + EXPECT_EQ(output_false_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), input_node_->GetOutDataAnchor(0)); +} + +TEST_F(UtestGraphPassesSwitchPass, const_input_pick_output_true) { + /// const pred(true) + /// \ / + /// Switch + /// | | \ + /// F T1 T2 + /// | | | + /// | | / + /// | T3 + /// | | + /// Merge + bool pred_value = true; + BuildDefaultGraph(true, &pred_value); + auto output_true_node2 = NewNode("true_output2", RELU, 1, 1); + auto output_true_node3 = NewNode("true_output3", ADD, 2, 1); + GraphUtils::AddEdge(switch_node_->GetOutDataAnchor(1), output_true_node2->GetInDataAnchor(0)); + GraphUtils::RemoveEdge(output_true_node_->GetOutDataAnchor(0), merge_node_->GetInDataAnchor(1)); + GraphUtils::AddEdge(output_true_node_->GetOutDataAnchor(0), output_true_node3->GetInDataAnchor(0)); + GraphUtils::AddEdge(output_true_node2->GetOutDataAnchor(0), output_true_node3->GetInDataAnchor(1)); + GraphUtils::AddEdge(output_true_node3->GetOutDataAnchor(0), merge_node_->GetInDataAnchor(1)); + + /// pred C + /// | | | + /// F T1 T2 + /// | / + /// T3 + /// | + /// Merge + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); + + EXPECT_EQ(graph_->GetDirectNodesSize(), 7); + EXPECT_EQ(merge_node_->GetInDataNodes().size(), 1); + EXPECT_EQ(merge_node_->GetInDataAnchor(0)->GetPeerOutAnchor().get(), nullptr); + EXPECT_EQ(merge_node_->GetInDataAnchor(1)->GetPeerOutAnchor(), output_true_node3->GetOutDataAnchor(0)); + 
EXPECT_EQ(output_true_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), input_node_->GetOutDataAnchor(0)); + EXPECT_NE(output_true_node2->GetInDataAnchor(0)->GetPeerOutAnchor(), + output_true_node3->GetInDataAnchor(0)->GetPeerOutAnchor()); +} + +TEST_F(UtestGraphPassesSwitchPass, after_switch_const_take_false_branch) { + /// C pred(false) + /// \ / + /// Switch + /// . . + /// . . + /// C_1 -> F T <- C_2 + /// | | + /// Merge + bool pred_value = false; + BuildDefaultGraph(true, &pred_value); + switch_node_->GetOutDataAnchor(0)->UnlinkAll(); + switch_node_->GetOutDataAnchor(1)->UnlinkAll(); + + NodePtr const_node_1 = NewNode("const_1", CONSTANT, 0, 1); + NodePtr const_node_2 = NewNode("const_2", CONSTANT, 0, 1); + GraphUtils::AddEdge(const_node_1->GetOutDataAnchor(0), output_false_node_->GetInDataAnchor(0)); + GraphUtils::AddEdge(const_node_2->GetOutDataAnchor(0), output_true_node_->GetInDataAnchor(0)); + GraphUtils::AddEdge(switch_node_->GetOutDataAnchor(0), output_false_node_->GetInControlAnchor()); + GraphUtils::AddEdge(switch_node_->GetOutDataAnchor(1), output_true_node_->GetInControlAnchor()); + + /// C pred(false) + /// + /// C_1 C_2 + /// | | + /// F T + /// | + /// Merge + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(graph_->GetDirectNodesSize(), 7); + EXPECT_EQ(merge_node_->GetInDataNodes().size(), 1); + EXPECT_EQ(merge_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), output_false_node_->GetOutDataAnchor(0)); + EXPECT_EQ(merge_node_->GetInDataAnchor(1)->GetPeerOutAnchor().get(), nullptr); + EXPECT_EQ(output_false_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), const_node_1->GetOutDataAnchor(0)); +} + +TEST_F(UtestGraphPassesSwitchPass, after_switch_const_take_true_branch) { + /// C pred(true) + /// \ / + /// Switch + /// . . + /// . . 
+ /// C_1 -> F T <- C_2 + /// | | + /// Merge + bool pred_value = true; + BuildDefaultGraph(true, &pred_value); + switch_node_->GetOutDataAnchor(0)->UnlinkAll(); + switch_node_->GetOutDataAnchor(1)->UnlinkAll(); + + NodePtr const_node_1 = NewNode("const_1", CONSTANT, 0, 1); + NodePtr const_node_2 = NewNode("const_2", CONSTANT, 0, 1); + GraphUtils::AddEdge(const_node_1->GetOutDataAnchor(0), output_false_node_->GetInDataAnchor(0)); + GraphUtils::AddEdge(const_node_2->GetOutDataAnchor(0), output_true_node_->GetInDataAnchor(0)); + GraphUtils::AddEdge(switch_node_->GetOutDataAnchor(0), output_false_node_->GetInControlAnchor()); + GraphUtils::AddEdge(switch_node_->GetOutDataAnchor(1), output_true_node_->GetInControlAnchor()); + + /// C_1 C_2 + /// | | + /// F T + /// | + /// Merge + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(graph_->GetDirectNodesSize(), 7); + EXPECT_EQ(merge_node_->GetInDataNodes().size(), 1); + EXPECT_EQ(merge_node_->GetInDataAnchor(0)->GetPeerOutAnchor().get(), nullptr); + EXPECT_EQ(merge_node_->GetInDataAnchor(1)->GetPeerOutAnchor(), output_true_node_->GetOutDataAnchor(0)); + EXPECT_EQ(output_true_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), const_node_2->GetOutDataAnchor(0)); +} + +TEST_F(UtestGraphPassesSwitchPass, dead_output_connected_to_merge) { + /// input pred(true) + /// \ / + /// Switch + /// | | + /// | T + /// | | + /// Merge + bool pred_value = true; + BuildDefaultGraph(false, &pred_value); + // graph_->RemoveNode(output_false_node_); + output_false_node_->GetOutDataAnchor(0)->UnlinkAll(); + GraphUtils::RemoveNodeWithoutRelink(graph_, output_false_node_); + switch_node_->GetOutDataAnchor(0)->UnlinkAll(); + + /// input pred(true) + /// \ / + /// Switch + /// | + /// T + /// | + /// Merge + auto ret = pass_.Run(switch_node_); + EXPECT_EQ(ret, SUCCESS); + + /// input + /// | + /// T + /// | + /// Merge + EXPECT_EQ(graph_->GetDirectNodesSize(), 4); + EXPECT_EQ(merge_node_->GetInDataNodes().size(), 1); + 
EXPECT_EQ(merge_node_->GetInDataAnchor(0)->GetPeerOutAnchor().get(), nullptr); + EXPECT_EQ(merge_node_->GetInDataAnchor(1)->GetPeerOutAnchor(), output_true_node_->GetOutDataAnchor(0)); + EXPECT_EQ(output_true_node_->GetInDataAnchor(0)->GetPeerOutAnchor(), input_node_->GetOutDataAnchor(0)); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/trans_op_breadth_fusion_pass_unittest.cc b/tests/ut/ge/graph/passes/trans_op_breadth_fusion_pass_unittest.cc new file mode 100644 index 00000000..db57fdd9 --- /dev/null +++ b/tests/ut/ge/graph/passes/trans_op_breadth_fusion_pass_unittest.cc @@ -0,0 +1,512 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/transop_breadth_fusion_pass.h" + +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "graph_builder_utils.h" + +using namespace ge; + +class UtestGraphPassesTransOpBreadthFusionPass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + + ge::OpDescPtr op_desc_; +}; + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, test_simple_trans_data) { + /// ___ NodeTrans4DToFZ_1 __ NodeFZ + /// | + /// |___ NodeTrans4DToFZ_2 __ NodeFZ + /// Node4D __| + /// |___ NodeTrans4dTo5D_1 __ Node5D + /// | + /// |___ NodeTrans4DTo5D_2 __ Node5D + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_4d = NodeBuilder("Node4D", DATA).AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32).Build(graph); + + // NodeTrans4DToFZ + ge::NodePtr node_4d_to_fz_1 = NodeBuilder("4d_to_fz_1", TRANSDATA) + .AddInputDesc({1, 2, 
3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_FRACTAL_Z, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_4d_to_fz_2 = NodeBuilder("4d_to_fz_2", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_FRACTAL_Z, DT_FLOAT) + .Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1 = NodeBuilder("4d_to_5d_1", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_4d_to_5d_2 = NodeBuilder("4d_to_5d_1", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // NodeFZ + ge::NodePtr node_fz_1 = + NodeBuilder("FZ_1", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_FRACTAL_Z, DT_FLOAT).Build(graph); + + ge::NodePtr node_fz_2 = + NodeBuilder("FZ_2", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_FRACTAL_Z, DT_FLOAT).Build(graph); + + // Node5D + ge::NodePtr node_5d_1 = + NodeBuilder("5D_1", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_5d_2 = + NodeBuilder("5D_2", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_fz_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_fz_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_2->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_fz_1->GetOutDataAnchor(0), node_fz_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d_to_fz_2->GetOutDataAnchor(0), node_fz_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d_to_5d_1->GetOutDataAnchor(0), node_5d_1->GetInDataAnchor(0)); + 
ge::GraphUtils::AddEdge(node_4d_to_5d_2->GetOutDataAnchor(0), node_5d_2->GetInDataAnchor(0)); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(node_4d_to_fz_1->GetOutDataNodes().size(), 2); + EXPECT_EQ(node_4d_to_5d_1->GetOutDataNodes().size(), 2); + EXPECT_TRUE(node_4d_to_fz_2->GetOutDataNodes().empty()); + EXPECT_TRUE(node_4d_to_5d_2->GetOutDataNodes().empty()); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, test_simple_cast) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + + ge::NodePtr node1 = NodeBuilder("node1", DATA).AddOutputDesc({1}, FORMAT_NCHW, DT_INT32).Build(graph); + + ge::NodePtr cast_node_1 = NodeBuilder("cast_node_1", CAST) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr cast_node_2 = NodeBuilder("cast_node_2", CAST) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_2 = NodeBuilder("node2", RELU).AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr node_3 = NodeBuilder("node3", RELU).AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), cast_node_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), cast_node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(cast_node_1->GetOutDataAnchor(0), node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(cast_node_2->GetOutDataAnchor(0), node_3->GetInDataAnchor(0)); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(cast_node_1->GetOutDataNodes().size(), 2); + EXPECT_TRUE(cast_node_2->GetOutDataNodes().empty()); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, test_simple_reshape) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + + ge::NodePtr node1 = NodeBuilder("node1", DATA).AddOutputDesc({1}, 
FORMAT_NCHW, DT_INT32).Build(graph); + + ge::NodePtr reshape_node_1 = NodeBuilder("reshape_node_1", RESHAPE) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + ge::NodePtr reshape_node_2 = NodeBuilder("reshape_node_2", RESHAPE) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + ge::NodePtr node_2 = NodeBuilder("node2", RELU).AddInputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT32).Build(graph); + + ge::NodePtr node_3 = NodeBuilder("node3", RELU).AddInputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT32).Build(graph); + + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), reshape_node_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), reshape_node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(reshape_node_1->GetOutDataAnchor(0), node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(reshape_node_2->GetOutDataAnchor(0), node_3->GetInDataAnchor(0)); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(reshape_node_1->GetOutDataNodes().size(), 2); + EXPECT_TRUE(reshape_node_2->GetOutDataNodes().empty()); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, test_simple_transpose) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + + ge::NodePtr node1 = NodeBuilder("node1", DATA).AddOutputDesc({1}, FORMAT_NCHW, DT_INT32).Build(graph); + + ge::NodePtr transpose_node_1 = NodeBuilder("transpose_node_1", TRANSPOSE) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + ge::NodePtr transpose_node_2 = NodeBuilder("transpose_node_2", TRANSPOSE) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + ge::NodePtr node_2 = NodeBuilder("node2", RELU).AddInputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT32).Build(graph); + + ge::NodePtr node_3 = 
NodeBuilder("node3", RELU).AddInputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT32).Build(graph); + + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), transpose_node_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), transpose_node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(transpose_node_1->GetOutDataAnchor(0), node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(transpose_node_2->GetOutDataAnchor(0), node_3->GetInDataAnchor(0)); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(transpose_node_1->GetOutDataNodes().size(), 2); + EXPECT_TRUE(transpose_node_2->GetOutDataNodes().empty()); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, test_partial_matching) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + ge::NodePtr node1 = NodeBuilder("node1", DATA).AddOutputDesc({1}, FORMAT_NCHW, DT_INT32).Build(graph); + + ge::NodePtr cast_node = NodeBuilder("cast_node", CAST) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr transdata_node_1 = NodeBuilder("transdata_node_1", TRANSDATA) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 1}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr transdata_node_2 = NodeBuilder("transdata_node_2", TRANSDATA) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 1}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr transdata_node_3 = NodeBuilder("transdata_node_3", TRANSDATA) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT16) + .Build(graph); + + ge::NodePtr node_2 = NodeBuilder("node2", RELU).AddInputDesc({1, 1}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_3 = NodeBuilder("node3", RELU).AddInputDesc({1, 1}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_4 = NodeBuilder("node4", RELU).AddInputDesc({1, 1}, FORMAT_NC1HWC0, 
DT_FLOAT).Build(graph); + + ge::NodePtr node_5 = NodeBuilder("node5", RELU).AddInputDesc({1, 1}, FORMAT_NC1HWC0, DT_INT16).Build(graph); + + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), cast_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), transdata_node_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), transdata_node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), transdata_node_3->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(cast_node->GetOutDataAnchor(0), node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(transdata_node_1->GetOutDataAnchor(0), node_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(transdata_node_2->GetOutDataAnchor(0), node_4->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(transdata_node_3->GetOutDataAnchor(0), node_5->GetInDataAnchor(0)); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(cast_node->GetOutDataNodes().size(), 1); + EXPECT_EQ(transdata_node_1->GetOutDataNodes().size(), 2); + EXPECT_EQ(transdata_node_3->GetOutDataNodes().size(), 1); + EXPECT_TRUE(transdata_node_2->GetOutDataNodes().empty()); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, test_control_anchor) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + + ge::NodePtr node1 = NodeBuilder("node1", DATA).AddOutputDesc({1}, FORMAT_NCHW, DT_INT32).Build(graph); + + ge::NodePtr cast_node_1 = NodeBuilder("cast_node_1", CAST) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr cast_node_2 = NodeBuilder("cast_node_2", CAST) + .AddInputDesc({1}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_2 = NodeBuilder("node2", RELU).AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr node_3 = NodeBuilder("node3", RELU).AddInputDesc({1}, FORMAT_NCHW, 
DT_FLOAT).Build(graph); + + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), cast_node_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node1->GetOutDataAnchor(0), cast_node_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(cast_node_1->GetOutDataAnchor(0), node_2->GetInControlAnchor()); + ge::GraphUtils::AddEdge(cast_node_2->GetOutDataAnchor(0), node_3->GetInControlAnchor()); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(cast_node_1->GetOutControlNodes().size(), 2); + EXPECT_TRUE(cast_node_2->GetOutControlNodes().empty()); + EXPECT_TRUE(cast_node_1->GetOutDataNodes().empty()); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, test_reshape_op_failed) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + + ge::NodePtr data1 = NodeBuilder("data1", DATA).AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr data2 = NodeBuilder("data2", DATA).AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr constant = NodeBuilder("constant", CONSTANT).AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr exp1 = NodeBuilder("exp1", EXP) + .AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr exp2 = NodeBuilder("exp2", EXP) + .AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr reshape1 = NodeBuilder("reshape1", RESHAPE) + .AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr reshape2 = NodeBuilder("reshape2", RESHAPE) + .AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr relu1 = NodeBuilder("relu1", RELU).AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::NodePtr relu2 = 
NodeBuilder("relu2", RELU).AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(0), exp1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(data2->GetOutDataAnchor(0), exp2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(exp1->GetOutDataAnchor(0), reshape1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(exp2->GetOutDataAnchor(0), reshape2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(constant->GetOutDataAnchor(0), reshape1->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(constant->GetOutDataAnchor(0), reshape2->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(reshape1->GetOutDataAnchor(0), relu1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(reshape2->GetOutDataAnchor(0), relu2->GetInDataAnchor(0)); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(reshape1->GetOutDataNodes().size(), 1); + EXPECT_EQ(reshape2->GetOutDataNodes().size(), 1); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, test_multi_anchor_case) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + + ge::NodePtr data1 = NodeBuilder("data1", DATA) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr cast1 = NodeBuilder("cast1", CAST) + .AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + + ge::NodePtr cast2 = NodeBuilder("cast2", CAST) + .AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + ge::NodePtr relu1 = NodeBuilder("relu1", RELU).AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT16).Build(graph); + + ge::NodePtr relu2 = NodeBuilder("relu2", RELU).AddInputDesc({1}, FORMAT_NCHW, DT_FLOAT16).Build(graph); + + ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(0), cast1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(data1->GetOutDataAnchor(1), cast2->GetInDataAnchor(0)); + 
ge::GraphUtils::AddEdge(cast1->GetOutDataAnchor(0), relu1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(cast2->GetOutDataAnchor(0), relu2->GetInDataAnchor(0)); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + + EXPECT_EQ(SUCCESS, status); + EXPECT_EQ(cast1->GetOutDataNodes().size(), 1); + EXPECT_EQ(cast1->GetOutDataNodes().size(), 1); +} + +/// ----> netoutput1 +/// / | \ +/// transdata1 transdata2 transdata3 +/// \ / | +/// var1-------------- +static ComputeGraphPtr BuildGraph1() { + ut::GraphBuilder builder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto transdata1 = builder.AddNode("transdata1", "TransData", 1, 1); + transdata1->GetOpDesc()->MutableOutputDesc(0)->SetFormat(FORMAT_NC1HWC0); + transdata1->GetOpDesc()->MutableOutputDesc(0)->SetShape(GeShape(std::vector({1, 1, 224, 224, 16}))); + AttrUtils::SetStr(transdata1->GetOpDesc(), ATTR_NAME_STREAM_LABEL, "label1"); + auto transdata2 = builder.AddNode("transdata2", "TransData", 1, 1); + transdata2->GetOpDesc()->MutableOutputDesc(0)->SetFormat(FORMAT_NC1HWC0); + transdata2->GetOpDesc()->MutableOutputDesc(0)->SetShape(GeShape(std::vector({1, 1, 224, 224, 16}))); + auto transdata3 = builder.AddNode("transdata3", "TransData", 1, 1); + transdata3->GetOpDesc()->MutableOutputDesc(0)->SetFormat(FORMAT_NC1HWC0); + transdata3->GetOpDesc()->MutableOutputDesc(0)->SetShape(GeShape(std::vector({1, 1, 224, 224, 16}))); + auto netoutput1 = builder.AddNode("netoutput1", "NetOutput1", 10, 0); + + builder.AddDataEdge(var1, 0, transdata1, 0); + builder.AddDataEdge(var1, 0, transdata2, 0); + builder.AddDataEdge(var1, 0, transdata3, 0); + builder.AddDataEdge(transdata1, 0, netoutput1, 0); + builder.AddDataEdge(transdata2, 0, netoutput1, 1); + builder.AddDataEdge(transdata3, 0, netoutput1, 2); + + return builder.GetGraph(); +} + +/// ---------> netoutput1 +/// / | \ +/// transdata1 transdata2(l1) transdata3(l1) +/// \ / | +/// var1------------------ +static ComputeGraphPtr 
BuildGraph2() { + ut::GraphBuilder builder("g2"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto transdata1 = builder.AddNode("transdata1", "TransData", 1, 1); + transdata1->GetOpDesc()->MutableOutputDesc(0)->SetFormat(FORMAT_NC1HWC0); + transdata1->GetOpDesc()->MutableOutputDesc(0)->SetShape(GeShape(std::vector({1, 1, 224, 224, 16}))); + auto transdata2 = builder.AddNode("transdata2", "TransData", 1, 1); + transdata2->GetOpDesc()->MutableOutputDesc(0)->SetFormat(FORMAT_NC1HWC0); + transdata2->GetOpDesc()->MutableOutputDesc(0)->SetShape(GeShape(std::vector({1, 1, 224, 224, 16}))); + AttrUtils::SetStr(transdata2->GetOpDesc(), ATTR_NAME_STREAM_LABEL, "label1"); + auto transdata3 = builder.AddNode("transdata3", "TransData", 1, 1); + transdata3->GetOpDesc()->MutableOutputDesc(0)->SetFormat(FORMAT_NC1HWC0); + transdata3->GetOpDesc()->MutableOutputDesc(0)->SetShape(GeShape(std::vector({1, 1, 224, 224, 16}))); + AttrUtils::SetStr(transdata3->GetOpDesc(), ATTR_NAME_STREAM_LABEL, "label1"); + auto netoutput1 = builder.AddNode("netoutput1", "NetOutput1", 10, 0); + + builder.AddDataEdge(var1, 0, transdata1, 0); + builder.AddDataEdge(var1, 0, transdata2, 0); + builder.AddDataEdge(var1, 0, transdata3, 0); + builder.AddDataEdge(transdata1, 0, netoutput1, 0); + builder.AddDataEdge(transdata2, 0, netoutput1, 1); + builder.AddDataEdge(transdata3, 0, netoutput1, 2); + + return builder.GetGraph(); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, diff_stream1) { + auto graph = BuildGraph1(); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + EXPECT_EQ(SUCCESS, status); + + auto transdata1 = graph->FindNode("transdata1"); + auto transdata2 = graph->FindNode("transdata2"); + auto transdata3 = graph->FindNode("transdata3"); + + EXPECT_EQ(transdata1->GetOutNodes().size(), 1); + EXPECT_EQ(transdata1->GetOutDataNodes().at(0)->GetName(), "netoutput1"); + EXPECT_EQ(transdata1->GetInNodes().size(), 1); + 
EXPECT_EQ(transdata1->GetInDataNodes().at(0)->GetName(), "var1"); + + EXPECT_TRUE(transdata2 == nullptr || transdata3 == nullptr); + EXPECT_FALSE(transdata2 == nullptr && transdata3 == nullptr); + auto not_empty_node = transdata2 != nullptr ? transdata2 : transdata3; + EXPECT_FALSE(not_empty_node->GetInNodes().empty()); + EXPECT_EQ(not_empty_node->GetInDataNodes().at(0)->GetName(), "var1"); + EXPECT_FALSE(not_empty_node->GetOutNodes().empty()); + EXPECT_EQ(not_empty_node->GetOutDataNodes().at(0)->GetName(), "netoutput1"); +} + +TEST_F(UtestGraphPassesTransOpBreadthFusionPass, diff_stream2) { + auto graph = BuildGraph2(); + + ge::TransOpBreadthFusionPass pass; + Status status = pass.Run(graph); + EXPECT_EQ(SUCCESS, status); + + auto transdata1 = graph->FindNode("transdata1"); + auto transdata2 = graph->FindNode("transdata2"); + auto transdata3 = graph->FindNode("transdata3"); + + EXPECT_EQ(transdata1->GetOutNodes().size(), 1); + EXPECT_EQ(transdata1->GetOutDataNodes().at(0)->GetName(), "netoutput1"); + EXPECT_EQ(transdata1->GetInNodes().size(), 1); + EXPECT_EQ(transdata1->GetInDataNodes().at(0)->GetName(), "var1"); + + EXPECT_TRUE(transdata2 == nullptr || transdata3 == nullptr); + EXPECT_FALSE(transdata2 == nullptr && transdata3 == nullptr); + auto not_empty_node = transdata2 != nullptr ? 
transdata2 : transdata3; + EXPECT_FALSE(not_empty_node->GetInNodes().empty()); + EXPECT_EQ(not_empty_node->GetInDataNodes().at(0)->GetName(), "var1"); + EXPECT_FALSE(not_empty_node->GetOutNodes().empty()); + EXPECT_EQ(not_empty_node->GetOutDataNodes().at(0)->GetName(), "netoutput1"); +} diff --git a/tests/ut/ge/graph/passes/trans_op_depth_fusion_pass_unittest.cc b/tests/ut/ge/graph/passes/trans_op_depth_fusion_pass_unittest.cc new file mode 100644 index 00000000..a9ea41ea --- /dev/null +++ b/tests/ut/ge/graph/passes/trans_op_depth_fusion_pass_unittest.cc @@ -0,0 +1,724 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/transop_depth_fusion_pass.h" + +#include +#include + +using namespace ge; + +class UtestGraphPassesTransOpDepthFusionPass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + + ge::OpDescPtr op_desc_; +}; + +TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_offset_cast) { + // Node4D(fp32)->cast1(fp32->fp16)->cast2(fp16->fp32)->sinh + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // cast1 + ge::NodePtr node_cast_1 = NodeBuilder("node_cast_1", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), 
CAST_ATTR_DSTT, DT_FLOAT16); + auto src_name = node_data->GetName(); + node_cast_1->GetOpDesc()->SetSrcName({src_name}); + node_cast_1->GetOpDesc()->SetInputName({src_name}); + + // cast2 + ge::NodePtr node_cast_2 = NodeBuilder("node_cast_2", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + src_name = node_cast_1->GetName(); + node_cast_2->GetOpDesc()->SetSrcName({src_name}); + node_cast_2->GetOpDesc()->SetInputName({src_name}); + + // sinh + ge::NodePtr node_sinh = NodeBuilder("node_sinh", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + src_name = node_cast_2->GetName(); + node_sinh->GetOpDesc()->SetSrcName({src_name}); + node_sinh->GetOpDesc()->SetInputName({src_name}); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_cast_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_1->GetOutDataAnchor(0), node_cast_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_2->GetOutDataAnchor(0), node_sinh->GetInDataAnchor(0)); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 2); +} + +TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_offset_cast_ctrl_edge) { + // Node4D(fp32)->sinh1->sinh2->cast1(fp32->fp16)->cast2(fp16->fp32)->sinh3 + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // sinh1 + ge::NodePtr node_sinh_1 = NodeBuilder("node_sinh_1", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + 
.Build(graph); + + // sinh2 + ge::NodePtr node_sinh_2 = NodeBuilder("node_sinh_2", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // cast1 + ge::NodePtr node_cast_1 = NodeBuilder("node_cast_1", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT16); + + // cast2 + ge::NodePtr node_cast_2 = NodeBuilder("node_cast_2", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + + // sinh3 + ge::NodePtr node_sinh_3 = NodeBuilder("node_sinh_3", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_sinh_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_1->GetOutDataAnchor(0), node_sinh_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_2->GetOutDataAnchor(0), node_cast_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_1->GetOutDataAnchor(0), node_cast_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_2->GetOutDataAnchor(0), node_sinh_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_1->GetOutControlAnchor(), node_cast_1->GetInControlAnchor()); + ge::GraphUtils::AddEdge(node_sinh_1->GetOutControlAnchor(), node_cast_2->GetInControlAnchor()); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 4); +} + 
+TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_offset_cast_ctrl_edge2) { + // Node4D(fp32)->sinh1->cast1(fp32->fp16)->cast2(fp16->fp32)->sinh2->sinh3 + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // sinh1 + ge::NodePtr node_sinh_1 = NodeBuilder("node_sinh_1", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // cast1 + ge::NodePtr node_cast_1 = NodeBuilder("node_cast_1", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT16); + + // cast2 + ge::NodePtr node_cast_2 = NodeBuilder("node_cast_2", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + + // sinh2 + ge::NodePtr node_sinh_2 = NodeBuilder("node_sinh_2", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // sinh3 + ge::NodePtr node_sinh_3 = NodeBuilder("node_sinh_3", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_sinh_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_1->GetOutDataAnchor(0), node_cast_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_1->GetOutDataAnchor(0), node_cast_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_2->GetOutDataAnchor(0), 
node_sinh_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_2->GetOutDataAnchor(0), node_sinh_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_2->GetOutControlAnchor(), node_sinh_3->GetInControlAnchor()); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 4); +} + +TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_offset_cast_ctrl_edge3) { + // Node4D(fp32)->sinh1->cast1(fp32->fp16)->cast2(fp16->fp32)->sinh2->sinh3 + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // sinh_1 + ge::NodePtr node_sinh_1 = NodeBuilder("node_sinh_1", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // cast1 + ge::NodePtr node_cast_1 = NodeBuilder("node_cast_1", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT16); + + // cast2 + ge::NodePtr node_cast_2 = NodeBuilder("node_cast_2", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + + // sinh_2 + ge::NodePtr node_sinh_2 = NodeBuilder("node_sinh_2", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // sinh3 + ge::NodePtr node_sinh_3 = NodeBuilder("node_sinh_3", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, 
FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_sinh_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_1->GetOutDataAnchor(0), node_cast_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_1->GetOutDataAnchor(0), node_cast_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_2->GetOutDataAnchor(0), node_sinh_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_2->GetOutDataAnchor(0), node_sinh_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_2->GetOutDataAnchor(0), node_sinh_3->GetInControlAnchor()); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 4); +} + +TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_offset_cast_ctrl_edge4) { + // Node4D(fp32)->sinh1->sinh2->cast1(fp32->fp16)->cast2(fp16->fp32)->sinh3 + + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // sinh1 + ge::NodePtr node_sinh_1 = NodeBuilder("node_sinh_1", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // sinh2 + ge::NodePtr node_sinh_2 = NodeBuilder("node_sinh_2", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // cast1 + ge::NodePtr node_cast_1 = NodeBuilder("node_cast_1", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT16); + + // cast2 + ge::NodePtr node_cast_2 = NodeBuilder("node_cast_2", CAST) + .AddInputDesc({2, 2, 2, 2}, 
FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + + // sinh3 + ge::NodePtr node_sinh_3 = NodeBuilder("node_sinh_3", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_sinh_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_1->GetOutDataAnchor(0), node_sinh_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_sinh_2->GetOutDataAnchor(0), node_cast_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_1->GetOutDataAnchor(0), node_cast_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_2->GetOutDataAnchor(0), node_sinh_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_cast_2->GetInControlAnchor()); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 4); +} + +TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_offset_transpose) { + // Node4D(NCHW)->transpose(NCHW->NHWC)->transpose(NHWC->NCHW)->sinh + + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // transpose1 + ge::NodePtr node_transpose_1 = NodeBuilder("node_transpose_1", TRANSPOSE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + vector order_list = {0, 2, 3, 1}; + const std::string ATTR_PERM = "perm"; + AttrUtils::SetListInt(node_transpose_1->GetOpDesc(), ATTR_PERM, order_list); + + // transpose2 + ge::NodePtr node_transpose_2 = NodeBuilder("node_transpose_2", TRANSPOSE) + 
.AddInputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + vector order_list2 = {0, 3, 1, 2}; + AttrUtils::SetListInt(node_transpose_2->GetOpDesc(), ATTR_PERM, order_list2); + + // sinh + ge::NodePtr node_sinh = NodeBuilder("node_sinh", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_transpose_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_1->GetOutDataAnchor(0), node_transpose_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_2->GetOutDataAnchor(0), node_sinh->GetInDataAnchor(0)); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 2); +} + +TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_offset_transdata) { + // Node4D(NCHW)->transdata(NCHW->NC1HWC0)->transdata(NC1HWC0->NCHW)->sinh + + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 16, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // trandata1 + ge::NodePtr node_transdata_1 = NodeBuilder("node_transdata_1", TRANSDATA) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 1, 2, 2, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // transdata2 + ge::NodePtr node_transdata_2 = NodeBuilder("node_transdata_2", TRANSDATA) + .AddInputDesc({2, 1, 2, 2, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // sinh + ge::NodePtr node_sinh = NodeBuilder("node_sinh", SINH) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), 
node_transdata_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transdata_1->GetOutDataAnchor(0), node_transdata_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transdata_2->GetOutDataAnchor(0), node_sinh->GetInDataAnchor(0)); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 2); +} + +TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_fold_reshape) { + // Node4D(NCHW)->reshape->sinh + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Data4D", DATA).AddOutputDesc({2, 16, 2, 2}, FORMAT_NCHW, DT_FLOAT).Build(graph); + + // Node1D + ge::NodePtr node_data2 = NodeBuilder("Data1D", CONSTANTOP).AddOutputDesc({4}, FORMAT_ND, DT_INT32).Build(graph); + + // reshape + ge::NodePtr node_reshape = NodeBuilder("node_reshape", RESHAPE) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddInputDesc({4}, FORMAT_ND, DT_INT32) + .AddOutputDesc({2, 16, 4, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + vector shape_v = {2, 16, 4, 1}; + AttrUtils::SetListInt(node_reshape->GetOpDesc(), RESHAPE_ATTR_SHAPE, shape_v); + AttrUtils::SetInt(node_reshape->GetOpDesc(), RESHAPE_ATTR_AXIS, 0); + AttrUtils::SetInt(node_reshape->GetOpDesc(), RESHAPE_ATTR_NUM_AXES, -1); + + // sinh + ge::NodePtr node_sinh = NodeBuilder("node_sinh", SINH) + .AddInputDesc({2, 16, 4, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 16, 4, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_reshape->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_data2->GetOutDataAnchor(0), node_reshape->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(node_reshape->GetOutDataAnchor(0), node_sinh->GetInDataAnchor(0)); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 
3); +} + +TEST_F(UtestGraphPassesTransOpDepthFusionPass, test_transop_with_multi_out_edge) { + /// input graph + /// + /// -->sih1 + /// / + /// -->transpose1 -->transpose3-->sinh2 + /// | \ / + /// | -->transpose2 + /// | \ + /// / -->cast3-->cast4-->sinh3 + /// / + /// / -->transpose4-->transpose5-->sinh4 + /// / / + /// Node4D-->Cast1-->Cast2-->Cast5 -->reshape2-->sinh5 + /// \ \ + /// \ -->sinh6 + /// \ + /// \ -->transpose6-->transpose7-->sinh9 + /// \ / + /// -->reshape-->cast6-->cast7-->sinh8 + /// \ + /// -->sinh7 + + /// after optimized graph + /// -->Cast4-->sinh3 + /// / + /// / -->transpose1-->sinh1 + /// / / + /// / /-->transpose3-->sinh2 + /// -->Cast1 + /// / \-->sinh7 + /// / \ + /// / -->sinh9 + /// Node4D + /// \ -->sinh4 + /// \ / + /// -->Cast5-->sinh5 + /// \ \ + /// \ -->sinh6 + /// \ + /// -->Cast7-->sinh8 + ge::ComputeGraphPtr graph = std::make_shared("test"); + + // Node4D + ge::NodePtr node_data = NodeBuilder("Node4D", DATA).AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16).Build(graph); + + // cast1 + ge::NodePtr node_cast_1 = NodeBuilder("node_cast_1", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_1->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + + // transpose1 + ge::NodePtr node_transpose_1 = NodeBuilder("node_transpose_1", TRANSPOSE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + vector order_list1 = {0, 2, 3, 1}; + const std::string ATTR_PERM = "perm"; + AttrUtils::SetListInt(node_transpose_1->GetOpDesc(), ATTR_PERM, order_list1); + + // transpose2 + ge::NodePtr node_transpose_2 = NodeBuilder("node_transpose_2", TRANSPOSE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + vector order_list2 = {0, 
3, 1, 2}; + AttrUtils::SetListInt(node_transpose_2->GetOpDesc(), ATTR_PERM, order_list2); + + // sinh1 + ge::NodePtr node_sinh_1 = NodeBuilder("node_sinh_1", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + // transpose3 + ge::NodePtr node_transpose_3 = NodeBuilder("node_transpose_3", TRANSPOSE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + vector order_list3 = {0, 2, 3, 1}; + AttrUtils::SetListInt(node_transpose_3->GetOpDesc(), ATTR_PERM, order_list3); + + // sinh2 + ge::NodePtr node_sinh_2 = NodeBuilder("node_sinh_2", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + // cast3 + ge::NodePtr node_cast_3 = NodeBuilder("node_cast_3", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + AttrUtils::SetInt(node_cast_3->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT); + AttrUtils::SetInt(node_cast_3->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT16); + + // cast4 + ge::NodePtr node_cast_4 = NodeBuilder("node_cast_4", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_4->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_4->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + + // sinh3 + ge::NodePtr node_sinh_3 = NodeBuilder("node_sinh_3", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // cast2 + ge::NodePtr node_cast_2 = NodeBuilder("node_cast_2", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT); + 
AttrUtils::SetInt(node_cast_2->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT16); + + // cast5 + ge::NodePtr node_cast_5 = NodeBuilder("node_cast_5", CAST) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_5->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_5->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + + // transpose4 + ge::NodePtr node_transpose_4 = NodeBuilder("node_transpose_4", TRANSPOSE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + vector order_list4 = {0, 2, 3, 1}; + AttrUtils::SetListInt(node_transpose_4->GetOpDesc(), ATTR_PERM, order_list4); + + // transpose5 + ge::NodePtr node_transpose_5 = NodeBuilder("node_transpose_5", TRANSPOSE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + vector order_list5 = {0, 3, 1, 2}; + AttrUtils::SetListInt(node_transpose_5->GetOpDesc(), ATTR_PERM, order_list5); + + // sinh4 + ge::NodePtr node_sinh_4 = NodeBuilder("node_sinh_4", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // reshape2 + ge::NodePtr node_reshape_2 = NodeBuilder("node_reshape_2", RESHAPE) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 4, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + vector shape_v2 = {2, 2, 4, 1}; + AttrUtils::SetListInt(node_reshape_2->GetOpDesc(), RESHAPE_ATTR_SHAPE, shape_v2); + AttrUtils::SetInt(node_reshape_2->GetOpDesc(), RESHAPE_ATTR_AXIS, 0); + AttrUtils::SetInt(node_reshape_2->GetOpDesc(), RESHAPE_ATTR_NUM_AXES, -1); + + // sinh5 + ge::NodePtr node_sinh_5 = NodeBuilder("node_sinh_5", SINH) + .AddInputDesc({2, 2, 4, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 4, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // sinh6 + ge::NodePtr 
node_sinh_6 = NodeBuilder("node_sinh_6", SINH) + .AddInputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 2, 2, 2}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // reshape1 + ge::NodePtr node_reshape_1 = NodeBuilder("node_reshape_1", RESHAPE) + .AddInputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + vector shape_v1 = {2, 8, 1, 1}; + AttrUtils::SetListInt(node_reshape_1->GetOpDesc(), RESHAPE_ATTR_SHAPE, shape_v1); + AttrUtils::SetInt(node_reshape_1->GetOpDesc(), RESHAPE_ATTR_AXIS, 0); + AttrUtils::SetInt(node_reshape_1->GetOpDesc(), RESHAPE_ATTR_NUM_AXES, -1); + + // sinh7 + ge::NodePtr node_sinh_7 = NodeBuilder("node_sinh_7", SINH) + .AddInputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // cast6 + ge::NodePtr node_cast_6 = NodeBuilder("node_cast_6", CAST) + .AddInputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT16) + .Build(graph); + AttrUtils::SetInt(node_cast_6->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT); + AttrUtils::SetInt(node_cast_6->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT16); + + // cast7 + ge::NodePtr node_cast_7 = NodeBuilder("node_cast_7", CAST) + .AddInputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT16) + .AddOutputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + AttrUtils::SetInt(node_cast_7->GetOpDesc(), CAST_ATTR_SRCT, DT_FLOAT16); + AttrUtils::SetInt(node_cast_7->GetOpDesc(), CAST_ATTR_DSTT, DT_FLOAT); + + // sinh8 + ge::NodePtr node_sinh_8 = NodeBuilder("node_sinh_8", SINH) + .AddInputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // transpose6 + ge::NodePtr node_transpose_6 = NodeBuilder("node_transpose_6", TRANSPOSE) + .AddInputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 8, 1, 1}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + vector order_list6 = {0, 2, 3, 1}; + 
AttrUtils::SetListInt(node_transpose_6->GetOpDesc(), ATTR_PERM, order_list6); + + // transpose7 + ge::NodePtr node_transpose_7 = NodeBuilder("node_transpose_7", TRANSPOSE) + .AddInputDesc({2, 8, 1, 1}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + vector order_list7 = {0, 3, 1, 2}; + AttrUtils::SetListInt(node_transpose_7->GetOpDesc(), ATTR_PERM, order_list7); + + // sinh9 + ge::NodePtr node_sinh_9 = NodeBuilder("node_sinh_9", SINH) + .AddInputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({2, 8, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_data->GetOutDataAnchor(0), node_cast_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_1->GetOutDataAnchor(0), node_transpose_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_1->GetOutDataAnchor(0), node_transpose_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_1->GetOutDataAnchor(0), node_sinh_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_2->GetOutDataAnchor(0), node_cast_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_3->GetOutDataAnchor(0), node_cast_4->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_4->GetOutDataAnchor(0), node_sinh_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_2->GetOutDataAnchor(0), node_transpose_3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_3->GetOutDataAnchor(0), node_sinh_2->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_cast_1->GetOutDataAnchor(0), node_cast_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_2->GetOutDataAnchor(0), node_cast_5->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_5->GetOutDataAnchor(0), node_transpose_4->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_4->GetOutDataAnchor(0), node_transpose_5->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_5->GetOutDataAnchor(0), 
node_sinh_4->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_5->GetOutDataAnchor(0), node_reshape_2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_reshape_2->GetOutDataAnchor(0), node_sinh_5->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_5->GetOutDataAnchor(0), node_sinh_6->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_cast_1->GetOutDataAnchor(0), node_reshape_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_reshape_1->GetOutDataAnchor(0), node_sinh_7->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_reshape_1->GetOutDataAnchor(0), node_cast_6->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_6->GetOutDataAnchor(0), node_cast_7->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_cast_7->GetOutDataAnchor(0), node_sinh_8->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_reshape_1->GetOutDataAnchor(0), node_transpose_6->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_6->GetOutDataAnchor(0), node_transpose_7->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_transpose_7->GetOutDataAnchor(0), node_sinh_9->GetInDataAnchor(0)); + + ge::TransOpDepthFusionPass pass; + ge::graphStatus status = pass.Run(graph); + EXPECT_EQ(ge::GRAPH_SUCCESS, status); + EXPECT_EQ(graph->GetDirectNode().size(), 16); + EXPECT_EQ(node_data->GetOutDataNodes().size(), 4); + EXPECT_EQ(node_cast_1->GetOutDataNodes().size(), 4); + EXPECT_EQ(node_cast_4->GetOutDataNodes().size(), 1); + EXPECT_EQ(node_cast_5->GetOutDataNodes().size(), 3); + EXPECT_EQ(node_cast_7->GetOutDataNodes().size(), 1); + EXPECT_EQ(node_transpose_1->GetOutDataNodes().size(), 1); + EXPECT_EQ(node_transpose_3->GetOutDataNodes().size(), 1); +} diff --git a/tests/ut/ge/graph/passes/transop_nearby_allreduce_fusion_pass_unittest.cc b/tests/ut/ge/graph/passes/transop_nearby_allreduce_fusion_pass_unittest.cc new file mode 100644 index 00000000..0e144432 --- /dev/null +++ b/tests/ut/ge/graph/passes/transop_nearby_allreduce_fusion_pass_unittest.cc @@ -0,0 +1,372 @@ +/** 
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/transop_nearby_allreduce_fusion_pass.h" + +#include +#include + +#include "common/ge_inner_error_codes.h" +#include "graph/passes/addn_pass.h" + +namespace ge { + +namespace { + +class NodeBuilder { + public: + NodeBuilder(const string &name, const string &type) { op_desc_ = std::make_shared(name, type); } + NodeBuilder &AddInputDesc(std::initializer_list shape, Format format, DataType data_type, size_t count = 1) { + GeTensorDesc tensor_desc; + tensor_desc.SetShape(GeShape(vector(shape))); + tensor_desc.SetFormat(format); + tensor_desc.SetDataType(data_type); + for (int i = 0; i < count; i++) { + op_desc_->AddInputDesc(tensor_desc); + } + return *this; + } + NodeBuilder &AddOutputDesc(std::initializer_list shape, Format format, DataType data_type, + size_t count = 1) { + GeTensorDesc tensor_desc; + tensor_desc.SetShape(GeShape(vector(shape))); + tensor_desc.SetFormat(format); + tensor_desc.SetDataType(data_type); + for (int i = 0; i < count; i++) { + op_desc_->AddOutputDesc(tensor_desc); + } + return *this; + } + + NodePtr Build(const ComputeGraphPtr &graph) { + NodePtr node = graph->AddNode(op_desc_); + return node; + } + + private: + OpDescPtr op_desc_; +}; + +ComputeGraphPtr GetGraph1() { return nullptr; } + +ComputeGraphPtr GetGraph2() { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr node = nullptr; + 
graph->AddNode(node); + return graph; +} + +ComputeGraphPtr GetGraph3() { + // HcomAllReduce + ComputeGraphPtr graph = std::make_shared("test"); + NodeBuilder("HcomAllreduce3", HCOMALLREDUCE).Build(graph); + return graph; +} + +ComputeGraphPtr GetGraph4() { + /// TransData + /// | + /// HcomAllReduce + /// | + /// TransData + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr transdata1 = NodeBuilder("TransData1", TRANSDATA) + .AddInputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr allreduce = NodeBuilder("allreduce45", HCOMALLREDUCE) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr transdata2 = NodeBuilder("TransData2", TRANSDATA) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + GraphUtils::AddEdge(transdata1->GetOutDataAnchor(0), allreduce->GetInDataAnchor(0)); + GraphUtils::AddEdge(allreduce->GetOutDataAnchor(0), transdata2->GetInDataAnchor(0)); + return graph; +} + +ComputeGraphPtr GetGraph5() { + /// relu + /// | + /// TransData + /// | + /// HcomAllReduce + /// | + /// TransData + /// | + /// relu + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr relu1 = NodeBuilder("Relu1", RELU) + .AddInputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + NodePtr transdata1 = NodeBuilder("TransData1", TRANSDATA) + .AddInputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr allreduce = NodeBuilder("allreduce45", HCOMALLREDUCE) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr transdata2 = NodeBuilder("TransData2", TRANSDATA) + .AddInputDesc({1, 64, 1, 1}, 
FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + NodePtr relu2 = NodeBuilder("Relu2", RELU) + .AddInputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + GraphUtils::AddEdge(relu1->GetOutDataAnchor(0), transdata1->GetInDataAnchor(0)); + GraphUtils::AddEdge(transdata1->GetOutDataAnchor(0), allreduce->GetInDataAnchor(0)); + GraphUtils::AddEdge(allreduce->GetOutDataAnchor(0), transdata2->GetInDataAnchor(0)); + GraphUtils::AddEdge(transdata2->GetOutDataAnchor(0), relu2->GetInDataAnchor(0)); + return graph; +} + +ComputeGraphPtr GetGraph6() { + /// relu + /// | + /// TransData + /// | + /// HcomAllReduce + /// | + /// TransData + /// | + /// relu + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr relu1 = NodeBuilder("Relu1", RELU) + .AddInputDesc({1, 1, 1, 64}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({1, 1, 1, 64}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + NodePtr transdata1 = NodeBuilder("TransData1", TRANSDATA) + .AddInputDesc({1, 1, 1, 64}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr allreduce = NodeBuilder("allreduce45", HCOMALLREDUCE) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr transdata2 = NodeBuilder("TransData2", TRANSDATA) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + NodePtr relu2 = NodeBuilder("Relu2", RELU) + .AddInputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + GraphUtils::AddEdge(relu1->GetOutDataAnchor(0), transdata1->GetInDataAnchor(0)); + GraphUtils::AddEdge(transdata1->GetOutDataAnchor(0), allreduce->GetInDataAnchor(0)); + GraphUtils::AddEdge(allreduce->GetOutDataAnchor(0), 
transdata2->GetInDataAnchor(0)); + GraphUtils::AddEdge(transdata2->GetOutDataAnchor(0), relu2->GetInDataAnchor(0)); + return graph; +} + +ComputeGraphPtr GetGraph7(size_t symmetric_transdata_num, size_t asymmetric_transdata_num, size_t paired_others_num) { + /// TransData TransData ... MatMul ... + /// \ | / / / + /// HcomAllReduce + /// / | \ \ \ + /// TransData TransData ... RealDiv ... + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr allreduce = + NodeBuilder("allreduce6", HCOMALLREDUCE) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT, symmetric_transdata_num + asymmetric_transdata_num) + .AddInputDesc({5, 64}, FORMAT_NCHW, DT_FLOAT, paired_others_num) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT, symmetric_transdata_num + asymmetric_transdata_num) + .AddOutputDesc({5, 64}, FORMAT_NCHW, DT_FLOAT, paired_others_num) + .Build(graph); + + for (size_t i = 0; i < symmetric_transdata_num; i++) { + NodePtr transdata1 = NodeBuilder("TransData1", TRANSDATA) + .AddInputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr transdata2 = NodeBuilder("TransData2", TRANSDATA) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + GraphUtils::AddEdge(transdata1->GetOutDataAnchor(0), allreduce->GetInDataAnchor(i)); + GraphUtils::AddEdge(allreduce->GetOutDataAnchor(i), transdata2->GetInDataAnchor(0)); + } + + for (size_t i = 0; i < asymmetric_transdata_num; i++) { + NodePtr transdata1 = NodeBuilder("TransData1", TRANSDATA) + .AddInputDesc({1, 1, 1, 64}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr transdata2 = NodeBuilder("TransData2", TRANSDATA) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + GraphUtils::AddEdge(transdata1->GetOutDataAnchor(0), 
allreduce->GetInDataAnchor(i + symmetric_transdata_num)); + GraphUtils::AddEdge(allreduce->GetOutDataAnchor(i + symmetric_transdata_num), transdata2->GetInDataAnchor(0)); + } + + for (size_t i = 0; i < paired_others_num; i++) { + NodePtr matmul = NodeBuilder("matmul", MATMUL) + .AddInputDesc({32, 5}, FORMAT_NCHW, DT_FLOAT) + .AddInputDesc({32, 64}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({5, 64}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + NodePtr realDiv = NodeBuilder("realDiv", REALDIV) + .AddInputDesc({5, 64}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({5, 64}, FORMAT_NCHW, DT_FLOAT) + .Build(graph); + GraphUtils::AddEdge(matmul->GetOutDataAnchor(0), + allreduce->GetInDataAnchor(i + symmetric_transdata_num + asymmetric_transdata_num)); + GraphUtils::AddEdge(allreduce->GetOutDataAnchor(i + symmetric_transdata_num + asymmetric_transdata_num), + realDiv->GetInDataAnchor(0)); + } + return graph; +} + +ComputeGraphPtr GetGraph8() { + /// TransData + /// | + /// HcomAllReduce + /// | + /// TransData + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr allreduce = + NodeBuilder("allreduce45", HCOMALLREDUCE).AddOutputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT).Build(graph); + NodePtr transdata2 = NodeBuilder("TransData2", TRANSDATA) + .AddInputDesc({1, 64, 1, 1}, FORMAT_NCHW, DT_FLOAT) + .AddOutputDesc({1, 4, 1, 1, 16}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + GraphUtils::AddEdge(allreduce->GetOutDataAnchor(0), transdata2->GetInDataAnchor(0)); + return graph; +} + +TEST(UtestTransopNearbyAllreduceFusionPass, test1_null_graph) { + ComputeGraphPtr graph = GetGraph1(); + GEPass ge_pass(graph); + TransOpNearbyAllreduceFusionPass transop_nearby_allreduce_fusion_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("TransOpNearbyAllreduceFusionPass", &transop_nearby_allreduce_fusion_pass); + EXPECT_EQ(ge_pass.Run(names_to_pass), INTERNAL_ERROR); +} + +TEST(UtestTransopNearbyAllreduceFusionPass, test2_null_node) { + ComputeGraphPtr graph = GetGraph2(); + 
GEPass ge_pass(graph); + TransOpNearbyAllreduceFusionPass transop_nearby_allreduce_fusion_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("TransOpNearbyAllreduceFusionPass", &transop_nearby_allreduce_fusion_pass); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); +} + +TEST(UtestTransopNearbyAllreduceFusionPass, test3_OnlyAllreduce) { + ComputeGraphPtr graph = GetGraph3(); + GEPass ge_pass(graph); + TransOpNearbyAllreduceFusionPass transop_nearby_allreduce_fusion_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("TransOpNearbyAllreduceFusionPass", &transop_nearby_allreduce_fusion_pass); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 1); +} + +TEST(UtestTransopNearbyAllreduceFusionPass, test4_all_reduce_with_trans_data) { + /// TransData + /// | + /// HcomAllReduce + /// | + /// TransData + ComputeGraphPtr graph = GetGraph4(); + GEPass ge_pass(graph); + TransOpNearbyAllreduceFusionPass transop_nearby_allreduce_fusion_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("TransOpNearbyAllreduceFusionPass", &transop_nearby_allreduce_fusion_pass); + Status ret = ge_pass.Run(names_to_pass); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 1); +} + +TEST(UtestTransopNearbyAllreduceFusionPass, test5_all_reduce_with_asymmetric_trans_data_and_relu) { + /// relu + /// | + /// TransData + /// | + /// HcomAllReduce + /// | + /// TransData + /// | + /// relu + ComputeGraphPtr graph = GetGraph5(); + GEPass ge_pass(graph); + TransOpNearbyAllreduceFusionPass transop_nearby_allreduce_fusion_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("TransOpNearbyAllreduceFusionPass", &transop_nearby_allreduce_fusion_pass); + Status ret = ge_pass.Run(names_to_pass); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 3); +} + +TEST(UtestTransopNearbyAllreduceFusionPass, test6_all_reduce_with_asymmetric_trans_data_and_relu) { + /// relu + /// | + /// TransData + 
/// | + /// HcomAllReduce + /// | + /// TransData + /// | + /// relu + ComputeGraphPtr graph = GetGraph6(); + GEPass ge_pass(graph); + TransOpNearbyAllreduceFusionPass transop_nearby_allreduce_fusion_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("TransOpNearbyAllreduceFusionPass", &transop_nearby_allreduce_fusion_pass); + Status ret = ge_pass.Run(names_to_pass); + EXPECT_EQ(ret, SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), 5); +} + +TEST(UtestTransopNearbyAllreduceFusionPass, test7_all_reduce_with_multiple_trans_datas_and_other_ops) { + /// TransData TransData ... MatMul ... + /// \ | / / / + /// HcomAllReduce + /// / | \ \ \ + /// TransData TransData ... RealDiv ... + size_t symmetric_transdata_num = 20; + size_t asymmetric_transdata_num = 20; + size_t paired_others_num = 20; + ComputeGraphPtr graph = GetGraph7(symmetric_transdata_num, asymmetric_transdata_num, paired_others_num); + GEPass ge_pass(graph); + TransOpNearbyAllreduceFusionPass transop_nearby_allreduce_fusion_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("TransOpNearbyAllreduceFusionPass", &transop_nearby_allreduce_fusion_pass); + EXPECT_EQ(ge_pass.Run(names_to_pass), SUCCESS); + EXPECT_EQ(graph->GetAllNodes().size(), (asymmetric_transdata_num + paired_others_num) * 2 + 1); +} + +TEST(UtestTransopNearbyAllreduceFusionPass, test8_in_and_out_data_anchor_are_not_equal) { + /// HcomAllReduce + /// | + /// TransData + ComputeGraphPtr graph = GetGraph8(); + GEPass ge_pass(graph); + graph->GetAllNodes().at(0)->SetOwnerComputeGraph(nullptr); + TransOpNearbyAllreduceFusionPass transop_nearby_allreduce_fusion_pass; + NamesToPass names_to_pass; + names_to_pass.emplace_back("TransOpNearbyAllreduceFusionPass", &transop_nearby_allreduce_fusion_pass); + Status ret = ge_pass.Run(names_to_pass); + EXPECT_EQ(ret, INTERNAL_ERROR); +} + +} // namespace +} // namespace ge diff --git a/tests/ut/ge/graph/passes/unused_and_isolated_op_remove_pass_unittest.cc 
b/tests/ut/ge/graph/passes/unused_and_isolated_op_remove_pass_unittest.cc new file mode 100644 index 00000000..cb174ebd --- /dev/null +++ b/tests/ut/ge/graph/passes/unused_and_isolated_op_remove_pass_unittest.cc @@ -0,0 +1,180 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "graph/passes/unused_op_remove_pass.h" + +#include +#include "graph/passes/isolated_op_remove_pass.h" +#include "pass_manager.h" + +using namespace ge; + +class UtestGraphPassesUnusedAndIsolatedOpRemovePass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc; + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(op_desc); + return node; + } +}; + +TEST_F(UtestGraphPassesUnusedAndIsolatedOpRemovePass, transpose_and_reshape) { + ComputeGraphPtr graph = std::make_shared("test"); + + NodePtr data_node = AddNode(graph, "DATA", DATA); + NodePtr transpose_node = AddNode(graph, "transpose1", PERMUTE); + NodePtr reshape_node = AddNode(graph, "reshape1", RESHAPE); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), 
transpose_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(transpose_node->GetOutDataAnchor(0), reshape_node->GetInDataAnchor(0)); + + ge::UnusedOpRemovePass unused_pass(FMK_TYPE_T); + ge::IsolatedOpRemovePass isolate_pass; + vector passes = {&unused_pass, &isolate_pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(SUCCESS, status); + NodePtr found_node = graph->FindNode("transpose1"); + EXPECT_EQ(transpose_node, found_node); +} + +TEST_F(UtestGraphPassesUnusedAndIsolatedOpRemovePass, transpose_and_squeeze) { + ComputeGraphPtr graph = std::make_shared("test"); + + NodePtr data_node = AddNode(graph, "DATA", DATA); + NodePtr transpose_node = AddNode(graph, "transpose1", PERMUTE); + NodePtr squeeze_node = AddNode(graph, "squeeze1", SQUEEZE); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), transpose_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(transpose_node->GetOutDataAnchor(0), squeeze_node->GetInDataAnchor(0)); + + ge::UnusedOpRemovePass unused_pass(FMK_TYPE_T); + ge::IsolatedOpRemovePass isolate_pass; + vector passes = {&unused_pass, &isolate_pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(SUCCESS, status); + NodePtr found_node = graph->FindNode("transpose1"); + EXPECT_EQ(transpose_node, found_node); +} + +TEST_F(UtestGraphPassesUnusedAndIsolatedOpRemovePass, transpose_and_conv) { + ComputeGraphPtr graph = std::make_shared("test"); + + NodePtr data_node = AddNode(graph, "DATA", DATA); + + NodePtr transpose_node = AddNode(graph, "transpose1", PERMUTE); + vector order_list = {0, 2, 3, 1}; + AttrUtils::SetListInt(transpose_node->GetOpDesc(), PERMUTE_ATTR_ORDER, order_list); + AttrUtils::SetInt(transpose_node->GetOpDesc(), ATTR_NAME_FORMAT, (int64_t)DT_FLOAT); + + NodePtr conv_node = AddNode(graph, "conv1", CONVOLUTION); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), transpose_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(transpose_node->GetOutDataAnchor(0), conv_node->GetInDataAnchor(0)); + + NodePtr 
conv2_node = AddNode(graph, "conv2", CONVOLUTION); + GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), conv2_node->GetInDataAnchor(0)); + + ge::UnusedOpRemovePass unused_pass(FMK_TYPE_T); + ge::IsolatedOpRemovePass isolate_pass; + vector passes = {&unused_pass, &isolate_pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(SUCCESS, status); + NodePtr found_node0 = graph->FindNode("transpose1"); + // EXPECT_EQ(nullptr, found_node0); + NodePtr found_node = graph->FindNode("conv1"); + EXPECT_EQ(conv_node, found_node); +} + +TEST_F(UtestGraphPassesUnusedAndIsolatedOpRemovePass, transpose_and_conv3) { + ComputeGraphPtr graph = std::make_shared("test"); + + NodePtr data_node = AddNode(graph, "DATA", DATA); + + NodePtr transpose_node = AddNode(graph, "transpose1", PERMUTE); + vector order_list = {0, 1, 3, 2}; + AttrUtils::SetListInt(transpose_node->GetOpDesc(), PERMUTE_ATTR_ORDER, order_list); + AttrUtils::SetInt(transpose_node->GetOpDesc(), ATTR_NAME_FORMAT, (int64_t)DT_FLOAT); + + NodePtr conv_node = AddNode(graph, "conv1", CONVOLUTION); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), transpose_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(transpose_node->GetOutDataAnchor(0), conv_node->GetInDataAnchor(0)); + + NodePtr conv2_node = AddNode(graph, "conv2", CONVOLUTION); + GraphUtils::AddEdge(conv_node->GetOutDataAnchor(0), conv2_node->GetInDataAnchor(0)); + + ge::UnusedOpRemovePass unused_pass(FMK_TYPE_T); + ge::IsolatedOpRemovePass isolate_pass; + vector passes = {&unused_pass, &isolate_pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(SUCCESS, status); + NodePtr found_node0 = graph->FindNode("transpose1"); + EXPECT_EQ(transpose_node, found_node0); + NodePtr found_node = graph->FindNode("conv1"); + EXPECT_EQ(conv_node, found_node); +} + +TEST_F(UtestGraphPassesUnusedAndIsolatedOpRemovePass, cast_and_cast) { + ComputeGraphPtr graph = std::make_shared("test"); + + NodePtr data_node = AddNode(graph, "DATA", DATA); + NodePtr 
conv3_node = AddNode(graph, "cast3", CAST); + NodePtr transpose_node = AddNode(graph, "cast1", CAST); + NodePtr transpose_node_1 = AddNode(graph, "cast2", CAST); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), conv3_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(conv3_node->GetOutDataAnchor(0), transpose_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(transpose_node->GetOutDataAnchor(0), transpose_node_1->GetInDataAnchor(0)); + + ge::UnusedOpRemovePass unused_pass(FMK_TYPE_T); + ge::IsolatedOpRemovePass isolate_pass; + vector passes = {&unused_pass, &isolate_pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(SUCCESS, status); +} + +TEST_F(UtestGraphPassesUnusedAndIsolatedOpRemovePass, remove_parent_node) { + ComputeGraphPtr graph = std::make_shared("test"); + vector node_vec; + + NodePtr data_node = AddNode(graph, "DATA", DATA); + NodePtr conv3_node = AddNode(graph, "cast3", CAST); + NodePtr transpose_node = AddNode(graph, "cast1", CAST); + NodePtr transpose_node_1 = AddNode(graph, "cast2", CAST); + + GraphUtils::AddEdge(data_node->GetOutDataAnchor(0), conv3_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(conv3_node->GetOutDataAnchor(0), transpose_node->GetInDataAnchor(0)); + GraphUtils::AddEdge(transpose_node->GetOutDataAnchor(0), transpose_node_1->GetInDataAnchor(0)); + + ge::UnusedOpRemovePass unused_pass(FMK_TYPE_T); + ge::IsolatedOpRemovePass isolate_pass; + vector passes = {&unused_pass, &isolate_pass}; + Status status = PassManager::Run(graph, passes); + EXPECT_EQ(SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/unused_const_pass_unittest.cc b/tests/ut/ge/graph/passes/unused_const_pass_unittest.cc new file mode 100644 index 00000000..1d6636c3 --- /dev/null +++ b/tests/ut/ge/graph/passes/unused_const_pass_unittest.cc @@ -0,0 +1,81 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with 
the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#define protected public +#define private public +#include "graph/passes/unused_const_pass.h" + +#include "common/ge_inner_error_codes.h" +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/compute_graph.h" +#include "graph/node.h" +#include "graph/op_desc.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "inc/pass_manager.h" +#undef protected +#undef private + +using namespace testing; +namespace ge { +class UtestGraphPassesUnusedConstPass : public Test { + protected: + NodePtr AddNode(ComputeGraphPtr graph, const string &name, const string &type, int32_t in_anchors_num = 1, + int32_t out_anchors_num = 1) { + GeTensorDesc tensor_desc; + OpDescPtr op_desc = make_shared(name, type); + for (int32_t i = 0; i < in_anchors_num; i++) { + op_desc->AddInputDesc(tensor_desc); + } + for (int32_t i = 0; i < out_anchors_num; i++) { + op_desc->AddOutputDesc(tensor_desc); + } + + NodePtr node = graph->AddNode(op_desc); + return node; + } +}; + +TEST_F(UtestGraphPassesUnusedConstPass, unused_const_remove_succ) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr unused_const_node = AddNode(graph, "unusedConst", UNUSEDCONST); + NodePtr reduce_min_node = AddNode(graph, "reduceMin", REDUCEMIN); + + GraphUtils::AddEdge(unused_const_node->GetOutDataAnchor(0), reduce_min_node->GetInDataAnchor(0)); + + UnusedConstPass 
unused_const_pass; + Status status = unused_const_pass.Run(unused_const_node); + EXPECT_EQ(SUCCESS, status); + NodePtr found_node = graph->FindNode("unusedConst"); + EXPECT_EQ(nullptr, found_node); +} + +TEST_F(UtestGraphPassesUnusedConstPass, unused_const_remove_failed) { + ComputeGraphPtr graph = std::make_shared("test"); + NodePtr unused_const_node = shared_ptr(new (std::nothrow) Node(nullptr, graph)); + + UnusedConstPass unused_const_pass; + Status status = unused_const_pass.Run(unused_const_node); + EXPECT_EQ(ge::PARAM_INVALID, status); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/update_net_output_pass_unittest.cc b/tests/ut/ge/graph/passes/update_net_output_pass_unittest.cc new file mode 100644 index 00000000..97498616 --- /dev/null +++ b/tests/ut/ge/graph/passes/update_net_output_pass_unittest.cc @@ -0,0 +1,96 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#define protected public +#define private public +#include "graph/passes/update_net_output_pass.h" + +#include "common/op/ge_op_utils.h" +#include "common/types.h" +#include "graph/anchor.h" +#include "graph/attr_value.h" +#include "graph/compute_graph.h" +#include "graph/op_desc.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/tensor_utils.h" +#include "graph_builder_utils.h" +#include "omg/omg_inner_types.h" +#undef protected +#undef private + +using namespace testing; + +namespace ge { +class UtestNodePassesUpdateNetoutputPass : public Test { + protected: + UtestNodePassesUpdateNetoutputPass() = default; +}; + +namespace { +/// net_output1 +/// | +/// addn +/// / \ +/// / \ +/// const1 const2 +ComputeGraphPtr BuildGraph1() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", CONSTANT, 0, 1); + auto const2 = builder.AddNode("const2", CONSTANT, 0, 1); + auto addn1 = builder.AddNode("addn1", ADDN, 2, 1); + auto net_output1 = builder.AddNode("net_output", NETOUTPUT, 1, 1); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddDataEdge(addn1, 0, net_output1, 0); + return builder.GetGraph(); +} +} // namespace + +TEST_F(UtestNodePassesUpdateNetoutputPass, update_netoutput_succ) { + auto graph = BuildGraph1(); + auto net_output = graph->FindNode("net_output"); + EXPECT_NE(net_output, nullptr); + + auto tensor = net_output->GetOpDesc()->GetOutputDesc(0); + EXPECT_EQ(tensor.GetDataType(), DT_FLOAT); + EXPECT_EQ(tensor.GetFormat(), FORMAT_NCHW); + + ge::NodePtr node = nullptr; + ReUpdateNetOutputPass re_update_net_output_pass; + Status status = re_update_net_output_pass.Run(node); + EXPECT_EQ(FAILED, status); + + status = re_update_net_output_pass.Run(net_output); + EXPECT_EQ(SUCCESS, status); + + domi::GetContext().output_type = "FP17"; + status = 
re_update_net_output_pass.Run(net_output); + EXPECT_EQ(SUCCESS, status); + + domi::GetContext().output_type = "FP16"; + status = re_update_net_output_pass.Run(net_output); + EXPECT_EQ(SUCCESS, status); + auto in_desc = net_output->GetOpDesc()->GetInputDesc(0); + EXPECT_EQ(in_desc.GetDataType(), DT_FLOAT16); + auto out_desc = net_output->GetOpDesc()->GetOutputDesc(0); + EXPECT_EQ(out_desc.GetDataType(), DT_FLOAT16); +} +} // namespace ge diff --git a/tests/ut/ge/graph/passes/variable_op_pass_unittest.cc b/tests/ut/ge/graph/passes/variable_op_pass_unittest.cc new file mode 100644 index 00000000..77428549 --- /dev/null +++ b/tests/ut/ge/graph/passes/variable_op_pass_unittest.cc @@ -0,0 +1,1158 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include + +#include "common/types.h" + +#define protected public +#define private public +#include "graph/passes/variable_op_pass.h" + +#include "common/op/ge_op_utils.h" +#include "graph/utils/op_desc_utils.h" +#include "graph/utils/attr_utils.h" +#include "graph/utils/graph_utils.h" +#include "graph/op_desc.h" +#include "graph/types.h" +#include "graph/manager/graph_context.h" +#include "graph/optimize/graph_optimize.h" +#include "graph/manager/util/variable_accelerate_ctrl.h" +#include "graph/manager/graph_mem_allocator.h" +#include "graph/manager/graph_var_manager.h" +#include "graph_builder_utils.h" +#include "cce/dnn_struct_base.hpp" +#include "common/formats/format_transfers/format_transfer.h" +#include "common/formats/format_transfers/format_transfer_nchw_nc1hwc0.h" +#include "common/formats/format_transfers/format_transfer_nhwc_nc1hwc0.h" +#include "common/formats/format_transfers/datatype_transfer.h" +#undef private +#undef protected + +using namespace std; +using namespace ge; +using namespace cce; + +class UtestVariableOpPassUnit : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + // AUTO GEN PLEASE DO NOT MODIFY IT +}; +namespace { + +/// c +/// var1ref1 --> netoutput1 +/// \ / +/// transdata2 +/// | +/// assign1 +/// / \ +/// transdata1 | +/// | | +/// var1 const1 +ComputeGraphPtr BuildGraph1() { + auto builder = ut::GraphBuilder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto const1 = + builder.AddNode("const1", "Const", 0, 1, FORMAT_NC1HWC0, DT_FLOAT, std::vector({1, 1, 224, 224, 16})); + auto transdata1 = builder.AddNode("transdata1", "TransData", 1, 1, FORMAT_NC1HWC0, DT_FLOAT, + std::vector({1, 1, 224, 224, 16})); + transdata1->GetOpDesc()->MutableInputDesc(0)->SetFormat(FORMAT_NCHW); + transdata1->GetOpDesc()->MutableInputDesc(0)->SetShape(GeShape(std::vector({1, 3, 224, 224}))); + auto assign1 = + builder.AddNode("assign1", "Assign", 2, 1, 
FORMAT_NC1HWC0, DT_FLOAT, std::vector({1, 1, 224, 224, 16})); + auto transdata2 = builder.AddNode("transdata2", "TransData", 1, 1, FORMAT_NC1HWC0, DT_FLOAT, + std::vector({1, 1, 224, 224, 16})); + transdata2->GetOpDesc()->MutableOutputDesc(0)->SetFormat(FORMAT_NCHW); + transdata2->GetOpDesc()->MutableOutputDesc(0)->SetShape(GeShape(std::vector({1, 3, 224, 224}))); + auto var1ref1 = builder.AddNode("var1ref1", "Variable", 1, 0); + AttrUtils::SetStr(var1ref1->GetOpDesc(), REF_VAR_SRC_VAR_NAME, "var1"); + auto netoutput1 = builder.AddNode("netoutput1", "Netoutput", 2, 0); + + builder.AddDataEdge(var1, 0, transdata1, 0); + builder.AddDataEdge(const1, 0, assign1, 1); + builder.AddDataEdge(transdata1, 0, assign1, 0); + builder.AddDataEdge(assign1, 0, transdata2, 0); + builder.AddDataEdge(transdata2, 0, var1ref1, 0); + builder.AddDataEdge(transdata2, 0, netoutput1, 0); + builder.AddControlEdge(var1ref1, netoutput1); + + return builder.GetGraph(); +} + +/// conv1 +/// | +/// reshape1 +/// | +/// var1 +ComputeGraphPtr BuildGraph2() { + auto builder = ut::GraphBuilder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1, FORMAT_ND, DT_FLOAT, std::vector({8 * 8 * 3, 2})); + auto reshape1 = + builder.AddNode("reshape1", "Reshape", 2, 1, FORMAT_HWCN, DT_FLOAT, std::vector({8, 8, 3, 2})); + reshape1->GetOpDesc()->MutableInputDesc(0)->SetFormat(FORMAT_ND); + reshape1->GetOpDesc()->MutableInputDesc(0)->SetShape(GeShape(std::vector({8 * 8 * 3, 2}))); + auto conv1 = builder.AddNode("conv1", "Conv2D", 2, 1, FORMAT_HWCN, DT_FLOAT, std::vector({8, 8, 3, 2})); + + builder.AddDataEdge(var1, 0, reshape1, 0); + builder.AddDataEdge(reshape1, 0, conv1, 1); + + return builder.GetGraph(); +} + +/// conv1 +/// | +/// reformat1 +/// | +/// var1 +ComputeGraphPtr BuildGraph3() { + auto builder = ut::GraphBuilder("g1"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1, FORMAT_NCHW, DT_FLOAT, std::vector({8, 8, 3, 2})); + auto reformat1 = + builder.AddNode("reformat1", "ReFormat", 1, 
1, FORMAT_ND, DT_FLOAT, std::vector({8, 8, 3, 2})); + reformat1->GetOpDesc()->MutableInputDesc(0)->SetFormat(FORMAT_NCHW); + reformat1->GetOpDesc()->MutableInputDesc(0)->SetShape(GeShape(std::vector({8, 8, 3, 2}))); + auto conv1 = builder.AddNode("conv1", "Conv2D", 2, 1, FORMAT_ND, DT_FLOAT, std::vector({8, 8, 3, 2})); + + builder.AddDataEdge(var1, 0, reformat1, 0); + builder.AddDataEdge(reformat1, 0, conv1, 1); + + return builder.GetGraph(); +} + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + + ge::OpDescPtr op_desc_; +}; + +std::string var_ref_name_0; + +ge::NodePtr CreatVariableRef(ge::NodePtr &final_writable_node, ge::NodePtr &var_node) { + GELOGI("Create VarRef Op: final_writable_node: [%s] var_node: [%s]>>>>", final_writable_node->GetName().c_str(), + var_node->GetName().c_str()); + + static uint32_t var_ref_count = 0; + std::stringstream var_ref_name; + var_ref_name << "_to_" << final_writable_node->GetName() << "_REF_" << var_ref_count++; + + 
OpDescPtr var_op_desc = var_node->GetOpDesc(); + GE_CHK_BOOL_EXEC(var_op_desc != nullptr, return nullptr, "get var opdesc is nullptr"); + OpDescPtr var_ref_op_desc = nullptr; + GE_MAKE_SHARED(var_ref_op_desc = + std::make_shared(var_node->GetName() + var_ref_name.str().c_str(), var_op_desc->GetType()), + return nullptr); + + var_ref_op_desc->AddOutputDesc(var_op_desc->GetOutputDesc(0)); + var_ref_op_desc->AddInputDesc(var_op_desc->GetOutputDesc(0)); + + const map var_attr_value = var_op_desc->GetAllAttrs(); + for (auto const &attrIt : var_attr_value) { + var_ref_op_desc->SetAttr(attrIt.first, attrIt.second); + } + + NodePtr var_ref_node = var_node->GetOwnerComputeGraph()->AddNode(var_ref_op_desc); + GE_CHK_BOOL_EXEC(var_ref_node != nullptr, return nullptr, "create var_REF_node failed") + + GE_IF_BOOL_EXEC(ge::AttrUtils::SetStr(var_ref_op_desc, REF_VAR_SRC_VAR_NAME, var_op_desc->GetName()), + GELOGI("Set node [%s] VAR_ATTR_VAR_IS_REF [%s]", var_ref_node->GetName().c_str(), + var_op_desc->GetName().c_str())); + var_ref_name_0 = var_ref_node->GetName(); + return var_ref_node; +} + +bool BuildComputeGraph0(ge::ComputeGraphPtr &graph) { + // graph = std::make_shared("test"); + + ge::NodePtr node_4d_new = + NodeBuilder("Node4D_new", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32).Build(graph); + + ge::NodePtr node_4d_to_5d_1_new = NodeBuilder("4d_to_5d_1_new", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_4d_to_5d_2_new = NodeBuilder("4d_to_5d_2_new", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + ge::GraphUtils::AddEdge(node_4d_new->GetOutDataAnchor(0), node_4d_to_5d_1_new->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d_new->GetOutDataAnchor(0), node_4d_to_5d_2_new->GetInDataAnchor(0)); + + // Node4D + ge::NodePtr node_4d = + 
NodeBuilder("Node4D", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1 = NodeBuilder("4d_to_5d_1", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_4d_to_5d_2 = NodeBuilder("4d_to_5d_2", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // Node5D + ge::NodePtr node_5d_1 = + NodeBuilder("5D_1", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_5d_2 = + NodeBuilder("5D_2", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_2->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_5d_1->GetOutDataAnchor(0), node_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d_to_5d_2->GetOutDataAnchor(0), node_5d_2->GetInDataAnchor(0)); + + // Node4D + ge::NodePtr node_4d_nhwc = + NodeBuilder("Node4D_NHWC", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_NHWC, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1_nhwc = NodeBuilder("4d_to_5d_1_NHWC", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NHWC, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // Node5D + ge::NodePtr node_5d_1_nhwc = + NodeBuilder("5D_1_NHWC", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_4d_nhwc->GetOutDataAnchor(0), node_4d_to_5d_1_nhwc->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_5d_1_nhwc->GetOutDataAnchor(0), node_5d_1_nhwc->GetInDataAnchor(0)); + + // Node4D + ge::NodePtr node_4d_hwcn = + 
NodeBuilder("Node4D_HWCN", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_HWCN, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1_hwcn = NodeBuilder("4d_to_5d_1_HWCN", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_HWCN, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // Node5D + ge::NodePtr node_5d_1_hwcn = + NodeBuilder("5D_1_HWCN", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_4d_hwcn->GetOutDataAnchor(0), node_4d_to_5d_1_hwcn->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_5d_1_hwcn->GetOutDataAnchor(0), node_5d_1_hwcn->GetInDataAnchor(0)); + + ge::NodePtr node_4d_chwn = + NodeBuilder("Node4D_CHWN", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_CHWN, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1_chwn = NodeBuilder("4d_to_5d_1_CHWN", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_CHWN, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // Node5D + ge::NodePtr node_5d_1_chwn = + NodeBuilder("5D_1_CHWN", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_4d_chwn->GetOutDataAnchor(0), node_4d_to_5d_1_chwn->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_5d_1_chwn->GetOutDataAnchor(0), node_5d_1_chwn->GetInDataAnchor(0)); + + ge::NodePtr node_4d_d = + NodeBuilder("Node4D_D", VARIABLE).AddOutputDesc({1}, FORMAT_CHWN, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1_d = NodeBuilder("4d_to_5d_1_D", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_CHWN, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // Node5D + ge::NodePtr node_5d_1_d = + NodeBuilder("5D_1_D", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_apply_monetum = 
NodeBuilder("apply_monetum", APPLYMOMENTUM) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_5d_to_4d_1 = NodeBuilder("5d_to_4d_1", TRANSDATA) + .AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .Build(graph); + + ge::NodePtr node_ref = CreatVariableRef(node_5d_to_4d_1, node_4d); + + // add edge + ge::GraphUtils::AddEdge(node_4d_d->GetOutDataAnchor(0), node_4d_to_5d_1_d->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_5d_1_d->GetOutDataAnchor(0), node_5d_1_d->GetInDataAnchor(0)); + + if (ge::GraphUtils::AddEdge(node_apply_monetum->GetOutDataAnchor(0), node_5d_to_4d_1->GetInDataAnchor(0)) != + ge::SUCCESS) { + /// GELOGE(FAILED, "ge::GraphUtils::AddEdge(node_apply_monetum->GetOutDataAnchor(0), + /// node_5d_to_4d_1->GetInDataAnchor(0) ) Failed."); + }; + ge::GraphUtils::AddEdge(node_5d_to_4d_1->GetOutDataAnchor(0), node_ref->GetInDataAnchor(0)); + + return true; +} + +bool BuildComputeGraph1(ge::ComputeGraphPtr &graph) { + // Node4D + ge::NodePtr node_4d = + NodeBuilder("Node4D", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1 = NodeBuilder("4d_to_5d_1", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_4d_to_5d_2 = NodeBuilder("4d_to_5d_2", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // Node5D + ge::NodePtr node_5d_1 = + NodeBuilder("5D_1", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_5d_2 = + NodeBuilder("5D_2", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_5d_to_4d_1 = NodeBuilder("5d_to_4d_1", 
TRANSDATA) + .AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32) + .AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .Build(graph); + + ge::NodePtr node_apply_monetum = NodeBuilder("apply_monetum", APPLYMOMENTUM) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + ge::NodePtr node_ref = CreatVariableRef(node_5d_to_4d_1, node_4d); + + // add edge + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_2->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_5d_1->GetOutDataAnchor(0), node_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d_to_5d_2->GetOutDataAnchor(0), node_5d_2->GetInDataAnchor(0)); + + if (ge::GraphUtils::AddEdge(node_apply_monetum->GetOutDataAnchor(0), node_5d_to_4d_1->GetInDataAnchor(0)) != + ge::SUCCESS) { + /// GELOGE(FAILED, "ge::GraphUtils::AddEdge(node_apply_monetum->GetOutDataAnchor(0), + /// node_5d_to_4d_1->GetInDataAnchor(0) ) Failed."); + }; + ge::GraphUtils::AddEdge(node_5d_to_4d_1->GetOutDataAnchor(0), node_ref->GetInDataAnchor(0)); + + return true; +} + +bool BuildComputeGraph4(ge::ComputeGraphPtr &graph) { + // Node4D + ge::NodePtr node_4d = + NodeBuilder("Node4D", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1 = NodeBuilder("4d_to_5d_1", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_4d_to_5d_2 = NodeBuilder("4d_to_5d_2", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + // Node5D + ge::NodePtr node_5d_1 = + NodeBuilder("5D_1", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_5d_2 = + 
NodeBuilder("5D_2", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT).Build(graph); + + ge::NodePtr node_5d_to_4d_1 = NodeBuilder("5d_to_4d_1", TRANSDATA) + .AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32) + .AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .Build(graph); + + ge::NodePtr node_5d_to_4d_2 = NodeBuilder("5d_to_4d_2", TRANSDATA) + .AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32) + .AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .Build(graph); + + ge::NodePtr node_apply_monetum = NodeBuilder("apply_monetum", APPLYMOMENTUM) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + ge::NodePtr node_ref = CreatVariableRef(node_5d_to_4d_1, node_4d); + + // add edge + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_2->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_5d_1->GetOutDataAnchor(0), node_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d_to_5d_2->GetOutDataAnchor(0), node_5d_2->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_apply_monetum->GetOutDataAnchor(0), node_5d_to_4d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_5d_to_4d_1->GetOutDataAnchor(0), node_ref->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_5d_to_4d_2->GetOutDataAnchor(0), node_ref->GetInDataAnchor(0)); + + return true; +} + +bool BuildComputeGraph5(ge::ComputeGraphPtr &graph) { + // Node4D + ge::NodePtr node_4d = + NodeBuilder("Node4D", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32).Build(graph); + + return true; +} + +bool BuildComputeGraph6(ge::ComputeGraphPtr &graph) { + // Node4D + ge::NodePtr node_4d = + NodeBuilder("Node4D", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_5d_1 = NodeBuilder("4d_to_5d_1", TRANSDATA) + 
.AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_float_to_int_1 = NodeBuilder("float_to_int_1", CAST) + .AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + ge::NodePtr node_4d_to_5d_2 = NodeBuilder("4d_to_5d_2", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .Build(graph); + + ge::NodePtr node_float_to_int_2 = NodeBuilder("float_to_int_2", CAST) + .AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_FLOAT) + .AddOutputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32) + .Build(graph); + + // Node5D + ge::NodePtr node_5d_1 = + NodeBuilder("5D_1", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32).Build(graph); + + ge::NodePtr node_5d_2 = + NodeBuilder("5D_2", RELU).AddInputDesc({1, 2, 3, 4, 5}, FORMAT_NC1HWC0, DT_INT32).Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_5d_2->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_5d_1->GetOutDataAnchor(0), node_float_to_int_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_4d_to_5d_2->GetOutDataAnchor(0), node_float_to_int_2->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_float_to_int_1->GetOutDataAnchor(0), node_5d_1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(node_float_to_int_2->GetOutDataAnchor(0), node_5d_2->GetInDataAnchor(0)); + + return true; +} +} // namespace + +bool BuildComputeGraph7(ge::ComputeGraphPtr &graph) { + // Node4D + ge::NodePtr node_4d = + NodeBuilder("Node4D", VARIABLE).AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32).Build(graph); + + // NodeTrans4DTo5D + ge::NodePtr node_4d_to_4d_1 = NodeBuilder("4d_to_4d_1", TRANSDATA) + .AddInputDesc({1, 2, 3, 4}, FORMAT_NCHW, 
DT_INT32) + .AddOutputDesc({1, 2, 3, 4}, FORMAT_NCHW, DT_INT32) + .Build(graph); + // Node5D + ge::NodePtr node_4d_1 = NodeBuilder("4D_1", RELU).AddInputDesc({1, 2, 3, 4}, FORMAT_NC1HWC0, DT_INT32).Build(graph); + + // add edge + ge::GraphUtils::AddEdge(node_4d->GetOutDataAnchor(0), node_4d_to_4d_1->GetInDataAnchor(0)); + + ge::GraphUtils::AddEdge(node_4d_to_4d_1->GetOutDataAnchor(0), node_4d_1->GetInDataAnchor(0)); + return true; +} + +class VariableOpPassSimulator { + public: + bool DoTest0() { + ge::ComputeGraphPtr compute_graph = std::make_shared("0"); + const std::string var_name = "Node4D"; + + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + std::vector dims(4, 20); + ge::GeShape shape(dims); + + MemManager::Instance().Initialize(std::vector({RT_MEMORY_HBM})); + VarManager::Instance(session_id)->Init(session_version, session_id, device_id, job_id); + + BuildComputeGraph0(compute_graph); + + std::vector var_names = {"Node4D_new", "Node4D", "Node4D_NHWC", + "Node4D_HWCN", "Node4D_CHWN", "Node4D_D"}; + for (auto name : var_names) { + auto var_node = compute_graph->FindNode(name); + auto var_tensor_desc = var_node->GetOpDesc()->GetOutputDesc(0); + + uint8_t *dev_ptr = nullptr; + ge::VarManager::Instance(session_id)->AssignVarMem(name, var_tensor_desc, RT_MEMORY_HBM); + ge::VarManager::Instance(session_id)->SetVarAddr(name, var_tensor_desc, dev_ptr, RT_MEMORY_HBM); + } + + ge::GraphNodePtr graph_node = make_shared(0); + compute_graph->InferShapeInNeed(); + graph_node->SetComputeGraph(compute_graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), compute_graph); + ge::formats::FormatTransferNchwNc1hwc0 ClassObj; + VariableOpPass pass(&ctrl); + pass.Run(compute_graph); + + MemManager::Instance().Finalize(); + + return 
CheckTest0(compute_graph); + } + + bool DoTest1() { + ge::ComputeGraphPtr compute_graph = std::make_shared("0"); + const std::string var_name = "Node4D"; + + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + std::vector dims(4, 20); + ge::GeShape shape(dims); + VarManager::Instance(session_id)->Init(session_version, session_id, device_id, job_id); + + BuildComputeGraph1(compute_graph); + + auto var_node = compute_graph->FindNode(var_name); + auto var_tensor_desc = var_node->GetOpDesc()->GetOutputDesc(0); + + uint8_t *dev_ptr = nullptr; + + ge::GraphNodePtr graph_node = make_shared(0); + compute_graph->InferShapeInNeed(); + graph_node->SetComputeGraph(compute_graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), compute_graph); + VariableOpPass pass(&ctrl); + pass.Run(compute_graph); + return CheckTest1(compute_graph); + } + + bool DoTest2() { + VarAccelerateCtrl ctrl; + VariableOpPass pass(&ctrl); + return pass.Run(nullptr) == ge::INTERNAL_ERROR; + } + + bool DoTest3() { + std::vector mem_type; + std::map empty_options; + mem_type.push_back(RT_MEMORY_HBM); + MemManager::Instance().Initialize(mem_type); + + ge::ComputeGraphPtr compute_graph = std::make_shared("0"); + + std::vector var_names = {"Node4D", "Node4D_NHWC", "Node4D_HWCN", "Node4D_CHWN", "Node4D_D"}; + std::vector tensor_descs; + + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + compute_graph->SetSessionID(session_id); + std::vector dims(4, 20); + ge::GeShape shape(dims); + VarManager::Instance(session_id)->Init(session_version, session_id, device_id, job_id); + + BuildComputeGraph0(compute_graph); + for (auto var_name : var_names) { + auto var_node = compute_graph->FindNode(var_name); + auto 
var_tensor_desc = var_node->GetOpDesc()->GetOutputDesc(0); + + uint8_t *dev_ptr = nullptr; + ge::VarManager::Instance(session_id)->AssignVarMem(var_name, var_tensor_desc, RT_MEMORY_HBM); + ge::VarManager::Instance(session_id)->SetVarAddr(var_name, var_tensor_desc, dev_ptr, RT_MEMORY_HBM); + } + + ge::GraphNodePtr graph_node = make_shared(0); + compute_graph->InferShapeInNeed(); + graph_node->SetComputeGraph(compute_graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), compute_graph); + VariableOpPass pass(&ctrl); + auto ret = pass.Run(compute_graph); + MemManager::Instance().Finalize(); + return ret == GE_GRAPH_VARIABLE_OP_PASS_FAILED; + } + + bool DoTest4() { + ge::ComputeGraphPtr compute_graph = std::make_shared("0"); + const std::string var_name = "Node4D"; + + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + std::vector dims(4, 20); + ge::GeShape shape(dims); + VarManager::Instance(session_id)->Init(session_version, session_id, device_id, job_id); + + BuildComputeGraph4(compute_graph); + + auto var_node = compute_graph->FindNode(var_name); + auto var_tensor_desc = var_node->GetOpDesc()->GetOutputDesc(0); + + uint8_t *dev_ptr = nullptr; + ge::GraphNodePtr graph_node = make_shared(0); + compute_graph->InferShapeInNeed(); + graph_node->SetComputeGraph(compute_graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), compute_graph); + VariableOpPass pass(&ctrl); + auto ret = pass.Run(compute_graph); + return ret == ge::SUCCESS; + } + + bool DoTest5() { + ge::ComputeGraphPtr compute_graph = std::make_shared("0"); + 
BuildComputeGraph5(compute_graph); + const std::string var_name = "Node4D"; + + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + std::vector dims(4, 20); + ge::GeShape shape(dims); + VarManager::Instance(session_id)->Init(session_version, session_id, device_id, job_id); + + BuildComputeGraph4(compute_graph); + + auto var_node = compute_graph->FindNode(var_name); + auto var_tensor_desc = var_node->GetOpDesc()->GetOutputDesc(0); + + uint8_t *dev_ptr = nullptr; + + ge::GraphNodePtr graph_node = make_shared(0); + compute_graph->InferShapeInNeed(); + graph_node->SetComputeGraph(compute_graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), compute_graph); + VariableOpPass pass(&ctrl); + auto ret = pass.Run(compute_graph); + + return ret == ge::SUCCESS; + } + + bool DoTest6() { + ge::ComputeGraphPtr compute_graph = std::make_shared("0"); + const std::string var_name = "Node4D"; + + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + std::vector dims(4, 20); + ge::GeShape shape(dims); + MemManager::Instance().Initialize(std::vector({RT_MEMORY_HBM})); + VarManager::Instance(session_id)->Init(session_version, session_id, device_id, job_id); + + BuildComputeGraph6(compute_graph); + + auto var_node = compute_graph->FindNode(var_name); + auto var_tensor_desc = var_node->GetOpDesc()->GetOutputDesc(0); + + uint8_t *dev_ptr = nullptr; + ge::VarManager::Instance(session_id)->AssignVarMem(var_name, var_tensor_desc, RT_MEMORY_HBM); + ge::VarManager::Instance(session_id)->SetVarAddr(var_name, var_tensor_desc, dev_ptr, RT_MEMORY_HBM); + ge::GraphNodePtr graph_node = make_shared(0); + compute_graph->InferShapeInNeed(); + graph_node->SetComputeGraph(compute_graph); + auto tmp_graph = 
GraphUtils::CreateGraphFromComputeGraph(compute_graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), compute_graph); + ge::formats::FormatTransferNchwNc1hwc0 ClassObj; + VariableOpPass pass(&ctrl); + auto ret = pass.Run(compute_graph); + MemManager::Instance().Finalize(); + return CheckTest6(compute_graph); + } + + bool DoTest7() { + ge::ComputeGraphPtr compute_graph = std::make_shared("0"); + const std::string var_name = "Node4D"; + + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + std::vector dims(4, 20); + ge::GeShape shape(dims); + VarManager::Instance(session_id)->Init(session_version, session_id, device_id, job_id); + + BuildComputeGraph7(compute_graph); + + auto var_node = compute_graph->FindNode(var_name); + auto var_tensor_desc = var_node->GetOpDesc()->GetOutputDesc(0); + + uint8_t *dev_ptr = nullptr; + + ge::GraphNodePtr graph_node = make_shared(0); + compute_graph->InferShapeInNeed(); + graph_node->SetComputeGraph(compute_graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), compute_graph); + VariableOpPass pass(&ctrl); + auto ret = pass.Run(compute_graph); + return CheckTest7(compute_graph); + } + + bool DoTest8() { + ge::ComputeGraphPtr compute_graph = std::make_shared("0"); + const std::string var_name = "Node4D"; + + uint64_t session_id = 0; + uint32_t device_id = 0; + uint64_t job_id = 0; + uint32_t session_version = 0; + std::vector dims(4, 20); + ge::GeShape shape(dims); + VarManager::Instance(session_id)->Init(session_version, session_id, device_id, job_id); + + BuildComputeGraph0(compute_graph); + + auto var_node = compute_graph->FindNode(var_name); + auto var_tensor_desc = 
var_node->GetOpDesc()->GetOutputDesc(0); + + uint8_t *dev_ptr = nullptr; + ge::GraphNodePtr graph_node = make_shared(0); + compute_graph->InferShapeInNeed(); + graph_node->SetComputeGraph(compute_graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(compute_graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), compute_graph); + VariableOpPass pass(&ctrl); + pass.Run(compute_graph); + return CheckTest8(compute_graph); + } + + private: + bool CheckTest0(const ge::ComputeGraphPtr compute_graph) { + const auto &variable_node = compute_graph->FindNode("Node4D"); + auto variable_node_format = variable_node->GetOpDesc()->GetOutputDesc(0).GetFormat(); + auto variable_node_data_type = variable_node->GetOpDesc()->GetOutputDesc(0).GetDataType(); + auto variable_node_shape = variable_node->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims(); + + if (variable_node_format != FORMAT_NC1HWC0 || variable_node_data_type != DT_FLOAT || + variable_node_shape.size() != 5) { + std::cout << "var format not changed !" 
<< std::endl; + return false; + } + + const auto &variable_ref_node = compute_graph->FindNode(var_ref_name_0); + GELOGD("var_ref_name_0 is %s", var_ref_name_0.c_str()); + auto variable_ref_node_format = variable_ref_node->GetOpDesc()->GetInputDesc(0).GetFormat(); + auto variable_ref_node_data_type = variable_ref_node->GetOpDesc()->GetInputDesc(0).GetDataType(); + auto variable_ref_node_shape = variable_ref_node->GetOpDesc()->GetInputDesc(0).GetShape().GetDims(); + + if (variable_ref_node_format != FORMAT_NC1HWC0 || variable_ref_node_data_type != DT_FLOAT || + variable_ref_node_shape.size() != 5) { + GELOGI("wanted data format is (%d,%d,%u)", FORMAT_NC1HWC0, DT_FLOAT, 5); + GELOGI("variable_ref_node_format is (%d,%d,%u)", variable_ref_node_format, variable_ref_node_data_type, + variable_ref_node_shape.size()); + + std::cout << "var ref format not changed !" << std::endl; + return false; + } + + ge::NodePtr trans_node = compute_graph->FindNode("4d_to_5d_1"); + if (trans_node != nullptr) { + std::cout << "4d_to_5d_1 not empty !" << std::endl; + return false; + } + + trans_node = compute_graph->FindNode("4d_to_5d_2"); + if (trans_node != nullptr) { + std::cout << "4d_to_5d_2 not empty !" << std::endl; + return false; + } + + trans_node = compute_graph->FindNode("5d_to_4d_1"); + if (trans_node != nullptr) { + std::cout << "5d_to_4d_1 not empty !" << std::endl; + return false; + } + + trans_node = compute_graph->FindNode("4d_to_5d_1_new"); + if (trans_node == nullptr) { + std::cout << "4d_to_5d_1_new is empty !" 
<< std::endl; + return false; + } + + auto new_variable_node = compute_graph->FindNode("Node4D_new"); + + auto new_variable_node_format = new_variable_node->GetOpDesc()->GetOutputDesc(0).GetFormat(); + auto new_variable_node_data_type = new_variable_node->GetOpDesc()->GetOutputDesc(0).GetDataType(); + auto new_variable_node_shape = new_variable_node->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims(); + + if (new_variable_node_format != FORMAT_NCHW || new_variable_node_data_type != DT_INT32 || + new_variable_node_shape.size() != 4) { + std::cout << "Node4D_new format Changed ! wanted data format is ( " << FORMAT_NC1HWC0 << ", " << DT_INT32 + << ", 4) " << std::endl; + std::cout << "current is ( " << new_variable_node_format << ", " << new_variable_node_data_type << ", " + << new_variable_node_shape.size() << ")" << std::endl; + return false; + } + + return true; + }; + + bool CheckTest1(const ge::ComputeGraphPtr compute_graph) { + const auto &variable_node = compute_graph->FindNode("Node4D"); + auto variable_node_format = variable_node->GetOpDesc()->GetOutputDesc(0).GetFormat(); + auto variable_node_data_type = variable_node->GetOpDesc()->GetOutputDesc(0).GetDataType(); + auto variable_node_shape = variable_node->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims(); + + if (variable_node_format != FORMAT_NCHW || variable_node_data_type != DT_INT32 || variable_node_shape.size() != 4) { + std::cout << "var format changed !" 
<< std::endl; + return false; + } + + const auto &variable_ref_node = compute_graph->FindNode(var_ref_name_0); + GELOGD("var_ref_name_0 is %s", var_ref_name_0.c_str()); + auto variable_ref_node_format = variable_ref_node->GetOpDesc()->GetInputDesc(0).GetFormat(); + auto variable_ref_node_data_type = variable_ref_node->GetOpDesc()->GetInputDesc(0).GetDataType(); + auto variable_ref_node_shape = variable_ref_node->GetOpDesc()->GetInputDesc(0).GetShape().GetDims(); + + if (variable_ref_node_format != FORMAT_NCHW || variable_ref_node_data_type != DT_INT32 || + variable_ref_node_shape.size() != 4) { + GELOGI("wanted data format is (%d,%d,%u)", FORMAT_NCHW, DT_INT32, 4); + GELOGI("variable_ref_node_format is (%d,%d,%u)", variable_ref_node_format, variable_ref_node_data_type, + variable_ref_node_shape.size()); + + std::cout << "var ref format not changed !" << std::endl; + return false; + } + + ge::NodePtr trans_node = compute_graph->FindNode("4d_to_5d_1"); + if (trans_node == nullptr) { + std::cout << "4d_to_5d_1 empty !" << std::endl; + return false; + } + + trans_node = compute_graph->FindNode("4d_to_5d_2"); + if (trans_node == nullptr) { + std::cout << "4d_to_5d_2 empty !" << std::endl; + return false; + } + + trans_node = compute_graph->FindNode("5d_to_4d_1"); + if (trans_node == nullptr) { + std::cout << "5d_to_4d_1 not empty !" << std::endl; + return false; + } + + return true; + }; + + bool CheckTest6(const ge::ComputeGraphPtr compute_graph) { + const auto &variable_node = compute_graph->FindNode("Node4D"); + auto variable_node_format = variable_node->GetOpDesc()->GetOutputDesc(0).GetFormat(); + auto variable_node_data_type = variable_node->GetOpDesc()->GetOutputDesc(0).GetDataType(); + auto variable_node_shape = variable_node->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims(); + + if (variable_node_format != FORMAT_NC1HWC0 || variable_node_data_type != DT_INT32 || + variable_node_shape.size() != 5) { + std::cout << "var format not changed !" 
<< std::endl; + return false; + } + + ge::NodePtr trans_node = compute_graph->FindNode("4d_to_5d_1"); + if (trans_node != nullptr) { + std::cout << "4d_to_5d_1 not empty !" << std::endl; + return false; + } + + trans_node = compute_graph->FindNode("4d_to_5d_2"); + if (trans_node != nullptr) { + std::cout << "4d_to_5d_2 not empty !" << std::endl; + return false; + } + + trans_node = compute_graph->FindNode("float_to_int_1"); + if (trans_node != nullptr) { + std::cout << "float_to_int_1 not empty !" << std::endl; + return false; + } + + trans_node = compute_graph->FindNode("float_to_int_2"); + if (trans_node != nullptr) { + std::cout << "float_to_int_1 not empty !" << std::endl; + return false; + } + + return true; + }; + + bool CheckTest7(const ge::ComputeGraphPtr compute_graph) { + const auto &variable_node = compute_graph->FindNode("Node4D"); + auto variable_node_format = variable_node->GetOpDesc()->GetOutputDesc(0).GetFormat(); + auto variable_node_data_type = variable_node->GetOpDesc()->GetOutputDesc(0).GetDataType(); + auto variable_node_shape = variable_node->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims(); + + if (variable_node_format != FORMAT_NC1HWC0 || variable_node_data_type != DT_INT32 || + variable_node_shape.size() != 5) { + std::cout << "var format not changed !" << std::endl; + return false; + } + + ge::NodePtr trans_node = compute_graph->FindNode("4d_to_4d_1"); + if (trans_node != nullptr) { + std::cout << "4d_to_5d_1 not empty !" 
<< std::endl; + return false; + } + return true; + }; + + bool CheckTest8(const ge::ComputeGraphPtr compute_graph) { + const auto &variable_node = compute_graph->FindNode("Node4D"); + auto variable_node_format = variable_node->GetOpDesc()->GetOutputDesc(0).GetFormat(); + auto variable_node_data_type = variable_node->GetOpDesc()->GetOutputDesc(0).GetDataType(); + auto variable_node_shape = variable_node->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims(); + return true; + }; +}; + +TEST_F(UtestVariableOpPassUnit, test_trans_data_remove) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool result = varibale_op_pass_simulator.DoTest0(); + + EXPECT_EQ(result, true); +} + +TEST_F(UtestVariableOpPassUnit, test_variable_ref) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool result = varibale_op_pass_simulator.DoTest1(); + + EXPECT_EQ(result, true); +} + +TEST_F(UtestVariableOpPassUnit, test_null_graph) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool result = varibale_op_pass_simulator.DoTest2(); + + EXPECT_EQ(result, true); +} + +TEST_F(UtestVariableOpPassUnit, test_covarage_trans_var_data) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool result = varibale_op_pass_simulator.DoTest3(); + + EXPECT_EQ(result, false); +} + +TEST_F(UtestVariableOpPassUnit, test_illegally_ref) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool result = varibale_op_pass_simulator.DoTest4(); + + EXPECT_EQ(result, true); +} + +TEST_F(UtestVariableOpPassUnit, test_single_node) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool result = varibale_op_pass_simulator.DoTest5(); + + EXPECT_EQ(result, true); +} + +TEST_F(UtestVariableOpPassUnit, test_un_mathed) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool result = varibale_op_pass_simulator.DoTest6(); + + EXPECT_EQ(result, true); +} + +TEST_F(UtestVariableOpPassUnit, test_same_op) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool 
result = varibale_op_pass_simulator.DoTest7(); + + EXPECT_EQ(true, true); +} + +TEST_F(UtestVariableOpPassUnit, test_error_return) { + VariableOpPassSimulator varibale_op_pass_simulator; + + bool result = varibale_op_pass_simulator.DoTest8(); + EXPECT_EQ(true, true); +} + +TEST_F(UtestVariableOpPassUnit, reshape) { + // init + MemManager::Instance().Initialize(std::vector({RT_MEMORY_HBM})); + VarManager::Instance(0)->Init(0, 0, 0, 0); + auto graph = BuildGraph2(); + graph->SetSessionID(0); + auto var1 = graph->FindNode("var1"); + VarManager::Instance(0)->AssignVarMem(var1->GetName(), var1->GetOpDesc()->GetOutputDesc(0), RT_MEMORY_HBM); + uint8_t *dev_ptr = nullptr; + VarManager::Instance(0)->SetVarAddr(var1->GetName(), var1->GetOpDesc()->GetOutputDesc(0), dev_ptr, RT_MEMORY_HBM); + + ge::GraphNodePtr graph_node = make_shared(0); + graph->InferShapeInNeed(); + graph_node->SetComputeGraph(graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), graph); + VariableOpPass pass(&ctrl); + EXPECT_EQ(pass.Run(graph), ge::SUCCESS); + MemManager::Instance().Finalize(); + + EXPECT_EQ(var1->GetOutNodes().size(), 1); + EXPECT_EQ(var1->GetOutDataNodes().at(0)->GetName(), "conv1"); + EXPECT_EQ(var1->GetOpDesc()->GetOutputDesc(0).GetFormat(), FORMAT_HWCN); + EXPECT_EQ(var1->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims(), std::vector({8, 8, 3, 2})); +} + +TEST_F(UtestVariableOpPassUnit, reformat) { + // init + MemManager::Instance().Initialize(std::vector({RT_MEMORY_HBM})); + VarManager::Instance(0)->Init(0, 0, 0, 0); + auto graph = BuildGraph3(); + graph->SetSessionID(0); + auto var1 = graph->FindNode("var1"); + VarManager::Instance(0)->AssignVarMem(var1->GetName(), var1->GetOpDesc()->GetOutputDesc(0), RT_MEMORY_HBM); + uint8_t *dev_ptr = nullptr; + VarManager::Instance(0)->SetVarAddr(var1->GetName(), 
var1->GetOpDesc()->GetOutputDesc(0), dev_ptr, RT_MEMORY_HBM); + + ge::GraphNodePtr graph_node = make_shared(0); + graph->InferShapeInNeed(); + graph_node->SetComputeGraph(graph); + auto tmp_graph = GraphUtils::CreateGraphFromComputeGraph(graph); + auto tmp_graph_ptr = std::make_shared(tmp_graph); + graph_node->SetGraph(tmp_graph_ptr); + + VarAccelerateCtrl ctrl; + ctrl.AddGraph(graph_node->GetGraphId(), graph); + VariableOpPass pass(&ctrl); + EXPECT_EQ(pass.Run(graph), ge::SUCCESS); + MemManager::Instance().Finalize(); + + EXPECT_EQ(var1->GetOutNodes().size(), 1); + EXPECT_EQ(var1->GetOutDataNodes().at(0)->GetName(), "conv1"); + EXPECT_EQ(var1->GetOpDesc()->GetOutputDesc(0).GetFormat(), FORMAT_ND); + EXPECT_EQ(var1->GetOpDesc()->GetOutputDesc(0).GetShape().GetDims(), std::vector({8, 8, 3, 2})); +} + +TEST_F(UtestVariableOpPassUnit, invalid_src_shape2) { + formats::FormatTransferNchwNc1hwc0 t1; + formats::FormatTransferNhwcNc1hwc0 t2; + formats::TransArgs args = formats::TransArgs(); + formats::TransResult ret; + t2.TransFormat(args, ret); +} diff --git a/tests/ut/ge/graph/passes/variable_prepare_pass_unittest.cc b/tests/ut/ge/graph/passes/variable_prepare_pass_unittest.cc new file mode 100644 index 00000000..9a0cb440 --- /dev/null +++ b/tests/ut/ge/graph/passes/variable_prepare_pass_unittest.cc @@ -0,0 +1,191 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/variable_prepare_op_pass.h" + +#include +#include + +using namespace ge; + +class UtestGraphPassesVariablePreparePass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + + ge::OpDescPtr op_desc_; +}; + +/// variable -- const +/// \ / +/// \ / +/// assign +TEST_F(UtestGraphPassesVariablePreparePass, variable_prepare_pass_succ1) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + ge::NodePtr variable_node = NodeBuilder("variable", VARIABLE) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr const_node = NodeBuilder("const", CONSTANT) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr apply_assign_node = NodeBuilder("assign", ASSIGN) + .AddInputDesc({2, 16, 2, 
2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::GraphUtils::AddEdge(variable_node->GetOutDataAnchor(0), apply_assign_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), apply_assign_node->GetInDataAnchor(1)); + + ge::VariablePrepareOpPass pass_; + ge::Status status = pass_.Run(graph); + EXPECT_EQ(apply_assign_node->GetOutDataNodes().size(), 0); + EXPECT_EQ(SUCCESS, status); +} + +/// variable -- applyMoment +TEST_F(UtestGraphPassesVariablePreparePass, variable_prepare_pass_succ2) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + ge::NodePtr variable_node = NodeBuilder("variable", VARIABLE) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr apply_monetum_node = NodeBuilder("apply_monetum", APPLYMOMENTUM) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr sinh_node = NodeBuilder("sinh", SINH) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::GraphUtils::AddEdge(variable_node->GetOutDataAnchor(0), apply_monetum_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(apply_monetum_node->GetOutControlAnchor(), sinh_node->GetInControlAnchor()); + + ge::VariablePrepareOpPass pass_; + ge::Status status = pass_.Run(graph); + EXPECT_EQ(apply_monetum_node->GetOutDataNodes().size(), 0); + EXPECT_EQ(SUCCESS, status); +} + +/// variable -- const1 +/// \ / +/// \ / +/// assign_add1 -- const2 +/// \ / +/// \ / +/// assign_sub -- const3 +/// \ / +/// \ / +/// assign_add2 -- const4 +/// \ / +/// \ / +/// assign_add3 +TEST_F(UtestGraphPassesVariablePreparePass, variable_prepare_pass_succ3) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + ge::NodePtr variable_node = NodeBuilder("variable", VARIABLE) + 
.AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + ge::NodePtr const_node1 = NodeBuilder("const1", CONSTANT) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + ge::NodePtr const_node2 = NodeBuilder("const2", CONSTANT) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + ge::NodePtr const_node3 = NodeBuilder("const3", CONSTANT) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr const_node4 = NodeBuilder("const4", CONSTANT) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr assign_add1 = NodeBuilder("assign_add1", ASSIGNADD) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + ge::NodePtr assign_sub = NodeBuilder("assign_sub", ASSIGNSUB) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + ge::NodePtr assign_add2 = NodeBuilder("assign_add2", ASSIGNADD) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr assign_add3 = NodeBuilder("assign_add3", ASSIGNADD) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::GraphUtils::AddEdge(variable_node->GetOutDataAnchor(0), assign_add1->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(const_node1->GetOutDataAnchor(0), assign_add1->GetInDataAnchor(1)); + + ge::GraphUtils::AddEdge(assign_add1->GetOutDataAnchor(0), assign_sub->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(const_node2->GetOutDataAnchor(0), 
assign_sub->GetInDataAnchor(1)); + + ge::GraphUtils::AddEdge(assign_sub->GetOutDataAnchor(0), assign_add2->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(const_node3->GetOutDataAnchor(0), assign_add2->GetInDataAnchor(1)); + + ge::GraphUtils::AddEdge(assign_add2->GetOutDataAnchor(0), assign_add3->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(const_node4->GetOutDataAnchor(0), assign_add3->GetInDataAnchor(1)); + + ge::VariablePrepareOpPass pass_; + ge::Status status = pass_.Run(graph); + EXPECT_EQ(assign_add3->GetOutDataNodes().size(), 0); + EXPECT_EQ(SUCCESS, status); +} diff --git a/tests/ut/ge/graph/passes/variable_ref_delete_pass_unittest.cc b/tests/ut/ge/graph/passes/variable_ref_delete_pass_unittest.cc new file mode 100644 index 00000000..a8fca5f7 --- /dev/null +++ b/tests/ut/ge/graph/passes/variable_ref_delete_pass_unittest.cc @@ -0,0 +1,171 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "graph/passes/variable_ref_delete_op_pass.h" + +#include +#include + +using namespace ge; + +class UtestGraphPassesVariableRefDeletePass : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +class NodeBuilder { + public: + NodeBuilder(const std::string &name, const std::string &type) { op_desc_ = std::make_shared(name, type); } + + NodeBuilder &AddInputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddInputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + NodeBuilder &AddOutputDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + op_desc_->AddOutputDesc(CreateTensorDesc(shape, format, data_type)->Clone()); + return *this; + } + + ge::NodePtr Build(const ge::ComputeGraphPtr &graph) { return graph->AddNode(op_desc_); } + + private: + ge::GeTensorDescPtr CreateTensorDesc(std::initializer_list shape, ge::Format format = FORMAT_NCHW, + ge::DataType data_type = DT_FLOAT) { + GeShape ge_shape{std::vector(shape)}; + ge::GeTensorDescPtr tensor_desc = std::make_shared(); + tensor_desc->SetShape(ge_shape); + tensor_desc->SetFormat(format); + tensor_desc->SetDataType(data_type); + return tensor_desc; + } + + ge::OpDescPtr op_desc_; +}; + +/// variable -- const +/// \ / +/// \ / +/// assign +/// | +/// | +/// variable_ref +TEST_F(UtestGraphPassesVariableRefDeletePass, variable_ref_delete_pass_succ1) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + ge::NodePtr variable_node = NodeBuilder("variable", VARIABLE) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr const_node = NodeBuilder("const", CONSTANT) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr apply_assign_node = 
NodeBuilder("assign", ASSIGN) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr variable_ref_node = NodeBuilder("variable_ref", VARIABLE) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + std::string ref_var_src_var_name = "variable"; + ge::AttrUtils::SetStr(variable_ref_node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_src_var_name); + + ge::GraphUtils::AddEdge(variable_node->GetOutDataAnchor(0), apply_assign_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), apply_assign_node->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(apply_assign_node->GetOutDataAnchor(0), variable_ref_node->GetInDataAnchor(0)); + + ge::VariableRefDeleteOpPass pass_; + ge::Status status = pass_.Run(graph); + EXPECT_EQ(apply_assign_node->GetOutDataNodes().size(), 0); + EXPECT_EQ(SUCCESS, status); +} + +/// variable -- const +/// \ / +/// \ / +/// assign +/// | +/// | +/// variable_ref +TEST_F(UtestGraphPassesVariableRefDeletePass, variable_ref_delete_pass_fail1) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + ge::NodePtr variable_node = NodeBuilder("variable", VARIABLE) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr const_node = NodeBuilder("const", CONSTANT) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr apply_assign_node = NodeBuilder("assign", ASSIGN) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr variable_ref_node = NodeBuilder("variable_ref", VARIABLE) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + std::string 
ref_var_src_var_name = "wrong_variable"; + ge::AttrUtils::SetStr(variable_ref_node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_src_var_name); + + ge::GraphUtils::AddEdge(variable_node->GetOutDataAnchor(0), apply_assign_node->GetInDataAnchor(0)); + ge::GraphUtils::AddEdge(const_node->GetOutDataAnchor(0), apply_assign_node->GetInDataAnchor(1)); + ge::GraphUtils::AddEdge(apply_assign_node->GetOutDataAnchor(0), variable_ref_node->GetInDataAnchor(0)); + + ge::VariableRefDeleteOpPass pass_; + ge::Status status = pass_.Run(graph); + EXPECT_EQ(FAILED, status); +} + +/// assign +/// | +/// | +/// variable_ref +TEST_F(UtestGraphPassesVariableRefDeletePass, variable_ref_delete_pass_fail2) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + + ge::NodePtr apply_assign_node = NodeBuilder("assign", ASSIGN) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + ge::NodePtr variable_ref_node = NodeBuilder("variable_ref", VARIABLE) + .AddInputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .AddOutputDesc({2, 16, 2, 2}, FORMAT_NHWC, DT_FLOAT) + .Build(graph); + + std::string ref_var_src_var_name = "variable"; + ge::AttrUtils::SetStr(variable_ref_node->GetOpDesc(), REF_VAR_SRC_VAR_NAME, ref_var_src_var_name); + + ge::GraphUtils::AddEdge(apply_assign_node->GetOutDataAnchor(0), variable_ref_node->GetInDataAnchor(0)); + + ge::VariableRefDeleteOpPass pass_; + ge::Status status = pass_.Run(graph); + EXPECT_EQ(FAILED, status); +} diff --git a/tests/ut/ge/graph/transop_util_unittest.cc b/tests/ut/ge/graph/transop_util_unittest.cc new file mode 100644 index 00000000..9f645c22 --- /dev/null +++ b/tests/ut/ge/graph/transop_util_unittest.cc @@ -0,0 +1,74 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "graph/common/transop_util.h" + +#include "common/debug/log.h" +#include "common/types.h" +#include "common/util.h" +#include "compute_graph.h" + +using namespace ge; + +class UtestTransopUtil : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestTransopUtil, test_is_transop_true) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + OpDescPtr op_desc = std::make_shared("Cast", CAST); + NodePtr node = graph->AddNode(op_desc); + + bool ret = TransOpUtil::IsTransOp(node); + EXPECT_TRUE(ret); +} + +TEST_F(UtestTransopUtil, test_is_transop_fail) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + OpDescPtr op_desc = std::make_shared("relu", RELU); + NodePtr node = graph->AddNode(op_desc); + + bool ret = TransOpUtil::IsTransOp(node); + EXPECT_FALSE(ret); +} + +TEST_F(UtestTransopUtil, test_get_transop_get_index) { + ge::ComputeGraphPtr graph = std::make_shared("test"); + OpDescPtr transdata_op_desc = std::make_shared("Transdata", TRANSDATA); + OpDescPtr transpose_op_desc = std::make_shared("Transpose", TRANSPOSE); + OpDescPtr reshape_op_desc = std::make_shared("Reshape", RESHAPE); + OpDescPtr cast_op_desc = std::make_shared("Cast", CAST); + + NodePtr transdata_node = graph->AddNode(transdata_op_desc); + NodePtr transpose_node = graph->AddNode(transpose_op_desc); + NodePtr reshape_node = graph->AddNode(reshape_op_desc); + NodePtr cast_node = graph->AddNode(cast_op_desc); + + int index1 = TransOpUtil::GetTransOpDataIndex(transdata_node); + int index2 = 
TransOpUtil::GetTransOpDataIndex(transpose_node); + int index3 = TransOpUtil::GetTransOpDataIndex(reshape_node); + int index4 = TransOpUtil::GetTransOpDataIndex(cast_node); + + EXPECT_EQ(index1, 0); + EXPECT_EQ(index2, 0); + EXPECT_EQ(index3, 0); + EXPECT_EQ(index4, 0); +} diff --git a/tests/ut/ge/graph/variable_accelerate_ctrl_unittest.cc b/tests/ut/ge/graph/variable_accelerate_ctrl_unittest.cc new file mode 100644 index 00000000..37b4bda7 --- /dev/null +++ b/tests/ut/ge/graph/variable_accelerate_ctrl_unittest.cc @@ -0,0 +1,232 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "passes/graph_builder_utils.h" + +#define private public +#include "graph/manager/util/variable_accelerate_ctrl.h" +#undef private + +namespace ge { +class UtestVariableAccelerateCtrl : public testing::Test { + protected: + void SetUp() {} + void TearDown() {} +}; + +namespace { +/// netoutput1 +/// | +/// shapeNo1 +/// | +/// addnYes1 +/// / \ +/// / \ +/// const1 const2 + +ComputeGraphPtr BuildGraph1() { + auto builder = ut::GraphBuilder("test"); + auto const1 = builder.AddNode("const1", "CONSTANT", 0, 1); + auto const2 = builder.AddNode("const2", "CONSTANT", 0, 1); + auto addn1 = builder.AddNode("addn1", "AddNYes", 2, 1); + auto shape1 = builder.AddNode("shape1", "ShapeNo", 1, 1); + auto netoutput1 = builder.AddNode("netoutput", "NETOUTPUT", 1, 0); + + builder.AddDataEdge(const1, 0, addn1, 0); + builder.AddDataEdge(const2, 0, addn1, 1); + builder.AddDataEdge(addn1, 0, shape1, 0); + builder.AddDataEdge(shape1, 0, netoutput1, 0); + + return builder.GetGraph(); +} + +/// +/// netoutput1 +/// / \ \ +/// add1 assign1 \ +/// / \ / \ \ +/// var1 var2 const1 var3 + +ComputeGraphPtr BuildGraph2() { + auto builder = ut::GraphBuilder("test"); + auto var1 = builder.AddNode("var1", "Variable", 0, 1); + auto var2 = builder.AddNode("var2", "VariableV2", 0, 1); + auto var3 = builder.AddNode("var3", "VarHandleOp", 0, 1); + auto const1 = builder.AddNode("const1", "Const", 0, 1); + auto add1 = builder.AddNode("add1", "Add", 2, 1); + auto assign1 = builder.AddNode("assign1", "Assign", 2, 1); + auto netoutput1 = builder.AddNode("netoutput1", "Netoutput", 3, 0); + + builder.AddDataEdge(var1, 0, add1, 0); + builder.AddDataEdge(var2, 0, add1, 1); + builder.AddDataEdge(var2, 0, assign1, 1); + builder.AddDataEdge(var3, 0, netoutput1, 2); + builder.AddDataEdge(const1, 0, assign1, 0); + builder.AddDataEdge(add1, 0, netoutput1, 0); + builder.AddDataEdge(assign1, 0, netoutput1, 1); + + return builder.GetGraph(); +} + +} // namespace + 
+TEST_F(UtestVariableAccelerateCtrl, add_graph_null_ptr) { + VarAccelerateCtrl c; + c.AddGraph(1, nullptr); + EXPECT_TRUE(c.graph_ids_to_var_names_.empty()); +} + +TEST_F(UtestVariableAccelerateCtrl, add_graph_no_var) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph1()); + EXPECT_TRUE(c.graph_ids_to_var_names_.count(1) > 0); + EXPECT_TRUE(c.graph_ids_to_var_names_[1].empty()); +} + +TEST_F(UtestVariableAccelerateCtrl, add_graph_vars) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + EXPECT_TRUE(c.graph_ids_to_var_names_.count(1) > 0); + EXPECT_EQ(c.graph_ids_to_var_names_[1].size(), 3); + EXPECT_EQ(c.graph_ids_to_var_names_[1].count("var1"), 1); + EXPECT_EQ(c.graph_ids_to_var_names_[1].count("var2"), 1); + EXPECT_EQ(c.graph_ids_to_var_names_[1].count("var3"), 1); +} + +TEST_F(UtestVariableAccelerateCtrl, remove_graph_vars) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + EXPECT_FALSE(c.graph_ids_to_var_names_.empty()); + c.RemoveGraph(1); + EXPECT_TRUE(c.graph_ids_to_var_names_.empty()); +} + +TEST_F(UtestVariableAccelerateCtrl, graph_rebuild) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + EXPECT_FALSE(c.IsGraphNeedRebuild(1)); + c.SetVarChanged("var1"); + EXPECT_TRUE(c.IsGraphNeedRebuild(1)); +} + +TEST_F(UtestVariableAccelerateCtrl, graph_rebuild_multi_changed) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + EXPECT_FALSE(c.IsGraphNeedRebuild(1)); + c.SetVarChanged("var2"); + c.SetVarChanged("var3"); + EXPECT_TRUE(c.IsGraphNeedRebuild(1)); +} + +TEST_F(UtestVariableAccelerateCtrl, graph_rebuild_multi_graph) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + c.AddGraph(2, BuildGraph2()); + EXPECT_FALSE(c.IsGraphNeedRebuild(1)); + EXPECT_FALSE(c.IsGraphNeedRebuild(2)); + c.SetVarChanged("var1"); + EXPECT_TRUE(c.IsGraphNeedRebuild(1)); + EXPECT_TRUE(c.IsGraphNeedRebuild(2)); +} + +TEST_F(UtestVariableAccelerateCtrl, graph_rebuild_after_remove_graph) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + 
c.AddGraph(2, BuildGraph2()); + EXPECT_FALSE(c.IsGraphNeedRebuild(1)); + EXPECT_FALSE(c.IsGraphNeedRebuild(2)); + c.SetVarChanged("var1"); + EXPECT_TRUE(c.IsGraphNeedRebuild(1)); + EXPECT_TRUE(c.IsGraphNeedRebuild(2)); + c.RemoveGraph(2); + EXPECT_TRUE(c.IsGraphNeedRebuild(1)); + EXPECT_FALSE(c.IsGraphNeedRebuild(2)); +} + +TEST_F(UtestVariableAccelerateCtrl, graph_rebuild_after_build_end) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + c.AddGraph(2, BuildGraph2()); + EXPECT_FALSE(c.IsGraphNeedRebuild(1)); + EXPECT_FALSE(c.IsGraphNeedRebuild(2)); + c.SetVarChanged("var1"); + EXPECT_TRUE(c.IsGraphNeedRebuild(1)); + EXPECT_TRUE(c.IsGraphNeedRebuild(2)); + c.SetGraphBuildEnd(2); + EXPECT_TRUE(c.IsGraphNeedRebuild(1)); + EXPECT_FALSE(c.IsGraphNeedRebuild(2)); +} + +TEST_F(UtestVariableAccelerateCtrl, var_permit_to_change) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var1")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var2")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var3")); + c.SetVarChanged("var1"); + EXPECT_FALSE(c.IsVarPermitToChangeFormats("var1")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var2")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var3")); +} + +TEST_F(UtestVariableAccelerateCtrl, var_permit_to_change_remove_graph_not_change) { + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var1")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var2")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var3")); + c.SetVarChanged("var1"); + EXPECT_FALSE(c.IsVarPermitToChangeFormats("var1")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var2")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var3")); + c.RemoveGraph(1); + EXPECT_FALSE(c.IsVarPermitToChangeFormats("var1")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var2")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var3")); +} + +TEST_F(UtestVariableAccelerateCtrl, var_permit_to_change_excceds_the_max_num) 
{ + VarAccelerateCtrl c; + c.AddGraph(1, BuildGraph2()); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var1")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var2")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var3")); + c.SetVarChanged("var1"); + c.SetVarChanged("var1"); + c.SetVarChanged("var1"); + c.SetVarChanged("var1"); + c.SetVarChanged("var1"); + c.SetVarChanged("var1"); + EXPECT_FALSE(c.IsVarPermitToChangeFormats("var1")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var2")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var3")); +} + +TEST_F(UtestVariableAccelerateCtrl, var_changed_before_add_graph) { + VarAccelerateCtrl c; + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var1")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var2")); + EXPECT_TRUE(c.IsVarPermitToChangeFormats("var3")); + c.SetVarChanged("var1"); + EXPECT_FALSE(c.IsVarPermitToChangeFormats("var1")); + c.AddGraph(1, BuildGraph2()); + EXPECT_FALSE(c.IsVarPermitToChangeFormats("var1")); + // graph no need to set again + EXPECT_FALSE(c.IsGraphNeedRebuild(1)); +} +} // namespace ge \ No newline at end of file diff --git a/tests/ut/ge/graph_ir/ge_operator_factory_unittest.cc b/tests/ut/ge/graph_ir/ge_operator_factory_unittest.cc new file mode 100644 index 00000000..64f76515 --- /dev/null +++ b/tests/ut/ge/graph_ir/ge_operator_factory_unittest.cc @@ -0,0 +1,95 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "../graph/ops_stub.h" +#include "operator_factory.h" + +#define protected public +#define private public +#include "operator_factory_impl.h" +#undef private +#undef protected + +using namespace ge; +class UtestGeOperatorFactory : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST(UtestGeOperatorFactory, create_operator) { + Operator acosh = OperatorFactory::CreateOperator("acosh", "Acosh"); + EXPECT_EQ("Acosh", acosh.GetOpType()); + EXPECT_EQ("acosh", acosh.GetName()); + EXPECT_EQ(false, acosh.IsEmpty()); +} + +TEST(UtestGeOperatorFactory, create_operator_nullptr) { + Operator abc = OperatorFactory::CreateOperator("abc", "ABC"); + EXPECT_EQ(true, abc.IsEmpty()); +} + +TEST(UtestGeOperatorFactory, get_infer_shape_func) { + OperatorFactoryImpl::RegisterInferShapeFunc("test", nullptr); + InferShapeFunc infer_shape_func = OperatorFactoryImpl::GetInferShapeFunc("ABC"); + EXPECT_EQ(nullptr, infer_shape_func); +} + +TEST(UtestGeOperatorFactory, get_verify_func) { + OperatorFactoryImpl::RegisterVerifyFunc("test", nullptr); + VerifyFunc verify_func = OperatorFactoryImpl::GetVerifyFunc("ABC"); + EXPECT_EQ(nullptr, verify_func); +} + +TEST(UtestGeOperatorFactory, get_ops_type_list) { + std::vector all_ops; + graphStatus status = OperatorFactory::GetOpsTypeList(all_ops); + EXPECT_NE(0, all_ops.size()); + EXPECT_EQ(GRAPH_SUCCESS, status); +} + +TEST(UtestGeOperatorFactory, is_exist_op) { + graphStatus status = OperatorFactory::IsExistOp("Acosh"); + EXPECT_EQ(true, status); + status = OperatorFactory::IsExistOp("ABC"); + EXPECT_EQ(false, status); +} + +TEST(UtestGeOperatorFactory, register_func) { + OperatorFactoryImpl::RegisterInferShapeFunc("test", nullptr); + graphStatus status = OperatorFactoryImpl::RegisterInferShapeFunc("test", nullptr); + EXPECT_EQ(GRAPH_FAILED, status); + status = OperatorFactoryImpl::RegisterInferShapeFunc("ABC", nullptr); + EXPECT_EQ(GRAPH_SUCCESS, status); + + 
OperatorFactoryImpl::RegisterVerifyFunc("test", nullptr); + status = OperatorFactoryImpl::RegisterVerifyFunc("test", nullptr); + EXPECT_EQ(GRAPH_FAILED, status); + status = OperatorFactoryImpl::RegisterVerifyFunc("ABC", nullptr); + EXPECT_EQ(GRAPH_SUCCESS, status); +} + +TEST(UtestGeOperatorFactory, get_ops_type_list_fail) { + auto operator_creators_temp = OperatorFactoryImpl::operator_creators_; + OperatorFactoryImpl::operator_creators_ = nullptr; + std::vector all_ops; + graphStatus status = OperatorFactoryImpl::GetOpsTypeList(all_ops); + EXPECT_EQ(GRAPH_FAILED, status); + OperatorFactoryImpl::operator_creators_ = operator_creators_temp; +} \ No newline at end of file diff --git a/tests/ut/ge/plugin_manager/ge_util_unittest.cc b/tests/ut/ge/plugin_manager/ge_util_unittest.cc new file mode 100644 index 00000000..f02dbc2d --- /dev/null +++ b/tests/ut/ge/plugin_manager/ge_util_unittest.cc @@ -0,0 +1,48 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include + +#include "common/ge/ge_util.h" + +using namespace ge; +using namespace std; + +class UtestGeUtil : public testing::Test { + protected: + void SetUp() override {} + + void TearDown() override {} +}; + +class Foo { + public: + int i = 0; + + Foo(int x) { i = x; } + + GE_DELETE_ASSIGN_AND_COPY(Foo); +}; + +TEST_F(UtestGeUtil, delete_assign_and_copy) { + Foo f(1); + ASSERT_EQ(f.i, 1); +} + +TEST_F(UtestGeUtil, make_shared) { + auto f = MakeShared(1); + ASSERT_EQ(f->i, 1); +} diff --git a/tests/ut/ge/profiling/ge_profiling_manager_unittest.cc b/tests/ut/ge/profiling/ge_profiling_manager_unittest.cc new file mode 100644 index 00000000..bc11b4aa --- /dev/null +++ b/tests/ut/ge/profiling/ge_profiling_manager_unittest.cc @@ -0,0 +1,164 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include +#include +#include +#include +#include + +#define protected public +#define private public +#include "common/profiling/profiling_manager.h" +#undef protected +#undef private + +using namespace ge; +using namespace std; + +class UtestGeProfilinganager : public testing::Test { + protected: + void SetUp() override {} + + void TearDown() override {} +}; + +class TestReporter : public Msprof::Engine::Reporter { + public: + TestReporter() {} + ~TestReporter() {} + + public: + int Report(const Msprof::Engine::ReporterData *data) { return 0; } + + int Flush() { return 0; } +}; + +class TestPluginIntf : public Msprof::Engine::PluginIntf { + public: + TestPluginIntf() {} + ~TestPluginIntf() {} + + public: + int Init(const Msprof::Engine::Reporter *reporter) { return 0; } + + int UnInit() { return 0; } +}; + +TEST_F(UtestGeProfilinganager, init_success) { + setenv("PROFILING_MODE", "true", true); + Options options; + options.device_id = 0; + options.job_id = 0; + string profiling_config; + + ProfilingManager::Instance().SetProfilingConfig(profiling_config); + + Status ret = ProfilingManager::Instance().Init(options); + EXPECT_EQ(ret, ge::SUCCESS); +} + +TEST_F(UtestGeProfilinganager, start_profiling_success) { + int32_t iter_num = 1; + + setenv("PROFILING_MODE", "true", true); + setenv("PROFILING_OPTIONS", "training_trace", true); + Options options; + string profiling_config; + + ProfilingManager::Instance().SetProfilingConfig(profiling_config); + + Status ret = ProfilingManager::Instance().Init(options); + EXPECT_EQ(ret, ge::SUCCESS); + ret = ProfilingManager::Instance().StartProfiling(iter_num); + EXPECT_EQ(ret, ge::SUCCESS); + + setenv("PROFILING_OPTIONS", "op_trance", true); + ret = ProfilingManager::Instance().Init(options); + EXPECT_EQ(ret, ge::SUCCESS); + ret = ProfilingManager::Instance().StartProfiling(iter_num); + EXPECT_EQ(ret, ge::SUCCESS); +} + +TEST_F(UtestGeProfilinganager, stop_profiling_success) { + int32_t iter_num = 1; + Options 
options; + + TestReporter test_reporter; + + string profiling_config; + ProfilingManager::Instance().SetProfilingConfig(profiling_config); + + Status ret = 0; + setenv("PROFILING_OPTIONS", "op_trance", true); + ret = ProfilingManager::Instance().Init(options); + EXPECT_EQ(ret, ge::SUCCESS); + ret = ProfilingManager::Instance().StartProfiling(iter_num); + EXPECT_EQ(ret, ge::SUCCESS); + ProfilingManager::Instance().StopProfiling(); +} + +TEST_F(UtestGeProfilinganager, report_profiling_data_success) { + map op_task_id_map; + op_task_id_map[0] = "conv"; + op_task_id_map.insert(pair(1, "mul")); + ProfilingManager::Instance().ReportProfilingData(op_task_id_map); +} + +TEST_F(UtestGeProfilinganager, plugin_impl_success) { + PluginImpl plugin_Impl("FMK"); + TestReporter test_reporter; + Msprof::Engine::Reporter *reporter_ptr = &test_reporter; + plugin_Impl.Init(reporter_ptr); + plugin_Impl.UnInit(); +} + +TEST_F(UtestGeProfilinganager, profiling_engine_impl_success) { + ProfilingEngineImpl profiling_engine_impl; + + Msprof::Engine::PluginIntf *plugin_ptr = new TestPluginIntf(); + profiling_engine_impl.ReleasePlugin(plugin_ptr); + + Msprof::Engine::PluginIntf *ptr = profiling_engine_impl.CreatePlugin(); + delete ptr; + ptr = nullptr; +} + +TEST_F(UtestGeProfilinganager, set_profilng_cfg_success) { + string profiling_config = "profiling_mode: true"; + ProfilingManager::Instance().SetProfilingConfig(profiling_config); +} + +TEST_F(UtestGeProfilinganager, init_from_cfg_success0) { + Options options; + string profiling_config = + "{\"startCfg\":[{\"deviceID\":\"0\",\"features\":[{\"name\":\"op_trace\",\"conf\":\"2\"}]}]}"; + ProfilingManager::Instance().SetProfilingConfig(profiling_config); + + Status ret = ProfilingManager::Instance().Init(options); + EXPECT_EQ(ret, ge::SUCCESS); +} + +TEST_F(UtestGeProfilinganager, init_from_cfg_success1) { + Options options; + string profiling_config = + 
"{\"startCfg\":[{\"deviceID\":\"0\",\"features\":[{\"name\":\"test_trace\"}],\"jobID\":\"1231231231\"}]}"; + ProfilingManager::Instance().SetProfilingConfig(profiling_config); + + Status ret = ProfilingManager::Instance().Init(options); + EXPECT_EQ(ret, ge::SUCCESS); +} diff --git a/tests/ut/ge/single_op/single_op_manager_unittest.cc b/tests/ut/ge/single_op/single_op_manager_unittest.cc new file mode 100644 index 00000000..3664d037 --- /dev/null +++ b/tests/ut/ge/single_op/single_op_manager_unittest.cc @@ -0,0 +1,95 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "cce/taskdown_common.hpp" +#include "runtime/rt.h" + +#define protected public +#define private public +#include "single_op/single_op_manager.h" +#undef private +#undef protected + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestSingleOpManager : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestSingleOpManager, test_get_resource) { + uintptr_t resource_id = 0x1; + auto &instance = SingleOpManager::GetInstance(); + ASSERT_EQ(instance.TryGetResource(resource_id), nullptr); + ASSERT_NE(instance.GetResource(resource_id), nullptr); +} + +TEST_F(UtestSingleOpManager, test_get_op_from_model) { + auto stream = (rtStream_t)0x1; + uintptr_t resource_id = 0x1; + auto &instance = SingleOpManager::GetInstance(); + + SingleOp *single_op = nullptr; + ModelData model_data; + string model_str = "123456789"; + model_data.model_data = (void *)model_str.c_str(); + model_data.model_len = model_str.size(); + + ASSERT_EQ(instance.GetOpFromModel("model", model_data, stream, &single_op), FAILED); + ASSERT_EQ(instance.GetResource(resource_id)->GetOperator(model_data.model_data), nullptr); +} + +TEST_F(UtestSingleOpManager, test_relesase_resource) { + auto stream = (rtStream_t)0x99; + auto &instance = SingleOpManager::GetInstance(); + + ASSERT_EQ(instance.ReleaseResource(stream), SUCCESS); + instance.GetResource(0x99); + ASSERT_EQ(instance.ReleaseResource(stream), SUCCESS); +} + +TEST_F(UtestSingleOpManager, test_get_op_from_model_with_null_stream) { + void *stream = nullptr; + + SingleOp *single_op = nullptr; + ModelData model_data; + string model_str = "123456789"; + model_data.model_data = (void *)model_str.c_str(); + model_data.model_len = model_str.size(); + auto &instance = SingleOpManager::GetInstance(); + + ASSERT_EQ(instance.GetOpFromModel("model", model_data, stream, &single_op), FAILED); +} + +TEST_F(UtestSingleOpManager, get_resource_failed) { + auto stream = 
(rtStream_t)0x1; + + SingleOp *single_op = nullptr; + ModelData model_data; + string model_str = "123456789"; + model_data.model_data = (void *)model_str.c_str(); + model_data.model_len = model_str.size(); + auto &instance = SingleOpManager::GetInstance(); + + ASSERT_EQ(instance.GetOpFromModel("model", model_data, stream, &single_op), FAILED); +} \ No newline at end of file diff --git a/tests/ut/ge/single_op/single_op_model_unittest.cc b/tests/ut/ge/single_op/single_op_model_unittest.cc new file mode 100644 index 00000000..ad589029 --- /dev/null +++ b/tests/ut/ge/single_op/single_op_model_unittest.cc @@ -0,0 +1,162 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "cce/taskdown_common.hpp" +#include "graph/load/new_model_manager/model_utils.h" +#include "graph/utils/graph_utils.h" +#include "runtime/rt.h" + +#define protected public +#define private public +#include "single_op/single_op_model.h" +#include "single_op/task/tbe_task_builder.h" +#undef private +#undef protected + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestSingleOpModel : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} +}; + +TEST_F(UtestSingleOpModel, test_init_model) { + string model_data_str = "123456789"; + SingleOpModel model("model", model_data_str.c_str(), model_data_str.size()); + ASSERT_EQ(model.InitModel(), FAILED); +} + +void ParseOpModelParamsMock(ModelHelper &model_helper, SingleOpModelParam ¶m) {} + +TEST_F(UtestSingleOpModel, test_parse_input_node) { + string model_data_str = "123456789"; + SingleOpModel model("model", model_data_str.c_str(), model_data_str.size()); + auto op_desc = make_shared("Data", "Data"); + + ASSERT_EQ(model.ParseInputNode(op_desc), PARAM_INVALID); + + vector shape{1, 2, 3, 4}; + vector offsets{16}; + GeShape ge_shape(shape); + GeTensorDesc desc(ge_shape); + op_desc->AddOutputDesc(desc); + op_desc->SetOutputOffset(offsets); + ASSERT_EQ(model.ParseInputNode(op_desc), SUCCESS); + + op_desc->AddOutputDesc(desc); + offsets.push_back(32); + op_desc->SetOutputOffset(offsets); + ASSERT_EQ(model.ParseInputNode(op_desc), PARAM_INVALID); +} + +TEST_F(UtestSingleOpModel, test_parse_output_node) { + string model_data_str = "123456789"; + SingleOpModel model("model", model_data_str.c_str(), model_data_str.size()); + auto op_desc = make_shared("NetOutput", "NetOutput"); + + vector shape{1, 2, 3, 4}; + vector offsets{16}; + + GeShape ge_shape(shape); + GeTensorDesc desc(ge_shape); + op_desc->AddInputDesc(desc); + op_desc->SetInputOffset(offsets); + op_desc->AddOutputDesc(desc); + op_desc->SetOutputOffset(offsets); + + 
ASSERT_NO_THROW(model.ParseOutputNode(op_desc)); + ASSERT_NO_THROW(model.ParseOutputNode(op_desc)); +} + +TEST_F(UtestSingleOpModel, test_set_inputs_and_outputs) { + string model_data_str = "123456789"; + SingleOpModel model("model", model_data_str.c_str(), model_data_str.size()); + model.input_offset_list_.push_back(0); + model.input_sizes_.push_back(16); + + model.output_offset_list_.push_back(0); + model.output_sizes_.push_back(16); + + SingleOp single_op; + + ASSERT_EQ(model.SetInputsAndOutputs(single_op), SUCCESS); +} + +TEST_F(UtestSingleOpModel, test_build_kernel_task) { + string model_data_str = "123456789"; + SingleOpModel model("model", model_data_str.c_str(), model_data_str.size()); + model.input_offset_list_.push_back(0); + model.input_sizes_.push_back(16); + + model.output_offset_list_.push_back(0); + model.output_sizes_.push_back(16); + + auto op_desc = make_shared("AddN", "AddN"); + vector shape{16, 16}; + GeShape ge_shape(shape); + GeTensorDesc desc(ge_shape); + op_desc->AddInputDesc(desc); + op_desc->AddOutputDesc(desc); + + SingleOp single_op; + domi::KernelDef kernel_def; + kernel_def.mutable_context()->set_kernel_type(cce::ccKernelType::CCE_AI_CORE); + OpTask *task = nullptr; + ASSERT_EQ(model.BuildKernelTask(kernel_def, single_op, &task), UNSUPPORTED); + + kernel_def.mutable_context()->set_kernel_type(cce::ccKernelType::TE); + ASSERT_EQ(model.BuildKernelTask(kernel_def, single_op, &task), INTERNAL_ERROR); + + model.op_list_[0] = op_desc; + + ASSERT_EQ(model.BuildKernelTask(kernel_def, single_op, &task), PARAM_INVALID); + ASSERT_EQ(task, nullptr); + delete task; +} + +TEST_F(UtestSingleOpModel, test_init) { + string model_data_str = "123456789"; + SingleOpModel op_model("model", model_data_str.c_str(), model_data_str.size()); + ASSERT_EQ(op_model.Init(), FAILED); +} + +TEST_F(UtestSingleOpModel, test_parse_arg_table) { + string model_data_str = "123456789"; + SingleOpModel op_model("model", model_data_str.c_str(), model_data_str.size()); + + 
TbeOpTask task; + SingleOp op; + op.arg_table_.resize(2); + + auto *args = new uintptr_t[2]; + args[0] = 0x100000; + args[1] = 0x200000; + task.SetKernelArgs(args, 16, 1); + + op_model.model_params_.addr_mapping_[0x100000] = 1; + op_model.ParseArgTable(&task, op); + + ASSERT_EQ(op.arg_table_[0].size(), 0); + ASSERT_EQ(op.arg_table_[1].size(), 1); + ASSERT_EQ(op.arg_table_[1].front(), &args[0]); +} diff --git a/tests/ut/ge/single_op/stream_resource_unittest.cc b/tests/ut/ge/single_op/stream_resource_unittest.cc new file mode 100644 index 00000000..88ec25c2 --- /dev/null +++ b/tests/ut/ge/single_op/stream_resource_unittest.cc @@ -0,0 +1,77 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include +#include + +#include "runtime/rt.h" + +#define protected public +#define private public +#include "single_op/stream_resource.h" +#undef private +#undef protected + +using namespace std; +using namespace testing; +using namespace ge; + +class UtestStreamResource : public testing::Test { + protected: + void SetUp() {} + + void TearDown() {} + + rtStream_t stream; +}; + +TEST_F(UtestStreamResource, test_cache_op) { + StreamResource res; + auto *op = new SingleOp(); + string stub_name = "stubFunc"; + const void *key = stub_name.c_str(); + ASSERT_EQ(res.GetOperator(key), nullptr); + res.CacheOperator(key, op); + ASSERT_NE(res.GetOperator(key), nullptr); +} + +TEST_F(UtestStreamResource, test_malloc_memory) { + StreamResource res; + + ASSERT_NE(res.MallocMemory(100), nullptr); + ASSERT_NE(res.MallocMemory(100), nullptr); + ASSERT_NE(res.MallocMemory(100), nullptr); +} + +TEST_F(UtestStreamResource, test_do_malloc_memory) { + size_t max_allocated = 0; + vector allocated; + + uint8_t *ret = StreamResource::DoMallocMemory(100, max_allocated, allocated); + ASSERT_EQ(allocated.size(), 1); + ASSERT_NE(allocated.back(), nullptr); + ASSERT_EQ(max_allocated, 100); + + StreamResource::DoMallocMemory(50, max_allocated, allocated); + StreamResource::DoMallocMemory(99, max_allocated, allocated); + StreamResource::DoMallocMemory(100, max_allocated, allocated); + ASSERT_EQ(allocated.size(), 1); + ASSERT_EQ(max_allocated, 100); + + StreamResource::DoMallocMemory(101, max_allocated, allocated); + ASSERT_EQ(allocated.size(), 2); + ASSERT_EQ(max_allocated, 101); +} diff --git a/tests/ut/ge/test.cc b/tests/ut/ge/test.cc new file mode 100644 index 00000000..50a5a113 --- /dev/null +++ b/tests/ut/ge/test.cc @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include + +#include "common/debug/log.h" + +using namespace std; + +int main(int argc, char **argv) { + // init the logging + testing::InitGoogleTest(&argc, argv); + int ret = RUN_ALL_TESTS(); + + printf("finish ge ut\n"); + + return ret; +} diff --git a/tests/ut/runtest.sh b/tests/ut/runtest.sh new file mode 100644 index 00000000..ced742bb --- /dev/null +++ b/tests/ut/runtest.sh @@ -0,0 +1,30 @@ +#!/bin/bash +# Copyright 2019-2020 Huawei Technologies Co., Ltd +# +# Licensed under the Apache License, Version 2.0 (the "License"); +# you may not use this file except in compliance with the License. +# You may obtain a copy of the License at +# +# http://www.apache.org/licenses/LICENSE-2.0 +# +# Unless required by applicable law or agreed to in writing, software +# distributed under the License is distributed on an "AS IS" BASIS, +# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. +# See the License for the specific language governing permissions and +# limitations under the License. 
+# ============================================================================ + +set -e +BASEPATH=$(cd "$(dirname $0)"; pwd) +BUILD_PATH=$BASEPATH/../../build +OUTPUT_PATH=$BASEPATH/../../output + +echo $BUILD_PATH + +export LD_LIBRARY_PATH=/usr/local/HiAI/driver/lib64:/usr/local/HiAI/runtime/lib64:${BUILD_PATH}/graphengine/:${D_LINK_PATH}/x86_64/:${LD_LIBRARY_PATH} +echo ${LD_LIBRARY_PATH} +${OUTPUT_PATH}/ut_libgraph && +${OUTPUT_PATH}/ut_libge_multiparts_utest && +${OUTPUT_PATH}/ut_libge_distinct_load_utest && +${OUTPUT_PATH}/ut_libge_others_utest && +${OUTPUT_PATH}/ut_libge_kernel_utest diff --git a/third_party/fwkacllib/inc/aicpu/common/aicpu_task_struct.h b/third_party/fwkacllib/inc/aicpu/common/aicpu_task_struct.h new file mode 100644 index 00000000..caafd600 --- /dev/null +++ b/third_party/fwkacllib/inc/aicpu/common/aicpu_task_struct.h @@ -0,0 +1,30 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __AICPU_TASK_STRUCT_H__ +#define __AICPU_TASK_STRUCT_H__ + +#include + +namespace aicpu { + +struct AicpuParamHead { + uint32_t length; // Total length: include cunstom message + uint32_t ioAddrNum; // Input and output address number +} __attribute__((packed)); + +} // end namespace aicpu +#endif //__AICPU_TASK_STRUCT_H__ diff --git a/third_party/fwkacllib/inc/cce/aicpu_engine.h b/third_party/fwkacllib/inc/cce/aicpu_engine.h new file mode 100644 index 00000000..f561dc72 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/aicpu_engine.h @@ -0,0 +1,49 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __AICPU_ENGINE_H__ +#define __AICPU_ENGINE_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum { + AE_STATUS_SUCCESS = 0, + AE_STATUS_BAD_PARAM = 1, + AE_STATUS_OPEN_SO_FAILED = 2, + AE_STATUS_GET_KERNEL_NAME_FAILED = 3, + AE_STATUS_INNER_ERROR = 4, + AE_STATUS_KERNEL_API_INNER_ERROR = 5, + AE_STATUS_END_OF_SEQUENCE = 6, + AE_STATUS_DUMP_FAILED = 7, + AE_STATUS_RESERVED +} aeStatus_t; + +/** + * @ingroup aicpu engine + * @brief aeCallInterface: + * a interface to call a function in a op kernfel lib + * @param [in] addr void *, should be STR_KERNEL * format + * @return aeStatus_t + */ +aeStatus_t aeCallInterface(void *addr); + +#ifdef __cplusplus +} +#endif + +#endif diff --git a/third_party/fwkacllib/inc/cce/aicpu_engine_struct.h b/third_party/fwkacllib/inc/cce/aicpu_engine_struct.h new file mode 100644 index 00000000..b6dd1127 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/aicpu_engine_struct.h @@ -0,0 +1,46 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __AICPU_ENGINE_STRUCT_H__ +#define __AICPU_ENGINE_STRUCT_H__ + +#include "fwk_adpt_struct.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/* + The different framwork we adapted for. 
+*/ +typedef enum { + FMK_KERNEL_TYPE_T = 0, + FMK_KERNEL_TYPE_C = 10, + FMK_KERNEL_TYPE_P = 20, + FMK_KERNEL_TYPE_RESERVED +} FwkkernelType_t; + +typedef struct { + uint32_t fwkKernelType; // FwkkernelType_t + union { + ::aicpu::FWKAdapter::FWKOperateParam fwk_kernel; + } fwkKernelBase; +} __attribute__((packed)) STR_FWK_OP_KERNEL; + +#ifdef __cplusplus +} +#endif +#endif diff --git a/third_party/fwkacllib/inc/cce/blas_struct.h b/third_party/fwkacllib/inc/cce/blas_struct.h new file mode 100644 index 00000000..b490c30d --- /dev/null +++ b/third_party/fwkacllib/inc/cce/blas_struct.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CC_BLAS_STRUCT_API__ +#define __CC_BLAS_STRUCT_API__ + +#include + +typedef enum { CCBLAS_FILL_MODE_LOWER = 0, CCBLAS_FILL_MODE_UPPER = 1 } ccblasFillMode_t; + +typedef enum { + CCBLAS_OP_N = 0, + CCBLAS_OP_T = 1, +} ccblasOperation_t; + +typedef enum { CCBLAS_DIAG_NON_UNIT = 0, CCBLAS_DIAG_UNIT = 1 } ccblasDiagType_t; + +#endif /*__CC_BLAS_STRUCT_API__*/ diff --git a/third_party/fwkacllib/inc/cce/cce.h b/third_party/fwkacllib/inc/cce/cce.h new file mode 100644 index 00000000..4953b3ec --- /dev/null +++ b/third_party/fwkacllib/inc/cce/cce.h @@ -0,0 +1,101 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CCE_H__ +#define __CCE_H__ + +#include +#include "cce_def.hpp" + +namespace cce { + +/** + * @ingroup cce + * @brief create cc handler + * @param [in|out] handle point of cc handler + * @return ccStatus_t + */ +ccStatus_t ccCreate(ccHandle_t *handle); + +/** + * @ingroup cce + * @brief destroy cc handler + * @param [in] *handle cc handler + * @return ccStatus_t + */ +ccStatus_t ccDestroy(ccHandle_t *handle); + +/** + * @ingroup cce + * @brief bind stream with specified cc handler + * @param [in] handle cc handler + * @param [in] streamId stream + * @return ccStatus_t + */ +ccStatus_t ccSetStream(ccHandle_t handle, rtStream_t streamId); + +/** + * @ingroup cce + * @brief get the stream from cc handler + * @param [in] handle cc handler + * @param [in|out] streamId point of stream + * @return ccStatus_t + */ +ccStatus_t ccGetStream(ccHandle_t handle, rtStream_t *streamId); + +/** + * @ingroup cce + * @brief get the stream from cc handler + * @param [in] dataTypeTransMode mode of data type transform + * @param [in] inputData input data point + * @param [in] inputDataSize input data size + * @param [in|out] outputData output data point + * @param [in] outputDataSize output data size + * @return ccStatus_t + */ +ccStatus_t ccTransDataType(ccDataTypeTransMode_t dataTypeTransMode, const void *inputData, uint32_t inputDataSize, + void *outputData, const uint32_t outputDataSize); +/** + * @ingroup cce + * @brief cce sys init func + */ +void cceSysInit(); + +/** + * @ingroup cce + * @brief cce Log Start up func + */ +void cceLogStartup(); + +/** + * @ingroup cce + * @brief cce Log Shut down func + */ +void cceLogShutdown(); + +/** + * @ingroup cce + * @brief set the profiling on or off + * @param [in] const unsigned char* target: The engine gets it from ENV. Don't need care about it. + * @param const char* job_ctx: identifies profiling job + * @param [in] uint32_t flag: value: 0, on ; 1, off. + * @return ccStatus_t value: 0, success; 1, fail. 
+ */ +ccStatus_t CceProfilingConfig(const char *target, const char *job_ctx, uint32_t flag); + +}; // namespace cce + +#endif /* __CCE_H__ */ diff --git a/third_party/fwkacllib/inc/cce/cce_def.hpp b/third_party/fwkacllib/inc/cce/cce_def.hpp new file mode 100644 index 00000000..4dded1d7 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/cce_def.hpp @@ -0,0 +1,152 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CCE_DEF_H__ +#define __CCE_DEF_H__ + +#include "runtime/rt.h" + +namespace cce { + +/** + * @ingroup cce + * @brief memory configure for fusion + */ +typedef struct TagCceFusionMemCfg { + uint64_t memAddr; /**< memAddr */ + uint32_t memSize; /**< memSize */ + uint32_t addrChangeFlag; /**< op data addr change flag. 
value:0,valid;1,not valid */ + uint32_t poolFlag; /**< mempool flag : value:0,is valid; value: 1, not valid */ + TagCceFusionMemCfg() { + memAddr = 0; + memSize = 0; + addrChangeFlag = 0; + poolFlag = 0; + } +} CceFusionMemCfg_t; +/** + * @ingroup cce + * @brief return value + */ +typedef enum tagCcStatus { + CC_STATUS_SUCCESS = 0, /**< succ */ + CC_STATUS_NOT_INITIALIZED = 1, /**< not init */ + CC_STATUS_ALLOC_FAILED = 2, /**< alloc mem failed */ + CC_STATUS_BAD_PARAM = 3, /**< para check failed */ + CC_STATUS_INTERNAL_ERROR = 4, /**< internal error */ + CC_STATUS_KERNEL_ERROR = 5, /**< kernel error */ + CC_STATUS_RUNTIME_ERROR = 6, /**< runtime error */ + CC_STATUS_NOT_SUPPORTED = 7, /**< unsupport error */ + CC_STATUS_INVALID_VALUE = 7, /**< invalid value error for blas*/ + CC_STATUS_RESERVED /**< just for check */ +} ccStatus_t; + +/** + * @ingroup cce + * @brief original data type + */ +typedef enum tagCcDataType { + CC_DATA_FLOAT = 0, /**< float type */ + CC_DATA_HALF, /**< fp16 type */ + CC_DATA_INT8, /**< int8 type */ + CC_DATA_INT32, /**< int32 type */ + CC_DATA_UINT8, /**< uint8 type */ + CC_DATA_HALF_UINT16_PROPOSAL, /** +#include + +#define ERROR_CODE() __catch_error_code +#define ERROR_LINE_NO() __catch_error_line_no +#define ERROR_PROC() __catch_error_line_no = __LINE__; + +#define PROC \ + uint32_t __catch_error_code = 0x7FFFFFCC; \ + uint32_t __catch_error_line_no = 0xFFFFFFFF; \ + { +#define END_PROC \ + } \ + __tabErrorCode: +#define THROW(errcode) \ + { \ + __catch_error_code = (errcode); \ + ERROR_PROC(); \ + goto __tabErrorCode; \ + } +#define EXEC(func) \ + { \ + if (0 != (__catch_error_code = (func))) THROW(__catch_error_code) \ + } +#define EXEC_EX1(func, error_code) \ + { \ + if (0 != (func)) THROW(error_code) \ + } +#define EXEC_EX(func, succRet, error_code) \ + { \ + if (succRet != (__catch_error_code = (func))) THROW(error_code) \ + } +#define ASSERT_EXEC(func, succRet) \ + { \ + if (succRet != (__catch_error_code = (func))) 
/*GO_ASSERT_FALSE();*/ \ + THROW(__catch_error_code) \ + } \ + } +#define NEW_ERROR_EXEC(errcode, func, succRet) \ + { \ + if (succRet != (func)) { \ + THROW(errcode) \ + } \ + } +#define JUDGE(errcode, expr) \ + { \ + if (!(expr)) { \ + THROW(errcode) \ + } \ + } +#define ASSERT_JUDGE(errcode, expr) \ + { \ + if (!(expr)) { /*GO_ASSERT_FALSE();*/ \ + THROW(errcode) \ + } \ + } +#define JUDGE_FALSE(errcode, expr) \ + { \ + if (expr) { \ + THROW(errcode) \ + } \ + } +#define JUDGE_CONTINUE(expr) \ + { \ + if (expr) { \ + continue; \ + } \ + } +#define CATCH_ERROR(errcode) if (__catch_error_code == (errcode)) { // ERROR_LOG(); +#define CATCH_ALL_ERROR { +#define END_CATCH_ERROR } +#define FINAL \ + __tabFinal: +#define END_FINAL /*GO_ASSERT_FALSE()*/ ; +#define GOTO_FINAL() goto __tabFinal; +#endif /* __ATTRINFO_MAP_HPP__ */ diff --git a/third_party/fwkacllib/inc/cce/compiler_stub.h b/third_party/fwkacllib/inc/cce/compiler_stub.h new file mode 100644 index 00000000..a380d6b6 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/compiler_stub.h @@ -0,0 +1,36 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __COMPILER_STUB_H__ +#define __COMPILER_STUB_H__ + +namespace cce { + +/** + * @ingroup cce + * @brief compiler stub init func + */ +bool compilerStubInit(); + +/** + * @ingroup cce + * @brief compiler stub free func + */ +bool compilerStubFree(); + +}; // namespace cce + +#endif /* __COMPILER_STUB_H__ */ diff --git a/third_party/fwkacllib/inc/cce/customize.h b/third_party/fwkacllib/inc/cce/customize.h new file mode 100644 index 00000000..6766d987 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/customize.h @@ -0,0 +1,59 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CC_CUSTOMIZE_API__ +#define __CC_CUSTOMIZE_API__ +#include + +#define CC_DEVICE_DIM_MAX 8 +typedef enum tagOpTensorFormat +{ + OP_TENSOR_FORMAT_NC1HWC0 = 0, + OP_TENSOR_FORMAT_ND, + OP_TENSOR_FORMAT_RESERVED, + +} opTensorFormat_t; + + +typedef enum tagOpDataType +{ + OP_DATA_FLOAT = 0, /**< float type */ + OP_DATA_HALF, /**< fp16 type */ + OP_DATA_INT8, /**< int8 type */ + OP_DATA_INT32, /**< int32 type */ + OP_DATA_UINT8, /**< uint8 type */ + OP_DATA_HALF_UINT16_PROPOSAL, /**dimCnt, xDesc->dimCnt) + * @param [in] num the number of outputs + * @param [in] beta scaling factors + * @param [in] yDescArr descriptors of output tensors + * @param [in|out] yArr output data array in device memory + * @return ccStatus_t + */ +ccStatus_t ccSplitForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + int32_t axis, uint32_t num, const void *beta, const ccTensorDescriptor_t yDescArr[], + void *yArr[]); + +/** + * @ingroup dnn + * @brief get the output dimensions info of split + * @param [in] xDesc descriptor of input tensor + * @param [in] axis the dimension along which to split. Must be in the range [-xDesc->dimCnt, xDesc->dimCnt) + * @param [in] num the number of outputs + * @param [in] sizes Optional, used to specify the sizes of each output tensor along split dim. The tensor x would + * be split evenly along split dim if sizes is NULL + * @param [in|out] nArr point to the first element of batch sizes + * @param [in|out] cArr point to the first element of channels + * @param [in|out] hArr point to the first element of heights of feature map + * @param [in|out] wArr point to the first element of widths of feature map + * @return ccStatus_t + */ +ccStatus_t ccGetSplitForwardOutputDim(const ccTensorDescriptor_t xDesc, int32_t axis, uint32_t num, + const uint32_t sizes[], uint32_t nArr[], uint32_t cArr[], uint32_t hArr[], + uint32_t wArr[]); + +/** + * @ingroup dnn + * @brief Get split output shape(s). 
+ * @param [in] xDesc input tensor, support ND and NC1HWC0 + * @param [in] axis split axis, negtive axis will increased by dimCnt once time. + * @param [in] num splited nums. + * @param [in] sizes splited dim size on axis. if NULL was set, The input will be divided into num equally. + * @param [output] dimCnt splited dimCnt array. One to one correspondence with the splited output. + * @param [output] dim array of splited dim array. One to one correspondence with the splited output. + * @param [in| dimlen length of dim(Pass in the length of the entire space pointed to by dim, + not just the length of the dim array, because dim is a level 2 array + dimlen = lengthof dim[][], not just lengthof dim[]) + * @return ccStatus_t + */ +ccStatus_t ccGetSplitForwardOutputDim(const ccTensorDescriptor_t xDesc, int32_t axis, uint32_t num, + const uint32_t sizes[], int32_t *dimCnt, int32_t *dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief create weight compress info + * @param [in|out] compressInfo point to CompressInfo + * @return ccStatus_t + */ +ccStatus_t ccCreateWeightCompressInfo(ccWeightCompressInfo_t **compressInfo); + +/** + * @ingroup dnn + * @brief destory weight compress info + * @param [in] *compressInfo point to CompressInfo + * @return ccStatus_t + */ +ccStatus_t ccDestroyWeightCompressInfo(ccWeightCompressInfo_t **compressInfo); + +/** + * @ingroup dnn + * @brief create compress table + * @param [in|out] compressTab point to weight compress table + * @return ccStatus_t + */ +ccStatus_t ccCreateWeightCompressTab(ccWeightCompressTab_t **compressTab); + +/** + * @ingroup dnn + * @brief destory compress table + * @param [in] compressTab point to weight compress table + * @return ccStatus_t + */ +ccStatus_t ccDestroyWeightCompressTab(ccWeightCompressTab_t **compressTab); + +/** + * @ingroup dnn + * @brief get fc compress info + * @param [in] xDesc descriptor of input tensor + * @param [in] wDesc descriptor of weight tensor + * @param [in] biasDesc descriptor 
of bias tensor + * @param [in] dataTypeTransmode mode of data type transform + * @param [in] weightCompressInfo compress info, compute based on tiling method + * @param [in|out] outputSize output data size in byte + * @param [in|out] infoTabSize compress info table + * @return ccStatus_t + */ +ccStatus_t ccGetCompressedFcWeightInfo(const ccTensorDescriptor_t xDesc, const ccFilterDescriptor_t wDesc, + const ccTensorDescriptor_t biasDesc, ccDataTypeTransMode_t dataTypeTransmode, + ccWeightCompressInfo_t *weightCompressInfo, uint32_t *outputSize, + uint32_t *infoTabSize); +/** + * @ingroup dnn + * @brief compress fc + * @param [in] wDesc descriptor of weight tensor + * @param [in] w filter data in device memory + * @param [in] weightCompressInfo compress info, compute based on tiling method + * @param [in] dataTypeTransmode mode of data type transform + * @param [in|out] y output data in device memory + * @param [in] ySize transformed data size in byte + * @param [in|out] yCompressedSize compressed output data size in byte + * @param [in|out] infoTab compressed info table + * @param [in] infoTabSize compressed info table size in byte + * @return ccStatus_t + */ +ccStatus_t ccCompressWeight(const ccFilterDescriptor_t wDesc, const void *w, + const ccWeightCompressInfo_t *weightCompressInfo, ccDataTypeTransMode_t dataTypeTransmode, + ccFilterDescriptor_t yDesc, void *y, uint32_t ySize, uint32_t *yCompressedSize, + void *infoTab, uint32_t infoTabSize); + +/** + * @ingroup dnn + * @brief restore compressed fc data + * @param [in] x input data in device memory + * @param [in] xSizeInBytes input compressed weight data size in byte + * @param [in|out] y output data in device memory + * @param [in] ySizeInBytes output data size in byte + * @return ccStatus_t + */ +ccStatus_t ccRestoreCompressedWeight(const void *x, uint32_t xSizeInBytes, void *y, uint32_t ySizeInBytes, + rtMemcpyKind_t kind); + +/** + * @ingroup dnn + * @brief create quantize parameters struct + * @param 
[in|out] quantizeInfo descriptor of quantize parameters + * @return ccStatus_t + */ +ccStatus_t ccCreateQuantizeInfoTab(ccQuantizeDescriptor_t *quantizeInfo); + +/** + * @ingroup dnn + * @brief destroy quantize parameters struct + * @param [in] quantizeInfo descriptor of quantize parameters + * @return ccStatus_t + */ +ccStatus_t ccDestoryQuantizeInfoTab(ccQuantizeDescriptor_t *quantizeInfo); + +/** + * @ingroup dnn + * @brief set quantize parameters + * @param [in] quantizeInfo descriptor of quantize parameters + * @param [in] scaleValMode enmu type for quantize scale value type (normal or sqrt) + * @param [in] scale quantize scale value + * @param [in] offset quantize offset(when quantize algorithm is half offset or full offset,this should be + * configed) + * @param [in] offsetPad padding value for load3d (only for half offset or full offset) + * @return ccStatus_t + */ +ccStatus_t ccSetQuantizeFactors(ccQuantizeDescriptor_t quantizeInfo, ccScaleValueMode_t scaleValMode, + const uint16_t *scale, const uint16_t *offset, const uint8_t *offsetPad); + +/** + * @ingroup dnn + * @brief set Requantize parameters + * @param [in] quantizeInfo descriptor of quantize parameters + * @param [in] scaleValMode enmu type for requantize scale value type (normal or sqrt) + * @param [in] scale quantize scale value + * @param [in] offset quantize offset(when quantize algorithm is half offset or full offset,this should be + * configed) + * @param [in] offsetw offset for filter (only config for full offset quantize) + * @return ccStatus_t + */ +ccStatus_t ccSetReQuantizeFactors(ccQuantizeDescriptor_t quantizeInfo, ccScaleValueMode_t scaleValMode, + const uint16_t *scaleRq, const uint16_t *nextLayerOffset, const int32_t *offsetw); + +/** + * @ingroup dnn + * @brief set Dequantize parameters + * @param [in] quantizeInfo descriptor of quantize parameters + * @param [in] scaleValMode enmu type for dequantize scale value type (normal or sqrt) + * @param [in] scaleDq quantize scale value + 
* @param [in] offsetw offset for filter (only config for full offset quantize) + * @return ccStatus_t + */ +ccStatus_t ccSetDeQuantizeFactors(ccQuantizeDescriptor_t quantizeInfo, ccScaleValueMode_t scaleValMode, + const uint16_t *scaleDq, const int32_t *offsetw); + +/** + * @ingroup dnn + * @brief set convolution desciptor's quantize parameters + * @param [in] convDesc convolution descriptor + * @param [in] quantizeInfo descriptor of quantize parameters + * @return ccStatus_t + */ +ccStatus_t ccSetConvolutionQuantizeInfo(ccConvolutionDescriptor_t convDesc, const ccQuantizeDescriptor_t QuantizeInfo); + +/** + * @ingroup dnn + * @brief set convolution desciptor's all offset quantize parameters + * @param [in] convDesc convolution descriptor + * @param [in] offsetw descriptor of quantize parameters + * @param [in] scaleReq descriptor of quantize parameters + * @param [in] offset_d_next descriptor of quantize parameters + * @return ccStatus_t + */ +ccStatus_t ccSetAllOffsetQuantizeFactors(ccQuantizeDescriptor_t quantizeInfo, const uint8_t *offsetW, + const uint8_t *offsetD, const uint16_t *scaleReq, const uint16_t *offsetDNext); + +/** + * @ingroup dnn + * @brief set full connection desciptor's quantize parameters + * @param [in] fcDesc full connection descriptor + * @param [in] quantizeInfo descriptor of quantize parameters + * @return ccStatus_t + */ +ccStatus_t ccSetFullConnectionQuantizeInfo(ccFullConnectionDescriptor_t fcDesc, + const ccQuantizeDescriptor_t QuantizeInfo); + +/** + * @ingroup dnn + * @brief set pooling desciptor's quantize parameters + * @param [in] poolingDesc pooling descriptor + * @param [in] quantizeInfo descriptor of quantize parameters + * @return ccStatus_t + */ +ccStatus_t ccSetPoolingQuantizeInfo(ccPoolingDescriptor_t poolingDesc, const ccQuantizeDescriptor_t QuantizeInfo); + +/** + * @ingroup dnn + * @brief set full connection desciptor's info table + * @param [in] fcDesc full connection descriptor + * @param [in] infoTabSize table size + 
* @param [in] infoTab pointer to info table + * @return ccStatus_t + */ +ccStatus_t ccSetFullConnectionDescriptor(ccFullConnectionDescriptor_t fcDesc, uint32_t infoTabSize, const void *infoTab, + ccFullConnectFwdAlgo_t algo = CC_FULLCONNECT_FWD_ALGO_HALF); + +/** + * @ingroup dnn + * @brief set full connection desciptor's relu flag + * @param [in] fcDesc full connection descriptor + * @param [in] opType operation type for append at convolution operation + * @param [in] opDesc operation descritpor for the opType + * @return ccStatus_t + */ +ccStatus_t ccFullConnectionAppendOp(ccFullConnectionDescriptor_t fcDesc, tagCcOpType opType, const void *opDesc); + +/** + * @ingroup dnn + * @brief check aipp basic info + * @param [in] inputFormat format of input image + * @param [in] loadStartPosH vertical start position in source image + * @param [in] loadStartPosW horizontal start position in source image + * @param [in] srcImageSizeH vertical size of source image + * @param [in] srcImageSizeW horizontal size of source image + * @param [in] cpaddingValue C direction padding value + * @param [in] cscSwitch csc enable or not + * @param [in] rbuvSwapSwitch swap R/U and B/V position of the image + * @param [in] axSwapSwitch swap RGBA->ARGB, YUVA->AYUV + * @param [in] singleLineMode when set this bit to 1, only read 1 line. Under this case, vertical size configuration is + * not useful. 
+ * @return ccStatus_t + */ +ccStatus_t ccCheckConvolutionAippCommInfo(ccAippInputFormat_t inputFormat, int32_t loadStartPosW, int32_t loadStartPosH, + int32_t srcImageSizeW, int32_t srcImageSizeH, float cpaddingValue, + bool cscSwitch, bool rbuvSwapSwitch, bool axSwapSwitch, bool singleLineMode); + +/** + * @ingroup dnn + * @brief check aipp dtc info + * @param [in] dtcPixelMeanChnx Mean value for YUV or RGB data channel x + * @param [in] dtcPixelMinChnx Min value for YUV or RGB data channel x + * @param [in] dtcPixelVarReciChnx Reciprocal of variance or (max-min) for YUV or RGB data channel x + * @return ccStatus_t + */ +ccStatus_t ccCheckConvolutionAippDtcInfo(int32_t dtcPixelMeanChn0, int32_t dtcPixelMeanChn1, int32_t dtcPixelMeanChn2, + float dtcPixelMinChn0, float dtcPixelMinChn1, float dtcPixelMinChn2, + float dtcPixelVarReciChn0, float dtcPixelVarReciChn1, + float dtcPixelVarReciChn2); + +/** + * @ingroup dnn + * @brief check aipp pad info + * @param [in] paddingMode padding mode + * @param [in] leftPaddingSize left hblank/padding size + * @param [in] rightPaddingSize right hblank/padding size + * @param [in] topPaddingSize top padding size + * @param [in] bottomPaddingSize bottom padding size + * @return ccStatus_t + */ +ccStatus_t ccCheckConvolutionAippPadInfo(ccAippPaddingMode_t paddingMode, int32_t leftPaddingSize, + int32_t rightPaddingSize, int32_t topPaddingSize, int32_t bottomPaddingSize); + +/** + * @ingroup dnn + * @brief check aipp csc info + * @param [in] cscMatrixRmCn 3x3 CSC matrix for YUV to RGB or RGB to YUV, element of row m and column n + * @param [in] cscOutputBiasm output Bias for RGB to YUV, element of row m + * @param [in] cscInputBiasm input Bias for YUV to RGB, element of row m + * @return ccStatus_t + */ +ccStatus_t ccCheckConvolutionAippCscInfo(int32_t cscMatrixR0C0, int32_t cscMatrixR0C1, int32_t cscMatrixR0C2, + int32_t cscMatrixR1C0, int32_t cscMatrixR1C1, int32_t cscMatrixR1C2, + int32_t cscMatrixR2C0, int32_t cscMatrixR2C1, 
int32_t cscMatrixR2C2, + int32_t cscOutputBias0, int32_t cscOutputBias1, int32_t cscOutputBias2, + int32_t cscInputBias0, int32_t cscInputBias1, int32_t cscInputBias2); + +/** + * @ingroup dnn + * @brief check aipp scf info + * @param [in] scfSwitch scaling enable or not + * @param [in] scfInputW input width of scaling + * @param [in] scfInputH input height of scaling + * @param [in] scfOutputW output width of scaling + * @param [in] scfOutputH output height of scaling + * @return ccStatus_t + */ +ccStatus_t ccCheckConvolutionAippScfInfo(bool scfSwitch, int32_t scfInputW, int32_t scfInputH, int32_t scfOutputW, + int32_t scfOutputH); + +/** + * @ingroup dnn + * @brief check aipp param + * @param [in] convDesc descriptor of conv operator + * @param [in] xDesc input tensor info + * @param [in] yDesc output tensor info + * @return ccStatus_t + */ +ccStatus_t ccCheckConvFwdAippParam(const ccConvolutionDescriptor_t convDesc, const ccTensorDescriptor_t xDesc, + const ccTensorDescriptor_t yDesc); + +/** + * @ingroup dnn + * @brief init aipp basic info + * @param [in|out] convDesc descriptor of conv operator + * @param [in] inputFormat format of input image + * @param [in] loadStartPosH vertical start position in source image + * @param [in] loadStartPosW horizontal start position in source image + * @param [in] srcImageSizeH vertical size of source image + * @param [in] srcImageSizeW horizontal size of source image + * @param [in] cpaddingValue C direction padding value + * @param [in] cscSwitch csc enable or not + * @param [in] rbuvSwapSwitch swap R/U and B/V position of the image + * @param [in] axSwapSwitch swap RGBA->ARGB, YUVA->AYUV + * @param [in] singleLineMode when set this bit to 1, only read 1 line. Under this case, vertical size configuration is + * not useful. 
+ * @return ccStatus_t + */ +ccStatus_t ccSetConvolutionAippCommInfo(ccConvolutionDescriptor_t convDesc, ccAippInputFormat_t inputFormat, + int32_t loadStartPosW, int32_t loadStartPosH, int32_t srcImageSizeW, + int32_t srcImageSizeH, float cpaddingValue, bool cscSwitch, bool rbuvSwapSwitch, + bool axSwapSwitch, bool singleLineMode); +/** + * @ingroup dnn + * @brief init aipp dtc info + * @param [in|out] convDesc descriptor of conv operator + * @param [in] dtcPixelMeanChnx Mean value for YUV or RGB data channel x + * @param [in] dtcPixelMinChnx Min value for YUV or RGB data channel x + * @param [in] dtcPixelVarReciChnx Reciprocal of variance or (max-min) for YUV or RGB data channel x + * @return ccStatus_t + */ +ccStatus_t ccSetConvolutionAippDtcInfo(ccConvolutionDescriptor_t convDesc, int32_t dtcPixelMeanChn0, + int32_t dtcPixelMeanChn1, int32_t dtcPixelMeanChn2, float dtcPixelMinChn0, + float dtcPixelMinChn1, float dtcPixelMinChn2, float dtcPixelVarReciChn0, + float dtcPixelVarReciChn1, float dtcPixelVarReciChn2); +/** + * @ingroup dnn + * @brief init aipp pad info + * @param [in|out] convDesc descriptor of conv operator + * @param [in] paddingMode padding mode + * @param [in] leftPaddingSize left hblank/padding size + * @param [in] rightPaddingSize right hblank/padding size + * @param [in] topPaddingSize top padding size + * @param [in] bottomPaddingSize bottom padding size + * @return ccStatus_t + */ +ccStatus_t ccSetConvolutionAippPadInfo(ccConvolutionDescriptor_t convDesc, ccAippPaddingMode_t paddingMode, + int32_t leftPaddingSize, int32_t rightPaddingSize, int32_t topPaddingSize, + int32_t bottomPaddingSize); + +/** + * @ingroup dnn + * @brief init aipp csc info + * @param [in|out] convDesc descriptor of conv operator + * @param [in] cscMatrixRmCn 3x3 CSC matrix for YUV to RGB or RGB to YUV, element of row m and column n + * @param [in] cscOutputBiasm output Bias for RGB to YUV, element of row m + * @param [in] cscInputBiasm input Bias for YUV to RGB, element 
of row m + * @return ccStatus_t + */ +ccStatus_t ccSetConvolutionAippCscInfo(ccConvolutionDescriptor_t convDesc, int32_t cscMatrixR0C0, int32_t cscMatrixR0C1, + int32_t cscMatrixR0C2, int32_t cscMatrixR1C0, int32_t cscMatrixR1C1, + int32_t cscMatrixR1C2, int32_t cscMatrixR2C0, int32_t cscMatrixR2C1, + int32_t cscMatrixR2C2, int32_t cscOutputBias0, int32_t cscOutputBias1, + int32_t cscOutputBias2, int32_t cscInputBias0, int32_t cscInputBias1, + int32_t cscInputBias2); + +/** + * @ingroup dnn + * @brief init aipp scf info + * @param [in|out] convDesc descriptor of conv operator + * @param [in] scfSwitch scaling enable or not + * @param [in] scfInputW input width of scaling + * @param [in] scfInputH input height of scaling + * @param [in] scfOutputW output width of scaling + * @param [in] scfOutputH output height of scaling + * @return ccStatus_t + */ +ccStatus_t ccSetConvolutionAippScfInfo(ccConvolutionDescriptor_t convDesc, bool scfSwitch, int32_t scfInputW, + int32_t scfInputH, int32_t scfOutputW, int32_t scfOutputH); + +/** + * @ingroup dnn + * @brief set dynamic aipp parameter address and enflag info + * @param [in|out] convDesc descriptor of conv operator + * @param [in] dyncParaAddr aipp parameter address + * @param [in] dyncAippFlag flag to show whether to use dynamic aipp + * @return ccStatus_t + */ +ccStatus_t ccSetConvolutionAippDyncParaAddr(ccConvolutionDescriptor_t convDesc, const void *dyncParaAddr, + bool dyncAippFlag, bool rotationFlag = false); + +/** + * @ingroup dnn + * @brief check dynamic aipp parameter + * @param [in] dyncParaAddr aipp parameter address + * @param [in] dataLength parameter length + * @param [in] convolutionDimW convDimW + * @param [in] convolutionDimH convDimH + * @return ccStatus_t + */ +ccStatus_t ccCheckDynamicAippParam(const void *dynamicParamAddr, uint32_t dataLength, int64_t convolutionDimW, + int64_t convolutionDimH); + +/** @ingroup dnn + * @brief trans mean and var + * @param [in|out] mean' = bnScale/sqrt(var) + * 
@param [in|out] var' = -bnScale * mean / sqrt(var) + bnBias + * @return ccStatus_t + */ + +ccStatus_t ccTransBatchnormMeanAndVar(void *mean, void *var, const ccTensorDescriptor_t bnScaleBiasMeanVarDesc, + const void *alpha, const void *beta, void *bnScale, void *bnBias, double epsilon); + +/** + * @ingroup dnn + * @brief init deconvolution adj or targetShape info. + * @param [in] convDesc conv descriptor. + * @param [in] adjH, adjust H output. + * @param [in] adjW, adjust W output. + * @param [in] targetShape, values of output shape, if this pointer was set, ignore adj. + * @return ccStatus_t + */ +ccStatus_t ccSetDeconvolutionOutShapeInfo(ccConvolutionDescriptor_t convDesc, uint32_t adjSize, const uint32_t *adj, + uint32_t targetShapeSize, const uint32_t *targetShape); + +/** + * @ingroup dnn + * @brief gather elements according to the indices. + * @param [in] alpha reserved. + * @param [in] xDesc description of the tensor from which to gather elements. + * @param [in] x data point of the tensor from which to gather elements. + * @param [in] indicesDesc description of the tensor of indices. + * @param [in] indices data point of the tensor of indices. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccGatherNdForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t indicesDesc, const void *indices, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of gather_nd. + * @param [in] xDesc description of the tensor from which to gather elements. + * @param [in] indicesDesc description of the tensor of indices. + * @param [output] n dim-size of n-dim. + * @param [output] c dim-size of c-dim. + * @param [output] h dim-size of h-dim. + * @param [output] w dim-size of w-dim. 
+ * @param [output] realDimCnt real dim. + * @return ccStatus_t + */ +ccStatus_t ccGetGatherNdOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t indicesDesc, int32_t *n, + int32_t *c, int32_t *h, int32_t *w, int32_t *realDimCnt); +/** + * @ingroup dnn + * @brief get output shape of gather_nd. + * @param [in] xDesc description of the tensor from which to gather elements. + * @param [in] indicesDesc description of the tensor of indices. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. + * @param [in] dimLen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetGatherNdOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t indicesDesc, + int32_t *dimCnt, int32_t *dim, int32_t dimLen); +/** + * @ingroup dnn + * @brief tile tensor by multiples. + * @param [in] alpha reserved. + * @param [in] xDesc description of the tensor which to be tiled. + * @param [in] x data point of the tensor which to be tiled. + * @param [in] multiples tile coefficient of each dim. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccTileForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccIntArray_t *multiples, const void *beta, const ccTensorDescriptor_t outputDesc, + void *output); + +/** + * @ingroup dnn + * @brief get output shape of tile. + * @param [in] xDesc description of the dividend tensor. + * @param [in] multiples multiples of each dim. + * @param [in|out] dimCnt [point to the output dimCnt] + * @param [in|out] dim [arrays to save dims] + * @param [in] dimLen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetTileOutputDim(const ccTensorDescriptor_t xDesc, const ccIntArray_t *multiples, int32_t *dimCnt, + int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief get output shape of tile. 
+ * @param [in] xDesc description of the dividend tensor. + * @param [in] multiples multiples of each dim. + * @param [output] n dim-size of n-dim. + * @param [output] c dim-size of c-dim. + * @param [output] h dim-size of h-dim. + * @param [output] w dim-size of w-dim. + * @param [output] realDimCnt real dim. + * @return ccStatus_t + */ +ccStatus_t ccGetTileOutputDim(const ccTensorDescriptor_t xDesc, + // const ccIntArrayDescriptor_t multiples, + const ccIntArray_t *multiples, int32_t *n, int32_t *c, int32_t *h, int32_t *w, + int32_t *realDimCnt); +/** + * @ingroup dnn + * @brief get output shape of realdiv. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetRealdivOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief realdiv between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the dividend tensor. + * @param [in] x data point of the dividend tensor. + * @param [in] yDesc description of the divisor tensor. + * @param [in] y data point of the divisor tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccRealdivForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of realdiv. + * @param [in] xDesc description of the dividend tensor. + * @param [in] yDesc description of the divisor tensor. + * @param [output] n dim-size of n-dim. 
+ * @param [output] c dim-size of c-dim. + * @param [output] h dim-size of h-dim. + * @param [output] w dim-size of w-dim. + * @param [output] realDimCnt real dim. + * @return ccStatus_t + */ +ccStatus_t ccGetRealdivOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *n, + int32_t *c, int32_t *h, int32_t *w, int32_t *realDimCnt); + +/** + * @ingroup dnn + * @brief realdiv between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccFloordivForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of realdiv. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] realDimCnt real dim. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetFloordivOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief realdiv between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. 
+ * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccGreaterForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of realdiv. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetGreaterOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief realdiv between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccLessForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of realdiv. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. 
+ * @param [in] dimLen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetLessOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief get output shape of LogicalOr. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. + * @param [in] dimLen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetLogicalOrOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief get output shape of LogicalXor. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. + * @param [in] dimLen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetLogicalXorOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief logical not forward: + * data type only support bool + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccLogicalNotForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief equal between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. 
+ * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ + +ccStatus_t ccEqualForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief dump data during inference, only for eng ver. + * @param [in] handle cce handle + * @return ccStatus_t + */ +ccStatus_t ccDataDumpForward(ccHandle_t handle, const void *buffer, const uint64_t bufLen, const uint32_t taskIndex); + +/** + * @ingroup dnn + * @brief logicaland between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccLogicalAndForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief logical or between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. 
+ * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccLogicalOrForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); +/** + * @ingroup dnn + * @brief logical Xor between two tensors(x ^ y = (x | y) & ~(x & y). + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccLogicalXorForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of equal. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetEqualOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); +/** + * @ingroup dnn + * @brief get output shape of logicaland. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. 
+ * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetLogicalAndOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); +/** + * @ingroup dnn + * @brief realdiv between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ +ccStatus_t ccFloormodForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of realdiv. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetFloormodOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief compare between two tensors. + * @param [in] alpha reserved. + * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. 
+ * @return ccStatus_t + */ +ccStatus_t ccCompareForward(ccHandle_t handle, ccCompareType_t compareType, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const ccTensorDescriptor_t yDesc, + const void *y, const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of realdiv. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [output] dimCnt dim nums. + * @param [output] dim dim size. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetCompareOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief create descriptor of FillParam + * @param [in|out] fillParamDesc point to descriptor of fill param + * @return ccStatus_t + */ +ccStatus_t ccCreateFillParamDescriptor(ccFillParamDescriptor_t *fillParamDesc); + +/** + * @ingroup dnn + * @brief destroy descriptor of FillParam + * @param [in] *fillParamDesc point to descriptor of fill param + * @return ccStatus_t + */ +ccStatus_t ccDestroyFillParamDescriptor(ccFillParamDescriptor_t *fillParamDesc); + +/** + * @ingroup dnn + * @brief get output shape of broadcat operations. + * @param [in] inputNum input number of the operation tensors. + * @param [in] xDesc[] description of the input operation tensors list. + * @param [output] dimCnt dim-size of output tensor. + * @param [output] dim dim of output tensor. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetMultiNdBroadcastOpOutputDim(const int32_t inputNum, const ccTensorDescriptor_t xDesc[], int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief get output shape of maximultitensor. + * @param [in] inputNum the num of input operator tensors. + * @param [in] xDesc[] description of the input operator tensors list. 
+ * @param [output] dimCnt dim count of output tensor. + * @param [output] dim array of output tensor. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetMaxMultitensorOutputDim(const int32_t inputNum, const ccTensorDescriptor_t xDesc[], int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief get output shape of minmultitensor. + * @param [in] inputNum the num of input operator tensors. + * @param [in] xDesc[] description of the input operator tensors list. + * @param [output] dimCnt dim count of output tensor. + * @param [output] dim array of output tensor. + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetMinMultitensorOutputDim(const int32_t inputNum, const ccTensorDescriptor_t xDesc[], int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief MaxMultitensor forward: + * data type only support float float16 and int32 + * data format only support ND + * @param [in] handle cce handle + * @param [in] inputNum input tensor number + * @param [in] alpha common scale factor + * @param [in] xDesc[] descriptor of input tensors list + * @param [in] x[] input data in device memory list + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccMaxMultitensorForward(const ccHandle_t handle, const int32_t inputNum, const void *alpha, + const ccTensorDescriptor_t xDesc[], const void *x[], const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief MinMultitensor forward: + * data type only support float float16 and int32 + * data format only support ND + * @param [in] handle cce handle + * @param [in] inputNum input tensor number + * @param [in] alpha common scale factor + * @param [in] xDesc[] descriptor of input data list + * @param [in] x[] input data in device memory list + * 
@param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccMinMultitensorForward(const ccHandle_t handle, const int32_t inputNum, const void *alpha, + const ccTensorDescriptor_t xDesc[], const void *x[], const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief create descriptor of StridedSlice + * @param [in|out] stridedSliceDesc point to descriptor of StridedSlice param + * @return ccStatus_t + */ +ccStatus_t ccCreateStridedSliceDescriptor(ccStridedSliceDescriptor_t *stridedSliceDesc); + +/** + * @ingroup dnn + * @brief destroy descriptor of StridedSlice + * @param [in] *stridedSliceDesc point to descriptor of StridedSlice param + * @return ccStatus_t + */ +ccStatus_t ccDestroyStridedSliceDescriptor(ccStridedSliceDescriptor_t *stridedSliceDesc); + +/** + * @ingroup dnn + * @brief init stridedSlice descriptor_t. 
+ * @param [out] stridedSliceDesc struct of stridedslice param
+ * @param [in] dimCnt dimension of the input tensor
+ * @param [in] begin slice begin(include)
+ * @param [in] end slice end index(not include)
+ * @param [in] strides slice stride
+ * @return ccStatus_t
+ */
+ccStatus_t ccSetStridedSliceDescriptor(ccStridedSliceDescriptor_t stridedSliceDesc, int32_t dimCnt, int32_t begin[],
+                                       int32_t end[], int32_t strides[]);
+
+/**
+ * @ingroup dnn
+ * @brief create descriptor of StridedSlice
+ * @param [in|out] attrDesc point to descriptor of StridedSlice attr
+ * @return ccStatus_t
+ */
+ccStatus_t ccCreateStridedSliceAttrsDescriptor(ccStridedSliceAttrsDescriptor_t *attrDesc);
+
+/**
+ * @ingroup dnn
+ * @brief destroy descriptor of StridedSlice
+ * @param [in] *attrDesc point to descriptor of StridedSlice attr
+ * @return ccStatus_t
+ */
+ccStatus_t ccDestroyStridedSliceAttrsDescriptor(ccStridedSliceAttrsDescriptor_t *attrDesc);
+
+/**
+ * @ingroup dnn
+ * @brief init stridedSlice mask attrs descriptor.
+ * @param [out] attrDesc struct of stridedslice mask attrs
+ * @param [in] beginMask begin mask
+ * @param [in] endMask end mask
+ * @param [in] ellipsisMask ellipsis mask
+ * @param [in] newAxisMask new axis mask
+ * @param [in] shrinkAxisMask shrink axis mask
+ * @return ccStatus_t
+ */
+ccStatus_t ccSetStridedSliceAttrsDescriptor(ccStridedSliceAttrsDescriptor_t attrDesc, int32_t beginMask,
+                                            int32_t endMask, int32_t ellipsisMask, int32_t newAxisMask,
+                                            int32_t shrinkAxisMask);
+
+/**
+ * @ingroup dnn
+ * @brief Extracts a strided slice of a tensor.
+ * @param [in] xDesc descriptor of input data
+ * @param [in] stridedSliceDesc specifies the begin, end, strides of slice
+ * @param [in] attrDesc reserve for optional attributes. 
+ * @param [out] n point to n size
+ * @param [out] c point to c size
+ * @param [out] h point to h size
+ * @param [out] w point to w size
+ * @param [out] realDimCnt real dim count
+ * @return ccStatus_t
+ */
+ccStatus_t ccGetStridedSliceOutputDim(const ccTensorDescriptor_t xDesc,
+                                      const ccStridedSliceDescriptor_t stridedSliceDesc,
+                                      const ccStridedSliceAttrsDescriptor_t attrDesc, int32_t *n, int32_t *c,
+                                      int32_t *h, int32_t *w, int32_t *realDimCnt);
+
+/**
+ * @ingroup dnn
+ * @brief Extracts a strided slice of a tensor.
+ * @param [in] handle cce handle
+ * @param [in] stridedSliceDesc specifies the begin, end, strides of slice
+ * @param [in] attrDesc reserve for optional attributes.
+ * @param [in] alpha common scale factor
+ * @param [in] xDesc descriptor of input data
+ * @param [in] x input data in device memory
+ * @param [in] beta common scale factor
+ * @param [in] yDesc descriptor of output data
+ * @param [in|out] y output data in device memory
+ * @return ccStatus_t
+ */
+ccStatus_t ccStridedSliceForward(ccHandle_t handle, const ccStridedSliceDescriptor_t stridedSliceDesc,
+                                 const ccStridedSliceAttrsDescriptor_t attrDesc, const void *alpha,
+                                 const ccTensorDescriptor_t xDesc, const void *x, const void *beta,
+                                 const ccTensorDescriptor_t yDesc, void *y);
+
+/**
+ * @ingroup dnn
+ * @brief get output description of slice tensor.
+ * @param [in] xDesc descriptor of input data
+ * @param [in] begin begin position of tensor
+ * @param [in] size size to slice
+ * @param [out] n point to n size
+ * @param [out] c point to c size
+ * @param [out] h point to h size
+ * @param [out] w point to w size
+ * @param [out] realDimCnt realdim count
+ * @return ccStatus_t
+ */
+ccStatus_t ccGetSliceOutputDim(const ccTensorDescriptor_t xDesc, const ccIntArray_t *begin, const ccIntArray_t *size,
+                               int32_t *n, int32_t *c, int32_t *h, int32_t *w, int32_t *realDimCnt);
+
+/**
+ * @ingroup dnn
+ * @brief slice of a tensor. 
+ * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] begin begin position of tensor + * @param [in] size size to slice + * @param [in] beta common scale factor + * @param [in] yDesc descriptor of output data + * @param [in|out] y output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccSliceForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccIntArray_t *begin, const ccIntArray_t *size, const void *beta, + const ccTensorDescriptor_t yDesc, void *y); + +/** + * @ingroup dnn + * @brief gather forward computation + * @param [in] handle cce handle + * @param [in] paramsDesc descriptor of params tensor + * @param [in] params input data in device memory + * @param [in] indicesDesc descriptor of indices tensor + * @param [in] indices indices data in device memory + * @param [in] axis descriptor of roi tensor + * @param [in] alpha reserved + * @param [in] beta reserved + * @param [in] outputDesc descriptor of output tensor + * @param [out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccGatherForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t paramsDesc, + const void *params, const ccTensorDescriptor_t indicesDesc, const void *indices, + const int32_t axis, const void *beta, ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief gather output dim computation, for NC1HWC0 + * @param [in] paramsDesc descriptor of params tensor + * @param [in] indicesDesc descriptor of indices tensor + * @param [in] axis descriptor of roi tensor + * @param [out] n dim of n + * @param [out] c dim of c + * @param [out] h dim of h + * @param [out] w dim of w + * @param [out] realDimCnt real dim count + * @return ccStatus_t + */ +ccStatus_t ccGetGatherOutputDim(const ccTensorDescriptor_t paramsDesc, const 
ccTensorDescriptor_t indicesDesc, + int32_t axis, int32_t *n, int32_t *c, int32_t *h, int32_t *w, int32_t *realDimCnt); + +/** + * @ingroup dnn + * @brief gather output dim computation + * @param [in] paramsDesc descriptor of params tensor + * @param [in] indicesDesc descriptor of indices tensor + * @param [in] axis descriptor of roi tensor + * @param [out] dimCnt dimcnt of output + * @param [out] dim dim of output + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetGatherOutputDim(const ccTensorDescriptor_t paramsDesc, const ccTensorDescriptor_t indicesDesc, + int32_t axis, int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief exp forward computation + * @param [in] handle cce handle + * @param [in] expDesc descriptor of expParam + * @param [in] expParam a ternary array + * @param [in] alpha reserved parameter + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta reserved parameter + * @param [in] yDesc descriptor of output tensor + * @param [out] y output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccExpForward(ccHandle_t handle, const ccExpDescriptor_t expDesc, const void *expParam, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t yDesc, void *y); + +/** + * @ingroup dnn + * @brief expm1 forward: + * data type only support float float16 and double + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccExpm1Forward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const 
ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief log1p forward: + * data type only support float float16 and double + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccLog1pForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief init descriptor for parameter of exp function + * @param [in|out] powDesc descriptor of tensor + * @param [in] dataType data type in device + * @param [in] paramCnt number of parameters + * @return ccStatus_t + */ +ccStatus_t ccSetExpDescriptor(ccExpDescriptor_t expDesc, ccDataType_t dataType, uint32_t paramCnt); + +/** + * @ingroup dnn + * @brief exp forward computation + * @param [in] handle cce handle + * @param [in] logDesc descriptor of logParam + * @param [in] logParam a ternary array + * @param [in] alpha reserved parameter + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta reserved parameter + * @param [in] yDesc descriptor of output tensor + * @param [in] y output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccLogForward(ccHandle_t handle, const ccLogDescriptor_t logDesc, const void *logParam, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t yDesc, void *y); + +/** + * @ingroup dnn + * @brief init descriptor for parameter of log function + * @param [in|out] logDesc descriptor of tensor + * @param [in] dataType data type in device + * @param [in] paramCnt number of 
parameters + * @return ccStatus_t + */ +ccStatus_t ccSetLogDescriptor(ccLogDescriptor_t logDesc, ccDataType_t dataType, uint32_t paramCnt); + +/** + * @ingroup dnn + * @brief pow forward computation + * @param [in] handle cce handle + * @param [in] powDesc descriptor of logParam + * @param [in] powParam a ternary array + * @param [in] alpha reserved parameter + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta reserved parameter + * @param [in] yDesc descriptor of input tensor + * @param [in] y input data in device memory + * @param [in] zDesc descriptor of output tensor + * @param [out] z output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccPowForward(ccHandle_t handle, const ccPowDescriptor_t powDesc, const void *powParam, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const ccTensorDescriptor_t yDesc, + const void *y, const void *beta, const ccTensorDescriptor_t zDesc, void *z); + +/** + * @brief init descriptor for parameter of pow function + * @param [in|out] powDesc descriptor of tensor + * @param [in] dataType data type in device + * @param [in] paramCnt number of parameters + * @return ccStatus_t + */ +ccStatus_t ccSetPowDescriptor(ccPowDescriptor_t powDesc, ccDataType_t dataType, uint32_t paramCnt); + +/** + * @ingroup dnn + * @brief non max suppression forward. 
+ * @param [in] handle cce handle
+ * @param [in] nonmaxParaDesc descriptor of para
+ * @param [in] nonmaxPara input para in host memory
+ * @param [in] maxoutputsize input para in host memory
+ * @param [in] alpha common scale factor
+ * @param [in] boxesDesc descriptor of input data boxes
+ * @param [in] boxes input data boxes in device memory
+ * @param [in] scoresDesc descriptor of input data scores
+ * @param [in] scores input data scores in device memory
+ * @param [in] workSpaceSizeInBytes workspace size
+ * @param [in] workSpace input workspace in device memory
+ * @param [in] beta common scale factor
+ * @param [in] outputDesc descriptor of output data
+ * @param [in|out] output output data in device memory
+ * @return ccStatus_t
+ */
+ccStatus_t ccNonMaxSuppressionForward(ccHandle_t handle, const ccNonMaxSuppressionDescriptor_t nonmaxParaDesc,
+                                      const void *nonmaxPara, const int *maxoutputsize, const void *alpha,
+                                      const ccTensorDescriptor_t boxesDesc, const void *boxes,
+                                      const ccTensorDescriptor_t scoresDesc, const void *scores,
+                                      const uint32_t workSpaceSizeInBytes, void *workSpace, const void *beta,
+                                      const ccTensorDescriptor_t outputDesc, void *output);
+/**
+ * @brief init descriptor for parameter of NonMaxSuppression function
+ * @param [in|out] nonMaxSuppressionDesc descriptor of tensor
+ * @param [in] dataType data type in device
+ * @param [in] paramCnt number of parameters
+ * @return ccStatus_t
+ */
+ccStatus_t ccSetNonMaxSuppressionDescriptor(ccNonMaxSuppressionDescriptor_t nonMaxSuppressionDesc,
+                                            ccDataType_t dataType, uint32_t paramCnt);
+
+/**
+ * @ingroup dnn
+ * @brief get the output dimension info of resizeBilinear op. 
+ * @param [in] xDesc descriptor of input data
+ * @param [in] resizeBilinearDesc descriptor of resize_bilinear operator
+ * @param [out] dimCnt dim count of output tensor
+ * @param [out] dim[] dim of output
+ * @param [in] dimLen length of dim
+ * @return ccStatus_t
+ */
+ccStatus_t ccGetResizeBilinearOutputDim(const ccTensorDescriptor_t xDesc,
+                                        const ccResizeBilinearDescriptor_t resizeBilinearDesc, int32_t *dimCnt,
+                                        int32_t dim[], int32_t dimLen);
+
+/**
+ * @ingroup dnn
+ * @brief get the output dimension info of interp op.
+ * @param [in] xDesc descriptor of input data
+ * @param [in] resizeBilinearDesc descriptor of resize_bilinear operator
+ * @param [out] dimCnt dim count of output tensor
+ * @param [out] dim[] dim of output
+ * @param [in] dimLen length of dim
+ * @return ccStatus_t
+ */
+ccStatus_t ccGetInterpOutputDim(const ccTensorDescriptor_t xDesc, const ccResizeBilinearDescriptor_t resizeBilinearDesc,
+                                int32_t *dimCnt, int32_t dim[], int32_t dimLen);
+/**
+ * @ingroup dnn
+ * @brief resize bilinear forward for t network.
+ * @param [in] handle cce handle
+ * @param [in] resizeBilinearDesc descriptor of resize_bilinear operator
+ * @param [in] alpha common scale factor
+ * @param [in] xDesc descriptor of input data
+ * @param [in] x input data in device memory
+ * @param [in] beta common scale factor
+ * @param [in] outputDesc descriptor of output data
+ * @param [in|out] output output data in device memory
+ * @return ccStatus_t
+ */
+ccStatus_t ccResizeBilinearForward(ccHandle_t handle, const ccResizeBilinearDescriptor_t resizeBilinearDesc,
+                                   const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, const void *beta,
+                                   const ccTensorDescriptor_t outputDesc, void *output);
+
+/**
+ * @ingroup dnn
+ * @brief resize bilinear forward for c network. 
+ * @param [in] handle cce handle + * @param [in] resizeBilinearDesc descriptor of resize_bilinear operator + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] yDesc descriptor of output data + * @param [in|out] y output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccInterpForward(ccHandle_t handle, const ccResizeBilinearDescriptor_t resizeBilinearDesc, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief create descriptor of ResizeBilinear + * @param [in|out] resizeBilinearDesc point to descriptor of resizeBilinear attr + * @return ccStatus_t + */ +ccStatus_t ccCreateResizeBilinearDescriptor(ccResizeBilinearDescriptor_t *resizeBilinearDesc); + +/** + * @ingroup dnn + * @brief destroy descriptor of Interp + * @param [in|out] resizeBilinearDesc point to descriptor of resizeBilinear attr + * @return ccStatus_t + */ +ccStatus_t ccDestroyResizeBilinearDescriptor(ccResizeBilinearDescriptor_t *resizeBilinearDesc); + +/** + * @ingroup dnn + * @brief set descriptor of resizeBilinear. 
+ * @param [in|out] resizeBilinearDesc descriptor of resize_bilinear operator + * @param [in] resizeOutputDimMode way to decide output dimensions + * @param [in] alignCorners whether the centers of input and output are aligned + * @param [in] zoom_factor zoom factor + * @param [in] shrink_factor shrink factor + * @param [in] height height of output + * @param [in] width width of output + * @param [in] pad_begin padding at begin of input + * @param [in] pad_end padding at end of input + * @return ccStatus_t + */ +ccStatus_t ccSetResizeBilinearDescriptor(ccResizeBilinearDescriptor_t resizeBilinearDesc, + ccResizeOutputDimMode_t resizeOutputDimMode, bool alignCorners, + int32_t zoom_factor, int32_t shrink_factor, int32_t height, int32_t width, + int32_t pad_begin, int32_t pad_end); + +/** + * @ingroup dnn + * @brief fill forward computation + * @param [in] handle cce handle + * @param [in] fillParamDesc descriptor of fill parameter + * @param [in] alpha reserved + * @param [in] givenDesc descriptor of given tensor + * @param [in] givenData given data in device memory + * @param [in] workspace space for fill algorithm + * @param [in] workSpaceSizeInBytes space size in byte + * @param [in] beta reserved + * @param [in] outputDesc descriptor of output tensor + * @param [out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccFillForward(ccHandle_t handle, const ccFillParamDescriptor_t fillParamDesc, const void *alpha, + const ccTensorDescriptor_t givenDesc, const void *givenData, const void *workspace, + const uint32_t workSpaceSizeInBytes, const void *beta, const ccTensorDescriptor_t outputDesc, + void *output); + +/** + * @ingroup dnn + *[ccGetFillWorkspaceSize] + *@param fillType [fill type] + *@param givenDesc [given tensor descriptor] + *@param xDesc [input tensor descriptor] + *@param sizeInBytes [output size] + *@return ccStatus_t [status] + */ +ccStatus_t ccGetFillWorkspaceSize(const ccFillOpType_t fillType, const ccTensorDescriptor_t 
xDesc, + uint32_t *sizeInBytes); + +/** + *[ccCast] + *@param handle [cce handler] + *@param alpha [alpha] + *@param xDesc [tensor Description of tensor x] + *@param x [input tensor x] + *@param beta [beta + *@param yDesc [tensor Description of tensor y] + *@param y [output tensor y] + *@return ccStatus_t [status] + */ +ccStatus_t ccCast(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t yDesc, void *y); + +/** + * @ingroup dnn + * @brief round forward: + * data type only support float float16 and int32 + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccRoundForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief rint forward: + * data type only support float float16 + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccRintForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief sqrt forward: + * data type only support float float16 + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common 
scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccSqrtForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + *[ccCast] + *@param filterSrcInfo [cce filtersrc descriptor] + *@param filterSrc [filterSrc address] + *@param filterDstInfo [cce filterdst descriptor] + *@param filterDst [filterdst address] + *@param group [group] + *@param ySizeInBytes [fraczfilter size] + *@param outputDataType [datatype] + *@return ccStatus_t [status] + */ +ccStatus_t ccTransGroupConvFilterInt8(ccFilterDescriptor_t filterSrcInfo, const void *filterSrc, + ccFilterDescriptor_t filterDstInfo, void *filterDst, uint32_t group, + uint32_t ySizeInBytes, ccDataType_t outputDataType); + +/** + *[ccGetConcatOutputDim] + *@param xDesc[] [input tensor descriptor] + *@param axis [concat axis] + *@param inputNum [input tensor numbers] + *@param dim[] [output dim] + *@param [in| dimlen length of dim + *@return ccStatus_t [status] + */ +ccStatus_t ccGetConcatOutputDim(const ccTensorDescriptor_t xDesc[], int32_t axis, int32_t inputNum, int32_t *dimCnt, + int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief get the output dimension info of reduce. + * @param [in] xDesc descriptor of input tensor + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. 
+ * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetReduceOutputDim(const ccTensorDescriptor_t xDesc, const ccIntArray_t *axis, bool keepDims, + int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief reduce sum forward computation + * @param [in] handle cce handle + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReduceSumForward(ccHandle_t handle, const ccIntArray_t *axis, bool keepDims, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief reduce max forward computation + * @param [in] handle cce handle + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. 
+ * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReduceMaxForward(ccHandle_t handle, const ccIntArray_t *axis, bool keepDims, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief reduce min forward computation + * @param [in] handle cce handle + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReduceMinForward(ccHandle_t handle, const ccIntArray_t *axis, bool keepDims, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief reduce mean forward computation + * @param [in] handle cce handle + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. 
+ * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReduceMeanForward(ccHandle_t handle, const ccIntArray_t *axis, bool keepDims, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief reduce prod forward computation + * @param [in] handle cce handle + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReduceProdForward(ccHandle_t handle, const ccIntArray_t *axis, bool keepDims, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief reduce all forward computation + * @param [in] handle cce handle + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. 
+ * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReduceAllForward(ccHandle_t handle, const ccIntArray_t *axis, bool keepDims, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + *@brief print times stats + *@return ccStatus_t [status] + */ +ccStatus_t ccPrintTimeStat(); + +/** + * @ingroup dnn + * @brief reduce abs sum forward computation + * @param [in] handle cce handle + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReduceAbsSumForward(ccHandle_t handle, const ccIntArray_t *axis, const bool keepDims, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief reduce square sum forward computation + * @param [in] handle cce handle + * @param [in] axis The dimensions to reduce + * @param [in] keepDims If true, retains reduced dimensions with length 1. 
+ * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReduceSquareSumForward(ccHandle_t handle, const ccIntArray_t *axis, const bool keepDims, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get the output dimension info of crop and resize + * @param [in] imageDesc descriptor of images + * @param [in] boxesDesc descriptor of boxes + * @param [in] boxidxDesc descriptor of boxidx + * @param [in] resizeHeight resize height + * @param [in] resizeWidth resize width + * @param [out] dimCnt dimcnt of output + * @param [out] dim dim of output + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetCropAndResizeOutputDim(const ccTensorDescriptor_t imageDesc, const ccTensorDescriptor_t boxesDesc, + const ccTensorDescriptor_t boxidxDesc, const int32_t resizeHeight, + const int32_t resizeWidth, int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief crop and resize forward. 
+ * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] imageDesc descriptor of images + * @param [in] image input data in device memory + * @param [in] boxesDesc descriptor of boxes + * @param [in] boxes input data in device memory + * @param [in] boxidxDesc descriptor of boxidx + * @param [in] boxidx input data in device memory + * @param [in] method enum of resize method + * @param [in] extrapolationValue Value used for extrapolation, when applicable + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccCropAndResizeForward(ccHandle_t handle, const ccResizeMethod_t method, const float extrapolationValue, + const void *alpha, const ccTensorDescriptor_t imageDesc, const void *image, + const ccTensorDescriptor_t boxesDesc, const void *boxes, + const ccTensorDescriptor_t boxidxDesc, const void *boxidx, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief select forward computation + * @param [in] handle cce handle + * @param [in] alpha reserved + * @param [in] condDesc descriptor of cond tensor + * @param [in] cond cond data in device memory + * @param [in] xDesc descriptor of x tensor + * @param [in] x x data in device memory + * @param [in] yDesc descriptor of y tensor + * @param [in] y y data in device memory + * @param [in] beta reserved + * @param [in] outputDesc descriptor of output tensor + * @param [out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccSelect(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t condDesc, const void *cond, + const ccTensorDescriptor_t xDesc, const void *x, const ccTensorDescriptor_t yDesc, const void *y, + const void *beta, const ccTensorDescriptor_t outDesc, void *out); + +/** + * @ingroup dnn + * @brief get the output dimension info of where + * @param [in] 
xDesc descriptor of input tensor + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @return ccStatus_t + */ +ccStatus_t ccGetWhereOutputDim(const ccTensorDescriptor_t xDesc, int32_t *dimCnt, int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief where forward computation + * @param [in] handle cce handle + * @param [in] alpha reserved + * @param [in] condDesc descriptor of cond tensor + * @param [in] cond cond data in device memory + * @param [in] xDesc descriptor of x tensor + * @param [in] x x data in device memory + * @param [in] yDesc descriptor of y tensor + * @param [out] y y data in device memory + * @return ccStatus_t + */ +ccStatus_t ccWhere(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t yDesc, void *y); + +/** + * @ingroup dnn + * @brief reverse forward. + * @param [in] handle cce handle + * @param [in] axis dim that need reverse + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReverseForward(ccHandle_t handle, const ccIntArray_t *axis, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief floor forward: + * data type only support float float16 + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + 
*/ +ccStatus_t ccFloorForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief ceil forward: + * data type only support float float16 + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccCeilForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get the output dimension info of truncate mod + * @param [in] xDesc descriptor of input tensor + * @param [in] yDesc descriptor of input tensor + * @param [out] dimCnt [dim count of the output tensor] + * @param [out] dim[] [shape of the output tensor] + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetTruncatemodOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, + int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief truncate mod forward computation + * @param [in] handle cce handle + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] yDesc descriptor of input tensor + * @param [in] y input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccTruncatemodForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const 
ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); +/** + * @ingroup dnn + * @brief Spatial Pyramid Pooling + * @param [in] handle cce handle + * @param [in] alpha reserved + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] workspace temp workspace + * @param [in] workspaceSizeInBytes temp workspace size + * @param [in] pyramidHeight pyramid height + * @param [in] poolingMode pooling mode + * @param [in] beta reserved + * @param [in] outputDesc descriptor of output tensor + * @param [out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccSPPForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + void *workspace, const uint32_t workspaceSizeInBytes, const uint32_t pyramidHeight, + const ccPoolingMode_t poolingMode, const void *beta, const ccTensorDescriptor_t outputDesc, + void *output); +/** + * @ingroup dnn + * @brief Get Spatial Pyramid Pooling output dim + * @param [in] xDesc descriptor of input tensor + * @param [in] pyramidHeight pyramid height + * @param [in] dimLen length of dim + * @param [out] dimCnt output tensor dim cnt + * @param [out] dim output tensor dim + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetSPPOutputDim(const ccTensorDescriptor_t xDesc, const uint32_t pyramidHeight, int32_t *dimCnt, + int32_t dim[], const int32_t dimLen); +/** + * @ingroup dnn + * @brief Get Spatial Pyramid Pooling workspace size + * @param [in] xDesc descriptor of input tensor + * @param [in] pyramidHeight pyramid height + * @param [out] workspaceSizeInBytes workspace size + * @return ccStatus_t + */ +ccStatus_t ccGetSPPWorkspaceSize(const ccTensorDescriptor_t xDesc, const uint32_t pyramidHeight, + uint32_t *workspaceSizeInBytes); + +/** + * @ingroup dnn + * @brief BNLL forward computation + * @param [in] handle cce handle + * @param [in] alpha 
scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccBNLLForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief bias forward. + * @param [in] handle cce handle + * @param [in] axis axis + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data x + * @param [in] x input data x in device memory + * @param [in] biasDesc descriptor of input data bias + * @param [in] bias input data bias in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccBiasForward(ccHandle_t handle, const int axis, const void *alpha, const ccTensorDescriptor_t xDesc, + const void *x, const ccTensorDescriptor_t biasDesc, const void *bias, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief threshold forward computation + * @param [in] handle cce handle + * @param [in] threshold threshold + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccThresholdForward(ccHandle_t handle, const void *threshold, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief shufflechannel forward. 
+ * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] group number of groups + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +// TODO AICPU: please add shufflechannel custom params and comment +ccStatus_t ccShuffleChannelForward(ccHandle_t handle, const void *alpha, uint32_t group, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief mvn forward. + * @param [in] handle cce handle + * @param [in] acrossChannel across channel. true: across, false: not + * @param [in] normalizeVariance normalizeVariance. true: normalizeVariance, false: not + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccMVNForward(ccHandle_t handle, bool acrossChannel, bool normalizeVariance, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, void *workSpace, uint32_t workSpaceSizeInBytes, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get the workspace size of mvn + * @param [in] xDesc descriptor of input data + * @param [in] acrossChannel across channel. 
true: across, false: not + * @param [in|out] sizeInBytes Workspace size need for whole computation + */ +ccStatus_t ccGetMVNWorkspaceSize(const ccTensorDescriptor_t xDesc, bool acrossChannel, uint32_t *sizeInBytes); + +/** + * @ingroup dnn + * @brief heatmap2coord forward output is hotspot value and corresponding coordinates + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] coordh calibration high + * @param [in] coordw calibration wide + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccHeatmap2coordForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + int32_t coordh, int32_t coordw, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); +/** + * @ingroup dnn + * @brief get the output dimension info of heatmap2coord + * @param [in] xDesc descriptor of input tensor + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetHeatmap2coordOutputDim(const ccTensorDescriptor_t xDesc, int32_t *dimCnt, int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief swish forward. 
+ * @param [in] handle cce handle + * @param [in] scale param of swish function, y = x / (1 + sigmoid(scale * x)) + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ + +ccStatus_t ccSwishForward(ccHandle_t handle, const float scale, const void *alpha, const ccTensorDescriptor_t xDesc, + const void *x, const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +ccStatus_t ccTeForward(ccHandle_t handle, const void *stubFunc, uint32_t coreDim, const void *args, uint32_t argsSize, + const rtL2Ctrl_t *l2ctrl, int32_t inputNum, const ccTensorDescriptor_t xDesc[], const void *x[], + int32_t outputNum, const ccTensorDescriptor_t yDesc[], void *y[], bool isAiCore); + +#ifndef DAVINCI_LITE +ccStatus_t ccAiCpuCustomizeForward(ccHandle_t handle, aicpu_run_func stubFunc, opTensor_t *xOpDesc[], void *x[], + int32_t inputNum, opTensor_t *yOpDesc[], void *y[], void *op_attr_handle, + int32_t outputNum, const ccTensorDescriptor_t xDesc[], + const ccTensorDescriptor_t yDesc[], const void *op_attr_str, uint32_t op_attr_size); +#endif +/** + * @ingroup dnn + * @brief embedding lookup forward. 
+ * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data x + * @param [in] x input data x in device memory + * @param [in] idxDesc descriptor of input data idx + * @param [in] idx input data idx in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccEmbeddingLookupForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, + const void *x, const ccTensorDescriptor_t idxDesc, const void *idx, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup + * @brief embedding lookup forward. + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] inputNum inputNum + * @param [in] xDesc[] descriptor array of input data x + * @param [in] x[] input data x array in device memory + * @param [in] workSpace workSpace addr + * @param [in] workSpaceSizeInBytes workSpace size + * @param [in] idxDesc descriptor of input data idx + * @param [in] idx input data idx in device memory + * @param [in] partitionStrategy partitionStrategy + * @param [in] maxNorm addr of maxNorm + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccEmbeddingLookupForward(ccHandle_t handle, const void *alpha, const int32_t inputNum, + const ccTensorDescriptor_t xDesc[], const void *x[], void *workSpace, + const uint32_t workSpaceSizeInBytes, const ccTensorDescriptor_t idxDesc, + const void *idx, ccPartitionStrategy_t partitionStrategy, const void *maxNorm, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + *[ccGetEmbeddingLookupOutputDim] + *@param inputNum [input tensor numbers] + *@param xDesc[] [input tensor 
descriptor] + *@param idxDesc [idx tensor descriptor] + *@param dimCnt [output dim count] + *@param dim[] [output dim] + *@param [in| dimlen length of dim + *@return ccStatus_t [status] + */ +ccStatus_t ccGetEmbeddingLookupOutputDim(const int32_t inputNum, const ccTensorDescriptor_t xDesc[], + const ccTensorDescriptor_t idxDesc, int32_t *dimCnt, int32_t dim[], + int32_t dimLen); + +/** + * @ingroup dnn + *[ccGetEmbeddingLookupWorkspaceSize] + *@param inputNum [input tensor numbers] + *@param idxDesc [input tensor descriptor] + *@param isMaxNormExist [isMaxNormExist] + *@param sizeInBytes [output size] + *@return ccStatus_t [status] + */ +ccStatus_t ccGetEmbeddingLookupWorkspaceSize(const int32_t inputNum, const ccTensorDescriptor_t idxDesc, + const bool isMaxNormExist, uint32_t *sizeInBytes); + +/** + * @ingroup dnn + * @brief check if it is the first layer of resnet50 and semecefc + * @param [in] tensorDesc descriptor of input tensor. + * @param [in] convDesc conv descriptor. + * @param [in] filterDesc descriptor of weight tensor. 
+ * @return ccStatus_t + */ +ccStatus_t c04DescParamCheck(const ccTensorDescriptor_t tensorDesc, const ccConvolutionDescriptor_t convDesc, + const ccFilterDescriptor_t filterDesc); + +#ifndef DAVINCI_LITE +/** + * @ingroup dnn + * @brief convolution forward computation + * @param [in] handle cce handle + * @param [in] convDesc descriptor of convolution operator + * @param [in] alpha scaling factors + * @param [in] beta scaling factors + * @param [in] xDesc x descriptor of input tensor + * @param [in] x x data in device memory + * @param [in] dyDesc descriptor of dy + * @param [in] dy dy data in device memory + * @param [in] dwDesc descriptor of dwDesc + * @param [out] dw dw data in device memory + * @param [in] algo algorithm of convolution forward + * @param [in] workSpace temp space, maybe NULL if no need temp space + * @param [in] workSpaceSizeInBytes sizeof workspace + * @return ccStatus_t + */ +ccStatus_t ccConvolutionBackwardFilter(ccHandle_t handle, const ccConvolutionDescriptor_t convDesc, void *alpha, + void *beta, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t dyDesc, const void *dy, + const ccFilterDescriptor_t dwDesc, void *dw, ccConvolutionBwdAlgo_t algo, + void *workSpace, uint32_t workSpaceSizeInBytes); +#endif + +/** + * @ingroup dnn + * @brief get the temp space size of convolution forward computation, maybe no need temp space + * @param [in] handle cce handle + * @param [in] dyDesc descriptor of input tensor dy + * @param [in] convDesc descriptor of convolution operator + * @param [in] xDesc descriptor of input tensor + * @param [in] dwDesc descriptor of filter + * @param [in] algo algorithm of convolution forward + * @param [in|out] sizeInBytes temp space size need for specified algorithm + * @return ccStatus_t + */ +ccStatus_t ccGetConvolutionBackwardFilterWorkspaceSize(ccHandle_t handle, const ccTensorDescriptor_t dyDesc, + const ccConvolutionDescriptor_t convDesc, + const ccTensorDescriptor_t xDesc, + const 
ccFilterDescriptor_t dwDesc, ccConvolutionBwdAlgo_t algo, + uint32_t *sizeInBytes); + +#ifndef DAVINCI_LITE +ccStatus_t ccBatchNormalizationBackward(ccHandle_t handle, ccBatchNormMode_t mode, const void *alphaDataDiff, + const void *betaDataDiff, const void *alphaParamDiff, const void *betaParamDiff, + const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t dyDesc, const void *dy, + const ccTensorDescriptor_t dxDesc, void *dx, + const ccTensorDescriptor_t bnScaleBiasDiffDesc, const void *bnScale, + void *resultBnScaleDiff, void *resultBnBiasDiff, const void *workSpace, + const uint32_t workSpaceSizeInBytes, double epsilon, const void *SaveMean, + const void *SaveInvVariance); +#endif + +ccStatus_t ccGetBatchNormalizationBackwardWorkspaceSize(ccHandle_t handle, ccBatchNormMode_t mode, + ccTensorDescriptor_t xDesc, ccTensorDescriptor_t dyDesc, + ccTensorDescriptor_t dxDesc, + ccTensorDescriptor_t bnScaleBiasDesc, uint32_t *sizeInBytes); + +#ifndef DAVINCI_LITE +ccStatus_t ccBatchNormalizationForwardTraining(ccHandle_t handle, ccBatchNormMode_t mode, const void *alpha, + const void *beta, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, void *y, + const ccTensorDescriptor_t bnScaleBiasMeanVarDesc, const void *bnScale, + const void *bnBias, double exponentialAverageFactor, + void *resultRunningMean, void *resultRunningVariance, void *workSpace, + uint32_t workSpaceSizeInBytes, double epsilon, void *resultSaveMean, + void *resultSaveInvVariance, const bool isTraining); +#endif + +ccStatus_t ccGetBatchNormalizationForwardTrainingWorkspaceSize(ccHandle_t handle, ccBatchNormMode_t mode, + ccTensorDescriptor_t xDesc, ccTensorDescriptor_t yDesc, + const ccTensorDescriptor_t bnScaleBiasMeanVarDesc, + uint32_t *sizeInBytes); + +/** + * @ingroup dnn + * @brief generate an random normal Tensor use given on/off scale. + * @param [in] handle Stream handle. + * @param [in] alpha reserved. 
+ * @param [in] meanDesc Mean description of one-hot position. + * @param [in] mean Data pointer of mean. + * @param [in] scaleDesc On/off scale description. + * @param [in] scale Data pointer of on/off scale. + * @param [in] seed random seed used to generate random number + * @param [in] seed2 random seed used to generate random number + * @param [in] beta reserved. + * @param [in] outputDesc Description of the generated one-hot tensor. + * @param [output] output Data pointer of output. + * @return ccStatus_t + */ +ccStatus_t ccRandomNormalForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t meanDesc, + const void *mean, const ccTensorDescriptor_t scaleDesc, const void *scale, + const int64_t seed1, const int64_t seed2, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief generate random uniform tensor. + * @param [in] handle Stream handle. + * @param [in] alpha reserved. + * @param [in] minvalDesc Mean description of one-hot position. + * @param [in] minval Data pointer of mean. + * @param [in] maxvalDesc On/off scale description. + * @param [in] maxval Data pointer of on/off scale. + * @param [in] seed random seed used to generate random number + * @param [in] seed2 random seed used to generate random number + * @param [in] beta reserved. + * @param [in] outputDesc Description of the generated one-hot tensor. + * @param [output] output Data pointer of output. 
+ * @return ccStatus_t + */ +ccStatus_t ccRandomUniformForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t minvalDesc, + const void *minval, const ccTensorDescriptor_t maxvalDesc, const void *maxval, + const int64_t seed1, const int64_t seed2, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/**^M + * @ingroup dnn^M\r 10932 + * @brief generate BatchMatMul tensor.^M\r 10933 + * @param [in] handle Stream handle.^M\r 10934 + * @param [in] alpha reserved.^M\r 10935 + * @param [in] xDesc tensorA Desc.^M\r 10936 + * @param [in] x Data pointer of tensorA.^M\r 10937 + * @param [in] yDesc tensorB Desc.^M\r 10938 + * @param [in] y Data pointer of tensorB.^M\r 10939 + * @param [in] beta reserved.^M\r 10940 + * @param [in] adj_x tensorA transpose flag^M\r 10941 + * @param [in] adj_y tensorB transpose flag^M\r 10942 + * @param [in] outpDesc Description of the tensor output .^M\r 10943 + * @param [output] out Data pointer of output.^M\r 10944 + * @return ccStatus_t^M + */ +ccStatus_t ccBatchMatMulForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, const bool adj_x, + const bool adj_y, const ccTensorDescriptor_t outDesc, void *out); + +ccStatus_t ccGetBatchMatMulOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, bool adj_x, + bool adj_y, int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief generator conv int8 all offset factor + * @param [in] para the struct for scale and offset of input, filter and output + * @param [in|out] offsetW offset of filter + * @param [in|out] offsetPad offset of input + * @param [in|out] scaledQrq scale computing result of input , filter and output + * @param [in|out] nextoffsetq offset of output + * @return ccStatus_t + */ +ccStatus_t ccGenQuantAllOffsetFactor(const ccQuantAllOffsetPara_t *para, uint8_t &offsetW, uint8_t &offsetPad, + 
uint16_t &scaledQrq, uint16_t &nextoffsetq); + +/** + * @ingroup dnn + * @brief get conv int8 all offset fracZ size + * @param [in] filterDesc descriptor of filter tensor + * @param [in|out] conv int8 all offset fracZ size + * @param [in] groupNum group conv num + * @return ccStatus_t + */ +ccStatus_t ccSetGroupConvScene(const ccFilterDescriptor_t tensorDesc, ccConvolutionDescriptor_t convDesc); + +ccStatus_t ccGetInt8AllOffsetFilterFracZSizeInBytes(const ccFilterDescriptor_t filterSrcDesc, + const ccFilterDescriptor_t filterDesc, uint32_t &size, + uint32_t groupNum); + +/** + * @ingroup dnn + * @brief transform filter in conv int8 all offset scene + * @param [in] filterSrcInfo descriptor of filter tensor before fracZ transform + * @param [in] filterSrc filter addr before fracZ transform + * @param [in] filterDstInfo descriptor of filter tensor after fracZ transform + * @param [in] filterDst filter addr after fracZ transform + * @param [in] quantPara the struct for scale and offset of input, filter and output + * @param [in] ySizeInBytes filter size after fracZ transform + * @param [in|out] outputDataType output data type + * @param [in] groupNum group conv num + * @return ccStatus_t + */ +ccStatus_t ccTransFilterInt8AllOffset(ccFilterDescriptor_t filterSrcInfo, const void *filterSrc, + ccFilterDescriptor_t filterDstInfo, void *filterDst, + const ccQuantAllOffsetPara_t *quantPara, uint32_t ySizeInBytes, + ccDataType_t outputDataType, uint32_t groupNum); + +/** + * @ingroup dnn + * @brief transform bias in conv int8 all offset scene + * @param [in] filterDesc descriptor of filter tensor + * @param [in] biasDesc descriptor of bias tensor + * @param [in] quantPara the struct for scale and offset of input, filter and output + * @param [in] w filter addr + * @param [in] bias bias addr + * @return ccStatus_t + */ +ccStatus_t ccTransInt8AllOffsetBias(const ccFilterDescriptor_t filterDesc, const ccTensorDescriptor_t biasDesc, + const ccQuantAllOffsetPara_t *quantPara, 
const void *w, const void *bias); + +/** + * @ingroup dnn + * @get dequantize + * @param [in] handle handle id + * @param [in] alpha alpha addr + * @param [in] xDesc the input Desc descriptor + * @param [in] x x data addr + * @param [in] beta beta data addr + * @param [in] yDesc the output Desc descriptor + * @param [in] y y data addr + * @return ccStatus_t + */ +ccStatus_t ccDequantizeCoreForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, + const void *x, const void *beta, const ccTensorDescriptor_t yDesc, void *y); +/** + * @ingroup dnn + * @get quantize + * @param [in] handle handle id + * @param [in] alpha alpha addr + * @param [in] xDesc the input Desc descriptor + * @param [in] x x data addr + * @param [in] beta beta data addr + * @param [in] yDesc the output Desc descriptor + * @param [in] y y data addr + * @return ccStatus_t + */ +ccStatus_t ccQuantizeCoreForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t yDesc, void *y); + +#ifndef DAVINCI_LITE +ccStatus_t ccActivationBackward(ccHandle_t handle, const ccActivationDescriptor_t activationDesc, const void *alpha, + const ccTensorDescriptor_t dyDesc, const void *dy, const ccTensorDescriptor_t xDesc, + const void *x, const void *beta, const ccTensorDescriptor_t dxDesc, void *dx); +#endif + +ccStatus_t ccL2LossForward(ccHandle_t handle, const ccL2LossDescriptor_t l2lossDesc, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t yDesc, void *y); + +/** + * @ingroup dnn + * @brief get the output dimension info of top k v2 + * @param [in] xDesc descriptor of input tensor x + * @param [in] yDesc descriptor of input tensor y + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetTopKV2OutputDim(const 
ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t kDesc, const void *k, + const int64_t axis, int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief top k v2 forward computation + * @param [in] handle cce handle + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor x + * @param [in] x input data x in device memory + * @param [in] yDesc descriptor of input tensor y + * @param [in] y input data y in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccTopKV2Forward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t kDesc, const void *k, const void *beta, const bool sorted, + const int64_t axis, void *workSpace, const uint32_t workSpaceSizeInBytes, + const ccTensorDescriptor_t outputValuesDesc, void *outputValues, + const ccTensorDescriptor_t outputIndicesDesc, void *outputIndices); + +/** + * @ingroup dnn + * @brief get the workspace size of top k v2 + * @param [in] xDesc descriptor of input tensor x + * @param [in] yDesc descriptor of input tensor y + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] sizeInBytes point to workspace size + * @return ccStatus_t + */ +ccStatus_t ccGetTopKV2ForwardWorkspaceSize(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t kDesc, + const ccTensorDescriptor_t indiceDesc, const void *k, const int64_t axis, + uint32_t *sizeInBytes); + +/** + * @ingroup dnn + * @brief Get unsorted segment reduction output dim + * @param [in] xDesc descriptor of input tensor + * @param [in] segmentIdsDesc descriptor of input segmentIds tensor + * @param [in] segmentsNum output slice num + * @param [out] dimCnt output tensor dim cnt + * @param [out] dim output tensor dim + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t 
ccGetUnsortedSegmentReductionOutputDim(const ccTensorDescriptor_t xDesc, + const ccTensorDescriptor_t segmentIdsDesc, int32_t segmentsNum, + int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief reduce all forward computation + * @param [in] handle cce handle + * @param [in] segmentsNum output slice num + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] segmentIdsDesc descriptor of input segmentIds tensor + * @param [in] x input segmentIds data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccUnsortedSegmentSumForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, + const void *x, const ccTensorDescriptor_t segmentIdsDesc, const void *segmentIds, + const int32_t segmentsNum, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief reverse sequence forward computation + * @param [in] handle cce handle + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor x + * @param [in] x input data x in device memory + * @param [in] yDesc descriptor of input tensor y + * @param [in] y input data y in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccReverseSequenceForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t inputDesc, + const void *input, const ccTensorDescriptor_t seqLengthsDesc, + const void *seqLengths, int64_t seqAxis, int64_t batchAxis, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief realdiv between two tensors. + * @param [in] alpha reserved. 
+ * @param [in] xDesc description of the left operator tensor. + * @param [in] x data point of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [in] y data point of the right operator tensor. + * @param [in] beta reserved. + * @param [in] outputDesc description of the output tensor. + * @param [output] output data point of the output tensor. + * @return ccStatus_t + */ + +ccStatus_t ccEqualForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get output shape of realdiv. + * @param [in] xDesc description of the left operator tensor. + * @param [in] yDesc description of the right operator tensor. + * @param [out] dimCnt output tensor dim cnt + * @param [out] dim output tensor dim + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetEqualOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t yDesc, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief invert permutation forward computation + * @param [in] handle cce handle + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccInvertPermutationForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, + const void *x, const void *beta, const ccTensorDescriptor_t outputDesc, + void *output); + +/** + * @ingroup dnn + * @brief get the workspace size of non max suppression + * @param [in] handle descriptor of handle + * @param [in] scoresDesc descriptor of input tensor scoresDesc + * @param [in] boxesDesc descriptor 
of input tensor boxesDesc + * @param [in|out] sizeInBytes point to workspace size + * @return ccStatus_t + */ +ccStatus_t ccGetNonMaxSuppressionWorkspaceSize(ccHandle_t handle, const ccTensorDescriptor_t scoresDesc, + const ccTensorDescriptor_t boxesDesc, uint32_t *sizeInBytes); + +/** + * @ingroup dnn + * @brief get the output dim of non max suppression + * @param [in] scoresDesc descriptor of input tensor scoresDesc + * @param [in] maxOutPutSize the max size of output + * @param [in|out] dimCnt point to the count of dim + * @param [in|out] dim[] the array of output dim + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetNonMaxSuppressionOutputDim(const ccTensorDescriptor_t scoresDesc, const int32_t maxOutPutSize, + int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief multinomial forward. + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] numSamples number of independent samples to draw for each row slice + * @param [in] seed1 sed to create a random seed for the distribution + * @param [in] seed2 sed to create a random seed for the distribution + * @param [in] workSpace work space for inter access + * @param [in] workSpaceSizeInBytes work space size + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccMultinomialForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + int32_t numSamples, int64_t seed1, int64_t seed2, void *workSpace, + uint32_t workSpaceSizeInBytes, const void *beta, const ccTensorDescriptor_t outputDesc, + void *output); +/** + * @ingroup dnn + * @brief get output dim of generated one-hot tensor. + * @param [in] indicesDesc Indices description of one-hot position. 
+ * @param [in] depth Depth of the one-hot dimension. + * @param [in] axis Axis to fill with the on/off value. + * @param [out] dimCnt point to the output dimCnt of the generated one-hot tensor. + * @param [out] dim arrays to save output dims. + * @param [in] dimLen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetOneHotOutputDim(const ccTensorDescriptor_t indicesDesc, int32_t depth, int32_t axis, int32_t *dimCnt, + int32_t *dim, int32_t dimLen); + +/** + * @ingroup dnn + * @brief generate a one-hot Tensor using given on/off values. + * @param [in] handle Stream handle. + * @param [in] alpha reserved. + * @param [in] indicesDesc Indices description of one-hot position. + * @param [in] indices Data pointer of indices. + * @param [in] onDesc On value description. + * @param [in] on Data pointer of on value. + * @param [in] offDesc Off value description. + * @param [in] off Data pointer of off value. + * @param [in] depth Depth of the one-hot dimension. + * @param [in] axis Axis to fill with the on/off value. + * @param [in] beta reserved. + * @param [in] outputDesc Description of the generated one-hot tensor. + * @param [out] output Data pointer of output. 
+ * @return ccStatus_t + */ +ccStatus_t ccOneHotForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t indicesDesc, + const void *indices, const ccTensorDescriptor_t onDesc, const void *on, + const ccTensorDescriptor_t offDesc, const void *off, const int32_t depth, const int32_t axis, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); +/** + * @ingroup dnn + * @brief get the workspaceSize of multinomial + * @param [in] xDesc descriptor of input tensor + * @param [in] numSamples number of samples (NOTE: not part of the current signature; kept for reference) + * @param [out] sizeInBytes work space size in bytes + * @return ccStatus_t + */ +ccStatus_t ccGetMultinomialWorkspaceSize(const ccTensorDescriptor_t xDesc, uint32_t *sizeInBytes); +/** + * @ingroup dnn + * @brief get the output dimension info of multinomial + * @param [in] xDesc descriptor of input tensor + * @param [in] numSample number of independent samples to draw for each row slice + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in] dimLen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetMultinomialOutputDim(const ccTensorDescriptor_t xDesc, int32_t numSample, int32_t *dimCnt, + int32_t dim[], int32_t dimLen); +/** + * @ingroup dnn + * @brief get the output dimension info of BiasAddBackward + * @param [in] dyDesc descriptor of input tensor + * @param [in|out] n outputTensor [N]CHW + * @param [in|out] c outputTensor N[C]HW + * @param [in|out] h outputTensor NC[H]W + * @param [in|out] w outputTensor NCH[W] + * @return ccStatus_t + */ +ccStatus_t ccGetBiasAddBackwardOutputDim(const ccTensorDescriptor_t dyDesc, int32_t *n, int32_t *c, int32_t *h, + int32_t *w); + +/** + * @ingroup dnn + * @brief biasadd backward. 
+ * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] dyDesc descriptor of input data + * @param [in] dy input data in device memory + * @param [in] beta common scale factor + * @param [in] dbDesc descriptor of output data + * @param [in|out] db output data in device memory + * @return ccStatus_t + */ +#ifndef DAVINCI_LITE +ccStatus_t ccBiasAddBackward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t dyDesc, const void *dy, + const void *beta, const ccTensorDescriptor_t dbDesc, void *db); + +ccStatus_t ccMaxPoolWithArgmaxForward(ccHandle_t handle, const ccPoolingDescriptor_t poolingDesc, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t yDesc, void *y, const ccTensorDescriptor_t argMaskDesc, + void *argMask); +#endif + +ccStatus_t ccCreatePoolingMaskDescriptor(ccTensorDescriptor_t *poolingMaskDesc); + +ccStatus_t ccDestroyPoolingMaskDescriptor(ccTensorDescriptor_t *poolingMaskDesc); + +ccStatus_t ccSetPoolingMaskTensorDescriptor(ccTensorDescriptor_t poolingMaskDesc, ccTensorFormat_t format, + ccDataType_t dataType, int32_t n, int32_t c, int32_t h, int32_t w, + int32_t windowH, int32_t windowW); + +ccStatus_t ccGetPoolingMaskTensorSizeInBytes(ccTensorDescriptor_t poolingMaskDesc, uint32_t *size); + +/** + * @ingroup dnn + * @brief get the mask output dimension info of maxpooling training forward + * @param [in] pooling descriptor of convolution operator + * @param [in] xDesc descriptor of input tensor + * @param [in|out] n point to batch size + * @param [in|out] c point to channels + * @param [in|out] h point to height of feature map + * @param [in|out] w point to width of feature map + * @param [in|out] windowH point to height of window + * @param [in|out] windowW point to width of windowW + * @return ccStatus_t + */ +ccStatus_t ccGetPoolingMaskDim(const ccPoolingDescriptor_t poolingDesc, const ccTensorDescriptor_t xDesc, int32_t *n, + int32_t 
*c, int32_t *h, int32_t *w, int32_t *windowH, int32_t *windowW); + +#ifndef DAVINCI_LITE +ccStatus_t ccSoftmaxCrossEntropyLoss(ccHandle_t handle, ccSoftmaxAlgo_t algo, ccSoftmaxMode_t mode, + ccCrossEntropyMode_t ceMode, const void *alpha, const void *scale, + const ccTensorDescriptor_t logitsDesc, const void *logits, + const ccTensorDescriptor_t labelsDesc, const void *labels, const void *labelSmooth, + const void *beta, const ccTensorDescriptor_t lossDesc, void *loss); + +ccStatus_t ccSoftmaxCrossEntropyDx(ccHandle_t handle, ccSoftmaxAlgo_t algo, ccSoftmaxMode_t mode, + ccCrossEntropyMode_t ceMode, const void *alpha, const void *scale, + const ccTensorDescriptor_t logitsDesc, const void *logits, + const ccTensorDescriptor_t labelsDesc, const void *labels, const void *labelSmooth, + const void *beta, const ccTensorDescriptor_t dxDesc, void *dx); + +ccStatus_t ccAvgPoolingBackward(ccHandle_t handle, const ccPoolingDescriptor_t poolingDesc, const void *alpha, + const ccTensorDescriptor_t dyDesc, const void *dy, const void *beta, + const ccTensorDescriptor_t dxDesc, const void *dx); + +ccStatus_t ccTrainingAssignOp(ccHandle_t handle, const ccAssignOpMode_t assignOpDesc, const void *alpha, + const void *beta, const ccTensorDescriptor_t aDesc, void *a, + const ccTensorDescriptor_t bDesc, const void *b); + +/** + * @ingroup dnn + * @brief momentum optimizer for variable update + * @param [in] handle cce handle + * @param [in] inputDesc descriptor of input tensor: gradient,accumulation,variable + * @param [in] gradient gradient input + * @param [in|out] accumulation accumulation input and updated output + * @param [in|out] variable variable input and updated output + * @param [in] algo indicate whether need FP16 output + * @param [in] momentum scaler to control accumulation + * @param [in] learningRate scaler + * @param [in] lossScaleReciprocal scaler + * @param [in] workSpace additional memory address + * @param [in] workSpaceSizeInBytes additional memory size + * 
@param [out] variableUpdatedFP16Desc descriptor of FP16 output tensor: variableUpdatedFP16 + * @param [out] variableUpdatedFP16 variableUpdatedFP16 + * @return ccStatus_t + */ +ccStatus_t ccApplyMomentum(ccHandle_t handle, const ccTensorDescriptor_t inputDesc, const void *gradient, + void *accumulation, void *variable, const ccMomentumAlgo_t algo, const void *momentum, + const void *learningRate, const void *lossScaleReciprocal, void *workSpace, + const uint32_t workSpaceSizeInBytes, const ccTensorDescriptor_t variableUpdatedFP16Desc, + void *variableUpdatedFP16); + +ccStatus_t ccSsdClassifyLossTrain(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t labelDesc, + const void *label, const ccTensorDescriptor_t greaterConstDesc, + const void *greaterConst, const ccTensorDescriptor_t subConstDesc, + const void *subConst, const ccTensorDescriptor_t sparseDesc, const void *sparse, + const void *beta, const ccTensorDescriptor_t castoutDesc, const void *castout, + const ccTensorDescriptor_t muloutDesc, const void *mulout); + +#endif + +/** + * @ingroup dnn + * @brief get the workspace size of applymomentum + * @param [in] inputDesc descriptor of input tensor + * @return ccStatus_t + */ +ccStatus_t ccGetApplyMomentumWorkspaceSize(const ccTensorDescriptor_t inputDesc, uint32_t *sizeInBytes); +#ifndef DAVINCI_LITE +ccStatus_t ccHwck2FracZ(ccHandle_t handle, const ccFilterDescriptor_t xDesc, const void *x, + const ccFilterDescriptor_t yDesc, void *y); + +ccStatus_t ccFracZ2Hwck(ccHandle_t handle, const ccFilterDescriptor_t xDesc, const void *x, + const ccFilterDescriptor_t yDesc, void *y); +ccStatus_t ccAddNForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const int32_t inputNum, + const void *x[], const void *beta, void *workSpace, uint32_t workSpaceSizeInBytes, + const ccTensorDescriptor_t yDesc, void *y); +#endif +ccStatus_t ccGetAddNForwardWorkspaceSize(ccHandle_t handle, const ccTensorDescriptor_t xDesc, const int32_t inputNum, 
+ const ccTensorDescriptor_t yDesc, uint32_t *sizeInBytes); +ccStatus_t ccGetAddNForwardOutputDim(const ccTensorDescriptor_t xDesc, int32_t *dimCnt, int32_t *dim, int32_t dimLen); +ccStatus_t ccAddTrainForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t wDesc, const void *w, const void *beta, void *workSpace, + uint32_t workSpaceSizeInBytes, const ccTensorDescriptor_t yDesc, void *y); +ccStatus_t ccGetAddTrainForwardWorkspaceSize(ccHandle_t handle, const ccTensorDescriptor_t xDesc, + const ccTensorDescriptor_t wDesc, const ccTensorDescriptor_t yDesc, + uint32_t *sizeInBytes); +ccStatus_t ccGetAddTrainForwardOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t wDesc, + int32_t *dimCnt, int32_t dim[], int32_t dimLen); +ccStatus_t ccMulTrainForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t wDesc, const void *w, const void *beta, void *workSpace, + uint32_t workSpaceSizeInBytes, const ccTensorDescriptor_t yDesc, void *y); +ccStatus_t ccGetMulTrainForwardWorkspaceSize(ccHandle_t handle, const ccTensorDescriptor_t xDesc, + const ccTensorDescriptor_t wDesc, const ccTensorDescriptor_t yDesc, + uint32_t *sizeInBytes); +ccStatus_t ccGetMulTrainForwardOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t wDesc, + int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief get workspace size + * @param [in] xDesc descriptor of input tensor + * @param [in|out] sizeInBytes workspace size + * @return ccStatus_t + */ +ccStatus_t ccGetRandomShuffleWorkspaceSize(const ccTensorDescriptor_t xDesc, uint32_t *sizeInBytes); + +/** + * @ingroup dnn + * @brief random shuffle forward computation + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] workspace 
temporary space + * @param [in] workspaceSizeInBytes temporary space size + * @param [in] seed random seed used to generate random number + * @param [in] seed2 random seed used to generate random number + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccRandomShuffleForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + void *workspace, const uint32_t workspaceSizeInBytes, const int64_t seed1, + const int64_t seed2, const void *beta, const ccTensorDescriptor_t outputDesc, + void *output); +/** + * @ingroup dnn + * @brief sin forward: + * data type only support float float16 double + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] input input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccSinForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *input, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief cos forward: + * data type only support float float16 double + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] input input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccCosForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *input, + const void *beta, const ccTensorDescriptor_t outputDesc, 
void *output); + +/** + * @ingroup dnn + * @brief tan forward: + * data type only support float float16 double + * data format only support ND + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] input input data in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccTanForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *input, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief get the output dimension info of unstack + * @param [in] xDesc descriptor of input tensor + * @param [in] axis the axis to unstack along + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetUnstackOutputDim(const ccTensorDescriptor_t xDesc, int32_t axis, int32_t *dimCnt, int32_t dim[], + int32_t dimLen); + +/** + * @ingroup dnn + * @brief unstack forward. 
+ * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data + * @param [in] x input data in device memory + * @param [in] num the length of the dimension axis + * @param [in] axis the axis to unstack along + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ + +ccStatus_t ccUnstackForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + int32_t num, int32_t axis, const void *beta, const ccTensorDescriptor_t outputDesc, + void *output[]); + +ccStatus_t ccResizeNearestNeighborCpuForward(ccHandle_t handle, const ccResizeNearestNeighborDescriptor_t resizeDesc, + const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); +/** + * @ingroup dnn + * @brief get the output dimension info of resize nearest neighbor + * @param [in] resizeDesc descriptor of resize + * @param [in] xDesc descriptor of input tensor + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetResizeNearestNeighborOutputDim(const ccResizeNearestNeighborDescriptor_t resizeDesc, + const ccTensorDescriptor_t xDesc, int32_t *dimCnt, int32_t dim[], + int32_t dimLen); + +/** + * @ingroup dnn + * @brief create descriptor of ResizeNearestNeighbor + * @param [in|out] resizeDesc point to descriptor of ResizeNearestNeighbor attr + * @return ccStatus_t + */ +ccStatus_t ccCreateResizeNearestNeighborDescriptor(ccResizeNearestNeighborDescriptor_t *resizeDesc); + +/** + * @ingroup dnn + * @brief destroy descriptor of ResizeNearestNeighbor + * @param [in|out] resizeDesc point to descriptor of ResizeNearestNeighbor attr + * @return ccStatus_t + */ +ccStatus_t 
ccDestroyResizeNearestNeighborDescriptor(ccResizeNearestNeighborDescriptor_t *resizeDesc); + +/** + * @ingroup dnn + * @brief set descriptor of ResizeNearestNeighbor. + * @param [in|out] resizeDesc descriptor of resize nearest neighbor operator + * @param [in] alignCorners whether the centers of input and output are aligned + * @param [in] height height of output + * @param [in] width width of output + * @return ccStatus_t + */ +ccStatus_t ccSetResizeNearestNeighborDescriptor(ccResizeNearestNeighborDescriptor_t resizeDesc, bool alignCorners, + int32_t height, int32_t width); + +/** + * @ingroup dnn + * [ccGetPadV2OutputDim] + * @brief get the output dimension info of pad + * @param [in] xDesc descriptor of input tensor x + * @param [in] padDesc descriptor of input paddings + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetPadV2OutputDim(const ccTensorDescriptor_t xDesc, const ccPadV2Descriptor_t padDesc, int32_t *dimCnt, + int32_t dim[], int32_t dimLen); + +ccStatus_t ccPadV2CpuForward(ccHandle_t handle, const ccPadV2Descriptor_t padDesc, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief create descriptor of parameters for padv2 function + * @param [in] point to descriptor of parameters for padv2 function + * @return ccStatus_t + */ +ccStatus_t ccCreatePadV2Descriptor(ccPadV2Descriptor_t *padDesc); + +/** + * @ingroup dnn + * @brief destroy descriptor of parameters for padv2 function + * @param [in] point to descriptor of parameters for padv2 function + * @return ccStatus_t + */ +ccStatus_t ccDestroyPadV2Descriptor(ccPadV2Descriptor_t *padDesc); + +/** + * @brief init descriptor for parameter of padv2 function + * @param [in|out] padDesc descriptor of pad + * @param [in] padShapeCnt padshape count + * @param 
[in] padShapeLow padshape low + * @param [in] padShapeHigh padshape high + * @param [in] padMode pad mode + * @param [in] padValue pad value ptr + * @param [in] padValueType pad value data type + * @return ccStatus_t + */ +ccStatus_t ccSetPadV2Descriptor(ccPadV2Descriptor_t padDesc, const int32_t padShapeCnt, const int32_t padShapeLow[], + const int32_t padShapeHigh[], const ccPadMode_t padMode, const void *padValue, + const ccDataType_t padValueType); +/** + * @ingroup dnn + * @brief create descriptor of batchToSpace + * @param [in|out] batchToSpaceDesc point to descriptor of batchToSpace + * @return ccStatus_t + */ +ccStatus_t ccCreateBatchToSpaceDescriptor(ccBatchToSpaceDescriptor_t *batchToSpaceDesc); + +/** + * @ingroup dnn + * @brief set batchToSpaceDesc + * @param [in|out] batchToSpaceDesc descriptor of batchToSpace + * @param [in] blockShape blockShape of batchToSpace + * @param [in] crops crops of batchToSpace + * @param [in] blockShapeLength blockShapeLength of batchToSpace + * @return ccStatus_t + */ +ccStatus_t ccSetBatchToSpaceDescriptor(ccBatchToSpaceDescriptor_t paramsDesc, const int32_t *blockShape, + const int32_t *crops, const int32_t blockShapeLength); + +/** + * @ingroup dnn + * @brief get batchToSpaceDesc + * @param [in|out] batchToSpaceDesc descriptor of batchToSpace + * @param [in] blockShape blockShape of batchToSpace + * @param [in] crops crops of batchToSpace + * @param [in] blockShapeLength blockShapeLength of batchToSpace + * @return ccStatus_t + */ +ccStatus_t ccGetBatchToSpaceDescriptor(const ccBatchToSpaceDescriptor_t paramsDesc, int32_t *blockShape, int32_t *crops, + int32_t *blockShapeLength); + +/** + * @ingroup dnn + * @brief destroy descriptor of batchToSpace + * @param [in] *batchToSpaceDesc descriptor of batchToSpace + * @return ccStatus_t + */ +ccStatus_t ccDestroyBatchToSpaceDescriptor(ccBatchToSpaceDescriptor_t *batchToSpaceDesc); + +/** + * @ingroup dnn + * @brief get the output dimension info of batch to space + * @param 
[in] xDesc descriptor of input tensor + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in| dimlen length of dim + * @return ccStatus_t + */ + +ccStatus_t ccGetBatchToSpaceOutputDim(const ccTensorDescriptor_t xDesc, + const ccBatchToSpaceDescriptor_t batchToSpaceDesc, int32_t *dimCnt, int32_t dim[], + int32_t dimLen); + +/** + * @ingroup dnn + * @brief batch to space forward computation + * @param [in] handle cce handle + * @param [in] paramsDesc descriptor of input params + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ + +ccStatus_t ccBatchToSpaceForward(ccHandle_t handle, const ccBatchToSpaceDescriptor_t paramsDesc, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief create descriptor of spaceToBatch + * @param [in|out] spaceToBatchDesc point to descriptor of spaceToBatch + * @return ccStatus_t + */ +ccStatus_t ccCreateSpaceToBatchDescriptor(ccSpaceToBatchDescriptor_t *spaceToBatchDesc); + +/** + * @ingroup dnn + * @brief set spaceToBatchDesc + * @param [in|out] spaceToBatchDesc descriptor of spaceToBatch + * @param [in] blockShape blockShape of spaceToBatch + * @param [in] paddings paddings of spaceToBatch + * @param [in] blockShapeLength blockShapeLength of spaceToBatch + * @return ccStatus_t + */ +ccStatus_t ccSetSpaceToBatchDescriptor(ccSpaceToBatchDescriptor_t paramsDesc, const int32_t *blockShape, + const int32_t *paddings, const int32_t blockShapeLength); + +/** + * @ingroup dnn + * @brief get spaceToBatchDesc + * @param [in|out] spaceToBatchDesc descriptor of spaceToBatch + * @param [in] blockShape blockShape of 
spaceToBatch + * @param [in] paddings paddings of spaceToBatch + * @param [in] blockShapeLength blockShapeLength of spaceToBatch + * @return ccStatus_t + */ +ccStatus_t ccGetSpaceToBatchDescriptor(const ccSpaceToBatchDescriptor_t paramsDesc, int32_t *blockShape, + int32_t *paddings, int32_t *blockShapeLength); + +/** + * @ingroup dnn + * @brief destroy descriptor of spaceToBatch + * @param [in] *spaceToBatchDesc descriptor of spaceToBatch + * @return ccStatus_t + */ +ccStatus_t ccDestroySpaceToBatchDescriptor(ccSpaceToBatchDescriptor_t *spaceToBatchDesc); + +/** + * @ingroup dnn + * @brief get the output dimension info of space to batch + * @param [in] xDesc descriptor of input tensor + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in| dimlen length of dim + * @return ccStatus_t + */ + +ccStatus_t ccGetSpaceToBatchOutputDim(const ccTensorDescriptor_t xDesc, + const ccSpaceToBatchDescriptor_t spaceToBatchDesc, int32_t *dimCnt, int32_t dim[], + int32_t dimLen); + +/** + * @ingroup dnn + * @brief space to batch forward computation + * @param [in] handle cce handle + * @param [in] paramsDesc descriptor of input params + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ + +ccStatus_t ccSpaceToBatchForward(ccHandle_t handle, const ccSpaceToBatchDescriptor_t paramsDesc, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +ccStatus_t ccTransFilterDesc2TensorDesc(ccFilterDescriptor_t wDesc, ccTensorDescriptor_t tensorDesc); + +/* + * @brief get the output dimension info of extractImagePatches + * @param [in] xDesc descriptor of input tensor x + * @param [in] ksizes ksizes 
array + * @param [in] strides strides array + * @param [in] rates rates array + * @param [in] padding padding type + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @return ccStatus_t + */ +ccStatus_t ccGetExtractImagePatchesOutputDim(const ccTensorDescriptor_t xDesc, const ccIntArray_t *ksizes, + const ccIntArray_t *strides, const ccIntArray_t *rates, + const ccExtractImagePatchesPadType_t padding, int32_t *dimCnt, + int32_t dim[], const int32_t dimLen); + +/** + * @ingroup dnn + * @brief cum forward. + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data, dimCnt:1~8 + * @param [in] x input data in device memory + * @param [in] axisDesc scale factor, dimCnt:0 + * @param [in] axis which axis to cum calc, device memory + * @param [in] beta common scale factor + * @param [in] opType calc type, eg. sum, prod.... + * @param [in] exclusive cum flag, true or false + * @param [in] reverse cum flag, true or false + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccCumForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t axisDesc, const void *axis, const void *beta, const CumOpType opType, + const bool exclusive, const bool reverse, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @ingroup dnn + * @brief ExtractImagePatches forward. 
+ * @param [in] handle cce handle + * @param [in] ksizes ksizes array + * @param [in] strides strides array + * @param [in] rates rates array + * @param [in] padding padding type + * @param [in] alpha common scale factor + * @param [in] xDesc descriptor of input data x + * @param [in] x input data x in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccExtractImagePatchesForward(ccHandle_t handle, const ccIntArray_t *ksizes, const ccIntArray_t *strides, + const ccIntArray_t *rates, const ccExtractImagePatchesPadType_t padding, + const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const void *beta, const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @brief get argmax output dim info + * @param [in] argDesc argmaxmin descriptor + * @param [in] xDesc descriptor of input tensor + * @param [in|out] dimCnt output dim count + * @param [in|out] dim output dim + * @param [in| dimlen length of dim + * @return ccStatus_t + */ +ccStatus_t ccGetArgMaxOutputDim(const ccArgmaxminDescriptor_t argDesc, const ccTensorDescriptor_t xDesc, + int32_t *dimCnt, int32_t dim[], int32_t dimLen); + +/** + * @ingroup dnn + * @brief argmax forward computation + * @param [in] handle cce handle + * @param [in] argDesc argmaxmin descriptor + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] workSpace workspace pointer + * @param [in] workSpaceSizeInBytes workspace size in bytes + * @param [in] beta bias factors + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccArgMaxForward(ccHandle_t handle, const ccArgmaxminDescriptor_t argDesc, const void *alpha, + const ccTensorDescriptor_t xDesc, const void *x, void *workSpace, + 
const uint32_t workSpaceSizeInBytes, const void *beta, const ccTensorDescriptor_t outputDesc, + void *output); + +/** + * @ingroup dnn + * @brief get the output dimension info of argmaxmin + * @param [in] argDesc descriptor of tagCcArgmaxmin + * @param [in] xDesc descriptor of input tensor + * @param [in|out] sizeInBytes workspace size + * @return ccStatus_t + */ +ccStatus_t ccGetArgMaxWorkspaceSize(const ccArgmaxminDescriptor_t argDesc, const ccTensorDescriptor_t xDesc, + uint32_t *sizeInBytes); + +/** + * @ingroup dnn + * @brief create descriptor of Argmaxmin + * @param [in|out] resizeDesc point to descriptor of Argmaxmin attr + * @return ccStatus_t + */ +ccStatus_t ccCreateArgmaxminDescriptor(ccArgmaxminDescriptor_t *argDesc); + +/** + * @ingroup dnn + * @brief destroy descriptor of Interp + * @param [in|out] resizeDesc point to descriptor of Argmaxmin attr + * @return ccStatus_t + */ +ccStatus_t ccDestroyArgmaxminDescriptor(ccArgmaxminDescriptor_t *argDesc); + +/** + * @ingroup dnn + * @brief destroy descriptor of Interp + * @param [in|out] argDesc descriptor of tagCcArgmaxmin + * @param [in] axisType + * @param [in] outMaxVal whether to return the maximum value + * @param [in] topK number that returns the maximum index or maximum value + * @param [in] axis Describes which axis of the input Tensor to reduce across + * @param [in] keepDims whether to keep reduced dim + * @param [in] reduceSize the num of elements to be reduce to get topK elements, reduceSize=-1 means the total num + * of elements in axis dimension + * @param [in] reduceStride the stride for reduce operation, reduceStride=1 means the layout of target data is + * continuous + * @return ccStatus_t + */ +ccStatus_t ccSetArgmaxminDescriptor(ccArgmaxminDescriptor_t argDesc, int32_t axisType, bool outMaxVal, int64_t topK, + int64_t axis, bool keepDims, int64_t reduceSize = -1, int64_t reduceDStride = 1); + +ccStatus_t ccArgMinForward(ccHandle_t handle, const ccArgmaxminDescriptor_t argDesc, const void 
*alpha, + const ccTensorDescriptor_t xDesc, const void *x, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +ccStatus_t ccGetArgMinOutputDim(const ccArgmaxminDescriptor_t argDesc, const ccTensorDescriptor_t xDesc, + int32_t *dimCnt, int32_t dim[], const int32_t dimLen); +/** + * @ingroup dnn + * @brief lsh projection forward computation + * @param [in] handle cce handle + * @param [in] alpha scaling factors + * @param [in] hashDesc descriptor of input tensor hashDesc + * @param [in] hash input data hash in device memory + * @param [in] weightDesc descriptor of input tensor weightDesc + * @param [in] weight input data weight in device memory + * @param [in] inputDesc descriptor of input tensor inputDesc + * @param [in] lookup input data lookup in device memory + * @param [in] type 1:SPARSE 2.DENSE + * @param [in] beta bias factors + * @param [in] workSpace workSpace data in device memory + * @param [in] workSpaceSizeInBytes workSpace length + * @param [in] outputDesc descriptor of output tensor + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccLshProjectionForward(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t hashDesc, + const void *hash, const ccTensorDescriptor_t weightDesc, const void *weight, + const ccTensorDescriptor_t inputDesc, const void *input, const LSHProjectionType type, + const void *beta, void *workSpace, const uint32_t workSpaceSizeInBytes, + const ccTensorDescriptor_t outputDesc, void *output); +/** + * @ingroup dnn + * @brief get the workspace size of lsh projection + * @param [in] inputDesc descriptor of input tensor input + * @param [in] hashDataType data type of hash + * @param [in|out] sizeInBytes workspace size + * @return ccStatus_t + */ +ccStatus_t ccGetLshProjectionForwardWorkspaceSize(const ccTensorDescriptor_t inputDesc, const ccDataType_t hashDataType, + uint32_t *sizeInBytes); +/** + * @ingroup dnn + * @brief get the output dimension info of 
LshProjection, + * @param [in] hashDesc descriptor of hash + * @param [in] type type of mode + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in] dimLen dim length + * @return ccStatus_t + */ +ccStatus_t ccGetLshProjectionOutputDim(const ccTensorDescriptor_t hashDesc, const LSHProjectionType type, + int32_t *dimCnt, int32_t dim[], const int32_t dimLen); +/** + * @ingroup dnn + * @brief get the weight dimension info of LshProjection, + * @param [in] inputDesc descriptor of input + * @param [in|out] dimCnt point to the weight dimCnt + * @param [in|out] dim arrays to save dims + * @param [in] dimLen dim length + * @return ccStatus_t + */ +ccStatus_t ccGetLshProjectionWeightDim(const ccTensorDescriptor_t inputDesc, int32_t *dimCnt, int32_t dim[], + const int32_t dimLen); + +/** + * @ingroup dnn + * @brief init descriptor for parameter of upsample function + * @param [in] handle cce handle + * @param [in] upsamplePara input para in host memory + * @param [in] alpha common scale factor + * @param [in] bottomDesc descriptor of input data bottomDesc + * @param [in] bottom input data bottom in device memory + * @param [in] bottomMaskDesc descriptor of input data bottomMaskDesc + * @param [in] bottomMask input data bottomMask in device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor of output data + * @param [in|out] output output data in device memory + * @return ccStatus_t + */ +ccStatus_t ccUpsampleForward(ccHandle_t handle, const ccUpsampleParaDescriptor_t upsamplePara, const void *alpha, + const ccTensorDescriptor_t bottomDesc, const void *bottom, + const ccTensorDescriptor_t bottomMaskDesc, const void *bottomMask, const void *beta, + const ccTensorDescriptor_t outputDesc, void *output); + +/** + * @brief creat descriptor for parameter of usample function + * @param [in|out] upsampleDesc descriptor of upsamplepara + * @return ccStatus_t + */ +ccStatus_t 
ccCreateUpsampleDescriptor(ccUpsampleParaDescriptor_t *upsampleDesc); + +/** + * @brief destroy descriptor for parameter of upsample function + * @param [in|out] upsampleDesc descriptor of upsamplepara + * @return ccStatus_t + */ +ccStatus_t ccDestroyUpsampleDescriptor(ccUpsampleParaDescriptor_t *upsampleDesc); + +/** + * @brief set descriptor for parameter of upsample function + * @param [in|out] upsampleDesc descriptor of upsamplepara + * @param [in] scale the scale of height and width + * @param [in] scaleHeight the scale of height + * @param [in] scaleWidth the scale of Width + * @param [in] upsampleHeight the height of output + * @param [in] upsampleWidth the width of output + * @param [in] padOutHeight pad value height + * @param [in] padOutWidth pad value width + * @return ccStatus_t + */ +ccStatus_t ccSetUpsampleDescriptor(ccUpsampleParaDescriptor_t upsampleDesc, const int32_t scale, + const int32_t scaleHeight, const int32_t scaleWidth, const int32_t upsampleHeight, + const int32_t upsampleWidth, const bool padOutHeight, const bool padOutWidth); +/** + * @ingroup dnn + * @brief get the output dimension info of upsample + * @param [in] upsamplePara para of upsample + * @param [in] bottomDesc descriptor of input bottom tensor + * @param [in|out] dimCnt point to the output dimCnt + * @param [in|out] dim arrays to save dims + * @param [in] dimLen the len of dim array + * @return ccStatus_t + */ +ccStatus_t ccGetUpsampleOutputDim(const ccUpsampleParaDescriptor_t upsamplePara, const ccTensorDescriptor_t bottomDesc, + int32_t *dimCnt, int32_t dim[], const int32_t dimLen); + +#ifndef DAVINCI_LITE +ccStatus_t ccMatmul(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t wDesc, const void *w, const ccTensorDescriptor_t biasDesc, + const void *bias, const ccFullConnectFwdAlgo_t algo, void *workSpace, + const uint32_t workSpaceSizeInBytes, const void *beta, const ccTensorDescriptor_t yDesc, void *y, + 
const bool transposeA, const bool transposeB); +ccStatus_t ccGetMatmulOutputDim(const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t wDesc, int32_t *n, + int32_t *c, int32_t *h, int32_t *w, bool transposeA, bool transposeB); +ccStatus_t ccGetMatmulWorkspaceSize(ccHandle_t handle, const ccFullConnectFwdAlgo_t algo, + const ccTensorDescriptor_t xDesc, const ccTensorDescriptor_t wDesc, + const ccTensorDescriptor_t yDesc, uint32_t *sizeInBytes, bool transposeA, + bool transposeB); +#endif + +/** + * @ingroup dnn + * @brief gather_v2 function + * @param [in] handle cce handle + * @param [in] alpha common scale factor + * @param [in] paramsDesc descriptor + * @param [in] params device memory + * @param [in] indicesDesc descriptor + * @param [in] indices device memory + * @param [in] axisDesc descriptor + * @param [in] axis device memory + * @param [in] beta common scale factor + * @param [in] outputDesc descriptor + * @param [in|out] output device memory + * @return ccStatus_t + */ +ccStatus_t ccGatherV2(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t paramsDesc, const void *params, + const ccTensorDescriptor_t indicesDesc, const void *indices, const ccTensorDescriptor_t axisDesc, + const void *axis, const void *beta, const ccTensorDescriptor_t outputDesc, const void *output); + +/** + * @ingroup dnn + * @brief memory_clear function + * @param [in] handle cce handle + * @param [in] addrSpaceSizeInBytes addr space size + * @param [in|out] addr device memory + * @return ccStatus_t + */ +ccStatus_t ccMemoryClear(ccHandle_t handle, const uint64_t addrSpaceSizeInBytes, const void *addr); + +/** + * @ingroup dnn + * @brief check input is overflow + * @param [in] handle cce handle + * @param [in] alpha scaling factors + * @param [in] xDesc descriptor of input tensor + * @param [in] x input data in device memory + * @param [in] yDesc descriptor of output tensor + * @param [in|out] y output data in device memory + * @param [in] beta scaling factors + * 
@return ccStatus_t + */ +ccStatus_t ccIsFinite(ccHandle_t handle, const void *alpha, const ccTensorDescriptor_t xDesc, const void *x, + const ccTensorDescriptor_t yDesc, const void *y, const void *beta); +}; // namespace cce + +#endif /* __DNN_OP_H__ */ diff --git a/third_party/fwkacllib/inc/cce/dnn_struct.hpp b/third_party/fwkacllib/inc/cce/dnn_struct.hpp new file mode 100644 index 00000000..8b9cc097 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/dnn_struct.hpp @@ -0,0 +1,23 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __DNN_STRUCT_HPP__ +#define __DNN_STRUCT_HPP__ + +#include "dnn.h" +#include "dnn_struct_base.hpp" + +#endif /* __DNN_STRUCT_HPP__ */ diff --git a/third_party/fwkacllib/inc/cce/dnn_struct_base.hpp b/third_party/fwkacllib/inc/cce/dnn_struct_base.hpp new file mode 100644 index 00000000..672cf77f --- /dev/null +++ b/third_party/fwkacllib/inc/cce/dnn_struct_base.hpp @@ -0,0 +1,894 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __DNN_STRUCT_BASE_HPP__ +#define __DNN_STRUCT_BASE_HPP__ + +#include "cce/cce_def.hpp" + +namespace cce { + +/** + * @ingroup dnn + * @brief max number of dimensions + */ +#define CC_DIM_MAX (8) + +/** + * @ingroup dnn + * @brief max number of dimensions when use NC1HWC0 format + */ +#define CC_REALDIM_MAX (4) + +/** + * @ingroup dnn + * @brief max input count of MscnnBoxOutput + */ +#define CC_MAX_INPUT_CNT (10) + +/** + * @ingroup dnn + * @brief image dimensions of aipp input + */ +#define CC_AIPP_IMG_DIM (2) + +/** + * @ingroup dnn + * @brief image channel number of aipp input + */ +#define CC_AIPP_IMG_CHN_NUM (4) + +/** + * @ingroup dnn + * @brief element number of aipp color space convertion matrix + */ +#define CC_AIPP_CSC_MATRIX_DIM (9) + +/** + * @ingroup dnn + * @brief element number of aipp color space convertion bias + */ +#define CC_AIPP_CSC_BIAS_DIM (3) + +/** + * @ingroup dnn + * @brief parameter number of op exp/log/pow + */ +#define PARAM_CNT_THREE (3) + +/** + * @ingroup dnn + * @brief parameter number of op nonmaxsuppression + */ +#define PARAM_CNT_TWO (2) +#define DIMCNT_NUMBER_ONE (1) +#define DIMCNT_NUMBER_TWO (2) +#define DIMCNT_NUMBER_FOUR (4) + +#define COMMON_FORMAT_NCHW_N_INDEX (0) +#define COMMON_FORMAT_NCHW_C_INDEX (1) +#define COMMON_FORMAT_NCHW_H_INDEX (2) +#define COMMON_FORMAT_NCHW_W_INDEX (3) + +/** + * @ingroup dnn + * @brief parameter number of op upsample + */ +#define UPSAMPLE_SCAL_DEFAULT_TWO (2) +#define UPSAMPLE_ILLEGAL_VALUE_1 (1) + +/** + * @ingroup dnn + * @brief struct define of 
StridedSlice required params. + */ + +typedef struct tagCcStridedSlice { + uint32_t dimCnt; + int32_t begin[CC_DIM_MAX]; + int32_t end[CC_DIM_MAX]; + int32_t strides[CC_DIM_MAX]; +} ccStridedSlice_t; + +/** + * @ingroup dnn + * @brief struct define of Strided_slice attrs + */ +typedef struct tagCcStridedSliceAttrs { + uint32_t beginMask; + uint32_t endMask; + uint32_t ellipsisMask; + uint32_t newAxisMask; + uint32_t shrinkAxisMask; +} ccStridedSliceAttrs_t; + +/** + * @ingroup dnn + * @brief params of batchToSpace + */ +typedef struct tagCcBatchToSpace { + int32_t blockShapeLength; + int32_t blockShape[CC_DIM_MAX]; + int32_t crops[2 * CC_DIM_MAX]; +} ccBatchToSpace_t; + +/** + * @ingroup dnn + * @brief params of spaceToBatch + */ +typedef struct tagCcSpaceToBatch { + int32_t blockShapeLength; + int32_t blockShape[CC_DIM_MAX]; + int32_t paddings[2 * CC_DIM_MAX]; +} ccSpaceToBatch_t; + +/** + * @ingroup dnn + * @brief struct define of tensor + */ +typedef struct tagCcTensor { + ccTensorFormat_t format; + ccDataType_t dataType; + int32_t dimCnt; + int32_t realDimCnt; + uint32_t dataSize; + int32_t dim[CC_DIM_MAX]; + int32_t stride[CC_DIM_MAX]; + ccVecQuantizePara_t vecQuantizePara; +} ccTensor_t; + +/** + * @ingroup dnn + * @brief struct define of filter tensor + */ +typedef struct tagCcFilter { + ccTensorFormat_t format; + ccDataType_t dataType; + int32_t dimCnt; + uint32_t dataSize; + int32_t dim[CC_DIM_MAX]; +} ccFilter_t; + +/** + * @ingroup dnn + * @brief struct define of convolution operator + */ +typedef struct tagCcConvolution { + ccConvolutionMode_t mode; + ccPaddingMode_t padMode; + int32_t dimCnt; + int32_t padding[2 * (CC_DIM_MAX - 2)]; + int32_t filterStride[CC_DIM_MAX - 2]; + int32_t dilation[CC_DIM_MAX - 2]; + int32_t group; + ccQuantizeDescriptor_t quantInfo; + ccConvolutionAipp_t aippInfo; + int32_t adj[CC_DIM_MAX - 2]; + int32_t targetShape[CC_DIM_MAX - 2]; + int32_t beforePadding[2 * (CC_DIM_MAX - 2)]; // pad before conv + uint32_t reluFlag; + 
int64_t concatBatchSize; +} ccConvolution_t; + +#define ccCorrelation_t ccConvolution_t +typedef struct tagCcFullConnection_t { + ccQuantizeDescriptor_t quantInfo; + uint32_t infoTabSize; + const void *infoTab; + bool reluFlag; + ccFullConnectFwdAlgo_t algo; +} ccFullConnection_t; + +typedef struct tagCcConcatFour2Five_t { + uint32_t branchNum; // how many branch for box or class + uint32_t classNum; // box branch's classNum is four, class branch's classNum is class number +} ccConcatFour2Five_t; + +typedef struct tagCcTransdata_t { + uint64_t scaleQAddr; + uint8_t scaleQValueMode; + uint64_t offsetQAddr; + uint8_t quantAlgo; + uint8_t quantize8bitFlag; +} ccTransdata_t; +/** + * @ingroup dnn + * @brief struct define of pooling operator + */ +typedef struct tagCcPooling { + ccPoolingMode_t mode; + ccPaddingMode_t padMode; + ccNanPropagation_t maxpoolingNanOpt; + int32_t dimCnt; + int32_t windowDim[CC_DIM_MAX - 2]; + int32_t padding[CC_DIM_MAX - 2]; + int32_t stride[CC_DIM_MAX - 2]; + int32_t dataMode; + int32_t ceilMode; + ccQuantizeDescriptor_t quantInfo; + ccPooingFwdAlgo_t algo; +} ccPooling_t; + +/** + * @ingroup dnn + * @brief struct define of activation operator + */ +typedef struct tagCcActivation { + ccActivationMode_t mode; + ccNanPropagation_t reluNanOpt; + double coef; /* ceiling for clipped RELU, alpha for ELU */ + ccActivationPara_u activationPara; +} ccActivation_t; + +/** + * @ingroup dnn + * @brief struct define of svdf operator + */ +typedef struct tagCcSvdf { + ccTensorFormat_t format; + ccDataType_t dataType; + uint32_t batches; + uint32_t features; + uint32_t rank; + uint32_t inputSize; + uint32_t memorySize; +} ccSvdf_t; + +/** + * @ingroup dnn + * @brief struct define of svdf operator + */ +typedef struct tagCcHashTableLookup { + ccTensorFormat_t format; + ccDataType_t lookupType; + ccDataType_t keyType; + ccDataType_t valueType; + ccDataType_t outputType; + ccDataType_t hitsType; + uint32_t lookups; + uint32_t keys; + uint32_t rows; + 
uint32_t features; + uint16_t valueScale; + uint16_t outputScale; + uint16_t valueOffset; + uint16_t outputOffset; +} ccHashTableLookup_t; + +/** + * @ingroup dnn + * @brief struct define of prelu operator + */ +typedef struct tagCcPRelu { + ccNanPropagation_t reluNanOpt; + int32_t slopeCount; + bool channelShared; +} ccPRelu_t; + +/** + * @ingroup dnn + * @brief struct define of crop operator + */ +typedef struct tagCcCrop { + int32_t startAxis; + int32_t offset[CC_DIM_MAX]; + int32_t offsetCnt; +} ccCrop_t; + +/** + * @ingroup dnn + * @brief struct define of SpatialTransformer operator + */ +typedef struct tagCcSpatialTransformer { + ccSamplerType_t samplerType; + ccDataType_t dataType; + int32_t dimCnt; + uint64_t dim[CC_DIM_MAX]; + uint64_t alignCorner; +} ccSpatialTransformer_t; + +/** + * @ingroup dnn + * @brief struct define of ShiftTransformer operator + */ +typedef struct tagCcShiftTransformer { + ccSamplerType_t samplerType; + double xPreDefined; + double yPreDefined; + bool xShift; + bool yShift; + int32_t gridH; + int32_t gridW; +} ccShiftTransformer_t; + +/** + * @ingroup dnn + * @brief struct define of FasterRcnnProposal operator + */ +typedef struct tagCcFasterRcnnProposal { + int32_t preNMStopK; + int32_t postNMStopK; + float nmsTresh; + float minSize; + float featStride; + float baseSize; + int32_t ratioCnt; + int32_t scaleCnt; + float *ratio; + float *scale; + int32_t imgH; + int32_t imgW; +} ccFasterRcnnProposal_t; + +/** + * @ingroup dnn + * @brief struct define of LRN operator + */ +typedef struct tagCcLRN { + ccLRNMode_t lrnMode; + int32_t lrnN; + double lrnAlpha; + double lrnBeta; + double lrnK; +} ccLRN_t; + +/** + * @ingroup dnn + * @brief struct define of instanceNorm + */ +typedef struct tagCcInstancenorm { + ccInstanceNormMode_t mode; + double epsilon; +} ccInstancenorm_t; + +/** + * @ingroup dnn + * @brief struct define of assignOp operator + */ +typedef struct tagCcAssignOp { + ccAssignOpMode_t assignOpMode; +} ccAssignOp_t; + +/** + * 
@ingroup dnn + * @brief struct define of arcSinCos operator + */ +typedef struct tagCcArcSinCos { + ccArcSinCosMode_t arcSinCosMode; +} ccArcSinCos_t; + +/** + * @ingroup dnn + * @brief struct define of Detectpostprocess operator + */ +typedef struct tagCcDetectpostprocess { + int32_t numClasses; + float confThreshold; + float nmsThreshold; + int32_t outTopK; + float bboxRegWeightsDx; + float bboxRegWeightsDy; + float bboxRegWeightsDw; + float bboxRegWeightsDh; +} ccDetectpostprocess_t; +/** + * @ingroup dnn + * @brief struct define of FasterRcnnDetectionOutput operator + */ +typedef struct tagCcFasterRcnnDetectionOutput { + int32_t numClasses; + float nmsThreshold; + float postConfThreshold; + int32_t imgH; + int32_t imgW; + int32_t batchSize; +} ccFasterRcnnDetectionOutput_t; + +/** + * @ingroup dnn + * @brief struct define of SsdDetectionOutput operator + */ +typedef struct tagCcSsdDetectionOutput { + int32_t numClasses; + int32_t backgroundLabelId; + double preConfThreshold; + int32_t preTopK; + double nmsThreshold; + double nmsEta; + ccBoxCodeType_t codeType; + int32_t outTopK; + bool shareLocation; + bool varianceEncodedInTarget; + uint32_t boxTypeNum; + float var[4]; + uint32_t variance_num; +} ccSsdDetectionOutput_t; + +/** + * @ingroup dnn + * @brief struct define of RefinedetDetectionOutput operator + */ +typedef struct tagCcRefinedetDetectionOutput { + int32_t numClasses; + int32_t backgroundLabelId; + double preConfThreshold; + int32_t preTopK; + double nmsThreshold; + double nmsEta; + ccBoxCodeType_t codeType; + int32_t outTopK; + bool shareLocation; + bool varianceEncodedInTarget; + uint32_t boxTypeNum; + float var[4]; + uint32_t variance_num; + double objectness_score; +} ccRefinedetDetectionOutput_t; + +/** + * @ingroup dnn + * @brief struct define of MsrGenerateRpnProposals operator + */ +typedef struct tagCcMsrGenerateRpnProposals { + int32_t preNmsTopK; + int32_t postNmsTopK; + float nmsThreshold; + float rpnMiniSize; + int32_t imgH; + int32_t 
imgW; + uint32_t boxTypeNum; + float scoreThreshold; +} ccMsrGenerateRpnProposals_t; + +/** + * @ingroup dnn + * @brief struct define of RetinaPostprocessor operator + */ +typedef struct tagCcRetinaPostprocessor { + int32_t numClasses; + int32_t maxDetections; + float nmsThreshold; + float scoreThreshold; + int32_t imgH; + int32_t imgW; + uint32_t boxTypeNum; + float mean[4]; + int32_t meanNum; + float std[4]; + int32_t stdNum; + int32_t outputNum; + bool ocrFlag; +} ccRetinaPostprocessor_t; + +/** + * @ingroup dnn + * @brief struct define of GenerateSsdAnchors operator + */ +typedef struct tagCcGenerateSsdAnchors { + int32_t featureMapShapeList[20]; + uint32_t featureMapShapeListSize; + int32_t boxSpecsNum[10]; + uint32_t boxSpecsNumSize; + float scales[10]; + uint32_t scalesNum; + float aspectRatios[10]; + uint32_t aspectRatiosNum; + int32_t baseAnchorSize[2]; + uint32_t baseAnchorSizeNum; + int32_t anchorStride[2]; + uint32_t anchorStrideNum; + int32_t anchorOffset[2]; + uint32_t anchorOffsetNum; + bool reduceBoxesInLowestLayer; + float minScale; + float maxScale; + int32_t imgH; + int32_t imgW; +} ccGenerateSsdAnchors_t; + +/** + * @ingroup dnn + * @brief struct define of MscnnBoxOutput operator + */ +typedef struct tagCcMscnnBoxOutput { + double fgThreshold; + double nmsThreshold; + ccNmsType_t nmsType; + int32_t fieldH[CC_MAX_INPUT_CNT]; + int32_t fieldW[CC_MAX_INPUT_CNT]; + int32_t downsampleRate[CC_MAX_INPUT_CNT]; + int32_t defaultBoxCnt; + double fieldWhr; + double fieldXyr; + int32_t maxNmsNum; + int32_t maxPostNmsNum; + double minSize; +} ccMscnnBoxOutput_t; + +/** + * @ingroup dnn + * @brief struct define of NMS operator + */ +typedef struct tagCcNms { + int32_t numClasses; + int32_t backgroundLabelId; + double preConfThreshold; + int32_t preTopK; + double nmsThreshold; + double nmsEta; + int32_t postTopK; + int32_t outTopK; + double postConfThreshold; + bool shareLocation; +} ccNms_t; + +/** + * @ingroup dnn + * @brief struct define of 
NMS/MultiClassNMS operator + */ +typedef struct tagCcMultiClassNms { + uint64_t numClasses; + float objThreshold; + float nmsThreshold; + float clsThreshold; + bool normal; + uint64_t coorType; +} ccCcMultiClassNms_t; + +/** + * @ingroup dnn + * @brief struct define of YoloDetectionOutput operator + */ +typedef struct tagCcYoloDetectionOutput { + ccYoloVersion_t yoloVersion; + uint32_t netH; + uint32_t netW; + uint32_t postTopK; + uint32_t classes; + float nmsThreshold; + float iouThreDecay; + float coorScaleFactor; + bool relative; + float objThreshold; + float clsThreshold; + uint32_t biasNum; + float *bias; +} ccYoloDetectionOutput_t; + +/** + * @ingroup dnn + * @brief struct define of GetRegionBox operator + */ +#ifndef CC_MAX_YOLO_BIAS_NUM +#define CC_MAX_YOLO_BIAS_NUM (16) +#endif + +typedef struct tagCcGetRegionBox { + uint32_t biasNum; + uint32_t H; + uint32_t W; + float bias[CC_MAX_YOLO_BIAS_NUM]; +} ccGetRegionBox_t; + +/** + * @ingroup dnn + * @brief struct define of CorrectBoxes operator + */ +typedef struct tagCorrectBoxes { + uint32_t netW; + uint32_t netH; + bool relative; +} ccCorrectBoxes_t; + +/** + * @ingroup dnn + * @brief struct define of ClsProb operator + */ +typedef struct tagClsProb { + float objThreshold; +} ccClsProb_t; + +/** + * @ingroup dnn + * @brief struct define of SsdPriorBox operator + */ +typedef struct tagCcSsdPriorBox { + ccBoxCodeType_t codeType; + double *minSize; + int32_t minSizeNum; + double *maxSize; + int32_t maxSizeNum; + double *aspectRatio; + int32_t aspectRatioNum; + double *variance; + int32_t varianceNum; + int32_t imgH; + int32_t imgW; + double stepH; + double stepW; + double offset; + bool flip; + bool clip; +} ccSsdPriorBox_t; + +/** + * @ingroup dnn + * @brief struct define of Yolo2Region operator + */ +typedef struct tagCcYolo2Region { + ccSoftmaxTree_t softmaxTree; + bool softmax; + bool background; + bool treeSoftmax; +} ccYolo2Region_t; + +/** + * @ingroup dnn + * @brief struct define of YoloRegion operator 
+ */ +typedef struct tagCcYoloRegion { + ccSoftmaxTree_t softmaxTree; + bool softmax; + bool background; + bool treeSoftmax; + int32_t classes; + int32_t coords; + int32_t boxes; + ccYoloVersion_t yoloV; +} ccYoloRegion_t; + +/** + * @ingroup dnn + * @brief struct define of power operator + */ +typedef struct tagCcPower { + float scale; + float shift; + float power; +} ccPower_t; + +/** + * @ingroup dnn + * @brief struct define of exp operator + */ +typedef struct tagCcExp { + ccDataType_t dataType; + uint32_t paramCnt; +} ccExp_t; + +/** + * @ingroup dnn + * @brief struct define of exp operator + */ +typedef struct tagCcLog { + ccDataType_t dataType; + uint32_t paramCnt; +} ccLog_t; + +/** + * @ingroup dnn + * @brief struct define of pow operator + */ +typedef struct tagCcPow { + ccDataType_t dataType; + uint32_t paramCnt; +} ccPow_t; + +/** + * @ingroup dnn + * @brief struct define of padv2 operator + */ +typedef struct tagCcPadV2 { + ccPadMode_t padMode; + void *padValue; + ccDataType_t padValueType; + int32_t padDimCnt; + int32_t padShapeLow[CC_DIM_MAX]; + int32_t padShapeHigh[CC_DIM_MAX]; +} ccPadV2_t; + +/** + * @ingroup dnn + * @brief struct define of psROIPooling operator + */ +typedef struct tagCcPsRoiPooling { + ccPoolingMode_t poolingMode; + int32_t pooledH; + int32_t pooledW; + float spatialScale; + float padRatio; + int32_t groupSize; + int32_t outputDim; +} ccPsRoiPooling_t; + +/** + * @ingroup dnn + * @brief struct define of RoIAlign operator + */ +typedef struct tagCcRoiAlign { + int32_t pooledH; + int32_t pooledW; + float spatialScale; + int32_t samplingRatio; +} ccRoiAlign_t; + +/** + * @ingroup dnn + * @brief struct define of RoiInterpPooling operator + */ +typedef struct tagCcRoiInterpPooling { + int32_t pooledH; + int32_t pooledW; + int32_t poolKernelH; + int32_t poolKernelW; + int32_t pooledTailH; + int32_t pooledTailW; + float spatialScaleH; + float spatialScaleW; +} ccRoiInterpPooling_t; + +/** + * @ingroup dnn + * @brief struct define of 
DetectionFull3DOutput operator + */ +typedef struct tagCcDetectionFull3DOutput { + int32_t imageWidth; + int32_t imageHeight; + int32_t numAngleBins; + float trcMarginRatioX; + float trcMarginRatioY; + int32_t pitchRangeD; + int32_t pitchPresetD; + float mountHeight; + int32_t visiblenessBins; + float meanVisibleness; + bool discreteVisibleness; +} ccDetectionFull3DOutput_t; + +/** + * @ingroup dnn + * @brief struct define of MsrFastRcnnPredictions operator + */ +typedef struct tagMsrFastRcnnPredictions { + int32_t numClasses; // num of classes + float scoreThreshold; // the threshold of the score + double nmsThreshold; // the threshold of nms + int32_t postTopK; + int32_t outTopK; + int32_t imgH; // the height of image + int32_t imgW; // the width of image +} ccMsrFastRcnnPredictions_t; + +typedef struct tagCcResizeBilinear { + ccResizeOutputDimMode_t resizeOutputDimMode; + bool alignCorners; + int32_t zoom_factor; + int32_t shrink_factor; + int32_t height; + int32_t width; + int32_t pad_begin; + int32_t pad_end; +} ccResizeBilinear_t; + +typedef struct tagCcResizeNearestNeighbor { + bool alignCorners; + int32_t height; + int32_t width; +} ccResizeNearestNeighbor_t; + +typedef struct tagCcEltwise { + ccQuantize_t *quantInfo; + bool reluFlag; +} ccEltwise_t; + +typedef struct tagCcBatchNorm { + bool reluFlag; +} ccBatchNorm_t; + +typedef struct tagCcPad { + ccPadMode_t padMode; + float padValue; + int32_t htoppad; // padLow[0] + int32_t hbottompad; // padHigh[0] + int32_t wleftpad; // padLow[1] + int32_t wrightpad; // padHigh[1] +} ccPad_t; + +typedef struct tagCcSubCondition { + uint32_t BaseCondValue[4]; + ccCMPType_t condType[4]; + ccResultType_t resultType; +} ccSubCondition; + +typedef struct tagCcShapeClassifyCond { + uint32_t subConditionNum; + ccResultType_t resultType; + uint32_t true_value; + ccSubCondition subCond[2]; +} ccShapeClassifyCond; + +#ifndef CC_SHAPE_CLASSIFY_CONDITION_NUM +#define CC_SHAPE_CLASSIFY_CONDITION_NUM (8) +#endif + +typedef struct 
tagCcShapeClassify { + uint32_t shapeClassifyConditionNum; + uint32_t defaultValue; + ccShapeClassifyCond shapeClassifyCond[CC_SHAPE_CLASSIFY_CONDITION_NUM]; +} ccShapeClassify_t; + +/** + * @ingroup dnn + * @bref struct define of square operator + */ +typedef struct tagCcSquare { + ccSquareMode_t mode; +} ccSquare_t; + +/* + * @ingroup dnn + * @brief operation of segment reduction + */ +typedef enum { + CC_SEGMENT_REDUCTION_OP_SUM = 0, /**< sum */ + CC_SEGMENT_REDUCTION_OP_INVALID +} ccSegmentReductionOpType_t; + +typedef struct tagCcFillParam { + // The filler type. + ccFillOpType_t fillType; + ccDataType_t valueDatatype; + const void *value; // the value in constant fill + const void *min; // the min value in uniform fill + const void *max; // the max value in uniform fill + const void *mean; // the mean value in Gaussian fill + const void *std; // the std value in Gaussian fill + // the seed used to generate data in Gaussian and uniform fill + int64_t seed1; + int64_t seed2; +} ccFillParam_t; + +typedef struct tagNonMaxSuppression { + ccDataType_t dataType; + uint32_t paraCount; +} ccNonMaxSuppression_t; + +typedef struct tagCcArgmaxmin { + int32_t axisType; + bool outMaxVal; + int64_t topK; + int64_t reduceSize; + int64_t reduceStride; + int64_t axis; + bool keepDims; +} ccArgmaxmin_t; + +typedef struct tagUpsamplePara { + int32_t scale; + int32_t scaleHeight; + int32_t scaleWidth; + int32_t upsampleHeight; + int32_t upsampleWidth; + bool padOutHeight; + bool padOutWidth; +} ccUpsamplePara_t; + +typedef struct tagCcConcatFive2Four_t { + ccTransForLossMode_t mode; + uint32_t classNum; +} ccConcatFive2Four_t; + +}; // namespace cce +#endif /* __DNN_STRUCT_BASE_HPP__ */ diff --git a/third_party/fwkacllib/inc/cce/fwk_adpt_struct.h b/third_party/fwkacllib/inc/cce/fwk_adpt_struct.h new file mode 100644 index 00000000..91666607 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/fwk_adpt_struct.h @@ -0,0 +1,81 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., 
Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __FWK_ADPT_STRUCT_H__ +#define __FWK_ADPT_STRUCT_H__ + +#include + +namespace aicpu { +namespace FWKAdapter { + +// API RETURN CODE +enum FWKAdptAPIRetCode { + FWK_ADPT_SUCCESS = 0, // success + FWK_ADPT_NOT_INIT = 1, // not init + FWK_ADPT_ALLOC_FAILED = 2, // allocate memory failed + FWK_ADPT_PARAM_INVALID = 3, // invalid input param + FWK_ADPT_PARAM_PARSE_FAILED = 4, // parse input param failed + FWK_ADPT_NATIVE_ERROR = 5, // error code + FWK_ADPT_NOT_SUPPORT_OPTYPE = 6, // unsupported operate type + FWK_ADPT_INTERNAL_ERROR = 7, // adapter internal error + FWK_ADPT_NOT_SUPPORT_DATATYPE = 8, // unsupported input/output data type + FWK_ADPT_KERNEL_ALREADY_RUNING = 9, // kernel already running, parallel run not supported + FWK_ADPT_SESSION_NOT_EXIST = 10, // session id not exist + FWK_ADPT_SESSION_ALREADY_EXIST = 11, // session id already exists for create session + FWK_ADPT_NATIVE_END_OF_SEQUENCE = 12, // end of sequence + FWK_ADPT_UNKNOWN_ERROR = 99 // unknown error code +}; + +// FWKAdapter operate type +// Notice: adding a new operate type needs to be checked with OMM, and make sure it is appended at the end line.
+enum FWKOperateType { + FWK_ADPT_SESSION_CREATE = 0, + FWK_ADPT_KERNEL_RUN = 1, + FWK_ADPT_SESSION_DESTROY = 2, + FWK_ADPT_SINGLE_OP_RUN = 3 +}; + +// API Parameter Structure +struct StrFWKKernel { + FWKOperateType opType; + uint64_t sessionID; // unique + + uint64_t stepIDAddr; // step id addr + uint64_t kernelID; // run kernel id, unique in session + uint64_t nodeDefLen; // nodeDef protobuf len + uint64_t nodeDefBuf; // NodeDef protobuf offset addr, need convert to void* + uint64_t funDefLibLen; // FunctionDefLibrary protobuf len + uint64_t funDefLibBuf; // FunctionDefLibrary protobuf addr which is used in NodeDef, need convert to void* + + uint64_t inputOutputLen; // InputOutput shape protobuf len + uint64_t inputOutputBuf; // InputOutput shape protobuf addr, need convert to void* + uint64_t workspaceBaseAddr; // Workspace base addr, need convert to void* + uint64_t inputOutputAddr; // InputOutput addr, need convert to void* +} __attribute__((packed)); + +typedef StrFWKKernel FWKOperateParam; + +struct ResultSummary { + uint64_t shape_data_ptr; // shape data addr, need convert to void* + uint64_t shape_data_size; // num of dims + uint64_t raw_data_ptr; // raw data addr, need convert to void* + uint64_t raw_data_size; // size of raw data +} __attribute__((packed)); +} // end namespace FWKAdapter +} // namespace aicpu + +#endif //__FWK_ADPT_STRUCT_H__ diff --git a/third_party/fwkacllib/inc/cce/l2fusion_struct.hpp b/third_party/fwkacllib/inc/cce/l2fusion_struct.hpp new file mode 100644 index 00000000..319d65b9 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/l2fusion_struct.hpp @@ -0,0 +1,56 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _L2FUSION_STRUCT_HPP_ +#define _L2FUSION_STRUCT_HPP_ + +#include +#include +#include "runtime/kernel.h" + +#define L2_DYNAMIC_SPLIT_NUM + +using namespace std; + +namespace fusion { + +typedef struct tagL2Data { + uint32_t l2Index; + uint64_t l2Addr; + uint64_t l2PageNum; +} L2Data_t; + +typedef std::map L2DataMap_t; // the key is ddr addr +typedef std::pair L2DataPair_t; // the key is ddr addr + +typedef struct TagTaskL2Info { + string nodeName; + rtL2Ctrl_t l2ctrl; + + L2DataMap_t input; + L2DataMap_t output; + uint32_t isUsed; +} TaskL2Info_t; + +typedef std::map TaskL2InfoMap_t; // the key is nodeId +typedef std::pair TaskL2InfoPair_t; // the key is nodeId + +typedef std::map TaskL2InfoFEMap_t; // the key is nodeName +typedef std::pair TaskL2InfoFEPair_t; // the key is nodeName + +} // namespace fusion + +#endif diff --git a/third_party/fwkacllib/inc/cce/optimizer/fusion_engine.h b/third_party/fwkacllib/inc/cce/optimizer/fusion_engine.h new file mode 100644 index 00000000..4e38c9eb --- /dev/null +++ b/third_party/fwkacllib/inc/cce/optimizer/fusion_engine.h @@ -0,0 +1,65 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _FUSION_ENGINE_HPP_ +#define _FUSION_ENGINE_HPP_ + +#include "cce/cce.h" +#include "graph/compute_graph.h" +#include "proto/task.pb.h" + +#include +#include + +using namespace domi; +using namespace std; + +namespace fusion { +enum { + FUSION_STATUS_SUCCESS = 0, + FUSION_STATUS_FAIL = 1, +}; + +typedef struct { + uint64_t weightSize; + uint64_t memorySize; + uint8_t *dataMemBase; + uint8_t *weightMemBase; + uint32_t l2Enable; // 1 //1 - enable l2 buffer allocation, 0 - disable l2 buffer allocation + uint32_t fusionEnable; // 1 // 1 - enable buffer fusion, 0 - disable buffer fusion +} ModelRes; + +static const std::string SCOPE_ID_ATTR = "fusion_scope"; +static const std::string L2FUSION_DYNAMIC_CONVERGE_OP = "l2fusion_dynamic_converge_op"; +static const std::string L2FUSION_DYNAMIC_SPLIT_NUM = "l2fusion_dynamic_split_num"; +static const std::string FUSION_VIRTUAL_OP = "fusion_virtual_op"; +static const std::string FUSION_MULTI_BATCH_STRIDE = "fusion_multi_bathc_stride"; + +#define TVM_TYPE 1 + +typedef std::map> kScopeNodeMap_t; +typedef std::pair> kScopeNodePair_t; + +uint32_t BufferFusion(ge::ComputeGraphPtr origGraph, ge::ComputeGraphPtr fusionGraph, bool enable_l2dynamic = true); +uint32_t BufferFusionTrain(ge::ComputeGraphPtr origGraph, ge::ComputeGraphPtr fusionGraph); +uint32_t GraphFusion(ge::ComputeGraphPtr origGraph, ge::ComputeGraphPtr fusionGraph); +uint32_t FusionTaskBuild(cce::ccHandle_t ccHandle, ge::ComputeGraphPtr fusionGraph, ge::Buffer &buffer, + ModelRes &modelRes, std::vector &task_def_list_); +void 
FusionTaskBuildComplete(std::vector cchandleList); +uint32_t GraphFusionTrain(ge::ComputeGraphPtr origGraph, ge::ComputeGraphPtr fusionGraph); +} // namespace fusion + +#endif diff --git a/third_party/fwkacllib/inc/cce/taskdown_api.h b/third_party/fwkacllib/inc/cce/taskdown_api.h new file mode 100644 index 00000000..ef449bb2 --- /dev/null +++ b/third_party/fwkacllib/inc/cce/taskdown_api.h @@ -0,0 +1,45 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _TASKDOWN_API_H_ +#define _TASKDOWN_API_H_ + +#include +#include +#include "cce/cce.h" +#include "l2fusion_struct.hpp" +#include "taskdown_common.hpp" + +namespace cce { + +#define CC_FUSION_OP_MAX 32 + +typedef struct tagOpAddrsInfo { + void *addrPos; + uintptr_t addrData; +} ccOpAddrsInfo; + +ccStatus_t ccUpdateKernelArgs(ccOpContext &opContext, uint64_t dataBaseAddr, uint64_t weightBaseAddr, + uint64_t variableBaseAddr, void *argsAddr, uint64_t argsSize, void *l2ctrlAddr); +ccStatus_t ccGetKernelArgsAddrs(ccOpContext &opContext, void *argsAddr, uint64_t argsSize, void *l2ctrlAddr, + std::vector &opAddrsInfo); + +ccStatus_t ccSetKernelArgs(std::vector &dateInfo); + +ccStatus_t ccGetKernelTypeByOpId(uint32_t opId, ccKernelType &kernelType); + +} // namespace cce +#endif diff --git a/third_party/fwkacllib/inc/cce/taskdown_common.hpp b/third_party/fwkacllib/inc/cce/taskdown_common.hpp new file mode 100644 index 00000000..3993e50f --- /dev/null +++ b/third_party/fwkacllib/inc/cce/taskdown_common.hpp @@ -0,0 +1,106 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _TASKDOWN_COMMON_H_ +#define _TASKDOWN_COMMON_H_ + +#include +#include "cce/cce_def.hpp" +#include "common/attr_list.hpp" +#include "l2fusion_struct.hpp" + +namespace cce { + +#define CC_FUSION_OP_MAX 32 + +typedef enum tagccKernelType { + CCE_AI_CORE = 0, /* cce aicore */ + CCE_AI_CPU = 1, /* cce aicpu */ + TE = 2, /* te operator*/ + CUSTOMIZED = 3, /* customized operator */ + TE_AI_CORE = 4, /* te aicore operator*/ + TE_AI_CPU = 5, /* te aicpu operator */ + AI_CPU = 6, /* aicpu */ + INVALID = 7, /* unknown kernel type */ +} ccKernelType; + +typedef struct tagOpContext { + ccKernelType kernelType; + uint32_t opId; + uint32_t kernelFuncId; + uint32_t opIndex; + uint32_t opCount; + uint32_t opIndex2[CC_FUSION_OP_MAX]; + bool isFlowtable; + uint16_t *argsOffset; + uint32_t argsCount; + uint64_t genDataBaseAddr; + uint64_t genDataBaseSize; + uint64_t genWeightBaseAddr; + uint64_t genWeightBaseSize; + uint64_t genVariableBaseAddr; + uint64_t genVariableBaseSize; + uint64_t l2ctrlSize; +} ccOpContext; + +typedef struct tagOpReadCount { + bool isEnable; + std::map tensorRc; +} ccOpReadCount; + +typedef enum tagTaskDownKernelIdMode { + CC_TASKDOWN_RESERVED = 0, + CC_TASKDOWN_ROIPOOLING, + CC_TASKDOWN_ROIPOOLING_PERF, + CC_TASKDOWN_ROIALIGN, + CC_TASKDOWN_ROIALIGN_PERF, + CC_TASKDOWN_FC, + CC_TASKDOWN_FC_COMPRESS, + CC_TASKDOWN_SOFTMAX_LOWEST, + CC_TASKDOWN_ROIALIGN_FP16, + CC_TASKDOWN_RESIZE_NEAREST_NEIGHBOR, + CC_TASKDOWN_RESIZE_NEAREST_NEIGHBOR_COMMON, +} ccTaskDownKernelIdMode_t; + +ccStatus_t GetStream(ccHandle_t handle, rtStream_t *streamId); + +ccStatus_t ccClearOpMap(ccHandle_t handle); + +ccStatus_t ccSetKernelOpMap(ccHandle_t handle); + +ccStatus_t ccSetKernelContext(ccHandle_t handle, uint32_t opId, AttrList &attrList, bool isFlowtable, + ccKernelType kernelType, void *pgraph); + +ccStatus_t ccGetKernelContext(rtStream_t streamId, ccOpContext &opContext); + +ccStatus_t ccGetKernelTypeByOpId(uint32_t opId, ccKernelType &kernelType); + +ccStatus_t 
ccSetStreamL2Map(ccHandle_t handle, fusion::TaskL2InfoMap_t &l2AllocRes); + +ccStatus_t ccGetStreamL2Map(rtStream_t streamId, uint32_t opIndex, fusion::TaskL2Info_t *&l2Data); + +ccStatus_t ccSetOpIndex(ccHandle_t handle, uint32_t opIndex); + +ccStatus_t ccGetOpIndex(ccHandle_t handle, uint32_t &opIndex); + +ccStatus_t ccGetOpIndexByStream(rtStream_t streamId, uint32_t &opIndex); + +ccStatus_t ccClearStreamL2Map(ccHandle_t handle); + +ccStatus_t ccGetKernelReadCount(rtStream_t streamId, ccOpReadCount &rc); + +} // namespace cce +#endif diff --git a/third_party/fwkacllib/inc/hccl/base.h b/third_party/fwkacllib/inc/hccl/base.h new file mode 100644 index 00000000..2777fa23 --- /dev/null +++ b/third_party/fwkacllib/inc/hccl/base.h @@ -0,0 +1,106 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __HCCL_BASE_H__ +#define __HCCL_BASE_H__ + +#ifdef __cplusplus +extern "C" { +#endif + +typedef signed char s8; +typedef signed short s16; +typedef signed int s32; +typedef signed long long s64; + +typedef unsigned char u8; +typedef unsigned short u16; +typedef unsigned int u32; +typedef unsigned long long u64; + +typedef enum tagHcclResult { + HCCL_SUCCESS = 0, /**< success */ + HCCL_E_PARA = 1, /**< parameter error */ + HCCL_E_PTR = 2, /**< empty pointer */ + HCCL_E_MEMORY = 3, /**< memory error */ + HCCL_E_INTERNAL = 4, /**< internal error */ + HCCL_E_NOT_SUPPORT = 5, /**< not support feature */ + HCCL_E_NOT_FOUND = 6, /**< not found specific resource */ + HCCL_E_UNAVAIL = 7, /**< resource unavailable */ + HCCL_E_SYSCALL = 8, /**< call system interface error */ + HCCL_E_TIMEOUT = 9, /**< timeout */ + HCCL_E_OPEN_FILE_FAILURE = 10, /**< open file fail */ + HCCL_E_TCP_CONNECT = 11, /**< tcp connect fail */ + HCCL_E_ROCE_CONNECT = 12, /**< roce connect fail */ + HCCL_E_TCP_TRANSFER = 13, /**< tcp transfer fail */ + HCCL_E_ROCE_TRANSFER = 14, /**< roce transfer fail */ + HCCL_E_RUNTIME = 15, /**< call runtime api fail */ + HCCL_E_DRV = 16, /**< call driver api fail */ + HCCL_E_PROFILING = 17, /**< call profiling api fail */ + HCCL_E_CCE = 18, /**< call cce api fail */ + HCCL_E_NETWORK = 19, /**< call network api fail */ + HCCL_E_RESERVED /**< reserved */ +} hcclResult_t; + +/* handle to communicator */ +typedef void *hcclComm_t; + +typedef enum tagHcclRedOp { + HCCL_REP_OP_SUM = 0, /**< sum */ + HCCL_REP_OP_PROD = 1, /**< prod */ + HCCL_REP_OP_MAX = 2, /**< max */ + HCCL_REP_OP_MIN = 3, /**< min */ + HCCL_REP_OP_RESERVED /**< reserved */ +} hcclRedOp_t; + +typedef enum tagHcclDataType { + HCCL_DATA_TYPE_INT8 = 0, /**< int8 */ + HCCL_DATA_TYPE_INT = 1, /**< int32 */ + HCCL_DATA_TYPE_HALF = 2, /**< fp16 */ + HCCL_DATA_TYPE_FLOAT = 3, /**< fp32 */ + HCCL_DATA_TYPE_RESERVED /**< reserved */ +} hcclDataType_t; + +const s32 HCCL_TAG_ANY = -1; +const u32 
BASE_UNIQUE_ID_BYTES = 27; +#define HCCL_UNIQUE_ID_BYTES (BASE_UNIQUE_ID_BYTES + 5 + 16 + 128) +typedef struct { + char internal[HCCL_UNIQUE_ID_BYTES]; +} hcclUniqueId; + +const u32 HCCL_MAX_SEGMENT_NUM = 8; + +struct model_feature { + const char *model_name; + u32 gradient_num; + float *gradient_size; + float *gradient_time; +}; + +/** + * @brief stream handle. + */ +typedef void *rtStream_t; + +/** + * @brief model handle. + */ +typedef void *rtModel_t; +#ifdef __cplusplus +} +#endif + +#endif // __HCCL_BASE_H__ diff --git a/third_party/fwkacllib/inc/hccl/hcom.h b/third_party/fwkacllib/inc/hccl/hcom.h new file mode 100644 index 00000000..323faf09 --- /dev/null +++ b/third_party/fwkacllib/inc/hccl/hcom.h @@ -0,0 +1,79 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __HCOM_H__ +#define __HCOM_H__ + +#include + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +extern hcclResult_t hcom_init(const char *rank_table, const char *identify); + +extern hcclResult_t hcom_destroy(void); + +extern hcclResult_t hcom_bind_model(rtModel_t model, rtStream_t stream); + +extern hcclResult_t hcom_unbind_model(rtModel_t model); + +extern hcclResult_t hcom_all_gather(const char *tag, void *inputPtr, void *outputPtr, u64 inputCount, + hcclDataType_t dataType, const char *group, rtStream_t stream); + +extern hcclResult_t hcom_all_reduce(const char *tag, void *inputPtr, void *outputPtr, u64 count, + hcclDataType_t dataType, hcclRedOp_t op, const char *group, rtStream_t stream); + +extern hcclResult_t hcom_broadcast(const char *tag, void *ptr, u64 count, hcclDataType_t dataType, u32 root, + const char *group, rtStream_t stream); + +extern hcclResult_t hcom_reduce_scatter(const char *tag, void *inputPtr, void *outputPtr, u64 count, + hcclDataType_t dataType, hcclRedOp_t op, const char *group, rtStream_t stream); + +hcclResult_t hcom_get_rank_size(const char *group, u32 *rankSize); + +hcclResult_t hcom_get_local_rank_size(const char *group, u32 *localRankSize); + +hcclResult_t hcom_get_rank_id(const char *group, u32 *rankId); + +hcclResult_t hcom_get_local_rank_id(const char *group, u32 *localRankId); + +hcclResult_t hcom_get_world_rank_from_group_rank(const char *group, u32 groupRank, u32 *worldRank); + +hcclResult_t hcom_get_group_rank_from_world_rank(u32 worldRank, const char *group, u32 *groupRank); + +hcclResult_t hcom_create_group(const char *group, u32 rankNum, u32 *rankIds); + +hcclResult_t hcom_destroy_group(const char *group); + +hcclResult_t hcom_send(const char *tag, void *inputPtr, u64 count, hcclDataType_t dataType, u32 destRank, u32 srTag, + const char *group, rtStream_t stream); + +hcclResult_t hcom_receive(const char *tag, void *outputPtr, u64 count, hcclDataType_t dataType, u32 srcRank, u32 srTag, + const char 
*group, rtStream_t stream); + +hcclResult_t hcom_get_split_strategy(const char *group, const struct model_feature *feature, u32 maxSegmentNum, + u32 *segmentNum, u32 *segmentIdx); + +extern hcclResult_t hcom_set_split_strategy_by_index(const char *group, u32 segmentNum, const u32 *IdxList); + +extern hcclResult_t hcom_set_split_strategy_by_size(const char *group, u32 segmentNum, const float *sizeList); +#ifdef __cplusplus +} +#endif +#endif // __HCOM_H__ diff --git a/third_party/fwkacllib/inc/mmpa/mmpa_api.h b/third_party/fwkacllib/inc/mmpa/mmpa_api.h new file mode 100644 index 00000000..01240198 --- /dev/null +++ b/third_party/fwkacllib/inc/mmpa/mmpa_api.h @@ -0,0 +1,125 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MMPA_API_H_ +#define _MMPA_API_H_ + +#define LINUX 0 +#define WIN 1 + +#if(OS_TYPE == LINUX) + +#ifndef _GNU_SOURCE +#define _GNU_SOURCE +#endif + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include +#include +#include +#include + +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include + +#include "securec.h" + +#include "./sub_inc/mmpa_typedef_linux.h" +#include "./sub_inc/mmpa_linux.h" + +#endif + + +#if(OS_TYPE == WIN) +#include +#include +#include "Windows.h" +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include +#include "shlwapi.h" +#include +#include "sub_inc/mmpa_typedef_win.h" +#include "sub_inc/mmpa_win.h" +#include +#include +#include +#include + +#include +#include + +#pragma comment(lib, "ws2_32.lib") +#pragma comment(lib, "mswsock.lib") +#pragma comment(lib, "Kernel32.lib") +#pragma comment(lib, "shlwapi.lib") +#pragma comment(lib, "wbemuuid.lib") +#pragma comment(lib, "Iphlpapi.lib") +#endif + +#endif /* _MMPA_API_H_ */ + diff --git a/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_linux.h b/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_linux.h new file mode 100644 index 00000000..5ed1811c --- /dev/null +++ b/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_linux.h @@ -0,0 +1,418 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MMPA_LINUX_MMPA_LINUX_H +#define MMPA_LINUX_MMPA_LINUX_H + +#ifdef __cplusplus +#if __cplusplus +extern "C" { +#endif /* __cpluscplus */ +#endif /* __cpluscplus */ + +#define MMPA_MACINFO_DEFAULT_SIZE 18 +#define MMPA_CPUDESC_DEFAULT_SIZE 64 + +typedef pthread_t mmThread; +typedef pthread_mutex_t mmMutex_t; +typedef pthread_cond_t mmCond; +typedef pthread_mutex_t mmMutexFC; +typedef signed int mmProcess; +typedef int mmPollHandle; +typedef int mmPipeHandle; +typedef int mmComPletionKey; +typedef int mmCompletionHandle; + +typedef VOID *mmExitCode; +typedef key_t mmKey_t; +typedef int mmMsgid; +typedef struct dirent mmDirent; +typedef int (*mmFilter)(const mmDirent *entry); +typedef int (*mmSort)(const mmDirent **a, const mmDirent **b); + +typedef VOID *(*userProcFunc)(VOID *pulArg); + +typedef struct { + userProcFunc procFunc; + VOID *pulArg; +} mmUserBlock_t; + +typedef struct { + int wSecond; // Seconds. [0-60] (1 leap second) + int wMinute; // Minutes. [0-59] + int wHour; // Hours. [0-23] + int wDay; // Day. [1-31] + int wMonth; // Month. [1-12] + int wYear; // Year + int wDayOfWeek; // Day of week. [0-6] + int tm_yday; // Days in year.[0-365] + int tm_isdst; // DST. 
[-1/0/1] + long int wMilliseconds; // milliseconds +} mmSystemTime_t; + +typedef sem_t mmSem_t; +typedef struct sockaddr mmSockAddr; +typedef socklen_t mmSocklen_t; +typedef int mmSockHandle; +typedef timer_t mmTimer; +typedef pthread_key_t mmThreadKey; + +typedef int mmOverLap; + +typedef ssize_t mmSsize_t; + +typedef struct { + UINT32 createFlag; + INT32 oaFlag; +} mmCreateFlag; + +typedef struct { + VOID *sendBuf; + INT32 sendLen; +} mmIovSegment; +typedef struct in_addr mmInAddr; + +typedef struct { + VOID *inbuf; + INT32 inbufLen; + VOID *outbuf; + INT32 outbufLen; + mmOverLap *oa; +} mmIoctlBuf; + +typedef int mmAtomicType; + +typedef enum { + pollTypeRead = 1, // pipe read + pollTypeRecv, // socket recv + pollTypeIoctl, // ioctl +} mmPollType; + +typedef struct { + mmPollHandle handle; + mmPollType pollType; + + INT32 ioctlCode; + mmComPletionKey completionKey; + +} mmPollfd; + +typedef struct { + VOID *priv; + mmPollHandle bufHandle; + mmPollType bufType; + VOID *buf; + UINT32 bufLen; + UINT32 bufRes; +} mmPollData, *pmmPollData; + +typedef VOID (*mmPollBack)(pmmPollData); + +typedef struct { + INT32 tz_minuteswest; + INT32 tz_dsttime; // type of DST correction +} mmTimezone; + +typedef struct { + LONG tv_sec; + LONG tv_usec; +} mmTimeval; + +typedef struct { + LONG tv_sec; + LONG tv_nsec; +} mmTimespec; + +typedef struct { + ULONGLONG totalSize; + ULONGLONG freeSize; + ULONGLONG availSize; +} mmDiskSize; + +#define mmTLS __thread +typedef struct stat mmStat_t; +typedef struct stat64 mmStat64_t; +typedef mode_t mmMode_t; + +typedef struct option mmStructOption; + +typedef struct { + char addr[MMPA_MACINFO_DEFAULT_SIZE]; // ex:aa-bb-cc-dd-ee-ff\0 +} mmMacInfo; + +typedef struct { + char **argv; + INT32 argvCount; + char **envp; + INT32 envpCount; +} mmArgvEnv; + +typedef struct { + char arch[MMPA_CPUDESC_DEFAULT_SIZE]; + char manufacturer[MMPA_CPUDESC_DEFAULT_SIZE]; // vendor + char version[MMPA_CPUDESC_DEFAULT_SIZE]; // modelname + INT32 frequency; // cpu 
frequency + INT32 maxFrequency; // max speed + INT32 ncores; // cpu cores + INT32 nthreads; // cpu thread count + INT32 ncounts; // logical cpu nums +} mmCpuDesc; + +typedef mode_t MODE; + +typedef struct { + INT32 detachFlag; + INT32 priorityFlag; + INT32 priority; + INT32 policyFlag; + INT32 policy; + + INT32 stackFlag; + UINT32 stackSize; +} mmThreadAttr; + +#ifdef __ANDROID__ +#define S_IREAD S_IRUSR +#define S_IWRITE S_IWUSR +#endif + +#define M_FILE_RDONLY O_RDONLY +#define M_FILE_WRONLY O_WRONLY +#define M_FILE_RDWR O_RDWR +#define M_FILE_CREAT O_CREAT + +#define M_RDONLY O_RDONLY +#define M_WRONLY O_WRONLY +#define M_RDWR O_RDWR +#define M_CREAT O_CREAT +#define M_BINARY O_RDONLY + +#define M_IREAD S_IREAD +#define M_IRUSR S_IRUSR +#define M_IWRITE S_IWRITE +#define M_IWUSR S_IWUSR +#define M_IXUSR S_IXUSR +#define FDSIZE 64 +#define M_MSG_CREAT IPC_CREAT +#define M_MSG_EXCL (IPC_CREAT | IPC_EXCL) +#define M_MSG_NOWAIT IPC_NOWAIT + +#define M_WAIT_NOHANG WNOHANG +#define M_WAIT_UNTRACED WUNTRACED + +#define M_UMASK_USRREAD S_IRUSR +#define M_UMASK_GRPREAD S_IRGRP +#define M_UMASK_OTHREAD S_IROTH + +#define M_UMASK_USRWRITE S_IWUSR +#define M_UMASK_GRPWRITE S_IWGRP +#define M_UMASK_OTHWRITE S_IWOTH + +#define M_UMASK_USREXEC S_IXUSR +#define M_UMASK_GRPEXEC S_IXGRP +#define M_UMASK_OTHEXEC S_IXOTH + +#define mmConstructor(x) __attribute__((constructor)) VOID x() +#define mmDestructor(x) __attribute__((destructor)) VOID x() + +#define MMPA_NO_ARGUMENT 0 +#define MMPA_REQUIRED_ARGUMENT 1 +#define MMPA_OPTIONAL_ARGUMENT 2 + +#define MMPA_MAX_PATH PATH_MAX + +#define M_F_OK F_OK +#define M_R_OK R_OK +#define M_W_OK W_OK + +#define MMPA_RTLD_NOW RTLD_NOW +#define MMPA_RTLD_GLOBAL RTLD_GLOBAL + +#define MMPA_DL_EXT_NAME ".so" + +extern INT32 mmCreateTask(mmThread *threadHandle, mmUserBlock_t *funcBlock); +extern INT32 mmJoinTask(mmThread *threadHandle); +extern INT32 mmMutexInit(mmMutex_t *mutex); +extern INT32 mmMutexLock(mmMutex_t *mutex); +extern INT32 
mmMutexUnLock(mmMutex_t *mutex); +extern INT32 mmMutexDestroy(mmMutex_t *mutex); +extern INT32 mmCondInit(mmCond *cond); +extern INT32 mmCondLockInit(mmMutexFC *mutex); +extern INT32 mmCondLock(mmMutexFC *mutex); +extern INT32 mmCondUnLock(mmMutexFC *mutex); +extern INT32 mmCondLockDestroy(mmMutexFC *mutex); +extern INT32 mmCondWait(mmCond *cond, mmMutexFC *mutex); +extern INT32 mmCondTimedWait(mmCond *cond, mmMutexFC *mutex, UINT32 milliSecond); +extern INT32 mmCondNotify(mmCond *cond); +extern INT32 mmCondNotifyAll(mmCond *cond); +extern INT32 mmCondDestroy(mmCond *cond); +extern INT32 mmGetPid(); +extern INT32 mmGetTid(); +extern INT32 mmGetPidHandle(mmProcess *processHandle); +extern INT32 mmGetLocalTime(mmSystemTime_t *sysTime); + +extern INT32 mmSemInit(mmSem_t *sem, UINT32 value); +extern INT32 mmSemWait(mmSem_t *sem); +extern INT32 mmSemPost(mmSem_t *sem); +extern INT32 mmSemDestroy(mmSem_t *sem); +extern INT32 mmOpen(const CHAR *pathName, INT32 flags); +extern INT32 mmOpen2(const CHAR *pathName, INT32 flags, MODE mode); +extern INT32 mmClose(INT32 fd); +extern mmSsize_t mmWrite(INT32 fd, VOID *buf, UINT32 bufLen); +extern mmSsize_t mmRead(INT32 fd, VOID *buf, UINT32 bufLen); +extern mmSockHandle mmSocket(INT32 sockFamily, INT32 type, INT32 protocol); +extern INT32 mmBind(mmSockHandle sockFd, mmSockAddr *addr, mmSocklen_t addrLen); +extern INT32 mmListen(mmSockHandle sockFd, INT32 backLog); +extern mmSockHandle mmAccept(mmSockHandle sockFd, mmSockAddr *addr, mmSocklen_t *addrLen); +extern INT32 mmConnect(mmSockHandle sockFd, mmSockAddr *addr, mmSocklen_t addrLen); +extern INT32 mmCloseSocket(mmSockHandle sockFd); +extern mmSsize_t mmSocketSend(mmSockHandle sockFd, VOID *sendBuf, INT32 sendLen, INT32 sendFlag); +extern mmSsize_t mmSocketRecv(mmSockHandle sockFd, VOID *recvBuf, INT32 recvLen, INT32 recvFlag); +extern INT32 mmSAStartup(); +extern INT32 mmSACleanup(); +extern VOID *mmDlopen(const CHAR *fileName, INT32 mode); +extern VOID *mmDlsym(VOID *handle, 
CHAR *funcName); +extern INT32 mmDlclose(VOID *handle); +extern CHAR *mmDlerror(); +extern INT32 mmCreateAndSetTimer(mmTimer *timerHandle, mmUserBlock_t *timerBlock, UINT milliSecond, UINT period); +extern INT32 mmDeleteTimer(mmTimer timerHandle); +extern INT32 mmStatGet(const CHAR *path, mmStat_t *buffer); +extern INT32 mmStat64Get(const CHAR *path, mmStat64_t *buffer); +extern INT32 mmMkdir(const CHAR *pathName, mmMode_t mode); +extern INT32 mmSleep(UINT32 milliSecond); + +extern INT32 mmCreateTaskWithAttr(mmThread *threadHandle, mmUserBlock_t *funcBlock); +extern INT32 mmGetProcessPrio(mmProcess pid); +extern INT32 mmSetProcessPrio(mmProcess pid, INT32 processPrio); +extern INT32 mmGetThreadPrio(mmThread *threadHandle); +extern INT32 mmSetThreadPrio(mmThread *threadHandle, INT32 threadPrio); +extern INT32 mmAccess(const CHAR *pathName); +extern INT32 mmAccess2(const CHAR *pathName, INT32 mode); +extern INT32 mmRmdir(const CHAR *pathName); + +extern INT32 mmIoctl(mmProcess fd, INT32 ioctlCode, mmIoctlBuf *bufPtr); +extern INT32 mmSemTimedWait(mmSem_t *sem, INT32 timeout); +extern mmSsize_t mmWritev(mmProcess fd, mmIovSegment *iov, INT32 iovcnt); +extern VOID mmMb(); +extern INT32 mmInetAton(const CHAR *addrStr, mmInAddr *addr); + +extern mmProcess mmOpenFile(const CHAR *fileName, UINT32 access, mmCreateFlag fileFlag); +extern mmSsize_t mmReadFile(mmProcess fileId, VOID *buffer, INT32 len); +extern mmSsize_t mmWriteFile(mmProcess fileId, VOID *buffer, INT32 len); +extern INT32 mmCloseFile(mmProcess fileId); + +extern mmAtomicType mmSetData(mmAtomicType *ptr, mmAtomicType value); +extern mmAtomicType mmValueInc(mmAtomicType *ptr, mmAtomicType value); +extern mmAtomicType mmValueSub(mmAtomicType *ptr, mmAtomicType value); +extern INT32 mmCreateTaskWithDetach(mmThread *threadHandle, mmUserBlock_t *funcBlock); + +extern INT32 mmCreateNamedPipe(mmPipeHandle pipe[], CHAR *pipeName[], INT32 waitMode); +extern INT32 mmOpenNamePipe(mmPipeHandle pipe[], CHAR *pipeName[], 
INT32 waitMode); +extern VOID mmCloseNamedPipe(mmPipeHandle namedPipe[]); + +extern INT32 mmCreatePipe(mmPipeHandle pipe[], CHAR *pipeName[], UINT32 pipeCount, INT32 waitMode); +extern INT32 mmOpenPipe(mmPipeHandle pipe[], CHAR *pipeName[], UINT32 pipeCount, INT32 waitMode); +extern VOID mmClosePipe(mmPipeHandle pipe[], UINT32 pipeCount); + +extern mmCompletionHandle mmCreateCompletionPort(); +extern VOID mmCloseCompletionPort(mmCompletionHandle handle); +extern INT32 mmPoll(mmPollfd *fds, INT32 fdCount, INT32 timeout, mmCompletionHandle handleIOCP, pmmPollData polledData, + mmPollBack pollBack); +extern INT32 mmGetErrorCode(); +extern INT32 mmGetTimeOfDay(mmTimeval *timeVal, mmTimezone *timeZone); +extern mmTimespec mmGetTickCount(); +extern INT32 mmGetRealPath(CHAR *path, CHAR *realPath); +extern INT32 mmRealPath(const CHAR *path, CHAR *realPath, INT32 realPathLen); + +extern INT32 mmDup2(INT32 oldFd, INT32 newFd); + +extern INT32 mmUnlink(const CHAR *filename); + +extern INT32 mmChmod(const CHAR *filename, INT32 mode); + +extern INT32 mmFileno(FILE *stream); + +extern INT32 mmScandir(const CHAR *path, mmDirent ***entryList, mmFilter filterFunc, mmSort sort); + +extern VOID mmScandirFree(mmDirent **entryList, INT32 count); + +extern mmMsgid mmMsgCreate(mmKey_t key, INT32 msgFlag); + +extern mmMsgid mmMsgOpen(mmKey_t key, INT32 msgFlag); + +extern INT32 mmMsgSnd(mmMsgid msqid, VOID *buf, INT32 bufLen, INT32 msgFlag); + +extern INT32 mmMsgRcv(mmMsgid msqid, VOID *buf, INT32 bufLen, INT32 msgFlag); + +extern INT32 mmMsgClose(mmMsgid msqid); + +extern INT32 mmLocalTimeR(const time_t *timep, struct tm *result); + +extern INT32 mmGetOpt(INT32 argc, char *const *argv, const char *opts); +extern INT32 mmGetOptLong(INT32 argc, char *const *argv, const char *opts, const mmStructOption *longOpts, + INT32 *longIndex); + +extern LONG mmLseek(INT32 fd, INT64 offset, INT32 seekFlag); +extern INT32 mmFtruncate(mmProcess fd, UINT32 length); + +extern INT32 mmTlsCreate(mmThreadKey 
*key, VOID (*destructor)(VOID *)); +extern INT32 mmTlsSet(mmThreadKey key, const VOID *value); +extern VOID *mmTlsGet(mmThreadKey key); +extern INT32 mmTlsDelete(mmThreadKey key); +extern INT32 mmGetOsType(); + +extern INT32 mmFsync(mmProcess fd); +extern INT32 mmChdir(const CHAR *path); +extern INT32 mmUmask(INT32 pmode); +extern INT32 mmThreadKill(mmThread id); +extern INT32 mmWaitPid(mmProcess pid, INT32 *status, INT32 options); + +extern INT32 mmGetCwd(CHAR *buffer, INT32 maxLen); +extern INT32 mmGetEnv(const CHAR *name, CHAR *value, UINT32 len); +extern INT32 mmSetEnv(const CHAR *name, const CHAR *value, INT32 overwrite); +extern CHAR *mmStrTokR(CHAR *str, const CHAR *delim, CHAR **saveptr); +extern CHAR *mmDirName(CHAR *path); +extern CHAR *mmBaseName(CHAR *path); +extern INT32 mmGetDiskFreeSpace(const char *path, mmDiskSize *diskSize); + +extern INT32 mmSetThreadName(mmThread *threadHandle, const CHAR *name); + +extern INT32 mmGetThreadName(mmThread *threadHandle, CHAR *name, INT32 size); + +extern INT32 mmSetCurrentThreadName(const CHAR *name); + +extern INT32 mmGetCurrentThreadName(CHAR *name, INT32 size); +extern INT32 mmGetFileSize(const CHAR *fileName, ULONGLONG *length); +extern INT32 mmIsDir(const CHAR *fileName); +extern INT32 mmGetOsName(CHAR *name, INT32 nameSize); +extern INT32 mmGetOsVersion(CHAR *versionInfo, INT32 versionLength); +extern INT32 mmGetMac(mmMacInfo **list, INT32 *count); +extern INT32 mmGetMacFree(mmMacInfo *list, INT32 count); +extern INT32 mmGetCpuInfo(mmCpuDesc **cpuInfo, INT32 *count); +extern INT32 mmCpuInfoFree(mmCpuDesc *cpuInfo, INT32 count); +extern INT32 mmCreateProcess(const CHAR *fileName, const mmArgvEnv *env, const char *stdoutRedirectFile, mmProcess *id); + +extern INT32 mmCreateTaskWithThreadAttr(mmThread *threadHandle, const mmUserBlock_t *funcBlock, + const mmThreadAttr *threadAttr); +#define MMPA_DLL_API + +#ifdef __cplusplus +#if __cplusplus +} +#endif /* __cpluscplus */ +#endif /* __cpluscplus */ + +#endif /* 
_MMPA_LINUX_MMPA_LINUX_H_ */ diff --git a/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_typedef_linux.h b/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_typedef_linux.h new file mode 100644 index 00000000..522e9e4f --- /dev/null +++ b/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_typedef_linux.h @@ -0,0 +1,95 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MMPA_TYPEDEF_LINUX_H +#define MMPA_TYPEDEF_LINUX_H + +#ifdef __cplusplus +#if __cplusplus +extern "C" { +#endif /* __cpluscplus */ +#endif /* __cpluscplus */ + +#ifndef FALSE +#define FALSE 0 +#endif + +#ifndef TRUE +#define TRUE 1 +#endif + +typedef unsigned char UINT8; +typedef signed char INT8; +typedef unsigned short UINT16; +typedef signed short INT16; +typedef unsigned int UINT32; +typedef signed int INT32; +typedef unsigned long long UINT64; +typedef signed long long INT64; +typedef float FLOAT; +typedef double DOUBLE; +typedef void VOID; +typedef unsigned char UCHAR; +typedef char CHAR; +typedef unsigned short USHORT; +typedef short SHORT; +typedef unsigned int UINT; +typedef int INT; +typedef unsigned long ULONG; +typedef unsigned long long ULONGLONG; + +typedef long LONG; + +#define HANDLE_INVALID_VALUE (-1) +#define MMPA_MEM_MAX_LEN (0x7fffffff) +#define MMPA_PROCESS_ERROR (0x7fffffff) +#define PATH_SIZE 256 +#define MAX_IOVEC_SIZE 32 +#define MMPA_MAX_SLEEP_MILLSECOND 4294967 +#define MAX_PIPE_COUNT 2 +#define 
MMPA_PIPE_COUNT 2 +#define MMPA_THREADNAME_SIZE 16 +#define MMPA_MIN_OS_NAME_SIZE 64 +#define MMPA_MIN_OS_VERSION_SIZE 128 + +#define MMPA_ONE_THOUSAND 1000 +#define MMPA_ONE_BILLION 1000000000 +#define MMPA_COMPUTER_BEGIN_YEAR 1900 +#define MMPA_ZERO 0 +#define MMPA_MAX_THREAD_PIO 99 +#define MMPA_MIN_THREAD_PIO 1 +#define MMPA_DEFAULT_PIPE_PERMISSION 0777 +#define MMPA_DEFAULT_MSG_TYPE 1 + +#define MMPA_THREAD_SCHED_RR SCHED_RR +#define MMPA_THREAD_SCHED_FIFO SCHED_FIFO +#define MMPA_THREAD_SCHED_OTHER SCHED_OTHER +#define MMPA_THREAD_MIN_STACK_SIZE PTHREAD_STACK_MIN + +#define MMPA_MAX_NI 19 +#define MMPA_MIN_NI (-20) + +#define EN_OK 0 +#define EN_ERR 1 +#define EN_ERROR (-1) +#define EN_INVALID_PARAM (-2) + +#ifdef __cplusplus +#if __cplusplus +} +#endif /* __cpluscplus */ +#endif /* __cpluscplus */ +#endif /* _MMPA_TYPEDEF_LINUX_H_ */ diff --git a/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_typedef_win.h b/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_typedef_win.h new file mode 100644 index 00000000..fe95db10 --- /dev/null +++ b/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_typedef_win.h @@ -0,0 +1,79 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef MMPA_TYPEDEF_WIN_H +#define MMPA_TYPEDEF_WIN_H + +#ifdef __cplusplus +#if __cplusplus +extern "C" { +#endif /* __cpluscplus */ +#endif /* __cpluscplus */ + +#ifndef FALSE +#define FALSE 0 +#endif + +#ifndef TRUE +#define TRUE 1 +#endif + +#define EN_OK 0 +#define EN_ERR 1 +#define EN_ERROR (-1) +#define EN_INVALID_PARAM (-2) + +#define HANDLE_INVALID_VALUE (-1) +#define INVALID_SOCKET_HANDLE INVALID_SOCKET +#define MMPA_MEM_MAX_LEN (0x7fffffff) +#define MMPA_PROCESS_ERROR (0x7fffffff) + +#define MMPA_ONE_THOUSAND 1000 +#define MMPA_COMPUTER_BEGIN_YEAR 1900 +#define SUMMER_TIME_OR_NOT (-1) +#define MMPA_ZERO 0 +#define MMPA_VALUE_ONE 1 +#define MMPA_SOCKET_MAIN_EDITION 2 +#define MMPA_SOCKET_SECOND_EDITION 0 +#define MMPA_PIPE_BUF_SIZE 1024 +#define MMPA_MAX_SCANDIR_COUNT 1024 +#define MAX_IOVEC_SIZE 32 +#define MMPA_PIPE_COUNT 2 +#define MMPA_THREADNAME_SIZE 16 +#define MMPA_MIN_OS_NAME_SIZE (MAX_COMPUTERNAME_LENGTH + 1) +#define MMPA_MIN_OS_VERSION_SIZE 64 + +#define MMPA_MAX_NI 19 +#define MMPA_MIDDLE_NI 5 +#define MMPA_LOW_NI (-5) +#define MMPA_MIN_NI (-20) + +#define MMPA_MAX_THREAD_PIO 99 +#define MMPA_MIDDLE_THREAD_PIO 66 +#define MMPA_LOW_THREAD_PIO 33 +#define MMPA_MIN_THREAD_PIO 1 + +#define MMPA_THREAD_SCHED_RR 0 +#define MMPA_THREAD_SCHED_FIFO 0 +#define MMPA_THREAD_SCHED_OTHER 0 +#define MMPA_THREAD_MIN_STACK_SIZE 0 + +#ifdef __cplusplus +#if __cplusplus +} +#endif /* __cpluscplus */ +#endif /* __cpluscplus */ +#endif /* _MMPA_TYPEDEF_WIN_H_ */ diff --git a/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_win.h b/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_win.h new file mode 100644 index 00000000..69185cba --- /dev/null +++ b/third_party/fwkacllib/inc/mmpa/sub_inc/mmpa_win.h @@ -0,0 +1,449 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef MMPA_WIN_MMPA_WIN_H +#define MMPA_WIN_MMPA_WIN_H +#ifdef __cplusplus +#if __cplusplus +extern "C" { +#endif /* __cpluscplus */ +#endif /* __cpluscplus */ +#ifdef MMPA_DLL +#define MMPA_DLL_API __declspec(dllexport) +#else +#define MMPA_DLL_API __declspec(dllimport) +#endif + +#define MMPA_MACINFO_DEFAULT_SIZE 18 +#define MMPA_CPUDESC_DEFAULT_SIZE 64 + +MMPA_DLL_API extern char *optarg; +MMPA_DLL_API extern int opterr; +MMPA_DLL_API extern int optind; +MMPA_DLL_API extern int optopt; + +#pragma section(".CRT$XCU", long, read) +#pragma section(".CRT$XPU", long, read) + +typedef HANDLE mmMutex_t; +typedef HANDLE mmThread; +typedef HANDLE mmProcess; +typedef HANDLE mmPollHandle; +typedef HANDLE mmPipeHandle; +typedef HANDLE mmCompletionHandle; + +typedef CRITICAL_SECTION mmMutexFC; +typedef CONDITION_VARIABLE mmCond; + +typedef VOID *(*userProcFunc)(VOID *pulArg); +typedef struct { + userProcFunc procFunc; + VOID *pulArg; +} mmUserBlock_t; + +typedef DWORD mmThreadKey; +typedef SYSTEMTIME mmSystemTime_t; + +typedef HANDLE mmSem_t; +typedef SOCKET mmSockHandle; +typedef struct sockaddr mmSockAddr; +typedef int mmSocklen_t; +typedef int mmSemTimeout_t; +typedef long mmAtomicType; +typedef DWORD mmExitCode; +typedef int mmKey_t; +typedef HANDLE mmMsgid; + +typedef INT32 mmSsize_t; + +typedef enum { + DT_DIR = FILE_ATTRIBUTE_DIRECTORY, +} mmDtype; + +typedef struct { + unsigned char d_type; + char d_name[MAX_PATH]; +} mmDirent; + +typedef int (*mmFilter)(const mmDirent *entry); +typedef int (*mmSort)(const mmDirent **a, const mmDirent **b); 
+ +typedef struct { + VOID *sendBuf; + INT32 sendLen; +} mmIovSegment; +typedef PVOID mmInAddr; + +typedef enum { + pollTypeRead = 1, + pollTypeRecv, + pollTypeIoctl, +} mmPollType; + +typedef struct { + HANDLE completionHandle; + mmPollType overlapType; + OVERLAPPED oa; +} mmComPletionKey, *pmmComPletionKey; + +typedef struct { + VOID *priv; + mmPollHandle bufHandle; + mmPollType bufType; + VOID *buf; + UINT32 bufLen; + UINT32 bufRes; +} mmPollData, *pmmPollData; + +typedef VOID (*mmPollBack)(pmmPollData); +typedef struct { + mmPollHandle handle; + mmPollType pollType; + INT32 ioctlCode; + mmComPletionKey completionKey; +} mmPollfd; + +typedef struct { + OVERLAPPED oa; + HANDLE completionHandle; + WSABUF DataBuf; +} PRE_IO_DATA, *PPRE_IO_DATA; + +typedef OVERLAPPED mmOverLap; + +typedef struct { + UINT32 createFlag; + INT32 oaFlag; +} mmCreateFlag; + +typedef struct { + VOID *inbuf; + INT32 inbufLen; + VOID *outbuf; + INT32 outbufLen; + mmOverLap *oa; +} mmIoctlBuf; + +typedef struct { + HANDLE timerQueue; + HANDLE timerHandle; +} mmTimerHandle; + +typedef struct { + LONG tv_sec; + LONG tv_usec; +} mmTimeval; + +typedef struct { + INT32 tz_minuteswest; + INT32 tz_dsttime; +} mmTimezone; + +typedef struct { + LONG tv_sec; + LONG tv_nsec; +} mmTimespec; + +typedef mmTimerHandle mmTimer; + +#define mmTLS __declspec(thread) + +typedef struct stat mmStat_t; +typedef struct _stat64 mmStat64_t; +typedef int mmMode_t; + +typedef int MODE; + +typedef struct { + const char *name; + int has_arg; + int *flag; + int val; +} mmStructOption; + +typedef struct { + ULONGLONG totalSize; + ULONGLONG freeSize; + ULONGLONG availSize; +} mmDiskSize; + +typedef struct { + char addr[MMPA_MACINFO_DEFAULT_SIZE]; // ex:aa-bb-cc-dd-ee-ff\0 +} mmMacInfo; + +typedef struct { + char arch[MMPA_CPUDESC_DEFAULT_SIZE]; + char manufacturer[MMPA_CPUDESC_DEFAULT_SIZE]; // vendor + char version[MMPA_CPUDESC_DEFAULT_SIZE]; // modelname + INT32 frequency; // cpu frequency + INT32 maxFrequency; // max 
speed + INT32 ncores; // cpu cores + INT32 nthreads; // cpu thread count + INT32 ncounts; // logical cpu nums +} mmCpuDesc; + +typedef struct { + char **argv; + INT32 argvCount; + char **envp; + INT32 envpCount; +} mmArgvEnv; + +typedef struct { + INT32 detachFlag; + INT32 priorityFlag; + INT32 priority; + INT32 policyFlag; + INT32 policy; + INT32 stackFlag; + UINT32 stackSize; +} mmThreadAttr; + +typedef VOID (*mmPf)(VOID); +#define M_FILE_RDONLY GENERIC_READ +#define M_FILE_WRONLY GENERIC_WRITE +#define M_FILE_RDWR (GENERIC_READ | GENERIC_WRITE) +#define M_FILE_CREAT OPEN_ALWAYS + +#define M_RDONLY _O_RDONLY +#define M_WRONLY _O_WRONLY +#define M_RDWR _O_RDWR +#define M_CREAT _O_CREAT +#define M_BINARY _O_BINARY + +#define M_IREAD _S_IREAD +#define M_IRUSR _S_IREAD +#define M_IWRITE _S_IWRITE +#define M_IWUSR _S_IWRITE +#define M_IXUSR 0 + +#define M_MSG_CREAT 1 +#define M_MSG_EXCL 2 +#define M_MSG_NOWAIT 3 + +#define M_WAIT_NOHANG 1 +#define M_WAIT_UNTRACED 2 + +#define M_UMASK_USRREAD _S_IREAD +#define M_UMASK_GRPREAD _S_IREAD +#define M_UMASK_OTHREAD _S_IREAD + +#define M_UMASK_USRWRITE _S_IWRITE +#define M_UMASK_GRPWRITE _S_IWRITE +#define M_UMASK_OTHWRITE _S_IWRITE + +#define M_UMASK_USREXEC 0 +#define M_UMASK_GRPEXEC 0 +#define M_UMASK_OTHEXEC 0 + +#define mmConstructor(x) __declspec(allocate(".CRT$XCU")) mmPf con = x +#define mmDestructor(x) __declspec(allocate(".CRT$XPU")) mmPf de = x + +#define MMPA_PRINT_ERROR ((opterr) && (*options != ':')) +#define MMPA_FLAG_PERMUTE 0x01 // permute non-options to the end of argv +#define MMPA_FLAG_ALLARGS 0x02 // treat non-options as args to option "-1" +#define MMPA_FLAG_LONGONLY 0x04 // operate as getopt_long_only +/* return values */ +#define MMPA_BADCH (INT32)'?' +#define MMPA_BADARG ((*options == ':') ? 
(INT32)':' : (INT32)'?') +#define MMPA_INORDER (INT32)1 + +#define MMPA_NO_ARGUMENT 0 +#define MMPA_REQUIRED_ARGUMENT 1 +#define MMPA_OPTIONAL_ARGUMENT 2 + +#define MMPA_EMSG "" +#define MMPA_MAX_PATH MAX_PATH + +#define M_F_OK 0 +#define M_W_OK 2 +#define M_R_OK 4 + +#define MMPA_RTLD_NOW 0 +#define MMPA_RTLD_GLOBAL 0 + +#define MMPA_DL_EXT_NAME ".dll" + +#define __attribute__(v) + +_declspec(dllexport) INT32 mmCreateTask(mmThread *threadHandle, mmUserBlock_t *funcBlock); +_declspec(dllexport) INT32 mmJoinTask(mmThread *threadHandle); +_declspec(dllexport) INT32 mmMutexInit(mmMutex_t *mutex); +_declspec(dllexport) INT32 mmMutexLock(mmMutex_t *mutex); +_declspec(dllexport) INT32 mmMutexUnLock(mmMutex_t *mutex); +_declspec(dllexport) INT32 mmMutexDestroy(mmMutex_t *mutex); +_declspec(dllexport) INT32 mmCondInit(mmCond *cond); +_declspec(dllexport) INT32 mmCondLockInit(mmMutexFC *mutex); +_declspec(dllexport) INT32 mmCondLock(mmMutexFC *mutex); +_declspec(dllexport) INT32 mmCondUnLock(mmMutexFC *mutex); +_declspec(dllexport) INT32 mmCondLockDestroy(mmMutexFC *mutex); +_declspec(dllexport) INT32 mmCondWait(mmCond *cond, mmMutexFC *mutex); +_declspec(dllexport) INT32 mmCondTimedWait(mmCond *cond, mmMutexFC *mutex, UINT32 milliSecond); + +_declspec(dllexport) INT32 mmCondNotify(mmCond *cond); +_declspec(dllexport) INT32 mmCondNotifyAll(mmCond *cond); +_declspec(dllexport) INT32 mmCondDestroy(mmCond *cond); +_declspec(dllexport) INT32 mmGetPid(VOID); +_declspec(dllexport) INT32 mmGetTid(VOID); +_declspec(dllexport) INT32 mmGetPidHandle(mmProcess *processHandle); +_declspec(dllexport) INT32 mmGetLocalTime(mmSystemTime_t *sysTime); +_declspec(dllexport) INT32 mmSemInit(mmSem_t *sem, UINT32 value); +_declspec(dllexport) INT32 mmSemWait(mmSem_t *sem); +_declspec(dllexport) INT32 mmSemPost(mmSem_t *sem); +_declspec(dllexport) INT32 mmSemDestroy(mmSem_t *sem); +_declspec(dllexport) INT32 mmOpen(const CHAR *pathName, INT32 flags); +_declspec(dllexport) INT32 mmOpen2(const CHAR 
*pathName, INT32 flags, MODE mode); +_declspec(dllexport) INT32 mmClose(INT32 fd); +_declspec(dllexport) mmSsize_t mmWrite(INT32 fd, VOID *buf, UINT32 bufLen); +_declspec(dllexport) mmSsize_t mmRead(INT32 fd, VOID *buf, UINT32 bufLen); +_declspec(dllexport) mmSockHandle mmSocket(INT32 sockFamily, INT32 type, INT32 protocol); +_declspec(dllexport) INT32 mmBind(mmSockHandle sockFd, mmSockAddr *addr, mmSocklen_t addrLen); +_declspec(dllexport) INT32 mmListen(mmSockHandle sockFd, INT32 backLog); +_declspec(dllexport) mmSockHandle mmAccept(mmSockHandle sockFd, mmSockAddr *addr, mmSocklen_t *addrLen); +_declspec(dllexport) INT32 mmConnect(mmSockHandle sockFd, mmSockAddr *addr, mmSocklen_t addrLen); +_declspec(dllexport) INT32 mmCloseSocket(mmSockHandle sockFd); +_declspec(dllexport) mmSsize_t mmSocketRecv(mmSockHandle sockFd, VOID *recvBuf, INT32 recvLen, INT32 recvFlag); +_declspec(dllexport) mmSsize_t mmSocketSend(mmSockHandle sockFd, VOID *sendBuf, INT32 sendLen, INT32 sendFlag); +_declspec(dllexport) INT32 mmSAStartup(VOID); +_declspec(dllexport) INT32 mmSACleanup(VOID); +_declspec(dllexport) VOID *mmDlopen(const CHAR *fileName, INT mode); +_declspec(dllexport) VOID *mmDlsym(VOID *handle, CHAR *fileName); +_declspec(dllexport) INT32 mmDlclose(VOID *handle); +_declspec(dllexport) CHAR *mmDlerror(VOID); +_declspec(dllexport) INT32 + mmCreateAndSetTimer(mmTimer *timerHandle, mmUserBlock_t *timerBlock, UINT milliSecond, UINT period); +_declspec(dllexport) INT32 mmDeleteTimer(mmTimer timerHandle); +_declspec(dllexport) INT32 mmStatGet(const CHAR *path, mmStat_t *buffer); +_declspec(dllexport) INT32 mmStat64Get(const CHAR *path, mmStat64_t *buffer); +_declspec(dllexport) INT32 mmMkdir(const CHAR *pathName, mmMode_t mode); +_declspec(dllexport) INT32 mmSleep(UINT32 milliSecond); +_declspec(dllexport) INT32 mmCreateTaskWithAttr(mmThread *threadHandle, mmUserBlock_t *funcBlock); +_declspec(dllexport) INT32 mmGetProcessPrio(mmProcess pid); +_declspec(dllexport) INT32 
mmSetProcessPrio(mmProcess pid, INT32 processPrio); +_declspec(dllexport) INT32 mmGetThreadPrio(mmThread *threadHandle); +_declspec(dllexport) INT32 mmSetThreadPrio(mmThread *threadHandle, INT32 threadPrio); +_declspec(dllexport) INT32 mmAccess(const CHAR *pathName); +_declspec(dllexport) INT32 mmAccess2(const CHAR *pathName, INT32 mode); +_declspec(dllexport) INT32 mmRmdir(const CHAR *pathName); + +_declspec(dllexport) INT32 mmIoctl(mmProcess fd, INT32 ioctlCode, mmIoctlBuf *bufPtr); +_declspec(dllexport) INT32 mmSemTimedWait(mmSem_t *sem, INT32 timeout); +_declspec(dllexport) mmSsize_t mmWritev(mmSockHandle fd, mmIovSegment *iov, INT32 iovcnt); +_declspec(dllexport) VOID mmMb(); +_declspec(dllexport) INT32 mmInetAton(const CHAR *addrStr, mmInAddr *addr); + +_declspec(dllexport) mmProcess mmOpenFile(const CHAR *fileName, UINT32 access, mmCreateFlag fileFlag); +_declspec(dllexport) mmSsize_t mmReadFile(mmProcess fileId, VOID *buffer, INT32 len); +_declspec(dllexport) mmSsize_t mmWriteFile(mmProcess fileId, VOID *buffer, INT32 len); +_declspec(dllexport) INT32 mmCloseFile(mmProcess fileId); + +_declspec(dllexport) mmAtomicType mmSetData(mmAtomicType *ptr, mmAtomicType value); +_declspec(dllexport) mmAtomicType mmValueInc(mmAtomicType *ptr, mmAtomicType value); +_declspec(dllexport) mmAtomicType mmValueSub(mmAtomicType *ptr, mmAtomicType value); +_declspec(dllexport) INT32 mmCreateTaskWithDetach(mmThread *threadHandle, mmUserBlock_t *funcBlock); + +_declspec(dllexport) INT32 mmCreateNamedPipe(mmPipeHandle pipe[], CHAR *pipeName[], INT32 waitMode); +_declspec(dllexport) INT32 mmOpenNamePipe(mmPipeHandle pipe[], CHAR *pipeName[], INT32 waitMode); +_declspec(dllexport) VOID mmCloseNamedPipe(mmPipeHandle namedPipe[]); + +_declspec(dllexport) INT32 mmCreatePipe(mmPipeHandle pipe[], CHAR *pipeName[], UINT32 pipeCount, INT32 waitMode); +_declspec(dllexport) INT32 mmOpenPipe(mmPipeHandle pipe[], CHAR *pipeName[], UINT32 pipeCount, INT32 waitMode); +_declspec(dllexport) VOID 
mmClosePipe(mmPipeHandle pipe[], UINT32 pipeCount); + +_declspec(dllexport) mmCompletionHandle mmCreateCompletionPort(); +_declspec(dllexport) VOID mmCloseCompletionPort(mmCompletionHandle handle); +_declspec(dllexport) INT32 mmPoll(mmPollfd *fds, INT32 fdCount, INT32 timeout, mmCompletionHandle handleIOCP, + pmmPollData polledData, mmPollBack pollBack); + +_declspec(dllexport) INT32 mmGetErrorCode(); +_declspec(dllexport) INT32 mmGetTimeOfDay(mmTimeval *timeVal, mmTimezone *timeZone); +_declspec(dllexport) mmTimespec mmGetTickCount(); + +_declspec(dllexport) INT32 mmGetRealPath(CHAR *path, CHAR *realPath); + +_declspec(dllexport) INT32 mmRealPath(const CHAR *path, CHAR *realPath, INT32 realPathLen); + +_declspec(dllexport) INT32 mmDup2(INT32 oldFd, INT32 newFd); +_declspec(dllexport) INT32 mmUnlink(const CHAR *filename); +_declspec(dllexport) INT32 mmChmod(const CHAR *filename, INT32 mode); +_declspec(dllexport) INT32 mmFileno(FILE *stream); +_declspec(dllexport) INT32 mmScandir(const CHAR *path, mmDirent ***entryList, mmFilter filterFunc, mmSort sort); +_declspec(dllexport) VOID mmScandirFree(mmDirent **entryList, INT32 count); + +_declspec(dllexport) mmMsgid mmMsgCreate(mmKey_t key, INT32 msgFlag); +_declspec(dllexport) mmMsgid mmMsgOpen(mmKey_t key, INT32 msgFlag); +_declspec(dllexport) INT32 mmMsgRcv(mmMsgid msqid, VOID *buf, INT32 bufLen, INT32 msgFlag); +_declspec(dllexport) INT32 mmMsgSnd(mmMsgid msqid, VOID *buf, INT32 bufLen, INT32 msgFlag); + +_declspec(dllexport) INT32 mmMsgClose(mmMsgid msqid); + +_declspec(dllexport) INT32 mmLocalTimeR(const time_t *timep, struct tm *result); +_declspec(dllexport) INT32 mmGetOpt(INT32 argc, char *const *argv, const char *opts); +_declspec(dllexport) INT32 + mmGetOptLong(INT32 argc, CHAR *const *argv, const CHAR *opts, const mmStructOption *longopts, INT32 *longindex); + +_declspec(dllexport) LONG mmLseek(INT32 fd, INT64 offset, INT32 seekFlag); +_declspec(dllexport) INT32 mmFtruncate(mmProcess fd, UINT32 length); + 
+_declspec(dllexport) INT32 mmTlsCreate(mmThreadKey *key, VOID (*destructor)(VOID *)); +_declspec(dllexport) INT32 mmTlsSet(mmThreadKey key, const VOID *value); +_declspec(dllexport) VOID *mmTlsGet(mmThreadKey key); +_declspec(dllexport) INT32 mmTlsDelete(mmThreadKey key); +_declspec(dllexport) INT32 mmGetOsType(); + +_declspec(dllexport) INT32 mmFsync(mmProcess fd); + +_declspec(dllexport) INT32 mmChdir(const CHAR *path); +_declspec(dllexport) INT32 mmUmask(INT32 pmode); +_declspec(dllexport) INT32 mmWaitPid(mmProcess pid, INT32 *status, INT32 options); + +_declspec(dllexport) INT32 mmGetCwd(CHAR *buffer, INT32 maxLen); +_declspec(dllexport) CHAR *mmStrTokR(CHAR *str, const CHAR *delim, CHAR **saveptr); + +_declspec(dllexport) INT32 mmGetEnv(const CHAR *name, CHAR *value, UINT32 len); +_declspec(dllexport) INT32 mmSetEnv(const CHAR *name, const CHAR *value, INT32 overwrite); +_declspec(dllexport) CHAR *mmDirName(CHAR *path); +_declspec(dllexport) CHAR *mmBaseName(CHAR *path); +_declspec(dllexport) INT32 mmGetDiskFreeSpace(const char *path, mmDiskSize *diskSize); + +_declspec(dllexport) INT32 mmSetThreadName(mmThread *threadHandle, const CHAR *name); +_declspec(dllexport) INT32 mmGetThreadName(mmThread *threadHandle, CHAR *name, INT32 size); + +_declspec(dllexport) INT32 mmSetCurrentThreadName(const CHAR *name); + +_declspec(dllexport) INT32 mmGetCurrentThreadName(CHAR *name, INT32 size); + +_declspec(dllexport) INT32 mmGetFileSize(const CHAR *fileName, ULONGLONG *length); +_declspec(dllexport) INT32 mmIsDir(const CHAR *fileName); +_declspec(dllexport) INT32 mmGetOsName(CHAR *name, INT32 nameSize); +_declspec(dllexport) INT32 mmGetOsVersion(CHAR *versionInfo, INT32 versionLength); +_declspec(dllexport) INT32 mmGetMac(mmMacInfo **list, INT32 *count); +_declspec(dllexport) INT32 mmGetMacFree(mmMacInfo *list, INT32 count); +_declspec(dllexport) INT32 mmGetCpuInfo(mmCpuDesc **cpuInfo, INT32 *count); +_declspec(dllexport) INT32 mmCpuInfoFree(mmCpuDesc *cpuInfo, INT32 
count);
+_declspec(dllexport) INT32
+    mmCreateProcess(const CHAR *fileName, const mmArgvEnv *env, const char *stdoutRedirectFile, mmProcess *id);
+
+_declspec(dllexport) INT32
+    mmCreateTaskWithThreadAttr(mmThread *threadHandle, const mmUserBlock_t *funcBlock, const mmThreadAttr *threadAttr);
+
+#ifdef __cplusplus
+#if __cplusplus
+}
+#endif /* __cplusplus */
+#endif /* __cplusplus */
+
+#endif /* MMPA_WIN_MMPA_WIN_H */
diff --git a/third_party/fwkacllib/inc/ops/aipp.h b/third_party/fwkacllib/inc/ops/aipp.h
new file mode 100755
index 00000000..d32f6fdf
--- /dev/null
+++ b/third_party/fwkacllib/inc/ops/aipp.h
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef GE_OP_AIPP_H +#define GE_OP_AIPP_H + +#include "../graph/operator_reg.h" + +namespace ge { +REG_OP(Aipp) + .INPUT(images, TensorType{DT_UINT8}) + .OPTIONAL_INPUT(params, TensorType{DT_UINT8}) + .OUTPUT(features, TensorType({DT_FLOAT16, DT_UINT8})) + .ATTR(aipp_config_path, String, "./aipp.cfg") + .OP_END_FACTORY_REG(Aipp) +} // namespace ge + +#endif // GE_OP_AIPP_H diff --git a/third_party/fwkacllib/inc/ops/aipp_data.h b/third_party/fwkacllib/inc/ops/aipp_data.h new file mode 100755 index 00000000..5e3961ca --- /dev/null +++ b/third_party/fwkacllib/inc/ops/aipp_data.h @@ -0,0 +1,30 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_AIPP_DATA_H +#define GE_OP_AIPP_DATA_H + +#include "../graph/operator_reg.h" + +namespace ge { +REG_OP(AippData) + .INPUT(data, TensorType::ALL()) + .OUTPUT(out, TensorType::ALL()) + .ATTR(index, Int, 0) + .OP_END_FACTORY_REG(AippData) +} // namespace ge + +#endif // GE_OP_AIPP_DATA_H diff --git a/third_party/fwkacllib/inc/ops/all_ops.h b/third_party/fwkacllib/inc/ops/all_ops.h new file mode 100755 index 00000000..c00b2b8a --- /dev/null +++ b/third_party/fwkacllib/inc/ops/all_ops.h @@ -0,0 +1,69 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef BUILT_IN_OP_PROTO_INC_ALL_OPS_H_ +#define BUILT_IN_OP_PROTO_INC_ALL_OPS_H_ + +#include "aipp.h" +#include "aipp_data.h" +#include "array_ops.h" +#include "audio_ops.h" +#include "batch_ops.h" +#include "bitwise_ops.h" +#include "boosted_trees_ops.h" +#include "candidate_sampling_ops.h" +#include "control_flow_ops.h" +#include "data_flow_ops.h" +#include "elewise_calculation_ops.h" +#include "hcom_ops.h" +#include "get_data_ops.h" +#include "image_ops.h" +#include "linalg_ops.h" +#include "lookup_ops.h" +#include "math_ops.h" +#include "matrix_calculation_ops.h" +#include "nn_batch_norm_ops.h" +#include "nn_calculation_ops.h" +#include "nn_detect_ops.h" +#include "nn_norm_ops.h" +#include "nn_ops.h" +#include "nn_other_ops.h" +#include "nn_pooling_ops.h" +#include "nn_training_ops.h" +#include "nonlinear_fuc_ops.h" +#include "npu_loss_scale_ops.h" +#include "pad_ops.h" +#include "quantize_ops.h" +#include "random_ops.h" +#include "reduce_ops.h" +#include "rpn_ops.h" +#include "save_ops.h" +#include "selection_ops.h" +#include "set_ops.h" +#include "sparse_ops.h" +#include "split_combination_ops.h" +#include "state_ops.h" +#include "transformation_ops.h" +#include "logging_ops.h" +#include "string_ops.h" +#include "outfeed_ops.h" +#include "stateless_random_ops.h" +#include "dvpp_ops.h" +#include "basic_lstm_cell.h" +#include "fsrdetectionoutput_ops.h" +#include "roipooling_ops.h" +#include "power_ops.h" +#endif // BUILT_IN_OP_PROTO_INC_ALL_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/array_ops.h 
b/third_party/fwkacllib/inc/ops/array_ops.h new file mode 100755 index 00000000..9354a5e5 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/array_ops.h @@ -0,0 +1,882 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_ARRAY_OPS_H_ +#define GE_OP_ARRAY_OPS_H_ + +#include "graph/operator_reg.h" +#include "graph/operator.h" + +namespace ge { + +/** +*@brief Applies lower_bound(sorted_search_values, values) along each row. + +*@par Inputs: +*The input sorted_x and values can be one-dimensional vector. Inputs include: \n +* @li sorted_x:A `Tensor`. 2-D Tensor where each row is ordered. +* @li values:A `Tensor`. Must have the same type as `sorted_x`. + +*@par Attributes: +*@li out_type:An optional `DType` from: `int32, int64`. Defaults to `int32`. + +*@par Outputs: +*y: A `Tensor` of type `out_type`. + +*@attention Constraints: \n +*-The implementation for LowerBound on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(LowerBound) + .INPUT(sorted_x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .ATTR(out_type, Type, DT_INT32) + .OP_END_FACTORY_REG(LowerBound) + +/** +*@brief Reverses variable length slices. + +*@par Inputs: +*The input x can be k-dimensional tensor, num_lower and num_upper can be zero-dimensional scalar. Inputs include: \n +* @li x:A Tensor. The input to reverse. +* @li seq_lengths:A Tensor. Must be one of the following types: int32, int64. 1-D. + +*@par Attributes: +*@li seq_dim:An optional int. Defaults to 0. The dimension along which reversal is performed. +*@li batch_dim:An optional int. Defaults to 0. The dimension along which reversal is performed. + +*@par Outputs: +*y: Rank k tensor of the same shape as input. The extracted banded tensor. + +*@attention Constraints: \n +*-The implementation for ReverseSequence on Ascend uses AI CPU, with bad performance. 
+ +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(ReverseSequence) + .INPUT(x, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .INPUT(seq_lengths, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .REQUIRED_ATTR(seq_dim, Int) + .ATTR(batch_dim, Int, 0) + .OP_END_FACTORY_REG(ReverseSequence) + +/** +*@brief Copy a tensor setting everything outside a central band in each innermost matrix. + +*@par Inputs: +*The input x can be k-dimensional tensor, num_lower and num_upper can be zero-dimensional scalar. Inputs include: \n +* @li x:Rank `k` tensor. +* @li num_lower:0-D tensor. Number of superdiagonals to keep. If negative, keep entire upper triangle. +* @li num_upper:0-D tensor. Number of superdiagonals to keep. If negative, keep entire upper triangle. + +*@par Outputs: +*y: Rank k tensor of the same shape as input. The extracted banded tensor. + +*@attention Constraints: \n +*-The implementation for MatrixBandPart on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(MatrixBandPart) + .INPUT(x, TensorType({ DT_INT8, DT_UINT8, \ + DT_INT16, DT_UINT16, DT_INT32, DT_INT64, + DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL })) + .INPUT(num_lower, TensorType({ DT_INT32, DT_INT64 })) + .INPUT(num_upper, TensorType({ DT_INT32, DT_INT64 })) + .OUTPUT(y, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL })) + .OP_END_FACTORY_REG(MatrixBandPart) + +/** +*@brief Finds unique elements in a 1-D tensor. + +*@par Inputs: +*The input x can be k-dimensional tensor, num_lower and num_upper can be zero-dimensional scalar. Inputs include: \n +*x:1-D tensor. + +*@par Attributes: +*out_idx:An optional DType from: int32, int64. Defaults to int32. \n + +*@par Outputs: +*@li y:A Tensor. Has the same type as x. +*@li idx:A Tensor of type out_idx. +*@li count:A Tensor of type out_idx. + +*@attention Constraints: \n +*-The implementation for UniqueWithCounts on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(UniqueWithCounts) + .INPUT(x, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .OUTPUT(y, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .OUTPUT(idx, TensorType({ DT_INT32, DT_INT64 })) + .OUTPUT(count, TensorType({ DT_INT32, DT_INT64 })) + .REQUIRED_ATTR(out_idx, Type) + .OP_END_FACTORY_REG(UniqueWithCounts) + +/** +*@brief Finds unique elements in a 1-D tensor. 
+ +*@par Inputs: +*The input x can be k-dimensional tensor, num_lower and num_upper can be zero-dimensional scalar. Inputs include: \n +*x:1-D tensor. + +*@par Attributes: +*out_idx:An optional DType from: int32, int64. Defaults to int32. + +*@par Outputs: +*@li y:x in the unique output y. +*@li idx:A tensor idx the same size as x that contains the index of each value of x. + +*@attention Constraints: \n +*-The implementation for Unique on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(Unique) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OUTPUT(idx, TensorType({DT_INT32, DT_INT64})) + .ATTR(out_idx, Type, DT_INT32) + .OP_END_FACTORY_REG(Unique) + +/** +*@brief Finds unique elements in a 1-D tensor. + +*@par Inputs: +*The input x can be k-dimensional tensor, num_lower and num_upper can be zero-dimensional scalar. Inputs include: \n +* @li x:1-D tensor. +* @li axis:A `Tensor` of type `int32` (default: None). The axis of the Tensor to. + +*@par Attributes: +*out_idx:An optional DType from: int32, int64. Defaults to int32. + +*@par Outputs: +*@li y:x in the unique output y. +*@li idx:A tensor idx the same size as x that contains the index of each value of x. + +*@attention Constraints: \n +*-The implementation for UniqueExt2 on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(UniqueExt2) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(axis, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OUTPUT(idx, TensorType({DT_INT32, DT_INT64})) + .ATTR(out_idx, Type, DT_INT32) + .OP_END_FACTORY_REG(UniqueExt2) + +/** +*@brief Computes the inverse permutation of a tensor. + +*@par Inputs: +*The input x can be k-dimensional tensor. Inputs include: \n +*x:K-D tensor. + +*@par Outputs: +*y:1-D tensor. + +*@attention Constraints:\n +*-The implementation for InvertPermutation on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(InvertPermutation) + .INPUT(x, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .OP_END_FACTORY_REG(InvertPermutation) + +/** +*@brief Checks a tensor for NaN and Inf values. + +*@par Inputs: +*The input x can be k-dimensional tensor. Inputs include: \n +*x:The input tensor. + +*@par Attributes: +*message:Prefix of the error message. + +*@par Outputs: +*y:The output tensor. + +*@attention Constraints: \n +*-The implementation for CheckNumerics on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(CheckNumerics) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .REQUIRED_ATTR(message, String) + .OP_END_FACTORY_REG(CheckNumerics) + +/** +*@brief Converts an array of flat indices into a tuple of coordinate arrays. + +*@par Inputs: +*The input indices can be 0-D or 1-D tensor, dims can be 1-D. Inputs include: \n +* @li indices: A 0-D or 1-D int Tensor whose elements are indices into the flattened version of an array of dimensions dims. +* @li dims:A Tensor. Must have the same type as indices. An 1-D int Tensor. The shape of the array to use for unraveling indices. + +*@par Outputs: +*y:A Tensor. Has the same type as indices. + +*@attention Constraints: \n +*-The implementation for UnravelIndex on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(UnravelIndex) + .INPUT(indices, TensorType({DT_INT32, DT_INT64})) + .INPUT(dims, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .OP_END_FACTORY_REG(UnravelIndex) + +/** +*@brief Applies upper_bound(sorted_search_values, values) along each row. + +*@par Inputs: +*The input sorted_x can be 2-D tensor, values can be 2-D. Inputs include: +* @li sorted_x: 2-D Tensor where each row is ordered. +* @li values:2-D Tensor with the same numbers of rows as `sorted_x. + +*@par Attributes: +*out_type:sets the optional out_type attribute to value. + +*@par Outputs: +*y:A `Tensor` with the same shape as `values`. + +*@attention Constraints: \n +*-The implementation for UpperBound on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(UpperBound) + .INPUT(sorted_x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .REQUIRED_ATTR(out_type, Type) + .OP_END_FACTORY_REG(UpperBound) + +/** +*@brief Finds unique elements in a 1-D tensor. + +*@par Inputs: +*The input x can be 1-D vector, axis can be 1-D vector. Inputs include: \n +* @li x:1-D tensor. +* @li axis:1-D tensor. + +*@par Attributes: +*out_idx:An optional DType from: int32, int64. Defaults to int32. + +*@par Outputs: +*@li y:x in the unique output y. +*@li idx:A tensor idx the same size as x that contains the index of each value of x. +*@li count:A tensor idx the same size as x that contains the index of each value of x. + +*@attention Constraints: \n +*-The implementation for UniqueWithCountsExt2 on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(UniqueWithCountsExt2) + .INPUT(x, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .INPUT(axis, TensorType({ DT_INT32, DT_INT64 })) + .OUTPUT(y, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .OUTPUT(idx, TensorType({ DT_INT32, DT_INT64 })) + .OUTPUT(count, TensorType({ DT_INT32, DT_INT64 })) + .REQUIRED_ATTR(out_idx, Type) + .OP_END_FACTORY_REG(UniqueWithCountsExt2) + +/** +*@brief Fill the tensor with the mirror value. 
+ +*@par Inputs: +*The input x and paddings can be one-dimensional scalar. Inputs include: \n +* @li x: input tensor to be padded. +* @li paddings: A two-column matrix specifying the padding sizes. The number of rows must be the same as the rank of `input`. + +*@par Attributes: +*mode:Either `REFLECT` or `SYMMETRIC`. In reflect mode the padded regions do not include the borders, while in symmetric mode the padded regions do include the borders. + +*@par Outputs: +*y: The padded tensor. + +*@attention Constraints: \n +-The implementation for MirrorPad on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(MirrorPad) + .INPUT(x, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL })) + .INPUT(paddings, TensorType({ DT_INT32, DT_INT64 })) + .OUTPUT(y, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL })) + .REQUIRED_ATTR(mode, String) + .OP_END_FACTORY_REG(MirrorPad) + +/** +*@brief Calculate the difference between two numbers or a list of strings. + +*@par Inputs: +*The input x and y can be one-dimensional vector. Inputs include: \n +* @li x:A Tensor. 1-D. Values to keep. +* @li y:A Tensor. Must have the same type as x. 1-D. Values to remove. + +*@par Attributes: +*out_idx:An optional DType from: int32, int64. Defaults to int32. + +*@par Outputs: +*@li out:A Tensor. Has the same type as x. +*@li idx:A Tensor of type out_idx. + +*@attention Constraints:\n +-The implementation for ListDiff on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(ListDiff) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_UINT8, DT_INT8, + DT_INT16, DT_UINT16, DT_INT32, DT_INT64})) + .INPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_UINT8, DT_INT8, + DT_INT16, DT_UINT16, DT_INT32, DT_INT64})) + .OUTPUT(out, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_UINT8, DT_INT8, + DT_INT16, DT_UINT16, DT_INT32, DT_INT64})) + .OUTPUT(idx, TensorType({DT_INT32, DT_INT64})) + .ATTR(out_idx, Type, DT_INT32) + .OP_END_FACTORY_REG(ListDiff) + +/** +*@brief Creates a constant tensor from a tensor-like object. This operator is used for inference. \n +Operator Const has the same definition as operator Constant. + +*@par Attributes: +*@li value: Required. The value and type of the resulting tensor. +*@li dtype: Optional. The type of the elements of the resulting tensor. \n +The data type specified by this parameter must be the same as that of the "value" attribute. + +*@par Outputs: +*y: A constant tensor. +*/ +REG_OP(Const) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .ATTR(value, Tensor, Tensor()) // This is the value of the const op + .ATTR(dtype, Int, 0) + .OP_END_FACTORY_REG(Const) + +/** +*@brief Creates a constant tensor for training. + +*@par Attributes: +*@li value: Required. The value and type of the resulting tensor. +*@li dtype: Optional. The type of the elements of the resulting tensor. \n +The data type specified by this parameter must be the same as that of the "value" attribute. + +*@par Outputs: +*y: The constant tensor. 
+*/ +REG_OP(Constant) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .ATTR(value, Tensor, Tensor()) // This is the value of the constant op + .ATTR(dtype, Int, 0) + .OP_END_FACTORY_REG(Constant) + +/** +*@brief Returns a copy of the input tensor. + +*@par Inputs: +*x: A tensor. + +*@par Outputs: +*y: A tensor. +*/ +REG_OP(Snapshot) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OP_END_FACTORY_REG(Snapshot) + +/** +*@brief Gives a guarantee to the runtime that the input tensor is a constant. + +*@par Inputs: +*x: A tensor. + +*@par Outputs: +*y: The input tensor. +*/ +REG_OP(GuaranteeConst) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OP_END_FACTORY_REG(GuaranteeConst) + +/** +*@brief Returns the target shape for broadcasting shapes "x1" and "x2". + +*@par Inputs: +*@li x1: A tensor of type int32 or int64. A shape. +*@li x2: A tensor of the same type as "x1". The other shape. + +*@par Outputs: +*y: A tensor. The broadcasted shape. +*/ +REG_OP(BroadcastArgs) + .INPUT(x1, TensorType({DT_INT32, DT_INT64})) + .INPUT(x2, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .OP_END_FACTORY_REG(BroadcastArgs) + +/** +*@brief Outputs its input tensor as is and triggers an error if a gradient is requested. + +*@par Inputs: +*x: A tensor. 
+ +*@par Attributes: +*message: Will be printed in the error at the attempt to request a gradient. + +*@par Outputs: +*y: The input tensor. +*/ +REG_OP(PreventGradient) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .ATTR(message, String, "") + .OP_END_FACTORY_REG(PreventGradient) + +/** +*@brief Returns the reduction indices for computing gradients of "x1" and "x2" with broadcast. + +*@par Inputs: +*@li x1: A tensor of type int32 or int64. +*@li x2: A tensor of type int32 or int64. \n +"x2" has the same type as "x1". + +*@par Outputs: +*@li y1: A tensor. Reduction indices of "x1". +*@li y2: A tensor. Reduction indices of "x2". +*/ +REG_OP(BroadcastGradientArgs) + .INPUT(x1, TensorType({DT_INT32, DT_INT64})) + .INPUT(x2, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y1, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y2, TensorType({DT_INT32, DT_INT64})) + .OP_END_FACTORY_REG(BroadcastGradientArgs) + +/** +*@brief Stops gradient computation. None is returned for the node where the gradient computation is stopped. + + +*@par Inputs: +*x: A tensor. + +*@par Outputs: +*y: The input tensor. +*/ +REG_OP(StopGradient) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OP_END_FACTORY_REG(StopGradient) + +/** +*@brief Return a tensor with the same shape and contents as input. + +*@par Inputs: +*x: A tensor. + +*@par Outputs: +*y: A tensor. 
+*/ +REG_OP(Identity) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OP_END_FACTORY_REG(Identity) + +/** +*@brief Returns a list of tensors with the same shapes and contents as the input tensors. + +*@par Inputs: +*x: A list of input tensors. + +*@par Outputs: +*y: A list of Tensor objects, with the same length as the input tensor list. +*/ +REG_OP(IdentityN) + .DYNAMIC_INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .DYNAMIC_OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OP_END_FACTORY_REG(IdentityN) + +/** +*@brief Inserts a dimension of 1 into a tensor's shape. Only the tensor shape is changed, without changing the data. + +*@par Inputs: +*@li x: A tensor. +*@li axis: The dimension index at which to expand. + +*@par Outputs: +*y: A tensor. +*/ +REG_OP(ExpandDims) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, + DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .INPUT(axis, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, + DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .ATTR(T, Int, 0) + .ATTR(Tdim, Int, 0) + .OP_END_FACTORY_REG(ExpandDims) + +/** +*@brief Reshapes a tensor. Only the tensor shape is changed, without changing the data. + +*@par Inputs: +*@li x: A tensor. +*@li shape: A tensor. Defines the shape of the output tensor. + +*@par Attributes: +*@li axis: An optional int32 or int64. The first dimension to reshape. Defaults to "0". 
+*@li num_axes: An optional int32 or int64. The extent of the reshape. Defaults to "-1". + +*@par Outputs: +*y: A tensor. +*/ +REG_OP(Reshape) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, + DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .INPUT(shape, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, + DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .ATTR(axis, Int, 0) + .ATTR(num_axes, Int, -1) + .OP_END_FACTORY_REG(Reshape) + +/** +*@brief Removes dimensions of size 1 from the shape of a tensor. + +*@par Inputs: +*x: A tensor. + +*@par Attributes: +*axis: An optional list of int32 or int64. If not specified, squeezes all dimensions of size 1. \n If specified, only squeezes the dimensions listed. It is an error to squeeze a dimension that is not 1. + +*@par Outputs: +*y: A tensor. +*/ +REG_OP(Squeeze) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .ATTR(T, Int, 0) + .ATTR(squeeze_dims, ListInt, {}) + .ATTR(axis, ListInt, {}) + .OP_END_FACTORY_REG(Squeeze) + +/** +*@brief Returns an integer representing the rank of input tensor. The rank of a tensor is the number of indices required to uniquely select each element of the tensor, that is, the dimension size of the tensor. + +*@par Inputs: +*x: A tensor. + +*@par Outputs: +*y: A tensor. The rank of input tensor. +*/ +REG_OP(Rank) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(Rank) + +/** +*@brief Returns the size of a tensor, that is, an integer of the number of elements of the tensor. + +*@par Inputs: +*x: A tensor. + +*@par Attributes: +*out_type: An optional int32 or int64. The output data type. Defaults to "int32". + +*@par Outputs: +*y: A tensor. The size of the input tensor. 
+*/ +REG_OP(Size) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT32,DT_INT64})) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .ATTR(out_type, Int, DT_INT32) + .OP_END_FACTORY_REG(Size) + +REG_OP(Data) + .INPUT(data, TensorType::ALL()) + .OUTPUT(out, TensorType::ALL()) + .ATTR(index, Int, 0) + .OP_END_FACTORY_REG(Data) + +/** +*@brief Inserts a placeholder for a tensor that will be always fed. + +*@par Inputs: +*x: A tensor. + +*@par Attributes: +*@li peerIndex: An integer type. The index of the corresponding "end" node connected to. +*@li parentId: A string, used to check if the nodes are from the saved parent node. +*@li parentOpType: A string. Op type of the original node. +*@li anchorIndex: An integer, used to check if the node is from the saved anchor. + +*@par Outputs: +*y: The created placeholder tensor. +*/ +REG_OP(PlaceHolder) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .ATTR(peerIndex, Int, 0) // the index of the corresponding 'end' node it's connected to + .ATTR(parentId, String, "") // check if these node are from save parent node + .ATTR(parentOpType, String, "") // op type of original node + .ATTR(anchorIndex, Int, 0) // check if these node are from save anchor + .OP_END_FACTORY_REG(PlaceHolder) + +REG_OP(End) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .ATTR(peerIndex, Int, 0) // the index of the corresponding 'placeholder' node it's connected to + .ATTR(parentOpType, String, "") // op type of original node + .OP_END_FACTORY_REG(End) + +REG_OP(Summary) + .INPUT(x, TensorType::ALL()) + .OP_END_FACTORY_REG(Summary) + +/** +*@brief Returns the shape of a tensor. + +*@par Inputs: +*x: A tensor. + +*@par Attributes: +*out_type: An optional int32 or int64. The output data type. Defaults to int32. + +*@par Outputs: +*y: A tensor. The shape of the input tensor. 
+*/ +REG_OP(Shape) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .ATTR(out_type, Int, DT_INT32) + .OP_END_FACTORY_REG(Shape) + +/** +*@brief Returns shape of tensors. + +*@par Inputs: +*x: A list of input tensors. + +*@par Attributes: +*out_type: An optional int32 or int64. The output data type. Defaults to "int32". + +*@par Outputs: +*y: A list of tensors with the same length as the input list of tensors. +*/ +REG_OP(ShapeN) + .DYNAMIC_INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .DYNAMIC_OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .ATTR(out_type, Int, DT_INT32) + .OP_END_FACTORY_REG(ShapeN) + +/** +*@brief Creates a tensor with the given "shape" and "dtype". + +*@par Inputs: +*shape: The shape of the output tensor. + +*@par Attributes: +*@li dtype: Optional. The data type of the output tensor. Defaults to "int32". +*@li init: An optional bool. If true, initializes the returned tensor with the default value of "dtype". Defaults to "false". + +*@par Outputs: +*y: A tensor. +*/ +REG_OP(Empty) + .INPUT(shape, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .ATTR(dtype, Int, DT_INT32) + .ATTR(init, Bool, 0) + .OP_END_FACTORY_REG(Empty) + +/** +*@brief Gradient op for MirrorPad op. This op folds a mirror-padded tensor. + +*@par Inputs: +*The input x and y can be one-dimensional vector. Inputs include: \n +* @li x:A Tensor. The input tensor to be folded. +* @li paddings:A Tensor. Must be one of the following types: int32, int64. A two-column matrix specifying the padding sizes. 
+ +*@par Attributes: +*mode:A string from: "REFLECT", "SYMMETRIC". The mode used in the MirrorPad op. + +*@par Outputs: +*y:A Tensor. Has the same type as x. + +*@attention Constraints: \n +-The implementation for MirrorPadGrad on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(MirrorPadGrad) + .INPUT(x, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .INPUT(paddings, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({ DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .REQUIRED_ATTR(mode, String) + .OP_END_FACTORY_REG(MirrorPadGrad) + +REG_OP(Where) + .INPUT(x, TensorType({DT_DOUBLE, DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_UINT32, DT_INT64, DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_INT64})) + .OP_END_FACTORY_REG(Where) + +} // namespace ge + +#endif // GE_OP_ARRAY_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/atomic_addr_clean_ops.h b/third_party/fwkacllib/inc/ops/atomic_addr_clean_ops.h new file mode 100644 index 00000000..1cd5dc3a --- /dev/null +++ b/third_party/fwkacllib/inc/ops/atomic_addr_clean_ops.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_ATOMICADDRCLEAN_H + #define GE_OP_ATOMICADDRCLEAN_H + + #include "../../../inc/external/graph/operator_reg.h" + +namespace ge{ +REG_OP(AtomicAddrClean) + .ATTR(automic_add_mem_size, ListInt, {}) + .OP_END() +} // namespace ge + + #endif // GE_OP_ATOMICADDRCLEAN_H diff --git a/third_party/fwkacllib/inc/ops/audio_ops.h b/third_party/fwkacllib/inc/ops/audio_ops.h new file mode 100755 index 00000000..41c25492 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/audio_ops.h @@ -0,0 +1,137 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_AUDIO_OPS_H_ +#define GE_OP_AUDIO_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +/** +*@brief Mel-Frequency Cepstral Coefficient (MFCC) calculation consists of taking the DCT-II of a log-magnitude mel-scale spectrogram. + +*@par Inputs: +*The input spectrogram must be three-dimensional tensor, sample_rate must be a scalar. Inputs include: \n +* @li spectrogram:3D float tensor of mel-frequency cepstral coefficient. +* @li sample_rate:Mel-Frequency Cepstral Coefficient (MFCC) calculation sample rate. + +*@par Attributes: +*@li upper_frequency_limit:Upper limit of the mfcc calculation frequency. +*@li lower_frequency_limit:Lower limit of the mfcc calculation frequency. 
+*@li filterbank_channel_count:Count of the channel filterbank. +*@li dct_coefficient_count:Count of the dct coefficient. + +*@par Outputs: +*y:A float32 Tensor of the MFCCs of spectrogram. + +*@attention Constraints:\n +*-The implementation for Mfcc on Ascend uses AI CPU, with bad performance.\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(Mfcc) + .INPUT(spectrogram, TensorType({DT_FLOAT})) + .INPUT(sample_rate, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(upper_frequency_limit, Float, 4000) + .ATTR(lower_frequency_limit, Float, 20) + .ATTR(filterbank_channel_count, Int, 40) + .ATTR(dct_coefficient_count, Int, 13) + .OP_END_FACTORY_REG(Mfcc) + +/** +*@brief Decode and generate spectrogram using wav float tensor. + +*@par Inputs: +*The input x must be two-dimensional matrices. Inputs include: \n +* x:float tensor of the wav audio contents. contains length and channel + +*@par Attributes: +*@li window_size:Size of the spectrogram window. +*@li stride:Size of the spectrogram stride. +*@li magnitude_squared:If true, using magnitude squared. + +*@par Outputs: +*spectrogram:3-D float Tensor with the image contents. + +*@attention Constraints:\n +*-The implementation for AudioSpectrogram on Ascend uses AI CPU, with bad performance.\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(AudioSpectrogram) + .INPUT(x, TensorType({DT_FLOAT})) + .OUTPUT(spectrogram, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(window_size, Int) + .REQUIRED_ATTR(stride, Int) + .ATTR(magnitude_squared, Bool, false) + .OP_END_FACTORY_REG(AudioSpectrogram) + +/** +*@brief Decode a 16-bit WAV file into a float tensor. 
+ +*@par Inputs: +*The input contents must be string tensor. Inputs include: \n +* @li contents:A Tensor of type string. The WAV-encoded audio, usually from a file. + +*@par Attributes: +*@li desired_channels:An optional int. Defaults to -1. Number of sample channels wanted. +*@li desired_samples:An optional int. Defaults to -1. Length of audio requested. + +*@par Outputs: +*@li *audio:A Tensor of type float32. +*@li *sample_rate:A Tensor of type int32. + +*@attention Constraints: \n +*-The implementation for DecodeWav on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(DecodeWav) + .INPUT(contents, TensorType({DT_STRING})) + .OUTPUT(audio, TensorType({DT_FLOAT})) + .OUTPUT(sample_rate, TensorType({DT_INT32})) + .ATTR(desired_channels, Int, -1) + .ATTR(desired_samples, Int, -1) + .OP_END_FACTORY_REG(DecodeWav) + +REG_OP(EncodeWav) + .INPUT(audio, TensorType({DT_FLOAT})) + .INPUT(sample_rate, TensorType({DT_INT32})) + .OUTPUT(contents, TensorType({DT_STRING})) + .OP_END_FACTORY_REG(EncodeWav) +} // namespace ge + +#endif // GE_OP_AUDIO_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/basic_lstm_cell.h b/third_party/fwkacllib/inc/ops/basic_lstm_cell.h new file mode 100755 index 00000000..68267fdb --- /dev/null +++ b/third_party/fwkacllib/inc/ops/basic_lstm_cell.h @@ -0,0 +1,154 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_BASIC_LSTM_CELL_H +#define GE_OP_BASIC_LSTM_CELL_H + +#include "../graph/operator_reg.h" + +namespace ge { +/** +*@brief: Basic LSTM Cell forward calculation. +*@par Inputs: +*five inputs: \n +*@li x:A 4D Tensor. Must be one of the following types: float16. The format must be FRACTAL_NZ. +*@li h:A 4D Tensor. Must be one of the following types: float16. The format must be FRACTAL_NZ. +*@li c:A 4D Tensor. Must be one of the following types: float16, float32. The format must be FRACTAL_NZ. +*@li w:A 4D Tensor. Must be one of the following types: float16. The format must be FRACTAL_Z. +*@li b:A 1D Tensor. Must be one of the following types: float16. The format must be ND. + +*@par Attributes: +*@li keep_prob:An integer identifying the keep prob in the op. Default to 1. +*@li forget_bias:An integer identifying the forget bias in the op. Default to 1. +*@li state_is_tuple:An bool identifying if the hidden state and cell state is tuple. Default to true. +*@li activation:An string identifying the type of activation function in the op. Default to "tanh". Only tanh is currently supported. + +*@par Outputs: +*seven outputs: \n +*@li mask:A 1D Tensor. Must be one of the following types: uint8. +*@li ct:A 4D Tensor. Must be one of the following types: float16, float32. +*@li ht:A 4D Tensor. Must be one of the following types: float16. +*@li it:A 4D Tensor. Must be one of the following types: float16, float32. +*@li jt:A 4D Tensor. Must be one of the following types: float16, float32. +*@li ft:A 4D Tensor. 
Must be one of the following types: float16, float32. +*@li ot:A 4D Tensor. Must be one of the following types: float16, float32. +*@li tanhct:A 4D Tensor. Must be one of the following types: float16, float32. +*/ +REG_OP(BasicLSTMCell) + .INPUT(x, TensorType({DT_FLOAT16})) + .INPUT(h, TensorType({DT_FLOAT16})) + .INPUT(c, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(w, TensorType({DT_FLOAT16})) + .INPUT(b, TensorType({DT_FLOAT16, DT_FLOAT})) + .OPTIONAL_INPUT(mask, TensorType({DT_UINT8})) + .OUTPUT(ct, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(ht, TensorType({DT_FLOAT16})) + .OUTPUT(it, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(jt, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(ft, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(ot, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(tanhct, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(keep_prob, Float, 1.0) + .ATTR(forget_bias, Float, 1.0) + .ATTR(state_is_tuple, Bool, true) + .ATTR(activation, String, "tanh") + .OP_END_FACTORY_REG(BasicLSTMCell) + +/** +*@brief: Basic LSTM Cell backward calculation.Calculate the gradient of input and hidden state. +*@par Inputs: +*three inputs: \n +*@li dgate:A 4D Tensor. Must be one of the following types: float16. The format must be FRACTAL_NZ. +*@li w:A 4D Tensor. Must be one of the following types: float16. The format must be FRACTAL_Z. +*@li dropout_mask:A 1D Tensor. Must be one of the following types: uint8. The format must be ND. + +*@par Attributes: +*keep_prob:An integer identifying the keep prob in the op. Default to 1. + +*@par Outputs: +*two outputs: \n +*@li dxt:A 4D Tensor. Must be one of the following types: float16, float32. +*@li dht:A 4D Tensor. Must be one of the following types: float16, float32. 
+*/
+REG_OP(BasicLSTMCellInputGrad)
+    .INPUT(dgate, TensorType({DT_FLOAT16}))
+    .INPUT(w, TensorType({DT_FLOAT16}))
+    .OPTIONAL_INPUT(dropout_mask, TensorType({DT_UINT8}))
+    .OUTPUT(dxt, TensorType({DT_FLOAT16}))
+    .OUTPUT(dht, TensorType({DT_FLOAT16, DT_FLOAT32}))
+    .ATTR(keep_prob, Float, 1.0)
+    .OP_END_FACTORY_REG(BasicLSTMCellInputGrad)
+
+/**
+*@brief: Basic LSTM Cell backward calculation. Calculate the gradient of weight and bias.
+*@par Inputs:
+*three inputs: \n
+*@li x:A 4D Tensor. Must be one of the following types: float16. The format must be FRACTAL_NZ.
+*@li h:A 4D Tensor. Must be one of the following types: float16. The format must be FRACTAL_NZ.
+*@li dgate:A 4D Tensor. Must be one of the following types: float16. The format must be FRACTAL_NZ.
+
+*@par Outputs:
+*two outputs: \n
+*@li dw:A 4D Tensor. Must be one of the following types: float16.
+*@li db:A 4D Tensor. Must be one of the following types: float16, float32.
+*/
+REG_OP(BasicLSTMCellWeightGrad)
+    .INPUT(x, TensorType({DT_FLOAT16}))
+    .INPUT(h, TensorType({DT_FLOAT16}))
+    .INPUT(dgate, TensorType({DT_FLOAT16}))
+    .OUTPUT(dw, TensorType({DT_FLOAT16}))
+    .OUTPUT(db, TensorType({DT_FLOAT16, DT_FLOAT32}))
+    .OP_END_FACTORY_REG(BasicLSTMCellWeightGrad)
+
+/**
+*@brief: Basic LSTM Cell backward calculation. Calculate the gradient of gates and cell state.
+*@par Inputs:
+*eight inputs: \n
+*@li c:A 4D Tensor. Must be one of the following types: float16, float32. The format must be FRACTAL_NZ.
+*@li dht:A 4D Tensor. Must be one of the following types: float16, float32. The format must be FRACTAL_NZ.
+*@li dct:A 4D Tensor. Must be one of the following types: float16, float32. The format must be FRACTAL_NZ.
+*@li it:A 4D Tensor. Must be one of the following types: float16, float32. The format must be FRACTAL_NZ.
+*@li jt:A 4D Tensor. Must be one of the following types: float16, float32. The format must be FRACTAL_NZ.
+*@li ft:A 4D Tensor. Must be one of the following types: float16, float32.
The format must be FRACTAL_NZ. +*@li ot:A 4D Tensor. Must be one of the following types: float16, float32. The format must be FRACTAL_NZ. +*@li tanhct:A 4D Tensor. Must be one of the following types: float16, float32. The format must be FRACTAL_NZ. + +*@par Attributes: +*@li forget_bias:An integer identifying the forget bias in the op. Default to 1. +*@li activation:An string identifying the type of activation function in the op. Default to "tanh". Only tanh is currently supported. + +*@par Outputs: +*two outputs: \n +*@li dgate:A 4D Tensor. Must be one of the following types: float16. +*@li dct_1:A 4D Tensor. Must be one of the following types: float16, float32. +*/ +REG_OP(BasicLSTMCellCStateGrad) + .INPUT(c, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(dht, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(dct, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(it, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(jt, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(ft, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(ot, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(tanhct, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(dgate, TensorType({DT_FLOAT16})) + .OUTPUT(dct_1, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(forget_bias, Float, 1.0) + .ATTR(activation, String, "tanh") + .OP_END_FACTORY_REG(BasicLSTMCellCStateGrad) +} // namespace ge + +#endif // GE_OP_BASIC_LSTM_CELL_H diff --git a/third_party/fwkacllib/inc/ops/batch_ops.h b/third_party/fwkacllib/inc/ops/batch_ops.h new file mode 100755 index 00000000..d9151589 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/batch_ops.h @@ -0,0 +1,158 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_BATCH_OPS_H_ +#define GE_OP_BATCH_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +/** +*@brief Creates batches of tensors in tensors. + +*@par Inputs: +*The input x_tensors can be a list or a dictionary of tensors. Inputs include: \n +*x_tensors:The list or dictionary of tensors to enqueue. + +*@par Attributes: +*@li num_batch_threads:The number of threads enqueuing tensors. The batching will be nondeterministic if num_batch_threads > 1. +*@li max_batch_size:Max batch size pulled from the queue. +*@li max_enqueued_batches:Maximum number of batches pulled from the queue. +*@li batch_timeout_micros:Batch processing timeout in microseconds unit. +*@li allowed_batch_sizes:Allowed batch size pulled from the queue. +*@li grad_timeout_micros:Calculate the gradient batch processing timeout in microseconds unit. +*@li container:If non-empty, this queue is placed in the given container. Otherwise, a default container is used. +*@li shared_name:If set, this queue will be shared under the given name across multiple sessions. +*@li batching_queue:queue resource container. + +*@par Outputs: +*@li y_index:Tensor, index of a BatchTensor. Must be in row-major order. +*@li y_id:Tensor, id of a BatchTensor. Must be in row-major order. +*@li y_tensors:A list or dictionary of tensors with the same types as tensors. + +*@attention Constraints: \n +*-The implementation for Batch on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(Batch) + .DYNAMIC_INPUT(x_tensors, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y_index, TensorType({ DT_INT64 })) + .OUTPUT(y_id, TensorType({ DT_INT64 })) + .DYNAMIC_OUTPUT(y_tensors, TensorType({DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_BOOL})) + .REQUIRED_ATTR(num_batch_threads, Int) + .REQUIRED_ATTR(max_batch_size, Int) + .ATTR(max_enqueued_batches, Int, 10) + .REQUIRED_ATTR(batch_timeout_micros, Int) + .ATTR(allowed_batch_sizes, ListInt, {}) + .REQUIRED_ATTR(grad_timeout_micros, Int) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(batching_queue, String, "") + .OP_END_FACTORY_REG(Batch) + +/** +*@brief Reverses the operation of Batch for a single output Tensor. + +*@par Inputs: +*The input x_tensors can be a list or a dictionary of tensors. Inputs include: \n +* @li x_tensors:The list or dictionary of tensors to enqueue. +* @li index:The matching batch_index obtained from Batch. +* @li id:The id scalar emitted by Batch. + +*@par Attributes: +*@li timeout_micros:Calculate the unbatch processing timeout in microseconds unit. +*@li container:If non-empty, this queue is placed in the given container. Otherwise, a default container is used. +*@li shared_name:If set, this queue will be shared under the given name across multiple sessions. + +*@par Outputs: +*y_tensor:A list or dictionary of tensors with the same types as tensors. + +*@attention Constraints: \n +*-The implementation for Unbatch on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(Unbatch) + .INPUT(x_tensor, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT, DT_DOUBLE})) + .INPUT(index, TensorType({DT_INT64})) + .INPUT(id, TensorType({DT_INT64})) + .OUTPUT(y_tensor, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT, DT_DOUBLE})) + .REQUIRED_ATTR(timeout_micros, Int) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(Unbatch) + +/** +*@brief Acts like Batch but using the given batch_index index of batching things as they become available. + +*@par Inputs: +*The input x_input can be a list or a dictionary of tensors. Inputs include: \n +* @li x_input:The input to the Unbatch operation. +* @li index:The batch_index given to the Unbatch operation. +* @li id:The id scalar emitted by Batch. +* @li grad:The downstream gradient. + +*@par Attributes: +*@li container:If non-empty, this queue is placed in the given container. Otherwise, a default container is used. +*@li shared_name:If set, this queue will be shared under the given name across multiple sessions. + +*@par Outputs: +*y_grad:The return value, either an empty tensor or the batched gradient. + +*@attention Constraints: \n +*-The implementation for UnbatchGrad on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(UnbatchGrad) + .INPUT(x_input, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT, DT_DOUBLE})) + .INPUT(index, TensorType({DT_INT64})) + .INPUT(grad, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT, DT_DOUBLE})) + .INPUT(id, TensorType({DT_INT64})) + .OUTPUT(y_grad, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT, DT_DOUBLE})) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(UnbatchGrad) +} // namespace ge + +#endif // GE_OP_BATCH_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/bitwise_ops.h b/third_party/fwkacllib/inc/ops/bitwise_ops.h new file mode 100755 index 00000000..d2ed88a7 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/bitwise_ops.h @@ -0,0 +1,57 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_BITWISE_OPS_H_ +#define GE_OP_BITWISE_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +/** +*@brief Elementwise computes the bitwise right-shift of x and y. + +*@par Inputs: +*The input x can be k-dimensional tensor, num_lower and num_upper can be zero-dimensional scalar. 
Inputs include: \n +* @li x:A Tensor. Must be one of the following types: int8, int16, int32, int64, uint8, uint16, uint32, uint64. \n +* @li y:A Tensor. Must have the same type as x. \n + +*@par Outputs: +*@li z:A Tensor. Has the same type as x. \n + +*@attention Constraints:\n +*-The implementation for RightShift on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(RightShift) + .INPUT(x, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, \ + DT_UINT8, DT_UINT16, DT_UINT32, DT_UINT64})) + .INPUT(y, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, \ + DT_UINT8, DT_UINT16, DT_UINT32, DT_UINT64})) + .OUTPUT(z, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, \ + DT_UINT8, DT_UINT16, DT_UINT32, DT_UINT64})) + .OP_END_FACTORY_REG(RightShift) + +} // namespace ge + +#endif // GE_OP_BITWISE_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/boosted_trees_ops.h b/third_party/fwkacllib/inc/ops/boosted_trees_ops.h new file mode 100755 index 00000000..3f02a4e5 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/boosted_trees_ops.h @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_BOOSTED_TREES_OPS_H_ +#define GE_OP_BOOSTED_TREES_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +/** +*@brief Bucketize each feature based on bucket boundaries. + +*@par Inputs: +*The input float_values can be 1-D tensor, bucket_boundaries can be 1-D. Inputs include: \n +* @li float_values: List of Rank 1 Tensor each containing float values for a single feature. \n +* @li bucket_boundaries:List of Rank 1 Tensors each containing the bucket boundaries for a single. \n + +*@par Attributes: +*@li num_features:number of features \n + +*@par Outputs: +*@li y:List of Rank 1 Tensors each containing the bucketized values for a single feature. \n + +*@attention Constraints: \n +*-The implementation for BoostedTreesBucketize on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(BoostedTreesBucketize) + .DYNAMIC_INPUT(float_values, TensorType({DT_FLOAT})) + .DYNAMIC_INPUT(bucket_boundaries, TensorType({DT_FLOAT})) + .DYNAMIC_OUTPUT(y, TensorType({DT_INT32})) + .REQUIRED_ATTR(num_features, Int) + .OP_END_FACTORY_REG(BoostedTreesBucketize) + +} // namespace ge + +#endif // GE_OP_BOOSTED_TREES_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/candidate_sampling_ops.h b/third_party/fwkacllib/inc/ops/candidate_sampling_ops.h new file mode 100755 index 00000000..2e3448fc --- /dev/null +++ b/third_party/fwkacllib/inc/ops/candidate_sampling_ops.h @@ -0,0 +1,340 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_CANDIDATE_SAMPLING_OPS_H_ +#define GE_OP_CANDIDATE_SAMPLING_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +/** +*@brief Generates labels for candidate sampling with a learned unigram distribution. + +*@par Inputs: +*The input true_classes must be two-dimensional matrices. Inputs include: \n +*true_classes:A batch_size * num_true matrix, in which each row contains the IDs of the num_true target_classes in the corresponding original label. + +*@par Attributes: +*@li num_true:Number of true labels per context. +*@li num_sampled:Number of candidates to randomly sample. +*@li unique:If unique is true, we sample with rejection, so that all sampled candidates in a batch are unique. This requires some approximation to estimate the post-rejection sampling probabilities. +*@li range_max:The sampler will sample integers from the interval [0, range_max). +*@li seed:If either seed or seed2 are set to be non-zero. +*@li seed2:An second seed to avoid seed collision. + +*@par Outputs: +*sampled_candidates:A vector of length num_sampled, in which each element is the ID of a sampled candidate. +*true_expected_count:A batch_size * num_true matrix, representing the number of times each candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. +*sampled_expected_count:A vector of length num_sampled, for each sampled candidate representing the number of times the candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. 
+ +*@attention Constraints: \n +*-The implementation for ThreadUnsafeUnigramCandidateSampler on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(ThreadUnsafeUnigramCandidateSampler) + .INPUT(true_classes, TensorType({ DT_INT64 })) + .OUTPUT(sampled_candidates, TensorType({ DT_INT64 })) + .OUTPUT(true_expected_count, TensorType({ DT_FLOAT })) + .OUTPUT(sampled_expected_count, TensorType({ DT_FLOAT })) + .REQUIRED_ATTR(num_true, Int) + .REQUIRED_ATTR(num_sampled, Int) + .REQUIRED_ATTR(unique, Bool) + .REQUIRED_ATTR(range_max, Int) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(ThreadUnsafeUnigramCandidateSampler) + +/** +*@brief Generates labels for candidate sampling with a learned unigram distribution. + +*@par Inputs: +*The input true_classes must be two-dimensional matrices. Inputs include: \n +*true_classes:A batch_size * num_true matrix, in which each row contains the IDs of the num_true target_classes in the corresponding original label. + +*@par Attributes: +*@li num_true:Number of true labels per context. +*@li num_sampled:Number of candidates to randomly sample. +*@li unique:If unique is true, we sample with rejection, so that all sampled candidates in a batch are unique. This requires some approximation to estimate the post-rejection sampling probabilities. +*@li range_max:The sampler will sample integers from the interval [0, range_max). +*@li seed:If either seed or seed2 are set to be non-zero. +*@li seed2:An second seed to avoid seed collision. + +*@par Outputs: +*@li sampled_candidates:A vector of length num_sampled, in which each element is the ID of a sampled candidate. +*@li true_expected_count:A batch_size * num_true matrix, representing the number of times each candidate is expected to occur in a batch of sampled candidates. 
If unique=true, then this is a probability. +*@li sampled_expected_count:A vector of length num_sampled, for each sampled candidate representing the number of times the candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. + +*@attention Constraints: \n +*-The implementation for UniformCandidateSampler on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(UniformCandidateSampler) + .INPUT(true_classes, TensorType({ DT_INT64 })) + .OUTPUT(sampled_candidates, TensorType({ DT_INT64 })) + .OUTPUT(true_expected_count, TensorType({ DT_FLOAT })) + .OUTPUT(sampled_expected_count, TensorType({ DT_FLOAT })) + .REQUIRED_ATTR(num_true, Int) + .REQUIRED_ATTR(num_sampled, Int) + .REQUIRED_ATTR(unique, Bool) + .REQUIRED_ATTR(range_max, Int) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(UniformCandidateSampler) + +/** +*@brief Generates labels for candidate sampling with a learned unigram distribution. + +*@par Inputs: +*The input true_classes can be two-dimensional matrices. Inputs include: \n +*true_classes:A batch_size * num_true matrix, in which each row contains the IDs of the num_true target_classes in the corresponding original label. + +*@par Attributes: +*@li num_true:Number of true labels per context. +*@li num_sampled:Number of candidates to randomly sample. +*@li unique:If unique is true, we sample with rejection, so that all sampled candidates in a batch are unique. This requires some approximation to estimate the post-rejection sampling probabilities. +*@li range_max:The sampler will sample integers from the interval [0, range_max). +*@li vocab_file:Each valid line in this file (which should have a CSV-like format) corresponds to a valid word ID. 
IDs are in sequential order, starting from num_reserved_ids. +*@li distortion:The distortion is used to skew the unigram probability distribution. Each weight is first raised to the distortion's power before adding to the internal unigram distribution. +*@li num_reserved_ids:Optionally some reserved IDs can be added in the range [0, ..., num_reserved_ids) by the users. One use case is that a special unknown word token is used as ID 0. +*@li num_shards:A sampler can be used to sample from a subset of the original range in order to speed up the whole computation through parallelism. +*@li shard:A sampler can be used to sample from a subset of the original range in order to speed up the whole computation through parallelism. +*@li unigrams:A list of unigram counts or probabilities, one per ID in sequential order. +*@li seed:If either seed or seed2 are set to be non-zero. +*@li seed2:An second seed to avoid seed collision. + +*@par Outputs: +*@li sampled_candidates:A vector of length num_sampled, in which each element is the ID of a sampled candidate. +*@li true_expected_count:A batch_size * num_true matrix, representing the number of times each candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. +*@li sampled_expected_count:A vector of length num_sampled, for each sampled candidate representing the number of times the candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. + +*@attention Constraints: \n +*-The implementation for FixedUnigramCandidateSampler on Ascend uses AI CPU, with bad performance. 
\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(FixedUnigramCandidateSampler) + .INPUT(true_classes, TensorType({ DT_INT64 })) + .OUTPUT(sampled_candidates, TensorType({ DT_INT64 })) + .OUTPUT(true_expected_count, TensorType({ DT_FLOAT })) + .OUTPUT(sampled_expected_count, TensorType({ DT_FLOAT })) + .ATTR(num_true, Int, 0) + .ATTR(num_sampled, Int, 0) + .ATTR(unique, Bool, false) + .ATTR(range_max, Int, 0) + .ATTR(vocab_file, String, "") + .ATTR(distortion, Float, 1.0) + .ATTR(num_reserved_ids, Int, 0) + .ATTR(num_shards, Int, 1) + .ATTR(shard, Int, 0) + .REQUIRED_ATTR(unigrams, ListFloat) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(FixedUnigramCandidateSampler) + +/** +*@brief Generates labels for candidate sampling with a learned unigram distribution. + +*@par Inputs: +*The input true_classes can be two-dimensional matrices. Inputs include: \n +*true_classes:A batch_size * num_true matrix, in which each row contains the IDs of the num_true target_classes in the corresponding original label. + +*@par Attributes: +*@li num_true:Number of true labels per context. +*@li num_sampled:Number of candidates to randomly sample. +*@li unique:If unique is true, we sample with rejection, so that all sampled candidates in a batch are unique. This requires some approximation to estimate the post-rejection sampling probabilities. +*@li range_max:The sampler will sample integers from the interval [0, range_max). +*@li seed:If either seed or seed2 are set to be non-zero. +*@li seed2:An second seed to avoid seed collision. + +*@par Outputs: +*@li sampled_candidates:A vector of length num_sampled, in which each element is the ID of a sampled candidate. 
+*@li true_expected_count:A batch_size * num_true matrix, representing the number of times each candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. +*@li sampled_expected_count:A vector of length num_sampled, for each sampled candidate representing the number of times the candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. + +*@attention Constraints: \n +*-The implementation for LearnedUnigramCandidateSampler on Ascend uses AI CPU, with bad performance. \n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(LearnedUnigramCandidateSampler) + .INPUT(true_classes, TensorType({ DT_INT64 })) + .OUTPUT(sampled_candidates, TensorType({ DT_INT64 })) + .OUTPUT(true_expected_count, TensorType({ DT_FLOAT })) + .OUTPUT(sampled_expected_count, TensorType({ DT_FLOAT })) + .REQUIRED_ATTR(num_true, Int) + .REQUIRED_ATTR(num_sampled, Int) + .REQUIRED_ATTR(unique, Bool) + .REQUIRED_ATTR(range_max, Int) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(LearnedUnigramCandidateSampler) + +/** +*@brief Generates labels for candidate sampling with a log-uniform distribution. + +*@par Inputs: +*The input true_classes can be two-dimensional matrices. Inputs include: \n +*true_classes:A batch_size * num_true matrix, in which each row contains the IDs of the num_true target_classes in the corresponding original label. + +*@par Attributes: +*@li num_true:Number of true labels per context. +*@li num_sampled:Number of candidates to randomly sample. +*@li unique:If unique is true, we sample with rejection, so that all sampled candidates in a batch are unique. This requires some approximation to estimate the post-rejection sampling probabilities. 
+*@li range_max:The sampler will sample integers from the interval [0, range_max). +*@li seed:If either seed or seed2 are set to be non-zero. +*@li seed2:An second seed to avoid seed collision. + +*@par Outputs: +*@li sampled_candidates:A vector of length num_sampled, in which each element is the ID of a sampled candidate. +*@li true_expected_count:A batch_size * num_true matrix, representing the number of times each candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. +*@li sampled_expected_count:A vector of length num_sampled, for each sampled candidate representing the number of times the candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. + +*@attention Constraints:\n +*-The implementation for LogUniformCandidateSampler on Ascend uses AI CPU, with bad performance.\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(LogUniformCandidateSampler) + .INPUT(true_classes, TensorType({ DT_INT64 })) + .OUTPUT(sampled_candidates, TensorType({ DT_INT64 })) + .OUTPUT(true_expected_count, TensorType({ DT_FLOAT })) + .OUTPUT(sampled_expected_count, TensorType({ DT_FLOAT })) + .REQUIRED_ATTR(num_true, Int) + .REQUIRED_ATTR(num_sampled, Int) + .REQUIRED_ATTR(unique, Bool) + .REQUIRED_ATTR(range_max, Int) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(LogUniformCandidateSampler) + +/** +*@brief Generates labels for candidate sampling with a learned unigram distribution. + +*@par Inputs: +*The input true_classes can be two-dimensional matrices. Inputs include: \n +*true_classes:A batch_size * num_true matrix, in which each row contains the IDs of the num_true target_classes in the corresponding original label. + +*@par Attributes: +*@li num_true:Number of true labels per context. 
+*@li num_sampled:Number of candidates to randomly sample. +*@li unique:If unique is true, we sample with rejection, so that all sampled candidates in a batch are unique. This requires some approximation to estimate the post-rejection sampling probabilities. +*@li seed:If either seed or seed2 are set to be non-zero. +*@li seed2:An second seed to avoid seed collision. + +*@par Outputs: +*@li sampled_candidates:A vector of length num_sampled, in which each element is the ID of a sampled candidate. +*@li true_expected_count:A batch_size * num_true matrix, representing the number of times each candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. +*@li sampled_expected_count:A vector of length num_sampled, for each sampled candidate representing the number of times the candidate is expected to occur in a batch of sampled candidates. If unique=true, then this is a probability. + +*@attention Constraints:\n +*-The implementation for AllCandidateSampler on Ascend uses AI CPU, with bad performance.\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(AllCandidateSampler) + .INPUT(true_classes, TensorType({ DT_INT64 })) + .OUTPUT(sampled_candidates, TensorType({ DT_INT64 })) + .OUTPUT(true_expected_count, TensorType({ DT_FLOAT })) + .OUTPUT(sampled_expected_count, TensorType({ DT_FLOAT })) + .REQUIRED_ATTR(num_true, Int) + .REQUIRED_ATTR(num_sampled, Int) + .REQUIRED_ATTR(unique, Bool) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(AllCandidateSampler) + +/** +*@brief Computes the ids of the positions in sampled_candidates that match true_labels. + +*@par Inputs: +* @li The input true_classes can be two-dimensional matrices. Inputs include: \n +* @li true_classes:The true_classes output of UnpackSparseLabels. 
\n +* @li sampled_candidates:The sampled_candidates output of CandidateSampler. \n + +*@par Attributes: +*@li num_true:Number of true labels per context. +*@li seed:If either seed or seed2 are set to be non-zero. +*@li seed2:An second seed to avoid seed collision. + +*@par Outputs: +* @li indices:A vector of indices corresponding to rows of true_candidates. +* @li ids:A vector of IDs of positions in sampled_candidates that match a true_label for the row with the corresponding index in indices. +* @li weights:A vector of the same length as indices and ids, in which each element is -FLOAT_MAX. + +*@attention Constraints:\n +*-The implementation for ComputeAccidentalHits on Ascend uses AI CPU, with bad performance.\n + +*@par Quantization supported or not +*Not supported +*@par Quantized inference supported or not +*Supported +*@par L2 convergence supported or not +*@par Multiple batches supported or not +*/ + +REG_OP(ComputeAccidentalHits) + .INPUT(true_classes, TensorType({ DT_INT64 })) + .INPUT(sampled_candidates, TensorType({ DT_INT64 })) + .OUTPUT(indices, TensorType({ DT_INT32 })) + .OUTPUT(ids, TensorType({ DT_INT64 })) + .OUTPUT(weights, TensorType({ DT_FLOAT })) + .REQUIRED_ATTR(num_true, Int) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(ComputeAccidentalHits) + +} // namespace ge + +#endif // GE_OP_CANDIDATE_SAMPLING_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/clip_boxes.h b/third_party/fwkacllib/inc/ops/clip_boxes.h new file mode 100644 index 00000000..6ac07fd1 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/clip_boxes.h @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_CLIP_BOXES_H + #define GE_OP_CLIP_BOXES_H + + #include "graph/operator_reg.h" + + namespace ge { + + + REG_OP(ClipBoxes) + .INPUT(boxes_input, TensorType({DT_FLOAT16})) + .OUTPUT(boxes_output, TensorType({DT_FLOAT16})) + .REQUIRED_ATTR(img_w, Float) + .REQUIRED_ATTR(img_h, Float) + .OP_END_FACTORY_REG(ClipBoxes) + + } // namespace ge + + #endif // GE_OP_CLIP_BOXES_H diff --git a/third_party/fwkacllib/inc/ops/control_flow_ops.h b/third_party/fwkacllib/inc/ops/control_flow_ops.h new file mode 100755 index 00000000..06e047b1 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/control_flow_ops.h @@ -0,0 +1,148 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_CONTROL_FLOW_OPS_H_ +#define GE_CONTROL_FLOW_OPS_H_ + +#include "graph/operator_reg.h" +#include "graph/operator.h" + +namespace ge { + +REG_OP(Merge) + .DYNAMIC_INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(value_index, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(Merge) + +REG_OP(RefMerge) + .DYNAMIC_INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(value_index, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(RefMerge) + +REG_OP(Switch) + .INPUT(data, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .INPUT(pred, TensorType({DT_BOOL})) + .OUTPUT(output_false, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(output_true, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OP_END_FACTORY_REG(Switch) + +REG_OP(RefSwitch) + .INPUT(data, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .INPUT(pred, TensorType({DT_BOOL})) + .OUTPUT(output_false, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(output_true, TensorType({DT_FLOAT16, DT_FLOAT, 
DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OP_END_FACTORY_REG(RefSwitch) + +REG_OP(SwitchN) + .INPUT(data, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .INPUT(pred_value, TensorType({DT_INT64})) + .DYNAMIC_OUTPUT(output, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OP_END_FACTORY_REG(SwitchN) + +REG_OP(Enter) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .ATTR(frame_name, String, "") + .ATTR(is_constant, Bool, false) + .OP_END_FACTORY_REG(Enter) + +REG_OP(RefEnter) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .ATTR(frame_name, String, "") + .ATTR(is_constant, Bool, false) + .OP_END_FACTORY_REG(RefEnter) + +REG_OP(LoopCond) + .INPUT(x, TensorType({DT_BOOL})) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(LoopCond) + +REG_OP(NextIteration) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OP_END_FACTORY_REG(NextIteration) + +REG_OP(RefNextIteration) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, 
DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OP_END_FACTORY_REG(RefNextIteration) + +REG_OP(Exit) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OP_END_FACTORY_REG(Exit) + +REG_OP(RefExit) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_UINT32, + DT_UINT64, DT_BOOL})) + .OP_END_FACTORY_REG(RefExit) + +REG_OP(ControlTrigger) + .OP_END_FACTORY_REG(ControlTrigger) +} // namespace ge + +#endif // GE_CONTROL_FLOW_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/data_flow_ops.h b/third_party/fwkacllib/inc/ops/data_flow_ops.h new file mode 100755 index 00000000..e546fb3e --- /dev/null +++ b/third_party/fwkacllib/inc/ops/data_flow_ops.h @@ -0,0 +1,612 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_DATA_FLOW_OPS_H_ +#define GE_OP_DATA_FLOW_OPS_H_ + +#include +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(QueueIsClosed) + .INPUT(handle, TensorType({DT_RESOURCE})) + .OUTPUT(is_closed, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(QueueIsClosed) + +REG_OP(QueueSize) + .INPUT(handle, TensorType({DT_RESOURCE})) + .OUTPUT(size, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(QueueSize) + +REG_OP(FIFOQueue) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .REQUIRED_ATTR(component_types, ListType) + .ATTR(shapes, ListListInt, {}) + .ATTR(capacity, Int, -1) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(FIFOQueue) + +REG_OP(QueueEnqueue) + .INPUT(handle, TensorType({DT_RESOURCE})) + .DYNAMIC_INPUT(components, TensorType({DT_INT8, DT_UINT8, \ + DT_INT16, DT_UINT16, DT_INT32, DT_INT64, DT_UINT32, \ + DT_UINT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL})) + .ATTR(timeout_ms, Int, -1) + .OP_END_FACTORY_REG(QueueEnqueue) + +REG_OP(QueueEnqueueMany) + .INPUT(handle, TensorType({DT_RESOURCE})) + .DYNAMIC_INPUT(components, TensorType({DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, \ + DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL})) + .ATTR(timeout_ms, Int, -1) + .OP_END_FACTORY_REG(QueueEnqueueMany) + +REG_OP(QueueDequeue) + .INPUT(handle, TensorType({DT_RESOURCE})) + .DYNAMIC_OUTPUT(components, TensorType({DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, \ + DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL})) + .ATTR(timeout_ms, Int, -1) + .REQUIRED_ATTR(component_types, ListType) + .OP_END_FACTORY_REG(QueueDequeue) + +REG_OP(QueueDequeueMany) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(n, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(components, TensorType({DT_INT8, DT_UINT8, \ + DT_INT16, DT_UINT16, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, \ + DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL})) + .ATTR(timeout_ms, Int, -1) + 
.REQUIRED_ATTR(component_types, ListType) + .OP_END_FACTORY_REG(QueueDequeueMany) + +REG_OP(QueueDequeueUpTo) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(n, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(components, TensorType({DT_INT8, DT_UINT8, \ + DT_INT16, DT_UINT16, DT_INT32, DT_INT64, DT_UINT32, \ + DT_UINT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL})) + .ATTR(timeout_ms, Int, -1) + .REQUIRED_ATTR(component_types, ListType) + .OP_END_FACTORY_REG(QueueDequeueUpTo) + +REG_OP(Stage) + .DYNAMIC_INPUT(values, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, \ + DT_DOUBLE, DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(Stage) + +REG_OP(StageClear) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(dtypes, ListType, {}) + .OP_END_FACTORY_REG(StageClear) + +REG_OP(StagePeek) + .INPUT(index, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, \ + DT_DOUBLE, DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(dtypes, ListType, {}) + .OP_END_FACTORY_REG(StagePeek) + +REG_OP(StageSize) + .OUTPUT(size, TensorType({DT_INT32})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(dtypes, ListType, {}) + .OP_END_FACTORY_REG(StageSize) + +REG_OP(StackPop) + .INPUT(handle, TensorType({DT_RESOURCE})) + .OUTPUT(element, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, \ + DT_DOUBLE, DT_UINT32, DT_UNIT64})) + .REQUIRED_ATTR(elem_type, Type) + .OP_END_FACTORY_REG(StackPop) + +REG_OP(StackPush) + 
.INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(element, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, \ + DT_DOUBLE, DT_UINT32, DT_UNIT64})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, \ + DT_DOUBLE, DT_UINT32, DT_UNIT64})) + .ATTR(swap_memory, Bool, false) + .OP_END_FACTORY_REG(StackPush) + +REG_OP(StackClose) + .INPUT(handle, TensorType({DT_RESOURCE})) + .OP_END_FACTORY_REG(StackClose) + +REG_OP(Stack) + .INPUT(max_size, TensorType({DT_INT32})) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .ATTR(stack_name, String, "") + .REQUIRED_ATTR(elem_type, Type) + .OP_END_FACTORY_REG(Stack) + +REG_OP(DynamicPartition) + .INPUT(x, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(partitions, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(num_partitions, Int, 1) + .OP_END_FACTORY_REG(DynamicPartition) + +REG_OP(DynamicStitch) + .DYNAMIC_INPUT(indices, TensorType({DT_INT32})) + .DYNAMIC_INPUT(x, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(N, Int, 1) + .OP_END_FACTORY_REG(DynamicStitch) + +REG_OP(ParallelDynamicStitch) + .DYNAMIC_INPUT(indices, TensorType({DT_INT32})) + .DYNAMIC_INPUT(x, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(N, Int, 1) + .OP_END_FACTORY_REG(ParallelDynamicStitch) + +REG_OP(MapClear) + 
.ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(MapClear) + +REG_OP(MapIncompleteSize) + .OUTPUT(size, TensorType({DT_INT32})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(MapIncompleteSize) + +REG_OP(Unstage) + .DYNAMIC_OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, \ + DT_DOUBLE, DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .REQUIRED_ATTR(dtypes, ListType) + .OP_END_FACTORY_REG(Unstage) + +REG_OP(MapStage) + .INPUT(key, TensorType({DT_INT64})) + .INPUT(indices, TensorType({DT_INT32})) + .DYNAMIC_INPUT(values, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE, \ + DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(MapStage) + +REG_OP(MapUnstage) + .INPUT(key, TensorType({DT_INT64})) + .INPUT(indices, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(values, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE, \ + DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(MapUnstage) + +REG_OP(MapUnstageNoKey) + .INPUT(indices, TensorType({DT_INT32})) + .OUTPUT(key, TensorType({DT_INT64})) + .DYNAMIC_OUTPUT(values, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE, \ + DT_UINT32, 
DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(MapUnstageNoKey) + +REG_OP(MapPeek) + .INPUT(key, TensorType({DT_INT64})) + .INPUT(indices, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(values, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE, \ + DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(MapPeek) + +REG_OP(MapSize) + .OUTPUT(size, TensorType({DT_INT32})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(MapSize) + +REG_OP(TensorArray) + .INPUT(size, TensorType({DT_INT32})) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .OUTPUT(flow, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(dtype, Type) + .ATTR(element_shape, ListInt, ge::UNKNOWN_SHAPE) + .ATTR(dynamic_size, Bool, false) + .ATTR(clear_after_read, Bool, true) + .ATTR(identical_element_shapes, Bool, false) + .ATTR(tensor_array_name, String, "") + .OP_END_FACTORY_REG(TensorArray) + +REG_OP(TensorArrayClose) + .INPUT(handle, TensorType({DT_RESOURCE})) + .OP_END_FACTORY_REG(TensorArrayClose) + +REG_OP(TensorArrayConcat) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(flow_in, TensorType({DT_FLOAT})) + .OUTPUT(value, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL})) + .OUTPUT(lengths, TensorType({DT_INT64})) + .REQUIRED_ATTR(dtype, Type) + .ATTR(element_shape_except0, ListInt, ge::UNKNOWN_SHAPE) + .OP_END_FACTORY_REG(TensorArrayConcat) + +REG_OP(TensorArrayGather) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(flow_in, 
TensorType({DT_FLOAT})) + .OUTPUT(value, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL})) + .REQUIRED_ATTR(dtype, Type) + .ATTR(element_shape, ListInt, ge::UNKNOWN_SHAPE) + .OP_END_FACTORY_REG(TensorArrayGather) + +REG_OP(TensorArrayGrad) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(flow_in, TensorType({DT_FLOAT})) + .OUTPUT(grad_handle, TensorType({DT_RESOURCE})) + .OUTPUT(flow_out, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(source, String) + .OP_END_FACTORY_REG(TensorArrayGrad) + +REG_OP(TensorArrayWrite) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(index, TensorType({DT_INT32})) + .INPUT(value, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL})) + .INPUT(flow_in, TensorType({DT_FLOAT})) + .OUTPUT(flow_out, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(TensorArrayWrite) + +REG_OP(TensorArrayGradWithShape) + .INPUT(handle, TensorType({ DT_RESOURCE })) + .INPUT(flow_in, TensorType({ DT_FLOAT })) + .INPUT(shape_to_prepend, TensorType({ DT_INT32 })) + .OUTPUT(grad_handle, TensorType({ DT_RESOURCE })) + .OUTPUT(flow_out, TensorType({ DT_FLOAT })) + .ATTR(source, String, "") + .OP_END_FACTORY_REG(TensorArrayGradWithShape) + +REG_OP(TensorArrayRead) + .INPUT(handle, TensorType({ DT_RESOURCE })) + .INPUT(index, TensorType({ DT_INT32 })) + .INPUT(flow_in, TensorType({ DT_FLOAT })) + .OUTPUT(y, TensorType({ DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE })) + .REQUIRED_ATTR(dtype, Type) + .OP_END_FACTORY_REG(TensorArrayRead) + +REG_OP(TensorArrayScatter) + .INPUT(handle, TensorType({ DT_RESOURCE })) + .INPUT(indices, TensorType({ DT_INT32 })) + .INPUT(value, TensorType({ DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE })) + .INPUT(flow_in, TensorType({ DT_FLOAT })) + .OUTPUT(flow_out, TensorType({ DT_FLOAT })) 
+ .OP_END_FACTORY_REG(TensorArrayScatter) + +REG_OP(TensorArraySplit) + .INPUT(handle, TensorType({ DT_RESOURCE })) + .INPUT(value, TensorType({ DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE })) + .INPUT(lengths, TensorType({ DT_INT64 })) + .INPUT(flow_in, TensorType({ DT_FLOAT })) + .OUTPUT(flow_out, TensorType({ DT_FLOAT })) + .OP_END_FACTORY_REG(TensorArraySplit) + +REG_OP(TensorArraySize) + .INPUT(handle, TensorType({ DT_RESOURCE })) + .INPUT(flow_in, TensorType({ DT_FLOAT })) + .OUTPUT(size, TensorType({ DT_INT32 })) + .OP_END_FACTORY_REG(TensorArraySize) + +REG_OP(RandomShuffleQueue) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .REQUIRED_ATTR(component_types, ListType) + .ATTR(shapes, ListListInt, {}) + .ATTR(capacity, Int, -1) + .ATTR(min_after_dequeue, Int, 0) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(RandomShuffleQueue) + +REG_OP(PaddingFIFOQueue) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .REQUIRED_ATTR(component_types, ListType) + .ATTR(shapes, ListListInt, {}) + .ATTR(capacity, Int, -1) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(PaddingFIFOQueue) + +REG_OP(PriorityQueue) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .ATTR(component_types, ListType, {}) + .ATTR(shapes, ListListInt, {}) + .ATTR(capacity, Int, -1) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(PriorityQueue) + +REG_OP(QueueClose) + .INPUT(handle, TensorType({DT_RESOURCE})) + .ATTR(cancel_pending_enqueues, Bool, false) + .OP_END_FACTORY_REG(QueueClose) + +REG_OP(OrderedMapStage) + .INPUT(key, TensorType({DT_INT64})) + .INPUT(indices, TensorType({DT_INT32})) + .DYNAMIC_INPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, + DT_DOUBLE, DT_BOOL, DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + 
.ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(OrderedMapStage) + +REG_OP(OrderedMapSize) + .OUTPUT(size, TensorType({DT_INT32})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(OrderedMapSize) + +REG_OP(OrderedMapClear) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(OrderedMapClear) + +REG_OP(OrderedMapIncompleteSize) + .OUTPUT(size, TensorType({DT_INT32})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(OrderedMapIncompleteSize) + +REG_OP(OrderedMapPeek) + .INPUT(key, TensorType({DT_INT64})) + .INPUT(indices, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, + DT_DOUBLE, DT_BOOL, DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(OrderedMapPeek) + +REG_OP(OrderedMapUnstageNoKey) + .INPUT(indices, TensorType({DT_INT32})) + .OUTPUT(key, TensorType({DT_INT64})) + .DYNAMIC_OUTPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, + DT_DOUBLE, DT_BOOL, DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(OrderedMapUnstageNoKey) + +REG_OP(OrderedMapUnstage) + .INPUT(key, TensorType({DT_INT64})) + .INPUT(indices, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(values, 
TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, + DT_DOUBLE, DT_BOOL, DT_UINT32, DT_UINT64})) + .ATTR(capacity, Int, 0) + .ATTR(memory_limit, Int, 0) + .ATTR(dtypes, ListType, {}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(OrderedMapUnstage) + +REG_OP(Barrier) + .OUTPUT(handle, TensorType({DT_STRING_REF})) + .REQUIRED_ATTR(component_types, ListType) + .ATTR(shapes, ListListInt, {}) + .ATTR(capacity, Int, -1) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(Barrier) + +REG_OP(BarrierInsertMany) + .INPUT(handle, TensorType({DT_STRING_REF})) + .INPUT(keys, TensorType({DT_STRING})) + .INPUT(values, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE, \ + DT_UINT32, DT_UINT64})) + .REQUIRED_ATTR(component_index, Int) + .OP_END_FACTORY_REG(BarrierInsertMany) + +REG_OP(BarrierTakeMany) + .INPUT(handle, TensorType({DT_STRING_REF})) + .INPUT(num_elements, TensorType(DT_INT32)) + .OUTPUT(indices, TensorType({DT_INT64})) + .OUTPUT(keys, TensorType({DT_STRING})) + .DYNAMIC_OUTPUT(values, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE, \ + DT_UINT32, DT_UINT64})) + .REQUIRED_ATTR(component_types, ListType) + .ATTR(allow_small_batch, Bool, false) + .ATTR(wait_for_incomplete, Bool, false) + .ATTR(timeout_ms, Int, -1) + .OP_END_FACTORY_REG(BarrierTakeMany) + +REG_OP(BarrierClose) + .INPUT(handle, TensorType({DT_STRING_REF})) + .ATTR(cancel_pending_enqueues, Bool, false) + .OP_END_FACTORY_REG(BarrierClose) + +REG_OP(BarrierReadySize) + .INPUT(handle, TensorType({DT_STRING_REF})) + .OUTPUT(size, TensorType(DT_INT32)) + .OP_END_FACTORY_REG(BarrierReadySize) + +REG_OP(BarrierIncompleteSize) + .INPUT(handle, TensorType({DT_STRING_REF})) + .OUTPUT(size, TensorType(DT_INT32)) + .OP_END_FACTORY_REG(BarrierIncompleteSize) + 
+REG_OP(RecordInput) + .OUTPUT(records, TensorType({DT_STRING})) + .REQUIRED_ATTR(file_pattern, String) + .ATTR(file_random_seed, Int, 301) + .ATTR(file_shuffle_shift_ratio, Float, 0) + .ATTR(file_buffer_size, Int, 10000) + .ATTR(file_parallelism, Int, 16) + .ATTR(batch_size, Int, 32) + .ATTR(compression_type, String, "") + .OP_END_FACTORY_REG(RecordInput) + +REG_OP(ConditionalAccumulator) + .OUTPUT(handle, TensorType({DT_STRING_REF})) + .REQUIRED_ATTR(dtype, Type) + .REQUIRED_ATTR(shape, ListInt) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(reduction_type, String, "MEAN") + .OP_END_FACTORY_REG(ConditionalAccumulator) + +REG_OP(AccumulatorApplyGradient) + .INPUT(handle, TensorType({DT_STRING_REF})) + .INPUT(local_step, TensorType({DT_INT64})) + .INPUT(gradient, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT})) + .REQUIRED_ATTR(dtype, Type) + .OP_END_FACTORY_REG(AccumulatorApplyGradient) + +REG_OP(AccumulatorNumAccumulated) + .INPUT(handle, TensorType({DT_STRING_REF})) + .OUTPUT(y, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(AccumulatorNumAccumulated) + +REG_OP(AccumulatorSetGlobalStep) + .INPUT(handle, TensorType({DT_STRING_REF})) + .INPUT(new_global_step, TensorType({DT_INT64})) + .OP_END_FACTORY_REG(AccumulatorSetGlobalStep) + +REG_OP(AccumulatorTakeGradient) + .INPUT(handle, TensorType({DT_STRING_REF})) + .INPUT(num_required, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, DT_DOUBLE, DT_FLOAT})) + .REQUIRED_ATTR(dtype, Type) + .OP_END_FACTORY_REG(AccumulatorTakeGradient) + +REG_OP(SparseConditionalAccumulator) + .OUTPUT(handle, TensorType({DT_STRING_REF})) + .REQUIRED_ATTR(shape, ListInt) + .REQUIRED_ATTR(dtype, Type) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(reduction_type, String, "MEAN") + .OP_END_FACTORY_REG(SparseConditionalAccumulator) + +REG_OP(SparseAccumulatorApplyGradient) + 
.INPUT(handle, TensorType({DT_STRING_REF})) + .INPUT(local_step, TensorType({DT_INT64})) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT})) + .INPUT(shape, TensorType({DT_INT64})) + .REQUIRED_ATTR(has_known_shape, Bool) + .REQUIRED_ATTR(dtype, Type) + .OP_END_FACTORY_REG(SparseAccumulatorApplyGradient) + +REG_OP(SparseAccumulatorTakeGradient) + .INPUT(handle, TensorType({DT_STRING_REF})) + .INPUT(num_required, TensorType({DT_INT32})) + .OUTPUT(indices, TensorType({DT_INT64})) + .OUTPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT})) + .OUTPUT(shape, TensorType({DT_INT64})) + .REQUIRED_ATTR(dtype, Type) + .OP_END_FACTORY_REG(SparseAccumulatorTakeGradient) +} // namespace ge + +#endif // GE_OP_DATA_FLOW_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/decode_bbox.h b/third_party/fwkacllib/inc/ops/decode_bbox.h new file mode 100644 index 00000000..9fe95488 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/decode_bbox.h @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + #ifndef GE_OP_DECODE_BBOX_H + #define GE_OP_DECODE_BBOX_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(DecodeBbox) + .INPUT(box_predictions, TensorType{DT_FLOAT16}) + .INPUT(anchors, TensorType{DT_FLOAT16}) + .OUTPUT(decoded_boxes, TensorType{DT_FLOAT16}) + .REQUIRED_ATTR(decode_clip, Float) + .OP_END_FACTORY_REG(DecodeBbox) + + } // namespace ge + + #endif // GE_OP_DECODE_BBOX_H diff --git a/third_party/fwkacllib/inc/ops/decode_boundaries_target.h b/third_party/fwkacllib/inc/ops/decode_boundaries_target.h new file mode 100644 index 00000000..42ad7b54 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/decode_boundaries_target.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_DECODE_BOUNDARIES_TARGET_H + #define GE_OP_DECODE_BOUNDARIES_TARGET_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(DecodeBoundariesTarget) + .INPUT(boundary_predictions, TensorType({DT_FLOAT16})) /* "First operand." */ + .INPUT(anchors, TensorType({DT_FLOAT16})) /* "Second operand." 
*/ + .OUTPUT(boundary_encoded, TensorType({DT_FLOAT16})) /* "Result, has same element type as two inputs" */ + .OP_END_FACTORY_REG(DecodeBoundariesTarget) + } // namespace ge + + #endif // GE_OP_DECODE_BOUNDARIES_TARGET_H diff --git a/third_party/fwkacllib/inc/ops/decode_cornerpoints_target_bg.h b/third_party/fwkacllib/inc/ops/decode_cornerpoints_target_bg.h new file mode 100755 index 00000000..ce10175f --- /dev/null +++ b/third_party/fwkacllib/inc/ops/decode_cornerpoints_target_bg.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_DECODE_CORNERPOINTS_TARGET_BG_H + #define GE_OP_DECODE_CORNERPOINTS_TARGET_BG_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(DecodeCornerpointsTargetBG) + .INPUT(keypoints_prediction, TensorType({DT_FLOAT16})) /* "First operand." */ + .INPUT(anchors, TensorType({DT_FLOAT16})) /* "Second operand." 
*/ + .OUTPUT(keypoints_decoded, TensorType({DT_FLOAT16})) /* "Result, has same element type as two inputs" */ + .OP_END_FACTORY_REG(DecodeCornerpointsTargetBG); + } // namespace ge + + #endif // GE_OP_DECODE_CORNERPOINTS_TARGET_BG_H diff --git a/third_party/fwkacllib/inc/ops/decode_cornerpoints_target_wrt_center_v1.h b/third_party/fwkacllib/inc/ops/decode_cornerpoints_target_wrt_center_v1.h new file mode 100755 index 00000000..0e96bc16 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/decode_cornerpoints_target_wrt_center_v1.h @@ -0,0 +1,32 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_DECODE_CORNERPOINTS_TARGET_WRT_CENTER_V1_H + #define GE_OP_DECODE_CORNERPOINTS_TARGET_WRT_CENTER_V1_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(DecodeCornerpointsTargetWrtCenterV1) + .INPUT(keypoints_prediction, TensorType({DT_FLOAT16})) /* "First operand." */ + .INPUT(anchors, TensorType({DT_FLOAT16})) /* "Second operand." 
*/ + .OUTPUT(keypoints_decoded, TensorType({DT_FLOAT16})) /* "Result, has same element type as two inputs" */ + .OP_END_FACTORY_REG(DecodeCornerpointsTargetWrtCenterV1) + } // namespace ge + + #endif // GE_OP_DECODE_CORNERPOINTS_TARGET_WRT_CENTER_V1_H + diff --git a/third_party/fwkacllib/inc/ops/decode_wheels_target.h b/third_party/fwkacllib/inc/ops/decode_wheels_target.h new file mode 100644 index 00000000..053a6c1a --- /dev/null +++ b/third_party/fwkacllib/inc/ops/decode_wheels_target.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_DECODE_WHEELS_TARGET_H + #define GE_OP_DECODE_WHEELS_TARGET_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(DecodeWheelsTarget) + .INPUT(boundary_predictions, TensorType({DT_FLOAT16})) /* "First operand." */ + .INPUT(anchors, TensorType({DT_FLOAT16})) /* "Second operand." 
*/ + .OUTPUT(boundary_encoded, TensorType({DT_FLOAT16})) /* "Result, has same element type as two inputs" */ + .OP_END_FACTORY_REG(DecodeWheelsTarget) + } // namespace ge + + #endif // GE_OP_DECODE_WHEELS_TARGET_H diff --git a/third_party/fwkacllib/inc/ops/dvpp_ops.h b/third_party/fwkacllib/inc/ops/dvpp_ops.h new file mode 100755 index 00000000..98294c14 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/dvpp_ops.h @@ -0,0 +1,62 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_DVPP_OPS_H_ +#define GE_OP_DVPP_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(DvppCreateChannel) + .OUTPUT(dvpp_channel, TensorType({DT_INT64})) + .OP_END_FACTORY_REG(DvppCreateChannel) + +REG_OP(DvppDestroyChannel) + .INPUT(dvpp_channel, TensorType({DT_INT64})) + .OP_END_FACTORY_REG(DvppDestroyChannel) + +REG_OP(DvppResize) + .INPUT(dvpp_channel, TensorType({DT_INT64})) + .INPUT(input_desc, TensorType({DT_UINT8})) + .INPUT(output_desc, TensorType({DT_UINT8})) + .INPUT(resize_config, TensorType({DT_UINT8})) + .OP_END_FACTORY_REG(DvppResize) + +REG_OP(DvppCrop) + .INPUT(dvpp_channel, TensorType({DT_INT64})) + .INPUT(input_desc, TensorType({DT_UINT8})) + .INPUT(output_desc, TensorType({DT_UINT8})) + .INPUT(crop_area, TensorType({DT_UINT8})) + .OP_END_FACTORY_REG(DvppCrop) + +REG_OP(DvppCropAndPaste) + .INPUT(dvpp_channel, TensorType({DT_INT64})) + .INPUT(input_desc, TensorType({DT_UINT8})) + .INPUT(output_desc, TensorType({DT_UINT8})) + .INPUT(crop_area, TensorType({DT_UINT8})) + .INPUT(paste_area, TensorType({DT_UINT8})) + .OP_END_FACTORY_REG(DvppCropAndPaste) + +REG_OP(DvppDecodeJpeg) + .INPUT(dvpp_channel, TensorType({DT_INT64})) + .INPUT(input_desc, TensorType({DT_UINT8})) + .INPUT(output_desc, TensorType({DT_UINT8})) + .INPUT(decode_area, TensorType({DT_UINT8})) + .OP_END_FACTORY_REG(DvppDecodeJpeg) +} // namespace ge + +#endif // GE_OP_DVPP_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/elewise_calculation_ops.h b/third_party/fwkacllib/inc/ops/elewise_calculation_ops.h new file mode 100644 index 00000000..6b433acf --- /dev/null +++ b/third_party/fwkacllib/inc/ops/elewise_calculation_ops.h @@ -0,0 +1,2245 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_ELEWISE_CALCULATION_OPS_H +#define GE_OP_ELEWISE_CALCULATION_OPS_H +#include "../graph/operator_reg.h" + +namespace ge { +/** +*@brief Adds all input tensors element-wise. + +*@par Inputs: +*Dynamic inputs, including: +* @li x: A list of Tensor objects, each with same shape and type. The supported types are: +* float16, float32, double, int32, uint8, int16, int8, complex64, int64, +* qint8, quint8, qint32, uint16, complex128, uint32, uint64. + +*@par Outputs: +*y: A Tensor. Has the same shape and type as the elements of "x". +*/ +REG_OP(AddN) + .DYNAMIC_INPUT(x, TensorType::NumberType()) + .OUTPUT(y, TensorType::NumberType()) + .REQUIRED_ATTR(N, Int) + .OP_END_FACTORY_REG(AddN) + +REG_OP(MaximumGrad) + .INPUT(grads, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OUTPUT(y1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OUTPUT(y2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .ATTR(grad_x, Bool, true) + .ATTR(grad_y, Bool, true) + .OP_END_FACTORY_REG(MaximumGrad) + +/** +*@brief Calculates the reversed outputs of the function "minimum" + +*@par Inputs: +*Three inputs, including: +* @li grads: A mutable Tensor. Must be one of the following types: +* float16, float32, int32. +* @li x1: A mutable Tensor of the same type as "grads". +* @li x2: A mutable Tensor of the same type as "grads". + +*@par Attributes: +*@li grad_x: An optional bool. Defaults to "True". +* If "True", "y1" will be output. 
+* If "False", "y1" will not be output. + +*@li grad_y: An optional bool. Defaults to "True". +* If "True", "y2" will be output. +* If "False", "y2" will not be output. + +*@par Outputs: +* @li y1: A mutable Tensor. Has the same type as "grads". +* @li y2: A mutable Tensor. Has the same type as "grads". +*/ +REG_OP(MinimumGrad) + .INPUT(grads, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OUTPUT(y1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OUTPUT(y2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .ATTR(grad_x, Bool, true) + .ATTR(grad_y, Bool, true) + .OP_END_FACTORY_REG(MinimumGrad) + +/** +*@brief: Cast a tensor form src data type to dst data type. + +*@par Inputs: +*One input:\n +*x:A `Tensor`. Must be one of the following types: bool, float16, float, int8, int32, uint32, uint8,\n + int64, uint64, int16, uint16, double, complex64, complex128, qint8, quint8, qint16, quint16, qint32. + +*@par Attributes: +*@li dst_type: An required attribute of type int32, specifying the dst data type. +*@li truncate: An optional attribute of type bool, specifying the src data type. Defaults to "false". + +*@par Outputs: +*y:A `Tensor`. Has the same type as `x`. 
+ +*@par Quantization supported or not +*Not supported + +*@par Quantized inference supported or not +*Not supported + +*@par Multiple batches supported or not +*Supported + +*@since V100R001C33 +*/ +REG_OP(Cast) + .INPUT(x, TensorType({DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT32, DT_UINT32, DT_UINT8, + DT_INT64, DT_UINT64, DT_INT16, DT_UINT16, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128, DT_QINT8, DT_QUINT8, DT_QINT16, DT_QUINT16, DT_QINT32})) + .OUTPUT(y, TensorType({DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT32, DT_UINT32, DT_UINT8, + DT_INT64, DT_UINT64, DT_INT16, DT_UINT16, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128, DT_QINT8, DT_QUINT8, DT_QINT16, DT_QUINT16, DT_QINT32})) + .REQUIRED_ATTR(dst_type, Int) + .ATTR(truncate, Bool, false) + .OP_END_FACTORY_REG(Cast) + +/** +*@brief Returns the truth value of (x1 >= x2) element-wise. + +*@par Inputs: +*Two inputs, including: +* @li x1: A Tensor. Must be one of the following types: float16, float32, int32, int8, uint8. +* @li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(GreaterEqual) + .INPUT(x1, TensorType::RealNumberType()) + .INPUT(x2, TensorType::RealNumberType()) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(GreaterEqual) + +REG_OP(Less) + .INPUT(x1, TensorType::RealNumberType()) + .INPUT(x2, TensorType::RealNumberType()) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(Less) + +/** +*@brief Returns x1/x2 element-wise for real types. + +*@par Inputs: +* Two inputs, including: +*@li x1: A Tensor. Must be one of the following types: float16, float32, double, uint16, + int8, uint8, int16, int32, int64, complex64, DT_COMPLEX128. +*@li x2: A Tensor. Must be one of the following types: float16, float32, double, uint16, + int8, uint8, int16, int32, int64, complex64, DT_COMPLEX128. + +*@par Outputs: +* y: A Tensor. Has the same type and format as input "x1". 
+*/
+REG_OP(RealDiv)
+    .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .OP_END_FACTORY_REG(RealDiv)
+
+/**
+*@brief Computes square root of x element-wise.
+
+*@par Inputs:
+* x: A Tensor. Must be one of the following types: float16, float32, complex128, complex64, float64.
+
+*@par Outputs:
+*y: A Tensor. Has the same type as "x".
+*/
+REG_OP(Sqrt)
+    .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128}))
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128}))
+    .OP_END_FACTORY_REG(Sqrt)
+
+/**
+*@brief Returns the max of "x" and "y" (i.e. x > y ? x: y) element-wise.
+
+*@par Inputs:
+*Two inputs, including:
+* @li x1: A Tensor. Must be one of the following types: float16, float32, double, int32, int64.
+* @li x2: A Tensor of the same type as "x1".
+
+*@par Outputs:
+*y: A Tensor. Has the same type as "x1".
+*/
+REG_OP(Maximum)
+    .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32,
+                           DT_INT64}))
+    .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32,
+                           DT_INT64}))
+    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32,
+                           DT_INT64}))
+    .OP_END_FACTORY_REG(Maximum)
+
+/**
+*@brief Returns the min of x and y (i.e. x1 < x2 ? x1 : x2) element-wise.
+
+*@par Inputs:
+*Two inputs, include:
+* @li x1: A Tensor. Must be one of the following types: float32, float16, double, int32, int64.
+* @li x2: A Tensor of the same type as "x1".
+
+*@par Outputs:
+*y: A Tensor of the same type as "x1".
+*/
+REG_OP(Minimum)
+    .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT32,
+                           DT_INT64}))
+    .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT32,
+                           DT_INT64}))
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT32,
+                           DT_INT64}))
+    .OP_END_FACTORY_REG(Minimum)
+
+/**
+*@brief: Computes the reciprocal of "x".
+
+*@par Inputs:
+*One inputs, include:
+*x:A Tensor of type float16, float32, int32, int64, double,
+* complex64, complex128.the format can be [NCHW,NC1HWC0,NHWC,ND,NCHW,NC1HWC0,NHWC,ND]
+
+*@par Outputs:
+*y:A Tensor with same type as "x".
+*/
+REG_OP(Reciprocal)
+    .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64, DT_FLOAT16,
+                          DT_COMPLEX64, DT_COMPLEX128}))
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64, DT_FLOAT16,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .OP_END_FACTORY_REG(Reciprocal)
+
+REG_OP(Sub)
+    .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .OP_END_FACTORY_REG(Sub)
+
+REG_OP(Abs)
+    .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64}))
+    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64}))
+    .OP_END_FACTORY_REG(Abs)
+
+/**
+*@brief Computes gradients for absolute operation.
+
+*
+*@par Inputs:
+*@li y: A tensor of type float16 or float32.
+*@li dy: A tensor of the same type as "y".
+*
+*@attention Constraints:
+* "dy" has the same type as "y".
+*
+*@par Outputs:
+* z: A tensor. Has the same type as "y".
+* +*/ +REG_OP(AbsGrad) + .INPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(dy, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(z, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(AbsGrad) + +/** +*@brief: Computes the sign of "x". + +*@par Inputs: +*One inputs, include: +*x:A Tensor of type float16, float32, int32, int64, double, +* complex64, complex128. + +*@par Outputs: +*y:A Tensor with same type as "x". +*/ +REG_OP(Sign) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT, DT_DOUBLE, DT_INT32, + DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, + DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(Sign) + +/** +*@brief Returns (x1 - x2)(x1 - x2) element-wise. + +*@par Inputs: +*Two inputs, including: \n +*@li x1: A Tensor. Must be one of the following types: float16, float32, float64, int32, int64, complex64,complex128 +*@li x2: A Tensor. Has the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(SquaredDifference) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, + DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, + DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, + DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(SquaredDifference) + +/** +*@brief Computes cosine of "x" element-wise. + +*@par Inputs:\n +*x: A Tensor of type float16 or float32. + +*@par Outputs:\n +*y: A Tensor of type float16 or float32. +*/ +REG_OP(Cos) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(Cos) + +/** +*@brief Returns x1/x2 element-wise. + +*@par Inputs: +* Two inputs, including: +*@li x1: A Tensor. Must be one of the following types: float16, float32, int32, int8, uint8 +*@li x2: A Tensor. 
Must be one of the following types: float16, float32, int32, int8, uint8 + +*@par Outputs: +* y: A Tensor. Has the same type and format as input "x1". +*/ +REG_OP(Div) + .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT32, + DT_DOUBLE, DT_INT64, DT_UINT16, DT_INT16, + DT_COMPLEX64, DT_COMPLEX128})) + .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT32, + DT_DOUBLE, DT_INT64, DT_UINT16, DT_INT16, + DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT32, + DT_DOUBLE, DT_INT64, DT_UINT16, DT_INT16, + DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(Div) + +/** +*@brief: Returns the truth value of (x = y) element-wise. + +*@par Inputs: +* Two inputs, including: +*@li x1: A Tensor. Must be one of the following types: float16, float32, int32, int8, uint8, \n +* double, int16, int64, complex64, complex128, quint8, qint8, qint32, string, bool. +*@li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor of type bool. +*/ +REG_OP(Equal) + .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8, DT_UINT8, + DT_DOUBLE, DT_INT16, DT_INT64, DT_COMPLEX64, + DT_COMPLEX128, DT_QUINT8, DT_QINT8, DT_QINT32, + DT_STRING, DT_BOOL})) + .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8, DT_UINT8, + DT_DOUBLE, DT_INT16, DT_INT64, DT_COMPLEX64, + DT_COMPLEX128, DT_QUINT8, DT_QINT8, DT_QINT32, + DT_STRING, DT_BOOL})) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(Equal) + +/** +*@brief Computes the exponential of "x" element-wise. + +*@par Inputs: +*One input:\n +*x: A Tensor. Must be one of the following types: float16, float32, double, complex64, complex128. + +*@par Attributes: +*@li base: An optional attribute of type float32, specifying the base gamma. Defaults to "-1". +*@li scale: An optional attribute of type float32, specifying the scale alpha. Defaults to "1". +*@li shift: An optional attribute of type float32, specifying the shift beta. 
Defaults to "0". + +*@par Outputs: +*y: A Tensor of the same type as "x". +*/ +REG_OP(Exp) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .ATTR(base, Float, -1.0) + .ATTR(scale, Float, 1.0) + .ATTR(shift, Float, 0.0) + .OP_END_FACTORY_REG(Exp) + +/** +*@brief Computes the exp(x) - 1 element-wise, y = e^x - 1. + +*@par Inputs: +*One input:\n +*x: A Tensor. Must be one of the following types: float16, float32, double, complex64, complex128. + +*@par Outputs: +*y: A Tensor of the same type as "x". +*/ +REG_OP(Expm1) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(Expm1) + +/** +*@brief: Computes the reciprocal of "x". + +*@par Inputs:\n +*x: A Tensor. Must be one of the following types: float16, float32, int32, int64, double, complex64, complex128. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(Inv) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT,DT_DOUBLE,DT_INT32,DT_INT64,DT_COMPLEX64,DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32})) + .OP_END_FACTORY_REG(Inv) + +/** +*@brief: Computes "x" reciprocal grad, dx = -1*dy*y*y, where, "y = 1/x", and "dy" + is the corresponding input gradient. + +*@par Inputs: +* Two inputs, including: +* @li x: A Tensor. Must be one of the following types: float16, float32, int32, int8. +* @li grad: A Tensor. Has the same type as "x". +*/ +REG_OP(InvGrad) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8})) + .INPUT(grad, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8})) + .OP_END_FACTORY_REG(InvGrad) + +/** +*@brief: Returns the truth value of (x <= y) element-wise. + +*@par Inputs: +* Two inputs, including: +*@li x1: A Tensor. Must be one of the following types: float16, float32, int32, int8, uint8. +*@li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor of type bool. 
+*/ +REG_OP(LessEqual) + .INPUT(x1, TensorType::RealNumberType()) + .INPUT(x2, TensorType::RealNumberType()) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(LessEqual) + +/** +*@brief Computes the logarithm of (x + 1) element-wise, y = ln(x + 1). + +*@par Inputs: +*One input:\n +*x: A Tensor. Must be one of the following types: float16, float32, double, complex64, complex128. + +*@par Outputs: +*y: A Tensor of the same type as "x". +*/ +REG_OP(Log1p) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(Log1p) + +/** +*@brief Returns element-wise remainder of division. +*@par Inputs: +*Two inputs, including: +* @li x1: A Tensor. Must be one of the following types: float16, float32, int32, int64, int8, uint8, double. +* @li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(Mod) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT8, DT_UINT8, + DT_INT64, DT_DOUBLE})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT8, DT_UINT8, + DT_INT64, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT8, DT_UINT8, + DT_INT64, DT_DOUBLE})) + .OP_END_FACTORY_REG(Mod) + +/** +*@brief: Returns the truth value of (x != y) element-wise. + +*@par Inputs: +* Two inputs, including: +*@li x1: A Tensor. Must be one of the following types: float16, float32, int32, int8, uint8. +*@li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor of type bool. +*/ +REG_OP(NotEqual) + .INPUT(x1, TensorType::RealNumberType()) + .INPUT(x2, TensorType::RealNumberType()) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(NotEqual) + +/** +*@brief Computes numerical negative value element-wise (y = -x) + +*@par Inputs: +* One input: +*x: A Tensor. Must be one of the following types: float16, float32, int32 + +*@par Outputs: +*y: A Tensor. Has the same type and format as input "x". 
+*/ +REG_OP(Neg) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(Neg) + +/** +*@brief Returns x1/x2 element-wise for integer types. + +*@par Inputs:\n +*@li x1: A Tensor of type float16, float32, int32, int8, or uint8. +*@li x2: A Tensor of the same data type as "x1". + +*@par Outputs:\n +*y: A Tensor of type float16, float32, int32, int8, or uint8. + +*@attention Constraints:\n +* Broadcasting is supported. +*/ +REG_OP(TruncateDiv) + .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT32, + DT_DOUBLE, DT_UINT16, DT_INT16, DT_INT64, + DT_COMPLEX64, DT_COMPLEX128})) + .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT32, + DT_DOUBLE, DT_UINT16, DT_INT16, DT_INT64, + DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT32, + DT_DOUBLE, DT_UINT16, DT_INT16, DT_INT64, + DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(TruncateDiv) + +/** +*@brief Computes x1/x2 element-wise, if x1 == 0, return 0. + +*@par Inputs: +* Two inputs, including: +* @li x1: A Tensor. Must be one of the following types: float16, float32, double, complex64, complex128. +* @li x2: A Tensor. Has the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(Xdivy) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .OP_END_FACTORY_REG(Xdivy) + +/** +*@brief Computes "x" multiplied by the logarithm of y element-wise, if "x" == 0, return "0". + +*@par Inputs: +* Two inputs, including: +* @li x1: A Tensor. 
Must be one of the following types: float16, float32, double, complex64, complex128. +* @li x2: A Tensor. Has the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(Xlogy) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .OP_END_FACTORY_REG(Xlogy) + +/** +*@brief Computes square of "x" element-wise. + +*@par Inputs: +*One input: \n +*x: A Tensor. Must be one of the following types: float16, float32, float64, int32, int64, complex64, complex128 + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(Square) + .INPUT(x, TensorType({DT_DOUBLE, DT_FLOAT16, DT_FLOAT, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_DOUBLE, DT_FLOAT16, DT_FLOAT, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(Square) + + +/** +*@brief Computes reciprocal of square root of "x" element-wise: y = 1/sqrt{x}. + +* +*@par Inputs: +* x: A tensor. Must be one of the following types: float16, float32, float64, complex64, complex128. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(Rsqrt) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(Rsqrt) + +/** +*@brief Computes the trignometric inverse sine of "x" element-wise. + +* +*@par Inputs: +* x: A tensor. Must be one of the following types: float16, float32, float64, int32, int64, complex64, complex128. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". 
+* +*/ +REG_OP(Asin) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(Asin) + +/** +*@brief Computes gradients for Asin operation. + +* +*@par Inputs: +*@li y: A tensor of type float16 or float32. +*@li dy: A tensor of the same type as "y". +* +*@attention Constraints: +* "dy" has the same type as "y". +* +*@par Outputs: +* z: A tensor. Has the same type as "y". +* +*/ +REG_OP(AsinGrad) + .INPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .INPUT(dy, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(z, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(AsinGrad) + +/** +*@brief Computes acos of x element-wise. + +* +*@par Inputs: +* x: A tensor. Must be one of the following types: float16, float32, float64, int32, int64, complex64, complex128. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(Acos) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_INT32, DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(Acos) + +/** +*@brief Computes gradients for Acos operation. + +* +*@par Inputs: +*@li y: A tensor of type float16 or float32. +*@li dy: A tensor of the same type as "y". +* +*@attention Constraints: +* "dy" has the same shape as "y". +* +*@par Outputs: +* z: A tensor. Has the same type as "y". 
+*
+*/
+REG_OP(AcosGrad)
+    .INPUT(y, TensorType({DT_FLOAT16, DT_FLOAT}))
+    .INPUT(dy, TensorType({DT_FLOAT16, DT_FLOAT}))
+    .OUTPUT(z, TensorType({DT_FLOAT16, DT_FLOAT}))
+    .OP_END_FACTORY_REG(AcosGrad)
+
+/**
+*@brief Computes inverse hyperbolic cosine of x element-wise.
+
+*
+*@par Inputs:
+* x: A tensor. Must be one of the following types: float16, float32, float64, complex64, complex128.
+*
+*@attention Constraints:
+* x Given an input tensor, the function computes inverse hyperbolic cosine of every element.\n
+* Input range is [1, inf].
+*
+*@par Outputs:
+* y: A tensor. Has the same type as "x".
+*
+*/
+REG_OP(Acosh)
+    .INPUT(x, TensorType::UnaryDataType())
+    .OUTPUT(y, TensorType::UnaryDataType())
+    .OP_END_FACTORY_REG(Acosh)
+
+/**
+*@brief Computes gradients for Acosh operation.
+
+*
+*@par Inputs:
+*@li y: A tensor of type float16 or float32.
+*@li dy: A tensor of the same type as "y".
+*
+*@attention Constraints:
+* "dy" has the same type as "y".
+*
+*@par Outputs:
+* z: A tensor. Has the same type as "y".
+*
+*/
+REG_OP(AcoshGrad)
+    .INPUT(y, TensorType({DT_FLOAT16, DT_FLOAT}))
+    .INPUT(dy, TensorType({DT_FLOAT16, DT_FLOAT}))
+    .OUTPUT(z, TensorType({DT_FLOAT16, DT_FLOAT}))
+    .OP_END_FACTORY_REG(AcoshGrad)
+
+/**
+*@brief Returns the truth value of x1 OR x2 element-wise.
+
+*
+*@par Inputs:
+*@li x1: A tensor of type bool.
+*@li x2 A tensor of the same type as "x1".
+*
+*@attention Constraints:
+* LogicalOr supports broadcasting.
+*
+*@par Outputs:
+* z: A tensor of the same type as "x1".
+*
+*/
+REG_OP(LogicalOr)
+    .INPUT(x1, TensorType({DT_BOOL}))
+    .INPUT(x2, TensorType({DT_BOOL}))
+    .OUTPUT(y, TensorType({DT_BOOL}))
+    .OP_END_FACTORY_REG(LogicalOr)
+
+/**
+*@brief Returns the truth value of x1 AND x2 element-wise.
+
+*
+*@par Inputs:
+*@li x1: A tensor of type bool.
+*@li x2 A tensor of the same type as "x1".
+*
+*@attention Constraints:
+* LogicalAnd supports broadcasting.
+*
+*@par Outputs:
+* z: A tensor of the same type as "x1".
+* +*/ +REG_OP(LogicalAnd) + .INPUT(x1, TensorType({DT_BOOL})) + .INPUT(x2, TensorType({DT_BOOL})) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(LogicalAnd) + +/** +*@brief Computes the Bessel i0e function of "x" element-wise.\n +* Exponentially scaled modified Bessel function of order 0 \n +* defined as: bessel_i0e(x) = exp(-abs(x)) bessel_i0(x).\n +* This function is faster and numerically stabler than "bessel_i0(x)". +* +*@par Inputs: +* x: A tensor of type float16, float32, or float64. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(BesselI0e) + .INPUT(x, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(BesselI0e) + +/** +*@brief Computes the Bessel i1e function of "x" element-wise.\n +* Exponentially scaled modified Bessel function of order 0 \n +* defined as: bessel_i1e(x) = exp(-abs(x)) bessel_i1(x).\n +* This function is faster and numerically stabler than "bessel_i1(x)". +* +*@par Inputs: +* x: A tensor of type float16, float32, or float64. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(BesselI1e) + .INPUT(x, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(BesselI1e) + +/** +* @brief Computes logarithm of x element-wise.\n +* y = log_base(shift + scale * x), with "base" > 0. + +* @par Inputs: +* @li x: A Tensor of type UnaryDataType. + +* @par Attributes: +* @li base: An optional float32, specifying the base "e". Defaults to "-1" + +* @li scale: An optional float32, specifying the scale of input "x". Defaults +* to "1" +* @li shift: An optional float32, specifying the shift. Defaults to "0" + +* @par Outputs: +* y: A Tensor of type UnaryDataType. 
+
+* @attention Constraints:
+* @li base > 0 or if base is set to default (-1), base is set to e;
+*/
+REG_OP(Log)
+    .INPUT(x, TensorType::UnaryDataType())
+    .OUTPUT(y, TensorType::UnaryDataType())
+    .ATTR(base, Float, -1.0)
+    .ATTR(scale, Float, 1.0)
+    .ATTR(shift, Float, 0.0)
+    .OP_END_FACTORY_REG(Log)
+
+/**
+* @brief Returns x1 * x2 element-wise.\n
+* y = x1 * x2
+
+* @par Inputs:
+* @li x1: A Tensor. Must be one of the following types: float16, float32,\n
+* float64, uint8, int8, uint16, int16, int32, int64, complex64, complex128.
+* @li x2: A Tensor. Must be one of the following types: float16, float32,
+* float64, uint8, int8, uint16, int16, int32, int64, complex64, complex128.
+
+* @par Outputs:
+* y: A Tensor. Must be one of the following types: float16, float32, float64,
+* uint8, int8, uint16, int16, int32, int64, complex64, complex128.
+
+* @attention Constraints:
+* @li "x1" and "x2" have incompatible shapes or types.
+*/
+REG_OP(Mul)
+    .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_UINT8, DT_INT8,
+                           DT_UINT16, DT_INT16, DT_INT32, DT_INT64,
+                           DT_COMPLEX64, DT_COMPLEX128}))
+    .OP_END_FACTORY_REG(Mul)
+
+/**
+* @brief Computes the gradient of the square root of "x" with regard to its\n
+* input. grad = dy * 0.5/y, where y = sqrt(x), and "dy" is the corresponding
+* input gradient.
+
+* @par Inputs:
+* @li y: A Tensor of type float32 or float16.
+* @li dy: A Tensor. Has the same type as "y".
+
+* @par Outputs:
+* z: A Tensor. Has the same type as "y".
+
+* @attention Constraints:
+* "dy" has the same shape and type as "y".
+*/
+REG_OP(SqrtGrad)
+    .INPUT(y, TensorType::UnaryDataType())
+    .INPUT(dy, TensorType::UnaryDataType())
+    .OUTPUT(z, TensorType::UnaryDataType())
+    .OP_END_FACTORY_REG(SqrtGrad)
+
+REG_OP(Multiply)
+    .INPUT(x, TensorType({DT_FLOAT,DT_UINT8,DT_INT8,DT_UINT16,DT_INT16,DT_INT32,DT_INT64,DT_DOUBLE,DT_FLOAT16}))
+    .INPUT(y, TensorType({DT_FLOAT,DT_UINT8,DT_INT8,DT_UINT16,DT_INT16,DT_INT32,DT_INT64,DT_DOUBLE,DT_FLOAT16}))
+    .OUTPUT(z, TensorType({DT_FLOAT,DT_UINT8,DT_INT8,DT_UINT16,DT_INT16,DT_INT32,DT_INT64,DT_DOUBLE,DT_FLOAT16}))
+    .OP_END_FACTORY_REG(Multiply)
+
+/**
+*@brief Returns x + y element-wise.
+*@par Inputs:
+*Two inputs, including:
+* @li x1: A Tensor. Must be one of the following types: int8, int16, int32, int64, uint8, float64,
+* float16, float32, complex128, complex64, string.
+* @li x2: A Tensor of the same type as "x1".
+
+*@par Outputs:
+*y: A Tensor. Has the same type as "x".
+*/
+REG_OP(Add)
+    .INPUT(x1, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
+                           DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
+                           DT_COMPLEX64, DT_STRING}))
+    .INPUT(x2, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
+                           DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
+                           DT_COMPLEX64, DT_STRING}))
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16,
+                           DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX128,
+                           DT_COMPLEX64, DT_STRING}))
+    .OP_END_FACTORY_REG(Add)
+
+REG_OP(FusedMulAdd)
+    .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32}))
+    .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32}))
+    .INPUT(x3, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32}))
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32}))
+    .OP_END_FACTORY_REG(FusedMulAdd)
+
+/**
+*@brief Returns x1 + x2 element-wise.
+
+*
+*@par Inputs:
+*@li x1: A tensor. Must be one of the following types: float16, float32, float64, uint8, int8, int16, int32, int64, complex64, complex128.
+*@li x2 A tensor of the same type as "x1".
+* +*@attention Constraints: +* AddV2 supports broadcasting. +* +*@par Outputs: +* y: A tensor. Has the same type as "x1". +* +*/ +REG_OP(AddV2) + .INPUT(x1, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16, + DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .INPUT(x2, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16, + DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT64, DT_FLOAT16, DT_INT16, + DT_INT8, DT_UINT8, DT_DOUBLE, DT_COMPLEX64, + DT_COMPLEX128})) + .OP_END_FACTORY_REG(AddV2) + +/** +*@brief Updates "ref" by adding "value" to it. + +*@par Inputs: +*@li ref: A Tensor. Must be one of the following types: float16, float32, int8, uint8, int32, int64. +*@li value: A Tensor of the same type as "ref". + +*@par Attributes: +*use_locking: An optional bool. Defaults to "False".\n + If "True", the addition will be protected by a lock;\n + otherwise the behavior is undefined, but may exhibit less contention.\n +* This attribute is reserved. + +*@par Outputs: +*ref: A Tensor that holds the new value of ref after the value has been added. + +*@attention Constraints:\n +*An input tensor of type int64 must have a shape with size 1. +*/ +REG_OP(AssignAdd) + .INPUT(ref, TensorType::BasicType()) + .INPUT(value,TensorType::BasicType()) + .OUTPUT(ref, TensorType::BasicType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(AssignAdd) + +/** +*@brief Updates "ref" by assigning "value" to it. + +*@par Inputs: +*@li ref: A Tensor. Must be one of the following types: float16, float32, int8, int16, int32, int64, uint8, uint16, uint32, uint64. +*@li value: A Tensor of the same type as "ref". + +*@par Attributes: +*@li validate_shape: An optional bool. 
Defaults to "true".\n + If "true", the operation will validate that the shape of "value" matches the shape of the Tensor being assigned to.\n +* If "false", "ref" will take on the shape of "value".\n +* This attribute is reserved. +*@li use_locking: An optional bool. Defaults to True.\n + If True, the assignment will be protected by a lock;\n + otherwise the behavior is undefined, but may exhibit less contention.\n +* This attribute is reserved. + +*@par Outputs: +*ref: A Tensor that holds the new value of ref after the value has been assigned. +*/ +REG_OP(Assign) + .INPUT(ref, TensorType::BasicType()) + .INPUT(value,TensorType::BasicType()) + .OUTPUT(ref, TensorType::BasicType()) + .ATTR(validate_shape, Bool, true) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(Assign) + +/** +*@brief Updates "var" by subtracting "value" from it.\n +* This operation outputs "var" after the update is done. \n +* This makes it easier to chain operations that need to use the reset value. + +* +*@par Inputs: +*@li var: A tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, complex64, int64, qint8, quint8, qint32, uint16, complex128, uint32, uint64 +*@li value: A tensor of the same type as "var". +* +*@par Attributes: +* use_locking: An optional bool. Defaults to "False". If "True", the subtraction will be protected \n +* by a lock; otherwise the behavior is undefined, but may exhibit less contention. +* +*@par Outputs: +* y: A tensor. Has the same type as "var". +* +*/ +REG_OP(AssignSub) + .INPUT(var, TensorType::NumberType()) + .INPUT(value,TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(AssignSub) + +/** +*@brief: Computes the backpropagation of the square root operation. + +*@par Inputs: +* Two inputs, including: +*@li y: A Tensor. Must be one of the following types: float16, float32, int32, int8. +*@li dy: A Tensor of the same type as "y". 
+
+*@par Outputs:
+*z: A Tensor of the same type as "y".
+
+*@see Matmul() | Rsqrt ()
+*/
+REG_OP(RsqrtGrad)
+    .INPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT8}))
+    .INPUT(dy, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT8}))
+    .OUTPUT(z, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT8}))
+    .OP_END_FACTORY_REG(RsqrtGrad)
+
+/**
+*@brief Computes hyperbolic sine of "x" element-wise.
+
+*@par Inputs:\n
+*x: A Tensor of type float16 or float32.
+
+*@par Outputs:\n
+*y: A Tensor of type float16 or float32.
+*/
+REG_OP(Sinh)
+    .INPUT(x, TensorType::UnaryDataType())
+    .OUTPUT(y, TensorType::UnaryDataType())
+    .OP_END_FACTORY_REG(Sinh)
+
+/**
+*@brief: Clips tensor values to a specified min and max.
+
+*@par Inputs:
+* Three inputs, including:
+*@li x: A Tensor of type float16, float32, or int32.
+*@li clip_value_min: A Tensor of the same type as "x".
+*@li clip_value_max: A Tensor of the same type as "x".
+
+*@par Outputs:
+*y: A Tensor. Has the same type as "x".
+*/
+REG_OP(ClipByValue)
+    .INPUT(x, TensorType::NumberType())
+    .INPUT(clip_value_min, TensorType::NumberType())
+    .INPUT(clip_value_max, TensorType::NumberType())
+    .OUTPUT(y, TensorType::NumberType())
+    .OP_END_FACTORY_REG(ClipByValue)
+
+/**
+*@brief Computes cosine of "x" element-wise.
+
+*@par Inputs:
+*x: A Tensor of type float16 or float32.
+
+*@par Outputs:
+*y: A Tensor of type float16 or float32.
+*/
+REG_OP(Cosh)
+    .INPUT(x, TensorType::UnaryDataType())
+    .OUTPUT(y, TensorType::UnaryDataType())
+    .OP_END_FACTORY_REG(Cosh)
+
+/**
+*@brief: Returns 0 if the denominator is zero, else, like Div.
+
+*@par Inputs:
+* Two inputs, including:
+*@li x1: A Tensor. Must be one of the following types: float16, float32, int32, int8, uint8, double, complex64, complex128.
+*@li x2: A Tensor of the same type as "x1".
+
+*@par Outputs:
+*y: A Tensor. Has the same type as "x1".
+*/
+REG_OP(DivNoNan)
+    .INPUT(x1, TensorType({DT_FLOAT, DT_UINT8, DT_INT8, DT_INT32, DT_FLOAT16,
+                           DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128}))
+    .INPUT(x2, TensorType({DT_FLOAT, DT_UINT8, DT_INT8, DT_INT32, DT_FLOAT16,
+                           DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128}))
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_UINT8, DT_INT8, DT_INT32, DT_FLOAT16,
+                           DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128}))
+    .OP_END_FACTORY_REG(DivNoNan)
+
+/**
+*@brief Flips all bits of "x" element-wise (bitwise NOT).
+
+*@par Inputs:
+* One input: \n
+*x: A Tensor of integer type. Up to 8D.
+
+*@par Outputs:
+*y: A Tensor. Has the same type and format as "x"
+*/
+REG_OP(Invert)
+    .INPUT(x, TensorType::IntegerDataType())
+    .OUTPUT(y, TensorType::IntegerDataType())
+    .OP_END_FACTORY_REG(Invert)
+
+/**
+*@brief Returns a tensor of the same shape and type with all elements set to one.
+*@par Inputs:
+*One input: \n
+*x: A Tensor. Must be one of the following types: float16, float32, double, int8, uint8, int16, uint16, int32, int64, complex128, bool.
+
+*@par Outputs:
+*y: A Tensor of the same type as "x".
+*/
+REG_OP(OnesLike)
+    .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT8,
+                          DT_UINT8, DT_INT16, DT_UINT16, DT_INT32,
+                          DT_INT64, DT_COMPLEX128, DT_BOOL}))
+    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT8,
+                           DT_UINT8, DT_INT16, DT_UINT16, DT_INT32,
+                           DT_INT64, DT_COMPLEX128, DT_BOOL}))
+    .OP_END_FACTORY_REG(OnesLike)
+
+/**
+*@brief Computes the gradient for the inverse of "x" with regard its input.
+
+*@par Inputs:
+*@li y: A Tensor. Must be one of the following types: float16, float32, int8, int32.
+*@li dy: A Tensor. Must be one of the following types: float16, float32, int8, int32.
+
+*@par Outputs:\n
+*z: A Tensor. Must be one of the following types: float16, float32, int8, int32.
+
+*@attention Constraints:\n
+* "dy" has the same shape and type as "y".
+*/
+REG_OP(ReciprocalGrad)
+    .INPUT(y, TensorType::UnaryDataType())
+    .INPUT(dy, TensorType::UnaryDataType())
+    .OUTPUT(z, TensorType::UnaryDataType())
+    .OP_END_FACTORY_REG(ReciprocalGrad)
+
+/**
+*@brief Returns the truth value of (x1 > x2) element-wise.
+
+*@par Inputs:\n
+*@li x1: A Tensor of type float16, float32, int32, int8, or uint8.
+
+*@li x2: A Tensor of the same data type as "x1".
+
+*@par Outputs:\n
+*y: A Tensor of type bool.
+
+*@attention Constraints:\n
+* Broadcasting is supported.
+*/
+REG_OP(Greater)
+    .INPUT(x1, TensorType::RealNumberType())
+    .INPUT(x2, TensorType::RealNumberType())
+    .OUTPUT(y, TensorType({DT_BOOL}))
+    .OP_END_FACTORY_REG(Greater)
+
+/**
+*@brief Returns a tensor of the same type and shape as the input tensor with all elements set to zero.
+
+*@par Inputs:\n
+*x: A Tensor. Must be one of the following types: float16, float32, double, int8, uint8, int16, uint16, int32, int64, complex128, bool.
+
+*@par Outputs:\n
+*y: A Tensor of the same type as "x".
+
+*@attention Constraints:\n
+* The output has the same shape and type as the input.
+*/
+REG_OP(ZerosLike)
+    .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT8,
+                          DT_UINT8, DT_INT16, DT_UINT16, DT_INT32,
+                          DT_INT64, DT_COMPLEX128, DT_BOOL}))
+    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT8,
+                           DT_UINT8, DT_INT16, DT_UINT16, DT_INT32,
+                           DT_INT64, DT_COMPLEX128, DT_BOOL}))
+    .OP_END_FACTORY_REG(ZerosLike)
+
+/**
+*@brief Returns the truth value of NOT "x" element-wise.
+
+*@par Inputs:\n
+*x: A Tensor of type bool.
+
+*@par Outputs:\n
+*y: A Tensor of type bool.
+
+*@attention Constraints:\n
+* The input and output values are "1" or "0", corresponding to bool values "true" and "false".
+*/ +REG_OP(LogicalNot) + .INPUT(x, TensorType({DT_BOOL})) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(LogicalNot) + +/** +*@brief Computes inverse hyperbolic sine of x element-wise.\n +* Given an input tensor, this function computes inverse hyperbolic sine for every element in the tensor. + +* +*@par Inputs: +* x: A tensor. Must be one of the following types: float16, float32, float64, complex64, complex128. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(Asinh) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(Asinh) + +/** +*@brief Computes gradients for Asinh operation. + +* +*@par Inputs: +*@li y: A tensor. Must be one of the following types: float16, float32, float64, complex64, complex128. +*@li dy: A tensor of the same type as "y" +* +*@par Outputs: +* z: A tensor. Has the same type as "y". +* +*/ +REG_OP(AsinhGrad) + .INPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(dy, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(z, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(AsinhGrad) + +/** +*@brief Computes inverse hyperbolic tangent of x element-wise.\n +* Given an input tensor, this function computes inverse hyperbolic tangent for every element in the tensor. \n Input range is [-1,1] and output range is [-inf, inf]. If input is -1, \n output will be -inf and if the input is 1, output will be inf.\n Values outside the range will have nan as output. + +* +*@par Inputs: +* x: A tensor. Must be one of the following types: float16, float32, float64, complex64, complex128. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(Atanh) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(Atanh) + +/** +*@brief Computes the trignometric inverse tangent of x element-wise.\n +* The atan operation returns the inverse of tan, such that if y = tan(x) then, x = atan(y). 
+ +* +*@par Inputs: +* x: A tensor. Must be one of the following types: float16, float32, float64, int32, int64, complex64, complex128. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". The output of atan will lie within the invertible range of tan, i.e (-pi/2, pi/2). +* +*/ +REG_OP(Atan) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(Atan) + +/** +*@brief Computes gradients for Atan operation. + +* +*@par Inputs: +*@li y: A tensor of type float16 or float32. +*@li dy: A tensor of the same type as "y" +* +*@par Outputs: +* z: A tensor. Has the same type as "y". +* +*/ +REG_OP(AtanGrad) + .INPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(dy, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(z, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(AtanGrad) + +/** +*@brief Computes arctangent of x1/x2 element-wise, respecting signs of the arguments. + +* +*@par Inputs: +*@li x1: A tensor. Must be one of the following types: float16, float32, float64 +*@li x2: A tensor of the same type as "x1". +* +*@par Outputs: +* y: A tensor. Has the same type as "x1". +* +*/ +REG_OP(Atan2) + .INPUT(x1, TensorType::FloatingDataType()) + .INPUT(x2, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(Atan2) + +/** +*@brief Returns the truth value of abs(x1-x2) < tolerance element-wise. + +* +*@par Inputs: +*@li x1: A tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, complex64, int64, qint8, quint8, qint32, uint16, complex128, float16, uint32, uint64 +*@li x2: A tensor of the same type as "x1". +* +*@par Attributes: +* tolerance: Defaults to "1e-05". +* +*@par Outputs: +* y: A tensor of type bool. 
+* +*/ +REG_OP(ApproximateEqual) + .INPUT(x1, TensorType::NumberType()) + .INPUT(x2, TensorType::NumberType()) + .OUTPUT(y, TensorType({DT_BOOL})) + .ATTR(tolerance, Float, 1e-5) + .OP_END_FACTORY_REG(ApproximateEqual) + +/** +*@brief Returns the element-wise sum of a list of tensors.\n +* AccumulateNV2 performs the same operation as AddN, but does not wait for all of its inputs to be ready before beginning to sum.\n This can save memory if inputs are ready at different times, \n since minimum temporary storage is proportional to the output size rather than the inputs size.\n Returns a Tensor of same shape and type as the elements of inputs. + +* +*@par Inputs: +* x: A tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, complex64, int64, \n qint8, quint8, qint32, uint16, complex128, float16, uint32, uint64. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(AccumulateNV2) + .DYNAMIC_INPUT(x, TensorType::NumberType()) + .OUTPUT(y, TensorType::NumberType()) + .OP_END_FACTORY_REG(AccumulateNV2) + +/** +*@brief Fake-quantizes the input Tensor, type float to output a Tensor of same type. \n +* [min, max] define the clamping range for the "inputs" data.\n +* the values of "x" are quantized into the quantization range ([0, 2^num_bits - 1] \n +* when "narrow_range" is "false" or [1, 2^num_bits - 1] when it is "true") and \n +* then de-quantized and output as float32 in [min; max] interval.\n +* num_bits is the bit width of the quantization, between 2 and 16, inclusive. \n +* Quantization is called fake since the output is still in floating point. \n + +*@par Inputs: +*One input: \n +*x: A Tensor of type float32. + +*@par Attributes: +*@li min: An optional attribute. Defaults to "-6". +*@li max: An optional attribute. Defaults to "6". +*@li num_bits: An optional attribute. Defaults to "8". +*@li narrow_range: An optional bool. Defaults to "false". + +*@par Outputs: +*y: A Tensor. 
Has the same shape and type of "x". +*/ +REG_OP(FakeQuantWithMinMaxArgs) + .INPUT(x, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(min, Float, -6.0) + .ATTR(max, Float, 6.0) + .ATTR(num_bits, Int, 8) + .ATTR(narrow_range, Bool, false) + .OP_END_FACTORY_REG(FakeQuantWithMinMaxArgs) + +/** +*@brief Computes gradients for a FakeQuantWithMinMaxArgs operation. + +*@par Inputs: +*Two inputs, including: \n +*@li gradients: A Tensor of type float32. Backpropagated gradients above the FakeQuantWithMinMaxArgs operation. +*@li x: A Tensor of type float32. Has the same type and format as "gradients".\n +* This is the input Tensor of the FakeQuantWithMinMaxArgs operator.\n + +*@par Attributes: +*@li min: An optional attribute. Defaults to "-6". +*@li max: An optional attribute. Defaults to "6". +*@li num_bits: An optional attribute. Defaults to "8". +*@li narrow_range: An optional bool. Defaults to "False". + +*@par Outputs: +*y: A Tensor of type float32. +*/ +REG_OP(FakeQuantWithMinMaxArgsGradient) + .INPUT(gradients, TensorType({DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(min, Float, -6.0) + .ATTR(max, Float, 6.0) + .ATTR(num_bits, Int, 8) + .ATTR(narrow_range, Bool, false) + .OP_END_FACTORY_REG(FakeQuantWithMinMaxArgsGradient) + +/** +*@brief Fake-quantize the 'inputs' tensor of type float via global float scalars. + +*@par Inputs: +*Three inputs, including: +*@li x: A Tensor of type float32. +*@li min: A Tensor of type float32. Has the same type and format as "x". +*@li max: A Tensor of type float32. Has the same type and format as "x".\n +*[min; max] define the clamping range for the inputs data + +*@par Attributes: +*@li num_bits: An optional attribute. Defaults to "8". +*@li narrow_range: An optional bool. Defaults to "False". + +*@par Outputs: +*y: A Tensor of type float32. 
+*/ +REG_OP(FakeQuantWithMinMaxVars) + .INPUT(x, TensorType({DT_FLOAT})) + .INPUT(min, TensorType({DT_FLOAT})) + .INPUT(max, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(num_bits, Int, 8) + .ATTR(narrow_range, Bool, false) + .OP_END_FACTORY_REG(FakeQuantWithMinMaxVars) + +/** +*@brief Computes gradients for a FakeQuantWithMinMaxVars operation. + +*@par Inputs: +*Four inputs, including: +*@li gradients: A Tensor of type float32. +*@li x: A Tensor of type float32. +*@li min: A Tensor of type float32. +*@li max: A Tensor of type float32. + +*@par Attributes: +*@li num_bits: An integer specifying the quantization bit width. Defaults to "8". +*@li narrow_range: A Boolean specifying whether to use a narrow range for quantization. Defaults to "False". + +*@par Outputs: +*@li backprops_wrt_x: A Tensor. Has the same type as input "x". +*@li backprops_wrt_min: A Tensor. Has the same type as input "min". +*@li backprops_wrt_max: A Tensor. Has the same type as input "max". + +*@attention Constraints: +*@li "gradients" has the same shape as "x". +*@li "min" and "max" are scalars. +*@li "num_bits" is between 2 and 16 + +*@see Region() + +*@par Third-party framework compatibility +* Compatible with the operator FakeQuantWithMinVarsGradient. +*/ +REG_OP(FakeQuantWithMinMaxVarsGradient) + .INPUT(gradients, TensorType({DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT})) + .INPUT(min, TensorType({DT_FLOAT})) + .INPUT(max, TensorType({DT_FLOAT})) + .OUTPUT(backprops_wrt_x, TensorType({DT_FLOAT})) + .OUTPUT(backprops_wrt_min, TensorType({DT_FLOAT})) + .OUTPUT(backprops_wrt_max, TensorType({DT_FLOAT})) + .ATTR(num_bits, Int, 8) + .ATTR(narrow_range, Bool, false) + .OP_END_FACTORY_REG(FakeQuantWithMinMaxVarsGradient) + +/** +*@brief Fake-quantizes the "inputs" tensor of type float \n +via per-channel floats min and max of shape [d] to "outputs" \n +tensor of same shape as inputs + +*@par Inputs: +*Three inputs, including: +*@li x: A Tensor of type float32. 
+*@li min: A Tensor of type float32. +*@li max: A Tensor of type float32. + +*@par Attributes: +*@li num_bits: An integer specifying the quantization bit width. Defaults to "8". +*@li narrow_range: A Boolean specifying whether to use a narrow range for quantization. Defaults to "False". + +*@par Outputs: +*y: A Tensor. Has the same type as input "x". + + +*@attention Constraints: +*@li "min" and "max" have one-dimensional shapes. +*@li "min" has the same last dimension size as "x". "max" has the same last dimension size as "x". +*@li "num_bits" is between 2 and 16 + +*@see Region() +*/ +REG_OP(FakeQuantWithMinMaxVarsPerChannel) + .INPUT(x, TensorType({DT_FLOAT})) + .INPUT(min, TensorType({DT_FLOAT})) + .INPUT(max, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(num_bits, Int, 8) + .ATTR(narrow_range, Bool, false) + .OP_END_FACTORY_REG(FakeQuantWithMinMaxVarsPerChannel) + +/** +*@brief Computes gradients for a FakeQuantWithMinMaxVarsPerChannel operation. + +*@par Inputs: +*Four inputs, including: +*@li gradients: A Tensor of type float32. +*@li x: A Tensor of type float32. +*@li min: A Tensor of type float32. +*@li max: A Tensor of type float32. + +*@par Attributes: +*@li num_bits: An integer specifying the quantization bit width. Defaults to "8". +*@li narrow_range: A Boolean specifying whether to use a narrow range for quantization. Defaults to "False". + +*@par Outputs: +*@li backprops_wrt_x: A Tensor. Has the same type as input "x". +*@li backprops_wrt_min: A Tensor. Has the same type as input "min". +*@li backprops_wrt_max: A Tensor. Has the same type as input "max". + +*@attention Constraints: +*@li "gradients" has the same shape as "x". +*@li "min" and "max" have one-dimensional shapes. +*@li "min" has the same last dimension size as "x". "max" has the same last dimension size as "x". "gradients" has the same last dimension size as "x". 
+*@li "num_bits" is between 2 and 16 + +*@see Region() +*/ +REG_OP(FakeQuantWithMinMaxVarsPerChannelGradient) + .INPUT(gradients, TensorType({DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT})) + .INPUT(min, TensorType({DT_FLOAT})) + .INPUT(max, TensorType({DT_FLOAT})) + .OUTPUT(backprops_wrt_x, TensorType({DT_FLOAT})) + .OUTPUT(backprops_wrt_min, TensorType({DT_FLOAT})) + .OUTPUT(backprops_wrt_max, TensorType({DT_FLOAT})) + .ATTR(num_bits, Int, 8) + .ATTR(narrow_range, Bool, false) + .OP_END_FACTORY_REG(FakeQuantWithMinMaxVarsPerChannelGradient) + +/** +*@brief Element-wise computes the bitwise AND of "x1" and "x2". + +*@par Inputs: +*Two inputs, including: +* @li x1: A Tensor. Must be one of the following types: int8, int16, +* int32, int64, uint8, uint16, uint32, uint64. +* @li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(BitwiseAnd) + .INPUT(x1, TensorType::IntegerDataType()) + .INPUT(x2, TensorType::IntegerDataType()) + .OUTPUT(y, TensorType::IntegerDataType()) + .OP_END_FACTORY_REG(BitwiseAnd) + +/** +*@brief Element-wise computes the bitwise OR of "x1" and "x2". + +*@par Inputs: +*Two inputs, including: +* @li x1: A Tensor. Must be one of the following types: int8, int16, +* int32, int64, uint8, uint16, uint32, uint64. +* @li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(BitwiseOr) + .INPUT(x1, TensorType::IntegerDataType()) + .INPUT(x2, TensorType::IntegerDataType()) + .OUTPUT(y, TensorType::IntegerDataType()) + .OP_END_FACTORY_REG(BitwiseOr) + +/** +*@brief Elementwise computes the bitwise XOR of "x1" and "x2". + +*@par Inputs: +*Two inputs, including: \n +*@li x1: A Tensor. Must be one of the following types: int8, int16, int32, int64, uint8, uint16, uint32, uint64.\n +* The format is NC1HWC0 or ND. +*@li x2: A Tensor. Has the same type and format as "x1". + +*@par Outputs: +*y: Output result. Has the same type as "x1". 
+*/ +REG_OP(BitwiseXor) + .INPUT(x1, TensorType::IntegerDataType()) + .INPUT(x2, TensorType::IntegerDataType()) + .OUTPUT(y, TensorType::IntegerDataType()) + .OP_END_FACTORY_REG(BitwiseXor) + +/** +*@brief Returns element-wise smallest integer not less than "x". + +*@par Inputs: +* x: A Tensor. TensorType::FloatingDataType(). + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(Ceil) + .INPUT(x, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(Ceil) + +REG_OP(Floor) + .INPUT(x, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(Floor) + +/** +*@brief Divides "x1/x2" element-wise, rounding toward the +* most negative integer. + +*@par Inputs: +*Two inputs, including: +* @li x1: A Tensor. Must be one of the following types: float16, float32, int32, int64, int8, +* uint8, int16, uint16, double, complex64, complex128. +* @li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(FloorDiv) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT32, DT_UINT8, + DT_INT64, DT_INT16, DT_UINT16, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT32, DT_UINT8, + DT_INT64, DT_INT16,DT_UINT16, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_INT32, DT_UINT8, + DT_INT64, DT_INT16,DT_UINT16, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(FloorDiv) + +/** +*@brief Returns element-wise remainder of division. Consistent with: floor(x1/x2) * x2 + mod(x1, x2) = x1. + +*@par Inputs: +* Two inputs, including: +*@li x1: A Tensor +*@li x2: A Tensor. Must have the same type as "x1". +* +*@par Outputs: +*y: Result remainder. 
+*/ +REG_OP(FloorMod) + .INPUT(x1, TensorType({DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, + DT_DOUBLE})) + .INPUT(x2, TensorType({DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, + DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, + DT_DOUBLE})) + .OP_END_FACTORY_REG(FloorMod) + +/** +*@brief Computes the power of "x1" to "x2". + +*@par Inputs: +*Two inputs, including: +* @li x1: A Tensor. Must be one of the following types: +* float16, float32, int32, int64, int8, uint8, double, complex64, complex128. +* @li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(Pow) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT64, DT_INT8, + DT_UINT8, DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT64, DT_INT8, + DT_UINT8, DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT64, DT_INT8, + DT_UINT8, DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(Pow) + +/** +*@brief Return element-wise integer closest to x. + +*@par Inputs: +*One input, include: +*x: A mutable Tensor. Must be one of the following types: +* float16, float32, double. + +*@par Outputs: +*y: A mutable Tensor. Has the same type as "x". +*/ +REG_OP(Rint) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(Rint) + +/** +*@brief Rounds the values of a tensor to the nearest integer, element-wise. Rounds half to even. + +*@par Inputs: +*Inputs including: \n +*x: A required Tensor of type float16, float32, or int32. +*@par Outputs: +*y: A required Tensor. Has the same data type and shape as "x". 
+*/
+REG_OP(Round)
+    .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT64,
+                          DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128}))
+    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT64,
+                           DT_DOUBLE, DT_COMPLEX64, DT_COMPLEX128}))
+    .OP_END_FACTORY_REG(Round)
+
+/**
+*@brief: Computes sine of "x" element-wise.
+
+*@par Inputs:
+*One input:
+*x: A Tensor. Must be one of the following types: float16, float32, double, complex64, complex128, int32, int64
+
+*@par Outputs:
+*y: A Tensor. Has the same type as "x".
+*/
+REG_OP(Sin)
+    .INPUT(x, TensorType::UnaryDataType())
+    .OUTPUT(y, TensorType::UnaryDataType())
+    .OP_END_FACTORY_REG(Sin)
+
+/**
+*@brief: Computes tan of "x" element-wise.
+
+*@par Inputs:
+*One input:
+*x: A Tensor. Must be one of the following types: float16, float32, double, complex64, complex128, int32, int64
+
+*@par Outputs:
+*y: A Tensor. Has the same type as "x".
+*/
+REG_OP(Tan)
+    .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_COMPLEX64,
+                          DT_COMPLEX128, DT_INT32, DT_INT64}))
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_COMPLEX64,
+                           DT_COMPLEX128, DT_INT32, DT_INT64}))
+    .OP_END_FACTORY_REG(Tan)
+
+/**
+*@brief Returns element-wise remainder of division.
+
+*@par Inputs:
+*Two inputs, including:
+* @li x1: A Tensor. Must be one of the following types: float16, float32, double, int32, int64.
+* @li x2: A Tensor of the same type as "x1".
+
+*@par Outputs:
+*y: A Tensor. Has the same type as "x1".
+*/
+REG_OP(TruncateMod)
+    .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT64,
+                           DT_INT32}))
+    .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT64,
+                           DT_INT32}))
+    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT64,
+                           DT_INT32}))
+    .OP_END_FACTORY_REG(TruncateMod)
+
+/**
+*@brief Adds 'bias' to 'x'.
+
+*@par Inputs:
+*Two inputs, including:
+* @li x: A Tensor of type NumberType.
+* @li bias: A 1D Tensor of the same type as "x".
+
+*@par Attributes:
+*data_format: An optional string. Defaults to "NHWC".
+
+*@par Outputs:
+*y: A Tensor with same type as "x".
+*/
+REG_OP(BiasAdd)
+    .INPUT(x, TensorType::NumberType())
+    .INPUT(bias, TensorType::NumberType())
+    .OUTPUT(y, TensorType::NumberType())
+    .ATTR(data_format, String, "NHWC")
+    .OP_END_FACTORY_REG(BiasAdd)
+
+/**
+*@brief Returns the index with the smallest value across dimensions of a tensor.
+
+*@par Inputs:
+*Two inputs, including: \n
+*@li x: A Tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, complex64, int64, qint8, quint8, qint32, bfloat16, uint16, complex128, float16, uint32, uint64.\n
+*format is ND.
+*@li dimension: A Tensor. Must be one of the following types: int32, int64. Must be in the range [-rank(input x), rank(input x)]. Describes which dimension of the input Tensor to reduce across. \n
+* The format is ND.
+*@par Attributes:
+*output_type: The output type, either "int32" or "int64". Defaults to "int64".
+
+*@par Outputs:
+*y: A Tensor of type "output_type".
+*/
+REG_OP(ArgMin)
+    .INPUT(x, TensorType::NumberType())
+    .INPUT(dimension, TensorType::IndexNumberType())
+    .OUTPUT(y, TensorType({DT_INT32, DT_INT64}))
+    .ATTR(output_type, Type, DT_INT64)
+    .OP_END_FACTORY_REG(ArgMin)
+
+/**
+*@brief Returns the index with the smallest value across dimensions of a tensor.
+
+*@par Inputs:
+*One input: \n
+
+*x: A Tensor of type float16 or float32 in ND format.
+
+*@par Attributes:
+*@li dimension: The dimension of the input Tensor to reduce across.
+*@li output_type: An optional attribute, specifying the output data type. Defaults to "int64".
+
+*@par Outputs:
+*y: A Tensor of type output_type.
+*/
+REG_OP(ArgMinD)
+    .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16}))
+    .OUTPUT(y, TensorType({DT_INT32}))
+    .REQUIRED_ATTR(dimension, Int)
+    .ATTR(output_type, Type, DT_INT64)
+    .OP_END_FACTORY_REG(ArgMinD)
+
+/**
+*@brief Returns the index with the largest value across axes of a tensor.
+ +*@par Inputs: +* Two inputs, including: \n +*@li x: A multi-dimensional Tensor of type float16, float32, or int16. +*@li dimension: A Scalar of type int32, specifying the index with the largest value. + +*@par Outputs: \n +*y: A multi-dimensional Tensor of type int32, specifying the index with the largest value. The dimension is one less than that of "x". + +*@attention Constraints: +*@li x: If there are multiple maximum values, the index of the first maximum value is used. +*@li The value range of "dimension" is [-dims, dims - 1]. "dims" is the dimension length of "x". +*/ +REG_OP(ArgMax) + .INPUT(x, TensorType::NumberType()) + .INPUT(dimension, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .ATTR(output_type, Type, DT_INT64) + .OP_END_FACTORY_REG(ArgMax) + +/** +*@brief Returns the index with the largest value across axes of a tensor. + +*@par Inputs: +* One input, including: \n +*x: A multi-dimensional Tensor of type float16, float32. + +*@par Attributes: +*dimension: An integer of type int32, specifying the axis information of the index with the maximum value. + +*@par Outputs: \n +*y: A multi-dimensional Tensor of type int32, specifying the index with the largest value. The dimension is one less than that of "x". + +*@attention Constraints: +*@li x: If there are multiple maximum values, the index of the first maximum value is used. +*@li The value range of "dimension" is [-dims, dims - 1]. "dims" is the dimension length of "x". +*/ +REG_OP(ArgMaxD) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_INT32})) + .REQUIRED_ATTR(dimension, Int) + .ATTR(output_type, Type, DT_INT64) + .OP_END_FACTORY_REG(ArgMaxD) + +/** +*@brief Returns the maximum value of all elements in the input in the given dimension. + +*@par Inputs: +*One input: \n +*x: A multi-dimensional Tensor of type float16 or float32. 
+ +*@par Attributes: +*@li dimension: An integer of type int32, specifying the axis information of the index with the maximum value. +*@li keep_dims: A bool, specifying whether to keep dimensions for the output Tensor. Defaults to "false". + +*@par Outputs: +*@li indice: A multi-dimensional Tensor of type int32, specifying the index. (If "keep_dims" is set to "false", the output dimensions are reduced by "dimension" compared with that of "x". Otherwise, the output has one fewer dimension than "x".) +*@li values: A Tensor, specifying the maximum value. Has the same dimensions as "indice" and the same type as "x". + +*@attention Constraints: +*@li If there are multiple maximum values, the index of the first maximum value is used. +*@li The value range of "dimension" is [-dims, dims - 1]. "dims" is the dimension length of "x". +*/ +REG_OP(ArgMaxWithValue) + .INPUT(x, TensorType({DT_FLOAT,DT_FLOAT16})) + .OUTPUT(indice,TensorType({DT_INT32})) + .OUTPUT(values, TensorType({DT_FLOAT,DT_FLOAT16})) + .REQUIRED_ATTR(dimension, Int) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ArgMaxWithValue) + +/** +*@par Inputs: +*One input: \n +*x: A multi-dimensional Tensor of type float16 or float32. + +*@par Attributes: +*@li dimension: An integer of type int32, specifying the axis information of the index with the maximum value. +*@li keep_dims: A bool, specifying whether to keep dimensions for the output Tensor. Defaults to "false". + +*@par Outputs: +*@li indice: A multi-dimensional Tensor of type int32, specifying the index. (If "keep_dims" is set to "false", the output dimensions are reduced by "dimension" compared with that of "x". Otherwise, the output has one fewer dimension than "x".) +*@li values: A Tensor, specifying the minimum value. Has the same dimensions as "indice" and the same type as "x". + +*@attention Constraints: +*@li If there are multiple minimum values, the index of the first minimum value is used. 
+*@li The value range of "dimension" is [-dims, dims - 1]. "dims" is the dimension length of "x". +*@li Performing the ArgMinWithValue operation on the last axis of float32 data is not supported on a mini platform. +*/ +REG_OP(ArgMinWithValue) + .INPUT(x, TensorType({DT_FLOAT,DT_FLOAT16})) + .OUTPUT(indice,TensorType({DT_INT32})) + .OUTPUT(values, TensorType({DT_FLOAT,DT_FLOAT16})) + .REQUIRED_ATTR(dimension, Int) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ArgMinWithValue) + +REG_OP(Eltwise) + .DYNAMIC_INPUT(__input, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(mode, Int, 1) + .ATTR(coeff, ListFloat, {}) + .OP_END_FACTORY_REG(Eltwise) + +/** +*@brief Computes element-wise population count. + +*@par Inputs: +*x: A Tensor of type TensorType::IntegerDataType(). + +*@par Outputs: +*y: A Tensor of type uint8. +*/ +REG_OP(PopulationCount) + .INPUT(x, TensorType::IntegerDataType()) + .OUTPUT(y, TensorType({DT_UINT8})) + .OP_END_FACTORY_REG(PopulationCount) + +REG_OP(LambNextMVWithDecay) + .INPUT(input1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input3, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input4, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input5, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input6, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input7, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input8, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input9, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx0, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx3, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output1, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output2, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output3, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output4, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(LambNextMVWithDecay) + 
+REG_OP(LambNextMVWithDecayV1) + .INPUT(input1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input3, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input4, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input5, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input6, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input7, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input8, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input9, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx0, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx3, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output1, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output2, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output3, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output4, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(LambNextMVWithDecayV1) + +REG_OP(LambNextMV) + .INPUT(input1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input3, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input4, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input5, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input6, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input7, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input8, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input9, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx0, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(inputx3, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output1, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output2, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output3, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output4, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(LambNextMV) + +REG_OP(LambNextRight) + .INPUT(input1, 
TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul2_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul3_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(truediv1_recip, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(add2_y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output1, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output2, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(LambNextRight) + +REG_OP(LambNextLeft) + .INPUT(input1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul1_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(truediv_recip, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output1, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output2, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(LambNextLeft) + +REG_OP(LambUpdateWithLr) + .INPUT(input1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input3, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input4, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input5, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input6, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input7, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input8, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input9, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output_y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(LambUpdateWithLr) + +REG_OP(LambUpdateWithLrV2) + .INPUT(x1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x3, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x4, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x5, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(greater_y, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(select_e, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(LambUpdateWithLrV2) + +REG_OP(AdamApplyOneWithDecay) + .INPUT(input0, 
TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input3, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input4, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul0_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul1_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul2_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul3_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul4_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(add2_y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output0, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output1, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output2, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(AdamApplyOneWithDecay) + +REG_OP(AdamApplyOne) + .INPUT(input0, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input3, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input4, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul0_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul1_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul2_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mul3_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(add2_y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output0, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output1, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output2, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(AdamApplyOne) + +REG_OP(ClipByNormNoDivSum) + .INPUT(input_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input1, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input2, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input3, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output_y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(ClipByNormNoDivSum) + +REG_OP(SquareSumV2) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(y1, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(y2, 
TensorType({DT_FLOAT16,DT_FLOAT})) + .ATTR(axis, ListInt, {}) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(SquareSumV2) +REG_OP(SquareSumV1) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .ATTR(axis, ListInt, {}) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(SquareSumV1) + +REG_OP(FusedMulAddN) + .INPUT(x1, TensorType::NumberType()) + .INPUT(x2, TensorType::NumberType()) + .INPUT(x3, TensorType::NumberType()) + .OUTPUT(y, TensorType::NumberType()) + .OP_END_FACTORY_REG(FusedMulAddN) + +/** +*@brief Add 'bias' to 'x'. + +*@par Inputs: +* Two inputs, including: +*@li x: An ND tensor of type float16 or float32. +*@li bias: An ND tensor of type float16 or float32. + +*@par Attributes: +*@li axis: An optional int32 used to compute the shape of bias input from the online bottoms. Defaults to "1". +*@li num_axes: +*@li bias_from_blob: + +*@par Outputs: +*y: An ND tensor of type float16 or float32. + +*@attention Constraints:\n +* Assume that the shape length of "x" is "n" and that of "bias" is "m". +*@li "axis" is within the range [-n, n-1]. num_axes >= -1. +*@li If "bias_from_blob = true", "num_axes = -1", and "axis >= 0", the ith axis of "bias" and the (i+"axis")th axis of "x" must have the same size (0 <= i < n-axis).\n +* If "axis < 0", the ith axis of "bias" and the (i+n+"axis")th axis of "x" must have the same size (0 <= i < -axis). +*@li If "bias_from_blob = true" and "num_axes = 0", "bias" is a scalar with shape length 1 and dimension size 1. +*@li If "bias_from_blob = true", "num_axes > 0, and "axis >= 0", "axis + num_axes" must be less than or equal to "n" and the ith axis of "bias" and the (i+"axis")th axis of "x" must have the same size (0 <= i < num_axes).\n +* If "axis < 0", "n + axis + num_axes" must be less than or equal to "n" and the ith axis of "bias" and the (i+n+"axis")th axis of "x" must have the same size (0 <= i < num_axes). 
+*@li If "bias_from_blob = false", "bias" is not a scalar, and "axis >= 0","axis + m" must be less than or equal to "n" and the ith axis of "bias" and the (i+"axis")th axis of "x" must have the same size (0 <= i < m).\n +* If "axis < 0", "n + axis + m" must be less than or equal to "n" and the ith axis of "bias" and the (i+n+"axis")th axis of "x" must have the same size (0 <= i < m). +*/ + +REG_OP(Bias) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) /* "First operand." */ + .INPUT(bias, TensorType({DT_FLOAT, DT_FLOAT16})) /* "Second operand." */ + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) /* "Result, has same element type as x" */ + .ATTR(axis, Int, 1) + .ATTR(num_axes, Int, 1) + .ATTR(bias_from_blob, Bool, true) + .OP_END_FACTORY_REG(Bias) + +/** +* @brief Computes the gradient for Local Response Normalization. + +* @par Inputs: +* @li grads: A 4D Tensor of type float16 or float32. +* @li x: A 4D Tensor of type float16 or float32. +* @li y: A 4D Tensor of type float16 or float32. + +* @par Attributes: +* @li depth_radius: An optional int, specifying the half-width of the +* normalization window. Defaults to "5". +* @li bias: An optional float32. An offset, usually > 0 to avoid dividing by 0. +* Defaults to "1". +* @li alpha: An optional float32. A scaling factor, usually positive. +* Defaults to "1". +* @li beta: An optional float32. An exponent. Defaults to "0.5". + +* @par Outputs: +* z: A Tensor. Has the same type and shape as "grads". + +* @attention Constraints: +* "x" and "y" must have the same shape and type as "grads". 
+*/ + +REG_OP(LRNGrad) + .INPUT(grads, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(z, TensorType({DT_FLOAT16,DT_FLOAT})) + .ATTR(depth_radius, Int, 5) + .ATTR(bias, Float, 1.0) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.5) + .OP_END_FACTORY_REG(LRNGrad) + +REG_OP(LRN) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .ATTR(depth_radius, Int, 5) + .ATTR(bias, Float, 1.0) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.5) + .ATTR(norm_region, String, "ACROSS_CHANNELS") + .OP_END_FACTORY_REG(LRN) + +} // namespace ge + +#endif // GE_OP_ELEWISE_CALCULATION_OPS_H diff --git a/third_party/fwkacllib/inc/ops/fsrdetectionoutput_ops.h b/third_party/fwkacllib/inc/ops/fsrdetectionoutput_ops.h new file mode 100755 index 00000000..8fe21d37 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/fsrdetectionoutput_ops.h @@ -0,0 +1,39 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_FSRDETECTIONOUTPUT_OPS_H_ +#define GE_OP_FSRDETECTIONOUTPUT_OPS_H_ +#include "graph/operator_reg.h" + +namespace ge { +REG_OP(FSRDetectionOutput) + .INPUT(rois, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(prior_box, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(score, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(actual_rois_num, TensorType({DT_INT32})) + .OUTPUT(actual_bbox_num, TensorType({DT_INT32})) + .OUTPUT(box, TensorType({DT_FLOAT, DT_FLOAT16})) + .ATTR(batch_rois, Int, 1024) + .ATTR(im_info, ListInt, {375,1024}) + .ATTR(num_classes, Int, 80) + .ATTR(max_rois_num, Int, 1024) + .ATTR(score_thresh, Float, 0.45) + .ATTR(nms_thresh, Float, 0.7) + .ATTR(bbox_reg_weights, ListInt, {1,1,1,1}) + .ATTR(post_nms_topn, Int, 304) + .OP_END_FACTORY_REG(FSRDetectionOutput) +} +#endif diff --git a/third_party/fwkacllib/inc/ops/functional_ops.h b/third_party/fwkacllib/inc/ops/functional_ops.h new file mode 100755 index 00000000..1529d45c --- /dev/null +++ b/third_party/fwkacllib/inc/ops/functional_ops.h @@ -0,0 +1,28 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_FUNCTIONAL_OPS_H_ +#define GE_FUNCTIONAL_OPS_H_ + +#include "graph/operator_reg.h" +#include "graph/operator.h" + +namespace ge { + + +} // namespace ge + +#endif // GE_FUNCTIONAL_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/get_data_ops.h b/third_party/fwkacllib/inc/ops/get_data_ops.h new file mode 100755 index 00000000..0a9b174b --- /dev/null +++ b/third_party/fwkacllib/inc/ops/get_data_ops.h @@ -0,0 +1,58 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_GET_DATA_OPS_H_ +#define GE_OP_GET_DATA_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(MakeIterator) + .INPUT(x, TensorType::ALL()) + .INPUT(x1, TensorType::ALL()) + .ATTR(_kernel, String, "dp") + .OP_END_FACTORY_REG(MakeIterator) + +REG_OP(IteratorV2) + .OUTPUT(y, TensorType::ALL()) + .ATTR(output_types, ListInt, {}) + .ATTR(output_shapes,ListListInt, {{}, {}}) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(IteratorV2) + +REG_OP(IteratorGetNext) + .INPUT(x, TensorType::ALL()) + .DYNAMIC_OUTPUT(y, TensorType::ALL()) + .ATTR(output_types, ListInt, {}) + .ATTR(output_shapes, ListListInt, {{},{}}) + .ATTR(output_num, Int, 1) + .ATTR(_kernel, String, "dp") + .OP_END_FACTORY_REG(IteratorGetNext) + +REG_OP(DeviceQueueDataset) + .OUTPUT(y, TensorType::ALL()) + .ATTR(output_types, ListInt, {}) + .ATTR(output_shapes, ListListInt, {{},{}}) + .ATTR(channel_name, String, "") + .ATTR(_iterator_name, String, "IteratorV2") + .OP_END_FACTORY_REG(DeviceQueueDataset) + +} // namespace ge + + +#endif // GE_OP_GET_DATA_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/hcom_ops.h b/third_party/fwkacllib/inc/ops/hcom_ops.h new file mode 100644 index 00000000..c2a50308 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/hcom_ops.h @@ -0,0 +1,195 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_HCOM_OPS_H_ +#define GE_OP_HCOM_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { +/** + * @brief Outputs a tensor gathering all input tensors. + * @par Inputs: + * x: A tensor. Must be one of the following types: int8, int32, float16, + * float32. + * @par Attributes: + * @li rank_size: An integer identifying the number of ranks participating in + * the op. + * @li group: A string identifying the group name of ranks participating in + * the op. + * @par Outputs: + * y: A Tensor. Has the same type as "x". + * @attention Constraints:\n + * "group" is limited to 128 characters. Use "hccl_world_group" + * as the name of a world group. + */ +REG_OP(HcomAllGather) + .INPUT(x, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .REQUIRED_ATTR(rank_size, Int) + .REQUIRED_ATTR(group, String) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(HcomAllGather) + +/** + * @brief Outputs a tensor containing the reduction across all input tensors + * passed to op. + * @par Inputs: + * x: A tensor. Must be one of the following types: int8, int32, float16, + * float32. + * @par Attributes: + * @li reduction: A string identifying the reduction operation to perform. \n + * The supported operation are: "sum", "max", "min", "prod". + * @li group: A string identifying the group name of ranks participating in + * the op. + * @li fusion: An optional integer identifying the fusion flag of the op. \n + * 0: no fusion; other (default): fusion. + * @par Outputs: + * y: A Tensor. Has the same type as "x". + * @attention Constraints: \n + * "group" is limited to 128 characters. Use "hccl_world_group" + * as the name of a world group. 
+ */ +REG_OP(HcomAllReduce) + .INPUT(x, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .REQUIRED_ATTR(reduction, String) + .REQUIRED_ATTR(group, String) + .ATTR(fusion, Int, 1) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(HcomAllReduce) + +/** + * @brief Broadcasts the input tensor in root rank to all ranks. + * @par Inputs: + * x: A tensor. Must be one of the following types: int8, int32, float16, + * float32. + * @par Attributes: + * @li root_rank: An integer identifying the root rank in the op input of + * this rank will be broadcast to other ranks. + * @li group: A string identifying the group name of ranks participating in + * the op. + * @par Outputs: + * y: A Tensor. Has the same type as "x". + * @attention Constraints:\n + * "group" is limited to 128 characters. Use "hccl_world_group" + * as the name of a world group. + */ +REG_OP(HcomBroadcast) + .DYNAMIC_INPUT(x, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .DYNAMIC_OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .REQUIRED_ATTR(root_rank, Int) + .REQUIRED_ATTR(group, String) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(HcomBroadcast) + +/** + * @brief Performs reduction across all input tensors, scattering in equal + * blocks among ranks, each rank getting a chunk of data based on its rank + * index. + * @par Inputs: + * x: A tensor. Must be one of the following types: int8, int32, float16, + * float32. + * @par Attributes: + * @li reduction: A string identifying the reduction operation to perform. \n + * The supported operation are: "sum", "max", "min", "prod". + * @li group: A string identifying the group name of ranks participating in + * the op. + * @li rank_size: An integer identifying the number of ranks participating in + * the op. + * @par Outputs: + * y: A Tensor. Has the same type as "x". 
+ * @attention Constraints:\n + * "group" is limited to 128 characters. Use "hccl_world_group" + * as the name of a world group. + */ +REG_OP(HcomReduceScatter) + .INPUT(x, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .REQUIRED_ATTR(reduction, String) + .REQUIRED_ATTR(group, String) + .REQUIRED_ATTR(rank_size, Int) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(HcomReduceScatter) + +/** + * @brief Sends the input tensor to destination rank. + * @par Inputs: + * x: A tensor. Must be one of the following types: int8, int32, float16, + * float32. + * @par Attributes: + * @li sr_tag: An integer identifying the send/recv message tag. The message + * will be received by the HcomReceive op with the same "sr_tag". + * @li dest_rank: An integer identifying the destination rank. + * @li group: A string identifying the group name of ranks participating in + * the op. + * @par Outputs: + * None. + * @attention Constraints:\n + * @li "group" is limited to 128 characters. Use + * "hccl_world_group" as the name of a world group. + * @li Operators HcomSend and HcomReceive have the same "sr_tag". + * @see HcomReceive +*/ +REG_OP(HcomSend) + .INPUT(x, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .REQUIRED_ATTR(group, String) + .REQUIRED_ATTR(sr_tag, Int) + .REQUIRED_ATTR(dest_rank, Int) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(HcomSend) + +/** + * @brief Receives the tensor from source rank. + * @par Inputs: + * None. + * @par Attributes: + * @li sr_tag: An integer identifying the send/recv message tag. The message + * will be send by the HcomSend op with the same "sr_tag". + * @li src_rank: An integer identifying the source rank. + * @li group: A string identifying the group name of ranks participating in + * the op. + * @li shape: A list identifying the shape of the tensor to be received. 
+ * @li dtype: An integer identifying the type of the tensor to be received. \n + * The supported types are: int8, int32, float16, float32. + * @par Outputs: + * y: A tensor with type identified in "dtype". + * @attention Constraints:\n + * @li "group" is limited to 128 characters. Use + * "hccl_world_group" as the name of a world group. + * @li Operators HcomSend and HcomReceive have the same "sr_tag". + * @li "shape" should be same as the input tensor of HcomSend. + * @li "dtype" should be same as the input tensor of HcomSend. + * @see HcomSend +*/ +REG_OP(HcomReceive) + .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_INT8, DT_FLOAT16})) + .REQUIRED_ATTR(group, String) + .REQUIRED_ATTR(sr_tag, Int) + .REQUIRED_ATTR(src_rank, Int) + .REQUIRED_ATTR(shape, ListInt) + .REQUIRED_ATTR(dtype, Type) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(HcomReceive) + +} +#endif diff --git a/third_party/fwkacllib/inc/ops/image_ops.h b/third_party/fwkacllib/inc/ops/image_ops.h new file mode 100644 index 00000000..94143ac1 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/image_ops.h @@ -0,0 +1,384 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_MAGE_OPS_H_ +#define GE_OP_MAGE_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(AdjustHue) + .INPUT(images, TensorType({DT_FLOAT})) + .INPUT(delta, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(AdjustHue) + +REG_OP(AdjustSaturation) + .INPUT(images, TensorType({DT_FLOAT})) + .INPUT(scale, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(AdjustSaturation) + +REG_OP(AdjustContrast) + .INPUT(images, TensorType({DT_FLOAT})) + .INPUT(contrast_factor, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(AdjustContrast) + +REG_OP(CropAndResize) + .INPUT(images, TensorType({DT_UINT8, DT_UINT16, DT_INT8, \ + DT_INT16, DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(boxes, TensorType({DT_FLOAT})) + .INPUT(box_index, TensorType({DT_INT32})) + .INPUT(crop_size, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(extrapolation_value, Float, 0) + .ATTR(method, String, "bilinear") + .OP_END_FACTORY_REG(CropAndResize) + +REG_OP(CropAndResizeGradBoxes) + .INPUT(grads, TensorType({DT_FLOAT})) + .INPUT(images, TensorType({DT_UINT8, DT_UINT16, DT_INT8, DT_INT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(boxes, TensorType({DT_FLOAT})) + .INPUT(box_index, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(method, String, "bilinear") + .OP_END_FACTORY_REG(CropAndResizeGradBoxes) + +REG_OP(CropAndResizeGradImage) + .INPUT(grads, TensorType({DT_FLOAT})) + .INPUT(boxes, TensorType({DT_FLOAT})) + .INPUT(box_index, TensorType({DT_INT32})) + .INPUT(image_size, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(method, String, "bilinear") + .REQUIRED_ATTR(T, Type) + .OP_END_FACTORY_REG(CropAndResizeGradImage) + +REG_OP(ExtractGlimpse) + .INPUT(x, TensorType({DT_FLOAT})) + .INPUT(size, TensorType({DT_INT32})) + .INPUT(offsets, 
TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(centered, Bool, true) + .ATTR(normalized, Bool, true) + .ATTR(uniform_noise, Bool, true) + .ATTR(noise, String, "uniform") + .OP_END_FACTORY_REG(ExtractGlimpse) + +REG_OP(HSVToRGB) + .INPUT(images, TensorType({ DT_FLOAT, DT_DOUBLE })) + .OUTPUT(y, TensorType({ DT_FLOAT, DT_DOUBLE })) + .OP_END_FACTORY_REG(HSVToRGB) + +REG_OP(QuantizedResizeBilinear) + .INPUT(images, TensorType({ DT_FLOAT })) + .INPUT(size, TensorType({ DT_INT32 })) + .INPUT(min, TensorType({ DT_FLOAT })) + .INPUT(max, TensorType({ DT_FLOAT })) + .OUTPUT(resized_images, TensorType({ DT_FLOAT })) + .OUTPUT(y_min, TensorType({ DT_FLOAT })) + .OUTPUT(y_max, TensorType({ DT_FLOAT })) + .ATTR(align_corners, Bool, false) + .ATTR(half_pixel_centers, Bool, false) + .OP_END_FACTORY_REG(QuantizedResizeBilinear) + +REG_OP(ResizeArea) + .INPUT(images, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(size, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(align_corners, Bool, false) + .OP_END_FACTORY_REG(ResizeArea) + +REG_OP(ResizeBicubicGrad) + .INPUT(grads, TensorType({DT_FLOAT})) + .INPUT(original_image, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .ATTR(align_corners, Bool, false) + .ATTR(half_pixel_centers, Bool, false) + .OP_END_FACTORY_REG(ResizeBicubicGrad) + +REG_OP(ResizeBicubic) + .INPUT(images, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(size, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(align_corners, Bool, false) + .ATTR(half_pixel_centers, Bool, false) + .OP_END_FACTORY_REG(ResizeBicubic) + +/** +*@brief Performs the backpropagation of ResizeNearestNeighbor for training scenarios. + +*@par Inputs: +* Two inputs, including: +*@li grads: A 4D Tensor, specifying the backpropagation gradients. 
Must be one of the following types: int8, uint8, int16, uint16, int32, int64, float16, float32, float64. +*@li size: A 1D Tensor of type int32, specifying the source image size (orig_height, orig_width). + +*@par Attributes: \n +*align_corners: An optional bool. If "True", the centers of the corner pixels of the input and gradient tensors are aligned. Defaults to "False". + +*@par Outputs: \n +*y: A 4D Tensor, specifying the backpropagation gradient after computation. Has the same type as "grads". + +*@attention Constraints: +* When the inputs are of type float32, the execution performance is high. + +*@see ResizeNearestNeighbor +*/ +REG_OP(ResizeNearestNeighborGrad) + .INPUT(grads, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(size, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(align_corners, Bool, false) + .ATTR(half_pixel_centers, Bool, false) + .OP_END_FACTORY_REG(ResizeNearestNeighborGrad) + +REG_OP(ResizeNearestNeighborGradD) + .INPUT(grads, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(size, ListInt) + .ATTR(align_corners, Bool, false) + .OP_END_FACTORY_REG(ResizeNearestNeighborGradD) + +/** +*@brief Performs the backpropagation of ResizeBilinear, which is used to resize an image\n to a specified size, while this operator is used to restore the resized image to the original image. +*@par Inputs: +* Two inputs, including: +* @li grads: A float32 input in NC1HWC0 format, describing the image information after resizing,\n including the image height, width, number of channels, and number of images. +* @li original_image: A float32 input in NC1HWC0 format, describing the image information before resizing,\n including the image height, width, number of channels, and number of images. + + +*@par Attributes: +*align_corners: An optional bool. 
If "True", the centers of the corner pixels of the input and\n gradient tensors are aligned. Defaults to "False". + +*@par Outputs: +*y: A float32 output in NC1HWC0 format, specifying the image information before resizing, including the image height,\n +width, number of channels, and number of images. +*/ +REG_OP(ResizeBilinearGrad) + .INPUT(grads, TensorType({DT_FLOAT})) + .INPUT(original_image, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(align_corners, Bool, false) + .OP_END_FACTORY_REG(ResizeBilinearGrad) + +/** +*@brief Resizes "images" to "size" using bilinear interpolation. + +*@par Inputs: +* Two inputs, including: +*@li images: An NC1HWC0 Tensor. +* Must be one of the following types: int8, uint8, int16, uint16, int32, int64, float16, float32, double +*@li size: An ND Tensor of type int32. + +*@par Attributes: +*align_corners: An optional bool. If "true", the centers of the corner pixels of the input and output tensors are aligned. Defaults to "false". + +*@par Outputs: +*y: A Tensor with the same format as input "images". 
+*/ +REG_OP(ResizeBilinear) + .INPUT(images, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(size, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(align_corners, Bool, false) + .OP_END_FACTORY_REG(ResizeBilinear) + +REG_OP(RGBToHSV) + .INPUT(images, TensorType({ DT_FLOAT, DT_DOUBLE })) + .OUTPUT(y, TensorType({ DT_FLOAT, DT_DOUBLE })) + .OP_END_FACTORY_REG(RGBToHSV) + +REG_OP(SampleDistortedBoundingBoxExt2) + .INPUT(image_size, TensorType({ DT_UINT8, DT_INT8, DT_INT16, \ + DT_INT32, DT_INT64 })) + .INPUT(bounding_boxes, TensorType({ DT_FLAOT })) + .INPUT(min_object_covered, TensorType({ DT_FLOAT })) + .OUTPUT(begin, TensorType({ DT_UINT8, DT_INT8, DT_INT16, \ + DT_INT32, DT_INT64 })) + .OUTPUT(size, TensorType({ DT_UINT8, DT_INT8, DT_INT16, \ + DT_INT32, DT_INT64 })) + .OUTPUT(bboxes, TensorType({ DT_FLOAT })) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .ATTR(aspect_ratio_range, ListFloat, { 0.75f, 1.33f }) + .ATTR(area_range, ListFloat, { 0.05f, 1.0f }) + .ATTR(max_attempts, Int, 100) + .ATTR(use_image_if_no_bounding_boxes, Bool, false) + .OP_END_FACTORY_REG(SampleDistortedBoundingBoxExt2) + +/** +*@brief Resizes "images" to "size" using nearest neighbor interpolation. + +*@par Inputs: +* Two inputs, including: +*@li images: An NC1HWC0 Tensor. +* Must be one of the following types: int8, uint8, int16, uint16, int32, int64, float16, float32, double +*@li size: An ND Tensor of type int32. + +*@par Attributes: +*align_corners: An optional bool. If "true", the centers of the corner pixels of the input and output tensors are aligned. Defaults to "false". + +*@par Outputs: +*y: A Tensor with the same type and format as input "images". 
+*/ +REG_OP(ResizeNearestNeighbor) + .INPUT(images, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(size, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(align_corners, Bool, false) + .OP_END_FACTORY_REG(ResizeNearestNeighbor) + +REG_OP(DrawBoundingBoxes) + .INPUT(images, TensorType({DT_FLOAT})) + .INPUT(boxes, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(DrawBoundingBoxes) + +REG_OP(NonMaxSuppression) + .INPUT(boxes, TensorType({DT_FLOAT})) + .INPUT(scores, TensorType({DT_FLOAT})) + .INPUT(max_output_size, TensorType({DT_INT32})) + .OUTPUT(selected_indices, TensorType({DT_INT32})) + .ATTR(iou_threshold, Float, 0.5f) + .OP_END_FACTORY_REG(NonMaxSuppression) + +REG_OP(NonMaxSuppressionV2) + .INPUT(boxes, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(scores, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(max_output_size, TensorType({DT_INT32})) + .INPUT(iou_threshold, TensorType({DT_FLOAT})) + .OUTPUT(selected_indices, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(NonMaxSuppressionV2) + +REG_OP(NonMaxSuppressionV3) + .INPUT(boxes, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(scores, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(max_output_size, TensorType({DT_INT32})) + .INPUT(iou_threshold, TensorType({DT_FLOAT})) + .INPUT(score_threshold, TensorType({DT_FLOAT})) + .OUTPUT(selected_indices, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(NonMaxSuppressionV3) + +REG_OP(NonMaxSuppressionV4) + .INPUT(boxes, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(scores, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(max_output_size, TensorType({DT_INT32})) + .INPUT(iou_threshold, TensorType({DT_FLOAT})) + .INPUT(score_threshold, TensorType({DT_FLOAT})) + .OUTPUT(selected_indices, TensorType({DT_INT32})) + .OUTPUT(valid_outputs, TensorType({DT_INT32})) + 
.ATTR(pad_to_max_output_size, Bool, false) + .OP_END_FACTORY_REG(NonMaxSuppressionV4) + +REG_OP(NonMaxSuppressionWithOverlaps) + .INPUT(overlaps, TensorType({DT_FLOAT})) + .INPUT(scores, TensorType({DT_FLOAT})) + .INPUT(max_output_size, TensorType({DT_INT32})) + .INPUT(overlap_threshold, TensorType({DT_FLOAT})) + .INPUT(score_threshold, TensorType({DT_FLOAT})) + .OUTPUT(selected_indices, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(NonMaxSuppressionWithOverlaps) + +REG_OP(EncodeJpeg) + .INPUT(image, TensorType({DT_UINT8})) + .OUTPUT(contents, TensorType({DT_STRING})) + .ATTR(format, String, "") + .ATTR(quality, Int, 95) + .ATTR(progressive, Bool, false) + .ATTR(optimize_size, Bool, false) + .ATTR(chroma_downsampling, Bool, true) + .ATTR(density_unit, String, "in") + .ATTR(x_density, Int, 300) + .ATTR(y_density, Int, 300) + .ATTR(xmp_metadata, String, "") + .OP_END_FACTORY_REG(EncodeJpeg) + +REG_OP(EncodePng) + .INPUT(image, TensorType({DT_UINT8, DT_UINT16})) + .OUTPUT(contents, TensorType({DT_STRING})) + .ATTR(compression, Int, -1) + .OP_END_FACTORY_REG(EncodePng) + +/** +*@brief Resizes "images" to "size" using bilinear interpolation. + +*@par Inputs: +* One input: +*images: An NC1HWC0 Tensor. \n +* Must be one of the following types: float16, float32. + +*@par Attributes: +*@li size: A required int32 Tensor specifying the new size for the images. No default value. +*@li align_corners: An optional bool. If "true", the centers of the corner pixels of the input and output tensors are aligned. Defaults to "false". + +*@par Outputs: +*y: A Tensor with type float32 and the same format as input "images". + +*@attention Constraints: +*@li The input "size" must be a tensor of 2 elements: size[0] <= 2048, size[1] <= 2048. +*@li The input "images" must be a tensor of 5 elements: images[2] <= 2048, images[3] <= 2048. 
+*/ +REG_OP(ResizeBilinearD) + .INPUT(images, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(align_corners, Bool, false) + .REQUIRED_ATTR(size, ListInt) + .OP_END_FACTORY_REG(ResizeBilinearD) + +/** +*@brief Resizes "images" to "size" using nearest neighbor interpolation. + +*@par Inputs: +* One input: +*images: An NC1HWC0 Tensor. \n +* Must be one of the following types: float16, float32, int32, int8, uint8 + +*@par Attributes: +*@li size: A required int32 Tensor specifying the new size for the images. No default value. +*@li align_corners: An optional bool. If "true", the centers of the corner pixels of the input and output tensors are aligned. Defaults to "false". + +*@par Outputs: +*y: A Tensor with the same type and format as input "images". + +*@attention Constraints: +* The input "size" must be a tensor of 2 elements: size[0] <= 7680, size[1] <= 4320 +*/ +REG_OP(ResizeNearestNeighborD) + .INPUT(images, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .REQUIRED_ATTR(size, ListInt) + .ATTR(align_corners, Bool, false) + .OP_END_FACTORY_REG(ResizeNearestNeighborD) + +REG_OP(ExtractJpegShape) + .INPUT(contents, TensorType({DT_STRING})) + .OUTPUT(image_shape, TensorType({DT_INT32, DT_INT64})) + .REQUIRED_ATTR(output_type, Type) + .OP_END_FACTORY_REG(ExtractJpegShape) +} // namespace ge + +#endif // GE_OP_MAGE_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/linalg_ops.h b/third_party/fwkacllib/inc/ops/linalg_ops.h new file mode 100755 index 00000000..320dfb0e --- /dev/null +++ b/third_party/fwkacllib/inc/ops/linalg_ops.h @@ -0,0 +1,100 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_LINALG_OPS_H_ +#define GE_OP_LINALG_OPS_H_ + +#include "graph/operator_reg.h" +#include "../graph/operator.h" + +namespace ge { + +REG_OP(CholeskyGrad) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(grad, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(CholeskyGrad) + +REG_OP(Cholesky) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(Cholesky) + +REG_OP(LogMatrixDeterminant) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(sign, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(LogMatrixDeterminant) + +REG_OP(MatrixDeterminant) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(MatrixDeterminant) + +REG_OP(MatrixInverse) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .ATTR(adjoint, Bool, false) + .OP_END_FACTORY_REG(MatrixInverse) + +REG_OP(MatrixSolve) + .INPUT(matrix, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(rhs, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .ATTR(adjoint, Bool, false) + .OP_END_FACTORY_REG(MatrixSolve) + +REG_OP(MatrixSolveLs) + .INPUT(matrix, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(rhs, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(l2, TensorType({DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .ATTR(fast, Bool, true) + 
.OP_END_FACTORY_REG(MatrixSolveLs) + +REG_OP(MatrixTriangularSolve) + .INPUT(matrix, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(rhs, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .ATTR(lower, Bool, true) + .ATTR(adjoint, Bool, false) + .OP_END_FACTORY_REG(MatrixTriangularSolve) + +REG_OP(Qr) + .INPUT(x, TensorType({ DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .OUTPUT(q, TensorType({ DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .OUTPUT(r, TensorType({ DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .ATTR(full_matrices, Bool, false) + .OP_END_FACTORY_REG(Qr) + +REG_OP(SelfAdjointEig) + .INPUT(x, TensorType({ DT_DOUBLE, DT_FLOAT })) + .OUTPUT(eigen_value, TensorType({ DT_DOUBLE, DT_FLOAT })) + .OUTPUT(eigen_vector, TensorType({ DT_DOUBLE, DT_FLOAT })) + .ATTR(compute_v, Bool, true) + .OP_END_FACTORY_REG(SelfAdjointEig) + +REG_OP(Svd) + .INPUT(x, TensorType({ DT_DOUBLE, DT_FLOAT })) + .OUTPUT(sigma, TensorType({ DT_DOUBLE, DT_FLOAT })) + .OUTPUT(u, TensorType({ DT_DOUBLE, DT_FLOAT })) + .OUTPUT(v, TensorType({ DT_DOUBLE, DT_FLOAT })) + .ATTR(compute_uv, Bool, true) + .ATTR(full_matrices, Bool, false) + .OP_END_FACTORY_REG(Svd) +} // namespace ge + +#endif // GE_OP_LINALG_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/logging_ops.h b/third_party/fwkacllib/inc/ops/logging_ops.h new file mode 100644 index 00000000..0dc0bd24 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/logging_ops.h @@ -0,0 +1,44 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_LOGGING_OPS_H +#define GE_OP_LOGGING_OPS_H + +#include "graph/operator.h" +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(Timestamp) + .OUTPUT(y, TensorType({DT_DOUBLE})) + .OP_END_FACTORY_REG(Timestamp) + +REG_OP(Assert) + .INPUT(input_condition, TensorType{DT_BOOL}) + .DYNAMIC_INPUT(input_data, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, + DT_UINT64, DT_BOOL, DT_DOUBLE, DT_STRING})) + .ATTR(summarize, Int, 3) + .OP_END_FACTORY_REG(Assert) + +REG_OP(PrintV2) + .INPUT(x, TensorType({DT_STRING})) + .ATTR(output_stream, String, "stderr") + .OP_END_FACTORY_REG(PrintV2) + +} // namespace ge + +#endif // GE_OP_LOGGING_OPS_H diff --git a/third_party/fwkacllib/inc/ops/lookup_ops.h b/third_party/fwkacllib/inc/ops/lookup_ops.h new file mode 100755 index 00000000..3ca5ae46 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/lookup_ops.h @@ -0,0 +1,115 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_LOOKUP_OPS_H_ +#define GE_OP_LOOKUP_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(LookupTableImport) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(keys, TensorType({DT_BOOL, DT_DOUBLE, \ + DT_FLOAT, DT_INT32, DT_INT64})) + .INPUT(values, TensorType({DT_BOOL, DT_DOUBLE, \ + DT_FLOAT, DT_INT32, DT_INT64})) + .OP_END_FACTORY_REG(LookupTableImport) + +REG_OP(LookupTableInsert) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(keys, TensorType({DT_BOOL, DT_DOUBLE, DT_FLOAT, \ + DT_INT32, DT_INT64})) + .INPUT(values, TensorType({DT_BOOL, DT_DOUBLE, DT_FLOAT, \ + DT_INT32, DT_INT64})) + .OP_END_FACTORY_REG(LookupTableInsert) + +REG_OP(LookupTableExport) + .INPUT(handle, TensorType({DT_RESOURCE})) + .OUTPUT(keys, TensorType({DT_BOOL, DT_DOUBLE, DT_FLOAT, \ + DT_INT32, DT_INT64})) + .OUTPUT(values, TensorType({DT_BOOL, DT_DOUBLE, DT_FLOAT, \ + DT_INT32,DT_INT64})) + .REQUIRED_ATTR(Tkeys, Type) + .REQUIRED_ATTR(Tvalues, Type) + .OP_END_FACTORY_REG(LookupTableExport) +REG_OP(LookupTableSize) + .INPUT(handle, TensorType({DT_RESOURCE})) + .OUTPUT(size, TensorType({DT_INT64})) + .OP_END_FACTORY_REG(LookupTableSize) + +REG_OP(LookupTableFind) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(keys, TensorType({DT_DOUBLE, DT_FLOAT, \ + DT_INT32, DT_INT64})) + .INPUT(default_value, TensorType({DT_DOUBLE, DT_FLOAT, \ + DT_INT32, DT_INT64})) + .OUTPUT(values, TensorType({DT_DOUBLE, DT_FLOAT, DT_INT32, \ + DT_INT64})) + .REQUIRED_ATTR(Tout, Type) + .OP_END_FACTORY_REG(LookupTableFind) + +REG_OP(HashTable) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(use_node_name_sharing, Bool, false) + .REQUIRED_ATTR(key_dtype, Type) + .REQUIRED_ATTR(value_dtype, Type) + .OP_END_FACTORY_REG(HashTable) + +REG_OP(InitializeTable) + .INPUT(handle, TensorType({DT_RESOURCE})) + .INPUT(keys, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, 
DT_FLOAT, DT_DOUBLE})) + .INPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(InitializeTable) + +REG_OP(MutableDenseHashTable) + .INPUT(empty_key, TensorType({DT_INT32, DT_INT64})) + .INPUT(deleted_key, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(use_node_name_sharing, Bool, false) + .REQUIRED_ATTR(value_dtype, Type) + .ATTR(value_shape, ListInt, {}) + .ATTR(initial_num_buckets, Int, 131072) + .ATTR(max_load_factor, Float, 0.8) + .OP_END_FACTORY_REG(MutableDenseHashTable) + +REG_OP(MutableHashTableOfTensors) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(use_node_name_sharing, Bool, false) + .REQUIRED_ATTR(key_dtype, Type) + .REQUIRED_ATTR(value_dtype, Type) + .ATTR(value_shape, ListInt, {}) + .OP_END_FACTORY_REG(MutableHashTableOfTensors) + +REG_OP(MutableHashTable) + .OUTPUT(handle, TensorType({DT_RESOURCE})) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .ATTR(use_node_name_sharing, Bool, false) + .REQUIRED_ATTR(key_dtype, Type) + .REQUIRED_ATTR(value_dtype, Type) + .OP_END_FACTORY_REG(MutableHashTable) +} // namespace ge + +#endif // GE_OP_LOOKUP_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/math_ops.h b/third_party/fwkacllib/inc/ops/math_ops.h new file mode 100755 index 00000000..f311f292 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/math_ops.h @@ -0,0 +1,115 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_MATH_OPS_H_ +#define GE_OP_MATH_OPS_H_ + +#include "graph/operator_reg.h" +#include "graph/operator.h" + +namespace ge { + +REG_OP(Igamma) + .INPUT(a, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(z, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(Igamma) + +REG_OP(Igammac) + .INPUT(a, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(z, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(Igammac) + +REG_OP(CompareAndBitpack) + .INPUT(x, TensorType({ DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT8, \ + DT_INT16, DT_INT32, DT_INT64, DT_BOOL })) + .INPUT(threshold, TensorType({ DT_FLOAT, DT_FLOAT16, DT_DOUBLE, \ + DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_BOOL })) + .OUTPUT(y, TensorType(DT_UINT8)) + .OP_END_FACTORY_REG(CompareAndBitpack) + +REG_OP(Bincount) + .INPUT(array, TensorType(DT_INT32)) + .INPUT(size, TensorType(DT_INT32)) + .INPUT(weights, TensorType({ DT_FLOAT, DT_INT32, DT_INT64, DT_DOUBLE })) + .OUTPUT(bins, TensorType({ DT_FLOAT, DT_INT32, DT_INT64, DT_DOUBLE })) + .OP_END_FACTORY_REG(Bincount) + +REG_OP(Betainc) + .INPUT(a, TensorType({DT_DOUBLE, DT_FLOAT})) + .INPUT(b, TensorType({DT_DOUBLE, DT_FLOAT})) + .INPUT(x, TensorType({DT_DOUBLE, DT_FLOAT})) + .OUTPUT(z, TensorType({DT_DOUBLE, DT_FLOAT})) + .OP_END_FACTORY_REG(Betainc) + +REG_OP(Zeta) + .INPUT(x, TensorType({DT_DOUBLE, DT_FLOAT})) + .INPUT(q, TensorType({DT_DOUBLE, DT_FLOAT})) + .OUTPUT(z, TensorType({DT_DOUBLE, DT_FLOAT})) + .OP_END_FACTORY_REG(Zeta) + 
+REG_OP(Bucketize) + .INPUT(x, TensorType({DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_INT32})) + .REQUIRED_ATTR(boundaries, ListFloat) + .OP_END_FACTORY_REG(Bucketize) + +REG_OP(SparseSegmentSum) + .INPUT(x, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(segment_ids, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16})) + .OP_END_FACTORY_REG(SparseSegmentSum) + +REG_OP(SparseSegmentMean) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(segment_ids, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseSegmentMean) + +REG_OP(SparseSegmentMeanGrad) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(segment_ids, TensorType({DT_INT32})) + .INPUT(output_dim0, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseSegmentMeanGrad) + +REG_OP(IgammaGradA) + .INPUT(a, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(z, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(IgammaGradA) + +REG_OP(InitData) + .ATTR(channel_name, String, "") + .OP_END_FACTORY_REG(InitData) + +REG_OP(GetNext) + .DYNAMIC_OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, + DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_BOOL})) + .ATTR(output_types, ListInt, {}) + .ATTR(output_shapes, ListListInt, {}) + .ATTR(output_num, Int, 1) + .ATTR(channel_name, String, "") + .OP_END_FACTORY_REG(GetNext) +} // namespace ge + +#endif // GE_OP_MATH_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/matrix_calculation_ops.h b/third_party/fwkacllib/inc/ops/matrix_calculation_ops.h new file mode 100755 index 
00000000..53371b31 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/matrix_calculation_ops.h @@ -0,0 +1,367 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_MATRIX_CALCULATION_OPS_H +#define GE_OP_MATRIX_CALCULATION_OPS_H + +#include "../graph/operator_reg.h" + +namespace ge { + +/** +*@brief Multiplies matrix "a" by matrix "b", producing "a * b". + +*@par Inputs: +*Two inputs, including: +* @li x1: A matrix Tensor. 2D. Must be one of the following types: float16, +* float32, int32. Has format [ND, NHWC, FRACTAL_NZ]. +* @li x2: A matrix Tensor. 2D. Must be one of the following types: float16, +* float32, int32. Has format [ND, NHWC, FRACTAL_NZ]. +* @li bias: A 1D Tensor. Must be one of the following types: float16, +* float32, int32. Has format [ND, NHWC]. + +*@par Attributes: +*@li transpose_a: A bool. If True, changes the shape of "x1" from [M, K] to [K, M]. +*@li transpose_b: A bool. If True, changes the shape of "x2" from [M, K] to [K, M]. + +*@par Outputs: +*y: The result matrix Tensor. 2D. Must be one of the following types: float16, +* float32, int32. Has format [ND, NHWC, FRACTAL_NZ]. 
+*/ +REG_OP(MatMul) + .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OPTIONAL_INPUT(bias, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .ATTR(transpose_a, Bool, false) + .ATTR(transpose_b, Bool, false) + .OP_END_FACTORY_REG(MatMul) + +REG_OP(MatMulV2) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT16, DT_INT8, DT_INT8})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT16, DT_INT8, DT_INT8})) + .INPUT(alpha, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_FLOAT})) + .INPUT(beta, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_FLOAT})) + .INPUT(bias, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_FLOAT})) + .OP_END_FACTORY_REG(MatMulV2) + +/** +*@brief Multiplies matrix "a" by matrix "b", producing "a * b". + +*@par Inputs: +*Three inputs, including: +* @li x1: A matrix Tensor. Must be one of the following types: float16, +* float32, int32. 2D or higher. Has format [ND, NHWC, FRACTAL_NZ]. +* @li x2: A matrix Tensor. Must be one of the following types: float16, +* float32, int32. 2D or higher. Has format [ND, NHWC, FRACTAL_NZ]. + +*@par Attributes: +*@li adj_x: A bool. If True, changes the shape of "x1" from [B, M, K] to [B, K, M]. +*@li adj_y: A bool. If True, changes the shape of "x2" from [B, M, K] to [B, K, M]. + +*@par Outputs: +*y: The result matrix Tensor. 2D or higher. Must be one of the following types: float16, +* float32, int32. 2D or higher. Has format [ND, NHWC, FRACTAL_NZ]. Has the same shape length as "x1" and "x2". 
+*/ +REG_OP(BatchMatMul) + .INPUT(x1, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .INPUT(x2, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .ATTR(adj_x, Bool, false) + .ATTR(adj_y, Bool, false) + .OP_END_FACTORY_REG(BatchMatMul) + +REG_OP(MeanCCE) + .INPUT(x, TensorType::ALL()) + .INPUT(indices, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .ATTR(keep_dims, Bool, false) + .ATTR(value1, ListInt, {}) + .ATTR(mode, Int, 3) // 0:max pooling or 1:avg pooling + .ATTR(pad_mode, Int, 0) + .ATTR(global_pooling, Bool, true) + .ATTR(window, ListInt, {1,1}) // kernel size + .ATTR(pad, ListInt, {0,0,0,0}) // pad size + .ATTR(stride, ListInt, {1,1}) // stride size + .ATTR(ceil_mode, Int, 0) + .ATTR(data_mode, Int, 1) + .ATTR(nan_opt, Int, 0) + .ATTR(fomart, Int, 0) + .OP_END_FACTORY_REG(MeanCCE) + +REG_OP(MeanGrad) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .ATTR(mode, Int, 1) // 0:max pooling or 1:avg pooling + .ATTR(pad_mode, Int, 0) + .ATTR(global_pooling, Bool, false) + .ATTR(window, ListInt, {1,1}) // kernel size + .ATTR(pad, ListInt, {0,0,0,0}) // pad size + .ATTR(stride, ListInt, {1,1}) // stride size + .ATTR(ceil_mode, Int, 0) + .ATTR(data_mode, Int, 1) + .ATTR(nan_opt, Int, 0) + .ATTR(mean_grad_output_shape_value, ListInt, {1,1,1,1}) + .ATTR(mean_grad_output_shape_format, Int, 1) //must be NHWC + .OP_END_FACTORY_REG(MeanGrad) + +REG_OP(MatMulCCE) + .INPUT(x1, TensorType({DT_FLOAT})) + .INPUT(x2, TensorType({DT_FLOAT})) + .OPTIONAL_INPUT(x3, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(transpose_a, Bool, false) + .ATTR(transpose_b, Bool, false) + .ATTR(has_bias, Bool, false) + .OP_END_FACTORY_REG(MatMulCCE) + +/** +*@brief Computes half the L2 norm of a tensor without the sqrt. + +*@par Inputs: + +* x: A Tensor. +* TensorType::FloatingDataType(). + +*@par Outputs: +*y: A Tensor. Has the same type as "x". 
+*/ +REG_OP(L2Loss) + .INPUT(x, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(L2Loss) + +REG_OP(MatrixDiag) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(MatrixDiag) + +REG_OP(MatrixDiagD) + .INPUT(x, TensorType::BasicType()) + .INPUT(assist, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(MatrixDiagD) + +REG_OP(MatrixDiagPart) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(MatrixDiagPart) + +REG_OP(MatrixDiagPartD) + .INPUT(x, TensorType::BasicType()) + .INPUT(assist, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(MatrixDiagPartD) + +REG_OP(MatrixSetDiag) + .INPUT(x, TensorType::BasicType()) + .INPUT(diagonal, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(MatrixSetDiag) + +REG_OP(MatrixSetDiagD) + .INPUT(x, TensorType::BasicType()) + .INPUT(diagonal, TensorType::BasicType()) + .INPUT(assist, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(MatrixSetDiagD) + +REG_OP(ScatterNdUpdate) + .INPUT(var, TensorType::BasicType()) + .INPUT(indices, TensorType::IndexNumberType()) + .INPUT(updates, TensorType::BasicType()) + .OUTPUT(var, TensorType::BasicType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterNdUpdate) + +REG_OP(ScatterAdd) + .INPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .INPUT(indices, TensorType::IndexNumberType()) + .INPUT(updates, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OUTPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterAdd) + +REG_OP(ScatterDiv) + .INPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(updates, TensorType({DT_FLOAT16, 
DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OUTPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterDiv) + +REG_OP(ScatterNdAdd) + .INPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .INPUT(indices, TensorType::IndexNumberType()) + .INPUT(updates, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OUTPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterNdAdd) + +REG_OP(ScatterNdSub) + .INPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .INPUT(indices, TensorType::IndexNumberType()) + .INPUT(updates, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OUTPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterNdSub) + +REG_OP(ScatterSub) + .INPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .INPUT(indices, TensorType::IndexNumberType()) + .INPUT(updates, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OUTPUT(var, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterSub) + +REG_OP(DiagPartD) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .INPUT(assist, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OP_END_FACTORY_REG(DiagPartD) + +REG_OP(DiagPart) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT64, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT64, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(DiagPart) + +REG_OP(InnerProduct) + .INPUT(x, TensorType({DT_FLOAT16, DT_INT8})) + .INPUT(w, TensorType({DT_FLOAT16, DT_INT8})) + .OPTIONAL_INPUT(b, TensorType({DT_FLOAT16, DT_INT32})) + 
.OPTIONAL_INPUT(offset_w, TensorType({DT_INT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_INT32})) + .REQUIRED_ATTR(num_output, Int) + .ATTR(transpose, Bool, false) + .ATTR(bias_term, Bool, true) + .ATTR(axis, Int, 1) + .ATTR(offset_a, Int, 0) + .OP_END_FACTORY_REG(InnerProduct) + +REG_OP(ConfusionMatrix) + .INPUT(labels, TensorType({DT_FLOAT, DT_INT32, DT_FLOAT16, DT_INT8, DT_UINT8})) + .INPUT(predictions, TensorType({DT_FLOAT, DT_INT32, DT_FLOAT16, DT_INT8, DT_UINT8})) + .OPTIONAL_INPUT(weights, TensorType({DT_FLOAT, DT_INT32, DT_FLOAT16, DT_INT8, DT_UINT8})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_FLOAT16, DT_INT8, DT_UINT8})) + .REQUIRED_ATTR(num_classes, Int) + .REQUIRED_ATTR(dtype, String) + .OP_END_FACTORY_REG(ConfusionMatrix) + +REG_OP(ScatterMul) + .INPUT(var, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(updates, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OUTPUT(var, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterMul) + +REG_OP(ScatterMin) + .INPUT(var, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(updates, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32})) + .OUTPUT(var, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterMin) + +REG_OP(ScatterMax) + .INPUT(var, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(updates, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32})) + .OUTPUT(var, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterMax) + +REG_OP(SparseApplyAdagrad) + .INPUT(var, TensorType({DT_FLOAT})) + .INPUT(accum, TensorType({DT_FLOAT})) + .INPUT(lr, TensorType({DT_FLOAT})) + .INPUT(grad, TensorType({DT_FLOAT})) + .INPUT(indices, TensorType({DT_INT32})) + .OUTPUT(var, TensorType({DT_FLOAT})) 
+ .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(SparseApplyAdagrad) + +REG_OP(SparseApplyAdagradD) + .INPUT(var, TensorType({DT_FLOAT})) + .INPUT(accum, TensorType({DT_FLOAT})) + .INPUT(grad, TensorType({DT_FLOAT})) + .INPUT(indices, TensorType({DT_INT32})) + .OUTPUT(var, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(lr, Float) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(SparseApplyAdagradD) + +REG_OP(ScatterUpdate) + .INPUT(var, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT8,DT_UINT8})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(updates, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT8,DT_UINT8})) + .OUTPUT(var, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT8,DT_UINT8})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ScatterUpdate) + +/** +*@brief Update relevant entries in '*var' according to the Ftrl-proximal scheme. + +*@par Inputs: +* Four inputs, including: +*@li var: An NCHW, NHWC, or ND Tensor of type float32. +*@li accum: An NCHW, NHWC, or ND Tensor of type float32. +*@li grad: An NCHW, NHWC, or ND Tensor of type float32. +*@li indices: An NCHW, NHWC, or ND Tensor of type int32. + +*@par Attributes: +*@li lr: Required, used for computation. +*@li use_locking: An optional bool. Defaults to "False". If "True", the operation will be protected by a lock. + +*@par Outputs: +*var: A Tensor. Has the same type and format as input "var". 
+*/ +REG_OP(SparseApplyFtrlV2) + .INPUT(var, TensorType({DT_FLOAT})) + .INPUT(accum, TensorType({DT_FLOAT})) + .INPUT(linear, TensorType({DT_FLOAT})) + .INPUT(grad, TensorType({DT_FLOAT})) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(lr, TensorType({DT_FLOAT})) + .INPUT(l1, TensorType({DT_FLOAT})) + .INPUT(l2, TensorType({DT_FLOAT})) + .INPUT(l2_shrinkage, TensorType({DT_FLOAT})) + .INPUT(lr_power, TensorType({DT_FLOAT})) + .OUTPUT(var, TensorType({DT_FLOAT})) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(SparseApplyFtrlV2) + +REG_OP(SparseApplyFtrlV2D) + .INPUT(var, TensorType({DT_FLOAT})) + .INPUT(accum, TensorType({DT_FLOAT})) + .INPUT(linear, TensorType({DT_FLOAT})) + .INPUT(grad, TensorType({DT_FLOAT})) + .INPUT(indices, TensorType({DT_INT32})) + .OUTPUT(var, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(lr, Float) + .REQUIRED_ATTR(l1, Float) + .REQUIRED_ATTR(l2, Float) + .REQUIRED_ATTR(l2_shrinkage, Float) + .REQUIRED_ATTR(lr_power, Float) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(SparseApplyFtrlV2D) + +} // namespace ge + +#endif // GE_OP_MATRIX_CALCULATION_OPS_H diff --git a/third_party/fwkacllib/inc/ops/mvn_ops.h b/third_party/fwkacllib/inc/ops/mvn_ops.h new file mode 100755 index 00000000..c612c45c --- /dev/null +++ b/third_party/fwkacllib/inc/ops/mvn_ops.h @@ -0,0 +1,51 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + #ifndef GE_OP_MVN_OPS_H + #define GE_OP_MVN_OPS_H + + #include "graph/operator_reg.h" + + namespace ge { +/** +*@brief Normalizes the input. + +*@par Inputs: +* One input: +*x: An NCHW tensor of type float16 or float32. + +*@par Attributes: +*@li normalize_variance: An optional bool specifying whether to normalize the variance, either "true" (default) or "false". +*@li across_channels: An optional bool specifying whether to perform across-channel MVN, either "true" or "false" (default). +*@li eps: An optional float32 epsilon for not dividing by zero. Defaults to "1e-9". + +*@par Outputs: +*y: An NCHW tensor of type float16 or float32. + +*@attention Constraints:\n +* The input tensor must have the NCHW format, whose shape length must be 4. +*/ + + REG_OP(MVN) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) /* "First operand." */ + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) /* "Result, has same element type as inputs" */ + .ATTR(normalize_variance, Bool, true) + .ATTR(across_channels, Bool, false) + .ATTR(eps, Float, 1e-9) + .OP_END_FACTORY_REG(MVN) + } // namespace ge + + #endif // GE_OP_MVN_OPS_H diff --git a/third_party/fwkacllib/inc/ops/nn_batch_norm_ops.h b/third_party/fwkacllib/inc/ops/nn_batch_norm_ops.h new file mode 100755 index 00000000..bdfb9f4c --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nn_batch_norm_ops.h @@ -0,0 +1,167 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_NN_BATCH_NORM_OPS_H +#define GE_OP_NN_BATCH_NORM_OPS_H + +#include "../graph/operator_reg.h" + +namespace ge { + +REG_OP(FusedBatchNorm) + .INPUT(x, TensorType{DT_FLOAT}) + .INPUT(scale, TensorType{DT_FLOAT}) + .INPUT(b, TensorType{DT_FLOAT}) + .INPUT(mean, TensorType{DT_FLOAT}) + .INPUT(variance, TensorType{DT_FLOAT}) + .OUTPUT(y, TensorType{DT_FLOAT}) + .OUTPUT(running_mean, TensorType{DT_FLOAT}) + .OUTPUT(running_variance, TensorType{DT_FLOAT}) + .OUTPUT(save_mean, TensorType{DT_FLOAT}) + .OUTPUT(save_inv_variance, TensorType{DT_FLOAT}) + .OUTPUT(save_inv_variance1, TensorType{DT_FLOAT}) + .ATTR(mode, Int, 1) + .ATTR(epsilon, Float, 1e-5f) + .ATTR(momentum, Float, 0.9) + .ATTR(is_training, Bool, true) + .ATTR(is_training_fusion, Bool, true) + .ATTR(moving_average_fraction, Float, 0.00300002098) + .OP_END_FACTORY_REG(FusedBatchNorm) + +REG_OP(FusedBatchNormGrad) + .INPUT(dy, TensorType{DT_FLOAT}) + .INPUT(x, TensorType{DT_FLOAT}) + .INPUT(scale, TensorType{DT_FLOAT}) + .INPUT(save_mean, TensorType{DT_FLOAT}) + .INPUT(save_inv_variance, TensorType{DT_FLOAT}) + .INPUT(save_inv_variance1, TensorType{DT_FLOAT}) + .OUTPUT(dx, TensorType{DT_FLOAT}) + .OUTPUT(bn_scale, TensorType{DT_FLOAT}) + .OUTPUT(bn_bias, TensorType{DT_FLOAT}) + .ATTR(epsilon, Float, 0.0) + .ATTR(momentum, Float, 0.0) + .OP_END_FACTORY_REG(FusedBatchNormGrad) + +REG_OP(L2Normalize) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(axis, ListInt, {}) + .ATTR(eps, Float, 1e-4) + .OP_END_FACTORY_REG(L2Normalize) + +REG_OP(L2NormalizeGrad) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(dy, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(dx, TensorType({DT_FLOAT, DT_FLOAT16})) + .ATTR(dim, ListInt, {}) + .ATTR(eps, Float, 0.0001) + .OP_END_FACTORY_REG(L2NormalizeGrad) 
+ +REG_OP(BatchNorm) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(scale, TensorType({DT_FLOAT})) + .INPUT(offset, TensorType({DT_FLOAT})) + .OPTIONAL_INPUT(mean, TensorType({DT_FLOAT})) + .OPTIONAL_INPUT(variance, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(batch_mean, TensorType({DT_FLOAT})) + .OUTPUT(batch_variance, TensorType({DT_FLOAT})) + .OUTPUT(reserve_space_1, TensorType({DT_FLOAT})) + .OUTPUT(reserve_space_2, TensorType({DT_FLOAT})) + .OUTPUT(reserve_space_3, TensorType({DT_FLOAT})) + .ATTR(epsilon, Float, 0.0001) + .ATTR(data_format, String, "NHWC") + .ATTR(is_training, Bool, true) + .OP_END_FACTORY_REG(BatchNorm) + +REG_OP(BatchNormExt2) + .INPUT(input_x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(input_scale, TensorType({DT_FLOAT})) + .INPUT(input_offset, TensorType({DT_FLOAT})) + .OPTIONAL_INPUT(input_mean, TensorType({DT_FLOAT})) + .OPTIONAL_INPUT(input_variance, TensorType({DT_FLOAT})) + .OUTPUT(output_y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(output_mean, TensorType({DT_FLOAT})) + .OUTPUT(output_variance, TensorType({DT_FLOAT})) + .OUTPUT(output_reserve_space_1, TensorType({DT_FLOAT})) + .OUTPUT(output_reserve_space_2, TensorType({DT_FLOAT})) + .ATTR(epsilon, Float, 0.0001) + .ATTR(data_format, String, "NHWC") + .ATTR(is_training, Bool, true) + .OP_END_FACTORY_REG(BatchNormExt2) + +REG_OP(BatchNormGrad) + .INPUT(y_backprop, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(scale, TensorType({DT_FLOAT})) + .INPUT(reserve_space_1, TensorType({DT_FLOAT})) + .INPUT(reserve_space_2, TensorType({DT_FLOAT})) + .INPUT(reserve_space_3, TensorType({DT_FLOAT})) + .OUTPUT(x_backprop, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(scale_backprop, TensorType({DT_FLOAT})) + .OUTPUT(offset_backprop, TensorType({DT_FLOAT})) + .OUTPUT(reserve_space_4, TensorType({DT_FLOAT})) + .OUTPUT(reserve_space_5, TensorType({DT_FLOAT})) + .ATTR(epsilon, Float, 0.0001) + 
.ATTR(data_format, String, "NHWC") + .ATTR(is_training, Bool, true) + .OP_END_FACTORY_REG(BatchNormGrad) + +REG_OP(BatchNormGradExt2) + .INPUT(y_backprop, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(scale, TensorType({DT_FLOAT})) + .INPUT(reserve_space_1, TensorType({DT_FLOAT})) + .INPUT(reserve_space_2, TensorType({DT_FLOAT})) + .ATTR(epsilon, Float, 0.0001) + .ATTR(data_format, String, "NHWC") + .ATTR(is_training, Bool, true) + .OUTPUT(x_backprop, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(scale_backprop, TensorType({DT_FLOAT})) + .OUTPUT(offset_backprop, TensorType({DT_FLOAT})) + .OUTPUT(reserve_space_3, TensorType({DT_FLOAT})) + .OUTPUT(reserve_space_4, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(BatchNormGradExt2) + +REG_OP(BninferenceD) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mean, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(variance, TensorType({DT_FLOAT16,DT_FLOAT})) + .OPTIONAL_INPUT(scale, TensorType({DT_FLOAT16,DT_FLOAT})) + .OPTIONAL_INPUT(b, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .ATTR(scale_factor, Float,0.999) + .ATTR(epsilon, Float,1e-5f) + .ATTR(moving_average_fraction, Float,0.999) + .ATTR(use_global_stats, Bool,true) + .OP_END_FACTORY_REG(BninferenceD) +REG_OP(Bninference) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(mean, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(variance, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(scale_factor, TensorType({DT_FLOAT16,DT_FLOAT})) + .OPTIONAL_INPUT(scale, TensorType({DT_FLOAT16,DT_FLOAT})) + .OPTIONAL_INPUT(b, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .ATTR(epsilon, Float,1e-5f) + .ATTR(moving_average_fraction, Float,0.999) + .ATTR(use_global_stats, Bool,true) + .OP_END_FACTORY_REG(Bninference) + +} // namespace ge + +#endif // GE_OP_NN_BATCH_NORM_OPS_H diff --git a/third_party/fwkacllib/inc/ops/nn_calculation_ops.h 
b/third_party/fwkacllib/inc/ops/nn_calculation_ops.h new file mode 100755 index 00000000..f2a70ada --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nn_calculation_ops.h @@ -0,0 +1,432 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_NN_CALCULATION_OPS_H +#define GE_OP_NN_CALCULATION_OPS_H + +#include "../graph/operator_reg.h" + +namespace ge { +/** +* @brief Computes the gradients of depthwise convolution with respect to the filter. + +* @par Inputs: +* Three inputs include: \n +* @li input: 4D origin shape of input tensor [N, C, H, W] or [N, H, W, C], support float16, float32, double +* @li filter_size: A 4D tensor of type int32, with shape [H, W, C, K] +* @li out_backprop: 4D tensor with shape [N, C, H, W] or [N, H, W, C]. Must be one of the following types: float16, float32, double. + +* @par Attributes: +* @li strides: The stride of the sliding window for height and width of input "x" of the convolution. +* Must be with shape [1, 1, stride_height, stride_width] or [1, stride_height, stride_width, 1]. +* @li dilations: The dilation factor for each dimension of input "x". If set to k > 1, there will be k-1 skipped cells between each +* filter element on that dimension. Must be with shape [1, 1, dilation_height, dilation_width] or [1, dilation_height, dilation_width, 1]. +* @li pads: Padding added to each dimension of the input. 
+* @li data_format: Input data format, either "NHWC" or "NCHW". + +* @par Outputs: +* filter_grad: Gradient of the deep convolution relative to the filter with shape [H, W, C, K]. Must be one of the following types: float16, float32, double. + +* @attention Constraints:\n +* The feature map is 4D with shape [N, C, Hi, Wi] or [N, Hi, Wi, C], but +* the data is 5D with shape [N, C1, Hi, Wi, C0], where C0 is 16.\n +* The filter is 4D with shape [Hf, Wf, C, K], but the data is 6D with shape [C1, Hf, Wf, K, Co, C0], +* where K is fixed at 1, and Co and C0 are 16.\n +* Output backprop is 4D with shape [N, C, Ho, Wo] or [N, Ho, Wo, C], but the data is 5D with shape [N, C1, Ho, Wo, C0], +* where C is the same as that of the feature map and C0 is 16.\n +* Limited by Tiling and L1 / L0 buffer memory: 512 * ceil(Wo, 16) + (480 * stride_h + 32 * filter_h) * ceil(Wi, 16) ≤ l1_size and Hf * Wf ≤ l0b_size/512.\n +*/ +REG_OP(DepthwiseConv2DBackpropFilter) + .INPUT(input, TensorType({DT_FLOAT16})) + .INPUT(filter_size, TensorType({DT_INT32, DT_INT64})) + .INPUT(out_backprop, TensorType({DT_FLOAT16})) + .OUTPUT(filter_grad, TensorType({DT_FLOAT})) + .ATTR(strides, ListInt, {1, 1, 1, 1}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .ATTR(pads, ListInt, {0, 0, 0, 0}) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(DepthwiseConv2DBackpropFilter) + +/** +* @brief Computes the gradients of depthwise convolution with respect to the filter. + +* @par Inputs: +* Two inputs include: \n +* @li input: 4D tensor with shape [N, C, H, W] or [N, H, W, C], of type float16 +* @li out_backprop: 4D tensor with shape [N, C, H, W] or [N, H, W, C], of type float16 + +* @par Attributes: +* @li filter_size: Shape of filter. +* @li strides: The stride of the sliding window for height and width of input "x" of the convolution. +* Must be with shape [1, 1, stride_height, stride_width] or [1, stride_height, stride_width, 1]. +* @li dilations: The dilation factor for each dimension of input "x". 
If set to k > 1, there will be k-1 skipped cells between each +* filter element on that dimension. Must be with shape [1, 1, dilation_height, dilation_width] or [1, dilation_height, dilation_width, 1]. +* @li pads: Padding added to each dimension of the input. +* @li data_format: Input data format, either "NHWC" or "NCHW". + +* @par Outputs: +* filter_grad: Gradient of the deep convolution relative to the filter with shape [H, W, C, K]. Must be of type float32. + +* @attention Constraints:\n +* The feature map is 4D with shape [N, C, Hi, Wi] or [N, Hi, Wi, C], but +* the data is 5D with shape [N, C1, Hi, Wi, C0], where C0 is 16.\n +* The filter is 4D with shape [Hf, Wf, C, K], but the data is 6D with shape [C1, Hf, Wf, K, Co, C0], +* where K is fixed at 1, and Co and C0 are 16.\n +* Output backprop is 4D with shape [N, C, Ho, Wo] or [N, Ho, Wo, C], but the data is 5D with shape [N, C1, Ho, Wo, C0], +* where C is the same as that of the feature map and C0 is 16.\n +* Limited by Tiling and L1 / L0 buffer memory: 512 * ceil(Wo, 16) + (480 * stride_h + 32 * filter_h) * ceil(Wi, 16) ≤ l1_size and Hf * Wf ≤ l0b_size/512.\n +*/ +REG_OP(DepthwiseConv2DBackpropFilterD) + .INPUT(input, TensorType({DT_FLOAT16})) + .INPUT(out_backprop, TensorType({DT_FLOAT16})) + .OUTPUT(filter_grad, TensorType({DT_FLOAT})) + .ATTR(filter_size, ListInt, {1, 1, 1, 1}) + .ATTR(strides, ListInt, {1, 1, 1, 1}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .ATTR(pads, ListInt, {0, 0, 0, 0}) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(DepthwiseConv2DBackpropFilterD) + +/** +* @brief Computes the gradients of depthwise convolution with respect to the input. + +* @par Inputs: +* Three inputs include: \n +* @li input_size: 4D shape of input tensor [N, C, H, W] or [N, H, W, C], support int32 +* @li filter: 4D filter tensor with shape of [H, W, C, K], support float16, float32, double +* @li out_backprop: 4D tensor with shape [N, C, H, W] or [N, H, W, C]. 
Must be one of the following types: float16, float32, double. + +* @par Attributes: +* @li strides: The stride of the sliding window for height and width of input "x" of the convolution. +* Must be with shape [1, 1, stride_height, stride_width] or [1, stride_height, stride_width, 1]. +* @li dilations: The dilation factor for each dimension of input "x". If set to k > 1, there will be k-1 skipped cells between each +* filter element on that dimension. Must be with shape [1, 1, dilation_height, dilation_width] or [1, dilation_height, dilation_width, 1]. +* @li pads: Padding added to each dimension of the input. +* @li data_format: Input data format, either "NHWC" or "NCHW". + +* @par Outputs: +* input_grad: Gradient of the deep convolution relative to the input with shape [N, C, H, W] or [N, H, W, C] Must be one of the following types: float16, float32, double. + +* @attention Constraints:\n +* The feature map is 4D with shape [N, C, Hi, Wi] or [N, Hi, Wi, C], but +* the data is 5D with shape [N, C1, Hi, Wi, C0], where C0 is 16.\n +* The filter is 4D with shape [Hf, Wf, C, K], but the data is 6D with shape [C1, Hf, Wf, K, Co, C0], +* where K is fixed at 1, and Co and C0 are 16.\n +* Output backprop is 4D with shape [N, C, Ho, Wo] or [N, Ho, Wo, C], but the data is 5D with shape [N, C1, Ho, Wo, C0], +* where C is the same as that of the feature map and C0 is 16.\n +* Limited by Tiling: max_h_in_l1 ≥ C0, where max_h_in_l1 = (l1_size - Hf*Wf*C0*C0*2) / (2* Wo *C0).\n +*/ +REG_OP(DepthwiseConv2DBackpropInput) + .INPUT(input_size, TensorType({DT_INT32, DT_INT64})) + .INPUT(filter, TensorType({DT_FLOAT16})) + .INPUT(out_backprop, TensorType({DT_FLOAT16})) + .OUTPUT(input_grad, TensorType({DT_FLOAT16})) + .ATTR(strides, ListInt, {1, 1, 1, 1}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .ATTR(pads, ListInt, {0, 0, 0, 0}) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(DepthwiseConv2DBackpropInput) + +/** +* @brief Computes the gradients of depthwise convolution 
with respect to the input. + +* @par Inputs: +* Two inputs include: \n +* @li filter: A 4D tensor of type float16, with shape [H, W, C, K] +* @li out_backprop: 4D tensor with shape [N, C, H, W] or [N, H, W, C], of type float16 + +* @par Attributes: +* @li input_size: The origin shape of input. +* @li strides: The stride of the sliding window for height and width of input "x" of the convolution. +* Must be with shape [1, 1, stride_height, stride_width] or [1, stride_height, stride_width, 1]. +* @li dilations: The dilation factor for each dimension of input "x". If set to k > 1, there will be k-1 skipped cells between each +* filter element on that dimension. Must be with shape [1, 1, dilation_height, dilation_width] or [1, dilation_height, dilation_width, 1]. +* @li pads: Padding added to each dimension of the input. +* @li data_format: Input data format, either "NHWC" or "NCHW". + +* @par Outputs: +* input_grad: Gradient of the deep convolution relative to the input with shape [N, C, H, W] or [N, H, W, C]. Must be of type float16. 
+ +* @attention Constraints:\n +* The feature map is 4D with shape [N, C, Hi, Wi] or [N, Hi, Wi, C], but +* the data is 5D with shape [N, C1, Hi, Wi, C0], where C0 is 16.\n +* The filter is 4D with shape [Hf, Wf, C, K], but the data is 6D with shape [C1, Hf, Wf, K, Co, C0], +* where K is fixed at 1, and Co and C0 are 16.\n +* Output backprop is 4D with shape [N, C, Ho, Wo] or [N, Ho, Wo, C], but the data is 5D with shape [N, C1, Ho, Wo, C0], +* where C is the same as that of the feature map and C0 is 16.\n +* Limited by Tiling: max_h_in_l1 ≥ C0, where max_h_in_l1 = (l1_size - Hf*Wf*C0*C0*2) / (2* Wo *C0).\n +*/ +REG_OP(DepthwiseConv2DBackpropInputD) + .INPUT(filter, TensorType({DT_FLOAT16})) + .INPUT(out_backprop, TensorType({DT_FLOAT16})) + .OUTPUT(input_grad, TensorType({DT_FLOAT16})) + .ATTR(input_size, ListInt, {1, 1, 1, 1}) + .ATTR(strides, ListInt, {1, 1, 1, 1}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .ATTR(pads, ListInt, {0, 0, 0, 0}) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(DepthwiseConv2DBackpropInputD) + +/** +*@brief Computes a 2D deep convolution given a 4D input tensor and a filter tensor. + +*@par Inputs: +*Two required inputs and two optional inputs, including: \n +* @li x: A 4D tensor of type float16, with shape [N, C, H, W] or [N, H, W, C] +* @li filter: A 4D tensor of type float16, with shape [H, W, C, K] +* @li bias: An optional tensor of type int8 +* @li offset_w: An optional float16, used for quantized inference + +* @par Attributes: +* @li strides: The stride of the sliding window for height and width of input "x" of the convolution. +* Must be with shape [1, 1, stride_height, stride_width] or [1, stride_height, stride_width, 1]. +* @li dilations: The dilation factor for each dimension of input "x". If set to k > 1, there will be k-1 skipped cells between each +* filter element on that dimension. Must be with shape [1, 1, dilation_height, dilation_width] or [1, dilation_height, dilation_width, 1]. 
+* @li pads: Padding added to each dimension of the input. +* @li data_format: Input data format, either "NHWC" or "NCHW". +* @li offset_a: Input offset, used for quantized inference. + +* @par Outputs: +* y: 4D tensor of type float16, with shape [N, C, H, W] or [N, H, W, C] + +* @attention Constraints:\n +* The feature map is 4D with shape [N, C, Hi, Wi] or [N, Hi, Wi, C], but +* the data is 5D with shape [N, C1, Hi, Wi, C0], where C0 is 16.\n +* The filter is 4D with shape [Hf, Wf, C, K], but the data is 6D with shape [C1, Hf, Wf, K, Co, C0], +* where K is fixed at 1, and Co and C0 are 16.\n +* Limited by the size of L1 buffer memory: \n +* (l1_size - filter_h*filter_w*BLOCK_SIZE*BLOCK_SIZE*data_size) // (Wi*BLOCK_SIZE*data_size) >= (BLOCK_SIZE*strides_h + filter_h - strides_h).\n +*/ +REG_OP(DepthwiseConv2D) + .INPUT(x, TensorType({DT_FLOAT16})) + .INPUT(filter, TensorType({DT_FLOAT16})) + .OPTIONAL_INPUT(bias, TensorType({DT_INT8})) + .OPTIONAL_INPUT(offset_w, TensorType({DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT16})) + .ATTR(strides, ListInt, {}) + .ATTR(dilations, ListInt, {}) + .ATTR(pads, ListInt, {0, 0, 0, 0}) + .ATTR(data_format, String, "NHWC") + .ATTR(offset_a, Int, 0) + .OP_END_FACTORY_REG(DepthwiseConv2D) + +REG_OP(Conv2DCCE) + .INPUT(x, TensorType{DT_FLOAT}) // The input tensor + .INPUT(w, TensorType({DT_FLOAT, DT_INT8})) // The weight tensor ,If QuantType =1 ,shall use type""tensor(int8) + .OPTIONAL_INPUT(b, TensorType{DT_FLOAT}) // Optional 1D bias to be added to the convolution, has size of M. + .OUTPUT(y, TensorType{DT_FLOAT}) // The output tensor + .ATTR(mode, Int, 1) + .ATTR(group, Int, 1) // number of groups input channels and output channels are divided into + .ATTR(num_output, Int, 0) // number of output tensor + .ATTR(pad, ListInt, {0, 0, 0, 0}) // Padding for the beginning and ending along each axis + .ATTR(kernel, ListInt, {0, 0}) + .ATTR(stride, ListInt, {1, 1}) // Stride along each axis. 
+ .ATTR(dilation, ListInt, {1, 1}) // dilation value along each axis of the filter. + .ATTR(pad_mode, Int, 0) // pad mode, 0:NOTSET, 1:SAME_UPPER, SAME_LOWER or 2:VALID. Default value is 0:NOTSET + .ATTR(algo, Int, 2) + .OP_END_FACTORY_REG(Conv2DCCE) + +REG_OP(Conv2DBackpropFilterCCE) + .INPUT(x, TensorType{DT_FLOAT}) + .INPUT(filter_sizes, TensorType{DT_INT8}) + .INPUT(out_backprop, TensorType{DT_FLOAT}) + .OUTPUT(y, TensorType{DT_FLOAT}) + .ATTR(conv_grad_filter_output_shape, ListInt, {0, 0, 0, 0}) + .ATTR(mode, Int, 1) + .ATTR(group, Int, 1) + .ATTR(pad, ListInt, {0, 0, 0, 0}) + .ATTR(stride, ListInt, {1, 1}) + .ATTR(dilation, ListInt, {1, 1}) + .ATTR(padding, Int, 0) //pad_mode:same valid + .ATTR(algo, Int, 0) + .OP_END_FACTORY_REG(Conv2DBackpropFilterCCE) + +REG_OP(Conv2DBackpropInputCCE) + .INPUT(input_sizes, TensorType{DT_INT8}) + .INPUT(filter, TensorType{DT_FLOAT}) + .INPUT(out_backprop, TensorType{DT_FLOAT}) + .OUTPUT(output, TensorType{DT_FLOAT}) + .ATTR(conv_grad_input_output_shape, ListInt, {0, 0, 0, 0}) + .ATTR(mode, Int, 1) + .ATTR(format, Int, 0) + .ATTR(group, Int, 1) + .ATTR(pad_mode, Int, 0) + .ATTR(stride, ListInt, {1, 1}) + .ATTR(dilation, ListInt, {1, 1}) + .ATTR(pad, ListInt, {0, 0, 0, 0}) + .ATTR(algo, Int, 0) + .OP_END_FACTORY_REG(Conv2DBackpropInputCCE) + +/** +*@brief Performs the backward operation for "BiasAdd" on the "bias" tensor. +* It accumulates all the values from out_backprop into the feature +* dimension. For NHWC data format, the feature dimension is the last. +* For NCHW data format, the feature dimension is the third-to-last. + +*@par Inputs: +*x: A Tensor of type TensorType::NumberType(). + +*@par Attributes: +*data_format: Data format. Defaults to "NHWC". + +*@par Outputs: +*y: A Tensor. Has the same type as "x". 
+*/ +REG_OP(BiasAddGrad) + .INPUT(x, TensorType::NumberType()) + .OUTPUT(y, TensorType::NumberType()) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(BiasAddGrad) + +/** +*@brief Computes the gradients of convolution with respect to the input. +*@par Inputs: + * Three inputs: + * @li input_sizes: A Tensor of type int32. An integer vector representing the shape of input, + * where input is a 4-D tensor [batch, height, width, channels] or [batch, channels, height, width]. + * @li filters: A Tensor. Must be one of the following types: float16. + * 4-D with shape [filter_height, filter_width, in_channels, out_channels] + * or [out_channels, filter_height, filter_width, in_channels] or [out_channels, in_channel, filter_height, filter_width]. + * @li out_backprop: A Tensor. Must have the same type as filter. 4-D with shape [batch, out_height, out_width, out_channels] + * or [batch, out_channels, out_height, out_width]. Gradients with respect to the output of the convolution. +*@par Attributes: + * Three attributes: + * @li strides: A tuple/list of 2 integers. The stride of the sliding window for H/W dimension. + * @li pads: A tuple/list of 4 integers, [top, bottom, left, right] pads on feature map + * @li dilations: A tuple/list of 4 integers, The dilation factor for each dimension of input, now only support [1,1,1,1] +*@par Outputs: + * y: A Tensor. Has the same type as filter,and has same format as input_size +*/ +REG_OP(Conv2DBackpropInput) + .INPUT(input_sizes, TensorType({DT_INT32, DT_INT64})) + .INPUT(filters, TensorType{DT_FLOAT16}) + .INPUT(out_backprop, TensorType{DT_FLOAT16}) + .OUTPUT(y, TensorType{DT_FLOAT16}) + .REQUIRED_ATTR(strides, ListInt) + .ATTR(pads, ListInt, {1, 1, 1, 1}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .OP_END_FACTORY_REG(Conv2DBackpropInput) + +/** +*@brief Computes the gradients of convolution with respect to the input. +*@par Inputs: + * Two inputs: + * @li filters: A Tensor. Types is float16. 
+ * 4-D with shape [filter_height, filter_width, in_channels, out_channels] or [out_channels, filter_height, filter_width, in_channels] + * or [out_channels, in_channel, filter_height, filter_width]. + * @li out_backprop: A Tensor. Must have the same type as filter. 4-D with shape [batch, out_height, out_width, out_channels] + * or [batch, out_channels, out_height, out_width]. Gradients with respect to the output of the convolution. +*@par Attributes: + * Four attributes: + * @li input_size A Tensor of type int32. An integer vector representing the shape of input, + * where input is a 4-D tensor [batch, height, width, channels] or [batch, channels, height, width]. + * @li strides: A tuple/list of 2 integers. The stride of the sliding window for H/W dimension. + * @li pads: A tuple/list of 4 integers, [top, bottom, left, right] pads on feature map + * @li dilations: A tuple/list of 4 integers, The dilation factor for each dimension of input, now only support [1,1,1,1] +*@par Outputs: + * y: A Tensor. Has the same type as filter,4-D tensor [batch, height, width, channels] or [batch, channels, height, width]. 
+*/ +REG_OP(Conv2DBackpropInputD) + .INPUT(filters, TensorType{DT_FLOAT16}) + .INPUT(out_backprop, TensorType{DT_FLOAT16}) + .OUTPUT(y, TensorType{DT_FLOAT16}) + .REQUIRED_ATTR(input_sizes, ListInt) + .REQUIRED_ATTR(strides, ListInt) + .ATTR(pads, ListInt, {1, 1, 1, 1}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .OP_END_FACTORY_REG(Conv2DBackpropInputD) + +REG_OP(Deconvolution) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(filter, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OPTIONAL_INPUT(bias, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(strides, ListInt, {1, 1, 1, 1}) + .ATTR(pads, ListInt, {0, 0, 0, 0}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .OP_END_FACTORY_REG(Deconvolution) +/** +*@brief Computes the gradients of convolution with respect to the filter +*@par Inputs: + * Three inputs: + * @li x: A Tensor. Must be one of the following types: float16. + * 4-D with shape [batch, in_height, in_width, in_channels] or [batch, in_channels, in_height, in_width]. + * @li filter_sizes: A Tensor of type int32. An integer vector representing the tensor shape of filter, + * where filter is a 4-D tensor [filter_height, filter_width, in_channels, out_channels] + * or [out_channels, filter_height, filter_width, in_channels] or [out_channels, in_channel, filter_height, filter_width]. + * @li out_backprop: A Tensor. Must have the same type as x. 4-D with shape [batch, out_height, out_width, out_channels] + * or [batch, out_channels, out_height, out_width]. Gradients with respect to the output of the convolution. +*@par Attributes: + * Three attributes: + * @li strides: A tuple/list of 2 integers. The stride of the sliding window for H/W dimension. + * @li pads: A tuple/list of 4 integers, [top, bottom, left, right] pads on feature map. + * @li dilations: A tuple/list of 4 integers, The dilation factor for each dimension of input, now only support [1,1,1,1]. 
+*@par Outputs: + * y: A Tensor. Has the same type as x +*/ +REG_OP(Conv2DBackpropFilter) + .INPUT(x, TensorType{DT_FLOAT16}) + .INPUT(filter_sizes, TensorType({DT_INT32, DT_INT64})) + .INPUT(out_backprop, TensorType{DT_FLOAT16}) + .OUTPUT(y, TensorType{DT_FLOAT}) + .REQUIRED_ATTR(strides, ListInt) + .ATTR(pads, ListInt, {1, 1, 1, 1}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .OP_END_FACTORY_REG(Conv2DBackpropFilter) + +/** +*@brief Computes the gradients of convolution with respect to the filter. +*@par Inputs: + * Two inputs: + * @li x: A Tensor. Type is float16. + * 4-D with shape [batch, in_height, in_width, in_channels] or [batch, in_channels, in_height, in_width]. + * @li out_backprop: A Tensor. Must have the same type as x. 4-D with shape [batch, out_height, out_width, out_channels] + * or [batch, out_channels, out_height, out_width]. Gradients with respect to the output of the convolution. +*@par Attributes: + * Four attributes: + * @li filter_sizes: A Tensor of type integers. An integer vector representing the tensor shape of filter, + * where filter is a 4-D tensor [filter_height, filter_width, in_channels, out_channels] + * or [out_channels, filter_height, filter_width, in_channels] or [out_channels, in_channel, filter_height, filter_width]. + * @li strides: A tuple/list of 2 integers. The stride of the sliding window for H/W dimension. + * @li pads: A tuple/list of 4 integers, [top, bottom, left, right] pads on feature map + * @li dilations: A tuple/list of 4 integers, The dilation factor for each dimension of input, now only support [1,1,1,1]. +*@par Outputs: + * y: A Tensor. 
Has the same type as x +*/ +REG_OP(Conv2DBackpropFilterD) + .INPUT(x, TensorType{DT_FLOAT16}) + .INPUT(out_backprop, TensorType{DT_FLOAT16}) + .OUTPUT(y, TensorType{DT_FLOAT}) + .REQUIRED_ATTR(filter_sizes, ListInt) + .REQUIRED_ATTR(strides, ListInt) + .ATTR(pads, ListInt, {1, 1, 1, 1}) + .ATTR(dilations, ListInt, {1, 1, 1, 1}) + .OP_END_FACTORY_REG(Conv2DBackpropFilterD) + +REG_OP(Conv2D) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT8})) // the feature map tensor + .INPUT(filter, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT8})) // the filter tensor + .OPTIONAL_INPUT(bias, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32})) // optional 1D bias to be added to the conv2d + .OPTIONAL_INPUT(offset_w, TensorType({DT_INT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32})) // the output tensor + .ATTR(strides, ListInt, {1, 1, 1, 1}) // stride on H\W, format sensitive + .ATTR(pads, ListInt, {0, 0, 0, 0}) // top, bottom, left and right pads on feature map + .ATTR(dilations, ListInt, {1, 1, 1, 1}) // dilation on H\W, format sensitive + .ATTR(offset_a, Int, 0) + .OP_END_FACTORY_REG(Conv2D) + +} // namespace ge +#endif // GE_OP_NN_CALCULATION_OPS_H diff --git a/third_party/fwkacllib/inc/ops/nn_detect_ops.h b/third_party/fwkacllib/inc/ops/nn_detect_ops.h new file mode 100755 index 00000000..0a432efe --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nn_detect_ops.h @@ -0,0 +1,81 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_NN_DETECT_OPS_H_ +#define GE_OP_NN_DETECT_OPS_H_ + +#include "graph/operator_reg.h" +#include "graph/operator.h" + +namespace ge { + +REG_OP(BoundingBoxDecode) + .INPUT(rois, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(deltas, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(bboxes, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(means, ListFloat, {0.0, 0.0, 0.0, 0.0}) + .ATTR(stds, ListFloat, {1.0, 1.0, 1.0, 1.0}) + .REQUIRED_ATTR(max_shape, ListInt) + .ATTR(wh_ratio_clip, Float, 0.016) + .OP_END_FACTORY_REG(BoundingBoxDecode) + +REG_OP(BoundingBoxEncode) + .INPUT(anchor_box, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(ground_truth_box, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(delats, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(means, ListFloat, {0.0, 0.0, 0.0, 0.0}) + .ATTR(stds, ListFloat, {1.0, 1.0, 1.0, 1.0}) + .OP_END_FACTORY_REG(BoundingBoxEncode) + +REG_OP(CheckValid) + .INPUT(bbox_tensor, TensorType({DT_FLOAT16})) + .INPUT(img_metas, TensorType({DT_FLOAT16})) + .OUTPUT(valid_tensor, TensorType({DT_INT8})) + .OP_END_FACTORY_REG(CheckValid) + +REG_OP(Iou) + .INPUT(bboxes, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(gtboxes, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(overlap, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(mode, String, "iou") + .OP_END_FACTORY_REG(Iou) + +REG_OP(ROIAlignGrad) + .INPUT(ydiff, TensorType({DT_FLOAT})) + .INPUT(rois, TensorType({DT_FLOAT})) + .OPTIONAL_INPUT(rois_n, TensorType({DT_INT32})) + .OUTPUT(xdiff, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(xdiff_shape, ListInt) + .REQUIRED_ATTR(pooled_width, Int) + .REQUIRED_ATTR(pooled_height, Int) + .REQUIRED_ATTR(spatial_scale, Float) + .ATTR(sample_num, Int, 2) + .OP_END_FACTORY_REG(ROIAlignGrad) + +REG_OP(ROIAlign) + .INPUT(features, TensorType({DT_FLOAT})) + .INPUT(rois, TensorType({DT_FLOAT})) + .OPTIONAL_INPUT(rois_n, TensorType({DT_INT32})) + 
.OUTPUT(output, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(spatial_scale, Float) + .REQUIRED_ATTR(pooled_height, Int) + .REQUIRED_ATTR(pooled_width, Int) + .ATTR(sample_num, Int, 2) + .OP_END_FACTORY_REG(ROIAlign) + +} // namespace ge + +#endif // GE_OP_NN_DETECT_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/nn_norm_ops.h b/third_party/fwkacllib/inc/ops/nn_norm_ops.h new file mode 100755 index 00000000..5769f426 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nn_norm_ops.h @@ -0,0 +1,205 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_NN_NORM_OPS_H +#define GE_OP_NN_NORM_OPS_H + +#include "../graph/operator_reg.h" +namespace ge { +REG_OP(LogSoftmaxGrad) + .INPUT(grad, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(axis, ListInt, {-1}) + .OP_END_FACTORY_REG(LogSoftmaxGrad) + +REG_OP(SparseSoftmaxCrossEntropyWithLogitsCCE) + .INPUT(features, TensorType{DT_FLOAT}) + .INPUT(labels, TensorType{DT_FLOAT}) + .OUTPUT(out, TensorType{DT_FLOAT}) + .OUTPUT(non, TensorType{DT_FLOAT}) + .ATTR(cross_entropy_is_grad, Bool, 0) + .ATTR(cross_entropy_mode, Int, 1) + .ATTR(softmax_cross_entropy_lossscale_div_batch, Float, 1.0) + .OP_END_FACTORY_REG(SparseSoftmaxCrossEntropyWithLogitsCCE) + +/** +*@brief Computes sparse softmax cross entropy cost and gradients to backpropagate. 
+ +*@par Inputs: +*Two inputs, including: +* @li features: A Tensor. Must be one of the following types: half, float32, double. +* A "batch_size * num_classes" matrix. +* @li labels: A Tensor of the same type as "features". batch_size vector with values in [0, num_classes). + + +*@par Outputs: +*loss: A Tensor for per example loss (a "batch_size" vector). Has the same type as "features". +*backprop: A Tensor for the backpropagated gradients (a batch_size * num_classes matrix). Has the same type as "features". +*/ +REG_OP(SparseSoftmaxCrossEntropyWithLogits) + .INPUT(features, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(labels, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(loss, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(backprop, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(SparseSoftmaxCrossEntropyWithLogits) + +/** +*@brief Computes softmax cross entropy cost and gradients to backpropagate. + +*@par Inputs: +*Two inputs, including: +* @li features: A Tensor. Must be one of the following types: half, float32, double. +* A "batch_size * num_classes" matrix. +* @li labels: A Tensor of the same type as "features". A "batch_size * num_classes" matrix. + +*@par Outputs: +*loss: A Tensor for per example loss (a "batch_size" vector). Has the same type as "features". +*backprop: A Tensor for the backpropagated gradients (a batch_size * num_classes matrix). Has the same type as "features". 
+*/ +REG_OP(SoftmaxCrossEntropyWithLogits) + .INPUT(features, TensorType({DT_DOUBLE,DT_FLOAT16,DT_FLOAT})) + .INPUT(labels, TensorType({DT_DOUBLE,DT_FLOAT16,DT_FLOAT})) + .OUTPUT(loss, TensorType({DT_DOUBLE,DT_FLOAT16,DT_FLOAT})) + .OUTPUT(backprop, TensorType({DT_DOUBLE,DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(SoftmaxCrossEntropyWithLogits) + +REG_OP(SoftmaxGrad) + .INPUT(softmax, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .INPUT(grad_softmax, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OUTPUT(grad_x, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT32,DT_INT8,DT_UINT8})) + .OP_END_FACTORY_REG(SoftmaxGrad) + +REG_OP(SigmoidCrossEntropyWithLogitsGrad) + .INPUT(predict, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(target, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(dout, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(gradient, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(SigmoidCrossEntropyWithLogitsGrad) + +REG_OP(SigmoidCrossEntropyWithLogits) + .INPUT(predict, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(target, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(loss, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(SigmoidCrossEntropyWithLogits) + +REG_OP(SmoothL1Loss) + .INPUT(predict, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(label, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(loss, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(sigma, Float, 1.0) + .OP_END_FACTORY_REG(SmoothL1Loss) + +REG_OP(SmoothL1LossGrad) + .INPUT(predict, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(label, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(dout, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(gradient, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(sigma, Float, 1.0) + .OP_END_FACTORY_REG(SmoothL1LossGrad) + +REG_OP(BinaryCrossEntropy) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) + .OPTIONAL_INPUT(weight, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(output, TensorType({DT_FLOAT, 
DT_FLOAT16})) + .ATTR(reduction, String, "mean") + .OP_END_FACTORY_REG(BinaryCrossEntropy) + +REG_OP(BinaryCrossEntropyGrad) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(grad_output, TensorType({DT_FLOAT, DT_FLOAT16})) + .OPTIONAL_INPUT(weight, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(output, TensorType({DT_FLOAT, DT_FLOAT16})) + .ATTR(reduction, String, "mean") + .OP_END_FACTORY_REG(BinaryCrossEntropyGrad) + +/** +*@brief Applies the Softmax function to an n-dimensional input Tensor rescaling them \n so +that the elements of the n-dimensional output Tensor lie in the range [0,1] and sum to 1. + +*@par Inputs: +*One input: +*x: A mutable Tensor. Must be one of the following types: float16, +*float32, double. Should be a Variable Tensor. + +*@par Attributes: +*axis: A list of ints. The dimension softmax would be performed on. + +*@par Outputs: +*y: A Tensor. Has the same dimensionality and shape as the "x" with values in the range [0, 1]. Must be one of the following types: float16, float32, int32. +*/ +REG_OP(Softmax) + .INPUT(x, TensorType({DT_DOUBLE, DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_DOUBLE, DT_FLOAT16, DT_FLOAT})) + .ATTR(axis, ListInt, {-1}) + .OP_END_FACTORY_REG(Softmax) + +/** +*@brief Computes log softmax activations. + +*@par Inputs: +*One input: +* logits: A Tensor. Must be one of the following types: double, float16, float32. + +*@par Attributes: +* axis: An optional list of ints. Defaults to "{-1}". + +*@par Outputs: +* logsoftmax: A Tensor. Has the same type as "logits". 
+*/ +REG_OP(LogSoftmax) + .INPUT(logits, TensorType({DT_DOUBLE, DT_FLOAT16, DT_FLOAT})) + .OUTPUT(logsoftmax, TensorType({DT_DOUBLE, DT_FLOAT16, DT_FLOAT})) + .ATTR(axis, ListInt, {-1}) + .OP_END_FACTORY_REG(LogSoftmax) + +REG_OP(FusedBatchNormV2) + .INPUT(x, TensorType{DT_FLOAT}) /* Input data tensor from the previous operator"" */ + .INPUT(scale, TensorType{DT_FLOAT}) /* If spatial is true, the dimension of bias is (C) If spatial is false, the dimensions of scale are (C x D1 x ... x Dn)*/ + .INPUT(b, TensorType{DT_FLOAT}) /* If spatial is true, the dimension of bias is (C) If spatial is false, the dimensions of scale are (C x D1 x ... x Dn)*/ + .OPTIONAL_INPUT(mean, TensorType{DT_FLOAT}) /* If spatial is true, the dimension of the running mean (training) or the estimated mean (testing) is (C).If spatial is false, the dimensions of the running mean (training) or the estimated mean (testing) are (C x D1 x ... x Dn)*/ + .OPTIONAL_INPUT(variance, TensorType{DT_FLOAT}) /* If spatial is true, the dimension of the running variance(training) or the estimated variance (testing) is (C). If spatial is false, the dimensions of the running variance(training) or the estimated variance (testing) are (C x D1 x ... x Dn).*/ + .OUTPUT(y, TensorType{DT_FLOAT}) /* The output tensor of the same shape as X */ + .ATTR(momentum, Float, 0.9) // Factor used in computing the running mean and variance. 
+ .ATTR(epsilon, Float, 1e-5f) // The epsilon value to use to avoid division by zero + .ATTR(mode, Int, 1) // 1 means using "CC_BATCHNORM_SPATIAL"; 0 means using "CC_BATCHNORM_PER_ACTIVATION"; only support 1 now + .ATTR(use_global_stats, Bool, true) + .ATTR(alpha, Float, 1) + .ATTR(beta, Float, 0) + .OP_END_FACTORY_REG(FusedBatchNormV2) + +REG_OP(Scale) + .INPUT(x, TensorType{DT_FLOAT}) + .OPTIONAL_INPUT(w, TensorType{DT_FLOAT}) + .OPTIONAL_INPUT(b, TensorType{DT_FLOAT}) + .OUTPUT(y, TensorType{DT_FLOAT}) + .ATTR(bias_term, Bool, false) + .ATTR(axis, Int, 1) + .ATTR(num_axis, Int, 1) + .ATTR(alpha, Float, 1.0) + .ATTR(beta, Float, 0.0) + .OP_END_FACTORY_REG(Scale) + +REG_OP(ConfusionSoftmaxGrad) + .INPUT(grad, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OP_END_FACTORY_REG(ConfusionSoftmaxGrad) +} // namespace ge + +#endif //GE_OP_NN_NORM_OPS_H diff --git a/third_party/fwkacllib/inc/ops/nn_ops.h b/third_party/fwkacllib/inc/ops/nn_ops.h new file mode 100755 index 00000000..b76466e9 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nn_ops.h @@ -0,0 +1,88 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_NN_OPS_H_ +#define GE_OP_NN_OPS_H_ + +#include "graph/operator_reg.h" +#include "graph/operator.h" + +namespace ge { + +REG_OP(FractionalMaxPoolGrad) + .INPUT(orig_input, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .INPUT(orig_output, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .INPUT(out_backprop, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .INPUT(row_pooling_sequence, TensorType({ DT_INT64 })) + .INPUT(col_pooling_sequence, TensorType({ DT_INT64 })) + .OUTPUT(y, TensorType({ DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64 })) + .ATTR(overlapping, Bool, false) + .OP_END_FACTORY_REG(FractionalMaxPoolGrad) + +REG_OP(FractionalAvgPool) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .OUTPUT(row_pooling_sequence, TensorType({DT_INT64})) + .OUTPUT(col_pooling_sequence, TensorType({DT_INT64})) + .ATTR(pooling_ratio, ListFloat, {}) + .ATTR(pseudo_random, Bool, false) + .ATTR(overlapping, Bool, false) + .ATTR(deterministic, Bool, false) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(FractionalAvgPool) + +REG_OP(FractionalMaxPool) + .INPUT(x, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .OUTPUT(row_pooling_sequence, TensorType({DT_INT64})) + .OUTPUT(col_pooling_sequence, TensorType({DT_INT64})) + .ATTR(pooling_ratio, ListFloat, {}) + .ATTR(pseudo_random, Bool, false) + .ATTR(overlapping, Bool, false) + .ATTR(deterministic, Bool, false) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(FractionalMaxPool) + +REG_OP(NthElement) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(n, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .ATTR(reverse, 
Bool, false) + .OP_END_FACTORY_REG(NthElement) + +REG_OP(FractionalAvgPoolGrad) + .INPUT(orig_input_tensor_shape, TensorType({DT_INT64})) + .INPUT(out_backprop, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .INPUT(row_pooling_sequence, TensorType({DT_INT64})) + .INPUT(col_pooling_sequence, TensorType({DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .ATTR(overlapping, Bool, false) + .OP_END_FACTORY_REG(FractionalAvgPoolGrad) + +REG_OP(DataFormatVecPermute) + .INPUT(x, TensorType({ DT_INT32, DT_INT64 })) + .OUTPUT(y, TensorType({ DT_INT32, DT_INT64 })) + .ATTR(src_format, String, "NHWC") + .ATTR(dst_format, String, "NCHW") + .OP_END_FACTORY_REG(DataFormatVecPermute) + +} // namespace ge + +#endif // GE_OP_NN_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/nn_other_ops.h b/third_party/fwkacllib/inc/ops/nn_other_ops.h new file mode 100755 index 00000000..701296f8 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nn_other_ops.h @@ -0,0 +1,132 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_NN_OTHER_OPS_H +#define GE_OP_NN_OTHER_OPS_H +#include "../graph/operator_reg.h" + +namespace ge { +REG_OP(Erf) + .INPUT(x, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(Erf) + +REG_OP(Erfc) + .INPUT(x, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(Erfc) + +/** +*@brief This operation returns a rank 1 histogram counting the number of entries in `values` \n +* that fell into every bin.The bins are equal width and determined by the arguments \n +* 'value_range' and 'nbins'. \n + +*@par Inputs: +*Three inputs, including: \n +*@li x: A Tensor of type float32,float16,int32. +*@li range: A Tensor of type float32,float16,int32. +*@li nbins: A Tensor of type int32. + +*@par Attributes: +* dtype: An optional attribute. Defaults to "int32". + +*@par Outputs: +*y: A Tensor. A Tensor of type int32. +*/ +REG_OP(HistogramFixedWidth) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .INPUT(range, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .INPUT(nbins, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_INT32})) + .ATTR(dtype, String, "int32") + .OP_END_FACTORY_REG(HistogramFixedWidth) + +/** +*@brief This operation returns a rank 1 histogram counting the number of entries in `values` \n +* that fell into every bin.The bins are equal width and determined by the arguments \n +* 'value_range' and 'nbins'. \n + +*@par Inputs: +*Two inputs, including: \n +*@li x: A Tensor of type float32,float16,int32. +*@li range: A Tensor of type float32,float16,int32. + +*@par Attributes: +*@li dtype: An optional attribute. Defaults to "int32". +*@li nbins: A required attribute,the type is int32. + +*@par Outputs: +*y: A Tensor. A Tensor of type int32. 
+*/ +REG_OP(HistogramFixedWidthD) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .INPUT(range, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .OUTPUT(y, TensorType({DT_INT32})) + .REQUIRED_ATTR(nbins, Int) + .ATTR(dtype, String, "int32") + .OP_END_FACTORY_REG(HistogramFixedWidthD) + +REG_OP(LayerNorm) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(gamma, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(beta, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(mean, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(variance, TensorType({DT_FLOAT, DT_FLOAT16})) + .ATTR(begin_norm_axis, Int, 0) + .ATTR(begin_params_axis, Int, 0) + .OP_END_FACTORY_REG(LayerNorm) + +REG_OP(LayerNormGrad) + .INPUT(dy, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(variance, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(mean, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(gamma, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(pd_x, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(pd_gamma, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(pd_beta, TensorType({DT_FLOAT, DT_FLOAT16})) + .OP_END_FACTORY_REG(LayerNormGrad) + +REG_OP(LayerNormXBackprop) + .INPUT(dy, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(variance, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(mean, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(gamma, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(pd_x, TensorType({DT_FLOAT, DT_FLOAT16})) + .OP_END_FACTORY_REG(LayerNormXBackprop) + +REG_OP(LayerNormBetaGammaBackprop) + .INPUT(dy, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(variance, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(mean, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(pd_gamma, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(pd_beta, TensorType({DT_FLOAT, DT_FLOAT16})) + .REQUIRED_ATTR(shape_gamma, ListInt) + 
.OP_END_FACTORY_REG(LayerNormBetaGammaBackprop) + +REG_OP(DropOutDoMask) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(mask, TensorType({DT_UINT8})) + .INPUT(keep_prob, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) + .OP_END_FACTORY_REG(DropOutDoMask) + +} // namespace ge + +#endif // GE_OP_NN_OTHER_OPS_H diff --git a/third_party/fwkacllib/inc/ops/nn_pooling_ops.h b/third_party/fwkacllib/inc/ops/nn_pooling_ops.h new file mode 100644 index 00000000..905c7d5d --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nn_pooling_ops.h @@ -0,0 +1,359 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_NN_POOLING_OPS_H +#define GE_OP_NN_POOLING_OPS_H + +#include "../graph/operator_reg.h" +/** +*@brief Performs pooling on the input. +*@par Inputs: +*@li x: An NCHW tensor of type float16. +*@par Attributes: +*@li mode: An optional int32, specifying the pooling algorithm, either "1" (max pooling) or "0" (avg pooling). Defaults to "0". +*@li global_pooling: An optional bool. Defaults to "false". +*@li window: Optional, including: \n +*window[0]: An optional int32, specifying the window size along in the H dimension. The value range is [1, 32768]. Defaults to "1". \n +*window[1]: An optional int32, specifying the window size along in the W dimension. The value range is [1, 32768]. Defaults to "1". 
\n +*@li stride: Optional, including: \n +*stride[0]: An optional int32, specifying the stride along in the H dimension. The value range is [1, 63]. Defaults to "1". \n +*stride[1]: An optional int32, specifying the stride along in the W dimension. The value range is [1, 63]. Defaults to "1". \n +*@li pad: Optional, including: \n +*pad[0]: An optional int32, specifying the up padding. Defaults to "0". \n +*pad[1]: An optional int32, specifying the bottom padding. Defaults to "0". \n +*pad[2]: An optional int32, specifying the left padding. Defaults to "0". \n +*pad[3]: An optional int32, specifying the right padding. Defaults to "0". \n +*@li ceil_mode: An optional int32, either "0" (ceil mode) or "1" (floor mode). Defaults to "0". +*@par Outputs: +*y: An NCHW tensor of type float16. +*@attention Constraints:\n +*@li window[0] * window[1] < 256; +*/ +namespace ge { +REG_OP(Pooling) + .INPUT(x, TensorType({DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT16})) + .ATTR(mode, Int, 0) // 0:max pooling or 1:avg pooling + .ATTR(global_pooling, Bool, false) + .ATTR(window, ListInt, {1,1}) // kernel size + .ATTR(stride, ListInt, {1,1}) // stride size + .ATTR(pad, ListInt, {0,0,0,0}) // pad size + .ATTR(ceil_mode, Int, 0) + .OP_END_FACTORY_REG(Pooling) + +/** +*@brief Performs average pooling on the input. + +*@par Inputs: +*x: A tensor of type float16. + +*@par Attributes: +*@li ksize: A required list of 4 ints, specifying the size (N, C, H, and W) of the sliding window, where N = C = 1, and H and W are positive integers within the range [1, 32768]. +*@li strides: A required list of 4 ints, specifying the stride of the sliding window. The strides of the N and C dimensions are 1. The strides of the H and W dimensions are positive integers within the range [1, 63]. +*@li padding: A required string, specifying the padding algorithm, either "VALID" or "SAME". With "SAME" means that the outputs will have the same spatial dimensions as its inputs. With "VALID" means no padding. 
+*@li data_format: An optional string, specifying the data format of "ksize" and "strides", either "NCHW", "NC1HWC0", or "NHWC" (default). + +*@par Outputs: +*y: The average pooled output tensor. + +*@attention Constraints:\n +*@li Only single input and single output are supported. +*@li Global pooling is supported. +*@li "ksize_H" and "ksize_W" are positive integers within the range [1, 32768]. ksize_H * ksize_W < 256 +*@li Due to instruction restrictions, the values of "strides_h" and "strides_w" are positive integers within the range [1, 63]. +*/ +REG_OP(AvgPool) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE})) + .REQUIRED_ATTR(ksize, ListInt) + .REQUIRED_ATTR(strides, ListInt) + .REQUIRED_ATTR(padding, String) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(AvgPool) + +REG_OP(MaxPoolExt2) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE, DT_INT8, + DT_INT16, DT_INT32, DT_INT64, DT_UINT8, + DT_UINT16, DT_QINT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE, DT_INT8, + DT_INT16, DT_INT32, DT_INT64, DT_UINT8, + DT_UINT16, DT_QINT8})) + .REQUIRED_ATTR(ksize, ListInt) + .REQUIRED_ATTR(strides, ListInt) + .REQUIRED_ATTR(padding, String) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(MaxPoolExt2) + +REG_OP(MaxPool) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE, DT_INT8, + DT_INT16, DT_INT32, DT_INT64, DT_UINT8, + DT_UINT16, DT_QINT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE, DT_INT8, + DT_INT16, DT_INT32, DT_INT64, DT_UINT8, DT_UINT16, DT_QINT8})) + .REQUIRED_ATTR(ksize, ListInt) + .REQUIRED_ATTR(strides, ListInt) + .REQUIRED_ATTR(padding, String) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(MaxPool) + +/** +* @brief Computes gradients of the maxpooling function. + +* @par Inputs: +* @li x1: A mutable NC1HWC0 tensor of type RealNumberType. +* @li x2: A mutable NC1HWC0 tensor of type RealNumberTypex. 
+* @li grad: A mutable NC1HWC0 tensor of type RealNumberType.
+
+* @par Attributes:
+* @li ksize: A tuple or list, specifying the size of the window for each
+* dimension of the input tensor.
+* @li strides: A tuple or list, specifying the stride of the sliding window for
+* each dimension of the input tensor.
+* @li padding: A string, specifying the type of padding algorithm to use.
+
+* @par Outputs:
+* y: A mutable tensor. Has the same shape and type as "x1".
+
+* @attention Constraints:
+* @li Computing gradients of global pooling is not supported, which means
+* "ksize < x1".
+* @li "ksize" is in the range [1, 255]. "strides" is in the range [1, 63]
+*/
+REG_OP(MaxPoolGrad)
+    .INPUT(x1, TensorType::RealNumberType())
+    .INPUT(x2, TensorType::RealNumberType())
+    .INPUT(grad, TensorType::RealNumberType())
+    .OUTPUT(y, TensorType::RealNumberType())
+    .REQUIRED_ATTR(ksize, ListInt)
+    .REQUIRED_ATTR(strides, ListInt)
+    .REQUIRED_ATTR(padding, String)
+    .OP_END_FACTORY_REG(MaxPoolGrad)
+
+/**
+* @brief Computes second-order gradients of the maxpooling function.
+
+* @par Inputs:
+* @li x1: Original forward input tensor of type float16
+* @li x2: Original forward output tensor of type float16
+* @li grad: Gradient tensor of type float16
+
+* @par Attributes:
+* @li ksize: A required list, specifying the size of the sliding window.
+* @li strides: A required list, specifying the stride of the sliding window.
+* @li padding: window sliding mode. Either SAME or VALID.
+* @li data_format: Format of the original input, either NCHW or NHWC. Defaults
+* to NHWC.
+
+* @attention Constraints:
+* @li Only the cloud platform is supported.
+* @li "x1" and "grads" must have the same shape.
+* @li "x2" and "y" must have the same shape. Otherwise, an error is reported.
+* @li "x1", "x2", "grads", and "y" must be 5D tensors.
+
+* @par Outputs:
+* @li y: Result tensor of type float16
+*/
+REG_OP(MaxPoolGradGrad)
+    .INPUT(x1, TensorType::RealNumberType())
+    .INPUT(x2, TensorType::RealNumberType())
+    .INPUT(grad, TensorType::RealNumberType())
+    .OUTPUT(y, TensorType::RealNumberType())
+    .REQUIRED_ATTR(ksize, ListInt)
+    .REQUIRED_ATTR(strides, ListInt)
+    .REQUIRED_ATTR(padding, String)
+    .ATTR(data_format, String, "NHWC")
+    .OP_END_FACTORY_REG(MaxPoolGradGrad)
+
+/**
+*@brief Performs max pooling on the input.
+
+*@par Inputs:
+* Two inputs:
+*@li x: An NC1HWC0 Tensor of type float16.
+*@li strides: A required type of int32 values, specifying the stride of the sliding window for each dimension of the input tensor. No default value.
+*@li ksize: A required type of int32 values, specifying the size of the window for each dimension of the input tensor. No default value.
+
+
+*@par Attributes:
+*@li padding: A required string. No default value.
+*@li data_format: An optional string. Defaults to "NC1HWC0".
+
+*@par Outputs:
+*y: A Tensor. Has the same type and format as input "x".
+
+*@attention Constraints:
+*@li "ksize" is a list that has length 4: ksize[0] = 1 or ksize[3] = 1, ksize[1] * ksize[2] <= 255.
+*@li "strides" is a list that has length 4: strides[0] = 1 or strides[3] = 1, strides[1] <= 63, strides[0] >= 1, strides[2] <= 63, strides[2] >= 1.
+*@li "padding" is either "SAME" or "VALID".
+*/ +REG_OP(MaxPoolV2) + .INPUT(x, TensorType({DT_FLOAT16})) + .INPUT(ksize, TensorType({DT_INT32})) + .INPUT(strides, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT16})) + .REQUIRED_ATTR(padding, String) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(MaxPoolV2) + +REG_OP(MaxPoolWithArgmax) + .INPUT(x, TensorType::RealNumberType()) + .OUTPUT(y, TensorType::RealNumberType()) + .OUTPUT(argmax, TensorType::IndexNumberType()) + .REQUIRED_ATTR(ksize, ListInt) + .REQUIRED_ATTR(strides, ListInt) + .REQUIRED_ATTR(padding, String) + .ATTR(Targmax, Int, 7) + .OP_END_FACTORY_REG(MaxPoolWithArgmax) + +REG_OP(MaxPoolGradWithArgmax) + .INPUT(x, TensorType::RealNumberType()) + .INPUT(grad, TensorType::RealNumberType()) + .INPUT(argmax, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::RealNumberType()) + .REQUIRED_ATTR(ksize, ListInt) + .REQUIRED_ATTR(strides, ListInt) + .REQUIRED_ATTR(padding, String) + .OP_END_FACTORY_REG(MaxPoolGradWithArgmax) + +/** +* @brief Computes second-order gradients of the maxpooling function. + +* @par Inputs: +* @li x:Original forward input tensor of type float16 +* @li grad:Gradient tensor of type float16 +* @li argmax:An tensor of type uint16 +* @par Attributes: +* @li ksize: A required list, specifying the size of the sliding window. +* @li strides: A required list, specifying the stride of the sliding window. +* @li padding: window sliding mode. Either SAME or VALID. +* @par Outputs: +* @li y:Result tensor of type float16 + +* @attention Constraints: +* @li Only the cloud platform is supported. +* @li "x1" and "grads" must have the same shape. +* @li length of the shape of x, grads, argmax, y must be 5. +* @li shape of argmax must be (fmap_n, fmap_c1, kernel_h * kernel_w, +* (shape_max_pool[2] * shape_max_pool[3] + 15) // 16 * 16, 1), +* or (fmap_n, fmap_c1, kernel_h * kernel_w, +* (shape_max_pool[2] * shape_max_pool[3] + 31) // 16, 16), else failed. 
+*/
+REG_OP(MaxPoolGradGradWithArgmax)
+    .INPUT(x, TensorType::RealNumberType())
+    .INPUT(grad, TensorType::RealNumberType())
+    .INPUT(argmax, TensorType::IndexNumberType())
+    .OUTPUT(y, TensorType::RealNumberType())
+    .REQUIRED_ATTR(ksize, ListInt)
+    .REQUIRED_ATTR(strides, ListInt)
+    .REQUIRED_ATTR(padding, String)
+    .OP_END_FACTORY_REG(MaxPoolGradGradWithArgmax)
+
+/**
+* @brief Computes gradients of the average pooling function.
+
+* @par Inputs:
+* @li orig_input_shape: An NHWC tensor of type int32.
+* @li input_grad: An NHWC tensor of type float16, float32, or double.
+
+* @par Attributes:
+* @li ksize: A tuple or list, specifying the size of the window for each
+* dimension of the input tensor.
+* @li strides: A tuple or list, specifying the stride of the sliding window for\n
+* each dimension of the input tensor.
+* @li padding: A string, specifying the type of the padding algorithm to use.
+* @li data_format: A string. Defaults to "NHWC".
+
+* @par Outputs:
+* @out_grad: A mutable tensor with the same shape and type as "orig_input".
+*/
+REG_OP(AvgPoolGrad)
+    .INPUT(orig_input_shape, TensorType({DT_INT32}))
+    .INPUT(input_grad, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE}))
+    .OUTPUT(out_grad, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE}))
+    .REQUIRED_ATTR(ksize, ListInt)
+    .REQUIRED_ATTR(strides, ListInt)
+    .REQUIRED_ATTR(padding, String)
+    .ATTR(data_format, String, "NHWC")
+    .OP_END_FACTORY_REG(AvgPoolGrad)
+
+/**
+* @brief Computes gradients of average pooling function.
+
+* @par Inputs:
+* @input_grad: An NHWC tensor of type float16, float32, or double.
+
+* @par Attributes:
+* @li orig_input_shape: Original input dimensions.
+* @li ksize: A tuple or list, specifying the size of the window for each
+* dimension of the input tensor.
+* @li strides: A tuple or list, specifying the stride of the sliding window for\n
+* each dimension of the input tensor.
+* @li padding: A string, specifying the type of the padding algorithm to use.
+* @li data_format: A string.
Defaults to "NHWC". + +* @par Outputs: +* @out_grad: A mutable tensor with the same shape and type as "orig_input". +*/ +REG_OP(AvgPoolGradD) + .INPUT(input_grad, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE})) + .OUTPUT(out_grad, TensorType({DT_FLOAT16, DT_FLOAT32, DT_DOUBLE})) + .REQUIRED_ATTR(orig_input_shape, ListInt) + .REQUIRED_ATTR(ksize, ListInt) + .REQUIRED_ATTR(strides, ListInt) + .REQUIRED_ATTR(padding, String) + .ATTR(data_format, String, "NHWC") + + .OP_END_FACTORY_REG(AvgPoolGradD) + +REG_OP(MaxPoolWithArgmaxCCE) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .OUTPUT(argmax, TensorType::ALL()) + .ATTR(mode, Int, 0) + .ATTR(pad_mode, Int, 0) + .ATTR(window, ListInt, {1,1}) + .ATTR(stride, ListInt, {1,1}) + .ATTR(pad, ListInt, {0,0,0,0}) + .ATTR(ceil_mode, Int, 0) + .ATTR(data_mode, Int, 1) + .ATTR(nan_opt, Int, 0) + .OP_END_FACTORY_REG(MaxPoolWithArgmaxCCE) + +REG_OP(MaxPoolGradWithArgmaxCCE) + .INPUT(x, TensorType::ALL()) + .INPUT(grad,TensorType::ALL()) + .INPUT(arg,TensorType::ALL()) + .OUTPUT(output,TensorType::ALL()) + .ATTR(mode, Int, 0) + .ATTR(max_pool_grad_output_shape, ListInt, {0,0,0,0}) + .ATTR(pad_mode, Int, 0) + .ATTR(window, ListInt, {1,1}) + .ATTR(stride, ListInt, {1,1}) + .ATTR(pad, ListInt, {0,0,0,0}) + .ATTR(ceil_mode, Int, 0) + .ATTR(data_mode, Int, 1) + .ATTR(nan_opt, Int, 0) + .OP_END_FACTORY_REG(MaxPoolGradWithArgmaxCCE) + +REG_OP(Upsample) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8})) + .ATTR(scale, Float, 1) + .ATTR(stride, Int, 2) + .OP_END_FACTORY_REG(Upsample) +} // namespace ge + +#endif // GE_OP_NN_POOLING_OPS_H diff --git a/third_party/fwkacllib/inc/ops/nn_training_ops.h b/third_party/fwkacllib/inc/ops/nn_training_ops.h new file mode 100644 index 00000000..ed43fb02 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nn_training_ops.h @@ -0,0 +1,865 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_TRAINING_OPS_H +#define GE_OP_TRAINING_OPS_H + +#include "../graph/operator_reg.h" +namespace ge { +/** +*@brief Updates "var" according to the AdaMax algorithm.\n +* t-1 mean previous period. +* m_t <- beta1 * m{t-1} + (1 - beta1) * grad\n +* v_t <- max(beta2 * v{t-1}, abs(grad))\n +* var <- var - lr / (1 - beta1^t) * m_t / (v_t + epsilon) +* +*@attention Constraints:\n +* the input tensors must have the same shape. +* +*@par Inputs: +*@li var: A mutable tensor. Must be one of the following types: TensorType::NumberType(). +* Should be from a Variable(). +*@li m: A mutable tensor. Has the same type as "var". +* Should be from a Variable(). +*@li v: A mutable tensor. Has the same type as "var". +* Should be from a Variable(). +*@li beta1_power: A scalar. Has the same type as "var". +*@li lr: learning_rate. A scalar. Has the same type as "var". +*@li beta1: A scalar. Has the same type as "var". +*@li beta2: A scalar. Has the same type as "var". +*@li epsilon: A scalar. Has the same type as "var". +*@li grad: A tensor for the gradient. Has the same type as "var". +* +*@par Attributes:\n +* use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var", "ms", and "mom" tensors is protected +* by a lock; otherwise the behavior is undefined, but may exhibit less +* contention. +* +*@par Outputs: +* var: A mutable tensor. Has the same type as input "var". 
+* +*/ +REG_OP(ApplyAdaMax) + .INPUT(var, TensorType::NumberType()) + .INPUT(m, TensorType::NumberType()) + .INPUT(v, TensorType::NumberType()) + .INPUT(beta1_power, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(beta1, TensorType::NumberType()) + .INPUT(beta2, TensorType::NumberType()) + .INPUT(epsilon, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyAdaMax) + +/** +*@brief Updates "var" according to the momentum scheme. Set use_nesterov = True if you +* want to use Nesterov momentum.\n +* computing process: \n +* accum = accum * momentum + grad\n +* var -= lr * accum +* +*@attention Constraints:\n +* the input tensors must have the same shape. +* +*@par Inputs: +*@li var: A mutable tensor. Should be from a Variable(). +*@li accum: A mutable tensor. Has the same type as "var". +* Should be from a Variable(). +*@li lr: A scalar. Has the same type as "var". +*@li grad: A tensor for the gradient. Has the same type as "var". +* +*@par Attributes: +*@li use_nesterov: An optional bool. Defaults to "False". +* If "True", the tensor passed to compute grad will be +* var - lr * momentum * accum, so in the end, the var you get is actually +* var - lr * momentum * accum. +* +*@li use_locking: An optional bool. Defaults to "False".\n +* If "True", updating of the "var", "ms", and "mom" tensors is protected by a lock; +* otherwise the behavior is undefined, but may exhibit less contention. +* +*@par Outputs: +* var: A mutable tensor. Has the same type as input "var". 
+* +*/ + +REG_OP(ApplyMomentum) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .INPUT(momentum, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_nesterov, Bool, false) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyMomentum) + +REG_OP(ApplyMomentumCCE) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .INPUT(momentum, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_nesterov, Bool, false) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyMomentumCCE) + +/** +*@brief Updates "var" according to the AddSign update.\n +* t-1 mean previous period. +* m_t <- beta1 * m_{t-1} + (1 - beta1) * grad\n +* update <- exp(logbase * sign_decay * sign(grad) * sign(m_t)) * grad\n +* var <- var - lr * update +* +*@attention Constraints:\n +* the input tensors must have the same shape. +* +*@par Inputs: +*@li var: A mutable tensor. Should be from a Variable(). +*@li m: A mutable tensor. Has the same type as "var". +* Should be from a Variable(). +*@li lr: A scalar. Has the same type as "var". +*@li logbase: A scalar. Has the same type as "var". +*@li sign_decay: A scalar. Has the same type as "var". +*@li beta: A scalar. Has the same type as "var". +*@li grad: A tensor for the gradient. Has the same type as "var". +* +*@par Attributes: +* use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var", "ms", and "mom" tensors is protected +* by a lock; otherwise the behavior is undefined, but may exhibit less +* contention. +* +*@par Outputs: +* var: A mutable tensor. Has the same type as input "var". 
+* +*/ +REG_OP(ApplyPowerSign) + .INPUT(var, TensorType::NumberType()) + .INPUT(m, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(logbase, TensorType::NumberType()) + .INPUT(sign_decay, TensorType::NumberType()) + .INPUT(beta, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyPowerSign) + +/** +*@brief Updates "var" as FOBOS algorithm with fixed learning rate.\n +* prox_v = var - alpha * delta\n +* var = sign(prox_v)/(1+alpha*l2) * max{|prox_v|-alpha*l1,0} +* +*@attention Constraints:\n +* the input tensors must have the same shape. +* +*@par Inputs: +*@li var: A mutable tensor. Should be from a Variable(). +*@li alpha: A scalar. Has the same type as "var". +*@li l1: A scalar. Has the same type as "var". +*@li l2: A scalar. Has the same type as "var". +*@li delta: A tensor. Has the same type as "var". The change. +* +*@par Attributes: +* use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var", "ms", and "mom" tensors is protected +* by a lock; otherwise the behavior is undefined, but may exhibit less +* contention. +* +*@par Outputs: +* var: A mutable tensor. Has the same type as input "var". +* +*/ +REG_OP(ApplyProximalGradientDescent) + .INPUT(var, TensorType::NumberType()) + .INPUT(alpha, TensorType::NumberType()) + .INPUT(l1, TensorType::NumberType()) + .INPUT(l2, TensorType::NumberType()) + .INPUT(delta, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyProximalGradientDescent) + +/** +*@brief Updates "var" according to the AddSign update. + +*@par Inputs: +*Seven inputs, including: +* @li var: A mutable Tensor of type TensorType::NumberType(). +* Should be a Variable Tensor. +* @li m: A mutable Tensor of the same type as "var". +* Should be a Variable Tensor. 
+* @li lr: A Tensor of the same type as "var", for the scaling factor. Must be a scalar. +* @li alpha: A Tensor of the same type as "var". Must be a scalar. +* @li sign_decay: A Tensor of the same type as "var". Must be a scalar. +* @li beta: A Tensor of the same type as "var". Must be a scalar. +* @li grad: A Tensor of the same type as "var", for the gradient. + + +*@par Attributes: +*use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var" and "m" tensors will be +* protected by a lock; otherwise the behavior is undefined, +* but may exhibit less contention. + +*@par Outputs: +*var: A mutable Tensor. Has the same type as "var". +*/ +REG_OP(ApplyAddSign) + .INPUT(var, TensorType::NumberType()) + .INPUT(m, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(alpha, TensorType::NumberType()) + .INPUT(sign_decay, TensorType::NumberType()) + .INPUT(beta, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyAddSign) + +/** +*@brief Updates "var" according to the centered RMSProp algorithm.\n +* The centered RMSProp algorithm uses an estimate of the centered second moment +* (i.e., the variance) for normalization, as opposed to regular RMSProp, which +* uses the (uncentered) second moment. This often helps with training, but is +* slightly more expensive in terms of computation and memory. +* +* t-1 mean previous period. +* mg <- rho * mg{t-1} + (1-rho) * grad\n +* ms <- rho * ms{t-1} + (1-rho) * grad * grad\n +* mom <- momentum * mom{t-1} + lr * grad / sqrt(ms - mg * mg + epsilon)\n +* var <- var - mom\n +* +*@attention Constraints:\n +*@li in dense implementation of this algorithm, mg, ms, and mom will +* update even if the grad is zero, but in this sparse implementation, mg, ms, +* and mom will not update in iterations during which the grad is zero. +*@li the input tensors must have the same shape. 
+* +*@par Inputs: +*@li var: A mutable tensor. Should be from a Variable(). +*@li mg: A mutable tensor. Has the same type as "var". +* Should be from a Variable(). +*@li ms: A mutable tensor. Has the same type as "var". +* Should be from a Variable(). +*@li mom: A mutable tensor. Has the same type as "var". +* Should be from a Variable(). +*@li lr: A scalar. Has the same type as "var". +*@li rho: A scalar. Has the same type as "var". +*@li momentum: A tensor. Has the same type as "var". +*@li epsilon: A scalar. Has the same type as "var". +*@li grad: A tensor for the gradient. Has the same type as "var". +* +*@par Attributes: +* use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var", "ms", and "mom" tensors is protected +* by a lock; otherwise the behavior is undefined, but may exhibit less +* contention. +* +*@par Outputs: +* var: A mutable tensor. Has the same type as input "var". +* +*/ +REG_OP(ApplyCenteredRMSProp) + .INPUT(var, TensorType::NumberType()) + .INPUT(mg, TensorType::NumberType()) + .INPUT(ms, TensorType::NumberType()) + .INPUT(mom, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(rho, TensorType::NumberType()) + .INPUT(momentum, TensorType::NumberType()) + .INPUT(epsilon, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyCenteredRMSProp) + +/** +*@brief Updates "var" by subtracting 'alpha' * 'delta' from it.\n +* var -= delta * alpha +* +*@attention Constraints:\n +* the input tensors must have the same shape. +* +*@par Inputs: +*@li var: A mutable tensor. Should be from a Variable(). +*@li alpha: A scalar. Has the same type as "var". +*@li delta: A tensor for the change. Has the same type as "var". +* +*@par Attributes: +* use_locking: An optional bool. Defaults to "False". 
+* If "True", updating of the "var", "ms", and "mom" tensors is protected +* by a lock; otherwise the behavior is undefined, but may exhibit less +* contention. +* +*@par Outputs: +* var: A mutable tensor. Has the same type as input "var". +* +*/ +REG_OP(ApplyGradientDescent) + .INPUT(var, TensorType::NumberType()) + .INPUT(alpha, TensorType::NumberType()) + .INPUT(delta, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyGradientDescent) + +/** +*@brief Updates "var" according to the adagrad scheme.\n +* accum += grad * grad\n +* var -= lr * grad * (1 / sqrt(accum)) +* +*@attention Constraints:\n +* the input tensors must have the same shape. +* +*@par Inputs: +*@li var: A mutable tensor. Should be from a Variable(). +*@li accum: A mutable tensor. Has the same type as "var". +* Should be from a Variable(). +*@li lr: A scalar. Has the same type as "var". +*@li grad: A tensor for the gradient. Has the same type as "var". +* +*@par Attributes: +* use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var", "ms", and "mom" tensors is protected +* by a lock; otherwise the behavior is undefined, but may exhibit less +* contention. +* +*@par Outputs: +* var: A mutable tensor. Has the same type as input "var". +* +*/ +REG_OP(ApplyAdagrad) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(update_slots, Bool, true) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyAdagrad) + +/** +*@brief Updates "var" according to the proximal adagrad scheme. + +*@par Inputs: +*Eight inputs, including: +* @li var: A mutable Tensor. Must be one of the following types: +* TensorType::NumberType(). Should be a Variable Tensor. +* @li gradient_accumulator: A mutable Tensor. Must have the same +* type as "var". 
Should be a Variable Tensor. +* @li gradient_squared_accumulator: A mutable Tensor of the same type as "var". +* Should be a Variable Tensor. +* @li grad: A Tensor of the same type as "var", for the gradient. +* @li lr: A Tensor of the same type as "var". +* Scaling factor. Must be a scalar. +* @li l1: A Tensor of the same type as "var". +* L1 regularization. Must be a scalar. +* @li l2: A Tensor of the same type as "var". +* L2 regularization. Must be a scalar. +* @li global_step: A Tensor of type int32 or int64. +* Training step number. Must be a scalar. + +*@par Attributes: +*use_locking: An optional bool. Defaults to "False". +* If "True", updating of the var and accum tensors will be +* protected by a lock; otherwise the behavior is undefined, +* but may exhibit less contention. + +*@par Outputs: +*var: A mutable Tensor. Has the same type as "var". +*/ +REG_OP(ApplyAdagradDA) + .INPUT(var, TensorType::NumberType()) + .INPUT(gradient_accumulator, TensorType::NumberType()) + .INPUT(gradient_squared_accumulator, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(l1, TensorType::NumberType()) + .INPUT(l2, TensorType::NumberType()) + .INPUT(global_step, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyAdagradDA) + +/** +*@brief Returns the dimension index in the destination data format given the one in +* the source data format. +* +*@par Inputs: +* x: A tensor of type int32 or int64. +* A Tensor with each element as a dimension index in source data format. +* Must be in the range [-4, 4). +* +*@par Attributes: +*@li src_format: An optional string. Defaults to NHWC. +* source data format. +*@li dst_format: An optional string. Defaults to NCHW. +* destination data format. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". 
+* +*/ +REG_OP(DataFormatDimMap) + .INPUT(x, TensorType::IndexNumberType()) + .ATTR(src_format, String, "NHWC") + .ATTR(dst_format, String, "NCHW") + .OUTPUT(y, TensorType::IndexNumberType()) + .OP_END_FACTORY_REG(DataFormatDimMap) + +/** +* @brief Implements stochastic gradient descent (optionally with momentum).\n +* Nesterov momentum is based on the formula from +* On the importance of initialization and momentum in deep learning.\n + +* @par Inputs: +* @li parameters: A mutable tensor of type float16 or float32.\n +* Specifies the iterable of parameters to optimize or dicts defining parameter +* groups. +* @li gradient: A tensor of type float16 or float32.\n +* Specifies the gradient of training step. +* @li learning_rate: A tensor of type float16 or float32.\n +* Specifies the learning_rate of training step. +* @li accum: A tensor of type float16 or float32. +* Specifies the velocity of training step. +* @li momentum: A tensor of type float16 or float32. +* Specifies the momentum factor. +* @li stat: A tensor of type float16 or float32. +* Specifies the status representing the first step or not. + +* @par Attributes: +* @li dampening: An optional float, specifying the dampening for momentum. +* Defaults to "0.0". +* @li weight_decay: An optional float, specifying the L2 penalty. Defaults to +* "0.0". +* @li nesterov: An optional bool, specifying whether to enable Nesterov +* momentum. Defaults to "False". + +* @par Outputs: +* parameters: A mutable tensor same as input "parameters". 
+ +* @see ApplyMomentum() +*/ +REG_OP(SGD) + .INPUT(parameters, TensorType(DT_FLOAT, DT_FLOAT16)) + .INPUT(gradient, TensorType(DT_FLOAT, DT_FLOAT16)) + .INPUT(learning_rate, TensorType(DT_FLOAT, DT_FLOAT16)) + .INPUT(accum, TensorType(DT_FLOAT, DT_FLOAT16)) + .INPUT(momentum, TensorType(DT_FLOAT, DT_FLOAT16)) + .INPUT(stat, TensorType(DT_FLOAT, DT_FLOAT16)) + .OUTPUT(parameters, TensorType(DT_FLOAT, DT_FLOAT16)) + .ATTR(dampening, Float, 0.0) + .ATTR(weight_decay, Float, 0.0) + .ATTR(nesterov, Bool, false) + .OP_END_FACTORY_REG(SGD) + +/** +* @brief Updates "var" according to the RMSProp algorithm.\n +* mean_square = decay * mean_square + (1-decay) * gradient ** 2\n +* Delta = learning_rate * gradient / sqrt(mean_square + epsilon)\n +* ms <- rho * ms_{t-1} + (1-rho) * grad * grad\n +* mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms + epsilon)\n +* var <- var - mom\n +* +* @attention Constraints: +* @li Note that in dense implementation of this algorithm, "ms" and "mom" will\n +* update even if "grad" is 0, but in this sparse implementation, "ms" and "mom"\n +* will not update in iterations during which "grad" is 0.\n +* @li The input tensors "var", "ms", "mom" and "grad" must have the same shape. +* +* @par Inputs: +* @li var: A mutable tensor. Must be one of the data types defined in\n +* TensorType::NumberType(). Should be from a Variable(). +* @li ms: A mutable tensor. Must have the same type as "var". Should be from a +* Variable(). +* @li mom: A mutable tensor. Must have the same type as "var". Should be from a +* Variable(). +* @li lr: A scalar. Must have the same type as "var". +* @li rho: A scalar. Must have the same type as "var". +* @li momentum: A scalar. Must have the same type as "var". +* @li epsilon: A scalar. Must have the same type as "var". +* @li grad: A tensor, specifying the gradient. Must have the same type as "var". +* +* @par Attributes: +* use_locking: An optional "bool". Defaults to "False". 
If "True", updating of\n +* the "var", "ms", and "mom" tensors will be protected by a lock; otherwise the\n +* behavior is undefined, but may exhibit less contention. +* +* @par Outputs: +* var: A mutable tensor. Has the same type as input "var". +*/ +REG_OP(ApplyRMSProp) + .INPUT(var, TensorType::NumberType()) + .INPUT(ms, TensorType::NumberType()) + .INPUT(mom, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(rho, TensorType::NumberType()) + .INPUT(momentum, TensorType::NumberType()) + .INPUT(epsilon, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyRMSProp) + +/** +* @brief Updates "var" according to the RMSProp algorithm, a const input will be +* considered as an attribute.\n +* mean_square = decay * mean_square + (1-decay) * gradient ** 2\n +* Delta = learning_rate * gradient / sqrt(mean_square + epsilon)\n +* ms <- rho * ms_{t-1} + (1-rho) * grad * grad\n +* mom <- momentum * mom_{t-1} + lr * grad / sqrt(ms + epsilon)\n +* var <- var - mom +* +* @attention Constraints: +* @li Note that in dense implementation of this algorithm, "ms" and "mom" will\n +* update even if "grad" is 0, but in this sparse implementation, "ms" and "mom"\n +* will not update in iterations during which "grad" is 0. +* @li The input tensors "var", "ms", "mom" and "grad" must have the same shape. +* +* @par Inputs: +* @li var: A mutable tensor. Must be one of the data types defined in\n +* TensorType::NumberType(). Should be from a Variable(). +* @li ms: A mutable tensor. Must have the same type as "var". Should be from a +* Variable(). +* @li mom: A mutable tensor. Must have the same type as "var". Should be from a +* Variable(). +* @li lr: A scalar. Must have the same type as "var". +* @li grad: A tensor, specifying the gradient. Must have the same type as "var". +* +* @par Attributes: +* @li use_locking: An optional "bool". 
Defaults to "False". If "True", updating\n +* of the "var", "ms", and "mom" tensors will be protected by a lock; otherwise +* the behavior is undefined, but may exhibit less contention. +* @li rho: A scalar. Must have the same type as "var". +* @li momentum: A scalar. Must have the same type as "var". +* @li epsilon: A scalar. Must have the same type as "var". +* +* @par Outputs: +* var: A mutable tensor. Must have the same type as input "var". +*/ +REG_OP(ApplyRMSPropD) + .INPUT(var, TensorType::NumberType()) + .INPUT(ms, TensorType::NumberType()) + .INPUT(mom, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .REQUIRED_ATTR(rho, Float) + .REQUIRED_ATTR(momentum, Float) + .REQUIRED_ATTR(epsilon, Float) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyRMSPropD) + +/** +*@brief Update "var" and "accum" according to FOBOS with Adagrad learning rate. + +*@par Inputs: +*Six inputs, including: +* @li var: A mutable Tensor of type TensorType::NumberType(). +* Should be from a Variable(). +* @li accum: A mutable Tensor of the same type as "var". Should be from a Variable(). +* @li lr: A Tensor of the same type as "var", for the scaling factor. Must be a scalar. +* @li l1: A Tensor of the same type as "var", for L1 regulariation. Must be a scalar. +* @li l2: A Tensor of the same type as "var", for L2 regulariation. Must be a scalar. +* @li grad: A Tensor of the same type as "var", for the gradient. + +*@par Attributes: +*use_locking: An optional bool. Defaults to "False". If "True", updating of the "var" and "accum" *tensors will be protected by a lock; otherwise the behavior is undefined, but may exhibit less *contention. + +*@par Outputs: +*var: A mutable Tensor. Has the same type as "var". 
+*/ +REG_OP(ApplyProximalAdagrad) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(l1, TensorType::NumberType()) + .INPUT(l2, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyProximalAdagrad) + +/** +*@brief Updates entries in 'var' and 'accum' according to the Proximal Adagrad algorithm.\n +* Compared with op ApplyProximalAdagrad, an additional index tensor is input, +* Only the indices into the first dimensions of "var" and "accum" are updated. + +*@par Inputs: +* Seven inputs, including:\n +* @li var: A mutable Tensor.\n +* TensorType::NumberType(). Should be a Variable Tensor. +* @li accum: A mutable Tensor of the same type as "var".\n +* Should be a Variable Tensor. +* @li lr: A Tensor of the same type as "var".\n +* Scaling factor. Must be a scalar. +* @li l1: A Tensor of the same type as "var".\n +* L1 regularization. Must be a scalar. +* @li l2: A Tensor of the same type as "var".\n +* L2 regularization. Must be a scalar. +* @li grad: A Tensor. Has the same type as "var". \n +* The gradient. +* @li indices: A vector of indices into the first dimension of "var" and "accum".\n +* TensorType::IndexNumberType(). + +*@par Attributes: +*use_locking: An optional bool. Defaults to "False".\n +* If "True", updating of the var and accum tensors will be protected by a lock; \n +* If "False", the behavior is undefined, but may exhibit less contention. + +*@par Outputs: +*var: A mutable Tensor. Has the same type as "var". 
+*/ +REG_OP(SparseApplyProximalAdagrad) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(l1, TensorType::NumberType()) + .INPUT(l2, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .INPUT(indices, TensorType::IndexNumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(SparseApplyProximalAdagrad) + +/** +*@brief Updates "var" according to the Ftrl-proximal scheme. + +*@par Inputs: +*Eight inputs, including: +* @li var: A mutable Tensor. Must be of type TensorType::NumberType(). +* Should be a Variable Tensor. +* @li accum: A mutable Tensor of the same type as "var". +* Should be a Variable Tensor. +* @li linear: A mutable Tensor of the same type as "var". +* Should be a Variable Tensor. +* @li grad: A Tensor of the same type as "var", for the gradient. +* @li lr: A Tensor of the same type as "var", for the scaling factor. Must be a scalar. +* @li l1: A Tensor of the same type as "var", for L1 regulariation. Must be a scalar. +* @li l2: A Tensor of the same type as "var", for L2 regulariation. Must be a scalar. +* @li lr_power: A Tensor of the same type as "var", for the scaling factor. Must be a scalar. + +*@par Attributes: +*use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var" and "accum" tensors will be +* protected by a lock; otherwise the behavior is undefined, +* but may exhibit less contention. + +*@par Outputs: +*var: A mutable Tensor. Has the same type as "var". 
+*/ +REG_OP(ApplyFtrl) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(linear, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(l1, TensorType::NumberType()) + .INPUT(l2, TensorType::NumberType()) + .INPUT(lr_power, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyFtrl) + +/** +*@brief Update "var" according to the Ftrl-proximal scheme. + +*@par Inputs: +*Nine inputs, including: +* @li var: A mutable Tensor. Must be of type TensorType::NumberType(). +* Should be a Variable Tensor. +* @li accum: A mutable Tensor of the same type as "var". +* Should be a Variable Tensor. +* @li linear: A mutable Tensor of the same type as "var". +* Should be a Variable Tensor. +* @li grad: A Tensor of the same type as "var", for the gradient. +* @li lr: A Tensor of the same type as "var", for the scaling factor. Must be a scalar. +* @li l1: A Tensor of the same type as "var", for L1 regulariation. Must be a scalar. +* @li l2: A Tensor of the same type as "var", for L2 regulariation. Must be a scalar. +* @li l2_shrinkage: A Tensor of the same type as "var". +* @li lr_power: A Tensor of the same type as "var", for the scaling factor. Must be a scalar. + +*@par Attributes: +*use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var" and "accum" tensors will be +* protected by a lock; otherwise the behavior is undefined, +* but may exhibit less contention. + +*@par Outputs: +*var: A mutable Tensor. Has the same type as "var". 
+*/ +REG_OP(ApplyFtrlV2) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(linear, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(l1, TensorType::NumberType()) + .INPUT(l2, TensorType::NumberType()) + .INPUT(l2_shrinkage, TensorType::NumberType()) + .INPUT(lr_power, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyFtrlV2) + +/** +*@brief Updates "var" according to the Adam algorithm.\n +* lr_t <- text{learning\_rate} * sqrt{1 - beta_2^t} / (1 - beta_1^t)\n +* m_t <- beta_1 * m_{t-1} + (1 - beta_1) * g\n +* v_t <- beta_2 * v_{t-1} + (1 - beta_2) * g * g\n +* variable <- variable - lr_t * m_t / (sqrt{v_t} + epsilon) + +*@attention Constraints:\n +* *The input tensors must have the same shape.* + +*@par Inputs: +*@li var: A mutable Tensor of the type TensorType::NumberType(). +* Should be from a Variable(). +*@li m: A mutable Tensor of the same type as "var". +* Should be from a Variable(). +*@li v: A mutable Tensor of the same type as "var". +* Should be from a Variable(). +*@li beta1_power: A scalar of the same type as "var". +*@li beta2_power: A scalar of the same type as "var". +*@li lr: learning_rate. A scalar of the same type as "var". +*@li beta1: A scalar of the same type as "var". +*@li beta2: A scalar of the same type as "var". +*@li epsilon: A scalar of the same type as "var". +*@li grad: A Tensor of the same type as "var", for the gradient. +* +*@par Attributes:\n +*@li use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var", "m", and "v" tensors will be protected +* by a lock; otherwise the behavior is undefined, but may exhibit less +* contention. +*@li use_nesterov: An optional bool. Defaults to "False". + If "True", uses the nesterov update. +* +*@par Outputs: +* var: A mutable Tensor. Has the same type as input "var". 
+*/ +REG_OP(ApplyAdam) + .INPUT(var, TensorType::NumberType()) + .INPUT(m, TensorType::NumberType()) + .INPUT(v, TensorType::NumberType()) + .INPUT(beta1_power, TensorType::NumberType()) + .INPUT(beta2_power, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(beta1, TensorType::NumberType()) + .INPUT(beta2, TensorType::NumberType()) + .INPUT(epsilon, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .ATTR(use_nesterov, Bool, false) + .OP_END_FACTORY_REG(ApplyAdam) + +/** +*@brief Updates "var" according to the proximal adadelta scheme. + +*@par Inputs: +*Seven inputs, including: +* @li var: A mutable Tensor of type TensorType::NumberType(). +* Should be a Variable Tensor. +* @li accum: A mutable Tensor of the same type as "var". +* Should be a Variable Tensor. +* @li accum_update: A mutable Tensor of the same type as "var". +* Should be a Variable Tensor. +* @li lr: A scalar of the same type as "var", for the scaling factor. +* @li rho: A scalar of the same type as "var", for the decay factor. +* @li epsilon: A scalar of the same type as "var", for the constant factor. +* @li grad: A Tensor of the same type as "var", for the gradient. + +*@par Attributes: +*use_locking: An optional bool. Defaults to "False". +* If "True", updating of the "var", "accum" and "accum_update" tensors will be +* protected by a lock; otherwise the behavior is undefined, +* but may exhibit less contention. + +*@par Outputs: +*var: A mutable Tensor. Has the same type as "var". 
+*/ +REG_OP(ApplyAdadelta) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(accum_update, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(rho, TensorType::NumberType()) + .INPUT(epsilon, TensorType::NumberType()) + .INPUT(grad, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(ApplyAdadelta) + +REG_OP(FusedMulApplyMomentum) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(x1, TensorType::NumberType()) + .INPUT(momentum, TensorType::NumberType()) + .INPUT(x2, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .ATTR(use_nesterov, Bool, false) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(FusedMulApplyMomentum) + +REG_OP(FusedMulApplyMomentumExtern) + .INPUT(var, TensorType::NumberType()) + .INPUT(accum, TensorType::NumberType()) + .INPUT(lr, TensorType::NumberType()) + .INPUT(x1, TensorType::NumberType()) + .INPUT(momentum, TensorType::NumberType()) + .INPUT(x2, TensorType::NumberType()) + .INPUT(var_copy, TensorType::NumberType()) + .OUTPUT(var, TensorType::NumberType()) + .OUTPUT(var_copy, TensorType::NumberType()) + .ATTR(use_nesterov, Bool, false) + .ATTR(use_locking, Bool, false) + .OP_END_FACTORY_REG(FusedMulApplyMomentumExtern) + +REG_OP(LarsV2) + .INPUT(w, TensorType(DT_FLOAT)) + .INPUT(g, TensorType(DT_FLOAT)) + .INPUT(weight_decay, TensorType(DT_FLOAT)) + .INPUT(learning_rate, TensorType(DT_FLOAT)) + .OUTPUT(g_new, TensorType(DT_FLOAT)) + .ATTR(hyperpara, Float, 0.001) + .ATTR(epsilon, Float, 0.00001) + .ATTR(use_clip, Bool, false) + .OP_END_FACTORY_REG(LarsV2) + +REG_OP(LarsV2Update) + .INPUT(w, TensorType(DT_FLOAT)) + .INPUT(g, TensorType(DT_FLOAT)) + .INPUT(w_square_sum, TensorType(DT_FLOAT)) + .INPUT(g_square_sum, TensorType(DT_FLOAT)) + .INPUT(weight_decay, TensorType(DT_FLOAT)) + .INPUT(learning_rate, 
TensorType(DT_FLOAT)) + .OUTPUT(g_new, TensorType(DT_FLOAT)) + .ATTR(hyperpara, Float, 0.001) + .ATTR(epsilon, Float, 0.00001) + .ATTR(use_clip, Bool, false) + .OP_END_FACTORY_REG(LarsV2Update) + +} // namespace ge + +#endif // GE_OP_TRAINING_OPS_H diff --git a/third_party/fwkacllib/inc/ops/no_op.h b/third_party/fwkacllib/inc/ops/no_op.h new file mode 100755 index 00000000..55ed875b --- /dev/null +++ b/third_party/fwkacllib/inc/ops/no_op.h @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_NO_OP_H_ +#define GE_NO_OP_H_ + +#include "graph/operator_reg.h" +#include "graph/operator.h" + +namespace ge { + +/** +*@brief Does nothing. Only useful as a placeholder for control edges. +*/ + +REG_OP(NoOp) + .OP_END_FACTORY_REG(NoOp) + +} // namespace ge + +#endif // GE_NO_OP_H_ diff --git a/third_party/fwkacllib/inc/ops/nonlinear_fuc_ops.h b/third_party/fwkacllib/inc/ops/nonlinear_fuc_ops.h new file mode 100644 index 00000000..39357494 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/nonlinear_fuc_ops.h @@ -0,0 +1,317 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_NONLINEAR_FUC_OPS_H +#define GE_OP_NONLINEAR_FUC_OPS_H + +#include "../graph/operator_reg.h" + +namespace ge { +/** +*@brief Computes the for the gelu of "x". + +*@par Inputs: +*Two inputs, including: +* @li x: A Tensor. Must be one of the following types: float16, float32 + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(Gelu) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(Gelu) + +/** +*@brief Computes the gradient for the gelu of "x". + +*@par Inputs: +*Two inputs, including: +* @li dy: A Tensor. Must be one of the following types: float16, float32 +* @li x: A Tensor of the same type as "dy". +* @li y: A Tensor of the same type as "dy". + +*@par Outputs: +*z: A Tensor. Has the same type as "dy". +*/ +REG_OP(GeluGrad) + .INPUT(dy, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(z, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(GeluGrad) + +/** +*@brief Computes the gradient for the tanh of "x". + +*@par Inputs: +*Two inputs, including: +* @li y: A Tensor. Must be one of the following types: float16, float32, +* double, complex64, complex128. +* @li dy: A Tensor of the same type as "y". + +*@par Outputs: +*z: A Tensor. Has the same type as "y". 
+*/ +REG_OP(TanhGrad) + .INPUT(y, TensorType::UnaryDataType()) + .INPUT(dy, TensorType::UnaryDataType()) + .OUTPUT(z, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(TanhGrad) + +REG_OP(Tanh) + .INPUT(x, TensorType::UnaryDataType()) + .OUTPUT(y, TensorType::UnaryDataType()) + .OP_END_FACTORY_REG(Tanh) + +/** +* @brief Computes rectified linear: "max(x, 0)". +* +* @par Inputs: +* x: A tensor. Must be one of the following types: float32, float64, int32, uint8,\n +* int16, int8, int64, uint16, float16, qint8. +* +* @par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(Relu) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, + DT_INT8, DT_INT32, DT_INT16, DT_INT64, + DT_UINT8, DT_UINT16, DT_QINT8})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, + DT_INT8, DT_INT32, DT_INT16, DT_INT64, + DT_UINT8, DT_UINT16, DT_QINT8})) + .OP_END_FACTORY_REG(Relu) + +/** +* @brief Computes rectified linear 6. +* activations = min(max(features, 0), 6). + +* @par Inputs: +* features: A Tensor of type RealNumberType. + +* @par Outputs: +* activations: A Tensor of type RealNumberType. +*/ +REG_OP(Relu6) + .INPUT(features, TensorType::RealNumberType()) + .OUTPUT(activations, TensorType::RealNumberType()) + .OP_END_FACTORY_REG(Relu6) + +/** +* @brief Computes rectified linear 6 gradients for a Relu6 operation. +* z = dy * (y > 0) * (y < 6). + +* @par Inputs: +* @li y: A Tensor of type RealNumberType. +* @li dy: A Tensor of type RealNumberType. + +* @par Outputs: +* z: A Tensor of type RealNumberType. +*/ +REG_OP(Relu6Grad) + .INPUT(y, TensorType::RealNumberType()) + .INPUT(dy, TensorType::RealNumberType()) + .OUTPUT(z, TensorType::RealNumberType()) + .OP_END_FACTORY_REG(Relu6Grad) + +/** +* @brief Compute sigmoid of "x" element-wise. + +* @par Inputs: +* A Tensor of type UnaryDataType. + +* @par Outputs: +* A Tensor. Has the same type as "x". + +* @attention Constraints: +* @li "x" is with shape (D1, D2, ..., DK), where, D1 * D2... 
* Dn <= 2^31-1, +* Di <= 1000000, n <= 8. +* @li Ascend 310 provides only 1?? accuracy for the result. + +* @see Relu() +*/ +REG_OP(Sigmoid) + .INPUT(x, TensorType(UnaryDataType)) + .OUTPUT(y, TensorType(UnaryDataType)) + .OP_END_FACTORY_REG(Sigmoid) + +/** +* @brief Computes z = (y - y*y)*dy. + +* @par Inputs: +* @li y: the input is tensor , dtype is UnaryDataType. +* @li dy the input is tensor , dtype is UnaryDataType. + +* @par Outputs: +* z: the shape of output, dtype is UnaryDataType. +*/ +REG_OP(SigmoidGrad) + .INPUT(y, TensorType(UnaryDataType)) + .INPUT(dy, TensorType(UnaryDataType)) + .OUTPUT(z, TensorType(UnaryDataType)) + .OP_END_FACTORY_REG(SigmoidGrad) + +REG_OP(Activation) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + /* + 0:sigmod, 1:relu, 2:tanh, 3:clipped ReLU, 4:Elu, + 5:leaky relu, 6:abs, 7:relu1, 8:softsign, 9:softplus + */ + .ATTR(mode, Int, 1) + .ATTR(coef, Float, 0) + .OP_END_FACTORY_REG(Activation) + +REG_OP(ActivationGrad) + .INPUT(dy, TensorType{DT_FLOAT}) + .INPUT(x, TensorType{DT_FLOAT}) + .OUTPUT(dx, TensorType{DT_FLOAT}) + .ATTR(mode, Int, 1) + .OP_END_FACTORY_REG(ActivationGrad) + +REG_OP(Softplus) + .INPUT(features, TensorType::FloatingDataType()) + .OUTPUT(activations, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(Softplus) + +REG_OP(SoftplusGrad) + .INPUT(gradients, TensorType::FloatingDataType()) + .INPUT(features, TensorType::FloatingDataType()) + .OUTPUT(backprops, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(SoftplusGrad) + +REG_OP(Softsign) + .INPUT(features, TensorType::FloatingDataType()) + .OUTPUT(activations, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(Softsign) + +REG_OP(Selu) + .INPUT(features, TensorType({DT_FLOAT16,DT_FLOAT,DT_DOUBLE, + DT_INT8,DT_INT32})) + .OUTPUT(activations, TensorType({DT_FLOAT16,DT_FLOAT,DT_DOUBLE, + DT_INT8,DT_INT32})) + .OP_END_FACTORY_REG(Selu) + +REG_OP(ReluGrad) + .INPUT(gradients, TensorType::RealNumberType()) + .INPUT(features, 
TensorType::RealNumberType()) + .OUTPUT(backprops, TensorType::RealNumberType()) + .OP_END_FACTORY_REG(ReluGrad) + +/** +*@brief Computes rectified linear gradients for a ReLU operation. + +*@par Inputs: +* Two inputs, including: +*@li gradients: A Tensor. Must be one of the following types: float32, double, int32, int8, int16,\n int8, int64, uint16, float16, uint32, uint64 +*@li mask: A Tensor. Must be the following types: uint8 + +*@par Outputs: +*backprops: A Tensor. Must have the same type as"gradients". + +*@attention Constraints: +* The corresponding Relu operator needs to be called before using this operator on the network. + +*@see Relu +*/ +REG_OP(ReluGradV2) + .INPUT(gradients, TensorType::RealNumberType()) + .INPUT(mask, TensorType({DT_UINT8})) + .OUTPUT(backprops, TensorType::RealNumberType()) + .OP_END_FACTORY_REG(ReluGradV2) + +/** +*@brief Computes rectified linear: `max(x, 0)`. +* +*@attention Constraints:\n +* The last dim must be mutiply of 8 +* The second output `mask` is the result of `y` use 'gt' compare with 0. +* +*@par Inputs: +* x: A tensor. Must be one of the following types: float32, float64, int32, uint8, +* int16, int8, int64, uint16, float16, qint8. +* +*@par Outputs: +*@li y : A `Tensor`. Has the same type as `x`. +*@li mask : A `Tensor`. Must be the type : `uint8`. 
+* +*/ +REG_OP(ReluV2) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT8, DT_INT32, DT_INT16, DT_INT64, DT_UINT8, DT_UINT16, DT_QINT8})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_DOUBLE, DT_INT8, DT_INT32, DT_INT16, DT_INT64, DT_UINT8, DT_UINT16, DT_QINT8})) + .OUTPUT(mask, TensorType({DT_UINT8})) + .OP_END_FACTORY_REG(ReluV2) + +REG_OP(PRelu) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16})) + .INPUT(weight, TensorType({DT_FLOAT, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16})) + .OP_END_FACTORY_REG(PRelu) + +REG_OP(PReluGrad) + .INPUT(input_gradients, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(input_features, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(input_weights, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(output_backprops_dx, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(output_backprops_da, TensorType({DT_FLOAT16, DT_FLOAT})) + .OP_END_FACTORY_REG(PReluGrad) + +/** +*@brief Computes exponential linear: `exp(x) - 1` if < 0, `x` otherwise. +* +*@par Inputs: +* x : A `Tensor`. Must be one of the following types: `float16`, `float32`, `float64`. +* +*@par Outputs: +* y : A `Tensor`. Has the same type as `x`. +* +*/ +REG_OP(Elu) + .INPUT(x, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .ATTR(alpha, Float, 1.0) + .OP_END_FACTORY_REG(Elu) + +/** +*@brief Computes gradients for the exponential linear (Elu) operation. +* +*@par Inputs: +*@li grads : A `Tensor`. Must be one of the following types: `float16`, `float32`, `float64`. +* The backpropagated gradients to the corresponding Elu operation. +*@li activations : A `Tensor`. Must have the same type as `grads`. +* The outputs of the corresponding Elu operation. +* +*@par Outputs: +* y : A `Tensor`. Has the same type as `grads`. 
+* +*/ +REG_OP(EluGrad) + .INPUT(grads, TensorType::FloatingDataType()) + .INPUT(activations, TensorType::FloatingDataType()) + .OUTPUT(y, TensorType::FloatingDataType()) + .OP_END_FACTORY_REG(EluGrad) + +REG_OP(LeakyRelu) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8})) + .ATTR(negative_slope, Float, 0.0) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8})) + .OP_END_FACTORY_REG(LeakyRelu) + +} // namespace ge + +#endif // GE_OP_NONLINEAR_FUC_OPS_H diff --git a/third_party/fwkacllib/inc/ops/normalize_ops.h b/third_party/fwkacllib/inc/ops/normalize_ops.h new file mode 100644 index 00000000..3083c1a8 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/normalize_ops.h @@ -0,0 +1,51 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + #ifndef GE_OP_NORMALIZE_OPS_H + #define GE_OP_NORMALIZE_OPS_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(Normalize) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8})) + .ATTR(across_spatial, Bool, true) + .ATTR(channel_shared, Bool, true) + .ATTR(eps, Float, 1e-10) + .OP_END_FACTORY_REG(Normalize); + + REG_OP(NormalizeSum) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(across_spatial, Bool, true) + .OP_END_FACTORY_REG(NormalizeSum); + + REG_OP(NormalizeScale) + .INPUT(x1, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8})) + .INPUT(x2, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8})) + .INPUT(x3, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8})) + .ATTR(across_spatial, Bool, true) + .ATTR(channel_shared, Bool, true) + .ATTR(eps, Float, 1e-10) + .OP_END_FACTORY_REG(NormalizeScale); + + } // namespace ge + + #endif // GE_OP_NORMALIZE_OPS_H diff --git a/third_party/fwkacllib/inc/ops/npu_loss_scale_ops.h b/third_party/fwkacllib/inc/ops/npu_loss_scale_ops.h new file mode 100755 index 00000000..f2ed3104 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/npu_loss_scale_ops.h @@ -0,0 +1,52 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_NN_LOSS_SCALE_OPS_H +#define GE_OP_NN_LOSS_SCALE_OPS_H +#include "../graph/operator_reg.h" + +namespace ge { +REG_OP(NPUAllocFloatStatusOperator) + .OUTPUT(data, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(NPUAllocFloatStatusOperator) + +REG_OP(NPUClearFloatStatusOperator) + .INPUT(addr, TensorType{DT_FLOAT}) + .OUTPUT(data, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(NPUClearFloatStatusOperator) + +REG_OP(NPUGetFloatStatusOperator) + .INPUT(addr, TensorType{DT_FLOAT}) + .OUTPUT(data, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(NPUGetFloatStatusOperator) + +REG_OP(NPUAllocFloatStatus) + .OUTPUT(data, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(NPUAllocFloatStatus) + +REG_OP(NPUClearFloatStatus) + .INPUT(addr, TensorType{DT_FLOAT}) + .OUTPUT(data, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(NPUClearFloatStatus) + +REG_OP(NPUGetFloatStatus) + .INPUT(addr, TensorType{DT_FLOAT}) + .OUTPUT(data, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(NPUGetFloatStatus) + +} // namespace ge + +#endif // GE_OP_NN_LOSS_SCALE_OPS_H diff --git a/third_party/fwkacllib/inc/ops/outfeed_ops.h b/third_party/fwkacllib/inc/ops/outfeed_ops.h new file mode 100755 index 00000000..d5f38b30 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/outfeed_ops.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_OUTFEED_OPS_H +#define GE_OP_OUTFEED_OPS_H + +#include "graph/operator.h" +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(OutfeedEnqueueOp) + .DYNAMIC_INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, + DT_INT16, DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, + DT_UINT64, DT_BOOL, DT_DOUBLE, DT_STRING})) + .ATTR(channel_name, String, "") + .OP_END_FACTORY_REG(OutfeedEnqueueOp) + +} // namespace ge + +#endif // GE_OP_OUTFEED_OPS_H + diff --git a/third_party/fwkacllib/inc/ops/pad_ops.h b/third_party/fwkacllib/inc/ops/pad_ops.h new file mode 100644 index 00000000..dce1d906 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/pad_ops.h @@ -0,0 +1,164 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_PAD_OPS_H +#define GE_OP_PAD_OPS_H + +#include "../graph/operator_reg.h" +namespace ge { + +/** +*@brief Creates a tensor filled with a scalar value.\n +* This operation creates a tensor of shape "dims" and fills it with "value". +* +*@par Inputs: +*@li dims: A 1D tensor of types int32 or int64. Represents the shape of the output tensor. + +*@li value: A 0D scalar. Specifies the value to fill the returned tensor. +* +*@par Outputs: +* y: A tensor. Has the same type as "value". 
+* +*/ +REG_OP(Fill) + .INPUT(dims, TensorType::IndexNumberType()) + .INPUT(value, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(Fill) + +/** +*@brief Creates a tensor filled with a scalar value.\n +* This operation creates a tensor of shape "dims" and fills it with "value". +* +*@par Inputs: +* value: A 0D scalar for the value to fill the returned tensor. +* +*@par Attributes: +* dims: A tensor. Must be one of the following types:"int32" +* 1-D. Represents the shape of the output tensor. +* +*@par Outputs: +* y: A tensor. Has the same type as "value". +* +*/ +REG_OP(FillD) + .INPUT(value, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, + DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, + DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, + DT_UINT64, DT_BOOL, DT_DOUBLE})) + .REQUIRED_ATTR(dims, ListInt) + .OP_END_FACTORY_REG(FillD) + +/** +*@brief Broadcasts an array for a compatible shape.\n +* Broadcasting is the process of making arrays to have compatible shapes +* for arithmetic operations. Two shapes are compatible if for each +* dimension pair they are either equal or one of them is one. When trying +* to broadcast a Tensor to a shape, it starts with the trailing dimensions, +* and works its way forward. +* +*@par Inputs: +*@li x: A tensor. +*@li shape: A tensor of type int32 or int64. +* A 1D tensor of type int32, for the shape of the desired output. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +*/ +REG_OP(BroadcastTo) + .INPUT(x, TensorType::BasicType()) + .INPUT(shape, TensorType({DT_INT32})) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(BroadcastTo) + +/** +*@brief Broadcasts an array for a compatible shape.\n +* Broadcasting is the process of making arrays to have compatible shapes +* for arithmetic operations. 
Two shapes are compatible if for each +* dimension pair they are either equal or one of them is one. When trying +* to broadcast a Tensor to a shape, it starts with the trailing dimensions, +* and works its way forward. +* +*@par Inputs: +* x: A tensor. A tensor to broadcast. +* +*@par Attributes: +* shape: A tensor of type int32. +* A 1D tensor of type int32, for the shape of the desired output. +* +*@par Outputs: +* y: A tensor. Has the same type as "x". +* +*/ +REG_OP(BroadcastToD) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(shape, ListInt) + .OP_END_FACTORY_REG(BroadcastToD) + +REG_OP(Pad) + .INPUT(x, TensorType::BasicType()) + .INPUT(paddings, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(Pad) + +REG_OP(PadD) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8, DT_FLOAT})) + .ATTR(paddings, ListListInt, {}) + .OP_END_FACTORY_REG(PadD) + +/** +*@brief Create a diagonal tensor + +*@par Inputs: +*Two inputs, including: +* @li x: A mutable Tensor. Must be one of the following types: +* float16, float32, int32. + +* @li assist: A mutable Tensor of the same type as "x". + +*@par Outputs: +*y: A mutable Tensor. Has the same type as "x". +*/ +REG_OP(DiagD) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .INPUT(assist, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .OP_END_FACTORY_REG(DiagD) + +/** +*@brief Create a diagonal tensor + +*@par Inputs: +*One input, include: +* x: A mutable Tensor. Must be one of the following types: +* float16, float32, double, int32, int64, complex64, complex128. + +*@par Outputs: +*y: A mutable Tensor. Has the same type as "x". 
+*/ +REG_OP(Diag) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, + DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT32, + DT_INT64, DT_COMPLEX64, DT_COMPLEX128})) + .OP_END_FACTORY_REG(Diag) +} // namespace ge + +#endif //GE_OP_PAD_OPS_H diff --git a/third_party/fwkacllib/inc/ops/parsing_ops.h b/third_party/fwkacllib/inc/ops/parsing_ops.h new file mode 100644 index 00000000..f790a03c --- /dev/null +++ b/third_party/fwkacllib/inc/ops/parsing_ops.h @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_PARSING_OPS_H +#define GE_OP_PARSING_OPS_H + +#include "graph/operator_reg.h" +#include "graph/operator.h" + +namespace ge { + +REG_OP(StringToNumber) + .INPUT(x, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE, DT_INT32, DT_INT64})) + .ATTR(out_type, Type, DT_FLOAT) + .OP_END_FACTORY_REG(StringToNumber) + +} // namespace ge + +#endif // GE_OP_PARSING_OPS_H diff --git a/third_party/fwkacllib/inc/ops/power_ops.h b/third_party/fwkacllib/inc/ops/power_ops.h new file mode 100644 index 00000000..e2b5656d --- /dev/null +++ b/third_party/fwkacllib/inc/ops/power_ops.h @@ -0,0 +1,34 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_POWER_H + #define GE_OP_POWER_H + + #include "../graph/operator_reg.h" + + namespace ge { + + REG_OP(Power) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(power, Float, 1.0) + .ATTR(scale, Float, 1.0) + .ATTR(shift, Float, 0.0) + .OP_END_FACTORY_REG(Power); + + } // namespace ge + + #endif // GE_OP_POWER_H diff --git a/third_party/fwkacllib/inc/ops/prior_box.h b/third_party/fwkacllib/inc/ops/prior_box.h new file mode 100644 index 00000000..f0c275f2 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/prior_box.h @@ -0,0 +1,129 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_PRIORBOX_H + #define GE_OP_PRIORBOX_H + + #include "graph/operator_reg.h" + + namespace ge { +/** +*@brief Performs SSD prior box detection. + +*@par Inputs: +* Two inputs, including: +*@li feature: An NC1HWC0 or NCHW feature map of type is float32 or float16. +*@li img: source image. 
Has the same type and format as "feature". + +*@par Attributes: +*@li min_size: A required float32, specifying the minimum edge length of a square prior box. +*@li max_size: A required float32, specifying the maximum edge length of a square prior box: sqrt(min_size * max_size) +*@li aspect_ratio: An optional float32, specifying the aspect ratio for generated rectangle boxes. The height is min_size/sqrt(aspect_ratio), the width is min_size*sqrt(aspect_ratio). Defaults to "1.0". +*@li img_size: An optional int32, specifying the source image size. Defaults to "0". +*@li img_h: An optional int32, specifying the source image height. Defaults to "0". +*@li img_w: An optional int32, specifying the source image width. Defaults to "0". +*@li step: An optional float32, specifying the step for mapping the center point from the feature map to the source image. Defaults to "0.0". +*@li step_h: An optional float32, specifying the height step for mapping the center point from the feature map to the source image. Defaults to "0.0". +*@li step_w: An optional float32, specifying the width step for mapping the center point from the feature map to the source image. Defaults to "0.0". +*@li flip: An optional bool. If "True", "aspect_ratio" will be flipped. Defaults to "True". +*@li clip: An optional bool. If "True", a prior box is clipped to within [0, 1]. Defaults to "False". +*@li offset: An optional float32, specifying the offset. Defaults to "0.5". +*@li variance: An optional float32, specifying the variance of a prior box, either one or four variances. Defaults to "0.1" (one value). + +*@par Outputs: +*y: An ND tensor of type float32 or float16, specifying the prior box information, including its coordinates and variance. + +*@attention Constraints:\n +* This operator applies only to SSD networks. 
+*@see SSDDetectionOutput() +*/ + REG_OP(PriorBox) + .INPUT(feature, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(img, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .REQUIRED_ATTR(min_size, ListFloat) + .REQUIRED_ATTR(max_size, ListFloat) + .ATTR(aspect_ratio, ListFloat, {1.0}) + .ATTR(img_size, Int, 0) + .ATTR(img_h, Int, 0) + .ATTR(img_w, Int, 0) + .ATTR(step, Float, 0.0) + .ATTR(step_h, Float, 0.0) + .ATTR(step_w, Float, 0.0) + .ATTR(flip, Bool, true) + .ATTR(clip, Bool, false) + .ATTR(offset, Float, 0.5) + .ATTR(variance, ListFloat, {0.1}) + .OP_END_FACTORY_REG(PriorBox); + +/** +*@brief Performs SSD prior box detection, with four additional matrices and the "aspect_ratio" attribute deleted compared to PriorBox. + +*@par Inputs: +* Six inputs, including: +*@li feature: An NC1HWC0 or NCHW feature map of type is float32 or float16. +*@li img: source image. Has the same type and format as "feature". +*@li data_h: An NC1HWC0 or NCHW tensor of type float32 or float16, specifying the matrix for indexing the feature map height. +*@li data_w: An NC1HWC0 or NCHW tensor of type float32 or float16, specifying the matrix for indexing the feature map width. +*@li box_height: An NC1HWC0 or NCHW tensor of type float32 or float16, specifying the height of each prior box. +*@li box_width: An NC1HWC0 or NCHW tensor of type float32 or float16, specifying the width of each prior box. + +*@par Attributes: +*@li min_size: A required float32, specifying the minimum edge length of a square prior box. +*@li max_size: A required float32, specifying the maximum edge length of a square prior box: sqrt(min_size * max_size) +*@li img_size: An optional int32, specifying the size of the source image. +*@li img_h: An optional int32, specifying the height of the source image. +*@li img_w: An optional int32, specifying the width of the source image. 
+*@li step: An optional float32, specifying the step for mapping the center point from the feature map to the source image. +*@li step_h: An optional float32, specifying the height step for mapping the center point from the feature map to the source image. +*@li step_w: An optional float32, specifying the width step for mapping the center point from the feature map to the source image. +*@li flip: An optional bool. If "True", "aspect_ratio" will be flipped. Defaults to "True". +*@li clip: An optional bool. If "True", a prior box is clipped to within [0, 1]. Defaults to "False". +*@li offset: An optional float32, specifying the offset. Defaults to "0.5". +*@li variance: An optional float32, specifying the variance of a prior box, either one or four variances. Defaults to "0.1" (one value). + +*@par Outputs: +*y: An ND tensor of type float32 or float16, specifying the prior box information, including its coordinates and variance. + +*@attention Constraints:\n +* This operator applies only to SSD networks. 
+*@see SSDDetectionOutput() +*/ + REG_OP(PriorBoxD) + .INPUT(feature, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(img, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(data_h, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(data_w, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(box_height, TensorType({DT_FLOAT16, DT_FLOAT})) + .INPUT(box_width, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT})) + .REQUIRED_ATTR(min_size, ListFloat) + .REQUIRED_ATTR(max_size, ListFloat) + .ATTR(img_size, Int, 0) + .ATTR(img_h, Int, 0) + .ATTR(img_w, Int, 0) + .ATTR(step, Float, 0.0) + .ATTR(step_h, Float, 0.0) + .ATTR(step_w, Float, 0.0) + .ATTR(flip, Bool, true) + .ATTR(clip, Bool, false) + .ATTR(offset, Float, 0.5) + .ATTR(variance, ListFloat, {0.1}) + .OP_END_FACTORY_REG(PriorBoxD); + + } // namespace ge + + #endif // GE_OP_PRIORBOX_H diff --git a/third_party/fwkacllib/inc/ops/quantize_ops.h b/third_party/fwkacllib/inc/ops/quantize_ops.h new file mode 100644 index 00000000..cac79015 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/quantize_ops.h @@ -0,0 +1,91 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_QUANTIZE_OPS_H +#define GE_OP_QUANTIZE_OPS_H +#include "../graph/operator_reg.h" + +namespace ge { +REG_OP(QuantizedInnerProduct) + .INPUT(x, TensorType({DT_UINT8})) + .INPUT(w, TensorType({DT_INT8})) + .OPTIONAL_INPUT(b, TensorType({DT_INT32})) + .OPTIONAL_INPUT(scale_q, TensorType({DT_FLOAT16})) + .OPTIONAL_INPUT(offset_q, TensorType({DT_FLOAT16})) + .OPTIONAL_INPUT(scale_deq_req, TensorType({DT_FLOAT16})) + .OPTIONAL_INPUT(offset_req, TensorType({DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_FLOAT16})) + .REQUIRED_ATTR(quant_algo, ListInt) + .REQUIRED_ATTR(scale_sqrt, ListInt) + .REQUIRED_ATTR(num_output, Int) + .ATTR(transpose, Bool, false) + .ATTR(bias_term, Bool, false) + .ATTR(axis, Int, 1) + .OP_END_FACTORY_REG(QuantizedInnerProduct) + +/** +* @brief Dequantizes the input tensor into a float tensor.\n +* [input_min_range, input_max_range] are scalar floats that specify the range +* for "output_data". +* The "mode" attribute controls exactly which calculations are used to convert\n +* the float values to their quantized equivalents. +* @par Inputs: +* @li input_data: A Tensor. Must be one of the following types: int8, uint8, +* int32. +* @li input_min_range: A Tensor of type float32. +* Specifies the minimum scalar value possibly produced for the input. +* @li input_max_range: A Tensor of type float32. +* Specifies the maximum scalar value possibly produced for the input. + +* @par Attributes: +* mode: An optional string from: "MIN_COMBINED", "MIN_FIRST", and "SCALED". +* Defaults to "MIN_COMBINED". + +* @par Outputs: +* output_data: A dictionary of type float32. + +* @attention Constraints: +* @li "input_min_range" and "input_max_range" have the same shapes. +* @li "input_data" and "output_data" have the same shapes. 
+*/ +REG_OP(Dequantize) + .INPUT(x, TensorType(DT_QINT8, DT_QUINT8, DT_QINT32, DT_QINT16, DT_QUINT16)) + .INPUT(min_range, TensorType{DT_FLOAT}) + .INPUT(max_range, TensorType{DT_FLOAT}) + .OUTPUT(y, TensorType({DT_FLOAT})) + .ATTR(mode, String, "MIN_COMBINED") + .OP_END_FACTORY_REG(Dequantize) + +REG_OP(AscendQuant) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT32})) + .OUTPUT(y, TensorType({DT_INT8})) + .REQUIRED_ATTR(scale, Float) + .REQUIRED_ATTR(sqrt_mode, Bool) + .REQUIRED_ATTR(offset, Float) + .ATTR(round_mode, String, "Round") + .OP_END_FACTORY_REG(AscendQuant) + +REG_OP(AscendDequant) + .INPUT(x, TensorType({DT_INT32})) + .INPUT(deq_scale, TensorType({DT_FLOAT16, DT_UINT64})) + .OUTPUT(y, TensorType({DT_FLOAT16})) + .REQUIRED_ATTR(sqrt_mode, Bool) + .REQUIRED_ATTR(relu_flag, Bool) + .OP_END_FACTORY_REG(AscendDequant) + +} // namespace ge + +#endif // GE_OP_QUANTIZE_OPS_H diff --git a/third_party/fwkacllib/inc/ops/random_ops.h b/third_party/fwkacllib/inc/ops/random_ops.h new file mode 100755 index 00000000..fe9643f4 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/random_ops.h @@ -0,0 +1,158 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_RANDOM_OPS_H_ +#define GE_OP_RANDOM_OPS_H_ + +#include + +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(Multinomial) + .INPUT(logits, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(num_samples, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .ATTR(output_dtype, Type, DT_INT64) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(Multinomial) + +REG_OP(ParameterizedTruncatedNormal) + .INPUT(shape, TensorType({DT_INT32})) + .INPUT(means, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(stdevs, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(min, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(max, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(ParameterizedTruncatedNormal) + +REG_OP(RandomGammaGrad) + .INPUT(alpha, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(sample, TensorType({DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(RandomGammaGrad) + +REG_OP(RandomGamma) + .INPUT(shape, TensorType({DT_INT32, DT_INT64})) + .INPUT(alpha, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(RandomGamma) + +REG_OP(RandomPoisson) + .INPUT(shape, TensorType({DT_INT32, DT_INT64})) + .INPUT(rate, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, \ + DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, \ + DT_INT32, DT_INT64})) + .ATTR(dtype, Type, DT_INT64) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(RandomPoisson) + +REG_OP(RandomShuffle) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, \ + DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, 
DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, \ + DT_UINT64, DT_BOOL, DT_DOUBLE})) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(RandomShuffle) + +REG_OP(RandomStandardNormal) + .INPUT(shape, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .REQUIRED_ATTR(dtype, Type) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(RandomStandardNormal) + +REG_OP(RandomUniformInt) + .INPUT(shape, TensorType({DT_INT32, DT_INT64})) + .INPUT(min, TensorType({DT_INT32, DT_INT64})) + .INPUT(max, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(RandomUniformInt) + +REG_OP(RandomUniform) + .INPUT(shape, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .REQUIRED_ATTR(dtype, Type) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(RandomUniform) + +REG_OP(TruncatedNormal) + .INPUT(shape, TensorType({ DT_INT32, DT_INT64 })) + .OUTPUT(y, TensorType({ DT_FLOAT16, DT_FLOAT, DT_DOUBLE })) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(TruncatedNormal) + +REG_OP(DropOutGenMask) + .INPUT(shape, TensorType({ DT_INT32, DT_INT64 })) + .INPUT(prob, TensorType({ DT_FLOAT16, DT_FLOAT })) + .OUTPUT(y, TensorType({ DT_UINT8 })) + .ATTR(seed, Int, 0) + .ATTR(seed2, Int, 0) + .OP_END_FACTORY_REG(DropOutGenMask) + + +REG_OP(LinSpaceD) + .INPUT(assist, TensorType({DT_FLOAT})) + .INPUT(start, TensorType({DT_FLOAT})) + .INPUT(stop, TensorType({DT_FLOAT})) + .INPUT(num, TensorType::IndexNumberType()) + .OUTPUT(output, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(LinSpaceD) + +REG_OP(LinSpace) + .INPUT(start, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(stop, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(num, TensorType::IndexNumberType()) + .OUTPUT(output, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(LinSpace) 
+
+REG_OP(Dropout)
+    .INPUT(x, TensorType({DT_FLOAT}))
+    .OUTPUT(y, TensorType({DT_FLOAT}))
+    .ATTR(dropout_ratio, Float, 0.5)
+    .ATTR(scale_train, Bool, true)
+    .ATTR(alpha, Float, 1.0)
+    .ATTR(beta, Float, 0.0)
+    .OP_END_FACTORY_REG(Dropout)
+
+REG_OP(RandomChoiceWithMask)
+    .INPUT(x, TensorType({DT_BOOL}))
+    .OUTPUT(y, TensorType({DT_INT32}))
+    .OUTPUT(mask, TensorType({DT_BOOL}))
+    .ATTR(count, Int, 0)
+    .ATTR(seed, Int, 0)
+    .ATTR(seed2, Int, 0)
+    .OP_END_FACTORY_REG(RandomChoiceWithMask)
+
+} // namespace ge
+
+#endif // GE_OP_RANDOM_OPS_H_
diff --git a/third_party/fwkacllib/inc/ops/reduce_ops.h b/third_party/fwkacllib/inc/ops/reduce_ops.h
new file mode 100644
index 00000000..a74ec28c
--- /dev/null
+++ b/third_party/fwkacllib/inc/ops/reduce_ops.h
@@ -0,0 +1,303 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */ + +#ifndef GE_OP_REDUCE_OPS_H +#define GE_OP_REDUCE_OPS_H + +#include "../graph/operator_reg.h" + +namespace ge { +REG_OP(BNTrainingReduce) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(sum, TensorType({DT_FLOAT})) + .OUTPUT(square_sum, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(BNTrainingReduce) + +REG_OP(BNTrainingReduceGrad) + .INPUT(grads, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(diff_scale, TensorType({DT_FLOAT})) + .INPUT(diff_offset, TensorType({DT_FLOAT})) + .INPUT(scale, TensorType({DT_FLOAT})) + .INPUT(batch_mean, TensorType({DT_FLOAT})) + .INPUT(batch_variance, TensorType({DT_FLOAT})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .ATTR(epsilon, Float, 0.0001) + .OP_END_FACTORY_REG(BNTrainingReduceGrad) + +REG_OP(BNTrainingUpdate) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(sum, TensorType({DT_FLOAT})) + .INPUT(square_sum, TensorType({DT_FLOAT})) + .INPUT(scale, TensorType({DT_FLOAT})) + .INPUT(offset, TensorType({DT_FLOAT})) + .INPUT(mean, TensorType({DT_FLOAT})) + .INPUT(variance, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(factor, Float) + .REQUIRED_ATTR(epsilon, Float) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(mean, TensorType({DT_FLOAT})) + .OUTPUT(variance, TensorType({DT_FLOAT})) + .OUTPUT(batch_mean, TensorType({DT_FLOAT})) + .OUTPUT(batch_variance, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(BNTrainingUpdate) + +REG_OP(BNTrainingUpdateV2) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(sum, TensorType({DT_FLOAT})) + .INPUT(square_sum, TensorType({DT_FLOAT})) + .INPUT(scale, TensorType({DT_FLOAT})) + .INPUT(offset, TensorType({DT_FLOAT})) + .REQUIRED_ATTR(epsilon, Float) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT})) + .OUTPUT(batch_mean, TensorType({DT_FLOAT})) + .OUTPUT(batch_variance, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(BNTrainingUpdateV2) + +REG_OP(BNTrainingUpdateGrad) + .INPUT(grads, TensorType({DT_FLOAT16,DT_FLOAT})) + 
.INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(batch_mean, TensorType({DT_FLOAT})) + .INPUT(batch_variance, TensorType({DT_FLOAT})) + .ATTR(epsilon, Float, 0.0001) + .OUTPUT(diff_scale, TensorType({DT_FLOAT})) + .OUTPUT(diff_offset, TensorType({DT_FLOAT})) + .OP_END_FACTORY_REG(BNTrainingUpdateGrad) + +REG_OP(BNInferGrad) + .INPUT(grads, TensorType({DT_FLOAT16,DT_FLOAT})) + .INPUT(scale, TensorType({DT_FLOAT})) + .INPUT(batch_variance, TensorType({DT_FLOAT})) + .OUTPUT(x_backprop, TensorType({DT_FLOAT16,DT_FLOAT})) + .ATTR(epsilon, Float, 0.0001) + .OP_END_FACTORY_REG(BNInferGrad) + +REG_OP(ReduceSum) + .INPUT(x, TensorType::NumberType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::NumberType()) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceSum) + +REG_OP(ReduceSumD) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8, DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8, DT_INT32})) + .REQUIRED_ATTR(axis, ListInt) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceSumD) + +/** +*@brief Calculates the "logical sum" of elements of a tensor in a dimension. + +*@par Inputs: +*One input: +*x: A mutable Tensor. Must be one of the following types: float16, +* float32, double. Should be a Variable Tensor. + +*@par Attributes: +*@li keep_dims: A bool. If true, retains reduced dimensions with length 1. +*@li axis: The dimensions to reduce. If None, reduces all dimensions. +*Must be in the range [- rank (input_sensor), rank (input_sensor)). + +*@par Outputs: +*y: The reduced tensor. +*/ +REG_OP(ReduceAllD) + .INPUT(x, TensorType({DT_BOOL})) + .OUTPUT(y, TensorType({DT_BOOL})) + .REQUIRED_ATTR(axis, ListInt) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceAllD) + +/** +*@brief Calculates the "logical sum" of elements of a tensor in a dimension. + +*@par Inputs: +*Two inputs, including: +*@li x: A mutable Tensor. Must be one of the following types: float16, float32, double. 
Should be a Variable Tensor. +*@li axis: A mutable Tensor. The dimensions to reduce. If None, reduces all dimensions. Must be in the range [- rank (input_sensor), rank (input_sensor)). + +*@par Attributes: +*keep_dims: A bool. If true, retains reduced dimensions with length 1. + +*@par Outputs: +*y: The reduced tensor. +*/ +REG_OP(ReduceAll) + .INPUT(x, TensorType({DT_BOOL})) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType({DT_BOOL})) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceAll) + +REG_OP(ReduceProd) + .INPUT(x,TensorType::NumberType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y,TensorType::NumberType()) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceProd) + +REG_OP(ReduceProdD) + .INPUT(x,TensorType({DT_FLOAT, DT_UINT8, DT_INT8, DT_INT32, DT_FLOAT16})) + .OUTPUT(y,TensorType({DT_FLOAT, DT_UINT8, DT_INT8, DT_INT32, DT_FLOAT16})) + .REQUIRED_ATTR(axis, ListInt) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceProdD) + +/** +*@brief Reduces "x" along the dimensions according to "axis". + +*@par Inputs: +*Two inputs, including: +* @li x: A Tensor. Must be one of the following types: float16, float32, int8, uint8. +* @li axis: The dimensions to reduce. Must be one of the following types: int, list, tuple, NoneType.\n +* - If None (the default), reduces all dimensions.\n +* - Must be in the range [-rank(x), rank(x)). + +*@par Attributes: +*keep_dims: A bool or NoneType. \n +* - If true, retains reduced dimensions with length 1. \n +* - If false, the rank of the tensor is reduced by 1 for each entry in axis. +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(ReduceMean) + .INPUT(x, TensorType::NumberType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::NumberType()) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceMean) + +/** +*@brief Reduces "x" along the dimensions according to "axis". + +*@par Inputs: +*One input: +* @li x: A Tensor. 
Must be one of the following types: float16, float32, int8, uint8. + +*@par Attributes: +*@li axis: The dimensions to reduce. Must be one of the following types: int, list, tuple, NoneType. \n +* If None (the default), reduces all dimensions. \n +* Must be in the range [-rank(x), rank(x)). \n +*@li keep_dims: A bool or NoneType. \n +* - If true, retains reduced dimensions with length 1. \n +* - If false, the rank of the tensor is reduced by 1 for each entry in axis. +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(ReduceMeanD) + .INPUT(x, TensorType({DT_FLOAT16, DT_INT32, DT_FLOAT, DT_INT8, DT_UINT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_INT32, DT_FLOAT, DT_INT8, DT_UINT8})) + .REQUIRED_ATTR(axis, ListInt) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceMeanD) + +REG_OP(ReduceMax) + .INPUT(x, TensorType::NumberType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::NumberType()) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceMax) + +REG_OP(ReduceMaxD) + .INPUT(x, TensorType({DT_FLOAT, DT_UINT8, DT_INT8, + DT_FLOAT16, DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_UINT8, DT_INT8, + DT_FLOAT16, DT_INT32})) + .REQUIRED_ATTR(axis, ListInt) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceMaxD) + +REG_OP(ReduceMin) + .INPUT(x, TensorType::NumberType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::NumberType()) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceMin) + +REG_OP(ReduceMinD) + .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT8,DT_UINT8})) + .OUTPUT(y, TensorType({DT_FLOAT16,DT_FLOAT,DT_INT8,DT_UINT8})) + .REQUIRED_ATTR(axis, ListInt) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceMinD) +/** +*@brief Computes the "logical or" of elements across dimensions of a tensor.\n +* Reduces `x` along the dimensions given in `axis`. +* Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each +* entry in `axis`. 
If `keep_dims` is true, the reduced dimensions +* are retained with length 1. +* +* If `axis` is None, all dimensions are reduced, and a +* tensor with a single element is returned. +* +*@attention Constraints:\n +* Only support bool +* +*@par Inputs: +*@li x : The boolean tensor to reduce. +*@li axis : The dimensions to reduce. If `None` (the default), reduces all +* dimensions. Must be in the range `[-rank(x), rank(x))`. +* +*@par Attributes: +* keep_dims : If true, retains reduced dimensions with length 1. +* +*@par Outputs: +* y : The reduced tensor +* +*/ +REG_OP(ReduceAny) + .INPUT(x, TensorType({DT_BOOL})) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType({DT_BOOL})) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceAny) +/** +*@brief Computes the "logical or" of elements across dimensions of a tensor.\n +* Reduces `x` along the dimensions given in `axis`. +* Unless `keep_dims` is true, the rank of the tensor is reduced by 1 for each +* entry in `axis`. If `keep_dims` is true, the reduced dimensions +* are retained with length 1. +* +* If `axis` is None, all dimensions are reduced, and a +* tensor with a single element is returned. +* +*@attention Constraints:\n +* Only support bool +* +*@par Inputs: +* x : The boolean tensor to reduce. +* +*@par Attributes: +*@li axis : The dimensions to reduce. If `None` (the default), reduces all +* dimensions. Must be in the range `[-rank(x), rank(x))`. +*@li keep_dims : If true, retains reduced dimensions with length 1. 
+* +*@par Outputs: +* y : The reduced tensor +* +*/ +REG_OP(ReduceAnyD) + .INPUT(x, TensorType({DT_BOOL})) + .OUTPUT(y, TensorType({DT_BOOL})) + .REQUIRED_ATTR(axis, ListInt) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(ReduceAnyD) + +} //namespace ge + + +#endif /* GE_OP_REDUCE_OPS_H */ diff --git a/third_party/fwkacllib/inc/ops/reduction_ops.h b/third_party/fwkacllib/inc/ops/reduction_ops.h new file mode 100755 index 00000000..fb4a4b59 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/reduction_ops.h @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + + #ifndef GE_OP_REDUCTION_H + #define GE_OP_REDUCTION_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(Reduction) + .INPUT(input_x, TensorType({DT_FLOAT16, DT_FLOAT})) + .OUTPUT(output_y, TensorType({DT_FLOAT16, DT_FLOAT})) + .ATTR(operation, String, "SUM") + .ATTR(axis, Int, 0) + .ATTR(coeff, Float, 1.0) + .OP_END_FACTORY_REG(Reduction); + } // namespace ge + + #endif // GE_OP_REDUCTION_H diff --git a/third_party/fwkacllib/inc/ops/roipooling_ops.h b/third_party/fwkacllib/inc/ops/roipooling_ops.h new file mode 100755 index 00000000..35625815 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/roipooling_ops.h @@ -0,0 +1,37 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef GE_OP_ROIPOOLING_OPS_H_
+#define GE_OP_ROIPOOLING_OPS_H_
+
+#include "graph/operator_reg.h"
+
+namespace ge {
+
+REG_OP(RoiPooling)
+    .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16}))
+    .INPUT(rois, TensorType({DT_FLOAT, DT_FLOAT16}))
+    .INPUT(roi_actual_num, TensorType({DT_INT32}))
+    .ATTR(roi_max_num, Int, 3008)
+    .REQUIRED_ATTR(pooled_h, Int)
+    .REQUIRED_ATTR(pooled_w, Int)
+    .ATTR(spatial_scale, Float, 0.0625)
+    .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16}))
+    .OP_END_FACTORY_REG(RoiPooling)
+
+} // namespace ge
+
+#endif // GE_OP_ROIPOOLING_OPS_H_
diff --git a/third_party/fwkacllib/inc/ops/rpn_ops.h b/third_party/fwkacllib/inc/ops/rpn_ops.h
new file mode 100755
index 00000000..1c45f1af
--- /dev/null
+++ b/third_party/fwkacllib/inc/ops/rpn_ops.h
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GE_OP_RPN_OPS_H
+#define GE_OP_RPN_OPS_H
+
+#include "../graph/operator_reg.h"
+namespace ge {
+REG_OP(NMSWithMask)
+    .INPUT(box_scores, TensorType({DT_FLOAT, DT_FLOAT16}))
+    .OUTPUT(selected_boxes, TensorType({DT_FLOAT, DT_FLOAT16}))
+    .OUTPUT(selected_idx, TensorType({DT_INT32}))
+    .OUTPUT(selected_mask, TensorType({DT_UINT8}))
+    .ATTR(iou_threshold, Float, 0.5)
+    .OP_END_FACTORY_REG(NMSWithMask)
+} // namespace ge
+
+#endif // GE_OP_RPN_OPS_H
diff --git a/third_party/fwkacllib/inc/ops/save_ops.h b/third_party/fwkacllib/inc/ops/save_ops.h
new file mode 100644
index 00000000..3f44d1ce
--- /dev/null
+++ b/third_party/fwkacllib/inc/ops/save_ops.h
@@ -0,0 +1,31 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at
+ *
+ * http://www.apache.org/licenses/LICENSE-2.0
+ *
+ * Unless required by applicable law or agreed to in writing, software
+ * distributed under the License is distributed on an "AS IS" BASIS,
+ * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ * See the License for the specific language governing permissions and
+ * limitations under the License.
+ */
+
+#ifndef GE_OP_SAVE_OPS_H_
+#define GE_OP_SAVE_OPS_H_
+
+#include "graph/operator_reg.h"
+
+namespace ge {
+
+REG_OP(Save)
+    .DYNAMIC_INPUT(tensors, TensorType::ALL())
+    .OP_END_FACTORY_REG(Save)
+
+} // namespace ge
+
+
+#endif // GE_OP_SAVE_OPS_H_
diff --git a/third_party/fwkacllib/inc/ops/scale_ops.h b/third_party/fwkacllib/inc/ops/scale_ops.h
new file mode 100755
index 00000000..1910020b
--- /dev/null
+++ b/third_party/fwkacllib/inc/ops/scale_ops.h
@@ -0,0 +1,37 @@
+/**
+ * Copyright 2019-2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ * you may not use this file except in compliance with the License.
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_SCALE_OPS_H + #define GE_OP_SCALE_OPS_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(Scale) + .INPUT(input_x, TensorType({DT_FLOAT, DT_FLOAT16})) /* "First operand." */ + .INPUT(scale_param, TensorType({DT_FLOAT, DT_FLOAT16})) /* "Second operand." */ + .INPUT(bias_param, TensorType({DT_FLOAT, DT_FLOAT16})) /* "Third operand." */ + .OUTPUT(output_y, TensorType({DT_FLOAT, DT_FLOAT16})) /* "Result, has same element type as input_x" */ + .ATTR(axis, Int, 1) + .ATTR(num_axes, Int, 1) + .ATTR(bias_term, Bool, false) + .ATTR(scale_from_blob, Bool, true) + .OP_END_FACTORY_REG(Scale) + + } // namespace ge + + #endif // GE_OP_SCALE_OPS_H diff --git a/third_party/fwkacllib/inc/ops/selection_ops.h b/third_party/fwkacllib/inc/ops/selection_ops.h new file mode 100644 index 00000000..3928a7b5 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/selection_ops.h @@ -0,0 +1,1735 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_SELECTION_OPS_H +#define GE_OP_SELECTION_OPS_H +#include "../graph/operator_reg.h" + +namespace ge { +/** +*@brief Creates a sequence of numbers. + +*@par Inputs: +*Three inputs, including: +* @li start: A 0D Tensor (scalar). Acts as first entry in the range if "limit" +* is not "None"; otherwise, acts as range limit and first entry defaults to "0". +* The supported types are: float32, int32, double, int64. +* @li limit: A 0D Tensor (scalar). Upper limit of sequence, exclusive. If "None", +* defaults to the value of "start" while the first entry of the range +* defaults to "0". The supported types are: float32, int32, double, int64. +* @li delta: A 0D Tensor (scalar). Number that increments "start". +* Defaults to "1". The supported types are: float32, int32, double, int64. + +*@par Outputs: +*y: A 1D Tensor. +*/ +REG_OP(Range) + .INPUT(start, TensorType({DT_FLOAT,DT_INT32,DT_DOUBLE,DT_INT64})) + .INPUT(limit, TensorType({DT_FLOAT,DT_INT32,DT_DOUBLE,DT_INT64})) + .INPUT(delta, TensorType({DT_FLOAT,DT_INT32,DT_DOUBLE,DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT,DT_INT32,DT_DOUBLE,DT_INT64})) + .OP_END_FACTORY_REG(Range) + +/** +*@brief: Creates a sequence of numbers. + +*@par Inputs: +*Four inputs, including: +* @li x: A 1D Tensor of type float32 or int32. The assistant data. +* @li start: A 0D Tensor (scalar) of type float32 or int32. Acts as first entry in the range if "limit" +* is not "None"; otherwise, acts as range limit and first entry defaults to "0". +* @li limit: A 0D Tensor (scalar) of type float32 or int32. +* Upper limit of sequence, exclusive. If "None", +* defaults to the value of "start" while the first entry of the range +* defaults to "0". +* @li delta: A 0D Tensor (scalar) of type float32 or int32. +* Number that increments "start". Defaults to "1". + +*@par Outputs: +*y: A 1D Tensor. 
+ +*@par Quantization supported or not +*Not supported + +*@par Quantized inference supported or not +*Not supported + +*@par Multiple batches supported or not +*Supported + +*@see Range() +*@since V100R001C33 +*/ +REG_OP(RangeD) + .INPUT(x, TensorType({DT_FLOAT,DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT,DT_INT32})) + .REQUIRED_ATTR(start, Float) + .REQUIRED_ATTR(limit, Float) + .REQUIRED_ATTR(delta, Float) + .OP_END_FACTORY_REG(RangeD) + +/** +*@brief Constructs a tensor by tiling a given tensor. + +*@par Inputs: +*Two inputs, including: +* @li x: A Tensor of type TensorType::BasicType(). +* @li multiples: A 1D Tensor of type int32 or int64. +* The length must be the same as the number of dimensions in "input" + +*@par Outputs: +*y: A Tensor. Has the same type as "x". + +*@see TileD() +*/ +REG_OP(Tile) + .INPUT(x, TensorType::BasicType()) + .INPUT(multiples, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(Tile) + +/** +*@brief Constructs a tensor by tiling a given tensor. + +*@par Inputs: +*x: A Tensor. Must be one of the following types: float32, float16, int32. + +*@par Attributes: +*multiples: A required Tensor of type int32 or int64. +* Number of replication times. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". + +*@see Tile() +*/ +REG_OP(TileD) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32})) + .REQUIRED_ATTR(multiples, ListInt) + .OP_END_FACTORY_REG(TileD) + +/** +* @brief Gather slices from "params" into a tensor with shape specified by\n +* "indices". 
"indices" is an K-dimensional integer tensor, best thought of as a\n +* (K-1)-dimensional tensor of "indices" into "params", where each element\n +* defines a slice of "params":\n +* output[\\(i_0, ..., i_{K-2}\\)] = params[indices[\\(i_0, ..., i_{K-2}\\)]]\n +* "indices" defines slices into the first N dimensions of\n +* "params", where\n +* N = indices.shape[-1]\n +* indices = [[0, 0], [1, 1]]\n +* params = [['a', 'b'], ['c', 'd']]\n +* output = ['a', 'd']\n + +* @par Inputs: +* @li params: A Tensor of type BasicType. +* @li indices: A Tensor of type IndexNumberType. + +* @par Outputs: +* output: A Tensor of type BasicType. +* @see GatherNd() + +* @attention Constraints: +* @li "params" is one of the following types: float16, float32, int32, int8, +* uint8. +*/ +REG_OP(GatherNd) + .INPUT(x1, TensorType::BasicType()) + .INPUT(x2, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(GatherNd) + +/** +*@brief Gather slices from "x" according to "indices" by corresponding axis. + +*@par Inputs: +*Three inputs, including: +* @li x: A Tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, \n +* complex64, int64, qint8, quint8, qint32, qint16, quint16, uint16, \n +* complex128, float16, uint32, uint64, complex64, complex128. +* @li indices: A Tensor of type int32 or int64. +* @li axis: A Tensor of type as int32. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". + +*@attention Constraints: +*Value in indices must be in range [0, x.shape[axis]) +*/ +REG_OP(GatherV2) + .INPUT(x, TensorType::BasicType()) + .INPUT(indices, TensorType::IndexNumberType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(GatherV2) + +/** +*@brief Gather slices from "x" according to "indices" by corresponding axis. + +*@par Inputs: +*Two inputs, including: +* @li x: A Tensor. 
Must be one of the following types: float32, float16, int32, uint32, int8, uint8, \n +* int16, uint16, int64, uint64. +* @li indices: A Tensor of type int32 or int64. + +*@par Attributes: +*axis: A int32 specifying the axis to gather from. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(GatherV2D) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_UINT32, DT_INT8, DT_UINT8, + DT_INT16, DT_UINT16, DT_INT64, DT_UINT64})) + .INPUT(indices, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_UINT32, DT_INT8, DT_UINT8, + DT_INT16, DT_UINT16, DT_INT64, DT_UINT64})) + .REQUIRED_ATTR(axis, Int) + .OP_END_FACTORY_REG(GatherV2D) + +/** +*@brief Extracts a strided slice of a tensor. Roughly speaking, this op \n + extracts a slice of size (end-begin)/stride from the given input tensor. \n + Starting at the location specified by begin the slice continues by \n + adding stride to the index until all dimensions are not less than end. \n + +*@par Inputs: +*Four inputs, including: +* @li x: A Tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, \n +* complex64, int64, qint8, quint8, qint32, qint16, quint16, uint16, \n +* complex128, float16, uint32, uint64, complex64, complex128. \n +* @li begin: A Tensor of type int32 or int64, for the index of the first value to select. + +* @li end: A Tensor of type int32 or int64, for the index of the last value to select. + +* @li strides: A Tensor of type int32 or int64, for the increment. + +*@par Attributes: +* @li begin_mask: A Tensor of type int32. \n + A bitmask where a bit "i" being "1" means to ignore the begin \n + value and instead use the largest interval possible. +* @li end_mask: A Tensor of type int32. \n + Analogous to "begin_mask". +* @li ellipsis_mask: A Tensor of type int32. \n + A bitmask where bit "i" being "1" means the "i"th position \n + is actually an ellipsis. +* @li new_axis_mask: A Tensor of type int32. 
\n + A bitmask where bit "i" being "1" means the "i"th \n + specification creates a new shape 1 dimension. +* @li shrink_axis_mask: A Tensor of type int32. \n + A bitmask where bit "i" implies that the "i"th \n + specification should shrink the dimensionality. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(StridedSlice) + .INPUT(x, TensorType::BasicType()) + .INPUT(begin, TensorType::IndexNumberType()) + .INPUT(end, TensorType::IndexNumberTypeT()) + .INPUT(strides, TensorType::IndexNumberType()) + .ATTR(begin_mask, Int, 0) + .ATTR(end_mask, Int, 0) + .ATTR(ellipsis_mask, Int, 0) + .ATTR(new_axis_mask, Int, 0) + .ATTR(shrink_axis_mask, Int, 0) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(StridedSlice) + +/** +*@brief Extracts a strided slice of a tensor. Roughly speaking, this op \n + extracts a slice of size "(end-begin)/stride" from the given input tensor. \n + Starting at the location specified by "begin" the slice continues by \n + adding "stride" to the index until all dimensions are not less than "end". + +*@par Inputs: +*x: A Tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, \n +* complex64, int64, qint8, quint8, qint32, qint16, quint16, uint16, \n +* complex128, float16, uint32, uint64, complex64, complex128. + +*@par Attributes: +* @li begin: A Tensor of type int32 or int64. \n + The index of the first value to select. +* @li end: A Tensor of type int32 or int64. \n + The index of the last value to select. +* @li strides: A Tensor of type int32 or int64, for the increment. \n +* @li begin_mask: A Tensor of type int32. \n + A bitmask where a bit "i" being "1" means to ignore the begin \n + value and instead use the largest interval possible. +* @li end_mask: Analogous to "begin_mask". A Tensor of type as int32. +* @li ellipsis_mask: A Tensor of type int32. \n + A bitmask where bit "i" being "1" means the "i"th position \n + is actually an ellipsis. 
+* @li new_axis_mask: A Tensor of type int32. \n + A bitmask where bit "i" being "1" means the "i"th \n + specification creates a new shape 1 dimension. +* @li shrink_axis_mask: A Tensor of type int32. \n + A bitmask where bit "i" implies that the "i"th \n + specification should shrink the dimensionality. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(StridedSliceD) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_UINT8, DT_INT8, + DT_BOOL})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_UINT8, DT_INT8, + DT_BOOL})) + .ATTR(begin, ListInt, {}) + .ATTR(end, ListInt, {}) + .ATTR(strides, ListInt, {}) + .ATTR(begin_mask, Int, 0) + .ATTR(end_mask, Int, 0) + .ATTR(ellipsis_mask, Int, 0) + .ATTR(new_axis_mask, Int, 0) + .ATTR(shrink_axis_mask, Int, 0) + .OP_END_FACTORY_REG(StridedSliceD) + +/** +*@brief Since StridedSlice cuts out pieces of its "input" which is size "dy", \n + its gradient will have the same shape (which is passed here as "shape"). \n + The gradient will be zero in any element that the slice does not select. + +*@par Inputs: +*dy: A Tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, \n +* complex64, int64, qint8, quint8, qint32, qint16, quint16, uint16, \n +* complex128, float16, uint32, uint64, complex64, complex128. + +*@par Attributes: +* @li shape: A Tensor of type int32 or int64. +* @li begin: A Tensor of type int32 or int64. \n + The index of the first value to select. +* @li end: A Tensor of type int32 or int64. \n + The index of the last value to select. +* @li strides: A Tensor of type int32 or int64, for the increment. +* @li begin_mask: A Tensor of type int32. \n + A bitmask where a bit "i" being "1" means to ignore the begin \n + value and instead use the largest interval possible. +* @li end_mask: A Tensor of type int32. \n + Analogous to "begin_mask". +* @li ellipsis_mask: A Tensor of type int32. 
\n + A bitmask where bit "i" being "1" means the "i"th position \n + is actually an ellipsis. +* @li new_axis_mask: A Tensor of type int32. \n + A bitmask where bit "i" being "1" means the "i"th \n + specification creates a new shape 1 dimension. +* @li shrink_axis_mask: A Tensor of type int32. \n + A bitmask where bit "i" implies that the "i"th \n + specification should shrink the dimensionality. + +*@par Outputs: +*output: A Tensor. Has the same type as "dy". +*/ +REG_OP(StridedSliceGradD) + .INPUT(dy, TensorType::BasicType()) + .OUTPUT(output, TensorType::BasicType()) + .ATTR(shape, ListInt, {}) + .ATTR(begin, ListInt, {}) + .ATTR(end, ListInt, {}) + .ATTR(strides, ListInt, {}) + .ATTR(begin_mask, Int, 0) + .ATTR(end_mask, Int, 0) + .ATTR(ellipsis_mask, Int, 0) + .ATTR(new_axis_mask, Int, 0) + .ATTR(shrink_axis_mask, Int, 0) + .OP_END_FACTORY_REG(StridedSliceGradD) + +/** +*@brief Since StridedSlice cuts out pieces of its "input" which is size "dy", \n + its gradient will have the same shape (which is passed here as "shape"). \n + The gradient will be zero in any element that the slice does not select. + +*@par Inputs: +*Five inputs, including: +* @li shape: A Tensor of type int32 or int64. +* @li begin: A Tensor of type int32 or int64. \n + The index of the first value to select. +* @li end: A Tensor of type int32 or int64. \n + The index of the last value to select. +* @li strides: A Tensor of type int32 or int64, for the increment. +* @li dy: A Tensor. Must be one of the following types: \n +* float32, float64, int32, uint8, int16, int8, \n +* complex64, int64, qint8, quint8, qint32, qint16, quint16, uint16, \n +* complex128, float16, uint32, uint64, complex64, complex128. + +*@par Attributes: +* @li begin_mask: A Tensor of type int32. \n + A bitmask where a bit "i" being "1" means to ignore the begin \n + value and instead use the largest interval possible. +* @li end_mask: A Tensor of type int32. \n + Analogous to "begin_mask". 
+* @li ellipsis_mask: A Tensor of type int32. \n + A bitmask where bit "i" being "1" means the "i"th position \n + is actually an ellipsis. +* @li new_axis_mask: A Tensor of type int32. \n + A bitmask where bit "i" being "1" means the "i"th \n + specification creates a new shape 1 dimension. +* @li shrink_axis_mask: A Tensor of type int32. \n + A bitmask where bit "i" implies that the "i"th \n + specification should shrink the dimensionality. + +*@par Outputs: +*output: A Tensor has the same type as "dy". +*/ +REG_OP(StridedSliceGrad) + .INPUT(shape, TensorType::IndexNumberType()) + .INPUT(begin, TensorType::IndexNumberType()) + .INPUT(end, TensorType::IndexNumberType()) + .INPUT(strides, TensorType::IndexNumberType()) + .INPUT(dy, TensorType::BasicType()) + .OUTPUT(output, TensorType::BasicType()) + .ATTR(begin_mask, Int, 0) + .ATTR(end_mask, Int, 0) + .ATTR(ellipsis_mask, Int, 0) + .ATTR(new_axis_mask, Int, 0) + .ATTR(shrink_axis_mask, Int, 0) + .OP_END_FACTORY_REG(StridedSliceGrad) + +/** +*@brief Computes the sum along segments of a tensor. + +*@par Inputs: +*Three inputs, including: +* @li x: A Tensor of type NumberType. +* @li segment_ids: A 1D Tensor of type IndexNumberType, whose shape is a prefix +* of "x.shape". +* @li num_segments: A Tensor of type IndexNumberType. + +*@par Outputs: +*y: A Tensor of type RealNumberType. +*/ +REG_OP(UnsortedSegmentSum) + .INPUT(x, TensorType::NumberType()) + .INPUT(segment_ids, TensorType::IndexNumberType()) + .INPUT(num_segments, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::NumberType()) + .OP_END_FACTORY_REG(UnsortedSegmentSum) + +/** +*@brief Computes the sum along segments of a tensor. + +*@par Inputs: +*Two inputs, including: +* @li x: A Tensor of type float16, float32, int32, int8, uint8. +* @li segment_ids: A 1D Tensor of type int32, whose shape is a prefix +* of "x.shape". + +*@par Attributes: +*num_segments: An int32, specifying the number of distinct segment IDs. 
+ +*@par Outputs: +*y: A Tensor with same type as "x". +*/ +REG_OP(UnsortedSegmentSumD) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8, DT_UINT8})) + .INPUT(segment_ids, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32, DT_INT8, DT_UINT8})) + .REQUIRED_ATTR(num_segments, Int) + .OP_END_FACTORY_REG(UnsortedSegmentSumD) + +/** +*@brief Reverses specific dimensions of a tensor. + +*@par Inputs: +* Two inputs, including:\n +*@li x: An ND Tensor (up to 8D). \n +*Must be one of the following types: int8, uint8, int16, uint16, int32, int64, bool, float32, double +*@li axis: A 1D Tensor.\n +*Must be one of the following types: int32, int64 + +*@par Outputs: +*y: A Tensor. Has the same type and format as "x" + +*@attention Constraints: +"axis" must be within the rank of "x". +*/ +REG_OP(ReverseV2) + .INPUT(x, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, + DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128, DT_STRING})) + .INPUT(axis, TensorType({DT_INT32,DT_INT64})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, + DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128, DT_STRING})) + .OP_END_FACTORY_REG(ReverseV2) + +/** +*@brief Reverses specific dimensions of a tensor. + +*@par Inputs: +* One input: +*@li x: An ND Tensor (up to 8D). \n +*Must be one of the following types: int8, uint8, int16, uint16, int32, int64, bool, float32, double + +*@par Attributes: +*axis: The indices of the dimensions to reverse. + +*@par Outputs: +*y: A Tensor. Has the same type and format as "x" + +*@attention Constraints: +"axis" must be within the rank of "x". 
+*/ +REG_OP(ReverseExt2) + .INPUT(x, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, + DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128, DT_STRING})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, + DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE, + DT_COMPLEX64, DT_COMPLEX128, DT_STRING})) + .REQUIRED_ATTR(axis, ListInt) + .OP_END_FACTORY_REG(ReverseExt2) + +/** +*@brief: Selects elements from "x1" or "x2", depending on "condition". + +*@par Inputs: +* Three inputs, including: +* @li condition: A Tensor of type bool. +* @li x1: A Tensor. Must be one of the following types: float16, float32, int32, int8, uint8. +* @li x2: A Tensor of the same type as "x1". + +*@par Outputs: +*y: A Tensor. Has the same type as "x1". +*/ +REG_OP(Select) + .INPUT(condition, TensorType({DT_BOOL})) + .INPUT(x1,TensorType::BasicType()) + .INPUT(x2,TensorType::BasicType()) + .OUTPUT(y,TensorType::BasicType()) + .OP_END_FACTORY_REG(Select) + +/** +*@brief: Computes the maximum along segments of a tensor. +*Computes a tensor such that output[i]=(data[i]) where max is over j such that segment_ids[j] == i. +*If the max is empty for a given segment ID i, output[i] = 0 + +*@par Inputs: +*Two inputs, include: +* @li x:A Tensor of type float16, float32, int32,int8,uint8. +* @li segment_ids:should be the size of the first dimension + must sorted and need not cover all values in the full range of valid values + must be positive intege + +*@par Outputs: +*y:A Tensor with same type as "x". +*/ +REG_OP(SegmentMax) + .INPUT(x, TensorType::RealNumberType()) + .INPUT(segment_ids, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::RealNumberType()) + .OP_END_FACTORY_REG(SegmentMax) + +/** +*@brief: Computes the maximum along segments of a tensor. +*Computes a tensor such that output[i]=(data[i]) where max is over j such that segment_ids[j] == i. 
+*If the max is empty for a given segment ID i, output[i] = 0 + +*@par Inputs: +*One inputs, include: +* @li x:A Tensor of type float16, float32, int32, int8,uint8 . + +*@par Attributes: +* @li segment_ids:should be the size of the first dimension + must sorted and need not cover all values in the full range of valid values + must be positive intege + +*@par Outputs: +*y:A Tensor with same type as "x". +*/ +REG_OP(SegmentMaxD) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .REQUIRED_ATTR(segment_ids, ListInt) + .OP_END_FACTORY_REG(SegmentMaxD) + +/** +*@brief Returns a one-hot tensor. The locations represented by index in "x" take value "on_value", +* while all other locations take value "off_value". + +*@par Inputs: +*Four inputs, including: +* @li x: A Tensor of indices. Must be one of the following types: int32, uint8, int64. +* @li depth: A scalar of type int32. The depth of the one hot dimension. +* @li on_value: A scalar. The value to fill in output when indices[j] = i, +* Must be one of the following types: float16, float32, int32, int8, uint8. +* @li off_value: A scalar. The value to fill in output when indices[j] != i, +* Has the same type as "on_value". + +*@par Attributes: +*axis: An int. The axis to fill. Defaults to "-1". + +*@par Outputs: +*y: A Tensor. Has the same type as "on_value". +*/ +REG_OP(OneHot) + .INPUT(x, TensorType({DT_UINT8, DT_INT32, DT_INT64})) + .INPUT(depth, TensorType({DT_INT32})) + .INPUT(on_value, TensorType::BasicType()) + .INPUT(off_value, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .ATTR(axis, Int, -1) + .OP_END_FACTORY_REG(OneHot) + +/** +*@brief Returns a one-hot tensor. The locations represented by index in "x" take value "on_value", +* while all other locations take value "off_value". + +*@par Inputs: +*Three inputs, including: +*@li x: A Tensor of indices. Must be one of the following types: int32, uint8, int64. 
+*@li on_value: A scalar. The value to fill in output when indices[j] = i, +* Must be one of the following types: float16, float32, int32, int8, uint8. +*@li off_value: A scalar. The value to fill in output when indices[j] != i, +* Has the same type as "on_value". + +*@par Attributes: +*@li depth: A scalar of type int32. The depth of the one hot dimension. +*@li axis: An int. The axis to fill. Defaults to "-1". + +*@par Outputs: +*y: A Tensor. Has the same type as "on_value". +*/ +REG_OP(OneHotD) + .INPUT(x, TensorType({DT_UINT8, DT_INT32})) + .INPUT(on_value, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_UINT8, + DT_INT8})) + .INPUT(off_value, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_UINT8, + DT_INT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_UINT8, DT_INT8})) + .REQUIRED_ATTR(depth, Int) + .ATTR(axis, Int, -1) + .OP_END_FACTORY_REG(OneHotD) + +/** +*@brief Extracts a slice from a tensor.\n + This operation extracts a slice of size "size" from a tensor "x" starting at the location specified by "begin". + +*@par Inputs: +*@li x: A Tensor. Must be one of the following types: float16, float32, int8, int16, int32, int64, uint8, uint16, uint32, uint64. +*@li begin: A Tensor of type int32 or int64. The starting location for the slice. +*@li size: A Tensor of type int32 or int64. The tensor shape. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". The slice extracted from the tensor. +*/ +REG_OP(Slice) + .INPUT(x, TensorType::BasicType()) + .INPUT(begin, TensorType::IndexNumberType()) + .INPUT(size, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(Slice) + +/** +*@brief Extracts a slice from a tensor.\n + This operation extracts a slice of size "size" from a tensor "x" starting at the location specified by "begin". + +*@par Inputs: +*x: A Tensor. Must be one of the following types: float16, float32, int8, int16, int32, int64, uint8, uint16, uint32, uint64. 
+ +*@par Attributes: +*@li begin: The starting location for the slice. +*@li size: The tensor shape. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". The slice extracted from the tensor. +*/ +REG_OP(SliceD) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(begin,ListInt) + .REQUIRED_ATTR(size,ListInt) + .OP_END_FACTORY_REG(SliceD) + +/** +* @brief Finds values and indices of the "k" largest elements for the last +* dimension. + +* @par Inputs: +* @li input: A 1D or higher tensor of type float16, with the last dimension at +* least "k". +* Specifies the data to sort. +* @li assist_seq: A 1D tensor of type float16. +* With values 0, 1, 2, ..., N-1, where "N" is the last dimension. + +* @par Attributes: +* k: An int that is at least 0, specifying the number of top elements to look\n +* for along the last dimension (along each row for matrices). + +* @par Outputs: +* @li values: A Tensor, specifying the sorted data. Has the same type as "input". +* @li indices: A Tensor of type int32, specifying the indices of sorted data. + +* @attention Constraints: +* @li k =< 4096 +* @li Size of the last dimension =< 65500 + +* @see TopKV2() +*/ +REG_OP(TopK) + .INPUT(input, TensorType::RealNumberType()) + .INPUT(assist_seq, TensorType({DT_FLOAT16})) + .OUTPUT(values, TensorType::RealNumberType()) + .OUTPUT(indices, TensorType({DT_INT32})) + .ATTR(k, Int, 0) + .OP_END_FACTORY_REG(TopK) + +/** +* @brief Finds values and indices of the "k" largest elements for the last +* dimension. + +* @par Inputs: +* @li input: A 1D or higher tensor of type BasicType, with the last dimension +* at least "k". +* @li k: A 0D Tensor of type int32.\n +* Number of top elements to look for along the last dimension (along each row +* for matrices). + +* @par Attributes: +* @li sorted: An optional bool. Defaults to true.\n +* If true, the resulting "k" elements will be sorted by the values in descending +* order. +* @li T: Indicator of indices type. 
+ +* @par Outputs: +* @li values: A Tensor, specifying the sorted data. Has the same type as +* "input". +* @li indices: A Tensor of type int32, specifying the indices of sorted data. + +* @see TopK() +*/ +REG_OP(TopKV2) + .INPUT(input, TensorType::RealNumberType()) + .INPUT(k, TensorType({DT_INT32})) + .OUTPUT(values, TensorType::RealNumberType()) + .OUTPUT(indices, TensorType({DT_INT32})) + .ATTR(sorted, Bool, true) + .ATTR(T, Int, 0) + .OP_END_FACTORY_REG(TopKV2) +/** +*@brief Creates a new tensor by applying sparse "updates" to individual values or slices within a tensor (initially zero for numeric, empty for string) of the given "shape" according to "indices". + +*@par Inputs: +*Inputs including: \n +* @li indices: A required index tensor. Must be one of the following types: float32, float16, int32, int8, uint8. +* @li updates: A required slice tensor. Must be one of the following types: float32, float16, int32, int8, uint8. +* @li shape: A required list of int32, specifying the output shape. +*@par Outputs: +*y:A output Tensor with same datatype as "updates". + +*@attention Constraints:\n +*@li "y" has the same shape as "shape". +*@li "y" has the same type as "updates". +*/ +REG_OP(ScatterNd) + .INPUT(indices, TensorType::BasicType()) + .INPUT(updates, TensorType::BasicType()) + .INPUT(shape, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(ScatterNd) +/** +*@brief Creates a new tensor by applying sparse "updates" to individual values or slices within a tensor (initially zero for numeric, empty for string) of the given "shape" according to "indices". + +*@par Inputs: +*Inputs including: \n +* @li indices: A required index tensor. Must be one of the following types: float32, float16, int32, int8, uint8. +* @li updates: A required slice tensor. Must be one of the following types: float32, float16, int32, int8, uint8. +*@par Attributes: +* @li shape: A required list of int32, specifying the output shape. 
+*@par Outputs: +*y: A Tensor. Has the same type as "updates". + +*@attention Constraints:\n +*@li "y" has the same shape as "shape". +*@li "y" has the same type as "updates". +*/ +REG_OP(ScatterNdD) + .INPUT(indices, TensorType::IndexNumberType()) + .INPUT(updates, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT16})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT16})) + .ATTR(shape, ListInt,{}) + .OP_END_FACTORY_REG(ScatterNdD) + +/** +* @brief Says whether the targets are in the top "k" predictions.\n +* Let "x1" be the predictions for all classes for example i, "x2(i)" be the\n +* target class for example i, y(i) be the output for example i:\n +* y(i) = x1(i, x2(i))) ��TopKIncludingTies(x1(i)) + +* @par Inputs: +* Three inputs, including: +* @li x1: A 2D Tensor of type float32. A "batch_size * classes" tensor. +* @li x2: A 1D Tensor of type IndexNumberType. A ��batch_size�� tensor of class +* ids. +* @li k: A 1D Tensor of the same type as "x2". +* Specifies the number of top elements to look at for computing precision. + +* @par Outputs: +* y: A Tensor of type uint8. + +* @see InTopK() +*/ +REG_OP(InTopKExt2) + .INPUT(x1, TensorType({DT_FLOAT})) + .INPUT(x2, TensorType({IndexNumberType})) + .INPUT(k, TensorType({IndexNumberType})) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(InTopKExt2) + +/** +* @brief Says whether the targets are in the top "k" predictions\n +* Let "x1" be the predictions for all classes for example i, "x2(i)" be the\n +* target class for example i, y(i) be the output for example i:\n +* y(i) = x1(i, x2(i))) ��TopKIncludingTies(x1(i)) + +* @par Inputs: +* Two inputs, including: +* @li x1: A 2D Tensor of type float32. A "batch_size * classes" tensor. +* @li x2: A 1D Tensor of type IndexNumberType. A ��batch_size�� tensor of class +* ids. + +* @par Attributes: +* @li k: An int32, specifying the number of top elements to look at for +* computing precision. + +* @par Outputs: +* y: A Tensor of type uint8. 
+ +* @attention Constraints: + +* @see InTopKEx2() +*/ +REG_OP(InTopK) + .INPUT(x1, TensorType({DT_FLOAT})) + .INPUT(x2, TensorType(IndexNumberType)) + .ATTR(k, Int, 1) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(InTopK) + +/** +* @brief Assigns "value" to the sliced l-value reference of "var".\n +* The values of "value" are assigned to the positions in the variable. "var"\n +* that are selected by the slice parameters. The slice parameters "begin, "end",\n +* "strides", etc. work exactly as in "StridedSlice". + +* @par Inputs: +* @li var: A mutable ND Tensor of type BasicType. +* @li begin: A mutable ND Tensor of type IndexNumberType. +* Specifies the index of the first value to select. +* @li end: A mutable ND Tensor of type IndexNumberType. +* Specifies the index of the last value to select. +* @li strides: A mutable ND Tensor of type IndexNumberType. +* Specifies the stride to select. +* @li input_value: A mutable ND Tensor of type BasicType. + +* @par Attributes: +* @li begin_mask: An optional int. Defaults to "0". +* @li end_mask: An optional int. Defaults to "0". +* @li ellipsis_mask: An optional int. Defaults to "0". +* @li new_axis_mask: An optional int. Defaults to "0". +* @li shrink_axis_mask: An optional int. Defaults to "0". + +* @par Outputs: +* var: A mutable Tensor. Has the same type as "var". + +* @attention Constraints: +* This operator currently does not support broadcasting. Therefore, the shape +* of "value" must be exactly the shape produced by the slice of "var". 
+ +* @see StridedSlice() +*/ +REG_OP(StridedSliceAssign) + .INPUT(var, TensorType(BasicType)) + .INPUT(begin, TensorType(IndexNumberType)) + .INPUT(end, TensorType(IndexNumberType)) + .INPUT(strides, TensorType(IndexNumberType)) + .INPUT(input_value, TensorType(BasicType)) + .OUTPUT(var, TensorType(BasicType)) + .ATTR(begin_mask, Int, 0) + .ATTR(end_mask, Int, 0) + .ATTR(ellipsis_mask, Int, 0) + .ATTR(new_axis_mask, Int, 0) + .ATTR(shrink_axis_mask, Int, 0) + .OP_END_FACTORY_REG(StridedSliceAssign) + +/** +* @brief Assigns "value" to the sliced l-value reference of "var".\n +* The values of "value" are assigned to the positions in the variable. "var"\n +* that are selected by the slice parameters. The slice parameters "begin, "end",\n +* "strides", etc. work exactly as in "StridedSlice". + +* @par Inputs: +* @li var: A mutable ND Tensor of type BasicType. +* @li input_value: A mutable ND "Tensor" of type BasicType. + + +* @par Attributes: +* @li begin: A required list of ints. +* Specifies the index of the first value to select. +* @li end: A required list of ints. +* Specifies the index of the last value to select. +* @li strides: A required list of ints. Specifies the stride to select. +* @li begin_mask: An optional int. Defaults to "0". +* @li end_mask: An optional int. Defaults to "0". +* @li ellipsis_mask: An optional int. Defaults to "0". +* @li new_axis_mask: An optional int. Defaults to "0". +* @li shrink_axis_mask: An optional int. Defaults to "0". + +* @par Outputs: +* var: A mutable Tensor. Has the same type as input "var". + +* @attention Constraints: +* This operator currently does not support broadcasting. Therefore, the shape of +* "value" shape must be exactly the shape produced by the slice of "var". 
+ +* @see StridedSlice() +*/ +REG_OP(StridedSliceAssignD) + .INPUT(var, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .INPUT(input_value, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32})) + .OUTPUT(var, TensorType(BasicType)) + .ATTR(begin, ListInt, {}) + .ATTR(end, ListInt, {}) + .ATTR(strides, ListInt, {}) + .ATTR(begin_mask, Int, 0) + .ATTR(end_mask, Int, 0) + .ATTR(ellipsis_mask, Int, 0) + .ATTR(new_axis_mask, Int, 0) + .ATTR(shrink_axis_mask, Int, 0) + .OP_END_FACTORY_REG(StridedSliceAssignD) + +/** +*@brief Gather slices from "params" according to "indices"."indices" must be \n + an integer tensor of any dimension(usually 0-D or 1-D). \n + Produces an output tensor with shape "indices.shape + params.shape[1:]". + +*@par Inputs: +*Two inputs, including: +* @li x: A Tensor. Must be one of the following types: float32, float64, int32, uint8, int16, int8, \n +* complex64, int64, qint8, quint8, qint32, qint16, quint16, uint16, \n +* complex128, float16, uint32, uint64, complex64, complex128. +* @li indices: A Tensor of type int32 or int64. + +*@par Attributes: +*validate_indices: A bool specifying whether to verify the argument of "indice". + +*@par Outputs: +*y: A Tensor. Has the same type as "x". + +*@attention Constraints: +* "indices" is in the range [0, x.shape[0]). +*/ +REG_OP(Gather) + .INPUT(x, TensorType::BasicType()) + .INPUT(indices, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .ATTR(validate_indices, Bool, true) + .OP_END_FACTORY_REG(Gather) + +/** +*@brief Computes the cumulative product of the tensor "x" along "axis". + +*@par Inputs: +* Two inputs, including: +*@li x: A Tensor. Must be one of the following types: int32, float32, float16, int8, uint8. +*@li axis A Tensor of type int32. Defaults to "0". +* +*@par Attributes: +*@li exclusive: If "False", performs inclusive cumprod, which means that the first element of the input is identical to the first element of the output. If "True", performs exclusive cumprod. 
+*@li reverse: A bool. Defaults to "False". +* +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(Cumprod) + .INPUT(x, TensorType::NumberType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::NumberType()) + .ATTR(exclusive, Bool, false) + .ATTR(reverse, Bool, false) + .OP_END_FACTORY_REG(Cumprod) + +/** +*@brief Computes the cumulative product of the tensor "x" along "axis". + +*@par Inputs: +* One input: +*x: A Tensor. Must be one of the following types: int32, float32, float16, int8, uint8. +* +*@par Attributes: +*@li axis A Tensor of type int32. Defaults to "0". +*@li exclusive: If "False", performs inclusive cumprod, which means that the first element of the input is identical to the first element of the output. If "True", performs exclusive cumprod. +*@li reverse: A bool. Defaults to "False". +* +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(CumprodD) + .INPUT(x, TensorType::NumberType()) + .OUTPUT(y, TensorType::NumberType()) + .REQUIRED_ATTR(axis, Int) + .ATTR(exclusive, Bool, false) + .ATTR(reverse, Bool, false) + .OP_END_FACTORY_REG(CumprodD) + +/** +*@brief Computes the cumulative sum of the tensor "x" along "axis". + +*@par Inputs: +* Two inputs, including: +*@li x: A Tensor. Must be one of the following types: int32, float32, float16, int8, uint8. +*@li axis A Tensor of type int32. Defaults to "0". +* +*@par Attributes: +*@li exclusive: If "False", performs inclusive cumsum, which means that the first element of the input is identical to the first element of the output. If "True", performs exclusive cumsum. +*@li reverse: A bool. Defaults to "False". +* +*@par Outputs: +*@li y: A Tensor. Has the same type as "x". 
+*/ +REG_OP(Cumsum) + .INPUT(x, TensorType::NumberType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::NumberType()) + .ATTR(exclusive, Bool, false) + .ATTR(reverse, Bool, false) + .OP_END_FACTORY_REG(Cumsum) + +/** +*@brief Computes the cumulative sum of the tensor "x" along "axis". +* +*@par Inputs: +* One input: +*x: A Tensor. Must be one of the following types: int32, float32, float16, int8, uint8. +* +*@par Attributes: +*@li axis A Tensor of type int32. Defaults to "0". +*@li exclusive: If "False", performs inclusive cumsum, which means that the first element of the input is identical to the first element of the output. If "True", performs exclusive cumsum. +*@li reverse: A bool. Defaults to "False". +* +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(CumsumD) + .INPUT(x, TensorType::NumberType()) + .OUTPUT(y, TensorType::NumberType()) + .REQUIRED_ATTR(axis, Int) + .ATTR(exclusive, Bool, false) + .ATTR(reverse, Bool, false) + .OP_END_FACTORY_REG(CumsumD) + +/** +*@brief Updates specified rows with values in v. \n +*Computes x[i, :] = v; return x. +*@par Inputs: +*Three inputs, including: +* @li x: A Tensor. \n +* TensorType::NumberType(). +* @li indices: A vector of type int32. \n +* Indices into the left-most dimension of "x". +* @li v: A Tensor of the same type as "x". \n +* Same dimension sizes as x except the first dimension, \n +* which must be the same as the size of "indices". + +*@par Outputs: +*y: A Tensor of the same type as "x". \n +* An alias of "x". The content of "y" is undefined if there are duplicates in indices. +*/ +REG_OP(InplaceUpdate) + .INPUT(x, TensorType::BasicType()) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(v, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(InplaceUpdate) + +/** +*@brief Updates specified rows with values in v. \n +*Computes x[i, :] = v; return x. +*@par Inputs: +*Two inputs, including: +* @li x: A Tensor. 
\n +* TensorType::NumberType(). +* @li v: A Tensor of the same type as "x". \n +* Same dimension sizes as "x" except the first dimension, which must be the same as the size of "indices". + +*@par Attributes: +*indices: A required list of ints. Indices into the left-most dimension of "x". + +*@par Outputs: +*y: A Tensor of the same type as "x". \n +* An alias of "x". The content of "y" is undefined if there are duplicates in indices. +*/ +REG_OP(InplaceUpdateD) + .INPUT(x, TensorType::BasicType()) + .INPUT(v, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(indices, ListInt) + .OP_END_FACTORY_REG(InplaceUpdateD) + +/** +*@brief Adds "v" into specified rows of "x". \n +*Computes y = x; y[i, :] += v. +*@par Inputs: +*Three inputs, including: +* @li x: A Tensor. \n +* TensorType::NumberType(). +* @li indices: A vector of type int32. \n +* Indices into the left-most dimension of "x". +* @li v: A Tensor of the same type as "x". \n +* Same dimension sizes as x except the first dimension, \n +* which must be the same as the size of "indices". + +*@par Outputs: +*y: A Tensor of the same type as "x". \n +* An alias of "x". The content of "y" is undefined if there are duplicates in indices. +*/ +REG_OP(InplaceAdd) + .INPUT(x, TensorType::BasicType()) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(v, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(InplaceAdd) + +/** +*@brief Adds "v" into specified rows of "x". \n +*Computes y = x; y[i, :] += v. +*@par Inputs: +*Two inputs, including: +* @li x: A Tensor. \n +* TensorType::NumberType(). +* @li v: A Tensor of the same type as "x". \n +* Same dimension sizes as "x" except the first dimension, which must be the same as the size of "indices". + +*@par Attributes: +*indices: A required list of ints. Indices into the left-most dimension of "x". + +*@par Outputs: +*y: A Tensor of the same type as "x". \n +* An alias of "x". 
The content of "y" is undefined if there are duplicates in indices. +*/ +REG_OP(InplaceAddD) + .INPUT(x, TensorType::BasicType()) + .INPUT(v, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(indices, ListInt) + .OP_END_FACTORY_REG(InplaceAddD) + +/** +*@brief Subtracts "v" into specified rows of "x". \n +*Computes y = x; y[i, :] -= v; return y. +*@par Inputs: +**Three inputs, including: +* @li x: A Tensor. TensorType::NumberType(). +* @li indices: A vector of type int32. Indices into the left-most dimension of x. +* @li v: A Tensor of the same type as "x". \n +* Same dimension sizes as "x" except the first dimension, which must be the same as the size of "indices". + +*@par Outputs: +*y: A Tensor. Has the same type as "x".\n +* An alias of "x". The content of "y" is undefined if there are duplicates in indices. +*/ +REG_OP(InplaceSub) + .INPUT(x, TensorType::BasicType()) + .INPUT(indices, TensorType({DT_INT32})) + .INPUT(v, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(InplaceSub) + +/** +*@brief Subtracts "v" into specified rows of "x". \n +*Computes y = x; y[i, :] -= v. + +*@par Inputs: +**Two inputs, including: +* @li x: A Tensor. TensorType::NumberType(). +* @li v: A Tensor of the same type as "x". \n +* Same dimension sizes as "x" except the first dimension, which must be the same as the size of "indices". + +*@par Attributes: +*indices: A required list of ints. Indices into the left-most dimension of "x". + +*@par Outputs: +*y: A Tensor. Has the same type as "x".\n +* An alias of x. The content of y is undefined if there are duplicates in indices. +*/ +REG_OP(InplaceSubD) + .INPUT(x, TensorType::BasicType()) + .INPUT(v, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(indices, ListInt) + .OP_END_FACTORY_REG(InplaceSubD) + +/** +* @brief Applies sparse addition to input "x" using individual values or slices\n +* from "updates" according to "indices". 
The updates are non-aliasing: "x" is\n
* only modified in-place if no other operations will use it. Otherwise, a copy\n
* of "x" is made. This operation has a gradient with respect to both "x" and
* "updates".

* @par Inputs:
* Three inputs, including:
* @li x: A Tensor of type NumberType. A batch_size x classes tensor.
* @li indices: A Tensor of type IndexNumberType. Specifies the indices into "x".
* @li updates: A Tensor. Must have the same type as "x".
* Specifies the updated values to add to "x".

* @par Outputs:
* y: A Tensor with the same shape as "x", containing values of "x" updated with
* "updates".

* @see ScatterNd(),ScatterNdAdd()
*/
REG_OP(ScatterNonAliasingAdd)
    .INPUT(x, TensorType::NumberType())
    .INPUT(indices, TensorType::IndexNumberType())
    .INPUT(updates, TensorType::NumberType())
    .OUTPUT(y, TensorType::NumberType())
    .OP_END_FACTORY_REG(ScatterNonAliasingAdd)

/**
* @brief Computes the minimum along segments of a tensor.

* @par Inputs:
* Three inputs, including:
* @li x: A Tensor of type RealNumberType.
* @li segment_ids: A 1D Tensor of type IndexNumberType, whose shape is a prefix
* of "x.shape".
* @li num_segments: A Tensor of type IndexNumberType, specifying the number of
* distinct segment IDs.

* @par Outputs:
* y: A Tensor of type RealNumberType.

* @see UnsortedSegmentSum(), UnsortedSegmentProd(),
*/
REG_OP(UnsortedSegmentMin)
    .INPUT(x, TensorType::RealNumberType())
    .INPUT(segment_ids, TensorType::IndexNumberType())
    .INPUT(num_segments, TensorType::IndexNumberType())
    .OUTPUT(y, TensorType::RealNumberType())
    .OP_END_FACTORY_REG(UnsortedSegmentMin)

/**
* @brief Computes the minimum along segments of a tensor.

* @par Inputs:
* Two inputs, including:
* @li x: A Tensor of type RealNumberType.
* @li segment_ids: A 1D Tensor of type IndexNumberType, whose shape is a prefix
* of "x.shape".

* @par Attributes:
* num_segments: An int32, specifying the number of distinct segment IDs.

* @par Outputs:
* y: A Tensor of type RealNumberType.

* @see UnsortedSegmentProdD(),
*/
REG_OP(UnsortedSegmentMinD)
    .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32}))
    .INPUT(segment_ids, TensorType({DT_INT32}))
    .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32}))
    .REQUIRED_ATTR(num_segments, Int)
    .OP_END_FACTORY_REG(UnsortedSegmentMinD)

/**
* @brief Computes the product along segments of a tensor.

* @par Inputs:
* Three inputs, including:
* @li x: A Tensor of type RealNumberType.
* @li segment_ids: A 1D Tensor of type IndexNumberType, whose shape is a prefix
* of "x.shape".
* @li num_segments: A Tensor of type IndexNumberType, specifying the number of
* distinct segment IDs.

* @par Outputs:
* y: A Tensor of type RealNumberType.

* @see UnsortedSegmentSum(), UnsortedSegmentMin(),
*/
REG_OP(UnsortedSegmentProd)
    .INPUT(x, TensorType::NumberType())
    .INPUT(segment_ids, TensorType::IndexNumberType())
    .INPUT(num_segments, TensorType::IndexNumberType())
    .OUTPUT(y, TensorType::NumberType())
    .OP_END_FACTORY_REG(UnsortedSegmentProd)

/**
* @brief Computes the product along segments of a tensor.

* @par Inputs:
* Two inputs, including:
* @li x: A Tensor of type RealNumberType.
* @li segment_ids: A 1D Tensor of type IndexNumberType, whose shape is a prefix
* of "x.shape".

* @par Attributes:
* num_segments: An int32, specifying the number of distinct segment IDs.

* @par Outputs:
* y: A Tensor of type RealNumberType.

* @see UnsortedSegmentMinD()
*/
REG_OP(UnsortedSegmentProdD)
    .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32}))
    .INPUT(segment_ids, TensorType({DT_INT32}))
    .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT32}))
    .REQUIRED_ATTR(num_segments, Int)
    .OP_END_FACTORY_REG(UnsortedSegmentProdD)

/**
*@brief Normalizes data. It is called Region on YOLO v2 and Yolo on YOLO v3.

*@par Inputs:
*x: An NCHW tensor of type float16 or float32. The data is with shape (N, boxes*(coords+obj+classes), H, W),where, "obj" indicates the confidence of an object, and only one confidence is supported.
Boxes are arranged as xx...xyy...yww...whh...hbb...bc0c0..c0c1c1...c1......cncn...cn.

*@par Attributes:
*@li boxes: A required int32, specifying the number of anchor boxes. Defaults to "5" for V2 or "3" for V3.
*@li coords: An int32, specifying the number of parameters required for locating an object. The value is fixed at "4", corresponding to (x,y,w,h).
*@li classes: An int32, specifying the number of prediction classes. Defaults to "80". The value range is [1, 1024].
*@li yolo_version: A string, specifying the YOLO version, either "V2" or "V3".
*@li softmax: A bool, specifying whether to perform softmax, valid only when "yolo_version = V2".
*@li background: A bool, specifying the operation types of the obj and classes, used in conjunction with "softmax" and valid only when "yolo_version = V2".

*@par Outputs:
*@li coord_data: A float16 or float32 with shape [N, boxes*coords, ceilx(height*width*2+32, 32)/2], where "ceil" indicates that a detected box is aligned upwards with the second parameter. Specifies the coordinates of a detected box.
*@li obj_data: A float16 or float32 with shape [N, ceilx(boxes*height*width *2+32, 32)/2], where "ceil" indicates that a detected box is aligned upwards with the second parameter. Specifies the confidence.
*@li classes_data: A float16 or float32 with shape [N, classes, ceilx(boxes*height*width *2+32, 32)/2], where "ceil" indicates that a detected box is aligned upwards with the second parameter. Specifies the prediction classes.

*@attention Constraints:
*@li This operator applies to YOLO v2 and v3 networks.
*@li The succeeding layer of the Yolo operator must be operator Yolov3DetectionOutput.
*/
REG_OP(Yolo)
    .INPUT(x, TensorType({DT_FLOAT16,DT_FLOAT}))
    .OUTPUT(coord_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .OUTPUT(obj_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .OUTPUT(classes_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .ATTR(boxes, Int, 3)
    .ATTR(coords, Int, 4)
    .ATTR(classes, Int, 80)
    .ATTR(yolo_version, String, "V3")
    .ATTR(softmax, Bool, false)
    .ATTR(background, Bool, false)
    .OP_END_FACTORY_REG(Yolo)

/**
*@brief Performs YOLO V3 detection.

*@par Inputs:
*Ten inputs, including:
*@li Operator Yolov3DetectionOutput takes the outputs of operator Yolo as its inputs. A Yolo operator has three outputs: "coords", "obj", and "class". \n
There are three Yolo operators at Yolov3DetectionOutput's preceding layer on Yolo v3. For details, see the description of operator Yolo.
*@li imginfo: A float16, describing the image information including the required image height and width \n
and the actual image height and width.
*
*@par Attributes:
*@li biases: A required float. "biases = Number of Yolo operators at the preceding layer x 2 x boxes"
*@li boxes: A required int32, specifying the number of anchor boxes predicted for each Yolo layer.
*@li coords: Specifies the number of coordinate parameters. Must be 4.
*@li classes: A required int32, specifying the number of classes to be predicted. The value range is [1, 80].
*@li relative: An optional bool. Defaults to and must be "true".
*@li obj_threshold: A required float, specifying the confidence threshold for box filtering, which is the output "obj" of operator Yolo. The value range is [0.0, 1.0].

*@li post_top_k: An optional int32. This attribute is reserved.
*@li classes_threshold: A required float, specifying the class score threshold for box filtering, which is the output "class" of operator Yolo. The value range is [0.0, 1.0].

*@li nms_threshold: A required float, specifying the intersection-over-union (IOU) threshold for box filtering.
The value range is [0.0, 1.0].\n

*@li max_box_number_per_batch: An optional int, specifying the maximum number of output boxes per batch. Defaults to "1024".
*@li pre_nms_topn: An optional int, specifying the number of boxes for non-maximum suppression (NMS). Defaults to "512".
*
*@par Outputs:
*@li boxout: An NCHW tensor of type float16, describing the information of each output box, including the coordinates, class, and confidence.
*@li boxoutnum: An NCHW tensor of type int32, specifying the number of output boxes.

*@attention Constraints:\n
*@li This operator applies only to the YOLO v3 network.
*@li The preceding layer of operator Yolov3DetectionOutput must be three Yolo operators.
*/
REG_OP(YoloV3DetectionOutput)
    .INPUT(coord_data1, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(coord_data2, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(coord_data3, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(obj_data1, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(obj_data2, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(obj_data3, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(classes_data1, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(classes_data2, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(classes_data3, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(img_info, TensorType({DT_FLOAT16,DT_FLOAT}))
    .REQUIRED_ATTR(biases1, ListFloat)
    .REQUIRED_ATTR(biases2, ListFloat)
    .REQUIRED_ATTR(biases3, ListFloat)
    .ATTR(boxes, Int, 3)
    .ATTR(coords, Int, 4)
    .ATTR(classes, Int, 80)
    .ATTR(relative, Bool, true)
    .ATTR(obj_threshold, Float, 0.5)
    .ATTR(post_top_k, Int, 1024)
    .ATTR(classes_threshold, Float, 0.5)
    .ATTR(nms_threshold, Float, 0.45)
    .ATTR(max_box_number_per_batch, Int, 1024)
    .ATTR(pre_nms_topn, Int, 512)
    .OUTPUT(box_out, TensorType({DT_FLOAT16,DT_FLOAT}))
    .OUTPUT(box_out_num, TensorType({DT_INT32}))
    .OP_END_FACTORY_REG(YoloV3DetectionOutput)

/**
*@brief Performs YOLO V3 detection.

*@par Inputs:
*16 inputs, including:
*@li The outputs of operator Yolo at the preceding layer (that is, three Yolo operators on YOLO v3) are used as the inputs of operator Yolov3DetectionOutput. \n
A Yolo operator has three outputs: "coords", "obj", and "class". For details, see the description of operator Yolo.
*@li imginfo: A float16, describing the image information including the required image height and width \n
and the actual image height and width.
*@li windex: A windex tensor with shape [height,weight]. Has the same type as the inputs. [[0,1,2...(weight-1)],[0,1,2...(w-1)]...[0,1,2...(weight-1)]] consisting of h groups of [0, 1, 2...(weight-1)] is formed for the three Yolo outputs, respectively.

*@li hindex: A hindex tensor with shape [height,weight]. Has the same type as the inputs. [[0,0...0],[1,1...1],[2,2...2]...[height-1,height-1...,height-1]] is formed for the three Yolo outputs, respectively.

*
*@par Attributes:
*@li biases: A required float32. "biases = Number of Yolo operators at the preceding layer x 2 x boxes"
*@li boxes: A required int32, specifying the number of anchor boxes predicted for each Yolo layer.
*@li coords: Specifies the number of coordinate parameters. Must be 4.
*@li classes: A required int32, specifying the number of classes to be predicted. The value range is [1, 80].
*@li relative: An optional bool. Defaults to and must be "true".
*@li obj_threshold: A required float, specifying the confidence threshold for box filtering, which is the output "obj" of operator Yolo. The value range is [0.0, 1.0].
*@li post_top_k: An optional int32. This attribute is reserved.
*@li classes_threshold: A required float, specifying the class score threshold for box filtering, which is the output "class" of operator Yolo. The value range is [0.0, 1.0].
*@li nms_threshold: A required float, specifying the intersection-over-union (IOU) threshold for box filtering. The value range is [0.0, 1.0].\n
*@li max_box_number_per_batch: An optional int, specifying the maximum number of output boxes per batch. Defaults to "1024".
*@li pre_nms_topn: An optional int, specifying the number of boxes for non-maximum suppression (NMS). Defaults to "512".
*
*@par Outputs:
*@li boxout: An NCHW tensor of type float16, describing the information of each output box, including the coordinates, class, and confidence.
*@li boxoutnum: An NCHW tensor of type int32, specifying the number of output boxes.

*@attention Constraints:\n
*@li This operator applies only to the YOLO v3 network.
*@li The preceding layer of operator Yolov3DetectionOutput must be three Yolo operators.
*/
REG_OP(YoloV3DetectionOutputD)
    .INPUT(coord_data1, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(coord_data2, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(coord_data3, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(obj_data1, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(obj_data2, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(obj_data3, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(classes_data1, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(classes_data2, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(classes_data3, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(img_info, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(windex1, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(windex2, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(windex3, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(hindex1, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(hindex2, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(hindex3, TensorType({DT_FLOAT16,DT_FLOAT}))
    .REQUIRED_ATTR(biases1, ListFloat)
    .REQUIRED_ATTR(biases2, ListFloat)
    .REQUIRED_ATTR(biases3, ListFloat)
    .ATTR(boxes, Int, 3)
    .ATTR(coords, Int, 4)
    .ATTR(classes, Int, 80)
    .ATTR(relative, Bool, true)
    .ATTR(obj_threshold, Float, 0.5)
    .ATTR(post_top_k, Int, 1024)
    .ATTR(classes_threshold, Float, 0.5)
    .ATTR(nms_threshold, Float, 0.45)
    .ATTR(max_box_number_per_batch, Int, 1024)
    .ATTR(pre_nms_topn, Int, 512)
    .OUTPUT(box_out, TensorType({DT_FLOAT16,DT_FLOAT}))
    .OUTPUT(box_out_num, TensorType({DT_INT32}))
    .OP_END_FACTORY_REG(YoloV3DetectionOutputD)

/**
*@brief Performs object detection.

*@par Inputs:
*@li cls_prob: An NCHW tensor of type float16 or float32, specifying the probability of the proposal being the background class.
*@li bbox_pred: An NCHW tensor of type float16 or float32, specifying the coordinates of the proposal bounding boxes.

*@par Attributes:
*@li im_info: A required list of floats, specifying the Image information. The value range is [1, 4096].
*@li feat_stride: A required float32, specifying the stride of the sliding window. Must be greater than "0". Defaults to "16".
*@li base_size: A required float32, specifying the size of the generated base box. Must be greater than "0". Defaults to "16".
*@li min_size: A required float32, specifying the minimum edge length of a proposal. A box with any edge less than this value is removed. Must be greater than "0". Defaults to "16".
*@li ratio: A required list of floats, specifying the aspect ratio of the generated base box. Defaults to [0.5, 1, 2].
*@li scale: A required list of floats, specifying the ratio of the size of the generated base box to "base_size". Defaults to [8, 16, 32].
*@li pre_nms_topn: A required int, specifying top K boxes before NMS. For float16 input, pre_nms_topn <= 6000. For float32 input, pre_nms_topn <= 3000. Defaults to "3000".
*@li post_nms_topn: A required int, specifying the number of boxes to be output after NMS. The value is a multiple of 16. For float16 input, post_nms_topn <= 6000. For float32 input, post_nms_topn <= 3000 (the maximum multiple of 16 is 2992 within the range). Defaults to "304".
*@li nms_thresh: A required float32, specifying the NMS threshold. The value range is (0,1]. Defaults to "0.7".

*@par Outputs:
*@li rois: A Tensor with shape [batch, 5, post_nms_topn], of type float16, specifying the output box information. "post_nms_topn" must be a multiple of 16. The dimension "5" indicates (batchID, x1, y1, x2, y2). The number of BBoxes output per batch is determined by "actual_rois_num".
*@li actual_rois_num: A Tensor with shape [batch, 8], of type int32, specifying the number of BBoxes output per batch.
*/
REG_OP(Proposal)
    .INPUT(cls_prob, TensorType({DT_FLOAT16, DT_FLOAT}))
    .INPUT(bbox_pred, TensorType({DT_FLOAT16, DT_FLOAT}))
    .OUTPUT(rois, TensorType({DT_FLOAT16, DT_FLOAT}))
    .OUTPUT(actual_rois_num, TensorType({DT_INT32}))
    .ATTR(im_info, ListFloat, {375, 1240})
    .ATTR(feat_stride, Float, 16)
    .ATTR(base_size, Float, 16)
    .ATTR(min_size, ListFloat, {16, 16})
    .ATTR(ratio, ListFloat, {0.5, 1, 2})
    .ATTR(scale, ListFloat, {8, 16, 32})
    .ATTR(pre_nms_topn, Int, 6000)
    .ATTR(post_nms_topn, Int, 304)
    .ATTR(nms_thresh, Float, 0.7)
    .OP_END_FACTORY_REG(Proposal)

/**
*@brief Performs object detection. Different from Proposal, this is an internal API called after FE fusion and has an additional "rpn_bbox" attribute. The suffix "D" in the API name will be removed from the generated model.

*@par Inputs:
*@li cls_prob: An NCHW tensor of type float16, specifying the probability of the proposal being the background class.
*@li bbox_pred: An NCHW tensor of type float16, specifying the coordinates of the proposal bounding boxes.
*@li rpn_bbox: An NCHW tensor of type float16, specifying the coordinates of the proposal bounding boxes.

*@par Attributes:
*@li im_info: A required list of floats, specifying the Image information. The value range is [1, 4096].
*@li feat_stride: A required float32, specifying the stride of the sliding window. Must be greater than "0". Defaults to "16".
*@li base_size: A required float32, specifying the size of the generated base box. Must be greater than "0". Defaults to "16".
*@li min_size: A required float32, specifying the minimum edge length of a proposal. A box with any edge less than this value is removed. Must be greater than "0". Defaults to "16".
*@li ratio: A required list of floats, specifying the aspect ratio of the generated base box. Defaults to [0.5, 1, 2].
*@li scale: A required list of floats, specifying the ratio of the size of the generated base box to "base_size". Defaults to [8, 16, 32].
*@li pre_nms_topn: A required int, specifying top K boxes before NMS. For float16 input, pre_nms_topn <= 6000. For float32 input, pre_nms_topn <= 3000. Defaults to "3000".
*@li post_nms_topn: A required int, specifying the number of boxes to be output after NMS. The value is a multiple of 16. For float16 input, post_nms_topn <= 6000. For float32 input, post_nms_topn <= 3000 (the maximum multiple of 16 is 2992 within the range). Defaults to "304".
*@li nms_thresh: A required float32, specifying the NMS threshold. The value range is (0,1]. Defaults to 0.7.

*@par Outputs:
*@li rois: A Tensor with shape [batch, 5, post_nms_topn], of type float16, specifying the output box information. "post_nms_topn" must be a multiple of 16. The dimension "5" indicates (batchID, x1, y1, x2, y2). The number of BBoxes output per batch is determined by "actual_rois_num".
*@li actual_rois_num: A Tensor with shape [batch, 8], of type int32, specifying the number of BBoxes output per batch.
*/
REG_OP(ProposalD)
    .INPUT(cls_prob, TensorType({DT_FLOAT16, DT_FLOAT}))
    .INPUT(bbox_pred, TensorType({DT_FLOAT16, DT_FLOAT}))
    .INPUT(rpn_bbox, TensorType({DT_FLOAT16, DT_FLOAT}))
    .OUTPUT(rois, TensorType({DT_FLOAT16, DT_FLOAT}))
    .OUTPUT(actual_rois_num, TensorType({DT_INT32}))
    .ATTR(im_info, ListFloat, {375, 1240})
    .ATTR(feat_stride, Float, 16)
    .ATTR(base_size, Float, 16)
    .ATTR(min_size, ListFloat, {16, 16})
    .ATTR(ratio, ListFloat, {0.5, 1, 2})
    .ATTR(scale, ListFloat, {8, 16, 32})
    .ATTR(pre_nms_topn, Int, 6000)
    .ATTR(post_nms_topn, Int, 304)
    .ATTR(nms_thresh, Float, 0.7)
    .OP_END_FACTORY_REG(ProposalD)

/**
*@brief Performs YOLO V2 detection.

*@par Inputs:
* Four inputs, including:
*@li The outputs of operator Yolo at the preceding layer (that is, one Yolo operator on YOLO v2) are used as the inputs of operator Yolov2DetectionOutput. \n
Each Yolo operator has three outputs: "coords", "obj", and "class". For details, see the description of operator Yolo.
*@li imginfo: A float16, describing the image information including the required image height and width \n
and the actual image height and width.
*
*@par Attributes:
*@li biases: A required float. "biases = Number of Yolo operators at the preceding layer x 2 x boxes"
*@li boxes: A required int32, specifying the number of anchor boxes predicted for each Yolo layer.
*@li coords: Specifies the number of coordinate parameters. Must be 4.
*@li classes: A required int32, specifying the number of classes to be predicted. The value range is [1, 80].
*@li relative: An optional bool. Defaults to and must be "true".
*@li obj_threshold: A required float, specifying the confidence threshold for box filtering, which is the output "obj" of operator Yolo. The value range is [0.0, 1.0].

*@li post_top_k: An optional int32. This attribute is reserved.
*@li classes_threshold: A required float, specifying the class score threshold for box filtering, which is the output "class" of operator Yolo. The value range is [0.0, 1.0].
*@li nms_threshold: A required float, specifying the intersection-over-union (IOU) threshold for box filtering. The value range is [0.0, 1.0].\n
*@li max_box_number_per_batch: An optional int, specifying the maximum number of output boxes per batch. Defaults to "1024".
*@li pre_nms_topn: An optional int, specifying the number of boxes for non-maximum suppression (NMS). Defaults to "512".
*
*@par Outputs:
*@li boxout: An NCHW tensor of type float16, describing the information of each output box, including the coordinates, class, and confidence.
*@li boxoutnum: An NCHW tensor of type int32, specifying the number of output boxes.

*@attention Constraints:\n
*@li This operator applies only to the YOLO v2 network.
*@li The preceding layer of operator Yolov2DetectionOutput must be one Yolo operator.
*/
REG_OP(YoloV2DetectionOutput)
    .INPUT(coord_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(obj_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(classes_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(img_info, TensorType({DT_FLOAT16,DT_FLOAT}))
    .REQUIRED_ATTR(biases, ListFloat)
    .ATTR(boxes, Int, 5)
    .ATTR(coords, Int, 4)
    .ATTR(classes, Int, 80)
    .ATTR(relative, Bool, true)
    .ATTR(obj_threshold, Float, 0.5)
    .ATTR(post_top_k, Int, 1024)
    .ATTR(classes_threshold, Float, 0.5)
    .ATTR(nms_threshold, Float, 0.45)
    .ATTR(max_box_number_per_batch, Int, 1024)
    .ATTR(pre_nms_topn, Int, 512)
    .OUTPUT(box_out, TensorType({DT_FLOAT16,DT_FLOAT}))
    .OUTPUT(box_out_num, TensorType({DT_INT32}))
    .OP_END_FACTORY_REG(YoloV2DetectionOutput)

/**
*@brief Performs YOLO V2 detection.

*@par Inputs:
*Six inputs, including:
*@li The outputs of operator Yolo at the preceding layer (that is, one Yolo operator on YOLO v2) are used as the inputs of operator Yolov2DetectionOutput. \n
Each Yolo operator has three outputs: "coords", "obj", and "class". For details, see the description of operator Yolo.
*@li imginfo: A float16, describing the image information including the required image height and width \n
and the actual image height and width.
*@li windex: A windex tensor with shape [height, weight]. Has the same type as the inputs. [[0,1,2...(weight-1)],[0,1,2...(w-1)]...[0,1,2...(weight-1)]] consisting of h groups of [0, 1, 2...(weight-1)] is formed. \n

*@li hindex: A hindex tensor with shape [height, weight]. Has the same type as the inputs. [[0,0...0],[1,1...1],[2,2...2]...[height-1,height-1...,height-1]]. \n

*
*@par Attributes:
*@li biases: A required float. "biases = Number of Yolo operators at the preceding layer x 2 x boxes"
*@li boxes: A required int32, specifying the number of anchor boxes predicted for each Yolo layer.
*@li coords: Specifies the number of coordinate parameters. Must be 4.
*@li classes: A required int32, specifying the number of classes to be predicted. The value range is [1, 80].
*@li relative: An optional bool. Defaults to and must be "true".
*@li obj_threshold: A required float, specifying the confidence threshold for box filtering, which is the output "obj" of operator Yolo. The value range is [0.0, 1.0].
*@li post_top_k: An optional int32. This attribute is reserved.
*@li classes_threshold: A required float, specifying the class score threshold for box filtering, which is the output "class" of operator Yolo. The value range is [0.0, 1.0].

*@li nms_threshold: A required float, specifying the intersection-over-union (IOU) threshold for box filtering. The value range is [0.0, 1.0].\n
*@li max_box_number_per_batch: An optional int, specifying the maximum number of output boxes per batch. Defaults to "1024".
*@li pre_nms_topn: An optional int, specifying the number of boxes for non-maximum suppression (NMS). Defaults to "512".
*
*@par Outputs:
*@li boxout: An NCHW tensor of type float16, describing the information of each output box, including the coordinates, class, and confidence.
*@li boxoutnum: An NCHW tensor of type int32, specifying the number of output boxes.
*
*@attention Constraints:\n
*@li This operator applies only to the YOLO v2 network.
*@li The preceding layer of operator Yolov2DetectionOutput must be one Yolo operator.
*/
REG_OP(YoloV2DetectionOutputD)
    .INPUT(coord_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(obj_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(classes_data, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(img_info, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(windex, TensorType({DT_FLOAT16,DT_FLOAT}))
    .INPUT(hindex, TensorType({DT_FLOAT16,DT_FLOAT}))
    .REQUIRED_ATTR(biases, ListFloat)
    .ATTR(boxes, Int, 5)
    .ATTR(coords, Int, 4)
    .ATTR(classes, Int, 80)
    .ATTR(relative, Bool, true)
    .ATTR(obj_threshold, Float, 0.5)
    .ATTR(post_top_k, Int, 1024)
    .ATTR(classes_threshold, Float, 0.5)
    .ATTR(nms_threshold, Float, 0.45)
    .ATTR(max_box_number_per_batch, Int, 1024)
    .ATTR(pre_nms_topn, Int, 512)
    .OUTPUT(box_out, TensorType({DT_FLOAT16,DT_FLOAT}))
    .OUTPUT(box_out_num, TensorType({DT_INT32}))
    .OP_END_FACTORY_REG(YoloV2DetectionOutputD)

/**
*@brief Performs plane or channel conversion on YoloV2.
* If reverse=true: (N, H, W, C)->(N, H*stride, W*stride, C/(stride*stride))
* If reverse=false: (N, H, W, C)->(N, H/stride, W/stride, C*(stride*stride))

*@par Inputs:
*x: An (N, H, W, C) tensor. All data types are supported.

*@par Attributes:
*@li stride: An optional int32, specifying the plane or channel scaling factor. Defaults to "2".
*@li reverse: An optional bool, specifying the conversion mode. If "true", depth to space conversion is performed. If "false", space to depth conversion is performed. Defaults to "false".

*@par Outputs:
*y: An (N, H, W, C) tensor. All data types are supported.

*@attention Constraints:
*@li If reverse=true: C/(stride*stride) yields an integer result. If reverse=false: W/stride and H/stride yield integer results.
*/
REG_OP(PassThrough)
    .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, DT_UINT32, DT_INT64, DT_UINT64}))
    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, DT_UINT32, DT_INT64, DT_UINT64}))
    .ATTR(stride, Int, 2)
    .ATTR(reverse, Bool, false)
    .OP_END_FACTORY_REG(PassThrough)

/**
*@brief Crops the input.

*@par Inputs:
*Inputs include: \n
* @li x: A required Tensor. Must be one of the following types: float16, float32, int8, uint8, int16, uint16, int32, uint32, int64, uint64.
* @li size: A required Tensor. Must be one of the following types: float16, float32, int8, uint8, int16, uint16, int32, uint32, int64, uint64.
*@par Attributes:
*@li axis: An optional int32, specifying the first dimension to crop. Defaults to "2".
*@li offsets: A required list of ints, specifying the shift for all/each dimension to align the cropped bottom with the reference bottom.
*@par Outputs:
*y: A required Tensor. Has the same type and shape as "size".

*@attention Constraints:\n
*@li "y" must have the same type and shape as "size". "x" must have the same type as "size".
*@li "axis" must be less than the rank of "x".
*@li The "offsets" for each dimension must not exceed the maximum value of the corresponding dimension of "x".
*@li The array length of "offsets" plus the value of "axis" equals to the rank of "y".
*/
REG_OP(Crop)
    .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, DT_UINT32,DT_INT64,DT_UINT64}))
    .INPUT(size, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, DT_UINT32,DT_INT64,DT_UINT64}))
    .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, DT_UINT32,DT_INT64,DT_UINT64}))
    .ATTR(axis, Int, 2)
    .REQUIRED_ATTR(offsets, ListInt)
    .OP_END_FACTORY_REG(Crop)

/**
*@brief Extends the input with copies of data along a specified dimension. For example: \n
(1) If x = [[[1, 2], [3, 4], [5, 6]], [[7, 8], [9, 10], [11, 12]]], with shape (2, 3, 2);\n
(2) axis = 1;\n
(3) tiles = 2;\n
(4) Then, y = [[[1, 2], [3, 4], [5, 6], [1, 2], [3, 4], [5, 6]], [[7, 8], [9, 10], [11, 12], [7, 8], [9, 10], [11, 12]]], with shape (2, 6, 2).

*@par Inputs:
* One input:
*input_x: A Tensor with any format. Must be one of the following types: float16, float32, int8, int16, int32, int64, uint8, uint16, uint32, uint64.

*@par Attributes:
*@li axis: An optional int32, specifying the axis to tile. Defaults to 1.
*@li tiles: A required int32, specifying the number of copies (tiles) to output.

*@par Outputs:
*output_y: A Tensor of any format. Must be one of the following types: float16, float32, int8, int16, int32, int64, uint8, uint16, uint32, uint64.

*@attention Constraints:\n
*@li "axis" must be within the rank of the input tensor.
*@li "tiles" must be greater than 1.
+*/ +REG_OP(TileV2) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT64, DT_INT32, + DT_INT16, DT_INT8, DT_UINT64, DT_UINT32, DT_UINT16, DT_UINT8})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT64, DT_INT32, + DT_INT16, DT_INT8, DT_UINT64, DT_UINT32, DT_UINT16, DT_UINT8})) + .ATTR(axis, Int, 1) + .REQUIRED_ATTR(tiles, Int) + .OP_END_FACTORY_REG(TileV2) +} // namespace ge +#endif // GE_OP_SELECTION_OPS_H diff --git a/third_party/fwkacllib/inc/ops/set_ops.h b/third_party/fwkacllib/inc/ops/set_ops.h new file mode 100755 index 00000000..8b4ca579 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/set_ops.h @@ -0,0 +1,80 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
 */

#ifndef GE_OP_SET_OPS_H_
#define GE_OP_SET_OPS_H_

#include "graph/operator.h"
#include "graph/operator_reg.h"

namespace ge {

/**
*@brief Applies the set operation named by attribute "set_operation" to two dense
* input tensors "x1" and "x2", producing the result as a sparse tensor
* (y_indices, y_values, y_shape). NOTE(review): the accepted "set_operation"
* strings are defined by the kernel implementation — confirm before relying on them.

*@par Inputs:
*@li x1: A dense Tensor. Must be one of the following types: int8, int16, uint16, uint8, int32, int64, string.
*@li x2: A dense Tensor. Has the same type as "x1".

*@par Attributes:
*@li set_operation: An optional string specifying the set operation to apply. Defaults to "".
*@li validate_indices: An optional bool. Defaults to "true".

*@par Outputs:
*@li y_indices: A Tensor of type int64. Indices of the sparse result.
*@li y_values: A Tensor. Has the same type as "x1". Values of the sparse result.
*@li y_shape: A Tensor of type int64. Dense shape of the sparse result.
*/
REG_OP(DenseToDenseSetOperation)
    .INPUT(x1, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .INPUT(x2, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .OUTPUT(y_indices, TensorType({DT_INT64}))
    .OUTPUT(y_values, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .OUTPUT(y_shape, TensorType({DT_INT64}))
    .ATTR(set_operation, String, "")
    .ATTR(validate_indices, Bool, true)
    .OP_END_FACTORY_REG(DenseToDenseSetOperation)

/**
*@brief Applies the set operation named by attribute "set_operation" to a dense
* input "x1" and a sparse input (x2_indices, x2_values, x2_shape), producing
* the result as a sparse tensor (y_indices, y_values, y_shape).

*@par Inputs:
*@li x1: A dense Tensor. Must be one of the following types: int8, int16, uint16, uint8, int32, int64, string.
*@li x2_indices: A Tensor of type int64. Indices of the sparse input.
*@li x2_values: A Tensor. Has the same type as "x1". Values of the sparse input.
*@li x2_shape: A Tensor of type int64. Dense shape of the sparse input.

*@par Attributes:
*@li set_operation: An optional string specifying the set operation to apply. Defaults to "".
*@li validate_indices: An optional bool. Defaults to "true".

*@par Outputs:
*@li y_indices: A Tensor of type int64. Indices of the sparse result.
*@li y_values: A Tensor. Has the same type as "x1". Values of the sparse result.
*@li y_shape: A Tensor of type int64. Dense shape of the sparse result.
*/
REG_OP(DenseToSparseSetOperation)
    .INPUT(x1, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .INPUT(x2_indices, TensorType({DT_INT64}))
    .INPUT(x2_values, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .INPUT(x2_shape, TensorType({DT_INT64}))
    .OUTPUT(y_indices, TensorType({DT_INT64}))
    .OUTPUT(y_values, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .OUTPUT(y_shape, TensorType({DT_INT64}))
    .ATTR(set_operation, String, "")
    .ATTR(validate_indices, Bool, true)
    .OP_END_FACTORY_REG(DenseToSparseSetOperation)

/**
*@brief Applies the set operation named by attribute "set_operation" to two
* sparse inputs (x1_indices, x1_values, x1_shape) and (x2_indices, x2_values,
* x2_shape), producing the result as a sparse tensor (y_indices, y_values, y_shape).

*@par Inputs:
*@li x1_indices: A Tensor of type int64. Indices of the first sparse input.
*@li x1_values: A Tensor. Must be one of the following types: int8, int16, uint16, uint8, int32, int64, string.
*@li x1_shape: A Tensor of type int64. Dense shape of the first sparse input.
*@li x2_indices: A Tensor of type int64. Indices of the second sparse input.
*@li x2_values: A Tensor. Has the same type as "x1_values".
*@li x2_shape: A Tensor of type int64. Dense shape of the second sparse input.

*@par Attributes:
*@li set_operation: An optional string specifying the set operation to apply. Defaults to "".
*@li validate_indices: An optional bool. Defaults to "true".

*@par Outputs:
*@li y_indices: A Tensor of type int64. Indices of the sparse result.
*@li y_values: A Tensor. Has the same type as "x1_values". Values of the sparse result.
*@li y_shape: A Tensor of type int64. Dense shape of the sparse result.
*/
REG_OP(SparseToSparseSetOperation)
    .INPUT(x1_indices, TensorType({DT_INT64}))
    .INPUT(x1_values, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .INPUT(x1_shape, TensorType({DT_INT64}))
    .INPUT(x2_indices, TensorType({DT_INT64}))
    .INPUT(x2_values, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .INPUT(x2_shape, TensorType({DT_INT64}))
    .OUTPUT(y_indices, TensorType({DT_INT64}))
    .OUTPUT(y_values, TensorType({DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, \
        DT_INT32, DT_INT64, DT_STRING}))
    .OUTPUT(y_shape, TensorType({DT_INT64}))
    .ATTR(set_operation, String, "")
    .ATTR(validate_indices, Bool, true)
    .OP_END_FACTORY_REG(SparseToSparseSetOperation)

/**
*@brief Computes the size of a sparse set input (set_indices, set_values,
* set_shape). NOTE(review): presumably the number of unique elements along the
* last dimension, as in the TensorFlow op of the same name — confirm against
* the kernel implementation.

*@par Inputs:
*@li set_indices: A Tensor of type int64. Indices of the sparse input.
*@li set_values: A Tensor. Must be one of the following types: int8, int16, uint8, uint16, int32, int64.
*@li set_shape: A Tensor of type int64. Dense shape of the sparse input.

*@par Attributes:
*validate_indices: An optional bool. Defaults to "true".

*@par Outputs:
*size: A Tensor of type int32.
*/
REG_OP(SetSize)
    .INPUT(set_indices, TensorType({DT_INT64}))
    .INPUT(set_values, TensorType({DT_INT8, DT_INT16, \
        DT_UINT8, DT_UINT16, DT_INT32, DT_INT64}))
    .INPUT(set_shape, TensorType({DT_INT64}))
    .OUTPUT(size, TensorType({DT_INT32}))
    .ATTR(validate_indices, Bool, true)
    .OP_END_FACTORY_REG(SetSize)
} // namespace ge

#endif // GE_OP_SET_OPS_H_
diff --git a/third_party/fwkacllib/inc/ops/shuffle_channel_ops.h b/third_party/fwkacllib/inc/ops/shuffle_channel_ops.h
new file mode 100755
index 00000000..b9f609fc
--- /dev/null
+++ b/third_party/fwkacllib/inc/ops/shuffle_channel_ops.h
@@ -0,0 +1,31 @@
/**
 * Copyright 2019-2020 Huawei Technologies Co., Ltd
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
+ */ + + #ifndef GE_OP_SHUFFLE_CHANNEL_OPS_H + #define GE_OP_SHUFFLE_CHANNEL_OPS_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(ShuffleChannel) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, DT_UINT32,DT_INT64,DT_UINT64})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT,DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, DT_UINT32,DT_INT64,DT_UINT64})) + .ATTR(group, Int, 1) + .OP_END_FACTORY_REG(ShuffleChannel) + } // namespace ge + + #endif // GE_OP_SHUFFLE_CHANNEL_OPS_H diff --git a/third_party/fwkacllib/inc/ops/sparse_ops.h b/third_party/fwkacllib/inc/ops/sparse_ops.h new file mode 100755 index 00000000..246fbc9b --- /dev/null +++ b/third_party/fwkacllib/inc/ops/sparse_ops.h @@ -0,0 +1,382 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_SPARSE_OPS_H_ +#define GE_OP_SPARSE_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(SparseSoftmax) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_FLOAT, DT_DOUBLE})) + .INPUT(shape, TensorType({DT_INT64})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseSoftmax) + +REG_OP(SparseTensorDenseAdd) + .INPUT(x1_indices, TensorType({DT_INT32, DT_INT64})) + .INPUT(x1_values, TensorType({DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_FLOAT})) + .INPUT(x1_shape, TensorType({DT_INT32, DT_INT64})) + .INPUT(x2, TensorType({DT_INT64})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT})) + .OP_END_FACTORY_REG(SparseTensorDenseAdd) + +REG_OP(SparseReorder) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .INPUT(shape, TensorType({DT_INT64})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseReorder) + +REG_OP(SparseReshape) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(shape, TensorType({DT_INT64})) + .INPUT(new_shape, TensorType({DT_INT64})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_shape, TensorType({DT_INT64})) + .OP_END_FACTORY_REG(SparseReshape) + +REG_OP(SparseDenseCwiseAdd) + .INPUT(x1_indices, TensorType({DT_INT64})) + .INPUT(x1_values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(x1_shape, TensorType({DT_INT64})) + .INPUT(x2, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, 
DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseDenseCwiseAdd) + +REG_OP(SparseDenseCwiseDiv) + .INPUT(x1_indices, TensorType({DT_INT64})) + .INPUT(x1_values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(x1_shape, TensorType({DT_INT64})) + .INPUT(x2, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseDenseCwiseDiv) + +REG_OP(SparseDenseCwiseMul) + .INPUT(x1_indices, TensorType({DT_INT64})) + .INPUT(x1_values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(x1_shape, TensorType({DT_INT64})) + .INPUT(x2, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, DT_INT32, \ + DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseDenseCwiseMul) + +REG_OP(AddSparseToTensorsMap) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .INPUT(shape, TensorType({DT_INT64})) + .OUTPUT(handle, TensorType({DT_INT64})) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(AddSparseToTensorsMap) + +REG_OP(SparseSliceGrad) + .INPUT(backprop_val_grad, TensorType({ DT_INT8, DT_UINT8, DT_INT16, + DT_UINT16, DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, DT_DOUBLE })) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(start, TensorType({DT_INT64})) + .INPUT(new_indices, TensorType({DT_INT64})) + .OUTPUT(y_grad, TensorType({ DT_INT8, DT_UINT8, DT_INT16, + DT_UINT16, DT_INT32, DT_INT64, DT_FLOAT, DT_FLOAT16, DT_DOUBLE })) + 
.OP_END_FACTORY_REG(SparseSliceGrad) + +REG_OP(SparseSlice) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({ DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE })) + .INPUT(shape, TensorType({DT_INT64})) + .INPUT(start, TensorType({DT_INT64})) + .INPUT(size, TensorType({DT_INT64})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_values, TensorType({ DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE })) + .OUTPUT(y_shape, TensorType({DT_INT64})) + .OP_END_FACTORY_REG(SparseSlice) + +REG_OP(SparseAddGrad) + .INPUT(backprop_val_grad, TensorType({DT_INT8, DT_INT16, DT_INT32, + DT_INT64, DT_FLOAT, DT_DOUBLE})) + .INPUT(x1_indices, TensorType({DT_INT64})) + .INPUT(x2_indices, TensorType({DT_INT64})) + .INPUT(sum_indices, TensorType({DT_INT64})) + .OUTPUT(x1_val_grad, TensorType({DT_INT8, DT_INT16, DT_INT32, + DT_INT64, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(x2_val_grad, TensorType({DT_INT8, DT_INT16, DT_INT32, + DT_INT64, DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseAddGrad) + +REG_OP(SparseFillEmptyRowsGrad) + .INPUT(reverse_index_map, TensorType({DT_INT64})) + .INPUT(grad_values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y_value, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OUTPUT(y_default_value, TensorType({DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseFillEmptyRowsGrad) + +REG_OP(SparseTensorDenseMatMul) + .INPUT(x1_indices, TensorType({DT_INT32, DT_INT64})) + .INPUT(x1_values, TensorType({DT_FLOAT, DT_INT32, DT_DOUBLE})) + .INPUT(x1_shape, TensorType({DT_INT64})) + .INPUT(x2, TensorType({DT_FLOAT, DT_INT32, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_INT32, DT_DOUBLE})) + .ATTR(adjoint_a, Bool, 
false) + .ATTR(adjoint_b, Bool, false) + .OP_END_FACTORY_REG(SparseTensorDenseMatMul) + +REG_OP(SparseToDense) + .INPUT(indices, TensorType({DT_INT32, DT_INT64})) + .INPUT(output_shape, TensorType({DT_INT32, DT_INT64})) + .INPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_BOOL, DT_DOUBLE})) + .INPUT(default_value, TensorType({DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_BOOL, \ + DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_FLOAT16, DT_FLOAT, DT_BOOL, DT_DOUBLE})) + .ATTR(validate_indices, Bool, true) + .OP_END_FACTORY_REG(SparseToDense) + +REG_OP(SparseConcat) + .DYNAMIC_INPUT(indices, TensorType({DT_INT64})) + .DYNAMIC_INPUT(values, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .DYNAMIC_INPUT(shapes, TensorType({DT_INT64})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_values, + TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y_shape, TensorType({DT_INT64})) + .ATTR(concat_dim, Int, 0) + .ATTR(N, Int, 1) + .OP_END_FACTORY_REG(SparseConcat) + +REG_OP(SparseAdd) + .INPUT(x1_indices, TensorType({DT_INT64})) + .INPUT(x1_values, TensorType({DT_FLOAT, DT_INT8, DT_INT16, \ + DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x1_shape, TensorType({DT_INT64})) + .INPUT(x2_indices, TensorType({DT_INT64})) + .INPUT(x2_values, TensorType({DT_FLOAT, DT_INT8, DT_INT16, DT_INT32, \ + DT_INT64, DT_DOUBLE})) + .INPUT(x2_shape, TensorType({DT_INT64})) + .INPUT(thresh, TensorType({DT_FLOAT, DT_INT8, DT_INT16, DT_INT32, \ + DT_INT64, DT_DOUBLE})) + .OUTPUT(sum_indices, TensorType({DT_INT64})) + .OUTPUT(sum_values, TensorType({DT_FLOAT, DT_INT8, DT_INT16, \ + DT_INT32, DT_INT64, DT_DOUBLE})) + .OUTPUT(sum_shape, TensorType({DT_INT64})) + 
.OP_END_FACTORY_REG(SparseAdd) + +REG_OP(SparseFillEmptyRows) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .INPUT(dense_shape, TensorType({DT_INT64})) + .INPUT(default_value, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, \ + DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, \ + DT_INT16, DT_UINT16, DT_UINT8, \ + DT_INT32, DT_INT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(empty_row_indicator, TensorType({DT_BOOL})) + .OUTPUT(reverse_index_map, TensorType({DT_INT64})) + .OP_END_FACTORY_REG(SparseFillEmptyRows) + +REG_OP(SparseSparseMaximum) + .INPUT(x1_indices, TensorType({DT_INT64})) + .INPUT(x1_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x1_shape, TensorType({DT_INT64})) + .INPUT(x2_indices, TensorType({DT_INT64})) + .INPUT(x2_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x2_shape, TensorType({DT_INT64})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseSparseMaximum) + +REG_OP(SparseSparseMinimum) + .INPUT(x1_indices, TensorType({DT_INT64})) + .INPUT(x1_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x1_shape, TensorType({DT_INT64})) + .INPUT(x2_indices, TensorType({DT_INT64})) + .INPUT(x2_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x2_shape, TensorType({DT_INT64})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_values, 
TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OP_END_FACTORY_REG(SparseSparseMinimum) + +REG_OP(SparseReduceMax) + .INPUT(x_indices, TensorType({DT_INT64})) + .INPUT(x_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x_shape, TensorType({DT_INT64})) + .INPUT(reduction_axes, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(SparseReduceMax) + +REG_OP(SparseReduceMaxSparse) + .INPUT(x_indices, TensorType({DT_INT64})) + .INPUT(x_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x_shape, TensorType({DT_INT64})) + .INPUT(reduction_axes, TensorType({DT_INT32})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OUTPUT(y_shape, TensorType({DT_INT64})) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(SparseReduceMaxSparse) + +REG_OP(SparseReduceSum) + .INPUT(x_indices, TensorType({DT_INT64})) + .INPUT(x_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x_shape, TensorType({DT_INT64})) + .INPUT(reduction_axes, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(SparseReduceSum) + +REG_OP(SparseReduceSumSparse) + .INPUT(x_indices, TensorType({DT_INT64})) + .INPUT(x_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .INPUT(x_shape, TensorType({DT_INT64})) + .INPUT(reduction_axes, 
TensorType({DT_INT32})) + .OUTPUT(y_indices, TensorType({DT_INT64})) + .OUTPUT(y_values, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, \ + DT_UINT16, DT_UINT8, DT_INT32, DT_INT64, DT_DOUBLE})) + .OUTPUT(y_shape, TensorType({DT_INT64})) + .ATTR(keep_dims, Bool, false) + .OP_END_FACTORY_REG(SparseReduceSumSparse) + +REG_OP(SparseSplit) + .INPUT(split_dim, TensorType({DT_INT64})) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(shape, TensorType({DT_INT64})) + .DYNAMIC_OUTPUT(y_indices, TensorType({DT_INT64})) + .DYNAMIC_OUTPUT(y_values, TensorType({DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_BOOL, \ + DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .DYNAMIC_OUTPUT(y_shape, TensorType({DT_INT64})) + .ATTR(num_split, Int, 1) + .OP_END_FACTORY_REG(SparseSplit) + +REG_OP(SparseCross) + .DYNAMIC_INPUT(indices, TensorType({DT_INT64})) + .DYNAMIC_INPUT(values, TensorType({DT_INT64, DT_STRING})) + .DYNAMIC_INPUT(shapes, TensorType({DT_INT64})) + .DYNAMIC_INPUT(dense_inputs, TensorType({DT_INT64, DT_STRING})) + .OUTPUT(output_indices, TensorType({DT_INT64})) + .OUTPUT(output_values, TensorType({DT_INT64, DT_STRING})) + .OUTPUT(output_shape, TensorType({DT_INT64})) + .ATTR(N, Int, 0) + .REQUIRED_ATTR(hashed_output, Bool) + .ATTR(num_buckets, Int, 0) + .REQUIRED_ATTR(hash_key, Int) + .REQUIRED_ATTR(out_type, Type) + .REQUIRED_ATTR(internal_type, Type) + .OP_END_FACTORY_REG(SparseCross) + +REG_OP(AddManySparseToTensorsMap) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_BOOL, DT_FLOAT16, DT_FLOAT, DT_DOUBLE})) + .INPUT(shape, TensorType({DT_INT64})) + .OUTPUT(handles, TensorType({DT_INT64})) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(AddManySparseToTensorsMap) + 
+REG_OP(TakeManySparseFromTensorsMap) + .INPUT(handles, TensorType({DT_INT64})) + .OUTPUT(indices, TensorType({DT_INT64})) + .OUTPUT(values, TensorType({DT_BOOL, DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16})) + .OUTPUT(shape, TensorType({DT_INT64})) + .REQUIRED_ATTR(dtype, Type) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(TakeManySparseFromTensorsMap) + +REG_OP(SerializeSparse) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_BOOL, DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16})) + .INPUT(shape, TensorType({DT_INT64})) + .OUTPUT(serialized_sparse, TensorType({DT_STRING})) + .ATTR(out_type, Type, DT_STRING) + .OP_END_FACTORY_REG(SerializeSparse) + +REG_OP(SerializeManySparse) + .INPUT(indices, TensorType({DT_INT64})) + .INPUT(values, TensorType({DT_BOOL, DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16})) + .INPUT(shape, TensorType({DT_INT64})) + .OUTPUT(serialized_sparse, TensorType({DT_STRING})) + .ATTR(out_type, Type, DT_STRING) + .OP_END_FACTORY_REG(SerializeManySparse) + +REG_OP(DeserializeSparse) + .INPUT(serialized_sparse, TensorType({DT_STRING})) + .OUTPUT(indices, TensorType({DT_INT64})) + .OUTPUT(values, TensorType({DT_BOOL, DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16})) + .OUTPUT(shape, TensorType({DT_INT64})) + .REQUIRED_ATTR(dtype, Type) + .OP_END_FACTORY_REG(DeserializeSparse) + +REG_OP(DeserializeManySparse) + .INPUT(serialized_sparse, TensorType({DT_STRING})) + .OUTPUT(indices, TensorType({DT_INT64})) + .OUTPUT(values, TensorType({DT_BOOL, DT_INT8, DT_UINT8, DT_INT16, \ + DT_UINT16, DT_INT32, DT_INT64, DT_DOUBLE, DT_FLOAT, DT_FLOAT16})) + .OUTPUT(shape, TensorType({DT_INT64})) + .REQUIRED_ATTR(dtype, Type) + .OP_END_FACTORY_REG(DeserializeManySparse) +} // namespace ge + +#endif //
GE_OP_SPARSE_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/split_combination_ops.h b/third_party/fwkacllib/inc/ops/split_combination_ops.h new file mode 100755 index 00000000..2deeef7e --- /dev/null +++ b/third_party/fwkacllib/inc/ops/split_combination_ops.h @@ -0,0 +1,177 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_SPLIT_COMBINATION_OPS_H +#define GE_OP_SPLIT_COMBINATION_OPS_H +#include "../graph/operator_reg.h" + +namespace ge { +REG_OP(Split) + .INPUT(split_dim, TensorType({DT_INT32})) + .INPUT(value, TensorType::BasicType()) + .DYNAMIC_OUTPUT(output, TensorType::BasicType()) + .REQUIRED_ATTR(num_split, Int) + .OP_END_FACTORY_REG(Split) + +REG_OP(SplitD) + .INPUT(value, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, + DT_UINT16, DT_UINT32, DT_UINT64, DT_FLOAT, DT_FLOAT16})) + .DYNAMIC_OUTPUT(output, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, + DT_UINT16, DT_UINT32, DT_UINT64, DT_FLOAT, DT_FLOAT16})) + .REQUIRED_ATTR(split_dim, Int) + .REQUIRED_ATTR(num_split, Int) + .OP_END_FACTORY_REG(SplitD) + +REG_OP(SplitV) + .INPUT(input_value, TensorType::BasicType()) + .INPUT(input_size_splits, TensorType::IndexNumberType()) + .INPUT(input_split_dim, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(output_data, TensorType::BasicType()) + .REQUIRED_ATTR(num_split, Int) + .OP_END_FACTORY_REG(SplitV) + +REG_OP(SplitVD) + .INPUT(input_value, 
TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, + DT_UINT16, DT_UINT32, DT_UINT64, DT_FLOAT, DT_FLOAT16})) + .DYNAMIC_OUTPUT(output_data, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_UINT8, + DT_UINT16, DT_UINT32, DT_UINT64, DT_FLOAT, DT_FLOAT16})) + .REQUIRED_ATTR(size_splits, ListInt) + .REQUIRED_ATTR(split_dim, Int) + .REQUIRED_ATTR(num_split, Int) + .OP_END_FACTORY_REG(SplitVD) + +/** +*@brief Concatenates a list of N tensors along the first dimension. +*@par Inputs: +* Two inputs, including: +* @li values: A list of Tensors. Must be one of the following types: int8, int16, int32, \n +* int64, uint8, uint16, uint32, uint64, float16, float32. \n +* Tensors to be concatenated. \n +* All must have size 1 in the first dimension and same shape. +* @li shape: A Tensor of the same type as "x". \n +* The final shape of the result. Should be equal to the shapes of any input +* but with the number of input values in the first dimension. + +*@par Attributes: +* shape: A required list of ints. + +*@par Outputs: +*output_data: The concatenated tensor with same type as "values". 
+*/ +REG_OP(ParallelConcat) + .DYNAMIC_INPUT(values, TensorType({DT_FLOAT,DT_FLOAT16,DT_INT8,DT_INT16,DT_INT32,DT_INT64,DT_UINT8,DT_UINT16,DT_UINT32,DT_UINT64})) + .OUTPUT(output_data, TensorType({DT_FLOAT,DT_FLOAT16,DT_INT8,DT_INT16,DT_INT32,DT_INT64,DT_UINT8,DT_UINT16,DT_UINT32,DT_UINT64})) + .REQUIRED_ATTR(shape, ListInt) + .OP_END_FACTORY_REG(ParallelConcat) + +REG_OP(ConcatExt2) + .DYNAMIC_INPUT(input_values, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT8, DT_INT64, DT_UINT64, DT_UINT32, DT_INT16, DT_UINT16, DT_UINT8})) + .OUTPUT(output_data, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_INT8, DT_INT64, DT_UINT64, DT_UINT32, DT_INT16, DT_UINT16, DT_UINT8})) + .REQUIRED_ATTR(axis, Int) + .REQUIRED_ATTR(N, Int) + .OP_END_FACTORY_REG(ConcatExt2) + +REG_OP(ConcatV2) + .DYNAMIC_INPUT(input_values, TensorType::BasicType()) + .INPUT(axis, TensorType::IndexNumberType()) + .OUTPUT(output_data, TensorType::BasicType()) + .REQUIRED_ATTR(N, Int) + .OP_END_FACTORY_REG(ConcatV2) + +REG_OP(ConcatD) + .DYNAMIC_INPUT(input_values, TensorType({DT_FLOAT,DT_FLOAT16,DT_INT8,DT_INT16,DT_INT32,DT_INT64,DT_UINT8,DT_UINT16,DT_UINT32,DT_UINT64})) + .OUTPUT(output_data, TensorType({DT_FLOAT,DT_FLOAT16,DT_INT8,DT_INT16,DT_INT32,DT_INT64,DT_UINT8,DT_UINT16,DT_UINT32,DT_UINT64})) + .REQUIRED_ATTR(concat_dim, Int) + .REQUIRED_ATTR(N, Int) + .OP_END_FACTORY_REG(ConcatD) + +REG_OP(Concat) + .DYNAMIC_INPUT(input_values, TensorType::BasicType()) + .INPUT(concat_dim, TensorType::IndexNumberType()) + .OUTPUT(output_data, TensorType::BasicType()) + .REQUIRED_ATTR(N, Int) + .OP_END_FACTORY_REG(Concat) + +/** +*@brief Packs the list of tensors in values into a tensor with rank one higher than each tensor in +* values, by packing them along the axis dimension. Given a list of length N of tensors of +* shape (A, B, C); if axis == 0 then the output tensor will have the shape (N, A, B, C). + +*@par Inputs: +* x: A list of N Tensors. 
Must be one of the following types: int8, int16, int32, +* int64, uint8, uint16, uint32, uint64, float16, float32, bool. + +*@par Attributes: +*@li axis: A required int. +* Dimension along which to pack. The range is [-(R+1), R+1). +*@li N: A required int. Number of tensors. + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(Pack) + .DYNAMIC_INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(axis, Int) + .REQUIRED_ATTR(N, Int) + .OP_END_FACTORY_REG(Pack) + +/** +*@brief Computes offsets of concat inputs within its output. + +*@par Inputs: +*Two inputs, including: +* @li concat_dim: A Tensor of type int32. +* @li x: A list of 1D Tensor objects of type int32. + +*@par Attributes: +*@li Concat_dim: A required int. Must be within the rank of input "x". +*@li N: A required int. + +*@par Outputs: +*y: A Tensor list with same type as "x". +*/ +REG_OP(ConcatOffset) + .INPUT(concat_dim, TensorType({DT_INT32})) + .DYNAMIC_INPUT(x, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(y, TensorType({DT_INT32})) + .REQUIRED_ATTR(N, Int) + .OP_END_FACTORY_REG(ConcatOffset) + +/** +*@brief Computes offsets of concat inputs within its output. + +*@par Inputs: +*Two inputs, including: +* @li concat_dim: A Tensor of type int32. +* @li x: A list of 1D Tensor objects of type int32. + +*@par Attributes: +*@li Concat_dim: A required int. Must be within the rank of input "x". +*@li N: A required int. + +*@par Outputs: +*y: A Tensor list with same type as "x". 
+*/ +REG_OP(ConcatOffsetD) + .DYNAMIC_INPUT(x, TensorType({DT_INT32})) + .DYNAMIC_OUTPUT(y, TensorType({DT_INT32})) + .REQUIRED_ATTR(concat_dim, Int) + .REQUIRED_ATTR(N, Int) + .OP_END_FACTORY_REG(ConcatOffsetD) +} // namespace ge + +#endif // GE_OP_SPLIT_COMBINATION_OPS_H diff --git a/third_party/fwkacllib/inc/ops/state_ops.h b/third_party/fwkacllib/inc/ops/state_ops.h new file mode 100755 index 00000000..524901f6 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/state_ops.h @@ -0,0 +1,113 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef GE_OP_STATE_OPS_H_ +#define GE_OP_STATE_OPS_H_ + +#include "graph/operator_reg.h" + +namespace ge { + +/** +*@brief Creates a variable tensor. + +*@par Inputs: +*x: A tensor, used to assign a value to the variable tensor internally. \n +The caller does not need to pass the value of the variable tensor. + +*@par Attributes: +*@li index: An integer. Index of the input tensor. +*@li value: A tensor, used to pass and record the value of the variable tensor. +*@li container: A string. The container of the variable tensor. +*@li shared_name: A string. The shared name of the variable tensor. + +*@par Outputs: +*y: The created variable tensor. 
+*/ +REG_OP(Variable) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, \ + DT_UINT8, DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .ATTR(index, Int, 0) + .ATTR(value, Tensor, Tensor()) + .ATTR(container, String, "") + .ATTR(shared_name, String, "") + .OP_END_FACTORY_REG(Variable) + +/** +*@brief Returns a temporary variable tensor. After the use of TemporaryVariable, \n +pass the reference to the variable tensor to the matching DestroyTemporaryVariable op for destruction. + +*@par Attributes: +*@li shape: A required list of int32 or int64. The shape of the variable tensor. +*@li dtype: Required. The type of elements in the variable tensor. +*@li var_name: An optional string. The name of the variable to be created. + +*@par Outputs: +*y: The created variable tensor. +*/ +REG_OP(TemporaryVariable) + .OUTPUT(y, TensorType::ALL()) + .ATTR(shape, ListInt, {}) + .ATTR(dtype, Int, 0) + .ATTR(var_name, String, "") + .OP_END_FACTORY_REG(TemporaryVariable) + +/** +*@brief Destroys the temporary variable and returns its final value. \n +All other uses of the temporary variable must have been executed before this op. + +*@par Inputs: +*x: A reference to the temporary variable tensor. + +*@par Attributes: +*var_name: A required string. Name of the temporary variable. \n +Must be the same as the "var_name" attribute of the reference to the temporary variable tensor. + +*@par Outputs: +*y: Final value of the reference to the temporary variable tensor. +*/ +REG_OP(DestroyTemporaryVariable) + .INPUT(x, TensorType::ALL()) + .OUTPUT(y, TensorType::ALL()) + .ATTR(var_name, String, "") + .OP_END_FACTORY_REG(DestroyTemporaryVariable) + +/** +*@brief Checks whether a tensor has been initialized. Outputs boolean scalar indicating whether the tensor has been initialized. 
+ +*@par Inputs: +*x: A tensor. + +*@par Outputs: +*y: A tensor, indicating whether "x" has been initialized. +*/ +REG_OP(IsVariableInitialized) + .INPUT(x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_INT16, DT_UINT16, DT_UINT8, + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_BOOL, DT_DOUBLE})) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(IsVariableInitialized) + +REG_OP(CountUpTo) + .INPUT(ref, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .ATTR(limit, Int, 0) + .OP_END_FACTORY_REG(CountUpTo) + +} // namespace ge + +#endif // GE_OP_STATE_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/stateless_random_ops.h b/third_party/fwkacllib/inc/ops/stateless_random_ops.h new file mode 100644 index 00000000..5827ba9b --- /dev/null +++ b/third_party/fwkacllib/inc/ops/stateless_random_ops.h @@ -0,0 +1,35 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_STATELESS_RANDOM_OPS_H +#define GE_OP_STATELESS_RANDOM_OPS_H + +#include "graph/operator.h" +#include "graph/operator_reg.h" + +namespace ge { + +REG_OP(StatelessMultinomial) + .INPUT(logits, TensorType({DT_FLOAT16,DT_FLOAT,DT_DOUBLE})) + .INPUT(num_samples, TensorType({DT_INT32})) + .INPUT(seed, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(y, TensorType({DT_INT32, DT_INT64})) + .ATTR(output_dtype, Type, DT_INT64) + .OP_END_FACTORY_REG(StatelessMultinomial) + +} // namespace ge + +#endif //GE_OP_STATELESS_RANDOM_OPS_H diff --git a/third_party/fwkacllib/inc/ops/string_ops.h b/third_party/fwkacllib/inc/ops/string_ops.h new file mode 100644 index 00000000..20926748 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/string_ops.h @@ -0,0 +1,138 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_STRING_OPS_H_ +#define GE_OP_STRING_OPS_H_ + +#include <string> +#include "graph/operator_reg.h" + +namespace ge { +REG_OP(StringSplit) + .INPUT(input, TensorType({DT_STRING})) + .INPUT(delimiter, TensorType({DT_STRING})) + .OUTPUT(indices, TensorType({DT_INT64})) + .OUTPUT(values, TensorType({DT_STRING})) + .OUTPUT(shape, TensorType({DT_INT64})) + .ATTR(skip_empty, Bool, true) + .OP_END_FACTORY_REG(StringSplit) + +REG_OP(StringSplitV2) + .INPUT(input, TensorType({DT_STRING})) + .INPUT(sep, TensorType({DT_STRING})) + .OUTPUT(indices, TensorType({DT_INT64})) + .OUTPUT(values, TensorType({DT_STRING})) + .OUTPUT(shape, TensorType({DT_INT64})) + .ATTR(maxsplit, Int, -1) + .OP_END_FACTORY_REG(StringSplitV2) + +REG_OP(UnicodeScript) + .INPUT(x, TensorType({DT_INT32})) + .OUTPUT(y, TensorType({DT_INT32})) + .OP_END_FACTORY_REG(UnicodeScript) + +REG_OP(Substr) + .INPUT(input, TensorType({DT_STRING})) + .INPUT(pos, TensorType({DT_INT32, DT_INT64})) + .INPUT(len, TensorType({DT_INT32, DT_INT64})) + .OUTPUT(output, TensorType({DT_STRING})) + .OP_END_FACTORY_REG(Substr) + +REG_OP(StringToHashBucketFast) + .INPUT(x, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_INT64})) + .ATTR(num_buckets, Int, 1) + .OP_END_FACTORY_REG(StringToHashBucketFast) + +REG_OP(StringToHashBucketStrong) + .INPUT(x, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_INT64})) + .ATTR(num_buckets, Int, 1) + .REQUIRED_ATTR(key, ListInt) + .OP_END_FACTORY_REG(StringToHashBucketStrong) + +REG_OP(StringToHashBucket) + .INPUT(string_tensor, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_INT64})) + .ATTR(num_buckets, Int, 1) + .OP_END_FACTORY_REG(StringToHashBucket) + +REG_OP(StringStrip) + .INPUT(x, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_STRING})) + .OP_END_FACTORY_REG(StringStrip) + +REG_OP(StringLength) + .INPUT(x, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_INT32})) + .ATTR(unit, String, "BYTE") + .OP_END_FACTORY_REG(StringLength) + +REG_OP(StringJoin) +
.DYNAMIC_INPUT(x, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_STRING})) + .REQUIRED_ATTR(N, Int) + .ATTR(separator, String, "") + .OP_END_FACTORY_REG(StringJoin) + +REG_OP(StringFormat) + .DYNAMIC_INPUT(x, TensorType({DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, \ + DT_INT32, DT_INT64, DT_UINT32, DT_UINT64, DT_STRING, DT_FLOAT16, \ + DT_FLOAT, DT_DOUBLE, DT_BOOL})) + .OUTPUT(y, TensorType({DT_STRING})) + .ATTR(template, String, "%s") + .ATTR(placeholder, String, "%s") + .ATTR(summarize, Int, 3) + .OP_END_FACTORY_REG(StringFormat) + +REG_OP(RegexFullMatch) + .INPUT(x, TensorType({DT_STRING})) + .INPUT(pattern, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_BOOL})) + .OP_END_FACTORY_REG(RegexFullMatch) + +REG_OP(RegexReplace) + .INPUT(x, TensorType({DT_STRING})) + .INPUT(pattern, TensorType({DT_STRING})) + .INPUT(rewrite, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_STRING})) + .ATTR(replace_global, Bool, true) + .OP_END_FACTORY_REG(RegexReplace) + +REG_OP(AsString) + .INPUT(x, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, DT_FLOAT, \ + DT_DOUBLE, DT_BOOL})) + .OUTPUT(y, TensorType({DT_STRING})) + .ATTR(precision, Int, -1) + .ATTR(scientific, Bool, false) + .ATTR(shortest, Bool, false) + .ATTR(width, Int, -1) + .ATTR(fill, String, "") + .OP_END_FACTORY_REG(AsString) + +REG_OP(EncodeBase64) + .INPUT(x, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_STRING})) + .ATTR(pad, Bool, false) + .OP_END_FACTORY_REG(EncodeBase64) + +REG_OP(DecodeBase64) + .INPUT(x, TensorType({DT_STRING})) + .OUTPUT(y, TensorType({DT_STRING})) + .OP_END_FACTORY_REG(DecodeBase64) +} // namespace ge + +#endif // GE_OP_STRING_OPS_H_ diff --git a/third_party/fwkacllib/inc/ops/threshold_ops.h b/third_party/fwkacllib/inc/ops/threshold_ops.h new file mode 100755 index 00000000..f692ad3c --- /dev/null +++ b/third_party/fwkacllib/inc/ops/threshold_ops.h @@ -0,0 +1,33 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, 
Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + + #ifndef GE_OP_THRESHOLD_H + #define GE_OP_THRESHOLD_H + + #include "graph/operator_reg.h" + + namespace ge { + + REG_OP(Threshold) + .INPUT(input_x, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT32})) + .OUTPUT(output_y, TensorType({DT_FLOAT, DT_FLOAT16, DT_INT8, DT_UINT8, DT_INT32})) + .ATTR(threshold, Float, 0.0) + // .INFER_SHAPE_AND_TYPE(ThresholdInferShape) + .OP_END_FACTORY_REG(Threshold); + + } // namespace ge + + #endif // GE_OP_THRESHOLD_OPS_H diff --git a/third_party/fwkacllib/inc/ops/transformation_ops.h b/third_party/fwkacllib/inc/ops/transformation_ops.h new file mode 100644 index 00000000..3a421a72 --- /dev/null +++ b/third_party/fwkacllib/inc/ops/transformation_ops.h @@ -0,0 +1,287 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef GE_OP_TRANSFORMATION_OPS_H +#define GE_OP_TRANSFORMATION_OPS_H + +#include "../graph/operator_reg.h" + +namespace ge { +REG_OP(DepthwiseWeight4DTo6D) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_UINT16})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_UINT16})) + .OP_END_FACTORY_REG(DepthwiseWeight4DTo6D) + +REG_OP(DepthwiseWeight6DTo4D) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_UINT16})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT32, DT_UINT16})) + .ATTR(channel_size, Int, 16) + .OP_END_FACTORY_REG(DepthwiseWeight6DTo4D) + +/** +*@brief Permutes the dimensions according to perm.\n + The returned tensor's dimension i will correspond to the input dimension perm[i]. + +*@par Inputs: +*x: A Tensor. Must be one of the following types: float16, float32, int8, int16, int32, int64, uint8, uint16, uint32, uint64. + +*@par Attributes: +*perm: A permutation of the dimensions of "x". + +*@par Outputs: +*y: A Tensor. Has the same type as "x". +*/ +REG_OP(TransposeD) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .ATTR(perm, ListInt, {}) + .OP_END_FACTORY_REG(TransposeD) + +/** +*@brief Permutes the dimensions according to perm.\n + The returned tensor's dimension i will correspond to the input dimension perm[i]. + +*@par Inputs: +*@li x: A Tensor. Must be one of the following types: float16, float32, int8, int16, int32, int64, uint8, uint16, uint32, uint64. +*@li perm: A Tensor of type int32 or int64. A permutation of the dimensions of "x". + +*@par Outputs: +*y: A Tensor. Has the same type as "x". 
+*/ +REG_OP(Transpose) + .INPUT(x, TensorType::BasicType()) + .INPUT(perm, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(Transpose) + +REG_OP(Flatten) + .INPUT(x, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, + DT_UINT8, DT_UINT16, DT_UINT32, DT_UINT64, + DT_FLOAT, DT_FLOAT16})) + .OUTPUT(y, TensorType({DT_INT8, DT_INT16, DT_INT32, DT_INT64, + DT_UINT8, DT_UINT16, DT_UINT32, DT_UINT64, + DT_FLOAT, DT_FLOAT16})) + .OP_END_FACTORY_REG(Flatten) + +REG_OP(BatchToSpaceND) + .INPUT(x, TensorType::BasicType()) + .INPUT(block_shape, TensorType::IndexNumberType()) + .INPUT(crops, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(BatchToSpaceND) + +REG_OP(BatchToSpaceNDD) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(block_shape, ListInt) + .REQUIRED_ATTR(crops, ListInt) + .OP_END_FACTORY_REG(BatchToSpaceNDD) + +REG_OP(SpaceToBatchND) + .INPUT(x, TensorType::BasicType()) + .INPUT(block_shape, TensorType::IndexNumberType()) + .INPUT(paddings, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .OP_END_FACTORY_REG(SpaceToBatchND) + +REG_OP(SpaceToBatchNDD) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(block_shape, ListInt) + .REQUIRED_ATTR(paddings, ListInt) + .OP_END_FACTORY_REG(SpaceToBatchNDD) + +REG_OP(SpaceToDepth) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(block_size, Int) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(SpaceToDepth) + +REG_OP(DepthToSpace) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(block_size, Int) + .ATTR(data_format, String, "NHWC") + .OP_END_FACTORY_REG(DepthToSpace) + +/** +*@brief Permutes data into spatial data blocks and then prunes them. + +*@par Inputs: +*x: A 4D Tensor with format NC1HWC0. 
\n + +*Must be one of the following types: float16, float32 + +*@par Attributes: +*@li crops: A required list of int8, int16, int32, or int64. No default value. +*@li block_size: A required int8, int16, int32, or int64. No default value. + +*@par Outputs: +*y: A 4D Tensor with format NC1HWC0, \n + +* of type float16 or float32. + +*@attention Constraints: +*@li The size of the first dimension of input "x" must be divisible by (block_size * block_size). +*@li "crops" is a 2D tensor of non-negative integers with shape (2, 2). +*@li block_size >= 2 +*/ +REG_OP(BatchToSpace) + .INPUT(x, TensorType::BasicType()) + .INPUT(crops, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(block_size, Int) + .OP_END_FACTORY_REG(BatchToSpace) + +/** +*@brief Rearrange the batch (permutes) data into spatial data blocks, and then crop them. + +*@par Inputs: +* One input: +*x: An Tensor of shape [batch*block_size*block_size, height_pad/block_size, width_pad/block_size, depth].\n +*The batch size of the input tensor must be divisible by (block size * block size). + +*@par Attributes: +*@li block_size: Must be one of the following types: `int32`, `int64`. +*@li crops: An Tensor. Must be one of the following types: int32, Int64.\n +*2D tensor with non negative integer of shape [2, 2]. It specifies how many\n +*elements are clipped from the intermediate result of spatial dimension. + +*@par Outputs: +*y: A Tensor. Has the same type and format as input "x". + +*@attention Constraints: +*@li The size of the first dimension of input "x" must be divisible by (block_size * block_size). +*@li "crops" is a 2D tensor of non-negative integers with shape (2, 2). 
+*@li block_size >= 2 +*/ +REG_OP(BatchToSpaceD) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT64, DT_INT32, DT_UINT8, + DT_UINT16, DT_UINT32, DT_UINT64, DT_INT8, DT_INT16, DT_COMPLEX64, + DT_COMPLEX128, DT_QINT8, DT_QUINT8, DT_QINT16, DT_QUINT16, DT_QINT32})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_DOUBLE, DT_INT64, DT_INT32, DT_UINT8, + DT_UINT16, DT_UINT32, DT_UINT64, DT_INT8, DT_INT16, DT_COMPLEX64, + DT_COMPLEX128, DT_QINT8, DT_QUINT8, DT_QINT16, DT_QUINT16, DT_QINT32})) + .REQUIRED_ATTR(block_size, Int) + .REQUIRED_ATTR(crops, ListInt) + .OP_END_FACTORY_REG(BatchToSpaceD) + +REG_OP(SpaceToBatch) + .INPUT(x, TensorType::BasicType()) + .INPUT(paddings, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(block_size, Int) + .OP_END_FACTORY_REG(SpaceToBatch) + +REG_OP(SpaceToBatchD) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(block_size, Int) + .REQUIRED_ATTR(paddings, ListInt) + .OP_END_FACTORY_REG(SpaceToBatchD) + +/** +* @brief Unpacks the given dimension of a rank-R tensor "value" into rank-(R-1) +* tensors. + +* @par Inputs: +* @ value: A rank-R tensor (R > 0) of type BasicType, with format ND or NC1HWC0. + +* @par Attributes: +* @li num: An optional int, specifying the number of tensors to be unpacked to. +* Defaults to "None". +* @li axis: A required int, specifying the axis to unpack along. The value range +* is [-R, R). + +* @par Outputs: +* output: The list of Tensor objects unpacked from "value", of type BasicType. + +* @attention Constraints: +* @li If "num" is not specified, it is inferred from the shape of "value". +* @li For the ND format, "axis" is in the range [-R, R); For the NC1HWC0 format, +* "axis" must not be 2, 3, -2, or -3. 
+*/ +REG_OP(Unpack) + .INPUT(value, TensorType::BasicType()) + .DYNAMIC_OUTPUT(output, TensorType::BasicType()) + .REQUIRED_ATTR(num, Int) + .ATTR(axis, Int, 0) + .OP_END_FACTORY_REG(Unpack) + +/** +* @brief Extract "patches" from "images" and stacks them in the "depth" +* dimension of the output. + +* @par Inputs: +* images: A 4D Tensor with shape [batch, in_rows, in_cols, depth]. + +* @par Attributes: +* @li ksizes: The size of the sliding window for each dimension of images. +* @li strides: How far the centers of two consecutive patches are in the images.\n +* Must be: [1, stride_rows, stride_cols, 1]. +* @li rates: Must be: [1, rate_rows, rate_cols, 1]. This is the input stride,\n +* specifying how far two consecutive patch samples are in the input. Equivalent\n +* to extracting patches with patch_sizes_eff = patch_sizes + (patch_sizes - 1) *\n +* (rates - 1), followed by subsampling them spatially by a factor of rates. This\n +* is equivalent to rate in dilated (a.k.a. Atrous) convolutions. +* @li padding: The type of padding algorithm to use. + +* @par Outputs: +* Output: A 4D Tensor with shape [batch, out_rows, out_cols, ksize_rows *\n +* ksize_cols * depth] containing image patches with size ksize_rows x ksize_cols\n +* x depth vectorized in the "depth" dimension. Note "out_rows" and "out_cols"\n +* are the dimensions of the output patches. + +* @attention Constraints: +* "ksizes", "strides" and "rates" are lists of integers. 
+*/ +REG_OP(ExtractImagePatches) + .INPUT(images, TensorType::REALNUMBERTYPE()) + .OUTPUT(y, TensorType::REALNUMBERTYPE()) + .ATTR(ksizes, ListInt, {1,3,3,1}) + .ATTR(strides, ListInt, {1,1,1,1}) + .ATTR(rates, ListInt, {1,1,1,1}) + .ATTR(padding, String, "SAME") + .OP_END_FACTORY_REG(ExtractImagePatches) + +REG_OP(ConfusionTransposeD) + .INPUT(x, TensorType::BasicType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(perm, ListInt) + .REQUIRED_ATTR(shape, ListInt) + .REQUIRED_ATTR(transpose_first, Bool) + .OP_END_FACTORY_REG(ConfusionTransposeD) + +REG_OP(ConfusionTranspose) + .INPUT(x, TensorType::BasicType()) + .INPUT(shape, TensorType::IndexNumberType()) + .OUTPUT(y, TensorType::BasicType()) + .REQUIRED_ATTR(perm, ListInt) + .REQUIRED_ATTR(transpose_first, Bool) + .OP_END_FACTORY_REG(ConfusionTranspose) + +REG_OP(FlattenV2) + .INPUT(x, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_UINT32, DT_INT64, DT_UINT64})) + .OUTPUT(y, TensorType({DT_FLOAT16, DT_FLOAT, DT_INT8, DT_UINT8, DT_INT16, DT_UINT16, + DT_INT32, DT_UINT32, DT_INT64, DT_UINT64})) + .ATTR(axis, Int, 1) + .ATTR(end_axis, Int, -1) + .OP_END_FACTORY_REG(FlattenV2) +} // namespace ge + +#endif // GE_OP_TRANSFORMATION_OPS_H diff --git a/third_party/fwkacllib/inc/register/op_registry.h b/third_party/fwkacllib/inc/register/op_registry.h new file mode 100644 index 00000000..3bfa8f88 --- /dev/null +++ b/third_party/fwkacllib/inc/register/op_registry.h @@ -0,0 +1,110 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_REGISTER_OP_REGISTRY_H_ +#define INC_REGISTER_OP_REGISTRY_H_ + +#include +#include +#include +#include +#include "register/register.h" + +namespace domi { +class FMK_FUNC_HOST_VISIBILITY FMK_FUNC_DEV_VISIBILITY OpRegistry { + public: + static OpRegistry *Instance(); + + std::vector registrationDatas; + + bool Register(const OpRegistrationData ®_data); + + domi::ImplyType GetImplyType(const std::string &op_type); + + void GetOpTypeByImplyType(std::vector &vec_op_type, const domi::ImplyType &imply_type); + + void GetFormats(const std::string &op_type, std::vector &input_format_vector, + std::vector &output_format_vector); + + void GetWeightFormats(const std::string &op_type, std::vector &format_vector); + + domi::ParseParamFunc GetParseParamFunc(const std::string &op_type); + + domi::InferShapeFunc GetInferShapeFunc(const std::string &op_type); + + domi::InferShapeFuncV2 GetInferShapeFuncV2(const std::string &op_type); + + domi::GetWorkspaceSizeFunc GetGetWorkspaceSizeFunc(const std::string &op_type); + + domi::UpdateOpDescFunc GetUpdateOpDescFunc(const std::string &op_type); + + domi::BuildTeBinFunc GetBuildTeBinFunc(const std::string &op_type); + + domi::ImplyType GetImplyTypeByOriOpType(const std::string &ori_optype); + + void GetSupportedInputFormats(const std::string &opType, std::vector> &suportedInputFormats); + + void GetSupportedOutputFormats(const std::string &opType, + std::vector> &supportedOutputFormats); + + void GetSupportedInputTypes(const std::string &opType, + std::vector> &suportedInputDataTypes); + void 
GetSupportedInputTypesByOriginOpType(const std::string &opType, + std::vector> &suportedInputDataTypes); + + void GetSupportedOutputTypes(const std::string &opType, + std::vector> &supportedOutputDataTypes); + void GetSupportedOutputTypesByOriginOpType(const std::string &opType, + std::vector> &supportedOutputDataTypes); + + void GetLimitedInputTensorDescs(const std::string &opType, + std::vector> &inputLimitedTensorDescs); + void GetLimitedInputTensorDescsByOriginOpType(const std::string &opType, + std::vector> &inputLimitedTensorDescs); + + void GetLimitedOutputTensorDescs(const std::string &opType, + std::vector> &outputLimitedTensorDescs); + void GetLimitedOutputTensorDescsByOriginOpType( + const std::string &opType, std::vector> &outputLimitedTensorDescs); + + const std::vector &GetConstInputToAttr(const std::string &ori_optype) const; + + private: + std::unordered_map> op_ori_optype_map_; + std::unordered_map op_run_mode_map_; + std::unordered_map> op_input_formats_map_; + std::unordered_map> op_output_formats_map_; + std::unordered_map> op_weight_formats_map_; + std::unordered_map opParseParamsFnMap_; + std::unordered_map opInferShapeFnMap_; + std::unordered_map opInferShapeFnMapV2_; + std::unordered_map opGetWorkspaceSizeFnMap_; + std::unordered_map opUpdateOpDescFnMap_; + std::unordered_map opBuildTeBinFnMap_; + std::unordered_map> opConstInputToAttrMap_; + + std::unordered_map>> opInputSupportedFormats_; + std::unordered_map>> opOutputSupportedFormats_; + std::unordered_map>> opInputSupportedDataTypes_; + std::unordered_map>> opOutputSupportedDataTypes_; + std::unordered_map>> opInputLimitedTensorDescs_; + std::unordered_map>> opOutputLimitedTensorDescs_; + + std::unordered_map originOpType2OmOpType_; +}; +} // namespace domi + +#endif // INC_REGISTER_OP_REGISTRY_H_ diff --git a/third_party/fwkacllib/inc/runtime/base.h b/third_party/fwkacllib/inc/runtime/base.h new file mode 100644 index 00000000..eaf44a41 --- /dev/null +++ 
b/third_party/fwkacllib/inc/runtime/base.h @@ -0,0 +1,264 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CCE_RUNTIME_BASE_H__ +#define __CCE_RUNTIME_BASE_H__ + +#include + +#ifdef __cplusplus +extern "C" { +#endif + +// If you need export the function of this library in Win32 dll, use __declspec(dllexport) +#ifndef RTS_API +#ifdef RTS_DLL_EXPORT +#define RTS_API __declspec(dllexport) +#else +#define RTS_API +#endif +#endif + +/** + * @ingroup dvrt_base + * @brief runtime error numbers. 
+ */ +typedef enum tagRtError { + RT_ERROR_NONE = 0x0, // succes + RT_ERROR_INVALID_VALUE = 0x1, // invalid value + RT_ERROR_MEMORY_ALLOCATION = 0x2, // memory allocation fail + RT_ERROR_INVALID_RESOURCE_HANDLE = 0x3, // invalid handle + RT_ERROR_INVALID_DEVICE_POINTER = 0x4, // invalid device point + RT_ERROR_INVALID_MEMCPY_DIRECTION = 0x5, // invalid memory copy dirction + RT_ERROR_INVALID_DEVICE = 0x6, // invalid device + RT_ERROR_NO_DEVICE = 0x7, // no valid device + RT_ERROR_CMD_OCCUPY_FAILURE = 0x8, // command occpuy failure + RT_ERROR_SET_SIGNAL_FAILURE = 0x9, // set signal failure + RT_ERROR_UNSET_SIGNAL_FAILURE = 0xA, // unset signal failure + RT_ERROR_OPEN_FILE_FAILURE = 0xB, // unset signal failure + RT_ERROR_WRITE_FILE_FAILURE = 0xC, + RT_ERROR_MEMORY_ADDRESS_UNALIGNED = 0xD, + RT_ERROR_DRV_ERR = 0xE, + RT_ERROR_LOST_HEARTBEAT = 0xF, + RT_ERROR_REPORT_TIMEOUT = 0x10, + RT_ERROR_NOT_READY = 0x11, + RT_ERROR_DATA_OPERATION_FAIL = 0x12, + RT_ERROR_INVALID_L2_INSTR_SIZE = 0x13, + RT_ERROR_DEVICE_PROC_HANG_OUT = 0x14, + RT_ERROR_DEVICE_POWER_UP_FAIL = 0x15, + RT_ERROR_DEVICE_POWER_DOWN_FAIL = 0x16, + RT_ERROR_FEATURE_NOT_SUPPROT = 0x17, + RT_ERROR_KERNEL_DUPLICATE = 0x18, // register same kernel repeatly + RT_ERROR_MODEL_STREAM_EXE_FAILED = 0x91, // the model stream failed + RT_ERROR_MODEL_LOAD_FAILED = 0x94, // the model stream failed + RT_ERROR_END_OF_SEQUENCE = 0x95, // end of sequence + + RT_ERROR_RESERVED +} rtError_t; + +/** + * @ingroup dvrt_base + * @brief runtime exception numbers. + */ +typedef enum tagRtExceptionType { + RT_EXCEPTION_NONE = 0, + RT_EXCEPTION_TS_DOWN = 1, + RT_EXCEPTION_TASK_TIMEOUT = 2, + RT_EXCEPTION_TASK_FAILURE = 3, + RT_EXCEPTION_DEV_RUNNING_DOWN = 4 +} rtExceptionType; + +/** + * @ingroup dvrt_base + * @brief Switch type. 
+ */ +typedef enum tagRtCondition { + RT_EQUAL = 0, + RT_NOT_EQUAL, + RT_GREATER, + RT_GREATER_OR_EQUAL, + RT_LESS, + RT_LESS_OR_EQUAL +} rtCondition_t; + +/** + * @ingroup dvrt_base + * @brief Data Type of Extensible Switch Task. + */ +typedef enum tagRtSwitchDataType { + RT_SWITCH_INT32 = 0, + RT_SWITCH_INT64 = 1, +} rtSwitchDataType_t; + +typedef enum tagRtStreamFlagType { + RT_HEAD_STREAM = 0, // first stream + RT_INVALID_FLAG = 0xFFFFFFFF, +} rtStreamFlagType_t; + +typedef enum tagRtLimitType { + RT_LIMIT_TYPE_LOW_POWER_TIMEOUT = 0, // timeout for power down , ms +} rtLimitType_t; + +typedef void (*rtErrorCallback)(rtExceptionType); + +/** + * @ingroup dvrt_base + * @brief stream handle. + */ +typedef void *rtStream_t; + +/** + * @ingroup dvrt_base + * @brief runtime event handle. + */ +typedef void *rtEvent_t; + +/** + * @ingroup dvrt_base + * @brief label handle. + */ +typedef void *rtLabel_t; + +/** + * @ingroup profiling_base + * @brief runtime handle. + */ +RTS_API rtError_t rtSetProfDirEx(const char *profDir, const char *address, const char *jobCtx); + +/** + * @ingroup profiling_base + * @brief init profiler object. + */ +RTS_API rtError_t rtProfilerInit(const char *profdir, const char *address, const char *job_ctx); + +/** + * @ingroup profiling_base + * @brief start rts profiler. + */ +RTS_API rtError_t rtProfilerStart(void); + +/** + * @ingroup profiling_base + * @brief stop rts profiler. + */ +RTS_API rtError_t rtProfilerStop(void); + +/** + * @ingroup profiling_base + * @brief ts send keypoint profiler log. + */ +RTS_API rtError_t rtProfilerTrace(uint64_t id, bool notify, uint32_t flags, rtStream_t stream); + +/** + * @ingroup dvrt_base + * @brief Returns the last error from a runtime call. + */ +RTS_API rtError_t rtGetLastError(); + +/** + * @ingroup dvrt_base + * @brief Returns the last error from a runtime call. 
+ */ +RTS_API rtError_t rtPeekAtLastError(); + +/** + * @ingroup dvrt_base + * @brief set polling receive mode for task report + * @param [out] NA + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtSetPollingMode(); + +/** + * @ingroup dvrt_base + * @brief register callback for error code + * @param [out] NA + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtSetExceptCallback(rtErrorCallback callback); + +/** + * @ingroup dvrt_base + * @brief notify handle. + */ +typedef void *rtNotify_t; + +/** + * @ingroup dvrt_base + * @brief create label instance + * @param [out] label created label + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtLabelCreate(rtLabel_t *label); + +/** + * @ingroup dvrt_base + * @brief set label and stream instance + * @param [in] label set label + * @param [in] stream set stream + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtLabelSet(rtLabel_t label, rtStream_t stream); + +/** + * @ingroup dvrt_base + * @brief destroy label instance + * @param [in] label label to destroy + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtLabelDestroy(rtLabel_t label); + +/** + * @ingroup dvrt_base + * @brief label switch instance + * @param [in] ptr address to get value compared + * @param [in] condition + * @param [in] value to compare + * @param [in] true_label goto label + * @param [in] stream to submit label_switch task + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtLabelSwitch(void *ptr, rtCondition_t condition, uint32_t value, rtLabel_t trueLabel, + rtStream_t stream); + +/** + * @ingroup dvrt_base + * @brief goto label instance + * @param [in] label goto label + * @param [in] stream to submit label_goto task + * @return 
RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtLabelGoto(rtLabel_t label, rtStream_t stream); + +/** + * @ingroup dvrt_base + * @brief name label instance + * @param [in] label instance + * @param [in] name label name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtNameLabel(rtLabel_t label, const char *name); +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_BASE_H__ diff --git a/third_party/fwkacllib/inc/runtime/config.h b/third_party/fwkacllib/inc/runtime/config.h new file mode 100644 index 00000000..131543c0 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/config.h @@ -0,0 +1,189 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CCE_RUNTIME_CONFIG_H__ +#define __CCE_RUNTIME_CONFIG_H__ + +#include "base.h" + +#ifdef __cplusplus +extern "C" { +#endif + +#define PLAT_COMBINE(arch, chip, ver) ((arch << 16) | (chip << 8) | (ver)) +#define PLAT_GET_ARCH(type) ((type >> 16) & 0xffff) +#define PLAT_GET_CHIP(type) ((type >> 8) & 0xff) +#define PLAT_GET_VER(type) (type & 0xff) + +typedef enum tagRtArchType { + ARCH_BEGIN = 0, + ARCH_V100 = ARCH_BEGIN, + ARCH_V200, + ARCH_END, +} rtArchType_t; + +typedef enum tagRtChipType { + CHIP_BEGIN = 0, + CHIP_MINI = CHIP_BEGIN, + CHIP_CLOUD, + CHIP_OTHER_PHN, + CHIP_OTHER_OLD, + CHIP_END, +} rtChipType_t; + +typedef enum tagRtVersion { + VER_BEGIN = 0, + VER_NA = VER_BEGIN, + VER_ES, + VER_CS, + VER_END, +} rtVersion_t; + +// match rtChipType_t +typedef enum tagRtPlatformType { + PLATFORM_BEGIN = 0, + PLATFORM_MINI_V1 = PLATFORM_BEGIN, + PLATFORM_CLOUD_V1, + PLATFORM_END, +} rtPlatformType_t; + +typedef enum tagRtCubeFracMKNFp16 { + RT_CUBE_MKN_FP16_2_16_16 = 0, + RT_CUBE_MKN_FP16_4_16_16, + RT_CUBE_MKN_FP16_16_16_16, + RT_CUBE_MKN_FP16_Default, +} rtCubeFracMKNFp16_t; + +typedef enum tagRtCubeFracMKNInt8 { + RT_CUBE_MKN_INT8_2_32_16 = 0, + RT_CUBE_MKN_INT8_4_32_4, + RT_CUBE_MKN_INT8_4_32_16, + RT_CUBE_MKN_INT8_16_32_16, + RT_CUBE_MKN_INT8_Default, +} rtCubeFracMKNInt8_t; + +typedef enum tagRtVecFracVmulMKNFp16 { + RT_VEC_VMUL_MKN_FP16_1_16_16 = 0, + RT_VEC_VMUL_MKN_FP16_Default, +} rtVecFracVmulMKNFp16_t; + +typedef enum tagRtVecFracVmulMKNInt8 { + RT_VEC_VMUL_MKN_INT8_1_32_16 = 0, + RT_VEC_VMUL_MKN_INT8_Default, +} rtVecFracVmulMKNInt8_t; + +typedef struct tagRtAiCoreSpec { + uint32_t cubeFreq; + uint32_t cubeMSize; + uint32_t cubeKSize; + uint32_t cubeNSize; + rtCubeFracMKNFp16_t cubeFracMKNFp16; + rtCubeFracMKNInt8_t cubeFracMKNInt8; + rtVecFracVmulMKNFp16_t vecFracVmulMKNFp16; + rtVecFracVmulMKNInt8_t vecFracVmulMKNInt8; +} rtAiCoreSpec_t; + +typedef struct tagRtAiCoreRatesPara { + uint32_t ddrRate; + uint32_t l2Rate; + uint32_t 
l2ReadRate; + uint32_t l2WriteRate; + uint32_t l1ToL0ARate; + uint32_t l1ToL0BRate; + uint32_t l0CToUBRate; + uint32_t ubToL2; + uint32_t ubToDDR; + uint32_t ubToL1; +} rtAiCoreMemoryRates_t; + +typedef struct tagRtMemoryConfig { + uint32_t flowtableSize; + uint32_t compilerSize; +} rtMemoryConfig_t; + +typedef struct tagRtPlatformConfig { + uint32_t platformConfig; +} rtPlatformConfig_t; + +/** + * @ingroup + * @brief get platform + * @param [in] platForm + * @return platForm + */ +RTS_API rtError_t rtGetPlatformConfig(rtPlatformConfig_t *platForm); + +/** + * @ingroup + * @brief get AI core count + * @param [in] aiCoreCnt + * @return aiCoreCnt + */ +RTS_API rtError_t rtGetAiCoreCount(uint32_t *aiCoreCnt); + +/** + * @ingroup + * @brief get AI cpu count + * @param [in] aiCpuCnt + * @return aiCpuCnt + */ +RTS_API rtError_t rtGetAiCpuCount(uint32_t *aiCpuCnt); + +/** + * @ingroup + * @brief get AI core frequency + * @param [in] aiCoreSpec + * @return aiCoreSpec + */ +RTS_API rtError_t rtGetAiCoreSpec(rtAiCoreSpec_t *aiCoreSpec); + +/** + * @ingroup + * @brief AI get core band Info + * @param [in] aiCoreMemoryRates + * @return aiCoreMemoryRates + */ +RTS_API rtError_t rtGetAiCoreMemoryRates(rtAiCoreMemoryRates_t *aiCoreMemoryRates); + +/** + * @ingroup + * @brief AI get core buffer Info,FlowTable Size,Compiler Size + * @param [in] memoryConfig + * @return memoryConfig + */ +RTS_API rtError_t rtGetMemoryConfig(rtMemoryConfig_t *memoryConfig); + +/** + * @ingroup + * @brief set platform in gen ctx + * @param [in] platForm + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtSetPlatformType(rtPlatformType_t platformType); + +/** + * @ingroup + * @brief get l2 buffer Info,virtual baseaddr,Size + * @param [in] stream + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtMemGetL2Info(rtStream_t stream, void **ptr, uint32_t *size); + +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_STREAM_H__ diff --git 
a/third_party/fwkacllib/inc/runtime/context.h b/third_party/fwkacllib/inc/runtime/context.h new file mode 100644 index 00000000..b91b1173 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/context.h @@ -0,0 +1,112 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CCE_RUNTIME_CONTEXT_H__ +#define __CCE_RUNTIME_CONTEXT_H__ + +#include "base.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @ingroup rt_context + * @brief runtime context handle. + */ +typedef void *rtContext_t; + +typedef enum tagDryRunFlag { RT_DRYRUN_FLAG_FALSE = 0, RT_DRYRUN_FLAG_TRUE = 1 } rtDryRunFlag_t; + +typedef enum tagCtxMode { + RT_CTX_NORMAL_MODE = 0, + RT_CTX_GEN_MODE = 1, +} rtCtxMode_t; + +/** + * @ingroup rt_context + * @brief create context and associates it with the calling thread + * @param [out] ctx created context + * @param [in] flags context creation flag. set to 0. + * @param [in] device device to create context on + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtCtxCreate(rtContext_t *ctx, uint32_t flags, int32_t device); + +/** + * @ingroup rt_context + * @brief create context and associates it with the calling thread + * @param [out] ctx created context + * @param [in] flags context creation flag. set to 0. 
+ * @param [in] device device to create context on + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtCtxCreateEx(rtContext_t *ctx, uint32_t flags, int32_t device); + +/** + * @ingroup rt_context + * @brief destroy context instance + * @param [in] ctx context to destroy + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtCtxDestroy(rtContext_t ctx); + +/** + * @ingroup rt_context + * @brief binds context to the calling CPU thread. + * @param [in] ctx context to bind. if NULL, unbind current context. + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtCtxSetCurrent(rtContext_t ctx); + +/** + * @ingroup rt_context + * @brief block for a context's tasks to complete + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtCtxSynchronize(void); + +/** + * @ingroup rt_context + * @brief returns the context bound to the calling CPU thread. + * @param [out] ctx returned context + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtCtxGetCurrent(rtContext_t *ctx); + +/** + * @ingroup rt_context + * @brief returns the device ID for the current context + * @param [out] device returned device id + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtCtxGetDevice(int32_t *device); + +/** + * @ingroup rt_context + * @brief set ctx run mode: normal or dryrun + * @param [in] ctx: context + * @param [in] enable: set true means enable dryrun mode + * @param [in] flag: reserved + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtCtxSetDryRun(rtContext_t ctx, rtDryRunFlag_t enable, uint32_t flag); + +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_CONTEXT_H__ \ No newline at end of file diff --git a/third_party/fwkacllib/inc/runtime/dev.h b/third_party/fwkacllib/inc/runtime/dev.h new file mode 100644 index 00000000..47ff2629 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/dev.h @@ -0,0 +1,205 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you 
may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CCE_RUNTIME_DEVICE_H__ +#define __CCE_RUNTIME_DEVICE_H__ + +#include "base.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef struct tagRTDeviceInfo { + uint8_t env_type; /* 0: FPGA 1: EMU 2: ESL */ + uint32_t ctrl_cpu_ip; + uint32_t ctrl_cpu_id; + uint32_t ctrl_cpu_core_num; + uint32_t ctrl_cpu_endian_little; + uint32_t ts_cpu_core_num; + uint32_t ai_cpu_core_num; + uint32_t ai_core_num; + uint32_t ai_cpu_core_id; + uint32_t ai_core_id; + uint32_t aicpu_occupy_bitmap; + uint32_t hardware_version; +#ifdef DRIVER_NEW_API + uint32_t ts_num; +#endif +} rtDeviceInfo_t; + +/** + * @ingroup dvrt_dev + * @brief get total device number. + * @param [in|out] count the device number + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_NO_DEVICE for can not find any device + */ +RTS_API rtError_t rtGetDeviceCount(int32_t *count); +/** + * @ingroup dvrt_dev + * @brief get device ids + * @param [in|out] get details of device ids + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_DRV_ERR for error + */ +RTS_API rtError_t rtGetDeviceIDs(uint32_t *devices, uint32_t len); +/** + * @ingroup dvrt_dev + * @brief get total device infomation. 
+ * @param [in] device the device id + * @param [out] info the device info + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_NO_DEVICE for can not find any device + */ +RTS_API rtError_t rtGetDeviceInfo(int32_t device, rtDeviceInfo_t *info); + +/** + * @ingroup dvrt_dev + * @brief set target device for current thread + * @param [int] device the device id + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_DEVICE for can not match ID and device + */ +RTS_API rtError_t rtSetDevice(int32_t device); + +/** + * @ingroup dvrt_dev + * @brief set target device for current thread + * @param [int] device the device id + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_DEVICE for can not match ID and device + */ +RTS_API rtError_t rtSetDeviceEx(int32_t device); + +/** + * @ingroup dvrt_dev + * @brief get Index by phyId. + * @param [in] phyId the physical device id + * @param [out] devIndex the logic device id + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_NO_DEVICE for can not find any device + */ +RTS_API rtError_t rtGetDeviceIndexByPhyId(uint32_t phyId, uint32_t *devIndex); + +/** + * @ingroup dvrt_dev + * @brief get phyId by Index. + * @param [in] devIndex the logic device id + * @param [out] phyId the physical device id + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_NO_DEVICE for can not find any device + */ +RTS_API rtError_t rtGetDevicePhyIdByIndex(uint32_t devIndex, uint32_t *phyId); + +/** + * @ingroup dvrt_dev + * @brief enable direction:devIdDes---->phyIdSrc. + * @param [in] devIdDes the logical device id + * @param [in] phyIdSrc the physical device id + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_NO_DEVICE for can not find any device + */ +RTS_API rtError_t rtEnableP2P(uint32_t devIdDes, uint32_t phyIdSrc); + +/** + * @ingroup dvrt_dev + * @brief disable direction:devIdDes---->phyIdSrc. 
+ * @param [in] devIdDes the logical device id + * @param [in] phyIdSrc the physical device id + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_NO_DEVICE for can not find any device + */ +RTS_API rtError_t rtDisableP2P(uint32_t devIdDes, uint32_t phyIdSrc); + +/** + * @ingroup dvrt_dev + * @brief get target device of current thread + * @param [in|out] device the device id + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + */ +RTS_API rtError_t rtGetDevice(int32_t *device); + +/** + * @ingroup dvrt_dev + * @brief reset all opened device + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_DEVICE if no device set + */ +RTS_API rtError_t rtDeviceReset(int32_t device); + +/** + * @ingroup dvrt_dev + * @brief reset opened device + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_DEVICE if no device set + */ +RTS_API rtError_t rtDeviceResetEx(int32_t device); + +/** + * @ingroup dvrt_dev + * @brief get total device infomation. + * @param [in] device the device id + * @param [in] type limit type RT_LIMIT_TYPE_LOW_POWER_TIMEOUT=0 + * @param [in] value limit value + * @param [out] info the device info + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_NO_DEVICE for can not find any device + */ +RTS_API rtError_t rtDeviceSetLimit(int32_t device, rtLimitType_t type, uint32_t value); + +/** + * @ingroup dvrt_dev + * @brief Wait for compute device to finish + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_DEVICE if no device set + */ +RTS_API rtError_t rtDeviceSynchronize(void); + +/** + * @ingroup dvrt_dev + * @brief get priority range of current device + * @param [in|out] leastPriority least priority + * @param [in|out] greatestPriority greatest priority + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + */ +RTS_API rtError_t rtDeviceGetStreamPriorityRange(int32_t *leastPriority, int32_t *greatestPriority); + +/** + * @ingroup dvrt_dev + * @brief Set exception 
handling callback function + * @param [in] callback rtExceptiontype + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + */ +RTS_API rtError_t rtSetExceptCallback(rtErrorCallback callback); + +/** + * @ingroup dvrt_dev + * @brief Setting Scheduling Type of Graph + * @param [in] tsId the ts id + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + */ +RTS_API rtError_t rtSetTSDevice(uint32_t tsId); + +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_DEVICE_H__ diff --git a/third_party/fwkacllib/inc/runtime/dvfsprofile.h b/third_party/fwkacllib/inc/runtime/dvfsprofile.h new file mode 100644 index 00000000..11081546 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/dvfsprofile.h @@ -0,0 +1,69 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CCE_RUNTIME_DVFSPROFILE_H__ +#define __CCE_RUNTIME_DVFSPROFILE_H__ + +#include "base.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum dvfsProfileMode { + DVFS_PROFILE_PERFORMANCE_PRIORITY, + DVFS_PROFILE_BALANCE_PRIORITY, + DVFS_PROFILE_POWER_PRIORITY, + DVFS_PROFILE_PRIORITY_MAX +} DvfsProfileMode; + +/** + * @ingroup dvrt_dvfsprofile + * @brief Set the performance mode of the device + * @param [in] mode dvfsProfileMode + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_DEVICE for invalid device handle + * @return RT_ERROR_OPEN_FILE_FAILURE for invalid file handle + */ +RTS_API rtError_t rtSetDvfsProfile(DvfsProfileMode mode); + +/** + * @ingroup dvrt_dvfsprofile + * @brief Set the performance mode of the device + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for invalid value + * @return RT_ERROR_INVALID_DEVICE for invalid device handle + */ +RTS_API rtError_t rtUnsetDvfsProfile(); + +/** + * @ingroup dvrt_dvfsprofile + * @brief Get the current performance mode of the device + * @param [in|out] pmode dvfsProfileMode type pointer + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_DEVICE for invalid device handle + * @return RT_ERROR_OPEN_FILE_FAILURE for invalid file handle + * @return RT_ERROR_NO_DEVICE for reading npu_freq_dnlimit failed + */ +RTS_API rtError_t rtGetDvfsProfile(DvfsProfileMode *pmode); + +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_PROFILE_H__ diff --git a/third_party/fwkacllib/inc/runtime/event.h b/third_party/fwkacllib/inc/runtime/event.h new file mode 100644 index 00000000..08fb98ec --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/event.h @@ -0,0 +1,246 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CCE_RUNTIME_EVENT_H__ +#define __CCE_RUNTIME_EVENT_H__ + +#include "base.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @ingroup dvrt_event + * @brief create event instance + * @param [in|out] event created event + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtEventCreate(rtEvent_t *event); + +/** + * @ingroup dvrt_event + * @brief destroy event instance + * @param [in] event event to destroy + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtEventDestroy(rtEvent_t event); + +/** + * @ingroup dvrt_event + * @brief event record + * @param [int] event event to record + * @param [int] stream stream handle + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtEventRecord(rtEvent_t event, rtStream_t stream); + +/** + * @ingroup dvrt_event + * @brief event reset + * @param [int] event event to reset + * @param [int] stream stream handle + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtEventReset(rtEvent_t event, rtStream_t stream); + +/** + * @ingroup dvrt_event + * @brief wait event to be complete + * @param [in] event event to wait + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtEventSynchronize(rtEvent_t event); + +/** + * @ingroup dvrt_event + * @brief Queries an event's status + * @param [in] event event to 
query + * @return RT_ERROR_NONE for complete + * @return RT_ERROR_NOT_READY for not complete + */ +RTS_API rtError_t rtEventQuery(rtEvent_t event); + +/** + * @ingroup dvrt_event + * @brief computes the elapsed time between events. + * @param [in] time time between start and end in ms + * @param [in] start starting event + * @param [in] end ending event + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtEventElapsedTime(float *time, rtEvent_t start, rtEvent_t end); + +/** + * @ingroup dvrt_event + * @brief get the elapsed time from a event after event recorded. + * @param [in] time time in ms + * @param [in] event event handle + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtEventGetTimeStamp(uint64_t *time, rtEvent_t event); + +/** + * @ingroup dvrt_event + * @brief name an event + * @param [in] event_ event to be named + * @param [in] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of event, name + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtNameEvent(rtEvent_t event_, const char *name); + +/** + * @ingroup dvrt_event + * @brief make event shared interprocess and assigned a name + * @param [in] event event to be shared + * @param [in] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + */ +RTS_API rtError_t rtIpcSetEventName(rtEvent_t event, char *name, uint32_t len); + +/** + * @ingroup dvrt_event + * @brief open a interprocess shared event + * @param [in|out] event event to be opened + * @param [in] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of ptr, name + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtIpcOpenEvent(rtEvent_t *event, const char *name); + +/** + * @ingroup dvrt_event + * @brief Create a 
notify + * @param [in] device_id device id + * @param [in|out] notify_ notify to be created + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + */ +RTS_API rtError_t rtNotifyCreate(int32_t device_id, rtNotify_t *notify_); + +/** + * @ingroup dvrt_event + * @brief Destroy a notify + * @param [in] notify_ notify to be destroyed + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtNotifyDestroy(rtNotify_t notify_); + +/** + * @ingroup dvrt_event + * @brief Record a notify + * @param [in] notify_ notify to be recorded + * @param [in] stream_ input stream + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + * @return RT_ERROR_INVALID_DEVICE for stream is not in current ctx + */ +RTS_API rtError_t rtNotifyRecord(rtNotify_t notify_, rtStream_t stream_); + +/** + * @ingroup dvrt_event + * @brief Wait for a notify + * @param [in] notify_ notify to be wait + * @param [in] stream_ input stream + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + * @return RT_ERROR_INVALID_DEVICE for stream is not in current ctx + */ +RTS_API rtError_t rtNotifyWait(rtNotify_t notify_, rtStream_t stream_); + +/** + * @ingroup dvrt_event + * @brief Name a notify + * @param [in] notify_ notify to be named + * @param [in|out] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + */ +RTS_API rtError_t rtNameNotify(rtNotify_t notify_, const char *name); + +/** + * @ingroup dvrt_event + * @brief get notify id + * @param [in] notify_ notify to be get + * @param [in|out] notify_id notify id + * @return RT_ERROR_NONE for ok + * 
@return RT_ERROR_INVALID_VALUE for error input + */ +RTS_API rtError_t rtGetNotifyID(rtNotify_t notify_, uint32_t *notify_id); + +/** + * @ingroup dvrt_event + * @brief Set a notify to IPC notify + * @param [in] notify_ notify to be set to IPC notify + * @param [in] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of + */ +RTS_API rtError_t rtIpcSetNotifyName(rtNotify_t notify, char *name, uint32_t len); + +/** + * @ingroup dvrt_event + * @brief Open IPC notify + * @param [in] notify notify to be opened + * @param [in] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + */ +RTS_API rtError_t rtIpcOpenNotify(rtNotify_t *notify, const char *name); + +/** + * @ingroup dvrt_event + * @brief Get the physical address corresponding to notify + * @param [in] notify notify to be queried + * @param [in] devAddrOffset device physical address offset + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtNotifyGetAddrOffset(rtNotify_t notify, uint64_t *devAddrOffset); + +/** + * @ingroup dvrt_event + * @brief Ipc set notify pid + * @param [in] name name to be queried + * @param [in] pid process id + * @param [in] num length of pid[] + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtSetIpcNotifyPid(const char *name, int32_t pid[], int num); + +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_EVENT_H__ diff --git a/third_party/fwkacllib/inc/runtime/kernel.h b/third_party/fwkacllib/inc/runtime/kernel.h new file mode 100644 index 00000000..8dd0e0e3 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/kernel.h @@ -0,0 +1,409 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., 
Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __CCE_RUNTIME_KERNEL_H__ +#define __CCE_RUNTIME_KERNEL_H__ + +#include "base.h" +#include "stream.h" + +#ifdef __cplusplus +extern "C" { +#endif + + +/** + * @ingroup rt_kernel + * @brief shared memory data control + */ +typedef struct tagRtSmData { + uint64_t L2_mirror_addr; // preload or swap source address + uint32_t L2_data_section_size; // every data size + uint8_t L2_preload; // 1 - preload from mirrorAddr, 0 - no preload + uint8_t modified; // 1 - data will be modified by kernel, 0 - no modified + uint8_t priority; // data priority + int8_t prev_L2_page_offset_base; // remap source section offset + uint8_t L2_page_offset_base; // remap destination section offset + uint8_t L2_load_to_ddr; // 1 - need load out, 0 - no need + uint8_t reserved[2]; // reserved +} rtSmData_t; + + +/** + * @ingroup rt_kernel + * @brief shared memory description + */ +typedef struct tagRtSmCtrl { + rtSmData_t data[8]; // data description + uint64_t size; // max page Num + uint8_t remap[64]; /* just using for static remap mode, default:0xFF + * array index: virtual l2 page id, array value: physic l2 page id */ + uint8_t l2_in_main; // 0-DDR, 1-L2, default:0xF + uint8_t reserved[3]; +} rtSmDesc_t; + +typedef rtSmDesc_t rtL2Ctrl_t; + +/** + * @ingroup rt_kernel + * @brief device binary type + */ +typedef struct tagRtDevBinary { + uint32_t magic; /**< magic number */ + uint32_t version; /**< version of binary */ 
+ const void *data; /**< binary data */ + uint64_t length; /**< binary length */ +} rtDevBinary_t; + +/** + * @ingroup rt_kernel + * @brief function mode type + */ +typedef enum { + FUNC_MODE_NORMAL = 0, + FUNC_MODE_PCTRACE_USERPROFILE_RECORDLOOP, + FUNC_MODE_PCTRACE_USERPROFILE_SKIPLOOP, + FUNC_MODE_PCTRACE_CYCLECNT_RECORDLOOP, + FUNC_MODE_PCTRACE_CYCLECNT_SKIPLOOP, + FUNC_MODE_BUTT +} rtFuncModeType_t; + +/** + * @ingroup rt_kernel + * @brief kernel info + */ +typedef struct rtKernelInfo { + uint64_t task_offset; // kernel offset in module + /* flowtable */ + void *arg; // launch kernel arg + uint32_t arg_size; + /* module */ + void *module_addr; // module::baseaddr_ + uint32_t module_size; +} * rtKernelInfo_t; + +/** + * @ingroup rt_KernelConfigDump + * @brief device dump type + */ +typedef enum tagRtDumpKind { + RT_DATA_DUMP_KIND_INVALID = -1, + RT_DATA_DUMP_KIND_DUMP = 0, + RT_DATA_DUMP_KIND_RESERVED +} rtDumpKind_t; + +/** + * @ingroup rt_kernel + * @brief report callback + */ +typedef rtError_t (*rtKernelReportCallback)(rtStream_t stream, rtKernelInfo_t kernelInfo); + +/** + * @ingroup rt_kernel + * @brief magic number of plain binary for aicore + */ +#define RT_DEV_BINARY_MAGIC_PLAIN 0xabceed50 + +/** + * @ingroup rt_kernel + * @brief magic number of plain binary for aicpu + */ +#define RT_DEV_BINARY_MAGIC_PLAIN_AICPU 0xabceed51 + +/** + * @ingroup rt_kernel + * @brief magic number of plain binary for aivector + */ +#define RT_DEV_BINARY_MAGIC_PLAIN_AIVEC 0xabceed52 + +/** + * @ingroup rt_kernel + * @brief magic number of elf binary for aicore + */ +#define RT_DEV_BINARY_MAGIC_ELF 0x43554245 + +/** + * @ingroup rt_kernel + * @brief magic number of elf binary for aicpu + */ +#define RT_DEV_BINARY_MAGIC_ELF_AICPU 0x41415243 + +/** + * @ingroup rt_kernel + * @brief magic number of elf binary for aivector + */ +#define RT_DEV_BINARY_MAGIC_ELF_AIVEC 0x41415246 + +/** + * @ingroup rt_kernel_flags + * @brief kernel op bit flags + */ +#define RT_KERNEL_DEFAULT 
(0x00) +#define RT_KERNEL_CONVERT (0x01) +#define RT_KERNEL_DUMPFLAG (0x02) + +/** + * @ingroup rt_kernel + * @brief register device binary + * @param [in] bin device binary description + * @param [out] handle device binary handle + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. + */ +RTS_API rtError_t rtDevBinaryRegister(const rtDevBinary_t *bin, void **handle); + +/** + * @ingroup rt_kernel + * @brief register fast memeory device binary + * @param [in] handle device binary handle + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. + */ +RTS_API rtError_t rtBinaryRegisterToFastMemory(void *handle); + +/** + * @ingroup rt_kernel + * @brief unregister device binary + * @param [in] handle device binary handle + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. + */ +RTS_API rtError_t rtDevBinaryUnRegister(void *handle); + +/** + * @ingroup rt_kernel + * @brief register device binary metadata + * @param [in] handle device binary description + * @param [in] metadata device binary metadata + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. + */ +RTS_API rtError_t rtMetadataRegister(void *handle, const char *metadata); + +/** + * @ingroup rt_kernel + * @brief register device binary dependency + * @param [in] mHandle master device binary description + * @param [in] sHandle slave device binary description + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. 
+ */ +RTS_API rtError_t rtDependencyRegister(void *mHandle, void *sHandle); + +/** + * @ingroup rt_kernel + * @brief register device function + * @param [in] binHandle device binary handle + * @param [in] stubFunc stub function + * @param [in] stubName stub function name + * @param [in] devFunc device function description. symbol name or address + * offset, depending binary type. + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. + */ +RTS_API rtError_t rtFunctionRegister(void *binHandle, const void *stubFunc, const char *stubName, const void *devFunc, + uint32_t funcMode); + +/** + * @ingroup rt_kernel + * @brief find stub function by name + * @param [in] stubName stub function name + * @param [out] stubFunc stub function + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtGetFunctionByName(const char *stubName, void **stubFunc); + +/** + * @ingroup rt_kernel + * @brief query registered or not by stubName + * @param [in] stubName stub function name + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtQueryFunctionRegistered(const char *stubName); + +/** + * @ingroup rt_kernel + * @brief config data dump + * @param [in] dumpSizePerBlock dump size + * @param [in] blockDim block dimentions + * @param [in] dumpBaseAddr dump base address + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtKernelConfigDump(uint32_t kind, uint32_t dumpSizePerBlock, uint32_t blockDim, void **dumpBaseAddr, + rtStream_t stream_); + +/** + * @ingroup rt_kernel + * @brief launch kernel to device + * @param [in] stubFunc stub function + * @param [in] blockDim block dimentions + * @param [in] args argments address for kernel function + * @param [in] argsSize argements size + * @param [in] smDesc shared memory description + * @param [in] stream associated stream + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtKernelLaunch(const void *stubFunc, uint32_t 
blockDim, void *args, uint32_t argsSize, + rtSmDesc_t *smDesc, rtStream_t stream); + +/** + * @ingroup rt_kernel + * @brief launch kernel to device + * @param [in] stubFunc stub function + * @param [in] blockDim block dimentions + * @param [in] args argments address for kernel function + * @param [in] argsSize argements size + * @param [in] smDesc shared memory description + * @param [in] stream associated stream + * @param [in] flag dump flag + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtKernelLaunchWithFlag(const void *stubFunc, uint32_t blockDim, void *args, uint32_t argsSize, + rtSmDesc_t *smDesc, rtStream_t stream, uint32_t flags); + +/** + * @ingroup rt_kernel + * @brief launch kernel to device + * @param [in] args argments address for kernel function + * @param [in] argsSize argements size + * @param [in] flags launch flags + * @param [in] stream associated stream + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtKernelLaunchEx(void *args, uint32_t argsSize, uint32_t flags, rtStream_t stream); + + +/** + * @ingroup rt_kernel + * @brief launch cpu kernel to device + * @param [in] soName so name + * @param [in] kernelName kernel name + * @param [in] blockDim block dimentions + * @param [in] args argments address for kernel function + * @param [in] argsSize argments size + * @param [in] smDesc shared memory description + * @param [in] stream associated stream + * @retval RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtCpuKernelLaunch(const void *soName, const void *kernelName, uint32_t blockDim, const void *args, + uint32_t argsSize, rtSmDesc_t *smDesc, rtStream_t stream); + +/** + * @ingroup rt_kernel + * @brief launch cpu kernel to device with dump identifier + * @param [in] soName so name + * @param [in] kernelName kernel name + * @param [in] blockDim block dimentions + * @param [in] args argments address for kernel function + * @param [in] argsSize argments size + * @param [in] 
smDesc shared memory description + * @param [in] stream associated stream + * @param [in] flag dump flag or others function flag + * @retval RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtCpuKernelLaunchWithFlag(const void *soName, const void *kernelName, uint32_t blockDim, + const void *args, uint32_t argsSize, rtSmDesc_t *smDesc, + rtStream_t stream, uint32_t flags); + +/** + * @ingroup rt_kernel + * @brief load dump info to aicpu + * @param [in] dumpInfo dump info + * @param [in] length length of dump info + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtDatadumpInfoLoad(const void *dumpInfo, uint32_t length); + +#ifndef __CLANG_CCE_RUNTIME_H__ +#define __CLANG_CCE_RUNTIME_H__ +/** + * @ingroup rt_kernel + * @brief configure call argment for next rtLaunch in current thread + * @param [in] numBlocks block dimentions + * @param [in] smDesc shared memory description + * @param [in] stream associated stream + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. + */ +#ifdef __cplusplus +RTS_API rtError_t rtConfigureCall(uint32_t numBlocks, rtSmDesc_t *smDesc = nullptr, rtStream_t stream = nullptr); +#else +RTS_API rtError_t rtConfigureCall(uint32_t numBlocks, rtSmDesc_t *smDesc, rtStream_t stream); +#endif +#endif // __CLANG_CCE_RUNTIME_H__ + +/** + * @ingroup rt_kernel + * @brief setup argment for next rtLaunch in current thread + * @param [in] arg argment address for kernel function + * @param [in] size argment size + * @param [in] offset argment table offset + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. 
+ */ +RTS_API rtError_t rtSetupArgument(const void *arg, uint32_t size, uint32_t offset); + +/** + * @ingroup rt_kernel + * @brief launch kernel to device with previous setting kernel argment + * and call argment + * @param [in] stubFunc stub function + * @return RT_ERROR_NONE for ok + * @note:if this interface is changed, pls notify the compiler changing at the same time. + */ +RTS_API rtError_t rtLaunch(const void *stubFunc); + +/** + * @ingroup rt_kernel + * @brief implicitly transfered data to device. + * lifecycle end after next kernel task finish + * @param [in] ptr host memory + * @param [in] size host memory size + * @param [in] flag reserved. set to 0 + * @param [out] arg returned arg. used for next kernel's arg. + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtKernelConfigTransArg(const void *ptr, uint64_t size, uint32_t flag, void **arg); + +/** + * @ingroup rt_kernel + * @brief start fusion kernels. + * @param [in] stream stream for fusion kernels + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtKernelFusionStart(rtStream_t stream); + +/** + * @ingroup rt_kernel + * @brief end fusion kernels. 
+ * @param [in] stream stream for fusion kernels + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtKernelFusionEnd(rtStream_t stream); + +/** + * @ingroup rt_kernel + * @brief set kernelinfo callback + * @param [in] callback + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtSetKernelReportCallback(rtKernelReportCallback callBack); + +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_KERNEL_H__ diff --git a/third_party/fwkacllib/inc/runtime/mem.h b/third_party/fwkacllib/inc/runtime/mem.h new file mode 100644 index 00000000..c1ec7776 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/mem.h @@ -0,0 +1,449 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CCE_RUNTIME_MEM_H__ +#define __CCE_RUNTIME_MEM_H__ + +#include +#include "base.h" +#include "config.h" +#include "stream.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @ingroup dvrt_mem + * @brief memory type + */ +#define RT_MEMORY_DEFAULT ((uint32_t)0x0) // default memory on device +#define RT_MEMORY_HBM ((uint32_t)0x2) // HBM memory on device +#define RT_MEMORY_DDR ((uint32_t)0x4) // DDR memory on device +#define RT_MEMORY_SPM ((uint32_t)0x8) // shared physical memory on device +#define RT_MEMORY_P2P_HBM ((uint32_t)0x10) // HBM memory on other 4P device +#define RT_MEMORY_P2P_DDR ((uint32_t)0x11) // DDR memory on other device> +#define RT_MEMORY_DDR_NC ((uint32_t)0x20) // DDR memory of non-cache> +#define RT_MEMORY_RESERVED ((uint32_t)0x40) + +/** + * @ingroup dvrt_mem + * @brief memory Policy + */ +#define RT_MEMORY_POLICY_NONE ((uint32_t)0x0) // Malloc mem prior hage page, then default page +#define RT_MEMORY_POLICY_HUGE_PAGE_FIRST ((uint32_t)0x1 << 10) // Malloc mem prior hage page, then default page +#define RT_MEMORY_POLICY_HUGE_PAGE_ONLY ((uint32_t)0x1 << 11) // Malloc mem only use hage page +#define RT_MEMORY_POLICY_DEFAULT_PAGE_ONLY ((uint32_t)0x1 << 12) // Malloc mem only use default page + +#define MEM_ALLOC_TYPE_BIT ((uint32_t)0x3FF) // mem type bit in <0, 9> + +/** + * @ingroup dvrt_mem + * @brief memory type | memory Policy + */ +typedef uint32_t rtMemType_t; + +/** + * @ingroup dvrt_mem + * @brief memory advise type + */ +#define RT_MEMORY_ADVISE_EXE (0x02) +#define RT_MEMORY_ADVISE_THP (0x04) +#define RT_MEMORY_ADVISE_PLE (0x08) +#define RT_MEMORY_ADVISE_PIN (0x16) + +/** + * @ingroup dvrt_mem + * @brief memory copy type + */ +typedef enum tagRtMemcpyKind { + RT_MEMCPY_HOST_TO_HOST = 0, // host to host + RT_MEMCPY_HOST_TO_DEVICE, // host to device + RT_MEMCPY_DEVICE_TO_HOST, // device to host + RT_MEMCPY_DEVICE_TO_DEVICE, // device to device, 1P && P2P + RT_MEMCPY_MANAGED, // managed memory + RT_MEMCPY_RESERVED, +} 
rtMemcpyKind_t; + +typedef enum tagRtRecudeKind { + RT_MEMCPY_SDMA_AUTOMATIC_ADD = 10, // D2D, SDMA inline reduce, include 1P, and P2P + RT_RECUDE_KIND_END +} rtRecudeKind_t; + +typedef enum tagRtDataType { + RT_DATA_TYPE_FP32 = 0, // fp32 + RT_DATA_TYPE_END +} rtDataType_t; + +/** + * @ingroup dvrt_mem + * @brief memory copy channel type + */ +typedef enum tagRtMemcpyChannelType { + RT_MEMCPY_CHANNEL_TYPE_INNER = 0, // 1P + RT_MEMCPY_CHANNEL_TYPE_PCIe, + RT_MEMCPY_CHANNEL_TYPE_HCCs, // not support now + RT_MEMCPY_CHANNEL_TYPE_RESERVED, +} rtMemcpyChannelType_t; + +/** + * @ingroup rt_kernel + * @brief ai core memory size + */ +typedef struct rtAiCoreMemorySize { + uint32_t l0ASize; + uint32_t l0BSize; + uint32_t l0CSize; + uint32_t l1Size; + uint32_t ubSize; + uint32_t l2Size; + uint32_t l2PageNum; + uint32_t blockSize; + uint64_t bankSize; + uint64_t bankNum; + uint64_t burstInOneBlock; + uint64_t bankGroupNum; +} rtAiCoreMemorySize_t; + +/** + * @ingroup dvrt_mem + * @brief memory type + */ +typedef enum tagRtMemoryType { RT_MEMORY_TYPE_HOST = 1, RT_MEMORY_TYPE_DEVICE = 2 } rtMemoryType_t; + +/** + * @ingroup dvrt_mem + * @brief memory attribute + */ +typedef struct tagRtPointerAttributes { + rtMemoryType_t memoryType; // host memory or device memory + uint32_t deviceID; // device ID + uint32_t isManaged; + uint32_t pageSize; +} rtPointerAttributes_t; + +/** + * @ingroup dvrt_mem + * @brief alloc device memory + * @param [in|out] devPtr memory pointer + * @param [in] size memory size + * @param [in] type memory type + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_MEMORY_ALLOCATION for memory allocation failed + */ +RTS_API rtError_t rtMalloc(void **devPtr, uint64_t size, rtMemType_t type); + +/** + * @ingroup dvrt_mem + * @brief free device memory + * @param [in|out] devPtr memory pointer + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_DEVICE_POINTER for error device memory pointer + */ +RTS_API rtError_t rtFree(void *devPtr); + +/** + * 
@ingroup dvrt_mem + * @brief alloc device memory for dvpp + * @param [in|out] devPtr memory pointer + * @param [in] size memory size + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_MEMORY_ALLOCATION for memory allocation failed + */ +RTS_API rtError_t rtDvppMalloc(void **devPtr, uint64_t size); + +/** + * @ingroup dvrt_mem + * @brief free device memory for dvpp + * @param [in|out] devPtr memory pointer + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_DEVICE_POINTER for error device memory pointer + */ +RTS_API rtError_t rtDvppFree(void *devPtr); + +/** + * @ingroup dvrt_mem + * @brief alloc host memory + * @param [in|out] hostPtr memory pointer + * @param [in] size memory size + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_MEMORY_ALLOCATION for memory allocation failed + */ +RTS_API rtError_t rtMallocHost(void **hostPtr, uint64_t size); + +/** + * @ingroup dvrt_mem + * @brief free host memory + * @param [in] hostPtr memory pointer + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_DEVICE_POINTER for error device memory pointer + */ +RTS_API rtError_t rtFreeHost(void *hostPtr); + +/** + * @ingroup dvrt_mem + * @brief alloc managed memory + * @param [in|out] ptr memory pointer + * @param [in] size memory size + * @param [in] flag reserved, set to 0. 
+ * @return RT_ERROR_NONE for ok
+ * @return RT_ERROR_MEMORY_ALLOCATION for memory allocation failed
+ */
+RTS_API rtError_t rtMemAllocManaged(void **ptr, uint64_t size, uint32_t flag);
+
+/**
+ * @ingroup dvrt_mem
+ * @brief free managed memory
+ * @param [in] ptr memory pointer
+ * @return RT_ERROR_NONE for ok
+ * @return RT_ERROR_INVALID_DEVICE_POINTER for error device memory pointer
+ */
+RTS_API rtError_t rtMemFreeManaged(void *ptr);
+
+/**
+ * @ingroup dvrt_mem
+ * @brief advise memory
+ * @param [in] ptr memory pointer
+ * @param [in] size memory size
+ * @param [in] advise memory advise
+ * @return RT_ERROR_NONE for ok
+ * @return RT_ERROR_INVALID_DEVICE_POINTER for error device memory pointer
+ */
+RTS_API rtError_t rtMemAdvise(void *ptr, uint64_t size, uint32_t advise);
+
+/**
+ * @ingroup dvrt_mem
+ * @brief flush device memory
+ * @param [in] base virtual base address
+ * @param [in] len memory size
+ * @return RT_ERROR_NONE for ok, errno for failed
+ */
+RTS_API rtError_t rtFlushCache(uint64_t base, uint32_t len);
+
+/**
+ * @ingroup dvrt_mem
+ * @brief invalid device memory
+ * @param [in] base virtual base address
+ * @param [in] len memory size
+ * @return RT_ERROR_NONE for ok, errno for failed
+ */
+RTS_API rtError_t rtInvalidCache(uint64_t base, uint32_t len);
+
+/**
+ * @ingroup dvrt_mem
+ * @brief synchronized memcpy
+ * @param [in] dst destination address pointer
+ * @param [in] Max length of destination address memory
+ * @param [in] src source address pointer
+ * @param [in] count the number of byte to copy
+ * @param [in] kind memcpy type
+ * @return RT_ERROR_NONE for ok
+ * @return RT_ERROR_INVALID_VALUE for error input of count
+ * @return RT_ERROR_INVALID_DEVICE_POINTER for error input memory pointer of dst,src
+ * @return RT_ERROR_INVALID_MEMCPY_DIRECTION for error copy direction of kind
+ */
+RTS_API rtError_t rtMemcpy(void *dst, uint64_t destMax, const void *src, uint64_t count, rtMemcpyKind_t kind);
+
+/**
+ * @ingroup dvrt_mem
+ *
@brief asynchronized memcpy + * @param [in] dst destination address pointer + * @param [in] Max length of destination address memory + * @param [in] src source address pointer + * @param [in] count the number of byte to copy + * @param [in] kind memcpy type + * @param [in] stream asynchronized task stream + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of count,stream + * @return RT_ERROR_INVALID_DEVICE_POINTER for error input memory pointer of dst,src + * @return RT_ERROR_INVALID_MEMCPY_DIRECTION for error copy direction of kind + */ +RTS_API rtError_t rtMemcpyAsync(void *dst, uint64_t destMax, const void *src, uint64_t count, rtMemcpyKind_t kind, + rtStream_t stream); + +/** + * @ingroup dvrt_mem + * @brief asynchronized reduce memcpy + * @param [in] dst destination address pointer + * @param [in] Max length of destination address memory + * @param [in] src source address pointer + * @param [in] count the number of byte to copy + * @param [in] kind memcpy type + * @param [in] type data type + * @param [in] stream asynchronized task stream + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of count,stream + * @return RT_ERROR_INVALID_DEVICE_POINTER for error input memory pointer of dst,src + * @return RT_ERROR_INVALID_MEMCPY_DIRECTION for error copy direction of kind + */ +RTS_API rtError_t rtReduceAsync(void *dst, uint64_t destMax, const void *src, uint64_t count, rtRecudeKind_t kind, + rtDataType_t type, rtStream_t stream); + +/** + * @ingroup dvrt_mem + * @brief query memory size + * @param [in] aiCoreMemorySize + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtAiCoreMemorySizes(rtAiCoreMemorySize_t *aiCoreMemorySize); + +/** + * @ingroup dvrt_mem + * @brief set memory size, Setting before model reasoning, Bright screen to prevent model can not be fully + integrated network due to memory limitations.Requirement come from JiaMinHu.Only use for Tiny. 
+ * @param [in] aiCoreMemorySize + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtSetAiCoreMemorySizes(rtAiCoreMemorySize_t *aiCoreMemorySize); + +/** + * @ingroup dvrt_mem + * @brief set memory with uint32_t value + * @param [in] devPtr + * @param [in] Max length of destination address memory + * @param [in] value + * @param [in] count byte num + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtMemset(void *devPtr, uint64_t destMax, uint32_t value, uint64_t count); + +/** + * @ingroup dvrt_mem + * @brief set memory with uint32_t value async + * @param [in] devPtr + * @param [in] Max length of destination address memory + * @param [in] value + * @param [in] count byte num + * @param [in] stream + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtMemsetAsync(void *ptr, uint64_t destMax, uint32_t value, uint64_t count, rtStream_t stream); + +/** + * @ingroup dvrt_mem + * @brief get current device memory total and free + * @param [out] free + * @param [out] total + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtMemGetInfo(size_t *free, size_t *total); + +/** + * @ingroup dvrt_mem + * @brief set memory with uint32_t value + * @param [in] devPtr + * @param [in] len + * @param [in] device + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtMemPrefetchToDevice(void *devPtr, uint64_t len, int32_t device); + +/** + * @ingroup dvrt_mem + * @brief get memory attribute:Host or Device + * @param [in] ptr + * @param [out] attributes + * @return RT_ERROR_NONE for ok, errno for failed + */ +RTS_API rtError_t rtPointerGetAttributes(rtPointerAttributes_t *attributes, const void *ptr); + +/** + * @ingroup dvrt_mem + * @brief make memory shared interprocess and assigned a name + * @param [in] ptr device memory address pointer + * @param [in] name identification name + * @param [in] byteCount identification byteCount + * @return RT_ERROR_NONE for ok + * 
@return RT_ERROR_INVALID_VALUE for error input of ptr, name, byteCount + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtIpcSetMemoryName(const void *ptr, uint64_t byteCount, char *name, uint32_t len); + +/** + * @ingroup dvrt_mem + * @brief destroy a interprocess shared memory + * @param [in] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of name + * @return RT_ERROR_DRV_ERR for driver error + */ +rtError_t rtIpcDestroyMemoryName(const char *name); + +/** + * @ingroup dvrt_mem + * @brief open a interprocess shared memory + * @param [in|out] ptr device memory address pointer + * @param [in] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of ptr, name + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtIpcOpenMemory(void **ptr, const char *name); + +/** + * @ingroup dvrt_mem + * @brief close a interprocess shared memory + * @param [in] ptr device memory address pointer + * @param [in] name identification name + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of ptr, name + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtIpcCloseMemory(const void *ptr); + +/** + * @ingroup dvrt_mem + * @brief HCCL Async memory cpy + * @param [in] index sq index + * @param [in] wqe_index moudle index + * @param [in] stream asynchronized task stream + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input of ptr, name + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtRDMASend(uint32_t index, uint32_t wqe_index, rtStream_t stream); + +/** + * @ingroup dvrt_mem + * @brief Set the memory readCount value + * @param [in] devPtr memory pointer + * @param [in] size memory size + * @param [in] readCount readCount value + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return 
RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtMemSetRC(const void *devPtr, uint64_t size, uint32_t readCount); + +/** + * @ingroup dvrt_mem + * @brief Ipc set mem pid + * @param [in] name name to be queried + * @param [in] pid process id + * @param [in] num length of pid[] + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + * @return RT_ERROR_DRV_ERR for driver error + */ +RTS_API rtError_t rtSetIpcMemPid(const char *name, int32_t pid[], int num); + +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_MEM_H__ diff --git a/third_party/fwkacllib/inc/runtime/rt.h b/third_party/fwkacllib/inc/runtime/rt.h new file mode 100644 index 00000000..c1872941 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/rt.h @@ -0,0 +1,31 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CCE_RUNTIME_RT_H__ +#define __CCE_RUNTIME_RT_H__ + +#include "base.h" +#include "config.h" +#include "context.h" +#include "dev.h" +#include "dvfsprofile.h" +#include "event.h" +#include "kernel.h" +#include "mem.h" +#include "rt_model.h" +#include "stream.h" + +#endif // __CCE_RUNTIME_RT_H__ \ No newline at end of file diff --git a/third_party/fwkacllib/inc/runtime/rt_model.h b/third_party/fwkacllib/inc/runtime/rt_model.h new file mode 100644 index 00000000..4e875107 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/rt_model.h @@ -0,0 +1,333 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CCE_RUNTIME_MODEL_H__ +#define __CCE_RUNTIME_MODEL_H__ + +#include "base.h" + +#ifdef __cplusplus +extern "C" { +#endif + +typedef enum tagModelTaskType { + RT_MODEL_TASK_KERNEL = 0, + RT_MODEL_TASK_EVENT_RECORD, + RT_MODEL_TASK_EVENT_WAIT, + RT_MODEL_TASK_FUSION_START, + RT_MODEL_TASK_FUSION_END, + RT_MODEL_TASK_KERNEL_EX, + RT_MODEL_TASK_HCCL, + RT_MODEL_TASK_STREAM_SWITCH, + RT_MODEL_TASK_STREAM_ACTIVE, + RT_MODEL_TASK_LABEL_SET, + RT_MODEL_TASK_LABEL_SWITCH, + RT_MODEL_TASK_LABEL_GOTO, + RT_MODEL_TASK_PROFILER_TRACE, + RT_MODEL_TASK_MEMCPY_ASYNC, + RT_MODEL_TASK_NOTIFY_RECORD, + RT_MODEL_TASK_NOTIFY_WAIT, + RT_MODEL_TASK_REDUCE_ASYNC, + RT_MODEL_TASK_RDMA_SEND, + RT_MODEL_TASK_EVENT_RESET = 18, + RT_MODEL_TASK_MODEL_END_GRAPH, + RT_MODEL_TASK_STREAM_SWITCH_N +} rtModelTaskType_t; + +typedef enum tagModelStreamType { + RT_MODEL_HEAD_STREAM = 0, + RT_MODEL_WAIT_ACTIVE_STREAM = 1 +} rtModelStreamType_t; + +#define EXECUTOR_NONE ((uint32_t)0x0) +#define EXECUTOR_TS ((uint32_t)0x01) +#define EXECUTOR_AICPU ((uint32_t)0x02) + +/** + * @ingroup + * @brief the type defination of aicpu model task command + */ +typedef enum tagTsAicpuModelCmd { + TS_AICPU_MODEL_LOAD = 1, + TS_AICPU_MODEL_EXECUTE, + TS_AICPU_MODEL_DESTROY, + TS_AICPU_MODEL_ABORT, + TS_AICPU_MODEL_RESERVED, +} tsAicpuModelCmd; + +typedef struct tagAicpuTaskInfo { + uint32_t taskID; + uint32_t streamID; + uint32_t kernelType; + uint64_t kernelName; + uint64_t kernelSo; + uint64_t paraBase; + uint32_t taskFlag; +} rtAicpuTaskInfo_t; + +typedef struct tagModelStreamInfo { + uint32_t streamID; + uint32_t streamFlag; +} rtModelStreamInfo_t; + +typedef struct tagAicpuModelInfo { + uint32_t moduleID; + uint32_t tsId; + uint16_t streamInfoNum; + uint16_t aicpuTaskNum; + uint64_t streamInfoPtr; + uint64_t aicpuTaskPtr; +} rtAicpuModelInfo_t; + +typedef struct tagKernelTaskInfo { + uint16_t blockDim; + uint16_t argsCount; + uint16_t argsSize; + uint16_t reserved; + char *stubFunc; + uint8_t 
*smDesc; + uint8_t *args; + uint16_t *argsOffset; +} rtKernelTaskInfo_t; + +typedef struct tagKernelTaskInfoEx { + uint32_t flags; + uint32_t argsSize; + void *args; + uint32_t reserved[6]; +} rtKernelTaskInfoEx_t; + +typedef struct tagEventTaskInfo { + uint32_t eventID; + uint32_t reserved[9]; +} rtEventTaskInfo_t; + +typedef struct tagStreamSwitchTaskInfo { + int64_t value; + uint64_t pValuePtr; + uint32_t trueStreamID; + uint32_t dataType; + uint32_t reserved[4]; +} rtStreamSwitchTaskInfo_t; + +typedef struct tagStreamSwitchNTaskInfo { + uint64_t pValuePtr; + uint64_t pTrueStreamPtr; + uint32_t size; + uint32_t elementSize; + uint32_t dataType; + uint32_t reserved[3]; +} rtStreamSwitchNTaskInfo_t; + +typedef struct tagStreamActiveTaskInfo { + uint32_t activeStreamID; + uint32_t reserved[9]; +} rtStreamActiveTaskInfo_t; + +typedef struct tagSetTaskInfo { + uint16_t labelId; + uint32_t reserved[9]; +} rtLabelSetTaskInfo_t; + +typedef struct tagSwitchTaskInfo { + uint32_t value; + uint32_t reserved[9]; +} rtLabelSwitchTaskInfo_t; + +typedef struct tagLabelGotoTaskInfo { + uint16_t labelId; + uint32_t reserved[9]; +} rtLabelGotoTaskInfo_t; + +typedef struct tagProfilerTraceTaskInfo { + uint64_t profilerTraceId; + uint32_t notify : 8; + uint32_t reserved_ : 24; + uint32_t flags; + uint32_t reserved[6]; +} rtProfilerTrace_t; + +typedef struct tagrtMemcpyAsyncTaskInfo { + void *dst; + uint64_t destMax; + void *src; + uint64_t count; + uint32_t kind; + uint32_t reserved; +} rtMemcpyAsyncTaskInfo_t; + +typedef struct tagrtNotifyTaskInfo { + uint32_t notifyID; + uint32_t reserved[9]; +} rtNotifyTaskInfo_t; + +typedef struct tagrtReduceAsyncTaskInfo { + void *dst; + uint64_t destMax; + void *src; + uint64_t count; + uint32_t kind; + uint32_t type; +} rtReduceAsyncTaskInfo_t; + +typedef struct tagrtRdmaSendTaskInfo { + uint32_t index; + uint32_t wqe_index; + uint32_t reserved[8]; +} rtRdmaSendTaskInfo_t; + +typedef struct tagrtModelEndGraphTaskInfo { + uint32_t modelId; + 
uint32_t executorFlag; + uint32_t reserved[8]; +} rtModelEndGraphTaskInfo_t; + +typedef struct tagTaskInfo { + uint32_t type; + uint32_t streamID; + union { + rtKernelTaskInfoEx_t kernelTaskEx; + rtKernelTaskInfo_t kernelTask; + rtEventTaskInfo_t eventTask; + rtStreamSwitchTaskInfo_t streamSwitchTask; + rtStreamActiveTaskInfo_t streamActiveTask; + rtLabelSetTaskInfo_t labelSetTask; + rtLabelSwitchTaskInfo_t labelSwitchTask; + rtLabelGotoTaskInfo_t labelGotoTask; + rtProfilerTrace_t profilertraceTask; + rtMemcpyAsyncTaskInfo_t memcpyAsyncTask; + rtNotifyTaskInfo_t notifyTask; + rtReduceAsyncTaskInfo_t reduceAsyncTask; + rtRdmaSendTaskInfo_t rdmaSendTask; + rtModelEndGraphTaskInfo_t modelEndGraphTask; + rtStreamSwitchNTaskInfo_t streamSwitchNTask; + uint32_t reserved[10]; + } u; +} rtTaskInfo_t; + +typedef void *rtModel_t; +typedef rtError_t (*rtTaskGenCallback)(rtModel_t model, rtTaskInfo_t *taskInfo); + +/** + * @ingroup rt_model + * @brief set callback for generate model + * @param [in] callBack callback function + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtSetTaskGenCallback(rtTaskGenCallback callback); + +/** + * @ingroup rt_model + * @brief create model instance + * @param [out] model created model + * @param [in] flag reserved + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtModelCreate(rtModel_t *model, uint32_t flag); + +/** + * @ingroup rt_model + * @brief destroy model instance + * @param [in] model model to destroy + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtModelDestroy(rtModel_t model); + +/** + * @ingroup rt_model + * @brief bind model and stream instance + * @param [in] model binded model + * @param [in] stream binded stream + * @param [in] flag reserved + * @return RT_ERROR_NONE for ok + * @return 
RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtModelBindStream(rtModel_t model, rtStream_t stream, uint32_t flag); + +/** + * @ingroup rt_model + * @brief unbind model and stream instance + * @param [in] model unbinded model + * @param [in] stream unbinded stream + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtModelUnbindStream(rtModel_t model, rtStream_t stream); + +/** + * @ingroup rt_model + * @brief tell runtime Model has been Loaded + * @param [in] model model to execute + * @return RT_ERROR_NONE for ok + */ +RTS_API rtError_t rtModelLoadComplete(rtModel_t model); + +/** + * @ingroup rt_model + * @brief execute model instance + * @param [in] model model to execute + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input handle + */ +RTS_API rtError_t rtModelExecute(rtModel_t model, rtStream_t stream, uint32_t flag); + +/** + * @ingroup rt_model + * @brief get model the last persist task id + * @param [in] model model to execute + * @param [out] taskid task id of the model + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input handle + */ +RTS_API rtError_t rtModelGetTaskId(rtModel_t model, uint32_t *taskid); + +/** + * @ingroup rt_model + * @brief add a end graph task to stream + * @param [in] model model to execute + * @param [in] end graph stream + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input handle + */ +RTS_API rtError_t rtEndGraph(rtModel_t model, rtStream_t stream); + +/** + * @ingroup rt_model + * @brief add a end graph task to stream + * @param [in] model model to execute + * @param [in] flags EXECUTOR_TS | EXECUTOR_AICPU + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input handle + */ +RTS_API rtError_t rtModelExecutorSet(rtModel_t model, uint8_t flags); + +/** + * @ingroup rt_model + * @brief abort model + 
* @param [in] model model to abort + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_VALUE for error input handle + */ +RTS_API rtError_t rtModelAbort(rtModel_t model); + +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_MODEL_H__ diff --git a/third_party/fwkacllib/inc/runtime/stream.h b/third_party/fwkacllib/inc/runtime/stream.h new file mode 100644 index 00000000..83bb4b63 --- /dev/null +++ b/third_party/fwkacllib/inc/runtime/stream.h @@ -0,0 +1,197 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __CCE_RUNTIME_STREAM_H__ +#define __CCE_RUNTIME_STREAM_H__ + +#include "base.h" +#include "event.h" + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @ingroup stream_flags + * @brief stream op bit flags + */ +#define RT_STREAM_DEFAULT (0x00) +#define RT_STREAM_PERSISTENT (0x01) +#define RT_STREAM_FORCE_COPY (0x02) +#define RT_STREAM_HUGE (0x04) +#define RT_STREAM_AICPU (0x08) +#define RT_STREAM_FORBIDDEN_DEFAULT (0x10) +#define RT_STREAM_HEAD (0x20) + +/** + * priority level default value when create a stream + */ +#define RT_STREAM_PRIORITY_DEFAULT (0) + +/** + * @ingroup dvrt_stream + * @brief create stream instance + * @param [in|out] stream created stream + * @param [in] priority stream priority + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input stream handle + * @return RT_ERROR_INVALID_VALUE for error input priority + */ +RTS_API rtError_t rtStreamCreate(rtStream_t *stream, int32_t priority); + +/** + * @ingroup dvrt_stream + * @brief create stream instance + * @param [in|out] stream created stream + * @param [in] priority stream priority + * @param [in] flags stream op flags + * @return RT_ERROR_NONE for ok + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input stream handle + * @return RT_ERROR_INVALID_VALUE for error input priority + */ +RTS_API rtError_t rtStreamCreateWithFlags(rtStream_t *stream, int32_t priority, uint32_t flags); + +/** + * @ingroup dvrt_stream + * @brief destroy stream instance. 
+ * @param [in] stream the stream to destroy
+ * @return RT_ERROR_NONE for ok
+ * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input stream handle
+ */
+RTS_API rtError_t rtStreamDestroy(rtStream_t stream);
+
+/**
+ * @ingroup dvrt_stream
+ * @brief wait a recorded event for stream
+ * @param [in] stream the wait stream
+ * @param [in] event the event to wait
+ * @return RT_ERROR_NONE for ok
+ * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input stream or event handle
+ */
+RTS_API rtError_t rtStreamWaitEvent(rtStream_t stream, rtEvent_t event);
+
+/**
+ * @ingroup dvrt_stream
+ * @brief wait stream to be complete
+ * @param [in] stream stream to wait
+ * @return RT_ERROR_NONE for ok
+ * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input stream or event handle
+ */
+RTS_API rtError_t rtStreamSynchronize(rtStream_t stream);
+
+/**
+ * @ingroup dvrt_stream
+ * @brief queries an asynchronous stream for completion status
+ * @param [in] stream stream to query
+ * @return RT_ERROR_NONE for complete
+ * @return RT_ERROR_NOT_READY for not complete
+ */
+RTS_API rtError_t rtStreamQuery(rtStream_t stream);
+
+/**
+ * @ingroup dvrt_stream
+ * @brief get stream id from a stream handle
+ * @param [in] stream stream handle
+ * @param [in] streamId stream id
+ * @return RT_ERROR_NONE for complete
+ * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input stream handle
+ */
+RTS_API rtError_t rtGetStreamId(rtStream_t stream, int32_t *streamId);
+
+/**
+ * @ingroup dvrt_stream
+ * @brief inquire max stream count and max task count per stream
+ * @param [in] MaxStrCount Max stream count
+ * @param [in] MaxTaskCount max task count per stream
+ * @return RT_ERROR_NONE for complete
+ * @return RT_ERROR_INVALID_RESOURCE_HANDLE for error input stream handle
+ */
+RTS_API rtError_t rtGetMaxStreamAndTask(uint32_t *MaxStrCount, uint32_t *MaxTaskCount);
+
+/**
+ * @ingroup dvrt_stream
+ * @brief Name a stream
+ * @param [in] stream_ stream to be named
+ * @param [in]
name identification name + * @return RT_ERROR_NONE for complete + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + */ +RTS_API rtError_t rtNameStream(rtStream_t stream_, const char *name); + +/** + * @ingroup dvrt_stream + * @brief switch to the corresponding stream according to the contents of the ptr + * @param [in] ptr Determine the address where the value of the true and false branches is located + * @param [in] condition switch condition + * @param [in] value switch value + * @param [in] true_stream Stream that needs to be activated when the value is non-zero + * @param [in] stream input stream to init task + * @return RT_ERROR_NONE for complete + * @return RT_ERROR_INVALID_VALUE for error input + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + * @return RT_ERROR_INVALID_DEVICE for invalid device handle + * @return ERROR_RECYCLE for switching task init failed or submit failed + */ +RTS_API rtError_t rtStreamSwitch(void *ptr, rtCondition_t condition, int64_t value, rtStream_t true_stream, + rtStream_t stream); + +/** + * @brief execute extensible stream switch task + * @param [in] ptr pointer of value + * @param [in] condition judge condition + * @param [in] value_ptr pointer of target value + * @param [in] true_stream stream to be activated when value is not zero + * @param [in] stream stream id + * @param [in] dataType data type of target value + * @return RT_ERROR_NONE for complete + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for not complete + */ +RTS_API rtError_t rtStreamSwitchEx(void *ptr, rtCondition_t condition, void *value_ptr, rtStream_t true_stream, + rtStream_t stream, rtSwitchDataType_t dataType); + +/** + * @ingroup dvrt_stream + * @brief Active a stream + * @param [in] active_stream stream to be activated + * @param [in] stream input stream to init task + * @return RT_ERROR_NONE for complete + * @return RT_ERROR_INVALID_VALUE for error input + * 
@return RT_ERROR_INVALID_RESOURCE_HANDLE for invalid resource handle + * @return RT_ERROR_INVALID_DEVICE for invalid device handle + * @return ERROR_RECYCLE for switching task init failed or submit failed + */ +RTS_API rtError_t rtStreamActive(rtStream_t active_stream, rtStream_t stream); + +/** + * @brief execute extensible stream case switch task + * @param [in] ptr pointer of value + * @param [in] size pointer num of value + * @param [in] valuePtr pointer of target value, length = size * elementSize + * @param [in] trueStreamPtr streams to be activated + * @param [in] elementSize size of to be activated true streams + * @param [in] stream input stream to init task + * @param [in] dataType data type of target value + * @return RT_ERROR_NONE for complete + * @return RT_ERROR_INVALID_RESOURCE_HANDLE for not complete + */ +RTS_API rtError_t rtStreamSwitchN(void *ptr, uint32_t size, void *valuePtr, rtStream_t *trueStreamPtr, + uint32_t elementSize, rtStream_t stream, rtSwitchDataType_t dataType); +#ifdef __cplusplus +} +#endif + +#endif // __CCE_RUNTIME_STREAM_H__ diff --git a/third_party/fwkacllib/inc/tdt/data_common.h b/third_party/fwkacllib/inc/tdt/data_common.h new file mode 100644 index 00000000..6ea9362f --- /dev/null +++ b/third_party/fwkacllib/inc/tdt/data_common.h @@ -0,0 +1,75 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */
+
+#ifndef HOST_INNER_INC_DATA_COMMON_H_
+#define HOST_INNER_INC_DATA_COMMON_H_
+
+namespace tdt {
+#ifndef TDT_DATA_TYPE
+#define TDT_DATA_TYPE
+
+/**
+ * @ingroup Tdt data.
+ *
+ * Tdt data type.
+ */
+enum TdtDataType {
+  TDT_IMAGE_LABEL = 0,  /**< Image label*/
+  TDT_T_R,
+  TDT_DATA_LABEL,       /**< Data label*/
+  TDT_END_OF_SEQUENCE,  /**< End of Sequence*/
+  TDT_TENSOR,           /**< Tensor*/
+  TDT_DATATYPE_MAX      /**< Max*/
+};
+#endif
+
+/**
+ * @ingroup Tdt data.
+ *
+ * Tdt push data between host and device.
+ */
+struct TdtDataItem {
+  TdtDataType dataType_;           /**< Input data type*/
+  uint64_t label_;                 /**< Input data label*/
+  uint64_t dataLen_;               /**< Input data type length*/
+  std::string tensorShape_;        /**< Tensor shape*/
+  std::string tensorType_;         /**< Tensor type*/
+  uint32_t cnt_;                   /**< Data count*/
+  uint32_t currentCnt_;            /**< Data current count*/
+  uint64_t index_;                 /**< Data index*/
+  std::string tensorName_;         /**< Tensor name*/
+  uint64_t md5ValueHead_;          /**< Data md5*/
+  uint64_t md5ValueTail_;          /**< Data md5*/
+  std::shared_ptr<void> dataPtr_;  /**< Data pointer*/
+  std::string headMD5_;            /**< MD5 header, 8byte*/
+  std::string tailMD5_;            /**< MD5 tail, 8byte*/
+};
+
+/**
+ * @ingroup Tdt data.
+ *
+ * Tdt push data for queuedataset or mind-data.
+ */ +struct DataItem { + TdtDataType dataType_; /**< Input data type*/ + std::string tensorName_; /**< Tensor name*/ + std::string tensorShape_; /**< Tensor shape*/ + std::string tensorType_; /**< Tensor type*/ + uint64_t dataLen_; /**< Input data type length*/ + std::shared_ptr dataPtr_; /**< Data pointer*/ +}; +} // namespace tdt +#endif diff --git a/third_party/fwkacllib/inc/tdt/status.h b/third_party/fwkacllib/inc/tdt/status.h new file mode 100644 index 00000000..1be204e6 --- /dev/null +++ b/third_party/fwkacllib/inc/tdt/status.h @@ -0,0 +1,694 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INC_TDT_STATUS_H_ +#define INC_TDT_STATUS_H_ + +#include + +#ifdef __cplusplus +#include +#include +#include +#include +#else +#include +#endif + +#ifdef __cplusplus +using TDT_StatusT = uint32_t; +#else +typedef uint32_t TDT_StatusT; +#endif + +#ifndef TDT_LIB_EXPORT +#define TDT_LIB_EXPORT __attribute__((visibility("default"))) +#endif +/** + * @ingroup tdt status. 
+ * + * Tdt debug level + */ +enum { + TDT_DEBUG = 0, /**< Debug*/ + TDT_INFO = 1, /**< Info*/ + TDT_WARNING = 2, /**< Warning*/ + TDT_ERROR = 3, /**< Error*/ + TDT_FATAL = 4, /**< Fatal*/ + TDT_EVENT = 5, /**< Event*/ + TDT_OPLOG = 6, /**< Oplog*/ + TDT_TRACE = 7 /**< Trace*/ +}; + +enum { + TDT_OK_CODE = 0, + TDT_DEBUG_INFO_CODE, + TDT_INTERNAL_ERROR_CODE, + TDT_COMMON_WARNING_CODE, + TDT_PREFETCH_STOPED_CODE, + TDT_FILE_SIZE_TOO_LARGE_CODE, + TDT_FILE_INVALID_PATH_CODE, + TDT_MEMORY_EXHAUSTED_CODE, + TDT_INTERGER_REVERSED_CODE, + TDT_FILE_NOT_EXIST_CODE, + TDT_DEFAULT_CONFIG_FILE_NOT_EXIST_CODE, + TDT_INSTANCE_NOT_INITIALED_CODE, + TDT_INITIAL_FAILED_CODE, + TDT_INSTANCE_NOT_FOUND_CODE, + TDT_HDC_CREATE_SESSION_FAILED_CODE, + TDT_HDC_DESTROY_SESSION_FAILED_CODE, + TDT_HDC_SESSION_DO_NOT_EXIST_CODE, + TDT_PID_IS_EXIST_CODE, + TDT_HDC_SRV_INIT_ERROR_CODE, + TDT_HDC_SRV_CREATE_ERROR_CODE, + TDT_HDC_SRV_DESTROY_ERROR_CODE, + TDT_HDC_SRV_ACCEPT_ERROR_CODE, + TDT_HDC_SRV_CLOSED_ERROR_CODE, + TDT_HDC_INTERNAL_ERROR_CODE, + TDT_HDC_INFO_CODE, + TDT_HDC_SEND_ERROR_CODE, + TDT_MESSAGE_PARSE_ERROR_CODE, + TDT_HDC_SEG_SIZE_ERROR_CODE, + TDT_HDC_MESSAGE_NULL_CODE, + TDT_HDC_SEARFUNC_IS_NULL_CODE, + TDT_HDC_SENDMSG_FAILED_CODE, + TDT_HDC_SRV_CLOSE_CHILD_SESSION_ERROR_CODE, + TDT_HDC_SRV_CLOSE_SERVER_SESSION_ERROR_CODE, + TDT_HDC_SRV_HEART_BEAT_TIMEOUT_CODE, // 30 + TDT_HDC_DRV_ERROR_CODE, + TDT_HDC_SERVER_CLIENT_SOCKET_CLOSED_CODE, + TDT_TSD_START_FAIL_CODE, + TDT_TSD_CLEANPROC_FIRST_GETPID_FAILED_CODE, + TDT_TSD_CLEANPROC_KILL_PROCESS_FAILED_CODE, + TDT_TSD_CLEANPROC_SECOND_GETPID_FAILED_CODE, + TDT_TSD_CLEANPROC_FINAL_FAILED_CODE, + TDT_TSD_INIT_STATE_FAILED_CODE, + TDT_TSD_INIT_HDCSERVER_FAILED_CODE, + TDT_TSD_SEND_HEARTBEAT_FAILED_CODE, + TDT_TSD_CLEAN_RESOURCE_FAILED_CODE, + TDT_TSD_SEND_MSG_FAILED_CODE, + TDT_PPC_DRIVER_INIT_FAIL_CODE, + TDT_PPC_SERVER_CLIENT_CREATE_FAIL_CODE, + TDT_PPC_SERVER_CLIENT_DESTORY_FAIL_CODE, + TDT_PPC_SERVER_CLOSE_CODE, + 
TDT_PPC_GET_SET_MSG_BUFFER_FAIL_CODE, + TDT_PPC_SESSION_CONNECT_FAIL_CODE, // 40 + TDT_PPC_SESSION_NOT_EXISTED_CODE, + TDT_PPC_SEND_RECEIVE_MSG_FAIL_CODE, + TDT_PPC_MSG_FREE_FAIL_CODE, + TDT_PPC_ALLOC_MSG_FAIL_CODE, + TDT_PPC_MSG_LEN_NOT_MATCH_CODE, + TDT_PPC_MSG_BUF_NULL_CODE, + TDT_PPC_CLIENT_INVALID_PARAM_CODE, + TDT_PPC_SERVER_INVALID_PARAM_CODE, + TDT_PPC_CLIENT_RECVDATA_CONTINUE_CODE, + TDT_PPC_SERVER_CLIENT_SOCKET_CLOSED_CODE, // 50 + TDT_PPC_RECV_MSG_ERROR_CODE, + TDT_PPC_SESSION_CLOSE_ERROR_CODE, + TDT_SHUFFLE_SHUFFLE_SIZE_ILLEGAL_CODE, + TDT_SHUFFLE_ONLINE_UNIQUE_SEED_ILLEGAL_CODE, + TDT_SHUFFLE_UNABLE_TO_CREATE_SHUFFLE_LIST_CODE, + TDT_SHUFFLE_ILLEGAL_SHUFFLE_TYPE_CODE, + TDT_PREFETCH_ILLEGAL_DATATYPE_CODE, + TDT_SUPERVISOR_UNKOWN_JOB_STATE_CODE, + TDT_MAP_BUFFER_ERROR_CODE, + TDT_ALLOC_BUFFER_FAILED_CODE, + TDT_FREE_HDC_BUFFER_FAILED_CODE, + TDT_DATA_SIZE_WRONG_CODE, + TDT_MEMORY_POOL_INITED_CODE, + TDT_SENDMSG_FAILED_CODE, + TDT_INVALID_VALUE_CODE, + TDT_NO_USEFUL_MEMORY_CODE, + TDT_MESSAGE_NULL_CODE, + TDT_MEMORY_POOL_STOPPED_CODE, + TDT_HDC_MEMORY_ADDR_NOT_ALIGN_CODE, + TDT_MEMORY_POOL_GET_NULL_CODE, + TDT_MEMORY_POOL_NOT_EXISTED_CODE, + TDT_RECOVER_DATA_FAILED_CODE, + TDT_MEMORY_STATUS_ERROR_CODE, + TDT_MEMORY_POOL_UPDATE_FAILED_CODE, + TDT_MEMORY_POOL_RESIZE_FAILED_CODE, + TDT_MEMORY_DESTROY_FAILED_CODE, + TDT_EXCEED_MAX_THREAD_CODE, + TDT_WARNING_SET_THREAD_NAME_FAILED_CODE, + TDT_WRONG_PRIORITY_CODE, + TDT_JOIN_TASK_ERROR_CODE, + TDT_NULL_FUNC_CODE, + TDT_INIT_FAIL_CODE, + TDT_EXISTED_FUNC_CODE, + TDT_FILE_GET_FILE_STATE_FAIL_CODE, + TDT_FILE_OPEN_FILE_FAIL_CODE, + TDT_FILE_FILE_DESTROYED_CODE, + TDT_FILE_UNABLE_TO_GET_FILE_MEMORY_CODE, + TDT_PREFETCH_UNABLE_TO_GET_TDTDATAITEM_CODE, + TDT_HDCSERVER_DO_NOT_EXIST_CODE, + TDT_HDCSESSIONID_NOT_AVAILABLE_CODE, + TDT_SET_HDCSESSION_REFERENCE_FAILED_CODE, + TDT_HDC_RECV_MSG_ERROR_CODE, + TDT_HDC_SEND_MSG_ERROR_CODE, + TDT_FILE_CONTENT_EMPTY_CODE, + TDT_TDTSEVER_ACCEPT_FAILED_CODE, + 
TDT_CHANNEL_DO_NOT_EXIST_CODE, + TDT_NULL_POINTER_MSG_CODE, + TDT_TRAN_UNKNOWN_RSP_CODE, + TDT_TRAN_TIMEOUT_CODE, + TDT_TRAN_NOT_EXIST_CODE, + TDT_TRAN_ID_GEN_ERROR_CODE, + TDT_SEND_CHANNEL_FAILED_CODE, + TDT_SEND_CHANNEL_TIMEOUT_CODE, + TDT_QUEUE_STOPPED_CODE, + TDT_QUEUE_POP_FAILED_CODE, + TDT_QUEUE_PUSH_FAILED_CODE, + TDT_QUEUE_NOT_FIND_CODE, + TDT_QUEUE_CREATE_FAILED_CODE, + TDT_QUEUE_FULL_CODE, + TDT_QUEUE_EMPTY_CODE, + TDT_DATA_ENTO_CP_FAILED_CODE, + TDT_STOP_CP_QUEUE_FAILED_CODE, + TDT_RECV_MSG_NO_CHANNEL_INFO_ERROR_CODE, + TDT_CHANNEL_HAS_NO_SESSION_ERROR_CODE, + TDT_PREFETCH_SAMPLE_HAS_NO_LABEL_CODE, + TDT_HDC_CLIENT_INIT_ERROR_CODE, + TDT_HDC_CLIENT_CREATE_SESSION_ERROR_CODE, + TDT_HDC_CLIENT_DO_NOT_EXIST_CODE, + TDT_HDC_CLIENT_DESTROY_ERROR_CODE, + TDT_BIND_CPUCORE_FAILED_CODE, + TDT_HDC_CLIENT_CLOSED_CODE, + TDT_HDC_SRV_CLOSED_CODE, + TDT_HDC_SRV_TYPE_ERROR_CODE, + TDT_TSD_CLT_OPEN_FAILED_CODE, + TDT_TSD_CLT_CLOSE_FAILED_CODE, + TDT_SUPERVISOR_ILLEGAL_HEARTBEAT_TIME_CODE, + TDT_SUPERVISOR_INOTIFY_READ_SIZE_ERROR_CODE, + TDT_SUPERVISOR_INOTIFY_INTERRUPT_CODE, + TDT_SUPERVISOR_INOTIFY_INIT_ERROR_CODE, + TDT_SUPERVISOR_CLOSE_INOTIFYFD_FAIL_CODE, + TDT_SUPERVISOR_INOTIFY_WATCH_ERROR_CODE, + TDT_TRANSFER_CANNOT_OPEN_CONFIGFILE_CODE, + TDT_TRANSFER_PARSE_FILE_FAILED_CODE, + TDT_TRANSFER_NO_CHANNEL_DATA_CODE, + TDT_PREFETCH_CREATE_FAILED_CODE, + TDT_TRANSFER_NO_PARAMETER_CODE, + TDT_TRANSFER_NO_PARAMETER_ARG_CODE, + TDT_FILE_TYPE_UNSUPPORT_CODE, + TDT_FILE_DIR_IS_NULL_CODE, + TDT_FILE_GET_DIR_TREE_ERROR_CODE, + TDT_FILE_CANNOT_OPEN_DIR_CODE, + TDT_PREFETCH_SAMPLE_CANNOT_BE_READ_CODE, + TDT_PREFETCH_DATA_QUEUE_IS_CLOSED_CODE, + TDT_PREFETCH_GET_SHUFFLE_RESULT_FAIL_CODE, + TDT_FILE_CANNOT_DFREE_FILE_MEMORY_CODE, + TDT_TRANSFER_CREATE_DELIVER_FAILED_CODE, + TDT_TRANSFER_TRAIN_DATA_DELIVER_IS_NULLPTR_CODE, + TDT_TRANSFER_EMPTY_GROUPNAME_IN_MULTI_GROUPS_CODE, + TDT_TRANSFER_DUPLICATE_GROUPNAME_CODE, + TDT_TRANSFER_DUPLICATE_DEVICE_CODE, + 
TDT_TRANSFER_FIND_DEVICE_FAIL_CODE, + TDT_SUPERVISOR_FAIL_TO_WRITE_PID_FILE_CODE, + TDT_SUPERVISOR_HEARTBEAT_FILE_NOT_INITED_CODE, + TDT_SUPERVISOR_JOB_COMMAND_FILE_NOT_INITED_CODE, + TDT_SUPERVISOR_JOB_STATE_FILE_NOT_INITED_CODE, + TDT_PREFETCH_LABEL_FILE_NOT_INITED_CODE, + TDT_PREFETCH_SAMPLE_FILE_DIR_NOT_INITED_CODE, + TDT_PREFETCH_NOT_INITED_CODE, + TDT_PREFETCH_SHUFFLER_NOT_CREATED_CODE, + TDT_SHUFFLE_NOT_INITED_CODE, + TDT_PREFETCH_SHUFFLED_ITEM_OUT_OF_FILE_LIST_CODE, + TDT_TRANSFER_INIT_FAILED_CODE, + TDT_TRANSFER_START_FAILED_CODE, + TDT_FOLDER_CANNOT_BE_CREATED_CODE, + TDT_CANNOT_GET_STAT_OF_FOLDER_CODE, + TDT_FOLDER_IS_FILE_CODE, + TDT_TRANSFER_CONFIG_FIEL_SYNTAX_ERROR_CODE, + TDT_CHECKSUM_ILLEGAL_MD5_PARAM_CODE, + TDT_CHECKSUM_MD5_INIT_FAILED_CODE, + TDT_CHECKSUM_MD5_UPDATE_FAILED_CODE, + TDT_CHECKSUM_MD5_FINAL_FAILED_CODE, + TDT_TRANSFER_DELIVER_IS_NONE_CODE, + TDT_SUPERVISOR_FAIL_TO_DEL_JOB_CMD_FILE_CODE, + TDT_TRANSFER_FAIL_TO_GET_ENV_VARIABLE_CODE, + TDT_MONITOR_INOTIFY_INIT_ERROR_CODE, + TDT_MONITOR_INOTIFY_WATCH_ERROR_CODE, + TDT_MONITOR_CLOSE_INOTIFYFD_FAIL_CODE, + TDT_MONITOR_INOTIFY_READ_SIZE_ERROR_CODE, + TDT_MONITOR_UNSUPPORT_CFGITEM_CODE, + TDT_MONITOR_FAIL_TO_SET_CFGITEM_CODE, + TDT_MONITOR_READ_FILE_FAIL_CODE, + TDT_MONITOR_CONFIG_FILE_FORMAT_ERROR_CODE, + TDT_MONITOR_STRCAT_FAILED_CODE, + TDT_MONITOR_CREATE_CONFIG_FILE_FAIL_CODE, + TDT_PREFETCH_FAIL_TO_GENERATE_MD5_CODE, + TDT_RECV_MSG_MD5_WRONG_CODE, + TDT_RECV_MSG_FAIL_TO_GENERATE_MD5_CODE, + TDT_RECV_MSG_SEQUENCE_ERROR_CODE, + TDT_DEVICEID_ERROR_CODE, + TDT_MEMORY_DATA_TYPE_FACTORY_MAKE_SHARED_FAILED_CODE, + TDT_PREFETCH_FILELIST_NOT_EXIST_CODE, + TDT_PREFETCH_SAMPLE_FILE_NOT_FOUND_CODE, + TDT_PREFETCH_FILE_OPEN_FAIL_CODE, + TDT_PREFETCH_FILE_STAT_FAIL_CODE, + TDT_PREFETCH_FILE_MMAP_FAIL_CODE, + TDT_PREFETCH_FILE_UNMAP_FAIL_CODE, + TDT_PREFETCH_FILE_CLOSE_FAIL_CODE, + TDT_PREFETCH_FILE_PARSE_FAIL_CODE, + TDT_PREFETCH_CRC32_SIZE_FAIL_CODE, + TDT_PREFETCH_CRC32_DATA_FAIL_CODE, + 
TDT_PREFETCH_DATA_QUEUE_CLOSED_CODE, + TDT_PREFETCH_INITIALIZE_FAILED_CODE, + TDT_PREFETCH_MAP_INSERT_FAILED_CODE, + TDT_PREFETCH_INVALID_FILELIST_LINE_CODE, + TDT_FILE_STRINGSTREAM_TO_VALUE_FAILED_CODE, + TDT_LIST_ID_OFFSET_LENGTH_POSITIVE_INTEGER_FAILED_CODE, + TDT_SHUFFLE_ILLEGAL_SHUFFLE_PARAM_CODE, + TDT_FILE_SHUFFLER_CREATE_FAILED_CODE, + TDT_FILE_UPLOADER_CREATE_FAILED_CODE, + TDT_FILE_DOWNLOADER_CREATE_FAILED_CODE, + TDT_OBS_CONFIG_INFORMATION_FAIL_CODE, + TDT_OBS_CALLBACK_ARGUMENT_FAIL_CODE, + TDT_OBS_DOWNLOAD_CREATE_THREAD_FAILED_CODE, + TDT_OBS_DOWNLOAD_FILE_FAIL_CODE, + TDT_OBS_DOWNLOAD_INIT_FAIL_CODE, + TDT_OBS_DOWNLOAD_METADATA_FAIL_CODE, + TDT_OBS_LIST_BUCKET_OBJECTS_FAIL_CODE, + TDT_MEMORY_MEMCPY_FAILED_CODE, + TDT_MEMORY_MEMSET_FAILED_CODE, + TDT_MKDIR_CMD_FAILED_CODE, + TDT_CP_CMD_FAILED_CODE, + TDT_HOST_INIT_FAILED_CODE, + TDT_HOST_CHANNEL_NAME_EMPTY_CODE, + TDT_HOST_ALLOCATE_MEMORY_FAILED_CODE, + TDT_HOST_MEMORY_COPY_FAILED_CODE, + TDT_HOST_UNABLE_GET_TDTDATAELEM_CODE, + TDT_HOST_PUSH_NOT_INIT_CODE, + TDT_TUNING_DATA_TRANSFER_INIT_FAILED_CODE, + TDT_TUNING_DATA_RECEIVE_CHECK_PARA_ERROR_CODE, + TDT_TUNING_DATA_TRANSFER_PARAMETER_ERROR_CODE, + TDT_RECV_MSG_CHECKSUM_WRONG_ERROR_CODE, + TDT_STATUS_CODE_TOTAL +}; + +/** + * @ingroup Tdt status + * @brief Regiter error code + * @param moduleId [IN] Module ID + * @param logLevel [IN] Log level + * @param CODE_NAME [out] Error name + * @param codeDesc [IN] Error description + */ +#ifdef __cplusplus +#define TDT_DEF_ERROR_CODE(moduleId, logLevel, CODE_NAME, codeDesc) \ + constexpr TDT_StatusT CODE_NAME = ((0xFFFF & ((uint16_t)moduleId)) << 16) | \ + (0xF000 & (((uint16_t)logLevel) << 12)) | (0x0FFF & (CODE_NAME##_CODE)); \ + const tdt::ErrorNoRegisterar g_##CODE_NAME##_errorno(CODE_NAME, codeDesc); +#else +#define TDT_DEF_ERROR_CODE(moduleId, logLevel, CODE_NAME, codeDesc) \ + static const TDT_StatusT CODE_NAME = \ + ((0xFFFF & ((uint16_t)moduleId)) << 16) | (0xF000 & (((uint16_t)logLevel) << 12)) | 
(0x0FFF & CODE_NAME##_CODE); +#endif + +/** + * @ingroup Tdt status + * @brief Get error level according error name + * @param CODE_NAME [IN] Error code + * @param codeDesc [OUT] Error description + */ +#define TDT_GET_ERROR_LEVEL(CODE_NAME) ((CODE_NAME & 0x0000F000) >> 12) + +#ifdef __cplusplus +#define TDT_GET_ERROR_STR(CODE_NAME) (tdt::StatusFactory::GetInstance()->GetErrDesc(CODE_NAME)) +#endif + +constexpr uint16_t MODID_TDT_CLIENT = 0x0101; +constexpr uint16_t MODID_TSD_SERVER = 0x0102; // TSD_SERVER +constexpr uint16_t MODID_HDC = 0x0103; // HDC_SERVER +constexpr uint16_t MODID_TDT_SHUFFLE = 0x0104; +constexpr uint16_t MODID_TDT_PREFETCH = 0x0105; +constexpr uint16_t MODID_TDT_TRANSFER = 0x0106; +constexpr uint16_t MODID_TDT_SUPERVISOR = 0x0107; +constexpr uint16_t MODID_MEM_POOL = 0x0108; // MEMORY_POOL +constexpr uint16_t MODID_PPC = 0x0109; // TDT PPC +constexpr uint16_t MODID_TDT_FILE = 0x0110; +constexpr uint16_t MODID_HDC_SERVER = 0x0111; +constexpr uint16_t MODID_TDT_SERVER = 0x0112; +constexpr uint16_t MODID_HDC_CLIENT = 0x0113; +constexpr uint16_t MODID_TSD_CLIENT = 0x0114; +constexpr uint16_t MODID_CHECKSUM = 0x0115; +constexpr uint16_t MODID_TDT_MONITOR = 0x0116; +constexpr uint16_t MODID_TDT_HOST = 0x0117; + +constexpr uint32_t TDT_API_MAX_SUB_VERSION = 100; +static const int32_t TDT_INVAILED_DEVICE_ID = 0xFFFFFFFF; + +typedef enum tdt_api_version { + TDT_API_VERSION_V1_00 = 100, + TDT_API_VERSION_V1_01 = 101, + TDT_API_VERSION_V2_00 = 200 +} TDT_API_VERSION; + +#ifdef __cplusplus +namespace tdt { +class StatusFactory { + public: + TDT_LIB_EXPORT static StatusFactory *GetInstance(); + + TDT_LIB_EXPORT void RegisterErrorNo(const uint32_t err, const std::string &desc); + + std::string GetErrDesc(const uint32_t err); + + static std::string GetErrCodeDesc(uint32_t errCode); + + protected: + StatusFactory(); + + ~StatusFactory() {} + + StatusFactory(const StatusFactory &) = delete; + StatusFactory(StatusFactory &&) = delete; + StatusFactory 
&operator=(const StatusFactory &) = delete; + StatusFactory &operator=(StatusFactory &&) = delete; + + static std::mutex &GetMutex(); + + private: + std::mutex rwMutex_; + std::map errDesc_; +}; + +class ErrorNoRegisterar { + public: + ErrorNoRegisterar(const uint32_t &err, const std::string &desc) { + StatusFactory::GetInstance()->RegisterErrorNo(err, desc); + } + + ~ErrorNoRegisterar() {} + ErrorNoRegisterar(const ErrorNoRegisterar &) = delete; + ErrorNoRegisterar(ErrorNoRegisterar &&) = delete; + ErrorNoRegisterar &operator=(const ErrorNoRegisterar &) = delete; + ErrorNoRegisterar &operator=(ErrorNoRegisterar &&) = delete; +}; +} // namespace tdt +#endif + +// register error code +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_INFO, TDT_OK, "running ok"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_DEBUG, TDT_DEBUG_INFO, "debug info"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_INTERNAL_ERROR, "internal error"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_WARNING, TDT_COMMON_WARNING, "warnging"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_INFO, TDT_PREFETCH_STOPED, "stopped"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_FILE_NOT_EXIST, "File is not existed"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_DEFAULT_CONFIG_FILE_NOT_EXIST, "Default config file not exist"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_FILE_SIZE_TOO_LARGE, "file size is too large"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_FILE_INVALID_PATH, "file path is invalid"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_MEMORY_EXHAUSTED, "memory exhausted error"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_INTERGER_REVERSED, "interger reached reverse"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_INSTANCE_NOT_INITIALED, + "call member function before instance initialed"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_INITIAL_FAILED, "initial failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_INSTANCE_NOT_FOUND, 
"instance not found"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_HDC_CREATE_SESSION_FAILED, "create hdc session failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_HDC_DESTROY_SESSION_FAILED, "destory hdc session failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_HDC_SESSION_DO_NOT_EXIST, "hdc session id do not exist"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_PID_IS_EXIST, "tdtMain pid is exist"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SRV_INIT_ERROR, "hdc server init error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SRV_CREATE_ERROR, "hdc server create error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SRV_DESTROY_ERROR, "hdc server destroy error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SRV_ACCEPT_ERROR, "hdc server accept error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SRV_CLOSED_ERROR, "hdc server closed error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_INTERNAL_ERROR, "hdc fail"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_DEVICEID_ERROR, "hdc device id error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SRV_CLOSE_CHILD_SESSION_ERROR, "hdc server close child session error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SEARFUNC_IS_NULL, "serarfunc is null"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SENDMSG_FAILED, "hdc send msg failed"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SRV_CLOSE_SERVER_SESSION_ERROR, + "hdc server close server session error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SRV_HEART_BEAT_TIMEOUT, "hdc server heart beat timeout"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_DRV_ERROR, "hiai drv return error"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_INFO, TDT_HDC_INFO, "hdc info"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SEND_ERROR, "hdc send message failed"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_HDC_SEG_SIZE_ERROR, "hiai seg size error"); +TDT_DEF_ERROR_CODE(MODID_HDC, 
TDT_ERROR, TDT_HDC_MESSAGE_NULL, "Message input is null"); +TDT_DEF_ERROR_CODE(MODID_HDC, TDT_ERROR, TDT_MESSAGE_PARSE_ERROR, "hdc message parse error"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_HDCSERVER_DO_NOT_EXIST, "hdc server do not exist"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_HDCSESSIONID_NOT_AVAILABLE, "hdc sessionid vector is empty"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_SET_HDCSESSION_REFERENCE_FAILED, + "hdc set hdc session reference failed"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_HDC_RECV_MSG_ERROR, "hdc recv message failed"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_HDC_SEND_MSG_ERROR, "hdc send message failed"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_HDC_SRV_TYPE_ERROR, "hdc service type is not supported"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_HDC_SERVER_CLIENT_SOCKET_CLOSED, + "hdc service or client socket closed"); + +/*********************TSDAEMON************************/ +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_START_FAIL, "Tsdaemon start fail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_CLEANPROC_FIRST_GETPID_FAILED, "Tsdaemon first get pid fail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_CLEANPROC_KILL_PROCESS_FAILED, "Tsdaemon kill processfail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_CLEANPROC_SECOND_GETPID_FAILED, "Tsdaemon second get pid fail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_CLEANPROC_FINAL_FAILED, "Tsdaemon clean process final fail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_INIT_STATE_FAILED, "Tsdaemon init state fail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_INIT_HDCSERVER_FAILED, "Tsdaemon init hdcserver fail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_SEND_HEARTBEAT_FAILED, "Tsdaemon get pid fail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_CLEAN_RESOURCE_FAILED, "Tsdaemon clean 
resource fail"); +TDT_DEF_ERROR_CODE(MODID_TSD_SERVER, TDT_ERROR, TDT_TSD_SEND_MSG_FAILED, "Tsdaemon send msg fail"); + +/********************* PPC ****************************/ +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_DRIVER_INIT_FAIL, "Init PPC driver fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SERVER_CLIENT_CREATE_FAIL, "Create PPC server or PPC client fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SERVER_CLIENT_DESTORY_FAIL, "Destory PPC server or PPC client fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SERVER_CLOSE, "PPC server is closed"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_GET_SET_MSG_BUFFER_FAIL, "PPC get or set msg buffer fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SESSION_CONNECT_FAIL, "PPC connect is failed"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SESSION_NOT_EXISTED, "PPC session is not existed"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SEND_RECEIVE_MSG_FAIL, "PPC send or receive msg fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_MSG_FREE_FAIL, "PPC msg free fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_ALLOC_MSG_FAIL, "PPC alloc memory for msg fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_MSG_LEN_NOT_MATCH, "PPC message length not match"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_MSG_BUF_NULL, "PPC message buffer is null"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_CLIENT_INVALID_PARAM, "PPC message client invalid param fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SERVER_INVALID_PARAM, "PPC message server invalid param fail"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_CLIENT_RECVDATA_CONTINUE, + "PPC message client receive not expected msg continue"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SERVER_CLIENT_SOCKET_CLOSED, + "PPC message server receive server or client socket closed msg"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_RECV_MSG_ERROR, "PPC receive msg 
failed"); +TDT_DEF_ERROR_CODE(MODID_PPC, TDT_ERROR, TDT_PPC_SESSION_CLOSE_ERROR, "PPC close session failed"); + +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_GET_FILE_STATE_FAIL, "can not get file state"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_OPEN_FILE_FAIL, "can not open file"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_CONTENT_EMPTY, "file content is empty"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_FILE_DESTROYED, "file is destroyed"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_UNABLE_TO_GET_FILE_MEMORY, "fail to get memory for file"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_TYPE_UNSUPPORT, "file type is not supported"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_DIR_IS_NULL, "pointer to dir is null"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_GET_DIR_TREE_ERROR, "can not get the tree of dir"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_CANNOT_OPEN_DIR, "dir cannot be opened"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_CANNOT_DFREE_FILE_MEMORY, "DFree memory of file failed"); + +TDT_DEF_ERROR_CODE(MODID_TDT_SHUFFLE, TDT_ERROR, TDT_SHUFFLE_SHUFFLE_SIZE_ILLEGAL, + "shuffle size is less or equal to 0"); +TDT_DEF_ERROR_CODE(MODID_TDT_SHUFFLE, TDT_ERROR, TDT_SHUFFLE_ONLINE_UNIQUE_SEED_ILLEGAL, + "online unique seed is equal to 0"); +TDT_DEF_ERROR_CODE(MODID_TDT_SHUFFLE, TDT_ERROR, TDT_SHUFFLE_UNABLE_TO_CREATE_SHUFFLE_LIST, + "unable to create shuffle list"); +TDT_DEF_ERROR_CODE(MODID_TDT_SHUFFLE, TDT_ERROR, TDT_SHUFFLE_ILLEGAL_SHUFFLE_TYPE, "illegal shuffle type"); +TDT_DEF_ERROR_CODE(MODID_TDT_SHUFFLE, TDT_ERROR, TDT_SHUFFLE_NOT_INITED, "shuffler has not been inited"); + +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_SAMPLE_HAS_NO_LABEL, "the sample has no label"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_SAMPLE_CANNOT_BE_READ, "the sample cannot be read"); 
+TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_ILLEGAL_DATATYPE, "illegal data type"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_CREATE_FAILED, "creating prefetcher failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_UNABLE_TO_GET_TDTDATAITEM, "fail to get TDTDataItem"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_DATA_QUEUE_IS_CLOSED, "data queue is closed"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_GET_SHUFFLE_RESULT_FAIL, "fail to get shuffle result"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_LABEL_FILE_NOT_INITED, "label file has not been inited"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_SAMPLE_FILE_DIR_NOT_INITED, + "directory of sample files has not been inited"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_NOT_INITED, "prefetcher in deliver has not been inited"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_SHUFFLER_NOT_CREATED, + "shuffler in prefetcher has not been created"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_SHUFFLED_ITEM_OUT_OF_FILE_LIST, + "shuffled item is out of file list"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_FAIL_TO_GENERATE_MD5, "fail to generate md5 of data"); +TDT_DEF_ERROR_CODE(MODID_CHECKSUM, TDT_ERROR, TDT_CHECKSUM_ILLEGAL_MD5_PARAM, "params to generate md5 is illegal"); +TDT_DEF_ERROR_CODE(MODID_CHECKSUM, TDT_ERROR, TDT_CHECKSUM_MD5_INIT_FAILED, "md5_init failed"); +TDT_DEF_ERROR_CODE(MODID_CHECKSUM, TDT_ERROR, TDT_CHECKSUM_MD5_UPDATE_FAILED, "md5_update failed"); +TDT_DEF_ERROR_CODE(MODID_CHECKSUM, TDT_ERROR, TDT_CHECKSUM_MD5_FINAL_FAILED, "md5_final failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_CANNOT_OPEN_CONFIGFILE, "can not open config file"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_PARSE_FILE_FAILED, "parse file failed"); 
+TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_NO_CHANNEL_DATA, + "no channel can be found in config file"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_NO_PARAMETER, "no parameter can be found"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_NO_PARAMETER_ARG, + "the argment is not --configfile or stop"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_CREATE_DELIVER_FAILED, + "fail to create train data deliver"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_TRAIN_DATA_DELIVER_IS_NULLPTR, + "train data deliver in the list is nullptr"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_INIT_FAILED, "train data deliver init failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_START_FAILED, "train data deliver start failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_CONFIG_FIEL_SYNTAX_ERROR, + "config file has syntax error"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_DELIVER_IS_NONE, "no deliver is existed"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_MKDIR_CMD_FAILED, "mkdir cmd failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_CP_CMD_FAILED, "cp cmd failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_EMPTY_GROUPNAME_IN_MULTI_GROUPS, "empty group_name"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_DUPLICATE_GROUPNAME, + "the same group_name already exists"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_DUPLICATE_DEVICE, "the same device already exists"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_ERROR, TDT_TRANSFER_FIND_DEVICE_FAIL, "cannot find device"); + +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_WARNING, TDT_SUPERVISOR_INOTIFY_INTERRUPT, "inotify is interrupted"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_UNKOWN_JOB_STATE, "unknow job state"); 
+TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_ILLEGAL_HEARTBEAT_TIME, "illegal heartbeat time"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_INOTIFY_READ_SIZE_ERROR, + "read size of inotify is error"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_INOTIFY_INIT_ERROR, + "Initialization of inotify failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_CLOSE_INOTIFYFD_FAIL, "Close inotifyFd failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_INOTIFY_WATCH_ERROR, "Add watch of inotify failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_FAIL_TO_WRITE_PID_FILE, "fail to write pid file"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_HEARTBEAT_FILE_NOT_INITED, + "heart beat file has not been inited"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_JOB_COMMAND_FILE_NOT_INITED, + "job command file has not been inited"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_JOB_STATE_FILE_NOT_INITED, + "job state file has not been inited"); +TDT_DEF_ERROR_CODE(MODID_TDT_SUPERVISOR, TDT_ERROR, TDT_SUPERVISOR_FAIL_TO_DEL_JOB_CMD_FILE, + "fail to delete job command file"); +TDT_DEF_ERROR_CODE(MODID_TDT_TRANSFER, TDT_WARNING, TDT_TRANSFER_FAIL_TO_GET_ENV_VARIABLE, + "can not get environment variable"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_ERROR, TDT_MONITOR_INOTIFY_INIT_ERROR, "Initialization of inotify failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_ERROR, TDT_MONITOR_INOTIFY_WATCH_ERROR, "Add watch of inotify failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_ERROR, TDT_MONITOR_CLOSE_INOTIFYFD_FAIL, "Close inotifyFd failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_WARNING, TDT_MONITOR_INOTIFY_READ_SIZE_ERROR, + "read size of inotify is not correct"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_WARNING, TDT_MONITOR_UNSUPPORT_CFGITEM, "unsupported config item"); 
+TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_WARNING, TDT_MONITOR_FAIL_TO_SET_CFGITEM, "can not set local config item"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_ERROR, TDT_MONITOR_READ_FILE_FAIL, "read file fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_ERROR, TDT_MONITOR_CONFIG_FILE_FORMAT_ERROR, + "config file is incorrectly formatted"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_ERROR, TDT_MONITOR_STRCAT_FAILED, "strcat failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_MONITOR, TDT_ERROR, TDT_MONITOR_CREATE_CONFIG_FILE_FAIL, + "create ConfigFile pointer failed"); + +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MAP_BUFFER_ERROR, "host buffer map to device failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_ALLOC_BUFFER_FAILED, "memory pool alloc buffer failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_DATA_SIZE_WRONG, "Input datasize is wrong"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_FREE_HDC_BUFFER_FAILED, "memory pool free buffer failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_INVALID_VALUE, "invalid parameter"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_NO_USEFUL_MEMORY, "no usable memory in memory pool"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MESSAGE_NULL, "recv msg is null"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_SENDMSG_FAILED, "send msg failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MEMORY_POOL_STOPPED, "mempool has stopped"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_HDC_MEMORY_ADDR_NOT_ALIGN, "buffer not aligned"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MEMORY_POOL_INITED, "memory pool has inited"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MEMORY_POOL_GET_NULL, "mempool not exist"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MEMORY_POOL_NOT_EXISTED, "mempool not exist"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_RECOVER_DATA_FAILED, "Recover recv data failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, 
TDT_MEMORY_STATUS_ERROR, "Memory status error"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MEMORY_POOL_UPDATE_FAILED, "update memory pool status failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MEMORY_POOL_RESIZE_FAILED, "resize memory pool status failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_EXCEED_MAX_THREAD, "thread size is too large"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_WARNING, TDT_WARNING_SET_THREAD_NAME_FAILED, "rename thread failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_WARNING, TDT_WRONG_PRIORITY, "priority is invalid"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_WARNING, TDT_JOIN_TASK_ERROR, "join task failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_WARNING, TDT_NULL_FUNC, "func is null"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_WARNING, TDT_INIT_FAIL, "sear/dear init failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_WARNING, TDT_EXISTED_FUNC, "func has already existed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MEMORY_DESTROY_FAILED, "mempool destroy failed"); +TDT_DEF_ERROR_CODE(MODID_MEM_POOL, TDT_ERROR, TDT_MEMORY_DATA_TYPE_FACTORY_MAKE_SHARED_FAILED, + "data type factory make shared failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_TDTSEVER_ACCEPT_FAILED, "tdt server accept hdc session failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_CHANNEL_DO_NOT_EXIST, "channel do not exist"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_NULL_POINTER_MSG, "message is null"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_TRAN_UNKNOWN_RSP, "transcation status error"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_TRAN_TIMEOUT, "transcation time out"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_TRAN_NOT_EXIST, "transcation requst id is not exist"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_TRAN_ID_GEN_ERROR, "transcation generateid failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_SEND_CHANNEL_FAILED, "send channel info failed"); 
+TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_SEND_CHANNEL_TIMEOUT, "send channel info time out"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_INFO, TDT_QUEUE_STOPPED, "queue has been stopped"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_INFO, TDT_QUEUE_POP_FAILED, "failed to pop data from queue"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_INFO, TDT_QUEUE_PUSH_FAILED, "failed to push data from queue"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_QUEUE_CREATE_FAILED, "queue create fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_QUEUE_NOT_FIND, "queue not find"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_QUEUE_FULL, "queue is full"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_QUEUE_EMPTY, "queue is empty"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_DATA_ENTO_CP_FAILED, "enqueue to computer process failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_STOP_CP_QUEUE_FAILED, "stop computer process queue failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_RECV_MSG_NO_CHANNEL_INFO_ERROR, "no channel in first msg"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_RECV_MSG_MD5_WRONG, "md5 of recv msg is wrong"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_RECV_MSG_CHECKSUM_WRONG_ERROR, "checksum of recv msg is wrong"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_RECV_MSG_FAIL_TO_GENERATE_MD5, "md5 of recv msg is wrong"); +TDT_DEF_ERROR_CODE(MODID_TDT_SERVER, TDT_ERROR, TDT_RECV_MSG_SEQUENCE_ERROR, "sequence recv msg is wrong"); +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_CHANNEL_HAS_NO_SESSION_ERROR, "channel has no session"); +TDT_DEF_ERROR_CODE(MODID_HDC_CLIENT, TDT_ERROR, TDT_HDC_CLIENT_INIT_ERROR, "hdc client init error"); +TDT_DEF_ERROR_CODE(MODID_HDC_CLIENT, TDT_ERROR, TDT_HDC_CLIENT_CREATE_SESSION_ERROR, "hdc client create error"); +TDT_DEF_ERROR_CODE(MODID_HDC_CLIENT, TDT_ERROR, TDT_HDC_CLIENT_DO_NOT_EXIST, "hdc client do not exist"); 
+TDT_DEF_ERROR_CODE(MODID_HDC_CLIENT, TDT_ERROR, TDT_HDC_CLIENT_DESTROY_ERROR, "hdc server destroy error"); +TDT_DEF_ERROR_CODE(MODID_HDC_CLIENT, TDT_ERROR, TDT_HDC_CLIENT_CLOSED, "hdc client has been closed"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_BIND_CPUCORE_FAILED, "thread function bind cpu core failed"); +TDT_DEF_ERROR_CODE(MODID_HDC_SERVER, TDT_ERROR, TDT_HDC_SRV_CLOSED, "hdc server has been closed"); +TDT_DEF_ERROR_CODE(MODID_TSD_CLIENT, TDT_ERROR, TDT_TSD_CLT_OPEN_FAILED, "tsd client open failed"); +TDT_DEF_ERROR_CODE(MODID_TSD_CLIENT, TDT_ERROR, TDT_TSD_CLT_CLOSE_FAILED, "tsd client close failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_FILELIST_NOT_EXIST, "tdt filelist open failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_SAMPLE_FILE_NOT_FOUND, "tdt sample file is empty"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_FILE_OPEN_FAIL, "tdt open sample file fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_FILE_STAT_FAIL, "tdt stat sample file fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_FILE_MMAP_FAIL, "tdt mmap sample file fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_FILE_UNMAP_FAIL, "tdt unmap sample file fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_FILE_CLOSE_FAIL, "tdt close sample file fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_FILE_PARSE_FAIL, "tdt parse sample file fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_CRC32_SIZE_FAIL, "tdt crc32 of size mismatch"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_CRC32_DATA_FAIL, "tdt crc32 of data mismatch"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_DATA_QUEUE_CLOSED, "tdt prefetch data queue closed"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_MAP_INSERT_FAILED, "map insert fail"); 
+TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_INITIALIZE_FAILED, "prefetch init fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_PREFETCH_INVALID_FILELIST_LINE, "invalid filelist line"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_STRINGSTREAM_TO_VALUE_FAILED, "string to value fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_LIST_ID_OFFSET_LENGTH_POSITIVE_INTEGER_FAILED, + "value positive integer fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_SHUFFLE, TDT_ERROR, TDT_SHUFFLE_ILLEGAL_SHUFFLE_PARAM, "Illegal shuffle parameter"); +TDT_DEF_ERROR_CODE(MODID_TDT_SHUFFLE, TDT_ERROR, TDT_FILE_SHUFFLER_CREATE_FAILED, "Create file shuffler fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_UPLOADER_CREATE_FAILED, "Create uploader fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FILE_DOWNLOADER_CREATE_FAILED, "Create downloader fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FOLDER_CANNOT_BE_CREATED, "folder cannot been created"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_CANNOT_GET_STAT_OF_FOLDER, "cannot get stat of folder"); +TDT_DEF_ERROR_CODE(MODID_TDT_FILE, TDT_ERROR, TDT_FOLDER_IS_FILE, "folder is a file"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_OBS_CONFIG_INFORMATION_FAIL, "OBS configuration fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_OBS_CALLBACK_ARGUMENT_FAIL, "OBS callback argument fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_OBS_DOWNLOAD_CREATE_THREAD_FAILED, + "OBS download create thread fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_OBS_DOWNLOAD_FILE_FAIL, "OBS download file fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_OBS_DOWNLOAD_INIT_FAIL, "OBS download init fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_OBS_DOWNLOAD_METADATA_FAIL, "OBS download metadata fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_OBS_LIST_BUCKET_OBJECTS_FAIL, "OBS list bucket fail"); 
+TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_MEMORY_MEMCPY_FAILED, "tdt securec memcpy fail"); +TDT_DEF_ERROR_CODE(MODID_TDT_PREFETCH, TDT_ERROR, TDT_MEMORY_MEMSET_FAILED, "tdt securec memset fail"); +// TDT HOST +TDT_DEF_ERROR_CODE(MODID_TDT_HOST, TDT_ERROR, TDT_HOST_INIT_FAILED, "tdt host init failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_HOST, TDT_ERROR, TDT_HOST_CHANNEL_NAME_EMPTY, "channel name is empty"); +TDT_DEF_ERROR_CODE(MODID_TDT_HOST, TDT_ERROR, TDT_HOST_ALLOCATE_MEMORY_FAILED, "allocate memory failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_HOST, TDT_ERROR, TDT_HOST_MEMORY_COPY_FAILED, "memory copy failed"); +TDT_DEF_ERROR_CODE(MODID_TDT_HOST, TDT_WARNING, TDT_HOST_UNABLE_GET_TDTDATAELEM, "can not get data element"); +TDT_DEF_ERROR_CODE(MODID_TDT_HOST, TDT_WARNING, TDT_HOST_PUSH_NOT_INIT, "push data but not init"); + +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_TUNING_DATA_TRANSFER_INIT_FAILED, + "failed to init the channel of tuning-data"); + +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_TUNING_DATA_RECEIVE_CHECK_PARA_ERROR, "the index is error"); + +TDT_DEF_ERROR_CODE(MODID_TDT_CLIENT, TDT_ERROR, TDT_TUNING_DATA_TRANSFER_PARAMETER_ERROR, "the parameter is error"); +#endif // INC_TDT_STATUS_H_ diff --git a/third_party/fwkacllib/inc/tdt/tdt_host_interface.h b/third_party/fwkacllib/inc/tdt/tdt_host_interface.h new file mode 100644 index 00000000..6539735d --- /dev/null +++ b/third_party/fwkacllib/inc/tdt/tdt_host_interface.h @@ -0,0 +1,43 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef HOST_INNER_INC_TDT_HOST_INTERFACE_H_ +#define HOST_INNER_INC_TDT_HOST_INTERFACE_H_ + +#include +#include +#include +#include "tdt/data_common.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +namespace tdt { +int32_t TdtHostInit(uint32_t deviceId); + +int32_t TdtHostPushData(const std::string &channelName, const std::vector &item); + +int32_t TdtHostDestroy(); + +int32_t TdtHostPopData(const std::string &channelName, std::vector &item); + +int32_t TdtHostStop(const std::string &channelName); +} // namespace tdt +#ifdef __cplusplus +} +#endif // __cplusplus +#endif diff --git a/third_party/fwkacllib/inc/tdt/tsd_client.h b/third_party/fwkacllib/inc/tdt/tsd_client.h new file mode 100644 index 00000000..2a15cda7 --- /dev/null +++ b/third_party/fwkacllib/inc/tdt/tsd_client.h @@ -0,0 +1,55 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef TDT_HOST_INNER_INC_TSD_CLIENT_H_ +#define TDT_HOST_INNER_INC_TSD_CLIENT_H_ + +#include +#include +#include +#include +#include "tdt/status.h" + +#ifdef __cplusplus +extern "C" { +#endif // __cplusplus + +namespace tdt { +constexpr uint32_t RANK_SIZE_DEFAULT_VALUE = 1; + +class TsdClient { + public: + static TsdClient *GetInstance(); + + ~TsdClient(); + + TDT_StatusT Open(const uint32_t phyDeviceId, const uint32_t rankSize = RANK_SIZE_DEFAULT_VALUE); + + TDT_StatusT Close(); + + private: + TsdClient(); + TsdClient(const TsdClient &) = delete; + TsdClient(TsdClient &&) = delete; + TsdClient &operator=(const TsdClient &) = delete; + TsdClient &operator=(TsdClient &&) = delete; + uint32_t rankSize_; +}; +} // namespace tdt +#ifdef __cplusplus +} +#endif // __cplusplus +#endif // TDT_HOST_INNER_INC_TSD_CLIENT_H_ diff --git a/third_party/fwkacllib/inc/toolchain/prof_engine.h b/third_party/fwkacllib/inc/toolchain/prof_engine.h new file mode 100644 index 00000000..61a2a437 --- /dev/null +++ b/third_party/fwkacllib/inc/toolchain/prof_engine.h @@ -0,0 +1,215 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _MSPROF_ENGINE_PROF_ENGINE_H_ +#define _MSPROF_ENGINE_PROF_ENGINE_H_ +#define MSVP_PROF_API __attribute__((visibility("default"))) + +#include +#include +#include "prof_reporter.h" + +namespace Msprof { +namespace Engine { + +/** + * @defgroup ModuleJobConfig the ModuleJobConfig group + * This is the ModuleJobConfig group + */ + +/** + * @ingroup ModuleJobConfig + * @brief struct ModuleJobConfig + * record config info + */ +struct ModuleJobConfig { + std::map switches; /**< key is the config name, value is the config value(on or off) */ +}; + +/** + * @defgroup PluginIntf the pluginInf group + * This is the pluginInf group + */ + +/** + * @ingroup PluginIntf + * @brief class PluginIntf + */ +class MSVP_PROF_API PluginIntf { + public: + virtual ~PluginIntf() {} + + public: + /** + * @ingroup PluginIntf + * @name : Init + * @brief : API of user plugin, libmsporf call this API to send a Reporter to user plugin + * @par description : + * API of user plugin, libmsporf call this API to send a Reporter to user plugin. 
+ * @param reporter [IN] const Reporter* the Reporter from libmsprof + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_engine.h + * @since c60 + * @see UnInit + */ + virtual int Init(const Reporter *reporter) = 0; + + /** + * @ingroup PluginIntf + * @name : OnNewConfig + * @brief : API of user plugin, libmsprof call this API to send config info to user plugin \n + If the user plugin needn't config, no need to redefine this function + * @param config [IN] const ModuleJobConfig * the config from libmsprof + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_engine.h + * @since c60 + * @see Init | UnInit + */ + virtual int OnNewConfig(const ModuleJobConfig *config) { return 0; } + + /** + * @ingroup PluginIntf + * @name : UnInit + * @brief : API of user plugin, libmsprof call this API to notify plugin stop to send data + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_engine.h + * @since c60 + * @see Init + */ + virtual int UnInit() = 0; +}; + +/** + * @defgroup EngineIntf the EngineIntf group + * This is the EngineIntf group + */ + +/** + * @ingroup EngineIntf + * @brief class EngineIntf + */ +class MSVP_PROF_API EngineIntf { + public: + virtual ~EngineIntf() {} + + public: + /** + * @ingroup EngineIntf + * @name : CreatePlugin + * @brief : API of user engine, libmsporf call this API to get a plugin + * @retval PluginIntf * The pointer of the new plugin + * + * @par depend: + * @li libmsprof + * @li prof_engine.h + * @since c60 + * @see ReleasePlugin + */ + virtual PluginIntf *CreatePlugin() = 0; + + /** + * @ingroup EngineIntf + * @name : ReleasePlugin + * @brief : API of user engine, libmsprof call this API to release a plugin + * @param plugin [IN] PluginIntf * the plugin to release + * @retval PROFILING_SUCCESS 0 
(success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_engine.h + * @since c60 + * @see CreatePlugin + */ + virtual int ReleasePlugin(PluginIntf *plugin) = 0; +}; + +/** + * @defgroup EngineMgr the EngineMgr group + * This is the EngineMgr group + */ + +/** + * @ingroup EngineMgr + * @name : RegisterEngine + * @brief : API of libmsprof, register an engine with a name + * @param module [IN] const std::string the name of plugin + * @param engine [IN] const EngineIntf* the plugin + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_engine.h + * @since c60 + */ +MSVP_PROF_API int RegisterEngine(const std::string &module, const EngineIntf *engine); + +/** + * @ingroup EngineMgr + * @name : Init + * @brief : API of libmsprof, init an engine with a name + * @param module [IN] const std::string the name of plugin + * @param module [IN] const EngineIntf* the plugin + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_engine.h + * @since c60 + * @see UnInit + */ +MSVP_PROF_API int Init(const std::string &module, const EngineIntf *engine); + +/** + * @ingroup EngineMgr + * @name : Init + * @brief : API of libmsprof, uninit an engine with a name + * @param module [IN] const std::string the name of plugin + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_engine.h + * @since c60 + * @see Init + */ +MSVP_PROF_API int UnInit(const std::string &module); +} // namespace Engine +} // namespace Msprof + +#endif +/* + * History: \n + * 2019-04-10, huawei, Create file. \n + * 2020-02-10, huawei, Add Api Comment. 
\n + * + * vi: set expandtab ts=4 sw=4 tw=120: + */ \ No newline at end of file diff --git a/third_party/fwkacllib/inc/toolchain/prof_mgr_core.h b/third_party/fwkacllib/inc/toolchain/prof_mgr_core.h new file mode 100644 index 00000000..5dbc15a7 --- /dev/null +++ b/third_party/fwkacllib/inc/toolchain/prof_mgr_core.h @@ -0,0 +1,89 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef _MSPROF_ENGINE_PROF_MGR_CORE_H_ +#define _MSPROF_ENGINE_PROF_MGR_CORE_H_ +#define MSVP_PROF_API __attribute__((visibility("default"))) + +#include +#include + +/** + * @brief : struct ProfMgrCfg + */ +struct ProfMgrCfg { + std::string startCfg; /**< start cfg. 
json format */ +}; + +/** + * @name : ProfMgrConf + * @brief : struct ProfMgrConf for example [{"ai_core_events":"0xa"}].the vector size means Number of iterations + */ +struct ProfMgrConf { + std::vector conf; /**< for op trace.Ge call this api to get each iteration profiling cfg.json format.*/ +}; + +/** + * @name : ProfMgrStartUP + * @brief : start Profiling task + * @param cfg [IN]ProfMgrCfg cfg : config of start_up profiling + * @retval void * (success) + * @retval nullptr (failed) + * + * @par depend: + * @li libmsprof + * @li prof_mgr_core.h + * @since c60 + * @see ProfMgrStop + */ +MSVP_PROF_API void *ProfMgrStartUp(const ProfMgrCfg *cfg); + +/** + * @name : ProfMgrStop + * @brief : stop Profiling task + * @param handle [in] void * handle return by ProfMgrStartUP + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_mgr_core.h + * @since c60 + * @see ProfMgrStartUp + */ +MSVP_PROF_API int ProfMgrStop(void *handle); + +/** + * @name : ProfMgrGetConf + * @brief : get profiler events conf + * @param conf [OUT]ProfMgrConf * return by ProfMgrGetConf + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * @par depend: + * @li libmsprof + * @li prof_mgr_core.h + * @since c60 + * @see ProfMgrStartUp + */ +MSVP_PROF_API int ProfMgrGetConf(const std::string &aicoreMetricsType, ProfMgrConf *conf); +#endif +/* + * History: \n + * 2019-04-10, huawei, Create file. \n + * 2020-02-10, huawei, Add Api Comment. 
\n + * + * vi: set expandtab ts=4 sw=4 tw=120: + */ \ No newline at end of file diff --git a/third_party/fwkacllib/inc/toolchain/prof_reporter.h b/third_party/fwkacllib/inc/toolchain/prof_reporter.h new file mode 100644 index 00000000..ce4ae7fa --- /dev/null +++ b/third_party/fwkacllib/inc/toolchain/prof_reporter.h @@ -0,0 +1,96 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/** + * @defgroup reporter the reporter group + * This is the reporter group + */ +#ifndef _MSPROF_ENGINE_PROF_REPORTER_H_ +#define _MSPROF_ENGINE_PROF_REPORTER_H_ +#define MSVP_PROF_API __attribute__((visibility("default"))) +namespace Msprof { +namespace Engine { + +/// the max tag length +#define MSPROF_ENGINE_MAX_TAG_LEN (31) + +/** + * @ingroup reporter + * @brief struct ReporterData + * the sturct of the data send to libmsprof + */ +struct ReporterData { + char tag[MSPROF_ENGINE_MAX_TAG_LEN + 1]; ///< the sub-type of the module, data with different tag will be writen + int deviceId; ///< the physical id of device + size_t dataLen; ///< the length of send data + unsigned char *data; ///< the data content +}; + +/** + * @ingroup reporter + * @brief class Reporter + * the Reporter class .used to send data to profiling + */ +class MSVP_PROF_API Reporter { + public: + virtual ~Reporter() {} + + public: + /** + * @ingroup reporter + * @name : Report + * @brief : API of libmsprof, report data to libmsprof, it's a 
non-blocking function \n + The data will be firstly appended to cache, if the cache is full, data will be ignored + * @param data [IN] const ReporterData * the data send to libmsporf + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_reporter.h + * @since c60 + * @see Flush + */ + virtual int Report(const ReporterData *data) = 0; + + /** + * @ingroup reporter + * @name : Flush + * @brief : API of libmsprof, notify libmsprof send data over, it's a blocking function \n + The all datas of cache will be write to file or send to host + * @retval PROFILING_SUCCESS 0 (success) + * @retval PROFILING_FAILED -1 (failed) + * + * @par depend: + * @li libmsprof + * @li prof_reporter.h + * @since c60 + * @see ProfMgrStop + */ + virtual int Flush() = 0; +}; + +} // namespace Engine +} // namespace Msprof + +#endif +/* + * History: \n + * 2019-04-10, huawei, Create file. \n + * 2020-02-10, huawei, Add Api Comment. \n + * + * vi: set expandtab ts=4 sw=4 tw=120: + */ \ No newline at end of file diff --git a/third_party/fwkacllib/inc/toolchain/slog.h b/third_party/fwkacllib/inc/toolchain/slog.h new file mode 100644 index 00000000..e86ec846 --- /dev/null +++ b/third_party/fwkacllib/inc/toolchain/slog.h @@ -0,0 +1,356 @@ +/** + * Copyright 2019-2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef _D_SYSLOG_H +#define _D_SYSLOG_H + +#ifdef __cplusplus +extern "C" { +#endif + +/** + * @ingroup slog + * + * debug level id + */ +#define DLOG_DEBUG 0 + +/** + * @ingroup slog + * + * info level id + */ +#define DLOG_INFO 1 + +/** + * @ingroup slog + * + * warning level id + */ +#define DLOG_WARN 2 + +/** + * @ingroup slog + * + * error level id + */ +#define DLOG_ERROR 3 + +/** + * @ingroup slog + * + * don't print log + */ +#define DLOG_NULL 4 + +/** + * @ingroup slog + * + * trace log print level id + */ +#define DLOG_TRACE 5 + +/** + * @ingroup slog + * + * oplog log print level id + */ +#define DLOG_OPLOG 6 + +/** + * @ingroup slog + * + * event log print level id + */ +#define DLOG_EVENT 0x10 + +/** + * @ingroup slog + * + * max log length + */ +#define MSG_LENGTH 1024 + +typedef struct tagDCODE { + const char *cName; + int cVal; +} DCODE; + +typedef struct tagKV { + char *kname; + char *value; +} KeyValue; + +/** + * @ingroup slog + * + * module id + */ +enum { + SLOG, /**< Slog */ + IDEDD, /**< IDE daemon device */ + IDEDH, /**< IDE daemon host */ + HCCL, /**< HCCL */ + FMK, /**< Framework */ + HIAIENGINE, /**< Matrix */ + DVPP, /**< DVPP */ + RUNTIME, /**< Runtime */ + CCE, /**< CCE */ + HDC, /**< HDC */ + DRV, /**< Driver */ + MDCFUSION, /**< Mdc fusion */ + MDCLOCATION, /**< Mdc location */ + MDCPERCEPTION, /**< Mdc perception */ + MDCFSM, + MDCCOMMON, + MDCMONITOR, + MDCBSWP, /**< MDC base software platform */ + MDCDEFAULT, /**< MDC undefine */ + MDCSC, /**< MDC spatial cognition */ + MDCPNC, + MLL, + DEVMM, /**< Dlog memory managent */ + KERNEL, /**< Kernel */ + LIBMEDIA, /**< Libmedia */ + CCECPU, /**< ai cpu */ + ASCENDDK, /**< AscendDK */ + ROS, /**< ROS */ + HCCP, + ROCE, + TEFUSION, + PROFILING, /**< Profiling */ + DP, /**< Data Preprocess */ + APP, /**< User Application */ + TS, /**< TS module */ + TSDUMP, /**< TSDUMP module */ + AICPU, /**< AICPU module */ + LP, /**< LP module */ + TDT, + FE, + MD, + MB, + ME, + IMU, + IMP, + 
GE, /**< Fmk */ + MDCFUSA, + CAMERA, + ASCENDCL, + TEEOS, + SIS, + HSM, + INVLID_MOUDLE_ID +}; + +#ifdef MODULE_ID_NAME + +/** + * @ingroup slog + * + * set module id to map + */ +#define SET_MOUDLE_ID_MAP_NAME(x) \ + { #x, x } + +static DCODE g_moduleIdName[] = {SET_MOUDLE_ID_MAP_NAME(SLOG), + SET_MOUDLE_ID_MAP_NAME(IDEDD), + SET_MOUDLE_ID_MAP_NAME(IDEDH), + SET_MOUDLE_ID_MAP_NAME(HCCL), + SET_MOUDLE_ID_MAP_NAME(FMK), + SET_MOUDLE_ID_MAP_NAME(HIAIENGINE), + SET_MOUDLE_ID_MAP_NAME(DVPP), + SET_MOUDLE_ID_MAP_NAME(RUNTIME), + SET_MOUDLE_ID_MAP_NAME(CCE), + SET_MOUDLE_ID_MAP_NAME(HDC), + SET_MOUDLE_ID_MAP_NAME(DRV), + SET_MOUDLE_ID_MAP_NAME(MDCFUSION), + SET_MOUDLE_ID_MAP_NAME(MDCLOCATION), + SET_MOUDLE_ID_MAP_NAME(MDCPERCEPTION), + SET_MOUDLE_ID_MAP_NAME(MDCFSM), + SET_MOUDLE_ID_MAP_NAME(MDCCOMMON), + SET_MOUDLE_ID_MAP_NAME(MDCMONITOR), + SET_MOUDLE_ID_MAP_NAME(MDCBSWP), + SET_MOUDLE_ID_MAP_NAME(MDCDEFAULT), + SET_MOUDLE_ID_MAP_NAME(MDCSC), + SET_MOUDLE_ID_MAP_NAME(MDCPNC), + SET_MOUDLE_ID_MAP_NAME(MLL), + SET_MOUDLE_ID_MAP_NAME(DEVMM), + SET_MOUDLE_ID_MAP_NAME(KERNEL), + SET_MOUDLE_ID_MAP_NAME(LIBMEDIA), + SET_MOUDLE_ID_MAP_NAME(CCECPU), + SET_MOUDLE_ID_MAP_NAME(ASCENDDK), + SET_MOUDLE_ID_MAP_NAME(ROS), + SET_MOUDLE_ID_MAP_NAME(HCCP), + SET_MOUDLE_ID_MAP_NAME(ROCE), + SET_MOUDLE_ID_MAP_NAME(TEFUSION), + SET_MOUDLE_ID_MAP_NAME(PROFILING), + SET_MOUDLE_ID_MAP_NAME(DP), + SET_MOUDLE_ID_MAP_NAME(APP), + SET_MOUDLE_ID_MAP_NAME(TS), + SET_MOUDLE_ID_MAP_NAME(TSDUMP), + SET_MOUDLE_ID_MAP_NAME(AICPU), + SET_MOUDLE_ID_MAP_NAME(LP), + SET_MOUDLE_ID_MAP_NAME(TDT), + SET_MOUDLE_ID_MAP_NAME(FE), + SET_MOUDLE_ID_MAP_NAME(MD), + SET_MOUDLE_ID_MAP_NAME(MB), + SET_MOUDLE_ID_MAP_NAME(ME), + SET_MOUDLE_ID_MAP_NAME(IMU), + SET_MOUDLE_ID_MAP_NAME(IMP), + SET_MOUDLE_ID_MAP_NAME(GE), + SET_MOUDLE_ID_MAP_NAME(MDCFUSA), + SET_MOUDLE_ID_MAP_NAME(CAMERA), + SET_MOUDLE_ID_MAP_NAME(ASCENDCL), + SET_MOUDLE_ID_MAP_NAME(TEEOS), + SET_MOUDLE_ID_MAP_NAME(SIS), + SET_MOUDLE_ID_MAP_NAME(HSM), + {NULL, 
-1}}; +#endif + +/** + * @ingroup slog + * @brief External log interface, which called by modules + */ +extern void dlog_init(void); + +/** + * @ingroup slog + * @brief dlog_getlevel: get module level + * + * @param [in]moduleId: module id, eg: CCE + * @param [out]enableEvent: 1: enable; 0: disable + * @return: module level(0: debug, 1: info, 2: warning, 3: error, 4: null output) + */ +extern int dlog_getlevel(int moduleId, int *enableEvent); + +/** + * @ingroup slog + * @brief dlog_error: print error log + * + * @param [in]moduleId: module id, eg: CCE + * @param [in]fmt: log content + */ +#define dlog_error(moduleId, fmt, ...) \ + do { \ + DlogErrorInner(moduleId, "[%s:%d]" fmt, __FILE__, __LINE__, ##__VA_ARGS__); \ + } while (0) + +/** + * @ingroup slog + * @brief dlog_warn: print warning log + * + * @param [in]moduleId: module id, eg: CCE + * @param [in]fmt: log content + */ +#define dlog_warn(moduleId, fmt, ...) \ + do { \ + DlogWarnInner(moduleId, "[%s:%d]" fmt, __FILE__, __LINE__, ##__VA_ARGS__); \ + } while (0) + +/** + * @ingroup slog + * @brief dlog_info: print info log + * + * @param [in]moduleId: module id, eg: CCE + * @param [in]fmt: log content + */ +#define dlog_info(moduleId, fmt, ...) \ + do { \ + DlogInfoInner(moduleId, "[%s:%d]" fmt, __FILE__, __LINE__, ##__VA_ARGS__); \ + } while (0) + +/** + * @ingroup slog + * @brief dlog_debug: print debug log + * + * @param [in]moduleId: module id, eg: CCE + * @param [in]fmt: log content + */ +#define dlog_debug(moduleId, fmt, ...) \ + do { \ + DlogDebugInner(moduleId, "[%s:%d]" fmt, __FILE__, __LINE__, ##__VA_ARGS__); \ + } while (0) + +/** + * @ingroup slog + * @brief dlog_event: print event log + * + * @param [in]moduleId: module id, eg: CCE + * @param [in]fmt: log content + */ +#define dlog_event(moduleId, fmt, ...) 
\ + do { \ + DlogEventInner(moduleId, "[%s:%d]" fmt, __FILE__, __LINE__, ##__VA_ARGS__); \ + } while (0) + +/** + * @ingroup slog + * @brief Dlog: print log, need caller to specify level + * + * @param [in]moduleId: module id, eg: CCE + * @param [in]level(0: debug, 1: info, 2: warning, 3: error, 5: trace, 6: oplog, 16: event) + * @param [in]fmt: log content + */ +#define Dlog(moduleId, level, fmt, ...) \ + do { \ + DlogInner(moduleId, level, "[%s:%d]" fmt, __FILE__, __LINE__, ##__VA_ARGS__); \ + } while (0) + +/** + * @ingroup slog + * @brief DlogSub: print log, need caller to specify level and submodule + * + * @param [in]moduleId: module id, eg: CCE + * @param [in]submodule: eg: engine + * @param [in]level(0: debug, 1: info, 2: warning, 3: error, 5: trace, 6: oplog, 16: event) + * @param [in]fmt: log content + */ +#define DlogSub(moduleId, submodule, level, fmt, ...) \ + do { \ + DlogInner(moduleId, level, "[%s:%d][%s]" fmt, __FILE__, __LINE__, submodule, ##__VA_ARGS__); \ + } while (0) + +/** + * @ingroup slog + * @brief DlogWithKV: print log, need caller to specify level and other paramters + * + * @param [in]moduleId: module id, eg: CCE + * @param [in]level(0: debug, 1: info, 2: warning, 3: error, 5: trace, 6: oplog, 16: event) + * @param [in]pstKVArray: key-value array + * @param [in]kvNum: key-value element num in array + * @param [in]fmt: log content + */ +#define DlogWithKV(moduleId, level, pstKVArray, kvNum, fmt, ...) 
\ + do { \ + DlogWithKVInner(moduleId, level, pstKVArray, kvNum, "[%s:%d]" fmt, __FILE__, __LINE__, ##__VA_ARGS__); \ + } while (0) + +/** + * @ingroup slog + * @brief Internal log interface, other modules are not allowed to call this interface + */ +void DlogErrorInner(int moduleId, const char *fmt, ...); +void DlogWarnInner(int moduleId, const char *fmt, ...); +void DlogInfoInner(int moduleId, const char *fmt, ...); +void DlogDebugInner(int moduleId, const char *fmt, ...); +void DlogEventInner(int moduleId, const char *fmt, ...); +void DlogInner(int moduleId, int level, const char *fmt, ...); +void DlogWithKVInner(int moduleId, int level, KeyValue *pstKVArray, int kvNum, const char *fmt, ...); + +#ifdef __cplusplus +} +#endif +#endif /* sys/slog.h */ diff --git a/third_party/fwkacllib/version.info b/third_party/fwkacllib/version.info new file mode 100644 index 00000000..1ce2d666 --- /dev/null +++ b/third_party/fwkacllib/version.info @@ -0,0 +1 @@ +Version=1.60.T51.0.B203 diff --git a/third_party/prebuild/x86_64/libc_sec.so b/third_party/prebuild/x86_64/libc_sec.so new file mode 100755 index 00000000..8290bbcc Binary files /dev/null and b/third_party/prebuild/x86_64/libc_sec.so differ diff --git a/third_party/prebuild/x86_64/libslog.so b/third_party/prebuild/x86_64/libslog.so new file mode 100755 index 00000000..168c1ac6 Binary files /dev/null and b/third_party/prebuild/x86_64/libslog.so differ diff --git a/third_party/securec/CMakeLists.txt b/third_party/securec/CMakeLists.txt new file mode 100644 index 00000000..e360a6eb --- /dev/null +++ b/third_party/securec/CMakeLists.txt @@ -0,0 +1,11 @@ +SET(CMAKE_BUILD_TYPE "Debug") +SET(CMAKE_C_FLAGS_DEBUG "$ENV{CFLAGS} -fPIC -O0 -Wall -Wno-deprecated-declarations -g2 -ggdb -fno-inline-functions -fno-omit-frame-pointer -D_LIBCPP_INLINE_VISIBILITY='' -D'_LIBCPP_EXTERN_TEMPLATE(...)='") +SET(CMAKE_C_FLAGS_RELEASE "$ENV{CFLAGS} -fPIC -O3 -Wall -Wno-deprecated-declarations") +set(CMAKE_EXPORT_COMPILE_COMMANDS ON) + +#add 
flags +set(CMAKE_C_FLAGS "${CMAKE_C_FLAGS} -I/usr/local/include -Werror") + + +include_directories(./include) +add_subdirectory(src) diff --git a/third_party/securec/include/securec.h b/third_party/securec/include/securec.h new file mode 100644 index 00000000..b627a3c3 --- /dev/null +++ b/third_party/securec/include/securec.h @@ -0,0 +1,634 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef __SECUREC_H__5D13A042_DC3F_4ED9_A8D1_882811274C27 +#define __SECUREC_H__5D13A042_DC3F_4ED9_A8D1_882811274C27 + +#include "securectype.h" +#include + +#ifndef SECUREC_HAVE_ERRNO_H +#if SECUREC_IN_KERNEL +#define SECUREC_HAVE_ERRNO_H 0 +#else +#define SECUREC_HAVE_ERRNO_H 1 +#endif +#endif + +/* EINVAL ERANGE may defined in errno.h */ +#if SECUREC_HAVE_ERRNO_H +#include +#endif + +/* define error code */ +#if defined(SECUREC_NEED_ERRNO_TYPE) || !defined(__STDC_WANT_LIB_EXT1__) || \ + (defined(__STDC_WANT_LIB_EXT1__) && (__STDC_WANT_LIB_EXT1__ == 0)) +#ifndef SECUREC_DEFINED_ERRNO_TYPE +#define SECUREC_DEFINED_ERRNO_TYPE +/* just check whether macrodefinition exists. */ +#ifndef errno_t +typedef int errno_t; +#endif +#endif +#endif + +/* success */ +#ifndef EOK +#define EOK 0 +#endif + +#ifndef EINVAL +/* The src buffer is not correct and destination buffer cant not be reset */ +#define EINVAL 22 +#endif + +#ifndef EINVAL_AND_RESET +/* Once the error is detected, the dest buffer must be reseted! 
*/ +#define EINVAL_AND_RESET (22 | 128) +#endif + +#ifndef ERANGE +/* The destination buffer is not long enough and destination buffer can not be reset */ +#define ERANGE 34 +#endif + +#ifndef ERANGE_AND_RESET +/* Once the error is detected, the dest buffer must be reseted! */ +#define ERANGE_AND_RESET (34 | 128) +#endif + +#ifndef EOVERLAP_AND_RESET +/* Once the buffer overlap is detected, the dest buffer must be reseted! */ +#define EOVERLAP_AND_RESET (54 | 128) +#endif + +/* if you need export the function of this library in Win32 dll, use __declspec(dllexport) */ +#ifndef SECUREC_API +#if defined(SECUREC_DLL_EXPORT) +#define SECUREC_API __declspec(dllexport) +#elif defined(SECUREC_DLL_IMPORT) +#define SECUREC_API __declspec(dllimport) +#else +/* Standardized function declaration . If a security function is declared in the your code, + * it may cause a compilation alarm,Please delete the security function you declared + * Adding extern under windows will cause the system to have inline functions to expand, + * so do not add the extern in default + */ +#if defined(_MSC_VER) +#define SECUREC_API +#else +#define SECUREC_API extern +#endif +#endif +#endif + +#ifdef __cplusplus +extern "C" { +#endif + /* + * Description: The GetHwSecureCVersion function get SecureC Version string and version number. + * Parameter: verNumber - to store version number + * Return: version string + */ + SECUREC_API const char *GetHwSecureCVersion(unsigned short *verNumber); + +#if SECUREC_ENABLE_MEMSET + /* + * Description: The memset_s function copies the value of c (converted to an unsigned char) into each of + * the first count characters of the object pointed to by dest. 
+ * Parameter: dest - destination address + * Parameter: destMax -The maximum length of destination buffer + * Parameter: c - the value to be copied + * Parameter: count -copies fisrt count characters of dest + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t memset_s(void *dest, size_t destMax, int c, size_t count); +#endif + +#ifndef SECUREC_ONLY_DECLARE_MEMSET +#define SECUREC_ONLY_DECLARE_MEMSET 0 +#endif + +#if SECUREC_ONLY_DECLARE_MEMSET == 0 + +#if SECUREC_ENABLE_MEMMOVE + /* + * Description: The memmove_s function copies n characters from the object pointed to by src + * into the object pointed to by dest. + * Parameter: dest - destination address + * Parameter: destMax -The maximum length of destination buffer + * Parameter: src -source address + * Parameter: count -copies count wide characters from the src + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t memmove_s(void *dest, size_t destMax, const void *src, size_t count); +#endif + +#if SECUREC_ENABLE_MEMCPY + /* + * Description: The memcpy_s function copies n characters from the object pointed to + * by src into the object pointed to by dest. 
+ * Parameter: dest - destination address + * Parameter: destMax -The maximum length of destination buffer + * Parameter: src -source address + * Parameter: count -copies count characters from the src + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t memcpy_s(void *dest, size_t destMax, const void *src, size_t count); +#endif + +#if SECUREC_ENABLE_STRCPY + /* + * Description: The strcpy_s function copies the string pointed to by strSrc (including + * the terminating null character) into the array pointed to by strDest + * Parameter: strDest - destination address + * Parameter: destMax -The maximum length of destination buffer(including the terminating null character) + * Parameter: strSrc -source address + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t strcpy_s(char *strDest, size_t destMax, const char *strSrc); +#endif + +#if SECUREC_ENABLE_STRNCPY + /* + * Description: The strncpy_s function copies not more than n successive characters (not including + * the terminating null character) + * from the array pointed to by strSrc to the array pointed to by strDest + * Parameter: strDest - destination address + * Parameter: destMax -The maximum length of destination buffer(including the terminating null character) + * Parameter: strSrc -source address + * Parameter: count -copies count characters from the src + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t strncpy_s(char *strDest, size_t destMax, const char *strSrc, size_t count); +#endif + +#if SECUREC_ENABLE_STRCAT + /* + * Description: The strcat_s function appends a copy of the string pointed to by strSrc (including + * the terminating null character) + * to the end of the string pointed to by strDest + * Parameter: strDest - destination address + * Parameter: destMax -The maximum length of destination buffer(including the terminating null wide character) + * Parameter: strSrc -source address + * 
Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t strcat_s(char *strDest, size_t destMax, const char *strSrc); +#endif + +#if SECUREC_ENABLE_STRNCAT + /* + * Description: The strncat_s function appends not more than n successive characters (not including + * the terminating null character) + * from the array pointed to by strSrc to the end of the string pointed to by strDest. + * Parameter: strDest - destination address + * Parameter: destMax -The maximum length of destination buffer(including the terminating null character) + * Parameter: strSrc -source address + * Parameter: count -copies count characters from the src + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t strncat_s(char *strDest, size_t destMax, const char *strSrc, size_t count); +#endif + +#if SECUREC_ENABLE_VSPRINTF + /* + * Description: The vsprintf_s function is equivalent to the vsprintf function except for the Parameter: destMax + * and the explicit runtime-constraints violation + * Parameter: strDest - produce output according to a format ,write to the character string strDest + * Parameter: destMax - The maximum length of destination buffer(including the terminating null wide characte) + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of characters printed(not including the terminating null byte ('\0')), + * If an error occurred Return: -1. 
+ */ + SECUREC_API int vsprintf_s(char *strDest, size_t destMax, const char *format, + va_list argList) SECUREC_ATTRIBUTE(3, 0); +#endif + +#if SECUREC_ENABLE_SPRINTF + /* + * Description: The sprintf_s function is equivalent to the sprintf function except for the Parameter: destMax + * and the explicit runtime-constraints violation + * Parameter: strDest - produce output according to a format ,write to the character string strDest + * Parameter: destMax - The maximum length of destination buffer(including the terminating null byte ('\0')) + * Parameter: format - fromat string + * Return: the number of characters printed(not including the terminating null byte ('\0')), + * If an error occurred Return: -1. + */ + SECUREC_API int sprintf_s(char *strDest, size_t destMax, const char *format, ...) SECUREC_ATTRIBUTE(3, 4); +#endif + +#if SECUREC_ENABLE_VSNPRINTF + /* + * Description: The vsnprintf_s function is equivalent to the vsnprintf function except for the Parameter: + * destMax/count and the explicit runtime-constraints violation + * Parameter: strDest - produce output according to a format ,write to the character string strDest + * Parameter: destMax - The maximum length of destination buffer(including the terminating null byte ('\0')) + * Parameter: count - do not write more than count bytes to strDest(not including the terminating null byte ('\0')) + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of characters printed(not including the terminating null byte ('\0')), + * If an error occurred Return: -1.Pay special attention to returning -1 when truncation occurs + */ + SECUREC_API int vsnprintf_s(char *strDest, size_t destMax, size_t count, const char *format, + va_list argList) SECUREC_ATTRIBUTE(4, 0); +#endif + +#if SECUREC_ENABLE_SNPRINTF + /* + * Description: The snprintf_s function is equivalent to the snprintf function except for the Parameter: + * destMax/count and the explicit 
runtime-constraints violation + * Parameter: strDest - produce output according to a format ,write to the character string strDest + * Parameter: destMax - The maximum length of destination buffer(including the terminating null byte ('\0')) + * Parameter: count - do not write more than count bytes to strDest(not including the terminating null byte ('\0')) + * Parameter: format - fromat string + * Return: the number of characters printed(not including the terminating null byte ('\0')), + * If an error occurred Return: -1.Pay special attention to returning -1 when truncation occurs + */ + SECUREC_API int snprintf_s(char *strDest, size_t destMax, size_t count, const char *format, + ...) SECUREC_ATTRIBUTE(4, 5); +#endif + +#if SECUREC_SNPRINTF_TRUNCATED + /* + * Description: The vsnprintf_truncated_s function is equivalent to the vsnprintf_s function except + * no count Parameter: and Return: value + * Parameter: strDest - produce output according to a format ,write to the character string strDest + * Parameter: destMax - The maximum length of destination buffer(including the terminating null byte ('\0')) + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of characters printed(not including the terminating null byte ('\0')), + * If an error occurred Return: -1.Pay special attention to returning destMax - 1 when truncation occurs + */ + SECUREC_API int vsnprintf_truncated_s(char *strDest, size_t destMax, const char *format, + va_list argList) SECUREC_ATTRIBUTE(3, 0); + + /* + * Description: The snprintf_truncated_s function is equivalent to the snprintf_2 function except + * no count Parameter: and Return: value + * Parameter: strDest - produce output according to a format ,write to the character string strDest + * Parameter: destMax - The maximum length of destination buffer(including the terminating null byte ('\0')) + * Parameter: format - fromat string + * Return: the number of characters 
printed(not including the terminating null byte ('\0')), + * If an error occurred Return: -1.Pay special attention to returning destMax - 1 when truncation occurs + */ + SECUREC_API int snprintf_truncated_s(char *strDest, size_t destMax, + const char *format, ...) SECUREC_ATTRIBUTE(3, 4); +#endif + +#if SECUREC_ENABLE_SCANF + /* + * Description: The scanf_s function is equivalent to fscanf_s with the argument stdin + * interposed before the arguments to scanf_s + * Parameter: format - fromat string + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int scanf_s(const char *format, ...); +#endif + +#if SECUREC_ENABLE_VSCANF + /* + * Description: The vscanf_s function is equivalent to scanf_s, with the variable argument list replaced by argList + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int vscanf_s(const char *format, va_list argList); +#endif + +#if SECUREC_ENABLE_SSCANF + /* + * Description: The sscanf_s function is equivalent to fscanf_s, except that input is obtained from a + * string (specified by the argument buffer) rather than from a stream + * Parameter: buffer - read character from buffer + * Parameter: format - fromat string + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int sscanf_s(const char *buffer, const char *format, ...); +#endif + +#if SECUREC_ENABLE_VSSCANF + /* + * Description: The vsscanf_s function is equivalent to sscanf_s, with the variable argument list + * replaced by argList + * Parameter: buffer - read character from buffer + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of input items assigned, If an error occurred Return: -1. 
+ */ + SECUREC_API int vsscanf_s(const char *buffer, const char *format, va_list argList); +#endif + +#if SECUREC_ENABLE_FSCANF + /* + * Description: The fscanf_s function is equivalent to fscanf except that the c, s, and [ conversion specifiers + * apply to a pair of arguments (unless assignment suppression is indicated by a*) + * Parameter: stream - stdio file stream + * Parameter: format - fromat string + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int fscanf_s(FILE *stream, const char *format, ...); +#endif + +#if SECUREC_ENABLE_VFSCANF + /* + * Description: The vfscanf_s function is equivalent to fscanf_s, with the variable argument list + * replaced by argList + * Parameter: stream - stdio file stream + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int vfscanf_s(FILE *stream, const char *format, va_list argList); +#endif + +#if SECUREC_ENABLE_STRTOK + /* + * Description: The strtok_s function parses a string into a sequence of strToken, + * replace all characters in strToken string that match to strDelimit set with 0. + * On the first call to strtok_s the string to be parsed should be specified in strToken. + * In each subsequent call that should parse the same string, strToken should be NULL + * Parameter: strToken - the string to be delimited + * Parameter: strDelimit -specifies a set of characters that delimit the tokens in the parsed string + * Parameter: context -is a pointer to a char * variable that is used internally by strtok_s function + * Return: On the first call returns the address of the first non \0 character, otherwise NULL is returned. + * In subsequent calls, the strtoken is set to NULL, and the context set is the same as the previous call, + * return NULL if the *context string length is equal 0, otherwise return *context. 
+ */ + SECUREC_API char *strtok_s(char *strToken, const char *strDelimit, char **context); +#endif + +#if SECUREC_ENABLE_GETS && SECUREC_IN_KERNEL == 0 + /* + * Description: The gets_s function reads at most one less than the number of characters specified + * by destMax from the stream pointed to by stdin, into the array pointed to by buffer + * Parameter: buffer - destination address + * Parameter: destMax -The maximum length of destination buffer(including the terminating null character) + * Return: buffer if there was no runtime-constraint violation,If an error occurred Return: NULL. + */ + SECUREC_API char *gets_s(char *buffer, size_t destMax); +#endif + + +#if SECUREC_ENABLE_WCHAR_FUNC +#if SECUREC_ENABLE_MEMCPY + /* + * Description: The wmemcpy_s function copies n successive wide characters from the object pointed to + * by src into the object pointed to by dest. + * Parameter: dest - destination address + * Parameter: destMax -The maximum length of destination buffer + * Parameter: src -source address + * Parameter: count -copies count wide characters from the src + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t wmemcpy_s(wchar_t *dest, size_t destMax, const wchar_t *src, size_t count); +#endif + +#if SECUREC_ENABLE_MEMMOVE + /* + * Description: The wmemmove_s function copies n successive wide characters from the object + * pointed to by src into the object pointed to by dest. 
+ * Parameter: dest - destination address + * Parameter: destMax -The maximum length of destination buffer + * Parameter: src -source address + * Parameter: count -copies count wide characters from the src + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t wmemmove_s(wchar_t *dest, size_t destMax, const wchar_t *src, size_t count); +#endif + +#if SECUREC_ENABLE_STRCPY + /* + * Description: The wcscpy_s function copies the wide string pointed to by strSrc (including theterminating + * null wide character) into the array pointed to by strDest + * Parameter: strDest - destination address + * Parameter: destMax -The maximum length of destination buffer + * Parameter: strSrc -source address + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t wcscpy_s(wchar_t *strDest, size_t destMax, const wchar_t *strSrc); +#endif + +#if SECUREC_ENABLE_STRNCPY + /* + * Description: The wcsncpy_s function copies not more than n successive wide characters (not including the + * terminating null wide character) from the array pointed to by strSrc to the array pointed to by strDest + * Parameter: strDest - destination address + * Parameter: destMax -The maximum length of destination buffer(including the terminating wide character) + * Parameter: strSrc -source address + * Parameter: count -copies count wide characters from the src + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t wcsncpy_s(wchar_t *strDest, size_t destMax, const wchar_t *strSrc, size_t count); +#endif + +#if SECUREC_ENABLE_STRCAT + /* + * Description: The wcscat_s function appends a copy of the wide string pointed to by strSrc (including the + * terminating null wide character) to the end of the wide string pointed to by strDest + * Parameter: strDest - destination address + * Parameter: destMax -The maximum length of destination buffer(including the terminating wide character) + * Parameter: strSrc -source 
address + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t wcscat_s(wchar_t *strDest, size_t destMax, const wchar_t *strSrc); +#endif + +#if SECUREC_ENABLE_STRNCAT + /* + * Description: The wcsncat_s function appends not more than n successive wide characters (not including the + * terminating null wide character) from the array pointed to by strSrc to the end of the wide string pointed to + * by strDest. + * Parameter: strDest - destination address + * Parameter: destMax -The maximum length of destination buffer(including the terminating wide character) + * Parameter: strSrc -source address + * Parameter: count -copies count wide characters from the src + * Return: EOK if there was no runtime-constraint violation + */ + SECUREC_API errno_t wcsncat_s(wchar_t *strDest, size_t destMax, const wchar_t *strSrc, size_t count); +#endif + +#if SECUREC_ENABLE_STRTOK + /* + * Description: The wcstok_s function is the wide-character equivalent of the strtok_s function + * Parameter: strToken - the string to be delimited + * Parameter: strDelimit -specifies a set of characters that delimit the tokens in the parsed string + * Parameter: context -is a pointer to a char * variable that is used internally by strtok_s function + * Return: a pointer to the first character of a token, or a null pointer if there is no token + * or there is a runtime-constraint violation. 
+ */ + SECUREC_API wchar_t *wcstok_s(wchar_t *strToken, const wchar_t *strDelimit, wchar_t **context); +#endif + +#if SECUREC_ENABLE_VSPRINTF + /* + * Description: The vswprintf_s function is the wide-character equivalent of the vsprintf_s function + * Parameter: strDest - produce output according to a format ,write to the character string strDest + * Parameter: destMax - The maximum length of destination buffer(including the terminating null ) + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of characters printed(not including the terminating null wide characte), + * If an error occurred Return: -1. + */ + SECUREC_API int vswprintf_s(wchar_t *strDest, size_t destMax, const wchar_t *format, va_list argList); +#endif + +#if SECUREC_ENABLE_SPRINTF + + /* + * Description: The swprintf_s function is the wide-character equivalent of the sprintf_s function + * Parameter: strDest - produce output according to a format ,write to the character string strDest + * Parameter: destMax - The maximum length of destination buffer(including the terminating null ) + * Parameter: format - fromat string + * Return: the number of characters printed(not including the terminating null wide characte), + * If an error occurred Return: -1. + */ + SECUREC_API int swprintf_s(wchar_t *strDest, size_t destMax, const wchar_t *format, ...); +#endif + +#if SECUREC_ENABLE_FSCANF + /* + * Description: The fwscanf_s function is the wide-character equivalent of the fscanf_s function + * Parameter: stream - stdio file stream + * Parameter: format - fromat string + * Return: the number of input items assigned, If an error occurred Return: -1. 
+ */ + SECUREC_API int fwscanf_s(FILE *stream, const wchar_t *format, ...); +#endif + +#if SECUREC_ENABLE_VFSCANF + /* + * Description: The vfwscanf_s function is the wide-character equivalent of the vfscanf_s function + * Parameter: stream - stdio file stream + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int vfwscanf_s(FILE *stream, const wchar_t *format, va_list argList); +#endif + +#if SECUREC_ENABLE_SCANF + /* + * Description: The wscanf_s function is the wide-character equivalent of the scanf_s function + * Parameter: format - fromat string + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int wscanf_s(const wchar_t *format, ...); +#endif + +#if SECUREC_ENABLE_VSCANF + /* + * Description: The vwscanf_s function is the wide-character equivalent of the vscanf_s function + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int vwscanf_s(const wchar_t *format, va_list argList); +#endif + +#if SECUREC_ENABLE_SSCANF + /* + * Description: The swscanf_s function is the wide-character equivalent of the sscanf_s function + * Parameter: buffer - read character from buffer + * Parameter: format - fromat string + * Return: the number of input items assigned, If an error occurred Return: -1. 
+ */ + SECUREC_API int swscanf_s(const wchar_t *buffer, const wchar_t *format, ...); +#endif + +#if SECUREC_ENABLE_VSSCANF + /* + * Description: The vswscanf_s function is the wide-character equivalent of the vsscanf_s function + * Parameter: buffer - read character from buffer + * Parameter: format - fromat string + * Parameter: argList - instead of a variable number of arguments + * Return: the number of input items assigned, If an error occurred Return: -1. + */ + SECUREC_API int vswscanf_s(const wchar_t *buffer, const wchar_t *format, va_list argList); +#endif +#endif /* SECUREC_ENABLE_WCHAR_FUNC */ +#endif + + /* those functions are used by macro ,must declare hare , also for without function declaration warning */ + extern errno_t strncpy_error(char *strDest, size_t destMax, const char *strSrc, size_t count); + extern errno_t strcpy_error(char *strDest, size_t destMax, const char *strSrc); + +#if SECUREC_WITH_PERFORMANCE_ADDONS + /* those functions are used by macro */ + extern errno_t memset_sOptAsm(void *dest, size_t destMax, int c, size_t count); + extern errno_t memset_sOptTc(void *dest, size_t destMax, int c, size_t count); + extern errno_t memcpy_sOptAsm(void *dest, size_t destMax, const void *src, size_t count); + extern errno_t memcpy_sOptTc(void *dest, size_t destMax, const void *src, size_t count); + +/* strcpy_sp is a macro, NOT a function in performance optimization mode. */ +#define strcpy_sp(dest, destMax, src) ((__builtin_constant_p((destMax)) && \ + __builtin_constant_p((src))) ? \ + SECUREC_STRCPY_SM((dest), (destMax), (src)) : \ + strcpy_s((dest), (destMax), (src))) + +/* strncpy_sp is a macro, NOT a function in performance optimization mode. */ +#define strncpy_sp(dest, destMax, src, count) ((__builtin_constant_p((count)) && \ + __builtin_constant_p((destMax)) && \ + __builtin_constant_p((src))) ? 
\ + SECUREC_STRNCPY_SM((dest), (destMax), (src), (count)) : \ + strncpy_s((dest), (destMax), (src), (count))) + +/* strcat_sp is a macro, NOT a function in performance optimization mode. */ +#define strcat_sp(dest, destMax, src) ((__builtin_constant_p((destMax)) && \ + __builtin_constant_p((src))) ? \ + SECUREC_STRCAT_SM((dest), (destMax), (src)) : \ + strcat_s((dest), (destMax), (src))) + +/* strncat_sp is a macro, NOT a function in performance optimization mode. */ +#define strncat_sp(dest, destMax, src, count) ((__builtin_constant_p((count)) && \ + __builtin_constant_p((destMax)) && \ + __builtin_constant_p((src))) ? \ + SECUREC_STRNCAT_SM((dest), (destMax), (src), (count)) : \ + strncat_s((dest), (destMax), (src), (count))) + +/* memcpy_sp is a macro, NOT a function in performance optimization mode. */ +#define memcpy_sp(dest, destMax, src, count) (__builtin_constant_p((count)) ? \ + (SECUREC_MEMCPY_SM((dest), (destMax), (src), (count))) : \ + (__builtin_constant_p((destMax)) ? \ + (((size_t)(destMax) > 0 && \ + (((unsigned long long)(destMax) & \ + (unsigned long long)(-2)) < SECUREC_MEM_MAX_LEN)) ? \ + memcpy_sOptTc((dest), (destMax), (src), (count)) : ERANGE) : \ + memcpy_sOptAsm((dest), (destMax), (src), (count)))) + +/* memset_sp is a macro, NOT a function in performance optimization mode. */ +#define memset_sp(dest, destMax, c, count) (__builtin_constant_p((count)) ? \ + (SECUREC_MEMSET_SM((dest), (destMax), (c), (count))) : \ + (__builtin_constant_p((destMax)) ? \ + (((size_t)(destMax) > 0 && \ + (((unsigned long long)(destMax) & \ + (unsigned long long)(-2)) < SECUREC_MEM_MAX_LEN)) ? 
\ + memset_sOptTc((dest), (destMax), (c), (count)) : ERANGE) : \ + memset_sOptAsm((dest), (destMax), (c), (count)))) +#else +#define strcpy_sp strcpy_s +#define strncpy_sp strncpy_s +#define strcat_sp strcat_s +#define strncat_sp strncat_s +#define memcpy_sp memcpy_s +#define memset_sp memset_s +#endif + +#ifdef __cplusplus +} +#endif /* __cplusplus */ +#endif /* __SECUREC_H__5D13A042_DC3F_4ED9_A8D1_882811274C27 */ + diff --git a/third_party/securec/include/securectype.h b/third_party/securec/include/securectype.h new file mode 100644 index 00000000..0aed2a67 --- /dev/null +++ b/third_party/securec/include/securectype.h @@ -0,0 +1,542 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef __SECURECTYPE_H__A7BBB686_AADA_451B_B9F9_44DACDAE18A7 +#define __SECURECTYPE_H__A7BBB686_AADA_451B_B9F9_44DACDAE18A7 + +#ifndef SECUREC_USING_STD_SECURE_LIB +#if defined(_MSC_VER) && _MSC_VER >= 1400 +#if defined(__STDC_WANT_SECURE_LIB__) && __STDC_WANT_SECURE_LIB__ == 0 +/* Security functions have been provided since vs2005, default use of system library functions */ +#define SECUREC_USING_STD_SECURE_LIB 0 +#else +#define SECUREC_USING_STD_SECURE_LIB 1 +#endif +#else +#define SECUREC_USING_STD_SECURE_LIB 0 +#endif +#endif + + +/* Compatibility with older Secure C versions, shielding VC symbol redefinition warning */ +#if defined(_MSC_VER) && _MSC_VER >= 1400 && SECUREC_USING_STD_SECURE_LIB == 0 +#ifndef SECUREC_DISABLE_CRT_FUNC +#define SECUREC_DISABLE_CRT_FUNC 1 +#endif +#ifndef SECUREC_DISABLE_CRT_IMP +#define SECUREC_DISABLE_CRT_IMP 1 +#endif +#else /* MSC VER */ +#ifndef SECUREC_DISABLE_CRT_FUNC +#define SECUREC_DISABLE_CRT_FUNC 0 +#endif +#ifndef SECUREC_DISABLE_CRT_IMP +#define SECUREC_DISABLE_CRT_IMP 0 +#endif +#endif + +#if SECUREC_DISABLE_CRT_FUNC +#ifdef __STDC_WANT_SECURE_LIB__ +#undef __STDC_WANT_SECURE_LIB__ +#endif +#define __STDC_WANT_SECURE_LIB__ 0 +#endif + +#if SECUREC_DISABLE_CRT_IMP +#ifdef _CRTIMP_ALTERNATIVE +#undef _CRTIMP_ALTERNATIVE +#endif +#define _CRTIMP_ALTERNATIVE /* comment microsoft *_s function */ +#endif + +/* Compile in kernel under macro control */ +#ifndef SECUREC_IN_KERNEL +#ifdef __KERNEL__ +#define SECUREC_IN_KERNEL 1 +#else +#define SECUREC_IN_KERNEL 0 +#endif +#endif + +#if SECUREC_IN_KERNEL +#ifndef SECUREC_ENABLE_SCANF_FILE +#define SECUREC_ENABLE_SCANF_FILE 0 +#endif +#ifndef SECUREC_ENABLE_WCHAR_FUNC +#define SECUREC_ENABLE_WCHAR_FUNC 0 +#endif +#else /* SECUREC_IN_KERNEL */ +#ifndef SECUREC_ENABLE_SCANF_FILE +#define SECUREC_ENABLE_SCANF_FILE 1 +#endif +#ifndef SECUREC_ENABLE_WCHAR_FUNC +#define SECUREC_ENABLE_WCHAR_FUNC 1 +#endif +#endif + + +/* Default secure function declaration, default 
declarations for non-standard functions */ +#ifndef SECUREC_SNPRINTF_TRUNCATED +#define SECUREC_SNPRINTF_TRUNCATED 1 +#endif + +#if SECUREC_USING_STD_SECURE_LIB +#if defined(_MSC_VER) && _MSC_VER >= 1400 +/* Declare secure functions that are not available in the vs compiler */ +#ifndef SECUREC_ENABLE_MEMSET +#define SECUREC_ENABLE_MEMSET 1 +#endif +/* vs 2005 have vsnprintf_s function */ +#ifndef SECUREC_ENABLE_VSNPRINTF +#define SECUREC_ENABLE_VSNPRINTF 0 +#endif +#ifndef SECUREC_ENABLE_SNPRINTF +/* vs 2005 have vsnprintf_s function Adapt the snprintf_s of the security function */ +#define snprintf_s _snprintf_s +#define SECUREC_ENABLE_SNPRINTF 0 +#endif +/* befor vs 2010 do not have v functions */ +#if _MSC_VER <= 1600 || defined(SECUREC_FOR_V_SCANFS) +#ifndef SECUREC_ENABLE_VFSCANF +#define SECUREC_ENABLE_VFSCANF 1 +#endif +#ifndef SECUREC_ENABLE_VSCANF +#define SECUREC_ENABLE_VSCANF 1 +#endif +#ifndef SECUREC_ENABLE_VSSCANF +#define SECUREC_ENABLE_VSSCANF 1 +#endif +#endif + +#else /* _MSC_VER */ +#ifndef SECUREC_ENABLE_MEMSET +#define SECUREC_ENABLE_MEMSET 0 +#endif +#ifndef SECUREC_ENABLE_SNPRINTF +#define SECUREC_ENABLE_SNPRINTF 0 +#endif +#ifndef SECUREC_ENABLE_VSNPRINTF +#define SECUREC_ENABLE_VSNPRINTF 0 +#endif +#endif + +#ifndef SECUREC_ENABLE_MEMMOVE +#define SECUREC_ENABLE_MEMMOVE 0 +#endif +#ifndef SECUREC_ENABLE_MEMCPY +#define SECUREC_ENABLE_MEMCPY 0 +#endif +#ifndef SECUREC_ENABLE_STRCPY +#define SECUREC_ENABLE_STRCPY 0 +#endif +#ifndef SECUREC_ENABLE_STRNCPY +#define SECUREC_ENABLE_STRNCPY 0 +#endif +#ifndef SECUREC_ENABLE_STRCAT +#define SECUREC_ENABLE_STRCAT 0 +#endif +#ifndef SECUREC_ENABLE_STRNCAT +#define SECUREC_ENABLE_STRNCAT 0 +#endif +#ifndef SECUREC_ENABLE_SPRINTF +#define SECUREC_ENABLE_SPRINTF 0 +#endif +#ifndef SECUREC_ENABLE_VSPRINTF +#define SECUREC_ENABLE_VSPRINTF 0 +#endif +#ifndef SECUREC_ENABLE_SSCANF +#define SECUREC_ENABLE_SSCANF 0 +#endif +#ifndef SECUREC_ENABLE_VSSCANF +#define SECUREC_ENABLE_VSSCANF 0 +#endif +#ifndef 
SECUREC_ENABLE_SCANF +#define SECUREC_ENABLE_SCANF 0 +#endif +#ifndef SECUREC_ENABLE_VSCANF +#define SECUREC_ENABLE_VSCANF 0 +#endif + +#ifndef SECUREC_ENABLE_FSCANF +#define SECUREC_ENABLE_FSCANF 0 +#endif +#ifndef SECUREC_ENABLE_VFSCANF +#define SECUREC_ENABLE_VFSCANF 0 +#endif +#ifndef SECUREC_ENABLE_STRTOK +#define SECUREC_ENABLE_STRTOK 0 +#endif +#ifndef SECUREC_ENABLE_GETS +#define SECUREC_ENABLE_GETS 0 +#endif + +#else /* SECUREC_USE_STD_SECURE_LIB */ + +#ifndef SECUREC_ENABLE_MEMSET +#define SECUREC_ENABLE_MEMSET 1 +#endif +#ifndef SECUREC_ENABLE_MEMMOVE +#define SECUREC_ENABLE_MEMMOVE 1 +#endif +#ifndef SECUREC_ENABLE_MEMCPY +#define SECUREC_ENABLE_MEMCPY 1 +#endif +#ifndef SECUREC_ENABLE_STRCPY +#define SECUREC_ENABLE_STRCPY 1 +#endif +#ifndef SECUREC_ENABLE_STRNCPY +#define SECUREC_ENABLE_STRNCPY 1 +#endif +#ifndef SECUREC_ENABLE_STRCAT +#define SECUREC_ENABLE_STRCAT 1 +#endif +#ifndef SECUREC_ENABLE_STRNCAT +#define SECUREC_ENABLE_STRNCAT 1 +#endif +#ifndef SECUREC_ENABLE_SPRINTF +#define SECUREC_ENABLE_SPRINTF 1 +#endif +#ifndef SECUREC_ENABLE_VSPRINTF +#define SECUREC_ENABLE_VSPRINTF 1 +#endif +#ifndef SECUREC_ENABLE_SNPRINTF +#define SECUREC_ENABLE_SNPRINTF 1 +#endif +#ifndef SECUREC_ENABLE_VSNPRINTF +#define SECUREC_ENABLE_VSNPRINTF 1 +#endif +#ifndef SECUREC_ENABLE_SSCANF +#define SECUREC_ENABLE_SSCANF 1 +#endif +#ifndef SECUREC_ENABLE_VSSCANF +#define SECUREC_ENABLE_VSSCANF 1 +#endif +#ifndef SECUREC_ENABLE_SCANF +#if SECUREC_ENABLE_SCANF_FILE +#define SECUREC_ENABLE_SCANF 1 +#else +#define SECUREC_ENABLE_SCANF 0 +#endif +#endif +#ifndef SECUREC_ENABLE_VSCANF +#if SECUREC_ENABLE_SCANF_FILE +#define SECUREC_ENABLE_VSCANF 1 +#else +#define SECUREC_ENABLE_VSCANF 0 +#endif +#endif + +#ifndef SECUREC_ENABLE_FSCANF +#if SECUREC_ENABLE_SCANF_FILE +#define SECUREC_ENABLE_FSCANF 1 +#else +#define SECUREC_ENABLE_FSCANF 0 +#endif +#endif +#ifndef SECUREC_ENABLE_VFSCANF +#if SECUREC_ENABLE_SCANF_FILE +#define SECUREC_ENABLE_VFSCANF 1 +#else +#define 
SECUREC_ENABLE_VFSCANF 0 +#endif +#endif + +#ifndef SECUREC_ENABLE_STRTOK +#define SECUREC_ENABLE_STRTOK 1 +#endif +#ifndef SECUREC_ENABLE_GETS +#define SECUREC_ENABLE_GETS 1 +#endif +#endif /* SECUREC_USE_STD_SECURE_LIB */ + +#if SECUREC_ENABLE_SCANF_FILE == 0 +#if SECUREC_ENABLE_FSCANF +#undef SECUREC_ENABLE_FSCANF +#define SECUREC_ENABLE_FSCANF 0 +#endif +#if SECUREC_ENABLE_VFSCANF +#undef SECUREC_ENABLE_VFSCANF +#define SECUREC_ENABLE_VFSCANF 0 +#endif +#if SECUREC_ENABLE_SCANF +#undef SECUREC_ENABLE_SCANF +#define SECUREC_ENABLE_SCANF 0 +#endif +#if SECUREC_ENABLE_FSCANF +#undef SECUREC_ENABLE_FSCANF +#define SECUREC_ENABLE_FSCANF 0 +#endif + +#endif + +#if SECUREC_IN_KERNEL +#include +#include +#else +#include +#include +#include +#endif + +/* If you need high performance, enable the SECUREC_WITH_PERFORMANCE_ADDONS macro, default is enable . + * The macro is automatically closed on the windows platform and linux kernel + */ +#ifndef SECUREC_WITH_PERFORMANCE_ADDONS +#if SECUREC_IN_KERNEL +#define SECUREC_WITH_PERFORMANCE_ADDONS 0 +#else +#define SECUREC_WITH_PERFORMANCE_ADDONS 1 +#endif +#endif + +/* if enable SECUREC_COMPATIBLE_WIN_FORMAT, the output format will be compatible to Windows. 
*/ +#if (defined(_WIN32) || defined(_WIN64) || defined(_MSC_VER)) && !defined(SECUREC_COMPATIBLE_LINUX_FORMAT) +#if !defined(SECUREC_COMPATIBLE_WIN_FORMAT) +#define SECUREC_COMPATIBLE_WIN_FORMAT +#endif +#endif + +#if defined(SECUREC_COMPATIBLE_WIN_FORMAT) +/* in windows platform, can't use optimized function for there is no __builtin_constant_p like function */ +/* If need optimized macro, can define this: define __builtin_constant_p(x) 0 */ +#ifdef SECUREC_WITH_PERFORMANCE_ADDONS +#undef SECUREC_WITH_PERFORMANCE_ADDONS +#define SECUREC_WITH_PERFORMANCE_ADDONS 0 +#endif +#endif + +#if defined(__VXWORKS__) || defined(__vxworks) || defined(__VXWORKS) || defined(_VXWORKS_PLATFORM_) || \ + defined(SECUREC_VXWORKS_VERSION_5_4) +#if !defined(SECUREC_VXWORKS_PLATFORM) +#define SECUREC_VXWORKS_PLATFORM +#endif +#endif + +/* if enable SECUREC_COMPATIBLE_LINUX_FORMAT, the output format will be compatible to Linux. */ +#if !(defined(SECUREC_COMPATIBLE_WIN_FORMAT) || defined(SECUREC_VXWORKS_PLATFORM)) +#if !defined(SECUREC_COMPATIBLE_LINUX_FORMAT) +#define SECUREC_COMPATIBLE_LINUX_FORMAT +#endif +#endif + +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT +#include +#endif + +/* add the -DSECUREC_SUPPORT_FORMAT_WARNING compiler option to supoort -Wformat. + * default does not check the format is that the same data type in the actual code + * in the product is different in the original data type definition of VxWorks and Linux. 
+ */ +#ifndef SECUREC_SUPPORT_FORMAT_WARNING +#define SECUREC_SUPPORT_FORMAT_WARNING 0 +#endif + +/* SECUREC_PCLINT for tool do not recognize __attribute__ just for pclint */ +#if SECUREC_SUPPORT_FORMAT_WARNING && !defined(SECUREC_PCLINT) +#define SECUREC_ATTRIBUTE(x, y) __attribute__((format(printf, (x), (y)))) +#else +#define SECUREC_ATTRIBUTE(x, y) +#endif + +/* SECUREC_PCLINT for tool do not recognize __builtin_expect, just for pclint */ +#if defined(__GNUC__) && \ + ((__GNUC__ > 3 || (__GNUC__ == 3 && __GNUC_MINOR__ > 3))) && \ + !defined(SECUREC_PCLINT) +/* This is a built-in function that can be used without a declaration, if you encounter an undeclared compilation alarm, + * you can add -DSECUREC_NEED_BUILTIN_EXPECT_DECLARE to complier options + */ +#if defined(SECUREC_NEED_BUILTIN_EXPECT_DECLARE) +long __builtin_expect(long exp, long c); +#endif +#define SECUREC_LIKELY(x) __builtin_expect(!!(x), 1) +#define SECUREC_UNLIKELY(x) __builtin_expect(!!(x), 0) +#else +#define SECUREC_LIKELY(x) (x) +#define SECUREC_UNLIKELY(x) (x) +#endif + +/* define the max length of the string */ +#ifndef SECUREC_STRING_MAX_LEN +#define SECUREC_STRING_MAX_LEN (0x7fffffffUL) +#endif +#define SECUREC_WCHAR_STRING_MAX_LEN (SECUREC_STRING_MAX_LEN / sizeof(wchar_t)) + +/* add SECUREC_MEM_MAX_LEN for memcpy and memmove */ +#ifndef SECUREC_MEM_MAX_LEN +#define SECUREC_MEM_MAX_LEN (0x7fffffffUL) +#endif +#define SECUREC_WCHAR_MEM_MAX_LEN (SECUREC_MEM_MAX_LEN / sizeof(wchar_t)) + +#if SECUREC_STRING_MAX_LEN > 0x7fffffff +#error "max string is 2G" +#endif + +#if (defined(__GNUC__) && defined(__SIZEOF_POINTER__)) +#if (__SIZEOF_POINTER__ != 4) && (__SIZEOF_POINTER__ != 8) +#error "unsupported system" +#endif +#endif + +#if defined(_WIN64) || defined(WIN64) || defined(__LP64__) || defined(_LP64) +#define SECUREC_ON_64BITS +#endif + +#if (!defined(SECUREC_ON_64BITS) && defined(__GNUC__) && defined(__SIZEOF_POINTER__)) +#if __SIZEOF_POINTER__ == 8 +#define SECUREC_ON_64BITS +#endif +#endif + 
+#if defined(__SVR4) || defined(__svr4__) +#define SECUREC_ON_SOLARIS +#endif + +#if (defined(__hpux) || defined(_AIX) || defined(SECUREC_ON_SOLARIS)) +#define SECUREC_ON_UNIX +#endif + +/* codes should run under the macro SECUREC_COMPATIBLE_LINUX_FORMAT in unknow system on default, + * and strtold. The function + * strtold is referenced first at ISO9899:1999(C99), and some old compilers can + * not support these functions. Here provides a macro to open these functions: + * SECUREC_SUPPORT_STRTOLD -- if defined, strtold will be used + */ +#ifndef SECUREC_SUPPORT_STRTOLD +#define SECUREC_SUPPORT_STRTOLD 0 +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT)) +#if defined(__USE_ISOC99) || \ + (defined(_AIX) && defined(_ISOC99_SOURCE)) || \ + (defined(__hpux) && defined(__ia64)) || \ + (defined(SECUREC_ON_SOLARIS) && (!defined(_STRICT_STDC) && !defined(__XOPEN_OR_POSIX)) || \ + defined(_STDC_C99) || defined(__EXTENSIONS__)) +#undef SECUREC_SUPPORT_STRTOLD +#define SECUREC_SUPPORT_STRTOLD 1 +#endif +#endif +#if ((defined(SECUREC_WRLINUX_BELOW4) || defined(_WRLINUX_BELOW4_))) +#undef SECUREC_SUPPORT_STRTOLD +#define SECUREC_SUPPORT_STRTOLD 0 +#endif +#endif + + +#if SECUREC_WITH_PERFORMANCE_ADDONS + +#ifndef SECUREC_TWO_MIN +#define SECUREC_TWO_MIN(a, b) ((a) < (b) ? (a) : (b)) +#endif + +/* for strncpy_s performance optimization */ +#define SECUREC_STRNCPY_SM(dest, destMax, src, count) \ + (((void *)(dest) != NULL && (void *)(src) != NULL && (size_t)(destMax) > 0 && \ + (((unsigned long long)(destMax) & (unsigned long long)(-2)) < SECUREC_STRING_MAX_LEN) && \ + (SECUREC_TWO_MIN((size_t)(count), strlen(src)) + 1) <= (size_t)(destMax)) ? \ + (((size_t)(count) < strlen(src)) ? 
(memcpy((dest), (src), (count)), *((char *)(dest) + (count)) = '\0', EOK) : \ + (memcpy((dest), (src), strlen(src) + 1), EOK)) : (strncpy_error((dest), (destMax), (src), (count)))) + +#define SECUREC_STRCPY_SM(dest, destMax, src) \ + (((void *)(dest) != NULL && (void *)(src) != NULL && (size_t)(destMax) > 0 && \ + (((unsigned long long)(destMax) & (unsigned long long)(-2)) < SECUREC_STRING_MAX_LEN) && \ + (strlen(src) + 1) <= (size_t)(destMax)) ? (memcpy((dest), (src), strlen(src) + 1), EOK) : \ + (strcpy_error((dest), (destMax), (src)))) + +/* for strcat_s performance optimization */ +#if defined(__GNUC__) +#define SECUREC_STRCAT_SM(dest, destMax, src) ({ \ + int catRet = EOK; \ + if ((void *)(dest) != NULL && (void *)(src) != NULL && (size_t)(destMax) > 0 && \ + (((unsigned long long)(destMax) & (unsigned long long)(-2)) < SECUREC_STRING_MAX_LEN)) { \ + char *catTmpDst = (char *)(dest); \ + size_t catRestSize = (destMax); \ + while (catRestSize > 0 && *catTmpDst != '\0') { \ + ++catTmpDst; \ + --catRestSize; \ + } \ + if (catRestSize == 0) { \ + catRet = EINVAL; \ + } else if ((strlen(src) + 1) <= catRestSize) { \ + memcpy(catTmpDst, (src), strlen(src) + 1); \ + catRet = EOK; \ + } else { \ + catRet = ERANGE; \ + } \ + if (catRet != EOK) { \ + catRet = strcat_s((dest), (destMax), (src)); \ + } \ + } else { \ + catRet = strcat_s((dest), (destMax), (src)); \ + } \ + catRet; \ +}) +#else +#define SECUREC_STRCAT_SM(dest, destMax, src) strcat_s((dest), (destMax), (src)) +#endif + +/* for strncat_s performance optimization */ +#if defined(__GNUC__) +#define SECUREC_STRNCAT_SM(dest, destMax, src, count) ({ \ + int ncatRet = EOK; \ + if ((void *)(dest) != NULL && (void *)(src) != NULL && (size_t)(destMax) > 0 && \ + (((unsigned long long)(destMax) & (unsigned long long)(-2)) < SECUREC_STRING_MAX_LEN) && \ + (((unsigned long long)(count) & (unsigned long long)(-2)) < SECUREC_STRING_MAX_LEN)) { \ + char *ncatTmpDest = (char *)(dest); \ + size_t ncatRestSize = 
(size_t)(destMax); \ + while (ncatRestSize > 0 && *ncatTmpDest != '\0') { \ + ++ncatTmpDest; \ + --ncatRestSize; \ + } \ + if (ncatRestSize == 0) { \ + ncatRet = EINVAL; \ + } else if ((SECUREC_TWO_MIN((count), strlen(src)) + 1) <= ncatRestSize) { \ + if ((size_t)(count) < strlen(src)) { \ + memcpy(ncatTmpDest, (src), (count)); \ + *(ncatTmpDest + (count)) = '\0'; \ + } else { \ + memcpy(ncatTmpDest, (src), strlen(src) + 1); \ + } \ + } else { \ + ncatRet = ERANGE; \ + } \ + if (ncatRet != EOK) { \ + ncatRet = strncat_s((dest), (destMax), (src), (count)); \ + } \ + } else { \ + ncatRet = strncat_s((dest), (destMax), (src), (count)); \ + } \ + ncatRet; \ +}) +#else +#define SECUREC_STRNCAT_SM(dest, destMax, src, count) strncat_s((dest), (destMax), (src), (count)) +#endif + +/* SECUREC_MEMCPY_SM do NOT check buffer overlap by default */ +#define SECUREC_MEMCPY_SM(dest, destMax, src, count) \ + (!(((size_t)(destMax) == 0) || \ + (((unsigned long long)(destMax) & (unsigned long long)(-2)) > SECUREC_MEM_MAX_LEN) || \ + ((size_t)(count) > (size_t)(destMax)) || ((void *)(dest)) == NULL || ((void *)(src) == NULL))? \ + (memcpy((dest), (src), (count)), EOK) : \ + (memcpy_s((dest), (destMax), (src), (count)))) + +#define SECUREC_MEMSET_SM(dest, destMax, c, count) \ + (!(((size_t)(destMax) == 0) || \ + (((unsigned long long)(destMax) & (unsigned long long)(-2)) > SECUREC_MEM_MAX_LEN) || \ + ((void *)(dest) == NULL) || ((size_t)(count) > (size_t)(destMax))) ? \ + (memset((dest), (c), (count)), EOK) : \ + (memset_s((dest), (destMax), (c), (count)))) + +#endif +#endif /* __SECURECTYPE_H__A7BBB686_AADA_451B_B9F9_44DACDAE18A7 */ + diff --git a/third_party/securec/src/CMakeLists.txt b/third_party/securec/src/CMakeLists.txt new file mode 100644 index 00000000..60ec0a90 --- /dev/null +++ b/third_party/securec/src/CMakeLists.txt @@ -0,0 +1,3 @@ +aux_source_directory(. 
SECUREC_SRCS) + +add_library(securec STATIC ${SECUREC_SRCS}) diff --git a/third_party/securec/src/fscanf_s.c b/third_party/securec/src/fscanf_s.c new file mode 100644 index 00000000..8ceda9ac --- /dev/null +++ b/third_party/securec/src/fscanf_s.c @@ -0,0 +1,56 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * + * The fscanf_s function is equivalent to fscanf except that the c, s, + * and [ conversion specifiers apply to a pair of arguments (unless assignment suppression is indicated by a*) + * The fscanf function reads data from the current position of stream into + * the locations given by argument (if any). Each argument must be a pointer + * to a variable of a type that corresponds to a type specifier in format. + * format controls the interpretation of the input fields and has the same + * form and function as the format argument for scanf. + * + * + * stream Pointer to FILE structure. + * format Format control string, see Format Specifications. + * ... Optional arguments. + * + * + * ... The convered value stored in user assigned address + * + * + * Each of these functions returns the number of fields successfully converted + * and assigned; the return value does not include fields that were read but + * not assigned. A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. 
+ */ +int fscanf_s(FILE *stream, const char *format, ...) +{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vfscanf_s(stream, format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} + + diff --git a/third_party/securec/src/fwscanf_s.c b/third_party/securec/src/fwscanf_s.c new file mode 100644 index 00000000..f826b7db --- /dev/null +++ b/third_party/securec/src/fwscanf_s.c @@ -0,0 +1,55 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * + * The fwscanf_s function is the wide-character equivalent of the fscanf_s function + * The fwscanf_s function reads data from the current position of stream into + * the locations given by argument (if any). Each argument must be a pointer + * to a variable of a type that corresponds to a type specifier in format. + * format controls the interpretation of the input fields and has the same + * form and function as the format argument for scanf. + * + * + * stream Pointer to FILE structure. + * format Format control string, see Format Specifications. + * ... Optional arguments. + * + * + * ... 
The converted value stored in user assigned address + * + * + * Each of these functions returns the number of fields successfully converted + * and assigned; the return value does not include fields that were read but + * not assigned. A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. + */ +int fwscanf_s(FILE *stream, const wchar_t *format, ...) +{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vfwscanf_s(stream, format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} + + diff --git a/third_party/securec/src/gets_s.c b/third_party/securec/src/gets_s.c new file mode 100644 index 00000000..57fd6231 --- /dev/null +++ b/third_party/securec/src/gets_s.c @@ -0,0 +1,75 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "securecutil.h" + +static void SecTrimCRLF(char *buffer, size_t len) +{ + int i; + /* No need to determine whether integer overflow exists */ + for (i = (int)(len - 1); i >= 0 && (buffer[i] == '\r' || buffer[i] == '\n'); --i) { + buffer[i] = '\0'; + } + return; +} + +/* + * + * The gets_s function reads at most one less than the number of characters + * specified by destMax from the stream pointed to by stdin, into the array pointed to by buffer + * The line consists of all characters up to and including + * the first newline character ('\n'). gets_s then replaces the newline + * character with a null character ('\0') before returning the line. + * If the first character read is the end-of-file character, a null character + * is stored at the beginning of buffer and NULL is returned. + * + * + * buffer Storage location for input string. + * numberOfElements The size of the buffer. + * + * + * buffer is updated + * + * + * buffer Successful operation + * NULL Improper parameter or read fail + */ +char *gets_s(char *buffer, size_t numberOfElements) +{ + size_t len; +#ifdef SECUREC_COMPATIBLE_WIN_FORMAT + size_t bufferSize = ((numberOfElements == (size_t)-1) ? 
SECUREC_STRING_MAX_LEN : numberOfElements); +#else + size_t bufferSize = numberOfElements; +#endif + + if (buffer == NULL || bufferSize == 0 || bufferSize > SECUREC_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_PARAMTER("gets_s"); + return NULL; + } + + if (fgets(buffer, (int)bufferSize, stdin) == NULL) { + return NULL; + } + + len = strlen(buffer); + if (len > 0 && len < bufferSize) { + SecTrimCRLF(buffer, len); + } + + return buffer; +} + diff --git a/third_party/securec/src/input.inl b/third_party/securec/src/input.inl new file mode 100644 index 00000000..a5a92e56 --- /dev/null +++ b/third_party/securec/src/input.inl @@ -0,0 +1,2125 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef INPUT_INL_5D13A042_DC3F_4ED9_A8D1_882811274C27 +#define INPUT_INL_5D13A042_DC3F_4ED9_A8D1_882811274C27 + +#if SECUREC_IN_KERNEL +#include +#ifndef EOF +#define EOF (-1) +#endif +#else +#if !defined(SECUREC_SYSAPI4VXWORKS) && !defined(SECUREC_CTYPE_MACRO_ADAPT) +#include +#ifdef SECUREC_FOR_WCHAR +#include /* for iswspace */ +#endif +#endif +#endif + +#define SECUREC_NUM_WIDTH_SHORT 0 +#define SECUREC_NUM_WIDTH_INT 1 +#define SECUREC_NUM_WIDTH_LONG 2 +#define SECUREC_NUM_WIDTH_LONG_LONG 3 /* also long double */ + +#define SECUREC_BUF_EXT_MUL 2 +#define SECUREC_BUFFERED_BLOK_SIZE 1024 + +#if defined(SECUREC_VXWORKS_PLATFORM) && !defined(va_copy) && !defined(__va_copy) +/* the name is the same as system macro. 
*/ +#define __va_copy(d, s) do { \ + size_t size_of_d = (size_t)sizeof(d); \ + size_t size_of_s = (size_t)sizeof(s); \ + if (size_of_d != size_of_s) { \ + (void)memcpy((d), (s), sizeof(va_list)); \ + } else { \ + (void)memcpy(&(d), &(s), sizeof(va_list)); \ + } \ +} SECUREC_WHILE_ZERO +#endif + + +#define SECUREC_MULTI_BYTE_MAX_LEN 6 +/* Record a flag for each bit */ +#define SECUREC_BRACKET_INDEX(x) ((unsigned int)(x) >> 3) +#define SECUREC_BRACKET_VALUE(x) ((unsigned char)(1 << ((unsigned int)(x) & 7))) + + +/* Compatibility macro name cannot be modifie */ +#ifndef UNALIGNED +#if !(defined(_M_IA64)) && !(defined(_M_AMD64)) +#define UNALIGNED +#else +#define UNALIGNED __unaligned +#endif +#endif + +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) +/* Max 64bit value is 0xffffffffffffffff */ +#define SECUREC_MAX_64BITS_VALUE 18446744073709551615ULL +#define SECUREC_MAX_64BITS_VALUE_DIV_TEN 1844674407370955161ULL +#define SECUREC_MAX_64BITS_VALUE_CUT_LAST_DIGIT 18446744073709551610ULL +#define SECUREC_MIN_64BITS_NEG_VALUE 9223372036854775808ULL +#define SECUREC_MAX_64BITS_POS_VALUE 9223372036854775807ULL +#define SECUREC_MIN_32BITS_NEG_VALUE 2147483648ULL +#define SECUREC_MAX_32BITS_POS_VALUE 2147483647ULL +#define SECUREC_MAX_32BITS_VALUE 4294967295ULL +#define SECUREC_MAX_32BITS_VALUE_INC 4294967296ULL +#define SECUREC_MAX_32BITS_VALUE_DIV_TEN 429496729ULL +#define SECUREC_LONG_BIT_NUM ((unsigned int)(sizeof(long) << 3U)) + +#define SECUREC_LONG_HEX_BEYOND_MAX(number) (((number) >> (SECUREC_LONG_BIT_NUM - 4U)) > 0) +#define SECUREC_LONG_OCTAL_BEYOND_MAX(number) (((number) >> (SECUREC_LONG_BIT_NUM - 3U)) > 0) + +#define SECUREC_QWORD_HEX_BEYOND_MAX(number) (((number) >> (64U - 4U)) > 0) +#define SECUREC_QWORD_OCTAL_BEYOND_MAX(number) (((number) >> (64U - 3U)) > 0) + +#define SECUREC_LP64_BIT_WIDTH 64 +#define SECUREC_LP32_BIT_WIDTH 32 + +#endif + +#define SECUREC_CHAR(x) (x) +#define SECUREC_BRACE '{' /* [ to { */ + +#ifdef 
SECUREC_FOR_WCHAR +#define SECUREC_SCANF_BRACKET_CONDITION(comChr, ch, table, mask) ((comChr) == SECUREC_BRACE && \ + (table) != NULL && \ + (((table)[((unsigned int)(int)(ch) & SECUREC_CHAR_MASK) >> 3] ^ (mask)) & \ + (1 << ((unsigned int)(int)(ch) & 7)))) +#else +#define SECUREC_SCANF_BRACKET_CONDITION(comChr, ch, table, mask) ((comChr) == SECUREC_BRACE && \ + (((table)[((unsigned char)(ch) & 0xff) >> 3] ^ (mask)) & (1 << ((unsigned char)(ch) & 7)))) +#endif +#define SECUREC_SCANF_STRING_CONDITION(comChr, ch) ((comChr) == SECUREC_CHAR('s') && \ + (!((ch) >= SECUREC_CHAR('\t') && (ch) <= SECUREC_CHAR('\r')) && (ch) != SECUREC_CHAR(' '))) + +/* Do not use |= optimize this code, it will cause compiling warning */ +/* only supports wide characters with a maximum length of two bytes */ +#define SECUREC_BRACKET_SET_BIT(table, ch) do { \ + unsigned int tableIndex = SECUREC_BRACKET_INDEX(((unsigned int)(int)(ch) & SECUREC_CHAR_MASK)); \ + unsigned int tableValue = SECUREC_BRACKET_VALUE(((unsigned int)(int)(ch) & SECUREC_CHAR_MASK)); \ + (table)[tableIndex] = (unsigned char)((table)[tableIndex] | tableValue); \ +} SECUREC_WHILE_ZERO + +#ifdef SECUREC_FOR_WCHAR +/* table size is 32 x 256 */ +#define SECUREC_BRACKET_TABLE_SIZE 8192 +#define SECUREC_EOF WEOF +#define SECUREC_MB_LEN 16 /* max. 
# bytes in multibyte char ,see MB_LEN_MAX */ +/* int to unsigned int clear e571 */ +#define SECUREC_IS_DIGIT(chr) (!((unsigned int)(int)(chr) & 0xff00) && isdigit(((unsigned int)(int)(chr) & 0x00ff))) +#define SECUREC_IS_XDIGIT(chr) (!((unsigned int)(int)(chr) & 0xff00) && isxdigit(((unsigned int)(int)(chr) & 0x00ff))) +#define SECUREC_IS_SPACE(chr) iswspace((wint_t)(int)(chr)) +#else +#define SECUREC_BRACKET_TABLE_SIZE 32 +#define SECUREC_EOF EOF +#define SECUREC_IS_DIGIT(chr) isdigit((unsigned char)(chr) & 0x00ff) +#define SECUREC_IS_XDIGIT(chr) isxdigit((unsigned char)(chr) & 0x00ff) +#define SECUREC_IS_SPACE(chr) isspace((unsigned char)(chr) & 0x00ff) +#endif + + +static SecInt SecSkipSpaceChar(SecFileStream *stream, int *counter); +static SecInt SecGetChar(SecFileStream *stream, int *counter); +static void SecUnGetChar(SecInt ch, SecFileStream *stream, int *counter); + +typedef struct { +#ifdef SECUREC_FOR_WCHAR + unsigned char *table; /* default NULL */ +#else + unsigned char table[SECUREC_BRACKET_TABLE_SIZE]; /* Array length is large enough in application scenarios */ +#endif + unsigned char mask; /* default 0 */ +} SecBracketTable; + +#ifdef SECUREC_FOR_WCHAR +#define SECUREC_INIT_BRACKET_TABLE { NULL, 0 } +#else +#define SECUREC_INIT_BRACKET_TABLE { { 0 }, 0 } +#endif + +#if SECUREC_ENABLE_SCANF_FLOAT +typedef struct { + size_t floatStrSize; /* tialization must be length of buffer in charater */ + size_t floatStrUsedLen; /* store float string len */ + SecChar buffer[SECUREC_FLOAT_BUFSIZE + 1]; + SecChar *floatStr; /* Initialization must point to buffer */ + SecChar *allocatedFloatStr; /* Initialization must be NULL to store alloced point */ +} SecFloatSpec; +#endif + +typedef struct { + SecUnsignedInt64 number64; + unsigned long number; + int numberWidth; /* 0 = SHORT, 1 = int, > 1 long or L_DOUBLE */ + int isInt64Arg; /* 1 for 64-bit integer, 0 otherwise */ + int negative; /* 0 is positive */ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && 
!(defined(SECUREC_ON_UNIX))) + int beyondMax; /* Non-zero means beyond */ +#endif + void *argPtr; /* Variable parameter pointer */ + size_t arrayWidth; /* length of pointer Variable parameter, in charaters */ + int width; /* width number in format */ + int widthSet; /* 0 is not set width in format */ + int comChr; /* Lowercase format conversion characters */ + int oriComChr; /* store number conversion */ + signed char isWChar; /* -1/0 not wchar, 1 for wchar */ + char suppress; /* 0 is not have %* in format */ +} SecScanSpec; + +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) +#define SECUREC_INIT_NUMBER_SPEC { 0, 0, 0, 0, 0, 0, NULL, 0, 0, 0, 0, 0, 0 } +#else +#define SECUREC_INIT_NUMBER_SPEC { 0, 0, 0, 0, 0, 0, NULL, 0, 0, 0, 0, 0 } +#endif + +#ifdef SECUREC_FOR_WCHAR +#define SECUREC_GETC fgetwc +#define SECUREC_UN_GETC ungetwc +#define SECUREC_CHAR_MASK 0xffff +#else +#define SECUREC_GETC fgetc +#define SECUREC_UN_GETC ungetc +#define SECUREC_CHAR_MASK 0xff +#endif + +/* + * Determine if it is a 64-bit pointer function + * return 0 is not ,1 is 64bit pointer + */ +static int SecIs64BitPtr(size_t sizeOfVoidStar) +{ + /* point size is 4 or 8 , Under the 64 bit system, the value not 0 */ + /* to clear e778 */ + if ((sizeOfVoidStar & sizeof(SecInt64)) != 0) { + return 1; + } + return 0; +} + +#if SECUREC_ENABLE_SCANF_FLOAT + +/* + * Convert a floating point string to a floating point number + */ +static void SecAssignFloat(const char *floatStr, int numberWidth, void *argPtr) +{ + char *endPtr = NULL; + double d; +#if SECUREC_SUPPORT_STRTOLD + if (numberWidth == SECUREC_NUM_WIDTH_LONG_LONG) { + long double d2 = strtold(floatStr, &endPtr); + *(long double UNALIGNED *)(argPtr) = d2; + return; + } +#endif + d = strtod(floatStr, &endPtr); + if (numberWidth > SECUREC_NUM_WIDTH_INT) { + *(double UNALIGNED *)(argPtr) = (double)d; + } else { + *(float UNALIGNED *)(argPtr) = (float)d; + } +} + +#ifdef SECUREC_FOR_WCHAR +/* + * Convert a floating 
point wchar string to a floating point number + * Success ret 0 + */ +static int SecAssignFloatW(const SecFloatSpec *floatSpec, const SecScanSpec *spec) +{ + /* convert float string */ + size_t mbsLen; + size_t tempFloatStrLen = (size_t)(floatSpec->floatStrSize + 1) * sizeof(wchar_t); + char *tempFloatStr = (char *)SECUREC_MALLOC(tempFloatStrLen); + + if (tempFloatStr == NULL) { + return -1; + } + tempFloatStr[0] = '\0'; + SECUREC_MASK_MSVC_CRT_WARNING + mbsLen = wcstombs(tempFloatStr, floatSpec->floatStr, tempFloatStrLen - 1); + SECUREC_END_MASK_MSVC_CRT_WARNING + if (mbsLen != (size_t)-1) { + tempFloatStr[mbsLen] = '\0'; + SecAssignFloat(tempFloatStr, spec->numberWidth, spec->argPtr); + } else { + SECUREC_FREE(tempFloatStr); + return -1; + } + SECUREC_FREE(tempFloatStr); + return 0; +} +#endif +/* + * Splice floating point string + * return 0 OK + */ +static int SecUpdateFloatString(SecChar ch, SecFloatSpec *floatSpec) +{ + floatSpec->floatStr[floatSpec->floatStrUsedLen++] = ch; /* ch must be '0' - '9' */ + if (floatSpec->floatStrUsedLen < floatSpec->floatStrSize) { + return 0; + } + if (floatSpec->allocatedFloatStr == NULL) { + /* add 1 to clear ZERO LENGTH ALLOCATIONS warning */ + size_t oriBufSize = floatSpec->floatStrSize* (SECUREC_BUF_EXT_MUL * sizeof(SecChar)) + 1; + void *tmpPointer = (void *)SECUREC_MALLOC(oriBufSize); + if (tmpPointer == NULL) { + return -1; + } + if (memcpy_s(tmpPointer, oriBufSize, floatSpec->floatStr, floatSpec->floatStrSize * sizeof(SecChar)) != EOK) { + SECUREC_FREE(tmpPointer); /* This is a dead code, just to meet the coding requirements */ + return -1; + } + floatSpec->floatStr = (SecChar *) (tmpPointer); + floatSpec->allocatedFloatStr = (SecChar *) (tmpPointer); /* use to clear free on stack warning */ + floatSpec->floatStrSize *= SECUREC_BUF_EXT_MUL; /* this is OK, oriBufSize plus 1 just clear warning */ + return 0; + } else { + /* LSD 2014.3.6 fix, replace realloc to malloc to avoid heap injection */ + size_t oriBufSize = 
floatSpec->floatStrSize * sizeof(SecChar); + size_t nextSize = (oriBufSize * SECUREC_BUF_EXT_MUL) + 1; /* add 1 to clear satic check tool warning */ + /* Prevents integer overflow when calculating the wide character length. + * The maximum length of SECUREC_MAX_WIDTH_LEN is enough + */ + if (nextSize <= SECUREC_MAX_WIDTH_LEN) { + void *tmpPointer = (void *)SECUREC_MALLOC(nextSize); + if (tmpPointer == NULL) { + return -1; + } + if (memcpy_s(tmpPointer, nextSize, floatSpec->floatStr, oriBufSize) != EOK) { + SECUREC_FREE(tmpPointer); /* This is a dead code, just to meet the coding requirements */ + return -1; + } + if (memset_s(floatSpec->floatStr, oriBufSize, 0, oriBufSize) != EOK) { + SECUREC_FREE(tmpPointer); /* This is a dead code, just to meet the coding requirements */ + return -1; + } + SECUREC_FREE(floatSpec->floatStr); + + floatSpec->floatStr = (SecChar *) (tmpPointer); + floatSpec->allocatedFloatStr = (SecChar *) (tmpPointer); /* use to clear free on stack warning */ + floatSpec->floatStrSize *= SECUREC_BUF_EXT_MUL; /* this is OK, oriBufSize plus 1 just clear warning */ + return 0; + } + } + return -1; +} +#endif + +#ifndef SECUREC_FOR_WCHAR +/* LSD only multi-bytes string need isleadbyte() function */ +static int SecIsLeadByte(SecInt ch) +{ + unsigned int c = (unsigned int)ch; +#if !(defined(_MSC_VER) || defined(_INC_WCTYPE)) + return (int)(c & 0x80); +#else + return (int)isleadbyte((int)(c & 0xff)); +#endif +} +#endif + +/* + * Parsing whether it is a wide character + */ +static void SecUpdateWcharFlagByType(SecUnsignedChar ch, SecScanSpec *spec) +{ +#if defined(SECUREC_FOR_WCHAR) && (defined(SECUREC_COMPATIBLE_WIN_FORMAT)) + signed char flagForUpperType = -1; + signed char flagForLowerType = 1; +#else + signed char flagForUpperType = 1; + signed char flagForLowerType = -1; +#endif + /* if no l or h flag */ + if (spec->isWChar == 0) { + if ((ch == SECUREC_CHAR('C')) || (ch == SECUREC_CHAR('S'))) { + spec->isWChar = flagForUpperType; + } else { + 
spec->isWChar = flagForLowerType; + } + } + return; +} +/* + * decode %l %ll + */ +static void SecDecodeScanQualifierL(const SecUnsignedChar **format, SecScanSpec *spec) +{ + const SecUnsignedChar *fmt = *format; + if (*(fmt + 1) == SECUREC_CHAR('l')) { + spec->isInt64Arg = 1; + spec->numberWidth = SECUREC_NUM_WIDTH_LONG_LONG; + ++fmt; + } else { + spec->numberWidth = SECUREC_NUM_WIDTH_LONG; +#if defined(SECUREC_ON_64BITS) && !(defined(SECUREC_COMPATIBLE_WIN_FORMAT)) + /* on window 64 system sizeof long is 32bit */ + spec->isInt64Arg = 1; +#endif + spec->isWChar = 1; + } + *format = fmt; +} + +/* + * decode %I %I43 %I64 %Id %Ii %Io ... + * set finishFlag to 1 finish Flag + */ +static void SecDecodeScanQualifierI(const SecUnsignedChar **format, SecScanSpec *spec, int *finishFlag) +{ + const SecUnsignedChar *fmt = *format; + if ((*(fmt + 1) == SECUREC_CHAR('6')) && + (*(fmt + 2) == SECUREC_CHAR('4'))) { /* offset 2 for I64 */ + spec->isInt64Arg = 1; + *format = *format + 2; /* add 2 to skip I64 point to '4' next loop will inc */ + } else if ((*(fmt + 1) == SECUREC_CHAR('3')) && + (*(fmt + 2) == SECUREC_CHAR('2'))) { /* offset 2 for I32 */ + *format = *format + 2; /* add 2 to skip I32 point to '2' next loop will inc */ + } else if ((*(fmt + 1) == SECUREC_CHAR('d')) || + (*(fmt + 1) == SECUREC_CHAR('i')) || + (*(fmt + 1) == SECUREC_CHAR('o')) || + (*(fmt + 1) == SECUREC_CHAR('x')) || + (*(fmt + 1) == SECUREC_CHAR('X'))) { + spec->isInt64Arg = SecIs64BitPtr(sizeof(void *)); + } else { + /* for %I */ + spec->isInt64Arg = SecIs64BitPtr(sizeof(void *)); + *finishFlag = 1; + } +} + +static int SecDecodeScanWidth(const SecUnsignedChar **format, SecScanSpec *spec) +{ + const SecUnsignedChar *fmt = *format; + while (SECUREC_IS_DIGIT(*fmt)) { + spec->widthSet = 1; + if (SECUREC_MUL_TEN_ADD_BEYOND_MAX(spec->width)) { + return -1; + } + spec->width = (int)SECUREC_MUL_TEN((unsigned int)spec->width) + (unsigned char)(*fmt - SECUREC_CHAR('0')); + ++fmt; + } + *format = fmt; + return 
0; +} + +/* + * init default flags for each format + */ +static void SecSetDefaultScanSpec(SecScanSpec *spec) +{ + spec->number64 = 0; + spec->number = 0; + spec->numberWidth = SECUREC_NUM_WIDTH_INT; /* 0 = SHORT, 1 = int, > 1 long or L_DOUBLE */ + spec->isInt64Arg = 0; /* 1 for 64-bit integer, 0 otherwise */ + spec->negative = 0; +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + spec->beyondMax = 0; +#endif + spec->argPtr = NULL; + spec->arrayWidth = 0; + spec->width = 0; + spec->widthSet = 0; + spec->comChr = 0; + spec->isWChar = 0; + spec->suppress = 0; +} + +/* + * decode qualifier %I %L %h ... + * set finishFlag to 1 finish Flag + */ +static void SecDecodeScanQualifier(const SecUnsignedChar **format, SecScanSpec *spec, int *finishFlag) +{ + switch ((int)(unsigned char)(**(format))) { + case SECUREC_CHAR('F'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('N'): + break; + case SECUREC_CHAR('h'): + --spec->numberWidth; /* h for SHORT , hh for CHAR */ + spec->isWChar = -1; + break; +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT + case SECUREC_CHAR('j'): + spec->numberWidth = SECUREC_NUM_WIDTH_LONG_LONG; /* intmax_t or uintmax_t */ + spec->isInt64Arg = 1; + break; + case SECUREC_CHAR('t'): /* fall-through */ /* FALLTHRU */ +#endif + case SECUREC_CHAR('z'): +#ifdef SECUREC_ON_64BITS + spec->numberWidth = SECUREC_NUM_WIDTH_LONG_LONG; + spec->isInt64Arg = 1; +#else + spec->numberWidth = SECUREC_NUM_WIDTH_LONG; +#endif + break; + case SECUREC_CHAR('L'): /* long double */ /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('q'): + spec->numberWidth = SECUREC_NUM_WIDTH_LONG_LONG; + spec->isInt64Arg = 1; + break; + case SECUREC_CHAR('l'): + SecDecodeScanQualifierL(format, spec); + break; + case SECUREC_CHAR('w'): + spec->isWChar = 1; + break; + case SECUREC_CHAR('*'): + spec->suppress = 1; + break; + case SECUREC_CHAR('I'): + SecDecodeScanQualifierI(format, spec, finishFlag); + break; + default: + *finishFlag = 1; + break; + } + +} +/* + 
* decode width and qualifier in format + */ +static int SecDecodeScanFlag(const SecUnsignedChar **format, SecScanSpec *spec) +{ + const SecUnsignedChar *fmt = *format; + int finishFlag = 0; + + do { + ++fmt; /* first skip % , next seek fmt */ + /* may %*6d , so put it inside the loop */ + if (SecDecodeScanWidth(&fmt, spec) != 0) { + return -1; + } + SecDecodeScanQualifier(&fmt, spec, &finishFlag); + } while (finishFlag == 0); + *format = fmt; + return 0; +} + + + + + +/* + * Judging whether a zeroing buffer is needed according to different formats + */ +static int SecDecodeClearFormat(const SecUnsignedChar *format, int *comChr) +{ + const SecUnsignedChar *fmt = format; + /* to lowercase */ + int ch = (unsigned char)(*fmt) | (SECUREC_CHAR('a') - SECUREC_CHAR('A')); + if (!(ch == SECUREC_CHAR('c') || ch == SECUREC_CHAR('s') || ch == SECUREC_BRACE)) { + return -1; /* first argument is not a string type */ + } + if (ch == SECUREC_BRACE) { +#if !(defined(SECUREC_COMPATIBLE_WIN_FORMAT)) + if (*fmt == SECUREC_CHAR('{')) { + return -1; + } +#endif + ++fmt; + if (*fmt == SECUREC_CHAR('^')) { + ++fmt; + } + if (*fmt == SECUREC_CHAR(']')) { + ++fmt; + } + while ((*fmt != SECUREC_CHAR('\0')) && (*fmt != SECUREC_CHAR(']'))) { + ++fmt; + } + if (*fmt == SECUREC_CHAR('\0')) { + return -1; /* trunc'd format string */ + } + } + *comChr = ch; + return 0; +} + +/* + * add L'\0' for wchar string , add '\0' for char string + */ +static void SecAddEndingZero(void *ptr, const SecScanSpec *spec) +{ + *(char *)ptr = '\0'; + (void)spec; /* clear not use */ +#if SECUREC_HAVE_WCHART + if (spec->isWChar > 0) { + *(wchar_t UNALIGNED *)ptr = L'\0'; + } +#endif +} + +#ifdef SECUREC_FOR_WCHAR +/* + * Clean up the first %s %c buffer to zero for wchar version + */ +void SecClearDestBufW(const wchar_t *buffer, const wchar_t *format, va_list argList) +#else +/* + * Clean up the first %s %c buffer to zero for char version + */ +void SecClearDestBuf(const char *buffer, const char *format, va_list 
argList) +#endif +{ + + va_list argListSave; /* backup for argList value, this variable don't need initialized */ + SecScanSpec spec; + int comChr = 0; + const SecUnsignedChar *fmt = (const SecUnsignedChar *)format; + if (fmt == NULL) { + return; + } + + /* find first % */ + while (*fmt != SECUREC_CHAR('\0') && *fmt != SECUREC_CHAR('%')) { + ++fmt; + } + if (*fmt == SECUREC_CHAR('\0')) { + return; + } + + SecSetDefaultScanSpec(&spec); + if (SecDecodeScanFlag(&fmt, &spec) != 0) { + return; + } + + /* update wchar flag for %S %C */ + SecUpdateWcharFlagByType(*fmt, &spec); + + if (spec.suppress != 0 || SecDecodeClearFormat(fmt, &comChr) != 0) { + return; + } + + if ((buffer != NULL) && (*buffer != SECUREC_CHAR('\0')) && (comChr != SECUREC_CHAR('s'))) { + /* when buffer not empty just clear %s. + * example call sscanf by argment of (" \n", "%s", s, sizeof(s)) + */ + return; + } + (void)memset(&argListSave, 0, sizeof(va_list)); /* to clear e530 argListSave not initialized */ +#if defined(va_copy) + va_copy(argListSave, argList); +#elif defined(__va_copy) /* for vxworks */ + __va_copy(argListSave, argList); +#else + argListSave = argList; +#endif + do { + void *argPtr = (void *)va_arg(argListSave, void *); + /* Get the next argument - size of the array in characters */ + size_t arrayWidth = ((size_t)(va_arg(argListSave, size_t))) & 0xFFFFFFFFUL; + va_end(argListSave); + /* to clear e438 last value assigned not used , the compiler will optimize this code */ + (void)argListSave; + /* There is no need to judge the upper limit */ + if (arrayWidth == 0 || argPtr == NULL) { + return; + } + + /* clear one char */ + SecAddEndingZero(argPtr, &spec); + } SECUREC_WHILE_ZERO; + return; + +} + +/* + * Assign number to output buffer + */ +static void SecAssignNumber(const SecScanSpec *spec) +{ + void *argPtr = spec->argPtr; + if (spec->isInt64Arg != 0) { +#if defined(SECUREC_VXWORKS_PLATFORM) +#if defined(SECUREC_VXWORKS_PLATFORM_COMP) + *(SecInt64 UNALIGNED *)argPtr = 
(SecInt64)(spec->number64); +#else + /* take number64 as unsigned number unsigned to int clear Compile warning */ + *(SecInt64 UNALIGNED *)argPtr = *(SecUnsignedInt64 *)(&(spec->number64)); +#endif +#else + /* take number64 as unsigned number */ + *(SecInt64 UNALIGNED *)argPtr = (SecInt64)(spec->number64); +#endif + return; + } + if (spec->numberWidth > SECUREC_NUM_WIDTH_INT) { + /* take number as unsigned number */ + *(long UNALIGNED *)argPtr = (long)(spec->number); + } else if (spec->numberWidth == SECUREC_NUM_WIDTH_INT) { + *(int UNALIGNED *)argPtr = (int)(spec->number); + } else if (spec->numberWidth == SECUREC_NUM_WIDTH_SHORT) { + /* take number as unsigned number */ + *(short UNALIGNED *)argPtr = (short)(spec->number); + } else { /* < 0 for hh format modifier */ + /* take number as unsigned number */ + *(char UNALIGNED *)argPtr = (char)(spec->number); + } +} + +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) +/* + * Judge the long bit width + */ +static int SecIsLongBitEqual(int bitNum) +{ + return (unsigned int)bitNum == SECUREC_LONG_BIT_NUM; +} +#endif +/* + * Convert hexadecimal characters to decimal value + */ +static int SecHexValueOfChar(SecInt ch) +{ + /* use isdigt Causing tool false alarms */ + return (int)((ch >= '0' && ch <= '9') ? ((unsigned char)ch - '0') : + ((((unsigned char)ch | (unsigned char)('a' - 'A')) - ('a')) + 10)); /* Adding 10 is to hex value */ +} + + + +/* + * Parse decimal character to integer for 32bit . 
+ */ +static void SecDecodeNumberDecimal(SecInt ch, SecScanSpec *spec) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + unsigned long decimalEdge = SECUREC_MAX_32BITS_VALUE_DIV_TEN; +#ifdef SECUREC_ON_64BITS + if (SecIsLongBitEqual(SECUREC_LP64_BIT_WIDTH)) { + decimalEdge = (unsigned long)SECUREC_MAX_64BITS_VALUE_DIV_TEN; + } +#else + if (SecIsLongBitEqual(SECUREC_LP32_BIT_WIDTH)) { + decimalEdge = SECUREC_MAX_32BITS_VALUE_DIV_TEN; + } +#endif + if (spec->number > decimalEdge) { + spec->beyondMax = 1; + } +#endif + spec->number = SECUREC_MUL_TEN(spec->number); +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (spec->number == SECUREC_MUL_TEN(decimalEdge)) { + SecUnsignedInt64 number64As = (unsigned long)SECUREC_MAX_64BITS_VALUE - spec->number; + if (number64As < (SecUnsignedInt64)((SecUnsignedInt)ch - SECUREC_CHAR('0'))) { + spec->beyondMax = 1; + } + } +#endif + spec->number += (unsigned long)((SecUnsignedInt)ch - SECUREC_CHAR('0')); + +} + + +/* + * Parse Hex character to integer for 32bit . + */ +static void SecDecodeNumberHex(SecInt ch, SecScanSpec *spec) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (SECUREC_LONG_HEX_BEYOND_MAX(spec->number)) { + spec->beyondMax = 1; + } +#endif + spec->number = SECUREC_MUL_SIXTEEN(spec->number); + spec->number += (unsigned long)(unsigned int)SecHexValueOfChar(ch); +} + + +/* + * Parse Octal character to integer for 32bit . 
+ */ +static void SecDecodeNumberOctal(SecInt ch, SecScanSpec *spec) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (SECUREC_LONG_OCTAL_BEYOND_MAX(spec->number)) { + spec->beyondMax = 1; + } +#endif + spec->number = SECUREC_MUL_EIGHT(spec->number); + spec->number += (unsigned long)((SecUnsignedInt)ch - SECUREC_CHAR('0')); +} + + +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) +/* Compatible with integer negative values other than int */ +static void SecFinishNumberNegativeOther(int comChr, int numberWidth, SecScanSpec *spec) +{ + if ((comChr == SECUREC_CHAR('d')) || (comChr == SECUREC_CHAR('i'))) { + if (spec->number > (unsigned long)(1ULL << (SECUREC_LONG_BIT_NUM - 1))) { + spec->number = (unsigned long)(1ULL << (SECUREC_LONG_BIT_NUM - 1)); + } else { + spec->number = (unsigned long)(-(long)spec->number); + } + if (spec->beyondMax != 0) { + if (numberWidth < SECUREC_NUM_WIDTH_INT) { + spec->number = 0; + } else if (numberWidth == SECUREC_NUM_WIDTH_LONG) { + spec->number = ((unsigned long)(1UL << (SECUREC_LONG_BIT_NUM - 1))); + } + } + } else { /* o, u, x, X, p */ + spec->number = (unsigned long)(-(long)spec->number); + if (spec->beyondMax != 0) { + spec->number |= (unsigned long)SECUREC_MAX_64BITS_VALUE; + } + } +} +/* Compatible processing of integer negative numbers */ +static void SecFinishNumberNegativeInt(int comChr, SecScanSpec *spec) +{ + if ((comChr == SECUREC_CHAR('d')) || (comChr == SECUREC_CHAR('i'))) { +#ifdef SECUREC_ON_64BITS + if (SecIsLongBitEqual(SECUREC_LP64_BIT_WIDTH)) { + if ((spec->number > SECUREC_MIN_64BITS_NEG_VALUE)) { + spec->number = 0; + } else { + spec->number = (unsigned int)(-(int)spec->number); + } + } +#else + if (SecIsLongBitEqual(SECUREC_LP32_BIT_WIDTH)) { + if ((spec->number > SECUREC_MIN_32BITS_NEG_VALUE)) { + spec->number = SECUREC_MIN_32BITS_NEG_VALUE; + } else { + spec->number = (unsigned int)(-(int)spec->number); + } + } +#endif + if (spec->beyondMax != 
0) { +#ifdef SECUREC_ON_64BITS + if (SecIsLongBitEqual(SECUREC_LP64_BIT_WIDTH)) { + spec->number = 0; + } +#else + if (SecIsLongBitEqual(SECUREC_LP32_BIT_WIDTH)) { + spec->number = SECUREC_MIN_32BITS_NEG_VALUE; + } +#endif + } + } else { /* o, u, x, X ,p */ +#ifdef SECUREC_ON_64BITS + if (spec->number > SECUREC_MAX_32BITS_VALUE_INC) { + spec->number = SECUREC_MAX_32BITS_VALUE; + } else { + spec->number = (unsigned int)(-(int)spec->number); + } +#else + spec->number = (unsigned int)(-(int)spec->number); +#endif + if (spec->beyondMax != 0) { + spec->number |= (unsigned long)SECUREC_MAX_64BITS_VALUE; + } + } +} + +/* Compatible with integer positive values other than int */ +static void SecFinishNumberPositiveOther(int comChr, int numberWidth, SecScanSpec *spec) +{ + if (comChr == SECUREC_CHAR('d') || comChr == SECUREC_CHAR('i')) { + if (spec->number > ((unsigned long)(1UL << (SECUREC_LONG_BIT_NUM - 1)) - 1)) { + spec->number = ((unsigned long)(1UL << (SECUREC_LONG_BIT_NUM - 1)) - 1); + } + if ((spec->beyondMax != 0 && numberWidth < SECUREC_NUM_WIDTH_INT)) { + spec->number |= (unsigned long)SECUREC_MAX_64BITS_VALUE; + } + if (spec->beyondMax != 0 && numberWidth == SECUREC_NUM_WIDTH_LONG) { + spec->number = ((unsigned long)(1UL << (SECUREC_LONG_BIT_NUM - 1)) - 1); + } + } else { + if (spec->beyondMax != 0) { + spec->number |= (unsigned long)SECUREC_MAX_64BITS_VALUE; + } + } +} + +/* Compatible processing of integer positive numbers */ +static void SecFinishNumberPositiveInt(int comChr, SecScanSpec *spec) +{ + if ((comChr == SECUREC_CHAR('d')) || (comChr == SECUREC_CHAR('i'))) { +#ifdef SECUREC_ON_64BITS + if (SecIsLongBitEqual(SECUREC_LP64_BIT_WIDTH)) { + if (spec->number > SECUREC_MAX_64BITS_POS_VALUE) { + spec->number |= (unsigned long)SECUREC_MAX_64BITS_VALUE; + } + } + if (spec->beyondMax != 0 && SecIsLongBitEqual(SECUREC_LP64_BIT_WIDTH)) { + spec->number |= (unsigned long)SECUREC_MAX_64BITS_VALUE; + } +#else + if (SecIsLongBitEqual(SECUREC_LP32_BIT_WIDTH)) { + if 
(spec->number > SECUREC_MAX_32BITS_POS_VALUE) { + spec->number = SECUREC_MAX_32BITS_POS_VALUE; + } + } + if (spec->beyondMax != 0 && SecIsLongBitEqual(SECUREC_LP32_BIT_WIDTH)) { + spec->number = SECUREC_MAX_32BITS_POS_VALUE; + } +#endif + } else { /* o,u,x,X,p */ + if (spec->beyondMax != 0) { + spec->number = SECUREC_MAX_32BITS_VALUE; + } + } +} + +#endif + + +/* + * Parse decimal character to integer for 64bit . + */ +static void SecDecodeNumber64Decimal(SecInt ch, SecScanSpec *spec) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (spec->number64 > SECUREC_MAX_64BITS_VALUE_DIV_TEN) { + spec->beyondMax = 1; + } +#endif + spec->number64 = SECUREC_MUL_TEN(spec->number64); +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (spec->number64 == SECUREC_MAX_64BITS_VALUE_CUT_LAST_DIGIT) { + SecUnsignedInt64 number64As = (SecUnsignedInt64)SECUREC_MAX_64BITS_VALUE - spec->number64; + if (number64As < (SecUnsignedInt64)((SecUnsignedInt)ch - SECUREC_CHAR('0'))) { + spec->beyondMax = 1; + } + } +#endif + spec->number64 += (SecUnsignedInt64)((SecUnsignedInt)ch - SECUREC_CHAR('0')); +} + +/* + * Parse Hex character to integer for 64bit . + */ +static void SecDecodeNumber64Hex(SecInt ch, SecScanSpec *spec) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (SECUREC_QWORD_HEX_BEYOND_MAX(spec->number64)) { + spec->beyondMax = 1; + } +#endif + spec->number64 = SECUREC_MUL_SIXTEEN(spec->number64); + spec->number64 += (SecUnsignedInt64)(unsigned int)SecHexValueOfChar(ch); + +} + +/* + * Parse Octal character to integer for 64bit . 
+ */ +static void SecDecodeNumber64Octal(SecInt ch, SecScanSpec *spec) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (SECUREC_QWORD_OCTAL_BEYOND_MAX(spec->number64)) { + spec->beyondMax = 1; + } +#endif + spec->number64 = SECUREC_MUL_EIGHT(spec->number64); + spec->number64 += (SecUnsignedInt64)((SecUnsignedInt)ch - SECUREC_CHAR('0')); +} + +#define SECUREC_DECODE_NUMBER_FUNC_NUM 2 +/* Function name cannot add address symbol, causing 546 alarm */ +static void (*g_secDecodeNumberHex[SECUREC_DECODE_NUMBER_FUNC_NUM])(SecInt ch, SecScanSpec *spec) = \ + { SecDecodeNumberHex, SecDecodeNumber64Hex }; +static void (*g_secDecodeNumberOctal[SECUREC_DECODE_NUMBER_FUNC_NUM])(SecInt ch, SecScanSpec *spec) = \ + { SecDecodeNumberOctal, SecDecodeNumber64Octal }; +static void (*g_secDecodeNumberDecimal[SECUREC_DECODE_NUMBER_FUNC_NUM])(SecInt ch, SecScanSpec *spec) = \ + { SecDecodeNumberDecimal, SecDecodeNumber64Decimal }; + +/* + * Parse 64-bit integer formatted input, return 0 when ch is a number. 
+ */ +static int SecDecodeNumber(SecInt ch, SecScanSpec *spec) +{ + if (spec->comChr == SECUREC_CHAR('x') || spec->comChr == SECUREC_CHAR('p')) { + if (SECUREC_IS_XDIGIT(ch)) { + (*g_secDecodeNumberHex[spec->isInt64Arg])(ch, spec); + } else { + return -1; + } + return 0; + } + if (!(SECUREC_IS_DIGIT(ch))) { + return -1; + } + if (spec->comChr == SECUREC_CHAR('o')) { + if (ch < SECUREC_CHAR('8')) { + (*g_secDecodeNumberOctal[spec->isInt64Arg])(ch, spec); + } else { + return -1; + } + } else { /* comChr is 'd' */ + (*g_secDecodeNumberDecimal[spec->isInt64Arg])(ch, spec); + } + return 0; +} + + +/* + * Complete the final 32-bit integer formatted input + */ +static void SecFinishNumber(SecScanSpec *spec) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (spec->negative != 0) { + if (spec->numberWidth == SECUREC_NUM_WIDTH_INT) { + SecFinishNumberNegativeInt(spec->oriComChr, spec); + } else { + SecFinishNumberNegativeOther(spec->oriComChr, spec->numberWidth, spec); + } + } else { + if (spec->numberWidth == SECUREC_NUM_WIDTH_INT) { + SecFinishNumberPositiveInt(spec->oriComChr, spec); + } else { + SecFinishNumberPositiveOther(spec->oriComChr, spec->numberWidth, spec); + } + } +#else + if (spec->negative != 0) { +#if defined(__hpux) + if (spec->oriComChr != SECUREC_CHAR('p')) { + spec->number = (unsigned long)(-(long)spec->number); + } +#else + spec->number = (unsigned long)(-(long)spec->number); +#endif + } +#endif + return; +} + +/* + * Complete the final 64-bit integer formatted input + */ +static void SecFinishNumber64(SecScanSpec *spec) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && !(defined(SECUREC_ON_UNIX))) + if (spec->negative != 0) { + if (spec->oriComChr == (SECUREC_CHAR('d')) || (spec->oriComChr == SECUREC_CHAR('i'))) { + if (spec->number64 > SECUREC_MIN_64BITS_NEG_VALUE) { + spec->number64 = SECUREC_MIN_64BITS_NEG_VALUE; + } else { + spec->number64 = (SecUnsignedInt64)(-(SecInt64)spec->number64); + } + if 
(spec->beyondMax != 0) { + spec->number64 = SECUREC_MIN_64BITS_NEG_VALUE; + } + } else { /* o, u, x, X, p */ + spec->number64 = (SecUnsignedInt64)(-(SecInt64)spec->number64); + if (spec->beyondMax != 0) { + spec->number64 = SECUREC_MAX_64BITS_VALUE; + } + } + } else { + if ((spec->oriComChr == SECUREC_CHAR('d')) || (spec->oriComChr == SECUREC_CHAR('i'))) { + if (spec->number64 > SECUREC_MAX_64BITS_POS_VALUE) { + spec->number64 = SECUREC_MAX_64BITS_POS_VALUE; + } + if (spec->beyondMax != 0) { + spec->number64 = SECUREC_MAX_64BITS_POS_VALUE; + } + } else { + if (spec->beyondMax != 0) { + spec->number64 = SECUREC_MAX_64BITS_VALUE; + } + } + } +#else + if (spec->negative != 0) { +#if defined(__hpux) + if (spec->oriComChr != SECUREC_CHAR('p')) { + spec->number64 = (SecUnsignedInt64)(-(SecInt64)spec->number64); + } +#else + spec->number64 = (SecUnsignedInt64)(-(SecInt64)spec->number64); +#endif + } +#endif + return; +} +static void (*g_secFinishNumber[SECUREC_DECODE_NUMBER_FUNC_NUM])(SecScanSpec *spec) = \ + { SecFinishNumber, SecFinishNumber64 }; + +#if SECUREC_ENABLE_SCANF_FILE + +/* + * Adjust the pointer position of the file stream + */ +static void SecSeekStream(SecFileStream *stream) +{ + if ((stream->count == 0) && feof(stream->pf)) { + /* file pointer at the end of file, don't need to seek back */ + stream->base[0] = '\0'; + return; + } + /* LSD seek to original position, bug fix 2014 1 21 */ + if (fseek(stream->pf, stream->oriFilePos, SEEK_SET)) { + /* seek failed, ignore it */ + stream->oriFilePos = 0; + return; + } + + if (stream->fileRealRead > 0) { /* LSD bug fix. 
when file reach to EOF, don't seek back */ +#if (defined(SECUREC_COMPATIBLE_WIN_FORMAT)) + int loops; + for (loops = 0; loops < (stream->fileRealRead / SECUREC_BUFFERED_BLOK_SIZE); ++loops) { + if (fread(stream->base, (size_t)1, (size_t)SECUREC_BUFFERED_BLOK_SIZE, + stream->pf) != SECUREC_BUFFERED_BLOK_SIZE) { + break; + } + } + if ((stream->fileRealRead % SECUREC_BUFFERED_BLOK_SIZE) != 0) { + size_t ret = fread(stream->base, (size_t)((unsigned int)stream->fileRealRead % SECUREC_BUFFERED_BLOK_SIZE), + (size_t)1, stream->pf); + if ((ret == 1 || ret == 0) && (ftell(stream->pf) < stream->oriFilePos + stream->fileRealRead)) { + (void)fseek(stream->pf, stream->oriFilePos + stream->fileRealRead, SEEK_SET); + } + } + +#else + /* in linux like system */ + if (fseek(stream->pf, stream->oriFilePos + stream->fileRealRead, SEEK_SET)) { + /* seek failed, ignore it */ + stream->oriFilePos = 0; + } +#endif + } + + return; +} + +/* + * Adjust the pointer position of the file stream and free memory + */ +static void SecAdjustStream(SecFileStream *stream) +{ + if (stream != NULL && (stream->flag & SECUREC_FILE_STREAM_FLAG) && stream->base != NULL) { + SecSeekStream(stream); + SECUREC_FREE(stream->base); + stream->base = NULL; + } + return; +} +#endif + +static void SecSkipSpaceFormat(const SecUnsignedChar **format) +{ + const SecUnsignedChar *fmt = *format; + while (SECUREC_IS_SPACE(*fmt)) { + ++fmt; + } + *format = fmt; +} +#ifndef SECUREC_FOR_WCHAR +/* + * Handling multi-character characters + */ +static int SecDecodeLeadByte(SecInt ch, const SecUnsignedChar **format, SecFileStream *stream, int *counter) +{ +#if SECUREC_HAVE_MBTOWC + char temp[SECUREC_MULTI_BYTE_MAX_LEN]; + const SecUnsignedChar *fmt = *format; + wchar_t tempWChar = L'\0'; + int ch2 = SecGetChar(stream, counter); + if (*fmt == SECUREC_CHAR('\0') || (int)(*fmt) != (ch2)) { + /* LSD in console mode, ungetc twice may cause problem */ + SecUnGetChar(ch2, stream, counter); + SecUnGetChar(ch, stream, counter); + return 
-1; + } + ++fmt; + if (MB_CUR_MAX >= SECUREC_UTF8_BOM_HEADER_SIZE && + (((unsigned char)ch & SECUREC_UTF8_LEAD_1ST) == SECUREC_UTF8_LEAD_1ST) && + (((unsigned char)ch2 & SECUREC_UTF8_LEAD_2ND) == SECUREC_UTF8_LEAD_2ND)) { + /* this char is very likely to be a UTF-8 char */ + int ch3 = SecGetChar(stream, counter); + temp[0] = (char)ch; + temp[1] = (char)ch2; /* 1 index of second character */ + temp[2] = (char)ch3; /* 2 index of third character */ + temp[3] = '\0'; /* 3 of string terminator position */ + + if (mbtowc(&tempWChar, temp, sizeof(temp)) > 0) { + /* succeed */ + if (*fmt == SECUREC_CHAR('\0') || (int)(*fmt) != (int)ch3) { + SecUnGetChar(ch3, stream, counter); + return -1; + } + ++fmt; + *counter = *counter - 1; + } else { + SecUnGetChar(ch3, stream, counter); + } + } + *counter = *counter - 1; /* only count as one character read */ + *format = fmt; + return 0; +#else + SecUnGetChar(ch, stream, counter); + (void)format; + return -1; +#endif +} +#endif + + + +/* + * Resolving sequence of characters from %[ format + */ +static int SecSetupBracketTable(const SecUnsignedChar **format, SecBracketTable *bracketTable) +{ + const SecUnsignedChar *fmt = *format; + SecUnsignedChar prevChar = 0; + SecUnsignedChar expCh; + SecUnsignedChar last = 0; +#if !(defined(SECUREC_COMPATIBLE_WIN_FORMAT)) + if (*fmt == SECUREC_CHAR('{')) { + return -1; + } +#endif + /* for building "table" data */ + ++fmt; /* skip [ */ + bracketTable->mask = 0; + if (*fmt == SECUREC_CHAR('^')) { + ++fmt; + bracketTable->mask = (unsigned char)0xff; + } + if (*fmt == SECUREC_CHAR(']')) { + prevChar = SECUREC_CHAR(']'); + ++fmt; + SECUREC_BRACKET_SET_BIT(bracketTable->table, SECUREC_CHAR(']')); + } + while (*fmt != SECUREC_CHAR('\0') && *fmt != SECUREC_CHAR(']')) { + expCh = *fmt++; + if (expCh != SECUREC_CHAR('-') || prevChar == 0 || *fmt == SECUREC_CHAR(']')) { + /* normal character */ + prevChar = expCh; + SECUREC_BRACKET_SET_BIT(bracketTable->table, expCh); + } else { + /* for %[a-z] */ + expCh 
= *fmt++; /* get end of range */ + if (prevChar < expCh) { /* %[a-z] */ + last = expCh; + } else { + prevChar = expCh; +#if (defined(SECUREC_COMPATIBLE_WIN_FORMAT)) + /* %[z-a] */ + last = prevChar; + +#else + SECUREC_BRACKET_SET_BIT(bracketTable->table, SECUREC_CHAR('-')); + SECUREC_BRACKET_SET_BIT(bracketTable->table, expCh); + continue; +#endif + } + /* format %[a-\xff] last is 0xFF, condition (rnch <= last) cause dead loop */ + for (expCh = prevChar; expCh < last; ++expCh) { + SECUREC_BRACKET_SET_BIT(bracketTable->table, expCh); + } + SECUREC_BRACKET_SET_BIT(bracketTable->table, last); + prevChar = 0; + } + } + *format = fmt; + return 0; +} + + +#ifdef SECUREC_FOR_WCHAR +static int SecInputForWchar(SecInt ch, SecScanSpec *spec) +{ + void *endPtr = spec->argPtr; + if (spec->isWChar > 0) { + *(wchar_t UNALIGNED *)endPtr = (wchar_t)ch; + endPtr = (wchar_t *)endPtr + 1; + --spec->arrayWidth; + } else { +#if SECUREC_HAVE_WCTOMB + int temp; + char tmpBuf[SECUREC_MB_LEN + 1]; + SECUREC_MASK_MSVC_CRT_WARNING temp = wctomb(tmpBuf, (wchar_t)ch); + SECUREC_END_MASK_MSVC_CRT_WARNING + if (temp <= 0 || ((size_t)(unsigned int)temp) > sizeof(tmpBuf)) { + /* if wctomb error, then ignore character */ + return 0; + } + if (((size_t)(unsigned int)temp) > spec->arrayWidth) { + return -1; + } + if (memcpy_s(endPtr, spec->arrayWidth, tmpBuf, (size_t)(unsigned int)temp) != EOK) { + return -1; + } + endPtr = (char *)endPtr + temp; + spec->arrayWidth -= (size_t)(unsigned int)temp; +#else + return -1; +#endif + } + spec->argPtr = endPtr; + return 0; +} +#endif + + +#ifndef SECUREC_FOR_WCHAR +static int SecInputForChar(SecInt ch, SecScanSpec *spec, SecFileStream *stream, int *charCount) +{ + void *endPtr = spec->argPtr; + if (spec->isWChar > 0) { + wchar_t tempWChar = L'?'; /* set default char as ? 
*/ +#if SECUREC_HAVE_MBTOWC + char temp[SECUREC_MULTI_BYTE_MAX_LEN + 1]; + temp[0] = (char)ch; + temp[1] = '\0'; +#if defined(SECUREC_COMPATIBLE_WIN_FORMAT) + if (SecIsLeadByte(ch)) { + temp[1] = (char)SecGetChar(stream, charCount); + temp[2] = '\0'; /* 2 of string terminator position */ + } + if (mbtowc(&tempWChar, temp, sizeof(temp)) <= 0) { + /* no string termination error for tool */ + tempWChar = L'?'; + } +#else + if (SecIsLeadByte(ch)) { + int convRes = 0; + int di = 1; + /* in Linux like system, the string is encoded in UTF-8 */ + while (convRes <= 0 && di < (int)MB_CUR_MAX && di < SECUREC_MULTI_BYTE_MAX_LEN) { + temp[di++] = (char)SecGetChar(stream, charCount); + temp[di] = '\0'; + convRes = mbtowc(&tempWChar, temp, sizeof(temp)); + } + if (convRes <= 0) { + tempWChar = L'?'; + } + } else { + if (mbtowc(&tempWChar, temp, sizeof(temp)) <= 0) { + /* no string termination error for tool */ + tempWChar = L'?'; + } + } +#endif +#endif /* SECUREC_HAVE_MBTOWC */ + *(wchar_t UNALIGNED *)endPtr = tempWChar; + /* just copy L'?' if mbtowc fails, errno is set by mbtowc */ + endPtr = (wchar_t *)endPtr + 1; + --spec->arrayWidth; + (void)charCount; + (void)stream; + } else { + *(char *)endPtr = (char)ch; + endPtr = (char *)endPtr + 1; + --spec->arrayWidth; + } + spec->argPtr = endPtr; + return 0; +} +#endif + + +#if SECUREC_ENABLE_SCANF_FLOAT + +/* no not use localeconv()->decimal_pointif onlay support '.' 
*/ +#define SECURE_IS_FLOAT_DECIMAL(ch) ((ch) == SECUREC_CHAR('.')) +/* + * init SecFloatSpec befor parse format + */ +static void SecInitFloatSpec(SecFloatSpec *floatSpec) +{ + floatSpec->floatStr = floatSpec->buffer; + floatSpec->allocatedFloatStr = NULL; + floatSpec->floatStrSize = sizeof(floatSpec->buffer) / sizeof(floatSpec->buffer[0]); + floatSpec->floatStr = floatSpec->buffer; + floatSpec->floatStrUsedLen = 0; +} + +static void SecClearFloatSpec(SecFloatSpec *floatSpec, int *doneCount) +{ + /* LSD 2014.3.6 add, clear the stack data */ + if (memset_s(floatSpec->buffer, sizeof(floatSpec->buffer), 0, + sizeof(floatSpec->buffer)) != EOK) { + *doneCount = 0; /* This is a dead code, just to meet the coding requirements */ + } + if (floatSpec->allocatedFloatStr != NULL) { + /* pFloatStr can be alloced in SecUpdateFloatString function, clear and free it */ + if (memset_s(floatSpec->allocatedFloatStr, floatSpec->floatStrSize * sizeof(SecChar), 0, + floatSpec->floatStrSize * sizeof(SecChar)) != EOK) { + *doneCount = 0; /* This is a dead code, just to meet the coding requirements */ + } + SECUREC_FREE(floatSpec->allocatedFloatStr); + floatSpec->allocatedFloatStr = NULL; + floatSpec->floatStr = NULL; + } +} + + +/* + * scan value of exponent. + * return 0 OK + */ +static int SecInputFloatE(SecFileStream *stream, SecScanSpec *spec, SecFloatSpec *floatSpec, int *charCount) +{ + SecInt ch = SecGetChar(stream, charCount); + if (ch == SECUREC_CHAR('+') || ch == SECUREC_CHAR('-')) { + if (ch == SECUREC_CHAR('-') && SecUpdateFloatString((SecChar)'-', floatSpec) != 0) { + return -1; + } + if (spec->width != 0) { + ch = SecGetChar(stream, charCount); + --spec->width; + } + } + + while (SECUREC_IS_DIGIT(ch) && spec->width-- != 0) { + if (SecUpdateFloatString((SecChar)ch, floatSpec) != 0) { + return -1; + } + ch = SecGetChar(stream, charCount); + } + return 0; +} + +/* + * scan %f. 
+ * return 0 OK + */ +static int SecInputFloat(SecFileStream *stream, SecScanSpec *spec, SecFloatSpec *floatSpec, int *charCount) +{ + int started = -1; + SecInt ch = SecGetChar(stream, charCount); + + floatSpec->floatStrUsedLen = 0; + if (ch == SECUREC_CHAR('-')) { + floatSpec->floatStr[floatSpec->floatStrUsedLen++] = SECUREC_CHAR('-'); + --spec->width; + ch = SecGetChar(stream, charCount); + } else if (ch == SECUREC_CHAR('+')) { + --spec->width; + ch = SecGetChar(stream, charCount); + } + + if (spec->widthSet == 0) { /* must care width */ + spec->width = -1; /* -1 is unlimited */ + } + + /* now get integral part */ + while (SECUREC_IS_DIGIT(ch) && spec->width-- != 0) { + started = 0; + /* ch must be '0' - '9' */ + if (SecUpdateFloatString((SecChar)ch, floatSpec) != 0) { + return -1; + } + ch = SecGetChar(stream, charCount); + } + + /* now get fractional part */ + if (SECURE_IS_FLOAT_DECIMAL((SecChar)ch) && spec->width-- != 0) { + /* now check for decimal */ + if (SecUpdateFloatString((SecChar)ch, floatSpec) != 0) { + return -1; + } + ch = SecGetChar(stream, charCount); + while (SECUREC_IS_DIGIT(ch) && spec->width-- != 0) { + started = 0; + if (SecUpdateFloatString((SecChar)ch, floatSpec) != 0) { + return -1; + } + ch = SecGetChar(stream, charCount); + } + } + + /* now get exponent part */ + if (started == 0 && (ch == SECUREC_CHAR('e') || ch == SECUREC_CHAR('E')) && spec->width-- != 0) { + if (SecUpdateFloatString((SecChar)'e', floatSpec) != 0) { + return -1; + } + if (SecInputFloatE(stream, spec, floatSpec, charCount) != 0) { + return -1; + } + } + /* un set the last character that is not a floating point number */ + SecUnGetChar(ch, stream, charCount); + /* Make sure have a string terminator, buffer is large enough */ + floatSpec->floatStr[floatSpec->floatStrUsedLen] = SECUREC_CHAR('\0'); + return started; + +} +#endif + +/* + * scan digital part of %d %i %o %u %x %p. 
+ * return 0 OK + */ +static int SecInputNumberDigital(SecInt firstCh, SecFileStream *stream, SecScanSpec *spec, int *charCount) +{ + SecInt ch = firstCh; + int loopFlag = 0; + int started = -1; + while (loopFlag == 0) { + /* decode ch to number */ + loopFlag = SecDecodeNumber(ch, spec); + if (loopFlag == 0) { + started = 0; + if (spec->widthSet != 0 && --spec->width == 0) { + loopFlag = 1; + } else { + ch = SecGetChar(stream, charCount); + } + } else { + SecUnGetChar(ch, stream, charCount); + } + } + + /* Handling integer negative numbers and beyond max */ + (*g_secFinishNumber[spec->isInt64Arg])(spec); + return started; + +} + +/* + * scan %d %i %o %u %x %p. + * return 0 OK + */ +static int SecInputNumber(SecFileStream *stream, SecScanSpec *spec, int *charCount) +{ + SecInt ch = SecGetChar(stream, charCount); + + if (ch == SECUREC_CHAR('+') || ch == SECUREC_CHAR('-')) { + if (ch == SECUREC_CHAR('-')) { + spec->negative = 1; + } + if (spec->widthSet != 0 && --spec->width == 0) { + return -1; + } else { + ch = SecGetChar(stream, charCount); + } + } + + if (spec->oriComChr == SECUREC_CHAR('i')) { + /* i could be d, o, or x, use d as default */ + spec->comChr = SECUREC_CHAR('d'); + } + + if (spec->oriComChr == SECUREC_CHAR('x') || spec->oriComChr == SECUREC_CHAR('i')) { + if (ch != SECUREC_CHAR('0')) { + /* scan number */ + return SecInputNumberDigital(ch, stream, spec, charCount); + } + /* now input string may be 0x123 or 0X123 or just 0 */ + /* get next char */ + ch = SecGetChar(stream, charCount); + if ((SecChar)(ch) == SECUREC_CHAR('x') || (SecChar)ch == SECUREC_CHAR('X')) { + spec->comChr = SECUREC_CHAR('x'); + ch = SecGetChar(stream, charCount); + /* length of 0x is 2 */ + if (spec->widthSet != 0 && spec->width <= (1 + 1)) { + /* length not enough for "0x" */ + return -1; + } + spec->width -= 2; /* Subtract 2 for the length of "0x" */ + } else { + if (spec->oriComChr != SECUREC_CHAR('x')) { + spec->comChr = SECUREC_CHAR('o'); + } + /* unset the character after 
0 back to stream, input only '0' result is OK */ + SecUnGetChar(ch, stream, charCount); + ch = SECUREC_CHAR('0'); + } + } + /* scan number */ + return SecInputNumberDigital(ch, stream, spec, charCount); +} +/* + * scan %c %s %[ + * return 0 OK + */ +static int SecInputString(SecFileStream *stream, SecScanSpec *spec, + const SecBracketTable *bracketTable, int *charCount, int *doneCount) +{ + void *startPtr = spec->argPtr; + int suppressed= 0; + int errNoMem = 0; + + while (spec->widthSet == 0 || spec->width-- != 0) { + SecInt ch = SecGetChar(stream, charCount); + /* char condition or string condition and bracket condition. + * only supports wide characters with a maximum length of two bytes + */ + if ((ch != SECUREC_EOF) && (spec->comChr == SECUREC_CHAR('c') || + SECUREC_SCANF_STRING_CONDITION(spec->comChr, ch) || + SECUREC_SCANF_BRACKET_CONDITION(spec->comChr, ch, bracketTable->table, bracketTable->mask))) { + if (spec->suppress != 0) { + /* Used to identify processed data for %* + * use endPtr to identify will cause 613, so use suppressed + */ + suppressed = 1; + continue; + } + /* now suppress is not set */ + if (spec->arrayWidth == 0) { + errNoMem = 1; /* We have exhausted the user's buffer */ + break; + } +#ifdef SECUREC_FOR_WCHAR + errNoMem = SecInputForWchar(ch, spec); +#else + errNoMem = SecInputForChar(ch, spec, stream, charCount); +#endif + if (errNoMem != 0) { + break; + } + } else { + SecUnGetChar(ch, stream, charCount); + break; + } + } + + if (errNoMem != 0) { + /* In case of error, blank out the input buffer */ + if (spec->suppress == 0) { + SecAddEndingZero(startPtr, spec); + } + return -1; + } + + /* No input was scanned */ + if ((spec->suppress != 0 && suppressed == 0) || + (spec->suppress == 0 && startPtr == spec->argPtr)) { + return -1; + } + + if (spec->suppress == 0) { + if (spec->comChr != 'c') { + /* null-terminate strings */ + SecAddEndingZero(spec->argPtr, spec); + } + *doneCount = *doneCount + 1; + } + return 0; +} + +#ifdef 
SECUREC_FOR_WCHAR +/* + * alloce buffer for wchar version of %[. + * return 0 OK + */ +static int SecAllocBracketTable(SecBracketTable *bracketTable) +{ + if (bracketTable->table == NULL) { + /* table should be freed after use */ + bracketTable->table = (unsigned char *)SECUREC_MALLOC(SECUREC_BRACKET_TABLE_SIZE); + if (bracketTable->table == NULL) { + return -1; + } + } + return 0; +} + +/* + * free buffer for wchar version of %[ + */ +static void SecFreeBracketTable(SecBracketTable *bracketTable) +{ + if (bracketTable->table != NULL) { + SECUREC_FREE(bracketTable->table); + bracketTable->table = NULL; + } +} +#endif + +#ifdef SECUREC_FOR_WCHAR +/* + * Formatting input core functions for wchar version.Called by a function such as vsscanf_s + */ +int SecInputSW(SecFileStream *stream, const wchar_t *cFormat, va_list argList) +#else +/* + * Formatting input core functions for char version.Called by a function such as vswscanf_s + */ +int SecInputS(SecFileStream *stream, const char *cFormat, va_list argList) +#endif +{ + const SecUnsignedChar *format = (const SecUnsignedChar *)cFormat; + SecBracketTable bracketTable = SECUREC_INIT_BRACKET_TABLE; + SecScanSpec spec; + SecInt ch = 0; + int charCount = 0; + int doneCount = 0; + int formatError = 0; + int paraIsNull = 0; +#if SECUREC_ENABLE_SCANF_FLOAT + SecFloatSpec floatSpec; +#endif + int match = 0; + int errRet = 0; +#if SECUREC_ENABLE_SCANF_FLOAT + SecInitFloatSpec(&floatSpec); +#endif + /* format must not NULL */ + /* use err < 1 to claer 845 */ + while (errRet < 1 && *format != SECUREC_CHAR('\0')) { + /* skip space in format and space in input */ + if (SECUREC_IS_SPACE(*format)) { + SecInt nonSpaceChar = SecSkipSpaceChar(stream, &charCount); + /* eat all space chars and put fist no space char backup */ + SecUnGetChar(nonSpaceChar, stream, &charCount); + SecSkipSpaceFormat(&format); + continue; + } + + if (*format != SECUREC_CHAR('%')) { + ch = SecGetChar(stream, &charCount); + if ((int)(*format++) != (int)(ch)) { + 
SecUnGetChar(ch, stream, &charCount); + ++errRet; /* use plus to clear 845 */ + continue; + } +#ifndef SECUREC_FOR_WCHAR + if (SecIsLeadByte(ch) && SecDecodeLeadByte(ch, &format, stream, &charCount) != 0) { + ++errRet; + continue; + } +#endif + /* for next %n */ + if ((ch == SECUREC_EOF) && ((*format != SECUREC_CHAR('%')) || (*(format + 1) != SECUREC_CHAR('n')))) { + break; + } + continue; + } + + /* now *format is % */ + /* set default value for each % */ + SecSetDefaultScanSpec(&spec); + if (SecDecodeScanFlag(&format, &spec) != 0) { + formatError = 1; + ++errRet; + continue; + } + /* update wchar flag for %S %C */ + SecUpdateWcharFlagByType(*format, &spec); + +#if SECUREC_HAVE_WCHART == 0 + /* in kernel not support wide char */ + if (spec.isWChar > 0) { + formatError = 1; + ++errRet; + continue; + } +#endif + if (spec.widthSet != 0 && spec.width == 0) { + /* 0 width in format */ + ++errRet; + continue; + } + + spec.comChr = (unsigned char)(*format) | (SECUREC_CHAR('a') - SECUREC_CHAR('A')); /* to lowercase */ + spec.oriComChr = spec.comChr; + + if (spec.comChr != SECUREC_CHAR('n')) { + if (spec.comChr != SECUREC_CHAR('c') && spec.comChr != SECUREC_BRACE) { + ch = SecSkipSpaceChar(stream, &charCount); + } else { + ch = SecGetChar(stream, &charCount); + } + if (ch == SECUREC_EOF) { + ++errRet; + continue; + } + } + + /* now no 0 width in format and get one char from input */ + switch (spec.comChr) { + case SECUREC_CHAR('c'): /* also 'C' */ + /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('s'): /* also 'S': */ + /* fall-through */ /* FALLTHRU */ + case SECUREC_BRACE: + /* check dest buffer and size */ + if (spec.suppress == 0) { + spec.argPtr = (void *)va_arg(argList, void *); + if (spec.argPtr == NULL) { + paraIsNull = 1; + ++errRet; + continue; + } + /* Get the next argument - size of the array in characters */ +#ifdef SECUREC_ON_64BITS + spec.arrayWidth = ((size_t)(va_arg(argList, size_t))) & 0xFFFFFFFFUL; +#else /* !SECUREC_ON_64BITS */ + spec.arrayWidth 
= (size_t)va_arg(argList, size_t); +#endif + if (spec.arrayWidth == 0 || (spec.isWChar <= 0 && spec.arrayWidth > SECUREC_STRING_MAX_LEN) || + (spec.isWChar > 0 && spec.arrayWidth > SECUREC_WCHAR_STRING_MAX_LEN)) { + /* do not clear buffer just go error */ + ++errRet; + continue; + } + /* One element is needed for '\0' for %s and %[ */ + if (spec.comChr != SECUREC_CHAR('c')) { + --spec.arrayWidth; + } + } else { + /* Set argPtr to NULL is necessary, in supress mode we don't use argPtr to store data */ + spec.argPtr = NULL; + } + + if (spec.comChr == 'c') { + if (spec.widthSet == 0) { + spec.widthSet = 1; + spec.width = 1; + } + } else if (spec.comChr == SECUREC_BRACE) { + /* malloc when first %[ is meet for wchar version */ +#ifdef SECUREC_FOR_WCHAR + if (SecAllocBracketTable(&bracketTable) != 0) { + ++errRet; + continue; + } + +#endif + (void)memset(bracketTable.table, 0, (size_t)SECUREC_BRACKET_TABLE_SIZE); + if (SecSetupBracketTable(&format, &bracketTable) != 0) { + ++errRet; + continue; + } + + if (*format == SECUREC_CHAR('\0')) { + if (spec.suppress == 0 && spec.arrayWidth > 0) { + SecAddEndingZero(spec.argPtr, &spec); + } + ++errRet; + /* truncated format */ + continue; + } + + } + /* un set last char to stream */ + SecUnGetChar(ch, stream, &charCount); + /* scanset completed. 
Now read string */ + if (SecInputString(stream, &spec, &bracketTable, &charCount, &doneCount) != 0) { + ++errRet; + continue; + } + break; + case SECUREC_CHAR('p'): + /* make %hp same as %p */ + spec.numberWidth = SECUREC_NUM_WIDTH_INT; +#ifdef SECUREC_ON_64BITS + spec.isInt64Arg = 1; +#endif + /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('o'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('u'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('d'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('i'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('x'): + /* un set last char to stream */ + SecUnGetChar(ch, stream, &charCount); + if (SecInputNumber(stream, &spec, &charCount) != 0) { + ++errRet; + continue; + } + if (spec.suppress == 0) { + spec.argPtr = (void *)va_arg(argList, void *); + if (spec.argPtr == NULL) { + paraIsNull = 1; + ++errRet; + continue; + } + SecAssignNumber(&spec); + ++doneCount; + } + break; + case SECUREC_CHAR('n'): /* char count */ + if (spec.suppress == 0) { + spec.argPtr = (void *)va_arg(argList, void *); + if (spec.argPtr == NULL) { + paraIsNull = 1; + ++errRet; + continue; + } + spec.number = (unsigned long)(unsigned int)charCount; + spec.isInt64Arg = 0; + SecAssignNumber(&spec); + } + break; + case SECUREC_CHAR('e'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('f'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('g'): /* scan a float */ +#if SECUREC_ENABLE_SCANF_FLOAT + /* un set last char to stream */ + SecUnGetChar(ch, stream, &charCount); + if (SecInputFloat(stream, &spec, &floatSpec, &charCount) != 0) { + ++errRet; + continue; + } + if (spec.suppress == 0) { + spec.argPtr = (void *)va_arg(argList, void *); + if (spec.argPtr == NULL) { + ++errRet; + paraIsNull = 1; + continue; + } +#ifdef SECUREC_FOR_WCHAR + if (SecAssignFloatW(&floatSpec, &spec) != 0) { + ++errRet; + continue; + } +#else + SecAssignFloat(floatSpec.floatStr, spec.numberWidth, spec.argPtr); +#endif + ++doneCount; + } 
+ + break; +#else /* SECUREC_ENABLE_SCANF_FLOAT */ + ++errRet; + continue; +#endif + default: + if ((int)(*format) != (int)ch) { + SecUnGetChar(ch, stream, &charCount); + formatError = 1; + ++errRet; + continue; + } else { + --match; + } + } + + ++match; + ++format; + if ((ch == SECUREC_EOF) && ((*format != SECUREC_CHAR('%')) || (*(format + 1) != SECUREC_CHAR('n')))) { + break; + } + } + +#ifdef SECUREC_FOR_WCHAR + SecFreeBracketTable(&bracketTable); +#endif + +#if SECUREC_ENABLE_SCANF_FLOAT + SecClearFloatSpec(&floatSpec, &doneCount); +#endif + +#if SECUREC_ENABLE_SCANF_FILE + SecAdjustStream(stream); +#endif + + if (ch == SECUREC_EOF) { + return ((doneCount || match) ? doneCount : SECUREC_SCANF_EINVAL); + } else if (formatError != 0 || paraIsNull != 0) { + /* Invalid Input Format or parameter */ + return SECUREC_SCANF_ERROR_PARA; + } + + return doneCount; +} + +#if SECUREC_ENABLE_SCANF_FILE + +#if defined(SECUREC_NO_STD_UNGETC) +/* + * Get char from stdin or buffer + */ +static SecInt SecGetCharFromStdin(SecFileStream *stream) +{ + SecInt ch; + if (stream->fUnget == 1) { + ch = (SecInt) stream->lastChar; + stream->fUnget = 0; + } else { + ch = SECUREC_GETC(stream->pf); + stream->lastChar = (unsigned int)ch; + } + return ch; +} +#else +/* + * Get char from stdin or buffer use std function + */ +static SecInt SecGetCharFromStdin(const SecFileStream *stream) +{ + SecInt ch; + ch = SECUREC_GETC(stream->pf); + return ch; +} +#endif + +static void SecSkipBomHeader(SecFileStream *stream) +{ +#ifdef SECUREC_FOR_WCHAR + if (stream->count >= SECUREC_BOM_HEADER_SIZE && + (((unsigned char)(stream->base[0]) == SECUREC_BOM_HEADER_LE_1ST && + (unsigned char)(stream->base[1]) == SECUREC_BOM_HEADER_LE_2ST) || + ((unsigned char)(stream->base[0]) == SECUREC_BOM_HEADER_BE_1ST && + (unsigned char)(stream->base[1]) == SECUREC_BOM_HEADER_BE_2ST))) { + + /* the stream->count must be a multiple of sizeof(SecChar), + * otherwise this function will return SECUREC_EOF when read the last 
character + */ + if ((stream->count - SECUREC_BOM_HEADER_SIZE) % (int)sizeof(SecChar) != 0) { + int ret = (int)fread(stream->base + stream->count, (size_t)1, + (size_t)SECUREC_BOM_HEADER_SIZE, stream->pf); + if (ret > 0 && ret <= SECUREC_BUFFERED_BLOK_SIZE) { + stream->count += ret; + } + } + /* it's BOM header, skip */ + stream->count -= SECUREC_BOM_HEADER_SIZE; + stream->cur += SECUREC_BOM_HEADER_SIZE; + } +#else + if (stream->count >= SECUREC_UTF8_BOM_HEADER_SIZE && + (unsigned char)(stream->base[0]) == SECUREC_UTF8_BOM_HEADER_1ST && + (unsigned char)(stream->base[1]) == SECUREC_UTF8_BOM_HEADER_2ND && + (unsigned char)(stream->base[2]) == SECUREC_UTF8_BOM_HEADER_3RD) { /* 2 offset of third head character */ + /* it's BOM header, skip */ + stream->count -= SECUREC_UTF8_BOM_HEADER_SIZE; + stream->cur += SECUREC_UTF8_BOM_HEADER_SIZE; + } +#endif +} +/* + * Get char from file stream or buffer + */ +static SecInt SecGetCharFromFile(SecFileStream *stream) +{ + SecInt ch; + if (stream->count == 0) { + int firstReadOnFile = 0; + /* load file to buffer */ + if (stream->base == NULL) { + stream->base = (char *)SECUREC_MALLOC(SECUREC_BUFFERED_BLOK_SIZE + 1); + if (stream->base == NULL) { + return SECUREC_EOF; + } + stream->base[SECUREC_BUFFERED_BLOK_SIZE] = '\0'; /* for tool Warning string null */ + } + /* LSD add 2014.3.21 */ + if (stream->oriFilePos == SECUREC_UNINITIALIZED_FILE_POS) { + stream->oriFilePos = ftell(stream->pf); /* save original file read position */ + firstReadOnFile = 1; + } + stream->count = (int)fread(stream->base, (size_t)1, (size_t)SECUREC_BUFFERED_BLOK_SIZE, stream->pf); + stream->base[SECUREC_BUFFERED_BLOK_SIZE] = '\0'; /* for tool Warning string null */ + if (stream->count == 0 || stream->count > SECUREC_BUFFERED_BLOK_SIZE) { + return SECUREC_EOF; + } + stream->cur = stream->base; + stream->flag |= SECUREC_LOAD_FILE_TO_MEM_FLAG; + if (firstReadOnFile != 0) { + SecSkipBomHeader(stream); + } + } + /* according wchar_t has two bytes */ + ch = 
(SecInt)((stream->count -= (int)sizeof(SecChar)) >= 0 ? \ + (SecInt)(SECUREC_CHAR_MASK & \ + (unsigned int)(int)(*((const SecChar *)(const void *)stream->cur))) : SECUREC_EOF); + stream->cur += sizeof(SecChar); + + if (ch != SECUREC_EOF && stream->base != NULL) { + stream->fileRealRead += (int)sizeof(SecChar); + } + return ch; +} +#endif + +/* + * Get char for wchar version + */ +static SecInt SecGetChar(SecFileStream *stream, int *counter) +{ + SecInt ch = SECUREC_EOF; +#if SECUREC_ENABLE_SCANF_FILE + if ((stream->flag & SECUREC_FROM_STDIN_FLAG) > 0) { + ch = SecGetCharFromStdin(stream); + } else if ((stream->flag & SECUREC_FILE_STREAM_FLAG) > 0) { + ch = SecGetCharFromFile(stream); + } +#endif + if ((stream->flag & SECUREC_MEM_STR_FLAG) > 0) { + /* according wchar_t has two bytes */ + ch = (SecInt)((stream->count -= (int)sizeof(SecChar)) >= 0 ? \ + (SecInt)(SECUREC_CHAR_MASK & \ + (unsigned int)(int)(*((const SecChar *)(const void *)stream->cur))) : SECUREC_EOF); + stream->cur += sizeof(SecChar); + } + *counter = *counter + 1; + return ch; +} + +/* + * Unget Public realizatio char for wchar and char version + */ +static void SecUnGetCharImpl(SecInt ch, SecFileStream *stream) +{ + if ((stream->flag & SECUREC_FROM_STDIN_FLAG) > 0) { +#if SECUREC_ENABLE_SCANF_FILE +#if defined(SECUREC_NO_STD_UNGETC) + stream->lastChar = (unsigned int)ch; + stream->fUnget = 1; +#else + (void)SECUREC_UN_GETC(ch, stream->pf); +#endif +#else + (void)ch; /* to clear e438 last value assigned not used , the compiler will optimize this code */ +#endif + } else if ((stream->flag & SECUREC_MEM_STR_FLAG) || (stream->flag & SECUREC_LOAD_FILE_TO_MEM_FLAG) > 0) { + if (stream->cur > stream->base) { + stream->cur -= sizeof(SecChar); + stream->count += (int)sizeof(SecChar); + } + } +#if SECUREC_ENABLE_SCANF_FILE + if ((stream->flag & SECUREC_FILE_STREAM_FLAG) > 0 && stream->base) { + stream->fileRealRead -= (int)sizeof(SecChar); + } +#endif +} + +/* + * Unget char for char version + */ +static void 
SecUnGetChar(SecInt ch, SecFileStream *stream, int *counter) +{ + if (ch != SECUREC_EOF) { + SecUnGetCharImpl(ch, stream); + } + *counter = *counter - 1; +} + +/* + * Skip space char by isspace + */ +static SecInt SecSkipSpaceChar(SecFileStream *stream, int *counter) +{ + SecInt ch; + do { + ch = SecGetChar(stream, counter); + } while (ch != SECUREC_EOF && SECUREC_IS_SPACE(ch)); + return ch; +} +#endif /* __INPUT_INL__5D13A042_DC3F_4ED9_A8D1_882811274C27 */ + diff --git a/third_party/securec/src/memcpy_s.c b/third_party/securec/src/memcpy_s.c new file mode 100644 index 00000000..5eb100f4 --- /dev/null +++ b/third_party/securec/src/memcpy_s.c @@ -0,0 +1,577 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define SECUREC_INLINE_DO_MEMCPY 1 +#include "securecutil.h" + +#ifndef SECUREC_MEMCOPY_WITH_PERFORMANCE +#define SECUREC_MEMCOPY_WITH_PERFORMANCE 0 +#endif + +#if SECUREC_WITH_PERFORMANCE_ADDONS || SECUREC_MEMCOPY_WITH_PERFORMANCE +#ifndef SECUREC_MEMCOPY_THRESHOLD_SIZE +#define SECUREC_MEMCOPY_THRESHOLD_SIZE 64UL +#endif +/* + * Determine whether the address is 8-byte aligned, use static to increase performance + * return 0 is aligned + */ +static int SecIsAddrAligned8(const void *addr, const void *zeroAddr) +{ + return (int)(((size_t)((const char*)addr - (const char*)zeroAddr)) & 7); /* use 7 to check aligned 8 */ +} + +#define SECUREC_SMALL_MEM_COPY do { \ + if (SECUREC_ADDR_ALIGNED_8(dest) && SECUREC_ADDR_ALIGNED_8(src)) { \ + /* use struct assignment */ \ + switch (count) { \ + case 1: \ + *(SecStrBuf1 *)dest = *(const SecStrBuf1 *)src; \ + break; \ + case 2: \ + *(SecStrBuf2 *)dest = *(const SecStrBuf2 *)src; \ + break; \ + case 3: \ + *(SecStrBuf3 *)dest = *(const SecStrBuf3 *)src; \ + break; \ + case 4: \ + *(SecStrBuf4 *)dest = *(const SecStrBuf4 *)src; \ + break; \ + case 5: \ + *(SecStrBuf5 *)dest = *(const SecStrBuf5 *)src; \ + break; \ + case 6: \ + *(SecStrBuf6 *)dest = *(const SecStrBuf6 *)src; \ + break; \ + case 7: \ + *(SecStrBuf7 *)dest = *(const SecStrBuf7 *)src; \ + break; \ + case 8: \ + *(SecStrBuf8 *)dest = *(const SecStrBuf8 *)src; \ + break; \ + case 9: \ + *(SecStrBuf9 *)dest = *(const SecStrBuf9 *)src; \ + break; \ + case 10: \ + *(SecStrBuf10 *)dest = *(const SecStrBuf10 *)src; \ + break; \ + case 11: \ + *(SecStrBuf11 *)dest = *(const SecStrBuf11 *)src; \ + break; \ + case 12: \ + *(SecStrBuf12 *)dest = *(const SecStrBuf12 *)src; \ + break; \ + case 13: \ + *(SecStrBuf13 *)dest = *(const SecStrBuf13 *)src; \ + break; \ + case 14: \ + *(SecStrBuf14 *)dest = *(const SecStrBuf14 *)src; \ + break; \ + case 15: \ + *(SecStrBuf15 *)dest = *(const SecStrBuf15 *)src; \ + break; \ + case 16: \ + *(SecStrBuf16 *)dest = *(const 
SecStrBuf16 *)src; \ + break; \ + case 17: \ + *(SecStrBuf17 *)dest = *(const SecStrBuf17 *)src; \ + break; \ + case 18: \ + *(SecStrBuf18 *)dest = *(const SecStrBuf18 *)src; \ + break; \ + case 19: \ + *(SecStrBuf19 *)dest = *(const SecStrBuf19 *)src; \ + break; \ + case 20: \ + *(SecStrBuf20 *)dest = *(const SecStrBuf20 *)src; \ + break; \ + case 21: \ + *(SecStrBuf21 *)dest = *(const SecStrBuf21 *)src; \ + break; \ + case 22: \ + *(SecStrBuf22 *)dest = *(const SecStrBuf22 *)src; \ + break; \ + case 23: \ + *(SecStrBuf23 *)dest = *(const SecStrBuf23 *)src; \ + break; \ + case 24: \ + *(SecStrBuf24 *)dest = *(const SecStrBuf24 *)src; \ + break; \ + case 25: \ + *(SecStrBuf25 *)dest = *(const SecStrBuf25 *)src; \ + break; \ + case 26: \ + *(SecStrBuf26 *)dest = *(const SecStrBuf26 *)src; \ + break; \ + case 27: \ + *(SecStrBuf27 *)dest = *(const SecStrBuf27 *)src; \ + break; \ + case 28: \ + *(SecStrBuf28 *)dest = *(const SecStrBuf28 *)src; \ + break; \ + case 29: \ + *(SecStrBuf29 *)dest = *(const SecStrBuf29 *)src; \ + break; \ + case 30: \ + *(SecStrBuf30 *)dest = *(const SecStrBuf30 *)src; \ + break; \ + case 31: \ + *(SecStrBuf31 *)dest = *(const SecStrBuf31 *)src; \ + break; \ + case 32: \ + *(SecStrBuf32 *)dest = *(const SecStrBuf32 *)src; \ + break; \ + case 33: \ + *(SecStrBuf33 *)dest = *(const SecStrBuf33 *)src; \ + break; \ + case 34: \ + *(SecStrBuf34 *)dest = *(const SecStrBuf34 *)src; \ + break; \ + case 35: \ + *(SecStrBuf35 *)dest = *(const SecStrBuf35 *)src; \ + break; \ + case 36: \ + *(SecStrBuf36 *)dest = *(const SecStrBuf36 *)src; \ + break; \ + case 37: \ + *(SecStrBuf37 *)dest = *(const SecStrBuf37 *)src; \ + break; \ + case 38: \ + *(SecStrBuf38 *)dest = *(const SecStrBuf38 *)src; \ + break; \ + case 39: \ + *(SecStrBuf39 *)dest = *(const SecStrBuf39 *)src; \ + break; \ + case 40: \ + *(SecStrBuf40 *)dest = *(const SecStrBuf40 *)src; \ + break; \ + case 41: \ + *(SecStrBuf41 *)dest = *(const SecStrBuf41 *)src; \ + break; \ + case 42: \ + 
*(SecStrBuf42 *)dest = *(const SecStrBuf42 *)src; \ + break; \ + case 43: \ + *(SecStrBuf43 *)dest = *(const SecStrBuf43 *)src; \ + break; \ + case 44: \ + *(SecStrBuf44 *)dest = *(const SecStrBuf44 *)src; \ + break; \ + case 45: \ + *(SecStrBuf45 *)dest = *(const SecStrBuf45 *)src; \ + break; \ + case 46: \ + *(SecStrBuf46 *)dest = *(const SecStrBuf46 *)src; \ + break; \ + case 47: \ + *(SecStrBuf47 *)dest = *(const SecStrBuf47 *)src; \ + break; \ + case 48: \ + *(SecStrBuf48 *)dest = *(const SecStrBuf48 *)src; \ + break; \ + case 49: \ + *(SecStrBuf49 *)dest = *(const SecStrBuf49 *)src; \ + break; \ + case 50: \ + *(SecStrBuf50 *)dest = *(const SecStrBuf50 *)src; \ + break; \ + case 51: \ + *(SecStrBuf51 *)dest = *(const SecStrBuf51 *)src; \ + break; \ + case 52: \ + *(SecStrBuf52 *)dest = *(const SecStrBuf52 *)src; \ + break; \ + case 53: \ + *(SecStrBuf53 *)dest = *(const SecStrBuf53 *)src; \ + break; \ + case 54: \ + *(SecStrBuf54 *)dest = *(const SecStrBuf54 *)src; \ + break; \ + case 55: \ + *(SecStrBuf55 *)dest = *(const SecStrBuf55 *)src; \ + break; \ + case 56: \ + *(SecStrBuf56 *)dest = *(const SecStrBuf56 *)src; \ + break; \ + case 57: \ + *(SecStrBuf57 *)dest = *(const SecStrBuf57 *)src; \ + break; \ + case 58: \ + *(SecStrBuf58 *)dest = *(const SecStrBuf58 *)src; \ + break; \ + case 59: \ + *(SecStrBuf59 *)dest = *(const SecStrBuf59 *)src; \ + break; \ + case 60: \ + *(SecStrBuf60 *)dest = *(const SecStrBuf60 *)src; \ + break; \ + case 61: \ + *(SecStrBuf61 *)dest = *(const SecStrBuf61 *)src; \ + break; \ + case 62: \ + *(SecStrBuf62 *)dest = *(const SecStrBuf62 *)src; \ + break; \ + case 63: \ + *(SecStrBuf63 *)dest = *(const SecStrBuf63 *)src; \ + break; \ + case 64: \ + *(SecStrBuf64 *)dest = *(const SecStrBuf64 *)src; \ + break; \ + default: \ + break; \ + } /* END switch */ \ + } else { \ + char *tmpDest = (char *)dest; \ + const char *tmpSrc = (const char *)src; \ + switch (count) { \ + case 64: \ + *(tmpDest++) = *(tmpSrc++); \ + /* 
fall-through */ /* FALLTHRU */ \ + case 63: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 62: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 61: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 60: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 59: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 58: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 57: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 56: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 55: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 54: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 53: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 52: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 51: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 50: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 49: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 48: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 47: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 46: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 45: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 44: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 43: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 42: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 41: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 40: \ + *(tmpDest++) = *(tmpSrc++); \ + /* 
fall-through */ /* FALLTHRU */ \ + case 39: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 38: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 37: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 36: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 35: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 34: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 33: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 32: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 31: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 30: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 29: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 28: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 27: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 26: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 25: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 24: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 23: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 22: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 21: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 20: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 19: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 18: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 17: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 16: \ + *(tmpDest++) = *(tmpSrc++); \ + /* 
fall-through */ /* FALLTHRU */ \ + case 15: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 14: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 13: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 12: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 11: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 10: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 9: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 8: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 7: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 6: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 5: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 4: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 3: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 2: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 1: \ + *(tmpDest++) = *(tmpSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + default: \ + break; \ + } \ + } \ +} SECUREC_WHILE_ZERO +#endif + +/* + * Handling errors + */ +static errno_t SecMemcpyError(void *dest, size_t destMax, const void *src, size_t count) +{ + if (destMax == 0 || destMax > SECUREC_MEM_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("memcpy_s"); + return ERANGE; + } + if (dest == NULL || src == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("memcpy_s"); + if (dest != NULL) { + (void)memset(dest, 0, destMax); + return EINVAL_AND_RESET; + } + return EINVAL; + } + if (count > destMax) { + (void)memset(dest, 0, destMax); + SECUREC_ERROR_INVALID_RANGE("memcpy_s"); + return ERANGE_AND_RESET; + } + if (dest == src) { + return EOK; + } + if ((dest > src && dest < (const void *)((const unsigned 
char *)src + count)) || \
+        (src > dest && src < (void *)((unsigned char *)dest + count))) {
+        (void)memset(dest, 0, destMax);
+        SECUREC_ERROR_BUFFER_OVERLAP("memcpy_s");
+        return EOVERLAP_AND_RESET;
+    }
+    /* count == 0 also return EOK */
+    return EOK;
+}
+
+#if SECUREC_WITH_PERFORMANCE_ADDONS || SECUREC_MEMCOPY_WITH_PERFORMANCE
+/*
+ * Performance optimization
+ */
+static void SecDoMemcpyOpt(void *dest, const void *src, size_t count)
+{
+    if (count > SECUREC_MEMCOPY_THRESHOLD_SIZE) {
+        SecDoMemcpy(dest, src, count);
+    } else {
+        SECUREC_SMALL_MEM_COPY;
+    }
+    return;
+}
+#endif
+
+#if defined(SECUREC_COMPATIBLE_WIN_FORMAT)
+    /* fread API in windows will call memcpy_s and pass 0xffffffff to destMax.
+     * To avoid the failure of fread, we don't check destMax limit.
+     */
+#define SECUREC_MEMCPY_PARAM_OK(dest, destMax, src, count) (SECUREC_LIKELY((count) <= (destMax) && \
+    (dest) != NULL && (src) != NULL && \
+    (count) > 0 && SECUREC_MEMORY_NO_OVERLAP((dest), (src), (count))))
+#else
+#define SECUREC_MEMCPY_PARAM_OK(dest, destMax, src, count) (SECUREC_LIKELY((count) <= (destMax) && \
+    (dest) != NULL && (src) != NULL && \
+    (destMax) <= SECUREC_MEM_MAX_LEN && \
+    (count) > 0 && SECUREC_MEMORY_NO_OVERLAP((dest), (src), (count))))
+#endif
+
+/*
+ *
+ * The memcpy_s function copies n characters from the object pointed to by src into the object pointed to by dest
+ *
+ *
+ * dest Destination buffer.
+ * destMax Size of the destination buffer.
+ * src Buffer to copy from.
+ * count Number of characters to copy
+ *
+ *
+ * dest buffer is updated.
+ *
+ *
+ * EOK Success
+ * EINVAL dest is NULL and destMax != 0 and destMax <= SECUREC_MEM_MAX_LEN
+ * EINVAL_AND_RESET dest != NULL and src is NULL and destMax != 0 and destMax <= SECUREC_MEM_MAX_LEN
+ * ERANGE destMax > SECUREC_MEM_MAX_LEN or destMax is 0
+ * ERANGE_AND_RESET count > destMax and destMax != 0 and destMax <= SECUREC_MEM_MAX_LEN
+ * and dest != NULL and src != NULL
+ * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and
+ * count <= destMax destMax != 0 and destMax <= SECUREC_MEM_MAX_LEN and dest != NULL
+ * and src != NULL and dest != src
+ *
+ * if an error occurred, dest will be filled with 0.
+ * If the source and destination overlap, the behavior of memcpy_s is undefined.
+ * Use memmove_s to handle overlapping regions.
+ */
+errno_t memcpy_s(void *dest, size_t destMax, const void *src, size_t count)
+{
+    if (SECUREC_MEMCPY_PARAM_OK(dest, destMax, src, count)) {
+#if SECUREC_MEMCOPY_WITH_PERFORMANCE
+        SecDoMemcpyOpt(dest, src, count);
+#else
+        SecDoMemcpy(dest, src, count);
+#endif
+        return EOK;
+    }
+    /* meet some runtime violation, return error code */
+    return SecMemcpyError(dest, destMax, src, count);
+}
+
+#if SECUREC_IN_KERNEL
+EXPORT_SYMBOL(memcpy_s);
+#endif
+
+#if SECUREC_WITH_PERFORMANCE_ADDONS
+/*
+ * Performance optimization
+ */
+errno_t memcpy_sOptAsm(void *dest, size_t destMax, const void *src, size_t count)
+{
+    if (SECUREC_MEMCPY_PARAM_OK(dest, destMax, src, count)) {
+        SecDoMemcpyOpt(dest, src, count);
+        return EOK;
+    }
+    /* meet some runtime violation, return error code */
+    return SecMemcpyError(dest, destMax, src, count);
+}
+
+/* trim judgement on "destMax <= SECUREC_MEM_MAX_LEN" */
+errno_t memcpy_sOptTc(void *dest, size_t destMax, const void *src, size_t count)
+{
+    if (SECUREC_LIKELY(count <= destMax && dest != NULL && src != NULL && \
+        count > 0 && \
+        ((dest > src && (const void *)((const unsigned char *)src + count) <= dest) || \
+        (src > dest && (void *)((unsigned char *)dest + count) <= src)))) {
SecDoMemcpyOpt(dest, src, count); + return EOK; + } + /* meet some runtime violation, return error code */ + return SecMemcpyError(dest, destMax, src, count); +} +#endif + diff --git a/third_party/securec/src/memmove_s.c b/third_party/securec/src/memmove_s.c new file mode 100644 index 00000000..ec6d04a7 --- /dev/null +++ b/third_party/securec/src/memmove_s.c @@ -0,0 +1,120 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securecutil.h" + +#ifdef SECUREC_NOT_CALL_LIBC_CORE_API +/* + * Implementing memory data movement + */ +static void SecUtilMemmove(void *dst, const void *src, size_t count) +{ + unsigned char *pDest = (unsigned char *)dst; + const unsigned char *pSrc = (const unsigned char *)src; + size_t maxCount = count; + + if (dst <= src || pDest >= (pSrc + maxCount)) { + /* + * Non-Overlapping Buffers + * copy from lower addresses to higher addresses + */ + while (maxCount--) { + *pDest = *pSrc; + ++pDest; + ++pSrc; + } + } else { + /* + * Overlapping Buffers + * copy from higher addresses to lower addresses + */ + pDest = pDest + maxCount - 1; + pSrc = pSrc + maxCount - 1; + + while (maxCount--) { + *pDest = *pSrc; + + --pDest; + --pSrc; + } + } +} +#endif + +/* + * + * The memmove_s function copies count bytes of characters from src to dest. + * This function can be assigned correctly when memory overlaps. + * + * dest Destination object. 
+ * destMax Size of the destination buffer.
+ * src Source object.
+ * count Number of characters to copy.
+ *
+ *
+ * dest buffer is updated.
+ *
+ *
+ * EOK Success
+ * EINVAL dest is NULL and destMax != 0 and destMax <= SECUREC_MEM_MAX_LEN
+ * EINVAL_AND_RESET dest != NULL and src is NULL and destMax != 0 and destMax <= SECUREC_MEM_MAX_LEN
+ * ERANGE destMax > SECUREC_MEM_MAX_LEN or destMax is 0
+ * ERANGE_AND_RESET count > destMax and dest != NULL and src != NULL and destMax != 0
+ * and destMax <= SECUREC_MEM_MAX_LEN
+ *
+ * If an error occurred, dest will be filled with 0 when dest and destMax valid.
+ * If some regions of the source area and the destination overlap, memmove_s
+ * ensures that the original source bytes in the overlapping region are copied
+ * before being overwritten.
+ */
+errno_t memmove_s(void *dest, size_t destMax, const void *src, size_t count)
+{
+    if (destMax == 0 || destMax > SECUREC_MEM_MAX_LEN) {
+        SECUREC_ERROR_INVALID_RANGE("memmove_s");
+        return ERANGE;
+    }
+    if (dest == NULL || src == NULL) {
+        SECUREC_ERROR_INVALID_PARAMTER("memmove_s");
+        if (dest != NULL) {
+            (void)memset(dest, 0, destMax);
+            return EINVAL_AND_RESET;
+        }
+        return EINVAL;
+    }
+    if (count > destMax) {
+        (void)memset(dest, 0, destMax);
+        SECUREC_ERROR_INVALID_RANGE("memmove_s");
+        return ERANGE_AND_RESET;
+    }
+    if (dest == src) {
+        return EOK;
+    }
+
+    if (count > 0) {
+#ifdef SECUREC_NOT_CALL_LIBC_CORE_API
+        SecUtilMemmove(dest, src, count);
+#else
+        /* use underlying memmove for performance consideration */
+        (void)memmove(dest, src, count);
+#endif
+    }
+    return EOK;
+}
+
+#if SECUREC_IN_KERNEL
+EXPORT_SYMBOL(memmove_s);
+#endif
+
diff --git a/third_party/securec/src/memset_s.c b/third_party/securec/src/memset_s.c
new file mode 100644
index 00000000..cd3f9887
--- /dev/null
+++ b/third_party/securec/src/memset_s.c
@@ -0,0 +1,522 @@
+/**
+ * Copyright 2020 Huawei Technologies Co., Ltd
+ *
+ * Licensed under the Apache License, Version 2.0 (the "License");
+ *
you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_DO_MEMSET 1 + +#include "securecutil.h" + +#ifndef SECUREC_MEMSET_WITH_PERFORMANCE +#define SECUREC_MEMSET_WITH_PERFORMANCE 0 +#endif + +#define SECUREC_MEMSET_PARAM_OK(dest, destMax, count) (SECUREC_LIKELY((count) <= (destMax) && \ + (dest) != NULL && (destMax) <= SECUREC_MEM_MAX_LEN)) + + +#if SECUREC_WITH_PERFORMANCE_ADDONS || SECUREC_MEMSET_WITH_PERFORMANCE +/* + * Determine whether the address is 8-byte aligned, use static to increase performance + * return 0 is aligned + */ +static int SecIsAddrAligned8(const void *addr, const void *zeroAddr) +{ + return (int)(((size_t)((const char*)addr - (const char*)zeroAddr)) & 7); /* use 7 to check aligned 8 */ +} + +/* use union to clear strict-aliasing warning */ +typedef union { + SecStrBuf32 buf32; + SecStrBuf31 buf31; + SecStrBuf30 buf30; + SecStrBuf29 buf29; + SecStrBuf28 buf28; + SecStrBuf27 buf27; + SecStrBuf26 buf26; + SecStrBuf25 buf25; + SecStrBuf24 buf24; + SecStrBuf23 buf23; + SecStrBuf22 buf22; + SecStrBuf21 buf21; + SecStrBuf20 buf20; + SecStrBuf19 buf19; + SecStrBuf18 buf18; + SecStrBuf17 buf17; + SecStrBuf16 buf16; + SecStrBuf15 buf15; + SecStrBuf14 buf14; + SecStrBuf13 buf13; + SecStrBuf12 buf12; + SecStrBuf11 buf11; + SecStrBuf10 buf10; + SecStrBuf9 buf9; + SecStrBuf8 buf8; + SecStrBuf7 buf7; + SecStrBuf6 buf6; + SecStrBuf5 buf5; + SecStrBuf4 buf4; + SecStrBuf3 buf3; + SecStrBuf2 buf2; + SecStrBuf1 buf1; +} SecStrBuf32Union; +/* C standard initializes the first member of the consortium. 
*/ +static const SecStrBuf32 g_allZero = {{ + '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', + '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', + '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0', + '\0', '\0', '\0', '\0', '\0', '\0', '\0', '\0' +}}; +static const SecStrBuf32 g_allFF = {{ + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, + 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF, 0xFF +}}; + +static const SecStrBuf32Union *SecStrictAliasingCast(const SecStrBuf32 *buf) +{ + return (const SecStrBuf32Union *)buf; +} + +#ifndef SECUREC_MEMSET_THRESHOLD_SIZE +#define SECUREC_MEMSET_THRESHOLD_SIZE 32UL +#endif + +#define SECUREC_UNALIGNED_SET do { \ + char *pcDest = (char *)dest; \ + switch (count) { \ + case 32: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 31: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 30: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 29: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 28: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 27: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 26: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 25: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 24: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 23: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 22: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 21: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 20: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 19: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 18: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 17: \ + 
*(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 16: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 15: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 14: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 13: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 12: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 11: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 10: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 9: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 8: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 7: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 6: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 5: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 4: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 3: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 2: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + case 1: \ + *(pcDest++) = (char)c; \ + /* fall-through */ /* FALLTHRU */ \ + default: \ + break; \ + } \ +} SECUREC_WHILE_ZERO + +#define SECUREC_ALIGNED_SET_OPT_ZERO_FF do { \ + switch (c) { \ + case 0: \ + switch (count) { \ + case 1: \ + *(SecStrBuf1 *)dest = *(const SecStrBuf1 *)(&((SecStrictAliasingCast(&g_allZero))->buf1)); \ + break; \ + case 2: \ + *(SecStrBuf2 *)dest = *(const SecStrBuf2 *)(&((SecStrictAliasingCast(&g_allZero))->buf2)); \ + break; \ + case 3: \ + *(SecStrBuf3 *)dest = *(const SecStrBuf3 *)(&((SecStrictAliasingCast(&g_allZero))->buf3)); \ + break; \ + case 4: \ + *(SecStrBuf4 *)dest = *(const SecStrBuf4 *)(&((SecStrictAliasingCast(&g_allZero))->buf4)); \ + break; \ + case 5: \ + *(SecStrBuf5 *)dest = *(const SecStrBuf5 
*)(&((SecStrictAliasingCast(&g_allZero))->buf5)); \ + break; \ + case 6: \ + *(SecStrBuf6 *)dest = *(const SecStrBuf6 *)(&((SecStrictAliasingCast(&g_allZero))->buf6)); \ + break; \ + case 7: \ + *(SecStrBuf7 *)dest = *(const SecStrBuf7 *)(&((SecStrictAliasingCast(&g_allZero))->buf7)); \ + break; \ + case 8: \ + *(SecStrBuf8 *)dest = *(const SecStrBuf8 *)(&((SecStrictAliasingCast(&g_allZero))->buf8)); \ + break; \ + case 9: \ + *(SecStrBuf9 *)dest = *(const SecStrBuf9 *)(&((SecStrictAliasingCast(&g_allZero))->buf9)); \ + break; \ + case 10: \ + *(SecStrBuf10 *)dest = *(const SecStrBuf10 *)(&((SecStrictAliasingCast(&g_allZero))->buf10)); \ + break; \ + case 11: \ + *(SecStrBuf11 *)dest = *(const SecStrBuf11 *)(&((SecStrictAliasingCast(&g_allZero))->buf11)); \ + break; \ + case 12: \ + *(SecStrBuf12 *)dest = *(const SecStrBuf12 *)(&((SecStrictAliasingCast(&g_allZero))->buf12)); \ + break; \ + case 13: \ + *(SecStrBuf13 *)dest = *(const SecStrBuf13 *)(&((SecStrictAliasingCast(&g_allZero))->buf13)); \ + break; \ + case 14: \ + *(SecStrBuf14 *)dest = *(const SecStrBuf14 *)(&((SecStrictAliasingCast(&g_allZero))->buf14)); \ + break; \ + case 15: \ + *(SecStrBuf15 *)dest = *(const SecStrBuf15 *)(&((SecStrictAliasingCast(&g_allZero))->buf15)); \ + break; \ + case 16: \ + *(SecStrBuf16 *)dest = *(const SecStrBuf16 *)(&((SecStrictAliasingCast(&g_allZero))->buf16)); \ + break; \ + case 17: \ + *(SecStrBuf17 *)dest = *(const SecStrBuf17 *)(&((SecStrictAliasingCast(&g_allZero))->buf17)); \ + break; \ + case 18: \ + *(SecStrBuf18 *)dest = *(const SecStrBuf18 *)(&((SecStrictAliasingCast(&g_allZero))->buf18)); \ + break; \ + case 19: \ + *(SecStrBuf19 *)dest = *(const SecStrBuf19 *)(&((SecStrictAliasingCast(&g_allZero))->buf19)); \ + break; \ + case 20: \ + *(SecStrBuf20 *)dest = *(const SecStrBuf20 *)(&((SecStrictAliasingCast(&g_allZero))->buf20)); \ + break; \ + case 21: \ + *(SecStrBuf21 *)dest = *(const SecStrBuf21 *)(&((SecStrictAliasingCast(&g_allZero))->buf21)); \ + break; \ 
+ case 22: \ + *(SecStrBuf22 *)dest = *(const SecStrBuf22 *)(&((SecStrictAliasingCast(&g_allZero))->buf22)); \ + break; \ + case 23: \ + *(SecStrBuf23 *)dest = *(const SecStrBuf23 *)(&((SecStrictAliasingCast(&g_allZero))->buf23)); \ + break; \ + case 24: \ + *(SecStrBuf24 *)dest = *(const SecStrBuf24 *)(&((SecStrictAliasingCast(&g_allZero))->buf24)); \ + break; \ + case 25: \ + *(SecStrBuf25 *)dest = *(const SecStrBuf25 *)(&((SecStrictAliasingCast(&g_allZero))->buf25)); \ + break; \ + case 26: \ + *(SecStrBuf26 *)dest = *(const SecStrBuf26 *)(&((SecStrictAliasingCast(&g_allZero))->buf26)); \ + break; \ + case 27: \ + *(SecStrBuf27 *)dest = *(const SecStrBuf27 *)(&((SecStrictAliasingCast(&g_allZero))->buf27)); \ + break; \ + case 28: \ + *(SecStrBuf28 *)dest = *(const SecStrBuf28 *)(&((SecStrictAliasingCast(&g_allZero))->buf28)); \ + break; \ + case 29: \ + *(SecStrBuf29 *)dest = *(const SecStrBuf29 *)(&((SecStrictAliasingCast(&g_allZero))->buf29)); \ + break; \ + case 30: \ + *(SecStrBuf30 *)dest = *(const SecStrBuf30 *)(&((SecStrictAliasingCast(&g_allZero))->buf30)); \ + break; \ + case 31: \ + *(SecStrBuf31 *)dest = *(const SecStrBuf31 *)(&((SecStrictAliasingCast(&g_allZero))->buf31)); \ + break; \ + case 32: \ + *(SecStrBuf32 *)dest = *(const SecStrBuf32 *)(&((SecStrictAliasingCast(&g_allZero))->buf32)); \ + break; \ + default: \ + break; \ + } \ + break; \ + case 0xFF: \ + switch (count) { \ + case 1: \ + *(SecStrBuf1 *)dest = *(const SecStrBuf1 *)(&((SecStrictAliasingCast(&g_allFF))->buf1)); \ + break; \ + case 2: \ + *(SecStrBuf2 *)dest = *(const SecStrBuf2 *)(&((SecStrictAliasingCast(&g_allFF))->buf2)); \ + break; \ + case 3: \ + *(SecStrBuf3 *)dest = *(const SecStrBuf3 *)(&((SecStrictAliasingCast(&g_allFF))->buf3)); \ + break; \ + case 4: \ + *(SecStrBuf4 *)dest = *(const SecStrBuf4 *)(&((SecStrictAliasingCast(&g_allFF))->buf4)); \ + break; \ + case 5: \ + *(SecStrBuf5 *)dest = *(const SecStrBuf5 *)(&((SecStrictAliasingCast(&g_allFF))->buf5)); \ + break; \ 
+ case 6: \ + *(SecStrBuf6 *)dest = *(const SecStrBuf6 *)(&((SecStrictAliasingCast(&g_allFF))->buf6)); \ + break; \ + case 7: \ + *(SecStrBuf7 *)dest = *(const SecStrBuf7 *)(&((SecStrictAliasingCast(&g_allFF))->buf7)); \ + break; \ + case 8: \ + *(SecStrBuf8 *)dest = *(const SecStrBuf8 *)(&((SecStrictAliasingCast(&g_allFF))->buf8)); \ + break; \ + case 9: \ + *(SecStrBuf9 *)dest = *(const SecStrBuf9 *)(&((SecStrictAliasingCast(&g_allFF))->buf9)); \ + break; \ + case 10: \ + *(SecStrBuf10 *)dest = *(const SecStrBuf10 *)(&((SecStrictAliasingCast(&g_allFF))->buf10)); \ + break; \ + case 11: \ + *(SecStrBuf11 *)dest = *(const SecStrBuf11 *)(&((SecStrictAliasingCast(&g_allFF))->buf11)); \ + break; \ + case 12: \ + *(SecStrBuf12 *)dest = *(const SecStrBuf12 *)(&((SecStrictAliasingCast(&g_allFF))->buf12)); \ + break; \ + case 13: \ + *(SecStrBuf13 *)dest = *(const SecStrBuf13 *)(&((SecStrictAliasingCast(&g_allFF))->buf13)); \ + break; \ + case 14: \ + *(SecStrBuf14 *)dest = *(const SecStrBuf14 *)(&((SecStrictAliasingCast(&g_allFF))->buf14)); \ + break; \ + case 15: \ + *(SecStrBuf15 *)dest = *(const SecStrBuf15 *)(&((SecStrictAliasingCast(&g_allFF))->buf15)); \ + break; \ + case 16: \ + *(SecStrBuf16 *)dest = *(const SecStrBuf16 *)(&((SecStrictAliasingCast(&g_allFF))->buf16)); \ + break; \ + case 17: \ + *(SecStrBuf17 *)dest = *(const SecStrBuf17 *)(&((SecStrictAliasingCast(&g_allFF))->buf17)); \ + break; \ + case 18: \ + *(SecStrBuf18 *)dest = *(const SecStrBuf18 *)(&((SecStrictAliasingCast(&g_allFF))->buf18)); \ + break; \ + case 19: \ + *(SecStrBuf19 *)dest = *(const SecStrBuf19 *)(&((SecStrictAliasingCast(&g_allFF))->buf19)); \ + break; \ + case 20: \ + *(SecStrBuf20 *)dest = *(const SecStrBuf20 *)(&((SecStrictAliasingCast(&g_allFF))->buf20)); \ + break; \ + case 21: \ + *(SecStrBuf21 *)dest = *(const SecStrBuf21 *)(&((SecStrictAliasingCast(&g_allFF))->buf21)); \ + break; \ + case 22: \ + *(SecStrBuf22 *)dest = *(const SecStrBuf22 
*)(&((SecStrictAliasingCast(&g_allFF))->buf22)); \ + break; \ + case 23: \ + *(SecStrBuf23 *)dest = *(const SecStrBuf23 *)(&((SecStrictAliasingCast(&g_allFF))->buf23)); \ + break; \ + case 24: \ + *(SecStrBuf24 *)dest = *(const SecStrBuf24 *)(&((SecStrictAliasingCast(&g_allFF))->buf24)); \ + break; \ + case 25: \ + *(SecStrBuf25 *)dest = *(const SecStrBuf25 *)(&((SecStrictAliasingCast(&g_allFF))->buf25)); \ + break; \ + case 26: \ + *(SecStrBuf26 *)dest = *(const SecStrBuf26 *)(&((SecStrictAliasingCast(&g_allFF))->buf26)); \ + break; \ + case 27: \ + *(SecStrBuf27 *)dest = *(const SecStrBuf27 *)(&((SecStrictAliasingCast(&g_allFF))->buf27)); \ + break; \ + case 28: \ + *(SecStrBuf28 *)dest = *(const SecStrBuf28 *)(&((SecStrictAliasingCast(&g_allFF))->buf28)); \ + break; \ + case 29: \ + *(SecStrBuf29 *)dest = *(const SecStrBuf29 *)(&((SecStrictAliasingCast(&g_allFF))->buf29)); \ + break; \ + case 30: \ + *(SecStrBuf30 *)dest = *(const SecStrBuf30 *)(&((SecStrictAliasingCast(&g_allFF))->buf30)); \ + break; \ + case 31: \ + *(SecStrBuf31 *)dest = *(const SecStrBuf31 *)(&((SecStrictAliasingCast(&g_allFF))->buf31)); \ + break; \ + case 32: \ + *(SecStrBuf32 *)dest = *(const SecStrBuf32 *)(&((SecStrictAliasingCast(&g_allFF))->buf32)); \ + break; \ + default: \ + break; \ + } \ + break; \ + default: \ + SECUREC_UNALIGNED_SET; \ + } /* END switch */ \ +} SECUREC_WHILE_ZERO +#endif + +/* + * Handling errors + */ +static errno_t SecMemsetError(void *dest, size_t destMax, int c, size_t count) +{ + if (destMax == 0 || destMax > SECUREC_MEM_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("memset_s"); + return ERANGE; + } + if (dest == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("memset_s"); + return EINVAL; + } + if (count > destMax) { + (void)memset(dest, c, destMax); /* set entire buffer to value c */ + SECUREC_ERROR_INVALID_RANGE("memset_s"); + return ERANGE_AND_RESET; + } + return EOK; +} + +#if SECUREC_WITH_PERFORMANCE_ADDONS || SECUREC_MEMSET_WITH_PERFORMANCE +/* + * Performance 
optimization + */ +static void SecDoMemsetOpt(void *dest, int c, size_t count) +{ + if (count > SECUREC_MEMSET_THRESHOLD_SIZE) { + SecDoMemset(dest, c, count); + } else { + if (SECUREC_ADDR_ALIGNED_8(dest)) { + /* use struct assignment */ + SECUREC_ALIGNED_SET_OPT_ZERO_FF; + } else { + SECUREC_UNALIGNED_SET; + } + } + return; +} +#endif + +/* + * + * The memset_s function copies the value of c (converted to an unsigned char) + * into each of the first count characters of the object pointed to by dest. + * + * + * dest Pointer to destination. + * destMax The size of the buffer. + * c Character to set. + * count Number of characters. + * + * + * dest buffer is uptdated. + * + * + * EOK Success + * EINVAL dest == NULL and destMax != 0 and destMax <= SECUREC_MEM_MAX_LEN + * ERANGE destMax is 0 or destMax > SECUREC_MEM_MAX_LEN + * ERANGE_AND_RESET count > destMax and destMax != 0 and destMax <= SECUREC_MEM_MAX_LEN and dest != NULL + * + * if return ERANGE_AND_RESET then fill dest to c ,fill length is destMax + */ +errno_t memset_s(void *dest, size_t destMax, int c, size_t count) +{ + if (SECUREC_MEMSET_PARAM_OK(dest, destMax, count)) { +#if SECUREC_MEMSET_WITH_PERFORMANCE + SecDoMemsetOpt(dest, c, count); +#else + SecDoMemset(dest, c, count); +#endif + return EOK; + } else { + /* meet some runtime violation, return error code */ + return SecMemsetError(dest, destMax, c, count); + } +} + +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(memset_s); +#endif + +#if SECUREC_WITH_PERFORMANCE_ADDONS +/* + * Performance optimization + */ +errno_t memset_sOptAsm(void *dest, size_t destMax, int c, size_t count) +{ + if (SECUREC_MEMSET_PARAM_OK(dest, destMax, count)) { + SecDoMemsetOpt(dest, c, count); + return EOK; + } + /* meet some runtime violation, return error code */ + return SecMemsetError(dest, destMax, c, count); +} + +/* + * Performance optimization + */ +errno_t memset_sOptTc(void *dest, size_t destMax, int c, size_t count) +{ + if (SECUREC_LIKELY(count <= destMax && dest != 
NULL)) { + SecDoMemsetOpt(dest, c, count); + return EOK; + } + /* meet some runtime violation, return error code */ + return SecMemsetError(dest, destMax, c, count); +} +#endif + diff --git a/third_party/securec/src/output.inl b/third_party/securec/src/output.inl new file mode 100644 index 00000000..d4e136c5 --- /dev/null +++ b/third_party/securec/src/output.inl @@ -0,0 +1,1401 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#ifndef OUTPUT_INL_2B263E9C_43D8_44BB_B17A_6D2033DECEE5 +#define OUTPUT_INL_2B263E9C_43D8_44BB_B17A_6D2033DECEE5 + +#define SECUREC_NULL_STRING_SIZE 8 +#define SECUREC_STATE_TABLE_SIZE 337 +#define SECUREC_OFFSET_BITS_WORD 16 +#define SECUREC_OFFSET_BITS_DWORD 32 + +#define SECUREC_OFFSET_DIV_OCTAL 3 +#define SECUREC_OFFSET_DIV_HEX 4 +#define SECUREC_RADIX_OCTAL 8 +#define SECUREC_RADIX_DECIMAL 10 +#define SECUREC_RADIX_HEX 16 +/* Use two displacements to eliminate compilation warnings */ +#define SECUREC_SHR_DWORD(x) (((x) >> 16) >> 16) +#define SECUREC_PREFIX_LEN 2 +/* size include '+' and '\0' */ +#define SECUREC_FLOAT_BUF_EXT 2 + + +#ifdef SECUREC_STACK_SIZE_LESS_THAN_1K +#define SECUREC_FMT_STR_LEN 8 +#else +#define SECUREC_FMT_STR_LEN 16 +#endif + +typedef struct { + unsigned int flags; + int fldWidth; + int precision; + int bufferIsWide; /* flag for buffer contains wide chars ;0 is not wide char */ + int dynWidth; /* %* 1 width from variable parameter ;0 not */ + int dynPrecision; /* %.* 1 precision from variable parameter ;0 not */ +} SecFormatAttr; + +typedef union { + char *str; /* not a null terminated string */ +#if SECUREC_HAVE_WCHART + wchar_t *wStr; +#endif +} SecFormatBuf; + +typedef union { + char str[SECUREC_BUFFER_SIZE + 1]; +#ifdef SECUREC_FOR_WCHAR + wchar_t wStr[SECUREC_BUFFER_SIZE + 1]; +#endif +} SecBuffer; + +#if SECUREC_ENABLE_SPRINTF_FLOAT +/* call system sprintf to format float value */ +static int SecIndirectSprintf(char *strDest, const char *format, ...) 
+{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + SECUREC_MASK_MSVC_CRT_WARNING + ret = vsprintf(strDest, format, argList); + SECUREC_END_MASK_MSVC_CRT_WARNING + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} + +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT +/* out put long double value to dest */ +static int SecFormatLongDboule(char *strDest,const SecFormatAttr *formatAttr, const char *fmt, long double ldValue) +{ + int fldWidth = ((formatAttr->flags & SECUREC_FLAG_LEFT) ? (-(formatAttr->fldWidth)) : formatAttr->fldWidth); + if (formatAttr->dynWidth && formatAttr->dynPrecision) { + return SecIndirectSprintf(strDest, fmt, fldWidth, formatAttr->precision, ldValue); + } else if (formatAttr->dynWidth) { + return SecIndirectSprintf(strDest, fmt, fldWidth, ldValue); + } else if (formatAttr->dynPrecision) { + return SecIndirectSprintf(strDest, fmt, formatAttr->precision, ldValue); + } + return SecIndirectSprintf(strDest, fmt, ldValue); +} +#endif + +/* out put double value to dest */ +static int SecFormatDboule(char *strDest, const SecFormatAttr *formatAttr, const char *fmt, double dValue) +{ + int fldWidth = ((formatAttr->flags & SECUREC_FLAG_LEFT) ? 
(-(formatAttr->fldWidth)) : formatAttr->fldWidth); + if (formatAttr->dynWidth && formatAttr->dynPrecision) { + return SecIndirectSprintf(strDest, fmt, fldWidth, formatAttr->precision, dValue); + } else if (formatAttr->dynWidth) { + return SecIndirectSprintf(strDest, fmt, fldWidth, dValue); + } else if (formatAttr->dynPrecision) { + return SecIndirectSprintf(strDest, fmt, formatAttr->precision, dValue); + } + return SecIndirectSprintf(strDest, fmt, dValue); +} +#endif + +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT +/* to clear e506 warning */ +static int SecIsSameSize(size_t sizeA, size_t sizeB) +{ + return sizeA == sizeB; +} +#endif + +#define SECUREC_SPECIAL_DWORD(val32, numBase) do { \ + --formatBuf.str; \ + *(formatBuf.str) = digits[(val32) % (numBase)]; \ +} while (((val32) /= (numBase)) != 0) + +#if defined(SECUREC_USE_SPECIAL_DIV64) || (defined(SECUREC_VXWORKS_VERSION_5_4) && !defined(SECUREC_ON_64BITS)) +/* + * Fast divide by 10 algorithm. + * Calculation divisor multiply 0xcccccccccccccccdULL, resultHi64 >> 3 as quotient + */ +static void SecU64Div10(SecUnsignedInt64 divisor, SecUnsignedInt64 *quotient, SecUnsignedInt32 *remainder) +{ + SecUnsignedInt64 mask = 0xffffffffULL; /* use 0xffffffffULL as 32 bit mask */ + SecUnsignedInt64 magicHi = 0xccccccccULL; /* fast divide 10 magic numbers high 32bit 0xccccccccULL */ + SecUnsignedInt64 magicLow = 0xcccccccdULL; /* fast divide 10 magic numbers low 32bit 0xcccccccdULL */ + SecUnsignedInt64 divisorHi = (SecUnsignedInt64)(SECUREC_SHR_DWORD(divisor)); /* hig 32 bit use */ + SecUnsignedInt64 divisorLow = (SecUnsignedInt64)(divisor & mask); /* low 32 bit mask */ + SecUnsignedInt64 factorHi = divisorHi * magicHi; + SecUnsignedInt64 factorLow1 = divisorHi * magicLow; + SecUnsignedInt64 factorLow2 = divisorLow * magicHi; + SecUnsignedInt64 factorLow3 = divisorLow * magicLow; + SecUnsignedInt64 carry = (factorLow1 & mask) + (factorLow2 & mask) + SECUREC_SHR_DWORD(factorLow3); + SecUnsignedInt64 resultHi64 = factorHi + 
SECUREC_SHR_DWORD(factorLow1) + \ + SECUREC_SHR_DWORD(factorLow2) + SECUREC_SHR_DWORD(carry); + + *quotient = resultHi64 >> 3; /* fast divide 10 magic numbers 3 */ + *remainder = (SecUnsignedInt32)(divisor - ((*quotient) * 10)); /* quotient mul 10 */ + return; +} +#if defined(SECUREC_VXWORKS_VERSION_5_4) && !defined(SECUREC_ON_64BITS) +/* + * Divide function for VXWORKS + */ +static int SecU64Div32(SecUnsignedInt64 divisor, SecUnsignedInt32 radix, + SecUnsignedInt64 *quotient, SecUnsignedInt32 *remainder) +{ + switch (radix) { + case SECUREC_RADIX_DECIMAL: + SecU64Div10(divisor, quotient, remainder); + break; + case SECUREC_RADIX_HEX: + *quotient = divisor >> SECUREC_OFFSET_DIV_HEX; + *remainder = divisor & 0xfULL; /* mask one hex number by 0xfULL */ + break; + case SECUREC_RADIX_OCTAL: + *quotient = divisor >> SECUREC_OFFSET_DIV_OCTAL; + *remainder = divisor & 0x7ULL; /* mask one hex number by 0x7ULL */ + break; + default: + return -1; + } + return 0; +} +#endif +#endif + +#if defined(SECUREC_USE_SPECIAL_DIV64) +/* The compiler does not provide 64 bit division problems */ +#define SECUREC_SPECIAL_QWORD_BASE10(val64) do { \ + SecUnsignedInt64 quotient = 0; \ + SecUnsignedInt32 digit = 0; \ + SecU64Div10((val64), &(quotient), &(digit)); \ + --formatBuf.str; \ + *(formatBuf.str) = digits[digit]; \ + (val64) = quotient; \ +} while ((val64) != 0) +#else +#define SECUREC_SPECIAL_QWORD_BASE10(val64) do { \ + --formatBuf.str; \ + *(formatBuf.str) = digits[(val64) % SECUREC_RADIX_DECIMAL]; \ +} while (((val64) /= SECUREC_RADIX_DECIMAL) != 0) +#endif +#define SECUREC_SPECIAL_QWORD(val64, numBase) do { \ + --formatBuf.str; \ + *(formatBuf.str) = digits[(val64) % (numBase)]; \ +} while (((val64) /= (numBase)) != 0) + + +#define SECUREC_SAFE_WRITE_STR_OPT(src, txtLen, outStream, outChars) do { \ + int ii_; \ + for (ii_ = 0; ii_ < (txtLen); ++ii_) { \ + *((SecChar *)(void *)((outStream)->cur)) = *(SecChar *)(src); \ + (outStream)->cur += sizeof(SecChar); \ + (src) = (src) + 1; 
\ + } \ + (outStream)->count -= (txtLen) * (int)(sizeof(SecChar)); \ + *(outChars) = *(outChars) + (txtLen); \ +} SECUREC_WHILE_ZERO + +#define SECUREC_SAFE_WRITE_STR(src, txtLen, outStream, outChars) do { \ + if ((txtLen) < 12) { /* performance optimization for mobile number length 12 */ \ + SECUREC_SAFE_WRITE_STR_OPT((src), (txtLen), (outStream), (outChars)); \ + } else { \ + SecDoMemcpy((outStream)->cur, (src), ((size_t)(unsigned int)(txtLen) * (sizeof(SecChar)))); \ + (outStream)->cur += (size_t)((size_t)(unsigned int)(txtLen) * (sizeof(SecChar))); \ + (outStream)->count -= (txtLen) * (int)(sizeof(SecChar)); \ + *(outChars) = *(outChars) + (txtLen); \ + } \ +} SECUREC_WHILE_ZERO + +#define SECUREC_SAFE_WRITE_CHAR(c, outStream, outChars) do { \ + *((SecChar *)(void *)((outStream)->cur)) = (SecChar)(c); \ + (outStream)->cur += sizeof(SecChar); \ + (outStream)->count -= (int)(sizeof(SecChar)); \ + *(outChars) = *(outChars) + 1; \ +} SECUREC_WHILE_ZERO + +#define SECUREC_SAFE_PADDING(padChar, padLen, outStream, outChars) do { \ + int ii_; \ + for (ii_ = 0; ii_ < (padLen); ++ii_) { \ + *((SecChar *)(void *)((outStream)->cur)) = (SecChar)(padChar); \ + (outStream)->cur += sizeof(SecChar); \ + } \ + (outStream)->count -= (padLen) * (int)(sizeof(SecChar)); \ + *(outChars) = *(outChars) + (padLen); \ +} SECUREC_WHILE_ZERO + +/* The count variable can be reduced to 0, and the external function complements the \0 terminator. */ +#define SECUREC_IS_REST_BUF_ENOUGH(stream, needLen) ((int)((stream)->count - \ + (int)(needLen) * (int)(sizeof(SecChar))) >= 0) + +#define SECUREC_FMT_STATE_OFFSET 256 +#ifdef SECUREC_FOR_WCHAR +#define SECUREC_FMT_TYPE(c, fmtTable) ((((unsigned int)(int)(c)) <= (unsigned int)(int)SECUREC_CHAR('~')) ? 
\ + ((fmtTable)[(unsigned char)(c)]) : 0) +#define SECUREC_DECODE_STATE(c, fmtTable, lastState) (SecFmtState)((((fmtTable)[(SECUREC_FMT_TYPE(c, (fmtTable))) * \ + ((unsigned char)STAT_INVALID + 1) + \ + (unsigned char)(lastState) + \ + SECUREC_FMT_STATE_OFFSET]))) +#else +#define SECUREC_DECODE_STATE(c, fmtTable, lastState) (SecFmtState)(((fmtTable)[((fmtTable)[(unsigned char)(c)]) * \ + ((unsigned char)STAT_INVALID + 1) + \ + (unsigned char)(lastState) + \ + SECUREC_FMT_STATE_OFFSET])) +#endif + +static void SecDecodeFlags(SecChar ch, SecFormatAttr *attr) +{ + switch (ch) { + case SECUREC_CHAR(' '): + attr->flags |= SECUREC_FLAG_SIGN_SPACE; + break; + case SECUREC_CHAR('+'): + attr->flags |= SECUREC_FLAG_SIGN; + break; + case SECUREC_CHAR('-'): + attr->flags |= SECUREC_FLAG_LEFT; + break; + case SECUREC_CHAR('0'): + attr->flags |= SECUREC_FLAG_LEADZERO; /* add zero th the front */ + break; + case SECUREC_CHAR('#'): + attr->flags |= SECUREC_FLAG_ALTERNATE; /* output %x with 0x */ + break; + default: + break; + } + return; +} + + +/* + * Decoded size identifier in format string to Reduce the number of lines of function code + */ +static int SecDecodeSizeI(SecFormatAttr *attr, const SecChar **format) +{ +#ifdef SECUREC_ON_64BITS + attr->flags |= SECUREC_FLAG_I64; /* %I to INT64 */ +#endif + if ((**format == SECUREC_CHAR('6')) && (*((*format) + 1) == SECUREC_CHAR('4'))) { + (*format) += 2; /* add 2 to skip I64 */ + attr->flags |= SECUREC_FLAG_I64; /* %I64 to INT64 */ + } else if ((**format == SECUREC_CHAR('3')) && (*((*format) + 1) == SECUREC_CHAR('2'))) { + (*format) += 2; /* add 2 to skip I32 */ + attr->flags &= ~SECUREC_FLAG_I64; /* %I64 to INT32 */ + } else if ((**format == SECUREC_CHAR('d')) || (**format == SECUREC_CHAR('i')) || + (**format == SECUREC_CHAR('o')) || (**format == SECUREC_CHAR('u')) || + (**format == SECUREC_CHAR('x')) || (**format == SECUREC_CHAR('X'))) { + /* do nothing */ + } else { + /* Compatibility code for "%I" just print I */ + return -1; + 
} + return 0; +} +/* + * Decoded size identifier in format string + */ +static int SecDecodeSize(SecChar ch, SecFormatAttr *attr, const SecChar **format) +{ + switch (ch) { +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT + case SECUREC_CHAR('j'): + attr->flags |= SECUREC_FLAG_INTMAX; + break; +#endif + case SECUREC_CHAR('q'): + /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('L'): + attr->flags |= SECUREC_FLAG_LONGLONG | SECUREC_FLAG_LONG_DOUBLE; + break; + case SECUREC_CHAR('l'): + if (**format == SECUREC_CHAR('l')) { + *format = *format + 1; + attr->flags |= SECUREC_FLAG_LONGLONG; /* long long */ + } else { + attr->flags |= SECUREC_FLAG_LONG; /* long int or wchar_t */ + } + break; + case SECUREC_CHAR('t'): + attr->flags |= SECUREC_FLAG_PTRDIFF; + break; +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT + case SECUREC_CHAR('z'): + /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('Z'): + attr->flags |= SECUREC_FLAG_SIZE; + break; +#endif + case SECUREC_CHAR('I'): + if (SecDecodeSizeI(attr, format) != 0) { + /* Compatibility code for "%I" just print I */ + return -1; + } + break; + case SECUREC_CHAR('h'): + if (**format == SECUREC_CHAR('h')) { + attr->flags |= SECUREC_FLAG_CHAR; /* char */ + } else { + attr->flags |= SECUREC_FLAG_SHORT; /* short int */ + } + break; + case SECUREC_CHAR('w'): + attr->flags |= SECUREC_FLAG_WIDECHAR; /* wide char */ + break; + default: + break; + } + return 0; +} + +/* + * Decoded char type identifier + */ +static int SecDecodeTypeC(SecFormatAttr *attr, unsigned int cValue, SecFormatBuf *formatBuf, SecBuffer *buffer) +{ +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT)) && !(defined(__hpux)) && !(defined(SECUREC_ON_SOLARIS)) + attr->flags &= ~SECUREC_FLAG_LEADZERO; +#endif + +#ifdef SECUREC_FOR_WCHAR + attr->bufferIsWide = 1; + if (attr->flags & SECUREC_FLAG_SHORT) { +#if SECUREC_HAVE_MBTOWC + /* multibyte character to wide character */ + char tmpChar[2]; /* One character string, length is 2 */ + tmpChar[0] = (char)(cValue & 0x00ff); + tmpChar[1] 
= '\0'; + + if (mbtowc(buffer->wStr, tmpChar, sizeof(tmpChar)) < 0) { + return -1; + } +#else + return -1; +#endif + } else { + buffer->wStr[0] = (wchar_t)cValue; + } + formatBuf->wStr = buffer->wStr; + return 1; /* only 1 wide character */ +#else /* SECUREC_FOR_WCHAR */ + attr->bufferIsWide = 0; + if (attr->flags & (SECUREC_FLAG_LONG | SECUREC_FLAG_WIDECHAR)) { +#if SECUREC_HAVE_WCTOMB + wchar_t wChar = (wchar_t)cValue; + int textLen; + /* wide character to multibyte character */ + SECUREC_MASK_MSVC_CRT_WARNING + textLen = wctomb(buffer->str, wChar); + SECUREC_END_MASK_MSVC_CRT_WARNING + if (textLen < 0) { + return -1; + } + formatBuf->str = buffer->str; + return textLen; +#else + return -1; +#endif + } else { + /* get multibyte character from argument */ + unsigned short temp; + temp = (unsigned short)cValue; + buffer->str[0] = (char)temp; + formatBuf->str = buffer->str; + return 1; /* only 1 character */ + } +#endif + +} + +/* literal string to print null ptr, define it as array rather than const text area + * is to avoid gcc warning with pointing const text with variable + */ +#if SECUREC_HAVE_WCHART +static wchar_t g_wStrNullString[SECUREC_NULL_STRING_SIZE] = { L'(', L'n', L'u', L'l', L'l', L')', L'\0', L'\0' }; +#endif +static char g_strNullString[SECUREC_NULL_STRING_SIZE] = "(null)"; + +static int SecDecodeTypeSchar(const SecFormatAttr *attr, SecFormatBuf *formatBuf) +{ + int finalPrecision = (attr->precision == -1) ? 
SECUREC_INT_MAX : attr->precision; + int textLen; + + if (formatBuf->str == NULL) { /* NULL passed, use special string */ + formatBuf->str = g_strNullString; + } + if (finalPrecision == SECUREC_INT_MAX) { + /* precision NOT assigned */ + /* The strlen performance is high when the string length is greater than 32 */ + textLen = (int)strlen(formatBuf->str); + } else { + /* precision assigned */ + size_t tmpLen; + SECUREC_CALC_STR_LEN(formatBuf->str, (size_t)(unsigned int)finalPrecision, &tmpLen); + textLen = (int)tmpLen; + } + return textLen; +} + +#if SECUREC_HAVE_WCHART +static int SecDecodeTypeSwchar(SecFormatAttr *attr, SecFormatBuf *formatBuf) +{ + int finalPrecision = (attr->precision == -1) ? SECUREC_INT_MAX : attr->precision; + int textLen; + + attr->bufferIsWide = 1; + if (formatBuf->wStr == NULL) { /* NULL passed, use special string */ + formatBuf->wStr = g_wStrNullString; + } + /* textLen in wchar_t */ + SECUREC_CALC_WSTR_LEN(formatBuf->wStr, finalPrecision, &textLen); + + return textLen; +} +#endif + +/* + * Decoded string identifier + */ +static int SecDecodeTypeS(SecFormatAttr *attr, char *argPtr, SecFormatBuf *formatBuf) +{ + int textLen; +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT)) && (!defined(SECUREC_ON_UNIX)) + attr->flags &= ~SECUREC_FLAG_LEADZERO; +#endif + formatBuf->str = argPtr; +#ifdef SECUREC_FOR_WCHAR +#if defined(SECUREC_COMPATIBLE_LINUX_FORMAT) + if (!(attr->flags & SECUREC_FLAG_LONG)) { + attr->flags |= SECUREC_FLAG_SHORT; + } +#endif + if (attr->flags & SECUREC_FLAG_SHORT) { + /* textLen now contains length in multibyte chars */ + textLen = SecDecodeTypeSchar(attr, formatBuf); + } else { + /* textLen now contains length in wide chars */ + textLen = SecDecodeTypeSwchar(attr, formatBuf); + } +#else /* SECUREC_FOR_WCHAR */ + if (attr->flags & (SECUREC_FLAG_LONG | SECUREC_FLAG_WIDECHAR)) { + /* textLen now contains length in wide chars */ +#if SECUREC_HAVE_WCHART + textLen = SecDecodeTypeSwchar(attr, formatBuf); +#else + textLen = 0; 
+#endif + } else { + /* textLen now contains length in multibyte chars */ + textLen = SecDecodeTypeSchar(attr, formatBuf); + } +#endif /* SECUREC_FOR_WCHAR */ + return textLen; +} + +/* + * Write one character to dest buffer + */ +static void SecOutputOneChar(SecChar ch, SecPrintfStream *stream, int *counter) +{ + /* normal state, write character */ + if (SECUREC_IS_REST_BUF_ENOUGH(stream, 1)) { /* only one char */ + SECUREC_SAFE_WRITE_CHAR(ch, stream, counter); /* char * cast to wchar * */ + } else { +#ifdef SECUREC_FOR_WCHAR + SecWriteCharW(ch, stream, counter); +#else + /* optimize function call to code */ + *counter = -1; + stream->count = -1; +#endif + } +} + +/* + * Check precison in format + */ +static int SecDecodePrecision(SecChar ch, SecFormatAttr *formatAttr) +{ + if (formatAttr->dynPrecision == 0) { + /* add digit to current precision */ + if (SECUREC_MUL_TEN_ADD_BEYOND_MAX(formatAttr->precision)) { + return -1; + } + formatAttr->precision = (int)SECUREC_MUL_TEN((unsigned int)formatAttr->precision) + + (unsigned char)(ch - SECUREC_CHAR('0')); + } else { + if (formatAttr->precision < 0) { + formatAttr->precision = -1; + } + if (formatAttr->precision > SECUREC_MAX_WIDTH_LEN) { + return -1; + } + } + return 0; +} + + +/* + * Check width in format + */ +static int SecDecodeWidth(SecChar ch, SecFormatAttr *formatAttr, SecFmtState lastState) +{ + if (formatAttr->dynWidth == 0) { + if (lastState != STAT_WIDTH) { + formatAttr->fldWidth = 0; + } + if (SECUREC_MUL_TEN_ADD_BEYOND_MAX(formatAttr->fldWidth)) { + return -1; + } + formatAttr->fldWidth = (int)SECUREC_MUL_TEN((unsigned int)formatAttr->fldWidth) + + (unsigned char)(ch - SECUREC_CHAR('0')); + } else { + if (formatAttr->fldWidth < 0) { + formatAttr->flags |= SECUREC_FLAG_LEFT; + formatAttr->fldWidth = (-formatAttr->fldWidth); + if (formatAttr->fldWidth > SECUREC_MAX_WIDTH_LEN) { + return -1; + } + } + } + return 0; +} +#ifdef SECUREC_FOR_WCHAR +/* + * Formatting output core functions for wchar 
version.Called by a function such as vswprintf_s + * argList must not be declare as const + */ +static int SecOutputSW(SecPrintfStream *stream, const wchar_t *cFormat, va_list argList) +#else +/* + * Formatting output core functions for char version.Called by a function such as vsnprintf_s + */ +static int SecOutputS(SecPrintfStream *stream, const char *cFormat, va_list argList) +#endif +{ + const SecChar *format = cFormat; +#if SECUREC_ENABLE_SPRINTF_FLOAT + char *floatBuf = NULL; +#endif + SecFormatBuf formatBuf; + static const char *itoaUpperDigits = "0123456789ABCDEFX"; + static const char *itoaLowerDigits = "0123456789abcdefx"; + const char *digits = itoaUpperDigits; + unsigned int radix = SECUREC_RADIX_DECIMAL; + int charsOut; /* characters written */ + int prefixLen = 0; /* Must be initialized or compiler alerts */ + int padding = 0; + int textLen; /* length of the text */ + int noOutput = 0; /* Must be initialized or compiler alerts */ + SecFmtState state; + SecFmtState lastState; + SecChar prefix[SECUREC_PREFIX_LEN] = { 0 }; + SecChar ch; /* currently read character */ + static const unsigned char stateTable[SECUREC_STATE_TABLE_SIZE] = { + /* type 0: nospecial meanin; + * 1: '%'; + * 2: '.' + * 3: '*' + * 4: '0' + * 5: '1' ... 
'9' + * 6: ' ', '+', '-', '#' + * 7: 'h', 'l', 'L', 'F', 'w' , 'N','z','q','t','j' + * 8: 'd','o','u','i','x','X','e','f','g' + */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x06, 0x00, 0x00, 0x06, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x03, 0x06, 0x00, 0x06, 0x02, 0x00, + 0x04, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x05, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x08, 0x00, 0x08, 0x08, 0x08, 0x00, 0x07, 0x00, 0x00, 0x07, 0x00, 0x07, 0x00, + 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x08, 0x00, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x08, 0x08, 0x08, 0x08, 0x08, 0x07, 0x08, 0x07, 0x00, 0x07, 0x00, 0x00, 0x08, + 0x08, 0x07, 0x00, 0x08, 0x07, 0x08, 0x00, 0x07, 0x08, 0x00, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, + /* fill zero for normal char 128 byte for 0x80 - 0xff */ + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, + /* state 0: normal + * 1: percent + * 2: flag + * 3: width + * 4: dot + * 5: precis + * 6: size + * 7: type + * 8: invalid + */ + 0x00, 0x08, 0x08, 0x08, 0x08, 0x08, 0x08, 0x00, 0x00, 0x01, 0x00, 0x08, 0x08, 0x08, 0x08, 0x08, + 0x01, 0x00, 
0x00, 0x04, 0x04, 0x04, 0x08, 0x08, 0x08, 0x00, 0x00, 0x00, 0x03, 0x03, 0x08, 0x05, + 0x08, 0x08, 0x00, 0x00, 0x00, 0x02, 0x02, 0x03, 0x05, 0x05, 0x08, 0x00, 0x00, 0x00, 0x03, 0x03, + 0x03, 0x05, 0x05, 0x08, 0x00, 0x00, 0x00, 0x02, 0x02, 0x02, 0x08, 0x08, 0x08, 0x00, 0x00, 0x00, + 0x06, 0x06, 0x06, 0x06, 0x06, 0x06, 0x00, 0x00, 0x00, 0x07, 0x07, 0x07, 0x07, 0x07, 0x07, 0x00, + 0x00 + }; + + SecFormatAttr formatAttr; + SecBuffer buffer; + formatAttr.flags = 0; + formatAttr.bufferIsWide = 0; /* flag for buffer contains wide chars */ + formatAttr.fldWidth = 0; + formatAttr.precision = 0; + formatAttr.dynWidth = 0; + formatAttr.dynPrecision = 0; + charsOut = 0; + textLen = 0; + state = STAT_NORMAL; /* starting state */ + formatBuf.str = NULL; + + /* loop each format character */ + /* remove format != NULL */ + while ((ch = *format) != SECUREC_CHAR('\0') && charsOut >= 0) { + ++format; + lastState = state; + state = SECUREC_DECODE_STATE(ch, stateTable, lastState); + switch (state) { + case STAT_NORMAL: + SecOutputOneChar(ch, stream, &charsOut); + continue; + case STAT_PERCENT: + /* set default values */ + prefixLen = 0; + noOutput = 0; + formatAttr.flags = 0; + formatAttr.fldWidth = 0; + formatAttr.precision = -1; + formatAttr.bufferIsWide = 0; + formatAttr.dynWidth = 0; + formatAttr.dynPrecision = 0; + break; + case STAT_FLAG: + /* set flag based on which flag character */ + SecDecodeFlags(ch, &formatAttr); + break; + case STAT_WIDTH: + /* update width value */ + if (ch == SECUREC_CHAR('*')) { + /* get width */ + formatAttr.fldWidth = (int)va_arg(argList, int); + formatAttr.dynWidth = 1; + } else { + formatAttr.dynWidth = 0; + } + if (SecDecodeWidth(ch, &formatAttr, lastState) != 0) { + return -1; + } + break; + case STAT_DOT: + formatAttr.precision = 0; + break; + case STAT_PRECIS: + /* update precison value */ + if (ch == SECUREC_CHAR('*')) { + /* get precision from arg list */ + formatAttr.precision = (int)va_arg(argList, int); + formatAttr.dynPrecision = 1; + } 
else { + formatAttr.dynPrecision = 0; + } + if (SecDecodePrecision(ch, &formatAttr) != 0) { + return -1; + } + break; + case STAT_SIZE: + /* read a size specifier, set the formatAttr.flags based on it */ + if (SecDecodeSize(ch, &formatAttr, &format) != 0) { + /* Compatibility code for "%I" just print I */ + SecOutputOneChar(ch, stream, &charsOut); + state = STAT_NORMAL; + continue; + } + break; + case STAT_TYPE: + switch (ch) { + case SECUREC_CHAR('C'): + /* wide char */ + if (!(formatAttr.flags & (SECUREC_FLAG_SHORT | SECUREC_FLAG_LONG | SECUREC_FLAG_WIDECHAR))) { +#ifdef SECUREC_FOR_WCHAR + formatAttr.flags |= SECUREC_FLAG_SHORT; +#else + formatAttr.flags |= SECUREC_FLAG_WIDECHAR; +#endif + } + /* fall-through */ + /* FALLTHRU */ + case SECUREC_CHAR('c'): + do { + unsigned int cValue = (unsigned int)va_arg(argList, int); + textLen = SecDecodeTypeC(&formatAttr, cValue, &formatBuf, &buffer); + if (textLen < 0) { + noOutput = 1; + } + } SECUREC_WHILE_ZERO; + break; + case SECUREC_CHAR('S'): /* wide char string */ + if (!(formatAttr.flags & (SECUREC_FLAG_SHORT | SECUREC_FLAG_LONG | SECUREC_FLAG_WIDECHAR))) { +#ifndef SECUREC_FOR_WCHAR + formatAttr.flags |= SECUREC_FLAG_WIDECHAR; +#else + formatAttr.flags |= SECUREC_FLAG_SHORT; +#endif + } + /* fall-through */ + /* FALLTHRU */ + case SECUREC_CHAR('s'): + do { + char *argPtr = (char *)va_arg(argList, char *); + textLen = SecDecodeTypeS(&formatAttr, argPtr, &formatBuf); + } SECUREC_WHILE_ZERO; + break; + case SECUREC_CHAR('n'): + /* higher risk disable it */ + return -1; + case SECUREC_CHAR('E'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('F'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('G'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('A'): /* fall-through */ /* FALLTHRU */ + /* convert format char to lower , use Explicit conversion to clean up compilation warning */ + ch = (SecChar)(ch + ((SecChar)(SECUREC_CHAR('a')) - (SECUREC_CHAR('A')))); + /* fall-through */ + /* FALLTHRU */ + case 
SECUREC_CHAR('e'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('f'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('g'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('a'): +#if SECUREC_ENABLE_SPRINTF_FLOAT + do { + int bufferSize = 0; /* size of formatBuf.str */ + /* floating point conversion */ + formatBuf.str = buffer.str; /* output buffer for float string with default size */ + + /* compute the precision value */ + if (formatAttr.precision < 0) { + formatAttr.precision = SECUREC_FLOAT_DEFAULT_PRECISION; + } else if (formatAttr.precision == 0 && ch == SECUREC_CHAR('g')) { + formatAttr.precision = 1; + } + + /* calc buffer size to store double value + * The maximum length of SECUREC_MAX_WIDTH_LEN is enough + */ + if (formatAttr.flags & SECUREC_FLAG_LONG_DOUBLE) { + if (formatAttr.precision > (SECUREC_MAX_WIDTH_LEN - SECUREC_FLOAT_BUFSIZE_LB)) { + noOutput = 1; + break; + } + /* Long double needs to meet the basic print length */ + bufferSize = SECUREC_FLOAT_BUFSIZE_LB + formatAttr.precision + SECUREC_FLOAT_BUF_EXT; + } else { + if (formatAttr.precision > (SECUREC_MAX_WIDTH_LEN - SECUREC_FLOAT_BUFSIZE)) { + noOutput = 1; + break; + } + /* Double needs to meet the basic print length */ + bufferSize = SECUREC_FLOAT_BUFSIZE + formatAttr.precision + SECUREC_FLOAT_BUF_EXT; + } + if (formatAttr.fldWidth > bufferSize) { + bufferSize = formatAttr.fldWidth + SECUREC_FLOAT_BUF_EXT; + } + + if (bufferSize > SECUREC_BUFFER_SIZE) { + /* the current vlaue of SECUREC_BUFFER_SIZE could NOT store the + * formatted float string + */ + floatBuf = (char *)SECUREC_MALLOC(((size_t)(unsigned int)bufferSize)); + if (floatBuf != NULL) { + formatBuf.str = floatBuf; + } else { + noOutput = 1; + break; + } + } + + do { + /* add following code to call system sprintf API for float number */ + const SecChar *pFloatFmt = format - 2; /* sub 2 to the position before 'f' or 'g' */ + int k; + int fFmtStrLen; + char fFmtBuf[SECUREC_FMT_STR_LEN]; + char *fFmtStr = fFmtBuf; + 
char *fFmtHeap = NULL; /* to clear warning */ + + while (SECUREC_CHAR('%') != *pFloatFmt) { /* must meet '%' */ + --pFloatFmt; + } + fFmtStrLen = (int)((format - pFloatFmt) + 1); /* with ending terminator */ + if (fFmtStrLen > SECUREC_FMT_STR_LEN) { + /* if SECUREC_FMT_STR_LEN is NOT enough, alloc a new buffer */ + fFmtHeap = (char *)SECUREC_MALLOC((size_t)((unsigned int)fFmtStrLen)); + if (fFmtHeap == NULL) { + noOutput = 1; + break; + } else { + for (k = 0; k < fFmtStrLen - 1; ++k) { + /* convert wchar to char */ + fFmtHeap[k] = (char)(pFloatFmt[k]); /* copy the format string */ + } + fFmtHeap[k] = '\0'; + + fFmtStr = fFmtHeap; + } + } else { + /* purpose of the repeat code is to solve the tool alarm Redundant_Null_Check */ + for (k = 0; k < fFmtStrLen - 1; ++k) { + /* convert wchar to char */ + fFmtBuf[k] = (char)(pFloatFmt[k]); /* copy the format string */ + } + fFmtBuf[k] = '\0'; + } + + if (formatAttr.flags & SECUREC_FLAG_LONG_DOUBLE) { +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT + long double tmp = (long double)va_arg(argList, long double); + textLen = SecFormatLongDboule(formatBuf.str, &formatAttr, fFmtStr, tmp); +#else + double tmp = (double)va_arg(argList, double); + textLen = SecFormatDboule(formatBuf.str, &formatAttr, fFmtStr, tmp); +#endif + } else { + double tmp = (double)va_arg(argList, double); + textLen = SecFormatDboule(formatBuf.str, &formatAttr, fFmtStr, tmp); + } + + if (fFmtHeap != NULL) { + /* if buffer is alloced on heap, free it */ + SECUREC_FREE(fFmtHeap); + fFmtHeap = NULL; + /* to clear e438 last value assigned not used , the compiler will + * optimize this code + */ + (void)fFmtHeap; + } + if (textLen < 0 || textLen >= bufferSize) { + /* bufferSize is large enough, just validation the return value */ + noOutput = 1; + break; + } + + /* no padding ,this variable to calculate amount of padding */ + formatAttr.fldWidth = textLen; + prefixLen = 0; /* no padding ,this variable to calculate amount of padding */ + formatAttr.flags = 0; /* clear 
all internal formatAttr.flags */ + break; + } SECUREC_WHILE_ZERO; + } SECUREC_WHILE_ZERO; + break; +#else + return -1; +#endif + case SECUREC_CHAR('p'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('X'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('x'): + /* unsigned lower hex output */ + digits = itoaLowerDigits; + radix = SECUREC_RADIX_HEX; + switch (ch) { + case SECUREC_CHAR('p'): + /* print a pointer */ +#if defined(SECUREC_COMPATIBLE_WIN_FORMAT) + formatAttr.flags &= ~SECUREC_FLAG_LEADZERO; +#else + formatAttr.flags |= SECUREC_FLAG_POINTER; +#endif +#ifdef SECUREC_ON_64BITS + formatAttr.flags |= SECUREC_FLAG_I64; /* converting an int64 */ +#else + formatAttr.flags |= SECUREC_FLAG_LONG; /* converting a long */ +#endif + +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) || defined(SECUREC_VXWORKS_PLATFORM)) && (!defined(SECUREC_ON_UNIX)) +#if defined(SECUREC_VXWORKS_PLATFORM) + formatAttr.precision = 1; +#else + formatAttr.precision = 0; +#endif + formatAttr.flags |= SECUREC_FLAG_ALTERNATE; /* "0x" is not default prefix in UNIX */ + break; +#else + /* not linux vxwoks */ +#if defined(_AIX) || defined(SECUREC_ON_SOLARIS) + formatAttr.precision = 1; +#else + formatAttr.precision = 2 * sizeof(void *); /* 2 precision of different systems */ +#endif +#endif + +#if defined(SECUREC_ON_UNIX) + break; +#endif + /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('X'): /* fall-through */ /* FALLTHRU */ + /* unsigned upper hex output */ + digits = itoaUpperDigits; + break; + default: + break; + } + + if (formatAttr.flags & SECUREC_FLAG_ALTERNATE) { + /* alternate form means '0x' prefix */ + prefix[0] = SECUREC_CHAR('0'); + prefix[1] = (SecChar)(digits[16]); /* 16 for 'x' or 'X' */ + +#if (defined(SECUREC_COMPATIBLE_LINUX_FORMAT) || defined(SECUREC_VXWORKS_PLATFORM)) + if (ch == 'p') { + prefix[1] = SECUREC_CHAR('x'); + } +#endif +#if defined(_AIX) || defined(SECUREC_ON_SOLARIS) + if (ch == 'p') { + prefixLen = 0; + } else { + prefixLen = 
SECUREC_PREFIX_LEN; + } +#else + prefixLen = SECUREC_PREFIX_LEN; +#endif + + } + /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('i'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('d'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('u'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('o'): /* fall-through */ /* FALLTHRU */ + switch (ch) { + case SECUREC_CHAR('i'): /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('d'): /* fall-through */ /* FALLTHRU */ + /* signed decimal output */ + formatAttr.flags |= SECUREC_FLAG_SIGNED; + /* fall-through */ /* FALLTHRU */ + case SECUREC_CHAR('u'): + radix = SECUREC_RADIX_DECIMAL; + break; + case SECUREC_CHAR('o'): + /* unsigned octal output */ + radix = SECUREC_RADIX_OCTAL; + if (formatAttr.flags & SECUREC_FLAG_ALTERNATE) { + /* alternate form means force a leading 0 */ + formatAttr.flags |= SECUREC_FLAG_FORCE_OCTAL; + } + break; + default: + break; + } + + do { + + SecUnsignedInt64 number = 0; /* number to convert */ + SecInt64 l; /* temp long value */ + + /* read argument into variable l */ + if (formatAttr.flags & SECUREC_FLAG_I64) { + l = (SecInt64)va_arg(argList, SecInt64); + } else if (formatAttr.flags & SECUREC_FLAG_LONGLONG) { + l = (SecInt64)va_arg(argList, SecInt64); + } else +#ifdef SECUREC_ON_64BITS + if (formatAttr.flags & SECUREC_FLAG_LONG) { + l = (long)va_arg(argList, long); + } else +#endif /* SECUREC_ON_64BITS */ + if (formatAttr.flags & SECUREC_FLAG_CHAR) { + if (formatAttr.flags & SECUREC_FLAG_SIGNED) { + l = (char)va_arg(argList, int); /* sign extend */ + if (l >= 128) { /* 128 on some platform, char is always unsigned */ + SecUnsignedInt64 tmpL = (SecUnsignedInt64)l; + unsigned char tmpCh = (unsigned char)(~(tmpL)); + l = tmpCh + 1; + formatAttr.flags |= SECUREC_FLAG_NEGATIVE; + } + } else { + l = (unsigned char)va_arg(argList, int); /* zero-extend */ + } + + } else if (formatAttr.flags & SECUREC_FLAG_SHORT) { + if (formatAttr.flags & SECUREC_FLAG_SIGNED) { + l = 
(short)va_arg(argList, int); /* sign extend */ + } else { + l = (unsigned short)va_arg(argList, int); /* zero-extend */ + } + + } +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT + else if (formatAttr.flags & SECUREC_FLAG_PTRDIFF) { + l = (ptrdiff_t)va_arg(argList, ptrdiff_t); /* sign extend */ + } else if (formatAttr.flags & SECUREC_FLAG_SIZE) { + if (formatAttr.flags & SECUREC_FLAG_SIGNED) { + /* No suitable macros were found to handle the branch */ + if (SecIsSameSize(sizeof(size_t), sizeof(long))) { + l = va_arg(argList, long); /* sign extend */ + } else if (SecIsSameSize(sizeof(size_t), sizeof(long long))) { + l = va_arg(argList, long long); /* sign extend */ + } else { + l = va_arg(argList, int); /* sign extend */ + } + } else { + l = (SecInt64)(size_t)va_arg(argList, size_t); /* sign extend */ + } + } else if (formatAttr.flags & SECUREC_FLAG_INTMAX) { + if (formatAttr.flags & SECUREC_FLAG_SIGNED) { + l = va_arg(argList, SecInt64); /* sign extend */ + } else { + /* sign extend */ + l = (SecInt64)(SecUnsignedInt64)va_arg(argList, SecUnsignedInt64); + } + } +#endif + else { + if (formatAttr.flags & SECUREC_FLAG_SIGNED) { + l = va_arg(argList, int); /* sign extend */ + } else { + l = (unsigned int)va_arg(argList, int); /* zero-extend */ + } + + } + + /* check for negative; copy into number */ + if ((formatAttr.flags & SECUREC_FLAG_SIGNED) && l < 0) { + number = (SecUnsignedInt64)(-l); + formatAttr.flags |= SECUREC_FLAG_NEGATIVE; + } else { + number = (SecUnsignedInt64)l; + } + + if (((formatAttr.flags & SECUREC_FLAG_I64) == 0) && +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT + ((formatAttr.flags & SECUREC_FLAG_INTMAX) == 0) && +#endif +#ifdef SECUREC_ON_64BITS + ((formatAttr.flags & SECUREC_FLAG_PTRDIFF) == 0) && + ((formatAttr.flags & SECUREC_FLAG_SIZE) == 0) && +#if !defined(SECUREC_COMPATIBLE_WIN_FORMAT) /* on window 64 system sizeof long is 32bit */ + ((formatAttr.flags & SECUREC_FLAG_LONG) == 0) && +#endif +#endif + ((formatAttr.flags & SECUREC_FLAG_LONGLONG) == 0)) { + + 
number &= 0xffffffff; /* use 0xffffffff as 32 bit mask */ + } + + /* check precision value for default */ + if (formatAttr.precision < 0) { + formatAttr.precision = 1; /* default precision */ + } else { +#if defined(SECUREC_COMPATIBLE_WIN_FORMAT) + formatAttr.flags &= ~SECUREC_FLAG_LEADZERO; +#else + if (!(formatAttr.flags & SECUREC_FLAG_POINTER)) { + formatAttr.flags &= ~SECUREC_FLAG_LEADZERO; + } +#endif + if (formatAttr.precision > SECUREC_MAX_PRECISION) { + formatAttr.precision = SECUREC_MAX_PRECISION; + } + } + + /* Check if data is 0; if so, turn off hex prefix, + * 'p' add 0x prefix, otherwise not add prefix + */ + if (number == 0) { +#if !(defined(SECUREC_VXWORKS_PLATFORM) || defined(__hpux)) + prefixLen = 0; +#else + if ((ch == 'p') && (formatAttr.flags & SECUREC_FLAG_ALTERNATE)) { + prefixLen = SECUREC_PREFIX_LEN; + } else { + prefixLen = 0; + } +#endif + } + + /* Convert data to ASCII */ + formatBuf.str = &buffer.str[SECUREC_BUFFER_SIZE]; + + if (number > 0) { +#ifdef SECUREC_ON_64BITS + switch (radix) { + /* the compiler will optimize each one */ + case SECUREC_RADIX_DECIMAL: + SECUREC_SPECIAL_QWORD_BASE10(number); + break; + case SECUREC_RADIX_HEX: + SECUREC_SPECIAL_QWORD(number, SECUREC_RADIX_HEX); + break; + case SECUREC_RADIX_OCTAL: + SECUREC_SPECIAL_QWORD(number, SECUREC_RADIX_OCTAL); + break; + default: + break; + } +#else /* for 32 bits system */ + if (number <= 0xFFFFFFFFUL) { + /* in most case, the value to be converted is small value */ + SecUnsignedInt32 n32Tmp = (SecUnsignedInt32)number; + switch (radix) { + case SECUREC_RADIX_HEX: + SECUREC_SPECIAL_DWORD(n32Tmp, SECUREC_RADIX_HEX); + break; + case SECUREC_RADIX_OCTAL: + SECUREC_SPECIAL_DWORD(n32Tmp, SECUREC_RADIX_OCTAL); + break; + +#ifdef _AIX + /* the compiler will optimize div 10 */ + case SECUREC_RADIX_DECIMAL: + SECUREC_SPECIAL_DWORD(n32Tmp, SECUREC_RADIX_DECIMAL); + break; +#else + case SECUREC_RADIX_DECIMAL: + do { + /* fast div 10 */ + SecUnsignedInt32 q; + SecUnsignedInt32 r; + do 
{ + *--formatBuf.str = digits[n32Tmp % SECUREC_RADIX_DECIMAL]; + q = (n32Tmp >> 1) + (n32Tmp >> 2); /* fast div magic 2 */ + q = q + (q >> 4); /* fast div magic 4 */ + q = q + (q >> 8); /* fast div magic 8 */ + q = q + (q >> 16); /* fast div magic 16 */ + q = q >> 3; /* fast div magic 3 */ + r = n32Tmp - SECUREC_MUL_TEN(q); + n32Tmp = (r > 9) ? (q + 1) : q; /* fast div magic 9 */ + } while (n32Tmp != 0); + } SECUREC_WHILE_ZERO; + break; +#endif + default: + break; + } /* end switch */ + } else { + /* the value to be converted is greater than 4G */ +#if defined(SECUREC_VXWORKS_VERSION_5_4) + do { + SecUnsignedInt32 digit = 0; /* ascii value of digit */ + SecUnsignedInt64 quotient = 0; + if (SecU64Div32(number,(SecUnsignedInt32)radix, "ient, &digit) != 0) { + noOutput = 1; + break; + } + *--formatBuf.str = digits[digit]; + number = quotient; + } while (number != 0); +#else + switch (radix) { + /* the compiler will optimize div 10 */ + case SECUREC_RADIX_DECIMAL: + SECUREC_SPECIAL_QWORD_BASE10(number); + break; + case SECUREC_RADIX_OCTAL: + SECUREC_SPECIAL_QWORD(number, SECUREC_RADIX_OCTAL); + break; + case SECUREC_RADIX_HEX: + SECUREC_SPECIAL_QWORD(number, SECUREC_RADIX_HEX); + break; + default: + break; + } +#endif + } +#endif + + } + /* compute length of number,.if textLen > 0, then formatBuf.str must be in buffer.str */ + textLen = (int)(size_t)((char *)&buffer.str[SECUREC_BUFFER_SIZE] - formatBuf.str); + if (formatAttr.precision > textLen) { + int ii; + for (ii = 0; ii < formatAttr.precision - textLen; ++ii) { + *--formatBuf.str = '0'; + } + textLen = formatAttr.precision; + } + + /* Force a leading zero if FORCEOCTAL flag set */ + if ((formatAttr.flags & SECUREC_FLAG_FORCE_OCTAL) && + (textLen == 0 || formatBuf.str[0] != '0')) { + *--formatBuf.str = '0'; + ++textLen; /* add a zero */ + } + } SECUREC_WHILE_ZERO; + break; + default: + break; + } + + while (noOutput < 1) { + if (formatAttr.flags & SECUREC_FLAG_SIGNED) { + if (formatAttr.flags & 
SECUREC_FLAG_NEGATIVE) { + /* prefix is a '-' */ + prefix[0] = SECUREC_CHAR('-'); + prefixLen = 1; + } else if (formatAttr.flags & SECUREC_FLAG_SIGN) { + /* prefix is '+' */ + prefix[0] = SECUREC_CHAR('+'); + prefixLen = 1; + } else if (formatAttr.flags & SECUREC_FLAG_SIGN_SPACE) { + /* prefix is ' ' */ + prefix[0] = SECUREC_CHAR(' '); + prefixLen = 1; + } + } + +#if defined(SECUREC_COMPATIBLE_LINUX_FORMAT) && (!defined(SECUREC_ON_UNIX)) + if ((formatAttr.flags & SECUREC_FLAG_POINTER) && (textLen == 0)) { + formatAttr.flags &= ~SECUREC_FLAG_LEADZERO; + formatBuf.str = &buffer.str[SECUREC_BUFFER_SIZE - 1]; + *formatBuf.str-- = '\0'; + *formatBuf.str-- = ')'; + *formatBuf.str-- = 'l'; + *formatBuf.str-- = 'i'; + *formatBuf.str-- = 'n'; + *formatBuf.str = '('; + textLen = 5; /* length of (nil) is 5 */ + } +#endif + + /* calculate amount of padding */ + padding = (formatAttr.fldWidth - textLen) - prefixLen; + + /* put out the padding, prefix, and text, in the correct order */ + + if (!(formatAttr.flags & (SECUREC_FLAG_LEFT | SECUREC_FLAG_LEADZERO)) && padding > 0) { + /* pad on left with blanks */ + if (SECUREC_IS_REST_BUF_ENOUGH(stream, padding)) { + /* char * cast to wchar * */ + SECUREC_SAFE_PADDING(SECUREC_CHAR(' '), padding, stream, &charsOut); + } else { + SECUREC_WRITE_MULTI_CHAR(SECUREC_CHAR(' '), padding, stream, &charsOut); + } + } + + /* write prefix */ + if (prefixLen > 0) { + SecChar *pPrefix = prefix; + if (SECUREC_IS_REST_BUF_ENOUGH(stream, prefixLen)) { + /* max prefix len is 2, use loop copy */ /* char * cast to wchar * in WCHAR version */ + SECUREC_SAFE_WRITE_STR_OPT(pPrefix, prefixLen, stream, &charsOut); + } else { + SECUREC_WRITE_STRING(prefix, prefixLen, stream, &charsOut); + } + } + + if ((formatAttr.flags & SECUREC_FLAG_LEADZERO) && !(formatAttr.flags & SECUREC_FLAG_LEFT) + && padding > 0) { + /* write leading zeros */ + if (SECUREC_IS_REST_BUF_ENOUGH(stream, padding)) { + /* char * cast to wchar * */ + SECUREC_SAFE_PADDING(SECUREC_CHAR('0'), 
padding, stream, &charsOut); + } else { + SECUREC_WRITE_MULTI_CHAR(SECUREC_CHAR('0'), padding, stream, &charsOut); + } + } + + /* write text */ +#ifndef SECUREC_FOR_WCHAR + if (formatAttr.bufferIsWide != 0 && (textLen > 0)) { +#if SECUREC_HAVE_WCTOMB + wchar_t *p = formatBuf.wStr; + int count = textLen; + while (count > 0) { + char tmpBuf[SECUREC_MB_LEN + 1]; + SECUREC_MASK_MSVC_CRT_WARNING + int retVal = wctomb(tmpBuf, *p); + SECUREC_END_MASK_MSVC_CRT_WARNING + if (retVal <= 0) { + charsOut = -1; + break; + } + SECUREC_WRITE_STRING(tmpBuf, retVal, stream, &charsOut); + --count; + ++p; + } +#else + charsOut = -1; + break; +#endif + } else { + if (SECUREC_IS_REST_BUF_ENOUGH(stream, textLen)) { + SECUREC_SAFE_WRITE_STR(formatBuf.str, textLen, stream, &charsOut); + } else { + SECUREC_WRITE_STRING(formatBuf.str, textLen, stream, &charsOut); + } + } +#else /* SECUREC_FOR_WCHAR */ + if (formatAttr.bufferIsWide == 0 && textLen > 0) { +#if SECUREC_HAVE_MBTOWC + int count = textLen; + char *p = formatBuf.str; + + while (count > 0) { + wchar_t wChar = L'\0'; + int retVal = mbtowc(&wChar, p, (size_t)MB_CUR_MAX); + if (retVal <= 0) { + charsOut = -1; + break; + } + SecWriteCharW(wChar, stream, &charsOut); + p += retVal; + count -= retVal; + } +#else + charsOut = -1; + break; +#endif + } else { + if (SECUREC_IS_REST_BUF_ENOUGH(stream, textLen)) { + /* char * cast to wchar * */ + SECUREC_SAFE_WRITE_STR(formatBuf.wStr, textLen, stream, &charsOut); + } else { + SECUREC_WRITE_STRING(formatBuf.wStr, textLen, stream, &charsOut); + } + } +#endif /* SECUREC_FOR_WCHAR */ + + if (charsOut >= 0 && (formatAttr.flags & SECUREC_FLAG_LEFT) && padding > 0) { + /* pad on right with blanks */ + if (SECUREC_IS_REST_BUF_ENOUGH(stream, padding)) { + /* char * cast to wchar * */ + SECUREC_SAFE_PADDING(SECUREC_CHAR(' '), padding, stream, &charsOut); + } else { + SECUREC_WRITE_MULTI_CHAR(SECUREC_CHAR(' '), padding, stream, &charsOut); + } + } + break; + } +#if SECUREC_ENABLE_SPRINTF_FLOAT + if 
(floatBuf != NULL) { + SECUREC_FREE(floatBuf); + floatBuf = NULL; + } +#endif + break; + case STAT_INVALID: + return -1; + default: + return -1; /* input format is wrong, directly return */ + } + } + + if (state != STAT_NORMAL && state != STAT_TYPE) { + return -1; + } + + return charsOut; /* the number of characters written */ +} +#endif /* OUTPUT_INL_2B263E9C_43D8_44BB_B17A_6D2033DECEE5 */ + diff --git a/third_party/securec/src/scanf_s.c b/third_party/securec/src/scanf_s.c new file mode 100644 index 00000000..e4b0e602 --- /dev/null +++ b/third_party/securec/src/scanf_s.c @@ -0,0 +1,55 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * + * The scanf_s function is equivalent to fscanf_s with the argument stdin interposed before the arguments to scanf_s + * The scanf_s function reads data from the standard input stream stdin and + * writes the data into the location that's given by argument. Each argument + * must be a pointer to a variable of a type that corresponds to a type specifier + * in format. If copying occurs between strings that overlap, the behavior is + * undefined. + * + * + * format Format control string. + * ... Optional arguments. + * + * + * ... The converted value stored in user assigned address + * + * + * Returns the number of fields successfully converted and assigned; + * the return value does not include fields that were read but not assigned. 
/*
 * scanf_s reads data from the standard input stream stdin and writes it to
 * the locations given by the optional arguments, equivalent to fscanf_s
 * with stdin interposed before the arguments. Each argument must point to
 * a variable whose type matches its specifier in format; behavior is
 * undefined if copied strings overlap.
 *
 * format  Format control string.
 * ...     Optional arguments receiving the converted values.
 *
 * Returns the number of fields successfully converted and assigned (fields
 * read but not assigned are not counted); 0 means nothing was assigned,
 * and -1 indicates an error.
 */
int scanf_s(const char *format, ...)
{
    int nAssigned; /* left uninitialized on purpose to avoid lint e838 */
    va_list args;

    va_start(args, format);
    nAssigned = vscanf_s(format, args);
    va_end(args);
    (void)args; /* silence e438 "last value assigned not used"; optimized away */

    return nAssigned;
}
+ */ + +#ifndef SEC_INPUT_H_E950DA2C_902F_4B15_BECD_948E99090D9C +#define SEC_INPUT_H_E950DA2C_902F_4B15_BECD_948E99090D9C +#include "securecutil.h" + +#define SECUREC_SCANF_EINVAL (-1) +#define SECUREC_SCANF_ERROR_PARA (-2) + +/* for internal stream flag */ +#define SECUREC_MEM_STR_FLAG 0X01 +#define SECUREC_FILE_STREAM_FLAG 0X02 +#define SECUREC_FROM_STDIN_FLAG 0X04 +#define SECUREC_LOAD_FILE_TO_MEM_FLAG 0X08 + +#define SECUREC_UNINITIALIZED_FILE_POS (-1) +#define SECUREC_BOM_HEADER_SIZE 2 +#define SECUREC_BOM_HEADER_BE_1ST 0xFEU +#define SECUREC_BOM_HEADER_BE_2ST 0xFFU +#define SECUREC_BOM_HEADER_LE_1ST 0xFFU +#define SECUREC_BOM_HEADER_LE_2ST 0xFEU +#define SECUREC_UTF8_BOM_HEADER_SIZE 3 +#define SECUREC_UTF8_BOM_HEADER_1ST 0xEFU +#define SECUREC_UTF8_BOM_HEADER_2ND 0xBBU +#define SECUREC_UTF8_BOM_HEADER_3RD 0xBFU +#define SECUREC_UTF8_LEAD_1ST 0xE0 +#define SECUREC_UTF8_LEAD_2ND 0x80 + +typedef struct { + unsigned int flag; /* mark the properties of input stream */ + int count; /* the size of buffered string in bytes */ + const char *cur; /* the pointer to next read position */ + char *base; /* the pointer to the header of buffered string */ +#if SECUREC_ENABLE_SCANF_FILE + FILE *pf; /* the file pointer */ + long oriFilePos; /* the original position of file offset when fscanf is called */ + int fileRealRead; +#if defined(SECUREC_NO_STD_UNGETC) + unsigned int lastChar; /* the char code of last input */ + int fUnget; /* the boolean flag of pushing a char back to read stream */ +#endif +#endif +} SecFileStream; + + +#define SECUREC_INIT_SEC_FILE_STREAM_COMMON(fileStream, streamFlag, curPtr, strCount) do { \ + (fileStream).flag = (streamFlag); \ + (fileStream).count = (strCount); \ + (fileStream).cur = (curPtr); \ + (fileStream).base = NULL; \ +} SECUREC_WHILE_ZERO + +#if SECUREC_ENABLE_SCANF_FILE +#if defined(SECUREC_NO_STD_UNGETC) +/* This initialization for eliminating redundant initialization. 
+ * Compared with the previous version initialization 0, + * the current code causes the binary size to increase by some bytes + */ +#define SECUREC_INIT_SEC_FILE_STREAM(fileStream, streamFlag, stream, filePos, curPtr, strCount) do { \ + SECUREC_INIT_SEC_FILE_STREAM_COMMON((fileStream), (streamFlag), (curPtr), (strCount)); \ + (fileStream).pf = (stream); \ + (fileStream).oriFilePos = (filePos); \ + (fileStream).fileRealRead = 0; \ + (fileStream).lastChar = 0; \ + (fileStream).fUnget = 0; \ +} SECUREC_WHILE_ZERO +#else +#define SECUREC_INIT_SEC_FILE_STREAM(fileStream, streamFlag, stream, filePos, curPtr, strCount) do { \ + SECUREC_INIT_SEC_FILE_STREAM_COMMON((fileStream), (streamFlag), (curPtr), (strCount)); \ + (fileStream).pf = (stream); \ + (fileStream).oriFilePos = (filePos); \ + (fileStream).fileRealRead = 0; \ +} SECUREC_WHILE_ZERO +#endif +#else /* No SECUREC_ENABLE_SCANF_FILE */ +#define SECUREC_INIT_SEC_FILE_STREAM(fileStream, streamFlag, stream, filePos, curPtr, strCount) do { \ + SECUREC_INIT_SEC_FILE_STREAM_COMMON((fileStream), (streamFlag), (curPtr), (strCount)); \ +} SECUREC_WHILE_ZERO +#endif + +#ifdef __cplusplus +extern "C" { +#endif + + extern int SecInputS(SecFileStream *stream, const char *cFormat, va_list argList); + extern void SecClearDestBuf(const char *buffer, const char *format, va_list argList); +#if SECUREC_IN_KERNEL == 0 + extern int SecInputSW(SecFileStream *stream, const wchar_t *cFormat, va_list argList); + extern void SecClearDestBufW(const wchar_t *buffer, const wchar_t *format, va_list argList); +#endif +/* 20150105 For software and hardware decoupling,such as UMG */ +#if defined(SECUREC_SYSAPI4VXWORKS) +#ifdef feof +#undef feof +#endif + extern int feof(FILE *stream); +#endif + +#if defined(SECUREC_SYSAPI4VXWORKS) || defined(SECUREC_CTYPE_MACRO_ADAPT) +#ifndef isspace +#define isspace(c) (((c) == ' ') || ((c) == '\t') || ((c) == '\r') || ((c) == '\n')) +#endif +#ifndef iswspace +#define iswspace(c) (((c) == L' ') || ((c) == L'\t') 
|| ((c) == L'\r') || ((c) == L'\n')) +#endif +#ifndef isascii +#define isascii(c) (((unsigned char)(c)) <= 0x7f) +#endif +#ifndef isupper +#define isupper(c) ((c) >= 'A' && (c) <= 'Z') +#endif +#ifndef islower +#define islower(c) ((c) >= 'a' && (c) <= 'z') +#endif +#ifndef isalpha +#define isalpha(c) (isupper(c) || (islower(c))) +#endif +#ifndef isdigit +#define isdigit(c) ((c) >= '0' && (c) <= '9') +#endif +#ifndef isxupper +#define isxupper(c) ((c) >= 'A' && (c) <= 'F') +#endif +#ifndef isxlower +#define isxlower(c) ((c) >= 'a' && (c) <= 'f') +#endif +#ifndef isxdigit +#define isxdigit(c) (isdigit(c) || isxupper(c) || isxlower(c)) +#endif +#endif + +#ifdef __cplusplus +} +#endif +/* Reserved file operation macro interface */ +#define SECUREC_LOCK_FILE(s) +#define SECUREC_UNLOCK_FILE(s) +#define SECUREC_LOCK_STDIN(i, s) +#define SECUREC_UNLOCK_STDIN(i, s) +#endif + + diff --git a/third_party/securec/src/securecutil.c b/third_party/securec/src/securecutil.c new file mode 100644 index 00000000..1a44cfbe --- /dev/null +++ b/third_party/securec/src/securecutil.c @@ -0,0 +1,74 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +/* Avoid duplicate header files, not include securecutil.h */ +#include "securecutil.h" + + +#if defined(ANDROID) && (SECUREC_HAVE_WCTOMB || SECUREC_HAVE_MBTOWC) +#include <wchar.h> +#if SECUREC_HAVE_WCTOMB +/* + * Convert wide characters to narrow multi-bytes + */ +int wctomb(char *s, wchar_t wc) +{ + return wcrtomb(s, wc, NULL); +} +#endif + +#if SECUREC_HAVE_MBTOWC +/* + * Converting narrow multi-byte characters to wide characters + */ +int mbtowc(wchar_t *pwc, const char *s, size_t n) +{ + return mbrtowc(pwc, s, n, NULL); +} +#endif +#endif + +/* high Num << 8 | num of SPC Ver */ +#define SECUREC_C_VERSION (0x5 << 8) +#define SECUREC_SPC_VERSION 7 +#define SECUREC_VERSION_STR "Huawei Secure C V100R001C01SPC007B002" + +/* SPC verNumber<->verStr like: + * 0X201<->C01 + * 0X202<->SPC001 Redefine numbers after this version + * 0X502<->SPC002 + * 0X503<->SPC003 + * ... + * 0X50a<->SPC010 + * 0X50b<->SPC011 + * ... + */ +/* CP verNumber<->verStr like: + * 0X601<->CP0001 + * 0X602<->CP0002 + * ... + */ +const char *GetHwSecureCVersion(unsigned short *verNumber) +{ + if (verNumber != NULL) { + *verNumber = (unsigned short)(SECUREC_C_VERSION | SECUREC_SPC_VERSION); + } + return SECUREC_VERSION_STR; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(GetHwSecureCVersion); +#endif + diff --git a/third_party/securec/src/securecutil.h b/third_party/securec/src/securecutil.h new file mode 100644 index 00000000..98c9aad0 --- /dev/null +++ b/third_party/securec/src/securecutil.h @@ -0,0 +1,541 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SECURECUTIL_H_46C86578_F8FF_4E49_8E64_9B175241761F +#define SECURECUTIL_H_46C86578_F8FF_4E49_8E64_9B175241761F +#include "securec.h" + +#if (defined(_MSC_VER)) && (_MSC_VER >= 1400) +#define SECUREC_MASK_MSVC_CRT_WARNING __pragma(warning(push)) \ + __pragma(warning(disable:4996 4127)) +#define SECUREC_END_MASK_MSVC_CRT_WARNING __pragma(warning(pop)) +#else +#define SECUREC_MASK_MSVC_CRT_WARNING +#define SECUREC_END_MASK_MSVC_CRT_WARNING +#endif +#define SECUREC_WHILE_ZERO SECUREC_MASK_MSVC_CRT_WARNING while (0) SECUREC_END_MASK_MSVC_CRT_WARNING + +#ifndef SECUREC_HAVE_STRNLEN +#if (defined(_XOPEN_SOURCE) && _XOPEN_SOURCE >= 700) || (defined(_POSIX_C_SOURCE) && _POSIX_C_SOURCE >= 200809L) +#if SECUREC_IN_KERNEL +#define SECUREC_HAVE_STRNLEN 0 +#else +#if defined(__GLIBC__) && __GLIBC__ >= 2 && defined(__GLIBC_MINOR__) && __GLIBC_MINOR__ >= 10 +#define SECUREC_HAVE_STRNLEN 1 +#else +#define SECUREC_HAVE_STRNLEN 0 +#endif +#endif +#else +#define SECUREC_HAVE_STRNLEN 0 +#endif +#endif + +#if SECUREC_IN_KERNEL +/* in kernel disable functions */ +#ifndef SECUREC_ENABLE_SCANF_FILE +#define SECUREC_ENABLE_SCANF_FILE 0 +#endif +#ifndef SECUREC_ENABLE_SCANF_FLOAT +#define SECUREC_ENABLE_SCANF_FLOAT 0 +#endif +#ifndef SECUREC_ENABLE_SPRINTF_FLOAT +#define SECUREC_ENABLE_SPRINTF_FLOAT 0 +#endif +#ifndef SECUREC_HAVE_MBTOWC +#define SECUREC_HAVE_MBTOWC 0 +#endif +#ifndef SECUREC_HAVE_WCTOMB +#define SECUREC_HAVE_WCTOMB 0 +#endif +#ifndef SECUREC_HAVE_WCHART +#define SECUREC_HAVE_WCHART 0 +#endif +#else /* not in kernel */ +/* Systems that do not 
support file, can define this macro to 0. */ +#ifndef SECUREC_ENABLE_SCANF_FILE +#define SECUREC_ENABLE_SCANF_FILE 1 +#endif +#ifndef SECUREC_ENABLE_SCANF_FLOAT +#define SECUREC_ENABLE_SCANF_FLOAT 1 +#endif +/* Systems that do not support float, can define this macro to 0. */ +#ifndef SECUREC_ENABLE_SPRINTF_FLOAT +#define SECUREC_ENABLE_SPRINTF_FLOAT 1 +#endif +#ifndef SECUREC_HAVE_MBTOWC +#define SECUREC_HAVE_MBTOWC 1 +#endif +#ifndef SECUREC_HAVE_WCTOMB +#define SECUREC_HAVE_WCTOMB 1 +#endif +#ifndef SECUREC_HAVE_WCHART +#define SECUREC_HAVE_WCHART 1 +#endif +#endif + + +#define SECUREC_INT_MAX 2147483647 +#define SECUREC_MUL_SIXTEEN(x) ((x) << 4) +#define SECUREC_MUL_EIGHT(x) ((x) << 3) +#define SECUREC_MUL_TEN(x) ((((x) << 2) + (x)) << 1) +/* Limited format input and output width */ +#define SECUREC_MAX_WIDTH_LEN_DIV_TEN 21474836 +#define SECUREC_MAX_WIDTH_LEN SECUREC_MUL_TEN(SECUREC_MAX_WIDTH_LEN_DIV_TEN) +/* Is the x multiplied by 10 greater than */ +#define SECUREC_MUL_TEN_ADD_BEYOND_MAX(x) (((x) > SECUREC_MAX_WIDTH_LEN_DIV_TEN)) + +#define SECUREC_FLOAT_BUFSIZE (309 + 40) /* Max length of double value */ +#define SECUREC_FLOAT_BUFSIZE_LB (4932 + 40) /* Max length of long double value */ +#define SECUREC_FLOAT_DEFAULT_PRECISION 6 + +/* This macro does not handle pointer equality or integer overflow */ +#define SECUREC_MEMORY_NO_OVERLAP(dest, src, count) \ + (((src) < (dest) && ((const char *)(src) + (count)) <= (char *)(dest)) || \ + ((dest) < (src) && ((char *)(dest) + (count)) <= (const char *)(src))) + +#define SECUREC_MEMORY_IS_OVERLAP(dest, src, count) \ + (((src) < (dest) && ((const char *)(src) + (count)) > (char *)(dest)) || \ + ((dest) < (src) && ((char *)(dest) + (count)) > (const char *)(src))) + +/* + * Check whether the strings overlap, len is the length of the string not include terminator + * Length is related to data type char or wchar , do not force conversion of types + */ +#define SECUREC_STRING_NO_OVERLAP(dest, src, len) \ + (((src) < 
(dest) && ((src) + (len)) < (dest)) || \ + ((dest) < (src) && ((dest) + (len)) < (src))) + +/* + * Check whether the strings overlap for strcpy wcscpy function, dest len and src Len are not include terminator + * Length is related to data type char or wchar , do not force conversion of types + */ +#define SECUREC_STRING_IS_OVERLAP(dest, src, len) \ + (((src) < (dest) && ((src) + (len)) >= (dest)) || \ + ((dest) < (src) && ((dest) + (len)) >= (src))) + +/* + * Check whether the strings overlap for strcat wcscat function, dest len and src Len are not include terminator + * Length is related to data type char or wchar , do not force conversion of types + */ +#define SECUREC_CAT_STRING_IS_OVERLAP(dest, destLen, src, srcLen) \ + (((dest) < (src) && ((dest) + (destLen) + (srcLen)) >= (src)) || \ + ((src) < (dest) && ((src) + (srcLen)) >= (dest))) + + +#if SECUREC_HAVE_STRNLEN +#define SECUREC_CALC_STR_LEN(str, maxLen, outLen) do { \ + *(outLen) = strnlen((str), (maxLen)); \ +} SECUREC_WHILE_ZERO +#define SECUREC_CALC_STR_LEN_OPT(str, maxLen, outLen) do { \ + if ((maxLen) > 8) { \ + /* Optimization or len less then 8 */ \ + if (*((str) + 0) == '\0') { \ + *(outLen) = 0; \ + } else if (*((str) + 1) == '\0') { \ + *(outLen) = 1; \ + } else if (*((str) + 2) == '\0') { \ + *(outLen) = 2; \ + } else if (*((str) + 3) == '\0') { \ + *(outLen) = 3; \ + } else if (*((str) + 4) == '\0') { \ + *(outLen) = 4; \ + } else if (*((str) + 5) == '\0') { \ + *(outLen) = 5; \ + } else if (*((str) + 6) == '\0') { \ + *(outLen) = 6; \ + } else if (*((str) + 7) == '\0') { \ + *(outLen) = 7; \ + } else if (*((str) + 8) == '\0') { \ + /* Optimization with a length of 8 */ \ + *(outLen) = 8; \ + } else { \ + /* The offset is 8 because the performance of 8 byte alignment is high */ \ + *(outLen) = 8 + strnlen((str) + 8, (maxLen) - 8); \ + } \ + } else { \ + SECUREC_CALC_STR_LEN((str), (maxLen), (outLen)); \ + } \ +} SECUREC_WHILE_ZERO +#else +#define SECUREC_CALC_STR_LEN(str, maxLen, outLen) do { \ 
+ const char *strEnd = (const char *)(str); \ + size_t availableSize = (size_t)(maxLen); \ + while (availableSize > 0 && *strEnd != '\0') { \ + --availableSize; \ + ++strEnd; \ + } \ + *(outLen) = (size_t)(strEnd - (str)); \ +} SECUREC_WHILE_ZERO +#define SECUREC_CALC_STR_LEN_OPT SECUREC_CALC_STR_LEN +#endif + +#define SECUREC_CALC_WSTR_LEN(str, maxLen, outLen) do { \ + const wchar_t *strEnd = (const wchar_t *)(str); \ + *(outLen) = 0; \ + while (*(outLen) < (maxLen) && *strEnd != L'\0') { \ + *(outLen) = *(outLen) + 1; \ + ++strEnd; \ + } \ +} SECUREC_WHILE_ZERO + + +#ifdef SECUREC_FORMAT_OUTPUT_INPUT +#if defined(SECUREC_COMPATIBLE_WIN_FORMAT) || defined(__ARMCC_VERSION) +typedef __int64 SecInt64; +typedef unsigned __int64 SecUnsignedInt64; +#if defined(__ARMCC_VERSION) +typedef unsigned int SecUnsignedInt32; +#else +typedef unsigned __int32 SecUnsignedInt32; +#endif +#else +typedef unsigned int SecUnsignedInt32; +typedef long long SecInt64; +typedef unsigned long long SecUnsignedInt64; +#endif + +#ifdef SECUREC_FOR_WCHAR +#if defined(SECUREC_VXWORKS_PLATFORM) && !defined(__WINT_TYPE__) +typedef wchar_t wint_t; +#endif +typedef wchar_t SecChar; +typedef wchar_t SecUnsignedChar; +typedef wint_t SecInt; +typedef wint_t SecUnsignedInt; +#else /* no SECUREC_FOR_WCHAR */ +typedef char SecChar; +typedef unsigned char SecUnsignedChar; +typedef int SecInt; +typedef unsigned int SecUnsignedInt; +#endif +#endif + +/* Determine whether the address is 8-byte aligned + * Some systems do not have uintptr_t type, so use NULL to clear tool alarm 507 + */ +#define SECUREC_ADDR_ALIGNED_8(addr) (SecIsAddrAligned8((addr), NULL) == 0) + +/* If you define the memory allocation function, + * you need to define the function prototype. You can define this macro as a header file. 
+ */ +#if defined(SECUREC_MALLOC_PROTOTYPE) +SECUREC_MALLOC_PROTOTYPE +#endif + +#ifndef SECUREC_MALLOC +#define SECUREC_MALLOC(x) malloc((size_t)(x)) +#endif + +#ifndef SECUREC_FREE +#define SECUREC_FREE(x) free((void *)(x)) +#endif + +/* struct for performance */ +typedef struct { + unsigned char buf[1]; /* Performance optimization code structure assignment length 1 bytes */ +} SecStrBuf1; +typedef struct { + unsigned char buf[2]; /* Performance optimization code structure assignment length 2 bytes */ +} SecStrBuf2; +typedef struct { + unsigned char buf[3]; /* Performance optimization code structure assignment length 3 bytes */ +} SecStrBuf3; +typedef struct { + unsigned char buf[4]; /* Performance optimization code structure assignment length 4 bytes */ +} SecStrBuf4; +typedef struct { + unsigned char buf[5]; /* Performance optimization code structure assignment length 5 bytes */ +} SecStrBuf5; +typedef struct { + unsigned char buf[6]; /* Performance optimization code structure assignment length 6 bytes */ +} SecStrBuf6; +typedef struct { + unsigned char buf[7]; /* Performance optimization code structure assignment length 7 bytes */ +} SecStrBuf7; +typedef struct { + unsigned char buf[8]; /* Performance optimization code structure assignment length 8 bytes */ +} SecStrBuf8; +typedef struct { + unsigned char buf[9]; /* Performance optimization code structure assignment length 9 bytes */ +} SecStrBuf9; +typedef struct { + unsigned char buf[10]; /* Performance optimization code structure assignment length 10 bytes */ +} SecStrBuf10; +typedef struct { + unsigned char buf[11]; /* Performance optimization code structure assignment length 11 bytes */ +} SecStrBuf11; +typedef struct { + unsigned char buf[12]; /* Performance optimization code structure assignment length 12 bytes */ +} SecStrBuf12; +typedef struct { + unsigned char buf[13]; /* Performance optimization code structure assignment length 13 bytes */ +} SecStrBuf13; +typedef struct { + unsigned char buf[14]; 
/* Performance optimization code structure assignment length 14 bytes */ +} SecStrBuf14; +typedef struct { + unsigned char buf[15]; /* Performance optimization code structure assignment length 15 bytes */ +} SecStrBuf15; +typedef struct { + unsigned char buf[16]; /* Performance optimization code structure assignment length 16 bytes */ +} SecStrBuf16; +typedef struct { + unsigned char buf[17]; /* Performance optimization code structure assignment length 17 bytes */ +} SecStrBuf17; +typedef struct { + unsigned char buf[18]; /* Performance optimization code structure assignment length 18 bytes */ +} SecStrBuf18; +typedef struct { + unsigned char buf[19]; /* Performance optimization code structure assignment length 19 bytes */ +} SecStrBuf19; +typedef struct { + unsigned char buf[20]; /* Performance optimization code structure assignment length 20 bytes */ +} SecStrBuf20; +typedef struct { + unsigned char buf[21]; /* Performance optimization code structure assignment length 21 bytes */ +} SecStrBuf21; +typedef struct { + unsigned char buf[22]; /* Performance optimization code structure assignment length 22 bytes */ +} SecStrBuf22; +typedef struct { + unsigned char buf[23]; /* Performance optimization code structure assignment length 23 bytes */ +} SecStrBuf23; +typedef struct { + unsigned char buf[24]; /* Performance optimization code structure assignment length 24 bytes */ +} SecStrBuf24; +typedef struct { + unsigned char buf[25]; /* Performance optimization code structure assignment length 25 bytes */ +} SecStrBuf25; +typedef struct { + unsigned char buf[26]; /* Performance optimization code structure assignment length 26 bytes */ +} SecStrBuf26; +typedef struct { + unsigned char buf[27]; /* Performance optimization code structure assignment length 27 bytes */ +} SecStrBuf27; +typedef struct { + unsigned char buf[28]; /* Performance optimization code structure assignment length 28 bytes */ +} SecStrBuf28; +typedef struct { + unsigned char buf[29]; /* Performance 
optimization code structure assignment length 29 bytes */ +} SecStrBuf29; +typedef struct { + unsigned char buf[30]; /* Performance optimization code structure assignment length 30 bytes */ +} SecStrBuf30; +typedef struct { + unsigned char buf[31]; /* Performance optimization code structure assignment length 31 bytes */ +} SecStrBuf31; +typedef struct { + unsigned char buf[32]; /* Performance optimization code structure assignment length 32 bytes */ +} SecStrBuf32; +typedef struct { + unsigned char buf[33]; /* Performance optimization code structure assignment length 33 bytes */ +} SecStrBuf33; +typedef struct { + unsigned char buf[34]; /* Performance optimization code structure assignment length 34 bytes */ +} SecStrBuf34; +typedef struct { + unsigned char buf[35]; /* Performance optimization code structure assignment length 35 bytes */ +} SecStrBuf35; +typedef struct { + unsigned char buf[36]; /* Performance optimization code structure assignment length 36 bytes */ +} SecStrBuf36; +typedef struct { + unsigned char buf[37]; /* Performance optimization code structure assignment length 37 bytes */ +} SecStrBuf37; +typedef struct { + unsigned char buf[38]; /* Performance optimization code structure assignment length 38 bytes */ +} SecStrBuf38; +typedef struct { + unsigned char buf[39]; /* Performance optimization code structure assignment length 39 bytes */ +} SecStrBuf39; +typedef struct { + unsigned char buf[40]; /* Performance optimization code structure assignment length 40 bytes */ +} SecStrBuf40; +typedef struct { + unsigned char buf[41]; /* Performance optimization code structure assignment length 41 bytes */ +} SecStrBuf41; +typedef struct { + unsigned char buf[42]; /* Performance optimization code structure assignment length 42 bytes */ +} SecStrBuf42; +typedef struct { + unsigned char buf[43]; /* Performance optimization code structure assignment length 43 bytes */ +} SecStrBuf43; +typedef struct { + unsigned char buf[44]; /* Performance optimization code 
structure assignment length 44 bytes */ +} SecStrBuf44; +typedef struct { + unsigned char buf[45]; /* Performance optimization code structure assignment length 45 bytes */ +} SecStrBuf45; +typedef struct { + unsigned char buf[46]; /* Performance optimization code structure assignment length 46 bytes */ +} SecStrBuf46; +typedef struct { + unsigned char buf[47]; /* Performance optimization code structure assignment length 47 bytes */ +} SecStrBuf47; +typedef struct { + unsigned char buf[48]; /* Performance optimization code structure assignment length 48 bytes */ +} SecStrBuf48; +typedef struct { + unsigned char buf[49]; /* Performance optimization code structure assignment length 49 bytes */ +} SecStrBuf49; +typedef struct { + unsigned char buf[50]; /* Performance optimization code structure assignment length 50 bytes */ +} SecStrBuf50; +typedef struct { + unsigned char buf[51]; /* Performance optimization code structure assignment length 51 bytes */ +} SecStrBuf51; +typedef struct { + unsigned char buf[52]; /* Performance optimization code structure assignment length 52 bytes */ +} SecStrBuf52; +typedef struct { + unsigned char buf[53]; /* Performance optimization code structure assignment length 53 bytes */ +} SecStrBuf53; +typedef struct { + unsigned char buf[54]; /* Performance optimization code structure assignment length 54 bytes */ +} SecStrBuf54; +typedef struct { + unsigned char buf[55]; /* Performance optimization code structure assignment length 55 bytes */ +} SecStrBuf55; +typedef struct { + unsigned char buf[56]; /* Performance optimization code structure assignment length 56 bytes */ +} SecStrBuf56; +typedef struct { + unsigned char buf[57]; /* Performance optimization code structure assignment length 57 bytes */ +} SecStrBuf57; +typedef struct { + unsigned char buf[58]; /* Performance optimization code structure assignment length 58 bytes */ +} SecStrBuf58; +typedef struct { + unsigned char buf[59]; /* Performance optimization code structure 
assignment length 59 bytes */ +} SecStrBuf59; +typedef struct { + unsigned char buf[60]; /* Performance optimization code structure assignment length 60 bytes */ +} SecStrBuf60; +typedef struct { + unsigned char buf[61]; /* Performance optimization code structure assignment length 61 bytes */ +} SecStrBuf61; +typedef struct { + unsigned char buf[62]; /* Performance optimization code structure assignment length 62 bytes */ +} SecStrBuf62; +typedef struct { + unsigned char buf[63]; /* Performance optimization code structure assignment length 63 bytes */ +} SecStrBuf63; +typedef struct { + unsigned char buf[64]; /* Performance optimization code structure assignment length 64 bytes */ +} SecStrBuf64; + + + + +/* User can change the error handler by modify the following definition, + * such as logging the detail error in file. + */ +#if defined(_DEBUG) || defined(DEBUG) +#if defined(SECUREC_ERROR_HANDLER_BY_ASSERT) +#define SECUREC_ERROR_INVALID_PARAMTER(msg) assert(msg "invalid argument" == NULL) +#define SECUREC_ERROR_INVALID_RANGE(msg) assert(msg "invalid dest buffer size" == NULL) +#define SECUREC_ERROR_BUFFER_OVERLAP(msg) assert(msg "buffer overlap" == NULL) +#elif defined(SECUREC_ERROR_HANDLER_BY_PRINTF) +#if SECUREC_IN_KERNEL +#define SECUREC_ERROR_INVALID_PARAMTER(msg) printk("%s invalid argument\n", msg) +#define SECUREC_ERROR_INVALID_RANGE(msg) printk("%s invalid dest buffer size\n", msg) +#define SECUREC_ERROR_BUFFER_OVERLAP(msg) printk("%s buffer overlap\n", msg) +#else +#define SECUREC_ERROR_INVALID_PARAMTER(msg) printf("%s invalid argument\n", msg) +#define SECUREC_ERROR_INVALID_RANGE(msg) printf("%s invalid dest buffer size\n", msg) +#define SECUREC_ERROR_BUFFER_OVERLAP(msg) printf("%s buffer overlap\n", msg) +#endif +#elif defined(SECUREC_ERROR_HANDLER_BY_FILE_LOG) +#define SECUREC_ERROR_INVALID_PARAMTER(msg) LogSecureCRuntimeError(msg " EINVAL\n") +#define SECUREC_ERROR_INVALID_RANGE(msg) LogSecureCRuntimeError(msg " ERANGE\n") +#define 
SECUREC_ERROR_BUFFER_OVERLAP(msg) LogSecureCRuntimeError(msg " EOVERLAP\n") +#else /* no HANDLER is defined */ +#define SECUREC_ERROR_INVALID_PARAMTER(msg) ((void)0) +#define SECUREC_ERROR_INVALID_RANGE(msg) ((void)0) +#define SECUREC_ERROR_BUFFER_OVERLAP(msg) ((void)0) +#endif +#else /* no DEBUG */ +#define SECUREC_ERROR_INVALID_PARAMTER(msg) ((void)0) +#define SECUREC_ERROR_INVALID_RANGE(msg) ((void)0) +#define SECUREC_ERROR_BUFFER_OVERLAP(msg) ((void)0) +#endif + +#ifdef __cplusplus +extern "C" { +#endif + +/* assembly language memory copy and memory set for X86 or MIPS ... */ +#ifdef SECUREC_USE_ASM + extern void *memcpy_opt(void *, const void *, size_t); + extern void *memset_opt(void *, int, size_t); +#endif + +#if defined(SECUREC_ERROR_HANDLER_BY_FILE_LOG) + extern void LogSecureCRuntimeError(const char *errDetail); +#endif + +#ifdef SECUREC_INLINE_DO_MEMCPY +static void SecDoMemcpy(void *dest, const void *src, size_t count) +{ + /* + * if SECUREC_USE_ASM macro is enabled, it will call assembly language function to improve performance. 
+ */ +#ifdef SECUREC_USE_ASM + (void)memcpy_opt(dest, src, count); +#else + /* large enough, let system API do it */ + (void)memcpy(dest, src, count); +#endif +} +#endif + +#ifdef SECUREC_INLINE_DO_MEMSET +static void SecDoMemset(void *dest, int c, size_t count) +{ +#ifdef SECUREC_USE_ASM + (void)memset_opt(dest, c, count); +#else + (void)memset(dest, c, count); +#endif +} +#endif + +#ifdef SECUREC_INLINE_STR_LEN +/* The function compiler will be inlined and not placed in other files */ +static size_t SecStrMinLen(const char *str, size_t maxLen) +{ + size_t len; + SECUREC_CALC_STR_LEN(str, maxLen, &len); + return len; +} +#endif + +#ifdef SECUREC_INLINE_STR_LEN_OPT +/* The function compiler will be inlined and not placed in other files */ +static size_t SecStrMinLenOpt(const char *str, size_t maxLen) +{ + size_t len; + SECUREC_CALC_STR_LEN_OPT(str, maxLen, &len); + return len; +} +#endif + +#ifdef __cplusplus +} +#endif /* __cplusplus */ +#endif + diff --git a/third_party/securec/src/secureinput_a.c b/third_party/securec/src/secureinput_a.c new file mode 100644 index 00000000..4f9bae83 --- /dev/null +++ b/third_party/securec/src/secureinput_a.c @@ -0,0 +1,25 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#define SECUREC_FORMAT_OUTPUT_INPUT 1 +#ifdef SECUREC_FOR_WCHAR +#undef SECUREC_FOR_WCHAR +#endif + +#include "secinput.h" + +#include "input.inl" + diff --git a/third_party/securec/src/secureinput_w.c b/third_party/securec/src/secureinput_w.c new file mode 100644 index 00000000..7a4bef42 --- /dev/null +++ b/third_party/securec/src/secureinput_w.c @@ -0,0 +1,46 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* if some platforms don't have wchar.h, don't include it */ +#if !(defined(SECUREC_VXWORKS_PLATFORM)) +/* This header file is placed below secinput.h, which will cause tool alarm, + * but if there is no macro above, it will cause vs2010 compiling alarm + */ +#if defined(_MSC_VER) && (_MSC_VER >= 1400) +#ifndef __STDC_WANT_SECURE_LIB__ +/* The order of adjustment is to eliminate alarm of Duplicate Block */ +#define __STDC_WANT_SECURE_LIB__ 0 +#endif +#ifndef _CRTIMP_ALTERNATIVE +#define _CRTIMP_ALTERNATIVE /* comment microsoft *_s function */ +#endif +#endif +#include <wchar.h> +#endif +#define SECUREC_ENABLE_WCHAR_FUNC 0 +#define SECUREC_FORMAT_OUTPUT_INPUT 1 +#ifndef SECUREC_FOR_WCHAR +#define SECUREC_FOR_WCHAR +#endif + +#include "secinput.h" + +#ifndef WEOF +#define WEOF ((wchar_t)(-1)) +#endif + +#include "input.inl" + diff --git a/third_party/securec/src/secureprintoutput.h b/third_party/securec/src/secureprintoutput.h new file mode 100644 index 00000000..b690ec92 --- /dev/null +++ 
b/third_party/securec/src/secureprintoutput.h @@ -0,0 +1,98 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#ifndef SECUREPRINTOUTPUT_H_E950DA2C_902F_4B15_BECD_948E99090D9C +#define SECUREPRINTOUTPUT_H_E950DA2C_902F_4B15_BECD_948E99090D9C +#include "securecutil.h" + +/* flag definitions */ +/* Using macros instead of enumerations is because some of the enumerated types under the compiler are 16bit. */ +#define SECUREC_FLAG_SIGN 0x00001U +#define SECUREC_FLAG_SIGN_SPACE 0x00002U +#define SECUREC_FLAG_LEFT 0x00004U +#define SECUREC_FLAG_LEADZERO 0x00008U +#define SECUREC_FLAG_LONG 0x00010U +#define SECUREC_FLAG_SHORT 0x00020U +#define SECUREC_FLAG_SIGNED 0x00040U +#define SECUREC_FLAG_ALTERNATE 0x00080U +#define SECUREC_FLAG_NEGATIVE 0x00100U +#define SECUREC_FLAG_FORCE_OCTAL 0x00200U +#define SECUREC_FLAG_LONG_DOUBLE 0x00400U +#define SECUREC_FLAG_WIDECHAR 0x00800U +#define SECUREC_FLAG_LONGLONG 0x01000U +#define SECUREC_FLAG_CHAR 0x02000U +#define SECUREC_FLAG_POINTER 0x04000U +#define SECUREC_FLAG_I64 0x08000U +#define SECUREC_FLAG_PTRDIFF 0x10000U +#define SECUREC_FLAG_SIZE 0x20000U +#ifdef SECUREC_COMPATIBLE_LINUX_FORMAT +#define SECUREC_FLAG_INTMAX 0x40000U +#endif + +/* state definitions. 
Identify the status of the current format */ +typedef enum { + STAT_NORMAL, + STAT_PERCENT, + STAT_FLAG, + STAT_WIDTH, + STAT_DOT, + STAT_PRECIS, + STAT_SIZE, + STAT_TYPE, + STAT_INVALID +} SecFmtState; + +/* Format output buffer pointer and available size */ +typedef struct { + int count; + char *cur; +} SecPrintfStream; + + +#ifndef SECUREC_BUFFER_SIZE +#ifdef SECUREC_STACK_SIZE_LESS_THAN_1K +/* SECUREC_BUFFER_SIZE Can not be less than 23 , + * the length of the octal representation of 64-bit integers with zero lead + */ +#define SECUREC_BUFFER_SIZE 256 +#else +#define SECUREC_BUFFER_SIZE 512 +#endif +#endif +#if SECUREC_BUFFER_SIZE < 23 +#error SECUREC_BUFFER_SIZE Can not be less than 23 +#endif + +#define SECUREC_MAX_PRECISION SECUREC_BUFFER_SIZE +/* max. # bytes in multibyte char ,see MB_LEN_MAX */ +#define SECUREC_MB_LEN 16 +/* The return value of the internal function, which is returned when truncated */ +#define SECUREC_PRINTF_TRUNCATE (-2) + +#ifdef __cplusplus +extern "C" { +#endif + extern int SecVsnprintfImpl(char *string, size_t count, const char *format, va_list argList); +#if SECUREC_IN_KERNEL == 0 + extern int SecVswprintfImpl(wchar_t *string, size_t sizeInWchar, const wchar_t *format, va_list argList); +#endif +#ifdef __cplusplus +} +#endif + +#endif + + diff --git a/third_party/securec/src/secureprintoutput_a.c b/third_party/securec/src/secureprintoutput_a.c new file mode 100644 index 00000000..746878a1 --- /dev/null +++ b/third_party/securec/src/secureprintoutput_a.c @@ -0,0 +1,101 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_DO_MEMCPY 1 +#define SECUREC_FORMAT_OUTPUT_INPUT 1 +#ifdef SECUREC_FOR_WCHAR +#undef SECUREC_FOR_WCHAR +#endif + +#include "secureprintoutput.h" + +#define SECUREC_CHAR(x) x +#define SECUREC_WRITE_MULTI_CHAR SecWriteMultiChar +#define SECUREC_WRITE_STRING SecWriteString + +#ifndef EOF +#define EOF (-1) +#endif + +/* put a char to output */ +#define SECUREC_PUTC(c, outStream) ((--(outStream)->count >= 0) ? \ + (int)((unsigned int)(unsigned char)(*((outStream)->cur++) = (char)(c)) & 0xff) : EOF) +/* to clear e835 */ +#define SECUREC_PUTC_ZERO(outStream) ((--(outStream)->count >= 0) ? 
\ + ((*((outStream)->cur++) = (char)('\0'))) : EOF) + +static void SecWriteMultiChar(char ch, int num, SecPrintfStream *f, int *pnumwritten); +static void SecWriteString(const char *string, int len, SecPrintfStream *f, int *pnumwritten); + +#include "output.inl" + +/* + * Wide character formatted output implementation + */ +int SecVsnprintfImpl(char *string, size_t count, const char *format, va_list argList) +{ + SecPrintfStream str; + int retVal; + + str.count = (int)count; /* this count include \0 character, Must be greater than zero */ + str.cur = string; + + retVal = SecOutputS(&str, format, argList); + if ((retVal >= 0) && (SECUREC_PUTC_ZERO(&str) != EOF)) { + return retVal; + } else if (str.count < 0) { + /* the buffer was too small; we return truncation */ + string[count - 1] = '\0'; + return SECUREC_PRINTF_TRUNCATE; + } + string[0] = '\0'; /* empty the dest strDest */ + return -1; +} + +/* + * Sec write Wide character + */ +static void SecWriteMultiChar(char ch, int num, SecPrintfStream *f, int *pnumwritten) +{ + int count = num; + while (count-- > 0) { + if (SECUREC_PUTC(ch, f) == EOF) { + *pnumwritten = -1; + break; + } else { + *pnumwritten = *pnumwritten + 1; + } + } +} + +/* + * Sec write string function + */ +static void SecWriteString(const char *string, int len, SecPrintfStream *f, int *pnumwritten) +{ + const char *str = string; + int count = len; + while (count-- > 0) { + if (SECUREC_PUTC(*str, f) == EOF) { + *pnumwritten = -1; + break; + } else { + *pnumwritten = *pnumwritten + 1; + ++str; + } + } +} + diff --git a/third_party/securec/src/secureprintoutput_w.c b/third_party/securec/src/secureprintoutput_w.c new file mode 100644 index 00000000..9063ab4d --- /dev/null +++ b/third_party/securec/src/secureprintoutput_w.c @@ -0,0 +1,170 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +/* if some platforms don't have wchar.h, dont't include it */ +#if !(defined(SECUREC_VXWORKS_PLATFORM)) +/* This header file is placed below secinput.h, which will cause tool alarm, + * but if there is no macro above, it will cause compiling alarm + */ +#if defined(_MSC_VER) && (_MSC_VER >= 1400) +#ifndef _CRTIMP_ALTERNATIVE +#define _CRTIMP_ALTERNATIVE /* comment microsoft *_s function */ +#endif +#ifndef __STDC_WANT_SECURE_LIB__ +#define __STDC_WANT_SECURE_LIB__ 0 +#endif +#endif +#include +#endif + +#define SECUREC_ENABLE_WCHAR_FUNC 0 +#define SECUREC_INLINE_DO_MEMCPY 1 +#define SECUREC_FORMAT_OUTPUT_INPUT 1 +#ifndef SECUREC_FOR_WCHAR +#define SECUREC_FOR_WCHAR +#endif + +#include "secureprintoutput.h" + +#ifndef WEOF +#define WEOF ((wchar_t)(-1)) +#endif + +#define SECUREC_CHAR(x) L ## x +#define SECUREC_WRITE_MULTI_CHAR SecWriteMultiCharW +#define SECUREC_WRITE_STRING SecWriteStringW + +static void SecWriteCharW(wchar_t ch, SecPrintfStream *f, int *pnumwritten); +static void SecWriteMultiCharW(wchar_t ch, int num, SecPrintfStream *f, int *pnumwritten); +static void SecWriteStringW(const wchar_t *string, int len, SecPrintfStream *f, int *pnumwritten); +static int SecPutWcharStrEndingZero(SecPrintfStream *str, int zeroCount); + + +#include "output.inl" + +/* + * Wide character formatted output implementation + */ +int SecVswprintfImpl(wchar_t *string, size_t sizeInWchar, const wchar_t *format, va_list argList) +{ + SecPrintfStream str; + int retVal; /* If initialization causes e838 */ + + str.cur = (char *)string; + /* this count include 
\0 character, Must be greater than zero */ + str.count = (int)(sizeInWchar * sizeof(wchar_t)); + + retVal = SecOutputSW(&str, format, argList); + if ((retVal >= 0) && SecPutWcharStrEndingZero(&str, (int)sizeof(wchar_t))) { + return (retVal); + } else if (str.count < 0) { + /* the buffer was too small; we return truncation */ + string[sizeInWchar - 1] = L'\0'; + return SECUREC_PRINTF_TRUNCATE; + } + string[0] = L'\0'; + return -1; +} + +/* + * Output one zero character zero into the SecPrintfStream structure + */ +static int SecPutZeroChar(SecPrintfStream *str) +{ + if (str->count > 0) { + *(str->cur) = (char)('\0'); + str->count = str->count - 1; + str->cur = str->cur + 1; + return 0; + } + return -1; +} + +/* + * Output a wide character zero end into the SecPrintfStream structure + */ +static int SecPutWcharStrEndingZero(SecPrintfStream *str, int zeroCount) +{ + int succeed = 0; + int i = 0; + + while (i < zeroCount && (SecPutZeroChar(str) == 0)) { + ++i; + } + if (i == zeroCount) { + succeed = 1; + } + return succeed; +} + + +/* + * Output a wide character into the SecPrintfStream structure + */ +static wchar_t SecPutCharW(wchar_t ch, SecPrintfStream *f) +{ + wchar_t wcRet = 0; + if (((f)->count -= (int)sizeof(wchar_t)) >= 0) { + *(wchar_t *)(void *)(f->cur) = ch; + f->cur += sizeof(wchar_t); + wcRet = ch; + } else { + wcRet = (wchar_t)WEOF; + } + return wcRet; +} + +/* + * Output a wide character into the SecPrintfStream structure, returns the number of characters written + */ +static void SecWriteCharW(wchar_t ch, SecPrintfStream *f, int *pnumwritten) +{ + if (SecPutCharW(ch, f) == (wchar_t)WEOF) { + *pnumwritten = -1; + } else { + *pnumwritten = *pnumwritten + 1; + } +} + +/* + * Output multiple wide character into the SecPrintfStream structure, returns the number of characters written + */ +static void SecWriteMultiCharW(wchar_t ch, int num, SecPrintfStream *f, int *pnumwritten) +{ + int count = num; + while (count-- > 0) { + SecWriteCharW(ch, f, 
pnumwritten); + if (*pnumwritten == -1) { + break; + } + } +} + +/* + * Output a wide string into the SecPrintfStream structure, returns the number of characters written + */ +static void SecWriteStringW(const wchar_t *string, int len, SecPrintfStream *f, int *pnumwritten) +{ + const wchar_t *str = string; + int count = len; + while (count-- > 0) { + SecWriteCharW(*str++, f, pnumwritten); + if (*pnumwritten == -1) { + break; + } + } +} + diff --git a/third_party/securec/src/snprintf_s.c b/third_party/securec/src/snprintf_s.c new file mode 100644 index 00000000..0bd7ed1b --- /dev/null +++ b/third_party/securec/src/snprintf_s.c @@ -0,0 +1,113 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +#if SECUREC_ENABLE_SNPRINTF +/* + * + * The snprintf_s function is equivalent to the snprintf function + * except for the parameter destMax/count and the explicit runtime-constraints violation + * The snprintf_s function formats and stores count or fewer characters in + * strDest and appends a terminating null. Each argument (if any) is converted + * and output according to the corresponding format specification in format. + * The formatting is consistent with the printf family of functions; If copying + * occurs between strings that overlap, the behavior is undefined. + * + * + * strDest Storage location for the output. + * destMax The size of the storage location for output. 
Size + * in bytes for snprintf_s or size in words for snwprintf_s. + * count Maximum number of character to store. + * format Format-control string. + * ... Optional arguments. + * + * + * strDest is updated + * + * + * return the number of characters written, not including the terminating null + * return -1 if an error occurs. + * return -1 if count < destMax and the output string has been truncated + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + * + */ +int snprintf_s(char *strDest, size_t destMax, size_t count, const char *format, ...) +{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vsnprintf_s(strDest, destMax, count, format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(snprintf_s); +#endif +#endif + +#if SECUREC_SNPRINTF_TRUNCATED +/* + * + * The snprintf_truncated_s function is equivalent to the snprintf function + * except for the parameter destMax/count and the explicit runtime-constraints violation + * The snprintf_truncated_s function formats and stores count or fewer characters in + * strDest and appends a terminating null. Each argument (if any) is converted + * and output according to the corresponding format specification in format. + * The formatting is consistent with the printf family of functions; If copying + * occurs between strings that overlap, the behavior is undefined. + * + * + * strDest Storage location for the output. + * destMax The size of the storage location for output. Size + * in bytes for snprintf_truncated_s or size in words for snwprintf_s. + * format Format-control string. + * ... Optional arguments. + * + * + * strDest is updated + * + * + * return the number of characters written, not including the terminating null + * return -1 if an error occurs. 
+ * return destMax-1 if output string has been truncated + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + * + */ +int snprintf_truncated_s(char *strDest, size_t destMax, const char *format, ...) +{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vsnprintf_truncated_s(strDest, destMax, format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(snprintf_truncated_s); +#endif + +#endif + + diff --git a/third_party/securec/src/sprintf_s.c b/third_party/securec/src/sprintf_s.c new file mode 100644 index 00000000..54a79604 --- /dev/null +++ b/third_party/securec/src/sprintf_s.c @@ -0,0 +1,61 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * + * The sprintf_s function is equivalent to the sprintf function + * except for the parameter destMax and the explicit runtime-constraints violation + * The sprintf_s function formats and stores a series of characters and values + * in strDest. Each argument (if any) is converted and output according to + * the corresponding format specification in format. 
The format consists of + * ordinary characters and has the same form and function as the format argument + * for printf. A null character is appended after the last character written. + * If copying occurs between strings that overlap, the behavior is undefined. + * + * + * strDest Storage location for output. + * destMax Maximum number of characters to store. + * format Format-control string. + * ... Optional arguments + * + * + * strDest is updated + * + * + * return the number of bytes stored in strDest, not counting the terminating null character. + * return -1 if an error occurred. + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +int sprintf_s(char *strDest, size_t destMax, const char *format, ...) +{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vsprintf_s(strDest, destMax, format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(sprintf_s); +#endif + + diff --git a/third_party/securec/src/sscanf_s.c b/third_party/securec/src/sscanf_s.c new file mode 100644 index 00000000..c8f097ef --- /dev/null +++ b/third_party/securec/src/sscanf_s.c @@ -0,0 +1,61 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "securec.h" + +/* + * + * The sscanf_s function is equivalent to fscanf_s, + * except that input is obtained from a string (specified by the argument buffer) rather than from a stream + * The sscanf function reads data from buffer into the location given by each + * argument. Every argument must be a pointer to a variable with a type that + * corresponds to a type specifier in format. The format argument controls the + * interpretation of the input fields and has the same form and function as + * the format argument for the scanf function. + * If copying takes place between strings that overlap, the behavior is undefined. + * + * + * buffer Stored data. + * format Format control string, see Format Specifications. + * ... Optional arguments. + * + * + * ... The converted value stored in user assigned address + * + * + * Each of these functions returns the number of fields successfully converted + * and assigned; the return value does not include fields that were read but + * not assigned. + * A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. + */ +int sscanf_s(const char *buffer, const char *format, ...) +{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vsscanf_s(buffer, format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(sscanf_s); +#endif + + diff --git a/third_party/securec/src/strcat_s.c b/third_party/securec/src/strcat_s.c new file mode 100644 index 00000000..6bf1379b --- /dev/null +++ b/third_party/securec/src/strcat_s.c @@ -0,0 +1,102 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_STR_LEN 1 +#define SECUREC_INLINE_STR_LEN_OPT 1 +#define SECUREC_INLINE_DO_MEMCPY 1 +#include "securecutil.h" + +/* + * Befor this function, the basic parameter checking has been done + */ +static errno_t SecDoStrcat(char *strDest, size_t destMax, const char *strSrc) +{ + size_t destLen = SecStrMinLen(strDest, destMax); + /* Only optimize strSrc, do not apply this function to strDest */ + size_t srcLen = SecStrMinLenOpt(strSrc, destMax - destLen); + + if (SECUREC_CAT_STRING_IS_OVERLAP(strDest, destLen, strSrc, srcLen)) { + strDest[0] = '\0'; + if (strDest + destLen <= strSrc && destLen == destMax) { + SECUREC_ERROR_INVALID_PARAMTER("strcat_s"); + return EINVAL_AND_RESET; + } + SECUREC_ERROR_BUFFER_OVERLAP("strcat_s"); + return EOVERLAP_AND_RESET; + } + if (srcLen + destLen >= destMax || strDest == strSrc) { + strDest[0] = '\0'; + if (destLen == destMax) { + SECUREC_ERROR_INVALID_PARAMTER("strcat_s"); + return EINVAL_AND_RESET; + } + SECUREC_ERROR_INVALID_RANGE("strcat_s"); + return ERANGE_AND_RESET; + } + SecDoMemcpy(strDest + destLen, strSrc, srcLen + 1); /* single character length include \0 */ + return EOK; +} + +/* + * + * The strcat_s function appends a copy of the string pointed to by strSrc (including the terminating null character) + * to the end of the string pointed to by strDest. + * The initial character of strSrc overwrites the terminating null character of strDest. + * strcat_s will return EOVERLAP_AND_RESET if the source and destination strings overlap. 
+ * + * Note that the second parameter is the total size of the buffer, not the + * remaining size. + * + * + * strDest Null-terminated destination string buffer. + * destMax Size of the destination string buffer. + * strSrc Null-terminated source string buffer. + * + * + * strDest is updated + * + * + * EOK Success + * EINVAL strDest is NULL and destMax != 0 and destMax <= SECUREC_STRING_MAX_LEN + * EINVAL_AND_RESET (strDest unterminated and all other parameters are valid)or + * (strDest != NULL and strSrc is NULL and destMax != 0 and destMax <= SECUREC_STRING_MAX_LEN) + * ERANGE destMax is 0 and destMax > SECUREC_STRING_MAX_LEN + * ERANGE_AND_RESET strDest have not enough space and all other parameters are valid and not overlap + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and all parameters are valid + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +errno_t strcat_s(char *strDest, size_t destMax, const char *strSrc) +{ + if (destMax == 0 || destMax > SECUREC_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("strcat_s"); + return ERANGE; + } + if (strDest == NULL || strSrc == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("strcat_s"); + if (strDest != NULL) { + strDest[0] = '\0'; + return EINVAL_AND_RESET; + } + return EINVAL; + } + return SecDoStrcat(strDest, destMax, strSrc); +} + +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(strcat_s); +#endif + diff --git a/third_party/securec/src/strcpy_s.c b/third_party/securec/src/strcpy_s.c new file mode 100644 index 00000000..e248da7c --- /dev/null +++ b/third_party/securec/src/strcpy_s.c @@ -0,0 +1,351 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_STR_LEN 1 +#define SECUREC_INLINE_DO_MEMCPY 1 + +#include "securecutil.h" + +#if SECUREC_IN_KERNEL== 0 +#ifndef SECUREC_STRCOPY_THRESHOLD_SIZE +#define SECUREC_STRCOPY_THRESHOLD_SIZE 32UL +#endif + +/* + * Determine whether the address is 8-byte aligned, use static to increase performance + * return 0 is aligned + */ +static int SecIsAddrAligned8(const void *addr, const void *zeroAddr) +{ + return (int)(((size_t)((const char*)addr - (const char*)zeroAddr)) & 7); /* use 7 to check aligned 8 */ +} + +/* The purpose of converting to void is to clean up the alarm */ +#define SECUREC_SMALL_STR_COPY do { \ + if (SECUREC_ADDR_ALIGNED_8(strDest) && SECUREC_ADDR_ALIGNED_8(strSrc)) { \ + /* use struct assignment */ \ + switch (srcStrLen) { \ + case 1: \ + *(SecStrBuf1 *)(void *)strDest = *(const SecStrBuf1 *)(const void *)strSrc; \ + break; \ + case 2: \ + *(SecStrBuf2 *)(void *)strDest = *(const SecStrBuf2 *)(const void *)strSrc; \ + break; \ + case 3: \ + *(SecStrBuf3 *)(void *)strDest = *(const SecStrBuf3 *)(const void *)strSrc; \ + break; \ + case 4: \ + *(SecStrBuf4 *)(void *)strDest = *(const SecStrBuf4 *)(const void *)strSrc; \ + break; \ + case 5: \ + *(SecStrBuf5 *)(void *)strDest = *(const SecStrBuf5 *)(const void *)strSrc; \ + break; \ + case 6: \ + *(SecStrBuf6 *)(void *)strDest = *(const SecStrBuf6 *)(const void *)strSrc; \ + break; \ + case 7: \ + *(SecStrBuf7 *)(void *)strDest = *(const SecStrBuf7 *)(const void *)strSrc; \ + break; \ + case 8: \ + *(SecStrBuf8 *)(void *)strDest = *(const SecStrBuf8 *)(const void 
*)strSrc; \ + break; \ + case 9: \ + *(SecStrBuf9 *)(void *)strDest = *(const SecStrBuf9 *)(const void *)strSrc; \ + break; \ + case 10: \ + *(SecStrBuf10 *)(void *)strDest = *(const SecStrBuf10 *)(const void *)strSrc; \ + break; \ + case 11: \ + *(SecStrBuf11 *)(void *)strDest = *(const SecStrBuf11 *)(const void *)strSrc; \ + break; \ + case 12: \ + *(SecStrBuf12 *)(void *)strDest = *(const SecStrBuf12 *)(const void *)strSrc; \ + break; \ + case 13: \ + *(SecStrBuf13 *)(void *)strDest = *(const SecStrBuf13 *)(const void *)strSrc; \ + break; \ + case 14: \ + *(SecStrBuf14 *)(void *)strDest = *(const SecStrBuf14 *)(const void *)strSrc; \ + break; \ + case 15: \ + *(SecStrBuf15 *)(void *)strDest = *(const SecStrBuf15 *)(const void *)strSrc; \ + break; \ + case 16: \ + *(SecStrBuf16 *)(void *)strDest = *(const SecStrBuf16 *)(const void *)strSrc; \ + break; \ + case 17: \ + *(SecStrBuf17 *)(void *)strDest = *(const SecStrBuf17 *)(const void *)strSrc; \ + break; \ + case 18: \ + *(SecStrBuf18 *)(void *)strDest = *(const SecStrBuf18 *)(const void *)strSrc; \ + break; \ + case 19: \ + *(SecStrBuf19 *)(void *)strDest = *(const SecStrBuf19 *)(const void *)strSrc; \ + break; \ + case 20: \ + *(SecStrBuf20 *)(void *)strDest = *(const SecStrBuf20 *)(const void *)strSrc; \ + break; \ + case 21: \ + *(SecStrBuf21 *)(void *)strDest = *(const SecStrBuf21 *)(const void *)strSrc; \ + break; \ + case 22: \ + *(SecStrBuf22 *)(void *)strDest = *(const SecStrBuf22 *)(const void *)strSrc; \ + break; \ + case 23: \ + *(SecStrBuf23 *)(void *)strDest = *(const SecStrBuf23 *)(const void *)strSrc; \ + break; \ + case 24: \ + *(SecStrBuf24 *)(void *)strDest = *(const SecStrBuf24 *)(const void *)strSrc; \ + break; \ + case 25: \ + *(SecStrBuf25 *)(void *)strDest = *(const SecStrBuf25 *)(const void *)strSrc; \ + break; \ + case 26: \ + *(SecStrBuf26 *)(void *)strDest = *(const SecStrBuf26 *)(const void *)strSrc; \ + break; \ + case 27: \ + *(SecStrBuf27 *)(void *)strDest = *(const SecStrBuf27 
*)(const void *)strSrc; \ + break; \ + case 28: \ + *(SecStrBuf28 *)(void *)strDest = *(const SecStrBuf28 *)(const void *)strSrc; \ + break; \ + case 29: \ + *(SecStrBuf29 *)(void *)strDest = *(const SecStrBuf29 *)(const void *)strSrc; \ + break; \ + case 30: \ + *(SecStrBuf30 *)(void *)strDest = *(const SecStrBuf30 *)(const void *)strSrc; \ + break; \ + case 31: \ + *(SecStrBuf31 *)(void *)strDest = *(const SecStrBuf31 *)(const void *)strSrc; \ + break; \ + case 32: \ + *(SecStrBuf32 *)(void *)strDest = *(const SecStrBuf32 *)(const void *)strSrc; \ + break; \ + default: \ + break; \ + } /* END switch */ \ + } else { \ + char *tmpStrDest = (char *)strDest; \ + const char *tmpStrSrc = (const char *)strSrc; \ + switch (srcStrLen) { \ + case 32: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 31: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 30: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 29: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 28: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 27: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 26: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 25: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 24: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 23: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 22: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 21: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 20: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 19: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 18: 
\ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 17: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 16: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 15: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 14: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 13: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 12: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 11: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 10: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 9: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 8: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 7: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 6: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 5: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 4: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 3: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 2: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + case 1: \ + *(tmpStrDest++) = *(tmpStrSrc++); \ + /* fall-through */ /* FALLTHRU */ \ + default: \ + break; \ + } \ + } \ +} SECUREC_WHILE_ZERO +#endif + +/* + * Check Src Range + */ +static errno_t CheckSrcRange(char *strDest, size_t destMax, const char *strSrc) +{ + size_t tmpDestMax = destMax; + const char *tmpSrc = strSrc; + /* use destMax as boundary checker and destMax must be greater than zero */ + while (*(tmpSrc) != '\0' && tmpDestMax > 0) { + ++tmpSrc; + --tmpDestMax; + } + if (tmpDestMax 
== 0) { + strDest[0] = '\0'; + SECUREC_ERROR_INVALID_RANGE("strcpy_s"); + return ERANGE_AND_RESET; + } + return EOK; +} + +/* + * Handling errors + */ +errno_t strcpy_error(char *strDest, size_t destMax, const char *strSrc) +{ + if (destMax == 0 || destMax > SECUREC_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("strcpy_s"); + return ERANGE; + } else if (strDest == NULL || strSrc == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("strcpy_s"); + if (strDest != NULL) { + strDest[0] = '\0'; + return EINVAL_AND_RESET; + } + return EINVAL; + } + return CheckSrcRange(strDest, destMax, strSrc); +} + +/* + * Performance optimization. srcStrLen include '\0' + */ +static void SecDoStrcpyOpt(char *strDest, const char *strSrc, size_t srcStrLen) +{ +#if SECUREC_IN_KERNEL + SecDoMemcpy(strDest, strSrc, srcStrLen); +#else + if (srcStrLen > SECUREC_STRCOPY_THRESHOLD_SIZE) { + SecDoMemcpy(strDest, strSrc, srcStrLen); + } else { + SECUREC_SMALL_STR_COPY; + } +#endif +} + +/* + * + * The strcpy_s function copies the string pointed to strSrc + * (including the terminating null character) into the array pointed to by strDest + * The destination string must be large enough to hold the source string, + * including the terminating null character. strcpy_s will return EOVERLAP_AND_RESET + * if the source and destination strings overlap. + * + * + * strDest Location of destination string buffer + * destMax Size of the destination string buffer. + * strSrc Null-terminated source string buffer. + * + * + * strDest is updated. 
+ * + * + * EOK Success + * EINVAL strDest is NULL and destMax != 0 and destMax <= SECUREC_STRING_MAX_LEN + * EINVAL_AND_RESET strDest != NULL and strSrc is NULL and destMax != 0 and destMax <= SECUREC_STRING_MAX_LEN + * ERANGE destMax is 0 and destMax > SECUREC_STRING_MAX_LEN + * ERANGE_AND_RESET strDest have not enough space and all other parameters are valid and not overlap + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and all parameters are valid + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +errno_t strcpy_s(char *strDest, size_t destMax, const char *strSrc) +{ + if ((destMax > 0 && destMax <= SECUREC_STRING_MAX_LEN && strDest != NULL && strSrc != NULL && strDest != strSrc)) { + size_t srcStrLen = SecStrMinLen(strSrc, destMax) + 1; /* len include \0 */ + if (srcStrLen <= destMax) { + /* use mem overlap check include \0 */ + if (SECUREC_MEMORY_NO_OVERLAP(strDest, strSrc, srcStrLen)) { + /* performance optimization srcStrLen include '\0' */ + SecDoStrcpyOpt(strDest, strSrc, srcStrLen); + return EOK; + } else { + strDest[0] = '\0'; + SECUREC_ERROR_BUFFER_OVERLAP("strcpy_s"); + return EOVERLAP_AND_RESET; + } + } + } + return strcpy_error(strDest, destMax, strSrc); +} + +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(strcpy_s); +#endif + diff --git a/third_party/securec/src/strncat_s.c b/third_party/securec/src/strncat_s.c new file mode 100644 index 00000000..78234fd5 --- /dev/null +++ b/third_party/securec/src/strncat_s.c @@ -0,0 +1,121 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_STR_LEN 1 +#define SECUREC_INLINE_DO_MEMCPY 1 + +#include "securecutil.h" + +/* + * Befor this function, the basic parameter checking has been done + */ +static errno_t SecDoStrncat(char *strDest, size_t destMax, const char *strSrc, size_t count) +{ + size_t destLen = SecStrMinLen(strDest, destMax); + /* The strSrc is no longer optimized. The reason is that when count is small, + * the efficiency of strnlen is higher than that of self realization. + */ + size_t srcLen = SecStrMinLen(strSrc, count); + + if (SECUREC_CAT_STRING_IS_OVERLAP(strDest, destLen, strSrc, srcLen)) { + strDest[0] = '\0'; + if (strDest + destLen <= strSrc && destLen == destMax) { + SECUREC_ERROR_INVALID_PARAMTER("strncat_s"); + return EINVAL_AND_RESET; + } + SECUREC_ERROR_BUFFER_OVERLAP("strncat_s"); + return EOVERLAP_AND_RESET; + } + if (srcLen + destLen >= destMax || strDest == strSrc) { + strDest[0] = '\0'; + if (destLen == destMax) { + SECUREC_ERROR_INVALID_PARAMTER("strncat_s"); + return EINVAL_AND_RESET; + } + SECUREC_ERROR_INVALID_RANGE("strncat_s"); + return ERANGE_AND_RESET; + } + SecDoMemcpy(strDest + destLen, strSrc, srcLen); /* no terminator */ + *(strDest + destLen + srcLen) = '\0'; + return EOK; +} + +/* + * + * The strncat_s function appends not more than n successive characters + * (not including the terminating null character) + * from the array pointed to by strSrc to the end of the string pointed to by strDest + * The strncat_s function try to append the first D characters of strSrc to + * the end of strDest, where D is the 
lesser of count and the length of strSrc. + * If appending those D characters will fit within strDest (whose size is given + * as destMax) and still leave room for a null terminator, then those characters + * are appended, starting at the original terminating null of strDest, and a + * new terminating null is appended; otherwise, strDest[0] is set to the null + * character. + * + * + * strDest Null-terminated destination string. + * destMax Size of the destination buffer. + * strSrc Null-terminated source string. + * count Number of character to append, or truncate. + * + * + * strDest is updated + * + * + * EOK Success + * EINVAL strDest is NULL and destMax != 0 and destMax <= SECUREC_STRING_MAX_LEN + * EINVAL_AND_RESET (strDest unterminated and all other parameters are valid)or + * (strDest != NULL and strSrc is NULL and destMax != 0 and destMax <= SECUREC_STRING_MAX_LEN) + * ERANGE destMax is 0 and destMax > SECUREC_STRING_MAX_LEN + * ERANGE_AND_RESET strDest have not enough space and all other parameters are valid and not overlap + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and all parameters are valid + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +errno_t strncat_s(char *strDest, size_t destMax, const char *strSrc, size_t count) +{ + if (destMax == 0 || destMax > SECUREC_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("strncat_s"); + return ERANGE; + } + + if (strDest == NULL || strSrc == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("strncat_s"); + if (strDest != NULL) { + strDest[0] = '\0'; + return EINVAL_AND_RESET; + } + return EINVAL; + } + if (count > SECUREC_STRING_MAX_LEN) { +#ifdef SECUREC_COMPATIBLE_WIN_FORMAT + if (count == (size_t)(-1)) { + /* Windows internal functions may pass in -1 when calling this function */ + return SecDoStrncat(strDest, destMax, strSrc, destMax); + } +#endif + strDest[0] = '\0'; + SECUREC_ERROR_INVALID_RANGE("strncat_s"); + return 
ERANGE_AND_RESET; + } + return SecDoStrncat(strDest, destMax, strSrc, count); +} + +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(strncat_s); +#endif + diff --git a/third_party/securec/src/strncpy_s.c b/third_party/securec/src/strncpy_s.c new file mode 100644 index 00000000..493d1f74 --- /dev/null +++ b/third_party/securec/src/strncpy_s.c @@ -0,0 +1,143 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_STR_LEN 1 +#define SECUREC_INLINE_DO_MEMCPY 1 + +#include "securecutil.h" + +#if defined(SECUREC_COMPATIBLE_WIN_FORMAT) +#define SECUREC_STRNCPY_PARAM_OK(strDest, destMax, strSrc, count) \ + (((destMax) > 0 && (destMax) <= SECUREC_STRING_MAX_LEN && (strDest) != NULL && (strSrc) != NULL && \ + ((count) <= SECUREC_STRING_MAX_LEN || (count) == ((size_t)(-1))) && (count) > 0)) +#else +#define SECUREC_STRNCPY_PARAM_OK(strDest, destMax, strSrc, count) \ + (((destMax) > 0 && (destMax) <= SECUREC_STRING_MAX_LEN && (strDest) != NULL && (strSrc) != NULL && \ + (count) <= SECUREC_STRING_MAX_LEN && (count) > 0)) +#endif + +/* + * Check Src Count Range + */ +static errno_t CheckSrcCountRange(char *strDest, size_t destMax, const char *strSrc, size_t count) +{ + size_t tmpDestMax = destMax; + size_t tmpCount = count; + const char *endPos = strSrc; + + /* use destMax and count as boundary checker and destMax must be greater than zero */ + while (*(endPos) != '\0' && tmpDestMax > 0 && tmpCount > 0) { 
+ ++endPos; + --tmpCount; + --tmpDestMax; + } + if (tmpDestMax == 0) { + strDest[0] = '\0'; + SECUREC_ERROR_INVALID_RANGE("strncpy_s"); + return ERANGE_AND_RESET; + } + return EOK; +} + +/* + * Handling errors, when dest euqal src return EOK + */ +errno_t strncpy_error(char *strDest, size_t destMax, const char *strSrc, size_t count) +{ + if (destMax == 0 || destMax > SECUREC_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("strncpy_s"); + return ERANGE; + } else if (strDest == NULL || strSrc == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("strncpy_s"); + if (strDest != NULL) { + strDest[0] = '\0'; + return EINVAL_AND_RESET; + } + return EINVAL; + } else if (count > SECUREC_STRING_MAX_LEN) { + strDest[0] = '\0'; /* clear dest string */ + SECUREC_ERROR_INVALID_RANGE("strncpy_s"); + return ERANGE_AND_RESET; + } else if (count == 0) { + strDest[0] = '\0'; + return EOK; + } + + return CheckSrcCountRange(strDest, destMax, strSrc, count); +} + +/* + * + * The strncpy_s function copies not more than n successive characters (not including the terminating null character) + * from the array pointed to by strSrc to the array pointed to by strDest. + * + * + * strDest Destination string. + * destMax The size of the destination string, in characters. + * strSrc Source string. + * count Number of characters to be copied. 
+ * + * + * strDest is updated + * + * + * EOK Success + * EINVAL strDest is NULL and destMax != 0 and destMax <= SECUREC_STRING_MAX_LEN + * EINVAL_AND_RESET strDest != NULL and strSrc is NULL and destMax != 0 and destMax <= SECUREC_STRING_MAX_LEN + * ERANGE destMax is 0 and destMax > SECUREC_STRING_MAX_LEN + * ERANGE_AND_RESET strDest have not enough space and all other parameters are valid and not overlap + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and all parameters are valid + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +errno_t strncpy_s(char *strDest, size_t destMax, const char *strSrc, size_t count) +{ + if (SECUREC_STRNCPY_PARAM_OK(strDest, destMax, strSrc, count)) { + size_t minCpLen; /* use it to store the maxi length limit */ + if (count < destMax) { + minCpLen = SecStrMinLen(strSrc, count); /* no ending terminator */ + } else { + size_t tmpCount = destMax; +#ifdef SECUREC_COMPATIBLE_WIN_FORMAT + if (count == ((size_t)(-1))) { + tmpCount = destMax - 1; + } +#endif + minCpLen = SecStrMinLen(strSrc, tmpCount); + if (minCpLen == destMax) { + strDest[0] = '\0'; + SECUREC_ERROR_INVALID_RANGE("strncpy_s"); + return ERANGE_AND_RESET; + } + } + if (SECUREC_STRING_NO_OVERLAP(strDest, strSrc, minCpLen) || strDest == strSrc) { + /* Not overlap */ + SecDoMemcpy(strDest, strSrc, minCpLen); /* copy string without terminator */ + strDest[minCpLen] = '\0'; + return EOK; + } else { + strDest[0] = '\0'; + SECUREC_ERROR_BUFFER_OVERLAP("strncpy_s"); + return EOVERLAP_AND_RESET; + } + } + return strncpy_error(strDest, destMax, strSrc, count); +} + +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(strncpy_s); +#endif + diff --git a/third_party/securec/src/strtok_s.c b/third_party/securec/src/strtok_s.c new file mode 100644 index 00000000..18f977a7 --- /dev/null +++ b/third_party/securec/src/strtok_s.c @@ -0,0 +1,117 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed 
under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * Find beginning of token (skip over leading delimiters).Note that + * there is no token if this loop sets string to point to the terminal null. + */ +static char *SecFindBegin(char *strToken, const char *strDelimit) +{ + char *token = strToken; + while (*token != '\0') { + const char *ctl = strDelimit; + while (*ctl != '\0' && *ctl != *token) { + ++ctl; + } + if (*ctl == '\0') { /* don't find any delimiter in string header, break the loop */ + break; + } + ++token; + } + return token; +} + +/* + * Find rest of token + */ +static char *SecFindRest(char *strToken, const char *strDelimit) +{ + /* Find the rest of the token. If it is not the end of the string, + * put a null there. + */ + char *token = strToken; + while (*token != '\0') { + const char *ctl = strDelimit; + while (*ctl != '\0' && *ctl != *token) { + ++ctl; + } + if (*ctl != '\0') { /* find a delimiter */ + *token++ = '\0'; /* set string termintor */ + break; + } + ++token; + } + return token; +} + +/* + * Find the final position pointer + */ +static char *SecUpdateToken(char *strToken, const char *strDelimit, char **context) +{ + /* point to updated position */ + char *token = SecFindRest(strToken, strDelimit); + /* record string position for next search in the context */ + *context = token; + /* Determine if a token has been found. 
*/ + if (token == strToken) { + return NULL; + } + return strToken; +} + +/* + * + * The strtok_s function parses a string into a sequence of strToken, + * replace all characters in strToken string that match to strDelimit set with 0. + * On the first call to strtok_s the string to be parsed should be specified in strToken. + * In each subsequent call that should parse the same string, strToken should be NULL + * + * strToken String containing token or tokens. + * strDelimit Set of delimiter characters. + * context Used to store position information between calls + * to strtok_s + * + * context is updated + * + * On the first call returns the address of the first non \0 character, otherwise NULL is returned. + * In subsequent calls, the strtoken is set to NULL, and the context set is the same as the previous call, + * return NULL if the *context string length is equal 0, otherwise return *context. + */ +char *strtok_s(char *strToken, const char *strDelimit, char **context) +{ + char *orgToken = strToken; + /* validate delimiter and string context */ + if (context == NULL || strDelimit == NULL) { + return NULL; + } + /* valid input string and string pointer from where to search */ + if (orgToken == NULL && (*context) == NULL) { + return NULL; + } + /* If string is null, continue searching from previous string position stored in context */ + if (orgToken == NULL) { + orgToken = *context; + } + orgToken = SecFindBegin(orgToken, strDelimit); + return SecUpdateToken(orgToken, strDelimit, context); +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(strtok_s); +#endif + diff --git a/third_party/securec/src/swprintf_s.c b/third_party/securec/src/swprintf_s.c new file mode 100644 index 00000000..1fb0f6c7 --- /dev/null +++ b/third_party/securec/src/swprintf_s.c @@ -0,0 +1,51 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * + * The swprintf_s function is the wide-character equivalent of the sprintf_s function + * + * + * strDest Storage location for the output. + * destMax Maximum number of characters to store. + * format Format-control string. + * ... Optional arguments + * + * + * strDest is updated + * + * + * return the number of wide characters stored in strDest, not counting the terminating null wide character. + * return -1 if an error occurred. + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +int swprintf_s(wchar_t *strDest, size_t destMax, const wchar_t *format, ...) +{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vswprintf_s(strDest, destMax, format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} + + diff --git a/third_party/securec/src/swscanf_s.c b/third_party/securec/src/swscanf_s.c new file mode 100644 index 00000000..c16045fa --- /dev/null +++ b/third_party/securec/src/swscanf_s.c @@ -0,0 +1,57 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * + * The swscanf_s function is the wide-character equivalent of the sscanf_s function + * The swscanf_s function reads data from buffer into the location given by + * each argument. Every argument must be a pointer to a variable with a type + * that corresponds to a type specifier in format. The format argument controls + * the interpretation of the input fields and has the same form and function + * as the format argument for the scanf function. If copying takes place between + * strings that overlap, the behavior is undefined. + * + * + * buffer Stored data. + * format Format control string, see Format Specifications. + * ... Optional arguments. + * + * + * ... the converted value stored in user assigned address + * + * + * Each of these functions returns the number of fields successfully converted + * and assigned; The return value does not include fields that were read but not + * assigned. + * A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. + */ +int swscanf_s(const wchar_t *buffer, const wchar_t *format, ...) 
+{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vswscanf_s(buffer, format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} + + diff --git a/third_party/securec/src/vfscanf_s.c b/third_party/securec/src/vfscanf_s.c new file mode 100644 index 00000000..78444e4b --- /dev/null +++ b/third_party/securec/src/vfscanf_s.c @@ -0,0 +1,67 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "secinput.h" + +/* + * + * The vfscanf_s function is equivalent to fscanf_s, with the variable argument list replaced by argList + * The vfscanf_s function reads data from the current position of stream into + * the locations given by argument (if any). Each argument must be a pointer + * to a variable of a type that corresponds to a type specifier in format. + * format controls the interpretation of the input fields and has the same + * form and function as the format argument for scanf. + * + * + * stream Pointer to FILE structure. + * format Format control string, see Format Specifications. 
+ * argList pointer to list of arguments + * + * + * argList the converted value stored in user assigned address + * + * + * Each of these functions returns the number of fields successfully converted + * and assigned; the return value does not include fields that were read but + * not assigned. A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. + */ +int vfscanf_s(FILE *stream, const char *format, va_list argList) +{ + int retVal; /* If initialization causes e838 */ + SecFileStream fStr; + + if ((stream == NULL) || (format == NULL)) { + SECUREC_ERROR_INVALID_PARAMTER("vfscanf_s"); + return SECUREC_SCANF_EINVAL; + } + if (stream == stdin) { + return vscanf_s(format, argList); + } + + SECUREC_LOCK_FILE(stream); + SECUREC_INIT_SEC_FILE_STREAM(fStr, SECUREC_FILE_STREAM_FLAG, stream, SECUREC_UNINITIALIZED_FILE_POS, NULL, 0); + retVal = SecInputS(&fStr, format, argList); + SECUREC_UNLOCK_FILE(stream); + if (retVal < 0) { + SECUREC_ERROR_INVALID_PARAMTER("vfscanf_s"); + return SECUREC_SCANF_EINVAL; + } + + return retVal; +} + + diff --git a/third_party/securec/src/vfwscanf_s.c b/third_party/securec/src/vfwscanf_s.c new file mode 100644 index 00000000..3ae62eea --- /dev/null +++ b/third_party/securec/src/vfwscanf_s.c @@ -0,0 +1,66 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "secinput.h" + +/* + * + * The vfwscanf_s function is the wide-character equivalent of the vfscanf_s function + * The vfwscanf_s function reads data from the current position of stream into + * the locations given by argument (if any). Each argument must be a pointer + * to a variable of a type that corresponds to a type specifier in format. + * format controls the interpretation of the input fields and has the same form + * and function as the format argument for scanf. + * + * + * stream Pointer to FILE structure. + * format Format control string, see Format Specifications. + * argList pointer to list of arguments + * + * + * argList the converted value stored in user assigned address + * + * + * Each of these functions returns the number of fields successfully converted + * and assigned; the return value does not include fields that were read but + * not assigned. A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. + */ +int vfwscanf_s(FILE *stream, const wchar_t *format, va_list argList) +{ + int retVal; /* If initialization causes e838 */ + SecFileStream fStr; + + if ((stream == NULL) || (format == NULL)) { + SECUREC_ERROR_INVALID_PARAMTER("vfwscanf_s"); + return SECUREC_SCANF_EINVAL; + } + if (stream == stdin) { + return vwscanf_s(format, argList); + } + + SECUREC_LOCK_FILE(stream); + SECUREC_INIT_SEC_FILE_STREAM(fStr, SECUREC_FILE_STREAM_FLAG, stream, SECUREC_UNINITIALIZED_FILE_POS, NULL, 0); + retVal = SecInputSW(&fStr, format, argList); + SECUREC_UNLOCK_FILE(stream); + if (retVal < 0) { + SECUREC_ERROR_INVALID_PARAMTER("vfwscanf_s"); + return SECUREC_SCANF_EINVAL; + } + return retVal; +} + + diff --git a/third_party/securec/src/vscanf_s.c b/third_party/securec/src/vscanf_s.c new file mode 100644 index 00000000..66669765 --- /dev/null +++ b/third_party/securec/src/vscanf_s.c @@ -0,0 +1,68 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 
(the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "secinput.h" + +/* + * + * The vscanf_s function is equivalent to scanf_s, with the variable argument list replaced by argList, + * The vscanf_s function reads data from the standard input stream stdin and + * writes the data into the location that's given by argument. Each argument + * must be a pointer to a variable of a type that corresponds to a type specifier + * in format. If copying occurs between strings that overlap, the behavior is + * undefined. + * + * + * format Format control string. + * argList pointer to list of arguments + * + * + * argList the converted value stored in user assigned address + * + * + * Returns the number of fields successfully converted and assigned; + * the return value does not include fields that were read but not assigned. + * A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. + */ +int vscanf_s(const char *format, va_list argList) +{ + int retVal; /* If initialization causes e838 */ + SecFileStream fStr; + SECUREC_INIT_SEC_FILE_STREAM(fStr, SECUREC_FROM_STDIN_FLAG, stdin, 0, NULL, 0); + /* + * "va_list" has different definition on different platform, so we can't use argList == NULL + * to determine it's invalid. If you has fixed platform, you can check some fields to validate it, + * such as "argList == NULL" or argList.xxx != NULL or *(size_t *)&argList != 0. 
+ */ + if (format == NULL || fStr.pf == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("vscanf_s"); + return SECUREC_SCANF_EINVAL; + } + + SECUREC_LOCK_STDIN(0, fStr.pf); + + retVal = SecInputS(&fStr, format, argList); + + SECUREC_UNLOCK_STDIN(0, fStr.pf); + if (retVal < 0) { + SECUREC_ERROR_INVALID_PARAMTER("vscanf_s"); + return SECUREC_SCANF_EINVAL; + } + return retVal; +} + + diff --git a/third_party/securec/src/vsnprintf_s.c b/third_party/securec/src/vsnprintf_s.c new file mode 100644 index 00000000..dfa55bab --- /dev/null +++ b/third_party/securec/src/vsnprintf_s.c @@ -0,0 +1,149 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "secureprintoutput.h" + +#if SECUREC_ENABLE_VSNPRINTF +/* + * + * The vsnprintf_s function is equivalent to the vsnprintf function + * except for the parameter destMax/count and the explicit runtime-constraints violation + * The vsnprintf_s function takes a pointer to an argument list, then formats + * and writes up to count characters of the given data to the memory pointed + * to by strDest and appends a terminating null. + * + * + * strDest Storage location for the output. + * destMax The size of the strDest for output. + * count Maximum number of character to write(not including + * the terminating NULL) + * format Format-control string. + * argList pointer to list of arguments. 
+ * + * + * strDest is updated + * + * + * return the number of characters written, not including the terminating null + * return -1 if an error occurs. + * return -1 if count < destMax and the output string has been truncated + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +int vsnprintf_s(char *strDest, size_t destMax, size_t count, const char *format, va_list argList) +{ + int retVal; + + if (format == NULL || strDest == NULL || destMax == 0 || destMax > SECUREC_STRING_MAX_LEN || + (count > (SECUREC_STRING_MAX_LEN - 1) && count != (size_t)(-1))) { + if (strDest != NULL && destMax > 0 && destMax <= SECUREC_STRING_MAX_LEN) { + strDest[0] = '\0'; + } + SECUREC_ERROR_INVALID_PARAMTER("vsnprintf_s"); + return -1; + } + + if (destMax > count) { + retVal = SecVsnprintfImpl(strDest, count + 1, format, argList); + if (retVal == SECUREC_PRINTF_TRUNCATE) { /* lsd add to keep dest buffer not destroyed 2014.2.18 */ + /* the string has been truncated, return -1 */ + return -1; /* to skip error handler, return strlen(strDest) or -1 */ + } + } else { + retVal = SecVsnprintfImpl(strDest, destMax, format, argList); +#ifdef SECUREC_COMPATIBLE_WIN_FORMAT + if (retVal == SECUREC_PRINTF_TRUNCATE && count == (size_t)(-1)) { + return -1; + } +#endif + } + + if (retVal < 0) { + strDest[0] = '\0'; /* empty the dest strDest */ + + if (retVal == SECUREC_PRINTF_TRUNCATE) { + /* Buffer too small */ + SECUREC_ERROR_INVALID_RANGE("vsnprintf_s"); + } + + SECUREC_ERROR_INVALID_PARAMTER("vsnprintf_s"); + return -1; + } + + return retVal; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(vsnprintf_s); +#endif +#endif + +#if SECUREC_SNPRINTF_TRUNCATED +/* + * + * The vsnprintf_truncated_s function is equivalent to the vsnprintf function + * except for the parameter destMax/count and the explicit runtime-constraints violation + * The vsnprintf_truncated_s function takes a pointer to an argument list, then formats + * and writes up to 
count characters of the given data to the memory pointed + * to by strDest and appends a terminating null. + * + * + * strDest Storage location for the output. + * destMax The size of the strDest for output. + * the terminating NULL) + * format Format-control string. + * argList pointer to list of arguments. + * + * + * strDest is updated + * + * + * return the number of characters written, not including the terminating null + * return -1 if an error occurs. + * return destMax-1 if output string has been truncated + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +int vsnprintf_truncated_s(char *strDest, size_t destMax, const char *format, va_list argList) +{ + int retVal; + + if (format == NULL || strDest == NULL || destMax == 0 || destMax > SECUREC_STRING_MAX_LEN) { + if (strDest != NULL && destMax > 0 && destMax <= SECUREC_STRING_MAX_LEN) { + strDest[0] = '\0'; + } + SECUREC_ERROR_INVALID_PARAMTER("vsnprintf_truncated_s"); + return -1; + } + + retVal = SecVsnprintfImpl(strDest, destMax, format, argList); + + if (retVal < 0) { + if (retVal == SECUREC_PRINTF_TRUNCATE) { + return (int)(destMax - 1); /* to skip error handler, return strlen(strDest) */ + } + strDest[0] = '\0'; /* empty the dest strDest */ + SECUREC_ERROR_INVALID_PARAMTER("vsnprintf_truncated_s"); + return -1; + } + + return retVal; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(vsnprintf_truncated_s); +#endif +#endif + + diff --git a/third_party/securec/src/vsprintf_s.c b/third_party/securec/src/vsprintf_s.c new file mode 100644 index 00000000..e74c7748 --- /dev/null +++ b/third_party/securec/src/vsprintf_s.c @@ -0,0 +1,73 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "secureprintoutput.h" + +/* + * + * The vsprintf_s function is equivalent to the vsprintf function + * except for the parameter destMax and the explicit runtime-constraints violation + * The vsprintf_s function takes a pointer to an argument list, and then formats + * and writes the given data to the memory pointed to by strDest. + * The function differ from the non-secure versions only in that the secure + * versions support positional parameters. + * + * + * strDest Storage location for the output. + * destMax Size of strDest + * format Format specification. + * argList pointer to list of arguments + * + * + * strDest is updated + * + * + * return the number of characters written, not including the terminating null character, + * return -1 if an error occurs. 
+ * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +int vsprintf_s(char *strDest, size_t destMax, const char *format, va_list argList) +{ + int retVal; /* If initialization causes e838 */ + + if (format == NULL || strDest == NULL || destMax == 0 || destMax > SECUREC_STRING_MAX_LEN) { + if (strDest != NULL && destMax > 0 && destMax <= SECUREC_STRING_MAX_LEN) { + strDest[0] = '\0'; + } + SECUREC_ERROR_INVALID_PARAMTER("vsprintf_s"); + return -1; + } + + retVal = SecVsnprintfImpl(strDest, destMax, format, argList); + + if (retVal < 0) { + strDest[0] = '\0'; + if (retVal == SECUREC_PRINTF_TRUNCATE) { + /* Buffer is too small */ + SECUREC_ERROR_INVALID_RANGE("vsprintf_s"); + } + SECUREC_ERROR_INVALID_PARAMTER("vsprintf_s"); + return -1; + } + + return retVal; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(vsprintf_s); +#endif + + diff --git a/third_party/securec/src/vsscanf_s.c b/third_party/securec/src/vsscanf_s.c new file mode 100644 index 00000000..e0a5ecda --- /dev/null +++ b/third_party/securec/src/vsscanf_s.c @@ -0,0 +1,88 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. 
+ */ + +#include "secinput.h" +#if defined(SECUREC_VXWORKS_PLATFORM) && (!defined(SECUREC_SYSAPI4VXWORKS) && !defined(SECUREC_CTYPE_MACRO_ADAPT)) +#include +#endif + +/* + * + * vsscanf_s + * + * + * + * The vsscanf_s function is equivalent to sscanf_s, with the variable argument list replaced by argList + * The vsscanf_s function reads data from buffer into the location given by + * each argument. Every argument must be a pointer to a variable with a type + * that corresponds to a type specifier in format. The format argument controls + * the interpretation of the input fields and has the same form and function + * as the format argument for the scanf function. + * If copying takes place between strings that overlap, the behavior is undefined. + * + * + * buffer Stored data + * format Format control string, see Format Specifications. + * argList pointer to list of arguments + * + * + * argList the converted value stored in user assigned address + * + * + * Each of these functions returns the number of fields successfully converted + * and assigned; the return value does not include fields that were read but + * not assigned. A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. 
+ */ +int vsscanf_s(const char *buffer, const char *format, va_list argList) +{ + size_t count; /* If initialization causes e838 */ + int retVal; + SecFileStream fStr; + + /* validation section */ + if (buffer == NULL || format == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("vsscanf_s"); + return SECUREC_SCANF_EINVAL; + } + count = strlen(buffer); + if (count == 0 || count > SECUREC_STRING_MAX_LEN) { + SecClearDestBuf(buffer, format, argList); + SECUREC_ERROR_INVALID_PARAMTER("vsscanf_s"); + return SECUREC_SCANF_EINVAL; + } +#ifdef SECUREC_VXWORKS_PLATFORM + /* + * in vxworks platform when buffer is white string, will set first %s argument tu zero.like following useage: + * " \v\f\t\r\n", "%s", str, strSize + * do not check all character, just first and last character then consider it is white string + */ + if (isspace((int)buffer[0]) && isspace((int)buffer[count - 1])) { + SecClearDestBuf(buffer, format, argList); + } +#endif + SECUREC_INIT_SEC_FILE_STREAM(fStr, SECUREC_MEM_STR_FLAG, NULL, 0, buffer, (int)count); + retVal = SecInputS(&fStr, format, argList); + if (retVal < 0) { + SECUREC_ERROR_INVALID_PARAMTER("vsscanf_s"); + return SECUREC_SCANF_EINVAL; + } + return retVal; +} +#if SECUREC_IN_KERNEL +EXPORT_SYMBOL(vsscanf_s); +#endif + diff --git a/third_party/securec/src/vswprintf_s.c b/third_party/securec/src/vswprintf_s.c new file mode 100644 index 00000000..3403a6b5 --- /dev/null +++ b/third_party/securec/src/vswprintf_s.c @@ -0,0 +1,66 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "secureprintoutput.h" + + +/* + * + * The vswprintf_s function is the wide-character equivalent of the vsprintf_s function + * + * + * strDest Storage location for the output. + * destMax Size of strDest + * format Format specification. + * argList pointer to list of arguments + * + * + * strDest is updated + * + * + * return the number of wide characters stored in strDest, not counting the terminating null wide character. + * return -1 if an error occurred. + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +int vswprintf_s(wchar_t *strDest, size_t destMax, const wchar_t *format, va_list argList) +{ + int retVal; /* If initialization causes e838 */ + + if (format == NULL || strDest == NULL || destMax == 0 || destMax > (SECUREC_WCHAR_STRING_MAX_LEN)) { + if (strDest != NULL && destMax > 0) { + strDest[0] = '\0'; + } + SECUREC_ERROR_INVALID_PARAMTER("vswprintf_s"); + return -1; + } + + retVal = SecVswprintfImpl(strDest, destMax, format, argList); + + if (retVal < 0) { + strDest[0] = '\0'; + if (retVal == SECUREC_PRINTF_TRUNCATE) { + /* Buffer too small */ + SECUREC_ERROR_INVALID_RANGE("vswprintf_s"); + } + SECUREC_ERROR_INVALID_PARAMTER("vswprintf_s"); + return -1; + } + + return retVal; +} + + diff --git a/third_party/securec/src/vswscanf_s.c b/third_party/securec/src/vswscanf_s.c new file mode 100644 index 00000000..269e1053 --- /dev/null +++ b/third_party/securec/src/vswscanf_s.c @@ -0,0 +1,79 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "secinput.h" + +static size_t SecWcslen(const wchar_t *s) +{ + const wchar_t *end = s; + while (*end != L'\0') { + ++end; + } + return ((size_t)((end - s))); +} + +/* + * + * The vswscanf_s function is the wide-character equivalent of the vsscanf_s function + * The vsscanf_s function reads data from buffer into the location given by + * each argument. Every argument must be a pointer to a variable with a type + * that corresponds to a type specifier in format. + * The format argument controls the interpretation of the input fields and + * has the same form and function as the format argument for the scanf function. + * If copying takes place between strings that overlap, the behavior is undefined. + * + * + * buffer Stored data + * format Format control string, see Format Specifications. + * argList pointer to list of arguments + * + * + * argList the converted value stored in user assigned address + * + * + * Each of these functions returns the number of fields successfully converted + * and assigned; the return value does not include fields that were read but + * not assigned. A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. 
+ */ +int vswscanf_s(const wchar_t *buffer, const wchar_t *format, va_list argList) +{ + size_t count; /* If initialization causes e838 */ + SecFileStream fStr; + int retVal; + + /* validation section */ + if (buffer == NULL || format == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("vswscanf_s"); + return SECUREC_SCANF_EINVAL; + } + count = SecWcslen(buffer); + if (count == 0 || count > SECUREC_WCHAR_STRING_MAX_LEN) { + SecClearDestBufW(buffer, format, argList); + SECUREC_ERROR_INVALID_PARAMTER("vswscanf_s"); + return SECUREC_SCANF_EINVAL; + } + SECUREC_INIT_SEC_FILE_STREAM(fStr, SECUREC_MEM_STR_FLAG, NULL, 0,\ + (const char *)buffer, (int)count * ((int)sizeof(wchar_t))); + retVal = SecInputSW(&fStr, format, argList); + if (retVal < 0) { + SECUREC_ERROR_INVALID_PARAMTER("vswscanf_s"); + return SECUREC_SCANF_EINVAL; + } + return retVal; +} + + diff --git a/third_party/securec/src/vwscanf_s.c b/third_party/securec/src/vwscanf_s.c new file mode 100644 index 00000000..56e0f6b4 --- /dev/null +++ b/third_party/securec/src/vwscanf_s.c @@ -0,0 +1,67 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "secinput.h" + +/* + * + * The vwscanf_s function is the wide-character equivalent of the vscanf_s function + * The vwscanf_s function is the wide-character version of vscanf_s. 
The + * function reads data from the standard input stream stdin and writes the + * data into the location that's given by argument. Each argument must be a + * pointer to a variable of a type that corresponds to a type specifier in + * format. If copying occurs between strings that overlap, the behavior is + * undefined. + * + * + * format Format control string. + * argList pointer to list of arguments + * + * + * argList the converted value stored in user assigned address + * + * + * Returns the number of fields successfully converted and assigned; + * the return value does not include fields that were read but not assigned. + * A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. + */ +int vwscanf_s(const wchar_t *format, va_list argList) +{ + int retVal; /* If initialization causes e838 */ + SecFileStream fStr; + + SECUREC_INIT_SEC_FILE_STREAM(fStr, SECUREC_FROM_STDIN_FLAG, stdin, 0, NULL, 0); + if (format == NULL || fStr.pf == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("vwscanf_s"); + return SECUREC_SCANF_EINVAL; + } + + SECUREC_LOCK_STDIN(0, fStr.pf); + + retVal = SecInputSW(&fStr, format, argList); + + SECUREC_UNLOCK_STDIN(0, fStr.pf); + + if (retVal < 0) { + SECUREC_ERROR_INVALID_PARAMTER("vwscanf_s"); + return SECUREC_SCANF_EINVAL; + } + + return retVal; +} + + diff --git a/third_party/securec/src/wcscat_s.c b/third_party/securec/src/wcscat_s.c new file mode 100644 index 00000000..51254b3f --- /dev/null +++ b/third_party/securec/src/wcscat_s.c @@ -0,0 +1,111 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_DO_MEMCPY 1 + +#include "securecutil.h" + +/* + * Before this function, the basic parameter checking has been done + */ +static errno_t SecDoWcscat(wchar_t *strDest, size_t destMax, const wchar_t *strSrc) +{ + size_t destLen; + size_t srcLen; + size_t maxCount; /* Store the maximum available count */ + + /* To calculate the length of a wide character, the parameter must be a wide character */ + SECUREC_CALC_WSTR_LEN(strDest, destMax, &destLen); + maxCount = destMax - destLen; + SECUREC_CALC_WSTR_LEN(strSrc, maxCount, &srcLen); + + if (SECUREC_CAT_STRING_IS_OVERLAP(strDest, destLen, strSrc, srcLen)) { + strDest[0] = L'\0'; + if (strDest + destLen <= strSrc && destLen == destMax) { + SECUREC_ERROR_INVALID_PARAMTER("wcscat_s"); + return EINVAL_AND_RESET; + } + SECUREC_ERROR_BUFFER_OVERLAP("wcscat_s"); + return EOVERLAP_AND_RESET; + } + if (srcLen + destLen >= destMax || strDest == strSrc) { + strDest[0] = L'\0'; + if (destLen == destMax) { + SECUREC_ERROR_INVALID_PARAMTER("wcscat_s"); + return EINVAL_AND_RESET; + } + SECUREC_ERROR_INVALID_RANGE("wcscat_s"); + return ERANGE_AND_RESET; + } + SecDoMemcpy(strDest + destLen, strSrc, (srcLen + 1) * sizeof(wchar_t)); /* single character length include \0 */ + return EOK; +} + +/* + * + * The wcscat_s function appends a copy of the wide string pointed to by strSrc +* (including the terminating null wide character) + * to the end of the wide string pointed to by strDest. + * The arguments and return value of wcscat_s are wide-character strings. 
+ * + * The wcscat_s function appends strSrc to strDest and terminates the resulting + * string with a null character. The initial character of strSrc overwrites the + * terminating null character of strDest. wcscat_s will return EOVERLAP_AND_RESET if the + * source and destination strings overlap. + * + * Note that the second parameter is the total size of the buffer, not the + * remaining size. + * + * + * strDest Null-terminated destination string buffer. + * destMax Size of the destination string buffer. + * strSrc Null-terminated source string buffer. + * + * + * strDest is updated + * + * + * EOK Success + * EINVAL strDest is NULL and destMax != 0 and destMax <= SECUREC_WCHAR_STRING_MAX_LEN + * EINVAL_AND_RESET (strDest unterminated and all other parameters are valid) or + * (strDest != NULL and strSrc is NULLL and destMax != 0 + * and destMax <= SECUREC_WCHAR_STRING_MAX_LEN) + * ERANGE destMax > SECUREC_WCHAR_STRING_MAX_LEN or destMax is 0 + * ERANGE_AND_RESET strDest have not enough space and all other parameters are valid and not overlap + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and all parameters are valid + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +errno_t wcscat_s(wchar_t *strDest, size_t destMax, const wchar_t *strSrc) +{ + if (destMax == 0 || destMax > SECUREC_WCHAR_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("wcscat_s"); + return ERANGE; + } + + if (strDest == NULL || strSrc == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("wcscat_s"); + if (strDest != NULL) { + strDest[0] = L'\0'; + return EINVAL_AND_RESET; + } + return EINVAL; + } + + return SecDoWcscat(strDest, destMax, strSrc); +} + + diff --git a/third_party/securec/src/wcscpy_s.c b/third_party/securec/src/wcscpy_s.c new file mode 100644 index 00000000..2c348d4b --- /dev/null +++ b/third_party/securec/src/wcscpy_s.c @@ -0,0 +1,91 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * 
Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_DO_MEMCPY 1 + +#include "securecutil.h" + +static errno_t SecDoWcscpy(wchar_t *strDest, size_t destMax, const wchar_t *strSrc) +{ + size_t srcStrLen; + + SECUREC_CALC_WSTR_LEN(strSrc, destMax, &srcStrLen); + if (srcStrLen == destMax) { + strDest[0] = '\0'; + SECUREC_ERROR_INVALID_RANGE("wcscpy_s"); + return ERANGE_AND_RESET; + } + if (strDest == strSrc) { + return EOK; + } + + if (SECUREC_STRING_NO_OVERLAP(strDest, strSrc, srcStrLen)) { + /* performance optimization srcStrLen include '\0' */ + SecDoMemcpy(strDest, strSrc, (srcStrLen + 1) * sizeof(wchar_t)); /* single character length include \0 */ + return EOK; + } else { + strDest[0] = L'\0'; + SECUREC_ERROR_BUFFER_OVERLAP("wcscpy_s"); + return EOVERLAP_AND_RESET; + } +} + +/* + * + * The wcscpy_s function copies the wide string pointed to by strSrc + * (including theterminating null wide character) into the array pointed to by strDest + + * + * strDest Destination string buffer + * destMax Size of the destination string buffer. + * strSrc Null-terminated source string buffer. + * + * + * strDest is updated. 
+ * + * + * EOK Success + * EINVAL strDest is NULL and destMax != 0 and destMax <= SECUREC_WCHAR_STRING_MAX_LEN + * EINVAL_AND_RESET strDest != NULL and strSrc is NULLL and destMax != 0 + * and destMax <= SECUREC_WCHAR_STRING_MAX_LEN + * ERANGE destMax > SECUREC_WCHAR_STRING_MAX_LEN or destMax is 0 + * ERANGE_AND_RESET destMax <= length of strSrc and strDest != strSrc + * and strDest != NULL and strSrc != NULL and destMax != 0 + * and destMax <= SECUREC_WCHAR_STRING_MAX_LEN and not overlap + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and destMax != 0 + * and destMax <= SECUREC_WCHAR_STRING_MAX_LEN + * and strDest != NULL and strSrc !=NULL and strDest != strSrc + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +errno_t wcscpy_s(wchar_t *strDest, size_t destMax, const wchar_t *strSrc) +{ + if (destMax == 0 || destMax > SECUREC_WCHAR_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("wcscpy_s"); + return ERANGE; + } + if (strDest == NULL || strSrc == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("wcscpy_s"); + if (strDest != NULL) { + strDest[0] = L'\0'; + return EINVAL_AND_RESET; + } + return EINVAL; + } + return SecDoWcscpy(strDest, destMax, strSrc); +} + + diff --git a/third_party/securec/src/wcsncat_s.c b/third_party/securec/src/wcsncat_s.c new file mode 100644 index 00000000..bc9e6e39 --- /dev/null +++ b/third_party/securec/src/wcsncat_s.c @@ -0,0 +1,118 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. 
+ * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_DO_MEMCPY 1 + +#include "securecutil.h" + +/* + * Befor this function, the basic parameter checking has been done + */ +static errno_t SecDoWcsncat(wchar_t *strDest, size_t destMax, const wchar_t *strSrc, size_t count) +{ + size_t destLen; + size_t srcLen; + + /* To calculate the length of a wide character, the parameter must be a wide character */ + SECUREC_CALC_WSTR_LEN(strDest, destMax, &destLen); + SECUREC_CALC_WSTR_LEN(strSrc, count, &srcLen); + + if (SECUREC_CAT_STRING_IS_OVERLAP(strDest, destLen, strSrc, srcLen)) { + strDest[0] = L'\0'; + if (strDest + destLen <= strSrc && destLen == destMax) { + SECUREC_ERROR_INVALID_PARAMTER("wcsncat_s"); + return EINVAL_AND_RESET; + } + SECUREC_ERROR_BUFFER_OVERLAP("wcsncat_s"); + return EOVERLAP_AND_RESET; + } + if (srcLen + destLen >= destMax || strDest == strSrc) { + strDest[0] = L'\0'; + if (destLen == destMax) { + SECUREC_ERROR_INVALID_PARAMTER("wcsncat_s"); + return EINVAL_AND_RESET; + } + SECUREC_ERROR_INVALID_RANGE("wcsncat_s"); + return ERANGE_AND_RESET; + } + SecDoMemcpy(strDest + destLen, strSrc, srcLen * sizeof(wchar_t)); /* no terminator */ + *(strDest + destLen + srcLen) = L'\0'; + return EOK; +} + +/* + * + * The wcsncat_s function appends not more than n successive wide characters + * (not including the terminating null wide character) + * from the array pointed to by strSrc to the end of the wide string pointed to by strDest. + * + * The wcsncat_s function try to append the first D characters of strSrc to + * the end of strDest, where D is the lesser of count and the length of strSrc. 
+ * If appending those D characters will fit within strDest (whose size is + * given as destMax) and still leave room for a null terminator, then those + * characters are appended, starting at the original terminating null of + * strDest, and a new terminating null is appended; otherwise, strDest[0] is + * set to the null character. + * + * + * strDest Null-terminated destination string. + * destMax Size of the destination buffer. + * strSrc Null-terminated source string. + * count Number of character to append, or truncate. + * + * + * strDest is updated + * + * + * EOK Success + * EINVAL strDest is NULL and destMax != 0 and destMax <= SECUREC_WCHAR_STRING_MAX_LEN + * EINVAL_AND_RESET (strDest unterminated and all other parameters are valid) or + * (strDest != NULL and strSrc is NULLL and destMax != 0 and destMax <= SECUREC_WCHAR_STRING_MAX_LEN) + * ERANGE destMax > SECUREC_WCHAR_STRING_MAX_LEN or destMax is 0 + * ERANGE_AND_RESET strDest have not enough space and all other parameters are valid and not overlap + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and all parameters are valid + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +errno_t wcsncat_s(wchar_t *strDest, size_t destMax, const wchar_t *strSrc, size_t count) +{ + if (destMax == 0 || destMax > SECUREC_WCHAR_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("wcsncat_s"); + return ERANGE; + } + if (strDest == NULL || strSrc == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("wcsncat_s"); + if (strDest != NULL) { + strDest[0] = L'\0'; + return EINVAL_AND_RESET; + } + return EINVAL; + } + if (count > SECUREC_WCHAR_STRING_MAX_LEN) { +#ifdef SECUREC_COMPATIBLE_WIN_FORMAT + if (count == ((size_t)-1)) { + /* Windows internal functions may pass in -1 when calling this function */ + return SecDoWcsncat(strDest, destMax, strSrc, destMax); + } +#endif + strDest[0] = L'\0'; + SECUREC_ERROR_INVALID_RANGE("wcsncat_s"); + return 
ERANGE_AND_RESET; + } + return SecDoWcsncat(strDest, destMax, strSrc, count); +} + + diff --git a/third_party/securec/src/wcsncpy_s.c b/third_party/securec/src/wcsncpy_s.c new file mode 100644 index 00000000..746b1d44 --- /dev/null +++ b/third_party/securec/src/wcsncpy_s.c @@ -0,0 +1,111 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#define SECUREC_INLINE_DO_MEMCPY 1 + +#include "securecutil.h" + +static errno_t SecDoWcsncpy(wchar_t *strDest, size_t destMax, const wchar_t *strSrc, size_t count) +{ + size_t srcStrLen; + if (count < destMax) { + SECUREC_CALC_WSTR_LEN(strSrc, count, &srcStrLen); + } else { + SECUREC_CALC_WSTR_LEN(strSrc, destMax, &srcStrLen); + } + if (srcStrLen == destMax) { + strDest[0] = '\0'; + SECUREC_ERROR_INVALID_RANGE("wcsncpy_s"); + return ERANGE_AND_RESET; + } + if (strDest == strSrc) { + return EOK; + } + if (SECUREC_STRING_NO_OVERLAP(strDest, strSrc, srcStrLen)) { + /* performance optimization srcStrLen not include '\0' */ + SecDoMemcpy(strDest, strSrc, srcStrLen * sizeof(wchar_t)); + *(strDest + srcStrLen) = L'\0'; + return EOK; + } else { + strDest[0] = L'\0'; + SECUREC_ERROR_BUFFER_OVERLAP("wcsncpy_s"); + return EOVERLAP_AND_RESET; + } +} + +/* + * + * The wcsncpy_s function copies not more than n successive wide characters + * (not including the terminating null wide character) + * from the array pointed to by strSrc to the array pointed to by strDest + * + * + * 
strDest Destination string. + * destMax The size of the destination string, in characters. + * strSrc Source string. + * count Number of characters to be copied. + * + * + * strDest is updated + * + * + * EOK Success + * EINVAL strDest is NULL and destMax != 0 and destMax <= SECUREC_WCHAR_STRING_MAX_LEN + * EINVAL_AND_RESET strDest != NULL and strSrc is NULLL and destMax != 0 + * and destMax <= SECUREC_WCHAR_STRING_MAX_LEN + * ERANGE destMax > SECUREC_WCHAR_STRING_MAX_LEN or destMax is 0 + * ERANGE_AND_RESET count > SECUREC_WCHAR_STRING_MAX_LEN or + * (destMax <= length of strSrc and destMax <= count and strDest != strSrc + * and strDest != NULL and strSrc != NULL and destMax != 0 and + * destMax <= SECUREC_WCHAR_STRING_MAX_LEN and not overlap) + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and all parameters are valid + * + * + * If there is a runtime-constraint violation, strDest[0] will be set to the '\0' when strDest and destMax valid + */ +errno_t wcsncpy_s(wchar_t *strDest, size_t destMax, const wchar_t *strSrc, size_t count) +{ + if (destMax == 0 || destMax > SECUREC_WCHAR_STRING_MAX_LEN) { + SECUREC_ERROR_INVALID_RANGE("wcsncpy_s"); + return ERANGE; + } + if (strDest == NULL || strSrc == NULL) { + SECUREC_ERROR_INVALID_PARAMTER("wcsncpy_s"); + if (strDest != NULL) { + strDest[0] = '\0'; + return EINVAL_AND_RESET; + } + return EINVAL; + } + if (count > SECUREC_WCHAR_STRING_MAX_LEN) { +#ifdef SECUREC_COMPATIBLE_WIN_FORMAT + if (count == (size_t)(-1)) { + return SecDoWcsncpy(strDest, destMax, strSrc, destMax - 1); + } +#endif + strDest[0] = '\0'; /* clear dest string */ + SECUREC_ERROR_INVALID_RANGE("wcsncpy_s"); + return ERANGE_AND_RESET; + } + + if (count == 0) { + strDest[0] = '\0'; + return EOK; + } + + return SecDoWcsncpy(strDest, destMax, strSrc, count); +} + diff --git a/third_party/securec/src/wcstok_s.c b/third_party/securec/src/wcstok_s.c new file mode 100644 index 00000000..99c524f0 --- /dev/null +++ 
b/third_party/securec/src/wcstok_s.c @@ -0,0 +1,116 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * FindBegin Wide character position function + */ +static wchar_t *SecFindBeginW(wchar_t *strToken, const wchar_t *strDelimit) +{ + /* Find beginning of token (skip over leading delimiters). Note that + * there is no token if this loop sets string to point to the terminal null. + */ + wchar_t *token = strToken; + while (*token != L'\0') { + const wchar_t *ctl = strDelimit; + while (*ctl != L'\0' && *ctl != *token) { + ++ctl; + } + if (*ctl == L'\0') { + break; + } + ++token; + } + return token; +} + +/* + * FindBegin rest Wide character position function + */ +static wchar_t *SecFindRestW(wchar_t *strToken, const wchar_t *strDelimit) +{ + /* Find the end of the token. If it is not the end of the string, + * put a null there. 
+ */ + wchar_t *token = strToken; + while (*token != L'\0') { + const wchar_t *ctl = strDelimit; + while (*ctl != L'\0' && *ctl != *token) { + ++ctl; + } + if (*ctl != L'\0') { + *token++ = L'\0'; + break; + } + ++token; + } + return token; +} + +/* + * Update Token wide character function + */ +static wchar_t *SecUpdateTokenW(wchar_t *strToken, const wchar_t *strDelimit, wchar_t **context) +{ + /* point to updated position */ + wchar_t *token = SecFindRestW(strToken, strDelimit); + /* Update the context */ + *context = token; + /* Determine if a token has been found. */ + if (token == strToken) { + return NULL; + } + return strToken; +} + +/* + * + * wcstok_s + * + * + * + * The wcstok_s function is the wide-character equivalent of the strtok_s function + * + * + * strToken String containing token or tokens. + * strDelimit Set of delimiter characters. + * context Used to store position information between calls to + * wcstok_s. + * + * + * context is updated + * + * The wcstok_s function is the wide-character equivalent of the strtok_s function + */ +wchar_t *wcstok_s(wchar_t *strToken, const wchar_t *strDelimit, wchar_t **context) +{ + wchar_t *orgToken = strToken; + /* validation section */ + if (context == NULL || strDelimit == NULL) { + return NULL; + } + if (orgToken == NULL && (*context) == NULL) { + return NULL; + } + /* If string==NULL, continue with previous string */ + if (orgToken == NULL) { + orgToken = *context; + } + orgToken = SecFindBeginW(orgToken, strDelimit); + return SecUpdateTokenW(orgToken, strDelimit, context); +} + diff --git a/third_party/securec/src/wmemcpy_s.c b/third_party/securec/src/wmemcpy_s.c new file mode 100644 index 00000000..236fcce1 --- /dev/null +++ b/third_party/securec/src/wmemcpy_s.c @@ -0,0 +1,68 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securecutil.h" + +/* + * + * The wmemcpy_s function copies n successive wide characters + * from the object pointed to by src into the object pointed to by dest. + * + * + * dest Destination buffer. + * destMax Size of the destination buffer. + * src Buffer to copy from. + * count Number of characters to copy. + * + * + * dest buffer is updated. + * + * + * EOK Success + * EINVAL dest is NULL and destMax != 0 and count <= destMax + * and destMax <= SECUREC_WCHAR_MEM_MAX_LEN + * EINVAL_AND_RESET dest != NULL and src is NULL and destMax != 0 + * and destMax <= SECUREC_WCHAR_MEM_MAX_LEN and count <= destMax + * ERANGE destMax > SECUREC_WCHAR_MEM_MAX_LEN or destMax is 0 or + * (count > destMax and dest is NULL and destMax != 0 + * and destMax <= SECUREC_WCHAR_MEM_MAX_LEN) + * ERANGE_AND_RESET count > destMax and dest != NULL and destMax != 0 + * and destMax <= SECUREC_WCHAR_MEM_MAX_LEN + * EOVERLAP_AND_RESET dest buffer and source buffer are overlapped and + * count <= destMax and destMax != 0 and destMax <= SECUREC_WCHAR_MEM_MAX_LEN + * and dest != NULL and src != NULL and dest != src + * + * if an error occurred, dest will be filled with 0 when dest and destMax valid. + * If the source and destination overlap, the behavior of wmemcpy_s is undefined. + * Use wmemmove_s to handle overlapping regions. 
+ */ +errno_t wmemcpy_s(wchar_t *dest, size_t destMax, const wchar_t *src, size_t count) +{ + if (destMax == 0 || destMax > SECUREC_WCHAR_MEM_MAX_LEN) { + SECUREC_ERROR_INVALID_PARAMTER("wmemcpy_s"); + return ERANGE; + } + if (count > destMax) { + SECUREC_ERROR_INVALID_PARAMTER("wmemcpy_s"); + if (dest != NULL) { + (void)memset(dest, 0, destMax * sizeof(wchar_t)); + return ERANGE_AND_RESET; + } + return ERANGE; + } + return memcpy_s(dest, destMax * sizeof(wchar_t), src, count * sizeof(wchar_t)); +} + diff --git a/third_party/securec/src/wmemmove_s.c b/third_party/securec/src/wmemmove_s.c new file mode 100644 index 00000000..2ef549a0 --- /dev/null +++ b/third_party/securec/src/wmemmove_s.c @@ -0,0 +1,67 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. + * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securecutil.h" + +/* + * + * The wmemmove_s function copies n successive wide characters from the object pointed + * to by src into the object pointed to by dest. + * + * + * dest Destination buffer. + * destMax Size of the destination buffer. + * src Source object. + * count Number of bytes or character to copy. + * + * + * dest is updated. 
+ * + * + * EOK Success + * EINVAL dest is NULL and destMax != 0 and count <= destMax + * and destMax <= SECUREC_WCHAR_MEM_MAX_LEN + * EINVAL_AND_RESET dest != NULL and src is NULLL and destMax != 0 + * and destMax <= SECUREC_WCHAR_MEM_MAX_LEN and count <= destMax + * ERANGE destMax > SECUREC_WCHAR_MEM_MAX_LEN or destMax is 0 or + * (count > destMax and dest is NULL and destMax != 0 + * and destMax <= SECUREC_WCHAR_MEM_MAX_LEN) + * ERANGE_AND_RESET count > destMax and dest != NULL and destMax != 0 + * and destMax <= SECUREC_WCHAR_MEM_MAX_LEN + * + * + * If an error occured, dest will be filled with 0 when dest and destMax valid. + * If some regions of the source area and the destination overlap, wmemmove_s + * ensures that the original source bytes in the overlapping region are copied + * before being overwritten + */ +errno_t wmemmove_s(wchar_t *dest, size_t destMax, const wchar_t *src, size_t count) +{ + if (destMax == 0 || destMax > SECUREC_WCHAR_MEM_MAX_LEN) { + SECUREC_ERROR_INVALID_PARAMTER("wmemmove_s"); + return ERANGE; + } + if (count > destMax) { + SECUREC_ERROR_INVALID_PARAMTER("wmemmove_s"); + if (dest != NULL) { + (void)memset(dest, 0, destMax * sizeof(wchar_t)); + return ERANGE_AND_RESET; + } + return ERANGE; + } + return memmove_s(dest, destMax * sizeof(wchar_t), src, count * sizeof(wchar_t)); +} + diff --git a/third_party/securec/src/wscanf_s.c b/third_party/securec/src/wscanf_s.c new file mode 100644 index 00000000..c1dcce27 --- /dev/null +++ b/third_party/securec/src/wscanf_s.c @@ -0,0 +1,55 @@ +/** + * Copyright 2020 Huawei Technologies Co., Ltd + * + * Licensed under the Apache License, Version 2.0 (the "License"); + * you may not use this file except in compliance with the License. 
+ * You may obtain a copy of the License at + * + * http://www.apache.org/licenses/LICENSE-2.0 + * + * Unless required by applicable law or agreed to in writing, software + * distributed under the License is distributed on an "AS IS" BASIS, + * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. + * See the License for the specific language governing permissions and + * limitations under the License. + */ + +#include "securec.h" + +/* + * + * + * The wscanf_s function is the wide-character equivalent of the scanf_s function + * The wscanf_s function reads data from the standard input stream stdin and + * writes the data into the location that's given by argument. Each argument + * must be a pointer to a variable of a type that corresponds to a type specifier + * in format. If copying occurs between strings that overlap, the behavior is + * undefined. + * + * + * format Format control string. + * ... Optional arguments. + * + * + * ... the converted value stored in user assigned address + * + * + * Returns the number of fields successfully converted and assigned; + * the return value does not include fields that were read but not assigned. + * A return value of 0 indicates that no fields were assigned. + * return -1 if an error occurs. + */ + +int wscanf_s(const wchar_t *format, ...) +{ + int ret; /* If initialization causes e838 */ + va_list argList; + + va_start(argList, format); + ret = vwscanf_s(format, argList); + va_end(argList); + (void)argList; /* to clear e438 last value assigned not used , the compiler will optimize this code */ + + return ret; +} +